code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Appsignal.Utils.DataEncoder do
  @moduledoc """
  Encodes Elixir terms into AppSignal NIF data resources.

  Structs are converted to maps and tuples to lists before encoding.
  Map entries and list elements are written into the NIF resource with
  the type-specific `Appsignal.Nif.data_set_*` functions.
  """
  alias Appsignal.Nif

  # Signed 64-bit integer bounds. Integers outside this range cannot be
  # stored natively by the NIF, so they are encoded as "bigint:..." strings.
  @int64_max 9_223_372_036_854_775_807
  @int64_min -9_223_372_036_854_775_808

  @doc """
  Encodes a map, struct, list or tuple into a new NIF data resource.
  """
  def encode(%{__struct__: _} = data) do
    data |> Map.from_struct() |> encode
  end

  def encode(data) when is_tuple(data) do
    data |> Tuple.to_list() |> encode
  end

  def encode(data) when is_map(data) do
    {:ok, resource} = Nif.data_map_new()
    Enum.each(data, fn item -> encode(resource, item) end)
    resource
  end

  def encode(data) when is_list(data) do
    {:ok, resource} = Nif.data_list_new()
    Enum.each(data, fn item -> encode(resource, item) end)
    resource
  end

  @doc """
  Writes a `{key, value}` pair into a map resource, or a bare value into
  a list resource. Clause order matters: key normalisation first, then
  value-type dispatch, with booleans and `nil` matched before the general
  atom clause.
  """
  # Atom keys are converted to strings before encoding the value.
  def encode(resource, {key, value}) when is_atom(key) do
    encode(resource, {to_string(key), value})
  end

  # Any other non-binary key is inspected into a readable string.
  def encode(resource, {key, value}) when not is_binary(key) do
    encode(resource, {inspect(key), value})
  end

  def encode(resource, {key, value}) when is_binary(value) do
    Nif.data_set_string(resource, key, value)
  end

  # Integers above the signed int64 maximum are stored as "bigint:"
  # strings so they survive the NIF boundary without overflow.
  def encode(resource, {key, value}) when is_integer(value) and value > @int64_max do
    Nif.data_set_string(resource, key, "bigint:#{value}")
  end

  # Symmetric fix: integers below the signed int64 minimum would also
  # overflow `data_set_integer`, so they get the same string fallback.
  def encode(resource, {key, value}) when is_integer(value) and value < @int64_min do
    Nif.data_set_string(resource, key, "bigint:#{value}")
  end

  def encode(resource, {key, value}) when is_integer(value) do
    Nif.data_set_integer(resource, key, value)
  end

  def encode(resource, {key, value}) when is_float(value) do
    Nif.data_set_float(resource, key, value)
  end

  def encode(resource, {key, value}) when is_map(value) or is_tuple(value) do
    Nif.data_set_data(resource, key, encode(value))
  end

  # Improper lists cannot be traversed by Enum, so they are stored as an
  # inspected string instead of a nested data resource.
  def encode(resource, {key, value}) when is_list(value) do
    if proper_list?(value) do
      Nif.data_set_data(resource, key, encode(value))
    else
      Nif.data_set_string(
        resource,
        key,
        "improper_list:#{inspect(value)}"
      )
    end
  end

  # Booleans and nil must be matched before the general atom clause.
  def encode(resource, {key, true}) do
    Nif.data_set_boolean(resource, key, 1)
  end

  def encode(resource, {key, false}) do
    Nif.data_set_boolean(resource, key, 0)
  end

  def encode(resource, {key, nil}) do
    Nif.data_set_nil(resource, key)
  end

  def encode(resource, {key, value}) when is_atom(value) do
    encode(resource, {key, to_string(value)})
  end

  # Fallback: anything else (pids, refs, functions, ...) becomes a string.
  def encode(resource, {key, value}) do
    encode(resource, {key, inspect(value)})
  end

  # Bare-value clauses below mirror the keyed clauses above and append
  # values to a list resource.
  def encode(resource, value) when is_binary(value) do
    Nif.data_set_string(resource, value)
  end

  def encode(resource, value) when is_integer(value) and value > @int64_max do
    Nif.data_set_string(resource, "bigint:#{value}")
  end

  def encode(resource, value) when is_integer(value) and value < @int64_min do
    Nif.data_set_string(resource, "bigint:#{value}")
  end

  def encode(resource, value) when is_integer(value) do
    Nif.data_set_integer(resource, value)
  end

  def encode(resource, value) when is_float(value) do
    Nif.data_set_float(resource, value)
  end

  def encode(resource, value) when is_map(value) or is_tuple(value) do
    Nif.data_set_data(resource, encode(value))
  end

  def encode(resource, value) when is_list(value) do
    if proper_list?(value) do
      Nif.data_set_data(resource, encode(value))
    else
      Nif.data_set_string(
        resource,
        "improper_list:#{inspect(value)}"
      )
    end
  end

  def encode(resource, true) do
    Nif.data_set_boolean(resource, 1)
  end

  def encode(resource, false) do
    Nif.data_set_boolean(resource, 0)
  end

  def encode(resource, nil) do
    Nif.data_set_nil(resource)
  end

  def encode(resource, value) when is_atom(value) do
    encode(resource, to_string(value))
  end

  def encode(resource, value) do
    encode(resource, inspect(value))
  end

  @doc """
  Returns true when the argument is a proper (nil-terminated) list.

  An improper list such as `[1 | 2]` has a non-list tail and cannot be
  traversed with Enum.
  """
  def proper_list?([_head | tail]) when is_list(tail) do
    proper_list?(tail)
  end

  def proper_list?([]), do: true
  def proper_list?(_), do: false
end
|
lib/appsignal/utils/data_encoder.ex
| 0.64791
| 0.443179
|
data_encoder.ex
|
starcoder
|
defmodule EWalletConfig.Setting do
  @moduledoc """
  Schema overlay acting as an interface to the StoredSetting schema.

  This is needed because some transformation is applied to the
  attributes before saving them to the database. Indeed, value is stored
  in a map to allow any type to be saved, but for simplicity, the
  users never need to know that - all they need to care about is the "value"
  field.

  Here are some explanations about some of the fields of the settings:

  - position
    Position is used to have a constant order for settings that we define.
    It cannot be updated and is only set at creation. We can add more settings
    in the seeds later and fix their positions.

  - parent and parent_value
    Those are used to link settings together in a very simple way. No logic is
    actually implemented for those, it's mostly intended to be used by clients
    (like the admin panel) to show settings in a logical way. So if someone
    selects gcs for file_storage, you can show all settings that have file_storage
    as a parent where parent_value=gcs.
  """
  require Ecto.Query
  use ActivityLogger.ActivityLogging
  alias EWalletConfig.{Repo, StoredSetting, Setting}
  alias Ecto.{Changeset, Query}

  defstruct [
    :uuid,
    :id,
    :key,
    :value,
    :type,
    :description,
    :options,
    :parent,
    :parent_value,
    :secret,
    :position,
    :inserted_at,
    :updated_at
  ]

  @spec get_setting_mappings() :: [map()]
  def get_setting_mappings, do: Application.get_env(:ewallet_config, :settings_mappings)

  @spec get_default_settings() :: [map()]
  def get_default_settings, do: Application.get_env(:ewallet_config, :default_settings)

  @spec types() :: [String.t()]
  def types, do: StoredSetting.types()

  @doc """
  Retrieves all settings, ordered by their fixed position.
  """
  @spec all() :: [%Setting{}]
  def all do
    StoredSetting
    |> Query.order_by(asc: :position)
    |> Repo.all()
    |> Enum.map(&build/1)
  end

  def query do
    StoredSetting
  end

  @doc """
  Retrieves a setting by its string (or atom) name. Returns nil when the
  key is nil, unknown, or of an unsupported type.
  """
  @spec get(String.t() | atom() | nil) :: %Setting{} | nil
  # This clause must come before the atom clause: `is_atom(nil)` is true,
  # so without it a nil key would be stringified to "nil" and needlessly
  # looked up in the database.
  def get(nil), do: nil

  def get(key) when is_atom(key) do
    get(Atom.to_string(key))
  end

  def get(key) when is_binary(key) do
    case Repo.get_by(StoredSetting, key: key) do
      nil -> nil
      stored_setting -> build(stored_setting)
    end
  end

  def get(_), do: nil

  @doc """
  Retrieves a setting's value by its string (or atom) name, falling back
  to `default` when the setting does not exist.
  """
  @spec get_value(String.t() | atom() | nil, any()) :: any()
  def get_value(key, default \\ nil)

  # Bug fix: this clause was previously declared after the atom clause and
  # therefore unreachable (nil is an atom), which made `get_value(nil)`
  # query the database for the key "nil" and return `default` instead of nil.
  def get_value(nil, _), do: nil

  def get_value(key, default) when is_atom(key) do
    key
    |> Atom.to_string()
    |> get_value(default)
  end

  def get_value(key, default) when is_binary(key) do
    case Repo.get_by(StoredSetting, key: key) do
      nil -> default
      stored_setting -> extract_value(stored_setting)
    end
  end

  @doc """
  Creates a new setting with the passed attributes.
  """
  @spec insert(map()) :: {:ok, %Setting{}} | {:error, %Changeset{}}
  def insert(attrs) do
    attrs = cast_attrs(attrs)

    %StoredSetting{}
    |> StoredSetting.changeset(attrs)
    |> Repo.insert_record_with_activity_log()
    |> return_from_change()
  end

  @doc """
  Inserts all the default settings inside a single transaction.

  Returns `:ok` when every default was inserted, `:error` when any insert
  failed, or `{:error, :setting_insert_failed}` when the transaction itself
  failed.
  """
  @spec insert_all_defaults(map(), map()) :: :ok | :error | {:error, :setting_insert_failed}
  def insert_all_defaults(originator, overrides \\ %{}) do
    Repo.transaction(fn ->
      get_default_settings()
      |> Enum.map(fn data ->
        insert_default(data, originator, overrides)
      end)
      |> all_defaults_inserted?()
    end)
    |> return_tx_result()
  end

  # Inserts one default setting, applying a value override when present.
  defp insert_default({key, data}, originator, overrides) do
    data = Map.put(data, :originator, originator)

    case overrides[key] do
      nil ->
        insert(data)

      override ->
        data
        |> Map.put(:value, override)
        |> insert()
    end
  end

  # True only when every insert returned {:ok, _}.
  defp all_defaults_inserted?(list) do
    Enum.all?(list, &match?({:ok, _}, &1))
  end

  defp return_tx_result({:ok, true}), do: :ok
  defp return_tx_result({:ok, false}), do: :error
  defp return_tx_result({:error, _}), do: {:error, :setting_insert_failed}

  @doc """
  Updates the setting identified by `key` with the given attributes.
  """
  @spec update(String.t() | atom() | nil, map()) ::
          {:ok, %Setting{}} | {:error, atom()} | {:error, Changeset.t()}
  def update(nil, _), do: {:error, :setting_not_found}

  def update(key, attrs) when is_atom(key) do
    key
    |> Atom.to_string()
    |> update(attrs)
  end

  def update(key, attrs) when is_binary(key) do
    case Repo.get_by(StoredSetting, %{key: key}) do
      nil ->
        {:error, :setting_not_found}

      setting ->
        attrs = cast_attrs(setting, attrs)

        setting
        |> StoredSetting.update_changeset(attrs)
        |> Repo.update_record_with_activity_log()
        |> return_from_change()
    end
  end

  @doc """
  Updates many settings at once from either a keyword list or a list of maps.
  """
  @spec update_all(list()) :: [{any(), {:ok, %Setting{}} | {:error, any()}}]
  def update_all(attrs) when is_list(attrs) do
    case Keyword.keyword?(attrs) do
      true -> update_all_with_keyword_list(attrs)
      false -> update_all_with_map_list(attrs)
    end
  end

  @spec update_all(map()) :: [{any(), {:ok, %Setting{}} | {:error, any()}}]
  def update_all(attrs) do
    originator = attrs[:originator]

    attrs
    |> Map.delete(:originator)
    |> Enum.map(fn {key, value} ->
      {key,
       update(key, %{
         value: value,
         originator: originator
       })}
    end)
  end

  @doc """
  Locks all stored settings for update inside the current transaction.
  """
  def lock_all do
    StoredSetting
    |> Query.lock("FOR UPDATE")
    |> Repo.all()
  end

  @doc """
  Locks the stored settings matching the given keys for update.
  """
  def lock(keys) do
    StoredSetting
    |> Query.lock("FOR UPDATE")
    |> Query.where([s], s.key in ^keys)
    |> Repo.all()
  end

  @doc """
  Builds a %Setting{} overlay struct from a stored setting record,
  extracting the plain value out of the (possibly encrypted) data map.
  """
  def build(stored_setting) do
    %Setting{
      uuid: stored_setting.uuid,
      id: stored_setting.id,
      key: stored_setting.key,
      value: extract_value(stored_setting),
      type: stored_setting.type,
      description: stored_setting.description,
      options: get_options(stored_setting),
      parent: stored_setting.parent,
      parent_value: stored_setting.parent_value,
      secret: stored_setting.secret,
      position: stored_setting.position,
      inserted_at: stored_setting.inserted_at,
      updated_at: stored_setting.updated_at
    }
  end

  defp update_all_with_keyword_list(attrs) do
    originator = attrs[:originator]

    attrs
    |> Keyword.delete(:originator)
    |> Enum.map(fn {key, value} ->
      {key, update(key, %{value: value, originator: originator})}
    end)
  end

  defp update_all_with_map_list(attrs) do
    Enum.map(attrs, fn data ->
      key = data[:key] || data["key"]
      {key, update(key, data)}
    end)
  end

  # Prepares attributes for insertion.
  defp cast_attrs(attrs) do
    attrs
    |> cast_value()
    |> cast_options()
    |> add_position()
  end

  # Prepares attributes for an update of an existing setting; the existing
  # setting decides whether the value is stored encrypted.
  defp cast_attrs(setting, attrs) do
    attrs
    |> cast_value(setting)
    |> cast_options()
    |> add_position()
  end

  defp return_from_change({:ok, stored_setting}) do
    {:ok, build(stored_setting)}
  end

  defp return_from_change({:error, changeset}) do
    {:error, changeset}
  end

  # Secret settings keep their value inside encrypted_data; the key may be
  # an atom or a string depending on how the map was (de)serialized.
  defp extract_value(%{secret: true, encrypted_data: nil}), do: nil

  defp extract_value(%{secret: true, encrypted_data: data}) do
    case Map.get(data, :value) do
      nil -> Map.get(data, "value")
      value -> value
    end
  end

  defp extract_value(%{secret: false, data: nil}), do: nil

  defp extract_value(%{secret: false, data: data}) do
    case Map.get(data, :value) do
      nil -> Map.get(data, "value")
      value -> value
    end
  end

  defp get_options(%{options: nil}), do: nil

  defp get_options(%{options: options}) do
    Map.get(options, :array) || Map.get(options, "array")
  end

  # Update path: route the value to encrypted_data or data depending on
  # whether the existing setting is secret.
  defp cast_value(%{value: value} = attrs, %{secret: true}) do
    Map.put(attrs, :encrypted_data, %{value: value})
  end

  defp cast_value(%{value: value} = attrs, _) do
    Map.put(attrs, :data, %{value: value})
  end

  defp cast_value(attrs, _), do: attrs

  # Insert path: the attrs themselves say whether the setting is secret.
  defp cast_value(%{secret: true, value: value} = attrs) do
    Map.put(attrs, :encrypted_data, %{value: value})
  end

  defp cast_value(%{value: value} = attrs) do
    Map.put(attrs, :data, %{value: value})
  end

  defp cast_value(attrs) do
    Map.put(attrs, :data, %{value: nil})
  end

  defp cast_options(%{options: nil} = attrs), do: attrs

  defp cast_options(%{options: options} = attrs) do
    Map.put(attrs, :options, %{array: options})
  end

  defp cast_options(attrs), do: attrs

  # Keep an explicitly provided integer position (nil is not an integer,
  # so the previous `not is_nil/1` check was redundant).
  defp add_position(%{position: position} = attrs) when is_integer(position) do
    attrs
  end

  # Otherwise append after the current highest position (or start at 0).
  defp add_position(attrs) do
    case get_last_setting() do
      nil ->
        Map.put(attrs, :position, 0)

      latest_setting ->
        Map.put(attrs, :position, latest_setting.position + 1)
    end
  end

  defp get_last_setting do
    StoredSetting
    |> Query.order_by(desc: :position)
    |> Query.limit(1)
    |> Repo.one()
  end
end
|
apps/ewallet_config/lib/ewallet_config/setting.ex
| 0.799912
| 0.431464
|
setting.ex
|
starcoder
|
defmodule NeuralNetwork do
  @moduledoc """
  A layered neural network built from `NeuralNetwork.Layer` processes.

  A network is created from a configuration (a list of layer sizes),
  trained with `train/3`, and queried with `process/2`.
  """
  defstruct layers: []

  alias NeuralNetwork.Layer

  @doc """
  Creates a network from a list of layer sizes and connects adjacent layers.
  """
  def create(configuration) do
    layers = create_layers(configuration)
    connect_layers(layers)
    %NeuralNetwork{layers: layers}
  end

  @doc """
  Creates one Layer per entry in the configuration list.
  """
  def create_layers(configuration) do
    Enum.map(configuration, fn num ->
      Layer.create(num)
    end)
  end

  @doc """
  Connects each layer to the next one.
  """
  def connect_layers(layers) do
    layers
    # Enum.chunk/3 is deprecated (removed in recent Elixir); chunk_every
    # with :discard produces the same sliding [src, dest] pairs.
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.each(fn [src, dest] -> Layer.connect(src, dest) end)
  end

  @doc """
  Sets the outputs of the input (first) layer and returns the network.
  """
  def set_inputs(network, inputs) do
    network.layers
    |> Enum.at(0)
    |> Layer.set_outputs(inputs)

    network
  end

  @doc """
  Reads the outputs of the output (last) layer.
  """
  def get_outputs(network) do
    network.layers |> Enum.at(-1) |> Layer.get_outputs()
  end

  @doc """
  Propagates values forward through every layer and returns the network.
  """
  def prop_forward(network) do
    network.layers
    |> Enum.at(0)
    |> Layer.prop_forward()

    network.layers
    |> Enum.drop(1)
    |> Enum.each(fn layer ->
      Layer.update_outputs(layer)
      Layer.prop_forward(layer)
    end)

    network
  end

  @doc """
  Propagates errors backward, starting from the output layer.
  """
  def prop_backward(network, target_outputs) do
    # Propagate output layer backwards
    network.layers
    |> List.last()
    |> Layer.prop_backward(target_outputs)

    # Propagate hidden layers backwards
    network.layers
    |> Enum.reverse()
    |> Enum.drop(1)
    |> Enum.each(&Layer.prop_backward/1)

    # Return the network for chaining operations
    network
  end

  @doc """
  Adjusts connection weights after back-propagation.
  """
  def adjust_weights(network, target_outputs) do
    # Adjust weights of the output layer
    network.layers
    |> List.last()
    |> Layer.adjust_weights(target_outputs)

    # Adjust weights of the hidden layers
    network.layers
    |> Enum.reverse()
    |> Enum.drop(1)
    |> Enum.each(&Layer.adjust_weights/1)

    # Return the network for chaining operations
    network
  end

  @doc """
  Returns the layer sizes (neuron counts) of the network.
  """
  def get_configuration(network) do
    Enum.map(network.layers, fn layer ->
      length(layer.neurons)
    end)
  end

  @doc """
  Collects the incoming connections of every layer.
  """
  def get_in_conns(network) do
    Enum.map(network.layers, fn layer ->
      Layer.get_in_conns(layer)
    end)
  end

  @doc """
  Restores previously collected incoming connections, layer by layer.
  """
  def set_in_conns(network, in_conns) do
    [network.layers, in_conns]
    |> List.zip()
    |> Enum.each(fn {layer, layer_in_conns} ->
      Layer.set_in_conns(layer, layer_in_conns)
    end)

    network
  end

  @doc """
  Fetches the state of a single neuron, addressed by layer and neuron index.
  """
  def get_neuron_state(network, layer, neuron) do
    network.layers
    |> Enum.at(layer)
    |> Map.get(:neurons)
    |> Enum.at(neuron)
    |> GenServer.call(:get_state)
  end

  @doc """
  Runs one forward pass for the given inputs and returns the outputs.
  """
  def process(network, inputs) do
    network
    |> NeuralNetwork.set_inputs(inputs)
    |> NeuralNetwork.prop_forward()
    |> NeuralNetwork.get_outputs()
  end

  @doc """
  Performs one training step and returns the resulting total error.
  """
  def train(network, inputs, target_outputs) do
    network
    |> NeuralNetwork.set_inputs(inputs)
    |> NeuralNetwork.prop_forward()
    |> NeuralNetwork.prop_backward(target_outputs)
    |> NeuralNetwork.adjust_weights(target_outputs)

    calculate_error(network, target_outputs)
  end

  @doc """
  Sums the absolute differences between actual and target outputs.
  """
  def calculate_error(network, target_outputs) do
    outputs = NeuralNetwork.get_outputs(network)

    [outputs, target_outputs]
    |> List.zip()
    |> Enum.map(fn {output, target_output} ->
      abs(target_output - output)
    end)
    |> Enum.sum()
  end
end
|
lib/neural_network.ex
| 0.892132
| 0.587973
|
neural_network.ex
|
starcoder
|
defmodule CodeFlow.Streams do
  @moduledoc """
  Code that sets up scenarios where you can play and develop a better sense for
  how Enum and Stream compare.
  """

  # NOTE: each experiment pair deliberately repeats the same 11-step map
  # pipeline. Do not collapse the steps: the point is to measure how eager
  # Enum (one intermediate list per step) compares to lazy Stream (one
  # fused pass) for identical work.

  @doc """
  Experiment 1 (Enum): 11 eager map passes ending in a full list.
  """
  def experiment_1_enum(data) do
    simple_measurements(fn ->
      data
      |> Enum.map(&(&1 * 2))
      |> Enum.map(&(&1 + 1))
      |> Enum.map(&(&1 + 2))
      |> Enum.map(&(&1 + 3))
      |> Enum.map(&(&1 + 4))
      |> Enum.map(&(&1 + 5))
      |> Enum.map(&(&1 + 6))
      |> Enum.map(&(&1 + 7))
      |> Enum.map(&(&1 + 8))
      |> Enum.map(&(&1 + 9))
      |> Enum.map(&(&1 - 10))
      |> Enum.to_list()
    end)
  end

  @doc """
  Experiment 1 (Stream): same work, but lazily composed and evaluated once
  at the terminal `Enum.to_list/1`.
  """
  def experiment_1_stream(data) do
    simple_measurements(fn ->
      data
      |> Stream.map(&(&1 * 2))
      |> Stream.map(&(&1 + 1))
      |> Stream.map(&(&1 + 2))
      |> Stream.map(&(&1 + 3))
      |> Stream.map(&(&1 + 4))
      |> Stream.map(&(&1 + 5))
      |> Stream.map(&(&1 + 6))
      |> Stream.map(&(&1 + 7))
      |> Stream.map(&(&1 + 8))
      |> Stream.map(&(&1 + 9))
      |> Stream.map(&(&1 - 10))
      |> Enum.to_list()
    end)
  end

  @doc """
  Experiment 2 (Enum): same pipeline, reduced to a single sum.
  """
  def experiment_2_enum(data) do
    simple_measurements(fn ->
      data
      |> Enum.map(&(&1 * 2))
      |> Enum.map(&(&1 + 1))
      |> Enum.map(&(&1 + 2))
      |> Enum.map(&(&1 + 3))
      |> Enum.map(&(&1 + 4))
      |> Enum.map(&(&1 + 5))
      |> Enum.map(&(&1 + 6))
      |> Enum.map(&(&1 + 7))
      |> Enum.map(&(&1 + 8))
      |> Enum.map(&(&1 + 9))
      |> Enum.map(&(&1 - 10))
      |> Enum.sum()
    end)
  end

  @doc """
  Experiment 2 (Stream): the terminal `Enum.sum/1` drives the lazy chain,
  so no intermediate lists are materialized.
  """
  def experiment_2_stream(data) do
    simple_measurements(fn ->
      data
      |> Stream.map(&(&1 * 2))
      |> Stream.map(&(&1 + 1))
      |> Stream.map(&(&1 + 2))
      |> Stream.map(&(&1 + 3))
      |> Stream.map(&(&1 + 4))
      |> Stream.map(&(&1 + 5))
      |> Stream.map(&(&1 + 6))
      |> Stream.map(&(&1 + 7))
      |> Stream.map(&(&1 + 8))
      |> Stream.map(&(&1 + 9))
      |> Stream.map(&(&1 - 10))
      |> Enum.sum()
    end)
  end

  @doc """
  Experiment 3 (Enum): eager pipeline that only keeps 5 results — every
  step still processes the entire input first.
  """
  def experiment_3_enum(data) do
    simple_measurements(fn ->
      data
      |> Enum.map(&(&1 * 2))
      |> Enum.map(&(&1 + 1))
      |> Enum.map(&(&1 + 2))
      |> Enum.map(&(&1 + 3))
      |> Enum.map(&(&1 + 4))
      |> Enum.map(&(&1 + 5))
      |> Enum.map(&(&1 + 6))
      |> Enum.map(&(&1 + 7))
      |> Enum.map(&(&1 + 8))
      |> Enum.map(&(&1 + 9))
      |> Enum.map(&(&1 - 10))
      |> Enum.take(5)
    end)
  end

  @doc """
  Experiment 3 (Stream): the lazy chain lets `Enum.take/2` stop after
  pulling only 5 elements through the pipeline.
  """
  def experiment_3_stream(data) do
    simple_measurements(fn ->
      data
      |> Stream.map(&(&1 * 2))
      |> Stream.map(&(&1 + 1))
      |> Stream.map(&(&1 + 2))
      |> Stream.map(&(&1 + 3))
      |> Stream.map(&(&1 + 4))
      |> Stream.map(&(&1 + 5))
      |> Stream.map(&(&1 + 6))
      |> Stream.map(&(&1 + 7))
      |> Stream.map(&(&1 + 8))
      |> Stream.map(&(&1 + 9))
      |> Stream.map(&(&1 - 10))
      |> Enum.take(5)
    end)
  end

  @doc """
  Experiment 4 (Enum): counts words in a file by reading the whole file
  into memory up front.
  """
  def experiment_4_enum() do
    simple_measurements(fn ->
      contents = File.read!("./test/support/lorem.txt")
      lines = String.split(contents, "\n")

      lines
      |> Enum.map(fn line ->
        line
        |> String.split(" ")
        |> Enum.count()
      end)
      |> Enum.sum()
      |> IO.inspect(label: "Total words counted")
    end)
  end

  @doc """
  Experiment 4 (Stream): counts words by streaming the file line by line,
  keeping memory usage flat.
  """
  def experiment_4_stream() do
    simple_measurements(fn ->
      File.stream!("./test/support/lorem.txt", [:read], :line)
      |> Stream.map(fn line ->
        line
        |> String.split(" ")
        |> Enum.count()
      end)
      |> Enum.sum()
      |> IO.inspect(label: "Total words counted")
    end)
  end

  @doc """
  Returns the current process heap size formatted as "X.XX MB".
  """
  def process_memory() do
    {:memory, value} = :erlang.process_info(self(), :memory)
    :erlang.float_to_binary(value / 1024 / 1024, decimals: 2) <> " MB"
  end

  @doc """
  This function just helps wrap the experimental one inside some crude
  measurement tools. These help give some feedback about how the function
  performed.
  """
  def simple_measurements(fun) do
    # Force garbage collection before running the experiment.
    # You would never do this in a production system!
    :erlang.garbage_collect()
    # Print out the starting memory usage
    IO.puts(process_memory())
    # start the timer
    start = Time.utc_now()
    # execute the passed in function. Ignore the result as that isn't what we
    # care about right now.
    _result = fun.()
    # stop the timer, output the final memory usage and time.
    stop = Time.utc_now()
    IO.puts(process_memory())
    IO.puts("#{Time.diff(stop, start, :millisecond)} msec")
    # We don't actually care to receive the function's result. Return an `:ok`
    # prevents a very large returned value from being added to the IEx history
    # which takes up more RAM and isn't part of a normal running system.
    :ok
  end
end
|
code_flow/lib/streams.ex
| 0.658418
| 0.45423
|
streams.ex
|
starcoder
|
defmodule Arfficionado.Handler do
  @moduledoc """
  Handler behaviour.

  `Arfficionado.read/4` will:
  1. call `init(arg)` to obtain the initial handler state (`arg` defaults to `nil`)
  2. parse the ARFF header and
     - call `relation` with the relation name and optional comment
     - call `attributes` with the list of attributes found in the header
     - call `line_comment` for lines that consist of commentary only
     - call `begin_data` with optional comment to indicate that the header is finished and instance data will follow
  3. parse the ARFF data section
     - call `instance` for each line of data, reporting instance values, weight and optional comment
     - call `line_comment` for lines that consist of commentary only
  4. call `close`

  `Arfficionado.read/4` will pass in the current handler state on each callback invocation. Callback functions return an updated handler state and generally indicate whether to continue reading the ARFF file or to stop early.

  Once the ARFF file is exhausted, an error is encountered in the ARFF file, or the handler has indicated that it wishes to stop the processing, the `close` callback will be invoked, allowing the handler to create the final result state (and clean up resources).

  Implement this behaviour as per your application's requirements (for example, including side effects such as creating and writing to an ETS table). The sources for `Arfficionado.ListHandler` and `Arfficionado.MapHandler` may provide some general guidance.
  """

  @typedoc """
  A module implementing the `Arfficionado.Handler` behaviour.
  """
  # Fixed: the previous definition was `Arfficionado.Handler.t()`, i.e. the
  # type referenced itself recursively and could never resolve. A "handler"
  # is a module that implements this behaviour.
  @type t :: module()

  @typedoc """
  Your handler's state.
  """
  @type state() :: any

  @typedoc """
  Return value for most handler callbacks.
  """
  @type updated_state() :: {:cont, state()} | {:halt, state()}

  @typedoc """
  A comment or nil.
  """
  @type optional_comment :: comment() | nil

  @typedoc """
  A comment.
  """
  @type comment :: String.t()

  @typedoc """
  An instance's attribute-values.
  """
  @type values() :: list(value())

  @typedoc """
  An attribute-value.
  """
  @type value() :: number() | String.t() | atom() | DateTime.t()

  @typedoc """
  Instance weight.
  """
  @type weight() :: integer()

  @typedoc """
  List of attribute definitions (ARFF header).
  """
  @type attributes() :: list(attribute())

  @typedoc """
  An attribute definition with name, type and optional comment.
  """
  @type attribute() :: {:attribute, name(), attribute_type(), optional_comment()}

  @typedoc """
  Attribute type.
  """
  @type attribute_type() :: :numeric | {:nominal, list(atom())} | :string | {:date, date_format()}

  @typedoc """
  Date format.
  """
  @type date_format() :: :iso_8601 | String.t()

  @typedoc """
  Name for an attribute/relation.
  """
  @type name :: String.t()

  @typedoc """
  Success/failure indicator.
  """
  @type status() :: :ok | :error

  @doc """
  Creates initial state from given argument.
  """
  @callback init(any()) :: state()

  @doc """
  Invoked when a line is encountered that contains nothing but a comment.
  """
  @callback line_comment(comment(), state()) :: updated_state()
  @optional_callbacks line_comment: 2

  @doc """
  Invoked when @relation is encountered, reporting the relation name and an optional comment.
  """
  @callback relation(name(), optional_comment(), state()) :: updated_state()
  @optional_callbacks relation: 3

  @doc """
  Invoked once all @attributes have been parsed.
  """
  @callback attributes(attributes(), state()) :: updated_state()

  @doc """
  Invoked when @data has been encountered, reporting the optional comment following @data.
  """
  @callback begin_data(optional_comment(), state()) :: updated_state()
  @optional_callbacks begin_data: 2

  @doc """
  Invoked for each data instance, reporting the list of attribute-values (in the order given in the ARFF header), the instance weight (defaulting to 1) and an optional comment.

  Return `{:cont, updated_state}` to continue parsing, or `{:halt, updated_state}` to stop early.
  """
  @callback instance(values(), weight(), optional_comment(), state()) :: updated_state()

  @doc """
  Invoked when the processing has finished. The first argument indicates whether processing was successful (`:ok`) or an error was encountered (`:error`).
  """
  @callback close(status(), state()) :: state()
end
|
lib/handler.ex
| 0.879354
| 0.663785
|
handler.ex
|
starcoder
|
defmodule Membrane.RTSP.Response do
  @moduledoc """
  This module represents a RTSP response.
  """
  alias Membrane.Protocol.SDP

  @start_line_regex ~r/^RTSP\/(\d\.\d) (\d\d\d) [A-Z a-z]+$/
  @line_ending ["\r\n", "\r", "\n"]

  @enforce_keys [:status, :version]
  defstruct @enforce_keys ++ [headers: [], body: ""]

  @type t :: %__MODULE__{
          status: non_neg_integer(),
          headers: Membrane.RTSP.headers(),
          body: SDP.Session.t() | binary()
        }

  @type result :: {:ok, t()} | {:error, atom()}

  @doc """
  Parses RTSP response.

  If the body is present it will be parsed according to `Content-Type` header.
  Currently only the `application/sdp` is supported.
  """
  @spec parse(binary()) ::
          {:ok, t()} | {:error, atom() | {:malformed_header, binary()}}
  def parse(response) do
    # A response without a blank-line separator has no body. The previous
    # hard match `[headers, body] = ...` raised MatchError in that case
    # instead of parsing the header-only response.
    {headers, body} =
      case String.split(response, ["\r\n\r\n", "\n\n", "\r\r"], parts: 2) do
        [headers, body] -> {headers, body}
        [headers] -> {headers, ""}
      end

    with {:ok, {response, headers}} <- parse_start_line(headers),
         {:ok, headers} <- parse_headers(headers),
         {:ok, body} <- parse_body(body, headers) do
      {:ok, %__MODULE__{response | headers: headers, body: body}}
    end
  end

  @doc """
  Retrieves the first header matching given name from a response.

  ```
  iex> response = %Response{
  ...>   status: 200,
  ...>   version: "1.0",
  ...>   headers: [{"header_name", "header_value"}]
  ...> }
  iex> Response.get_header(response, "header_name")
  {:ok, "header_value"}
  iex> Response.get_header(response, "non_existent_header")
  {:error, :no_such_header}
  ```
  """
  @spec get_header(__MODULE__.t(), binary()) :: {:error, :no_such_header} | {:ok, binary()}
  def get_header(%__MODULE__{headers: headers}, name) do
    case List.keyfind(headers, name, 0) do
      {_, value} -> {:ok, value}
      nil -> {:error, :no_such_header}
    end
  end

  @spec parse_start_line(raw_response :: binary()) ::
          {:ok, {response :: t(), remainder :: binary}}
          | {:error, :invalid_start_line | :invalid_status_code}
  defp parse_start_line(binary) do
    # A start line with no trailing line ending previously crashed on the
    # hard two-element match; treat the remainder as empty instead.
    {line, rest} =
      case String.split(binary, @line_ending, parts: 2) do
        [line, rest] -> {line, rest}
        [line] -> {line, ""}
      end

    case Regex.run(@start_line_regex, line) do
      [_, version, code] ->
        case Integer.parse(code) do
          :error ->
            {:error, :invalid_status_code}

          {code, _} when is_number(code) ->
            response = %__MODULE__{version: version, status: code}
            {:ok, {response, rest}}
        end

      _other ->
        {:error, :invalid_start_line}
    end
  end

  # An empty remainder means the response carried no headers at all;
  # without this clause it would be reported as a malformed "" header.
  defp parse_headers(""), do: {:ok, []}

  defp parse_headers(headers) do
    headers
    |> String.split(@line_ending)
    |> Bunch.Enum.try_map(fn header ->
      case String.split(header, ":", parts: 2) do
        [name, " " <> value] -> {:ok, {name, value}}
        _else -> {:error, {:malformed_header, header}}
      end
    end)
  end

  # Only application/sdp bodies are parsed; everything else is returned raw.
  defp parse_body(data, headers) do
    case List.keyfind(headers, "Content-Type", 0) do
      {"Content-Type", "application/sdp"} ->
        with {:ok, result} <- SDP.parse(data) do
          {:ok, result}
        end

      _else ->
        {:ok, data}
    end
  end
end
|
lib/membrane_rtsp/response.ex
| 0.884788
| 0.665818
|
response.ex
|
starcoder
|
defmodule PiviEx do
  @moduledoc """
  Documentation for `PiviEx`.

  Creates a pivot table from a list.
  The struct holds its original data in the data attribute.

      data
      |> pivot(fn r -> {r.company_id, r.account_id} end,
               fn r -> {Period.period(r.date)} end,
               fn r -> Decimal.sub(r.debit, r.credit) end)
  """
  @me __MODULE__
  alias PiviEx.Period

  defstruct(
    data: nil,
    row_sum: %{},
    col_sum: %{},
    element: %{},
    total: nil
  )

  @doc """
  Wraps raw data in a new pivot struct.
  """
  def new(data) do
    %@me{data: data}
  end

  @doc """
  Builds the pivot from row-key, column-key and amount functions.

      data
      |> pivot(fn r -> {r.company_id, r.account_id} end,
               fn r -> {Period.period(r.date)} end,
               fn r -> Decimal.sub(r.debit, r.credit) end)
  """
  def pivot(%@me{data: data} = _pi, row, col, amount) do
    _pivot(data, row, col, amount, new(data))
  end

  def pivot(lst, row, col, amount) when is_list(lst) do
    _pivot(lst, row, col, amount, new(lst))
  end

  # Base case: all rows consumed; derive column totals, row totals and the
  # grand total from the accumulated elements.
  defp _pivot([], _row, _col, _amount, stu) do
    col_sum =
      Enum.reduce(stu.element, %{}, fn {{_, col}, amount}, acc ->
        Map.update(acc, col, amount, &Decimal.add(&1, amount))
      end)

    row_sum =
      Enum.reduce(stu.element, %{}, fn {{row, _}, amount}, acc ->
        Map.update(acc, row, amount, &Decimal.add(&1, amount))
      end)

    total = Enum.reduce(Map.values(col_sum), 0, &Decimal.add(&1, &2))
    %{stu | col_sum: col_sum, row_sum: row_sum, total: total}
  end

  # Accumulate one record into the {row, col} element map; a nil amount
  # counts as zero.
  defp _pivot([h | t], row, col, amount, stu) do
    row_h = row.(h)
    col_h = col.(h)
    amount_h = if amount.(h) == nil, do: Decimal.new(0), else: amount.(h)

    calculate_element =
      Map.update(stu.element, {row_h, col_h}, amount_h, &Decimal.add(&1, amount_h))

    stu = Map.put(stu, :element, calculate_element)
    _pivot(t, row, col, amount, stu)
  end

  # One nil cell per component of the row key, used to left-pad header and
  # footer rows. Returns [] for an empty pivot (previously crashed on hd/1).
  defp empty_table_cells(%@me{row_sum: row_sum}) when map_size(row_sum) == 0, do: []

  defp empty_table_cells(%@me{} = me) do
    hd(Map.keys(me.row_sum))
    |> Tuple.to_list()
    |> Enum.map(fn _ -> nil end)
  end

  @doc """
  Returns the header for the calculated elements.

  Optionally add a list of titles to the row elements.
  """
  def head_as_list(%@me{} = me) do
    # Bug fix: the column keys must be sorted here, as they are in
    # footer_as_list/1 and row_as_list/2 — otherwise header labels can be
    # misaligned with the data columns.
    head_list = Map.keys(me.col_sum) |> Enum.sort()

    lst =
      for head <- head_list do
        Tuple.to_list(head) |> Enum.join("-")
      end

    empty_table_cells(me) ++ lst ++ ["Total"]
  end

  def head_as_list(%@me{} = me, row_titles) when is_list(row_titles) do
    head_list = Map.keys(me.col_sum) |> Enum.sort()

    lst =
      for head <- head_list do
        Tuple.to_list(head) |> Enum.join("-")
      end

    row_titles ++ lst ++ ["Total"]
  end

  @doc """
  Returns the footer row: column totals followed by the grand total.
  """
  def footer_as_list(%@me{} = me) do
    head_list = Map.keys(me.col_sum) |> Enum.sort()

    lst =
      for head <- head_list do
        Map.get(me.col_sum, head, Decimal.new(0))
      end

    empty_table_cells(me) ++ lst ++ [me.total]
  end

  # One data row: row-key components, one cell per sorted column, row total.
  defp row_as_list(%@me{} = me, row) do
    head_list = Map.keys(me.col_sum) |> Enum.sort()

    lst =
      for head <- head_list do
        Map.get(me.element, {row, head}, Decimal.new(0))
      end

    Tuple.to_list(row) ++ lst ++ [Map.get(me.row_sum, row)]
  end

  # Like row_as_list/2, but each cell is a map carrying value/col/row.
  defp row_as_map(%@me{} = me, row) do
    head_list = Map.keys(me.col_sum) |> Enum.sort()

    lst =
      for head <- head_list do
        v = Map.get(me.element, {row, head}, Decimal.new(0))
        %{value: v, col: head, row: row}
      end

    Tuple.to_list(row) ++ lst ++ [%{value: Map.get(me.row_sum, row), row: row}]
  end

  @doc """
  Returns all data rows as lists, sorted by row key.
  """
  def elements_as_list(%@me{} = me) do
    row_list = Map.keys(me.row_sum) |> Enum.sort()

    for row <- row_list do
      row_as_list(me, row)
    end
  end

  @doc """
  Returns all data rows as maps, sorted.
  """
  def elements_as_map(%@me{} = me) do
    row_list = Map.keys(me.row_sum) |> Enum.sort()

    for row <- row_list do
      row_as_map(me, row)
    end
    |> Enum.sort()
  end

  @doc """
  The full table: header, data rows, footer.
  """
  def as_list(%@me{} = me) do
    [head_as_list(me)] ++ elements_as_list(me) ++ [footer_as_list(me)]
  end

  def as_map(%@me{} = me) do
    elements_as_map(me) ++ [footer_as_list(me)]
  end

  @doc """
  Filters the underlying data with `func` and wraps the result in a new
  pivot struct.
  """
  def filter(%@me{data: data}, func) do
    Enum.filter(data, func)
    |> new()
  end

  @doc """
  Export the data to a CSV list by providing a list of field atoms;
  converts the underlying data to a list.

  Usage:

      csv =
        %PiviEx{}
        |> to_csv()

      File.write("/tmp/example.csv", csv)
  """
  def to_csv(%@me{data: data}, header) do
    data
    |> Enum.reduce([header], fn d, acc ->
      row = Enum.map(header, fn h -> Map.get(d, h) end)
      [row | acc]
    end)
    |> Enum.reverse()
    |> CSV.encode(separator: ?;)
    |> Enum.to_list()
  end

  def to_csv(%@me{data: data} = me) do
    header = hd(data) |> Map.keys()
    to_csv(me, header)
  end

  def csv_test() do
    test2()
    |> to_csv([:company_id, :amount])
  end

  def test() do
    data2()
    |> pivot(fn r -> {r.account_id, r.company_id} end,
             fn r -> {Period.period(r.date)} end,
             fn r -> Decimal.sub(r.debit, r.credit) end)
  end

  def test3() do
    # Removed a dead `test()` call whose result was discarded.
    data2()
    |> pivot(fn r -> {r.account_id, nil} end,
             fn r -> {Period.period(r.date)} end,
             fn r -> Decimal.sub(r.debit, r.credit) end)
  end

  @doc """
  Combine two Pivis to create list with sub totals.

  Create two Pivis with same size and join them.
  """
  def test_combine() do
    a = PiviEx.test3() |> PiviEx.elements_as_map()
    b = PiviEx.test() |> PiviEx.elements_as_map()
    (a ++ b) |> Enum.sort()
  end

  defp data do
    [
      %{company_id: 1, gender: "m", account_id: "Acc. #1",
        date: ~D[2020-06-05], amount: Decimal.new(15)},
      %{company_id: 1, gender: "m", account_id: "Acc. #1",
        date: ~D[2020-06-05], amount: nil},
      %{company_id: 1, gender: "f", account_id: "Acc. #1",
        date: ~D[2020-06-05], amount: Decimal.new(15)},
      %{company_id: 1, gender: "m", account_id: "Acc. #1",
        date: ~D[2020-06-05], amount: Decimal.new(15)},
      %{company_id: 2, gender: "f", account_id: "Acc. #1",
        date: ~D[2020-06-05], amount: Decimal.new(15)},
    ]
  end

  defp data2 do
    [
      %{company_id: 3, account_id: "Acc. #2", date: ~D[2020-03-05],
        debit: Decimal.new(10), credit: Decimal.new(0)},
      %{company_id: 3, account_id: "Acc. #2", date: ~D[2020-03-05],
        debit: Decimal.new(10), credit: Decimal.new(0)},
      %{company_id: 3, account_id: "Acc. #2", date: ~D[2020-03-05],
        debit: 0, credit: Decimal.new(0)},
      %{company_id: 1, account_id: "Acc. #1", date: ~D[2020-05-05],
        debit: Decimal.new(10), credit: Decimal.new(0)},
      %{company_id: 2, account_id: "Acc. #1", date: ~D[2020-05-05],
        debit: Decimal.new(10), credit: Decimal.new(0)},
      %{company_id: 1, account_id: "Acc. #1", date: ~D[2020-05-05],
        debit: Decimal.new(10), credit: Decimal.new(0)},
      %{company_id: 1, account_id: "Acc. #1", date: ~D[2020-03-05],
        debit: Decimal.new("10.7"), credit: Decimal.new(0)},
      %{company_id: 1, account_id: "Acc. #1", date: ~D[2020-05-05],
        debit: Decimal.new(8), credit: Decimal.new(0)},
      %{company_id: 1, account_id: "Acc. #1", date: ~D[2020-06-05],
        debit: Decimal.new(8), credit: Decimal.new(0)},
      %{company_id: 1, account_id: "Acc. #1", date: ~D[2020-06-05],
        debit: Decimal.new(8), credit: Decimal.new(0)},
      %{company_id: 1, account_id: "Acc. #1", date: ~D[2020-06-05],
        debit: Decimal.new(8), credit: Decimal.new(0)},
      %{company_id: 1, account_id: "Acc. #1", date: ~D[2020-03-05],
        debit: Decimal.new(10), credit: Decimal.new(0)},
      %{company_id: 2, account_id: "Acc. #1", date: ~D[2020-03-05],
        debit: Decimal.new(10), credit: Decimal.new(0)},
    ]
  end

  def test2() do
    data()
    |> pivot(fn r -> {r.company_id, r.account_id} end,
             fn r -> {Period.period(r.date), r.gender} end,
             fn r -> r.amount end)
  end
end
|
lib/pivi_ex.ex
| 0.809916
| 0.560072
|
pivi_ex.ex
|
starcoder
|
defmodule Comeonin.Otp do
  @moduledoc """
  Generate and verify HOTP and TOTP one-time passwords.

  Module to generate and check HMAC-based one-time passwords and
  time-based one-time passwords, in accordance with
  [RFC 4226](https://tools.ietf.org/html/rfc4226) and
  [RFC 6238](https://tools.ietf.org/html/rfc6238).

  ## Two factor authentication

  These one-time passwords are often used together with regular passwords
  to provide two factor authentication (2FA), which forms a layered approach
  to user authentication. The advantage of 2FA over just using passwords is
  that an attacker would face an additional challenge to being authorized.
  """

  # `use Bitwise` is deprecated since Elixir 1.12 in favor of `import`.
  import Bitwise

  @doc """
  Generate a secret key to be used with one-time passwords.

  By default, this function creates a 32 character base32 string, which
  can be used with the other functions in this module.

  It is also possible to create a 16 or 24 character long secret, but
  this is not recommended.
  """
  def gen_secret(secret_length \\ 32)

  def gen_secret(secret_length) when secret_length in [16, 24, 32] do
    # Each base32 character encodes 5 bits, so bytes = chars * 5 / 8 = chars / 1.6.
    trunc(secret_length / 1.6) |> :crypto.strong_rand_bytes() |> Base.encode32()
  end

  def gen_secret(_), do: raise(ArgumentError, "Invalid length")

  @doc """
  Check the one-time password is valid.

  The one-time password should be at least 6 characters long, and it
  should be a string which only contains numeric values.
  """
  def valid_token(token, _) when not is_binary(token) do
    raise ArgumentError, "The token should be a string"
  end

  def valid_token(token, token_length)
      when token_length >= 6 and token_length == byte_size(token) do
    Regex.match?(~r/^[0-9]+$/, token)
  end

  def valid_token(_, _), do: false

  @doc """
  Generate a HMAC-based one-time password.

  There is one option:

    * token_length - the length of the one-time password
      * the default is 6
  """
  def gen_hotp(secret, count, opts \\ []) do
    token_length = Keyword.get(opts, :token_length, 6)

    # :crypto.hmac/3 was removed in OTP 24; :crypto.mac/4 (OTP >= 22.1)
    # is the drop-in replacement. The counter is encoded as an 8-byte
    # big-endian unsigned integer, per RFC 4226.
    hash =
      :crypto.mac(
        :hmac,
        :sha,
        Base.decode32!(secret, padding: false),
        <<count::size(8)-big-unsigned-integer-unit(8)>>
      )

    # Dynamic truncation (RFC 4226, section 5.3): the low nibble of the
    # final digest byte selects a 4-byte window of the 20-byte SHA-1 digest.
    offset = :binary.at(hash, 19) &&& 15
    <<truncated::size(4)-integer-unit(8)>> = :binary.part(hash, offset, 4)

    # Mask the sign bit, reduce to `token_length` digits, zero-pad on the
    # left. String.rjust/3 was deprecated; String.pad_leading/3 replaces it.
    (truncated &&& 0x7FFFFFFF)
    |> rem(trunc(:math.pow(10, token_length)))
    |> Integer.to_string()
    |> String.pad_leading(token_length, "0")
  end

  @doc """
  Generate a time-based one-time password.

  There are two options:

    * token_length - the length of the one-time password
      * the default is 6
    * interval_length - the length of each timed interval
      * the default is 30 (seconds)
  """
  def gen_totp(secret, opts \\ []) do
    count = opts |> Keyword.get(:interval_length, 30) |> interval_count()
    gen_hotp(secret, count, opts)
  end

  @doc """
  Verify a HMAC-based one-time password.

  Returns the counter value that matched the token (an integer, truthy),
  or `false` when the token is invalid or no counter in the window matches.

  There are three options:

    * token_length - the length of the one-time password
      * the default is 6
    * last - the count when the one-time password was last used
      * this count needs to be stored server-side
    * window - the number of future attempts allowed
      * the default is 3
  """
  def check_hotp(token, secret, opts \\ []) do
    if valid_token(token, Keyword.get(opts, :token_length, 6)) do
      last = Keyword.get(opts, :last, 0)
      window = Keyword.get(opts, :window, 3)
      # Only counters strictly after `last` are accepted, preventing replay.
      check_token(token, secret, last + 1, last + window + 1, opts)
    else
      false
    end
  end

  @doc """
  Verify a time-based one-time password.

  Returns the interval count that matched the token (an integer, truthy),
  or `false` when the token is invalid or no interval in the window matches.

  There are three options:

    * token_length - the length of the one-time password
      * the default is 6
    * interval_length - the length of each timed interval
      * the default is 30 (seconds)
    * window - the number of attempts, before and after the current one, allowed
      * the default is 1 (1 interval before and 1 interval after)
      * you might need to increase this window to allow for clock skew on the server
  """
  def check_totp(token, secret, opts \\ []) do
    if valid_token(token, Keyword.get(opts, :token_length, 6)) do
      count = opts |> Keyword.get(:interval_length, 30) |> interval_count()
      window = Keyword.get(opts, :window, 1)
      check_token(token, secret, count - window, count + window, opts)
    else
      false
    end
  end

  # Number of whole intervals elapsed since the Unix epoch.
  # :seconds is a deprecated alias of :second.
  defp interval_count(interval_length) do
    div(System.system_time(:second), interval_length)
  end

  # Window exhausted without a match.
  defp check_token(_token, _secret, current, last, _opts) when current > last do
    false
  end

  # Returns the first counter in [current, last] whose HOTP equals `token`.
  # NOTE(review): the pattern match is not a constant-time comparison —
  # acceptable for OTPs, but worth confirming against your threat model.
  defp check_token(token, secret, current, last, opts) do
    case gen_hotp(secret, current, opts) do
      ^token -> current
      _ -> check_token(token, secret, current + 1, last, opts)
    end
  end
end
|
deps/comeonin/lib/comeonin/otp.ex
| 0.750187
| 0.826502
|
otp.ex
|
starcoder
|
defmodule Scientist do
  @moduledoc ~S"""
  A library for carefully refactoring critical paths in your elixir application.
  """

  # Stores the chosen experiment module (default: Scientist.Default) in the
  # caller's @scientist_experiment attribute so `science/3` can call its
  # `new/2` later.
  defmacro __using__(opts) do
    mod = Keyword.get(opts, :experiment, Scientist.Default)
    quote do
      import unquote(__MODULE__)
      Module.put_attribute(__MODULE__, :scientist_experiment, unquote(mod))
    end
  end

  @doc """
  Creates a new experiment.
  Creates an experiment with `name` and `opts`. The block will behave the same as the
  control block given, returning the same value and raising the same exceptions.
  The stacktrace of the raised exceptions will be preserved.
  See `Scientist.Experiment.new/2` for a list of available options.
  """
  defmacro science(name, opts \\ [], do: block) do
    # :run controls whether the experiment executes immediately; it is
    # stripped so the remaining options go untouched to Experiment.new/2.
    should_run = Keyword.get(opts, :run, true)
    exp_opts = Keyword.delete(opts, :run)
    quote do
      # `var!(ex, Scientist)` is a deliberately unhygienic variable in the
      # Scientist context: the nested control/candidate/ignore/... macros
      # rebind it to thread the experiment struct through the do-block.
      var!(ex, Scientist) = @scientist_experiment.new(unquote(name), unquote(exp_opts))
      unquote(block)
      if unquote(should_run) do
        Scientist.Experiment.run(var!(ex, Scientist))
      else
        var!(ex, Scientist)
      end
    end
  end

  @doc """
  Adds a control block to the experiment created in `science/3`.
  See `Scientist.Experiment.add_control/2`.
  """
  defmacro control(do: block) do
    quote do
      # Wrap the block lazily so it only runs when the experiment executes.
      c = fn -> unquote(block) end
      var!(ex, Scientist) =
        Scientist.Experiment.add_control(var!(ex, Scientist), c)
    end
  end

  @doc """
  Adds a candidate block to the experiment created in `science/3`.
  See `Scientist.Experiment.add_candidate/2`.
  """
  defmacro candidate(name \\ "candidate", do: block) do
    quote do
      c = fn -> unquote(block) end
      var!(ex, Scientist) =
        Scientist.Experiment.add_candidate(var!(ex, Scientist), unquote(name), c)
    end
  end

  @doc """
  Adds an ignore block to the experiment created in `science/3`.
  See `Scientist.Experiment.ignore/2`.
  """
  defmacro ignore(do: block) do
    quote do
      # Arity-2 wrapper discards the control/candidate values.
      i = fn _, _ -> unquote(block) end
      var!(ex, Scientist) = Scientist.Experiment.ignore(var!(ex, Scientist), i)
    end
  end

  @doc """
  Adds an ignore block to the experiment created in `science/3`.
  The control and candidate values will be bound to the declared parameters in order.
  See `Scientist.Experiment.ignore/2`.
  """
  defmacro ignore(x, y, do: block) do
    quote do
      i = fn (unquote(x), unquote(y)) -> unquote(block) end
      var!(ex, Scientist) = Scientist.Experiment.ignore(var!(ex, Scientist), i)
    end
  end

  @doc """
  Adds a compare block to the experiment created in `science/3`.
  The control and candidate values will be bound to the declared parameters in order.
  See `Scientist.Experiment.compare_with/2`.
  """
  defmacro compare(x, y, do: block) do
    quote do
      c = fn (unquote(x), unquote(y)) -> unquote(block) end
      var!(ex, Scientist) = Scientist.Experiment.compare_with(var!(ex, Scientist), c)
    end
  end

  @doc """
  Adds a clean function to the experiment created in `science/3`.
  The observed values will be bound to the parameter given.
  See `Scientist.Experiment.clean_with/2`.
  """
  defmacro clean(x, do: block) do
    quote do
      c = fn (unquote(x)) -> unquote(block) end
      var!(ex, Scientist) = Scientist.Experiment.clean_with(var!(ex, Scientist), c)
    end
  end

  @doc """
  Adds a before_run function to the experiment created in `science/3`.
  See `Scientist.Experiment.set_before_run/2`.
  """
  defmacro before_run(do: block) do
    quote do
      b = fn -> unquote(block) end
      var!(ex, Scientist) = Scientist.Experiment.set_before_run(var!(ex, Scientist), b)
    end
  end

  @doc """
  Adds a run_if function to the experiment created in `science/3`.
  See `Scientist.Experiment.set_run_if/2`.
  """
  defmacro run_if(do: block) do
    quote do
      r = fn -> unquote(block) end
      var!(ex, Scientist) = Scientist.Experiment.set_run_if(var!(ex, Scientist), r)
    end
  end
end
|
lib/scientist.ex
| 0.658637
| 0.679611
|
scientist.ex
|
starcoder
|
defmodule Strava.Segment do
  @moduledoc """
  Segments are specific sections of road. Athletes’ times are compared on these segments and leaderboards are created.
  https://strava.github.io/api/v3/segments/
  """

  import Strava.Util, only: [parse_date: 1]

  @type t :: %__MODULE__{
          id: number,
          resource_state: number,
          name: String.t,
          activity_type: String.t,
          distance: number,
          average_grade: float,
          maximum_grade: float,
          elevation_high: float,
          elevation_low: float,
          start_latlng: list(float),
          end_latlng: list(float),
          climb_category: integer,
          city: String.t,
          state: String.t,
          country: String.t,
          private: boolean,
          starred: boolean,
          created_at: NaiveDateTime.t | String.t,
          updated_at: NaiveDateTime.t | String.t,
          total_elevation_gain: float,
          map: Strava.Map.t,
          effort_count: integer,
          athlete_count: integer,
          hazardous: boolean,
          star_count: integer
        }

  defstruct [
    :id,
    :resource_state,
    :name,
    :activity_type,
    :distance,
    :average_grade,
    :maximum_grade,
    :elevation_high,
    :elevation_low,
    :start_latlng,
    :end_latlng,
    :climb_category,
    :city,
    :state,
    :country,
    :private,
    :starred,
    :created_at,
    :updated_at,
    :total_elevation_gain,
    :map,
    :effort_count,
    :athlete_count,
    :hazardous,
    :star_count
  ]

  @doc """
  Retrieve details about a specific segment.
  ## Example
      Strava.Segment.retrieve(229781)
  More info: https://strava.github.io/api/v3/segments/#retrieve
  """
  @spec retrieve(integer, Strava.Client.t) :: Strava.Segment.t
  def retrieve(id, client \\ Strava.Client.new) do
    "segments/#{id}"
    |> Strava.request(client, as: %Strava.Segment{})
    |> parse
  end

  @doc """
  Retrieve a list the segments starred by the authenticated athlete.
  ## Example
      Strava.Segment.list_starred()
  More info: http://strava.github.io/api/v3/segments/#starred
  """
  @spec list_starred(Strava.Client.t) :: list(Strava.Segment.t)
  def list_starred(client \\ Strava.Client.new) do
    list_starred_request(%Strava.Pagination{}, client)
  end

  @doc """
  Retrieve a list the segments starred by the authenticated athlete, for a given page.
  ## Example
      Strava.Segment.paginate_starred(%Strava.Pagination{per_page: 10, page: 1})
  More info: http://strava.github.io/api/v3/segments/#starred
  """
  @spec paginate_starred(Strava.Pagination.t, Strava.Client.t) :: list(Strava.Segment.t)
  def paginate_starred(pagination, client \\ Strava.Client.new) do
    list_starred_request(pagination, client)
  end

  @doc """
  Create a stream of segments starred by the authenticated athlete.
  ## Example
      Strava.Segment.stream_starred()
  More info: http://strava.github.io/api/v3/segments/#starred
  """
  @spec stream_starred(Strava.Client.t) :: Enum.t
  def stream_starred(client \\ Strava.Client.new) do
    Strava.Paginator.stream(fn pagination -> paginate_starred(pagination, client) end)
  end

  @doc """
  Retrieve a list of segment efforts, for a given segment, optionally filtered by athlete and/or a date range.
  ## Example
      Strava.Segment.list_efforts(229781)
      Strava.Segment.list_efforts(229781, %{athlete_id: 5287})
  More info: https://strava.github.io/api/v3/segments/#efforts
  """
  @spec list_efforts(integer, map, Strava.Client.t) :: list(Strava.SegmentEffort.t)
  def list_efforts(id, filters \\ %{}, client \\ Strava.Client.new) do
    list_efforts_request(id, filters, %Strava.Pagination{}, client)
  end

  @doc """
  Retrieve a list of segment efforts for a given segment, filtered by athlete and/or a date range, for a given page.
  ## Example
      Strava.Segment.paginate_efforts(229781, %{athlete_id: 5287}, %Strava.Pagination{per_page: 10, page: 1})
  More info: https://strava.github.io/api/v3/segments/#efforts
  """
  @spec paginate_efforts(integer, map, Strava.Pagination.t, Strava.Client.t) :: list(Strava.SegmentEffort.t)
  def paginate_efforts(id, filters, pagination, client \\ Strava.Client.new) do
    list_efforts_request(id, filters, pagination, client)
  end

  @doc """
  Create a stream of segment efforts for a given segment, filtered by athlete and/or a date range.
  ## Example
      Strava.Segment.stream_efforts(229781)
  More info: https://strava.github.io/api/v3/segments/#efforts
  """
  @spec stream_efforts(integer, map, Strava.Client.t) :: Enum.t
  def stream_efforts(id, filters \\ %{}, client \\ Strava.Client.new) do
    Strava.Paginator.stream(fn pagination -> paginate_efforts(id, filters, pagination, client) end)
  end

  @doc """
  Finds segments within a given area
  ## Example
      [sw_lat, sw_lon, ne_lat, ne_lon] =
        [37.821362,-122.505373,37.842038,-122.465977]
      Strava.Segment.explore([sw_lat, sw_lon, ne_lat, ne_lon])
      Strava.Segment.explore([sw_lat, sw_lon, ne_lat, ne_lon], %{activity_type: "running"})
  More info: https://developers.strava.com/docs/reference/#api-Segments-exploreSegments
  """
  @spec explore(list(float), map, Strava.Client.t) :: list(Strava.Segment.t)
  def explore([_s, _w, _n, _e] = bounds, filters \\ %{}, client \\ Strava.Client.new) do
    optional_params = if Enum.empty?(filters), do: "", else: "&" <> URI.encode_query(filters)

    # :avg_grade is appended so the decoder keeps the summary-only field;
    # convert_from_summary/1 renames it to :average_grade afterwards.
    %{segments: segments} =
      "segments/explore?bounds=#{Enum.join(bounds, ",")}#{optional_params}"
      |> Strava.request(client, as: %{segments: [%Strava.Segment{}] ++ [:avg_grade]})

    Enum.map(segments, &convert_from_summary/1)
  end

  # The endpoint for `explore` returns a segment summary, which doesn't have the same fields.
  # Most notably, it returns the average grade as `avg_grade` rather than the `average_grade`
  # used by the `retrieve` endpoint.
  # Fixed spec: this returns a single segment, not a list — the caller maps it
  # over each summary.
  @spec convert_from_summary(map) :: Strava.Segment.t
  defp convert_from_summary(%{} = segment_summary) do
    summary =
      segment_summary
      |> Map.put(:average_grade, segment_summary.avg_grade)
      |> Map.delete(:avg_grade)

    struct(Strava.Segment, summary)
  end

  @doc """
  Returns a leaderboard: the ranking of athletes on specific segments.
  ## Example
      Strava.Segment.leaderboard(segment.id)
      Strava.Segment.leaderboard(segment.id, %{date_range: 'this_month', age_group: '35_44'})
  More info: http://strava.github.io/api/v3/segments/#leaderboard
  """
  @spec leaderboard(integer, map, Strava.Pagination.t, Strava.Client.t) :: Strava.SegmentLeaderboard.t
  def leaderboard(id, filters \\ %{}, pagination \\ %Strava.Pagination{}, client \\ Strava.Client.new) do
    "segments/#{id}/leaderboard?#{Strava.Util.query_string(pagination, filters)}"
    |> Strava.request(client, as: %Strava.SegmentLeaderboard{})
  end

  # Shared request helper for the paged efforts endpoints.
  @spec list_efforts_request(integer, map, Strava.Pagination.t, Strava.Client.t) :: list(Strava.SegmentEffort.t)
  defp list_efforts_request(id, filters, pagination, client) do
    "segments/#{id}/all_efforts?#{Strava.Util.query_string(pagination, filters)}"
    |> Strava.request(client, as: [%Strava.SegmentEffort{}])
    |> Enum.map(&Strava.SegmentEffort.parse/1)
  end

  # Shared request helper for the paged starred-segments endpoints.
  @spec list_starred_request(Strava.Pagination.t, Strava.Client.t) :: list(Strava.Segment.t)
  defp list_starred_request(pagination, client) do
    "segments/starred?#{Strava.Util.query_string(pagination)}"
    |> Strava.request(client, as: [%Strava.Segment{}])
    |> Enum.map(&Strava.Segment.parse/1)
  end

  @doc """
  Parse the map and dates in the segment
  """
  @spec parse(Strava.Segment.t) :: Strava.Segment.t
  def parse(%Strava.Segment{} = segment) do
    segment
    |> parse_map
    |> parse_dates
  end

  @spec parse_map(Strava.Segment.t) :: Strava.Segment.t
  defp parse_map(%Strava.Segment{map: nil} = segment), do: segment

  defp parse_map(%Strava.Segment{map: map} = segment) do
    %Strava.Segment{segment | map: struct(Strava.Map, map)}
  end

  @spec parse_dates(Strava.Segment.t) :: Strava.Segment.t
  defp parse_dates(%Strava.Segment{created_at: created_at, updated_at: updated_at} = segment) do
    # Removed the trailing comma after the last pair (invalid in a map-update
    # literal).
    %Strava.Segment{segment |
      created_at: parse_date(created_at),
      updated_at: parse_date(updated_at)
    }
  end
end
|
lib/strava/segment.ex
| 0.933188
| 0.624623
|
segment.ex
|
starcoder
|
defmodule Sanbase.Validation do
  @moduledoc false

  import Sanbase.DateTimeUtils, only: [str_to_sec: 1]

  # Guard macros shared with other validation call sites.
  defguard is_valid_price(price) when is_number(price) and price >= 0
  defguard is_valid_percent(percent) when is_number(percent) and percent >= -100
  defguard is_valid_percent_change(percent) when is_number(percent) and percent > 0

  defguard is_valid_min_max(min, max)
           when min < max and is_number(min) and is_number(max)

  defguard is_valid_min_max_price(min, max)
           when min < max and is_valid_price(min) and is_valid_price(max)

  # Percents are numbers >= -100 (a value can't drop more than 100%).
  def valid_percent?(percent) when is_valid_percent(percent), do: :ok

  def valid_percent?(percent),
    do: {:error, "#{inspect(percent)} is not a valid percent"}

  # A time window is a positive integer followed by one of s/m/h/d/w, e.g. "4h".
  def valid_time_window?(time_window) when is_binary(time_window) do
    if Regex.match?(~r/^\d+[smhdw]$/, time_window) do
      :ok
    else
      {:error, "#{inspect(time_window)} is not a valid time window"}
    end
  end

  def valid_time_window?(time_window),
    do: {:error, "#{inspect(time_window)} is not a valid time window"}

  def time_window_is_whole_days?(time_window) do
    # 86_400 seconds per day.
    if rem(str_to_sec(time_window), 86_400) == 0 do
      :ok
    else
      {:error, "Time window should represent whole days. Time window provided: #{time_window}"}
    end
  end

  def time_window_bigger_than?(time_window, min_time_window) do
    if str_to_sec(time_window) >= str_to_sec(min_time_window) do
      :ok
    else
      {:error,
       "Time window should be bigger than #{min_time_window}. Time window provided: #{time_window}"}
    end
  end

  def valid_iso8601_time_string?(time) when is_binary(time) do
    case Time.from_iso8601(time) do
      {:ok, _} -> :ok
      _ -> {:error, "#{time} is not a valid ISO8601 time"}
    end
  end

  def valid_iso8601_time_string?(str), do: {:error, "#{inspect(str)} is not a valid ISO8601 time"}

  # A URL is accepted only when it has a scheme, a host, and a path.
  def valid_url?(url) do
    uri = URI.parse(url)

    cond do
      uri.scheme == nil ->
        {:error,
         "`#{url}` is not a valid URL. Reason: it is missing scheme (e.g. missing https:// part)"}

      uri.host == nil ->
        {:error,
         "`#{url}` is not a valid URL. Reason: it is missing host (e.g. missing the example.com part)"}

      uri.path == nil ->
        {:error,
         "`#{url}` is not a valid URL. Reason: it is missing path (e.g. missing the /image.png part)"}

      true ->
        :ok
    end
  end

  def valid_threshold?(threshold) when is_number(threshold) and threshold > 0, do: :ok

  def valid_threshold?(threshold) do
    {:error, "#{inspect(threshold)} is not valid threshold. It must be a number bigger than 0"}
  end

  def valid_metric?(metric), do: Sanbase.Metric.has_metric?(metric)

  def valid_signal?(signal), do: Sanbase.Signal.has_signal?(signal)

  # :ok only when the metric exists and its min interval is <= 5 minutes.
  def valid_5m_min_interval_metric?(metric) do
    with {:ok, %{min_interval: min_interval}} <- Sanbase.Metric.metadata(metric),
         seconds when is_number(seconds) and seconds <= 300 <-
           Sanbase.DateTimeUtils.str_to_sec(min_interval) do
      :ok
    else
      _ ->
        {:error,
         "The metric #{inspect(metric)} is not supported or is mistyped or does not have min interval equal or less than to 5 minutes."}
    end
  end

  # :ok only when the metric exists and its min interval is > 5 minutes.
  def valid_above_5m_min_interval_metric?(metric) do
    with {:ok, %{min_interval: min_interval}} <- Sanbase.Metric.metadata(metric),
         seconds when is_number(seconds) and seconds > 300 <-
           Sanbase.DateTimeUtils.str_to_sec(min_interval) do
      :ok
    else
      _ ->
        {:error,
         "The metric #{inspect(metric)} is not supported or is mistyped or does not have min interval equal or bigger than to 1 day."}
    end
  end
end
|
lib/sanbase/utils/validation.ex
| 0.791055
| 0.561636
|
validation.ex
|
starcoder
|
defmodule RemoteIp do
  @moduledoc """
  A plug to overwrite the `Plug.Conn`'s `remote_ip` based on request headers.
  To use, add the `RemoteIp` plug to your app's plug pipeline:
  ```elixir
  defmodule MyApp do
    use Plug.Builder
    plug RemoteIp
  end
  ```
  Keep in mind the order of plugs in your pipeline and place `RemoteIp` as
  early as possible. For example, if you were to add `RemoteIp` *after* [the
  Plug Router](https://github.com/elixir-lang/plug#the-plug-router), your route
  action's logic would be executed *before* the `remote_ip` actually gets
  modified - not very useful!
  There are 3 options that can be passed in:
  * `:headers` - A list of strings naming the `req_headers` to use when
    deriving the `remote_ip`. Order does not matter. Defaults to `~w[forwarded
    x-forwarded-for x-client-ip x-real-ip]`.
  * `:proxies` - A list of strings in
    [CIDR](https://en.wikipedia.org/wiki/CIDR) notation specifying the IPs of
    known proxies. Defaults to `[]`.
    [Loopback](https://en.wikipedia.org/wiki/Loopback) and
    [private](https://en.wikipedia.org/wiki/Private_network) IPs are always
    appended to this list:
    * 127.0.0.0/8
    * ::1/128
    * fc00::/7
    * 10.0.0.0/8
    * 172.16.0.0/12
    * 192.168.0.0/16
    Since these IPs are internal, they often are not the actual client
    address in production, so we add them by default. To override this
    behavior, whitelist known client IPs using the `:clients` option.
  * `:clients` - A list of strings in
    [CIDR](https://en.wikipedia.org/wiki/CIDR) notation specifying the IPs of
    known clients. Defaults to `[]`.
    An IP in any of the ranges listed here will never be considered a proxy.
    This takes precedence over the `:proxies` option, including
    loopback/private addresses. Any IP that is **not** covered by `:clients`
    or `:proxies` is assumed to be a client IP.
    For example, suppose you know:
    * you are behind proxies in the 1.2.x.x block
    * the proxies use the `X-Foo`, `X-Bar`, and `X-Baz` headers
    * but the IP 1.2.3.4 is actually a client, not one of the proxies
    Then you could say
    ```elixir
    defmodule MyApp do
      use Plug.Builder
      plug RemoteIp,
        headers: ~w[x-foo x-bar x-baz],
        proxies: ~w[1.2.0.0/16],
        clients: ~w[1.2.3.4/32]
    end
    ```
    Note that, due to limitations in the
    [inet_cidr](https://github.com/Cobenian/inet_cidr) library used to parse
    them, `:proxies` and `:clients` **must** be written in full CIDR notation,
    even if specifying just a single IP. So instead of `"127.0.0.1"` and
    `"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:d"`, you would use `"127.0.0.1/32"` and `"fd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b:d/128"`.
  For more details, refer to the
  [README](https://github.com/ajvondrak/remote_ip/blob/master/README.md) on
  GitHub.
  """

  require Logger

  @behaviour Plug

  # Default forwarding headers inspected for client IPs.
  @headers ~w[
    forwarded
    x-forwarded-for
    x-client-ip
    x-real-ip
  ]

  # No known proxies or clients by default; the reserved ranges below are
  # always appended to the proxies at init time.
  @proxies []

  @clients []

  # https://en.wikipedia.org/wiki/Loopback
  # https://en.wikipedia.org/wiki/Private_network
  @reserved ~w[
    127.0.0.0/8
    ::1/128
    fc00::/7
    10.0.0.0/8
    172.16.0.0/12
    192.168.0.0/16
  ]

  # Plug callback: normalizes options into a %RemoteIp.Config{} once, so
  # call/2 does no CIDR parsing per request.
  def init(opts \\ []) do
    headers = Keyword.get(opts, :headers, @headers)
    headers = MapSet.new(headers)
    proxies = Keyword.get(opts, :proxies, @proxies) ++ @reserved
    proxies = proxies |> Enum.map(&InetCidr.parse/1)
    clients = Keyword.get(opts, :clients, @clients)
    clients = clients |> Enum.map(&InetCidr.parse/1)
    %RemoteIp.Config{headers: headers, proxies: proxies, clients: clients}
  end

  # Plug callback: rewrites conn.remote_ip only when a client IP can be
  # derived from the configured headers; otherwise the conn is untouched.
  def call(conn, %RemoteIp.Config{} = config) do
    case last_forwarded_ip(conn.req_headers, config) do
      nil -> conn
      ip -> %{conn | remote_ip: ip}
    end
  end

  @doc """
  Standalone function to extract the remote IP from a list of headers.
  It's possible to get a subset of headers without access to a full `Plug.Conn`
  struct. For instance, when [using Phoenix
  sockets](https://hexdocs.pm/phoenix/Phoenix.Endpoint.html), your socket's
  `connect/3` callback may only be receiving `:x_headers` in the
  `connect_info`. Such situations make it inconvenient to use `RemoteIp`
  outside of a plug pipeline.
  Therefore, this function will fetch the remote IP from a plain list of header
  key-value pairs (just as you'd have in the `req_headers` of a `Plug.Conn`).
  You may optionally specify the same options as if you were using `RemoteIp`
  as a plug: they'll be processed by `RemoteIp.init/1` each time you call this
  function.
  If a remote IP cannot be parsed from the given headers (e.g., if the list is
  empty), this function will return `nil`.
  ## Examples
      iex> RemoteIp.from([{"x-forwarded-for", "1.2.3.4"}])
      {1, 2, 3, 4}
      iex> [{"x-foo", "1.2.3.4"}, {"x-bar", "2.3.4.5"}]
      ...> |> RemoteIp.from(headers: ~w[x-foo])
      {1, 2, 3, 4}
      iex> [{"x-foo", "1.2.3.4"}, {"x-bar", "2.3.4.5"}]
      ...> |> RemoteIp.from(headers: ~w[x-bar])
      {2, 3, 4, 5}
      iex> [{"x-foo", "1.2.3.4"}, {"x-bar", "2.3.4.5"}]
      ...> |> RemoteIp.from(headers: ~w[x-baz])
      nil
  """
  @spec from([{String.t, String.t}], keyword) :: :inet.ip_address | nil
  def from(req_headers, opts \\ []) do
    last_forwarded_ip(req_headers, init(opts))
  end

  # Core routine shared by call/2 and from/2: parse the configured headers
  # into IPs, then pick the most recently forwarded IP that looks like a
  # client. Debug logging wraps both ends.
  defp last_forwarded_ip(req_headers, config) do
    maybe_log_debug(fn -> start(config) end)
    ip = req_headers |> ips_given(config) |> most_recent_client_given(config)
    maybe_log_debug(fn -> stop(ip) end)
    ip
  end

  defp ips_given(req_headers, %RemoteIp.Config{headers: headers}) do
    RemoteIp.Headers.parse(req_headers, headers)
  end

  # Headers list IPs oldest-first, so the right-most client IP is the one
  # closest to the actual end user that isn't a known proxy.
  defp most_recent_client_given(ips, config) do
    Enum.reverse(ips) |> Enum.find(&client?(&1, config))
  end

  # Precedence: known clients win over known proxies; anything not listed
  # as a proxy is presumed to be a client.
  defp client?(ip, %RemoteIp.Config{clients: clients, proxies: proxies}) do
    cond do
      clients |> contains?(ip) ->
        maybe_log_debug(fn -> known_client(ip) end)
        true

      proxies |> contains?(ip) ->
        maybe_log_debug(fn -> known_proxy(ip) end)
        false

      true ->
        maybe_log_debug(fn -> presumably_client(ip) end)
        true
    end
  end

  defp contains?(cidrs, ip) do
    Enum.any?(cidrs, &InetCidr.contains?(&1, ip))
  end

  # The functions below build iodata log messages; they are only evaluated
  # when debug logging is enabled (see maybe_log_debug/1).
  defp start(config) do
    [inspect(__MODULE__), " is configured with ", inspect(config, pretty: true)]
  end

  defp stop(ip) do
    if ip do
      [inspect(__MODULE__), " determined the remote IP is ", inspect(ip)]
    else
      [inspect(__MODULE__), " could not determine the remote IP"]
    end
  end

  defp known_client(ip) do
    [inspect(__MODULE__), " thinks ", inspect(ip), " is a known client IP"]
  end

  defp known_proxy(ip) do
    [inspect(__MODULE__), " thinks ", inspect(ip), " is a known proxy IP"]
  end

  defp presumably_client(ip) do
    [inspect(__MODULE__), " assumes ", inspect(ip), " is a client IP"]
  end

  # Debug logging is opt-in via the :remote_ip, :debug application env,
  # checked at runtime on every request.
  defp maybe_log_debug(any) do
    if Application.get_env(:remote_ip, :debug), do: Logger.debug(any)
  end
end
|
lib/remote_ip.ex
| 0.898961
| 0.822902
|
remote_ip.ex
|
starcoder
|
defmodule Day2 do
  @moduledoc """
  Entry point for Advent of Code day 2: reads the puzzle input and prints
  both part solutions along with how long each took.
  """

  # Fixed: the original wrapped the time unit in `unquote(:milli_seconds)` —
  # a no-op unquote fragment injecting the deprecated :milli_seconds alias.
  # Use the current :millisecond unit directly.
  def solve do
    input = prepare_input()

    start = System.monotonic_time(:millisecond)
    IO.puts("Solving part one:")

    Day2.Part1.solve(input)
    |> IO.puts()

    time_part_one = System.monotonic_time(:millisecond) - start
    IO.puts("Part one took #{time_part_one} milliseconds")

    start = System.monotonic_time(:millisecond)
    IO.puts("Solving part two")

    Day2.Part2.solve(input)
    |> IO.puts()

    time_part_two = System.monotonic_time(:millisecond) - start
    IO.puts("Part Two took #{time_part_two} milliseconds")
  end

  # Reads the puzzle input as a list of trimmed lines.
  defp prepare_input do
    "../../inputFiles/day2/input.txt"
    |> File.stream!()
    |> Stream.map(&String.trim_trailing/1)
    |> Enum.to_list()
  end
end
defmodule Day2.Part1 do
  @moduledoc """
  Computes the checksum: the number of box IDs containing some letter
  exactly twice, multiplied by the number containing some letter exactly
  three times.
  """

  def solve(input) do
    {twos, threes} =
      Enum.reduce(input, {0, 0}, fn id, {twos, threes} ->
        counts =
          id
          |> String.graphemes()
          |> Enum.frequencies()
          |> Map.values()

        {twos + bump(2 in counts), threes + bump(3 in counts)}
      end)

    twos * threes
  end

  # An ID contributes at most 1 to each tally, however many letters qualify.
  defp bump(true), do: 1
  defp bump(false), do: 0
end
defmodule Day2.Part2 do
  @moduledoc """
  Finds the two box IDs that differ in the fewest positions and returns
  the characters they have in common.
  """

  def solve(input) do
    {string, sibling, _distance} = closest_pair(input)
    common_part(string, sibling)
  end

  # Returns {a, b, distance} for the pair with the smallest positive
  # distance. Identical strings (distance 0) are skipped, matching the
  # original behavior. Raises Enum.EmptyError when no such pair exists.
  defp closest_pair(input) do
    candidates =
      for a <- input, b <- input, d = distance_between(a, b), d > 0, do: {a, b, d}

    Enum.min_by(candidates, fn {_a, _b, d} -> d end)
  end

  # Letters of `string` minus the (multiset) letters it doesn't share with
  # `sibling`, preserving order.
  defp common_part(string, sibling) do
    unshared = String.graphemes(string) -- String.graphemes(sibling)
    Enum.join(String.graphemes(string) -- unshared)
  end

  # Positional Hamming-style distance over the graphemes of `string1`.
  # Fixed: the original called Enum.at/2 per character, making each
  # comparison O(n^2); zipping the grapheme lists is O(n). Positions past
  # the end of `string2` compare against nil and count as mismatches,
  # preserving the original's unequal-length behavior.
  defp distance_between(string1, string2) do
    left = String.graphemes(string1)
    right = String.graphemes(string2)
    padding = max(length(left) - length(right), 0)

    left
    |> Enum.zip(right ++ List.duplicate(nil, padding))
    |> Enum.count(fn {a, b} -> a != b end)
  end
end
|
elixir/day2/lib/day2.ex
| 0.570092
| 0.581927
|
day2.ex
|
starcoder
|
defmodule ApiAccounts.Keys do
  @moduledoc """
  Validates and caches API keys.
  When validating a key, the API key is looked up in the cache. If a key isn't
  present in the cache, the key will be validated against the V2 API and V3 set
  of API keys and stored in the cache if it's a valid key.
  """
  use GenServer

  alias ApiAccounts.Key

  # Name of the ETS cache table.
  @table :api_key_cache
  # How often (ms) the cache is re-validated against the source of truth.
  @fetch_timeout 60_000

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: opts[:name] || __MODULE__)
  end

  def init(_opts) do
    # :public + read_concurrency so callers can read/insert the cache
    # directly (see fetch_valid_key/1) without going through this process.
    table_opts = [:set, :named_table, :public, read_concurrency: true]

    # Guard against the table already existing (e.g. after a restart where
    # it survived, or when another starter created it first).
    _ =
      if :ets.info(table_name()) == :undefined do
        :ets.new(table_name(), table_opts)
      end

    schedule_timeout!()
    {:ok, %{}}
  end

  # Arms the periodic refresh; re-armed after each :timeout message.
  defp schedule_timeout! do
    Process.send_after(self(), :timeout, @fetch_timeout)
  end

  # Synchronous refresh hook (used by update!/0 in tests).
  def handle_call(:update!, _, state) do
    _ = handle_info(:timeout, state)
    {:reply, :ok, state}
  end

  # Periodic refresh: re-validate every cached key, then re-arm the timer.
  def handle_info(:timeout, state) do
    :ets.foldl(&update_keys/2, :ok, @table)
    schedule_timeout!()
    {:noreply, state}
  end

  # Re-validates a single cached entry: drop it when no longer valid,
  # keep it when unchanged, replace it when the key data changed.
  defp update_keys({key_id, key}, :ok) do
    case fetch_key_remote(key_id) do
      [] ->
        :ets.delete(@table, key_id)

      [^key] ->
        # same key
        :ok

      [new_key] ->
        # new key
        :ets.insert(@table, {key_id, new_key})
    end

    :ok
  end

  # Cache-miss path: validate remotely and cache on success.
  @spec fetch_key(String.t()) :: {:ok, Key.t()} | {:error, :not_found}
  defp fetch_key(key) do
    case fetch_key_remote(key) do
      [key] ->
        cache_key(key)
        {:ok, key}

      [] ->
        {:error, :not_found}
    end
  end

  # Only 32-byte keys are ever looked up; a key must be approved and not
  # locked to count as valid. Returns a zero-or-one-element list.
  defp fetch_key_remote(key) when byte_size(key) == 32 do
    case ApiAccounts.get_key(key) do
      {:ok, %Key{approved: true, locked: false} = key} -> [key]
      _ -> []
    end
  end

  defp fetch_key_remote(_key) do
    []
  end

  @doc """
  Caches a key in ETS.
  """
  @spec cache_key(Key.t()) :: true
  def cache_key(%Key{key: key} = struct) do
    :ets.insert(table_name(), {key, struct})
  end

  @doc """
  Removes a key from ETS.
  """
  @spec revoke_key(Key.t()) :: true
  def revoke_key(%Key{key: key}) do
    :ets.delete(table_name(), key)
  end

  @doc false
  def table_name, do: @table

  @doc """
  Fetches a Key if it is valid.
  """
  @spec fetch_valid_key(String.t()) :: {:ok, Key.t()} | {:error, :not_found}
  def fetch_valid_key(api_key) do
    # Fast path reads the public ETS table directly from the caller's
    # process; only a miss triggers a remote validation.
    case :ets.lookup(table_name(), api_key) do
      [{^api_key, key}] -> {:ok, key}
      [] -> fetch_key(api_key)
    end
  end

  @doc false
  def update! do
    # test-only function
    :ok = GenServer.call(__MODULE__, :update!)
  end
end
|
apps/api_accounts/lib/api_accounts/keys.ex
| 0.71123
| 0.407304
|
keys.ex
|
starcoder
|
defmodule Bingo.BingoChecker do
  @moduledoc """
  Pure helpers for deciding whether a bingo board (a 2D list of squares,
  each a map with a `:marked_by` field) has a winning row, column, or
  diagonal fully marked by a single player.
  """

  @doc """
  Checks for a bingo!
  Given a 2D list of `squares`, returns `true` if all the squares of
  any row, column, or diagonal have been marked by the same player.
  Otherwise `false` is returned.
  """
  def bingo?(squares) do
    squares
    |> possible_winning_square_sequences()
    |> sequences_with_at_least_one_square_marked()
    |> Enum.any?(&all_squares_marked_by_same_player?/1)
  end

  @doc """
  Given a 2D list of `squares`, returns a 2D list of all possible winning square sequences: rows, columns, left diagonal, and right diagonal.
  """
  def possible_winning_square_sequences(squares) do
    rows = squares
    columns = transpose(squares)
    diagonals = [left_diagonal_squares(squares), right_diagonal_squares(squares)]

    rows ++ columns ++ diagonals
  end

  @doc """
  Given a list of possible winning square sequences, returns a list of
  those sequences that have at least one square marked.
  """
  def sequences_with_at_least_one_square_marked(squares) do
    Enum.filter(squares, fn sequence ->
      Enum.any?(sequence, fn square -> square.marked_by != nil end)
    end)
  end

  @doc """
  Given a list of possible winning square sequences, returns `true` if
  the sequence has all squares marked by the same player.
  Otherwise, returns `false`.
  """
  def all_squares_marked_by_same_player?([]), do: true

  def all_squares_marked_by_same_player?([first | _] = squares) do
    Enum.all?(squares, fn square -> square.marked_by == first.marked_by end)
  end

  @doc """
  Given a 2D list of elements, returns a new 2D list where the
  row and column indices have been switched. In other words,
  it flips the given matrix over its left diagonal.
  ## Example
      iex> m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
      iex> Bingo.BingoChecker.transpose(m)
      [[1, 4, 7], [2, 5, 8], [3, 6, 9]]
  """
  def transpose(squares) do
    squares
    |> Enum.zip()
    |> Enum.map(&Tuple.to_list/1)
  end

  @doc """
  Rotates the given 2D list of elements 90 degrees.
  ## Example
      iex> m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
      iex> Bingo.BingoChecker.rotate_90_degrees(m)
      [[3, 6, 9], [2, 5, 8], [1, 4, 7]]
  """
  def rotate_90_degrees(squares) do
    Enum.reverse(transpose(squares))
  end

  @doc """
  Returns the elements on the left diagonal of the given 2D list.
  ## Example
      iex> m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
      iex> Bingo.BingoChecker.left_diagonal_squares(m)
      [1, 5, 9]
  """
  def left_diagonal_squares(squares) do
    # Flattening an n x n board and taking every (n + 1)th element walks
    # exactly the main diagonal.
    step = Enum.count(squares) + 1
    Enum.take_every(List.flatten(squares), step)
  end

  @doc """
  Returns the elements on the right diagonal of the given 2D list.
  ## Example
      iex> m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
      iex> Bingo.BingoChecker.right_diagonal_squares(m)
      [3, 5, 7]
  """
  def right_diagonal_squares(squares) do
    left_diagonal_squares(rotate_90_degrees(squares))
  end
end
|
apps/bingo/lib/bingo/bingo_checker.ex
| 0.937089
| 0.726304
|
bingo_checker.ex
|
starcoder
|
defmodule Harnais.Form.Schatten.Workflow.Filter do
  @moduledoc false

  # Internal filter helpers for schatten workflow pipelines.  A pipeline is a
  # list of `{verb, field, opts}` 3-tuples; the functions below narrow a
  # pipeline down by predicate, verb, field, or index, returning
  # `{:ok, result}` or an `{:error, exception}` tuple.

  require Plymio.Fontais.Option
  alias Harnais.Utility, as: HUU
  alias Harnais.Form.Schatten.Workflow.Utility, as: HASWU
  use Harnais.Form.Attribute.Schatten

  # BUGFIX(review): the original source contained this exact import block
  # twice in a row; the redundant duplicate has been removed.
  import Harnais.Error,
    only: [
      new_error_result: 1
    ],
    warn: false

  # Validates `pipeline` and keeps only entries for which `fun_filter/1` is
  # truthy.  With a nil filter the pipeline is validated and returned as-is.
  def filter_workflow_pipeline(pipeline, fun_filter \\ nil)

  def filter_workflow_pipeline([], _) do
    {:ok, []}
  end

  def filter_workflow_pipeline(pipeline, nil) do
    pipeline |> HASWU.validate_workflow_pipeline()
  end

  def filter_workflow_pipeline(pipeline, fun_filter) when is_function(fun_filter, 1) do
    with {:ok, pipeline} <- pipeline |> HASWU.validate_workflow_pipeline(),
         {:ok, fun_filter} <-
           fun_filter
           |> HUU.value_validate_by_predicate(&is_function(&1, 1)) do
      pipeline =
        pipeline
        |> Enum.filter(fun_filter)

      {:ok, pipeline}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end

  def filter_workflow_pipeline(_value, fun_filter) do
    new_error_result(m: "filter function invalid", v: fun_filter)
  end

  # Applies `fun_filter` and then insists on exactly one surviving entry,
  # returning that entry unwrapped.
  def filter_workflow_pipeline_singleton_entry(pipeline, fun_filter) do
    with {:ok, pipeline} <- pipeline |> filter_workflow_pipeline(fun_filter) do
      # Pattern matching on the list shape is clearer (and cheaper) than
      # computing the length first.
      case pipeline do
        [entry] ->
          {:ok, entry}

        entries ->
          new_error_result(
            m: "pipeline entries too few or too many; expected 1",
            v: length(entries)
          )
      end
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end

  # Validates the pipeline and insists it holds exactly one entry.
  def filter_workflow_pipeline_by_singleton(pipeline) do
    with {:ok, pipeline} <- pipeline |> HASWU.validate_workflow_pipeline() do
      case pipeline do
        [entry] ->
          {:ok, entry}

        entries ->
          new_error_result(
            m: "pipeline too small or too large; expected 1",
            v: length(entries)
          )
      end
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end

  # Fetches the entry at `index` (negative indices count from the end, as
  # with `Enum.at/3`); errors if the index is out of range.
  def filter_workflow_pipeline_by_index(pipeline, index)

  def filter_workflow_pipeline_by_index(pipeline, index) when is_integer(index) do
    with {:ok, pipeline} <- pipeline |> HASWU.validate_workflow_pipeline() do
      # The sentinel distinguishes "entry missing" from "entry is nil".
      pipeline
      |> Enum.at(index, @harnais_form_schatten_value_not_set)
      |> case do
        @harnais_form_schatten_value_not_set ->
          new_error_result(m: "filter index #{inspect(index)} not found", v: pipeline)

        value ->
          {:ok, value}
      end
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end

  def filter_workflow_pipeline_by_index(_value, index) do
    new_error_result(m: "filter index invalid", v: index)
  end

  # Keeps entries whose first element matches `verb`.
  def filter_workflow_pipeline_by_verb(pipeline, verb)

  def filter_workflow_pipeline_by_verb([], _verb) do
    {:ok, []}
  end

  def filter_workflow_pipeline_by_verb(pipeline, verb) do
    pipeline
    |> filter_workflow_pipeline(fn
      {^verb, _, _} -> true
      _ -> false
    end)
  end

  # Keeps entries whose second element matches `field`.
  def filter_workflow_pipeline_by_field(pipeline, field)

  def filter_workflow_pipeline_by_field([], _field) do
    {:ok, []}
  end

  def filter_workflow_pipeline_by_field(pipeline, field) do
    pipeline
    |> filter_workflow_pipeline(fn
      {_, ^field, _} -> true
      _ -> false
    end)
  end

  # Returns the last entry for `field`, or nil when none match.
  def filter_workflow_pipeline_by_last_field(pipeline, field) do
    with {:ok, pipeline} <- pipeline |> filter_workflow_pipeline_by_field(field) do
      {:ok, pipeline |> List.last()}
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end

  # Filters by `verb` first, then by `field`.
  def filter_workflow_pipeline_by_verb_field(pipeline, verb, field) do
    with {:ok, pipeline} <- pipeline |> filter_workflow_pipeline_by_verb(verb),
         {:ok, _pipeline} = result <- pipeline |> filter_workflow_pipeline_by_field(field) do
      result
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end

  # Filters by verb + field and insists on exactly one matching entry.
  def filter_workflow_pipeline_by_singleton_verb_field(pipeline, verb, field) do
    with {:ok, pipeline} <- pipeline |> filter_workflow_pipeline_by_verb_field(verb, field),
         {:ok, _tuple} = result <- pipeline |> filter_workflow_pipeline_by_singleton do
      result
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end

  # Filters by verb + field and returns the last matching entry.
  def filter_workflow_pipeline_by_last_verb_field(pipeline, verb, field) do
    with {:ok, pipeline} <- pipeline |> filter_workflow_pipeline_by_verb_field(verb, field),
         {:ok, _tuple} = result <- pipeline |> filter_workflow_pipeline_by_index(-1) do
      result
    else
      {:error, %{__exception__: true}} = result -> result
    end
  end
end
|
lib/schatten/workflow/filter.ex
| 0.696268
| 0.431165
|
filter.ex
|
starcoder
|
defmodule SurfaceBulma.TabUtils do
  @moduledoc false
  # TODO make this dependent on the version of LiveView used. It would be preferable to use JS commands instead of a LiveComponent.

  # `use SurfaceBulma.TabUtils` injects a complete Surface LiveComponent into
  # the caller: assigns (`active_tab`, `set_active_tab`, `animation`), the
  # `update/2` clauses, a `set_tab/2` helper, and the "tab_click" event
  # handler.  The caller's moduledoc is the `:doc` option concatenated with
  # the usage text below.
  defmacro __using__(opts \\ []) do
    moduledoc =
      Keyword.get(opts, :doc, "") <>
        """
        Tab selection is handled by an internal event when Phoenix LiveView version < 0.17.* is used.
        A custom event can be specified for a tab, but be aware that the event handler has to send an update to the
        component containing the tab.
        For example given this markup:
        ```jsx
        ~F\"\"\"
        <Panel id="mypanel">
          <:title>Test Panel</:title>
          <Tab title="first">
            <TabItem>Item</TabItem>
          </Tab>
          <Tab title="special" click="my-event">
            <TabItem>SpecialItem</TabItem>
          </Tab>
        </Panel>
        \"\"\"
        ```
        The parent LiveView or LiveComponent needs to call `Panel.set_tab/2` to tell the `Panel` that the selected tab has changed.
        The event handler would then look like this:
        ```
        def handle_event("my-event", %{"index" => index_str}, socket) do
          ... do some other things ...
          Panel.set_tab("mypanel", String.to_integer(index_str))
        end
        """

    quote do
      @moduledoc unquote(moduledoc)
      use Surface.LiveComponent

      data active_tab, :integer
      data set_active_tab, :integer
      data animation, :string, default: ""

      # Clause order matters: a `:set_active_tab` assign (sent by `set_tab/2`
      # via `send_update/2`) takes priority, then the `:tabs` clause picks the
      # first visible tab, and the final clause is the plain passthrough.
      def update(%{set_active_tab: set_active_tab} = assigns, socket) do
        socket =
          socket
          |> assign(assigns)
          |> assign(:active_tab, set_active_tab)
          # Reset so a later re-render does not re-apply the same override.
          |> assign(:set_active_tab, nil)

        {:ok, socket}
      end

      def update(%{tabs: tabs} = assigns, socket) do
        socket =
          socket
          |> assign(assigns)
          |> assign(:active_tab, Enum.find_index(tabs, & &1.visible))

        {:ok, socket}
      end

      def update(assigns, socket) do
        {:ok, assign(socket, assigns)}
      end

      # Public helper for parents to switch the selected tab from outside.
      # `__CALLER__.module` is the module doing `use`, resolved at macro
      # expansion time, so the update targets the correct component module.
      def set_tab(id, index) do
        send_update(unquote(__CALLER__.module), id: id, set_active_tab: index)
      end

      # Internal click handler used when no custom click event is configured.
      def handle_event("tab_click", %{"index" => index_str}, socket) do
        index = String.to_integer(index_str)
        animation = next_animation(socket.assigns, index)
        {:noreply, assign(socket, active_tab: index, animation: animation)}
      end

      # Picks a slide direction based on whether the clicked tab is to the
      # right or left of the active one; keeps the previous animation when
      # the same tab is clicked again.
      defp next_animation(assigns, clicked_index) do
        %{animation: animation, active_tab: active_tab} = assigns

        cond do
          clicked_index > active_tab ->
            "slideInRight"

          clicked_index < active_tab ->
            "slideInLeft"

          true ->
            animation
        end
      end
    end
  end
end
|
lib/surface_bulma/tab_utils.ex
| 0.537284
| 0.677431
|
tab_utils.ex
|
starcoder
|
defmodule AWS.IoTAnalytics do
  @moduledoc """
  AWS IoT Analytics allows you to collect large amounts of device data,
  process messages, and store them. You can then query the data and run
  sophisticated analytics on it. AWS IoT Analytics enables advanced data
  exploration through integration with Jupyter Notebooks and data
  visualization through integration with Amazon QuickSight.

  Traditional analytics and business intelligence tools are designed to
  process structured data. IoT data often comes from devices that record
  noisy processes (such as temperature, motion, or sound). As a result the
  data from these devices can have significant gaps, corrupted messages, and
  false readings that must be cleaned up before analysis can occur. Also, IoT
  data is often only meaningful in the context of other data from external
  sources.

  AWS IoT Analytics automates the steps required to analyze data from IoT
  devices. AWS IoT Analytics filters, transforms, and enriches IoT data
  before storing it in a time-series data store for analysis. You can set up
  the service to collect only the data you need from your devices, apply
  mathematical transforms to process the data, and enrich the data with
  device-specific metadata such as device type and location before storing
  it. Then, you can analyze your data by running queries using the built-in
  SQL query engine, or perform more complex analytics and machine learning
  inference. AWS IoT Analytics includes pre-built models for common IoT use
  cases so you can answer questions like which devices are about to fail or
  which customers are at risk of abandoning their wearable devices.
  """

  # NOTE(review): this module follows the generated aws-elixir SDK layout --
  # each public function maps a single REST operation onto `request/8`, which
  # signs the call with SigV4 and dispatches it over HTTPoison.  The final
  # argument of `request/8` is the expected HTTP success status, or nil when
  # several statuses (200/202/204) are acceptable.

  @doc """
  Sends messages to a channel.
  """
  def batch_put_message(client, input, options \\ []) do
    path_ = "/messages/batch"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 200)
  end

  @doc """
  Cancels the reprocessing of data through the pipeline.
  """
  def cancel_pipeline_reprocessing(client, pipeline_name, reprocessing_id, input, options \\ []) do
    path_ = "/pipelines/#{URI.encode(pipeline_name)}/reprocessing/#{URI.encode(reprocessing_id)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end

  @doc """
  Creates a channel. A channel collects data from an MQTT topic and archives
  the raw, unprocessed messages before publishing the data to a pipeline.
  """
  def create_channel(client, input, options \\ []) do
    path_ = "/channels"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 201)
  end

  @doc """
  Creates a data set. A data set stores data retrieved from a data store by
  applying a "queryAction" (a SQL query) or a "containerAction" (executing a
  containerized application). This operation creates the skeleton of a data
  set. The data set can be populated manually by calling
  "CreateDatasetContent" or automatically according to a "trigger" you
  specify.
  """
  def create_dataset(client, input, options \\ []) do
    path_ = "/datasets"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 201)
  end

  @doc """
  Creates the content of a data set by applying a "queryAction" (a SQL query)
  or a "containerAction" (executing a containerized application).
  """
  def create_dataset_content(client, dataset_name, input, options \\ []) do
    path_ = "/datasets/#{URI.encode(dataset_name)}/content"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Creates a data store, which is a repository for messages.
  """
  def create_datastore(client, input, options \\ []) do
    path_ = "/datastores"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 201)
  end

  @doc """
  Creates a pipeline. A pipeline consumes messages from a channel and allows
  you to process the messages before storing them in a data store. You must
  specify both a `channel` and a `datastore` activity and, optionally, as
  many as 23 additional activities in the `pipelineActivities` array.
  """
  def create_pipeline(client, input, options \\ []) do
    path_ = "/pipelines"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, 201)
  end

  @doc """
  Deletes the specified channel.
  """
  def delete_channel(client, channel_name, input, options \\ []) do
    path_ = "/channels/#{URI.encode(channel_name)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, 204)
  end

  @doc """
  Deletes the specified data set.

  You do not have to delete the content of the data set before you perform
  this operation.
  """
  def delete_dataset(client, dataset_name, input, options \\ []) do
    path_ = "/datasets/#{URI.encode(dataset_name)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, 204)
  end

  @doc """
  Deletes the content of the specified data set.
  """
  def delete_dataset_content(client, dataset_name, input, options \\ []) do
    path_ = "/datasets/#{URI.encode(dataset_name)}/content"
    headers = []
    # Moves the "versionId" key (if present) out of the input map and into
    # the query string.
    {query_, input} =
      [
        {"versionId", "versionId"},
      ]
      |> AWS.Request.build_params(input)
    request(client, :delete, path_, query_, headers, input, options, 204)
  end

  @doc """
  Deletes the specified data store.
  """
  def delete_datastore(client, datastore_name, input, options \\ []) do
    path_ = "/datastores/#{URI.encode(datastore_name)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, 204)
  end

  @doc """
  Deletes the specified pipeline.
  """
  def delete_pipeline(client, pipeline_name, input, options \\ []) do
    path_ = "/pipelines/#{URI.encode(pipeline_name)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, 204)
  end

  @doc """
  Retrieves information about a channel.
  """
  def describe_channel(client, channel_name, include_statistics \\ nil, options \\ []) do
    path_ = "/channels/#{URI.encode(channel_name)}"
    headers = []
    query_ = []
    query_ = if !is_nil(include_statistics) do
      [{"includeStatistics", include_statistics} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves information about a data set.
  """
  def describe_dataset(client, dataset_name, options \\ []) do
    path_ = "/datasets/#{URI.encode(dataset_name)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves information about a data store.
  """
  def describe_datastore(client, datastore_name, include_statistics \\ nil, options \\ []) do
    path_ = "/datastores/#{URI.encode(datastore_name)}"
    headers = []
    query_ = []
    query_ = if !is_nil(include_statistics) do
      [{"includeStatistics", include_statistics} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves the current settings of the AWS IoT Analytics logging options.
  """
  def describe_logging_options(client, options \\ []) do
    path_ = "/logging"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves information about a pipeline.
  """
  def describe_pipeline(client, pipeline_name, options \\ []) do
    path_ = "/pipelines/#{URI.encode(pipeline_name)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves the contents of a data set as pre-signed URIs.
  """
  def get_dataset_content(client, dataset_name, version_id \\ nil, options \\ []) do
    path_ = "/datasets/#{URI.encode(dataset_name)}/content"
    headers = []
    query_ = []
    query_ = if !is_nil(version_id) do
      [{"versionId", version_id} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves a list of channels.
  """
  def list_channels(client, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/channels"
    headers = []
    query_ = []
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Lists information about data set contents that have been created.
  """
  def list_dataset_contents(client, dataset_name, max_results \\ nil, next_token \\ nil, scheduled_before \\ nil, scheduled_on_or_after \\ nil, options \\ []) do
    path_ = "/datasets/#{URI.encode(dataset_name)}/contents"
    headers = []
    query_ = []
    query_ = if !is_nil(scheduled_on_or_after) do
      [{"scheduledOnOrAfter", scheduled_on_or_after} | query_]
    else
      query_
    end
    query_ = if !is_nil(scheduled_before) do
      [{"scheduledBefore", scheduled_before} | query_]
    else
      query_
    end
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves information about data sets.
  """
  def list_datasets(client, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/datasets"
    headers = []
    query_ = []
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves a list of data stores.
  """
  def list_datastores(client, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/datastores"
    headers = []
    query_ = []
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Retrieves a list of pipelines.
  """
  def list_pipelines(client, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/pipelines"
    headers = []
    query_ = []
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Lists the tags (metadata) which you have assigned to the resource.
  """
  def list_tags_for_resource(client, resource_arn, options \\ []) do
    path_ = "/tags"
    headers = []
    query_ = []
    query_ = if !is_nil(resource_arn) do
      [{"resourceArn", resource_arn} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Sets or updates the AWS IoT Analytics logging options.

  Note that if you update the value of any `loggingOptions` field, it takes
  up to one minute for the change to take effect. Also, if you change the
  policy attached to the role you specified in the roleArn field (for
  example, to correct an invalid policy) it takes up to 5 minutes for that
  change to take effect.
  """
  def put_logging_options(client, input, options \\ []) do
    path_ = "/logging"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, nil)
  end

  @doc """
  Simulates the results of running a pipeline activity on a message payload.
  """
  def run_pipeline_activity(client, input, options \\ []) do
    path_ = "/pipelineactivities/run"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Retrieves a sample of messages from the specified channel ingested during
  the specified timeframe. Up to 10 messages can be retrieved.
  """
  def sample_channel_data(client, channel_name, end_time \\ nil, max_messages \\ nil, start_time \\ nil, options \\ []) do
    path_ = "/channels/#{URI.encode(channel_name)}/sample"
    headers = []
    query_ = []
    query_ = if !is_nil(start_time) do
      [{"startTime", start_time} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_messages) do
      [{"maxMessages", max_messages} | query_]
    else
      query_
    end
    query_ = if !is_nil(end_time) do
      [{"endTime", end_time} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Starts the reprocessing of raw message data through the pipeline.
  """
  def start_pipeline_reprocessing(client, pipeline_name, input, options \\ []) do
    path_ = "/pipelines/#{URI.encode(pipeline_name)}/reprocessing"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Adds to or modifies the tags of the given resource. Tags are metadata which
  can be used to manage a resource.
  """
  def tag_resource(client, input, options \\ []) do
    path_ = "/tags"
    headers = []
    {query_, input} =
      [
        {"resourceArn", "resourceArn"},
      ]
      |> AWS.Request.build_params(input)
    request(client, :post, path_, query_, headers, input, options, 204)
  end

  @doc """
  Removes the given tags (metadata) from the resource.
  """
  def untag_resource(client, input, options \\ []) do
    path_ = "/tags"
    headers = []
    {query_, input} =
      [
        {"resourceArn", "resourceArn"},
        {"tagKeys", "tagKeys"},
      ]
      |> AWS.Request.build_params(input)
    request(client, :delete, path_, query_, headers, input, options, 204)
  end

  @doc """
  Updates the settings of a channel.
  """
  def update_channel(client, channel_name, input, options \\ []) do
    path_ = "/channels/#{URI.encode(channel_name)}"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, nil)
  end

  @doc """
  Updates the settings of a data set.
  """
  def update_dataset(client, dataset_name, input, options \\ []) do
    path_ = "/datasets/#{URI.encode(dataset_name)}"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, nil)
  end

  @doc """
  Updates the settings of a data store.
  """
  def update_datastore(client, datastore_name, input, options \\ []) do
    path_ = "/datastores/#{URI.encode(datastore_name)}"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, nil)
  end

  @doc """
  Updates the settings of a pipeline. You must specify both a `channel` and a
  `datastore` activity and, optionally, as many as 23 additional activities
  in the `pipelineActivities` array.
  """
  def update_pipeline(client, pipeline_name, input, options \\ []) do
    path_ = "/pipelines/#{URI.encode(pipeline_name)}"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, nil)
  end

  # Builds the full URL, signs the request with SigV4, encodes the payload
  # as JSON, and performs the HTTP call.  `success_status_code` is the single
  # expected status, or nil to accept 200/202/204.
  @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
          {:ok, Poison.Parser.t(), Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "iotanalytics"}
    host = build_host("iotanalytics", client)
    url = host
    |> build_url(path, client)
    |> add_query(query)
    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)
    payload = encode_payload(input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(method, url, payload, headers, options, success_status_code)
  end

  # nil success code: accept 200/202/204 as success.  A 200 with an empty
  # body returns a 2-tuple (nothing to parse); every other success returns
  # the parsed JSON body alongside the raw response.
  defp perform_request(method, url, payload, headers, options, nil) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
        {:ok, response}
      {:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
      when status_code == 200 or status_code == 202 or status_code == 204 ->
        {:ok, Poison.Parser.parse!(body, %{}), response}
      {:ok, %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body, %{})
        {:error, error}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # Explicit success code: any other status is parsed as an AWS error body.
  defp perform_request(method, url, payload, headers, options, success_status_code) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
        {:ok, %{}, response}
      {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body, %{}), response}
      {:ok, %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body, %{})
        {:error, error}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # The "local" pseudo-region targets a local test endpoint.
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  # Appends an encoded query string only when there are query parameters.
  defp add_query(url, []) do
    url
  end
  defp add_query(url, query) do
    querystring = AWS.Util.encode_query(query)
    "#{url}?#{querystring}"
  end

  # GET-style calls pass input = nil, which becomes an empty payload.
  defp encode_payload(input) do
    if input != nil, do: Poison.Encoder.encode(input, %{}), else: ""
  end
end
|
lib/aws/iot_analytics.ex
| 0.83612
| 0.706646
|
iot_analytics.ex
|
starcoder
|
defmodule Fxnk.List do
  @moduledoc """
  `Fxnk.List` are functions for working with lists.
  """

  @doc """
  Returns a list of whatever was passed to it.

  ## Example

      iex> Fxnk.List.of(3)
      [3]
      iex> Fxnk.List.of([])
      [[]]
  """
  @spec of(any()) :: [any(), ...]
  def of(x), do: [x]

  @doc """
  Takes a list of maps and a key, returns a list of values in the key over all the maps

  ## Examples

      iex> list = [%{user_id: "1234"}, %{user_id: "4567"}, %{user_id: "6789"}]
      iex> Fxnk.List.pluck(list, :user_id)
      ["1234", "4567", "6789"]
  """
  @spec pluck([map(), ...], binary() | atom()) :: [any()]
  def pluck(list, property) do
    Enum.map(list, fn map -> Fxnk.Map.prop(map, property) end)
  end

  @doc """
  `reduce_right/3` takes a list of args, an initial value and a function and returns a single value.
  Like reduce, it applies the function to each of the arguments, and accumulating the result, except it does it right to left.

  ## Examples

      iex> Fxnk.List.reduce_right([1,2,3,4,5], 0, fn a, b -> a + b end)
      15
  """
  @spec reduce_right(list(any), any, function()) :: any()
  def reduce_right(args, initial, func) do
    args
    |> Enum.reverse()
    |> Enum.reduce(initial, func)
  end

  @doc """
  Takes two lists and returns a map where the values are the elements from the
  first list and the keys are the elements from the second.

  ## Examples

      iex> Fxnk.List.reduce_map([1, 2, 3], [:one, :two, :three])
      %{one: 1, two: 2, three: 3}
  """
  @spec reduce_map([any(), ...], [any()]) :: %{any() => any()}
  def reduce_map(values, keys) do
    # BUGFIX(review): the doc previously described the argument lists
    # swapped (keys first) relative to the code and its own doctest.
    # `Map.new/1` replaces the hand-rolled reduce + Map.merge; later
    # duplicate keys still win, as before.
    keys
    |> Enum.zip(values)
    |> Map.new()
  end

  @doc """
  Takes two lists and returns a map where the keys are the elements from the
  first list and the values are the elements from the second.

  ## Examples

      iex> Fxnk.List.reduce_map_right([:one, :two, :three], [1, 2, 3])
      %{one: 1, two: 2, three: 3}
  """
  @spec reduce_map_right([any()], [any(), ...]) :: %{any() => any()}
  def reduce_map_right(keys, values) do
    reduce_map(values, keys)
  end

  @doc """
  `zip_map/2` is a lot like `Enum.zip/2`, but instead of returning a list of tuples,
  it returns a list of maps, where the keys are the second list passed in.

  ## Examples

      iex> Fxnk.List.zip_map(["hello", "world"], ["first", "second"])
      [%{"first" => "hello"}, %{"second" => "world"}]
  """
  @spec zip_map(list(any), list(any)) :: [%{any() => any()}]
  def zip_map(list, keys) do
    keys
    |> Enum.zip(list)
    |> Enum.map(fn {k, v} -> %{k => v} end)
  end
end
|
lib/fxnk/list.ex
| 0.857723
| 0.565599
|
list.ex
|
starcoder
|
defmodule Ryal.Payments.Method do
  @moduledoc """
  A standard adapter to multiple payment methods.

  You can specify which payment you'd like to use via the `type` field and
  placing the data of a payment in the `data` field.

  The `data` field uses PostgreSQL's JSONB column to store the dynamic
  information. We use validations and whatnot at the application level to ensure
  the data is consistent. (Although, it would be nice is we could add
  constraints to PG later on.)

  ## Example

      iex> Ryal.Payments.Method.changeset(%Ryal.Payments.Method{}, %{
        type: "credit_card",
        user_id: 1,
        proxy: %{
          name: "<NAME>",
          number: "4242 4242 4242 4242",
          month: "03",
          year: "2048",
          cvc: "004"
        }
      })
      #Ecto.Changeset<action: nil,
        changes: %{data: #Ecto.Changeset<action: :insert,
          changes: %{cvc: "123", month: "03", name: "<NAME>",
            number: "4242 4242 4242 4242", year: "2048"}, errors: [],
          data: #Ryal.PaymentMethods.CreditCard<>, valid?: true>,
        type: "credit_card"},
        errors: [], data: #Ryal.Payments.Method<>, valid?: true>
  """
  use Ryal, :schema

  alias Ryal.Config

  # Ecto schema: the concrete payment data lives in the `:proxy` embed,
  # whose struct module is chosen from `:type` at cast time (see
  # `set_module_type/1` below).
  schema "ryal_payment_methods" do
    field :type, :string
    embeds_one :proxy, Ryal.PaymentMethods.Proxy
    has_many :payment_method_gateways, MethodGateway
    # NOTE(review): `Config.user_module()` is evaluated when this module
    # compiles, so changing the configured user module requires a
    # recompile -- confirm that is intended.
    belongs_to :user, Config.user_module()
    timestamps()
  end

  @required_fields ~w(type user_id)a

  @doc """
  You hand us some `data` and a `type` and we associate a payment method to a
  user.

  For an example on how to use this function, see the module description.
  """
  def changeset(struct, params \\ %{}) do
    struct
    |> cast(set_module_type(params), @required_fields)
    |> assoc_constraint(:user)
    |> validate_required(@required_fields)
    |> cast_embed(:proxy, required: true)
  end

  # Loads up the module for the payment method type and then applies it to the
  # proxy column. This struct carries the module name and the data carried over.
  #
  # SECURITY(review): `String.to_atom/1` on a caller-supplied `type` can
  # grow the atom table without bound if `type` ever comes from untrusted
  # input; `Config.payment_method/1` presumably rejects unknown types, but
  # confirm, or switch to `String.to_existing_atom/1`.
  defp set_module_type(%{type: type} = params) do
    type = String.to_atom(type)
    proxy_data = Map.get(params, :proxy, %{})
    module_name = Config.payment_method(type)
    Map.put(params, :proxy, struct(module_name, proxy_data))
  end

  # Params without an atom-keyed `:type` (e.g. string keys) pass through
  # unchanged.
  defp set_module_type(params), do: params
end
|
lib/ryal/payments/method.ex
| 0.855384
| 0.598664
|
method.ex
|
starcoder
|
defmodule Spell.Role.Session do
@moduledoc """
The `Spell.Role.Session` module implements the behaviour for a session
role.
Sessions are pseudo-roles; each peer started with `Spell.Connect`
has `Spell.Role.Session` added as the first role in `roles`.
"""
use Spell.Role
import Spell.Message, only: [receive_message: 3]
alias Spell.Message
alias Spell.Peer
alias Spell.Authentication, as: Auth
require Logger
# Module Attributes
defstruct [
:realm,
:roles,
:authentication,
:auth_lookup,
session: nil,
details: nil,
pid_hello: nil,
pid_goodbye: nil]
# TODO: rest of types
@type t :: %__MODULE__{
realm: String.t,
authentication: Keyword.t,
session: integer}
@goodbye_close_realm "wamp.error.close_realm"
@goodbye_and_out "wamp.error.goodbye_and_out"
@default_timeout 2000
# Public Functions
@doc """
Send a GOODBYE message to the remote peer. The remote peer should
reply with a GOODBYE.
"""
@spec cast_goodbye(pid, Keyword.t) :: :ok
def cast_goodbye(peer, options \\ []) do
reason = Keyword.get(options, :reason, @goodbye_close_realm)
details = Keyword.get(options, :details, %{message: "goodbye"})
{:ok, message} = new_goodbye(reason, details)
{:ok, ^message} = Peer.call(peer, __MODULE__, {:send, message})
:ok
end
@doc """
Send a GOODBYE message to the remote peer and wait for the GOODBYE reply.
This must be called from the peer's owner, otherwise the listening
process won't receive the GOODBYE message.
"""
@spec call_goodbye(pid, Keyword.t) :: {:ok, Message.t} | {:error, :timeout}
def call_goodbye(peer, options \\ []) do
timeout = Keyword.get(options, :timeout, config_timeout)
:ok = cast_goodbye(peer, options)
Peer.await(peer, :goodbye, timeout)
end
@doc """
Await the welcome message. Useful for blocking until the session is
established.
"""
@spec await_welcome(pid) :: {:ok, Message.t} | {:error, :timeout}
def await_welcome(peer), do: Peer.await(peer, :welcome, config_timeout)
def receive_welcome(peer) do
receive_message peer, :welcome do
{:ok, _} -> :ok
{:error, reason} -> {:error, reason}
end
end
# Role Callbacks
@doc """
Returns the state with the specified realm, role, and authentication info.
* `peer_options :: Map.t`
"""
def init(%{realm: nil}, _) do
{:error, :no_realm}
end
def init(%{role: role}, options) do
auth_lookup = get_in(options, [:authentication, :schemes])
|> Auth.schemes_to_lookup()
{:ok, struct(%__MODULE__{roles: role.features, auth_lookup: auth_lookup},
options)}
end
@doc """
Send a `HELLO` message when the connection is opened.
"""
def on_open(peer, %{realm: realm} = state) when realm != nil do
{:ok, hello} = new_hello(state.realm, get_hello_details(state))
case Peer.send_message(peer, hello) do
:ok ->
{:ok, %{state | pid_hello: peer.owner}}
{:error, reason} ->
{:error, reason}
end
end
@doc """
Handle `CHALLENGE`, `WELCOME`, `GOODBYE`, and `ABORT` messages.
"""
# CHALLENGE -- only valid while a HELLO is outstanding (`pid_hello` is a
# pid). Resolves the named auth scheme and answers with AUTHENTICATE.
def handle_message(%Message{type: :challenge, args: [name, details]},
                   peer, %{pid_hello: pid_hello} = state)
    when is_pid(pid_hello) do
  case get_auth_by_name(state, name) do
    nil ->
      # No auth scheme registered under `name`.
      {:error, {:challenge, :bad_scheme}}

    {auth_module, options} when is_atom(auth_module) ->
      case auth_module.response(details, options) do
        {:ok, signature, details} ->
          {:ok, message} = new_authenticate(signature, details)
          :ok = Peer.send_message(peer, message)
          {:ok, state}

        {:error, reason} ->
          {:error, {:challenge, reason}}
      end
  end
end

# WELCOME -- notify the process that initiated HELLO, record the session,
# and clear `pid_hello` so later CHALLENGE/WELCOME messages won't match.
def handle_message(%Message{type: :welcome, args: [session, details]} = welcome,
                   _peer, %{pid_hello: pid_hello} = state)
    when is_pid(pid_hello) do
  :ok = Peer.notify(pid_hello, welcome)
  {:ok, %{state | session: session, details: details, pid_hello: nil}}
end

# GOODBYE -- forward to the goodbye initiator and close the session.
def handle_message(%Message{type: :goodbye} = goodbye, _peer,
                   %{pid_goodbye: pid_goodbye} = state) do
  :ok = Peer.notify(pid_goodbye, goodbye)
  {:close, goodbye, state}
end

# ABORT -- surface the abort message itself as the error reason.
def handle_message(%Message{type: :abort} = abort, _peer, _state) do
  # TODO: test against various abort messages
  {:error, abort}
end

# Anything else falls through to the default role implementation.
def handle_message(message, peer, state) do
  super(message, peer, state)
end
@doc """
The `handle_call` function is used to send `GOODBYE` messages.
"""
# Only matches while no goodbye is in flight (`pid_goodbye: nil`); the
# caller's pid is stashed so the router's GOODBYE reply can be routed back.
# A second concurrent goodbye therefore fails to match this clause.
def handle_call({:send, %Message{type: :goodbye} = message},
                {pid_goodbye, _}, peer, %{pid_goodbye: nil} = state) do
  :ok = Peer.send_message(peer, message)
  {:ok, {:ok, message}, %{state | pid_goodbye: pid_goodbye}}
end
@doc """
The `on_close/2` callback notifies processes which own open `HELLO` or
`GOODBYE` commands that the peer is closing by sending a
`{Spell.Peer, pid, {:closed, command}}` message.
"""
def on_close(peer,
             %{pid_hello: pid_hello, pid_goodbye: pid_goodbye} = state) do
  # Only processes still waiting (non-nil pids) get a :closed notification.
  if pid_hello, do: :ok = Peer.notify(pid_hello, {:closed, :hello})
  if pid_goodbye, do: :ok = Peer.notify(pid_goodbye, {:closed, :goodbye})
  super(peer, state)
end
# Private Functions

# Builds a WAMP HELLO message for `realm` carrying the role/auth `details`.
@spec new_hello(String.t, map) :: {:ok, Message.t} | {:error, any}
defp new_hello(realm, details) do
  Message.new(type: :hello, args: [realm, details])
end
# Assembles the HELLO details map: auth details (when authentication is
# configured) merged with the advertised roles.
# NOTE(review): `Dict` has been deprecated since Elixir 1.3; replaced with
# `Map.merge/2`. This assumes `Auth.get_details/1` returns a map, like the
# `nil` branch does -- confirm against the Auth module.
@spec get_hello_details(t) :: map
defp get_hello_details(state) do
  case state.authentication do
    nil -> %{}
    authentication -> Auth.get_details(authentication)
  end
  |> Map.merge(%{roles: state.roles})
end
# Builds a WAMP GOODBYE message. Note the argument order in the wire
# message is [details, reason], the reverse of this function's parameters.
@spec new_goodbye(String.t, map) :: {:ok, Message.t} | {:error, any}
defp new_goodbye(reason, details) do
  # TODO: if `reason` is an atom, lookup its value
  Message.new(type: :goodbye, args: [details, reason])
end
# Builds a WAMP AUTHENTICATE message carrying the challenge signature.
# NOTE(review): declared with `def` although it sits in the private-functions
# section and has no @doc -- probably intended to be `defp`; confirm there
# are no external callers before changing visibility.
@spec new_authenticate(String.t, map) :: {:ok, Message.t} | {:error, any}
def new_authenticate(signature, details) do
  Message.new(type: :authenticate, args: [signature, details])
end
# Resolves an auth scheme `name` to `{auth_module, options}`; returns nil
# when the scheme is unknown or the module has no configured options.
# NOTE(review): uses the deprecated `Dict` module. Whether `auth_lookup`
# (from Auth.schemes_to_lookup/1) and `authentication[:schemes]` are maps
# or keyword lists is not visible here -- confirm before migrating to
# `Map.get`/`Keyword.get`.
@spec get_auth_by_name(t, String.t) :: {module, Keyword.t} | nil
defp get_auth_by_name(state, name) do
  case Dict.get(state.auth_lookup, name) do
    nil -> nil
    auth_module when is_atom(auth_module) ->
      case Dict.get(state.authentication[:schemes], auth_module) do
        nil -> nil
        options -> {auth_module, options}
      end
  end
end
# Reads the configured :spell timeout (milliseconds) at runtime, falling
# back to @default_timeout (defined earlier in this module).
@spec config_timeout() :: integer
defp config_timeout do
  Application.get_env(:spell, :timeout, @default_timeout)
end
end
|
lib/spell/role/session.ex
| 0.604866
| 0.412234
|
session.ex
|
starcoder
|
defmodule Aja.String do
  @moduledoc ~S"""
  Some extra helper functions for working with strings,
  that are not in the core `String` module.
  """

  # Downcasing modes, mirroring those accepted by `String.downcase/2`.
  @type mode :: :default | :ascii | :greek
  @modes [:default, :ascii, :greek]

  @doc ~S"""
  Transforms the text as a slug. Removes whitespace, special characters and
  converts the rest to lowercase.

  This is typically useful to generate URLs based on content, e.g. the title of an article.

  Like `String.downcase/2`, `slugify/2` also can handle the following modes:
  `:default` (keeps unicode), `:ascii` or `:greek`.

  ## Examples

      iex> Aja.String.slugify("> \"It Was Me, Dio!!!\"\n")
      "it-was-me-dio"
      iex> Aja.String.slugify("<NAME> a.k.a ジョジョ")
      "joseph-joestar-aka-ジョジョ"
      iex> Aja.String.slugify(<<220>>)
      ** (ArgumentError) Invalid string <<220>>

  `:ascii` converts to ascii when possible or strips characters:

      iex> Aja.String.slugify("OLÁ!\n", :ascii)
      "ola"
      iex> Aja.String.slugify("DIOの世界 -さらば友よ- ", :ascii)
      "dio"

  `:greek` handles the context sensitive sigma in Greek:

      iex> Aja.String.slugify("\tΣΣ?")
      "σσ"
      iex> Aja.String.slugify("\tΣΣ?", :greek)
      "σς"
  """
  @spec slugify(String.t(), mode) :: String.t()
  def slugify(string, mode \\ :default) when is_binary(string) and mode in @modes do
    string
    |> normalize(mode)
    |> String.downcase(mode)
    # Strip everything that is not a word character, whitespace or hyphen.
    |> try_replace(~r/[^\w\s-]/u, "")
    # Collapse whitespace/hyphen runs into single hyphens.
    |> String.replace(~r/[-\s]+/u, "-")
    # Trim leading and trailing separators.
    |> String.replace(~r/^(-|_)+/u, "")
    |> String.replace(~r/(-|_)+$/u, "")
  end

  # :ascii mode decomposes (NFKD) first, then keeps only the ASCII bytes.
  defp normalize(string, :ascii) do
    for <<byte <- nfkd_normalize(string)>>, byte <= 127, into: "", do: <<byte>>
  end

  # All other modes use NFKC composition.
  defp normalize(string, _mode) do
    nfkc_normalize(string)
  end

  # Note: same implementation as `String.normalize(string, :nfkc)` in Elixir 1.11
  # TODO replace when removing support for 1.10
  defp nfkc_normalize(string) do
    do_normalize(string, &:unicode.characters_to_nfkc_binary/1)
  end

  # Note: same implementation as `String.normalize(string, :nfkd)` in Elixir 1.11
  # TODO replace when removing support for 1.10
  defp nfkd_normalize(string) do
    do_normalize(string, &:unicode.characters_to_nfkd_binary/1)
  end

  # Shared recovery loop: on an invalid byte, keep the valid prefix and the
  # offending byte verbatim, then resume normalizing the remainder.
  defp do_normalize(string, normalizer) do
    case normalizer.(string) do
      normalized when is_binary(normalized) ->
        normalized

      {:error, valid_prefix, <<byte, rest::binary>>} ->
        valid_prefix <> <<byte>> <> do_normalize(rest, normalizer)
    end
  end

  # Re-raises the ArgumentError thrown on invalid UTF-8 with a clearer message.
  defp try_replace(string, regex, replacement) do
    String.replace(string, regex, replacement)
  rescue
    ArgumentError ->
      raise ArgumentError, "Invalid string #{inspect(string)}"
  end
end
|
lib/string.ex
| 0.727685
| 0.503601
|
string.ex
|
starcoder
|
defmodule PhoneToString do
  @moduledoc """
  Converts a string of phone-pad digits into all letter combinations the
  digits can spell, using the classic telephone keypad mapping.
  """

  # Telephone keypad mapping. "1" and "0" map to themselves because those
  # keys carry no letters.
  @mapping %{
    "1" => ["1"],
    "2" => ["A", "B", "C"],
    "3" => ["D", "E", "F"],
    "4" => ["G", "H", "I"],
    "5" => ["J", "K", "L"],
    "6" => ["M", "N", "O"],
    "7" => ["P", "Q", "R", "S"],
    "8" => ["T", "U", "V"],
    "9" => ["W", "X", "Y", "Z"],
    "0" => ["0"]
  }

  @doc """
  Append the mapping for the given character to the end of each string in a list, returning the
  mapping itself if there are no strings to append onto.

  ## Examples

      iex> PhoneToString.append_to_existing("6", ["A", "B", "C"])
      ["AM", "BM", "CM", "AN", "BN", "CN", "AO", "BO", "CO"]

      iex> PhoneToString.append_to_existing("2", [])
      ["A", "B", "C"]
  """
  def append_to_existing(char, []), do: @mapping[char]

  def append_to_existing(char, strings) do
    Enum.flat_map(@mapping[char], fn letter ->
      Enum.map(strings, fn string -> string <> letter end)
    end)
  end

  @doc """
  Given a list of digits, return a list of strings for all character permutations given @mapping.
  """
  def get_permutations(_, acc \\ [])
  def get_permutations([], acc), do: acc

  def get_permutations([x | tail], acc) do
    get_permutations(tail, append_to_existing(x, acc))
  end

  # Naive recursive variant, kept for comparison with get_permutations/2.
  def get_permutations_slow([]), do: []
  def get_permutations_slow([x]), do: @mapping[x]

  # Clause order guarantees `tail` is non-empty here, so the original
  # O(n) `length(tail) > 0` guard was redundant and has been dropped.
  def get_permutations_slow([x | tail]) do
    # Hoisted out of the loop: the original recomputed the tail's
    # permutations once per letter of `x`, making the recursion
    # exponentially slower than necessary.
    tail_permutations = get_permutations_slow(tail)

    Enum.flat_map(@mapping[x], fn char ->
      Enum.map(tail_permutations, fn string -> char <> string end)
    end)
  end

  @doc """
  Get all permutations of an input of digits and return them as a list.

  ## Examples

      iex> PhoneToString.call("1")
      ["1"]

      iex> PhoneToString.call("2")
      ["A", "B", "C"]

      iex> PhoneToString.call("23") |> Enum.sort()
      ["AD", "AE", "AF", "BD", "BE", "BF", "CD", "CE", "CF"]

      iex> PhoneToString.call("")
      []
  """
  def call(input) do
    input
    |> String.codepoints()
    |> get_permutations()
  end

  @doc """
  Like `call/1`, but uses the naive recursive implementation.
  """
  def call_slow(input) do
    input
    |> String.codepoints()
    |> get_permutations_slow()
  end
end
|
lib/phoneToString.ex
| 0.708616
| 0.462898
|
phoneToString.ex
|
starcoder
|
defmodule TrademarkFreeStrategicLandWarfare.Players.JeffsBeard do
  @moduledoc """
  Player strategy: enumerate every legal move, then pick one at random from
  a pool in which winning and capturing moves are heavily over-represented.
  """

  alias TrademarkFreeStrategicLandWarfare.{Board, Player, Piece}

  @behaviour Player

  @type direction() :: :north | :west | :east | :south
  # NOTE: the original specs used `Integer.t()`/`Atom.t()`, which are not
  # valid typespecs; `pos_integer()`/`atom()` are the intended builtins.
  @type count() :: pos_integer()
  @type state() :: any()

  # Pool multipliers (copies of a move added to the random-selection pool).
  # Values preserve the original `Enum.map(0..N, ...)` counts (N + 1).
  @win_weight 1_000_001
  @capture_weight 201
  @neutral_weight 21

  @spec name() :: binary()
  def name(), do: "<NAME> 🧔"

  # should return a list with 4 lists of 10 piece-name atoms (:miner, :colonel, etc) per list
  @spec initial_pieces_placement() :: nonempty_list([atom(), ...])
  def initial_pieces_placement() do
    [
      [:captain, :scout, :scout, :lieutenant, :scout, :captain, :miner, :marshall, :scout, :captain],
      [:lieutenant, :sergeant, :bomb, :spy, :general, :scout, :major, :major, :colonel, :scout],
      [:sergeant, :bomb, :sergeant, :major, :colonel, :lieutenant, :bomb, :lieutenant, :captain, :sergeant],
      [:scout, :miner, :bomb, :scout, :miner, :bomb, :flag, :bomb, :miner, :miner]
    ]
  end

  @doc """
  Chooses the next move for this player. All legal moves are generated and
  weighted by simulated outcome; one is picked at random. Raises when no
  move is possible.
  """
  @spec turn(%Board{}, %Player{}, state()) :: {binary(), direction(), count(), state()}
  def turn(%Board{} = board, %Player{number: number}, state) do
    possible_pieces = all_pieces_for_player(board, number)
    all_possible_moves = possible_moves(board, possible_pieces, state)

    case weight_moves(board, number, all_possible_moves) do
      [] -> raise "no move possible"
      moves -> Enum.random(moves)
    end
  end

  # Enumerates every {uuid, direction, count, state} tuple accepted by
  # Board.move/5 for the given pieces; illegal moves are filtered out.
  defp possible_moves(board, possible_pieces, state) do
    candidates =
      for piece <- possible_pieces,
          direction <- [:north, :west, :east, :south],
          # BUG FIX: the original called `move_counts(piece)` with the whole
          # %Piece{} struct, so the :scout clause never matched and scouts
          # were limited to single-square moves. Dispatch on the name.
          move_count <- move_counts(piece.name) do
        case Board.move(board, piece.player, piece.uuid, direction, move_count) do
          {:ok, :win, _} -> {piece.uuid, direction, move_count, state}
          {:error, :unknown_result} -> {piece.uuid, direction, move_count, state}
          {:ok, %Board{}} -> {piece.uuid, direction, move_count, state}
          _ -> nil
        end
      end

    Enum.reject(candidates, &is_nil/1)
  end

  # Scouts may move up to the full board length; all other pieces one square.
  defp move_counts(:scout), do: Enum.to_list(1..10)
  defp move_counts(_), do: [1]

  # Simulates each move and duplicates it in the pool according to outcome:
  # wins dominate, captures are strongly preferred, losing a piece is
  # discouraged. List.duplicate/2 replaces the original Enum.map(0..N) hack.
  defp weight_moves(board, player, moves) do
    Enum.flat_map(moves, fn {uuid, direction, spaces, _state} = move ->
      case Board.move(board, player, uuid, direction, spaces) do
        {:ok, :win, _} ->
          List.duplicate(move, @win_weight)

        {:error, :unknown_result} ->
          [move]

        {:ok, %Board{} = new_board} ->
          weigh_outcome(board, new_board, player, move)
      end
    end)
  end

  # Classifies a non-winning move by comparing piece counts before and
  # after the simulated board state.
  defp weigh_outcome(board, new_board, player, move) do
    other_player =
      case player do
        1 -> 2
        2 -> 1
      end

    captured_enemy? =
      length(all_pieces_for_player(new_board, other_player)) <
        length(all_pieces_for_player(board, other_player))

    lost_own_piece? =
      length(all_pieces_for_player(new_board, player)) <
        length(all_pieces_for_player(board, player))

    cond do
      captured_enemy? -> List.duplicate(move, @capture_weight)
      lost_own_piece? -> [move]
      true -> List.duplicate(move, @neutral_weight)
    end
  end

  # Collects all movable pieces (bombs and flags excluded) owned by `number`.
  def all_pieces_for_player(%Board{rows: rows}, number) do
    rows
    |> List.flatten()
    |> Enum.flat_map(fn
      %Piece{player: ^number, name: name} = piece when name not in [:bomb, :flag] -> [piece]
      _ -> []
    end)
  end
end
|
lib/trademark_free_strategic_land_warfare/players/jeffs_beard.ex
| 0.661376
| 0.48688
|
jeffs_beard.ex
|
starcoder
|
defmodule RDF.Resource.Generator do
  @moduledoc """
  A configurable and customizable way to generate resource identifiers.

  The basis are different implementations of the behaviour defined in this
  module for configurable resource identifier generation methods.
  Generally two kinds of identifiers are differentiated:

  1. parameter-less identifiers which are generally random
  2. identifiers which are based on some value, where every attempt to create
     an identifier for the same value, should produce the same identifier

  Not all implementations must support both kind of identifiers.

  The `RDF.Resource.Generator` module provides two `generate` functions for the
  kindes of identifiers, `generate/1` for random-based and `generate/2` for
  value-based identifiers.
  The `config` keyword list they take must contain a `:generator` key, which
  provides the module implementing the `RDF.Resource.Generator` behaviour.
  All other keywords are specific to the generator implementation.
  When the generator is configured differently for the different
  identifier types, the identifier-type specific configuration can be put under
  the keys `:random_based` and `:value_based` respectively.
  The `RDF.Resource.Generator.generate` implementations will be called with the
  general configuration options from the top-level merged with the identifier-type
  specific configuration.

  The `generate` functions however are usually not called directly.
  See the [guide](https://rdf-elixir.dev/rdf-ex/resource-generators.html) on
  how they are meant to be used.

  The following `RDF.Resource.Generator` implementations are provided with RDF.ex:

  - `RDF.BlankNode`
  - `RDF.BlankNode.Generator`
  - `RDF.IRI.UUID.Generator`
  """

  @type id_type :: :random_based | :value_based

  @doc """
  Generates a random resource identifier based on the given `config`.
  """
  @callback generate(config :: any) :: RDF.Resource.t()

  @doc """
  Generates a resource identifier based on the given `config` and `value`.
  """
  @callback generate(config :: any, value :: binary) :: RDF.Resource.t()

  @doc """
  Allows to normalize the configuration.

  This callback is optional. A default implementation is generated which
  returns the configuration as-is.
  """
  @callback generator_config(id_type, keyword) :: any

  defmacro __using__(_opts) do
    quote do
      @behaviour RDF.Resource.Generator

      @impl RDF.Resource.Generator
      def generator_config(_, config), do: config

      defoverridable generator_config: 2
    end
  end

  @doc """
  Generates a random resource identifier based on the given `config`.

  See the [guide](https://rdf-elixir.dev/rdf-ex/resource-generators.html) on
  how it is meant to be used.
  """
  def generate(config) do
    {generator, generator_config} = resolve_generator(:random_based, config)
    generator.generate(generator_config)
  end

  @doc """
  Generates a resource identifier based on the given `config` and `value`.

  See the [guide](https://rdf-elixir.dev/rdf-ex/resource-generators.html) on
  how it is meant to be used.
  """
  def generate(config, value) do
    {generator, generator_config} = resolve_generator(:value_based, config)
    generator.generate(generator_config, value)
  end

  # Extracts the generator module and its normalized, identifier-type
  # specific configuration from the raw config keyword list.
  defp resolve_generator(id_type, config) do
    {random_opts, config} = Keyword.pop(config, :random_based)
    {value_opts, config} = Keyword.pop(config, :value_based)

    type_specific_opts =
      case id_type do
        :random_based -> random_opts
        :value_based -> value_opts
      end

    {generator, config} =
      config
      |> override_with(type_specific_opts)
      |> Keyword.pop!(:generator)

    {generator, generator.generator_config(id_type, config)}
  end

  # Identifier-type specific options override the shared top-level ones.
  defp override_with(config, nil), do: config
  defp override_with(config, type_specific_opts), do: Keyword.merge(config, type_specific_opts)
end
|
lib/rdf/resource_generator.ex
| 0.890306
| 0.628436
|
resource_generator.ex
|
starcoder
|
defmodule MaterializedStore do
  @moduledoc """
  Materialized Store implements the consumer-side of a Kafka-backed KV store.

  Messages are written to Kafka as (key,val) tuples and read by the store. Each
  message is passed through the provided parser, and the resulting, deserialized,
  representation is then stored for retrieval.

  The provided function must satisfy a very simple interface: it is passed the
  key and value as byte strings, as read directly from Kafka, and must return
  a tuple with {key,value} to use in the store. If key is nil the message is
  ignored, if value is nil the old key is deleted from the store. If both key
  and value are set, the value is set for the key in the store.

  The data in the store can be retrieved with the `get` call. The user can wait
  for the store to "synchronize", i.e. catch up to the end of the Kafka topic,
  using the `wait_sync` call. These two operations can be combined with the
  `get_sync` call.

  Note that Materialized Store only provides sequential consistency on
  a per-partition basis. This is a result of Kafka providing ordering guarantee
  only for individual partitions. In practice only the per-key ordering will be
  relevant when using Materialized Store.
  """
  use GenServer
  require Logger

  # topic:          Kafka topic being consumed
  # parser:         (key, value -> {key, value}) deserializer
  # store:          ETS table holding the materialized KV data
  # seen_offsets:   %{partition => last applied offset} (-1 = none yet)
  # unsynchronized: MapSet of partitions not yet caught up to the high-watermark
  # waiters:        GenServer callers blocked in wait_sync
  defstruct topic: "",
            parser: nil,
            store: nil,
            seen_offsets: %{},
            unsynchronized: nil,
            waiters: []

  @doc """
  Starts a materialized store.

  The user must provide the following arguments:
  - broker: a list of initial brokers to connect to, of the format `[{'host', port}]`.
  - topic: the topic to consume from. All partitions in the topic will be consumed.
  - parser: a function `(key, val -> {key,val})`

  Examples:
  - `MaterializedStore.start_link([{'localhost', 9092}], "test", fn k,v -> {k,v} end)`

  For return values, and valid options, see `GenServer.start_link`.
  """
  def start_link(broker, topic, parser, options \\ []) do
    GenServer.start_link(__MODULE__, [broker, topic, parser], options)
  end

  @doc """
  Like `start_link`, but returns a child spec rather than starting the store.
  Usually used for running a materialized store in a supervisor tree.
  """
  # NOTE(review): Supervisor.Spec is deprecated since Elixir 1.5; kept for
  # backwards compatibility of the returned spec shape.
  def child_spec(broker, topic, parser, options \\ []) do
    import Supervisor.Spec
    worker(__MODULE__, [broker, topic, parser], options)
  end

  @doc false
  def init([brokers, topic, parser]) do
    # NOTE(review): the client id embeds a fresh ref, so every (re)start
    # mints a new atom; atoms are never garbage collected.
    {:ok, cli} = :brod.start_link_client(brokers, :"#{__MODULE__}_#{inspect(make_ref())}", [])
    {:ok, meta} = :brod_client.get_metadata(cli, topic)
    offsets = spawn_consumer(cli, meta)
    store = :ets.new(:materialized_store, [:set, :protected])

    {:ok,
     %__MODULE__{
       topic: topic,
       parser: parser,
       store: store,
       seen_offsets: offsets,
       unsynchronized: MapSet.new(Map.keys(offsets))
     }}
  end

  # Starts one brod consumer per partition, subscribed from the earliest
  # offset, and returns the initial seen-offsets map (%{partition => -1}).
  defp spawn_consumer(cli, {:kpro_MetadataResponse, _brokers, [{:kpro_TopicMetadata, :no_error, topic, parts}]}) do
    (for {:kpro_PartitionMetadata, :no_error, part, _leader, _replicas, _isr} <- parts, do: part)
    |> Enum.map(fn p ->
      {:ok, consumer} = :brod_consumer.start_link(cli, topic, p, [], [])
      :ok = :brod_consumer.subscribe(consumer, self(), begin_offset: :earliest)
      {p, -1}
    end)
    |> Enum.into(%{})
  end

  # True once every partition has reached its high-watermark.
  defp synchronized?(state) do
    MapSet.size(state.unsynchronized) == 0
  end

  # Applies one Kafka message to the ETS store and records its offset.
  #
  # BUG FIX: the original duplicate-offset guard clause could never match --
  # it pattern-matched `%{seen_offset: seen}` (the struct key is
  # `seen_offsets`, a per-partition map) and a 6-element message tuple
  # (messages have 7 elements). The per-partition check below restores the
  # intended dedup behavior.
  defp handle_msg({:kafka_message, offset, _, _, key, value, _}, part, state) do
    if Map.get(state.seen_offsets, part, -1) >= offset do
      # Already applied (e.g. redelivery); skip without touching the store.
      state
    else
      try do
        # Sanitize inputs: brod reports absent key/value as :undefined.
        key = case key do :undefined -> ""; rest -> rest end
        value = case value do :undefined -> ""; rest -> rest end

        case state.parser.(key, value) do
          {nil, _} -> nil
          {k, nil} -> :ets.delete(state.store, k)
          {k, v} -> :ets.insert(state.store, {k, v})
        end
      rescue
        e ->
          # A bad message is logged and skipped; its offset is still
          # recorded so consumption makes progress.
          Logger.error(fn -> "Message parsing failed for input message at #{state.topic}:#{part} @ #{offset}: #{inspect e}" end, offset: offset)
      end

      %{state | seen_offsets: Map.put(state.seen_offsets, part, offset)}
    end
  end

  # Removes `part` from the unsynchronized set; once all partitions have
  # caught up, replies :ok to every blocked wait_sync caller.
  defp mark_synchronized(state, part) do
    state = %{state | unsynchronized: MapSet.delete(state.unsynchronized, part)}

    if synchronized?(state) do
      for w <- state.waiters, do: GenServer.reply(w, :ok)
      %{state | waiters: []}
    else
      state
    end
  end

  @doc false
  # Message sets pushed by brod consumers: apply each message, ack the
  # consumer, and mark the partition synchronized at the high-watermark.
  def handle_info({consumer, {:kafka_message_set, _topic, part, hwm, msgs}}, state) do
    state = msgs |> Enum.reduce(state, &(handle_msg(&1, part, &2)))
    :brod_consumer.ack(consumer, state.seen_offsets[part])

    lag = hwm - state.seen_offsets[part] - 1
    state = case lag do 0 -> mark_synchronized(state, part); _ -> state end
    {:noreply, state}
  end

  @doc false
  def handle_call(:get_table, _from, state) do
    {:reply, state.store, state}
  end

  def handle_call({:get, key}, _from, state) do
    case :ets.lookup(state.store, key) do
      [{_k, val}] -> {:reply, val, state}
      _ -> {:reply, nil, state}
    end
  end

  @doc false
  # Replies immediately if already synchronized; otherwise the caller is
  # parked in `waiters` until mark_synchronized/2 replies for us.
  def handle_call(:wait_sync, from, state) do
    if synchronized?(state) do
      {:reply, :ok, state}
    else
      {:noreply, %{state | waiters: [from | state.waiters]}}
    end
  end

  # Public API

  @doc """
  Retrieve the underlying ETS table. The table can be queried directly if
  maximum performance is needed. The semantics on-read are the same as for
  `get`.
  """
  def get_table(pid, timeout \\ 5000) do
    GenServer.call(pid, :get_table, timeout)
  end

  @doc """
  Retrieves the value for the given key, or nil if the key does not exist.
  """
  def get(pid, key, timeout \\ 5000) do
    GenServer.call(pid, {:get, key}, timeout)
  end

  @doc """
  Waits for the materialized store to catch up to the head of the Kafka topic.
  Until the head has been reached, stale reads may be served.
  """
  def wait_sync(pid, timeout \\ 30000) do
    GenServer.call(pid, :wait_sync, timeout)
  end

  @doc """
  Combines wait_sync and get as a single call. Note that internally this simply
  calls `wait_sync` followed by `get`, and thus exists purely as a helper.
  """
  def get_sync(pid, key, timeout \\ 30000) do
    wait_sync(pid, timeout)
    get(pid, key, timeout)
  end
end
|
lib/materialized_store.ex
| 0.840488
| 0.563948
|
materialized_store.ex
|
starcoder
|
defmodule Cards do
  @moduledoc """
  Provides methods for creating and handling a deck of cards
  """

  # Run a shell with: iex -S mix
  # Then call functions as Cards.function_name(args)

  @doc """
  Lists the capabilities this module provides.
  """
  def help do
    "hello, there! This module provides functions for creating deck, shuffling, checking if card is in deck, "
  end

  @doc """
  Return a list of strings which represent a deck of playing cards
  """
  def create_deck do
    values = ["Ace", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", "Nine", "Ten", "Jack", "Queen", "King"]
    suits = ["Spades", "Hearts", "Diamonds", "Clubs"]

    # A single list comprehension over both lists; unlike the nested-loop
    # approach (doubleLoopDeckGeneration/2) this needs no List.flatten.
    doubleComprehension(values, suits)
  end

  @doc """
  creates the card collection
  """
  # Iterates over both lists in one comprehension -- essentially a for loop.
  def doubleComprehension(values, suits) do
    for suit <- suits, value <- values do
      "#{value} of #{suit}"
    end
  end

  # Nested double loop -- kept only for comparison. Note it returns a
  # NESTED list (one inner list per value), which is why a flatten step
  # was needed when it was used.
  def doubleLoopDeckGeneration(values, suits) do
    for value <- values do
      for suit <- suits do
        "#{value} of #{suit}"
      end
    end
  end

  @doc """
  Returns the deck in random order.
  """
  def shuffle(deck) do
    Enum.shuffle(deck)
  end

  @doc """
  Returns true when `card` is present in `deck`.
  """
  def contains?(deck, card) do
    Enum.member?(deck, card)
  end

  @doc """
  Deals `cardsToBeDealt` cards from the deck.

  Output is a tuple: {[hand to be returned], [rest of the cards in given deck]}
  """
  def deal(deck, cardsToBeDealt) do
    Enum.split(deck, cardsToBeDealt)
  end

  @doc """
  Persists a deck to `filename` in Erlang external term format.
  """
  def save(deck, filename) do
    binary = :erlang.term_to_binary(deck)
    File.write(filename, binary)
  end

  @doc """
  Loads the file from erlang binary
  """
  # Loads by matching status first, then branching on the atom.
  # The original had an unreachable `_ ->` clause: File.read/1 only
  # returns :ok or :error tagged tuples.
  # NOTE: :erlang.binary_to_term/1 is unsafe on untrusted files; consider
  # :erlang.binary_to_term(binary, [:safe]) if input may come from outside.
  def load1(filename) do
    {status, binary} = File.read(filename)

    case status do
      :ok -> :erlang.binary_to_term(binary)
      :error -> "File does not exist"
    end
  end

  # Loads by matching the tuple and binding in one step. The `_` in the
  # error tuple ignores the posix reason we don't care about.
  def load2(filename) do
    case File.read(filename) do
      {:ok, binary} -> :erlang.binary_to_term(binary)
      {:error, _} -> "File does not exist"
    end
  end

  @doc """
  Non-Piped: Creates a deck of cards, shuffles it and then deals it based on hand size
  """
  def create_hand_without_pipe(hand_size) do
    deck = Cards.create_deck()
    deck = Cards.shuffle(deck)
    # Return the deal result directly; the original bound it to an unused
    # `hand` variable, which only produced a compiler warning.
    Cards.deal(deck, hand_size)
  end

  @doc """
  Piped: Creates a deck of cards, shuffles it and then deals it based on hand size
  """
  # NOTE: the pipe operator ALWAYS injects the left value as the FIRST argument.
  def create_hand(hand_size) do
    Cards.create_deck()
    |> Cards.shuffle()
    |> Cards.deal(hand_size)
  end
end
|
cards/lib/cards.ex
| 0.645455
| 0.575916
|
cards.ex
|
starcoder
|
defmodule Changex.Grouper do
  @moduledoc """
  This module will take a list of commits and sort them based on the
  type of the commit.
  """

  @doc """
  Take a list of `commits` in the format:

      [hash, subject | body]

  And transform them into a map based on the type of the commits. the
  map could look like:

      %{
        fix: [commit1, commit2],
        chore: [commit3, commit4]
      }
  """
  def group_by_type(commits) do
    group(commits)
  end

  @doc """
  Take a map of `commits` in the format:

      %{
        fix: [commit1, commit2],
        chore: [commit3, commit4]
      }

  And transform them into a map based on the scope of the commits. the
  map could look like:

      %{
        fix: %{
          scope1: [commit1, commit2],
          scope2: [commit5, commit6]
        }
        chore: %{
          scope1: [commit3, commit4],
          scope2: [commit7, commit8]
        }
      }
  """
  def group_by_scope(commits) do
    Map.keys(commits)
    |> create_scope_map(%{}, commits)
  end

  # Recursively builds %{type => %{scope => [commits]}} for every type key.
  defp create_scope_map([], map, _), do: map

  defp create_scope_map([type | rest], map, commits) do
    map = add_scopes_to_map(Map.put(map, type, %{}), Map.get(commits, type))
    create_scope_map(rest, map, commits)
  end

  # Folds each commit of a single type into that type's scope map.
  defp add_scopes_to_map(map, commits) do
    commits
    |> Enum.reduce(map, fn commit, acc -> add_scope_to_map(commit, acc) end)
  end

  # Appends `commit` to map[type][scope], creating the scope list on demand.
  defp add_scope_to_map(commit, map) do
    type = Keyword.get(commit, :type)
    scope = Keyword.get(commit, :scope)
    scope_map = default_scope_map(map, type, scope)
    commits = Map.get(scope_map, scope)
    scope_map = Map.put(scope_map, scope, commits ++ [commit])
    Map.put(map, type, scope_map)
  end

  # Ensures map[type] contains a (possibly empty) list for `scope`.
  defp default_scope_map(map, type, scope) do
    scope_map = Map.get(map, type)
    Map.put_new(scope_map, scope, [])
  end

  # Drops the commit body, keeping only [hash, subject].
  defp strip_body([hash, subject | _rest]) do
    [hash, subject]
  end

  # Filter predicate: keeps only parseable commits (get_commit_parts/1
  # returns [] when the subject has no recognizable type).
  defp strip_nil_commits(commit) do
    commit != []
  end

  # Parses every commit subject, then appends pseudo-commits extracted from
  # "BREAKING CHANGE:" body lines, and finally groups everything by type.
  defp group(commits) do
    non_breaking =
      commits
      |> Enum.map(&strip_body/1)
      |> Enum.map(&get_commit_parts/1)
      |> Enum.filter(&strip_nil_commits/1)

    breaking =
      commits
      |> Enum.reduce([], &extract_breaking_changes/2)

    (non_breaking ++ breaking)
    |> group_commits
  end

  # Scans a commit body for breaking-change markers, accumulating into `all`.
  defp extract_breaking_changes([hash, subject | rest], all) do
    get_commit_parts([hash, subject])
    |> get_breaking_changes(rest, all)
  end

  # Walks body lines until a "BREAKING CHANGE: " marker is found.
  defp get_breaking_changes(_parts, [], all), do: all

  defp get_breaking_changes(parts, [head | tail], all) do
    case head do
      "BREAKING CHANGE: " <> message ->
        get_breaking_change_description(parts, tail, message, all)

      _other ->
        get_breaking_changes(parts, tail, all)
    end
  end

  # Collects the (possibly multi-line) description of one breaking change;
  # a new "BREAKING CHANGE: " marker terminates the current description and
  # restarts the scan.
  defp get_breaking_change_description(parts, [], description, all) do
    all ++ [breaking_commit(parts, description)]
  end

  defp get_breaking_change_description(parts, commit = [head | tail], description, all) do
    case head do
      "BREAKING CHANGE: " <> _message ->
        list = all ++ [breaking_commit(parts, description)]
        get_breaking_changes(parts, commit, list)

      _other ->
        get_breaking_change_description(parts, tail, description <> "\n" <> head, all)
    end
  end

  # Builds a pseudo-commit of type :break carrying the breaking description.
  defp breaking_commit(parts, description) do
    [
      hash: parts[:hash],
      type: :break,
      scope: parts[:scope],
      description: description |> String.trim_trailing()
    ]
  end

  # Parses "type(scope): description" out of the subject; returns [] when
  # no type could be extracted.
  defp get_commit_parts([hash, subject]) do
    format = "%{type}(%{scope}): %{description}"
    parts = Changex.SubjectSplitter.get_parts(subject, format)

    case Keyword.get(parts, :type) do
      nil ->
        []

      _ ->
        type = Keyword.get(parts, :type) |> String.to_atom()
        parts = Keyword.put(parts, :type, type)
        Keyword.put(parts, :hash, hash)
    end
  end

  defp group_commits(commits) do
    group_commits(commits, %{})
  end

  defp group_commits([], dict), do: dict

  defp group_commits([commit | rest], dict) do
    type = Keyword.get(commit, :type)
    # Map.update/4 replaces the original put_new/get/put triple; appending
    # preserves the input order within each type, as before.
    dict = Map.update(dict, type, [commit], &(&1 ++ [commit]))
    group_commits(rest, dict)
  end
end
|
lib/changex/grouper.ex
| 0.651577
| 0.537041
|
grouper.ex
|
starcoder
|
defmodule OAuth2Utils do
@moduledoc """
Util functions for OAuth2 and connected (OpenID Connect, UMA2) standards
Standard sets are the following:
* `:oauth2`: refers to RFC6749 and all other RFCs published by the IETF
* `:oidc`: refers to OpenID Connect ([https://openid.net/developers/specs/](speifications))
* `:uma2`: refers to User Managed Access specifications published by Kantara initiative
Note that regarding origin of values, IETF have precedence over the others.
"""
@access_token_types %{
"Bearer" => [standard_set: :oauth2]
}
@authorization_endpoint_response_types %{
"code" => [standard_set: :oauth2],
"code id_token" => [standard_set: :oidc],
"code id_token token" => [standard_set: :oidc],
"code token" => [standard_set: :oidc],
"id_token" => [standard_set: :oidc],
"id_token token" => [standard_set: :oidc],
"none" => [standard_set: :oidc],
"token" => [standard_set: :oauth2]
}
@extension_errors %{
"invalid_request" => [standard_set: :oauth2],
"invalid_token" => [standard_set: :oauth2],
"insufficient_scope" => [standard_set: :oauth2],
"unsupported_token_type" => [standard_set: :oauth2],
"interaction_required" => [standard_set: :oidc],
"login_required" => [standard_set: :oidc],
"session_selection_required" => [standard_set: :oidc],
"consent_required" => [standard_set: :oidc],
"invalid_request_uri" => [standard_set: :oidc],
"invalid_request_object" => [standard_set: :oidc],
"request_not_supported" => [standard_set: :oidc],
"request_uri_not_supported" => [standard_set: :oidc],
"registration_not_supported" => [standard_set: :oidc],
"need_info" => [standard_set: :uma2],
"request_denied" => [standard_set: :uma2],
"request_submitted" => [standard_set: :uma2]
}
@parameters %{
"client_id" => [standard_set: :oauth2, locations: [:authorization_request, :token_request]],
"client_secret" => [standard_set: :oauth2, locations: [:token_request]],
"response_type" => [standard_set: :oauth2, locations: [:authorization_request]],
"redirect_uri" => [standard_set: :oauth2, locations: [:authorization_request, :token_request]],
"scope" => [standard_set: :oauth2, locations: [:authorization_request, :authorization_response, :token_request, :token_response]],
"state" => [standard_set: :oauth2, locations: [:authorization_request, :authorization_response]],
"code" => [standard_set: :oauth2, locations: [:authorization_response, :token_request]],
"error" => [standard_set: :oauth2, locations: [:authorization_response, :token_response]],
"error_description" => [standard_set: :oauth2, locations: [:authorization_response, :token_response]],
"error_uri" => [standard_set: :oauth2, locations: [:authorization_response, :token_response]],
"grant_type" => [standard_set: :oauth2, locations: [:token_request]],
"access_token" => [standard_set: :oauth2, locations: [:authorization_response, :token_response]],
"token_type" => [standard_set: :oauth2, locations: [:authorization_response, :token_response]],
"expires_in" => [standard_set: :oauth2, locations: [:authorization_response, :token_response]],
"username" => [standard_set: :oauth2, locations: [:token_request]],
"password" => [standard_set: :oauth2, locations: [:token_request]],
"refresh_token" => [standard_set: :oauth2, locations: [:token_request, :token_response]],
"nonce" => [standard_set: :oidc, locations: [:authorization_request]],
"display" => [standard_set: :oidc, locations: [:authorization_request]],
"prompt" => [standard_set: :oidc, locations: [:authorization_request]],
"max_age" => [standard_set: :oidc, locations: [:authorization_request]],
"ui_locales" => [standard_set: :oidc, locations: [:authorization_request]],
"claims_locales" => [standard_set: :oidc, locations: [:authorization_request]],
"id_token_hint" => [standard_set: :oidc, locations: [:authorization_request]],
"login_hint" => [standard_set: :oidc, locations: [:authorization_request]],
"acr_values" => [standard_set: :oidc, locations: [:authorization_request]],
"claims" => [standard_set: :oidc, locations: [:authorization_request]],
"registration" => [standard_set: :oidc, locations: [:authorization_request]],
"request" => [standard_set: :oidc, locations: [:authorization_request]],
"request_uri" => [standard_set: :oidc, locations: [:authorization_request]],
"id_token" => [standard_set: :oidc, locations: [:authorization_response, :access_token_response]],
"session_state" => [standard_set: :oidc, locations: [:authorization_response, :access_token_response]],
"assertion" => [standard_set: :oidc, locations: [:token_request]],
"client_assertion" => [standard_set: :oauth2, locations: [:token_request]],
"client_assertion_type" => [standard_set: :oauth2, locations: [:token_request]],
"code_verifier" => [standard_set: :oauth2, locations: [:token_request]],
"code_challenge" => [standard_set: :oauth2, locations: [:authorization_request]],
"code_challenge_method" => [standard_set: :oauth2, locations: [:authorization_request]],
"claim_token" => [standard_set: :uma2, locations: [:client_request, :token_endpoint]],
"pct" => [standard_set: :uma2, locations: [:client_request, :token_endpoint, :authorization_server_response, :token_endpoint]],
"rpt" => [standard_set: :uma2, locations: [:client_request, :token_endpoint]],
"ticket" => [standard_set: :uma2, locations: [:client_request, :token_endpoint]],
"upgraded" => [standard_set: :uma2, locations: [:authorization_server_response, :token_endpoint]],
"vtr" => [standard_set: :oauth2, locations: [:authorization_request, :token_request]]
}
# Token type hints usable at the revocation/introspection endpoints, as
# registered with IANA, plus the UMA 2.0 "pct" addition.
@token_type_hints %{
"access_token" => [standard_set: :oauth2],
"refresh_token" => [standard_set: :oauth2],
"pct" => [standard_set: :uma2]
}
# OAuth URN values (grant-type / client-assertion-type / token-type URNs)
# registered with IANA.
@uris %{
"urn:ietf:params:oauth:grant-type:jwt-bearer" => [standard_set: :oauth2],
"urn:ietf:params:oauth:client-assertion-type:jwt-bearer" => [standard_set: :oauth2],
"urn:ietf:params:oauth:grant-type:saml2-bearer" => [standard_set: :oauth2],
"urn:ietf:params:oauth:client-assertion-type:saml2-bearer" => [standard_set: :oauth2],
"urn:ietf:params:oauth:token-type:jwt" => [standard_set: :oauth2]
}
# Dynamic client registration metadata fields, tagged by the standard that
# defines each field (:oauth2 = RFC 7591 registry, :oidc = OIDC registration,
# :uma2 = UMA 2.0).
@dynamic_client_registration_metadata %{
"redirect_uris" => [standard_set: :oauth2],
"token_endpoint_auth_method" => [standard_set: :oauth2],
"grant_types" => [standard_set: :oauth2],
"response_types" => [standard_set: :oauth2],
"client_name" => [standard_set: :oauth2],
"client_uri" => [standard_set: :oauth2],
"logo_uri" => [standard_set: :oauth2],
"scope" => [standard_set: :oauth2],
"contacts" => [standard_set: :oauth2],
"tos_uri" => [standard_set: :oauth2],
"policy_uri" => [standard_set: :oauth2],
"jwks_uri" => [standard_set: :oauth2],
"jwks" => [standard_set: :oauth2],
"software_id" => [standard_set: :oauth2],
"software_version" => [standard_set: :oauth2],
"client_id" => [standard_set: :oauth2],
"client_secret" => [standard_set: :oauth2],
"client_id_issued_at" => [standard_set: :oauth2],
"client_secret_expires_at" => [standard_set: :oauth2],
"registration_access_token" => [standard_set: :oauth2],
"registration_client_uri" => [standard_set: :oauth2],
"application_type" => [standard_set: :oidc],
"sector_identifier_uri" => [standard_set: :oidc],
"subject_type" => [standard_set: :oidc],
"id_token_signed_response_alg" => [standard_set: :oidc],
"id_token_encrypted_response_alg" => [standard_set: :oidc],
"id_token_encrypted_response_enc" => [standard_set: :oidc],
"userinfo_signed_response_alg" => [standard_set: :oidc],
"userinfo_encrypted_response_alg" => [standard_set: :oidc],
"userinfo_encrypted_response_enc" => [standard_set: :oidc],
"request_object_signing_alg" => [standard_set: :oidc],
"request_object_encryption_alg" => [standard_set: :oidc],
"request_object_encryption_enc" => [standard_set: :oidc],
"token_endpoint_auth_signing_alg" => [standard_set: :oidc],
"default_max_age" => [standard_set: :oidc],
"require_auth_time" => [standard_set: :oidc],
"default_acr_values" => [standard_set: :oidc],
"initiate_login_uri" => [standard_set: :oidc],
"request_uris" => [standard_set: :oidc],
"claims_redirect_uris" => [standard_set: :uma2]
}
# Client authentication methods at the token endpoint; the JWT-based methods
# come from OIDC.
@token_endpoint_authentication_methods %{
"none" => [standard_set: :oauth2],
"client_secret_post" => [standard_set: :oauth2],
"client_secret_basic" => [standard_set: :oauth2],
"client_secret_jwt" => [standard_set: :oidc],
"private_key_jwt" => [standard_set: :oidc]
}
# PKCE code challenge transformation methods (RFC 7636 registry).
@pkce_code_challenge_methods %{
"plain" => [standard_set: :oauth2],
"S256" => [standard_set: :oauth2]
}
# Token introspection response members registered with IANA, plus the
# UMA 2.0 "permissions" member.
# FIX: removed the trailing comma that followed the last ("vtm") entry —
# a trailing comma before `}` is a syntax error in Elixir map literals.
@token_introspection_response_members %{
"active" => [standard_set: :oauth2],
"username" => [standard_set: :oauth2],
"client_id" => [standard_set: :oauth2],
"scope" => [standard_set: :oauth2],
"token_type" => [standard_set: :oauth2],
"exp" => [standard_set: :oauth2],
"iat" => [standard_set: :oauth2],
"nbf" => [standard_set: :oauth2],
"sub" => [standard_set: :oauth2],
"aud" => [standard_set: :oauth2],
"iss" => [standard_set: :oauth2],
"jti" => [standard_set: :oauth2],
"permissions" => [standard_set: :uma2],
"vot" => [standard_set: :oauth2],
"vtm" => [standard_set: :oauth2]
}
# Authorization server metadata fields from the IANA registry (:oauth2,
# RFC 8414) and the OIDC Discovery provider metadata (:oidc).
@authorization_server_metadata %{
"issuer" => [standard_set: :oauth2],
"authorization_endpoint" => [standard_set: :oauth2],
"token_endpoint" => [standard_set: :oauth2],
"jwks_uri" => [standard_set: :oauth2],
"registration_endpoint" => [standard_set: :oauth2],
"scopes_supported" => [standard_set: :oauth2],
"response_types_supported" => [standard_set: :oauth2],
"response_modes_supported" => [standard_set: :oauth2],
"grant_types_supported" => [standard_set: :oauth2],
"token_endpoint_auth_methods_supported" => [standard_set: :oauth2],
"token_endpoint_auth_signing_alg_values_supported" => [standard_set: :oauth2],
"service_documentation" => [standard_set: :oauth2],
"ui_locales_supported" => [standard_set: :oauth2],
"op_policy_uri" => [standard_set: :oauth2],
"op_tos_uri" => [standard_set: :oauth2],
"revocation_endpoint" => [standard_set: :oauth2],
"revocation_endpoint_auth_methods_supported" => [standard_set: :oauth2],
"revocation_endpoint_auth_signing_alg_values_supported" => [standard_set: :oauth2],
"introspection_endpoint" => [standard_set: :oauth2],
"introspection_endpoint_auth_methods_supported" => [standard_set: :oauth2],
"introspection_endpoint_auth_signing_alg_values_supported" => [standard_set: :oauth2],
"code_challenge_methods_supported" => [standard_set: :oauth2],
"signed_metadata" => [standard_set: :oauth2],
"userinfo_endpoint" => [standard_set: :oidc],
"acr_values_supported" => [standard_set: :oidc],
"subject_types_supported" => [standard_set: :oidc],
"id_token_signing_alg_values_supported" => [standard_set: :oidc],
"id_token_encryption_alg_values_supported" => [standard_set: :oidc],
"id_token_encryption_enc_values_supported" => [standard_set: :oidc],
"userinfo_signing_alg_values_supported" => [standard_set: :oidc],
"userinfo_encryption_alg_values_supported" => [standard_set: :oidc],
"userinfo_encryption_enc_values_supported" => [standard_set: :oidc],
"request_object_signing_alg_values_supported" => [standard_set: :oidc],
"request_object_encryption_alg_values_supported" => [standard_set: :oidc],
"request_object_encryption_enc_values_supported" => [standard_set: :oidc],
"display_values_supported" => [standard_set: :oidc],
"claim_types_supported" => [standard_set: :oidc],
"claims_supported" => [standard_set: :oidc],
"claims_locales_supported" => [standard_set: :oidc],
"claims_parameter_supported" => [standard_set: :oidc],
"request_parameter_supported" => [standard_set: :oidc],
"request_uri_parameter_supported" => [standard_set: :oidc],
"require_request_uri_registration" => [standard_set: :oidc]
}
# Grant types from RFC 7591; `:uses_authorization_endpoint` records whether
# the grant flows through the authorization endpoint (see
# uses_authorization_endpoint?/1 below).
@grant_types %{
"authorization_code" => [standard_set: :oauth2, uses_authorization_endpoint: true],
"implicit" => [standard_set: :oauth2, uses_authorization_endpoint: true],
"password" => [standard_set: :oauth2, uses_authorization_endpoint: false],
"client_credentials" => [standard_set: :oauth2, uses_authorization_endpoint: false],
"refresh_token" => [standard_set: :oauth2, uses_authorization_endpoint: false],
"urn:ietf:params:oauth:grant-type:jwt-bearer" => [standard_set: :oauth2, uses_authorization_endpoint: false],
"urn:ietf:params:oauth:grant-type:saml2-bearer" => [standard_set: :oauth2, uses_authorization_endpoint: false]
}
# Which specification family a registry entry comes from; most public
# functions below take a list of these to filter the registries.
@type standard_set :: :oauth2 | :oidc | :uma2
@type standard_sets :: [standard_set]
# The remaining types are string aliases naming the different registry
# value categories returned by the accessor functions.
@type access_token_type :: String.t
@type authorization_endpoint_response_type :: String.t
@type extension_error :: String.t
@type parameter :: String.t
@type parameter_location :: :authorization_request | :authorization_response | :token_request | :token_response | :access_token_response | :client_request | :authorization_server_response
@type token_type_hint :: String.t
@type uri :: String.t
@type dynamic_client_registration_metadata :: String.t
@type token_endpoint_authentication_method :: String.t
@type pkce_code_challenge_method :: String.t
@type token_introspection_response_member :: String.t
@type authorization_server_metadata :: String.t
@type grant_type :: String.t
@type client_id :: String.t
@type client_secret :: String.t
@doc """
Returns the registered access token types from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#token-types),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_access_token_types()
["Bearer"]
```
"""
@spec get_access_token_types(standard_sets) :: [access_token_type]
def get_access_token_types(standard_sets \\ [:oauth2]) do
  for {type, props} <- @access_token_types,
      props[:standard_set] in standard_sets,
      do: type
end
@doc """
Returns the authorization endpoint response types from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#endpoint),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_authorization_endpoint_response_types([:oauth2, :oidc])
["code", "code id_token", "code id_token token", "code token", "id_token",
"id_token token", "none", "token"]
```
"""
@spec get_authorization_endpoint_response_types(standard_sets) :: [authorization_endpoint_response_type]
def get_authorization_endpoint_response_types(standard_sets \\ [:oauth2]) do
  for {response_type, props} <- @authorization_endpoint_response_types,
      props[:standard_set] in standard_sets,
      do: response_type
end
@doc """
Returns the extension errors from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#extensions-error),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_extension_errors([:oidc])
["consent_required", "interaction_required", "invalid_request_object",
"invalid_request_uri", "login_required", "registration_not_supported",
"request_not_supported", "request_uri_not_supported",
"session_selection_required"]
```
"""
@spec get_extension_errors(standard_sets) :: [extension_error]
def get_extension_errors(standard_sets \\ [:oauth2]) do
  for {error, props} <- @extension_errors,
      props[:standard_set] in standard_sets,
      do: error
end
@doc """
Returns the OAuth parameters from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#parameters),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_parameters([:uma2])
["rpt", "pct", "claim_token", "upgraded", "ticket"]
```
"""
@spec get_parameters(standard_sets) :: [parameter]
def get_parameters(standard_sets \\ [:oauth2]) do
  for {param, props} <- @parameters,
      props[:standard_set] in standard_sets,
      do: param
end
@doc """
Returns the OAuth parameters that may appear at a given location. The
locations are the following:
* `:authorization_request`, `:authorization_response`, `:token_request` and `:token_response` from the OAuth2 specification
* `:access_token_response` specific value from the OpenID Connect specification
* `:client_request` and `:authorization_server_response` specific values from UMA 2.0 specification
## Example
```elixir
iex> OAuth2Utils.get_parameters_for_location(:authorization_response, [:oauth2, :oidc])
["error_uri", "error", "error_description", "token_type", "access_token",
"state", "scope", "expires_in", "code", "session_state", "id_token"]
```
"""
@spec get_parameters_for_location(parameter_location, standard_sets) :: [parameter]
def get_parameters_for_location(location, standard_sets \\ [:oauth2]) do
  # A parameter is kept only when it is valid at `location` AND its defining
  # standard is among the requested sets.
  for {param, props} <- @parameters,
      location in props[:locations],
      props[:standard_set] in standard_sets,
      do: param
end
@doc """
Returns the token type hints from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#token-type-hint),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_token_type_hints()
["access_token", "refresh_token"]
```
"""
@spec get_token_type_hints(standard_sets) :: [token_type_hint]
def get_token_type_hints(standard_sets \\ [:oauth2]) do
  for {hint, props} <- @token_type_hints,
      props[:standard_set] in standard_sets,
      do: hint
end
@doc """
Returns the OAuth2 URN values from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#uri),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_uris()
["urn:ietf:params:oauth:grant-type:jwt-bearer",
"urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
"urn:ietf:params:oauth:grant-type:saml2-bearer",
"urn:ietf:params:oauth:client-assertion-type:saml2-bearer",
"urn:ietf:params:oauth:token-type:jwt"]
```
"""
@spec get_uris(standard_sets) :: [uri]
def get_uris(standard_sets \\ [:oauth2]) do
  for {uri, props} <- @uris,
      props[:standard_set] in standard_sets,
      do: uri
end
@doc """
Returns the dynamic client registration metadata fields from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#client-metadata),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_dynamic_client_registration_metadata([:oidc])
["default_max_age", "require_auth_time", "userinfo_signed_response_alg",
"userinfo_encrypted_response_enc", "token_endpoint_auth_signing_alg",
"request_object_encryption_alg", "request_uris",
"id_token_signed_response_alg", "request_object_encryption_enc",
"userinfo_encrypted_response_alg", "sector_identifier_uri", "application_type",
"id_token_encrypted_response_alg", "default_acr_values", "subject_type",
"initiate_login_uri", "request_object_signing_alg",
"id_token_encrypted_response_enc"]
```
"""
@spec get_dynamic_client_registration_metadata(standard_sets) :: [dynamic_client_registration_metadata]
def get_dynamic_client_registration_metadata(standard_sets \\ [:oauth2]) do
  for {field, props} <- @dynamic_client_registration_metadata,
      props[:standard_set] in standard_sets,
      do: field
end
@doc """
Returns the token endpoint authentication methods from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#token-endpoint-auth-method),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_token_endpoint_authentication_methods()
["client_secret_basic", "client_secret_post", "none"]
iex> OAuth2Utils.get_token_endpoint_authentication_methods([:oauth2, :oidc])
["client_secret_basic", "client_secret_jwt", "client_secret_post", "none",
"private_key_jwt"]
```
"""
@spec get_token_endpoint_authentication_methods(standard_sets) :: [token_endpoint_authentication_method]
def get_token_endpoint_authentication_methods(standard_sets \\ [:oauth2]) do
  for {method, props} <- @token_endpoint_authentication_methods,
      props[:standard_set] in standard_sets,
      do: method
end
@doc """
Returns the PKCE code challenge methods from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#pkce-code-challenge-method),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_pkce_code_challenge_methods()
["S256", "plain"]
```
"""
@spec get_pkce_code_challenge_methods(standard_sets) :: [pkce_code_challenge_method]
def get_pkce_code_challenge_methods(standard_sets \\ [:oauth2]) do
  for {method, props} <- @pkce_code_challenge_methods,
      props[:standard_set] in standard_sets,
      do: method
end
@doc """
Returns the token introspection response members from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#token-introspection-response),
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_token_introspection_response_members([:uma2])
["permissions"]
```
"""
@spec get_token_introspection_response_members(standard_sets) :: [token_introspection_response_member]
def get_token_introspection_response_members(standard_sets \\ [:oauth2]) do
  for {member, props} <- @token_introspection_response_members,
      props[:standard_set] in standard_sets,
      do: member
end
@doc """
Returns the authorization server metadata fields from the [IANA registry](https://www.iana.org/assignments/oauth-parameters/oauth-parameters.xhtml#authorization-server-metadata)
and from the [Open ID Connect Discovery 1.0](https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata) specification,
filtered by the requested standard sets.
## Example
```elixir
iex> OAuth2Utils.get_authorization_server_metadata([:oidc])
["require_request_uri_registration", "claims_parameter_supported",
"subject_types_supported", "id_token_encryption_enc_values_supported",
"request_object_encryption_enc_values_supported",
"userinfo_signing_alg_values_supported", "display_values_supported",
"userinfo_encryption_enc_values_supported", "request_uri_parameter_supported",
"request_object_signing_alg_values_supported", "claim_types_supported",
"request_object_encryption_alg_values_supported", "userinfo_endpoint",
"id_token_encryption_alg_values_supported", "claims_locales_supported",
"request_parameter_supported", "userinfo_encryption_alg_values_supported",
"acr_values_supported", "claims_supported",
"id_token_signing_alg_values_supported"]
```
"""
@spec get_authorization_server_metadata(standard_sets) :: [authorization_server_metadata]
def get_authorization_server_metadata(standard_sets \\ [:oauth2]) do
  for {field, props} <- @authorization_server_metadata,
      props[:standard_set] in standard_sets,
      do: field
end
@doc """
Returns the grant types as documented in ["OAuth 2.0 Dynamic Client Registration Protocol [RFC7591]"](https://tools.ietf.org/html/rfc7591)
## Example
```elixir
iex> OAuth2Utils.get_grant_types()
["authorization_code", "client_credentials", "implicit", "password",
"refresh_token", "urn:ietf:params:oauth:grant-type:jwt-bearer",
"urn:ietf:params:oauth:grant-type:saml2-bearer"]
```
"""
@spec get_grant_types(standard_sets) :: [grant_type]
def get_grant_types(standard_sets \\ [:oauth2]) do
  # FIX: dropped a stray copy-pasted doctest line
  # (`iex> OAuth2Utils.get_authorization_server_metadata([:oidc])`) that had
  # leaked into this function's example block.
  @grant_types
  |> Enum.filter(fn {_, v} -> Keyword.get(v, :standard_set) in standard_sets end)
  |> Enum.unzip()
  |> elem(0)
end
@doc """
Returns `true` if the grant type requires the use of the authorization endpoint, `false` otherwise
## Example
```elixir
iex> OAuth2Utils.uses_authorization_endpoint?("implicit")
true
iex> OAuth2Utils.uses_authorization_endpoint?("client_credentials")
false
iex> OAuth2Utils.uses_authorization_endpoint?("password")
false
```
"""
@spec uses_authorization_endpoint?(grant_type) :: boolean()
def uses_authorization_endpoint?(grant_type) do
  # Unknown grant types fall through to `nil`, which compares unequal
  # to `true`, so they yield `false` — same as the original lookup chain.
  case @grant_types[grant_type] do
    nil -> false
    props -> props[:uses_authorization_endpoint] == true
  end
end
# Regex character-class bodies used by the valid_*_param? predicates below.
# These appear to mirror the ABNF character ranges of RFC 6749 (Appendix A):
# VSCHAR is the printable ASCII range — TODO confirm against the RFC.
@vschar "\\x20-\\x7E"
#@nqchar "\\x21\\x23-\\x5B\\x5D-\\x7E"
#@nqschar "\\x20-\\x21\\x23-\\x5B\\x5D-\\x7E"
# UNICODECHARNOCRLF: tab, printable ASCII and non-control Unicode code
# points (excludes CR/LF); used for username/password validation.
@unicodecharnocrlf "\\x09\\x20-\\x7E\\x80-\\x{D7FF}\\x{E000}-\\x{FFFD}\\x{10000}-\\x{10FFFF}"
@doc """
Returns `true` if the parameter is a valid client_id, `false` otherwise
## Example
```elixir
iex> OAuth2Utils.valid_client_id_param?("my_client_23")
true
iex> OAuth2Utils.valid_client_id_param?("my_client¯23")
false
```
"""
@spec valid_client_id_param?(client_id) :: boolean()
def valid_client_id_param?(client_id) do
  # Empty strings are accepted (`*` quantifier), matching the original.
  Regex.match?(~r{^[#{@vschar}]*$}, client_id)
end
@doc """
Returns `true` if the parameter is a valid client secret, `false` otherwise
"""
@spec valid_client_secret_param?(client_secret) :: boolean()
def valid_client_secret_param?(client_secret) do
  Regex.match?(~r{^[#{@vschar}]*$}, client_secret)
end
@doc """
Returns `true` if the authorization code parameter is valid, `false` otherwise
## Example
```elixir
iex> OAuth2Utils.valid_authorization_code_param?("WIrgzqwBTQrgx*^TcyhBXonuCQ;',oi2~QO")
true
iex> OAuth2Utils.valid_authorization_code_param?("Hï")
false
```
"""
@spec valid_authorization_code_param?(String.t()) :: boolean
def valid_authorization_code_param?(authorization_code) do
  # `+` quantifier: at least one character is required here.
  Regex.match?(~r{^[#{@vschar}]+$}, authorization_code)
end
@doc """
Returns `true` if the access token parameter is valid, `false` otherwise
## Example
```elixir
iex> OAuth2Utils.valid_access_token_param?("<KEY>")
true
iex> OAuth2Utils.valid_access_token_param?("<KEY>")
false
```
"""
@spec valid_access_token_param?(String.t()) :: boolean
def valid_access_token_param?(access_token) do
  Regex.match?(~r{^[#{@vschar}]+$}, access_token)
end
@doc """
Returns `true` if the refresh token parameter is valid, `false` otherwise
## Example
```elixir
iex> OAuth2Utils.valid_refresh_token_param?("<KEY>")
true
iex> OAuth2Utils.valid_refresh_token_param?("<KEY>")
false
```
"""
@spec valid_refresh_token_param?(String.t()) :: boolean
def valid_refresh_token_param?(refresh_token) do
  Regex.match?(~r{^[#{@vschar}]+$}, refresh_token)
end
@doc """
Returns `true` if the parameter is a valid RFC6749 username parameter,
`false` otherwise
```elixir
iex> OAuth2Utils.valid_username_param?("молду")
true
iex> OAuth2Utils.valid_username_param?("john\nsmith")
false
```
"""
@spec valid_username_param?(String.t()) :: boolean
def valid_username_param?(username) do
  # `u` enables Unicode mode so the extended code-point ranges apply.
  Regex.match?(~r{^[#{@unicodecharnocrlf}]*$}iu, username)
end
@doc """
Returns `true` if the parameter is a valid RFC6749 password parameter,
`false` otherwise
"""
@spec valid_password_param?(String.t()) :: boolean
def valid_password_param?(password) do
  Regex.match?(~r{^[#{@unicodecharnocrlf}]*$}iu, password)
end
end
|
lib/oauth2_utils.ex
| 0.630344
| 0.652394
|
oauth2_utils.ex
|
starcoder
|
defmodule CanvasAPI.CommentService do
@moduledoc """
A service for viewing and manipulating comments.
"""
alias CanvasAPI.{Account, Canvas, CanvasService, Comment,
SlackNotifier, Team, User, UserService}
alias Ecto.Changeset
# NOTE(review): `use CanvasAPI.Web, :service` presumably imports Repo,
# Ecto.Query helpers, `broadcast/4` and `maybe_lock/1` — confirm in Web module.
use CanvasAPI.Web, :service
# Associations eagerly loaded with every fetched comment.
@preload [:creator, canvas: [:team]]
@doc """
Create a new comment on a given canvas block.
"""
@spec create(map, Keyword.t) :: {:ok, Comment.t} | {:error, Changeset.t}
def create(attrs, opts) do
%Comment{}
|> Comment.changeset(attrs)
|> put_canvas(attrs["canvas_id"], opts[:account])
|> put_block(attrs["block_id"])
|> put_creator(opts[:account])
|> Repo.insert
|> case do
{:ok, comment} ->
# Only successful inserts trigger Slack/websocket notifications.
notify_comment(comment, "new_comment")
{:ok, comment}
error ->
error
end
end
# Validates that the referenced block exists in the already-resolved canvas;
# stores only the block id (not an association) on the changeset.
@spec put_block(Changeset.t, String.t | nil) :: Changeset.t
defp put_block(changeset, id) when is_binary(id) do
with canvas when not is_nil(canvas) <- get_field(changeset, :canvas),
block when not is_nil(block) <- Canvas.find_block(canvas, id) do
put_change(changeset, :block_id, id)
else
_ -> add_error(changeset, :block, "was not found")
end
end
defp put_block(changeset, _),
do: add_error(changeset, :block, "is required")
# Resolves the canvas through CanvasService (which scopes it to the account)
# and attaches it as an association, or records a changeset error.
@spec put_canvas(Changeset.t, String.t | nil, Account.t) :: Changeset.t
defp put_canvas(changeset, id, account) when is_binary(id) do
id
|> CanvasService.get(account: account)
|> case do
{:ok, canvas} ->
changeset |> put_assoc(:canvas, canvas)
{:error, _} ->
changeset |> add_error(:canvas, "was not found")
end
end
defp put_canvas(changeset, _, _),
do: changeset |> add_error(:canvas, "is required")
# Sets the creator to the account's user on the canvas's team. The `=` match
# asserts the user lookup succeeds; a missing user crashes (let-it-crash).
@spec put_creator(Changeset.t, Account.t) :: Changeset.t
defp put_creator(changeset, account) do
with canvas when not is_nil(canvas) <- get_field(changeset, :canvas) do
{:ok, user} = UserService.find_by_team(account, team_id: canvas.team_id)
put_assoc(changeset, :creator, user)
else
_ -> changeset
end
end
@doc """
Retrieve a single comment by ID.
"""
@spec get(String.t, Keyword.t) :: {:ok, Comment.t}
| {:error, :comment_not_found}
def get(id, opts \\ []) do
opts[:account]
|> comment_query
# `maybe_lock` is not defined in this module — presumably injected by
# `use CanvasAPI.Web, :service` to lock the row inside a transaction.
|> maybe_lock
|> Repo.get(id)
|> case do
comment = %Comment{} ->
{:ok, comment}
nil ->
{:error, :comment_not_found}
end
end
@doc """
List comments.
"""
@spec list(Keyword.t) :: [Comment.t]
def list(opts) do
opts[:account]
|> comment_query
|> filter(opts[:filter])
|> Repo.all
end
# Applies each supported filter key in turn; non-map filters are ignored.
@spec filter(Ecto.Query.t, map | nil) :: Ecto.Query.t
defp filter(query, filter) when is_map(filter) do
filter
|> Enum.reduce(query, &do_filter/2)
end
defp filter(query, _), do: query
# Recognized filter keys; unknown keys fall through unchanged.
@spec do_filter({String.t, String.t}, Ecto.Query.t) :: Ecto.Query.t
defp do_filter({"canvas.id", canvas_id}, query),
do: where(query, canvas_id: ^canvas_id)
defp do_filter({"block.id", block_id}, query),
do: where(query, block_id: ^block_id)
defp do_filter(_, query), do: query
@doc """
Update a comment.
"""
@spec update(String.t | Comment.t, map, Keyword.t)
:: {:ok, Comment.t}
| {:error, Changeset.t | :comment_not_found | :does_not_own}
def update(id, attrs, opts \\ [])
def update(id, attrs, opts) when is_binary(id) do
# Fetch-then-update runs in a transaction so the row lock from `get/2`
# holds; errors roll back and are returned as `{:error, reason}`.
Repo.transaction fn ->
with {:ok, comment} <- get(id, opts) do
__MODULE__.update(comment, attrs, opts)
end
|> case do
{:ok, comment} -> comment
{:error, error} -> Repo.rollback(error)
end
end
end
def update(comment, attrs, opts) do
# Only the comment's creator (matched by account id) may update it.
if opts[:account].id == comment.creator.account_id do
comment
|> Comment.changeset(attrs)
|> Repo.update
|> case do
{:ok, comment} ->
notify_comment(comment, "updated_comment")
{:ok, comment}
error -> error
end
else
{:error, :does_not_own}
end
end
@doc """
Delete a comment.
"""
@spec delete(String.t | Comment.t, Keyword.t) :: {:ok, Comment.t}
| {:error, :comment_not_found}
def delete(id, opts \\ [])
def delete(id, opts) when is_binary(id) do
# Same fetch-then-act transaction pattern as update/3 above.
Repo.transaction fn ->
with {:ok, comment} <- get(id, opts) do
__MODULE__.delete(comment, opts)
end
|> case do
{:ok, comment} -> comment
{:error, error} -> Repo.rollback(error)
end
end
end
def delete(comment, _opts) do
# NOTE(review): unlike update/3, no ownership check is performed here —
# confirm this is intentional (callers may rely on it).
comment
|> Repo.delete
|> case do
{:ok, comment} ->
notify_comment(comment, "deleted_comment")
{:ok, comment}
error -> error
end
end
# Base query: without an account, all comments; with an account, only
# comments whose canvas belongs to a team that has a user for that account.
@spec comment_query(Account.t | nil) :: Ecto.Query.t
defp comment_query(nil), do: Comment |> preload(^@preload)
defp comment_query(account) do
Comment
|> join(:left, [co], ca in Canvas, co.canvas_id == ca.id)
|> join(:left, [..., ca], t in Team, ca.team_id == t.id)
|> join(:left, [..., t], u in User, u.team_id == t.id)
|> where([..., u], u.account_id == ^account.id)
|> preload(^@preload)
end
# Broadcasts the event on the canvas channel; new comments additionally
# trigger Slack notifications.
@spec notify_comment(Comment.t, String.t) :: any
defp notify_comment(comment, event) do
if event == "new_comment", do: notify_slack(comment)
broadcast("canvas:#{comment.canvas_id}",
event,
"show.json",
comment: comment)
end
# Queues delayed Slack notifications (per linked channel, plus a DM) using
# the team's bot token; silently does nothing if no Slack token exists.
@spec notify_slack(Comment.t) :: any
defp notify_slack(comment) do
with {:ok, token} <- Team.get_token(comment.canvas.team, "slack"),
token = get_in(token.meta, ~w(bot bot_access_token)) do
comment.canvas.slack_channel_ids
|> Enum.each(
&SlackNotifier.delay(
{:notify_new_comment, [token, comment.id, &1]}))
SlackNotifier.delay({:dm_new_comment, [token, comment.id]})
end
end
end
|
lib/canvas_api/services/comment_service.ex
| 0.740362
| 0.414603
|
comment_service.ex
|
starcoder
|
defmodule Presto.Table.Manager do
@moduledoc """
It's more performant to write a data file directly to object storage
than to write through PrestoDB, but writing some formats directly to
object storage results in poor read performance. JSON, for example.
In these cases, we write data files directly to object storage and use
PrestoDB to copy into a new (and properly) formatted table.
## Example
JSON data is written to a file via `Presto.Table.DataFile` behaviour,
uploaded to a staging table via `Presto.Table.DataStorage` behaviour,
and staged JSON data is written to a production table in ORC format through
PrestoDB with this behaviour.
"""
use Properties, otp_app: :definition_presto
# Maps the format atoms accepted by this API to the literal format names
# used in Presto `CREATE TABLE ... WITH (format = ...)` clauses.
@formats %{json: "JSON", avro: "AVRO", orc: "ORC"}
# Create a table for `dictionary`'s fields in the given storage format.
@callback create(
session :: Prestige.Session.t(),
table :: String.t(),
dictionary :: Dictionary.t(),
Presto.Table.DataFile.format()
) ::
:ok | {:error, term}
# Create `table` with the schema of `from`, optionally copying its rows.
@callback create_from(
session :: Prestige.Session.t(),
table :: String.t(),
from :: String.t(),
format :: Presto.Table.DataFile.format(),
with_data :: boolean
) ::
{:ok, term} | {:error, term}
# Copy all rows from one existing table into another.
@callback copy(
session :: Prestige.Session.t(),
from_table :: String.t(),
to_table :: String.t()
) :: {:ok, term} | {:error, term}
# Drop the table if it exists.
@callback delete(session :: Prestige.Session.t(), table :: String.t()) ::
{:ok, term} | {:error, term}
# Configurable implementation module (via the Properties macro); defaults
# to the direct PrestoDB implementation below.
getter(:impl, default: Presto.Table.Manager.Impl)
# Public API — each function delegates to the configured implementation,
# translating the format atom to Presto's format name via @formats.
def create(session, table, dictionary, format \\ :orc) do
impl().create(session, table, dictionary, @formats[format])
end
def create_from(session, table, from, opts) do
format = Keyword.get(opts, :format, :orc)
with_data = Keyword.get(opts, :with_data, false)
impl().create_from(session, table, from, @formats[format], with_data)
end
def copy(session, from_table, to_table) do
impl().copy(session, from_table, to_table)
end
def delete(session, table) do
impl().delete(session, table)
end
end
defmodule Presto.Table.Manager.Impl do
  @moduledoc false

  @behaviour Presto.Table.Manager

  alias Presto.Table.Dictionary.Translator

  # Creates the table with one column per dictionary field, translated to a
  # Presto column definition by Translator. Returns :ok on success, or the
  # Prestige error tuple unchanged.
  # NOTE: table/format are interpolated into the SQL; identifiers cannot be
  # parameterized in Presto, so callers must not pass untrusted names.
  @impl Presto.Table.Manager
  def create(session, table, dictionary, format) do
    # Enum.map_join/3 builds the comma-separated column list in one pass
    # (idiomatic replacement for Enum.map |> Enum.join).
    columns =
      Enum.map_join(dictionary, ",", fn type ->
        result = Translator.translate_type(type)
        "#{result.name} #{result.type}"
      end)

    create_table =
      "CREATE TABLE IF NOT EXISTS #{table} (#{columns}) with ( format = '#{format}' )"

    case Prestige.execute(session, create_table) do
      {:ok, _} -> :ok
      error_result -> error_result
    end
  end

  # Creates `table` with the same schema as `from`; the `with_data` flag
  # selects between copying rows (true) or schema only (false).
  @impl Presto.Table.Manager
  def create_from(session, table, from, format, false) do
    create_table =
      "CREATE TABLE IF NOT EXISTS #{table} WITH (format = '#{format}') AS SELECT * FROM #{from} WITH NO DATA"

    Prestige.execute(session, create_table)
  end

  def create_from(session, table, from, format, true) do
    create_table =
      "CREATE TABLE IF NOT EXISTS #{table} WITH (format = '#{format}') AS SELECT * FROM #{from}"

    Prestige.execute(session, create_table)
  end

  # Copies every row from `from_table` into the (pre-existing) `to_table`.
  @impl Presto.Table.Manager
  def copy(session, from_table, to_table) do
    Prestige.execute(session, "INSERT INTO #{to_table} SELECT * FROM #{from_table}")
  end

  # Drops the table; succeeds even if it does not exist.
  @impl Presto.Table.Manager
  def delete(session, table) do
    Prestige.execute(session, "DROP TABLE IF EXISTS #{table}")
  end
end
|
apps/definition_presto/lib/presto/table/manager.ex
| 0.753557
| 0.421433
|
manager.ex
|
starcoder
|
defprotocol Casus.Domain.Root do
@moduledoc """
The Casus.Domain.Root protocol.
Implemented by an adapter to standardise all Root Domain Modules behaviour.
The protocol is implemented on a struct representing the aggregate root identity.
This is the struct that will be used to guaranty the uniqueness of the aggregate process in the cluster.
"""
@typedoc """
Struct representing the identity of the aggregate root.
Usually a simple struct is enough. Like if the name of the domain root module is `Mission`, a struct named
%Mission{} and with one id parameter `{id: "uuid-sting"}` is a nice way to implement the protocol.
It is a way to use protocols as extension on modules and not structs.
"""
@type aggregate_id :: struct
@typedoc "A struct containing all the params necessary to dispatch a command to the aggregate."
@type command :: struct
@typedoc """
A struct containing all the data necessary to represent an event that happened to the aggregate.
The event should implement the Casus.AggregateModule.Event Protocol to manage the adaptation to the storage part.
"""
@type event :: struct
@typedoc "A struct containing all the data necessary to represent the state of the aggregate."
@type state :: struct
# Protocol dispatch is on the aggregate-id struct: every function below takes
# the aggregate_id as its first argument.
@doc """
Function used to initialize the state of the aggregate.
"""
@spec init_state(aggregate_id) :: state
def init_state(aggregate_id)
@doc """
Function used to dispatch a command to an aggregate represented by it's state.
"""
@spec handle(aggregate_id, command, state) :: {:ok, [event]} | {:error, reason :: term}
def handle(aggregate_id, command, state)
@doc """
Function used to apply an event to a aggregate state.
"""
@spec apply(aggregate_id, event, state) :: state
def apply(aggregate_id, event, state)
@doc """
Function used to represent the aggregate id as a standardised struct of strings.
"""
@spec to_raw_id(aggregate_id) :: Casus.Infra.RootRawId.t
def to_raw_id(aggregate_id)
@doc """
Function used to represent the aggregate id as a String.
ex : `Mission-2345-4567`
"""
# Note: this protocol function intentionally shares its name with
# Kernel.to_string/1; callers must use the fully qualified protocol call.
@spec to_string(aggregate_id) :: String.t
def to_string(aggregate_id)
end
|
lib/casus/domain/root.ex
| 0.869438
| 0.645672
|
root.ex
|
starcoder
|
defmodule Bepaid.Gateway do
  @moduledoc """
  Utility module for the [bePaid API](https://docs.bepaid.by/ru/introduction)
  Provides API wrapper functions for remote API.
  ## Examples:
      iex> %{"uid" => uid} = Gateway.put_authorization(%Payment{amount: 10})
      %{...}
      iex> Gateway.void_authorization(uid, 10)
      %{...}
      iex> %{"uid" => uid} = Gateway.put_charge(%Payment{amount: 10})
      %{...}
  """
  @behaviour Bepaid.GatewayBehaviour

  alias Bepaid.{HttpClient, Payment}

  @doc """
  Put authorization: https://docs.bepaid.by/ru/beyag/transactions/authorization
  Accepts `Payment` struct with credit card and customer info.
  """
  def put_authorization(%Payment{} = payment), do: Map.from_struct(payment) |> post_request("authorizations")

  @doc """
  Void authorization: https://docs.bepaid.by/ru/gateway/transactions/void
  Accepts UID of authorization transaction and amount in cents.
  """
  def void_authorization(uid, amount), do: %{parent_uid: uid, amount: amount} |> post_request("voids")

  @doc """
  Put charge: https://docs.bepaid.by/ru/gateway/transactions/payment
  Accepts `Payment` struct with credit card and customer info.
  """
  def put_charge(%Payment{} = payment), do: Map.from_struct(payment) |> post_request("payments")

  @doc """
  Load transaction: https://docs.bepaid.by/ru/gateway/transactions/query
  Accepts UID of payment or authorization transaction.
  """
  def get_transaction(uid), do: exec(:get, uid, nil)

  @doc """
  Put refund: https://docs.bepaid.by/ru/gateway/transactions/refund
  Accepts UID of transaction, amount in cents and reason (optionally).
  """
  def put_refund(uid, amount, reason \\ "Возврат средств") do
    %{parent_uid: uid, amount: amount, reason: reason}
    |> post_request("refunds")
  end

  def put_refund(%{} = data), do: post_request(data, "refunds")

  @doc """
  Wrapper for exec(:post, url, params). Handy for piping.
  """
  def post_request(data, url), do: exec(:post, url, %{request: data})

  @doc """
  Executes API request to bePaid API server. Accepts `:get` or `:post` atom as a first argument.
  Returns `{:ok, data}` or `{:error, error, data}`.
  """
  def exec(:post, url, params), do: HttpClient.post(url, Poison.encode!(params)) |> HttpClient.parse_response()
  def exec(:get, url, nil), do: HttpClient.get(url) |> HttpClient.parse_response()
  def exec(:get, url, params) when is_map(params), do: HttpClient.get(url, [], params: params) |> HttpClient.parse_response()
  # FIX: this fallback previously piped the result of exec/3 through
  # HttpClient.parse_response/1 a second time, double-parsing the response
  # (exec(:post, ...) already parses). Delegate without re-parsing.
  def exec(url, params), do: exec(:post, url, params)
end
|
lib/bepaid/gateway.ex
| 0.841793
| 0.492676
|
gateway.ex
|
starcoder
|
defmodule LexibombServer.Rack do
  @moduledoc """
  Provides functions related to dealing with a rack of tiles.
  A rack is a list of (usually 7) tiles, like `["E", "E", "L", "R", "T", "T", "S"]`.
  """

  defstruct tiles: []

  alias LexibombServer.WordList

  # The rack struct wraps a plain list of single-grapheme tile strings.
  @type t :: %__MODULE__{tiles: [String.t]}

  # The blank tile; it may stand in for any letter, represented lowercase.
  @blank "_"
  @alpha ~w{a b c d e f g h i j k l m n o p q r s t u v w x y z}

  @doc """
  Creates a new rack with the given tiles.
  The tiles are normalized to uppercase.
  `tiles` can be either a string, which will be tokenized for you, or a list of
  tiles.

  ## Examples

      iex> LexibombServer.Rack.new "Hello"
      %LexibombServer.Rack{tiles: ["H", "E", "L", "L", "O"]}
      iex> LexibombServer.Rack.new ~W(E E L R T T _)
      %LexibombServer.Rack{tiles: ["E", "E", "L", "R", "T", "T", "_"]}
  """
  @spec new(String.t | [String.t]) :: t
  def new(tiles)

  def new(string) when is_binary(string) do
    string |> String.graphemes |> new
  end

  def new(tiles) when is_list(tiles) do
    # Note: upcasing leaves the blank tile "_" unchanged.
    %__MODULE__{tiles: Enum.map(tiles, &String.upcase/1)}
  end

  @doc """
  Returns the distinct letters in a `rack`.
  Includes the lowercase alphabet if there are any blank tiles.

  ## Examples

      iex> LexibombServer.Rack.new("EELRTTS") |> LexibombServer.Rack.letters
      ["E", "L", "R", "T", "S"]
      iex> LexibombServer.Rack.new("EELRTT_") |> LexibombServer.Rack.letters
      ["E", "L", "R", "T", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l",
      "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"]
  """
  @spec letters(t) :: [String.t]
  def letters(rack) do
    if @blank in rack.tiles do
      # A blank can be any letter, so expose the whole lowercase alphabet.
      rack.tiles
      |> Stream.filter(&(&1 !== @blank))
      |> Stream.concat(@alpha)
      |> Enum.uniq
    else
      Enum.uniq(rack.tiles)
    end
  end

  @doc """
  Returns a copy of `rack` with the given letters removed.
  If any of the given letters are lowercase, a corresponding blank tile is
  removed from the rack.
  `letters` can be either a string, which will be tokenized for you, or a list
  of letters.

  ## Examples

      iex> LexibombServer.Rack.new("EELRTTS") |> LexibombServer.Rack.remove("SET")
      %LexibombServer.Rack{tiles: ["E", "L", "R", "T"]}
      iex> LexibombServer.Rack.new("EELRTT_") |> LexibombServer.Rack.remove(~W(T R E a T))
      %LexibombServer.Rack{tiles: ["E", "L"]}
  """
  @spec remove(t, String.t | [String.t]) :: t
  def remove(rack, letters)

  def remove(rack, string) when is_binary(string) do
    remove(rack, String.graphemes(string))
  end

  def remove(rack, letters) when is_list(letters) do
    tiles =
      Enum.reduce(letters, rack.tiles, fn letter, tiles ->
        # Bug fix: a lowercase letter consumes a blank tile. The original
        # wrote `if lowercase?(letter), do: letter = @blank`, whose rebinding
        # never escapes the `if` scope, so blanks were never removed.
        tile = if lowercase?(letter), do: @blank, else: letter
        List.delete(tiles, tile)
      end)

    %{rack | tiles: tiles}
  end

  @doc """
  Returns the set of all prefixes that can be made from the tiles in a `rack`.

  ## Examples

      > rack = LexibombServer.Rack.new("ABC")
      > LexibombServer.Rack.prefixes(rack)
      #MapSet<["", "A", "AB", "ABC", "AC", "B", "BA", "BAC", "C", "CA", "CAB"]>
  """
  @spec prefixes(t) :: MapSet.t
  def prefixes(rack) do
    do_prefixes("", rack, MapSet.new())
  end

  # Depth-first expansion: keep a prefix only while the word list says some
  # word starts with it, then branch on every remaining letter.
  defp do_prefixes(prefix, rack, results) do
    if WordList.prefix?(prefix) do
      results = MapSet.put(results, prefix)

      Enum.reduce(letters(rack), results, fn letter, results ->
        do_prefixes(prefix <> letter, remove(rack, letter), results)
      end)
    else
      results
    end
  end

  # Whether `letter` is lowercase (equal to its own downcased form). Note the
  # blank "_" also satisfies this, which harmlessly maps it to itself above.
  @spec lowercase?(String.t) :: boolean
  defp lowercase?(letter) do
    String.downcase(letter) === letter
  end
end
|
apps/lexibomb_server/lib/lexibomb_server/rack.ex
| 0.891163
| 0.589185
|
rack.ex
|
starcoder
|
defmodule Raygun do
  @moduledoc """
  Send errors to Raygun. Errors can be captured in three different ways.
  1. Any errors that are logged
  2. Any exceptions that occur in a Plug
  3. Programmatically
  All the functions will return `:ok` or `{:error, reason}`
  """

  @api_endpoint "https://api.raygun.io/entries"

  @doc """
  Reports a string message. This function is used by the Raygun.Logger but it
  can also be used to report any string message.
  """
  def report_message(msg, opts \\ []) do
    msg |> Raygun.Format.message_payload(opts) |> send_report()
  end

  @deprecated "Use report_stacktrace/2 instead"
  def report_exception(exception, opts \\ []) do
    # NOTE(review): :erlang.get_stacktrace/0 was removed in OTP 24, so this
    # deprecated clause raises UndefinedFunctionError on modern runtimes.
    # Kept unchanged for compatibility; callers should use report_stacktrace/2.
    apply(:erlang, :get_stacktrace, [])
    |> report_stacktrace(exception, opts)
  end

  @doc """
  Reports an exception and its corresponding stacktrace to Raygun.
  """
  def report_stacktrace(stacktrace, exception, opts \\ []) do
    stacktrace |> Raygun.Format.stacktrace_payload(exception, opts) |> send_report()
  end

  @doc """
  Reports an exception and its corresponding stacktrace to Raygun. Additionally
  this captures some additional information about the environment in which
  the exception occurred by retrieving some state from the Plug Conn.
  """
  def report_plug(conn, stacktrace, exception, opts \\ []) do
    conn |> Raygun.Format.conn_payload(stacktrace, exception, opts) |> send_report()
  end

  # POSTs the JSON payload to the Raygun entries endpoint and normalizes the
  # HTTP outcome into :ok / {:error, reason}.
  defp send_report(error) do
    headers = %{
      "Content-Type": "application/json; charset=utf-8",
      Accept: "application/json",
      "User-Agent": "Elixir Client",
      "X-ApiKey": Raygun.Util.get_env(:raygun, :api_key)
    }

    opts = Application.get_env(:raygun, :httpoison_opts, [])

    case HTTPoison.post(@api_endpoint, Jason.encode!(error), headers, opts) do
      {:ok, %HTTPoison.Response{status_code: 202}} -> :ok
      {:ok, %HTTPoison.Response{status_code: 400}} -> {:error, :bad_message}
      {:ok, %HTTPoison.Response{status_code: 403}} -> {:error, :invalid_api_key}
      # Bug fix: any other status (e.g. 429, 500) previously raised
      # CaseClauseError, breaking the moduledoc's `:ok | {:error, reason}`
      # contract. Treat it as an unexpected failure instead.
      {:ok, %HTTPoison.Response{}} -> {:error, :unexpected}
      {:error, _} -> {:error, :unexpected}
    end
  end
end
|
lib/raygun.ex
| 0.845942
| 0.468487
|
raygun.ex
|
starcoder
|
defmodule Vector do
  @moduledoc ~S"""
  A library of two- and three-dimensional vector operations. All vectors
  are represented as tuples with either two or three elements.
  ## Examples
  iex> # Vector Triple Product Identity
  ...> a = {2, 3, 1}
  ...> b = {1, 4, -2}
  ...> c = {-1, 2, 1}
  ...> Vector.equal?(
  ...> Vector.cross(Vector.cross(a, b), c),
  ...> Vector.subtract(Vector.multiply(b, Vector.dot(a, c)), Vector.multiply(a, Vector.dot(b, c))))
  true
  """
  @type vector :: {number, number} | {number, number, number}
  @type location :: {number, number} | {number, number, number}
  @doc ~S"""
  Returns the cross product of two vectors *A*⨯*B*
  ## Examples
  iex> Vector.cross({2, 3}, {1, 4})
  {0, 0, 5}
  iex> Vector.cross({2, 2, -1}, {1, 4, 2})
  {8, -5, 6}
  iex> Vector.cross({3, -3, 1}, {4, 9, 2})
  {-15, -2, 39}
  """
  @spec cross(vector, vector) :: vector
  # 2-D inputs are promoted to 3-D with z = 0; the cross product of two 2-D
  # vectors therefore always lies on the z-axis.
  def cross({x1, y1}, {x2, y2}), do: {0, 0, x1 * y2 - y1 * x2}
  def cross({x1, y1, z1}, {x2, y2}), do: cross({x1, y1, z1}, {x2, y2, 0})
  def cross({x1, y1}, {x2, y2, z2}), do: cross({x1, y1, 0}, {x2, y2, z2})
  def cross({x1, y1, z1}, {x2, y2, z2}) do
    {y1 * z2 - z1 * y2,
     z1 * x2 - x1 * z2,
     x1 * y2 - y1 * x2}
  end
  @doc ~S"""
  Returns the norm (magnitude) of the cross product of two vectors *A*⨯*B*
  ## Examples
  iex> Vector.cross_norm({2, 3}, {1, 4})
  5
  iex> Vector.cross_norm({1, 4}, {2, 2})
  6
  iex> Vector.cross_norm({2, 0, -1}, {0, 3, 3})
  9.0
  iex> Float.floor(:math.pow(Vector.cross_norm({2, 2, -1}, {1, 4, 2}), 2))
  125.0
  """
  @spec cross_norm(vector, vector) :: number
  def cross_norm({x1, y1}, {x2, y2}), do: cross_norm({x1, y1, 0}, {x2, y2, 0})
  def cross_norm({x1, y1, z1}, {x2, y2, z2}) do
    norm(cross({x1, y1, z1}, {x2, y2, z2}))
  end
  @doc ~S"""
  Returns the dot product of two vectors *A*⨯*B*
  ## Examples
  iex> Vector.dot({2, 3}, {1, 4})
  14
  iex> Vector.dot({1, 4}, {2, 2})
  10
  iex> Vector.dot({2, 0, -1}, {0, 3, 3})
  -3
  """
  @spec dot(vector, vector) :: number
  def dot({x1, y1}, {x2, y2}), do: dot({x1, y1, 0}, {x2, y2, 0})
  def dot({x1, y1, z1}, {x2, y2}), do: dot({x1, y1, z1}, {x2, y2, 0})
  def dot({x1, y1}, {x2, y2, z2}), do: dot({x1, y1, 0}, {x2, y2, z2})
  def dot({x1, y1, z1}, {x2, y2, z2}) do
    x1 * x2 + y1 * y2 + z1 * z2
  end
  @doc ~S"""
  Returns the norm (magnitude) of a vector
  ## Examples
  iex> Vector.norm({3, 4})
  5.0
  iex> Vector.norm({-1, 0})
  1
  iex> Vector.norm({0, -2, 0})
  2
  """
  @spec norm(vector) :: number
  def norm({x, y}), do: norm({x, y, 0})
  # Axis-aligned special cases skip the sqrt and preserve integer inputs.
  # Clause order matters: the zero vector {0, 0, 0} matches the first special
  # case and returns 0.
  def norm({0, 0, z}), do: abs(z)
  def norm({x, 0, 0}), do: abs(x)
  def norm({0, y, 0}), do: abs(y)
  def norm({x, y, z}), do: :math.sqrt(norm_squared({x, y, z}))
  @doc ~S"""
  Returns the square of the norm (magnitude) of a vector
  ## Examples
  iex> Vector.norm_squared({3, 4})
  25
  iex> Vector.norm_squared({1, 0})
  1
  iex> Vector.norm_squared({2, 0, -1})
  5
  iex> Vector.norm_squared({-2, 3, 1})
  14
  """
  @spec norm_squared(vector) :: number
  def norm_squared({x, y}), do: norm_squared({x, y, 0})
  def norm_squared({x, y, z}) do
    x * x + y * y + z * z
  end
  @doc ~S"""
  Returns the unit vector parallel to the given vector.
  This will raise an `ArithmeticError` if a zero-magnitude vector is given.
  Use `unit_safe` if there is a chance that a zero-magnitude vector
  will be sent.
  ## Examples
  iex> Vector.unit({3, 4})
  {0.6, 0.8}
  iex> Vector.unit({8, 0, 6})
  {0.8, 0.0, 0.6}
  iex> Vector.unit({-2, 0, 0})
  {-1.0, 0.0, 0.0}
  iex> Vector.unit({0, 0, 0})
  ** (ArithmeticError) bad argument in arithmetic expression
  """
  @spec unit(vector) :: vector
  # Axis-aligned special cases avoid floating-point error; note that the zero
  # vector {0, 0, 0} matches the first clause and raises via 0 / abs(0).
  def unit({x, 0, 0}), do: {x / abs(x), 0.0, 0.0}
  def unit({0, y, 0}), do: {0.0, y / abs(y), 0.0}
  def unit({0, 0, z}), do: {0.0, 0.0, z / abs(z)}
  def unit(v), do: divide(v, norm(v))
  @doc ~S"""
  Returns the unit vector parallel to the given vector, but will handle
  the vectors `{0, 0}` and `{0, 0, 0}` by returning the same vector
  ## Examples
  iex> Vector.unit_safe({3, 4})
  {0.6, 0.8}
  iex> Vector.unit_safe({0, 0})
  {0, 0}
  iex> Vector.unit_safe({0, 0, 0})
  {0, 0, 0}
  """
  @spec unit_safe(vector) :: vector
  def unit_safe({0, 0}), do: {0, 0}
  def unit_safe({0, 0, 0}), do: {0, 0, 0}
  def unit_safe(v), do: unit(v)
  @doc ~S"""
  Reverses a vector
  ## Examples
  iex> Vector.reverse({3, -4})
  {-3, 4}
  iex> Vector.reverse({-2, 0, 5})
  {2, 0, -5}
  iex> Vector.cross_norm({-2, 3, 5}, Vector.reverse({-2, 3, 5}))
  0
  """
  @spec reverse(vector) :: vector
  def reverse({x, y}), do: {-x, -y}
  def reverse({x, y, z}), do: {-x, -y, -z}
  @doc ~S"""
  Adds two vectors
  ## Examples
  iex> Vector.add({3, -4}, {2, 1})
  {5,-3}
  iex> Vector.add({-2, 0, 5}, {0, 0, 0})
  {-2, 0, 5}
  iex> Vector.add({2, 1, -2}, Vector.reverse({2, 1, -2}))
  {0, 0, 0}
  """
  @spec add(vector, vector) :: vector
  def add({x1, y1}, {x2, y2}), do: {x1 + x2, y1 + y2}
  def add({x1, y1, z1}, {x2, y2}), do: add({x1, y1, z1}, {x2, y2, 0})
  def add({x1, y1}, {x2, y2, z2}), do: add({x1, y1, 0}, {x2, y2, z2})
  def add({x1, y1, z1}, {x2, y2, z2}), do: {x1 + x2, y1 + y2, z1 + z2}
  @doc ~S"""
  Subtract vector *B* from vector *A*. Equivalent to `Vector.add(A, Vector.reverse(B))`
  ## Examples
  iex> Vector.subtract({3, -4}, {2, 1})
  {1,-5}
  iex> Vector.subtract({-2, 0, 5}, {-3, 1, 2})
  {1, -1, 3}
  """
  @spec subtract(vector, vector) :: vector
  def subtract(a, b), do: add(a, reverse(b))
  @doc ~S"""
  Multiply a vector by scalar value `s`
  ## Examples
  iex> Vector.multiply({3, -4}, 2.5)
  {7.5, -10.0}
  iex> Vector.multiply({-2, 0, 5}, -2)
  {4, 0, -10}
  """
  @spec multiply(vector, number) :: vector
  def multiply({x, y}, s), do: {x * s, y * s}
  def multiply({x, y, z}, s), do: {x * s, y * s, z * s}
  @doc ~S"""
  Divide a vector by scalar value `s`
  ## Examples
  iex> Vector.divide({3, -4}, 2.5)
  {1.2, -1.6}
  iex> Vector.divide({-2, 0, 5}, -2)
  {1.0, 0.0, -2.5}
  """
  @spec divide(vector, number) :: vector
  def divide({x, y}, s), do: {x / s, y / s}
  def divide({x, y, z}, s), do: {x / s, y / s, z / s}
  @doc ~S"""
  Returns a new coordinate by projecting a given length `distance` from
  coordinate `start` along `vector`
  ## Examples
  iex> Vector.project({3, -4}, {-1, 1}, 4)
  {1.4, -2.2}
  iex> Vector.project({-6, 0, 8}, {1, -2, 0.4}, 2.5)
  {-0.5, -2.0, 2.4}
  iex> Vector.project({-2, 1, 3}, {0, 0, 0}, 2.5) |> Vector.norm()
  2.5
  """
  @spec project(vector, location, number) :: location
  def project(vector, start, distance) do
    vector
    |> unit()
    |> multiply(distance)
    |> add(start)
  end
  @doc ~S"""
  Compares two vectors for equality, with an optional tolerance
  ## Examples
  iex> Vector.equal?({3, -4}, {3, -4})
  true
  iex> Vector.equal?({3, -4}, {3.0001, -3.9999})
  false
  iex> Vector.equal?({3, -4}, {3.0001, -3.9999}, 0.001)
  true
  iex> Vector.equal?({3, -4, 1}, {3.0001, -3.9999, 1.0}, 0.001)
  true
  """
  @spec equal?(vector, vector, number) :: boolean
  # Compares squared distance against squared tolerance to avoid a sqrt.
  def equal?(a, b, tolerance \\ 0.0) do
    norm_squared(subtract(a, b)) <= tolerance * tolerance
  end
  @doc ~S"""
  Returns the scalar component for the axis given
  ## Examples
  iex> Vector.component({3, -4}, :y)
  -4
  iex> Vector.component({-6, 0, 8}, :z)
  8
  iex> Vector.component({1, -2}, :z)
  0
  iex> Vector.component(Vector.basis(:x), :z)
  0
  """
  @spec component(vector, atom) :: number
  def component({x, _}, :x), do: x
  def component({_, y}, :y), do: y
  # The z component of a 2-D vector is defined as 0.
  def component({_, _}, :z), do: 0
  def component({x, _, _}, :x), do: x
  def component({_, y, _}, :y), do: y
  def component({_, _, z}, :z), do: z
  @doc ~S"""
  Returns the basis vector for the given axis
  ## Examples
  iex> Vector.basis(:x)
  {1, 0, 0}
  iex> Vector.basis(:y)
  {0, 1, 0}
  iex> Vector.component(Vector.basis(:y), :y)
  1
  """
  @spec basis(atom) :: vector
  def basis(:x), do: {1, 0, 0}
  def basis(:y), do: {0, 1, 0}
  def basis(:z), do: {0, 0, 1}
end
|
lib/vector.ex
| 0.947137
| 0.880951
|
vector.ex
|
starcoder
|
defmodule UnicodeEmojiFlag do
  @moduledoc """
  Convert country codes to emoji flags 🔡 ➡️ 🇹🇼.
  For the list of available country codes please refer to https://en.wikipedia.org/wiki/Regional_indicator_symbol#Emoji_flag_sequences
  """

  # Maps each ASCII letter to its Unicode regional indicator symbol
  # (U+1F1E6 "A" .. U+1F1FF "Z"). Two regional indicators in sequence render
  # as a flag emoji.
  @codes %{
    "a" => 0x1F1E6,
    "b" => 0x1F1E7,
    "c" => 0x1F1E8,
    "d" => 0x1F1E9,
    "e" => 0x1F1EA,
    "f" => 0x1F1EB,
    "g" => 0x1F1EC,
    "h" => 0x1F1ED,
    "i" => 0x1F1EE,
    "j" => 0x1F1EF,
    "k" => 0x1F1F0,
    "l" => 0x1F1F1,
    "m" => 0x1F1F2,
    "n" => 0x1F1F3,
    "o" => 0x1F1F4,
    "p" => 0x1F1F5,
    "q" => 0x1F1F6,
    "r" => 0x1F1F7,
    "s" => 0x1F1F8,
    "t" => 0x1F1F9,
    "u" => 0x1F1FA,
    "v" => 0x1F1FB,
    "w" => 0x1F1FC,
    "x" => 0x1F1FD,
    "y" => 0x1F1FE,
    # Bug fix: was 0x1F1F (truncated), which is not a regional indicator and
    # produced a wrong character for every country code containing "z".
    "z" => 0x1F1FF
  }

  @doc """
  Takes two or more characters-long country code and produces an HTML-ready hexadecimal representation of its flag.
  For example in your Phoenix template `<%= raw(UnicodeEmojiFlag.html("tw")) %>`
  """
  @spec html(binary()) :: binary() | {:error, binary()}
  def html(country_code) do
    case translate(country_code) do
      {:error, _} = error ->
        error

      graphemes ->
        # Emit one decimal HTML numeric character reference per letter.
        Enum.map_join(graphemes, &"&##{@codes[&1]};")
    end
  end

  @doc """
  Takes two or more characters-long country code and produces a sequence ready to be printed on a terminal.
  For example `IO.puts UnicodeEmojiFlag.console("tw")`
  """
  @spec console(binary()) :: binary() | {:error, binary()}
  def console(country_code) do
    case translate(country_code) do
      {:error, _} = error ->
        error

      graphemes ->
        graphemes
        |> Enum.map(&@codes[&1])
        |> List.to_string()
    end
  end

  # Splits a country code into its letters, rejecting codes that are too
  # short and aliasing "uk" to the ISO code "gb".
  defp translate(country_code) when byte_size(country_code) < 2 do
    {:error, "Country code should be at least two characters long"}
  end

  defp translate("uk") do
    translate("gb")
  end

  defp translate(country_code) when is_binary(country_code) do
    country_code
    |> String.split("", trim: true)
  end
end
|
lib/unicode_emoji_flag.ex
| 0.713831
| 0.405272
|
unicode_emoji_flag.ex
|
starcoder
|
defmodule Adventofcode.Circle do
  @moduledoc """
  Circle is a circular data structure.
  It's implemented using a map where every value is a three-element tuple
  containing the id of the previous item, the value of the current item and then
  the id of the next item.
  There are also meta-keys like size, current and counter that are kept up to
  date automatically as long as the built in API functions are used.
  """

  @doc """
  Builds a circle from `values`. Each value is inserted after the previous
  one; the final `move_next` leaves the first inserted value as current.
  """
  def new(values \\ []) do
    values
    |> Enum.reduce(%{size: 0, current: 0, counter: 0}, &insert_next(&2, &1))
    |> move_next
  end

  @doc """
  Returns the values as a list, starting at the current item and following
  the `next` pointers once around the circle.
  """
  def to_list(state) do
    []
    |> do_to_list(state, state.current, state.current)
    |> Enum.reverse()
  end

  # Stop once we are back at the starting id. The `length(list) > 0` guard
  # lets the very first call (where current == last) still walk the circle.
  defp do_to_list(list, _state, current, current) when length(list) > 0 do
    list
  end

  defp do_to_list(list, state, current, last) do
    {_, value, next} = state[current]
    do_to_list([value | list], state, next, last)
  end

  @doc """
  Returns all `{id, value}` pairs, stripping the meta-keys. Order follows map
  enumeration, not circle order.
  """
  def all(state) do
    state
    |> Map.delete(:size)
    |> Map.delete(:current)
    |> Map.delete(:counter)
    |> Enum.map(fn {id, {_, val, _}} -> {id, val} end)
  end

  @doc "Returns the number of items in the circle."
  def size(state) do
    state.size
  end

  @doc "Returns the value of the current item, or `nil` for an empty circle."
  def current(%{size: 0}), do: nil

  def current(state) do
    elem(state[state.current], 1)
  end

  @doc "Returns the value stored under `id`."
  def at(state, id) do
    {_, val, _} = state[id]
    val
  end

  @doc "Moves the current pointer one step forward (no-op on an empty circle)."
  def move_next(%{size: 0} = state), do: state

  def move_next(state) do
    {_, _, next} = state[state.current]
    Map.put(state, :current, next)
  end

  @doc "Moves the current pointer one step backward."
  def move_prev(state) do
    {prev, _, _} = state[state.current]
    Map.put(state, :current, prev)
  end

  @doc """
  Inserts `values` after the current item (or after `after_id`) without
  changing the current pointer.
  """
  def insert_after(state, values), do: insert_after(state, values, state.current)

  # Threads {state, previous-id} through the reduce so each value is chained
  # after the one inserted just before it.
  def insert_after(state, values, after_id) when is_list(values) do
    values
    |> Enum.reduce({state, after_id}, fn value, {acc, prev} ->
      id = acc.counter + 1
      {_, _, next} = acc[prev]
      {do_insert(acc, id, {prev, value, next}), id}
    end)
    |> elem(0)
  end

  @doc """
  Inserts values after the current item, moving the current pointer to each
  newly inserted item in turn.
  """
  def insert_next(state, values) when is_list(values) do
    values
    |> Enum.reduce(state, &insert_next(&2, &1))
  end

  # First insertion: the lone item points at itself in both directions.
  def insert_next(%{size: 0} = state, value) do
    id = state.counter + 1
    state
    |> do_insert(id, {id, value, id})
    |> Map.put(:current, id)
  end

  def insert_next(state, value) do
    id = state.counter + 1
    prev = state.current
    {_, _, next} = state[state.current]
    state
    |> do_insert(id, {prev, value, next})
    |> Map.put(:current, id)
  end

  # Stores `item` under `id` and repairs the neighbours' pointers: prev's
  # `next` slot and next's `prev` slot are re-pointed at the new id. Also
  # bumps the :size and :counter meta-keys.
  defp do_insert(state, id, {prev, _value, next} = item) do
    state
    |> Map.put(id, item)
    |> Map.update(prev, item, &put_elem(&1, 2, id))
    |> Map.update(next, item, &put_elem(&1, 0, id))
    |> Map.update(:size, 0, &(&1 + 1))
    |> Map.update(:counter, 0, &(&1 + 1))
  end

  @doc """
  Removes the current item, splicing its neighbours together and moving the
  current pointer to the next item (no-op on an empty circle).
  """
  def remove_current(%{size: 0} = state), do: state

  def remove_current(state) do
    {prev, _, next} = state[state.current]
    state
    |> Map.update(prev, nil, &put_elem(&1, 2, next))
    |> Map.update(next, nil, &put_elem(&1, 0, prev))
    |> Map.put(:current, next)
    |> Map.update(:size, 0, &(&1 - 1))
    |> Map.delete(state.current)
  end

  @doc """
  Removes and returns the `amount` values following the current item as
  `{values, new_state}`, with the current pointer restored afterwards.
  """
  def take_after(state, amount) when is_integer(amount) and amount >= 1 do
    {result, state} = Enum.reduce(1..amount, {[], move_next(state)}, &do_take/2)
    {result, move_prev(state)}
  end

  # Takes the current value then removes it; remove_current advances the
  # pointer, so repeated calls consume consecutive items.
  defp do_take(_, {result, state}) do
    {result ++ [current(state)], remove_current(state)}
  end
end
|
lib/circle.ex
| 0.694613
| 0.77949
|
circle.ex
|
starcoder
|
defmodule ElixirLeaderboard.TermStore do
  @moduledoc """
  Use this storage engine to make small size one-off leaderboards that are
  stored entirely in a variable. Useful for leaderboards scoped to small groups
  of participants.

  The state is a plain map with three keys: `:table` (sorted list of entries),
  `:index` (map of id to `{id, key, stats}`), and `:count`.
  """
  @behaviour ElixirLeaderboard.Storage

  alias ElixirLeaderboard.{Indexer, Entry}

  ## Writers

  @doc "Creates an empty leaderboard state."
  def create(_) do
    {:ok, %{table: [], index: %{}, count: 0}}
  end

  @doc "Resets the leaderboard to an empty state."
  def clear(_) do
    {:ok, %{table: [], index: %{}, count: 0}}
  end

  @doc "Replaces the contents with `data`, sorting it and rebuilding the index."
  def populate(_, data, indexer) do
    table = Enum.sort(data)
    count = Enum.count(data)
    index = build_index(table, count, indexer)
    {:ok, %{table: table, index: index, count: count}}
  end

  @doc "Adds `entry`; returns `{:error, :entry_already_exists}` on a duplicate id."
  def add(state = %{table: table, count: count}, entry, indexer) do
    id = Entry.get_id(entry)

    if get(state, id) do
      {:error, :entry_already_exists}
    else
      table = Enum.sort([entry | table])
      count = count + 1
      index = build_index(table, count, indexer)
      {:ok, %{table: table, index: index, count: count}}
    end
  end

  @doc "Removes the entry with `id`; returns `{:error, :entry_not_found}` if absent."
  def remove(
        state = %{
          table: table,
          index: index,
          count: count
        },
        id,
        indexer
      ) do
    if get(state, id) do
      {_, key, _} = index[id]
      table = List.keydelete(table, key, 0)
      count = count - 1
      index = build_index(table, count, indexer)
      {:ok, %{table: table, index: index, count: count}}
    else
      {:error, :entry_not_found}
    end
  end

  @doc "Replaces the existing entry with the same id as `entry`."
  def update(
        state = %{table: table, index: index, count: count},
        entry,
        indexer
      ) do
    id = Entry.get_id(entry)

    if get(state, id) do
      {_, key, _} = index[id]
      table = Enum.sort([entry | List.keydelete(table, key, 0)])
      index = build_index(table, count, indexer)
      # Bug fix: keep the :count key in the returned state. The original
      # returned %{table: ..., index: ...} without :count, which broke
      # count/1 and any subsequent add/remove on the updated state.
      {:ok, %{table: table, index: index, count: count}}
    else
      {:error, :entry_not_found}
    end
  end

  @doc "Adds `entry` if its id is new, otherwise updates the existing entry."
  def add_or_update(state, entry, indexer) do
    id = Entry.get_id(entry)

    case get(state, id) do
      nil -> add(state, entry, indexer)
      _ -> update(state, entry, indexer)
    end
  end

  ## Readers

  @doc "Returns `{key, payload, stats}` for `id`, or `nil` when not present."
  def get(%{table: table, index: index}, id) do
    with {_, key, stats} <- index[id],
         {_, payload} <- List.keyfind(table, key, 0) do
      {key, payload, stats}
    else
      _ -> nil
    end
  end

  @doc """
  Returns the entries in the range `start..finish` relative to the position of
  `id`, reversed when the range is descending. Returns `[]` when `id` is absent.
  """
  def get(state = %{table: table}, id, start..finish) do
    case get(state, id) do
      nil ->
        []

      {key, _, _} ->
        key_index =
          table
          |> Enum.find_index(fn
            {^key, _} -> true
            _ -> false
          end)

        # Clamp both ends at 0 so a range reaching above the top of the
        # leaderboard is truncated rather than wrapping around.
        {min, max} = Enum.min_max([start, finish])
        min_index = Enum.max([key_index + min, 0])
        max_index = Enum.max([key_index + max, 0])

        slice =
          table
          |> Enum.slice(min_index..max_index)
          |> Enum.map(fn entry -> get(state, Entry.get_id(entry)) end)

        if finish < start, do: Enum.reverse(slice), else: slice
    end
  end

  @doc "Streams all records best-first."
  def top(state = %{table: table}) do
    table
    |> Stream.map(fn entry ->
      id = Entry.get_id(entry)
      get(state, id)
    end)
  end

  @doc "Streams all records worst-first."
  def bottom(state = %{table: table}) do
    table
    |> Enum.reverse()
    |> Stream.map(fn entry ->
      id = Entry.get_id(entry)
      get(state, id)
    end)
  end

  @doc "Returns the number of stored entries."
  def count(%{count: count}) do
    count
  end

  ## Private

  # Rebuilds the id => {id, key, stats} index from the sorted table.
  defp build_index(table, count, indexer) do
    table
    |> Stream.map(fn {key, _} -> key end)
    |> Indexer.index(count, indexer)
    |> Stream.map(fn term = {id, _, _} -> {id, term} end)
    |> Enum.into(%{})
  end
end
|
lib/elixir_leaderboard/term_store.ex
| 0.605799
| 0.45647
|
term_store.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.DoorLockCapabilitiesReport do
  @moduledoc """
  This command is used to advertise the Door Lock capabilities supported by the sending node.
  Params:
  * `:supported_operations` - the supported door lock operation types
  * `:supported_door_lock_modes` - the supported door lock modes
  * `:configurable_outside_handles` - which outside handles can be enabled and disabled via configuration
  * `:configurable_inside_handles` - which inside handles can be enabled and disabled via configuration
  * `:supported_door_components` - the supported door lock components that can be reported on
  * `:auto_relock_supported?` - whether the auto-relock functionality is supported
  * `:hold_and_release_supported?` - whether the hold-and-release functionality is supported
  * `:twist_assist_supported?` - whether the twist assist functionality is supported
  * `:block_to_block_supported?` - whether the block-to-block functionality is supported
  """
  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave.{Command, DecodeError}
  alias Grizzly.ZWave.CommandClasses.DoorLock

  @type param ::
          {:supported_operations, [DoorLock.operation_type()]}
          | {:supported_door_lock_modes, [DoorLock.mode()]}
          | {:configurable_outside_handles, [1..4]}
          | {:configurable_inside_handles, [1..4]}
          | {:supported_door_components, [DoorLock.door_components()]}
          | {:auto_relock_supported?, boolean}
          | {:hold_and_release_supported?, boolean}
          | {:twist_assist_supported?, boolean}
          | {:block_to_block_supported?, boolean}

  # Builds the command struct; params are not validated here.
  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    command = %Command{
      name: :door_lock_capabilities_report,
      command_byte: 0x08,
      command_class: DoorLock,
      params: params,
      impl: __MODULE__
    }
    {:ok, command}
  end

  # Serializes the params into the Z-Wave frame layout:
  # <<reserved:3, bitmask_len:5>> <> operations bitmask <> <<modes_len>> <>
  # mode bytes <> <<outside_handles:4, inside_handles:4>> <> components
  # bitmask <> <<reserved:4, ARS:1, HRS:1, TAS:1, BTBS:1>>.
  @impl true
  @spec encode_params(Command.t()) :: binary()
  def encode_params(command) do
    supported_operations_bitmask =
      Command.param!(command, :supported_operations) |> operations_to_binary()
    # This implementation always emits exactly one operations bitmask byte.
    supported_bitmasks_length = 1
    supported_door_lock_modes = Command.param!(command, :supported_door_lock_modes)
    supported_modes_list_length = Enum.count(supported_door_lock_modes)
    supported_door_lock_modes_binary = door_lock_modes_to_binary(supported_door_lock_modes)
    configurable_outside_handles_bitmask =
      Command.param!(command, :configurable_outside_handles) |> DoorLock.door_handles_to_bitmask()
    configurable_inside_handles_bitmask =
      Command.param!(command, :configurable_inside_handles) |> DoorLock.door_handles_to_bitmask()
    supported_door_components_bitmask =
      Command.param!(command, :supported_door_components) |> door_components_to_bitmask()
    auto_relock_supported_bit =
      if Command.param!(command, :auto_relock_supported?), do: 1, else: 0
    hold_and_release_supported_bit =
      if Command.param!(command, :hold_and_release_supported?), do: 1, else: 0
    twist_assist_supported_bit =
      if Command.param!(command, :twist_assist_supported?), do: 1, else: 0
    block_to_block_supported_bit =
      if Command.param!(command, :block_to_block_supported?), do: 1, else: 0
    <<0x00::size(3), supported_bitmasks_length::size(5)>> <>
      supported_operations_bitmask <>
      <<supported_modes_list_length>> <>
      supported_door_lock_modes_binary <>
      <<configurable_outside_handles_bitmask::size(4),
        configurable_inside_handles_bitmask::size(4)>> <>
      supported_door_components_bitmask <>
      <<0x00::size(4), auto_relock_supported_bit::size(1),
        hold_and_release_supported_bit::size(1), twist_assist_supported_bit::size(1),
        block_to_block_supported_bit::size(1)>>
  end

  # Pattern-matches the whole frame in the function head; an ill-formed
  # frame simply fails to match rather than returning a DecodeError.
  @impl true
  @spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
  def decode_params(
        # Assuming a single supported operations bitmask
        <<0x00::size(3), 0x01::size(5), supported_operations_bitmask, supported_modes_list_length,
          supported_door_lock_modes_binary::binary-size(supported_modes_list_length),
          configurable_outside_handles_bitmask::size(4),
          configurable_inside_handles_bitmask::size(4), supported_door_components_bitmask,
          0x00::size(4), auto_relock_supported_bit::size(1),
          hold_and_release_supported_bit::size(1), twist_assist_supported_bit::size(1),
          block_to_block_supported_bit::size(1)>>
      ) do
    supported_operations = operations_from_bitmask(supported_operations_bitmask)
    # Mode bytes are the only part of the frame that can fail decoding.
    with {:ok, modes} <- door_lock_modes_from_binary(supported_door_lock_modes_binary) do
      {:ok,
       [
         supported_operations: supported_operations,
         supported_door_lock_modes: modes,
         configurable_outside_handles:
           DoorLock.door_handles_from_bitmask(configurable_outside_handles_bitmask),
         configurable_inside_handles:
           DoorLock.door_handles_from_bitmask(configurable_inside_handles_bitmask),
         supported_door_components:
           door_components_from_bitmask(supported_door_components_bitmask),
         auto_relock_supported?: auto_relock_supported_bit == 1,
         hold_and_release_supported?: hold_and_release_supported_bit == 1,
         block_to_block_supported?: block_to_block_supported_bit == 1,
         twist_assist_supported?: twist_assist_supported_bit == 1
       ]}
    else
      {:error, %DecodeError{} = decode_error} ->
        # Tag the error with this command's name for better diagnostics.
        {:error, %DecodeError{decode_error | command: :door_lock_capabilities_report}}
    end
  end

  # Operations bitmask layout: <<reserved:5, timed:1, constant:1, reserved:1>>.
  defp operations_to_binary(operations) do
    constant_bit = if :constant_operation in operations, do: 1, else: 0
    timed_bit = if :timed_operation in operations, do: 1, else: 0
    <<0x00::size(5), timed_bit::size(1), constant_bit::size(1), 0x00::size(1)>>
  end

  defp operations_from_bitmask(bitmask) do
    <<0x00::size(5), timed_bit::size(1), constant_bit::size(1), _reserved::size(1)>> = <<bitmask>>
    operations = []
    operations = if timed_bit == 1, do: [:timed_operation | operations], else: operations
    if constant_bit == 1, do: [:constant_operation | operations], else: operations
  end

  # Components bitmask layout: <<reserved:5, latch:1, bolt:1, door:1>>.
  defp door_components_to_bitmask(components) do
    latch_bit = if :latch in components, do: 1, else: 0
    bolt_bit = if :bolt in components, do: 1, else: 0
    door_bit = if :door in components, do: 1, else: 0
    <<0x00::size(5), latch_bit::size(1), bolt_bit::size(1), door_bit::size(1)>>
  end

  defp door_components_from_bitmask(bitmask) do
    <<0x00::size(5), latch_bit::size(1), bolt_bit::size(1), door_bit::size(1)>> = <<bitmask>>
    components = []
    components = if latch_bit == 1, do: [:latch | components], else: components
    components = if bolt_bit == 1, do: [:bolt | components], else: components
    if door_bit == 1, do: [:door | components], else: components
  end

  # Each supported mode is encoded as one byte.
  defp door_lock_modes_to_binary(modes) do
    for mode <- modes, into: <<>>, do: <<DoorLock.mode_to_byte(mode)>>
  end

  # Halts on the first unrecognized mode byte, propagating the DecodeError.
  defp door_lock_modes_from_binary(binary) do
    mode_bytes = :erlang.binary_to_list(binary)
    Enum.reduce_while(mode_bytes, {:ok, []}, fn byte, {:ok, acc} ->
      case DoorLock.mode_from_byte(byte) do
        {:ok, mode} -> {:cont, {:ok, [mode | acc]}}
        {:error, %DecodeError{}} = error -> {:halt, error}
      end
    end)
  end
end
|
lib/grizzly/zwave/commands/door_lock_capabilities_report.ex
| 0.879069
| 0.458894
|
door_lock_capabilities_report.ex
|
starcoder
|
defmodule Membrane.RTP.VP9.PayloadDescriptor do
@moduledoc """
Defines a structure representing VP9 payload descriptor
Described here: https://tools.ietf.org/html/draft-ietf-payload-vp9-10#section-4.2
Flexible mode:
```
0 1 2 3 4 5 6 7
+-+-+-+-+-+-+-+-+
|I|P|L|F|B|E|V|Z| (REQUIRED)
+-+-+-+-+-+-+-+-+
I: |M| PICTURE ID | (REQUIRED)
+-+-+-+-+-+-+-+-+
M: | EXTENDED PID | (RECOMMENDED)
+-+-+-+-+-+-+-+-+
L: | TID |U| SID |D| (CONDITIONALLY RECOMMENDED)
+-+-+-+-+-+-+-+-+ -\
P,F: | P_DIFF |N| (CONDITIONALLY REQUIRED) - up to 3 times
+-+-+-+-+-+-+-+-+ -/
V: | SS |
| .. |
+-+-+-+-+-+-+-+-+
```
Non-flexible mode:
```
0 1 2 3 4 5 6 7
+-+-+-+-+-+-+-+-+
|I|P|L|F|B|E|V|Z| (REQUIRED)
+-+-+-+-+-+-+-+-+
I: |M| PICTURE ID | (RECOMMENDED)
+-+-+-+-+-+-+-+-+
M: | EXTENDED PID | (RECOMMENDED)
+-+-+-+-+-+-+-+-+
L: | TID |U| SID |D| (CONDITIONALLY RECOMMENDED)
+-+-+-+-+-+-+-+-+
| TL0PICIDX | (CONDITIONALLY REQUIRED)
+-+-+-+-+-+-+-+-+
V: | SS |
| .. |
+-+-+-+-+-+-+-+-+
```
"""
@type first_octet :: binary()
@type picture_id :: 0..32_767
@type tid :: 0..7
@type u :: 0..1
@type d :: 0..1
@type sid :: 0..7
@type p_diff :: 0..255
@type tl0picidx :: 0..255
@type t :: %__MODULE__{
first_octet: first_octet(),
picture_id: picture_id(),
tid: tid(),
u: u(),
sid: sid(),
d: d(),
p_diffs: [p_diff()],
tl0picidx: tl0picidx(),
scalability_structure: ScalabilityStructure.t()
}
defstruct [
:first_octet,
:picture_id,
:tid,
:u,
:sid,
:d,
:tl0picidx,
:scalability_structure,
p_diffs: []
]
defmodule PGDescription do
@moduledoc """
Struct representing picture group description. Present if G bit is set in first octet of scalability structure.
"""
alias Membrane.RTP.VP9.PayloadDescriptor
@type t :: %__MODULE__{
tid: PayloadDescriptor.tid(),
u: PayloadDescriptor.u(),
p_diffs: [PayloadDescriptor.p_diff()]
}
defstruct [:tid, :u, p_diffs: []]
end
defmodule SSDimension do
@moduledoc """
Struct representing spatial layer frame resolution. Present if Y bit is set in first octet of scalability structure.
"""
@type t :: %__MODULE__{
width: 0..65_535,
height: 0..65_535
}
@enforce_keys [:width, :height]
defstruct @enforce_keys
end
defmodule ScalabilityStructure do
@moduledoc """
A struct representing VP9 scalability structure.
"""
alias Membrane.RTP.VP9.PayloadDescriptor
alias Membrane.RTP.VP9.PayloadDescriptor.{SSDimension, PGDescription}
@type t :: %__MODULE__{
first_octet: PayloadDescriptor.first_octet(),
dimensions: [SSDimension.t()],
pg_descriptions: [PGDescription.t()]
}
@enforce_keys [:first_octet]
defstruct [:first_octet, dimensions: [], pg_descriptions: []]
end
@spec serialize(__MODULE__.t() | PGDescription.t() | SSDimension.t() | ScalabilityStructure.t()) ::
        binary()
def serialize(%__MODULE__{} = payload_descriptor) do
  first_octet = payload_descriptor.first_octet
  <<i::1, p::1, l::1, f::1, _be::2, v::1, _z::1>> = first_octet

  # The stored picture id keeps its leading M bit: a value whose 16-bit form
  # has the top bit set was parsed from the two-byte (15-bit) encoding,
  # otherwise from the one-byte (7-bit) encoding — mirror that on output.
  picture_id =
    if i == 1 do
      case <<payload_descriptor.picture_id::16>> do
        <<0::1, _rest::15>> -> <<payload_descriptor.picture_id::8>>
        <<1::1, _rest::15>> -> <<payload_descriptor.picture_id::16>>
      end
    else
      <<>>
    end

  tid_u_sid_d =
    if l == 1,
      do:
        <<payload_descriptor.tid::3, payload_descriptor.u::1, payload_descriptor.sid::3,
          payload_descriptor.d::1>>,
      else: <<>>

  # TL0PICIDX belongs to the wire format only in non-flexible mode (L set,
  # F clear) and directly follows the layer indices; previously it was
  # emitted unconditionally (and after the P_DIFFs) whenever the field was
  # set, producing a corrupt descriptor for inconsistent structs.
  tl0picidx =
    if l == 1 and f == 0 and payload_descriptor.tl0picidx,
      do: <<payload_descriptor.tl0picidx>>,
      else: <<>>

  # Reference indices exist only in flexible mode with the P bit set.
  p_diffs =
    if p == 1 and f == 1,
      do: payload_descriptor.p_diffs |> Enum.map_join(&<<&1>>),
      else: <<>>

  scalability_structure =
    if v == 1,
      do: serialize(payload_descriptor.scalability_structure),
      else: <<>>

  first_octet <> picture_id <> tid_u_sid_d <> tl0picidx <> p_diffs <> scalability_structure
end

def serialize(%ScalabilityStructure{} = ss) do
  # Accept both representations of the SS header: the typespec declares a
  # binary, but parsing currently stores an integer
  # (:binary.decode_unsigned/1), which previously made round-tripping raise
  # on binary concatenation.
  first_octet =
    case ss.first_octet do
      octet when is_binary(octet) -> octet
      octet when is_integer(octet) -> <<octet>>
    end

  dimensions = ss.dimensions |> Enum.map_join(&serialize(&1))
  # N_G: number of picture group descriptions that follow.
  n_g = <<length(ss.pg_descriptions)>>
  pg_descriptions = ss.pg_descriptions |> Enum.map_join(&serialize(&1))
  first_octet <> dimensions <> n_g <> pg_descriptions
end

def serialize(%SSDimension{} = dimension) do
  # WIDTH and HEIGHT are 16-bit big-endian values.
  <<dimension.width::16, dimension.height::16>>
end

def serialize(%PGDescription{} = pg_description) do
  # R (2 bits) announces how many P_DIFF bytes follow.
  r = length(pg_description.p_diffs)
  p_diffs = pg_description.p_diffs |> Enum.map_join(&<<&1>>)
  <<pg_description.tid::3, pg_description.u::1, r::2, 0::2>> <> p_diffs
end
@spec parse_payload_descriptor(binary()) :: {:error, :malformed_data} | {:ok, {t(), binary()}}
def parse_payload_descriptor(raw_payload)

def parse_payload_descriptor(<<header::binary-size(1), body::binary()>>)
    when byte_size(body) > 0 do
  <<i::1, _p::1, _l::1, f::1, _bevz::4>> = header

  # Flexible mode (F = 1) without a picture id (I = 0) is invalid and is
  # rejected up front; any short or inconsistent input surfaces from the
  # helpers and is normalized to {:error, :malformed_data} below.
  parsed =
    with false <- i == 0 and f == 1,
         acc = %__MODULE__{first_octet: header},
         {:ok, {acc, remaining}} <- get_pid(header, body, acc),
         {:ok, {acc, remaining}} <- get_layer_indices(header, remaining, acc),
         {:ok, {acc, remaining}} <- get_pdiffs(header, remaining, 0, acc),
         {:ok, {ss, remaining}} <- get_scalability_structure(header, remaining) do
      {:ok, {%{acc | scalability_structure: ss}, remaining}}
    end

  case parsed do
    {:ok, _} = ok -> ok
    _other -> {:error, :malformed_data}
  end
end

def parse_payload_descriptor(_binary), do: {:error, :malformed_data}
# No picture ID present (I bit clear); at least one payload byte must remain.
defp get_pid(<<0::1, _::7>>, rest, descriptor_acc) when byte_size(rest) > 0,
  do: {:ok, {descriptor_acc, rest}}

# Picture ID present (I bit set). The PID's own leading M bit selects the
# one-byte (7-bit) or two-byte (15-bit) encoding; the stored value keeps the
# M bit, mirroring serialize/1. (The original pattern literal was corrupted
# to an IPv6-looking token and is restored here.)
defp get_pid(<<1::1, _::7>>, <<pid, rest::binary()>>, descriptor_acc)
     when byte_size(rest) > 0 do
  case <<pid>> do
    <<0::1, _rest_of_pid::7>> ->
      {:ok, {%{descriptor_acc | picture_id: pid}, rest}}

    <<1::1, _rest_of_pid::7>> ->
      <<second_byte, rest::binary()>> = rest
      <<pid::16>> = <<pid, second_byte>>
      {:ok, {%{descriptor_acc | picture_id: pid}, rest}}
  end
end

defp get_pid(_header, _rest, _descriptor_acc), do: {:error, :malformed_data}
# No layer indices (L bit clear); at least one payload byte must remain.
defp get_layer_indices(<<_i::1, _p::1, 0::1, _::5>>, rest, descriptor_acc)
     when byte_size(rest) > 0,
     do: {:ok, {descriptor_acc, rest}}

# Layer indices present, non-flexible mode (L set, F clear): the TID|U|SID|D
# byte is followed by one TL0PICIDX byte. (Restored bit patterns — the
# originals were corrupted to IPv6-looking tokens.)
defp get_layer_indices(<<_i::1, _p::1, 1::1, 0::1, _::4>>, rest, descriptor_acc)
     when byte_size(rest) > 2 do
  <<tid::3, u::1, sid::3, d::1, tl0picidx, rest::binary()>> = rest
  {:ok, {%{descriptor_acc | tid: tid, u: u, sid: sid, d: d, tl0picidx: tl0picidx}, rest}}
end

# Layer indices present, flexible mode (L and F set): no TL0PICIDX byte.
defp get_layer_indices(<<_i::1, _p::1, 1::1, 1::1, _::4>>, rest, descriptor_acc)
     when byte_size(rest) > 1 do
  <<tid::3, u::1, sid::3, d::1, rest::binary()>> = rest
  {:ok, {%{descriptor_acc | tid: tid, u: u, sid: sid, d: d}, rest}}
end

defp get_layer_indices(_header, _rest, _descriptor_acc), do: {:error, :malformed_data}
# Reference indices (P_DIFF) are consumed only when both P and F are set in
# the first octet (flexible mode, inter-layer predicted) — the same condition
# serialize/1 uses — and at most 3 may occur. Each P_DIFF byte carries its
# value in the top 7 bits and a continuation flag N in the least significant
# bit. (The header pattern literal was corrupted to an IPv6-looking token and
# is restored here.)
defp get_pdiffs(
       <<_i::1, 1::1, _l::1, 1::1, _::4>> = header,
       <<p_diff::binary-size(1), rest::binary>>,
       diff_count,
       descriptor_acc
     )
     when diff_count < 3 do
  <<_::7, n::1>> = p_diff

  with 1 <- n,
       {:ok, {descriptor_acc, rest}} <-
         get_pdiffs(header, rest, diff_count + 1, descriptor_acc) do
    # Prepending after the recursive call keeps :p_diffs in wire order.
    {:ok,
     {%{
        descriptor_acc
        | p_diffs: [:binary.decode_unsigned(p_diff) | descriptor_acc.p_diffs]
      }, rest}}
  else
    # N == 0: this was the last P_DIFF.
    0 ->
      {:ok,
       {%{descriptor_acc | p_diffs: [:binary.decode_unsigned(p_diff) | descriptor_acc.p_diffs]},
        rest}}

    {:error, :malformed_data} ->
      {:error, :malformed_data}
  end
end

# Header without P and F both set, or diff budget exhausted: nothing to consume.
defp get_pdiffs(_header, rest, _diff_count, descriptor_acc) when byte_size(rest) > 0,
  do: {:ok, {descriptor_acc, rest}}

defp get_pdiffs(_header, _rest, _diff_count, _descriptor_acc), do: {:error, :malformed_data}
# No scalability structure (V bit clear).
defp get_scalability_structure(<<_iplfbe::6, 0::1, _z::1>>, rest), do: {:ok, {nil, rest}}

# Scalability structure present (V bit set; the bit literal was corrupted to
# an IPv6-looking token and is restored here). Matching the SS header in the
# function head — instead of a `=` match inside the body — turns truncated
# input into {:error, :malformed_data} rather than a MatchError.
defp get_scalability_structure(
       <<_iplfbe::6, 1::1, _z::1>>,
       <<ss_header::binary-size(1), rest::binary()>>
     ) do
  with {:ok, {widths_and_heights, rest}} <- ss_get_widths_and_heights(ss_header, rest, 0, []),
       {:ok, {pg_descriptions, rest}} <- ss_get_pg_descriptions(ss_header, rest) do
    {:ok,
     {%ScalabilityStructure{
        first_octet: :binary.decode_unsigned(ss_header),
        dimensions: widths_and_heights,
        pg_descriptions: pg_descriptions
      }, rest}}
  else
    _error -> {:error, :malformed_data}
  end
end

# V bit set but no byte left for the SS header.
defp get_scalability_structure(_header, _rest), do: {:error, :malformed_data}
# Y bit clear: the SS carries no explicit resolutions.
defp ss_get_widths_and_heights(<<_n_s::3, 0::1, _g::1, _::3>>, rest, _count, dimensions),
  do: {:ok, {dimensions, rest}}

# Y bit set (restored pattern — the literal was corrupted to an IPv6-looking
# token): read WIDTH (2 bytes) and HEIGHT (2 bytes) for each of the N_S + 1
# spatial layers; the guard additionally requires at least one byte after the
# pair, consistent with the other parsing helpers.
defp ss_get_widths_and_heights(
       <<n_s::3, 1::1, _g::1, _::3>> = ss_header,
       rest,
       count,
       dimensions
     )
     when count <= n_s and byte_size(rest) > 4 do
  <<width::binary-size(2), height::binary-size(2), rest::binary()>> = rest

  case ss_get_widths_and_heights(ss_header, rest, count + 1, dimensions) do
    {:ok, {next_dims, rest}} ->
      # Cons-ing before the recursion result keeps the layers in wire order.
      {:ok,
       {[
          %SSDimension{
            width: :binary.decode_unsigned(width),
            height: :binary.decode_unsigned(height)
          }
          | next_dims
        ], rest}}

    _error ->
      {:error, :malformed_data}
  end
end

# All N_S + 1 resolutions have been consumed.
defp ss_get_widths_and_heights(<<n_s::3, 1::1, _g::1, _::3>>, rest, count, dimensions)
     when count == n_s + 1,
     do: {:ok, {dimensions, rest}}

defp ss_get_widths_and_heights(_ss_header, _rest, _count, _dimensions),
  do: {:error, :malformed_data}
# G bit set (restored pattern — the literal was corrupted to an IPv6-looking
# token) with N_G == 0: explicitly return no descriptions. Previously this
# fell into the generic clause below where `1..0` — a *decreasing* two-element
# range — made the parser try to read two descriptions that do not exist.
defp ss_get_pg_descriptions(<<_n_s::3, _y::1, 1::1, _::3>>, <<0, rest::binary()>>),
  do: {:ok, {[], rest}}

# G bit set: a one-byte group count (N_G) is followed by N_G picture group
# descriptions. Matching N_G in the head turns a truncated input into an
# error instead of a MatchError.
defp ss_get_pg_descriptions(<<_n_s::3, _y::1, 1::1, _::3>>, <<n_g, rest::binary()>>) do
  {maybe_descriptions, rest} =
    1..n_g
    |> Bunch.Enum.try_map_reduce(rest, fn _i, rest ->
      case ss_get_pg_description(rest) do
        {:ok, {pg_description, rest}} -> {{:ok, pg_description}, rest}
        _error -> {{:error, :malformed_data}, rest}
      end
    end)

  with {:ok, descriptions} <- maybe_descriptions do
    {:ok, {descriptions, rest}}
  end
end

# G bit set but no byte left for N_G.
defp ss_get_pg_descriptions(<<_n_s::3, _y::1, 1::1, _::3>>, _rest),
  do: {:error, :malformed_data}

# G bit clear: no picture group descriptions.
defp ss_get_pg_descriptions(_ss_header, rest), do: {:ok, {[], rest}}
# Not enough bytes left for the advertised number of P_DIFFs.
defp ss_get_pg_description(<<_tid::3, _u::1, r::2, _::2, rest::binary()>>)
     when byte_size(rest) < r,
     do: {:error, :malformed_data}

# R == 0: no P_DIFF bytes follow. This clause previously returned a bare
# {description, rest} tuple (and its `0::2` literal had been corrupted to an
# IPv6-looking token); the caller only matches {:ok, {description, rest}}, so
# every zero-P_DIFF group was reported as malformed data.
defp ss_get_pg_description(<<tid::3, u::1, 0::2, _::2, rest::binary()>>),
  do: {:ok, {%PGDescription{tid: tid, u: u}, rest}}

# R > 0: consume R P_DIFF bytes; the guard keeps at least one byte after them.
defp ss_get_pg_description(<<tid::3, u::1, r::2, _::2, rest::binary()>>)
     when byte_size(rest) > r do
  pg_description = %PGDescription{tid: tid, u: u}
  <<p_diffs::binary-size(r), rest::binary()>> = rest
  {:ok, {%{pg_description | p_diffs: :binary.bin_to_list(p_diffs)}, rest}}
end

defp ss_get_pg_description(_binary), do: {:error, :malformed_data}
end
|
lib/rtp_vp9/payload_descriptor.ex
| 0.641871
| 0.641338
|
payload_descriptor.ex
|
starcoder
|
defmodule SmartCity.Helpers do
  @moduledoc """
  Functions used across SmartCity modules.
  """

  @type file_type :: String.t()
  @type mime_type :: String.t()

  @doc """
  Convert a map with string keys to one with atom keys. Will convert keys nested in a sub-map or a
  map that is part of a list. Ignores atom keys.

  ## Examples

      iex> SmartCity.Helpers.to_atom_keys(%{"abc" => 123})
      %{abc: 123}

      iex> SmartCity.Helpers.to_atom_keys(%{"a" => %{"b" => "c"}})
      %{a: %{b: "c"}}

      iex> SmartCity.Helpers.to_atom_keys(%{"a" => [%{"b" => "c"}]})
      %{a: [%{b: "c"}]}
  """
  @spec to_atom_keys(map()) :: map()
  def to_atom_keys(map) when is_map(map) do
    Map.new(map, fn
      # Recurse into nested maps and lists before converting the key.
      {key, val} when is_map(val) or is_list(val) ->
        {safe_string_to_atom(key), to_atom_keys(val)}

      {key, val} when is_binary(key) ->
        {safe_string_to_atom(key), val}

      # Atom (or other) keys pass through untouched.
      keyval ->
        keyval
    end)
  end

  def to_atom_keys(list) when is_list(list), do: Enum.map(list, &to_atom_keys/1)
  def to_atom_keys(value), do: value

  @doc """
  Converts a binary to the corresponding atom; atoms and any other values
  pass through unchanged.

  Relies on `String.to_atom/1`, so it must only be applied to trusted,
  bounded input — atoms are never garbage collected.
  """
  @spec safe_string_to_atom(term()) :: term()
  def safe_string_to_atom(string) when is_binary(string), do: String.to_atom(string)
  def safe_string_to_atom(atom) when is_atom(atom), do: atom
  def safe_string_to_atom(value), do: value

  @doc """
  Standardize file type definitions by deferring to the
  official media type of the file based on a supplied extension.
  """
  @spec mime_type(file_type()) :: mime_type()
  def mime_type(file_type) do
    downcased_type = String.downcase(file_type)

    # Values that already are valid media types pass through; anything else is
    # treated as a file extension and looked up.
    case MIME.valid?(downcased_type) do
      true -> downcased_type
      false -> MIME.type(downcased_type)
    end
  end

  @doc """
  Merges two maps into one, including sub maps. Matching keys from the right map will override their corresponding key in the left map.

  An empty right map replaces the corresponding (sub-)map entirely.
  """
  @spec deep_merge(map(), map()) :: map()
  def deep_merge(%{} = _left, %{} = right) when right == %{}, do: right
  def deep_merge(left, right), do: Map.merge(left, right, &deep_resolve/3)

  # When both conflicting values are maps, merge them recursively; otherwise
  # the right value wins.
  defp deep_resolve(_key, %{} = left, %{} = right), do: deep_merge(left, right)
  defp deep_resolve(_key, _left, right), do: right
end
|
lib/smart_city/helpers.ex
| 0.858199
| 0.501892
|
helpers.ex
|
starcoder
|
defmodule Crux.Structs.Channel do
@moduledoc """
Represents a Discord [Channel Object](https://discord.com/developers/docs/resources/channel#channel-object).
List of where every property can be present:
| Property | Text (0) | DM (1) | Voice (2) | Group (3) | Category (4) | News (5) |
| :-------------------: | :------: | :---------------: | :-------: | :-------: | :----------: | :------: |
| application_id | no | no | no | yes | no | no |
| bitrate | no | no | yes | no | no | no |
| guild_id | yes | no | yes | no | yes | yes |
| icon | no | no | no | yes | no | no |
| id | yes | yes | yes | yes | yes | yes |
| last_message_id | yes | yes | no | yes | no | yes |
| last_pin_timestamp | yes | yes | no | yes | no | yes |
| name | yes | no | yes | yes | yes | yes |
| nsfw | yes | no | no | no | no | yes |
| owner_id | no | no | no | yes | no | no |
| parent_id | yes | no | yes | no | no | yes |
| permission_overwrites | yes | no | yes | no | yes | yes |
| position | yes | no | yes | no | yes | yes |
| rate_limit_per_user | yes | no | no | no | no | no |
| recipients | no | yes (One Element) | no | yes | no | no |
| topic | yes | no | yes | no | yes | yes |
| type | `0` | `1` | `2` | `3` | `4` | `5` |
| user_limit | no | no | yes | no | no | no |
Differences opposed to the Discord API Object:
- `:recipients` is a MapSet of user ids
"""
@moduledoc since: "0.1.0"
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{Channel, Message, Overwrite, Snowflake, Util}
defstruct [
:id,
:type,
:guild_id,
:position,
:permission_overwrites,
:name,
:topic,
:nsfw,
:last_message_id,
:bitrate,
:user_limit,
:rate_limit_per_user,
:recipients,
:icon,
:owner_id,
:application_id,
:parent_id,
:last_pin_timestamp
]
@typedoc since: "0.1.0"
@type t :: %__MODULE__{
application_id: Snowflake.t(),
bitrate: integer(),
guild_id: Snowflake.t(),
icon: String.t(),
id: Snowflake.t(),
last_message_id: Snowflake.t(),
last_pin_timestamp: String.t(),
name: String.t(),
nsfw: boolean(),
owner_id: Snowflake.t(),
parent_id: Snowflake.t(),
permission_overwrites: %{optional(Snowflake.t()) => Overwrite.t()},
position: integer(),
rate_limit_per_user: integer(),
recipients: MapSet.t(Snowflake.t()),
topic: String.t(),
type: type(),
user_limit: non_neg_integer()
}
@typedoc """
The type of a channel.
| Type | ID | Description |
| :------------: | :-: | :---------------------------------------------------------------------------------------: |
| GUILD_TEXT | 0 | A text channel within a guild. |
| DM | 1 | A direct text channel between two users. |
| GUILD_VOICE | 2 | A voice channel withing a guild. |
| GROUP_DM | 3 | A direct channel between multiple users. Bots do not have access to those. |
| GUILD_CATEGORY | 4 | An organizational category. |
| GUILD_NEWS | 5 | A text channel users can follow and crosspost messages to. |
| GUILD_STORE | 6 | A channel in which game developers can sell their game. Bots can not interact with those. |
For more information see the [Discord Developer Documentation](https://discord.com/developers/docs/resources/channel#channel-object-channel-types).
"""
@typedoc since: "0.2.3"
@type type :: non_neg_integer()
@typedoc """
All available types that can be resolved into a channel id.
"""
@typedoc since: "0.2.1"
@type id_resolvable() :: Message.t() | Channel.t() | Snowflake.t() | String.t()

@doc """
Resolves the id of a `t:Crux.Structs.Channel.t/0`.

> Automatically invoked by `Crux.Structs.resolve_id/2`.

```elixir
iex> %Crux.Structs.Message{channel_id: 222079895583457280}
...> |> Crux.Structs.Channel.resolve_id()
222079895583457280

iex> %Crux.Structs.Channel{id: 222079895583457280}
...> |> Crux.Structs.Channel.resolve_id()
222079895583457280

iex> 222079895583457280
...> |> Crux.Structs.Channel.resolve_id()
222079895583457280

iex> "222079895583457280"
...> |> Crux.Structs.Channel.resolve_id()
222079895583457280
```
"""
@doc since: "0.2.3"
@spec resolve_id(id_resolvable()) :: Snowflake.t() | nil
def resolve_id(%Message{channel_id: channel_id}), do: resolve_id(channel_id)
def resolve_id(%Channel{id: id}), do: resolve_id(id)
# Snowflakes and decimal strings are handled by the generic resolver.
def resolve_id(resolvable), do: Structs.resolve_id(resolvable)
@typedoc """
All available types that can be resolved into a channel position.
"""
@typedoc since: "0.2.1"
@type position_resolvable() ::
        Channel.t()
        | %{channel: id_resolvable(), position: integer()}
        | {id_resolvable(), integer()}
        | %{id: id_resolvable(), position: integer()}

@doc """
Resolves a `t:position_resolvable/0` into a channel position.

## Examples

```elixir
iex> %Crux.Structs.Channel{id: 222079895583457280, position: 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}

iex> {%Crux.Structs.Channel{id: 222079895583457280}, 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}

iex> {222079895583457280, 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}

iex> %{id: 222079895583457280, position: 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}

iex> %{channel: 222079895583457280, position: 5}
...> |> Crux.Structs.Channel.resolve_position()
%{id: 222079895583457280, position: 5}

iex> {nil, 5}
...> |> Crux.Structs.Channel.resolve_position()
nil
```
"""
@doc since: "0.2.1"
@spec resolve_position(position_resolvable()) :: %{id: Snowflake.t(), position: integer()} | nil
def resolve_position(resolvable)

def resolve_position(%Channel{id: id, position: position}) do
  validate_position(%{id: id, position: position})
end

def resolve_position(%{channel: resolvable, position: position}) do
  validate_position(%{id: resolve_id(resolvable), position: position})
end

def resolve_position(%{id: resolvable, position: position}) do
  validate_position(%{id: resolve_id(resolvable), position: position})
end

def resolve_position({resolvable, position}) do
  validate_position(%{id: resolve_id(resolvable), position: position})
end

@doc false
# Normalizes the id/position pair: an unresolvable id (nil) yields nil; a
# non-integer position matches no clause and raises (assertive by design).
@spec validate_position(%{id: Snowflake.t(), position: integer()}) :: %{
        id: Snowflake.t(),
        position: integer()
      }
@spec validate_position(%{id: nil, position: integer()}) :: nil
defp validate_position(%{id: nil, position: _}), do: nil

defp validate_position(%{id: _id, position: position} = entry)
     when is_integer(position) do
  entry
end
@doc """
Creates a `t:Crux.Structs.Channel.t/0` struct from raw data.

> Automatically invoked by `Crux.Structs.create/2`
"""
# Was `@typedoc since: "0.1.0"` — a @typedoc attached to a function is wrong
# (no type follows it) and the `since` metadata never reached create/1's
# docs; `@doc since:` is the pattern used everywhere else in this module.
@doc since: "0.1.0"
@spec create(data :: map()) :: t()
def create(data) do
  channel =
    data
    |> Util.atomify()
    # :id is mandatory; the remaining snowflake fields are optional and stay
    # nil when absent.
    |> Map.update!(:id, &Snowflake.to_snowflake/1)
    |> Map.update(:guild_id, nil, &Snowflake.to_snowflake/1)
    |> Map.update(:owner_id, nil, &Snowflake.to_snowflake/1)
    |> Map.update(:last_message_id, nil, &Snowflake.to_snowflake/1)
    |> Map.update(:application_id, nil, &Snowflake.to_snowflake/1)
    |> Map.update(:parent_id, nil, &Snowflake.to_snowflake/1)
    |> Map.update(:permission_overwrites, nil, &Util.raw_data_to_map(&1, Overwrite))
    # Recipients are stored as a MapSet of user ids (see @moduledoc).
    |> Map.update(:recipients, nil, &MapSet.new(&1, Util.map_to_id()))

  struct(__MODULE__, channel)
end
@doc ~S"""
Converts a `t:Crux.Structs.Channel.t/0` into its discord mention format.

## Example

```elixir
iex> %Crux.Structs.Channel{id: 316880197314019329}
...> |> Crux.Structs.Channel.to_mention()
"<#316880197314019329>"
```
"""
@doc since: "0.1.1"
@spec to_mention(user :: Channel.t()) :: String.t()
def to_mention(%__MODULE__{id: id}), do: "<##{id}>"

# String.Chars makes interpolation ("#{channel}") render the mention format.
defimpl String.Chars, for: Crux.Structs.Channel do
  @spec to_string(Channel.t()) :: String.t()
  def to_string(%Channel{} = data), do: Channel.to_mention(data)
end
end
|
lib/structs/channel.ex
| 0.875348
| 0.789822
|
channel.ex
|
starcoder
|
defmodule AWS.IoT do
@moduledoc """
AWS IoT
AWS IoT provides secure, bi-directional communication between
Internet-connected devices (such as sensors, actuators, embedded devices,
or smart appliances) and the AWS cloud. You can discover your custom
IoT-Data endpoint to communicate with, configure rules for data processing
and integration with other services, organize resources associated with
each device (Registry), configure logging, and create and manage policies
and credentials to authenticate devices.
The service endpoints that expose this API are listed in [AWS IoT Core
Endpoints and
Quotas](https://docs.aws.amazon.com/general/latest/gr/iot-core.html). You
must use the endpoint for the region that has the resources you want to
access.
The service name used by [AWS Signature Version
4](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html)
to sign the request is: *execute-api*.
For more information about how AWS IoT works, see the [Developer
Guide](https://docs.aws.amazon.com/iot/latest/developerguide/aws-iot-how-it-works.html).
For information about how to use the credentials provider for AWS IoT, see
[Authorizing Direct Calls to AWS
Services](https://docs.aws.amazon.com/iot/latest/developerguide/authorizing-direct-aws.html).
"""
@doc """
Accepts a pending certificate transfer. The default state of the
certificate is INACTIVE.

To check for pending certificate transfers, call `ListCertificates` to
enumerate your certificates.
"""
def accept_certificate_transfer(client, certificate_id, input, options \\ []) do
  {query, input} = AWS.Request.build_params([{"setAsActive", "setAsActive"}], input)
  url = "/accept-certificate-transfer/#{URI.encode(certificate_id)}"
  request(client, :patch, url, query, [], input, options, nil)
end

@doc """
Adds a thing to a billing group.
"""
def add_thing_to_billing_group(client, input, options \\ []) do
  request(client, :put, "/billing-groups/addThingToBillingGroup", [], [], input, options, nil)
end

@doc """
Adds a thing to a thing group.
"""
def add_thing_to_thing_group(client, input, options \\ []) do
  request(client, :put, "/thing-groups/addThingToThingGroup", [], [], input, options, nil)
end
@doc """
Associates a group with a continuous job. The following criteria must be
met:

<ul> <li> The job must have been created with the `targetSelection` field
set to "CONTINUOUS".

</li> <li> The job status must currently be "IN_PROGRESS".

</li> <li> The total number of targets associated with a job must not
exceed 100.

</li> </ul>
"""
def associate_targets_with_job(client, job_id, input, options \\ []) do
  request(client, :post, "/jobs/#{URI.encode(job_id)}/targets", [], [], input, options, nil)
end

@doc """
Attaches a policy to the specified target.
"""
def attach_policy(client, policy_name, input, options \\ []) do
  url = "/target-policies/#{URI.encode(policy_name)}"
  request(client, :put, url, [], [], input, options, nil)
end

@doc """
Attaches the specified policy to the specified principal (certificate or
other credential).

**Note:** This API is deprecated. Please use `AttachPolicy` instead.
"""
def attach_principal_policy(client, policy_name, input, options \\ []) do
  # The principal travels in a request header rather than the query string.
  {headers, input} = AWS.Request.build_params([{"principal", "x-amzn-iot-principal"}], input)
  url = "/principal-policies/#{URI.encode(policy_name)}"
  request(client, :put, url, [], headers, input, options, nil)
end
@doc """
Associates a Device Defender security profile with a thing group or this
account. Each thing group or account can have up to five security profiles
associated with it.
"""
def attach_security_profile(client, security_profile_name, input, options \\ []) do
  {query, input} =
    AWS.Request.build_params([{"securityProfileTargetArn", "securityProfileTargetArn"}], input)

  url = "/security-profiles/#{URI.encode(security_profile_name)}/targets"
  request(client, :put, url, query, [], input, options, nil)
end

@doc """
Attaches the specified principal to the specified thing. A principal can be
X.509 certificates, IAM users, groups, and roles, Amazon Cognito identities
or federated identities.
"""
def attach_thing_principal(client, thing_name, input, options \\ []) do
  # The principal travels in a request header rather than the query string.
  {headers, input} = AWS.Request.build_params([{"principal", "x-amzn-principal"}], input)
  url = "/things/#{URI.encode(thing_name)}/principals"
  request(client, :put, url, [], headers, input, options, nil)
end

@doc """
Cancels a mitigation action task that is in progress. If the task is not in
progress, an InvalidRequestException occurs.
"""
def cancel_audit_mitigation_actions_task(client, task_id, input, options \\ []) do
  url = "/audit/mitigationactions/tasks/#{URI.encode(task_id)}/cancel"
  request(client, :put, url, [], [], input, options, nil)
end

@doc """
Cancels an audit that is in progress. The audit can be either scheduled or
on-demand. If the audit is not in progress, an "InvalidRequestException"
occurs.
"""
def cancel_audit_task(client, task_id, input, options \\ []) do
  request(client, :put, "/audit/tasks/#{URI.encode(task_id)}/cancel", [], [], input, options, nil)
end
@doc """
Cancels a pending transfer for the specified certificate.

**Note** Only the transfer source account can use this operation to cancel
a transfer. (Transfer destinations can use `RejectCertificateTransfer`
instead.) After transfer, AWS IoT returns the certificate to the source
account in the INACTIVE state. After the destination account has accepted
the transfer, the transfer cannot be cancelled.

After a certificate transfer is cancelled, the status of the certificate
changes from PENDING_TRANSFER to INACTIVE.
"""
def cancel_certificate_transfer(client, certificate_id, input, options \\ []) do
  url = "/cancel-certificate-transfer/#{URI.encode(certificate_id)}"
  request(client, :patch, url, [], [], input, options, nil)
end

@doc """
Cancels a job.
"""
def cancel_job(client, job_id, input, options \\ []) do
  {query, input} = AWS.Request.build_params([{"force", "force"}], input)
  request(client, :put, "/jobs/#{URI.encode(job_id)}/cancel", query, [], input, options, nil)
end

@doc """
Cancels the execution of a job for a given thing.
"""
def cancel_job_execution(client, job_id, thing_name, input, options \\ []) do
  {query, input} = AWS.Request.build_params([{"force", "force"}], input)
  url = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}/cancel"
  request(client, :put, url, query, [], input, options, nil)
end

@doc """
Clears the default authorizer.
"""
def clear_default_authorizer(client, input, options \\ []) do
  request(client, :delete, "/default-authorizer", [], [], input, options, nil)
end
@doc """
Confirms a topic rule destination. When you create a rule requiring a
destination, AWS IoT sends a confirmation message to the endpoint or base
address you specify. The message includes a token which you pass back when
calling `ConfirmTopicRuleDestination` to confirm that you own or have
access to the endpoint.
"""
def confirm_topic_rule_destination(client, confirmation_token, options \\ []) do
  # GET with no body: the token is the only input, carried in the path.
  url = "/confirmdestination/#{AWS.Util.encode_uri(confirmation_token, true)}"
  request(client, :get, url, [], [], nil, options, nil)
end

@doc """
Creates a Device Defender audit suppression.
"""
def create_audit_suppression(client, input, options \\ []) do
  request(client, :post, "/audit/suppressions/create", [], [], input, options, nil)
end

@doc """
Creates an authorizer.
"""
def create_authorizer(client, authorizer_name, input, options \\ []) do
  url = "/authorizer/#{URI.encode(authorizer_name)}"
  request(client, :post, url, [], [], input, options, nil)
end

@doc """
Creates a billing group.
"""
def create_billing_group(client, billing_group_name, input, options \\ []) do
  url = "/billing-groups/#{URI.encode(billing_group_name)}"
  request(client, :post, url, [], [], input, options, nil)
end
@doc """
Creates an X.509 certificate using the specified certificate signing
request.

**Note:** The CSR must include a public key that is either an RSA key with
a length of at least 2048 bits or an ECC key from NIST P-256 or NIST P-384
curves.

**Note:** Reusing the same certificate signing request (CSR) results in a
distinct certificate.

You can create multiple certificates in a batch by creating a directory,
copying multiple .csr files into that directory, and then specifying that
directory on the command line. The following commands show how to create a
batch of certificates given a batch of CSRs.

Assuming a set of CSRs are located inside of the directory
my-csr-directory:

On Linux and OS X, the command is:

$ ls my-csr-directory/ | xargs -I {} aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/{}

This command lists all of the CSRs in my-csr-directory and pipes each CSR
file name to the aws iot create-certificate-from-csr AWS CLI command to
create a certificate for the corresponding CSR.

The aws iot create-certificate-from-csr part of the command can also be run
in parallel to speed up the certificate creation process:

$ ls my-csr-directory/ | xargs -P 10 -I {} aws iot
create-certificate-from-csr --certificate-signing-request
file://my-csr-directory/{}

On Windows PowerShell, the command to create certificates for all CSRs in
my-csr-directory is:

> ls -Name my-csr-directory | %{aws iot create-certificate-from-csr
--certificate-signing-request file://my-csr-directory/$_}

On a Windows command prompt, the command to create certificates for all
CSRs in my-csr-directory is:

> forfiles /p my-csr-directory /c "cmd /c aws iot
create-certificate-from-csr --certificate-signing-request file://@path"
"""
def create_certificate_from_csr(client, input, options \\ []) do
  {query, input} = AWS.Request.build_params([{"setAsActive", "setAsActive"}], input)
  request(client, :post, "/certificates", query, [], input, options, nil)
end

@doc """
Create a dimension that you can use to limit the scope of a metric used in
a security profile for AWS IoT Device Defender. For example, using a
`TOPIC_FILTER` dimension, you can narrow down the scope of the metric only
to MQTT topics whose name match the pattern specified in the dimension.
"""
def create_dimension(client, name, input, options \\ []) do
  request(client, :post, "/dimensions/#{URI.encode(name)}", [], [], input, options, nil)
end
@doc """
Creates a domain configuration.

<note> The domain configuration feature is in public preview and is subject
to change.

</note>
"""
def create_domain_configuration(client, domain_configuration_name, input, options \\ []) do
  url = "/domainConfigurations/#{URI.encode(domain_configuration_name)}"
  request(client, :post, url, [], [], input, options, nil)
end

@doc """
Creates a dynamic thing group.
"""
def create_dynamic_thing_group(client, thing_group_name, input, options \\ []) do
  url = "/dynamic-thing-groups/#{URI.encode(thing_group_name)}"
  request(client, :post, url, [], [], input, options, nil)
end

@doc """
Creates a job.
"""
def create_job(client, job_id, input, options \\ []) do
  request(client, :put, "/jobs/#{URI.encode(job_id)}", [], [], input, options, nil)
end

@doc """
Creates a 2048-bit RSA key pair and issues an X.509 certificate using the
issued public key. You can also call `CreateKeysAndCertificate` over MQTT
from a device, for more information, see [Provisioning MQTT
API](https://docs.aws.amazon.com/iot/latest/developerguide/provision-wo-cert.html#provision-mqtt-api).

**Note** This is the only time AWS IoT issues the private key for this
certificate, so it is important to keep it in a secure location.
"""
def create_keys_and_certificate(client, input, options \\ []) do
  {query, input} = AWS.Request.build_params([{"setAsActive", "setAsActive"}], input)
  request(client, :post, "/keys-and-certificate", query, [], input, options, nil)
end
@doc """
Defines an action that can be applied to audit findings by using
StartAuditMitigationActionsTask. Only certain types of mitigation actions
can be applied to specific check names. For more information, see
[Mitigation
actions](https://docs.aws.amazon.com/iot/latest/developerguide/device-defender-mitigation-actions.html).
Each mitigation action can apply only one type of change.
"""
def create_mitigation_action(client, action_name, input, options \\ []) do
  url = "/mitigationactions/actions/#{URI.encode(action_name)}"
  request(client, :post, url, [], [], input, options, nil)
end

@doc """
Creates an AWS IoT OTAUpdate on a target group of things or groups.
"""
def create_o_t_a_update(client, ota_update_id, input, options \\ []) do
  url = "/otaUpdates/#{URI.encode(ota_update_id)}"
  request(client, :post, url, [], [], input, options, nil)
end

@doc """
Creates an AWS IoT policy.

The created policy is the default version for the policy. This operation
creates a policy version with a version identifier of **1** and sets **1**
as the policy's default version.
"""
def create_policy(client, policy_name, input, options \\ []) do
  request(client, :post, "/policies/#{URI.encode(policy_name)}", [], [], input, options, nil)
end

@doc """
Creates a new version of the specified AWS IoT policy. To update a policy,
create a new policy version. A managed policy can have up to five versions.
If the policy has five versions, you must use `DeletePolicyVersion` to
delete an existing version before you create a new one.

Optionally, you can set the new version as the policy's default version.
The default version is the operative version (that is, the version that is
in effect for the certificates to which the policy is attached).
"""
def create_policy_version(client, policy_name, input, options \\ []) do
  {query, input} = AWS.Request.build_params([{"setAsDefault", "setAsDefault"}], input)
  url = "/policies/#{URI.encode(policy_name)}/version"
  request(client, :post, url, query, [], input, options, nil)
end

@doc """
Creates a provisioning claim.
"""
def create_provisioning_claim(client, template_name, input, options \\ []) do
  url = "/provisioning-templates/#{URI.encode(template_name)}/provisioning-claim"
  request(client, :post, url, [], [], input, options, nil)
end
@doc """
Creates a fleet provisioning template.
"""
def create_provisioning_template(client, input, options \\ []) do
path_ = "/provisioning-templates"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new version of a fleet provisioning template.
"""
def create_provisioning_template_version(client, template_name, input, options \\ []) do
path_ = "/provisioning-templates/#{URI.encode(template_name)}/versions"
headers = []
{query_, input} =
[
{"setAsDefault", "setAsDefault"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a role alias.
"""
def create_role_alias(client, role_alias, input, options \\ []) do
path_ = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a scheduled audit that is run at a specified time interval.
"""
def create_scheduled_audit(client, scheduled_audit_name, input, options \\ []) do
path_ = "/audit/scheduledaudits/#{URI.encode(scheduled_audit_name)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a Device Defender security profile.
"""
def create_security_profile(client, security_profile_name, input, options \\ []) do
path_ = "/security-profiles/#{URI.encode(security_profile_name)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a stream for delivering one or more large files in chunks over
MQTT. A stream transports data bytes in chunks or blocks packaged as MQTT
messages from a source like S3. You can have one or more files associated
with a stream.
"""
def create_stream(client, stream_id, input, options \\ []) do
path_ = "/streams/#{URI.encode(stream_id)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a thing record in the registry. If this call is made multiple times
using the same thing name and configuration, the call will succeed. If this
call is made with the same thing name but different configuration a
`ResourceAlreadyExistsException` is thrown.
<note> This is a control plane operation. See
[Authorization](https://docs.aws.amazon.com/iot/latest/developerguide/iot-authorization.html)
for information about authorizing control plane actions.
</note>
"""
def create_thing(client, thing_name, input, options \\ []) do
path_ = "/things/#{URI.encode(thing_name)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Create a thing group.
<note> This is a control plane operation. See
[Authorization](https://docs.aws.amazon.com/iot/latest/developerguide/iot-authorization.html)
for information about authorizing control plane actions.
</note>
"""
def create_thing_group(client, thing_group_name, input, options \\ []) do
path_ = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a new thing type.
"""
def create_thing_type(client, thing_type_name, input, options \\ []) do
path_ = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a rule. Creating rules is an administrator-level action. Any user
who has permission to create rules will be able to access data processed by
the rule.
"""
def create_topic_rule(client, rule_name, input, options \\ []) do
path_ = "/rules/#{URI.encode(rule_name)}"
{headers, input} =
[
{"tags", "x-amz-tagging"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a topic rule destination. The destination must be confirmed prior
to use.
"""
def create_topic_rule_destination(client, input, options \\ []) do
path_ = "/destinations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Restores the default settings for Device Defender audits for this account.
Any configuration data you entered is deleted and all audit checks are
reset to disabled.
"""
def delete_account_audit_configuration(client, input, options \\ []) do
path_ = "/audit/configuration"
headers = []
{query_, input} =
[
{"deleteScheduledAudits", "deleteScheduledAudits"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a Device Defender audit suppression.
"""
def delete_audit_suppression(client, input, options \\ []) do
path_ = "/audit/suppressions/delete"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an authorizer.
"""
def delete_authorizer(client, authorizer_name, input, options \\ []) do
path_ = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the billing group.
"""
def delete_billing_group(client, billing_group_name, input, options \\ []) do
path_ = "/billing-groups/#{URI.encode(billing_group_name)}"
headers = []
{query_, input} =
[
{"expectedVersion", "expectedVersion"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a registered CA certificate.
"""
def delete_c_a_certificate(client, certificate_id, input, options \\ []) do
path_ = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the specified certificate.
A certificate cannot be deleted if it has a policy or IoT thing attached to
it or if its status is set to ACTIVE. To delete a certificate, first use
the `DetachPrincipalPolicy` API to detach all policies. Next, use the
`UpdateCertificate` API to set the certificate to the INACTIVE status.
"""
def delete_certificate(client, certificate_id, input, options \\ []) do
path_ = "/certificates/#{URI.encode(certificate_id)}"
headers = []
{query_, input} =
[
{"forceDelete", "forceDelete"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Removes the specified dimension from your AWS account.
"""
def delete_dimension(client, name, input, options \\ []) do
path_ = "/dimensions/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the specified domain configuration.
<note> The domain configuration feature is in public preview and is subject
to change.
</note>
"""
def delete_domain_configuration(client, domain_configuration_name, input, options \\ []) do
path_ = "/domainConfigurations/#{URI.encode(domain_configuration_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a dynamic thing group.
"""
def delete_dynamic_thing_group(client, thing_group_name, input, options \\ []) do
path_ = "/dynamic-thing-groups/#{URI.encode(thing_group_name)}"
headers = []
{query_, input} =
[
{"expectedVersion", "expectedVersion"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a job and its related job executions.
Deleting a job may take time, depending on the number of job executions
created for the job and various other factors. While the job is being
deleted, the status of the job will be shown as "DELETION_IN_PROGRESS".
Attempting to delete or cancel a job whose status is already
"DELETION_IN_PROGRESS" will result in an error.
Only 10 jobs may have status "DELETION_IN_PROGRESS" at the same time, or a
LimitExceededException will occur.
"""
def delete_job(client, job_id, input, options \\ []) do
path_ = "/jobs/#{URI.encode(job_id)}"
headers = []
{query_, input} =
[
{"force", "force"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a job execution.
"""
def delete_job_execution(client, execution_number, job_id, thing_name, input, options \\ []) do
path_ = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}/executionNumber/#{URI.encode(execution_number)}"
headers = []
{query_, input} =
[
{"force", "force"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a defined mitigation action from your AWS account.
"""
def delete_mitigation_action(client, action_name, input, options \\ []) do
path_ = "/mitigationactions/actions/#{URI.encode(action_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Delete an OTA update.
"""
def delete_o_t_a_update(client, ota_update_id, input, options \\ []) do
path_ = "/otaUpdates/#{URI.encode(ota_update_id)}"
headers = []
{query_, input} =
[
{"deleteStream", "deleteStream"},
{"forceDeleteAWSJob", "forceDeleteAWSJob"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the specified policy.
A policy cannot be deleted if it has non-default versions or it is attached
to any certificate.
To delete a policy, use the DeletePolicyVersion API to delete all
non-default versions of the policy; use the DetachPrincipalPolicy API to
detach the policy from any certificate; and then use the DeletePolicy API
to delete the policy.
When a policy is deleted using DeletePolicy, its default version is deleted
with it.
"""
def delete_policy(client, policy_name, input, options \\ []) do
path_ = "/policies/#{URI.encode(policy_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the specified version of the specified policy. You cannot delete
the default version of a policy using this API. To delete the default
version of a policy, use `DeletePolicy`. To find out which version of a
policy is marked as the default version, use ListPolicyVersions.
"""
def delete_policy_version(client, policy_name, policy_version_id, input, options \\ []) do
path_ = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a fleet provisioning template.
"""
def delete_provisioning_template(client, template_name, input, options \\ []) do
path_ = "/provisioning-templates/#{URI.encode(template_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a fleet provisioning template version.
"""
def delete_provisioning_template_version(client, template_name, version_id, input, options \\ []) do
path_ = "/provisioning-templates/#{URI.encode(template_name)}/versions/#{URI.encode(version_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a CA certificate registration code.
"""
def delete_registration_code(client, input, options \\ []) do
path_ = "/registrationcode"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a role alias
"""
def delete_role_alias(client, role_alias, input, options \\ []) do
path_ = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a scheduled audit.
"""
def delete_scheduled_audit(client, scheduled_audit_name, input, options \\ []) do
path_ = "/audit/scheduledaudits/#{URI.encode(scheduled_audit_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a Device Defender security profile.
"""
def delete_security_profile(client, security_profile_name, input, options \\ []) do
path_ = "/security-profiles/#{URI.encode(security_profile_name)}"
headers = []
{query_, input} =
[
{"expectedVersion", "expectedVersion"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a stream.
"""
def delete_stream(client, stream_id, input, options \\ []) do
path_ = "/streams/#{URI.encode(stream_id)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the specified thing. Returns successfully with no error if the
deletion is successful or you specify a thing that doesn't exist.
"""
def delete_thing(client, thing_name, input, options \\ []) do
path_ = "/things/#{URI.encode(thing_name)}"
headers = []
{query_, input} =
[
{"expectedVersion", "expectedVersion"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a thing group.
"""
def delete_thing_group(client, thing_group_name, input, options \\ []) do
path_ = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
{query_, input} =
[
{"expectedVersion", "expectedVersion"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the specified thing type. You cannot delete a thing type if it has
things associated with it. To delete a thing type, first mark it as
deprecated by calling `DeprecateThingType`, then remove any associated
things by calling `UpdateThing` to change the thing type on any associated
thing, and finally use `DeleteThingType` to delete the thing type.
"""
def delete_thing_type(client, thing_type_name, input, options \\ []) do
path_ = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes the rule.
"""
def delete_topic_rule(client, rule_name, input, options \\ []) do
path_ = "/rules/#{URI.encode(rule_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a topic rule destination.
"""
def delete_topic_rule_destination(client, arn, input, options \\ []) do
path_ = "/destinations/#{AWS.Util.encode_uri(arn, true)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes a logging level.
"""
def delete_v2_logging_level(client, input, options \\ []) do
path_ = "/v2LoggingLevel"
headers = []
{query_, input} =
[
{"targetName", "targetName"},
{"targetType", "targetType"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deprecates a thing type. You can not associate new things with deprecated
thing type.
"""
def deprecate_thing_type(client, thing_type_name, input, options \\ []) do
path_ = "/thing-types/#{URI.encode(thing_type_name)}/deprecate"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets information about the Device Defender audit settings for this account.
Settings include how audit notifications are sent and which audit checks
are enabled or disabled.
"""
def describe_account_audit_configuration(client, options \\ []) do
path_ = "/audit/configuration"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about a single audit finding. Properties include the
reason for noncompliance, the severity of the issue, and when the audit
that returned the finding was started.
"""
def describe_audit_finding(client, finding_id, options \\ []) do
path_ = "/audit/findings/#{URI.encode(finding_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about an audit mitigation task that is used to apply
mitigation actions to a set of audit findings. Properties include the
actions being applied, the audit checks to which they're being applied, the
task status, and aggregated task statistics.
"""
def describe_audit_mitigation_actions_task(client, task_id, options \\ []) do
path_ = "/audit/mitigationactions/tasks/#{URI.encode(task_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about a Device Defender audit suppression.
"""
def describe_audit_suppression(client, input, options \\ []) do
path_ = "/audit/suppressions/describe"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets information about a Device Defender audit.
"""
def describe_audit_task(client, task_id, options \\ []) do
path_ = "/audit/tasks/#{URI.encode(task_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes an authorizer.
"""
def describe_authorizer(client, authorizer_name, options \\ []) do
path_ = "/authorizer/#{URI.encode(authorizer_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns information about a billing group.
"""
def describe_billing_group(client, billing_group_name, options \\ []) do
path_ = "/billing-groups/#{URI.encode(billing_group_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes a registered CA certificate.
"""
def describe_c_a_certificate(client, certificate_id, options \\ []) do
path_ = "/cacertificate/#{URI.encode(certificate_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about the specified certificate.
"""
def describe_certificate(client, certificate_id, options \\ []) do
path_ = "/certificates/#{URI.encode(certificate_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes the default authorizer.
"""
def describe_default_authorizer(client, options \\ []) do
path_ = "/default-authorizer"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Provides details about a dimension that is defined in your AWS account.
"""
def describe_dimension(client, name, options \\ []) do
path_ = "/dimensions/#{URI.encode(name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets summary information about a domain configuration.
<note> The domain configuration feature is in public preview and is subject
to change.
</note>
"""
def describe_domain_configuration(client, domain_configuration_name, options \\ []) do
path_ = "/domainConfigurations/#{URI.encode(domain_configuration_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a unique endpoint specific to the AWS account making the call.
"""
def describe_endpoint(client, endpoint_type \\ nil, options \\ []) do
path_ = "/endpoint"
headers = []
query_ = []
query_ = if !is_nil(endpoint_type) do
[{"endpointType", endpoint_type} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes event configurations.
"""
def describe_event_configurations(client, options \\ []) do
path_ = "/event-configurations"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes a search index.
"""
def describe_index(client, index_name, options \\ []) do
path_ = "/indices/#{URI.encode(index_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes a job.
"""
def describe_job(client, job_id, options \\ []) do
path_ = "/jobs/#{URI.encode(job_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes a job execution.
"""
def describe_job_execution(client, job_id, thing_name, execution_number \\ nil, options \\ []) do
path_ = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}"
headers = []
query_ = []
query_ = if !is_nil(execution_number) do
[{"executionNumber", execution_number} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about a mitigation action.
"""
def describe_mitigation_action(client, action_name, options \\ []) do
path_ = "/mitigationactions/actions/#{URI.encode(action_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns information about a fleet provisioning template.
"""
def describe_provisioning_template(client, template_name, options \\ []) do
path_ = "/provisioning-templates/#{URI.encode(template_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns information about a fleet provisioning template version.
"""
def describe_provisioning_template_version(client, template_name, version_id, options \\ []) do
path_ = "/provisioning-templates/#{URI.encode(template_name)}/versions/#{URI.encode(version_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes a role alias.
"""
def describe_role_alias(client, role_alias, options \\ []) do
path_ = "/role-aliases/#{URI.encode(role_alias)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about a scheduled audit.
"""
def describe_scheduled_audit(client, scheduled_audit_name, options \\ []) do
path_ = "/audit/scheduledaudits/#{URI.encode(scheduled_audit_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about a Device Defender security profile.
"""
def describe_security_profile(client, security_profile_name, options \\ []) do
path_ = "/security-profiles/#{URI.encode(security_profile_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about a stream.
"""
def describe_stream(client, stream_id, options \\ []) do
path_ = "/streams/#{URI.encode(stream_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about the specified thing.
"""
def describe_thing(client, thing_name, options \\ []) do
path_ = "/things/#{URI.encode(thing_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describe a thing group.
"""
def describe_thing_group(client, thing_group_name, options \\ []) do
path_ = "/thing-groups/#{URI.encode(thing_group_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Describes a bulk thing provisioning task.
"""
def describe_thing_registration_task(client, task_id, options \\ []) do
path_ = "/thing-registration-tasks/#{URI.encode(task_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about the specified thing type.
"""
def describe_thing_type(client, thing_type_name, options \\ []) do
path_ = "/thing-types/#{URI.encode(thing_type_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Detaches a policy from the specified target.
"""
def detach_policy(client, policy_name, input, options \\ []) do
path_ = "/target-policies/#{URI.encode(policy_name)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Removes the specified policy from the specified certificate.
**Note:** This API is deprecated. Please use `DetachPolicy` instead.
"""
def detach_principal_policy(client, policy_name, input, options \\ []) do
path_ = "/principal-policies/#{URI.encode(policy_name)}"
{headers, input} =
[
{"principal", "x-amzn-iot-principal"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Disassociates a Device Defender security profile from a thing group or from
this account.
"""
def detach_security_profile(client, security_profile_name, input, options \\ []) do
path_ = "/security-profiles/#{URI.encode(security_profile_name)}/targets"
headers = []
{query_, input} =
[
{"securityProfileTargetArn", "securityProfileTargetArn"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Detaches the specified principal from the specified thing. A principal can
be X.509 certificates, IAM users, groups, and roles, Amazon Cognito
identities or federated identities.
<note> This call is asynchronous. It might take several seconds for the
detachment to propagate.
</note>
"""
def detach_thing_principal(client, thing_name, input, options \\ []) do
path_ = "/things/#{URI.encode(thing_name)}/principals"
{headers, input} =
[
{"principal", "x-amzn-principal"},
]
|> AWS.Request.build_params(input)
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Disables the rule.
"""
def disable_topic_rule(client, rule_name, input, options \\ []) do
path_ = "/rules/#{URI.encode(rule_name)}/disable"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Enables the rule.
"""
def enable_topic_rule(client, rule_name, input, options \\ []) do
path_ = "/rules/#{URI.encode(rule_name)}/enable"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns the approximate count of unique values that match the query.
"""
def get_cardinality(client, input, options \\ []) do
path_ = "/indices/cardinality"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets a list of the policies that have an effect on the authorization
behavior of the specified device when it connects to the AWS IoT device
gateway.
"""
def get_effective_policies(client, input, options \\ []) do
path_ = "/effective-policies"
headers = []
{query_, input} =
[
{"thingName", "thingName"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets the indexing configuration.
"""
def get_indexing_configuration(client, options \\ []) do
path_ = "/indexing/config"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets a job document.
"""
def get_job_document(client, job_id, options \\ []) do
path_ = "/jobs/#{URI.encode(job_id)}/job-document"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets the logging options.
NOTE: use of this command is not recommended. Use `GetV2LoggingOptions`
instead.
"""
def get_logging_options(client, options \\ []) do
path_ = "/loggingOptions"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets an OTA update.
"""
def get_o_t_a_update(client, ota_update_id, options \\ []) do
path_ = "/otaUpdates/#{URI.encode(ota_update_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Groups the aggregated values that match the query into percentile
groupings. The default percentile groupings are: 1,5,25,50,75,95,99,
although you can specify your own when you call `GetPercentiles`. This
function returns a value for each percentile group specified (or the
default percentile groupings). The percentile group "1" contains the
aggregated field value that occurs in approximately one percent of the
values that match the query. The percentile group "5" contains the
aggregated field value that occurs in approximately five percent of the
values that match the query, and so on. The result is an approximation, the
more values that match the query, the more accurate the percentile values.
"""
def get_percentiles(client, input, options \\ []) do
path_ = "/indices/percentiles"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets information about the specified policy with the policy document of the
default version.
"""
def get_policy(client, policy_name, options \\ []) do
path_ = "/policies/#{URI.encode(policy_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about the specified policy version.
"""
def get_policy_version(client, policy_name, policy_version_id, options \\ []) do
path_ = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets a registration code used to register a CA certificate with AWS IoT.
"""
def get_registration_code(client, options \\ []) do
path_ = "/registrationcode"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns the count, average, sum, minimum, maximum, sum of squares,
variance, and standard deviation for the specified aggregated field. If the
aggregation field is of type `String`, only the count statistic is
returned.
"""
def get_statistics(client, input, options \\ []) do
path_ = "/indices/statistics"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Gets information about the rule.
"""
def get_topic_rule(client, rule_name, options \\ []) do
path_ = "/rules/#{URI.encode(rule_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets information about a topic rule destination.
"""
def get_topic_rule_destination(client, arn, options \\ []) do
path_ = "/destinations/#{AWS.Util.encode_uri(arn, true)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets the fine grained logging options.
"""
def get_v2_logging_options(client, options \\ []) do
path_ = "/v2LoggingOptions"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists the active violations for a given Device Defender security profile.
"""
def list_active_violations(client, max_results \\ nil, next_token \\ nil, security_profile_name \\ nil, thing_name \\ nil, options \\ []) do
path_ = "/active-violations"
headers = []
query_ = []
query_ = if !is_nil(thing_name) do
[{"thingName", thing_name} | query_]
else
query_
end
query_ = if !is_nil(security_profile_name) do
[{"securityProfileName", security_profile_name} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists the policies attached to the specified thing group.
"""
def list_attached_policies(client, target, input, options \\ []) do
path_ = "/attached-policies/#{URI.encode(target)}"
headers = []
{query_, input} =
[
{"marker", "marker"},
{"pageSize", "pageSize"},
{"recursive", "recursive"},
]
|> AWS.Request.build_params(input)
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Lists the findings (results) of a Device Defender audit or of the audits
performed during a specified time period. (Findings are retained for 90
days.)
"""
def list_audit_findings(client, input, options \\ []) do
  # POST /audit/findings with the caller-supplied filter payload.
  request(client, :post, "/audit/findings", [], [], input, options, nil)
end
@doc """
Gets the status of audit mitigation action tasks that were executed.
"""
def list_audit_mitigation_actions_executions(client, action_status \\ nil, finding_id, max_results \\ nil, next_token \\ nil, task_id, options \\ []) do
  # GET /audit/mitigationactions/executions; nil parameters are dropped
  # from the query string.
  query_ =
    [
      {"actionStatus", action_status},
      {"findingId", finding_id},
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"taskId", task_id},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/audit/mitigationactions/executions", query_, [], nil, options, nil)
end
@doc """
Gets a list of audit mitigation action tasks that match the specified
filters.
"""
def list_audit_mitigation_actions_tasks(client, audit_task_id \\ nil, end_time, finding_id \\ nil, max_results \\ nil, next_token \\ nil, start_time, task_status \\ nil, options \\ []) do
  # GET /audit/mitigationactions/tasks; nil parameters are dropped
  # from the query string.
  query_ =
    [
      {"auditTaskId", audit_task_id},
      {"endTime", end_time},
      {"findingId", finding_id},
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"startTime", start_time},
      {"taskStatus", task_status},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/audit/mitigationactions/tasks", query_, [], nil, options, nil)
end
@doc """
Lists your Device Defender audit listings.
"""
def list_audit_suppressions(client, input, options \\ []) do
  # POST /audit/suppressions/list with the caller-supplied payload.
  request(client, :post, "/audit/suppressions/list", [], [], input, options, nil)
end
@doc """
Lists the Device Defender audits that have been performed during a given
time period.
"""
def list_audit_tasks(client, end_time, max_results \\ nil, next_token \\ nil, start_time, task_status \\ nil, task_type \\ nil, options \\ []) do
  # GET /audit/tasks; nil parameters are dropped from the query string.
  query_ =
    [
      {"endTime", end_time},
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"startTime", start_time},
      {"taskStatus", task_status},
      {"taskType", task_type},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/audit/tasks", query_, [], nil, options, nil)
end
@doc """
Lists the authorizers registered in your account.
"""
def list_authorizers(client, ascending_order \\ nil, marker \\ nil, page_size \\ nil, status \\ nil, options \\ []) do
  # GET /authorizers/; nil parameters are dropped from the query string.
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
      {"status", status},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/authorizers/", query_, [], nil, options, nil)
end
@doc """
Lists the billing groups you have created.
"""
def list_billing_groups(client, max_results \\ nil, name_prefix_filter \\ nil, next_token \\ nil, options \\ []) do
  # GET /billing-groups; nil parameters are dropped from the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"namePrefixFilter", name_prefix_filter},
      {"nextToken", next_token},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/billing-groups", query_, [], nil, options, nil)
end
@doc """
Lists the CA certificates registered for your AWS account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
"""
def list_c_a_certificates(client, ascending_order \\ nil, marker \\ nil, page_size \\ nil, options \\ []) do
  # GET /cacertificates; nil parameters are dropped from the query string.
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/cacertificates", query_, [], nil, options, nil)
end
@doc """
Lists the certificates registered in your AWS account.
The results are paginated with a default page size of 25. You can use the
returned marker to retrieve additional results.
"""
def list_certificates(client, ascending_order \\ nil, marker \\ nil, page_size \\ nil, options \\ []) do
  # GET /certificates; nil parameters are dropped from the query string.
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/certificates", query_, [], nil, options, nil)
end
@doc """
List the device certificates signed by the specified CA certificate.
"""
def list_certificates_by_c_a(client, ca_certificate_id, ascending_order \\ nil, marker \\ nil, page_size \\ nil, options \\ []) do
  # GET /certificates-by-ca/{caCertificateId}; nil parameters are
  # dropped from the query string.
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/certificates-by-ca/#{URI.encode(ca_certificate_id)}", query_, [], nil, options, nil)
end
@doc """
List the set of dimensions that are defined for your AWS account.
"""
def list_dimensions(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /dimensions; paginated via maxResults/nextToken when provided.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/dimensions", query_, [], nil, options, nil)
end
@doc """
Gets a list of domain configurations for the user. This list is sorted
alphabetically by domain configuration name.
<note> The domain configuration feature is in public preview and is subject
to change.
</note>
"""
def list_domain_configurations(client, marker \\ nil, page_size \\ nil, service_type \\ nil, options \\ []) do
  # GET /domainConfigurations; nil parameters are dropped from the
  # query string.
  query_ =
    [
      {"marker", marker},
      {"pageSize", page_size},
      {"serviceType", service_type},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/domainConfigurations", query_, [], nil, options, nil)
end
@doc """
Lists the search indices.
"""
def list_indices(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /indices; paginated via maxResults/nextToken when provided.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/indices", query_, [], nil, options, nil)
end
@doc """
Lists the job executions for a job.
"""
def list_job_executions_for_job(client, job_id, max_results \\ nil, next_token \\ nil, status \\ nil, options \\ []) do
  # GET /jobs/{jobId}/things; nil parameters are dropped from the
  # query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"status", status},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/jobs/#{URI.encode(job_id)}/things", query_, [], nil, options, nil)
end
@doc """
Lists the job executions for the specified thing.
"""
def list_job_executions_for_thing(client, thing_name, max_results \\ nil, next_token \\ nil, status \\ nil, options \\ []) do
  # GET /things/{thingName}/jobs; nil parameters are dropped from the
  # query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"status", status},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/things/#{URI.encode(thing_name)}/jobs", query_, [], nil, options, nil)
end
@doc """
Lists jobs.
"""
def list_jobs(client, max_results \\ nil, next_token \\ nil, status \\ nil, target_selection \\ nil, thing_group_id \\ nil, thing_group_name \\ nil, options \\ []) do
  # GET /jobs; nil parameters are dropped from the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"status", status},
      {"targetSelection", target_selection},
      {"thingGroupId", thing_group_id},
      {"thingGroupName", thing_group_name},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/jobs", query_, [], nil, options, nil)
end
@doc """
Gets a list of all mitigation actions that match the specified filter
criteria.
"""
def list_mitigation_actions(client, action_type \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /mitigationactions/actions; nil parameters are dropped from the
  # query string.
  query_ =
    [
      {"actionType", action_type},
      {"maxResults", max_results},
      {"nextToken", next_token},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/mitigationactions/actions", query_, [], nil, options, nil)
end
@doc """
Lists OTA updates.
"""
def list_o_t_a_updates(client, max_results \\ nil, next_token \\ nil, ota_update_status \\ nil, options \\ []) do
  # GET /otaUpdates; nil parameters are dropped from the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"otaUpdateStatus", ota_update_status},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/otaUpdates", query_, [], nil, options, nil)
end
@doc """
Lists certificates that are being transferred but not yet accepted.
"""
def list_outgoing_certificates(client, ascending_order \\ nil, marker \\ nil, page_size \\ nil, options \\ []) do
  # GET /certificates-out-going; nil parameters are dropped from the
  # query string.
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/certificates-out-going", query_, [], nil, options, nil)
end
@doc """
Lists your policies.
"""
def list_policies(client, ascending_order \\ nil, marker \\ nil, page_size \\ nil, options \\ []) do
  # GET /policies; nil parameters are dropped from the query string.
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/policies", query_, [], nil, options, nil)
end
@doc """
Lists the principals associated with the specified policy.
**Note:** This API is deprecated. Please use `ListTargetsForPolicy`
instead.
"""
def list_policy_principals(client, ascending_order \\ nil, marker \\ nil, page_size \\ nil, policy_name, options \\ []) do
  # GET /policy-principals; the policy name travels in the
  # x-amzn-iot-policy header, paging params in the query string.
  headers = if is_nil(policy_name), do: [], else: [{"x-amzn-iot-policy", policy_name}]
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/policy-principals", query_, headers, nil, options, nil)
end
@doc """
Lists the versions of the specified policy and identifies the default
version.
"""
def list_policy_versions(client, policy_name, options \\ []) do
  # GET /policies/{policyName}/version — list all versions of a policy.
  request(client, :get, "/policies/#{URI.encode(policy_name)}/version", [], [], nil, options, nil)
end
@doc """
Lists the policies attached to the specified principal. If you use an
Cognito identity, the ID must be in [AmazonCognito Identity
format](https://docs.aws.amazon.com/cognitoidentity/latest/APIReference/API_GetCredentialsForIdentity.html#API_GetCredentialsForIdentity_RequestSyntax).
**Note:** This API is deprecated. Please use `ListAttachedPolicies`
instead.
"""
def list_principal_policies(client, ascending_order \\ nil, marker \\ nil, page_size \\ nil, principal, options \\ []) do
  # GET /principal-policies; the principal travels in the
  # x-amzn-iot-principal header, paging params in the query string.
  headers = if is_nil(principal), do: [], else: [{"x-amzn-iot-principal", principal}]
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/principal-policies", query_, headers, nil, options, nil)
end
@doc """
Lists the things associated with the specified principal. A principal can
be X.509 certificates, IAM users, groups, and roles, Amazon Cognito
identities or federated identities.
"""
def list_principal_things(client, max_results \\ nil, next_token \\ nil, principal, options \\ []) do
  # GET /principals/things; the principal travels in the
  # x-amzn-principal header, paging params in the query string.
  headers = if is_nil(principal), do: [], else: [{"x-amzn-principal", principal}]
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/principals/things", query_, headers, nil, options, nil)
end
@doc """
A list of fleet provisioning template versions.
"""
def list_provisioning_template_versions(client, template_name, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /provisioning-templates/{templateName}/versions; paginated via
  # maxResults/nextToken when provided.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/provisioning-templates/#{URI.encode(template_name)}/versions", query_, [], nil, options, nil)
end
@doc """
Lists the fleet provisioning templates in your AWS account.
"""
def list_provisioning_templates(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /provisioning-templates; paginated via maxResults/nextToken.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/provisioning-templates", query_, [], nil, options, nil)
end
@doc """
Lists the role aliases registered in your account.
"""
def list_role_aliases(client, ascending_order \\ nil, marker \\ nil, page_size \\ nil, options \\ []) do
  # GET /role-aliases; nil parameters are dropped from the query string.
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"marker", marker},
      {"pageSize", page_size},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/role-aliases", query_, [], nil, options, nil)
end
@doc """
Lists all of your scheduled audits.
"""
def list_scheduled_audits(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /audit/scheduledaudits; paginated via maxResults/nextToken.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/audit/scheduledaudits", query_, [], nil, options, nil)
end
@doc """
Lists the Device Defender security profiles you have created. You can use
filters to list only those security profiles associated with a thing group
or only those associated with your account.
"""
def list_security_profiles(client, dimension_name \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /security-profiles; nil parameters are dropped from the
  # query string.
  query_ =
    [
      {"dimensionName", dimension_name},
      {"maxResults", max_results},
      {"nextToken", next_token},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/security-profiles", query_, [], nil, options, nil)
end
@doc """
Lists the Device Defender security profiles attached to a target (thing
group).
"""
def list_security_profiles_for_target(client, max_results \\ nil, next_token \\ nil, recursive \\ nil, security_profile_target_arn, options \\ []) do
  # GET /security-profiles-for-target; nil parameters are dropped from
  # the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"recursive", recursive},
      {"securityProfileTargetArn", security_profile_target_arn},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/security-profiles-for-target", query_, [], nil, options, nil)
end
@doc """
Lists all of the streams in your AWS account.
"""
def list_streams(client, ascending_order \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /streams; nil parameters are dropped from the query string.
  query_ =
    [
      {"isAscendingOrder", ascending_order},
      {"maxResults", max_results},
      {"nextToken", next_token},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/streams", query_, [], nil, options, nil)
end
@doc """
Lists the tags (metadata) you have assigned to the resource.
"""
def list_tags_for_resource(client, next_token \\ nil, resource_arn, options \\ []) do
  # GET /tags for the given resource ARN; nextToken pages the results.
  query_ =
    [{"nextToken", next_token}, {"resourceArn", resource_arn}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/tags", query_, [], nil, options, nil)
end
@doc """
List targets for the specified policy.
"""
def list_targets_for_policy(client, policy_name, input, options \\ []) do
  # POST /policy-targets/{policyName}; paging params are lifted out of
  # the input map into the query string by build_params/2.
  url = "/policy-targets/#{URI.encode(policy_name)}"
  param_mapping = [
    {"marker", "marker"},
    {"pageSize", "pageSize"},
  ]
  {query_, input} = AWS.Request.build_params(param_mapping, input)
  request(client, :post, url, query_, [], input, options, nil)
end
@doc """
Lists the targets (thing groups) associated with a given Device Defender
security profile.
"""
def list_targets_for_security_profile(client, security_profile_name, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /security-profiles/{name}/targets; paginated via
  # maxResults/nextToken when provided.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/security-profiles/#{URI.encode(security_profile_name)}/targets", query_, [], nil, options, nil)
end
@doc """
List the thing groups in your account.
"""
def list_thing_groups(client, max_results \\ nil, name_prefix_filter \\ nil, next_token \\ nil, parent_group \\ nil, recursive \\ nil, options \\ []) do
  # GET /thing-groups; nil parameters are dropped from the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"namePrefixFilter", name_prefix_filter},
      {"nextToken", next_token},
      {"parentGroup", parent_group},
      {"recursive", recursive},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/thing-groups", query_, [], nil, options, nil)
end
@doc """
List the thing groups to which the specified thing belongs.
"""
def list_thing_groups_for_thing(client, thing_name, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /things/{thingName}/thing-groups; paginated via
  # maxResults/nextToken when provided.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/things/#{URI.encode(thing_name)}/thing-groups", query_, [], nil, options, nil)
end
@doc """
Lists the principals associated with the specified thing. A principal can
be X.509 certificates, IAM users, groups, and roles, Amazon Cognito
identities or federated identities.
"""
def list_thing_principals(client, thing_name, options \\ []) do
  # GET /things/{thingName}/principals — principals attached to a thing.
  request(client, :get, "/things/#{URI.encode(thing_name)}/principals", [], [], nil, options, nil)
end
@doc """
Information about the thing registration tasks.
"""
def list_thing_registration_task_reports(client, task_id, max_results \\ nil, next_token \\ nil, report_type, options \\ []) do
  # GET /thing-registration-tasks/{taskId}/reports; nil parameters are
  # dropped from the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"reportType", report_type},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/thing-registration-tasks/#{URI.encode(task_id)}/reports", query_, [], nil, options, nil)
end
@doc """
List bulk thing provisioning tasks.
"""
def list_thing_registration_tasks(client, max_results \\ nil, next_token \\ nil, status \\ nil, options \\ []) do
  # GET /thing-registration-tasks; nil parameters are dropped from the
  # query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"status", status},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/thing-registration-tasks", query_, [], nil, options, nil)
end
@doc """
Lists the existing thing types.
"""
def list_thing_types(client, max_results \\ nil, next_token \\ nil, thing_type_name \\ nil, options \\ []) do
  # GET /thing-types; nil parameters are dropped from the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"thingTypeName", thing_type_name},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/thing-types", query_, [], nil, options, nil)
end
@doc """
Lists your things. Use the **attributeName** and **attributeValue**
parameters to filter your things. For example, calling `ListThings` with
attributeName=Color and attributeValue=Red retrieves all things in the
registry that contain an attribute **Color** with the value **Red**.
<note> You will not be charged for calling this API if an `Access denied`
error is returned. You will also not be charged if no attributes or
pagination token was provided in request and no pagination token and no
results were returned.
</note>
"""
def list_things(client, attribute_name \\ nil, attribute_value \\ nil, max_results \\ nil, next_token \\ nil, thing_type_name \\ nil, options \\ []) do
  # GET /things; attribute name/value filters and pagination params are
  # only included in the query string when non-nil.
  query_ =
    [
      {"attributeName", attribute_name},
      {"attributeValue", attribute_value},
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"thingTypeName", thing_type_name},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/things", query_, [], nil, options, nil)
end
@doc """
Lists the things you have added to the given billing group.
"""
def list_things_in_billing_group(client, billing_group_name, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /billing-groups/{name}/things; paginated via maxResults/nextToken.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/billing-groups/#{URI.encode(billing_group_name)}/things", query_, [], nil, options, nil)
end
@doc """
Lists the things in the specified group.
"""
def list_things_in_thing_group(client, thing_group_name, max_results \\ nil, next_token \\ nil, recursive \\ nil, options \\ []) do
  # GET /thing-groups/{name}/things; nil parameters are dropped from
  # the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"recursive", recursive},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/thing-groups/#{URI.encode(thing_group_name)}/things", query_, [], nil, options, nil)
end
@doc """
Lists all the topic rule destinations in your AWS account.
"""
def list_topic_rule_destinations(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # GET /destinations; paginated via maxResults/nextToken when provided.
  query_ =
    [{"maxResults", max_results}, {"nextToken", next_token}]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/destinations", query_, [], nil, options, nil)
end
@doc """
Lists the rules for the specific topic.
"""
def list_topic_rules(client, max_results \\ nil, next_token \\ nil, rule_disabled \\ nil, topic \\ nil, options \\ []) do
  # GET /rules; nil parameters are dropped from the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"ruleDisabled", rule_disabled},
      {"topic", topic},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/rules", query_, [], nil, options, nil)
end
@doc """
Lists logging levels.
"""
def list_v2_logging_levels(client, max_results \\ nil, next_token \\ nil, target_type \\ nil, options \\ []) do
  # GET /v2LoggingLevel; nil parameters are dropped from the query string.
  query_ =
    [
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"targetType", target_type},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/v2LoggingLevel", query_, [], nil, options, nil)
end
@doc """
Lists the Device Defender security profile violations discovered during the
given time period. You can use filters to limit the results to those alerts
issued for a particular security profile, behavior, or thing (device).
"""
def list_violation_events(client, end_time, max_results \\ nil, next_token \\ nil, security_profile_name \\ nil, start_time, thing_name \\ nil, options \\ []) do
  # GET /violation-events; nil parameters are dropped from the
  # query string.
  query_ =
    [
      {"endTime", end_time},
      {"maxResults", max_results},
      {"nextToken", next_token},
      {"securityProfileName", security_profile_name},
      {"startTime", start_time},
      {"thingName", thing_name},
    ]
    |> Enum.reject(fn {_name, value} -> is_nil(value) end)
  request(client, :get, "/violation-events", query_, [], nil, options, nil)
end
@doc """
Registers a CA certificate with AWS IoT. This CA certificate can then be
used to sign device certificates, which can be then registered with AWS
IoT. You can register up to 10 CA certificates per AWS account that have
the same subject field. This enables you to have up to 10 certificate
authorities sign your device certificates. If you have more than one CA
certificate registered, make sure you pass the CA certificate when you
register your device certificates with the RegisterCertificate API.
"""
def register_c_a_certificate(client, input, options \\ []) do
  # POST /cacertificate; the two flags are lifted out of the input map
  # into the query string by build_params/2.
  param_mapping = [
    {"allowAutoRegistration", "allowAutoRegistration"},
    {"setAsActive", "setAsActive"},
  ]
  {query_, input} = AWS.Request.build_params(param_mapping, input)
  request(client, :post, "/cacertificate", query_, [], input, options, nil)
end
@doc """
Registers a device certificate with AWS IoT. If you have more than one CA
certificate that has the same subject field, you must specify the CA
certificate that was used to sign the device certificate being registered.
"""
def register_certificate(client, input, options \\ []) do
  # POST /certificate/register; setAsActive is lifted out of the input
  # map into the query string by build_params/2.
  param_mapping = [
    {"setAsActive", "setAsActive"},
  ]
  {query_, input} = AWS.Request.build_params(param_mapping, input)
  request(client, :post, "/certificate/register", query_, [], input, options, nil)
end
@doc """
Register a certificate that does not have a certificate authority (CA).
"""
def register_certificate_without_c_a(client, input, options \\ []) do
  # POST /certificate/register-no-ca with the caller-supplied payload.
  request(client, :post, "/certificate/register-no-ca", [], [], input, options, nil)
end
@doc """
Provisions a thing in the device registry. RegisterThing calls other AWS
IoT control plane APIs. These calls might exceed your account level [ AWS
IoT Throttling
Limits](https://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_iot)
and cause throttle errors. Please contact [AWS Customer
Support](https://console.aws.amazon.com/support/home) to raise your
throttling limits if necessary.
"""
def register_thing(client, input, options \\ []) do
  # POST /things — provision a thing in the device registry.
  request(client, :post, "/things", [], [], input, options, nil)
end
@doc """
Rejects a pending certificate transfer. After AWS IoT rejects a certificate
transfer, the certificate status changes from **PENDING_TRANSFER** to
**INACTIVE**.
To check for pending certificate transfers, call `ListCertificates` to
enumerate your certificates.
This operation can only be called by the transfer destination. After it is
called, the certificate will be returned to the source's account in the
INACTIVE state.
"""
def reject_certificate_transfer(client, certificate_id, input, options \\ []) do
  # PATCH /reject-certificate-transfer/{certificateId}.
  request(client, :patch, "/reject-certificate-transfer/#{URI.encode(certificate_id)}", [], [], input, options, nil)
end
@doc """
Removes the given thing from the billing group.
"""
def remove_thing_from_billing_group(client, input, options \\ []) do
  # PUT /billing-groups/removeThingFromBillingGroup with `input` as the body.
  path_ = "/billing-groups/removeThingFromBillingGroup"
  headers = []
  query_ = []
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Remove the specified thing from the specified group.
You must specify either a `thingGroupArn` or a `thingGroupName` to identify
the thing group and either a `thingArn` or a `thingName` to identify the
thing to remove from the thing group.
"""
def remove_thing_from_thing_group(client, input, options \\ []) do
  # PUT /thing-groups/removeThingFromThingGroup with `input` as the body.
  path_ = "/thing-groups/removeThingFromThingGroup"
  headers = []
  query_ = []
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Replaces the rule. You must specify all parameters for the new rule.
Creating rules is an administrator-level action. Any user who has
permission to create rules will be able to access data processed by the
rule.
"""
def replace_topic_rule(client, rule_name, input, options \\ []) do
  # PATCH /rules/{ruleName}; rule name is URI-encoded into the path.
  path_ = "/rules/#{URI.encode(rule_name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
The query search index.
"""
def search_index(client, input, options \\ []) do
  # POST /indices/search with the query document in `input`.
  path_ = "/indices/search"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Sets the default authorizer. This will be used if a websocket connection is
made without specifying an authorizer.
"""
def set_default_authorizer(client, input, options \\ []) do
  # POST /default-authorizer with `input` as the body.
  path_ = "/default-authorizer"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Sets the specified version of the specified policy as the policy's default
(operative) version. This action affects all certificates to which the
policy is attached. To list the principals the policy is attached to, use
the ListPrincipalPolicy API.
"""
def set_default_policy_version(client, policy_name, policy_version_id, input, options \\ []) do
  # PATCH /policies/{policyName}/version/{policyVersionId}; both URI-encoded.
  path_ = "/policies/#{URI.encode(policy_name)}/version/#{URI.encode(policy_version_id)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Sets the logging options.
NOTE: use of this command is not recommended. Use `SetV2LoggingOptions`
instead.
"""
def set_logging_options(client, input, options \\ []) do
  # POST /loggingOptions (legacy API; see SetV2LoggingOptions).
  path_ = "/loggingOptions"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Sets the logging level.
"""
def set_v2_logging_level(client, input, options \\ []) do
  # POST /v2LoggingLevel with `input` as the body.
  path_ = "/v2LoggingLevel"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Sets the logging options for the V2 logging service.
"""
def set_v2_logging_options(client, input, options \\ []) do
  # POST /v2LoggingOptions with `input` as the body.
  path_ = "/v2LoggingOptions"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Starts a task that applies a set of mitigation actions to the specified
target.
"""
def start_audit_mitigation_actions_task(client, task_id, input, options \\ []) do
  # POST /audit/mitigationactions/tasks/{taskId}; task id is URI-encoded.
  path_ = "/audit/mitigationactions/tasks/#{URI.encode(task_id)}"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Starts an on-demand Device Defender audit.
"""
def start_on_demand_audit_task(client, input, options \\ []) do
  # POST /audit/tasks with `input` as the body.
  path_ = "/audit/tasks"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates a bulk thing provisioning task.
"""
def start_thing_registration_task(client, input, options \\ []) do
  # POST /thing-registration-tasks with `input` as the body.
  path_ = "/thing-registration-tasks"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Cancels a bulk thing provisioning task.
"""
def stop_thing_registration_task(client, task_id, input, options \\ []) do
  # PUT /thing-registration-tasks/{taskId}/cancel; task id is URI-encoded.
  path_ = "/thing-registration-tasks/#{URI.encode(task_id)}/cancel"
  headers = []
  query_ = []
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Adds to or modifies the tags of the given resource. Tags are metadata which
can be used to manage a resource.
"""
def tag_resource(client, input, options \\ []) do
  # POST /tags with the resource ARN and tag list in `input`.
  path_ = "/tags"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Tests if a specified principal is authorized to perform an AWS IoT action
on a specified resource. Use this to test and debug the authorization
behavior of devices that connect to the AWS IoT device gateway.
"""
def test_authorization(client, input, options \\ []) do
  path_ = "/test-authorization"
  headers = []
  # "clientId" is lifted out of `input` into the query string (presumably by
  # AWS.Request.build_params/2 — confirm against that module).
  {query_, input} =
    [
      {"clientId", "clientId"},
    ]
    |> AWS.Request.build_params(input)
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Tests a custom authorization behavior by invoking a specified custom
authorizer. Use this to test and debug the custom authorization behavior of
devices that connect to the AWS IoT device gateway.
"""
def test_invoke_authorizer(client, authorizer_name, input, options \\ []) do
  # POST /authorizer/{authorizerName}/test; name is URI-encoded.
  path_ = "/authorizer/#{URI.encode(authorizer_name)}/test"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Transfers the specified certificate to the specified AWS account.
You can cancel the transfer until it is acknowledged by the recipient.
No notification is sent to the transfer destination's account. It is up to
the caller to notify the transfer target.
The certificate being transferred must not be in the ACTIVE state. You can
use the UpdateCertificate API to deactivate it.
The certificate must not have any policies attached to it. You can use the
DetachPrincipalPolicy API to detach them.
"""
def transfer_certificate(client, certificate_id, input, options \\ []) do
  # PATCH /transfer-certificate/{certificateId}; the target account id is
  # moved from `input` into the query string.
  path_ = "/transfer-certificate/#{URI.encode(certificate_id)}"
  headers = []
  {query_, input} =
    [
      {"targetAwsAccount", "targetAwsAccount"},
    ]
    |> AWS.Request.build_params(input)
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Removes the given tags (metadata) from the resource.
"""
def untag_resource(client, input, options \\ []) do
  # POST /untag with the resource ARN and tag keys in `input`.
  path_ = "/untag"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Configures or reconfigures the Device Defender audit settings for this
account. Settings include how audit notifications are sent and which audit
checks are enabled or disabled.
"""
def update_account_audit_configuration(client, input, options \\ []) do
  # PATCH /audit/configuration with `input` as the body.
  path_ = "/audit/configuration"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates a Device Defender audit suppression.
"""
def update_audit_suppression(client, input, options \\ []) do
  # PATCH /audit/suppressions/update with `input` as the body.
  path_ = "/audit/suppressions/update"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates an authorizer.
"""
def update_authorizer(client, authorizer_name, input, options \\ []) do
  # PUT /authorizer/{authorizerName}; name is URI-encoded.
  path_ = "/authorizer/#{URI.encode(authorizer_name)}"
  headers = []
  query_ = []
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates information about the billing group.
"""
def update_billing_group(client, billing_group_name, input, options \\ []) do
  # PATCH /billing-groups/{billingGroupName}; name is URI-encoded.
  path_ = "/billing-groups/#{URI.encode(billing_group_name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates a registered CA certificate.
"""
def update_c_a_certificate(client, certificate_id, input, options \\ []) do
  # PUT /cacertificate/{certificateId}; the two status fields are moved from
  # `input` into the query string.
  path_ = "/cacertificate/#{URI.encode(certificate_id)}"
  headers = []
  {query_, input} =
    [
      {"newAutoRegistrationStatus", "newAutoRegistrationStatus"},
      {"newStatus", "newStatus"},
    ]
    |> AWS.Request.build_params(input)
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates the status of the specified certificate. This operation is
idempotent.
Certificates must be in the ACTIVE state to authenticate devices that use a
certificate to connect to AWS IoT.
Within a few minutes of updating a certificate from the ACTIVE state to any
other state, AWS IoT disconnects all devices that used that certificate to
connect. Devices cannot use a certificate that is not in the ACTIVE state
to reconnect.
"""
def update_certificate(client, certificate_id, input, options \\ []) do
  # PUT /certificates/{certificateId}; "newStatus" goes into the query string.
  path_ = "/certificates/#{URI.encode(certificate_id)}"
  headers = []
  {query_, input} =
    [
      {"newStatus", "newStatus"},
    ]
    |> AWS.Request.build_params(input)
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates the definition for a dimension. You cannot change the type of a
dimension after it is created (you can delete it and re-create it).
"""
def update_dimension(client, name, input, options \\ []) do
  # PATCH /dimensions/{name}; name is URI-encoded.
  path_ = "/dimensions/#{URI.encode(name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates values stored in the domain configuration. Domain configurations
for default endpoints can't be updated.
<note> The domain configuration feature is in public preview and is subject
to change.
</note>
"""
def update_domain_configuration(client, domain_configuration_name, input, options \\ []) do
  # PUT /domainConfigurations/{domainConfigurationName}; name is URI-encoded.
  path_ = "/domainConfigurations/#{URI.encode(domain_configuration_name)}"
  headers = []
  query_ = []
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates a dynamic thing group.
"""
def update_dynamic_thing_group(client, thing_group_name, input, options \\ []) do
  # PATCH /dynamic-thing-groups/{thingGroupName}; name is URI-encoded.
  path_ = "/dynamic-thing-groups/#{URI.encode(thing_group_name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates the event configurations.
"""
def update_event_configurations(client, input, options \\ []) do
  # PATCH /event-configurations with `input` as the body.
  path_ = "/event-configurations"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates the search configuration.
"""
def update_indexing_configuration(client, input, options \\ []) do
  # POST /indexing/config with `input` as the body.
  path_ = "/indexing/config"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Updates supported fields of the specified job.
"""
def update_job(client, job_id, input, options \\ []) do
  # PATCH /jobs/{jobId}; job id is URI-encoded.
  path_ = "/jobs/#{URI.encode(job_id)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates the definition for the specified mitigation action.
"""
def update_mitigation_action(client, action_name, input, options \\ []) do
  # PATCH /mitigationactions/actions/{actionName}; name is URI-encoded.
  path_ = "/mitigationactions/actions/#{URI.encode(action_name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates a fleet provisioning template.
"""
def update_provisioning_template(client, template_name, input, options \\ []) do
  # PATCH /provisioning-templates/{templateName}; name is URI-encoded.
  path_ = "/provisioning-templates/#{URI.encode(template_name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates a role alias.
"""
def update_role_alias(client, role_alias, input, options \\ []) do
  # PUT /role-aliases/{roleAlias}; alias is URI-encoded.
  path_ = "/role-aliases/#{URI.encode(role_alias)}"
  headers = []
  query_ = []
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates a scheduled audit, including which checks are performed and how
often the audit takes place.
"""
def update_scheduled_audit(client, scheduled_audit_name, input, options \\ []) do
  # PATCH /audit/scheduledaudits/{scheduledAuditName}; name is URI-encoded.
  path_ = "/audit/scheduledaudits/#{URI.encode(scheduled_audit_name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates a Device Defender security profile.
"""
def update_security_profile(client, security_profile_name, input, options \\ []) do
  # PATCH /security-profiles/{securityProfileName}; the optimistic-locking
  # "expectedVersion" is moved from `input` into the query string.
  path_ = "/security-profiles/#{URI.encode(security_profile_name)}"
  headers = []
  {query_, input} =
    [
      {"expectedVersion", "expectedVersion"},
    ]
    |> AWS.Request.build_params(input)
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates an existing stream. The stream version will be incremented by one.
"""
def update_stream(client, stream_id, input, options \\ []) do
  # PUT /streams/{streamId}; stream id is URI-encoded.
  path_ = "/streams/#{URI.encode(stream_id)}"
  headers = []
  query_ = []
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates the data for a thing.
"""
def update_thing(client, thing_name, input, options \\ []) do
  # PATCH /things/{thingName}; name is URI-encoded.
  path_ = "/things/#{URI.encode(thing_name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Update a thing group.
"""
def update_thing_group(client, thing_group_name, input, options \\ []) do
  # PATCH /thing-groups/{thingGroupName}; name is URI-encoded.
  path_ = "/thing-groups/#{URI.encode(thing_group_name)}"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Updates the groups to which the thing belongs.
"""
def update_thing_groups_for_thing(client, input, options \\ []) do
  # PUT /thing-groups/updateThingGroupsForThing with `input` as the body.
  path_ = "/thing-groups/updateThingGroupsForThing"
  headers = []
  query_ = []
  request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates a topic rule destination. You use this to change the status,
endpoint URL, or confirmation URL of the destination.
"""
def update_topic_rule_destination(client, input, options \\ []) do
  # PATCH /destinations with `input` as the body.
  path_ = "/destinations"
  headers = []
  query_ = []
  request(client, :patch, path_, query_, headers, input, options, nil)
end
@doc """
Validates a Device Defender security profile behaviors specification.
"""
def validate_security_profile_behaviors(client, input, options \\ []) do
  # POST /security-profile-behaviors/validate with `input` as the body.
  path_ = "/security-profile-behaviors/validate"
  headers = []
  query_ = []
  request(client, :post, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
# Common funnel for every operation in this module: resolve the host,
# build the URL + query string, sign the request and perform it.
defp request(client, method, path, query, headers, input, options, success_status_code) do
  client = %{client | service: "execute-api"}
  host = build_host("iot", client)
  url = host
        |> build_url(path, client)
        |> add_query(query, client)
  additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
  headers = AWS.Request.add_headers(additional_headers, headers)
  payload = encode!(client, input)
  # SigV4 signing must happen last, once the headers and payload are final.
  headers = AWS.Request.sign_v4(client, method, url, headers, payload)
  perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
  case AWS.Client.request(client, method, url, payload, headers, options) do
    # The stacked `when` clauses act as an OR: either no explicit success
    # code was requested (nil) and the status is a generic HTTP success,
    # or the status equals the explicitly requested code.
    {:ok, %{status_code: status_code, body: body} = response}
    when is_nil(success_status_code) and status_code in [200, 202, 204]
    when status_code == success_status_code ->
      # An empty body yields nil (the `if` has no else branch).
      body = if(body != "", do: decode!(client, body))
      {:ok, body, response}
    {:ok, response} ->
      {:error, {:unexpected_response, response}}
    error = {:error, _reason} -> error
  end
end
# Resolves the host for the service endpoint. The "local" region
# short-circuits to a custom endpoint (if set) or to localhost.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  Enum.join([endpoint_prefix, region, endpoint], ".")
end
# Builds the full request URL from the client's protocol and port settings.
defp build_url(host, path, %{:proto => proto, :port => port}) do
  "#{proto}://#{host}:#{port}#{path}"
end
# Appends an encoded query string to `url`; no-op for an empty query.
defp add_query(url, [], _client), do: url

defp add_query(url, query, client) do
  "#{url}?#{encode!(client, query, :query)}"
end
# Encodes `payload` via the client's configured encoder (JSON by default).
defp encode!(client, payload, format \\ :json) do
  AWS.Client.encode!(client, payload, format)
end
# Decodes a JSON response body via the client's configured decoder.
defp decode!(client, payload) do
  AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/iot.ex
| 0.796213
| 0.531635
|
iot.ex
|
starcoder
|
defmodule BertInt do
  @moduledoc """
  Binary Erlang Term encoding for internal node-to-node encoding.
  """

  @doc "Serializes any term with `:erlang.term_to_binary/1` and zlib-compresses it."
  @spec encode!(any()) :: binary()
  def encode!(term) do
    term
    |> :erlang.term_to_binary()
    |> :zlib.zip()
  end

  @doc """
  Decompresses and deserializes a binary produced by `encode!/1`.

  Falls back to treating `term` as an uncompressed payload when it is not
  valid zlib data. Decoding uses the `:safe` option so the (potentially
  remote) input cannot create new atoms; use `decode_unsafe!/1` for locally
  created data that may contain atoms unknown to this node.
  """
  @spec decode!(binary()) :: any()
  def decode!(term) do
    try do
      :zlib.unzip(term)
    rescue
      # :zlib raises ErlangError (:data_error) on non-zlib input.
      # (The original rescue list also contained the atom `:data_error`,
      # which rescue treats as a module name that can never match.)
      ErlangError ->
        term
    end
    |> :erlang.binary_to_term([:safe])
  end

  @doc """
  decode! variant for decoding locally created files, can decode atoms.
  """
  @spec decode_unsafe!(binary()) :: any()
  def decode_unsafe!(term) do
    try do
      :zlib.unzip(term)
    rescue
      ErlangError ->
        term
    end
    |> :erlang.binary_to_term()
  end
end
defmodule BertExt do
  @moduledoc """
  Binary Erlang Term encoding for external peers.

  Structs are deliberately rejected before encoding, and decoding uses the
  `:safe` option so external input cannot create new atoms.
  """

  @spec encode!(any()) :: binary()
  def encode!(term) do
    :erlang.term_to_binary(term_to_binary(term))
  end

  # Recursively normalizes terms before serialization. Structs must not leak
  # to external peers, so this crashes (MatchError) when given one.
  # The original `^map = Map.from_struct(map)` assertion relied on
  # Map.from_struct/1 accepting plain maps, which newer Elixir versions
  # reject, so the check is expressed with is_struct/1 instead.
  defp term_to_binary(map) when is_map(map) do
    false = is_struct(map)

    Map.new(map, fn {key, value} -> {key, term_to_binary(value)} end)
  end

  defp term_to_binary(list) when is_list(list) do
    Enum.map(list, &term_to_binary/1)
  end

  defp term_to_binary(tuple) when is_tuple(tuple) do
    tuple
    |> Tuple.to_list()
    |> Enum.map(&term_to_binary/1)
    |> List.to_tuple()
  end

  defp term_to_binary(other), do: other

  @spec decode!(binary()) :: any()
  def decode!(term) do
    :erlang.binary_to_term(term, [:safe])
  end
end
defmodule ZBert do
  require Record

  # A zbert record bundles one inflate stream, one deflate stream and the
  # codec module (e.g. BertInt) used for term (de)serialization.
  Record.defrecord(:zbert, in_stream: nil, out_stream: nil, module: nil)

  @doc "Opens and initializes the zlib streams for a new codec state."
  def init(mod) do
    deflate_stream = :zlib.open()
    :ok = :zlib.deflateInit(deflate_stream)

    inflate_stream = :zlib.open()
    :ok = :zlib.inflateInit(inflate_stream)

    zbert(in_stream: inflate_stream, out_stream: deflate_stream, module: mod)
  end

  @doc "Encodes `term` with the codec module and deflates it on the out stream."
  def encode!(zbert(out_stream: stream, module: mod), term) do
    :zlib.deflate(stream, mod.encode!(term), :sync)
  end

  @doc "Inflates `data` on the in stream and decodes it with the codec module."
  def decode!(zbert(in_stream: stream, module: mod), data) do
    stream
    |> :zlib.inflate(data)
    |> mod.decode!()
  end
end
|
lib/bert.ex
| 0.744656
| 0.440168
|
bert.ex
|
starcoder
|
defmodule SecureStorage.Schema.EncryptedMessage do
  @moduledoc """
  Schema for an encrypted message
  # States
  - New: EncryptedMessage created but has no significance
  - Pending: EncryptedMessage waiting for encryption (Used for incoming)
  - Encryption Failed: Failed to encrypted message
  - Encrypted: Message encrypted and ready for decryption
  - Expired: Beyond exp_at
  - No attempts left: All attempts used
  - No reads left: All reads used
  """
  use SecureStorage.Schema

  # Last minute of y 2999 ~ infinity
  @default_exp_at ~U[2999-12-31 23:59:59Z]

  # Supported encryption algorithms (`plain` means the text is stored as-is).
  @encryption_algos ~w(
    plain
    aes_gcm
  )a

  # Lifecycle states; see the state list in @moduledoc.
  @states ~w(
    new
    pending
    encryption_failed
    encrypted
    expired
    no_attempts_left
    no_reads_left
  )a

  # Fields accepted by cast/3 in changeset/2.
  @castable_fields ~w(
    short_description
    description
    password_hint
    encryption_algo
    txt
    max_attempts
    max_reads
    enc_at
    exp_at
    state
  )a

  # Required once a message has progressed beyond the :new state.
  @required_fields_encrypted ~w(
    short_description
    txt
    enc_at
    exp_at
  )a

  # Required while the message is still :new.
  @required_fields_new ~w(short_description)a

  @primary_key {:id, :binary_id, autogenerate: true}
  schema "encrypted_messages" do
    # This is for searching and is indexed
    field(:short_description, :string)
    # Bigger non-indexed description
    field(:description, :string)
    # Password hint for decryption
    field(:password_hint, :string)
    # Algorithm used to encrypt the message
    field(:encryption_algo, Ecto.Enum,
      values: @encryption_algos,
      default: :aes_gcm
    )
    # Encrypted text
    field(:txt, :string)
    # Max unsuccessful decryption attempts
    field(:max_attempts, :integer, default: 999)
    # Max number of times decrypted and read
    field(:max_reads, :integer, default: 999)
    # Timestamps for encrypted at and expiration at
    field(:enc_at, :utc_datetime)
    field(:exp_at, :utc_datetime, default: @default_exp_at)
    # State of the message
    field(:state, Ecto.Enum, values: @states, default: :new)
    # Stores information regarding unsuccessful attempts
    embeds_many :attempts, Attempt do
      field(:ip, :string)
      field(:at, :utc_datetime)
      field(:failure_reason, :string)
    end
    # Stores information regarding reads
    embeds_many :reads, Read do
      field(:ip, :string)
      field(:at, :utc_datetime)
    end
    timestamps()
  end

  @impl true
  def changeset(struct \\ %__MODULE__{}, params) do
    struct
    |> cast(params, @castable_fields)
    |> cast_embed(:attempts, with: &attempt_changeset/2)
    |> cast_embed(:reads, with: &read_changeset/2)
    # NOTE(review): these two validate_inclusion calls look redundant —
    # Ecto.Enum casting already rejects values outside `values:` — confirm
    # before removing.
    |> validate_inclusion(:state, @states)
    |> validate_inclusion(:encryption_algo, @encryption_algos)
    |> validate_length(:short_description, max: 100)
    |> validate_state_fields_parity()
  end

  # Changeset for one failed decryption attempt entry.
  defp attempt_changeset(attempt, params) do
    attempt
    |> cast(params, [:ip, :at, :failure_reason])
    |> validate_required([:ip, :at, :failure_reason])
  end

  # Changeset for one successful read entry.
  defp read_changeset(read, params) do
    read
    |> cast(params, [:ip, :at])
    |> validate_required([:ip, :at])
  end

  # Required fields depend on the (possibly just-changed) state: a :new
  # message only needs a short_description, any other state also requires
  # the encrypted payload fields.
  defp validate_state_fields_parity(changeset) do
    %__MODULE__{state: state} = apply_changes(changeset)
    case state do
      :new -> validate_required(changeset, @required_fields_new)
      _ -> validate_required(changeset, @required_fields_encrypted)
    end
  end
end
|
apps/secure_storage/lib/secure_storage/schema/encrypted_message.ex
| 0.850949
| 0.4016
|
encrypted_message.ex
|
starcoder
|
defmodule QRCode.Svg do
  @moduledoc """
  SVG structure and helper functions.
  """
  alias MatrixReloaded.Matrix
  alias QRCode.{QR, SvgSettings}

  @type t :: %__MODULE__{
          xmlns: String.t(),
          xlink: String.t(),
          width: ExMaybe.t(integer),
          height: ExMaybe.t(integer),
          body: String.t(),
          rank_matrix: ExMaybe.t(pos_integer)
        }

  defstruct xmlns: "http://www.w3.org/2000/svg",
            xlink: "http://www.w3.org/1999/xlink",
            width: nil,
            height: nil,
            body: nil,
            rank_matrix: nil

  @doc """
  Create Svg structure from QR matrix as binary. This binary contains svg
  attributes and svg elements.
  """
  @spec create(QR.t(), SvgSettings.t()) :: binary()
  def create(%QR{matrix: matrix}, settings \\ %SvgSettings{}) do
    create_svg(matrix, settings)
  end

  @doc """
  Saves QR code to svg file. This function returns [Result](https://hexdocs.pm/result/api-reference.html),
  it means either tuple of `{:ok, "path/to/file.svg"}` or `{:error, reason}`.
  Also there are a few settings for svg:
  ```elixir
  | Setting          | Type                | Default value | Description             |
  |------------------|---------------------|---------------|-------------------------|
  | scale            | positive integer    | 10            | scale for svg QR code   |
  | background_color | string or {r, g, b} | "#ffffff"     | background color of svg |
  | qrcode_color     | string or {r, g, b} | "#000000"     | color of QR code        |
  | format           | :none or :indent    | :none         | indentation of elements |
  ```
  By this option, you can set the background of QR code, QR code colors or size QR code. The
  format option is for removing indentation (of elements) in a svg file.
  Let's see an example below:
      iex> settings = %QRCode.SvgSettings{qrcode_color: {17, 170, 136}}
      iex> qr = QRCode.QR.create("your_string")
      iex> qr |> Result.and_then(&QRCode.Svg.save_as(&1,"/tmp/your_name.svg", settings))
      {:ok, "/tmp/your_name.svg"}
  The svg file will be saved into your tmp directory.
  
  """
  @spec save_as(QR.t(), Path.t(), SvgSettings.t()) ::
          Result.t(String.t() | File.posix() | :badarg | :terminated, Path.t())
  def save_as(%QR{matrix: matrix}, svg_name, settings \\ %SvgSettings{}) do
    matrix
    |> create_svg(settings)
    |> save(svg_name)
  end

  @doc """
  Create Svg structure from QR matrix as binary and encode it into a base 64.
  This encoded string can be then used in Html as
  `<img src="data:image/svg+xml; base64, encoded_svg_qr_code" />`
  """
  @spec to_base64(QR.t(), SvgSettings.t()) :: binary()
  def to_base64(%QR{matrix: matrix}, settings \\ %SvgSettings{}) do
    matrix
    |> create_svg(settings)
    |> Base.encode64()
  end

  # Shared pipeline for create/2, save_as/3 and to_base64/2.
  defp create_svg(matrix, settings) do
    matrix
    |> construct_body(%__MODULE__{}, settings)
    |> construct_svg(settings)
  end

  # Renders one <rect> per dark (1) module and records the matrix rank
  # (row count), which later determines the overall svg width/height.
  defp construct_body(matrix, svg, %SvgSettings{qrcode_color: qc, scale: scale}) do
    {rank_matrix, _} = Matrix.size(matrix)
    %{
      svg
      | body:
          matrix
          |> find_nonzero_element()
          |> Enum.map(&create_rect(&1, scale, qc)),
        rank_matrix: rank_matrix
    }
  end

  # Wraps the body rects (behind a full-size background rect) into the
  # root <svg> element and generates the final binary via XmlBuilder.
  defp construct_svg(
         %__MODULE__{
           xmlns: xmlns,
           xlink: xlink,
           body: body,
           rank_matrix: rank_matrix
         },
         %SvgSettings{background_color: bg, scale: scale, format: format}
       ) do
    {:svg,
     %{
       xmlns: xmlns,
       xlink: xlink,
       width: rank_matrix * scale,
       height: rank_matrix * scale
     }, [background_rect(bg) | body]}
    |> XmlBuilder.generate(format: format)
  end

  # open -> write -> close, threading {:ok, _}/{:error, _} Result tuples.
  defp save(svg, svg_name) do
    svg_name
    |> File.open([:write])
    |> Result.and_then(&write(&1, svg))
    |> Result.and_then(&close(&1, svg_name))
  end

  defp write(file, svg) do
    case IO.binwrite(file, svg) do
      :ok -> {:ok, file}
      err -> err
    end
  end

  defp close(file, svg_name) do
    case File.close(file) do
      :ok -> {:ok, svg_name}
      err -> err
    end
  end

  # Helpers
  defp create_rect({x_pos, y_pos}, scale, color) do
    {:rect,
     %{width: scale, height: scale, x: scale * x_pos, y: scale * y_pos, fill: to_hex(color)}, nil}
  end

  defp background_rect(color) do
    {:rect, %{width: "100%", height: "100%", fill: to_hex(color)}, nil}
  end

  # Returns {row, col} coordinates of all dark modules. The reduce clauses
  # only match cell values 0 and 1 — any other value would raise.
  defp find_nonzero_element(matrix) do
    matrix
    |> Enum.with_index()
    |> Enum.map(fn {row, i} ->
      row
      |> Enum.with_index()
      |> Enum.reduce([], fn
        {0, _}, acc -> acc
        {1, j}, acc -> [{i, j} | acc]
      end)
    end)
    |> List.flatten()
  end

  # {r, g, b} -> "#RRGGBB"; encode_unsigned/Base.encode16 yield two hex
  # digits per 0..255 component. Strings pass through unchanged.
  defp to_hex(color) when is_tuple(color) do
    {r, g, b} = color
    "#" <>
      (r |> :binary.encode_unsigned() |> Base.encode16()) <>
      (g |> :binary.encode_unsigned() |> Base.encode16()) <>
      (b |> :binary.encode_unsigned() |> Base.encode16())
  end

  defp to_hex(color) do
    color
  end
end
|
lib/qr_code/svg.ex
| 0.858303
| 0.848408
|
svg.ex
|
starcoder
|
defmodule CcValidation do
  @moduledoc """
  CcValidation implements methods to verify validity of a credit card number
  """

  @doc """
  Validate a credit card number using Luhn Algorithm
  ## Examples
  Valid credit card
      iex> CcValidation.validate("4716892095589823")
      {:ok, true}
  Invalid credit card
      iex> CcValidation.validate("4024007106963124")
      {:error, false}
  Test card returns false
      iex> CcValidation.validate("4111111111111111")
      {:error, false}
  ### Validate test card numbers when check_test_numbers is true
  A valid test card
      iex> CcValidation.validate("4111111111111111", true)
      {:ok, true, test_number: true}
  An invalid test card
      iex> CcValidation.validate("4212121212121212", true)
      {:error, false, test_number: false}
  A valid card when checking for test numbers just passes through
  to the validate function but will also return that it is not
  a test card
      iex> CcValidation.validate("4716892095589823", true)
      {:ok, true, test_number: false}
  ### Card numbers that are of length less than 13 or greater than 19 are
  invalid.
      iex> CcValidation.validate("123")
      {:error, false}
      iex> CcValidation.validate("12345678901234567890")
      {:error, false}
  """
  def validate(number, check_test_numbers \\ false)

  def validate(number, true) do
    if CcValidation.TestNumbers.exists?(number) do
      {:ok, true, test_number: true}
    else
      Tuple.append(validate(number), test_number: false)
    end
  end

  def validate(number, false)
      when byte_size(number) < 13 or byte_size(number) > 19 do
    {:error, false}
  end

  def validate(number, false) do
    if CcValidation.TestNumbers.exists?(number) do
      {:error, false}
    else
      case rem(luhn_sum(number), 10) do
        0 -> {:ok, true}
        _ -> {:error, false}
      end
    end
  end

  # Luhn checksum: walking from the rightmost digit, every second digit is
  # doubled and doubles above 9 have 9 subtracted; the digits are summed.
  defp luhn_sum(number) do
    number
    |> String.codepoints()
    |> Enum.reverse()
    |> Enum.with_index(1)
    |> Enum.map(fn {digit, position} -> luhn_digit(String.to_integer(digit), position) end)
    |> Enum.sum()
  end

  defp luhn_digit(digit, position) when rem(position, 2) == 0 do
    doubled = digit * 2
    if doubled > 9, do: doubled - 9, else: doubled
  end

  defp luhn_digit(digit, _position), do: digit
end
|
lib/cc_validation.ex
| 0.799247
| 0.509703
|
cc_validation.ex
|
starcoder
|
defmodule Trans.Translator do
@moduledoc """
Provides easy access to struct translations.
Although translations are stored in regular fields of an struct and can be accessed directly, **it
is recommended to access translations using the functions provided by this module** instead. This
functions present additional behaviours such as:
* Checking that the given struct uses `Trans`
* Automatically inferring the [translation container](Trans.html#module-translation-container)
when needed.
* Falling back along a locale fallback chain (list of locales in which to look for
a translation). If not found, then return the default value or raise and exception if a
translation does not exist.
* Translating entire structs.
All examples in this module assume the following article, based on the schema defined in
[Structured translations](Trans.html#module-structured-translations)
article = %MyApp.Article{
title: "How to Write a Spelling Corrector",
body: "A wonderful article by <NAME>",
translations: %MyApp.Article.Translations{
es: %MyApp.Article.Translation{
title: "Cómo escribir un corrector ortográfico",
body: "Un artículo maravilloso de <NAME>"
},
fr: %MyApp.Article.Translation{
title: "Comment écrire un correcteur orthographique",
body: "Un merveilleux article de <NAME>"
}
}
}
"""
# Guard: a locale identifier is a binary ("en") or an atom (:en).
defguardp is_locale(locale) when is_binary(locale) or is_atom(locale)
@doc """
Translate a whole struct into the given locale.
Translates the whole struct with all translatable values and translatable associations into the
given locale. Similar to `translate/3` but returns the whole struct.
## Examples
Assuming the example article in this module, we can translate the entire struct into Spanish:
# Translate the entire article into Spanish
article_es = Trans.Translator.translate(article, :es)
article_es.title #=> "Cómo escribir un corrector ortográfico"
article_es.body #=> "Un artículo maravilloso de <NAME>"
Just like `translate/3`, falls back to the default locale if the translation does not exist:
# The Deutsch translation does not exist so the default values are returned
article_de = Trans.Translator.translate(article, :de)
article_de.title #=> "How to Write a Spelling Corrector"
article_de.body #=> "A wonderful article by <NAME>"
Rather than just one locale, a list of locales (a locale fallback chain) can also be
used. In this case, translation tries each locale in the fallback chain in sequence
until a translation is found. If none is found, the default value is returned.
# The Deutsch translation does not exist but the Spanish one does
article_de = Trans.Translator.translate(article, [:de, :es])
article_de.title #=> "Cómo escribir un corrector ortográfico"
article_de.body #=> "Un artículo maravilloso de <NAME>"
"""
@doc since: "2.3.0"
@spec translate(Trans.translatable(), Trans.locale_list()) :: Trans.translatable()
def translate(%{__struct__: module} = translatable, locale)
    when is_locale(locale) or is_list(locale) do
  # Only structs whose module uses Trans expose __trans__/1; anything else
  # is passed through untouched.
  uses_trans? = Keyword.has_key?(module.__info__(:functions), :__trans__)

  if uses_trans? do
    fallback_locale = module.__trans__(:default_locale)

    translatable
    |> translate_fields(locale, fallback_locale)
    |> translate_assocs(locale)
  else
    translatable
  end
end
@doc """
Translate a single field into the given locale.
Translates the field into the given locale or falls back to the default value if there is no
translation available.
## Examples
Assuming the example article in this module:
# We can get the Spanish title:
Trans.Translator.translate(article, :title, :es)
"Cómo escribir un corrector ortográfico"
# If the requested locale is not available, the default value will be returned:
Trans.Translator.translate(article, :title, :de)
"How to Write a Spelling Corrector"
# A fallback chain can also be used:
Trans.Translator.translate(article, :title, [:de, :es])
"Cómo escribir un corrector ortográfico"
# If we request a translation for an invalid field, we will receive an error:
Trans.Translator.translate(article, :fake_attr, :es)
** (RuntimeError) 'Article' module must declare 'fake_attr' as translatable
"""
@spec translate(Trans.translatable(), atom, Trans.locale_list()) :: any
def translate(%{__struct__: module} = translatable, field, locale)
    when (is_locale(locale) or is_list(locale)) and is_atom(field) do
  default_locale = module.__trans__(:default_locale)

  unless Trans.translatable?(translatable, field) do
    raise not_translatable_error(module, field)
  end

  # Fall back to the base (untranslated) value both when no translation is
  # stored (:error) and when the stored translation is nil.
  case translate_field(translatable, locale, field, default_locale) do
    missing when missing in [:error, nil] -> Map.fetch!(translatable, field)
    translation -> translation
  end
end
@doc """
Translate a single field into the given locale or raise if there is no translation.

Behaves like `translate/3`, but instead of falling back to the default value it
raises a `RuntimeError` when no translation is available.

## Examples

Assuming the example article in this module:

    Trans.Translator.translate!(article, :title, :de)
    ** (RuntimeError) translation doesn't exist for field ':title' in locale 'de'
"""
@doc since: "2.3.0"
@spec translate!(Trans.translatable(), atom, Trans.locale_list()) :: any
def translate!(%{__struct__: module} = translatable, field, locale)
    when (is_locale(locale) or is_list(locale)) and is_atom(field) do
  default_locale = module.__trans__(:default_locale)

  if not Trans.translatable?(translatable, field) do
    raise not_translatable_error(module, field)
  end

  # Only `:error` (no translation found at all) raises; any other result —
  # including `nil` — is returned to the caller unchanged.
  case translate_field(translatable, locale, field, default_locale) do
    :error -> raise no_translation_error(field, locale)
    translation -> translation
  end
end
# Fallback chain: walk the locale list in order and return the first locale
# that yields a translation. Both `:error` and `nil` count as misses.
defp translate_field(%{__struct__: _module} = struct, locales, field, default_locale)
     when is_list(locales) do
  Enum.reduce_while(locales, :error, fn locale, acc ->
    case translate_field(struct, locale, field, default_locale) do
      missing when missing in [:error, nil] -> {:cont, acc}
      translation -> {:halt, translation}
    end
  end)
end

# Requesting the default locale needs no container lookup: the struct field
# itself already holds the default-locale value. (The repeated binding of
# `default_locale` in the head makes this clause match only on equality.)
defp translate_field(%{__struct__: _module} = struct, default_locale, field, default_locale) do
  Map.fetch!(struct, field)
end

# Regular lookup: fetch the translation container, the locale's entry inside
# it, and finally the requested field. Any failed step falls through the
# `with` and is returned as-is (`:error` or `nil`).
defp translate_field(%{__struct__: module} = struct, locale, field, _default_locale) do
  with {:ok, container} <- Map.fetch(struct, module.__trans__(:container)),
       {:ok, locale_translations} <- get_translations_for_locale(container, locale),
       {:ok, translated} <- get_translated_field(locale_translations, field) do
    translated
  end
end
# Translates every declared translatable field of `struct` into `locale`,
# keeping the original (default-locale) value when no translation exists.
#
# Fix: `translate_field/4` signals a missing translation either as `:error`
# (no entry for the locale) or as `nil` (an empty locale container). The
# previous version only skipped `:error`, so a `nil` result overwrote the
# field's default value with `nil`. Both are now treated as "keep the
# default", matching how `translate/3` handles them.
defp translate_fields(%{__struct__: module} = struct, locale, default_locale) do
  fields = module.__trans__(:fields)
  Enum.reduce(fields, struct, fn field, struct ->
    case translate_field(struct, locale, field, default_locale) do
      :error -> struct
      nil -> struct
      translation -> Map.put(struct, field, translation)
    end
  end)
end
# Recursively translates all Ecto associations and embeds of `struct`.
# Not-yet-loaded associations are left alone; lists are translated item by
# item; single loaded records are translated directly; anything else (e.g.
# `nil`) passes through unchanged.
defp translate_assocs(%{__struct__: module} = struct, locale) do
  related = module.__schema__(:associations) ++ module.__schema__(:embeds)

  Enum.reduce(related, struct, fn assoc_name, acc ->
    Map.update(acc, assoc_name, nil, fn value ->
      cond do
        match?(%Ecto.Association.NotLoaded{}, value) -> value
        is_list(value) -> Enum.map(value, &translate(&1, locale))
        is_map(value) -> translate(value, locale)
        true -> value
      end
    end)
  end)
end
# Fetches the per-locale entry from the translation container.
#
# A struct container means Ecto embeds are in use, so locale keys are atoms:
# a binary locale is converted via `String.to_existing_atom/1` (safe — the
# atom already exists when the embed declares that locale). Plain-map
# containers use string keys, so the locale is normalized with `to_string/1`.
defp get_translations_for_locale(%{__struct__: _} = container, locale)
     when is_binary(locale) do
  atom_locale = String.to_existing_atom(locale)
  get_translations_for_locale(container, atom_locale)
end

defp get_translations_for_locale(%{__struct__: _} = container, locale)
     when is_atom(locale) do
  Map.fetch(container, locale)
end

# Default behaviour: string-keyed map container.
defp get_translations_for_locale(container, locale) do
  Map.fetch(container, to_string(locale))
end
# Nothing stored for this locale (the embed is `nil`) — no translated value.
defp get_translated_field(nil, _field), do: nil

# Struct container (Ecto embeds): field keys are atoms, so convert a binary
# field name with `String.to_existing_atom/1` before looking it up.
defp get_translated_field(%{__struct__: _} = locale_translations, field)
     when is_binary(field) do
  atom_field = String.to_existing_atom(field)
  get_translated_field(locale_translations, atom_field)
end

defp get_translated_field(%{__struct__: _} = locale_translations, field)
     when is_atom(field) do
  Map.fetch(locale_translations, field)
end

# Default behaviour: string-keyed map container.
defp get_translated_field(locale_translations, field) do
  Map.fetch(locale_translations, to_string(field))
end
# Message for the error raised by `translate!/3`; the wording differs for a
# fallback chain ("locales") versus a single locale ("locale").
defp no_translation_error(field, locales) when is_list(locales),
  do: "translation doesn't exist for field '#{inspect(field)}' in locales #{inspect(locales)}"

defp no_translation_error(field, locale),
  do: "translation doesn't exist for field '#{inspect(field)}' in locale #{inspect(locale)}"
# Message raised when `field` is not declared as translatable by `module`.
defp not_translatable_error(module, field),
  do: "'#{inspect(module)}' module must declare '#{inspect(field)}' as translatable"
end
|
lib/trans/translator.ex
| 0.902229
| 0.457743
|
translator.ex
|
starcoder
|
defmodule Plug.Adapters.Wait1 do
  @moduledoc """
  Adapter interface for the Wait1 websocket subprotocol.

  This module delegates to `Plug.Adapters.Cowboy`, injecting a Cowboy
  dispatch table that routes every request to `Plug.Adapters.Wait1.Handler`.

  ## Options

  * `:ip` - the ip to bind the server to.
    Must be a tuple in the format `{x, y, z, w}`.
  * `:port` - the port to run the server.
    Defaults to 4000 (http) and 4040 (https).
  * `:acceptors` - the number of acceptors for the listener.
    Defaults to 100.
  * `:max_connections` - max number of connections supported.
    Defaults to `:infinity`.
  * `:dispatch` - manually configure Cowboy's dispatch.
    If this option is used, the given plug won't be initialized
    nor dispatched to (and doing so becomes the user responsibility).
  * `:ref` - the reference name to be used.
    Defaults to `plug.HTTP` (http) and `plug.HTTPS` (https).
    This is the value that needs to be given on shutdown.
  * `:compress` - Cowboy will attempt to compress the response body.
  """

  @doc """
  Run Wait1 under http.

  ## Example

      # Starts a new interface
      Plug.Adapters.Wait1.http MyPlug, [], port: 80

      # The interface above can be shutdown with
      Plug.Adapters.Wait1.shutdown MyPlug.HTTP
  """
  def http(plug, opts, cowboy_options \\ []) do
    cowboy_options = put_dispatch(plug, opts, cowboy_options)
    Plug.Adapters.Cowboy.http(plug, opts, cowboy_options)
  end

  @doc """
  Run Wait1 under https.

  See `Plug.Adapters.Cowboy` for more information.

  ## Example

      # Starts a new interface
      Plug.Adapters.Wait1.https MyPlug, [],
        port: 443,
        password: "<PASSWORD>",
        otp_app: :my_app,
        keyfile: "ssl/key.pem",
        certfile: "ssl/cert.pem"

      # The interface above can be shutdown with
      Plug.Adapters.Wait1.shutdown MyPlug.HTTPS
  """
  def https(plug, opts, cowboy_options \\ []) do
    cowboy_options = put_dispatch(plug, opts, cowboy_options)
    Plug.Adapters.Cowboy.https(plug, opts, cowboy_options)
  end

  @doc """
  Shutdowns the given reference.
  """
  def shutdown(ref) do
    Plug.Adapters.Cowboy.shutdown(ref)
  end

  @doc """
  Returns a child spec to be supervised by your application.
  """
  def child_spec(scheme, plug, opts, cowboy_options \\ []) do
    Plug.Adapters.Cowboy.child_spec(scheme, plug, opts, put_dispatch(plug, opts, cowboy_options))
  end

  ## Helpers

  # Overwrites the :dispatch option with our handler-based dispatch table.
  defp put_dispatch(plug, opts, options) do
    Keyword.put(options, :dispatch, build_dispatch(plug, opts, options))
  end

  # Builds a catch-all Cowboy dispatch routing every host/path to the Wait1
  # handler. Also registers default ports for the ws/wss URI schemes.
  defp build_dispatch(plug, opts, options) do
    URI.default_port("ws", 80)
    URI.default_port("wss", 443)
    plug_opts = plug.init(opts)
    onconnection = Keyword.get(options, :onconnection, &default_onconnection/1)
    [{:_, [{:_, Plug.Adapters.Wait1.Handler, {plug, plug_opts, onconnection}}]}]
  end

  # Default :onconnection hook: accept every connection unchanged.
  defp default_onconnection(req), do: {:ok, req}
end
|
deps/plug_wait1/lib/plug/adapters/wait1.ex
| 0.811078
| 0.58347
|
wait1.ex
|
starcoder
|
defmodule StarkInfra.CreditNote do
  @moduledoc """
  Groups CreditNote related functions.

  CreditNotes are used to generate CCB contracts between you and your customers.
  When you initialize a CreditNote, the entity will not be automatically
  created in the Stark Infra API. The 'create' function sends the structs
  to the Stark Infra API and returns the list of created structs.

  ## Parameters (required):
    - `:template_id` [string]: ID of the contract template on which the CreditNote will be based. ex: "0123456789101112"
    - `:name` [string]: credit receiver's full name. ex: "<NAME>"
    - `:tax_id` [string]: credit receiver's tax ID (CPF or CNPJ). ex: "20.018.183/0001-80"
    - `:nominal_amount` [integer]: amount in cents transferred to the credit receiver, before deductions. ex: 11234 (= R$ 112.34)
    - `:scheduled` [Date, DateTime or string]: Date or datetime of transfer execution. ex: ~D[2020-03-10]
    - `:invoices` [list of Invoice structs or maps]: list of Invoice structs to be created and sent to the credit receiver. ex: [%{due: "2023-06-25", amount: 120000, fine: 10, interest: 2}]
    - `:payment` [Transfer struct or map]: payment entity to be created and sent to the credit receiver. ex: %{bankCode: "00000000", branchCode: "1234", accountNumber: "129340-1", name: "<NAME>", taxId: "012.345.678-90"}
    - `:signers` [list of Signer objects or maps]: name and e-mail of signers that sign the contract. ex: [%{"name": "<NAME>", "contact": "<EMAIL>", "method": "link"}]
    - `:external_id` [string]: a string that must be unique among all your CreditNotes, used to avoid resource duplication. ex: "my-internal-id-123456"
    - `:street_line_1` [string]: credit receiver main address. ex: "Av. Paulista, 200"
    - `:street_line_2` [string]: credit receiver address complement. ex: "Apto. 123"
    - `:district` [string]: credit receiver address district / neighbourhood. ex: "Bela Vista"
    - `:city` [string]: credit receiver address city. ex: "Rio de Janeiro"
    - `:state_code` [string]: credit receiver address state. ex: "GO"
    - `:zip_code` [string]: credit receiver address zip code. ex: "01311-200"

  ## Parameters (conditionally required):
    - `:payment_type` [string]: payment type, inferred from the payment parameter if it is not a dictionary. ex: "transfer"

  ## Parameters (optional):
    - `:rebate_amount` [integer, default 0]: credit analysis fee deducted from lent amount. ex: 11234 (= R$ 112.34)
    - `:tags` [list of strings, default []]: list of strings for reference when searching for CreditNotes. ex: ["employees", "monthly"]

  ## Attributes (return-only):
    - `:id` [string]: unique id returned when the CreditNote is created. ex: "5656565656565656"
    - `:amount` [integer]: CreditNote value in cents. ex: 1234 (= R$ 12.34)
    - `:expiration` [integer]: time interval in seconds between due date and expiration date. ex: 123456789
    - `:document_id` [string]: ID of the signed document to execute this CreditNote. ex: "4545454545454545"
    - `:status` [string]: current status of the CreditNote. ex: "canceled", "created", "expired", "failed", "processing", "signed", "success"
    - `:transaction_ids` [list of strings]: ledger transaction ids linked to this CreditNote. ex: ["19827356981273"]
    - `:workspace_id` [string]: ID of the Workspace that generated this CreditNote. ex: "4545454545454545"
    - `:tax_amount` [integer]: tax amount included in the CreditNote. ex: 100
    - `:interest` [float]: yearly effective interest rate of the CreditNote, in percentage. ex: 12.5
    - `:created` [DateTime]: creation DateTime for the CreditNote. ex: ~U[2020-03-10 10:30:00Z]
    - `:updated` [DateTime]: latest update DateTime for the CreditNote. ex: ~U[2020-03-10 10:30:00Z]
  """

  alias __MODULE__, as: CreditNote
  alias StarkInfra.Error
  alias StarkInfra.Utils.API
  alias StarkInfra.Utils.Rest
  alias StarkInfra.Utils.Check
  alias StarkInfra.User.Project
  alias StarkInfra.User.Organization
  alias StarkInfra.CreditNote.Signer
  alias StarkInfra.CreditNote.Invoice
  alias StarkInfra.CreditNote.Transfer

  # Note: the original `@enforce_keys` list ended with a trailing comma,
  # which is a syntax error in Elixir list literals — removed here.
  @enforce_keys [
    :template_id,
    :name,
    :tax_id,
    :nominal_amount,
    :scheduled,
    :invoices,
    :payment,
    :signers,
    :external_id,
    :street_line_1,
    :street_line_2,
    :district,
    :city,
    :state_code,
    :zip_code
  ]
  defstruct [
    :template_id,
    :name,
    :tax_id,
    :nominal_amount,
    :scheduled,
    :invoices,
    :payment,
    :signers,
    :external_id,
    :street_line_1,
    :street_line_2,
    :district,
    :city,
    :state_code,
    :zip_code,
    :payment_type,
    :rebate_amount,
    :tags,
    :id,
    :interest,
    :amount,
    :expiration,
    :document_id,
    :status,
    :transaction_ids,
    :workspace_id,
    :tax_amount,
    :created,
    :updated
  ]

  @type t() :: %__MODULE__{}

  @doc """
  Send a list of CreditNote structs for creation in the Stark Infra API.

  ## Parameters (required):
    - `:notes` [list of CreditNote structs]: list of CreditNote structs to be created in the API

  ## Options:
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - list of CreditNote structs with updated attributes
  """
  @spec create(
    [CreditNote.t() | map()],
    user: Project.t() | Organization.t() | nil
  ) ::
    {:ok, [CreditNote.t()]} |
    {:error, [Error.t()]}
  def create(notes, options \\ []) do
    Rest.post(
      resource(),
      notes,
      options
    )
  end

  @doc """
  Same as create(), but it will unwrap the error tuple and raise in case of errors.
  """
  # Fix: this spec was previously named `create`, silently mismatching the
  # `create!/2` definition it precedes.
  @spec create!(
    [CreditNote.t() | map()],
    user: Project.t() | Organization.t() | nil
  ) :: any
  def create!(notes, options \\ []) do
    Rest.post!(
      resource(),
      notes,
      options
    )
  end

  @doc """
  Receive a single CreditNote struct previously created in the Stark Infra API by its id.

  ## Parameters (required):
    - `:id` [string]: struct's unique id. ex: "5656565656565656"

  ## Options:
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - CreditNote struct with updated attributes
  """
  # Fix: the success type is a single CreditNote, not a list.
  @spec get(
    binary,
    user: Project.t() | Organization.t() | nil
  ) ::
    {:ok, CreditNote.t()} |
    {:error, [Error.t()]}
  def get(id, options \\ []) do
    Rest.get_id(resource(), id, options)
  end

  @doc """
  Same as get(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec get!(binary, user: Project.t() | Organization.t() | nil) :: any
  def get!(id, options \\ []) do
    Rest.get_id!(resource(), id, options)
  end

  @doc """
  Receive a stream of CreditNote structs previously created in the Stark Infra API.

  ## Options:
    - `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
    - `:after` [Date or string, default nil]: date filter for structs created only after specified date. ex: ~D[2020-03-10]
    - `:before` [Date or string, default nil]: date filter for structs created only before specified date. ex: ~D[2020-03-10]
    - `:status` [string, default nil]: filter for status of retrieved structs. Options: "canceled", "created", "expired", "failed", "processing", "signed", "success".
    - `:tags` [list of strings, default []]: tags to filter retrieved structs. ex: ["tony", "stark"]
    - `:ids` [list of strings, default []]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - stream of CreditNote structs with updated attributes
  """
  @spec query(
    limit: integer,
    after: Date.t() | binary,
    before: Date.t() | binary,
    status: binary,
    tags: [binary],
    ids: [binary],
    user: Project.t() | Organization.t() | nil
  ) ::
    ({:cont, {:ok, [CreditNote.t()]}} |
     {:error, [Error.t()]} |
     {:halt, any} |
     {:suspend, any},
     any -> any)
  def query(options \\ []) do
    Rest.get_list(resource(), options)
  end

  @doc """
  Same as query(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec query!(
    limit: integer,
    after: Date.t() | binary,
    before: Date.t() | binary,
    status: binary,
    tags: [binary],
    ids: [binary],
    user: Project.t() | Organization.t() | nil
  ) ::
    ({:cont, [CreditNote.t()]} |
     {:error, [Error.t()]} |
     {:halt, any} |
     {:suspend, any},
     any -> any)
  def query!(options \\ []) do
    Rest.get_list!(resource(), options)
  end

  @doc """
  Receive a list of up to 100 CreditNote structs previously created in the Stark Infra API and the cursor to the next page.
  Use this function instead of query if you want to manually page your requests.

  ## Options:
    - `:cursor` [string, default nil]: cursor returned on the previous page function call
    - `:limit` [integer, default 100]: maximum number of structs to be retrieved. It must be an integer between 1 and 100. ex: 50
    - `:after` [Date or string, default nil]: date filter for structs created only after specified date. ex: ~D[2020-03-10]
    - `:before` [Date or string, default nil]: date filter for structs created only before specified date. ex: ~D[2020-03-10]
    - `:status` [string, default nil]: filter for status of retrieved structs. Options: "canceled", "created", "expired", "failed", "processing", "signed", "success".
    - `:tags` [list of strings, default []]: tags to filter retrieved structs. ex: ["tony", "stark"]
    - `:ids` [list of strings, default []]: list of ids to filter retrieved structs. ex: ["5656565656565656", "4545454545454545"]
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - list of CreditNote structs with updated attributes
    - cursor to retrieve the next page of CreditNote structs
  """
  @spec page(
    cursor: binary,
    limit: integer,
    after: Date.t() | binary,
    before: Date.t() | binary,
    status: binary,
    tags: [binary],
    ids: [binary],
    user: Project.t() | Organization.t() | nil
  ) ::
    {:ok, {binary, [CreditNote.t()]}} |
    {:error, [Error.t()]}
  def page(options \\ []) do
    Rest.get_page(resource(), options)
  end

  @doc """
  Same as page(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec page!(
    cursor: binary,
    limit: integer,
    after: Date.t() | binary,
    before: Date.t() | binary,
    status: binary,
    tags: [binary],
    ids: [binary],
    user: Project.t() | Organization.t() | nil
  ) ::
    [CreditNote.t()]
  def page!(options \\ []) do
    Rest.get_page!(resource(), options)
  end

  @doc """
  Cancel a CreditNote entity previously created in the Stark Infra API.

  ## Parameters (required):
    - `:id` [string]: id of the CreditNote to be canceled

  ## Options:
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - canceled CreditNote struct
  """
  @spec cancel(
    binary,
    user: Project.t() | Organization.t() | nil
  ) ::
    {:ok, CreditNote.t()} |
    {:error, [Error.t()]}
  def cancel(id, options \\ []) do
    Rest.delete_id(resource(), id, options)
  end

  @doc """
  Same as cancel(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec cancel!(
    binary,
    user: Project.t() | Organization.t() | nil
  ) :: CreditNote.t()
  def cancel!(id, options \\ []) do
    Rest.delete_id!(resource(), id, options)
  end

  # Converts the raw payment map into the struct matching its payment_type;
  # unknown payment types are passed through unchanged.
  defp parse_payment!(payment, payment_type) do
    case payment_type do
      "transfer" -> API.from_api_json(payment, &Transfer.resource_maker/1)
      _ -> payment
    end
  end

  @doc false
  def resource() do
    {
      "CreditNote",
      &resource_maker/1
    }
  end

  @doc false
  # Builds a CreditNote struct from an API JSON payload.
  # NOTE(review): return-only fields such as :amount, :expiration,
  # :document_id, :status, :transaction_ids, :workspace_id and :tax_amount
  # are not copied from `json` here — confirm against actual API payloads
  # whether they should be.
  def resource_maker(json) do
    %CreditNote{
      template_id: json[:template_id],
      name: json[:name],
      tax_id: json[:tax_id],
      nominal_amount: json[:nominal_amount],
      scheduled: json[:scheduled],
      invoices: json[:invoices] |> Enum.map(fn invoice -> API.from_api_json(invoice, &Invoice.resource_maker/1) end),
      signers: json[:signers] |> Enum.map(fn signer -> API.from_api_json(signer, &Signer.resource_maker/1) end),
      payment: parse_payment!(json[:payment], json[:payment_type]),
      payment_type: json[:payment_type],
      external_id: json[:external_id],
      street_line_1: json[:street_line_1],
      street_line_2: json[:street_line_2],
      district: json[:district],
      city: json[:city],
      state_code: json[:state_code],
      zip_code: json[:zip_code],
      interest: json[:interest],
      rebate_amount: json[:rebate_amount],
      tags: json[:tags],
      created: json[:created] |> Check.datetime(),
      updated: json[:updated] |> Check.datetime(),
      id: json[:id]
    }
  end
end
|
lib/credit_note/credit_note.ex
| 0.872646
| 0.712382
|
credit_note.ex
|
starcoder
|
defmodule Meeseeks.Selector.CSS.Parser do
  @moduledoc false

  # Recursive-descent parser over the CSS tokenizer's output (a flat token
  # list) producing `Meeseeks.Selector.Element` structs. Comma-separated
  # selectors become separate elements; combinators (`>`, space, `+`, `~`)
  # nest via each element's `combinator` field. Clause order matters
  # throughout: more specific token shapes must match before the catch-all
  # error clauses.

  alias Meeseeks.{Error, Selector}
  alias Meeseeks.Selector.{Combinator, Element}
  alias Meeseeks.Selector.Element.{Attribute, Namespace, PseudoClass, Tag}

  # Parse Elements

  # Entry point: parses the whole token list into a list of elements,
  # one per comma-separated selector.
  def parse_elements(toks) do
    parse_elements(toks, [])
  end

  defp parse_elements([], elements) do
    Enum.reverse(elements)
  end

  defp parse_elements(toks, elements) do
    {element, toks} = parse_element(toks)
    parse_elements(toks, [element | elements])
  end

  # Parse Element

  # Parses one element (selector) from the token stream, returning
  # `{element, remaining_toks}`. Selectors are accumulated in reverse and
  # flipped when the element is finalized.
  defp parse_element(toks) do
    parse_element(toks, %Element{})
  end

  # End of input finalizes the current element.
  defp parse_element([], element) do
    element = %{element | selectors: Enum.reverse(element.selectors)}
    {element, []}
  end

  # A ',' also finalizes the current element; the caller continues with the
  # remaining tokens for the next element.
  defp parse_element([',' | toks], element) do
    element = %{element | selectors: Enum.reverse(element.selectors)}
    {element, toks}
  end

  # `ns|tag` — namespace prefix (must match before the bare-ident clause,
  # which would otherwise swallow the namespace as a tag).
  defp parse_element([{:ident, namespace}, '|' | toks], element) do
    selector = %Namespace{value: List.to_string(namespace)}
    element = %{element | selectors: [selector | element.selectors]}
    parse_element(toks, element)
  end

  # `*|tag` — wildcard namespace.
  defp parse_element(['*', '|' | toks], element) do
    selector = %Namespace{value: "*"}
    element = %{element | selectors: [selector | element.selectors]}
    parse_element(toks, element)
  end

  # Bare tag name.
  defp parse_element([{:ident, tag} | toks], element) do
    selector = %Tag{value: List.to_string(tag)}
    element = %{element | selectors: [selector | element.selectors]}
    parse_element(toks, element)
  end

  # Universal selector `*`.
  defp parse_element(['*' | toks], element) do
    selector = %Tag{value: "*"}
    element = %{element | selectors: [selector | element.selectors]}
    parse_element(toks, element)
  end

  # `#id` — sugar for [id="..."].
  defp parse_element([{:id, id} | toks], element) do
    selector = %Attribute.Value{attribute: "id", value: List.to_string(id)}
    element = %{element | selectors: [selector | element.selectors]}
    parse_element(toks, element)
  end

  # `.class` — sugar for [class~="..."].
  defp parse_element([{:class, class} | toks], element) do
    selector = %Attribute.ValueIncludes{attribute: "class", value: List.to_string(class)}
    element = %{element | selectors: [selector | element.selectors]}
    parse_element(toks, element)
  end

  # `[` starts an attribute selector.
  defp parse_element(['[' | toks], element) do
    {selector, toks} = parse_attribute(toks)
    element = %{element | selectors: [selector | element.selectors]}
    parse_element(toks, element)
  end

  # `:` starts a pseudo class; the parsed selector is validated eagerly so
  # unsupported/malformed pseudo classes raise at parse time.
  defp parse_element([':' | toks], element) do
    {selector, toks} = parse_pseudo_class(toks)
    element = %{element | selectors: [selector | element.selectors]}
    Selector.validate!(selector)
    parse_element(toks, element)
  end

  # Combinator clauses: parse the right-hand element recursively, wrap it in
  # the matching combinator struct, then re-inject a ',' token so the
  # terminating clause above finalizes the current element without consuming
  # any further input.
  defp parse_element(['>' | toks], element) do
    {combinator_selector, toks} = parse_element(toks)
    combinator = %Combinator.ChildElements{selector: combinator_selector}
    element = %{element | combinator: combinator}
    parse_element([',' | toks], element)
  end

  defp parse_element([:space | toks], element) do
    {combinator_selector, toks} = parse_element(toks)
    combinator = %Combinator.DescendantElements{selector: combinator_selector}
    element = %{element | combinator: combinator}
    parse_element([',' | toks], element)
  end

  defp parse_element(['+' | toks], element) do
    {combinator_selector, toks} = parse_element(toks)
    combinator = %Combinator.NextSiblingElement{selector: combinator_selector}
    element = %{element | combinator: combinator}
    parse_element([',' | toks], element)
  end

  defp parse_element(['~' | toks], element) do
    {combinator_selector, toks} = parse_element(toks)
    combinator = %Combinator.NextSiblingElements{selector: combinator_selector}
    element = %{element | combinator: combinator}
    parse_element([',' | toks], element)
  end

  # Catch-all: any token sequence not handled above is a parse error.
  defp parse_element(toks, element) do
    raise Error.new(:css_selector_parser, :invalid_input, %{
      description: "Unsupported sequence of tokens when parsing element",
      tokens: toks,
      element: element
    })
  end

  # Parse Attribute

  # Tokenizer tags for the `[attr <op> value]` operators (=, *=, |=, ~=, ^=, $=).
  @attribute_value_selector_types [
    :value,
    :value_contains,
    :value_dash,
    :value_includes,
    :value_prefix,
    :value_suffix
  ]

  # `[^attr]` — matches any attribute whose *name* starts with `attr`.
  defp parse_attribute(['^', {:ident, attr}, ']' | toks]) do
    selector = %Attribute.AttributePrefix{attribute: List.to_string(attr)}
    {selector, toks}
  end

  # `[attr <op> ident]` and `[attr <op> "string"]` forms.
  defp parse_attribute([{:ident, attr}, type, {:ident, val}, ']' | toks])
       when type in @attribute_value_selector_types do
    selector = attribute_value_selector(type, List.to_string(attr), List.to_string(val))
    {selector, toks}
  end

  defp parse_attribute([{:ident, attr}, type, {:string, val}, ']' | toks])
       when type in @attribute_value_selector_types do
    selector = attribute_value_selector(type, List.to_string(attr), List.to_string(val))
    {selector, toks}
  end

  # Bare `[attr]` — attribute presence.
  defp parse_attribute([{:ident, attr}, ']' | toks]) do
    selector = %Attribute.Attribute{attribute: List.to_string(attr)}
    {selector, toks}
  end

  defp parse_attribute(toks) do
    raise Error.new(:css_selector_parser, :invalid_input, %{
      description: "Unsupported sequence of tokens when parsing attribute",
      tokens: toks
    })
  end

  # Maps an operator token to its Attribute selector struct.
  defp attribute_value_selector(type, attr, val) do
    case type do
      :value -> %Attribute.Value{attribute: attr, value: val}
      :value_contains -> %Attribute.ValueContains{attribute: attr, value: val}
      :value_dash -> %Attribute.ValueDash{attribute: attr, value: val}
      :value_includes -> %Attribute.ValueIncludes{attribute: attr, value: val}
      :value_prefix -> %Attribute.ValuePrefix{attribute: attr, value: val}
      :value_suffix -> %Attribute.ValueSuffix{attribute: attr, value: val}
    end
  end

  # Parse Pseudo Class

  # `:name` — pseudo class without arguments.
  defp parse_pseudo_class([{:ident, type} | toks]) do
    selector = pseudo_class_selector(type, [])
    {selector, toks}
  end

  # `:name(...)` — pseudo class with arguments; the tokenizer emits the name
  # as a `{:function, name}` token with the args following up to a ')'.
  defp parse_pseudo_class([{:function, type} | toks]) do
    {args, toks} = parse_pseudo_class_args(type, toks)
    selector = pseudo_class_selector(type, args)
    {selector, toks}
  end

  # :not(...) args are a nested selector, parsed separately; this clause must
  # come before the generic one. `type` is a charlist, hence 'not'.
  defp parse_pseudo_class_args('not', toks) do
    parse_not_args(toks, 0, [])
  end

  defp parse_pseudo_class_args(type, toks) do
    parse_pseudo_class_args(type, toks, [])
  end

  # ')' closes the argument list.
  defp parse_pseudo_class_args(_type, [')' | toks], args) do
    {Enum.reverse(args), toks}
  end

  defp parse_pseudo_class_args(type, [{:int, arg} | toks], args) do
    parse_pseudo_class_args(type, toks, [List.to_integer(arg) | args])
  end

  defp parse_pseudo_class_args(type, [{:ident, arg} | toks], args) do
    parse_pseudo_class_args(type, toks, [List.to_string(arg) | args])
  end

  defp parse_pseudo_class_args(type, [{:string, arg} | toks], args) do
    parse_pseudo_class_args(type, toks, [List.to_string(arg) | args])
  end

  # An `an+b` formula (e.g. `2n+1`, `-n`, `odd` is handled elsewhere) is
  # decomposed into the integers a and b, accumulated as [b, a | args]
  # (list is reversed on ')', yielding [a, b] order).
  defp parse_pseudo_class_args(type, [{:ab_formula, arg} | toks], args) do
    case Regex.run(~r/\s*([\+\-])?\s*(\d+)?[nN]\s*(([\+\-])\s*(\d+))?\s*/, List.to_string(arg)) do
      [_] ->
        # bare "n" -> a = 1, b = 0
        parse_pseudo_class_args(type, toks, [0, 1 | args])
      [_, a_op] ->
        # "+n" / "-n" -> a = ±1, b = 0
        a = parse_a(a_op)
        parse_pseudo_class_args(type, toks, [0, a | args])
      [_, a_op, a_str] ->
        # "±Xn" -> a = ±X, b = 0
        a = parse_a(a_op <> a_str)
        parse_pseudo_class_args(type, toks, [0, a | args])
      [_, a_op, a_str, _, b_op, b_str] ->
        # "±Xn±Y" -> a = ±X, b = ±Y
        a = parse_a(a_op <> a_str)
        b = parse_b(b_op <> b_str)
        parse_pseudo_class_args(type, toks, [b, a | args])
    end
  end

  # Parses the `a` coefficient: "" and "-" mean implicit ±1.
  defp parse_a(a_str) do
    case String.replace(a_str, "+", "") do
      "" -> 1
      "-" -> -1
      str -> String.to_integer(str)
    end
  end

  # Parses the `b` offset, stripping an explicit leading '+'.
  defp parse_b(b_str) do
    b_str
    |> String.replace("+", "")
    |> String.to_integer()
  end

  # Collects the raw tokens inside :not(...), tracking nesting depth so that
  # nested function calls' ')' tokens are not mistaken for the closing one.
  # The collected tokens are re-parsed as a full selector list.
  defp parse_not_args([')' | toks], 0, acc) do
    tokens = Enum.reverse(acc)
    selectors = parse_elements(tokens)
    {[selectors], toks}
  end

  defp parse_not_args([')' | toks], depth, acc) do
    parse_not_args(toks, depth - 1, [')' | acc])
  end

  defp parse_not_args([{:function, _type} = tok | toks], depth, acc) do
    parse_not_args(toks, depth + 1, [tok | acc])
  end

  defp parse_not_args([tok | toks], depth, acc) do
    parse_not_args(toks, depth, [tok | acc])
  end

  # Maps a pseudo class name (charlist, as emitted by the tokenizer) to its
  # selector struct; unknown names raise a parse error.
  defp pseudo_class_selector(type, args) do
    case type do
      'first-child' ->
        %PseudoClass.FirstChild{args: args}
      'first-of-type' ->
        %PseudoClass.FirstOfType{args: args}
      'last-child' ->
        %PseudoClass.LastChild{args: args}
      'last-of-type' ->
        %PseudoClass.LastOfType{args: args}
      'not' ->
        %PseudoClass.Not{args: args}
      'nth-child' ->
        %PseudoClass.NthChild{args: args}
      'nth-last-child' ->
        %PseudoClass.NthLastChild{args: args}
      'nth-last-of-type' ->
        %PseudoClass.NthLastOfType{args: args}
      'nth-of-type' ->
        %PseudoClass.NthOfType{args: args}
      _ ->
        raise Error.new(:css_selector_parser, :invalid_input, %{
          description: "Pseudo class \"#{type}\" not supported"
        })
    end
  end
end
|
lib/meeseeks/selector/css/parser.ex
| 0.72086
| 0.551876
|
parser.ex
|
starcoder
|
defmodule Magritte do
  @moduledoc """
  Alternative pipe operator definition.

  ## Usage

  Just add `use Magritte` to the top of your module and then
  follow the documentation for `Magritte.|>/2` below.
  """

  # Replaces Kernel's |>/2 with this module's version in the caller.
  defmacro __using__(_) do
    quote do
      import Kernel, except: [|>: 2]
      import unquote(__MODULE__), only: [|>: 2]
    end
  end

  @doc """
  Enhanced pipe operator.

  This operator introduces the expression on the left-hand side as an argument
  to the function call on the right-hand side. The `...` placeholder operator
  determines the position of the given argument on the right-hand side.

  If the `...` operator is not present, the first position is used by default.

  ## Examples

  ```elixir
  iex> [1, [2], 3] |> List.flatten()
  [1, 2, 3]
  ```

  The example above is the same as calling `List.flatten([1, [2], 3])`.

  Using `...` you can pick the position where the result of the left side will
  be inserted:

  ```elixir
  iex> 2 |> Integer.to_string(10, ...)
  "1010"
  ```

  The example above is the same as calling `Integer.to_string(10, 2)`.

  You can also join these into longer chains:

  ```elixir
  iex> 2 |> Integer.to_string(10, ...) |> Integer.parse
  {1010, ""}
  ```

  The operator `...` can be used only once in the pipeline, otherwise
  it will return compile-time error:

  ```elixir
  2 |> Integer.to_string(..., ...)
  ** (CompileError) Doubled placeholder in Integer.to_string(..., ...)
  ```
  """
  # Flattens the |> chain into [{call, insert_pos}, ...] and folds it with
  # Macro.pipe/3, which splices the accumulated AST into each call at the
  # requested argument position.
  defmacro left |> right do
    [{h, _} | t] = unpipe({:|>, [], [left, right]}, __CALLER__)
    fun = fn {x, pos}, acc ->
      Macro.pipe(acc, x, pos)
    end
    :lists.foldl(fun, h, t)
  end

  # Walks the left-nested |> AST, producing the pipeline steps in order.
  defp unpipe(ast, caller), do: :lists.reverse(unpipe(ast, [], caller))

  defp unpipe({:|>, _, [left, right]}, acc, caller) do
    unpipe(right, unpipe(left, acc, caller), caller)
  end

  # For each step, find the `...` placeholder (if any) to determine the
  # insertion position; a doubled placeholder is a compile error raised at
  # the caller's location.
  defp unpipe(ast, acc, %Macro.Env{line: line, file: file}) do
    case find_pos(ast) do
      {:ok, new_ast, pos} ->
        [{new_ast, pos} | acc]
      {:error, {:already_found, _, _}} ->
        raise CompileError,
          file: file,
          line: line,
          description: "Doubled placeholder in #{Macro.to_string(ast)}"
    end
  end

  # A bare variable/operator AST node has `nil` (or a context atom) where a
  # call has its args list; "empty" covers both shapes.
  defguardp is_empty(a) when a == [] or not is_list(a)

  # Scans a call's arguments for the placeholder. Returns the call with the
  # placeholder removed plus its 0-based position; non-call AST (or calls
  # without args) defaults to position 0.
  defp find_pos({fun, env, args}) when not is_empty(args) do
    with {:ok, found, new_args} <- locate(args, 0, nil, []),
         do: {:ok, {fun, env, new_args}, found}
  end

  defp find_pos(ast), do: {:ok, ast, 0}

  # AST pattern for a bare `...` placeholder; `var!(args)` binds the node's
  # third element so the guard below can check it carries no arguments
  # (i.e. it is the operator itself, not a call named `...`).
  pattern = quote do: {:..., _, var!(args)}

  # First placeholder found: record its position and drop it from the args.
  defp locate([unquote(pattern) | rest], pos, nil, acc) when is_empty(args),
    do: locate(rest, pos + 1, pos, acc)

  # Second placeholder: report the conflict (positions of both occurrences).
  defp locate([unquote(pattern) | _], pos, found, _acc) when is_empty(args),
    do: {:error, {:already_found, found, pos}}

  # Ordinary argument: keep it (accumulated in reverse).
  defp locate([arg | rest], pos, found, args),
    do: locate(rest, pos + 1, found, [arg | args])

  # Done: `found || 0` defaults to first position when no placeholder was used.
  defp locate([], _, found, args),
    do: {:ok, found || 0, :lists.reverse(args)}
end
|
lib/magritte.ex
| 0.897529
| 0.945601
|
magritte.ex
|
starcoder
|
defmodule CredoContrib.CheckUtils do
@moduledoc """
Test utils cribbed from the main credo repo:
https://git.io/vNxf5
"""
alias Credo.Execution.{ExecutionIssues, ExecutionSourceFiles}
alias Credo.SourceFile
alias ExUnit.Assertions
def assert_issue(source_file, callback) when is_function(callback) do
assert_issue(source_file, nil, [], callback)
end
def assert_issue(source_file, check, callback) when is_function(callback) do
assert_issue(source_file, check, [], callback)
end
def assert_issue(source_file, check \\ nil, params \\ [], callback \\ nil) do
issues = issues_for(source_file, check, create_config(), params)
Assertions.refute(Enum.empty?(issues), "There should be one issue, got none.")
Assertions.assert(
Enum.count(issues) == 1,
"There should be only 1 issue, got #{Enum.count(issues)}: #{to_inspected(issues)}"
)
if callback do
issues |> List.first() |> callback.()
end
issues
end
def assert_issues(source_file, callback) when is_function(callback) do
assert_issues(source_file, nil, [], callback)
end
def assert_issues(source_file, check, callback) when is_function(callback) do
assert_issues(source_file, check, [], callback)
end
def assert_issues(source_file, check \\ nil, params \\ [], callback \\ nil) do
issues = issues_for(source_file, check, create_config(), params)
Assertions.assert(Enum.count(issues) > 0, "There should be multiple issues, got none.")
Assertions.assert(
Enum.count(issues) > 1,
"There should be more than one issue, got: #{to_inspected(issues)}"
)
if callback, do: callback.(issues)
issues
end
def refute_issues(source_file, check \\ nil, params \\ []) do
issues = issues_for(source_file, check, create_config(), params)
Assertions.assert(
[] == issues,
"There should be no issues, got #{Enum.count(issues)}: #{to_inspected(issues)}"
)
issues
end
def to_source_file(source) do
to_source_file(source, generate_file_name())
end
def to_source_file(source, filename) do
case Credo.SourceFile.parse(source, filename) do
%{status: :valid} = source_file ->
source_file
_ ->
raise "Source could not be parsed!"
end
end
@doc """
Parses each source string in `list` into a `Credo.SourceFile`.
"""
def to_source_files(list) do
  for source <- list, do: to_source_file(source)
end
# Builds a fresh %Credo.Execution{} with the source-file and issue servers
# started, mirroring the setup of a real Credo run.
defp create_config do
  %Credo.Execution{}
  |> ExecutionSourceFiles.start_server()
  |> ExecutionIssues.start_server()
end
# Produces a unique throwaway .ex file name for parsed test sources.
defp generate_file_name do
  unique = System.unique_integer([:positive])
  "test-untitled.#{unique}.ex"
end
# Fetches the issues recorded for `source_file` on the given execution.
defp get_issues_from_source_file(source_file, exec) do
  ExecutionIssues.get(exec, source_file)
end
# Runs the given check (or collects already-recorded issues when `check` is
# nil) and returns the resulting issue list.
#
# NOTE(review): the list clause that receives a `check` ignores the passed-in
# `_exec` and builds its own execution — presumably so each check run starts
# from a clean issue store; confirm against the upstream credo repo.
defp issues_for(source_files, nil, exec, _) when is_list(source_files) do
  Enum.flat_map(source_files, &(&1 |> get_issues_from_source_file(exec)))
end

defp issues_for(source_files, check, _exec, params) when is_list(source_files) do
  exec = create_config()

  # Checks that run on all files at once record issues on the execution;
  # per-file checks return updated source files whose issues are then read.
  if check.run_on_all? do
    :ok = check.run(source_files, exec, params)

    source_files
    |> Enum.flat_map(&(&1 |> get_issues_from_source_file(exec)))
  else
    source_files
    |> check.run(params)
    |> Enum.flat_map(&(&1 |> get_issues_from_source_file(exec)))
  end
end

defp issues_for(%SourceFile{} = source_file, nil, exec, _) do
  source_file |> get_issues_from_source_file(exec)
end

# Single-file check runs return the issues directly from `check.run/2`.
defp issues_for(%SourceFile{} = source_file, check, _exec, params) do
  _issues = check.run(source_file, params)
end
# Renders any value as a pretty-printed string (width 50) for use in
# assertion failure messages.
#
# BUG FIX: `Inspect.Algebra.format/2` returns iodata, which may contain
# nested lists; `Enum.join("")` raises Protocol.UndefinedError on nested
# iodata. `IO.iodata_to_binary/1` flattens arbitrary iodata correctly and
# is byte-identical for the flat case.
defp to_inspected(value) do
  value
  |> Inspect.Algebra.to_doc(%Inspect.Opts{})
  |> Inspect.Algebra.format(50)
  |> IO.iodata_to_binary()
end
end
|
test/support/check_utils.ex
| 0.631481
| 0.444685
|
check_utils.ex
|
starcoder
|
defmodule Solana.Transaction do
  @moduledoc """
  Functions for building and encoding Solana
  [transactions](https://docs.solana.com/developing/programming-model/transactions)
  """
  require Logger
  alias Solana.{Account, CompactArray, Instruction}

  @typedoc """
  All the details needed to encode a transaction.
  """
  @type t :: %__MODULE__{
          payer: Solana.key() | nil,
          blockhash: binary | nil,
          instructions: [Instruction.t()],
          signers: [Solana.keypair()]
        }

  @typedoc """
  The possible errors encountered when encoding a transaction.
  """
  @type encoding_err ::
          :no_payer
          | :no_blockhash
          | :no_program
          | :no_instructions
          | :mismatched_signers

  defstruct [
    :payer,
    :blockhash,
    instructions: [],
    signers: []
  ]

  @doc """
  decodes a base58-encoded signature and returns it in a tuple.
  If it fails, return an error tuple.
  """
  @spec decode(encoded :: binary) :: {:ok, binary} | {:error, binary}
  def decode(encoded) when is_binary(encoded) do
    case B58.decode58(encoded) do
      {:ok, decoded} -> check(decoded)
      _ -> {:error, "invalid signature"}
    end
  end

  def decode(_), do: {:error, "invalid signature"}

  @doc """
  decodes a base58-encoded signature and returns it.
  Throws an `ArgumentError` if it fails.
  """
  @spec decode!(encoded :: binary) :: binary
  def decode!(encoded) when is_binary(encoded) do
    case decode(encoded) do
      {:ok, key} ->
        key

      {:error, _} ->
        raise ArgumentError, "invalid signature input: #{encoded}"
    end
  end

  @doc """
  Checks to see if a transaction's signature is valid.
  Returns `{:ok, signature}` if it is, and an error tuple if it isn't.
  """
  @spec check(binary) :: {:ok, binary} | {:error, :invalid_signature}
  def check(signature)
  # A valid signature is exactly 64 bytes.
  def check(<<signature::binary-64>>), do: {:ok, signature}
  def check(_), do: {:error, :invalid_signature}

  @doc """
  Encodes a `t:Solana.Transaction.t/0` into a [binary
  format](https://docs.solana.com/developing/programming-model/transactions#anatomy-of-a-transaction)
  Returns `{:ok, encoded_transaction}` if the transaction was successfully
  encoded, or an error tuple if the encoding failed -- plus more error details
  via `Logger.error/1`.
  """
  @spec to_binary(tx :: t) :: {:ok, binary()} | {:error, encoding_err()}
  def to_binary(%__MODULE__{payer: nil}), do: {:error, :no_payer}
  def to_binary(%__MODULE__{blockhash: nil}), do: {:error, :no_blockhash}
  def to_binary(%__MODULE__{instructions: []}), do: {:error, :no_instructions}

  def to_binary(tx = %__MODULE__{instructions: ixs, signers: signers}) do
    with {:ok, ixs} <- check_instructions(List.flatten(ixs)),
         accounts = compile_accounts(ixs, tx.payer),
         true <- signers_match?(accounts, signers) do
      message = encode_message(accounts, tx.blockhash, ixs)

      # Signatures must appear in the same order as their accounts appear in
      # the compiled account list.
      signatures =
        signers
        |> reorder_signers(accounts)
        |> Enum.map(&sign(&1, message))
        |> CompactArray.to_iolist()

      {:ok, :erlang.list_to_binary([signatures, message])}
    else
      {:error, :no_program, idx} ->
        Logger.error("Missing program id on instruction at index #{idx}")
        {:error, :no_program}

      {:error, message, idx} ->
        Logger.error("error compiling instruction at index #{idx}: #{inspect(message)}")
        {:error, message}

      false ->
        {:error, :mismatched_signers}
    end
  end

  # Validates each instruction: it must not itself be an error tuple and it
  # must carry a program id. On failure, returns the index of the offender.
  defp check_instructions(ixs) do
    ixs
    |> Enum.with_index()
    |> Enum.reduce_while({:ok, ixs}, fn
      {{:error, message}, idx}, _ -> {:halt, {:error, message, idx}}
      {%{program: nil}, idx}, _ -> {:halt, {:error, :no_program, idx}}
      _, acc -> {:cont, acc}
    end)
  end

  # Compiles the deduplicated account list: each instruction's program id plus
  # its accounts, sorted so signer/writable accounts come first, with the
  # payer prepended as a writable signer.
  # https://docs.solana.com/developing/programming-model/transactions#account-addresses-format
  defp compile_accounts(ixs, payer) do
    ixs
    |> Enum.map(fn ix -> [%Account{key: ix.program} | ix.accounts] end)
    |> List.flatten()
    |> Enum.reject(&(&1.key == payer))
    |> Enum.sort_by(&{&1.signer?, &1.writable?}, &>=/2)
    |> Enum.uniq_by(& &1.key)
    |> cons(%Account{writable?: true, signer?: true, key: payer})
  end

  # Prepends `item` to `list` (pipe-friendly cons).
  defp cons(list, item), do: [item | list]

  # True when the set of signer account keys exactly equals the set of
  # public keys in the provided keypairs.
  defp signers_match?(accounts, signers) do
    expected = MapSet.new(Enum.map(signers, &elem(&1, 1)))

    accounts
    |> Enum.filter(& &1.signer?)
    |> Enum.map(& &1.key)
    |> MapSet.new()
    |> MapSet.equal?(expected)
  end

  # Builds the transaction message: header, account keys, recent blockhash,
  # then the compiled instructions.
  # https://docs.solana.com/developing/programming-model/transactions#message-format
  defp encode_message(accounts, blockhash, ixs) do
    [
      create_header(accounts),
      CompactArray.to_iolist(Enum.map(accounts, & &1.key)),
      blockhash,
      CompactArray.to_iolist(encode_instructions(ixs, accounts))
    ]
    |> :erlang.list_to_binary()
  end

  # Header is three counts: total signers, read-only signers, and read-only
  # non-signers, derived from the compiled account flags.
  # https://docs.solana.com/developing/programming-model/transactions#message-header-format
  defp create_header(accounts) do
    accounts
    |> Enum.reduce(
      {0, 0, 0},
      &{
        unary(&1.signer?) + elem(&2, 0),
        unary(&1.signer? && !&1.writable?) + elem(&2, 1),
        unary(!&1.signer? && !&1.writable?) + elem(&2, 2)
      }
    )
    |> Tuple.to_list()
  end

  # 1 for truthy, 0 for falsy — used to count boolean account flags.
  defp unary(result?), do: if(result?, do: 1, else: 0)

  # Encodes each instruction as its program's account index, a compact array
  # of account indices, and the instruction data.
  # https://docs.solana.com/developing/programming-model/transactions#instruction-format
  defp encode_instructions(ixs, accounts) do
    idxs = index_accounts(accounts)

    Enum.map(ixs, fn ix = %Instruction{} ->
      [
        Map.get(idxs, ix.program),
        CompactArray.to_iolist(Enum.map(ix.accounts, &Map.get(idxs, &1.key))),
        CompactArray.to_iolist(ix.data)
      ]
    end)
  end

  # Sorts signer keypairs by their account's position in the compiled list.
  defp reorder_signers(signers, accounts) do
    account_idxs = index_accounts(accounts)
    Enum.sort_by(signers, &Map.get(account_idxs, elem(&1, 1)))
  end

  # Maps each account key to its index in the compiled account list.
  defp index_accounts(accounts) do
    Enum.into(Enum.with_index(accounts, &{&1.key, &2}), %{})
  end

  # Ed25519-signs `message` with the given {secret, public_key} pair.
  defp sign({secret, pk}, message), do: Ed25519.signature(message, secret, pk)

  @doc """
  Parses a `t:Solana.Transaction.t/0` from data encoded in Solana's [binary
  format](https://docs.solana.com/developing/programming-model/transactions#anatomy-of-a-transaction)
  Returns `{transaction, extras}` if the transaction was successfully
  parsed, or `:error` if the provided binary could not be parsed. `extras`
  is a keyword list containing information about the encoded transaction,
  namely:
  - `:header` - the [transaction message
  header](https://docs.solana.com/developing/programming-model/transactions#message-header-format)
  - `:accounts` - an [ordered array of
  accounts](https://docs.solana.com/developing/programming-model/transactions#account-addresses-format)
  - `:signatures` - a [list of signed copies of the transaction
  message](https://docs.solana.com/developing/programming-model/transactions#signatures)
  """
  @spec parse(encoded :: binary) :: {t(), keyword} | :error
  def parse(encoded) do
    with {signatures, message, _} <- CompactArray.decode_and_split(encoded, 64),
         <<header::binary-size(3), contents::binary>> <- message,
         {account_keys, hash_and_ixs, key_count} <- CompactArray.decode_and_split(contents, 32),
         <<blockhash::binary-size(32), ix_data::binary>> <- hash_and_ixs,
         {:ok, instructions} <- extract_instructions(ix_data) do
      tx_accounts = derive_accounts(account_keys, key_count, header)
      # Maps position -> %Account{} so instruction indices can be resolved.
      indices = Enum.into(Enum.with_index(tx_accounts, &{&2, &1}), %{})

      {
        %__MODULE__{
          payer: tx_accounts |> List.first() |> Map.get(:key),
          blockhash: blockhash,
          instructions:
            Enum.map(instructions, fn {program, accounts, data} ->
              %Instruction{
                data: if(data == "", do: nil, else: :binary.list_to_bin(data)),
                program: Map.get(indices, program) |> Map.get(:key),
                accounts: Enum.map(accounts, &Map.get(indices, &1))
              }
            end)
        },
        [
          accounts: tx_accounts,
          header: header,
          signatures: signatures
        ]
      }
    else
      _ -> :error
    end
  end

  # Decodes the compact-array instruction count, then pulls off that many
  # instructions; succeeds only when the binary is fully consumed.
  defp extract_instructions(data) do
    with {ix_data, ix_count} <- CompactArray.decode_and_split(data),
         {reversed_ixs, ""} <- extract_instructions(ix_data, ix_count) do
      {:ok, Enum.reverse(reversed_ixs)}
    else
      error -> error
    end
  end

  # Accumulates `count` instructions off the front of `data`; the result list
  # is in reverse order (prepend + reverse pattern).
  defp extract_instructions(data, count) do
    Enum.reduce_while(1..count, {[], data}, fn _, {acc, raw} ->
      case extract_instruction(raw) do
        {ix, rest} -> {:cont, {[ix | acc], rest}}
        _ -> {:halt, :error}
      end
    end)
  end

  # One instruction = program index byte, compact array of account index
  # bytes, then the instruction data.
  defp extract_instruction(raw) do
    with <<program::8, rest::binary>> <- raw,
         {accounts, rest, _} <- CompactArray.decode_and_split(rest, 1),
         {data, rest, _} <- extract_instruction_data(rest) do
      {{program, Enum.map(accounts, &:binary.decode_unsigned/1), data}, rest}
    else
      _ -> :error
    end
  end

  # Empty remainder means no instruction data.
  defp extract_instruction_data(""), do: {"", "", 0}
  defp extract_instruction_data(raw), do: CompactArray.decode_and_split(raw, 1)

  # Reconstructs account flags (signer?/writable?) from the message header
  # counts: the first `signers_count` keys are signers, the trailing
  # read-only counts split each group into writable vs read-only.
  defp derive_accounts(keys, total, header) do
    <<signers_count::8, signers_readonly_count::8, nonsigners_readonly_count::8>> = header
    {signers, nonsigners} = Enum.split(keys, signers_count)
    {signers_write, signers_read} = Enum.split(signers, signers_count - signers_readonly_count)

    {nonsigners_write, nonsigners_read} =
      Enum.split(nonsigners, total - signers_count - nonsigners_readonly_count)

    List.flatten([
      Enum.map(signers_write, &%Account{key: &1, writable?: true, signer?: true}),
      Enum.map(signers_read, &%Account{key: &1, signer?: true}),
      Enum.map(nonsigners_write, &%Account{key: &1, writable?: true}),
      Enum.map(nonsigners_read, &%Account{key: &1})
    ])
  end
end
|
lib/solana/tx.ex
| 0.861858
| 0.427397
|
tx.ex
|
starcoder
|
defmodule Harald.HCI.ArrayedData do
  @moduledoc """
  Serialization functions for arrayed data.
  > Arrayed parameters are specified using the following notation: ParameterA[i]. If more than one
  > set of arrayed parameters are specified (e.g. ParameterA[i], ParameterB[i]), then, unless
  > noted otherwise, the order of the parameters are as follows: ParameterA[0], ParameterB[0],
  > ParameterA[1], ParameterB[1], ParameterA[2], ParameterB[2], ... ParameterA[n], ParameterB[n]
  Reference: Version 5.0, Vol 2, Part E, 5.2
  Both `serialize/2` and `deserialize/4` rely on a schema to function. A schema is a keyword list
  where each key is a field and each value shall be:
  - a positive integer when denoting the size in bits of the field
  - a three-tuple when the field itself represents the length of a subsequent variable length
    field
  - an atom when the field is variable length and a preceding field represents its length
  For example if `length_data` itself was 8 bits and represented the length of `data` that would
  be written as:
  ```
  [
    length_data: {:variable, :data, 8},
    data: :length_data
  ]
  ```
  """

  @type field :: atom()

  @type field_size ::
          pos_integer()
          | {:variable, atom(), pos_integer()}
          | atom()

  @type schema :: [{field(), field_size()}, ...]

  @doc """
  Serializes a list of `structs` into their binary representation according to `schema`.
  """
  def serialize(schema, structs) do
    # Index the structs 1..length so each schema-field pass can address them
    # positionally; the element count is emitted first as a single byte.
    data =
      structs
      |> Enum.with_index()
      |> Map.new(fn {map, index} -> {index + 1, map} end)

    length = length(structs)

    serialize(schema, <<length>>, %{
      data: data,
      field: nil,
      field_size: nil,
      index: 1,
      length: length
    })
  end

  @doc """
  Deserializes the binary representation of a list of structs according to `schema`.

  Returns `{:ok, structs}` on success or `{:error, bin}` when the binary is
  too short for `length` elements.
  """
  def deserialize(schema, length, struct_module, bin)

  def deserialize(_, length, _, <<>> = bin) when length > 0, do: {:error, bin}

  def deserialize(schema, length, struct_module, bin) do
    schema
    |> deserialize(bin, %{
      data: init_data(length, struct(struct_module)),
      field: nil,
      field_size: nil,
      index: 1,
      length: length,
      variable: %{}
    })
    |> case do
      {:ok, _} = ret -> ret
      {:error, :incomplete} -> {:error, bin}
    end
  end

  # pull a tuple off the schema - recursion base case
  defp serialize([], bin, %{field: nil}), do: {:ok, bin}

  # pull a tuple off the schema - defining variable lengths
  defp serialize([{field, {:variable, _, _} = field_size} | schema], bin, %{field: nil} = state) do
    serialize(schema, bin, %{state | field: field, field_size: field_size})
  end

  # pull a tuple off the schema
  defp serialize([{field, field_size} | schema], bin, %{field: nil} = state) do
    serialize(schema, bin, %{state | field: field, field_size: field_size})
  end

  # put data on the binary - writing variable lengths (the byte size of the
  # target field is written, not the field's own value)
  defp serialize(
         schema,
         bin,
         %{field_size: {:variable, field_target, field_size}, index: index, length: length} =
           state
       )
       when index <= length do
    target_length = byte_size(Map.fetch!(state.data[index], field_target))
    bin = <<bin::binary, target_length::little-size(field_size)>>
    serialize(schema, bin, %{state | index: index + 1})
  end

  # put data on the binary - writing variable length targets (raw binary copy)
  defp serialize(schema, bin, %{field_size: variable_key, index: index, length: length} = state)
       when index <= length and is_atom(variable_key) do
    bin = <<bin::binary, Map.fetch!(state.data[index], state.field)::binary>>
    serialize(schema, bin, %{state | index: index + 1})
  end

  # put data on the binary (fixed-size little-endian integer field)
  defp serialize(schema, bin, %{field_size: field_size, index: index, length: length} = state)
       when index <= length do
    bin =
      <<bin::binary, Map.fetch!(state.data[index], state.field)::integer-little-size(field_size)>>

    serialize(schema, bin, %{state | index: index + 1})
  end

  # field completed - reset cursor state and move to the next schema entry
  defp serialize(schema, bin, state) do
    serialize(schema, bin, %{state | field: nil, field_size: nil, index: 1})
  end

  # Pre-fills the 1..length index map with the (empty) struct value.
  defp init_data(length, value) do
    Enum.reduce(1..length, %{}, fn index, acc -> Map.put(acc, index, value) end)
  end

  # pull a tuple off the schema - recursion base case
  defp deserialize([], _bin, %{field: nil} = state) do
    {:ok, for(index <- 1..state.length, do: state.data[index])}
  end

  # pull a tuple off the schema - defining variable lengths
  defp deserialize([{field, {:variable, _, _} = field_size} | schema], bin, %{field: nil} = state) do
    variable = Map.put(state.variable, field, [])
    deserialize(schema, bin, %{state | field: field, field_size: field_size, variable: variable})
  end

  # pull a tuple off the schema
  defp deserialize([{field, field_size} | schema], bin, %{field: nil} = state) do
    deserialize(schema, bin, %{state | field: field, field_size: field_size})
  end

  # pull data off the binary - reading variable lengths, but the binary is
  # exhausted before all elements were read
  defp deserialize(
         _schema,
         <<>>,
         %{field_size: {:variable, _, _field_size}, index: index, length: length}
       )
       when index <= length do
    {:error, :incomplete}
  end

  # pull data off the binary - reading variable lengths
  #
  # BUG FIX: this clause was previously defined twice, verbatim; the second
  # copy could never match (the compiler warns "this clause cannot match").
  # The unreachable duplicate has been removed; behavior is unchanged.
  defp deserialize(
         schema,
         bin,
         %{field_size: {:variable, _, field_size}, index: index, length: length} = state
       )
       when index <= length do
    <<parameter::little-size(field_size), bin::binary>> = bin
    variable = Map.update!(state.variable, state.field, &[parameter | &1])
    deserialize(schema, bin, %{state | index: index + 1, variable: variable})
  end

  # pull data off the binary - reading variable length targets; consumes one
  # previously-read length per element
  defp deserialize(schema, bin, %{field_size: variable_key, index: index, length: length} = state)
       when index <= length and is_atom(variable_key) do
    {field_size, variable} =
      Map.get_and_update!(state.variable, variable_key, fn [field_size | rest] ->
        {field_size, rest}
      end)

    case bin do
      <<parameter::binary-size(field_size), bin::binary>> ->
        data = Map.update!(state.data, index, &%{&1 | state.field => parameter})
        deserialize(schema, bin, %{state | data: data, index: index + 1, variable: variable})

      _ ->
        {:error, :incomplete}
    end
  end

  # pull data off the binary (fixed-size little-endian integer field)
  defp deserialize(schema, bin, %{field_size: field_size, index: index, length: length} = state)
       when index <= length do
    case bin do
      <<parameter::little-size(field_size), bin::binary>> ->
        data = Map.update!(state.data, index, &%{&1 | state.field => parameter})
        deserialize(schema, bin, %{state | data: data, index: index + 1})

      _ ->
        {:error, :incomplete}
    end
  end

  # field completed - defining variable lengths; lengths were accumulated in
  # reverse, restore original order before they are consumed
  defp deserialize(schema, bin, %{field_size: {:variable, _, _}} = state) do
    variable = Map.update!(state.variable, state.field, &Enum.reverse(&1))
    deserialize(schema, bin, %{state | field: nil, field_size: nil, index: 1, variable: variable})
  end

  # field completed - reset cursor state and move to the next schema entry
  defp deserialize(schema, bin, state) do
    deserialize(schema, bin, %{state | field: nil, field_size: nil, index: 1})
  end
end
|
lib/harald/hci/arrayed_data.ex
| 0.91054
| 0.946892
|
arrayed_data.ex
|
starcoder
|
defmodule DataDaemon.Extensions.DataDog do
  @moduledoc false
  import DataDaemon.Util, only: [config: 5, iso8601: 1]

  # Optional event fields, appended to the event payload in this order.
  @event_fields ~w(
    timestamp
    hostname
    aggregation_key
    priority
    source_type_name
    alert_type
  )a

  # Builds the event payload iodata by folding each optional field from
  # `opts` onto the event `text`.
  @doc false
  @spec build_event(String.t(), Keyword.t()) :: iodata
  def build_event(text, opts \\ []),
    do: Enum.reduce(@event_fields, text, &add_event_opt(&1, opts[&1], &2))

  # Appends one "|<tag>:<value>" field to the event iodata; `{:system, env}`
  # values are resolved from the environment first, and nil values either
  # skip the field or apply a default tag (priority/alert_type).
  @spec add_event_opt(atom, any, iodata) :: iodata
  defp add_event_opt(opt, {:system, env}, event),
    do: add_event_opt(opt, System.get_env(env), event)

  defp add_event_opt(:timestamp, value, event), do: [event, "|d:", iso8601(value)]
  defp add_event_opt(:hostname, nil, event), do: event
  defp add_event_opt(:hostname, hostname, event), do: [event, "|h:", hostname]
  defp add_event_opt(:aggregation_key, nil, event), do: event
  defp add_event_opt(:aggregation_key, key, event), do: [event, "|k:", key]
  defp add_event_opt(:priority, nil, event), do: [event, "|p:normal"]
  defp add_event_opt(:priority, priority, event), do: [event, "|p:", to_string(priority)]
  defp add_event_opt(:source_type_name, nil, event), do: event
  defp add_event_opt(:source_type_name, name, event), do: [event, "|s:", name]
  defp add_event_opt(:alert_type, nil, event), do: [event, "|t:info"]
  defp add_event_opt(:alert_type, type, event), do: [event, "|t:", to_string(type)]

  # This extension supervises nothing of its own.
  @doc false
  @spec child_spec(module, Keyword.t()) :: false
  def child_spec(_daemon, _opts \\ []), do: false

  # Installs the error-report handler when `:error_handler` is enabled via
  # opts or application config. A bare `true` defaults the level to :info.
  @doc false
  @spec init(module, Keyword.t()) :: :ok
  def init(daemon, opts \\ []) do
    handler =
      opts[:error_handler] ||
        Keyword.get(Application.get_env(opts[:otp_app], daemon, []), :error_handler, false)

    if handler do
      level = if(handler === true, do: :info, else: handler)

      # NOTE(review): the handler is registered both as an :error_logger
      # report handler and as a Logger backend — presumably to catch reports
      # from both paths; confirm this does not double-report on some OTP
      # versions.
      :error_logger.add_report_handler(
        DataDaemon.Extensions.DataDog.ErrorHandler,
        {daemon, level}
      )

      Logger.add_backend({DataDaemon.Extensions.DataDog.ErrorHandler, {daemon, level}})
    end

    :ok
  end

  # Injects `distribution/3` and `event/3` into the using module; `tags`
  # resolved at compile time from opts/app config are merged into every event.
  defmacro __using__(opts \\ []) do
    tags = config(opts, opts[:otp_app], __CALLER__.module, :tags, [])

    quote location: :keep do
      @doc """
      Distribution tracks the statistical distribution of a set of values across your infrastructure.
      The value for the given metric key needs to be an integer.
      ## Example
      ```elixir
      iex> #{inspect(__MODULE__)}.distribution("connections", 123)
      :ok
      iex> #{inspect(__MODULE__)}.distribution("connections", 123, zone: "us-east-1a")
      :ok
      ```
      """
      @spec distribution(DataDaemon.key(), integer, Keyword.t()) :: :ok | {:error, atom}
      def distribution(key, value, opts \\ []), do: metric(key, value, "d", opts)

      import DataDaemon.Extensions.DataDog, only: [build_event: 2]

      @doc """
      Create an event for the DataDog event stream.
      The event consist of a string title and text,
      the latter can be multi-line.
      ## Options
      | **Option** | **Description** |
      |--------------------------------|-------------------------------------------------------------------------------------------|
      | `:timestamp` (optional) | Add a timestamp to the event. Default is the current timestamp. |
      | `:hostname` (optional) | Add a hostname to the event. No default. |
      | `:aggregation_key` (optional) | Add an aggregation key to group the event with others that have the same key. No default. |
      | `:priority` (optional) | Set to `:normal` or `:low`. Default `:normal`. |
      | `:source_type_name` (optional) | Add a source type to the event. No default. |
      | `:alert_type` (optional) | Set to `:error`, `:warning`, `:info` or `:success`. Default `:info`. |
      ## Example
      ```elixir
      iex> #{inspect(__MODULE__)}.event("Event Title", "Event body.\\nMore details")
      :ok
      iex> #{inspect(__MODULE__)}.event("Event Title", "Event body.\\nMore details", zone: "us-east-1a")
      :ok
      ```
      """
      @spec event(String.t(), String.t(), Keyword.t()) :: :ok | {:error, atom}
      def event(title, text, opts \\ []) do
        # Newlines must be escaped inside the datagram payload.
        text = String.replace(text, "\n", "\\n")

        # NOTE(review): String.length/1 counts graphemes; if the DataDog
        # datagram format expects byte lengths here, non-ASCII titles/bodies
        # would produce wrong `_e{...}` headers — confirm against the
        # DogStatsD event datagram spec.
        send_metric(
          "_e{#{String.length(title)},#{String.length(text)}}",
          title,
          build_event(text, opts),
          unquote(
            if tags == [],
              do: quote(do: opts),
              else: quote(do: Keyword.update(opts, :tags, unquote(tags), &(unquote(tags) ++ &1)))
          )
        )
      end
    end
  end

  defmodule ErrorHandler do
    @moduledoc false
    @behaviour :gen_event

    # Gregorian seconds at the Unix epoch (1970-01-01T00:00:00),
    # used to convert :calendar timestamps to Unix time.
    @unix_g 62_167_219_200
    @ignored ~w(info_report)a

    @doc false
    @impl :gen_event
    def init({_, {module, level}}) do
      {:ok, hostname} = :inet.gethostname()
      {:ok, %{module: module, hostname: hostname, level: level}}
    end

    @doc false
    @impl :gen_event
    def handle_event(event, state)
    def handle_event(:flush, state), do: {:ok, state}
    # Ignore events originating from remote nodes.
    def handle_event({_level, gl, _event}, state) when node(gl) != node(), do: {:ok, state}
    def handle_event({level, _gl, _event}, state) when level in @ignored, do: {:ok, state}

    def handle_event(
          {level, _gl, {Logger, message, timestamp, meta}},
          state = %{module: module, hostname: hostname, level: min_level}
        ) do
      # Only report messages at or above the configured minimum level.
      unless Logger.compare_levels(min_level, level) == :gt do
        level = translate_level(level)
        {{year, month, day}, {hour, minute, second, _millisecond}} = timestamp
        message = if is_list(message), do: :erlang.iolist_to_binary(message), else: message

        ts =
          :calendar.datetime_to_gregorian_seconds({{year, month, day}, {hour, minute, second}}) -
            @unix_g

        # First line becomes the event title; any remainder plus metadata
        # becomes the event body.
        case String.split(message, "\n", parts: 2, trim: true) do
          [title] ->
            report(module, level, title, inspect(meta), ts, hostname)

          [title, message] ->
            report(module, level, title, message <> "\n\n" <> inspect(meta), ts, hostname)
        end
      end

      {:ok, state}
    end

    def handle_event({_level, _gl, _event}, state) do
      # Erlang errors, implement at later point
      {:ok, state}
    end

    # Forwards one log entry to the daemon module's event/3.
    @spec report(
            atom,
            :error | :info | :debug | :warn,
            String.t(),
            String.t(),
            integer,
            String.t()
          ) :: atom
    defp report(module, level, title, message, timestamp, hostname) do
      module.event(title, message,
        timestamp: timestamp,
        alert_type: level,
        hostname: hostname
      )
    end

    # Maps Logger levels onto DataDog alert types.
    @spec translate_level(atom) :: atom
    defp translate_level(:error), do: :error
    defp translate_level(:info), do: :info
    defp translate_level(:debug), do: :info
    defp translate_level(:warn), do: :warning

    @doc false
    @impl :gen_event
    def handle_call(request, _state), do: exit({:bad_call, request})

    @doc false
    @impl :gen_event
    def handle_info(_message, state), do: {:ok, state}

    @doc false
    @impl :gen_event
    def terminate(_reason, _state), do: :ok

    @doc false
    @impl :gen_event
    def code_change(_old_vsn, state, _extra), do: {:ok, state}
  end
end
|
lib/data_daemon/extensions/data_dog.ex
| 0.867654
| 0.582729
|
data_dog.ex
|
starcoder
|
defmodule P1 do
  @moduledoc """
  ## Examples
      iex> P1.solve(3, 100, 3, [1, 2, 1], [2, 3, 3], [10, 90, 10], [10, 10, 50])
      20
      iex> P1.solve(3, 100, 3, [1, 2, 1], [2, 3, 3], [1, 100, 10], [10, 10, 50])
      50
      iex> P1.solve(10, 10, 19,
      ...> [1, 1, 2, 4, 5, 1, 3, 4, 6, 4, 6, 4, 5, 7, 8, 2, 3, 4, 9],
      ...> [3, 5, 5, 5, 6, 7, 7, 7, 7, 8, 8, 9, 9, 9, 9, 10, 10, 10, 10],
      ...> [8, 6, 8, 7, 6, 6, 9, 9, 7, 6, 9, 7, 7, 8, 7, 6, 6, 8, 6],
      ...> [8, 9, 10, 4, 10, 3, 5, 9, 3, 4, 1, 8, 3, 1, 3, 6, 6, 10, 4]
      ...>)
      -1
  """
  alias Matrix
  alias BinaryTupleHeap, as: Heap

  # Reads the problem input from stdin and prints the answer.
  def main do
    n = IO.read(:line) |> String.trim() |> String.to_integer()
    c = IO.read(:line) |> String.trim() |> String.to_integer()
    v = IO.read(:line) |> String.trim() |> String.to_integer()
    sn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
    tn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
    yn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
    mn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
    solve(n, c, v, sn, tn, yn, mn) |> IO.puts()
  end

  @doc """
  Finds the fastest route from town 1 to town `n` whose total fare stays
  within the budget `c`.

  * `n`  - goal town number (start is town 1)
  * `c`  - maximum total fare (yen)
  * `v`  - number of roads
  * `sv` - road start towns
  * `tv` - road end towns
  * `yv` - fare to travel from `si` to `ti`
  * `mv` - time units to travel from `si` to `ti`

  Returns the minimum travel time, or -1 when no route satisfies the
  constraints.
  """
  def solve(n, c, v, sv, tv, yv, mv)

  def solve(n, c, v, sv, tv, yv, mv)
      when 2 <= n and n <= 50 and
             1 <= c and c <= 300 and
             length(sv) == v and
             length(tv) == v and
             length(yv) == v and
             length(mv) == v do
    # Reject inputs whose travel times fall outside the allowed 1..1000 range.
    with [] <- Enum.filter(mv, &(not (1 <= &1 and &1 <= 1000))) do
      # Order heap entries by time first, then fare, so the first admissible
      # goal entry popped is the fastest route.
      fn {_ln, ly, lm}, {_rn, ry, rm} ->
        lm < rm || (lm == rm && ly < ry)
      end
      |> Heap.new()
      |> Heap.push({1, 0, 0})
      |> do_solve(n, c, build_cost_map(sv, tv, yv, mv))
    else
      _ ->
        -1
    end
  end

  def solve(_n, _c, _v, _sv, _tv, _yv, _mv), do: -1

  # Best-first search over {town, fare_so_far, time_so_far} heap entries:
  # stop when the top entry is the goal within budget; otherwise pop it and
  # push every outgoing edge.
  defp do_solve(heap, goal, max_cost, cost_map) do
    with false <- Heap.empty?(heap),
         {g, y, t} when g == goal and y <= max_cost <- Heap.top(heap) do
      t
    else
      true ->
        -1

      nil ->
        -1

      {current_node, current_yen, current_time} ->
        heap = Heap.pop(heap)

        cost_map
        |> Map.get(current_node)
        |> case do
          nil ->
            heap

          map ->
            map
            |> Map.keys()
            |> Enum.reduce(heap, fn next_node, heap ->
              Matrix.get(cost_map, [current_node, next_node])
              |> Enum.reduce(heap, fn {yen, time}, heap ->
                Heap.push(heap, {next_node, current_yen + yen, current_time + time})
              end)
            end)
        end
        |> do_solve(goal, max_cost, cost_map)
    end
  end

  # Builds a nested map town -> town -> [{fare, time}] from the road lists.
  def build_cost_map(sv, tv, yv, mv), do: do_build_cost_map(sv, tv, yv, mv)

  defp do_build_cost_map(sv, tv, yv, mv, map \\ %{})

  defp do_build_cost_map([], [], [], [], map), do: map

  defp do_build_cost_map([s | sv], [t | tv], [y | yv], [m | mv], map) do
    if s < t do
      with nil <- Matrix.get(map, [s, t]) do
        do_build_cost_map(sv, tv, yv, mv, Matrix.put(map, [s, t], [{y, m}]))
      else
        list ->
          do_build_cost_map(sv, tv, yv, mv, Matrix.put(map, [s, t], [{y, m} | list]))
      end
    else
      # BUG FIX: the recursive call previously omitted `map`, which re-applied
      # the `\\ %{}` default and silently discarded every edge accumulated so
      # far whenever a road with s >= t appeared in the input.
      do_build_cost_map(sv, tv, yv, mv, map)
    end
  end
end
"""
defmodule Main do
defmodule Matrix do
def put(map, keys, value), do: do_map_put(value, keys, map)
defp do_map_put(value, keys, map)
defp do_map_put(value, [], _), do: value
defp do_map_put(value, [key | tail], nil), do: Map.put(%{}, key, do_map_put(value, tail, Map.get(%{}, key)))
defp do_map_put(value, [key | tail], map), do: Map.put(map, key, do_map_put(value, tail, Map.get(map, key)))
def get(map, key_or_keys)
def get(nil, _key), do: nil
def get(map, [key | []]), do: Map.get(map, key)
def get(map, [key | tail]), do: get(Map.get(map, key), tail)
def get(map, key), do: Map.get(map, key)
end
defmodule Heap do
defstruct data: nil, comparator: nil
def new(comparator), do: %__MODULE__{comparator: comparator}
def empty?(%__MODULE__{data: nil}), do: true
def empty?(%__MODULE__{}), do: false
def size(%__MODULE__{data: nil}), do: 0
def size(%__MODULE__{data: {size, _value, _left, _right}}), do: size
def top(%__MODULE__{data: nil}), do: nil
def top(%__MODULE__{data: {_size, value, _left, _right}}), do: value
def pop(%__MODULE__{data: data, comparator: comp} = heap) do
%{ heap | data: do_pop(comp, data)}
end
defp do_pop(_comparator, nil), do: nil
defp do_pop(comparator, {size, _v0, left, right}) do
with nil <- swap_on_pop(comparator, left, right) do
nil
else
{v1, left, right} ->
{size - 1, v1, do_pop(comparator, left), right}
end
end
defp swap_on_pop(comparator, left, right)
defp swap_on_pop(_comparator, nil, nil), do: nil
defp swap_on_pop(_comparator, left, nil), do: {elem(left, 1), left, nil}
defp swap_on_pop(_comparator, nil, right), do: {elem(right, 1), right, nil}
defp swap_on_pop(comparator, left, right),
do: if comparator.(elem(left, 1), elem(right, 1)),
do: {elem(left, 1), left, right},
else: {elem(right,1), right, left}
def push(%__MODULE__{data: data, comparator: comp} = heap, value) do
%{
heap |
data: do_push(value, comp, data)
}
end
defp do_push(value, comparator, data \\ nil)
defp do_push(v0, _comparator, nil), do: {1, v0, nil, nil}
defp do_push(v0, comparator, {size, v1, nil, nil}) do
{v0, v1} = swap_on_push(v0, v1, comparator)
{size + 1, v0, do_push(v1, comparator), nil}
end
defp do_push(v0, comparator, {size, v1, left, nil}) do
{v0, v1} = swap_on_push(v0, v1, comparator)
{size + 1, v0, left, do_push(v1, comparator)}
end
defp do_push(v0, comparator, {size, v1, nil, right}) do
{v0, v1} = swap_on_push(v0, v1, comparator)
{size + 1, v0, do_push(v1, comparator), right}
end
defp do_push(v0, comparator, {size, v1, {ls, _, _, _} = left, {rs, _, _, _} = right}) do
{v0, v1} = swap_on_push(v0, v1, comparator)
if rs < ls do
{size + 1, v0, left, do_push(v1, comparator, right)}
else
{size + 1, v0, do_push(v1, comparator, left), right}
end
end
defp swap_on_push(v0, v1, comparator) do
if comparator.(v0, v1) do
{v0, v1}
else
{v1, v0}
end
end
end
def main do
n = IO.read(:line) |> String.trim() |> String.to_integer()
c = IO.read(:line) |> String.trim() |> String.to_integer()
v = IO.read(:line) |> String.trim() |> String.to_integer()
sn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
tn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
yn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
mn = IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
solve(n, c, v, sn, tn, yn, mn) |> IO.puts()
end
def solve(n, c, _v, sv, tv, yv, mv) do
fn {_ln, ly, lm}, {_rn, ry, rm} ->
lm < rm || (lm == rm && ly < ry)
end
|> Heap.new()
|> Heap.push({1, 0, 0})
|> do_solve(n, c, build_cost_map(sv, tv, yv, mv))
end
defp do_solve(heap, goal, max_cost, cost_map) do
with false <- Heap.empty?(heap),
{g, y, t} when g == goal and y <= max_cost <- Heap.top(heap) do
t
else
true ->
-1
nil ->
-1
{current_node, current_yen, current_time} ->
heap = Heap.pop(heap)
cost_map
|> Map.get(current_node)
|> case do
nil ->
heap
map ->
map
|> Map.keys()
|> Enum.reduce(heap, fn next_node, heap ->
Matrix.get(cost_map, [current_node, next_node])
|> Enum.reduce(heap, fn {yen, time}, heap ->
Heap.push(heap, {next_node, current_yen + yen, current_time + time})
end)
end)
end
|> do_solve(goal, max_cost, cost_map)
end
end
def build_cost_map(sv, tv, yv, mv), do: do_build_cost_map(sv, tv, yv, mv)
defp do_build_cost_map(sv, tv, yv, mv, map \\ %{})
defp do_build_cost_map([], [], [], [], map), do: map
defp do_build_cost_map([s | sv], [t | tv], [y | yv], [m | mv], map) do
if s < t do
with nil <- Matrix.get(map, [s, t]) do
do_build_cost_map(sv, tv, yv, mv, Matrix.put(map, [s, t], [{y, m}]))
else
list ->
do_build_cost_map(sv, tv, yv, mv, Matrix.put(map, [s, t], [{y, m} | list]))
end
else
do_build_cost_map(sv, tv, yv, mv)
end
end
end
defmodule Main do
  @moduledoc """
  Reads an edge list from stdin and prints the minimum travel time from
  station 1 to station `n` whose total fare does not exceed `c`, or -1 when
  no such route exists.
  """

  defmodule Matrix do
    @moduledoc "Helpers for nested maps addressed by a list of keys."

    @doc "Stores `value` under the nested key path `keys`, creating intermediate maps."
    def put(map, keys, value), do: do_map_put(value, keys, map)

    defp do_map_put(value, keys, map)
    defp do_map_put(value, [], _), do: value

    # No map exists at this level yet: build one on the fly.
    defp do_map_put(value, [key | tail], nil),
      do: Map.put(%{}, key, do_map_put(value, tail, nil))

    defp do_map_put(value, [key | tail], map),
      do: Map.put(map, key, do_map_put(value, tail, Map.get(map, key)))

    @doc "Fetches the value under a nested key path (or a single key); nil when absent."
    def get(map, key_or_keys)
    def get(nil, _key), do: nil
    def get(map, [key | []]), do: Map.get(map, key)
    def get(map, [key | tail]), do: get(Map.get(map, key), tail)
    def get(map, key), do: Map.get(map, key)
  end

  defmodule Heap do
    @moduledoc """
    Minimal binary heap ordered by a user-supplied comparator:
    `comparator.(a, b)` returns true when `a` should sit above `b`.
    Nodes are `{subtree_size, value, left, right}` tuples.
    """

    defstruct data: nil, comparator: nil

    def new(comparator), do: %__MODULE__{comparator: comparator}

    def empty?(%__MODULE__{data: nil}), do: true
    def empty?(%__MODULE__{}), do: false

    def size(%__MODULE__{data: nil}), do: 0
    def size(%__MODULE__{data: {size, _value, _left, _right}}), do: size

    def top(%__MODULE__{data: nil}), do: nil
    def top(%__MODULE__{data: {_size, value, _left, _right}}), do: value

    def pop(%__MODULE__{data: data, comparator: comp} = heap) do
      %{heap | data: do_pop(comp, data)}
    end

    defp do_pop(_comparator, nil), do: nil

    defp do_pop(comparator, {size, _v0, left, right}) do
      case swap_on_pop(comparator, left, right) do
        nil -> nil
        {v1, winner, other} -> {size - 1, v1, do_pop(comparator, winner), other}
      end
    end

    # Chooses which child's value bubbles up on pop; nil when both are empty.
    defp swap_on_pop(comparator, left, right)
    defp swap_on_pop(_comparator, nil, nil), do: nil
    defp swap_on_pop(_comparator, left, nil), do: {elem(left, 1), left, nil}
    defp swap_on_pop(_comparator, nil, right), do: {elem(right, 1), right, nil}

    defp swap_on_pop(comparator, left, right) do
      if comparator.(elem(left, 1), elem(right, 1)) do
        {elem(left, 1), left, right}
      else
        {elem(right, 1), right, left}
      end
    end

    def push(%__MODULE__{data: data, comparator: comp} = heap, value) do
      %{heap | data: do_push(value, comp, data)}
    end

    defp do_push(value, comparator, data \\ nil)
    defp do_push(v0, _comparator, nil), do: {1, v0, nil, nil}

    defp do_push(v0, comparator, {size, v1, nil, nil}) do
      {v0, v1} = swap_on_push(v0, v1, comparator)
      {size + 1, v0, do_push(v1, comparator), nil}
    end

    defp do_push(v0, comparator, {size, v1, left, nil}) do
      {v0, v1} = swap_on_push(v0, v1, comparator)
      {size + 1, v0, left, do_push(v1, comparator)}
    end

    defp do_push(v0, comparator, {size, v1, nil, right}) do
      {v0, v1} = swap_on_push(v0, v1, comparator)
      {size + 1, v0, do_push(v1, comparator), right}
    end

    # Both children present: descend into the smaller subtree to stay balanced.
    defp do_push(v0, comparator, {size, v1, {ls, _, _, _} = left, {rs, _, _, _} = right}) do
      {v0, v1} = swap_on_push(v0, v1, comparator)

      if rs < ls do
        {size + 1, v0, left, do_push(v1, comparator, right)}
      else
        {size + 1, v0, do_push(v1, comparator, left), right}
      end
    end

    # Orders the incoming value against the current node's value.
    defp swap_on_push(v0, v1, comparator) do
      if comparator.(v0, v1), do: {v0, v1}, else: {v1, v0}
    end
  end

  @doc "Entry point: reads the problem from stdin and prints the answer."
  def main do
    n = IO.read(:line) |> String.trim() |> String.to_integer()
    c = IO.read(:line) |> String.trim() |> String.to_integer()
    v = IO.read(:line) |> String.trim() |> String.to_integer()
    sn = read_int_list()
    tn = read_int_list()
    yn = read_int_list()
    mn = read_int_list()
    solve(n, c, v, sn, tn, yn, mn) |> IO.puts()
  end

  # Reads one stdin line as a space-separated list of integers.
  defp read_int_list do
    IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
  end

  @doc """
  Finds the minimum travel time from station 1 to station `n` with total fare
  at most `c`; returns -1 when no such route exists.
  """
  def solve(n, c, _v, sv, tv, yv, mv) do
    # Order states by time, tie-broken by fare, so the first goal state found
    # within the fare cap is time-optimal.
    fn {_ln, ly, lm}, {_rn, ry, rm} ->
      lm < rm || (lm == rm && ly < ry)
    end
    |> Heap.new()
    |> Heap.push({1, 0, 0})
    |> do_solve(n, c, build_cost_map(sv, tv, yv, mv))
  end

  # Pops states until the goal is on top with fare within max_cost; expands
  # each intermediate state into its successor states.
  defp do_solve(heap, goal, max_cost, cost_map) do
    with false <- Heap.empty?(heap),
         {g, y, t} when g == goal and y <= max_cost <- Heap.top(heap) do
      t
    else
      # Heap drained: goal unreachable within the fare cap.
      true ->
        -1

      # Defensive: top/1 returns nil only for an empty heap.
      nil ->
        -1

      {current_node, current_yen, current_time} ->
        heap = Heap.pop(heap)

        cost_map
        |> Map.get(current_node)
        |> case do
          nil ->
            heap

          map ->
            map
            |> Map.keys()
            |> Enum.reduce(heap, fn next_node, heap ->
              Matrix.get(cost_map, [current_node, next_node])
              |> Enum.reduce(heap, fn {yen, time}, heap ->
                Heap.push(heap, {next_node, current_yen + yen, current_time + time})
              end)
            end)
        end
        |> do_solve(goal, max_cost, cost_map)
    end
  end

  @doc """
  Builds the nested edge map node -> node -> [{fare, time}] from the four
  parallel lists. Only edges with s < t are stored; parallel edges accumulate.
  """
  def build_cost_map(sv, tv, yv, mv), do: do_build_cost_map(sv, tv, yv, mv)

  defp do_build_cost_map(sv, tv, yv, mv, map \\ %{})
  defp do_build_cost_map([], [], [], [], map), do: map

  defp do_build_cost_map([s | sv], [t | tv], [y | yv], [m | mv], map) do
    if s < t do
      entry =
        case Matrix.get(map, [s, t]) do
          nil -> [{y, m}]
          list -> [{y, m} | list]
        end

      do_build_cost_map(sv, tv, yv, mv, Matrix.put(map, [s, t], entry))
    else
      # BUG FIX: the original recursed via do_build_cost_map/4 here, which
      # re-applied the %{} default and discarded every edge collected so far.
      do_build_cost_map(sv, tv, yv, mv, map)
    end
  end
end
"""
|
lib/100/p1.ex
| 0.64646
| 0.541954
|
p1.ex
|
starcoder
|
defmodule Chess.Move.Parse do
  @moduledoc """
  Module for parsing moves
  """

  alias Chess.Game

  # The parsing helpers live inside a __using__ macro so they are injected as
  # private functions into each module that does `use Chess.Move.Parse`.
  defmacro __using__(_opts) do
    quote do
      # Castling ("0-0" / "0-0-0") is forbidden while the active side's king
      # is in check.
      defp do_parse_move(%Game{status: "check", check: check}, move, active)
           when check != active and (move == "0-0" or move == "0-0-0"),
           do: {:error, "Your king is under attack, castling is forbidden"}

      # Castling: translate the notation into the king's [from, to] squares.
      defp do_parse_move(_, move, active) do
        [
          define_kings_from(active),
          define_kings_to(active, move)
        ]
      end

      # Ordinary move: validate the "e2-e4"-style string, then split it into
      # source and destination; {:error, _} tuples pass through unchanged.
      defp do_parse_move(_, move, _) do
        result = check_move_format(move)

        cond do
          is_binary(result) -> String.split(result, "-")
          true -> result
        end
      end

      # King's starting square ("w" = white; anything else = black).
      defp define_kings_from("w"), do: "e1"
      defp define_kings_from(_), do: "e8"

      # King's castling destination per side and castling kind.
      defp define_kings_to("w", "0-0"), do: "g1"
      defp define_kings_to("w", "0-0-0"), do: "c1"
      defp define_kings_to("b", "0-0"), do: "g8"
      defp define_kings_to("b", "0-0-0"), do: "c8"

      # A legal move string is exactly 5 characters, e.g. "e2-e4".
      defp check_move_format(move) do
        cond do
          String.length(move) != 5 -> {:error, "Invalid move format"}
          true -> check_move_squares(move)
        end
      end

      # Checks that both squares exist on the board. Indices 0/1 are the
      # source file/rank and 3/4 the destination (2 is the separator).
      # NOTE(review): assumes coordinates/1 — defined in the using module —
      # yields the move's individual characters; confirm against callers.
      defp check_move_squares(move) do
        splitted_move = coordinates(move)

        cond do
          Enum.find(Chess.x_fields, fn x -> x == Enum.at(splitted_move, 0) end) == nil ->
            {:error, "There is no such square on the board"}

          Enum.find(Chess.y_fields, fn x -> x == Enum.at(splitted_move, 1) end) == nil ->
            {:error, "There is no such square on the board"}

          Enum.find(Chess.x_fields, fn x -> x == Enum.at(splitted_move, 3) end) == nil ->
            {:error, "There is no such square on the board"}

          Enum.find(Chess.y_fields, fn x -> x == Enum.at(splitted_move, 4) end) == nil ->
            {:error, "There is no such square on the board"}

          true ->
            move
        end
      end
    end
  end
end
|
lib/chess/move/parse.ex
| 0.701202
| 0.409516
|
parse.ex
|
starcoder
|
defmodule GatherSubmissions.DOMjudge.Connection do
  @moduledoc """
  Defines a struct containing connection information to a server, and provides basic functions for
  issuing authorized GET requests.
  This information consists in the URL of the DOMjudge server (`endpoint`), and
  login data (`username` and `password` fields).
  """

  @type t :: %__MODULE__{
          endpoint: String.t(),
          username: String.t() | nil,
          password: String.t() | nil
        }

  defstruct [:endpoint, :username, :password]

  defmodule UnauthorizedError do
    defexception message: "DOMjudge request failed: Unauthorized"
  end

  defmodule ConnectionError do
    defexception [:url, :status_code]

    @impl true
    def message(exception) do
      "DOMjudge request failed: received #{exception.status_code} on url: #{exception.url}"
    end
  end

  @doc """
  Creates a connection to the server given in the `url` parameter.
  """
  @spec create(String.t()) :: t()
  def create(url) do
    %__MODULE__{endpoint: url}
  end

  @doc """
  Extends the connection with login info.
  """
  @spec with_authorization(t(), String.t(), String.t()) :: t()
  def with_authorization(conn, username, password) do
    %__MODULE__{conn | username: username, password: password}
  end

  @doc """
  Issues a GET request with the given `query_params`, and parses the result, which is
  expected to be in JSON format.
  It raises an `Connection.UnauthorizedError` exception when the servers return a 401 code.
  It raises an `Connection.ConnectionError` exception when the servers return a non-2xx
  code other than 401.
  """
  @spec get(t(), String.t(), Keyword.t()) :: any()
  def get(%__MODULE__{} = conn, url, query_params \\ []) do
    query_string = if query_params == [], do: "", else: "?" <> URI.encode_query(query_params)
    headers = [accept: "application/json"] |> with_auth_header(conn)
    full_url = conn.endpoint <> url <> query_string
    response = HTTPoison.get!(full_url, headers)

    case response.status_code do
      # BUG FIX: the original matched only 200 and guarded the error clause
      # with `not (x >= 200 and x < 300)`, so any other 2xx code (201, 204,
      # ...) matched no clause and crashed with a CaseClauseError. Treat
      # every 2xx as success, as the error-clause guard already implied.
      code when code in 200..299 ->
        Jason.decode!(response.body)

      401 ->
        raise UnauthorizedError

      _other ->
        raise ConnectionError, url: full_url, status_code: response.status_code
    end
  end

  # No credentials: send the headers unchanged.
  defp with_auth_header(headers, %__MODULE__{username: nil}) do
    headers
  end

  # Adds an HTTP Basic Authorization header built from username:password.
  defp with_auth_header(headers, %__MODULE__{username: username, password: password}) do
    encoded_login = "#{username}:#{password}" |> Base.encode64()

    headers
    |> Keyword.put(:authorization, "Basic #{encoded_login}")
  end
end
|
lib/domjudge/connection.ex
| 0.741674
| 0.57341
|
connection.ex
|
starcoder
|
defmodule DSMR do
  @moduledoc """
  A library for parsing Dutch Smart Meter Requirements (DSMR) telegram data.
  """

  alias DSMR.Telegram

  defmodule ParseError do
    @type t() :: %__MODULE__{}
    defexception [:message]
  end

  @doc """
  Parses telegram data from a string and returns a struct.
  If the telegram is parsed successfully, this function returns `{:ok, telegram}`
  where `telegram` is a `DSMR.Telegram` struct. If the parsing fails, this
  function returns `{:error, parse_error}` where `parse_error` is a `DSMR.ParseError` struct.
  You can use `raise/1` with that struct or `Exception.message/1` to turn it into a string.
  """
  @spec parse(String.t()) :: {:ok, Telegram.t()} | {:error, ParseError.t()}
  def parse(string) do
    case DSMR.Parser.telegram_parser(string) do
      # Only a fully-consumed input (empty rest) counts as a successful parse.
      {:ok, parsed, "", _, _, _} ->
        build_telegram(parsed)

      _failure ->
        {:error, %ParseError{message: "Could not parse #{inspect(string)}."}}
    end
  end

  @doc """
  Parses telegram data from a string and raises if the data cannot be parsed.
  This function behaves exactly like `parse/1`, but returns the telegram directly
  if parsed successfully or raises a `DSMR.ParseError` exception otherwise.
  """
  @spec parse!(String.t()) :: Telegram.t()
  def parse!(string) do
    string
    |> parse()
    |> case do
      {:ok, telegram} -> telegram
      {:error, %ParseError{} = error} -> raise error
    end
  end

  # Folds every parsed line into an initially-empty Telegram struct.
  defp build_telegram(parsed) do
    {:ok, Enum.reduce(parsed, %Telegram{}, &apply_line/2)}
  end

  # Header line: stored on the telegram directly.
  defp apply_line({:header, header}, telegram),
    do: %{telegram | header: Telegram.Header.new(header)}

  # The three M-Bus OBIS shapes are grouped into per-channel MBus entries.
  defp apply_line({:cosem, [{:obis, [0, channel, 24, 1, 0]} | _value] = mbus}, telegram),
    do: append_mbus(telegram, channel, mbus)

  defp apply_line({:cosem, [{:obis, [0, channel, 96, 1, 0]} | _value] = mbus}, telegram),
    do: append_mbus(telegram, channel, mbus)

  defp apply_line({:cosem, [{:obis, [0, channel, 24, 2, 1]} | _values] = mbus}, telegram),
    do: append_mbus(telegram, channel, mbus)

  # Any other COSEM object is appended as plain data.
  defp apply_line({:cosem, cosem}, telegram),
    do: append_cosem(telegram, cosem)

  # Footer line: carries the checksum.
  defp apply_line({:footer, checksum}, telegram),
    do: %{telegram | checksum: Telegram.Checksum.new(checksum)}

  defp append_cosem(telegram, cosem),
    do: %{telegram | data: telegram.data ++ [Telegram.COSEM.new(cosem)]}

  # Merges the new M-Bus object into an existing entry for the same channel,
  # or appends a fresh entry when the channel has not been seen yet.
  defp append_mbus(telegram, channel, cosem) do
    fresh = Telegram.MBus.new(channel, cosem)

    case find_mbus_index(telegram.data, channel) do
      nil ->
        %{telegram | data: telegram.data ++ [fresh]}

      index ->
        existing = Enum.fetch!(telegram.data, index)
        merged = %{existing | data: existing.data ++ fresh.data}
        %{telegram | data: List.replace_at(telegram.data, index, merged)}
    end
  end

  # Position of the MBus entry for `channel` in the data list, or nil.
  defp find_mbus_index(entries, channel) when is_list(entries),
    do: Enum.find_index(entries, &find_mbus_index(&1, channel))

  defp find_mbus_index(%Telegram.MBus{channel: ch}, channel), do: ch == channel
  defp find_mbus_index(_entry, _channel), do: false
end
|
lib/dsmr.ex
| 0.812459
| 0.494263
|
dsmr.ex
|
starcoder
|
defmodule ExUnit.ClusteredCase.Cluster.Partition do
  @moduledoc false

  alias ExUnit.ClusteredCase.Cluster.PartitionChange

  # A partition request: a desired partition count, a list of per-partition
  # sizes, or an explicit list of per-partition node-name groups.
  @type opts ::
          pos_integer
          | [pos_integer]
          | [[node]]

  # A resolved spec: a list of partitions, each a list of nodes.
  @type t :: [[node]]

  @doc """
  Given a list of nodes and partitioning options, generates a partition spec,
  which is a list of partitions, each of which is a list of nodes.
  """
  @spec new([node], opts) :: t
  def new(nodes, nil),
    do: new(nodes, 1)

  def new(_nodes, 0),
    do: {:error, {:invalid_partition_spec, :invalid_partition_count}}

  # Integer `n`: split into `n` roughly equal partitions; remainder nodes are
  # folded into the last partition.
  def new(nodes, n) when is_integer(n) do
    # Despite its name this is the chunk size: nodes per partition.
    partition_count = div(length(nodes), n)

    if partition_count > 0 do
      # Break up into sized chunks
      parts = Enum.chunk_every(nodes, partition_count)
      # Split into the desired number of partitions, and overflow chunks
      {sized, overflow} = Enum.split(parts, n)
      # Split the correctly sized partitions to give us a partition to which
      # we can join all the overflow chunks, giving us one extra large partition
      {sized, overflow_part} = Enum.split(sized, n - 1)
      # Join the overflow and add back to the sized partitions list
      case List.flatten([overflow_part | overflow]) do
        [] ->
          # No overflow
          sized

        flattened ->
          sized ++ [flattened]
      end
    else
      # Oversized, so make one big partition
      new(nodes, 1)
    end
  end

  # List of integers: each entry is the desired size of one partition.
  def new(nodes, [n | _] = spec) when is_integer(n) do
    partition_by_count(nodes, spec)
  end

  # List of lists: each entry names one partition's members; the spec must
  # mention every node exactly once.
  def new(nodes, [n | _] = spec) when is_list(n) do
    ln = length(nodes)
    lsn = length(List.flatten(spec))

    cond do
      ln > lsn ->
        {:error, {:invalid_partition_spec, :underspecified}}

      ln < lsn ->
        {:error, {:invalid_partition_spec, :duplicate_memberships}}

      :else ->
        partition_by_name(nodes, spec)
    end
  end

  # Consumes `spec` size-by-size, taking that many nodes per partition.
  defp partition_by_count(nodes, spec),
    do: partition_by_count(nodes, spec, [])

  defp partition_by_count([], [], acc),
    do: Enum.reverse(acc)

  defp partition_by_count([], spec, _acc),
    do: {:error, {:invalid_partition_spec, {:too_many_partitions, spec}}}

  defp partition_by_count(_nodes, [], _acc),
    do: {:error, {:invalid_partition_spec, :underspecified}}

  defp partition_by_count(nodes, [count | spec], acc) do
    {gathered, nodes} = Enum.split(nodes, count)
    partition_by_count(nodes, spec, [gathered | acc])
  end

  # Consumes `spec` group-by-group, gathering the nodes named in each group.
  defp partition_by_name(nodes, spec),
    do: partition_by_name(nodes, spec, [])

  defp partition_by_name([], [], acc),
    do: Enum.reverse(acc)

  defp partition_by_name(nodes, [part | spec], acc) do
    {gathered, nodes} = Enum.split_with(nodes, fn n -> n in part end)
    partition_by_name(nodes, spec, [gathered | acc])
  end

  @doc """
  Given a list of nodes, current partition spec, and a new partition spec,
  this function calculates the difference between the old spec and the new spec,
  and returns a `PartitionChange` struct which defines how to modify the current
  set of partitions to match the new partition spec.
  """
  @spec partition([node], t, t) :: PartitionChange.t()
  def partition(nodes, old_spec, new_spec)

  def partition(nodes, nil, nil) do
    # No partitions, no change in spec, connect all nodes
    connects =
      nodes
      |> Enum.map(fn n -> {n, nodes -- [n]} end)
      |> Map.new()

    PartitionChange.new([nodes], connects, %{})
  end

  def partition(nodes, nil, new_spec) do
    # No partitions, initial partitioning, form all partitions
    connects =
      nodes
      |> Enum.map(fn n ->
        # Connect each node to the other members of its new partition.
        {n, Enum.find(new_spec, fn ns -> n in ns end) -- [n]}
      end)
      |> Enum.reject(fn
        {_n, []} -> true
        _ -> false
      end)
      |> Map.new()

    PartitionChange.new(new_spec, connects, %{})
  end

  def partition(_nodes, old_spec, old_spec) do
    # Already partitioned, no change in spec
    PartitionChange.new(old_spec, %{}, %{})
  end

  def partition(nodes, old_spec, new_spec) do
    # Already patitioned, change in spec.
    # Model each spec as a digraph whose edges are intra-partition
    # connections, then diff the two graphs per node.
    dg1 = :digraph.new()
    # For each partition
    for p <- old_spec do
      # Add the nodes of this partition as vertices in a graph
      for n <- p do
        :digraph.add_vertex(dg1, n)
      end

      # Then add edges between all nodes in the partition representing their connections
      for n1 <- p do
        for n2 <- p, n2 != n1 do
          :digraph.add_edge(dg1, n1, n2)
        end
      end
    end

    # Same for new partition spec
    dg2 = :digraph.new()

    for p <- new_spec do
      for n <- p do
        :digraph.add_vertex(dg2, n)
      end

      for n1 <- p do
        for n2 <- p, n2 != n1 do
          :digraph.add_edge(dg2, n1, n2)
        end
      end
    end

    # For each node, apply changes: edges present only in the new graph are
    # connections to make; edges present only in the old graph are
    # connections to sever.
    ops =
      for n <- nodes do
        old_outgoing = dg1 |> :digraph.out_neighbours(n) |> MapSet.new()
        new_outgoing = dg2 |> :digraph.out_neighbours(n) |> MapSet.new()
        connects = MapSet.difference(new_outgoing, old_outgoing)
        disconnects = MapSet.difference(old_outgoing, new_outgoing)
        {n, MapSet.to_list(connects), MapSet.to_list(disconnects)}
      end

    connects =
      ops
      |> Enum.map(fn {n, connects, _} -> {n, connects} end)
      |> Enum.reject(fn
        {_n, []} -> true
        _ -> false
      end)
      |> Map.new()

    disconnects =
      ops
      |> Enum.map(fn {n, _, disconnects} -> {n, disconnects} end)
      |> Enum.reject(fn
        {_n, []} -> true
        _ -> false
      end)
      |> Map.new()

    PartitionChange.new(new_spec, connects, disconnects)
  end
end
|
lib/cluster/partition.ex
| 0.778018
| 0.507873
|
partition.ex
|
starcoder
|
defmodule TextBasedFPS.RoomPlayer do
  @moduledoc """
  In-room state for a single player: position, facing direction, health,
  ammo (a `{loaded, unloaded}` pair) and kill/death counters.
  """

  alias TextBasedFPS.{Direction, Player, RoomPlayer}

  @max_health 100
  @max_loaded_ammo 8
  @max_unloaded_ammo 24

  @type t :: %TextBasedFPS.RoomPlayer{
          player_key: Player.key_t(),
          coordinates: TextBasedFPS.GameMap.Coordinates.t() | nil,
          direction: Direction.t() | nil,
          health: non_neg_integer,
          ammo: {non_neg_integer, non_neg_integer},
          kills: non_neg_integer,
          killed: non_neg_integer
        }

  defstruct [:player_key, :coordinates, :direction, :health, :ammo, :kills, :killed]

  @doc "Builds the initial (dead, unarmed, unplaced) state for the given key."
  @spec new(Player.key_t()) :: t
  def new(player_key) do
    struct!(RoomPlayer,
      player_key: player_key,
      coordinates: nil,
      direction: nil,
      health: 0,
      ammo: {0, 0},
      kills: 0,
      killed: 0
    )
  end

  @doc "A player is dead exactly when health has reached zero."
  @spec dead?(t) :: boolean
  def dead?(player), do: player.health == 0

  @doc "Bumps the given counter field (e.g. `:kills`) by one."
  @spec increment(t, atom) :: t
  def increment(player, field), do: Map.update!(player, field, &(&1 + 1))

  @doc "Decrements a field; `:ammo` spends one loaded round (never below zero)."
  @spec decrement(t, atom) :: t
  def decrement(%{ammo: {loaded, unloaded}} = player, :ammo) do
    %{player | ammo: {max(loaded - 1, 0), unloaded}}
  end

  def decrement(player, field), do: Map.update!(player, field, &(&1 - 1))

  @doc "Adds reserve rounds, clamped at the unloaded-ammo cap."
  @spec add_ammo(t, non_neg_integer) :: t
  def add_ammo(%{ammo: {loaded, unloaded}} = player, amount) do
    %{player | ammo: {loaded, min(unloaded + amount, @max_unloaded_ammo)}}
  end

  @doc "Restores health, clamped at the health cap."
  @spec heal(t, non_neg_integer) :: t
  def heal(player, amount) do
    %{player | health: min(player.health + amount, @max_health)}
  end

  @doc """
  Moves reserve rounds into the magazine. Returns `{:no_ammo, player}` when
  the reserve is empty, `{:full, player}` when the magazine is already full,
  and `{:reloaded, player}` otherwise.
  """
  @spec reload_gun(t) :: {:reloaded, t} | {:full, t} | {:no_ammo, t}
  def reload_gun(%{ammo: {_, 0}} = player), do: {:no_ammo, player}

  def reload_gun(%{ammo: {loaded, unloaded}} = player) do
    case min(@max_loaded_ammo - loaded, unloaded) do
      0 ->
        {:full, player}

      refill ->
        {:reloaded, %{player | ammo: {loaded + refill, unloaded - refill}}}
    end
  end

  @doc "Formats the ammo pair as \"loaded/unloaded\"."
  @spec display_ammo(t) :: String.t()
  def display_ammo(%{ammo: {loaded, unloaded}}), do: "#{loaded}/#{unloaded}"

  @spec max_health() :: non_neg_integer
  def max_health, do: @max_health

  @spec max_loaded_ammo() :: non_neg_integer
  def max_loaded_ammo, do: @max_loaded_ammo

  @spec max_unloaded_ammo() :: non_neg_integer
  def max_unloaded_ammo, do: @max_unloaded_ammo
end
|
lib/text_based_fps/room_player.ex
| 0.704262
| 0.408572
|
room_player.ex
|
starcoder
|
defmodule Piton.Port do
  @moduledoc """
  `Piton.Port` is a `GenServer` which will be on charge of the Python Port.
  It is prepared to be the base of your own Port.
  ## Make your own Port
  ```elixir
  defmodule MyPort do
  use Piton.PythonPort
  # rest of the code if it is need.
  end
  ```
  The arguments has to be in a Keyword List and it has to contain:
  path: Path to the folder where the python scripts are.
  python: python executable
  If your port is going to run in a `Piton.Pool` (highly recommended) it has to have a *start()* function
  and it has not to be linked.
  ```elixir
  defmodule MyPoolPort do
  use Piton.Port
  def start(), do: MyPoolPort.start([path: Path.expand("python_folder"), python: "python"], [])
  def fun(pid, n), do: MyPoolPort.execute(pid, :functions, :fun, [n])
  end
  ```
  ## Run a Python code using directly the port (no pool)
  ```elixir
  iex> MyPort.execute(pid_of_the_port, python_module, python_function, list_of_arguments_of_python_function)
  ```
  """

  # All of the GenServer machinery is injected into the using module, so each
  # `use Piton.Port` module becomes its own GenServer wrapping a Python port.
  defmacro __using__(_) do
    quote do
      use GenServer

      # Default GenServer.call timeout (ms) used by execute/5.
      @timeout 5000

      def start_link(args, opts) do
        GenServer.start_link(__MODULE__, args, opts)
      end

      # Unlinked variant — used when the port runs under a Piton.Pool.
      def start(args, opts) do
        GenServer.start(__MODULE__, args, opts)
      end

      @doc """
      It will return the erl port
      """
      @spec get_port(pid) :: pid
      def get_port(pid), do: GenServer.call(pid, :get_python_port)

      @doc """
      It will execute the arguments in the given function of the given module using the given port.
      """
      @spec execute(pid, atom, atom, list, timeout) :: any
      def execute(pid, python_module, python_function, python_arguments, timeout \\ @timeout) do
        GenServer.call(pid, {:execute, python_module, python_function, python_arguments}, timeout)
      end

      # Starts the erlport Python interpreter with the configured script path
      # and executable; both must be charlists for the Erlang side.
      def init(path: path, python: python) do
        {:ok, py_port} =
          :python.start([{:python_path, to_charlist(path)}, {:python, to_charlist(python)}])

        # Link the Python port process to this server so that if either side
        # dies the other is taken down with it.
        Process.link(py_port)
        {:ok, %{py_port: py_port}}
      end

      def handle_call(:get_python_port, _from, state) do
        {:reply, state[:py_port], state}
      end

      # Synchronous Python call: the reply is whatever the Python function returns.
      def handle_call({:execute, python_module, python_function, python_arguments}, _from, state) do
        result =
          Piton.PythonFunctions.call(
            state[:py_port],
            python_module,
            python_function,
            python_arguments
          )

        {:reply, result, state}
      end

      # Ignore unexpected casts and messages rather than crashing the port.
      def handle_cast(_msg, state) do
        {:noreply, state}
      end

      def handle_info(_msg, state) do
        {:noreply, state}
      end
    end
  end
end
|
lib/piton/port.ex
| 0.768125
| 0.795102
|
port.ex
|
starcoder
|
defmodule Stripe.Accounts do
  @moduledoc """
  Functions for working with accounts at Stripe. Through this API you can:
  * create an account,
  * get an account,
  * delete an account,
  * delete all accounts,
  * list accounts,
  * list all accounts,
  * count accounts.
  Stripe API reference: https://stripe.com/docs/api/curl#account_object
  """

  @endpoint "accounts"

  @doc """
  Create an account.
  Creates an account using params.
  Returns created account.
  At the bare minimum, to create and connect to a managed account, simply set
  managed to true in the creation request, and provide a country. The country,
  once set, cannot be changed.
  https://stripe.com/docs/connect/managed-accounts
  ## Examples
      params = [
        email: "<EMAIL>",
        managed: true,
        ...
      ]
      {:ok, account} = Stripe.Accounts.create(params)
  """
  def create(params) do
    create(params, Stripe.config_or_env_key)
  end

  @doc """
  Create an account. Accepts Stripe API key.
  Creates an account given the account params.
  Returns created account.
  # Example
      {:ok, account} = Stripe.Account.create(params, "my_key")
  """
  def create(params, key) do
    Stripe.make_request_with_key(:post, @endpoint, key, params)
    |> Stripe.Util.handle_stripe_response()
  end

  @doc """
  Get an account.
  Gets an account using account ID.
  ## Examples
      {:ok, account} = Stripe.Accounts.get("account_id")
  """
  def get(id) do
    get(id, Stripe.config_or_env_key)
  end

  @doc """
  Get an account. Accepts Stripe API key.
  Gets an account using account ID.
  ## Examples
      {:ok, account} = Stripe.Accounts.get("account_id", "my_key")
  """
  def get(id, key) do
    Stripe.make_request_with_key(:get, "#{@endpoint}/#{id}", key)
    |> Stripe.Util.handle_stripe_response()
  end

  # Stripe's maximum page size for list endpoints.
  @max_fetch_size 100

  @doc """
  List all accounts.
  Lists all accounts.
  Accepts the following parameters:
  * `accum` - a list to start accumulating accounts to (optional; defaults to `[]`).,
  * `starting_after` - an offset (optional; defaults to `""`).
  Returns `{:ok, accounts}` tuple.
  ## Examples
      {:ok, accounts} = Stripe.Accounts.all([], 5)
  """
  def all(accum \\ [], starting_after \\ "") do
    all(Stripe.config_or_env_key, accum, starting_after)
  end

  @doc """
  List all accounts. Accepts Stripe API key.
  Lists all accounts.
  Accepts the following parameters:
  * `accum` - a list to start accumulating accounts to (optional; defaults to `[]`).,
  * `starting_after` - an offset (optional; defaults to `""`).
  Returns `{:ok, accounts}` tuple.
  ## Examples
      {:ok, accounts} = Stripe.Accounts.all("my_key", [], 5)
  """
  def all(key, accum, starting_after) do
    case Stripe.Util.list_raw("#{@endpoint}", key, @max_fetch_size, starting_after) do
      {:ok, resp} ->
        case resp[:has_more] do
          true ->
            # More pages remain: recurse with the last seen id as the cursor.
            last_sub = List.last(resp[:data])
            all(key, resp[:data] ++ accum, last_sub["id"])

          false ->
            result = resp[:data] ++ accum
            {:ok, result}
        end

      {:error, err} ->
        raise err
    end
  end

  @doc """
  Delete an account.
  Deletes an account given the account ID.
  Returns a `{:ok, account}` tuple.
  ## Examples
      {:ok, deleted_account} = Stripe.Accounts.delete("account_id")
  """
  def delete(id) do
    delete(id, Stripe.config_or_env_key)
  end

  @doc """
  Delete an account. Accepts Stripe API key.
  Deletes an account given the account ID.
  Returns a `{:ok, account}` tuple.
  ## Examples
      {:ok, deleted_account} = Stripe.Accounts.delete("account_id", "my_key")
  """
  def delete(id, key) do
    Stripe.make_request_with_key(:delete, "#{@endpoint}/#{id}", key)
    |> Stripe.Util.handle_stripe_response()
  end

  @doc """
  Delete all accounts.
  Deletes all accounts.
  Returns `:ok` atom.
  ## Examples
      Stripe.Accounts.delete_all()
  """
  def delete_all do
    delete_all(Stripe.config_or_env_key)
  end

  @doc """
  Delete all accounts. Accepts Stripe API key.
  Deletes all accounts.
  Returns `:ok` atom.
  ## Examples
      Stripe.Accounts.delete_all("my_key")
  """
  def delete_all(key) do
    # BUG FIX: the original listed accounts via all/0, which silently used the
    # env/config key even though the caller supplied `key`, so listing and
    # deleting could hit different Stripe accounts. List with the same key.
    case all(key, [], "") do
      {:ok, accounts} ->
        Enum.each(accounts, fn c -> delete(c["id"], key) end)

      {:error, err} ->
        raise err
    end
  end

  @doc """
  Get total number of accounts.
  Gets total number of accounts.
  Returns `{:ok, count}` tuple.
  ## Examples
      {:ok, count} = Stripe.Accounts.count()
  """
  def count do
    Stripe.Util.count("#{@endpoint}")
  end

  @doc """
  Get total number of accounts. Accepts Stripe API key.
  Gets total number of accounts.
  Returns `{:ok, count}` tuple.
  ## Examples
      {:ok, count} = Stripe.Accounts.count("my_key")
  """
  def count(key) do
    Stripe.Util.count("#{@endpoint}", key)
  end

  @doc """
  Get a list of accounts.
  Gets a list of accounts.
  Accepts the following parameters:
  * `limit` - a limit of items to be returned (optional; defaults to 10).
  Returns a `{:ok, accounts}` tuple, where `accounts` is a list of accounts.
  ## Examples
      {:ok, accounts} = Stripe.Accounts.list() # Get a list of 10 accounts
      {:ok, accounts} = Stripe.Accounts.list(20) # Get a list of 20 accounts
  """
  def list(limit \\ 10) do
    list(limit, Stripe.config_or_env_key)
  end

  @doc """
  Get a list of accounts. Accepts Stripe API key.
  Gets a list of accounts.
  Accepts the following parameters:
  * `limit` - a limit of items to be returned (optional; defaults to 10).
  Returns a `{:ok, accounts}` tuple, where `accounts` is a list of accounts.
  ## Examples
      {:ok, accounts} = Stripe.Accounts.list("my_key") # Get a list of 10 accounts
      {:ok, accounts} = Stripe.Accounts.list(20, "my_key") # Get a list of 20 accounts
  """
  def list(limit, key) do
    Stripe.make_request_with_key(:get, "#{@endpoint}?limit=#{limit}", key)
    |> Stripe.Util.handle_stripe_response()
  end
end
|
lib/stripe/accounts.ex
| 0.889697
| 0.442335
|
accounts.ex
|
starcoder
|
defmodule Solana.SPL.TokenSwap do
@moduledoc """
Functions for interacting with Solana's [Token Swap
Program](https://spl.solana.com/token-swap).
"""
alias Solana.{Instruction, Account, SystemProgram}
import Solana.Helpers
@curves [:product, :price, :stable, :offset]
@doc """
The Token Swap Program's ID.
"""
@spec id() :: binary
def id(), do: Solana.pubkey!("SwaPpA9LAaLfeLi3a68M4DjnLqgtticKg6CnyNwgAC8")
@doc """
The size of a serialized token swap account.
"""
@spec byte_size() :: pos_integer
def byte_size(), do: 324
@doc """
Translates the result of a `Solana.RPC.Request.get_account_info/2` into
token swap account information.
"""
@spec from_account_info(info :: map) :: map | :error
def from_account_info(info)
def from_account_info(%{"data" => [data, "base64"]}) do
case Base.decode64(data) do
{:ok, decoded} when byte_size(decoded) == 324 ->
[<<vsn>>, <<init>>, <<seed>>, keys, fees, <<type>>, <<params::integer-size(256)-little>>] =
chunk(decoded, [1, 1, 1, 7 * 32, 8 * 8, 1, 32])
[_, token_a, token_b, pool_mint, mint_a, mint_b, fee_account] = chunk(keys, 32)
[trade_fee, owner_trade_fee, owner_withdraw_fee, host_fee] =
fees
|> chunk(8)
|> Enum.map(fn <<n::integer-size(64)-little>> -> n end)
|> Enum.chunk_every(2)
|> Enum.map(&List.to_tuple/1)
%{
token_a: token_a,
token_b: token_b,
trade_fee: trade_fee,
owner_trade_fee: owner_trade_fee,
owner_withdraw_fee: owner_withdraw_fee,
host_fee: host_fee,
pool_mint: pool_mint,
mint_a: mint_a,
mint_b: mint_b,
fee_account: fee_account,
version: vsn,
initialized?: init == 1,
bump_seed: seed,
curve: {Enum.at(@curves, type), params}
}
_other ->
:error
end
end
def from_account_info(_), do: :error
@doc false
def validate_fee(f = {n, d})
when is_integer(n) and n > 0 and is_integer(d) and d > 0 do
{:ok, f}
end
def validate_fee({_n, 0}), do: {:error, "fee denominator cannot be 0"}
def validate_fee(f), do: {:error, "expected a fee, got: #{inspect(f)}"}
@doc false
def validate_curve({type, params}) when type in @curves do
{:ok, {type, params}}
end
def validate_curve(type) when type in @curves, do: {:ok, {type, 0}}
def validate_curve(c) do
{:error, "expected a curve in #{inspect(@curves)}, got: #{inspect(c)}"}
end
@init_schema [
payer: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The account that will pay for the token swap account creation."
],
balance: [
type: :non_neg_integer,
required: true,
doc: "The lamport balance the token swap account should have."
],
authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token swap account's swap authority"
],
new: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The public key of the newly-created token swap account."
],
token_a: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `A` token account in token swaps. Must be owned by `authority`."
],
token_b: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `B` token account in token swaps. Must be owned by `authority`."
],
pool: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token account which holds outside liquidity and enables A/B trades."
],
pool_mint: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The mint of the `pool`."
],
fee_account: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token account which receives all trading and withdrawal fees."
],
trade_fee: [
type: {:custom, __MODULE__, :validate_fee, []},
default: {0, 1},
doc: """
The `new` swap account's trading fee. Trade fees are extra token amounts
that are held inside the token accounts during a trade, making the value
of liquidity tokens rise.
"""
],
owner_trade_fee: [
type: {:custom, __MODULE__, :validate_fee, []},
default: {0, 1},
doc: """
The `new` swap account's owner trading fee. Owner trading fees are extra
token amounts that are held inside the token accounts during a trade, with
the equivalent in pool tokens minted to the owner of the program.
"""
],
owner_withdraw_fee: [
type: {:custom, __MODULE__, :validate_fee, []},
default: {0, 1},
doc: """
The `new` swap account's owner withdraw fee. Owner withdraw fees are extra
liquidity pool token amounts that are sent to the owner on every
withdrawal.
"""
],
host_fee: [
type: {:custom, __MODULE__, :validate_fee, []},
default: {0, 1},
doc: """
The `new` swap account's host fee. Host fees are a proportion of the
owner trading fees, sent to an extra account provided during the trade.
"""
],
curve: [
type: {:custom, __MODULE__, :validate_curve, []},
required: true,
doc: """
The automated market maker (AMM) curve to use for the `new` token swap account.
Should take the form `{type, params}`. See [the
docs](https://spl.solana.com/token-swap#curves) on which curves are available.
"""
]
]
@doc """
Creates the instructions to initialize a new token swap account.
## Options
#{NimbleOptions.docs(@init_schema)}
"""
def init(opts) do
case validate(opts, @init_schema) do
{:ok, params} ->
[
SystemProgram.create_account(
lamports: params.balance,
space: byte_size(),
from: params.payer,
new: params.new,
program_id: id()
),
initialize_ix(params)
]
error ->
error
end
end
defp initialize_ix(params) do
%Instruction{
program: id(),
accounts: [
%Account{key: params.new, writable?: true},
%Account{key: params.authority},
%Account{key: params.token_a},
%Account{key: params.token_b},
%Account{key: params.pool_mint, writable?: true},
%Account{key: params.fee_account},
%Account{key: params.pool, writable?: true},
%Account{key: Solana.SPL.Token.id()}
],
data: Instruction.encode_data(initialize_data(params))
}
end
defp initialize_data(params = %{curve: {type, parameters}}) do
[
0,
encode_fee(params.trade_fee),
encode_fee(params.owner_trade_fee),
encode_fee(params.owner_withdraw_fee),
encode_fee(params.host_fee),
Enum.find_index(@curves, &(&1 == type)),
{parameters, 32 * 8}
]
|> List.flatten()
end
defp encode_fee({n, d}), do: [{n, 64}, {d, 64}]
# NimbleOptions schema validating the options accepted by `swap/1`.
@swap_schema [
swap: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token swap to use."
],
authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "the `swap` account's swap authority."
],
user_source: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "User's source token account. Must have the same mint as `swap_source`."
],
swap_source: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "`swap` source token account. Must have the same mint as `user_source`."
],
user_destination: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "User's destination token account. Must have the same mint as `swap_destination`."
],
swap_destination: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "`swap` destination token account. Must have the same mint as `user_destination`."
],
pool_mint: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` pool token's mint."
],
fee_account: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token account which receives all trading and withdrawal fees."
],
# Optional: only key without `required: true`; appended to the account list
# by `host_fee_account/1` when present.
host_fee_account: [
type: {:custom, Solana.Key, :check, []},
doc: "Host account to gather fees."
],
user_authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Account delegated to transfer the user's tokens."
],
amount: [
type: :pos_integer,
required: true,
doc: "Amount to transfer from the source account."
],
minimum_return: [
type: :pos_integer,
required: true,
doc: "Minimum number of tokens the user will receive."
]
]
@doc """
Creates the instructions to swap token `A` for token `B` or vice versa.
## Options
#{NimbleOptions.docs(@swap_schema)}
"""
def swap(opts) do
  with {:ok, params} <- validate(opts, @swap_schema) do
    # The optional host fee account, when supplied, is appended last.
    accounts =
      [
        %Account{key: params.swap},
        %Account{key: params.authority},
        %Account{key: params.user_authority, signer?: true},
        %Account{key: params.user_source, writable?: true},
        %Account{key: params.swap_source, writable?: true},
        %Account{key: params.swap_destination, writable?: true},
        %Account{key: params.user_destination, writable?: true},
        %Account{key: params.pool_mint, writable?: true},
        %Account{key: params.fee_account, writable?: true},
        %Account{key: Solana.SPL.Token.id()}
      ] ++ host_fee_account(params)

    %Instruction{
      program: id(),
      accounts: accounts,
      data: Instruction.encode_data([1, {params.amount, 64}, {params.minimum_return, 64}])
    }
  end
end
# Returns the optional host fee account as a (possibly empty) account list,
# so callers can append it unconditionally.
defp host_fee_account(%{host_fee_account: key}), do: [%Account{key: key, writable?: true}]
defp host_fee_account(_params), do: []
# NimbleOptions schema validating the options accepted by `deposit_all/1`.
@deposit_all_schema [
swap: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token swap to use."
],
authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "the `swap` account's swap authority."
],
user_a: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for token `A`."
],
user_b: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for token `B`."
],
swap_a: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` account for token `A`."
],
swap_b: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` account for token `B`."
],
user_pool: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for the pool token. Pool tokens will be deposited here."
],
pool_mint: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` pool token's mint."
],
user_authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Account delegated to transfer the user's tokens."
],
amount_a: [
type: :pos_integer,
required: true,
doc: "Maximum amount of token `A` to deposit."
],
amount_b: [
type: :pos_integer,
required: true,
doc: "Maximum amount of token `B` to deposit."
],
amount_pool: [
type: :pos_integer,
required: true,
doc: "Amount of pool tokens to mint."
]
]
@doc """
Creates the instructions to deposit both `A` and `B` tokens into the pool.
## Options
#{NimbleOptions.docs(@deposit_all_schema)}
"""
def deposit_all(opts) do
  with {:ok, params} <- validate(opts, @deposit_all_schema) do
    # Instruction tag 2 = DepositAllTokenTypes.
    payload = [2, {params.amount_pool, 64}, {params.amount_a, 64}, {params.amount_b, 64}]

    %Instruction{
      program: id(),
      accounts: [
        %Account{key: params.swap},
        %Account{key: params.authority},
        %Account{key: params.user_authority, signer?: true},
        %Account{key: params.user_a, writable?: true},
        %Account{key: params.user_b, writable?: true},
        %Account{key: params.swap_a, writable?: true},
        %Account{key: params.swap_b, writable?: true},
        %Account{key: params.pool_mint, writable?: true},
        %Account{key: params.user_pool, writable?: true},
        %Account{key: Solana.SPL.Token.id()}
      ],
      data: Instruction.encode_data(payload)
    }
  end
end
# NimbleOptions schema validating the options accepted by `withdraw_all/1`.
@withdraw_all_schema [
swap: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token swap to use."
],
authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "the `swap` account's swap authority."
],
user_a: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for token `A`."
],
user_b: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for token `B`."
],
swap_a: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` account for token `A`."
],
swap_b: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` account for token `B`."
],
user_pool: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for the pool token. Pool tokens will be withdrawn from here."
],
pool_mint: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` pool token's mint."
],
user_authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Account delegated to transfer the user's tokens."
],
fee_account: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token account which receives all trading and withdrawal fees."
],
amount_a: [
type: :pos_integer,
required: true,
doc: "Minimum amount of token `A` to withdraw."
],
amount_b: [
type: :pos_integer,
required: true,
doc: "Minimum amount of token `B` to withdraw."
],
amount_pool: [
type: :pos_integer,
required: true,
doc: "Amount of pool tokens to burn."
]
]
@doc """
Creates the instructions to withdraw both `A` and `B` tokens from the pool.
## Options
#{NimbleOptions.docs(@withdraw_all_schema)}
"""
def withdraw_all(opts) do
  with {:ok, params} <- validate(opts, @withdraw_all_schema) do
    # Instruction tag 3 = WithdrawAllTokenTypes.
    payload = [3, {params.amount_pool, 64}, {params.amount_a, 64}, {params.amount_b, 64}]

    %Instruction{
      program: id(),
      accounts: [
        %Account{key: params.swap},
        %Account{key: params.authority},
        %Account{key: params.user_authority, signer?: true},
        %Account{key: params.pool_mint, writable?: true},
        %Account{key: params.user_pool, writable?: true},
        %Account{key: params.swap_a, writable?: true},
        %Account{key: params.swap_b, writable?: true},
        %Account{key: params.user_a, writable?: true},
        %Account{key: params.user_b, writable?: true},
        %Account{key: params.fee_account, writable?: true},
        %Account{key: Solana.SPL.Token.id()}
      ],
      data: Instruction.encode_data(payload)
    }
  end
end
# NimbleOptions schema validating the options accepted by `deposit/1`
# (single-sided deposit of either token `A` or token `B`).
@deposit_schema [
swap: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token swap to use."
],
authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "the `swap` account's swap authority."
],
user_token: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for token `A` or `B`."
],
swap_a: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` account for token `A`."
],
swap_b: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` account for token `B`."
],
user_pool: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for the pool token. Pool tokens will be deposited here."
],
pool_mint: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` pool token's mint."
],
user_authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Account delegated to transfer the user's tokens."
],
amount: [
type: :pos_integer,
required: true,
doc: "Amount of token `A` or `B` to deposit."
],
amount_pool: [
type: :pos_integer,
required: true,
doc: "Minimum amount of pool tokens to mint."
]
]
@doc """
Creates the instructions to deposit `A` or `B` tokens into the pool.
## Options
#{NimbleOptions.docs(@deposit_schema)}
"""
def deposit(opts) do
  with {:ok, params} <- validate(opts, @deposit_schema) do
    # Instruction tag 4 = DepositSingleTokenTypeExactAmountIn.
    %Instruction{
      program: id(),
      accounts: [
        %Account{key: params.swap},
        %Account{key: params.authority},
        %Account{key: params.user_authority, signer?: true},
        %Account{key: params.user_token, writable?: true},
        %Account{key: params.swap_a, writable?: true},
        %Account{key: params.swap_b, writable?: true},
        %Account{key: params.pool_mint, writable?: true},
        %Account{key: params.user_pool, writable?: true},
        %Account{key: Solana.SPL.Token.id()}
      ],
      data: Instruction.encode_data([4, {params.amount, 64}, {params.amount_pool, 64}])
    }
  end
end
# NimbleOptions schema validating the options accepted by `withdraw/1`
# (single-sided withdrawal of either token `A` or token `B`).
@withdraw_schema [
swap: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token swap to use."
],
authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "the `swap` account's swap authority."
],
user_token: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for token `A` or `B`."
],
swap_a: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` account for token `A`."
],
swap_b: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` account for token `B`."
],
user_pool: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The user's account for the pool token. Pool tokens will be withdrawn from here."
],
pool_mint: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The `swap` pool token's mint."
],
user_authority: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "Account delegated to transfer the user's tokens."
],
fee_account: [
type: {:custom, Solana.Key, :check, []},
required: true,
doc: "The token account which receives all trading and withdrawal fees."
],
amount: [
type: :pos_integer,
required: true,
doc: "Amount of token `A` or `B` to withdraw."
],
amount_pool: [
type: :pos_integer,
required: true,
doc: "Maximum amount of pool tokens to burn."
]
]
@doc """
Creates the instructions to withdraw `A` or `B` tokens from the pool.
## Options
#{NimbleOptions.docs(@withdraw_schema)}
"""
def withdraw(opts) do
  with {:ok, params} <- validate(opts, @withdraw_schema) do
    # Instruction tag 5 = WithdrawSingleTokenTypeExactAmountOut.
    %Instruction{
      program: id(),
      accounts: [
        %Account{key: params.swap},
        %Account{key: params.authority},
        %Account{key: params.user_authority, signer?: true},
        %Account{key: params.pool_mint, writable?: true},
        %Account{key: params.user_pool, writable?: true},
        %Account{key: params.swap_a, writable?: true},
        %Account{key: params.swap_b, writable?: true},
        %Account{key: params.user_token, writable?: true},
        %Account{key: params.fee_account, writable?: true},
        %Account{key: Solana.SPL.Token.id()}
      ],
      data: Instruction.encode_data([5, {params.amount, 64}, {params.amount_pool, 64}])
    }
  end
end
end
|
lib/solana/spl/token_swap.ex
| 0.892729
| 0.460895
|
token_swap.ex
|
starcoder
|
defmodule Spandex do
  @moduledoc """
  Provides the entry point for the application, in addition to a standardized
  interface. The functions here call the corresponding functions on the
  configured adapter.
  """
  require Logger
  import Spandex.Adapters.Helpers

  @doc """
  Wraps `body` in a span named `name`.

  When tracing is disabled, evaluates `name` and `body` unchanged. Otherwise,
  starts a span, stores the span id in `Logger` metadata, finishes the span
  after `body` completes, and — if `body` raises — records the error on the
  span and re-raises with the original stacktrace.
  """
  defmacro span(name, do: body) do
    quote do
      if Spandex.disabled?() do
        _ = unquote(name)
        unquote(body)
      else
        name = unquote(name)
        _ = Spandex.start_span(name)
        span_id = Spandex.current_span_id()
        _ = Logger.metadata(span_id: span_id)

        try do
          return_value = unquote(body)
          _ = Spandex.finish_span()
          return_value
        rescue
          exception ->
            _ = Spandex.span_error(exception)
            # `System.stacktrace/0` is deprecated; `__STACKTRACE__` is the
            # supported way to access the stacktrace inside `rescue`.
            reraise exception, __STACKTRACE__
        end
      end
    end
  end

  @doc "Returns `true` when tracing is disabled or no adapter is configured."
  def disabled?() do
    truthy?(Confex.get_env(:spandex, :disabled?)) or
      not truthy?(Confex.get_env(:spandex, :adapter))
  end

  # Everything except `false` and `nil` counts as truthy.
  defp truthy?(value) when value in [false, nil], do: false
  defp truthy?(_other), do: true

  delegate_to_adapter(:start_trace, [name])

  @doc "Starts a trace, records its id in Logger metadata, and applies `attributes`."
  def start_trace(name, attributes) do
    case start_trace(name) do
      {:ok, trace_id} ->
        Logger.metadata(trace_id: trace_id)
        Spandex.update_span(attributes)

      {:error, error} ->
        {:error, error}
    end
  end

  delegate_to_adapter(:start_span, [name])

  @doc "Starts a span, records its id in Logger metadata, and applies `attributes`."
  def start_span(name, attributes) do
    case start_span(name) do
      {:ok, span_id} ->
        Logger.metadata(span_id: span_id)
        Spandex.update_span(attributes)

      {:error, error} ->
        {:error, error}
    end
  end

  delegate_to_adapter(:update_span, [context])
  delegate_to_adapter(:update_top_span, [context])
  delegate_to_adapter(:finish_trace, [])
  delegate_to_adapter(:finish_span, [])
  delegate_to_adapter(:span_error, [error])
  delegate_to_adapter(:continue_trace, [name, trace_id, span_id])
  delegate_to_adapter(:current_trace_id, [])
  delegate_to_adapter(:current_span_id, [])
end
|
lib/spandex.ex
| 0.581422
| 0.452354
|
spandex.ex
|
starcoder
|
defmodule Comb.Naive do
@moduledoc false
# This module implements naive implementations of each function in `Comb`. They
# serve as reference implementations for testing and benchmarking. These
# methods may return an `Enum` instead of a `Stream`.
# Cartesian product as a list of two-element lists [x, y].
def cartesian_product(a, b), do: (for x <-a, y <-b, do: [x, y])
# All k-element combinations. Choosing 0 elements yields the single empty
# combination; choosing from an empty list yields none.
def combinations(_, 0), do: [[]]
def combinations([], _), do: []
def combinations([h|t], k) do
# Either include the head (combined with k-1 picks from the tail) or skip it.
((for l <- combinations(t, k - 1), do: [h|l]) ++ combinations(t, k))
|> Enum.uniq
end
# Catch-all clause for non-list enumerables.
def combinations(enum, k), do: combinations(Enum.to_list(enum), k)
def count_combinations(enum, k), do: combinations(enum, k) |> Enum.count
def count_permutations(enum), do: permutations(enum) |> Enum.count
def count_subsets(enum), do: subsets(enum) |> Enum.count
def drop_permutations(enum, n), do: permutations(enum) |> Enum.drop(n)
def nth_combination(enum, k, n), do: combinations(enum, k) |> Enum.fetch!(n)
def nth_permutation(enum, n), do: permutations(enum) |> Enum.fetch!(n)
# All set partitions of `enum`, deduplicated.
def partitions(enum) do
list = Enum.to_list enum
n = Enum.count list
Enum.flat_map(n..1, &(do_partition_for_size(list, &1)))
|> Enum.uniq
end
defp do_partitions([]), do: [[]]
defp do_partitions(list) do
n = Enum.count list
Enum.flat_map(n..1, &(do_partition_for_size(list, &1)))
end
# (translated from Norwegian) "the bug lies in that something about flat /
# non-flat map should be different in the first iteration"
defp do_partition_for_size(list, size) do
# For each `size`-element block, partition the remainder and prepend the block.
list
|> combinations(size)
|> Enum.flat_map(fn comb ->
do_partitions(list -- comb)
|> Enum.map(&(Enum.sort([comb] ++ &1)))
end)
end
# Position of `enum` within the lexicographically ordered permutations of its
# sorted elements.
def permutation_index(enum) do
list = Enum.to_list enum
list
|> Enum.sort
|> permutations
|> Enum.find_index(fn p -> p == list end)
end
def permutations([]), do: [[]]
def permutations(list) when is_list(list) do
(for h <- list, t <- permutations(list -- [h]), do: t ++ [h])
|> Enum.uniq
end
def permutations(enum), do: permutations(Enum.to_list(enum))
# Selections with repetition: all length-n sequences over `enum`.
def selections(_, 0), do: [[]]
def selections(enum, n) do
list = Enum.to_list enum
list
|> Enum.flat_map(fn el -> Enum.map(selections(list, n - 1), &([el | &1])) end)
end
# The power set, produced in order of increasing subset size (0..n).
def subsets(enum) do
n = Enum.count enum
0..n
|> Enum.flat_map(&(do_subsets_for_n(enum, &1)))
end
defp do_subsets_for_n(enum, n) do
enum
|> combinations(n)
end
end
|
lib/comb/naive.ex
| 0.742795
| 0.661955
|
naive.ex
|
starcoder
|
defmodule MPEGAudioFrameParser do
@moduledoc """
This is the public API for MPEGAudioFrameParser application.
MPEGAudioFrameParser is implemented as a GenServer that, when fed consecutive
packets of binary data (for example, from a file or network source), will
parse individual MPEG audio frames from the incoming data.
No decoding is performed on the audio data. Instead, the resultant frames
are ready to be fed into a separate decoder, or retransmitted over the
network.
"""
# Default registered name for the parser GenServer; every public function
# accepts an optional `name` to target a differently-named server.
@server MPEGAudioFrameParser.Server
@doc """
Start the MPEG audio parser server. This must be done before calling the other
API functions.
iex> {:ok, pid} = MPEGAudioFrameParser.start_link()
...> is_pid(pid)
true
"""
def start_link(name \\ @server) do
GenServer.start_link(@server, nil, name: name)
end
@doc """
Add raw binary data to the current stream.
Returns: A list of zero or more structs, each representing a complete MPEG
audio frame. Note that because frames may be split across multiple packets,
this list may be empty, or contain more than one frame on each call. Any
leftover bytes will be stored by the server, and prepended to subsequent
packets.
## Example
Using a faked 128kbps 44.1k stereo MP3 frame:
iex> packet = <<0b11111111111_11_01_0_1001_00_0_0_00_00_0_0_00::size(32), 1::size(3304)>>
...> {:ok, _pid} = MPEGAudioFrameParser.start_link()
...> MPEGAudioFrameParser.add_packet(packet)
...> MPEGAudioFrameParser.add_packet(packet)
...> |> length
1
"""
# Synchronous: the caller receives completed frames in the reply.
def add_packet(packet, name \\ @server) do
GenServer.call(name, {:add_packet, packet})
end
@doc """
Add raw binary data to the current stream.
Similar to `MPEGAudioFrameParser.add_packet/1`, but does not return the
frames. Instead, they can be retrieved at a later point, or by another
process.
See `MPEGAudioFrameParser.pop_frame/0`.
"""
# Asynchronous fire-and-forget variant of add_packet/2.
def cast_packet(packet, name \\ @server) do
GenServer.cast(name, {:add_packet, packet})
end
@doc """
Pop a single completed frame.
Useful in combination with `MPEGAudioFrameParser.cast_packet/2`.
Returns a struct representing an individual MPEG audio frame, or `nil` if no
frame is available.
## Example
Using a faked 128kbps 44.1k stereo MP3 frame:
iex> packet = <<0b11111111111_11_01_0_1001_00_0_0_00_00_0_0_00::size(32), 1::size(3304)>>
...> {:ok, _pid} = MPEGAudioFrameParser.start_link()
...> MPEGAudioFrameParser.cast_packet(packet)
:ok
...> MPEGAudioFrameParser.cast_packet(packet)
:ok
...> frame = MPEGAudioFrameParser.pop_frame()
...> frame.__struct__
MPEGAudioFrameParser.Frame
"""
def pop_frame(name \\ @server) do
GenServer.call(name, :pop_frame)
end
@doc """
Reset the server's state, returning any available complete frames. Any
additional bytes that are not part of a completed frame are discarded.
Returns a list containing any available complete audio frames.
"""
def flush(name \\ @server) do
GenServer.call(name, :flush)
end
end
|
lib/mpeg_audio_frame_parser.ex
| 0.926984
| 0.457743
|
mpeg_audio_frame_parser.ex
|
starcoder
|
defmodule Day04 do
@moduledoc """
Advent of Code 2019
Day 4: Secure Container
"""
alias Day04.{Part1, Part2}
# Reads the puzzle input file ("lower-upper") and returns [lower, upper]
# as integers. Raises if the file is missing or malformed.
def get_range() do
Path.join(__DIR__, "inputs/day04.txt")
|> File.read!()
|> String.trim()
|> String.split("-")
|> Enum.map(&String.to_integer/1)
end
# Solves both parts against the input range and prints the answers.
def execute() do
[lower, upper] = get_range()
IO.puts("Part 1: #{Part1.run(lower, upper)}")
IO.puts("Part 2: #{Part2.run(lower, upper)}")
end
end
defmodule Day04.Part1 do
@moduledoc """
Part 1: counts passwords in a range whose digits never decrease and that
contain at least one pair of adjacent matching digits.
"""
# Entry point: count valid passwords in [lower, upper) using the Part 1
# adjacency rule.
def run(lower, upper) do
calculate_number_of_valid_passwords(lower, upper)
end
# Recursively iterate through all numbers between the lower and upper bounds.
# The lower bound is passed in as the `password` parameter, and is
# incremented until it equals the upper bound. For each password, check if
# it's valid, and increment a counter based on the result.
# `adjacency_check` is injected so Part 2 can reuse this loop with a
# stricter rule.
# NOTE(review): the upper bound itself is never validated (recursion stops
# when password == upper_bound) — confirm the puzzle range is meant to be
# exclusive at the top.
def calculate_number_of_valid_passwords(
password,
upper_bound,
adjacency_check \\ &adjacent?/2,
num_valid \\ 0
)
def calculate_number_of_valid_passwords(password, upper_bound, _adjacency_check, num_valid)
when password == upper_bound do
num_valid
end
def calculate_number_of_valid_passwords(password, upper_bound, adjacency_check, num_valid) do
calculate_number_of_valid_passwords(
password + 1,
upper_bound,
adjacency_check,
if(valid_password?(password, adjacency_check),
do: num_valid + 1,
else: num_valid
)
)
end
# Recursively process each digit of the password to check if it's valid.
# First, transform the password into a list of its digits. Then check each
# digit to see if it makes a password invalid. We don't check 6-digit-ness
# because the range already restricts passwords to 6-digits.
# NOTE(review): Enum.at/2 on a list is O(n), so this scan is O(n^2) per
# password — fine for 6 digits, but worth knowing before generalizing.
def valid_password?(
password,
adjacency_check,
index \\ 0,
adjacent_same \\ false
)
# Integer input: split into digits first, then re-enter with a list.
def valid_password?(password, adjacency_check, index, adjacent_same)
when is_number(password) do
valid_password?(
split_password(password),
adjacency_check,
index,
adjacent_same
)
end
# Scanned every digit: valid iff an adjacent pair was seen along the way.
def valid_password?(digits, _adjacency_check, index, adjacent_same)
when length(digits) == index do
adjacent_same
end
# Index 0 has no predecessor; skip straight to index 1.
def valid_password?(digits, adjacency_check, 0, adjacent_same) do
valid_password?(digits, adjacency_check, 1, adjacent_same)
end
# Digits must be non-decreasing; bail out immediately on a decrease.
def valid_password?(digits, adjacency_check, index, adjacent_same) do
if Enum.at(digits, index) >= Enum.at(digits, index - 1),
do:
valid_password?(
digits,
adjacency_check,
index + 1,
adjacent_same or adjacency_check.(digits, index)
),
else: false
end
# Part 1: Check to see if the digit prior to the current digit and the
# current digit are equivalent.
def adjacent?(_digits, 0) do
false
end
def adjacent?(digits, index) do
Enum.at(digits, index) == Enum.at(digits, index - 1)
end
# Split a password into a list of its digits.
defp split_password(password) when password < 10 do
[password]
end
defp split_password(password) do
split_password(div(password, 10)) ++ [rem(password, 10)]
end
end
defmodule Day04.Part2 do
  @moduledoc """
  Part 2: like Part 1, but the password must contain a group of *exactly* two
  matching adjacent digits (a pair that is not part of a larger run).
  """
  alias Day04.Part1

  # Counts valid passwords in the range using the stricter pair rule.
  def run(lower, upper) do
    Part1.calculate_number_of_valid_passwords(lower, upper, &exactly_2_adjacent?/2)
  end

  # Part 2: Check to see if the digit prior to the current digit and the
  # current digit are equivalent, and ensure that they are not part of a larger
  # group of matching digits.
  def exactly_2_adjacent?(_digits, 0) do
    false
  end

  def exactly_2_adjacent?(digits, index) do
    cur_digit = Enum.at(digits, index)

    # Bug fix: for index == 1 the old lookback `Enum.at(digits, index - 2)`
    # computed Enum.at(digits, -1), which Elixir interprets as "last element".
    # That wrongly rejected a pair at the start of the number whenever the
    # final digit happened to match it. Guard the lookback instead. The
    # lookahead is safe as-is: Enum.at past the end returns nil.
    cur_digit == Enum.at(digits, index - 1) and
      cur_digit != Enum.at(digits, index + 1) and
      (index < 2 or cur_digit != Enum.at(digits, index - 2))
  end
end
|
lib/day04.ex
| 0.793386
| 0.60996
|
day04.ex
|
starcoder
|
defmodule ExVCR.Adapter.Hackney.Converter do
@moduledoc """
Provides helpers to mock :hackney methods.
"""
use ExVCR.Converter
# Rebuilds an ExVCR.Response struct from decoded cassette data: string keys
# become atoms, and header maps are normalized back into keyword-style lists.
# NOTE(review): String.to_atom/1 assumes cassette files are trusted local
# fixtures — atoms are never garbage-collected.
defp string_to_response(string) do
response = Enum.map(string, fn {x, y} -> {String.to_atom(x), y} end)
response = struct(ExVCR.Response, response)
response =
if is_map(response.headers) do
headers = response.headers |> Map.to_list()
%{response | headers: headers}
else
response
end
response
end
# Converts a positional :hackney request (method, url, headers, body, options)
# into an ExVCR.Request struct; trailing elements default when absent.
defp request_to_string(request) do
method = Enum.fetch!(request, 0) |> to_string()
url = Enum.fetch!(request, 1) |> parse_url()
headers = Enum.at(request, 2, []) |> parse_headers()
body = Enum.at(request, 3, "") |> parse_request_body()
options = Enum.at(request, 4, []) |> sanitize_options() |> parse_options()
%ExVCR.Request{
url: url,
headers: headers,
method: method,
body: body,
options: options
}
end
# Sanitize options so that they can be encoded as json.
defp sanitize_options(options) do
Enum.map(options, &do_sanitize/1)
end
# Function keys/values are not JSON-encodable; store their inspect output.
defp do_sanitize({key, value}) when is_function(key) do
{inspect(key), value}
end
# Recurse into nested option lists.
defp do_sanitize({key, value}) when is_list(value) do
{key, Enum.map(value, &do_sanitize/1)}
end
# Tuples become lists for JSON.
defp do_sanitize({key, value}) when is_tuple(value) do
{key, Tuple.to_list(do_sanitize(value))}
end
defp do_sanitize({key, value}) when is_function(value) do
{key, inspect(value)}
end
defp do_sanitize({key, value}) do
{key, value}
end
# A bare atom option (a flag) is normalized to a {flag, true} pair.
defp do_sanitize(key) when is_atom(key) do
{key, true}
end
defp do_sanitize(value) do
value
end
# Success response with a body (or a hackney client reference).
defp response_to_string({:ok, status_code, headers, body_or_client}) do
body =
case body_or_client do
string when is_binary(string) -> string
# Client is already replaced by body through ExVCR.Adapter.Hackney adapter.
ref when is_reference(ref) -> inspect(ref)
end
%ExVCR.Response{
type: "ok",
status_code: status_code,
headers: parse_headers(headers),
body: body
}
end
# Success response without a body (e.g. HEAD requests).
defp response_to_string({:ok, status_code, headers}) do
%ExVCR.Response{
type: "ok",
status_code: status_code,
headers: parse_headers(headers)
}
end
# Transport-level failure: store the error reason as the body.
defp response_to_string({:error, reason}) do
%ExVCR.Response{
type: "error",
body: Atom.to_string(reason)
}
end
# Form-encoded bodies are encoded the same way hackney would, then filtered
# for sensitive data before being written to the cassette.
def parse_request_body({:form, body}) do
:hackney_request.encode_form(body)
|> elem(2)
|> to_string
|> ExVCR.Filter.filter_sensitive_data()
end
# All other body shapes fall back to ExVCR.Converter's implementation.
def parse_request_body(body), do: super(body)
end
|
lib/exvcr/adapter/hackney/converter.ex
| 0.68763
| 0.452354
|
converter.ex
|
starcoder
|
defmodule StepFlow.Step do
@moduledoc """
The Step context.
"""
require Logger
alias StepFlow.Artifacts
alias StepFlow.Jobs
alias StepFlow.Repo
alias StepFlow.Step.Helpers
alias StepFlow.Step.Launch
alias StepFlow.Workflows
alias StepFlow.Workflows.Workflow
# Advances a workflow: computes each step's status from its jobs, picks the
# steps that are ready to run, launches them, and folds the per-step results
# into a single workflow status tuple.
def start_next(%Workflow{id: workflow_id} = workflow) do
workflow = Repo.preload(workflow, :jobs, force: true)
is_live = workflow.is_live
jobs = Repo.preload(workflow.jobs, [:status, :progressions])
steps =
StepFlow.Map.get_by_key_or_atom(workflow, :steps)
|> Workflows.get_step_status(jobs)
{is_completed_workflow, steps_to_start} = get_steps_to_start(steps, is_live)
# Special cases: no startable step and no jobs yet means either an empty
# workflow (completed) or a fresh one (start its first step); no startable
# step with existing jobs means the workflow has run out of work.
steps_to_start =
case {steps_to_start, jobs} do
{[], []} ->
case List.first(steps) do
nil ->
Logger.warn("#{__MODULE__}: empty workflow #{workflow_id} is completed")
{:completed_workflow, []}
step ->
{:ok, [step]}
end
{[], _} ->
{:completed_workflow, []}
{list, _} ->
{:ok, list}
end
results = start_steps(steps_to_start, workflow)
get_final_status(workflow, is_completed_workflow, Enum.uniq(results) |> Enum.sort())
end
# Records a single "skipped" job for the given step.
def skip_step(workflow, step) do
step_id = StepFlow.Map.get_by_key_or_atom(step, :id)
step_name = StepFlow.Map.get_by_key_or_atom(step, :name)
Repo.preload(workflow, :jobs, force: true)
|> Jobs.create_skipped_job(step_id, step_name)
end
# Marks all of a step's existing jobs as skipped.
def skip_step_jobs(workflow, step) do
step_id = StepFlow.Map.get_by_key_or_atom(step, :id)
step_name = StepFlow.Map.get_by_key_or_atom(step, :name)
Repo.preload(workflow, :jobs, force: true)
|> Jobs.skip_jobs(step_id, step_name)
end
# Creates an (empty-resources) artifact entry marking workflow completion.
defp set_artifacts(workflow) do
resources = %{}
params = %{
resources: resources,
workflow_id: workflow.id
}
Artifacts.create_artifact(params)
end
defp get_steps_to_start(steps, is_live), do: iter_get_steps_to_start(steps, steps, is_live)
# Walks the step list accumulating {workflow_completed?, startable_steps}.
defp iter_get_steps_to_start(steps, all_steps, is_live, completed \\ true, result \\ [])
defp iter_get_steps_to_start([], _all_steps, _is_live, completed, result),
do: {completed, result}
# Live workflows: every step is considered startable.
defp iter_get_steps_to_start([step | steps], all_steps, true, completed, result) do
result = List.insert_at(result, -1, step)
iter_get_steps_to_start(steps, all_steps, true, completed, result)
end
# Non-live workflows: a queued step is startable once all of its
# `required_to_start` dependencies are completed or skipped.
defp iter_get_steps_to_start([step | steps], all_steps, false, completed, result) do
completed =
if step.status in [:completed, :skipped] do
completed
else
false
end
result =
if step.status == :queued do
case StepFlow.Map.get_by_key_or_atom(step, :required_to_start) do
nil ->
List.insert_at(result, -1, step)
required_to_start ->
count_not_completed =
Enum.filter(all_steps, fn s ->
StepFlow.Map.get_by_key_or_atom(s, :id) in required_to_start
end)
|> Enum.map(fn s -> StepFlow.Map.get_by_key_or_atom(s, :status) end)
|> Enum.filter(fn s -> s != :completed and s != :skipped end)
|> length
if count_not_completed == 0 do
List.insert_at(result, -1, step)
else
result
end
end
else
result
end
iter_get_steps_to_start(steps, all_steps, false, completed, result)
end
defp start_steps({:completed_workflow, _}, _workflow), do: [:completed_workflow]
# Launches each startable step; a step's optional `condition` is rendered as
# an EEx-style template that must evaluate to "true"/"false". Returns the
# list of per-step status strings.
defp start_steps({:ok, steps}, workflow) do
dates = Helpers.get_dates()
for step <- steps do
step_name = StepFlow.Map.get_by_key_or_atom(step, :name)
step_id = StepFlow.Map.get_by_key_or_atom(step, :id)
source_paths = Launch.get_source_paths(workflow, step, dates)
Logger.warn(
"#{__MODULE__}: start to process step #{step_name} (index #{step_id}) for workflow #{
workflow.id
}"
)
{result, status} =
StepFlow.Map.get_by_key_or_atom(step, :condition)
|> case do
condition when condition in [0, nil] ->
Launch.launch_step(workflow, step)
condition ->
Helpers.template_process(
"<%= " <> condition <> "%>",
workflow,
step,
dates,
source_paths
)
|> case do
"true" ->
Launch.launch_step(workflow, step)
"false" ->
skip_step(workflow, step)
{:ok, "skipped"}
_ ->
Logger.error(
"#{__MODULE__}: cannot estimate condition for step #{step_name} (index #{
step_id
}) for workflow #{workflow.id}"
)
{:error, "bad step condition"}
end
end
Logger.info("#{step_name}: #{inspect({result, status})}")
topic = "update_workflow_" <> Integer.to_string(workflow.id)
StepFlow.Notification.send(topic, %{workflow_id: workflow.id})
status
end
end
# Folds the unique, sorted per-step statuses into the workflow status.
# "skipped"/"completed" trigger another advancement pass via start_next/1.
defp get_final_status(_workflow, _is_completed_workflow, ["started"]), do: {:ok, "started"}
defp get_final_status(_workflow, _is_completed_workflow, ["created"]), do: {:ok, "started"}
defp get_final_status(_workflow, _is_completed_workflow, ["created", "started"]),
do: {:ok, "started"}
defp get_final_status(workflow, _is_completed_workflow, ["skipped"]), do: start_next(workflow)
defp get_final_status(workflow, _is_completed_workflow, ["completed"]), do: start_next(workflow)
defp get_final_status(workflow, true, [:completed_workflow]) do
set_artifacts(workflow)
Logger.warn("#{__MODULE__}: workflow #{workflow.id} is completed")
{:ok, "completed"}
end
defp get_final_status(_workflow, _is_completed_workflow, _states), do: {:ok, "still_processing"}
end
|
lib/step_flow/step/step.ex
| 0.621081
| 0.598752
|
step.ex
|
starcoder
|
defmodule XlsxStream.StyleSheet do
@moduledoc """
Builders for the elements of a SpreadsheetML `styleSheet` document
(`xl/styles.xml`), expressed as `XmlStream` nodes.
"""
alias XmlStream, as: X
alias XlsxStream.Schema
# Wraps style elements in the XML declaration and root `styleSheet` element.
def document(styles) do
[
X.declaration(),
# NOTE(review): `spreedsheetml` looks like a typo for "spreadsheetml",
# but the call must match the function actually defined on
# XlsxStream.Schema — verify there before renaming.
X.element("styleSheet", %{xmlns: Schema.spreedsheetml()}, styles)
]
end
# The following builders each wrap their entries in a container element
# carrying a `count` attribute (see counted/3).
def num_fmts(entries \\ [], attrs \\ %{}) do
counted("numFmts", entries, attrs)
end
def num_fmt(attrs) do
X.empty_element("numFmt", attrs)
end
def fonts(entries \\ [], attrs \\ %{}) do
counted("fonts", entries, attrs)
end
def fills(entries \\ [], attrs \\ %{}) do
counted("fills", entries, attrs)
end
def borders(entries \\ [], attrs \\ %{}) do
counted("borders", entries, attrs)
end
def cell_style_xfs(entries \\ [], attrs \\ %{}) do
counted("cellStyleXfs", entries, attrs)
end
def cell_xfs(entries \\ [], attrs \\ %{}) do
counted("cellXfs", entries, attrs)
end
def cell_styles(entries \\ [], attrs \\ %{}) do
counted("cellStyles", entries, attrs)
end
def table_styles(entries \\ [], attrs \\ %{}) do
counted("tableStyles", entries, attrs)
end
def dxfs(entries \\ [], attrs \\ %{}) do
counted("dxfs", entries, attrs)
end
# Leaf elements: attribute-only, no children.
def color(attrs) do
X.empty_element("color", attrs)
end
def font(body, attrs \\ %{}) do
X.element("font", attrs, body)
end
def fill(body, attrs \\ %{}) do
X.element("fill", attrs, body)
end
def border(body, attrs \\ %{}) do
X.element("border", attrs, body)
end
def left(body, attrs \\ %{}) do
X.element("left", attrs, body)
end
def right(body, attrs \\ %{}) do
X.element("right", attrs, body)
end
def top(body, attrs \\ %{}) do
X.element("top", attrs, body)
end
def bottom(body, attrs \\ %{}) do
X.element("bottom", attrs, body)
end
def xf(body, attrs \\ %{}) do
X.element("xf", attrs, body)
end
def cell_style(attrs \\ %{}) do
X.empty_element("cellStyle", attrs)
end
def name(attrs) do
X.empty_element("name", attrs)
end
def sz(attrs) do
X.empty_element("sz", attrs)
end
def family(attrs) do
X.empty_element("family", attrs)
end
def b(attrs) do
X.empty_element("b", attrs)
end
def pattern_fill(attrs) do
X.empty_element("patternFill", attrs)
end
def alignment(attrs) do
X.empty_element("alignment", attrs)
end
# Builds `name` with a `count` attribute equal to the number of children;
# an explicit count in `attrs` takes precedence via Map.merge/2.
def counted(name, body \\ [], attrs \\ %{}) do
X.element(name, Map.merge(%{count: to_string(length(body))}, attrs), body)
end
end
|
lib/xlsx_stream/style_sheet.ex
| 0.545528
| 0.504211
|
style_sheet.ex
|
starcoder
|
defmodule Contex.Dataset do
  @moduledoc """
  `Dataset` is a simple wrapper around a datasource for plotting charts.

  Dataset marshalls a couple of different data structures into a consistent form for consumption
  by the chart plotting functions. For example, it allows a list of lists or a list of tuples to be
  treated the same.

  The most sensible way to work with a dataset is to provide column headers - it makes code elsewhere
  readable. If you don't want to, you can also refer to columns by index.

  Dataset provides a few convenience functions for calculating data extents for a column, extracting unique
  values from columns, calculating combined extents for multiple columns (handy when plotting bar charts)
  and guessing column type (handy when determining whether to use a `Contex.TimeScale` or a `Contex.ContinuousLinearScale`).

  The easiest pattern to create a dataset is:

      iex> data = [
      ...>   {0.0, 0.0, "Hippo"},
      ...>   {0.5, 0.3, "Turtle"},
      ...>   {0.4, 0.3, "Turtle"},
      ...>   {0.2, 0.3, "Rabbit"}
      ...> ] # Wherever your data comes from (e.g. could be straight from Ecto)
      ...> dataset = Dataset.new(data, ["x", "y", "category"]) # Attach descriptive headers
      ...> Dataset.column_extents(dataset, "x") # Get extents for a named column
      {0.0, 0.5}
      iex> Dataset.column_name(dataset, 0) # Get name of column by index
      "x"
      iex> cat_col = Dataset.column_index(dataset, "category") # Get index of column by name
      2
      iex> Enum.map(dataset.data, fn row -> # Enumerate values in a column
      ...>   Dataset.value(row, cat_col)
      ...> end)
      ["Hippo", "Turtle", "Turtle", "Rabbit"]
      iex> Dataset.unique_values(dataset, "category") # Extract unique values for legends etc.
      ["Hippo", "Turtle", "Rabbit"]

  While Dataset gives facilities to map between names and column indexes, you can only access data values via index.
  This is so that you don't have expensive mappings in tight loops.

  **Note** There are very few validation checks (for example, to check that the number of headers
  supplied matches the size of each array or tuple in the data). If there are any issues finding
  a value, nil is returned.
  """
  alias __MODULE__
  alias Contex.Utils

  defstruct [:headers, :data, :title]

  @type column_name() :: String.t() | integer()
  @type column_type() :: :datetime | :number | :string | :unknown | nil
  @type row() :: list() | tuple()
  @type t() :: %__MODULE__{}

  @doc """
  Creates a new Dataset wrapper around some data.

  Data is expected to be a list of tuples of the same size or list of lists of same size.
  If no headers are specified, columns are accessed by index.
  """
  @spec new(list(row())) :: Contex.Dataset.t()
  def new(data) when is_list(data) do
    %Dataset{headers: nil, data: data}
  end

  @doc """
  Creates a new Dataset wrapper around some data with headers.

  Data is expected to be a list of tuples of the same size or list of lists of same size.
  """
  @spec new(list(row()), list(String.t())) :: Contex.Dataset.t()
  def new(data, headers) when is_list(data) and is_list(headers) do
    %Dataset{headers: headers, data: data}
  end

  @doc """
  Optionally sets a title.

  Not really used at the moment to be honest, but seemed like a good
  idea at the time. Might come in handy when overlaying plots.
  """
  @spec title(Contex.Dataset.t(), String.t()) :: Contex.Dataset.t()
  def title(%Dataset{} = dataset, title) do
    %{dataset | title: title}
  end

  @doc """
  Looks up the index for a given column name. Returns nil if not found.
  """
  @spec column_index(Contex.Dataset.t(), column_name()) :: nil | integer
  def column_index(%Dataset{headers: headers} = _dataset, column_name) when is_list(headers) do
    Enum.find_index(headers, fn col -> col == column_name end)
  end

  # An integer "name" is already an index; pass it straight through.
  def column_index(_, column_name) when is_integer(column_name), do: column_name
  def column_index(_, _), do: nil

  @doc """
  Looks up the column name for a given index.

  If there are no headers, or the index is outside the range of the headers
  the requested index is returned.
  """
  @spec column_name(Contex.Dataset.t(), integer()) :: column_name()
  def column_name(%Dataset{headers: headers} = _dataset, column_index)
      when is_list(headers) and is_integer(column_index) and
             column_index < length(headers) do
    # Maybe drop the length guard and have it throw an exception instead.
    Enum.at(headers, column_index)
  end

  def column_name(_, column_index), do: column_index

  @doc """
  Looks up the value from a row based on the column index.

  This simply provides a consistent wrapper regardless of whether the data is represented in a tuple
  or a list. Returns nil for out-of-range indexes or malformed arguments.
  """
  @spec value(row(), integer()) :: any
  def value(row, column_index) when is_list(row) and is_integer(column_index),
    do: Enum.at(row, column_index)

  def value(row, column_index)
      when is_tuple(row) and is_integer(column_index) and column_index < tuple_size(row) do
    elem(row, column_index)
  end

  def value(_, _), do: nil

  @doc """
  Calculates the min and max value in the specified column
  """
  @spec column_extents(Contex.Dataset.t(), column_name()) :: {any, any}
  def column_extents(%Dataset{data: data} = dataset, column_name) do
    index = column_index(dataset, column_name)

    Enum.reduce(data, {nil, nil}, fn row, {min, max} ->
      val = value(row, index)
      {Utils.safe_min(val, min), Utils.safe_max(val, max)}
    end)
  end

  @doc """
  Tries to guess the data type for a column based on contained data.

  Looks through the rows and returns the first match it can find.
  Returns nil when no row yields a recognizable type.
  """
  @spec guess_column_type(Contex.Dataset.t(), column_name()) :: column_type()
  def guess_column_type(%Dataset{data: data} = dataset, column_name) do
    index = column_index(dataset, column_name)

    Enum.reduce_while(data, nil, fn row, _result ->
      val = value(row, index)

      case evaluate_type(val) do
        {:ok, type} -> {:halt, type}
        _ -> {:cont, nil}
      end
    end)
  end

  @doc false
  # Checks that every requested column name is present in the dataset headers.
  # Assumes the dataset was created with headers; a header-less dataset raises.
  @spec check_column_names(Contex.Dataset.t(), list(column_name()) | column_name()) ::
          {:ok, []} | {:error, list(column_name())}
  def check_column_names(%Dataset{} = dataset, column_names) when is_list(column_names) do
    missing_columns = MapSet.difference(MapSet.new(column_names), MapSet.new(dataset.headers))

    if MapSet.size(missing_columns) > 0 do
      {:error, MapSet.to_list(missing_columns)}
    else
      {:ok, []}
    end
  end

  def check_column_names(%Dataset{} = dataset, column_names) do
    check_column_names(dataset, [column_names])
  end

  defp evaluate_type(%DateTime{}), do: {:ok, :datetime}
  defp evaluate_type(%NaiveDateTime{}), do: {:ok, :datetime}
  defp evaluate_type(v) when is_number(v), do: {:ok, :number}
  defp evaluate_type(v) when is_binary(v), do: {:ok, :string}
  # Was `{:unknown}` (a stray one-element tuple). Any non-{:ok, _} value means
  # "keep looking" to guess_column_type/2, so a plain atom expresses the same
  # intent without the malformed tuple.
  defp evaluate_type(_), do: :unknown

  @doc """
  Calculates the data extents for the sum of the columns supplied.

  It is the equivalent of evaluating the extents of a calculated row where the calculating
  is the sum of the values identified by column_names.
  """
  @spec combined_column_extents(Contex.Dataset.t(), list(column_name())) :: {any(), any()}
  def combined_column_extents(%Dataset{data: data} = dataset, column_names) do
    indices = Enum.map(column_names, fn col -> column_index(dataset, col) end)

    Enum.reduce(data, {nil, nil}, fn row, {min, max} ->
      val = sum_row_values(row, indices)
      {Utils.safe_min(val, min), Utils.safe_max(val, max)}
    end)
  end

  # Sums the values of a single row at the given column indices.
  defp sum_row_values(row, indices) do
    Enum.reduce(indices, 0, fn index, acc ->
      val = value(row, index)
      Utils.safe_add(acc, val)
    end)
  end

  @doc """
  Extracts a list of unique values in the given column.

  Note that the unique values will maintain order of first detection
  in the data.
  """
  @spec unique_values(Contex.Dataset.t(), String.t() | integer()) :: [any]
  def unique_values(%Dataset{data: data} = dataset, column_name) do
    index = column_index(dataset, column_name)

    {result, _found} =
      Enum.reduce(data, {[], MapSet.new()}, fn row, {result, found} ->
        val = value(row, index)

        case MapSet.member?(found, val) do
          true -> {result, found}
          _ -> {[val | result], MapSet.put(found, val)}
        end
      end)

    # Maintain order they are found in
    Enum.reverse(result)
  end
end
|
lib/chart/dataset.ex
| 0.8505
| 0.91452
|
dataset.ex
|
starcoder
|
defmodule LineBot.Message.Template do
use LineBot.Message
@moduledoc """
Represents a [Template message](https://developers.line.biz/en/reference/messaging-api/#template-messages).
"""
# A template message couples alternative text with exactly one of the four
# template payload structs; `type` is fixed to :template.
@type t :: %__MODULE__{
altText: String.t(),
template:
LineBot.Message.Template.Buttons.t()
| LineBot.Message.Template.Confirm.t()
| LineBot.Message.Template.Carousel.t()
| LineBot.Message.Template.ImageCarousel.t(),
type: :template,
quickReply: LineBot.Message.QuickReply.t() | nil
}
# altText and template are mandatory; quickReply defaults to nil.
@enforce_keys [:altText, :template]
defstruct [:altText, :template, :quickReply, type: :template]
end
defmodule LineBot.Message.Template.Buttons do
use LineBot.Message
@moduledoc """
Represents a [Buttons template](https://developers.line.biz/en/reference/messaging-api/#buttons).
"""
# Field names mirror the JSON keys of the LINE Messaging API payload;
# optional image/title fields default to nil.
@type t :: %__MODULE__{
thumbnailImageUrl: String.t() | nil,
imageAspectRatio: :rectangle | :square | nil,
imageSize: :cover | :contain | nil,
imageBackgroundColor: String.t() | nil,
title: String.t() | nil,
text: String.t(),
defaultAction: LineBot.Message.Action.t() | nil,
actions: [LineBot.Message.Action.t()],
type: :buttons
}
# Only the body text and the action list are mandatory.
@enforce_keys [:text, :actions]
defstruct [
:thumbnailImageUrl,
:imageAspectRatio,
:imageSize,
:imageBackgroundColor,
:title,
:text,
:defaultAction,
:actions,
type: :buttons
]
end
defmodule LineBot.Message.Template.Confirm do
@derive Jason.Encoder
@moduledoc """
Represents a [Confirm template](https://developers.line.biz/en/reference/messaging-api/#confirm).
"""
# A confirm template is just a question text plus its answer actions;
# `type` is fixed to :confirm.
@type t :: %__MODULE__{
text: String.t(),
actions: [LineBot.Message.Action.t()],
type: :confirm
}
@enforce_keys [:text, :actions]
defstruct [:text, :actions, type: :confirm]
end
defmodule LineBot.Message.Template.Carousel do
use LineBot.Message
@moduledoc """
Represents a [Carousel template](https://developers.line.biz/en/reference/messaging-api/#carousel).
"""
# Image sizing options apply to every column; only the column list is mandatory.
@type t :: %__MODULE__{
columns: [LineBot.Message.Template.Carousel.Column.t()],
imageAspectRatio: :rectangle | :square | nil,
imageSize: :cover | :contain | nil,
type: :carousel
}
@enforce_keys [:columns]
defstruct [:columns, :imageAspectRatio, :imageSize, type: :carousel]
end
defmodule LineBot.Message.Template.Carousel.Column do
use LineBot.Message
@moduledoc """
Represents a [Column object for carousel](https://developers.line.biz/en/reference/messaging-api/#column-object-for-carousel).
"""
# One pane of a carousel; only the body text is mandatory.
@type t :: %__MODULE__{
thumbnailImageUrl: String.t() | nil,
imageBackgroundColor: String.t() | nil,
title: String.t() | nil,
text: String.t(),
defaultAction: LineBot.Message.Action.t() | nil,
actions: [LineBot.Message.Action.t()]
}
@enforce_keys [:text]
defstruct [:thumbnailImageUrl, :imageBackgroundColor, :title, :text, :defaultAction, :actions]
end
defmodule LineBot.Message.Template.ImageCarousel do
@derive Jason.Encoder
@moduledoc """
Represents an [Image carousel template](https://developers.line.biz/en/reference/messaging-api/#image-carousel).
"""
# An image carousel is only a list of image columns; `type` is fixed.
@type t :: %__MODULE__{
columns: [LineBot.Message.Template.ImageCarousel.Column.t()],
type: :image_carousel
}
@enforce_keys [:columns]
defstruct [:columns, type: :image_carousel]
end
defmodule LineBot.Message.Template.ImageCarousel.Column do
@derive Jason.Encoder
@moduledoc """
Represents a [Column object for image carousel](https://developers.line.biz/en/reference/messaging-api/#column-object-for-image-carousel).
"""
# Both the image and its tap action are mandatory.
@type t :: %__MODULE__{
imageUrl: String.t(),
action: LineBot.Message.Action.t()
}
@enforce_keys [:imageUrl, :action]
defstruct [:imageUrl, :action]
end
|
lib/line_bot/message/template.ex
| 0.83772
| 0.439507
|
template.ex
|
starcoder
|
defmodule Leaflet.MapData do
  @moduledoc """
  Represents leaflet map data.
  """
  alias GoogleMaps.MapData, as: GoogleMapData
  alias GoogleMaps.MapData.Marker, as: GoogleMapsMarker
  alias GoogleMaps.MapData.Path, as: GoogleMapsPath
  alias Leaflet.MapData.{Marker, Polyline}

  @type lat_lng :: %{latitude: float, longitude: float}

  defstruct default_center: %{latitude: 42.360718, longitude: -71.05891},
            height: 0,
            markers: [],
            stop_markers: [],
            polylines: [],
            tile_server_url: "",
            width: 0,
            zoom: nil,
            id: ""

  @type t :: %__MODULE__{
          default_center: lat_lng,
          height: integer,
          markers: [Marker.t()],
          stop_markers: [Marker.t()],
          polylines: [Polyline.t()],
          tile_server_url: String.t(),
          width: integer,
          zoom: integer | nil,
          id: String.t()
        }

  # Builds an empty map with the given pixel dimensions and optional zoom level.
  @spec new({integer, integer}, integer | nil) :: t
  def new({width, height}, zoom \\ nil) do
    %__MODULE__{width: width, height: height, zoom: zoom}
  end

  # Prepends a single marker to the marker list.
  @spec add_marker(t, Marker.t()) :: t
  def add_marker(map_data, marker) do
    updated_markers = [marker | map_data.markers]
    %{map_data | markers: updated_markers}
  end

  # Prepends a single stop marker to the stop-marker list.
  @spec add_stop_marker(t, Marker.t()) :: t
  def add_stop_marker(map_data, marker) do
    updated_markers = [marker | map_data.stop_markers]
    %{map_data | stop_markers: updated_markers}
  end

  # Adds each marker in turn (resulting list order is the reverse of the input,
  # since every addition prepends).
  @spec add_markers(t, [Marker.t()]) :: t
  def add_markers(map_data, markers) do
    Enum.reduce(markers, map_data, fn marker, acc -> add_marker(acc, marker) end)
  end

  @spec add_stop_markers(t, [Marker.t()]) :: t
  def add_stop_markers(map_data, markers) do
    Enum.reduce(markers, map_data, fn marker, acc -> add_stop_marker(acc, marker) end)
  end

  # Prepends a single polyline.
  @spec add_polyline(t, Polyline.t()) :: t
  def add_polyline(map_data, %Polyline{} = polyline) do
    updated_polylines = [polyline | map_data.polylines]
    %{map_data | polylines: updated_polylines}
  end

  @spec add_polylines(t, [Polyline.t()]) :: t
  def add_polylines(map_data, polylines) do
    Enum.reduce(polylines, map_data, fn polyline, acc -> add_polyline(acc, polyline) end)
  end

  # Converts this Leaflet map into the equivalent GoogleMaps.MapData struct.
  # Only markers and polylines are carried over (stop markers are not);
  # converted markers are created invisible.
  def to_google_map_data(%{
        default_center: default_center,
        width: width,
        height: height,
        zoom: zoom,
        markers: markers,
        polylines: polylines
      }) do
    google_markers =
      Enum.map(markers, fn %{latitude: latitude, longitude: longitude} ->
        %GoogleMapsMarker{longitude: longitude, latitude: latitude, visible?: false}
      end)

    google_paths =
      Enum.map(polylines, fn %{color: color, weight: weight, dotted?: dotted?} ->
        %GoogleMapsPath{color: color, weight: weight, dotted?: dotted?}
      end)

    %GoogleMapData{
      default_center: default_center,
      width: width,
      height: height,
      zoom: zoom,
      scale: 2,
      markers: google_markers,
      paths: google_paths
    }
  end
end
|
apps/site/lib/leaflet/map_data.ex
| 0.852782
| 0.553143
|
map_data.ex
|
starcoder
|
defmodule Month.Range do
  @moduledoc """
  Represents a range of months.

      iex> range = Month.Range.new(~M[2019-01], ~M[2019-03])
      {:ok, #Month.Range<~M[2019-01], ~M[2019-03]>}
      iex> range.months
      [~M[2019-01], ~M[2019-02], ~M[2019-03]]

  The `months` field contains all months within the range, inclusive.
  """
  import Month.Utils

  alias Month.Period

  @type t :: %Month.Range{
          start: Month.t(),
          end: Month.t(),
          months: list(Month.t())
        }

  @required_fields [
    :start,
    :end,
    :months
  ]
  @enforce_keys @required_fields
  defstruct @required_fields

  @doc """
  Creates a new `Month.Range` using given `Month`s as a start and an end.

  The start must be strictly before the end: a reversed range — and,
  notably, a single-month range where start equals end — yields
  `{:error, "invalid_range"}`.

  ## Examples

      iex> Month.Range.new(~M[2019-01], ~M[2019-03])
      {:ok, #Month.Range<~M[2019-01], ~M[2019-03]>}
  """
  @spec new(Date.t(), Date.t()) :: {:ok, Month.Range.t()} | {:error, String.t()}
  @spec new(Month.t(), Month.t()) :: {:ok, Month.Range.t()} | {:error, String.t()}
  def new(%Date{month: first_month, year: first_year}, %Date{month: last_month, year: last_year}) do
    # Reduce both dates to months, then delegate to the Month/Month clause.
    with {:ok, first} <- Month.new(first_year, first_month),
         {:ok, last} <- Month.new(last_year, last_month) do
      new(first, last)
    end
  end

  def new(%Month{} = first, %Month{} = last) do
    if Month.compare(first, last) == :lt do
      result = %Month.Range{
        start: first,
        end: last,
        months: Period.months(first, last)
      }

      {:ok, result}
    else
      {:error, "invalid_range"}
    end
  end

  @doc """
  Same as `new/2` but returns either the result or raises an exception.
  """
  @spec new!(Date.t(), Date.t()) :: Month.Range.t()
  @spec new!(Month.t(), Month.t()) :: Month.Range.t()
  def new!(%Date{year: first_year, month: first_month}, %Date{year: last_year, month: last_month}) do
    first = Month.new!(first_year, first_month)
    last = Month.new!(last_year, last_month)
    unwrap_or_raise(new(first, last))
  end

  def new!(%Month{} = first, %Month{} = last) do
    unwrap_or_raise(new(first, last))
  end

  defimpl Inspect do
    def inspect(month_range, _opts) do
      "#Month.Range<#{inspect(month_range.start)}, #{inspect(month_range.end)}>"
    end
  end
end
|
lib/month/range.ex
| 0.855987
| 0.539287
|
range.ex
|
starcoder
|
defmodule AWS.ComputeOptimizer do
@moduledoc """
Compute Optimizer is a service that analyzes the configuration and utilization
metrics of your Amazon Web Services compute resources, such as Amazon EC2
instances, Amazon EC2 Auto Scaling groups, Lambda functions, and Amazon EBS
volumes.
It reports whether your resources are optimal, and generates optimization
recommendations to reduce the cost and improve the performance of your
workloads. Compute Optimizer also provides recent utilization metric data, in
addition to projected utilization metric data for the recommendations, which you
can use to evaluate which recommendation provides the best price-performance
trade-off. The analysis of your usage patterns can help you decide when to move
or resize your running resources, and still meet your performance and capacity
requirements. For more information about Compute Optimizer, including the
required permissions to use the service, see the [Compute Optimizer User Guide](https://docs.aws.amazon.com/compute-optimizer/latest/ug/).
"""
alias AWS.Client
alias AWS.Request
# NOTE: generated-style SDK module — every public function below is a thin,
# uniform wrapper around Request.request_post/5 using the service metadata.
# Static service descriptor consumed by AWS.Request when signing and
# dispatching calls (JSON protocol, SigV4).
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2019-11-01",
content_type: "application/x-amz-json-1.0",
credential_scope: nil,
endpoint_prefix: "compute-optimizer",
global?: false,
protocol: "json",
service_id: "Compute Optimizer",
signature_version: "v4",
signing_name: "compute-optimizer",
target_prefix: "ComputeOptimizerService"
}
end
@doc """
Deletes a recommendation preference, such as enhanced infrastructure metrics.
For more information, see [Activating enhanced infrastructure metrics](https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html)
in the *Compute Optimizer User Guide*.
"""
def delete_recommendation_preferences(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRecommendationPreferences", input, options)
end
@doc """
Describes recommendation export jobs created in the last seven days.
Use the `ExportAutoScalingGroupRecommendations` or
`ExportEC2InstanceRecommendations` actions to request an export of your
recommendations. Then use the `DescribeRecommendationExportJobs` action to view
your export jobs.
"""
def describe_recommendation_export_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRecommendationExportJobs", input, options)
end
@doc """
Exports optimization recommendations for Auto Scaling groups.
Recommendations are exported in a comma-separated values (.csv) file, and its
metadata in a JavaScript Object Notation (JSON) (.json) file, to an existing
Amazon Simple Storage Service (Amazon S3) bucket that you specify. For more
information, see [Exporting Recommendations](https://docs.aws.amazon.com/compute-optimizer/latest/ug/exporting-recommendations.html)
in the *Compute Optimizer User Guide*.
You can have only one Auto Scaling group export job in progress per Amazon Web
Services Region.
"""
def export_auto_scaling_group_recommendations(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ExportAutoScalingGroupRecommendations",
input,
options
)
end
@doc """
Exports optimization recommendations for Amazon EBS volumes.
Recommendations are exported in a comma-separated values (.csv) file, and its
metadata in a JavaScript Object Notation (JSON) (.json) file, to an existing
Amazon Simple Storage Service (Amazon S3) bucket that you specify. For more
information, see [Exporting Recommendations](https://docs.aws.amazon.com/compute-optimizer/latest/ug/exporting-recommendations.html)
in the *Compute Optimizer User Guide*.
You can have only one Amazon EBS volume export job in progress per Amazon Web
Services Region.
"""
def export_ebs_volume_recommendations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ExportEBSVolumeRecommendations", input, options)
end
@doc """
Exports optimization recommendations for Amazon EC2 instances.
Recommendations are exported in a comma-separated values (.csv) file, and its
metadata in a JavaScript Object Notation (JSON) (.json) file, to an existing
Amazon Simple Storage Service (Amazon S3) bucket that you specify. For more
information, see [Exporting Recommendations](https://docs.aws.amazon.com/compute-optimizer/latest/ug/exporting-recommendations.html)
in the *Compute Optimizer User Guide*.
You can have only one Amazon EC2 instance export job in progress per Amazon Web
Services Region.
"""
def export_ec2_instance_recommendations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ExportEC2InstanceRecommendations", input, options)
end
@doc """
Exports optimization recommendations for Lambda functions.
Recommendations are exported in a comma-separated values (.csv) file, and its
metadata in a JavaScript Object Notation (JSON) (.json) file, to an existing
Amazon Simple Storage Service (Amazon S3) bucket that you specify. For more
information, see [Exporting Recommendations](https://docs.aws.amazon.com/compute-optimizer/latest/ug/exporting-recommendations.html)
in the *Compute Optimizer User Guide*.
You can have only one Lambda function export job in progress per Amazon Web
Services Region.
"""
def export_lambda_function_recommendations(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"ExportLambdaFunctionRecommendations",
input,
options
)
end
@doc """
Returns Auto Scaling group recommendations.
Compute Optimizer generates recommendations for Amazon EC2 Auto Scaling groups
that meet a specific set of requirements. For more information, see the
[Supported resources and requirements](https://docs.aws.amazon.com/compute-optimizer/latest/ug/requirements.html)
in the *Compute Optimizer User Guide*.
"""
def get_auto_scaling_group_recommendations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAutoScalingGroupRecommendations", input, options)
end
@doc """
Returns Amazon Elastic Block Store (Amazon EBS) volume recommendations.
Compute Optimizer generates recommendations for Amazon EBS volumes that meet a
specific set of requirements. For more information, see the [Supported resources and
requirements](https://docs.aws.amazon.com/compute-optimizer/latest/ug/requirements.html)
in the *Compute Optimizer User Guide*.
"""
def get_ebs_volume_recommendations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetEBSVolumeRecommendations", input, options)
end
@doc """
Returns Amazon EC2 instance recommendations.
Compute Optimizer generates recommendations for Amazon Elastic Compute Cloud
(Amazon EC2) instances that meet a specific set of requirements. For more
information, see the [Supported resources and requirements](https://docs.aws.amazon.com/compute-optimizer/latest/ug/requirements.html)
in the *Compute Optimizer User Guide*.
"""
def get_ec2_instance_recommendations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetEC2InstanceRecommendations", input, options)
end
@doc """
Returns the projected utilization metrics of Amazon EC2 instance
recommendations.
The `Cpu` and `Memory` metrics are the only projected utilization metrics
returned when you run this action. Additionally, the `Memory` metric is returned
only for resources that have the unified CloudWatch agent installed on them. For
more information, see [Enabling Memory Utilization with the CloudWatch Agent](https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html#cw-agent).
"""
def get_ec2_recommendation_projected_metrics(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetEC2RecommendationProjectedMetrics",
input,
options
)
end
@doc """
Returns the recommendation preferences that are in effect for a given resource,
such as enhanced infrastructure metrics.
Considers all applicable preferences that you might have set at the resource,
account, and organization level.
When you create a recommendation preference, you can set its status to `Active`
or `Inactive`. Use this action to view the recommendation preferences that are
in effect, or `Active`.
"""
def get_effective_recommendation_preferences(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetEffectiveRecommendationPreferences",
input,
options
)
end
@doc """
Returns the enrollment (opt in) status of an account to the Compute Optimizer
service.
If the account is the management account of an organization, this action also
confirms the enrollment status of member accounts of the organization. Use the
`GetEnrollmentStatusesForOrganization` action to get detailed information about
the enrollment status of member accounts of an organization.
"""
def get_enrollment_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetEnrollmentStatus", input, options)
end
@doc """
Returns the Compute Optimizer enrollment (opt-in) status of organization member
accounts, if your account is an organization management account.
To get the enrollment status of standalone accounts, use the
`GetEnrollmentStatus` action.
"""
def get_enrollment_statuses_for_organization(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetEnrollmentStatusesForOrganization",
input,
options
)
end
@doc """
Returns Lambda function recommendations.
Compute Optimizer generates recommendations for functions that meet a specific
set of requirements. For more information, see the [Supported resources and requirements](https://docs.aws.amazon.com/compute-optimizer/latest/ug/requirements.html)
in the *Compute Optimizer User Guide*.
"""
def get_lambda_function_recommendations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetLambdaFunctionRecommendations", input, options)
end
@doc """
Returns existing recommendation preferences, such as enhanced infrastructure
metrics.
Use the `scope` parameter to specify which preferences to return. You can
specify to return preferences for an organization, a specific account ID, or a
specific EC2 instance or Auto Scaling group Amazon Resource Name (ARN).
For more information, see [Activating enhanced infrastructure metrics](https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html)
in the *Compute Optimizer User Guide*.
"""
def get_recommendation_preferences(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetRecommendationPreferences", input, options)
end
@doc """
Returns the optimization findings for an account.
It returns the number of:
* Amazon EC2 instances in an account that are `Underprovisioned`,
`Overprovisioned`, or `Optimized`.
* Auto Scaling groups in an account that are `NotOptimized`, or
`Optimized`.
* Amazon EBS volumes in an account that are `NotOptimized`, or
`Optimized`.
* Lambda functions in an account that are `NotOptimized`, or
`Optimized`.
"""
def get_recommendation_summaries(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetRecommendationSummaries", input, options)
end
@doc """
Creates a new recommendation preference or updates an existing recommendation
preference, such as enhanced infrastructure metrics.
For more information, see [Activating enhanced infrastructure metrics](https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html)
in the *Compute Optimizer User Guide*.
"""
def put_recommendation_preferences(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutRecommendationPreferences", input, options)
end
@doc """
Updates the enrollment (opt in and opt out) status of an account to the Compute
Optimizer service.
If the account is a management account of an organization, this action can also
be used to enroll member accounts of the organization.
You must have the appropriate permissions to opt in to Compute Optimizer, to
view its recommendations, and to opt out. For more information, see [Controlling access with Amazon Web Services Identity and Access
Management](https://docs.aws.amazon.com/compute-optimizer/latest/ug/security-iam.html)
in the *Compute Optimizer User Guide*.
When you opt in, Compute Optimizer automatically creates a service-linked role
in your account to access its data. For more information, see [Using Service-Linked Roles for Compute
Optimizer](https://docs.aws.amazon.com/compute-optimizer/latest/ug/using-service-linked-roles.html)
in the *Compute Optimizer User Guide*.
"""
def update_enrollment_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateEnrollmentStatus", input, options)
end
end
|
lib/aws/generated/compute_optimizer.ex
| 0.940237
| 0.552238
|
compute_optimizer.ex
|
starcoder
|
defmodule Xgit.ConfigEntry do
  @moduledoc ~S"""
  Represents one entry in a git configuration dictionary.

  This is also commonly referred to as a "config _line_" because it typically
  occupies one line in a typical git configuration file.

  The semantically-important portion of a configuration file (i.e. everything
  except comments and whitespace) could be represented by a list of `ConfigEntry`
  structs.
  """
  import Xgit.Util.ForceCoverage

  @typedoc ~S"""
  Represents an entry in a git config file.

  ## Struct Members

  * `section`: (`String`) section name for the entry
  * `subsection`: (`String` or `nil`) subsection name
  * `name`: (`String`) key name
  * `value`: (`String`, `nil`, or `:remove_all`) value
    * `nil` if the name is present without an `=`
    * `:remove_all` can be used as an instruction in some APIs to remove any corresponding entries
  """
  @type t :: %__MODULE__{
          section: String.t(),
          subsection: String.t() | nil,
          name: String.t(),
          value: String.t() | :remove_all | nil
        }

  @enforce_keys [:section, :subsection, :name, :value]
  defstruct [:section, :name, subsection: nil, value: nil]

  @doc ~S"""
  Returns `true` if passed a valid config entry.
  """
  @spec valid?(value :: any) :: boolean
  def valid?(%__MODULE__{section: section, subsection: subsection, name: name, value: value}) do
    valid_section?(section) &&
      valid_subsection?(subsection) &&
      valid_name?(name) &&
      valid_value?(value)
  end

  def valid?(_), do: cover(false)

  @doc ~S"""
  Returns `true` if passed a valid config section name.

  Only alphanumeric characters, `-`, and `.` are allowed in section names.
  """
  @spec valid_section?(section :: any) :: boolean
  def valid_section?(section) when is_binary(section),
    do: String.match?(section, ~r/^[-A-Za-z0-9.]+$/)

  def valid_section?(_), do: cover(false)

  @doc ~S"""
  Returns `true` if passed a valid config subsection name.
  """
  @spec valid_subsection?(subsection :: any) :: boolean
  def valid_subsection?(subsection) when is_binary(subsection) do
    # NUL bytes and newlines are the only characters a subsection may not contain.
    case String.match?(subsection, ~r/[\0\n]/) do
      true -> cover(false)
      false -> cover(true)
    end
  end

  def valid_subsection?(nil), do: cover(true)
  def valid_subsection?(_), do: cover(false)

  @doc ~S"""
  Returns `true` if passed a valid config entry name.
  """
  @spec valid_name?(name :: any) :: boolean
  def valid_name?(name) when is_binary(name),
    do: String.match?(name, ~r/^[A-Za-z][-A-Za-z0-9]*$/)

  def valid_name?(_), do: cover(false)

  @doc ~S"""
  Returns `true` if passed a valid config value string.

  Important: At this level, we do not accept other data types.
  """
  @spec valid_value?(value :: any) :: boolean
  def valid_value?(value) when is_binary(value) do
    # A value may contain anything except a NUL byte.
    case String.match?(value, ~r/\0/) do
      true -> cover(false)
      false -> cover(true)
    end
  end

  def valid_value?(nil), do: cover(true)
  def valid_value?(:remove_all), do: cover(true)
  def valid_value?(_), do: cover(false)
end
|
lib/xgit/config_entry.ex
| 0.862829
| 0.452354
|
config_entry.ex
|
starcoder
|
defmodule Collidex.Detection.Polygons do
  @moduledoc """
  Detects collisions between polygons using the separating
  axis theorem. Has two variants, :fast and :accurate. :fast
  will miss a few rare types of collisions but is much faster.
  """
  alias Graphmath.Vec2
  alias Collidex.Geometry.Polygon
  alias Collidex.Utils

  @doc """
  Check if two polygons intersect. The return value is truthy
  (`{:collision, _}`) if they overlap on the plane and `false` otherwise.

  Uses the Separating Axis Theorem, and so can only perform accurate
  detection for convex polygons.

  The third argument, `method`, allows you to select between a default,
  fully accurate implementation of the Separating Axis Theorem, or a
  faster method that only checks for separation on a single axis.

  If :fast is passed as the third argument, this function will use the
  shortcut method of only checking the centroid-to-centroid
  axis. This method is at least as fast, with much better worst-case
  performance and will correctly detect the vast majority of collisions.
  It can, however, occasionally return a false positive for
  almost-colliding acute polygons (particularly triangles) at skew angles.
  """
  def collision?(poly1, poly2, type \\ :accurate)

  def collision?(poly1, poly2, :fast) do
    # Single-axis shortcut: only project onto the centroid-to-centroid axis.
    Vec2.subtract(Polygon.center(poly2), Polygon.center(poly1))
    |> collision_on_axis?(poly1, poly2)
  end

  def collision?(poly1, poly2, :accurate) do
    # Full SAT: candidate separating axes are the normals of every edge of
    # both polygons. One axis with no overlap proves the polygons are apart.
    axes_to_test = Utils.normals_of_edges(poly1) ++ Utils.normals_of_edges(poly2)

    if axes_to_test |> Enum.any?(&(!collision_on_axis?(&1, poly1, poly2))) do
      false
    else
      {:collision, "todo_provide_vector"}
    end
  end

  # Projects both polygons' vertices onto `axis` and reports whether the
  # projected min/max intervals overlap. Returns {:collision, _} or false.
  defp collision_on_axis?(axis, poly1, poly2) do
    collision =
      [poly1, poly2]
      |> Enum.map(& &1.vertices)
      |> Enum.map(fn vertices ->
        Enum.map(vertices, &Vec2.dot(&1, axis))
      end)
      |> Enum.map(&Enum.min_max(&1))
      |> Utils.overlap?

    if collision do
      {:collision, "todo_provide_vector"}
    else
      false
    end
  end
end
|
lib/collidex/detection/polygons.ex
| 0.89411
| 0.772058
|
polygons.ex
|
starcoder
|
defmodule XmerlAccess do
  @moduledoc """
  Use Elixir meta-programming to generate test and accessor functions.
  For each Xmerl record type generate the following:
  - A test function, e.g. `is_element/1`, `is_attribute/1`, etc.
  - A set of assessor functions, one for each field, e.g. `get_element_name/1`,
    `get_element_attributes/1`, ..., `get_attribute_name/1`, etc.
  """
  require XmerlRecs

  # Short names of the xmerl record types we generate functions for; each
  # corresponds to an :xmlXxx record declared in xmerl/include/xmerl.hrl.
  @record_types ["element", "attribute", "text", "namespace", "comment"]

  # NOTE: this Enum.each runs in the module body, i.e. at compile time.
  # Each iteration injects function definitions via `def unquote(...)`.
  @record_types
  |> Enum.each(fn record_type_str ->
    # e.g. "element" -> "xmlElement" (the record tag as a string/atom).
    record_type_string = "xml#{String.capitalize(record_type_str)}"
    record_type_atom = String.to_atom(record_type_string)

    # Build the predicate, e.g. is_element/1. NOTE(review): despite the
    # is_ prefix these are plain functions, not guard-safe macros.
    is_method_name_str = "is_#{record_type_str}"
    is_method_name_atom = String.to_atom(is_method_name_str)

    # Predicate body: check the first tuple element against the record tag.
    is_method_body_str = """
    if is_tuple(item) and tuple_size(item) > 0 do
      case elem(item, 0) do
        :#{record_type_string} -> true
        _ -> false
      end
    else
      false
    end
    """

    # string -> AST so it can be spliced into the generated function body.
    {:ok, is_method_body_ast} = Code.string_to_quoted(is_method_body_str)

    def unquote(is_method_name_atom) (item) do
      unquote(is_method_body_ast)
    end

    # For every field of the record, generate get_<type>_<field>/1 that
    # delegates to the corresponding XmerlRecs record accessor.
    Record.extract(record_type_atom, from_lib: "xmerl/include/xmerl.hrl")
    |> Enum.each(fn {field_name_atom, _} ->
      method_name_str = "get_#{record_type_str}_#{to_string(field_name_atom)}"
      method_name_atom = String.to_atom(method_name_str)

      method_body_str = "XmerlRecs.#{to_string(record_type_atom)}(item, :#{to_string(field_name_atom)})"
      {:ok, method_body_ast} = Code.string_to_quoted(method_body_str)

      def unquote(method_name_atom)(item) do
        unquote(method_body_ast)
      end
    end)
  end)
end
defmodule TestMetaprogramming do
  @moduledoc """
  Test functions for metaprogramming generated Xmerl access functions.
  """
  require XmerlAccess
  require XmerlRecs

  @doc """
  Walk and show the tree of XML elements.

  ## Examples

      iex> rec = File.stream!("Data/test02.xml") |> SweetXml.parse
      iex> TestMetaprogramming.show_tree(rec)

  """
  # Spec fixed: `Tuple.t()` is not a defined type (use `tuple()`), and the
  # spec must cover all three parameters, including the defaulted fillers.
  @spec show_tree(tuple(), String.t(), String.t()) :: nil
  def show_tree(rec, init_filler \\ "", add_filler \\ " ") do
    IO.puts("#{init_filler}element name: #{XmerlAccess.get_element_name(rec)}")

    Enum.each(XmerlAccess.get_element_attributes(rec), fn attr ->
      name = XmerlAccess.get_attribute_name(attr)
      value = XmerlAccess.get_attribute_value(attr)
      IO.puts("#{init_filler} attribute -- name: #{name} value: #{value}")
    end)

    Enum.each(XmerlAccess.get_element_content(rec), fn item ->
      filler1 = init_filler <> add_filler

      # Recurse into child elements only; text/comment nodes are ignored here.
      case elem(item, 0) do
        :xmlElement ->
          show_tree(item, filler1, add_filler)
          nil

        _ ->
          nil
      end
    end)

    nil
  end

  @doc """
  Show some information in the element tree using Elixir Xmerl records.

  ## Examples

      iex> record = File.stream!("path/to/my/doc.xml") |> SweetXml.parse
      iex> TestMetaprogramming.demo1 record

  """
  @spec demo1(tuple()) :: :ok
  def demo1(element) do
    name = XmerlRecs.xmlElement(element, :name)
    IO.puts("element name: #{name}")

    XmerlRecs.xmlElement(element, :attributes)
    |> Enum.each(fn attr ->
      attrname = XmerlRecs.xmlAttribute(attr, :name)
      attrvalue = XmerlRecs.xmlAttribute(attr, :value)
      IO.puts(" attribute -- name: #{attrname} value: #{attrvalue}")
    end)

    XmerlRecs.xmlElement(element, :content)
    |> Enum.each(fn item ->
      case elem(item, 0) do
        :xmlText ->
          IO.puts(" text -- value: #{XmerlRecs.xmlText(item, :value)}")

        _ ->
          nil
      end
    end)
  end

  @doc """
  Show some information in the element tree using functions created with
  meta-programming.

  ## Examples

      iex> record = File.stream!("path/to/my/doc.xml") |> SweetXml.parse
      iex> TestMetaprogramming.demo2 record

  """
  # NOTE(review): this calls Xml.Element / Xml.Text helpers rather than the
  # XmerlAccess.get_element_* functions generated above — confirm those
  # modules exist elsewhere in the project.
  @spec demo2(tuple()) :: :ok
  def demo2(element) do
    name = Xml.Element.get_name(element)
    IO.puts("element name: #{name}")

    Xml.Element.get_attributes(element)
    |> Enum.each(fn attr ->
      attrname = XmerlRecs.xmlAttribute(attr, :name)
      attrvalue = XmerlRecs.xmlAttribute(attr, :value)
      IO.puts(" attribute -- name: #{attrname} value: #{attrvalue}")
    end)

    Xml.Element.get_content(element)
    |> Enum.each(fn item ->
      case elem(item, 0) do
        :xmlText ->
          IO.puts(" text -- value: #{Xml.Text.get_value(item)}")

        _ ->
          nil
      end
    end)
  end
end
|
lib/xmlmetaprogramming.ex
| 0.733261
| 0.457016
|
xmlmetaprogramming.ex
|
starcoder
|
defmodule Distance do
  @moduledoc ~S"""
  Basic distance calculations for cartesian coordinates for calculating
  distances on a single plane. If you are looking to calculate distance
  on the surface of Earth, check out the `Distance.GreatCircle` module.

  ## Examples

      iex> Distance.distance({2.5, 2.5}, {4, 0.8})
      2.2671568097509267

      iex> Distance.segment_distance({2.5, 2.5}, {4, 0.8}, {-2, 3})
      1.0797077632696

      iex> Distance.distance([{2.5, 2.5}, {4, 0.8}, {-2, 3}, {1, -1}])
      13.657774933219109

  """

  @type point() :: {number(), number()}

  @doc """
  Returns the geometric distance between two points. Accepts 2- or
  3-dimensional points.

  ## Examples

      iex> Distance.distance({1, -2}, {-2, 2})
      5.0

      iex> Distance.distance({1, -2, 2}, {-2, 2, 1})
      5.0990195135927845

  """
  @spec distance(point, point) :: float()
  def distance(p1, p2), do: :math.sqrt(distance_squared(p1, p2))

  @doc """
  Returns the square of the distance between two points. This is used by the
  `Distance.distance` function above, but having access to the value before
  the expensive square root operation is useful for time-sensitive
  applications that only need values for comparison.

  ## Examples

      iex> Distance.distance_squared({1, -2}, {-2, 2})
      25

      iex> Distance.distance_squared({1, -2, 2}, {-2, 2, 1})
      26

  """
  # Spec widened to number(): with integer coordinates this returns an
  # integer (see doctests above), not necessarily a float.
  @spec distance_squared(point, point) :: number()
  def distance_squared({x1, y1}, {x2, y2}) do
    dx = x1 - x2
    dy = y1 - y2
    dx * dx + dy * dy
  end

  def distance_squared({x1, y1, z1}, {x2, y2, z2}) do
    dx = x1 - x2
    dy = y1 - y2
    dz = z1 - z2
    dx * dx + dy * dy + dz * dz
  end

  @doc """
  Returns the geometric distance from a point `p` and the infinite line
  extending through points `p1` and `p2`.

  ## Examples

      iex> Distance.line_distance({3, 2}, {-2, 1}, {5, 3})
      0.4120816918460673

      iex> Distance.line_distance({1, -2}, {-2, 2}, {-10, 102})
      2.671464946476815

      iex> Distance.line_distance({1, -2}, {-2, 2}, {1, -2})
      0.0

  """
  @spec line_distance(point, point, point) :: float()
  def line_distance(p, p1, p2), do: :math.sqrt(line_distance_squared(p, p1, p2))

  @doc """
  Similar to `Distance.distance_squared`, this provides much faster comparable
  version of `Distance.line_distance`.

  ## Examples

      iex> Distance.line_distance_squared({3, 2}, {-2, 1}, {5, 3})
      0.16981132075471717

      iex> Distance.line_distance_squared({1, -2}, {-2, 2}, {-10, 102})
      7.136724960254371

  """
  @spec line_distance_squared(point, point, point) :: float()
  # Degenerate line (p1 == p2): distance to the single point would divide
  # by zero below, so define it as 0.0 to match the original behavior.
  def line_distance_squared(_, {x1, y1}, {x2, y2}) when x1 == x2 and y1 == y2, do: 0.0

  def line_distance_squared({x, y}, {x1, y1}, {x2, y2}) do
    dx = x2 - x1
    dy = y2 - y1
    # t is the (unclamped) projection parameter of p onto the line.
    t = ((x - x1) * dx + (y - y1) * dy) / (dx * dx + dy * dy)
    distance_squared({x, y}, {x1 + dx * t, y1 + dy * t})
  end

  @doc """
  Returns the geometric distance from a point `p` and the line segment
  between two points `p1` and `p2`. Note that this is a line segment, not
  an infinite line, so points not between `p1` and `p2` will return the
  distance to the nearest of the two endpoints.

  ## Examples

      iex> Distance.segment_distance({3, 2}, {-2, 1}, {5, 3})
      0.4120816918460673

      iex> Distance.segment_distance({1, -2}, {-2, 2}, {-10, 102})
      5.0

      iex> Distance.segment_distance({1, -2}, {-2, 2}, {1, -2})
      0.0

  """
  @spec segment_distance(point, point, point) :: float()
  def segment_distance(p, p1, p2), do: :math.sqrt(segment_distance_squared(p, p1, p2))

  @doc """
  Similar to `Distance.distance_squared`, this provides much faster comparable
  version of `Distance.segment_distance`.

  ## Examples

      iex> Distance.segment_distance_squared({3, 2}, {-2, 1}, {5, 3})
      0.16981132075471717

      iex> Distance.segment_distance_squared({1, -2}, {-2, 2}, {-10, 102})
      25

  """
  # Spec widened to number(): integer inputs can produce an integer result
  # (see the second doctest above).
  @spec segment_distance_squared(point, point, point) :: number()
  def segment_distance_squared({x, y}, {x1, y1}, {x2, y2}) when x1 == x2 and y1 == y2,
    do: distance_squared({x, y}, {x1, y1})

  def segment_distance_squared({x, y}, {x1, y1}, {x2, y2}) do
    dx = x2 - x1
    dy = y2 - y1
    t = ((x - x1) * dx + (y - y1) * dy) / (dx * dx + dy * dy)

    # Clamp the projection to the segment: t > 1 -> nearest to p2,
    # 0 < t <= 1 -> interior point, t <= 0 -> nearest to p1.
    cond do
      t > 1 -> distance_squared({x, y}, {x2, y2})
      t > 0 -> distance_squared({x, y}, {x1 + dx * t, y1 + dy * t})
      true -> distance_squared({x, y}, {x1, y1})
    end
  end

  @doc """
  Provides the minimum distance between any two points along the given line
  segments. In the case where the segments are not disjoint, this will
  always return `0.0`.

  ## Example

      iex> Distance.segment_segment_distance({0, 0}, {1, 1}, {1, 0}, {2, 0})
      0.7071067811865476

      iex> Distance.segment_segment_distance({0, 0}, {1, 1}, {1, 1}, {2, 2})
      0.0

  """
  @spec segment_segment_distance(point, point, point, point) :: float()
  def segment_segment_distance(a1, a2, b1, b2),
    do: :math.sqrt(segment_segment_distance_squared(a1, a2, b1, b2))

  @doc """
  Similar to `Distance.distance_squared`, this provides much faster comparable
  version of `Distance.segment_segment_distance`.
  """
  @spec segment_segment_distance_squared(point, point, point, point) :: number()
  def segment_segment_distance_squared(a1, a2, b1, b2) do
    case SegSeg.intersection(a1, a2, b1, b2) do
      {true, _, _} ->
        0.0

      {false, _, _} ->
        # Disjoint segments: the closest approach is from one endpoint to
        # the other segment; take the minimum over all four combinations.
        [
          segment_distance_squared(a1, b1, b2),
          segment_distance_squared(a2, b1, b2),
          segment_distance_squared(b1, a1, a2),
          segment_distance_squared(b2, a1, a2)
        ]
        |> Enum.min()
    end
  end

  @doc """
  Returns the geometric distance of the linestring defined by the List of
  points. Accepts 2- or 3-dimensional points.

  ## Examples

      iex> Distance.distance([{2.5, 2.5}, {4, 0.8}, {2.5, 3.1}, {2.5, 3.1}])
      5.013062853300123

      iex> Distance.distance([{1, -2, 1}, {-2, 2, -1}, {-2, 1, 0}, {2, -3, 1}])
      12.543941016045627

  """
  @spec distance(list(point)) :: float()
  def distance([]), do: 0.0
  def distance([_]), do: 0.0
  def distance([p1, p2]), do: distance(p1, p2)

  def distance([p1, p2 | tail]) do
    distance(p1, p2) + distance([p2 | tail])
  end

  @doc """
  Returns a point `distance` units away in the direction `direction`.

  The direction is measured as radians off of the positive x-axis in the
  direction of the positive y-axis. Thus the new coordinates are:

  ```elixir
  x1 = x0 + distance * cos(direction)
  y1 = y0 + distance * sin(direction)
  ```

  ## Examples

      iex> Distance.project({3, 5}, 3 * :math.pi() / 4, 2)
      {1.585786437626905, 6.414213562373095}

  """
  @spec project(point(), number(), number()) :: point()
  def project({x0, y0}, direction, distance) do
    {
      x0 + distance * :math.cos(direction),
      y0 + distance * :math.sin(direction)
    }
  end

  @doc """
  Returns the direction from p0 to p1. The direction is
  measured as radians off of the positive x-axis in the direction of the
  positive y-axis.

  The returned value will always be in the range of (-π, π]. The direction
  along the negative x-axis will always return positive π.

  ## Examples

      iex> Distance.angle_to({2, -1}, {2, 5})
      :math.pi() / 2

  """
  @spec angle_to(point(), point()) :: float()
  def angle_to({x0, y0}, {x1, y1}) do
    :math.atan2(y1 - y0, x1 - x0)
  end

  @doc """
  Returns the coterminal angle closest to 0 for the given angle.

  No matter the angle provided, the returned angle will be in the range
  (-π, π].

  ## Examples

      iex> Distance.min_coterminal_angle(:math.pi() / 2.0)
      :math.pi() / 2

      iex> Distance.min_coterminal_angle(-2.0 + :math.pi() * 6)
      -2.0

      iex> Distance.min_coterminal_angle(-:math.pi())
      :math.pi()

  """
  @spec min_coterminal_angle(number()) :: float()
  def min_coterminal_angle(angle) do
    :math.pi() - min_positive_coterminal_angle(:math.pi() - angle)
  end

  @doc """
  Returns the minimal positive coterminal angle for the given angle.

  No matter the angle provided, the returned angle will be in the range
  [0, 2π).

  ## Examples

      iex> Distance.min_positive_coterminal_angle(:math.pi() / 2.0)
      :math.pi() / 2

      iex> Distance.min_positive_coterminal_angle(-2.0 + :math.pi() * 6)
      :math.pi() * 2.0 - 2.0

      iex> Distance.min_positive_coterminal_angle(-:math.pi())
      :math.pi()

  """
  @spec min_positive_coterminal_angle(number()) :: float()
  def min_positive_coterminal_angle(angle) do
    :math.fmod(angle, :math.pi() * 2)
    |> case do
      a when a < 0.0 -> a + :math.pi() * 2
      a -> a
    end
  end

  @doc """
  Returns the angular difference between two directions, in the range
  (-π, π].
  """
  @spec angular_difference(number(), number()) :: float()
  def angular_difference(a1, a2) do
    min_coterminal_angle(a2 - a1)
  end
end
|
lib/distance.ex
| 0.962267
| 0.887984
|
distance.ex
|
starcoder
|
defmodule ExAequo.KeywordParams do
  use ExAequo.Types

  @moduledoc """
  ## Tools to facilitate dispatching on keyword parameters, used in contexts like the following

      @defaults [a: 1, b: false] # Keyword or Map
      def some_fun(..., options \\\\ []) # options again can be a Keyword or Map
        {a, b} = tuple_from_params(@defaults, options, [:a, :b])

  ### Merging defaults and actual parameters

  Its most useful feature is that you will get a map whatever the mixtures of maps and keywords the
  input was

  ```elixir
  iex(0)> merge_params([])
  %{}

  iex(1)> merge_params([a: 1], %{b: 2})
  %{a: 1, b: 2}

  iex(2)> merge_params(%{a: 1}, [a: 2, b: 2])
  %{a: 2, b: 2}
  ```

  #### Strict merging

  _Not implemented yet_

  ### Extracting params from the merged defaults and actuals

      iex(3)> defaults = [foo: false, depth: 3]
      ...(3)> tuple_from_params(defaults, %{foo: true}, [:foo, :depth])
      {true, 3}

  As defaults are required a missing parameter will raise an Error

      iex(4)> try do
      ...(4)>   tuple_from_params([], [foo: 1], [:bar])
      ...(4)> rescue
      ...(4)>   KeyError -> :caught
      ...(4)> end
      :caught

  Alternatively on can extract a map

      iex(5)> map_from_params([], [hello: "world"], [:hello])
      %{hello: "world"}
  """

  @doc """
  This is the 2 param form which is identical to an empty default map

      iex(6)> map_from_params(%{a: 1, b: 2}, [:a])
      %{a: 1}
  """
  @spec map_from_params(params_t(), list()) :: map()
  def map_from_params(actual, keys), do: map_from_params(%{}, actual, keys)

  @spec map_from_params(params_t(), params_t(), list()) :: map()
  def map_from_params(default, actual, keys) do
    merged = merge_params(default, actual)
    # Map.fetch!/2 raises KeyError for any requested key that is absent,
    # matching the documented contract.
    Map.new(keys, fn key -> {key, Map.fetch!(merged, key)} end)
  end

  @spec merge_params(params_t()) :: map()
  def merge_params(actual), do: merge_params(%{}, actual)

  @spec merge_params(params_t(), params_t()) :: map()
  def merge_params(default, actual)

  # Normalize keyword lists to maps first, then do a plain Map.merge/2 in
  # which `actual` wins over `default`.
  def merge_params(default, actual) when is_list(default),
    do: merge_params(Map.new(default), actual)

  def merge_params(default, actual) when is_list(actual),
    do: merge_params(default, Map.new(actual))

  def merge_params(default, actual), do: Map.merge(default, actual)

  @doc """
  This is the 2 param form which is identical to an empty default map

      iex(7)> tuple_from_params(%{a: 1, b: 2}, [:b, :a])
      {2, 1}
  """
  @spec tuple_from_params(params_t(), list()) :: tuple()
  def tuple_from_params(actual, keys), do: tuple_from_params([], actual, keys)

  @spec tuple_from_params(params_t(), params_t(), list()) :: tuple()
  def tuple_from_params(default, actual, keys) do
    merged = merge_params(default, actual)

    # Values come out in the order of `keys`; Map.fetch!/2 raises on a
    # missing key, as documented.
    keys
    |> Enum.map(&Map.fetch!(merged, &1))
    |> List.to_tuple()
  end
end
|
lib/ex_aequo/keyword_params.ex
| 0.798501
| 0.911849
|
keyword_params.ex
|
starcoder
|
defmodule Hanoi do
  @moduledoc """
  The Tower of Hanoi data structure and operations
  """

  # started / ended : false until set; otherwise a millisecond timestamp
  #                   from now/0 (:os.system_time(:millisecond)).
  # duration        : whole seconds between started and ended (or now);
  #                   maintained by update_duration/1.
  # picked          : nil, or {tower_key, peg} for the currently lifted peg.
  # tower_*         : pegs as lists; the list head is what pick/2 lifts.
  # display_*       : render lists of {peg, :up | :down} tuples.
  defstruct started: false,
            ended: false,
            duration: 0,
            picked: nil,
            num_pieces: 4,
            num_moves: 0,
            tower_a: [],
            tower_b: [],
            tower_c: [],
            display_a: [],
            display_b: [],
            display_c: []

  # Hard upper bound on the number of pieces a game may have.
  @max_pieces 8
  def max_pieces(), do: @max_pieces

  @doc """
  Create a new game with `num_pieces` blocks. You can provide an existing
  game and grab its number of pieces too.

  ## Examples

      iex> Hanoi.new_game()
      %Hanoi{tower_a: [4,3,2,1], display_a: [{4, :down}, {3, :down}, {2, :down}, {1, :down}]}

      iex> Hanoi.new_game(5)
      %Hanoi{num_pieces: 5, tower_a: [5,4,3,2,1], display_a: [{5, :down}, {4, :down}, {3, :down}, {2, :down}, {1, :down}]}

      iex> Hanoi.new_game(5) |> Hanoi.new_game()
      %Hanoi{num_pieces: 5, tower_a: [5,4,3,2,1], display_a: [{5, :down}, {4, :down}, {3, :down}, {2, :down}, {1, :down}]}
  """
  def new_game(), do: new_game(4)
  def new_game(%Hanoi{num_pieces: num_pieces}), do: new_game(num_pieces)
  # Clamp the requested size into 1..@max_pieces.
  def new_game(num_pieces) when num_pieces < 1, do: new_game(1)
  def new_game(num_pieces) when num_pieces > @max_pieces, do: new_game(@max_pieces)

  def new_game(num_pieces) do
    %Hanoi{
      num_pieces: num_pieces,
      tower_a: num_pieces..1 |> Enum.into([])
    }
    |> display()
  end

  @doc """
  Add another piece to the game, this will restart the game.
  The maximum number of pieces (right now) is 8

  ## Examples

      iex> Hanoi.new_game(3) |> Hanoi.inc()
      %Hanoi{tower_a: [4,3,2,1], display_a: [{4, :down}, {3, :down}, {2, :down}, {1, :down}]}

      iex> Hanoi.new_game(8) |> Hanoi.inc()
      Hanoi.new_game(8)
  """
  def inc(game), do: new_game(game.num_pieces + 1)

  @doc """
  Remove a piece from the game, this will restart the game.
  The minimum number of pieces is 1

  ## Examples

      iex> Hanoi.new_game(5) |> Hanoi.dec()
      %Hanoi{tower_a: [4,3,2,1], display_a: [{4, :down}, {3, :down}, {2, :down}, {1, :down}]}

      iex> Hanoi.new_game(1) |> Hanoi.dec()
      %Hanoi{num_pieces: 1, tower_a: [1], display_a: [{1, :down}]}
  """
  def dec(game), do: new_game(game.num_pieces - 1)

  @doc """
  When you are ready, start the game

  ## Examples

      iex> Hanoi.new_game() |> Hanoi.start_game() |> Hanoi.started?()
      true
  """
  def start_game(game), do: %{game | started: now()}

  @doc """
  Did you give up?  Ok, then restart the game.
  This will create a new game of the same number of blocks and mark it as started

  ## Examples

      iex> Hanoi.new_game(2) |> Hanoi.start_game() |> Hanoi.restart_game() |> Map.fetch!(:num_pieces)
      2

      iex> Hanoi.new_game(2) |> Hanoi.start_game() |> Hanoi.restart_game() |> Hanoi.started?()
      true
  """
  def restart_game(game), do: new_game(game.num_pieces) |> start_game()

  @doc """
  Has the game started?  Check if the started is false, if not then it is started

  ## Examples

      iex> Hanoi.new_game(2) |> Hanoi.started?()
      false

      iex> Hanoi.new_game(2) |> Hanoi.start_game() |> Hanoi.started?()
      true
  """
  def started?(%Hanoi{started: false}), do: false
  def started?(%Hanoi{started: _}), do: true

  @doc """
  Has the game ended?  Check if the ended is false, if not then it has ended

  ## Examples

      iex> Hanoi.new_game(2) |> Hanoi.ended?()
      false

      iex> Hanoi.ended?(%Hanoi{ended: 1234})
      true
  """
  def ended?(%Hanoi{ended: false}), do: false
  def ended?(%Hanoi{ended: _}), do: true

  @doc """
  Pick a piece.

  ## Examples

      iex> Hanoi.new_game(2) |> Hanoi.pick(:tower_b)
      %Hanoi{num_pieces: 2, tower_a: [2,1], display_a: [{2, :down}, {1, :down}]}

      iex> Hanoi.new_game(2) |> Hanoi.pick(:tower_a)
      %Hanoi{num_pieces: 2, picked: {:tower_a, 2}, tower_a: [1], display_a: [{2, :up}, {1, :down}]}

      iex> Hanoi.new_game(2) |> Hanoi.pick(:tower_a) |> Hanoi.pick(:tower_a)
      %Hanoi{num_pieces: 2, picked: nil, tower_a: [2, 1], display_a: [{2, :down}, {1, :down}]}

      iex> Hanoi.new_game(2) |> Hanoi.pick(:tower_a) |> Hanoi.pick(:tower_b)
      %Hanoi{num_pieces: 2, picked: nil, num_moves: 1, tower_a: [1], tower_b: [2], display_a: [{1, :down}], display_b: [{2, :down}]}

      iex> %Hanoi{picked: {:tower_a, 1}, tower_a: [], tower_b: [2]} |> Hanoi.pick(:tower_b)
      %Hanoi{picked: {:tower_a, 1}, tower_a: [], tower_b: [2], display_a: [{1, :up}], display_b: [{2, :down}]}

      iex> %Hanoi{picked: {:tower_a, 2}, tower_a: [], tower_c: []} |> Hanoi.ended?()
      false

      iex> %Hanoi{picked: {:tower_a, 2}, tower_a: [], tower_c: [1]} |> Hanoi.pick(:tower_c) |> Hanoi.ended?()
      true
  """
  def pick(game, tower) do
    game
    |> _pick(tower, Map.fetch!(game, tower))
    |> display()
  end

  # Nothing held and the tower is empty: no-op.
  defp _pick(%Hanoi{picked: nil} = game, _tower, []), do: game

  # Nothing held: lift the head peg off the chosen tower.
  defp _pick(%Hanoi{picked: nil} = game, tower, [h | t]) do
    game
    |> Map.put(:picked, {tower, h})
    |> Map.put(tower, t)
  end

  # Held peg dropped back onto the tower it came from: undo the pick
  # without counting a move (note `tower` is bound in both patterns).
  defp _pick(%Hanoi{picked: {tower, n}} = game, tower, pegs) do
    game
    |> Map.put(:picked, nil)
    |> Map.put(tower, [n | pegs])
  end

  # Illegal drop: the target tower's head peg `m` is larger than the held
  # peg `n` (see the fifth doctest above) — game state is unchanged.
  defp _pick(%Hanoi{picked: {_, n}} = game, _tower, [m | _t]) when m > n, do: game

  # Legal drop onto a different tower: clears `picked` and counts one move.
  defp _pick(%Hanoi{picked: {_, n}} = game, tower, pegs) do
    game
    |> Map.put(:picked, nil)
    |> Map.update!(:num_moves, &(&1 + 1))
    |> Map.put(tower, [n | pegs])
  end

  @doc """
  Update the game's display.

  ## Examples

      iex> %Hanoi{picked: {:tower_a, 1}, tower_a: [], tower_b: [2]} |> Hanoi.display()
      %Hanoi{picked: {:tower_a, 1}, tower_a: [], tower_b: [2], display_a: [{1, :up}], display_b: [{2, :down}]}
  """
  def display(game) do
    # Order matters: win detection reads the (unchanged) tower lists, and
    # duration depends on the `ended` timestamp update_won/1 may set.
    game
    |> update_towers()
    |> update_won()
    |> update_duration()
  end

  # A tower holding the picked peg renders it first, in the :up state.
  defp display_tower(%Hanoi{picked: {tower, n}} = game, tower) do
    [{n, :up}] ++ down_pegs(game, tower)
  end

  defp display_tower(game, tower) do
    down_pegs(game, tower)
  end

  # All pegs currently resting on `tower`, rendered :down.
  defp down_pegs(game, tower) do
    game
    |> Map.fetch!(tower)
    |> Enum.map(fn n -> {n, :down} end)
  end

  defp update_towers(game) do
    %{
      game
      | display_a: display_tower(game, :tower_a),
        display_b: display_tower(game, :tower_b),
        display_c: display_tower(game, :tower_c)
    }
  end

  # Won: nothing is held and towers A and B are empty (all pegs on C);
  # stamp `ended` exactly once (only when it is still false).
  defp update_won(%Hanoi{ended: false, picked: nil, tower_a: [], tower_b: []} = game) do
    %{game | ended: now()}
  end

  defp update_won(game), do: game

  defp update_duration(%Hanoi{started: false} = game), do: %{game | duration: 0}

  # Running game: duration is measured up to the current time.
  defp update_duration(%Hanoi{started: s, ended: false} = game) do
    %{game | duration: duration(s, now())}
  end

  defp update_duration(%Hanoi{started: s, ended: e} = game) do
    %{game | duration: duration(s, e)}
  end

  # Millisecond wall-clock timestamp.
  defp now(), do: :os.system_time(:millisecond)

  # Elapsed time in whole (rounded) seconds.
  defp duration(started, ended), do: ((ended - started) / 1000) |> round()
end
|
lib/techblog/lib/hanoi.ex
| 0.765418
| 0.721136
|
hanoi.ex
|
starcoder
|
defmodule Day10 do
  # Product of the 1-jolt gap count and the 3-jolt gap count across the
  # sorted adapter chain.
  def part_one(file_reader \\ InputFile) do
    gaps = deltas(file_reader)

    ones = Enum.count(gaps, &(&1 == 1))
    # The built-in device is always 3 jolts above the last adapter, so it
    # contributes one extra 3-jolt gap.
    threes = Enum.count(gaps, &(&1 == 3)) + 1

    ones * threes
  end

  # A 3 jolt delta means that we have to include the pair of adapters with
  # that delta. So we can split into chunks of non-3 deltas, figure out the
  # allowed combos of those non-3 jolt delta adapters, and then multiply
  # those combos to get the final number of allowed combinations.
  def part_two(file_reader \\ InputFile) do
    file_reader
    |> deltas()
    |> Enum.chunk_while([], &chunk/2, &finish/1)
    |> Enum.reject(&is_nil/1)
    |> Enum.map(&Enum.reverse/1)
    |> Enum.map(&combos/1)
    |> Enum.reduce(fn combo, acc -> combo * acc end)
  end

  # Gaps between consecutive joltages, starting from the 0-jolt outlet.
  defp deltas(file_reader) do
    [0 | Enum.sort(jolts(file_reader))]
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.map(fn [lower, upper] -> upper - lower end)
  end

  def jolts(file_reader) do
    file_reader.contents_of(10, :stream)
    |> Enum.map(fn line -> line |> String.trim() |> String.to_integer() end)
  end

  # Brute-force enumeration of valid adapter orderings (kept for reference;
  # part_two uses the chunk/combos approach instead).
  def valid_orderings(joltages), do: valid_orderings([[0]], joltages)

  def valid_orderings(orderings, [joltage]),
    do: Enum.filter(orderings, fn ordering -> joltage - hd(ordering) <= 3 end)

  def valid_orderings(orderings, [joltage | rest]) do
    orderings
    |> Enum.flat_map(fn
      ordering when joltage - hd(ordering) <= 3 -> [ordering, [joltage | ordering]]
      _ -> []
    end)
    |> valid_orderings(rest)
  end

  # Emit the accumulated chunk (with its terminating 3) at each 3-jolt gap.
  def chunk(3, acc), do: {:cont, [3 | acc], []}
  def chunk(delta, acc), do: {:cont, [delta | acc]}

  # Close out the trailing chunk as if it ended with a 3-jolt gap (the device).
  def finish(acc), do: {:cont, [3 | acc], []}

  # Just a 3-delta in isolation means the adapter has to be included
  def combos([3]), do: 1
  # A 1-3 combo means both are needed (otherwise the delta would be 4+)
  def combos([1, 3]), do: 1
  # The first 1-delta is optional, so 2 combos
  def combos([1, 1, 3]), do: 2
  # The first 2 1-deltas are optional, so 4 combos
  def combos([1, 1, 1, 3]), do: 4
  # There are 8 combos of the first 3 1-deltas, but we have to throw out the
  # combo that excludes all of them, to keep the total delta under 4.
  def combos([1, 1, 1, 1, 3]), do: 7
end
|
year_2020/lib/day_10.ex
| 0.644561
| 0.507446
|
day_10.ex
|
starcoder
|
defmodule AWS.CloudWatch do
@moduledoc """
Amazon CloudWatch monitors your Amazon Web Services (Amazon Web Services)
resources and the applications you run on Amazon Web Services in real time.
You can use CloudWatch to collect and track metrics, which are the variables you
want to measure for your resources and applications.
CloudWatch alarms send notifications or automatically change the resources you
are monitoring based on rules that you define. For example, you can monitor the
CPU usage and disk reads and writes of your Amazon EC2 instances. Then, use this
data to determine whether you should launch additional instances to handle
increased load. You can also use this data to stop under-used instances to save
money.
In addition to monitoring the built-in metrics that come with Amazon Web
Services, you can monitor your own custom metrics. With CloudWatch, you gain
system-wide visibility into resource utilization, application performance, and
operational health.
"""
alias AWS.Client
alias AWS.Request
  # Static service descriptor consumed by AWS.Request when signing and
  # routing calls to the CloudWatch Query API.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: "CloudWatch",
      api_version: "2010-08-01",
      content_type: "application/x-www-form-urlencoded",
      credential_scope: nil,
      endpoint_prefix: "monitoring",
      global?: false,
      protocol: "query",
      service_id: "CloudWatch",
      signature_version: "v4",
      signing_name: "monitoring",
      target_prefix: nil
    }
  end
@doc """
Deletes the specified alarms.
You can delete up to 100 alarms in one operation. However, this total can
include no more than one composite alarm. For example, you could delete 99
metric alarms and one composite alarms with one operation, but you can't delete
two composite alarms with one operation.
In the event of an error, no alarms are deleted.
It is possible to create a loop or cycle of composite alarms, where composite
alarm A depends on composite alarm B, and composite alarm B also depends on
composite alarm A. In this scenario, you can't delete any composite alarm that
is part of the cycle because there is always still a composite alarm that
depends on that alarm that you want to delete.
To get out of such a situation, you must break the cycle by changing the rule of
one of the composite alarms in the cycle to remove a dependency that creates the
cycle. The simplest change to make to break a cycle is to change the `AlarmRule`
of one of the alarms to `False`.
Additionally, the evaluation of composite alarms stops if CloudWatch detects a
cycle in the evaluation path.
"""
def delete_alarms(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAlarms", input, options)
end
@doc """
Deletes the specified anomaly detection model from your account.
"""
def delete_anomaly_detector(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAnomalyDetector", input, options)
end
@doc """
Deletes all dashboards that you specify.
You can specify up to 100 dashboards to delete. If there is an error during this
call, no dashboards are deleted.
"""
def delete_dashboards(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDashboards", input, options)
end
@doc """
Permanently deletes the specified Contributor Insights rules.
If you create a rule, delete it, and then re-create it with the same name,
historical data from the first time the rule was created might not be available.
"""
def delete_insight_rules(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteInsightRules", input, options)
end
@doc """
Permanently deletes the metric stream that you specify.
"""
def delete_metric_stream(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteMetricStream", input, options)
end
@doc """
Retrieves the history for the specified alarm.
You can filter the results by date range or item type. If an alarm name is not
specified, the histories for either all metric alarms or all composite alarms
are returned.
CloudWatch retains the history of an alarm even if you delete the alarm.
To use this operation and return information about a composite alarm, you must
be signed on with the `cloudwatch:DescribeAlarmHistory` permission that is
scoped to `*`. You can't return information about composite alarms if your
`cloudwatch:DescribeAlarmHistory` permission has a narrower scope.
"""
def describe_alarm_history(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAlarmHistory", input, options)
end
@doc """
Retrieves the specified alarms.
You can filter the results by specifying a prefix for the alarm name, the alarm
state, or a prefix for any action.
To use this operation and return information about composite alarms, you must be
signed on with the `cloudwatch:DescribeAlarms` permission that is scoped to `*`.
You can't return information about composite alarms if your
`cloudwatch:DescribeAlarms` permission has a narrower scope.
"""
def describe_alarms(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAlarms", input, options)
end
@doc """
Retrieves the alarms for the specified metric.
To filter the results, specify a statistic, period, or unit.
This operation retrieves only standard alarms that are based on the specified
metric. It does not return alarms based on math expressions that use the
specified metric, or composite alarms that use the specified metric.
"""
def describe_alarms_for_metric(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAlarmsForMetric", input, options)
end
@doc """
Lists the anomaly detection models that you have created in your account.
For single metric anomaly detectors, you can list all of the models in your
account or filter the results to only the models that are related to a certain
namespace, metric name, or metric dimension. For metric math anomaly detectors,
you can list them by adding `METRIC_MATH` to the `AnomalyDetectorTypes` array.
This will return all metric math anomaly detectors in your account.
"""
def describe_anomaly_detectors(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAnomalyDetectors", input, options)
end
@doc """
Returns a list of all the Contributor Insights rules in your account.
For more information about Contributor Insights, see [Using Contributor Insights to Analyze High-Cardinality
Data](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/ContributorInsights.html).
"""
def describe_insight_rules(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeInsightRules", input, options)
end
@doc """
Disables the actions for the specified alarms.
When an alarm's actions are disabled, the alarm actions do not execute when the
alarm state changes.
"""
def disable_alarm_actions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableAlarmActions", input, options)
end
@doc """
Disables the specified Contributor Insights rules.
When rules are disabled, they do not analyze log groups and do not incur costs.
"""
def disable_insight_rules(%Client{} = client, input, options \\ []) do
  # CloudWatch "DisableInsightRules", dispatched through the shared POST helper.
  action = "DisableInsightRules"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Enables the actions for the specified alarms.
"""
def enable_alarm_actions(%Client{} = client, input, options \\ []) do
  # CloudWatch "EnableAlarmActions", dispatched through the shared POST helper.
  action = "EnableAlarmActions"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Enables the specified Contributor Insights rules.
When rules are enabled, they immediately begin analyzing log data.
"""
def enable_insight_rules(%Client{} = client, input, options \\ []) do
  # CloudWatch "EnableInsightRules", dispatched through the shared POST helper.
  action = "EnableInsightRules"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Displays the details of the dashboard that you specify.
To copy an existing dashboard, use `GetDashboard`, and then use the data
returned within `DashboardBody` as the template for the new dashboard when you
call `PutDashboard` to create the copy.
"""
def get_dashboard(%Client{} = client, input, options \\ []) do
  # CloudWatch "GetDashboard", dispatched through the shared POST helper.
  action = "GetDashboard"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
This operation returns the time series data collected by a Contributor Insights
rule.
The data includes the identity and number of contributors to the log group.
You can also optionally return one or more statistics about each data point in
the time series. These statistics can include the following:
* `UniqueContributors` -- the number of unique contributors for each
data point.
* `MaxContributorValue` -- the value of the top contributor for each
data point. The identity of the contributor might change for each data point in
the graph.
If this rule aggregates by COUNT, the top contributor for each data point is the
contributor with the most occurrences in that period. If the rule aggregates by
SUM, the top contributor is the contributor with the highest sum in the log
field specified by the rule's `Value`, during that period.
* `SampleCount` -- the number of data points matched by the rule.
* `Sum` -- the sum of the values from all contributors during the
time period represented by that data point.
* `Minimum` -- the minimum value from a single observation during
the time period represented by that data point.
* `Maximum` -- the maximum value from a single observation during
the time period represented by that data point.
* `Average` -- the average value from all contributors during the
time period represented by that data point.
"""
def get_insight_rule_report(%Client{} = client, input, options \\ []) do
  # CloudWatch "GetInsightRuleReport", dispatched through the shared POST helper.
  action = "GetInsightRuleReport"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
You can use the `GetMetricData` API to retrieve as many as 500 different metrics
in a single request, with a total of as many as 100,800 data points.
You can also optionally perform math expressions on the values of the returned
statistics, to create new time series that represent new insights into your
data. For example, using Lambda metrics, you could divide the Errors metric by
the Invocations metric to get an error rate time series. For more information
about metric math expressions, see [Metric Math Syntax and Functions](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/using-metric-math.html#metric-math-syntax)
in the *Amazon CloudWatch User Guide*.
Calls to the `GetMetricData` API have a different pricing structure than calls
to `GetMetricStatistics`. For more information about pricing, see [Amazon CloudWatch Pricing](https://aws.amazon.com/cloudwatch/pricing/).
Amazon CloudWatch retains metric data as follows:
* Data points with a period of less than 60 seconds are available
for 3 hours. These data points are high-resolution metrics and are available
only for custom metrics that have been defined with a `StorageResolution` of 1.
* Data points with a period of 60 seconds (1-minute) are available
for 15 days.
* Data points with a period of 300 seconds (5-minute) are available
for 63 days.
* Data points with a period of 3600 seconds (1 hour) are available
for 455 days (15 months).
Data points that are initially published with a shorter period are aggregated
together for long-term storage. For example, if you collect data using a period
of 1 minute, the data remains available for 15 days with 1-minute resolution.
After 15 days, this data is still available, but is aggregated and retrievable
only with a resolution of 5 minutes. After 63 days, the data is further
aggregated and is available with a resolution of 1 hour.
If you omit `Unit` in your request, all data that was collected with any unit is
returned, along with the corresponding units that were specified when the data
was reported to CloudWatch. If you specify a unit, the operation returns only
data that was collected with that unit specified. If you specify a unit that
does not match the data collected, the results of the operation are null.
CloudWatch does not perform unit conversions.
"""
def get_metric_data(%Client{} = client, input, options \\ []) do
  # CloudWatch "GetMetricData", dispatched through the shared POST helper.
  action = "GetMetricData"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Gets statistics for the specified metric.
The maximum number of data points returned from a single call is 1,440. If you
request more than 1,440 data points, CloudWatch returns an error. To reduce the
number of data points, you can narrow the specified time range and make multiple
requests across adjacent time ranges, or you can increase the specified period.
Data points are not returned in chronological order.
CloudWatch aggregates data points based on the length of the period that you
specify. For example, if you request statistics with a one-hour period,
CloudWatch aggregates all data points with time stamps that fall within each
one-hour period. Therefore, the number of values aggregated by CloudWatch is
larger than the number of data points returned.
CloudWatch needs raw data points to calculate percentile statistics. If you
publish data using a statistic set instead, you can only retrieve percentile
statistics for this data if one of the following conditions is true:
* The SampleCount value of the statistic set is 1.
* The Min and the Max values of the statistic set are equal.
Percentile statistics are not available for metrics when any of the metric
values are negative numbers.
Amazon CloudWatch retains metric data as follows:
* Data points with a period of less than 60 seconds are available
for 3 hours. These data points are high-resolution metrics and are available
only for custom metrics that have been defined with a `StorageResolution` of 1.
* Data points with a period of 60 seconds (1-minute) are available
for 15 days.
* Data points with a period of 300 seconds (5-minute) are available
for 63 days.
* Data points with a period of 3600 seconds (1 hour) are available
for 455 days (15 months).
Data points that are initially published with a shorter period are aggregated
together for long-term storage. For example, if you collect data using a period
of 1 minute, the data remains available for 15 days with 1-minute resolution.
After 15 days, this data is still available, but is aggregated and retrievable
only with a resolution of 5 minutes. After 63 days, the data is further
aggregated and is available with a resolution of 1 hour.
CloudWatch started retaining 5-minute and 1-hour metric data as of July 9, 2016.
For information about metrics and dimensions supported by Amazon Web Services
services, see the [Amazon CloudWatch Metrics and Dimensions Reference](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CW_Support_For_AWS.html)
in the *Amazon CloudWatch User Guide*.
"""
def get_metric_statistics(%Client{} = client, input, options \\ []) do
  # CloudWatch "GetMetricStatistics", dispatched through the shared POST helper.
  action = "GetMetricStatistics"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns information about the metric stream that you specify.
"""
def get_metric_stream(%Client{} = client, input, options \\ []) do
  # CloudWatch "GetMetricStream", dispatched through the shared POST helper.
  action = "GetMetricStream"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
You can use the `GetMetricWidgetImage` API to retrieve a snapshot graph of one
or more Amazon CloudWatch metrics as a bitmap image.
You can then embed this image into your services and products, such as wiki
pages, reports, and documents. You could also retrieve images regularly, such as
every minute, and create your own custom live dashboard.
The graph you retrieve can include all CloudWatch metric graph features,
including metric math and horizontal and vertical annotations.
There is a limit of 20 transactions per second for this API. Each
`GetMetricWidgetImage` action has the following limits:
* As many as 100 metrics in the graph.
* Up to 100 KB uncompressed payload.
"""
def get_metric_widget_image(%Client{} = client, input, options \\ []) do
  # CloudWatch "GetMetricWidgetImage", dispatched through the shared POST helper.
  action = "GetMetricWidgetImage"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns a list of the dashboards for your account.
If you include `DashboardNamePrefix`, only those dashboards with names starting
with the prefix are listed. Otherwise, all dashboards in your account are
listed.
`ListDashboards` returns up to 1000 results on one page. If there are more than
1000 dashboards, you can call `ListDashboards` again and include the value you
received for `NextToken` in the first call, to receive the next 1000 results.
"""
def list_dashboards(%Client{} = client, input, options \\ []) do
  # CloudWatch "ListDashboards", dispatched through the shared POST helper.
  action = "ListDashboards"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Returns a list of metric streams in this account.
"""
def list_metric_streams(%Client{} = client, input, options \\ []) do
  # CloudWatch "ListMetricStreams", dispatched through the shared POST helper.
  action = "ListMetricStreams"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
List the specified metrics.
You can use the returned metrics with
[GetMetricData](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricData.html) or
[GetMetricStatistics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricStatistics.html)
to obtain statistical data.
Up to 500 results are returned for any one call. To retrieve additional results,
use the returned token with subsequent calls.
After you create a metric, allow up to 15 minutes before the metric appears. You
can see statistics about the metric sooner by using
[GetMetricData](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricData.html) or
[GetMetricStatistics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricStatistics.html).
`ListMetrics` doesn't return information about metrics if those metrics haven't
reported data in the past two weeks. To retrieve those metrics, use
[GetMetricData](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricData.html) or
[GetMetricStatistics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricStatistics.html).
"""
def list_metrics(%Client{} = client, input, options \\ []) do
  # CloudWatch "ListMetrics", dispatched through the shared POST helper.
  action = "ListMetrics"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Displays the tags associated with a CloudWatch resource.
Currently, alarms and Contributor Insights rules support tagging.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
  # CloudWatch "ListTagsForResource", dispatched through the shared POST helper.
  action = "ListTagsForResource"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Creates an anomaly detection model for a CloudWatch metric.
You can use the model to display a band of expected normal values when the
metric is graphed.
For more information, see [CloudWatch Anomaly Detection](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch_Anomaly_Detection.html).
"""
def put_anomaly_detector(%Client{} = client, input, options \\ []) do
  # CloudWatch "PutAnomalyDetector", dispatched through the shared POST helper.
  action = "PutAnomalyDetector"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Creates or updates a *composite alarm*.
When you create a composite alarm, you specify a rule expression for the alarm
that takes into account the alarm states of other alarms that you have created.
The composite alarm goes into ALARM state only if all conditions of the rule are
met.
The alarms specified in a composite alarm's rule expression can include metric
alarms and other composite alarms.
Using composite alarms can reduce alarm noise. You can create multiple metric
alarms, and also create a composite alarm and set up alerts only for the
composite alarm. For example, you could create a composite alarm that goes into
ALARM state only when more than one of the underlying metric alarms are in ALARM
state.
Currently, the only alarm actions that can be taken by composite alarms are
notifying SNS topics.
It is possible to create a loop or cycle of composite alarms, where composite
alarm A depends on composite alarm B, and composite alarm B also depends on
composite alarm A. In this scenario, you can't delete any composite alarm that
is part of the cycle because there is always still a composite alarm that
depends on that alarm that you want to delete.
To get out of such a situation, you must break the cycle by changing the rule of
one of the composite alarms in the cycle to remove a dependency that creates the
cycle. The simplest change to make to break a cycle is to change the `AlarmRule`
of one of the alarms to `False`.
Additionally, the evaluation of composite alarms stops if CloudWatch detects a
cycle in the evaluation path.
When this operation creates an alarm, the alarm state is immediately set to
`INSUFFICIENT_DATA`. The alarm is then evaluated and its state is set
appropriately. Any actions associated with the new state are then executed. For
a composite alarm, this initial time after creation is the only time that the
alarm can be in `INSUFFICIENT_DATA` state.
When you update an existing alarm, its state is left unchanged, but the update
completely overwrites the previous configuration of the alarm.
To use this operation, you must be signed on with the
`cloudwatch:PutCompositeAlarm` permission that is scoped to `*`. You can't
create a composite alarm if your `cloudwatch:PutCompositeAlarm` permission has
a narrower scope.
If you are an IAM user, you must have `iam:CreateServiceLinkedRole` to create a
composite alarm that has Systems Manager OpsItem actions.
"""
def put_composite_alarm(%Client{} = client, input, options \\ []) do
  # CloudWatch "PutCompositeAlarm", dispatched through the shared POST helper.
  action = "PutCompositeAlarm"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Creates a dashboard if it does not already exist, or updates an existing
dashboard.
If you update a dashboard, the entire contents are replaced with what you
specify here.
All dashboards in your account are global, not region-specific.
A simple way to create a dashboard using `PutDashboard` is to copy an existing
dashboard. To copy an existing dashboard using the console, you can load the
dashboard and then use the View/edit source command in the Actions menu to
display the JSON block for that dashboard. Another way to copy a dashboard is to
use `GetDashboard`, and then use the data returned within `DashboardBody` as the
template for the new dashboard when you call `PutDashboard`.
When you create a dashboard with `PutDashboard`, a good practice is to add a
text widget at the top of the dashboard with a message that the dashboard was
created by script and should not be changed in the console. This message could
also point console users to the location of the `DashboardBody` script or the
CloudFormation template used to create the dashboard.
"""
def put_dashboard(%Client{} = client, input, options \\ []) do
  # CloudWatch "PutDashboard", dispatched through the shared POST helper.
  action = "PutDashboard"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Creates a Contributor Insights rule.
Rules evaluate log events in a CloudWatch Logs log group, enabling you to find
contributor data for the log events in that log group. For more information, see
[Using Contributor Insights to Analyze High-Cardinality Data](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/ContributorInsights.html).
If you create a rule, delete it, and then re-create it with the same name,
historical data from the first time the rule was created might not be available.
"""
def put_insight_rule(%Client{} = client, input, options \\ []) do
  # CloudWatch "PutInsightRule", dispatched through the shared POST helper.
  action = "PutInsightRule"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Creates or updates an alarm and associates it with the specified metric, metric
math expression, or anomaly detection model.
Alarms based on anomaly detection models cannot have Auto Scaling actions.
When this operation creates an alarm, the alarm state is immediately set to
`INSUFFICIENT_DATA`. The alarm is then evaluated and its state is set
appropriately. Any actions associated with the new state are then executed.
When you update an existing alarm, its state is left unchanged, but the update
completely overwrites the previous configuration of the alarm.
If you are an IAM user, you must have Amazon EC2 permissions for some alarm
operations:
* The `iam:CreateServiceLinkedRole` for all alarms with EC2 actions
* The `iam:CreateServiceLinkedRole` to create an alarm with Systems
Manager OpsItem actions.
The first time you create an alarm in the Amazon Web Services Management
Console, the CLI, or by using the PutMetricAlarm API, CloudWatch creates the
necessary service-linked role for you. The service-linked roles are called
`AWSServiceRoleForCloudWatchEvents` and
`AWSServiceRoleForCloudWatchAlarms_ActionSSM`. For more information, see [Amazon Web Services service-linked
role](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_terms-and-concepts.html#iam-term-service-linked-role).
## Cross-account alarms
You can set an alarm on metrics in the current account, or in another account.
To create a cross-account alarm that watches a metric in a different account,
you must have completed the following pre-requisites:
* The account where the metrics are located (the *sharing account*)
must already have a sharing role named **CloudWatch-CrossAccountSharingRole**.
If it does not already have this role, you must create it using the instructions
in **Set up a sharing account** in [ Cross-account cross-Region CloudWatch console](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Cross-Account-Cross-Region.html#enable-cross-account-cross-Region).
The policy for that role must grant access to the ID of the account where you
are creating the alarm.
* The account where you are creating the alarm (the *monitoring
account*) must already have a service-linked role named
**AWSServiceRoleForCloudWatchCrossAccount** to allow CloudWatch to assume the
sharing role in the sharing account. If it does not, you must create it
following the directions in **Set up a monitoring account** in [ Cross-account cross-Region CloudWatch
console](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Cross-Account-Cross-Region.html#enable-cross-account-cross-Region).
"""
def put_metric_alarm(%Client{} = client, input, options \\ []) do
  # CloudWatch "PutMetricAlarm", dispatched through the shared POST helper.
  action = "PutMetricAlarm"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Publishes metric data points to Amazon CloudWatch.
CloudWatch associates the data points with the specified metric. If the
specified metric does not exist, CloudWatch creates the metric. When CloudWatch
creates a metric, it can take up to fifteen minutes for the metric to appear in
calls to
[ListMetrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_ListMetrics.html). You can publish either individual data points in the `Value` field, or arrays of
values and the number of times each value occurred during the period by using
the `Values` and `Counts` fields in the `MetricDatum` structure. Using the
`Values` and `Counts` method enables you to publish up to 150 values per metric
with one `PutMetricData` request, and supports retrieving percentile statistics
on this data.
Each `PutMetricData` request is limited to 40 KB in size for HTTP POST requests.
You can send a payload compressed by gzip. Each request is also limited to no
more than 20 different metrics.
Although the `Value` parameter accepts numbers of type `Double`, CloudWatch
rejects values that are either too small or too large. Values must be in the
range of -2^360 to 2^360. In addition, special values (for example, NaN,
+Infinity, -Infinity) are not supported.
You can use up to 10 dimensions per metric to further clarify what data the
metric collects. Each dimension consists of a Name and Value pair. For more
information about specifying dimensions, see [Publishing
Metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/publishingMetrics.html)
in the *Amazon CloudWatch User Guide*.
You specify the time stamp to be associated with each data point. You can
specify time stamps that are as much as two weeks before the current date, and
as much as 2 hours after the current day and time.
Data points with time stamps from 24 hours ago or longer can take at least 48
hours to become available for
[GetMetricData](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricData.html) or
[GetMetricStatistics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricStatistics.html)
from the time they are submitted. Data points with time stamps between 3 and 24
hours ago can take as much as 2 hours to become available for
[GetMetricData](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricData.html) or
[GetMetricStatistics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_GetMetricStatistics.html).
CloudWatch needs raw data points to calculate percentile statistics. If you
publish data using a statistic set instead, you can only retrieve percentile
statistics for this data if one of the following conditions is true:
* The `SampleCount` value of the statistic set is 1 and `Min`,
`Max`, and `Sum` are all equal.
* The `Min` and `Max` are equal, and `Sum` is equal to `Min`
multiplied by `SampleCount`.
"""
def put_metric_data(%Client{} = client, input, options \\ []) do
  # CloudWatch "PutMetricData", dispatched through the shared POST helper.
  action = "PutMetricData"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Creates or updates a metric stream.
Metric streams can automatically stream CloudWatch metrics to Amazon Web
Services destinations including Amazon S3 and to many third-party solutions.
For more information, see [ Using Metric Streams](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-Metric-Streams.html).
To create a metric stream, you must be logged on to an account that has the
`iam:PassRole` permission and either the `CloudWatchFullAccess` policy or the
`cloudwatch:PutMetricStream` permission.
When you create or update a metric stream, you choose one of the following:
* Stream metrics from all metric namespaces in the account.
* Stream metrics from all metric namespaces in the account, except
for the namespaces that you list in `ExcludeFilters`.
* Stream metrics from only the metric namespaces that you list in
`IncludeFilters`.
When you use `PutMetricStream` to create a new metric stream, the stream is
created in the `running` state. If you use it to update an existing stream, the
state of the stream is not changed.
"""
def put_metric_stream(%Client{} = client, input, options \\ []) do
  # CloudWatch "PutMetricStream", dispatched through the shared POST helper.
  action = "PutMetricStream"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Temporarily sets the state of an alarm for testing purposes.
When the updated state differs from the previous value, the action configured
for the appropriate state is invoked. For example, if your alarm is configured
to send an Amazon SNS message when an alarm is triggered, temporarily changing
the alarm state to `ALARM` sends an SNS message.
Metric alarms return to their actual state quickly, often within seconds.
Because the metric alarm state change happens quickly, it is typically only
visible in the alarm's **History** tab in the Amazon CloudWatch console or
through
[DescribeAlarmHistory](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_DescribeAlarmHistory.html).
If you use `SetAlarmState` on a composite alarm, the composite alarm is not
guaranteed to return to its actual state. It returns to its actual state only
once any of its children alarms change state. It is also reevaluated if you
update its configuration.
If an alarm triggers EC2 Auto Scaling policies or application Auto Scaling
policies, you must include information in the `StateReasonData` parameter to
enable the policy to take the correct action.
"""
def set_alarm_state(%Client{} = client, input, options \\ []) do
  # CloudWatch "SetAlarmState", dispatched through the shared POST helper.
  action = "SetAlarmState"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Starts the streaming of metrics for one or more of your metric streams.
"""
def start_metric_streams(%Client{} = client, input, options \\ []) do
  # CloudWatch "StartMetricStreams", dispatched through the shared POST helper.
  action = "StartMetricStreams"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Stops the streaming of metrics for one or more of your metric streams.
"""
def stop_metric_streams(%Client{} = client, input, options \\ []) do
  # CloudWatch "StopMetricStreams", dispatched through the shared POST helper.
  action = "StopMetricStreams"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Assigns one or more tags (key-value pairs) to the specified CloudWatch resource.
Currently, the only CloudWatch resources that can be tagged are alarms and
Contributor Insights rules.
Tags can help you organize and categorize your resources. You can also use them
to scope user permissions by granting a user permission to access or change only
resources with certain tag values.
Tags don't have any semantic meaning to Amazon Web Services and are interpreted
strictly as strings of characters.
You can use the `TagResource` action with an alarm that already has tags. If you
specify a new tag key for the alarm, this tag is appended to the list of tags
associated with the alarm. If you specify a tag key that is already associated
with the alarm, the new tag value that you specify replaces the previous value
for that tag.
You can associate as many as 50 tags with a CloudWatch resource.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
  # CloudWatch "TagResource", dispatched through the shared POST helper.
  action = "TagResource"
  Request.request_post(client, metadata(), action, input, options)
end
@doc """
Removes one or more tags from the specified resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
  # CloudWatch "UntagResource", dispatched through the shared POST helper.
  action = "UntagResource"
  Request.request_post(client, metadata(), action, input, options)
end
end
|
lib/aws/generated/cloud_watch.ex
| 0.889475
| 0.576184
|
cloud_watch.ex
|
starcoder
|
defmodule HelloOperator.Controller.V1.Greeting do
  @moduledoc """
  HelloOperator: Greeting CRD.

  ## Kubernetes CRD Spec

  By default all CRD specs are assumed from the module name, you can override them using attributes.

  ### Examples
  ```
  # Kubernetes API version of this CRD, defaults to value in module name
  @version "v2alpha1"

  # Kubernetes API group of this CRD, defaults to "hello-operator.example.com"
  @group "kewl.example.io"

  # The scope of the CRD. Defaults to `:namespaced`
  @scope :cluster

  # CRD names used by kubectl and the kubernetes API
  @names %{
    plural: "foos",
    singular: "foo",
    kind: "Foo"
  }
  ```

  ## Declare RBAC permissions used by this module

  RBAC rules can be declared using `@rule` attribute and generated using `mix bonny.manifest`

  This `@rule` attribute is cumulative, and can be declared once for each Kubernetes API Group.

  ### Examples
  ```
  @rule {apiGroup, resources_list, verbs_list}
  @rule {"", ["pods", "secrets"], ["*"]}
  @rule {"apiextensions.k8s.io", ["foo"], ["*"]}
  ```
  """
  require Logger
  use Bonny.Controller

  @rule {"apps", ["deployments"], ["*"]}
  @rule {"", ["services"], ["*"]}

  # @group "your-operator.your-domain.com"
  # @version "v1"
  @scope :namespaced
  @names %{
    plural: "greetings",
    singular: "greeting",
    kind: "Greeting"
  }

  @doc """
  Called periodically for each existing CustomResource to allow for reconciliation.
  """
  @spec reconcile(map()) :: :ok | :error
  @impl Bonny.Controller
  def reconcile(payload) do
    track_event(:reconcile, payload)
    :ok
  end

  @doc """
  Creates a kubernetes `deployment` and `service` that runs a "Hello, World" app.
  """
  @spec add(map()) :: :ok | :error
  @impl Bonny.Controller
  def add(payload) do
    track_event(:add, payload)
    resources = parse(payload)

    with {:ok, _} <- K8s.Client.create(resources.deployment) |> run,
         {:ok, _} <- K8s.Client.create(resources.service) |> run do
      :ok
    else
      {:error, error} -> {:error, error}
    end
  end

  @doc """
  Updates `deployment` and `service` resources.
  """
  @spec modify(map()) :: :ok | :error
  @impl Bonny.Controller
  def modify(payload) do
    # Fix: `modify` was the only lifecycle callback that did not log its event
    # (reconcile/add/delete all call track_event/2), leaving an observability
    # gap for resource updates.
    track_event(:modify, payload)
    resources = parse(payload)

    with {:ok, _} <- K8s.Client.patch(resources.deployment) |> run,
         {:ok, _} <- K8s.Client.patch(resources.service) |> run do
      :ok
    else
      {:error, error} -> {:error, error}
    end
  end

  @doc """
  Deletes `deployment` and `service` resources.
  """
  @spec delete(map()) :: :ok | :error
  @impl Bonny.Controller
  def delete(payload) do
    track_event(:delete, payload)
    resources = parse(payload)

    with {:ok, _} <- K8s.Client.delete(resources.deployment) |> run,
         {:ok, _} <- K8s.Client.delete(resources.service) |> run do
      :ok
    else
      {:error, error} -> {:error, error}
    end
  end

  # Builds the deployment/service resource pair for a Greeting custom resource.
  defp parse(%{
         "metadata" => %{"name" => name, "namespace" => ns},
         "spec" => %{"greeting" => greeting}
       }) do
    %{
      deployment: gen_deployment(ns, name, greeting),
      service: gen_service(ns, name, greeting)
    }
  end

  # NodePort service selecting the app's pods and exposing port 5000.
  defp gen_service(ns, name, _greeting) do
    %{
      "apiVersion" => "v1",
      "kind" => "Service",
      "metadata" => %{
        "name" => name,
        "namespace" => ns,
        "labels" => %{"app" => name}
      },
      "spec" => %{
        "ports" => [%{"port" => 5000, "protocol" => "TCP"}],
        "selector" => %{"app" => name},
        "type" => "NodePort"
      }
    }
  end

  # Two-replica deployment of the greeting server; the greeting text is
  # injected via the GREETING environment variable.
  defp gen_deployment(ns, name, greeting) do
    %{
      "apiVersion" => "apps/v1",
      "kind" => "Deployment",
      "metadata" => %{
        "name" => name,
        "namespace" => ns,
        "labels" => %{"app" => name}
      },
      "spec" => %{
        "replicas" => 2,
        "selector" => %{
          "matchLabels" => %{"app" => name}
        },
        "template" => %{
          "metadata" => %{
            "labels" => %{"app" => name}
          },
          "spec" => %{
            "containers" => [
              %{
                "name" => name,
                "image" => "quay.io/coryodaniel/greeting-server",
                "env" => [%{"name" => "GREETING", "value" => greeting}],
                "ports" => [%{"containerPort" => 5000}]
              }
            ]
          }
        }
      }
    }
  end

  # Executes a K8s operation against the cluster Bonny is configured for.
  defp run(%K8s.Operation{} = op),
    do: K8s.Client.run(op, Bonny.Config.cluster_name())

  # Lightweight telemetry: logs the lifecycle event type and the raw resource.
  defp track_event(type, resource),
    do: Logger.info("#{type}: #{inspect(resource)}")
end
|
lib/hello_operator/controllers/v1/greeting.ex
| 0.853913
| 0.792745
|
greeting.ex
|
starcoder
|
defmodule Pow.Store.Backend.MnesiaCache do
@moduledoc """
GenServer based key value Mnesia cache store with auto expiration.
When the MnesiaCache starts, it'll initialize invalidators for all stored
keys using the `expire` value. If the `expire` datetime is past, it'll
invoke the invalidator immediately.
## Initialization options
* `:nodes` - list of nodes to use. This value defaults to `[node()]`.
* `:table_opts` - options to add to table definition. This value defaults
to `[disc_copies: nodes]`.
* `:timeout` - timeout value in milliseconds for how long to wait until the
cache table has initiated. Defaults to 15 seconds.
## Configuration options
* `:ttl` - integer value in milliseconds for ttl of records (required).
* `:namespace` - string value to use for namespacing keys, defaults to
"cache".
"""
use GenServer
alias Pow.{Config, Store.Base}
@behaviour Base
@mnesia_cache_tab __MODULE__
@spec start_link(Config.t()) :: GenServer.on_start()
def start_link(config),
  # Registered under the module name so the client API can address it directly.
  do: GenServer.start_link(__MODULE__, config, name: __MODULE__)
@impl Base
@spec put(Config.t(), binary(), any()) :: :ok
def put(config, key, value) do
  # Fire-and-forget write; the record's lifetime comes from the configured TTL.
  ttl = ttl(config)
  GenServer.cast(__MODULE__, {:cache, config, key, value, ttl})
end
@impl Base
@spec delete(Config.t(), binary()) :: :ok
def delete(config, key),
  # Asynchronous removal of the key and its pending expiry timer.
  do: GenServer.cast(__MODULE__, {:delete, config, key})
@impl Base
@spec get(Config.t(), binary()) :: any() | :not_found
# Reads go straight to Mnesia (dirty read) — no GenServer round-trip needed.
def get(config, key), do: table_get(config, key)
@impl Base
@spec keys(Config.t()) :: [any()]
# Lists all keys in the namespace; reads the table directly, no server call.
def keys(config), do: table_keys(config)
# Callbacks
@impl GenServer
@spec init(Config.t()) :: {:ok, map()}
def init(config) do
  # Create/join the Mnesia table first, then arm expiry timers for any
  # records that survived a restart.
  table_init(config)
  invalidators = init_invalidators(config)
  {:ok, %{invalidators: invalidators}}
end
@impl GenServer
@spec handle_cast({:cache, Config.t(), binary(), any(), integer()}, map()) :: {:noreply, map()}
def handle_cast({:cache, config, key, value, ttl}, %{invalidators: invalidators} = state) do
invalidators = update_invalidators(config, invalidators, key, ttl)
table_update(config, key, value, ttl)
{:noreply, %{state | invalidators: invalidators}}
end
@spec handle_cast({:delete, Config.t(), binary()}, map()) :: {:noreply, map()}
def handle_cast({:delete, config, key}, %{invalidators: invalidators} = state) do
invalidators = clear_invalidator(invalidators, key)
table_delete(config, key)
{:noreply, %{state | invalidators: invalidators}}
end
@impl GenServer
@spec handle_info({:invalidate, Config.t(), binary()}, map()) :: {:noreply, map()}
def handle_info({:invalidate, config, key}, %{invalidators: invalidators} = state) do
invalidators = clear_invalidator(invalidators, key)
table_delete(config, key)
{:noreply, %{state | invalidators: invalidators}}
end
defp update_invalidators(config, invalidators, key, ttl) do
invalidators = clear_invalidator(invalidators, key)
invalidator = trigger_ttl(config, key, ttl)
Map.put(invalidators, key, invalidator)
end
defp clear_invalidator(invalidators, key) do
case Map.get(invalidators, key) do
nil -> nil
invalidator -> Process.cancel_timer(invalidator)
end
Map.drop(invalidators, [key])
end
defp table_get(config, key) do
mnesia_key = mnesia_key(config, key)
{@mnesia_cache_tab, mnesia_key}
|> :mnesia.dirty_read()
|> case do
[{@mnesia_cache_tab, ^mnesia_key, {_key, value, _config, _expire}} | _rest] ->
value
[] ->
:not_found
end
end
defp table_update(config, key, value, ttl) do
mnesia_key = mnesia_key(config, key)
expire = timestamp() + ttl
value = {key, value, config, expire}
:mnesia.transaction(fn ->
:mnesia.write({@mnesia_cache_tab, mnesia_key, value})
end)
end
defp table_delete(config, key) do
mnesia_key = mnesia_key(config, key)
:mnesia.transaction(fn ->
:mnesia.delete({@mnesia_cache_tab, mnesia_key})
end)
end
defp table_keys(config, opts \\ []) do
namespace = mnesia_key(config, "")
@mnesia_cache_tab
|> :mnesia.dirty_all_keys()
|> Enum.filter(&String.starts_with?(&1, namespace))
|> maybe_remove_namespace(namespace, opts)
end
defp maybe_remove_namespace(keys, namespace, opts) do
case Keyword.get(opts, :remove_namespace, true) do
true ->
start = String.length(namespace)
Enum.map(keys, &String.slice(&1, start..-1))
_ ->
keys
end
end
defp table_init(config) do
nodes = Config.get(config, :nodes, [node()])
table_opts = Config.get(config, :table_opts, disc_copies: nodes)
table_def = Keyword.merge(table_opts, [type: :set])
timeout = Config.get(config, :timeout, :timer.seconds(15))
case :mnesia.create_schema(nodes) do
:ok -> :ok
{:error, {_, {:already_exists, _}}} -> :ok
end
:rpc.multicall(nodes, :mnesia, :start, [])
case :mnesia.create_table(@mnesia_cache_tab, table_def) do
{:atomic, :ok} -> :ok
{:aborted, {:already_exists, @mnesia_cache_tab}} -> :ok
end
:ok = :mnesia.wait_for_tables([@mnesia_cache_tab], timeout)
end
defp mnesia_key(config, key) do
namespace = Config.get(config, :namespace, "cache")
"#{namespace}:#{key}"
end
defp init_invalidators(config) do
config
|> table_keys(remove_namespace: false)
|> Enum.map(&init_invalidator(config, &1))
|> Enum.reject(&is_nil/1)
|> Enum.into(%{})
end
defp init_invalidator(_config, key) do
{@mnesia_cache_tab, key}
|> :mnesia.dirty_read()
|> case do
[{@mnesia_cache_tab, ^key, {_key_id, _value, _config, nil}} | _rest] ->
nil
[{@mnesia_cache_tab, ^key, {key_id, _value, config, expire}} | _rest] ->
ttl = Enum.max([expire - timestamp(), 0])
{key, trigger_ttl(config, key_id, ttl)}
[] -> nil
end
end
defp trigger_ttl(config, key, ttl) do
Process.send_after(self(), {:invalidate, config, key}, ttl)
end
defp timestamp, do: :os.system_time(:millisecond)
defp ttl(config) do
Config.get(config, :ttl) || raise_ttl_error()
end
@spec raise_ttl_error :: no_return
defp raise_ttl_error,
do: Config.raise_error("`:ttl` configuration option is required for #{inspect(__MODULE__)}")
end
|
lib/pow/store/backend/mnesia_cache.ex
| 0.8796
| 0.434941
|
mnesia_cache.ex
|
starcoder
|
defmodule Timex.Ecto.DateTimeWithTimezone do
  @moduledoc """
  This is a special type for storing datetime + timezone information as a composite type.

  To use this, you must first make sure you have the `datetimetz` type defined in your database:

  ```sql
  CREATE TYPE datetimetz AS (
      dt timestamptz,
      tz varchar
  );
  ```

  Then you can use that type when creating your table, i.e.:

  ```sql
  CREATE TABLE example (
      id integer,
      created_at datetimetz
  );
  ```

  That's it!
  """
  # NOTE(review): this module targets an older Timex API (`Timex.datetime/2`,
  # `%DateTime{}` with :timezone/:millisecond fields) and the pre-Ecto-2
  # `Ecto.Type` behaviour — confirm library versions before changing anything.
  use Timex

  @behaviour Ecto.Type

  # The composite Postgres type name this Ecto type maps to.
  def type, do: :datetimetz

  @doc """
  We can let Ecto handle blank input
  """
  defdelegate blank?(value), to: Ecto.Type

  @doc """
  Handle casting to Timex.Ecto.DateTimeWithTimezone
  """
  # A datetime with no timezone is assumed UTC (%TimezoneInfo{} defaults) —
  # presumably matches Timex's default struct; verify against Timex version.
  def cast(%DateTime{timezone: nil} = datetime), do: {:ok, %{datetime | :timezone => %TimezoneInfo{}}}
  def cast(%DateTime{} = datetime), do: {:ok, datetime}
  # Support embeds_one/embeds_many
  # Map shapes with either an "ms" or "millisecond" key are accepted, as
  # produced by serialized Timex datetimes of different vintages.
  def cast(%{"calendar" => _,
             "year" => y, "month" => m, "day" => d,
             "hour" => h, "minute" => mm, "second" => s, "ms" => ms,
             "timezone" => %{"full_name" => tz_abbr}}) do
    datetime = Timex.datetime({{y,m,d},{h,mm,s}}, tz_abbr)
    {:ok, %{datetime | :millisecond => ms}}
  end
  def cast(%{"calendar" => _,
             "year" => y, "month" => m, "day" => d,
             "hour" => h, "minute" => mm, "second" => s, "millisecond" => ms,
             "timezone" => %{"full_name" => tz_abbr}}) do
    datetime = Timex.datetime({{y,m,d},{h,mm,s}}, tz_abbr)
    {:ok, %{datetime | :millisecond => ms}}
  end
  # Fallback: delegate to Ecto's own cast, then reuse load/1 to convert the
  # resulting Ecto datetime into a Timex DateTime.
  def cast(input) do
    case Ecto.DateTimeWithTimezone.cast(input) do
      {:ok, datetime} ->
        load({{{datetime.year, datetime.month, datetime.day},
               {datetime.hour, datetime.min, datetime.sec, datetime.usec}
              },
              datetime.timezone
             })
      :error -> :error
    end
  end

  @doc """
  Load from the native Ecto representation
  """
  # Native representation is {{{y,m,d},{h,min,sec,usec}}, timezone_name}
  # mirroring the composite (timestamptz, varchar) column.
  def load({ {{year, month, day}, {hour, min, sec, usec}}, timezone}) do
    datetime = Timex.datetime({{year, month, day}, {hour, min, sec}})
    # Database stores microseconds; the Timex struct tracks milliseconds.
    datetime = %{datetime | :millisecond => Time.from(usec, :microseconds) |> Time.to_milliseconds}
    tz = Timezone.get(timezone, datetime)
    {:ok, %{datetime | :timezone => tz}}
  end
  def load(_), do: :error

  @doc """
  Convert to the native Ecto representation
  """
  # A nil timezone dumps as "UTC"; milliseconds are widened to microseconds.
  def dump(%DateTime{timezone: nil} = datetime) do
    {date, {hour, min, second}} = Timex.to_erlang_datetime(datetime)
    micros = datetime.millisecond * 1_000
    {:ok, {{date, {hour, min, second, micros}}, "UTC"}}
  end
  def dump(%DateTime{timezone: %TimezoneInfo{full_name: name}} = datetime) do
    {date, {hour, min, second}} = Timex.to_erlang_datetime(datetime)
    micros = datetime.millisecond * 1_000
    {:ok, {{date, {hour, min, second, micros}}, name}}
  end
  def dump(_), do: :error
end
|
lib/types/datetimetz.ex
| 0.844024
| 0.785144
|
datetimetz.ex
|
starcoder
|
defmodule AMQP.Connection do
  @moduledoc """
  Functions to operate on Connections.
  """
  import AMQP.Core
  alias AMQP.Connection

  defstruct [:pid]
  @type t :: %Connection{pid: pid}

  @doc """
  Opens a new Connection to an AMQP broker.

  The connections created by this module are supervised under amqp_client's supervision tree.
  Please note that connections do not get restarted automatically by the supervision tree in
  case of a failure. If you need robust connections and channels, use monitors on the returned
  connection PID.

  The connection parameters can be passed as a keyword list or as a AMQP URI.

  When using a keyword list, the following options can be used:

  # Options

    * `:username` - The name of a user registered with the broker (defaults to \"guest\");
    * `:password` - The password of user (defaults to \"guest\");
    * `:virtual_host` - The name of a virtual host in the broker (defaults to \"/\");
    * `:host` - The hostname of the broker (defaults to \"localhost\");
    * `:port` - The port the broker is listening on (defaults to `5672`);
    * `:channel_max` - The channel_max handshake parameter (defaults to `0`);
    * `:frame_max` - The frame_max handshake parameter (defaults to `0`);
    * `:heartbeat` - The heartbeat interval in seconds (defaults to `0` - turned off);
    * `:connection_timeout` - The connection timeout in milliseconds (defaults to `infinity`);
    * `:ssl_options` - Enable SSL by setting the location to cert files (defaults to `none`);
    * `:client_properties` - A list of extra client properties to be sent to the server, defaults to `[]`;
    * `:socket_options` - Extra socket options. These are appended to the default options. \
  See http://www.erlang.org/doc/man/inet.html#setopts-2 and http://www.erlang.org/doc/man/gen_tcp.html#connect-4 \
  for descriptions of the available options.

  ## Enabling SSL

  To enable SSL, supply the following in the `ssl_options` field:

    * `cacertfile` - Specifies the certificates of the root Certificate Authorities that we wish to implicitly trust;
    * `certfile` - The client's own certificate in PEM format;
    * `keyfile` - The client's private key in PEM format;

  ### Example

  ```
  AMQP.Connection.open port: 5671,
                       ssl_options: [cacertfile: '/path/to/testca/cacert.pem',
                                     certfile: '/path/to/client/cert.pem',
                                     keyfile: '/path/to/client/key.pem',
                                     # only necessary with intermediate CAs
                                     # depth: 2,
                                     verify: :verify_peer,
                                     fail_if_no_peer_cert: true]
  ```

  ## Examples

      iex> AMQP.Connection.open host: \"localhost\", port: 5672, virtual_host: \"/\", username: \"guest\", password: \"guest\"
      {:ok, %AMQP.Connection{}}

      iex> AMQP.Connection.open \"amqp://guest:guest@localhost\"
      {:ok, %AMQP.Connection{}}

  """
  @spec open(keyword|String.t) :: {:ok, t} | {:error, atom} | {:error, any}
  def open(options \\ [])

  def open(options) when is_list(options) do
    options = options
              |> normalize_ssl_options

    # Build the Erlang client's #amqp_params_network{} record from the
    # keyword options, supplying the broker's conventional defaults.
    amqp_params =
      amqp_params_network(username:           Keyword.get(options, :username, "guest"),
                          password:           Keyword.get(options, :password, "guest"),
                          virtual_host:       Keyword.get(options, :virtual_host, "/"),
                          # The Erlang client expects a charlist hostname.
                          host:               Keyword.get(options, :host, 'localhost') |> to_charlist,
                          port:               Keyword.get(options, :port, :undefined),
                          channel_max:        Keyword.get(options, :channel_max, 0),
                          frame_max:          Keyword.get(options, :frame_max, 0),
                          heartbeat:          Keyword.get(options, :heartbeat, 0),
                          connection_timeout: Keyword.get(options, :connection_timeout, :infinity),
                          ssl_options:        Keyword.get(options, :ssl_options, :none),
                          client_properties:  Keyword.get(options, :client_properties, []),
                          socket_options:     Keyword.get(options, :socket_options, []),
                          auth_mechanisms:    Keyword.get(options, :auth_mechanisms, [&:amqp_auth_mechanisms.plain/3, &:amqp_auth_mechanisms.amqplain/3]))

    do_open(amqp_params)
  end

  def open(uri) when is_binary(uri) do
    # AMQP URIs are parsed by the Erlang client, which takes a charlist.
    case uri |> to_charlist |> :amqp_uri.parse do
      {:ok, amqp_params} -> do_open(amqp_params)
      error              -> error
    end
  end

  @doc """
  Opens a new direct Connection to an AMQP broker.

  Direct connection is the special type of connection that is
  supported by RabbitMQ broker, where Erlang distribution protocol is
  used to communicate with broker. It's a bit faster than the regular
  AMQP protocol, as there is no need to serialize and deserialize AMQP
  frames (especially when we are using this library at the same BEAM
  node where the RabbitMQ runs). But it's less secure, as giving
  direct access to a client means it has full control over RabbitMQ
  node.

  The connections created by this function are not restarted
  automatically, see open/1 for more details.

  The connection parameters are passed as a keyword list with the
  following options available:

  # Options

    * `:username` - The name of a user registered with the broker (defaults to `:none`);
    * `:password` - The password of the user (defaults to `:none`);
    * `:virtual_host` - The name of a virtual host in the broker (defaults to \"/\");
    * `:node` - Erlang node name to connect to (defaults to the current node);
    * `:client_properties` - A list of extra client properties to be sent to the server, defaults to `[]`;

  # Adapter options

  Additional details can be provided when a direct connection is used
  to provide connectivity for some non-AMQP protocol (like it happens
  in STOMP and MQTT plugins for RabbitMQ). We assume that you know
  what you are doing in this case, here is the options that maps to
  corresponding fields of `#amqp_adapter_info{}` record:
  `:adapter_host`, `:adapter_port`, `:adapter_peer_host`,
  `:adapter_peer_port`, `:adapter_name`, `:adapter_protocol`,
  `:adapter_additional_info`.

  ## Examples

      AMQP.Connection.open_direct node: :rabbit@localhost
      {:ok, %AMQP.Connection{}}

  """
  @spec open_direct(keyword) :: {:ok, t} | {:error, atom}
  def open_direct(options \\ [])

  def open_direct(options) when is_list(options) do
    # Optional adapter metadata, only meaningful when the direct connection
    # fronts a non-AMQP protocol plugin (STOMP, MQTT, ...).
    adapter_info = amqp_adapter_info(
      host:            Keyword.get(options, :adapter_host, :unknown),
      port:            Keyword.get(options, :adapter_port, :unknown),
      peer_host:       Keyword.get(options, :adapter_peer_host, :unknown),
      peer_port:       Keyword.get(options, :adapter_peer_port, :unknown),
      name:            Keyword.get(options, :adapter_name, :unknown),
      protocol:        Keyword.get(options, :adapter_protocol, :unknown),
      additional_info: Keyword.get(options, :adapter_additional_info, []))

    amqp_params = amqp_params_direct(
      username:          Keyword.get(options, :username, :none),
      password:          Keyword.get(options, :password, :none),
      virtual_host:      Keyword.get(options, :virtual_host, "/"),
      node:              Keyword.get(options, :node, node()),
      adapter_info:      adapter_info,
      client_properties: Keyword.get(options, :client_properties, []))

    do_open(amqp_params)
  end

  @doc """
  Closes an open Connection.
  """
  @spec close(t) :: :ok | {:error, any}
  def close(conn) do
    case :amqp_connection.close(conn.pid) do
      :ok   -> :ok
      error -> {:error, error}
    end
  end

  # Starts the Erlang client connection and wraps the pid in our struct.
  defp do_open(amqp_params) do
    case :amqp_connection.start(amqp_params) do
      {:ok, pid} -> {:ok, %Connection{pid: pid}}
      error      -> error
    end
  end

  # The Erlang ssl application expects file paths as charlists, while Elixir
  # callers naturally pass binaries; convert the three certificate-path
  # options. BUG FIX: the list previously read [:cacertfile, :cacertfile,
  # :cacertfile], so :certfile and :keyfile were passed through as binaries
  # and rejected by :ssl.
  defp normalize_ssl_options(options) when is_list(options) do
    for {k, v} <- options do
      if k in [:cacertfile, :certfile, :keyfile] do
        {k, to_charlist(v)}
      else
        {k, v}
      end
    end
  end
  defp normalize_ssl_options(options), do: options
end
|
lib/amqp/connection.ex
| 0.88677
| 0.815122
|
connection.ex
|
starcoder
|
defmodule Codepagex do
  # unfortunately exdoc doesn't support ``` fenced blocks, so the README is
  # filtered at compile time (title, badges, fences) before becoming the docs
  @moduledoc File.read!("README.md")
             |> String.split("\n")
             |> Enum.reject(&String.match?(&1, ~r/#.Codepagex/))
             |> Enum.reject(&String.match?(&1, ~r/```|Build Status|Documentation Status/))
             |> Enum.join("\n")

  require Codepagex.Mappings
  alias Codepagex.Mappings

  # Handler types for to_string/4: the outer fun receives the encoding and
  # returns an inner fun invoked on each undecodable byte sequence.
  @type to_s_missing_inner ::
          (binary, term -> {:ok, String.t(), binary, term} | {:error, term})
  @type to_s_missing_outer ::
          (String.t() -> {:ok, to_s_missing_inner} | {:error, term})
  # Handler types for from_string/4 (same shape, utf-8 input side).
  @type from_s_missing_inner ::
          (String.t(), term -> {:ok, binary, String.t(), term} | {:error, term})
  @type from_s_missing_outer ::
          (String.t() -> {:ok, from_s_missing_inner} | {:error, term})
  @type encoding :: atom | String.t()

  @on_load :load_atoms

  # Ensures Codepagex.Mappings (which defines the encoding atoms) is loaded
  # when this module loads, so String.to_existing_atom/1 on encoding names
  # can succeed.
  def load_atoms do
    Code.ensure_loaded?(Codepagex.Mappings)
    :ok
  end

  # Compile-time helper: renders one {alias, mapping-name} pair as a
  # markdown table row for the docs below.
  format_helper = fn {a, m} ->
    " | #{inspect(a) |> String.pad_trailing(15)} | #{m} |"
  end

  @aliases_markdown Mappings.aliases(:all)
                    |> Enum.map(format_helper)
                    |> Enum.join("\n")

  @doc """
  Returns a list of shorthand aliases that may be used instead of the full name
  of the encoding.

  The available aliases are:

  | Alias | Full name |
  |------:|:----------|
  #{@aliases_markdown}

  Some of these may not be available depending on mix configuration. If the
  `selection` parameter is `:all` then all possible aliases are listed,
  otherwise, only the available aliases are listed

  For a full list of encodings, see `encoding_list/1`
  """
  @spec aliases(atom) :: list(atom)
  def aliases(selection \\ nil)
  defdelegate aliases(selection), to: Mappings

  # format as table with 3 columns
  @encodings_markdown Mappings.encoding_list(:all)
                      |> Enum.map(&String.pad_trailing(&1, 30))
                      |> Enum.chunk_every(3, 3, ["", ""])
                      |> Enum.map(&Enum.join(&1, " | "))
                      |> Enum.map(&"| #{&1} |")
                      |> Enum.join("\n")

  @encodings_atom Mappings.encoding_list(:all)
                  |> Enum.map(&String.to_atom/1)

  @doc """
  Returns a list of the supported encodings. These are extracted from
  http://unicode.org/ and the names correspond to a encoding file on that page

  `encoding_list/1` is normally called without any parameters to list the
  encodings that are currently configured during compilation. To see all
  available options, even those unavailable, use `encoding_list(:all)`

  The available encodings are:

  #{@encodings_markdown}

  For more information about configuring encodings, refer to `Codepagex`.

  For a list of shorthand names, see `aliases/1`
  """
  @spec encoding_list(atom) :: list(String.t())
  def encoding_list(selection \\ nil)
  defdelegate encoding_list(selection), to: Mappings

  @doc false
  def encoding_atoms, do: @encodings_atom

  # This is the default missing_fun
  # Any undecodable/unencodable input makes the conversion fail with an error.
  defp error_on_missing do
    fn _ ->
      {:ok,
       fn _, _ ->
         {:error, "Invalid bytes for encoding", nil}
       end}
    end
  end

  @doc """
  This function may be used as a parameter to `to_string/4` or `to_string!/4`
  such that any bytes in the input binary that don't have a proper encoding are
  replaced with a special unicode character and the function will not
  fail.

  If this function is used, `to_string/4` will never return an error.

  The accumulator input `acc` of `to_string/4` is incremented by the number of
  replacements made.

  ## Examples

      iex> iso = "Hello æøå!" |> from_string!(:iso_8859_1)
      iex> to_string!(iso, :ascii, use_utf_replacement())
      "Hello ���!"

      iex> iso = "Hello æøå!" |> from_string!(:iso_8859_1)
      iex> to_string(iso, :ascii, use_utf_replacement())
      {:ok, "Hello ���!", 3}
  """
  @spec use_utf_replacement :: to_s_missing_outer
  def use_utf_replacement do
    fn _encoding ->
      {:ok, &use_utf_replacement_inner/2}
    end
  end

  # Skips one offending byte and emits U+FFFD; counts replacements in acc.
  defp use_utf_replacement_inner(<<_, rest::binary>>, acc) do
    # � replacement character used to replace an unknown or
    # unrepresentable character
    new_acc = if is_integer(acc), do: acc + 1, else: 1
    {:ok, <<0xFFFD::utf8>>, rest, new_acc}
  end

  @doc """
  This function may be used in conjunction with to `from_string/4` or
  `from_string!/4`. If there are utf-8 codepoints in the source string that are
  not possible to represent in the target encoding, they are replaced with a
  String.

  When using this function, `from_string/4` will never return an error if
  `replace_with` converts to the target encoding without errors.

  The accumulator input `acc` of `from_string/4` is incremented on each
  replacement done.

  ## Examples

      iex> from_string!("Hello æøå!", :ascii, replace_nonexistent("_"))
      "Hello ___!"

      iex> from_string("Hello æøå!", :ascii, replace_nonexistent("_"), 100)
      {:ok, "Hello ___!", 103}
  """
  @spec replace_nonexistent(String.t()) :: from_s_missing_outer
  def replace_nonexistent(replace_with) do
    fn encoding ->
      # The replacement string itself is encoded once up-front; if it cannot
      # be represented in the target encoding the whole conversion fails.
      case from_string(replace_with, encoding) do
        {:ok, encoded_replace_with} ->
          inner = fn <<_::utf8, rest::binary>>, acc ->
            new_acc = if is_integer(acc), do: acc + 1, else: 1
            {:ok, encoded_replace_with, rest, new_acc}
          end

          {:ok, inner}

        err ->
          err
      end
    end
  end

  # Drops the accumulator from a 3-tuple result for the arity-2 public API.
  @spec strip_acc({atom, term, integer}) :: {atom, term}
  defp strip_acc({code, return_value, _acc}), do: {code, return_value}

  @doc """
  Converts a binary in a specified encoding to an Elixir string in utf-8
  encoding.

  The encoding parameter should be in `encoding_list/0` (passed as atoms or
  strings), or in `aliases/0`.

  ## Examples

      iex> to_string(<<72, 201, 166, 166, 211>>, :iso_8859_1)
      {:ok, "Hɦ¦Ó"}

      iex> to_string(<<128>>, "ETSI/GSM0338")
      {:error, "Invalid bytes for encoding"}
  """
  @spec to_string(binary, encoding) :: {:ok, String.t()} | {:error, term}
  def to_string(binary, encoding) do
    to_string(binary, encoding, error_on_missing())
    |> strip_acc
  end

  @doc """
  Convert a binary in a specified encoding into an Elixir string in utf-8
  encoding

  Compared to `to_string/2`, you may pass a `missing_fun` function parameter to
  handle encoding errors in the `binary`. The function `use_utf_replacement/0`
  may be used as a default error handling machanism.

  ## Implementing missing_fun

  The `missing_fun` must be an anonymous function that returns a second
  function. The outer function will receive the encoding used by `to_string/4`,
  and must then return `{:ok, inner_function}` or `{:error, reason}`. Returning
  `:error` will cause `to_string/4` to fail.

  The returned inner function must receive two arguments.

  - a binary containing the remainder of the `binary` parameter that is still
    unprocessed.
  - the accumulator `acc`

  The return value must be

  - `{:ok, replacement, new_rest, new_acc}` to continue processing
  - `{:error, reason, new_acc}` to cause `to_string/4` to fail

  The `acc` parameter from `to_string/4` is passed between every invocation of
  the inner function then returned by `to_string/4`. In many use cases, `acc`
  may be ignored.

  ## Examples

  Using the `use_utf_replacement/0` function to handle invalid bytes:

      iex> iso = "Hello æøå!" |> from_string!(:iso_8859_1)
      iex> to_string(iso, :ascii, use_utf_replacement())
      {:ok, "Hello ���!", 3}

      iex> iso = "Hello æøå!" |> from_string!(:iso_8859_1)
      iex> missing_fun =
      ...>   fn encoding ->
      ...>     case to_string("#", encoding) do
      ...>       {:ok, replacement} ->
      ...>         inner_fun =
      ...>           fn <<_, rest :: binary>>, acc ->
      ...>             {:ok, replacement, rest, acc + 1}
      ...>           end
      ...>         {:ok, inner_fun}
      ...>       err ->
      ...>         err
      ...>     end
      ...>   end
      iex> to_string(iso, :ascii, missing_fun, 0)
      {:ok, "Hello ###!", 3}

  The previous code was included for completeness. If you know your replacement
  is valid in the target encoding, you might as well do:

      iex> iso = "Hello æøå!" |> from_string!(:iso_8859_1)
      iex> missing_fun =
      ...>   fn _encoding ->
      ...>     inner_fun =
      ...>       fn <<_, rest :: binary>>, acc ->
      ...>         {:ok, "#", rest, acc + 1}
      ...>       end
      ...>     {:ok, inner_fun}
      ...>   end
      iex> to_string(iso, :ascii, missing_fun, 10)
      {:ok, "Hello ###!", 13}
  """
  @spec to_string(binary, encoding, to_s_missing_outer, term) ::
          {:ok, String.t(), integer} | {:error, term, integer}
  def to_string(binary, encoding, missing_fun, acc \\ nil)

  # create a forwarding to_string implementation for each alias
  # (compile-time generated clauses that rewrite the alias to its full name)
  for {aliaz, encoding} <- Mappings.aliases() do
    def to_string(binary, unquote(aliaz), missing_fun, acc) do
      to_string(binary, unquote(encoding |> String.to_atom()), missing_fun, acc)
    end
  end

  def to_string(binary, encoding, missing_fun, acc) when is_atom(encoding) do
    case missing_fun.(encoding) do
      {:ok, inner_fun} ->
        Mappings.to_string(binary, encoding, inner_fun, acc)

      err ->
        err
    end
  end

  def to_string(binary, encoding, missing_fun, acc) when is_binary(encoding) do
    # to_existing_atom is deliberate: encoding names come from callers and
    # must not mint new atoms; load_atoms/0 guarantees valid names exist.
    try do
      to_string(binary, String.to_existing_atom(encoding), missing_fun, acc)
    rescue
      ArgumentError ->
        {:error, "Unknown encoding #{inspect(encoding)}", acc}
    end
  end

  @doc """
  Like `to_string/2` but raises exceptions on errors.

  ## Examples

      iex> to_string!(<<72, 201, 166, 166, 211>>, :iso_8859_1)
      "Hɦ¦Ó"

      iex> to_string!(<<128>>, "ETSI/GSM0338")
      ** (Codepagex.Error) Invalid bytes for encoding
  """
  @spec to_string!(binary, encoding) :: String.t() | no_return
  def to_string!(binary, encoding) do
    to_string!(binary, encoding, error_on_missing(), nil)
  end

  @doc """
  Like `to_string/4` but raises exceptions on errors.

  ## Examples

      iex> iso = "Hello æøå!" |> from_string!(:iso_8859_1)
      iex> to_string!(iso, :ascii, use_utf_replacement())
      "Hello ���!"
  """
  @spec to_string!(binary, encoding, to_s_missing_outer, term) ::
          String.t() | no_return
  def to_string!(binary, encoding, missing_fun, acc \\ nil) do
    case to_string(binary, encoding, missing_fun, acc) do
      {:ok, result, _} ->
        result

      {:error, reason, _} ->
        raise Codepagex.Error, reason
    end
  end

  @doc """
  Converts an Elixir string in utf-8 encoding to a binary in another encoding.

  The `encoding` parameter should be in `encoding_list/0` as an atom or String,
  or in `aliases/0`.

  ## Examples

      iex> from_string("Hɦ¦Ó", :iso_8859_1)
      {:ok, <<72, 201, 166, 166, 211>>}

      iex> from_string("Hɦ¦Ó", :"ISO8859/8859-1") # without alias
      {:ok, <<72, 201, 166, 166, 211>>}

      iex> from_string("ʒ", :iso_8859_1)
      {:error, "Invalid bytes for encoding"}
  """
  @spec from_string(String.t(), encoding) :: {:ok, binary} | {:error, term}
  def from_string(string, encoding) do
    from_string(string, encoding, error_on_missing(), nil)
    |> strip_acc
  end

  @doc """
  Convert an Elixir String in utf-8 to a binary in a specified encoding. A
  function parameter specifies how to deal with codepoints that are not
  representable in the target encoding.

  Compared to `from_string/2`, you may pass a `missing_fun` function parameter
  to handle encoding errors in `string`. The function `replace_nonexistent/1`
  may be used as a default error handling machanism.

  The `encoding` parameter should be in `encoding_list/0` as an atom or String,
  or in `aliases/0`.

  ## Implementing missing_fun

  The `missing_fun` must be an anonymous function that returns a second
  function. The outer function will receive the encoding used by
  `from_string/4`, and must then return `{:ok, inner_function}` or `{:error,
  reason}`. Returning `:error` will cause `from_string/4` to fail.

  The returned inner function must receive two arguments.

  - a String containing the remainder of the `string` parameter that is still
    unprocessed.
  - the accumulator `acc`

  The return value must be

  - `{:ok, replacement, new_rest, new_acc}` to continue processing
  - `{:error, reason, new_acc}` to cause `from_string/4` to fail

  The `acc` parameter from `from_string/4` is passed between every invocation
  of the inner function then returned by `to_string/4`. In many use cases,
  `acc` may be ignored.

  ## Examples

  Using the `replace_nonexistent/1` function to handle invalid bytes:

      iex> from_string("Hello æøå!", :ascii, replace_nonexistent("_"))
      {:ok, "Hello ___!", 3}

  Defining a custom `missing_fun`:

      iex> missing_fun =
      ...>   fn encoding ->
      ...>     case from_string("#", encoding) do
      ...>       {:ok, replacement} ->
      ...>         inner_fun =
      ...>           fn <<_ :: utf8, rest :: binary>>, acc ->
      ...>             {:ok, replacement, rest, acc + 1}
      ...>           end
      ...>         {:ok, inner_fun}
      ...>       err ->
      ...>         err
      ...>     end
      ...>   end
      iex> from_string("Hello æøå!", :ascii, missing_fun, 0)
      {:ok, "Hello ###!", 3}

  The previous code was included for completeness. If you know your replacement
  is valid in the target encoding, you might as well do:

      iex> missing_fun = fn _encoding ->
      ...>   inner_fun =
      ...>     fn <<_ :: utf8, rest :: binary>>, acc ->
      ...>       {:ok, "#", rest, acc + 1}
      ...>     end
      ...>   {:ok, inner_fun}
      ...> end
      iex> from_string("Hello æøå!", :ascii, missing_fun, 10)
      {:ok, "Hello ###!", 13}
  """
  @spec from_string(binary, encoding, from_s_missing_outer, term) ::
          {:ok, String.t(), integer} | {:error, term, integer}
  def from_string(string, encoding, missing_fun, acc \\ nil)

  # aliases are forwarded to proper name
  # (compile-time generated clauses, mirroring the to_string/4 alias clauses)
  for {aliaz, encoding} <- Mappings.aliases() do
    def from_string(string, unquote(aliaz), missing_fun, acc) do
      from_string(
        string,
        unquote(encoding |> String.to_atom()),
        missing_fun,
        acc
      )
    end
  end

  def from_string(string, encoding, missing_fun, acc) when is_atom(encoding) do
    case missing_fun.(encoding) do
      {:ok, inner_fun} ->
        Mappings.from_string(string, encoding, inner_fun, acc)

      err ->
        err
    end
  end

  def from_string(
        string,
        encoding,
        missing_fun,
        acc
      )
      when is_binary(encoding) do
    # Same safe atom conversion as the to_string/4 string clause.
    try do
      from_string(string, String.to_existing_atom(encoding), missing_fun, acc)
    rescue
      ArgumentError ->
        {:error, "Unknown encoding #{inspect(encoding)}", acc}
    end
  end

  @doc """
  Like `from_string/2` but raising exceptions on errors.

  ## Examples

      iex> from_string!("Hɦ¦Ó", :iso_8859_1)
      <<72, 201, 166, 166, 211>>

      iex> from_string!("ʒ", :iso_8859_1)
      ** (Codepagex.Error) Invalid bytes for encoding
  """
  @spec from_string!(String.t(), encoding) :: binary | no_return
  def from_string!(binary, encoding) do
    from_string!(binary, encoding, error_on_missing(), nil)
  end

  @doc """
  Like `from_string/4` but raising exceptions on errors.

  ## Examples

      iex> missing_fun = replace_nonexistent("_")
      iex> from_string!("Hello æøå!", :ascii, missing_fun)
      "Hello ___!"
  """
  @spec from_string!(String.t(), encoding, from_s_missing_outer, term) ::
          binary | no_return
  def from_string!(string, encoding, missing_fun, acc \\ nil) do
    case from_string(string, encoding, missing_fun, acc) do
      {:ok, result, _} ->
        result

      {:error, reason, _} ->
        raise Codepagex.Error, reason
    end
  end

  @doc """
  Convert a binary in one encoding to a binary in another encoding. The string
  is converted to utf-8 internally in the process.

  The encoding parameters should be in `encoding_list/0` or `aliases/0`. It may
  be passed as an atom, or a string for full encoding names.

  ## Examples

      iex> translate(<<174>>, :iso_8859_1, :iso_8859_15)
      {:ok, <<174>>}

      iex> translate(<<174>>, :iso_8859_1, :iso_8859_2)
      {:error, "Invalid bytes for encoding"}
  """
  @spec translate(binary, encoding, encoding) :: {:ok, binary} | {:error, term}
  def translate(binary, encoding_from, encoding_to) do
    case to_string(binary, encoding_from) do
      {:ok, string} ->
        from_string(string, encoding_to)

      err ->
        err
    end
  end

  @doc """
  Like `translate/3` but raises exceptions on errors

  ## Examples

      iex> translate!(<<174>>, :iso_8859_1, :iso_8859_15)
      <<174>>

      iex> translate!(<<174>>, :iso_8859_1,:iso_8859_2)
      ** (Codepagex.Error) Invalid bytes for encoding
  """
  @spec translate!(binary, encoding, encoding) :: binary
  def translate!(binary, encoding_from, encoding_to) do
    binary
    |> to_string!(encoding_from)
    |> from_string!(encoding_to)
  end
end
|
lib/codepagex.ex
| 0.763396
| 0.448366
|
codepagex.ex
|
starcoder
|
defmodule Raxx.Request do
  @moduledoc """
  HTTP requests to a Raxx application are encapsulated in a `Raxx.Request` struct.

  A request has all the properties of the url it was sent to.
  In addition it has optional content, in the body.
  As well as a variable number of headers that contain meta data.

  Where appropriate URI properties are named from this definition.

  > scheme:[//[user:password@]host[:port]][/]path[?query][#fragment]

  from [wikipedia](https://en.wikipedia.org/wiki/Uniform_Resource_Identifier#Syntax)

  The contents are itemised below:

  | **scheme** | `http` or `https`, depending on the transport used. |
  | **authority** | The location of the hosting server, as a binary. e.g. `www.example.com`. Plus an optional port number, separated from the hostname by a colon |
  | **method** | The HTTP request method, such as `:GET` or `:POST`, as an atom. This cannot ever be `nil`. It is always uppercase. |
  | **mount** | The segments of the request URL's “path”, that have already been matched. Same as rack path_info. This may be an empty array, if the requested URL targets the application root. |
  | **path** | The remainder of the request URL's “path”, split into segments. It designates the virtual “location” of the request's target within the application. This may be an empty array, if the requested URL targets the application root. |
  | **raw_path** | The request URL's "path" |
  | **query** | the URL query string. |
  | **headers** | The headers from the HTTP request as a map of strings. Note all headers will be downcased, e.g. `%{"content-type" => "text/plain"}` |
  | **body** | The body content sent with the request |
  """

  @typedoc """
  Method to indicate the desired action to be performed on the identified resource.
  """
  @type method :: atom

  @typedoc """
  Scheme describing protocol used.
  """
  @type scheme :: :http | :https

  @typedoc """
  Elixir representation for an HTTP request.
  """
  @type t :: %__MODULE__{
          scheme: scheme,
          authority: binary,
          method: method,
          mount: [binary],
          path: [binary],
          raw_path: binary,
          query: binary | nil,
          headers: Raxx.headers(),
          body: Raxx.body()
        }

  defstruct scheme: nil,
            authority: nil,
            method: nil,
            mount: [],
            path: [],
            raw_path: "",
            query: nil,
            headers: [],
            body: nil

  # Well-known ports per scheme, used when the authority carries no
  # explicit ":port" suffix.
  @default_ports %{
    http: 80,
    https: 443
  }

  @doc """
  Extracts the hostname from the request's authority, dropping any
  `":port"` suffix.
  """
  def host(%__MODULE__{authority: authority}) do
    [hostname | _maybe_port] = String.split(authority, ":")
    hostname
  end

  @doc """
  Returns the port the request was addressed to: the explicit port from the
  authority when present, otherwise the default for the request's scheme.

  A custom scheme-to-port map may be supplied as the second argument.
  """
  def port(%__MODULE__{scheme: scheme, authority: authority}, default_ports \\ @default_ports) do
    case String.split(authority, ":") do
      [_hostname] ->
        # No explicit port; fall back to the scheme default (nil if unknown).
        Map.get(default_ports, scheme)

      [_hostname, raw_port] ->
        # Integer.parse tolerates trailing garbage; only the numeric prefix
        # is used, matching the original behavior.
        {port_number, _remainder} = Integer.parse(raw_port)
        port_number
    end
  end
end
|
lib/raxx/request.ex
| 0.893988
| 0.590543
|
request.ex
|
starcoder
|
defmodule Bunch.KVList do
  @deprecated "Use `Bunch.KVEnum` instead"
  @moduledoc """
  A bunch of helper functions for manipulating key-value lists (including keyword
  lists).

  Key-value lists are represented as lists of 2-element tuples, where the first
  element of each tuple is a key, and the second is a value.
  """

  @type t(key, value) :: [{key, value}]

  @doc """
  Maps keys of `list` using function `f`.

  ## Example

      iex> #{inspect(__MODULE__)}.map_keys([{1, :a}, {2, :b}], & &1+1)
      [{2, :a}, {3, :b}]

  """
  @spec map_keys(t(k1, v), (k1 -> k2)) :: t(k2, v) when k1: any, k2: any, v: any
  def map_keys(list, f) do
    for {key, value} <- list, do: {f.(key), value}
  end

  @doc """
  Maps values of `list` using function `f`.

  ## Example

      iex> #{inspect(__MODULE__)}.map_values([a: 1, b: 2], & &1+1)
      [a: 2, b: 3]

  """
  @spec map_values(t(k, v1), (v1 -> v2)) :: t(k, v2) when k: any, v1: any, v2: any
  def map_values(list, f) do
    for {key, value} <- list, do: {key, f.(value)}
  end

  @doc """
  Filters elements of `list` by keys using function `f`.

  ## Example

      iex> #{inspect(__MODULE__)}.filter_by_keys([a: 1, b: 2, a: 3], & &1 == :a)
      [a: 1, a: 3]

  """
  @spec filter_by_keys(t(k, v), (k -> as_boolean(term))) :: t(k, v) when k: any, v: any
  def filter_by_keys(list, f) do
    Enum.filter(list, fn {key, _value} -> f.(key) end)
  end

  @doc """
  Filters elements of `list` by values using function `f`.

  ## Example

      iex> #{inspect(__MODULE__)}.filter_by_values([a: 1, b: 2, a: 3], & &1 |> rem(2) == 0)
      [b: 2]

  """
  @spec filter_by_values(t(k, v), (v -> as_boolean(term))) :: t(k, v) when k: any, v: any
  def filter_by_values(list, f) do
    Enum.filter(list, fn {_key, value} -> f.(value) end)
  end

  @doc """
  Executes `f` for each key in `list`.

  ## Example

      iex> #{inspect(__MODULE__)}.each_key([a: 1, b: 2, a: 3], & send(self(), &1))
      iex> [:a, :b, :a] |> Enum.each(&receive do ^&1 -> :ok end)
      :ok

  """
  @spec each_key(t(k, v), (k -> any | no_return)) :: :ok when k: any, v: any
  def each_key(list, f) do
    Enum.each(list, fn {key, _value} -> f.(key) end)
  end

  @doc """
  Executes `f` for each value in `list`.

  ## Example

      iex> #{inspect(__MODULE__)}.each_value([a: 1, b: 2, a: 3], & send(self(), &1))
      iex> 1..3 |> Enum.each(&receive do ^&1 -> :ok end)
      :ok

  """
  @spec each_value(t(k, v), (v -> any | no_return)) :: :ok when k: any, v: any
  def each_value(list, f) do
    Enum.each(list, fn {_key, value} -> f.(value) end)
  end

  @doc """
  Returns `true` if `f` returns truthy value for any key from `list`, otherwise `false`.

  ## Example

      iex> #{inspect(__MODULE__)}.any_key?([a: 1, b: 2, a: 3], & &1 == :b)
      true
      iex> #{inspect(__MODULE__)}.any_key?([a: 1, b: 3, a: 5], & &1 == :c)
      false

  """
  @spec any_key?(t(k, v), (k -> as_boolean(term))) :: boolean when k: any, v: any
  def any_key?(list, f) do
    Enum.any?(list, fn {key, _value} -> f.(key) end)
  end

  @doc """
  Returns `true` if `f` returns truthy value for any value from `list`, otherwise `false`.

  ## Example

      iex> #{inspect(__MODULE__)}.any_value?([a: 1, b: 2, a: 3], & &1 |> rem(2) == 0)
      true
      iex> #{inspect(__MODULE__)}.any_value?([a: 1, b: 3, a: 5], & &1 |> rem(2) == 0)
      false

  """
  @spec any_value?(t(k, v), (v -> as_boolean(term))) :: boolean when k: any, v: any
  def any_value?(list, f) do
    Enum.any?(list, fn {_key, value} -> f.(value) end)
  end
end
|
lib/bunch/kv_list.ex
| 0.912514
| 0.674771
|
kv_list.ex
|
starcoder
|
defmodule Multihash do
  @moduledoc """
  Multihash library that follows jbenet multihash protocol so that the hash contains information
  about the hashing algorithm used making it more generic so that one can switch algorithm in future without
  much consequences
  """

  # NOTE(review): `digest` is always a binary once set by `decode/1`; the
  # previous spec declared it as `integer`. The struct defaults are left
  # unchanged for backward compatibility with existing callers.
  @type t :: %Multihash{name: atom, code: integer, length: integer, digest: binary}
  defstruct name: nil, code: 0, length: 0, digest: 0

  @type hash_type :: :sha1 | :sha2_256 | :sha2_512 | :sha3 | :blake2b | :blake2s
  @type error :: {:error, String.t}
  @type on_encode :: {:ok, binary} | error
  @type on_decode :: {:ok, t} | error
  @type integer_default :: integer | :default

  # Hash function -> wire code and default digest length (in bytes).
  @hash_info %{
    :sha1 => [code: 0x11, length: 20],
    :sha2_256 => [code: 0x12, length: 32],
    :sha2_512 => [code: 0x13, length: 64],
    :sha3 => [code: 0x14, length: 64],
    :blake2b => [code: 0x40, length: 64],
    :blake2s => [code: 0x41, length: 32]
  }

  # Reverse lookup: wire code -> hash function atom.
  @code_hash_map %{
    0x11 => :sha1,
    0x12 => :sha2_256,
    0x13 => :sha2_512,
    0x14 => :sha3,
    0x40 => :blake2b,
    0x41 => :blake2s
  }

  # Error strings
  @error_invalid_digest_hash "Invalid digest or hash"
  @error_invalid_multihash "Invalid multihash"
  @error_invalid_length "Invalid length"
  @error_invalid_trunc_length "Invalid truncation length"
  @error_invalid_size "Invalid size"
  @error_invalid_hash_function "Invalid hash function"
  @error_invalid_hash_code "Invalid hash code"

  @doc ~S"""
  Encode the provided hashed `digest` to the provided multihash of `hash_code`

  ## Examples

      iex> Multihash.encode(:sha1, :crypto.hash(:sha, "Hello"))
      {:ok, <<17, 20, 247, 255, 158, 139, 123, 178, 224, 155, 112, 147, 90, 93, 120, 94, 12, 197, 217, 208, 171, 240>>}

      iex> Multihash.encode(:sha3, "1234567890123456789012345678901234567890123456789012345678901234", 10)
      {:ok, <<20, 10, 49, 50, 51, 52, 53, 54, 55, 56, 57, 48>>}

      iex> Multihash.encode(:sha2_256, :crypto.hash(:sha256, "Hello"))
      {:ok, <<18, 32, 24, 95, 141, 179, 34, 113, 254, 37, 245, 97, 166, 252, 147, 139, 46, 38, 67, 6, 236, 48, 78, 218, 81, 128, 7, 209, 118, 72, 38, 56, 25, 105>>}

  Invalid `hash_code`, `digest` length corresponding to the hash function will return an error

      iex> Multihash.encode(:sha2_unknow, :crypto.hash(:sha, "Hello"))
      {:error, "Invalid hash function"}

      iex> Multihash.encode(0x20, :crypto.hash(:sha, "Hello"))
      {:error, "Invalid hash code"}

  It's possible to [truncate a digest](https://github.com/jbenet/multihash/issues/1#issuecomment-91783612)
  by passing an optional `length` parameter. Passing a `length` longer than the default digest length
  of the hash function will return an error.

      iex> Multihash.encode(:sha1, :crypto.hash(:sha, "Hello"), 10)
      {:ok, <<17, 10, 247, 255, 158, 139, 123, 178, 224, 155, 112, 147>>}

      iex> Multihash.encode(:sha1, :crypto.hash(:sha, "Hello"), 30)
      {:error, "Invalid truncation length"}
  """
  def encode(hash_code, digest, length \\ :default)

  @spec encode(integer, binary, integer_default) :: on_encode
  def encode(hash_code, digest, length) when is_number(hash_code) and is_binary(digest), do:
    encode(<<hash_code>>, digest, length)

  @spec encode(binary, binary, integer_default) :: on_encode
  def encode(<<_hash_code>> = hash_code, digest, length) when is_binary(digest) do
    with {:ok, function} <- get_hash_function(hash_code),
      do: encode(function, digest, length)
  end

  @spec encode(hash_type, binary, integer_default) :: on_encode
  def encode(hash_func, digest, length) when is_atom(hash_func) and is_binary(digest) do
    with {:ok, info} <- get_hash_info(hash_func),
         :ok <- check_digest_length(info, digest),
         do: encode_internal(info, digest, length)
  end

  def encode(_digest, _hash_code, _length), do: {:error, @error_invalid_digest_hash}

  @doc ~S"""
  Decode the provided multi hash to %Multihash{code: , name: , length: , digest: }

  ## Examples

      iex> Multihash.decode(<<17, 20, 247, 255, 158, 139, 123, 178, 224, 155, 112, 147, 90, 93, 120, 94, 12, 197, 217, 208, 171, 240>>)
      {:ok, %Multihash{name: :sha1, code: 17, length: 20, digest: <<247, 255, 158, 139, 123, 178, 224, 155, 112, 147, 90, 93, 120, 94, 12, 197, 217, 208, 171, 240>>}}

      iex> Multihash.decode(<<17, 10, 247, 255, 158, 139, 123, 178, 224, 155, 112, 147>>)
      {:ok, %Multihash{name: :sha1, code: 17, length: 10, digest: <<247, 255, 158, 139, 123, 178, 224, 155, 112, 147>>}}

  Invalid multihash will result in errors

      iex> Multihash.decode(<<17, 20, 247, 255, 158, 139, 123, 178, 224, 155, 112, 147, 90, 93, 120, 94, 12, 197, 217, 208, 171>>)
      {:error, "Invalid size"}

      iex> Multihash.decode(<<25, 20, 247, 255, 158, 139, 123, 178, 224, 155, 112, 147, 90, 93, 120, 94, 12, 197, 217, 208, 171, 240>>)
      {:error, "Invalid hash code"}

      iex> Multihash.decode(<<17, 32, 247, 255, 158, 139, 123, 178, 224, 155, 112, 147, 90, 93, 120, 94, 12, 197, 217, 208, 171, 240>>)
      {:error, "Invalid length"}

      iex> Multihash.decode("Hello")
      {:error, "Invalid hash code"}
  """
  @spec decode(binary) :: on_decode
  def decode(<<code, length, digest::binary>>) do
    with {:ok, function} <- get_hash_function(<<code>>),
         {:ok, info} <- get_hash_info(function),
         :ok <- check_length(info, length),
         :ok <- check_truncated_digest_length(info, digest, length),
         do: decode_internal(info, digest, length)
  end

  def decode(_), do: {:error, @error_invalid_multihash}

  @doc ~S"""
  Checks if the code is within application range

  ## Examples

      iex> Multihash.is_app_code(<<0x08>>)
      true

      iex> Multihash.is_app_code(<<0x10>>)
      false
  """
  @spec is_app_code(<<_ :: 8>>) :: boolean
  def is_app_code(<<code>>), do: code >= 0 and code < 0x10

  @doc ~S"""
  Checks if the code is a valid code

  ## Examples

      iex> Multihash.is_valid_code(<<0x8>>)
      true

      iex> Multihash.is_valid_code(<<0x12>>)
      true

      iex> Multihash.is_valid_code(<<0x21>>)
      false
  """
  @spec is_valid_code(<<_ :: 8>>) :: boolean
  def is_valid_code(<<_>> = code) do
    if is_app_code(code) do
      true
    else
      is_valid_hash_code code
    end
  end

  # NOTE(review): the `@doc` attributes previously attached to the private
  # functions below were replaced with plain comments — the compiler discards
  # `@doc` on `defp` and emits a warning for each occurrence.

  # Checks if the `code` is a valid hash code.
  defp is_valid_hash_code(<<_>> = code), do: is_valid_hash_code get_hash_function(code)
  defp is_valid_hash_code({:ok, _}), do: true
  defp is_valid_hash_code({:error, _}), do: false

  # Encode the `digest` to multihash, truncating it to the `trunc_length` if necessary.
  defp encode_internal([code: code, length: length], <<digest::binary>>, trunc_length) do
    case trunc_length do
      :default -> {:ok, <<code, length>> <> digest}
      l when 0 < l and l <= length -> {:ok, <<code, l>> <> Kernel.binary_part(digest, 0, l)}
      _ -> {:error, @error_invalid_trunc_length}
    end
  end

  # Decode the multihash to a %Multihash{name, code, length, digest} structure.
  defp decode_internal([code: code, length: _default_length], <<digest::binary>>, length) do
    {:ok, name} = get_hash_function <<code>>
    {:ok,
      %Multihash{
        name: name,
        code: code,
        length: length,
        digest: digest}}
  end

  # Checks if the incoming multihash has a `length` field equal or lower than
  # the `default_length` of the hash function.
  defp check_length([code: _code, length: default_length], original_length) do
    case original_length do
      l when 0 < l and l <= default_length -> :ok
      _ -> {:error, @error_invalid_length}
    end
  end

  # Checks if the incoming multihash has a `length` field fitting the actual
  # size of the possibly truncated `digest`.
  defp check_truncated_digest_length([code: _code, length: _default_length], digest, length) when is_binary(digest) do
    case byte_size(digest) do
      ^length -> :ok
      _ -> {:error, @error_invalid_size}
    end
  end

  # Checks if the length of the `digest` is same as the expected
  # `default_length` of the hash function while encoding.
  defp check_digest_length([code: _code, length: default_length], digest) when is_binary(digest) do
    case byte_size(digest) do
      ^default_length -> :ok
      _ -> {:error, @error_invalid_size}
    end
  end

  # Get hash info from the @hash_info keyword map based on the provided `hash_func`.
  defp get_hash_info(hash_func) when is_atom(hash_func), do:
    get_from_dict(@hash_info, hash_func, @error_invalid_hash_function)

  # Get hash function from the @code_hash_map based on the `code` key.
  defp get_hash_function(<<code>>), do:
    get_from_dict(@code_hash_map, code, @error_invalid_hash_code)

  # Retrieves `key` from `dict`; returns {:error, `failure_message`} when absent.
  defp get_from_dict(dict, key, failure_message) do
    case Map.get(dict, key, :none) do
      :none -> {:error, failure_message}
      value -> {:ok, value}
    end
  end
end
|
lib/multihash.ex
| 0.86792
| 0.520618
|
multihash.ex
|
starcoder
|
defmodule ExWire.Packet.Capability.Par.GetBlockHeaders do
  @moduledoc """
  Requests block headers starting from a given hash.
  ```
  **GetBlockHeaders** [`+0x03`: `P`, `block`: { `P` , `B_32` }, `maxHeaders`: `P`, `skip`: `P`, `reverse`: `P` in { `0` , `1` } ]
  Require peer to return a BlockHeaders message. Reply
  must contain a number of block headers, of rising number when reverse is 0,
  falling when 1, skip blocks apart, beginning at block block (denoted by either
  number or hash) in the canonical chain, and with at most maxHeaders items.
  ```
  """
  alias Blockchain.Block
  alias ExWire.Bridge.Sync
  alias ExWire.Packet
  alias ExWire.Packet.Capability.Par.BlockHeaders
  require Logger

  @behaviour ExWire.Packet

  # Compile-time injection point so tests can swap in a mock sync bridge.
  @sync Application.get_env(:ex_wire, :sync_mock, Sync)
  # Upper bound on headers returned per request, to bound response size.
  @max_headers_supported 100

  @type t :: %__MODULE__{
          block_identifier: Packet.block_identifier(),
          max_headers: pos_integer(),
          skip: pos_integer(),
          reverse: boolean()
        }

  defstruct [
    :block_identifier,
    :max_headers,
    :skip,
    :reverse
  ]

  @doc """
  Returns the relative message id offset for this message.
  This will help determine what its message ID is relative to other Packets in the same Capability.
  """
  @impl true
  @spec message_id_offset() :: 3
  def message_id_offset do
    0x03
  end

  @doc """
  Given a GetBlockHeaders packet, serializes for transport over Eth Wire Protocol.
  ## Examples
      iex> %ExWire.Packet.Capability.Par.GetBlockHeaders{block_identifier: 5, max_headers: 10, skip: 2, reverse: true}
      ...> |> ExWire.Packet.Capability.Par.GetBlockHeaders.serialize
      [5, 10, 2, 1]
      iex> %ExWire.Packet.Capability.Par.GetBlockHeaders{block_identifier: <<5>>, max_headers: 10, skip: 2, reverse: false}
      ...> |> ExWire.Packet.Capability.Par.GetBlockHeaders.serialize
      [<<5>>, 10, 2, 0]
  """
  @impl true
  @spec serialize(t) :: ExRLP.t()
  def serialize(packet = %__MODULE__{}) do
    [
      packet.block_identifier,
      packet.max_headers,
      packet.skip,
      if(packet.reverse, do: 1, else: 0)
    ]
  end

  @doc """
  Given an RLP-encoded GetBlockHeaders packet from Eth Wire Protocol,
  decodes into a GetBlockHeaders struct.
  ## Examples
      iex> ExWire.Packet.Capability.Par.GetBlockHeaders.deserialize([5, <<10>>, <<2>>, <<1>>])
      %ExWire.Packet.Capability.Par.GetBlockHeaders{block_identifier: 5, max_headers: 10, skip: 2, reverse: true}
      iex> ExWire.Packet.Capability.Par.GetBlockHeaders.deserialize([<<5>>, <<10>>, <<2>>, <<0>>])
      %ExWire.Packet.Capability.Par.GetBlockHeaders{block_identifier: <<5>>, max_headers: 10, skip: 2, reverse: false}
  """
  @impl true
  @spec deserialize(ExRLP.t()) :: t
  def deserialize(rlp) do
    [
      block_identifier,
      max_headers,
      skip,
      reverse
    ] = rlp

    %__MODULE__{
      block_identifier: block_identifier,
      max_headers: :binary.decode_unsigned(max_headers),
      skip: :binary.decode_unsigned(skip),
      reverse: :binary.decode_unsigned(reverse) == 1
    }
  end

  @doc """
  Handles a GetBlockHeaders message by replying with a BlockHeaders packet.
  Headers are read from the current sync trie when available; if the trie
  cannot be obtained, an empty header list is returned. Requests for more
  than #{@max_headers_supported} headers are capped at that limit.
  ## Examples
      iex> %ExWire.Packet.Capability.Par.GetBlockHeaders{block_identifier: 5, max_headers: 10, skip: 2, reverse: true}
      ...> |> ExWire.Packet.Capability.Par.GetBlockHeaders.handle()
      {:send,
       %ExWire.Packet.Capability.Par.BlockHeaders{
         headers: []
       }}
  """
  @impl true
  @spec handle(ExWire.Packet.packet()) :: ExWire.Packet.handle_response()
  def handle(packet = %__MODULE__{max_headers: max_headers})
      when max_headers > @max_headers_supported do
    handle(%__MODULE__{packet | max_headers: @max_headers_supported})
  end

  def handle(packet = %__MODULE__{}) do
    headers =
      case @sync.get_current_trie() do
        {:ok, trie} ->
          get_block_headers(
            trie,
            packet.block_identifier,
            packet.max_headers,
            packet.skip,
            packet.reverse
          )

        {:error, error} ->
          _ =
            Logger.warn(fn ->
              "Error calling Sync.get_current_trie(): #{error}. Returning empty headers."
            end)

          []
      end

    {:send, %BlockHeaders{headers: headers}}
  end

  # Collects up to `num_headers` headers starting at `identifier` (hash or
  # block number), stepping `skip` blocks per hop in the direction given by
  # `reverse`. Headers are accumulated by prepending, then reversed once.
  defp get_block_headers(trie, identifier, num_headers, skip, reverse) do
    get_block_headers(trie, identifier, num_headers, skip, reverse, [])
  end

  defp get_block_headers(_trie, _identifier, 0, _skip, _reverse, headers),
    do: Enum.reverse(headers)

  defp get_block_headers(trie, block_hash, num_headers, skip, reverse, headers)
       when is_binary(block_hash) do
    case Block.get_block(block_hash, trie) do
      {:ok, block} ->
        next_number = next_block_number(block.header.number, skip, reverse)

        get_block_headers(trie, next_number, num_headers - 1, skip, reverse, [
          block.header | headers
        ])

      _ ->
        _ =
          Logger.debug(fn -> "Could not find block with hash: #{Base.encode16(block_hash)}." end)

        # FIX(review): previously returned the accumulator unreversed here,
        # producing headers in the wrong order on a partial miss. Reverse to
        # match the fully-satisfied path.
        Enum.reverse(headers)
    end
  end

  defp get_block_headers(trie, block_number, num_headers, skip, reverse, headers) do
    case Block.get_block(block_number, trie) do
      {:ok, block} ->
        next_block_number = next_block_number(block.header.number, skip, reverse)

        get_block_headers(trie, next_block_number, num_headers - 1, skip, reverse, [
          block.header | headers
        ])

      _ ->
        _ = Logger.debug(fn -> "Could not find block with number: #{block_number}." end)

        # FIX(review): previously returned [] here, silently discarding all
        # headers already collected; now returns the partial result, in the
        # same order as the other clauses.
        Enum.reverse(headers)
    end
  end

  # Next block number to visit: walk down the chain when `reverse` is set,
  # up otherwise.
  defp next_block_number(block_number, skip, reverse) do
    if reverse == true do
      block_number - skip
    else
      block_number + skip
    end
  end
end
|
apps/ex_wire/lib/ex_wire/packet/capability/par/get_block_headers.ex
| 0.895383
| 0.834002
|
get_block_headers.ex
|
starcoder
|
defmodule Hierbautberlin.GeoData.NewsItem do
  # Ecto schema for news items that can be linked to geographic entities
  # (streets, street numbers and places). Two denormalized geometry columns
  # (:geometries, :geo_points) are kept on the row so proximity queries can
  # run without joining the association tables.
  use Ecto.Schema
  import Ecto.Query, warn: false
  import Ecto.Changeset
  alias Geo.PostGIS.Geometry
  alias Hierbautberlin.Repo

  alias Hierbautberlin.GeoData.{
    GeoPlace,
    GeoPosition,
    GeoStreet,
    GeoStreetNumber,
    GeoMapItem,
    NewsItem,
    Source
  }

  schema "news_items" do
    field :external_id, :string
    field :title, :string
    field :content, :string
    field :url, :string
    field :published_at, :utc_datetime
    # Cached GeometryCollection derived from associated streets and places.
    field :geometries, Geometry
    # Cached MultiPoint derived from associated streets, numbers and places.
    field :geo_points, Geometry
    field :hidden, :boolean, default: false
    belongs_to :source, Source

    many_to_many :geo_streets, GeoStreet,
      join_through: "geo_streets_news_items",
      on_replace: :delete

    many_to_many :geo_street_numbers, GeoStreetNumber,
      join_through: "geo_street_numbers_news_items",
      on_replace: :delete

    many_to_many :geo_places, GeoPlace, join_through: "geo_places_news_items", on_replace: :delete
    timestamps(type: :utc_datetime)
  end

  # Casts the scalar attributes only; geo associations are handled by
  # change_associations/2.
  def changeset(news_item, attrs) do
    news_item
    |> cast(attrs, [:external_id, :title, :content, :url, :published_at, :source_id, :hidden])
    |> unique_constraint(:external_id)
  end

  # Replaces the geo associations of a news item (attrs must contain
  # :geo_streets, :geo_street_numbers and :geo_places as preloaded structs)
  # and refreshes the cached geometry columns to match.
  def change_associations(%NewsItem{} = news_item, attrs) do
    news_item
    |> cast(%{}, [])
    |> put_assoc(:geo_streets, attrs[:geo_streets])
    |> put_assoc(:geo_street_numbers, attrs[:geo_street_numbers])
    |> put_assoc(:geo_places, attrs[:geo_places])
    |> update_cached_geometries()
  end

  # Backfill/maintenance helper: recomputes the cached geometry columns for
  # every news item in the database. Loads all rows at once — intended for
  # one-off migration runs, not request-time use.
  def update_all_geometries() do
    NewsItem
    |> Repo.all()
    |> Repo.preload([:geo_streets, :geo_street_numbers, :geo_places])
    |> Enum.each(fn news_item ->
      news_item
      |> update_cached_geometries()
      |> Repo.update!()
    end)
  end

  # Builds a changeset that refreshes :geometries and :geo_points from the
  # (already associated) geo records. Associations must be loaded.
  def update_cached_geometries(news_item) do
    changeset = cast(news_item, %{}, [])
    geo_streets = get_field(changeset, :geo_streets)
    geo_street_numbers = get_field(changeset, :geo_street_numbers)
    geo_places = get_field(changeset, :geo_places)

    changeset
    # Note: street numbers contribute points only, not geometries.
    |> put_change(:geometries, join_geometries([geo_streets, geo_places]))
    |> put_change(:geo_points, join_geo_points([geo_streets, geo_street_numbers, geo_places]))
  end

  # Collapses the non-nil :geometry fields of the given records into a single
  # GeometryCollection (SRID 4326), or nil when there is nothing to collect.
  defp join_geometries(geometries) do
    collection =
      geometries
      |> List.flatten()
      |> Enum.filter(fn item -> !is_nil(item.geometry) end)
      |> Enum.map(fn item ->
        item.geometry
      end)

    if Enum.empty?(collection) do
      nil
    else
      %Geo.GeometryCollection{
        geometries: collection,
        srid: 4326
      }
    end
  end

  # Collapses the non-nil :geo_point fields of the given records into a single
  # MultiPoint (SRID 4326), or nil when there is nothing to collect.
  defp join_geo_points(geometries) do
    coordinates =
      geometries
      |> List.flatten()
      |> Enum.filter(fn item -> !is_nil(item.geo_point) end)
      |> Enum.map(fn item ->
        item.geo_point.coordinates
      end)

    if Enum.empty?(coordinates) do
      nil
    else
      %Geo.MultiPoint{
        coordinates: coordinates,
        srid: 4326
      }
    end
  end

  # Returns up to `count` visible news items near the given lat/lng, mapped to
  # GeoMapItem view structs and ordered by distance. The 0.05 radius is in
  # degrees (SRID 4326) — roughly a few km at Berlin's latitude; TODO confirm
  # the intended physical radius.
  def get_near(lat, lng, count) do
    geom = %Geo.Point{
      coordinates: {lng, lat},
      properties: %{},
      srid: 4326
    }

    query =
      from item in NewsItem,
        where: not (is_nil(item.geometries) and is_nil(item.geo_points)),
        limit: ^count,
        where: item.hidden == false,
        where:
          fragment(
            "ST_DWithin(geometries, ?, 0.05 ) or ST_DWithin(geo_points, ?, 0.05 )",
            ^geom,
            ^geom
          ),
        order_by:
          fragment(
            "LEAST(ST_Distance(geometries, ?),ST_Distance(geo_points, ?))",
            ^geom,
            ^geom
          )

    query
    |> Repo.all()
    |> Repo.preload([:source, :geo_streets, :geo_street_numbers, :geo_places])
    |> Enum.map(fn item ->
      %GeoMapItem{
        type: :news_item,
        id: item.id,
        title: item.title,
        description: item.content,
        positions: get_positions_for_item(item),
        newest_date: item.published_at,
        source: item.source,
        url: item.url,
        participation_open: false,
        item: item
      }
    end)
  end

  # Flattens an item's geo associations into GeoPosition structs, dropping
  # entries that carry neither a point nor a geometry.
  defp get_positions_for_item(item) do
    (Enum.map(item.geo_streets, fn geo_street ->
       %GeoPosition{
         type: :geo_street,
         id: geo_street.id,
         geopoint: geo_street.geo_point,
         geometry: geo_street.geometry
       }
     end) ++
       Enum.map(item.geo_street_numbers, fn geo_street_number ->
         %GeoPosition{
           type: :geo_street_number,
           id: geo_street_number.id,
           geopoint: geo_street_number.geo_point
         }
       end) ++
       Enum.map(item.geo_places, fn geo_place ->
         %GeoPosition{
           type: :geo_place,
           id: geo_place.id,
           geopoint: geo_place.geo_point,
           geometry: geo_place.geometry
         }
       end))
    |> Enum.filter(fn position ->
      !is_nil(position.geopoint) || !is_nil(position.geometry)
    end)
  end
end
|
lib/hierbautberlin/geo_data/news_item.ex
| 0.561575
| 0.490114
|
news_item.ex
|
starcoder
|
defmodule Scrivener.HTML.SEO do
  @moduledoc """
  SEO related functions for pagination. See [https://support.google.com/webmasters/answer/1663744?hl=en](https://support.google.com/webmasters/answer/1663744?hl=en)
  for more information.
  `Scrivener.HTML.pagination_links/4` will use this module to add `rel` to each link produced to indicate to search engines which
  link is the `next` or `prev`ious link in the chain of links. The default is `rel` value is `canonical` otherwise.
  Additionally, it helps Google and other search engines to put `<link/>` tags in the `<head>`. The `Scrivener.HTML.SEO.header_links/4`
  function requires the same arguments you passed into your `Scrivener.HTML.pagination_links/4` call in the view. However, `header_links` needs
  to go into the `<head>` section of your page. See [SEO Tags in Phoenix](http://blog.danielberkompas.com/2016/01/28/seo-tags-in-phoenix.html)
  for help with how to do that. The recommended option is to use `render_existing/2` in your layout file and add a separate view to render that.
  """
  alias Scrivener.Page
  use Phoenix.HTML
  # Link-building defaults shared with Scrivener.HTML; :view_style only
  # affects the visible pagination widget, so it is dropped here.
  @defaults Keyword.drop(Scrivener.HTML.defaults(), [:view_style])
  @doc ~S"""
  Produces the value for a `rel` attribute in an `<a>` tag. Returns either `"next"`, `"prev"` or `"canonical"`.
      iex> Scrivener.HTML.SEO.rel(%Scrivener.Page{page_number: 5}, 4)
      "prev"
      iex> Scrivener.HTML.SEO.rel(%Scrivener.Page{page_number: 5}, 6)
      "next"
      iex> Scrivener.HTML.SEO.rel(%Scrivener.Page{page_number: 5}, 8)
      "canonical"
  """
  def rel(%Page{page_number: current_page}, page_number) when current_page + 1 == page_number,
    do: "next"

  def rel(%Page{page_number: current_page}, page_number) when current_page - 1 == page_number,
    do: "prev"

  def rel(_paginator, _page_number), do: "canonical"

  @doc ~S"""
  Produces `<link/>` tags for putting in the `<head>` to help SEO as recommended by Google webmasters.
  Arguments are the same as `Scrivener.HTML.pagination_links/4`. Consider using one of the following techniques to
  call this function: [http://blog.danielberkompas.com/2016/01/28/seo-tags-in-phoenix.html](http://blog.danielberkompas.com/2016/01/28/seo-tags-in-phoenix.html)
      iex> Phoenix.HTML.safe_to_string(Scrivener.HTML.SEO.header_links(%Scrivener.Page{total_pages: 10, page_number: 3}))
      "<link href=\"?page=2\" rel=\"prev\">\n<link href=\"?page=4\" rel=\"next\">"
  """
  def header_links(
        conn,
        %Page{total_pages: total_pages, page_number: page_number} = paginator,
        args,
        opts
      ) do
    # Emit a prev link unless we're on the first page, and a next link unless
    # we're on the last page.
    prev = if page_number > 1, do: prev_header_link(conn, paginator, args, opts)
    next = if total_pages > page_number, do: next_header_link(conn, paginator, args, opts)

    if prev && next do
      # Both are {:safe, iodata} tuples from Phoenix.HTML.Tag.tag/2; this
      # match unwraps the iodata (rebinding prev/next) so the two tags can be
      # joined into a single safe value.
      [{:safe, prev}, {:safe, next}] = [prev, next]
      {:safe, [prev, "\n", next]}
    else
      # Only one (or neither) link exists; return it as-is (or nil).
      prev || next
    end
  end

  # Convenience heads that fill in missing conn/args/opts arguments.
  def header_links(%Scrivener.Page{} = paginator), do: header_links(nil, paginator, [], [])

  def header_links(%Scrivener.Page{} = paginator, opts),
    do: header_links(nil, paginator, [], opts)

  def header_links(conn, %Scrivener.Page{} = paginator), do: header_links(conn, paginator, [], [])

  # A keyword list in third position is treated as opts, any other list as
  # positional path args.
  def header_links(conn, paginator, [{_, _} | _] = opts),
    do: header_links(conn, paginator, [], opts)

  def header_links(conn, paginator, [_ | _] = args), do: header_links(conn, paginator, args, [])

  # Builds the href for `page_number` using either the :path option or a path
  # function inferred by Scrivener.HTML from the paginator entries and args.
  defp href(conn, paginator, args, opts, page_number) do
    merged_opts = Keyword.merge(@defaults, opts)
    path = opts[:path] || Scrivener.HTML.find_path_fn(conn && paginator.entries, args)
    # Anything that is not a known default or :path is forwarded as a URL
    # query parameter.
    url_params = Keyword.drop(opts, Keyword.keys(@defaults) ++ [:path])
    page_param = merged_opts[:page_param]
    params_with_page = url_params ++ [{page_param, page_number}]
    args = [conn, merged_opts[:action]] ++ args
    apply(path, args ++ [params_with_page])
  end

  defp prev_header_link(conn, paginator, args, opts) do
    href = href(conn, paginator, args, opts, paginator.page_number - 1)
    tag(:link, href: href, rel: rel(paginator, paginator.page_number - 1))
  end

  defp next_header_link(conn, paginator, args, opts) do
    href = href(conn, paginator, args, opts, paginator.page_number + 1)
    tag(:link, href: href, rel: rel(paginator, paginator.page_number + 1))
  end
end
|
lib/scrivener/html/seo.ex
| 0.761184
| 0.562537
|
seo.ex
|
starcoder
|
defmodule Tournament do
  # Zeroed statistics row: matches played, wins, draws, losses, points.
  @default_team %{mp: 0, w: 0, d: 0, l: 0, p: 0}

  @doc """
  Given `input` lines representing two teams and whether the first of them won,
  lost, or reached a draw, separated by semicolons, calculate the statistics
  for each team's number of games played, won, drawn, lost, and total points
  for the season, and return a nicely-formatted string table.
  A win earns a team 3 points, a draw earns 1 point, and a loss earns nothing.
  Order the outcome by most total points for the season, and settle ties by
  listing the teams in alphabetical order.
  """
  @spec tally(input :: list(String.t())) :: String.t()
  def tally(input) do
    input
    |> Enum.map(&String.split(&1, ";"))
    |> Enum.reduce(%{}, fn
      # A "loss" for the first team is a win for the second, so both cases
      # funnel into the same :win update with the arguments ordered
      # winner-first. Malformed lines are ignored.
      [f, s, "win"], acc -> update_team(acc, f, s, :win)
      [f, s, "loss"], acc -> update_team(acc, s, f, :win)
      [f, s, "draw"], acc -> update_team(acc, f, s, :draw)
      _, acc -> acc
    end)
    |> print()
  end

  # Records a draw: both teams play one more match, draw once, gain 1 point.
  defp update_team(map, f, s, :draw) do
    add_draw = fn team -> %{team | mp: team.mp + 1, d: team.d + 1, p: team.p + 1} end

    map
    |> Map.update(f, add_draw.(@default_team), add_draw)
    |> Map.update(s, add_draw.(@default_team), add_draw)
  end

  # Records a decisive result: winner gains a win and 3 points, loser a loss.
  defp update_team(map, winner, loser, :win) do
    add_win = fn team -> %{team | mp: team.mp + 1, w: team.w + 1, p: team.p + 3} end
    add_loss = fn team -> %{team | mp: team.mp + 1, l: team.l + 1} end

    map
    |> Map.update(winner, add_win.(@default_team), add_win)
    |> Map.update(loser, add_loss.(@default_team), add_loss)
  end

  # Renders the table: points descending, ties broken alphabetically.
  # (Replaces the previous no-op Enum.map and hand-rolled comparator with a
  # single sort_by on {-points, name}.)
  defp print(map) do
    map
    |> Enum.sort_by(fn {team, results} -> {-results.p, team} end)
    |> Enum.reduce("Team | MP | W | D | L | P", fn {team, v}, acc ->
      acc <>
        "\n" <>
        String.pad_trailing(team, 31) <>
        "| #{v.mp} | #{v.w} | #{v.d} | #{v.l} | #{v.p |> Integer.to_string() |> String.pad_leading(2)}"
    end)
  end
end
|
exercism/elixir/tournament/lib/tournament.ex
| 0.729134
| 0.521471
|
tournament.ex
|
starcoder
|
defmodule Accent.Plug.Response do
  @moduledoc """
  Transforms the keys of an HTTP response to the case requested by the client.
  A client can request what case the keys are formatted in by passing the case
  as a header in the request. By default the header key is `Accent`. If the
  client does not request a case or requests an unsupported case then a default
  case defined by `:default_case` will be used. If no default case is provided
  then no conversion will happen. By default the supported cases are `camel`,
  `pascal` and `snake`.
  ## Options
  * `:default_case` - module used to case the response when the client does not
    request a case or requests an unsupported case. When not provided then no
    conversation will happen for the above scenarios. Defaults to `nil`.
  * `:header` - the HTTP header used to determine the case to convert the
    response body to before sending the response (default: `Accent`)
  * `:json_codec` - module used to encode and decode JSON. The module is
    expected to define `decode!/1` and `encode!/1` functions (required).
  * `:supported_cases` - map that defines what cases a client can request. By
    default `camel`, `pascal` and `snake` are supported.
  ## Examples
  ```
  plug Accent.Plug.Response, default_case: Accent.Case.Snake,
                             header: "x-accent",
                             supported_cases: %{"pascal" => Accent.Case.Pascal},
                             json_codec: Jason
  ```
  """
  import Plug.Conn

  # Cases a client may request out of the box.
  @default_cases %{
    "camel" => Accent.Case.Camel,
    "pascal" => Accent.Case.Pascal,
    "snake" => Accent.Case.Snake
  }

  @doc false
  # Normalizes plug options into a map; :json_codec is mandatory.
  def init(opts \\ []) do
    %{
      default_case: opts[:default_case] || nil,
      header: opts[:header] || "accent",
      json_codec:
        opts[:json_codec] ||
          raise(ArgumentError, "Accent.Plug.Response expects a :json_codec option"),
      supported_cases: opts[:supported_cases] || @default_cases
    }
  end

  @doc false
  # Registers the conversion as a before-send callback so it runs after the
  # response body has been set.
  def call(conn, opts) do
    if do_call?(conn, opts) do
      register_before_send(conn, fn conn -> before_send_callback(conn, opts) end)
    else
      conn
    end
  end

  # private

  # Re-encodes the JSON response body with converted keys; leaves non-JSON
  # responses untouched.
  defp before_send_callback(conn, opts) do
    response_content_type =
      conn
      |> get_resp_header("content-type")
      |> Enum.at(0)

    # Note - we don't support "+json" content types, and probably shouldn't add
    # as a general feature because they may have specifications for the param
    # names - e.g. https://tools.ietf.org/html/rfc7265#page-6 that mean the
    # translation would be inappropriate
    is_json_response = String.contains?(response_content_type || "", "application/json")

    if is_json_response do
      json_codec = opts[:json_codec]

      resp_body =
        conn.resp_body
        |> json_codec.decode!
        |> Accent.Case.convert(select_transformer(conn, opts))
        |> json_codec.encode!

      %{conn | resp_body: resp_body}
    else
      conn
    end
  end

  # True when the request is JSON and a case transformer is applicable.
  # (Previously returned the transformer module itself as the truthy value;
  # now returns a proper boolean — same decisions in call/2.)
  defp do_call?(conn, opts) do
    content_type =
      conn
      |> get_req_header("content-type")
      |> Enum.at(0)

    is_json = String.contains?(content_type || "", "application/json")
    has_transformer = not is_nil(select_transformer(conn, opts))
    is_json and has_transformer
  end

  # Picks the case module from the request header, falling back to the
  # configured default (possibly nil).
  defp select_transformer(conn, opts) do
    accent = get_req_header(conn, opts[:header]) |> Enum.at(0)
    default_case = opts[:default_case]
    supported_cases = opts[:supported_cases]
    supported_cases[accent] || default_case
  end
end
|
lib/accent/plug/response.ex
| 0.884039
| 0.782704
|
response.ex
|
starcoder
|
defmodule Wand.CLI.Commands.Upgrade do
  use Wand.CLI.Command
  alias Wand.CLI.Display
  alias Wand.CLI.Commands.Upgrade

  @banner """
  # Upgrade
  Upgrade dependencies in your wand.json file
  ### Usage
  ```
  wand upgrade
  wand upgrade poison ex_doc --latest
  wand upgrade --skip=cowboy --skip=mox
  ```
  ## Options
  ```
  --download  Run mix deps.get after adding (default: **true**)
  --latest    Upgrade to the latest version, ignoring wand.json restrictions
  --pre       Allow upgrading to prerelease versions, if available
  --skip      Do not upgrade the following package
  ```
  The following flags are additionally allowed if `--latest` is passed in:
  ```
  --exact     After updating, set the version in wand.json with == semantics
  --tilde     After updating, set the version in wand.json with ~> semantics
  ```
  """
  # FIX(review): --exact previously claimed "^ semantics", contradicting the
  # moduledoc (caret is the default; --exact/--tilde override it).

  @moduledoc """
  #{@banner}
  By default, upgrade will respect the restrictions set in your wand.json file. Meaning,
  if your requirement is `>= 3.2.0 and < 4.0.0`, and the latest version in hex is `3.7.3`, wand will update the lower bound of wand.json to `3.7.3`, but leave the upper bound alone.
  If you want to update the upper bound, you need to use the --latest flag. The latest flag will always grab the newest (non pre) version in hex, and set that as the new lower bound. The upper bound is set to the next major version, unless you pass in the `--exact` or `--tilde` flags to override this behavior.
  Wand prefers setting versions by the caret semantic. That means that the lower bound is the exact version specified, and the upper bound is the next major version. If the version is less than 1.0.0, the upper bound becomes the next minor version, and so forth.
  """

  defmodule Options do
    @moduledoc false
    # Parsed CLI options; :mode is the version-pinning semantic to apply
    # after upgrading (:caret by default, :exact/:tilde with --latest).
    defstruct download: true,
              latest: false,
              pre: false,
              skip: [],
              mode: :caret
  end

  @doc false
  @impl true
  def help(:banner), do: Display.print(@banner)

  @doc false
  @impl true
  def help(:verbose), do: Display.print(@moduledoc)

  @doc false
  @impl true
  def help({:invalid_flag, flag}) do
    # FIX(review): --pre was missing from this list even though get_flags/1
    # accepts it.
    """
    #{flag} is invalid.
    Allowed flags are --download, --exact, --latest, --pre, --skip, and --tilde.
    See wand help upgrade --verbose for more information
    """
    |> Display.print()
  end

  @impl true
  def options() do
    [
      require_core: true,
      load_wand_file: true
    ]
  end

  @doc false
  @impl true
  def validate(args) do
    # The first positional item is the "upgrade" command name itself; the
    # rest are package names.
    {switches, [_ | commands], errors} = OptionParser.parse(args, strict: get_flags(args))

    case Wand.CLI.Command.parse_errors(errors) do
      :ok -> {:ok, parse(commands, switches)}
      error -> error
    end
  end

  @doc false
  @impl true
  def execute(args, extras), do: Upgrade.Execute.execute(args, extras)

  @doc false
  @impl true
  def after_save(args), do: Upgrade.Execute.after_save(args)

  @doc false
  @impl true
  def handle_error(key, data), do: Upgrade.Execute.handle_error(key, data)

  # Converts parsed switches into {packages, %Options{}}.
  defp parse(commands, switches) do
    download = Keyword.get(switches, :download, true)

    options = %Options{
      download: download,
      latest: Keyword.get(switches, :latest, false),
      mode: get_mode(switches),
      pre: Keyword.get(switches, :pre, false),
      skip: Keyword.get_values(switches, :skip)
    }

    {get_packages(commands), options}
  end

  # No explicit packages means upgrade everything.
  defp get_packages([]), do: :all
  defp get_packages(commands), do: commands

  # --exact and --tilde override the default (:caret) pinning mode.
  defp get_mode(switches) do
    cond do
      Keyword.get(switches, :exact) -> :exact
      Keyword.get(switches, :tilde) -> :tilde
      true -> %Options{}.mode
    end
  end

  # --exact/--tilde are only accepted together with --latest, so a lenient
  # first parse checks for --latest before choosing the strict flag set.
  defp get_flags(args) do
    base_flags = [
      download: :boolean,
      latest: :boolean,
      pre: :boolean,
      skip: :keep
    ]

    latest_flags = [
      exact: :boolean,
      tilde: :boolean
    ]

    {switches, _commands, _errors} = OptionParser.parse(args)

    case Keyword.get(switches, :latest) do
      true -> latest_flags ++ base_flags
      _ -> base_flags
    end
  end
end
|
lib/cli/commands/upgrade.ex
| 0.669853
| 0.5564
|
upgrade.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.