code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule FDS.ListArray do
  @moduledoc """
  A persistent array-like collection backed by a random-access list (`:fds_ralist`).

  Items are pushed onto the front in O(1); indexed reads and writes go through
  the underlying ralist. The element count is tracked in the struct so
  `count/1` and bounds checks are O(1).
  """

  defstruct [:ralist, :count]

  @type t :: %__MODULE__{ralist: :fds_ralist.ralist(any()), count: non_neg_integer()}

  @doc "Creates an empty list array."
  @spec new() :: t
  def new() do
    %__MODULE__{ralist: :fds_ralist.new(), count: 0}
  end

  @doc "Pushes `item` onto the front of the array."
  @spec push(t, term()) :: t
  def push(%__MODULE__{ralist: ralist, count: count} = list_array, item) do
    %{list_array | ralist: :fds_ralist.cons(item, ralist), count: count + 1}
  end

  @doc "Returns `{:ok, item}` for the front item, or `:error` when empty."
  @spec peek(t) :: :error | {:ok, term()}
  def peek(%__MODULE__{count: 0}), do: :error
  def peek(%__MODULE__{ralist: ralist}), do: {:ok, :fds_ralist.head(ralist)}

  @doc "Removes the front item, returning `{:ok, rest}` or `:error` when empty."
  @spec drop(t) :: :error | {:ok, t()}
  def drop(%__MODULE__{count: 0}), do: :error

  def drop(%__MODULE__{ralist: ralist, count: count}) do
    {:ok, %__MODULE__{ralist: :fds_ralist.tail(ralist), count: count - 1}}
  end

  @doc """
  Removes and returns the front item as `{:ok, item, rest}`, or `:error` when
  the array is empty.
  """
  @spec pop(t) :: :error | {:ok, term(), t()}
  def pop(%__MODULE__{count: 0}), do: :error

  def pop(%__MODULE__{ralist: ralist, count: count}) do
    {:ok, :fds_ralist.head(ralist), %__MODULE__{ralist: :fds_ralist.tail(ralist), count: count - 1}}
  end

  @doc "Returns `{:ok, item}` at zero-based `index`, or `:error` when out of bounds."
  @spec get(t, non_neg_integer) :: :error | {:ok, term()}
  def get(%__MODULE__{}, index) when index < 0, do: :error
  def get(%__MODULE__{count: count}, index) when index >= count, do: :error

  # Bounds were checked by the clauses above, so `count` is no longer needed
  # here (previously it was bound but unused, causing a compiler warning).
  def get(%__MODULE__{ralist: ralist}, index) do
    {:ok, :fds_ralist.lookup(index, ralist)}
  end

  @doc "Replaces the item at `index`, returning `{:ok, updated}` or `:error` when out of bounds."
  @spec set(t, non_neg_integer, term()) :: :error | {:ok, t()}
  def set(%__MODULE__{}, index, _value) when index < 0, do: :error
  def set(%__MODULE__{count: count}, index, _value) when index >= count, do: :error

  # As in get/2, bounds are already validated; dropping the unused `count`
  # binding silences a compiler warning.
  def set(%__MODULE__{ralist: ralist} = list_array, index, value) do
    {:ok, %{list_array | ralist: :fds_ralist.update(index, value, ralist)}}
  end

  @doc "Folds `fun` over the items, starting from the front of the array."
  @spec foldl(t, term(), (term(), term() -> term())) :: term()
  def foldl(%__MODULE__{ralist: ralist}, acc, fun) do
    :fds_ralist.foldl(fun, acc, ralist)
  end

  @doc "Returns the items as a list in front-to-back order."
  @spec to_list(t) :: [term()]
  def to_list(%__MODULE__{} = list_array) do
    # foldl prepends, producing back-to-front order; reverse restores it.
    :lists.reverse(foldl(list_array, [], &[&1 | &2]))
  end

  @doc "Returns the number of items. O(1)."
  @spec count(t) :: non_neg_integer
  def count(%__MODULE__{count: count}), do: count
end
defimpl Collectable, for: FDS.ListArray do
  # Collecting into a ListArray carries a plain `{ralist, count}` tuple as the
  # accumulator and only rebuilds the struct once collection is `:done`.
  def into(%FDS.ListArray{ralist: ralist, count: count}) do
    collector = fn
      {acc_ralist, acc_count}, {:cont, item} ->
        {:fds_ralist.cons(item, acc_ralist), acc_count + 1}

      {acc_ralist, acc_count}, :done ->
        %FDS.ListArray{ralist: acc_ralist, count: acc_count}

      _acc, :halt ->
        :ok
    end

    {{ralist, count}, collector}
  end
end
defimpl Enumerable, for: FDS.ListArray do
  # Walks the raw ralist front-to-back, honoring the standard Enumerable
  # accumulator commands (:halt / :suspend / :cont).
  defp do_reduce(_ralist, {:halt, acc}, _fun), do: {:halted, acc}
  defp do_reduce(ralist, {:suspend, acc}, fun), do: {:suspended, acc, &do_reduce(ralist, &1, fun)}
  # NOTE(review): assumes an empty ralist is represented as the literal `[]` —
  # confirm against the :fds_ralist implementation.
  defp do_reduce([], {:cont, acc}, _fun), do: {:done, acc}
  defp do_reduce(ralist, {:cont, acc}, fun) do
    do_reduce(:fds_ralist.tail(ralist), fun.(:fds_ralist.head(ralist), acc), fun)
  end

  def reduce(%FDS.ListArray{ralist: ralist}, acc, fun) do
    do_reduce(ralist, acc, fun)
  end

  # O(1): the struct tracks its own element count.
  def count(%FDS.ListArray{count: count}), do: {:ok, count}
  # {:error, @protocol} tells Enumerable to fall back to the default
  # reduce-based implementations for membership and slicing.
  def member?(%FDS.ListArray{}, _item), do: {:error, @protocol}
  def slice(%FDS.ListArray{}), do: {:error, @protocol}
end
defimpl Inspect, for: FDS.ListArray do
  import Inspect.Algebra

  # Renders the array as `#ListArray<[e1,e2,...]>` using the algebra document API.
  def inspect(list_array, opts) do
    elements = FDS.ListArray.to_list(list_array)

    container_doc("#ListArray<[", elements, "]>", opts, &to_doc/2,
      separator: ",",
      break: :maybe
    )
  end
end
|
lib/fds/list_array.ex
| 0.719285
| 0.571468
|
list_array.ex
|
starcoder
|
defmodule Grizzly.CommandClass.Configuration.BulkGet do
  @moduledoc """
  Command module for working with the Configuration command class BULK_GET command

  Command Options:

    * `:start` - the starting number of the configuration params
    * `:number` - the number of params to get
    * `:seq_number` - the sequence number used by Z/IP packet
    * `:parameter_values` - a list of the parameter values that are returned from Z-Wave
    * `:acked` - whether the command has been successfully acked by Z-Wave
    * `:retries` - the number of attempts to send the command (default 2)
  """
  @behaviour Grizzly.Command

  alias Grizzly.Packet
  alias Grizzly.Command.{EncodeError, Encoding}
  # Aliased so both bulk_report clauses below match the same module. The
  # previous version matched bare `Configuration` in the partial-report clause,
  # which resolved to `Elixir.Configuration` and could never match.
  alias Grizzly.CommandClass.Configuration

  require Logger

  @type t :: %__MODULE__{
          start: integer,
          number: byte,
          seq_number: Grizzly.seq_number(),
          parameter_values: [any()],
          acked: boolean,
          retries: non_neg_integer()
        }

  @type opt ::
          {:start, integer}
          | {:number, byte}
          | {:seq_number, Grizzly.seq_number()}
          | {:retries, non_neg_integer()}

  defstruct start: nil,
            number: nil,
            seq_number: nil,
            acked: false,
            parameter_values: [],
            retries: 2

  @doc "Builds the command struct from the given options."
  @spec init([opt]) :: {:ok, t}
  def init(opts) do
    {:ok, struct(__MODULE__, opts)}
  end

  @doc """
  Encodes the command into a Z/IP binary packet.

  Validates that `:start` fits in two bytes and `:number` in one byte before
  building the binary; otherwise returns `{:error, %EncodeError{}}`.
  """
  @spec encode(t) :: {:ok, binary} | {:error, EncodeError.t()}
  def encode(%__MODULE__{seq_number: seq_number, start: start, number: number} = command) do
    with {:ok, _encoded} <-
           Encoding.encode_and_validate_args(command, %{
             start: {:bytes, 2},
             number: :byte
           }) do
      # 0x70 = Configuration command class, 0x08 = BULK_GET command.
      binary =
        Packet.header(seq_number) <>
          <<0x70, 0x08, start::size(2)-big-integer-signed-unit(8), number>>

      {:ok, binary}
    end
  end

  @doc """
  Handles a Z/IP packet response for this command.

  State machine:
    * nack with no retries left -> `{:done, {:error, :nack_response}}`
    * nack with retries left -> `{:retry, command}` with retries decremented
    * nack_waiting -> `{:queued, command}` if the node is sleeping
    * ack -> `{:continue, command}` with `acked: true`
    * bulk_report with `to_follow: 0` -> `{:done, {:ok, result}}`
    * partial bulk_report -> `{:continue, command}` accumulating values
  """
  @spec handle_response(t(), Packet.t()) ::
          {:continue, t()}
          | {:done, {:error, :nack_response}}
          | {:done, {:ok, %{start: any, values: any}}}
          | {:queued, t()}
          | {:retry, t()}
  def handle_response(
        %__MODULE__{seq_number: seq_number, retries: 0},
        %Packet{
          seq_number: seq_number,
          types: [:nack_response]
        }
      ),
      do: {:done, {:error, :nack_response}}

  def handle_response(
        %__MODULE__{seq_number: seq_number, retries: n} = command,
        %Packet{
          seq_number: seq_number,
          types: [:nack_response]
        }
      ),
      do: {:retry, %{command | retries: n - 1}}

  def handle_response(
        %__MODULE__{seq_number: seq_number} = command,
        %Packet{
          seq_number: seq_number,
          types: [:nack_response, :nack_waiting]
        } = packet
      ) do
    if Packet.sleeping_delay?(packet) do
      {:queued, command}
    else
      {:continue, command}
    end
  end

  def handle_response(
        %__MODULE__{acked: false, seq_number: seq_number} = command,
        %Packet{
          seq_number: seq_number,
          types: [:ack_response]
        }
      ) do
    {:continue, %__MODULE__{command | acked: true}}
  end

  # Final bulk report: no more reports to follow, so the accumulated values
  # plus this report's values are the complete result.
  def handle_response(
        %__MODULE__{parameter_values: parameter_values, acked: true},
        %Packet{
          body:
            %{
              command_class: Configuration,
              command: :bulk_report,
              to_follow: 0,
              parameter_offset: parameter_offset,
              values: values
            } = body
        }
      ) do
    _ = Logger.debug("Handling last bulk report #{inspect(body)}")
    {:done, {:ok, %{start: parameter_offset, values: parameter_values ++ values}}}
  end

  # Partial bulk report: accumulate values and keep waiting for more reports.
  def handle_response(
        %__MODULE__{parameter_values: parameter_values, acked: true} = command,
        %Packet{
          body:
            %{
              command_class: Configuration,
              command: :bulk_report,
              to_follow: n,
              parameter_offset: _parameter_offset,
              values: values
            } = body
        }
      ) do
    _ = Logger.debug("Handling partial bulk report #{inspect(body)} (#{n} to follow)")
    {:continue, %__MODULE__{command | parameter_values: parameter_values ++ values}}
  end

  # Any other packet is ignored; keep waiting.
  def handle_response(command, _packet) do
    {:continue, command}
  end
end
|
lib/grizzly/command_class/configuration/bulk_get.ex
| 0.775009
| 0.459258
|
bulk_get.ex
|
starcoder
|
defmodule EctoSearcher.Utils.SearchCondition do
  @moduledoc """
  Builds SearchCondition from params

  This module is internal. Use at your own risk.
  """

  @enforce_keys [:field, :matcher, :value]
  defstruct [:field, :matcher, :value]

  @doc """
  Builds a list of `%SearchCondition{}` from params

  ## Usage
  ```elixir
  searchable_fields = [:name, :description]
  search_params = %{"name_eq" => "<NAME>", "description_cont" => "My president"}
  EctoSearcher.Utils.SearchCondition.from_params(search_params, searchable_fields)
  # => [
  #   EctoSearcher.Utils.SearchCondition(field: :name, matcher: "eq", value: "<NAME>"),
  #   EctoSearcher.Utils.SearchCondition(field: :description, matcher: "cont", value: "My president"),
  # ]
  ```
  """
  def from_params(search_params, searchable_fields) do
    # Sort descending by name so the longest matching field wins in build/2
    # (e.g. :name_first is preferred over :name for the key "name_first_eq").
    searchable_fields = Enum.sort_by(searchable_fields, &to_string/1, &>=/2)

    # Prepend and reverse instead of `acc ++ [x]`, which is O(n²).
    search_params
    |> Enum.reduce([], fn search_param, conditions ->
      case build(search_param, searchable_fields) do
        nil -> conditions
        condition -> [condition | conditions]
      end
    end)
    |> Enum.reverse()
  end

  @doc """
  Builds `%SearchCondition{}` from search expression.

  Returns `nil` when the value is blank or no searchable field matches.

  ## Usage
  ```elixir
  searchable_fields = [:name, :description]
  search_expression = {"name_eq", "<NAME>"}
  EctoSearcher.Utils.SearchCondition.build(search_expression, searchable_fields)
  # => EctoSearcher.Utils.SearchCondition(field: :name, matcher: "eq", value: "<NAME>")
  ```
  """
  def build(search_expression, searchable_fields)

  def build({search_key, value}, searchable_fields) do
    if !blank?(value) do
      case field_and_matcher(search_key, searchable_fields) do
        {field, matcher} -> %__MODULE__{field: field, matcher: matcher, value: value}
        _ -> nil
      end
    else
      nil
    end
  end

  def build(_, _), do: nil

  # Resolves the searchable field for the key, then splits off the matcher.
  defp field_and_matcher(search_key, searchable_fields) do
    field = search_field_name(search_key, searchable_fields)

    if field do
      split_into_field_and_matcher(search_key, field)
    end
  end

  # Finds the first searchable field whose "<field>_" prefixes the key.
  defp search_field_name(search_key, searchable_fields) do
    Enum.find(searchable_fields, fn searchable_field ->
      String.starts_with?(search_key, "#{searchable_field}_")
    end)
  end

  # Strips the field prefix exactly once. `String.replace_leading/3` would
  # strip repeated prefixes (e.g. "name_name_eq" with field :name -> "eq"),
  # so `String.replace_prefix/3` is used instead.
  defp split_into_field_and_matcher(search_key, field) do
    matcher_name = String.replace_prefix(search_key, "#{field}_", "")
    {field, matcher_name}
  end

  defp blank?(value) do
    is_nil(value) || value == ""
  end
end
|
lib/ecto_searcher/utils/search_condition.ex
| 0.788013
| 0.741651
|
search_condition.ex
|
starcoder
|
defmodule Day03.Path do
  @moduledoc """
  Functions for working with paths of wires.
  """

  @typedoc """
  A list of path segments that forms a complete wire path.
  """
  @type t :: list(segment)

  @typedoc """
  A single segment of a path, with a direction and distance.
  """
  @type segment :: {direction, integer}

  @typedoc """
  A cardinal direction for a segment of a path.
  """
  @type direction :: :up | :down | :left | :right

  @doc """
  Reads a wire's path from a description of its segments.

  ## Example

      iex> Day03.Path.from_string("R75,D30,R83,U83,L12")
      [
        {:right, 75},
        {:down, 30},
        {:right, 83},
        {:up, 83},
        {:left, 12},
      ]
  """
  @spec from_string(String.t()) :: t
  def from_string(str) do
    for part <- String.split(str, ","), do: segment_from_string(part)
  end

  # Parses one "<letter><digits>" description, e.g. "R75" -> {:right, 75}.
  @spec segment_from_string(String.t()) :: segment
  defp segment_from_string(<<letter::utf8, digits::binary>>) do
    steps = String.to_integer(digits)

    case letter do
      ?U -> {:up, steps}
      ?D -> {:down, steps}
      ?L -> {:left, steps}
      ?R -> {:right, steps}
    end
  end

  @doc """
  Walk the path on the given map, marking the number of steps required to get to
  each point along the path. Returns the updated map.
  """
  @spec apply(Day03.Map.t(), Day03.Map.id(), t) :: Day03.Map.t()
  def apply(m, id, path) do
    path
    |> Enum.reduce({m, {0, 0}, 0}, fn segment, {acc_map, position, steps} ->
      apply_segment(acc_map, id, segment, position, steps)
    end)
    |> elem(0)
  end

  # Marks every point of a single segment on the map, threading through the
  # running step count and returning the segment's end point for the next one.
  defp apply_segment(m, id, segment, start, steps) do
    points = segment_points(segment, start)

    marked =
      points
      |> Enum.with_index(steps + 1)
      |> Enum.reduce(m, fn {point, step_count}, acc ->
        Day03.Map.set(acc, point, id, step_count)
      end)

    {marked, List.last(points), steps + length(points)}
  end

  # All points covered by a segment in walking order (the start point itself
  # is excluded; it was marked by the previous segment).
  defp segment_points({dir, len}, {x0, y0}) do
    {dx, dy} = direction_offset(dir)
    Enum.map(1..len, fn i -> {x0 + i * dx, y0 + i * dy} end)
  end

  # Screen-style coordinates: y grows downward.
  defp direction_offset(:up), do: {0, -1}
  defp direction_offset(:down), do: {0, 1}
  defp direction_offset(:left), do: {-1, 0}
  defp direction_offset(:right), do: {1, 0}
end
|
aoc2019_elixir/apps/day03/lib/path.ex
| 0.918562
| 0.653293
|
path.ex
|
starcoder
|
defmodule EctoMnesia.Record.Context do
  @moduledoc """
  Context stores `table`, `query` and `match_spec` that can be used for conversions between schemas and Mnesia records.
  """
  alias EctoMnesia.Table
  alias EctoMnesia.Record.Context

  defstruct table: %Context.Table{}, query: %Context.Query{}, match_spec: %Context.MatchSpec{}

  @doc """
  Creates new context table, and stores schema meta information that can be used to reconstruct query result.
  """
  def new(table, schema) when is_atom(table),
    do: table |> Atom.to_string() |> new(schema)

  def new(table, schema) when is_binary(table) and is_atom(schema) do
    table_name = Table.get_name(table)

    mnesia_attributes =
      case Table.attributes(table_name) do
        {:ok, attributes} -> attributes
        {:error, :no_exists} -> []
      end

    # Associate each attribute with its zero-based index and its Mnesia
    # match-spec placeholder (:"$1", :"$2", ...). `Enum.with_index/1` replaces
    # the previous `Enum.map(1..length(attrs), &Enum.at/2)` construction, which
    # was O(n²) and broke on an empty attribute list (`1..0` is a decreasing
    # range, so it iterated over [1, 0] and produced garbage entries).
    structure =
      mnesia_attributes
      |> Enum.with_index()
      |> Enum.map(fn {attribute, index} ->
        {attribute, {index, String.to_atom("$#{index + 1}")}}
      end)

    %Context{
      table: %Context.Table{schema: schema, name: table_name, structure: structure},
      query: %Context.Query{select: mnesia_attributes}
    }
  end

  @doc """
  Assigns `Ecto.Query` to a context and rebuilds MatchSpec with updated data.
  """
  def assign_query(_context, %Ecto.SubQuery{}, _sources),
    do: raise Ecto.Query.CompileError, "`Ecto.Query.subquery/1` is not supported by Mnesia adapter."

  def assign_query(_context, %Ecto.Query{havings: havings}, _sources) when is_list(havings) and length(havings) > 0,
    do: raise Ecto.Query.CompileError, "`Ecto.Query.having/3` is not supported by Mnesia adapter."

  def assign_query(context, %Ecto.Query{} = query, sources) do
    context
    |> update_query_select(query)
    |> update_query_sources(sources)
    |> build_match_spec(query)
  end

  # Stores `Ecto.Query.select` value into `context.query.select` field.
  defp update_query_select(context, nil),
    do: context

  defp update_query_select(context, %Ecto.Query{select: select}),
    do: update_query_select(context, select)

  defp update_query_select(context, select),
    do: %{context | query: %{context.query | select: select}}

  # Stores query sources in the `context.query.sources` field.
  defp update_query_sources(context, sources),
    do: %{context | query: %{context.query | sources: sources}}

  @doc """
  Returns match spec that can be used in `:mnesia.select/3`.
  """
  def get_match_spec(%Context{match_spec: %Context.MatchSpec{} = match_spec}),
    do: Context.MatchSpec.dump(match_spec)

  # Builds new match_spec on query updates.
  defp build_match_spec(context, query),
    do: Context.MatchSpec.update(context, query)

  @doc """
  Returns MatchSpec record field index by a `field` name.
  Raises if field is not found in a context.
  """
  def find_field_index!(field, %Context{table: %Context.Table{structure: structure, name: name}})
      when is_atom(field) do
    case Keyword.get(structure, field) do
      nil -> raise ArgumentError, "Field `#{inspect field}` does not exist in table `#{inspect name}`"
      {index, _placeholder} -> index
    end
  end

  @doc """
  Returns a Mnesia MatchSpec body placeholder for a field.
  Raises if field is not found in a context.
  """
  def find_field_placeholder!(field, %Context{table: %Context.Table{structure: structure, name: name}})
      when is_atom(field) do
    case Keyword.get(structure, field) do
      nil -> raise ArgumentError, "Field `#{inspect field}` does not exist in table `#{inspect name}`"
      {_index, placeholder} -> placeholder
    end
  end

  # Non-atom values (e.g. already-resolved placeholders) pass through unchanged.
  def find_field_placeholder!(field, %Context{}), do: field

  @doc """
  Returns MatchSpec body placeholders for all fields in a context.
  """
  def get_fields_placeholders(%Context{table: %Context.Table{structure: structure}}) do
    Enum.map(structure, fn {_name, {_index, placeholder}} -> placeholder end)
  end
end
|
lib/ecto_mnesia/record/context.ex
| 0.818302
| 0.488466
|
context.ex
|
starcoder
|
defmodule Exhort.SAT.Constraint do
  @moduledoc """
  A constraint on the model.

  The binary constraints are:

  ```
  :< | :<= | :== | :>= | :> | :"abs=="
  ```

  The list constraints are:

  ```
  :"all!=" | :no_overlap
  ```

  The expression must include a boundary: `<`, `<=`, `==`, `>=`, `>`.

  ```
  x < y
  ```

  The components of the expression may be simple mathematical expressions,
  including the use of `+` and `*`:

  ```
  x * y == z
  ```

  The `sum/1` function may be used to sum over a series of terms:

  ```
  sum(x + y) == z
  ```

  The variables in the expression are defined in the model and do not by default
  reference the variables in Elixir scope. The pin operator, `^` may be used to
  reference a scoped Elixir variable.

  For example, where `x` is a model variable (e.g., `def_int_var(x, {0, 3}`))
  and `y` is an Elixir variable (e.g., `y = 2`):

  ```
  x < ^y
  ```

  A `for` comprehension may be used to generate list values:

  ```
  sum(for {x, y} <- ^list, do: x * y) == z
  ```

  As a larger example:

  ```
  y = 20
  z = [{0, 1}, {2, 3}, {4, 5}]

  Builder.new()
  |> Builder.def_int_var(x, {0, 3})
  |> Builder.constrain(sum(for {a, b} <- ^z, do: ^a * ^b) < y)
  |> Builder.build()
  ...
  ```
  """
  alias Exhort.SAT.DSL
  alias __MODULE__

  @type constraint :: :< | :<= | :== | :>= | :> | :"abs==" | :"all!=" | :no_overlap
  @type t :: %__MODULE__{}
  defstruct [:res, :defn]

  @doc """
  Define a bounded constraint.

  Rewrites `lhs == abs(x)` into the `:"abs=="` constraint, then transforms
  both sides through the expression DSL at compile time.
  """
  defmacro new(expr, opts \\ []) do
    expr =
      case expr do
        {:==, m1, [lhs, {:abs, _m2, [var]}]} ->
          {:"abs==", m1, [lhs, var]}

        expr ->
          expr
      end

    {op, _, [lhs, rhs]} = expr
    lhs = DSL.transform_expression(lhs)
    rhs = DSL.transform_expression(rhs)
    opts = Enum.map(opts, &DSL.transform_expression(&1))

    quote do
      %Constraint{defn: {unquote(lhs), unquote(op), unquote(rhs), unquote(opts)}}
    end
  end

  @doc """
  Define a constraint on the model using variables.

  - `constraint` is specified as an atom. See `Exhort.SAT.Constraint`.
  - `lhs` and `rhs` may each either be an atom, string, `LinearExpression`, or
    an existing `BoolVar` or `IntVar`.
  - `opts` may specify a restriction on the constraint:
    - `if: BoolVar` specifies that a constraint only takes effect if `BoolVar`
      is true
    - `unless: BoolVar` specifies that a constraint only takes effect if
      `BoolVar` is false
  - `:==` - `lhs == rhs`
  - `:abs==` - `lhs == abs(rhs)`
  - `:"all!="` - Require each element the provide list has a different value
    from all the rest
  """
  # Spec fixed: this function builds and returns a %Constraint{}, not a
  # Builder.t() (the previous spec's return type was wrong).
  @spec constrain(
          lhs :: atom() | String.t() | BoolVar.t() | IntVar.t() | LinearExpression.t(),
          constraint :: Constraint.constraint(),
          rhs :: atom() | String.t() | BoolVar.t() | IntVar.t() | LinearExpression.t(),
          opts :: [{:if, BoolVar.t()}] | [{:unless, BoolVar.t()}]
        ) :: Constraint.t()
  def constrain(lhs, constraint, rhs, opts \\ []) do
    %Constraint{defn: {lhs, constraint, rhs, opts}}
  end

  @doc """
  Create a constraint that ensures no overlap among the variables.
  """
  @spec no_overlap(list(), Keyword.t()) :: Exhort.SAT.Constraint.t()
  def no_overlap(list, opts \\ []) do
    %Constraint{defn: {:no_overlap, list, opts}}
  end

  @doc """
  Create a constraint that ensures each item in the list is different in the
  solution.
  """
  @spec all_different(list(), Keyword.t()) :: Exhort.SAT.Constraint.t()
  def all_different(list, opts \\ []) do
    %Constraint{defn: {:"all!=", list, opts}}
  end
end
|
lib/exhort/sat/constraint.ex
| 0.904252
| 0.989182
|
constraint.ex
|
starcoder
|
defmodule Plaid.Item do
  @moduledoc """
  Functions for Plaid `item` endpoint.
  """

  import Plaid, only: [make_request_with_cred: 4, get_cred: 0]

  alias Plaid.Utils

  @derive Jason.Encoder
  defstruct available_products: [], billed_products: [], error: nil,
            institution_id: nil, item_id: nil, webhook: nil, request_id: nil

  @type t :: %__MODULE__{available_products: [String.t],
                         billed_products: [String.t],
                         error: String.t,
                         institution_id: String.t,
                         item_id: String.t,
                         webhook: String.t,
                         request_id: String.t
                        }
  @type params :: %{required(atom) => String.t}
  @type cred :: %{required(atom) => String.t}

  @endpoint "item"

  @doc """
  Gets an Item.

  Parameters
  ```
  %{access_token: "access-env-identifier"}
  ```
  """
  @spec get(params, cred | nil) :: {:ok, Plaid.Item.t} | {:error, Plaid.Error.t}
  def get(params, cred \\ get_cred()) do
    post("#{@endpoint}/get", cred, params)
  end

  @doc """
  Exchanges a public token for an access token and item id.

  Parameters
  ```
  %{public_token: "public-env-identifier"}
  ```

  Response
  ```
  {:ok, %{access_token: "access-env-identifier", item_id: "some-id", request_id: "f24wfg"}}
  ```
  """
  @spec exchange_public_token(params, cred | nil) :: {:ok, map} | {:error, Plaid.Error.t}
  def exchange_public_token(params, cred \\ get_cred()) do
    post("#{@endpoint}/public_token/exchange", cred, params)
  end

  @doc """
  Creates a public token. To be used to put Plaid Link into update mode.

  Parameters
  ```
  %{access_token: "access-env-identifier"}
  ```

  Response
  ```
  {:ok, %{public_token: "access-env-identifier", expiration: 3600, request_id: "kg414f"}}
  ```
  """
  @spec create_public_token(params, cred | nil) :: {:ok, map} | {:error, Plaid.Error.t}
  def create_public_token(params, cred \\ get_cred()) do
    post("#{@endpoint}/public_token/create", cred, params)
  end

  @doc """
  Updates an Item's webhook.

  Parameters
  ```
  %{access_webhook: "access-env-identifier", webhook: "http://mywebsite/api"}
  ```
  """
  @spec update_webhook(params, cred | nil) :: {:ok, Plaid.Item.t} | {:error, Plaid.Error.t}
  def update_webhook(params, cred \\ get_cred()) do
    post("#{@endpoint}/webhook/update", cred, params)
  end

  @doc """
  Invalidates access token and returns a new one.

  Parameters
  ```
  %{access_token: "access-env-identifier"}
  ```

  Response
  ```
  {:ok, %{new_access_token: "access-env-identifier", request_id: "gag8fs"}}
  ```
  """
  @spec rotate_access_token(params, cred | nil) :: {:ok, map} | {:error, Plaid.Error.t}
  def rotate_access_token(params, cred \\ get_cred()) do
    post("#{@endpoint}/access_token/invalidate", cred, params)
  end

  @doc """
  Updates a V1 access token to V2.

  Parameters
  ```
  %{access_token_v1: "<PASSWORD>"}
  ```

  Response
  ```
  {:ok, %{access_token: "access-env-identifier", item_id: "some-id", request_id: "f24wfg"}}
  ```
  """
  @spec update_version_access_token(params, cred | nil) :: {:ok, map} | {:error, Plaid.Error.t}
  def update_version_access_token(params, cred \\ get_cred()) do
    post("#{@endpoint}/access_token/update_version", cred, params)
  end

  @doc """
  Deletes an Item.

  Parameters
  ```
  %{access_token: "<PASSWORD>"}
  ```
  """
  @spec delete(params, cred | nil) :: {:ok, map} | {:error, Plaid.Error.t}
  def delete(params, cred \\ get_cred()) do
    post("#{@endpoint}/delete", cred, params)
  end

  @doc """
  [Creates a processor token](https://developers.dwolla.com/resources/dwolla-plaid-integration.html)
  used to create an authenticated funding source with Dwolla.

  Parameters
  ```
  %{access_token: "<PASSWORD>", account_id: "plaid-account-id"}
  ```

  Response
  ```
  {:ok, %{processor_token: "some-token", request_id: "k522f2"}}
  ```
  """
  @spec create_processor_token(params, cred | nil) :: {:ok, map} | {:error, Plaid.Error.t}
  def create_processor_token(params, cred \\ get_cred()) do
    post("processor/dwolla/processor_token/create", cred, params)
  end

  # Issues a POST to `endpoint` with the given credentials and params, then
  # normalizes the response for the :item domain. Shared by every endpoint
  # wrapper above.
  defp post(endpoint, cred, params) do
    :post
    |> make_request_with_cred(endpoint, cred, params)
    |> Utils.handle_resp(:item)
  end
end
|
lib/plaid/item.ex
| 0.820721
| 0.625681
|
item.ex
|
starcoder
|
defmodule Ppc.Live do
  @moduledoc false

  # Live implementation of the `Ppc` behaviour: every callback is a thin
  # one-line delegation to the corresponding domain module.
  @behaviour Ppc

  # -- product ------------------------------------------------------------

  @impl Ppc
  def product_list(opts \\ []), do: Ppc.Product.list(opts)

  @impl Ppc
  def product_details(id, opts \\ []), do: Ppc.Product.details(id, opts)

  @impl Ppc
  def product_create(data, opts \\ []), do: Ppc.Product.create(data, opts)

  @impl Ppc
  def product_update(id, data, opts \\ []), do: Ppc.Product.update(id, data, opts)

  # -- plan ---------------------------------------------------------------

  @impl Ppc
  def plan_list(opts \\ []), do: Ppc.Plan.list(opts)

  @impl Ppc
  def plan_details(id, opts \\ []), do: Ppc.Plan.details(id, opts)

  @impl Ppc
  def plan_activate(id, opts \\ []), do: Ppc.Plan.activate(id, opts)

  @impl Ppc
  def plan_deactivate(id, opts \\ []), do: Ppc.Plan.deactivate(id, opts)

  @impl Ppc
  def plan_create(data, opts \\ []), do: Ppc.Plan.create(data, opts)

  @impl Ppc
  def plan_update(id, data, opts \\ []), do: Ppc.Plan.update(id, data, opts)

  @impl Ppc
  def plan_update_pricing(id, data, opts \\ []), do: Ppc.Plan.update_pricing(id, data, opts)

  # -- subscription -------------------------------------------------------

  @impl Ppc
  def subscription_details(id, opts \\ []), do: Ppc.Subscription.details(id, opts)

  @impl Ppc
  def subscription_create(data, opts \\ []), do: Ppc.Subscription.create(data, opts)

  @impl Ppc
  def subscription_revise(id, reason, opts \\ []), do: Ppc.Subscription.revise(id, reason, opts)

  @impl Ppc
  def subscription_activate(id, reason, opts \\ []), do: Ppc.Subscription.activate(id, reason, opts)

  @impl Ppc
  def subscription_suspend(id, reason, opts \\ []), do: Ppc.Subscription.suspend(id, reason, opts)

  @impl Ppc
  def subscription_cancel(id, reason, opts \\ []), do: Ppc.Subscription.cancel(id, reason, opts)

  @impl Ppc
  def subscription_transactions(id, opts \\ []), do: Ppc.Subscription.transactions(id, opts)

  # -- webhook ------------------------------------------------------------

  @impl Ppc
  def webhook_list(opts \\ []), do: Ppc.Webhook.list(opts)

  @impl Ppc
  def webhook_details(id, opts \\ []), do: Ppc.Webhook.details(id, opts)

  @impl Ppc
  def webhook_create(data, opts \\ []), do: Ppc.Webhook.create(data, opts)

  @impl Ppc
  def webhook_delete(id, opts \\ []), do: Ppc.Webhook.delete(id, opts)

  @impl Ppc
  def webhook_update(id, data, opts \\ []), do: Ppc.Webhook.update(id, data, opts)

  @impl Ppc
  def webhook_list_event_types(id, opts \\ []), do: Ppc.Webhook.list_event_types(id, opts)

  # -- notification -------------------------------------------------------

  @impl Ppc
  def notification_list(opts \\ []), do: Ppc.Notification.list(opts)

  @impl Ppc
  def notification_details(id, opts \\ []), do: Ppc.Notification.details(id, opts)

  @impl Ppc
  def notification_resend(id, id_list, opts \\ []), do: Ppc.Notification.resend(id, id_list, opts)
end
|
lib/ppc_live.ex
| 0.61555
| 0.424889
|
ppc_live.ex
|
starcoder
|
defmodule JsonApiQueryBuilder.Filter do
  @moduledoc """
  Filter operations for JsonApiQueryBuilder.
  """

  @doc """
  Applies filter conditions from a parsed JSON-API request to an `Ecto.Queryable.t`.

  Each filter condition will be cause the given callback to be invoked with the query, attribute and value.

  ## Relationship Filters

  There are two types of relationship filters: join filters and preload filters.

  Join filters use a dotted notation, eg: `"comments.author.articles"`, and will be used to filter the primary data using a SQL inner join.
  Eg: `%{"filter" => %{"author.has_bio" => 1}}` can be used to find all articles where the related author has a bio in the database.

  Preload filters use a nested notation, eg: `%{"author" => %{"has_bio" => 1}}`, and will only be used to filter the relationships specified in the `include` parameter.
  Eg: `%{"include" => "author", "filter" => %{"author" => %{"has_bio" => 1}}}` can be used to find all articles, and include the related authors if they have a bio.

  ## Example

      JsonApiQueryBuilder.Filter.filter(
        Article,
        %{"filter" => %{"tag" => "animals", "author.has_bio" => "1", "author.has_image" => "1", "comments" => %{"body" => "Great"}}},
        &apply_filter/3,
        relationships: ["author", "comments"]
      )

  The example above will cause the `apply_filter/3` callback to be invoked twice:

  - `apply_filter(query, "tag", "animals")`
  - `apply_filter(query, "author", %{"filter" => %{"has_bio" => "1", "has_image" => "1"}})`

  Where `apply_filter` would be implemented like:

  ```
  def apply_filter(query, "tag", val), do: from(article in query, where: ^val in article.tag_list)

  def apply_filter(query, "author", params) do
    user_query = from(User, select: [:id]) |> User.Query.filter(params)
    from(article in query, join: user in ^subquery(user_query), on: article.user_id == user.id)
  end
  ```
  """
  @spec filter(Ecto.Queryable.t, map, function, [relationships: [String.t]]) :: Ecto.Queryable.t
  def filter(query, params, callback, relationships: relationships) do
    query
    |> apply_attribute_filters(params, callback, relationships)
    |> apply_join_filters(params, callback, relationships)
  end

  # Invokes the callback once per plain (non-relationship) filter key.
  @spec apply_attribute_filters(Ecto.Queryable.t, map, function, [String.t]) :: Ecto.Queryable.t
  defp apply_attribute_filters(query, params, callback, relationships) do
    params
    |> Map.get("filter", %{})
    |> Enum.reject(fn {k, _v} -> is_relationship_filter?(relationships, k) end)
    |> Enum.reduce(query, fn {k, v}, query -> callback.(query, k, v) end)
  end

  # Groups dotted join-filter keys by their first segment and invokes the
  # callback once per relation with the remaining filters nested under "filter".
  @spec apply_join_filters(Ecto.Queryable.t, map, function, [String.t]) :: Ecto.Queryable.t
  defp apply_join_filters(query, params, callback, relationships) do
    params
    |> Map.get("filter", %{})
    |> Enum.filter(fn {k, _v} -> is_join_filter?(relationships, k) end)
    |> Enum.group_by(fn {k, _v} -> first_relationship_segment(k) end)
    |> Enum.map(fn {relation, rel_filters} -> trim_leading_relationship_from_keys(relation, rel_filters) end)
    |> Enum.reduce(query, fn {relation, params}, query -> callback.(query, relation, %{"filter" => params}) end)
  end

  @doc """
  Tests if the given string is either a join filter or a preload filter

  ## Example

      iex> JsonApiQueryBuilder.Filter.is_relationship_filter?(["articles", "comments"], "articles.comments.user")
      true

      iex> JsonApiQueryBuilder.Filter.is_relationship_filter?(["articles", "comments"], "comments")
      true

      iex> JsonApiQueryBuilder.Filter.is_relationship_filter?(["articles", "comments"], "email")
      false
  """
  @spec is_relationship_filter?([String.t], String.t) :: boolean
  def is_relationship_filter?(relationships, filter) do
    is_join_filter?(relationships, filter) || is_preload_filter?(relationships, filter)
  end

  @doc """
  Tests if the given string is a join filter.

  ## Example

      iex> JsonApiQueryBuilder.Filter.is_join_filter?(["articles", "comments"], "articles.comments.user")
      true

      iex> JsonApiQueryBuilder.Filter.is_join_filter?(["articles", "comments"], "comments")
      false

      iex> JsonApiQueryBuilder.Filter.is_join_filter?(["articles", "comments"], "email")
      false
  """
  @spec is_join_filter?([String.t], String.t) :: boolean
  def is_join_filter?(relationships, filter) do
    Enum.any?(relationships, fn relationship ->
      String.starts_with?(filter, relationship <> ".")
    end)
  end

  @doc """
  Tests if the given string is a join preload filter.

  ## Example

      iex> JsonApiQueryBuilder.Filter.is_preload_filter?(["articles", "comments"], "articles.comments.user")
      false

      iex> JsonApiQueryBuilder.Filter.is_preload_filter?(["articles", "comments"], "comments")
      true

      iex> JsonApiQueryBuilder.Filter.is_preload_filter?(["articles", "comments"], "email")
      false
  """
  @spec is_preload_filter?([String.t], String.t) :: boolean
  def is_preload_filter?(relationships, filter) do
    filter in relationships
  end

  @doc """
  Extract the first segment of a dotted relationship path.

  ## Example

      iex> JsonApiQueryBuilder.Filter.first_relationship_segment("a.b.c")
      "a"
  """
  @spec first_relationship_segment(String.t) :: String.t
  def first_relationship_segment(path) do
    path
    |> String.split(".", parts: 2)
    |> hd()
  end

  @doc """
  Removes the leading path segment from map keys after grouping has been applied.

  ## Example

      iex> JsonApiQueryBuilder.Filter.trim_leading_relationship_from_keys("article", [{"article.tag", "animals"}, {"article.comments.user.name", "joe"}])
      {"article", %{"comments.user.name" => "joe", "tag" => "animals"}}
  """
  @spec trim_leading_relationship_from_keys(String.t, list) :: {String.t, map}
  def trim_leading_relationship_from_keys(relation, rel_filters) do
    # String.replace_prefix/3 removes the prefix exactly once. The previous
    # String.trim_leading/2 stripped repeated prefixes, so a nested key like
    # "comments.comments.author" was wrongly reduced to "author" instead of
    # "comments.author".
    {
      relation,
      rel_filters
      |> Enum.map(fn {k, v} -> {String.replace_prefix(k, relation <> ".", ""), v} end)
      |> Enum.into(%{})
    }
  end
end
|
lib/json_api_query_builder/filter.ex
| 0.888544
| 0.775052
|
filter.ex
|
starcoder
|
defmodule CastParams.Schema do
  @moduledoc """
  Defines a params schema for a plug.

  A params schema is just a keyword list where keys are the parameter name
  and the value is either a valid `CastParams.Type` (ending with a `!` to mark the parameter as required).

  ## Example

      CastParams.Schema.init(age: :integer, terms: :boolean!, name: :string, weight: :float)
  """
  alias CastParams.{Param, Error, Type}

  # All primitive type atoms supported by CastParams.Type (resolved at compile time).
  @primitive_types Type.primitive_types()
  # Map from the "bang" variant of each type (e.g. :integer!) back to the plain type.
  @required_to_type Enum.reduce(@primitive_types, %{}, &Map.put(&2, :"#{&1}!", &1))
  # List of all "bang" type atoms, used as a guard membership set in parse_type/2.
  @required_names Map.keys(@required_to_type)

  @type t :: [{name :: atom(), Type.t()}]

  @doc """
  Init schema

  ## Examples

      iex> init(age: :integer)
      [%CastParams.Param{names: ["age"], type: :integer}]

      iex> init([age: :integer!])
      [%CastParams.Param{names: ["age"], type: :integer, required: true}]

      iex> init([terms: :boolean!, name: :string, age: :integer])
      [
        %CastParams.Param{names: ["terms"], type: :boolean, required: true},
        %CastParams.Param{names: ["name"], required: false, type: :string},
        %CastParams.Param{names: ["age"], required: false, type: :integer},
      ]
  """
  @spec init(options :: list()) :: [Param.t()]
  def init(options) when is_list(options) do
    # First reduce builds params by prepending (reversed order, and each
    # param's `names` list is accumulated in reverse). The second reduce
    # prepends again — restoring the original option order — while reversing
    # each param's names back to declaration order.
    options
    |> Enum.reduce([], &init_item(%Param{}, &1, &2))
    |> Enum.reduce([], fn param, acc ->
      updated_item =
        param
        |> Map.update!(:names, &Enum.reverse/1)
      [updated_item | acc]
    end)
  end

  # Leaf entry `{name, type}`: record the name and resolve the type, then
  # prepend the finished param to the accumulator.
  defp init_item(param, {name, type}, acc) when is_atom(name) and is_atom(type) do
    updated_param =
      param
      |> parse_names(name)
      |> parse_type(type)
    [updated_param | acc]
  end

  # Nested entry `{name, [...]}`: push `name` onto the current param's name
  # path and recurse into the nested options, producing one param per leaf.
  defp init_item(param, {name, options}, acc) when is_list(options) do
    updated_param = parse_names(param, name)
    options
    |> Enum.reduce(acc, &init_item(updated_param, &1, &2))
  end

  # Prepends the stringified name onto the param's name path (reversed here,
  # un-reversed in init/1).
  defp parse_names(%{names: names} = param, name) when is_atom(name) do
    parsed_name = to_string(name)
    %{param | names: [parsed_name | names]}
  end

  # Plain primitive type: store it as-is.
  @spec parse_type(Param.t(), atom()) :: Param.t()
  defp parse_type(param, type) when type in @primitive_types do
    Map.put(param, :type, type)
  end

  # Bang variant (e.g. :integer!): store the underlying type and mark required.
  defp parse_type(param, raw_type) when raw_type in @required_names do
    param
    |> Map.put(:type, @required_to_type[raw_type])
    |> Map.put(:required, true)
  end

  # Anything else is an unknown type — fail loudly at plug-definition time.
  defp parse_type(param, type) do
    raise Error, "Error invalid `#{type}` type for `#{param.names}` name."
  end
end
|
lib/cast_param/schema.ex
| 0.892818
| 0.584419
|
schema.ex
|
starcoder
|
defmodule NYSETL.Extra.Scrubber do
  # Magritte extends `|>` with the `...` placeholder, letting a pipe target an
  # argument position other than the first (used in `scrub_random/1` below).
  use Magritte

  @moduledoc """
  Functions to scrub all PII in ECLRS dump files so that it can be used in development.
  Sensitive data is replaced with random strings of the same length and format (letters
  for letters, digits for digits and dates for dates). Currently only works with the
  45-column ECLRS files but can easily be extended.
  To use, run
      alias NYSETL.Extra.Scrubber
      Scrubber.scrub_file("../eclrs/data/c201012_0932.txt", "./scrubbed.txt")
  Some internal functions have been left public for your pleasure.
  """

  @doc """
  Make a copy of the input file with all sensitive data replaced with random data of the
  same type and length.
  Uses ETS to make sure that a given source string is consistently replaced with the same
  scrubbed string. The caller can supply an existing ETS for testing purposes or similar.
      Scrubber.scrub_file("../eclrs/data/c201012_0932.txt", "./scrubbed.txt")
  """
  def scrub_file(input_path, output_path, ets \\ :ets.new(:scrubber, [:public])) do
    input_file = File.open!(input_path, [:read])
    {version, header} = version_and_header_from(input_file)
    output_file = File.open!(output_path, [:write])
    # The header row contains no PII and is written through unchanged.
    IO.puts(output_file, header)

    # NOTE(review): rows are written from inside Flow.reduce, so output row
    # order may not match input order — confirm downstream consumers are
    # order-insensitive.
    row_count =
      IO.read(input_file, :all)
      |> String.split(~r/\r*\n+/, trim: true)
      |> Flow.from_enumerable()
      |> Flow.map(fn row -> scrub_row({version, row}, ets) end)
      |> Flow.reduce(
        fn -> [] end,
        fn row, ack ->
          # Side effect: write the scrubbed row; accumulate a marker per row
          # so Enum.count/1 below yields the number of rows written.
          IO.puts(output_file, row)
          [true | ack]
        end
      )
      |> Enum.count()

    :ok = File.close(output_file)
    {:ok, row_count}
  end

  # Reads and trims the first line, then asks ECLRS.File to identify which
  # column layout (version) the header corresponds to.
  defp version_and_header_from(input_file) do
    header =
      input_file
      |> IO.read(:line)
      |> String.trim()

    {version, _headers} =
      header
      |> NYSETL.ECLRS.File.file_headers()

    {version, header}
  end

  @doc ~S"""
  Scrub all PII fields in one row.
  ## Examples
      iex> import NYSETL.Extra.Scrubber
      iex> ets = :ets.new(:scrubber, [:public])
      iex> :rand.seed(:exsss, {101, 102, 103})
      iex> scrub_row({:v1, "Doe|J|John|18MAR1965:00:00:00|||||||||||||||||||||||||||||||||||||||||unclear\n"}, ets)
      "XTK|K|RDFE|28AUG2018:00:00:00|||||||||||||||||||||||||||||||||||||||||unclear\n"
      iex> scrub_row({:v1, "Doe|A|Mary|23MAY1965:00:00:00.000000|||||||||||||||||||||||||||||||||||||||||POS"}, ets)
      "XTK|Z|BYLL|18APR2016:00:00:00.000000|||||||||||||||||||||||||||||||||||||||||POS"
  """
  def scrub_row({version, row}, ets) do
    row
    |> String.split("|")
    |> Enum.with_index()
    |> Enum.map(&scrub_if_index_should_be_scrubbed(version, &1, ets))
    |> Enum.join("|")
  end

  # Zero-based column indexes in the :v1 layout that carry NO PII and are
  # therefore passed through unscrubbed. Keys are kept for documentation.
  @v1_public %{
    "PIDSEXCODE" => 4,
    "PIDZIPCODE" => 8,
    "PIDCOUNTYCODE" => 9,
    "MSHLABPFI" => 11,
    "MSHSENDINGFACILITYCLIA" => 12,
    "MSHSENDINGFACILITYNAME" => 13,
    "PIDPATIENTKEY" => 14,
    "ZLRFACILITYCODE" => 18,
    "ZLRFACILITYNAME" => 19,
    "OBRPROVIDERID" => 23,
    "OBRPROVIDERFIRSTNAME" => 24,
    "OBRPROVIDERLASTNAME" => 25,
    "OBRCOLLECTIONDATE" => 26,
    "OBRCREATEDATE" => 27,
    "OBXLOCALTESTCODE" => 28,
    "OBXLOCALTESTDESC" => 29,
    "OBXLOINCCODE" => 30,
    "OBXLOINCDESC" => 31,
    "OBXOBSERVATIONDATE" => 32,
    "OBXOBSERVATIONRESULTTEXT" => 33,
    "OBXOBSERVATIONRESULTTEXTSHORT" => 34,
    "OBXRESULTSTATUSCODE" => 35,
    "OBXPRODUCERLABNAME" => 36,
    "OBXSNOMEDCODE" => 37,
    "OBXSNOMEDDESC" => 38,
    "OBRACCESSIONNUM" => 39,
    "OBXANALYSISDATE" => 40,
    "OBRSPECIMENSOURCENAME" => 41,
    "MSHMESSAGEMASTERKEY" => 42,
    "PIDUPDATEDATE" => 43,
    "RESULTPOSITIVE" => 44
  }
              |> Map.values()

  @doc """
  Scrubs a string if its field index requires scrubbing the specified column layout.
  ## Examples
      iex> import NYSETL.Extra.Scrubber
      iex> ets = :ets.new(:scrubber, [:public])
      iex> :rand.seed(:exsss, {101, 102, 103})
      iex> scrub_if_index_should_be_scrubbed(:v1, {"Doe", 0}, ets)
      "XTK"
      iex> scrub_if_index_should_be_scrubbed(:v1, {"59483", 8}, ets) # ZIP is not PII
      "59483"
  """
  def scrub_if_index_should_be_scrubbed(:v1, {string, ix}, _ets) when ix in @v1_public, do: string
  def scrub_if_index_should_be_scrubbed(:v1, {string, _ix}, ets), do: scrub_and_remember(string, ets)

  @doc """
  Return a string which is based on the input string but with all letters replaced
  with random uppercase letters and all digits replaced with random digits.
  Uses ETS to always return the same output for a certain input.
  ## Examples
      iex> import NYSETL.Extra.Scrubber
      iex> ets = :ets.new(:scrubber, [:public])
      iex> :rand.seed(:exsss, {101, 102, 103})
      iex> scrub_and_remember("<NAME>", ets)
      "XTKK RDF"
      iex> scrub_and_remember("<NAME>", ets)
      "EWZB-YLL ZCH"
      iex> scrub_and_remember("<NAME>", ets)
      "XTKK RDF"
  """
  def scrub_and_remember(string, ets) do
    case :ets.lookup(ets, string) do
      [{_, scrubbed}] ->
        scrubbed

      [] ->
        scrubbed = scrub_random(string)
        # insert_new/2 returns false if another Flow worker inserted the same
        # key between our lookup and insert; in that case re-read the winner's
        # value so the mapping stays consistent.
        case :ets.insert_new(ets, {string, scrubbed}) do
          false ->
            IO.inspect(string, label: "race condition averted")
            [{_, scrubbed}] = :ets.lookup(ets, string)
            scrubbed

          _ ->
            scrubbed
        end
    end
  end

  @doc """
  Return a string which is based on the input string but with all letters replaced
  with random uppercase letters and all digits replaced with random digits.
  Replaces valid ISO dates with valid ISO dates.
  ## Examples
      iex> import NYSETL.Extra.Scrubber
      iex> :rand.seed(:exsss, {101, 102, 103})
      iex> scrub_random("<NAME>")
      "XTKK RDF"
      iex> scrub_random("<NAME>") # no memory
      "EWZB YLL"
      iex> scrub_random("123 West 27th St.")
      "432 ZCHT 39OA QS."
      iex> scrub_random("23MAY1965") # special handling of dates
      "17FEB1945"
      iex> scrub_random("23MAY1965:00:00:00.000000") # special handling of datetimes
      "29NOV1994:00:00:00.000000"
  """
  def scrub_random(string) do
    # looks_like_a_date?/1 returns regex captures (or nil), not a boolean:
    # `time` is the optional ":HH:MM:SS[.ffffff]" suffix, possibly "".
    case looks_like_a_date?(string) do
      [_all, time] ->
        random_date(time)

      nil ->
        # Magritte's `...` places the piped string as Regex.replace/3's
        # *second* argument (the subject), not the first.
        string
        |> Regex.replace(~r/[[:alpha:]]/, ..., &random_letter/1)
        |> Regex.replace(~r/[[:digit:]]/, ..., &random_digit/1)
    end
  end

  # Matches "DDmonYYYY" with an optional time suffix; captures the suffix.
  defp looks_like_a_date?(string) do
    Regex.run(~r/^\d{2}(?:JAN|FEB|MAR|APR|MAY|JUN|JUL|AUG|SEP|OCT|NOV|DEC)\d{4}((?::\d{2}:\d{2}:\d{2}(?:\.\d+)?)?)$/, string)
  end

  @days_in_span Date.diff(~D[2020-06-01], ~D[1900-01-01])

  # Picks a uniformly random date between 1900-01-01 and 2020-06-01 and
  # re-attaches the original (unscrubbed) time-of-day suffix.
  def random_date(time) do
    Date.add(~D[1900-01-01], :rand.uniform(@days_in_span))
    |> Timex.format!("{0D}{Mshort}{YYYY}#{time}")
    |> String.upcase()
  end

  @letters String.codepoints("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
  defp random_letter(_) do
    Enum.random(@letters)
  end

  @digits String.codepoints("1234567890")
  defp random_digit(_) do
    Enum.random(@digits)
  end
end
|
lib/nys_etl/extra/scrubber.ex
| 0.689201
| 0.418281
|
scrubber.ex
|
starcoder
|
defmodule Thrash.MacroHelpers do
  @moduledoc false
  # Helpers for working with macros. Thrash internal use only.

  @type escaped_module_name :: {term, list, [atom]}
  @type namespace :: nil | {atom, atom}

  @doc """
  Determine the caller module name with optional override.
  Use with the `__CALLER__.module` value as the second argument.
  """
  @spec determine_module_name(nil | escaped_module_name, atom) :: atom
  def determine_module_name(nil, caller), do: caller
  def determine_module_name({:__aliases__, _, [module]}, _caller), do: module

  @doc """
  Turn an atom into an elixir module name with optional namespace.
  Examples:
      iex> Thrash.MacroHelpers.atom_to_elixir_module(:Struct, nil)
      Struct
      iex> Thrash.MacroHelpers.atom_to_elixir_module(Struct, nil)
      Struct
      iex> Thrash.MacroHelpers.atom_to_elixir_module(:Struct, Namespace)
      Namespace.Struct
      iex> Thrash.MacroHelpers.atom_to_elixir_module(:Struct, :Namespace)
      Namespace.Struct
  """
  @spec atom_to_elixir_module(atom, nil | atom) :: atom
  def atom_to_elixir_module(atom, namespace)
      when is_atom(atom) and is_atom(namespace) do
    case namespace do
      nil -> Module.concat([atom])
      _ -> Module.concat([namespace, atom])
    end
  end

  @doc """
  Determine the namespace of a module name
  Examples:
      iex> Thrash.MacroHelpers.find_namespace(Foo)
      nil
      iex> Thrash.MacroHelpers.find_namespace(Foo.Bar)
      Foo
      iex> Thrash.MacroHelpers.find_namespace(Foo.Bar.Baz)
      Foo.Bar
  """
  @spec find_namespace(atom) :: namespace
  def find_namespace(modulename) do
    case Module.split(modulename) do
      [_single] ->
        nil

      [_ | _] = parts ->
        parts
        |> Enum.drop(-1)
        |> Module.concat()
    end
  end

  @doc """
  Create a quoted 'or' expression for an array of values.
  Useful for generating typespecs, i.e., `:foo | :bar | :baz`
  Examples:
      iex> Thrash.MacroHelpers.quoted_chained_or([:a, :b])
      quote do: :a | :b
      iex> Thrash.MacroHelpers.quoted_chained_or([:a, :b, :c])
      quote do: :a | :b | :c
  """
  def quoted_chained_or([value]), do: value

  def quoted_chained_or([_, _ | _] = values) do
    # Build the right-associated `a | (b | (c | ...))` AST by folding from
    # the end of the list toward the front.
    values
    |> Enum.reverse()
    |> Enum.reduce(fn value, ast -> {:|, [], [value, ast]} end)
  end
end
|
lib/thrash/macro_helpers.ex
| 0.844024
| 0.444083
|
macro_helpers.ex
|
starcoder
|
defmodule Concentrate.MergeFilter do
  @moduledoc """
  ProducerConsumer which merges the data given to it, filters, and outputs the result.
  We manage the demand from producers manually.
  * On subscription, we ask for 1 event
  * Once we've received an event, schedule a timeout for 1s
  * When the timeout happens, merge and filter the current state
  * Request new events from producers who were part of the last merge
  """
  use GenStage
  require Logger
  alias Concentrate.Encoder.GTFSRealtimeHelpers
  alias Concentrate.{Filter, TripDescriptor}
  alias Concentrate.Merge.Table

  @start_link_opts [:name]
  # allow sources some time to load
  @initial_timeout 5_000

  defstruct timeout: 1_000,
            timer: nil,
            table: Table.new(),
            demand: %{},
            filters: [],
            group_filters: []

  def start_link(opts \\ []) do
    start_link_opts = Keyword.take(opts, @start_link_opts)
    opts = Keyword.drop(opts, @start_link_opts)
    GenStage.start_link(__MODULE__, opts, start_link_opts)
  end

  @impl GenStage
  def init(opts) do
    filters = Keyword.get(opts, :filters, [])
    group_filters = build_group_filters(Keyword.get(opts, :group_filters, []))
    state = %__MODULE__{filters: filters, group_filters: group_filters}

    state =
      case Keyword.fetch(opts, :timeout) do
        {:ok, timeout} -> %{state | timeout: timeout}
        _ -> state
      end

    initial_timeout = Keyword.get(opts, :initial_timeout, @initial_timeout)
    opts = Keyword.take(opts, [:subscribe_to, :dispatcher])
    opts = Keyword.put_new(opts, :dispatcher, GenStage.BroadcastDispatcher)
    # Arm the first merge immediately; subsequent timers are armed lazily in
    # handle_events/3 whenever new data arrives.
    state = %{state | timer: Process.send_after(self(), :timeout, initial_timeout)}
    {:producer_consumer, state, opts}
  end

  @impl GenStage
  def handle_subscribe(:producer, _options, from, state) do
    # Manual demand: track one outstanding event per producer.
    state = %{state | table: Table.add(state.table, from), demand: Map.put(state.demand, from, 1)}
    :ok = GenStage.ask(from, 1)
    {:manual, state}
  end

  def handle_subscribe(_, _, _, state) do
    {:automatic, state}
  end

  @impl GenStage
  def handle_cancel(_reason, from, state) do
    state = %{
      state
      | table: Table.remove(state.table, from),
        demand: Map.delete(state.demand, from)
    }

    {:noreply, [], state}
  end

  @impl GenStage
  def handle_events(events, from, state) do
    # Only the newest event per producer matters; older ones are superseded.
    latest_data = List.last(events)

    state = %{
      state
      | table: Table.update(state.table, from, latest_data),
        demand: Map.update!(state.demand, from, fn demand -> demand - length(events) end)
    }

    # Arm a merge timer if one isn't already pending.
    state =
      if state.timer do
        state
      else
        %{state | timer: Process.send_after(self(), :timeout, state.timeout)}
      end

    {:noreply, [], state}
  end

  @impl GenStage
  def handle_info(:timeout, state) do
    # Merge -> filter -> group -> group-filter, logging the duration of each
    # stage at debug level.
    {time, merged} = :timer.tc(&Table.items/1, [state.table])

    _ =
      Logger.debug(fn ->
        "#{__MODULE__} merge time=#{time / 1_000}"
      end)

    {time, filtered} = :timer.tc(&Filter.run/2, [merged, state.filters])

    _ =
      Logger.debug(fn ->
        "#{__MODULE__} filter time=#{time / 1_000}"
      end)

    # FIX: this capture was corrupted to `>FSRealtimeHelpers.group/1`, which
    # does not compile; restore the `&GTFSRealtimeHelpers...` function capture.
    {time, grouped} = :timer.tc(&GTFSRealtimeHelpers.group/1, [filtered])

    _ =
      Logger.debug(fn ->
        "#{__MODULE__} group time=#{time / 1_000}"
      end)

    {time, group_filtered} = :timer.tc(&group_filter/2, [grouped, state.group_filters])

    _ =
      Logger.debug(fn ->
        "#{__MODULE__} group_filter time=#{time / 1_000}"
      end)

    state = %{state | timer: nil, demand: ask_demand(state.demand)}
    {:noreply, [group_filtered], state}
  end

  def handle_info(msg, state) do
    _ =
      Logger.warn(fn ->
        "unknown message to #{__MODULE__} #{inspect(self())}: #{inspect(msg)}"
      end)

    {:noreply, [], state}
  end

  # Re-asks one event from every producer whose previous event was consumed
  # (demand hit 0); producers with outstanding demand are left alone.
  defp ask_demand(demand_map) do
    for {from, demand} <- demand_map, into: %{} do
      if demand == 0 do
        GenStage.ask(from, 1)
        {from, 1}
      else
        {from, demand}
      end
    end
  end

  # Normalizes each configured group filter (module or 1-arity fun) into a
  # flat-mapping fun suitable for Enum.flat_map/2.
  defp build_group_filters(filters) do
    for filter <- filters do
      fun =
        case filter do
          filter when is_atom(filter) ->
            &filter.filter/1

          filter when is_function(filter, 1) ->
            filter
        end

      flat_mapper(fun)
    end
  end

  # Wraps a filter fun: drops groups with no trip descriptor, gives empty
  # trip updates special handling, and keeps everything else.
  defp flat_mapper(fun) do
    fn value ->
      case fun.(value) do
        {%TripDescriptor{} = td, [], []} ->
          flat_map_empty_trip_update(td)

        {nil, _, _} ->
          []

        other ->
          [other]
      end
    end
  end

  defp flat_map_empty_trip_update(td) do
    # allow CANCELED TripUpdates to have no vehicle or stops
    if TripDescriptor.schedule_relationship(td) == :CANCELED do
      [{td, [], []}]
    else
      []
    end
  end

  # Applies each group filter in turn over the full list of groups.
  defp group_filter(groups, filters) do
    Enum.reduce(filters, groups, fn filter, groups ->
      Enum.flat_map(groups, filter)
    end)
  end
end
|
lib/concentrate/merge_filter.ex
| 0.787196
| 0.566888
|
merge_filter.ex
|
starcoder
|
defmodule Site.ResponsivePagination do
  @moduledoc """
  Represents all the contextual information necessary to render the responsive pagination component.
  """

  # Maximum number of page links shown on desktop / mobile layouts.
  @desktop_max_length 5
  @mobile_max_length 3

  @type stats :: %{
          offset: integer,
          per_page: integer,
          total: integer,
          showing_from: integer,
          showing_to: integer
        }

  defstruct range: [],
            mobile_range: [],
            current: nil,
            previous: nil,
            next: nil,
            prefix: [],
            suffix: []

  @type t :: %__MODULE__{
          range: [integer],
          mobile_range: [integer],
          current: integer | nil,
          previous: integer | nil,
          next: integer | nil,
          prefix: [integer | String.t()],
          suffix: [integer | String.t()]
        }

  # Builds the full pagination struct: the desktop range grows outward from
  # the current page; the mobile range shrinks the desktop range back down.
  @spec build(stats) :: t
  def build(stats) do
    last_page = calculate_last_page(stats.total, stats.per_page)
    current = calculate_current(last_page, stats.offset)
    range = modify_range([current], last_page, current, @desktop_max_length, &expand_range/3)
    first_in_range = List.first(range)
    last_in_range = List.last(range)

    %__MODULE__{
      range: range,
      mobile_range: modify_range(range, last_page, current, @mobile_max_length, &shrink_range/3),
      current: current,
      previous: previous(current),
      next: next(current, last_page),
      prefix: prefix(first_in_range),
      suffix: suffix(last_in_range, last_page)
    }
  end

  # `offset` is a zero-based page offset; current page is clamped to
  # [1, last_page], or nil when there are no pages at all.
  @spec calculate_current(integer, integer) :: integer | nil
  defp calculate_current(0, _), do: nil
  defp calculate_current(_, offset) when offset < 1, do: 1
  defp calculate_current(last_page, offset) when offset >= last_page, do: last_page
  defp calculate_current(_, offset), do: offset + 1

  @spec calculate_last_page(integer, integer) :: integer
  defp calculate_last_page(0, _), do: 0
  defp calculate_last_page(_, 0), do: 0

  defp calculate_last_page(total, per_page) do
    total
    |> Kernel./(per_page)
    |> Float.ceil()
    |> round()
    |> max(1)
  end

  # Leading "1 …" shown before the range when the range doesn't start at 1.
  @spec prefix(integer | nil) :: [1 | String.t()]
  defp prefix(nil), do: []
  defp prefix(1), do: []
  defp prefix(page), do: [1 | ellipsis(page, 2)]

  # Trailing "… N" shown after the range when it doesn't reach the last page.
  @spec suffix(integer | nil, integer) :: [String.t() | integer]
  defp suffix(nil, _), do: []
  defp suffix(page, last_page) when page == last_page, do: []
  defp suffix(page, last_page), do: ellipsis(page, last_page - 1) ++ [last_page]

  # No ellipsis needed when the range directly abuts page 1 / the last page.
  @spec ellipsis(integer, integer) :: [String.t()]
  defp ellipsis(first, second) when first == second, do: []
  defp ellipsis(_, _), do: ["…"]

  @spec previous(integer | nil) :: integer | nil
  defp previous(nil), do: nil
  defp previous(1), do: nil
  defp previous(current), do: current - 1

  @spec next(integer | nil, integer) :: integer | nil
  defp next(nil, _), do: nil
  defp next(current, last_page) when current == last_page, do: nil
  defp next(current, _), do: current + 1

  # Repeatedly applies `operation` (expand or shrink, one step per call)
  # until the range reaches its target length of min(last_page, maximum).
  @spec modify_range([integer | nil], integer, integer | nil, integer, fun) :: [integer]
  defp modify_range([nil], _, _, _, _), do: []

  defp modify_range(range, last_page, current, maximum, operation) do
    range = operation.(range, last_page, current)
    done? = length(range) == min(last_page, maximum)
    if done?, do: range, else: modify_range(range, last_page, current, maximum, operation)
  end

  # One expansion step: grow by at most one page on each side, staying
  # within [1, last_page].
  @spec expand_range([integer], integer, integer | nil) :: [integer]
  defp expand_range(range, last_page, _current) do
    first = List.first(range)
    last = List.last(range)

    range
    |> add(first - 1, first > 1, 1)
    |> add(last + 1, last < last_page, -1)
  end

  @spec add([integer], integer, boolean, 1 | -1) :: [integer]
  defp add(range, _, false, _), do: range

  defp add(range, page, true, side) do
    if length(range) < @desktop_max_length, do: do_add(range, page, side), else: range
  end

  # side: 1 prepends, -1 appends.
  @spec do_add([integer], integer, 1 | -1) :: [integer]
  defp do_add(list, value, 1), do: [value] ++ list
  defp do_add(list, value, -1), do: list ++ [value]

  # One shrink step: drop from whichever end is farther from the current
  # page, but never below the mobile maximum.
  @spec shrink_range([integer], integer, integer | nil) :: [integer]
  defp shrink_range(range, _last_page, current) do
    range
    |> drop(List.first(range) < current, 1)
    |> drop(List.last(range) > current, -1)
  end

  # side: 1 drops from the front, -1 drops from the back (Enum.drop semantics).
  @spec drop([integer], boolean, 1 | -1) :: [integer]
  defp drop(range, false, _), do: range

  defp drop(range, true, side),
    do: if(length(range) > @mobile_max_length, do: Enum.drop(range, side), else: range)
end
|
apps/site/lib/site/responsive_pagination.ex
| 0.795857
| 0.444866
|
responsive_pagination.ex
|
starcoder
|
defmodule Snitch.Data.Schema.TaxRate do
  @moduledoc """
  Models a TaxRate.
  TaxRate belongs to a zone.
  A tax rate basically groups the tax values for different tax classes. These are used
  for tax calculation.
  ### e.g.
  ```
  `ProductTaxClass`: 5%,
  `ShippingTaxClass`: 2%
  ```
  A tax rate has a priority associated with it. While calculating taxes for a `tax_zone` the tax
  rates with lowest priority are calculated first.
  After this the taxes are compounded upon the one created with lower priority.
  ### Example
  ```
  tax_rate_1: %{priority: 0, rate: 2%},
  tax_rate_2: %{priority: 0, rate: 3%},
  tax_rate_3: %{priority: 1, rate: 1%}
  base_amount = 10
  level_1_amount = 10 * 0.02 + 10 * 0.01 + 10
  total_amount = 0.01 * level_1_amount
  ```
  Here first taxes due to tax_rate_1 and tax_rate_2 are calculated on the base amount which
  are added together to give a level_1_amount. The tax_rate_3 is then applied on this to give
  total amount.
  """
  use Snitch.Data.Schema
  alias Snitch.Data.Schema.{TaxZone, TaxRateClassValue}

  @type t :: %__MODULE__{}

  schema "snitch_tax_rates" do
    field(:name, :string)
    # Lower priority is applied first; equal priorities are applied on the
    # same base amount (see moduledoc).
    field(:priority, :integer, default: 0)
    field(:is_active?, :boolean, default: true)
    belongs_to(:tax_zone, TaxZone)
    # Replaced wholesale on update: params must re-supply all class values.
    has_many(:tax_rate_class_values, TaxRateClassValue, on_replace: :delete)
    timestamps()
  end

  @required ~w(name tax_zone_id)a
  @optional ~w(is_active? priority)a
  @permitted @required ++ @optional

  @doc """
  Builds a changeset for creating a tax rate from `params`.
  `params` must include `:tax_rate_class_values`, which are cast via
  `TaxRateClassValue.changeset/2`.
  """
  def create_changeset(%__MODULE__{} = tax_rate, params) do
    tax_rate
    |> cast(params, @permitted)
    |> common_changeset()
  end

  @doc """
  Builds a changeset for updating a tax rate; applies the same validations
  as `create_changeset/2`.
  """
  def update_changeset(%__MODULE__{} = tax_rate, params) do
    tax_rate
    |> cast(params, @permitted)
    |> common_changeset()
  end

  # Shared validations: required fields, tax-zone FK, per-zone name
  # uniqueness, and the (required) nested class values.
  defp common_changeset(changeset) do
    changeset
    |> validate_required(@required)
    |> foreign_key_constraint(:tax_zone_id)
    |> unique_constraint(:name,
      name: :unique_tax_rate_name_for_tax_zone,
      message: "Tax Rate name should be unique for a tax zone."
    )
    |> cast_assoc(:tax_rate_class_values,
      with: &TaxRateClassValue.changeset/2,
      required: true
    )
  end
end
|
apps/snitch_core/lib/core/data/schema/tax/tax_rate.ex
| 0.904698
| 0.968974
|
tax_rate.ex
|
starcoder
|
defmodule SSHKit.SSH.Connection do
  @moduledoc """
  Defines a `SSHKit.SSH.Connection` struct representing a host connection.
  A connection struct has the following fields:
  * `host` - the name or IP of the remote host
  * `port` - the port to connect to
  * `options` - additional connection options
  * `ref` - the underlying `:ssh` connection ref
  """

  alias SSHKit.SSH.Connection
  alias SSHKit.Utils

  defstruct [:host, :port, :options, :ref, impl: :ssh]

  # FIX: `@type t :: __MODULE__` declared the *atom* SSHKit.SSH.Connection as
  # the type, not the struct; the struct type is `%__MODULE__{}`.
  @type t :: %__MODULE__{}

  @default_impl_options [user_interaction: false]
  @default_connect_options [port: 22, timeout: :infinity, impl: :ssh]

  @doc """
  Opens a connection to an SSH server.
  The following options are allowed:
  * `:timeout`: A timeout in ms after which a command is aborted. Defaults to `:infinity`.
  * `:port`: The remote-port to connect to. Defaults to 22.
  * `:user`: The username with which to connect.
    Defaults to `$LOGNAME` or `$USER` on UNIX, or `$USERNAME` on Windows.
  * `:password`: The password to login with.
  * `:user_interaction`: Defaults to `false`.
  For a complete list of options and their default values, see:
  [`:ssh.connect/4`](http://erlang.org/doc/man/ssh.html#connect-4).
  Returns `{:ok, conn}` on success, `{:error, reason}` otherwise.
  """
  def open(host, options \\ [])

  def open(nil, _) do
    {:error, "No host given."}
  end

  # :ssh expects a charlist host; normalize binaries before connecting.
  def open(host, options) when is_binary(host) do
    open(to_charlist(host), options)
  end

  def open(host, options) do
    {details, opts} = extract(options)
    port = details[:port]
    timeout = details[:timeout]
    impl = details[:impl]

    case impl.connect(host, port, opts, timeout) do
      {:ok, ref} -> {:ok, build(host, port, opts, ref, impl)}
      err -> err
    end
  end

  # Splits the options into connection details handled here (:port, :timeout,
  # :impl) and the options forwarded to the underlying ssh implementation.
  defp extract(options) do
    connect_option_keys = Keyword.keys(@default_connect_options)
    {connect_options, impl_options} = Keyword.split(options, connect_option_keys)

    connect_options =
      @default_connect_options
      |> Keyword.merge(connect_options)

    impl_options =
      @default_impl_options
      |> Keyword.merge(impl_options)
      |> Utils.charlistify()

    {connect_options, impl_options}
  end

  defp build(host, port, options, ref, impl) do
    %Connection{host: host, port: port, options: options, ref: ref, impl: impl}
  end

  @doc """
  Closes an SSH connection.
  Returns `:ok`.
  For details, see [`:ssh.close/1`](http://erlang.org/doc/man/ssh.html#close-1).
  """
  def close(conn) do
    conn.impl.close(conn.ref)
  end

  @doc """
  Opens a new connection, based on the parameters of an existing one.
  The timeout value of the original connection is discarded.
  Other connection options are reused and may be overridden.
  Uses `SSHKit.SSH.open/2`.
  Returns `{:ok, conn}` or `{:error, reason}`.
  """
  def reopen(connection, options \\ []) do
    options =
      connection.options
      |> Keyword.put(:port, connection.port)
      |> Keyword.put(:impl, connection.impl)
      |> Keyword.merge(options)

    open(connection.host, options)
  end
end
|
lib/sshkit/ssh/connection.ex
| 0.861596
| 0.5119
|
connection.ex
|
starcoder
|
defmodule Cldr.LanguageTag.Parser do
  @moduledoc """
  Parses a CLDR language tag (also referred to as locale string).
  The applicable specification is from [CLDR](http://unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers)
  which is similar based upon [RFC5646](https://tools.ietf.org/html/rfc5646) with some variations.
  """

  alias Cldr.LanguageTag
  alias Cldr.Locale

  @doc """
  Parse a locale name into a `t:Cldr.LanguageTag`
  * `locale_name` is a string representation of a language tag
    as defined by [RFC5646](https://tools.ietf.org/html/rfc5646).
  Returns
  * `{:ok, language_tag}` or
  * `{:error, reasons}`
  """
  def parse(locale) do
    normalized = normalize_locale_name(locale)

    case Cldr.Rfc5646.Parser.parse(normalized) do
      {:ok, fields} ->
        normalized_fields =
          fields
          |> Keyword.put(:requested_locale_name, locale)
          |> Enum.map(&normalize_field/1)

        {:ok, struct(LanguageTag, normalized_fields)}

      {:error, _reason} = error ->
        error
    end
  end

  @doc """
  Parse a locale name into a `t:Cldr.LanguageTag`
  * `locale_name` is a string representation of a language tag
    as defined by [RFC5646](https://tools.ietf.org/html/rfc5646).
  Returns
  * `language_tag` or
  * raises an exception
  """
  def parse!(locale) do
    case parse(locale) do
      {:ok, parsed} -> parsed
      {:error, {exception, message}} -> raise exception, message
    end
  end

  # Each recognised field is normalized through its validity module; the
  # original field atom is preserved in the returned pair.
  def normalize_field({:language, value}), do: {:language, Cldr.Validity.Language.normalize(value)}

  def normalize_field({:script, value}), do: {:script, Cldr.Validity.Script.normalize(value)}

  def normalize_field({:territory, value}), do: {:territory, Cldr.Validity.Territory.normalize(value)}

  def normalize_field({:language_variants, value}),
    do: {:language_variants, Cldr.Validity.Variant.normalize(value)}

  # Everything is downcased before parsing and that's already the canonical
  # form for the remaining fields, so pass them through unchanged.
  def normalize_field(other), do: other

  defp normalize_locale_name(name) do
    name
    |> String.downcase()
    |> Locale.locale_name_from_posix()
  end
end
|
lib/cldr/language_tag/parser.ex
| 0.887476
| 0.702836
|
parser.ex
|
starcoder
|
defmodule Wallaby.Selenium do
@moduledoc """
The Selenium driver uses [Selenium Server](https://github.com/SeleniumHQ/selenium) to power many types of browsers (Chrome, Firefox, Edge, etc).
## Usage
Start a Wallaby Session using this driver with the following command:
```
{:ok, session} = Wallaby.start_session()
```
## Configuration
### Capabilities
These capabilities will override the default capabilities.
```
config :wallaby,
selenium: [
capabilities: %{
# something
}
]
```
## Default Capabilities
By default, Selenium will use the following capabilities
You can read more about capabilities in the [JSON Wire Protocol](https://github.com/SeleniumHQ/selenium/wiki/JsonWireProtocol#capabilities-json-object) documentation.
```elixir
%{
javascriptEnabled: true,
browserName: "firefox",
"moz:firefoxOptions": %{
args: ["-headless"]
}
}
```
## Notes
- Requires [selenium-server-standalone](https://www.seleniumhq.org/download/) to be running on port 4444. Wallaby does _not_ manage the start/stop of the Selenium server.
- Requires [GeckoDriver](https://github.com/mozilla/geckodriver) to be installed in your path when using [Firefox](https://www.mozilla.org/en-US/firefox/new/). Firefox is used by default.
"""
use Supervisor
@behaviour Wallaby.Driver
alias Wallaby.{Driver, Element, Session}
alias Wallaby.Helpers.KeyCodes
alias Wallaby.WebdriverClient
@typedoc """
Options to pass to Wallaby.start_session/1
```elixir
Wallaby.start_session(
remote_url: "http://selenium_url",
capabilities: %{browserName: "firefox"}
)
```
"""
@type start_session_opts ::
{:remote_url, String.t()}
| {:capabilities, map}
  @doc false
  def start_link(opts \\ []) do
    Supervisor.start_link(__MODULE__, :ok, opts)
  end

  @doc false
  def init(:ok) do
    # No children are supervised: the supervisor exists only to satisfy the
    # Wallaby.Driver contract (Selenium server lifecycle is not managed here).
    Supervisor.init([], strategy: :one_for_one)
  end

  @doc false
  def validate do
    # Nothing to validate for the Selenium driver.
    :ok
  end
@doc false
@spec start_session([start_session_opts]) :: Wallaby.Driver.on_start_session() | no_return
def start_session(opts \\ []) do
base_url = Keyword.get(opts, :remote_url, "http://localhost:4444/wd/hub/")
capabilities = Keyword.get(opts, :capabilities, capabilities_from_config())
with {:ok, response, cookies} <- WebdriverClient.create_session(base_url, capabilities) do
id = response["sessionId"]
session = %Session{
session_url: base_url <> "session/#{id}",
url: base_url <> "session/#{id}",
id: id,
driver: __MODULE__,
capabilities: capabilities,
cookies: cookies
}
if window_size = Keyword.get(opts, :window_size),
do: {:ok, _} = set_window_size(session, window_size[:width], window_size[:height])
{:ok, session}
end
end
defp capabilities_from_config() do
:wallaby
|> Application.get_env(:selenium, [])
|> Keyword.get(:capabilities, default_capabilities())
end
  @doc false
  @spec end_session(Session.t()) :: :ok
  def end_session(session) do
    # Always returns :ok, even if the remote DELETE request fails.
    WebdriverClient.delete_session(session)
    :ok
  end

  @doc false
  def blank_page?(session) do
    # Any failure to fetch the current URL is treated as "not blank".
    case current_url(session) do
      {:ok, url} ->
        url == "about:blank"

      _ ->
        false
    end
  end

  # Window, frame and dialog handling is delegated directly to the
  # JSON Wire Protocol client.
  @doc false
  defdelegate window_handle(session), to: WebdriverClient
  @doc false
  defdelegate window_handles(session), to: WebdriverClient
  @doc false
  defdelegate focus_window(session, window_handle), to: WebdriverClient
  @doc false
  defdelegate close_window(session), to: WebdriverClient
  @doc false
  defdelegate get_window_size(session), to: WebdriverClient
  @doc false
  defdelegate set_window_size(session, width, height), to: WebdriverClient
  @doc false
  defdelegate get_window_position(session), to: WebdriverClient
  @doc false
  defdelegate set_window_position(session, x, y), to: WebdriverClient
  @doc false
  defdelegate maximize_window(session), to: WebdriverClient
  @doc false
  defdelegate focus_frame(session, frame), to: WebdriverClient
  @doc false
  defdelegate focus_parent_frame(session), to: WebdriverClient
  @doc false
  defdelegate accept_alert(session, fun), to: WebdriverClient
  @doc false
  defdelegate dismiss_alert(session, fun), to: WebdriverClient
  @doc false
  defdelegate accept_confirm(session, fun), to: WebdriverClient
  @doc false
  defdelegate dismiss_confirm(session, fun), to: WebdriverClient
  @doc false
  defdelegate accept_prompt(session, input, fun), to: WebdriverClient
  @doc false
  defdelegate dismiss_prompt(session, fun), to: WebdriverClient
  @doc false
  defdelegate take_screenshot(session_or_element), to: WebdriverClient

  @doc false
  def cookies(%Session{} = session) do
    WebdriverClient.cookies(session)
  end
  @doc false
  def current_path(%Session{} = session) do
    # Note: URI.parse/1 leaves :path as nil when the URL has no path
    # component, so this can return {:ok, nil}.
    with {:ok, url} <- WebdriverClient.current_url(session),
         uri <- URI.parse(url),
         {:ok, path} <- Map.fetch(uri, :path),
         do: {:ok, path}
  end

  @doc false
  def current_url(%Session{} = session) do
    WebdriverClient.current_url(session)
  end

  @doc false
  def page_source(%Session{} = session) do
    WebdriverClient.page_source(session)
  end

  @doc false
  def page_title(%Session{} = session) do
    WebdriverClient.page_title(session)
  end

  @doc false
  def set_cookie(%Session{} = session, key, value) do
    WebdriverClient.set_cookie(session, key, value)
  end

  @doc false
  def visit(%Session{} = session, path) do
    WebdriverClient.visit(session, path)
  end

  @doc false
  def attribute(%Element{} = element, name) do
    WebdriverClient.attribute(element, name)
  end

  @doc false
  @spec clear(Element.t()) :: {:ok, nil} | {:error, Driver.reason()}
  def clear(%Element{} = element) do
    WebdriverClient.clear(element)
  end

  @doc false
  def click(%Element{} = element) do
    WebdriverClient.click(element)
  end

  @doc false
  def click(parent, button) do
    WebdriverClient.click(parent, button)
  end

  @doc false
  def button_down(parent, button) do
    WebdriverClient.button_down(parent, button)
  end

  @doc false
  def button_up(parent, button) do
    WebdriverClient.button_up(parent, button)
  end

  @doc false
  def double_click(parent) do
    WebdriverClient.double_click(parent)
  end

  @doc false
  def hover(%Element{} = element) do
    # nil session: the client moves the mouse relative to the element only.
    WebdriverClient.move_mouse_to(nil, element)
  end

  @doc false
  def move_mouse_by(session, x_offset, y_offset) do
    # nil element: the move is relative to the current mouse position.
    WebdriverClient.move_mouse_to(session, nil, x_offset, y_offset)
  end

  @doc false
  def displayed(%Element{} = element) do
    WebdriverClient.displayed(element)
  end

  @doc false
  def selected(%Element{} = element) do
    WebdriverClient.selected(element)
  end

  @doc false
  @spec set_value(Element.t(), String.t()) :: {:ok, nil} | {:error, Driver.reason()}
  def set_value(%Element{} = element, value) do
    WebdriverClient.set_value(element, value)
  end

  @doc false
  def text(%Element{} = element) do
    WebdriverClient.text(element)
  end

  @doc false
  def find_elements(parent, compiled_query) do
    WebdriverClient.find_elements(parent, compiled_query)
  end

  @doc false
  def execute_script(parent, script, arguments \\ []) do
    WebdriverClient.execute_script(parent, script, arguments)
  end

  @doc false
  def execute_script_async(parent, script, arguments \\ []) do
    WebdriverClient.execute_script_async(parent, script, arguments)
  end
@doc """
Simulates typing into an element.
When sending keys to an element and `keys` is identified as
a local file, the local file is uploaded to the
Selenium server, returning a file path which is then
set to the file input we are interacting with.
We then call the `WebdriverClient.send_keys/2` to set the
remote file path as the input's value.
"""
@spec send_keys(Session.t() | Element.t(), list()) :: {:ok, any}
def send_keys(%Session{} = session, keys), do: WebdriverClient.send_keys(session, keys)
def send_keys(%Element{} = element, keys) do
keys =
case Enum.all?(keys, &is_local_file?(&1)) do
true ->
keys
|> Enum.map(fn key -> upload_file(element, key) end)
|> Enum.intersperse("\n")
false ->
keys
end
WebdriverClient.send_keys(element, keys)
end
@doc false
# Baseline capabilities for a headless Firefox session.
def default_capabilities do
  firefox_options = %{args: ["-headless"]}

  %{
    javascriptEnabled: true,
    browserName: "firefox",
    "moz:firefoxOptions": firefox_options
  }
end
# Create a zip file containing our local file
# (Selenium's /file upload endpoint expects a zip archive).
defp create_zipfile(zipfile, filename) do
  # cwd makes the archive store only the basename, not the full local path.
  {:ok, ^zipfile} =
    :zip.create(
      zipfile,
      [String.to_charlist(Path.basename(filename))],
      cwd: String.to_charlist(Path.dirname(filename))
    )

  zipfile
end

# Base64 encode the zipfile for transfer to remote Selenium
defp encode_zipfile(zipfile) do
  File.open!(zipfile, [:read, :raw], fn f ->
    f
    |> IO.binread(:all)
    |> Base.encode64()
  end)
end

# True when the keys list renders to a path that exists locally.
defp is_local_file?(file) do
  file
  |> keys_to_binary()
  |> File.exists?()
end

# Renders a keys list (characters / key-code atoms) to a single binary.
defp keys_to_binary(keys) do
  keys
  |> KeyCodes.chars()
  |> IO.iodata_to_binary()
end

# Makes an uploadable file for JSONWireProtocol
# NOTE(review): the intermediate zip left in System.tmp_dir!() is not
# deleted here — confirm cleanup is handled elsewhere.
defp make_file(filename) do
  System.tmp_dir!()
  |> Path.join("#{random_filename()}.zip")
  |> String.to_charlist()
  |> create_zipfile(filename)
  |> encode_zipfile()
end

# Generate a random filename
defp random_filename do
  Base.encode32(:crypto.strong_rand_bytes(20))
end

# Uploads a local file to remote Selenium server
# Returns the remote file's uploaded location
defp upload_file(element, filename) do
  zip64 = make_file(filename)
  endpoint = element.session_url <> "/file"

  case Wallaby.HTTPClient.request(
         :post,
         endpoint,
         %{file: zip64},
         cookies: element.cookies
       ) do
    {:ok, response, _cookies} ->
      Map.fetch!(response, "value")

    error ->
      raise("Selenium Upload failed: #{inspect(error)}")
  end
end
end
|
lib/wallaby/selenium.ex
| 0.755907
| 0.779679
|
selenium.ex
|
starcoder
|
defmodule Filterable do
  @moduledoc """
  `Filterable` allows to map incoming parameters to filter functions.
  This module contains functions (`apply_filters/3`, `filter_values/2`)
  which allow to perform filtering and `filterable` macro which allows
  to define available filters using DSL (see `Filterable.DSL`).
  """
  alias Filterable.{Params, Utils}

  # Baseline per-filter options; overridden first by each filter's own
  # options and then by call-site options (see filter_values/3).
  @default_options [
    allow_blank: false,
    allow_nil: false,
    trim: true,
    default: nil,
    cast: nil,
    cast_errors: true
  ]

  defmacro __using__(_) do
    quote do
      import unquote(__MODULE__), only: [filterable: 2, filterable: 1]
      @before_compile unquote(__MODULE__)
      # Module holding the filter definitions; `filterable/2` replaces this
      # when a separate filters module is generated.
      @filters_module __MODULE__
      @filter_options []
    end
  end

  # Injects convenience wrappers into the using module that close over
  # @filters_module and @filter_options; all are overridable.
  defmacro __before_compile__(_) do
    quote do
      def apply_filters!(queryable, params, opts \\ []) do
        Filterable.apply_filters!(queryable, params, @filters_module, filter_options(opts))
      end

      def apply_filters(queryable, params, opts \\ []) do
        Filterable.apply_filters(queryable, params, @filters_module, filter_options(opts))
      end

      def filter_values(params, opts \\ []) do
        Filterable.filter_values(params, @filters_module, filter_options(opts))
      end

      # NOTE(review): Keyword.merge(opts, @filter_options) gives the
      # compile-time @filter_options precedence over call-site opts —
      # confirm that precedence is intended.
      def filter_options(opts \\ []) do
        Keyword.merge(opts, @filter_options)
      end

      defdelegate defined_filters(), to: @filters_module

      defoverridable apply_filters!: 3,
                     apply_filters!: 2,
                     apply_filters: 3,
                     apply_filters: 2,
                     filter_values: 2,
                     filter_values: 1,
                     filter_options: 1
    end
  end

  # Three accepted call shapes:
  #   filterable do ... end          -> filterable(nil, block, opts)
  #   filterable opts do ... end     -> filterable(nil, block, arg)
  #   filterable Module, opts        -> filterable(arg, nil, opts)
  defmacro filterable(arg, opts \\ [])
  defmacro filterable([do: block], opts), do: filterable(nil, block, opts)
  defmacro filterable(arg, do: block), do: filterable(nil, block, arg)
  defmacro filterable(arg, opts), do: filterable(arg, nil, opts)

  # Defines a dedicated module hosting the filter DSL block.
  defmacro define_module(module, do: block) do
    quote do
      defmodule unquote(module) do
        use Filterable.DSL
        use Filterable.Ecto.Helpers
        unquote(block)
      end
    end
  end

  @spec apply!(any, map | Keyword.t(), module, Keyword.t()) :: any | no_return
  def apply!(queryable, params, module, opts \\ []) do
    case apply_filters(queryable, params, module, opts) do
      {:ok, result, _values} -> result
      {:error, message} -> raise Filterable.FilterError, message
    end
  end

  @spec apply_filters!(any, map | Keyword.t(), module, Keyword.t()) :: {any, map} | no_return
  def apply_filters!(queryable, params, module, opts \\ []) do
    case apply_filters(queryable, params, module, opts) do
      {:ok, result, values} -> {result, values}
      {:error, message} -> raise Filterable.FilterError, message
    end
  end

  @spec apply_filters(any, map | Keyword.t(), module, Keyword.t()) ::
          {:ok, any, map} | {:error, String.t()}
  def apply_filters(queryable, params, module, opts \\ []) do
    with {:ok, values} <- filter_values(params, module, opts),
         {:ok, result} <- filters_result(queryable, values, module, opts),
         do: {:ok, result, values}
  end

  # Extracts the normalized value for each defined filter from `params`.
  # Filters whose value resolves to nil are omitted from the result map;
  # the first {:error, _} from Params.filter_value/2 aborts the reduction.
  @spec filter_values(map | Keyword.t(), module, Keyword.t()) :: {:ok, map} | {:error, String.t()}
  def filter_values(params, module, opts \\ []) do
    Utils.reduce_with(module.defined_filters, %{}, fn {filter_name, filter_opts}, acc ->
      # Merge order: defaults < per-filter options < call-site options.
      options =
        [param: filter_name]
        |> Keyword.merge(@default_options)
        |> Keyword.merge(filter_opts)
        |> Keyword.merge(opts)

      case Params.filter_value(params, options) do
        {:ok, nil} -> acc
        {:ok, val} -> Map.put(acc, filter_name, val)
        error = {:error, _} -> error
      end
    end)
  end

  # Applies each defined filter function to the queryable in turn.
  defp filters_result(queryable, filter_values, module, opts) do
    Utils.reduce_with(module.defined_filters, queryable, fn {filter_name, filter_opts},
                                                            queryable ->
      options = Keyword.merge(opts, filter_opts)
      value = Map.get(filter_values, filter_name)
      share = Keyword.get(options, :share)
      allow_nil = Keyword.get(options, :allow_nil)
      has_value = value != nil

      # A filter runs when it has a value (or explicitly allows nil);
      # the 3-arg form additionally receives the shared :share payload.
      try do
        cond do
          (allow_nil || has_value) && share ->
            apply(module, filter_name, [queryable, value, share])

          allow_nil || has_value ->
            apply(module, filter_name, [queryable, value])

          true ->
            queryable
        end
      rescue
        # A filter clause that doesn't match its input is silently skipped.
        # NOTE(review): this also swallows FunctionClauseError raised from
        # *inside* a filter body — confirm that is acceptable.
        FunctionClauseError -> queryable
      end
    end)
  end

  # Expands the `filterable` macro: records options, resolves the filters
  # module (explicit or generated under __MODULE__.Filterable), and defines
  # it when a do-block was given.
  defp filterable(module, block, opts) do
    quote do
      @filter_options unquote(opts)
      @filters_module unquote(module) || Module.concat([__MODULE__, Filterable])

      if unquote(is_tuple(block)) do
        Filterable.define_module(@filters_module, do: unquote(block))
      end
    end
  end
end
|
lib/filterable.ex
| 0.79858
| 0.738763
|
filterable.ex
|
starcoder
|
defmodule FinanceTS.Adapters.Yahoo do
  @moduledoc """
  An Adapter for Yahoo Finance
  Homepage: https://finance.yahoo.com/
  API Docs: (The api is not officially documented)
  """
  @behaviour FinanceTS.Adapter

  # Every entry here must have a matching convert_resolution_to_parameter/1
  # clause below, otherwise get_stream/3 raises FunctionClauseError instead
  # of the descriptive error from check_resolution/1.
  @supported_resolutions [
    :minute,
    {:minute, 2},
    {:minute, 5},
    {:minute, 15},
    {:minute, 30},
    {:minute, 90},
    :hour,
    :day,
    {:day, 5},
    :week,
    :month,
    {:month, 3}
  ]

  use Tesla

  plug(Tesla.Middleware.BaseUrl, "https://query1.finance.yahoo.com/v8/finance")
  plug(Tesla.Middleware.JSON)

  @doc """
  Fetches an OHLCV series for `stock_ticker` at the given `resolution`.

  Returns `{:ok, stream, symbol, currency, exchange_name}` on success and
  `{:error, reason}` on API or transport errors. Raises if `resolution` is
  not one of `@supported_resolutions`.
  """
  def get_stream(stock_ticker, resolution, _opts \\ []) do
    check_resolution(resolution)
    interval_param = convert_resolution_to_parameter(resolution)

    # Yahoo limits how far back intraday data is available, so the request
    # range shrinks as the resolution gets finer.
    range =
      case resolution do
        :minute -> "7d"
        {:minute, _} -> "1mo"
        :hour -> "1mo"
        :day -> "10y"
        :week -> "10y"
        _ -> "max"
      end

    case get("/chart/#{stock_ticker}?range=#{range}&interval=#{interval_param}&events=history,div,splits") do
      {:ok, %{body: %{"chart" => %{"error" => nil, "result" => [result]}}}} ->
        cast_finance_meta(result)

      {:ok, %{body: %{"chart" => %{"error" => error}}}} ->
        {:error, error}

      {:error, error} ->
        {:error, error}
    end
  end

  # Private functions

  # Zips the parallel timestamp/quote arrays into a lazy stream of
  # [timestamp, open, high, low, close, volume] rows.
  defp cast_finance_meta(%{"indicators" => %{"quote" => [quotes]}, "meta" => meta, "timestamp" => timestamps}) do
    %{"symbol" => symbol, "currency" => currency, "exchangeName" => exchange_name} = meta

    stream =
      [timestamps, quotes["open"], quotes["high"], quotes["low"], quotes["close"], quotes["volume"]]
      |> Stream.zip()
      |> Stream.map(fn {t, o, h, l, c, v} -> [t, o, h, l, c, v] end)

    {:ok, stream, symbol, currency, exchange_name}
  end

  # Fallback for responses that carry metadata but no price points.
  defp cast_finance_meta(%{"meta" => %{"symbol" => symbol, "currency" => currency, "exchangeName" => exchange_name}}) do
    {:ok, [], symbol, currency, exchange_name}
  end

  defp convert_resolution_to_parameter(:minute), do: "1m"
  defp convert_resolution_to_parameter({:minute, 2}), do: "2m"
  defp convert_resolution_to_parameter({:minute, 5}), do: "5m"
  defp convert_resolution_to_parameter({:minute, 15}), do: "15m"
  defp convert_resolution_to_parameter({:minute, 30}), do: "30m"
  # Fix: previously returned "30m", silently downgrading 90-minute candles.
  defp convert_resolution_to_parameter({:minute, 90}), do: "90m"
  defp convert_resolution_to_parameter(:hour), do: "1h"
  defp convert_resolution_to_parameter(:day), do: "1d"
  # Fix: {:day, 5} is advertised in @supported_resolutions but had no
  # clause, so requesting it raised FunctionClauseError.
  defp convert_resolution_to_parameter({:day, 5}), do: "5d"
  defp convert_resolution_to_parameter(:week), do: "1wk"
  defp convert_resolution_to_parameter(:month), do: "1mo"
  defp convert_resolution_to_parameter({:month, 3}), do: "3mo"

  defp check_resolution(r) when r in @supported_resolutions, do: nil
  defp check_resolution(r), do: raise("Resolution #{inspect(r)} not supported. Use one of the following: #{inspect(@supported_resolutions)}.")
end
|
lib/finance_ts/adapters/yahoo.ex
| 0.715325
| 0.532425
|
yahoo.ex
|
starcoder
|
defmodule Overpex.Parser.JSON do
  @moduledoc """
  Provides functions to parse JSON response from Overpass API
  """
  alias Overpex.Overpass.{Node, Relation, RelationMember, Tag, Way}
  alias Overpex.Response

  @doc """
  Parses JSON response from Overpass API
  ## Return values
  Returns `{:ok, response}`, where `response` is an `Overpex.Response` struct. Returns `{:error, error}` if the JSON is empty or invalid, where `error` is a String describing the error.
  """
  @spec parse(String.t()) :: {:ok, %Response{}} | {:error, String.t()}
  def parse(response)

  def parse(response) when is_binary(response) do
    with {:ok, json} <- Poison.decode(response),
         %{"elements" => elements} <- Enum.into(json, %{}),
         elems <- Enum.map(elements, fn node -> Enum.into(node, %{}) end) do
      {:ok,
       %Response{
         nodes: parse_nodes(elems),
         ways: parse_ways(elems),
         relations: parse_relations(elems)
       }}
    else
      # Different Poison versions report invalid JSON with different shapes.
      {:error, :invalid, _} -> error_response("Error parsing JSON", response)
      {:error, {:invalid, _, _}} -> error_response("Error parsing JSON", response)
      %{} -> error_response("No elements to parse in response", response)
      error -> error_response("Error parsing JSON: #{inspect(error)}", inspect(response))
    end
  end

  def parse(response) do
    error_response("Invalid response", inspect(response))
  end

  defp error_response(message, response) do
    {:error, "#{message}\n\nResponse received: #{response}"}
  end

  defp parse_nodes(elems) do
    elems
    |> Enum.filter(fn %{"type" => type} -> type == "node" end)
    |> Enum.map(fn node ->
      %Node{
        id: node["id"],
        lat: node["lat"],
        lon: node["lon"],
        tags: node["tags"] |> parse_tags()
      }
    end)
  end

  defp parse_ways(elems) do
    elems
    |> Enum.filter(fn %{"type" => type} -> type == "way" end)
    |> Enum.map(fn way ->
      %Way{
        id: way["id"],
        nodes: way["nodes"],
        tags: way["tags"] |> parse_tags()
      }
    end)
  end

  defp parse_relations(elems) do
    elems
    |> Enum.filter(fn %{"type" => type} -> type == "relation" end)
    |> Enum.map(fn relation ->
      %Relation{
        id: relation["id"],
        tags: relation["tags"] |> parse_tags(),
        members: relation["members"] |> parse_relation_members()
      }
    end)
  end

  # Fix: Overpass omits the "members" key for relations without members, so
  # the map access above yields nil — treat it as empty instead of crashing
  # in Enum.map/2.
  defp parse_relation_members(nil), do: []

  defp parse_relation_members(collection) do
    collection |> Enum.map(&parse_relation_member/1)
  end

  defp parse_relation_member(%{"type" => type, "ref" => ref, "role" => role}) do
    %RelationMember{
      type: type,
      ref: ref,
      role: role
    }
  end

  # Fix: elements without tags omit the "tags" key entirely (nil here).
  defp parse_tags(nil), do: []
  defp parse_tags([]), do: []

  # `collection` is a %{key => value} map; enumerating yields {k, v} pairs.
  defp parse_tags(collection) do
    collection
    |> Enum.map(&parse_tag/1)
    |> Enum.sort(fn a, b -> a.key <= b.key end)
  end

  defp parse_tag({key, value}) do
    %Tag{key: key, value: value}
  end
end
|
lib/overpex/parser/json.ex
| 0.851722
| 0.448487
|
json.ex
|
starcoder
|
defmodule Ash.Filter.Predicate.In do
  @moduledoc "A predicate for a value being in a list of provided values"
  defstruct [:field, :values]

  use Ash.Filter.Predicate

  alias Ash.Error.Query.InvalidFilterValue
  alias Ash.Filter.Expression
  alias Ash.Filter.Predicate
  alias Ash.Filter.Predicate.Eq

  # An empty list can never match anything, but is still a valid predicate.
  def new(_resource, attribute, []),
    do: {:ok, %__MODULE__{field: attribute.name, values: []}}

  # A single-element `in` collapses to a plain equality predicate.
  def new(resource, attribute, [value]), do: Eq.new(resource, attribute, value)

  # Casts each candidate to the attribute's type, halting on the first value
  # that fails to cast. Note: casted values are prepended, so the stored list
  # is reversed relative to the input — membership semantics are
  # order-independent, so match?/3 is unaffected.
  def new(_resource, attribute, values) when is_list(values) do
    Enum.reduce_while(values, {:ok, %__MODULE__{field: attribute.name, values: []}}, fn value,
                                                                                        {:ok,
                                                                                         predicate} ->
      case Ash.Type.cast_input(attribute.type, value) do
        {:ok, casted} ->
          {:cont, {:ok, %{predicate | values: [casted | predicate.values]}}}

        _ ->
          {:halt,
           {:error,
            InvalidFilterValue.exception(
              value: value,
              context: %__MODULE__{field: attribute.name, values: values},
              message: "Could not be casted to type #{inspect(attribute.type)}"
            )}}
      end
    end)
  end

  # Non-list input is rejected outright.
  def new(_resource, attribute, values) do
    {:error,
     InvalidFilterValue.exception(
       value: values,
       context: %__MODULE__{field: attribute.name, values: values},
       message: "Expected a list"
     )}
  end

  # Membership test using the type's own notion of equality.
  def match?(%{values: predicate_values}, value, type) do
    Enum.any?(predicate_values, fn predicate_value ->
      type.equal?(predicate_value, value)
    end)
  end

  # For simplification, an `in` is rewritten as a chain of OR'd equalities.
  def compare(%__MODULE__{} = left, %__MODULE__{} = right) do
    {:simplify, into_or_equals(left), into_or_equals(right)}
  end

  # NOTE(review): this clause returns a 2-tuple while the clause above
  # returns a 3-tuple — confirm both shapes are accepted by the caller.
  def compare(%__MODULE__{} = in_expr, _) do
    {:simplify, into_or_equals(in_expr)}
  end

  def compare(_, _), do: :unknown

  # Builds `field == v1 or field == v2 or ...`; nil when values is empty.
  defp into_or_equals(%{field: field, values: values}) do
    Enum.reduce(values, nil, fn value, expression ->
      Expression.new(:or, expression, %Eq{field: field, value: value})
    end)
  end

  defimpl Inspect do
    import Inspect.Algebra

    def inspect(predicate, opts) do
      concat([
        Predicate.add_inspect_path(opts, predicate.field),
        " in ",
        to_doc(predicate.values, opts)
      ])
    end
  end
end
|
lib/ash/filter/predicate/in.ex
| 0.819605
| 0.448064
|
in.ex
|
starcoder
|
defmodule Telnyx.Messages do
  @moduledoc """
  Send a message with `create/2`, and retrieve a message with `retrieve/2`.
  """
  alias Telnyx.Client

  @doc """
  Sends a message.
  ## Examples
  ```
  api_key = "YOUR_API_KEY"
  %{
    from: "+18665552368", # Your Telnyx number
    to: "+18445552367",
    text: "Hello, World!"
  }
  |> Telnyx.Messages.create(api_key)
  ```
  Example response:
  ```
  {:ok,
   %{
     "carrier" => "Verizon",
     "cost" => %{"amount" => "1.23", "currency" => "USD"},
     "created_at" => "2019-01-23T18:10:02.574Z",
     "direction" => "outbound",
     "errors" => [],
     "from" => "+18665552368",
     "id" => "3fa85f64-5717-4562-b3fc-2c963f66afa6",
     "line_type" => "Wireless",
     "parts" => 1,
     "record_type" => "message",
     "text" => "Hello world",
     "to" => [
       %{
         "address" => "+18665550001",
         "status" => "queued",
         "updated_at" => "2019-01-23T18:10:02.574Z"
       }
     ],
     "type" => "sms",
     "updated_at" => "2019-01-23T18:10:02.574Z",
     "use_profile_webhooks" => false,
     "valid_until" => "2019-01-23T18:10:02.574Z",
     "webhook_failover_url" => "https://www.example.com/callbacks",
     "webhook_url" => "https://www.example.com/hooks"
   }
  }
  ```
  See https://developers.telnyx.com/docs/api/v2/messaging/Messages
  """
  @spec create(map, String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def create(params = %{}, api_key) do
    Client.post(api_key, params, "/messages")
  end

  @doc """
  Same as `create/2` but specifically for sending long-code messages.
  See the documentation for `create/2` for example.
  `create/2` supports long-code already, if the `from` param is a long code number.
  See https://developers.telnyx.com/docs/api/v2/messaging/Messages#createLongCodeMessage
  """
  @spec create_long_code(map, String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def create_long_code(params = %{}, api_key) do
    Client.post(api_key, params, "/messages/long_code")
  end

  @doc """
  Same as `create/2` but specifically for sending messages from a number pool.
  The option pool must be turned on in the messaging profile.
  `create/2` supports sending from a number pool already with the same params (ensuring that `messaging_profile_id` is included, but omitting the `from`).
  ## Examples
  ```
  api_key = "YOUR_API_KEY"
  %{
    messaging_profile_id: "uuid",
    to: "+18445552367",
    text: "Hello, World!"
  }
  |> Telnyx.Messages.create(api_key)
  ```
  Example response:
  ```
  {:ok,
   %{
     "carrier" => "Verizon",
     "cost" => %{"amount" => "1.23", "currency" => "USD"},
     "created_at" => "2019-01-23T18:10:02.574Z",
     "direction" => "outbound",
     "errors" => [],
     "from" => "+18665552368",
     "id" => "3fa85f64-5717-4562-b3fc-2c963f66afa6",
     "line_type" => "Wireless",
     "parts" => 1,
     "record_type" => "message",
     "text" => "<NAME>",
     "to" => [
       %{
         "address" => "+18665550001",
         "status" => "queued",
         "updated_at" => "2019-01-23T18:10:02.574Z"
       }
     ],
     "type" => "sms",
     "updated_at" => "2019-01-23T18:10:02.574Z",
     "use_profile_webhooks" => false,
     "valid_until" => "2019-01-23T18:10:02.574Z",
     "webhook_failover_url" => "https://www.example.com/callbacks",
     "webhook_url" => "https://www.example.com/hooks"
   }
  }
  ```
  See https://developers.telnyx.com/docs/api/v2/messaging/Messages#createNumberPoolMessage
  """
  @spec create_from_number_pool(map, String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def create_from_number_pool(params = %{}, api_key) do
    Client.post(api_key, params, "/messages/number_pool")
  end

  @doc """
  Same as `create/2` but specifically for sending short-code messages.
  `create/2` supports short-code already with the same params.
  See the `create/2` documentation for example.
  See https://developers.telnyx.com/docs/api/v2/messaging/Messages#createShortCodeMessage
  """
  @spec create_short_code(map, String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def create_short_code(params = %{}, api_key) do
    Client.post(api_key, params, "/messages/short_code")
  end

  @doc """
  Retrieves a message.
  ## Examples
  ```
  api_key = "YOUR_API_KEY"
  Telnyx.Messages.retrieve("uuid", api_key)
  ```
  Example response:
  ```
  {:ok,
   %{
     "carrier" => "Verizon",
     "cost" => %{"amount" => "1.23", "currency" => "USD"},
     "created_at" => "2019-01-23T18:10:02.574Z",
     "direction" => "outbound",
     "errors" => [],
     "from" => "+18445550001",
     "id" => "uuid",
     "line_type" => "Wireless",
     "parts" => 1,
     "record_type" => "message",
     "text" => "Hello, World!",
     "to" => [
       %{
         "address" => "+18665550001",
         "status" => "queued",
         "updated_at" => "2019-01-23T18:10:02.574Z"
       }
     ],
     "type" => "sms",
     "updated_at" => "2019-01-23T18:10:02.574Z",
     "use_profile_webhooks" => false,
     "valid_until" => "2019-01-23T18:10:02.574Z",
     "webhook_failover_url" => "https://www.example.com/callbacks",
     "webhook_url" => "https://www.example.com/hooks"
   }
  }
  ```
  See https://developers.telnyx.com/docs/api/v2/messaging/Messages#retrieveMessage
  """
  @spec retrieve(String.t(), String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def retrieve(uuid, api_key), do: Client.get(api_key, "/messages/#{uuid}")

  @doc """
  This call disappeared out of the v2 docs, so it has been deprecated.
  You can use an alphanumeric key instead of a phone number in `create/2`.
  See `create/2` documentation for example.
  """
  @deprecated "Use create/2 instead"
  @spec create_alphanumeric(map, String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def create_alphanumeric(params = %{}, api_key) do
    Client.post(api_key, params, "/messages/alphanumeric_sender_id")
  end
end
|
lib/telnyx/messages.ex
| 0.852782
| 0.787053
|
messages.ex
|
starcoder
|
defmodule Instruments.FastCounter do
  @moduledoc false
  # A Faster than normal counter.
  # Builds one ETS table per scheduler in the system and sends increment / decrement writes to the local
  # scheduler. Statistics are reported per scheduler once every `fast_counter_report_interval` milliseconds.

  @table_prefix :instruments_counters
  # Upper bound on schedulers we pre-generate table_name/1 clauses for.
  @max_tables 128
  # NOTE(review): read at compile time, so runtime config changes to
  # :fast_counter_report_interval have no effect — confirm that is intended
  # (Application.compile_env/3 would make the intent explicit).
  @fast_counter_report_interval Application.get_env(
                                  :instruments,
                                  :fast_counter_report_interval,
                                  10_000
                                )
  @compile {:inline, get_table_key: 2}

  use GenServer

  def start_link(_ \\ []) do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  # Creates one public ETS table per scheduler so concurrent writers rarely
  # contend, then schedules the first periodic report.
  def init(:ok) do
    table_count = :erlang.system_info(:schedulers)

    for scheduler_id <- 1..table_count do
      :ets.new(table_name(scheduler_id), [:named_table, :public, :set])
    end

    reporter_module = Application.get_env(:instruments, :reporter_module, Instruments.Statix)
    schedule_report()
    {:ok, {reporter_module, table_count}}
  end

  ## Public

  @spec increment(iodata) :: :ok
  @spec increment(iodata, integer) :: :ok
  @spec increment(iodata, integer, Statix.options()) :: :ok
  # Bumps the counter in the calling scheduler's table; the {table_key, 0}
  # default inserts the row on first use.
  def increment(name, amount \\ 1, options \\ []) do
    table_key = get_table_key(name, options)
    :ets.update_counter(current_table(), table_key, amount, {table_key, 0})
    :ok
  end

  @spec decrement(iodata) :: :ok
  @spec decrement(iodata, integer) :: :ok
  @spec decrement(iodata, integer, Statix.options()) :: :ok
  def decrement(name, amount \\ 1, options \\ []),
    do: increment(name, -amount, options)

  ## GenServer callbacks

  # Periodic flush: snapshot each scheduler table, subtract exactly the
  # snapshotted amount (so increments racing with the snapshot survive into
  # the next report), aggregate per metric, and forward to the reporter.
  def handle_info(:report, {reporter_module, table_count} = state) do
    # dump the scheduler's data and decrement its
    # counters by the amount we dumped.
    dump_and_flush_data = fn scheduler_id ->
      table_name = table_name(scheduler_id)
      table_data = :ets.tab2list(table_name)

      Enum.each(table_data, fn {key, val} ->
        :ets.update_counter(table_name, key, -val)
      end)

      table_data
    end

    # aggregates each scheduler's table into one metric
    aggregate_stats = fn {key, val}, acc ->
      Map.update(acc, key, val, &(&1 + val))
    end

    1..table_count
    |> Enum.flat_map(dump_and_flush_data)
    |> Enum.reduce(%{}, aggregate_stats)
    |> Enum.each(&report_stat(&1, reporter_module))

    schedule_report()
    {:noreply, state}
  end

  ## Private

  # Sorts multi-element tag lists so equivalent tag sets in different order
  # share one counter row; empty/single-tag lists need no normalization.
  defp get_table_key(name, []) do
    {name, []}
  end

  defp get_table_key(name, options) do
    case Keyword.get(options, :tags) do
      [] ->
        {name, options}

      [_] ->
        {name, options}

      tags when is_list(tags) ->
        {name, Keyword.replace!(options, :tags, Enum.sort(tags))}

      _ ->
        {name, options}
    end
  end

  # Zero deltas produce no reporter traffic.
  defp report_stat({_key, 0}, _),
    do: :ok

  defp report_stat({{metric_name, opts}, value}, reporter_module) when value < 0 do
    # this -value looks like a bug, but isn't. Since we're aggregating
    # counters, the value could be negative, but the decrement
    # operation takes positive values.
    reporter_module.decrement(metric_name, -value, opts)
  end

  defp report_stat({{metric_name, opts}, value}, reporter_module) when value > 0 do
    reporter_module.increment(metric_name, value, opts)
  end

  defp schedule_report() do
    Process.send_after(self(), :report, @fast_counter_report_interval)
  end

  defp current_table() do
    table_name(:erlang.system_info(:scheduler_id))
  end

  # Unrolled clauses so table_name/1 resolves to a literal atom at compile
  # time — no runtime atom creation from interpolation.
  for scheduler_id <- 1..@max_tables do
    defp table_name(unquote(scheduler_id)) do
      unquote(:"#{@table_prefix}_#{scheduler_id}")
    end
  end
end
|
lib/fast_counter.ex
| 0.810366
| 0.425725
|
fast_counter.ex
|
starcoder
|
defmodule ElixirMock.Matchers do
  @moduledoc """
  Contains utility functions that allow predicate-based matching against arguments passed to mock function calls.
  The `ElixirMock.assert_called/1` and `ElixirMock.refute_called/1` macros can take matchers in place of literal arguments
  in function call verifications. A matcher is any tuple of the form `{:matches, &matcher_fn/1}` where `matcher_fn` is a
  function of arity 1 that returns a boolean given a value. That value is an argument passed to a call to the mock
  function in the same position as the matcher as declared in the call verification statement.
  ## Example
  ```
  defmodule MyTest do
    use ExUnit.Case
    require ElixirMock
    import ElixirMock
    defmodule MyModule do
      def add(a, b), do: a + b
    end
    test "add was called with an even number as the first argument" do
      an_even_number = fn(number) -> rem(number, 2) == 0 end
      mock = mock_of MyModule
      mock.add(4, 3)
      assert_called mock.add({:matches, an_even_number}, 3) # passes
      assert_called mock.add(4, {:matches, an_even_number}) # fails!
    end
  end
  ```
  The `ElixirMock.Matchers` module contains functions for common matching use cases like matching any argument,
  matching only number arguments, e.t.c. See this module's [functions list](#summary) for a list of in-built matchers.
  ## Deep matching with maps
  When a function under test is expecting map arguments, matchers can be used in the match expression for some or all
  of the map's keys. When a value of a key in an call verification statement is found to be a matcher expression, the
  matcher expression is evaluated with the corresponding value in the actual map argument. If all present matchers in the
  expected map evaluate to `true` for the corresponding values in the actual map and the values of all the other keys
  in the expected map match the values of the same keys in the actual map, the call verification statement passes.
  ```
  defmodule MyTest do
    use ExUnit.Case
    require ElixirMock
    import ElixirMock
    alias ElixirMock.Matchers
    defmodule MyModule do
      def echo(what_to_say), do: IO.puts(inspect(what_to_say))
    end
    test "echo/1 should have been called with the correct map" do
      mock = mock_of MyModule
      mock.echo %{a: 1, b: :something}
      assert_called mock.echo(%{a: Matchers.any(:int), b: Matchers.any(:atom)}) # passes
    end
  end
  ```
  __Also, note that:__
  - All the [matchers](#summary) availabe in this module can be used within maps in this fashion.
  - Matching on map values with matchers can be done with nested maps of arbitrary depth.
  """

  @doc """
  A matcher that matches any argument.
  Use it when you don't care about some or all arguments in a function call assertion. Also, since `ElixirMock.assert_called/1`
  and `ElixirMock.refute_called/1` will not match function calls with different number of arguments from what the assertion
  specifies, the `any/0` matcher is necessary to be able do assertions like the one in the example below
  Example:
  ```
  defmodule MyTest do
    use ExUnit.Case
    require ElixirMock
    import ElixirMock
    alias ElixirMock.Matchers
    defmodule MyModule do
      def echo(arg), do: IO.puts(arg)
    end
    test "echo was called with any argument" do
      mock = mock_of MyModule
      mock.echo("hello")
      # If just want to verify that '&echo/1' was called but I don't care about the arguments:
      assert_called mock.echo(Matchers.any) # passes
      # But this will not pass:
      assert_called mock.echo # fails!
    end
  end
  ```
  """
  def any, do: {:matches, ElixirMock.Matchers.InBuilt.any(:_)}

  @doc """
  A matcher that matches an argument only if it is of a specified type.
  Supported types are `:atom`, `:binary`, `:boolean`, `:float`, `:function`, `:integer`, `:list`, `:map`, `:number`,
  `:pid`, `:tuple`, any struct (e.g., `%Person{}`), and `:_` (equivalent to using `any/0`). An `ArgumentError` is thrown
  if an argument not in this list is passed to the function.
  Example:
  ```
  defmodule MyTest do
    use ExUnit.Case
    require ElixirMock
    import ElixirMock
    alias ElixirMock.Matchers
    defmodule MyModule do
      def echo(arg), do: IO.puts(arg)
    end
    test "echo was called with a float" do
      mock = mock_of MyModule
      mock.echo(10.5)
      assert_called mock.echo(Matchers.any(:float)) # passes
      assert_called mock.echo(Matchers.any(:integer)) # fails!
    end
  end
  ```
  """
  def any(type), do: {:matches, ElixirMock.Matchers.InBuilt.any(type)}

  @doc """
  A get-out-of-jail matcher that helps you literally match arguments that look like matchers
  When ElixirMock finds an argument that looks like `{:matches, other_thing}` in a function call verification, it will
  assume that `other_thing` is a function that is supposed to be used to match an argument. In the rare case that you
  need to match an argument that is literally `{:matches, other_thing}`, use this matcher. It will tell ElixirMock
  not to think about it as a matcher expression but rather as a literal value.
  Example:
  ```
  defmodule MyTest do
    use ExUnit.Case
    require ElixirMock
    import ElixirMock
    alias ElixirMock.Matchers
    defmodule MyModule do
      def echo(arg), do: IO.puts(arg)
    end
    test "echo was called with a float" do
      mock = mock_of MyModule
      mock.echo({:matches, 10})
      assert_called mock.echo(Matchers.literal({:matches, 10})) # passes
      assert_called mock.echo({:matches, 10}) # will blow up!
    end
  end
  ```
  """
  def literal(value), do: {:__elixir_mock__literal, value}

  @doc false
  # True when any recorded call has the expected function name AND arguments
  # that satisfy the expected args (literals, matchers, or deep-matched maps).
  def find_call({expected_fn, expected_args}, calls) do
    calls
    |> Enum.filter(fn {called_fn, _} -> called_fn == expected_fn end)
    |> Enum.any?(fn {_fn_name, args} -> match_call_args(expected_args, args) end)
  end

  # Arity mismatch can never match.
  defp match_call_args(expected_args, actual_args) when length(actual_args) != length(expected_args), do: false

  defp match_call_args(expected_args, actual_args) do
    Enum.zip(expected_args, actual_args)
    |> Enum.all?(fn {expected, actual} -> match_arg_pair(expected, actual) end)
  end

  # Dispatch on the shape of the *expected* argument: explicit literal tag,
  # matcher tuple, map (deep match), or plain literal comparison.
  defp match_arg_pair(expected, actual) do
    case expected do
      {:__elixir_mock__literal, explicit_literal} -> explicit_literal == actual
      {:matches, matcher} -> evaluate_matcher(matcher, actual)
      %{} -> deep_match(expected, actual)
      implicit_literal -> implicit_literal == actual
    end
  end

  defp evaluate_matcher(matcher, actual) when is_function(matcher) do
    matcher_arity = :erlang.fun_info(matcher)[:arity]
    error_message = "Use of bad function matcher '#{inspect matcher}' in match expression.
  Argument matchers must be functions with arity 1. This function has arity #{matcher_arity}"
    if matcher_arity != 1 do
      raise ArgumentError, message: error_message
    end
    matcher.(actual)
  end

  # Fix: the non-function matcher is the FIRST parameter (mirroring the
  # clause above). The previous version bound the second parameter — the
  # actual call argument — so the error message inspected the wrong value.
  defp evaluate_matcher(non_function_matcher, _actual) do
    error_message = "Use of non-function matcher '#{inspect non_function_matcher}' in match expression.
  Argument matchers must be in the form {:matches, &matcher_function/1}. If you expected your stubbed function to have
  been called with literal {:matches, #{inspect non_function_matcher}}, use ElixirMock.Matchers.literal({:matches, #{inspect non_function_matcher}})"
    raise ArgumentError, message: error_message
  end

  # A map expectation can only match a map actual.
  defp deep_match(%{} = _expected, actual) when not is_map(actual), do: false

  # Key sets must coincide (both directions) and every expected value must
  # match the actual value — recursively, so matchers work at any depth.
  defp deep_match(%{} = expected, %{} = actual) do
    Map.keys(actual)
    |> Enum.all?(&Map.has_key?(expected, &1))
    |> Kernel.and(all_kv_pairs_are_equal(expected, actual))
  end

  defp all_kv_pairs_are_equal(expected, actual) do
    expected
    |> Map.to_list
    |> Enum.all?(fn {expected_key, expected_val} ->
      if Map.has_key?(actual, expected_key),
        do: match_arg_pair(expected_val, Map.get(actual, expected_key)),
        else: false
    end)
  end
end
|
lib/matchers/matchers.ex
| 0.943393
| 0.909023
|
matchers.ex
|
starcoder
|
defmodule OliWeb.RevisionHistory.ReingoldTilford do
# Reingold-Tilford algorithm for drawing trees
@moduledoc false
@node_height 30
@node_y_separation 10
@total_y_distance @node_height + @node_y_separation
@node_x_separation 50
defmodule Node do
  @moduledoc false
  # A laid-out tree node: x/y are canvas coordinates, `modifier` is the
  # pending y-shift to apply to descendants (Reingold-Tilford), `type` is
  # :leaf or :subtree, and `value` keeps the original revision-tree node.
  defstruct [:x, :y, :label, :children, :modifier, :type, :height, :width, :level, :value]
end

defmodule Line do
  @moduledoc false
  # Endpoints of a segment on the canvas — presumably a parent/child
  # connector drawn by code outside this chunk (verify against renderer).
  defstruct [:x1, :x2, :y1, :y2]
end
@doc """
Returns all nodes in a ReingoldTilford tree.
"""
def nodes(%{children: children} = node) do
[node | Enum.flat_map(children, &nodes/1)]
end
@doc """
Returns the dimensions of a canvas to render all given
ReingoldTilford nodes.
"""
def dimensions(nodes) do
node_y = Enum.max_by(nodes, fn node -> node.y + node.height end)
node_x = Enum.max_by(nodes, fn node -> node.x + node.width end)
{node_x.x + node_x.width, node_y.y + node_y.height}
end
@doc """
Builds a ReingoldTilfolrd tree.
The given tree is in the shape `{value, [child]}`.
The function receives the value and returns the
node label. The label is used to compute its width.
"""
def build(root, nodes) do
change_representation(root, nodes, 0)
|> calculate_initial_y(0, [])
|> ensure_children_inside_screen()
|> put_final_y_values(0)
|> put_x_position()
end
defp change_representation(node, nodes, level) do
children =
Enum.map(node.children, &change_representation(Map.get(nodes, &1), nodes, level + 1))
label = Integer.to_string(node.revision.id)
%Node{
x: 0,
y: 0,
label: label,
children: children,
modifier: 0,
type: if(children == [], do: :leaf, else: :subtree),
height: @node_height,
width: max(String.length(label) * 10 + 2, 42),
level: level,
value: node
}
end
defp calculate_initial_y(%{children: children} = node, previous_sibling, top_siblings) do
{_, children} =
children
|> Enum.reduce({0, []}, fn n, {prev_sibling, nodes} ->
new_node = calculate_initial_y(n, prev_sibling, nodes)
{new_node.y, [new_node | nodes]}
end)
{first_child, last_child} =
if node.type != :leaf do
[last_child | _] = children
[first_child | _] = Enum.reverse(children)
{first_child, last_child}
else
{nil, nil}
end
new_node =
case {node_type(node), top_siblings} do
{:leaf, []} ->
%{node | y: 0}
{:leaf, _} ->
%{node | y: previous_sibling + @total_y_distance}
{:small_subtree, []} ->
%{node | y: first_child.y}
{:small_subtree, _} ->
%{
node
| y: previous_sibling + @total_y_distance,
modifier: previous_sibling + @total_y_distance - first_child.y
}
{:big_subtree, []} ->
mid = (last_child.y + first_child.y) / 2
%{node | y: mid}
{:big_subtree, _} ->
mid = (last_child.y + first_child.y) / 2
%{
node
| y: previous_sibling + @total_y_distance,
modifier: previous_sibling + @total_y_distance - mid
}
end
if children != [] and top_siblings != [] do
fix_sibling_conflicts(%{new_node | children: children}, top_siblings)
else
%{new_node | children: children}
end
end
defp node_type(node) do
cond do
node.type == :leaf -> :leaf
match?([_], node.children) -> :small_subtree
true -> :big_subtree
end
end
defp put_final_y_values(%{children: children} = node, mod) do
new_children = Enum.map(children, &put_final_y_values(&1, node.modifier + mod))
%{node | y: node.y + mod, children: new_children}
end
def fix_sibling_conflicts(node, [top_most_sibling | other_siblings]) do
top = search_contour({node, %{}, 1, 0}, :top)
bottom = search_contour({top_most_sibling, %{}, 1, 0}, :bottom)
distance =
[Map.values(top), Map.values(bottom)]
|> Enum.zip()
|> Enum.reduce(0, fn {t, b}, acc ->
if t - b + acc < @total_y_distance do
@total_y_distance - (t - b)
else
acc
end
end)
if distance > 0 do
new_node = %{
node
| y: node.y + distance,
modifier: node.modifier + distance
}
fix_sibling_conflicts(new_node, other_siblings)
else
fix_sibling_conflicts(node, other_siblings)
end
end
def fix_sibling_conflicts(node, []), do: node
def search_contour({node, contour, level, mod_sum}, :top) do
result =
if Map.has_key?(contour, level) do
Map.put(contour, level, min(contour[level], node.y + mod_sum))
else
Map.put(contour, level, node.y + mod_sum)
end
Enum.reduce(
node.children,
result,
&search_contour({&1, &2, level + 1, mod_sum + node.modifier}, :top)
)
end
def search_contour({node, contour, level, mod_sum}, :bottom) do
result =
if Map.has_key?(contour, level) do
Map.put(contour, level, max(contour[level], node.y + mod_sum))
else
Map.put(contour, level, node.y + mod_sum)
end
Enum.reduce(
node.children,
result,
&search_contour({&1, &2, level + 1, mod_sum + node.modifier}, :bottom)
)
end
defp ensure_children_inside_screen(node) do
result =
{node, %{}, 1, 0}
|> search_contour(:top)
|> Enum.reduce(0, fn {_, value}, acc ->
if value + acc < 0, do: value * -1, else: acc
end)
%{node | y: node.y + result, modifier: node.modifier + result}
end
defp put_x_position(%{children: children} = tree) do
max_width = find_max_width_by_level(tree, %{})
children = Enum.map(children, &put_x_position(&1, tree.width + @node_x_separation, max_width))
%{tree | x: 0, children: children}
end
defp put_x_position(%{children: children, level: level} = node, position, max_width) do
children =
Enum.map(
children,
&put_x_position(&1, max_width[level] + position + @node_x_separation, max_width)
)
%{node | x: position, children: children}
end
defp find_max_width_by_level(node, max_values) do
max_values =
if Map.has_key?(max_values, node.level) do
Map.put(max_values, node.level, max(max_values[node.level], node.width))
else
Map.put(max_values, node.level, node.width)
end
Enum.reduce(
node.children,
max_values,
&find_max_width_by_level(&1, &2)
)
end
@doc """
Returns the tree lines.
"""
def lines(%{children: children} = node) do
lines_to_children = lines_to_children(node)
aditional_lines =
cond do
[node] == children ->
[child | _] = children
line_from_parent(node, child)
match?([_ | _], children) ->
[child | _] = children
[vertical_line(node, child), line_from_parent(node, child)]
true ->
[]
end
children_lines = Enum.flat_map(children, &lines/1)
lines_to_children ++ aditional_lines ++ children_lines
end
defp line_from_parent(node, child) do
%Line{
x1: node.x + node.width,
x2: child.x - @node_x_separation / 2,
y1: node.y + node.height / 2,
y2: node.y + node.height / 2
}
end
defp vertical_line(%{children: children} = node, child) do
[top_most_child | _] = children
[bottom_most_child | _] = Enum.reverse(children)
%Line{
x1: child.x - @node_x_separation / 2,
x2: child.x - @node_x_separation / 2,
y1: top_most_child.y + node.height / 2,
y2: bottom_most_child.y + node.height / 2
}
end
defp lines_to_children(%{children: children} = node) do
Enum.map(children, fn n ->
%Line{
x1: n.x - div(@node_x_separation, 2),
x2: n.x,
y1: n.y + div(node.height, 2),
y2: n.y + div(node.height, 2)
}
end)
end
end
|
lib/oli_web/live/history/reingold_tifford.ex
| 0.778228
| 0.571468
|
reingold_tifford.ex
|
starcoder
|
defmodule Statistics.Distributions.Chisq do
  alias Statistics.Math
  alias Statistics.Math.Functions

  @moduledoc """
  Chi square distribution.
  Takes a *degrees of freedom* parameter.
  """

  @doc """
  The probability density function
  ## Examples
      iex> Statistics.Distributions.Chisq.pdf(1).(2)
      0.10377687435514868
  """
  @spec pdf(non_neg_integer) :: fun
  def pdf(df) do
    # Closed form: x^(df/2 - 1) * e^(-x/2) / (2^(df/2) * gamma(df/2)).
    fn x ->
      1 / (Math.pow(2, df / 2) * Functions.gamma(df / 2)) * Math.pow(x, df / 2 - 1) *
        Math.exp(-1 * x / 2)
    end
  end

  @doc """
  The cumulative density function
  ## Examples
      iex> Statistics.Distributions.Chisq.cdf(2).(2)
      0.6321205588285578
  """
  @spec cdf(non_neg_integer) :: fun
  def cdf(df) do
    # Regularized lower incomplete gamma function: P(df/2, x/2).
    fn x ->
      g = Functions.gamma(df / 2.0)
      b = Functions.gammainc(df / 2.0, x / 2.0)
      b / g
    end
  end

  @doc """
  The percentile-point function
  ## Examples
      iex> Statistics.Distributions.Chisq.ppf(1).(0.95)
      3.841458820694101
  """
  @spec ppf(non_neg_integer) :: fun
  def ppf(df) do
    fn x ->
      ppf_tande(x, df)
    end
  end

  # trial-and-error method which refines guesses
  # to arbitrary number of decimal places
  defp ppf_tande(x, df, precision \\ 14) do
    ppf_tande(x, df, 0, precision + 2, 0)
  end

  # Base case: all decimal places refined; `g` is the accepted guess.
  defp ppf_tande(_, _, g, precision, precision) do
    g
  end

  # Grows the guess in increments of 10^(2 - p); once the CDF overshoots the
  # target probability, recurse with a ten-times finer increment.
  defp ppf_tande(x, df, g, precision, p) do
    increment = 100 / Math.pow(10, p)
    guess = g + increment

    if x < cdf(df).(guess) do
      ppf_tande(x, df, g, precision, p + 1)
    else
      ppf_tande(x, df, guess, precision, p)
    end
  end

  @doc """
  Draw a random number from a Chi square distribution with specified degrees of freedom
  Uses the [rejection sampling method](https://en.wikipedia.org/wiki/Rejection_sampling)
  ## Examples
      iex> Statistics.Distributions.Chisq.rand(2)
      1.232433646523534767
  """
  @spec rand(non_neg_integer) :: number
  def rand(df) do
    # Rejection sampling over [0, 100): accept x with probability pdf(x);
    # recurses until a candidate is accepted.
    x = Math.rand() * 100

    if pdf(df).(x) > Math.rand() do
      x
    else
      # keep trying
      rand(df)
    end
  end
end
|
lib/statistics/distributions/chisq.ex
| 0.889873
| 0.609146
|
chisq.ex
|
starcoder
|
defmodule Md5 do
  @moduledoc """
  Provides methods for calculating a MD5 hash. This module implements the
  MD5 hashing algorithm (RFC 1321) in pure Elixir.
  """

  # `use Bitwise` is deprecated; import brings in &&&, |||, <<<, >>> and the
  # named bnot/1 and bxor/2 (the ~~~ and ^^^ operators are also deprecated).
  import Bitwise

  # Per-step left-rotation amounts, 16 per round, as defined in RFC 1321.
  @shift_constants {
    7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22,
    5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20,
    4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23,
    6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21
  }

  # Pre-defined initial values (A, B, C, D) of the message digest buffer.
  @buffer_preset {0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476}

  @doc """
  Used to calculate the constants of the 64-element table defined in the
  specification: `K[i] = floor(2^32 * |sin(i + 1)|)`.
  """
  def calc_constant(i) do
    overflow(trunc(:math.pow(2, 32) * abs(:math.sin(i + 1))))
  end

  @doc """
  Returns hex representation of a MD5 digest of a message of arbitrary size.
  """
  def digest(message) do
    x = hash(message)
    # The 128-bit state is serialized little-endian, matching MD5's
    # byte-order convention for the final digest.
    Base.encode16(<<x::little-unsigned-size(128)>>, case: :lower)
  end

  @doc """
  Returns hex representation of a MD5 digest of a file.
  """
  def digest_file(filepath) do
    digest(File.read!(filepath))
  end

  @doc """
  Returns MD5 hash of a message of arbitrary size, as a 128-bit integer
  with the A word in the low 32 bits.
  """
  def hash(message) do
    {a, b, c, d} = @buffer_preset
    padded_message = pad(message)
    process_message(padded_message, a, b, c, d)
  end

  @doc """
  Produces the padded message according to the MD5 specification: a single
  1 bit, zeros up to 448 bits mod 512, then the original length in bits as
  a 64-bit little-endian integer.
  """
  def pad(message) do
    msg_length = bit_size(message)
    num_of_zeros = 512 - rem(msg_length + 65, 512)
    # Message + 1-bit + zero padding + original size. Endianness modifiers
    # on the sub-byte fields are meaningless and have been dropped.
    <<message::binary, 1::size(1), 0::size(num_of_zeros), msg_length::little-size(64)>>
  end

  @doc """
  Digests the padded message in 512-bit chunks, threading the four 32-bit
  state words through each chunk. When no message remains, returns the
  concatenated 128-bit digest.
  """
  def process_message(message, a, b, c, d) when bit_size(message) == 0 do
    # Pack the four 32-bit state words into one 128-bit integer.
    a + (b <<< 32) + (c <<< 64) + (d <<< 96)
  end

  def process_message(message, a, b, c, d) do
    # Use pattern matching to grab the first 512 bits of the message.
    <<chunk::bitstring-size(512), remaining::bitstring>> = message
    # Run the 64-step compression function on the chunk...
    {a_new, b_new, c_new, d_new} = step(0, chunk, a, b, c, d)
    # ...then add the result to the running state (mod 2^32) and recurse.
    process_message(
      remaining,
      overflow(a_new + a),
      overflow(b_new + b),
      overflow(c_new + c),
      overflow(d_new + d)
    )
  end

  @doc """
  Performs the 64-step compression over one 512-bit chunk, returning the
  updated `{a, b, c, d}` state after the final step.
  """
  def step(i, _, a, b, c, d) when i >= 64, do: {a, b, c, d}

  # NOTE: only the first clause carries @doc — Elixir warns when @doc is
  # repeated on additional clauses of the same function.
  def step(i, m, a, b, c, d) when i >= 0 do
    # Round constant, round function value and message-word index.
    t = calc_constant(i)
    {f, g} = index_func(i, b, c, d)
    # Extract the g-th 32-bit little-endian word of the chunk.
    start_pos = g * 32
    <<_::size(start_pos), chunk::little-size(32), _::bitstring>> = m
    to_rotate = a + f + chunk + t
    b_new = overflow(b + leftrotate(to_rotate, elem(@shift_constants, i)))
    # Rotate the state: A <- D, B <- new B, C <- B, D <- C.
    step(i + 1, m, d, b_new, b, c)
  end

  @doc """
  Returns the round function value and message-word index `{f, g}` for step
  `i`. The four clauses correspond to rounds 1-4 of RFC 1321.
  """
  # Round 1: F = (B and C) or ((not B) and D), g = i
  def index_func(i, x, y, z) when i < 16 do
    {(x &&& y) ||| (bnot(x) &&& z), i}
  end

  # Round 2: G = (B and D) or (C and (not D)), g = (5i + 1) mod 16
  def index_func(i, x, y, z) when i < 32 do
    {(x &&& z) ||| (y &&& bnot(z)), rem(5 * i + 1, 16)}
  end

  # Round 3: H = B xor C xor D, g = (3i + 5) mod 16
  def index_func(i, x, y, z) when i < 48 do
    {x |> bxor(y) |> bxor(z), rem(3 * i + 5, 16)}
  end

  # Round 4: I = C xor (B or (not D)), g = 7i mod 16
  def index_func(i, x, y, z) when i < 64 do
    {bxor(y, x ||| bnot(z)), rem(7 * i, 16)}
  end

  @doc """
  Performs a 32-bit bitwise left rotation of `b` by `shift` bits.
  """
  def leftrotate(b, shift) do
    # Normalize to 32 bits first so the wrapped-around bits are correct.
    b_ = overflow(b)
    overflow(b_ <<< shift ||| b_ >>> (32 - shift))
  end

  @doc """
  Emulates a 32 bit overflow on a value (also maps negative two's-complement
  intermediates onto their unsigned 32-bit representation).
  """
  def overflow(value) do
    value &&& 0xFFFFFFFF
  end
end
|
lib/md5.ex
| 0.810479
| 0.466663
|
md5.ex
|
starcoder
|
defmodule Explorer.PolarsBackend.Series do
  @moduledoc false

  # Kernel.length/1 is excluded so this module can define its own length/1
  # backend callback without conflict.
  import Kernel, except: [length: 1]
  import Explorer.Shared, only: [check_types!: 1, cast_numerics: 2]

  alias Explorer.DataFrame
  alias Explorer.PolarsBackend.Native
  alias Explorer.PolarsBackend.Shared
  alias Explorer.Series
  alias __MODULE__, as: PolarsSeries

  # Wrapper around the NIF resource handle for a Polars-backed series.
  @type t :: %__MODULE__{resource: binary(), reference: term()}
  defstruct resource: nil, reference: nil

  @behaviour Explorer.Backend.Series

  # Conversion

  @impl true
  # Builds a Polars-backed series from a list. A dtype outside the set below
  # raises CaseClauseError.
  def from_list(data, type, name \\ "") when is_list(data) do
    series =
      case type do
        :integer -> Native.s_new_i64(name, data)
        :float -> Native.s_new_f64(name, data)
        :boolean -> Native.s_new_bool(name, data)
        :string -> Native.s_new_str(name, data)
        :date -> Native.s_new_date32(name, data)
        :datetime -> Native.s_new_date64(name, data)
      end

    %Series{data: series, dtype: type}
  end

  @impl true
  def to_list(%Series{data: series}) do
    case Native.s_to_list(series) do
      {:ok, list} -> list
      {:error, e} -> raise "#{e}"
    end
  end

  @impl true
  def cast(series, dtype), do: Shared.apply_native(series, :s_cast, [Atom.to_string(dtype)])

  # Introspection

  @impl true
  def dtype(series), do: series |> Shared.apply_native(:s_dtype) |> Shared.normalise_dtype()

  @impl true
  def length(series), do: Shared.apply_native(series, :s_len)

  # Slice and dice

  @impl true
  def head(series, n_elements), do: Shared.apply_native(series, :s_head, [n_elements])

  @impl true
  def tail(series, n_elements), do: Shared.apply_native(series, :s_tail, [n_elements])

  @impl true
  # Samples `n` elements using seedable random indices, then takes them.
  def sample(series, n, with_replacement?, seed) when is_integer(n) do
    indices =
      series
      |> length()
      |> Native.s_seedable_random_indices(n, with_replacement?, seed)

    take(series, indices)
  end

  @impl true
  def take_every(series, every_n),
    do: Shared.apply_native(series, :s_take_every, [every_n])

  @impl true
  def filter(series, %Series{} = mask),
    do: Shared.apply_native(series, :s_filter, [Shared.to_polars_s(mask)])

  # When given a function, it is applied to the series to derive the mask.
  def filter(series, callback) when is_function(callback) do
    mask = callback.(series)
    filter(series, mask)
  end

  @impl true
  def slice(series, offset, length), do: Shared.apply_native(series, :s_slice, [offset, length])

  @impl true
  def take(series, indices) when is_list(indices),
    do: Shared.apply_native(series, :s_take, [indices])

  @impl true
  # Negative indices count from the end of the series.
  def get(series, idx) do
    idx = if idx < 0, do: length(series) + idx, else: idx
    Shared.apply_native(series, :s_get, [idx])
  end

  @impl true
  def concat(s1, s2), do: Shared.apply_native(s1, :s_append, [Shared.to_polars_s(s2)])

  # Aggregation

  @impl true
  def sum(series), do: Shared.apply_native(series, :s_sum)

  @impl true
  def min(series), do: Shared.apply_native(series, :s_min)

  @impl true
  def max(series), do: Shared.apply_native(series, :s_max)

  @impl true
  def mean(series), do: Shared.apply_native(series, :s_mean)

  @impl true
  def median(series), do: Shared.apply_native(series, :s_median)

  @impl true
  def var(series), do: Shared.apply_native(series, :s_var)

  @impl true
  def std(series), do: Shared.apply_native(series, :s_std)

  @impl true
  def quantile(series, quantile),
    do: Shared.apply_native(series, :s_quantile, [quantile, "nearest"])

  # Cumulative

  @impl true
  def cum_max(series, reverse?), do: Shared.apply_native(series, :s_cum_max, [reverse?])

  @impl true
  def cum_min(series, reverse?), do: Shared.apply_native(series, :s_cum_min, [reverse?])

  @impl true
  def cum_sum(series, reverse?), do: Shared.apply_native(series, :s_cum_sum, [reverse?])

  # Local minima/maxima

  @impl true
  def peaks(series, :max), do: Shared.apply_native(series, :s_peak_max)
  def peaks(series, :min), do: Shared.apply_native(series, :s_peak_min)

  # Arithmetic — each operation accepts a series or broadcasts a scalar via
  # scalar_rhs/2.

  @impl true
  def add(left, %Series{} = right),
    do: Shared.apply_native(left, :s_add, [Shared.to_polars_s(right)])

  def add(left, right) when is_number(right), do: add(left, scalar_rhs(right, left))

  @impl true
  def subtract(left, %Series{} = right),
    do: Shared.apply_native(left, :s_sub, [Shared.to_polars_s(right)])

  def subtract(left, right) when is_number(right), do: subtract(left, scalar_rhs(right, left))

  @impl true
  def multiply(left, %Series{} = right),
    do: Shared.apply_native(left, :s_mul, [Shared.to_polars_s(right)])

  def multiply(left, right) when is_number(right), do: multiply(left, scalar_rhs(right, left))

  @impl true
  def divide(left, %Series{} = right),
    do: Shared.apply_native(left, :s_div, [Shared.to_polars_s(right)])

  def divide(left, right) when is_number(right), do: divide(left, scalar_rhs(right, left))

  @impl true
  def pow(left, exponent) when is_float(exponent),
    do: Shared.apply_native(left, :s_pow, [exponent])

  # NOTE(review): this cond has no fallback branch — a dtype other than
  # :integer/:float would raise CondClauseError; confirm that's intended.
  def pow(left, exponent) when is_integer(exponent) and exponent >= 0 do
    cond do
      Series.dtype(left) == :integer -> Shared.apply_native(left, :s_int_pow, [exponent])
      Series.dtype(left) == :float -> Shared.apply_native(left, :s_pow, [exponent / 1])
    end
  end

  # Comparisons

  @impl true
  def eq(left, %Series{} = right),
    do: Shared.apply_native(left, :s_eq, [Shared.to_polars_s(right)])

  def eq(left, right), do: eq(left, scalar_rhs(right, left))

  @impl true
  def neq(left, %Series{} = right),
    do: Shared.apply_native(left, :s_neq, [Shared.to_polars_s(right)])

  def neq(left, right), do: neq(left, scalar_rhs(right, left))

  @impl true
  def gt(left, %Series{} = right),
    do: Shared.apply_native(left, :s_gt, [Shared.to_polars_s(right)])

  def gt(left, right), do: gt(left, scalar_rhs(right, left))

  @impl true
  def gt_eq(left, %Series{} = right),
    do: Shared.apply_native(left, :s_gt_eq, [Shared.to_polars_s(right)])

  def gt_eq(left, right), do: gt_eq(left, scalar_rhs(right, left))

  @impl true
  def lt(left, %Series{} = right),
    do: Shared.apply_native(left, :s_lt, [Shared.to_polars_s(right)])

  def lt(left, right), do: lt(left, scalar_rhs(right, left))

  @impl true
  def lt_eq(left, %Series{} = right),
    do: Shared.apply_native(left, :s_lt_eq, [Shared.to_polars_s(right)])

  def lt_eq(left, right), do: lt_eq(left, scalar_rhs(right, left))

  @impl true
  def all_equal?(left, right),
    do: Shared.apply_native(left, :s_series_equal, [Shared.to_polars_s(right), true])

  @impl true
  def binary_and(left, right), do: Shared.apply_native(left, :s_and, [Shared.to_polars_s(right)])

  @impl true
  def binary_or(left, right), do: Shared.apply_native(left, :s_or, [Shared.to_polars_s(right)])

  # Sort

  @impl true
  def sort(series, reverse?), do: Shared.apply_native(series, :s_sort, [reverse?])

  @impl true
  def argsort(series, reverse?), do: Shared.apply_native(series, :s_argsort, [reverse?])

  @impl true
  def reverse(series), do: Shared.apply_native(series, :s_reverse)

  # Distinct

  @impl true
  def distinct(series), do: Shared.apply_native(series, :s_distinct)

  @impl true
  def unordered_distinct(series), do: Shared.apply_native(series, :s_unordered_distinct)

  @impl true
  def n_distinct(series), do: Shared.apply_native(series, :s_n_unique)

  @impl true
  # Despite the name, returns a two-column DataFrame of value counts
  # ("values"/"counts"), with "counts" cast to integer.
  def count(series),
    do:
      series
      |> Shared.to_polars_s()
      |> Native.s_value_counts()
      |> Shared.unwrap()
      |> Shared.to_dataframe()
      |> DataFrame.rename(["values", "counts"])
      |> DataFrame.mutate(counts: &Series.cast(&1["counts"], :integer))

  # Rolling — all four expect :weights, :min_periods and :center opts.

  @impl true
  def rolling_max(series, window_size, opts) do
    weights = Keyword.fetch!(opts, :weights)
    min_periods = Keyword.fetch!(opts, :min_periods)
    center = Keyword.fetch!(opts, :center)

    Shared.apply_native(series, :s_rolling_max, [window_size, weights, min_periods, center])
  end

  @impl true
  def rolling_mean(series, window_size, opts) do
    weights = Keyword.fetch!(opts, :weights)
    min_periods = Keyword.fetch!(opts, :min_periods)
    center = Keyword.fetch!(opts, :center)

    Shared.apply_native(series, :s_rolling_mean, [window_size, weights, min_periods, center])
  end

  @impl true
  def rolling_min(series, window_size, opts) do
    weights = Keyword.fetch!(opts, :weights)
    min_periods = Keyword.fetch!(opts, :min_periods)
    center = Keyword.fetch!(opts, :center)

    Shared.apply_native(series, :s_rolling_min, [window_size, weights, min_periods, center])
  end

  @impl true
  def rolling_sum(series, window_size, opts) do
    weights = Keyword.fetch!(opts, :weights)
    min_periods = Keyword.fetch!(opts, :min_periods)
    center = Keyword.fetch!(opts, :center)

    Shared.apply_native(series, :s_rolling_sum, [window_size, weights, min_periods, center])
  end

  # Missing values

  @impl true
  # Atom strategies (e.g. fill directions) are passed through by name;
  # literal fill values dispatch on their type below.
  def fill_missing(series, strategy) when is_atom(strategy),
    do: Shared.apply_native(series, :s_fill_none, [Atom.to_string(strategy)])

  def fill_missing(series, strategy) do
    cond do
      is_float(strategy) -> Shared.apply_native(series, :s_fill_none_with_float, [strategy])
      is_integer(strategy) -> Shared.apply_native(series, :s_fill_none_with_int, [strategy])
      is_binary(strategy) -> Shared.apply_native(series, :s_fill_none_with_bin, [strategy])
    end
  end

  @impl true
  def nil?(series), do: Shared.apply_native(series, :s_is_null)

  @impl true
  def not_nil?(series), do: Shared.apply_native(series, :s_is_not_null)

  # Escape hatch

  @impl true
  # Round-trips through an Elixir list: materializes, maps, re-infers the
  # dtype and rebuilds a Polars series.
  def transform(series, fun) do
    list = series |> Series.to_list() |> Enum.map(fun)
    type = check_types!(list)
    {list, type} = cast_numerics(list, type)
    from_list(list, type)
  end

  # Polars specific functions

  def name(series), do: Shared.apply_native(series, :s_name)
  def rename(series, name), do: Shared.apply_native(series, :s_rename, [name])

  # Helpers

  # Broadcasts a scalar into a series of the same length as `lhs`.
  defp scalar_rhs(scalar, lhs),
    do:
      scalar
      |> List.duplicate(PolarsSeries.length(lhs))
      |> Series.from_list()
end
defimpl Inspect, for: Explorer.PolarsBackend.Series do
  alias Explorer.PolarsBackend.Native

  # Delegates inspection to the NIF's string representation of the series,
  # raising if the native call reports an error.
  def inspect(series, _opts) do
    case Native.s_as_str(series) do
      {:ok, representation} -> representation
      {:error, reason} -> raise "#{reason}"
    end
  end
end
|
lib/explorer/polars_backend/series.ex
| 0.904318
| 0.511656
|
series.ex
|
starcoder
|
defmodule AWS.Snowball do
  @moduledoc """
  AWS Snow Family is a petabyte-scale data transport solution that uses secure
  devices to transfer large amounts of data between your on-premises data centers
  and Amazon Simple Storage Service (Amazon S3).

  The Snow commands described here provide access to the same functionality that
  is available in the AWS Snow Family Management Console, which enables you to
  create and manage jobs for a Snow device. To transfer data locally with a Snow
  device, you'll need to use the Snowball Edge client or the Amazon S3 API
  Interface for Snowball or AWS OpsHub for Snow Family. For more information, see
  the [User Guide](https://docs.aws.amazon.com/AWSImportExport/latest/ug/api-reference.html).
  """

  # NOTE(review): this module follows the aws-elixir generated-client layout
  # (every operation is a thin JSON POST through AWS.Request) — it appears to
  # be code-generated, so prefer regenerating over hand-editing.

  alias AWS.Client
  alias AWS.Request

  # Static service metadata used by AWS.Request to sign and route calls.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: "Amazon Snowball",
      api_version: "2016-06-30",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "snowball",
      global?: false,
      protocol: "json",
      service_id: "Snowball",
      signature_version: "v4",
      signing_name: "snowball",
      target_prefix: "AWSIESnowballJobManagementService"
    }
  end

  @doc """
  Cancels a cluster job.

  You can only cancel a cluster job while it's in the `AwaitingQuorum` status.
  You'll have at least an hour after creating a cluster job to cancel it.
  """
  def cancel_cluster(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CancelCluster", input, options)
  end

  @doc """
  Cancels the specified job.

  You can only cancel a job before its `JobState` value changes to
  `PreparingAppliance`. Requesting the `ListJobs` or `DescribeJob` action returns
  a job's `JobState` as part of the response element data returned.
  """
  def cancel_job(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CancelJob", input, options)
  end

  @doc """
  Creates an address for a Snow device to be shipped to.

  In most regions, addresses are validated at the time of creation. The address
  you provide must be located within the serviceable area of your region. If the
  address is invalid or unsupported, then an exception is thrown.
  """
  def create_address(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateAddress", input, options)
  end

  @doc """
  Creates an empty cluster.

  Each cluster supports five nodes. You use the `CreateJob` action separately to
  create the jobs for each of these nodes. The cluster does not ship until these
  five node jobs have been created.
  """
  def create_cluster(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateCluster", input, options)
  end

  @doc """
  Creates a job to import or export data between Amazon S3 and your on-premises
  data center.

  Your AWS account must have the right trust policies and permissions in place to
  create a job for a Snow device. If you're creating a job for a node in a
  cluster, you only need to provide the `clusterId` value; the other job
  attributes are inherited from the cluster.
  """
  def create_job(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateJob", input, options)
  end

  @doc """
  Creates a shipping label that will be used to return the Snow device to AWS.
  """
  def create_return_shipping_label(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateReturnShippingLabel", input, options)
  end

  @doc """
  Takes an `AddressId` and returns specific details about that address in the form
  of an `Address` object.
  """
  def describe_address(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeAddress", input, options)
  end

  @doc """
  Returns a specified number of `ADDRESS` objects.

  Calling this API in one of the US regions will return addresses from the list of
  all addresses associated with this account in all US regions.
  """
  def describe_addresses(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeAddresses", input, options)
  end

  @doc """
  Returns information about a specific cluster including shipping information,
  cluster status, and other important metadata.
  """
  def describe_cluster(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeCluster", input, options)
  end

  @doc """
  Returns information about a specific job including shipping information, job
  status, and other important metadata.
  """
  def describe_job(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeJob", input, options)
  end

  @doc """
  Information on the shipping label of a Snow device that is being returned to
  AWS.
  """
  def describe_return_shipping_label(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeReturnShippingLabel", input, options)
  end

  @doc """
  Returns a link to an Amazon S3 presigned URL for the manifest file associated
  with the specified `JobId` value.

  You can access the manifest file for up to 60 minutes after this request has
  been made. To access the manifest file after 60 minutes have passed, you'll have
  to make another call to the `GetJobManifest` action.

  The manifest is an encrypted file that you can download after your job enters
  the `WithCustomer` status. The manifest is decrypted by using the `UnlockCode`
  code value, when you pass both values to the Snow device through the Snowball
  client when the client is started for the first time.

  As a best practice, we recommend that you don't save a copy of an `UnlockCode`
  value in the same location as the manifest file for that job. Saving these
  separately helps prevent unauthorized parties from gaining access to the Snow
  device associated with that job.

  The credentials of a given job, including its manifest file and unlock code,
  expire 90 days after the job is created.
  """
  def get_job_manifest(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetJobManifest", input, options)
  end

  @doc """
  Returns the `UnlockCode` code value for the specified job.

  A particular `UnlockCode` value can be accessed for up to 90 days after the
  associated job has been created.

  The `UnlockCode` value is a 29-character code with 25 alphanumeric characters
  and 4 hyphens. This code is used to decrypt the manifest file when it is passed
  along with the manifest to the Snow device through the Snowball client when the
  client is started for the first time.

  As a best practice, we recommend that you don't save a copy of the `UnlockCode`
  in the same location as the manifest file for that job. Saving these separately
  helps prevent unauthorized parties from gaining access to the Snow device
  associated with that job.
  """
  def get_job_unlock_code(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetJobUnlockCode", input, options)
  end

  @doc """
  Returns information about the Snow Family service limit for your account, and
  also the number of Snow devices your account has in use.

  The default service limit for the number of Snow devices that you can have at
  one time is 1. If you want to increase your service limit, contact AWS Support.
  """
  def get_snowball_usage(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetSnowballUsage", input, options)
  end

  @doc """
  Returns an Amazon S3 presigned URL for an update file associated with a
  specified `JobId`.
  """
  def get_software_updates(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetSoftwareUpdates", input, options)
  end

  @doc """
  Returns an array of `JobListEntry` objects of the specified length.

  Each `JobListEntry` object is for a job in the specified cluster and contains a
  job's state, a job's ID, and other information.
  """
  def list_cluster_jobs(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListClusterJobs", input, options)
  end

  @doc """
  Returns an array of `ClusterListEntry` objects of the specified length.

  Each `ClusterListEntry` object contains a cluster's state, a cluster's ID, and
  other important status information.
  """
  def list_clusters(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListClusters", input, options)
  end

  @doc """
  This action returns a list of the different Amazon EC2 Amazon Machine Images
  (AMIs) that are owned by your AWS account that would be supported for use on a
  Snow device.

  Currently, supported AMIs are based on the CentOS 7 (x86_64) - with Updates HVM,
  Ubuntu Server 14.04 LTS (HVM), and Ubuntu 16.04 LTS - Xenial (HVM) images,
  available on the AWS Marketplace.
  """
  def list_compatible_images(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListCompatibleImages", input, options)
  end

  @doc """
  Returns an array of `JobListEntry` objects of the specified length.

  Each `JobListEntry` object contains a job's state, a job's ID, and a value that
  indicates whether the job is a job part, in the case of export jobs. Calling
  this API action in one of the US regions will return jobs from the list of all
  jobs associated with this account in all US regions.
  """
  def list_jobs(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListJobs", input, options)
  end

  @doc """
  While a cluster's `ClusterState` value is in the `AwaitingQuorum` state, you can
  update some of the information associated with a cluster.

  Once the cluster changes to a different job state, usually 60 minutes after the
  cluster being created, this action is no longer available.
  """
  def update_cluster(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateCluster", input, options)
  end

  @doc """
  While a job's `JobState` value is `New`, you can update some of the information
  associated with a job.

  Once the job changes to a different job state, usually within 60 minutes of the
  job being created, this action is no longer available.
  """
  def update_job(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateJob", input, options)
  end

  @doc """
  Updates the state when a shipment state changes to a different state.
  """
  def update_job_shipment_state(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateJobShipmentState", input, options)
  end
end
|
lib/aws/generated/snowball.ex
| 0.8628
| 0.583263
|
snowball.ex
|
starcoder
|
defmodule ExMinimatch do
  @moduledoc """
  ExMinimatch
  ===========

  Globbing paths without walking the tree! Elixir and Erlang provide `wildcard`
  functions in the stdlib. But these will walk the directory tree. If you simply
  want to test whether a file path matches a glob, ExMinimatch is for you.

  Quick examples:

      iex> import ExMinimatch
      nil

      iex> match("**/*{1..2}{a,b}.{png,jpg}", "asdf/pic2a.jpg")
      true

      iex> match("*.+(bar|foo)", "bar.foo")
      true

      iex> ["me.jpg", "images/me.png", "images/you.svg"] |> filter("**/*.{png,jpg}")
      ["me.jpg", "images/me.png"]

  The compiled forms below allow us to cache the %ExMinimatcher{} struct when it
  is used against a large number of files repeatedly.

      iex> compile("**/*{1..2}{a,b}.{png,jpg}") |> match("asdf/pic2a.jpg")
      true

      iex> ["me.jpg", "images/me.png", "images/you.svg"] |> filter(compile("**/*.{png,jpg}"))
      ["me.jpg", "images/me.png"]

  ExMinimatch is a port of the [minimatch](https://github.com/isaacs/minimatch)
  javascript project. It is a close port but not exactly the same. See the
  "Comparison to minimatch.js" section below.

  ## Glob Patterns

  Supports these glob features:

  - [Brace Expansion](https://github.com/gniquil/ex_brace_expansion)
  - Extended glob matching
  - "Globstar" ** matching

  See:

  - man sh
  - man bash
  - man 3 fnmatch
  - man 5 gitignore

  ## Options

  `compile`, `match`, and `filter` all have forms that take an options argument
  (as a map %{}). The following are the explanations. By default, all of these
  are false.

  ### log

  Possible values are `:info`, and `:debug`. If set, will dump information into
  the repl. `:debug` dumps more.

  ### nobrace

  Do not expand `{a,b}` and `{1..3}` brace sets.

  ### noglobstar

  Disable `**` matching against multiple folder names.

  ### dot

  Allow patterns to match filenames starting with a period, even if the pattern
  does not explicitly have a period in that spot.

  Note that by default, `a/**/b` will not match `a/.d/b`, unless dot is set, e.g.
  `match("a/**/b", "a/.d/b", %{dot: true})`

  ### noext

  Disable "extglob" style patterns like `+(a|b)`.

  ### nocase

  Perform a case-insensitive match.

  ### match_base

  If set, then patterns without slashes will be matched against the basename of
  the path if it contains slashes. For example, `a?b` would match the path
  `/xyz/123/acb`, but not `/xyz/acb/123`.

  ### nocomment

  Suppress the behavior of treating `#` at the start of a pattern as a comment.

  ### nonegate

  Suppress the behavior of treating a leading `!` character as negation.

  ## Comparison to minimatch.js

  `minimatch.js` converts a glob into a list of regular expressions. However, when
  it comes to matching, one can choose to use `minimatch.match` or use the complete
  regular expression generated by `minimatch.makeRe` to test it against a file
  pattern. Unfortunately, the 2 approaches are __inconsistent__. Notably the full
  regular expression based approach has a few important pieces missing. Therefore,
  here we implement the first approach. For detail, take a look at
  `ExMinimatcher.Matcher`.
  """

  @doc """
  Returns a compiled %ExMinimatcher{} struct, which can be used in conjunction
  with `match/2`, `match/3`, or `filter/2` to match files.

  The purpose of this function is to save time by precompiling the glob
  pattern once, instead of on every match.

  For possible glob patterns and available options, please refer to the
  moduledoc.
  """
  def compile(glob), do: compile(glob, %{})
  def compile(glob, options) do
    # Fill in defaults for any option the caller did not supply; caller-given
    # values win via Map.merge/2.
    options = %{
      dot: false,
      nocase: false,
      match_base: false,
      nonegate: false,
      noext: false,
      noglobstar: false,
      nocomment: false,
      nobrace: false,
      log: nil
    } |> Map.merge(options)
    ExMinimatch.Compiler.compile_matcher(glob, options)
  end

  @doc ~S"""
  Returns true when `file` matches the given compiled %ExMinimatcher{} struct
  or glob string.

  The `match(matcher, file)` form is intended to be used with `compile/1` or
  `compile/2`, so the glob is compiled only once:

      iex> compile("**/*.{png,jpg}") |> match("me.jpg")
      true

      iex> compile("**/*.{png,jpg}") |> match("images/me.png")
      true

      iex> compile("**/*.{png,jpg}") |> match("images/you.svg")
      false

      iex> ["me.jpg", "images/me.png", "images/you.svg"] |> filter(compile("**/*.{png,jpg}"))
      ["me.jpg", "images/me.png"]

  The `match(glob, file)` and `match(glob, file, options)` forms are
  convenience functions that are literally `glob |> compile(options) |> match(file)`.
  Use these for one-off matching, as the glob is recompiled every time they are
  called.

  For possible glob patterns and available options, please refer to the
  moduledoc.

  ## Examples

      iex> match("**/*.png", "qwer.png")
      true

      iex> match("**/*.png", "qwer/qwer.png")
      true

  """
  # An empty compiled pattern matches only the empty file name.
  def match(%ExMinimatcher{pattern: pattern}, file) when pattern == [] and file == "", do: true
  def match(%ExMinimatcher{pattern: pattern}, _file) when pattern == [], do: false
  def match(%ExMinimatcher{} = matcher, file), do: ExMinimatch.Matcher.match_file(file, matcher)
  def match(glob, file) when is_binary(glob), do: match(glob, file, %{})
  def match(glob, file, options) when is_binary(glob), do: glob |> compile(options) |> match(file)

  @doc """
  Returns the list of `files` filtered by the compiled %ExMinimatcher{} struct
  or glob string.

  Note the collection argument comes first, different from `match`. This is
  more suitable for piping collections.

  ## Examples

      iex> ["me.jpg", "images/me.png", "images/you.svg"] |> filter(compile("**/*.{png,jpg}"))
      ["me.jpg", "images/me.png"]

  The `filter(files, pattern)` and `filter(files, pattern, options)` forms are
  convenience functions that compile the glob on every call.

  For possible glob patterns and available options, please refer to the
  moduledoc.

  ## Examples

      iex> filter(["qwer.png", "asdf/qwer.png"], "**/*.png")
      ["qwer.png", "asdf/qwer.png"]

      iex> filter(["qwer/pic1a.png", "qwer/asdf/pic2a.png", "asdf/pic2c.jpg"], "**/*{1..2}{a,b}.{png,jpg}")
      ["qwer/pic1a.png", "qwer/asdf/pic2a.png"]

  """
  def filter(files, %ExMinimatcher{} = matcher), do: files |> Enum.filter(&match(matcher, &1))
  def filter(files, pattern) when is_binary(pattern), do: filter(files, pattern, %{})
  def filter(files, pattern, options) when is_binary(pattern), do: files |> filter(compile(pattern, options))
end
|
vendor/ex_minimatch/ex_minimatch.ex
| 0.838515
| 0.55658
|
ex_minimatch.ex
|
starcoder
|
defmodule Grizzly.Trace.Record do
  @moduledoc """
  Data structure for a single item in the trace log
  """

  alias Grizzly.{Trace, ZWave}
  alias Grizzly.ZWave.Command

  @type t() :: %__MODULE__{
          timestamp: Time.t(),
          binary: binary(),
          src: Trace.src() | nil,
          # Fix: dest is typed with Trace.dest() (the opt() type below already
          # used Trace.dest(); this field previously said Trace.src()).
          dest: Trace.dest() | nil
        }

  @type opt() :: {:src, Trace.src()} | {:dest, Trace.dest()} | {:timestamp, Time.t()}

  defstruct src: nil, dest: nil, binary: nil, timestamp: nil

  @doc """
  Make a new `Grizzly.Trace.Record.t()` from a binary string

  Options:

    * `:src` - the src as a string
    * `:dest` - the dest as a string
    * `:timestamp` - the record time; defaults to `Time.utc_now()`
  """
  @spec new(binary(), [opt()]) :: t()
  def new(binary, opts \\ []) do
    timestamp = Keyword.get(opts, :timestamp, Time.utc_now())
    src = Keyword.get(opts, :src)
    dest = Keyword.get(opts, :dest)

    %__MODULE__{
      src: src,
      dest: dest,
      binary: binary,
      timestamp: timestamp
    }
  end

  @doc """
  Turn a record into the string format

  Raises a `MatchError` if the record's binary cannot be parsed by
  `Grizzly.ZWave.from_binary/1`.
  """
  @spec to_string(t()) :: String.t()
  def to_string(record) do
    %__MODULE__{timestamp: ts, src: src, dest: dest, binary: binary} = record
    {:ok, zip_packet} = ZWave.from_binary(binary)

    "#{Time.to_string(ts)} #{src_dest_to_string(src)} #{src_dest_to_string(dest)} #{command_info_str(zip_packet)}"
  end

  # A missing src/dest is rendered as 18 spaces to keep the columns aligned.
  # (Replaces a hand-rolled Enum.reduce over 1..18 with the stdlib helper.)
  defp src_dest_to_string(nil), do: String.duplicate(" ", 18)
  defp src_dest_to_string(src_or_dest), do: src_or_dest

  # Keep-alive frames carry no seq number/flag; align under the seq column.
  defp command_info_str(%Command{name: :keep_alive}) do
    "    keep_alive"
  end

  defp command_info_str(zip_packet) do
    seq_number = Command.param!(zip_packet, :seq_number)
    flag = Command.param!(zip_packet, :flag)

    case flag do
      f when f in [:ack_response, :nack_response, :nack_waiting] ->
        command_info_empty_response(seq_number, flag)

      _ ->
        command_info_with_encapsulated_command(seq_number, zip_packet)
    end
  end

  defp command_info_empty_response(seq_number, flag) do
    "#{seq_number_to_str(seq_number)} #{flag}"
  end

  defp command_info_with_encapsulated_command(seq_number, zip_packet) do
    command = Command.param!(zip_packet, :command)
    "#{seq_number_to_str(seq_number)} #{command.name} #{inspect(Command.encode_params(command))}"
  end

  # Left-align the sequence number in a fixed 3-character column
  # (e.g. "5  ", "42 ", "123") so trailing fields line up across records.
  defp seq_number_to_str(seq_number) do
    seq_number
    |> Integer.to_string()
    |> String.pad_trailing(3)
  end
end
|
lib/grizzly/trace/record.ex
| 0.852721
| 0.470493
|
record.ex
|
starcoder
|
defmodule ExState.Execution do
  @moduledoc """
  `ExState.Execution` executes state transitions with a state chart.

  An execution holds the chart definition, the current state, a queue of
  pending actions, the history of visited states, the transitions taken so
  far, a callback module (for guards, actions, and step filtering), and a
  user-supplied context map.
  """

  alias ExState.Result
  alias ExState.Definition.Chart
  alias ExState.Definition.State
  alias ExState.Definition.Step
  alias ExState.Definition.Transition

  @type t :: %__MODULE__{
          chart: Chart.t(),
          state: State.t(),
          actions: [atom()],
          history: [State.t()],
          transitions: [Transition.t()],
          callback_mod: module(),
          context: map(),
          meta: map()
        }

  defstruct chart: %Chart{},
            state: nil,
            actions: [],
            history: [],
            transitions: [],
            callback_mod: nil,
            context: %{},
            meta: %{}

  @doc """
  Creates a new workflow execution from the initial state.
  """
  @spec new(module()) :: t()
  def new(workflow) do
    new(workflow.definition, workflow, %{})
  end

  @spec new(module(), map()) :: t()
  def new(workflow, context) do
    new(workflow.definition, workflow, context)
  end

  @spec new(Chart.t(), module(), map()) :: t()
  def new(chart, callback_mod, context) do
    %__MODULE__{chart: chart, callback_mod: callback_mod, context: context}
    |> enter_state(chart.initial_state)
  end

  @doc """
  Continues a workflow execution from the specified state.

  Entry actions are skipped, since the state was already entered when the
  execution originally reached it.
  """
  @spec continue(module(), String.t()) :: t()
  def continue(workflow, state_name) do
    continue(workflow.definition, workflow, state_name, %{})
  end

  @spec continue(module(), String.t(), map()) :: t()
  def continue(workflow, state_name, context) do
    continue(workflow.definition, workflow, state_name, context)
  end

  @spec continue(Chart.t(), module(), String.t(), map()) :: t()
  def continue(chart, callback_mod, state_name, context) when is_bitstring(state_name) do
    %__MODULE__{chart: chart, callback_mod: callback_mod, context: context}
    |> enter_state(state_name, entry_actions: false)
  end

  @doc """
  Puts the chart's subject into the execution context.

  Raises if the chart defines no subject.
  """
  def put_subject(execution, subject) do
    case execution.chart.subject do
      {name, _queryable} ->
        put_context(execution, name, subject)

      nil ->
        raise "No subject defined in chart"
    end
  end

  @doc """
  Returns the chart's subject from the execution context, or `nil` if the
  chart defines no subject.
  """
  def get_subject(execution) do
    case execution.chart.subject do
      {name, _queryable} ->
        Map.get(execution.context, name)

      nil ->
        nil
    end
  end

  @doc """
  Replaces the entire execution context.
  """
  def put_context(execution, context) do
    %{execution | context: context}
  end

  @doc """
  Puts a single key/value into the execution context.
  """
  def put_context(execution, key, value) do
    %{execution | context: Map.put(execution.context, key, value)}
  end

  @doc """
  Continues a workflow execution with the completed steps.

  Use in conjunction with `continue` to resume execution.
  """
  @spec with_completed(t(), String.t(), String.t(), any()) :: t()
  def with_completed(execution, state_name, step_name, decision \\ nil)

  # Completed step belongs to the current state: mark it there.
  def with_completed(
        %__MODULE__{state: %State{name: state_name}} = execution,
        state_name,
        step_name,
        decision
      ) do
    put_state(execution, State.put_completed_step(execution.state, step_name, decision))
  end

  # Completed step belongs to a past (or not-yet-visited) state: record it in
  # history, looking the state up from the chart when it isn't in history yet.
  def with_completed(execution, state_name, step_name, decision) do
    case Enum.find(execution.history, fn state -> state.name == state_name end) do
      nil ->
        put_history(
          execution,
          State.put_completed_step(get_state(execution, state_name), step_name, decision)
        )

      state ->
        put_history(
          execution,
          Enum.map(execution.history, fn
            %State{name: ^state_name} -> State.put_completed_step(state, step_name, decision)
            state -> state
          end)
        )
    end
  end

  @doc """
  Stores arbitrary metadata on the execution under `key`.
  """
  @spec with_meta(t(), any(), any()) :: t()
  def with_meta(execution, key, value) do
    %__MODULE__{execution | meta: Map.put(execution.meta, key, value)}
  end

  defp enter_state(execution, name, opts \\ [])

  defp enter_state(execution, name, opts) when is_bitstring(name) do
    enter_state(execution, get_state(execution, name), opts)
  end

  # Core state-entry pipeline. Order matters: record history, set the new
  # state, filter its steps, queue exit/transition/entry actions, descend into
  # the initial child state, then evaluate automatic transitions
  # (:__final__, :_ null events, :__no_steps__).
  defp enter_state(execution, %State{} = state, opts) do
    execution
    |> put_history()
    |> put_state(state)
    |> filter_steps()
    |> put_actions(opts)
    |> enter_initial_state()
    |> handle_final()
    |> handle_null()
    |> handle_no_steps()
  end

  defp enter_initial_state(%__MODULE__{state: %State{initial_state: nil}} = execution) do
    execution
  end

  # Compound state: recurse into its initial child without re-running
  # transition actions.
  defp enter_initial_state(%__MODULE__{state: %State{initial_state: initial_state}} = execution) do
    enter_state(execution, get_state(execution, initial_state), transition_actions: false)
  end

  # Final states may trigger an automatic :__final__ transition (e.g. in a
  # parent state).
  defp handle_final(%__MODULE__{state: %State{type: :final}} = execution) do
    transition_maybe(execution, :__final__)
  end

  defp handle_final(execution) do
    execution
  end

  # Null event: fires :_ transitions, if any are defined for the state.
  defp handle_null(execution) do
    transition_maybe(execution, :_)
  end

  # An atomic state with no (remaining) steps may auto-advance.
  defp handle_no_steps(%__MODULE__{state: %State{type: :atomic, steps: []}} = execution) do
    transition_maybe(execution, :__no_steps__)
  end

  defp handle_no_steps(execution) do
    execution
  end

  # Drops steps that the callback module's use_step?/2 rejects.
  defp filter_steps(%__MODULE__{state: state} = execution) do
    put_state(execution, State.filter_steps(state, fn step -> use_step?(execution, step) end))
  end

  # No current state (first entry): nothing to record.
  defp put_history(%__MODULE__{state: nil} = execution), do: execution

  defp put_history(execution) do
    put_history(execution, execution.state)
  end

  defp put_history(execution, %State{} = state) do
    put_history(execution, [state | execution.history])
  end

  defp put_history(execution, history) when is_list(history) do
    %__MODULE__{execution | history: history}
  end

  @doc """
  Looks up a state by name in the execution's chart.
  """
  def get_state(execution, name) do
    Chart.state(execution.chart, name)
  end

  @doc """
  Sets the execution's current state.
  """
  def put_state(execution, state) do
    %__MODULE__{execution | state: state}
  end

  @doc """
  Prepends a taken transition to the execution's transition list.
  """
  def put_transition(execution, transition) do
    %__MODULE__{execution | transitions: [transition | execution.transitions]}
  end

  @doc """
  Completes a step and transitions the execution with `{:completed, step_id}` event.

  A missing transition for the completion event is not an error: the step is
  still marked complete.
  """
  @spec complete(t(), atom()) :: {:ok, t()} | {:error, String.t(), t()}
  def complete(execution, step_id) do
    case State.complete_step(execution.state, step_id) do
      {:ok, state} ->
        case do_transition(put_state(execution, state), {:completed, step_id}) do
          {:ok, execution} ->
            {:ok, execution}

          # No transition defined for this completion event — that's fine.
          {:error, :no_transition, _reason, execution} ->
            {:ok, execution}

          {:error, _kind, reason, execution} ->
            {:error, reason, execution}
        end

      {:error, next_steps, state} ->
        {:error, step_error(next_steps), put_state(execution, state)}
    end
  end

  def complete!(execution, step_id) do
    complete(execution, step_id) |> Result.get()
  end

  @doc """
  Completes a decision and transitions the execution with `{:decision, step_id, decision}` event.

  Unlike `complete/2`, a missing transition for the decision event is
  returned as an error.
  """
  @spec decision(t(), atom(), atom()) :: {:ok, t()} | {:error, String.t(), t()}
  def decision(execution, step_id, decision) do
    case State.complete_step(execution.state, step_id, decision) do
      {:ok, state} ->
        case do_transition(put_state(execution, state), {:decision, step_id, decision}) do
          {:ok, execution} ->
            {:ok, execution}

          {:error, _kind, reason, execution} ->
            {:error, reason, execution}
        end

      {:error, next_steps, state} ->
        {:error, step_error(next_steps), put_state(execution, state)}
    end
  end

  def decision!(execution, step_id, decision) do
    decision(execution, step_id, decision) |> Result.get()
  end

  # Human-readable message listing which step(s) were expected next.
  defp step_error([]), do: "no next step"
  defp step_error([next_step]), do: "next step is: #{next_step.name}"

  defp step_error(next_steps) when is_list(next_steps) do
    "next steps are: #{Enum.map(next_steps, fn step -> step.name end) |> Enum.join(", ")}"
  end

  @doc """
  Transitions execution with the event and returns a result tuple.
  """
  @spec transition(t(), Transition.event()) :: {:ok, t()} | {:error, String.t(), t()}
  def transition(execution, event) do
    case do_transition(execution, event) do
      {:ok, execution} ->
        {:ok, execution}

      {:error, _kind, reason, execution} ->
        {:error, reason, execution}
    end
  end

  def transition!(execution, event) do
    transition(execution, event) |> Result.get()
  end

  @doc """
  Transitions execution with the event and returns updated or unchanged execution.
  """
  def transition_maybe(execution, event) do
    case do_transition(execution, event) do
      {:ok, execution} ->
        execution

      {:error, _kind, _reason, execution} ->
        execution
    end
  end

  # Resolves the transition for `event` in the current state, bubbling up to
  # parent states when the current state defines none.
  @spec do_transition(t(), Transition.event()) :: {:ok, t()} | {:error, atom(), any(), t()}
  defp do_transition(%__MODULE__{state: %State{name: current_state}} = execution, event) do
    case State.transition(execution.state, event) do
      nil ->
        # No transition here: try the parent state, but report errors against
        # the original execution, not the parent-swapped one.
        case Chart.parent(execution.chart, execution.state) do
          nil ->
            no_transition(execution, event)

          parent ->
            case do_transition(put_state(execution, parent), event) do
              {:ok, execution} ->
                {:ok, execution}

              {:error, kind, reason, _} ->
                {:error, kind, reason, execution}
            end
        end

      # Self-transition without reset: queue its actions but do not re-enter
      # the state.
      %Transition{target: ^current_state, reset: false} = transition ->
        next =
          execution
          |> add_actions(transition.actions)

        {:ok, next}

      # Multiple candidate targets: take the first one whose guard allows it.
      %Transition{target: target} = transition when is_list(target) ->
        Enum.reduce_while(target, no_transition(execution, event), fn target, e ->
          case use_target(execution, transition, target) do
            {:ok, next} -> {:halt, {:ok, next}}
            {:error, _code, _reason, _execution} -> {:cont, e}
          end
        end)

      %Transition{target: target} = transition ->
        use_target(execution, transition, target)
    end
  end

  defp no_transition(execution, event) do
    {:error, :no_transition,
     "no transition from #{execution.state.name} for event #{inspect(event)}", execution}
  end

  # Attempts to enter `target`: validates the state exists and the callback
  # module's guard allows the transition.
  defp use_target(execution, transition, target) do
    case get_state(execution, target) do
      nil ->
        {:error, :no_state, "no state found for transition to #{target}", execution}

      state ->
        case guard_transition(execution, state) do
          :ok ->
            next =
              execution
              |> put_transition(transition)
              |> enter_state(state)

            {:ok, next}

          {:error, reason} ->
            {:error, :guard_transition, reason, execution}
        end
    end
  end

  # Calls the optional guard_transition/3 callback; transitions are allowed
  # when the callback is not exported.
  defp guard_transition(execution, state) do
    if function_exported?(execution.callback_mod, :guard_transition, 3) do
      execution.callback_mod.guard_transition(
        State.id(execution.state),
        State.id(state),
        execution.context
      )
    else
      :ok
    end
  end

  @doc """
  Returns true when `event` would move the execution to a different state.
  """
  def will_transition?(execution, event) do
    transition_maybe(execution, event).state != execution.state
  end

  @doc """
  Returns true when the execution's current state is final.
  """
  def complete?(execution), do: State.final?(execution.state)

  @doc """
  Returns serializable data representing the execution.
  """
  def dump(execution) do
    %{
      name: execution.chart.name,
      state: execution.state.name,
      complete?: complete?(execution),
      steps: dump_steps(execution),
      participants: dump_participants(execution),
      context: execution.context
    }
  end

  defp dump_participants(execution) do
    Enum.map(execution.chart.participants, fn name ->
      dump_participant(name)
    end)
  end

  defp dump_participant(nil), do: nil
  defp dump_participant(name), do: Atom.to_string(name)

  # Flattens every chart state's (usable) steps into serializable maps,
  # preferring current/history copies so completion flags are preserved.
  defp dump_steps(execution) do
    execution
    |> merge_states()
    |> Enum.flat_map(fn state ->
      state.steps
      |> Enum.filter(fn step -> use_step?(execution, step) end)
      |> Enum.map(fn step ->
        %{
          state: state.name,
          order: step.order,
          name: step.name,
          complete?: step.complete?,
          decision: step.decision,
          participant: dump_participant(step.participant)
        }
      end)
    end)
  end

  # For each chart state, prefer the live current state, then the history
  # copy, falling back to the pristine chart definition.
  defp merge_states(execution) do
    Enum.map(execution.chart.states, fn {_, state} ->
      case execution.state.name == state.name do
        true ->
          execution.state

        false ->
          case Enum.find(execution.history, fn s -> s.name == state.name end) do
            nil -> state
            history_state -> history_state
          end
      end
    end)
  end

  # Delegates to the optional use_step?/2 callback; steps are kept when the
  # callback is not exported.
  defp use_step?(execution, step) do
    if function_exported?(execution.callback_mod, :use_step?, 2) do
      execution.callback_mod.use_step?(Step.id(step), execution.context)
    else
      true
    end
  end

  # Queues exit, transition, and entry actions in that order; each group can
  # be suppressed via opts (used by continue/4 and enter_initial_state/1).
  defp put_actions(execution, opts) do
    execution =
      if Keyword.get(opts, :exit_actions, true) do
        put_exit_actions(execution)
      else
        execution
      end

    execution =
      if Keyword.get(opts, :transition_actions, true) do
        put_transition_actions(execution)
      else
        execution
      end

    execution =
      if Keyword.get(opts, :entry_actions, true) do
        put_entry_actions(execution)
      else
        execution
      end

    execution
  end

  @doc """
  Executes any queued actions on the execution.

  Actions run oldest-first; the queue is cleared on success. Returns the
  per-action results keyed by action name.
  """
  @spec execute_actions(t()) :: {:ok, t(), map()} | {:error, t(), any()}
  def execute_actions(execution) do
    execution.actions
    |> Enum.reverse()
    |> Enum.reduce({:ok, execution, %{}}, fn
      # Once an action fails, skip the rest and carry the error through.
      _next, {:error, _reason} = e ->
        e

      next, {:ok, execution, acc} ->
        case execute_action(execution, next) do
          {:ok, execution, result} ->
            {:ok, execution, Map.put(acc, next, result)}

          {:error, _reason} = e ->
            e
        end
    end)
    |> case do
      {:ok, execution, results} ->
        {:ok, reset_actions(execution), results}

      {:error, reason} ->
        {:error, execution, reason}
    end
  end

  @doc """
  Executes the provided action name through the callback module.

  The callback receives the context and may return `:ok`, `{:ok, result}`,
  `{:updated, {key, value}}` or `{:updated, context}` to update the context,
  or any other term, which is passed through as-is.
  """
  @spec execute_action(t(), atom()) :: {:ok, t(), any()} | {:error, any()}
  def execute_action(execution, action) do
    if function_exported?(execution.callback_mod, action, 1) do
      case apply(execution.callback_mod, action, [execution.context]) do
        :ok ->
          {:ok, execution, nil}

        {:ok, result} ->
          {:ok, execution, result}

        {:updated, {key, value}} ->
          context = Map.put(execution.context, key, value)
          {:ok, %__MODULE__{execution | context: context}, context}

        {:updated, context} ->
          {:ok, %__MODULE__{execution | context: context}, context}

        e ->
          e
      end
    else
      {:error, "no function defined for action #{action}"}
    end
  end

  def execute_actions!(execution) do
    {:ok, execution, _} = execute_actions(execution)
    execution
  end

  defp reset_actions(execution) do
    %__MODULE__{execution | actions: []}
  end

  # Actions are stored newest-first; execute_actions/1 reverses before running.
  defp add_actions(execution, actions) do
    %__MODULE__{execution | actions: actions ++ execution.actions}
  end

  # Queues exit actions for the state being left, based on its relationship
  # to the new current state.
  defp put_exit_actions(%__MODULE__{state: current, history: [last | _rest]} = execution) do
    cond do
      # Descending into a child: the parent is not exited.
      State.child?(last, current) ->
        execution

      # Crossing out of a sibling group: exit the state and its parent.
      !State.sibling?(last, current) ->
        execution
        |> add_actions(State.actions(last, :exit))
        |> add_actions(State.actions(Chart.parent(execution.chart, last), :exit))

      true ->
        add_actions(execution, State.actions(last, :exit))
    end
  end

  defp put_exit_actions(execution), do: execution

  # Queues the actions of the most recently taken transition, if any.
  defp put_transition_actions(%__MODULE__{transitions: [last | _rest]} = execution) do
    add_actions(execution, last.actions)
  end

  defp put_transition_actions(execution), do: execution

  defp put_entry_actions(%__MODULE__{state: current} = execution) do
    add_actions(execution, State.actions(current, :entry))
  end
end
|
lib/ex_state/execution.ex
| 0.877935
| 0.40869
|
execution.ex
|
starcoder
|
defmodule AWS.KMS do
@moduledoc """
AWS Key Management Service
AWS Key Management Service (AWS KMS) is an encryption and key management
web service. This guide describes the AWS KMS operations that you can call
programmatically. For general information about AWS KMS, see the [AWS Key
Management Service Developer
Guide](http://docs.aws.amazon.com/kms/latest/developerguide/).
<note> AWS provides SDKs that consist of libraries and sample code for
various programming languages and platforms (Java, Ruby, .Net, iOS,
Android, etc.). The SDKs provide a convenient way to create programmatic
access to AWS KMS and other AWS services. For example, the SDKs take care
of tasks such as signing requests (see below), managing errors, and
retrying requests automatically. For more information about the AWS SDKs,
including how to download and install them, see [Tools for Amazon Web
Services](http://aws.amazon.com/tools/).
</note> We recommend that you use the AWS SDKs to make programmatic API
calls to AWS KMS.
Clients must support TLS (Transport Layer Security) 1.0. We recommend TLS
1.2. Clients must also support cipher suites with Perfect Forward Secrecy
(PFS) such as Ephemeral Diffie-Hellman (DHE) or Elliptic Curve Ephemeral
Diffie-Hellman (ECDHE). Most modern systems such as Java 7 and later
support these modes.
**Signing Requests**
Requests must be signed by using an access key ID and a secret access key.
We strongly recommend that you *do not* use your AWS account (root) access
key ID and secret key for everyday work with AWS KMS. Instead, use the
access key ID and secret access key for an IAM user, or you can use the AWS
Security Token Service to generate temporary security credentials that you
can use to sign requests.
All AWS KMS operations require [Signature Version
4](http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
**Logging API Requests**
AWS KMS supports AWS CloudTrail, a service that logs AWS API calls and
related events for your AWS account and delivers them to an Amazon S3
bucket that you specify. By using the information collected by CloudTrail,
you can determine what requests were made to AWS KMS, who made the request,
when it was made, and so on. To learn more about CloudTrail, including how
to turn it on and find your log files, see the [AWS CloudTrail User
Guide](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/).
**Additional Resources**
For more information about credentials and request signing, see the
following:
<ul> <li> [AWS Security
Credentials](http://docs.aws.amazon.com/general/latest/gr/aws-security-credentials.html)
- This topic provides general information about the types of credentials
used for accessing AWS.
</li> <li> [Temporary Security
Credentials](http://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_temp.html)
- This section of the *IAM User Guide* describes how to create and use
temporary security credentials.
</li> <li> [Signature Version 4 Signing
Process](http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html)
- This set of topics walks you through the process of signing a request
using an access key ID and a secret access key.
</li> </ul> **Commonly Used APIs**
Of the APIs discussed in this guide, the following will prove the most
useful for most applications. You will likely perform actions other than
these, such as creating keys and assigning policies, by using the console.
<ul> <li> `Encrypt`
</li> <li> `Decrypt`
</li> <li> `GenerateDataKey`
</li> <li> `GenerateDataKeyWithoutPlaintext`
</li> </ul>
"""
@doc """
Cancels the deletion of a customer master key (CMK). When this operation is
successful, the CMK is set to the `Disabled` state. To enable a CMK, use
`EnableKey`. You cannot perform this operation on a CMK in a different AWS
account.
For more information about scheduling and canceling deletion of a CMK, see
[Deleting Customer Master
Keys](http://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *AWS Key Management Service Developer Guide*.
"""
def cancel_key_deletion(client, input, options \\ []) do
request(client, "CancelKeyDeletion", input, options)
end
@doc """
Creates a display name for a customer master key (CMK). You can use an
alias to identify a CMK in selected operations, such as `Encrypt` and
`GenerateDataKey`.
Each CMK can have multiple aliases, but each alias points to only one CMK.
The alias name must be unique in the AWS account and region. To simplify
code that runs in multiple regions, use the same alias name, but point it
to a different CMK in each region.
Because an alias is not a property of a CMK, you can delete and change the
aliases of a CMK without affecting the CMK. Also, aliases do not appear in
the response from the `DescribeKey` operation. To get the aliases of all
CMKs, use the `ListAliases` operation.
An alias must start with the word `alias` followed by a forward slash
(`alias/`). The alias name can contain only alphanumeric characters,
forward slashes (/), underscores (_), and dashes (-). Alias names cannot
begin with `aws`; that alias name prefix is reserved by Amazon Web Services
(AWS).
The alias and the CMK it is mapped to must be in the same AWS account and
the same region. You cannot perform this operation on an alias in a
different AWS account.
To map an existing alias to a different CMK, call `UpdateAlias`.
"""
def create_alias(client, input, options \\ []) do
request(client, "CreateAlias", input, options)
end
@doc """
Adds a grant to a customer master key (CMK). The grant specifies who can
use the CMK and under what conditions. When setting permissions, grants are
an alternative to key policies.
To perform this operation on a CMK in a different AWS account, specify the
key ARN in the value of the KeyId parameter. For more information about
grants, see
[Grants](http://docs.aws.amazon.com/kms/latest/developerguide/grants.html)
in the *AWS Key Management Service Developer Guide*.
"""
def create_grant(client, input, options \\ []) do
request(client, "CreateGrant", input, options)
end
@doc """
Creates a customer master key (CMK) in the caller's AWS account.
You can use a CMK to encrypt small amounts of data (4 KiB or less)
directly, but CMKs are more commonly used to encrypt data encryption keys
(DEKs), which are used to encrypt raw data. For more information about DEKs
and the difference between CMKs and DEKs, see the following:
<ul> <li> The `GenerateDataKey` operation
</li> <li> [AWS Key Management Service
Concepts](http://docs.aws.amazon.com/kms/latest/developerguide/concepts.html)
in the *AWS Key Management Service Developer Guide*
</li> </ul> You cannot use this operation to create a CMK in a different
AWS account.
"""
def create_key(client, input, options \\ []) do
request(client, "CreateKey", input, options)
end
@doc """
Decrypts ciphertext. Ciphertext is plaintext that has been previously
encrypted by using any of the following operations:
<ul> <li> `GenerateDataKey`
</li> <li> `GenerateDataKeyWithoutPlaintext`
</li> <li> `Encrypt`
</li> </ul> Note that if a caller has been granted access permissions to
all keys (through, for example, IAM user policies that grant `Decrypt`
permission on all resources), then ciphertext encrypted by using keys in
other accounts where the key grants access to the caller can be decrypted.
To remedy this, we recommend that you do not grant `Decrypt` access in an
IAM user policy. Instead grant `Decrypt` access only in key policies. If
you must grant `Decrypt` access in an IAM user policy, you should scope the
resource to specific keys or to specific trusted accounts.
"""
def decrypt(client, input, options \\ []) do
request(client, "Decrypt", input, options)
end
@doc """
Deletes the specified alias. You cannot perform this operation on an alias
in a different AWS account.
Because an alias is not a property of a CMK, you can delete and change the
aliases of a CMK without affecting the CMK. Also, aliases do not appear in
the response from the `DescribeKey` operation. To get the aliases of all
CMKs, use the `ListAliases` operation.
Each CMK can have multiple aliases. To change the alias of a CMK, use
`DeleteAlias` to delete the current alias and `CreateAlias` to create a new
alias. To associate an existing alias with a different customer master key
(CMK), call `UpdateAlias`.
"""
def delete_alias(client, input, options \\ []) do
request(client, "DeleteAlias", input, options)
end
@doc """
Deletes key material that you previously imported. This operation makes the
specified customer master key (CMK) unusable. For more information about
importing key material into AWS KMS, see [Importing Key
Material](http://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*. You cannot perform
this operation on a CMK in a different AWS account.
When the specified CMK is in the `PendingDeletion` state, this operation
does not change the CMK's state. Otherwise, it changes the CMK's state to
`PendingImport`.
After you delete key material, you can use `ImportKeyMaterial` to reimport
the same key material into the CMK.
"""
def delete_imported_key_material(client, input, options \\ []) do
request(client, "DeleteImportedKeyMaterial", input, options)
end
@doc """
Provides detailed information about the specified customer master key
(CMK).
To perform this operation on a CMK in a different AWS account, specify the
key ARN or alias ARN in the value of the KeyId parameter.
"""
def describe_key(client, input, options \\ []) do
request(client, "DescribeKey", input, options)
end
@doc """
Sets the state of a customer master key (CMK) to disabled, thereby
preventing its use for cryptographic operations. You cannot perform this
operation on a CMK in a different AWS account.
For more information about how key state affects the use of a CMK, see [How
Key State Affects the Use of a Customer Master
Key](http://docs.aws.amazon.com/kms/latest/developerguide/key-state.html)
in the *AWS Key Management Service Developer Guide*.
"""
def disable_key(client, input, options \\ []) do
request(client, "DisableKey", input, options)
end
@doc """
Disables automatic rotation of the key material for the specified customer
master key (CMK). You cannot perform this operation on a CMK in a different
AWS account.
"""
def disable_key_rotation(client, input, options \\ []),
  do: request(client, "DisableKeyRotation", input, options)
@doc """
Sets the state of a customer master key (CMK) to enabled, thereby
permitting its use for cryptographic operations. You cannot perform this
operation on a CMK in a different AWS account.
"""
def enable_key(client, input, options \\ []),
  do: request(client, "EnableKey", input, options)
@doc """
Enables automatic rotation of the key material for the specified customer
master key (CMK). You cannot perform this operation on a CMK in a different
AWS account.
"""
def enable_key_rotation(client, input, options \\ []),
  do: request(client, "EnableKeyRotation", input, options)
@doc """
Encrypts plaintext into ciphertext by using a customer master key (CMK).
The `Encrypt` operation has two primary use cases:
<ul> <li> You can encrypt up to 4 kilobytes (4096 bytes) of arbitrary data
such as an RSA key, a database password, or other sensitive information.
</li> <li> To move encrypted data from one AWS region to another, you can
use this operation to encrypt in the new region the plaintext data key that
was used to encrypt the data in the original region. This provides you with
an encrypted copy of the data key that can be decrypted in the new region
and used there to decrypt the encrypted data.
</li> </ul> To perform this operation on a CMK in a different AWS account,
specify the key ARN or alias ARN in the value of the KeyId parameter.
Unless you are moving encrypted data from one region to another, you don't
use this operation to encrypt a generated data key within a region. To get
data keys that are already encrypted, call the `GenerateDataKey` or
`GenerateDataKeyWithoutPlaintext` operation. Data keys don't need to be
encrypted again by calling `Encrypt`.
To encrypt data locally in your application, use the `GenerateDataKey`
operation to return a plaintext data encryption key and a copy of the key
encrypted under the CMK of your choosing.
"""
def encrypt(client, input, options \\ []),
  do: request(client, "Encrypt", input, options)
@doc """
Returns a data encryption key that you can use in your application to
encrypt data locally.
You must specify the customer master key (CMK) under which to generate the
data key. You must also specify the length of the data key using either the
`KeySpec` or `NumberOfBytes` field. You must specify one field or the
other, but not both. For common key lengths (128-bit and 256-bit symmetric
keys), we recommend that you use `KeySpec`. To perform this operation on a
CMK in a different AWS account, specify the key ARN or alias ARN in the
value of the KeyId parameter.
This operation returns a plaintext copy of the data key in the `Plaintext`
field of the response, and an encrypted copy of the data key in the
`CiphertextBlob` field. The data key is encrypted under the CMK specified
in the `KeyId` field of the request.
We recommend that you use the following pattern to encrypt data locally in
your application:
<ol> <li> Use this operation (`GenerateDataKey`) to get a data encryption
key.
</li> <li> Use the plaintext data encryption key (returned in the
`Plaintext` field of the response) to encrypt data locally, then erase the
plaintext data key from memory.
</li> <li> Store the encrypted data key (returned in the `CiphertextBlob`
field of the response) alongside the locally encrypted data.
</li> </ol> To decrypt data locally:
<ol> <li> Use the `Decrypt` operation to decrypt the encrypted data key
into a plaintext copy of the data key.
</li> <li> Use the plaintext data key to decrypt data locally, then erase
the plaintext data key from memory.
</li> </ol> To return only an encrypted copy of the data key, use
`GenerateDataKeyWithoutPlaintext`. To return a random byte string that is
cryptographically secure, use `GenerateRandom`.
If you use the optional `EncryptionContext` field, you must store at least
enough information to be able to reconstruct the full encryption context
when you later send the ciphertext to the `Decrypt` operation. It is a good
practice to choose an encryption context that you can reconstruct on the
fly to better secure the ciphertext. For more information, see [Encryption
Context](http://docs.aws.amazon.com/kms/latest/developerguide/encryption-context.html)
in the *AWS Key Management Service Developer Guide*.
"""
def generate_data_key(client, input, options \\ []),
  do: request(client, "GenerateDataKey", input, options)
@doc """
Returns a data encryption key encrypted under a customer master key (CMK).
This operation is identical to `GenerateDataKey` but returns only the
encrypted copy of the data key.
To perform this operation on a CMK in a different AWS account, specify the
key ARN or alias ARN in the value of the KeyId parameter.
This operation is useful in a system that has multiple components with
different degrees of trust. For example, consider a system that stores
encrypted data in containers. Each container stores the encrypted data and
an encrypted copy of the data key. One component of the system, called the
*control plane*, creates new containers. When it creates a new container,
it uses this operation (`GenerateDataKeyWithoutPlaintext`) to get an
encrypted data key and then stores it in the container. Later, a different
component of the system, called the *data plane*, puts encrypted data into
the containers. To do this, it passes the encrypted data key to the
`Decrypt` operation, then uses the returned plaintext data key to encrypt
data, and finally stores the encrypted data in the container. In this
system, the control plane never sees the plaintext data key.
"""
def generate_data_key_without_plaintext(client, input, options \\ []),
  do: request(client, "GenerateDataKeyWithoutPlaintext", input, options)
@doc """
Returns a random byte string that is cryptographically secure.
For more information about entropy and random number generation, see the
[AWS Key Management Service Cryptographic
Details](https://d0.awsstatic.com/whitepapers/KMS-Cryptographic-Details.pdf)
whitepaper.
"""
def generate_random(client, input, options \\ []),
  do: request(client, "GenerateRandom", input, options)
@doc """
Gets a key policy attached to the specified customer master key (CMK). You
cannot perform this operation on a CMK in a different AWS account.
"""
def get_key_policy(client, input, options \\ []),
  do: request(client, "GetKeyPolicy", input, options)
@doc """
Gets a Boolean value that indicates whether automatic rotation of the key
material is enabled for the specified customer master key (CMK).
To perform this operation on a CMK in a different AWS account, specify the
key ARN in the value of the KeyId parameter.
"""
def get_key_rotation_status(client, input, options \\ []),
  do: request(client, "GetKeyRotationStatus", input, options)
@doc """
Returns the items you need in order to import key material into AWS KMS
from your existing key management infrastructure. For more information
about importing key material into AWS KMS, see [Importing Key
Material](http://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
You must specify the key ID of the customer master key (CMK) into which you
will import key material. This CMK's `Origin` must be `EXTERNAL`. You must
also specify the wrapping algorithm and type of wrapping key (public key)
that you will use to encrypt the key material. You cannot perform this
operation on a CMK in a different AWS account.
This operation returns a public key and an import token. Use the public key
to encrypt the key material. Store the import token to send with a
subsequent `ImportKeyMaterial` request. The public key and import token
from the same response must be used together. These items are valid for 24
hours. When they expire, they cannot be used for a subsequent
`ImportKeyMaterial` request. To get new ones, send another
`GetParametersForImport` request.
"""
def get_parameters_for_import(client, input, options \\ []),
  do: request(client, "GetParametersForImport", input, options)
@doc """
Imports key material into an existing AWS KMS customer master key (CMK)
that was created without key material. You cannot perform this operation on
a CMK in a different AWS account. For more information about creating CMKs
with no key material and then importing key material, see [Importing Key
Material](http://docs.aws.amazon.com/kms/latest/developerguide/importing-keys.html)
in the *AWS Key Management Service Developer Guide*.
Before using this operation, call `GetParametersForImport`. Its response
includes a public key and an import token. Use the public key to encrypt
the key material. Then, submit the import token from the same
`GetParametersForImport` response.
When calling this operation, you must specify the following values:
<ul> <li> The key ID or key ARN of a CMK with no key material. Its `Origin`
must be `EXTERNAL`.
(To create a CMK with no key material, call `CreateKey` and set the value of
its `Origin` parameter to `EXTERNAL`. To get the `Origin` of a CMK, call
`DescribeKey`.)
</li> <li> The encrypted key material. To get the public key to encrypt the
key material, call `GetParametersForImport`.
</li> <li> The import token that `GetParametersForImport` returned. This
token and the public key used to encrypt the key material must have come
from the same response.
</li> <li> Whether the key material expires and if so, when. If you set an
expiration date, you can change it only by reimporting the same key
material and specifying a new expiration date. If the key material expires,
AWS KMS deletes the key material and the CMK becomes unusable. To use the
CMK again, you must reimport the same key material.
</li> </ul> When this operation is successful, the CMK's key state changes
from `PendingImport` to `Enabled`, and you can use the CMK. After you
successfully import key material into a CMK, you can reimport the same key
material into that CMK, but you cannot import different key material.
"""
def import_key_material(client, input, options \\ []),
  do: request(client, "ImportKeyMaterial", input, options)
@doc """
Gets a list of all aliases in the caller's AWS account and region. You
cannot list aliases in other accounts. For more information about aliases,
see `CreateAlias`.
The response might include several aliases that do not have a `TargetKeyId`
field because they are not associated with a CMK. These are predefined
aliases that are reserved for CMKs managed by AWS services. If an alias is
not associated with a CMK, the alias does not count against the [alias
limit](http://docs.aws.amazon.com/kms/latest/developerguide/limits.html#aliases-limit)
for your account.
"""
def list_aliases(client, input, options \\ []),
  do: request(client, "ListAliases", input, options)
@doc """
Gets a list of all grants for the specified customer master key (CMK).
To perform this operation on a CMK in a different AWS account, specify the
key ARN in the value of the KeyId parameter.
"""
def list_grants(client, input, options \\ []),
  do: request(client, "ListGrants", input, options)
@doc """
Gets the names of the key policies that are attached to a customer master
key (CMK). This operation is designed to get policy names that you can use
in a `GetKeyPolicy` operation. However, the only valid policy name is
`default`. You cannot perform this operation on a CMK in a different AWS
account.
"""
def list_key_policies(client, input, options \\ []),
  do: request(client, "ListKeyPolicies", input, options)
@doc """
Gets a list of all customer master keys (CMKs) in the caller's AWS account
and region.
"""
def list_keys(client, input, options \\ []),
  do: request(client, "ListKeys", input, options)
@doc """
Returns a list of all tags for the specified customer master key (CMK).
You cannot perform this operation on a CMK in a different AWS account.
"""
def list_resource_tags(client, input, options \\ []),
  do: request(client, "ListResourceTags", input, options)
@doc """
Returns a list of all grants for which the grant's `RetiringPrincipal`
matches the one specified.
A typical use is to list all grants that you are able to retire. To retire
a grant, use `RetireGrant`.
"""
def list_retirable_grants(client, input, options \\ []),
  do: request(client, "ListRetirableGrants", input, options)
@doc """
Attaches a key policy to the specified customer master key (CMK). You
cannot perform this operation on a CMK in a different AWS account.
For more information about key policies, see [Key
Policies](http://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html)
in the *AWS Key Management Service Developer Guide*.
"""
def put_key_policy(client, input, options \\ []),
  do: request(client, "PutKeyPolicy", input, options)
@doc """
Encrypts data on the server side with a new customer master key (CMK)
without exposing the plaintext of the data on the client side. The data is
first decrypted and then reencrypted. You can also use this operation to
change the encryption context of a ciphertext.
You can reencrypt data using CMKs in different AWS accounts.
Unlike other operations, `ReEncrypt` is authorized twice, once as
`ReEncryptFrom` on the source CMK and once as `ReEncryptTo` on the
destination CMK. We recommend that you include the `"kms:ReEncrypt*"`
permission in your [key
policies](http://docs.aws.amazon.com/kms/latest/developerguide/key-policies.html)
to permit reencryption from or to the CMK. This permission is automatically
included in the key policy when you create a CMK through the console, but
you must include it manually when you create a CMK programmatically or when
you set a key policy with the `PutKeyPolicy` operation.
"""
def re_encrypt(client, input, options \\ []),
  do: request(client, "ReEncrypt", input, options)
@doc """
Retires a grant. To clean up, you can retire a grant when you're done using
it. You should revoke a grant when you intend to actively deny operations
that depend on it. The following are permitted to call this API:
<ul> <li> The AWS account (root user) under which the grant was created
</li> <li> The `RetiringPrincipal`, if present in the grant
</li> <li> The `GranteePrincipal`, if `RetireGrant` is an operation
specified in the grant
</li> </ul> You must identify the grant to retire by its grant token or by
a combination of the grant ID and the Amazon Resource Name (ARN) of the
customer master key (CMK). A grant token is a unique variable-length
base64-encoded string. A grant ID is a 64 character unique identifier of a
grant. The `CreateGrant` operation returns both.
"""
def retire_grant(client, input, options \\ []),
  do: request(client, "RetireGrant", input, options)
@doc """
Revokes the specified grant for the specified customer master key (CMK).
You can revoke a grant to actively deny operations that depend on it.
To perform this operation on a CMK in a different AWS account, specify the
key ARN in the value of the KeyId parameter.
"""
def revoke_grant(client, input, options \\ []),
  do: request(client, "RevokeGrant", input, options)
@doc """
Schedules the deletion of a customer master key (CMK). You may provide a
waiting period, specified in days, before deletion occurs. If you do not
provide a waiting period, the default period of 30 days is used. When this
operation is successful, the state of the CMK changes to `PendingDeletion`.
Before the waiting period ends, you can use `CancelKeyDeletion` to cancel
the deletion of the CMK. After the waiting period ends, AWS KMS deletes the
CMK and all AWS KMS data associated with it, including all aliases that
refer to it.
You cannot perform this operation on a CMK in a different AWS account.
<important> Deleting a CMK is a destructive and potentially dangerous
operation. When a CMK is deleted, all data that was encrypted under the CMK
is rendered unrecoverable. To restrict the use of a CMK without deleting
it, use `DisableKey`.
</important> For more information about scheduling a CMK for deletion, see
[Deleting Customer Master
Keys](http://docs.aws.amazon.com/kms/latest/developerguide/deleting-keys.html)
in the *AWS Key Management Service Developer Guide*.
"""
def schedule_key_deletion(client, input, options \\ []),
  do: request(client, "ScheduleKeyDeletion", input, options)
@doc """
Adds or overwrites one or more tags for the specified customer master key
(CMK). You cannot perform this operation on a CMK in a different AWS
account.
Each tag consists of a tag key and a tag value. Tag keys and tag values are
both required, but tag values can be empty (null) strings.
You cannot use the same tag key more than once per CMK. For example,
consider a CMK with one tag whose tag key is `Purpose` and tag value is
`Test`. If you send a `TagResource` request for this CMK with a tag key of
`Purpose` and a tag value of `Prod`, it does not create a second tag.
Instead, the original tag is overwritten with the new tag value.
For information about the rules that apply to tag keys and tag values, see
[User-Defined Tag
Restrictions](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/allocation-tag-restrictions.html)
in the *AWS Billing and Cost Management User Guide*.
"""
def tag_resource(client, input, options \\ []),
  do: request(client, "TagResource", input, options)
@doc """
Removes the specified tag or tags from the specified customer master key
(CMK). You cannot perform this operation on a CMK in a different AWS
account.
To remove a tag, you specify the tag key for each tag to remove. You do not
specify the tag value. To overwrite the tag value for an existing tag, use
`TagResource`.
"""
def untag_resource(client, input, options \\ []),
  do: request(client, "UntagResource", input, options)
@doc """
Associates an existing alias with a different customer master key (CMK).
Each CMK can have multiple aliases, but the aliases must be unique within
the account and region. You cannot perform this operation on an alias in a
different AWS account.
This operation works only on existing aliases. To change the alias of a CMK
to a new value, use `CreateAlias` to create a new alias and `DeleteAlias`
to delete the old alias.
Because an alias is not a property of a CMK, you can create, update, and
delete the aliases of a CMK without affecting the CMK. Also, aliases do not
appear in the response from the `DescribeKey` operation. To get the aliases
of all CMKs in the account, use the `ListAliases` operation.
An alias name can contain only alphanumeric characters, forward slashes
(/), underscores (_), and dashes (-). An alias must start with the word
`alias` followed by a forward slash (`alias/`). Alias names cannot begin
with `aws`; that alias name prefix is reserved by Amazon Web Services (AWS).
"""
def update_alias(client, input, options \\ []),
  do: request(client, "UpdateAlias", input, options)
@doc """
Updates the description of a customer master key (CMK). To see the
description of a CMK, use `DescribeKey`.
You cannot perform this operation on a CMK in a different AWS account.
"""
def update_key_description(client, input, options \\ []),
  do: request(client, "UpdateKeyDescription", input, options)
# Dispatches one KMS action as an AWS JSON 1.1 request.
#
# Every KMS operation goes through the same signed POST; the action name is
# carried in the "X-Amz-Target" header ("TrentService.<Action>").
#
# Returns:
#   * `{:ok, parsed_body | nil, response}` on HTTP 200 (nil for an empty body)
#   * `{:error, {type, message}}` for a non-200 AWS error document
#   * `{:error, %HTTPoison.Error{}}` on a transport failure
#
# NOTE: the previous spec advertised `Poison.Response.t` (no such module) and
# `{:error, Poison.Parser.t}`, neither of which matched the actual returns.
@spec request(map(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t() | nil, HTTPoison.Response.t()}
        | {:error, {binary() | nil, binary() | nil}}
        | {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
  client = %{client | service: "kms"}
  host = get_host("kms", client)
  url = get_url(host, client)

  headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "TrentService.#{action}"}
  ]

  payload = Poison.Encoder.encode(input, [])
  # Signing must happen after the payload is final; it covers the body hash.
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)

  case HTTPoison.post(url, payload, headers, options) do
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body), response}

    {:ok, %HTTPoison.Response{body: body}} ->
      # AWS error documents carry the exception type in "__type".
      error = Poison.Parser.parse!(body)
      {:error, {error["__type"], error["message"]}}

    {:error, %HTTPoison.Error{} = error} ->
      error_result(error)
  end
end

# Normalizes a transport error into the `{:error, reason}` shape.
defp error_result(%HTTPoison.Error{} = error), do: {:error, error}
# Builds the request host; the special "local" region targets localhost
# (used for testing against a local endpoint).
defp get_host(endpoint_prefix, client) do
  case client.region do
    "local" -> "localhost"
    region -> "#{endpoint_prefix}.#{region}.#{client.endpoint}"
  end
end
# Assembles the base URL from the client's protocol and port plus the host
# computed by get_host/2.
defp get_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/kms.ex
| 0.891617
| 0.463991
|
kms.ex
|
starcoder
|
defmodule Circuits.GPIO do
  @moduledoc """
  Control GPIOs from Elixir

  If you're coming from Elixir/ALE, check out our [porting guide](PORTING.md).

  `Circuits.GPIO` works great with LEDs, buttons, many kinds of sensors, and
  simple control of motors. In general, if a device requires high speed
  transactions or has hard real-time constraints in its interactions, this is not
  the right library. For those devices, see if there's a Linux kernel driver.
  """

  alias Circuits.GPIO.Nif

  @typedoc "A GPIO pin number. See your device's documentation for how these connect to wires"
  @type pin_number :: non_neg_integer()

  @typedoc "The GPIO direction (input or output)"
  @type pin_direction :: :input | :output

  @typedoc "GPIO logic value (low = 0 or high = 1)"
  @type value :: 0 | 1

  @typedoc "Trigger edge for pin change notifications"
  @type trigger :: :rising | :falling | :both | :none

  @typedoc "Pull mode for platforms that support controllable pullups and pulldowns"
  @type pull_mode :: :not_set | :none | :pullup | :pulldown

  @typedoc "Options for open/3"
  @type open_option :: {:initial_value, value() | :not_set} | {:pull_mode, pull_mode()}

  # Public API

  @doc """
  Open a GPIO for use.

  `pin` should be a valid GPIO pin number on the system and `pin_direction`
  should be `:input` or `:output`. If opening as an output, then be sure to set
  the `:initial_value` option if you need the set to be glitch free.

  Options:

  * :initial_value - Set to `:not_set`, `0` or `1` if this is an output.
    `:not_set` is the default.
  * :pull_mode - Set to `:not_set`, `:pullup`, `:pulldown`, or `:none` for an
    input pin. `:not_set` is the default.
  """
  @spec open(pin_number(), pin_direction(), [open_option()]) ::
          {:ok, reference()} | {:error, atom()}
  def open(pin_number, pin_direction, options \\ []) do
    # Validate eagerly so a bad option raises here instead of failing in the NIF.
    check_open_options(options)
    value = Keyword.get(options, :initial_value, :not_set)
    pull_mode = Keyword.get(options, :pull_mode, :not_set)
    Nif.open(pin_number, pin_direction, value, pull_mode)
  end

  # Recursively validate the open/3 option list; raises on the first unknown
  # option or out-of-range value.
  defp check_open_options([]), do: :ok

  defp check_open_options([{:initial_value, value} | rest]) when value in [:not_set, 0, 1] do
    check_open_options(rest)
  end

  defp check_open_options([{:pull_mode, value} | rest])
       when value in [:not_set, :pullup, :pulldown, :none] do
    check_open_options(rest)
  end

  defp check_open_options([bad_option | _]) do
    raise ArgumentError.exception("Unsupported option to GPIO.open/3: #{inspect(bad_option)}")
  end

  @doc """
  Release the resources associated with the GPIO.

  This is optional. The garbage collector will free GPIO resources that aren't in
  use, but this will free them sooner.
  """
  @spec close(reference()) :: :ok
  def close(gpio) do
    Nif.close(gpio)
  end

  @doc """
  Read the current value on a pin.
  """
  @spec read(reference()) :: value()
  def read(gpio) do
    Nif.read(gpio)
  end

  @doc """
  Set the value of a pin. The pin should be configured to an output
  for this to work.
  """
  @spec write(reference(), value()) :: :ok
  def write(gpio, value) do
    Nif.write(gpio, value)
  end

  @doc """
  Enable or disable pin value change notifications. The notifications
  are sent based on the trigger parameter:

  * :none - No notifications are sent
  * :rising - Send a notification when the pin changes from 0 to 1
  * :falling - Send a notification when the pin changes from 1 to 0
  * :both - Send a notification on all changes

  Available Options:

  * `suppress_glitches` - It is possible that the pin transitions to a value
    and back by the time that Circuits GPIO gets to process it. This controls
    whether a notification is sent. Set this to `false` to receive notifications.
  * `receiver` - Process which should receive the notifications.
    Defaults to the calling process (`self()`)

  Notifications look like:

  ```
  {:gpio, pin_number, timestamp, value}
  ```

  Where `pin_number` is the pin that changed values, `timestamp` is roughly when
  the transition occurred in nanoseconds, and `value` is the new value.
  """
  @spec set_interrupts(reference(), trigger(), list()) :: :ok | {:error, atom()}
  def set_interrupts(gpio, trigger, opts \\ []) do
    suppress_glitches = Keyword.get(opts, :suppress_glitches, true)

    # Resolve a registered name to a pid; fall back to the caller when the
    # option is missing or the name isn't registered.
    receiver =
      case Keyword.get(opts, :receiver) do
        pid when is_pid(pid) -> pid
        name when is_atom(name) -> Process.whereis(name) || self()
        _ -> self()
      end

    Nif.set_interrupts(gpio, trigger, suppress_glitches, receiver)
  end

  @doc """
  Change the direction of the pin.
  """
  @spec set_direction(reference(), pin_direction()) :: :ok | {:error, atom()}
  def set_direction(gpio, pin_direction) do
    Nif.set_direction(gpio, pin_direction)
  end

  @doc """
  Enable or disable internal pull-up or pull-down resistor to GPIO pin
  """
  @spec set_pull_mode(reference(), pull_mode()) :: :ok | {:error, atom()}
  def set_pull_mode(gpio, pull_mode) do
    Nif.set_pull_mode(gpio, pull_mode)
  end

  @doc """
  Get the GPIO pin number
  """
  # Spec parenthesized for consistency with the other specs in this module.
  @spec pin(reference()) :: pin_number()
  def pin(gpio) do
    Nif.pin(gpio)
  end

  @doc """
  Return info about the low level GPIO interface

  This may be helpful when debugging issues.
  """
  @spec info() :: map()
  defdelegate info(), to: Nif

  defmodule :circuits_gpio do
    @moduledoc """
    Erlang interface to Circuits.GPIO

    Example Erlang code: `circuits_gpio:open(5, output)`
    """
    defdelegate open(pin_number, pin_direction), to: Circuits.GPIO
    defdelegate read(gpio), to: Circuits.GPIO
    defdelegate write(gpio, value), to: Circuits.GPIO
    defdelegate set_interrupts(gpio, trigger), to: Circuits.GPIO
    defdelegate set_interrupts(gpio, trigger, opts), to: Circuits.GPIO
    defdelegate set_direction(gpio, pin_direction), to: Circuits.GPIO
    defdelegate set_pull_mode(gpio, pull_mode), to: Circuits.GPIO
    defdelegate pin(gpio), to: Circuits.GPIO
  end
end
|
lib/gpio.ex
| 0.862757
| 0.778313
|
gpio.ex
|
starcoder
|
defmodule Custodian.Github.Tentacat.Labels do
  @moduledoc """
  Provides an implementation of the `Custodian.Github.Labels` behaviour with
  the GitHub API.

  This module contains convenience methods for listing, adding, and removing
  labels from GitHub pull requests via the GitHub v3 API over HTTPS.
  """
  @behaviour Custodian.Github.Labels

  alias Custodian.Bots.Bot
  alias Custodian.Github
  alias Custodian.Github.Tentacat.Client

  @doc """
  Returns a list of all of the labels on a given pull request. [More info].

  ## Examples

      iex> all({%Bot{}, 1})
      ["needs-review"]

      iex> all({%Bot{}, 1})
      []

  [More info]: https://developer.github.com/v3/issues/labels/#list-labels-on-an-issue
  """
  @spec all(Github.pull_request()) :: [String.t()]
  def all(pull_request)

  def all({repo, pr}) do
    response =
      Tentacat.Issues.Labels.list(
        repo.owner,
        repo.name,
        pr,
        client(repo)
      )

    # Extract just the label names. Enum.map/2 replaces the previous
    # reduce-with-`acc ++ [x]`, which rebuilt the list on every step (O(n²)).
    Enum.map(response, & &1["name"])
  end

  @doc """
  Adds the provided label(s) to the given pull request. Checks if labels
  are present before trying to add. This avoids creating duplicate
  events on the pull request timeline. [More info].

  ## Examples

      iex> add({%Bot{}, 1}, "needs-review")
      {%Bot{}, 1}

      iex> add({%Bot{}, 1}, ["needs-review", "in-progress"])
      {%Bot{}, 1}

  [More info]: https://developer.github.com/v3/issues/labels/#add-labels-to-an-issue
  """
  @spec add(
          Github.pull_request(),
          [String.t()] | String.t()
        ) :: Github.pull_request()
  def add(pull_request, labels)

  def add({repo, pr}, labels) do
    # Only send labels that aren't already on the PR.
    new_labels = List.wrap(labels) -- all({repo, pr})

    Tentacat.Issues.Labels.add(
      repo.owner,
      repo.name,
      pr,
      new_labels,
      client(repo)
    )

    {repo, pr}
  end

  @doc """
  Removes a given label from a given pull request. Checks if label is present
  before removing to avoid API errors. [More info].

  ## Examples

      iex> remove({%Bot{}, 1}, "needs-review")
      {%Bot{}, 1}

      iex> remove({%Bot{}, 1}, "in-progress")
      {%Bot{}, 1}

  [More info]: https://developer.github.com/v3/issues/labels/#remove-a-label-from-an-issue
  """
  @spec remove(Github.pull_request(), String.t()) :: Github.pull_request()
  def remove(pull_request, label)

  def remove({repo, pr}, label) do
    if Enum.member?(all({repo, pr}), label) do
      Tentacat.Issues.Labels.remove(
        repo.owner,
        repo.name,
        pr,
        label,
        client(repo)
      )
    end

    {repo, pr}
  end

  # Builds an installation-scoped API client for the bot's repository.
  @spec client(Bot.t()) :: struct
  defp client(bot) do
    Client.installation(bot.installation_id)
  end
end
|
lib/custodian/github/tentacat/labels.ex
| 0.526343
| 0.496033
|
labels.ex
|
starcoder
|
defmodule Dust.Requests do
  @moduledoc """
  Requests provides an API for fetching pages. It also supports automatic
  retries with constant backoff should the request fail.
  """
  use Retry

  alias Dust.Parsers

  alias Dust.Requests.{
    State,
    Proxy,
    Result,
    Util
  }

  @type url() :: String.t()
  @type options() :: Keyword.t() | any()
  @type result() :: {:ok, Result.t(), State.t()} | {:error, Result.t(), State.t()}

  # Defaults: redirect limit, retry attempts, and constant backoff interval.
  @max_redirects 3
  @max_retries 3
  @wait_ms 100

  @doc """
  Fetches the page at `url`, retrying on failure.

  ## Arguments

  1. `url` - url to page,
  2. `options` - keyword list with options

  Supports the following options

  1. :proxy - Proxy | string
  2. :headers - map | keyword list
  3. :max_retries - int,
  4. :max_redirects - int,
  5. :follow_redirect - boolean

  ## Usage

  ```elixir
  iex> Dust.Requests.get(<URL>, proxy: "socks5://user:pass@10.10.10.10:1080", max_retries: 8)
  ```
  """
  @spec get(url(), options()) :: result()
  def get(url, options \\ []) do
    # Pop request-level options; whatever remains is forwarded to HTTPoison.
    {max_retries, options} = Keyword.pop(options, :max_retries, @max_retries)
    {headers, options} = Keyword.pop(options, :headers, [])
    {proxy_config, options} = Keyword.pop(options, :proxy, nil)

    proxy =
      proxy_config
      |> Util.get_proxy()
      |> Proxy.get_config()

    # Retry macro (from the Retry library): up to `max_retries` attempts,
    # waiting @wait_ms between each; the final result or error is passed through.
    retry with: @wait_ms |> constant_backoff() |> Stream.take(max_retries) do
      fetch(url, headers, proxy, get_options(options))
    after
      result -> result
    else
      error -> error
    end
  end

  ## Private helpers

  defp fetch(url, headers, proxy, options) do
    # Monotonic clock: measures request duration for the Result metadata.
    start_ms = System.monotonic_time(:millisecond)
    client = State.new(url, headers, proxy, options)

    {status, result} =
      url
      |> Parsers.URI.normalize()
      |> HTTPoison.get(headers, options)
      |> Result.from_request(Util.duration(start_ms))

    {status, result, client}
  end

  # Merges redirect defaults into the caller-supplied HTTPoison options.
  defp get_options(options) do
    {max_redirects, options} = Keyword.pop(options, :max_redirects, @max_redirects)
    {follow_redirect, options} = Keyword.pop(options, :follow_redirect, true)

    base_options = [
      max_redirects: max_redirects,
      follow_redirect: follow_redirect
    ]

    base_options
    |> Keyword.merge(options)
  end
end
|
lib/dust/requests.ex
| 0.82734
| 0.428712
|
requests.ex
|
starcoder
|
defmodule SSHKit.SCP.Upload do
@moduledoc false
require Bitwise
alias SSHKit.SCP.Command
alias SSHKit.SSH
defstruct [:source, :target, :state, :handler, options: []]
@doc """
Uploads a local file or directory to a remote host.

## Options

* `:verbose` - let the remote scp process be verbose, default `false`
* `:recursive` - set to `true` for copying directories, default `false`
* `:preserve` - preserve timestamps, default `false`
* `:timeout` - timeout in milliseconds, default `:infinity`

## Example

```
:ok = SSHKit.SCP.Upload.transfer(conn, ".", "/home/code/sshkit", recursive: true)
```
"""
def transfer(connection, source, target, options \\ []) do
  upload = init(source, target, options)
  exec(upload, connection)
end
@doc """
Configures the upload of a local file or directory to a remote host.

## Options

* `:verbose` - let the remote scp process be verbose, default `false`
* `:recursive` - set to `true` for copying directories, default `false`
* `:preserve` - preserve timestamps, default `false`
* `:timeout` - timeout in milliseconds, default `:infinity`

## Example

```
iex(1)> SSHKit.SCP.Upload.init(".", "/home/code/sshkit", recursive: true)
%SSHKit.SCP.Upload{
  handler: #Function<1.78222439/2 in SSHKit.SCP.Upload.connection_handler/1>,
  options: [recursive: true],
  source: "/Users/sshkit/code/sshkit.ex",
  state: {:next, "/Users/sshkit/code", [["sshkit.ex"]], []},
  target: "/home/code/sshkit"
}
```
"""
def init(source, target, options \\ []) do
  expanded = Path.expand(source)

  %__MODULE__{
    source: expanded,
    target: target,
    # Initial state machine position: about to process `basename` inside `dirname`.
    state: {:next, Path.dirname(expanded), [[Path.basename(expanded)]], []},
    handler: connection_handler(options),
    options: options
  }
end
@doc """
Executes an upload of a local file or directory to a remote host.
## Example
```
:ok = SSHKit.SCP.Upload.exec(upload, conn)
```
"""
def exec(upload = %{source: source, options: options}, connection) do
recursive = Keyword.get(options, :recursive, false)
if !recursive && File.dir?(source) do
{:error, "SCP option :recursive not specified, but local file is a directory (#{source})"}
else
start(upload, connection)
end
end
defp start(%{target: target, state: state, handler: handler, options: options}, connection) do
timeout = Keyword.get(options, :timeout, :infinity)
map_cmd = Keyword.get(options, :map_cmd, &(&1))
command = map_cmd.(Command.build(:upload, target, options))
ssh = Keyword.get(options, :ssh, SSH)
ssh.run(connection, command, timeout: timeout, acc: {:cont, state}, fun: handler)
end
@normal 0
@warning 1
@fatal 2
defp connection_handler(options) do
fn message, state ->
case message do
{:data, _, 0, <<@warning, data :: binary>>} -> warning(options, state, data)
{:data, _, 0, <<@fatal, data :: binary>>} -> fatal(options, state, data)
{:data, _, 0, <<@normal>>} ->
handle_data(state, options)
{:data, _, 0, data} ->
handle_error_data(state, options, data)
{:exit_status, _, status} -> exited(options, state, status)
{:eof, _} -> eof(options, state)
{:closed, _} -> closed(options, state)
end
end
end
defp handle_data(state, options) do
case state do
{:next, cwd, stack, errs} -> next(options, cwd, stack, errs)
{:directory, name, stat, cwd, stack, errs} -> directory(options, name, stat, cwd, stack, errs)
{:regular, name, stat, cwd, stack, errs} -> regular(options, name, stat, cwd, stack, errs)
{:write, name, stat, cwd, stack, errs} -> write(options, name, stat, cwd, stack, errs)
end
end
defp handle_error_data(state, options, data) do
case state do
{:warning, state, buffer} -> warning(options, state, buffer <> data)
{:fatal, state, buffer} -> fatal(options, state, buffer <> data)
end
end
defp next(_, _, [[]], errs) do
{:cont, :eof, {:done, nil, errs}}
end
defp next(_, cwd, [[] | dirs], errs) do
{:cont, 'E\n', {:next, Path.dirname(cwd), dirs, errs}}
end
defp next(options, cwd, [[name | rest] | dirs], errs) do
path = Path.join(cwd, name)
stat = File.stat!(path, time: :posix)
stack = case stat.type do
:directory -> [File.ls!(path) | [rest | dirs]]
:regular -> [rest | dirs]
end
if Keyword.get(options, :preserve, false) do
time(options, stat.type, name, stat, cwd, stack, errs)
else
case stat.type do
:directory -> directory(options, name, stat, cwd, stack, errs)
:regular -> regular(options, name, stat, cwd, stack, errs)
end
end
end
defp time(_, type, name, stat, cwd, stack, errs) do
directive = 'T#{stat.mtime} 0 #{stat.atime} 0\n'
{:cont, directive, {type, name, stat, cwd, stack, errs}}
end
defp directory(_, name, stat, cwd, stack, errs) do
directive = 'D#{modefmt(stat.mode)} 0 #{name}\n'
{:cont, directive, {:next, Path.join(cwd, name), stack, errs}}
end
defp regular(_, name, stat, cwd, stack, errs) do
directive = 'C#{modefmt(stat.mode)} #{stat.size} #{name}\n'
{:cont, directive, {:write, name, stat, cwd, stack, errs}}
end
defp write(_, name, _, cwd, stack, errs) do
fs = File.stream!(Path.join(cwd, name), [], 16_384)
{:cont, Stream.concat(fs, [<<0>>]), {:next, cwd, stack, errs}}
end
defp exited(_, {:done, nil, errs}, status) do
{:cont, {:done, status, errs}}
end
defp exited(_, {_, _, _, errs}, status) do
{:halt, {:error, "SCP exited before completing the transfer (#{status}): #{Enum.join(errs, ", ")}"}}
end
defp eof(_, state) do
{:cont, state}
end
defp closed(_, {:done, 0, _}) do
{:cont, :ok}
end
defp closed(_, {:done, status, errs}) do
{:cont, {:error, "SCP exited with non-zero exit code #{status}: #{Enum.join(errs, ", ")}"}}
end
defp closed(_, _) do
{:cont, {:error, "SCP channel closed before completing the transfer"}}
end
defp warning(_, state = {name, cwd, stack, errs}, buffer) do
if String.last(buffer) == "\n" do
{:cont, {name, cwd, stack, errs ++ [String.trim(buffer)]}}
else
{:cont, {:warning, state, buffer}}
end
end
defp fatal(_, state, buffer) do
if String.last(buffer) == "\n" do
{:halt, {:error, String.trim(buffer)}}
else
{:cont, {:fatal, state, buffer}}
end
end
defp modefmt(value) do
value
|> Bitwise.band(0o7777)
|> Integer.to_string(8)
|> String.pad_leading(4, "0")
end
end
|
lib/sshkit/scp/upload.ex
| 0.86342
| 0.737855
|
upload.ex
|
starcoder
|
defmodule Tradehub.Exchange do
  @moduledoc """
  This module allows developers to interact with the public endpoints mainly focusing on the
  exchange information.
  """
  import Tradehub.Raising

  @doc """
  Requests all known tokens on the Tradehub chain.
  ## Examples
  iex> Tradehub.Exchange.tokens
  """
  @spec tokens :: {:error, HTTPoison.Error.t()} | {:ok, list(Tradehub.token())}
  @spec tokens! :: list(Tradehub.token())
  def tokens do
    "get_tokens"
    |> Tradehub.get()
    |> unwrap()
  end

  raising(:tokens)

  @doc """
  Request information about a token
  ## Examples
  iex> Tradehub.Exchange.token("swth")
  """
  @spec token(String.t()) :: {:error, HTTPoison.Error.t()} | String.t() | {:ok, Tradehub.token()}
  @spec token!(String.t()) :: Tradehub.token() | String.t()
  def token(denom) do
    "get_token"
    |> Tradehub.get(params: %{token: String.downcase(denom)})
    |> unwrap()
  end

  raising(:token, denom)

  @doc """
  Requests all markets or filtered markets
  ## Examples
  iex> Tradehub.Exchange.markets
  """
  @typedoc """
  Query params for `/get_markets` endpoint
  - **market_type** - type of the market, `futures` or `spot`
  - **is_active** - if only active markets should be returned
  - **is_settled** - if only settled markets should be returned
  """
  @type market_options :: %{
          market_type: :future | :spot,
          is_active: boolean(),
          is_settled: boolean()
        }

  @spec markets(%{}) :: {:error, HTTPoison.Error.t()} | {:ok, list(Tradehub.market())}
  @spec markets(market_options()) :: {:error, HTTPoison.Error.t()} | {:ok, list(Tradehub.market())}
  @spec markets!(market_options()) :: list(Tradehub.market())
  def markets(market_options \\ %{}) do
    "get_markets"
    |> Tradehub.get(params: market_options)
    |> unwrap()
  end

  raising(:markets)
  raising(:markets, market_options)

  @doc """
  Request information about a market
  ## Examples
  iex> Tradehub.Exchange.market("swth_eth1")
  """
  @spec market(String.t()) :: {:ok, Tradehub.market()} | String.t() | {:error, HTTPoison.Error.t()}
  @spec market!(String.t()) :: Tradehub.market() | String.t()
  def market(market) do
    "get_market"
    |> Tradehub.get(params: %{market: String.downcase(market)})
    |> unwrap()
  end

  raising(:market, market)

  @doc """
  Get the latest orderbook of given market
  ## Parameters
  - **market**: a market ticker used by the chain, e.g `swth_eth1`
  - **limit**: number of results per side (asks, bids)
  ## Examples
  iex> Tradehub.Exchange.orderbook("swth_eth1")
  """
  @spec orderbook(String.t(), integer) :: {:ok, Tradehub.orderbook()} | {:error, HTTPoison.Error.t()}
  @spec orderbook!(String.t(), integer) :: Tradehub.orderbook()
  def orderbook(market, limit \\ 50) do
    "get_orderbook"
    |> Tradehub.get(params: %{market: String.downcase(market), limit: limit})
    |> unwrap()
  end

  raising(:orderbook, market)
  raising(:orderbook, market, limit)

  @doc """
  Requests oracle informations of the Tradehub
  ## Examples
  iex> Tradehub.Exchange.oracle_results
  """
  @type oracle_id :: String.t()
  @type id_to_oracle :: %{oracle_id => Tradehub.oracle()}
  @spec oracle_results :: {:ok, id_to_oracle} | {:error, HTTPoison.Error.t()}
  @spec oracle_results! :: id_to_oracle
  def oracle_results() do
    "get_oracle_results"
    |> Tradehub.get()
    |> unwrap()
  end

  raising(:oracle_results)

  @doc """
  Requests oracle information about a given oracle id
  ## Examples
  iex> Tradehub.Exchange.oracle_result("SIDXBTC")
  """
  @spec oracle_result(oracle_id) :: {:ok, Tradehub.oracle()} | {:error, HTTPoison.Error.t()}
  @spec oracle_result!(oracle_id) :: Tradehub.oracle()
  def oracle_result(oracle_id) do
    "get_oracle_result"
    |> Tradehub.get(params: %{id: String.upcase(oracle_id)})
    |> unwrap()
  end

  raising(:oracle_result, oracle_id)

  @doc """
  Get insurance fund balances of the chain.
  ## Examples
  iex> Tradehub.Exchange.insurance_balances
  """
  @spec insurance_balances :: {:ok, list(Tradehub.amount())} | {:error, HTTPoison.Error.t()}
  @spec insurance_balances! :: list(Tradehub.amount())
  def insurance_balances do
    "get_insurance_balance"
    |> Tradehub.get()
    |> unwrap()
  end

  raising(:insurance_balances)

  # On a successful request, keep only the parsed response body; any other
  # result (e.g. an {:error, _} tuple) is passed through untouched.
  defp unwrap({:ok, response}), do: {:ok, response.body}
  defp unwrap(other), do: other
end
|
lib/tradehub/exchange.ex
| 0.910694
| 0.449513
|
exchange.ex
|
starcoder
|
Parameters, xscale=1000, xname=$Horizontal\ displacement\ along\ x\ axis\ (mm)$, marksnumber=15, title=$One\ Element\ Torus\ Test$, crop=True
# Temperature curve
temperature, yname=$Temperature\ in\ ^{\circ}C$, legendlocate=bottomright, name=$DynELA\ T$, temperature.plot, name=$Abaqus\ T$, Abaqus/Torus_Temperature.plot
# Plastic strain curve
plasticStrain, yname=$Equivalent\ plastic\ strain\ \overline{\varepsilon}^{p}$, legendlocate=bottomright, name=$DynELA\ \overline{\varepsilon}^{p}$, plasticStrain.plot, name=$Abaqus\ \overline{\varepsilon}^{p}$, Abaqus/Torus_PlasticStrain.plot
# von Mises equivalent stress curve
vonMises, yname=$von\ Mises\ stress\ \overline{\sigma}$, legendlocate=bottomright, name=$DynELA\ \overline{\sigma}$, vonMises.plot, name=$Abaqus\ \overline{\sigma}$, Abaqus/Torus_vonMises.plot
# Density curve
density, yname=$Density$, legendlocate=bottomleft, name=$DynELA\ \rho$, density.plot, name=$Abaqus\ \rho$, Abaqus/Torus_density.plot
# TimeStep curve
timeStep, yname=$Time\ increment\ \Delta t$, legendlocate=bottomleft, name=$DynELA\ \Delta t$, dt.plot, name=$Abaqus\ \Delta t$, Abaqus/Torus_timeStep.plot
# Normal Stress XX curve
stress_11, yname=$Normal\ stress\ \sigma_{11}$, legendlocate=bottomright, name=$DynELA\ \sigma_{11}$, Stress.plot[0:1], name=$Abaqus\ \sigma_{11}$, Abaqus/Torus_StressXX.plot
# Normal Stress YY curve
stress_22, yname=$Normal\ stress\ \sigma_{22}$, legendlocate=bottomright, name=$DynELA\ \sigma_{22}$, Stress.plot[0:2], name=$Abaqus\ \sigma_{22}$, Abaqus/Torus_StressYY.plot
# Torus Stress ZZ curve
stress_33, yname=$Torus\ stress\ \sigma_{33}$, legendlocate=bottomright, name=$DynELA\ \sigma_{33}$, Stress.plot[0:3], name=$Abaqus\ \sigma_{33}$, Abaqus/Torus_StressZZ.plot
# Torus Stress XY curve
stress_12, yname=$Torus\ stress\ \sigma_{12}$, legendlocate=bottomright, name=$DynELA\ \sigma_{12}$, Stress.plot[0:4], name=$Abaqus\ \sigma_{12}$, Abaqus/Torus_StressXY.plot
# Torus Displacement X curve
dispX, yname=$Right\ edge\ displacement\ u_x$, legendlocate=bottomright, name=$DynELA\ u_x$, dispX.plot, name=$Abaqus\ u_x$, Abaqus/Torus_dispX.plot
# Plastic strains curves
plasticStrains, yname=$Equivalent\ plastic\ strain\ \overline{\varepsilon}^{p}$, legendlocate=bottomright, name=$DynELA\ \overline{\varepsilon}^{p}_1;\ \overline{\varepsilon}^{p}_4$, plasticStrains.plot[0:1], name=$DynELA\ \overline{\varepsilon}^{p}_2;\ \overline{\varepsilon}^{p}_3$, plasticStrains.plot[0:2], name=$Abaqus\ \overline{\varepsilon}^{p}_1;\ \overline{\varepsilon}^{p}_4$, Abaqus/Torus_PlasticStrains.plot[0:1], name=$Abaqus\ \overline{\varepsilon}^{p}_2;\ \overline{\varepsilon}^{p}_3$, Abaqus/Torus_PlasticStrains.plot[0:2]
|
Samples/Element/Torus/ElQua4NAx/Curves.ex
| 0.672117
| 0.642678
|
Curves.ex
|
starcoder
|
defmodule CNPJ do
  @moduledoc """
  CNPJ provides you functions to work with CNPJs.
  """
  alias CNPJ.ParsingError

  defguardp is_positive_integer(number) when is_integer(number) and number > 0

  # Weights used to compute the first and second verifier digits.
  @v1_weights [5, 4, 3, 2, 9, 8, 7, 6, 5, 4, 3, 2]
  @v2_weights [6, 5, 4, 3, 2, 9, 8, 7, 6, 5, 4, 3, 2]

  defstruct [:digits]

  @typedoc """
  The CNPJ type. It's composed of fourteen digits (0-9).
  """
  @opaque t :: %CNPJ{
            digits:
              {pos_integer(), pos_integer(), pos_integer(), pos_integer(), pos_integer(),
               pos_integer(), pos_integer(), pos_integer(), pos_integer(), pos_integer(),
               pos_integer(), pos_integer(), pos_integer(), pos_integer()}
          }

  @doc """
  Returns a tuple with the eleven digits of the given cnpj.
  ## Examples
  iex> 70947414000108 |> CNPJ.parse!() |> CNPJ.digits()
  {7, 0, 9, 4, 7, 4, 1, 4, 0, 0, 0, 1, 0, 8}
  """
  @spec digits(t) :: tuple
  def digits(%CNPJ{digits: digits}), do: digits

  @doc """
  Formats a `cnpj` to friendly readable text.
  ## Examples
  iex> 70947414000108 |> CNPJ.parse!() |> CNPJ.format()
  "70.947.414/0001-08"
  """
  @spec format(CNPJ.t()) :: String.t()
  def format(%CNPJ{digits: digits}) do
    # Layout: XX.XXX.XXX/XXXX-XX
    dig_string(digits, 1) <>
      dig_string(digits, 2) <>
      "." <>
      dig_string(digits, 3) <>
      dig_string(digits, 4) <>
      dig_string(digits, 5) <>
      "." <>
      dig_string(digits, 6) <>
      dig_string(digits, 7) <>
      dig_string(digits, 8) <>
      "/" <>
      dig_string(digits, 9) <>
      dig_string(digits, 10) <>
      dig_string(digits, 11) <>
      dig_string(digits, 12) <>
      "-" <>
      dig_string(digits, 13) <>
      dig_string(digits, 14)
  end

  # Returns the 1-based `pos`-th digit of the tuple as a string.
  defp dig_string(digits, pos) do
    pos = pos - 1
    digits |> elem(pos) |> to_string
  end

  @doc """
  Returns a `cnpj` when the given `number` is valid. Otherwise raises
  `CNPJ.ParsingError` error.
  ## Examples
  iex> CNPJ.parse!(30794968000106)
  %CNPJ{digits: {3, 0, 7, 9, 4, 9, 6, 8, 0, 0, 0, 1, 0, 6}}
  iex> CNPJ.parse!("30794968000106")
  %CNPJ{digits: {3, 0, 7, 9, 4, 9, 6, 8, 0, 0, 0, 1, 0, 6}}
  iex> CNPJ.parse!("70.947.414/0001-08")
  %CNPJ{digits: {7, 0, 9, 4, 7, 4, 1, 4, 0, 0, 0, 1, 0, 8}}
  iex> CNPJ.parse!(82)
  ** (CNPJ.ParsingError) invalid_verifier
  """
  @spec parse!(pos_integer() | String.t()) :: CNPJ.t() | no_return()
  def parse!(number) do
    case parse(number) do
      {:ok, cnpj} -> cnpj
      {:error, error} -> raise error
    end
  end

  @doc """
  Returns an `:ok` tuple with a `cnpj` when the given `number` is valid.
  Otherwise returns an `:error` tuple with the error reason.
  ## Examples
  iex> CNPJ.parse(30794968000106)
  {:ok, %CNPJ{digits: {3, 0, 7, 9, 4, 9, 6, 8, 0, 0, 0, 1, 0, 6}}}
  iex> CNPJ.parse("30794968000106")
  {:ok, %CNPJ{digits: {3, 0, 7, 9, 4, 9, 6, 8, 0, 0, 0, 1, 0, 6}}}
  iex> CNPJ.parse("70.947.414/0001-08")
  {:ok, %CNPJ{digits: {7, 0, 9, 4, 7, 4, 1, 4, 0, 0, 0, 1, 0, 8}}}
  iex> CNPJ.parse(82)
  {:error, %CNPJ.ParsingError{reason: :invalid_verifier}}
  """
  @spec parse(pos_integer() | String.t()) ::
          {:ok, CNPJ.t()} | {:error, ParsingError.t()}
  # Formatted input ("XX.XXX.XXX/XXXX-XX"): strip punctuation, re-parse.
  def parse(
        <<first_digits::bytes-size(2), ".", second_digits::bytes-size(3), ".",
          third_digits::bytes-size(3), "/", fourth_digits::bytes-size(4), "-",
          last_digits::bytes-size(2)>>
      ) do
    parse(first_digits <> second_digits <> third_digits <> fourth_digits <> last_digits)
  end

  # Plain digit string: must parse fully as an integer.
  def parse(number) when is_binary(number) do
    case Integer.parse(number) do
      {integer, ""} -> parse(integer)
      _ -> {:error, %ParsingError{reason: :invalid_format}}
    end
  end

  def parse(number) when is_positive_integer(number) do
    digits = number |> Integer.digits()
    # Integers drop leading zeros; remember how many we must pad back.
    to_add = 14 - length(digits)

    if to_add >= 0 do
      verify(digits, to_add)
    else
      {:error, %ParsingError{reason: :too_long}}
    end
  end

  def parse(0), do: {:error, %ParsingError{reason: :all_zero_digits}}

  # Validates the two verifier digits against the padded digit list.
  defp verify(digits, to_add) do
    safe_digits = add_padding(digits, to_add)
    v1 = verifier(safe_digits, @v1_weights)
    v2 = verifier(safe_digits, @v2_weights)
    [input_v1, input_v2] = Enum.take(safe_digits, -2)

    if v1 == input_v1 and v2 == input_v2 do
      # Bug fix: build the tuple from the zero-padded digits so it always has
      # 14 elements, even for CNPJs with leading zeros. Using the raw `digits`
      # produced a short tuple that broke `format/1` and the `t` contract.
      {:ok, %CNPJ{digits: List.to_tuple(safe_digits)}}
    else
      {:error, %ParsingError{reason: :invalid_verifier}}
    end
  end

  # Prepends `to_add` zeros to restore dropped leading digits.
  defp add_padding(digits, 0 = _to_add), do: digits
  defp add_padding(digits, to_add), do: add_padding([0 | digits], to_add - 1)

  # Standard CNPJ check-digit algorithm: weighted sum mod 11.
  defp verifier(digits, weights) do
    acc =
      weights
      |> Enum.zip(digits)
      |> Enum.reduce(0, fn {weight, digit}, acc ->
        acc + weight * digit
      end)

    verifier = 11 - rem(acc, 11)
    if verifier >= 10, do: 0, else: verifier
  end

  @doc """
  Returns `true` if given `number` is a valid CNPJ, otherwise `false`.
  ## Examples
  iex> CNPJ.valid?(87)
  false
  iex> CNPJ.valid?(30794968000106)
  true
  iex> CNPJ.valid?("87")
  false
  iex> CNPJ.valid?("30794968000106")
  true
  iex> CNPJ.valid?("70.947.414/0001-08")
  true
  """
  @spec valid?(pos_integer | String.t()) :: boolean
  def valid?(number) do
    case parse(number) do
      {:ok, _} -> true
      _ -> false
    end
  end
end
|
lib/cnpj/cnpj.ex
| 0.897504
| 0.669103
|
cnpj.ex
|
starcoder
|
defmodule Kojin.Pod.PodMap do
  @moduledoc """
  Represents a map keyed by string and some other `Kojin.Pod.PodType`
  """
  use TypedStruct
  alias Kojin.Pod.{PodMap, PodArray, PodType, PodTypeRef, PodTypes}

  @typedoc """
  Defines a map keyed by string and value of some other `Kojin.Pod.PodType`
  """
  typedstruct enforce: true do
    # Optional documentation describing what the string key represents.
    field(:key_doc, String.t() | nil)
    # The type of the map's values (concrete type or reference to one).
    field(:value_type, PodType.t() | PodTypeRef.t())
  end

  # Bodiless head: declares the `key_doc \\ nil` default shared by all
  # `map_of/2` clauses below.
  def map_of(type, key_doc \\ nil)

  @doc """
  Returns a `Kojin.Pod.PodMap` with the type of values specified
  by `value_type` and documentation (`key_doc`) for the string key
  ## Examples
  Passing a known pod type identified by atom (e.g. :i32, :i64)
  creates a map of string to that type.
  iex> alias Kojin.Pod.{PodMap, PodTypes}
  ...> PodMap.map_of(:i32, "Keyed by name of person")
  %Kojin.Pod.PodMap{
    key_doc: "Keyed by name of person",
    value_type: %Kojin.Pod.PodTypeRef{type_id: :i32, type_path: []}
  }
  Passing a type that *references* another `PodObject`, `PodArray` or
  `PodMap` treats that named reference as a `PodTypeRef` to that
  type. In this case, the type `:person` is defined elsewhere, likely
  with a call to `pod_object(:person, ...)`
  iex> alias Kojin.Pod.{PodMap, PodTypes}
  ...> import Kojin.Pod.PodTypeRef
  ...> PodMap.map_of(pod_type_ref("root.person"), "Person keyed by name of person")
  %Kojin.Pod.PodMap{
    key_doc: "Person keyed by name of person",
    value_type: %Kojin.Pod.PodTypeRef{type_id: :person, type_path: [:root]}
  }
  Passing a type that is an atom turns it into a `PodTypeReference`.
  iex> alias Kojin.Pod.{PodMap, PodTypes}
  ...> PodMap.map_of(:person, "Person keyed by name of person")
  %Kojin.Pod.PodMap{
    key_doc: "Person keyed by name of person",
    value_type: %Kojin.Pod.PodTypeRef{type_id: :person, type_path: []}
  }
  Passing `Kojin.Pod.PodMap` in just returns the pod map.
  iex> alias Kojin.Pod.{PodMap, PodTypes}
  ...> PodMap.map_of(PodMap.map_of(:person, "Person keyed by name of person"))
  %Kojin.Pod.PodMap{
    key_doc: "Person keyed by name of person",
    value_type: %Kojin.Pod.PodTypeRef{type_id: :person, type_path: []}
  }
  """
  # Atom or string names are resolved through PodTypes.pod_type/1 first.
  @spec map_of(String.t() | atom(), String.t() | nil) :: PodMap.t()
  def map_of(item_type, key_doc) when is_binary(item_type) or is_atom(item_type) do
    %PodMap{value_type: PodTypes.pod_type(item_type), key_doc: key_doc}
  end

  # Already-resolved concrete types are stored as-is.
  @spec map_of(PodType.t(), any) :: PodMap.t()
  def map_of(%PodType{} = item_type, key_doc) do
    %PodMap{value_type: item_type, key_doc: key_doc}
  end

  # Type references are stored as-is.
  @spec map_of(PodTypeRef.t(), String.t() | nil) :: PodMap.t()
  def map_of(%PodTypeRef{} = pod_type_ref, key_doc) do
    %PodMap{value_type: pod_type_ref, key_doc: key_doc}
  end

  # An existing PodMap passes through unchanged (identity).
  @spec map_of(PodMap.t(), nil) :: PodMap.t()
  def map_of(%PodMap{} = pod_map, nil), do: pod_map

  # Arrays become the value type of a new map.
  @spec map_of(PodArray.t(), nil | String.t()) :: PodMap.t()
  def map_of(%PodArray{} = pod_array, key_doc) do
    %PodMap{value_type: pod_array, key_doc: key_doc}
  end
end
|
lib/kojin/pod/pod_map.ex
| 0.872266
| 0.547525
|
pod_map.ex
|
starcoder
|
defmodule Cepex do
  @spec lookup(String.t() | integer(), [keyword()]) ::
          {:ok, Cepex.Address.t()}
          | {:error, :request_failed}
          | {:error, {:invalid_response, Cepex.HTTP.Response.t()}}
          | {:error, :invalid_cep}
          | {:error, :cep_not_found}
  @doc """
  Lookups a postal code (CEP) and returns a `Cepex.Address` struct with the address
  information.
  Available options are `http_client` (defaults to `Cepex.HTTP.Hackney`) and `service`
  (defaults to `Cepex.Service.ViaCEP`).
  ## Examples
  iex> Cepex.lookup("80010-180")
  {:ok, %Cepex.Address{
    address: "Rua Barão do Rio Branco",
    cep: "80010180",
    city: "Curitiba",
    complement: "",
    http_response: %Cepex.HTTP.Response{},
    neighborhood: "Centro",
    state: "PR"
  }}
  iex> Cepex.lookup(80010180)
  {:ok, %Cepex.Address{
    address: "Rua Barão do Rio Branco",
    cep: "80010180",
    city: "Curitiba",
    complement: "",
    http_response: %Cepex.HTTP.Response{},
    neighborhood: "Centro",
    state: "PR"
  }}
  iex> Cepex.lookup("80210130")
  {:ok, %Cepex.Address{
    address: "Rua Barão do Rio Branco",
    cep: "80010180",
    city: "Curitiba",
    complement: "",
    http_response: %Cepex.HTTP.Response{},
    neighborhood: "Centro",
    state: "PR"
  }}
  """
  def lookup(cep, opts \\ []) do
    # Caller-supplied options take precedence over the configured defaults.
    opts = Keyword.merge([http_client: get_http_client(), service: get_service()], opts)

    case Cepex.CEP.parse(cep) do
      # Any parse failure is normalized to a single :invalid_cep reason.
      {:error, _cause} ->
        {:error, :invalid_cep}

      {:ok, parsed} ->
        case opts[:service].lookup(opts[:http_client], parsed) do
          {:ok, address} -> {:ok, address}
          # Service errors are passed through with their original cause.
          {:error, cause} -> {:error, cause}
        end
    end
  end

  # Defaults are read from the application environment at call time,
  # so tests and hosts can swap implementations without recompiling.
  defp get_http_client do
    Application.get_env(:cepex, :http_client, Cepex.HTTP.Hackney)
  end

  defp get_service do
    Application.get_env(:cepex, :service, Cepex.Service.ViaCEP)
  end
end
|
lib/cepex.ex
| 0.796253
| 0.420719
|
cepex.ex
|
starcoder
|
defmodule Edeliver.Relup.Instructions.SuspendChannels do
  @moduledoc """
  This upgrade instruction suspends the websocket processes
  connected to phoenix channels to avoid that new channel
  events will be processed during the code upgrade / downgrade
  process. It will be appended to the instructions after the "point of no return"
  but before any application code is reloaded. It should be
  used in conjunction with and after the
  `Edeliver.Relup.Instructions.SuspendRanchAcceptors`
  instruction which avoids that new websockets processes for
  phoenix channels are started.
  To make sure that the websocket connections can
  be found on the node, use this instruction after the
  `Edeliver.Relup.Instructions.CheckRanchConnections`
  instruction which will abort the upgrade if ranch
  (websocket) connections cannot be found in the supervision
  tree. Use the
  `Edeliver.Relup.Instructions.ResumeRanchAcceptors`
  instruction at the end of your instructions list to
  resume the websocket processes and reenable handling
  channel messages.
  Suspending and resuming websocket processes for
  phoenix channels requires a recent phoenix version
  which handles sys events for websockets. It also
  requires that the builtin phoenix pubsub backend
  `Phoenix.PubSub.PG2` is used for the phoenix channels.
  """
  use Edeliver.Relup.RunnableInstruction
  alias Edeliver.Relup.Instructions.CheckRanchAcceptors
  alias Edeliver.Relup.Instructions.CheckRanchConnections

  @doc """
  Appends this instruction to the instructions after the
  "point of no return" but before any instruction which
  loads or unloads new code, (re-)starts or stops
  any running processes, or (re-)starts or stops any
  application or the emulator.
  """
  def insert_where, do: &append_after_point_of_no_return/2

  @doc """
  Returns name of the application. This name is taken as argument
  for the `run/1` function and is required to access the acceptor processes
  through the supervision tree
  """
  # Config `:name` may be an atom or a binary; normalize to an atom.
  def arguments(_instructions = %Instructions{}, _config = %{name: name}) when is_atom(name) do
    name
  end

  def arguments(_instructions = %Instructions{}, _config = %{name: name}) when is_binary(name) do
    name |> String.to_atom
  end

  @doc """
  This module depends on the `Edeliver.Relup.Instructions.CheckRanchAcceptors` and
  the `Edeliver.Relup.Instructions.CheckRanchConnections` module which must be loaded
  before this instruction for upgrades and unload after this instruction for downgrades.
  """
  @spec dependencies() :: [Edeliver.Relup.Instructions.CheckRanchAcceptors]
  def dependencies do
    [Edeliver.Relup.Instructions.CheckRanchAcceptors, Edeliver.Relup.Instructions.CheckRanchConnections]
  end

  @doc """
  Suspends a list of processes. Because suspending a process might take a while depending on the length
  of the message queue or duration of current operation processed by the pid, suspending is done
  asynchronously for each process by spawing a new process which calls `:sys.suspend/2` and then waiting
  for all results before returning from this function. Be careful when using `:infinity` as timeout,
  because this function might hang for infinite time if one of the process does not handle sys events.
  """
  @spec bulk_suspend(processes::[pid], timeout::pos_integer|:infinity) :: :ok | {:errors, count::pos_integer, [{pid::pid, reason::term}]} | :not_supported
  def bulk_suspend(processes, timeout \\ 1000) do
    # One helper process per target pid; each is monitored so its exit
    # reason tells us whether the suspend succeeded (:normal) or why not.
    pids_and_monitor_refs = for pid <- processes do
      spawned_pid = :proc_lib.spawn(fn ->
        :ok = :sys.suspend(pid, timeout)
      end)
      {pid, spawned_pid, :erlang.monitor(:process, spawned_pid)}
    end
    # Collect one :DOWN message per helper, accumulating
    # {total_errors, not_supported_errors, error_details}.
    result = Enum.reduce(pids_and_monitor_refs, {0, 0, []}, fn({pid, spawned_pid, monitor_ref}, {errors_count, not_supported_count, errors}) ->
      receive do
        {:DOWN, ^monitor_ref, :process, ^spawned_pid, reason} ->
          case reason do
            :normal -> {errors_count, not_supported_count, errors}
            # :noproc / :timeout from :sys.suspend indicate the target pid
            # does not (or cannot) handle sys events in time.
            error = {:noproc, {:sys, :suspend, [^pid, ^timeout]}} -> {errors_count+1, not_supported_count+1, [{pid, error}|errors]}
            error = {:timeout, {:sys, :suspend, [^pid, ^timeout]}} -> {errors_count+1, not_supported_count+1, [{pid, error}|errors]}
            error -> {errors_count+1, not_supported_count, [{pid, error}|errors]}
          end
      end
    end)
    case result do
      {_errors_count = 0, _not_supported_count = 0, _errors = []} -> :ok
      # Every single process failed with noproc/timeout: treat suspending
      # as unsupported (e.g. an old phoenix without sys event handling).
      {not_supported_count, not_supported_count, _errors = [_|_]} when length(processes) == not_supported_count -> :not_supported
      # Errors were accumulated by prepending; reverse to restore order.
      {errors_count, _not_supported_count, errors} -> {:errors, errors_count, Enum.reverse(errors)}
    end
  end

  @doc """
  Suspends all websocket channels to avoid handling new channel events
  during the upgrade. This is possible only in recent phoenix versions
  since handling sys events is required for suspending. If an older version
  is used, a warning is printed that suspending is not supported.
  """
  @spec run(otp_application_name::atom) :: :ok
  def run(otp_application_name) do
    info "Suspending phoenix websocket channels..."
    # Walk the supervision tree: listener sup -> connections sup -> pids.
    # `assume` aborts the instruction with the given message on mismatch.
    ranch_listener_sup = CheckRanchAcceptors.ranch_listener_sup(otp_application_name)
    assume true = is_pid(ranch_listener_sup), "Failed to suspend phoenix websocket channels. Ranch listener supervisor not found."
    ranch_connections_sup = CheckRanchConnections.ranch_connections_sup(ranch_listener_sup)
    assume true = is_pid(ranch_connections_sup), "Failed to suspend phoenix websocket channels. Ranch connections supervisor not found."
    assume true = is_list(connections = CheckRanchConnections.ranch_connections(ranch_connections_sup)), "Failed to suspend phoenix websocket channels. No connection processes found."
    # Only the subset of ranch connections that are phoenix channel
    # websockets is suspended; plain http connections are left alone.
    case CheckRanchConnections.websocket_channel_connections(otp_application_name, connections) do
      [] -> info "No websocket connections for phoenix channels are running."
      websocket_connections = [_|_] ->
        websocket_connections_count = Enum.count(websocket_connections)
        info "Suspending #{inspect websocket_connections_count} websocket connections..."
        case bulk_suspend(websocket_connections) do
          :ok -> info "Suspended #{inspect websocket_connections_count} websocket connections."
          :not_supported ->
            warn "Suspending websocket connections for phoenix channels is not supported."
            debug "#{inspect websocket_connections_count} websockets were not suspended."
            debug "Please upgrade the 'phoenix' dependeny to a newer version which supports handling sys events for websockets."
            debug "Not suspended websockets might crash during the code upgrade."
          {:errors, errors_count, _errors} ->
            succeeded_count = websocket_connections_count - errors_count
            warn "Suspended #{inspect succeeded_count} of #{inspect websocket_connections_count} websocket connections. #{inspect errors_count} failed."
            debug "#{inspect errors_count} not suspended websockets might crash during the code upgrade."
        end
      :not_detected ->
        warn "Cannot detect websocket channel connections."
        debug "They won't be suspended but treated as normal http request connections."
        debug "Detection is possible only if 'Phoenix.PubSub.PG2' is used as pubsub backend."
    end
  end
end
|
lib/edeliver/relup/instructions/suspend_channels.ex
| 0.739328
| 0.499939
|
suspend_channels.ex
|
starcoder
|
defmodule Memento.Schema do
  require Memento.Mnesia

  @moduledoc """
  Module to interact with the database schema.
  For persisting data, Mnesia databases need to be created on disk. This
  module provides an interface to create the database on the disk of the
  specified nodes. Most of the time that is usually the node that the
  application is running on.
  ```
  # Create schema on current node
  Memento.Schema.create([ node() ]
  # Create schema on many nodes
  node_list = [node(), :alice@host_x, :bob@host_y, :eve@host_z]
  Memento.Schema.create(node_list)
  ```
  Important thing to note here is that only the nodes where data has to
  be persisted to disk have to be included. RAM-only nodes should be
  left out. Disk schemas can also be deleted by calling `delete/1` and
  you can get information about them by calling `info/0`.
  ## Example
  ```elixir
  # The nodes where you want to persist
  nodes = [ node() ]
  # Create the schema
  Memento.stop
  Memento.Schema.create(nodes)
  Memento.start
  # Create disc copies of your tables
  Memento.Table.create!(TableA, disc_copies: nodes)
  Memento.Table.create!(TableB, disc_copies: nodes)
  ```
  """

  # Public API
  # ----------

  @doc """
  Creates a new database on disk on the specified nodes.
  Calling `:mnesia.create_schema` for a custom path throws an exception
  if that path does not exist. Memento's version avoids this by ensuring
  that the directory exists.
  Also see `:mnesia.create_schema/1`.
  """
  @spec create(list(node)) :: :ok | {:error, any}
  def create(nodes) do
    ensure_dir_exists()
    Memento.Mnesia.handle_result(Memento.Mnesia.call_and_catch(:create_schema, [nodes]))
  end

  @doc """
  Deletes the database previously created by `create/1` on the specified
  nodes.
  Use this with caution, as it makes persisting data obsolete. Also see
  `:mnesia.delete_schema/1`.
  """
  @spec delete(list(node)) :: :ok | {:error, any}
  def delete(nodes) do
    Memento.Mnesia.handle_result(Memento.Mnesia.call_and_catch(:delete_schema, [nodes]))
  end

  @doc """
  Prints schema information about all Tables to the console.
  """
  @spec info() :: :ok
  def info do
    Memento.Mnesia.handle_result(Memento.Mnesia.call_and_catch(:schema))
  end

  @doc """
  Prints schema information about the specified Table to the console.
  """
  @spec info(Memento.Table.name) :: :ok
  def info(table) do
    Memento.Mnesia.handle_result(Memento.Mnesia.call_and_catch(:schema, [table]))
  end

  @doc """
  Sets the schema storage mode for the specified node.
  Useful when you want to change the schema mode on the fly,
  usually when connecting to a new, unsynchronized node on
  discovery at runtime.
  The mode can only be `:ram_copies` or `:disc_copies`. If the
  storage mode is set to `ram_copies`, then no table on that
  node can be disc-resident.
  This just calls `Memento.Table.set_storage_type/3` underneath
  with `:schema` as the table. Also see
  `:mnesia.change_table_copy_type/3` for more details.
  ## Example
  ```
  Memento.Schema.set_storage_type(:node@host, :disc_copies)
  ```
  """
  @spec set_storage_type(node, Memento.Table.storage_type) :: :ok | {:error, any}
  def set_storage_type(node, type) do
    Memento.Table.set_storage_type(:schema, node, type)
  end

  # If a custom Mnesia directory is configured, create it first so that
  # :mnesia.create_schema does not throw for a missing path.
  defp ensure_dir_exists do
    path = Application.get_env(:mnesia, :dir)
    if path, do: :ok = File.mkdir_p!(path)
  end
end
|
lib/memento/schema.ex
| 0.762733
| 0.878783
|
schema.ex
|
starcoder
|
defmodule Pascal do
  @moduledoc """
  Pascal's triangle, built row by row with recursive pattern matching.

  # Pascal.addrow([0,1,0])
  # 0 | 1 | 0
  # 0 | 1 | 1 | 0
  # 0 | 1 | 2 | 1 | 0
  # 0 | 1 | 3 | 3 | 1 | 0

  Each row is wrapped in `0` sentinels so that every element of the next
  row is the sum of two adjacent elements of the current row, e.g.:

  [0, 1, 2, 1, 0]
  => [0, 1] | [1, 2, 1, 0]
  => [1,2,1,0] = [1+2] | [2, 1, 0]
  => [2, 1, 0] = [2+1] | [1, 0]
  => [1, 0]    = [1+0] | [0]
  => [0, 1] [3] [3] [1] [0]
  """

  @doc """
  Computes the next row of Pascal's triangle from `list`.

  `list` must be a row wrapped in `0` sentinels, e.g. `[0, 1, 0]`.

  ## Examples

      iex> Pascal.addrow([0, 1, 0])
      [0, 1, 1, 0]
  """
  @spec addrow([non_neg_integer()]) :: [non_neg_integer()]
  def addrow(list) do
    [head | tail] = list
    addrow(head, tail)
  end

  # Leading 0 sentinel: keep it, emit the first real element, and recurse.
  # (Was documented with @doc, which is discarded on private functions and
  # raises a compiler warning - converted to plain comments.)
  defp addrow(0, tail) do
    [head | tail] = tail
    [0 | [head] ++ addrow(head, tail)]
  end

  # Base case: the trailing 0 sentinel closes the row.
  defp addrow(head, [0]) do
    [head | [0]]
  end

  # General case: emit the sum of the current and next element, recurse.
  defp addrow(head, tail) do
    h = head
    [head | tail] = tail
    [h + head] ++ addrow(head, tail)
  end

  # ========================================================

  @doc """
  Generates the first `level` rows of Pascal's triangle as a list of rows.

  `level` must be at least 1; for smaller values an error message is
  printed instead (and `:ok` from `IO.puts/1` is returned).

  ## Examples

      iex> Pascal.triangle(3)
      [[0, 1, 0], [0, 1, 1, 0], [0, 1, 2, 1, 0]]
  """
  @spec triangle(integer()) :: [[non_neg_integer()]] | :ok
  def triangle(level) do
    cond do
      level >= 1 -> triangle([0, 1, 0], 1, level, [[0, 1, 0]])
      level < 1 -> IO.puts("This is not a valid input to generate a Pascal triangle")
    end
  end

  # Build one more row and accumulate it (newest first) until `level`.
  defp triangle(list, start_level, level, acc_list) when start_level < level do
    row = addrow(list)
    triangle(row, start_level + 1, level, [row | acc_list])
  end

  # All rows built; reverse once to restore top-down order.
  # Unused parameters are underscored to silence compiler warnings.
  defp triangle(_list, _start_level, _level, acc_list) do
    Enum.reverse(acc_list)
  end
end
|
pascal.ex
| 0.503906
| 0.716343
|
pascal.ex
|
starcoder
|
# Only define the parser when the optional Plug dependency is available.
if elem(Code.ensure_compiled(Plug), 0) != :error do
  defmodule Msgpax.PlugParser do
    @moduledoc """
    A `Plug.Parsers` plug for parsing a MessagePack-encoded body.
    Look at the [documentation for
    `Plug.Parsers`](http://hexdocs.pm/plug/Plug.Parsers.html) for more
    information on how to use `Plug.Parsers`.
    This parser accepts the `:unpacker` option to configure how unpacking should be done.
    Its value can either be a module that implements the `unpack!/1` function
    or a module, function, and arguments tuple. Note, the response
    body will be prepended to the given list of arguments before applying.
    ## Examples
    defmodule MyPlugPipeline do
    use Plug.Builder
    plug Plug.Parsers,
    parsers: [Msgpax.PlugParser],
    pass: ["application/msgpack"]
    # Or use the :unpacker option:
    plug Plug.Parsers,
    parsers: [Msgpax.PlugParser],
    pass: ["application/msgpack"],
    unpacker: {Msgpax, :unpack!, [[binary: true]]}
    # ... rest of the pipeline
    end
    """

    @behaviour Plug.Parsers

    import Plug.Conn

    # Plug.Parsers callback: handle "application/msgpack" requests by reading
    # the raw body and unpacking it with the configured unpacker.
    def parse(%Plug.Conn{} = conn, "application", "msgpack", _params, {unpacker, options}) do
      case read_body(conn, options) do
        {:ok, body, conn} ->
          {:ok, unpack_body(body, unpacker), conn}

        # A partial read means the body exceeded the configured length limit.
        {:more, _partial_body, conn} ->
          {:error, :too_large, conn}
      end
    end

    # Any other content type is passed on to the next parser in the list.
    def parse(%Plug.Conn{} = conn, _type, _subtype, _params, _opts) do
      {:next, conn}
    end

    # Plug.Parsers callback: validate the :unpacker option up front (raising
    # on misconfiguration) and keep the remaining options for read_body/2.
    def init(options) do
      {unpacker, options} = Keyword.pop(options, :unpacker, Msgpax)
      validate_unpacker!(unpacker)
      {unpacker, options}
    end

    # Unpacks the body and normalizes the result into a plain map, since
    # Plug expects parsed params to be a map. Structs and non-map results
    # are wrapped under the "_msgpack" key.
    defp unpack_body(body, unpacker) do
      case apply_mfa_or_module(body, unpacker) do
        data = %_{} -> %{"_msgpack" => data}
        data when is_map(data) -> data
        data -> %{"_msgpack" => data}
      end
    rescue
      # Any unpacking failure is re-raised as a parse error, as required by
      # the Plug.Parsers contract.
      exception ->
        raise Plug.Parsers.ParseError, exception: exception
    end

    # MFA unpacker: the body is prepended to the extra arguments.
    defp apply_mfa_or_module(body, {module, function, extra_args}) do
      apply(module, function, [body | extra_args])
    end

    # Module unpacker: must implement unpack!/1 (validated in init/1).
    defp apply_mfa_or_module(body, unpacker) do
      unpacker.unpack!(body)
    end

    defp ensure_compiled?(module) do
      case Code.ensure_compiled(module) do
        {:module, _} -> true
        {:error, _} -> false
      end
    end

    # An {m, f, extra_args} unpacker must exist with arity
    # length(extra_args) + 1, since the body becomes the first argument.
    defp validate_unpacker!({module, function, extra_args})
         when is_atom(module) and is_atom(function) and is_list(extra_args) do
      arity = length(extra_args) + 1

      unless ensure_compiled?(module) and function_exported?(module, function, arity) do
        raise ArgumentError,
              "invalid :unpacker option. Undefined function " <>
                Exception.format_mfa(module, function, arity)
      end
    end

    # A module unpacker must be loadable and export unpack!/1.
    defp validate_unpacker!(unpacker) when is_atom(unpacker) do
      unless ensure_compiled?(unpacker) do
        raise ArgumentError,
              "invalid :unpacker option. The module #{inspect(unpacker)} is not " <>
                "loaded and could not be found"
      end

      unless function_exported?(unpacker, :unpack!, 1) do
        raise ArgumentError,
              "invalid :unpacker option. The module #{inspect(unpacker)} must " <>
                "implement unpack!/1"
      end
    end

    # Anything else is a configuration error.
    defp validate_unpacker!(unpacker) do
      raise ArgumentError,
            "the :unpacker option expects a module, or a three-element " <>
              "tuple in the form of {module, function, extra_args}, got: #{inspect(unpacker)}"
    end
  end
end
|
lib/msgpax/plug_parser.ex
| 0.80837
| 0.410609
|
plug_parser.ex
|
starcoder
|
defmodule Scidata.IMDBReviews do
  @moduledoc """
  Module for downloading the [Large Movie Review Dataset](https://ai.stanford.edu/~amaas/data/sentiment/).
  """

  @base_url "http://ai.stanford.edu/~amaas/data/sentiment/"
  @dataset_file "aclImdb_v1.tar.gz"

  alias Scidata.Utils

  @type train_sentiment :: :pos | :neg | :unsup
  @type test_sentiment :: :pos | :neg

  @type opts :: [
          example_types: [train_sentiment],
          transform_inputs: ([binary, ...] -> any),
          transform_labels: ([integer, ...] -> any)
        ]

  @doc """
  Downloads the IMDB reviews training dataset or fetches it locally.

  `example_types` specifies which examples in the dataset should be returned
  according to each example's label: `:pos` for positive examples, `:neg` for
  negative examples, and `:unsup` for unlabeled examples (whose sentiment is
  `nil`).
  """
  # Spec fixed: the training set accepts :unsup (train_sentiment), and the
  # returned :sentiment value is a list of labels, not a single label.
  @spec download(example_types: [train_sentiment]) :: %{
          review: [binary(), ...],
          sentiment: [1 | 0 | nil]
        }
  def download(opts \\ []), do: download_dataset(:train, opts)

  @doc """
  Downloads the IMDB reviews test dataset or fetches it locally.

  `example_types` is the same argument in `download/2` but excludes `:unsup`
  because all unlabeled examples are in the training set.
  """
  @spec download_test(example_types: [test_sentiment]) :: %{
          review: [binary(), ...],
          sentiment: [1 | 0]
        }
  def download_test(opts \\ []), do: download_dataset(:test, opts)

  # Fetches the tarball, keeps only files under the requested split and
  # example types, and returns the (optionally transformed) reviews/labels.
  defp download_dataset(dataset_type, opts) do
    example_types = opts[:example_types] || [:pos, :neg]
    transform_inputs = opts[:transform_inputs] || (& &1)
    transform_labels = opts[:transform_labels] || (& &1)

    files = Utils.get!(@base_url <> @dataset_file).body

    # Matches path segments such as "train/pos/" or "test/neg/".
    regex = ~r"#{dataset_type}/(#{Enum.join(example_types, "|")})/"

    {inputs, labels} =
      for {fname, contents} <- files,
          List.to_string(fname) =~ regex,
          reduce: {[], []} do
        {inputs, labels} ->
          {[contents | inputs], [get_label(fname) | labels]}
      end

    %{review: transform_inputs.(inputs), sentiment: transform_labels.(labels)}
  end

  # Derives the label from the file path: 1 for positive reviews, 0 for
  # negative ones, and nil for unlabeled (:unsup) examples. Filenames are
  # pre-filtered by the regex above, so one of these branches always matches.
  defp get_label(fname) do
    fname = List.to_string(fname)

    cond do
      fname =~ "pos" -> 1
      fname =~ "neg" -> 0
      fname =~ "unsup" -> nil
    end
  end
end
|
lib/scidata/imdb_reviews.ex
| 0.78316
| 0.776284
|
imdb_reviews.ex
|
starcoder
|
defmodule AWS.TimestreamWrite do
  @moduledoc """
  Amazon Timestream is a fast, scalable, fully managed time series database
  service that makes it easy to store and analyze trillions of time series
  data points per day. With Timestream, you can easily store and analyze IoT
  sensor data to derive insights from your IoT applications. You can analyze
  industrial telemetry to streamline equipment management and maintenance.
  You can also store and analyze log data and metrics to improve the
  performance and availability of your applications. Timestream is built from
  the ground up to effectively ingest, process, and store time series data.
  It organizes data to optimize query processing. It automatically scales
  based on the volume of data ingested and on the query volume to ensure you
  receive optimal performance while inserting and querying data. As your data
  grows over time, Timestream’s adaptive query processing engine spans across
  storage tiers to provide fast analysis while reducing costs.
  """

  # NOTE(review): this module follows the shape of generated aws-elixir
  # clients — every public operation is a thin wrapper around request/4.

  @doc """
  Creates a new Timestream database. If the KMS key is not specified, the
  database will be encrypted with a Timestream managed KMS key located in
  your account. Refer to [AWS managed KMS
  keys](https://docs.aws.amazon.com/kms/latest/developerguide/concepts.html#aws-managed-cmk)
  for more info. Service quotas apply. For more information, see [Access
  Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
  in the Timestream Developer Guide.
  """
  def create_database(client, input, options \\ []) do
    request(client, "CreateDatabase", input, options)
  end

  @doc """
  The CreateTable operation adds a new table to an existing database in your
  account. In an AWS account, table names must be at least unique within each
  Region if they are in the same database. You may have identical table names
  in the same Region if the tables are in seperate databases. While creating
  the table, you must specify the table name, database name, and the
  retention properties. Service quotas apply. For more information, see
  [Access
  Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
  in the Timestream Developer Guide.
  """
  def create_table(client, input, options \\ []) do
    request(client, "CreateTable", input, options)
  end

  @doc """
  Deletes a given Timestream database. *This is an irreversible operation.
  After a database is deleted, the time series data from its tables cannot be
  recovered.*
  All tables in the database must be deleted first, or a ValidationException
  error will be thrown.
  """
  def delete_database(client, input, options \\ []) do
    request(client, "DeleteDatabase", input, options)
  end

  @doc """
  Deletes a given Timestream table. This is an irreversible operation. After
  a Timestream database table is deleted, the time series data stored in the
  table cannot be recovered.
  """
  def delete_table(client, input, options \\ []) do
    request(client, "DeleteTable", input, options)
  end

  @doc """
  Returns information about the database, including the database name, time
  that the database was created, and the total number of tables found within
  the database. Service quotas apply. For more information, see [Access
  Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
  in the Timestream Developer Guide.
  """
  def describe_database(client, input, options \\ []) do
    request(client, "DescribeDatabase", input, options)
  end

  @doc """
  DescribeEndpoints returns a list of available endpoints to make Timestream
  API calls against. This API is available through both Write and Query.
  Because Timestream’s SDKs are designed to transparently work with the
  service’s architecture, including the management and mapping of the service
  endpoints, *it is not recommended that you use this API unless*:
  <ul> <li> Your application uses a programming language that does not yet
  have SDK support
  </li> <li> You require better control over the client-side implementation
  </li> </ul> For detailed information on how to use DescribeEndpoints, see
  [The Endpoint Discovery Pattern and REST
  APIs](https://docs.aws.amazon.com/timestream/latest/developerguide/Using-API.endpoint-discovery.html).
  """
  def describe_endpoints(client, input, options \\ []) do
    request(client, "DescribeEndpoints", input, options)
  end

  @doc """
  Returns information about the table, including the table name, database
  name, retention duration of the memory store and the magnetic store.
  Service quotas apply. For more information, see [Access
  Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
  in the Timestream Developer Guide.
  """
  def describe_table(client, input, options \\ []) do
    request(client, "DescribeTable", input, options)
  end

  @doc """
  Returns a list of your Timestream databases. Service quotas apply. For more
  information, see [Access
  Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
  in the Timestream Developer Guide.
  """
  def list_databases(client, input, options \\ []) do
    request(client, "ListDatabases", input, options)
  end

  @doc """
  A list of tables, along with the name, status and retention properties of
  each table.
  """
  def list_tables(client, input, options \\ []) do
    request(client, "ListTables", input, options)
  end

  @doc """
  List all tags on a Timestream resource.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end

  @doc """
  Associate a set of tags with a Timestream resource. You can then activate
  these user-defined tags so that they appear on the Billing and Cost
  Management console for cost allocation tracking.
  """
  def tag_resource(client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end

  @doc """
  Removes the association of tags from a Timestream resource.
  """
  def untag_resource(client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end

  @doc """
  Modifies the KMS key for an existing database. While updating the database,
  you must specify the database name and the identifier of the new KMS key to
  be used (`KmsKeyId`). If there are any concurrent `UpdateDatabase`
  requests, first writer wins.
  """
  def update_database(client, input, options \\ []) do
    request(client, "UpdateDatabase", input, options)
  end

  @doc """
  Modifies the retention duration of the memory store and magnetic store for
  your Timestream table. Note that the change in retention duration takes
  effect immediately. For example, if the retention period of the memory
  store was initially set to 2 hours and then changed to 24 hours, the memory
  store will be capable of holding 24 hours of data, but will be populated
  with 24 hours of data 22 hours after this change was made. Timestream does
  not retrieve data from the magnetic store to populate the memory store.
  Service quotas apply. For more information, see [Access
  Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
  in the Timestream Developer Guide.
  """
  def update_table(client, input, options \\ []) do
    request(client, "UpdateTable", input, options)
  end

  @doc """
  The WriteRecords operation enables you to write your time series data into
  Timestream. You can specify a single data point or a batch of data points
  to be inserted into the system. Timestream offers you with a flexible
  schema that auto detects the column names and data types for your
  Timestream tables based on the dimension names and data types of the data
  points you specify when invoking writes into the database. Timestream
  support eventual consistency read semantics. This means that when you query
  data immediately after writing a batch of data into Timestream, the query
  results might not reflect the results of a recently completed write
  operation. The results may also include some stale data. If you repeat the
  query request after a short time, the results should return the latest
  data. Service quotas apply. For more information, see [Access
  Management](https://docs.aws.amazon.com/timestream/latest/developerguide/ts-limits.html)
  in the Timestream Developer Guide.
  """
  def write_records(client, input, options \\ []) do
    request(client, "WriteRecords", input, options)
  end

  # Builds, signs (SigV4) and sends a Timestream Write API request. Every
  # public operation above funnels through here; `action` selects the
  # operation via the X-Amz-Target header.
  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, action, input, options) do
    client = %{client | service: "timestream"}
    host = build_host("ingest.timestream", client)
    url = build_url(host, client)

    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.0"},
      {"X-Amz-Target", "Timestream_20181101.#{action}"}
    ]

    payload = encode!(client, input)
    # The payload must be encoded before signing, since SigV4 hashes it.
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    post(client, url, payload, headers, options)
  end

  # Sends the signed request. A 200 with an empty body yields a nil decoded
  # body; any other status is surfaced as {:error, {:unexpected_response, _}}.
  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: body} = response} ->
        body = if body != "", do: decode!(client, body)
        {:ok, body, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      error = {:error, _reason} -> error
    end
  end

  # "local" region with an explicit endpoint: talk to it directly.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end

  # "local" region without an endpoint: default to localhost.
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end

  # Regular AWS host: "<prefix>.<region>.<endpoint>".
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end

  defp encode!(client, payload) do
    AWS.Client.encode!(client, payload, :json)
  end

  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/timestream_write.ex
| 0.87588
| 0.735487
|
timestream_write.ex
|
starcoder
|
defmodule T2ServerQuery do
  @moduledoc """
  Querying a Tribes 2 server requires sending 2 different packets to the
  server, where the first byte denotes the type of information we're asking
  for. The first is called the `info` packet, which doesn't contain much more
  than the server name. The second is called the `status` packet, which
  contains all the meat and potatoes.

  The `T2ServerQuery.query/3` function makes requests for both `info` and
  `status` and combines them into a single response for easy consumption.

  ## Installation

      def deps do
        [
          {:t2_server_query, "~> 0.1.3"}
        ]
      end

  ## Usage

      # T2ServerQuery.query("192.168.127.12", port // 28_000, timeout // 3_500)
      T2ServerQuery.query("192.168.127.12")

  ---
  """

  require Logger

  alias T2ServerQuery.PacketParser

  @doc """
  Perform a server query. **Results should be in the form of a tuple with either `:ok` or `:error`**

      {:ok, %T2ServerQuery.QueryResult{...} }
      {:error, %T2ServerQuery.QueryResult{...} }

  ## Examples

      iex> T2ServerQuery.query("192.168.127.12")
      {:ok,
        %T2ServerQuery.QueryResult{
          bot_count: 0,
          game_type: "Classic",
          map_name: "Canker",
          max_player_count: 64,
          mission_type: "LakRabbit",
          player_count: 0,
          players: [%{}],
          server_description: "Celebrating 20 Years of Tribes2! More information in Discord. <a:playt2.com/discord>playt2.com/discord</a>",
          server_name: "Discord PUB",
          server_status: :online,
          team_count: 1,
          teams: [%{name: "Storm", score: 0}]
        }}

      iex> T2ServerQuery.query("127.0.0.1")
      {:error,
        %T2ServerQuery.QueryResult{
          bot_count: 0,
          game_type: "",
          map_name: "",
          max_player_count: 0,
          mission_type: "",
          player_count: 0,
          players: [],
          server_description: "Host unreachable, timed out.",
          server_name: "127.0.0.1:28000",
          server_status: :offline,
          team_count: 0,
          teams: []
        }}

  """
  @spec query(String.t(), integer(), integer()) :: {atom(), T2ServerQuery.QueryResult.t()}
  def query(server_ip, port \\ 28_000, timeout \\ 3_500) do
    Logger.info "query: #{server_ip}"

    if is_valid_ip?(server_ip) do
      handle_query(server_ip, port, timeout)
    else
      PacketParser.init({:error, "#{server_ip} - Invalid IP" }, nil)
    end
  end

  # Sends both query packets over a single UDP socket and parses the
  # combined result. The socket is always closed — even on timeout — to fix
  # a leak where each query left its socket open.
  @spec handle_query(String.t(), integer(), integer()) :: {atom(), T2ServerQuery.QueryResult.t()}
  defp handle_query(server_ip, port, timeout) do
    {:ok, socket} = :gen_udp.open(0, [:binary, {:active, false}])

    try do
      # Convert a string ip from "127.0.0.1" into {127, 0, 0, 1}
      {:ok, s_ip} = server_ip
        |> to_charlist()
        |> :inet.parse_address()

      qry_info_packet = <<14, 2, 1, 2, 3, 4>>
      qry_status_packet = <<18, 2, 1, 2, 3, 4>>

      # Request info packet
      :gen_udp.send(socket, s_ip, port, qry_info_packet)
      hex_info_packet = :gen_udp.recv(socket, 0, timeout)
        |> handle_udp_response(server_ip, port)

      # Request status packet
      :gen_udp.send(socket, s_ip, port, qry_status_packet)
      hex_status_packet = :gen_udp.recv(socket, 0, timeout)
        |> handle_udp_response(server_ip, port)

      # Combine and parse results
      PacketParser.init(hex_info_packet, hex_status_packet)
    after
      :gen_udp.close(socket)
    end
  end

  # Loose dotted-quad shape check only; :inet.parse_address/1 performs the
  # authoritative parse inside handle_query/3.
  @spec is_valid_ip?(any()) :: boolean()
  defp is_valid_ip?(nil), do: false

  defp is_valid_ip?(server_ip) do
    Regex.match?(~r/^([1-2]?[0-9]{1,2}\.){3}([1-2]?[0-9]{1,2})$/, server_ip)
  end

  # Successful receive: the pinned `port` requires the reply to come back
  # from the port we queried. The raw packet is hex-encoded for the parser.
  @spec handle_udp_response(tuple(), String.t(), integer()) :: tuple() | String.t()
  defp handle_udp_response({:ok, {_ip, port, packet}}, _server_ip, port) do
    packet
    |> Base.encode16
  end

  defp handle_udp_response({:error, :timeout}, server_ip, port) do
    Logger.error "TIMEOUT --> #{server_ip}:#{port}"
    {:error, "#{server_ip}:#{port}"}
  end

  @doc false
  def log(thing_to_log) do
    # Just a simple debug logging util
    Logger.info(inspect thing_to_log)
    IO.puts "\n____________________________________________\n"
    thing_to_log
  end
end
|
lib/t2_server_query.ex
| 0.729134
| 0.573977
|
t2_server_query.ex
|
starcoder
|
defmodule Hades do
  @moduledoc """
  A wrapper for `NMAP` written in Elixir.

  Nmap (network mapper) is the god of port scanners, used for network
  discovery and as the basis for most security enumeration during the
  initial stages of a penetration test. Nmap displays exposed services on a
  target machine along with other useful information such as the version
  and OS detection.

  Nmap has made twelve movie appearances, including The Matrix Reloaded,
  Die Hard 4, Girl With the Dragon Tattoo, and The Bourne Ultimatum.

  ### Nmap in a nutshell
  - Host discovery
  - Port discovery / enumeration
  - Service discovery
  - Operating system version detection
  - Hardware (MAC) address detection
  - Service version detection
  - Vulnerability / exploit detection, using Nmap scripts (NSE
  """

  alias Hades.Command
  alias Hades.Argument
  alias Hades.Helpers

  use Hades.Arguments

  @doc """
  Begin a new blank (no options) `NMAP` command.

  Returns `%Hades.Command{}`.

  ## Example

      iex> Hades.new_command()
      %Hades.Command{scan_types: [], target: ""}

  """
  def new_command, do: %Command{}

  @doc """
  Executes an existing nmap command against its configured target.

  Returns the parsed `NMAP` output in a `t:map/0`.

  ## Example

      iex> Hades.new_command()
      ...> |> Hades.add_argument(Hades.Arguments.ScanTechniques.arg_sP())
      ...> |> Hades.add_target("192.168.120.42")
      ...> |> Hades.scan()
      %{
        hosts: [
          %{hostname: "Felixs-MACNCHEESEPRO.root.box", ip: "192.168.120.42", ports: []}
        ],
        time: %{
          elapsed: 0.93,
          endstr: "Sat Jan 18 10:07:32 2020",
          startstr: "Sat Jan 18 10:07:31 2020",
          unix: 1579338452
        }
      }

  """
  def scan(command) do
    # prepare/1 yields the {command, target} tuple execute/1 expects.
    command
    |> Helpers.prepare()
    |> Hades.NMAP.execute()
  end

  @doc """
  Adds a specific `target_ip` to the nmap `command`.

  Currently only standard formatted IPv4 addresses are supported; inputs
  with trailing subnet masks are not. An invalid address results in the
  target being set to `nil`.

  Returns a `%Hades.Command{}` with the added `target_ip`.

  ## Example

      iex> Hades.new_command()
      ...> |> Hades.add_target("192.168.120.42")
      %Hades.Command{scan_types: [], target: "192.168.120.42"}

  """
  def add_target(%Command{} = command, target_ip) do
    validated_ip =
      with {:ok, ip} <- Helpers.check_ip_address(target_ip) do
        ip
      else
        _ -> nil
      end

    %{command | target: validated_ip}
  end

  @doc """
  Binds one of the arguments defined in `Hades.Arguments` to the given
  command.

  Returns a `%Hades.Command{}` with the added `%Hades.Argument{}`.

  ## Example

      iex> Hades.new_command()
      ...> |> Hades.add_argument(Hades.Arguments.ScanTechniques.arg_sP())
      %Hades.Command{
        scan_types: [
          %Hades.Argument{
            argument: nil,
            context: :scan_type,
            desc: "Ping Scan",
            name: "-sP",
            options: false
          }
        ],
        target: ""
      }

  """
  def add_argument(
        %Command{scan_types: existing_args} = command,
        %Argument{context: context} = argument
      ) do
    # Raises when the argument is used outside its allowed contexts.
    Helpers.validate_contexts!(context, [:scan_type, :option])
    %{command | scan_types: [argument | existing_args]}
  end
end
|
lib/hades.ex
| 0.88512
| 0.654453
|
hades.ex
|
starcoder
|
defmodule Ticker.Quote.Processor.Simulate do
  use Timex

  # Span of simulated history generated by historical/1, in hours.
  @historical_hours 2
  # Number of simulated ticks produced per minute of history.
  @ticks_per_minute 4

  @behaviour Ticker.Quote.Processor.Behaviour

  # Baseline quote: used both as the Agent's initial state and as the
  # template for each generated quote (l: last price, pcls_fix: previous close).
  @initial_quote %Ticker.Quote{c: "0.00", c_fix: "0.00", ccol: "chr", cp: "0.00", cp_fix: "0.00", e: "NASDAQ", id: "99999", l: "120.00", l_cur: "120.00", l_fix: "120.00", lt: "", lt_dts: "", ltt: "", pcls_fix: "120.00", s: "0"}

  # Starts the Agent that holds the current simulated quote between calls.
  def start_link do
    Agent.start_link(fn -> @initial_quote end, name: __MODULE__)
  end

  @doc "Process the given symbols (@see Ticker.Quote.Processor.Behaviour.process}. Used for simulating quotes"
  def process(symbols) do
    quotes = process(symbols, Timex.now)
    {:ok, quotes}
  end

  @doc "Simulated historical quotes (@see Ticker.Quote.Processor.Behaviour.historical)"
  def historical(symbols) do
    # Walk a 1-minute grid from @historical_hours ago up to now, emitting
    # @ticks_per_minute simulated quotes per minute per symbol.
    dt = Timex.shift(Timex.now, hours: -@historical_hours)
    quotes = Interval.new(from: dt, until: [hours: @historical_hours], step: [minutes: 1])
    |> Enum.map(fn(i) -> Enum.map(1..@ticks_per_minute, fn(_) -> process(symbols, i) end) end)
    |> List.flatten
    {:ok, quotes}
  end

  # Advances the shared quote by one random step, stores it in the Agent,
  # and returns one quote per symbol stamped with that symbol.
  # NOTE(review): move_quote/2 is random, so the stored quote and the
  # per-symbol quotes come from separate move_quote calls and may differ —
  # presumably acceptable for simulation; confirm if exact continuity matters.
  defp process(symbols, date_time) do
    current_quote = get_quote()
    current_quote
    |> move_quote(date_time)
    |> set_quote
    Enum.map(symbols, fn(s) -> %{move_quote(current_quote, date_time) | t: s} end)
  end

  # Replaces the Agent-held quote with the new value.
  defp set_quote(new_value) do
    Agent.update(__MODULE__, fn(_) -> new_value end)
  end

  # Reads the current quote from the Agent.
  defp get_quote do
    Agent.get(__MODULE__, &(&1))
  end

  # Random-walks the last price by up to +/-1.00 (biased towards no move:
  # 3 of the 5 direction outcomes are 0), then derives the change and
  # percent-change from the previous close.
  defp move_quote(from_quote, date_time) do
    initial_quote = from_quote |> Ticker.Quote.as_type(:float)
    direction = Enum.random([-1, 0, 0, 0, 1])
    delta = :rand.uniform()
    |> Float.round(2)
    |> Kernel.*(direction)
    initial_price = initial_quote.pcls_fix
    current_price = initial_quote.l + delta
    change_price = current_price - initial_price
    change_pct = (change_price / initial_price) * 100
    {lt, ltt, lt_dts} = date_fields(date_time)
    to_quote = %{@initial_quote | c: change_price, c_fix: change_price, cp: change_pct, cp_fix: change_pct, l: current_price, l_cur: current_price, l_fix: current_price, lt: lt, lt_dts: lt_dts, ltt: ltt, pcls_fix: String.to_float(@initial_quote.pcls_fix)}
    Ticker.Quote.as_type(to_quote, :string)
  end

  # Formats the timestamp fields: lt/ltt in US Eastern time, lt_dts as a
  # "Z"-suffixed ISO-like timestamp of the raw date_time.
  defp date_fields(date_time) do
    tz = Timezone.get("America/New_York", date_time)
    edt = Timezone.convert(date_time, tz)
    ltt = ~s(#{Timex.format!(edt, "%I:%M%p", :strftime)} #{tz.abbreviation})
    lt = ~s(#{Timex.format!(edt, "%b %d, %I:%M%p", :strftime)} #{tz.abbreviation})
    lt_dts = ~s(#{Timex.format!(date_time, "%FT%T", :strftime)}Z)
    {lt, ltt, lt_dts}
  end
end
|
lib/ticker/quote/processor/simulate.ex
| 0.592195
| 0.469095
|
simulate.ex
|
starcoder
|
defmodule Deparam.Type do
  @moduledoc """
  A behavior that can be used to implement a custom coercer.
  """

  alias Deparam.TypeContext
  alias Deparam.Types

  @doc """
  Coerces the given value using the type and additional options specified in the
  specified type context.
  """
  @callback coerce(value :: any, context :: TypeContext.t()) ::
              {:ok, any} | :error

  # Short atom names mapped to the built-in coercer modules.
  @aliases %{
    any: Types.Any,
    array: Types.Array,
    boolean: Types.Boolean,
    enum: Types.Enum,
    float: Types.Float,
    integer: Types.Integer,
    map: Types.Map,
    string: Types.String,
    word_list: Types.WordList,
    upload: Types.Upload,
    url: Types.URL
  }

  # Modifiers that may prefix a type spec, e.g. {:non_nil, :string}.
  @modifiers [:non_empty, :non_nil]

  @type modifier :: nil | :non_empty | :non_nil

  @type primitive ::
          nil
          | atom
          | module
          | tuple
          | nonempty_list
          | TypeContext.t()
          | (any -> {:ok, any} | :error)
          | (any, TypeContext.t() -> {:ok, any} | :error)

  @type type :: primitive | {modifier, primitive}

  @doc """
  Translates the given type specification to a type context that can be passed
  as argument to a coercer.
  """
  @spec resolve(type) :: {:ok, TypeContext.t()} | :error
  # NOTE: clause order matters — e.g. the modifier-tuple clause must come
  # before the generic tuple clause below.
  # A nil type spec defaults to the :any coercer.
  def resolve(nil), do: resolve(:any)

  # An already-built context passes through unchanged.
  def resolve(%TypeContext{} = context) do
    {:ok, context}
  end

  # {modifier, inner}: resolve the inner spec, then record the modifier.
  def resolve({modifier, context_or_fun_or_type}) when modifier in @modifiers do
    with {:ok, context} <- resolve(context_or_fun_or_type) do
      {:ok, %{context | modifier: modifier}}
    end
  end

  # Any other tuple of 2+ elements is treated as a [type | args] list.
  def resolve(definition) when tuple_size(definition) >= 2 do
    definition
    |> Tuple.to_list()
    |> resolve()
  end

  # [type | args]: resolve the head and attach the extra coercer args.
  def resolve([context_or_fun_or_type | args]) do
    with {:ok, context} <- resolve(context_or_fun_or_type) do
      {:ok, %{context | args: args}}
    end
  end

  # Atom: either a known alias or a module that implements coerce/2.
  def resolve(type) when is_atom(type) do
    mod = Map.get(@aliases, type, type)

    if Code.ensure_loaded?(mod) && function_exported?(mod, :coerce, 2) do
      {:ok, TypeContext.new(&mod.coerce/2)}
    else
      :error
    end
  end

  # A 1-arity fun is wrapped so that the context argument is ignored.
  def resolve(fun) when is_function(fun, 1) do
    resolve(fn value, _context -> fun.(value) end)
  end

  def resolve(fun) when is_function(fun, 2) do
    {:ok, TypeContext.new(fun)}
  end

  # Anything else is not a valid type specification.
  def resolve(_), do: :error

  @doc """
  Coerces the given value using the given type context or specification.
  """
  @spec coerce(any, type) :: {:ok, any} | :error
  def coerce(value, type) do
    with {:ok, context} <- resolve(type),
         {:ok, value} <- do_coerce(value, context) do
      {:ok, value}
    end
  end

  # nil is rejected outright when the modifier forbids it...
  defp do_coerce(nil, %{modifier: modifier})
       when modifier in [:non_nil, :non_empty] do
    :error
  end

  # ...and passes through untouched otherwise (coercers never see nil).
  defp do_coerce(nil, _context), do: {:ok, nil}

  defp do_coerce(value, context) do
    context.coercer.(value, context)
  end
end
|
lib/deparam/type.ex
| 0.855474
| 0.482612
|
type.ex
|
starcoder
|
defmodule Scenic.Primitive.Arc do
  @moduledoc """
  Draw an arc on the screen.
  An arc is a segment that traces part of the outline of a circle. If you are
  looking for something shaped like a piece of pie, then you want a segment.
  Arcs are often drawn on top of a segment to get an affect where a piece of pie
  is filled in, but only the curvy edge is stroked.
  Note that you can fill an arc, but that will result in a shape that looks
  like a potato wedge.
  ## Data
  `{radius, start, finish}`
  The data for an arc is a three-tuple.
  * `radius` - the radius of the arc
  * `start` - the starting angle in radians
  * `finish` - end ending angle in radians
  ## Styles
  This primitive recognizes the following styles
  * [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
  * [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the primitive
  * [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive. In this case, only the curvy part.
  ## Usage
  You should add/modify primitives via the helper functions in
  [`Scenic.Primitives`](Scenic.Primitives.html#arc/3)
  """

  use Scenic.Primitive

  alias Scenic.Primitive.Sector
  alias Scenic.Primitive.Triangle

  @styles [:hidden, :fill, :stroke]

  # ============================================================================
  # data verification and serialization

  # --------------------------------------------------------
  # Error message shown when invalid arc data is supplied.
  @doc false
  def info(data),
    do: """
    #{IO.ANSI.red()}#{__MODULE__} data must be: {radius, start, finish}
    #{IO.ANSI.yellow()}Received: #{inspect(data)}
    #{IO.ANSI.default_color()}
    """

  # --------------------------------------------------------
  # normalize/1 only matches well-formed data, so malformed input raises
  # and the rescue converts that into :invalid_data.
  @doc false
  def verify(data) do
    _ = normalize(data)
    {:ok, data}
  rescue
    _ -> :invalid_data
  end

  # --------------------------------------------------------
  # Accepts a {radius, start, finish} triple of numbers, unchanged.
  @doc false
  @spec normalize({number(), number(), number()}) :: {number(), number(), number()}
  def normalize({radius, start, finish} = data)
      when is_number(radius) and is_number(start) and is_number(finish),
      do: data

  # ============================================================================
  @doc """
  Returns a list of styles recognized by this primitive.
  """
  @spec valid_styles() :: [:fill | :hidden | :stroke]
  def valid_styles, do: @styles

  # --------------------------------------------------------
  # A point lies on the arc region when it is inside the sector described
  # by the same data but outside the triangle formed by the center and the
  # two arc endpoints.
  def contains_point?({radius, start, finish} = data, point) do
    in_sector? = Sector.contains_point?(data, point)

    if in_sector? do
      edge_start = {
        radius * :math.cos(start),
        radius * :math.sin(start)
      }

      edge_finish = {
        radius * :math.cos(finish),
        radius * :math.sin(finish)
      }

      !Triangle.contains_point?({{0, 0}, edge_start, edge_finish}, point)
    else
      false
    end
  end
end
|
lib/scenic/primitive/arc.ex
| 0.910932
| 0.703333
|
arc.ex
|
starcoder
|
defmodule MeteoStick.WeatherStation do
  use GenServer
  require Logger

  defmodule State do
    @moduledoc false

    # Latest readings reported by a single weather station. `low_battery`
    # holds the trailing fields of the last report (a possibly empty list).
    defstruct id: 0,
              outdoor_temperature: 0,
              indoor_temperature: 0,
              humidity: 0,
              pressure: 0,
              wind: %{
                speed: 0,
                direction: 0,
                gust: 0
              },
              rain: 0,
              uv: 0,
              solar: %{
                radiation: 0,
                intensity: 0
              },
              low_battery: nil
  end

  @doc """
  Starts a station process registered as `:"MeteoStation-<id>"`, where the
  id is taken from the second element of the raw data line.
  """
  def start_link(data) do
    id = :"MeteoStation-#{Enum.at(data, 1)}"
    GenServer.start_link(__MODULE__, [id, data], name: id)
  end

  @doc """
  Parses a raw report (a type tag followed by numeric strings) and casts it
  to the given station. Values that fail `Float.parse/1` default to `0.0`.
  """
  def data(station, data) do
    type = Enum.at(data, 0)

    values =
      data
      |> Enum.drop(1)
      |> Enum.map(fn d ->
        case Float.parse(d) do
          :error -> 0.0
          # Underscored to silence the unused-variable warning.
          {num, _remainder} -> num
        end
      end)

    GenServer.cast(station, {type, values})
  end

  @impl true
  def init([id, _data]) do
    {:ok, %State{id: id}}
  end

  # Each handle_cast clause decodes one report type, updates the state and
  # broadcasts it on MeteoStick.Events. Unused fields are underscored to
  # silence compiler warnings.

  # "W": wind report (speed, gust, direction).
  @impl true
  def handle_cast({"W", values}, state) do
    [_id, wind_speed, gust, wind_direction, _rf_signal | low_battery] = values
    Logger.debug("Wind Speed: #{inspect wind_speed}")
    Logger.debug("#{inspect values}")
    state = %State{state | wind: %{state.wind | speed: wind_speed, direction: wind_direction, gust: gust}, low_battery: low_battery}
    GenEvent.notify(MeteoStick.Events, state)
    {:noreply, state}
  end

  # "R": rain bucket tick counter.
  def handle_cast({"R", values}, state) do
    [_id, tick, _rf_signal | low_battery] = values
    Logger.debug("Rain: #{inspect tick}")
    state = %State{state | rain: tick, low_battery: low_battery}
    GenEvent.notify(MeteoStick.Events, state)
    {:noreply, state}
  end

  # "T": outdoor temperature and humidity.
  def handle_cast({"T", values}, state) do
    [_id, temp_c, humidity, _rf_signal | low_battery] = values
    Logger.debug("Temperature: #{inspect temp_c}")
    state = %State{state | outdoor_temperature: temp_c, humidity: humidity, low_battery: low_battery}
    GenEvent.notify(MeteoStick.Events, state)
    {:noreply, state}
  end

  # "U": UV index.
  def handle_cast({"U", values}, state) do
    [_id, uv_index, _rf_signal | low_battery] = values
    Logger.debug("UV: #{inspect uv_index}")
    state = %State{state | :uv => uv_index, low_battery: low_battery}
    GenEvent.notify(MeteoStick.Events, state)
    {:noreply, state}
  end

  # "S": solar radiation and intensity.
  def handle_cast({"S", values}, state) do
    [_id, solar_radiation, intensity, _rf_signal | low_battery] = values
    Logger.debug("Solar Radiation: #{inspect solar_radiation}")
    Logger.debug("#{inspect values}")
    state = %State{state | :solar => %{state.solar | radiation: solar_radiation, intensity: intensity}, low_battery: low_battery}
    GenEvent.notify(MeteoStick.Events, state)
    {:noreply, state}
  end

  # "B": base-station report (indoor temperature and pressure).
  def handle_cast({"B", values}, state) do
    [temp_c, pressure, _good_packets | low_battery] = values
    Logger.debug("Indoor Temperature: #{inspect temp_c}")
    Logger.debug("Pressure: #{inspect pressure}")
    state = %State{state | indoor_temperature: temp_c, pressure: pressure, low_battery: low_battery}
    GenEvent.notify(MeteoStick.Events, state)
    {:noreply, state}
  end
end
|
lib/meteo_stick/weather_station.ex
| 0.544559
| 0.574634
|
weather_station.ex
|
starcoder
|
defmodule StreamingMetrics.PrometheusMetricCollector do
  @moduledoc """
  Prometheus backend.
  It is the client's responsibility to expose the metrics
  to prometheus via a scrape endpoint or the Pushgateway.
  This module simply creates and increments the counters.
  """
  @behaviour StreamingMetrics.MetricCollector
  require Logger
  use Prometheus.Metric

  def init() do
    :ok
  end

  @doc """
  Formats info into a format `record_metrics` understands.
  `timestamp` is ignored because Prometheus handles timestamps.
  """
  def count_metric(count, name, dimensions \\ [], _timestamp \\ []) do
    build_metric(count, name, dimensions, :count)
  end

  @doc """
  Formats info into a format `record_metrics` understands.
  `unit` is ignored because Prometheus prefers that you incorporate the unit name into the metric name.
  `timestamp` is ignored because Prometheus handles timestamps.
  """
  def gauge_metric(value, name, dimensions \\ [], _unit \\ nil, _timestamp \\ []) do
    build_metric(value, name, dimensions, :gauge)
  end

  # Common metric map produced by count_metric/4 and gauge_metric/5.
  defp build_metric(value, name, dimensions, type) do
    %{name: name, value: value, dimensions: dimensions, type: type}
  end

  @doc """
  Declares Prometheus metrics, if they doesn't exist, and records them for the provided type.
  Metrics are recorded in Prometheus in the following format.
  `{namespace}_{metric.name}`
  Spaces are replaced with underscores for compatibility with Prometheus.
  """
  def record_metrics(metrics, namespace) do
    metrics
    |> Enum.map(&record_metric(&1, namespace))
    |> Enum.reduce({:ok, []}, &collapse_result/2)
  end

  # Dispatch on metric type: counters are incremented, gauges are set.
  defp record_metric(%{type: :count} = metric, namespace),
    do: record_metric(metric, namespace, Counter, :inc)

  defp record_metric(%{type: :gauge} = metric, namespace),
    do: record_metric(metric, namespace, Gauge, :set)

  defp record_metric(metric, namespace, prometheus_module, prometheus_func) do
    full_name = full_metric_name(namespace, metric.name)
    declare_metric(full_name, metric.dimensions, prometheus_module)

    try do
      apply(prometheus_module, prometheus_func, [
        [name: full_name, labels: Keyword.values(metric.dimensions)],
        metric.value
      ])
    rescue
      e -> {:error, e}
    end
  end

  # Prometheus-safe metric name: "{namespace}_{name}" with spaces replaced.
  defp full_metric_name(namespace, name) do
    String.replace("#{namespace}_#{name}", " ", "_")
  end

  # Idempotent declaration of the metric and its label keys.
  defp declare_metric(name, dimensions, prometheus_module) do
    apply(prometheus_module, :declare, [[name: name, labels: Keyword.keys(dimensions), help: ""]])
  end

  # Folds individual results into the collector contract: the first error
  # encountered wins; plain :ok results leave the accumulator untouched.
  defp collapse_result(:ok, {:ok, term}), do: {:ok, term}
  defp collapse_result({:error, reason}, _acc), do: {:error, reason}
  defp collapse_result(_result, acc), do: acc
end
|
lib/streaming_metrics/prometheus_metric_collector.ex
| 0.85115
| 0.520679
|
prometheus_metric_collector.ex
|
starcoder
|
defmodule Kaguya.Module do
  # Struct mirroring an IRC message's shape, used for help/documentation data.
  defmodule Doc do
    defstruct command: "", args: [], trailing: "", user: nil
  end
  @moduledoc """
  Module which provides functionality used for creating IRC modules.
  When this module is used, it will create wrapper
  functions which allow it to be automatically registered
  as a module and include all macros. It can be included with:
  `use Kaguya.Module, "module name here"`
  Once this is done, the module will autogenerate help documents,
  start automatically and be able to use the `handle`, `defh`, and other macros.
  Modules can be loaded and unloaded using `Kaguya.Util.loadModule`, `Kaguya.Util.unloadModule`,
  and `Kaguya.Util.reloadModule`.
  Modules also provide three hooks for certain events that you may want to react to.
  These are `module_init()`, `module_load()`, and `module_unload()`. Init will be run when the bot
  starts, and the load/unload functions are run whenever a module is loaded or unloaded.
  """
  defmacro __using__(module_name) do
    quote bind_quoted: [module_name: module_name] do
      use GenServer
      import Kaguya.Module
      @module_name module_name
      @task_table String.to_atom("#{@module_name}_tasks")
      @before_compile Kaguya.Module
      init_attrs()
      def start_link(opts \\ []) do
        {:ok, _pid} = GenServer.start_link(__MODULE__, :ok, [])
      end
      defoverridable start_link: 1
      def init(:ok) do
        require Logger
        Logger.log :debug, "Started module #{@module_name}!"
        :pg2.join(:modules, self())
        :ets.insert(:modules, {@module_name, self()})
        # NOTE(review): `table_name` is computed but never used; @task_table is
        # used below instead — confirm this binding can be removed.
        table_name = String.to_atom "#{@module_name}_tasks"
        :ets.new(@task_table, [:set, :public, :named_table, {:read_concurrency, true}, {:write_concurrency, true}])
        Process.register(self(), __MODULE__)
        module_init()
        {:ok, []}
      end
      defoverridable init: 1
      def handle_cast(:unload, state) do
        require Logger
        :pg2.leave(:modules, self())
        module_unload()
        Logger.log :debug, "Unloaded module #{@module_name}!"
        {:noreply, state}
      end
      def handle_cast(:load, state) do
        require Logger
        :pg2.join(:modules, self())
        module_load()
        Logger.log :debug, "Loaded module #{@module_name}!"
        {:noreply, state}
      end
      # Default no-op lifecycle hooks; users may override any of them.
      def module_init do
      end
      defoverridable module_init: 0
      def module_load do
      end
      defoverridable module_load: 0
      def module_unload do
      end
      defoverridable module_unload: 0
      def on_message(msg) do
      end
      defoverridable on_message: 1
      # Used to scan for valid modules on start
      defmodule Kaguya_Module do
      end
    end
  end
  # Injects the auto-generated help handlers (when :help_cmd is configured)
  # plus a catch-all handle_cast clause into the using module.
  defmacro __before_compile__(_env) do
    help_func =
      case Application.get_env(:kaguya, :help_cmd) do
        nil -> nil
        _ -> add_help_commands()
      end
    quote do
      unquote(help_func)
      def handle_cast({:msg, message}, state) do
        on_message(message)
        {:noreply, state}
      end
    end
  end
  # Builds the print_help/print_all_help handlers backed by the accumulated
  # @match_docs attribute.
  defp add_help_commands do
    quote do
      def print_help(var!(message), %{"search_term" => term}) do
        import Kaguya.Util
        @match_docs
        |> Enum.filter(fn %{"cmd" => cmd} -> term == cmd end)
        |> Enum.map(fn %{"data" => [ms, f, m, opts], "aliases" => aliases} ->
          doc = make_docstring(ms, f, m, opts)
          reply_priv_notice(doc)
          if aliases != [] do
            reply_priv_notice("aliases: #{Enum.join(aliases, "; ")}")
          end
        end)
      end
      def print_all_help(var!(message), %{}) do
        if length(@match_docs) > 0 do
          cmds =
            @match_docs
            |> Enum.map(fn %{"cmd" => cmd} -> cmd end)
            |> Enum.join("; ")
          reply_priv_notice("#{@module_name}: #{cmds}")
        end
      end
      defp make_docstring(match_str, function, module, opts) do
        import Kaguya.Util
        desc =
          case Keyword.get(opts, :doc) do
            # TODO: Reimplement for Code.fetch_docs
            nil -> nil
            # Code.get_docs(module, :docs)
            # |> Enum.filter(fn {{f, _}, _, _, _, _} -> f == function end)
            # |> Enum.map(fn {{_, _}, _, _, _, doc} -> doc end)
            # |> List.first
            d -> d
          end
        command = get_doc_command_string(match_str)
        if desc != nil do
          "#{yellow()}#{command}#{clear()}: #{desc}"
        else
          "#{yellow()}#{command}#{clear()}"
        end
      end
      # Renders a match string for display: ":x" -> "<x>", "~x" -> "<x...>".
      defp get_doc_command_string(match_str) do
        String.split(match_str)
        |> Enum.map(fn part ->
          case String.first(part) do
            ":" ->
              var_name = String.trim_leading(part, ":")
              "<#{var_name}>"
            "~" ->
              var_name = String.trim_leading(part, "~")
              "<#{var_name}...>"
            _ -> part
          end
        end)
        |> Enum.join(" ")
      end
    end
  end
  # Registers the accumulating attributes used by the match/defh bookkeeping.
  defmacro init_attrs do
    Module.register_attribute __CALLER__.module,
      :match_docs, accumulate: true, persist: true
    Module.register_attribute __CALLER__.module,
      :handler_impls, accumulate: true, persist: true
    Module.register_attribute __CALLER__.module,
      :handlers, accumulate: true, persist: true
  end
  # Builds the PRIVMSG handle_cast clause, appending auto-generated help
  # matchers when :help_cmd is configured.
  defp generate_privmsg_handler(body) do
    help_ast =
      case Application.get_env(:kaguya, :help_cmd) do
        nil -> nil
        help_cmd ->
          help_search = help_cmd <> " ~search_term"
          quote do
            match unquote(help_cmd), :print_all_help, nodoc: true
            match unquote(help_search), :print_help, nodoc: true
          end
      end
    quote do
      def handle_cast({:msg, %{command: "PRIVMSG"} = var!(message)}, state) do
        on_message(var!(message))
        unquote(body)
        unquote(help_ast)
        {:noreply, state}
      end
    end
  end
  @doc """
  Defines a group of matchers which will handle all messages of the corresponding
  IRC command.
  ## Example
  ```
  handle "PING" do
    match_all :pingHandler
    match_all :pingHandler2
  end
  ```
  In the example, all IRC messages which have the PING command
  will be matched against `:pingHandler` and `:pingHandler2`
  """
  defmacro handle("PRIVMSG", do: body), do: generate_privmsg_handler(body)
  defmacro handle(command, do: body) do
    quote do
      def handle_cast({:msg, %{command: unquote(command)} = var!(message)}, state) do
        on_message(var!(message))
        unquote(body)
        {:noreply, state}
      end
    end
  end
  @doc """
  Defines a matcher which always calls its corresponding
  function. Example: `match_all :pingHandler`
  The available options are:
  * async - runs the matcher asynchronously when this is true
  * uniq - ensures only one version of the matcher can be running per channel.
  Should be used with async: true.
  """
  defmacro match_all(function, opts \\ []) do
    add_handler_impl(function, __CALLER__.module, [])
    func_exec_ast = quote do: unquote(function)(var!(message), %{})
    uniq? = Keyword.get(opts, :uniq, false)
    overrideable? = Keyword.get(opts, :overrideable, false)
    func_exec_ast
    |> check_async(Keyword.get(opts, :async, false))
    |> check_unique(function, uniq?, overrideable?)
  end
  @doc """
  Defines a matcher which will match a regex against the trailing portion
  of an IRC message. Example: `match_re ~r"me|you", :meOrYouHandler`
  The available options are:
  * async - runs the matcher asynchronously when this is true
  * uniq - ensures only one version of the matcher can be running per channel.
  Should be used with async: true.
  """
  defmacro match_re(re, function, opts \\ []) do
    add_handler_impl(function, __CALLER__.module, [])
    func_exec_ast = quote do: unquote(function)(var!(message), res)
    uniq? = Keyword.get(opts, :uniq, false)
    overrideable? = Keyword.get(opts, :overrideable, false)
    func_exec_ast
    |> check_async(Keyword.get(opts, :async, false))
    |> check_unique(function, uniq?, overrideable?)
    |> add_re_matcher(re)
  end
  # Wraps `body` so it only runs when `re` names captures in the trailing text.
  defp add_re_matcher(body, re) do
    quote do
      case Regex.named_captures(unquote(re), var!(message).trailing) do
        nil -> :ok
        res -> unquote(body)
      end
    end
  end
  @doc """
  Defines a matcher which will match a string defining
  various capture variables against the trailing portion
  of an IRC message.
  ## Example
  ```
  handle "PRIVMSG" do
    match "!rand :low :high", :genRand, match_group: "[0-9]+"
    match "!join :channel([#&][a-zA-Z0-9]+)", :joinChannel"
    match ["!say ~msg", "!s ~msg"], :sayMessage
  end
  ```
  In this example, the genRand function will be called
  when a user sends a message to a channel saying something like
  `!rand 0 10`. If both parameters are strings, the genRand function
  will be passed the messages, and a map which will look like `%{low: 0, high: 10}`.
  Additionally the usage of a list allows for command aliases, in the second match.
  The second match will find channel joining messages, using an embedded regex to
  validate a channel. These embedded regexs will override the match_group value
  and should be used when you need to match multiple parameters which will not
  accept the same regex. That or if you just don't feel like writing `match_group: ""`.
  Available match string params are `:param` and `~param`. The former
  will match a specific space separated parameter, whereas the latter matches
  an unlimited number of characters.
  Match can also be called with a few different options. Currently there are:
  * match_group - Default regex which is used for matching in the match string. By default
  it is `[a-zA-Z0-9]+`
  * async - Whether or not the matcher should be run synchronously or asynchronously.
  By default it is false, but should be set to true if await_resp is to be used.
  * uniq - When used with the async option, this ensures only one version of the matcher
  can be running at any given time. The uniq option can be either channel level or nick level,
  specified with the option :chan or :nick.
  * uniq_overridable - This is used to determine whether or not a unique match can be overridden
  by a new match, or if the new match should exit and allow the previous match to continue running.
  By default it is true, and new matches will kill off old matches.
  Help commands will additionally be generated for all matches. The "help_cmd" option of the config
  will allow specifying a prefix to use for help triggers, and this command can be used standalone
  to list all commands in format "cmd1; cmd2; ...". Additionally, "help_cmd some_cmd" will display more
  detailed documentation for a given command, including a description(derived from the @doc attribute
  of the specified handler), and aliases(all non head items in a list based match - ["!queue", "!q"]
  will generate a single help command for !queue, and list "!q" as an alias).
  """
  defmacro match(match, function, opts \\ [])
  defmacro match(match_str, function, opts) when is_bitstring(match_str) do
    make_match(match_str, function, opts, __CALLER__.module, [])
  end
  defmacro match([primary|aliases], function, opts) do
    pm = make_match(primary, function, opts, __CALLER__.module, aliases)
    am = for match <- aliases, do: make_match(match, function, opts, __CALLER__.module, nil)
    [pm|am]
  end
  # Expands one match string into the matcher AST: docs, handler registration,
  # then the uniqueness/async/capture wrapping pipeline.
  # NOTE(review): the docs above mention a :uniq_overridable option with default
  # true, but the code reads the :overrideable key with default false — confirm
  # the intended option name and default.
  defp make_match(match_str, function, opts, module, aliases) do
    if aliases != nil do
      add_docs(match_str, function, module, opts, aliases)
    end
    add_handler_impl(function, module, get_var_list(match_str))
    uniq? = Keyword.get(opts, :uniq, false)
    overrideable? = Keyword.get(opts, :overrideable, false)
    async? = Keyword.get(opts, :async, false)
    match_group = Keyword.get(opts, :match_group, "[a-zA-Z0-9]+")
    gen_match_func_call(function)
    |> check_unique(function, uniq?, overrideable?)
    |> check_async(async?)
    |> add_captures(match_str, match_group)
  end
  # Records help metadata for a match unless the :nodoc option is present.
  defp add_docs(match_str, function, module, opts, aliases) do
    if !Keyword.has_key?(opts, :nodoc) do
      cmd = get_cmd(match_str)
      Module.put_attribute(module, :match_docs, %{
        "cmd" => cmd,
        "data" => [match_str, function, module, opts],
        "aliases" => Enum.map(aliases, &get_cmd/1) |> Enum.filter(fn a -> a != cmd end)
      })
    end
  end
  # First space-separated token of a match string, e.g. "!rand" from "!rand :n".
  defp get_cmd(match_str) do
    match_str |> String.split(" ") |> List.first
  end
  # Names of all :param / ~param capture variables in a match string.
  defp get_var_list(match_str) do
    String.split(match_str)
    |> Enum.reduce([], fn(part, acc) ->
      case String.first(part) do
        ":" -> [String.trim_leading(part, ":")|acc]
        "~" -> [String.trim_leading(part, "~")|acc]
        _ -> acc
      end
    end)
  end
  # Accumulates {handler, capture-vars} pairs in the @handlers attribute.
  defp add_handler_impl(name, module, vars) do
    Module.put_attribute(module, :handlers, {name, vars})
  end
  # AST for invoking a handler with the injected message and capture map.
  defp gen_match_func_call(function) do
    quote do
      unquote(function)(var!(message), res)
    end
  end
  # Wraps the body in per-channel/per-nick uniqueness bookkeeping when requested.
  defp check_unique(body, function, use_uniq?, overrideable?)
  defp check_unique(body, _function, false, _overrideable), do: body
  defp check_unique(body, function, uniq_type, overrideable?) do
    id_string = get_unique_table_id(function, uniq_type)
    create_unique_match(body, id_string, overrideable?)
  end
  # ETS key AST identifying a running matcher; scope depends on uniq type.
  defp get_unique_table_id(function, type) do
    fun_string = Atom.to_string(function)
    case type do
      true -> quote do: "#{unquote(fun_string)}_#{chan}_#{nick}"
      :chan -> quote do: "#{unquote(fun_string)}_#{chan}"
      :nick -> quote do: "#{unquote(fun_string)}_#{chan}_#{nick}"
    end
  end
  # Overrideable variant kills any already-running matcher before starting;
  # non-overrideable variant bails out if one is already registered.
  defp create_unique_match(body, id_string, overrideable?)
  defp create_unique_match(body, id_string, true) do
    quote do
      [chan] = var!(message).args
      %{nick: nick} = var!(message).user
      case :ets.lookup(@task_table, unquote(id_string)) do
        [{_fun, pid}] ->
          Process.exit(pid, :kill)
          :ets.delete(@task_table, unquote(id_string))
        [] -> nil
      end
      :ets.insert(@task_table, {unquote(id_string), self()})
      unquote(body)
      :ets.delete(@task_table, unquote(id_string))
    end
  end
  defp create_unique_match(body, id_string, false) do
    quote do
      [chan] = var!(message).args
      %{nick: nick} = var!(message).user
      case :ets.lookup(@task_table, unquote(id_string)) do
        [{_fun, pid}] -> nil
        [] ->
          :ets.insert(@task_table, {unquote(id_string), self()})
          unquote(body)
          :ets.delete(@task_table, unquote(id_string))
      end
    end
  end
  # Runs the body in a Task when async was requested; otherwise inline.
  defp check_async(body, async?)
  defp check_async(body, true) do
    quote do
      Task.start fn ->
        unquote(body)
      end
    end
  end
  defp check_async(body, false), do: body
  # Guards the body behind a named-captures regex built from the match string.
  defp add_captures(body, match_str, match_group) do
    re = match_str |> extract_vars(match_group) |> Macro.escape
    quote do
      case Regex.named_captures(unquote(re), var!(message).trailing) do
        nil ->
          :ok
        res -> unquote(body)
      end
    end
  end
  # Compiles a match string into an anchored regex with named capture groups.
  defp extract_vars(match_str, match_group) do
    parts = String.split(match_str)
    l = for part <- parts, do: gen_part(part, match_group)
    expr = "^#{Enum.join(l, " ")}$"
    Regex.compile!(expr)
  end
  # One regex fragment per token: ":x" -> bounded capture, "~x" -> greedy
  # capture, anything else -> escaped literal.
  defp gen_part(part, match_group) do
    case part do
      ":" <> param ->
        # Check for embedded regex capture
        case Regex.named_captures(~r/(?<name>[a-zA-Z0-9]+)\((?<re>.+)\)/, param) do
          %{"name" => name, "re" => re} -> "(?<#{name}>#{re})"
          nil -> "(?<#{param}>#{match_group})"
        end
      "~" <> param -> "(?<#{param}>.+)"
      text -> Regex.escape(text)
    end
  end
  @doc """
  Convenience macro for defining handlers. It injects the variable `message` into
  the environment allowing macros like `reply` to work automatically. It additionally
  detects various map types as arguments and is able to differentiate between maps
  which destructure Kaguya messages, vs. the match argument.
  For example:
  ```
  # This handler matches all calls to it.
  defh some_handler do
    ...
  end
  # This handler matches the IRC message struct's nick param.
  defh some_other_handler(%{user: %{nick: nick}}) do
    ...
  end
  # This handler matches the given match argument's value.
  defh some_other_handler(%{"match_arg" => val}) do
    ...
  end
  # This handler matches the given match argument's value and the IRC message's nick.
  # Note that the order of these two maps in the arguments DOES NOT MATTER.
  # The macro will automatically detect which argument is mapped to which type of input for you.
  defh some_other_handler(%{user: %{nick: nick}}, %{"match_arg" => val}) do
    ...
  end
  ```
  """
  defmacro defh({name, _line, nil}, do: body) do
    args = quote do: [var!(message), var!(args)]
    make_defh_func(name, args, body)
  end
  defmacro defh({name, _line, [arg]}, do: body) do
    args =
      case get_map_type(arg) do
        :msg_map -> quote do: [var!(message) = unquote(arg), var!(args)]
        :arg_map -> quote do: [var!(message), var!(args) = unquote(arg)]
      end
    make_defh_func(name, args, body)
  end
  defmacro defh({name, _line, [arg1, arg2]}, do: body) do
    args =
      case {get_map_type(arg1), get_map_type(arg2)} do
        {:msg_map, :arg_map} -> quote do: [var!(message) = unquote(arg1), var!(args) = unquote(arg2)]
        {:arg_map, :msg_map} -> quote do: [var!(args) = unquote(arg1), var!(message) = unquote(arg2)]
      end
    make_defh_func(name, args, body)
  end
  # Maintain legacy compat in a few situations with old defh
  defp get_map_type({:_, _, _}) do
    :msg_map
  end
  # Classifies a quoted map pattern: all-atom keys destructure the message,
  # all-string keys destructure the match-argument map.
  defp get_map_type(qmap) do
    {:%{}, _line, kvs} = qmap
    keys = Enum.map(kvs, fn {key, _val} -> key end)
    case {Enum.all?(keys, &is_atom/1), Enum.all?(keys, &is_bitstring/1)} do
      {true, false} -> :msg_map
      {false, true} -> :arg_map
      _ -> raise "Maps in defh must be all atoms for a message, or all strings for arguments!"
    end
  end
  # Emits the handler function; trailing var! references suppress unused warnings.
  defp make_defh_func(name, args, body) do
    quote do
      def unquote(name)(unquote_splicing(args)) do
        unquote(body)
        # Suppress unused var warning
        var!(message)
        var!(args)
      end
    end
  end
  @doc """
  Creates a validation stack for use in a handler.
  ## Example:
  ```
  validator :is_me do
    :check_nick_for_me
  end
  def check_nick_for_me(%{user: %{nick: "me"}}), do: true
  def check_nick_for_me(_message), do: false
  ```
  In the example, a validator named :is_me is created.
  In the validator, any number of function can be defined
  with atoms, and they will be all called. Every validator
  function will be given a message, and should return either
  true or false.
  """
  defmacro validator(name, do: body) when is_atom(body) do
    create_validator(name, [body])
  end
  defmacro validator(name, do: body) do
    {:__block__, [], funcs} = body
    create_validator(name, funcs)
  end
  # Emits a predicate that is true only when every listed function passes.
  defp create_validator(name, funcs) do
    require Logger
    Logger.log(:warn, "The validate macro is being deprecated, please use enforce instead")
    quote do
      def unquote(name)(message) do
        res = for func <- unquote(funcs), do: apply(__MODULE__, func, [message])
        !Enum.member?(res, false)
      end
    end
  end
  @doc """
  Creates a scope in which only messages that successfully pass through
  the given validator will be used.
  ## Example:
  ```
  handle "PRIVMSG" do
    validate :is_me do
      match "Hi", :hiHandler
    end
  end
  ```
  In the example, only messages which pass through the is_me validator,
  defined prior will be matched within this scope.
  """
  defmacro validate(validator, do: body) do
    require Logger
    Logger.log(:warn, "The validate macro is being deprecated, please use enforce instead")
    quote do
      if unquote(validator)(var!(message)) do
        unquote(body)
      end
    end
  end
  @doc """
  Enforces certain constraints around a block. This will replace
  the validate macro.
  ## Example:
  ```
  def is_me(msg), do: true
  def not_ignored(msg), do: true
  handle "PRIVMSG" do
    enforce [:is_me, :not_ignored] do
      match "Hi", :someHandler
    end
    enforce :is_me do
      match "Bye", :someOtherHandler
    end
  ```
  """
  defmacro enforce(validators, do: body) when is_list(validators) do
    enforce_rec(validators, body)
  end
  defmacro enforce(validator, do: body) do
    enforce_rec([validator], body)
  end
  # Recursively nests the body inside one `if` per validator.
  def enforce_rec([v], body) do
    quote do
      if unquote(v)(var!(message)) do
        unquote(body)
      end
    end
  end
  def enforce_rec([v|rest], body) do
    nb =
      quote do
        if unquote(v)(var!(message)) do
          unquote(body)
        end
      end
    enforce_rec(rest, nb)
  end
  @doc """
  Sends a response to the sender of the PRIVMSG with a given message.
  Example: `reply "Hi"`
  """
  defmacro reply(response) do
    quote do
      recip = get_recip(var!(message))
      Kaguya.Util.sendPM(unquote(response), recip)
    end
  end
  @doc """
  Sends a response to the sender of the PRIVMSG with a given message via a private message.
  Example: `reply_priv "Hi"`
  """
  defmacro reply_priv(response) do
    quote do
      recip = Map.get(var!(message).user, :nick)
      Kaguya.Util.sendPM(unquote(response), recip)
    end
  end
  @doc """
  Sends a notice response to the channel or sender of the PRIVMSG with a given message.
  Example: `reply_notice "Hi"`
  """
  defmacro reply_notice(response) do
    quote do
      recip = Kaguya.Module.get_recip(var!(message))
      Kaguya.Util.sendNotice(unquote(response), recip)
    end
  end
  @doc """
  Sends a notice to the user who sent the PRIVMSG with a given message via a private message.
  Example: `reply_priv_notice "Hi"`
  """
  defmacro reply_priv_notice(response) do
    quote do
      recip = Map.get(var!(message).user, :nick)
      Kaguya.Util.sendNotice(unquote(response), recip)
    end
  end
  @doc """
  Determines whether or not a response should be sent back to a channel
  or if the recipient sent the message in a PM
  """
  def get_recip(message) do
    case message.command do
      "PRIVMSG" -> get_pm_recip(message)
      "JOIN" -> message.trailing
    end
  end
  # A message addressed to the bot's own nick is a PM: reply to the sender's
  # nick; otherwise reply to the channel.
  defp get_pm_recip(message) do
    [chan] = message.args
    bot = Application.get_env(:kaguya, :bot_name)
    case chan do
      ^bot -> Map.get(message.user, :nick)
      _ -> chan
    end
  end
  @doc """
  Waits for an irc user to send a message which matches the given match string,
  and returns the resulting map. The user(s) listened for, channel listened for,
  timeout, and match params can all be tweaked. If the matcher times out,
  the variables new_message and resp will be set to nil, otherwise they will
  contain the message and the parameter map respectively for use.
  You must use await_resp in a match which has the async: true
  flag enabled or the module will time out.
  ## Example
  ```
  def handleOn(message, %{"target" => t, "repl" => r}) do
    reply "Fine."
    {msg, _resp} = await_resp t
    if msg != nil do
      reply r
    end
  end
  ```
  In this example, the bot will say "Fine." upon the function being run,
  and then wait for the user in the channel to say the target phrase.
  On doing so, the bot responds with the given reply.
  await_resp also can be called with certain options, these are:
  * match_group - regex to be used for matching parameters in the given string.
  By default this is `[a-zA-Z0-9]+`
  * nick - the user whose nick will be matched against in the callback. Use :any
  to allow for any nick to be matched against. By default, this will be the nick
  of the user who sent the currently processed messages
  * chan - the channel to be matched against. Use :any to allow any channel to be matched
  against. By default this is the channel where the currently processed message was sent from.
  * timeout - the timeout period for a message to be matched, in milliseconds. By default it is
  60000, or 60 seconds.
  """
  defmacro await_resp(match_str, opts \\ []) do
    match_group = Keyword.get(opts, :match_group, "[a-zA-Z0-9]+")
    timeout = Keyword.get(opts, :timeout, 60000)
    quote bind_quoted: [opts: opts, timeout: timeout, match_str: match_str, match_group: match_group] do
      nick = Keyword.get(opts, :nick, var!(message).user.nick)
      [def_chan] = var!(message).args
      chan = Keyword.get(opts, :chan, def_chan)
      Kaguya.Module.await_resp(match_str, chan, nick, timeout, match_group)
    end
  end
  @doc """
  Actual function used to execute await_resp. The macro should be preferred
  most of the time, but the function can be used if necessary.
  """
  def await_resp(match_str, chan, nick, timeout, match_group) do
    has_vars? = match_str |> get_var_list |> length > 0
    match_fun = get_match_fun(match_str, chan, nick, match_group, has_vars?)
    try do
      GenServer.call(Kaguya.Module.Core, {:add_callback, match_fun}, timeout)
    catch
      # On timeout, deregister the callback and signal "no match".
      :exit, _ -> GenServer.cast(Kaguya.Module.Core, {:remove_callback, self()})
                  {nil, nil}
    end
  end
  # Builds the callback predicate used by await_resp: the regex variant when
  # the match string has capture vars, an exact-string compare otherwise.
  defp get_match_fun(match_str, chan, nick, match_group, has_vars?)
  defp get_match_fun(match_str, chan, nick, match_group, true) do
    re = match_str |> extract_vars(match_group)
    fn msg ->
      if (msg.args == [chan] or chan == :any) and (msg.user.nick == nick or nick == :any) do
        case Regex.named_captures(re, msg.trailing) do
          nil -> false
          res -> {true, {msg, res}}
        end
      else
        false
      end
    end
  end
  defp get_match_fun(match_str, chan, nick, _match_group, false) do
    fn msg ->
      if match_str == msg.trailing and (msg.args == [chan] or chan == :any) and (msg.user.nick == nick or nick == :any) do
        {true, {msg, nil}}
      else
        false
      end
    end
  end
end
|
lib/kaguya/module.ex
| 0.571169
| 0.402862
|
module.ex
|
starcoder
|
defmodule SRP do
@moduledoc """
SRP provides an implementation of the Secure Remote Password Protocol presented on
[The SRP Authentication and Key Exchange System](https://tools.ietf.org/html/rfc2945),
[Using the Secure Remote Password (SRP) Protocol for TLS Authentication](https://tools.ietf.org/html/rfc5054)
and [The Secure Remote Password Protocol](http://srp.stanford.edu/ndss.html).
The protocol provides a way to do zero-knowledge authentication between client and servers.
By using the SRP protocol you can:
- authenticate without ever sending a password over the network.
- authenticate without the risk of anyone learning any of your secrets – even
if they intercept your communication.
- authenticate both the identity of the client and the server to guarantee
that a client isn’t communicating with an impostor server.
## Signing up
After the user provides his username and password, the client must generate
a password verifier. Then it must send to the server:
- The username for future identification.
- The password verifier that will be used in the future to verify the client credentials.
- The salt used in the process.
```elixir
username = "alice"
password = "<PASSWORD>"
identity = SRP.new_identity(username, password)
%SRP.IdentityVerifier{username: username, salt: salt, password_verifier: password_verifier} =
SRP.generate_verifier(identity)
# Send to the server -> username + salt + password_verifier
# Server stores the information
```
## Logging in
Authenticating a user on the server involves multiple steps.
1. The client sends to the server the username.
2. The server finds the password verifier and salt for that username.
Then it generates a ephemeral key pair and sends back to the client the salt and the public key.
```elixir
# Find the record for the given username
# Load from the database the password_verifier, and the salt
key_pair = SRP.server_key_pair(password_verifier)
# Send back to the client -> key_pair.public + salt
```
If the username does not exist the server can send a fake value.
It is important to not reveal if an username is registered on the system or not.
An attacker could use the login to find the registered usernames
and try a dictionary attack specific for those users.
3. The client generates a key pair and a client proof of identity.
Then the client sends to the server the proof and the client's public key.
```elixir
# receives from the server the server_public_key and the salt.
identity = SRP.new_identity(username, password)
key_pair = SRP.client_key_pair()
proof = SRP.client_proof(identity, salt, key_pair, server_public_key)
# Send to the server -> proof + server_public_key
```
4. Server verify client proof then build its own proof of identity.
Then sends back the server's proof.
```elixir
valid? = SRP.valid_client_proof?(client_proof, password_verifier, server_key_pair, client_public_key)
if valid? do
# Send back to client the server's proof -> server_proof
else
# Send back unauthorized
end
```
5. The client receives the server's proof and validates it.
This step can be skipped if you don't feel the need to verify the server's identity.
```elixir
identity = SRP.new_identity(username, password)
valid? = SRP.valid_server_proof?(server_proof, identity, salt, client_key_pair, server_public_key)
```
From now on is to safe to create a new session between the client and server.
## Prime Groups
The default prime size is 2048. Each prime group contains a large prime and a generator.
These two values are used to derive several values on the calculations defined by the RFC.
The 1024-, 1536-, and 2048-bit groups are taken from software developed by Tom
Wu and <NAME> for the Stanford SRP distribution, and subsequently proven
to be prime. The larger primes are taken from
[More Modular Exponential (MODP) Diffie-Hellman groups for Internet Key Exchange (IKE)](https://tools.ietf.org/html/rfc3526),
but generators have been calculated that are primitive roots of N, unlike the generators in
[More Modular Exponential (MODP) Diffie-Hellman groups for Internet Key Exchange (IKE)](https://tools.ietf.org/html/rfc3526).
The following prime sizes are supported by SRP:
- 1024
- 1536
- 2048
- 3072
- 4096
- 6144
- 8192
## Hash Algorithm
By default the algorithm is SHA-1 because it is the algorithm used on the RFC.
The SRP protocol uses a hash function to derive several values:
- The hash of the public keys prevents an attacker who learns a user's verifier
from being able to authenticate as that user.
- The hash of the prime group prevents an attacker who can select group parameters
from being able to launch a 2-for-1 guessing attack.
- Another hash contains the user's password mixed with a salt.
Cryptanalytic attacks against SHA-1 that only affect its collision-
resistance do not compromise these uses. If attacks against SHA-1
are discovered that do compromise these uses, new cipher suites
should be specified to use a different hash algorithm.
The following hash algorithms are supported by SRP:
- sha
- sha224
- sha256
- sha384
- sha512
- md4
- md5
## Shared options
Almost all of the srp function below accept the following options:
- `:prime_size` - The size of the prime to be used on the calculations (default: `2048`);
- `:hash_algorithm` - The hash algorithm used to derive several values (default: `:sha`);
- `:random_bytes` - Quantity of random bytes used internally (default: `32`)
"""
import SRP.Math
alias SRP.{Group, Identity, IdentityVerifier, KeyPair}
require SRP.Group
@default_options [prime_size: 2048, hash_algorithm: :sha, random_bytes: 32]
@doc """
Generate a identity verifier that should be passed to the server during account creation.
## Examples
iex> alice_identity = SRP.new_identity("alice", "<PASSWORD>")
iex> %SRP.IdentityVerifier{username: "alice", salt: salt, password_verifier: password_verifier} =
...> SRP.generate_verifier(alice_identity)
iex> is_binary(salt)
true
iex> is_binary(password_verifier)
true
iex> bob_identity = SRP.new_identity("bob", "<PASSWORD>")
iex> %SRP.IdentityVerifier{username: "bob", salt: salt, password_verifier: password_verifier} =
...> SRP.generate_verifier(bob_identity, hash_algorithm: :sha512)
iex> is_binary(salt)
true
iex> is_binary(password_verifier)
true
iex> kirk_identity = SRP.new_identity("kirk", "<PASSWORD>")
iex> %SRP.IdentityVerifier{username: "kirk", salt: salt, password_verifier: password_verifier} =
...> SRP.generate_verifier(kirk_identity, prime_size: 1024)
iex> is_binary(salt)
true
iex> is_binary(password_verifier)
true
iex> spock_identity = SRP.new_identity("spock", "<PASSWORD>")
iex> %SRP.IdentityVerifier{username: "spock", salt: salt, password_verifier: password_verifier} =
...> SRP.generate_verifier(spock_identity, prime_size: 8192, hash_algorithm: :sha256)
iex> is_binary(salt)
true
iex> is_binary(password_verifier)
true
"""
@spec generate_verifier(Identity.t(), Keyword.t()) :: IdentityVerifier.t()
def generate_verifier(%Identity{username: username, password: password}, options \\ []) do
  opts = Keyword.merge(@default_options, options)
  algorithm = Keyword.get(opts, :hash_algorithm)

  # Look up the prime group, then derive x = H(salt | H(username ":" password))
  # and the verifier v = g^x mod N (RFC 2945 terminology).
  %Group{prime: prime, generator: generator} = opts |> Keyword.get(:prime_size) |> Group.get()
  salt = opts |> Keyword.get(:random_bytes) |> random()
  inner_hash = hash(algorithm, username <> ":" <> password)
  credentials = hash(algorithm, salt <> inner_hash)

  IdentityVerifier.new(username, salt, mod_pow(generator, credentials, prime))
end
@doc """
Create a new `SRP.Identity` struct.
## Examples
iex> SRP.new_identity("alice", "<PASSWORD>")
%SRP.Identity{username: "alice", password: "<PASSWORD>"}
"""
# Plain constructor for the credential pair consumed by the other SRP functions.
@spec new_identity(String.t(), String.t()) :: Identity.t()
def new_identity(username, password) when is_binary(username) and is_binary(password) do
  %Identity{username: username, password: password}
end
@doc """
Generate a ephemeral key pair for the server.
The private key is randomly generated, and the public key is
derived from the private key and the password verifier.
## Examples
iex> password_verifier = Base.decode16!("<PASSWORD>")
iex> %SRP.KeyPair{public: public_key, private: private_key} =
...> SRP.server_key_pair(password_verifier)
iex> is_binary(public_key)
true
iex> is_binary(private_key)
true
iex> password_verifier = Base.decode16!("<PASSWORD>")
iex> %SRP.KeyPair{public: public_key, private: private_key} =
...> SRP.server_key_pair(password_verifier, hash_algorithm: :sha512)
iex> is_binary(public_key)
true
iex> is_binary(private_key)
true
iex> password_verifier = Base.decode16!("<PASSWORD>")
iex> %SRP.KeyPair{public: public_key, private: private_key} =
...> SRP.server_key_pair(password_verifier, prime_size: 1024)
iex> is_binary(public_key)
true
iex> is_binary(private_key)
true
iex> password_verifier = Base.decode16!("<PASSWORD>")
iex> %SRP.KeyPair{public: public_key, private: private_key} =
...> SRP.server_key_pair(password_verifier, prime_size: 8192, hash_algorithm: :sha256)
iex> is_binary(public_key)
true
iex> is_binary(private_key)
true
"""
@spec server_key_pair(binary(), Keyword.t()) :: KeyPair.t()
def server_key_pair(password_verifier, options \\ []) when is_binary(password_verifier) do
  opts = Keyword.merge(@default_options, options)
  algorithm = Keyword.get(opts, :hash_algorithm)

  %Group{prime: prime, prime_length: prime_length, generator: generator} =
    opts |> Keyword.get(:prime_size) |> Group.get()

  # k = H(N | pad(g)) — the SRP-6a multiplier parameter.
  multiplier = hash(algorithm, prime <> String.pad_leading(generator, prime_length))
  private_key = opts |> Keyword.get(:random_bytes) |> random()

  # B = k*v + g^b. NOTE(review): the sum is not reduced mod N here; the client
  # subtracts k*g^x before exponentiating (which reduces mod N), so the
  # protocol still round-trips, but this differs from the RFC's
  # `(k*v + g^b) % N` — confirm this is intentional.
  server_public =
    multiplier
    |> mult(password_verifier)
    |> add(mod_pow(generator, private_key, prime))
    |> :binary.encode_unsigned()

  %KeyPair{private: private_key, public: server_public}
end
@doc """
Generate a ephemeral key pair for the client.
The private key is randomly generated and
then the public key is derived from the private key.
## Examples
iex> %SRP.KeyPair{public: public_key, private: private_key} =
...> SRP.client_key_pair()
iex> is_binary(public_key)
true
iex> is_binary(private_key)
true
iex> %SRP.KeyPair{public: public_key, private: private_key} =
...> SRP.client_key_pair(hash_algorithm: :sha512)
iex> is_binary(public_key)
true
iex> is_binary(private_key)
true
iex> %SRP.KeyPair{public: public_key, private: private_key} =
...> SRP.client_key_pair(prime_size: 1024)
iex> is_binary(public_key)
true
iex> is_binary(private_key)
true
iex> %SRP.KeyPair{public: public_key, private: private_key} =
...> SRP.client_key_pair(prime_size: 8192, hash_algorithm: :sha256)
iex> is_binary(public_key)
true
iex> is_binary(private_key)
true
"""
@spec client_key_pair(Keyword.t()) :: KeyPair.t()
def client_key_pair(options \\ []) do
  opts = Keyword.merge(@default_options, options)
  %Group{prime: prime, generator: generator} = opts |> Keyword.get(:prime_size) |> Group.get()

  # a is random; A = g^a mod N.
  private_key = opts |> Keyword.get(:random_bytes) |> random()
  %KeyPair{private: private_key, public: mod_pow(generator, private_key, prime)}
end
@doc """
Generate a proof of identity for the client.
## Examples
iex> identity = SRP.new_identity("bob", "<PASSWORD>")
iex> identity_verifier = SRP.generate_verifier(identity)
iex> client_key_pair = SRP.client_key_pair()
iex> server_key_pair = SRP.server_key_pair(identity_verifier.password_verifier)
iex> proof =
...> SRP.client_proof(
...> identity,
...> identity_verifier.salt,
...> client_key_pair,
...> server_key_pair.public
...> )
iex> is_binary(proof)
true
"""
# NOTE: the first parameter is an `Identity.t()`, not a raw binary — the
# previous spec advertised `binary()`, contradicting the `%Identity{}` match
# in the function head and misleading Dialyzer.
@spec client_proof(Identity.t(), binary(), KeyPair.t(), binary(), Keyword.t()) :: binary()
def client_proof(
      %Identity{} = identity,
      salt,
      %KeyPair{} = client_key_pair,
      server_public_key,
      options \\ []
    ) do
  # Derive the shared premaster secret from the client's view of the
  # exchange, then fold it with both public keys into the proof (M1).
  premaster_secret =
    generate_client_premaster_secret(
      identity,
      salt,
      client_key_pair,
      server_public_key,
      options
    )

  generate_client_proof(client_key_pair.public, server_public_key, premaster_secret, options)
end
@doc """
Validate the client's proof of identity.
## Examples
iex> identity = SRP.new_identity("bob", "<PASSWORD>")
iex> identity_verifier = SRP.generate_verifier(identity)
iex> client_key_pair = SRP.client_key_pair()
iex> server_key_pair = SRP.server_key_pair(identity_verifier.password_verifier)
iex> client_proof =
...> SRP.client_proof(
...> identity,
...> identity_verifier.salt,
...> client_key_pair,
...> server_key_pair.public
...> )
iex> SRP.valid_client_proof?(
...> client_proof,
...> identity_verifier.password_verifier,
...> server_key_pair,
...> client_key_pair.public
...> )
true
"""
@spec valid_client_proof?(binary(), binary(), KeyPair.t(), binary(), Keyword.t()) :: boolean()
def valid_client_proof?(
      client_proof,
      password_verifier,
      %KeyPair{} = server_key_pair,
      client_public_key,
      options \\ []
    ) do
  # Recompute M1 from the server's view of the exchange and compare it with
  # what the client actually sent.
  secret =
    generate_server_premaster_secret(password_verifier, server_key_pair, client_public_key, options)

  expected = generate_client_proof(client_public_key, server_key_pair.public, secret, options)
  client_proof == expected
end
@doc """
Generate a proof of identity for the server.
## Examples
iex> identity = SRP.new_identity("bob", "<PASSWORD>")
iex> identity_verifier = SRP.generate_verifier(identity)
iex> client_key_pair = SRP.client_key_pair()
iex> server_key_pair = SRP.server_key_pair(identity_verifier.password_verifier)
iex> client_proof =
...> SRP.client_proof(
...> identity,
...> identity_verifier.salt,
...> client_key_pair,
...> server_key_pair.public
...> )
iex> server_proof =
...> SRP.server_proof(
...> client_proof,
...> identity_verifier.password_verifier,
...> server_key_pair,
...> client_key_pair.public
...> )
iex> is_binary(server_proof)
true
"""
@spec server_proof(binary(), binary(), KeyPair.t(), binary(), Keyword.t()) :: binary()
def server_proof(
      client_proof,
      password_verifier,
      %KeyPair{} = server_key_pair,
      client_public_key,
      options \\ []
    ) do
  # Derive the premaster secret on the server side, then build M2 from the
  # client's public key, the client's proof and the hashed secret.
  secret =
    generate_server_premaster_secret(password_verifier, server_key_pair, client_public_key, options)

  generate_server_proof(client_proof, client_public_key, secret, options)
end
@doc """
Validate the server's proof of identity.
## Examples
iex> identity = SRP.new_identity("bob", "<PASSWORD>")
iex> identity_verifier = SRP.generate_verifier(identity)
iex> client_key_pair = SRP.client_key_pair()
iex> server_key_pair = SRP.server_key_pair(identity_verifier.password_verifier)
iex> client_proof =
...> SRP.client_proof(
...> identity,
...> identity_verifier.salt,
...> client_key_pair,
...> server_key_pair.public
...> )
iex> server_proof =
...> SRP.server_proof(
...> client_proof,
...> identity_verifier.password_verifier,
...> server_key_pair,
...> client_key_pair.public
...> )
iex> SRP.valid_server_proof?(
...> server_proof,
...> identity,
...> identity_verifier.salt,
...> client_key_pair,
...> server_key_pair.public
...> )
true
"""
@spec valid_server_proof?(binary(), Identity.t(), binary(), KeyPair.t(), binary(), Keyword.t()) ::
        boolean()
def valid_server_proof?(
      server_proof,
      %Identity{} = identity,
      salt,
      %KeyPair{} = client_key_pair,
      server_public_key,
      options \\ []
    ) do
  # The client recomputes its own M1, derives the expected M2 from it, and
  # accepts the session only when the server's proof matches.
  secret =
    generate_client_premaster_secret(identity, salt, client_key_pair, server_public_key, options)

  my_proof = generate_client_proof(client_key_pair.public, server_public_key, secret, options)
  server_proof == generate_server_proof(my_proof, client_key_pair.public, secret, options)
end
# Client-side premaster secret (RFC 2945/5054 naming):
#   u = H(pad(A) | pad(B))          (scrambling)
#   k = H(N | pad(g))               (multiplier)
#   x = H(salt | H(user ":" pass))  (credentials)
#   S = (B - k * g^x) ^ (a + u * x) mod N
defp generate_client_premaster_secret(
       %Identity{username: username, password: password},
       salt,
       %KeyPair{} = client_key_pair,
       server_public_key,
       options
     )
     when is_binary(salt) and is_binary(server_public_key) do
  options = Keyword.merge(@default_options, options)
  prime_size = Keyword.get(options, :prime_size)
  hash_algorithm = Keyword.get(options, :hash_algorithm)
  %Group{prime: prime, prime_length: prime_length, generator: generator} = Group.get(prime_size)

  # Both public keys are left-padded to the prime's length before hashing,
  # so client and server hash byte-identical input.
  scrambling =
    hash(
      hash_algorithm,
      String.pad_leading(client_key_pair.public, prime_length) <>
        String.pad_leading(server_public_key, prime_length)
    )

  multiplier = hash(hash_algorithm, prime <> String.pad_leading(generator, prime_length))
  credentials = hash(hash_algorithm, salt <> hash(hash_algorithm, username <> ":" <> password))

  mod_pow(
    sub(server_public_key, mult(multiplier, mod_pow(generator, credentials, prime))),
    add(client_key_pair.private, mult(scrambling, credentials)),
    prime
  )
end
# Server-side premaster secret (RFC 2945/5054 naming):
#   u = H(pad(A) | pad(B))   (scrambling)
#   S = (A * v^u) ^ b mod N
defp generate_server_premaster_secret(
       password_verifier,
       %KeyPair{} = server_key_pair,
       client_public_key,
       options
     )
     when is_binary(password_verifier) and is_binary(client_public_key) do
  options = Keyword.merge(@default_options, options)
  prime_size = Keyword.get(options, :prime_size)
  hash_algorithm = Keyword.get(options, :hash_algorithm)
  %Group{prime: prime, prime_length: prime_length} = Group.get(prime_size)

  # Same padded-key hash as the client computes, so both sides agree on u.
  scrambling =
    hash(
      hash_algorithm,
      String.pad_leading(client_public_key, prime_length) <>
        String.pad_leading(server_key_pair.public, prime_length)
    )

  mod_pow(
    mult(
      client_public_key,
      mod_pow(
        password_verifier,
        scrambling,
        prime
      )
    ),
    server_key_pair.private,
    prime
  )
end
# M1 = H(A | B | H(S)) — the client's proof that it derived the shared secret.
defp generate_client_proof(client_public_key, server_public_key, premaster_secret, options) do
  algorithm = @default_options |> Keyword.merge(options) |> Keyword.get(:hash_algorithm)
  session_hash = hash(algorithm, premaster_secret)
  hash(algorithm, client_public_key <> server_public_key <> session_hash)
end
# M2 = H(A | M1 | H(S)) — the server's proof that it derived the shared secret.
defp generate_server_proof(client_proof, client_public_key, premaster_secret, options) do
  algorithm = @default_options |> Keyword.merge(options) |> Keyword.get(:hash_algorithm)
  session_hash = hash(algorithm, premaster_secret)
  hash(algorithm, client_public_key <> client_proof <> session_hash)
end
# Thin wrapper over :crypto.hash/2; the guard whitelists exactly the
# algorithms advertised in the moduledoc.
defp hash(type, value) when type in [:sha224, :sha256, :sha384, :sha512, :sha, :md5, :md4] do
  :crypto.hash(type, value)
end
# Cryptographically strong random binary of `bytes_quantity` bytes.
defp random(bytes_quantity) when is_integer(bytes_quantity) do
  :crypto.strong_rand_bytes(bytes_quantity)
end
end
|
lib/srp.ex
| 0.904457
| 0.946151
|
srp.ex
|
starcoder
|
defmodule SanbaseWeb.Graphql.Middlewares.AccessControl do
  @moduledoc """
  Middleware that is used to restrict the API access in a certain timeframe.
  Currently the implemented scheme is like this:
  * For users accessing data for slug `santiment` - there is no restriction.
  * If the logged in user is subscribed to a plan - the allowed historical days is the value of `historical_data_in_days`
  for this plan.
  """
  @behaviour Absinthe.Middleware

  # These helpers run once per query on the hot path — inline them.
  @compile :inline_list_funcs
  @compile {:inline,
            transform_resolution: 1,
            check_plan: 1,
            check_from_to_params: 1,
            do_call: 2,
            restrict_from: 3,
            restrict_to: 3,
            check_from_to_both_outside: 1}

  import Sanbase.DateTimeUtils, only: [from_iso8601!: 1]

  alias Absinthe.Resolution
  alias Sanbase.Billing.{Subscription, GraphqlSchema, Plan, Product}

  @freely_available_slugs ["santiment"]
  # No crypto data exists before 2009 — reject earlier `from`/`to` values.
  @minimal_datetime_param from_iso8601!("2009-01-01T00:00:00Z")
  @free_subscription Subscription.free_subscription()
  @extension_metrics Plan.AccessChecker.extension_metrics()
  @extension_metric_product_map GraphqlSchema.extension_metric_product_map()

  # Middleware entry point. The pipeline order matters: the query/metric name
  # is normalized first, then plan access is checked, then the from/to window
  # is validated, restricted, and finally re-checked for total exclusion.
  def call(resolution, opts) do
    # First call `check_from_to_params` and then pass the execution to do_call/2
    resolution
    |> transform_resolution()
    |> check_plan()
    |> check_from_to_params()
    |> do_call(opts)
    |> check_from_to_both_outside()
  end

  # The name of the query/mutation can be passed in snake case or camel case.
  # Here we transform the name to an atom in snake case for consistency
  # and faster comparison of atoms
  defp transform_resolution(%Resolution{} = resolution) do
    %{context: context, definition: definition, arguments: arguments, source: source} = resolution

    # `String.to_existing_atom/1` is deliberate — it cannot create new atoms
    # from client-controlled query names.
    query_atom_name =
      definition.name
      |> Macro.underscore()
      |> String.to_existing_atom()
      |> get_query_or_metric(source, arguments)

    context = context |> Map.put(:__query_or_metric_atom_name__, query_atom_name)
    %Resolution{resolution | context: context}
  end

  # Basic auth should have no restrictions
  defp check_plan(%Resolution{context: %{auth: %{auth_method: :basic}}} = resolution) do
    resolution
  end

  # Data for the freely available slugs is accessible regardless of plan.
  defp check_plan(%Resolution{arguments: %{slug: slug}} = resolution)
       when slug in @freely_available_slugs do
    resolution
  end

  # Fall back to the free plan/subscription when the user is anonymous.
  defp check_plan(
         %Resolution{context: %{__query_or_metric_atom_name__: query_or_metric} = context} =
           resolution
       ) do
    plan = context[:auth][:plan] || :free
    subscription = context[:auth][:subscription] || @free_subscription
    product_id = subscription.plan.product_id || context.product_id
    product = Product.code_by_id(product_id)

    case Plan.AccessChecker.plan_has_access?(plan, product, query_or_metric) do
      true ->
        resolution

      false ->
        min_plan = Plan.AccessChecker.min_plan(product, query_or_metric)

        Resolution.put_result(
          resolution,
          {:error,
           "The metric #{elem(query_or_metric, 1)} is not accessible with your current plan #{plan}. Please upgrade to #{min_plan} plan."}
        )
    end
  end

  # If the query is resolved there's nothing to do here
  # A query can get resolved if it's rejected by the AccessControl middleware
  defp do_call(%Resolution{state: :resolved} = resolution, _) do
    resolution
  end

  # If the query is marked as having free realtime and historical data
  # do not restrict anything
  defp do_call(resolution, %{allow_realtime_data: true, allow_historical_data: true}) do
    resolution
  end

  # Basic auth should have no restrictions
  defp do_call(
         %Resolution{context: %{auth: %{auth_method: :basic}}} = resolution,
         _middleware_args
       ) do
    resolution
  end

  # Allow access to historical data and real-time data for the Santiment project.
  # This will serve the purpose of showing to anonymous and users with lesser plans
  # how the data looks like.
  defp do_call(%Resolution{arguments: %{slug: slug}} = resolution, _)
       when slug in @freely_available_slugs do
    resolution
  end

  # Some specific queries/metrics are available only when a special extension is
  # present.
  defp do_call(%Resolution{context: %{__query_or_metric_atom_name__: query}} = resolution, _)
       when query in @extension_metrics do
    case resolution.context[:auth][:current_user] do
      %Sanbase.Auth.User{} = user ->
        product_ids = Subscription.user_subscriptions_product_ids(user)

        if Map.get(@extension_metric_product_map, query) in product_ids do
          resolution
        else
          Resolution.put_result(resolution, {:error, :unauthorized})
        end

      # No logged-in user — extensions can never apply.
      _ ->
        Resolution.put_result(resolution, {:error, :unauthorized})
    end
  end

  # Dispatch the resolution of restricted and not-restricted queries to
  # different functions if there are `from` and `to` parameters
  defp do_call(
         %Resolution{
           context: %{__query_or_metric_atom_name__: query},
           arguments: %{from: _from, to: _to}
         } = resolution,
         middleware_args
       ) do
    if Plan.AccessChecker.is_restricted?(query) do
      restricted_query(resolution, middleware_args, query)
    else
      not_restricted_query(resolution, middleware_args)
    end
  end

  # Catch-all: queries without from/to parameters pass through untouched.
  defp do_call(resolution, _) do
    resolution
  end

  # Normalize to either a {:metric, name} or a {:query, name} tag; the metric
  # name may come from the resolution source or from the arguments.
  defp get_query_or_metric(:timeseries_data, %{metric: metric}, _args), do: {:metric, metric}

  defp get_query_or_metric(:aggregated_timeseries_data, %{metric: metric}, _args),
    do: {:metric, metric}

  defp get_query_or_metric(:aggregated_timeseries_data, _source, %{metric: metric}),
    do: {:metric, metric}

  defp get_query_or_metric(:histogram_data, %{metric: metric}, _args), do: {:metric, metric}
  defp get_query_or_metric(query, _source, _args), do: {:query, query}

  # Clamp the requested [from, to] window to what the subscription allows.
  defp restricted_query(
         %Resolution{arguments: %{from: from, to: to}, context: context} = resolution,
         middleware_args,
         query
       ) do
    subscription = context[:auth][:subscription] || @free_subscription
    product_id = subscription.plan.product_id || context.product_id

    historical_data_in_days =
      Subscription.historical_data_in_days(subscription, query, product_id)

    realtime_data_cut_off_in_days =
      Subscription.realtime_data_cut_off_in_days(subscription, query, product_id)

    resolution
    |> update_resolution_from_to(
      restrict_from(from, middleware_args, historical_data_in_days),
      restrict_to(to, middleware_args, realtime_data_cut_off_in_days)
    )
  end

  defp not_restricted_query(resolution, _middleware_args) do
    resolution
  end

  # Move the `to` datetime back so access to realtime data is not given
  defp restrict_to(to_datetime, %{allow_realtime_data: true}, _), do: to_datetime
  defp restrict_to(to_datetime, _, nil), do: to_datetime

  defp restrict_to(to_datetime, _, days) do
    restrict_to = Timex.shift(Timex.now(), days: -days)
    # Take the earlier of the requested `to` and the allowed cut-off.
    Enum.min_by([to_datetime, restrict_to], &DateTime.to_unix/1)
  end

  # Move the `from` datetime forward so access to historical data is not given
  defp restrict_from(from_datetime, %{allow_historical_data: true}, _), do: from_datetime
  defp restrict_from(from_datetime, _, nil), do: from_datetime

  defp restrict_from(from_datetime, _, days) when is_integer(days) do
    restrict_from = Timex.shift(Timex.now(), days: -days)
    # Take the later of the requested `from` and the allowed history start.
    Enum.max_by([from_datetime, restrict_from], &DateTime.to_unix/1)
  end

  # Returns `true` or an {:error, message} tuple (not `false`) so it can be
  # used directly inside `with`.
  defp to_param_is_after_from(from, to) do
    if DateTime.compare(to, from) == :gt do
      true
    else
      {:error,
       """
       The `to` datetime parameter must be after the `from` datetime parameter.
       """}
    end
  end

  defp from_or_to_params_are_after_minimal_datetime(from, to) do
    if DateTime.compare(from, @minimal_datetime_param) == :gt and
         DateTime.compare(to, @minimal_datetime_param) == :gt do
      true
    else
      {:error,
       """
       Cryptocurrencies didn't exist before #{@minimal_datetime_param}.
       Please check `from` and/or `to` parameters values.
       """}
    end
  end

  # Validate the original (pre-restriction) from/to parameters.
  defp check_from_to_params(%Resolution{arguments: %{from: from, to: to}} = resolution) do
    with true <- to_param_is_after_from(from, to),
         true <- from_or_to_params_are_after_minimal_datetime(from, to) do
      resolution
    else
      {:error, _message} = error ->
        resolution
        |> Resolution.put_result(error)
    end
  end

  defp check_from_to_params(%Resolution{} = resolution), do: resolution

  defp check_from_to_both_outside(%Resolution{state: :resolved} = resolution), do: resolution

  defp check_from_to_both_outside(
         %Resolution{arguments: %{from: from, to: to}, context: context} = resolution
       ) do
    case to_param_is_after_from(from, to) do
      true ->
        resolution

      _ ->
        # If we reach here the first time we checked to < from was not true
        # This means that the middleware rewrote the params in a way that this is
        # now true. If that happens - both from and to are outside the allowed interval
        %{restricted_from: restricted_from, restricted_to: restricted_to} =
          Sanbase.Billing.Plan.Restrictions.get(
            context[:__query_or_metric_atom_name__],
            context[:auth][:subscription],
            context[:product_id]
          )

        resolution
        |> Resolution.put_result(
          {:error,
           """
           Both `from` and `to` parameters are outside the allowed interval you can query #{context[:__query_or_metric_atom_name__] |> elem(1)} with your current subscription #{context[:product_id] |> Product.code_by_id()} #{context[:auth][:plan] || :free}. Upgrade to a higher tier in order to access more data.
           Allowed time restrictions:
           - `from` - #{restricted_from || "unrestricted"}
           - `to` - #{restricted_to || "unrestricted"}
           """}
        )
    end
  end

  defp check_from_to_both_outside(%Resolution{} = resolution), do: resolution

  # Write the (possibly clamped) from/to back into the resolution arguments.
  defp update_resolution_from_to(
         %Resolution{arguments: %{from: _from, to: _to} = args} = resolution,
         from,
         to
       ) do
    %Resolution{
      resolution
      | arguments: %{
          args
          | from: from,
            to: to
        }
    }
  end
end
|
lib/sanbase_web/graphql/middlewares/access_control.ex
| 0.90101
| 0.460471
|
access_control.ex
|
starcoder
|
defmodule Pbuf.Protoc.Field do
  @moduledoc """
  Struct describing a single protobuf field, plus the dispatcher that picks
  the concrete field builder (`Fields.Map`, `Fields.OneOf`, `Fields.Simple`
  or `Fields.Enumeration`) for a descriptor, and shared helpers used by
  those builders.
  """

  alias Pbuf.Protoc
  alias Pbuf.Protoc.Fields

  @enforce_keys [:tag, :name, :prefix, :typespec, :encode_fun, :decode_fun, :default]
  defstruct @enforce_keys ++
              [
                hidden: false,
                oneof_index: nil,
                post_decode: :none,
                json?: true
              ]

  @type t :: %__MODULE__{
          tag: pos_integer,
          name: String.t(),
          prefix: binary,
          typespec: String.t(),
          encode_fun: String.t(),
          decode_fun: String.t(),
          default: String.t(),
          hidden: boolean,
          oneof_index: nil | non_neg_integer,
          post_decode: :none | :map | :repack | :oneof1 | :oneof2,
          # Fix: `json?` is a struct field but was missing from the typespec.
          json?: boolean
        }

  defmacro __using__(_opts) do
    quote do
      alias Pbuf.Protoc.Field
      import Pbuf.Protoc.Field
    end
  end

  # returns a type that implements the Field protocol
  @spec build(Protoc.proto_field, Context.t) :: any
  def build(desc, context) do
    case get_type(desc, context) do
      :map -> Fields.Map.new(desc, context)
      :oneof -> Fields.OneOf.new(desc, context)
      :simple -> Fields.Simple.new(desc, context)
      :enum -> Fields.Enumeration.new(desc, context)
    end
  end

  # Classify a field descriptor. Checks are ordered from most to least
  # specific: map entries and oneof members take precedence over plain enums.
  @spec get_type(Protoc.proto_field, Context.t) :: :map | :oneof | :enum | :simple
  defp get_type(desc, context) do
    with {false, _} <- {is_map?(desc.type_name, context.maps), :map},
         {false, _} <- {is_oneof?(desc.oneof_index, context), :oneof},
         {false, _} <- {is_enum?(desc.type), :enum} do
      :simple
    else
      {true, type} -> type
    end
  end

  defp is_enum?(type) do
    internal_type(type) == :enum
  end

  defp is_oneof?(nil, _ctx) do
    false
  end

  # A 0 index with no declared oneofs means "not part of a oneof" (proto3
  # descriptors default the index to 0).
  defp is_oneof?(0, %{oneofs: m}) when map_size(m) == 0 do
    false
  end

  defp is_oneof?(_index, _ctx) do
    true
  end

  defp is_map?(nil, _maps) do
    false
  end

  # A field is a map when its type name matches one of the synthesized
  # `*Entry` message names collected in `context.maps`.
  defp is_map?(type_name, maps) do
    Enum.any?(maps, fn {name, _value} ->
      String.ends_with?(type_name, name)
    end)
  end

  # helpers for actual field builders

  # Converts a fully-qualified proto type name (".pkg.Msg") into an Elixir
  # module name ("Pkg.Msg").
  def module_name(%{type_name: <<".", type::binary>>}) do
    type
    |> String.split(".")
    |> Enum.map(&Protoc.capitalize_first/1)
    |> Enum.join(".")
  end

  @spec is_repeated?(Protoc.proto_field) :: boolean
  def is_repeated?(desc) do
    desc.label == :LABEL_REPEATED
  end

  # Extracts the {tag, name, type, wire_prefix} quadruple every builder needs.
  @spec extract_core(Protoc.proto_field) :: {non_neg_integer, String.t, atom, binary}
  def extract_core(desc, packed? \\ true) do
    tag = desc.number
    name = desc.name
    type = internal_type(desc.type)

    prefix =
      case is_repeated?(desc) && packed? do
        # bytes share the same encoding type as any type of array
        true -> Pbuf.Encoder.prefix(tag, :bytes)
        false -> Pbuf.Encoder.prefix(tag, type)
      end

    {tag, name, type, stringify_binary(prefix)}
  end

  # Maps descriptor type atoms to the internal wire-type atoms.
  # NOTE(review): :TYPE_GROUP (deprecated proto2 groups) has no clause and
  # would raise FunctionClauseError — presumably intentional; confirm.
  @spec internal_type(byte | atom) :: atom
  def internal_type(:TYPE_BOOL), do: :bool
  def internal_type(:TYPE_BYTES), do: :bytes
  def internal_type(:TYPE_STRING), do: :string
  def internal_type(:TYPE_DOUBLE), do: :double
  def internal_type(:TYPE_FLOAT), do: :float
  def internal_type(:TYPE_INT32), do: :int32
  def internal_type(:TYPE_INT64), do: :int64
  def internal_type(:TYPE_SINT32), do: :sint32
  def internal_type(:TYPE_SINT64), do: :sint64
  def internal_type(:TYPE_FIXED32), do: :fixed32
  def internal_type(:TYPE_FIXED64), do: :fixed64
  def internal_type(:TYPE_SFIXED32), do: :sfixed32
  def internal_type(:TYPE_SFIXED64), do: :sfixed64
  def internal_type(:TYPE_UINT32), do: :uint32
  def internal_type(:TYPE_UINT64), do: :uint64
  def internal_type(:TYPE_MESSAGE), do: :struct
  def internal_type(:TYPE_ENUM), do: :enum

  # Renders a binary as Elixir source, e.g. <<1, 2>> -> "<<1, 2>>", so the
  # prefix can be spliced into generated code.
  def stringify_binary(bin) do
    inner = bin |> :erlang.binary_to_list() |> Enum.map_join(", ", &Integer.to_string/1)
    "<<#{inner}>>"
  end
end
|
lib/protoc/fields/field.ex
| 0.658418
| 0.468365
|
field.ex
|
starcoder
|
defmodule Expug.Tokenizer do
@moduledoc ~S"""
Tokenizes a Pug template into a list of tokens. The main entry point is
`tokenize/1`.
iex> Expug.Tokenizer.tokenize("title= name")
[
{{1, 8}, :buffered_text, "name"},
{{1, 1}, :element_name, "title"},
{{1, 1}, :indent, 0}
]
Note that the tokens are reversed! It's easier to append to the top of a list
rather than to the end, making it more efficient.
This output is the consumed next by `Expug.Compiler`, which turns them into
an Abstract Syntax Tree.
## Token types
```
div.blue#box
```
- `:indent` - 0
- `:element_name` - `"div"`
- `:element_class` - `"blue"`
- `:element_id` - `"box"`
```
div(name="en")
```
- `:attribute_open` - `"("`
- `:attribute_key` - `"name"`
- `:attribute_value` - `"\"en\""`
- `:attribute_close` - `")"`
```
div= hello
```
- `:buffered_text` - `hello`
```
div!= hello
```
- `:unescaped_text` - `hello`
```
div hello
```
- `:raw_text` - `"hello"`
```
| Hello there
```
- `:raw_text` - `"Hello there"`
```
= Hello there
```
- `:buffered_text` - `"Hello there"`
```
- foo = bar
```
- `:statement` - `foo = bar`
```
doctype html5
```
- `:doctype` - `html5`
```
-# comment
more comments
```
- `:line_comment` - `comment`
- `:subindent` - `more comments`
```
// comment
more comments
```
- `:html_comment` - `comment`
- `:subindent` - `more comments`
## Also see
- `Expug.TokenizerTools` has the functions used by this tokenizer.
- `Expug.Compiler` uses the output of this tokenizer to build an AST.
- `Expug.ExpressionTokenizer` is used to tokenize expressions.
"""
import Expug.TokenizerTools
alias Expug.TokenizerTools.State
@doc """
Tokenizes a string.
Returns a list of tokens. Each token is in the format `{position, token, value}`.
The returned list is in reverse order (see the module docs).
"""
def tokenize(source, opts \\ []) do
  # Trailing whitespace would otherwise produce spurious trailing tokens.
  source = trim_trailing(source)
  run(source, opts, &document/1)
end

@doc """
Matches an entire document: optional blank lines, an optional doctype, then
one or more elements/text nodes separated by newlines.
"""
def document(state) do
  state
  |> optional(&newlines/1)
  |> optional(&doctype/1)
  |> many_of(
    &(&1 |> element_or_text() |> newlines()),
    &(&1 |> element_or_text()))
end

@doc """
Matches `doctype html`.
"""
def doctype(state) do
  # The `doctype` keyword is discarded; only its argument becomes a token.
  state
  |> discard(~r/^doctype/, :doctype_prelude)
  |> whitespace()
  |> eat(~r/^[^\n]+/, :doctype)
  |> optional(&newlines/1)
end
@doc """
Matches an HTML element, text node, or, you know... the basic statements.
I don't know what to call this.
"""
def element_or_text(state) do
  # The first matching alternative wins, so order matters (e.g. `-#` line
  # comments must be tried before bare `-` statements).
  state
  |> indent()
  |> one_of([
    &line_comment/1, # `-# hello`
    &html_comment/1, # `// hello`
    &buffered_text/1, # `= hello`
    &unescaped_text/1, # `!= hello`
    &raw_text/1, # `| hello`
    &statement/1, # `- hello`
    &element/1 # `div.blue hello`
  ])
end

@doc """
Matches any number of blank newlines. Whitespaces are accounted for.
"""
def newlines(state) do
  state
  |> discard(~r/^\n(?:[ \t]*\n)*/, :newlines)
end

@doc """
Matches an indentation. Gives a token that looks like `{_, :indent, 2}`
where the last number is the number of spaces/tabs.
Doesn't really care if you use spaces or tabs; a tab is treated like a single
space.
"""
def indent(state) do
  # Custom reducer: record the *length* of the matched run, not its text.
  state
  |> eat(~r/^\s*/, :indent, &[{&3, :indent, String.length(&2)} | &1])
end
@doc """
Matches `div.foo[id="name"]= Hello world`
"""
def element(state) do
  state
  |> element_descriptor()
  |> optional(&attributes_block/1)
  |> optional(fn s -> s
    |> one_of([
      &sole_buffered_text/1,
      &sole_unescaped_text/1,
      &sole_raw_text/1,
      &block_text/1
    ])
  end)
end

@doc """
Matches `div`, `div.foo` `div.foo.bar#baz`, etc
"""
def element_descriptor(state) do
  # Most specific form first; a bare class/id list is also a valid element.
  state
  |> one_of([
    &element_descriptor_full/1,
    &element_name/1,
    &element_class_or_id_list/1
  ])
end

@doc """
Matches `div.foo.bar#baz`
"""
def element_descriptor_full(state) do
  state
  |> element_name()
  |> element_class_or_id_list()
end

@doc """
Matches `.foo.bar#baz`
"""
def element_class_or_id_list(state) do
  state
  |> many_of(&element_class_or_id/1)
end

@doc """
Matches `.foo` or `#id` (just one)
"""
def element_class_or_id(state) do
  state
  |> one_of([ &element_class/1, &element_id/1 ])
end

@doc """
Matches `.foo`
"""
def element_class(state) do
  # The leading dot is discarded; only the class name becomes a token.
  state
  |> discard(~r/^\./, :dot)
  |> eat(~r/^[A-Za-z0-9_\-]+/, :element_class)
end

@doc """
Matches `#id`
"""
def element_id(state) do
  # The leading hash is discarded; only the id becomes a token.
  state
  |> discard(~r/^#/, :hash)
  |> eat(~r/^[A-Za-z0-9_\-]+/, :element_id)
end
@doc """
Matches `[name='foo' ...]`
"""
def attributes_block(state) do
  # Three interchangeable delimiter styles: `[...]`, `(...)` and `{...}`.
  state
  |> optional_whitespace()
  |> one_of([
    &attribute_bracket/1,
    &attribute_paren/1,
    &attribute_brace/1
  ])
end

# Matches a `[...]`-delimited attribute list.
def attribute_bracket(state) do
  state
  |> eat(~r/^\[/, :attribute_open)
  |> optional_whitespace()
  |> optional(&attribute_list/1)
  |> eat(~r/^\]/, :attribute_close)
end

# Matches a `(...)`-delimited attribute list.
def attribute_paren(state) do
  state
  |> eat(~r/^\(/, :attribute_open)
  |> optional_whitespace()
  |> optional(&attribute_list/1)
  |> eat(~r/^\)/, :attribute_close)
end

# Matches a `{...}`-delimited attribute list.
def attribute_brace(state) do
  state
  |> eat(~r/^\{/, :attribute_open)
  |> optional_whitespace()
  |> optional(&attribute_list/1)
  |> eat(~r/^\}/, :attribute_close)
end

@doc """
Matches `foo='val' bar='val'`
"""
def attribute_list(state) do
  # Attributes may be separated by whitespace, newlines, and optional commas.
  state
  |> optional_whitespace_or_newline()
  |> many_of(
    &(&1 |> attribute() |> attribute_separator() |> whitespace_or_newline()),
    &(&1 |> attribute()))
  |> optional_whitespace_or_newline()
end

@doc """
Matches an optional comma in between attributes.

    div(id=a class=b)
    div(id=a, class=b)
"""
def attribute_separator(state) do
  state
  |> discard(~r/^,?/, :comma)
end

@doc """
Matches `foo='val'` or `foo`
"""
def attribute(state) do
  state
  |> one_of([
    &attribute_key_value/1,
    &attribute_key/1
  ])
end

# Matches `key=value`, allowing whitespace around the `=`.
def attribute_key_value(state) do
  state
  |> attribute_key()
  |> optional_whitespace()
  |> attribute_equal()
  |> optional_whitespace()
  |> attribute_value()
end

# Matches an attribute name such as `data-id` or `xml:lang`.
def attribute_key(state) do
  state
  |> eat(~r/^[A-Za-z][A-Za-z\-0-9:]*/, :attribute_key)
end

# The value is a full expression, delegated to the expression tokenizer.
def attribute_value(state) do
  state
  |> Expug.ExpressionTokenizer.expression(:attribute_value)
end

# Matches and discards the `=` between key and value.
def attribute_equal(state) do
  state
  |> discard(~r/^=/, :eq)
end
@doc "Matches whitespace; no tokens emitted"
def whitespace(state) do
  state
  |> discard(~r/^[ \t]+/, :whitespace)
end

@doc "Matches whitespace or newline; no tokens emitted"
def whitespace_or_newline(state) do
  state
  |> discard(~r/^[ \t\n]+/, :whitespace_or_newline)
end

# Like whitespace/1, but also succeeds on zero characters.
def optional_whitespace(state) do
  state
  |> discard(~r/^[ \t]*/, :whitespace)
end

# Like whitespace_or_newline/1, but also succeeds on zero characters.
def optional_whitespace_or_newline(state) do
  state
  |> discard(~r/^[ \t\n]*/, :whitespace_or_newline)
end

@doc "Matches `=` buffered text attached to an element"
def sole_buffered_text(state) do
  state
  |> optional_whitespace()
  |> buffered_text()
end

@doc "Matches `!=` unescaped text attached to an element"
def sole_unescaped_text(state) do
  state
  |> optional_whitespace()
  |> unescaped_text()
end

@doc "Matches inline raw text after an element"
def sole_raw_text(state) do
  # At least one whitespace must separate the element from its inline text.
  state
  |> whitespace()
  |> eat(~r/^[^\n]+/, :raw_text)
end

@doc "Matches `title` in `title= hello`"
def element_name(state) do
  state
  |> eat(~r/^[A-Za-z_][A-Za-z0-9:_\-]*/, :element_name)
end
def line_comment(state) do
state
|> one_of([
&(&1 |> discard(~r/^\/\/-/, :line_comment)),
&(&1 |> discard(~r/^-\s*(?:#|\/\/)/, :line_comment))
])
|> optional_whitespace()
|> eat(~r/^[^\n]*/, :line_comment)
|> optional(&subindent_block/1)
end
@doc "Matches a trailing `.` and the indented text block beneath it"
def block_text(state) do
  with_dot = eat(state, ~r/^\./, :block_text)
  subindent_block(with_dot)
end
# Consumes every subsequent line indented deeper than the current level.
# NOTE: `get_next_indent/1` throws `{:parse_error, _, [:indent]}` when no
# deeper-indented line follows, so callers wrap this in `optional/2` when
# the block is not mandatory.
def subindent_block(state) do
sublevel = state |> get_next_indent()
state
|> many_of(& &1 |> newlines() |> subindent(sublevel))
end
@doc "Matches one line indented by exactly `level` spaces/tabs, emitting :subindent"
def subindent(state, level) do
  # The indentation itself is discarded; only the line's content is kept.
  indented = discard(state, ~r/^[ \t]{#{level}}/, :whitespace)
  eat(indented, ~r/^[^\n]*/, :subindent)
end
# Walks the token list for the most recent :indent token and returns its
# text; defaults to "" when no :indent token exists.
def get_indent([{_, :indent, text} | _]), do: text
def get_indent([_ | rest]), do: get_indent(rest)
def get_indent([]), do: ""
@doc "Matches `//` HTML comments and any subindented block beneath them"
def html_comment(state) do
  state
  |> discard(~r[^//], :html_comment)
  |> optional_whitespace()
  # Fixed: `$` inside a character class is a literal `$`, so `[^\n$]*`
  # silently stopped on comment text containing `$`. Match to end of line.
  |> eat(~r/^[^\n]*/, :html_comment)
  |> optional(&subindent_block/1)
end
@doc "Matches `=` buffered text, in either single-line or multiline form"
def buffered_text(state) do
  one_of(state, [
    &one_line_buffered_text/1,
    &multiline_buffered_text/1
  ])
end
@doc "Matches single-line `= expression`, allowing `,([{`-terminated line continuations"
def one_line_buffered_text(state) do
  state
  |> discard(~r/^=/, :eq)
  |> optional_whitespace()
  # Fixed: `$` in a character class is a literal `$`, so `[^\n$]` wrongly
  # truncated expressions containing `$`. Only newline ends the match.
  |> eat(~r/^(?:[,\[\(\{]\s*\n|[^\n])+/, :buffered_text)
end
@doc "Matches `=` followed by an indented multiline expression block"
def multiline_buffered_text(state) do
  started =
    state
    |> discard(~r/^=/, :eq)
    |> start_empty(:buffered_text)

  subindent_block(started)
end
@doc "Matches `!=` unescaped text, in either single-line or multiline form"
def unescaped_text(state) do
  one_of(state, [
    &one_line_unescaped_text/1,
    &multiline_unescaped_text/1
  ])
end
@doc "Matches single-line `!= expression`, allowing `,([{`-terminated line continuations"
def one_line_unescaped_text(state) do
  state
  |> discard(~r/^!=/, :bang_eq)
  |> optional_whitespace()
  # Fixed: `$` in a character class is a literal `$`, so `[^\n$]` wrongly
  # truncated expressions containing `$`. Only newline ends the match.
  |> eat(~r/^(?:[,\[\(\{]\s*\n|[^\n])+/, :unescaped_text)
end
@doc "Matches `!=` followed by an indented multiline expression block"
def multiline_unescaped_text(state) do
  started =
    state
    |> discard(~r/^!=/, :bang_eq)
    |> start_empty(:unescaped_text)

  subindent_block(started)
end
@doc "Matches `| raw text` lines; the pipe itself is discarded"
def raw_text(state) do
  no_pipe = discard(state, ~r/^\|/, :pipe)

  no_pipe
  |> optional_whitespace()
  |> eat(~r/^[^\n]+/, :raw_text)
end
@doc "Matches a `- statement`, in either single-line or multiline form"
def statement(state) do
  one_of(state, [
    &one_line_statement/1,
    &multiline_statement/1
  ])
end
@doc "Matches a single-line `- statement`, allowing `,([{`-terminated line continuations"
def one_line_statement(state) do
  state
  |> discard(~r/^\-/, :dash)
  |> optional_whitespace()
  # Fixed: `$` in a character class is a literal `$`, so `[^\n$]` wrongly
  # truncated statements containing `$`. Only newline ends the match.
  |> eat(~r/^(?:[,\[\(\{]\s*\n|[^\n])+/, :statement)
end
@doc "Matches `-` followed by an indented multiline statement block"
def multiline_statement(state) do
  started =
    state
    |> discard(~r/^\-/, :dash)
    |> start_empty(:statement)

  subindent_block(started)
end
@doc ~S"""
Returns the next indentation level after some newlines.
Infers the last indentation level based on `doc`.
iex> source = "-#\n span"
iex> doc = [{0, :indent, 0}]
iex> Expug.Tokenizer.get_next_indent(%{tokens: doc, source: source, position: 2}, 0)
2
"""
# Convenience clause: derives the current level from the most recent
# :indent token in the document, then delegates to get_next_indent/2.
def get_next_indent(%State{tokens: doc} = state) do
level = get_indent(doc)
get_next_indent(state, level)
end
@doc ~S"""
Returns the next indentation level after some newlines.
iex> source = "-#\n span"
iex> Expug.Tokenizer.get_next_indent(%{tokens: [], source: source, position: 2}, 0)
2
iex> source = "-#\n\n\n span"
iex> Expug.Tokenizer.get_next_indent(%{tokens: [], source: source, position: 2}, 0)
2
"""
def get_next_indent(state, level) do
# Tentatively tokenize the upcoming newlines + indent to inspect the
# indentation depth; the tentative state is then discarded.
%{tokens: [{_, :indent, sublevel} |_], position: pos} =
state |> newlines() |> indent()
# A subindented block must be strictly deeper than the current level;
# otherwise signal a parse error via throw (caught by the tokenizer).
if sublevel <= level, do: throw {:parse_error, pos, [:indent]}
sublevel
end
# Shim for String.trim_trailing/1, which doesn't exist in Elixir 1.2.6. It
# falls back to String.rstrip/1 in these cases.
# The branch is chosen once at compile time by probing String's exports.
if Keyword.has_key?(String.__info__(:functions), :trim_trailing) do
defp trim_trailing(source) do
String.trim_trailing(source)
end
else
defp trim_trailing(source) do
String.rstrip(source)
end
end
end
|
lib/expug/tokenizer.ex
| 0.832543
| 0.864139
|
tokenizer.ex
|
starcoder
|
defmodule AdventOfCode2016.NoTimeForATaxiCab do
  @moduledoc """
  Advent of Code 2016, Day 1: follow `R`/`L` turn-and-walk instructions on a
  grid and report taxicab distances from the origin.
  """

  @north "North"
  @south "South"
  @east "East"
  @west "West"

  @doc "Taxicab distance from the origin to where the instructions end."
  def distance_to_final_location(instructions) do
    {_dir, {x, y}} = final_location(instructions)
    abs(x) + abs(y)
  end

  @doc "Final `{direction, {x, y}}` after following every instruction."
  def final_location(instructions) do
    instructions
    |> parse_instructions()
    |> Enum.reduce({@north, {0, 0}}, fn {left_or_right, dist}, acc ->
      turn_and_move(left_or_right, dist, acc)
    end)
  end

  @doc "Turns from `current_dir` and walks `dist` blocks in one step."
  def turn_and_move(left_or_right, dist, {current_dir, current_loc}) do
    new_dir = turn(current_dir, left_or_right)
    {new_dir, move(current_loc, new_dir, dist)}
  end

  @doc "Taxicab distance from the origin to the first location stepped on twice."
  def distance_to_first_location_visited_twice(instructions) do
    {_dir, {x, y}, _visited_locations} = first_location_visited_twice(instructions)
    abs(x) + abs(y)
  end

  @doc """
  Walks block-by-block, halting at the first location visited twice.
  Returns `{direction, location, visited_locations}`.
  """
  def first_location_visited_twice(instructions) do
    instructions
    |> parse_instructions()
    |> Enum.reduce_while({@north, {0, 0}, %{{0, 0} => 1}}, fn {left_or_right, dist}, acc ->
      visit_locations(left_or_right, dist, acc)
    end)
  end

  @doc "Turns, then walks `dist` blocks one at a time, tracking visited locations."
  def visit_locations(left_or_right, dist, {current_dir, current_loc, visited_locations}) do
    current_dir
    |> turn(left_or_right)
    |> do_visit_locations(dist, current_loc, visited_locations)
  end

  # Single-block steps so that intermediate locations also count as visits.
  def do_visit_locations(dir, 0, current_loc, visited_locations),
    do: {:cont, {dir, current_loc, visited_locations}}

  def do_visit_locations(dir, dist, current_loc, visited_locations) do
    new_loc = move(current_loc, dir, 1)

    if Map.has_key?(visited_locations, new_loc) do
      # Already visited: halt right away. (The previous version re-put the
      # key with the same value 1 before halting — a redundant no-op.)
      {:halt, {dir, new_loc, visited_locations}}
    else
      do_visit_locations(dir, dist - 1, new_loc, Map.put(visited_locations, new_loc, 1))
    end
  end

  @doc ~S"""
  Parses `"R2, L3"` (comma- or newline-separated) into `[{"R", 2}, {"L", 3}]`.
  """
  def parse_instructions(instructions) do
    instructions
    |> String.split([", ", "\n"], trim: true)
    |> Enum.map(fn instruction ->
      # First grapheme is the turn; the rest is the distance.
      {left_or_right, dist} = String.split_at(instruction, 1)
      {left_or_right, String.to_integer(dist)}
    end)
  end

  def turn(@north, "L"), do: @west
  def turn(@north, "R"), do: @east
  def turn(@south, "L"), do: @east
  def turn(@south, "R"), do: @west
  def turn(@east, "L"), do: @north
  def turn(@east, "R"), do: @south
  def turn(@west, "L"), do: @south
  def turn(@west, "R"), do: @north

  def move({x, y}, @north, dist), do: {x, y + dist}
  def move({x, y}, @south, dist), do: {x, y - dist}
  def move({x, y}, @east, dist), do: {x + dist, y}
  def move({x, y}, @west, dist), do: {x - dist, y}
end
|
lib/day1/no_time_for_a_taxicab.ex
| 0.670069
| 0.436442
|
no_time_for_a_taxicab.ex
|
starcoder
|
defmodule PinElixir.Refund do
  import PinElixir.Utils.RequestOptions
  import PinElixir.Utils.Response

  @moduledoc """
  Responsible for refunding of charges and retrieving refund details
  """

  @doc """
  Requests a full refund given a charge_token
  Returns a tuple
  ```
  {:ok,
   %{amount: 500, charge: "ch_NCoA7oBzrycXEPBTEUWNdQ",
     created_at: "2015-11-15T08:49:46Z", currency: "AUD", error_message: nil,
     status_message: "Pending", success: nil, token: "<KEY>"}}
  ```
  OR
  {:error, error_map}
  """
  def request(charge_token) do
    "#{charge_refund_url()}/#{charge_token}/refunds"
    |> HTTPotion.post(with_auth(headers: ["Content-Type": "application/json"]))
    |> handle_refund_request_response
  end

  @doc """
  Requests a partial refund given a charge_token
  Returns a tuple
  ```
  {:ok,
   %{amount: 100, charge: "ch_lENRObt9AvXvuUszuq5FBA",
     created_at: "2015-11-15T08:50:55Z", currency: "AUD", error_message: nil,
     status_message: "Pending", success: nil, token: "<KEY>"}}
  ```
  OR
  {:error, error_map}
  """
  def request(charge_token, partial_amount) do
    json = Poison.encode!(%{amount: partial_amount})

    "#{charge_refund_url()}/#{charge_token}/refunds"
    |> HTTPotion.post(with_auth(headers: ["Content-Type": "application/json"], body: json))
    |> handle_refund_request_response
  end

  # 201 Created: decode the body and return the refund details.
  defp handle_refund_request_response(%{status_code: 201, body: body}) do
    {:ok, decode(body).response}
  end

  # 422 Unprocessable Entity: normalize into an {:error, _} tuple.
  defp handle_refund_request_response(%{status_code: 422, body: body}) do
    body
    |> to_error_tuple
  end

  @doc """
  Retreives all refunds
  Returns a tuple
  ```
  {:ok,
   %{pagination: %{count: 9, current: 1, next: nil, pages: 1, per_page: 25,
       previous: nil},
     refunds: [...]}}
  ```
  OR
  {:error, error_map}
  """
  def get do
    refund_url()
    |> HTTPotion.get(with_auth())
    |> handle_get
  end

  @doc """
  Given a charge token, retreives associated refund(s)
  Returns a tuple
  ```
  {:ok,
   %{pagination: %{count: 1, current: 1, next: nil, pages: 1, per_page: 25,
       previous: nil},
     refunds: [...]}}
  ```
  OR
  {:error, error_map}
  """
  def get(charge_token) do
    "#{charge_refund_url()}/#{charge_token}/refunds"
    |> HTTPotion.get(with_auth())
    |> handle_get
  end

  defp handle_get(%{status_code: 200, body: body}) do
    decoded = decode(body)
    {:ok, %{refunds: decoded.response, pagination: decoded.pagination}}
  end

  # Any non-200 response is treated as an error.
  defp handle_get(%{status_code: _status, body: body}) do
    body |> to_error_tuple
  end

  # The base URL is read at call time so runtime configuration changes are
  # honored; the previous `@pin_url Application.get_env(...)` attribute froze
  # the value at compile time.
  defp pin_url do
    Application.get_env(:pin_elixir, :pin_url)
  end

  defp refund_url do
    "https://#{pin_url()}/refunds"
  end

  defp charge_refund_url do
    "https://#{pin_url()}/charges"
  end
end
|
lib/refunds/refund.ex
| 0.683842
| 0.699139
|
refund.ex
|
starcoder
|
defmodule OnCrash do
  @moduledoc """
  Convinence module to wrap a monitor and call a function when a process ends. Useful to setp cleanup tasks on process shutdown.
  ```elixir
  worker = spawn(fn ->
  OnCrash.call(fn -> cleanup() end)
  do_the_work()
  end)
  ```
  OnCrash is always called when the process finished. So it will aso run the callback when the process ends with a `:normal` exit
  reason. To have exit reason specific code a fun with a `reason` parameter can be specified:
  ```elixir
  worker = spawn(fn ->
  OnCrash.call(fn reason ->
  if reason != :normal do
  IO.puts("Worker died with reason \#{inspect(reason)}")
  cleanup()
  end
  end)
  do_the_work()
  end)
  ```
  """

  @doc """
  Registers the given fun as callback to be executed once the process exits.
  pid is provided it binds to the given process. Otherwise it binds to the current
  executing process.
  ```elixir
  worker = spawn(fn ->
  OnCrash.call(fn -> cleanup() end)
  do_the_work()
  end)
  ```
  And to differentiate
  ```elixir
  worker = spawn(fn ->
  OnCrash.call(fn reason ->
  case reason do
  # On raise "oh_no!"
  {%RuntimeError{message: "oh_no!"}, _backtrace} -> you_code_here()
  # On throw(:oh_no!)
  {{:nocatch, :oh_no!}, _backtrace} -> you_code_here()
  # On exit(:oh_no!)
  :oh_no! -> you_code_here()
  end
  cleanup()
  end)
  do_the_work()
  end)
  ```
  """
  @spec call(pid() | nil, (() -> any()) | (reason -> any())) :: true when reason: any()
  def call(pid \\ self(), fun) do
    caller = self()

    # Spawn a watcher that monitors `pid` and invokes the callback when the
    # monitored process goes down, for any reason (including :normal).
    spawn(fn ->
      ref = Process.monitor(pid)
      send(caller, :continue)

      receive do
        {:DOWN, ^ref, :process, ^pid, reason} ->
          # Arity-0 callbacks ignore the reason; arity-1 callbacks receive it.
          case fun do
            fun when is_function(fun, 0) -> fun.()
            fun when is_function(fun, 1) -> fun.(reason)
          end
      end
    end)

    # Block until the watcher confirms its monitor is in place, so the caller
    # cannot exit before being watched.
    receive do
      :continue -> true
    end
  end
end
|
lib/oncrash.ex
| 0.627723
| 0.768081
|
oncrash.ex
|
starcoder
|
defmodule Vax.Adapter.Query do
@moduledoc """
Translates Ecto queries into lists of storage objects. Only queries that
filter by primary key (via `==` or `in`, possibly combined with `or`) are
supported; anything else raises.
"""
alias Vax.Adapter.Helpers
# Resolves a query + bound params into the storage objects it addresses.
# Raises unless the query has exactly one `where` expression.
def query_to_objs(query, params, bucket) do
{source, schema} = query.from.source
primary_key = Helpers.schema_primary_key!(schema)
case query.wheres do
[%{expr: expr}] ->
where_expr_to_key(expr, source, primary_key, bucket, params) |> List.flatten()
wheres ->
raise "Vax only supports a single `where` expression per query. Received #{Enum.count(wheres)}"
end
end
# Returns the schema module the query selects from.
def select_schema(query) do
{_source, schema} = query.from.source
schema
end
# Extracts the selected field names from the query's select metadata.
def select_fields(
%{select: %{from: {:any, {:source, _source_schema, _, fields_and_types}}}} = _query_meta
) do
Keyword.keys(fields_and_types)
end
# `or`-combined clauses: resolve each side independently.
defp where_expr_to_key({:or, _, exprs}, source, primary_key, bucket, params) do
Enum.map(exprs, &where_expr_to_key(&1, source, primary_key, bucket, params))
end
# `field == value` — the Ecto AST shape for an equality on source 0.
# `ix_or_literal` is either a `{:^, _, [ix]}` param placeholder or a literal.
defp where_expr_to_key(
{:==, [], [{{:., [], [{:&, [], [0]}, field]}, [], []}, ix_or_literal]},
source,
primary_key,
bucket,
params
) do
assert_primary_key!(field, primary_key)
param = maybe_fetch_param(ix_or_literal, params)
[Helpers.build_object(source, param, bucket)]
end
# `field in values` — resolves to one object per value.
defp where_expr_to_key(
{:in, [], [{{:., [], [{:&, [], [0]}, field]}, [], []}, ix_or_literal]} = _expr,
source,
primary_key,
bucket,
params
) do
assert_primary_key!(field, primary_key)
param = maybe_fetch_param(ix_or_literal, params)
Enum.map(param, &Helpers.build_object(source, &1, bucket))
end
# Any other expression shape is unsupported.
defp where_expr_to_key(
expr,
_source,
_primary_key,
_bucket,
_params
) do
raise "Unsupported expression #{Macro.to_string(expr)}"
end
# Filtering is only allowed on the schema's primary key.
defp assert_primary_key!(field, primary_key) do
if field != primary_key,
do: raise("Vax only supports filtering by primary key in where expressions")
:ok
end
# Dereferences a `^param` placeholder against `params`; literals pass through.
defp maybe_fetch_param(ix_or_literal, params) do
case ix_or_literal do
{:^, [], [ix | _]} -> Enum.at(params, ix)
literal -> literal
end
end
end
|
lib/vax/adapter/query.ex
| 0.716516
| 0.4099
|
query.ex
|
starcoder
|
defmodule LoadResource.Plug do
  @moduledoc """
  This plug allows you to specify resources that your app should load and (optionally) validate as part of a request.

  ## Examples

  Load a Book resource using the `id` param on the incoming request:

  ```
  plug LoadResource.Plug, [model: Book, handler: &MyErrorHandler.not_found/1]
  ```

  Use the `book_id` param instead of `id` (useful when composing multiple resources):

  ```
  plug LoadResource.Plug, [model: Book, id_key: "book_id", handler: &MyErrorHandler.not_found/1]
  ```

  Load a Quote that matches to a previously loaded Book:

  ```
  plug LoadResource.Plug, [model: Quote, scopes: [:book], handler: &MyErrorHandler.not_found/1]
  ```

  (See `LoadResource.Scope` for more information on scopes.)

  ## Accepted Options

  * `model`: an Ecto model representing the resource you want to load (required)
  * `handler`: a function/1 that gets called if the record can't be found and `required: true` (required)
  * `id_key`: what param in the incoming request represents the ID of the record (optional, default: "id")
  * `required`: whether to halt the plug pipeline and return an error response if the record can't be found (optional, default: true)
  * `resource_name`: under what value to store the resource in `conn.assigns` (optional, default: derived from the model)
  * `scopes`: an list of atoms and/or `LoadResource.Scope` structs (optional, default: [])
  """
  import Plug.Conn
  alias LoadResource.Scope

  @doc """
  Initialize the plug with any options provided in the controller or pipeline, including calculating the resource_name (which key will written to in `conn.assigns`) from the model.
  """
  def init(default_options) do
    options = Enum.into(default_options, %{required: true})
    # In order to allow us to load multiple resource for one controller, we need to have unique
    # names for the value that gets stored on conn. To do that, we generate the name of the
    # resource from the model name.
    # It's safe to use Macro.underscore here because we know the text only contains characters
    # valid for Elixir identifiers. (See https://hexdocs.pm/elixir/Macro.html#underscore/1.)
    model = Map.fetch!(options, :model)

    resource_name =
      options[:resource_name] ||
        String.to_atom(Macro.underscore(List.last(String.split(to_string(model), "."))))

    Map.put(options, :resource_name, resource_name)
  end

  @doc """
  Load a resource for a given request based on the previously-provided options.
  """
  # `handler` and `resource_name` are matched (but unused here) to assert the
  # options were initialized; underscore prefixes silence compiler warnings.
  def call(conn, %{model: model, handler: _handler, resource_name: _resource_name} = options) do
    id_key = options[:id_key] || "id"

    # Scope the query to the record whose :id matches the request param.
    # The inner fn takes its own conn argument (previously it shadowed the
    # outer `conn`, triggering a warning).
    id_scope = %Scope{
      column: :id,
      value: fn scoped_conn -> scoped_conn.params[id_key] end
    }

    scopes = options[:scopes] || []

    model
    |> LoadResource.QueryBuilder.build(conn, [id_scope] ++ scopes)
    |> repo().one
    |> handle_resource(conn, options)
  end

  # Record missing and required: invoke the error handler and halt.
  defp handle_resource(nil, conn, %{required: true, handler: handler}) do
    conn
    |> handler.()
    |> halt
  end

  # Record missing but optional: pass the conn through unchanged.
  defp handle_resource(nil, conn, _options) do
    conn
  end

  # Record found: stash it in conn.assigns under resource_name.
  defp handle_resource(resource, conn, %{resource_name: resource_name}) do
    assign(conn, resource_name, resource)
  end

  # Repo is looked up at runtime from application config.
  defp repo() do
    Application.get_env(:load_resource, :repo)
  end
end
|
lib/load_resource/plug.ex
| 0.885495
| 0.839931
|
plug.ex
|
starcoder
|
defmodule DarkMatter.Decimals.Variance do
@moduledoc """
Decimal variance functions
"""
@moduledoc since: "1.0.0"
alias DarkMatter.Decimals.Arithmetic
alias DarkMatter.Decimals.Comparison
alias DarkMatter.Decimals.Conversion
@type minmax() :: {DarkMatter.strict_numeric(), DarkMatter.strict_numeric()}
# Default {empty-list, single-element} results for each statistic.
@defaults %{
variance: {0, 0},
variance_percent: {0, 100},
max_variance_percent: {100, 100}
}
@doc """
Calculate variance
"""
@spec variance([DarkMatter.numeric()]) :: Decimal.t()
def variance([]) do
@defaults.variance
|> elem(0)
|> Conversion.cast_decimal()
end
def variance([_]) do
@defaults.variance
|> elem(1)
|> Conversion.cast_decimal()
end
def variance(list) when is_list(list) do
# Delegates the numeric work to Numerix, casting the result back to Decimal.
list
|> Enum.map(&Conversion.to_number/1)
|> Numerix.Statistics.variance()
|> Conversion.cast_decimal()
end
@doc """
Variance percent relative to the mean
"""
@spec variance_percent([DarkMatter.numeric()], minmax()) :: Decimal.t()
def variance_percent(list, default \\ @defaults.variance_percent)
def variance_percent([], {default, _}) do
Conversion.cast_decimal(default)
end
def variance_percent([_], {_, default}) do
Conversion.cast_decimal(default)
end
def variance_percent(list, _default) when is_list(list) do
mean = Arithmetic.decimal_avg(list)
# Average of per-item |item - mean| / item ratios, expressed as a
# percentage offset from 100. Items equal to 0 contribute 0 to avoid
# division by zero.
list
|> Enum.map(&Conversion.cast_decimal!/1)
|> Enum.map(fn item ->
if Comparison.decimal_equal?(item, 0) do
Decimal.new(0)
else
item
|> Arithmetic.decimal_sub(mean)
|> Decimal.abs()
|> Arithmetic.decimal_div(item)
end
end)
|> Arithmetic.decimal_avg()
|> Arithmetic.to_percentage()
|> Arithmetic.decimal_add(100)
end
@doc """
Determine if a list is a uniform set of `t:DarkMatter.numeric/0`.
"""
@spec decimal_uniform?([DarkMatter.numeric()]) :: boolean()
def decimal_uniform?([]) do
true
end
def decimal_uniform?([_]) do
true
end
def decimal_uniform?(list) when is_list(list) do
# Uniform iff every element casts to the same Decimal (set size of 1).
list
|> Enum.map(&Conversion.cast_decimal!/1)
|> MapSet.new()
|> MapSet.size()
|> Comparison.decimal_equal?(1)
end
@doc """
Max entries percent variance relative to the mean
"""
@spec max_variance_percent([DarkMatter.numeric()], minmax()) :: Decimal.t()
def max_variance_percent(list, default \\ @defaults.max_variance_percent)
def max_variance_percent([], {default, _}) do
Conversion.cast_decimal(default)
end
def max_variance_percent([_], {_, default}) do
Conversion.cast_decimal(default)
end
def max_variance_percent(list, _default) when is_list(list) do
mean = Arithmetic.decimal_avg(list)
cond do
# All values identical: by definition 100% (no deviation).
decimal_uniform?(list) ->
Conversion.cast_decimal(100)
# Zero mean would divide by zero below; return 0 instead.
Comparison.decimal_equal?(mean, 0) ->
Conversion.cast_decimal(0)
# Largest absolute deviation from the mean, as a percentage offset
# from 100.
true ->
list
|> Enum.map(&Conversion.cast_decimal!/1)
|> Enum.map(&Arithmetic.decimal_sub(&1, mean))
|> Enum.max_by(&Decimal.abs/1)
|> Arithmetic.decimal_div(mean)
|> Arithmetic.to_percentage()
|> Arithmetic.decimal_add(100)
end
end
end
|
lib/dark_matter/decimals/variance.ex
| 0.908684
| 0.438966
|
variance.ex
|
starcoder
|
defmodule Util.Header do
  @moduledoc """
  This module defines a structure for interpreting some of the ROM's metadata
  """
  use Bitwise

  # ROM/SRAM sizes are stored as a shift count applied to this base (1 KiB).
  @base_size 0x400

  defstruct [
    :title,
    :rom_makeup,
    :rom_type,
    :rom_size,
    :sram_size,
    :license_id,
    :version_number,
    :checksum,
    :checksum_complement,
    :native_mode_interrupts,
    :emulation_mode_interrupts
  ]

  @type rom_makeup :: :lorom | :hirom | :sa1rom | :lofastrom | :hifastrom | :exlorom | :exhirom
  @type rom_type :: :rom | :ram | :sram | :dsp1 | :fx

  @type t :: %Util.Header{
          title: String.t(),
          rom_makeup: rom_makeup(),
          rom_type: rom_type(),
          rom_size: non_neg_integer(),
          sram_size: non_neg_integer(),
          license_id: integer(),
          version_number: integer(),
          checksum: integer(),
          checksum_complement: integer(),
          native_mode_interrupts: Util.InterruptVector.t(),
          emulation_mode_interrupts: Util.InterruptVector.t()
        }

  @doc """
  Parses a raw 64-byte header binary into a `Util.Header` struct.
  Returns `:invalid` when any field fails validation or the binary does not
  have the expected shape.
  """
  @spec new(binary()) :: Util.Header.t() | :invalid
  def new(<<
        raw_title::binary-size(21),
        raw_rom_makeup::size(8),
        raw_rom_type::size(8),
        raw_rom_size::size(8),
        raw_sram_size::size(8),
        license_id::size(8),
        version::size(8),
        checksum_complement::size(16),
        checksum::size(16),
        _unknown::size(8),
        native_vectors::binary-size(16),
        emulation_vectors::binary-size(16)
      >>) do
    with {:ok, title} <- determine_title(raw_title),
         {:ok, rom_makeup} <- determine_rom_makeup(raw_rom_makeup),
         {:ok, rom_type} <- determine_rom_type(raw_rom_type),
         rom_size <- determine_size(raw_rom_size),
         sram_size <- determine_size(raw_sram_size) do
      %Util.Header{
        title: title,
        rom_makeup: rom_makeup,
        rom_type: rom_type,
        rom_size: rom_size,
        sram_size: sram_size,
        license_id: license_id,
        version_number: version,
        checksum: checksum,
        checksum_complement: checksum_complement,
        native_mode_interrupts: parse_native_interrupts(native_vectors),
        emulation_mode_interrupts: parse_emulation_interrupts(emulation_vectors)
      }
    else
      _ -> :invalid
    end
  end

  # Binaries that don't match the expected header layout previously raised a
  # FunctionClauseError; the @spec promises :invalid, so return it.
  def new(_other), do: :invalid

  # The title is valid if it is valid UTF-8; trim its space padding.
  # (The old `codepoints |> List.to_string` round-trip was an identity op.)
  defp determine_title(raw_title) do
    if String.valid?(raw_title) do
      {:ok, String.trim(raw_title)}
    else
      :invalid
    end
  end

  defp determine_rom_makeup(0x20), do: {:ok, :lorom}
  defp determine_rom_makeup(0x21), do: {:ok, :hirom}
  defp determine_rom_makeup(0x23), do: {:ok, :sa1rom}
  defp determine_rom_makeup(0x30), do: {:ok, :lofastrom}
  defp determine_rom_makeup(0x31), do: {:ok, :hifastrom}
  defp determine_rom_makeup(0x32), do: {:ok, :exlorom}
  defp determine_rom_makeup(0x35), do: {:ok, :exhirom}
  defp determine_rom_makeup(_), do: :invalid

  # eventually figure this out...
  defp determine_rom_type(_), do: {:ok, :rom}

  # Size byte is a power-of-two shift applied to the 1 KiB base.
  defp determine_size(raw_size) do
    @base_size <<< raw_size
  end

  defp parse_native_interrupts(<<
         _unknown1::binary-size(2),
         _unknown2::binary-size(2),
         cop_bytes::binary-size(2),
         break_bytes::binary-size(2),
         abort_bytes::binary-size(2),
         nmi_bytes::binary-size(2),
         reset_bytes::binary-size(2),
         irq_bytes::binary-size(2)
       >>) do
    %Util.InterruptVector{
      coprocessor: correct_interrupt_address(cop_bytes),
      break: correct_interrupt_address(break_bytes),
      abort: correct_interrupt_address(abort_bytes),
      non_maskable: correct_interrupt_address(nmi_bytes),
      reset: correct_interrupt_address(reset_bytes),
      irq: correct_interrupt_address(irq_bytes)
    }
  end

  # In emulation mode BRK and IRQ share a single vector.
  defp parse_emulation_interrupts(<<
         _unknown1::binary-size(2),
         _unknown2::binary-size(2),
         cop_bytes::binary-size(2),
         _unknown3::binary-size(2),
         abort_bytes::binary-size(2),
         nmi_bytes::binary-size(2),
         reset_bytes::binary-size(2),
         break_and_irq_bytes::binary-size(2)
       >>) do
    break_and_irq_address = correct_interrupt_address(break_and_irq_bytes)

    %Util.InterruptVector{
      coprocessor: correct_interrupt_address(cop_bytes),
      abort: correct_interrupt_address(abort_bytes),
      non_maskable: correct_interrupt_address(nmi_bytes),
      reset: correct_interrupt_address(reset_bytes),
      break: break_and_irq_address,
      irq: break_and_irq_address
    }
  end

  # Vectors are stored little-endian; swap the two bytes into an address.
  defp correct_interrupt_address(<<lower::size(8), upper::size(8)>>) do
    upper <<< 8 ||| lower
  end
end
|
lib/util/header.ex
| 0.671686
| 0.528473
|
header.ex
|
starcoder
|
defmodule Utils.Feedback do
@moduledoc """
Utils.Feedback defines tests using the `feedback` macro.
Each `feedback` macro creates an associated Utils.feedback function and
ensures that each has a corresponding solution in Utils.Solutions.
## Examples
```elixir
feedback :example do
answer = get_answers()
assert answer == 5
end
```
Creates a a Utils.feedback(:example, answers) function clause.
"""
Module.register_attribute(Utils.Feedback, :test_names, accumulate: true)
require Utils.Macros
import Utils.Macros
# Allows for tests that don't require input
def test(test_name), do: test(test_name, "")
# Accepts a compiled module (as returned by defmodule) as the answer.
def test({:module, _, _, _} = module, test_name), do: test(test_name, module)
# Runs the ExUnit test for `test_name` when an answer (or full list of
# answers) has been provided; otherwise prompts the learner.
def test(test_name, answers) do
answers_in_list_provided =
is_list(answers) and Enum.all?(answers, fn each -> not is_nil(each) end)
answer_provided = not is_list(answers) and not is_nil(answers)
if answer_provided or answers_in_list_provided or Mix.env() == :test do
ExUnit.start(auto_run: false)
test_module(test_name, answers)
ExUnit.run()
else
"Please enter an answer above."
end
end
feedback :card_count_four do
next_count = get_answers()
assert next_count == 1
end
feedback :card_count_king do
next_count = get_answers()
assert next_count === 4
end
feedback :card_count_random do
[card, next_count] = get_answers()
cond do
card in 2..6 ->
assert next_count === 1
card in 7..9 ->
assert next_count === 0
card in 10..14 ->
assert next_count === -1
true ->
raise "Something went wrong. Please reset the exercise."
end
end
feedback :habit_tracker_definition do
[small, medium, large] = get_answers()
assert small == 5
assert medium == 20
assert large == 30
end
feedback :habit_tracker_add do
total_points = get_answers()
assert total_points == 20 + 5
end
feedback :habit_tracker_percentage do
percentage = get_answers()
assert percentage == (5 + 20) / 40 * 100
end
feedback :habit_tracker_penalties_1 do
total_points = get_answers()
assert total_points == 5 + 20 + 30 * 0.5
end
feedback :habit_tracker_penalties_2 do
total_points = get_answers()
assert total_points == 5 / 2 * 3 + 20 / 2 * 3
end
feedback :habit_tracker_rewards do
total_points = get_answers()
assert total_points == 20 * 1.6 + 5 * 1.6 + 30 * 0.5
end
feedback :percentage do
[completed_items, total_items, percentage] = get_answers()
assert completed_items == 10,
"completed_items should always be 10. Please reset the exercise."
assert total_items == 100, "total_items should always be 100. Please reset the exercise."
assert percentage == completed_items / total_items * 100
end
feedback :pythagorean_c_square do
c_square = get_answers()
assert c_square == 10 ** 2 + 10 ** 2
end
feedback :pythagorean_c do
c = get_answers()
assert c == :math.sqrt(200)
end
feedback :string_concatenation do
answer = get_answers()
assert is_bitstring(answer), "the answer should be a string."
assert "Hi, " <> _name = answer, "the answer should be in the format: Hi, name."
assert Regex.match?(~r/Hi, \w+\./, answer), "the answer should end in a period."
end
feedback :string_interpolation do
answer = get_answers()
assert is_bitstring(answer), "the answer should be a string."
assert Regex.match?(~r/I have/, answer),
"the answer should be in the format: I have 10 classmates"
assert Regex.match?(~r/I have \d+/, answer),
"the answer should contain an integer for classmates."
assert Regex.match?(~r/I have \d+ classmates\./, answer) ||
answer === "I have 1 classmate.",
"the answer should end in a period."
end
feedback :tip_amount do
[cost_of_the_meal, tip_rate, tip_amount] = get_answers()
assert tip_rate == 0.20, "tip rate should be 0.2."
assert cost_of_the_meal == 55.5, "cost_of_the_meal should be 55.5."
assert tip_amount === cost_of_the_meal * tip_rate,
"tip_amount should be cost_of_the_meal * tip_rate."
end
feedback :rock_paper_scissors_ai do
[player_choice, ai_choice] = get_answers()
case player_choice do
:rock ->
assert ai_choice === :paper,
"when player_choice is :rock, ai_choice should be :paper."
:paper ->
assert ai_choice === :scissors,
"when player_choice is :paper, ai_choice should be :scissors."
:scissors ->
assert ai_choice === :rock,
"when player_choice is :scissors, ai_choice should be :rock."
end
end
feedback :rock_paper_scissors_two_player do
[player1_choice, player2_choice, winner] = get_answers()
case {player1_choice, player2_choice} do
{:rock, :scissors} -> assert winner == :player1
{:paper, :rock} -> assert winner == :player1
{:scissors, :paper} -> assert winner == :player1
{:scissors, :rock} -> assert winner == :player2
{:rock, :paper} -> assert winner == :player2
{:paper, :scissors} -> assert winner == :player2
_ -> assert winner == :draw
end
end
feedback :rocket_ship do
force = get_answers()
assert force == 20
end
feedback :startup_madlib do
[
madlib,
name_of_company,
a_defined_offering,
a_defined_audience,
solve_a_problem,
secret_sauce
] = answers = get_answers()
assert Enum.all?(answers, fn each -> is_bitstring(each) and String.length(each) > 0 end),
"each variable should be bound to a non-empty string"
assert madlib ==
"My company, #{name_of_company} is developing #{a_defined_offering} to help #{a_defined_audience} #{solve_a_problem} with #{secret_sauce}."
end
feedback :nature_show_madlib do
[
animal,
country,
plural_noun,
a_food,
type_of_screen_device,
noun,
verb1,
verb2,
adjective,
madlib
] = answers = get_answers()
assert Enum.all?(answers, fn each -> is_bitstring(each) and String.length(each) > 0 end),
"each variable should be bound to a non-empty string"
assert madlib ==
"The majestic #{animal} has roamed the forests of #{country} for thousands of years. Today she wanders in search of #{plural_noun}. She must find food to survive. While hunting for #{a_food}, she found a/an #{type_of_screen_device} hidden behind a #{noun}. She has never seen anything like this before. What will she do? With the device in her teeth, she tries to #{verb1}, but nothing happens. She takes it back to her family. When her family sees it, they quickly #{verb2}. Soon, the device becomes #{adjective}, and the family decides to put it back where they found it."
end
feedback :boolean_diagram1 do
answer = get_answers()
assert answer == false
end
feedback :boolean_diagram2 do
answer = get_answers()
assert answer == true
end
feedback :boolean_diagram3 do
answer = get_answers()
assert answer == false
end
feedback :boolean_diagram4 do
answer = get_answers()
assert answer == false
end
feedback :boolean_diagram5 do
answer = get_answers()
assert answer == true
end
feedback :boolean_diagram6 do
answer = get_answers()
assert answer == true
end
feedback :guess_the_word do
[guess, answer, correct] = answers = get_answers()
assert Enum.all?(answers, &is_bitstring/1),
"Ensure `guess`, `answer`, and `correct` are all strings"
if guess == answer do
assert correct == "Correct!"
else
assert correct == "Incorrect."
end
end
feedback :guess_the_number do
[guess, answer, correct] = get_answers()
assert is_integer(guess), "Ensure `guess` is an integer"
assert is_integer(answer), "Ensure `answer` is an integer"
assert is_bitstring(correct), "Ensure `correct` is a string"
cond do
guess == answer -> assert correct == "Correct!"
guess < answer -> assert correct == "Too low!"
guess > answer -> assert correct == "Too high!"
end
end
feedback :copy_file do
file_name = get_answers()
assert {:ok, "Copy me!"} = File.read("../data/#{file_name}")
end
feedback :shopping_list do
shopping_list = get_answers()
list = [] ++ ["grapes", "walnuts", "apples"]
list = list ++ ["blueberries", "chocolate", "pizza"]
list = list -- ["grapes", "walnuts"]
list = list ++ ["banana", "banana", "banana"]
assert is_list(shopping_list), "Ensure shopping_list is still a list."
assert Enum.sort(list) == Enum.sort(shopping_list),
"Ensure your shopping list has all of the expected items"
assert shopping_list == list, "Ensure you add and remove items in the expected order"
end
feedback :shopping_list_with_quantities do
  shopping_list = get_answers()

  # Expected sequence of keyword-list additions and removals.
  list = [] ++ [milk: 1, eggs: 12]
  list = list ++ [bars_of_butter: 2, candies: 10]
  list = list -- [bars_of_butter: 2]
  list = list -- [candies: 10]
  list = list ++ [candies: 5]

  assert is_list(shopping_list), "Ensure shopping_list is still a list."

  # Fixed: previously compared Enum.sort(shopping_list) against itself,
  # which always passed. Compare against the expected list instead.
  assert Enum.sort(shopping_list) == Enum.sort(list),
         "Ensure your shopping list has all of the expected items"

  assert shopping_list == list, "Ensure you add and remove items in the expected order"
end
feedback :family_tree do
family_tree = get_answers()
assert is_map(family_tree), "Ensure `family_tree` is a map."
assert %{name: "Arthur"} = family_tree, "Ensure `family_tree` starts with Arthur."
assert %{name: "Arthur", parents: _list} = family_tree,
"Ensure Arthur in `family_tree` has a list of parents."
assert family_tree == Utils.Solutions.family_tree()
end
feedback :naming_numbers do
naming_numbers = get_answers()
assert is_function(naming_numbers),
"Ensure you bind `naming_numbers` to an anonymous function."
list = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
Enum.each(1..9, fn integer ->
assert naming_numbers.(integer) == Enum.at(list, integer)
end)
end
feedback :numbering_names do
numbering_names = get_answers()
list = ["zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]
capital_list = Enum.map(list, &String.capitalize/1)
assert is_function(numbering_names),
"Ensure you bind `naming_numbers` to an anonymous function."
Enum.each(list, fn name ->
assert numbering_names.(name) ==
Enum.find_index(list, fn each -> each == String.downcase(name) end)
end)
Enum.each(capital_list, fn name ->
assert numbering_names.(name) ==
Enum.find_index(list, fn each -> each == String.downcase(name) end)
end)
end
feedback :define_character_struct do
character_module = get_answers()
assert Keyword.get(character_module.__info__(:functions), :__struct__),
"Ensure you use `defstruct`"
assert match?(%{name: nil, class: nil, weapon: nil}, struct(character_module)),
"Ensure you use `defstruct` with :name, :class, and :weapon"
assert_raise ArgumentError, fn ->
struct!(character_module, %{weapon: "", class: ""})
end
end
feedback :character_structs do
[arthur, gandalf, jarlaxle] = get_answers()
assert is_struct(arthur), "Ensure `arthur` is a struct."
assert is_struct(gandalf), "Ensure `gandalf` is a struct."
assert is_struct(jarlaxle), "Ensure `jarlaxle` is a struct."
assert %{name: "Arthur", weapon: "sword", class: "warrior"} = arthur
assert %{name: "Gandalf", weapon: "staff", class: "wizard"} = gandalf
assert %{name: "Jarlaxle", weapon: "daggers", class: "rogue"} = jarlaxle
end
feedback :character_dialogue do
dialogue_module = get_answers()
character_permutations =
for class <- ["wizard", "rogue", "warrior", nil],
weapon <- ["daggers", "sword", "staff", nil],
name <- [Utils.Factory.name(), nil] do
%{class: class, weapon: weapon, name: name}
end
enemy = Utils.Factory.name()
Enum.each(character_permutations, fn character ->
assert dialogue_module.greet(character) == "Hello, my name is #{character.name}."
assert dialogue_module.attack(character, enemy) ==
"#{character.name} attacks #{enemy} with a #{character.weapon}."
relinquish_weapon_dialogue =
case character.class do
"rogue" -> "Fine, have my #{character.weapon}. I have more hidden anyway."
"wizard" -> "You would not part an old man from his walking stick?"
"warrior" -> "Have my #{character.weapon} then!"
_ -> "My precious!"
end
assert dialogue_module.relinquish_weapon(character) == relinquish_weapon_dialogue
matching_weapon_dialogue =
case {character.class, character.weapon} do
{"wizard", "staff"} -> "My lovely magical staff"
{"rogue", "daggers"} -> "Hidden and deadly."
{"warrior", "sword"} -> "My noble sword!"
{_, nil} -> "I'm unarmed!"
{_, _} -> "I'm not sure a #{character.weapon} suits a #{character.class}."
end
assert dialogue_module.matching_weapon(character) == matching_weapon_dialogue
end)
end
feedback :define_pokemon_struct do
pokemon_module = get_answers()
assert Keyword.get(pokemon_module.__info__(:functions), :__struct__),
"Ensure you use `defstruct`."
assert match?(%{name: nil, type: nil, speed: nil}, struct(pokemon_module)),
"Ensure you use `defstruct` with :name, :type, :health, :attack, and :speed."
assert match?(%{health: 20, attack: 5}, struct(pokemon_module)),
"Ensure :health has a default value of 20 and :attack has a default value of 5."
end
feedback :pokemon_structs do
[charmander, bulbasaur, squirtle] = get_answers()
assert is_struct(charmander), "Ensure `charmander` is a struct."
assert is_struct(squirtle), "Ensure `squirtle` is a struct."
assert is_struct(bulbasaur), "Ensure `bulbasaur` is a struct."
assert %{name: "Charmander", type: :fire, attack: 5, health: 20, speed: 20} = charmander
assert %{name: "Bulbasaur", type: :grass, attack: 5, health: 20, speed: 15} = bulbasaur
assert %{name: "Squirtle", type: :water, attack: 5, health: 20, speed: 10} = squirtle
end
# Verifies the learner's PokemonBattle module (attack/2 and battle/2) against
# the reference solution, using a randomly chosen pair of generated pokemon.
feedback :pokemon_battle do
  [pokemon_battle_module, pokemon_module] = get_answers()

  # Guard: the Pokemon struct exercise above must be completed first.
  assert Keyword.get(pokemon_module.__info__(:functions), :__struct__),
         "Ensure you complete the `Pokemon` module above first."

  assert match?(
           %{name: nil, type: nil, speed: nil, health: 20, attack: 5},
           struct(pokemon_module)
         ),
         "Ensure you complete the `Pokemon` module above first."

  # Build a pool of pokemon covering every speed/type/attack/health permutation.
  pokemon_types =
    for speed <- 10..30//5,
        type <- [:water, :fire, :grass],
        attack <- 5..40//5,
        health <- 5..20//5 do
      struct(pokemon_module, %{
        name: "#{Atom.to_string(type)} pokemon",
        speed: speed,
        attack: attack,
        health: health
      })
    end

  # Pick one random pair and check attack/2 and battle/2 against the solution.
  pokemon_types
  |> Enum.shuffle()
  |> Enum.take(2)
  |> Enum.chunk_every(2)
  |> Enum.each(fn [pokemon1, pokemon2] ->
    attacked_pokemon = pokemon_battle_module.attack(pokemon1, pokemon2)
    multiplier = Utils.Solutions.PokemonBattle.multiplier(pokemon1, pokemon2)

    # NOTE(review): expected damage is computed from pokemon2.attack (the
    # defender's stat); if the intended rule is attacker-based damage this
    # should be pokemon1.attack — confirm against Utils.Solutions.PokemonBattle.
    assert attacked_pokemon == %{
             pokemon2
             | health: pokemon2.health - pokemon2.attack * multiplier
           }

    assert {battled_pokemon1, battled_pokemon2} =
             pokemon_battle_module.battle(pokemon1, pokemon2)

    {expected_battled_pokemon1, expected_battled_pokemon2} =
      Utils.Solutions.PokemonBattle.battle(pokemon1, pokemon2)

    assert battled_pokemon1 == expected_battled_pokemon1
    assert battled_pokemon2 == expected_battled_pokemon2
  end)
end
# Verifies a Rock Paper Scissors Lizard Spock `play/2` implementation across
# all 25 player1/player2 permutations.
#
# Bug fix: the original win table contained {:rock, :paper} (rock does NOT
# beat paper) and was missing {:rock, :scissors} and {:spock, :scissors},
# so correct solutions were failed by this feedback. The table below is the
# full set of 10 canonical winning pairs.
feedback :rock_paper_scissors_lizard_spock do
  module = get_answers()

  assert Keyword.has_key?(module.__info__(:functions), :play),
         "Ensure you define the `play/2` function"

  game_permutations =
    for player1 <- [:rock, :paper, :scissors, :lizard, :spock],
        player2 <- [:rock, :paper, :scissors, :lizard, :spock] do
      {player1, player2}
    end

  # {winner, loser} pairs: rock crushes scissors and lizard; paper covers rock
  # and disproves spock; scissors cuts paper and decapitates lizard; lizard
  # poisons spock and eats paper; spock smashes scissors and vaporizes rock.
  beats? = fn player1, player2 ->
    {player1, player2} in [
      {:rock, :scissors},
      {:rock, :lizard},
      {:paper, :rock},
      {:paper, :spock},
      {:scissors, :paper},
      {:scissors, :lizard},
      {:lizard, :spock},
      {:lizard, :paper},
      {:spock, :scissors},
      {:spock, :rock}
    ]
  end

  Enum.each(game_permutations, fn {p1, p2} ->
    expected_result =
      cond do
        beats?.(p1, p2) -> "#{p1} beats #{p2}."
        beats?.(p2, p1) -> "#{p2} beats #{p1}."
        true -> "tie game, play again?"
      end

    actual = module.play(p1, p2)

    assert actual == expected_result,
           "Failed on RockPaperScissorsLizardSpock.play(:#{p1}, :#{p2})."
  end)
end
# Verifies a customizable rock-paper-scissors game: a struct built by `new/3`
# mapping custom atoms onto :rock/:paper/:scissors, and `play/3` which
# resolves a round between two custom choices.
feedback :custom_rps do
  custom_game_module = get_answers()

  # __info__(:functions) is a keyword list of {name, arity} pairs, so
  # Keyword.get returns the arity; this asserts new/3 exists.
  assert 3 == Keyword.get(custom_game_module.__info__(:functions), :new),
         "Ensure you define the `new/3` function"

  assert Keyword.get(custom_game_module.__info__(:functions), :__struct__),
         "Ensure you use `defstruct`."

  assert match?(%{rock: _, paper: _, scissors: _}, struct(custom_game_module)),
         "Ensure you use `defstruct` with :rock, :paper, and :scissors."

  assert %{rock: :custom_rock, paper: :custom_paper, scissors: :custom_scissors} =
           custom_game_module.new(:custom_rock, :custom_paper, :custom_scissors)

  assert 3 == Keyword.get(custom_game_module.__info__(:functions), :play),
         "Ensure you define the `play/3` function"

  game = custom_game_module.new(:custom_rock, :custom_paper, :custom_scissors)

  # Standard RPS win table expressed in the custom atoms.
  beats? = fn p1, p2 ->
    {p1, p2} in [
      {:custom_rock, :custom_scissors},
      {:custom_paper, :custom_rock},
      {:custom_scissors, :custom_paper}
    ]
  end

  # Exhaustively check all nine permutations.
  for player1 <- [:custom_rock, :custom_paper, :custom_scissors],
      player2 <- [:custom_rock, :custom_paper, :custom_scissors] do
    result = custom_game_module.play(game, player1, player2)

    expected_result =
      cond do
        beats?.(player1, player2) -> "#{player1} beats #{player2}"
        beats?.(player2, player1) -> "#{player2} beats #{player1}"
        true -> "draw"
      end

    assert result == expected_result
  end
end
# Verifies a FizzBuzz implementation: `run/1` takes a range and returns a
# list where multiples of 3 become "fizz", multiples of 5 become "buzz",
# multiples of both become "fizzbuzz", and everything else stays a number.
feedback :fizzbuzz do
  fizz_buzz_module = get_answers()

  expected_first_fifteen = [
    1,
    2,
    "fizz",
    4,
    "buzz",
    "fizz",
    7,
    8,
    "fizz",
    "buzz",
    11,
    "fizz",
    13,
    14,
    "fizzbuzz"
  ]

  # Spot-check the first fifteen values, then compare a larger range against
  # the reference solution.
  assert fizz_buzz_module.run(1..15) == expected_first_fifteen
  assert fizz_buzz_module.run(1..100) == Utils.Solutions.FizzBuzz.run(1..100)
end
# Verifies `count/2`, which tallies how many votes in a list match a choice.
#
# Fix: failure-message capitalization made consistent ("Failed on" — one
# message previously used lowercase "failed on").
feedback :voter_count do
  voter_count = get_answers()

  # count/2 returns an integer (0 is truthy in Elixir), so this assertion
  # only fails when the function is unimplemented (returns nil) or crashes.
  assert voter_count.count([], :test), "Implement the `count` function."

  assert voter_count.count([:dogs, :dogs, :dogs, :cats], :dogs) == 3,
         "Failed on ([:dogs, :dogs, :dogs, :cats], :dogs)"

  assert voter_count.count([:dogs, :dogs, :dogs, :cats], :cats) == 1,
         "Failed on ([:dogs, :dogs, :dogs, :cats], :cats)"

  assert voter_count.count([:apples, :oranges, :apples, :cats], :birds) == 0,
         "Failed on ([:apples, :oranges, :apples, :cats], :birds)"

  # Randomized check against the Enum.count/2 reference behavior.
  list = Enum.map(1..10, fn _ -> Enum.random([:cat, :dog, :bird, :apple, :orange]) end)
  choice = Enum.random([:cat, :dog, :bird, :apple, :orange])
  assert voter_count.count(list, choice) == Enum.count(list, fn each -> each == choice end)
end
# Verifies `is_anagram?/2` and `filter_anagrams/2`.
#
# Fix: failure messages referenced `is_anagram?/1`, but the function under
# test takes two arguments — corrected to `is_anagram?/2`.
feedback :is_anagram do
  anagram_module = get_answers()

  assert anagram_module.is_anagram?("stop", "pots") == true
  refute anagram_module.is_anagram?("example", "nonanagram") == true

  word = Utils.Factory.string()

  # Appending an extra letter guarantees a different letter multiset.
  generate_non_anagram = fn word ->
    word <> Enum.random(["a", "b", "c"])
  end

  # Shuffling the graphemes preserves the letter multiset.
  generate_anagram = fn word ->
    String.split(word, "", trim: true) |> Enum.shuffle() |> Enum.join("")
  end

  assert anagram_module.is_anagram?(word, generate_anagram.(word)),
         "`is_anagram?/2` failed to identify anagram."

  refute anagram_module.is_anagram?(word, generate_non_anagram.(word)),
         "`is_anagram?/2` failed to identify non-anagram."

  non_anagrams = Enum.map(1..5, fn _ -> generate_non_anagram.(word) end)
  anagrams = Enum.map(1..5, fn _ -> generate_anagram.(word) end)

  result = anagram_module.filter_anagrams(word, anagrams ++ non_anagrams)
  assert is_list(result), "filter_anagrams/2 should return a list"
  assert Enum.sort(result) == Enum.sort(anagrams), "filter_anagrams/2 failed to filter anagrams"
end
# Verifies the "99 Bottles of Soda" song: `on_the_wall/0` returns one verse
# per list element, counting down from 99 to the "no more bottles" finale.
feedback :bottles_of_soda do
  bottles_of_soda = get_answers()
  result = bottles_of_soda.on_the_wall()
  assert result, "Implement the `on_the_wall/0` function."
  assert is_list(result), "`on_the_wall/0` should return a list."

  # First verse, singular/plural boundary verses (2 -> 1 -> 0), and finale.
  assert Enum.at(result, 0) ==
           "99 bottles of soda on the wall.\n99 bottles of soda.\nTake one down, pass it around.\n98 bottles of soda on the wall."

  assert length(result) == 100, "There should be 100 total verses."

  assert Enum.at(result, 97) ==
           "2 bottles of soda on the wall.\n2 bottles of soda.\nTake one down, pass it around.\n1 bottle of soda on the wall."

  assert Enum.at(result, 98) ==
           "1 bottle of soda on the wall.\n1 bottle of soda.\nTake one down, pass it around.\n0 bottles of soda on the wall."

  assert Enum.at(result, 99) ==
           "No more bottles of soda on the wall, no more bottles of soda.\nGo to the store and buy some more, 99 bottles of soda on the wall."

  assert result == Utils.Solutions.BottlesOfSoda.on_the_wall()
end

# Verifies the generalized `on_the_wall/3`, parameterized by verse range,
# beverage name, and container name.
feedback :bottles_of_blank do
  bottles_of_soda = get_answers()
  result = bottles_of_soda.on_the_wall(50..0, "pop", "cans")
  assert result, "Implement the `on_the_wall/3` function."
  assert is_list(result), "`on_the_wall/3` should return a list."

  assert Enum.at(result, 0) ==
           "50 cans of pop on the wall.\n50 cans of pop.\nTake one down, pass it around.\n49 cans of pop on the wall."

  assert length(result) == 51, "There should be 51 total verses."

  assert Enum.at(result, 48) ==
           "2 cans of pop on the wall.\n2 cans of pop.\nTake one down, pass it around.\n1 can of pop on the wall."

  assert Enum.at(result, 49) ==
           "1 can of pop on the wall.\n1 can of pop.\nTake one down, pass it around.\n0 cans of pop on the wall."

  # NOTE(review): the closing verse restocks to 99 even though this run starts
  # at 50 — presumably the song always restarts at 99 regardless of range;
  # confirm against the reference solution.
  assert Enum.at(result, 50) ==
           "No more cans of pop on the wall, no more cans of pop.\nGo to the store and buy some more, 99 cans of pop on the wall."
end
# Verifies the Item struct used by the item generator exercise.
feedback :item_generator_item do
  item = get_answers()

  # `defstruct` generates a __struct__ function; its presence proves the macro was used.
  assert Keyword.get(item.__info__(:functions), :__struct__),
         "Ensure you use `defstruct`."

  assert match?(%{type: nil, effect: nil, level: nil, size: nil, style: nil}, struct(item)),
         "Ensure you use `defstruct` with :type, :effect, :level, :size, and :style."
end

# Verifies the generated list of every Item permutation: same length as the
# reference solution, and each element is a struct with the expected keys.
feedback :item_generator do
  items = get_answers()
  assert is_list(items), "`items` should be a list."

  expected_items = Utils.Solutions.item_generator()
  expected_length = length(expected_items)

  assert length(items) == expected_length,
         "There should be #{expected_length} permutations of items."

  Enum.each(items, fn item ->
    assert is_struct(item), "Each item should be an `Item` struct."
    assert match?(%{type: _, effect: _, style: _, size: _, level: _, __struct__: _}, item)
  end)
end
# Verifies `all_items/2`, which filters a list of items by optional :type,
# :effect, :style, :size, and :level filters, plus an :inclusive mode that
# matches ANY filter instead of all of them.
#
# Fixes: removed an exact duplicate of the "return all items" length
# assertion, and corrected the effect-filter failure message (it wrongly
# said "filter by type").
feedback :item_generator_search do
  search = get_answers()

  # No filters: everything comes back.
  items = [Utils.Factory.item(%{}), Utils.Factory.item()]
  result = search.all_items(items, [])
  assert result, "Implement the `all_items/2` function."
  assert is_list(result), "`all_items/2` should return a list."

  assert length(result) == 2,
         "`all_items/2` should return all items when no filters are provided."

  assert Enum.sort(items) == Enum.sort(result),
         "`all_items/2` should return all items when no filters are provided."

  # Each filter key in isolation.
  [item1, _] = items = [Utils.Factory.item(%{type: "a"}), Utils.Factory.item(%{type: "b"})]
  result = search.all_items(items, type: "a")
  assert result == [item1], "`all_items/2` should filter by type."

  [item1, _] = items = [Utils.Factory.item(%{effect: "a"}), Utils.Factory.item(%{effect: "b"})]
  result = search.all_items(items, effect: "a")
  assert result == [item1], "`all_items/2` should filter by effect."

  [item1, _] = items = [Utils.Factory.item(%{style: "a"}), Utils.Factory.item(%{style: "b"})]
  result = search.all_items(items, style: "a")
  assert result == [item1], "`all_items/2` should filter by style."

  [item1, _] = items = [Utils.Factory.item(%{size: 1}), Utils.Factory.item(%{size: 2})]
  result = search.all_items(items, size: 1)
  assert result == [item1], "`all_items/2` should filter by size."

  [item1, _] = items = [Utils.Factory.item(%{level: 1}), Utils.Factory.item(%{level: 2})]
  result = search.all_items(items, level: 1)
  assert result == [item1], "`all_items/2` should filter by level."

  # All filters combined must match only item1.
  [item1, item2] = items = [Utils.Factory.item(), Utils.Factory.item(%{level: 2})]

  result =
    search.all_items(items,
      type: item1.type,
      effect: item1.effect,
      style: item1.style,
      size: item1.size,
      level: item1.level
    )

  assert result == [item1], "`all_items/2` should work with multiple filters."

  # Inclusive mode: item2 matches on :level, item1 on everything else.
  result =
    search.all_items(items,
      type: item1.type,
      effect: item1.effect,
      style: item1.style,
      size: item1.size,
      level: item2.level,
      inclusive: true
    )

  assert Enum.sort(result) == Enum.sort([item1, item2]),
         "`all_items/2` should work with multiple inclusive filters."
end
# Verifies a hand-rolled Enum module: map/2, each/2, filter/2, and sum/1
# behave like their Enum counterparts.
#
# Fixes: "funtion" -> "function" in four messages, and the filter list-check
# message wrongly referred to `each/2` instead of `filter/2`.
feedback :custom_enum do
  list = Enum.to_list(1..10)
  custom_enum = get_answers()

  assert custom_enum.map(list, & &1), "Implement the `map/2` function."
  assert is_list(custom_enum.map(list, & &1)), "`map/2` should return a list."

  assert custom_enum.map(list, &(&1 * 2)) == Enum.map(list, &(&1 * 2)),
         "`map/2` should call the function on each element and return a new list."

  assert custom_enum.each(list, & &1), "Implement the `each/2` function."
  assert custom_enum.each(list, & &1) == :ok, "`each/2` should return :ok."

  assert custom_enum.filter(list, & &1), "Implement the `filter/2` function."
  assert is_list(custom_enum.filter(list, & &1)), "`filter/2` should return a list."
  assert custom_enum.filter(list, &(&1 < 5)) == Enum.filter(list, &(&1 < 5))

  assert custom_enum.sum(list), "Implement the `sum/1` function."
  assert is_integer(custom_enum.sum(list)), "`sum/1` should return an integer."
  assert custom_enum.sum(list) == Enum.sum(list)
end
# Verifies `tally/1`, which counts votes into a %{candidate => count} map.
feedback :voter_tally do
  voter_tally = get_answers()

  assert voter_tally.tally([]), "Implement the `tally/1` function."
  assert is_map(voter_tally.tally([])), "`tally/1` should return a map."
  assert voter_tally.tally([:dogs, :cats]) == %{dogs: 1, cats: 1}
  assert voter_tally.tally([:dogs, :dogs, :cats]) == %{dogs: 2, cats: 1}

  # Randomized check: the expected tally is exactly the frequency map of the votes.
  votes = Enum.map(1..10, fn _ -> Enum.random([:cats, :dogs, :birds]) end)
  assert voter_tally.tally(votes) == Enum.frequencies(votes)
end
# Verifies the weighted `tally/1`: votes may be bare atoms (worth 1) or
# {atom, weight} keyword entries, and weights for the same candidate sum up.
feedback :voter_power do
  voter_tally = get_answers()
  assert voter_tally.tally([]), "Implement the `tally/1` function."
  assert is_map(voter_tally.tally([])), "`tally/1` should return a map."
  assert voter_tally.tally(dogs: 2) == %{dogs: 2}
  assert voter_tally.tally(dogs: 2, cats: 1) == %{dogs: 2, cats: 1}
  # Mixed list: a bare atom counts as a single vote.
  assert voter_tally.tally([:cats, dogs: 2]) == %{dogs: 2, cats: 1}
  assert voter_tally.tally([:dogs, :cats, birds: 3, dogs: 10]) == %{dogs: 11, cats: 1, birds: 3}
end
# Verifies the Measurements exercise against the reference solution:
# increased/1 (count of value increases), increased_by/1 (sum of increases),
# increments/1 (pairwise deltas), and average/1.
feedback :measurements do
  measurements = get_answers()
  list = Utils.Factory.integers()

  assert measurements.increased([1, 1]), "Implement the `increased` function."
  assert measurements.increased([1, 2, 1]) == 1
  assert measurements.increased([1, 1, 2, 3, 1]) == 2
  assert measurements.increased(list) == Utils.Solutions.Measurements.increased(list)

  assert measurements.increased_by([1, 1]), "Implement the `increased_by` function."
  assert measurements.increased_by([100, 150, 120, 130]) == 60
  assert measurements.increased_by([10, 20, 10, 40]) == 40
  assert measurements.increased_by(list) == Utils.Solutions.Measurements.increased_by(list)

  assert measurements.increments([1, 2]), "Implement the `increments` function."
  assert measurements.increments([100, 150, 120, 130]) == [50, -30, 10]
  assert measurements.increments([10, 20, 10, 40]) == [10, -10, 30]
  assert measurements.increments(list) == Utils.Solutions.Measurements.increments(list)

  assert measurements.average([1, 1]), "Implement the `average` function."
  assert measurements.average([4, 5, 6]) == 5
  assert measurements.average([2, 10]) == 6
  assert measurements.average(list) == Utils.Solutions.Measurements.average(list)
end
# test names must be after tests that require a solution.
# Accessor for the accumulated @test_names module attribute.
def test_names do
  @test_names
end
# Verifies that the learner created a mix project at ../projects/<path>,
# relative to this utils directory.
feedback :created_project do
  path = get_answers()

  assert File.dir?("../projects/#{path}"),
         "Ensure you create a mix project `#{path}` in the `projects` folder."
end
end
|
utils/lib/feedback.ex
| 0.882662
| 0.828766
|
feedback.ex
|
starcoder
|
defmodule TaggedTuple do
  # The module doc is extracted from README.md at compile time: the text after
  # the "[//]: # (Documentation)" marker becomes this module's documentation.
  @moduledoc "README.md"
             |> File.read!()
             |> String.split("[//]: # (Documentation)\n")
             |> Enum.at(1)
             |> String.trim("\n")

  # `use TaggedTuple` makes only the `---/2` operator macro available.
  defmacro __using__(_opts) do
    quote do
      require unquote(__MODULE__)

      import unquote(__MODULE__),
        only: [
          ---: 2
        ]
    end
  end

  @doc """
  Defines a tagged tuple. It's equivalent to `{tag, value}`.

  The operator is right-associative. When it's called several times in a row,
  it assembles a tag chain with a value in the core.

      :x --- :y --- :z --- "value" == {:x, {:y, {:z, "value"}}}

  Can be used in Expressions and Pattern Matchings.

  ## Examples

      iex> use TaggedTuple
      ...> :tag --- 12
      {:tag, 12}
      ...> tagged_tuple = :a --- :tag --- :chain --- 12
      {:a, {:tag, {:chain, 12}}}
      ...> match?(:a --- :tag --- _tail, tagged_tuple)
      true
      ...> :a --- t1 --- t2 --- core_value = tagged_tuple
      ...> t1 == :tag
      ...> t2 == :chain
      ...> core_value == 12
  """
  # credo:disable-for-next-line
  defmacro tag --- value do
    quote do
      {unquote(tag), unquote(value)}
    end
  end

  # Guard helpers: true when none of the given terms is a tuple, i.e. each is
  # a single tag rather than a nested chain. One variant per supported depth.
  defguardp not_tuple(t1)
            when not is_tuple(t1)

  defguardp not_tuple(t2, t1)
            when not is_tuple(t2) and not is_tuple(t1)

  defguardp not_tuple(t3, t2, t1)
            when not is_tuple(t3) and not is_tuple(t2) and not is_tuple(t1)

  defguardp not_tuple(t4, t3, t2, t1)
            when not is_tuple(t4) and not is_tuple(t3) and not is_tuple(t2) and not is_tuple(t1)

  defguardp not_tuple(t5, t4, t3, t2, t1)
            when not is_tuple(t5) and not is_tuple(t4) and not is_tuple(t3) and not is_tuple(t2) and
                   not is_tuple(t1)

  @doc """
  Returns a tagged tuple by attaching the tag chain to the core value.

  `tag_chain` can be a nested tuple with tags returned from the `split/1`
  or a list with tags.

  ## Example

      iex> TaggedTuple.tag(2.5, :some_tag)
      {:some_tag, 2.5}

      iex> TaggedTuple.tag(7, {:a, {:tag, :chain}})
      {:a, {:tag, {:chain, 7}}}

      iex> TaggedTuple.tag(7, [:a, :tag, :chain])
      {:a, {:tag, {:chain, 7}}}
  """
  def tag(value, tag_chain)

  # List-shaped chains: append the value and rebuild the nested tuple.
  def tag(v, []), do: v

  def tag(v, [_ | _] = tag_chain) do
    tag_chain
    |> List.insert_at(-1, v)
    |> from_list()
  end

  # Tuple-shaped chains up to five tags deep are matched directly for speed.
  def tag(v, t1) when not_tuple(t1),
    do: {t1, v}

  def tag(v, {t2, t1}) when not_tuple(t2, t1),
    do: {t2, {t1, v}}

  def tag(v, {t3, {t2, t1}}) when not_tuple(t3, t2, t1),
    do: {t3, {t2, {t1, v}}}

  def tag(v, {t4, {t3, {t2, t1}}}) when not_tuple(t4, t3, t2, t1),
    do: {t4, {t3, {t2, {t1, v}}}}

  def tag(v, {t5, {t4, {t3, {t2, t1}}}}) when not_tuple(t5, t4, t3, t2, t1),
    do: {t5, {t4, {t3, {t2, {t1, v}}}}}

  # Deeper chains fall back to a list round-trip.
  def tag(v, tag_chain) when tuple_size(tag_chain) == 2 do
    tag_chain
    |> to_list()
    |> List.insert_at(-1, v)
    |> from_list()
  end

  @doc """
  Removes the given subchain from the tag chain's beginning and returns the
  tagged tuple built from the rest. When the full tag chain is given,
  it returns the core value.

  Raises `ArgumentError` exception if the passed tag subchain doesn't match
  the beginning of the tag chain of the tagged tuple.

  `tag_chain` can be a nested tuple with tags returned from the `split/1`
  or a list with tags.

  ## Examples

      iex> value = {:a, {:tag, {:chain, 2}}}
      iex> TaggedTuple.untag!(value, :a)
      {:tag, {:chain, 2}}
      iex> TaggedTuple.untag!(value, {:a, :tag})
      {:chain, 2}
      iex> TaggedTuple.untag!(value, {:a, {:tag, :chain}})
      2
      iex> TaggedTuple.untag!(value, [:a, :tag, :chain])
      2

      iex> value = {:other, {:stuff, 2}}
      ...> TaggedTuple.untag!(value, {:a, {:tag, :chain}})
      ** (ArgumentError) Tag chain {:a, {:tag, :chain}} doesn't match one in the tagged tuple {:other, {:stuff, 2}}.
  """
  def untag!(tagged_tuple, tag_chain) do
    case untag(tagged_tuple, tag_chain) do
      {:ok, value} ->
        value

      {:error, :mismatch} ->
        Kernel.raise(ArgumentError, """
        Tag chain #{inspect(tag_chain)} doesn't match one in \
        the tagged tuple #{inspect(tagged_tuple)}.\
        """)
    end
  end

  @doc """
  Same as `untag!/2`.

  Returns `{:error, :mismatch}` when `tag_chain` doesn't match
  the beginning part of the tag chain of the tagged tuple.
  """
  def untag(tagged_tuple, tag_chain)

  # List-shaped chain: consume the tags one by one.
  def untag({t1, value}, [t1]) do
    {:ok, value}
  end

  def untag({t1, tail}, [t1 | rest]) do
    untag(tail, rest)
  end

  # Tuple-shaped chain: a bare tag terminates the recursion.
  def untag({t1, value}, t1) do
    {:ok, value}
  end

  def untag({t1, tail}, {t1, rest}) do
    untag(tail, rest)
  end

  # Any mismatch along the way means the chains disagree.
  def untag(_, _) do
    {:error, :mismatch}
  end

  @doc """
  Returns the tag chain and the core value from the given tagged tuple.

  ## Examples

      iex> {chain, value} = TaggedTuple.split({:a, {:tag, {:chain, 2}}})
      ...> chain == {:a, {:tag, :chain}}
      ...> value == 2
      iex> TaggedTuple.tag(value, chain)
      {:a, {:tag, {:chain, 2}}}
  """
  def split(tagged_tuple) when is_tuple(tagged_tuple) do
    list = to_list(tagged_tuple)
    value = List.last(list)

    # A single remaining tag is returned bare; longer chains are rebuilt
    # into a nested tuple.
    tags =
      case List.delete_at(list, -1) do
        [one] -> one
        [_ | _] = many -> from_list(many)
      end

    {tags, value}
  end

  @doc """
  Converts a tagged tuple to a list.

  The `tag_fun` will be invoked with each tag, and the result will be inserted
  into the list. The `value_fun` will be invoked with the tagged tuple's
  core value, and the result will be inserted into the list.

  ## Examples

      iex> TaggedTuple.to_list({:a, {:tag, {:chain, 2}}})
      [:a, :tag, :chain, 2]

      iex> TaggedTuple.to_list({:a, {:tag, {:chain, 2}}}, &to_string/1, &(&1 * 100))
      ["a", "tag", "chain", 200]
  """
  def to_list(tagged_tuple, tag_fun \\ fn x -> x end, value_fun \\ fn y -> y end) do
    do_to_list(tagged_tuple, tag_fun, value_fun, [])
  end

  # Tags accumulate in reverse; reaching a non-tuple (the core value) ends
  # the recursion and the list is flipped back into order.
  defp do_to_list({head, tail}, tag_fun, value_fun, acc),
    do: do_to_list(tail, tag_fun, value_fun, [tag_fun.(head) | acc])

  defp do_to_list(value, _tag_fun, value_fun, acc), do: Enum.reverse([value_fun.(value) | acc])

  @doc """
  Converts a list to a tagged tuple.

  The list must have at least two elements, a tag and a value, respectively.

  The `tag_fun` will be invoked with each tag, and the result will be added
  to the tuple. The `value_fun` will be invoked with the core value, and
  the result will be added to the tuple.

  ## Examples

      iex> TaggedTuple.from_list([:a, :tag, :chain, 2])
      {:a, {:tag, {:chain, 2}}}

      iex> TaggedTuple.from_list(["a", "tag", "chain", 200], &String.to_existing_atom/1, &div(&1, 100))
      {:a, {:tag, {:chain, 2}}}
  """
  def from_list(list, tag_fun \\ fn x -> x end, value_fun \\ fn y -> y end)

  # A single-element list degenerates to the bare (transformed) tag.
  def from_list([tag], tag_fun, _value_fun) do
    tag_fun.(tag)
  end

  # Build the innermost {tag, value} pair first, then wrap outward.
  def from_list(list, tag_fun, value_fun) do
    [value, tag | tail] = Enum.reverse(list)
    do_from_list(tail, tag_fun, {tag_fun.(tag), value_fun.(value)})
  end

  defp do_from_list([tag | tail], tag_fun, acc) do
    do_from_list(tail, tag_fun, {tag_fun.(tag), acc})
  end

  defp do_from_list([], _tag_fun, acc) do
    acc
  end

  @doc """
  Converts a tagged tuple to a map.

  The returned map can be encoded into a JSON to transmit over the network
  or persist in a database.

  The `tag_fun` will be invoked with each tag, and the result will be inserted
  into the map. The `value_fun` will be invoked with the tagged tuple's
  core value, and the result will be inserted into the map.

  ## Examples

      iex> TaggedTuple.to_map({:a, {:tag, {:chain, 2}}})
      %{a: %{tag: %{chain: 2}}}

      iex> TaggedTuple.to_map({:a, {:tag, {:chain, 2}}}, &to_string/1, &(&1 * 100))
      %{"a" => %{"tag" => %{"chain" => 200}}}
  """
  def to_map(tagged_tuple, tag_fun \\ fn x -> x end, value_fun \\ fn y -> y end)
      when is_tuple(tagged_tuple) do
    # Wrap from the innermost %{tag => value} pair outward.
    [value, tag | tail] =
      tagged_tuple
      |> to_list()
      |> Enum.reverse()

    do_to_map(tail, tag_fun, %{tag_fun.(tag) => value_fun.(value)})
  end

  defp do_to_map([tag | tail], tag_fun, acc),
    do: do_to_map(tail, tag_fun, %{tag_fun.(tag) => acc})

  defp do_to_map([], _tag_fun, acc), do: acc

  @doc """
  Converts a map to a tagged tuple.

  The `tag_fun` will be invoked with each tag, and the result will be added
  to the tuple. The `value_fun` will be invoked with the core value,
  and the result will be added to the tuple.

  ## Examples

      iex> TaggedTuple.from_map(%{a: %{tag: %{chain: 2}}})
      {:a, {:tag, {:chain, 2}}}

      iex> TaggedTuple.from_map(%{"a" => %{"tag" => %{"chain" => 20}}}, &String.to_existing_atom/1, &div(&1, 10))
      {:a, {:tag, {:chain, 2}}}
  """
  def from_map(map, tag_fun \\ fn x -> x end, value_fun \\ fn y -> y end) do
    do_from_map(map, tag_fun, value_fun, [])
  end

  # Descend through single-key maps collecting tags; on reaching the non-map
  # core value, rebuild the tagged tuple via from_list.
  # NOTE(review): Enum.at(map, 0) only follows the first key of each map —
  # presumably each level is a single-key map; confirm with callers.
  defp do_from_map(%{} = map, tag_fun, value_fun, acc) do
    {tag1, tail} = Enum.at(map, 0)
    do_from_map(tail, tag_fun, value_fun, [tag_fun.(tag1) | acc])
  end

  defp do_from_map(value, _tag_fun, value_fun, acc) do
    [value_fun.(value) | acc]
    |> Enum.reverse()
    |> from_list()
  end
end
|
lib/tagged_tuple.ex
| 0.826852
| 0.523968
|
tagged_tuple.ex
|
starcoder
|
defmodule ScrollHat.Buttons do
  @moduledoc """
  Buttons interface for Scroll HAT Mini

  Pass a `:handler` option as a pid or {m, f, a} to receive the button events
  """
  use GenServer

  alias Circuits.GPIO

  require Logger

  @typedoc """
  A name of Scroll HAT Mini button

  These are labelled A, B, X, and Y on the board.
  """
  @type name() :: :a | :b | :x | :y

  defmodule Event do
    @moduledoc """
    A button press/release event delivered to the configured handler.
    """
    defstruct [:action, :name, :value, :timestamp]

    # Fix: the original spec referenced `Buttons.name()`, which resolves to a
    # nonexistent top-level `Buttons` module from inside this nested module;
    # the type lives on ScrollHat.Buttons and must be fully qualified.
    @type t :: %Event{
            action: :pressed | :released,
            name: ScrollHat.Buttons.name(),
            value: 1 | 0,
            timestamp: non_neg_integer()
          }
  end

  # GPIO pin numbers for each button.
  @pin_a 5
  @pin_b 6
  @pin_x 16
  @pin_y 24

  @doc """
  Start a GenServer to watch the buttons on the Scroll HAT Mini

  Options:

  * `:handler` - pass a pid or an MFA to receive button events
  """
  @spec start_link(keyword) :: GenServer.on_start()
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc """
  Return the current state of the button

  `0` - released
  `1` - pressed
  """
  @spec get_value(name()) :: 0 | 1
  def get_value(button) do
    GenServer.call(__MODULE__, {:get_value, button})
  end

  @impl GenServer
  def init(opts) do
    # GPIO setup is deferred to handle_continue so init/1 returns quickly.
    {:ok, %{button_to_ref: %{}, pin_to_button: %{}, handler: opts[:handler]}, {:continue, :init}}
  end

  @impl GenServer
  def handle_continue(:init, state) do
    # Inputs use pull-ups, so the raw GPIO level is inverted (pressed reads
    # low); interrupts on :both edges report presses and releases.
    {:ok, a} = GPIO.open(@pin_a, :input, pull_mode: :pullup)
    {:ok, b} = GPIO.open(@pin_b, :input, pull_mode: :pullup)
    {:ok, x} = GPIO.open(@pin_x, :input, pull_mode: :pullup)
    {:ok, y} = GPIO.open(@pin_y, :input, pull_mode: :pullup)

    :ok = GPIO.set_interrupts(a, :both)
    :ok = GPIO.set_interrupts(b, :both)
    :ok = GPIO.set_interrupts(x, :both)
    :ok = GPIO.set_interrupts(y, :both)

    button_to_ref = %{a: a, b: b, x: x, y: y}

    pin_to_button = %{
      @pin_a => :a,
      @pin_b => :b,
      @pin_x => :x,
      @pin_y => :y
    }

    {:noreply, %{state | button_to_ref: button_to_ref, pin_to_button: pin_to_button}}
  end

  @impl GenServer
  def handle_call({:get_value, name}, _from, state) do
    # Invert the pulled-up reading so 1 means pressed.
    inverted_value = GPIO.read(state.button_to_ref[name])
    value = 1 - inverted_value
    {:reply, value, state}
  end

  @impl GenServer
  def handle_info({:circuits_gpio, pin, timestamp, inverted_value}, state) do
    value = 1 - inverted_value
    action = if value != 0, do: :pressed, else: :released

    event = %Event{
      action: action,
      name: state.pin_to_button[pin],
      value: value,
      timestamp: timestamp
    }

    _ = send_event(state.handler, event)
    {:noreply, state}
  end

  # Ignore any other messages instead of crashing.
  def handle_info(_other, state), do: {:noreply, state}

  defp send_event(handler, event) when is_pid(handler), do: send(handler, event)

  defp send_event({m, f, a}, event) when is_atom(m) and is_atom(f) and is_list(a) do
    apply(m, f, [event | a])
  end

  # Fallback (including a nil handler): log the event rather than crash.
  defp send_event(_, event) do
    Logger.info("[ScrollHat] unhandled button event - #{inspect(event)}")
  end
end
|
lib/scroll_hat/buttons.ex
| 0.850779
| 0.521898
|
buttons.ex
|
starcoder
|
defmodule PromEx.Config do
@moduledoc """
This module defines a struct that contains all of the fields necessary to configure
an instance of PromEx.
While this module does not directly access your Application config, PromEx will call the
`PromEx.Config.build/1` function directly with the contents of `Application.get_env(:your_otp_app, YourPromEx.Module)`. As
such, this is an appropriate place to talk about how you go about configuring PromEx via your Application config.
By default, you can run PromEx without any additional configuration and PromEx will fall back on some sane defaults. Specifically,
if you were to not add any configuration to your config.exs, dev.exs, prod.exs, etc files it would be the same as setting the
following config:
```elixir
config :web_app, WebApp.PromEx,
disabled: false,
manual_metrics_start_delay: :no_delay,
drop_metrics_groups: [],
grafana: :disabled,
metrics_server: :disabled
```
In this configuration, the Grafana dashboards are not uploaded on application start, and a standalone HTTP metrics server is not
started. In addition, the `PromEx.ManualMetricsManager` is started without any time delay, and all metrics groups from all the plugins
are registered and set up.
If you would like to set up PromEx to communicate with Grafana, your config would look something like:
```elixir
config :web_app, WebApp.PromEx,
grafana: [
host: "http://localhost:3000",
username: "<YOUR_USERNAME>", # Or authenticate via Basic Auth
password: "<<PASSWORD>>"
auth_token: "<YOUR_AUTH_TOKEN_HERE>", # Or authenticate via API Token
upload_dashboards_on_start: true # This is an optional setting and will default to `true`
]
```
If you would like PromEx to start a standalone HTTP server to serve your aggregated metrics, you can leverage the `:metrics_server`
config:
```elixir
config :web_app, WebApp.PromEx,
metrics_server: [
port: 4021,
path: "/metrics", # This is an optional setting and will default to `"/metrics"`
protocol: :http, # This is an optional setting and will default to `:http`
pool_size: 5, # This is an optional setting and will default to `5`
cowboy_opts: [], # This is an optional setting and will default to `[]`
auth_strategy: :none # This is an optional and will default to `:none`
]
```
If you would like the metrics server to be protected behind some sort of authentication, you can configure your `:metrics_server`
like so:
```elixir
config :web_app, WebApp.PromEx,
metrics_server: [
port: 4021,
auth_strategy: :bearer,
auth_token: "<KEY>
]
```
## Option Details
* `:disabled` - This option will disable the PromEx supervision tree entirely and will not
start any metrics collectors. This is primarily used for disabling PromEx during testing. Default
value: false
* `:manual_metrics_start_delay` - Manual metrics are gathered once on start up and then only when
you call `PromEx.ManualMetricsManager.refresh_metrics/1`. Sometimes, you may have metrics
that require your entire supervision tree to be started in order to fetch accurate data.
This option will allow you to delay the initial metrics capture of the
`ManualMetricsManager` by a certain number of milliseconds or the `:no_delay` atom if you
want the metrics to be captured as soon as the `ManualMetricsManager` starts up. Default
value: `:no_delay`
* `:drop_metrics_groups` - A list of all the metrics groups that you are not interested in
tracking. For example, if your application does not leverage Phoenix channels at all but
you still would like to use the `PromEx.Plugins.Phoenix` plugin, you can pass
`[:phoenix_channel_event_metrics]` as the value to `:drop_metrics_groups` and that set of
metrics will not be captured. Default value: `[]`
* `ets_flush_interval` - This value denotes how often the metrics ETS table is compacted. In order
to keep things performant and as low-overhead as possible, Telemetry metrics are buffered up in
ETS until a request is made to retrieve metrics from the PromEx process. If no requests come in
to extract the metrics, the ETS table can grow infinitely. Luckily, PromEx bundles a GenServer
that periodically compacts ETS. This config value determines how often ETS should be compacted.
Default value: `7_500`
* `:grafana` - This key contains the configuration information for connecting to Grafana. Its
configuration options are:
* `:host` - The host address of your Grafana instance. In order for PromEx to communicate with
Grafana this value should be in the format `protocol://host:port` like `http://localhost:3000`
for example.
* `:username` - The username that was created in Grafana so that PromEx can upload dashboards
via the API.
* `:password` - The password that was created in Grafana so that PromEx can upload dashboards
via the API.
* `:auth_token` - The auth token that was created in Grafana so that PromEx can upload dashboards
via the API.
* `:upload_dashboards_on_start` - Using the config values that you set in your application config
(`config.exs`, `dev.exs`, `prod.exs`, etc) PromEx will attempt to upload your Dashboards to
Grafana using Grafana's HTTP API.
* `:folder_name` - The name of the folder that PromEx will put all of the project dashboards in.
PromEx will automatically generate a unique ID for the folder based on the project's otp_app
value so that it can access the correct folder in Grafana. This also makes sure that different
Elixir projects running in the same cluster and publishing dashboards to Grafana do not collide
with one another. If no name is provided, then the dashboards will all be uploaded to the default
Grafana folder.
* `:annotate_app_lifecycle` - By enabling this setting, PromEx will leverage the Grafana API to annotate
when the application was started, and when it was shut down. By default this is disabled but if you
do enable it, no action is required from you in order to display these events on the dashboards. The
annotations will automatically contain the necessary tags to only display on the PromEx dashboards.
The annotation will include information including:
- Hostname
- OTP app name
- App version
- Git SHA of the last commit (if the GIT_SHA environment variable is present)
- Git author of the last commit (if the GIT_AUTHOR environment variable is present)
* `:grafana_agent` - This key contains the configuration information for running GrafanaAgent via a
port in order to push metrics to a Prometheus instance via `remote_write` functionality:
> ### Environment dependencies {: .warning}
>
> If your application is running inside of an Alpine Linux container (or any environment that
> is based on [musl](https://www.musl-libc.org/) as opposed to
> [glibc](https://www.gnu.org/software/libc/), be sure to add `libc6-compat` to to your list
> of packages. In addition, you'll also need bash running, as this port is wrapped by a
> [bash script](https://hexdocs.pm/elixir/1.12/Port.html#module-zombie-operating-system-processes).
> For example, in a Dockerfile you would add:
> `RUN apk add --no-cache bash libc6-compat`
* `:version` - The version of GrafanaAgent that you want to run. This is a string denoting the
GrafanaAgent release version. Below are the supported versions (the downloaded artifacts
are validated against their known SHA256 values so that you can be sure you are not downloading
any malicious binaries and running them). By default, PromEx will use the result of
`PromEx.GrafanaAgent.Downloader.latest_version()` if no value is provided.
* Supported versions are `["0.23.0", "0.22.0", "0.21.2", "0.20.1"]`
* `:working_directory` - In order to leverage the GrafanaAgent functionality, PromEx needs to have
read/write access to a directory in order to download and copy the GrafanaAgent binary. This is the
full path to that directory.
* `:config_opts` - The configuration file that GrafanaAgent is started with. This option
can either accept an MFA that will return a string of the full path where the YAML configuration
file is, or a keyword list with options so that PromEx can generate a config file for you. If you
take the route where PromEx generates a config file for you, you must provide the following
options:
* `:metrics_server_path` - The path where the Prometheus metrics are exposed.
* `:metrics_server_port` - The port that the metrics server is running on.
* `:metrics_server_scheme` - Whether the app is reachable via HTTPS or HTTP.
* `:metrics_server_host` - The host to scrape for metrics.
* `:instance` - This value denotes what instance the metrics are associated with. This value
is a string and defaults to the hostname.
* `:job` - This value denotes what job the metrics are associated with. This value
is a string and defaults to the otp_app.
* `:agent_port` - What port should GrafanaAgent run on.
* `:scrape_interval` - How often should GrafanaAgent scrape the application. The default is `15s`.
* `:bearer_token` - The bearer token that GrafanaAgent should attach to the request to your app.
* `:log_level` - The logging level for GrafanaAgent.
* `:prometheus_url` - The url to your Prometheus instance.
* `:prometheus_username` - The username to the hosted Prometheus instance
* `:prometheus_password` - The password to the hosted Prometheus instance
* `:metrics_server` - This key contains the configuration information needed to run a standalone
HTTP server powered by Cowboy. This server provides a lightweight solution to serving up PromEx
metrics. Its configuration options are:
* `:port` - The port that the Cowboy HTTP server should run on.
* `:path` - The path that the metrics should be accessible at.
* `:protocol` - The protocol that the metrics should be accessible over (`:http` or `:https`).
* `:pool_size` - How many Cowboy processes should be in the pool to handle metrics related requests.
* `:auth_strategy` - What authentication strategy should be used to authorize requests to your metrics. The
Supported strategies are `:none`, `:bearer`, and `:basic`. Depending on what strategy is selected, you
will need to also add additional config values. For `:none` (which is the default), no additional
information needs to be provided. When using a `:bearer` strategy, you'll need to provide a `:auth_token`
config value. When using `:basic` strategy you'll need to provide `:auth_user` and `:auth_password` values.
* `:auth_token` - When using a `:bearer` authentication strategy, this field is required to validate the
incoming request against a valid auth token.
* `:auth_user` - When using a `:basic` authentication strategy, this field is required to validate the
incoming request against a valid user.
* `:auth_password` - When using a `:bearer` authentication strategy, this field is required to validate the
incoming request against a valid password.
* `:cowboy_opts` - A keyword list of any additional options that should be passed to `Plug.Cowboy` (see
docs for more information https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html). The `:port` and
`:transport_options` options are handled by PromEx via the aforementioned config settings and so
adding them again here has no effect.
"""
@typedoc """
- `disabled`: Whether PromEx will start up the metric collection supervision tree.
- `manual_metrics_start_delay`: How the ManualMetricsManager worker process should be started (instantly or with a millisecond delay).
- `drop_metrics_groups`: A list of metrics groups that should be omitted from the metrics collection process.
- `ets_flush_interval`: How often should the ETS buffer table be compacted.
- `grafana_config`: A map containing all the relevant settings to connect to Grafana, or `:disabled`.
- `grafana_agent_config`: A map containing all the relevant settings to connect to GrafanaAgent, or `:disabled`.
- `metrics_server_config`: A map containing all the relevant settings to start a standalone HTTP Cowboy server for metrics, or `:disabled`.
"""
alias PromEx.GrafanaAgent.Downloader

@type t :: %__MODULE__{
        disabled: boolean(),
        manual_metrics_start_delay: :no_delay | pos_integer(),
        drop_metrics_groups: MapSet.t(),
        # Milliseconds between ETS compactions (build/1 defaults this to 7_500).
        # Was previously written as the atom `:integer`, which is not a type.
        ets_flush_interval: pos_integer(),
        # Each sub-config is either a settings map or the atom :disabled,
        # mirroring what the generate_*_config/1 helpers return in build/1.
        grafana_config: map() | :disabled,
        grafana_agent_config: map() | :disabled,
        metrics_server_config: map() | :disabled
      }

defstruct [
  :disabled,
  :manual_metrics_start_delay,
  :drop_metrics_groups,
  :ets_flush_interval,
  :grafana_config,
  :grafana_agent_config,
  :metrics_server_config
]
@doc """
Create a struct that encapsulates all of the configuration needed to start a PromEx supervisor instance as well as all
of the worker processes.
"""
@spec build(keyword()) :: __MODULE__.t()
def build(opts) do
  # Each optional sub-system defaults to :disabled when its key is absent;
  # the generate_* helpers pass :disabled straight through.
  grafana = opts |> Keyword.get(:grafana, :disabled) |> generate_grafana_config()
  grafana_agent = opts |> Keyword.get(:grafana_agent, :disabled) |> generate_grafana_agent_config()
  metrics_server = opts |> Keyword.get(:metrics_server, :disabled) |> generate_metrics_server_config()

  %__MODULE__{
    disabled: Keyword.get(opts, :disabled, false),
    manual_metrics_start_delay: Keyword.get(opts, :manual_metrics_start_delay, :no_delay),
    drop_metrics_groups: opts |> Keyword.get(:drop_metrics_groups, []) |> MapSet.new(),
    ets_flush_interval: Keyword.get(opts, :ets_flush_interval, 7_500),
    grafana_config: grafana,
    grafana_agent_config: grafana_agent,
    metrics_server_config: metrics_server
  }
end
# :disabled passes straight through so build/1 can record "no Grafana".
defp generate_grafana_config(:disabled), do: :disabled

defp generate_grafana_config(opts) do
  %{
    # :host is mandatory; get_grafana_config/2 raises when it is missing.
    host: opts |> get_grafana_config(:host) |> normalize_host(),
    username: Keyword.get(opts, :username),
    password: Keyword.get(opts, :password),
    auth_token: Keyword.get(opts, :auth_token),
    upload_dashboards_on_start: Keyword.get(opts, :upload_dashboards_on_start, true),
    folder_name: Keyword.get(opts, :folder_name, :default),
    annotate_app_lifecycle: Keyword.get(opts, :annotate_app_lifecycle, false)
  }
end
# Fetches a required Grafana setting, raising a descriptive error when absent.
defp get_grafana_config(opts, config_key) do
  case Keyword.fetch(opts, config_key) do
    {:ok, value} -> value
    :error -> raise "When configuring the Grafana client for PromEx, the #{inspect(config_key)} key is required."
  end
end
defp generate_grafana_agent_config(:disabled), do: :disabled

defp generate_grafana_agent_config(opts) do
  # :config_opts is mandatory; get_grafana_agent_config/2 raises when missing.
  config_opts =
    opts
    |> get_grafana_agent_config(:config_opts)
    |> extract_opts_for_config()

  %{
    # Fall back to the newest known GrafanaAgent release when no version is pinned.
    version: Keyword.get(opts, :version, Downloader.latest_version()),
    working_directory: Keyword.get(opts, :working_directory),
    config_opts: config_opts
  }
end
# Normalizes the GrafanaAgent :config_opts keyword list into a flat map.
# Keys without defaults here are mandatory and raise via
# get_grafana_agent_config/2 when missing.
defp extract_opts_for_config(opts) do
  %{
    scrape_interval: Keyword.get(opts, :scrape_interval, "15s"),
    bearer_token: Keyword.get(opts, :bearer_token, "blank"),
    log_level: Keyword.get(opts, :log_level, "error"),
    agent_port: Keyword.get(opts, :agent_port, "4040"),
    job: Keyword.get(opts, :job),
    instance: Keyword.get(opts, :instance),
    # The Prometheus endpoint and its credentials have no sensible defaults.
    prometheus_url: get_grafana_agent_config(opts, :prometheus_url),
    prometheus_username: get_grafana_agent_config(opts, :prometheus_username),
    prometheus_password: get_grafana_agent_config(opts, :prometheus_password),
    metrics_server_path: Keyword.get(opts, :metrics_server_path, "/metrics"),
    metrics_server_port: Keyword.get(opts, :metrics_server_port, 4000),
    metrics_server_host: Keyword.get(opts, :metrics_server_host, "localhost"),
    metrics_server_scheme: Keyword.get(opts, :metrics_server_scheme, :https)
  }
end
# Fetches a required GrafanaAgent setting, raising a descriptive error when absent.
defp get_grafana_agent_config(opts, config_key) do
  case Keyword.fetch(opts, config_key) do
    {:ok, value} -> value
    :error -> raise "When configuring the GrafanaAgent client for PromEx, the #{inspect(config_key)} key is required."
  end
end
# Strips any path and query string from the configured Grafana host, e.g.
# "https://grafana.example.com/foo?a=1" -> "https://grafana.example.com".
defp normalize_host(host_string) do
  uri = URI.parse(host_string)
  URI.to_string(%{uri | path: nil, query: nil})
end
defp generate_metrics_server_config(:disabled), do: :disabled

defp generate_metrics_server_config(opts) do
  %{
    # :port has no default and must always be provided.
    port: get_metrics_server_config(opts, :port),
    path: Keyword.get(opts, :path, "/metrics"),
    protocol: Keyword.get(opts, :protocol, :http),
    pool_size: Keyword.get(opts, :pool_size, 5),
    cowboy_opts: Keyword.get(opts, :cowboy_opts, []),
    auth_strategy: Keyword.get(opts, :auth_strategy, :none)
  }
  |> add_auth_config(opts)
end
# Augments the base metrics-server config with the credentials required by
# the selected :auth_strategy. Unknown strategies fail fast at build time.
defp add_auth_config(%{auth_strategy: :none} = config, _opts), do: config

defp add_auth_config(%{auth_strategy: :bearer} = config, opts) do
  Map.put(config, :auth_token, get_metrics_server_config(opts, :auth_token))
end

defp add_auth_config(%{auth_strategy: :basic} = config, opts) do
  config
  |> Map.put(:auth_user, get_metrics_server_config(opts, :auth_user))
  |> Map.put(:auth_password, get_metrics_server_config(opts, :auth_password))
end

defp add_auth_config(_config, _opts) do
  raise "Unknown auth strategy provided to PromEx metrics server. Supported strategies include :none, :bearer, or :basic."
end
# Fetches a required metrics-server setting, raising a descriptive error when absent.
defp get_metrics_server_config(opts, config_key) do
  case Keyword.fetch(opts, config_key) do
    {:ok, value} -> value
    :error -> raise "When configuring the PromEx metrics server, the #{inspect(config_key)} key is required."
  end
end
end
|
lib/prom_ex/config.ex
| 0.897074
| 0.652324
|
config.ex
|
starcoder
|
defmodule Crux.Structs.Emoji do
@moduledoc """
Represents a Discord [Emoji Object](https://discordapp.com/developers/docs/resources/emoji#emoji-object-emoji-structure).
Differences opposed to the Discord API Object:
- `:user` is just the user id
"""
@behaviour Crux.Structs
alias Crux.Structs.{Emoji, Util}
require Util
# Doc-metadata macro from Util: records the library version this module was added in.
Util.modulesince("0.1.0")
defstruct(
animated: nil,
id: nil,
name: nil,
roles: nil,
user: nil,
require_colons: nil,
managed: nil
)
Util.typesince("0.1.0")
@type t :: %__MODULE__{
animated: boolean() | nil,
id: Crux.Rest.snowflake() | nil,
name: String.t(),
roles: MapSet.t(Crux.Rest.snowflake()),
user: Crux.Rest.snowflake() | nil,
require_colons: boolean() | nil,
managed: boolean() | nil
}
@doc """
Creates a `Crux.Structs.Emoji` struct from raw data.
> Automatically invoked by `Crux.Structs.create/2`.
"""
@spec create(data :: map()) :: t()
Util.since("0.1.0")
def create(data) do
# Normalize the raw API payload:
# - string keys become atoms (Util.atomify/1)
# - the :id snowflake string becomes an integer
# - each role snowflake is converted and collected into a MapSet
# - the :user object is collapsed to just its id (see moduledoc)
emoji =
data
|> Util.atomify()
|> Map.update(:id, nil, &Util.id_to_int/1)
|> Map.update(:roles, MapSet.new(), &MapSet.new(&1, fn role -> Util.id_to_int(role) end))
|> Map.update(:user, nil, Util.map_to_id())
struct(__MODULE__, emoji)
end
@doc ~S"""
Converts an `Crux.Structs.Emoji`, a `Crux.Structs.Reaction`, or a `String.t()` to its discord identifier format.
> This is automatically done if using a appropriate rest function.
## Examples
```elixir
# A custom emoji
iex> %Crux.Structs.Emoji{animated: false, id: 396521773216301056, name: "blobwavereverse"}
...> |> Crux.Structs.Emoji.to_identifier()
"blobwavereverse:396521773216301056"
# A custom animated emoji
iex> %Crux.Structs.Emoji{animated: true, id: 396521774466203659, name: "ablobwavereverse"}
...> |> Crux.Structs.Emoji.to_identifier()
"a:ablobwavereverse:396521774466203659"
# A regular emoji
iex> %Crux.Structs.Emoji{animated: false, id: nil, name: "👋"}
...> |> Crux.Structs.Emoji.to_identifier()
"%F0%9F%91%8B"
# A reaction struct
iex> %Crux.Structs.Reaction{
...> emoji: %Crux.Structs.Emoji{animated: false, id: 356830260626456586, name: "blobReach"}
...> }
...> |> Crux.Structs.Emoji.to_identifier()
"blobReach:356830260626456586"
# An already encoded identifier
iex> "👀" |> URI.encode_www_form()
...> |> Crux.Structs.Emoji.to_identifier()
"%F0%9F%91%80"
# A custom emoji's identifier
iex> "eyesRight:271412698267254784"
...> |> Crux.Structs.Emoji.to_identifier()
"eyesRight:271412698267254784"
```
"""
@spec to_identifier(emoji :: Crux.Structs.Emoji.t() | Crux.Structs.Reaction.t() | String.t()) ::
String.t()
Util.since("0.1.1")
# Reactions carry an emoji struct; unwrap it and re-dispatch.
def to_identifier(%Crux.Structs.Reaction{emoji: emoji}), do: to_identifier(emoji)
# Unicode (non-custom) emoji have no id; the character itself is URL-encoded.
def to_identifier(%__MODULE__{id: nil, name: name}), do: URI.encode_www_form(name)
# Custom animated emoji take an "a:" prefix.
def to_identifier(%__MODULE__{id: id, name: name, animated: true}), do: "a:#{name}:#{id}"
def to_identifier(%__MODULE__{id: id, name: name}), do: "#{name}:#{id}"
# Plain strings are assumed to already be valid identifiers and pass through.
def to_identifier(identifier) when is_bitstring(identifier), do: identifier
# Renders the emoji as Discord message markup, e.g. "<:name:id>" / "<a:name:id>",
# or the bare character for unicode emoji.
defimpl String.Chars, for: Crux.Structs.Emoji do
@spec to_string(Emoji.t()) :: String.t()
def to_string(%Emoji{id: nil, name: name}), do: name
def to_string(%Emoji{id: id, name: name, animated: true}),
do: "<a:#{name}:#{id}>"
def to_string(%Emoji{id: id, name: name}), do: "<:#{name}:#{id}>"
end
end
|
lib/structs/emoji.ex
| 0.896063
| 0.421284
|
emoji.ex
|
starcoder
|
defmodule Nostrum.Permission do
  @moduledoc """
  Functions that work on permissions.
  Some functions return a list of permissions. You can use enumerable functions
  to work with permissions:
  ```Elixir
  alias Nostrum.Cache.GuildCache
  alias Nostrum.Struct.Guild.Member
  guild = GuildCache.get!(279093381723062272)
  member = Map.get(guild.members, 177888205536886784)
  member_perms = Member.guild_permissions(member, guild)
  if :administrator in member_perms do
    IO.puts("This user has the administrator permission.")
  end
  ```
  """
  import Bitwise

  @typedoc """
  Represents a single permission as a bitvalue.
  """
  @type bit :: non_neg_integer

  @typedoc """
  Represents a set of permissions as a bitvalue.
  """
  @type bitset :: non_neg_integer

  @type general_permission ::
          :create_instant_invite
          | :kick_members
          | :ban_members
          | :administrator
          | :manage_channels
          | :manage_guild
          | :view_audit_log
          | :view_channel
          | :change_nickname
          | :manage_nicknames
          | :manage_roles
          | :manage_webhooks
          | :manage_emojis_and_stickers
          | :view_guild_insights
          | :use_application_commands
          | :moderate_members

  @type text_permission ::
          :add_reactions
          | :send_messages
          | :send_tts_messages
          | :manage_messages
          | :embed_links
          | :attach_files
          | :read_message_history
          | :mention_everyone
          | :use_external_emojis
          | :create_public_threads
          | :create_private_threads
          | :send_messages_in_threads
          | :manage_threads
          | :use_external_stickers

  @type voice_permission ::
          :connect
          | :speak
          | :mute_members
          | :deafen_members
          | :move_members
          | :use_vad
          | :priority_speaker
          | :stream
          | :request_to_speak
          | :manage_events
          | :use_embedded_activities

  @type t ::
          general_permission
          | text_permission
          | voice_permission

  # Bit positions follow the Discord permission flag layout.
  @permission_to_bit_map %{
    create_instant_invite: 1 <<< 0,
    kick_members: 1 <<< 1,
    ban_members: 1 <<< 2,
    administrator: 1 <<< 3,
    manage_channels: 1 <<< 4,
    manage_guild: 1 <<< 5,
    add_reactions: 1 <<< 6,
    view_audit_log: 1 <<< 7,
    priority_speaker: 1 <<< 8,
    stream: 1 <<< 9,
    view_channel: 1 <<< 10,
    send_messages: 1 <<< 11,
    send_tts_messages: 1 <<< 12,
    manage_messages: 1 <<< 13,
    embed_links: 1 <<< 14,
    attach_files: 1 <<< 15,
    read_message_history: 1 <<< 16,
    mention_everyone: 1 <<< 17,
    use_external_emojis: 1 <<< 18,
    view_guild_insights: 1 <<< 19,
    connect: 1 <<< 20,
    speak: 1 <<< 21,
    mute_members: 1 <<< 22,
    deafen_members: 1 <<< 23,
    move_members: 1 <<< 24,
    use_vad: 1 <<< 25,
    change_nickname: 1 <<< 26,
    manage_nicknames: 1 <<< 27,
    manage_roles: 1 <<< 28,
    manage_webhooks: 1 <<< 29,
    manage_emojis_and_stickers: 1 <<< 30,
    use_application_commands: 1 <<< 31,
    request_to_speak: 1 <<< 32,
    manage_events: 1 <<< 33,
    manage_threads: 1 <<< 34,
    create_public_threads: 1 <<< 35,
    create_private_threads: 1 <<< 36,
    use_external_stickers: 1 <<< 37,
    send_messages_in_threads: 1 <<< 38,
    use_embedded_activities: 1 <<< 39,
    moderate_members: 1 <<< 40
  }

  # Inverse lookup table, derived once at compile time.
  @bit_to_permission_map Map.new(@permission_to_bit_map, fn {k, v} -> {v, k} end)
  @permission_list Map.keys(@permission_to_bit_map)

  @doc """
  Returns `true` if `term` is a permission; otherwise returns `false`.
  ## Examples
  ```Elixir
  iex> Nostrum.Permission.is_permission(:administrator)
  true
  iex> Nostrum.Permission.is_permission(:not_a_permission)
  false
  ```
  """
  defguard is_permission(term) when is_atom(term) and term in @permission_list

  @doc """
  Returns a list of all permissions.
  """
  @spec all() :: [t]
  def all, do: @permission_list

  @doc """
  Converts the given bit to a permission.
  This function returns `:error` if `bit` does not map to a permission.
  ## Examples
  ```Elixir
  iex> Nostrum.Permission.from_bit(0x04000000)
  {:ok, :change_nickname}
  iex> Nostrum.Permission.from_bit(0)
  :error
  ```
  """
  @spec from_bit(bit) :: {:ok, t} | :error
  def from_bit(bit) do
    Map.fetch(@bit_to_permission_map, bit)
  end

  @doc """
  Same as `from_bit/1`, but raises `ArgumentError` in case of failure.
  ## Examples
  ```Elixir
  iex> Nostrum.Permission.from_bit!(0x04000000)
  :change_nickname
  iex> Nostrum.Permission.from_bit!(0)
  ** (ArgumentError) expected a valid bit, got: `0`
  ```
  """
  @spec from_bit!(bit) :: t
  def from_bit!(bit) do
    case from_bit(bit) do
      {:ok, perm} -> perm
      :error -> raise(ArgumentError, "expected a valid bit, got: `#{inspect(bit)}`")
    end
  end

  @doc """
  Converts the given bitset to a list of permissions.
  If invalid bits are given they will be omitted from the results.
  ## Examples
  ```Elixir
  iex> Nostrum.Permission.from_bitset(0x08000002)
  [:manage_nicknames, :kick_members]
  iex> Nostrum.Permission.from_bitset(0x4000000000000)
  []
  ```
  """
  @spec from_bitset(bitset) :: [t]
  def from_bitset(bitset) do
    # 0..53 covers every currently defined bit (highest is 40) with headroom;
    # unknown bits fall out in the :error branch below.
    0..53
    |> Enum.map(fn index -> 0x1 <<< index end)
    |> Enum.filter(fn mask -> (bitset &&& mask) === mask end)
    |> Enum.reduce([], fn bit, acc ->
      case from_bit(bit) do
        {:ok, perm} -> [perm | acc]
        :error -> acc
      end
    end)
  end

  @doc """
  Converts the given permission to a bit.
  ## Examples
  ```Elixir
  iex> Nostrum.Permission.to_bit(:administrator)
  8
  ```
  """
  @spec to_bit(t) :: bit
  def to_bit(permission) when is_permission(permission), do: @permission_to_bit_map[permission]

  @doc """
  Converts the given enumerable of permissions to a bitset.
  An empty enumerable yields `0`.
  ## Examples
  ```Elixir
  iex> Nostrum.Permission.to_bitset([:administrator, :create_instant_invite])
  9
  iex> Nostrum.Permission.to_bitset([])
  0
  ```
  """
  @spec to_bitset(Enum.t()) :: bitset
  def to_bitset(permissions) do
    permissions
    |> Enum.map(&to_bit/1)
    # Fold with an explicit initial value of 0 (the identity for `|||`):
    # Enum.reduce/2 raises Enum.EmptyError on an empty permission list.
    |> Enum.reduce(0, fn bit, acc -> acc ||| bit end)
  end
end
|
lib/nostrum/permission.ex
| 0.895454
| 0.812719
|
permission.ex
|
starcoder
|
defmodule WordsWithEnemies.WordAi do
  @moduledoc """
  GenServer-backed AI opponent that holds a bank of letters and builds words
  against the player, with behaviour scaled by a difficulty level.
  """
  use GenServer
  alias WordsWithEnemies.Letters
  import WordsWithEnemies.WordFinder

  # Client

  @doc """
  Creates an instance of the AI, with a skill
  level of `difficulty`.
  """
  def start_link(id, difficulty) do
    GenServer.start_link(__MODULE__, %{id: id, difficulty: difficulty})
  end

  @doc """
  Returns the AI's difficulty.
  """
  def get_difficulty(pid) do
    GenServer.call(pid, :get_difficulty)
  end

  @doc """
  Sets the difficulty of the AI to `difficulty`.
  """
  def set_difficulty(pid, difficulty) do
    GenServer.cast(pid, {:set_difficulty, difficulty})
  end

  @doc """
  Returns the AI's letters.
  """
  def get_letters(pid) do
    GenServer.call(pid, :get_letters)
  end

  @doc """
  Adds a new letter to the AI's letter bank.
  """
  def add_letter(pid) do
    GenServer.cast(pid, :add_letter)
  end

  @doc """
  Replaces the AI's current letters with a new set.
  """
  def new_letters(pid) do
    GenServer.cast(pid, :new_letters)
  end

  @doc """
  Asks the AI to build a word in response to `user_word`.
  Uses a 10 second timeout since the word search can be slow.
  """
  def make_word(pid, user_word) do
    GenServer.call(pid, {:make_word, user_word}, 10_000)
  end

  # Server

  @impl true
  def init(state) do
    # Seed the AI's letter bank according to its difficulty.
    letters = Letters.generate_set(:ai, state.difficulty)
    {:ok, Map.put(state, :letters, letters)}
  end

  @impl true
  def handle_call(:get_difficulty, _from, state) do
    {:reply, state.difficulty, state}
  end

  def handle_call(:get_letters, _from, state) do
    {:reply, state.letters, state}
  end

  # TODO(review): word selection is stubbed out and always replies 10; the
  # intended pipeline is kept commented below for when choose_word is ready.
  def handle_call({:make_word, _user_word}, _from, state) do
    word = 10
    # state.difficulty
    # |> find_words(state.letters, user_word)
    # |> choose_word(state.difficulty, user_word)
    {:reply, word, state}
  end

  @impl true
  def handle_cast(:add_letter, state) do
    {:noreply, %{state | letters: Letters.add_letter(state.letters)}}
  end

  def handle_cast({:set_difficulty, difficulty}, state) do
    {:noreply, %{state | difficulty: difficulty}}
  end

  def handle_cast(:new_letters, state) do
    {:noreply, %{state | letters: Letters.generate_set(:ai, state.difficulty)}}
  end

  # Word searches, scaled by difficulty.

  # Easy: short words only; the player's word is ignored entirely.
  def find_words("easy", letters, _user_word) do
    word_list()
    |> using(letters)
    |> between(min: 5, max: 6)
  end

  # Medium: longer words starting with the player's most common letter.
  def find_words("medium", letters, user_word) do
    word_list()
    |> using(letters)
    |> between(min: 7, max: 8)
    |> starting_with(Letters.most_common(user_word))
  end

  # Hard: long words that both start with and repeat the player's most
  # common letter at least twice.
  def find_words("hard", letters, user_word) do
    prevailing_letter = Letters.most_common(user_word)

    word_list()
    |> using(letters)
    |> between(min: 8)
    |> starting_with(prevailing_letter)
    # Was `%{prevailing_letter: 2}`, which built the literal atom key
    # :prevailing_letter instead of using the computed letter.
    |> containing(%{prevailing_letter => 2}, precise: false)
  end
end
|
lib/words_with_enemies/language/word_ai.ex
| 0.748076
| 0.508483
|
word_ai.ex
|
starcoder
|
defmodule AdventOfCode.DayNineSolution.Location do
# A single cell of the height map: `coords` is a {row, col} tuple (1-indexed,
# matching Matrex) and `height` is the numeric height read from the input.
defstruct [:coords, :height]
end
defmodule AdventOfCode.DayNineSolution do
alias AdventOfCode.DayNineSolution.Location, as: Location
# Parses the day-9 height map into a Matrex matrix, then builds a :digraph
# with an edge from every cell to each of its in-bounds 4-neighbours.
defp load_data() do
m =
AdventOfCode.load_data(9, 'data.txt')
|> Enum.map(&String.graphemes/1)
|> Enum.map(fn i -> Enum.map(i, &String.to_integer/1) end)
|> Matrex.new()
{max_r, max_c} = Matrex.size(m)
gen_graph(m, 1, 1, max_r, max_c, :digraph.new())
end
# Row-major walk over the (1-indexed) matrix; the first clause terminates the
# recursion past the last row, the second wraps to the next row.
defp gen_graph(_, r, _, max_r, _, g) when r > max_r, do: g
defp gen_graph(m, r, c, max_r, max_c, g) when c > max_c,
do: gen_graph(m, r + 1, 1, max_r, max_c, g)
defp gen_graph(m, r, c, max_r, max_c, g) do
# nil marks an out-of-bounds neighbour; filtered out before adding edges.
up = if r == 1, do: nil, else: {r - 1, c}
down = if r == max_r, do: nil, else: {r + 1, c}
left = if c == 1, do: nil, else: {r, c - 1}
right = if c == max_c, do: nil, else: {r, c + 1}
cur = %Location{coords: {r, c}, height: m[r][c]}
:digraph.add_vertex(g, cur)
[up, down, left, right]
|> Enum.filter(&(&1 != nil))
|> Enum.each(fn {row, col} ->
pt = %Location{coords: {row, col}, height: m[row][col]}
:digraph.add_vertex(g, pt)
:digraph.add_edge(g, cur, pt)
end)
gen_graph(m, r, c + 1, max_r, max_c, g)
end
# Part one: sum of risk levels (height + 1) over all low points — cells
# strictly lower than every neighbour.
def part_one() do
g = load_data()
g
|> :digraph.vertices()
|> Enum.filter(fn pt ->
:digraph.out_neighbours(g, pt) |> Enum.all?(fn %{height: h} -> h > pt.height end)
end)
|> Enum.map(fn %{height: h} -> h + 1 end)
|> Enum.sum()
end
# Part two: product of the three largest basin sizes. Height-9 cells border
# basins, so deleting them first disconnects the basins from each other;
# a basin's size is then the number of vertices reachable from its low point.
def part_two() do
g = load_data()
g
|> :digraph.vertices()
|> Enum.filter(fn %{height: h} -> h == 9 end)
|> Enum.each(&:digraph.del_vertex(g, &1))
g
|> :digraph.vertices()
|> Enum.filter(fn pt ->
:digraph.out_neighbours(g, pt) |> Enum.all?(fn %{height: h} -> h > pt.height end)
end)
|> Enum.map(fn pt ->
:digraph_utils.reachable_neighbours([pt], g) |> Enum.count()
end)
|> Enum.sort()
|> Enum.reverse()
|> Enum.slice(0, 3)
|> Enum.reduce(1, fn c, acc -> acc * c end)
end
end
|
lib/advent_of_code/day-9/solution.ex
| 0.566258
| 0.487734
|
solution.ex
|
starcoder
|
defmodule Dinero do
alias Dinero.Currency
alias Dinero.Utils
@moduledoc """
`Dinero` is a struct that provides methods for working with currencies
## Examples
iex> d1 = Dinero.new(100, :USD)
%Dinero{amount: 10000, currency: :USD}
iex> d2 = Dinero.new(200, :USD)
%Dinero{amount: 20000, currency: :USD}
iex> Dinero.add(d1, d2)
%Dinero{amount: 30000, currency: :USD}
**Note:** `Dinero` uses coins value for calculations. So when you
create a new `Dinero` struct with 100 USD it automatically transforms this into 10000 cents
"""
@enforce_keys [:amount, :currency]
defstruct [:amount, :currency]
# `amount` is always stored in minor units (cents/coins); see the moduledoc.
@type t :: %__MODULE__{
amount: integer,
currency: atom
}
@spec new(integer | float, atom | String.t()) :: t
@doc ~S"""
Creates a new `Dinero` struct with provided currency.
If currency is not supported, ArgumentError will be raised
## Examples
iex> Dinero.new(100, :USD)
%Dinero{amount: 10000, currency: :USD}
iex> Dinero.new(1.0e4, :USD)
%Dinero{amount: 1000000, currency: :USD}
iex> Dinero.new(100, :RUR)
** (ArgumentError) currency RUR not found
"""
def new(amount, currency \\ :USD)
when (is_integer(amount) or is_float(amount)) and (is_atom(currency) or is_binary(currency)) do
%Dinero{
amount: Utils.convert_currency_to_coins(amount),
currency: get_currency_code(currency)
}
end
@spec add(t, t) :: t
@doc ~S"""
Adds two `Dinero` structs
## Examples
iex> d1 = Dinero.new(100, :USD)
%Dinero{amount: 10000, currency: :USD}
iex> d2 = Dinero.new(20, :USD)
%Dinero{amount: 2000, currency: :USD}
iex> Dinero.add(d1, d2)
%Dinero{amount: 12000, currency: :USD}
"""
# Both operands must share the same currency; mismatches fail to match this
# clause and raise FunctionClauseError.
def add(%Dinero{amount: a, currency: currency}, %Dinero{amount: b, currency: currency}) do
%Dinero{amount: a + b, currency: get_currency_code(currency)}
end
@spec subtract(t, t) :: t
@doc ~S"""
Subtracts one `Dinero` from another
## Examples
iex> d1 = Dinero.new(100, :USD)
%Dinero{amount: 10000, currency: :USD}
iex> d2 = Dinero.new(20, :USD)
%Dinero{amount: 2000, currency: :USD}
iex> Dinero.subtract(d1, d2)
%Dinero{amount: 8000, currency: :USD}
"""
def subtract(%Dinero{amount: a, currency: currency}, %Dinero{amount: b, currency: currency}) do
%Dinero{amount: a - b, currency: get_currency_code(currency)}
end
@spec multiply(t, integer | float) :: t
@doc ~S"""
Multiplies a `Dinero` by a value and truncates the result
## Examples
iex> d = Dinero.new(120, :USD)
%Dinero{amount: 12000, currency: :USD}
iex> Dinero.multiply(d, 4)
%Dinero{amount: 48000, currency: :USD}
iex> d = Dinero.new(100, :USD)
%Dinero{amount: 10000, currency: :USD}
iex> Dinero.multiply(d, 1.005)
%Dinero{amount: 10049, currency: :USD}
"""
def multiply(%Dinero{amount: a, currency: currency}, value)
when is_integer(value) or is_float(value) do
# trunc/1 drops the fractional cent (rounds toward zero).
%Dinero{amount: trunc(a * value), currency: get_currency_code(currency)}
end
@spec multiply(t, integer | float, :round_up) :: t
@doc ~S"""
Multiplies a `Dinero` by a value and rounds up the result
## Examples
iex> d = Dinero.new(100, :USD)
%Dinero{amount: 10000, currency: :USD}
iex> Dinero.multiply(d, 1.005, :round_up)
%Dinero{amount: 10050, currency: :USD}
"""
def multiply(%Dinero{amount: a, currency: currency}, value, :round_up)
when is_number(value) do
%Dinero{amount: round(a * value), currency: get_currency_code(currency)}
end
@spec divide(t, integer | float) :: t
@doc ~S"""
Divides `Dinero` by a value and truncates the result
## Examples
iex> d = Dinero.new(100.24, :USD)
%Dinero{amount: 10024, currency: :USD}
iex> Dinero.divide(d, 3)
%Dinero{amount: 3341, currency: :USD}
iex> Dinero.divide(d, 5)
%Dinero{amount: 2004, currency: :USD}
"""
def divide(%Dinero{amount: a, currency: currency}, value)
when is_integer(value) or is_float(value) do
%Dinero{amount: trunc(a / value), currency: get_currency_code(currency)}
end
@spec divide(t, integer | float, atom) :: t
@doc ~S"""
Divides `Dinero` by a value and rounds up the result
## Examples
iex> d = Dinero.new(100.24, :USD)
%Dinero{amount: 10024, currency: :USD}
iex> Dinero.divide(d, 3, :round_up)
%Dinero{amount: 3341, currency: :USD}
iex> Dinero.divide(d, 5, :round_up)
%Dinero{amount: 2005, currency: :USD}
"""
# NOTE(review): unlike multiply/3, the guard only checks `is_atom(round_up)`,
# so ANY atom (not just :round_up) selects rounding — confirm this is intended
# or tighten the match to :round_up.
def divide(%Dinero{amount: a, currency: currency}, value, round_up)
when (is_integer(value) or is_float(value)) and is_atom(round_up) do
%Dinero{amount: round(a / value), currency: get_currency_code(currency)}
end
@spec convert(t, atom | String.t(), float) :: t
@doc ~S"""
Converts value of `Dinero` to target currency using exchange_rate and truncates the result
## Examples
iex> d = Dinero.new(100, :USD)
%Dinero{amount: 10000, currency: :USD}
iex> Dinero.convert(d, :UAH, 26.2)
%Dinero{amount: 262000, currency: :UAH}
iex> Dinero.convert(d, :USD, 26.2)
** (ArgumentError) target currency must be different from source currency
"""
def convert(%Dinero{} = d, target, exchange_rate) do
if get_currency_code(d.currency) != get_currency_code(target) do
%Dinero{
amount: trunc(d.amount * exchange_rate),
currency: get_currency_code(target)
}
else
raise(ArgumentError, "target currency must be different from source currency")
end
end
@spec parse!(String.t(), atom) :: t
@doc ~S"""
Creates `Dinero` from String that represents integer or float. If a string can't be parsed ArgumentError is raised
If the second param is not provided it uses USD as default currency
## Examples
iex> Dinero.parse!("123.23")
%Dinero{amount: 12323, currency: :USD}
iex> Dinero.parse!("112")
%Dinero{amount: 11200, currency: :USD}
iex> Dinero.parse!("2", :UAH)
%Dinero{amount: 200, currency: :UAH}
iex> Dinero.parse!("100.00")
%Dinero{amount: 10000, currency: :USD}
iex> Dinero.parse!("invalid string")
** (ArgumentError) invalid string. it must contain string that represents integer or float
"""
def parse!(amount, currency \\ :USD) when is_binary(amount) do
case parse(amount, currency) do
{:ok, %Dinero{} = dinero} ->
dinero
:error ->
raise(
ArgumentError,
"invalid string. it must contain string that represents integer or float"
)
end
end
@spec parse(String.t(), atom) :: {:ok, t} | :error
@doc """
Same as `parse!/2`, but returns either `{:ok, %Dinero{}}` or `:error`
## Examples
iex> Dinero.parse("123.23")
{:ok, %Dinero{amount: 12323, currency: :USD}}
iex> Dinero.parse("invalid string")
:error
"""
def parse(amount, currency \\ :USD) when is_binary(amount) do
# Underscores and commas are treated as visual separators and stripped.
amount = String.replace(amount, ~r/[_,]/, "")
amount =
if String.contains?(amount, ".") do
Float.parse(amount)
else
Integer.parse(amount)
end
# Trailing garbage after the number is silently ignored (Float/Integer.parse
# return the remainder in the second tuple element, which is discarded).
case amount do
{a, _} -> {:ok, Dinero.new(a, currency)}
:error -> :error
end
end
@spec equals?(t, t) :: boolean
@doc """
Compares two Dinero structs.
Returns `true`, if amounts and currencies match. Otherwise returns `false`.
"""
def equals?(%Dinero{} = a, %Dinero{} = b) do
a.amount == b.amount && get_currency_code(a.currency) == get_currency_code(b.currency)
end
@spec zero?(t) :: boolean
@doc """
Returns `true`, if amount is zero.
"""
def zero?(%Dinero{} = a), do: a.amount == 0
# Resolves a currency given as atom or string to its canonical atom code;
# Currency.get!/1 raises ArgumentError for unknown currencies.
defp get_currency_code(currency) do
Currency.get!(currency).code
end
@spec to_float(t) :: float
@doc """
Returns float representation of Dinero.
## Examples
iex> import Dinero.Sigil
iex> Dinero.to_float(~m[100.14]USD)
100.14
iex> Dinero.to_float(~m[0.01]uah)
0.01
"""
def to_float(%Dinero{amount: amount}) do
amount / 100.0
end
end
defimpl String.Chars, for: Dinero do
  # Renders the amount in major units with exactly two decimal places,
  # e.g. %Dinero{amount: 10050, currency: :USD} -> "100.50".
  # The currency code is intentionally not included.
  def to_string(%Dinero{amount: amount}) do
    :erlang.float_to_binary(amount / 100, decimals: 2)
  end
end
# Derive a Jason encoder for Dinero only when the optional Jason dependency is
# present. `Code.ensure_compiled?/1` is deprecated; `Code.ensure_loaded?/1` is
# the supported way to probe for an optional module.
if Code.ensure_loaded?(Jason.Encoder) do
  require Protocol
  Protocol.derive(Jason.Encoder, Dinero, only: [:amount, :currency])
end
|
lib/dinero.ex
| 0.88602
| 0.563408
|
dinero.ex
|
starcoder
|
defmodule Upstairsbox.WindowCovering do
@moduledoc """
Responsible for managing the state of a window covering
"""
@behaviour HAP.ValueStore
use GenServer
require Logger
# It takes 27s to fully close the blind
@percent_per_second 100 / 27
# Position-state codes exposed via the :position_state characteristic.
@closing 0
@opening 1
@stopped 2
# GPIO pins wired to the blind remote's open/close/hold buttons.
@open_pin 22
@close_pin 23
@hold_pin 24
def start_link(config) do
GenServer.start_link(__MODULE__, config, name: __MODULE__)
end
@impl HAP.ValueStore
def get_value(opts) do
GenServer.call(__MODULE__, {:get, opts})
end
@impl HAP.ValueStore
def put_value(value, opts) do
GenServer.call(__MODULE__, {:put, value, opts})
end
@impl GenServer
# NOTE(review): the original comment said "fully open", but both current and
# target start at 50.0, so seek/1 stops immediately — confirm intended homing.
def init(_) do
# Home ourselves to fully open at startup
{:ok, %{current: 50.0, target: 50.0, position_state: @stopped} |> seek()}
end
@impl GenServer
def handle_call({:get, :current_position}, _from, %{current: current} = state) do
{:reply, {:ok, round(current)}, state}
end
def handle_call({:get, :target_position}, _from, %{target: target} = state) do
{:reply, {:ok, round(target)}, state}
end
def handle_call({:get, :position_state}, _from, %{position_state: position_state} = state) do
{:reply, {:ok, position_state}, state}
end
def handle_call({:put, target, :target_position}, _from, state) do
# `target / 1` coerces the incoming integer percentage to a float.
{:reply, :ok, state |> Map.put(:target, target / 1) |> seek()}
end
def handle_call({:put, true, :hold_position}, _from, %{current: current} = state) do
# "Hold" retargets to wherever we currently are, which stops movement.
{:reply, :ok, state |> Map.put(:target, current) |> seek()}
end
def handle_call({:put, false, :hold_position}, _from, state) do
{:reply, :ok, state}
end
@impl GenServer
# Periodic 1s tick scheduled by seek/1 while the blind is moving.
def handle_info(:move, state) do
{:noreply, state |> seek()}
end
# Core control loop: compares current vs target position, decides whether to
# open/close/stop, drives the remote buttons on transitions, integrates the
# estimated position, and reschedules itself while movement continues.
defp seek(%{current: current, target: target, position_state: position_state} = state) do
desired_state =
cond do
target == 0.0 && current <= 0.0 -> @stopped
target == 100.0 && current >= 100.0 -> @stopped
# 5% dead band so we don't chatter around the target.
current - target > 5 -> @closing
current - target < -5 -> @opening
true -> @stopped
end
Logger.info("state: #{position_state}, #{desired_state}. position: #{current}, #{target}")
case {position_state, desired_state} do
{@closing, @closing} ->
# Don't change our controls, but update position & reschedule
Process.send_after(self(), :move, 1000)
%{state | current: current - @percent_per_second}
{@opening, @opening} ->
# Don't change our controls, but update position & reschedule
Process.send_after(self(), :move, 1000)
%{state | current: current + @percent_per_second}
{@stopped, @stopped} ->
state
{_, @closing} ->
# Start to close & reschedule a position update
Process.send_after(self(), :move, 1000)
close()
%{state | position_state: @closing, current: current - @percent_per_second}
{_, @opening} ->
# Start to open & reschedule a position update
Process.send_after(self(), :move, 1000)
open()
%{state | position_state: @opening, current: current + @percent_per_second}
{_, @stopped} ->
# Only stop movement if we're in the middle somewhere. Otherwise let the
# blind run to completion so we don't stop a tiny bit short of the end point
if target != 0.0 && target != 100.0, do: hold()
# Mark ourselves as having reached our target in any case
%{state | position_state: @stopped, current: target}
end
end
# Button manipulation
def open do
Logger.info("Tapping open")
release_all_buttons()
tap_button(@open_pin)
end
def close do
Logger.info("Tapping close")
release_all_buttons()
tap_button(@close_pin)
end
def hold do
Logger.info("Tapping hold")
release_all_buttons()
tap_button(@hold_pin)
end
defp release_all_buttons do
release_button(@open_pin)
release_button(@close_pin)
release_button(@hold_pin)
end
# Reconfiguring the pin as a floating input "lets go" of the button.
defp release_button(pin) do
Circuits.GPIO.open(pin, :input, pull_mode: :none)
end
# Driving the pin low presses the button — presumably the remote's buttons
# are active-low; TODO confirm wiring.
defp hold_button(pin) do
{:ok, gpio} = Circuits.GPIO.open(pin, :output, initial_value: 0)
Circuits.GPIO.write(gpio, 0)
end
# NOTE(review): this presses the button twice (press/release, press/release).
# It looks like a deliberate "double tap" — confirm the controller needs it.
defp tap_button(pin, duration \\ 250) do
hold_button(pin)
Process.sleep(duration)
release_button(pin)
Process.sleep(duration)
hold_button(pin)
Process.sleep(duration)
release_button(pin)
end
end
|
lib/upstairsbox/window_covering.ex
| 0.793586
| 0.411702
|
window_covering.ex
|
starcoder
|
defmodule Queens do
  @board_length 8
  @black "B"
  @white "W"
  @blank "_"
  @type t :: %Queens{black: {integer, integer}, white: {integer, integer}}
  defstruct black: nil, white: nil

  @doc """
  Creates a new set of Queens
  """
  @spec new() :: Queens.t()
  @spec new({integer, integer}, {integer, integer}) :: Queens.t()
  def new(white \\ {0, 3}, black \\ {7, 3})
  # The two queens may not occupy the same square.
  def new(same, same), do: raise(ArgumentError)
  def new(white, black), do: %Queens{white: white, black: black}

  @doc """
  Gives a string representation of the board with
  white and black queen locations shown
  """
  @spec to_string(Queens.t()) :: String.t()
  def to_string(queens) do
    # Rows are newline-separated; squares within a row are space-separated.
    Enum.map_join(0..(@board_length - 1), "\n", fn row ->
      Enum.map_join(0..(@board_length - 1), " ", fn col ->
        square(queens, {row, col})
      end)
    end)
  end

  @doc """
  Checks if the queens can attack each other
  """
  @spec can_attack?(Queens.t()) :: boolean
  def can_attack?(%Queens{black: {black_row, black_col}, white: {white_row, white_col}}) do
    # Same rank, same file, or same diagonal.
    black_row == white_row or black_col == white_col or
      abs(black_row - white_row) == abs(black_col - white_col)
  end

  # Renders one square: the white queen, the black queen, or a blank marker.
  # The repeated `position` variable pins the coordinate against the struct.
  defp square(%Queens{white: position}, position), do: @white
  defp square(%Queens{black: position}, position), do: @black
  defp square(_queens, _position), do: @blank
end
|
elixir/queen-attack/lib/queens.ex
| 0.802013
| 0.452475
|
queens.ex
|
starcoder
|
defmodule SevenSegment do
  import LogicGates

  @moduledoc """
  SevenSegment decoder that outputs a list of booleans
  equivalent to the inputs of a seven segment display
  """

  @doc """
  outputs a list of booleans representing the segments of a seven segment display
  ## Parameters
  - a: boolean representing the first digital input
  - b: boolean representing the second digital input
  - c: boolean representing the third digital input
  - d: boolean representing the last digital input
  """
  def decode(a, b, c, d)
      when is_boolean(a) and is_boolean(b) and is_boolean(c) and is_boolean(d) do
    # Pre-compute the inverted inputs and the product terms shared by
    # several segment equations.
    nb = not_gate(b)
    nc = not_gate(c)
    nd = not_gate(d)
    nb_nd = and_gate(nb, nd)
    nc_nd = and_gate(nc, nd)
    c_nd = and_gate(c, nd)
    nb_c = and_gate(nb, c)
    b_nc = and_gate(b, nc)

    # One boolean per segment (a through g of the display).
    [
      and_gate(b, d) |> or_gate(a, c, nb_nd),
      and_gate(c, d) |> or_gate(nb, nc_nd),
      or_gate(b, nc, d),
      and_gate(b, nc, d) |> or_gate(nb_nd, c_nd, nb_c, a),
      or_gate(nb_nd, c_nd),
      and_gate(b, nd) |> or_gate(a, nc_nd, b_nc),
      or_gate(a, b_nc, nb_c, c_nd)
    ]
  end

  @doc """
  outputs a list of booleans representing the segments of a seven segment display
  ## Parameters
  - value: can be of the following types =>
    - list: a list of 4 booleans representing digital input pins
    - number: integer in 0..9 representing the decimal number to be printed
      on the seven segment display
  """
  def decode(value) when is_list(value) and length(value) == 4 do
    apply(__MODULE__, :decode, value)
  end

  # BUGFIX: the guard now rejects negative integers up front. Previously
  # decode(-1) passed the `value < 10` guard and crashed deep inside the
  # binary-string parser with a confusing FunctionClauseError.
  def decode(value) when is_integer(value) and value >= 0 and value < 10 do
    value |> integer_to_bool_list() |> decode()
  end

  # Convert 0..9 into its 4-bit boolean representation, MSB first.
  defp integer_to_bool_list(number) do
    number
    |> Integer.to_string(2)
    |> String.pad_leading(4, "0")
    |> String.graphemes()
    |> Enum.map(&(&1 == "1"))
  end
end
|
lib/seven_segment.ex
| 0.8067
| 0.778691
|
seven_segment.ex
|
starcoder
|
defmodule Ravix.Ecto.Conversions do
  @moduledoc false

  # True when `doc` looks like a keyword list: its first element is a 2-tuple.
  # NOTE(review): only inspects the head element — assumes homogeneous lists,
  # and raises on the empty list (hd/1).
  defmacro is_keyword(doc) do
    quote do
      unquote(doc) |> hd |> tuple_size == 2
    end
  end

  # Literal scalar values that map directly onto RavenDB JSON values.
  defmacro is_literal(value) do
    quote do
      is_atom(unquote(value)) or is_number(unquote(value)) or is_binary(unquote(value))
    end
  end

  # NOTE(review): this clause is arity 1 while every other to_ecto clause is
  # arity 2, so the normal recursive calls (which pass `pk`) can never reach
  # it — confirm whether it should take a `_pk` argument.
  def to_ecto(%Ecto.Query.Tagged{type: type, value: value}) do
    {:ok, dumped} = Ecto.Type.adapter_dump(Ravix.Ecto.Adapter, type, value)
    dumped
  end

  # Structs pass through untouched.
  def to_ecto(%{__struct__: _} = value, _pk), do: value

  # Maps: rewrite RavenDB's "@metadata" entry into the primary-key field
  # (using the document "@id"), and recurse into every other value.
  def to_ecto(map, pk) when is_map(map) do
    Enum.into(map, %{}, fn
      {"@metadata", value} -> {Atom.to_string(pk), to_ecto(value["@id"], pk)}
      {key, value} -> {key, to_ecto(value, pk)}
    end)
  end

  def to_ecto(list, pk) when is_list(list), do: Enum.map(list, &to_ecto(&1, pk))
  def to_ecto(value, _pk), do: value

  # inject_params/3 walks a query document replacing `{:^, _, [idx]}`
  # placeholders with the corresponding entry of the `params` tuple.
  def inject_params(doc, params, pk) when is_keyword(doc), do: document(doc, params, pk)

  def inject_params(list, params, pk) when is_list(list) do
    case map(list, &inject_params(&1, params, pk)) do
      {:ok, values} when is_list(values) -> {:ok, values |> Enum.map(&keywords_to_object/1)}
      {:ok, values} -> {:ok, values}
      :error -> {:error, "Failed to inject the query parameters into a map"}
    end
  end

  # A tagged placeholder: resolve the parameter at `idx` and recurse on it.
  def inject_params(
        %Ecto.Query.Tagged{tag: _tag, type: _type, value: {:^, _, [idx]} = _value},
        params,
        pk
      ) do
    elem(params, idx) |> inject_params(params, pk)
  end

  # A bare placeholder AST node.
  def inject_params({:^, _, [idx]}, params, pk),
    do: elem(params, idx) |> inject_params(params, pk)

  def inject_params(%{__struct__: _} = struct, _params, pk), do: from_ecto(struct, pk)

  def inject_params(map, params, pk) when is_map(map) do
    case document(map, params, pk) do
      {:ok, values} when is_keyword(values) -> {:ok, keywords_to_object(values)}
      {:ok, values} -> {:ok, values}
      :error -> {:error, "Failed to inject the query parameters into a map"}
    end
  end

  # Anything else is a plain value: convert from Ecto representation.
  def inject_params(value, _params, pk), do: from_ecto(value, pk)

  # from_ecto/2 dumps an Ecto value into its RavenDB representation, returning
  # {:ok, value} | :error.
  def from_ecto(%Ecto.Query.Tagged{tag: :binary_id, value: value}, _pk),
    do: {:ok, value}

  def from_ecto(%Ecto.Query.Tagged{type: type, value: value}, _pk),
    do: Ecto.Type.adapter_dump(Ravix.Ecto.Adapter, type, value)

  def from_ecto(%{__struct__: _} = value, _pk), do: {:ok, value}
  def from_ecto(map, pk) when is_map(map), do: document(map, pk)
  def from_ecto(keyword, pk) when is_keyword(keyword), do: document(keyword, pk)
  def from_ecto(list, pk) when is_list(list), do: map(list, &from_ecto(&1, pk))
  def from_ecto(value, _pk) when is_literal(value), do: {:ok, value}

  # Erlang calendar tuples: {date, time} / date / time — dumped via the
  # matching Ecto type. Clause order matters: the 2-tuple naive-datetime
  # shape must be tried before the bare 3/4-tuples.
  def from_ecto({{_, _, _}, {_, _, _, _}} = value, _pk),
    do: Ecto.Type.adapter_dump(Ravix.Ecto.Adapter, :naive_datetime, value)

  def from_ecto({_, _, _} = value, _pk),
    do: Ecto.Type.adapter_dump(Ravix.Ecto.Adapter, :date, value)

  def from_ecto({_, _, _, _} = value, _pk),
    do: Ecto.Type.adapter_dump(Ravix.Ecto.Adapter, :time, value)

  def from_ecto(_value, _pk), do: :error

  # Convert each {key, value} pair of a document, dumping values from Ecto.
  defp document(doc, pk) do
    map(doc, fn {key, value} ->
      pair(key, value, pk, &from_ecto(&1, pk))
    end)
  end

  # Same, but resolving `^` parameter placeholders while walking.
  defp document(doc, params, pk) do
    map(doc, fn {key, value} ->
      pair(key, value, pk, &inject_params(&1, params, pk))
    end)
  end

  # Convert one pair. A nested {subkey, encoded} result is flattened into a
  # dotted "key.subkey" path.
  defp pair(key, value, pk, fun) do
    case fun.(value) do
      {:ok, {subkey, encoded}} -> {:ok, {"#{key}.#{subkey}", encoded}}
      {:ok, encoded} -> {:ok, {key(key, pk), encoded}}
      :error -> :error
    end
  end

  # The primary-key field is addressed as RavenDB's id() in queries.
  defp key(pk, pk), do: :"id()"

  defp key(key, _) do
    key
  end

  # map/2: apply `fun` to every element, short-circuiting on the first :error.
  defp map(map, _fun) when is_map(map) and map_size(map) == 0 do
    {:ok, %{}}
  end

  defp map(list, fun) do
    return =
      Enum.flat_map_reduce(list, :ok, fn elem, :ok ->
        case fun.(elem) do
          {:ok, value} -> {[value], :ok}
          :error -> {:halt, :error}
        end
      end)

    case return do
      {values, :ok} -> {:ok, values}
      {_values, :error} -> :error
    end
  end

  # Recursively turn keyword lists into maps so they serialize as JSON objects.
  defp keywords_to_object(keyword_list) when is_keyword(keyword_list),
    do: Enum.into(keyword_list, %{})

  defp keywords_to_object(list) when is_list(list),
    do: Enum.map(list, &keywords_to_object/1)

  defp keywords_to_object(value), do: value
end
|
lib/ravix_ecto/parsers/conversions.ex
| 0.66769
| 0.42937
|
conversions.ex
|
starcoder
|
defmodule Zipper do
  @moduledoc """
  A zipper over a `BinTree`: a focused subtree plus the trail of parent
  nodes needed to rebuild the whole tree.
  """

  defstruct focus: nil, trail: []

  @doc """
  Get a zipper focused on the root node.
  """
  @spec from_tree(BinTree.t()) :: Zipper.t()
  def from_tree(tree), do: %Zipper{focus: tree, trail: []}

  @doc """
  Get the complete tree from a zipper.
  """
  @spec to_tree(Zipper.t()) :: BinTree.t()
  def to_tree(zipper) do
    case up(zipper) do
      nil -> zipper.focus
      parent -> to_tree(parent)
    end
  end

  @doc """
  Get the value of the focus node.
  """
  @spec value(Zipper.t()) :: any
  def value(%Zipper{focus: %BinTree{value: v}}), do: v

  @doc """
  Get the left child of the focus node, if any.
  """
  @spec left(Zipper.t()) :: Zipper.t() | nil
  def left(%Zipper{focus: %BinTree{left: child} = node, trail: trail}) do
    if child, do: %Zipper{focus: child, trail: [{:left, node} | trail]}
  end

  @doc """
  Get the right child of the focus node, if any.
  """
  @spec right(Zipper.t()) :: Zipper.t() | nil
  def right(%Zipper{focus: %BinTree{right: child} = node, trail: trail}) do
    if child, do: %Zipper{focus: child, trail: [{:right, node} | trail]}
  end

  @doc """
  Get the parent of the focus node, if any.
  """
  @spec up(Zipper.t()) :: Zipper.t() | nil
  def up(%Zipper{trail: []}), do: nil

  def up(%Zipper{focus: child, trail: [{direction, parent} | rest]}) do
    rebuilt =
      case direction do
        :left -> %BinTree{parent | left: child}
        :right -> %BinTree{parent | right: child}
      end

    %Zipper{focus: rebuilt, trail: rest}
  end

  @doc """
  Set the value of the focus node.
  """
  @spec set_value(Zipper.t(), any) :: Zipper.t()
  def set_value(zipper, value), do: replace_focus(zipper, :value, value)

  @doc """
  Replace the left child tree of the focus node.
  """
  @spec set_left(Zipper.t(), BinTree.t() | nil) :: Zipper.t()
  def set_left(zipper, subtree), do: replace_focus(zipper, :left, subtree)

  @doc """
  Replace the right child tree of the focus node.
  """
  @spec set_right(Zipper.t(), BinTree.t() | nil) :: Zipper.t()
  def set_right(zipper, subtree), do: replace_focus(zipper, :right, subtree)

  # Rebuild the focus node with one field changed, keeping the trail intact.
  defp replace_focus(%Zipper{focus: %BinTree{} = node} = zipper, field, new) do
    %Zipper{zipper | focus: struct!(node, [{field, new}])}
  end
end
|
elixir/zipper/lib/zipper.ex
| 0.863852
| 0.727637
|
zipper.ex
|
starcoder
|
defmodule Geocoder.Providers.OpenStreetMaps do
  @moduledoc """
  Geocoding provider backed by the OpenStreetMap Nominatim HTTP API
  (`/search` for forward geocoding, `/reverse` for reverse geocoding).
  """
  use HTTPoison.Base
  use Towel

  @endpoint "https://nominatim.openstreetmap.org/"
  @endpath_reverse "/reverse"
  @endpath_search "/search"
  @defaults [format: "json", "accept-language": "en", addressdetails: 1]

  # Forward-geocode an address; returns {:ok, %Geocoder.Coords{}} | error.
  def geocode(opts) do
    request(@endpath_search, extract_opts(opts))
    |> fmap(&parse_geocode/1)
  end

  # Forward-geocode returning every match as a list of coords.
  def geocode_list(opts) do
    request_all(@endpath_search, extract_opts(opts))
    |> fmap(fn results -> Enum.map(results, &parse_geocode/1) end)
  end

  # Reverse-geocode a lat/lon pair into an address.
  def reverse_geocode(opts) do
    request(@endpath_reverse, extract_opts(opts))
    |> fmap(&parse_reverse_geocode/1)
  end

  def reverse_geocode_list(opts) do
    request_all(@endpath_search, extract_opts(opts))
    |> fmap(fn results -> Enum.map(results, &parse_reverse_geocode/1) end)
  end

  # Merge caller options over the defaults and derive the Nominatim `q`
  # query parameter from either :latlng or :address.
  defp extract_opts(opts) do
    @defaults
    |> Keyword.merge(opts)
    |> Keyword.put(
      :q,
      case opts |> Keyword.take([:address, :latlng]) |> Keyword.values() do
        [{lat, lon}] -> "#{lat},#{lon}"
        [query] -> query
        _ -> nil
      end
    )
    |> Keyword.take(
      [
        :q,
        :key,
        :address,
        :components,
        :bounds,
        :language,
        :region,
        :latlon,
        :lat,
        :lon,
        :placeid,
        :result_type,
        :location_type
      ] ++ Keyword.keys(@defaults)
    )
  end

  # Forward and reverse responses share the same JSON shape, so both parse
  # paths delegate to one helper.
  defp parse_geocode(response), do: parse_response(response)
  defp parse_reverse_geocode(response), do: parse_response(response)

  defp parse_response(response) do
    coords = geocode_coords(response)
    %{coords | bounds: geocode_bounds(response), location: geocode_location(response)}
  end

  # Nominatim returns "lat"/"lon" as strings, e.g. "51.0772661".
  defp geocode_coords(%{"lat" => lat, "lon" => lon}) do
    %Geocoder.Coords{lat: parse_float(lat), lon: parse_float(lon)}
  end

  defp geocode_bounds(%{"boundingbox" => [south, north, west, east]}) do
    # Nominatim's boundingbox order is [south, north, west, east].
    %Geocoder.Bounds{
      top: parse_float(south),
      right: parse_float(east),
      bottom: parse_float(north),
      left: parse_float(west)
    }
  end

  defp geocode_bounds(_), do: %Geocoder.Bounds{}

  # BUGFIX: String.to_float/1 raises on integer-form strings such as "51";
  # Float.parse/1 accepts both "51" and "51.07".
  defp parse_float(value) when is_float(value), do: value
  defp parse_float(value) when is_integer(value), do: value * 1.0

  defp parse_float(value) when is_binary(value) do
    case Float.parse(value) do
      {float, _rest} -> float
      :error -> raise ArgumentError, "expected a numeric string, got: #{inspect(value)}"
    end
  end

  # Example "address" payload:
  #   %{"city" => "Ghent", "city_district" => "Wondelgem", "country" => "Belgium",
  #     "country_code" => "be", "county" => "Gent", "postcode" => "9032",
  #     "road" => "Dikkelindestraat", "state" => "Flanders"}
  # Mapping from Nominatim address components to %Geocoder.Location{} fields.
  @map %{
    "city_district" => :city,
    "county" => :city,
    "city" => :city,
    "road" => :street,
    "state" => :state,
    "postcode" => :postal_code,
    "country" => :country
  }

  defp geocode_location(%{
         "address" => address,
         "display_name" => formatted_address,
         "osm_type" => _type
       }) do
    location = %Geocoder.Location{
      country_code: address["country_code"],
      formatted_address: formatted_address
    }

    # BUGFIX: components without a mapping previously did
    # Map.put(location, nil, value), polluting the struct with a nil key;
    # unmapped components are now skipped.
    Enum.reduce(address, location, fn {component, name}, acc ->
      case Map.get(@map, component) do
        nil -> acc
        field -> Map.put(acc, field, name)
      end
    end)
  end

  # Perform the HTTP request and normalize the decoded body to a list.
  defp request_all(path, params) do
    httpoison_options = Application.get_env(:geocoder, Geocoder.Worker)[:httpoison_options] || []

    get(path, [], Keyword.merge(httpoison_options, params: Enum.into(params, %{})))
    |> fmap(&Map.get(&1, :body))
    |> case do
      # BUGFIX: the old `{:ok, [list]}` clause only matched single-element
      # lists, so a multi-result response fell through and got wrapped again
      # ({:ok, [[...]]}). Any list now passes through unchanged.
      {:ok, list} when is_list(list) -> {:ok, list}
      {:ok, single} -> {:ok, [single]}
      other -> other
    end
  end

  # Take the first result only (removed the unreachable {:ok, [head]} clause
  # that was shadowed by {:ok, [head | _]}).
  defp request(path, params) do
    case request_all(path, params) do
      {:ok, [head | _]} -> {:ok, head}
      {:ok, head} -> {:ok, head}
      other -> other
    end
  end

  # HTTPoison.Base callback: prefix every path with the Nominatim endpoint.
  defp process_url(url) do
    @endpoint <> url
  end

  # HTTPoison.Base callback: decode JSON bodies.
  defp process_response_body(body) do
    body |> Poison.decode!()
  end
end
|
lib/geocoder/providers/open_street_maps.ex
| 0.592667
| 0.520253
|
open_street_maps.ex
|
starcoder
|
defmodule Sanbase.Alert.Validation.Operation do
  @moduledoc false
  # Validates alert trigger operations (percent change, absolute value/change,
  # channels, combinators) returning :ok | {:error, String.t()}.
  import Sanbase.Validation

  alias Sanbase.Alert.Operation

  # Closed sets of operation keys, resolved once at compile time so they can
  # be used in guards below.
  @percent_operations Operation.percent_operations()
  @absolute_value_operations Operation.absolute_value_operations()
  @absolute_change_operations Operation.absolute_change_operations()
  @absolute_operations Operation.absolute_operations()
  @channel_operations Operation.channel_operations()
  @combinator_operations Operation.combinator_operations()

  # An operation map is valid as a percent-change operation when its single
  # key is a percent or channel operation; combinators recurse.
  def valid_percent_change_operation?(operation) when is_map(operation) do
    case Map.keys(operation) do
      [op] when op in @percent_operations or op in @channel_operations ->
        valid_operation?(operation)

      [op] when op in @absolute_operations ->
        {:error, "#{inspect(operation)} is an absolute operation, not a percent change one."}

      [op] when op in @combinator_operations ->
        combinator_operation_valid?(operation, @percent_operations)

      _ ->
        {:error, "#{inspect(operation)} is not a valid percent change operation"}
    end
  end

  def valid_absolute_change_operation?(operation) do
    case Map.keys(operation) do
      [op] when op in @absolute_change_operations or op in @channel_operations ->
        valid_operation?(operation)

      [op] when op in @absolute_value_operations ->
        {:error,
         "#{inspect(operation)} is an absolute value operation, not an absolute change one."}

      [op] when op in @percent_operations ->
        {:error, "#{inspect(operation)} is a percent, not an absolute change one."}

      [op] when op in @combinator_operations ->
        combinator_operation_valid?(operation, @absolute_change_operations)

      _ ->
        {:error, "#{inspect(operation)} is not a valid absolute change operation"}
    end
  end

  def valid_absolute_value_operation?(operation) do
    case Map.keys(operation) do
      [op] when op in @absolute_value_operations or op in @channel_operations ->
        valid_operation?(operation)

      [op] when op in @absolute_change_operations ->
        {:error,
         "#{inspect(operation)} is an absolute change operation, not an absolute value one."}

      [op] when op in @percent_operations ->
        {:error, "#{inspect(operation)} is a percent, not an absolute value one."}

      [op] when op in @combinator_operations ->
        combinator_operation_valid?(operation, @absolute_value_operations)

      _ ->
        {:error, "#{inspect(operation)} is not a valid absolute value operation"}
    end
  end

  # Validate combinators
  def valid_operation?(%{some_of: list}) when is_list(list), do: valid_combinator_operation?(list)
  def valid_operation?(%{all_of: list}) when is_list(list), do: valid_combinator_operation?(list)
  def valid_operation?(%{none_of: list}) when is_list(list), do: valid_combinator_operation?(list)

  # Validate percent changes
  def valid_operation?(%{percent_up: percent} = map)
      when map_size(map) == 1 and is_valid_percent_change(percent),
      do: :ok

  def valid_operation?(%{percent_down: percent} = map)
      when map_size(map) == 1 and is_valid_percent_change(percent),
      do: :ok

  # Validate absolute values
  def valid_operation?(%{above: above} = map) when map_size(map) == 1 and is_number(above),
    do: :ok

  def valid_operation?(%{below: below} = map) when map_size(map) == 1 and is_number(below),
    do: :ok

  def valid_operation?(%{above_or_equal: above_or_equal} = map)
      when map_size(map) == 1 and is_number(above_or_equal),
      do: :ok

  def valid_operation?(%{below_or_equal: below_or_equal} = map)
      when map_size(map) == 1 and is_number(below_or_equal),
      do: :ok

  # Validate channels
  def valid_operation?(%{inside_channel: [min, max]}),
    do: valid_channel_operation?(:inside_channel, [min, max])

  def valid_operation?(%{outside_channel: [min, max]}),
    do: valid_channel_operation?(:outside_channel, [min, max])

  # Validate absolute value changes
  def valid_operation?(%{amount_up: value} = map) when map_size(map) == 1 and is_number(value),
    do: :ok

  def valid_operation?(%{amount_down: value} = map) when map_size(map) == 1 and is_number(value),
    do: :ok

  # Validate screener alert
  def valid_operation?(%{selector: %{watchlist_id: id}}) when is_integer(id) and id > 0, do: :ok

  def valid_operation?(%{selector: _} = selector) do
    case Sanbase.Model.Project.ListSelector.valid_selector?(selector) do
      true -> :ok
      false -> {:error, "The provided selector is not valid."}
    end
  end

  # All else is invalid operation
  def valid_operation?(op), do: {:error, "#{inspect(op)} is not a valid operation"}

  # Validate trending words operations
  def valid_trending_words_operation?(%{trending_word: true}), do: :ok
  def valid_trending_words_operation?(%{trending_project: true}), do: :ok

  def valid_trending_words_operation?(%{send_at_predefined_time: true, trigger_time: time_str}) do
    valid_iso8601_time_string?(time_str)
  end

  # Private functions

  # Every operation in `list` must have its single key in `operation_type`.
  defp all_operations_have_same_type?(list, operation_type) do
    Enum.all?(list, fn elem ->
      type = Map.keys(elem) |> List.first()
      type in operation_type
    end)
  end

  defp valid_channel_operation?(op, [min, max])
       when op in [:inside_channel, :outside_channel] and is_valid_min_max(min, max),
       do: :ok

  defp valid_channel_operation?(op, [min, max]) do
    {:error, "#{inspect(op)} with arguments [#{min}, #{max}] is not a valid channel operation"}
  end

  # Arity-1 variant: all operations must share one Operation.type/1.
  defp all_operations_have_same_type?(list) do
    Enum.map(list, &Operation.type/1)
    |> Enum.uniq()
    |> case do
      [_] -> true
      _ -> false
    end
  end

  # A combinator (`some_of`/`all_of`/`none_of`) is valid when every inner
  # operation is valid and all belong to the expected `type` set.
  defp combinator_operation_valid?(operation, type) do
    list = Map.values(operation) |> List.first()

    with true <- all_operations_valid?(list),
         true <- all_operations_have_same_type?(list, type) do
      :ok
    else
      {:error, message} -> {:error, message}
      false -> {:error, "Not all operations are from the same type"}
    end
  end

  # true when every element validates; otherwise an {:error, _} naming the
  # first invalid one.
  defp all_operations_valid?(list) do
    case Enum.find(list, fn op -> valid_operation?(op) != :ok end) do
      nil ->
        true

      not_valid_op ->
        {:error, "The list of operation contains not valid operation: #{inspect(not_valid_op)}"}
    end
  end

  defp valid_combinator_operation?(list) do
    with true <- all_operations_valid?(list),
         true <- all_operations_have_same_type?(list) do
      :ok
    else
      {:error, message} -> {:error, message}
      false -> {:error, "Not all operations are from the same type"}
    end
  end
end
|
lib/sanbase/alerts/trigger/validation/operation_validation.ex
| 0.691497
| 0.433082
|
operation_validation.ex
|
starcoder
|
defmodule Telegram.Api do
  @moduledoc ~S"""
  Telegram Bot API request.
  The module expose a light layer over the Telegram Bot API HTTP-based interface,
  it does not expose any "(data)binding" over the HTTP interface and tries to abstract
  away only the boilerplate for building / sending / serializing the API requests.
  Compared to a full-binded interface it could result less elixir frendly but it will
  work with any version of the Bot API, hopefully without updates or incompatibily
  with new BOT API versions (as much as they remain backward compatible).
  References:
  - [API specification](https://core.telegram.org/bots/api)
  - [BOT intro for developers](https://core.telegram.org/bots)
  Given the token of your BOT you can issue any request using:
  - method: Telegram API method name (ex. "getMe", "sendMessage")
  - options: Telegram API method specific parameters (you can use elixir native types)
  ## Examples:
  Given the bot token (something like):
  ```elixir
  token = "<KEY>"
  ```
  ### [getMe](https://core.telegram.org/bots/api#getme)
  ```elixir
  Telegram.Api.request(token, "getMe")
  {:ok, %{"first_name" => "Abc", "id" => 1234567, "is_bot" => true, "username" => "ABC"}}
  ```
  ### [sendMessage](https://core.telegram.org/bots/api#sendmessage)
  ```elixir
  Telegram.Api.request(token, "sendMessage", chat_id: 876532, text: "Hello! .. silently", disable_notification: true)
  {:ok,
  %{"chat" => %{"first_name" => "Firstname",
  "id" => 208255328,
  "last_name" => "Lastname",
  "type" => "private",
  "username" => "xxxx"},
  "date" => 1505118722,
  "from" => %{"first_name" => "Yyy",
  "id" => 234027650,
  "is_bot" => true,
  "username" => "yyy"},
  "message_id" => 1402,
  "text" => "Hello! .. silently"}}
  ```
  ### [getUpdates](https://core.telegram.org/bots/api#getupdates)
  ```elixir
  Telegram.Api.request(token, "getUpdates", offset: -1, timeout: 30)
  {:ok,
  [%{"message" => %{"chat" => %{"first_name" => "Firstname",
  "id" => 208255328,
  "last_name" => "Lastname",
  "type" => "private",
  "username" => "xxxx"},
  "date" => 1505118098,
  "from" => %{"first_name" => "Firstname",
  "id" => 208255328,
  "is_bot" => false,
  "language_code" => "en-IT",
  "last_name" => "Lastname",
  "username" => "xxxx"},
  "message_id" => 1401,
  "text" => "Hello!"},
  "update_id" => 129745295}]}
  ```
  ## Sending files
  If a API parameter has a InputFile type and you want to send a local file,
  for example a photo stored locally at "/tmp/photo.jpg", just wrap the parameter
  value in a tuple `{:file, "/tmp/photo.jpg"}`. If the file content is in memory
  wrap it in `{:file_content, data, "photo.jpg"}` tuple.
  ### [sendPhoto](https://core.telegram.org/bots/api#sendphoto)
  ```elixir
  Telegram.Api.request(token, "sendPhoto", chat_id: 876532, photo: {:file, "/tmp/photo.jpg"})
  Telegram.Api.request(token, "sendPhoto", chat_id: 876532, photo: {:file_content, photo, "photo.jpg"})
  ```
  ## Downloading files
  To download a file from the telegram server you need a `file_path` pointer to the file.
  With that you can download the file via `Telegram.Api.file`
  ```elixir
  {:ok, res} = Telegram.Api.request(token, "sendPhoto", chat_id: 12345, photo: {:file, "example/photo.jpg"})
  # pick the 'file_obj' with the desired resolution
  [file_obj | _] = res["photo"]
  # get the 'file_id'
  file_id = file_obj["file_id"]
  ```
  ### [getFile](https://core.telegram.org/bots/api#getfile)
  ```elixir
  {:ok, %{"file_path" => file_path}} = Telegram.Api.request(token, "getFile", file_id: file_id)
  {:ok, file} = Telegram.Api.file(token, file_path)
  ```
  ## Reply Markup
  If a API parameter has a "A JSON-serialized object" type (InlineKeyboardMarkup, ReplyKeyboardMarkup, etc),
  just wrap the parameter value in a tuple `{:json, value}`.
  Reference: [Keyboards](https://core.telegram.org/bots#keyboards),
  [Inline Keyboards](https://core.telegram.org/bots#inline-keyboards-and-on-the-fly-updating)
  ### [sendMessage](https://core.telegram.org/bots/api#sendmessage) with keyboard
  ```elixir
  keyboard = [
  ["A0", "A1"],
  ["B0", "B1", "B2"]
  ]
  keyboard_markup = %{one_time_keyboard: true, keyboard: keyboard}
  Telegram.Api.request(token, "sendMessage", chat_id: 876532, text: "Here a keyboard!", reply_markup: {:json, keyboard_markup})
  ```
  """

  @type parameters :: Keyword.t()
  @type request_result :: {:ok, term()} | {:error, term()}

  @doc """
  Send a Telegram Bot API request.
  Reference: [BOT Api](https://core.telegram.org/bots/api)
  """
  @spec request(Telegram.Types.token(), Telegram.Types.method(), parameters()) :: request_result()
  def request(token, method, parameters \\ []) do
    # Serialize {:json, _} parameters first, then pick the body encoding
    # (JSON map vs multipart) based on the presence of file parameters.
    body =
      parameters
      |> do_json_markup()
      |> do_body()

    Telegram.Client.request(token, method, body)
  end

  @doc """
  Download a file.
  Reference: [BOT Api](https://core.telegram.org/bots/api#file)
  Example:
  ```elixir
  # send a photo
  {:ok, res} = Telegram.Api.request(token, "sendPhoto", chat_id: 12345, photo: {:file, "example/photo.jpg"})
  # pick the 'file_obj' with the desired resolution
  [file_obj | _] = res["photo"]
  # get the 'file_id'
  file_id = file_obj["file_id"]
  # obtain the 'file_path' to dowload the file identified by 'file_id'
  {:ok, %{"file_path" => file_path}} = Telegram.Api.request(token, "getFile", file_id: file_id)
  {:ok, file} = Telegram.Api.file(token, file_path)
  ```
  """
  @spec file(Telegram.Types.token(), Telegram.Client.file_path()) :: request_result()
  def file(token, file_path) do
    Telegram.Client.file(token, file_path)
  end

  # Choose the request body representation based on the parameters.
  defp do_body(parameters) do
    if request_with_file?(parameters) do
      # body encoded as "multipart/form-data"
      do_multipart_body(parameters)
    else
      # body encoded as "application/json"
      Map.new(parameters)
    end
  end

  # True when any parameter is a {:file, path} or {:file_content, data, name}.
  defp request_with_file?(parameters) do
    Enum.any?(
      parameters,
      &(match?({_name, {:file, _}}, &1) or match?({_name, {:file_content, _, _}}, &1))
    )
  end

  # Build a Tesla.Multipart body: file parameters become file parts, every
  # other parameter a plain string field.
  defp do_multipart_body(parameters) do
    Enum.reduce(parameters, Tesla.Multipart.new(), fn
      {name, {:file, file}}, multipart ->
        Tesla.Multipart.add_file(multipart, file, name: to_string(name))

      {name, {:file_content, file_content, filename}}, multipart ->
        Tesla.Multipart.add_file_content(multipart, file_content, filename, name: to_string(name))

      {name, value}, multipart ->
        Tesla.Multipart.add_field(multipart, to_string(name), to_string(value))
    end)
  end

  # JSON-encode any {:json, value} parameter values; leave the rest as-is.
  defp do_json_markup(parameters) do
    Enum.map(parameters, fn
      {name, {:json, value}} ->
        {name, Jason.encode!(value)}

      others ->
        others
    end)
  end
end
|
lib/api.ex
| 0.854779
| 0.575051
|
api.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.MultiChannelAssociationReport do
  @moduledoc """
  This command is used to advertise the current destinations for a given association group.
  Params:
  * `:grouping_identifier` - the association grouping identifier (required)
  * `:max_nodes_supported` - the maximum number of destinations supported by the advertised association group. Each destination
  may be a NodeID destination or an End Point destination.
  * `:reports_to_follow` - if the full destination list is too long for one
  report this field reports the number of follow up reports (optional
  default `0`)
  * `:nodes` - list of nodes to add the grouping identifier (required)
  * `:node_endpoints` - Endpoints of multichannel nodes
  """
  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave
  alias Grizzly.ZWave.Command
  alias Grizzly.ZWave.CommandClasses.MultiChannelAssociation

  @type param() ::
          {:grouping_identifier, byte()}
          | {:max_nodes_supported, byte()}
          | {:reports_to_follow, byte()}
          | {:nodes, [ZWave.node_id()]}
          | {:node_endpoints, [MultiChannelAssociation.node_endpoint()]}

  # Separates the NodeID destination list from the End Point destinations.
  @marker 0x00

  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    command = %Command{
      name: :multi_channel_association_report,
      command_byte: 0x03,
      command_class: MultiChannelAssociation,
      params: params,
      impl: __MODULE__
    }

    {:ok, command}
  end

  @impl true
  @spec encode_params(Command.t()) :: binary()
  def encode_params(command) do
    grouping_identifier = Command.param!(command, :grouping_identifier)
    max_nodes_supported = Command.param!(command, :max_nodes_supported)
    reports_to_follow = Command.param!(command, :reports_to_follow)
    nodes_bin = :erlang.list_to_binary(Command.param!(command, :nodes))
    node_endpoints = Command.param!(command, :node_endpoints)

    # BUGFIX: the report header always carries grouping identifier,
    # max nodes supported and reports-to-follow. Previously the latter two
    # bytes were dropped when there were no endpoint destinations, producing
    # a binary that decode_params/1 could not round-trip.
    header = <<grouping_identifier, max_nodes_supported, reports_to_follow>>

    if node_endpoints == [] do
      header <> nodes_bin
    else
      # NodeID destinations, then the marker, then the endpoint destinations.
      header <>
        nodes_bin <>
        <<@marker>> <>
        MultiChannelAssociation.encode_node_endpoints(node_endpoints)
    end
  end

  @impl true
  @spec decode_params(binary) :: {:ok, [param()]}
  def decode_params(<<grouping_identifier, max_nodes_supported, reports_to_follow, rest::binary>>) do
    # `rest` holds the node-id list, optionally followed by the marker and
    # the encoded endpoint destinations.
    {node_ids, node_endpoints} = MultiChannelAssociation.decode_nodes_and_node_endpoints(rest)

    {:ok,
     [
       grouping_identifier: grouping_identifier,
       max_nodes_supported: max_nodes_supported,
       reports_to_follow: reports_to_follow,
       nodes: node_ids,
       node_endpoints: node_endpoints
     ]}
  end
end
|
lib/grizzly/zwave/commands/multi_channel_association_report.ex
| 0.913912
| 0.444444
|
multi_channel_association_report.ex
|
starcoder
|
defmodule Params do
@moduledoc ~S"""
Functions for processing params and transforming their changesets.
`use Params` provides a `defparams` macro, allowing you to define
functions that process parameters according to some [schema](Params.Schema.html)
## Example
```elixir
defmodule MyApp.SessionController do
use Params
defparams login_params(%{email!: :string, :password!: :string})
def create(conn, params) do
case login_params(params) do
%Ecto.Changeset{valid?: true} = ch ->
login = Params.data(ch)
User.authenticate(login.email, login.password)
# ...
_ -> text(conn, "Invalid parameters")
end
end
end
```
"""
@relations [:embed, :assoc]
alias Ecto.Changeset
@doc false
# `use Params` injects the schema-definition macros into the caller.
defmacro __using__([]) do
  quote do
    import Params.Def, only: [defparams: 1, defparams: 2, defschema: 1]
  end
end
@doc """
Transforms an Ecto.Changeset into a Map with atom keys.
Recursively traverses and transforms embedded changesets and skips keys that
was not part of params given to changeset
"""
@spec to_map(Changeset.t) :: map
def to_map(%Changeset{data: %{__struct__: module}} = ch) do
  # Layer the values: Ecto schema defaults, then schema-declared defaults,
  # then the actual changes — later layers win on conflict.
  ecto_defaults = module |> plain_defaults_defined_by_ecto_schema
  params_defaults = module |> schema |> defaults
  change = changes(ch)
  ecto_defaults
  |> deep_merge(params_defaults)
  |> deep_merge(change)
end
@doc """
Transforms an Ecto.Changeset into a struct.
Recursively traverses and transforms embedded changesets.
For example if the `LoginParams` module was defined like:
```elixir
defmodule LoginParams do
use Params.Schema, %{login!: :string, password!: :string}
end
```
You can transform the changeset returned by `from` into an struct like:
```elixir
data = LoginParams.from(%{"login" => "foo"}) |> Params.data
data.login # => "foo"
```
"""
@spec data(Changeset.t) :: struct
def data(%Changeset{data: data = %{__struct__: module}} = ch) do
  # Fill missing embedded fields with their schema-declared defaults first.
  default_embeds = default_embeds_from_schema(module)
  default = Enum.reduce(default_embeds, data, fn {k, v}, m ->
    Map.put(m, k, Map.get(m, k) || v)
  end)
  # Then apply the changes, recursing into (lists of) nested changesets.
  Enum.reduce(ch.changes, default, fn {k, v}, m ->
    case v do
      %Changeset{} -> Map.put(m, k, data(v))
      x = [%Changeset{} | _] -> Map.put(m, k, Enum.map(x, &data/1))
      _ -> Map.put(m, k, v)
    end
  end)
end
@doc false
# Builds the map of default values for embedded schemas declared with a
# :default option, recursing into nested embed modules.
def default_embeds_from_schema(module) when is_atom(module) do
  # A schema entry carries default embeds when any of its :embeds has :default.
  is_embed_default = fn kw ->
    Keyword.get(kw, :embeds, [])
    |> Enum.any?(&Keyword.has_key?(&1, :default))
  end
  # Map a schema entry to {field_name, defaults of the nested embed module}.
  default_embed = fn kw ->
    name = Keyword.get(kw, :name)
    embed_name = Params.Def.module_concat(module, name)
    {name, default_embeds_from_schema(embed_name)}
  end
  case schema(module) do
    nil -> %{}
    schema ->
      schema
      |> Stream.filter(is_embed_default)
      |> Stream.map(default_embed)
      |> Enum.into(struct(module) |> Map.from_struct)
  end
end
@doc false
# Reads the compile-time :schema attribute persisted by the schema macros.
def schema(module) when is_atom(module) do
  Keyword.get(module.__info__(:attributes), :schema)
end
@doc false
# Field names marked required in the schema; defaults to [] when absent.
def required(module) when is_atom(module) do
  Keyword.get(module.__info__(:attributes), :required, [])
end
@doc false
# Field names not marked required; falls back to every changeset field.
def optional(module) when is_atom(module) do
  case Keyword.get(module.__info__(:attributes), :optional) do
    nil -> Map.keys(module.__changeset__)
    names -> names
  end
end
@doc false
# Casts `params` into `changeset`, splitting plain fields from embed/assoc
# relations: plain fields are cast + validated, relations cast recursively.
def changeset(%Changeset{data: %{__struct__: module}} = changeset, params) do
  {required, required_relations} =
    relation_partition(module, required(module))
  {optional, optional_relations} =
    relation_partition(module, optional(module))
  changeset
  |> Changeset.cast(params, required ++ optional, [empty_values: [nil]])
  |> Changeset.validate_required(required)
  |> cast_relations(required_relations, [required: true])
  |> cast_relations(optional_relations, [])
end
@doc false
# Convenience clause: accept a struct and wrap it in a changeset first.
def changeset(model = %{__struct__: _}, params) do
  changeset(model |> change, params)
end
@doc false
# Convenience clause: accept a schema module and build an empty changeset.
def changeset(module, params) when is_atom(module) do
  changeset(module |> change, params)
end
# Normalize a struct or a schema module into a bare Ecto.Changeset.
defp change(%{__struct__: _} = model), do: Changeset.change(model)
defp change(module) when is_atom(module), do: module |> struct() |> Changeset.change()
# Splits `names` into {plain_fields, relations}, where relations are
# {name, :assoc | :embed} pairs according to the module's changeset type map.
# Both result lists come back in reverse order of `names` (prepend-accumulate).
defp relation_partition(module, names) do
  types = module.__changeset__

  names
  # Normalizes string names to atoms. Acceptable here because the names come
  # from compile-time module attributes, not from external input.
  |> Stream.map(fn x -> String.to_atom("#{x}") end)
  |> Enum.reduce({[], []}, fn name, {fields, relations} ->
    case Map.get(types, name) do
      # @relations is a module attribute defined earlier in this file
      # (outside this view); it lists the relation type tags.
      {type, _} when type in @relations ->
        {fields, [{name, type} | relations]}
      _ ->
        {[name | fields], relations}
    end
  end)
end
# Applies cast_assoc/cast_embed for each partitioned relation, threading the
# changeset through. `opts` carries e.g. [required: true] for required relations.
defp cast_relations(changeset, relations, opts) do
  Enum.reduce(relations, changeset, fn
    {name, :assoc}, ch -> Changeset.cast_assoc(ch, name, opts)
    {name, :embed}, ch -> Changeset.cast_embed(ch, name, opts)
  end)
end
# Recursively merges two maps. On a key conflict, two nested maps are merged
# deep; any other pair of values resolves to the right-hand side.
defp deep_merge(%{} = base, %{} = override) do
  Map.merge(base, override, &deep_merge_conflict/3)
end

defp deep_merge_conflict(_key, %{} = left, %{} = right), do: deep_merge(left, right)
defp deep_merge_conflict(_key, _left, right), do: right
# Walks a schema definition list and builds a nested map of declared :default
# values, keyed by (possibly nested) embed/field names.
defp defaults(params), do: defaults(params, %{}, [])

# acc accumulates the defaults map; path tracks the embed nesting (innermost first).
defp defaults(params, acc, path)
defp defaults([], acc, _path), do: acc
defp defaults(nil, _acc, _path), do: %{}

# Schema entries may arrive as keyword lists; normalize to maps first.
defp defaults([opts | rest], acc, path) when is_list(opts) do
  defaults([Enum.into(opts, %{}) | rest], acc, path)
end

# An embed: recurse into its sub-schema with the embed's name pushed onto path.
defp defaults([%{name: name, embeds: embeds} | rest], acc, path) do
  acc = defaults(embeds, acc, [name | path])
  defaults(rest, acc, path)
end

# A field with an explicit default: write it into acc at the nested path.
defp defaults([%{name: name, default: value} | rest], acc, path) do
  # Build one Access function per path segment so put_in can create missing
  # intermediate maps (plain atom keys would raise on an absent level).
  funs = [name | path]
  |> Enum.reverse
  |> Enum.map(fn nested_name ->
    fn :get_and_update, data, next ->
      with {nil, inner_data} <- next.(data[nested_name] || %{}),
           data = Map.put(data, nested_name, inner_data),
           do: {nil, data}
    end
  end)

  acc = put_in(acc, funs, value)
  defaults(rest, acc, path)
end

# Any other schema entry carries no default: skip it.
defp defaults([%{} | rest], acc, path) do
  defaults(rest, acc, path)
end
# Extracts a changeset's `changes` as a plain map, recursing into nested
# changesets and into lists of changesets (embeds_many/has_many casts).
defp changes(%Changeset{changes: changeset_changes}) do
  Map.new(changeset_changes, fn
    {key, %Changeset{} = nested} -> {key, changes(nested)}
    {key, [%Changeset{} | _] = nested_list} -> {key, Enum.map(nested_list, &changes/1)}
    {key, value} -> {key, value}
  end)
end
# Collects the non-nil default values declared on the Ecto schema struct,
# excluding the :__meta__ bookkeeping field.
defp plain_defaults_defined_by_ecto_schema(module) do
  for {field, default} <- Map.from_struct(struct(module)),
      field != :__meta__,
      not is_nil(default),
      into: %{} do
    {field, default}
  end
end
end
|
lib/params.ex
| 0.885049
| 0.782018
|
params.ex
|
starcoder
|
defmodule OliWeb.Api.GlobalStateController do
  @moduledoc """
  Provides user state service endpoints for extrinsic state.
  """
  use OliWeb, :controller
  use OpenApiSpex.Controller
  alias Oli.Delivery.ExtrinsicState
  alias OliWeb.Api.State
  alias OpenApiSpex.Schema

  # Keyword-list @moduledoc attaches documentation *metadata* (it merges with
  # the moduledoc string above rather than replacing it); OpenApiSpex reads
  # the :tags entry to group these endpoints in the generated spec.
  @moduledoc tags: ["User State Service: Extrinsic State"]

  # No path/query parameters are shared by all actions.
  @global_parameters []

  # OpenAPI description of the `keys` query parameter (used by delete/2 below).
  @keys [
    keys: [
      in: :query,
      schema: %Schema{type: :list},
      required: true,
      description: "A collection of key names"
    ]
  ]

  @doc """
  Reads state from the user's global context. State exists as key-value pairs. The
  values can be nested JSON structures or simple scalar attributes.
  The optional `keys` query parameter allows one to read a subset of the top-level
  keys present in this context. Omitting this parameter returns all top-level keys.
  An example request, showing how to structure the keys parameter to contain the key names
  "one", "two" and "three":
  ```
  /api/v1/state?keys[]=one&keys[]=two&keys[]=three
  ```
  """
  # NOTE(review): the docstring describes an optional `keys` parameter, but only
  # @global_parameters (empty) is declared here — confirm whether @keys should
  # also appear in this action's OpenAPI parameters.
  @doc parameters: @global_parameters,
       responses: %{
         200 => {"Update Response", "application/json", State.ReadResponse}
       }
  def read(conn, params) do
    State.read(conn, params, fn %{user: user, keys: keys} ->
      ExtrinsicState.read_global(user.id, keys)
    end)
  end

  @doc """
  Inserts or updates top-level keys into the user's global context.
  """
  @doc parameters: @global_parameters,
       request_body: {"Global Upsert", "application/json", State.UpsertBody, required: true},
       responses: %{
         200 => {"Update Response", "application/json", State.UpsertDeleteResponse}
       }
  def upsert(conn, params) do
    State.upsert(conn, params, fn %{user: user, key_values: key_values} ->
      ExtrinsicState.upsert_global(user.id, key_values)
    end)
  end

  @doc """
  Deletes one or more keys from a user's global context.
  An example request, showing how to structure the keys parameter to contain the key names
  "one", "two" and "three":
  ```
  /api/v1/state?keys[]=one&keys[]=two&keys[]=three
  ```
  """
  @doc parameters: @global_parameters ++ @keys,
       responses: %{
         200 => {"Delete Response", "application/json", State.UpsertDeleteResponse}
       }
  def delete(conn, params) do
    State.delete(conn, params, fn %{user: user, keys: keys} ->
      ExtrinsicState.delete_global(user.id, keys)
    end)
  end
end
|
lib/oli_web/controllers/api/global_state_controller.ex
| 0.871912
| 0.531209
|
global_state_controller.ex
|
starcoder
|
defmodule Timex.Ecto.Date do
  @moduledoc """
  Support for using Timex with :date fields
  """
  use Timex

  # NOTE(review): implements the legacy Ecto.Type behaviour and relies on
  # Ecto.Date, which only exists in Ecto 2.x — confirm the targeted Ecto version.
  @behaviour Ecto.Type

  # Underlying Ecto primitive type backing this custom type.
  def type, do: :date

  @doc """
  Handle casting to Timex.Ecto.Date
  """
  def cast(%Date{} = date), do: {:ok, date}

  # Support embeds_one/embeds_many
  # (maps produced when a Date was serialized to a string-keyed map and back).
  def cast(%{"calendar" => _,
             "year" => y, "month" => m, "day" => d}) do
    date = Timex.to_date({y,m,d})
    {:ok, date}
  end

  # Binary input: delegate parsing to Ecto.Date, then normalize through load/1.
  def cast(date) when is_binary(date) do
    case Ecto.Date.cast(date) do
      {:ok, d} -> load({d.year,d.month,d.day})
      :error -> :error
    end
  end

  # Fallback: try Timex coercion first; if Timex can't convert, fall back to
  # Ecto.Date casting and normalize through load/1.
  def cast(datetime) do
    case Timex.to_date(datetime) do
      {:error, _} ->
        case Ecto.Date.cast(datetime) do
          {:ok, date} -> load({date.year, date.month, date.day})
          :error -> :error
        end
      %Date{} = d -> {:ok, d}
    end
  end

  @doc """
  Creates a Timex.Date from from a passed in date.
  Returns `{:ok, Timex.Date}` when successful.
  Returns `:error` if the type passed in is either not an erl date nor Ecto.Date
  ## Examples
  Using an Ecto.Date:
      iex> Ecto.Date.from_erl({2017, 2, 1})
      ...> |> Timex.Ecto.Date.load
      {:ok, ~D[2017-02-01]}
  Using an erl date:
      iex> Timex.Ecto.Date.load({2017, 2, 1})
      {:ok, ~D[2017-02-01]}
  """
  def load({_year, _month, _day} = date), do: {:ok, Timex.to_date(date)}
  def load(%Ecto.Date{} = date), do: {:ok, Ecto.Date.to_erl(date) |> Timex.to_date}
  def load(_), do: :error

  @doc """
  Convert to native Ecto representation
  """
  # DateTimes are normalized to UTC before the date part is extracted, so the
  # stored date reflects the UTC calendar day.
  def dump(%DateTime{} = datetime) do
    case Timex.Timezone.convert(datetime, "Etc/UTC") do
      %DateTime{year: y, month: m, day: d} -> {:ok, {y,m,d}}
      {:error, _} -> :error
    end
  end

  # Accepts anything Timex.to_erl/1 understands: a {date, time} tuple keeps
  # only the date part; a bare erl date triple passes through.
  def dump(datetime) do
    case Timex.to_erl(datetime) do
      {:error, _} -> :error
      {{_,_,_}=d,_} -> {:ok, d}
      {_,_,_} = d -> {:ok, d}
    end
  end

  # The precision argument is accepted for interface compatibility but ignored;
  # the generated value is today's UTC date.
  def autogenerate(precision \\ :sec)
  def autogenerate(_) do
    {date, {_, _, _}} = :erlang.universaltime
    load(date) |> elem(1)
  end
end
|
lib/types/date.ex
| 0.90653
| 0.45181
|
date.ex
|
starcoder
|
defmodule UncaloteMe.AppContext do
  @moduledoc """
  The AppContext context.
  """
  import Ecto.Query, warn: false
  alias UncaloteMe.Repo
  alias UncaloteMe.AppContext.Debtor

  @doc """
  Returns the list of debtors.
  ## Examples
      iex> list_debtors()
      [%Debtor{}, ...]
  """
  def list_debtors do
    Repo.all(Debtor)
  end

  @doc """
  Gets a single debtor.
  Raises `Ecto.NoResultsError` if the Debtor does not exist.
  ## Examples
      iex> get_debtor!(123)
      %Debtor{}
      iex> get_debtor!(456)
      ** (Ecto.NoResultsError)
  """
  def get_debtor!(id), do: Repo.get!(Debtor, id)

  @doc """
  Creates a debtor.
  ## Examples
      iex> create_debtor(%{field: value})
      {:ok, %Debtor{}}
      iex> create_debtor(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_debtor(attrs \\ %{}) do
    %Debtor{}
    |> Debtor.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a debtor.
  ## Examples
      iex> update_debtor(debtor, %{field: new_value})
      {:ok, %Debtor{}}
      iex> update_debtor(debtor, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_debtor(%Debtor{} = debtor, attrs) do
    debtor
    |> Debtor.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Debtor.
  ## Examples
      iex> delete_debtor(debtor)
      {:ok, %Debtor{}}
      iex> delete_debtor(debtor)
      {:error, %Ecto.Changeset{}}
  """
  def delete_debtor(%Debtor{} = debtor) do
    Repo.delete(debtor)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking debtor changes.
  ## Examples
      iex> change_debtor(debtor)
      %Ecto.Changeset{source: %Debtor{}}
  """
  def change_debtor(%Debtor{} = debtor) do
    Debtor.changeset(debtor, %{})
  end

  alias UncaloteMe.AppContext.Debt

  @doc """
  Returns the list of debts belonging to the given debtor.
  ## Examples
      iex> list_debts(debtor_id)
      [%Debt{}, ...]
  """
  def list_debts(debtor_id) do
    # Repo.all from d in Debt, preload: [:debtor]
    query = from d in Debt, where: d.debtor_id == ^debtor_id
    Repo.all(query)
  end

  @doc """
  Gets a single debt.
  Raises `Ecto.NoResultsError` if the Debt does not exist.
  ## Examples
      iex> get_debt!(123)
      %Debt{}
      iex> get_debt!(456)
      ** (Ecto.NoResultsError)
  """
  def get_debt!(id), do: Repo.get!(Debt, id)

  @doc """
  Creates a debt.
  ## Examples
      iex> create_debt(%{field: value})
      {:ok, %Debt{}}
      iex> create_debt(%{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def create_debt(attrs \\ %{}) do
    %Debt{}
    |> Debt.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a debt.
  ## Examples
      iex> update_debt(debt, %{field: new_value})
      {:ok, %Debt{}}
      iex> update_debt(debt, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  def update_debt(%Debt{} = debt, attrs) do
    debt
    |> Debt.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Debt.
  ## Examples
      iex> delete_debt(debt)
      {:ok, %Debt{}}
      iex> delete_debt(debt)
      {:error, %Ecto.Changeset{}}
  """
  def delete_debt(%Debt{} = debt) do
    Repo.delete(debt)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking debt changes.
  ## Examples
      iex> change_debt(debt)
      %Ecto.Changeset{source: %Debt{}}
  """
  def change_debt(%Debt{} = debt) do
    Debt.changeset(debt, %{})
  end
end
|
lib/uncalote_me/app_context/app_context.ex
| 0.768342
| 0.417984
|
app_context.ex
|
starcoder
|
defmodule Observables.Reactivity do
  alias Observables.Obs

  @doc """
  * Lifts a unary function and applies it to an observable (essentially a wrapper for the map observable)
  """
  def liftapp(obs, fun) do
    obs |> Obs.map(fun)
  end

  @doc """
  * Lifts a binary function and applies it to two observables
  * Does not consume processed values, but keeps them as state until a more recent value is received,
  at which point the value is updated and a new output value is produced.
  This is similar to a behaviour with discrete updates in FRP.
  """
  def liftapp2_update(obs1, obs2, fun) do
    Obs.combinelatest(obs1, obs2)
    |> Obs.map(fn {v1, v2} ->
      fun.(v1, v2) end)
  end

  @doc """
  * Lifts an n-ary function and applies it to a list of n observables.
  * Does not consume processed values, but keeps them as state until a more recent value is received,
  at which point the value is updated and a new output value is produced.
  This is similar to a behaviour with discrete updates in FRP.
  """
  def liftappn_update(obss, fun, inits \\ nil) do
    Obs.combinelatest_n(obss, inits)
    |> Obs.map(fn arg_tuple ->
      apply(fun, Tuple.to_list(arg_tuple)) end)
  end

  @doc """
  * Lifts a binary function and applies it to two observables
  * Consumes processed values in a stream-wise fashion.
  This is similar to event-stream processing in FRP.
  """
  def liftapp2_propagate(obs1, obs2, fun) do
    Obs.zip(obs1, obs2)
    |> Obs.map(fn {v1, v2} ->
      fun.(v1, v2) end)
  end

  @doc """
  * Lifts an n-ary function and applies it to a list of n observables.
  * Consumes processed values in a stream-wise fashion.
  This is similar to event-stream processing in FRP.
  """
  def liftappn_propagate(obss, fun) do
    Obs.zip_n(obss)
    |> Obs.map(fn arg_tuple ->
      apply(fun, Tuple.to_list(arg_tuple)) end)
  end

  @doc """
  * Lifts an n-ary function and applies it to a list of n observables.
  * Does not consume processed values of observables in the first list,
  but instead keeps them as state until a more recent value is received,
  Does not produce output when receiving a value of an observable in this list.
  * Consumes processed values of observables in the second list in a stream-wise fashion.
  Only when all observables of this list have a value available is an output produced
  """
  def liftapp_update_propagate(obss1, obss2, fun) do
    Obs.combinelatest_n_zip_m(obss1, obss2)
    |> Obs.map(fn arg_tuple ->
      apply(fun, Tuple.to_list(arg_tuple)) end)
  end

  @doc """
  * Lifts an n-ary function and applies it to a list of n observables.
  * Does not consume processed values of observables in the first list,
  but instead keeps them as state until a more recent value is received,
  Does not produce output when receiving a value of an observable in this list.
  * Consumes processed values of observables in the second list in a stream-wise fashion.
  Only when all observables of this list have a value available is an output produced
  Buffers last zipped values from this list so that they do not get lost in the absence
  of values from observables in the first list.
  """
  def liftapp_update_propagate_buffered(obss1, obss2, fun) do
    Obs.combinelatest_n_zip_m_buffered(obss1, obss2)
    |> Obs.map(fn arg_tuple ->
      apply(fun, Tuple.to_list(arg_tuple)) end)
  end

  @doc """
  * Lifts an n-ary function and applies it to a list of n observables.
  * Does not consume processed values of observables in the first list,
  but instead keeps them as state until a more recent value is received,
  Does not produce output when receiving a value of an observable in this list.
  * Consumes processed values of observables in the second list in a stream-wise fashion.
  Only when all observables of this list have a value available is an output produced
  Buffers last zipped values from this list so that they do not get lost in the absence
  of values from observables in the first list.
  When for all observables in the first list a value is received, the buffered zipped values
  are combined with these values until the buffer is empty.
  """
  def liftapp_update_propagate_buffered_propagating(obss1, obss2, fun) do
    Obs.combinelatest_n_zip_m_buffered_propagating(obss1, obss2)
    |> Obs.map(fn arg_tuple ->
      apply(fun, Tuple.to_list(arg_tuple)) end)
  end

  @doc """
  * Lifts a function that can operate on lists of variable sizes
  and applies it to a variable sized list of observables, which is initially the given list.
  * Takes a higher order observable ob that announces new observables to add to the list of incoming dependencies.
  Observables that have stopped will be removed and operation will continue with the remaining ones.
  * Does not consume processed values, but keeps them as state until a more recent value is received,
  at which point the value is updated and a new output value is produced.
  This is similar to a behaviour with discrete updates in FRP.
  """
  def liftappvar_update(obs, obss, fun, inits \\ nil) do
    Obs.combinelatest_var(obs, obss, inits)
    |> Obs.map(fn arg_tuple ->
      fun.(Tuple.to_list(arg_tuple)) end)
  end

  @doc """
  * Lifts a function that can operate on lists of variable sizes
  and applies it to a variable sized list of observables, which is initially the given list.
  * Takes a higher order observable ob that announces new observables to add to the list of incoming dependencies.
  Observables that have stopped will be removed and operation will continue with the remaining ones.
  * Consumes processed values in a stream-wise fashion.
  This is similar to event-stream processing in FRP.
  """
  def liftappvar_propagate(obs, obss, fun) do
    Obs.zip_var(obs, obss)
    |> Obs.map(fn arg_tuple ->
      fun.(Tuple.to_list(arg_tuple)) end)
  end
end
|
lib/reactivity.ex
| 0.737914
| 0.781914
|
reactivity.ex
|
starcoder
|
defmodule AWS.Client do
  @moduledoc """
  Provides credentials and connection details for making requests to AWS services.
  You can configure `access_key_id` and `secret_access_key` which are the credentials
  needed by [IAM](https://aws.amazon.com/iam), and also the `region` for your services.
  The list of regions can be found in the [AWS service endpoints](https://docs.aws.amazon.com/general/latest/gr/rande.html)
  documentation. You can also use "local" to make requests to your localhost.
  Note that the region is overwritten with the credential scope when the service is global.
  ## Custom HTTP client
  The option `http_client` accepts a tuple with a module and a list of options.
  The module must implement the callback `c:AWS.HTTPClient.request/5`.
  ## Custom JSON or XML parsers
  You can configure a custom JSON parser by using the option `json_module`. This
  option accepts a tuple with a module and options. The given module must
  implement the callbacks from `AWS.JSON`.
  Similarly, there is a `xml_module` option that configures the XML parser. The
  XML module must implement the callbacks from `AWS.XML`.
  ## Additional options
  - `session_token`: an option to set the `X-Amz-Security-Token` when performing
  the requests.
  - `port`: is the port to use when making requests. By default is `443`
  - `proto`: is the protocol to use. It can be "http" or "https". By default it's "https".
  - `endpoint`: AWS's endpoint. By default it is `amazonaws.com`.
  The `service` option is overwritten by each service with its signing name from metadata.
  """
  defstruct access_key_id: nil,
            secret_access_key: nil,
            session_token: nil,
            region: nil,
            service: nil,
            endpoint: "amazonaws.com",
            proto: "https",
            port: "443",
            http_client: {AWS.HTTPClient, []},
            json_module: {AWS.JSON, []},
            xml_module: {AWS.XML, []}

  @type t :: %__MODULE__{}

  # Environment variable names consulted by create/0 and create/1.
  @aws_access_key_id "AWS_ACCESS_KEY_ID"
  @aws_secret_access_key "AWS_SECRET_ACCESS_KEY"
  @aws_session_token "AWS_SESSION_TOKEN"
  @aws_default_region "AWS_DEFAULT_REGION"

  # Builds a client entirely from environment variables; raises when the
  # default region is not set.
  def create() do
    region = System.get_env(@aws_default_region) || raise RuntimeError, "missing default region"
    create(region)
  end

  # Builds a client for the given region, reading the credentials (and an
  # optional session token) from environment variables. Raises when either
  # the access key id or the secret access key is missing.
  def create(region) do
    access_key_id =
      System.get_env(@aws_access_key_id) || raise RuntimeError, "missing access key id"

    secret_access_key =
      System.get_env(@aws_secret_access_key) || raise RuntimeError, "missing secret access key"

    case System.get_env(@aws_session_token) do
      nil -> create(access_key_id, secret_access_key, region)
      token -> create(access_key_id, secret_access_key, token, region)
    end
  end

  # Builds a client from explicit credentials (no session token).
  def create(access_key_id, secret_access_key, region) do
    %__MODULE__{
      access_key_id: access_key_id,
      secret_access_key: secret_access_key,
      region: region
    }
  end

  # Builds a client from explicit credentials plus a session token.
  def create(access_key_id, secret_access_key, token, region) do
    %__MODULE__{
      access_key_id: access_key_id,
      secret_access_key: secret_access_key,
      session_token: token,
      region: region
    }
  end

  # Performs an HTTP request through the configured http_client tuple.
  def request(client, method, url, body, headers, _opts \\ []) do
    {http_mod, http_opts} = client.http_client
    http_mod.request(method, url, body, headers, http_opts)
  end

  # Query payloads are encoded with the built-in query encoder; other formats
  # dispatch to the configured codec module.
  def encode!(_client, payload, :query), do: AWS.Util.encode_query(payload)

  def encode!(client, payload, format) do
    {encoder, opts} = codec(client, format)
    encoder.encode_to_iodata!(payload, opts)
  end

  def decode!(client, payload, format) do
    {decoder, opts} = codec(client, format)
    decoder.decode!(payload, opts)
  end

  # Looks up the {module, opts} codec tuple for a format (:json or :xml).
  # to_existing_atom is safe: the struct fields guarantee the atoms exist.
  defp codec(client, format) do
    Map.fetch!(client, String.to_existing_atom("#{format}_module"))
  end
end
|
lib/aws/client.ex
| 0.83025
| 0.403596
|
client.ex
|
starcoder
|
defmodule Yatzy.Game do
  @moduledoc """
  Game module responsible for :
  - starting a new game
  - delegating player interactions based on game's turn
  - finishing a game
  """
  use TypedStruct
  alias Yatzy.Player
  alias Yatzy.Result
  alias Yatzy.Roll

  typedstruct do
    # Players keyed by name for O(1) lookup on each turn.
    field :players, %{
      required(String.t()) => Player.t()
    }
    field :current_player, String.t()
    field :result, :pending | Result.t(), default: :pending
  end

  @type options :: [
    initial_player :: String.t()
  ]

  @doc """
  Create a new game with a list of players
  """
  @spec new(player_names :: [String.t()], options :: options()) :: t()
  def new(player_names, options \\ [])
  def new([], _options), do: raise(ArgumentError, "Specify some players")
  def new(player_names, options) do
    # Without an explicit :initial_player, a random player starts.
    current_player = Keyword.get(options, :initial_player, Enum.random(player_names))

    players =
      player_names
      |> no_blank_names()
      |> no_duplicate_names()
      |> Enum.map(fn name -> {name, Player.new(name)} end)
      |> Enum.into(%{})

    %__MODULE__{players: players, current_player: current_player}
  end

  @doc """
  (Re)Rolling the dice for a given player
  """
  # Silently returns the unchanged game when it is not this player's turn or
  # the game has finished (the `else` swallows every error tuple).
  @spec roll(game :: t(), player_name :: String.t(), opts :: Roll.options()) :: t()
  def roll(game = %__MODULE__{}, player_name, opts \\ []) do
    with {:ok, ^game, player} <- find_player(game, player_name),
         {:ok, ^game} <- game_still_running(game),
         {:ok, ^game, player} <- do_player_roll(game, player, opts) do
      update_game_player(game, player)
    else
      _ -> game
    end
  end

  @doc """
  Saving a player's current roll and ending that player's turn.
  """
  # As with roll/3, any failed step leaves the game untouched.
  @spec save(game :: t(), player_name :: String.t(), rule :: atom()) :: t()
  def save(game = %__MODULE__{}, player_name, rule) do
    with {:ok, ^game, player} <- find_player(game, player_name),
         {:ok, ^game} <- game_still_running(game),
         {:ok, ^game, player} <- save_player_roll(game, player, rule),
         {:ok, game} <- next_turn(game, player) do
      update_game_player(game, player)
    else
      _ -> game
    end
  end

  @doc """
  Finish the game and calculate the results
  """
  @spec finish(game :: t()) :: t()
  def finish(game = %__MODULE__{}), do: %{game | result: Result.new(Map.values(game.players))}

  @spec no_duplicate_names(names :: [String.t()]) :: [String.t()]
  defp no_duplicate_names(names), do: no_duplicate_names(names, Enum.uniq(names) == names)
  defp no_duplicate_names(_names, false),
    do: raise(ArgumentError, "Players must have distinct names")
  defp no_duplicate_names(names, _valid), do: names

  @spec no_blank_names(names :: [String.t()]) :: [String.t()]
  defp no_blank_names(names),
    do: no_blank_names(names, Enum.all?(names, &(String.trim(&1) != "")))
  defp no_blank_names(_names, false), do: raise(ArgumentError, "Player names can't be blank")
  defp no_blank_names(names, _valid), do: names

  # Resolves the player only when it is their turn; otherwise {:error, game}.
  @spec find_player(game :: t(), player_name :: String.t()) ::
          {:ok, game :: t(), player :: Player.t()} | {:error, game :: t()}
  defp find_player(game = %__MODULE__{current_player: current_player}, player_name)
       when current_player != player_name do
    {:error, game}
  end
  defp find_player(game = %__MODULE__{current_player: current_player}, player_name)
       when current_player == player_name do
    {:ok, game, game.players[player_name]}
  end

  @spec do_player_roll(game :: t(), player :: Player.t(), opts :: Roll.options()) ::
          {:ok, game :: t(), player :: Player.t()}
  defp do_player_roll(game = %__MODULE__{}, player, opts),
    do: {:ok, game, Player.roll(player, opts)}

  # Writes the updated player back into the players map. The `%{map | key => v}`
  # update syntax raises KeyError if the player is unknown, which asserts the
  # player already exists.
  @spec update_game_player(game :: t(), player :: Player.t()) :: t()
  defp update_game_player(game = %__MODULE__{}, player = %Player{}) do
    players = %{game.players | player.name => player}
    %{game | players: players}
  end

  # A roll may only be saved after the player has rolled at least once
  # (current_roll.counter is 0 before the first roll of a turn).
  @spec save_player_roll(game :: t(), player :: Player.t(), rule :: atom()) ::
          {:ok | :error, game :: t(), player :: Player.t()}
  defp save_player_roll(game = %__MODULE__{}, player = %Player{}, rule) do
    save_player_roll(game, player, rule, player.current_roll.counter != 0)
  end
  defp save_player_roll(game, player, _rule, false), do: {:error, game, player}
  defp save_player_roll(game, player, rule, _valid), do: {:ok, game, Player.save(player, rule)}

  # Advances current_player to the next name in the players map.
  # NOTE(review): turn order follows Map.keys/1 ordering (term order for small
  # maps), not the order players were passed to new/2 — confirm this is intended.
  @spec next_turn(game :: t(), player :: Player.t()) :: {:ok, game :: t()}
  defp next_turn(game = %__MODULE__{}, player = %Player{}) do
    players = Map.keys(game.players)
    current_index = Enum.find_index(players, &(player.name == &1))
    next_index = rem(current_index + 1, length(players))
    next_player = Enum.at(players, next_index)
    {:ok, %{game | current_player: next_player}}
  end

  @spec game_still_running(game :: t()) :: {:ok | :error, game :: t()}
  defp game_still_running(game = %__MODULE__{result: :pending}), do: {:ok, game}
  defp game_still_running(game = %__MODULE__{}), do: {:error, game}
end
|
lib/yatzy/game.ex
| 0.804636
| 0.408365
|
game.ex
|
starcoder
|
defmodule Speedtest do
  @moduledoc """
  Speedtest.net client for Elixir.
  """
  alias Speedtest.Ping
  alias Speedtest.Decoder
  alias Speedtest.Result

  require Logger

  defstruct config: [],
            servers: [],
            include: nil,
            exclude: nil,
            threads: nil,
            selected_server: nil,
            result: nil

  @doc """
  Retrieve the list of speedtest.net servers, optionally filtered by the
  `include`/`exclude` fields of the given struct.
  example:
  Speedtest.fetch_servers(%Speedtest{})
  """
  def fetch_servers(%Speedtest{} = speedtest \\ %Speedtest{}) do
    Logger.info("Retrieving speedtest.net server list...")

    urls = [
      "https://www.speedtest.net/speedtest-servers-static.php",
      "http://c.speedtest.net/speedtest-servers-static.php",
      "https://www.speedtest.net/speedtest-servers.php",
      "http://c.speedtest.net/speedtest-servers.php"
    ]

    first = List.first(urls)
    {_, response} = fetch_server(first)
    result = Decoder.server(response)

    # BUG FIX: previously the include/exclude options filtered the *option*
    # lists themselves (returning option entries instead of servers). They now
    # filter the decoded server list: `include` keeps only matching servers,
    # `exclude` removes matching servers.
    result =
      case speedtest.include do
        nil -> result
        include -> Enum.filter(result, fn server -> Enum.member?(include, server) end)
      end

    result =
      case speedtest.exclude do
        nil -> result
        exclude -> Enum.reject(result, fn server -> Enum.member?(exclude, server) end)
      end

    reply = %{speedtest | servers: result}
    {:ok, reply}
  end

  @doc """
  Limit servers to the closest speedtest.net servers based on
  geographic distance
  example:
  Speedtest.choose_closest_servers()
  """
  def choose_closest_servers(servers \\ [], amount \\ 2) do
    # Requires each server to carry a :distance (see distance/1).
    servers = Enum.sort_by(servers, fn s -> s.distance end)
    reply = Enum.take(servers, amount)
    reply
  end

  @doc """
  Perform a speedtest.net "ping" to determine which speedtest.net
  server has the lowest latency
  example:
  Speedtest.choose_best_server([]})
  """
  def choose_best_server(servers) do
    Logger.info("Selecting best server based on ping...")

    # Attach a ping measurement to each candidate, then pick the fastest.
    reply =
      Enum.map(servers, fn s ->
        url = Decoder.url(s.host)
        ping = Speedtest.Ping.ping(url)
        Map.put(s, :ping, ping)
      end)

    servers = Enum.sort_by(reply, fn s -> s.ping end)
    List.first(servers)
  end

  @doc """
  Test download speed against speedtest.net
  example:
  Speedtest.download(%Speedtest{})
  """
  def download(%Speedtest{} = speedtest \\ %Speedtest{}) do
    Logger.info("Testing download speed...")
    {_, urls} = generate_download_urls(speedtest)

    responses =
      Enum.map(urls, fn u ->
        # Time the full GET of each test payload.
        {time_in_microseconds, return} =
          :timer.tc(fn ->
            {_, reply} = HTTPoison.get(u)
            reply
          end)

        # The payload size comes from the response's Content-Length header.
        [{_, length}] =
          Enum.filter(return.headers, fn h ->
            {key, _} = h
            key == "Content-Length"
          end)

        %{elapsed_time: time_in_microseconds, bytes: String.to_integer(length), url: u}
      end)

    {:ok, responses}
  end

  @doc """
  Test upload speed against speedtest.net
  example:
  Speedtest.upload(%Speedtest{})
  """
  def upload(%Speedtest{} = speedtest \\ %Speedtest{}) do
    Logger.info("Testing Upload Speed...")
    {_, data} = generate_upload_data(speedtest)

    responses =
      Enum.map(data, fn {url, size} ->
        # Only the elapsed time matters; the response body is ignored
        # (binding renamed to _return to silence the unused-variable warning).
        {time_in_microseconds, _return} =
          :timer.tc(fn ->
            headers = [{"Content-length", size}]
            body = ""
            {_, reply} = HTTPoison.post(url, body, headers)
            reply
          end)

        %{elapsed_time: time_in_microseconds, bytes: size, url: url}
      end)

    {:ok, responses}
  end

  @doc """
  Determine distance between sets of [lat,lon] in km
  example:
  Speedtest.distance(%Speedtest{})
  """
  def distance(%Speedtest{} = speedtest \\ %Speedtest{}) do
    # Annotate every server with its distance from the client's location.
    servers =
      Enum.map(speedtest.servers, fn s ->
        distance =
          Geocalc.distance_between([speedtest.config.client.lat, speedtest.config.client.lon], [
            s.lat,
            s.lon
          ])

        Map.put(s, :distance, distance)
      end)

    speedtest = %{speedtest | servers: servers}
    {:ok, speedtest}
  end

  @doc """
  Run the full speedtest.net test
  example:
  Speedtest.run()
  """
  def run() do
    {_, init} = init()
    {_, result} = fetch_config_data()
    config = Decoder.config(result)
    Logger.info("Testing from " <> config.client.isp <> " (" <> config.client.ip <> ")...")
    {_, result} = fetch_servers(init)
    speedtest = %{result | config: config}
    {_, result} = distance(speedtest)
    closest_servers = choose_closest_servers(result.servers)
    selected_server = choose_best_server(closest_servers)
    {_, _, ping} = selected_server.ping
    ping = to_string(ping)
    elapsed_time = ping <> " ms"

    Logger.info(
      "Hosted by " <>
        selected_server.sponsor <>
        " (" <>
        selected_server.name <>
        ") " <>
        "[" <>
        to_string(Float.round(selected_server.distance / 1000)) <> " km]: " <> elapsed_time
    )

    speedtest = %{result | selected_server: selected_server}
    {_, download_reply} = download(speedtest)
    {_, upload_reply} = upload(speedtest)
    replys = {upload_reply, download_reply}
    {_, reply} = Result.create(speedtest, replys)
    speed = to_string(Float.round(reply.result.download, 2)) <> " Mbit/s"
    Logger.info("Download: " <> speed)
    speed = to_string(Float.round(reply.result.upload, 2)) <> " Mbit/s"
    Logger.info("Upload: " <> speed)
    config = reply.config
    client = reply.result.client
    config = %{config | client: client}
    reply = %{reply | config: config}
    {:ok, reply}
  end

  @doc """
  Ping an IP and return a tuple with the time
  example:
  Speedtest.ping("127.0.0.1")
  """
  def ping(ip) do
    Ping.ping(ip)
  end

  @doc """
  Setup the base speedtest
  ## Examples
      iex> Speedtest.init()
      {:ok, %Speedtest{config: [],exclude: nil,include: nil,result: nil,selected_server: nil,servers: [],threads: nil}}
  """
  def init() do
    # Pulls optional settings from the application environment.
    threads = Application.get_env(:speedtest, :threads)
    include = Application.get_env(:speedtest, :include)
    exclude = Application.get_env(:speedtest, :exclude)
    st = %Speedtest{}
    reply = %{st | threads: threads, include: include, exclude: exclude}
    {:ok, reply}
  end

  # Builds the list of download test URLs from the configured payload sizes,
  # shuffled so repeated runs don't hit the same ordering.
  defp generate_download_urls(%Speedtest{} = speedtest \\ %Speedtest{}) do
    urls =
      Enum.map(speedtest.config.sizes.download, fn s ->
        size = to_string(s)
        speedtest.selected_server.host <> "/speedtest/random" <> size <> "x" <> size <> ".jpg"
      end)

    urls = Enum.shuffle(urls)
    {:ok, urls}
  end

  # Pairs the selected server's upload URL with each configured payload size.
  defp generate_upload_data(%Speedtest{} = speedtest \\ %Speedtest{}) do
    data =
      Enum.map(speedtest.config.sizes.upload, fn s ->
        {speedtest.selected_server.url, to_string(s)}
      end)

    {:ok, data}
  end

  # Browser-like User-Agent; speedtest.net rejects unknown clients.
  defp user_agent() do
    {"User-Agent",
     "Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36"}
  end

  defp fetch_server(server) do
    HTTPoison.get(server, [user_agent()], hackney: [headers: [user_agent()]])
  end

  defp fetch_config_data() do
    Logger.info("Retrieving speedtest.net configuration...")

    {status, response} =
      HTTPoison.get(
        "https://www.speedtest.net/speedtest-config.php",
        [user_agent()],
        hackney: [headers: [user_agent()]]
      )

    {status, response}
  end
end
|
lib/speedtest.ex
| 0.80651
| 0.564459
|
speedtest.ex
|
starcoder
|
defmodule ExInsights do
@moduledoc """
Exposes methods for POST events & metrics to Azure Application Insights.
For more information on initialization and usage consult the [README.md](readme.html)
"""
alias ExInsights.Configuration, as: Conf
alias ExInsights.Data.{Envelope, Payload}
@typedoc """
Measurement name. Will be used extensively in the app insights UI
"""
@type name :: String.t() | atom
@typedoc ~S"""
A map of `[name -> string]` to add metadata to a tracking request
"""
@type properties :: %{optional(name) => String.t()}
@typedoc ~S"""
A map of `[name -> number]` to add measurement data to a tracking request
"""
@type measurements :: %{optional(name) => number}
@typedoc ~S"""
Defines the level of severity for the event.
"""
@type severity_level :: :verbose | :info | :warning | :error | :critical
@typedoc ~S"""
Represents the exception's stack trace.
"""
@type stack_trace :: [stack_trace_entry]
@type stack_trace_entry ::
{module, atom, arity_or_args, location}
| {(... -> any), arity_or_args, location}
@type instrumentation_key :: String.t() | nil
@typep arity_or_args :: non_neg_integer | list
@typep location :: keyword
@doc ~S"""
Log a user action or other occurrence.
### Parameters:
```
name: name of the event (string)
properties (optional): a map of [string -> string] pairs for adding extra properties to this event
measurements (optional): a map of [string -> number] values associated with this event that can be aggregated/summed/etc. on the UI
instrumentation_key (optional): Azure application insights API key. If not set it will be read from the configuration (see README.md)
```
"""
@spec track_event(
        name :: name,
        properties :: properties,
        measurements :: measurements,
        instrumentation_key :: instrumentation_key
      ) :: :ok
def track_event(name, properties \\ %{}, measurements \\ %{}, instrumentation_key \\ nil)
    when is_binary(name) do
  # Build the App Insights envelope payload and hand it to the shared tracker.
  Payload.create_event_payload(name, properties, measurements)
  |> track(instrumentation_key)
end
@doc ~S"""
Log a trace message.
### Parameters:
```
message: A string to identify this event in the portal.
severity_level: The level of severity for the event. Defaults to :info.
properties: map[string, string] - additional data used to filter events and metrics in the portal. Defaults to empty.
instrumentation_key (optional): Azure application insights API key. If not set it will be read from the configuration (see README.md)
```
"""
@spec track_trace(
        String.t(),
        severity_level :: severity_level,
        properties :: properties,
        instrumentation_key :: instrumentation_key
      ) :: :ok
def track_trace(message, severity_level \\ :info, properties \\ %{}, instrumentation_key \\ nil) do
  # Build the trace payload and hand it to the shared tracker.
  Payload.create_trace_payload(message, severity_level, properties)
  |> track(instrumentation_key)
end
@doc ~S"""
Log an exception you have caught.
### Parameters:
```
exception: An Error from a catch clause, or the string error message.
stack_trace: An erlang stacktrace.
handle_at (optional): where the exception was handled (string or nil).
properties: map[string, string] - additional data used to filter events and metrics in the portal. Defaults to empty.
measurements: map[string, number] - metrics associated with this event, displayed in Metrics Explorer on the portal. Defaults to empty.
instrumentation_key (optional): Azure application insights API key. If not set it will be read from the configuration (see README.md)
```
"""
@spec track_exception(
        String.t(),
        stack_trace :: stack_trace,
        String.t() | nil,
        properties :: properties,
        measurements :: measurements,
        instrumentation_key :: instrumentation_key
      ) :: :ok
def track_exception(
      exception,
      stack_trace,
      handle_at \\ nil,
      properties \\ %{},
      measurements \\ %{},
      instrumentation_key \\ nil
    ) do
  # Build the exception envelope, then hand it to the shared tracking pipeline.
  exception
  |> Payload.create_exception_payload(stack_trace, handle_at, properties, measurements)
  |> track(instrumentation_key)
end
@doc ~S"""
Log a numeric value that is not associated with a specific event.
Typically used to send regular reports of performance indicators.
### Parameters:
```
name: name of the metric
value: the value of the metric (number)
properties (optional): a map of [string -> string] pairs for adding extra properties to this event
instrumentation_key (optional): Azure application insights API key. If not set it will be read from the configuration (see README.md)
```
"""
@spec track_metric(
        name :: name,
        number,
        properties :: properties,
        instrumentation_key :: instrumentation_key
      ) :: :ok
def track_metric(name, value, properties \\ %{}, instrumentation_key \\ nil)
    when is_binary(name) do
  # Build the metric envelope, then hand it to the shared tracking pipeline.
  name
  |> Payload.create_metric_payload(value, properties)
  |> track(instrumentation_key)
end
@doc ~S"""
Log a dependency, for example requests to an external service or SQL calls.
### Parameters:
```
name: String that identifies the dependency.
command_name: String of the name of the command made against the dependency (eg. full URL with querystring or SQL command text).
elapsed_time_ms: Number for elapsed time in milliseconds of the command made against the dependency.
success: Boolean which indicates success.
dependency_type_name: String which denotes dependency type. Defaults to "".
target: String of the target host of the dependency. Defaults to nil.
properties (optional): map[string, string] - additional data used to filter events and metrics in the portal. Defaults to empty.
instrumentation_key (optional): Azure application insights API key. If not set it will be read from the configuration (see README.md)
```
"""
@spec track_dependency(
        name :: name,
        String.t(),
        number,
        boolean,
        String.t(),
        String.t() | nil,
        properties :: properties,
        instrumentation_key :: instrumentation_key
      ) :: :ok
def track_dependency(
      name,
      command_name,
      elapsed_time_ms,
      success,
      dependency_type_name \\ "",
      target \\ nil,
      properties \\ %{},
      instrumentation_key \\ nil
    ) do
  # Build the dependency envelope, then hand it to the shared tracking pipeline.
  name
  |> Payload.create_dependency_payload(
    command_name,
    elapsed_time_ms,
    success,
    dependency_type_name,
    target,
    properties
  )
  |> track(instrumentation_key)
end
@doc ~S"""
Log a request, for example incoming HTTP requests
### Parameters:
```
name: String that identifies the request
url: Request URL
source: Request Source. Encapsulates info about the component that initiated the request (can be nil)
elapsed_time_ms: Number for elapsed time in milliseconds
result_code: Result code reported by the application
success: whether the request was successful
properties (optional): map[string, string] - additional data used to filter events and metrics in the portal. Defaults to empty.
measurements (optional): a map of [string -> number] values associated with this event that can be aggregated/summed/etc. on the UI
id (optional): a unique identifier representing the request. A random id is generated when not provided.
instrumentation_key (optional): Azure application insights API key. If not set it will be read from the configuration (see README.md)
```
"""
@spec track_request(
        name :: name,
        url :: String.t(),
        source :: String.t() | nil,
        elapsed_time_ms :: number,
        result_code :: String.t() | number,
        success :: boolean,
        properties :: properties,
        measurements :: measurements,
        id :: String.t() | nil,
        instrumentation_key :: instrumentation_key
      ) ::
        :ok
def track_request(
      name,
      url,
      source,
      elapsed_time_ms,
      result_code,
      success,
      properties \\ %{},
      measurements \\ %{},
      id \\ nil,
      instrumentation_key \\ nil
    ) do
  # Auto-generate an 8-hex-char id when none was supplied.
  # BUG FIX: previously used `:rand.uniform(438_964_124)`, which covers only
  # ~2^28.7 of the 32-bit space and is not cryptographically strong, making
  # id collisions between requests more likely. `:crypto.strong_rand_bytes/1`
  # yields the full 32-bit space with the same 8-character hex format.
  id = id || Base.encode16(:crypto.strong_rand_bytes(4))

  Payload.create_request_payload(
    name,
    url,
    source,
    elapsed_time_ms,
    result_code,
    success,
    properties,
    measurements,
    id
  )
  |> track(instrumentation_key)
end
# Shared delivery pipeline for all track_* functions: stamps the payload
# envelope with the resolved instrumentation key, asserts a key is present
# (raises otherwise), and enqueues it on the aggregation worker for async
# batching/sending. Always returns :ok — delivery itself is fire-and-forget.
@spec track(map, instrumentation_key()) :: :ok
defp track(%Envelope{} = payload, instrumentation_key) do
  key = read_instrumentation_key(instrumentation_key)

  payload
  |> Envelope.set_instrumentation_key(key)
  |> Envelope.ensure_instrumentation_key_present()
  |> ExInsights.Aggregation.Worker.track()

  :ok
end
# Resolves the effective instrumentation key: an explicit non-empty binary
# wins; anything else (nil, "", non-binary) falls back to the configured
# :instrumentation_key value.
def read_instrumentation_key(key) when is_binary(key) and byte_size(key) > 0 do
  key
end

def read_instrumentation_key(_absent_or_invalid) do
  Conf.get_value(:instrumentation_key)
end
end
|
lib/ex_insights.ex
| 0.861101
| 0.722369
|
ex_insights.ex
|
starcoder
|
defmodule ExWire.Message.FindNeighbours do
  @moduledoc """
  A wrapper for ExWire's `FindNeighbours` message.
  "Id of a node. The responding node will send back nodes closest to the target."
  """

  @behaviour ExWire.Message

  @message_id 0x03

  alias ExWire.Util.Timestamp

  defstruct target: nil, timestamp: nil

  @type t :: %__MODULE__{
          target: ExWire.node_id(),
          timestamp: integer()
        }

  @spec message_id() :: ExWire.Message.message_id()
  def message_id, do: @message_id

  @doc """
  Constructs a new FindNeighbours message struct for the given node id,
  stamped with a near-future timestamp.
  """
  def new(node_id) do
    %__MODULE__{target: node_id, timestamp: Timestamp.soon()}
  end

  @doc """
  Decodes a given message binary, which is assumed
  to be an RLP encoded list of elements.
  ## Examples
      iex> ExWire.Message.FindNeighbours.decode([<<1>>, 2] |> ExRLP.encode)
      %ExWire.Message.FindNeighbours{
        target: <<1>>,
        timestamp: 2,
      }
      iex> ExWire.Message.FindNeighbours.decode([<<1>>] |> ExRLP.encode)
      ** (MatchError) no match of right hand side value: [<<1>>]
  """
  @spec decode(binary()) :: t
  def decode(encoded) do
    # Exactly two RLP elements are expected; anything else raises MatchError.
    [target, timestamp_bin] = ExRLP.decode(encoded)

    %__MODULE__{
      target: target,
      timestamp: :binary.decode_unsigned(timestamp_bin)
    }
  end

  @doc """
  Given a FindNeighbours message, encodes it so it can be sent on the wire in RLPx.
  ## Examples
      iex> ExWire.Message.FindNeighbours.encode(%ExWire.Message.FindNeighbours{target: <<1>>, timestamp: 2})
      ...> |> ExRLP.decode()
      [<<1>>, <<2>>]
  """
  @spec encode(t) :: binary()
  def encode(%__MODULE__{} = message) do
    ExRLP.encode([message.target, message.timestamp])
  end

  @doc """
  FindNeighbours messages do not specify a destination.
  ## Examples
      iex> ExWire.Message.FindNeighbours.to(%ExWire.Message.FindNeighbours{target: <<1>>, timestamp: 2})
      nil
  """
  @spec to(t) :: ExWire.Struct.Endpoint.t() | nil
  def to(_message), do: nil
end
|
apps/ex_wire/lib/ex_wire/message/find_neighbours.ex
| 0.902796
| 0.521471
|
find_neighbours.ex
|
starcoder
|
defmodule LineEx.Webhook do
  @moduledoc """
  A behaviour for implementing LINE webhook. When `LineEx.Webhook.Plug`
  receive an event, a plug module will verify the request and forward
  event to the webhook.
  ## Example
  Let's build a echo webhook that reply a message that user sent. We would
  create a module and use `LineEx.Webhook` behaviour:
      defmodule Echo.Webhook do
        use LineEx.Webhook
        ...
      end
  After use `LineEx.Webhook`. The module must implements 3 functions, the
  first one is `start_link/2` to starting a webhook process:
      defmodule Echo.Webhook do
        use LineEx.Webhook
        def start_link(args, opts \\ []) do
          LineEx.Webhook.start_link(__MODULE__, args, opts)
        end
      end
  The second one is `init/1`, `LineEx.Webhook` will invoke this function when initialize
  a process. The `init/1` callback must returns `{:ok, state}` or `{:stop, reason}`
  if it found an error, such as initialize argument is not valid:
      defmodule Echo.Webhook do
        use LineEx.Webhook
        def start_link(args, opts \\ []) do
          LineEx.Webhook.start_link(__MODULE__, args, opts)
        end
        @impl true
        def init(args) do
          # Processing arguments here.
          {:ok, %{}}
        end
      end
  And finally, the `handle_event/2` for handling LINE webhook event, the first argument is
  an event that LINE sent to us and the second is the state of the webhook process:
      defmodule Echo.Webhook do
        use LineEx.Webhook
        def start_link(args, opts \\ []) do
          LineEx.Webhook.start_link(__MODULE__, args, opts)
        end
        @impl true
        def init(args) do
          # Processing arguments here.
          {:ok, %{}}
        end
        @impl true
        def handle_event(event, state) do
          ...
        end
      end
  the result from this callback must be one of:
  * `{:reply, reply_token, messages, state}` - it'll tell the process to reply a `messages` to
    the user with a `reply_token`. And `state` of process. With this way, you can a lot of things
    with the webhook, such as store chat state per user to do stateful chat.
  * `{:noreply, state}` - do not reply any messages to the user.
  So our echo webhook will be like this:
      defmodule Echo.Webhook do
        use LineEx.Webhook
        def start_link(args, opts \\ []) do
          LineEx.Webhook.start_link(__MODULE__, args, opts)
        end
        @impl true
        def init(args) do
          # Processing arguments here.
          {:ok, %{}}
        end
        @impl true
        def handle_event(%{"events" => [event]}, state) do
          {:reply,
           event["replyToken"],
           [%{type: "text", text: event["message"]}],
           state}
        end
      end
  The `handle_event/2` will reply a text that user sent to the webhook. Note that we
  assume the user send text message to us. If you want to handle more kind of event,
  you can use `event["type"]` to check type of event which's follow the (LINE Webhook Event Objects)[https://developers.line.biz/en/reference/messaging-api/#webhook-event-objects]
  guideline.
  The final step, put a webhook module to the supervisor children:
      children = [
        {Echo.Webhook, channel_access_token: "..."}
        ...
      ]
  In the process arguments in `children`, set the `channel_access_token` to it to uses for reply
  a message.
  """
  use GenServer

  alias LineEx.MessagingApi.Message

  # TODO: add type spec each event.
  @type message_event() :: map()
  @type unsend_event() :: map()
  @type follow_event() :: map()
  @type unfollow_event() :: map()
  @type join_event() :: map()
  @type leave_event() :: map()
  @type member_join_event() :: map()
  @type member_leave_event() :: map()
  @type postback_event() :: map()
  @type video_viewing_complete_event() :: map()

  @type webhook_event() ::
          message_event()
          | unsend_event()
          | follow_event()
          | unfollow_event()
          | join_event()
          | leave_event()
          | member_join_event()
          | member_leave_event()
          | postback_event()
          | video_viewing_complete_event()

  @doc """
  Initialize state of webhook.
  """
  @callback init(opts) :: {:ok, state} | {:stop, reason}
            when opts: term(), state: term(), reason: term()

  @doc """
  Handle webhook event.
  """
  @callback handle_event(webhook_event(), state) ::
              {:reply, reply_token, [message], new_state} | {:noreply, new_state}
            when reply_token: String.t(), message: map(), state: term(), new_state: term()

  @type t() :: %__MODULE__{
          mod: module(),
          state: term(),
          channel_access_token: String.t(),
          line_api_url: String.t()
        }

  # Internal process state: the callback module, its user-defined state, and
  # the LINE API credentials/endpoint used when replying.
  defstruct [
    :mod,
    :state,
    :channel_access_token,
    :line_api_url
  ]

  ## Client

  @doc """
  Starting a `LineEx.Webhook` process.
  Once the process started, the `init/1` function of the given `module` is called with
  `args`.
  """
  @spec start_link(module(), term(), GenServer.options()) :: GenServer.on_start()
  def start_link(module, args, opts \\ []) when is_atom(module) and is_list(opts) do
    # `args` doubles as both this process's options (channel_access_token,
    # line_api_url) and the callback module's init args (stored under
    # :init_args); it is therefore expected to be a keyword list.
    webhook_args = args |> Keyword.put(:mod, module) |> Keyword.put(:init_args, args)
    GenServer.start_link(__MODULE__, webhook_args, opts)
  end

  @doc """
  Handling an `event`. The `event` will process asynchronously.
  """
  @spec handle_event(GenServer.server(), webhook_event()) :: :ok
  def handle_event(webhook, event) do
    GenServer.cast(webhook, {:"$webhook_event", event})
  end

  ## Server

  # TODO: validate opt here.
  @impl true
  def init(opts) do
    mod = opts[:mod]

    # Delegate to the callback module's init/1; propagate its {:stop, reason}
    # so a bad configuration aborts process startup.
    case mod.init(opts[:init_args]) do
      {:ok, state} ->
        {:ok,
         %__MODULE__{
           mod: mod,
           state: state,
           channel_access_token: opts[:channel_access_token],
           line_api_url: opts[:line_api_url] || "https://api.line.me"
         }}

      {:stop, reason} ->
        {:stop, reason}
    end
  end

  @impl true
  def handle_cast({:"$webhook_event", event}, webhook) do
    new_state =
      case webhook.mod.handle_event(event, webhook.state) do
        {:reply, reply_token, messages, new_state} ->
          # NOTE(review): the {:ok, %{}} match means any reply failure from
          # the LINE API crashes this webhook process — confirm this
          # let-it-crash behavior is intended.
          {:ok, %{}} =
            webhook.channel_access_token
            |> Message.client(timeout: 10_000, api_endpoint: webhook.line_api_url)
            |> Message.request(Message.reply_message(reply_token, messages))

          new_state

        {:noreply, new_state} ->
          new_state
      end

    {:noreply, %{webhook | state: new_state}}
  end

  ## Behaviour

  defmacro __using__(opts) do
    quote location: :keep, bind_quoted: [opts: opts] do
      @behaviour LineEx.Webhook

      # Default child_spec/1 so the using module can be placed directly in a
      # supervision tree; overridable for custom restart/shutdown options.
      def child_spec(init_args) do
        default = %{
          id: __MODULE__,
          start: {__MODULE__, :start_link, [init_args]}
        }

        Supervisor.child_spec(default, unquote(Macro.escape(opts)))
      end

      defoverridable child_spec: 1
    end
  end
end
|
line_ex_webhook/lib/line_ex/webhook.ex
| 0.758376
| 0.426262
|
webhook.ex
|
starcoder
|
defmodule Xema.Builder do
  @moduledoc """
  This module contains some convenience functions to generate schemas.
  ## Examples
      iex> import Xema.Builder
      ...> schema = Xema.new integer(minimum: 1)
      ...> Xema.valid?(schema, 6)
      true
      ...> Xema.valid?(schema, 0)
      false
  """

  alias Xema.{CastError, Schema, ValidationError}

  @types Xema.Schema.types()

  # Generate a zero-arity and a one-arity builder function for every schema
  # type except the literal/struct types handled separately below
  # (e.g. `integer()` -> :integer, `integer(minimum: 1)` -> {:integer, [minimum: 1]}).
  @types
  |> Enum.filter(fn x -> x not in [nil, true, false, :struct] end)
  |> Enum.each(fn fun ->
    @doc """
    Returns a tuple of `:#{fun}` and the given keyword list.
    ## Examples
        iex> Xema.Builder.#{fun}(key: 42)
        {:#{fun}, [key: 42]}
    """
    @spec unquote(fun)() :: unquote(fun)
    def unquote(fun)() do
      unquote(fun)
    end

    @spec unquote(fun)(keyword) :: {unquote(fun), keyword}
    def unquote(fun)(keywords) when is_list(keywords) do
      {unquote(fun), keywords}
    end
  end)

  @doc """
  Returns the tuple `{:ref, ref}`.
  """
  def ref(ref) when is_binary(ref), do: {:ref, ref}

  @doc """
  Returns `:struct`.
  """
  @spec strux :: :struct
  def strux, do: :struct

  @doc """
  Returns a tuple of `:struct` and the given keyword list.
  """
  @spec strux(keyword) :: {:struct, keyword}
  def strux(keywords) when is_list(keywords), do: {:struct, keywords}

  @doc """
  Returns the tuple `{:struct, module: module}`.
  """
  @spec strux(atom) :: {:struct, module: module}
  def strux(module) when is_atom(module), do: strux(module: module)

  def strux(module, keywords) when is_atom(module),
    do: keywords |> Keyword.put(:module, module) |> strux()

  @doc """
  Creates a `schema`.
  """
  defmacro xema(do: schema) do
    # Unnamed schema: delegate to xema/2 with a reserved internal name.
    quote do
      xema :__xema_default__ do
        unquote(schema)
      end
    end
  end

  @doc """
  Creates a `schema` with the given name.
  """
  defmacro xema(name, do: schema) do
    schema = xema_struct(schema)

    quote do
      Module.register_attribute(__MODULE__, :xemas, accumulate: true)

      # @multi/@default are set by `use Xema`; their absence means the caller
      # never invoked `use Xema` at all.
      multi = Module.get_attribute(__MODULE__, :multi)
      default = Module.get_attribute(__MODULE__, :default)
      Module.put_attribute(__MODULE__, :default, false)

      if multi == nil do
        raise "Use `use Xema` to use the `xema/2` macro."
      end

      if !multi && length(@xemas) > 0 do
        raise "Use `use Xema, multi: true` to setup multiple schema in a module."
      end

      Module.put_attribute(
        __MODULE__,
        :xemas,
        {unquote(name), Xema.new(add_new_module(unquote(schema), __MODULE__))}
      )

      # Emit the public API (valid?/validate/cast/...) — bodiless heads are
      # generated once, clause bodies per schema name.
      if multi do
        if length(@xemas) == 1 do
          unquote(xema_funs(:header))
        end

        if default do
          unquote(xema_funs(:default, name))
        end

        unquote(xema_funs(:by_name, name))
      else
        if unquote(name) == :__xema_default__ do
          unquote(xema_funs(:single, name))
        else
          unquote(xema_funs(:header))
          unquote(xema_funs(:default, name))
          unquote(xema_funs(:by_name, name))
        end
      end
    end
  end

  # Bodiless function heads (with docs/specs and the :default argument
  # defaults) shared by all named-schema clauses; generated exactly once.
  defp xema_funs(:header) do
    quote do
      @doc """
      Returns true if the specified `data` is valid against the schema
      defined under `name`, otherwise false.
      """
      @spec valid?(atom, term) :: boolean
      def valid?(name \\ :default, data)

      @doc """
      Validates the given `data` against the schema defined under `name`.
      Returns `:ok` for valid data, otherwise an `:error` tuple.
      """
      @spec validate(atom, term) :: :ok | {:error, ValidationError.t()}
      def validate(name \\ :default, data)

      @doc """
      Validates the given `data` against the schema defined under `name`.
      Returns `:ok` for valid data, otherwise a `Xema.ValidationError` is
      raised.
      """
      @spec validate!(atom, term) :: :ok
      def validate!(name \\ :default, data)

      @doc """
      Converts the given `data` according to the schema defined under `name`.
      Returns an `:ok` tuple with the converted data for valid `data`, otherwise
      an `:error` tuple is returned.
      """
      @spec cast(atom, term) ::
              {:ok, term} | {:error, ValidationError.t() | CastError.t()}
      def cast(name \\ :default, data)

      @doc """
      Converts the given `data` according to the schema defined under `name`.
      Returns converted data for valid `data`, otherwise a `Xema.CastError` or
      `Xema.ValidationError` is raised.
      """
      @spec cast!(atom, term) :: {:ok, term}
      def cast!(name \\ :default, data)

      @doc false
      def xema(name \\ :default)
    end
  end

  # Function clauses dispatching on the schema's own name.
  defp xema_funs(:by_name, name) do
    quote do
      def valid?(unquote(name), data),
        do: Xema.valid?(@xemas[unquote(name)], data)

      def validate(unquote(name), data),
        do: Xema.validate(@xemas[unquote(name)], data)

      def validate!(unquote(name), data),
        do: Xema.validate!(@xemas[unquote(name)], data)

      def cast(unquote(name), data),
        do: Xema.cast(@xemas[unquote(name)], data)

      def cast!(unquote(name), data),
        do: Xema.cast!(@xemas[unquote(name)], data)

      @doc false
      def xema(unquote(name)),
        do: @xemas[unquote(name)]
    end
  end

  # Function clauses that route the :default name to schema `name`.
  defp xema_funs(:default, name) do
    quote do
      def valid?(:default, data),
        do: Xema.valid?(@xemas[unquote(name)], data)

      def validate(:default, data),
        do: Xema.validate(@xemas[unquote(name)], data)

      def validate!(:default, data),
        do: Xema.validate!(@xemas[unquote(name)], data)

      def cast(:default, data),
        do: Xema.cast(@xemas[unquote(name)], data)

      def cast!(:default, data),
        do: Xema.cast!(@xemas[unquote(name)], data)

      @doc false
      def xema(:default),
        do: @xemas[unquote(name)]
    end
  end

  # Single-schema (unnamed) variant: one-argument API, no name dispatch.
  defp xema_funs(:single, name) do
    quote do
      @doc """
      Returns true if the given `data` valid against the defined schema,
      otherwise false.
      """
      @spec valid?(term) :: boolean
      def valid?(data),
        do: Xema.valid?(@xemas[unquote(name)], data)

      @doc """
      Validates the given `data` against the defined schema.
      Returns `:ok` for valid data, otherwise an `:error` tuple.
      """
      @spec validate(term) :: :ok | {:error, ValidationError.t()}
      def validate(data),
        do: Xema.validate(@xemas[unquote(name)], data)

      @doc """
      Validates the given `data` against the defined schema.
      Returns `:ok` for valid data, otherwise a `Xema.ValidationError` is
      raised.
      """
      @spec validate!(term) :: :ok
      def validate!(data),
        do: Xema.validate!(@xemas[unquote(name)], data)

      @doc """
      Converts the given `data` according to the defined schema.
      Returns an `:ok` tuple with the converted data for valid `data`, otherwise
      an `:error` tuple is returned.
      """
      @spec cast(term) :: {:ok, term} | {:error, ValidationError.t() | CastError.t()}
      def cast(data),
        do: Xema.cast(@xemas[unquote(name)], data)

      @doc """
      Converts the given `data` according to the defined schema.
      Returns converted data for valid `data`, otherwise a `Xema.CastError` or
      `Xema.ValidationError` is raised.
      """
      @spec cast!(term) :: term
      def cast!(data),
        do: Xema.cast!(@xemas[unquote(name)], data)

      @doc false
      def xema,
        do: @xemas[unquote(name)]
    end
  end

  # Rewrites a `field`/`required` block into a defstruct plus a :struct
  # schema AST. Multiple statements arrive as a :__block__.
  defp xema_struct({:__block__, _context, data}) do
    data =
      data
      |> Enum.group_by(fn
        {name, _, _} when name in [:required, :field] -> name
        _ -> :rest
      end)
      |> Map.put_new(:field, [])
      |> Map.put_new(:required, nil)
      |> Map.put_new(:rest, nil)

    quote do
      # Non-field/required statements are re-emitted untouched before the struct.
      unquote(data.rest)
      defstruct unquote(Enum.map(data.field, &xema_field_name/1))

      {:struct,
       [
         properties: Map.new(unquote(Enum.map(data.field, &xema_field/1))),
         keys: :atoms
       ]
       |> Keyword.merge(unquote(xema_required(data.required)))}
    end
  end

  # Single `field` call (no surrounding block).
  defp xema_struct({:field, _context, _args} = data) do
    quote do
      defstruct [unquote(xema_field_name(data))]

      {:struct,
       [
         properties: Map.new([unquote(xema_field(data))]),
         keys: :atoms
       ]}
    end
  end

  # Anything else (a plain schema expression) passes through unchanged.
  defp xema_struct(data), do: data

  # AST for a `{name, field_call}` pair used to build the :properties map.
  defp xema_field({:field, _context, [name | _]} = field) do
    quote do
      {unquote(name), unquote(field)}
    end
  end

  # Extracts just the field name AST for the defstruct key list.
  defp xema_field_name({:field, _context, [name | _]}) do
    quote do
      unquote(name)
    end
  end

  @doc """
  Specifies a field. This function will be used inside `xema/0`.
  Arguments:
  + `name`: the name of the field.
  + `type`: the type of the field. The `type` can also be a `struct` or another
    schema.
  + `opts`: the rules for the field.
  ## Examples
      iex> defmodule User do
      ...>   use Xema
      ...>
      ...>   xema do
      ...>     field :name, :string, min_length: 1
      ...>   end
      ...> end
      ...>
      iex> %{"name" => "Tim"} |> User.cast!() |> Map.from_struct()
      %{name: "Tim"}
  For more examples see "[Examples: Struct](examples.html#struct)".
  """
  # NOTE(review): this @spec mirrors check_field_type!/2's internal tags, not
  # the schema tuples actually returned below — confirm and adjust upstream.
  @spec field(atom, Schema.type() | module, keyword) ::
          {:xema, Xema.t()} | {:module, module} | {:type, atom}
  def field(name, type, opts \\ [])

  def field(name, type, opts) do
    name
    |> check_field_type!(type)
    |> case do
      # Another Xema module: embed its schema directly.
      {:xema, module} -> module.xema()
      # A plain struct module: reference it via a :struct schema.
      {:module, module} -> {:struct, Keyword.put(opts, :module, module)}
      # A plain schema type atom (or list of them).
      {:type, type} -> {type, opts}
    end
  end

  # Validates the declared type(s) for a field, classifying each as a plain
  # type, a Xema module, or an ordinary module; raises on anything else.
  defp check_field_type!(field, types) when is_list(types) do
    Enum.each(types, fn type -> check_field_type!(field, type) end)
    {:type, types}
  end

  defp check_field_type!(_field, type) when type in @types, do: {:type, type}

  defp check_field_type!(_field, module) when is_atom(module) do
    case Xema.behaviour?(module) do
      true -> {:xema, module}
      false -> {:module, module}
    end
  end

  defp check_field_type!(field, type),
    do: raise(ArgumentError, "invalid type #{inspect(type)} for field #{inspect(field)}")

  # Unwraps the single allowed `required` call; nil when absent; more than
  # one is a usage error.
  defp xema_required([required]) do
    quote do
      unquote(required)
    end
  end

  defp xema_required(nil) do
    quote do: []
  end

  defp xema_required(_) do
    raise ArgumentError, "the required function can only be called once per xema"
  end

  @doc """
  Sets the list of required fields. Specifies a field. This function will be
  used inside `xema/0`.
  ## Examples
      iex> defmodule Person do
      ...>   use Xema
      ...>
      ...>   xema do
      ...>     field :name, :string, min_length: 1
      ...>     required [:name]
      ...>   end
      ...> end
      ...>
      iex> %{"name" => "Tim"} |> Person.cast!() |> Map.from_struct()
      %{name: "Tim"}
  """
  @spec required([atom]) :: term
  def required(fields), do: [required: fields]

  @doc false
  # Fills in the defining module on a :struct schema unless one was given.
  def add_new_module({:struct, keywords}, module),
    do: {:struct, Keyword.put_new(keywords, :module, module)}

  def add_new_module(schema, _module), do: schema
end
|
lib/xema/builder.ex
| 0.911269
| 0.410815
|
builder.ex
|
starcoder
|
defmodule Clickhousex.Codec.Binary do
  @moduledoc false
  # Encoders and decoders for ClickHouse's native binary wire format.
  #
  # `encode/2` returns a binary or iodata. `decode/2,3` returns
  # `{:ok, value, rest}` when the buffer holds a complete value, or
  # `{:resume, fun}` — a continuation to call with the next chunk of bytes —
  # when the buffer is incomplete.
  use Bitwise

  # Varints are LEB128: 7 payload bits per byte, least-significant group
  # first; the high bit of each byte marks "more bytes follow".
  # (This line was previously corrupted in the source; reconstructed to match
  # the `<<1::1, byte::7, ...>>` pattern consumed by decode_varint/3 below.)
  def encode(:varint, num) when num < 128, do: <<num>>
  def encode(:varint, num), do: <<1::1, num::7, encode(:varint, num >>> 7)::binary>>

  # Strings are a varint byte-count followed by the raw bytes.
  def encode(:string, str) when is_bitstring(str) do
    [encode(:varint, byte_size(str)), str]
  end

  def encode(:u8, i) when is_integer(i) do
    <<i::little-unsigned-size(8)>>
  end

  def encode(:u16, i) do
    <<i::little-unsigned-size(16)>>
  end

  def encode(:u32, i) do
    <<i::little-unsigned-size(32)>>
  end

  def encode(:u64, i) do
    <<i::little-unsigned-size(64)>>
  end

  def encode(:i8, i) do
    <<i::little-signed-size(8)>>
  end

  def encode(:i16, i) do
    <<i::little-signed-size(16)>>
  end

  def encode(:i32, i) do
    <<i::little-signed-size(32)>>
  end

  def encode(:i64, i) do
    <<i::little-signed-size(64)>>
  end

  def encode(:f64, f) do
    <<f::little-signed-float-size(64)>>
  end

  def encode(:f32, f) do
    <<f::little-signed-float-size(32)>>
  end

  def encode(:boolean, true) do
    encode(:u8, 1)
  end

  def encode(:boolean, false) do
    encode(:u8, 0)
  end

  # Lists are a varint element-count followed by each encoded element.
  def encode({:list, type}, list) do
    elements = for e <- list, do: encode(type, e)
    [encode(:varint, length(list)), elements]
  end

  # Nullables carry a 1-byte null flag (1 = null) before the payload.
  def encode({:nullable, _type}, nil) do
    encode(:u8, 1)
  end

  def encode({:nullable, type}, thing) do
    [
      encode(:u8, 0),
      encode(type, thing)
    ]
  end

  # Decodes each field listed in the module's decode_spec() into a struct.
  def decode(bytes, :struct, struct_module) do
    decode_struct(bytes, struct_module.decode_spec(), struct(struct_module))
  end

  def decode(<<1, rest::binary>>, {:nullable, _type}) do
    {:ok, nil, rest}
  end

  def decode(<<0, rest::binary>>, {:nullable, type}) do
    decode(rest, type)
  end

  def decode(<<>>, {:nullable, type}) do
    {:resume, fn more_data -> decode(more_data, {:nullable, type}) end}
  end

  def decode(bytes, :varint) do
    decode_varint(bytes, 0, 0)
  end

  def decode(bytes, :string) do
    with {:ok, byte_count, rest} <- decode(bytes, :varint),
         true <- byte_size(rest) >= byte_count do
      <<decoded_str::binary-size(byte_count), rest::binary>> = rest
      {:ok, decoded_str, rest}
    else
      _ ->
        {:resume, fn more_data -> decode(bytes <> more_data, :string) end}
    end
  end

  def decode(<<1::little-unsigned-size(8), rest::binary>>, :boolean) do
    {:ok, true, rest}
  end

  def decode(<<0::little-unsigned-size(8), rest::binary>>, :boolean) do
    {:ok, false, rest}
  end

  def decode(bytes, {:list, data_type}) do
    case decode(bytes, :varint) do
      {:ok, count, rest} ->
        decode_list(rest, data_type, count, [])

      _ ->
        decoder = fn more_data -> decode(bytes <> more_data, {:list, data_type}) end
        {:resume, decoder}
    end
  end

  def decode(<<decoded::little-signed-size(64), rest::binary>>, :i64) do
    {:ok, decoded, rest}
  end

  def decode(<<decoded::little-signed-size(32), rest::binary>>, :i32) do
    {:ok, decoded, rest}
  end

  def decode(<<decoded::little-signed-size(16), rest::binary>>, :i16) do
    {:ok, decoded, rest}
  end

  def decode(<<decoded::little-signed-size(8), rest::binary>>, :i8) do
    {:ok, decoded, rest}
  end

  # BUG FIX: the unsigned clauses below previously used `little-signed`,
  # which returned negative numbers for any value with the high bit set
  # (e.g. u64 0xFFFFFFFFFFFFFFFF decoded as -1 instead of 2^64 - 1).
  def decode(<<decoded::little-unsigned-size(64), rest::binary>>, :u64) do
    {:ok, decoded, rest}
  end

  def decode(<<decoded::little-unsigned-size(32), rest::binary>>, :u32) do
    {:ok, decoded, rest}
  end

  def decode(<<decoded::little-unsigned-size(16), rest::binary>>, :u16) do
    {:ok, decoded, rest}
  end

  def decode(<<decoded::little-unsigned-size(8), rest::binary>>, :u8) do
    {:ok, decoded, rest}
  end

  # Dates are days since the Unix epoch (1970-01-01).
  def decode(<<days_since_epoch::little-unsigned-size(16), rest::binary>>, :date) do
    {:ok, date} = Date.new(1970, 01, 01)
    date = Date.add(date, days_since_epoch)
    {:ok, date, rest}
  end

  # DateTimes are seconds since the Unix epoch (no timezone — NaiveDateTime).
  def decode(<<seconds_since_epoch::little-unsigned-size(32), rest::binary>>, :datetime) do
    {:ok, date_time} = NaiveDateTime.new(1970, 1, 1, 0, 0, 0)
    date_time = NaiveDateTime.add(date_time, seconds_since_epoch)
    {:ok, date_time, rest}
  end

  # NOTE: two duplicate `:boolean` clauses that previously appeared here were
  # removed — identical patterns are already matched earlier, so they were
  # unreachable dead code.

  def decode(<<decoded::little-signed-float-size(64), rest::binary>>, :f64) do
    {:ok, decoded, rest}
  end

  def decode(<<decoded::little-signed-float-size(32), rest::binary>>, :f32) do
    {:ok, decoded, rest}
  end

  # Fallback: not enough bytes for any clause above — hand back a continuation.
  def decode(bytes, type) do
    {:resume, &decode(bytes <> &1, type)}
  end

  defp decode_list(rest, _, 0, accum) do
    {:ok, Enum.reverse(accum), rest}
  end

  defp decode_list(bytes, data_type, count, accum) do
    case decode(bytes, data_type) do
      {:ok, decoded, rest} ->
        decode_list(rest, data_type, count - 1, [decoded | accum])

      {:resume, _} ->
        {:resume, &decode_list(bytes <> &1, data_type, count, accum)}
    end
  end

  # Accumulates LEB128 groups into `result`; `shift` is the bit offset of the
  # next 7-bit group.
  defp decode_varint(<<0::size(1), byte::size(7), rest::binary>>, result, shift) do
    {:ok, result ||| byte <<< shift, rest}
  end

  defp decode_varint(<<1::1, byte::7, rest::binary>>, result, shift) do
    decode_varint(rest, result ||| byte <<< shift, shift + 7)
  end

  defp decode_varint(bytes, result, shift) do
    {:resume, &decode_varint(bytes <> &1, result, shift)}
  end

  defp decode_struct(rest, [], struct) do
    {:ok, struct, rest}
  end

  defp decode_struct(rest, [{field_name, type} | specs], struct) do
    case decode(rest, type) do
      {:ok, decoded, rest} ->
        decode_struct(rest, specs, Map.put(struct, field_name, decoded))

      {:error, _} = err ->
        err
    end
  end
end
|
lib/clickhousex/codec/binary.ex
| 0.577853
| 0.423577
|
binary.ex
|
starcoder
|
defmodule Ash.Query.Function do
  @moduledoc """
  A function is a predicate with an arguments list.
  For more information on being a predicate, see `Ash.Filter.Predicate`. Most of the complexities
  are there. A function must meet both behaviours.
  """

  alias Ash.Query.{BooleanExpression, Call, Not, Ref}

  @type arg :: any

  @doc """
  The number and types of arguments supported.
  """
  @callback args() :: [arg]
  @callback new(list(term)) :: {:ok, term}
  @callback evaluate(func :: map) :: :unknown | {:known, term}

  # Builds a function struct for `mod`, casting `args` against the module's
  # declared argument signatures. Returns `mod.new/1`'s result on success or
  # an {:error, message} tuple on arity mismatch / failed casts.
  def new(mod, args) do
    args = List.wrap(args)

    case mod.args() do
      :var_args ->
        # Varargs is special, and should only be used in rare circumstances (like this one)
        # no type casting or help can be provided for these functions.
        mod.new(args)

      mod_args ->
        configured_args = List.wrap(mod_args)
        # NOTE(review): arity is taken from the first signature only — this
        # assumes all signatures in mod.args() share one arity; confirm.
        configured_arg_count = Enum.count(Enum.at(configured_args, 0))
        given_arg_count = Enum.count(args)

        if configured_arg_count == given_arg_count do
          # Try each declared signature in order; the first one whose types
          # all cast successfully wins.
          mod_args
          |> Enum.find_value(&try_cast_arguments(&1, args))
          |> case do
            nil ->
              {:error,
               "Could not cast function arguments for #{mod.name()}/#{configured_arg_count}"}

            casted ->
              mod.new(casted)
          end
        else
          {:error,
           "function #{mod.name()}/#{configured_arg_count} takes #{configured_arg_count} arguments, provided #{given_arg_count}"}
        end
    end
  end

  # Attempts to cast each given argument to its configured type. Expression
  # nodes (refs, calls, predicates) pass through uncast since they are only
  # resolvable later. Returns the casted list or nil if any cast fails.
  defp try_cast_arguments(configured_args, args) do
    args
    |> Enum.zip(configured_args)
    |> Enum.reduce_while({:ok, []}, fn
      {arg, :any}, {:ok, args} ->
        {:cont, {:ok, [arg | args]}}

      {%struct{} = arg, _}, {:ok, args}
      when struct in [BooleanExpression, Call, Not, Ref] ->
        {:cont, {:ok, [arg | args]}}

      {%{__predicate__?: _} = arg, _}, {:ok, args} ->
        {:cont, {:ok, [arg | args]}}

      {arg, type}, {:ok, args} ->
        case Ash.Query.Type.try_cast(arg, type) do
          {:ok, value} -> {:cont, {:ok, [value | args]}}
          :error -> {:halt, :error}
        end
    end)
    |> case do
      {:ok, args} ->
        # Arguments were accumulated by prepending; restore original order.
        Enum.reverse(args)

      _ ->
        nil
    end
  end

  # Copied from https://github.com/andrewhao/ordinal/blob/master/lib/ordinal.ex
  @doc """
  Attaches the appropriate suffix to refer to an ordinal number, e.g 1 -> "1st"
  """
  def ordinal(num) do
    cond do
      # 11, 12, 13 are special-cased ("11th", not "11st").
      Enum.any?([11, 12, 13], &(&1 == Integer.mod(num, 100))) ->
        "#{num}th"

      Integer.mod(num, 10) == 1 ->
        "#{num}st"

      Integer.mod(num, 10) == 2 ->
        "#{num}nd"

      Integer.mod(num, 10) == 3 ->
        "#{num}rd"

      true ->
        "#{num}th"
    end
  end

  defmacro __using__(opts) do
    quote do
      @behaviour Ash.Filter.Predicate

      alias Ash.Query.Ref

      defstruct [
        :arguments,
        name: unquote(opts[:name]),
        embedded?: false,
        __function__?: true,
        __predicate__?: unquote(opts[:predicate?] || false)
      ]

      def name, do: unquote(opts[:name])

      # Default constructor/evaluator; functions override these as needed.
      def new(args), do: {:ok, struct(__MODULE__, arguments: args)}
      def evaluate(_), do: :unknown

      defoverridable new: 1, evaluate: 1

      # Renders the function as `name(arg1,arg2)` in inspect output.
      defimpl Inspect do
        import Inspect.Algebra

        def inspect(%{arguments: args, name: name}, opts) do
          concat(
            to_string(name),
            container_doc("(", args, ")", opts, &to_doc/2, separator: ",")
          )
        end
      end
    end
  end
end
|
lib/ash/query/function/function.ex
| 0.834576
| 0.425337
|
function.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.