code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Spiral do
  @doc """
  Given the dimension, return a square matrix of numbers in clockwise spiral order.
  """
  @spec matrix(dimension :: integer) :: list(list(integer))
  def matrix(0), do: []

  def matrix(dimension) do
    dimension
    |> Matrix.new(dimension)
    |> fill(0, 0, 1, :right, dimension * dimension)
    |> Matrix.to_list()
  end

  # Walks the grid cell by cell, writing consecutive numbers. When the cell
  # ahead is already taken (or out of bounds), it backs up one step and turns
  # clockwise. Recursion stops once `limit` numbers have been placed.
  defp fill(matrix, _i, _j, n, _dir, limit) when n > limit, do: matrix

  defp fill(matrix, i, j, n, dir, limit) do
    if Matrix.get(matrix, i, j) == :undefined do
      {di, dj} = step(dir)

      matrix
      |> Matrix.set(i, j, n)
      |> fill(i + di, j + dj, n + 1, dir, limit)
    else
      # (i, j) is occupied or outside the grid: undo the last step and
      # take one step in the next clockwise direction instead.
      next_dir = turn(dir)
      {di, dj} = step(dir)
      {ti, tj} = step(next_dir)
      fill(matrix, i - di + ti, j - dj + tj, n, next_dir, limit)
    end
  end

  # Row/column delta for a single step in each direction.
  defp step(:right), do: {0, 1}
  defp step(:down), do: {1, 0}
  defp step(:left), do: {0, -1}
  defp step(:up), do: {-1, 0}

  # Clockwise turn order: right -> down -> left -> up -> right.
  defp turn(:right), do: :down
  defp turn(:down), do: :left
  defp turn(:left), do: :up
  defp turn(:up), do: :right
end
defmodule Matrix do
  # A fixed-size two-dimensional grid backed by nested Erlang `:array`s.
  # Unset cells read back as `:undefined` (the `:array` default value), and
  # any access outside the grid returns `:out_of_bounds` instead of raising.
  alias __MODULE__

  defstruct [:content, :rows, :cols]

  # Builds a `rows` x `cols` matrix with every cell unset.
  def new(rows, cols) do
    empty_rows = :array.new(fixed: true, size: rows)
    content = :array.map(fn _i, _v -> :array.new(fixed: true, size: cols) end, empty_rows)
    %Matrix{rows: rows, cols: cols, content: content}
  end

  # Reads cell (i, j). Returns `:undefined` when the cell was never set and
  # `:out_of_bounds` when the coordinates fall outside the grid.
  def get(%Matrix{content: content} = m, i, j) do
    if in_bounds?(m, i, j) do
      row = :array.get(i, content)
      :array.get(j, row)
    else
      :out_of_bounds
    end
  end

  def get(_, _, _), do: :out_of_bounds

  # Returns a new matrix with cell (i, j) set to `value`, or `:out_of_bounds`
  # when the coordinates fall outside the grid.
  def set(%Matrix{content: content} = m, i, j, value) do
    if in_bounds?(m, i, j) do
      updated_row = :array.set(j, value, :array.get(i, content))
      %Matrix{m | content: :array.set(i, updated_row, content)}
    else
      :out_of_bounds
    end
  end

  def set(_, _, _, _), do: :out_of_bounds

  # Converts the matrix to a list of row lists; unset cells stay `:undefined`.
  def to_list(%Matrix{content: content}) do
    content
    |> :array.map(fn _i, row -> :array.to_list(row) end)
    |> :array.to_list()
  end

  # True when {i, j} addresses a cell inside the grid (0 <= index < limit on
  # both axes, mirroring the original guard).
  defp in_bounds?(%Matrix{rows: rows, cols: cols}, i, j) do
    0 <= i and i < rows and 0 <= j and j < cols
  end
end
|
exercism/elixir/spiral-matrix/lib/spiral.ex
| 0.814274
| 0.666716
|
spiral.ex
|
starcoder
|
defmodule Phoenix.Router.Route do
  # This module defines the Route struct that is used
  # throughout Phoenix's router. This struct is private
  # as it contains internal routing information.
  @moduledoc false

  alias Phoenix.Router.Route

  @doc """
  The `Phoenix.Router.Route` struct. It stores:

  * :verb - the HTTP verb as an upcased string
  * :path - the normalized path as string
  * :host - the request host or host prefix
  * :binding - the route bindings
  * :controller - the controller module
  * :action - the action as an atom
  * :helper - the name of the helper as a string (may be nil)
  * :pipe_through - the pipeline names as a list of atoms
  * :path_segments - the path match as quoted segments
  * :host_segments - the host match as quoted segments
  * :pipe_segments - the quoted segments to pipe through
  """
  defstruct [:verb, :path, :host, :binding, :controller, :action, :helper, :pipe_through,
             :path_segments, :host_segments, :pipe_segments]

  @type t :: %Route{}

  @doc """
  Receives the verb, path, controller, action and helper
  and returns a `Phoenix.Router.Route` struct.
  """
  # Spec fixed to agree with the guards below: `helper` is a binary or nil
  # (it was wrongly spec'd as `atom | nil`) and `pipe_through` is a list of
  # pipeline atoms (it was wrongly spec'd as `atom`).
  @spec build(String.t, String.t, String.t | nil, atom, atom, String.t | nil, list) :: t
  def build(verb, path, host, controller, action, helper, pipe_through)
      when is_binary(verb) and is_binary(path) and (is_binary(host) or is_nil(host)) and
           is_atom(controller) and is_atom(action) and (is_binary(helper) or is_nil(helper)) and
           is_list(pipe_through) do
    # build_match/1 returns the path parameter names plus the quoted segments
    # used to pattern match the request path.
    {params, path_segments} = Plug.Router.Utils.build_match(path)

    # Pair each path param with its quoted variable, e.g. {"id", id_var},
    # so the router can bind path values by name.
    binding = Enum.map(params, fn var ->
      {Atom.to_string(var), Macro.var(var, nil)}
    end)

    %Route{verb: verb, path: path, host: host, binding: binding,
           controller: controller, action: action, helper: helper,
           pipe_through: [:before | pipe_through], path_segments: path_segments,
           host_segments: build_host(host), pipe_segments: build_pipes(pipe_through)}
  end

  # Quoted host match: `_` matches any host; a host ending in "." becomes a
  # prefix match (`"sub." <> _`); otherwise the host is matched exactly.
  defp build_host(host) do
    cond do
      is_nil(host) -> quote do: _
      String.last(host) == "." -> quote do: unquote(host) <> _
      true -> host
    end
  end

  # Nests the pipeline calls around `var!(conn)`; e.g. [:a, :b] becomes the
  # quoted expression `b(a(var!(conn), []), [])`.
  defp build_pipes(pipe_through) do
    Enum.reduce(pipe_through, quote(do: var!(conn)), &{&1, [], [&2, []]})
  end
end
|
lib/phoenix/router/route.ex
| 0.825238
| 0.425187
|
route.ex
|
starcoder
|
defmodule P11 do
  @moduledoc """
  - `W`: number of marks (`1 <= W <= 1_000_000`)
  - `H`: number of card numbers (`1 <= H <= 1_000_000`)
  - `N`: number of drawn cards (`1 <= N <= min(W * H, 100)`)
  - `(Si, Ki)`: (mark of the i-th drawn card, number of the i-th drawn card)

  A card is a "hit" when either its mark or its number matches a drawn card.
  Output: the number of hit cards, excluding the drawn cards themselves.

  Approach:
  - There are `W * H` cards in total.
  - Let `Su` and `Ku` be the counts of distinct marks and distinct numbers
    among the drawn cards.
  - Cards that are NOT hits number `(W - Su) * (H - Ku)`.
  - So the number of hits is `W * H - (W - Su) * (H - Ku)`.
  - The answer then excludes the `N` drawn cards themselves:
    `W * H - (W - Su) * (H - Ku) - N`
  - Expanding:
  ```
  ans = w * h - (w - su) * (h - ku) - n
  = w * h - (w * h - w * ku - su * h + su * ku) - n
  = w * ku + h * su - su * ku - n
  ```
  # Examples
  iex> P11.solve(2, 5, 1, [[1, 1]])
  5
  iex> P11.solve(4, 13, 3, [[1, 1], [2, 1], [2, 5]])
  27
  iex> P11.solve(4, 13, 4, [[1, 5], [2, 6], [3, 7], [4, 8]])
  48
  iex> P11.solve(3, 2, 2, [[1, 1], [2, 1]])
  3
  """

  # Reads W, H, N and the N drawn cards from stdin and prints the answer.
  def main do
    [w, h, n] = Enum.map(1..3, fn _ -> read_int() end)

    cards =
      Enum.map(1..n, fn _ ->
        IO.read(:line)
        |> String.trim()
        |> String.split(" ")
        |> Enum.map(&String.to_integer/1)
      end)

    IO.puts(solve(w, h, n, cards))
  end

  # Computes w * ku + su * h - su * ku - n, where su/ku are the distinct
  # mark/number counts among the drawn cards.
  def solve(w, h, n, cards) do
    su = cards |> Enum.map(&hd/1) |> Enum.uniq() |> length()
    ku = cards |> Enum.map(fn [_s, k] -> k end) |> Enum.uniq() |> length()
    w * ku + su * h - su * ku - n
  end

  # Reads one line from stdin as an integer.
  defp read_int do
    IO.read(:line) |> String.trim() |> String.to_integer()
  end
end
"""
defmodule Main do
def main do
w = IO.read(:line) |> String.trim() |> String.to_integer()
h = IO.read(:line) |> String.trim() |> String.to_integer()
n = IO.read(:line) |> String.trim() |> String.to_integer()
{s, k} = for _ <- 0..(n - 1) do
IO.read(:line) |> String.trim() |> String.split(" ") |> Enum.map(&String.to_integer/1)
end
|> Enum.reduce({[], []}, fn [s, k], {sq, kq} -> {[s | sq], [k | kq]} end)
s = s |> Enum.uniq() |> length()
k = k |> Enum.uniq() |> length()
IO.puts(w * k + s * h - s * k - n)
end
end
"""
|
lib/100/p11.ex
| 0.686265
| 0.796055
|
p11.ex
|
starcoder
|
defmodule OpcUA.Client do
use OpcUA.Common
@config_keys ["requestedSessionTimeout", "secureChannelLifeTime", "timeout"]
alias OpcUA.NodeId
@moduledoc """
OPC UA Client API module.
This module provides functions for configuration, read/write nodes attributes and discovery of a OPC UA Client.
`OpcUA.Client` is implemented as a `__using__` macro so that you can put it in any module,
you can initialize your Client manually (see `test/client_tests`) or by overwriting
`configuration/1` and `monitored_items/1` to autoset the configuration and subscription items. It also helps you to
handle Client's "subscription" events (monitorItems) by overwriting `handle_subscription/2` callback.
The following example shows a module that takes its configuration from the environment (see `test/client_tests/terraform_test.exs`):
```elixir
defmodule MyClient do
use OpcUA.Client
# Use the `init` function to configure your Client.
def init({parent_pid, 103} = _user_init_state, opc_ua_client_pid) do
%{parent_pid: parent_pid, opc_ua_client_pid: opc_ua_client_pid}
end
def configuration(_user_init_state), do: Application.get_env(:my_client, :configuration, [])
def monitored_items(_user_init_state), do: Application.get_env(:my_client, :monitored_items, [])
def handle_subscription_timeout(subscription_id, state) do
send(state.parent_pid, {:subscription_timeout, subscription_id})
state
end
def handle_deleted_subscription(subscription_id, state) do
send(state.parent_pid, {:subscription_delete, subscription_id})
state
end
def handle_monitored_data(changed_data_event, state) do
send(state.parent_pid, {:value_changed, changed_data_event})
state
end
def handle_deleted_monitored_item(subscription_id, monitored_id, state) do
send(state.parent_pid, {:item_deleted, {subscription_id, monitored_id}})
state
end
end
```
Because it is small a GenServer, it accepts the same [options](https://hexdocs.pm/elixir/GenServer.html#module-how-to-supervise) for supervision
to configure the child spec and passes them along to `GenServer`:
```elixir
defmodule MyModule do
use OpcUA.Client, restart: :transient, shutdown: 10_000
end
```
"""
@type conn_params ::
{:hostname, binary()}
| {:port, non_neg_integer()}
| {:users, keyword()}
@type config_options ::
{:config, map()}
| {:conn, conn_params}
@doc """
Optional callback that gets the Server configuration and discovery connection parameters.
"""
@callback configuration(term()) :: config_options
# TODO:
@type monitored_items_options ::
{:subscription, float()}
| {:monitored_item, %OpcUA.MonitoredItem{}}
@callback monitored_items(term()) :: monitored_items_options
@doc """
Optional callback that handles node values updates from a Client to a Server.
It's first argument is a tuple, in which its first element is the `subscription_id`
of the subscription that the monitored item belongs to. the second element
is the 'monitored_item_id' which is an unique number asigned to a monitored item when
its created and the third element of the tuple is the new value of the monitored item.
the second argument it's the GenServer state (Parent process).
"""
@callback handle_monitored_data({integer(), integer(), any()}, term()) :: term()
@doc """
Optional callback that handles a deleted monitored items events.
It's first argument is the `subscription_id` of the subscription that the monitored
item belongs to. The second element is the 'monitored_item_id' which is an unique
number asigned to a monitored item when its created.
The third argument it's the GenServer state (Parent process).
"""
@callback handle_deleted_monitored_item(integer(), integer(), term()) :: term()
@doc """
Optional callback that handles a subscriptions timeout events.
It's first argument is the `subscription_id` of the subscription.
The second argument it's the GenServer state (Parent process).
"""
@callback handle_subscription_timeout(integer(), term()) :: term()
@doc """
Optional callback that handles a subscriptions timeout events.
It's first argument is the `subscription_id` of the subscription.
The second argument it's the GenServer state (Parent process).
"""
@callback handle_deleted_subscription(integer(), term()) :: term()
defmacro __using__(opts) do
  quote location: :keep, bind_quoted: [opts: opts] do
    use GenServer, Keyword.drop(opts, [:configuration])
    @behaviour OpcUA.Client
    # Captured at compile time; test builds use :infinity call timeouts
    # because the C port runs under Valgrind and responds slowly.
    @mix_env Mix.env()
    alias __MODULE__

    def start_link(user_initial_params \\ []) do
      GenServer.start_link(__MODULE__, user_initial_params, unquote(opts))
    end

    @impl true
    def init(user_initial_params) do
      # Defer the (potentially slow) client setup out of init/1 so the
      # supervisor starting this process is not blocked.
      send(self(), :init)
      {:ok, user_initial_params}
    end

    @impl true
    def handle_info(:init, user_initial_params) do
      # Client Terraform: start the underlying OPC UA client process and
      # apply the user's configuration / monitored-items callbacks.
      {:ok, c_pid} = OpcUA.Client.start_link()
      configuration = apply(__MODULE__, :configuration, [user_initial_params])
      monitored_items = apply(__MODULE__, :monitored_items, [user_initial_params])
      # configuration = [config: list(), conn: list()]
      set_client_config(c_pid, configuration, :config)
      set_client_config(c_pid, configuration, :conn)
      # monitored_items = [subscription: 100.3, monitored_item: %MonitoredItem{}, ...]
      set_client_monitored_items(c_pid, monitored_items)
      # User initialization.
      user_state = apply(__MODULE__, :init, [user_initial_params, c_pid])
      {:noreply, user_state}
    end

    # Subscription events forwarded by the OPC UA client are dispatched to
    # the user's optional callbacks.
    def handle_info({:timeout, subscription_id}, state) do
      state = apply(__MODULE__, :handle_subscription_timeout, [subscription_id, state])
      {:noreply, state}
    end

    def handle_info({:delete, subscription_id}, state) do
      state = apply(__MODULE__, :handle_deleted_subscription, [subscription_id, state])
      {:noreply, state}
    end

    def handle_info({:data, subscription_id, monitored_id, value}, state) do
      state =
        apply(__MODULE__, :handle_monitored_data, [
          {subscription_id, monitored_id, value},
          state
        ])

      {:noreply, state}
    end

    def handle_info({:delete, subscription_id, monitored_id}, state) do
      state =
        apply(__MODULE__, :handle_deleted_monitored_item, [subscription_id, monitored_id, state])

      {:noreply, state}
    end

    # Default (overridable) callback implementations: log a warning and
    # leave the user state untouched.
    @impl true
    def handle_subscription_timeout(subscription_id, state) do
      require Logger

      Logger.warn(
        "No handle_subscription_timeout/2 clause in #{__MODULE__} provided for #{
          inspect(subscription_id)
        }"
      )

      state
    end

    @impl true
    def handle_deleted_subscription(subscription_id, state) do
      require Logger

      Logger.warn(
        "No handle_deleted_subscription/2 clause in #{__MODULE__} provided for #{
          inspect(subscription_id)
        }"
      )

      state
    end

    @impl true
    def handle_monitored_data(changed_data_event, state) do
      require Logger

      Logger.warn(
        "No handle_monitored_data/2 clause in #{__MODULE__} provided for #{
          inspect(changed_data_event)
        }"
      )

      state
    end

    @impl true
    def handle_deleted_monitored_item(subscription_id, monitored_id, state) do
      require Logger

      Logger.warn(
        "No handle_deleted_monitored_item/3 clause in #{__MODULE__} provided for #{
          inspect({subscription_id, monitored_id})
        }"
      )

      state
    end

    @impl true
    def configuration(_user_init_state), do: []

    @impl true
    def monitored_items(_user_init_state), do: []

    # Sends each config entry of the given type (:config or :conn) to the
    # client process, using an :infinity timeout under test (Valgrind).
    defp set_client_config(c_pid, configuration, type) do
      config_params = Keyword.get(configuration, type, [])

      Enum.each(config_params, fn config_param ->
        if(@mix_env != :test) do
          GenServer.call(c_pid, {type, config_param})
        else
          # Valgrind
          GenServer.call(c_pid, {type, config_param}, :infinity)
        end
      end)
    end

    # Registers each subscription / monitored item with the client process.
    defp set_client_monitored_items(c_pid, monitored_items) do
      Enum.each(monitored_items, fn {item_type, monitored_item} ->
        item_args = get_monitored_item_args(monitored_item)
        GenServer.call(c_pid, {:subscription, {item_type, item_args}})
      end)
    end

    # A subscription entry is a bare float (publishing interval).
    defp get_monitored_item_args(monitored_item) when is_float(monitored_item),
      do: monitored_item

    # BUGFIX: structs do not implement the Access behaviour by default, so
    # `monitored_item[:args]` raised UndefinedFunctionError here. Read the
    # :args field with Map.get/2 instead.
    defp get_monitored_item_args(monitored_item) when is_struct(monitored_item),
      do: Map.get(monitored_item, :args)

    defoverridable start_link: 0,
                   start_link: 1,
                   configuration: 1,
                   monitored_items: 1,
                   handle_subscription_timeout: 2,
                   handle_deleted_subscription: 2,
                   handle_monitored_data: 2,
                   handle_deleted_monitored_item: 3
  end
end
# Configuration & Lifecycle functions
@doc """
Starts up a OPC UA Client GenServer.
"""
@spec start_link(term(), list()) :: {:ok, pid} | {:error, term} | {:error, :einval}
def start_link(args \\ [], opts \\ []) do
GenServer.start_link(__MODULE__, {args, self()}, opts)
end
@doc """
Stops a OPC UA Client GenServer.
"""
@spec stop(GenServer.server()) :: :ok
def stop(pid) do
GenServer.stop(pid)
end
@doc """
Gets the state of the OPC UA Client.
"""
@spec get_state(GenServer.server()) :: {:ok, binary()} | {:error, term} | {:error, :einval}
def get_state(pid) do
GenServer.call(pid, {:config, {:get_state, nil}})
end
@doc """
Sets the OPC UA Client configuration.
"""
@spec set_config(GenServer.server(), map()) :: :ok | {:error, term} | {:error, :einval}
def set_config(pid, args \\ %{}) when is_map(args) do
GenServer.call(pid, {:config, {:set_config, args}})
end
@doc """
Sets the OPC UA Client configuration with all security policies for the given certificates.
The following must be filled:
* `:private_key` -> binary() or function().
* `:certificate` -> binary() or function().
* `:security_mode` -> interger().
NOTE: [none: 1, sign: 2, sign_and_encrypt: 3]
"""
@spec set_config_with_certs(GenServer.server(), list()) :: :ok | {:error, term} | {:error, :einval}
def set_config_with_certs(pid, args) when is_list(args) do
if(@mix_env != :test) do
GenServer.call(pid, {:config, {:set_config_with_certs, args}})
else
# Valgrind
GenServer.call(pid, {:config, {:set_config_with_certs, args}}, :infinity)
end
end
@doc """
Gets the OPC UA Client current Configuration.
"""
@spec get_config(GenServer.server()) :: {:ok, map()} | {:error, term} | {:error, :einval}
def get_config(pid) do
GenServer.call(pid, {:config, {:get_config, nil}})
end
@doc """
Resets the OPC UA Client.
"""
@spec reset(GenServer.server()) :: :ok | {:error, term} | {:error, :einval}
def reset(pid) do
GenServer.call(pid, {:config, {:reset_client, nil}})
end
# Connection functions
@doc """
Connects the OPC UA Client by a url.
The following must be filled:
* `:url` -> binary().
"""
@spec connect_by_url(GenServer.server(), list()) :: :ok | {:error, term} | {:error, :einval}
def connect_by_url(pid, args) when is_list(args) do
if(@mix_env != :test) do
GenServer.call(pid, {:conn, {:by_url, args}})
else
# Valgrind
GenServer.call(pid, {:conn, {:by_url, args}}, :infinity)
end
end
@doc """
Connects the OPC UA Client by a url using a username and a password.
The following must be filled:
* `:url` -> binary().
* `:user` -> binary().
* `:password` -> binary().
"""
@spec connect_by_username(GenServer.server(), list()) ::
:ok | {:error, term} | {:error, :einval}
def connect_by_username(pid, args) when is_list(args) do
if(@mix_env != :test) do
GenServer.call(pid, {:conn, {:by_username, args}})
else
# Valgrind
GenServer.call(pid, {:conn, {:by_username, args}}, :infinity)
end
end
@doc """
Connects the OPC UA Client by a url without a session.
The following must be filled:
* `:url` -> binary().
"""
@spec connect_no_session(GenServer.server(), list()) :: :ok | {:error, term} | {:error, :einval}
def connect_no_session(pid, args) when is_list(args) do
GenServer.call(pid, {:conn, {:no_session, args}})
end
@doc """
Disconnects the OPC UA Client.
"""
@spec disconnect(GenServer.server()) :: :ok | {:error, term} | {:error, :einval}
def disconnect(pid) do
GenServer.call(pid, {:conn, {:disconnect, nil}})
end
# Discovery functions
@doc """
Finds Servers Connected to a Discovery Server.
The following must be filled:
* `:url` -> binary().
"""
@spec find_servers_on_network(GenServer.server(), binary()) ::
:ok | {:error, term} | {:error, :einval}
def find_servers_on_network(pid, url) when is_binary(url) do
GenServer.call(pid, {:discovery, {:find_servers_on_network, url}})
end
@doc """
Finds Servers Connected to a Discovery Server.
The following must be filled:
* `:url` -> binary().
"""
@spec find_servers(GenServer.server(), binary()) :: :ok | {:error, term} | {:error, :einval}
def find_servers(pid, url) when is_binary(url) do
GenServer.call(pid, {:discovery, {:find_servers, url}})
end
@doc """
Get endpoints from a OPC UA Server.
The following must be filled:
* `:url` -> binary().
"""
@spec get_endpoints(GenServer.server(), binary()) :: :ok | {:error, term} | {:error, :einval}
def get_endpoints(pid, url) when is_binary(url) do
GenServer.call(pid, {:discovery, {:get_endpoints, url}})
end
# Subscriptions and Monitored Items functions.
@doc """
Sends an OPC UA Server request to start subscription (to monitored items, events, etc).
"""
@spec add_subscription(GenServer.server()) ::
{:ok, integer()} | {:error, term} | {:error, :einval}
def add_subscription(pid, publishing_interval \\ 500.0) when is_float(publishing_interval) do
GenServer.call(pid, {:subscription, {:subscription, publishing_interval}})
end
@doc """
Sends an OPC UA Server request to delete a subscription.
"""
@spec delete_subscription(GenServer.server(), integer()) ::
:ok | {:error, term} | {:error, :einval}
def delete_subscription(pid, subscription_id) when is_integer(subscription_id) do
GenServer.call(pid, {:subscription, {:delete, subscription_id}})
end
@doc """
Adds a monitored item used to request a server for notifications of each change of value in a specific node.
The following option must be filled:
* `:subscription_id` -> integer().
* `:monitored_item` -> %NodeId{}.
"""
@spec add_monitored_item(GenServer.server(), list()) ::
{:ok, integer()} | {:error, term} | {:error, :einval}
def add_monitored_item(pid, args) when is_list(args) do
GenServer.call(pid, {:subscription, {:monitored_item, args}})
end
@doc """
Adds a monitored item used to request a server for notifications of each change of value in a specific node.
The following option must be filled:
* `:subscription_id` -> integer().
* `:monitored_item_id` -> integer().
"""
@spec delete_monitored_item(GenServer.server(), list()) ::
:ok | {:error, term} | {:error, :einval}
def delete_monitored_item(pid, args) when is_list(args) do
GenServer.call(pid, {:subscription, {:delete_monitored_item, args}})
end
# Read nodes Attributes
@doc """
Reads 'user_write_mask' attribute of a node in the server.
"""
@spec read_node_user_write_mask(GenServer.server(), %NodeId{}) ::
:ok | {:error, binary()} | {:error, :einval}
def read_node_user_write_mask(pid, %NodeId{} = node_id) do
GenServer.call(pid, {:read, {:user_write_mask, node_id}})
end
@doc """
Reads 'user_access_level' attribute of a node in the server.
"""
@spec read_node_user_access_level(GenServer.server(), %NodeId{}) ::
:ok | {:error, binary()} | {:error, :einval}
def read_node_user_access_level(pid, %NodeId{} = node_id) do
GenServer.call(pid, {:read, {:user_access_level, node_id}})
end
@doc """
Reads 'user_executable' attribute of a node in the server.
"""
@spec read_node_user_executable(GenServer.server(), %NodeId{}) ::
:ok | {:error, binary()} | {:error, :einval}
def read_node_user_executable(pid, %NodeId{} = node_id) do
GenServer.call(pid, {:read, {:user_executable, node_id}})
end
# Write nodes Attributes
@doc """
Change 'node_id' attribute of a node in the server.
"""
@spec write_node_node_id(GenServer.server(), %NodeId{}, %NodeId{}) ::
:ok | {:error, binary()} | {:error, :einval}
def write_node_node_id(pid, %NodeId{} = node_id, %NodeId{} = new_node_id) do
GenServer.call(pid, {:write, {:node_id, node_id, new_node_id}})
end
@doc """
Change 'symmetric' attribute of a node in the server.
"""
@spec write_node_symmetric(GenServer.server(), %NodeId{}, boolean()) ::
:ok | {:error, binary()} | {:error, :einval}
def write_node_symmetric(pid, %NodeId{} = node_id, symmetric) when is_boolean(symmetric) do
GenServer.call(pid, {:write, {:symmetric, node_id, symmetric}})
end
@doc """
Change 'node_class' attribute of a node in the server.
Avalable value are:
UNSPECIFIED = 0,
OBJECT = 1,
VARIABLE = 2,
METHOD = 4,
OBJECTTYPE = 8,
VARIABLETYPE = 16,
REFERENCETYPE = 32,
DATATYPE = 64,
VIEW = 128,
"""
@spec write_node_node_class(GenServer.server(), %NodeId{}, integer()) ::
:ok | {:error, binary()} | {:error, :einval}
def write_node_node_class(pid, %NodeId{} = node_id, node_class)
when node_class in [0, 1, 2, 4, 8, 16, 32, 64, 128] do
GenServer.call(pid, {:write, {:node_class, node_id, node_class}})
end
@doc """
Change 'user_write_mask' attribute of a node in the server.
"""
@spec write_node_user_write_mask(GenServer.server(), %NodeId{}, integer()) ::
:ok | {:error, binary()} | {:error, :einval}
def write_node_user_write_mask(pid, %NodeId{} = node_id, user_write_mask)
when is_integer(user_write_mask) do
GenServer.call(pid, {:write, {:user_write_mask, node_id, user_write_mask}})
end
@doc """
Change 'contains_no_loops' attribute of a node in the server.
"""
@spec write_node_contains_no_loops(GenServer.server(), %NodeId{}, boolean()) ::
:ok | {:error, binary()} | {:error, :einval}
def write_node_contains_no_loops(pid, %NodeId{} = node_id, contains_no_loops)
when is_boolean(contains_no_loops) do
GenServer.call(pid, {:write, {:contains_no_loops, node_id, contains_no_loops}})
end
@doc """
Change 'user_access_level' attribute of a node in the server.
"""
@spec write_node_user_access_level(GenServer.server(), %NodeId{}, integer()) ::
:ok | {:error, binary()} | {:error, :einval}
def write_node_user_access_level(pid, %NodeId{} = node_id, user_access_level)
when is_integer(user_access_level) do
GenServer.call(pid, {:write, {:user_access_level, node_id, user_access_level}})
end
@doc """
Change 'user_executable' attribute of a node in the server.
"""
@spec write_node_user_executable(GenServer.server(), %NodeId{}, boolean()) ::
:ok | {:error, binary()} | {:error, :einval}
def write_node_user_executable(pid, %NodeId{} = node_id, user_executable)
when is_boolean(user_executable) do
GenServer.call(pid, {:write, {:user_executable, node_id, user_executable}})
end
@doc false
def command(pid, request) do
GenServer.call(pid, request)
end
# Handlers
# Opens the C port that drives the OPC UA client. `controlling_process`
# receives subscription messages (see handle_c_response/2 below).
def init({_args, controlling_process}) do
  # Locate the bundled port executable under the :opex62541 priv dir.
  # set_ld_library_path/1, open_port/2, use_valgrind?/0 and %State{} come
  # from `use OpcUA.Common` — presumably it also exports the shared-library
  # path for the port binary (TODO confirm against OpcUA.Common).
  lib_dir =
    :opex62541
    |> :code.priv_dir()
    |> to_string()
    |> set_ld_library_path()

  executable = lib_dir <> "/opc_ua_client"
  port = open_port(executable, use_valgrind?())
  state = %State{port: port, controlling_process: controlling_process}
  {:ok, state}
end
# Lifecycle Handlers
def handle_call({:config, {:get_state, nil}}, caller_info, state) do
call_port(state, :get_client_state, caller_info, nil)
{:noreply, state}
end
def handle_call({:config, {:set_config, args}}, caller_info, state) do
c_args =
Enum.reduce(args, %{}, fn {key, value}, acc ->
if is_nil(value) or key not in @config_keys do
acc
else
Map.put(acc, key, value)
end
end)
call_port(state, :set_client_config, caller_info, c_args)
{:noreply, state}
end
def handle_call({:config, {:get_config, nil}}, caller_info, state) do
call_port(state, :get_client_config, caller_info, nil)
{:noreply, state}
end
def handle_call({:config, {:reset_client, nil}}, caller_info, state) do
call_port(state, :reset_client, caller_info, nil)
{:noreply, state}
end
# Encryption
# Validates cert/key material and forwards it to the C port. Replies
# {:error, :einval} on any invalid input; otherwise the reply is sent
# later by handle_c_response/2 via the caller_info it carries.
def handle_call({:config, {:set_config_with_certs, args}}, caller_info, state) do
  # NOTE(review): the plain `<-` clauses with bare variables always match
  # (Keyword.fetch!/2 raises rather than falling through); only the three
  # boolean checks below can actually route to the `else` branch.
  with cert <- Keyword.fetch!(args, :certificate),
       pkey <- Keyword.fetch!(args, :private_key),
       security_mode <- Keyword.get(args, :security_mode, 1),
       certificate <- get_binary_data(cert),
       private_key <- get_binary_data(pkey),
       true <- is_binary(certificate),
       true <- is_binary(private_key),
       true <- security_mode in [1, 2, 3] do
    c_args = {security_mode, certificate, private_key}
    call_port(state, :set_config_with_security_policies, caller_info, c_args)
    {:noreply, state}
  else
    _ ->
      {:reply, {:error, :einval} ,state}
  end
end
# Connect to a Server Handlers
def handle_call({:conn, {:by_url, args}}, caller_info, state) do
url = Keyword.fetch!(args, :url)
call_port(state, :connect_client_by_url, caller_info, url)
{:noreply, state}
end
def handle_call({:conn, {:by_username, args}}, caller_info, state) do
url = Keyword.fetch!(args, :url)
username = Keyword.fetch!(args, :user)
password = Keyword.fetch!(args, :password)
c_args = {url, username, password}
call_port(state, :connect_client_by_username, caller_info, c_args)
{:noreply, state}
end
def handle_call({:conn, {:no_session, args}}, caller_info, state) do
url = Keyword.fetch!(args, :url)
call_port(state, :connect_client_no_session, caller_info, url)
{:noreply, state}
end
def handle_call({:conn, {:disconnect, nil}}, caller_info, state) do
call_port(state, :disconnect_client, caller_info, nil)
{:noreply, state}
end
# Discovery Handlers.
def handle_call({:discovery, {:find_servers_on_network, url}}, caller_info, state) do
call_port(state, :find_servers_on_network, caller_info, url)
{:noreply, state}
end
def handle_call({:discovery, {:find_servers, url}}, caller_info, state) do
call_port(state, :find_servers, caller_info, url)
{:noreply, state}
end
def handle_call({:discovery, {:get_endpoints, url}}, caller_info, state) do
call_port(state, :get_endpoints, caller_info, url)
{:noreply, state}
end
# Subscriptions and Monitored Items functions.
def handle_call({:subscription, {:subscription, publishing_interval}}, caller_info, state) do
call_port(state, :add_subscription, caller_info, publishing_interval)
{:noreply, state}
end
def handle_call({:subscription, {:delete, subscription_id}}, caller_info, state) do
call_port(state, :delete_subscription, caller_info, subscription_id)
{:noreply, state}
end
# Adds a monitored item: converts the %NodeId{} to its C representation,
# validates the ids/sampling time, and forwards to the port. The reply is
# delivered asynchronously via handle_c_response/2.
def handle_call({:subscription, {:monitored_item, args}}, caller_info, state) do
  # NOTE(review): only the two `true <-` checks can fall through to `else`;
  # the `<-` bindings always match (Keyword.fetch!/2 raises on missing keys).
  with monitored_item <- Keyword.fetch!(args, :monitored_item) |> to_c(),
       subscription_id <- Keyword.fetch!(args, :subscription_id),
       sampling_time <- Keyword.get(args, :sampling_time, 250.0),
       true <- is_integer(subscription_id),
       true <- is_float(sampling_time) do
    c_args = {monitored_item, subscription_id, sampling_time}
    call_port(state, :add_monitored_item, caller_info, c_args)
    {:noreply, state}
  else
    _ ->
      {:reply, {:error, :einval}, state}
  end
end

# Deletes a monitored item identified by {subscription_id, monitored_item_id};
# same asynchronous-reply and validation pattern as above.
def handle_call({:subscription, {:delete_monitored_item, args}}, caller_info, state) do
  with monitored_item_id <- Keyword.fetch!(args, :monitored_item_id),
       subscription_id <- Keyword.fetch!(args, :subscription_id),
       true <- is_integer(monitored_item_id),
       true <- is_integer(subscription_id) do
    c_args = {subscription_id, monitored_item_id}
    call_port(state, :delete_monitored_item, caller_info, c_args)
    {:noreply, state}
  else
    _ ->
      {:reply, {:error, :einval}, state}
  end
end
# Write nodes Attributes
def handle_call({:read, {:user_write_mask, node_id}}, caller_info, state) do
c_args = to_c(node_id)
call_port(state, :read_node_user_write_mask, caller_info, c_args)
{:noreply, state}
end
def handle_call({:read, {:user_access_level, node_id}}, caller_info, state) do
c_args = to_c(node_id)
call_port(state, :read_node_user_access_level, caller_info, c_args)
{:noreply, state}
end
def handle_call({:read, {:user_executable, node_id}}, caller_info, state) do
c_args = to_c(node_id)
call_port(state, :read_node_user_executable, caller_info, c_args)
{:noreply, state}
end
# Write nodes Attributes
def handle_call({:write, {:node_id, node_id, new_node_id}}, caller_info, state) do
c_args = {to_c(node_id), to_c(new_node_id)}
call_port(state, :write_node_node_id, caller_info, c_args)
{:noreply, state}
end
def handle_call({:write, {:node_class, node_id, node_class}}, caller_info, state) do
c_args = {to_c(node_id), node_class}
call_port(state, :write_node_node_class, caller_info, c_args)
{:noreply, state}
end
def handle_call({:write, {:user_write_mask, node_id, user_write_mask}}, caller_info, state) do
c_args = {to_c(node_id), user_write_mask}
call_port(state, :write_node_user_write_mask, caller_info, c_args)
{:noreply, state}
end
def handle_call({:write, {:symmetric, node_id, symmetric}}, caller_info, state) do
c_args = {to_c(node_id), symmetric}
call_port(state, :write_node_symmetric, caller_info, c_args)
{:noreply, state}
end
def handle_call({:write, {:contains_no_loops, node_id, contains_no_loops}}, caller_info, state) do
c_args = {to_c(node_id), contains_no_loops}
call_port(state, :write_node_contains_no_loops, caller_info, c_args)
{:noreply, state}
end
def handle_call({:write, {:user_access_level, node_id, user_access_level}}, caller_info, state) do
c_args = {to_c(node_id), user_access_level}
call_port(state, :write_node_user_access_level, caller_info, c_args)
{:noreply, state}
end
def handle_call({:write, {:user_executable, node_id, user_executable}}, caller_info, state) do
c_args = {to_c(node_id), user_executable}
call_port(state, :write_node_user_executable, caller_info, c_args)
{:noreply, state}
end
# Catch all
# Rejects any request shape not matched by the clauses above.
def handle_call(invalid_call, _caller_info, state) do
  Logger.error("#{__MODULE__} Invalid call: #{inspect(invalid_call)}")
  {:reply, {:error, :einval}, state}
end

# The C port exited; sleep briefly (@c_timeout presumably comes from
# OpcUA.Common — confirm), then stop so the supervisor restarts us.
def handle_info({_port, {:exit_status, code}}, state) do
  Logger.warn("(#{__MODULE__}) Error code: #{inspect(code)}.")
  # retrying delay
  Process.sleep(@c_timeout)
  {:stop, :restart, state}
end

# Linked-port EXIT signal: same restart-after-delay strategy as above.
def handle_info({:EXIT, _port, reason}, state) do
  Logger.debug("(#{__MODULE__}) Exit reason: #{inspect(reason)}")
  # retrying delay
  Process.sleep(@c_timeout)
  {:stop, :restart, state}
end

# Anything else is logged and ignored so stray messages don't accumulate.
def handle_info(msg, state) do
  Logger.warn("(#{__MODULE__}) Unhandled message: #{inspect(msg)}.")
  {:noreply, state}
end
# Subscription C message handlers
# Data notification for a monitored item: decode the C value and forward the
# result to the controlling process.
defp handle_c_response(
       {:subscription, {:data, subscription_id, monitored_id, c_value}},
       %{controlling_process: c_pid} = state
     ) do
  send(c_pid, {:data, subscription_id, monitored_id, parse_c_value(c_value)})
  state
end

# Every other subscription event is relayed to the controlling process as-is.
defp handle_c_response({:subscription, message}, %{controlling_process: c_pid} = state) do
  send(c_pid, message)
  state
end
# Lifecycle C Handlers
# Completes the pending call with the client state, converting the C port's
# charlist into an Elixir string first.
defp handle_c_response({:get_client_state, caller_metadata, client_state}, state) do
  GenServer.reply(caller_metadata, charlist_to_string(client_state))
  state
end
# All remaining C responses share one shape — {tag, caller_metadata, payload} —
# and one behavior: complete the pending GenServer.call/3 by replying with the
# payload unchanged. The 25 identical clauses are collapsed into a single
# clause guarded by the closed set of known response tags (grouped by area).
# Unknown tags still raise FunctionClauseError, exactly as before.
defp handle_c_response({tag, caller_metadata, c_response}, state)
     when tag in [
            # Client lifecycle
            :set_client_config,
            :get_client_config,
            :reset_client,
            # Encryption
            :set_config_with_security_policies,
            # Connect to / disconnect from a server
            :connect_client_by_url,
            :connect_client_by_username,
            :connect_client_no_session,
            :disconnect_client,
            # Discovery
            :find_servers_on_network,
            :find_servers,
            :get_endpoints,
            # Subscriptions and monitored items
            :add_subscription,
            :delete_subscription,
            :add_monitored_item,
            :delete_monitored_item,
            # Read node attributes
            :read_node_user_write_mask,
            :read_node_user_access_level,
            :read_node_user_executable,
            # Write node attributes
            :write_node_node_id,
            :write_node_node_class,
            :write_node_user_write_mask,
            :write_node_symmetric,
            :write_node_contains_no_loops,
            :write_node_user_access_level,
            :write_node_user_executable
          ] do
  GenServer.reply(caller_metadata, c_response)
  state
end
end
|
lib/opc_ua/client.ex
| 0.758332
| 0.584775
|
client.ex
|
starcoder
|
defmodule Openpayex.Charges do
  @moduledoc """
  Functions for working with charges at Openpay. Through this API you can:
  * create a charge
  * get a charge
  * list charges
  """

  alias Openpayex.OpenPay.OpenPayHelper

  @doc """
  Create charge without client.

  ## Example:
  ```
  params = %{
    method: "bank_account",
    amount: 100,
    description: "Cargo con banco",
    order_id: "oid-00055",
    customer: %{
      name: "<NAME>",
      email: "<EMAIL>"
    }
  }

  iex> Openpayex.Charges.create(params)
  {:ok, response}
  ```
  """
  @spec create(map) :: {:ok, map} | {:error, :bad_params}
  # Guard fix: the original `is_integer(amount) or (is_float(amount) and
  # is_map(customer))` accepted any integer amount without validating
  # `customer` at all (`or` binds looser than `and`).
  def create(
        %{
          method: _method,
          amount: amount,
          description: _description,
          order_id: _order_id,
          customer: customer
        } = params
      )
      when (is_integer(amount) or is_float(amount)) and is_map(customer) do
    endpoint = "/#{_get_merchant_id()}/charges"
    OpenPayHelper.http_request(:post, endpoint, params)
  end

  def create(_params), do: {:error, :bad_params}

  @doc """
  Create charge with client

  ## Example:
  ```
  params = %{
    method: "bank_account",
    amount: 100,
    description: "Cargo con banco",
    order_id: "oid-00055"
  }
  customer_id = "aqkd4esexql<PASSWORD>6utec"

  iex> Openpayex.Charges.create_with_customer(params, customer_id)
  {:ok, response}
  ```
  """
  @spec create_with_customer(map, String.t()) :: {:ok, map} | {:error, :bad_params}
  # Same guard-precedence fix as create/1, for `customer_id` validation.
  def create_with_customer(
        %{
          method: _method,
          amount: amount,
          description: _description,
          order_id: _order_id
        } = params,
        customer_id
      )
      when (is_integer(amount) or is_float(amount)) and is_binary(customer_id) do
    endpoint = "/#{_get_merchant_id()}/customers/#{customer_id}/charges"
    OpenPayHelper.http_request(:post, endpoint, params)
  end

  # Was `def create_with_client(_params)` — a typo that defined an unrelated
  # one-arg function and left create_with_customer/2 without a bad-params
  # fallback (callers got FunctionClauseError instead of {:error, :bad_params}).
  def create_with_customer(_params, _customer_id), do: {:error, :bad_params}

  @doc """
  Get charge by transaction id, or by transaction id scoped to a customer.
  """
  @spec get(String.t() | map) :: {:ok, map}
  def get(transaction_id) when is_binary(transaction_id) do
    endpoint = "/#{_get_merchant_id()}/charges/#{transaction_id}"
    OpenPayHelper.http_request(:get, endpoint)
  end

  def get(%{
        transaction_id: transaction_id,
        customer_id: customer_id
      })
      when is_binary(transaction_id) and is_binary(customer_id) do
    endpoint = "/#{_get_merchant_id()}/customers/#{customer_id}/charges/#{transaction_id}"
    OpenPayHelper.http_request(:get, endpoint)
  end

  @doc """
  List charges
  """
  @spec list() :: {:ok, [map]}
  def list() do
    endpoint = "/#{_get_merchant_id()}/charges"
    OpenPayHelper.http_request(:get, endpoint)
  end

  # Get a merchant id
  # Spec typo fixed: `Strint.t()` -> `String.t()`.
  # NOTE(review): Mix is not available in production releases; consider moving
  # this test/prod switch into the application environment instead.
  @spec _get_merchant_id() :: String.t()
  defp _get_merchant_id() do
    if Mix.env() == :test do
      "test_merchant_id"
    else
      Application.get_env(:openpayex, :merchant_id)
    end
  end
end
|
lib/openpayex/charges.ex
| 0.810028
| 0.618032
|
charges.ex
|
starcoder
|
defmodule Mcrypt do
  @moduledoc """
  This is a thin wrapper around [`libmcrypt`](http://linux.die.net/man/3/mcrypt).
  Please see the linked man page for details on the available ciphers and modes.
  """

  @on_load :init

  # Loads the NIF implementing encrypt/5 and decrypt/5. Runs automatically at
  # module load; a failed match aborts the load.
  def init do
    path = :filename.join(:code.priv_dir(:mcrypt), 'lib_mcrypt')
    :ok = :erlang.load_nif(path, 1)
  end

  @type algorithm :: :arcfour | :blowfish | :blowfish_compat | :cast_128 |
    :cast_256 | :des | :enigma | :gost | :loki97 | :rc2 | :rijndael_128 |
    :rijndael_192 | :rijndael_256 | :saferplus | :serpent | :tripledes |
    :twofish | :wake | :xtea

  @type mode :: :ecb | :cbc | :cfb | :ofb | :nofb | :ncfb | :ctr | :stream

  @doc """
  Does encryption after right padding the input with the null character(0) to the correct block size
  """
  @spec block_encrypt(binary, algorithm, mode, binary, binary) :: {:ok, binary} | :error
  def block_encrypt(plaintext, algorithm, mode, key, iv) do
    padded_plaintext = Mcrypt.Padder.zero_pad(plaintext, algorithm)
    encrypt(padded_plaintext, algorithm, mode, key, iv)
  end

  @doc """
  Does decryption and removes the 0 byte padding
  """
  @spec block_decrypt(binary, algorithm, mode, binary, binary) :: {:ok, binary} | :error
  def block_decrypt(ciphertext, algorithm, mode, key, iv) do
    case decrypt(ciphertext, algorithm, mode, key, iv) do
      {:ok, padded_plaintext} ->
        # Strip trailing zero-byte padding directly on the binary. The
        # previous charlist/`to_string` round trip re-encoded bytes >= 128 as
        # multi-byte UTF-8, corrupting non-ASCII plaintexts; it also crashed
        # with a MatchError on decryption failure instead of returning :error
        # as the spec promises.
        {:ok, String.trim_trailing(padded_plaintext, <<0>>)}

      :error ->
        :error
    end
  end

  @doc """
  Wraps libmcrypt's `mcrypt_generic`, including setup and teardown of the cipher
  module.
  """
  @spec encrypt(binary, algorithm, mode, binary, binary) :: {:ok, binary} | :error
  def encrypt(_plaintext, _algorithm, _mode, _key, _iv) do
    # Replaced by the NIF loaded in init/0; reached only if loading failed.
    :erlang.nif_error("nif not loaded")
  end

  @doc """
  Like `encrypt`, but raises on error.
  """
  @spec encrypt!(binary, algorithm, mode, binary, binary) :: binary | no_return
  def encrypt!(plaintext, algorithm, mode, key, iv) do
    case encrypt(plaintext, algorithm, mode, key, iv) do
      {:ok, ciphertext} -> ciphertext
      :error -> raise "Mcrypt error."
    end
  end

  @doc """
  Wraps libmcrypt's `mdecrypt_generic`, including setup and teardown of the
  cipher module.
  """
  @spec decrypt(binary, algorithm, mode, binary, binary) :: {:ok, binary} | :error
  def decrypt(_ciphertext, _algorithm, _mode, _key, _iv) do
    # Replaced by the NIF loaded in init/0; reached only if loading failed.
    :erlang.nif_error("nif not loaded")
  end

  @doc """
  Like `decrypt`, but raises on error.
  """
  @spec decrypt!(binary, algorithm, mode, binary, binary) :: binary | no_return
  def decrypt!(ciphertext, algorithm, mode, key, iv) do
    case decrypt(ciphertext, algorithm, mode, key, iv) do
      {:ok, plaintext} -> plaintext
      :error -> raise "Mcrypt error."
    end
  end
end
|
lib/mcrypt.ex
| 0.825167
| 0.459743
|
mcrypt.ex
|
starcoder
|
defmodule Nixa.NaiveBayes.Categorical do
  @moduledoc """
  Implements a categorical Naive Bayes classifier
  """

  import Nixa.Shared
  import Nixa.Stats
  import Nixa.NaiveBayes.Shared

  # class_probs: per-class prior probabilities (tensor)
  # feature_probs: one map per class of {feature_index, feature_value} => P(value | class)
  # alpha: smoothing option; also used in predict as fallback probability for unseen values
  defstruct [
    class_probs: nil,
    feature_probs: nil,
    alpha: nil
  ]

  @doc """
  Train a model using the provided inputs and targets

  ## Options

    * `:class_probability` - `:weighted` (default) or a precomputed list of class priors
    * `:alpha` - smoothing factor, default `1.0`
  """
  def fit(inputs, targets, opts \\ []) do
    class_probability = Keyword.get(opts, :class_probability, :weighted)
    alpha = Keyword.get(opts, :alpha, 1.0)
    # Use caller-supplied priors verbatim when given as a list; otherwise
    # derive them from the targets.
    # NOTE(review): when a list is supplied, it flows into Nx.size/1 and
    # (in predict_one/2) Nx.to_flat_list/1 — confirm a plain list is valid there.
    class_probs = if is_list(class_probability),
      do: class_probability,
      else: calc_class_prob(targets, class_probability, alpha)
    num_classes = class_probs |> Nx.size() |> Nx.to_scalar()
    # Per-class conditional probabilities are independent, so compute them
    # concurrently, one task per class.
    feature_probs = 0..(num_classes - 1)
      |> Enum.map(fn c -> Task.async(fn -> calc_feature_probs(c, inputs, targets) end) end)
      |> Task.await_many(:infinity)
    %__MODULE__{
      class_probs: class_probs,
      feature_probs: feature_probs,
      alpha: alpha
    }
  end

  @doc """
  Predict classes using a trained model
  """
  def predict(%__MODULE__{} = model, inputs) do
    inputs
    |> Enum.map(fn input -> predict_one(model, input) end)
  end

  ### Internal functions

  # Scores one input against every class and returns the index of the best
  # class as a rank-1 tensor.
  defp predict_one(model, input) do
    model.class_probs
    |> Nx.to_flat_list()
    |> Enum.zip(model.feature_probs)
    |> Enum.map(fn {ck, px} -> calc_input_probs(input, ck, px, model.alpha) end)
    |> Nx.tensor()
    |> Nx.argmax()
    |> Nx.new_axis(0)
  end

  # Naive Bayes score: prior ck times the product over features of
  # P(value | class), falling back to `alpha` for values unseen in training.
  # NOTE(review): indexes inputs[0] — assumes each input carries a single row;
  # confirm against callers.
  defp calc_input_probs(inputs, ck, px, alpha) do
    num_f = inputs[0] |> Nx.size()
    for f <- 0..(num_f - 1), reduce: ck do
      p -> p * Map.get(px, {f, Nx.to_scalar(inputs[0][f])}, alpha)
    end
  end

  # Builds {feature_index, feature_value} => probability for class `c`, using
  # only the inputs whose target equals `c`.
  # NOTE(review): counts are smoothed with a fixed Nx.add(1), independent of
  # the model's :alpha option — confirm that is intentional.
  defp calc_feature_probs(c, inputs, targets) do
    t_inputs = inputs
      |> Enum.zip(targets)
      |> Enum.filter(fn {_input, target} -> target |> Nx.squeeze() |> Nx.to_scalar() == c end)
      |> Enum.unzip()
      |> elem(0)
      |> Nx.concatenate()
    num_f = t_inputs[0] |> Nx.size()
    for f <- 0..(num_f - 1), reduce: %{} do
      acc -> f_vals = t_inputs[[0..-1, f]]
        vals = f_vals |> Nx.to_flat_list() |> MapSet.new() |> MapSet.to_list()
        px = f_vals |> frequencies() |> Nx.add(1) |> prob_dist() |> Nx.to_flat_list()
        vals
        |> Enum.zip(px)
        |> Enum.reduce(acc, fn {val, p}, a -> Map.put(a, {f, val}, p) end)
    end
  end
end
|
lib/nixa/naive_bayes/categorical.ex
| 0.833968
| 0.58264
|
categorical.ex
|
starcoder
|
defmodule Tokenizers do
  @moduledoc """
  Elixir bindings to [Hugging Face Tokenizers](https://github.com/huggingface/tokenizers).

  Hugging Face describes the Tokenizers library as:

  > Fast State-of-the-art tokenizers, optimized for both research and production
  >
  > 🤗 Tokenizers provides an implementation of today's most used tokenizers, with a focus on performance and versatility. These tokenizers are also used in 🤗 Transformers.

  This library has bindings to use pretrained tokenizers. Support for building and training a tokenizer from scratch is forthcoming.
  """

  alias Tokenizers.Encoding
  alias Tokenizers.Tokenizer
  alias Tokenizers.Native

  @doc """
  Instantiate a new tokenizer from an existing file on the Hugging Face Hub.
  """
  # NOTE(review): the spec advertises a bare Tokenizer.t(), but sibling
  # functions in this module return {:ok, _} | {:error, _} tuples — confirm
  # what the NIF actually returns.
  @spec from_pretrained(binary()) :: Tokenizer.t()
  def from_pretrained(identifier), do: Native.from_pretrained(identifier)

  @doc """
  Instantiate a new tokenizer from the file at the given path.
  """
  @spec from_file(binary()) :: Tokenizer.t()
  def from_file(path), do: Native.from_file(path)

  @doc """
  Save the tokenizer to the provided path.
  """
  # NOTE(review): the literal `true` is forwarded as the NIF's third argument
  # (presumably a "pretty" flag) — confirm against the native implementation.
  @spec save(Tokenizer.t(), binary()) :: term()
  def save(tokenizer, path), do: Native.save(tokenizer, path, true)

  @doc """
  Encode the given sequence or batch of sequences to ids.
  """
  # The trailing `false` is forwarded to the NIF unchanged (same in decode/2).
  @spec encode(Tokenizer.t(), binary() | [binary()]) :: Encoding.t() | [Encoding.t()]
  def encode(tokenizer, input) when is_binary(input), do: Native.encode(tokenizer, input, false)
  def encode(tokenizer, input) when is_list(input),
    do: Native.encode_batch(tokenizer, input, false)

  @doc """
  Decode the given list of ids or list of lists of ids back to strings.
  """
  # NOTE(review): the spec says binary() | [binary()] for the second argument,
  # but the clauses match a list of integers or a list of lists — the spec
  # looks stale; confirm intended input type.
  @spec decode(Tokenizer.t(), binary() | [binary()]) ::
          {:ok, Encoding.t() | [Encoding.t()]} | {:error, term()}
  def decode(tokenizer, [first | _] = ids) when is_integer(first),
    do: Native.decode(tokenizer, ids, false)
  def decode(tokenizer, [first | _] = ids) when is_list(first),
    do: Native.decode_batch(tokenizer, ids, false)

  @doc """
  Get the tokenizer's vocabulary as a map of token to id.
  """
  @spec get_vocab(Tokenizer.t()) :: {:ok, %{binary() => integer()}} | {:error, term()}
  def get_vocab(tokenizer), do: Native.get_vocab(tokenizer, false)

  @doc """
  Get the number of tokens in the vocabulary.
  """
  @spec get_vocab_size(Tokenizer.t()) :: {:ok, integer()} | {:error, term()}
  def get_vocab_size(tokenizer), do: Native.get_vocab_size(tokenizer, false)

  @doc """
  Get the tokens from an encoding.
  """
  @spec get_tokens(Encoding.t()) :: {:ok, [binary()]} | {:error, term()}
  def get_tokens(encoding), do: Native.get_tokens(encoding)

  @doc """
  Get the ids from an encoding.
  """
  @spec get_ids(Encoding.t()) :: {:ok, [integer()]} | {:error, term()}
  def get_ids(encoding), do: Native.get_ids(encoding)

  @doc """
  Get the attention_mask from an encoding.
  """
  @spec get_attention_mask(Encoding.t()) :: {:ok, [integer()]} | {:error, term()}
  def get_attention_mask(encoding), do: Native.get_attention_mask(encoding)

  @doc """
  Convert a given id to its token.
  """
  @spec id_to_token(Tokenizer.t(), integer()) :: {:ok, binary()} | {:error, term()}
  def id_to_token(tokenizer, id), do: Native.id_to_token(tokenizer, id)

  @doc """
  Convert a given token to its id.
  """
  @spec token_to_id(Tokenizer.t(), binary()) :: {:ok, integer()} | {:error, term()}
  def token_to_id(tokenizer, token), do: Native.token_to_id(tokenizer, token)

  @doc """
  Truncate the encoding to the given length.

  ## Options

  * `direction` - The truncation direction. Can be `:right` or `:left`. Default: `:right`.
  * `stride` - The length of previous content to be included in each overflowing piece. Default: `0`.
  """
  @spec truncate(encoding :: Encoding.t(), length :: integer(), opts :: Keyword.t()) ::
          {:ok, Encoding.t()} | {:error, term()}
  def truncate(encoding, max_len, opts \\ []) do
    opts = Keyword.validate!(opts, direction: :right, stride: 0)
    # The direction atom is rendered to a string for the NIF boundary.
    Native.truncate(encoding, max_len, opts[:stride], "#{opts[:direction]}")
  end

  @doc """
  Pad the encoding to the given length.

  ## Options

  * `direction` - The padding direction. Can be `:right` or `:left`. Default: `:right`.
  * `pad_id` - The id corresponding to the padding token. Default: `0`.
  * `pad_token` - The padding token to use. Default: `"[PAD]"`.
  * `pad_type_id` - The type ID corresponding to the padding token. Default: `0`.
  """
  @spec pad(encoding :: Encoding.t(), length :: pos_integer(), opts :: Keyword.t()) ::
          Encoding.t()
  def pad(encoding, length, opts \\ []) do
    opts =
      Keyword.validate!(opts, direction: :right, pad_id: 0, pad_type_id: 0, pad_token: "[PAD]")

    Native.pad(
      encoding,
      length,
      opts[:pad_id],
      opts[:pad_type_id],
      opts[:pad_token],
      "#{opts[:direction]}"
    )
  end
end
|
lib/tokenizers.ex
| 0.92686
| 0.76921
|
tokenizers.ex
|
starcoder
|
defmodule ConciergeSite.Schedule do
  @moduledoc """
  The schedule for a route or trip.
  """
  alias AlertProcessor.{ApiClient, DayType, ExtendedTime, ServiceInfoCache}
  alias AlertProcessor.Model.{Route, Subscription, TripInfo}
  alias ConciergeSite.Schedule

  @typedoc """
  A tuple of a mode and route ID
  """
  @type route_mode_id_key :: {String.t(), String.t()}

  # Fixed: the previous `%{route_mode_id_key: TripInfo.t()}` described a map
  # with the single literal atom key :route_mode_id_key. The maps built below
  # are keyed by {mode, route_id} tuples and hold lists of trips.
  @type t :: %{optional(route_mode_id_key()) => [TripInfo.t()]}

  @doc """
  Determine the direction (0 for counter to the order of the stop list, or 1 for with the order of the stop list) from some combination of a route stop list, string-formatted direction, origin, and destination.

  iex> Schedule.determine_direction_id(nil, "0", nil, nil)
  0
  iex> Schedule.determine_direction_id(nil, "1", nil, nil)
  1
  iex> stop_list = [{"Readville", "place-DB-0095", {42.238405, -71.133246}, 1}, {"Fairmount", "place-DB-2205", {42.253638, -71.11927}, 1}, {"Morton Street", "place-DB-2230", {42.280994, -71.085475}, 1}, {"Talbot Avenue", "place-DB-2240", {42.292246, -71.07814}, 1}, {"Four Corners/Geneva", "place-DB-2249", {42.305037, -71.076833}, 1}, {"Uphams Corner", "place-DB-2258", {42.31867, -71.069072}, 1}, {"Newmarket", "place-DB-2265", {42.326701, -71.066314}, 1}, {"South Station", "place-sstat", {42.352271, -71.055242}, 1}]
  iex> earlier_stop = "place-DB-2258"
  iex> later_stop = "place-DB-2265"
  iex> Schedule.determine_direction_id(stop_list, nil, later_stop, earlier_stop)
  0
  iex> Schedule.determine_direction_id(stop_list, nil, earlier_stop, later_stop)
  1
  """
  @spec determine_direction_id(
          [Route.stop()],
          String.t() | nil,
          String.t() | nil,
          String.t() | nil
        ) :: 0 | 1
  def determine_direction_id(_, "0", _, _), do: 0
  def determine_direction_id(_, "1", _, _), do: 1

  def determine_direction_id(stop_list, _, origin, destination) do
    # Filter the ordered stop list down to the two endpoints; the order in
    # which they appear decides the direction.
    case stop_list
         |> Enum.filter(fn {_name, id, _latlong, _wheelchair} ->
           Enum.member?([origin, destination], id)
         end)
         |> Enum.map(fn {_name, id, _latlong, _wheelchair} -> id end) do
      [^origin, ^destination] -> 1
      [^destination, ^origin] -> 0
    end
  end

  @doc """
  Retrieve TripInfo records given Lists of correlated legs, origins, destination and modes.
  """
  @spec get_schedules_for_input([String.t()], [String.t()], [String.t()], [String.t()]) ::
          Schedule.t()
  def get_schedules_for_input(legs, origins, destinations, modes) do
    # Every trip built here is an outbound (non-return) trip.
    return_trip = false

    [
      Enum.reverse(modes),
      Enum.reverse(legs),
      Enum.reverse(origins),
      Enum.reverse(destinations)
    ]
    |> Enum.zip()
    |> Enum.map(fn input_tuple ->
      {type, route, origin, destination} = input_tuple

      subscription = %Subscription{
        type: String.to_atom(type),
        route: route,
        origin: origin,
        destination: destination,
        return_trip: return_trip
      }

      subscription
    end)
    |> get_schedules_for_trip(return_trip)
  end

  @doc """
  Retrieve TripInfo records for a list of Subscriptions and whether or not it is a return trip.
  """
  @spec get_schedules_for_trip([Subscription.t()], boolean) :: Schedule.t()
  def get_schedules_for_trip(subscriptions, return_trip) do
    weekday_schedules =
      subscriptions
      |> get_typical_weekday_schedules(return_trip)
      |> categorize_by_weekend(false)

    weekend_schedules =
      subscriptions
      |> get_typical_weekend_schedules(return_trip)
      |> categorize_by_weekend(true)

    interleave_schedule_trips(weekday_schedules, weekend_schedules)
  end

  @spec get_typical_weekday_schedules([Subscription.t()], boolean) :: [Schedule.t()]
  defp get_typical_weekday_schedules(subscriptions, return_trip) do
    # Get schedules for 5 weekdays to figure out what the typical schedule is. We need to get enough days such that the majority of them will be non-holidays. The worst case scenario is 2 holiday days in a row, so we need 3 extra days more than this.
    5
    |> DayType.take_weekdays()
    |> typical_schedules_for_days(subscriptions, return_trip)
  end

  @spec get_typical_weekend_schedules([Subscription.t()], boolean) :: [Schedule.t()]
  defp get_typical_weekend_schedules(subscriptions, return_trip) do
    # Get schedules for 7 weekend days to figure out what the typical schedule is. We need to get enough days such that the majority of them will be non-holidays. Since Christmas day, New Years Eve, and New Years Day all run on holiday schedules, the worst case scenario is asking for a schedule on Christmas Day on a Sunday in which case there will be three weekend holiday days in a row. Therefore we need 4 extra days more than this.
    # NOTE(review): the comment above asks for 7 weekend days but the code
    # samples only 3 Saturdays (no Sundays) — confirm which is intended.
    3
    |> DayType.take_saturdays()
    |> typical_schedules_for_days(subscriptions, return_trip)
  end

  # Spec typo fixed: `[Date.T]` -> `[Date.t()]`.
  @spec typical_schedules_for_days([Date.t()], [Subscription.t()], boolean) :: [Schedule.t()]
  defp typical_schedules_for_days(days, subscriptions, return_trip) do
    days
    |> Enum.map(&get_schedules_for_subscriptions_and_date(subscriptions, return_trip, &1))
    |> most_common_schedule()
  end

  # Picks the schedule that occurred on the most sampled days (majority vote),
  # which filters out holiday-service days.
  @spec most_common_schedule([map]) :: [Schedule.t()]
  defp most_common_schedule(daily_schedules) do
    {schedule, _count} =
      daily_schedules
      |> Enum.reduce(%{}, fn schedule, acc -> Map.update(acc, schedule, 1, &(&1 + 1)) end)
      |> Enum.sort_by(&elem(&1, 1))
      |> List.last()

    schedule
  end

  @spec get_schedules_for_subscriptions_and_date([Subscription.t()], boolean, Date.t()) :: [
          Schedule.t()
        ]
  defp get_schedules_for_subscriptions_and_date(subscriptions, return_trip, date) do
    subscriptions
    |> Enum.filter(&(&1.return_trip == return_trip))
    |> Enum.reduce(%{}, fn %{type: type, route: route, origin: origin, destination: destination},
                           acc ->
      # Subway and bus subscriptions are skipped — no schedule lookup is done
      # for them here.
      case type do
        :subway ->
          acc

        :bus ->
          acc

        _ ->
          Map.put(
            acc,
            {Atom.to_string(type), route},
            get_schedule(route, origin, destination, date)
          )
      end
    end)
  end

  @spec get_schedule(String.t(), String.t(), String.t(), Date.t()) :: [TripInfo.t()]
  defp get_schedule(route_id, origin, destination, date) do
    with {:ok, route} <- ServiceInfoCache.get_route(route_id),
         direction_id <- determine_direction_id(route.stop_list, nil, origin, destination),
         {:ok, origin_stop} <- ServiceInfoCache.get_stop(origin),
         {:ok, destination_stop} <- ServiceInfoCache.get_stop(destination),
         trip <- %TripInfo{
           origin: origin_stop,
           destination: destination_stop,
           direction_id: direction_id
         } do
      case ApiClient.schedules(
             origin,
             destination,
             direction_id,
             [route.route_id],
             date
           ) do
        {:ok, schedules, trips} ->
          schedules
          |> remove_shuttle_schedules(route.route_id)
          |> map_common_trips(map_trip_names(trips), trip, date)

        {:ok, _} ->
          []

        {:error, _} ->
          []
      end
    end
  end

  # Drops schedules whose route differs from the requested one (e.g. shuttle
  # replacements returned by the API).
  @spec remove_shuttle_schedules([map], String.t()) :: [map]
  def remove_shuttle_schedules(schedules, route_id) do
    Enum.filter(schedules, &(&1["relationships"]["route"]["data"]["id"] == route_id))
  end

  @spec map_trip_names([map]) :: map
  defp map_trip_names(trips) do
    Map.new(trips, &map_trip_name/1)
  end

  @spec map_trip_name(map) :: tuple
  defp map_trip_name(%{"type" => "trip", "id" => id, "attributes" => %{"name" => name}}),
    do: {id, name}

  defp map_trip_name(_), do: {nil, nil}

  # Groups API schedule entries by trip, keeps trips that touch both stops
  # (more than one entry), and converts each into a TripInfo with departure
  # and arrival times.
  @spec map_common_trips([map], map, map, Date.t()) :: [TripInfo.t()]
  defp map_common_trips(schedules, trip_names_map, trip, date) do
    schedules
    |> Enum.group_by(fn %{"relationships" => %{"trip" => %{"data" => %{"id" => id}}}} -> id end)
    |> Enum.filter(fn {_id, schedules} -> Enum.count(schedules) > 1 end)
    |> Enum.map(fn {_id, schedules} ->
      # Earliest entry is the departure (origin), next is the arrival.
      [departure_schedule, arrival_schedule | _] =
        Enum.sort_by(schedules, fn %{"attributes" => %{"departure_time" => departure_timestamp}} ->
          departure_timestamp
        end)

      %{
        "attributes" => %{
          "departure_time" => departure_timestamp
        },
        "relationships" => %{
          "trip" => %{
            "data" => %{
              "id" => trip_id
            }
          },
          "route" => %{
            "data" => %{
              "id" => route_id
            }
          }
        }
      } = departure_schedule

      %{"attributes" => %{"arrival_time" => arrival_timestamp}} = arrival_schedule
      {:ok, route} = ServiceInfoCache.get_route(route_id)
      arrival_datetime = NaiveDateTime.from_iso8601!(arrival_timestamp)
      departure_datetime = NaiveDateTime.from_iso8601!(departure_timestamp)
      {:ok, arrival_extended_time} = ExtendedTime.new(arrival_datetime, date)
      {:ok, departure_extended_time} = ExtendedTime.new(departure_datetime, date)

      %{
        trip
        | arrival_time: NaiveDateTime.to_time(arrival_datetime),
          departure_time: NaiveDateTime.to_time(departure_datetime),
          arrival_extended_time: arrival_extended_time,
          departure_extended_time: departure_extended_time,
          trip_number: Map.get(trip_names_map, trip_id),
          route: route
      }
    end)
    |> Enum.sort(&by_departure_extended_time/2)
  end

  # Tags every trip in the schedule map with whether it is a weekend trip.
  @spec categorize_by_weekend([Schedule.t()], boolean) :: Schedule.t()
  defp categorize_by_weekend(schedules, weekend?) do
    schedules
    |> Map.new(fn {key, trips} -> {key, trips |> Enum.map(&Map.put(&1, :weekend?, weekend?))} end)
  end

  @spec interleave_schedule_trips(Schedule.t(), Schedule.t()) :: Schedule.t()
  defp interleave_schedule_trips(weekday_schedules, weekend_schedules) do
    # weekday_schedules and weekend_schedules should contain the same keys
    weekday_schedules
    |> Map.new(fn {key, _} ->
      {
        key,
        merge_and_sort_trips(weekday_schedules[key], weekend_schedules[key])
      }
    end)
  end

  @spec merge_and_sort_trips([TripInfo.t()], [TripInfo.t()]) :: [TripInfo.t()]
  defp merge_and_sort_trips(weekday_trips, weekend_trips) do
    (weekday_trips ++ weekend_trips)
    |> Enum.sort(&by_departure_extended_time/2)
  end

  # Sort helper: trip A sorts before trip B when it departs no later.
  @spec by_departure_extended_time(TripInfo.t(), TripInfo.t()) :: boolean
  defp by_departure_extended_time(
         %TripInfo{departure_extended_time: departure_extended_time_a},
         %TripInfo{departure_extended_time: departure_extended_time_b}
       ),
       do:
         ExtendedTime.compare(departure_extended_time_a, departure_extended_time_b) in [:lt, :eq]
end
|
apps/concierge_site/lib/schedule.ex
| 0.842475
| 0.416915
|
schedule.ex
|
starcoder
|
defmodule StrawHat.Utils.Map do
  @moduledoc """
  Functions for transforming maps, keys and values.
  """

  defmodule AtomizeKeyError do
    @type t :: %__MODULE__{key: any}
    defexception [:key]

    @spec message(%{key: String.t()}) :: String.t()
    def message(%{key: key}) do
      "\"#{key}\" binary hasn't been used on the system as an atom before"
    end
  end

  @doc ~S"""
  Recursively traverse a map and invoke a function for each key/
  value pair that transforms the map.

  ## Examples

      iex> map = %{a: "a", b: %{c: "c"}}
      iex> StrawHat.Utils.Map.deep_map map, fn {k, v} ->
      ...>   {k, String.upcase(v)}
      ...> end
      %{a: "A", b: %{c: "C"}}
  """
  # Spec fix: `Map.t()` is not a defined type; the built-in `map()` is the
  # correct spelling.
  @spec deep_map(map(), function :: function()) :: map()
  # Don't deep map structs since they have atom keys anyway and they
  # also don't support enumerable
  def deep_map(%{__struct__: _any} = map, _function) do
    map
  end

  def deep_map(map, function) when is_map(map) do
    Enum.into(map, %{}, fn
      {k, v} when is_map(v) or is_list(v) ->
        {k, deep_map(v, function)}

      {k, v} ->
        function.({k, v})
    end)
  end

  def deep_map([head | rest], fun) do
    [deep_map(head, fun) | deep_map(rest, fun)]
  end

  def deep_map(nil, _fun) do
    nil
  end

  def deep_map(value, fun) do
    fun.(value)
  end

  @doc """
  Recursively traverse a map and invoke a function for each key
  and a function for each value that transform the map.

  * `key_function` is a function or function reference that
    is called for each key of the provided map and any keys
    of any submaps

  * `value_function` is a function or function reference that
    is called for each value of the provided map and any values
    of any submaps
  """
  @spec deep_map(map(), key_function :: function(), value_function :: function()) :: map()
  def deep_map(map, key_function, value_function)

  def deep_map(%{__struct__: _any} = map, _key_function, _value_function) do
    map
  end

  def deep_map(map, key_function, value_function) when is_map(map) do
    Enum.into(map, %{}, fn
      {k, v} when is_map(v) or is_list(v) ->
        {key_function.(k), deep_map(v, key_function, value_function)}

      {k, v} ->
        {key_function.(k), value_function.(v)}
    end)
  end

  def deep_map([head | rest], key_fun, value_fun) do
    [deep_map(head, key_fun, value_fun) | deep_map(rest, key_fun, value_fun)]
  end

  def deep_map(nil, _key_fun, _value_fun) do
    nil
  end

  def deep_map(value, _key_fun, value_fun) do
    value_fun.(value)
  end

  @doc """
  Transforms a `map`'s `String.t` keys to `atom()` keys.

  * `options` is a keyword list of options. The
    available option is:

    * `:only_existing` which is set to `true` will
      only convert the binary key to an atom if the atom
      already exists. The default is `true`.
  """
  # Doc fix: the docstring previously claimed the default is `false`, but the
  # code has always defaulted to `[only_existing: true]` (the safe choice —
  # it raises AtomizeKeyError instead of minting atoms from arbitrary input).
  @spec atomize_keys(map(), keyword()) :: map()
  def atomize_keys(map, options \\ [only_existing: true]) do
    deep_map(map, &atomize_element(&1, options[:only_existing]), &StrawHat.identity/1)
  end

  @doc """
  Transforms a `map`'s `String.t` values to `atom()` values.

  * `options` is a keyword list of options. The
    available option is:

    * `:only_existing` which is set to `true` will
      only convert the binary value to an atom if the atom
      already exists. The default is `false`.
  """
  @spec atomize_values(map(), keyword()) :: map()
  def atomize_values(map, options \\ [only_existing: false]) do
    deep_map(map, &StrawHat.identity/1, &atomize_element(&1, options[:only_existing]))
  end

  @doc """
  Transforms a `map`'s `atom()` keys to `String.t` keys.
  """
  @spec stringify_keys(map()) :: map()
  def stringify_keys(map) do
    deep_map(
      map,
      fn
        k when is_atom(k) -> Atom.to_string(k)
        k -> k
      end,
      &StrawHat.identity/1
    )
  end

  defp atomize_element(x, true) when is_binary(x) do
    try do
      String.to_existing_atom(x)
    rescue
      ArgumentError ->
        reraise(AtomizeKeyError, [key: x], __STACKTRACE__)
    end
  end

  defp atomize_element(x, false) when is_binary(x), do: String.to_atom(x)
  defp atomize_element(x, _), do: x
end
|
lib/straw_hat/utils/map.ex
| 0.927207
| 0.61383
|
map.ex
|
starcoder
|
defmodule Matrix do
  @moduledoc """
  Documentation Matrix Module
  """

  @doc """
  Converts a multidimensional list into a zero-indexed map.

  ## Parameters
  - list: the list to convert.

  ## Example
      iex> Matrix.from_list([])
      %{}
      iex> Matrix.from_list(["x", "o", "x"])
      %{0 => "x", 1 => "o", 2 => "x"}
      iex> Matrix.from_list([["x", "o", "x"]])
      %{0 => %{0 => "x", 1 => "o", 2 => "x"}}
      iex> Matrix.from_list([["x", "o", "x"],
      ...>                   ["o", "x", "o"],
      ...>                   ["x", "o", "x"]])
      %{0 => %{0 => "x", 1 => "o", 2 => "x"},
        1 => %{0 => "o", 1 => "x", 2 => "o"},
        2 => %{0 => "x", 1 => "o", 2 => "x"}}
  """
  def from_list(list) when is_list(list) do
    do_from_list(list)
  end

  defp do_from_list(list, map \\ %{}, index \\ 0)

  defp do_from_list([], map, _index) do
    map
  end

  defp do_from_list([h | t], map, index) do
    # Recurse into `h` so nested lists become nested maps.
    map = Map.put(map, index, do_from_list(h))
    do_from_list(t, map, index + 1)
  end

  # Non-list leaf values are stored as-is.
  defp do_from_list(other, _, _) do
    other
  end

  @doc """
  Converts a zero-indexed map into a multidimensional list.

  ## Parameters
  - matrix: the matrix to convert.

  ## Example
      iex> Matrix.to_list(%{})
      []
      iex> Matrix.to_list(%{0 => "x", 1 => "o", 2 => "x"})
      ["x", "o", "x"]
      iex> Matrix.to_list(%{0 => %{0 => "x", 1 => "o", 2 => "x"}})
      [["x", "o", "x"]]
      iex> Matrix.to_list(%{0 => %{0 => "x", 1 => "o", 2 => "x"},
      ...>                 1 => %{0 => "o", 1 => "x", 2 => "o"},
      ...>                 2 => %{0 => "x", 1 => "o", 2 => "x"}})
      [["x", "o", "x"],
       ["o", "x", "o"],
       ["x", "o", "x"]]
  """
  def to_list(matrix) when is_map(matrix) do
    do_to_list(matrix)
  end

  defp do_to_list(matrix) when is_map(matrix) do
    # Erlang/Elixir maps are unordered; `Map.values/1` only *happens* to come
    # back in key order for small maps (<= 32 entries). Sort by the integer
    # index explicitly so matrices larger than 32 rows/columns keep their
    # element order.
    matrix
    |> Enum.sort_by(fn {index, _value} -> index end)
    |> Enum.map(fn {_index, value} -> do_to_list(value) end)
  end

  defp do_to_list(other) do
    other
  end
end
|
apps/room/lib/matrix.ex
| 0.829871
| 0.494446
|
matrix.ex
|
starcoder
|
defmodule IntelHex.Decoder do
alias IntelHex.{DecodeError, Record}
use Bitwise
@moduledoc false
@doc """
Decode one hex record.

If the record is not in Intel Hex format, an exception will be raised.
"""
@spec decode_record!(String.t()) :: IntelHex.Record.t() | no_return
def decode_record!(string) do
  # Pipeline: trim whitespace -> drop the ':' start code -> hex pairs to
  # integers -> validate declared length -> validate checksum -> build the
  # Record struct. Every stage raises DecodeError on malformed input.
  string
  |> String.trim()
  |> strip_start_code()
  |> to_integers()
  |> length_ok?()
  |> checksum_ok?()
  |> to_record()
end
defp strip_start_code(<<?:, rest::binary>>), do: rest
defp strip_start_code(_), do: raise(DecodeError, message: "Missing record start code ':'")
defp to_integers(<<>>), do: []
defp to_integers(<<hex::binary-size(2), rest::binary>>) do
[String.to_integer(hex, 16) | to_integers(rest)]
rescue
ArgumentError ->
raise DecodeError, message: "Expecting a hex integer, but got #{inspect(hex)}."
end
defp to_integers(_other) do
raise DecodeError, message: "Expecting an even number of hex characters"
end
defp length_ok?([data_bytes | _rest] = numbers) do
byte_count = data_bytes + 5
if length(numbers) != byte_count do
raise DecodeError, message: "Checksum failure"
else
numbers
end
end
defp checksum_ok?(numbers) do
csum = Enum.reduce(numbers, 0, &+/2) &&& 0xFF
if csum != 0 do
raise DecodeError, message: "Checksum failure"
else
numbers
end
end
defp to_record([_data_bytes, address_msb, address_lsb, type | data_and_checksum]) do
record_type = record_type(type)
%Record{
address: to_address(record_type, address_msb, address_lsb, data_and_checksum),
type: record_type,
data: Enum.drop(data_and_checksum, -1)
}
end
defp record_type(0), do: :data
defp record_type(1), do: :eof
defp record_type(2), do: :extended_segment_address
defp record_type(3), do: :start_segment_address
defp record_type(4), do: :extended_linear_address
defp record_type(5), do: :start_linear_address
defp record_type(x), do: raise(DecodeError, message: "Unknown record type #{x}")
defp to_address(:extended_linear_address, _address_msb, _address_lsb, [a, b | _rest]) do
(a <<< 24) + (b <<< 16)
end
defp to_address(:start_linear_address, _address_msb, _address_lsb, [a, b, c, d | _rest]) do
(a <<< 24) + (b <<< 16) + (c <<< 8) + d
end
defp to_address(:extended_segment_address, _address_msb, _address_lsb, [a, b | _rest]) do
(a <<< 12) + (b <<< 4)
end
defp to_address(:start_segment_address, _address_msb, _address_lsb, [a, b, c, d | _rest]) do
# ab is segment register, cd is index
(a <<< 12) + (b <<< 4) + (c <<< 8) + d
end
defp to_address(_, address_msb, address_lsb, _) do
(address_msb <<< 8) + address_lsb
end
end
|
lib/intel_hex/decoder.ex
| 0.821939
| 0.420659
|
decoder.ex
|
starcoder
|
defmodule EpicenterWeb.Test.Pages.InvestigationCompleteInterview do
  @moduledoc """
  Test page object for the "complete interview" page of case and contact
  investigations. Wraps navigation and assertions used by LiveView tests.
  """

  import Euclid.Test.Extra.Assertions
  import ExUnit.Assertions
  import Phoenix.LiveViewTest

  alias Epicenter.Cases.CaseInvestigation
  alias Epicenter.ContactInvestigations.ContactInvestigation
  alias Epicenter.Test
  alias EpicenterWeb.Test.Pages
  alias Phoenix.LiveViewTest.View

  # DOM id of the complete-interview form; used to locate inputs and the form.
  @form_id "investigation-interview-complete-form"

  # Visit the complete-interview page for a case investigation.
  def visit(%Plug.Conn{} = conn, %CaseInvestigation{id: case_investigation_id}) do
    conn |> Pages.visit("/case-investigations/#{case_investigation_id}/complete-interview")
  end

  # Visit the complete-interview page for a contact investigation.
  def visit(%Plug.Conn{} = conn, %ContactInvestigation{id: id}) do
    conn |> Pages.visit("/contact-investigations/#{id}/complete-interview")
  end

  def assert_header(view, header_text) do
    view |> render() |> Test.Html.parse() |> Test.Html.role_text("complete-interview-title") |> assert_eq(header_text)
    view
  end

  def assert_here(view_or_conn_or_html) do
    view_or_conn_or_html |> Pages.assert_on_page("investigation-complete-interview")
  end

  # :today only checks the date format, not the exact value, to avoid
  # midnight-rollover flakiness.
  def assert_date_completed(%View{} = view, :today) do
    [actual_date] = actual_date_completed(view)
    assert actual_date =~ ~r"\d\d\/\d\d\/\d\d\d\d"
    view
  end

  def assert_date_completed(%View{} = view, expected_date_string) do
    [actual_date] = actual_date_completed(view)
    assert actual_date == expected_date_string
    view
  end

  defp actual_date_completed(%View{} = view) do
    view
    |> Pages.parse()
    |> Test.Html.find("input##{@form_id}_date_completed")
    |> Test.Html.attr("value")
  end

  # :now only checks the time format and a valid AM/PM value.
  def assert_time_completed(%View{} = view, :now) do
    {actual_time, actual_am_pm} = actual_time_completed(view)
    assert actual_time =~ ~r"\d\d:\d\d"
    assert actual_am_pm in ~w{AM PM}
    view
  end

  def assert_time_completed(%View{} = view, expected_time, expected_am_pm) do
    {actual_time, actual_am_pm} = actual_time_completed(view)
    assert actual_time == expected_time
    assert actual_am_pm == expected_am_pm
    view
  end

  defp actual_time_completed(view) do
    parsed = view |> Pages.parse()

    [actual_time] =
      parsed
      |> Test.Html.find("input##{@form_id}_time_completed")
      |> Test.Html.attr("value")

    [actual_am_pm] =
      parsed
      |> Test.Html.find("select##{@form_id}_time_completed_am_pm option[selected]")
      |> Enum.map(&Test.Html.text(&1))

    {actual_time, actual_am_pm}
  end

  def change_form(view, attrs) do
    # Consistency fix: reuse @form_id instead of duplicating the literal id,
    # so a renamed form cannot silently break only this helper.
    view |> element("##{@form_id}") |> render_change(attrs)
    view
  end
end
|
test/support/pages/investigation_complete_interview.ex
| 0.516595
| 0.466724
|
investigation_complete_interview.ex
|
starcoder
|
defmodule Exfile.LocalFile do
  @moduledoc """
  Represents a file on the local filesystem.

  A LocalFile holds either a `:path` to a file on disk or an open `:io`
  device — never both at once.
  """

  alias Exfile.LocalFile, as: LF

  # Guard-safe "is not nil" check usable in `when` clauses.
  defmacrop not_nil(term) do
    quote do
      not is_nil(unquote(term))
    end
  end

  defstruct(
    path: nil,
    io: nil,
    meta: %{}
  )

  @type t :: %LF{path: String.t, io: :file.io_device, meta: map}

  # Chunk size (in bytes) used when streaming IO-based files.
  @read_buffer 2048

  @doc """
  Stores `value` under `key` in the file's metadata map, returning the
  updated LocalFile.
  """
  @spec put_meta(t, atom, any) :: t
  def put_meta(file, key, value) do
    put_in(file.meta[key], value)
  end

  @doc """
  Opens a LocalFile into an IO pid.

  If the LocalFile is already IO-based, the IO will be rewound to the beginning
  of the file.
  """
  @spec open(t) :: {:ok, :file.io_device} | {:error, :file.posix} | no_return
  def open(%LF{io: nil, path: path}) when not_nil(path) do
    File.open(path, [:read, :binary])
  end

  def open(%LF{io: io, path: nil}) when not_nil(io) do
    {:ok, _} = :file.position(io, :bof)
    {:ok, io}
  end

  def open(%LF{io: io, path: path}) when not_nil(io) and not_nil(path) do
    raise ArgumentError, message: "I expected an Exfile.LocalFile with either an io or a path, not both."
  end

  def open(%LF{io: nil, path: nil}) do
    raise ArgumentError, message: "I expected an Exfile.LocalFile with either an io or a path, but you gave me one with neither."
  end

  @doc """
  Copies the LocalFile to a new file-based LocalFile.

  Once the calling pid dies, the file will be automatically removed from the
  filesystem (see Exfile.Tempfile for more details).
  """
  @spec copy_to_tempfile(t, pid() | nil) :: t | no_return
  def copy_to_tempfile(file, monitor_pid \\ nil)

  # BUG FIX: the first two clauses now require the *other* field to be nil
  # (io: nil / path: nil). Previously a struct with BOTH fields set matched
  # the first clause, making the "not both" ArgumentError clause unreachable.
  def copy_to_tempfile(%LF{io: nil, path: path, meta: meta}, monitor_pid) when not_nil(path) do
    temp = Exfile.Tempfile.random_file!("exfile-file", monitor_pid)
    {:ok, _} = File.copy(path, temp)
    %LF{path: temp, meta: meta}
  end

  def copy_to_tempfile(%LF{io: io, path: nil, meta: meta}, monitor_pid) when not_nil(io) do
    temp = Exfile.Tempfile.random_file!("exfile-file", monitor_pid)

    {:ok, true} = File.open temp, [:write, :binary], fn(f) ->
      # Stream the source IO into the tempfile in @read_buffer-sized chunks.
      Enum.into(
        IO.binstream(io, @read_buffer),
        IO.binstream(f, @read_buffer)
      )
      true
    end

    %LF{path: temp, meta: meta}
  end

  def copy_to_tempfile(%LF{io: io, path: path}, _monitor_pid) when not_nil(io) and not_nil(path) do
    raise ArgumentError, message: "I expected an Exfile.LocalFile with either an io or a path, not both."
  end

  def copy_to_tempfile(%LF{io: nil, path: nil}, _monitor_pid) do
    raise ArgumentError, message: "I expected an Exfile.LocalFile with either an io or a path, but you gave me one with neither."
  end

  @doc """
  Returns the size (in bytes) of the file.
  """
  @spec size(t) :: {:ok, integer} | {:error, :file.posix}
  def size(%LF{path: path}) when not_nil(path) do
    case File.stat(path) do
      {:ok, %{size: size}} -> {:ok, size}
      error -> error
    end
  end

  def size(%LF{io: io}) when not_nil(io) do
    # Counting a 1-byte binstream is O(n) but needs no file path; rewind
    # afterwards so the device can be read again.
    stream = IO.binstream(io, 1)
    size = Enum.count(stream)
    _ = :file.position(io, :bof)
    {:ok, size}
  end
end
|
lib/exfile/local_file.ex
| 0.628977
| 0.478712
|
local_file.ex
|
starcoder
|
# The whole adapter is only compiled when the optional :mint dependency is present.
if Code.ensure_loaded?(Mint.HTTP) do
  defmodule Tesla.Adapter.Mint do
    @moduledoc """
    Adapter for [mint](https://github.com/ericmj/mint)

    Caution: The minimum supported Elixir version for mint is 1.5.0

    Remember to add `{:mint, "~> 0.2.0"}` and `{:castore, "~> 0.1.0"}` to dependencies
    Also, you need to recompile tesla after adding `:mint` dependency:

    ```
    mix deps.clean tesla
    mix deps.compile tesla
    ```

    ### Example usage

    ```
    # set globally in config/config.exs
    config :tesla, :adapter, Tesla.Adapter.Mint

    # set per module
    defmodule MyClient do
      use Tesla
      adapter Tesla.Adapter.Mint
    end

    # set global custom cacert
    config :tesla, Tesla.Adapter.Mint, cacert: ["path_to_cacert"]
    ```
    """

    @behaviour Tesla.Adapter
    import Tesla.Adapter.Shared, only: [stream_to_fun: 1, next_chunk: 1]
    alias Tesla.Multipart
    alias Mint.HTTP

    # Default options; :timeout bounds how long stream_response/3 waits for
    # each message from the connection process.
    @default adapter: [timeout: 2_000]

    @doc false
    def call(env, opts) do
      opts = Tesla.Adapter.opts(@default, env, opts)

      with {:ok, status, headers, body} <- request(env, opts) do
        {:ok, %{env | status: status, headers: headers, body: body}}
      end
    end

    defp request(env, opts) do
      # Break the URI
      %URI{host: host, scheme: scheme, port: port, path: path, query: query} = URI.parse(env.url)
      # Merge query params embedded in the URL with env.query.
      query = (query || "") |> URI.decode_query() |> Map.to_list()
      path = Tesla.build_url(path, env.query ++ query)
      method = env.method |> Atom.to_string() |> String.upcase()

      # Set the global cacert file
      opts =
        if scheme == "https" && !is_nil(get_global_default_ca()) do
          transport_opts = Access.get(opts, :transport_opts, [])

          # NOTE(review): appends the configured cacert list to any existing
          # :cacertfile — assumes both are lists; confirm against config shape.
          transport_opts =
            Keyword.put(
              transport_opts,
              :cacertfile,
              Keyword.get(transport_opts, :cacertfile, []) ++ get_global_default_ca()
            )

          Keyword.put(opts, :transport_opts, transport_opts)
        else
          opts
        end

      request(
        method,
        scheme,
        host,
        port,
        path,
        env.headers,
        env.body,
        opts
      )
    end

    # Streamed request body: convert the Stream to a chunk-producing fun.
    defp request(method, scheme, host, port, path, headers, %Stream{} = body, opts) do
      fun = stream_to_fun(body)
      request(method, scheme, host, port, path, headers, fun, opts)
    end

    # Multipart body: add multipart headers and stream the multipart parts.
    defp request(method, scheme, host, port, path, headers, %Multipart{} = body, opts) do
      headers = headers ++ Multipart.headers(body)
      fun = stream_to_fun(Multipart.body(body))
      request(method, scheme, host, port, path, headers, fun, opts)
    end

    # Function body: materialize it first so content-length can be computed.
    defp request(method, scheme, host, port, path, headers, body, opts) when is_function(body) do
      with {:ok, conn} <- HTTP.connect(String.to_atom(scheme), host, port, opts),
           # FIXME Stream function in Mint will not append the content length after eof
           # This will trigger the failure in unit test
           {:ok, body, length} <- stream_request(body),
           {:ok, conn, _req_ref} <-
             HTTP.request(
               conn,
               method,
               path || "/",
               headers ++ [{"content-length", "#{length}"}],
               body
             ),
           {:ok, conn, res = %{status: status, headers: headers}} <- stream_response(conn, opts),
           {:ok, _conn} <- HTTP.close(conn) do
        {:ok, status, headers, Map.get(res, :data)}
      end
    end

    # Plain binary (or nil) body.
    defp request(method, scheme, host, port, path, headers, body, opts) do
      with {:ok, conn} <- HTTP.connect(String.to_atom(scheme), host, port, opts),
           {:ok, conn, _req_ref} <- HTTP.request(conn, method, path || "/", headers, body),
           {:ok, conn, res = %{status: status, headers: headers}} <- stream_response(conn, opts),
           {:ok, _conn} <- HTTP.close(conn) do
        {:ok, status, headers, Map.get(res, :data)}
      end
    end

    # Reads the globally configured cacert list, or nil when unset.
    defp get_global_default_ca() do
      case Application.get_env(:tesla, Tesla.Adapter.Mint) do
        nil -> nil
        env -> Keyword.get(env, :cacert)
      end
    end

    # Drains a chunk-producing fun into a single binary, returning its size.
    defp stream_request(fun, body \\ "") do
      case next_chunk(fun) do
        {:ok, item, fun} when is_list(item) ->
          stream_request(fun, body <> List.to_string(item))

        {:ok, item, fun} ->
          stream_request(fun, body <> item)

        :eof ->
          {:ok, body, byte_size(body)}
      end
    end

    # Receive loop: feeds each mailbox message to Mint and accumulates
    # status/headers/data until :done (or :error) arrives.
    defp stream_response(conn, opts, response \\ %{}) do
      receive do
        msg ->
          case HTTP.stream(conn, msg) do
            {:ok, conn, stream} ->
              response =
                Enum.reduce(stream, response, fn
                  {:status, _req_ref, code}, acc ->
                    Map.put(acc, :status, code)

                  {:headers, _req_ref, headers}, acc ->
                    Map.put(acc, :headers, Map.get(acc, :headers, []) ++ headers)

                  {:data, _req_ref, data}, acc ->
                    Map.put(acc, :data, Map.get(acc, :data, "") <> data)

                  {:done, _req_ref}, acc ->
                    Map.put(acc, :done, true)

                  {:error, _req_ref, reason}, acc ->
                    Map.put(acc, :error, reason)

                  _, acc ->
                    acc
                end)

              cond do
                Map.has_key?(response, :error) ->
                  {:error, Map.get(response, :error)}

                Map.has_key?(response, :done) ->
                  {:ok, conn, Map.drop(response, [:done])}

                true ->
                  # Response not complete yet; keep receiving.
                  stream_response(conn, opts, response)
              end

            {:error, _conn, error, _res} ->
              {:error, "Encounter Mint error #{inspect(error)}"}

            :unknown ->
              {:error, "Encounter unknown error"}
          end
      after
        # Per-message timeout taken from the adapter options.
        opts |> Keyword.get(:adapter) |> Keyword.get(:timeout) ->
          {:error, "Response timeout"}
      end
    end
  end
end
|
lib/tesla/adapter/mint.ex
| 0.759181
| 0.619457
|
mint.ex
|
starcoder
|
defmodule Avrora.Utils.Registrar do
  @moduledoc """
  Schema registration helpers that are aware of the in-memory schema store,
  designed to be cheap enough for use in intensive loops.

  The caller controls the subject name used in the Schema Registry (`:as`)
  and may enforce registration even when the schema is already known
  (`:force`).

  ## Examples

      defmodule Sample do
        alias Avrora.Utils.Registrar

        def loop do
          Enum.reduce_while(1..100, 0, fn x, acc ->
            if x < 100, do: {:cont, register("io.confluent.Payment")}, else: {:halt, acc}
          end)
        end

        defp register(schema_name), do: Registrar.register_schema_by_name(schema_name)
      end
  """
  require Logger

  alias Avrora.Config
  alias Avrora.Schema

  @doc """
  Registers a schema read from a local schema file in the Schema Registry.

  Schema name conventions are inherited from `Avrora.Storage.File.get/1`.
  See `register_schema/2` for details of the registration process itself.

  ## Options

    * `:as` - the name which will be used to register schema (i.e subject).
    * `:force` - the flag enforcing registration when schema was found
      in the Memory store (`false` by default).

  ## Examples

      ...> {:ok, schema} = Avrora.Utils.Registrar.register_schema_by_name("io.confluent.Payment", as: "NewName", force: true)
      ...> schema.full_name
      "io.confluent.Payment"
  """
  @spec register_schema_by_name(String.t(), as: String.t(), force: boolean) ::
          {:ok, Schema.t()} | {:error, term()}
  def register_schema_by_name(name, opts \\ []) do
    if Keyword.get(opts, :force, false) do
      # Forced: skip the memory-store lookup and go straight to the file.
      with {:ok, loaded} <- file_storage().get(name), do: register_schema(loaded, opts)
    else
      # Register only when the memory store has no entry for this name; the
      # file lookup already happened, so force the registration step.
      with {:ok, nil} <- memory_storage().get(name),
           {:ok, loaded} <- file_storage().get(name) do
        register_schema(loaded, Keyword.put(opts, :force, true))
      end
    end
  end

  @doc """
  Registers a schema in the Schema Registry.

  Consults the Memory store first: the most complete name is looked up,
  i.e. `full_name:version` when the schema carries a version, otherwise
  just `full_name`.

  ## Options

    * `:as` - the name which will be used to register schema (i.e subject).
    * `:force` - the flag enforcing registration when schema was found
      in the Memory store (`false` by default).

  ## Examples

      ...> {:ok, schema} = Avrora.Resolver.resolve("io.confluent.Payment")
      ...> {:ok, schema} = Avrora.Utils.Registrar.register_schema(schema, as: "NewName", force: true)
      ...> schema.full_name
      "io.confluent.Payment"
  """
  @spec register_schema(Schema.t(), as: String.t(), force: boolean) ::
          {:ok, Schema.t()} | {:error, term()}
  def register_schema(schema, opts \\ []) do
    lookup_name =
      case schema.version do
        nil -> schema.full_name
        version -> "#{schema.full_name}:#{version}"
      end

    subject = Keyword.get(opts, :as, lookup_name)

    if Keyword.get(opts, :force, false) do
      do_register(subject, schema)
    else
      with {:ok, nil} <- memory_storage().get(lookup_name), do: do_register(subject, schema)
    end
  end

  # Push the schema to the registry, then cache it in memory under its id,
  # its full name (with TTL), and — when versioned — its name:version key.
  defp do_register(subject, schema) do
    with {:ok, registered} <- registry_storage().put(subject, schema.json),
         {:ok, registered} <- memory_storage().put(registered.id, registered),
         {:ok, registered} <- memory_storage().put(registered.full_name, registered),
         {:ok, expiry} <- memory_storage().expire(registered.full_name, names_ttl()) do
      if expiry == :infinity do
        Logger.debug("schema `#{registered.full_name}` will be always resolved from memory")
      end

      case registered.version do
        nil -> {:ok, registered}
        version -> memory_storage().put("#{registered.full_name}:#{version}", registered)
      end
    end
  end

  defp file_storage, do: Config.self().file_storage()
  defp memory_storage, do: Config.self().memory_storage()
  defp registry_storage, do: Config.self().registry_storage()
  defp names_ttl, do: Config.self().names_cache_ttl()
end
|
lib/avrora/utils/registrar.ex
| 0.849706
| 0.564759
|
registrar.ex
|
starcoder
|
defmodule Benchee.Conversion do
  @moduledoc """
  Integration of the conversion of multiple units with benchee.

  Can be used by plugins to use benchee unit scaling logic.
  """

  alias Benchee.Scenario
  alias Benchee.Conversion.{Count, Duration, Memory}

  @doc """
  Given scenarios and a scaling strategy, returns the best units for run
  time, ips and memory usage under that strategy.

  The returned units can then be handed to the appropriate `format` calls
  so arbitrary values are rendered with the right unit.

      iex> statistics = %Benchee.Statistics{average: 1_000_000.0, ips: 1000.0}
      iex> scenario = %Benchee.Scenario{
      ...>   run_time_data: %Benchee.CollectionData{statistics: statistics},
      ...>   memory_usage_data: %Benchee.CollectionData{statistics: statistics}
      ...> }
      iex> Benchee.Conversion.units([scenario], :best)
      %{
        ips: %Benchee.Conversion.Unit{
          label: "K",
          long: "Thousand",
          magnitude: 1000,
          name: :thousand
        },
        run_time: %Benchee.Conversion.Unit{
          label: "ms",
          long: "Milliseconds",
          magnitude: 1_000_000,
          name: :millisecond
        },
        memory: %Benchee.Conversion.Unit{
          label: "KB",
          long: "Kilobytes",
          magnitude: 1024,
          name: :kilobyte
        }
      }
  """
  def units(scenarios, scaling_strategy) do
    run_time_measurements = collect_measurements(scenarios, &run_time_pairs/1)
    memory_measurements = collect_measurements(scenarios, &memory_pairs/1)

    %{
      run_time: Duration.best(run_time_measurements.average, strategy: scaling_strategy),
      ips: Count.best(run_time_measurements.ips, strategy: scaling_strategy),
      memory: Memory.best(memory_average(memory_measurements), strategy: scaling_strategy)
    }
  end

  # Flat-map every scenario into {stat_name, value} pairs, then group the
  # values by statistic name.
  defp collect_measurements(scenarios, pair_fun) do
    scenarios
    |> Enum.flat_map(pair_fun)
    |> Enum.group_by(fn {stat_name, _} -> stat_name end, fn {_, value} -> value end)
  end

  defp run_time_pairs(scenario), do: Map.to_list(scenario.run_time_data.statistics)

  # Scenarios without memory statistics contribute nothing.
  defp memory_pairs(%Scenario{memory_usage_data: %{statistics: nil}}), do: []

  defp memory_pairs(%Scenario{memory_usage_data: %{statistics: memory_usage_statistics}}),
    do: Map.to_list(memory_usage_statistics)

  # When no scenario carried memory statistics the grouped map is empty;
  # fall back to an empty measurement list.
  defp memory_average(measurements) when map_size(measurements) == 0, do: []
  defp memory_average(measurements), do: measurements.average
end
|
lib/benchee/conversion.ex
| 0.90325
| 0.620047
|
conversion.ex
|
starcoder
|
defmodule Mxpanel.Batcher do
  @moduledoc """
  Manages a pool of buffers that accumulate the events and sends them to the
  Mixpanel API in batches in background. It implements a registry-based routing
  pool with round-robing as routing strategy.

  Checkout `start_link/1` for all supported options.

  ## Usage

  1. Add to your supervision tree:

  ```elixir
  {Mxpanel.Batcher, name: MyApp.Batcher, token: "mixpanel project token"}
  ```

  2. Enqueue an event:

  ```elixir
  Mxpanel.track_later(MyApp.MxpanelBatcher, event)
  ```

  3. The event will be buffered, and later sent in batch to the Mixpanel API.
  """
  use Supervisor

  alias Mxpanel.Batcher.Buffer
  alias Mxpanel.Batcher.Manager

  @type name :: atom()

  # NimbleOptions schema validating all start_link/1 options.
  @opts_schema [
    name: [
      type: :atom,
      doc: "Name of the batcher instance.",
      required: true
    ],
    token: [
      type: :any,
      doc: "Required if active. The Mixpanel token associated with your project."
    ],
    active: [
      type: :boolean,
      doc:
        "Configure Batcher to be active or not. Useful for disabling requests in certain environments.",
      default: true
    ],
    base_url: [
      type: :string,
      doc: "Mixpanel API URL.",
      default: "https://api.mixpanel.com"
    ],
    http_client: [
      type: {:custom, __MODULE__, :validate_http_client, []},
      doc: "HTTP client used by the Batcher.",
      default: {Mxpanel.HTTPClient.HackneyAdapter, []}
    ],
    pool_size: [
      type: :pos_integer,
      doc: "The size of the pool of event buffers. Defaults to `System.schedulers_online()`."
    ],
    flush_interval: [
      type: :pos_integer,
      doc: "Interval in milliseconds which the event buffer are processed.",
      default: 5_000
    ],
    flush_jitter: [
      type: :pos_integer,
      doc:
        "Jitter the flush interval by a random amount. Value in milliseconds. This is primarily to avoid large write spikes. For example, a `flush_jitter` of 1s and `flush_interval` of 1s means flushes will happen every 5-6s.",
      default: 1_000
    ],
    retry_max_attempts: [
      type: :pos_integer,
      doc: "Max attempts that a batch of events should be tried before giving up.",
      default: 5
    ],
    retry_base_backoff: [
      type: :pos_integer,
      doc:
        "Base time in milliseconds to calculate the wait time between retry attempts. Formula: `(attempt * retry_base_backoff) + random(1..retry_base_backoff)`.",
      default: 100
    ],
    import_timeout: [
      type: :timeout,
      doc:
        "The maximum amount of time in milliseconds each batch of events is allowed to execute for.",
      default: 30_000
    ],
    telemetry_buffers_info_interval: [
      type: :pos_integer,
      doc: "Interval in milliseconds the `telemetry` with the buffers info is published.",
      default: 30_000
    ],
    debug: [
      type: :boolean,
      doc: "Enable debug logging.",
      default: false
    ]
  ]

  # One buffer pool is started per supported Mixpanel endpoint.
  @supported_endpoints [:track, :engage, :groups]

  @doc """
  Starts a `#{inspect(__MODULE__)}` linked to the current process.

  ## Supported options

  #{NimbleOptions.docs(@opts_schema)}
  """
  def start_link(opts) do
    opts = validate_options!(opts, @opts_schema)
    Supervisor.start_link(__MODULE__, opts)
  end

  @impl Supervisor
  def init(opts) do
    name = opts[:name]
    pool_size = opts[:pool_size]

    # `pool_size` Buffer children per endpoint, each with a unique child id.
    buffers_specs =
      Enum.flat_map(@supported_endpoints, fn endpoint ->
        for index <- 1..pool_size do
          Supervisor.child_spec({Buffer, Keyword.put(opts, :endpoint, endpoint)},
            id: {Buffer, endpoint, index}
          )
        end
      end)

    # :rest_for_one — if the registry or manager dies, the buffers (which
    # register themselves through them) are restarted too.
    children = [
      {Registry, name: Manager.registry_name(name), keys: :duplicate},
      {Manager, Keyword.put(opts, :supported_endpoints, @supported_endpoints)},
      %{
        id: :buffers_supervisor,
        type: :supervisor,
        start: {Supervisor, :start_link, [buffers_specs, [strategy: :one_for_one]]}
      }
    ]

    Supervisor.init(children, strategy: :rest_for_one)
  end

  @doc """
  Synchronously drain all buffers in the batcher.

  Returns `:ok` once every buffer has processed its pending events.

      Mxpanel.Batcher.drain_buffers(MyApp.Batcher)
  """
  @spec drain_buffers(name()) :: :ok
  def drain_buffers(batcher_name) do
    Enum.each(@supported_endpoints, fn endpoint ->
      batcher_name
      |> Manager.buffers(endpoint)
      |> Enum.each(fn pid -> GenServer.call(pid, :drain) end)
    end)
  end

  @doc false
  def enqueue(batcher_name, operation_or_operations) do
    # Accepts a single operation or a list; each is routed to a buffer
    # checked out from the manager.
    operation_or_operations
    |> List.wrap()
    |> Enum.each(fn operation ->
      batcher_name
      |> Manager.checkout(operation)
      |> Buffer.enqueue(operation)
    end)
  end

  # NimbleOptions :custom validator for the :http_client option.
  @doc false
  def validate_http_client({mod, opts}) when is_atom(mod) and is_list(opts) do
    {:ok, {mod, opts}}
  end

  def validate_http_client(value) do
    {:error, "expected :http_client to be an {mod, opts} tuple, got: #{inspect(value)}"}
  end

  defp validate_options!(opts, schema) do
    opts = Keyword.put_new(opts, :pool_size, System.schedulers_online())

    with {:ok, opts} <- NimbleOptions.validate(opts, schema),
         {:ok, opts} <- validate_token(opts) do
      opts
    else
      {:error, %NimbleOptions.ValidationError{message: message}} ->
        raise ArgumentError,
              "invalid configuration given to #{inspect(__MODULE__)}.start_link/1, " <> message
    end
  end

  # :token is only required (and must be a binary) when the batcher is active.
  defp validate_token(opts) do
    case {opts[:active] == true, Keyword.has_key?(opts, :token), is_binary(opts[:token])} do
      {true, true, true} ->
        {:ok, opts}

      {true, true, false} ->
        {:error,
         %NimbleOptions.ValidationError{
           message: "expected :token to be a string, got: #{inspect(opts[:token])}"
         }}

      {true, false, _} ->
        {:error, %NimbleOptions.ValidationError{message: "required option :token not found"}}

      {false, _, _} ->
        {:ok, opts}
    end
  end
end
|
lib/mxpanel/batcher.ex
| 0.925458
| 0.769557
|
batcher.ex
|
starcoder
|
defmodule Delta.Producer do
  @moduledoc """
  GenStage which fulfills demand by making HTTP requests on a configurable frequency.
  """
  alias Delta.File

  use GenStage
  require Logger

  @type opts :: [opt]
  @type opt ::
          {:url, binary}
          | {:frequency, non_neg_integer}
          | {:http_mod, module}
          | {:filters, [filter]}

  # A filter maps one file to a file or a list of files.
  @type filter :: (File.t() -> File.t() | [File.t()])

  @default_frequency 60_000
  @default_http_mod Delta.Producer.Hackney
  @start_link_opts [:name]

  def start_link(opts) do
    # :url is mandatory; crash early if missing.
    _ = Keyword.fetch!(opts, :url)
    GenStage.start_link(__MODULE__, opts, Keyword.take(opts, @start_link_opts))
  end

  defstruct [:conn, :http_mod, :frequency, :filters, :last_fetched, :ref, demand: 0]

  def default_filters do
    [&File.ensure_content_type/1, &File.ensure_gzipped/1]
  end

  @impl GenStage
  def init(opts) do
    url = Keyword.get(opts, :url)
    frequency = Keyword.get(opts, :frequency, @default_frequency)
    http_mod = Keyword.get(opts, :http_mod, @default_http_mod)
    filters = Keyword.get(opts, :filters, default_filters())
    headers = Keyword.get(opts, :headers, [])
    {:ok, conn} = http_mod.new(url, headers: Enum.to_list(headers))

    state = %__MODULE__{
      conn: conn,
      http_mod: http_mod,
      filters: filters,
      frequency: frequency,
      # Back-date last_fetched so the first fetch is scheduled immediately.
      last_fetched: monotonic_now() - frequency - 1
    }

    {:producer, state, dispatcher: GenStage.BroadcastDispatcher}
  end

  @impl GenStage
  def handle_demand(demand, state) do
    state = %{state | demand: state.demand + demand}
    state = schedule_fetch(state)
    {:noreply, [], state}
  end

  @impl GenStage
  def handle_info(:fetch, %{demand: demand} = state) when demand > 0 do
    # Clear the timer ref and reschedule before fetching, so a slow fetch
    # does not delay the next cycle's scheduling.
    state = %{state | ref: nil, last_fetched: monotonic_now()}
    state = schedule_fetch(state)

    case state.http_mod.fetch(state.conn) do
      {:ok, conn, file} ->
        state = %{state | conn: conn}
        handle_file(state, file)

      {:unmodified, conn} ->
        state = %{state | conn: conn}
        {:noreply, [], state}

      {:error, conn, reason} ->
        state = %{state | conn: conn}
        handle_error(state, reason)
    end
  end

  def handle_info(:fetch, %{demand: 0} = state) do
    # wait for more demand before scheduling again
    {:noreply, [], state}
  end

  def handle_info(message, state) do
    # Any other message is assumed to belong to the HTTP connection
    # (e.g. async responses); let the http_mod interpret it.
    case state.http_mod.stream(state.conn, message) do
      {:ok, conn, files} ->
        state = %{state | conn: conn}
        {:noreply, files, state}

      :unknown ->
        _ =
          Logger.warn(
            "#{__MODULE__} unexpected message message=#{inspect(message)} state=#{inspect(state)}"
          )

        {:noreply, [], state}
    end
  end

  # Run filters over the fetched file and emit the results, consuming demand.
  defp handle_file(state, file) do
    files = apply_filters([file], state.filters)
    state = %{state | demand: max(state.demand - Enum.count(files), 0)}
    {:noreply, files, state}
  end

  # NOTE(review): public (`def`) unlike its sibling handle_file — looks
  # unintentional, but demoting it would change the module's interface.
  def handle_error(state, reason) do
    _ =
      Logger.warn(fn ->
        "#{__MODULE__} error fetching url=#{inspect(state.conn.url)} error=#{
          inspect(reason, limit: :infinity)
        }"
      end)

    {:noreply, [], state}
  end

  defp schedule_fetch(%{ref: nil} = state) do
    # Schedule the next fetch relative to the last one; never negative.
    next_fetch_after = max(state.last_fetched + state.frequency - monotonic_now(), 0)
    ref = Process.send_after(self(), :fetch, next_fetch_after)
    %{state | ref: ref}
  end

  # coveralls-ignore-start
  defp schedule_fetch(%{ref: _} = state) do
    # already scheduled! this isn't always hit during testing (but it is
    # sometimes) so we skip the coverage check.
    state
  end

  # coveralls-ignore-stop

  defp monotonic_now do
    System.monotonic_time(:millisecond)
  end

  @doc "Apply a list of filters to a list of files"
  @spec apply_filters([File.t()], [filter]) :: [File.t()]
  def apply_filters(files, [filter | rest]) do
    # Each filter may return a single file or a list; List.wrap normalizes.
    files =
      Enum.flat_map(files, fn file ->
        file
        |> filter.()
        |> List.wrap()
      end)

    apply_filters(files, rest)
  end

  def apply_filters(files, []) do
    files
  end
end
|
lib/delta/producer.ex
| 0.800068
| 0.404772
|
producer.ex
|
starcoder
|
defmodule Towwwer.Job do
  @moduledoc """
  Enqueue job for later execution and return immediately:

      Rihanna.enqueue(Towwwer.Job, [arg1, arg2])

  A recurring job is implemented by having the job reschedule itself after completion
  and Postgresโ ACID guarantees will ensure that it continues running.

  NOTE: You will need to enqueue the job manually the first time from the console.
  """
  @behaviour Rihanna.Job

  require Logger

  alias Towwwer.Websites
  alias Towwwer.Tools.Helpers

  @doc """
  NOTE: `perform/1` is a required callback. It takes exactly one argument. To pass
  multiple arguments, wrap them in a list and destructure in the function head.

  This has to return one of: :ok | {:ok, result} | :error | {:error, reason}

  On success the job reschedules itself for the next UTC midnight; on failure
  it retries in one hour and reports `{:error, :failed}`.
  """
  @spec perform([map() | map()]) :: :ok | {:error, :failed}
  def perform([site, monitor]) do
    case do_work(site, monitor) do
      :ok ->
        Rihanna.schedule(Towwwer.Job, [site, monitor], at: next_utc_midnight())
        :ok

      :error ->
        Rihanna.schedule(Towwwer.Job, [site, monitor], in: :timer.hours(1))
        {:error, :failed}
    end
  end

  # The DateTime for 00:00:00 UTC of the day after today.
  defp next_utc_midnight do
    now = NaiveDateTime.utc_now()
    {:ok, midnight} = NaiveDateTime.new(now.year, now.month, now.day, 0, 0, 0)
    {:ok, next_midnight} = DateTime.from_naive(NaiveDateTime.add(midnight, 86_400, :second), "Etc/UTC")
    next_midnight
  end

  # Build a report for the monitor and run the follow-up checks;
  # returns :ok or :error depending on whether the report was created.
  @spec do_work(map(), map()) :: :ok | :error
  defp do_work(site, monitor) do
    Logger.info("Doing work for #{site.base_url} at #{monitor.path}")

    # Get previous report in order to compare the upcoming one to this one
    previous_report = Websites.get_latest_report_for_monitor(monitor)

    case Helpers.build_report(site, monitor) do
      {:ok, new_report} ->
        Logger.info("Created report for #{site.base_url} at #{monitor.path} successfully")
        Helpers.check_score_diff(previous_report, new_report, site, monitor)
        Helpers.check_low_hanging_fruits(site, monitor, new_report)
        Websites.bump_site_timestamp(site)
        :ok

      {:error, _changeset} ->
        Logger.info("Failed to create report for #{site.base_url} monitor #{monitor.path}")
        :error
    end
  end

  # Query all pending sites and run the build task for them.
  # Should only be called directly from console when initially setting things up.
  # Afterwards the jobs should already be enqueued and stored in PostgreSQL.
  def loop_sites_for_reports do
    Websites.list_sites_with_preloads()
    |> Enum.map(&Helpers.run_build_task_for_site_monitors/1)
  end
end
|
lib/towwwer/job.ex
| 0.700895
| 0.47725
|
job.ex
|
starcoder
|
defmodule Bolt.Cogs.Guide do
  @moduledoc false

  @behaviour Nosedrum.Command

  alias Bolt.Constants
  alias Bolt.Paginator
  alias Nostrum.Struct.Embed

  # Static guide pages, paginated in order by the `command/2` callback below.
  # Fixed mojibake bullets (was `โข`), "ommitted", "CONFIG_UDPATE", and an
  # unbalanced quote in the USW page.
  @pages [
    %Embed{
      title: "guide - introduction",
      description: """
      bolt is a moderation bot intended for use on medium to large sized guilds.
      This command will guide you through using its abilities to help you moderate your guild.
      First off, keep in mind you can always invoke this command using `guide`, and if you want help for commands themselves, use `help <command>`.
      For subcommands (e.g. `infr list`), you can also view detailed help, e.g. `help infr list`.
      Commands documented here have their prefix omitted.
      To navigate through this guide, use the buttons below.
      """
    },
    %Embed{
      title: "guide - meta commands",
      description: """
      bolt provides a couple meta commands that can prove useful when moderating a server.
      These commands are generally available to everyone.
      • `guildinfo`
      shows you general information about the current guild.
      If you have a concrete guild ID you want to lookup, you can use `guildinfo <id>`.
      • `memberinfo`
      When run without commands, shows information about yourself.
      You can also pass this command a specific member you want to look up, for example `memberinfo @bolt`.
      The member ID can be useful when applying punishments to a member that you currently cannot see (e.g. in your staff channel), as you can pass it to commands that expect a member argument.
      • `roleinfo <role>`
      Looks up the given role. You can pass either the role ID, the role name, or mention the role.
      For example, to look up information about a role called 'Staff', use `roleinfo Staff`.
      The role ID can be useful when selecting actions that the automod should take, which will be explained later.
      • `roles [name]`
      Displays all roles on the guild. When given an argument, only displays roles matching the given name.
      • `stats`
      Displays general statistics about the bot.
      """
    },
    %Embed{
      title: "guide - mod log",
      description: """
      bolt comes with an extensive and configurable moderation log.
      You can configure it to output only select information on a per-channel basis.
      The command used to manage the mod log is `modlog`. It provides a bunch of subcommands for configuring the mod log as well as disabling it temporarily.
      It works based on events sent by the bot internally. A couple of these are events from Discord themselves - for example, a member joining - and others are events used by the bot.
      If you just want to go for a "set it and forget it" configuration, use `modlog set all <logchannel>`, where `logchannel` is the channel you want to log in (e.g. `modlog set all #modlog`). This will simply log all events captured by the bot in the given channel.
      Otherwise, if you want more fine-grained configuration, use `modlog set <event> <logchannel>`. Known events can be seen by using `modlog events`, and you can ask bolt to explain an event to you by using `modlog explain <event>`.
      To mute the modlog temporarily, use `modlog mute`. Note that this will not persist across restarts by the bot. Use `modlog unmute` to unmute it again.
      It is recommended to at least enable the mod log for the following events:
      • `AUTOMOD`: automatic moderator actions the bot takes (when configured)
      • `BOT_UPDATE`: important bot updates
      • `CONFIG_UPDATE`: someone updated your configuration for bolt
      • `INFRACTION_CREATE`: an infraction was created
      • `INFRACTION_UPDATE`: an infraction was updated
      • `INFRACTION_EVENTS`: the bot did something based on an infraction
      • `MESSAGE_CLEAN`: a moderator ran the `clean` command, includes the deleted messages
      Infractions will be explained in detail on the next page.
      """
    },
    %Embed{
      title: "guide - infractions",
      description: """
      bolt ships an infraction system. In a nutshell, it's a database tracking everything that you've done on members through bolt.
      The following commands create new infractions: `note`, `warn`, `temprole`, `kick`, `tempban` and `ban`.
      You can list all infractions on your guild with `infr list`, and look up individual infractions using `infr detail <id>`. Finally, to show infractions for a member (or even users who left), use `infr user <member>`.
      To edit the reason for an infraction, use `infr reason <id> <new_reason>`. To update the expiration date of a timed infraction (for example, temporary bans), use `infr expiry <id> <new_expiry>`. The expiration date will be calculated relative to the creation time of the infraction.
      Note how I use `infr` here instead of the described `infraction`. Both commands work, `infr` is an alias.
      As usual, keep in mind that you can use `help infraction` or `help infr` to look up detailed documentation for this command.
      The next page will explain available moderation commands.
      """
    },
    %Embed{
      title: "guide - moderation commands",
      description: """
      As a moderation bot, bolt has many moderation utilities. This page describes those that you can execute yourself.
      The basic moderation commands are the following:
      • `note <user> <note>` applies a note on the given user
      • `warn <user> <reason>` warns the given user with the given reason
      • `temprole <user> <role> <duration> [reason]` will temporarily apply the given role to the given user
      • `kick <user> [reason]` kicks the given user
      • `tempban <user> <duration> [reason]` temporarily bans the given user
      • `ban <user> [reason]` permanently bans the given user
      • `clean <args>` to clean messages
      If you're confused by what to pass to some commands, just check `help <command>` as usual.
      All of these commands (except `clean`) will be logged with the event `INFRACTION_CREATE`, and `temprole` or `tempban` will also log with `INFRACTION_EVENTS` when they perform an action (such as unbanning someone).
      The next page will talk about automating role assignment.
      """
    },
    %Embed{
      title: "guide - self-assignable roles",
      description: """
      Many guilds want to have a bunch of roles that members can assign by themselves without moderator intervention.
      Self-assignable roles allow you to configure just that.
      At its core, moderators (with the permission to manage roles) can use the following:
      • `role allow <role>` to make a role self-assignable
      • `role deny <role>` to remove a role from the self-assignable roles
      Use `help role` if you want further information on these commands.
      These configuration commands will be logged under `CONFIG_UPDATE`.
      Users can then interact with the self-assignable roles using the following:
      • `lsar` to list all self-assignable roles
      • `assign` or `iam` to assign a self-assignable role
      • `remove` or `iamn` to remove a self-assignable role
      Users succeeding in assigning or removing a role will be logged under `AUTOMOD`.
      """
    },
    %Embed{
      title: "guide - combatting spam",
      description: """
      bolt can take some work off fighting off spam for you using the built-in *uncomplicated spam wall* (USW).
      USW works based on filters and punishment. Basically, it works like the following:
      If a user sends a message,
      - get all configured filters for the guild
      - apply all of those on the new message
      - if a filter triggers / "hits", punish the user with the configured punishment.
      To configure a punishment, use `usw punish`. For example, to apply the role "Muted" to someone triggering a filter for 5 minutes, use `usw punish temprole Muted 5m`.
      To set up filters, use `usw set <filter> <count> <interval>`. For example, `usw set BURST 5 10` would mean "allow 5 messages to pass through the `BURST` filter within 10 seconds".
      To unset configuration for a filter, use `usw unset <filter>`.
      Bolt will create infractions for hit filters depending on the punishment, which
      you can see via the `(automod)` word in the infraction reason.
      You can see the current configuration for USW on your guild using `usw status`.
      As always, use `help usw` if you need further help with this command.
      """
    },
    %Embed{
      title: "guide - gatekeeper",
      description: """
      bolt can automate assigning users roles or welcoming users when they join the server.
      bolt also includes the customizable `accept` command.
      The actions which should be triggered on either member join or `accept` invocation can be configured via the **gatekeeper** system. To use it, see:
      - `gk onjoin` for configuring member join actions
      - `gk onaccept` for configuring accept command actions
      - `gk actions` for viewing configured actions
      See the help pages for these commands for more information.
      """
    },
    %Embed{
      title: "guide - tags",
      description: """
      If you encounter often answering the same questions all over again or just want to provide a bunch of useful information some way, then the tags feature is for you.
      In a nutshell, you can create, read, and delete tags with this command.
      To create a tag, use `tag create <name> <content>`. For example, to create a tag named "duck pics" with questionable content, use `tag create "duck pics" What did you expect?`. Members can now use `tag duck pics` to read it.
      Note that you need to put quotes around the tag name for names spanning multiple words, or bolt will assume that all words after the first one are part of the tag's content.
      You can list all tags on the guild using `tag list`, and if you want to get rid of the tag, you can use `tag delete <name>`.
      Use `help tag` if you want more detailed information.
      """
    },
    %Embed{
      title: "guide - fin",
      description: """
      This sums up the guide for now.
      If you want to give feedback, have questions, want to suggest a feature, or simply want to be informed about updates, don't hesitate to join its server: https://discord.gg/5REguKf
      I hope that you enjoy using bolt and it provides use to your server.
      """
    }
  ]

  @impl true
  def usage, do: ["guide"]

  @impl true
  def description,
    do: """
    New to bolt? This command is a paginated overview showcasing how to use bolt on your server.
    """

  @impl true
  def predicates, do: []

  @impl true
  def command(msg, _args) do
    # Only the color is shared between pages; title/description come from @pages.
    base_embed = %Embed{
      color: Constants.color_blue()
    }

    Paginator.paginate_over(msg, base_embed, @pages)
  end
end
|
lib/bolt/cogs/guide.ex
| 0.868827
| 0.529993
|
guide.ex
|
starcoder
|
defmodule RDF.Serialization do
  @moduledoc """
  General functions for working with RDF serializations.
  """

  alias RDF.{Dataset, Graph}

  @type format :: module

  # All known format modules. JSON.LD ships in a separate library and may
  # not be loadable in every application; see available_formats/0.
  @formats [
    RDF.Turtle,
    JSON.LD,
    RDF.NTriples,
    RDF.NQuads,
  ]

  @doc """
  The list of all known `RDF.Serialization.Format`s in the RDF.ex eco-system.
  Note: Not all known formats might be available to an application, see `available_formats/0`.
  ## Examples
      iex> RDF.Serialization.formats
      [RDF.Turtle, JSON.LD, RDF.NTriples, RDF.NQuads]
  """
  @spec formats :: [format]
  def formats, do: @formats

  @doc """
  The list of all available `RDF.Serialization.Format`s in an application.
  A known format might not be available in an application, when the format is
  implemented in an external library and this not specified as a Mix dependency
  of this application.
  ## Examples
      iex> RDF.Serialization.available_formats
      [RDF.Turtle, RDF.NTriples, RDF.NQuads]
  """
  @spec available_formats :: [format]
  def available_formats do
    # Code.ensure_loaded?/1 filters out formats whose library isn't present.
    Enum.filter @formats, &Code.ensure_loaded?/1
  end

  @doc """
  Returns the `RDF.Serialization.Format` with the given name, if available.
  ## Examples
      iex> RDF.Serialization.format(:turtle)
      RDF.Turtle
      iex> RDF.Serialization.format("turtle")
      RDF.Turtle
      iex> RDF.Serialization.format(:jsonld)
      nil # unless json_ld is defined as a dependency of the application
  """
  @spec format(String.t | atom) :: format | nil
  def format(name)

  def format(name) when is_binary(name) do
    # to_existing_atom avoids creating atoms from arbitrary strings; an
    # unknown name raises ArgumentError, which we translate to nil.
    name
    |> String.to_existing_atom
    |> format()
  rescue
    ArgumentError -> nil
  end

  def format(name) do
    format_where(fn format -> format.name == name end)
  end

  @doc """
  Returns the `RDF.Serialization.Format` with the given media type, if available.
  ## Examples
      iex> RDF.Serialization.format_by_media_type("text/turtle")
      RDF.Turtle
      iex> RDF.Serialization.format_by_media_type("application/ld+json")
      nil # unless json_ld is defined as a dependency of the application
  """
  @spec format_by_media_type(String.t) :: format | nil
  def format_by_media_type(media_type) do
    format_where(fn format -> format.media_type == media_type end)
  end

  @doc """
  Returns the proper `RDF.Serialization.Format` for the given file extension, if available.
  ## Examples
      iex> RDF.Serialization.format_by_extension("ttl")
      RDF.Turtle
      iex> RDF.Serialization.format_by_extension(".ttl")
      RDF.Turtle
      iex> RDF.Serialization.format_by_extension("jsonld")
      nil # unless json_ld is defined as a dependency of the application
  """
  @spec format_by_extension(String.t) :: format | nil
  def format_by_extension(extension)

  # Accept extensions with a leading dot (as returned by Path.extname/1).
  def format_by_extension("." <> extension), do: format_by_extension(extension)

  def format_by_extension(extension) do
    format_where(fn format -> format.extension == extension end)
  end

  # Finds the first *loaded* format module satisfying the predicate, or nil.
  defp format_where(fun) do
    @formats
    |> Stream.filter(&Code.ensure_loaded?/1)
    |> Enum.find(fun)
  end

  @doc """
  Reads and decodes a serialized graph or dataset from a string.
  The format must be specified with the `format` option and a format name or the
  `media_type` option and the media type of the format.
  It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
  dataset, or `{:error, reason}` if an error occurs.
  """
  @spec read_string(String.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any}
  def read_string(content, opts) do
    # A failing string_format/1 falls through the `with` and is returned as-is.
    with {:ok, format} <- string_format(opts) do
      format.read_string(content, opts)
    end
  end

  @doc """
  Reads and decodes a serialized graph or dataset from a string.
  The format must be specified with the `format` option and a format name or the
  `media_type` option and the media type of the format.
  As opposed to `read_string`, it raises an exception if an error occurs.
  """
  @spec read_string!(String.t, keyword) :: Graph.t | Dataset.t
  def read_string!(content, opts) do
    with {:ok, format} <- string_format(opts) do
      format.read_string!(content, opts)
    else
      {:error, error} -> raise error
    end
  end

  @doc """
  Reads and decodes a serialized graph or dataset from a file.
  The format can be specified with the `format` option and a format name or the
  `media_type` option and the media type of the format. If none of these are
  given, the format gets inferred from the extension of the given file name.
  It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
  dataset, or `{:error, reason}` if an error occurs.
  """
  @spec read_file(Path.t, keyword) :: {:ok, Graph.t | Dataset.t} | {:error, any}
  def read_file(file, opts \\ []) do
    with {:ok, format} <- file_format(file, opts) do
      format.read_file(file, opts)
    end
  end

  @doc """
  Reads and decodes a serialized graph or dataset from a file.
  The format can be specified with the `format` option and a format name or the
  `media_type` option and the media type of the format. If none of these are
  given, the format gets inferred from the extension of the given file name.
  As opposed to `read_file`, it raises an exception if an error occurs.
  """
  @spec read_file!(Path.t, keyword) :: Graph.t | Dataset.t
  def read_file!(file, opts \\ []) do
    with {:ok, format} <- file_format(file, opts) do
      format.read_file!(file, opts)
    else
      {:error, error} -> raise error
    end
  end

  @doc """
  Encodes and writes a graph or dataset to a string.
  The format must be specified with the `format` option and a format name or the
  `media_type` option and the media type of the format.
  It returns an `{:ok, string}` tuple, with `string` being the serialized graph or
  dataset, or `{:error, reason}` if an error occurs.
  """
  @spec write_string(Graph.t | Dataset.t, keyword) :: {:ok, String.t} | {:error, any}
  def write_string(data, opts) do
    with {:ok, format} <- string_format(opts) do
      format.write_string(data, opts)
    end
  end

  @doc """
  Encodes and writes a graph or dataset to a string.
  The format must be specified with the `format` option and a format name or the
  `media_type` option and the media type of the format.
  As opposed to `write_string`, it raises an exception if an error occurs.
  """
  @spec write_string!(Graph.t | Dataset.t, keyword) :: String.t
  def write_string!(data, opts) do
    with {:ok, format} <- string_format(opts) do
      format.write_string!(data, opts)
    else
      {:error, error} -> raise error
    end
  end

  @doc """
  Encodes and writes a graph or dataset to a file.
  The format can be specified with the `format` option and a format name or the
  `media_type` option and the media type of the format. If none of these are
  given, the format gets inferred from the extension of the given file name.
  Other available serialization-independent options:
  - `:force` - If not set to `true`, an error is raised when the given file
  already exists (default: `false`)
  - `:file_mode` - A list with the Elixir `File.open` modes to be used for writing
  (default: `[:utf8, :write]`)
  It returns `:ok` if successful or `{:error, reason}` if an error occurs.
  """
  @spec write_file(Graph.t | Dataset.t, Path.t, keyword) :: :ok | {:error, any}
  def write_file(data, path, opts \\ []) do
    with {:ok, format} <- file_format(path, opts) do
      format.write_file(data, path, opts)
    end
  end

  @doc """
  Encodes and writes a graph or dataset to a file.
  The format can be specified with the `format` option and a format name or the
  `media_type` option and the media type of the format. If none of these are
  given, the format gets inferred from the extension of the given file name.
  See `write_file` for a list of other available options.
  As opposed to `write_file`, it raises an exception if an error occurs.
  """
  @spec write_file!(Graph.t | Dataset.t, Path.t, keyword) :: :ok
  def write_file!(data, path, opts \\ []) do
    with {:ok, format} <- file_format(path, opts) do
      format.write_file!(data, path, opts)
    else
      {:error, error} -> raise error
    end
  end

  # Resolves a format from the :format or :media_type option.
  # Note the deliberate match-inside-if: `format` is bound to the first
  # non-nil lookup result.
  defp string_format(opts) do
    if format =
        (opts |> Keyword.get(:format) |> format()) ||
        (opts |> Keyword.get(:media_type) |> format_by_media_type())
    do
      {:ok, format}
    else
      {:error, "unable to detect serialization format"}
    end
  end

  # Like string_format/1, but falls back to inferring from the file name.
  defp file_format(filename, opts) do
    case string_format(opts) do
      {:ok, format} -> {:ok, format}
      _ -> format_by_file_name(filename)
    end
  end

  defp format_by_file_name(filename) do
    if format = filename |> Path.extname() |> format_by_extension() do
      {:ok, format}
    else
      {:error, "unable to detect serialization format"}
    end
  end
end
|
lib/rdf/serialization/serialization.ex
| 0.89765
| 0.592431
|
serialization.ex
|
starcoder
|
defmodule Geo.CheapRuler do
  # via https://blog.mapbox.com/fast-geodesic-approximations-with-cheap-ruler-106f229ad016
  @moduledoc """
  Fast geodesic approximations (distances, bearings, bounding boxes) that
  treat the earth as locally flat around a fixed reference latitude, as
  described in Mapbox's "cheap ruler" approach. All distances are in meters.
  """

  @type point() :: %{:lat => float(), :lon => float(), optional(atom()) => any()}

  # The reference latitude is fixed at compile time: a constant in tests,
  # otherwise the vertical center of the configured map bounds.
  ref_lat =
    if Mix.env() == :test do
      53.551
    else
      [_, lat1, _, lat2] = Settings.bounds()
      (lat1 + lat2) / 2.0
    end

  @reference_lat ref_lat
  # cos(n * reference_lat) computed via the multiple-angle recurrence
  # cos(n·x) = 2·cos(x)·cos((n−1)·x) − cos((n−2)·x)
  @cos1 :math.cos(@reference_lat * :math.pi() / 180)
  @cos2 2 * @cos1 * @cos1 - 1
  @cos3 2 * @cos1 * @cos2 - @cos1
  @cos4 2 * @cos1 * @cos3 - @cos2
  @cos5 2 * @cos1 * @cos4 - @cos3
  # get meters instead of km
  @m 1000
  # Meters per degree of longitude (@kx) / latitude (@ky) at the reference
  # latitude; series coefficients from the cheap-ruler article above.
  @kx @m * (111.41513 * @cos1 - 0.09455 * @cos3 + 0.00012 * @cos5)
  @ky @m * (111.13209 - 0.56605 * @cos2 + 0.0012 * @cos4)

  @doc ~S"""
  Takes a list of coordinates and find their bounding box
  ## Examples
      iex> Geo.CheapRuler.bbox([
      ...>   %{lon: 1.2, lat: 3.4}, %{lon: 4.5, lat: 7.1}, %{lon: 0, lat: 0},
      ...> ])
      %Geo.BoundingBox{minLon: 0, minLat: 0, maxLon: 4.5, maxLat: 7.1}
  """
  def bbox(coords) when is_list(coords) do
    {%{lon: minLon}, %{lon: maxLon}} = coords |> Enum.min_max_by(fn %{lon: lon} -> lon end)
    {%{lat: minLat}, %{lat: maxLat}} = coords |> Enum.min_max_by(fn %{lat: lat} -> lat end)
    %Geo.BoundingBox{minLon: minLon, minLat: minLat, maxLon: maxLon, maxLat: maxLat}
  end

  # A single point yields a degenerate (zero-area) bounding box.
  def bbox(%{lon: lon, lat: lat}) do
    %Geo.BoundingBox{minLon: lon, minLat: lat, maxLon: lon, maxLat: lat}
  end

  @doc ~S"""
  Unions two bounding boxes
  ## Examples
      iex> Geo.CheapRuler.union(
      ...>   %Geo.BoundingBox{minLon: 1, minLat: 0, maxLon: 1, maxLat: 2},
      ...>   %Geo.BoundingBox{minLon: 0, minLat: 1, maxLon: 2, maxLat: 1}
      ...> )
      %Geo.BoundingBox{minLon: 0, minLat: 0, maxLon: 2, maxLat: 2}
  """
  def union(nil, bbox2), do: bbox2
  def union(bbox1, nil), do: bbox1

  def union(bbox1, bbox2) do
    minLon = if bbox1.minLon < bbox2.minLon, do: bbox1.minLon, else: bbox2.minLon
    minLat = if bbox1.minLat < bbox2.minLat, do: bbox1.minLat, else: bbox2.minLat
    maxLon = if bbox1.maxLon > bbox2.maxLon, do: bbox1.maxLon, else: bbox2.maxLon
    maxLat = if bbox1.maxLat > bbox2.maxLat, do: bbox1.maxLat, else: bbox2.maxLat
    %Geo.BoundingBox{minLon: minLon, minLat: minLat, maxLon: maxLon, maxLat: maxLat}
  end

  @doc ~S"""
  Finds center of given geo object
  ## Examples
      iex> Geo.CheapRuler.center(
      ...>   %Geo.BoundingBox{minLon: 2, minLat: 3, maxLon: 1, maxLat: 2}
      ...> )
      %Geo.Point{lon: 1.5, lat: 2.5}
  """
  @spec center(Geo.BoundingBox.like()) :: Geo.Point.t()
  def center(bbox) do
    %Geo.Point{lon: (bbox.minLon + bbox.maxLon) / 2, lat: (bbox.minLat + bbox.maxLat) / 2}
  end

  @doc ~S"""
  Returns true if the point is contained within the bounding box
  ## Examples
      iex> Geo.CheapRuler.inside_bbox?(
      ...>   %{lon: 1.3, lat: 4.5},
      ...>   %Geo.BoundingBox{minLon: 0, minLat: 0, maxLon: 4.5, maxLat: 7.1}
      ...> )
      true
  """
  def inside_bbox?(
        %{lon: lon, lat: lat},
        %Geo.BoundingBox{minLon: minLon, minLat: minLat, maxLon: maxLon, maxLat: maxLat}
      ) do
    lon >= minLon &&
      lon <= maxLon &&
      lat >= minLat &&
      lat <= maxLat
  end

  @doc ~S"""
  Increases the bounding box by the given distance
  ## Examples
      iex> Geo.CheapRuler.buffer_bbox(
      ...>   1000,
      ...>   %Geo.BoundingBox{minLon: 0, minLat: 0, maxLon: 4.5, maxLat: 7.1}
      ...> )
      %Geo.BoundingBox{minLon: -0.015087116638538251,
        minLat: -0.008984923104452173,
        maxLon: 4.515087116638538,
        maxLat: 7.108984923104452}
  """
  def buffer_bbox(buffer, %Geo.BoundingBox{
        minLon: minLon,
        minLat: minLat,
        maxLon: maxLon,
        maxLat: maxLat
      })
      when is_float(buffer) or is_integer(buffer) do
    # Convert the buffer (meters) to degrees vertically and horizontally.
    v = buffer / @ky
    h = buffer / @kx

    %Geo.BoundingBox{
      minLon: minLon - h,
      minLat: minLat - v,
      maxLon: maxLon + h,
      maxLat: maxLat + v
    }
  end

  # as per https://wiki.openstreetmap.org/wiki/Zoom_levels#Distance_per_pixel_math
  @zoom_factor 40_075_016.686 * @cos1

  def center_zoom_to_bounds(%{lon: lon, lat: lat, zoom: zoom}) do
    # Grow a degenerate bbox at the center by the zoom level's span in meters.
    buffer_bbox(@zoom_factor / :math.pow(2, zoom), %Geo.BoundingBox{
      minLon: lon,
      minLat: lat,
      maxLon: lon,
      maxLat: lat
    })
  end

  @spec meters_per_pixel(number) :: float
  def meters_per_pixel(zoom) do
    # +8 because a tile is 2^8 = 256 pixels wide.
    @zoom_factor / :math.pow(2, zoom + 8)
  end

  def bounds_to_polyline(%Geo.BoundingBox{
        minLon: minLon,
        minLat: minLat,
        maxLon: maxLon,
        maxLat: maxLat
      }) do
    Polyline.encode([
      {minLon, minLat},
      {maxLon, maxLat}
    ])
  end

  @doc ~S"""
  Returns the distance from a line to a point, or between two points
  ## Examples
      iex> Geo.CheapRuler.dist(
      ...>   [
      ...>     %{lon: 10.03971050427, lat: 53.58988354712},
      ...>     %{lon: 10.04383358673, lat: 53.58986207956},
      ...>     %{lon: 10.04026843693, lat: 53.58887260434}
      ...>   ],
      ...>   %{lon: 10.04289976489, lat: 53.59004976324}
      ...> )
      20.34681678169861
      iex> Geo.CheapRuler.dist(
      ...>   %{lon: 10.04383358673, lat: 53.58986207956},
      ...>   %{lon: 10.03971050427, lat: 53.58988354712}
      ...> )
      273.29543042145286
  """
  def dist(from, %{lon: _lon1, lat: _lat1} = to) when is_list(from) do
    closest_point_on_line(from, to) |> Map.fetch!(:dist)
  end

  def dist(%{lon: _lon1, lat: _lat1} = from, %{lon: _lon2, lat: _lat2} = to),
    do: point2point_dist(from, to)

  @type adjuster ::
          (prev :: Geo.Point.like(),
           next :: Geo.Point.like(),
           index :: integer(),
           calculated_distance :: float() ->
             number())

  @doc ~S"""
  It finds the closest point of a line to another given point. Optionally you
  can pass the bearing and how strongly to consider it. Default is to ignore
  the bearing.
  It allows to modify the calculation using additional factors, besides
  distance, by specifying any number of adjusters. An adjuster that prefers
  the start of the line might look like:
      fn prev, next, index, calc_dist -> index*index end
  ## Examples
  It interpolates the point in the format of the given line:
      iex> Geo.CheapRuler.closest_point_on_line(
      ...>   [
      ...>     %Video.TimedPoint{lon: 10, lat: 53 , time_offset_ms: 100},
      ...>     %Video.TimedPoint{lon: 10.03971050427, lat: 53.58988354712, time_offset_ms: 200},
      ...>     %Video.TimedPoint{lon: 10.04383358673, lat: 53.58986207956, time_offset_ms: 300},
      ...>     %Video.TimedPoint{lon: 10.04026843693, lat: 53.58887260434, time_offset_ms: 400}
      ...>   ],
      ...>   %{lon: 10.04289976489, lat: 53.59004976324}
      ...> )
      %{index: 1, t: 0.7728627602842454, dist: 20.34681678169861,
        point: %Video.TimedPoint{lon: 10.042897081160916, lat: 53.58986695564232, time_offset_ms: 277}}
  It does not extend beyond line start/end points:
      iex> Geo.CheapRuler.closest_point_on_line(
      ...>   [
      ...>     %{lat: 53.550598, lon: 9.994402},
      ...>     %{lat: 53.550572, lon: 9.994393}
      ...>   ],
      ...>   %{lat: 53.5505342, lon: 9.9944973}
      ...> )
      %{index: 0, t: 1, dist: 8.092672677012276, point: %{lat: 53.550572, lon: 9.994393}}
  """
  @spec closest_point_on_line([Geo.Point.like()], Geo.Point.like(), [adjuster()]) :: %{
          point: Geo.Point.like(),
          index: integer(),
          dist: float(),
          t: float()
        }
  def closest_point_on_line(line, point, adjusters \\ [])

  def closest_point_on_line(
        line,
        %{lon: lon, lat: lat},
        adjusters
      )
      when is_list(line) and is_list(adjusters) do
    [head | tail] = line

    # Walk each segment [prev, next], keeping the candidate with the lowest
    # (adjusted) distance to the target point.
    Enum.reduce(tail, %{prev: head, dist: nil, point: head, i: 0, index: 0, t: 0}, fn next, acc ->
      x = acc.prev.lon
      y = acc.prev.lat
      dx = (next.lon - x) * @kx
      dy = (next.lat - y) * @ky

      # t is the projection parameter of the target onto the segment,
      # clamped to [0, 1] so we never extend beyond the segment ends.
      {x, y, t} =
        if dx == 0 && dy == 0 do
          {x, y, 0}
        else
          (((lon - x) * @kx * dx + (lat - y) * @ky * dy) / (dx * dx + dy * dy))
          |> case do
            t when t > 1 -> {next.lon, next.lat, 1}
            t when t > 0 -> {x + dx / @kx * t, y + dy / @ky * t, t}
            _t -> {x, y, 0}
          end
        end

      dx = (lon - x) * @kx
      dy = (lat - y) * @ky
      dist = :math.sqrt(dx * dx + dy * dy)

      # Adjusters add (possibly negative) penalties on top of the distance.
      final =
        adjusters
        |> Enum.map(fn adj -> adj.(acc.prev, next, acc.i, dist) end)
        |> Enum.sum()
        |> Kernel.+(dist)

      next_acc = %{acc | i: acc.i + 1, prev: next}

      if acc.dist && final >= acc.dist do
        next_acc
      else
        point = Geo.Interpolate.point(acc.prev, next, t)
        %{next_acc | dist: final, point: point, index: acc.i, t: t}
      end
    end)
    |> Map.take([:point, :index, :dist, :t])
  end

  @doc ~S"""
  Returns the total length of a line in meters
  ## Examples
      iex> Geo.CheapRuler.line_distance([
      ...>   %{lon: 10.03971050427, lat: 53.58988354712},
      ...>   %{lon: 10.04383358673, lat: 53.58986207956},
      ...>   %{lon: 10.04026843693, lat: 53.58887260434}
      ...> ])
      534.0011528314758
  """
  def line_distance(line) when is_list(line) do
    # Sum the distances of all consecutive point pairs.
    line
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.reduce(0, fn [one, two], acc ->
      acc + point2point_dist(one, two)
    end)
  end

  @spec point2point_dist(%{lat: number, lon: number}, %{lat: number, lon: number}) :: float
  @doc ~S"""
  Returns the distance in meters between two points
  ## Examples
      iex> Geo.CheapRuler.point2point_dist(
      ...>   %{lon: 10.04383358673, lat: 53.58986207956},
      ...>   %{lon: 10.03971050427, lat: 53.58988354712}
      ...> )
      273.29543042145286
  """
  def point2point_dist(%{lon: lon1, lat: lat1}, %{lon: lon2, lat: lat2}) do
    dx = (lon1 - lon2) * @kx
    dy = (lat1 - lat2) * @ky
    :math.sqrt(dx * dx + dy * dy)
  end

  @doc """
  Given two bearings in degrees, calculates how far they are apart and returns
  the absolute value (also in degrees)
  """
  def bearing_diff(b1, b2) do
    # Normalize the difference into (-180, 180] before taking the magnitude.
    case b2 - b1 do
      d when d < -180 -> d + 360
      d when d > 180 -> d - 360
      d -> d
    end
    |> abs()
  end

  @doc ~S"""
  Returns the bearing between two points in degrees.
  ## Examples
      iex> Geo.CheapRuler.bearing(
      ...>   %{lon: 10.03971050427, lat: 53.58988354712},
      ...>   %{lon: 10.04383358673, lat: 53.58986207956}
      ...> )
      90.5009150905702
  """
  def bearing(%{lon: lon1, lat: lat1}, %{lon: lon2, lat: lat2}) do
    dx = (lon2 - lon1) * @kx
    dy = (lat2 - lat1) * @ky

    if dx == 0 && dy == 0 do
      # Identical points have no defined bearing; return 0.0 by convention.
      0.0
    else
      bear = :math.atan2(dx, dy) |> to_deg()
      if bear < 0, do: bear + 360, else: bear
    end
  end

  @doc """
  Ensure that all segments in the given line are shorter than the given
  distance. If necessary, additional points will be inserted in between
  the too long segment.
      iex> Geo.CheapRuler.max_segment_length([
      ...>   %{lat: 53.49806818946, lon: 9.98100144757},
      ...>   %{lat: 53.49689925483, lon: 9.98101267651},
      ...>   %{lat: 53.49690593455, lon: 9.98334829625}
      ...> ], 75)
      [
        %{lat: 53.49806818946, lon: 9.98100144757},
        %{lat: 53.497483722145, lon: 9.98100706204},
        %{lat: 53.49689925483, lon: 9.98101267651},
        %{lat: 53.496901481403334, lon: 9.981791216423334},
        %{lat: 53.496903707976664, lon: 9.982569756336666},
        %{lat: 53.49690593455, lon: 9.98334829625}
      ]
  """
  def max_segment_length(polyline, max_dist) do
    polyline
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.reduce([hd(polyline)], fn
      # Segment with both ends equal: warn and drop the duplicate.
      [a, a], line ->
        IO.warn("got duplicated point in #{inspect(polyline)}")
        # i.e. remove duplicated point
        line

      [a, b], line ->
        # Split the segment into ceil(len / max_dist) equal pieces and
        # prepend the interpolated points in reverse; the final
        # Enum.reverse/1 restores line order.
        pieces = ceil(dist(a, b) / max_dist)
        segmented = Enum.map(pieces..1, fn n -> Geo.Interpolate.point(a, b, n / pieces) end)
        segmented ++ line
    end)
    |> Enum.reverse()
  end

  defp to_deg(rad) do
    rad / :math.pi() * 180.0
  end
end
|
lib/geo/cheap_ruler.ex
| 0.913182
| 0.61086
|
cheap_ruler.ex
|
starcoder
|
defmodule ExampleFiles.Fileglobs do
  @moduledoc """
  Converts filespecs into example-file fileglobs.
  """

  @example_suffix "**/*{example,Example,EXAMPLE}*"

  @spec parse([binary]) :: [binary]
  @doc """
  Converts the specified filespec or `List` of zero or more filespecs into a
  `List` of one or more fileglob expressions. The resultant fileglob
  expressions are suitable for finding example files with `Path.wildcard/1`.
  ## Examples
      iex> [] |> ExampleFiles.Fileglobs.parse
      ["**/*{example,Example,EXAMPLE}*"]
      iex> ["foo"] |> ExampleFiles.Fileglobs.parse
      ["foo/**/*{example,Example,EXAMPLE}*"]
      iex> ~w(foo bar) |> ExampleFiles.Fileglobs.parse
      ["{foo,bar}/**/*{example,Example,EXAMPLE}*"]
      iex> ~w(foo* ba?) |> ExampleFiles.Fileglobs.parse
      ~w(foo*/**/*{example,Example,EXAMPLE}* ba?/**/*{example,Example,EXAMPLE}*)
      iex> ~w(foo bar* baz qu*x) |> ExampleFiles.Fileglobs.parse
      ~w({foo,baz}/**/*{example,Example,EXAMPLE}* bar*/**/*{example,Example,EXAMPLE}* qu*x/**/*{example,Example,EXAMPLE}*)
      iex> ~w(foo {bar,baz} qux qu?x) |> ExampleFiles.Fileglobs.parse
      ~w({foo,qux}/**/*{example,Example,EXAMPLE}* {bar,baz}/**/*{example,Example,EXAMPLE}* qu?x/**/*{example,Example,EXAMPLE}*)
      iex> ~w(foo ba[rz] qux qu?x) |> ExampleFiles.Fileglobs.parse
      ~w({foo,qux}/**/*{example,Example,EXAMPLE}* ba[rz]/**/*{example,Example,EXAMPLE}* qu?x/**/*{example,Example,EXAMPLE}*)
  """
  def parse([]), do: [append_example("")]
  def parse([single]), do: [append_example(single)]

  def parse(filespecs) do
    # Bucket each filespec into :combinables (plain, top-level names that can
    # be merged into one brace group) or :uncombinables (wildcards and nested
    # paths, which must stay separate). Keyword.update/4 appends a new key at
    # the end, so the bucket seen first stays first in the output.
    categorized =
      Enum.reduce(filespecs, [], fn filespec, buckets ->
        key =
          if wildcard?(filespec) || nested?(filespec),
            do: :uncombinables,
            else: :combinables

        Keyword.update(buckets, key, [filespec], &(&1 ++ [filespec]))
      end)

    categorized
    |> Enum.reduce([], fn
      {:uncombinables, specs}, acc -> acc ++ specs
      {:combinables, specs}, acc -> acc ++ [combine(specs)]
    end)
    |> Enum.map(&append_example/1)
  end

  # Appends the case-insensitive "example" glob suffix to a filespec.
  @spec append_example(binary) :: binary
  defp append_example(filespec), do: Path.join(filespec, @example_suffix)

  # Merges several plain filespecs into a single brace-alternation glob.
  @spec combine([binary]) :: binary
  defp combine(filespecs), do: "{" <> Enum.join(filespecs, ",") <> "}"

  # A filespec is nested when it is absolute or contains a directory
  # separator anywhere except as trailing slashes.
  @spec nested?(binary) :: boolean
  defp nested?(filespec) do
    String.starts_with?(filespec, "/") ||
      filespec |> String.replace(~r(/+$), "") |> String.contains?("/")
  end

  # True when the filespec contains any glob metacharacter.
  @spec wildcard?(binary) :: boolean
  defp wildcard?(filespec), do: String.contains?(filespec, ~w(* ? { } [ ]))
end
|
lib/example_files/fileglobs.ex
| 0.82251
| 0.489748
|
fileglobs.ex
|
starcoder
|
defmodule Iona.Template.Engine do
  @moduledoc false

  # Custom EEx engine that escapes interpolated values by default, unless they
  # are wrapped in a `{:safe, iodata}` tuple. Structure mirrors the classic
  # Phoenix.HTML engine: static text accumulates as iodata while dynamic
  # expressions are bound to generated variables and emitted before the body.
  @behaviour EEx.Engine

  # Escaping itself lives in Iona.Template.Helper; delegated so generated code
  # can call Iona.Template.Engine.escape/1.
  defdelegate escape(value), to: Iona.Template.Helper

  @impl true
  def init(_opts) do
    # iodata: static chunks + vars for dynamic chunks (built in reverse);
    # dynamic: quoted assignments to prepend to the compiled body;
    # vars_count: counter used to generate unique variable names.
    %{
      iodata: [],
      dynamic: [],
      vars_count: 0
    }
  end

  @impl true
  def handle_begin(state) do
    # Fresh buffers for a nested template block; vars_count is kept so
    # generated variable names stay unique across blocks.
    %{state | iodata: [], dynamic: []}
  end

  @impl true
  def handle_end(quoted) do
    handle_body(quoted)
  end

  @impl true
  def handle_body(state) do
    # Emit all dynamic assignments first, then the assembled {:safe, iodata}
    # result, as a single quoted block.
    %{iodata: iodata, dynamic: dynamic} = state
    safe = {:safe, Enum.reverse(iodata)}
    {:__block__, [], Enum.reverse([safe | dynamic])}
  end

  @impl true
  def handle_text(state, text) do
    # Static template text needs no escaping; prepend to the iodata buffer.
    %{iodata: iodata} = state
    %{state | iodata: [text | iodata]}
  end

  @impl true
  def handle_expr(state, "=", ast) do
    # `<%= ... %>`: bind the (escaped) expression result to a generated
    # variable; the variable is spliced into the iodata at this position.
    line = line_from_expr(ast)
    ast = traverse(ast)
    %{iodata: iodata, dynamic: dynamic, vars_count: vars_count} = state
    var = Macro.var(:"arg#{vars_count}", __MODULE__)
    ast = quote do: unquote(var) = unquote(to_safe(ast, line))
    %{state | dynamic: [ast | dynamic], iodata: [var | iodata], vars_count: vars_count + 1}
  end

  # `<% ... %>`: executed for side effects only; nothing is added to iodata.
  def handle_expr(state, "", ast) do
    ast = traverse(ast)
    %{dynamic: dynamic} = state
    %{state | dynamic: [ast | dynamic]}
  end

  # Any other marker falls back to the default EEx engine behaviour.
  def handle_expr(state, marker, ast) do
    EEx.Engine.handle_expr(state, marker, ast)
  end

  # Rewrites `@assign` references throughout the expression AST.
  defp traverse(expr) do
    Macro.prewalk(expr, &handle_assign/1)
  end

  defp line_from_expr({_, meta, _}) when is_list(meta), do: Keyword.get(meta, :line)
  defp line_from_expr(_), do: nil

  # We can do the work at compile time
  defp to_safe(literal, _line) when is_binary(literal) or is_atom(literal) or is_number(literal) do
    to_iodata(literal)
  end

  # We can do the work at runtime
  defp to_safe(literal, line) when is_list(literal) do
    quote line: line, do: Iona.Template.Engine.to_iodata(unquote(literal))
  end

  # We need to check at runtime and we do so by
  # optimizing common cases.
  defp to_safe(expr, line) do
    # Keep stacktraces for protocol dispatch...
    fallback = quote line: line, do: Iona.Template.Engine.to_iodata(other)
    # However ignore them for the generated clauses to avoid warnings
    quote line: :keep do
      case unquote(expr) do
        {:safe, data} -> data
        bin when is_binary(bin) -> Iona.Template.Engine.escape(bin)
        other -> unquote(fallback)
      end
    end
  end

  # Rewrites `@name` into a runtime fetch from the `assigns` variable.
  defp handle_assign({:@, meta, [{name, _, atom}]}) when is_atom(name) and is_atom(atom) do
    quote line: meta[:line] || 0 do
      Iona.Template.Engine.fetch_assign(var!(assigns), unquote(name))
    end
  end

  defp handle_assign(arg), do: arg

  @doc false
  # Maps are normalized to a keyword list so the Keyword-based clause below
  # handles both assign containers.
  def fetch_assign(assigns, key) when is_map(assigns) do
    fetch_assign(Map.to_list(assigns), key)
  end

  def fetch_assign(assigns, key) do
    case Keyword.fetch(assigns, key) do
      :error ->
        raise ArgumentError, message: """
        assign @#{key} not available in eex template. Available assigns: #{inspect Keyword.keys(assigns)}
        """
      {:ok, val} -> val
    end
  end

  # Converts a value to escaped iodata; {:safe, _} content is trusted as-is.
  def to_iodata({:safe, str}) do
    str |> to_string
  end

  # Lists are converted element-wise, preserving the iodata tree shape.
  def to_iodata([h|t]) do
    [to_iodata(h)|to_iodata(t)]
  end

  def to_iodata([]) do
    []
  end

  # Anything else is stringified and escaped.
  def to_iodata(value) do
    value |> to_string |> escape
  end
end
|
lib/iona/template/engine.ex
| 0.659515
| 0.480296
|
engine.ex
|
starcoder
|
defmodule ExLink do
  @moduledoc """
  Handles WebSocket connections to a Lavalink node, providing:
  - `ExLink.Player`s to keep track of Lavalink players and control them
  - `ExLink.Message`s to send via `ExLink.Connection`(s)
  A somewhat example:
  ```
  # define a player
  defmodule MyApp.Player do
    @behaviour ExLink.Player
    # required callbacks...
  end
  # define a supervisor
  defmodule MyApp.Supervisor do
    use Supervisor
    def start_link(args), do: Supervisor.start_link(__MODULE__, args, name: __MODULE__)
    def init(_args) do
      children = [
        {ExLink,
         {%{
            url: "ws://localhost:8080",
            authorization: "123",
            shard_count: 1,
            user_id: 123_456_789_123_456_789,
            player: MyApp.Player
          }, name: MyApp.Player}}
      ]
      Supervisor.init(children, strategy: :one_for_one)
    end
  end
  # after starting the supervisor, play something
  message = ExLink.Message.play(track, guild_id)
  ExLink.Connection.send(MyApp.Player, message)
  ```
  Alternatively `ExLink.Player` provides a `__using__` macro to directly start the module under a supervisor.
  """
  use Supervisor

  @typedoc """
  Options for `start_link/1`:
  - `:url` - URL of the lavalink node.
  - `:authorization` - Authorization for the lavalink node
  - `:shard_count` - Number of shards
  - `:user_id` - Id of the bot
  - `:player` - Module implementing the `ExLink.Player` behaviour.
  > Can be a `Map` or `Keyword`.
  """
  @typedoc since: "0.1.0"
  @type options ::
          [
            {:url, String.t()}
            | {:authorization, String.t()}
            | {:shard_count, non_neg_integer()}
            | {:user_id, ExLink.Payload.id()}
            | {:player, module()}
          ]
          | map()

  @doc """
  Starts an `ExLink` process linked to the current process.
  """
  @doc since: "0.1.0"
  @spec start_link(opts :: options() | {options(), GenServer.options()}) :: Supervisor.on_start()
  def start_link({opts, gen_opts}) do
    Supervisor.start_link(__MODULE__, Map.new(opts), gen_opts)
  end

  # Convenience clause: no GenServer options supplied.
  def start_link(opts), do: start_link({opts, []})

  @doc false
  def init(opts) do
    opts = Map.put(opts, :client, self())

    # The Agent simply holds the configured player module so it can be looked
    # up later via get_module/1. Child order matters with :rest_for_one: if
    # the Agent or Connection dies, everything started after it is restarted.
    children = [
      {Agent, fn -> opts.player end},
      {ExLink.Connection, opts},
      {ExLink.Player.Supervisor, []}
    ]
    Supervisor.init(children, strategy: :rest_for_one)
  end

  @doc """
  Gets a player's pid or :error if not started.
  """
  @doc since: "0.1.0"
  @spec get_player(client :: term(), guild_id :: ExLink.Message.id()) :: pid() | :error
  def get_player(client, guild_id) do
    client
    |> get_player_supervisor()
    |> ExLink.Player.Supervisor.get_player(guild_id)
  end

  @doc """
  Gets all player's pids mapped by guild ids.
  """
  @doc since: "0.1.0"
  @spec get_players(client :: term()) :: %{required(ExLink.Message.id()) => pid()}
  def get_players(client) do
    client
    |> get_player_supervisor()
    |> ExLink.Player.Supervisor.get_players()
  end

  @doc """
  Gets a player's pid, starting it if necessary.
  """
  @doc since: "0.1.0"
  @spec ensure_player(client :: term(), guild :: ExLink.Message.id()) :: pid()
  def ensure_player(client, guild_id) do
    # If no player exists yet, start one; :already_started covers the race
    # where another caller started it between the lookup and start_child.
    with :error <- get_player(client, guild_id) do
      ExLink.Player.Supervisor.start_child(client, guild_id)
      |> case do
        {:ok, child} ->
          child
        {:ok, child, _info} ->
          child
        {:error, {:already_started, child}} ->
          child
        {:error, error} ->
          raise "Starting the player failed: #{inspect(error)}"
      end
    end
  end

  @doc false
  # Fetches the configured player module from the Agent child.
  # Note: unlike the lookups below, this raises (Enum.find returns nil,
  # elem/2 then fails) if the Agent child is not present.
  @spec get_module(client :: term()) :: module()
  def get_module(client) do
    client
    |> Supervisor.which_children()
    |> Enum.find(fn
      {Agent, _pid, _type, _modules} -> true
      _ -> false
    end)
    |> elem(1)
    |> Agent.get(& &1)
  end

  @doc false
  # Locates the ExLink.Player.Supervisor child's pid, or :error if absent.
  @spec get_player_supervisor(client :: term()) :: pid() | :error
  def get_player_supervisor(client) do
    client
    |> Supervisor.which_children()
    |> Enum.find(fn
      {ExLink.Player.Supervisor, _pid, _type, _modules} -> true
      _ -> false
    end)
    |> case do
      {_id, pid, _type, _modules} -> pid
      _ -> :error
    end
  end

  @doc false
  # Locates the ExLink.Connection child's pid, or :error if absent.
  @spec get_connection(client :: term()) :: pid() | :error
  def get_connection(client) do
    client
    |> Supervisor.which_children()
    |> Enum.find(fn
      {ExLink.Connection, _pid, _type, _modules} -> true
      _ -> false
    end)
    |> case do
      {_id, pid, _type, _modules} -> pid
      _ -> :error
    end
  end
end
|
lib/ex_link.ex
| 0.729905
| 0.660532
|
ex_link.ex
|
starcoder
|
defmodule TrainLoc.Vehicles.State do
  @moduledoc """
  GenServer for tracking and querying the vehicles in the system. Each vehicle
  is represented by a `TrainLoc.Vehicles.Vehicle` struct.
  """

  use GenServer

  alias TrainLoc.Vehicles.Vehicles

  require Logger

  # Client Interface

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, :ok, opts)
  end

  def all_vehicles(pid \\ __MODULE__), do: GenServer.call(pid, :all_vehicles)

  def all_vehicle_ids(pid \\ __MODULE__), do: GenServer.call(pid, :all_ids)

  def upsert_vehicles(pid \\ __MODULE__, vehicles) do
    GenServer.call(pid, {:upsert_vehicles, vehicles})
  end

  def get_duplicate_logons(pid \\ __MODULE__), do: GenServer.call(pid, :get_duplicates)

  def reset(pid \\ __MODULE__), do: GenServer.call(pid, :reset)

  @doc """
  Awaits a reply.
  """
  def await(pid \\ __MODULE__), do: GenServer.call(pid, :await)

  # Server Callbacks

  def init(_) do
    _ = Logger.debug(fn -> "Starting #{__MODULE__}..." end)
    {:ok, Vehicles.new()}
  end

  def handle_call(:all_vehicles, _from, state) do
    {:reply, Vehicles.all_vehicles(state), state}
  end

  def handle_call(:all_ids, _from, state) do
    {:reply, Vehicles.all_ids(state), state}
  end

  def handle_call({:upsert_vehicles, new_vehicles}, _from, state) do
    # Reply with the updated collection and keep it as the new state.
    updated = Vehicles.upsert(state, new_vehicles)
    {:reply, updated, updated}
  end

  def handle_call(:get_duplicates, _from, state) do
    {:reply, Vehicles.find_duplicate_logons(state), state}
  end

  def handle_call(:reset, _from, _state) do
    {:reply, :ok, Vehicles.new()}
  end

  def handle_call(:await, _from, state) do
    {:reply, true, state}
  end

  # Catch-all clauses: unknown messages are answered/ignored, never crash.
  def handle_call(_, _from, state) do
    {:reply, {:error, "Unknown callback."}, state}
  end

  def handle_cast(_, state), do: {:noreply, state}

  def handle_info(_, state), do: {:noreply, state}
end
|
apps/train_loc/lib/train_loc/vehicles/state.ex
| 0.732974
| 0.49292
|
state.ex
|
starcoder
|
defmodule Cog.Command.Service.PipelineMonitor do
  alias Cog.Command.Service
  alias Cog.Util.ETSWrapper

  require Logger

  @doc """
  Finds the pid of the pipeline executor corresponding to token provided which
  is then monitored and stored in the monitor table provided. Calling this
  function with the same token multiple times will only result in the creation
  of a single monitor.
  """
  def monitor_pipeline(monitor_table, token) do
    # Resolve the token to its executor pid first; lookup errors are
    # propagated untouched so callers can tell "bad token" apart.
    case Service.Tokens.process_for_token(token) do
      {:error, error} ->
        {:error, error}
      pid ->
        monitor_pipeline(monitor_table, token, pid)
    end
  end

  def monitor_pipeline(monitor_table, token, pid) do
    # The monitor table doubles as the "already monitored?" record (keyed by
    # pid), so at most one monitor is attached per executor process.
    # NOTE(review): a pid already registered under a *different* token would
    # match neither clause and raise CaseClauseError — presumably impossible;
    # confirm against Service.Tokens' guarantees.
    case ETSWrapper.lookup(monitor_table, pid) do
      {:ok, ^token} ->
        Logger.debug("Already monitoring #{inspect pid} for token #{inspect token}")
      {:error, :unknown_key} ->
        Logger.debug("Monitoring #{inspect pid} for token #{inspect token}")
        Process.monitor(pid)
        ETSWrapper.insert(monitor_table, pid, token)
    end
  end

  @doc """
  Removes the pipeline executor pid from the monitor table and removes any
  matching keys from the data table. Typically called when a pipeline executor
  process dies.
  """
  def cleanup_pipeline(monitor_table, data_table, pid, key_match) do
    Logger.debug("Pipeline #{inspect pid} is no longer alive; cleaning up after it")
    # Delete the pipeline's data entries first, then drop the monitor record.
    ETSWrapper.match_delete(data_table, {key_match, :_})
    ETSWrapper.delete(monitor_table, pid)
  end

  @doc """
  Runs through all pipeline executor pids in the monitor table and either
  monitors them if the process is alive, or cleans up after them if the process
  is dead. Typically called when restarting a service process. For more details
  about what the key_match_fun argument should return, see
  http://www.erlang.org/doc/man/ets.html#match-2
  """
  def account_for_existing_pipelines(monitor_table, data_table, key_match_fun \\ &(&1)) do
    ETSWrapper.each(monitor_table, fn pid, token ->
      case Process.alive?(pid) do
        true ->
          # Previous monitors died with the old service process; re-attach.
          Logger.debug("Remonitoring #{inspect pid} for token #{inspect token}")
          Process.monitor(pid)
        false ->
          cleanup_pipeline(monitor_table, data_table, pid, key_match_fun.(token))
      end
    end)
  end

  @doc """
  Sends a `:dead_process_cleanup` message to the calling process at interval
  milliseconds in the future.
  """
  def schedule_dead_pipeline_cleanup(interval) do
    Process.send_after(self(), :dead_process_cleanup, interval)
  end

  @doc """
  Runs though all pipeline executor pids in the monitor table and cleans up
  after any dead processes. Used to remove any processes that died while the
  service process was restarting and we're not caught by a call to
  `account_for_existing_pipelines/2` during startup.
  """
  def dead_pipeline_cleanup(monitor_table, data_table) do
    ETSWrapper.each(monitor_table, fn pid, token ->
      unless Process.alive?(pid) do
        cleanup_pipeline(monitor_table, data_table, pid, {token, :_})
      end
    end)
  end
end
|
lib/cog/command/service/pipeline_monitor.ex
| 0.661376
| 0.423816
|
pipeline_monitor.ex
|
starcoder
|
defmodule Solar.Events do
  @moduledoc """
  The `Solar.Events` module provides the calculations for sunrise and sunset
  times. This is likely to be refactored as other events are added.
  """

  @doc """
  The event function takes a minimum of two parameters, the event of interest
  which can be either :rise or :set and the latitude and longitude. Additionally
  a list of options can be provided as follows:
  * `date:` allows a value of either `:today` or an Elixir date. The default
    if this option is not provided is the current day.
  * `zenith:` can be set to define the sunrise or sunset. See the `Zeniths`
    module for a set of standard zeniths that are used. The default if a
    zenith is not provided is `:official` most commonly used for sunrise and
    sunset.
  * `timezone:` can be provided and should be a standard timezone identifier
    such as "America/Chicago". If the option is not provided, the timezone is
    taken from the system and used.
  ## Examples
  The following, with out any options and run on December 25:
      iex> Solar.event(:rise, {39.1371, -88.65})
      {:ok,~T[07:12:26]}
      iex> Solar.event(:set, {39.1371, -88.65})
      {:ok,~T[16:38:01]}
  The coordinates are for Lake Sara, IL where sunrise on this day will be at 7:12:26AM and sunset will be at 4:38:01PM.
  """
  @type latitude :: number
  @type longitude :: number
  @type message :: String.t

  @spec event(:rise | :set, {latitude, longitude}) ::
    {:ok, Time.t} |
    {:error, message}
  def event(type, location, opts \\ []) do
    # Option handling: nil (absent) falls back to defaults; :today/:local are
    # symbolic shortcuts for the current local date / system timezone.
    zenith = case opts[:zenith] do
      nil -> Zeniths.official
      _ -> opts[:zenith]
    end
    date = case opts[:date] do
      :today -> Timex.to_date(Timex.local())
      nil -> Timex.to_date(Timex.local())
      _ -> opts[:date]
    end
    timezone = case opts[:timezone] do
      :local -> Timex.local().timezone
      nil -> Timex.local().time_zone
      _ -> opts[:timezone]
    end
    { latitude, longitude } = location
    # All intermediate results of the algorithm are threaded through this map;
    # each step below reads earlier keys and adds its own.
    state = %{type: type, zenith: zenith, location: location,
              latitude: latitude, longitude: longitude,
              date: date, timezone: timezone}
    # Happy-path pipeline: any step returning {:error, _} short-circuits and
    # is returned as-is. On success, :local_time already holds {:ok, Time}.
    with { :ok, state } <- verify_type_parameter(state),
         { :ok, state } <- get_base_longitude_hour(state),
         { :ok, state } <- get_longitude_hour(state),
         { :ok, state } <- get_mean_anomaly(state),
         { :ok, state } <- get_sun_true_longitude(state),
         { :ok, state } <- get_cos_sun_local_hour(state),
         { :ok, state } <- get_sun_local_hour(state),
         { :ok, state } <- get_right_ascension(state),
         { :ok, state } <- get_local_mean_time(state),
         { :ok, state } <- get_local_time(state) do
      state[:local_time]
    else
      error -> error
    end
  end

  # Guards against anything other than :rise or :set before any math is done.
  defp verify_type_parameter state do
    type = state[:type]
    cond do
      type != :rise && type != :set ->
        { :error, "Type parameter must be either :rise or :set"}
      true -> { :ok, state }
    end
  end

  # Computes the longitude time.
  # Uses: location, date and type
  # Sets: longitude_hour
  defp get_longitude_hour state do
    # 6h (rise) / 18h (set) are the approximate local event times used as the
    # starting point of the algorithm.
    offset = case state[:type] do
      :rise -> 6.0
      _ -> 18.0
    end
    dividend = offset - state[:longitude] / 15.0
    addend = dividend / 24.0
    longitude_hour = Timex.day(state[:date]) + addend
    { :ok, Map.put(state, :longitude_hour, longitude_hour) }
  end

  # Computes the base longitude hour, lngHour in the algorithm. The longitude
  # of the location of the solar event divided by 15 (deg/hour).
  defp get_base_longitude_hour state do
    base_longitude_hour = state[:longitude] / 15.0
    { :ok, Map.put(state, :base_longitude_hour, base_longitude_hour)}
  end

  # Computes the mean anomaly of the Sun, M in the algorithm.
  defp get_mean_anomaly state do
    mean_anomaly = state[:longitude_hour] * 0.9856 - 3.289
    { :ok, Map.put(state, :mean_anomaly, mean_anomaly) }
  end

  # Computes the true longitude of the sun, L in the algorithm, at the
  # given location, adjusted to fit in the range [0-360].
  defp get_sun_true_longitude state do
    mean_anomaly = state[:mean_anomaly]
    sin_mean_anomaly = :math.sin(deg_to_rad(mean_anomaly))
    sin_double_mean_anomoly = :math.sin(deg_to_rad(mean_anomaly * 2.0))
    first_part = mean_anomaly + (sin_mean_anomaly * 1.916)
    second_part = sin_double_mean_anomoly * 0.020 + 282.634
    true_longitude = first_part + second_part
    sun_true_longitude = case true_longitude > 360.0 do
      true -> true_longitude - 360.0
      false -> true_longitude
    end
    { :ok, Map.put(state, :sun_true_longitude, sun_true_longitude)}
  end

  # Computes cos(H), the cosine of the sun's local hour angle. Values outside
  # [-1, 1] mean the sun never rises/sets at this location on this date
  # (polar day / polar night) and are reported as errors.
  defp get_cos_sun_local_hour state do
    latitude = state[:latitude]
    sin_sun_declination = :math.sin(deg_to_rad(state[:sun_true_longitude])) * 0.39782
    cos_sun_declination = :math.cos(:math.asin(sin_sun_declination))
    cos_zenith = :math.cos(deg_to_rad(state[:zenith]))
    sin_latitude = :math.sin(deg_to_rad(latitude))
    cos_latitude = :math.cos(deg_to_rad(latitude))
    cos_sun_local_hour =
      (cos_zenith - sin_sun_declination * sin_latitude) /
      (cos_sun_declination * cos_latitude)
    cond do
      cos_sun_local_hour < -1.0 -> {:error, "cos_sun_local_hour < -1.0"}
      cos_sun_local_hour > +1.0 -> {:error, "cos_sun_local_hour > +1.0"}
      true -> { :ok, Map.put(state, :cos_sun_local_hour, cos_sun_local_hour)}
    end
  end

  # Converts cos(H) into the sun's local hour angle H, in hours (degrees/15).
  # For sunrise the angle is measured from the other side (360 - H).
  defp get_sun_local_hour state do
    local_hour = rad_to_deg(:math.acos(state[:cos_sun_local_hour]))
    local_hour = case state[:type] do
      :rise -> (360.0 - local_hour) / 15
      _ -> local_hour / 15
    end
    {:ok, Map.put(state, :sun_local_hour, local_hour)}
  end

  # Computes T, the local mean time of the event, normalized into [0, 24).
  defp get_local_mean_time state do
    local_mean_time = state[:sun_local_hour] + state[:right_ascension] -
      (state[:longitude_hour] * 0.06571) - 6.622
    val = cond do
      local_mean_time < 0 -> local_mean_time + 24.0
      local_mean_time > 24 -> local_mean_time - 24.0
      true -> local_mean_time
    end
    { :ok, Map.put(state, :local_mean_time, val) }
  end

  # Computes the suns right ascension, RA in the algorithm, adjusting for
  # the quadrant of L and turning it into degree-hours. Will be in the
  # range [0,360].
  defp get_right_ascension state do
    tanl = :math.tan(deg_to_rad(state[:sun_true_longitude]))
    inner = rad_to_deg(tanl) * 0.91764
    right_ascension = :math.atan(deg_to_rad(inner))
    right_ascension = rad_to_deg(right_ascension)
    right_ascension = cond do
      right_ascension < 0.0 -> right_ascension + 360.0
      right_ascension > 360.0 -> right_ascension - 360.0
      true -> right_ascension
    end
    # Snap RA into the same 90-degree quadrant as the true longitude L.
    long_quad = Kernel.trunc(state[:sun_true_longitude] / 90.0) * 90.0
    right_quad = Kernel.trunc(right_ascension / 90.0) * 90.0
    val = (right_ascension + (long_quad - right_quad)) / 15.0
    { :ok, Map.put(state, :right_ascension, val)}
  end

  # Converts local mean time to UTC, applies the timezone offset and breaks the
  # fractional hour into h/m/s. Note: :local_time stores the {:ok, Time}
  # tuple from Time.new/3, which event/3 returns directly.
  defp get_local_time state do
    utc_time = state[:local_mean_time] - state[:base_longitude_hour]
    tzi = Timex.timezone(state[:timezone], state[:date])
    # NOTE(review): Timex.Timezone.total_offset/1 appears to be treated as
    # seconds here (divided by 3600 to get hours) despite the variable name
    # `offset_minutes` — confirm against Timex docs.
    offset_minutes = Timex.Timezone.total_offset(tzi)
    local_time = utc_time + offset_minutes/3600.0
    time = local_time
    hour = Kernel.trunc(time)
    tmins = (time-hour) * 60
    minute = Kernel.trunc(tmins)
    tsecs = (tmins-minute) * 60
    seconds = Kernel.trunc(tsecs)
    time = Time.new(hour,minute,seconds)
    { :ok, Map.put(state, :local_time, time)}
  end

  # Converts degrees to radians.
  defp deg_to_rad degrees do
    degrees / 180.0 * :math.pi
  end

  # Converts radians to degrees.
  defp rad_to_deg radians do
    radians * 180.0 / :math.pi
  end

  @doc """
  Calculates the hours of daylight returning as a time with hours, minutes and seconds.
  """
  # NOTE(review): assumes `set` is later than `rise` on the same day; a
  # negative difference would make Time.new/3 fail in hours_to_time/1.
  def daylight rise, set do
    hours_to_time(time_to_hours(set) - time_to_hours(rise))
  end

  # A Time as a fractional number of hours since midnight.
  defp time_to_hours time do
    time.hour + time.minute/60.0 + time.second / (60.0 * 60.0)
  end

  # Fractional hours back to a Time (truncating sub-second remainder).
  defp hours_to_time hours do
    h = Kernel.trunc(hours)
    value = (hours - h) * 60
    m = Kernel.trunc(value)
    value = (value - m) * 60
    s = Kernel.trunc(value)
    { :ok, time} = Time.new(h,m,s)
    time
  end
end
|
lib/events/events.ex
| 0.94062
| 0.696158
|
events.ex
|
starcoder
|
defmodule ExTimezoneDB.Timezone do
  @moduledoc """
  Defines the structure and ancillary functions handling the valid return
  data from the TimezoneDB service.
  TimezoneDB returns for each zone
  - countryName (as a string)
  - countryCode (as ISO 3166-1 alpha-2 codes (2 letter codes))
  - zoneName (as a string representing the IANA time zone database entries)
  - abbreviation (as the one to four letter abbreviation of the zone)
  - nextAbbreviation (as the one to four letter abbreviation of the next zone)
  - gmtOffset (as an integer of seconds of offset from GMT)
  - dst (as string "0" for No or "1" for Yes)
  - timestamp (as a Unix time as the current time for the zone)
  Values returned by TimezoneDB which are ignored
  - zoneStart
  - zoneStop
  - formatted
  In addition, if using a premium key TimezoneDB may (depending on the request)
  return two other fields for a zone
  - cityName
  - regionName
  These are packaged into a struct with the following fields
  - country_code
  - country_name
  - zone_name
  - abbreviation
  - next_abbreviation
  - gmt_offset
  - dst
  - timestamp
  - city_name
  - region_name
  """
  defstruct [
    :country_code,
    :country_name,
    :zone_name,
    :abbreviation,
    :gmt_offset,
    :dst,
    :next_abbreviation,
    :timestamp,
    # These last two are only populated for premium requests
    :city_name,
    :region_name
  ]

  @type t :: %__MODULE__{
          country_code: String.t(),
          country_name: String.t(),
          zone_name: String.t(),
          abbreviation: String.t(),
          gmt_offset: integer,
          dst: String.t(),
          next_abbreviation: String.t(),
          timestamp: non_neg_integer(),
          city_name: String.t() | nil,
          region_name: String.t() | nil
        }

  alias ExTimezoneDB.Timezone

  # Normalizes the API's "absent" encoding: empty strings become nil so
  # optional fields are pattern-matchable against nil.
  @spec no_empty_string(String.t() | nil) :: String.t() | nil
  defp no_empty_string(str) do
    case str do
      "" -> nil
      str -> str
    end
  end

  @doc """
  Converts the zone info from the response to a Timezone struct.

  Keys absent from `json_map` simply become `nil` fields; `cityName` and
  `regionName` are additionally normalized so `""` maps to `nil`.
  """
  # Fixed spec: `Map.t()` is not a defined type (Map has no t/0); plain
  # map() is the correct input type here, and t() the struct result.
  @spec from_json(map()) :: t()
  def from_json(json_map) do
    struct(%Timezone{},
      country_code: json_map["countryCode"],
      country_name: json_map["countryName"],
      zone_name: json_map["zoneName"],
      abbreviation: json_map["abbreviation"],
      gmt_offset: json_map["gmtOffset"],
      dst: json_map["dst"],
      next_abbreviation: json_map["nextAbbreviation"],
      timestamp: json_map["timestamp"],
      city_name: no_empty_string(json_map["cityName"]),
      region_name: no_empty_string(json_map["regionName"])
    )
  end
end
|
lib/timezone.ex
| 0.873276
| 0.581838
|
timezone.ex
|
starcoder
|
defmodule Loadex do
  @moduledoc """
  A simple distributed load test runner.
  `Loadex` was created with two things in mind - genarating huge loads in a controlled manner, while being able to fully customize the test's flow.
  These goals are achieved by using plain Elixir to create *scenarios* and then laveraging Elixir's massive concurrency capabilities to run them on one or multiple machines.
  ## Example:
      defmodule ExampleScenario do
        use Loadex.Scenario
        setup do
          1..100
        end
        scenario index do
          loop_after 500, 10, iteration do
            IO.puts("My number is \#{index}, iteration \#{iteration}!")
          end
        end
        teardown index do
          IO.puts("Bye from \#{index}!")
        end
      end
  For detailed instructions on how to create a scenario please refer to `Loadex.Scenario`.
  """

  # Defaults applied to run/1; kept in one place so partial option lists
  # still pick up sane values for the keys the caller omitted.
  @default_opts [restart: false, scenario: nil, rate: 1000]

  @doc """
  Runs scenarios.
  Running a scenario means executing its `setup` callback and passing its results to the `scenario` implementation.
  For more detailed information on how to create scenarios please refer to `Loadex.Scenario`.
  When running in a distributed environment (see `join_cluster/1`), **the `setup` callback will be executed on a node `run/1` is called on** and its results will
  be distributed along the cluster.
  By default scenarios are loaded from `./scenarios` directory and executed all at the same time.
  A single scenario can be specified by passing a `scenario` option.
  Scenarios can be restarted after crashing or quitting by passing `restart: true` option.
  Rate (per second), at which scenarios are started can be adjusted by passing a `rate` option. **Note:** this doesn't affect *restart* rate.
  ## Example:
      iex> Loadex.run(scenario: "./scenarios/example_scenario.exs", rate: 30, restart: true)
  """
  @spec run(opts :: [restart: boolean(), scenario: nil | binary(), rate: non_neg_integer()]) ::
          {:ok, :scenarios_started}
  def run(opts \\ []) do
    # Bug fix: previously the defaults were a head default (`opts \\ [...]`),
    # so calling e.g. run(rate: 5) dropped :restart and :rate defaults
    # entirely (they became nil). Merging over @default_opts preserves them.
    opts = Keyword.merge(@default_opts, opts)

    opts[:scenario]
    |> load_scenarios()
    |> IO.inspect(label: "Scenarios")
    |> Stream.map(&Loadex.Runner.run(&1, opts[:restart], opts[:rate]))
    |> Stream.run()

    {:ok, :scenarios_started}
  end

  @doc """
  Adds `nodes` into the `Loadex` cluster.

  Returns one `{node, :ok | :node_down}` tuple per requested node.
  """
  # Fixed spec: the function returns ping results per node, not a bare list
  # of node names as the old `[atom()]` return claimed.
  @spec join_cluster(nodes :: [atom()]) :: [{atom(), :ok | :node_down}]
  def join_cluster(nodes) when is_list(nodes) do
    nodes
    |> Enum.map(fn node ->
      case Node.ping(node) do
        :pong ->
          {node, :ok}
        :pang ->
          {node, :node_down}
      end
    end)
  end

  @doc """
  Stops all scenarios.
  """
  @spec stop_all :: :ok
  def stop_all do
    Loadex.Runner.Supervisor.restart()
  end

  # Loads scenario modules on every node in the cluster.
  defp load_scenarios(maybe_scenario) do
    on_all_nodes(Loadex.Scenario.Loader, :load, [maybe_scenario])
  end

  # Broadcasts an MFA to all nodes; returns the first node's result.
  defp on_all_nodes(mod, action, args) do
    :rpc.multicall(mod, action, args) |> elem(0) |> List.first()
  end
end
|
lib/loadex.ex
| 0.846578
| 0.782538
|
loadex.ex
|
starcoder
|
defmodule Calliope.Engine do
  import Calliope.Render

  @doc """
  The Calliope Engine allows you to precompile your haml templates to be accessed
  through functions at runtime.
  Example:
      defmodule Simple do
        use Calliope.Engine
        def show do
          content_for(:show, [title: Calliope])
        end
      end
  The configure options are:
  `:path` - provides the root path. The default is the current working directory.
  `:templates` - used to define where the templates are stored.
  `:alias` - used to set the directory where the templates are located. The
    default value is 'templates'.
  `:layout` - the layout to use for templates. The default is `:none` or you can pass in
    the name of a layout.
  `:layout_directory` - the directory that your layouts are stored relative to the
    templates path. The default directory is `layouts`
  """
  defmacro __using__(opts \\ []) do
    # Resolve all paths at compile time from the using module's options.
    dir = Keyword.get(opts, :alias, "templates")
    templates = Keyword.get(opts, :templates, nil)
    root = Keyword.get(opts, :path, File.cwd!)
    layout = Keyword.get(opts, :layout, :none)
    layout_directory = Keyword.get(opts, :layout_directory, "layouts")
    path = build_path_for [root, templates, dir]
    layout_path = build_path_for [root, templates, layout_directory]
    quote do
      import unquote(__MODULE__)
      use Calliope.Render
      # Compile layout (if any) and all templates into content_for/2 clauses.
      compile_layout unquote(layout), unquote(layout_path)
      compile_templates unquote(path)
      # Wraps already-rendered content in the configured layout.
      def layout_for(content, args\\[]) do
        content_for unquote(layout), [ yield: content ] ++ args
      end
      def content_with_layout(name, args) do
        content_for(name, args) |> layout_for args
      end
      # :none layout: just evaluate the yielded content with the given args.
      def content_for(:none, args) do
        Keyword.get(args, :yield, "") |> Calliope.Render.eval args
      end
    end
  end

  # No layout configured: nothing to compile.
  defmacro compile_layout(:none, _path), do: nil
  defmacro compile_layout(_layout, path) do
    quote do
      compile_templates unquote(path)
    end
  end

  # Turns every .html.haml file under `path` into a content_for/2 clause.
  defmacro compile_templates(path) do
    path = eval_path(path)
    quote do: unquote files_for(path) |> haml_views |> view_to_function(path)
  end

  # Joins only the binary segments, skipping nil/atom placeholders.
  def build_path_for(list), do: Enum.filter(list, fn(x) -> is_binary x end) |> Enum.join "/"

  # Evaluates a quoted path expression down to its literal string value.
  def eval_path(path) do
    { path, _ } = Code.eval_quoted path
    path
  end

  def files_for(nil), do: []
  def files_for(path), do: File.ls! path

  # Keeps only files named exactly `<name>.html.haml`.
  def haml_views(files) do
    Enum.filter(files, fn(v) -> Regex.match?(~r{^\w*\.html\.haml$}, v) end)
  end

  # Reads and precompiles a single haml template from disk.
  def precompile_view(path), do: File.read!(path) |> precompile

  # Recursively emits a content_for/2 clause per view — one matching the
  # atom name and one matching the string name.
  def view_to_function([], _), do: ""
  def view_to_function([view|t], path) do
    [ name, _, _ ] = String.split(view, ".")
    content = precompile_view path <> "/" <> view
    quote do
      def content_for(unquote(String.to_atom name), args) do
        Calliope.Render.eval unquote(content), args
      end
      def content_for(unquote(name), args) do
        Calliope.Render.eval unquote(content), args
      end
      unquote(view_to_function(t, path))
    end
  end
end
|
lib/calliope/engine.ex
| 0.732974
| 0.433082
|
engine.ex
|
starcoder
|
defmodule ExifParser.Tag.Value do
  alias ExifParser.Tag

  # 2^31
  @max_signed_32_bit_int 2_147_483_648
  # 2^15 — bug fix: was 632_768, which contradicts the "2^15" intent and made
  # 16-bit signed conversion a no-op for all real values.
  @max_signed_16_bit_int 32_768

  @typedoc """
  The data types that are represented in TIFF tags.
  """
  @type data_types ::
          :tiff_byte
          | :tiff_ascii
          | :tiff_short
          | :tiff_long
          | :tiff_rational
          | :tiff_sbyte
          | :tiff_undefined
          | :tiff_sshort
          | :tiff_slong
          | :tiff_srational
          | :tiff_sfloat
          | :tiff_dfloat

  @doc """
  The method provides the lookup for the 12 data_types.
  The data_type can inferred from the type_id in the tag buffer.
  """
  def type_id_to_data_type(1), do: :tiff_byte
  def type_id_to_data_type(2), do: :tiff_ascii
  def type_id_to_data_type(3), do: :tiff_short
  def type_id_to_data_type(4), do: :tiff_long
  def type_id_to_data_type(5), do: :tiff_rational
  def type_id_to_data_type(6), do: :tiff_sbyte
  def type_id_to_data_type(7), do: :tiff_undefined
  def type_id_to_data_type(8), do: :tiff_sshort
  def type_id_to_data_type(9), do: :tiff_slong
  def type_id_to_data_type(10), do: :tiff_srational
  def type_id_to_data_type(11), do: :tiff_sfloat
  def type_id_to_data_type(12), do: :tiff_dfloat

  @doc """
  The method provides the number of bytes that correspond to the data type.
  It will give the number of bytes for number of components in the tag.
  """
  def data_type_to_byte_length(data_type, component_count \\ 1)
  def data_type_to_byte_length(:tiff_byte, component_count), do: component_count
  def data_type_to_byte_length(:tiff_ascii, component_count), do: component_count
  def data_type_to_byte_length(:tiff_short, component_count), do: 2 * component_count
  def data_type_to_byte_length(:tiff_sshort, component_count), do: 2 * component_count
  def data_type_to_byte_length(:tiff_long, component_count), do: 4 * component_count
  def data_type_to_byte_length(:tiff_slong, component_count), do: 4 * component_count
  def data_type_to_byte_length(:tiff_rational, component_count), do: 8 * component_count
  def data_type_to_byte_length(:tiff_srational, component_count), do: 8 * component_count
  def data_type_to_byte_length(:tiff_undefined, component_count), do: component_count
  def data_type_to_byte_length(:tiff_sfloat, component_count), do: 4 * component_count
  def data_type_to_byte_length(:tiff_dfloat, component_count), do: 8 * component_count

  # Decodes `data_count` unsigned integers of `type_size` bytes each from the
  # binary; returns a single integer when data_count == 1, a list otherwise.
  defp decode_numeric("", _, _, _), do: 0

  defp decode_numeric(value, 1, type_size, endian) do
    <<data::binary-size(type_size), _::binary>> = value
    :binary.decode_unsigned(data, endian)
  end

  defp decode_numeric(value, data_count, type_size, endian) do
    decode_many_numeric(value, data_count, type_size, endian)
  end

  defp decode_many_numeric("", _, _, _), do: []
  defp decode_many_numeric(_value, 0, _type_size, _endian), do: []

  defp decode_many_numeric(value, data_count, type_size, endian) do
    <<data::binary-size(type_size), rest::binary>> = value
    [
      :binary.decode_unsigned(data, endian)
      | decode_many_numeric(rest, data_count - 1, type_size, endian)
    ]
  end

  # Reinterprets an unsigned integer as two's-complement signed.
  # `max_val` is 2^(n-1) for an n-bit value: anything at or above it encodes a
  # negative number and is shifted down by the full range 2^n (= 2 * max_val).
  # Bug fix: the previous `when x > max_val, do: x - max_val - 1` was wrong on
  # both the boundary (2^(n-1) itself is negative) and the offset (e.g.
  # 0xFFFFFFFF must decode to -1, not 2_147_483_646).
  defp maybe_signed_int(x, signed, max_val \\ @max_signed_32_bit_int)
  defp maybe_signed_int(x, :signed, max_val) when x >= max_val, do: x - 2 * max_val
  defp maybe_signed_int(x, _, _), do: x

  # Decodes TIFF rationals (two 4-byte ints: numerator, denominator) into a
  # float; a zero denominator yields 0 rather than raising.
  defp decode_rational(value, data_count, endian, signed \\ :unsigned)
  defp decode_rational("", _, _, _), do: 0

  defp decode_rational(value, 1, endian, signed) do
    <<numerator::binary-size(4), denominator::binary-size(4), _rest::binary>> = value
    numerator = :binary.decode_unsigned(numerator, endian) |> maybe_signed_int(signed)
    denominator = :binary.decode_unsigned(denominator, endian) |> maybe_signed_int(signed)

    if denominator == 0 do
      0
    else
      numerator / denominator
    end
  end

  defp decode_rational(value, data_count, endian, signed) do
    decode_many_rational(value, data_count, endian, signed)
  end

  defp decode_many_rational("", _, _, _), do: []

  defp decode_many_rational(value, data_count, endian, signed) do
    <<rational::binary-size(8), rest::binary>> = value
    [
      decode_rational(rational, 1, endian, signed)
      | decode_many_rational(rest, data_count - 1, endian, signed)
    ]
  end

  @doc """
  The method is used to decode the binary value in the tag buffer based on the data_type
  and endianess of the file.
  The total data size is computed based on the type size and the number of components.
  + If the data_size is less or equal to 4 bytes, the value binary represents the actual value.
  + If the data_size is greater than 4, the binary value represents the offset in
  the file buffer that points to the actual data.
  """
  @spec decode_tag(tag :: Tag, endian :: :little | :big) :: Tag
  def decode_tag(%Tag{data_type: :tiff_byte, data_count: data_count, value: value} = tag, endian),
    do: %Tag{tag | value: decode_numeric(value, data_count, 1, endian)}

  # ASCII values are NUL-terminated; the terminator is stripped here.
  def decode_tag(
        %Tag{data_type: :tiff_ascii, data_count: data_count, value: value} = tag,
        _endian
      ) do
    string_size = data_count - 1
    <<string::binary-size(string_size), _null::binary>> = value
    %Tag{tag | value: string}
  end

  def decode_tag(%Tag{data_type: :tiff_short, data_count: data_count, value: value} = tag, endian),
    do: %Tag{tag | value: decode_numeric(value, data_count, 2, endian)}

  def decode_tag(
        %Tag{data_type: :tiff_sshort, data_count: data_count, value: value} = tag,
        endian
      ),
      do: %Tag{
        tag
        | value:
            decode_numeric(value, data_count, 2, endian)
            |> maybe_signed_int(:signed, @max_signed_16_bit_int)
      }

  def decode_tag(%Tag{data_type: :tiff_long, data_count: data_count, value: value} = tag, endian),
    do: %Tag{tag | value: decode_numeric(value, data_count, 4, endian)}

  def decode_tag(%Tag{data_type: :tiff_slong, data_count: data_count, value: value} = tag, endian),
    do: %Tag{
      tag
      | value: decode_numeric(value, data_count, 4, endian) |> maybe_signed_int(:signed)
    }

  def decode_tag(
        %Tag{data_type: :tiff_rational, data_count: data_count, value: value} = tag,
        endian
      ),
      do: %Tag{tag | value: decode_rational(value, data_count, endian)}

  def decode_tag(
        %Tag{data_type: :tiff_srational, data_count: data_count, value: value} = tag,
        endian
      ),
      do: %Tag{tag | value: decode_rational(value, data_count, endian, :signed)}

  def decode_tag(
        %Tag{data_type: :tiff_undefined, data_count: data_count, value: value} = tag,
        endian
      ),
      do: %Tag{tag | value: decode_numeric(value, data_count, 1, endian)}

  # Unknown or already-decoded types pass through unchanged.
  def decode_tag(tag, _), do: tag
end
|
lib/exif_parser/tag/value.ex
| 0.609989
| 0.710653
|
value.ex
|
starcoder
|
defmodule Exzeitable.HTML.ActionButton do
  @moduledoc """
  For the actions buttons such as :new, :edit etc, as well as custom buttons.
  Custom buttons can be added to the list in :action_buttons

  ## Example

  ```elixir
  action_buttons: [:new, :edit, :super_cool_custom_action]
  ```

  You can then define the function called for that action in the module where the table is defined.
  Don't forget to add your csrf_token.

  ```elixir
  def super_cool_custom_action(socket, item, csrf_token) do
    link "SUPER AWESOME", to: Routes.super_cool_path(socket, :custom_action, item), "data-confirm": "Are you sure?", csrf_token: csrf_token
  end
  ```
  """
  use Exzeitable.HTML.Helpers

  @type action :: :new | :delete | :show | :edit

  @doc "Builds an individual button, takes an atom representing the action, and the assigns map"
  @spec build(:new, map) :: {:safe, iolist}
  @spec build(action, atom, map) :: {:safe, iolist}
  # :new for a top-level table (no parent resource in the route).
  def build(:new, %{parent: nil} = assigns) do
    %{socket: socket, routes: routes, path: path, action_buttons: action_buttons} = assigns

    if :new in action_buttons do
      routes
      |> apply(path, [socket, :new])
      |> html(:new, assigns)
    else
      ""
    end
  end

  # :new for a nested table; the parent is threaded into the route helper.
  def build(:new, %{parent: parent} = assigns) do
    %{socket: socket, routes: routes, path: path, action_buttons: action_buttons} = assigns

    if :new in action_buttons do
      routes
      |> apply(path, [socket, :new, parent])
      |> html(:new, assigns)
    else
      ""
    end
  end

  # Row-level actions on a non-nested resource.
  def build(action, entry, %{belongs_to: nil} = assigns) when action in [:delete, :show, :edit] do
    %{socket: socket, routes: routes, path: path} = assigns

    routes
    |> apply(path, [socket, action, entry])
    |> html(action, assigns)
  end

  # Row-level actions on a nested resource: the owning parent is looked up
  # from the entry's association and added to the route parameters.
  def build(action, entry, assigns) when action in [:delete, :show, :edit] do
    %{socket: socket, routes: routes, path: path} = assigns

    routes
    |> apply(path, [socket, action, parent_for(entry, assigns), entry])
    |> html(action, assigns)
  end

  # Custom actions such as :archive dispatch to a function of the same name
  # defined on the table module.
  def build(custom_action, entry, %{module: module, socket: socket, csrf_token: csrf_token}) do
    apply(module, custom_action, [socket, entry, csrf_token])
  end

  @spec html(String.t(), action, map) :: {:safe, iolist}
  # :delete needs a method override, confirmation prompt and CSRF token.
  defp html(route, :delete, %{csrf_token: csrf_token} = assigns) do
    assigns
    |> text(:delete)
    |> link(
      to: route,
      class: "exz-action-delete",
      method: :delete,
      "data-confirm": text(assigns, :confirm_action),
      csrf_token: csrf_token
    )
  end

  # :new / :show / :edit are plain links; the CSS class carries the action name.
  defp html(route, action, assigns) when action in [:new, :show, :edit] do
    assigns
    |> text(action)
    |> link(to: route, class: "exz-action-#{action}")
  end

  # Gets the parent that the nested resource belongs to. Raises when the
  # association was not selected in the query.
  def parent_for(entry, %{belongs_to: belongs_to}) do
    case Map.get(entry, belongs_to) do
      nil -> raise "You need to select the association in :belongs_to"
      parent -> parent
    end
  end
end
|
lib/exzeitable/html/action_button.ex
| 0.808446
| 0.823044
|
action_button.ex
|
starcoder
|
defmodule Number.Percentage do
  @moduledoc """
  Provides functions for converting numbers into percentages.
  """

  import Number.Delimit, only: [number_to_delimited: 2]

  @doc """
  Formats a number into a percentage string.

  ## Parameters

  * `number` - A value to convert. Can be any value that implements
    `Number.Conversion.to_float/1`.

  * `options` - A keyword list of options. See the documentation below for all
    available options.

  ## Options

  * `:precision` - The number of decimal places to include. Default: 3

  * `:delimiter` - The character to use to delimit the number by thousands.
    Default: ","

  * `:separator` - The character to use to separate the number from the decimal
    places. Default: "."

  Default configuration for these options can be specified in the `Number`
  application configuration.

      config :number,
        percentage: [
          delimiter: ",",
          separator: ".",
          precision: 2
        ]

  ## Examples

      iex> Number.Percentage.number_to_percentage(100)
      "100.000%"

      iex> Number.Percentage.number_to_percentage("98")
      "98.000%"

      iex> Number.Percentage.number_to_percentage(100, precision: 0)
      "100%"

      iex> Number.Percentage.number_to_percentage(1000, delimiter: '.', separator: ',')
      "1.000,000%"

      iex> Number.Percentage.number_to_percentage(302.24398923423, precision: 5)
      "302.24399%"

      iex> Number.Percentage.number_to_percentage(Decimal.from_float(59.236), precision: 2)
      "59.24%"
  """
  @spec number_to_percentage(number, Keyword.t()) :: String.t()
  def number_to_percentage(number, options \\ []) do
    # Caller options win over app config, which wins over library defaults.
    opts = Keyword.merge(config(), options)
    "#{number_to_delimited(number, opts)}%"
  end

  # Library defaults overlaid with the :number/:percentage app environment.
  defp config do
    env = Application.get_env(:number, :percentage, [])
    Keyword.merge([delimiter: ",", separator: ".", precision: 3], env)
  end
end
|
lib/number/percentage.ex
| 0.928368
| 0.680547
|
percentage.ex
|
starcoder
|
defmodule Mix.Tasks.Incident.Postgres.Init do
  @moduledoc """
  This task will generate some basic setup when using `PostgresAdapter`.

  When using this adapter you will need to have a table to store the events and aggregate locks.
  This task will generate an `Ecto` migration to create the `events` and `aggregate_locks` table
  with the needed columns and indexes. The task will respect your `Ecto` configuration for your
  `EventStoreRepo`.

  # Usage

  ```
  mix incident.postgres.init -r AppName.EventStoreRepo
  ```
  """
  use Mix.Task

  import Ecto.Migrator
  import Macro, only: [camelize: 1, underscore: 1]
  import Mix.{Ecto, Generator}

  @shortdoc "Generates the initial setup for Incident with Postgres Adapter"

  @impl true
  def run(["-r", repo]) do
    no_umbrella!("incident.postgres.init")

    event_store_repo =
      [repo]
      |> Module.concat()
      |> ensure_repo([])

    # Both files are generated within the same wall-clock second, so deriving
    # each version from timestamp/0 independently would produce two migrations
    # with the same version number, which Ecto rejects when migrating.
    # Generate one base version and bump the second by one.
    base = timestamp()

    generate_migration(event_store_repo, "create_events_table", base, &events_migration_template/1)

    generate_migration(
      event_store_repo,
      "create_aggregate_locks_table",
      next_version(base),
      &aggregate_locks_migration_template/1
    )
  end

  @impl true
  def run(_) do
    Mix.shell().error("""
    Error: you need to pass the Ecto Event Store Repo using the -r flag.

    Please notice that this task should run after you have your Ecto repos
    configuration all set in your application config files.

    # Usage

    ```
    mix incident.postgres.init -r AppName.EventStoreRepo
    ```
    """)
  end

  # Renders `template_fun` into a new migration file named
  # `<version>_<name>.exs` under the repo's migrations directory,
  # creating the directory when needed. The generated module is namespaced
  # under the repo's Migrations namespace.
  @spec generate_migration(module, String.t(), String.t(), (keyword -> iodata)) :: any
  defp generate_migration(repo, name, version, template_fun) do
    path = Path.relative_to(migrations_path(repo), Mix.Project.app_path())
    file = Path.join(path, "#{version}_#{underscore(name)}.exs")
    create_directory(path)

    content =
      [module_name: Module.concat([repo, Migrations, camelize(name)])]
      |> template_fun.()
      |> Code.format_string!()

    create_file(file, content)
  end

  # Current UTC time as a YYYYMMDDHHMMSS migration version string.
  @spec timestamp :: String.t()
  defp timestamp do
    {{y, m, d}, {hh, mm, ss}} = :calendar.universal_time()
    "#{y}#{pad(m)}#{pad(d)}#{pad(hh)}#{pad(mm)}#{pad(ss)}"
  end

  # Returns a version strictly greater than `version`. Migration versions only
  # need to be unique, monotonically comparable integers, so a plain numeric
  # increment is sufficient even if it does not map to a valid wall-clock time.
  @spec next_version(String.t()) :: String.t()
  defp next_version(version) do
    version |> String.to_integer() |> Kernel.+(1) |> Integer.to_string()
  end

  # Zero-pads single-digit date/time components.
  @spec pad(integer) :: String.t()
  defp pad(i) when i < 10, do: <<?0, ?0 + i>>
  defp pad(i), do: to_string(i)

  embed_template(:events_migration, """
  defmodule <%= inspect @module_name %> do
    use Ecto.Migration

    def change do
      create table(:events, primary_key: false) do
        add(:id, :bigserial, primary_key: true)
        add(:event_id, :binary_id, null: false)
        add(:aggregate_id, :string, null: false)
        add(:event_type, :string, null: false)
        add(:version, :integer, null: false)
        add(:event_date, :utc_datetime_usec, null: false)
        add(:event_data, :map, null: false)
        timestamps(type: :utc_datetime_usec, updated_at: false)
      end

      create(index(:events, [:aggregate_id]))
      create(index(:events, [:event_type]))
      create(index(:events, [:event_date]))
      create(index(:events, [:version]))
      create constraint(:events, :version_must_be_positive, check: "version > 0")
    end
  end
  """)

  embed_template(:aggregate_locks_migration, """
  defmodule <%= inspect @module_name %> do
    use Ecto.Migration

    def change do
      create table(:aggregate_locks, primary_key: false) do
        add(:id, :bigserial, primary_key: true)
        add(:aggregate_id, :string, null: false)
        add(:owner_id, :integer, null: false)
        add(:valid_until, :utc_datetime_usec, null: false)
      end

      create(index(:aggregate_locks, [:aggregate_id]))
      create(index(:aggregate_locks, [:aggregate_id, :owner_id]))
      create(index(:aggregate_locks, [:valid_until]))
    end
  end
  """)
end
|
lib/mix/tasks/postgres_init.ex
| 0.818556
| 0.740409
|
postgres_init.ex
|
starcoder
|
defmodule Range do
  @moduledoc """
  Defines a Range.
  """

  # NOTE(review): pre-struct era implementation — a range here is a plain
  # tagged tuple `{Range, first, last}`, not the `%Range{}` struct of modern
  # Elixir. Behavior is tied to the compiler version this file shipped with.

  @type t :: { Range, any, any }
  @type t(first, last) :: { Range, first, last }

  @doc """
  Creates a new range.
  """
  def new(first, last) do
    { Range, first, last }
  end

  @doc """
  Returns the first item of the range.
  """
  def first({ Range, first, _ }) do
    first
  end

  @doc """
  Returns the last item of the range.
  """
  def last({ Range, _, last }) do
    last
  end
end
defprotocol Range.Iterator do
  # Protocol implementations dispatch on the type of the range's first
  # element (see the Integer implementation below).

  @doc """
  Returns the function that calculates the next item.
  """
  def next(first, range)

  @doc """
  Count how many items are in the range.
  """
  def count(first, range)
end
defimpl Enumerable, for: Range do
  # Reduces over the range by repeatedly applying the stepper function from
  # Range.Iterator. The trailing boolean tracks direction: true when
  # ascending (last >= first), false when descending.
  def reduce(first .. last = range, acc, fun) do
    reduce(first, last, acc, fun, Range.Iterator.next(first, range), last >= first)
  end

  # Caller requested a halt: stop immediately with the accumulator.
  defp reduce(_x, _y, { :halt, acc }, _fun, _next, _up) do
    { :halted, acc }
  end

  # Caller suspended: hand back a continuation capturing the current position.
  defp reduce(x, y, { :suspend, acc }, fun, next, up) do
    { :suspended, acc, &reduce(x, y, &1, fun, next, up) }
  end

  # Ascending step: keep going while the cursor has not passed the upper bound.
  defp reduce(x, y, { :cont, acc }, fun, next, true) when x <= y do
    reduce(next.(x), y, fun.(x, acc), fun, next, true)
  end

  # Descending step: keep going while the cursor has not passed the lower bound.
  defp reduce(x, y, { :cont, acc }, fun, next, false) when x >= y do
    reduce(next.(x), y, fun.(x, acc), fun, next, false)
  end

  # Cursor moved past the bound: enumeration is complete.
  defp reduce(_, _, { :cont, acc }, _fun, _next, _up) do
    { :done, acc }
  end

  # Membership works for both ascending and descending ranges by ordering
  # the bounds before comparing.
  def member?(first .. last, value) do
    if first <= last do
      { :ok, first <= value and value <= last }
    else
      { :ok, last <= value and value <= first }
    end
  end

  # Counting is delegated to the type-specific Range.Iterator implementation.
  def count(first .. _ = range) do
    { :ok, Range.Iterator.count(first, range) }
  end
end
defimpl Range.Iterator, for: Integer do
  # Stepper for integer ranges: increment when ascending, decrement when
  # descending. Only defined when the other endpoint is also an integer.
  def next(first, _ .. last) when is_integer(last) do
    if last >= first do
      &(&1 + 1)
    else
      &(&1 - 1)
    end
  end

  # Inclusive element count, valid in either direction.
  def count(first, _ .. last) when is_integer(last) do
    if last >= first do
      last - first + 1
    else
      first - last + 1
    end
  end
end
defimpl Inspect, for: Range do
  import Inspect.Algebra

  # Renders a range as `first..last`, inspecting both endpoints with the
  # caller's inspect options.
  def inspect(first .. last, opts) do
    concat [to_doc(first, opts), "..", to_doc(last, opts)]
  end
end
|
lib/elixir/lib/range.ex
| 0.816077
| 0.574693
|
range.ex
|
starcoder
|
defmodule Workflows.Retrier do
  @moduledoc """
  Implements a state retrier.

  ## References

    * https://states-language.net/#errors
  """

  @type t :: term()

  defstruct [:error_equals, :interval_seconds, :max_attempts, :backoff_rate, :attempt]

  @default_interval_seconds 1
  @default_max_attempts 3
  @default_backoff_rate 2.0

  @doc """
  Create a new Retrier.
  """
  @spec create(any()) :: {:ok, t()} | {:error, term()}
  def create(%{"ErrorEquals" => errors} = attrs) do
    do_create(
      errors,
      Map.get(attrs, "IntervalSeconds", @default_interval_seconds),
      Map.get(attrs, "MaxAttempts", @default_max_attempts),
      Map.get(attrs, "BackoffRate", @default_backoff_rate)
    )
  end

  def create(_attrs), do: {:error, :missing_error_equals}

  @doc """
  Advance the retrier by one attempt.

  Returns `{:wait, seconds, retrier}` while attempts remain, where the wait
  grows linearly with the attempt counter, or `{:max_attempts, retrier}` once
  the configured attempt budget is exhausted.
  """
  def next(%__MODULE__{attempt: attempt, max_attempts: max_attempts} = retrier)
      when attempt < max_attempts do
    delay = retrier.interval_seconds + retrier.backoff_rate * attempt
    {:wait, delay, %__MODULE__{retrier | attempt: attempt + 1}}
  end

  def next(retrier), do: {:max_attempts, retrier}

  # Validation clauses: each rejects one malformed field with a descriptive
  # message; the final clause builds the struct with a zeroed attempt counter.
  defp do_create([], _interval_seconds, _max_attempts, _backoff_rate),
    do: {:error, "ErrorEquals must be non empty"}

  defp do_create(_errors, interval_seconds, _max_attempts, _backoff_rate)
       when not is_integer(interval_seconds) or interval_seconds <= 0,
       do: {:error, "IntervalSeconds must be a positive integer"}

  defp do_create(_errors, _interval_seconds, max_attempts, _backoff_rate)
       when not is_integer(max_attempts) or max_attempts < 0,
       do: {:error, "MaxAttempts must be a non-negative integer"}

  defp do_create(_errors, _interval_seconds, _max_attempts, backoff_rate)
       when backoff_rate < 1.0,
       do: {:error, "BackoffRate must be a greater than or equal to 1.0"}

  defp do_create(errors, interval_seconds, max_attempts, backoff_rate) do
    {:ok,
     %__MODULE__{
       error_equals: errors,
       interval_seconds: interval_seconds,
       max_attempts: max_attempts,
       backoff_rate: backoff_rate,
       attempt: 0
     }}
  end
end
|
lib/workflows/retrier.ex
| 0.833121
| 0.453201
|
retrier.ex
|
starcoder
|
defmodule Aoc.Year2015.Day02 do
  @moduledoc """
  Solution to Day 02 of 2015: I Was Told There Would Be No Math

  ## --- Day 02: I Was Told There Would Be No Math ---

  The elves are running low on wrapping paper, and so they need to submit an order
  for more. They have a list of the dimensions (length `l`, width `w`, and height
  `h`) of each present, and only want to order exactly as much as they need.

  Fortunately, every present is a box (a perfect right rectangular prism), which
  makes calculating the required wrapping paper for each gift a little easier:
  find the surface area of the box, which is `2*l*w + 2*w*h + 2*h*l`. The elves
  also need a little extra paper for each present: the area of the smallest side.

  For example:

  - A present with dimensions `2x3x4` requires `2*6 + 2*12 + 2*8 = 52` square feet of wrapping paper plus `6` square feet of slack, for a total of `58` square feet.
  - A present with dimensions `1x1x10` requires `2*1 + 2*10 + 2*10 = 42` square feet of wrapping paper plus `1` square foot of slack, for a total of `43` square feet.

  All numbers in the elves' list are in feet. How many total *square feet of
  wrapping paper* should they order?

  ## --- Part Two ---

  The elves are also running low on ribbon. Ribbon is all the same width, so they
  only have to worry about the length they need to order, which they would again
  like to be exact.

  The ribbon required to wrap a present is the shortest distance around its sides,
  or the smallest perimeter of any one face. Each present also requires a bow made
  out of ribbon as well; the feet of ribbon required for the perfect bow is equal
  to the cubic feet of volume of the present. Don't ask how they tie the bow,
  though; they'll never tell.

  For example:

  - A present with dimensions `2x3x4` requires `2+2+3+3 = 10` feet of ribbon to wrap the present plus `2*3*4 = 24` feet of ribbon for the bow, for a total of `34` feet.
  - A present with dimensions `1x1x10` requires `1+1+1+1 = 4` feet of ribbon to wrap the present plus `1*1*10 = 10` feet of ribbon for the bow, for a total of `14` feet.

  How many total *feet of ribbon* should they order?
  """

  @doc "Total square feet of wrapping paper needed for all boxes in `input`."
  def part_1(input) do
    input
    |> parse_boxes()
    |> Enum.map(&paper_needed/1)
    |> Enum.sum()
  end

  @doc "Total feet of ribbon needed for all boxes in `input`."
  def part_2(input) do
    input
    |> parse_boxes()
    |> Enum.map(&ribbon_needed/1)
    |> Enum.sum()
  end

  # Full surface area plus slack equal to the area of the smallest side.
  defp paper_needed([l, w, h]) do
    sides = [l * w, w * h, h * l]
    2 * Enum.sum(sides) + Enum.min(sides)
  end

  # Smallest face perimeter (the two shortest edges) plus the volume for the bow.
  defp ribbon_needed([l, w, h] = dims) do
    [a, b | _] = Enum.sort(dims)
    2 * (a + b) + l * w * h
  end

  # "LxWxH" lines -> [[l, w, h], ...] as integers.
  defp parse_boxes(input) do
    input
    |> String.split()
    |> Enum.map(fn line ->
      line
      |> String.split("x")
      |> Enum.map(&String.to_integer/1)
    end)
  end
end
|
lib/aoc/year_2015/day_02.ex
| 0.899674
| 0.881207
|
day_02.ex
|
starcoder
|
defmodule AWS.API.Pricing do
  @moduledoc """
  AWS Price List Service API (AWS Price List Service) is a centralized and
  convenient way to programmatically query Amazon Web Services for services,
  products, and pricing information.

  The AWS Price List Service uses standardized product attributes such as
  `Location`, `Storage Class`, and `Operating System`, and provides prices at the
  SKU level. You can use the AWS Price List Service to build cost control and
  scenario planning tools, reconcile billing data, forecast future spend for
  budgeting purposes, and provide cost benefit analysis that compare your internal
  workloads with AWS.

  Use `GetServices` without a service code to retrieve the service codes for all
  AWS services, then `GetServices` with a service code to retreive the attribute
  names for that service. After you have the service code and attribute names, you
  can use `GetAttributeValues` to see what values are available for an attribute.
  With the service code and an attribute name and value, you can use `GetProducts`
  to find specific products that you're interested in, such as an `AmazonEC2`
  instance, with a `Provisioned IOPS` `volumeType`.

  Service Endpoint

  AWS Price List Service API provides the following two endpoints:

    * https://api.pricing.us-east-1.amazonaws.com

    * https://api.pricing.ap-south-1.amazonaws.com
  """

  alias AWS.Client
  alias AWS.Request

  # Static service descriptor shared by every operation in this module.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: "AWS Pricing",
      api_version: "2017-10-15",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "api.pricing",
      global?: false,
      protocol: "json",
      service_id: nil,
      signature_version: "v4",
      signing_name: "pricing",
      target_prefix: "AWSPriceListService"
    }
  end

  @doc """
  Returns the metadata for one service or a list of the metadata for all services.

  Use this without a service code to get the service codes for all services. Use
  it with a service code, such as `AmazonEC2`, to get information specific to that
  service, such as the attribute names available for that service. For example,
  some of the attribute names available for EC2 are `volumeType`, `maxIopsVolume`,
  `operation`, `locationType`, and `instanceCapacity10xlarge`.
  """
  def describe_services(%Client{} = client, input, options \\ []) do
    request(client, "DescribeServices", input, options)
  end

  @doc """
  Returns a list of attribute values.

  Attibutes are similar to the details in a Price List API offer file. For a list
  of available attributes, see [Offer File Definitions](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/reading-an-offer.html#pps-defs)
  in the [AWS Billing and Cost Management User Guide](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/billing-what-is.html).
  """
  def get_attribute_values(%Client{} = client, input, options \\ []) do
    request(client, "GetAttributeValues", input, options)
  end

  @doc """
  Returns a list of all products that match the filter criteria.
  """
  def get_products(%Client{} = client, input, options \\ []) do
    request(client, "GetProducts", input, options)
  end

  # Every Pricing operation is a JSON POST against the shared service metadata.
  defp request(client, action, input, options) do
    Request.request_post(client, metadata(), action, input, options)
  end
end
|
lib/aws/generated/api_pricing.ex
| 0.84699
| 0.635654
|
api_pricing.ex
|
starcoder
|
defmodule Calendar.AmbiguousDateTime do
  @moduledoc """
  AmbiguousDateTime provides a struct which represents an ambiguous time and
  date in a certain time zone. These structs will be returned from the
  DateTime.from_erl/2 function when the provided time is ambiguous.

  AmbiguousDateTime contains two DateTime structs. For instance they can
  represent both a DST and non-DST time. If clocks are turned back an hour
  at 2:00 when going from summer to winter time then the "wall time" between
  1:00 and 2:00 happens twice. One of them is on DST and one of them is not.

  The provided functions can be used to choose one of the two DateTime structs.
  """

  defstruct [:possible_date_times]

  @doc """
  Disambiguate an AmbiguousDateTime by total offset. Total offset would be UTC
  offset plus standard offset.

  If only one of the possible data times contained in the ambiguous_date_time
  matches the offset a tuple with :ok and the matching DateTime is returned.

  ## Total offset

  For instance, at the time of this writing, for Berlin there is a 1 hour UTC
  offset. In the summer, there is another hour of standard offset. This means
  that in the summer the total offset is 2 hours or 7200 seconds.

  ## Examples

      iex> {:ambiguous, am} = Calendar.DateTime.from_erl({{2014, 3, 9}, {1, 1, 1}}, "America/Montevideo"); am |> Calendar.AmbiguousDateTime.disamb_total_off(-10800)
      {:ok, %DateTime{zone_abbr: "UYT", day: 9, hour: 1, minute: 1, month: 3, second: 1, std_offset: 0, time_zone: "America/Montevideo", utc_offset: -10800, year: 2014, microsecond: {0,0}}}

      iex> {:ambiguous, am} = Calendar.DateTime.from_erl({{2014, 3, 9}, {1, 1, 1}}, "America/Montevideo"); am |> Calendar.AmbiguousDateTime.disamb_total_off(0)
      {:error, :no_matches}
  """
  def disamb_total_off(ambiguous_date_time, total_off_secs) do
    disamb(ambiguous_date_time, fn dt ->
      dt.utc_offset + dt.std_offset == total_off_secs
    end)
  end

  @doc """
  Disambiguate an AmbiguousDateTime according to filtering function provided
  as the second parameter

  ## Examples

  We provide a function that returns true if the abbreviation is "UYT"

      iex> {:ambiguous, am} = Calendar.DateTime.from_erl({{2014, 3, 9}, {1, 1, 1}}, "America/Montevideo"); am |> Calendar.AmbiguousDateTime.disamb(fn(dt) -> dt.zone_abbr == "UYT" end)
      {:ok, %DateTime{zone_abbr: "UYT", day: 9, hour: 1, minute: 1, month: 3, second: 1, std_offset: 0, time_zone: "America/Montevideo", utc_offset: -10800, year: 2014, microsecond: {0, 0}}}

  A function that always returns false

      iex> {:ambiguous, am} = Calendar.DateTime.from_erl({{2014, 3, 9}, {1, 1, 1}}, "America/Montevideo"); am |> Calendar.AmbiguousDateTime.disamb(fn(_dt) -> false end)
      {:error, :no_matches}

  A function that always returns true

      iex> {:ambiguous, am} = Calendar.DateTime.from_erl({{2014, 3, 9}, {1, 1, 1}}, "America/Montevideo"); am |> Calendar.AmbiguousDateTime.disamb(fn(_dt) -> true end)
      {:error, :more_than_one_match}
  """
  def disamb(ambiguous_date_time, filtering_func) do
    # Success only when the filter singles out exactly one candidate.
    case Enum.filter(ambiguous_date_time.possible_date_times, filtering_func) do
      [match] -> {:ok, match}
      [] -> {:error, :no_matches}
      [_ | _] -> {:error, :more_than_one_match}
    end
  end
end
|
data/web/deps/calendar/lib/calendar/ambiguous_date_time.ex
| 0.877418
| 0.656469
|
ambiguous_date_time.ex
|
starcoder
|
defmodule MeshxRpc.Server.Pool do
  # Kept commented out: start_link/3 (ranch 2.0) and start_link/4 (ranch 1.8)
  # have different arities, so declaring the behaviour would warn on one of them.
  # @behaviour :ranch_protocol

  alias MeshxRpc.App.T
  alias MeshxRpc.Common.{Options, Structs.Data, Structs.Svc}

  # Server-only options; validated together with Options.common() in
  # child_spec/2 and interpolated into @moduledoc below, so this attribute
  # must be defined before the moduledoc.
  @opts [
    timeout_execute: [
      type: :timeout,
      default: :infinity,
      doc: """
      request function execution timeout, milliseconds. If timeout is exceeded request function is killed and remote RPC client call will error with: `{:error_rpc, :killed}`.
      """
    ]
  ]
  # Worker module spawned (via :proc_lib) for each accepted connection.
  @worker_mod MeshxRpc.Server.Worker
  @transport :ranch_tcp

  @moduledoc """
  RPC server workers pool.

  ## Configuration

  RPC server pool is configured with `opts` argument in `child_spec/2` function. Configuration options common to both RPC client and server are described in `MeshxRpc` "Common configuration" section.

  Configuration options specific to RPC server `opts` argument in `child_spec/2`:

  #{NimbleOptions.docs(@opts)}
  """

  @doc """
  Returns a specification to start a RPC server workers pool under a supervisor.

  `id` is a pool id which should be a name of a module implementing user RPC functions.

  `opts` are options described in "Configuration" section above and in `MeshxRpc` "Common configuration" section.

  ```elixir
  iex(1)> MeshxRpc.Server.Pool.child_spec(Example1.Server, address: {:uds, "/tmp/meshx.sock"})
  %{
    id: {:ranch_embedded_sup, Example1.Server},
    start: {:ranch_embedded_sup, :start_link,
     [
       Example1.Server,
       :ranch_tcp,
       %{socket_opts: [ip: {:local, "/tmp/meshx.sock"}, port: 0]},
       MeshxRpc.Server.Pool,
       [
         ...
       ]
     ]},
    type: :supervisor
  }
  ```
  """
  @spec child_spec(id :: atom(), opts :: Keyword.t()) :: Supervisor.child_spec()
  def child_spec(id, opts \\ []) do
    # Reject unknown/invalid options up front (raises on failure).
    opts = NimbleOptions.validate!(opts, @opts ++ Options.common())
    node_ref_mfa = Keyword.fetch!(opts, :node_ref_mfa)
    # Service reference defaults to the pool id, capped at 256 characters.
    svc_ref_mfa = Keyword.get(opts, :svc_ref_mfa, id |> to_string() |> String.slice(0..255))
    conn_ref_mfa = Keyword.fetch!(opts, :conn_ref_mfa)

    data =
      Data.init(id, opts)
      |> Map.put(:transport, @transport)
      |> Map.put(:timeout_execute, Keyword.fetch!(opts, :timeout_execute))
      |> Map.replace(:local, Svc.init(node_ref_mfa, svc_ref_mfa, conn_ref_mfa))

    {_type, ip, port} = Map.fetch!(data, :address)
    pool_opts = T.merge_improper_keyword([ip: ip, port: port], Keyword.fetch!(opts, :pool_opts))
    gen_statem_opts = Keyword.fetch!(opts, :gen_statem_opts)

    # ranch 2.0 returns a child-spec map; ranch 1.x returns the legacy
    # 6-element tuple, which is normalized into a map here.
    case :ranch.child_spec(id, @transport, pool_opts, __MODULE__, [data, gen_statem_opts]) do
      chi when is_map(chi) ->
        chi

      {id, start, restart, shutdown, type, modules} ->
        %{id: id, start: start, restart: restart, shutdown: shutdown, type: type, modules: modules}
    end
  end

  # start_link/3 compatible with :ranch 2.0
  # @impl :ranch_protocol
  def start_link(_pool_id, _transport, [opts, gen_statem_opts]),
    do: {:ok, :proc_lib.spawn_link(@worker_mod, :init, [[opts, gen_statem_opts]])}

  # start_link/4 compatible with :ranch 1.8.0
  # @impl :ranch_protocol
  def start_link(_pool_id, _socket, _transport, [opts, gen_statem_opts]),
    do: {:ok, :proc_lib.spawn_link(@worker_mod, :init, [[opts, gen_statem_opts]])}
end
|
lib/server/pool.ex
| 0.834238
| 0.67393
|
pool.ex
|
starcoder
|
defmodule Brave.Equiptment do
  @moduledoc """
  Static equipment tables (armor, weapons, ammo, gear) and helpers for
  rolling random items from them.
  """

  # NOTE(review): random_armor/0 rolls its own "No Armor" row with
  # defense 10 / bonus 0, which differs from this table's defense 11 /
  # bonus 1 entry — confirm which values are canonical before reconciling.
  @armor [
    %{name: "No Armor", defense: 11, bonus: 1, slots: 0, quality: 7},
    %{name: "Gambeson", defense: 12, bonus: 2, slots: 1, quality: 3},
    %{name: "Brigandine", defense: 13, bonus: 3, slots: 1, quality: 4},
    %{name: "Chain", defense: 14, bonus: 4, slots: 3, quality: 5},
    %{name: "Helmet", defense: 0, bonus: 1, slots: 1, quality: 1},
    %{name: "Shield", defense: 0, bonus: 1, slots: 1, quality: 1}
  ]
  @weapons [
    %{
      name: "Dagger, Cudgel, Sickle, Staff, etc.",
      damage: "1d6",
      type: "one-handed",
      slots: 1,
      quality: 3,
      value: 5
    },
    %{
      name: "Spear, Sword, Mace, Axe, Flail, etc.",
      damage: "1d8",
      type: "one-handed",
      slots: 2,
      quality: 3,
      value: 10
    },
    %{
      name: "Halberd, War Hammer, Long Sword, Battle Axe, etc.",
      damage: "1d10",
      type: "two-handed",
      slots: 3,
      quality: 3,
      value: 20
    },
    %{name: "Sling", damage: "1d4", type: "one-handed", slots: 1, quality: 3, value: 5},
    %{name: "Bow", damage: "1d6", type: "two-handed", slots: 2, quality: 3, value: 15},
    %{name: "Crossbow", damage: "1d8", type: "two-handed", slots: 3, quality: 3, value: 60}
  ]
  @ammo [
    %{name: "Arrows/Bolts (20)", damage: nil, type: "ammo", quality: 3, slots: 1, value: 5},
    %{name: "Quiver (capacity 20)", damage: nil, type: "ammo", quality: 3, slots: 0, value: 10}
  ]
  @general_gear [
    %{name: "Pole, 10ft", slots: 1},
    %{name: "Sack", slots: 1},
    %{name: "Tent", slots: 1},
    %{name: "Spikes, 5", slots: 1},
    %{name: "Torches, 5", slots: 1},
    %{name: "Saw", slots: 1},
    %{name: "Bucket", slots: 1},
    %{name: "Caltrops", slots: 1},
    %{name: "Chisel", slots: 1},
    %{name: "Drill", slots: 1},
    %{name: "Fishing rod", slots: 1},
    %{name: "Marbles", slots: 1},
    %{name: "Glue", slots: 1},
    %{name: "Pick", slots: 1},
    %{name: "Hourglass", slots: 1},
    %{name: "Net", slots: 1},
    %{name: "Tongs", slots: 1},
    %{name: "Lockpicks", slots: 1},
    %{name: "Metal file", slots: 1},
    %{name: "Nails", slots: 1}
  ]
  @general_gear2 [
    %{name: "Inscense", slots: 1},
    %{name: "Sponge", slots: 1},
    %{name: "Lens", slots: 1},
    %{name: "Perfume", slots: 1},
    %{name: "Horn", slots: 1},
    %{name: "Bottle", slots: 1},
    %{name: "Soap", slots: 1},
    %{name: "Spyglass", slots: 1},
    %{name: "Tar pot", slots: 1},
    %{name: "Twine", slots: 1},
    %{name: "Fake jewels", slots: 1},
    %{name: "Blank book", slots: 1},
    %{name: "Card deck", slots: 1},
    %{name: "Dice set", slots: 1},
    %{name: "Cook pots", slots: 1},
    %{name: "Face paint", slots: 1},
    %{name: "Whistle", slots: 1},
    %{name: "Instrument", slots: 1},
    %{name: "Quill and Ink", slots: 1},
    %{name: "Small bell", slots: 1}
  ]
  @dungeoneering_gear [
    %{name: "Rope 50ft", slots: 1},
    %{name: "Pulleys", slots: 1},
    %{name: "Candles, 5", slots: 1},
    %{name: "Chain, 10ft", slots: 1},
    %{name: "Chalk 10", slots: 1},
    %{name: "Crowbar", slots: 1},
    %{name: "Tinderbox", slots: 1},
    %{name: "Grappling hook", slots: 1},
    %{name: "Hammer", slots: 1},
    %{name: "Waterskin", slots: 1},
    %{name: "Lantern", slots: 1},
    %{name: "Lamp oil", slots: 1},
    %{name: "Padlock", slots: 1},
    %{name: "Manacles", slots: 1},
    %{name: "Mirror", slots: 1},
    %{name: "Pole, 10ft", slots: 1},
    %{name: "Sack", slots: 1},
    %{name: "Tent", slots: 1},
    %{name: "Spikes, 5", slots: 1},
    %{name: "Torches, 5", slots: 1}
  ]

  @doc "The full armor table (body armor plus helmet/shield rows)."
  # Added for consistency with weapons/0, ammo/0 and gear/0; previously
  # @armor was defined but never referenced (unused-attribute warning).
  def armor, do: @armor

  @doc "The weapons table."
  def weapons, do: @weapons

  @doc "The ammunition table."
  def ammo, do: @ammo

  @doc "All general and dungeoneering gear as a single list."
  def gear, do: @general_gear ++ @general_gear2 ++ @dungeoneering_gear

  @doc "Rolls 1d20 for a random body armor entry."
  def random_armor do
    roll = Enum.random(1..20)

    cond do
      roll in 1..3 -> %{name: "No Armor", defense: 10, bonus: 0, slots: 0, quality: 7}
      roll in 4..14 -> %{name: "Gambeson", defense: 12, bonus: 2, slots: 1, quality: 3}
      roll in 15..19 -> %{name: "Brigandine", defense: 13, bonus: 3, slots: 1, quality: 4}
      roll in [20] -> %{name: "Chain", defense: 14, bonus: 4, slots: 3, quality: 5}
    end
  end

  @doc """
  Rolls 1d20 for helmet/shield: returns a single map (possibly the "None"
  placeholder), or a list of both on a natural 20.
  """
  def random_helmets_and_shields do
    roll = Enum.random(1..20)

    cond do
      roll in 1..13 ->
        %{name: "None", defense: 0, bonus: 0, slots: 0, quality: 0}

      roll in 14..16 ->
        %{name: "Helmet", defense: 0, bonus: 1, slots: 1, quality: 1}

      roll in 17..19 ->
        %{name: "Shield", defense: 0, bonus: 1, slots: 1, quality: 1}

      roll in [20] ->
        [
          %{name: "Helmet", defense: 0, bonus: 1, slots: 1, quality: 1},
          %{name: "Shield", defense: 0, bonus: 1, slots: 1, quality: 1}
        ]
    end
  end

  @doc "Returns a single-element list with one random weapon."
  def random_weapon do
    weapons()
    |> Enum.shuffle()
    |> Enum.take(1)
  end

  @doc "Returns a single-element list with one random item from the first gear table."
  def random_general_gear do
    @general_gear
    |> Enum.shuffle()
    |> Enum.take(1)
  end

  @doc "Returns a single-element list with one random item from the second gear table."
  def random_general_gear_two do
    @general_gear2
    |> Enum.shuffle()
    |> Enum.take(1)
  end

  @doc "Returns a single-element list with one random dungeoneering gear item."
  def random_dungeoneering_gear do
    @dungeoneering_gear
    |> Enum.shuffle()
    |> Enum.take(1)
  end
end
|
lib/brave/equiptment.ex
| 0.524395
| 0.691684
|
equiptment.ex
|
starcoder
|
defmodule Daguex.Processor.StorageHelper do
@moduledoc """
Helpers for using `Daguex.Processor` in the `Daguex.Processor`
"""
@local_storage_key "__local__"
alias Daguex.{Image, ImageFile, ImageHelper}
# Storage name under which locally-cached variants are registered on the image.
def local_storage_key, do: @local_storage_key
# Stores `image_file` (the rendered `format` variant) in the local storage,
# records it on the image, and primes the per-context cache with the copy
# read back from storage.
#
# Returns `{:ok, context}` with the updated image and cache; any failing
# step's error tuple passes through unchanged (the `with` has no `else`).
def put_local_image(context, image_file, format) do
  # Derived storage key for this variant of the image.
  key = ImageHelper.variant_key(image_file, context.image.key, format)
  bucket = Keyword.get(context.opts, :bucket)
  {local_storage, opts} = context.local_storage

  with {:ok, image} <- put_image(context.image, image_file, bucket, key, format, @local_storage_key, context.local_storage),
       # Key actually recorded by the storage backend (may differ from `key`).
       store_key <- get_key(image, @local_storage_key, format),
       image <- update_variant(image, format, key, image_file),
       # Read the stored file back so the cached ImageFile reflects what was
       # actually persisted.
       {:ok, path} <- local_storage.get(store_key, opts),
       {:ok, image_file} <- ImageFile.from_file(path) do
    context = %{context | image: image} |> cache_local_image(format, image_file)
    {:ok, context}
  end
end
def load_local_image(context, format) do
case get_cached_local_image(context, format) do
nil ->
{local_storage, opts} = context.local_storage
case get_key(context.image, @local_storage_key, format) do
nil -> {:error, :not_found}
key ->
with {:ok, path} <- local_storage.get(key, opts),
{:ok, image_file} <- ImageFile.from_file(path) do
context = context |> cache_local_image(format, image_file)
{:ok, context, image_file}
end
end
local_image -> {:ok, context, local_image}
end
end
defp cache_local_image(context, format, image_file) do
update_in context.private, fn private ->
Map.update(private, :local_images, %{format => image_file}, &Map.put(&1, format, image_file))
end
end
defp get_cached_local_image(context, format) do
get_in context.private, [:local_images, format]
end
def put_image(image, image_file, bucket, key, format, storage_name, {storage, opts}) do
case storage.put(image_file.uri |> to_string, key, bucket, opts) do
{:ok, key} ->
{:ok, update_key(image, storage_name, format, key)}
{:ok, key, extra} ->
image = image |> update_key(storage_name, format, key) |> update_extra(storage_name, format, extra)
{:ok, image}
error -> error
end
end
def get_image(image, format, storage_name, {storage, opts}) do
prepare_params(image, format, storage_name, fn key, extra ->
storage.get(key, extra, opts)
end)
end
def resolve_image(image, format, storage_name, {storage, opts}) do
prepare_params(image, format, storage_name, fn key, extra ->
storage.resolve(key, extra, opts)
end)
end
def saved?(image, storage_name, format) do
get_key(image, storage_name, format)
end
defp prepare_params(image, format, storage_name, callback) do
case Image.get_variant(image, format) do
nil -> {:error, :not_found}
_ ->
key = get_key(image, storage_name, format)
extra = get_extra(image, storage_name, format)
callback.(key, extra)
end
end
def get_extra(image, storage_name, format) do
Image.get_data(image, ["extras", storage_name, format])
end
def get_key(image, storage_name, format) do
Image.get_data(image, ["ids", storage_name, format])
end
def update_key(image, storage_name, format, key) do
Image.put_data(image, ["ids", storage_name, format], key)
end
def update_variant(image, format, key, image_file) do
Image.add_variant(image, format, key, image_file.width, image_file.height, image_file.type)
end
def update_extra(image, storage_name, format, extra) do
Image.put_data(image, ["extras", storage_name, format], extra)
end
end
|
lib/daguex/processor/storage_helper.ex
| 0.778313
| 0.460653
|
storage_helper.ex
|
starcoder
|
defmodule Day05 do
  @moduledoc """
  Advent of Code 2021, day 5: count the grid points covered by two or more
  hydrothermal vent lines (part 1: axis-aligned lines only; part 2: all lines).
  """

  @typedoc """
  width, height, data
  """
  @type grid_t :: %{width: integer, height: integer, data: tuple}
  @typedoc """
  x, y
  """
  @type point_t :: {integer, integer}
  @typedoc """
  start, end
  """
  @type line_t :: {point_t, point_t}

  @doc ~S|Parses a "x,y" string into a `{x, y}` point.|
  @spec parse_point(String.t()) :: point_t
  def parse_point(s),
    do:
      s
      |> String.split(~r/\s*,\s*/, trim: true)
      |> Enum.map(&String.to_integer/1)
      |> List.to_tuple()

  @doc ~S|Parses a "x0,y0 -> x1,y1" string into a `{{x0, y0}, {x1, y1}}` line.|
  @spec parse_line(String.t()) :: line_t
  def parse_line(s),
    do: s |> String.split(~r/\s*->\s*/, trim: true) |> Enum.map(&parse_point/1) |> List.to_tuple()

  @doc "Smallest `{width, height}` that contains every endpoint of `lines`."
  @spec size_for_lines(list(line_t)) :: {number, number}
  def size_for_lines(lines) do
    Enum.reduce(lines, {0, 0}, fn {{x0, y0}, {x1, y1}}, {x, y} ->
      # +1 because a point at coordinate n needs n+1 cells on that axis.
      {Enum.max([x, x0 + 1, x1 + 1]), Enum.max([y, y0 + 1, y1 + 1])}
    end)
  end

  @doc "Builds a `width * height` grid with every cell set to `default`."
  @spec empty_grid(number, number, any) :: grid_t
  def empty_grid(width, height, default),
    do: %{width: width, height: height, data: Tuple.duplicate(default, width * height)}

  @doc "Reads the cell at `{x, y}` (row-major layout)."
  @spec get_grid_point(grid_t, point_t) :: any
  def get_grid_point(grid, {x, y}), do: elem(grid.data, grid.width * y + x)

  @doc "Returns a new grid with the cell at `{x, y}` set to `value`."
  @spec set_grid_point(grid_t, point_t, any) :: grid_t
  def set_grid_point(grid, {x, y}, value),
    do: %{grid | data: put_elem(grid.data, grid.width * y + x, value)}

  @doc "Returns a new grid with the cell at `point` incremented by one."
  @spec increment_grid_point(grid_t, point_t) :: grid_t
  def increment_grid_point(grid, point),
    do: set_grid_point(grid, point, get_grid_point(grid, point) + 1)

  @doc "Increments every grid cell covered by the line (vertical, horizontal, or 45° diagonal)."
  @spec add_line_to_grid(line_t, grid_t) :: grid_t
  # Vertical line: both endpoints share the same x (repeated pattern variable).
  def add_line_to_grid({{x, y0}, {x, y1}}, grid) do
    min(y0, y1)..max(y0, y1)
    |> Enum.reduce(grid, &increment_grid_point(&2, {x, &1}))
  end

  # Horizontal line: both endpoints share the same y.
  def add_line_to_grid({{x0, y}, {x1, y}}, grid) do
    min(x0, x1)..max(x0, x1)
    |> Enum.reduce(grid, &increment_grid_point(&2, {&1, y}))
  end

  # Diagonal line (the puzzle guarantees exactly 45°): walk both axes in lockstep.
  def add_line_to_grid({{x0, y0}, {x1, y1}}, grid) do
    step_x = if x0 < x1, do: 1, else: -1
    step_y = if y0 < y1, do: 1, else: -1

    Enum.zip(Range.new(x0, x1, step_x), Range.new(y0, y1, step_y))
    |> Enum.reduce(grid, &increment_grid_point(&2, &1))
  end

  @doc "Parses the whole puzzle input into a list of lines."
  @spec parse_data(String.t()) :: list(line_t)
  def parse_data(contents),
    do: String.split(contents, "\n", trim: true) |> Enum.map(&parse_line/1)

  @doc "Counts points covered at least twice, considering only horizontal/vertical lines."
  def part_1(contents) do
    contents
    # Reuses parse_data/1 instead of duplicating the split/map pipeline.
    |> parse_data()
    |> Enum.filter(fn {{x0, y0}, {x1, y1}} -> x0 == x1 || y0 == y1 end)
    |> count_overlaps()
  end

  @doc "Counts points covered at least twice, considering all lines (including diagonals)."
  def part_2(contents) do
    contents |> parse_data() |> count_overlaps()
  end

  # Shared tail of both parts: rasterize all lines onto a fresh grid and
  # count the cells hit two or more times.
  defp count_overlaps(lines) do
    {width, height} = size_for_lines(lines)

    lines
    |> Enum.reduce(empty_grid(width, height, 0), &add_line_to_grid/2)
    |> Map.fetch!(:data)
    |> Tuple.to_list()
    |> Enum.count(&(&1 > 1))
  end

  @doc "Reads the puzzle input from disk and prints both answers."
  def main do
    {:ok, contents} = File.read("data/day05.txt")
    IO.inspect(part_1(contents), label: "part 1")
    IO.inspect(part_2(contents), label: "part 2")
  end
end
|
aoc21/lib/day05.ex
| 0.851706
| 0.612628
|
day05.ex
|
starcoder
|
defmodule Mix.Tasks.Graphme do
  @moduledoc """
  Generates a relationship graph of your modules using the DOT language
  mix graphme
  ## Command line options
  * `-f` `--filter` - One part of a module name, eg: from A.B.C you can filter the B [default: nil]
  * `-F` `--filter_at` - The index of a module name part, eg: from A.B.C you can use `1` to filter B [default: 0]
  * `-S` `--subgraph_at` - The index of a module name part that will be used to clusterize the graph [default: nil]
  * `-o` `--output` - The output file name [default: graph]
  * `-O` `--output_format` - The output file format, eg: svg, png [default: png]
  ## Examples
  Filter the modules using the first (0) module name and output as "my_graph.svg"
  mix graphme -f "YourModulePart" -F 0 -S "AnotherModulePart" -o "my_graph" -O "svg"
  """
  @shortdoc "Generates a relationship graph of your modules"
  use Mix.Task
  alias Mix.Tasks.Xref
  @requirements ["app.start"]
  # Separator inserted between module-name parts inside a graph node label.
  @joiner "\n"

  @impl true
  def run(params) do
    # OPTIONS
    {options, _, _} =
      OptionParser.parse(params,
        aliases: [
          f: :filter,
          F: :filter_at,
          S: :subgraph_at,
          o: :output,
          O: :output_format
        ],
        strict: [
          filter: :string,
          filter_at: :integer,
          subgraph_at: :integer,
          output: :string,
          output_format: :string
        ]
      )

    filter = Keyword.get(options, :filter, nil)
    filter_at = Keyword.get(options, :filter_at, 0)
    subgraph_at = Keyword.get(options, :subgraph_at, nil)
    out = Keyword.get(options, :output, "graph")
    format = Keyword.get(options, :output_format, "png")

    # MODULES: all modules compiled into the current Mix app, optionally filtered.
    config = Mix.Project.config()
    {:ok, mods} = :application.get_key(config[:app], :modules)
    mods = (not is_nil(filter) && filter_mods(mods, filter, filter_at)) || mods

    # relations, we get those outside because its very slow
    calls = Xref.calls()

    # GRAPHS: one "callee -> caller" edge per relation, plus optional clusters.
    subgraphs = (subgraph_at && subgraphs(mods, subgraph_at)) || []

    mods
    |> Enum.map(&{&1, find_caller_modules(&1, calls)})
    |> Enum.map(&stringfy/1)
    |> List.flatten()
    |> print(subgraphs, out)

    # DOT COMMANDS: requires graphviz's `dot` binary on the PATH.
    System.cmd("dot", ["-T#{format}", "#{out}.gv", "-o", "#{out}.#{format}"])
  end

  # Returns the modules that call `module`, according to the Xref call list.
  defp find_caller_modules(module, calls) do
    calls
    |> Enum.filter(&(elem(&1.callee, 0) == module))
    |> Enum.map(& &1.caller_module)
  end

  # Keeps only modules whose name part at index `at` equals `filter`.
  defp filter_mods(mods, filter, at) do
    Enum.map(mods, &Module.split/1)
    |> Enum.filter(&(Enum.at(&1, at) == filter))
    |> Enum.map(&Module.concat/1)
  end

  # Parse Relations: one DOT edge string per (callee -> caller) pair, deduplicated.
  defp stringfy({caller, callees}) do
    Enum.map(callees, fn x ->
      ~s|"#{stringfy_module(x)}" -> "#{stringfy_module(caller)}"|
    end)
    |> Enum.dedup()
  end

  # Renders a module name with each part on its own line (see @joiner).
  defp stringfy_module(module) do
    Module.split(module)
    |> Enum.join(@joiner)
  end

  # Parse Clusters: groups modules that share the name part at index `deph`
  # into DOT `subgraph cluster_*` declarations; singleton groups are skipped.
  defp subgraphs(mods, deph) do
    mods
    |> Enum.map(&Module.split/1)
    |> Enum.map(&List.pop_at(&1, deph))
    |> Enum.chunk_by(&elem(&1, 0))
    |> Enum.reject(&Enum.any?(&1, fn x -> elem(x, 0) == nil end))
    |> Enum.map(fn x ->
      result =
        Enum.map(x, fn {first, list} ->
          List.insert_at(list, deph, first)
        end)

      if length(x) > 1 do
        names =
          Enum.map(result, &Enum.join(&1, @joiner))
          |> Enum.join(~s|" "|)

        """
        subgraph "cluster_#{List.first(x) |> elem(0)}" {
        "#{names}"
        }
        """
        # NOTE: nested sub-subgraphs usually don't render well,
        # but you can test with:
        # {subgraphs(mods, deph + 1)}
      else
        ""
      end
    end)
  end

  # Print to file: assembles the final DOT document and writes "<out>.gv".
  defp print(lines, subgraphs, out) do
    body = Enum.join(lines, "\n ")

    graph = """
    digraph G{
    rankdir=LR
    graph [splines=true overlap=false model="subset" mindist=2 style=dotted];
    node [shape=ellipse];
    nodesep=0.7
    #{subgraphs}
    #{body}
    }
    """

    File.write("#{out}.gv", graph)
  end
end
|
lib/mix/tasks/graphme.ex
| 0.846419
| 0.599632
|
graphme.ex
|
starcoder
|
defmodule Dnsimple.Tlds do
  @moduledoc """
  Functions for the DNSimple
  [TLD endpoints](https://developer.dnsimple.com/v2/tlds/).
  See:
  - https://developer.dnsimple.com/v2/tlds/
  """
  alias Dnsimple.Client
  alias Dnsimple.Listing
  alias Dnsimple.Response
  alias Dnsimple.Tld
  alias Dnsimple.TldExtendedAttribute

  @doc """
  Returns the lists of DNSimple supported TLDs.
  See:
  - https://developer.dnsimple.com/v2/tlds/#listTlds
  ## Examples
  client = %Dnsimple.Client{access_token: "<KEY>"}
  {:ok, response} = Dnsimple.Tlds.list_tlds(client)
  {:ok, response} = Dnsimple.Tlds.list_tlds(client, page: 2, per_page: 10)
  {:ok, response} = Dnsimple.Tlds.list_tlds(client, sort: "tlds:desc")
  """
  @spec list_tlds(Client.t, Keyword.t) :: {:ok|:error, Response.t}
  def list_tlds(client, options \\ []) do
    url = Client.versioned("/tlds")
    raw_response = Listing.get(client, url, options)
    shape = %{"data" => [%Tld{}], "pagination" => %Response.Pagination{}}
    Response.parse(raw_response, shape)
  end

  @doc """
  Returns a TLD.
  See:
  - https://developer.dnsimple.com/v2/tlds/#getTld
  ## Examples
  client = %Dnsimple.Client{access_token: "<KEY>"}
  {:ok, response} = Dnsimple.Tlds.get_tld(client, "com")
  """
  @spec get_tld(Client.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
  def get_tld(client, tld, options \\ []) do
    url = Client.versioned("/tlds/#{tld}")
    raw_response = Client.get(client, url, options)
    Response.parse(raw_response, %{"data" => %Tld{}})
  end

  @doc """
  Returns the extended attributes for a TLD.
  See:
  - https://developer.dnsimple.com/v2/tlds/#getTldExtendedAttributes
  ## Examples
  client = %Dnsimple.Client{access_token: "<KEY>"}
  {:ok, response} = Dnsimple.Tlds.get_tld_extended_attributes(client, "com")
  """
  @spec get_tld_extended_attributes(Client.t, String.t, Keyword.t) :: {:ok|:error, Response.t}
  def get_tld_extended_attributes(client, tld, options \\ []) do
    url = Client.versioned("/tlds/#{tld}/extended_attributes")
    raw_response = Client.get(client, url, options)
    shape = %{"data" => [%TldExtendedAttribute{options: [%TldExtendedAttribute.Option{}]}]}
    Response.parse(raw_response, shape)
  end
end
|
lib/dnsimple/tlds.ex
| 0.731634
| 0.419797
|
tlds.ex
|
starcoder
|
defmodule Nostrum.Cache.PresenceCache do
  @moduledoc """
  Cache for presences.
  The ETS table name associated with the User Cache is `:presences`. Besides the
  methods provided below you can call any other ETS methods on the table.
  ## Example
  ```elixir
  info = :ets.info(:presences)
  [..., heir: :none, name: :presences, size: 1, ...]
  size = info[:size]
  1
  ```
  """

  alias Nostrum.Struct.{Guild, User}
  alias Nostrum.Util

  import Nostrum.Snowflake, only: [is_snowflake: 1]

  @doc ~S"""
  Retrieves a presence for a user from the cache by guild and id.
  If successful, returns `{:ok, presence}`. Otherwise returns `{:error, reason}`.
  ## Example
  ```elixir
  case Nostrum.Cache.PresenceCache.get(111133335555, 222244446666) do
    {:ok, presence} ->
      "They're #{presence.status}"
    {:error, _reason} ->
      "They're dead Jim"
  end
  ```
  """
  @spec get(User.id(), Guild.id()) :: {:error, :presence_not_found} | {:ok, map}
  def get(user_id, guild_id) when is_snowflake(user_id) and is_snowflake(guild_id) do
    # Presences are keyed by the {user_id, guild_id} pair.
    case :ets.lookup(:presences, {user_id, guild_id}) do
      [] -> {:error, :presence_not_found}
      [{{^user_id, ^guild_id}, presence}] -> {:ok, presence}
    end
  end

  @doc """
  Same as `get/2`, but raises `Nostrum.Error.CacheError` in case of a failure.
  """
  @spec get!(User.id(), Guild.id()) :: no_return | map
  def get!(user_id, guild_id) when is_snowflake(user_id) and is_snowflake(guild_id) do
    user_id |> get(guild_id) |> Util.bangify_find({user_id, guild_id}, __MODULE__)
  end

  # Inserts (or overwrites) a single presence keyed by {user_id, guild_id}.
  @doc false
  @spec create(map) :: :ok
  def create(presence) do
    :ets.insert(:presences, {{presence.user.id, presence.guild_id}, presence})
    :ok
  end

  # Merges `presence` into any cached presence for the same user/guild.
  # Returns :noop when neither game nor status changed, otherwise
  # {guild_id, old_presence_or_nil, new_presence} for event dispatching.
  @doc false
  @spec update(map) :: {Guild.id(), nil | map, map} | :noop
  def update(presence) do
    case get(presence.user.id, presence.guild_id) do
      {:ok, p} ->
        new_presence = Map.merge(p, presence)
        create(new_presence)

        if p.game == new_presence.game and p.status == new_presence.status,
          do: :noop,
          else: {presence.guild_id, p, new_presence}

      {:error, _} ->
        create(presence)
        {presence.guild_id, nil, presence}
    end
  end

  # Inserts many presences for one guild in a single atomic ETS call —
  # cheaper than one insert per presence, and guarantees the declared
  # `:ok` return regardless of ETS's `true` result.
  @doc false
  @spec bulk_create(Guild.id(), [map]) :: :ok
  def bulk_create(_, []), do: :ok

  def bulk_create(guild_id, presences) when is_list(presences) do
    :ets.insert(:presences, Enum.map(presences, &{{&1.user.id, guild_id}, &1}))
    :ok
  end
end
|
lib/nostrum/cache/presence_cache.ex
| 0.84653
| 0.730674
|
presence_cache.ex
|
starcoder
|
defmodule Trans.Translator do
  @moduledoc """
  Provides functions to easily access translated values from schemas and fallback
  to a default locale when the translation does not exist in the required one.
  The functions provided by this module require structs declared in modules
  using `Trans`.
  """

  @doc """
  Gets a translated value into the given locale or falls back to the default
  value if there is no translation available.
  ## Usage example
  Imagine that we have an _Article_ schema declared as follows:
  defmodule Article do
  use Ecto.Schema
  use Trans, translates: [:title, :body]
  schema "articles" do
  field :title, :string
  field :body, :string
  field :translations, :map
  end
  end
  We may have an `Article` like this (Our main locale is `:en`, but we have
  translations in `:es` and `:fr`):
  iex> article = %Article{
  ...>   title: "How to Write a Spelling Corrector",
  ...>   body: "A wonderful article by <NAME>",
  ...>   translations: %{
  ...>     "es" => %{
  ...>       title: "Cรณmo escribir un corrector ortogrรกfico",
  ...>       body: "Un artรญculo maravilloso de <NAME>"
  ...>     },
  ...>     "fr" => %{
  ...>       title: "Comment รฉcrire un correcteur orthographique",
  ...>       body: "Un merveilleux article de <NAME>"
  ...>     }
  ...>   }
  ...> }
  We can then get the Spanish title:
  iex> Trans.Translator.translate(article, :title, :es)
  "Cรณmo escribir un corrector ortogrรกfico"
  If the requested locale is not available, the default value will be returned:
  iex> Trans.Translator.translate(article, :title, :de)
  "How to Write a Spelling Corrector"
  If we request a translation for an invalid field, we will receive an error:
  iex> Trans.Translator.translate(article, :fake_attr, :es)
  ** (RuntimeError) 'Article' module must declare ':fake_attr' as translatable
  """
  @spec translate(struct, atom, String.t() | atom) :: any
  def translate(%{__struct__: module} = struct, field, locale)
      when (is_binary(locale) or is_atom(locale)) and is_atom(field) do
    unless Trans.translatable?(struct, field) do
      raise "'#{inspect(module)}' module must declare '#{inspect(field)}' as translatable"
    end

    # Return the translation or fall back to the default value
    case translated_field(struct, locale, field) do
      :error -> Map.fetch!(struct, field)
      translation -> translation
    end
  end

  # Walks container -> locale -> field in the struct's translations map.
  # Returns the translated value, or :error if any level is missing
  # (the bare :error from Map.fetch/2 falls through the `with`).
  defp translated_field(%{__struct__: module} = struct, locale, field) do
    with {:ok, all_translations} <- Map.fetch(struct, module.__trans__(:container)),
         {:ok, translations_for_locale} <- Map.fetch(all_translations, to_string(locale)),
         {:ok, translated_field} <- Map.fetch(translations_for_locale, to_string(field)) do
      translated_field
    end
  end

  @doc """
  Translates the whole struct with all translatable values and translatable associations to the given locale
  ## Usage example
  Similar to `translate/3` but returns the whole struct
  We can get the Spanish version like this:
  iex> Trans.Translator.translate(article, :es)
  ...> %Article{
  ...>   title: "Cรณmo escribir un corrector ortogrรกfico",
  ...>   body: "Un artรญculo maravilloso de <NAME>",
  ...>   translations: %{
  ...>     "es" => %{
  ...>       title: "Cรณmo escribir un corrector ortogrรกfico",
  ...>       body: "Un artรญculo maravilloso de <NAME>"
  ...>     },
  ...>     "fr" => %{
  ...>       title: "Comment รฉcrire un correcteur orthographique",
  ...>       body: "Un merveilleux article de <NAME>"
  ...>     }
  ...>   }
  ...> }
  """
  @spec translate(struct, String.t() | atom) :: struct
  def translate(%{__struct__: module} = struct, locale)
      when is_binary(locale) or is_atom(locale) do
    # Only structs from modules that `use Trans` export __trans__/1;
    # anything else is returned untouched.
    if Keyword.has_key?(module.__info__(:functions), :__trans__) do
      struct
      |> translate_fields(locale)
      |> translate_assocs(locale)
    else
      struct
    end
  end

  def translate(struct, _locale), do: struct

  # Replaces each translatable field with its translation, keeping the
  # original value when no translation exists for the locale.
  defp translate_fields(%{__struct__: module} = struct, locale) do
    fields = module.__trans__(:fields)

    Enum.reduce(fields, struct, fn field, struct ->
      case translated_field(struct, locale, field) do
        :error -> struct
        translation -> Map.put(struct, field, translation)
      end
    end)
  end

  # Recursively translates loaded associations and embeds; not-yet-loaded
  # Ecto associations are left as-is.
  defp translate_assocs(%{__struct__: module} = struct, locale) do
    associations = module.__schema__(:associations)
    embeds = module.__schema__(:embeds)

    Enum.reduce(associations ++ embeds, struct, fn assoc_name, struct ->
      Map.update(struct, assoc_name, nil, fn
        %Ecto.Association.NotLoaded{} = item ->
          item

        items when is_list(items) ->
          Enum.map(items, &translate(&1, locale))

        %{} = item ->
          translate(item, locale)

        item ->
          item
      end)
    end)
  end
end
|
lib/trans/translator.ex
| 0.887796
| 0.499146
|
translator.ex
|
starcoder
|
defmodule ExULID.Crockford do
  @moduledoc """
  This module provides data encoding and decoding functions
  according to [Crockford's Base32](http://www.crockford.com/wrmg/base32.html).
  """

  # Each encoded character carries 5 bits.
  @bits 5
  @encoding '0123456789ABCDEFGHJKMNPQRSTVWXYZ'
  # Reverse lookup table (codepoint => 5-bit value), built once at compile
  # time instead of being rebuilt for every character that gets decoded.
  @decoding @encoding |> Enum.with_index() |> Map.new()

  defmodule UnknownCharacterError do
    defexception [:message]

    def exception(char) do
      msg = "a character could not be decoded, got: #{inspect char}"
      %UnknownCharacterError{message: msg}
    end
  end

  defp encoding, do: @encoding
  defp decoding, do: @decoding

  @doc """
  Encodes a binary or a non-negative integer into a Crockford Base32 string.
  Returns `{:ok, string}`.
  """
  @spec encode32(binary | integer) :: {:ok, binary}
  def encode32(data) when is_integer(data) do
    data
    |> :binary.encode_unsigned()
    |> encode32()
  end

  # Pad or remove any leading zero
  def encode32(""), do: {:ok, ""}

  def encode32(data) do
    # Left-pad the bitstring to a multiple of 5 bits (one encoded character each).
    data = pad_bitlength(data, @bits)
    {:ok, encode32("", data)}
  end

  # Ignore leading zeros unless it's the only character
  defp encode32("", <<0::@bits>>), do: "0"
  defp encode32("", <<0::@bits, remainder::bits>>), do: encode32("", remainder)

  # Main meaty part. Take the expected bits, convert to the encoding character,
  # then append the character to the accumulator.
  defp encode32(acc, <<bits::@bits, remainder::bits>>) do
    acc
    |> Kernel.<>(<<Enum.at(encoding(), bits)>>)
    |> encode32(remainder)
  end

  # Last remainder will be an empty binary,
  # the accumulator is now final, return it as the result.
  defp encode32(acc, <<>>), do: acc

  # Pads (or strips) bits on the left so the size is a multiple of `bitlength`.
  defp pad_bitlength(data, bitlength) when rem(bit_size(data), bitlength) == 0, do: data

  defp pad_bitlength(data, bitlength) when rem(bit_size(data), bitlength) > 0 do
    remainder = rem(bit_size(data), bitlength)
    <<a::size(remainder), remaining::bits>> = data

    if a > 0 do
      # Leading partial group carries data: extend with zero bits on the left.
      missing_bits = bitlength - remainder
      <<(<<0::size(missing_bits)>>), (data::bits)>>
    else
      # Leading partial group is all zeros: drop it.
      <<remaining::bits>>
    end
  end

  @doc """
  Decodes a Crockford Base32 string back into a binary.
  Returns `{:ok, binary}`, or `{:error, message}` when the string contains
  a character outside the Crockford alphabet.
  """
  @spec decode32(binary) :: {:ok, binary} | {:error, String.t()}
  def decode32("0"), do: {:ok, <<0>>}

  def decode32(string) do
    string = String.to_charlist(string)
    {:ok, decode32("", string)}
  rescue
    e in UnknownCharacterError -> {:error, e.message}
  end

  defp decode32(acc, ''), do: pad_bitlength(acc, 8)

  defp decode32(acc, charlist) do
    [char | remainder] = charlist
    decode32(acc, remainder, char)
  end

  defp decode32(acc, remainder, char) do
    # O(1) map lookup against the precomputed reverse table.
    mapped =
      case Map.fetch(decoding(), char) do
        {:ok, mapped} -> mapped
        :error -> raise(UnknownCharacterError, <<char>>)
      end

    <<_::3, data::5>> = :binary.encode_unsigned(mapped)
    decode32(<<(acc::bits), data::5>>, remainder)
  end
end
|
lib/ex_ulid/crockford.ex
| 0.78789
| 0.496338
|
crockford.ex
|
starcoder
|
defmodule Litmus.Type.List do
  @moduledoc """
  This type validates that a value is list.
  ## Options
  * `:default` - Setting `:default` will populate a field with the provided
  value, assuming that it is not present already. If a field already has a
  value present, it will not be altered.
  * `:min_length` - Specifies the minimum list length. Allowed values are
  non-negative integers.
  * `:max_length` - Specifies the maximum list length. Allowed values are
  non-negative integers.
  * `:length` - Specifies the exact list length. Allowed values are
  non-negative integers.
  * `:required` - Setting `:required` to `true` will cause a validation error
  when a field is not present or the value is `nil`. Allowed values for
  required are `true` and `false`. The default is `false`.
  * `:type` - Specifies the data type of elements in the list. Allowed values
  are are atoms `:atom, :boolean, :number and :string`. Default value is `nil`.
  If `nil`, any element type is allowed in the list.
  * `:unique` - Setting `:unique` to true will validate that all values in
  the list are unique. The default value is `false`.
  ## Examples
  iex> schema = %{
  ...>   "ids" => %Litmus.Type.List{
  ...>     min_length: 1,
  ...>     max_length: 5,
  ...>     type: :number
  ...>   }
  ...> }
  iex> Litmus.validate(%{"ids" => [1, 2]}, schema)
  {:ok, %{"ids" => [1, 2]}}
  iex> Litmus.validate(%{"ids" => [1, "a"]}, schema)
  {:error, "ids must be a list of numbers"}
  iex> schema = %{
  ...>   "ids" => %Litmus.Type.List{
  ...>     default: []
  ...>   }
  ...> }
  iex> Litmus.validate(%{}, schema)
  {:ok, %{"ids" => []}}
  """

  alias Litmus.{Default, Required}
  alias Litmus.Type

  defstruct [
    :min_length,
    :max_length,
    :length,
    :type,
    default: Litmus.Type.Any.NoDefault,
    required: false,
    unique: false
  ]

  @type t :: %__MODULE__{
          default: any,
          min_length: non_neg_integer | nil,
          max_length: non_neg_integer | nil,
          length: non_neg_integer | nil,
          type: atom | nil,
          required: boolean,
          unique: boolean
        }

  @doc """
  Runs the full validation pipeline for one field: presence, list shape,
  element type, length constraints, and uniqueness — stopping at the first
  failure. Returns `{:ok, data}` or `{:error, message}`.
  """
  @spec validate_field(t, term, map) :: {:ok, map} | {:error, String.t()}
  def validate_field(type, field, data) do
    # NOTE(review): {:ok_not_present, data} appears to come from
    # Required.validate when the field is absent — confirm against Required.
    with {:ok, data} <- Required.validate(type, field, data),
         {:ok, data} <- validate_list(type, field, data),
         {:ok, data} <- type_validate(type, field, data),
         {:ok, data} <- min_length_validate(type, field, data),
         {:ok, data} <- max_length_validate(type, field, data),
         {:ok, data} <- length_validate(type, field, data),
         {:ok, data} <- unique_validate(type, field, data) do
      {:ok, data}
    else
      {:ok_not_present, data} -> Default.validate(type, field, data)
      {:error, msg} -> {:error, msg}
    end
  end

  # Accepts nil (absence is Required's concern) or an actual list.
  @spec validate_list(t, term, map) :: {:ok, map} | {:error, String.t()}
  defp validate_list(%__MODULE__{}, field, params) do
    cond do
      params[field] == nil ->
        {:ok, params}

      is_list(params[field]) ->
        {:ok, params}

      true ->
        {:error, "#{field} must be a list"}
    end
  end

  @spec min_length_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
  defp min_length_validate(%__MODULE__{min_length: nil}, _field, params) do
    {:ok, params}
  end

  # NOTE(review): assumes params[field] is a list at this point; a nil value
  # with min_length set would crash length/1 — verify Required's nil handling.
  defp min_length_validate(%__MODULE__{min_length: min_length}, field, params)
       when is_integer(min_length) and min_length >= 0 do
    if length(params[field]) < min_length do
      {:error, "#{field} must not be below length of #{min_length}"}
    else
      {:ok, params}
    end
  end

  @spec max_length_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
  defp max_length_validate(%__MODULE__{max_length: nil}, _field, params) do
    {:ok, params}
  end

  defp max_length_validate(%__MODULE__{max_length: max_length}, field, params)
       when is_integer(max_length) and max_length >= 0 do
    if length(params[field]) > max_length do
      {:error, "#{field} must not exceed length of #{max_length}"}
    else
      {:ok, params}
    end
  end

  @spec length_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
  defp length_validate(%__MODULE__{length: nil}, _field, params) do
    {:ok, params}
  end

  defp length_validate(%__MODULE__{length: length}, field, params)
       when is_integer(length) and length >= 0 do
    if length(params[field]) != length do
      {:error, "#{field} length must be of #{length} length"}
    else
      {:ok, params}
    end
  end

  # Dispatches element-type checks; nil type means any element is allowed.
  @spec type_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
  defp type_validate(%__MODULE__{type: nil}, _field, params) do
    {:ok, params}
  end

  defp type_validate(%__MODULE__{type: type}, field, params) do
    case type do
      :atom -> validate_atom(params, field)
      :boolean -> validate_boolean(params, field)
      :number -> validate_number(params, field)
      :string -> validate_string(params, field)
    end
  end

  @spec validate_atom(map, term) :: {:ok, map} | {:error, String.t()}
  defp validate_atom(params, field) do
    if Enum.all?(params[field], &is_atom/1) do
      {:ok, params}
    else
      {:error, "#{field} must be a list of atoms"}
    end
  end

  @spec validate_boolean(map, term) :: {:ok, map} | {:error, String.t()}
  defp validate_boolean(params, field) do
    if Enum.all?(params[field], &is_boolean/1) do
      {:ok, params}
    else
      {:error, "#{field} must be a list of boolean"}
    end
  end

  @spec validate_number(map, term) :: {:ok, map} | {:error, String.t()}
  defp validate_number(params, field) do
    if Enum.all?(params[field], &is_number/1) do
      {:ok, params}
    else
      {:error, "#{field} must be a list of numbers"}
    end
  end

  @spec validate_string(map, term) :: {:ok, map} | {:error, String.t()}
  defp validate_string(params, field) do
    if Enum.all?(params[field], &is_binary/1) do
      {:ok, params}
    else
      {:error, "#{field} must be a list of strings"}
    end
  end

  @spec unique_validate(t, term, map) :: {:ok, map} | {:error, String.t()}
  defp unique_validate(%__MODULE__{unique: false}, _field, params) do
    {:ok, params}
  end

  defp unique_validate(%__MODULE__{unique: true}, field, params) do
    list = params[field]

    if uniq?(list, %{}) do
      {:ok, params}
    else
      {:error, "#{field} cannot contain duplicate values"}
    end
  end

  # Single-pass duplicate check: `set` is a map used as a seen-set, so the
  # check short-circuits on the first duplicate.
  @spec uniq?([term], map) :: boolean
  defp uniq?([], _set), do: true

  defp uniq?([head | tail], set) do
    case set do
      %{^head => true} -> false
      %{} -> uniq?(tail, Map.put(set, head, true))
    end
  end

  defimpl Litmus.Type do
    alias Litmus.Type

    @spec validate(Type.t(), term, map) :: {:ok, map} | {:error, String.t()}
    def validate(type, field, data), do: Type.List.validate_field(type, field, data)
  end
end
|
lib/litmus/type/list.ex
| 0.915498
| 0.606557
|
list.ex
|
starcoder
|
defmodule AdventOfCode.Day06 do
  @moduledoc ~S"""
  [Advent Of Code day 6](https://adventofcode.com/2018/day/6).
  iex> input = Enum.join(["1, 1", "1, 6", "8, 3", "3, 4", "5, 5", "8, 9"], "\n")
  iex> AdventOfCode.Day06.solve("1", input)
  17
  iex> AdventOfCode.Day06.solve("2", input, 32)
  16
  """
  import AdventOfCode.Utils, only: [map_increment: 2]

  @spec solve(part :: String.t(), String.t()) :: integer
  @spec solve(part :: String.t(), String.t(), max_distance :: integer) :: integer

  @doc """
  Part 1: the size of the largest finite region of points closest to one
  coordinate (regions touching the bounding box are infinite and excluded).
  """
  def solve("1", input) do
    {{{min_x, min_y}, {max_x, max_y}}, coordinates} = parse_coordinates(input)

    {_point, frequency} =
      for(x <- min_x..max_x, y <- min_y..max_y, do: {x, y})
      |> build_points_frequency_map(coordinates, {[min_x, max_x], [min_y, max_y]})
      |> Enum.max_by(fn {_point, frequency} -> frequency end)

    frequency
  end

  @max_distance 10_000
  @doc """
  Part 2: counts points whose summed Manhattan distance to all coordinates
  is below `max_distance`.
  """
  def solve("2", input, max_distance \\ @max_distance) do
    {{{min_x, min_y}, {max_x, max_y}}, coordinates} = parse_coordinates(input)

    for(x <- min_x..max_x, y <- min_y..max_y, do: {x, y})
    |> Enum.count(fn p1 ->
      Enum.reduce(coordinates, 0, fn p2, acc -> acc + manhattan_distance(p1, p2) end) < max_distance
    end)
  end

  defp manhattan_distance({x, y}, {x1, y1}) do
    abs(x - x1) + abs(y - y1)
  end

  # Maps each coordinate to the number of grid points it is uniquely closest
  # to. Ties count for no one; coordinates whose region touches the bounding
  # box (banned rows/columns) are removed, since their region is infinite.
  defp build_points_frequency_map(all_points, coordinates, {banned_x, banned_y}) do
    initial_acc = Enum.into(coordinates, %{}, &{&1, 0})

    Enum.reduce(all_points, initial_acc, fn {x, y} = point, acc ->
      case closest_point(point, coordinates) do
        :more_than_one ->
          acc

        cp ->
          cond do
            # Already disqualified as infinite — ignore.
            not Map.has_key?(acc, cp) ->
              acc

            # Region reaches the edge of the box: disqualify it.
            x in banned_x || y in banned_y ->
              Map.delete(acc, cp)

            true ->
              map_increment(acc, cp)
          end
      end
    end)
  end

  # Returns the unique coordinate closest to `point`, or :more_than_one on a
  # tie. Distances come back sorted ascending, so the first entry is the
  # closest candidate; equality with the runner-up means no unique winner.
  # (The original's `xd > yd` branch was unreachable after sorting.)
  defp closest_point(point, points) do
    [{closest, best_distance}, {_runner_up, next_distance}] =
      points
      |> Enum.map(&{&1, manhattan_distance(&1, point)})
      |> Enum.sort_by(fn {_, distance} -> distance end)
      |> Enum.take(2)

    if best_distance == next_distance, do: :more_than_one, else: closest
  end

  # Parses all coordinates while tracking the bounding box in one pass.
  defp parse_coordinates(string) do
    string
    |> String.split("\n")
    |> Enum.reduce({{{nil, nil}, {nil, nil}}, []}, fn str, {minmax, acc} ->
      coordinate = do_parse(str)
      updated_minmax = maybe_update_minmax(coordinate, minmax)
      {updated_minmax, [coordinate | acc]}
    end)
  end

  # Parses one "x, y" line into a {x, y} tuple.
  defp do_parse(string) do
    [x, y] = String.split(string, ", ") |> Enum.map(&String.to_integer/1)
    {x, y}
  end

  defp maybe_update_minmax({x, y}, {{min_x, min_y}, {max_x, max_y}}) do
    {{_min(min_x, x), _min(min_y, y)}, {_max(max_x, x), _max(max_y, y)}}
  end

  # nil-tolerant min/max: nil means "no value seen yet".
  defp _min(nil, b), do: b
  defp _min(a, nil), do: a
  defp _min(a, b), do: Kernel.min(a, b)

  defp _max(nil, b), do: b
  defp _max(a, nil), do: a
  defp _max(a, b), do: Kernel.max(a, b)
end
|
lib/advent_of_code/day_06.ex
| 0.792825
| 0.65132
|
day_06.ex
|
starcoder
|
defmodule LocalHex.Storage.S3 do
  @moduledoc """
  Adapter module to provide S3 abilities
  In the config files (ex. config.exs) you can configure each repository individually by
  providing a `:store` field that contains a tuple with the details.
  Additionally you need to configure `ex_aws` as well to be able to connect properly to a server and bucket
  of your choice. More details you find here [ExAWS](https://github.com/ex-aws/ex_aws/blob/master/lib/ex_aws/config.ex)
  ```
  config :ex_aws, :s3,
  access_key_id: "123456789",
  secret_access_key: "123456789",
  scheme: "http://",
  host: "localhost",
  port: 9000,
  region: "local"
  storage_config =
  {LocalHex.Storage.S3,
  bucket: "localhex",
  options: [
  region: "local"
  ]}
  config :local_hex,
  auth_token: "<PASSWORD>",
  repositories: [
  main: [
  name: "local_hex_dev",
  store: storage_config,
  ...
  ]
  ]
  ```
  """

  @behaviour LocalHex.Storage

  require Logger

  alias ExAws.S3

  # Holds the per-repository S3 settings extracted from the :store tuple.
  defstruct [:bucket, :options]

  # Uploads `value` at `path` inside the repository's bucket.
  # Returns :ok, or {:error, :bad_request} on any S3 failure.
  @impl true
  def write(repository, path, value) do
    s3_config = s3_config_for_repository(repository)
    path = path(repository, path)
    Logger.debug(inspect({__MODULE__, :write, path}))
    request = S3.put_object(s3_config.bucket, path, value, s3_config.options)

    case ExAws.request(request, s3_config.options) do
      {:ok, _} ->
        :ok

      {:error, _} ->
        {:error, :bad_request}
    end
  end

  # Fetches the object body at `path`.
  # Returns {:ok, body}, {:error, :not_found} on HTTP 404, or the raw
  # ExAws result for any other outcome.
  @impl true
  def read(repository, path) do
    s3_config = s3_config_for_repository(repository)
    path = path(repository, path)
    Logger.debug(inspect({__MODULE__, :read, path}))
    request = S3.get_object(s3_config.bucket, path, s3_config.options)

    case ExAws.request(request, s3_config.options) do
      {:ok, %{body: body}} ->
        {:ok, body}

      {:error, {:http_error, 404, _}} ->
        {:error, :not_found}

      other ->
        other
    end
  end

  # Deletes the object at `path`.
  # Returns :ok, {:error, :not_found} on HTTP 404, or the raw ExAws result.
  @impl true
  def delete(repository, path) do
    s3_config = s3_config_for_repository(repository)
    path = path(repository, path)
    Logger.debug(inspect({__MODULE__, :delete, path}))
    request = S3.delete_object(s3_config.bucket, path, s3_config.options)

    case ExAws.request(request, s3_config.options) do
      {:ok, _} ->
        :ok

      {:error, {:http_error, 404, _}} ->
        {:error, :not_found}

      other ->
        other
    end
  end

  # Builds the object key: "<repository name>/<path parts...>".
  defp path(repository, path) do
    Path.join(["", repository.name | List.wrap(path)])
  end

  # Extracts {module, config} from repository.store and builds the struct.
  defp s3_config_for_repository(repository) do
    {_, config} = repository.store
    struct!(__MODULE__, config)
  end
end
|
lib/local_hex/storage/s3.ex
| 0.853593
| 0.760562
|
s3.ex
|
starcoder
|
defmodule ChattingHangmans.Hangman do
@moduledoc """
Handles Hangman gameplay
"""
@lifes_number 8
alias ChattingHangmans.Game
@doc "Transforms the current game state into the next one for a single guess."
def play(%Game{} = game) do
  # Pipeline: sanitize input, reject invalid guesses (raises), set up a fresh
  # game on first guess, apply the guess, then render and classify the result.
  # NOTE(review): draw_game/1 and determine_game_state/1 are defined later in
  # this module (outside this excerpt).
  game
  |> parse
  |> validate_input
  |> initialize_new_game
  |> advance_game
  |> draw_game
  |> determine_game_state
end
def parse(%Game{secret_phrase: secret_phrase, current_letter: current_letter} = game) do
%{
game
| secret_phrase: String.trim(secret_phrase),
current_letter: String.trim(current_letter)
}
end
def validate_input(%Game{current_letter: ""} = _game) do
raise "Current letter cannot be empty!"
end
def validate_input(%Game{current_letter: current_letter} = game) do
if String.length(current_letter) > 1 do
raise "You can only guess one character at a time, no cheating!"
else
game
end
end
def validate_input(%Game{} = game) do
if game.current_letter in game.letters_guessed_right or
game.current_letter in game.letters_guessed_wrong do
raise "You have already guessed this letter"
else
game
end
end
def initialize_new_game(%Game{guessed_phrase: ""} = game) do
secret_size = String.length(game.secret_phrase)
# fix me
%Game{game | guessed_phrase: encode_secret(secret_size), life: @lifes_number}
end
# There is no need to initialize
def initialize_new_game(%Game{} = game) do
game
end
def advance_game(%Game{} = game) do
matching_letters =
game.secret_phrase
|> String.to_charlist()
|> Enum.with_index()
|> Enum.filter(fn {letter, _index} ->
letter == List.first(to_charlist(game.current_letter))
end)
|> Enum.into(%{}, fn {letter, index} -> {index, letter} end)
IO.inspect(game.secret_phrase |> String.to_charlist())
IO.inspect(matching_letters)
process_guessed_letters(game, matching_letters)
end
# Case when guessed letter was wrong
def process_guessed_letters(%Game{} = game, matching_letters)
when map_size(matching_letters) == 0 do
%Game{
game
| letters_guessed_wrong: [game.current_letter | game.letters_guessed_wrong],
life: game.life - 1
}
end
# Case when guessed letter was right
def process_guessed_letters(%Game{} = game, matching_letters) do
%Game{
game
| guessed_phrase: encode_secret(game.guessed_phrase, matching_letters),
letters_guessed_right: [game.current_letter | game.letters_guessed_right]
}
end
def encode_secret(encoded_phrase, matching_letters)
when is_map(matching_letters) do
encoded_phrase
|> String.to_charlist()
|> Enum.to_list()
|> Enum.with_index()
|> Enum.map(fn {letter, index} -> Map.get(matching_letters, index, letter) end)
|> to_string
end
def encode_secret(encoded_phrase, []) do
encoded_phrase
end
def encode_secret(number_of_letters) when is_integer(number_of_letters) do
String.duplicate("_", number_of_letters)
end
defp draw_game(%Game{} = game) do
lost_life = @lifes_number - game.life
%{game | drawing: draw_game(lost_life)}
end
defp draw_game(8 = _lost_lifes) do
'''
|--------
| |
| 0
| /|\
| |
| / \
|
/|\__________
'''
end
defp draw_game(7 = _lost_lifes) do
'''
|--------
| |
| 0
| /|\
| |
| /
|
/|\__________
'''
end
defp draw_game(6 = _lost_lifes) do
'''
|--------
| |
| 0
| /|\
| |
|
|
/|\__________
'''
end
defp draw_game(5 = _lost_lifes) do
'''
|--------
| |
| 0
| /|\
|
|
|
/|\__________
'''
end
defp draw_game(4 = _lost_lifes) do
'''
|--------
| |
| 0
| /|
|
|
|
/|\__________
'''
end
defp draw_game(3 = _lost_lifes) do
'''
|--------
| |
| 0
| |
|
|
|
/|\__________
'''
end
defp draw_game(2 = _lost_lifes) do
'''
|--------
| |
| 0
|
|
|
|
/|\__________
'''
end
defp draw_game(1 = _lost_lifes) do
'''
|--------
|
|
|
|
|
|
/|\__________
'''
end
defp draw_game(0 = _lost_lifes) do
'''
|
|
|
|
|
|
|
/|\__________
'''
end
defp draw_game(_) do
'''
|--------
|
|
| What?
| How many lifes
| Do you have!?
|
/|\__________
'''
end
def determine_game_state(%Game{} = game) do
if is_game_lost(game) do
%Game{game | game_state: Game.lost()}
else
if is_game_won(game) do
%Game{game | game_state: Game.won()}
else
%Game{game | game_state: Game.in_progress()}
end
end
end
def is_game_won(%Game{} = game) do
game.secret_phrase == game.guessed_phrase
end
def is_game_lost(%Game{} = game) do
game.life <= 0
end
end
|
chatting_hangmans/lib/chatting_hangmans/hangman.ex
| 0.570331
| 0.404684
|
hangman.ex
|
starcoder
|
defmodule Cloak.Ciphers.Deprecated.AES.CTR do
  @moduledoc """
  DEPRECATED version of the `Cloak.Ciphers.AES.CTR` cipher, for use in
  migrating existing data to the new format used by `Cloak.Ciphers.AES.CTR`.

  ## Rationale

  The old `Cloak.AES.CTR` cipher used the following format for ciphertext:

      +---------------------------------------------------------+----------------------+
      |                          HEADER                         |         BODY         |
      +----------------------+------------------+---------------+----------------------+
      | Module Tag (n bytes) | Key Tag (1 byte) | IV (16 bytes) | Ciphertext (n bytes) |
      +----------------------+------------------+---------------+----------------------+

  The new `Cloak.Ciphers.AES.CTR` implementation no longer prepends the "Module Tag"
  component, and uses a new format as described in its docs. This cipher can
  assist in upgrading old ciphertext to the new format.

  See the [Upgrading from 0.6.x](0.6.x_to_0.7.x.html) guide for usage.
  """

  @behaviour Cloak.Cipher

  @deprecated "Use Cloak.Ciphers.AES.CTR.encrypt/2 instead. This call will raise an error."
  @impl Cloak.Cipher
  def encrypt(_plaintext, _opts) do
    raise RuntimeError,
          "#{inspect(__MODULE__)} is deprecated, and can only be used for decryption"
  end

  @doc """
  Decrypts legacy-format `ciphertext` using the `:key`, `:module_tag` and
  `:tag` options. Returns `{:ok, plaintext}` on success, `:error` when the
  ciphertext does not carry this cipher's tag or is malformed.
  """
  @impl Cloak.Cipher
  def decrypt(ciphertext, opts) do
    key = Keyword.fetch!(opts, :key)

    with true <- can_decrypt?(ciphertext, opts),
         <<iv::binary-16, ciphertext::binary>> <-
           String.replace_leading(ciphertext, tag(opts), <<>>) do
      # Fix: :crypto.stream_init/3 and :crypto.stream_decrypt/2 were removed
      # in OTP 24. crypto_one_time/5 is the replacement; the retired :aes_ctr
      # alias selects aes_{128,192,256}_ctr based on the key size, matching
      # the old behaviour.
      {:ok, :crypto.crypto_one_time(:aes_ctr, key, iv, ciphertext, false)}
    else
      _other ->
        :error
    end
  end

  @impl Cloak.Cipher
  def can_decrypt?(ciphertext, opts) do
    String.starts_with?(ciphertext, tag(opts))
  end

  # The legacy header: module tag followed by the 1-byte key tag.
  defp tag(opts) do
    Keyword.fetch!(opts, :module_tag) <> Keyword.fetch!(opts, :tag)
  end
end
|
lib/cloak/ciphers/deprecated/aes_ctr.ex
| 0.782288
| 0.406361
|
aes_ctr.ex
|
starcoder
|
defmodule Cassandrax do
  @moduledoc ~S"""
  Cassandrax is a Cassandra ORM built on top of [Xandra](https://github.com/lexhide/xandra) and
  [Ecto](https://github.com/elixir-ecto/ecto). `Xandra` provides the driver for communication
  with the Database, and `Ecto` provides the data mapping functionality as well as changesets
  to control data mutations.

  Cassandrax is heavily inspired by [Triton](https://github.com/blitzstudios/triton) and
  [Ecto](https://github.com/elixir-ecto/ecto) projects, if you have used any of those projects,
  you'll get up to speed in no time to use Cassandrax.

  Cassandrax is split into 3 components following the design choices made on Ecto:

    * `Cassandrax.Keyspace` - keyspaces are the touchpoints with your Cassandra.
      Keyspaces provide the API for inserting, updating and deleting data from your tables,
      as well as querying the data already stored.
      Keyspaces need a `cluster` or a connection and a `name`.

    * `Cassandrax.Schema` - schemas are used to map data fetched from Cassandra into an Elixir
      struct. They use `Ecto.Schema` under the hood and should require a `@primary_key` to be
      defined before the table definition.

    * `Cassandrax.Query` - following `Ecto` design, they're written in Elixir syntax
      and are used to retrieve data from a Cassandra table. Queries are composable,
      even though they should remain as straight forward as possible. Unlike with
      relational databases, where you first model the data and their relationships and later
      you define how you should query that data, on Cassandra query design decisions are made
      when modeling the tables. For more information, please refer to
      [Cassandra Docs](https://cassandra.apache.org/doc/latest/data_modeling/index.html)

  Next we'll provide an overview of these components and how you'll use them to
  insert/change/fetch from/to Cassandra. Please check their corresponding module documentation
  for more in-depth description of the features available and options.

  ## Keyspaces

  `Cassandrax.Keyspace` is a wrapper around the keyspace. Each keyspace contains one or multiple
  tables and belongs to a cluster. A keyspace can be defined like so:

      defmodule SomeKeyspace do
        use Cassandrax.Keyspace, cluster: SomeCluster, name: "some_keyspace"
      end

  And the configuration for `SomeCluster` must be in your application environment,
  usually defined in your `config/config.exs`:

      config :cassandrax, clusters: [SomeCluster]

      # cassandrax accepts all options you'd use in xandra
      config :cassandrax, SomeCluster,
        protocol_version: :v4,
        nodes: ["127.0.0.1:9042"],
        username: "cassandra",
        password: "<PASSWORD>"

  Cassandrax automatically picks up these configs to start the pool of connections for each
  cluster. Keep in mind that all keyspaces that belong to the same cluster
  will share the same pool of connections. If you need your keyspace to have its own
  connection pool, please refer to the `Cassandrax.Keyspace` specific documentation.

  ## Schemas

  Schemas are used for table definition. Here's an example:

      defmodule UserByEmail do
        use Cassandrax.Schema

        # needs to be defined *before* defining the schema
        @primary_key [:email]

        # table name is users_by_email. Notice we don't need to set the keyspace here
        table "users_by_email" do
          field :email, :string
          field :id, :integer
          field :username, :string
        end
      end

  Cassandrax uses `Ecto.Schema` to define a struct with the schema fields:

      iex> user_by_email = %UserByEmail{email: "<EMAIL>"}
      iex> user_by_email.email
      "<EMAIL>"

  Just like with `Ecto`, this schema allows us to interact with keyspaces, like so:

      iex> user_by_email = %UserByEmail{email: "<EMAIL>", id: 123_456, username: "user"}
      iex> SomeKeyspace.insert!(user_by_email)
      %UserByEmail{...}

  Unlike relational databases which come with the autoincrement ID as default primary key,
  Cassandra requires you to define your own primary key. Therefore calling `Keyspace.insert/2`
  always returns the struct itself with updated metadata, but no changed fields.

  Also, bear in mind that Cassandra doesn't provide consistency guarantees the same way
  relational databases do, so the returned values of, for instance, deleting a record that
  doesn't exist anymore is the same as deleting an existing one:

      iex> user_by_email = %UserByEmail{email: "<EMAIL>", id: 123_456, username: "user"}
      # Store the result in a variable
      iex> result = SomeKeyspace.insert!(user_by_email)
      %UserByEmail{...}
      # Now delete the recently inserted record...
      iex> SomeKeyspace.delete!(result)
      %UserByEmail{...}
      # And if you try to delete a record that doesn't exist, no error is returned
      iex> SomeKeyspace.delete!(result)
      %UserByEmail{...}

  ## Queries

  Cassandrax provides you with a DSL so you can write queries in Elixir, lowering the chances
  of writing invalid CQL statements. In some occasions, `Cassandrax.Query` will validate your
  query at compile time and fail as soon as possible if your query is invalid:

      import Cassandrax.Query

      query = UserByEmail |> where(:email == "<EMAIL>")

      # Returns a List of %UserByEmail{} structs matching the query
      result = SomeKeyspace.all(query)

  Specific examples and detailed documentation for all available keywords are available in
  `Cassandrax.Query` module docs, but the supported keywords are:

    * `:allow_filtering`
    * `:distinct`
    * `:group_by`
    * `:limit`
    * `:order_by`
    * `:per_partition_limit`
    * `:select`
    * `:where`

  `Cassandrax.Keyspace` provides the same API as Ecto: You have `Keyspace.all/1` which returns
  all records matching a query, `Keyspace.one/1` which returns a single entry or raises and
  `Keyspace.get/2` which fetches an entry by its primary key.
  """

  use Application

  # Starts one connection-pool supervisor per configured cluster.
  @impl true
  def start(_type, _args) do
    Application.get_env(:cassandrax, :clusters, [])
    |> Enum.map(fn cluster ->
      config = Application.get_env(:cassandrax, cluster) |> ensure_cluster_config!(cluster)
      Cassandrax.Supervisor.child_spec(cluster, config)
    end)
    |> start_link()
  end

  @doc """
  Ensures configs exist for `cluster`, raising `Cassandrax.ClusterConfigError`
  when they are `nil` or empty. Returns the config unchanged otherwise.
  """
  def ensure_cluster_config!(empty, cluster) when is_nil(empty) or empty == [] do
    raise(
      Cassandrax.ClusterConfigError,
      "Expected to find keyword configs for #{inspect(cluster)}, found #{inspect(empty)}"
    )
  end

  def ensure_cluster_config!(config, _cluster), do: config

  @doc false
  def start_link(children) do
    Supervisor.start_link(children, strategy: :one_for_one, name: Cassandrax.Supervisor)
  end

  @doc """
  Prepares and executes a raw CQL `statement` against `conn`.

  Returns the result of `Cassandrax.Connection.execute/4`, or `{:error, error}`
  when the statement fails to prepare.
  """
  def cql(conn, statement, values \\ [], opts \\ []) do
    case Cassandrax.Connection.prepare(conn, statement) do
      {:ok, prepared} -> Cassandrax.Connection.execute(conn, prepared, values, opts)
      {:error, error} -> {:error, error}
    end
  end
end
|
lib/cassandrax.ex
| 0.828488
| 0.750667
|
cassandrax.ex
|
starcoder
|
defmodule Skout.Document do
  @moduledoc """
  A structure for Skout documents for terse descriptions of SKOS concept schemes.

  A Skout document consists of a graph with the description of the [SKOS](http://www.w3.org/TR/skos-reference)
  concept scheme and its concepts and a manifest with general settings for the
  YAML serialization.
  """

  defstruct [:manifest, :skos]

  alias Skout.{Manifest, Materialization}
  alias RDF.Graph
  alias RDF.NS.{SKOS, RDFS}
  alias Skout.NS.DC

  import Skout.Helper

  @doc """
  Creates a new document with the given settings.

  The following settings for the manifest are available:

  - `base_iri`: The base IRI to be used for the concepts.
    This is the only required setting.
  - `iri_normalization`: The normalization method which is applied to the labels
    before they are concatenated to the `base_iri`.
    Must be one of `:camelize` or `underscore` and defaults to `camelize`.
  - `label_type`: The SKOS label property to be used for produced labeling
    statements.
  - `default_language`: The language-tag used for the produced labeling
    statements.
  - `materialization`: Another struct with flag settings controlling which
    statements should be materialized.
    The following flags are available: `:rdf_type, :in_scheme, :inverse_hierarchy, :inverse_related`.

  Return the constructed document in an `:ok` tuple in success case, otherwise an
  `:error` tuple.
  """
  def new(manifest) do
    # A failed Manifest.new/1 falls through the `with` and is returned as-is.
    with {:ok, manifest} <- Manifest.new(manifest) do
      {:ok,
       %__MODULE__{
         manifest: manifest,
         skos:
           Graph.new(
             prefixes: %{
               "" => manifest.base_iri,
               skos: SKOS,
               rdfs: RDFS,
               dct: DC
             }
           )
       }}
    end
  end

  @doc """
  Creates a new document with the given settings.

  As opposed to `new/1` this returns the document directly or raises an exception
  in the error case.
  """
  def new!(manifest) do
    case new(manifest) do
      {:ok, document} -> document
      {:error, error} -> raise error
    end
  end

  @doc false
  def finalize(%__MODULE__{} = document) do
    add(document, Materialization.infer_top_concepts(document))
  end

  @doc """
  Adds `triples` to the SKOS graph of `document`.

  Note, that this might materialize some forward chained statements.

  Returns the updated document in an `:ok` tuple in success case, otherwise an
  `:error` tuple.
  """
  def add(document, triples)

  def add(%__MODULE__{} = document, triple) when is_tuple(triple) do
    if RDF.Triple.valid?(triple) do
      {:ok,
       add_to_graph(
         document,
         Materialization.infer(triple, document.manifest)
       )}
    else
      {:error, "invalid triple: #{inspect(triple)}"}
    end
  end

  def add(%__MODULE__{} = document, triples) when is_list(triples) do
    # Halts on the first triple that fails to be added.
    Enum.reduce_while(triples, {:ok, document}, fn triple, {:ok, document} ->
      document
      |> add(triple)
      |> cont_or_halt()
    end)
  end

  @doc """
  Adds `triples` to the SKOS graph of `document`.

  As opposed to `add/2` this returns the updated document directly or raises an
  exception in the error case.
  """
  # Fix: the first argument is a document, not a manifest; the parameter was
  # misleadingly named `manifest` before.
  def add!(document, triples) do
    case add(document, triples) do
      {:ok, document} -> document
      {:error, error} -> raise error
    end
  end

  @doc false
  def update_graph(%__MODULE__{} = document, fun) do
    %__MODULE__{document | skos: fun.(document.skos)}
  end

  defp add_to_graph(%__MODULE__{} = document, data) do
    update_graph(document, &Graph.add(&1, data))
  end

  @doc """
  Reads a document from a YAML string.

  You can pass in all the options mentioned in `new/1` overwriting the values
  in the preamble.

  Returns the document in an `:ok` tuple in success case, otherwise an `:error`
  tuple.
  """
  defdelegate from_yaml(yaml, opts \\ []), to: Skout.YAML.Decoder, as: :decode

  @doc """
  Reads a document from a YAML string.

  You can pass in all the options mentioned in `new/1` overwriting the values
  in the preamble.

  As opposed to `from_yaml/2` this returns the document directly or raises an
  exception in the error case.
  """
  defdelegate from_yaml!(yaml, opts \\ []), to: Skout.YAML.Decoder, as: :decode!

  @doc """
  Returns the YAML serialization of `document`.

  Returns the YAML string in an `:ok` tuple in success case, otherwise an `:error`
  tuple.
  """
  defdelegate to_yaml(document, opts \\ []), to: Skout.YAML.Encoder, as: :encode

  @doc """
  Returns the YAML serialization of `document`.

  As opposed to `to_yaml/2` this returns the YAML string directly or raises an
  exception in an error case.
  """
  defdelegate to_yaml!(document, opts \\ []), to: Skout.YAML.Encoder, as: :encode!

  @doc """
  Reads a document from an `RDF.Graph`.

  You can pass in all the options mentioned in `new/1` overwriting the values
  in the preamble.

  Returns the document in an `:ok` tuple in success case, otherwise an `:error`
  tuple.
  """
  defdelegate from_rdf(graph, opts \\ []), to: Skout.RDF.Import, as: :call

  @doc """
  Reads a document from an `RDF.Graph`.

  As opposed to `from_rdf/2` this returns the document directly or raises an
  exception in an error case.
  """
  defdelegate from_rdf!(graph, opts \\ []), to: Skout.RDF.Import, as: :call!

  @doc """
  Returns the RDF graph of the SKOS concept scheme of `document`.

  Note that other than the other conversion functions this one doesn't return
  the result in an `:ok` tuple, since it can't fail.
  """
  def to_rdf(%__MODULE__{} = document), do: document.skos
end
|
lib/skout/document.ex
| 0.886408
| 0.724432
|
document.ex
|
starcoder
|
defmodule HKDF do
  @moduledoc """
  Provides a simple Hashed Message Authentication Code (HMAC)-based
  key derivation function (HKDF).

  ## Process

  Keys are derived in two steps:

  1. Extract - a pseudorandom key is extracted from an input key material and optional salt.
  2. Expand - an output key material of a specific length is expanded from hashes of the pseudorandom key and an optional info message.

  ## Source

  Defined in [RFC 5869](https://tools.ietf.org/html/rfc5869)
  """

  @type hash_fun :: :md5 | :sha | :sha224 | :sha256 | :sha384 | :sha512
  @type input_key_material :: binary
  @type salt :: binary
  @type pseudorandom_key :: binary
  @type length :: non_neg_integer
  @type info :: binary
  @type output_key_material :: binary

  @doc """
  Derives a key of a specific length using the specified hash function.

  An optional salt (extract phase) and/or info message (expand phase)
  can be supplied.

  ## Example

      iex> HKDF.derive(:sha256, "some input", 16)
      <<47, 231, 129, 75, 82, 47, 198, 78, 55, 31, 167, 66, 15, 128, 63, 243>>

      iex> HKDF.derive(:sha256, "some input", 16, "salt", "secret message")
      <<28, 213, 201, 204, 16, 226, 160, 120, 69, 47, 46, 58, 15, 255, 54, 52>>
  """
  @spec derive(hash_fun, input_key_material, length, salt, info) :: output_key_material
  def derive(hash_fun, ikm, len, salt \\ "", info \\ "") do
    prk = extract(hash_fun, ikm, salt)
    expand(hash_fun, prk, len, info)
  end

  @doc """
  Extract a psuedorandom key from an input key material.

  ## Example

      iex> HKDF.extract(:sha256, "some input")
      <<130, 6, 35, 29, 160, 13, 100, 90, 127, 71, 104, 2, 139, 88, 204, 124, 201,
        141, 22, 223, 95, 189, 60, 4, 147, 6, 19, 196, 66, 139, 65, 153>>

      iex> HKDF.extract(:sha256, "some input", "salt")
      <<165, 68, 136, 223, 19, 149, 73, 161, 172, 133, 175, 129, 14, 46, 132, 27, 219,
        137, 155, 191, 199, 9, 251, 100, 155, 173, 33, 97, 201, 250, 19, 92>>
  """
  @spec extract(hash_fun, input_key_material, salt) :: pseudorandom_key
  def extract(hash_fun, ikm, salt \\ "") do
    :crypto.mac(:hmac, hash_fun, salt, ikm)
  end

  @doc """
  Expands a pseudorandom key to an output key material of a defined length.

  ## Example

      iex> prk = HKDF.extract(:sha256, "some input", "salt")
      iex> HKDF.expand(:sha256, prk, 16)
      <<227, 13, 8, 99, 198, 12, 203, 171, 124, 253, 132, 131, 59, 202, 95, 24>>

      iex> prk = HKDF.extract(:sha256, "some input", "salt")
      iex> HKDF.expand(:sha256, prk, 16, "secret message")
      <<28, 213, 201, 204, 16, 226, 160, 120, 69, 47, 46, 58, 15, 255, 54, 52>>
  """
  @spec expand(hash_fun, pseudorandom_key, length, info) :: output_key_material
  def expand(hash_fun, prk, len, info \\ "") do
    hash_len = hash_length(hash_fun)
    # Integer ceiling division avoids the float round-trip; it also returns 0
    # for len == 0, where `Float.ceil/1` combined with a 1..0 range used to
    # compute two spurious HMAC blocks before truncating.
    n = div(len + hash_len - 1, hash_len)

    full =
      if n == 0 do
        ""
      else
        # T(i) = HMAC(prk, T(i-1) || info || i), per RFC 5869 section 2.3.
        1..n
        |> Enum.scan("", fn index, prev ->
          data = prev <> info <> <<index>>
          :crypto.mac(:hmac, hash_fun, prk, data)
        end)
        |> IO.iodata_to_binary()
      end

    # Truncate the concatenated blocks to the requested length.
    binary_part(full, 0, len)
  end

  # Hash output lengths are computed once at compile time.
  for fun <- ~w(md5 sha sha224 sha256 sha384 sha512)a do
    len = fun |> :crypto.hash("") |> byte_size()

    defp hash_length(unquote(fun)) do
      unquote(len)
    end
  end
end
|
lib/hkdf.ex
| 0.829181
| 0.556942
|
hkdf.ex
|
starcoder
|
defmodule ArtemisWeb.TeamView do
  use ArtemisWeb, :view

  import Artemis.Helpers, only: [keys_to_atoms: 2]

  alias Artemis.Permission

  # Bulk Actions

  @doc "Lists every bulk action the team pages support, regardless of user."
  def available_bulk_actions() do
    [
      %BulkAction{
        action: &Artemis.DeleteTeam.call_many(&1, &2),
        authorize: &has?(&1, "teams:delete"),
        extra_fields: &render_extra_fields_delete_warning(&1),
        key: "delete",
        label: "Delete Teams"
      }
    ]
  end

  @doc "Filters `available_bulk_actions/0` down to the ones `user` is authorized for."
  def allowed_bulk_actions(user) do
    # Fix: the previous reduce-with-prepend returned actions in reverse
    # declaration order; Enum.filter preserves it.
    Enum.filter(available_bulk_actions(), fn entry -> entry.authorize.(user) end)
  end

  # Data Table

  def data_table_available_columns() do
    [
      {"Actions", "actions"},
      {"Description", "description"},
      {"Name", "name"},
      {"User Count", "user_count"}
    ]
  end

  def data_table_allowed_columns() do
    %{
      "actions" => [
        label: fn _conn -> nil end,
        value: fn _conn, _row -> nil end,
        value_html: &data_table_actions_column_html/2
      ],
      "description" => [
        label: fn _conn -> "Description" end,
        label_html: fn conn ->
          sortable_table_header(conn, "description", "Description")
        end,
        value: fn _conn, row -> row.description end
      ],
      "name" => [
        label: fn _conn -> "Name" end,
        label_html: fn conn ->
          sortable_table_header(conn, "name", "Name")
        end,
        value: fn _conn, row -> row.name end,
        value_html: fn conn, row ->
          # Only users allowed to see teams get a link.
          case has?(conn, "teams:show") do
            true -> link(row.name, to: Routes.team_path(conn, :show, row))
            false -> row.name
          end
        end
      ],
      "user_count" => [
        label: fn _conn -> "Total Users" end,
        value: fn _conn, row -> row.user_count end,
        value_html: fn conn, row ->
          case has?(conn, "teams:show") do
            true -> link(row.user_count, to: Routes.team_path(conn, :show, row) <> "#link-users")
            false -> row.user_count
          end
        end
      ]
    }
  end

  # Renders the "Show" / "Edit" action links the current user is authorized for.
  defp data_table_actions_column_html(conn, row) do
    allowed_actions = [
      [
        verify: has?(conn, "teams:show"),
        link: link("Show", to: Routes.team_path(conn, :show, row))
      ],
      [
        verify: has?(conn, "teams:update"),
        link: link("Edit", to: Routes.team_path(conn, :edit, row))
      ]
    ]

    content_tag(:div, class: "actions") do
      # Accumulates iodata; unauthorized actions are simply skipped.
      Enum.reduce(allowed_actions, [], fn action, acc ->
        case Keyword.get(action, :verify) do
          true -> [acc | Keyword.get(action, :link)]
          _ -> acc
        end
      end)
    end
  end

  @doc """
  Returns a matching `permission` record based on the passed `permission.id` match value.

  The `permission` data could come from:

  1. The existing record in the database.
  2. The existing form data.

  If the form has not been submitted, it uses the existing record data in the database.
  Once the form is submitted, the existing form data takes precedence. This
  ensures new values are not lost when the form is reloaded after an error.
  """
  def find_permission(match, form, record) do
    existing_permissions = record.permissions

    submitted_permissions =
      case form.params["permissions"] do
        nil -> nil
        values -> Enum.map(values, &struct(Permission, keys_to_atoms(&1, [])))
      end

    permissions = submitted_permissions || existing_permissions

    Enum.find(permissions, fn %{id: id} ->
      # Form params arrive as strings; normalize before comparing.
      id =
        case is_bitstring(id) do
          true -> String.to_integer(id)
          _ -> id
        end

      id == match
    end)
  end
end
|
apps/artemis_web/lib/artemis_web/views/team_view.ex
| 0.594669
| 0.437944
|
team_view.ex
|
starcoder
|
defmodule Cartel.Message.Wns do
  @moduledoc """
  Microsoft WNS message

  For more details on the format see [Push notification service request and response headers (Windows Runtime apps)](https://msdn.microsoft.com/en-us/library/windows/apps/hh465435.aspx)
  section of the [Sending push notifications with WNS](https://msdn.microsoft.com/en-us/library/windows/apps/hh465460.aspx)
  """

  @type_toast "wns/toast"

  @doc """
  Returns the `X-WNS-Type` HTTP header value for type toast
  """
  @spec type_toast :: String.t()
  def type_toast, do: @type_toast

  @type_badge "wns/badge"

  @doc """
  Returns the `X-WNS-Type` HTTP header value for type badge
  """
  @spec type_badge :: String.t()
  def type_badge, do: @type_badge

  @type_tile "wns/tile"

  @doc """
  Returns the `X-WNS-Type` HTTP header value for type tile
  """
  @spec type_tile :: String.t()
  def type_tile, do: @type_tile

  @type_raw "wns/raw"

  @doc """
  Returns the `X-WNS-Type` HTTP header value for type raw
  """
  @spec type_raw :: String.t()
  def type_raw, do: @type_raw

  @typedoc """
  Microsoft WNS message

  - `channel`: recipient channel URI obtained from the user
  - `type`: one of `type_toast/0`, `type_badge/0`, `type_tile/0` or `type_raw/0`
  - `tag`: notification tag
  - `group`: notification group
  - `ttl`: seconds since sending after which the notification expires
  - `cache_policy`: wether to cache notification when device is offline.
  - `suppress_popup`: suppress popups for `type_toast/0` notification
  - `request_for_status`: add device and connection status in reply
  - `payload`: raw octet stream data when `type` is `type_raw/0`, serialized XML string otherwise
  """
  # Fix: `Integer.t()` is not a real type (the Integer module defines no t/0);
  # `non_neg_integer` is what the ttl field actually holds.
  @type t :: %__MODULE__{
          channel: String.t(),
          type: String.t(),
          tag: String.t(),
          group: String.t(),
          ttl: non_neg_integer,
          cache_policy: boolean,
          suppress_popup: boolean,
          request_for_status: boolean,
          payload: binary | String.t()
        }

  defstruct channel: nil,
            type: @type_toast,
            cache_policy: nil,
            tag: nil,
            ttl: 0,
            suppress_popup: nil,
            request_for_status: nil,
            group: nil,
            payload: ""

  @doc """
  Returns the `Content-Type` HTTP header value for the message
  """
  @spec content_type(message :: t()) :: String.t()
  def content_type(%__MODULE__{type: @type_raw}) do
    "application/octet-stream"
  end

  def content_type(%__MODULE__{}) do
    "text/xml"
  end
end
defimpl Cartel.Message, for: Cartel.Message.Wns do
  # The WNS payload is sent verbatim as the HTTP request body; no extra
  # encoding is applied here.
  def serialize(message) do
    message.payload
  end

  # WNS addresses devices by channel URI, so the token goes into `:channel`.
  def update_token(message, token) do
    %{message | channel: token}
  end
end
|
lib/cartel/message/wns.ex
| 0.857545
| 0.433022
|
wns.ex
|
starcoder
|
defmodule Sanbase.SocialData.TrendingWords do
@moduledoc ~s"""
Module for fetching the list of trending words
This list does NOT calculate the most popular words on crypto social
media overall - those would often be the same, redundant words
such as โBitcoinโ, โEthereumโ, โcryptoโ etc.
Instead, our list aims to discover the biggest developing or emerging
stories within the crypto community. That is why each day youโll see
a new batch of fresh topics, currently gaining steam on crypto social media.
This shows an abnormally high interest in a previously uninspiring
topic, making the list practical for discovering new and developing
talking points in the crypto community.
The results are sourced from more than 1000 crypto-specific social media
channels, including hundreds of telegram groups, subredits, discord groups,
bitcointalk forums, etc.
"""
use Ecto.Schema
import Sanbase.DateTimeUtils, only: [str_to_sec: 1]
import Sanbase.Utils.Transform, only: [maybe_apply_function: 2]
alias Sanbase.ClickhouseRepo
require Sanbase.Utils.Config, as: Config
@type word :: String.t()
@type slug :: String.t()
@type interval :: String.t()
@typedoc """
Defines the position in the list of trending words for a given datetime.
If it has an integer value it means that the word was in the list of emerging
words. If it has a nil value it means that the word was not in that list
"""
@type position :: non_neg_integer() | nil
@type trending_word :: %{
word: word,
score: float()
}
@type trending_slug :: %{
slug: slug,
score: float()
}
@type word_stat :: %{
datetme: DateTime.t(),
position: position
}
@default_sources [:twitter, :telegram, :reddit]
# When calculating the trending now words fetch the data for the last
# N hours to ensure that there is some data and we're not in the middle
# of computing the latest data
@hours_back_ensure_has_data 3
schema Config.get(:trending_words_table) do
field(:dt, :utc_datetime)
field(:word, :string)
field(:volume, :float)
field(:volume_normalized, :float)
field(:unqiue_users, :integer)
field(:score, :float)
field(:source, :string)
# ticker_slug
field(:project, :string)
field(:computed_at, :string)
end
@spec get_trending_words(
DateTime.t(),
DateTime.t(),
interval,
non_neg_integer,
list(atom())
) ::
{:ok, map()} | {:error, String.t()}
def get_trending_words(from, to, interval, size, sources \\ @default_sources) do
{query, args} = get_trending_words_query(from, to, interval, size, sources)
ClickhouseRepo.query_reduce(query, args, %{}, fn
[dt, word, _project, score], acc ->
datetime = DateTime.from_unix!(dt)
elem = %{word: word, score: score}
Map.update(acc, datetime, [elem], fn words -> [elem | words] end)
end)
end
@spec get_trending_projects(
DateTime.t(),
DateTime.t(),
interval,
non_neg_integer,
list(atom())
) ::
{:ok, map()} | {:error, String.t()}
def get_trending_projects(from, to, interval, size, sources \\ @default_sources) do
{query, args} = get_trending_words_query(from, to, interval, sources, size)
ClickhouseRepo.query_reduce(query, args, %{}, fn
[_dt, _word, nil, _score], acc ->
acc
[dt, _word, project, score], acc ->
datetime = DateTime.from_unix!(dt)
[_ticker, slug] = String.split(project, "_")
elem = %{slug: slug, score: score}
Map.update(acc, datetime, [elem], fn slugs -> [elem | slugs] end)
end)
end
@doc ~s"""
Get a list of the currently trending words
"""
@spec get_currently_trending_words(non_neg_integer(), list(atom())) ::
{:ok, list(trending_word)} | {:error, String.t()}
def get_currently_trending_words(size, sources \\ @default_sources)
def get_currently_trending_words(size, sources) do
now = Timex.now()
from = Timex.shift(now, hours: -@hours_back_ensure_has_data)
case get_trending_words(from, now, "1h", size, sources) do
{:ok, %{} = empty_map} when map_size(empty_map) == 0 ->
{:ok, []}
{:ok, stats} ->
{_, words} =
stats
|> Enum.max_by(fn {dt, _} -> DateTime.to_unix(dt) end)
{:ok, words}
{:error, error} ->
{:error, error}
end
end
@doc ~s"""
Get a list of the currently trending projects
"""
@spec get_currently_trending_projects(non_neg_integer(), list(atom())) ::
{:ok, list(trending_slug)} | {:error, String.t()}
def get_currently_trending_projects(size, sources \\ @default_sources)
def get_currently_trending_projects(size, sources) do
now = Timex.now()
from = Timex.shift(now, hours: -@hours_back_ensure_has_data)
case get_trending_projects(from, now, "1h", size, sources) do
{:ok, stats} ->
{_, projects} =
stats
|> Enum.max_by(fn {dt, _} -> DateTime.to_unix(dt) end)
{:ok, projects}
{:error, error} ->
{:error, error}
end
end
@spec get_word_trending_history(
word,
DateTime.t(),
DateTime.t(),
interval,
non_neg_integer,
list(atom())
) ::
{:ok, list(word_stat)} | {:error, String.t()}
def get_word_trending_history(word, from, to, interval, size, sources \\ @default_sources) do
{query, args} = word_trending_history_query(word, from, to, interval, size, sources)
ClickhouseRepo.query_transform(query, args, fn [dt, position] ->
position = if position > 0, do: position
%{
datetime: DateTime.from_unix!(dt),
position: position
}
end)
|> maybe_apply_function(fn result -> Enum.reject(result, &is_nil(&1.position)) end)
end
@spec get_project_trending_history(
        slug,
        DateTime.t(),
        DateTime.t(),
        interval,
        non_neg_integer,
        list(atom())
      ) ::
        {:ok, list(word_stat)} | {:error, String.t()}
def get_project_trending_history(slug, from, to, interval, size, sources \\ @default_sources) do
  {query, args} = project_trending_history_query(slug, from, to, interval, size, sources)

  row_to_stat = fn [dt, position] ->
    %{
      datetime: DateTime.from_unix!(dt),
      # SQL indexOf/2 yields 0 when the project is absent from the bucket;
      # normalize that to nil so it can be dropped below.
      position: if(position > 0, do: position, else: nil)
    }
  end

  query
  |> ClickhouseRepo.query_transform(args, row_to_stat)
  |> maybe_apply_function(fn rows -> Enum.reject(rows, &is_nil(&1.position)) end)
end
# Builds the base {sql, args} pair selecting the top `size` trending words per
# `interval`-sized time bucket in [from, to), with each word's score averaged
# over the requested sources. Positional args: ?1 interval (seconds), ?2 from,
# ?3 to, ?4 sources, ?5 per-bucket limit.
defp get_trending_words_query(from, to, interval, size, sources) do
  query = """
  SELECT
    t,
    word,
    any(project) AS project,
    SUM(score) / #{length(sources)} AS total_score
  FROM(
    SELECT
      toUnixTimestamp(intDiv(toUInt32(toDateTime(dt)), ?1) * ?1) AS t,
      word,
      any(project) AS project,
      argMax(score, dt) as score
    FROM #{Config.get(:trending_words_table)}
    PREWHERE
      dt >= toDateTime(?2) AND
      dt < toDateTime(?3) AND
      source IN (?4)
    GROUP BY t, word, source
    ORDER BY t, score DESC
  )
  GROUP BY t, word
  ORDER BY t, total_score DESC
  LIMIT ?5 BY t
  """

  # Sources are atoms in the API but stored as strings in the database.
  # NOTE(review): `length(sources)` is interpolated into the SQL — assumes
  # `sources` is a non-empty list; confirm callers never pass [].
  sources = Enum.map(sources, &to_string/1)

  args = [str_to_sec(interval), from, to, sources, size]

  {query, args}
end
# Wraps the base trending-words query to compute, per time bucket, the 1-based
# position of `word` in that bucket's top list. `?#{args_len}` re-uses the last
# base argument (the per-bucket size) as groupArray's max length, and the word
# itself is appended as the next positional argument (`?next_pos`).
# ClickHouse `indexOf` yields 0 when the word is absent.
defp word_trending_history_query(word, from, to, interval, size, sources) do
  {query, args} = get_trending_words_query(from, to, interval, size, sources)
  args_len = length(args)
  next_pos = args_len + 1

  # Stitch the outer SELECT around the base query as iodata, then flatten.
  query =
    [
      """
      SELECT
        t,
        toUInt32(indexOf(groupArray(?#{args_len})(word), ?#{next_pos}))
      FROM(
      """,
      query,
      """
      )
      GROUP BY t
      ORDER BY t
      """
    ]
    |> to_string()

  args = args ++ [word]

  {query, args}
end
# Same wrapping as word_trending_history_query/6, but looks up the project's
# position via its "TICKER_slug" identifier instead of a bare word.
defp project_trending_history_query(slug, from, to, interval, size, sources) do
  {query, args} = get_trending_words_query(from, to, interval, size, sources)
  args_len = length(args)
  next_pos = args_len + 1

  query =
    [
      """
      SELECT
        t,
        toUInt32(indexOf(groupArray(?#{args_len})(project), ?#{next_pos}))
      FROM(
      """,
      query,
      """
      )
      GROUP BY t
      ORDER BY t
      """
    ]
    |> to_string()

  # NOTE(review): ticker_by_slug/1 may return nil for an unknown slug, which
  # would raise on the `<>` below — confirm callers validate the slug first.
  ticker = Sanbase.Model.Project.ticker_by_slug(slug)
  args = args ++ [ticker <> "_" <> slug]

  {query, args}
end
end
|
lib/sanbase/social_data/trending_words.ex
| 0.843702
| 0.6711
|
trending_words.ex
|
starcoder
|
defmodule ExUnit.Case do
  @moduledoc """
  This module is meant to be used in other modules
  as a way to configure and prepare them for testing.
  When used, it allows the following options:
  * :async - configure Elixir to run that specific test case
    in parallel with others. Must be used for performance
    when your test cases do not change any global state;
  This module automatically includes all callbacks defined in
  `ExUnit.Callbacks`. See that module's documentation for more
  information.
  ## Examples
      defmodule AssertionTest do
        # Use the module
        use ExUnit.Case, async: true
        # The `test` macro is imported by ExUnit.Case
        test "always pass" do
          assert true
        end
      end
  """

  @doc false
  # NOTE: `opts // []` is the pre-1.0 default-argument syntax (modern Elixir
  # spells it `opts \\ []`). This file targets an old Elixir release.
  defmacro __using__(opts // []) do
    async = Keyword.get(opts, :async, false)
    parent = Keyword.get(opts, :parent, __MODULE__)
    quote do
      # Register the case with the ExUnit server at compile time so the
      # runner knows whether it may execute concurrently with other cases.
      if unquote(async) do
        ExUnit.Server.add_async_case(__MODULE__)
      else
        ExUnit.Server.add_sync_case(__MODULE__)
      end
      use ExUnit.Callbacks, parent: unquote(parent)
      import ExUnit.Assertions
      import ExUnit.Case
      import ExUnit.DocTest, only: [doctest: 1, doctest: 2]
    end
  end

  @doc false
  # Default no-op hook: returns the context unchanged for all four
  # setup/teardown kinds.
  def __exunit__(kind, context) when kind in [:setup, :teardown, :setup_all, :teardown_all] do
    context
  end

  @doc """
  Provides a convenient macro that allows a test to be
  defined with a string. This macro automatically inserts
  the atom :ok as the last line of the test. That said,
  a passing test always returns :ok, but, more important,
  it forces Elixir to not tail call optimize the test and
  therefore avoiding hiding lines from the backtrace.
  ## Examples
      test "true is equal to true" do
        assert true == true
      end
  """
  # `var // quote(do: _)` — optional pattern matched against the test context
  # (old default-argument syntax again).
  defmacro test(message, var // quote(do: _), contents) do
    contents =
      case contents do
        [do: _] ->
          # Plain do-block: append :ok so the test body is never in tail
          # position (see the rationale in the @doc above).
          quote do
            unquote(contents)
            :ok
          end
        _ ->
          quote do
            try(unquote(contents))
            :ok
          end
      end
    quote do
      message = unquote(message)
      # Test names are atoms: "test <msg>" for strings, "test_<msg>" otherwise.
      message = if is_binary(message) do
        :"test #{message}"
      else
        :"test_#{message}"
      end
      # Low-level `def` form: function name, argument patterns, guards, body.
      def message, [unquote(Macro.escape var)], [], do:
        unquote(Macro.escape_quoted contents)
    end
  end
end
|
lib/ex_unit/lib/ex_unit/case.ex
| 0.782247
| 0.486758
|
case.ex
|
starcoder
|
defmodule Liquor.Whitelist do
  @moduledoc """
  Whitelist takes a list of search items and attempts to filter them
  """
  # A filter function receives (op, key, value) and either accepts the term —
  # optionally rewriting it — or rejects it.
  @type filter_func :: ((atom, atom, term) -> {:ok, {atom, atom, term} | {atom, term}} | :reject)
  @type filter_item ::
    nil |
    boolean |
    atom |
    {:apply, module, atom, list} |
    filter_func
  @type filter :: %{String.t => filter_item} | filter_func

  # Maps an operator to its logical negation; used by the "-"/"!" key prefixes.
  @spec invert_op(Liquor.op) :: Liquor.op
  defp invert_op(:match), do: :unmatch
  defp invert_op(:unmatch), do: :match
  defp invert_op(:>=), do: :<
  defp invert_op(:<=), do: :>
  defp invert_op(:<), do: :>=
  defp invert_op(:>), do: :<=
  defp invert_op(:==), do: :!=
  defp invert_op(:!=), do: :==

  # nil/false filter items drop the term entirely.
  defp do_apply_filter(_op, _key, _value, nil), do: :reject
  defp do_apply_filter(_op, _key, _value, false), do: :reject
  # `true` keeps the term under its own key.
  # NOTE(review): String.to_atom/1 creates atoms from input keys — unbounded
  # input could exhaust the atom table; consider String.to_existing_atom/1.
  defp do_apply_filter(op, key, value, true), do: {:ok, {op, String.to_atom(key), value}}
  # An atom filter item renames the key.
  defp do_apply_filter(op, _key, value, atom) when is_atom(atom), do: {:ok, {op, atom, value}}
  # MFA filter item: extra args are appended after (op, key, value).
  defp do_apply_filter(op, key, value, {:apply, m, f, a}) when is_atom(m) and is_atom(f) do
    :erlang.apply(m, f, [op, key, value | a])
  end
  defp do_apply_filter(op, key, value, filter) when is_function(filter) do
    filter.(op, key, value)
  end

  # Operator prefixes embedded in the key override the current op. Clause
  # order matters: two-character prefixes (">=", "<=") must come before their
  # one-character counterparts (">", "<").
  defp apply_filter_prefix(_op, "==" <> key, value, filter) do
    apply_filter_prefix(:==, key, value, filter)
  end
  defp apply_filter_prefix(_op, "!=" <> key, value, filter) do
    apply_filter_prefix(:!=, key, value, filter)
  end
  defp apply_filter_prefix(_op, ">=" <> key, value, filter) do
    apply_filter_prefix(:>=, key, value, filter)
  end
  defp apply_filter_prefix(_op, "<=" <> key, value, filter) do
    apply_filter_prefix(:<=, key, value, filter)
  end
  defp apply_filter_prefix(_op, ">" <> key, value, filter) do
    apply_filter_prefix(:>, key, value, filter)
  end
  defp apply_filter_prefix(_op, "<" <> key, value, filter) do
    apply_filter_prefix(:<, key, value, filter)
  end
  # "-" and "!" negate whatever op is currently in effect.
  defp apply_filter_prefix(op, "-" <> key, value, filter) do
    apply_filter_prefix(invert_op(op), key, value, filter)
  end
  defp apply_filter_prefix(op, "!" <> key, value, filter) do
    apply_filter_prefix(invert_op(op), key, value, filter)
  end
  # No recognized prefix left: hand off to the filter item.
  defp apply_filter_prefix(op, key, value, filter) do
    do_apply_filter(op, key, value, filter)
  end

  defp apply_filter(op, key, value, filter) when is_atom(key) do
    # somewhat normalize the input
    do_apply_filter(op, Atom.to_string(key), value, filter)
  end
  defp apply_filter(op, key, value, filter) when is_binary(key) do
    apply_filter_prefix(op, key, value, filter)
  end

  # Accumulate accepted terms (prepended; reversed once at the end).
  defp handle_item(:reject, acc), do: acc
  defp handle_item({:ok, {op, key, _value} = item}, acc) when is_atom(op) and is_atom(key) do
    [item | acc]
  end
  # A 2-tuple result defaults the op to :match.
  defp handle_item({:ok, {key, value}}, acc) when is_atom(key) do
    [{:match, key, value} | acc]
  end

  @spec whitelist(list, filter) :: list
  # Terms may be {op, key, value}, {key, value} (op defaults to :match), or a
  # bare value (keyed under :_).
  def whitelist(terms, filter) when is_function(filter) do
    Enum.reduce(terms, [], fn
      {op, key, value}, acc ->
        handle_item(apply_filter(op, key, value, filter), acc)
      {key, value}, acc ->
        handle_item(apply_filter(:match, key, value, filter), acc)
      value, acc ->
        handle_item(apply_filter(:match, :_, value, filter), acc)
    end)
    |> Enum.reverse()
  end
  # Map spec: the filter item is looked up by the (string) key.
  def whitelist(terms, filter_spec) when is_map(filter_spec) do
    whitelist(terms, &apply_filter(&1, &2, &3, filter_spec[&2]))
  end
end
|
lib/liquor/whitelist.ex
| 0.599133
| 0.704376
|
whitelist.ex
|
starcoder
|
defmodule Record.Backend do
  # Callback functions invoked by defrecord, defrecordp and friends.
  @moduledoc false

  @doc """
  Splits a keywords list into fields and types.
  This logic is shared by records and structs.
  """
  def split_fields_and_types(tag, kv) when is_list(kv) do
    split_fields_and_types(tag, kv, [], [])
  end
  def split_fields_and_types(tag, other) do
    raise ArgumentError, message: "#{tag} fields must be a keyword list, got: #{Macro.to_string other}"
  end

  # `{ :::, _, [default, type] }` is the AST of `field :: type` — a field
  # carrying both a default value and an explicit type annotation.
  defp split_fields_and_types(tag, [{ field, { :::, _, [default, type] }}|t], fields, types) do
    split_fields_and_types(tag, t, [{ field, default }|fields], [{ field, type }|types])
  end
  # `field: default` without an annotation — the type falls back to `term`.
  defp split_fields_and_types(tag, [{ field, default }|t], fields, types) when is_atom(field) do
    split_fields_and_types(tag, t, [{ field, default }|fields], [{ field, quote(do: term) }|types])
  end
  # A bare atom field — default nil, type `term`.
  defp split_fields_and_types(tag, [field|t], fields, types) when is_atom(field) do
    split_fields_and_types(tag, t, [{ field, nil }|fields], [{ field, quote(do: term) }|types])
  end
  defp split_fields_and_types(tag, [other|_], _fields, _types) do
    raise ArgumentError, message: "#{tag} fields must be atoms, got: #{Macro.to_string other}"
  end
  # Accumulators were built by prepending, so reverse once at the end.
  defp split_fields_and_types(_tag, [], fields, types) do
    { :lists.reverse(fields), :lists.reverse(types) }
  end

  @doc """
  Callback invoked from record/0 and record/1 macros.
  """
  # An atom argument asks for a field's index; keywords create a record.
  def access(atom, fields, args, caller) do
    cond do
      is_atom(args) ->
        index(atom, fields, args)
      Keyword.keyword?(args) ->
        create(atom, fields, args, caller)
      true ->
        raise ArgumentError,
          message: "expected arguments to be a compile time atom or keywords, got: #{Macro.to_string args}"
    end
  end

  @doc """
  Callback invoked from the record/2 macro.
  """
  # An atom argument reads one field from `record`; keywords update several.
  def access(atom, fields, record, args, caller) do
    cond do
      is_atom(args) ->
        get(atom, fields, record, args)
      Keyword.keyword?(args) ->
        update(atom, fields, record, args, caller)
      true ->
        raise ArgumentError,
          message: "expected arguments to be a compile time atom or keywords, got: #{Macro.to_string args}"
    end
  end

  @doc """
  Gets the index of field.
  """
  def index(atom, fields, field) do
    if index = find_index(fields, field, 0) do
      index - 1 # Convert to Elixir index
    else
      raise ArgumentError, message: "record #{inspect atom} does not have the key: #{inspect field}"
    end
  end

  @doc """
  Creates a new record with the given default fields and keyword values.
  """
  def create(atom, fields, keyword, caller) do
    in_match = caller.in_match?
    { match, remaining } =
      Enum.map_reduce(fields, keyword, fn({ field, default }, each_keyword) ->
        new_fields =
          case Keyword.has_key?(each_keyword, field) do
            true -> Keyword.get(each_keyword, field)
            false ->
              # Inside a match, unspecified fields become wildcards (`_`);
              # elsewhere they take their declared default.
              case in_match do
                true -> { :_, [], nil }
                false -> Macro.escape(default)
              end
          end
        # Consume the key so leftovers can be reported as unknown below.
        { new_fields, Keyword.delete(each_keyword, field) }
      end)
    case remaining do
      [] ->
        # A record is a tuple whose first element is the record tag.
        { :{}, [], [atom|match] }
      _ ->
        keys = for { key, _ } <- remaining, do: key
        raise ArgumentError, message: "record #{inspect atom} does not have the key: #{inspect hd(keys)}"
    end
  end

  @doc """
  Updates a record given by var with the given keyword.
  """
  def update(atom, fields, var, keyword, caller) do
    if caller.in_match? do
      raise ArgumentError, message: "cannot invoke update style macro inside match"
    end
    # Fold each update into a nested :erlang.setelement/3 call chain.
    Enum.reduce keyword, var, fn({ key, value }, acc) ->
      index = find_index(fields, key, 0)
      if index do
        quote do
          :erlang.setelement(unquote(index), unquote(acc), unquote(value))
        end
      else
        raise ArgumentError, message: "record #{inspect atom} does not have the key: #{inspect key}"
      end
    end
  end

  @doc """
  Gets a record key from the given var.
  """
  def get(atom, fields, var, key) do
    index = find_index(fields, key, 0)
    if index do
      quote do
        :erlang.element(unquote(index), unquote(var))
      end
    else
      raise ArgumentError, message: "record #{inspect atom} does not have the key: #{inspect key}"
    end
  end

  # Returns the 1-based Erlang tuple index of `k`, offset by one extra slot
  # for the record tag (hence `i + 2`); nil when the key is absent.
  defp find_index([{ k, _ }|_], k, i), do: i + 2
  defp find_index([{ _, _ }|t], k, i), do: find_index(t, k, i + 1)
  defp find_index([], _k, _i), do: nil
end
|
lib/elixir/lib/record/backend.ex
| 0.689828
| 0.589716
|
backend.ex
|
starcoder
|
defmodule Crux.Structs.Snowflake do
  @moduledoc """
  Custom non discord api module to help with working with Discord's snowflakes.
  For more information see [Discord Docs](https://discord.com/developers/docs/reference#snowflakes).
  """
  @moduledoc since: "0.2.1"

  alias Crux.Structs.Snowflake

  use Bitwise

  @typedoc """
  A discord `snowflake`, an unsigned 64 bit integer.
  """
  @typedoc since: "0.2.1"
  @type t :: 0..0xFFFF_FFFF_FFFF_FFFF

  @typedoc """
  All valid types that can be resolved into a `t:t/0`.
  """
  @typedoc since: "0.2.1"
  @type resolvable :: String.t() | t()

  @doc """
  Returns `true` if `term` is a `t:t/0`; otherwise returns `false`..
  """
  @doc since: "0.2.1"
  # Guard-safe check that the value is within the full unsigned 64-bit range.
  defguard is_snowflake(snowflake)
           when is_integer(snowflake) and snowflake in 0..0xFFFF_FFFF_FFFF_FFFF

  @doc """
  The discord epoch, the first second of 2015 or `1420070400000`.
  ```elixir
  iex> Crux.Structs.Snowflake.discord_epoch()
  1_420_070_400_000
  ```
  """
  @doc since: "0.2.1"
  @spec discord_epoch() :: non_neg_integer()
  defdelegate discord_epoch(), to: Crux.Structs.Snowflake.Parts

  @doc """
  Deconstructs a `t:t/0` to its `t:Crux.Structs.Snowflake.Parts.t/0`.
  ```elixir
  iex> Crux.Structs.Snowflake.deconstruct(218348062828003328)
  %Crux.Structs.Snowflake.Parts{
    increment: 0,
    process_id: 0,
    timestamp: 1472128634889,
    worker_id: 1
  }
  ```
  """
  @doc since: "0.2.1"
  @spec deconstruct(t) :: Snowflake.Parts.t()
  defdelegate deconstruct(snowflake), to: Snowflake.Parts

  @doc """
  Constructs a `t:t/0` from its `t:Crux.Structs.Snowflake.Parts.t/0` or a keyword of its fields.
  ```elixir
  iex> %Crux.Structs.Snowflake.Parts{increment: 0, process_id: 0, timestamp: 1472128634889, worker_id: 1}
  ...> |> Crux.Structs.Snowflake.construct()
  218348062828003328
  iex> Crux.Structs.Snowflake.construct(increment: 1, timestamp: 1451106635493)
  130175406673231873
  iex> Crux.Structs.Snowflake.construct(timestamp: Crux.Structs.Snowflake.discord_epoch())
  0
  ```
  """
  @doc since: "0.2.1"
  @spec construct(Snowflake.Parts.t() | Keyword.t()) :: t
  defdelegate construct(parts), to: Snowflake.Parts

  @doc """
  Converts a `t:String.t/0` to a `t:t/0` while allowing `t:t/0` and `nil` to pass through.
  Raises an `ArgumentError` if the provided string is not an integer.
  ```elixir
  iex> Crux.Structs.Snowflake.to_snowflake(218348062828003328)
  218348062828003328
  # Fallbacks
  iex> Crux.Structs.Snowflake.to_snowflake("218348062828003328")
  218348062828003328
  iex> Crux.Structs.Snowflake.to_snowflake(nil)
  nil
  ```
  """
  @doc since: "0.2.1"
  @spec to_snowflake(t()) :: t()
  @spec to_snowflake(String.t()) :: t() | no_return()
  @spec to_snowflake(nil) :: nil
  def to_snowflake(nil), do: nil

  def to_snowflake(snowflake) when is_snowflake(snowflake) do
    snowflake
  end

  # String.to_integer/1 raises ArgumentError for non-integer strings, which is
  # the documented raising behavior of this function.
  def to_snowflake(string) when is_binary(string) do
    string
    |> String.to_integer()
    |> to_snowflake()
  end

  @doc """
  Converts a `t:String.t/0` to a `t:t/0` while allowing `t:t/0` to pass through.
  Returns `:error` if the provided string is not a `t:t/0`.
  ```elixir
  iex> Crux.Structs.Snowflake.parse("invalid")
  :error
  iex> Crux.Structs.Snowflake.parse(218348062828003328)
  218348062828003328
  # Fallbacks
  iex> Crux.Structs.Snowflake.parse("218348062828003328")
  218348062828003328
  ```
  """
  @doc since: "0.2.1"
  @spec parse(t()) :: t()
  @spec parse(String.t()) :: t() | :error
  def parse(snowflake) when is_snowflake(snowflake) do
    snowflake
  end

  # Integer.parse/1 must consume the entire string AND the result must be in
  # snowflake range; anything else (trailing garbage, negatives) is :error.
  def parse(string) when is_binary(string) do
    case Integer.parse(string) do
      {snowflake, ""} when is_snowflake(snowflake) ->
        snowflake

      _ ->
        :error
    end
  end

  # Delegates
  @doc """
  Deconstructs a `t:t/0` to its `t:Crux.Structs.Snowflake.Parts.t/0`.
  """
  @doc since: "0.2.1"
  @spec from_integer(t) :: Snowflake.Parts.t()
  defdelegate from_integer(snowflake), to: Snowflake.Parts, as: :deconstruct

  @doc """
  Constructs a `t:t/0` from its `t:Crux.Structs.Snowflake.Parts.t/0`.
  """
  @doc since: "0.2.1"
  @spec to_integer(Snowflake.Parts.t()) :: t()
  defdelegate to_integer(t), to: Snowflake.Parts, as: :construct
end
|
lib/structs/snowflake.ex
| 0.81946
| 0.846006
|
snowflake.ex
|
starcoder
|
defmodule RandomString do
  @moduledoc """
  An utility to generate random strings of desired character sets.
  """

  # {first codepoint, alphabet size} for every contiguous character class.
  @simple_classes %{
    numeric: {?0, 10},
    uppercase: {?A, 26},
    lowercase: {?a, 26},
    printable_chars: {?!, 94}
  }

  # Characters that are easily confused with one another.
  @misleading_chars '01258' ++ 'ijlouv' ++ 'BIOSUVZ'

  # public APIs

  @doc """
  Take `n` characters from `:alphanumeric` class characters.
  """
  def take(n) when is_integer(n), do: take(n, :alphanumeric)

  @doc """
  Take `n` characters from a specified character class.
  Available character classes are: `:alphabetical`, `:alphanumeric`, `:numeric`, `:lowercase`, `:uppercase`, `printable_chars`.
  """
  def take(n, character_class) when is_integer(n) do
    character_class
    |> stream()
    |> Enum.take(n)
    |> List.to_string()
  end

  @doc """
  Take `n` characters from alphanumeric characters, excluding "misleading characters" (characters that look similar to another character: `01258ijlouvBIOSUVZ`).
  """
  def take_without_misleading_characters(n) when is_integer(n) do
    stream_without_misleading_characters()
    |> Enum.take(n)
    |> List.to_string()
  end

  @doc """
  Take `n` characters from a stream with specified character class (defaults to `:alphanumeric`), excluding characters specified in `character_list`.
  """
  def take_without_characters(n, character_list, character_class \\ :alphanumeric)
      when is_integer(n) and is_list(character_list) do
    character_list
    |> stream_without_characters(character_class)
    |> Enum.take(n)
    |> List.to_string()
  end

  # definition of streams

  @doc """
  Returns a [Stream](https://hexdocs.pm/elixir/Stream.html) of characters with a specified character class.
  """
  def stream(character_class)
      when character_class in [:numeric, :uppercase, :lowercase, :printable_chars] do
    {first, size} = Map.fetch!(@simple_classes, character_class)
    # :rand.uniform/1 yields 1..size, so shift down by one.
    Stream.repeatedly(fn -> first + :rand.uniform(size) - 1 end)
  end

  def stream(:alphabetical) do
    # Draw from 52 slots: 1..26 map to A-Z, 27..52 map to a-z.
    Stream.repeatedly(fn ->
      case :rand.uniform(26 * 2) do
        offset when offset > 26 -> ?a + offset - 27
        offset -> ?A + offset - 1
      end
    end)
  end

  def stream(:alphanumeric) do
    # Draw from 62 slots: 1..10 digits, 11..36 uppercase, 37..62 lowercase.
    Stream.repeatedly(fn ->
      case :rand.uniform(26 * 2 + 10) do
        offset when offset > 10 + 26 -> ?a + offset - 37
        offset when offset > 10 -> ?A + offset - 11
        offset -> ?0 + offset - 1
      end
    end)
  end

  @doc """
  Returns a [Stream](https://hexdocs.pm/elixir/Stream.html) of characters that does not include "misleading characters".
  """
  def stream_without_misleading_characters(character_class \\ :alphanumeric) do
    character_class
    |> stream()
    |> Stream.reject(&(&1 in @misleading_chars))
  end

  @doc """
  Returns a [Stream](https://hexdocs.pm/elixir/Stream.html) of characters that does not include characters specified in `character_list`.
  """
  def stream_without_characters(character_list, character_class \\ :alphanumeric)
      when is_list(character_list) do
    character_class
    |> stream()
    |> Stream.reject(&(&1 in character_list))
  end
end
|
lib/random_string.ex
| 0.812459
| 0.476519
|
random_string.ex
|
starcoder
|
defmodule Bgp.Protocol.Capability do
  @moduledoc """
  Capability encoding/decoding for BGP open messages.
  """

  defmodule Capability do
    @moduledoc """
    Capability struct for encoding/decoding.
    """
    @enforce_keys [:type, :value]
    defstruct [:type, :value]

    @type capability_type :: 0..255
    @type t :: %Capability{
            type: capability_type,
            value: binary
          }
  end

  @doc """
  Encode capability into data.

  Wraps the capability TLV (code, length, value) in a BGP OPEN optional
  parameter of type 2 ("Capabilities", RFC 5492).
  """
  @spec encode(Capability.t()) :: binary
  def encode(%Capability{} = cap) do
    capdata = <<cap.type::8, byte_size(cap.value)::8, cap.value::binary>>

    <<
      # param type: capability (2) — restored; this literal had been mangled
      # by an address-sanitizing tool.
      2::8,
      # param length
      byte_size(capdata)::8,
      # param data
      capdata::binary
    >>
  end

  @doc """
  Encode multiprotocol capability into data (RFC 4760).
  """
  @spec multiprotocol_ext(non_neg_integer, non_neg_integer) :: binary
  def multiprotocol_ext(afi, safi) do
    encode(%Capability{
      type: 1,
      # AFI (2 octets), reserved octet (must be 0 per RFC 4760), SAFI (1 octet).
      value: <<afi::16, 0::8, safi::8>>
    })
  end

  @doc """
  Encode Cisco Route Refresh (RFC 7313).
  """
  @spec rr_cisco() :: binary
  def rr_cisco do
    encode(%Capability{type: 128, value: <<>>})
  end

  @doc """
  Encode Route Refresh (RFC 7313).
  """
  @spec rr() :: binary
  def rr do
    encode(%Capability{type: 2, value: <<>>})
  end

  @doc """
  Encode 4 byte ASN support and set the 4 byte ASN number (RFC 6793).
  """
  @spec asn4(non_neg_integer) :: binary
  def asn4(asn) do
    encode(%Capability{type: 65, value: <<asn::32>>})
  end

  @doc """
  Encode add-path capability (RFC 7911).
  """
  @spec add_path(non_neg_integer, non_neg_integer, non_neg_integer) :: binary
  def add_path(afi, safi, sendreceive) do
    encode(%Capability{type: 69, value: <<afi::16, safi::8, sendreceive::8>>})
  end

  @doc """
  Encode FQDN capability (RFC Draft:
  https://tools.ietf.org/html/draft-walton-bgp-hostname-capability-02).
  """
  @spec fqdn(binary, binary) :: binary
  def fqdn(hostname, domain) do
    hostnamelen = byte_size(hostname)
    domainlen = byte_size(domain)

    encode(%Capability{
      type: 73,
      value: <<hostnamelen::8, hostname::binary, domainlen::8, domain::binary>>
    })
  end

  @doc """
  Encode Graceful restart capability (RFC 4724).
  """
  @spec graceful_restart(non_neg_integer) :: binary
  def graceful_restart(timer) do
    # Restart-state flag cleared: we are not in the middle of a restart.
    restart = 0

    encode(%Capability{
      type: 64,
      # 1-bit restart flag, 3 reserved bits, 12-bit restart timer (seconds).
      value: <<restart::1, 0::3, timer::12>>
    })
  end

  @doc """
  Decode capability binary into Elixir data structure.
  """
  @spec decode(binary) :: {:ok, Capability.t()} | :error
  def decode(<<type::8, length::8, data::binary>>) do
    # Bytes beyond the declared length are ignored; raises a MatchError if
    # `data` is shorter than `length` (malformed capability).
    <<value::bytes-size(length), _::binary>> = data
    {:ok, %Capability{type: type, value: value}}
  end

  def decode(_), do: :error
end
|
lib/bgp/protocol/capability.ex
| 0.801237
| 0.415017
|
capability.ex
|
starcoder
|
defmodule Geocalc.Shape do
  @moduledoc """
  `Geocalc.Shape` contains `Circle`, `Rectangle` and `Ellipse` shapes.
  Those shapes define a geographical area projection and are designed to be
  used for geofencing, ie: the user can determine if one point is inside or
  outside a geographical zone.
  Geographical zones are defined with a center point and several spatial
  dimensions (see each shape documentation).
  """

  defmodule Circle do
    @moduledoc """
    `Circle` describes a circular geographical area, centered on `latitude`,
    `longitude`, with a `radius` in meters.
    `latitude` and `longitude` could be decimal degrees or `Geocalc.DMS`.
    """
    # All fields are mandatory; reuse the enforced-keys list for defstruct.
    @enforce_keys [:latitude, :longitude, :radius]
    defstruct @enforce_keys

    @type t :: %__MODULE__{
            latitude: number | Geocalc.DMS.t(),
            longitude: number | Geocalc.DMS.t(),
            radius: number
          }
  end

  defmodule Rectangle do
    @moduledoc """
    `Rectangle` describes a rectangular geographical area, centered on
    `latitude`, `longitude` (could be decimal degrees or `Geocalc.DMS`), with
    `long_semi_axis` and `short_semi_axis` (both in meters) and an azimuth
    `angle` (in degrees).
    `long_semi_axis` is the distance between the center point and the short
    side of the rectangle.
    `short_semi_axis` is the distance between the center point and the long
    side of the rectangle.
    `angle` is the azimuth angle of the long side of the rectangle, ie: the
    angle between north and `long_semi_axis`.
    """
    @enforce_keys [:latitude, :longitude, :long_semi_axis, :short_semi_axis, :angle]
    defstruct @enforce_keys

    @type t :: %__MODULE__{
            latitude: number | Geocalc.DMS.t(),
            longitude: number | Geocalc.DMS.t(),
            long_semi_axis: number,
            short_semi_axis: number,
            angle: number
          }
  end

  defmodule Ellipse do
    @moduledoc """
    `Ellipse` describes an elliptic geographical area, centered on `latitude`,
    `longitude` (could be decimal degrees or `Geocalc.DMS`), with
    `long_semi_axis` and `short_semi_axis` (both in meters) and an azimuth
    `angle` (in degrees).
    `long_semi_axis` is the length of the longest diameter, also called
    semi-major axis.
    `short_semi_axis` is the length of the shortest diameter, also called
    semi-minor axis.
    `angle` is the azimuth angle of the long semi-axis.
    """
    @enforce_keys [:latitude, :longitude, :long_semi_axis, :short_semi_axis, :angle]
    defstruct @enforce_keys

    @type t :: %__MODULE__{
            latitude: number | Geocalc.DMS.t(),
            longitude: number | Geocalc.DMS.t(),
            long_semi_axis: number,
            short_semi_axis: number,
            angle: number
          }
  end
end
|
lib/geocalc/shape.ex
| 0.940993
| 0.995497
|
shape.ex
|
starcoder
|
defmodule ThousandIsland.Transports.TCP do
  @moduledoc """
  Defines a `ThousandIsland.Transport` implementation based on clear TCP sockets
  as provided by Erlang's `:gen_tcp` module. For the most part, users of Thousand
  Island will only ever need to deal with this module via `transport_options`
  passed to `ThousandIsland` at startup time. A complete list of such options
  is defined via the `t::gen_tcp.listen_option()` type. This list can be somewhat
  difficult to decipher; by far the most common value to pass to this transport
  is the following:
  * `ip`: The IP to listen on (defaults to all interfaces). IPs should be
  described in tuple form (ie: `ip: {1, 2, 3, 4}`). The value `:loopback` can
  be used to only bind to localhost. On platforms which support it (macOS and
  Linux at a minimum, likely others), you can also bind to a Unix domain socket
  by specifying a value of `ip: {:local, "/path/to/socket"}`. Note that the port
  *must* be set to `0`, and that the socket is not removed from the filesystem
  after the server shuts down.
  Unless overridden, this module uses the following default options:
  ```elixir
  backlog: 1024,
  nodelay: true,
  linger: {true, 30},
  send_timeout: 30_000,
  send_timeout_close: true,
  reuseaddr: true
  ```
  The following options are required for the proper operation of Thousand Island
  and cannot be overridden at startup (though they can be set via calls to `setopts/2`)
  ```elixir
  mode: :binary,
  active: false
  ```
  """

  alias ThousandIsland.Transport

  @behaviour Transport

  # These options are mandatory for the server loop and always win over
  # user-supplied values.
  @hardcoded_options [mode: :binary, active: false]

  @impl Transport
  # Opens the listener socket. User options override the defaults; the
  # hardcoded options override everything.
  def listen(port, user_options) do
    default_options = [
      backlog: 1024,
      nodelay: true,
      linger: {true, 30},
      send_timeout: 30_000,
      send_timeout_close: true,
      reuseaddr: true
    ]

    resolved_options =
      default_options |> Keyword.merge(user_options) |> Keyword.merge(@hardcoded_options)

    :gen_tcp.listen(port, resolved_options)
  end

  @impl Transport
  defdelegate listen_port(listener_socket), to: :inet, as: :port

  @impl Transport
  defdelegate accept(listener_socket), to: :gen_tcp

  @impl Transport
  # Plain TCP needs no handshake; the accepted socket is ready as-is.
  def handshake(socket), do: {:ok, socket}

  @impl Transport
  defdelegate controlling_process(socket, pid), to: :gen_tcp

  @impl Transport
  defdelegate recv(socket, length, timeout), to: :gen_tcp

  @impl Transport
  defdelegate send(socket, data), to: :gen_tcp

  @impl Transport
  # Streams `length` bytes of `filename` starting at `offset` to the socket.
  def sendfile(socket, filename, offset, length) do
    case :file.open(filename, [:raw]) do
      {:ok, fd} ->
        # BUGFIX: the descriptor was previously never closed, leaking one fd
        # per sendfile call. Always close it, even if sendfile fails.
        try do
          :file.sendfile(fd, socket, offset, length, [])
        after
          :file.close(fd)
        end

      {:error, _reason} = error ->
        error
    end
  end

  @impl Transport
  defdelegate setopts(socket, options), to: :inet

  @impl Transport
  defdelegate shutdown(socket, way), to: :gen_tcp

  @impl Transport
  defdelegate close(socket), to: :gen_tcp

  @impl Transport
  # NOTE(review): :inet.sockname/1 returns {:local, path} (not {ip, port}) for
  # Unix domain sockets, which would fail this match — confirm intended scope.
  def local_info(socket) do
    {:ok, {ip, port}} = :inet.sockname(socket)
    %{address: ip, port: port, ssl_cert: nil}
  end

  @impl Transport
  def peer_info(socket) do
    {:ok, {ip, port}} = :inet.peername(socket)
    %{address: ip, port: port, ssl_cert: nil}
  end

  @impl Transport
  # Clear TCP is never an encrypted transport.
  def secure?, do: false

  @impl Transport
  defdelegate getstat(socket), to: :inet

  @impl Transport
  # ALPN-style protocol negotiation does not exist for plain TCP.
  def negotiated_protocol(_socket), do: {:error, :protocol_not_negotiated}
end
|
lib/thousand_island/transports/tcp.ex
| 0.917884
| 0.76708
|
tcp.ex
|
starcoder
|
defmodule Cards do
  @moduledoc """
  Documentation for Cards.
  """

  # File used by save/1 and load/0 to persist a deck.
  @deck_file "my_deck"

  @doc """
  create_deck

  ## Example

      iex> Cards.create_deck
      ["Ace of Spades", "Two of Spades", "Three of Spades",
      "Four of Spades", "Five of Spades", "Ace of Clubs", "Two of Clubs",
      "Three of Clubs", "Four of Clubs", "Five of Clubs", "Ace of Hearts",
      "Two of Hearts", "Three of Hearts", "Four of Hearts",
      "Five of Hearts", "Ace of Diamonts", "Two of Diamonts",
      "Three of Diamonts", "Four of Diamonts", "Five of Diamonts"]

  """
  def create_deck do
    values = ["Ace", "Two", "Three", "Four", "Five"]
    suits = ["Spades", "Clubs", "Hearts", "Diamonts"]

    # Suits-major order so the result matches the doctest above.
    for suit <- suits, value <- values do
      "#{value} of #{suit}"
    end
  end

  @doc """
  Returns the given deck in random order.
  """
  def shuffle(deck) do
    Enum.shuffle(deck)
  end

  @doc """
  Determines whether a deck contains a given card

  ## Examples

      iex> deck = Cards.create_deck
      iex> Cards.contains?(deck, "Ace of Spades")
      true

  """
  def contains?(deck, card) do
    Enum.member?(deck, card)
  end

  @doc """
  Divides a deck into a hand and the remainder of the deck.
  The `hand_size` arg indicates how many cards should be in the hand

  ## Examples

      iex> {hand, _} = Cards.deal(Cards.create_deck(), 3)
      iex> hand
      ["Ace of Spades", "Two of Spades", "Three of Spades"]

  """
  def deal(deck, hand_size) do
    Enum.split(deck, hand_size)
  end

  @doc """
  Persists `deck` to disk. Returns `:ok` or the `{:error, reason}` from
  `File.write/2`.
  """
  def save(deck) do
    bin = :erlang.term_to_binary(deck)
    File.write(@deck_file, bin)
  end

  @doc """
  Loads the default deck file. See `loads/1`.
  """
  def load() do
    # Was a verbatim copy of loads/1 with a hard-coded filename; delegate
    # instead so the loading logic lives in one place.
    loads(@deck_file)
  end

  @doc """
  Loads a deck from `filename`, returning an error message string when the
  file cannot be read.
  """
  def loads(filename) do
    case File.read(filename) do
      # SECURITY: :erlang.binary_to_term/1 on untrusted files can build
      # arbitrary terms and atoms; only load files produced by save/1.
      {:ok, bin} -> :erlang.binary_to_term(bin)
      {:error, _} -> "That file not exist"
    end
  end

  @doc """
  Creates a shuffled deck and deals a hand of `hand_size` cards.
  """
  def create_hand(hand_size) do
    create_deck()
    |> shuffle()
    |> deal(hand_size)
  end
end
|
cards/lib/cards.ex
| 0.693369
| 0.64839
|
cards.ex
|
starcoder
|
defmodule DefUnit do
  @moduledoc """
  DefUnit defines macros used to create type specs and documentation
  when working with a "core" set of measurement units, and also defines
  operators to convert them to and from other units.
  ## Example
  ```
  use DefUnit
  @doc_to_operator "to SI"
  @doc_from_operator "from SI"
  # Units calculations are done in
  DefUnit.core "m",                :m,    "SI length"
  DefUnit.core "m2",               :m2,   "SI area"
  DefUnit.core "kg",               :kg,   "SI mass"
  DefUnit.core "kgm<sup>-3</sup>", :kgm3, "SI density"
  DefUnit.core "s",                :s,    "Time"
  DefUnit.core "C",                :c,    "Temperature in Celcius"
  DefUnit.core "ms<sup>-1</sup>",  :ms,   "SI Velocity"
  DefUnit.core "ms<sup>-2</sup>",  :ms2,  "SI Acceleration"
  DefUnit.core "Nm<sup>2</sup>",   :nm2,  "SI Pressure"
  # Units we convert to and from above units
  DefUnit.other "feet", :feet, 0.3048, :m, "FPS length and altitude"
  DefUnit.other "lbs", :lbs, 0.453592, :kg, "FPS mass"
  # Units with more complex from/to conversion calculations
  DefUnit.other "F", :f,
  {
    &((&1 - 32.0) * (5.0 / 9.0)),
    &((&1 * (9.0 / 5.0)) + 32.0)
  },
  :c, "Temperature in Farhrenheit"
  ```
  """

  # Sets up the module attributes the core/other macros accumulate into.
  defmacro __using__(_options) do
    quote do
      import unquote(__MODULE__)
      Module.register_attribute __MODULE__,
        :core_units, accumulate: true
      Module.register_attribute __MODULE__,
        :other_units, accumulate: true
      Module.register_attribute __MODULE__,
        :doc_to_operator, accumulate: false
      Module.register_attribute __MODULE__,
        :doc_from_operator, accumulate: false
    end
  end

  @doc ~S"""
  Define a 'core' unit.
  - `eq` is the short name for the unit used in the typedoc - use <sup> for ordinals
  - `core_type` is the name used in the type spec for this unit
  - `description` is the description used in the typedoc
  """
  # Registers the unit in @core_units and defines identity `<~`/`~>`
  # conversions, since core units are the representation everything else
  # converts to and from. `{core_type, [], nil}` is the AST of a bare
  # variable/type name with that atom.
  defmacro core(eq, core_type, description) do
    quote do
      @core_units {unquote(eq), unquote(core_type)}
      @typedoc unquote(description <> " " <> eq)
      @type unquote({core_type, [], nil}) :: float
      @doc @doc_from_operator
      @spec number <~ unquote(core_type) :: unquote({core_type, [], nil})
      def value <~ unquote(core_type) do
        value
      end
      @doc @doc_to_operator
      @spec unquote({core_type, [], nil}) ~> unquote(core_type) :: unquote({core_type, [], nil})
      def value ~> unquote(core_type) do
        value
      end
    end
  end

  @doc """
  Define an 'other' unit.
  - `eq` is the short name for the unit used in the typedoc - use <sup> for ordinals
  - `other_type` is the name used in the type spec for this unit
  - `ratio` is either a multiplier to convert this unit to the core unit, or a 2-tuple of from/to conversion functions
  - `core_type` is the name of the corresponding core type
  - `description` is the description used in the typedoc
  """
  defmacro other(eq, other_type, ratio, core_type, description) do
    name_string = Atom.to_string(other_type)
    core_type_string = Atom.to_string(core_type)
    # Attribute names documenting the conversion ratios, e.g. @feet_to_m.
    to_ratio_name = String.to_atom(name_string <> "_to_" <> core_type_string)
    from_ratio_name = String.to_atom(core_type_string <> "_to_" <> name_string)
    # `{:@, [], [{name, [], [value]}]}` is the AST of `@name value`. For a
    # numeric ratio the attribute records the factor; for a function pair it
    # records :na (not applicable).
    {from_ratio, from_op} = cond do
      is_number(ratio) ->
        {
          {:@, [], [{from_ratio_name, [], [1.0 / ratio]}]},
          quote do: value * unquote(ratio)
        }
      is_tuple(ratio) ->
        {fn_from, _} = ratio
        {
          {:@, [], [{from_ratio_name, [], [:na]}]},
          quote do: (unquote(fn_from)).(value)
        }
    end
    {to_ratio, to_op} = cond do
      is_number(ratio) ->
        {
          {:@, [], [{to_ratio_name, [], [ratio]}]},
          quote do: value / unquote(ratio)
        }
      is_tuple(ratio) ->
        {_, fn_to} = ratio
        {
          {:@, [], [{to_ratio_name, [], [:na]}]},
          quote do: (unquote(fn_to)).(value)
        }
    end
    quote do
      # Fail at compile time if the referenced core unit was never declared.
      if length(for {_, ct} <- @core_units, ct == unquote(core_type), do: ct) == 0 do
        raise ArgumentError,
        message: """
        Unit '#{unquote(other_type)}' refers to unknown core unit '#{unquote(core_type)}'
        """
      end
      @other_units {unquote(eq), unquote(other_type), unquote(core_type)}
      @typedoc unquote(description <> " " <> eq)
      @type unquote({other_type, [], nil}) :: float
      unquote(to_ratio)
      unquote(from_ratio)
      # `value <~ :other` converts other -> core; `value ~> :other` core -> other.
      @spec number <~ unquote(other_type) :: unquote({core_type, [], nil})
      def value <~ unquote(other_type) do
        unquote(from_op)
      end
      @spec unquote({core_type, [], nil}) ~> unquote(other_type) :: unquote({other_type, [], nil})
      def value ~> unquote(other_type) do
        unquote(to_op)
      end
    end
  end
end
|
lib/def_unit.ex
| 0.897415
| 0.831964
|
def_unit.ex
|
starcoder
|
defmodule Commanded.Event do
@moduledoc ~S"""
Creates a domain event structure.
## Options
* `from` - A struct to adapt the keys from.
* `with` - A list of keys to add to the event.
* `drop` - A list of keys to drop from the keys adapted from a struct.
* `version` - An optional version of the event. Defaults to `1`.
## Example
# This is for demonstration purposes only. You don't need to create a new event to version one.
defmodule AccountCreatedVersioned do
use Commanded.Event,
version: 2,
from: CreateAccount,
with: [:date, :sex],
drop: [:email],
defimpl Commanded.Event.Upcaster, for: AccountCreatedWithDroppedKeys do
def upcast(%{version: 1} = event, _metadata) do
AccountCreatedVersioned.new(event, sex: "maybe", version: 2)
end
def upcast(event, _metadata), do: event
end
end
iex> changeset = CreateAccount.new(username: "chris", email: "<EMAIL>", age: 5)
iex> cmd = Ecto.Changeset.apply_changes(changeset)
iex> event = AccountCreatedWithDroppedKeys.new(cmd)
iex> Commanded.Event.Upcaster.upcast(event, %{})
%AccountCreatedVersioned{age: 5, date: nil, sex: "maybe", username: "chris", version: 2}
"""
defmacro __using__(opts) do
  # bind_quoted evaluates `opts` once, in the using module's context.
  quote bind_quoted: [opts: opts] do
    # Keys adapted from the :from struct, if one was given.
    from =
      case Keyword.get(opts, :from) do
        nil ->
          []
        source ->
          # Fail fast if :from does not name a compiled module.
          case Code.ensure_compiled(source) do
            {:error, _} ->
              raise "#{source} should be a valid struct to use with DomainEvent"
            _ ->
              nil
          end
          struct(source)
          |> Map.from_struct()
          |> Map.keys()
      end
    version = Keyword.get(opts, :version, 1)
    keys_to_drop = Keyword.get(opts, :drop, [])
    explicit_keys = Keyword.get(opts, :with, [])
    @derive Jason.Encoder
    # Struct keys = (adapted ++ :with) minus :drop, plus :version default.
    defstruct from
    |> Kernel.++(explicit_keys)
    |> Enum.reject(&Enum.member?(keys_to_drop, &1))
    |> Kernel.++([{:version, version}])
    # Build the event from a struct, keyword list, or map; `attrs`
    # entries override values taken from `source`.
    def new(), do: %__MODULE__{}
    def new(source, attrs \\ [])
    def new(%{__struct__: _} = source, attrs) do
      source
      |> Map.from_struct()
      |> new(attrs)
    end
    def new(source, attrs) when is_list(source) do
      source
      |> Enum.into(%{})
      |> new(attrs)
    end
    def new(source, attrs) when is_map(source) do
      Map.merge(source, Enum.into(attrs, %{}))
      |> create()
    end
    # `create/1` is injected by ExConstructor and builds %__MODULE__{}.
    use ExConstructor, :create
  end
end
end
|
lib/commanded/event.ex
| 0.862496
| 0.501587
|
event.ex
|
starcoder
|
defmodule SteamEx.IPublishedFileService do
@moduledoc """
Provides additional methods for interacting with Steam Workshop items.
See [ISteamRemoteStorage](https://partner.steamgames.com/doc/webapi/ISteamRemoteStorage) for the primary interface.
**NOTE:** This is a Service interface, methods in this interface should be called with the `input_json` parameter.
For more info on how to use the Steamworks Web API please see the [Web API Overview](https://partner.steamgames.com/doc/webapi_overview).
"""
import SteamEx.API.Base
@interface "IPublishedFileService"
@doc """
Performs a search query for published files
| Name | Type | Required | Description |
| key | string | ✓ | Steamworks Web API user authentication key.|
| query_type | uint32 | ✓ | enumeration EPublishedFileQueryType in clientenums.h|
| page | uint32 | ✓ | Current page|
| numperpage | uint32 | | (Optional) The number of results, per page to return.|
| creator_appid | uint32 | ✓ | App that created the files|
| appid | uint32 | ✓ | App that consumes the files|
| requiredtags | string | ✓ | Tags to match on. See match_all_tags parameter below|
| excludedtags | string | ✓ | (Optional) Tags that must NOT be present on a published file to satisfy the query.|
| match_all_tags | bool | | If true, then items must have all the tags specified, otherwise they must have at least one of the tags.|
| required_flags | string | ✓ | Required flags that must be set on any returned items|
| omitted_flags | string | ✓ | Flags that must not be set on any returned items|
| search_text | string | ✓ | Text to match in the item's title or description|
| filetype | uint32 | ✓ | EPublishedFileInfoMatchingFileType|
| child_publishedfileid | uint64 | ✓ | Find all items that reference the given item.|
| days | uint32 | ✓ | If query_type is k_PublishedFileQueryType_RankedByTrend, then this is the number of days to get votes for [1,7].|
| include_recent_votes_only| bool | ✓ | If query_type is k_PublishedFileQueryType_RankedByTrend, then limit result set just to items that have votes within the day range given|
| cache_max_age_seconds | uint32 | | Allow stale data to be returned for the specified number of seconds.|
| language | int32 | | Language to search in and also what gets returned. Defaults to English.|
| required_kv_tags | {message}| ✓ | Required key-value tags to match on.|
| totalonly | bool | ✓ | (Optional) If true, only return the total number of files that satisfy this query.|
| ids_only | bool | ✓ | (Optional) If true, only return the published file ids of files that satisfy this query.|
| return_vote_data | bool | ✓ | Return vote data|
| return_tags | bool | ✓ | Return tags in the file details|
| return_kv_tags | bool | ✓ | Return key-value tags in the file details|
| return_previews | bool | ✓ | Return preview image and video details in the file details|
| return_children | bool | ✓ | Return child item ids in the file details|
| return_short_description | bool | ✓ | Populate the short_description field instead of file_description|
| return_for_sale_data | bool | ✓ | Return pricing information, if applicable|
| return_metadata | bool | | Populate the metadata|
| return_playtime_stats | uint32 | ✓ | Return playtime stats for the specified number of days before today.|
See other: [https://partner.steamgames.com/doc/webapi/IPublishedFileService#GetAppList](https://partner.steamgames.com/doc/webapi/IPublishedFileService#GetAppList)
"""
# NOTE(review): the parameter table above appears to describe the
# QueryFiles operation, while this function calls the GetAppList/v2
# endpoint — verify against the linked Steamworks Web API docs.
def get_app_list(access_key, params \\ %{}, headers \\ %{}) do
  get(@interface <> "/GetAppList/v2/", access_key, params, headers)
end
end
|
lib/interfaces/i_published_file_service.ex
| 0.719679
| 0.547101
|
i_published_file_service.ex
|
starcoder
|
defmodule Broca.Naive.NN do
@moduledoc """
Module to handle `list` and `list of lists` for Neural Network.
"""
# Guards classifying nesting depth by probing the first element(s).
# NOTE(review): they assume non-empty, non-ragged lists; hd/1 on [] makes
# the guard fail (not raise), so [] never matches any of them.
defguard is_2dlist(list) when is_list(hd(list)) and not is_list(hd(hd(list)))
defguard is_3dlist(list) when is_list(hd(hd(list))) and not is_list(hd(hd(hd(list))))
defguard is_4dlist(list) when is_list(hd(hd(hd(list))))
# Identity constructor: a "tensor" here is just a (nested) list.
def new(list) do
  list
end
@doc """
Element-wise addition of `list2` to `list1`, broadcasting a 1-d list
across the rows of a 2-d list.
## Examples
    iex> Broca.Naive.NN.add([1, 2, 3], [4, 5, 6])
    [5, 7, 9]
    iex> Broca.Naive.NN.add([[1, 2], [3, 4]], [5, 6])
    [[6, 8], [8, 10]]
    iex> Broca.Naive.NN.add([[1, 2], [3, 4]], [[5, 6], [7, 8]])
    [[6, 8], [10, 12]]
"""
@spec add([[number]], [[number]]) :: [[number]]
@spec add([number], [number]) :: [number]
# Both 2-d: recurse row by row.
def add(a, b) when is_list(hd(a)) and is_list(hd(b)) do
  for {row_a, row_b} <- Enum.zip(a, b), do: add(row_a, row_b)
end

# 2-d plus 1-d: broadcast `b` over every row of `a`.
def add(a, b) when is_list(hd(a)) do
  Enum.map(a, fn row -> add(row, b) end)
end

# Flat lists: pairwise sum.
def add(a, b) do
  for {x, y} <- Enum.zip(a, b), do: x + y
end
@doc """
Subtract `arg2` from `arg1`, element-wise, with scalar and 1-d
broadcasting in either position.
## Examples
    iex> Broca.Naive.NN.subtract(10, 7)
    3
    iex> Broca.Naive.NN.subtract([1, 2, 3], 1)
    [0, 1, 2]
    iex> Broca.Naive.NN.subtract(1, [1, 2, 3])
    [0, -1, -2]
    iex> Broca.Naive.NN.subtract([3, 4, 5], [1, 2, 3])
    [2, 2, 2]
    iex> Broca.Naive.NN.subtract([[1, 2, 3], [11, 12, 13]], [1, 2, 1])
    [[0, 0, 2], [10, 10, 12]]
    iex> Broca.Naive.NN.subtract([[99, 98], [97, 96]], [[1, 2], [3, 4]])
    [[98, 96], [94, 92]]
"""
# Both at least 2-d: recurse per row pair.
def subtract(a, b) when is_list(hd(a)) and is_list(hd(b)) do
  for {row_a, row_b} <- Enum.zip(a, b), do: subtract(row_a, row_b)
end

# 2-d minus 1-d: broadcast `b` over the rows of `a`.
def subtract(a, b) when is_list(hd(a)) and is_list(b) do
  Enum.map(a, fn row -> subtract(row, b) end)
end

# Two flat lists: pairwise.
def subtract(a, b) when is_list(a) and is_list(b) do
  for {x, y} <- Enum.zip(a, b), do: subtract(x, y)
end

# List minus scalar.
def subtract(a, b) when is_list(a) do
  Enum.map(a, fn x -> subtract(x, b) end)
end

# Scalar minus list.
def subtract(a, b) when is_list(b) do
  Enum.map(b, fn y -> subtract(a, y) end)
end

# Scalar base case.
def subtract(a, b), do: a - b
@doc """
Element-wise multiplication with scalar and 1-d broadcasting.
## Examples
    iex> Broca.Naive.NN.mult([1, 2, 3], [4, 5, 6])
    [4, 10, 18]
    iex> Broca.Naive.NN.mult([[1, 2, 3], [1, 2, 3]], [[4, 5, 6], [4, 5, 6]])
    [[4, 10, 18], [4, 10, 18]]
    iex> Broca.Naive.NN.mult(10, 20)
    200
"""
# Both at least 2-d: recurse per row pair.
def mult(a, b) when is_list(hd(a)) and is_list(hd(b)) do
  for {row_a, row_b} <- Enum.zip(a, b), do: mult(row_a, row_b)
end

# 1-d times 2-d: broadcast `a` over the rows of `b`.
def mult(a, b) when not is_list(hd(a)) and is_list(hd(b)) do
  Enum.map(b, fn row -> mult(a, row) end)
end

# Two flat lists: pairwise product.
def mult(a, b) when is_list(a) and is_list(b) do
  for {x, y} <- Enum.zip(a, b), do: x * y
end

# 2-d times scalar.
def mult(a, b) when is_list(hd(a)) do
  Enum.map(a, fn row -> mult(row, b) end)
end

# 1-d times scalar.
def mult(a, b) when is_list(a) do
  Enum.map(a, fn x -> x * b end)
end

# Scalar base case.
def mult(a, b), do: a * b
@doc """
Element-wise division of `x` by `y` (always float, via `/`).
## Examples
    iex> Broca.Naive.NN.division([1, 2, 3], 2)
    [0.5, 1.0, 1.5]
    iex> Broca.Naive.NN.division([[1, 2, 3], [1, 2, 3]], 2)
    [[0.5, 1.0, 1.5], [0.5, 1.0, 1.5]]
"""
# Two lists: divide pairwise, recursing into nested structure.
def division(a, b) when is_list(a) and is_list(b) do
  for {x, y} <- Enum.zip(a, b), do: division(x, y)
end

# List divided by scalar.
def division(a, b) when is_list(a) do
  Enum.map(a, fn x -> division(x, b) end)
end

# Scalar base case.
def division(a, b), do: a / b
# Zip-concatenate two nested lists along their innermost axis; used by
# the transpose helpers to stitch per-axis results together.
# `nil` acts as the identity so a reduce can start without an accumulator.
defp concat(list1, list2, merge \\ true)
defp concat(nil, list, _) do
  list
end
defp concat(list, nil, _) do
  list
end
# merge=true: descend while the second argument is at least 2-d.
defp concat(list1, list2, true) when is_list(hd(list2)) do
  Enum.zip(list1, list2)
  |> Enum.map(fn {sub_list1, sub_list2} -> concat(sub_list1, sub_list2, true) end)
end
# merge=false: descend one level deeper (second argument at least 3-d).
defp concat(list1, list2, false) when is_list(hd(hd(list2))) do
  Enum.zip(list1, list2)
  |> Enum.map(fn {sub_list1, sub_list2} -> concat(sub_list1, sub_list2, false) end)
end
# Innermost level: plain concatenation.
defp concat(list1, list2, _) do
  Enum.concat(list1, list2)
end
@doc """
Transpose the 4d `list` given
## Examples
iex> Broca.Naive.NN.transpose([1, 2, 3])
[[1], [2], [3]]
iex> Broca.Naive.NN.transpose([[1, 2, 3], [4, 5, 6]])
[[1, 4], [2, 5], [3, 6]]
iex> list = [[[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15], [16, 17, 18, 19, 20], [21, 22, 23, 24, 25]], \
[[26, 27, 28, 29, 30], [31, 32, 33, 34, 35], [36, 37, 38, 39, 40], [41, 42, 43, 44, 45], [46, 47, 48, 49, 50]]]
iex> Broca.Naive.NN.transpose(list)
[[[1, 26], [6, 31], [11, 36], [16, 41], [21, 46]],
[[2, 27], [7, 32], [12, 37], [17, 42], [22, 47]],
[[3, 28], [8, 33], [13, 38], [18, 43], [23, 48]],
[[4, 29], [9, 34], [14, 39], [19, 44], [24, 49]],
[[5, 30], [10, 35], [15, 40], [20, 45], [25, 50]]]
iex> list = [[[[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]], [[13, 14, 15, 16], [17, 18, 19, 20], [21, 22, 23, 24]]], \
[[[25 , 26, 27, 28], [29, 30, 31, 32], [33, 34, 35, 36]], [[37, 38, 39, 40], [41, 42, 43, 44], [45, 46, 47 , 48]]]]
iex> Broca.Naive.NN.transpose(list)
[[[[1, 25], [13, 37]], [[5, 29], [17, 41]], [[9, 33], [21, 45]]],
[[[2, 26], [14, 38]], [[6, 30], [18, 42]], [[10, 34], [22, 46]]],
[[[3, 27], [15, 39]], [[7, 31], [19, 43]], [[11, 35], [23, 47]]],
[[[4, 28], [16, 40]], [[8, 32], [20, 44]], [[12, 36], [24, 48]]]]
"""
# NOTE(review): the @spec only covers the 1-d case; the clauses also
# accept 2-d/3-d/4-d nested lists (dispatched via the depth guards).
@spec transpose([number]) :: [number]
def transpose(list) when is_4dlist(list) do
  # Walk every axis in reverse, prepending into an accumulator per
  # innermost column so element order comes out right without a final
  # reverse; concat/3 stitches batches/channels back together.
  arr = List.duplicate([], length(hd(hd(hd(list)))))
  list
  |> Enum.reverse()
  |> Enum.map(fn list2 ->
    Enum.map(Enum.reverse(list2), fn list3 ->
      Enum.reduce(Enum.reverse(list3), arr, fn sub_list, arr ->
        Enum.zip(sub_list, arr)
        |> Enum.map(&([[[elem(&1, 0)]]] ++ elem(&1, 1)))
      end)
    end)
    |> Enum.reduce(
      nil,
      fn channel, acc -> concat(channel, acc, false) end
    )
  end)
  |> Enum.reduce(
    nil,
    fn channel, acc -> concat(channel, acc) end
  )
end
def transpose(list) when is_3dlist(list) do
  # Same accumulator technique, one nesting level shallower.
  arr = List.duplicate([], length(hd(hd(list))))
  list
  |> Enum.reverse()
  |> Enum.map(fn list2 ->
    Enum.reduce(Enum.reverse(list2), arr, fn sub_list, arr ->
      Enum.zip(sub_list, arr)
      |> Enum.map(&([[elem(&1, 0)]] ++ elem(&1, 1)))
    end)
  end)
  |> Enum.reduce(
    nil,
    fn batch, acc -> concat(batch, acc) end
  )
end
def transpose(list) when is_2dlist(list) do
  # Classic matrix transpose: fold rows (reversed) into column lists.
  arr = List.duplicate([], length(hd(list)))
  list
  |> Enum.reverse()
  |> Enum.reduce(arr, fn sub_list, arr ->
    Enum.zip(sub_list, arr)
    |> Enum.map(&([elem(&1, 0)] ++ elem(&1, 1)))
  end)
end
def transpose(list) do
  # 1-d: turn a row vector into a column vector.
  list |> Enum.map(&[&1])
end
@doc """
Transpose axes
## Examples
iex> list = [[[[63, 126], [72, 144], [81, 162]], [[108, 216], [117, 234], [126, 252]], [[153, 306], [162, 324], [171, 342]]], \
[[[288, 576], [297, 594], [306, 612]], [[333, 666], [342, 684], [351, 702]], [[378, 756], [387, 774], [396, 792]]]]
iex> Broca.Naive.NN.transpose(list, 0, 3, 1, 2)
[[[[63, 72, 81], [108, 117, 126], [153, 162, 171]], [[126, 144, 162], [216, 234, 252], [306, 324, 342]]],\
[[[288, 297, 306], [333, 342, 351], [378, 387, 396]], [[576, 594, 612], [666, 684, 702], [756, 774, 792]]]]
iex> list = [[[[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]], [[6.0, 7.0], [8.0, 9.0], [10.0, 11.0]], [[12.0, 13.0], [14.0, 15.0], [16.0, 17.0]]], \
[[[18.0, 19.0], [20.0, 21.0], [22.0, 23.0]], [[24.0, 25.0], [26.0, 27.0], [28.0, 29.0]], [[30.0, 31.0], [32.0, 33.0], [34.0, 35.0]]], \
[[[36.0, 37.0], [38.0, 39.0], [40.0, 41.0]], [[42.0, 43.0], [44.0, 45.0], [46.0, 47.0]], [[48.0, 49.0], [50.0, 51.0], [52.0, 53.0]]]]
iex> Broca.Naive.NN.transpose(list, 0, 2, 3, 1)
[[[[0.0, 6.0, 12.0], [1.0, 7.0, 13.0]], [[2.0, 8.0, 14.0], [3.0, 9.0, 15.0]], [[4.0, 10.0, 16.0], [5.0, 11.0, 17.0]]],
[[[18.0, 24.0, 30.0], [19.0, 25.0, 31.0]], [[20.0, 26.0, 32.0], [21.0, 27.0, 33.0]], [[22.0, 28.0, 34.0], [23.0, 29.0, 35.0]]],
[[[36.0, 42.0, 48.0], [37.0, 43.0, 49.0]], [[38.0, 44.0, 50.0], [39.0, 45.0, 51.0]], [[40.0, 46.0, 52.0], [41.0, 47.0, 53.0]]]]
"""
# NOTE(review): only the two axis permutations used elsewhere in this
# library are implemented; any other axis combination has no clause.
def transpose(list, 0, 2, 3, 1) do
  Enum.map(
    list,
    fn channel ->
      arr = List.duplicate([], length(hd(channel)))
      transpose(channel)
      |> Enum.reverse()
      |> Enum.reduce(
        arr,
        fn row, acc ->
          Enum.zip(row, acc)
          |> Enum.map(&([elem(&1, 0)] ++ elem(&1, 1)))
        end
      )
    end
  )
end
def transpose(list, 0, 3, 1, 2) do
  # Batch axis stays put; permute each 3-d element.
  list
  |> Enum.map(&transpose(&1, 2, 0, 1))
end
def transpose(batch, 2, 0, 1) do
  batch
  |> Enum.reduce(
    List.duplicate([], length(hd(hd(batch)))),
    fn list, acc ->
      data = transpose(list)
      Enum.zip(data, acc) |> Enum.map(fn {val, ac} -> [val] ++ ac end)
    end
  )
  |> Enum.map(&Enum.reverse(&1))
end
@doc """
Reshape a (possibly nested) list into the given dimensions.
The input is flattened first, then re-chunked from the innermost
dimension outwards.
## Examples
    iex> Broca.Naive.NN.reshape([1, 2, 3, 4], [2, 2])
    [[1, 2], [3, 4]]
    iex> Broca.Naive.NN.reshape([1, 2, 3, 4, 5, 6], [3, 2])
    [[1, 2], [3, 4], [5, 6]]
    iex> Broca.Naive.NN.reshape([1, 2, 3, 4, 5, 6, 7, 8], [2, 2, 2])
    [[[1, 2], [3, 4]],[[5, 6], [7, 8]]]
"""
def reshape(list, dims) do
  # Flatten nested input; a flat list is used as-is.
  flat = if is_list(hd(list)), do: List.flatten(list), else: list

  # Chunk from the innermost dimension outward; the final chunk_every
  # wraps everything in one extra list, removed by hd/1.
  dims
  |> Enum.reverse()
  |> Enum.reduce(flat, fn dim, data -> Enum.chunk_every(data, dim) end)
  |> hd()
end
# Affine transform: (list · list_dot) + list_add.
def dot_and_add(list, list_dot, list_add) do
  Broca.Naive.NN.dot(list, list_dot)
  |> Broca.Naive.NN.add(list_add)
end
# Dot with the second operand transposed ("nt" = normal × transposed).
def dot_nt(list1, list2) do
  dot(list1, transpose(list2))
end
# Dot with the first operand transposed ("tn" = transposed × normal).
def dot_tn(list1, list2) do
  dot(transpose(list1), list2)
end
@doc """
Dot product
## Examples
iex> a = [1, 2, 3]
iex> b = [4, 5, 6]
iex> Broca.Naive.NN.dot(a, b)
32
iex> a = [[1, 2], [3, 4], [5, 6]]
iex> b = [7, 8]
iex> Broca.Naive.NN.dot(a, b)
[23, 53, 83]
iex> a = [[1, 2], [3, 4]]
iex> b = [[5, 6], [7, 8]]
iex> Broca.Naive.NN.dot(a, b)
[[19, 22], [43, 50]]
iex> a = [1, 2]
iex> b = [[1, 3, 5], [2, 4, 6]]
iex> Broca.Naive.NN.dot(a, b)
[5, 11, 17]
"""
@spec dot([[number]], [[number]]) :: [[number]]
@spec dot([[number]], [number]) :: [number]
@spec dot([number], [[number]]) :: [number]
@spec dot([number], [number]) :: number
def dot(as, bs) when is_list(hd(as)) and is_list(hd(bs)) do
  # Matrix × matrix: validate inner dimensions, then compute each result
  # row against the transposed right-hand side.
  if length(hd(as)) != length(bs),
    do:
      raise(
        "list should be dot([a x m], [m x b]) but dot([#{length(as)} x #{length(hd(as))}], [#{
          length(bs)
        } x #{length(hd(bs))}]"
      )
  bt = transpose(bs)
  as
  |> Enum.map(fn a -> Enum.map(bt, fn b -> dot(a, b) end) end)
end
# Matrix × vector.
def dot(as, b) when is_list(hd(as)) do
  as
  |> Enum.map(&dot(&1, b))
end
# Vector × matrix.
def dot(a, bs) when is_list(hd(bs)) do
  transpose(bs)
  |> Enum.map(&dot(a, &1))
end
# Vector × vector: sum of pairwise products.
def dot(a, b) do
  Enum.zip(a, b)
  |> Enum.reduce(0, &(&2 + elem(&1, 0) * elem(&1, 1)))
end
@doc """
Sum the `list`, either per row (default) or per column.
## Examples
    iex> Broca.Naive.NN.sum([1, 2, 3])
    6
    iex> Broca.Naive.NN.sum([[1, 2, 3], [4, 5, 6]], :row)
    [6, 15]
    iex> Broca.Naive.NN.sum([[1, 2, 3], [4, 5, 6], [7, 8, 9]], :col)
    [12, 15, 18]
"""
def sum(list), do: sum(list, :row)

# Column-wise: fold rows into a running per-column total.
def sum(rows, axis) when axis == :col do
  zeros = List.duplicate(0, length(hd(rows)))

  Enum.reduce(rows, zeros, fn row, totals ->
    for {x, total} <- Enum.zip(row, totals), do: x + total
  end)
end

# Row-wise on a 2-d list: one sum per row.
def sum(rows, _axis) when is_list(hd(rows)), do: Enum.map(rows, &Enum.sum/1)

# Flat list: plain sum.
def sum(values, _axis), do: Enum.sum(values)
@doc """
Sigmoid function, 1 / (1 + e^-x), applied element-wise.
## Examples
    iex> Broca.Naive.NN.sigmoid([-1.0, 1.0, 2.0])
    [0.2689414213699951, 0.7310585786300049, 0.8807970779778823]
    iex> Broca.Naive.NN.sigmoid([[-1.0, 1.0, 2.0], [-1.0, 1.0, 2.0]])
    [[0.2689414213699951, 0.7310585786300049, 0.8807970779778823],
    [0.2689414213699951, 0.7310585786300049, 0.8807970779778823]]
"""
@spec sigmoid([[number]]) :: [[number]]
@spec sigmoid([number]) :: [number]
# 2-d: apply row by row.
def sigmoid(rows) when is_list(hd(rows)), do: Enum.map(rows, &sigmoid/1)

def sigmoid(values) do
  Enum.map(values, fn x -> 1 / (1 + :math.exp(-x)) end)
end
@doc """
ReLU function: max(0.0, x) element-wise.
## Examples
    iex> Broca.Naive.NN.relu([-1.0, 0.0, 1.0, 2.0])
    [0.0, 0.0, 1.0, 2.0]
    iex> Broca.Naive.NN.relu([[-1.0, 0.0, 1.0, 2.0], [-1.0, 0.0, 1.0, 2.0]])
    [[0.0, 0.0, 1.0, 2.0], [0.0, 0.0, 1.0, 2.0]]
"""
@spec relu([[number]]) :: [[number]]
@spec relu([number]) :: [number]
# 2-d: apply row by row.
def relu(rows) when is_list(hd(rows)), do: Enum.map(rows, &relu/1)

def relu(values), do: Enum.map(values, fn x -> max(0.0, x) end)
@doc """
Softmax function: exp(x - max) / sum(exp(x - max)), element-wise per row.
Subtracting the row maximum keeps the exponentials numerically stable.
## Examples
    iex> Broca.Naive.NN.softmax([0.3, 2.9, 4.0])
    [0.01821127329554753, 0.24519181293507392, 0.7365969137693786]
    iex> Broca.Naive.NN.softmax([[0.3, 2.9, 4.0], [0.3, 2.9, 4.0], [0.3, 2.9, 4.0]])
    [[0.01821127329554753, 0.24519181293507392, 0.7365969137693786],
    [0.01821127329554753, 0.24519181293507392, 0.7365969137693786],
    [0.01821127329554753, 0.24519181293507392, 0.7365969137693786]]
"""
@spec softmax([[number]]) :: [[number]]
@spec softmax([number]) :: [number]
# 2-d: apply row by row.
def softmax(rows) when is_list(hd(rows)), do: Enum.map(rows, &softmax/1)

def softmax(values) do
  peak = Enum.max(values)
  exps = Enum.map(values, fn x -> :math.exp(x - peak) end)
  # Sum in reverse order to keep float accumulation identical to the
  # previous implementation (which folded over the reversed list).
  total = exps |> Enum.reverse() |> Enum.sum()
  Enum.map(exps, &(&1 / total))
end
@doc """
Convert a class (or list of classes) to a one-hot vector over the
classes 0..max — note the result has `max + 1` entries.
## Examples
    iex> Broca.Naive.NN.one_hot(0, 4)
    [1.0, 0.0, 0.0, 0.0, 0.0]
    iex> Broca.Naive.NN.one_hot(3, 4)
    [0.0, 0.0, 0.0, 1.0, 0.0]
    iex> Broca.Naive.NN.one_hot([0, 1, 2, 3, 4], 4)
    [[1.0, 0.0, 0.0, 0.0, 0.0],
    [0.0, 1.0, 0.0, 0.0, 0.0],
    [0.0, 0.0, 1.0, 0.0, 0.0],
    [0.0, 0.0, 0.0, 1.0, 0.0],
    [0.0, 0.0, 0.0, 0.0, 1.0]]
"""
def one_hot(classes, max) when is_list(classes) do
  Enum.map(classes, fn class -> one_hot(class, max) end)
end

def one_hot(class, max) do
  Enum.map(0..max, fn i -> if i == class, do: 1.0, else: 0.0 end)
end
@doc """
Calculate cross entropy error.
## Examples
iex> t = [0, 0, 1, 0, 0, 0, 0, 0, 0, 0]
iex> y = [0.1, 0.05, 0.6, 0.0, 0.05, 0.1, 0.0, 0.1, 0.0, 0.0]
iex> Broca.Naive.NN.cross_entropy_error(y, t)
0.510825457099338
iex> t = [[0, 0, 1, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 0, 0, 0, 0, 0, 0, 0]]
iex> y = [[0.1, 0.05, 0.6, 0.0, 0.05, 0.1, 0.0, 0.1, 0.0, 0.0],[0.1, 0.05, 0.6, 0.0, 0.05, 0.1, 0.0, 0.1, 0.0, 0.0]]
iex> Broca.Naive.NN.cross_entropy_error(y, t)
0.510825457099338
"""
# Batch case: sum each row's error, then average over the batch size.
def cross_entropy_error(ys, ts) when is_list(hd(ys)) do
  Enum.zip(ys, ts)
  |> Enum.reduce(0, fn {y, t}, acc -> acc + cross_entropy_error(y, t) end)
  |> Broca.Naive.NN.division(length(ys))
end
# Single sample: -sum(t_i * log(y_i)); delta avoids log(0), and only
# target-class terms (t != 0) contribute.
def cross_entropy_error(ys, ts) do
  delta = 1.0e-7
  Enum.zip(ys, ts)
  |> Enum.reduce(0, fn {y, t}, acc -> if t == 0, do: acc, else: acc - :math.log(y + delta) end)
end
@doc """
Get the index of the maximum value in the list (first occurrence wins
on ties). Returns -1 for an empty list.
## Examples
    iex> Broca.Naive.NN.argmax([5.0, 1.0, 2.0, 3.0])
    0
    iex> Broca.Naive.NN.argmax([1.0, 4.0, 2.0, 3.0])
    1
    iex> Broca.Naive.NN.argmax([[5.0, 1.0, 2.0, 3.0], [1.0, 4.0, 2.0, 3.0]])
    [0, 1]
"""
def argmax(list) when is_list(hd(list)) do
  list |> Enum.map(&argmax(&1))
end

# Preserve the previous sentinel result for an empty list.
def argmax([]), do: -1

def argmax([head | tail]) do
  # Seed the scan with the first element instead of a magic sentinel:
  # the old -999_999 seed returned -1 for lists whose values were all
  # <= -999_999. Strict `>` keeps the first occurrence on ties.
  {best_idx, _best_val, _next_idx} =
    Enum.reduce(tail, {0, head, 1}, fn x, {idx, max, cur} ->
      if x > max, do: {cur, x, cur + 1}, else: {idx, max, cur + 1}
    end)

  best_idx
end
@doc """
Mask the list with the filter given: each element is replaced by `true`
when `filter` returns a truthy value, `false` otherwise.
## Examples
    iex> Broca.Naive.NN.filter_mask([-1.0, 0.2, 1.0, -0.3], fn x -> x <= 0 end)
    [true, false, false, true]
    iex> Broca.Naive.NN.filter_mask([[-1.0, 0.2, 1.0, -0.3], [-1.0, 0.2, 1.0, -0.3]], fn x -> x <= 0 end)
    [[true, false, false, true], [true, false, false, true]]
"""
# 2-d: apply row by row.
def filter_mask(rows, pred) when is_list(hd(rows)) do
  Enum.map(rows, fn row -> filter_mask(row, pred) end)
end

def filter_mask(values, pred) do
  # The if normalizes any truthy/falsy result to a strict boolean.
  Enum.map(values, fn v -> if pred.(v), do: true, else: false end)
end
@doc """
Mask the data: where `filter` is truthy the value is replaced by
`replace_value` (default 0.0), otherwise kept.
## Examples
    iex> Broca.Naive.NN.mask(true, 4.0)
    0.0
    iex> Broca.Naive.NN.mask(false, 4, 0)
    4
    iex> Broca.Naive.NN.mask([true, false, true], [1, 2, 4], -1.0)
    [-1.0, 2, -1.0]
"""
def mask(filter, values), do: mask(filter, values, 0.0)

# List of flags: zip against the values and mask pairwise.
def mask(filters, values, replacement) when is_list(filters) do
  for {flag, value} <- Enum.zip(filters, values), do: mask(flag, value, replacement)
end

# Truthy flag: substitute the replacement.
def mask(flag, _value, replacement) when flag, do: replacement

# Falsy flag: keep the original value.
def mask(_flag, value, _replacement), do: value
@doc """
Generate zeros following the structure of the `list` given.
## Examples
    iex> Broca.Naive.NN.zeros_like([])
    []
    iex> Broca.Naive.NN.zeros_like([[1], []])
    [[0.0], []]
    iex> Broca.Naive.NN.zeros_like([1, 2, 3])
    [0.0, 0.0, 0.0]
    iex> Broca.Naive.NN.zeros_like([[1, 2], [3, 4, 5]])
    [[0.0, 0.0], [0.0, 0.0, 0.0]]
"""
def zeros_like([]), do: []

# Nested list: recurse into every sub-list (ragged shapes are fine).
def zeros_like(list) when is_list(hd(list)) do
  Enum.map(list, &zeros_like/1)
end

# Flat list: one 0.0 per element.
def zeros_like(list), do: Enum.map(list, fn _ -> 0.0 end)
@doc """
Get the list of lengths along each nesting level, probing the first
element at each depth (assumes a non-ragged list).
## Examples
    iex> Broca.Naive.NN.shape([1, 2, 3])
    [3]
    iex> Broca.Naive.NN.shape([[1, 2], [3, 4], [5, 6]])
    [3, 2]
"""
def shape(list), do: shape(list, [])

# Accumulate lengths (reversed) while descending along hd/1.
def shape(list, acc) when is_list(list) do
  shape(hd(list), [length(list) | acc])
end

# Hit a scalar: restore outer-to-inner order.
def shape(_scalar, acc), do: Enum.reverse(acc)

@doc """
Total number of elements implied by the shape of the list.
Expects nested lists of equal sizes.
## Examples
    iex> Broca.Naive.NN.data_size([[[1, 1, 1]], [[1, 1, 1]]])
    6
"""
def data_size(list) do
  list |> shape() |> Enum.reduce(1, &(&1 * &2))
end
@doc """
Create a string describing the shape of the list, e.g. "[2, 2]".
## Examples
    iex> Broca.Naive.NN.shape_string([[1, 2], [2, 2]])
    "[2, 2]"
"""
def shape_string([]), do: "[]"

def shape_string(list) do
  list |> shape() |> list_string()
end

# Render a list of integers as "[a, b, c]".
def list_string(list) do
  "[" <> Enum.map_join(list, ", ", &Integer.to_string/1) <> "]"
end
@doc """
Add 0.0 `pad_count` times around the `list` given
## Examples
iex> a = [1..10 |> Enum.to_list |> Enum.map(&(&1 / 1.0))]
iex> Broca.Naive.NN.pad(a, 1)
[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
iex> a = [1..10 |> Enum.to_list |> Enum.map(&(&1 / 1.0))]
iex> Broca.Naive.NN.pad(a, 2)
[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]
iex> Broca.Naive.NN.pad([[1, 2, 3], [4, 5, 6], [7, 8, 9]], 1)
[[0.0, 0.0, 0.0, 0.0, 0.0],[0.0, 1, 2, 3, 0.0],[0.0, 4, 5, 6, 0.0],[0.0, 7, 8, 9, 0.0],[0.0, 0.0, 0.0, 0.0, 0.0]]
"""
# NOTE(review): only a 2-d clause exists — pad/2 on a flat list raises
# FunctionClauseError.
def pad(list, pad_count) when is_list(hd(list)) do
  # Pad each row left and right, then add whole zero rows top and bottom.
  list =
    list
    |> Enum.map(
      &(List.duplicate(0.0, pad_count) ++
          Enum.reverse(List.duplicate(0.0, pad_count) ++ Enum.reverse(&1)))
    )
  List.duplicate(List.duplicate(0.0, length(hd(list))), pad_count) ++
    Enum.reverse(
      List.duplicate(List.duplicate(0.0, length(hd(list))), pad_count) ++ Enum.reverse(list)
    )
end
@doc """
Create filtered list
## Examples
iex> list = [[[[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15], [16, 17, 18, 19, 20], [21, 22, 23, 24, 25]]]]
iex> Broca.Naive.NN.matrix_filtering(list, 3, 3)
[[[[1, 2, 3, 6, 7, 8, 11, 12, 13], [2, 3, 4, 7, 8, 9, 12, 13, 14], [3, 4, 5, 8, 9, 10, 13, 14, 15]],
[[6, 7, 8, 11, 12, 13, 16, 17, 18], [7, 8, 9, 12, 13, 14, 17, 18, 19], [8, 9, 10, 13, 14, 15, 18, 19, 20]],
[[11, 12, 13, 16, 17, 18, 21, 22, 23], [12, 13, 14, 17, 18, 19, 22, 23, 24], [13, 14, 15, 18, 19, 20, 23, 24, 25]]]]
iex> list = [[[[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15], [16, 17, 18, 19, 20], [21, 22, 23, 24, 25]]]]
iex> Broca.Naive.NN.matrix_filtering(list, 3, 3, 2)
[[[[1, 2, 3, 6, 7, 8, 11, 12, 13], [3, 4, 5, 8, 9, 10, 13, 14, 15]],
[[11, 12, 13, 16, 17, 18, 21, 22, 23], [13, 14, 15, 18, 19, 20, 23, 24, 25]]]]
iex> list = [[[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]]]]
iex> Broca.Naive.NN.matrix_filtering(list, 3, 3, 1, 1)
[[[[0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 0.0, 4.0, 5.0], [0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [0.0, 0.0, 0.0, 2.0, 3.0, 0.0, 5.0, 6.0, 0.0]],
[[0.0, 1.0, 2.0, 0.0, 4.0, 5.0, 0.0, 7.0, 8.0], [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0], [2.0, 3.0, 0.0, 5.0, 6.0, 0.0, 8.0, 9.0, 0.0]],
[[0.0, 4.0, 5.0, 0.0, 7.0, 8.0, 0.0, 0.0, 0.0], [4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 0.0, 0.0, 0.0], [5.0, 6.0, 0.0, 8.0, 9.0, 0.0, 0.0, 0.0, 0.0]]]]
iex> list = [[[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]]], [[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]]]]
iex> Broca.Naive.NN.matrix_filtering(list, 3, 3, 1, 1)
[[[[0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 0.0, 4.0, 5.0], [0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [0.0, 0.0, 0.0, 2.0, 3.0, 0.0, 5.0, 6.0, 0.0]],
[[0.0, 1.0, 2.0, 0.0, 4.0, 5.0, 0.0, 7.0, 8.0], [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0], [2.0, 3.0, 0.0, 5.0, 6.0, 0.0, 8.0, 9.0, 0.0]],
[[0.0, 4.0, 5.0, 0.0, 7.0, 8.0, 0.0, 0.0, 0.0], [4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 0.0, 0.0, 0.0], [5.0, 6.0, 0.0, 8.0, 9.0, 0.0, 0.0, 0.0, 0.0]]],
[[[0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 0.0, 4.0, 5.0], [0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0], [0.0, 0.0, 0.0, 2.0, 3.0, 0.0, 5.0, 6.0, 0.0]],
[[0.0, 1.0, 2.0, 0.0, 4.0, 5.0, 0.0, 7.0, 8.0], [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0], [2.0, 3.0, 0.0, 5.0, 6.0, 0.0, 8.0, 9.0, 0.0]],
[[0.0, 4.0, 5.0, 0.0, 7.0, 8.0, 0.0, 0.0, 0.0], [4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 0.0, 0.0, 0.0], [5.0, 6.0, 0.0, 8.0, 9.0, 0.0, 0.0, 0.0, 0.0]]]]
iex> list = [[[[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15], [16, 17, 18, 19, 20], [21, 22, 23, 24, 25]]]]
iex> Broca.Naive.NN.matrix_filtering(list, 3, 3, 1, 0, fn l -> Enum.max(l) end)
[[[13, 14, 15],
[18, 19, 20],
[23, 24, 25]]]
iex> tensor = [[[[1, 2, 3], [4, 5, 6], [7, 8, 9]], [[10, 11, 12], [13, 14, 15], [16, 17, 18]], [[19, 20, 21], [22, 23, 24], [25, 26, 27]]], \
[[[28, 29, 30], [31, 32, 33], [34, 35, 36]], [[37, 38, 39], [40, 41, 42], [43, 44, 45]], [[46, 47, 48], [49, 50, 51], [52, 53, 54]]]]
iex> Broca.Naive.NN.matrix_filtering(tensor, 2, 2, 1, 0, fn x -> x end)
[[[[1, 2, 4, 5, 10, 11, 13, 14, 19, 20, 22, 23], [2, 3, 5, 6, 11, 12, 14, 15, 20, 21, 23, 24]],
[[4, 5, 7, 8, 13, 14, 16, 17, 22, 23, 25, 26], [5, 6, 8, 9, 14, 15, 17, 18, 23, 24, 26, 27]]],
[[[28, 29, 31, 32, 37, 38, 40, 41, 46, 47, 49, 50], [29, 30, 32, 33, 38, 39, 41, 42, 47, 48, 50, 51]],
[[31, 32, 34, 35, 40, 41, 43, 44, 49, 50, 52, 53], [32, 33, 35, 36, 41, 42, 44, 45, 50, 51, 53, 54]]]]
"""
def matrix_filtering(
      list,
      kernel_height,
      kernel_width,
      stride \\ 1,
      padding \\ 0,
      map_func \\ fn list -> list end,
      type \\ :merge
    )
# 4-d input: filter each 3-d element independently.
def matrix_filtering(list, kernel_height, kernel_width, stride, padding, map_func, type)
    when is_4dlist(list) do
  Enum.map(
    list,
    &matrix_filtering(&1, kernel_height, kernel_width, stride, padding, map_func, type)
  )
end
# 3-d input, :merge — per-channel patches are concatenated together.
def matrix_filtering(list, kernel_height, kernel_width, stride, padding, map_func, :merge)
    when is_3dlist(list) do
  list
  |> Enum.reverse()
  |> Enum.reduce(
    nil,
    &concat(
      matrix_filtering_impl(&1, kernel_height, kernel_width, stride, padding, map_func),
      &2
    )
  )
end
# 3-d input, any other type — keep per-channel results separate.
def matrix_filtering(list, kernel_height, kernel_width, stride, padding, map_func, _)
    when is_3dlist(list) do
  Enum.map(
    list,
    &matrix_filtering_impl(&1, kernel_height, kernel_width, stride, padding, map_func)
  )
end
# Slide a kernel_height x kernel_width window across one 2-d plane and
# apply map_func to each flattened patch (im2col-style extraction).
defp matrix_filtering_impl(list, kernel_height, kernel_width, stride, padding, map_func) do
  list = if padding == 0, do: list, else: pad(list, padding)
  org_h = length(list)
  org_w = length(hd(list))
  # Standard convolution output size: (input - kernel) / stride + 1.
  out_h = div(org_h - kernel_height, stride) + 1
  out_w = div(org_w - kernel_width, stride) + 1
  for y <- for(i <- 0..(out_h - 1), do: i * stride) |> Enum.filter(&(&1 < org_h)) do
    for x <- for(i <- 0..(out_w - 1), do: i * stride) |> Enum.filter(&(&1 < org_w)) do
      list
      |> Enum.drop(y)
      |> Enum.take(kernel_height)
      |> Enum.map(&(Enum.drop(&1, x) |> Enum.take(kernel_width)))
      |> List.flatten()
      |> map_func.()
    end
  end
end
# Apply `func` to every leaf of an arbitrarily nested list, preserving
# the nesting structure.
def for_each(list, func) when is_list(list) do
  Enum.map(list, fn element -> for_each(element, func) end)
end

def for_each(value, func), do: func.(value)
end
|
lib/broca/naive/nn.ex
| 0.799599
| 0.470493
|
nn.ex
|
starcoder
|
defmodule ExchemaCoercion.Coercions.Numeric do
@moduledoc """
Coercion functions for numeric types
"""
alias Exchema.Types, as: T
@doc """
Converts string to a numeric type.
It is a Higher-Order-Function because it allows
customization based on wether or not we want the
coercion to be strict (it is, ensure there is no "garbage")
# Examples
iex> ExchemaCoercion.Coercions.Numeric.from_string("1a", Exchema.Types.Integer, [])
{:ok, 1}
iex> ExchemaCoercion.Coercions.Numeric.from_string("1a", Exchema.Types.Integer, [], :strict)
:error
iex> ExchemaCoercion.Coercions.Numeric.from_string("1a", Exchema.Types.Float, [])
{:ok, 1.0}
iex> ExchemaCoercion.Coercions.Numeric.from_string("1.5", Exchema.Types.Float, [])
{:ok, 1.5}
iex> ExchemaCoercion.Coercions.Numeric.from_string("1a", Exchema.Types.Float, [], :strict)
:error
iex> ExchemaCoercion.Coercions.Numeric.from_string("1a", Exchema.Types.Float, [], :normal)
{:ok, 1.0}
iex> ExchemaCoercion.Coercions.Numeric.from_string("1.5", Exchema.Types.Number, [])
{:ok, 1.5}
iex> ExchemaCoercion.Coercions.Numeric.from_string("1", Exchema.Types.Number, [])
{:ok, 1}
iex> ExchemaCoercion.Coercions.Numeric.from_string("1.5a", Exchema.Types.Number, [], :strict)
:error
"""
@spec from_string(any, Exchema.Type.t(), [ExchemaCoercion.coercion()], :strict | :normal) ::
ExchemaCoercion.result()
def from_string(value, type, _, mode \\ :normal)
def from_string(value, _, _, mode)
when mode not in [:strict, :normal] or not is_binary(value),
do: :error
def from_string(value, T.Integer, _, mode) do
value
|> Integer.parse()
|> wrap_parse_result(mode)
end
def from_string(value, T.Float, _, mode) do
value
|> Float.parse()
|> wrap_parse_result(mode)
end
def from_string(value, T.Number, coercions, mode) do
case Integer.parse(value) do
{value, "." <> _} ->
from_string(value, T.Float, coercions, mode)
_ ->
from_string(value, T.Integer, coercions, mode)
end
end
def from_string(_, _, _, _), do: :error
defp wrap_parse_result({value, ""}, _), do: {:ok, value}
defp wrap_parse_result({value, _}, :normal), do: {:ok, value}
defp wrap_parse_result(_, _), do: :error
@doc """
Coerces a float into an integer by dropping its fractional part.

Only applies when the target type is `T.Integer` and the input is a
float; every other combination returns `:error`.
"""
@spec truncate(any, Exchema.Type.t(), [ExchemaCoercion.coercion()]) :: ExchemaCoercion.result()
def truncate(input, T.Integer, _coercions) when is_float(input), do: {:ok, trunc(input)}
def truncate(_input, _type, _coercions), do: :error
@doc """
Coerces an integer into a float (e.g. `1` becomes `1.0`).

Only applies when the target type is `T.Float` and the input is an
integer; every other combination returns `:error`.
"""
@spec integer_as_float(any, Exchema.Type.t(), [ExchemaCoercion.coercion()]) ::
        ExchemaCoercion.result()
def integer_as_float(input, T.Float, _coercions) when is_integer(input), do: {:ok, input * 1.0}
def integer_as_float(_input, _type, _coercions), do: :error
end
|
lib/coercions/numeric.ex
| 0.819496
| 0.775987
|
numeric.ex
|
starcoder
|
defmodule Geometry do
  @moduledoc """
  Playground module exploring basic Elixir features: named functions, the
  pipe operator, string handling and sigils, default arguments,
  first-class functions, closures and higher-level types (ranges,
  keyword lists, maps).
  """

  # Kept from the original file; currently unreferenced (the compiler
  # will warn about the unused attribute).
  @forca "ahhhh!"

  @doc "Returns the area of an `a` x `b` rectangle."
  def rectangle_area(a, b) do
    a * b
  end

  @doc "Returns `a` squared."
  def square(a) do
    a * a
  end

  @doc "Returns `a` to the fourth power, built by piping through `square/1` twice."
  def power_of_four(a) do
    a
    |> square
    |> square
  end

  @doc "Demonstrates interpolation, sigils, heredocs and binary concatenation."
  def manipulate_string(str) do
    # Embedded expression defined inside string
    IO.puts "Embedded expression: #{3 + 0.14}"
    # Calling already defined var inside string
    IO.puts "var inside string: #{str}"
    # using Sigils(can be useful if you want to include quotes in a string)
    sigil_one = ~s(This a string like #{str} and can use quotes easily with escaping "for example")
    IO.puts sigil_one
    # Sigil uppercase version ~S that doesn't handle interpolation or escape characters (\):
    sigil_two = ~S(Not escaped \n)
    IO.puts sigil_two
    # heredocs for better formating with texts containing multiple lines
    here_docs =
      """
      Heredoc must end on its own line ""
      """
    IO.puts here_docs
    # Strings are actually binaries; you can concatenate them using <>
    concat_str = "hahaha vc é " <> "mt loku " <> "véi!"
    IO.puts concat_str
    # using single quotes -> it creates a character list which is
    # essentially a list of integers representing single characters.
    IO.puts 'ABC'
    # It's the same as creating the list itself:
    IO.puts [65, 66, 67]
  end

  @doc "Sums all four arguments; `b` and `d` default to 5 when omitted."
  def fun(a, b \\ 5, c, d \\ 5) do
    a + b + c + d
  end

  @doc "Applies an anonymous squaring lambda to `a` (first-class functions)."
  def first_class_fun(a) do
    square = fn x -> x * x end
    square.(a)
  end

  @doc "Prints 1..3 twice and evaluates two equivalent lambda forms."
  def lambda_test() do
    # first version(using pure lambda)
    Enum.each(
      [1, 2, 3],
      fn x -> IO.puts(x) end
    )
    # second version(using capture)
    Enum.each(
      [1, 2, 3],
      &IO.puts/1
    )
    # first version(using pure lambda)
    lambda = fn x, y, z -> x * y + z end
    IO.puts lambda.(1, 2, 3)
    # second version(using capture)
    lambda_cap = &(&1 * &2 + &3)
    IO.puts lambda_cap.(1, 2, 3)
  end

  @doc """
  Shows that a closure captures the value bound at definition time:
  rebinding the name afterwards does not affect the lambda, which still
  prints 5.
  """
  def closure_example() do
    outside_var = 5
    lambda = fn -> IO.puts(outside_var) end
    # The rebinding is intentionally unused (underscore avoids the
    # compiler warning); the lambda keeps pointing at the original value.
    _outside_var = 6
    lambda.()
  end

  @doc "Demonstrates ranges and keyword lists."
  def higher_level_types() do
    # Ranges
    range = 1..2
    IO.puts("2 in range? #{2 in range}")
    IO.puts("-1 in range? #{-1 in range}")
    # Keyword List
    # 1st way (explicit tuples) — underscored because it is immediately shadowed
    _days = [{:monday, 1}, {:tuesday, 2}, {:wednesday, 3}]
    # best way (keyword syntax)
    days = [monday: 1, tuesday: 2, wednesday: 3]
    IO.puts Keyword.get(days, :monday)
  end

  @doc """
  Builds a map from a list of `{day, number}` tuples.

  The original used `HashDict`, which has been deprecated since
  Elixir 1.4 in favour of `Map`; the plain map returned here supports
  the same lookups.
  """
  def hash_dict() do
    [{:monday, 1}, {:tuesday, 2}, {:wednesday, 3}]
    |> Enum.into(%{})
  end

  # missing hashSet and iolists from CHAPTER 1(ELIXIR IN ACTION)...
end
|
geometry.ex
| 0.66628
| 0.535281
|
geometry.ex
|
starcoder
|
defmodule FontAwesome do
  @moduledoc """
  This package adds a convenient way of using [Font Awesome](https://fontawesome.com) SVGs with your Phoenix, Phoenix LiveView and Surface applications.
  You can find the original docs [here](https://fontawesome.com) and repo [here](https://github.com/FortAwesome/Font-Awesome).
  ## Installation
  Add `ex_fontawesome` to the list of dependencies in `mix.exs`:
      def deps do
        [
          {:ex_fontawesome, "~> 0.3.0"}
        ]
      end
  Then run `mix deps.get`.
  ## Usage
  #### With Eex or Leex
      <%= FontAwesome.icon("address-book", type: "regular", class: "h-4 w-4") %>
  #### With Surface
      <FontAwesome.Components.Icon name="address-book" type="regular" class="h-4 w-4" />
  ## Config
  Defaults can be set in the `FontAwesome` application configuration.
      config :ex_fontawesome, type: "regular"
  """
  alias __MODULE__.Icon

  # Compile-time step: enumerate every SVG shipped in the npm Font Awesome
  # package and bake each one into this module as an icon.
  icon_paths = "node_modules/@fortawesome/fontawesome-free/svgs/**/*.svg" |> Path.wildcard()
  icons =
    for icon_path <- icon_paths do
      # Registering each SVG as an external resource makes the compiler
      # recompile this module whenever an icon file changes.
      @external_resource Path.relative_to_cwd(icon_path)
      Icon.parse!(icon_path)
    end
  # The distinct icon types (e.g. "regular", "solid", "brands") found on disk.
  types = icons |> Enum.map(& &1.type) |> Enum.uniq()
  @types types
  @doc false
  def types(), do: @types
  @doc false
  # Reads the configured default icon type at runtime, validating it against
  # the compiled-in types. Returns nil when no default is configured; raises
  # ArgumentError for unknown or non-binary values.
  def default_type() do
    case Application.get_env(:ex_fontawesome, :type) do
      nil ->
        nil
      type when is_binary(type) ->
        if type in types() do
          type
        else
          raise ArgumentError,
                "expected default type to be one of #{inspect(types())}, got: #{inspect(type)}"
        end
      type ->
        raise ArgumentError,
              "expected default type to be one of #{inspect(types())}, got: #{inspect(type)}"
    end
  end
  @doc """
  Generates an icon.
  Options may be passed through to the SVG tag for custom attributes.
  ## Options
    * `:type` - the icon type. Accepted values are #{inspect(types)}. Required if default type is not configured.
    * `:class` - the css class added to the SVG tag
  ## Examples
      icon("address-book", type: "regular", class: "h-4 w-4")
      #=> <svg class="h-4 w-4" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 448 512">
            <!-- Font Awesome Free 5.15.3 by @fontawesome - https://fontawesome.com License - https://fontawesome.com/license/free (Icons: CC BY 4.0, Fonts: SIL OFL 1.1, Code: MIT License) -->
            <path d="M436 160c6.6 0 12-5.4 12-12v-40c0-6.6-5.4-12-12-12h-20V48c0-26.5-21.5-48-48-48H48C21.5 0 0 21.5 0 48v416c0 26.5 21.5 48 48 48h320c26.5 0 48-21.5 48-48v-48h20c6.6 0 12-5.4 12-12v-40c0-6.6-5.4-12-12-12h-20v-64h20c6.6 0 12-5.4 12-12v-40c0-6.6-5.4-12-12-12h-20v-64h20zm-68 304H48V48h320v416zM208 256c35.3 0 64-28.7 64-64s-28.7-64-64-64-64 28.7-64 64 28.7 64 64 64zm-89.6 128h179.2c12.4 0 22.4-8.6 22.4-19.2v-19.2c0-31.8-30.1-57.6-67.2-57.6-10.8 0-18.7 8-44.8 8-26.9 0-33.4-8-44.8-8-37.1 0-67.2 25.8-67.2 57.6v19.2c0 10.6 10 19.2 22.4 19.2z"/>
          </svg>
  """
  @spec icon(String.t(), keyword) :: Phoenix.HTML.safe()
  def icon(name, opts \\ []) when is_binary(name) and is_list(opts) do
    {type, opts} = Keyword.pop(opts, :type, default_type())
    unless type do
      raise ArgumentError,
            "expected type in options, got: #{inspect(opts)}"
    end
    unless type in types() do
      raise ArgumentError,
            "expected type to be one of #{inspect(types())}, got: #{inspect(type)}"
    end
    icon(type, name, opts)
  end
  # Compile-time step: generate one private icon/3 clause per parsed SVG,
  # matching on its literal type and name so lookup is a function-head match.
  for %Icon{type: type, name: name, file: file} <- icons do
    defp icon(unquote(type), unquote(name), opts) do
      attrs = Icon.opts_to_attrs(opts)
      Icon.insert_attrs(unquote(file), attrs)
    end
  end
  # Fallback clause: the requested type/name pair was not found on disk
  # at compile time.
  defp icon(type, name, _opts) do
    raise ArgumentError,
          "icon #{inspect(name)} with type #{inspect(type)} does not exist."
  end
end
|
lib/fontawesome.ex
| 0.893674
| 0.55441
|
fontawesome.ex
|
starcoder
|
require Logger

defmodule Day23.NAT do
  @moduledoc """
  Keeps the network alive by watching for idle periods.

  Packets addressed to `255` during normal traffic are delivered here.
  Once the router reports that every address is idle, the NAT may re-send
  the most recently received packet to the computer at address `0` to
  restart traffic.

  Should the NAT ever deliver two consecutive packets with an identical
  `y` value to computer `0`, it stops and yields that `{x, y}` pair as
  the task result instead.
  """

  @typedoc """
  A NAT pid.
  """
  @type t() :: pid()

  @typedoc """
  A mode that the NAT can run in.
  """
  @type mode() :: :once | :ongoing

  @doc """
  Start a new NAT for a router.

  Create the NAT before any computer begins sending traffic; it receives
  nothing from the router until registered via `Day23.Router.set_nat/2`.
  """
  @spec async(Day23.Router.t(), mode) :: Task.t()
  def async(router, mode) do
    Task.async(__MODULE__, :run, [router, mode])
  end

  @doc """
  Runs the NAT in the given mode.

  With `:once` the NAT simply returns the first packet the router sends
  it — no traffic restarting happens. With `:ongoing` it performs its
  normal duty of restarting network traffic whenever everything idles.
  """
  @spec run(Day23.Router.t(), mode) :: {number, number}
  def run(router, mode) do
    Logger.metadata(mode: mode)

    case mode do
      :once -> await_first_packet()
      :ongoing -> keep_alive(router, nil, nil)
    end
  end

  # Blocks until a single packet arrives, then returns it.
  defp await_first_packet do
    receive do
      {:packet, {x, y}} ->
        Logger.debug("nat received packet", x: x, y: y)
        {x, y}
    end
  end

  # Main loop: `pending` is the most recently received packet, `previous`
  # the packet last rebroadcast to address 0 (nil until the first restart).
  defp keep_alive(router, pending, previous) do
    receive do
      {:packet, {x, y}} ->
        Logger.debug("nat received packet", x: x, y: y)
        keep_alive(router, {x, y}, previous)

      :all_idle ->
        {x, y} = pending
        {_, prev_y} = previous || {nil, nil}
        Logger.debug("all queues idle", x: x, y: y, prev_y: prev_y)

        if y == prev_y do
          # Two consecutive restarts with the same y: terminate with the pair.
          {x, y}
        else
          Day23.Router.route(router, {0, x, y})
          keep_alive(router, pending, pending)
        end
    end
  end
end
|
aoc2019_elixir/apps/day23/lib/nat.ex
| 0.834103
| 0.656896
|
nat.ex
|
starcoder
|
defmodule Custodian.Github.Tentacat.Client do
  @moduledoc """
  Provides an implementation of the `Custodian.Github.Client` behaviour with
  the GitHub API.
  This provides two different authentication methods for interacting
  with the GitHub v3 API over HTTPS.
  """
  @behaviour Custodian.Github.Client

  alias JOSE.{JWK, JWS, JWT}
  alias Tentacat.Client
  alias Tentacat.App.Installations

  # GitHub App identifier used as the JWT issuer (`iss`) claim. Previously
  # an unexplained magic string inline in jwt_payload/0.
  @github_app_id "7528"

  # App JWT lifetime; GitHub rejects app tokens valid for more than 10 minutes.
  @jwt_ttl_seconds 10 * 60

  @doc """
  App authentication with the GitHub API client using the app's private key.
  This token has limited functionality and is mostly good for getting a token to be used on individual repos. [More info].
  [More info]: https://developer.github.com/apps/building-github-apps/authentication-options-for-github-apps/#authenticating-as-a-github-app
  """
  @spec app :: Tentacat.Client.t()
  def app do
    key = private_key(Application.get_env(:custodian, :github_key))
    signed = JWT.sign(key, %{"alg" => "RS256"}, jwt_payload())
    # JWS.compact/1 returns {metadata, token}; only the token is needed.
    {_, token} = JWS.compact(signed)
    Client.new(%{jwt: token})
  end

  @doc """
  Installation authentication with the GitHub API client using an access
  token.
  This token is created for a specific installation of the app. Useful for
  taking action on specific repos. [More info].
  The token is cached in memory using Erlang's ETS. This allows it to be shared
  by other processes. Each token GitHub generates is good for 60 minutes. We
  set the TTL to 55 minutes to ensure we're grabbing a fresh one before then.
  [More info]: https://developer.github.com/apps/building-github-apps/authentication-options-for-github-apps/#authenticating-as-an-installation
  """
  @spec installation(integer) :: struct
  def installation(installation_id) do
    token =
      ConCache.get_or_store(:token_cache, installation_id, fn ->
        # 201 Created is the only acceptable response; anything else crashes
        # here rather than caching a bad token.
        {201, %{"token" => token}} = Installations.token(installation_id, app())
        token
      end)

    Client.new(%{access_token: token})
  end

  # Loads the app's RSA private key, either from a PEM file on disk or from
  # a PEM string stored in the named environment variable.
  @spec private_key(tuple) :: list
  defp private_key({:file, file_path}) do
    JWK.from_pem_file(file_path)
  end

  defp private_key({:system, config}) do
    JWK.from_pem(System.get_env(config))
  end

  # Claims for the app-level JWT: issued-at, a short expiry and the app id
  # as issuer.
  @spec jwt_payload :: map
  defp jwt_payload do
    now = DateTime.to_unix(DateTime.utc_now())

    %{
      "iat" => now,
      "exp" => now + @jwt_ttl_seconds,
      "iss" => @github_app_id
    }
  end
end
|
lib/custodian/github/tentacat/client.ex
| 0.625324
| 0.422594
|
client.ex
|
starcoder
|
defmodule Cardanoex.Transaction do
  @moduledoc """
  The Transaction module lets you work with transactions for a wallet.
  """

  alias Cardanoex.Backend
  alias Cardanoex.Util

  @type asset :: %{
          policy_id: String.t(),
          asset_name: String.t(),
          quantity: non_neg_integer()
        }
  @type payment :: %{
          address: String.t(),
          amount: non_neg_integer(),
          assets: list(asset()) | nil
        }
  @type create_transaction :: %{
          passphrase: String.t(),
          payments: list(payment()),
          withdrawal: String.t() | nil,
          metadata: map()
        }
  @type amount :: %{
          quantity: non_neg_integer(),
          unit: String.t()
        }
  @type fee_estimation :: %{
          deposit: amount(),
          estimated_max: amount(),
          estimated_min: amount(),
          minimum_coins: list(amount())
        }
  @type input :: %{
          address: String.t(),
          amount: amount(),
          assets: list(asset()),
          id: String.t(),
          index: non_neg_integer()
        }
  @type output :: %{
          address: String.t(),
          amount: amount(),
          assets: list(asset())
        }
  @type collateral :: %{
          address: String.t(),
          amount: amount(),
          id: String.t(),
          index: non_neg_integer()
        }
  @type withdrawal :: %{
          stake_address: String.t(),
          amount: amount()
        }
  @type transaction ::
          %{
            amount: amount(),
            collateral: list(collateral()),
            deposit: amount(),
            depth: %{quantity: non_neg_integer(), unit: String.t()},
            direction: String.t(),
            expires_at: %{
              absolute_slot_number: non_neg_integer(),
              epoch_number: non_neg_integer(),
              slot_number: non_neg_integer(),
              time: String.t()
            },
            fee: amount(),
            id: String.t(),
            inputs: list(input()),
            inserted_at: %{
              absolute_slot_number: non_neg_integer(),
              epoch_number: non_neg_integer(),
              height: %{quantity: non_neg_integer(), unit: String.t()},
              slot_number: non_neg_integer(),
              time: String.t()
            },
            metadata: map | nil,
            mint: list(),
            outputs: list(output()),
            pending_since: %{
              absolute_slot_number: non_neg_integer(),
              epoch_number: non_neg_integer(),
              height: %{quantity: non_neg_integer(), unit: String.t()},
              slot_number: non_neg_integer(),
              time: String.t()
            },
            status: String.t(),
            withdrawals: list(withdrawal),
            script_validity: String.t() | nil
          }

  @spec estimate_fee(String.t(), create_transaction()) ::
          {:error, String.t()} | {:ok, fee_estimation()}
  @doc """
  Estimate fee for the transaction.
  The estimate is made by assembling multiple transactions and analyzing the distribution of their fees.
  The `estimated_max` is the highest fee observed, and the `estimated_min` is the fee which is lower than at least 90% of the fees observed.
  ## Options
    * `wallet_id` - hex based string. 40 characters
    * `transaction` - A map with the following structure:
  ```elixir
  %{
    payments: [
      %{
        address: "addr_test1qruzy7l5...nq04es9elzy7",
        amount: %{quantity: 42_000_000, unit: "lovelace"}
      }
    ]
  }
  # With asset:
  %{
    payments: [
      %{
        address:"addr_test1qruzy7l5...nq04es9elzy7",
        amount: %{quantity: 1_407_406, unit: "lovelace"},
        assets: [
          %{
            policy_id: "6b8d07d69639e9413dd637a1a815a7323c69c86abbafb66dbfdb1aa7",
            asset_name: "",
            quantity: 0
          }
        ]
      }
    ]
  }
  # With metadata:
  %{
    payments: [
      %{
        address: "addr_test1qruzy7l5...nq04es9elzy7",
        amount: %{quantity: 1_407_406, unit: "lovelace"}
      }
    ],
    metadata: %{"0" => %{"string" => "cardano"}, "1" => %{"int" => 14}}
  }
  ```
  """
  def estimate_fee(wallet_id, transaction) do
    wallet_id
    |> Backend.estimate_transaction_fee(transaction)
    |> atomize_result()
  end

  @spec create(String.t(), create_transaction()) :: {:error, String.t()} | {:ok, transaction}
  @doc """
  Create and send transaction from the wallet.
  ## Options
    * `wallet_id` - hex based string. 40 characters
    * `transaction` - A map with the following structure:
  ```elixir
  %{
    payments: [
      %{
        address: "addr_test1qruzy7l5...nq04es9elzy7",
        amount: %{quantity: 42_000_000, unit: "lovelace"}
      }
    ]
  }
  # With asset:
  %{
    payments: [
      %{
        address:"addr_test1qruzy7l5...nq04es9elzy7",
        amount: %{quantity: 1_407_406, unit: "lovelace"},
        assets: [
          %{
            policy_id: "6b8d07d69639e9413dd637a1a815a7323c69c86abbafb66dbfdb1aa7",
            asset_name: "",
            quantity: 0
          }
        ]
      }
    ]
  }
  # With metadata:
  %{
    payments: [
      %{
        address: "addr_test1qruzy7l5...nq04es9elzy7",
        amount: %{quantity: 1_407_406, unit: "lovelace"}
      }
    ],
    metadata: %{"0" => %{"string" => "cardano"}, "1" => %{"int" => 14}}
  }
  ```
  """
  def create(wallet_id, transaction) do
    wallet_id
    |> Backend.create_transaction(transaction)
    |> atomize_result()
  end

  @spec list(String.t(),
          start: String.t(),
          stop: String.t(),
          order: atom(),
          min_withdrawal: non_neg_integer()
        ) ::
          {:error, String.t()} | {:ok, list(transaction())}
  @doc """
  Lists all incoming and outgoing wallet's transactions.
  ## Options
    * `start` - An optional start time in ISO 8601 date-and-time format. Basic and extended formats are both accepted. Times can be local (with a timezone offset) or UTC. If both a start time and an end time are specified, then the start time must not be later than the end time. Example: `2008-08-08T08:08:08Z`
    * `stop` - An optional end time in ISO 8601 date-and-time format. Basic and extended formats are both accepted. Times can be local (with a timezone offset) or UTC. If both a start time and an end time are specified, then the start time must not be later than the end time.
    * `order` - Can be set to `:descending` or `:ascending`. Defaults to `:descending`
    * `min_withdrawal` - Returns only transactions that have at least one withdrawal above the given amount. This is particularly useful when set to `1` in order to list the withdrawal history of a wallet.
  """
  def list(wallet_id, options \\ []) do
    default = [
      start: nil,
      stop: nil,
      order: :descending,
      min_withdrawal: nil
    ]

    opts = Enum.into(Keyword.merge(default, options), %{})

    wallet_id
    |> Backend.list_transactions(opts.start, opts.stop, opts.order, opts.min_withdrawal)
    |> atomize_results()
  end

  @spec get(String.t(), String.t()) :: {:error, String.t()} | {:ok, transaction()}
  @doc """
  Get transaction by id.
  ## Options
    * `transaction_id` - Transaction ID
  """
  def get(wallet_id, transaction_id) do
    wallet_id
    |> Backend.get_transaction(transaction_id)
    |> atomize_result()
  end

  # The Backend returns maps with string keys; on success convert them to
  # atom keys, on failure pass the error message through. These helpers
  # replace the same `case` duplicated in every public function.
  defp atomize_result({:ok, payload}), do: {:ok, Util.keys_to_atom(payload)}
  defp atomize_result({:error, message}), do: {:error, message}

  defp atomize_results({:ok, payloads}),
    do: {:ok, Enum.map(payloads, &Util.keys_to_atom/1)}

  defp atomize_results({:error, message}), do: {:error, message}
end
|
lib/transaction.ex
| 0.901044
| 0.6162
|
transaction.ex
|
starcoder
|
defmodule BitPalWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.
  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.
  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use BitPalWeb.ConnCase, async: true`, although
  this option is not recommended for other databases.
  """
  use BitPalFactory
  import Phoenix.LiveViewTest
  import BitPal.TestHelpers
  alias BitPal.Accounts
  alias BitPal.DataCase
  alias BitPal.HandlerSubscriberCollector
  alias BitPal.IntegrationCase

  defmacro __using__(params) do
    quote do
      use ExUnit.Case, unquote(params)
      use BitPal.CaseHelpers
      import BitPalWeb.ConnCase
      import Phoenix.ConnTest
      import Phoenix.LiveViewTest
      import Plug.Conn
      alias BitPal.HandlerSubscriberCollector
      alias BitPalWeb.Router.Helpers, as: Routes
      # The default endpoint for testing
      @endpoint BitPalWeb.Endpoint
      @integration Keyword.get(unquote(params), :integration)
      setup tags do
        # Integration cases get the full stack; plain cases only get the
        # sandboxed database. Both receive a fresh test connection.
        res =
          if @integration do
            IntegrationCase.setup_integration(tags)
          else
            DataCase.setup_db(tags)
            %{}
          end
          |> Map.put(:conn, Phoenix.ConnTest.build_conn())

        {:ok, res}
      end
    end
  end

  @doc """
  Setup helper that registers and logs in users.
      setup :register_and_log_in_user
  It stores an updated connection and a registered user in the
  test context.
  """
  def register_and_log_in_user(tags = %{conn: conn}) do
    # NOTE(review): the original call here was a redaction artifact
    # ("<PASSWORD>()") that did not compile. We generate a unique password
    # inline; confirm whether BitPalFactory exposes a dedicated password
    # helper that should be used instead.
    password = "test-password-#{System.unique_integer([:positive])}"
    user = create_user(password: password)
    Map.merge(tags, %{conn: log_in_user(conn, user), user: user, password: password})
  end

  @doc """
  Logs the given `user` into the `conn`.
  It returns an updated `conn`.
  """
  def log_in_user(conn, user) do
    token = Accounts.generate_user_session_token(user)

    conn
    |> Phoenix.ConnTest.init_test_session(%{})
    |> Plug.Conn.put_session(:user_token, token)
  end

  @doc """
  Setup helper that creates a store owned by `tags.user` and puts it in
  the test context under `:store`.
  """
  def add_store(tags = %{user: user}, attrs \\ %{}) do
    Enum.into(tags, %{
      store: create_store(user, attrs)
    })
  end

  @doc """
  Setup helper that creates an open invoice for `tags.store` in
  `tags.currency_id` and puts it in the test context under `:invoice`.
  """
  def add_open_invoice(tags = %{store: store, currency_id: currency_id}, attrs \\ %{}) do
    {:ok, invoice, _stub, _handler} =
      HandlerSubscriberCollector.create_invoice(
        Enum.into(attrs, %{
          store_id: store.id,
          currency_id: currency_id
        })
      )

    Enum.into(tags, %{
      invoice: invoice
    })
  end

  @doc """
  Re-renders `view` until its output matches `match` (or the helper's
  timeout elapses) — useful for asynchronous LiveView updates.
  """
  def render_eventually(view, match) do
    eventually(fn -> render(view) =~ match end)
  end

  @doc """
  Like `render_eventually/2` but scoped to the element selected by
  `selector` (optionally filtered by `text_filter`).
  """
  def render_eventually(view, match, selector, text_filter \\ nil) do
    eventually(fn ->
      view |> element(selector, text_filter) |> render() =~ match
    end)
  end
end
|
test/bitpal_web/support/conn_case.ex
| 0.80077
| 0.408719
|
conn_case.ex
|
starcoder
|
defmodule AsyncWith.Runner do
  @moduledoc false
  import AsyncWith.Clauses
  import AsyncWith.Macro, only: [rename_ignored_vars: 1, var_map: 1]
  @doc """
  Transforms the list of `clauses` into a format that the runner can work with.
  The runner expects each clause to be represented by a map with these fields:
    * `:function` - an anonymous function that wraps the clause so it can be
      executed inside a task.
      It must accept only one argument, a map with the values of the variables
      used inside the clause. For example, `%{opts: %{width: 10 }}` could be
      a valid argument for the clause `{:ok, width} <- Map.fetch(opts, :width)`.
      In case of success, it must return a triplet with `:ok`, the value
      returned by the execution of the right hand side of the clause and a map
      with the values defined in the left hand side of the clause. For example,
      the clause `{:ok, width} <- Map.fetch(opts, :width)` with the argument
      `%{opts: %{width: 10 }}` would return `{:ok, {:ok, 10}, %{width: 10}}`.
      In case of error, it must return `{:error, right_value}` if the sides of
      the clause do not match using the arrow operator `<-`; `{:nomatch, error}`
      if the sides of the clause do not match using the match operator `=`;
      `{:norescue, exception}` if the clause raises any exception; and
      `{:nocatch, value}` if the clause throws any value.
    * `:deps` - the list of variables that the clause depends on.
  This operation is order dependent.
  It's important to keep in mind that this function is executed at compile time,
  and that it must return a quoted expression that represents the first argument
  that will be passed to `run_nolink/2` at runtime.
  """
  @spec format_clauses(Macro.t()) :: Macro.t()
  def format_clauses(clauses) do
    clauses
    |> format_bare_expressions()
    |> rename_ignored_vars()
    |> rename_local_vars()
    |> get_defined_and_used_local_vars()
    |> Enum.map(&format_clause/1)
  end
  # Builds the quoted `%{function: ..., deps: ...}` map for one clause:
  # the wrapper function plus the variables the clause depends on.
  defp format_clause({clause, {defined_vars, used_vars}}) do
    function = clause_to_function({clause, {defined_vars, used_vars}})
    {:%{}, [], [function: function, deps: used_vars]}
  end
  # Wraps one `left <- right` (or `left = right`) clause in a quoted
  # single-argument function. The `with` destructures the incoming vars
  # map, evaluates the right-hand side, and matches it against the left
  # pattern; try/rescue/catch translate every failure mode into the
  # tagged tuples documented on format_clauses/1.
  defp clause_to_function({{operator, meta, [left, right]}, {defined_vars, used_vars}}) do
    quote do
      fn vars ->
        try do
          with unquote(var_map(used_vars)) <- vars,
               value <- unquote(right),
               unquote({operator, meta, [left, Macro.var(:value, __MODULE__)]}) do
            {:ok, value, unquote(var_map(defined_vars))}
          else
            error -> {:error, error}
          end
        rescue
          error in MatchError -> {:nomatch, error}
          error -> {:norescue, error}
        catch
          thrown_value -> {:nocatch, thrown_value}
        end
      end
    end
  end
  @doc """
  Executes `run/1` in a supervised task (under `AsyncWith.TaskSupervisor`) and
  returns the results of the operation.
  The task won't be linked to the caller, see `Task.async/3` for more
  information.
  A `timeout`, in milliseconds, must be provided to specify the maximum time
  allowed for this operation to complete.
  """
  @spec run_nolink([map], non_neg_integer) ::
          {:ok, any} | {:error | :nomatch | :norescue | :nocatch, any}
  def run_nolink(clauses, timeout) do
    task = Task.Supervisor.async_nolink(AsyncWith.TaskSupervisor, fn -> run(clauses) end)
    # Task.yield/2 waits up to `timeout`; on nil the task is still running
    # and Task.shutdown/1 stops it (possibly returning a late result).
    case Task.yield(task, timeout) || Task.shutdown(task) do
      nil -> {:error, {:exit, {:timeout, {AsyncWith, :async, [timeout]}}}}
      {:ok, value} -> value
      error -> {:error, error}
    end
  end
  @doc """
  Executes all the `clauses` and collects their results.
  Each clause is executed inside a new task. Tasks are spawned as soon as all
  the variables that it depends on `:deps` are resolved. It also ensures that,
  if a clause fails, all the running tasks are shut down.
  """
  @spec run([map]) :: {:ok, [any]} | {:error | :nomatch | :norescue | :nocatch, any}
  def run(clauses) do
    # Recurses via await/1 until every clause is marked completed, then
    # returns the clause values in their original order.
    if all_completed?(clauses) do
      {:ok, Enum.map(clauses, & &1.value)}
    else
      clauses
      |> maybe_spawn_tasks()
      |> await()
    end
  end
  defp all_completed?(clauses), do: Enum.all?(clauses, &Map.get(&1, :completed, false))
  # Waits for the next task message: a success feeds the result back into
  # run/1 (possibly unblocking more clauses); any failure tuple shuts down
  # the remaining tasks and is returned as-is; a :DOWN message propagates
  # the task's exit reason.
  defp await(clauses) do
    receive do
      {ref, {:ok, value, vars}} ->
        # Flush the task's :DOWN message so it never reaches this receive.
        Process.demonitor(ref, [:flush])

        clauses
        |> assign_results_and_mark_as_completed(ref, value, vars)
        |> run()
      {_ref, error} ->
        shutdown_tasks(clauses)
        error
      {:DOWN, _ref, _, _, reason} ->
        exit(reason)
    end
  end
  # Spawns a task for every clause whose dependencies are all resolved and
  # that does not already have a task. `vars` merges the bindings produced
  # by every completed clause so far.
  defp maybe_spawn_tasks(clauses) do
    vars = Enum.reduce(clauses, %{}, &Map.merge(&2, Map.get(&1, :vars, %{})))

    Enum.map(clauses, fn clause ->
      if spawn_task?(clause, vars) do
        Map.merge(clause, %{task: Task.async(fn -> clause.function.(vars) end)})
      else
        clause
      end
    end)
  end
  defp spawn_task?(%{task: _task}, _vars), do: false
  defp spawn_task?(%{deps: deps}, vars), do: Enum.empty?(deps -- Map.keys(vars))
  # Records the value and produced bindings on the clause whose task ref
  # matches, marking it completed; all other clauses pass through.
  defp assign_results_and_mark_as_completed(clauses, ref, value, vars) do
    Enum.map(clauses, fn
      %{task: %Task{ref: ^ref}} = clause ->
        Map.merge(clause, %{value: value, vars: vars, completed: true})
      clause ->
        clause
    end)
  end
  # Best-effort shutdown of every clause that has a running task.
  defp shutdown_tasks(clauses) do
    Enum.each(clauses, fn
      %{task: task} -> Task.shutdown(task)
      _ -> nil
    end)
  end
end
|
lib/async_with/runner.ex
| 0.791338
| 0.650433
|
runner.ex
|
starcoder
|
defmodule Multiverses.Clone do
  @moduledoc """
  allows a module to directly clone all of the public functions of a
  given module, except as macros.
  thus multiverse equivalents replicate the functionality of the parent
  module. Consider dropping a cloned module into a `test/support`
  directory so that it can exist as compile-time for test, but not
  for dev or prod.
  ## Usage
  In the following example, `FooModule` has all of its functions ported
  into the current module as `defdelegate/2`. The functions `FooModule.foo/3`
  and `FooModule.foo/4` are not, and must be defined separately.
  ```elixir
  use Multiverses.Clone, module: FooModule, except: [
    foo: 3,
    foo: 4
  ]
  ```
  """

  # For every public function of opts[:module] not listed in opts[:except],
  # injects a `defdelegate` to that module into the caller.
  defmacro __using__(opts) do
    # Both options are mandatory; fail compilation with a clear message
    # pointing at the caller's file/line.
    unless Keyword.has_key?(opts, :module) and
             Keyword.has_key?(opts, :except) do
      raise CompileError,
        file: __CALLER__.file,
        line: __CALLER__.line,
        description: "Clone must have :module and :except options"
    end

    # Expand aliases (e.g. `FooModule`) in the caller's environment before
    # interrogating the module.
    module = Macro.expand(opts[:module], __CALLER__)
    except = Macro.expand(opts[:except], __CALLER__)

    Module.put_attribute(__CALLER__.module, :parent_module, module)

    :functions
    |> module.__info__
    |> Enum.reject(&(&1 in except))
    |> Enum.map(&mfa_to_defdelegate(module, &1))
  end

  @spec mfa_to_defdelegate(module, {atom, arity}) :: Macro.t
  @doc false
  ## NB This function should be considered "private" and is only public
  ## so that it can be testable.
  # Builds the quoted `defdelegate fun(p1, ..., pN), to: module` AST for
  # one {function, arity} pair.
  def mfa_to_defdelegate(module, {function, arity}) do
    {:defdelegate, [context: Elixir, import: Kernel],
     [{function, [], arity_to_params(arity)}, [to: module]]}
  end

  defp arity_to_params(arity, unquoted \\ false)
  # Arity 0: the atom `Elixir` as the args slot denotes a no-parentheses
  # zero-argument call in the AST.
  defp arity_to_params(0, _), do: Elixir
  defp arity_to_params(arity, unquoted) do
    # Optionally wrap each generated parameter var in `unquote(...)`.
    wrap = if unquoted, do: &unquoted/1, else: &(&1)
    for idx <- 1..arity do
      param = String.to_atom("p#{idx}")
      wrap.({param, [], Elixir})
    end
  end

  defp unquoted(param), do: {:unquote, [], [param]}
end
|
lib/multiverses/macro_clone.ex
| 0.862714
| 0.881258
|
macro_clone.ex
|
starcoder
|
defmodule Phoenix.SimpleForm.FormInput do
@moduledoc ~S"""
Configuration struct that defines the entrypoint for a SimpleForm invokation
<%= input f, :user_id, collection: @form_collections.users %>
The following options are supported
- `label` - Overwrite the default labelname with is the name of the field humanized.
- `collection` - Define a list of values for a select input
- `hint` - Define a hint text
- `label_attrs`- Overwrite the attributes of the label
- `wrapper_attrs` - Overwrite the attributes of the wrapper
- `input_attrs` - Overwrite the attributes of the input
"""
defstruct form: nil,
field: nil,
field_str: nil,
input_attrs: [],
wrapper_attrs: [],
label_attrs: [],
clean_opts: [],
opts: [],
collection: nil,
required: nil,
hint: nil,
label: nil,
label_human: nil,
label_translated: nil,
errors: nil,
errors_translated: nil,
style_module: nil
@doc """
Takes a form and generates input opts
`build(form, field, opts, translate_fn, style_module)`
When you're using validate_required in your changeset the `required` field is set to true.
```
@fields ~w(name)a
def changeset(event, attrs, :default) do
event
|> cast(attrs, @fields)
|> validate_required(~w(name)a)
end
```
"""
def build(%Phoenix.HTML.Form{} = form, field, opts, translate_fn, style_module) do
label_human = Phoenix.HTML.Form.humanize(field)
{collection, clean_opts} = Keyword.pop(opts, :collection)
{hint, clean_opts} = Keyword.pop(clean_opts, :hint)
{label_attrs, clean_opts} = Keyword.pop(clean_opts, :label_attrs, [])
{wrapper_attrs, clean_opts} = Keyword.pop(clean_opts, :wrapper_attrs, [])
{input_attrs, clean_opts} = Keyword.pop(clean_opts, :input_attrs, [])
{label, clean_opts} = Keyword.pop(clean_opts, :label, label_human)
errors = Keyword.get_values(form.errors, field)
errors_translated = Enum.map(errors, fn error -> translate_fn.(error) end)
required = form |> Phoenix.HTML.Form.input_validations(field) |> Keyword.get(:required, false)
%__MODULE__{
form: form,
field: field,
field_str: Atom.to_string(field),
opts: opts,
clean_opts: clean_opts,
label_attrs: label_attrs,
wrapper_attrs: wrapper_attrs,
input_attrs: Keyword.merge(clean_opts, input_attrs),
collection: collection,
required: !!required,
label_human: label_human,
label: label,
hint: hint,
style_module: style_module,
errors: errors,
errors_translated: errors_translated
}
end
end
|
lib/phoenix/simple_form/form_input.ex
| 0.889039
| 0.765944
|
form_input.ex
|
starcoder
|
defmodule Chatter.MessageHandler do
require Record
require Chatter.NetID
alias Chatter.NetID
Record.defrecord :message_handler,
tag: nil,
code: nil,
extract_netids: nil,
encode_with: nil,
decode_with: nil,
dispatch: nil
@type t :: record( :message_handler,
tag: atom,
code: integer,
extract_netids: ((tuple) -> list(NetID.t)),
encode_with: ((tuple, map) -> binary),
decode_with: ((binary, map) -> {tuple, binary}),
dispatch: ((tuple) -> {:ok, tuple} | {:error, atom}) )
@spec new(atom,
((tuple) -> list(NetID.t)),
((tuple, map) -> binary),
((binary, map) -> {tuple, binary}),
((tuple) -> {:ok, tuple} | {:error, atom})) :: t
def new(tag,
extract_netids_fn,
encode_with_fn,
decode_with_fn,
dispatch_fn)
when is_atom(tag) and
is_function(extract_netids_fn,1) and
is_function(encode_with_fn,2) and
is_function(decode_with_fn,2) and
is_function(dispatch_fn, 1)
do
message_handler([tag: tag,
code: to_code(tag),
extract_netids: extract_netids_fn,
encode_with: encode_with_fn,
decode_with: decode_with_fn,
dispatch: dispatch_fn])
end
@spec new(tuple,
((tuple) -> list(NetID.t)),
((tuple, map) -> binary),
((binary, map) -> {tuple, binary}),
((tuple) -> {:ok, tuple} | {:error, atom})) :: t
def new(tup,
extract_netids_fn,
encode_with_fn,
decode_with_fn,
dispatch_fn)
when is_tuple(tup) and
tuple_size(tup) > 1 and
is_function(extract_netids_fn,1) and
is_function(encode_with_fn,2) and
is_function(decode_with_fn,2) and
is_function(dispatch_fn, 1)
do
tag = :erlang.element(1, tup)
message_handler([tag: tag,
code: to_code(tag),
extract_netids: extract_netids_fn,
encode_with: encode_with_fn,
decode_with: decode_with_fn,
dispatch: dispatch_fn])
end
defmacro is_valid(data) do
case Macro.Env.in_guard?(__CALLER__) do
true ->
quote do
is_tuple(unquote(data)) and tuple_size(unquote(data)) == 7 and
:erlang.element(1, unquote(data)) == :message_handler and
# data
is_atom(:erlang.element(2, unquote(data))) and
# code
is_integer(:erlang.element(3, unquote(data))) and
# extract_netids
is_function(:erlang.element(4, unquote(data)),1) and
# encode_with
is_function(:erlang.element(5, unquote(data)),2) and
# decode_with
is_function(:erlang.element(6, unquote(data)),2) and
# dispatch
is_function(:erlang.element(7, unquote(data)),1)
end
false ->
quote bind_quoted: binding() do
is_tuple(data) and tuple_size(data) == 7 and
:erlang.element(1, data) == :message_handler and
# data
is_atom(:erlang.element(2, data)) == false and
# code
is_integer(:erlang.element(3, data)) and
# extract_netids
is_function(:erlang.element(4, data),1) and
# encode_with
is_function(:erlang.element(5, data),2) and
# decode_with
is_function(:erlang.element(6, data),2) and
# dispatch
is_function(:erlang.element(7, data),1)
end
end
end
# True when the term is a structurally well-formed message_handler record,
# false for anything else. Delegates the shape check to the is_valid/1 guard.
@spec valid?(t) :: boolean
def valid?(record) when is_valid(record), do: true
def valid?(_other), do: false
# Runs the handler's extract_netids callback on a message tuple. The guard
# requires the message's tag (first element) to match the handler's tag
# (second record slot), so a handler can only process its own message type.
@spec extract_netids(t, tuple) :: list(NetID.t)
def extract_netids(coder, obj)
    when is_valid(coder) and
           is_tuple(obj) and
           tuple_size(obj) > 0 and
           :erlang.element(1, obj) == :erlang.element(2, coder) do
  extractor = message_handler(coder, :extract_netids)
  extractor.(obj)
end
# Serializes a message tuple to binary using the handler's encode_with
# callback and the given id map. Tag matching works as in extract_netids/2.
@spec encode_with(t, tuple, map) :: binary
def encode_with(coder, obj, id_map)
    when is_valid(coder) and
           is_map(id_map) and
           is_tuple(obj) and
           tuple_size(obj) > 0 and
           :erlang.element(1, obj) == :erlang.element(2, coder) do
  encoder = message_handler(coder, :encode_with)
  encoder.(obj, id_map)
end
# Deserializes a non-empty binary into a message tuple via the handler's
# decode_with callback, returning {decoded_tuple, remaining_binary}.
@spec decode_with(t, binary, map) :: {tuple, binary}
def decode_with(decoder, bin, id_map)
    when is_valid(decoder) and
           is_binary(bin) and
           byte_size(bin) > 0 and
           is_map(id_map) do
  decode = message_handler(decoder, :decode_with)
  decode.(bin, id_map)
end
# Routes a decoded message tuple to the handler's dispatch callback.
#
# SPEC FIX: the error branch previously read `{:error | atom}` — a 1-tuple of
# a union — instead of the `{:error, atom}` pair that dispatch callbacks
# actually return per the contract documented in new/5's spec.
@spec dispatch(t, tuple) :: {:ok, tuple} | {:error, atom}
def dispatch(dispatcher, msg)
    when is_valid(dispatcher) and
           is_tuple(msg) and
           tuple_size(msg) > 1
do
  fun = message_handler(dispatcher, :dispatch)
  fun.(msg)
end
# Derives a stable 32-bit numeric wire code from a message tag (an atom) by
# hashing its string form with xxHash32. The tuple clause accepts a whole
# message tuple and hashes its tag (first element).
@spec to_code(atom) :: integer
def to_code(id) when is_atom(id) do
  id
  |> to_string()
  |> :xxhash.hash32()
end

def to_code(tuple)
    when is_tuple(tuple) and
           tuple_size(tuple) > 1 do
  to_code(:erlang.element(1, tuple))
end
end
|
lib/message_handler.ex
| 0.736495
| 0.469885
|
message_handler.ex
|
starcoder
|
defmodule ExUssd.Utils do
  @moduledoc false
  alias ExUssd.Executer

  # Sentinel value returned when user input cannot be resolved to a menu
  # position; deliberately enormous so Enum.at/2 lookups always miss.
  @default_value 436_739_010_658_356_127_157_159_114_145

  # Converts raw USSD input into a menu selection integer, or one of several
  # sentinel integers that stand for navigation actions (next / back / home).
  # NOTE(review): the other huge literals below appear to be action markers
  # consumed elsewhere in ExUssd — confirm against the route-handling code.
  @spec to_int(term() | {integer(), String.t()}, ExUssd.t(), map(), String.t()) :: integer()
  def to_int(input, menu, _, input_value)

  # "0" is only a valid selection for zero-based menus.
  def to_int({0, _}, %ExUssd{is_zero_based: is_zero_based}, _payload, _input_value)
      when is_zero_based,
      do: 0

  # Non-zero-based menus treat "0" as unresolvable input.
  def to_int({0, _}, menu, payload, input_value),
    do: to_int({@default_value, ""}, menu, payload, input_value)

  def to_int(
        {value, ""},
        %ExUssd{split: split, nav: nav, menu_list: menu_list, orientation: orientation},
        %{session_id: session},
        input_value
      ) do
    # Navigation config: the `match` string the user types, and whether that
    # nav item is currently shown (hidden items must not be matchable).
    %ExUssd.Nav{match: next, show: show_next} = Enum.find(nav, &(&1.type == :next))
    %ExUssd.Nav{match: home, show: show_home} = Enum.find(nav, &(&1.type == :home))
    %ExUssd.Nav{match: back, show: show_back} = Enum.find(nav, &(&1.type == :back))

    # Current navigation depth comes from the session's route stack.
    %{depth: depth} =
      session
      |> ExUssd.Registry.fetch_route()
      |> List.first()

    # 1 * 7
    position = depth * split
    # Element at the start of the next page (vertical pagination check).
    element = Enum.at(menu_list, position)
    # Menu item the typed number would select (1-based input).
    menu = Enum.at(menu_list, value - 1)

    case input_value do
      v
      when v == next and show_next and orientation == :horizontal and depth < length(menu_list) ->
        605_356_150_351_840_375_921_999_017_933

      v when v == next and show_next and orientation == :vertical and not is_nil(element) ->
        605_356_150_351_840_375_921_999_017_933

      v when v == back and show_back ->
        128_977_754_852_657_127_041_634_246_588

      v when v == home and show_home ->
        705_897_792_423_629_962_208_442_626_284

      # Horizontal menus with an out-of-range selection fall back to sentinel.
      _v when orientation == :horizontal and is_nil(menu) ->
        @default_value

      _ ->
        value
    end
  end

  # Non-numeric input (Integer.parse/1 error) and any other shape are
  # unresolvable — return the sentinel.
  def to_int(:error, _menu, _, _input_value), do: @default_value
  def to_int(_, _, _, _), do: @default_value

  # Truncates `text` to at most `:length` characters (default 30), appending
  # `:omission` (default "...") so the total stays within the limit.
  # Invalid UTF-8 is returned untouched.
  @spec truncate(String.t(), keyword()) :: String.t()
  def truncate(text, options \\ []) do
    len = options[:length] || 30
    omi = options[:omission] || "..."

    cond do
      !String.valid?(text) ->
        text

      String.length(text) < len ->
        text

      true ->
        # Reserve room for the omission marker within the length budget.
        stop = len - String.length(omi)
        "#{String.slice(text, 0, stop)}#{omi}"
    end
  end

  @doc """
  Generates an unique id.
  """
  def new_id, do: "#{System.unique_integer()}"

  # Converts a string-keyed map to an atom-keyed map. Prefers existing atoms;
  # falls back to String.to_atom/1 for unknown keys.
  # NOTE(review): the fallback creates atoms from external payload keys —
  # unbounded input could exhaust the atom table; confirm callers are trusted.
  @spec format(map()) :: map()
  def format(payload) do
    Map.new(payload, fn {key, val} ->
      try do
        {String.to_existing_atom(key), val}
      rescue
        _e in ArgumentError ->
          {String.to_atom(key), val}
      end
    end)
  end

  # Builds request metadata (attempt count, timestamp, dialed route string)
  # from the session's route stack, e.g. "*123*1*2#" for service code "*123#".
  @spec fetch_metadata(map()) :: map()
  def fetch_metadata(%{session_id: session, service_code: service_code, text: text}) do
    %{route: [%{attempt: attempt} | _] = routes} = ExUssd.Registry.fetch_state(session)

    # Routes are stored newest-first; reverse, take each step's text, and
    # drop the head (the initial service-code entry) before joining.
    routes_string =
      routes
      |> Enum.reverse()
      |> get_in([Access.all(), Access.key(:text)])
      |> tl()
      |> Enum.join("*")

    service_code = String.replace(service_code, "#", "")

    routes_string =
      if(String.equivalent?(routes_string, ""),
        do: IO.iodata_to_binary([service_code, "#"]),
        else: IO.iodata_to_binary([service_code, "*", routes_string, "#"])
      )

    invoked_at = DateTime.truncate(DateTime.utc_now(), :second)

    %{attempt: attempt, invoked_at: invoked_at, route: routes_string, text: text}
  end

  # Resolves which lifecycle stage to simulate for `menu` based on opts:
  # with `simulate: true`, runs the callback stage and (on error) retries the
  # typed position against the menu list; otherwise runs plain init.
  def get_menu(%ExUssd{is_zero_based: is_zero_based} = menu, opts) do
    payload = Keyword.get(opts, :payload, %{text: "set_init_text"})

    # Parse the typed text as a menu position; sentinel when not an integer.
    position =
      case Integer.parse(payload.text) do
        {position, ""} -> position
        _ -> 436_739_010_658_356_127_157_159_114_145
      end

    fun = fn
      %{simulate: true, position: position} ->
        %{error: error, menu_list: menu_list} =
          current_menu = get_menu(menu, :ussd_callback, opts)

        if error do
          # Zero-based menus index from 0, one-based from 1.
          from = if(is_zero_based, do: 0, else: 1)

          # menu_list is stored reversed; undo that before indexing.
          case Enum.at(Enum.reverse(menu_list), position - from) do
            nil ->
              get_menu(%{menu | error: true}, :ussd_after_callback, opts)

            %ExUssd{} = next_menu ->
              get_menu(next_menu, :ussd_init, opts)
          end
        else
          current_menu
        end

      _ ->
        get_menu(menu, :ussd_init, opts)
    end

    apply(fun, [Map.new(Keyword.put(opts, :position, position))])
  end

  # Stage :ussd_init — navigate then run the menu's init callback.
  def get_menu(%ExUssd{} = menu, :ussd_init, opts) do
    init_data = Keyword.get(opts, :init_data)
    payload = Keyword.get(opts, :payload)

    fun = fn
      menu, payload ->
        menu
        |> Executer.execute_navigate(payload)
        |> Executer.execute_init_callback!(payload)
    end

    apply(fun, [%{menu | data: init_data}, payload])
  end

  # Stage :ussd_callback — init with `init_text`, then run the user callback
  # with the real payload; a nil callback result flags the default error.
  def get_menu(%ExUssd{default_error: error} = menu, :ussd_callback, opts) do
    init_data = Keyword.get(opts, :init_data)
    init_text = Keyword.get(opts, :init_text, "set_init_text")
    payload = Keyword.get(opts, :payload)

    fun = fn
      _menu, opts, nil ->
        raise ArgumentError, "`:payload` not found, #{inspect(Keyword.new(opts))}"

      menu, _, %{text: _} = payload ->
        init_payload = Map.put(payload, :text, init_text)

        init_menu =
          menu
          |> Executer.execute_navigate(init_payload)
          |> Executer.execute_init_callback!(init_payload)

        # `with nil <-` acts as a fallback: keep the callback's result unless
        # it returned nil, in which case surface the default error.
        with nil <- Executer.execute_callback!(init_menu, payload, state: false) do
          %{init_menu | error: error}
        end

      _menu, _, payload ->
        raise ArgumentError, "payload missing `:text`, #{inspect(payload)}"
    end

    apply(fun, [%{menu | data: init_data}, Map.new(opts), payload])
  end

  # Stage :ussd_after_callback — same as :ussd_callback, then additionally
  # runs the after-callback on the resulting menu (nil result keeps it as-is).
  def get_menu(%ExUssd{default_error: error} = menu, :ussd_after_callback, opts) do
    init_data = Keyword.get(opts, :init_data)
    init_text = Keyword.get(opts, :init_text, "set_init_text")
    payload = Keyword.get(opts, :payload)

    fun = fn
      _menu, opts, nil ->
        raise ArgumentError, "`:payload` not found, #{inspect(Keyword.new(opts))}"

      menu, _, %{text: _} = payload ->
        init_payload = Map.put(payload, :text, init_text)

        init_menu =
          menu
          |> Executer.execute_navigate(init_payload)
          |> Executer.execute_init_callback!(init_payload)

        callback_menu =
          with nil <- Executer.execute_callback!(init_menu, payload, state: false) do
            %{init_menu | error: error}
          end

        with nil <- Executer.execute_after_callback!(callback_menu, payload, state: false) do
          callback_menu
        end

      _menu, _, payload ->
        raise ArgumentError, "payload missing `:text`, #{inspect(payload)}"
    end

    apply(fun, [%{menu | data: init_data}, Map.new(opts), payload])
  end

  # Unknown stage or non-menu input: nothing to do.
  def get_menu(_menu, _atom, _opts), do: nil
end
|
lib/ex_ussd/utils.ex
| 0.655115
| 0.432663
|
utils.ex
|
starcoder
|
defmodule OMG.Performance.SimplePerftest do
  @moduledoc """
  The simple performance tests runs the critical transaction processing chunk of the child chain.

  This allows to easily test the critical path of processing transactions, and profile it using `:fprof`.
  """
  use OMG.Utils.LoggerExt

  require OMG.Utxo

  alias OMG.Eth.Configuration
  alias OMG.TestHelper
  alias OMG.Utxo

  # Zero address marks the base currency (ether) for deposits.
  @eth OMG.Eth.zero_address()

  @doc """
  Runs test with `ntx_to_send` txs for each of the `nspenders` senders with given options.

  The test is run on a local limited child chain app instance, not doing any Ethereum connectivity-related activities.
  The child chain is setup and torn down as part of the test invocation.

  ## Usage

  From an `iex -S mix run --no-start` shell

  ```
  use OMG.Performance

  Performance.SimplePerftest.start(50, 16)
  ```

  The results are going to be waiting for you in a file within `destdir` and will be logged.

  Options:
    - :destdir - directory where the results will be put, relative to `pwd`, defaults to `"."`
    - :profile - if `true`, a `:fprof` will profile the test run, defaults to `false`
    - :block_every_ms - how often should the artificial block creation be triggered, defaults to `2000`
    - :randomized - whether the non-change outputs of the txs sent out will be random or equal to sender (if `false`),
      defaults to `true`

  **NOTE**:

  With `profile: :fprof` it will print a warning:
  ```
  Warning: {erlang, trace, 3} called in "<0.514.0>" - trace may become corrupt!
  ```

  It is caused by using `procs: :all` in options. So far we're not using `:erlang.trace/3` in our code,
  so it has been ignored. Otherwise it's easy to reproduce and report
  (github.com/erlang/otp and the JIRA it points you to).
  """
  @spec start(pos_integer(), pos_integer(), keyword()) :: :ok
  def start(ntx_to_send, nspenders, opts \\ []) do
    _ =
      Logger.info(
        "Number of spenders: #{inspect(nspenders)}, number of tx to send per spender: #{inspect(ntx_to_send)}."
      )

    defaults = [destdir: ".", profile: false, block_every_ms: 2000]
    opts = Keyword.merge(defaults, opts)

    # Order matters: chain must be up before deposits; teardown reverses it.
    {:ok, started_apps, simple_perftest_chain} = setup_simple_perftest(opts)

    spenders = create_spenders(nspenders)
    utxos = create_deposits(spenders, ntx_to_send)

    :ok = OMG.Performance.Runner.run(ntx_to_send, utxos, opts, opts[:profile])

    cleanup_simple_perftest(started_apps, simple_perftest_chain)
  end

  # Prepares a throwaway rocksdb directory, boots the minimal apps, and starts
  # the stripped-down child chain supervision tree.
  @spec setup_simple_perftest(keyword()) :: {:ok, list, pid}
  defp setup_simple_perftest(opts) do
    {:ok, dbdir} = Briefly.create(directory: true, prefix: "perftest_db")
    Application.put_env(:omg_db, :path, dbdir, persistent: true)
    _ = Logger.info("Perftest rocksdb path: #{inspect(dbdir)}")
    :ok = OMG.DB.init()

    started_apps = ensure_all_started([:omg_db, :omg_bus])
    {:ok, simple_perftest_chain} = start_simple_perftest_chain(opts)

    {:ok, started_apps, simple_perftest_chain}
  end

  # Selects and starts just necessary components to run the tests.
  # We don't want to start the entire `:omg_child_chain` supervision tree because
  # we don't want to start services related to root chain tracking (the root chain contract doesn't exist).
  # Instead, we start the artificial `BlockCreator`
  defp start_simple_perftest_chain(opts) do
    # Create the blocks-cache ETS table if missing (idempotent across runs).
    _ =
      case :ets.info(OMG.ChildChain.Supervisor.blocks_cache()) do
        :undefined ->
          :ets.new(OMG.ChildChain.Supervisor.blocks_cache(), [:set, :public, :named_table, read_concurrency: true])

        _ ->
          :ok
      end

    children = [
      {OMG.ChildChainRPC.Web.Endpoint, []},
      {OMG.State,
       [
         fee_claimer_address: Base.decode16!("DEAD000000000000000000000000000000000000"),
         child_block_interval: Configuration.child_block_interval(),
         metrics_collection_interval: 60_000
       ]},
      {OMG.ChildChain.API.BlocksCache, [ets: OMG.ChildChain.Supervisor.blocks_cache()]},
      {OMG.ChildChain.FeeServer, OMG.ChildChain.Configuration.fee_server_opts()},
      {OMG.Performance.BlockCreator, opts[:block_every_ms]}
    ]

    Supervisor.start_link(children, strategy: :one_for_one)
  end

  # Stops the test chain, then the started apps in reverse start order, and
  # clears the temporary db path from the application env.
  @spec cleanup_simple_perftest(list(), pid) :: :ok
  defp cleanup_simple_perftest(started_apps, simple_perftest_chain) do
    :ok = Supervisor.stop(simple_perftest_chain)

    started_apps |> Enum.reverse() |> Enum.each(&Application.stop/1)

    :ok = Application.put_env(:omg_db, :path, nil)
    :ok
  end

  # We're not basing on mix to start all neccessary test's components.
  defp ensure_all_started(app_list) do
    Enum.reduce(app_list, [], fn app, list ->
      {:ok, started_apps} = Application.ensure_all_started(app)
      list ++ started_apps
    end)
  end

  # Generates `nspenders` fresh key pairs / addresses for the test senders.
  @spec create_spenders(pos_integer()) :: list(TestHelper.entity())
  defp create_spenders(nspenders) do
    1..nspenders
    |> Enum.map(fn _nspender -> TestHelper.generate_entity() end)
  end

  # Seeds one deposit per spender. The deposited amount is `ntx_to_send * 2`
  # — NOTE(review): presumably to leave headroom beyond one unit per tx;
  # confirm against the Runner's tx construction.
  @spec create_deposits(list(TestHelper.entity()), pos_integer()) :: list(map())
  defp create_deposits(spenders, ntx_to_send) do
    spenders
    |> Enum.with_index(1)
    |> Enum.map(&create_deposit(&1, ntx_to_send * 2))
  end

  defp create_deposit({spender, index}, ntx_to_send) do
    {:ok, _} =
      OMG.State.deposit([
        %{
          # these two are irrelevant
          root_chain_txhash: <<0::256>>,
          eth_height: 1,
          log_index: 0,
          owner: spender.addr,
          currency: @eth,
          amount: ntx_to_send,
          # one deposit per spender, so the 1-based index doubles as blknum
          blknum: index
        }
      ])

    utxo_pos = Utxo.position(index, 0, 0) |> Utxo.Position.encode()

    %{owner: spender, utxo_pos: utxo_pos, amount: ntx_to_send}
  end
end
|
apps/omg_performance/lib/omg_performance/simple_perftest.ex
| 0.906656
| 0.854703
|
simple_perftest.ex
|
starcoder
|
defmodule Advent20.Cubes do
  @moduledoc """
  Day 17: Conway Cubes
  """

  # Parses the 2D grid into a map of active coordinates ("#" values),
  # embedding each cell at z = 0 (and w = 0 for 4 dimensions).
  defp parse(input, dimensions) do
    for {line, y} <- Enum.with_index(String.split(input, "\n", trim: true)),
        {char, x} <- Enum.with_index(String.codepoints(line)),
        char != ".",
        into: %{} do
      key = if dimensions == 3, do: {x, y, 0}, else: {x, y, 0, 0}
      {key, char}
    end
  end

  def part_1(input) do
    input
    |> parse(3)
    |> count_active_after(6)
  end

  def part_2(input) do
    input
    |> parse(4)
    |> count_active_after(6)
  end

  # Runs `cycles` boot steps and counts the cubes left active.
  defp count_active_after(state, cycles) do
    state
    |> Stream.iterate(&step/1)
    |> Enum.at(cycles)
    |> Enum.count(fn {_coord, char} -> char == "#" end)
  end

  # One generation: only neighbours of currently-active cubes can change, so
  # those are the only coordinates evaluated. The resulting map keeps active
  # cubes only, which keeps every following generation small.
  defp step(active_map) do
    active_map
    |> candidate_coords()
    |> Enum.reduce(%{}, fn coord, next_gen ->
      live_count =
        coord
        |> neighbours()
        |> Enum.count(&active?(active_map, &1))

      survives? =
        if active?(active_map, coord),
          do: live_count in [2, 3],
          else: live_count == 3

      if survives?, do: Map.put(next_gen, coord, "#"), else: next_gen
    end)
  end

  defp active?(map, coord), do: Map.get(map, coord, ".") == "#"

  # Union of the neighbour sets of every active cube.
  defp candidate_coords(active_map) do
    Enum.reduce(Map.keys(active_map), MapSet.new(), fn coord, set ->
      MapSet.union(set, MapSet.new(neighbours(coord)))
    end)
  end

  # All 26 (3D) / 80 (4D) offsets around a coordinate, excluding itself.
  defp neighbours({x, y, z}) do
    for dx <- -1..1, dy <- -1..1, dz <- -1..1, {dx, dy, dz} != {0, 0, 0} do
      {x + dx, y + dy, z + dz}
    end
  end

  defp neighbours({x, y, z, w}) do
    for dx <- -1..1,
        dy <- -1..1,
        dz <- -1..1,
        dw <- -1..1,
        {dx, dy, dz, dw} != {0, 0, 0, 0} do
      {x + dx, y + dy, z + dz, w + dw}
    end
  end
end
|
lib/advent20/17_cubes.ex
| 0.717606
| 0.590366
|
17_cubes.ex
|
starcoder
|
defmodule LiveMap.Tile do
  @moduledoc """
  This module contains functions to manipulate map tiles.
  """
  alias :math, as: Math
  alias __MODULE__, as: Tile

  require Logger

  @type latitude :: number()
  @type longitude :: number()
  @type zoom :: pos_integer()
  @type x :: pos_integer()
  @type y :: pos_integer()

  @enforce_keys [:x, :y, :z]
  defstruct [:latitude, :longitude, :raw_x, :raw_y, :x, :y, :z]

  # raw_x/raw_y keep the fractional tile position; x/y are the floored,
  # integer slippy-map tile indices.
  @type t :: %__MODULE__{
          latitude: latitude(),
          longitude: longitude(),
          raw_x: number(),
          raw_y: number(),
          x: x(),
          y: y(),
          z: zoom()
        }

  # Use Bitwise operations for performant 2^z calculation.
  use Bitwise, only_operators: true

  # Precalculates at compile time to avoid calling :math.pi
  # and performing a division at runtime.
  @pi Math.pi()
  @deg_to_rad @pi / 180.0
  @rad_to_deg 180.0 / @pi
  # Standard slippy-map tile edge length in pixels.
  @tile_size 256

  @doc """
  Retrieves a tile at certain coordinates and zoom level.

  Based on https://wiki.openstreetmap.org/wiki/Slippy_map_tilenames.

  Examples:

      iex> tile = LiveMap.Tile.at(0, 0, 0)
      iex> Kernel.match?(%Tile{x: 0, y: 0}, tile)
      true

      iex> tile = LiveMap.Tile.at(360, 170.1022, 0)
      iex> Kernel.match?(%Tile{x: 0, y: 0}, tile)
      true

      iex> tile = LiveMap.Tile.at(47.47607, 7.56198, 16)
      iex> Kernel.match?(%Tile{x: 34144, y: 22923}, tile)
      true
  """
  @spec at(latitude(), longitude(), zoom()) :: t()
  def at(latitude, longitude, zoom) when is_integer(zoom) do
    x = Tile.x(longitude, zoom)
    y = Tile.y(latitude, zoom)

    %Tile{
      latitude: latitude,
      longitude: longitude,
      raw_x: x,
      raw_y: y,
      x: floor(x),
      y: floor(y),
      z: zoom,
    }
  end

  @doc """
  Converts a longitude at certain zoom to tile x number

  Notes that the return value is not rounded. If used with slippy map,
  round it down to the nearest integer.

  Examples:

      iex> floor(Tile.x(0, 0))
      0

      iex> floor(Tile.x(170.1022, 0))
      0

      iex> floor(Tile.x(7.56198, 16))
      34144
  """
  @spec x(longitude(), zoom()) :: number()
  def x(longitude, zoom) do
    # (1 <<< zoom) is 2^zoom, the number of tiles along one axis.
    (1 <<< zoom) * ((longitude + 180) / 360)
  end

  @doc """
  Converts a latitude at certain zoom to tile y number

  Notes that the return value is not rounded. If used with slippy map,
  round it down to the nearest integer.

  Examples:

      iex> floor(Tile.y(0, 0))
      0

      iex> floor(Tile.y(360, 0))
      0

      iex> floor(Tile.y(47.47607, 16))
      22923
  """
  @spec y(latitude(), zoom()) :: number()
  def y(latitude, zoom) do
    # Web Mercator projection: y from log(tan(lat) + sec(lat)).
    radian = latitude * @deg_to_rad
    r = Math.log(Math.tan(radian) + 1 / Math.cos(radian)) / @pi
    (1 <<< zoom) * (1 - r) / 2
  end

  @doc """
  Converts a tile x number to longitude at certain zoom level.

  Examples:

      iex> Tile.longitude(0, 0)
      -180.0

      iex> Tile.longitude(34144, 16)
      7.55859375
  """
  @spec longitude(x(), zoom()) :: longitude()
  def longitude(x, zoom) do
    x / (1 <<< zoom) * 360 - 180
  end

  @doc """
  Converts a tile y number to latitude at certain zoom level.

  Examples:

      iex> Tile.latitude(0, 0)
      85.0511287798066

      iex> Tile.latitude(22923, 16)
      47.476375797209336
  """
  @spec latitude(y(), zoom()) :: latitude()
  def latitude(y, zoom) do
    Math.atan(Math.sinh(@pi * (1 - 2 * y / (1 <<< zoom)))) * @rad_to_deg
  end

  @doc """
  Maps tiles around a center tile that covers a rectangle box.

  Note that by default, the resulting tiles do not have latitude and longitude
  coordinates. If such values are desired, use the last parameter to provide
  a custom mapper function to also load the coordinates.

  Examples:

      # At zoom 0, the whole world is rendered in 1 tile.
      iex> center = LiveMap.Tile.at(0, 0, 0)
      iex> [center] == LiveMap.Tile.map(center, 256, 256)
      true

      # At zoom 1, 4 tiles are used on a 512x512 map.
      iex> center = LiveMap.Tile.at(0, 0, 1)
      iex> tiles = LiveMap.Tile.map(center, 512, 512)
      iex> Enum.map(tiles, fn %{x: x, y: y} -> {x, y} end)
      [{0, 0}, {0, 1}, {1, 0}, {1, 1}]

      # Can also pass a mapper function to transform the tiles.
      iex> center = LiveMap.Tile.at(0, 0, 1)
      iex> LiveMap.Tile.map(center, 512, 512, fn %{x: x, y: y} -> {x, y} end)
      [{0, 0}, {0, 1}, {1, 0}, {1, 1}]
  """
  @spec map(t(), number(), number(), function()) :: list()
  def map(center, width, height, mapper \\ &Function.identity/1)

  # Special case for zoom level 0, in which the whole world is on 1 tile.
  def map(%Tile{z: 0} = center, _width, _height, mapper), do: [mapper.(center)]

  def map(%Tile{raw_x: center_x, raw_y: center_y, z: zoom}, width, height, mapper) when zoom >= 0 do
    # Half the viewport, measured in tiles rather than pixels.
    half_width = (0.5 * width) / @tile_size
    half_height = (0.5 * height) / @tile_size
    max_tile = 1 <<< zoom

    x_min = floor(center_x - half_width)
    y_min = floor(center_y - half_height)
    x_max = ceil(center_x + half_width)
    y_max = ceil(center_y + half_height)

    for x <- x_min..x_max - 1,
        y <- y_min..y_max - 1,
        # x and y may have crossed the date line
        tile_x = rem(x + max_tile, max_tile),
        tile_y = rem(y + max_tile, max_tile) do
      mapper.(%Tile{
        raw_x: tile_x,
        raw_y: tile_y,
        x: tile_x,
        y: tile_y,
        z: zoom,
      })
    end
  end

  @doc """
  Maps tiles around a center coordinates and zoom that covers a rectangle box.

  The coordinates and zoom are used to generate a `Tile` and pass to `map/4`.

  Examples:

      iex> [center] = LiveMap.Tile.map(0, 0, 0, 256, 256)
      iex> center.x
      0
      iex> center.y
      0
  """
  @spec map(latitude(), longitude(), zoom(), number(), number(), function()) :: list()
  def map(latitude, longitude, zoom, width, height, mapper \\ &Function.identity/1) do
    center = Tile.at(latitude, longitude, zoom)
    Tile.map(center, width, height, mapper)
  end
end
|
lib/live_map/tile.ex
| 0.935095
| 0.687276
|
tile.ex
|
starcoder
|
defmodule EctoPolymorphic.Schema do
  @moduledoc """
  Macros for declaring polymorphic belongs_to-style associations on Ecto
  schemas: `polymorphy/2` adds both the association and a companion
  `<name>_type` field backed by a generated Ecto type.
  """
  alias EctoPolymorphic.Type, as: PolymorphicType

  defmacro __using__(_opts) do
    quote do
      import EctoPolymorphic.Schema
    end
  end

  # Declares a polymorphic association `name` whose target module is chosen by
  # the `<name>_type` column. `types` maps database values to Ecto schemas and
  # must be known at compile time (an Ecto type module is generated from it).
  defmacro polymorphy(name, types) do
    check_polymorphic!(types)
    {:module, module, _, _} = PolymorphicType.generate_ecto_type(__CALLER__.module, name, types)

    quote do
      unquote(__MODULE__).__belongs_to__(
        __MODULE__,
        unquote(name),
        types: unquote(types)
      )

      field(unquote(:"#{name}_type"), unquote(module))
    end
  end

  # NOTE(review): only checks for a list, although the error message demands a
  # keyword list — a non-keyword list would slip through; confirm intent.
  def check_polymorphic!(types) when is_list(types), do: :ok

  def check_polymorphic!(_) do
    raise """
    Polymorphic relationships require knowing all the possible types at compile time. Pass them in as
    a keyword list mapping the expected database value to the Ecto Schema
    """
  end

  @valid_belongs_to_options [
    :foreign_key,
    :references,
    :define_field,
    :type,
    :types,
    :on_replace,
    :defaults,
    :primary_key,
    :polymorphic
  ]

  # Mirrors Ecto.Schema's belongs_to compile-time machinery: defines the
  # foreign-key field (unless :define_field is false) and registers the
  # association struct on the schema module's attributes.
  def __belongs_to__(mod, name, opts) do
    check_options!(opts, @valid_belongs_to_options, "belongs_to/3")
    opts = Keyword.put_new(opts, :foreign_key, :"#{name}_id")
    foreign_key_type = opts[:type] || Module.get_attribute(mod, :foreign_key_type)

    if name == Keyword.get(opts, :foreign_key) do
      raise ArgumentError,
            "foreign_key #{inspect(name)} must be distinct from corresponding association name"
    end

    if Keyword.get(opts, :define_field, true) do
      Ecto.Schema.__field__(mod, opts[:foreign_key], foreign_key_type, opts)
    end

    struct = association(mod, :one, name, EctoPolymorphic.Association, opts)
    Module.put_attribute(mod, :changeset_fields, {name, {:assoc, struct}})
  end

  # Rejects any option key outside the allowed set for the given macro arity.
  defp check_options!(opts, valid, fun_arity) do
    case Enum.find(opts, fn {k, _} -> not (k in valid) end) do
      {k, _} ->
        raise ArgumentError, "invalid option #{inspect(k)} for #{fun_arity}"

      nil ->
        :ok
    end
  end

  # Registers the association: puts a NotLoaded placeholder in the struct and
  # records the association metadata under :ecto_assocs.
  defp association(mod, cardinality, name, association, opts) do
    not_loaded = %Ecto.Association.NotLoaded{
      __owner__: mod,
      __field__: name,
      __cardinality__: cardinality
    }

    put_struct_field(mod, name, not_loaded)
    opts = [cardinality: cardinality] ++ opts
    struct = association.struct(mod, name, opts)
    Module.put_attribute(mod, :ecto_assocs, {name, struct})
    struct
  end

  # Adds a default value for `name` to the schema struct, guarding against
  # duplicate field/association definitions.
  defp put_struct_field(mod, name, assoc) do
    fields = Module.get_attribute(mod, :struct_fields)

    if List.keyfind(fields, name, 0) do
      raise ArgumentError, "field/association #{inspect(name)} is already set on schema"
    end

    Module.put_attribute(mod, :struct_fields, {name, assoc})
  end
end
|
lib/ecto_polymorphic/schema.ex
| 0.703244
| 0.498413
|
schema.ex
|
starcoder
|
defmodule Bincode do
@moduledoc ~S"""
Module defining the functionalities of Bincode.
Bincode allows you to share data between Elixir and Rust using
Rust's [Bincode](https://github.com/servo/bincode) binary format.
You can implement your custom serialization manually, but for most use cases
you can simply declare the Rust structs and enums using `Bincode.Structs.declare_struct/3` and
`Bincode.Structs.declare_enum/3`
## Supported types
Most Rust types are supported, plus user defined structs and enums.
| Rust | Bincode notation | Elixir typespec |
|------------------------|---------------------------|----------------------------------|
| `u8` | `:u8` | `non_neg_integer` |
| ... | ... | ... |
| `u128` | `:u128` | `non_neg_integer` |
| `i8` | `:i8` | `integer` |
| ... | ... | ... |
| `i128` | `:i128` | `integer` |
| `f32` | `:f32` | `float` |
| `f64` | `:f64` | `float` |
| `bool` | `:bool` | `boolean` |
| `String` | `:string` | `binary` |
| `(u32, String)` | `{:u32, :string}` | `{non_neg_integer, binary}` |
| `Option<f32>` | `{:option, :f32}` | `float \| nil` |
| `Vec<String>` | `{:list, :string}` | `[binary]` |
| `HashMap<i64, String>` | `{:map, {:i64, :string}}` | `%{required(integer) => binary}` |
| `HashSet<u8>` | `{:set, :u8}` | `MapSet.t(non_neg_integer)` |
User defined types such as structs and enums can be nested, in this case the type is
the fully qualified module name. See `Bincode.Structs.declare_struct/3`.
The endianness is little since that's the default used by Bincode.
Tuples are implemented for a max size of 12 by default. That should be enough for
most practical cases but if you need to serialize tuples with more elements, you can
set `max_tuple_size` in the mix config, like so:
config :bincode, max_tuple_size: 23
## Examples
Consider the typical example where we want to send data structures across the network.
Here with a Rust client and Elixir server:
```rust
#[derive(Serialize, Deserialize)]
pub struct PacketSendMessage {
pub from: u64,
pub to: u64,
pub content: String,
}
pub fn send_message(sender_id: u64, receiver_id: u64) {
let message = PacketSendMessage {
from: sender_id,
to: receiver_id,
content: "hello!".to_owned()
};
let encoded: Vec<u8> = bincode::serialize(&message).unwrap();
// now send "encoded" to Elixir app
}
```
On the Elixir side you can simply declare the same packet struct and deserialize the received bytes:
defmodule Packets do
import Bincode.Structs
declare_struct(PacketSendMessage,
from: :u64,
to: :u64,
content: :string
)
end
alias Packets.PacketSendMessage
# Receive "data" from the network
{:ok, {%PacketSendMessage{} = message, rest}} = PacketSendMessage.deserialize(data)
Logger.info("Received message packet #{inspect(message)}")
"""
use Bitwise
@type unsigned :: :u8 | :u16 | :u32 | :u64 | :u128
@type signed :: :i8 | :i16 | :i32 | :i64 | :i128
@type floating_point :: :f32 | :f64
@type primitive ::
unsigned | signed | floating_point | :bool | :string | tuple | {:option, bincode_type}
@type collection :: {:list, bincode_type} | {:map, bincode_type} | {:set, bincode_type}
@type user_defined :: module
@type bincode_type :: primitive | collection | user_defined
@type option :: {:varint, boolean}
@type options :: list(option)
@doc """
Serializes the given `term` in binary representation according to the
given `type`.
Returns `{:ok, serialized_term}` when successful or `{:error, error_message}`
otherwise.
## Options
* `varint` - When set to true, enables variable-size integer encoding. It applies to signed
and unsigned integers except for `:u8` and `:i8`. Signed integers are first mapped to unsigned
integers using ZigZag encoding. Variable-size encoding will result in saved bytes the closer
to 0 the value is. This is especially true for collections length and enum variants which
for a lot of cases fit in a single byte instead of the usual `:u64` and `:u32`.
## Examples
iex> Bincode.serialize(255, :u8)
{:ok, <<255>>}
iex> Bincode.serialize(12, :u64)
{:ok, <<12, 0, 0, 0, 0, 0, 0, 0>>}
iex> Bincode.serialize(12, :u64, varint: true)
{:ok, <<12>>}
iex> Bincode.serialize("Bincode", :string)
{:ok, <<7, 0, 0, 0, 0, 0, 0, 0, 66, 105, 110, 99, 111, 100, 101>>}
iex> Bincode.serialize({144, false}, {:u16, :bool})
{:ok, <<144, 0, 0>>}
iex> Bincode.serialize([1, 2, 3, 4], {:list, :u8})
{:ok, <<4, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4>>}
iex> Bincode.serialize(%{"some string key" => 429876423428}, {:map, {:string, :u64}})
{:ok, <<1, 0, 0, 0, 0, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 115, 111, 109, 101, 32, 115, 116, 114, 105, 110, 103, 32, 107, 101, 121, 4, 171, 161, 22, 100, 0, 0, 0>>}
iex> Bincode.serialize(%{}, :bool)
{:error, "Cannot serialize value %{} into type :bool"}
"""
@spec serialize(term, bincode_type, options) :: {:ok, binary} | {:error, String.t()}
def serialize(value, type, opts \\ [])
@doc """
Deserializes the given `binary` data into an Elixir term according to the
given `type`.
Returns `{:ok, {term, rest}}` when successful or `{:error, error_message}`
otherwise. The remaining binary data is returned.
## Options
* `varint` - When set to true, enables variable-size integer encoding. It applies to signed
and unsigned integers except for `:u8` and `:i8`. Signed integers are first mapped to unsigned
integers using ZigZag encoding. Variable-size encoding will result in saved bytes the closer
to 0 the value is. This is especially true for collections length and enum variants which
for a lot of cases fit in a single byte instead of the usual `:u64` and `:u32`.
## Examples
iex> Bincode.deserialize(<<255>>, :u8)
{:ok, {255, ""}}
iex> Bincode.deserialize(<<12, 0, 0, 0, 0, 0, 0, 0>>, :u64)
{:ok, {12, ""}}
iex> Bincode.deserialize(<<12>>, :u64, varint: true)
{:ok, {12, ""}}
iex> Bincode.deserialize(<<7, 0, 0, 0, 0, 0, 0, 0, 66, 105, 110, 99, 111, 100, 101>>, :string)
{:ok, {"Bincode", ""}}
iex> Bincode.deserialize(<<144, 0, 0>>, {:u16, :bool})
{:ok, {{144, false}, ""}}
iex> Bincode.deserialize(<<4, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 4>>, {:list, :u8})
{:ok, {[1, 2, 3, 4], ""}}
iex> Bincode.deserialize(<<1, 0, 0, 0, 0, 0, 0, 0, 15, 0, 0, 0, 0, 0, 0, 0, 115, 111, 109, 101, 32, 115, 116, 114, 105, 110, 103, 32, 107, 101, 121, 4, 171, 161, 22, 100, 0, 0, 0>>, {:map, {:string, :u64}})
{:ok, {%{"some string key" => 429876423428}, ""}}
iex> Bincode.deserialize([], :bool)
{:error, "Cannot deserialize value [] into type :bool"}
"""
@spec deserialize(binary, bincode_type, options) :: {:ok, {term, binary}} | {:error, String.t()}
def deserialize(value, type, opts \\ [])
@u16_max 0xFFFF
@u32_max 0xFFFF_FFFF
@u64_max 0xFFFF_FFFF_FFFF_FFFF
@single_byte_max 250
@u16_byte 251
@u32_byte 252
@u64_byte 253
@u128_byte 254
# Reads the :varint option (defaults to false) from an options keyword list.
defp use_varint(opts) when is_list(opts), do: Keyword.get(opts, :varint, false)

# ZigZag encoding maps signed integers onto unsigned ones so that values near
# zero stay small: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, ...
# Note: unary ~~~ binds tighter than *, so this is (~~~value) * 2 + 1.
def zigzag_encode(value) when is_integer(value) and value < 0, do: ~~~value * 2 + 1
def zigzag_encode(value) when is_integer(value), do: value * 2

# Inverse of zigzag_encode/1: even encodings were non-negative, odd negative.
def zigzag_decode(value) when is_integer(value) and rem(value, 2) == 0, do: div(value, 2)
def zigzag_decode(value) when is_integer(value), do: ~~~div(value, 2)
# Encodes an unsigned integer in bincode's variable-length format: values up
# to 250 fit in one byte; larger values get a discriminant byte (251..254)
# followed by the little-endian u16/u32/u64/u128 payload. Clauses are ordered
# smallest-width first so the tightest encoding wins.
defp serialize_varint(value) when is_integer(value) and value <= @single_byte_max do
  {:ok, <<value::size(8)>>}
end

defp serialize_varint(value) when is_integer(value) and value <= @u16_max do
  {:ok, <<@u16_byte::size(8), value::little-integer-size(16)>>}
end

defp serialize_varint(value) when is_integer(value) and value <= @u32_max do
  {:ok, <<@u32_byte::size(8), value::little-integer-size(32)>>}
end

defp serialize_varint(value) when is_integer(value) and value <= @u64_max do
  {:ok, <<@u64_byte::size(8), value::little-integer-size(64)>>}
end

defp serialize_varint(value) when is_integer(value) do
  {:ok, <<@u128_byte::size(8), value::little-integer-size(128)>>}
end
# Decodes a bincode variable-length unsigned integer: a leading byte in
# 0..250 is the value itself; 251..254 announce a little-endian
# u16/u32/u64/u128 payload. Returns {:ok, {value, rest}} or an error tuple
# when the binary doesn't start with a valid varint.
defp deserialize_varint(<<byte::size(8), rest::binary>>) when byte in 0..@single_byte_max do
  {:ok, {byte, rest}}
end

defp deserialize_varint(<<@u16_byte::size(8), value::little-integer-size(16), rest::binary>>) do
  {:ok, {value, rest}}
end

defp deserialize_varint(<<@u32_byte::size(8), value::little-integer-size(32), rest::binary>>) do
  {:ok, {value, rest}}
end

defp deserialize_varint(<<@u64_byte::size(8), value::little-integer-size(64), rest::binary>>) do
  {:ok, {value, rest}}
end

defp deserialize_varint(<<@u128_byte::size(8), value::little-integer-size(128), rest::binary>>) do
  {:ok, {value, rest}}
end

defp deserialize_varint(value) do
  {:error, "Cannot deserialize value #{inspect(value)} into variable-size integer"}
end
# Varint has no effect for u8/i8
# Unsigned
def serialize(value, :u8, _opts) when is_integer(value) do
{:ok, <<value::little-integer-size(8)>>}
end
def deserialize(<<value::little-integer-size(8), rest::binary>>, :u8, _opts) do
{:ok, {value, rest}}
end
for int_type <- [:u16, :u32, :u64, :u128] do
{size, ""} = to_string(int_type) |> String.trim_leading("u") |> Integer.parse()
def serialize(value, unquote(int_type), _opts) when value < 0 do
{:error,
"Attempt to serialize negative integer #{inspect(value)} into #{unquote(int_type)}"}
end
def serialize(value, unquote(int_type), opts) when is_integer(value) do
if use_varint(opts) do
serialize_varint(value)
else
{:ok, <<value::little-integer-size(unquote(size))>>}
end
end
def deserialize(<<value::binary>>, unquote(int_type), opts) do
if use_varint(opts) do
deserialize_varint(value)
else
case value do
<<int_value::little-integer-size(unquote(size)), rest::binary>> ->
{:ok, {int_value, rest}}
_ ->
{:error,
"Cannot deserialize value #{inspect(value)} into type #{inspect(unquote(int_type))}"}
end
end
end
end
# Signed
# An i8 is a single two's-complement byte; the varint option does not apply.
def serialize(value, :i8, _opts) when is_integer(value) do
  {:ok, <<value::little-integer-signed-size(8)>>}
end

def deserialize(<<value::little-integer-signed-size(8), rest::binary>>, :i8, _opts) do
  {:ok, {value, rest}}
end
# Generates the clauses for each fixed-size signed integer type. With varint
# enabled, values are zigzag-encoded first so small negative numbers stay
# small on the wire; decoding reverses the mapping via zigzag_decode/1.
for int_type <- [:i16, :i32, :i64, :i128] do
  {size, ""} = to_string(int_type) |> String.trim_leading("i") |> Integer.parse()

  def serialize(value, unquote(int_type), opts) when is_integer(value) do
    if use_varint(opts) do
      serialize_varint(zigzag_encode(value))
    else
      {:ok, <<value::little-integer-signed-size(unquote(size))>>}
    end
  end

  def deserialize(<<value::binary>>, unquote(int_type), opts) do
    if use_varint(opts) do
      # A bare `with` returns the varint error tuple unchanged on failure.
      with {:ok, {deserialized, rest}} <- deserialize_varint(value) do
        {:ok, {zigzag_decode(deserialized), rest}}
      end
    else
      case value do
        <<int_value::little-integer-signed-size(unquote(size)), rest::binary>> ->
          {:ok, {int_value, rest}}

        _ ->
          {:error,
           "Cannot deserialize value #{inspect(value)} into type #{inspect(unquote(int_type))}"}
      end
    end
  end
end
# Float
# f32/f64 are little-endian IEEE-754; varint never applies to floats,
# so opts is ignored.
for float_type <- [:f32, :f64] do
  {size, ""} = to_string(float_type) |> String.trim_leading("f") |> Integer.parse()

  def serialize(value, unquote(float_type), _opts) when is_float(value) do
    {:ok, <<value::little-float-size(unquote(size))>>}
  end

  def deserialize(
        <<value::little-float-size(unquote(size)), rest::binary>>,
        unquote(float_type),
        _opts
      ) do
    {:ok, {value, rest}}
  end
end
# Bool
# Booleans are one byte: 1 for true, 0 for false. Any other leading byte
# fails to match these clauses and falls through to the catch-all error.
for boolean <- [true, false] do
  v = if boolean, do: 1, else: 0

  def serialize(unquote(boolean), :bool, _opts) do
    {:ok, <<unquote(v)::size(8)>>}
  end

  def deserialize(<<unquote(v)::size(8), rest::binary>>, :bool, _opts) do
    {:ok, {unquote(boolean), rest}}
  end
end
# String
# A string is encoded as a u64 byte length followed by the raw bytes.
def serialize(value, :string, opts) when is_binary(value) do
  case serialize(byte_size(value), :u64, opts) do
    {:ok, length_prefix} -> {:ok, length_prefix <> value}
    {:error, _} = error -> error
  end
end

# Reads the u64 length prefix, then takes exactly that many bytes as content.
# Both failure paths report the original, full input.
def deserialize(<<bin::binary>>, :string, opts) do
  case deserialize(bin, :u64, opts) do
    {:ok, {length, tail}} ->
      case tail do
        <<content::binary-size(length), tail::binary>> ->
          {:ok, {content, tail}}

        _ ->
          {:error, "Cannot deserialize value #{inspect(bin)} into type :string"}
      end

    _ ->
      {:error, "Cannot deserialize value #{inspect(bin)} into type :string"}
  end
end
# List
# Encoded as a u64 element count followed by each element's encoding.
def serialize(list, {:list, inner}, opts) when is_list(list) do
  serialize(list, 0, <<>>, {:list, inner}, opts)
end

# Base case: all elements consumed. Prepend the length prefix and flatten
# the accumulated iodata into one binary.
defp serialize([], length, result, {:list, _inner}, opts) do
  with {:ok, serialized_size} <- serialize(length, :u64, opts) do
    {:ok, <<serialized_size::binary, IO.iodata_to_binary(result)::binary>>}
  end
end

# Recursive case: encode the head, append it to the iodata accumulator and
# bump the count. Stops at the first element that fails to encode.
defp serialize([head | tail], length, result, {:list, inner}, opts) do
  case serialize(head, inner, opts) do
    {:ok, serialized} ->
      result = [result, serialized]
      serialize(tail, length + 1, result, {:list, inner}, opts)

    {:error, msg} ->
      {:error, msg}
  end
end
# Reads the u64 element count, then decodes exactly that many elements.
def deserialize(<<rest::binary>>, {:list, inner}, opts) do
  with {:ok, {deserialized_size, rest}} <- deserialize(rest, :u64, opts) do
    deserialize(rest, deserialized_size, [], {:list, inner}, opts)
  else
    # `rest` here is the original input — `with` clause bindings do not leak
    # into `else` — so the error reports the full undecodable value.
    _ -> {:error, "Cannot deserialize value #{inspect(rest)} into type :list"}
  end
end

# All elements read: the accumulator was built head-first, so reverse once.
defp deserialize(rest, 0, result, {:list, _}, _opts) do
  result = Enum.reverse(result)
  {:ok, {result, rest}}
end

# Decode one element, prepend it, and recurse on the remaining binary.
defp deserialize(rest, remaining, result, {:list, inner}, opts) do
  case deserialize(rest, inner, opts) do
    {:ok, {deserialized, rest}} ->
      result = [deserialized | result]
      deserialize(rest, remaining - 1, result, {:list, inner}, opts)

    {:error, msg} ->
      {:error, msg}
  end
end
# Map
# Encoded as a u64 entry count followed by key/value pairs, iterating the
# keys returned by Map.keys/1.
def serialize(map, {:map, {key_type, value_type}}, opts) when is_map(map) do
  serialize(map, Map.keys(map), 0, <<>>, {:map, {key_type, value_type}}, opts)
end

# Base case: every key consumed; prepend the entry count.
defp serialize(_map, [], length, result, {:map, {_, _}}, opts) do
  with {:ok, serialized_size} <- serialize(length, :u64, opts) do
    {:ok, <<serialized_size::binary, IO.iodata_to_binary(result)::binary>>}
  end
end

# Encode the key, then its value, halting on the first failure.
# NOTE(review): `map[key]` goes through Access, which raises for structs —
# the is_map/1 guard above admits structs, so confirm only plain maps are
# expected here (or use Map.fetch!/2).
defp serialize(map, [key | keys], length, result, {:map, {key_type, value_type}}, opts) do
  case serialize(key, key_type, opts) do
    {:ok, serialized_key} ->
      case serialize(map[key], value_type, opts) do
        {:ok, serialized_value} ->
          result = [result, serialized_key, serialized_value]
          serialize(map, keys, length + 1, result, {:map, {key_type, value_type}}, opts)

        {:error, msg} ->
          {:error, msg}
      end

    {:error, msg} ->
      {:error, msg}
  end
end
# Reads the u64 entry count, then decodes that many key/value pairs.
def deserialize(<<rest::binary>>, {:map, {key_type, value_type}}, opts) do
  with {:ok, {deserialized_size, rest}} <- deserialize(rest, :u64, opts) do
    deserialize(rest, deserialized_size, %{}, {:map, {key_type, value_type}}, opts)
  else
    _ -> {:error, "Cannot deserialize value #{inspect(rest)} into type :map"}
  end
end

# All entries read; the accumulated map is the result.
defp deserialize(rest, 0, result, {:map, {_, _}}, _opts) do
  {:ok, {result, rest}}
end

# Decode one key, then its value, then recurse with the pair added.
# A duplicate key in the input silently overwrites the earlier entry.
defp deserialize(rest, remaining, result, {:map, {key_type, value_type}}, opts) do
  case deserialize(rest, key_type, opts) do
    {:ok, {deserialized_key, rest}} ->
      case deserialize(rest, value_type, opts) do
        {:ok, {deserialized_value, rest}} ->
          result = Map.put(result, deserialized_key, deserialized_value)
          deserialize(rest, remaining - 1, result, {:map, {key_type, value_type}}, opts)

        {:error, msg} ->
          {:error, msg}
      end

    {:error, msg} ->
      {:error, msg}
  end
end
# Set
# A MapSet is encoded exactly like a list of its elements.
def serialize(%MapSet{} = set, {:set, inner}, opts) do
  set
  |> MapSet.to_list()
  |> serialize({:list, inner}, opts)
end

# Decodes the list payload and rebuilds the MapSet; a failed list decode is
# returned unchanged by the bare `with`.
def deserialize(<<bin::binary>>, {:set, inner}, opts) do
  with {:ok, {elements, rest}} <- deserialize(bin, {:list, inner}, opts) do
    {:ok, {MapSet.new(elements), rest}}
  end
end
# Option
# `nil` is encoded as a single 0 tag byte with no payload.
def serialize(nil, {:option, _}, _opts) do
  {:ok, <<0::size(8)>>}
end

# Present values are encoded as a 1 tag byte followed by the inner encoding;
# an inner failure propagates unchanged.
def serialize(value, {:option, inner}, opts) do
  with {:ok, encoded} <- serialize(value, inner, opts) do
    {:ok, <<1::size(8), encoded::binary>>}
  end
end

# Tag byte 0: the option is empty.
def deserialize(<<0::size(8), tail::binary>>, {:option, _}, _opts) do
  {:ok, {nil, tail}}
end

# Tag byte 1: decode the wrapped value. Its result tuple already has the
# right shape, so it is returned as-is (success and error alike).
def deserialize(<<1::size(8), tail::binary>>, {:option, inner}, opts) do
  deserialize(tail, inner, opts)
end
# Tuple
# Tuples are supported up to a compile-time configurable maximum arity.
# NOTE(review): Application.get_env/2 in the module body freezes the value at
# compile time; Application.compile_env/3 would make that explicit — confirm
# the project's minimum Elixir version before changing.
max_tuple_size = Application.get_env(:bincode, :max_tuple_size) || 12

for size <- 1..max_tuple_size do
  # One fresh variable per tuple element, for both the element types
  # (tuple_type_N) and the element values (tuple_field_N); these are spliced
  # into the clause heads below so each tuple arity gets its own clause.
  type_variables =
    for i <- 1..size do
      field_type = String.to_atom("tuple_type_#{i}")
      quote do: var!(unquote(Macro.var(field_type, __MODULE__)))
    end

  value_variables =
    for i <- 1..size do
      field_value = String.to_atom("tuple_field_#{i}")
      quote do: var!(unquote(Macro.var(field_value, __MODULE__)))
    end

  # Serializes each element with its paired type, halting on the first error.
  # The accumulated iodata is flattened to a binary only once at the end.
  def serialize({unquote_splicing(value_variables)}, {unquote_splicing(type_variables)}, opts) do
    serialized_fields =
      Enum.reduce_while(
        Enum.zip([unquote_splicing(value_variables)], [unquote_splicing(type_variables)]),
        [],
        fn {value_var, type_var}, result ->
          case serialize(value_var, type_var, opts) do
            {:ok, serialized} -> {:cont, [result, serialized]}
            {:error, msg} -> {:halt, {:error, msg}}
          end
        end
      )

    case serialized_fields do
      {:error, msg} ->
        {:error, msg}

      _ ->
        {:ok, IO.iodata_to_binary(serialized_fields)}
    end
  end

  # Deserializes elements in order, threading the remaining binary through;
  # fields accumulate in reverse and are flipped once at the end.
  def deserialize(<<rest::binary>>, {unquote_splicing(type_variables)}, opts) do
    deserialized_fields =
      Enum.reduce_while(
        [unquote_splicing(type_variables)],
        {[], rest},
        fn type_var, {fields, rest} ->
          case deserialize(rest, type_var, opts) do
            {:ok, {deserialized, rest}} -> {:cont, {[deserialized | fields], rest}}
            {:error, msg} -> {:halt, {:error, msg}}
          end
        end
      )

    case deserialized_fields do
      {:error, msg} ->
        {:error, msg}

      {fields, rest} ->
        tuple = Enum.reverse(fields) |> List.to_tuple()
        {:ok, {tuple, rest}}
    end
  end
end
# Catch-all clause: `type` may be a module that exports serialize/2
# (project-defined serializers — not visible in this file, so confirmed only
# by the function_exported?/3 probe). Code.ensure_loaded?/1 guards against
# the module not yet being loaded (e.g. in interactive/dev mode), where
# function_exported?/3 alone would return false and wrongly reject a valid
# type. For a plain non-module atom it returns false, preserving the error
# path (and the documented behavior for e.g. serialize!(<<>>, {:option, :bool})).
def serialize(value, type, opts) do
  if is_atom(type) and Code.ensure_loaded?(type) and function_exported?(type, :serialize, 2) do
    apply(type, :serialize, [value, opts])
  else
    {:error, "Cannot serialize value #{inspect(value)} into type #{inspect(type)}"}
  end
end
@doc """
Same as `serialize/3` but raises an `ArgumentError` when the
given `value` cannot be encoded according to `type`.

## Examples

    iex> Bincode.serialize!([111], {:list, :u16})
    <<1, 0, 0, 0, 0, 0, 0, 0, 111, 0>>

    iex> Bincode.serialize!(<<>>, {:option, :bool})
    ** (ArgumentError) Cannot serialize value "" into type :bool
"""
def serialize!(value, type, opts \\ []) do
  with {:ok, binary} <- serialize(value, type, opts) do
    binary
  else
    {:error, message} -> raise ArgumentError, message
  end
end
# Catch-all clause: `type` may be a module that exports deserialize/2.
# Code.ensure_loaded?/1 guards against the module not yet being loaded
# (e.g. in interactive/dev mode), where function_exported?/3 alone would
# return false and wrongly reject a valid type. Plain non-module atoms still
# fall through to the error branch as before.
def deserialize(value, type, opts) do
  if is_atom(type) and Code.ensure_loaded?(type) and function_exported?(type, :deserialize, 2) do
    apply(type, :deserialize, [value, opts])
  else
    {:error, "Cannot deserialize value #{inspect(value)} into type #{inspect(type)}"}
  end
end
@doc """
Same as `deserialize/3` but raises an `ArgumentError` when the
given `value` cannot be encoded according to `type`.

## Examples

    iex> Bincode.deserialize!(<<1, 54, 23>>, {:option, :u16})
    {5942, ""}

    iex> Bincode.deserialize!(<<>>, {:list, :string})
    ** (ArgumentError) Cannot deserialize value "" into type :list
"""
def deserialize!(value, type, opts \\ []) do
  with {:ok, result} <- deserialize(value, type, opts) do
    result
  else
    {:error, message} -> raise ArgumentError, message
  end
end
end
|
lib/bincode.ex
| 0.926154
| 0.913792
|
bincode.ex
|
starcoder
|
defmodule Elasticlunr.DB do
  @moduledoc """
  Thin wrapper around a named ETS table (optionally mirrored to DETS) that
  carries the table name and creation options in a struct.
  """

  defstruct [:name, :options]

  @type t :: %__MODULE__{
          name: atom(),
          options: list(atom())
        }

  @doc """
  Creates (or reuses) a named ETS table and returns its handle struct.

  The `compressed` and `named_table` options are always applied; extra ETS
  options may be passed through `opts`. Calling this twice with the same name
  reuses the existing table (existing options are not changed).
  """
  @spec init(atom(), list()) :: t()
  def init(name, opts \\ []) when is_atom(name) do
    default = ~w[compressed named_table]a
    options = Enum.uniq(default ++ opts)

    unless Enum.member?(:ets.all(), name) do
      :ets.new(name, options)
    end

    struct!(__MODULE__, name: name, options: options)
  end

  @doc """
  Deletes all objects stored under `key`.

  Note: `:ets.delete/2` takes a key, not a match pattern — the parameter was
  previously named `pattern`, which was misleading. Use `match_delete/2` for
  pattern-based deletion.
  """
  @spec delete(t(), term()) :: boolean()
  def delete(%__MODULE__{name: name}, key), do: :ets.delete(name, key)

  @doc "Drops the whole ETS table if it still exists; always returns `true`."
  @spec destroy(t()) :: boolean()
  def destroy(%__MODULE__{name: name}) do
    if Enum.member?(:ets.all(), name) do
      :ets.delete(name)
    else
      true
    end
  end

  @doc "Inserts a tuple (or list of tuples) into the table."
  @spec insert(t(), term()) :: boolean()
  def insert(%__MODULE__{name: name}, data), do: :ets.insert(name, data)

  @doc "Returns all objects stored under `key`."
  @spec lookup(t(), term()) :: list(term())
  def lookup(%__MODULE__{name: name}, key), do: :ets.lookup(name, key)

  @doc "Checks whether `key` exists in the table."
  @spec member?(t(), term()) :: boolean()
  def member?(%__MODULE__{name: name}, key), do: :ets.member(name, key)

  @doc "Deletes every object matching the given ETS match pattern."
  @spec match_delete(t(), term()) :: boolean()
  def match_delete(%__MODULE__{name: name}, pattern), do: :ets.match_delete(name, pattern)

  @doc "Returns every object matching the given ETS match pattern."
  @spec match_object(t(), term()) :: list(term())
  def match_object(%__MODULE__{name: name}, spec), do: :ets.match_object(name, spec)

  @doc "Counts the objects selected by the given match spec."
  @spec select_count(t(), term()) :: pos_integer()
  def select_count(%__MODULE__{name: name}, spec), do: :ets.select_count(name, spec)

  @doc """
  Loads the ETS table contents from a DETS `file`.

  Returns `{:ok, db}` on success. NOTE(review): when the file does not exist
  (or DETS fails), the non-matching `with` value is returned as-is (e.g.
  `false`), which does not satisfy the declared spec — confirm callers only
  match on the success shape before tightening this.
  """
  @spec from(t(), keyword()) :: {:ok, t()}
  def from(%__MODULE__{name: name} = db, file: file) do
    with true <- File.exists?(file),
         {:ok, ^name} <- :dets.open_file(name, file: file),
         true <- :ets.from_dets(name, name) do
      {:ok, db}
    end
  end

  @doc """
  Dumps the ETS table contents to a DETS `file`, opening the DETS table on
  first use, then closes it.
  """
  @spec to(t(), keyword()) :: :ok
  def to(%__MODULE__{name: name}, file: file) do
    unless Enum.member?(:dets.all(), name) do
      :dets.open_file(name, ram_file: true, file: file)
    end

    with ^name <- :ets.to_dets(name, name) do
      :dets.close(name)
    end
  end
end
|
lib/elasticlunr/db.ex
| 0.734501
| 0.409929
|
db.ex
|
starcoder
|
defmodule Liquor.Filter do
  @moduledoc """
  Applies search terms as filters to a given Ecto.Query
  """
  import Ecto.Query

  # A filter is either a 4-arity fun, a map of key => filter spec, or an
  # {:apply, m, f, a} tuple whose extra args are appended after
  # query/op/key/value.
  @type filter ::
    ((Ecto.Query.t, Liquor.op, atom, term) -> Ecto.Query.t) |
    map |
    {:apply, module, atom, list}

  # Clause order below is load-bearing: list values, nil values, and specific
  # types are each intercepted before the more general fallbacks.
  @spec apply_filter(Ecto.Query.t, Liquor.op, atom, term, filter) :: Ecto.Query.t
  # No filter configured for this key: leave the query untouched.
  def apply_filter(query, _op, _key, _value, nil), do: query

  # A function filter receives full control over how the term is applied.
  def apply_filter(query, op, key, value, filter) when is_function(filter) do
    filter.(query, op, key, value)
  end

  # List values with :match / :unmatch become IN / NOT IN conditions.
  def apply_filter(query, :match, key, value, {:type, _, _}) when is_list(value) do
    where(query, [r], field(r, ^key) in ^value)
  end

  def apply_filter(query, :unmatch, key, value, {:type, _, _}) when is_list(value) do
    where(query, [r], field(r, ^key) not in ^value)
  end

  # Any other operator with a list value is applied element-wise (AND-ed).
  def apply_filter(query, op, key, value, {:type, _, _} = spec) when is_list(value) do
    Enum.reduce(value, query, fn str, q2 -> apply_filter(q2, op, key, str, spec) end)
  end

  def apply_filter(query, _op, _key, nil, {:type, _, %{null: false}}) do
    # the field is not nullable, doesn't matter what the operator is, if it's nil it can't filter
    query
  end

  # Nullable field, nil value: delegate to the NULL-aware filter.
  def apply_filter(query, op, key, nil, {:type, _, _}) do
    Liquor.Filters.Null.apply_filter(query, op, key, nil)
  end

  # Typed dispatch: each clause delegates to a type-specific filter module.
  def apply_filter(query, op, key, value, {:type, :date, _}) do
    Liquor.Filters.Date.apply_filter(query, op, key, value)
  end

  def apply_filter(query, op, key, value, {:type, :time, _}) do
    Liquor.Filters.Time.apply_filter(query, op, key, value)
  end

  def apply_filter(query, op, key, value, {:type, :naive_datetime, _}) do
    Liquor.Filters.NaiveDateTime.apply_filter(query, op, key, value)
  end

  def apply_filter(query, op, key, value, {:type, type, _}) when type in [:integer, :float, :decimal] do
    Liquor.Filters.Numeric.apply_filter(query, op, key, value)
  end

  # static strings are like Atoms, they cannot be partially matched
  def apply_filter(query, op, key, value, {:type, :string, %{static: true}}) when is_binary(value) do
    Liquor.Filters.Atom.apply_filter(query, op, key, value)
  end

  def apply_filter(query, op, key, value, {:type, :string, _}) when is_binary(value) do
    Liquor.Filters.String.apply_filter(query, op, key, value)
  end

  def apply_filter(query, op, key, value, {:type, :atom, _}) when is_atom(value) do
    Liquor.Filters.Atom.apply_filter(query, op, key, value)
  end

  def apply_filter(query, op, key, value, {:type, :boolean, _}) when is_boolean(value) do
    Liquor.Filters.Atom.apply_filter(query, op, key, value)
  end

  # {:type, type} without options is normalized to {:type, type, %{}}.
  def apply_filter(query, op, key, value, {:type, type}) do
    apply_filter(query, op, key, value, {:type, type, %{}})
  end

  # MFA filters get the extra args appended after query/op/key/value.
  def apply_filter(query, op, key, value, {:apply, m, f, a}) when is_atom(m) and is_atom(f) do
    :erlang.apply(m, f, [query, op, key, value | a])
  end

  # Folds every {op, key, value} term into the query using the given filter.
  def filter(query, terms, filter) when is_function(filter) do
    Enum.reduce(terms, query, fn
      {op, key, value}, q2 -> apply_filter(q2, op, key, value, filter)
    end)
  end

  # With a map, a key that has no configured filter resolves to nil and the
  # term becomes a no-op (see the first apply_filter/5 clause).
  def filter(query, terms, items) when is_map(items) do
    Enum.reduce(terms, query, fn
      {op, key, value}, q2 -> apply_filter(q2, op, key, value, items[key])
    end)
  end

  def filter(query, terms, {m, f, a}) when is_atom(m) and is_atom(f) do
    Enum.reduce(terms, query, fn
      {op, key, value}, q2 -> apply_filter(q2, op, key, value, {m, f, a})
    end)
  end
end
|
lib/liquor/filter.ex
| 0.788583
| 0.565149
|
filter.ex
|
starcoder
|
defmodule AWS.Config do
@moduledoc """
AWS Config
AWS Config provides a way to keep track of the configurations of all the
AWS resources associated with your AWS account. You can use AWS Config to
get the current and historical configurations of each AWS resource and also
to get information about the relationship between the resources. An AWS
resource can be an Amazon Compute Cloud (Amazon EC2) instance, an Elastic
Block Store (EBS) volume, an Elastic network Interface (ENI), or a security
group. For a complete list of resources currently supported by AWS Config,
see [Supported AWS
Resources](http://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html#supported-resources).
You can access and manage AWS Config through the AWS Management Console,
the AWS Command Line Interface (AWS CLI), the AWS Config API, or the AWS
SDKs for AWS Config
This reference guide contains documentation for the AWS Config API and the
AWS CLI commands that you can use to manage AWS Config.
The AWS Config API uses the Signature Version 4 protocol for signing
requests. For more information about how to sign a request with this
protocol, see [Signature Version 4 Signing
Process](http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
For detailed information about AWS Config features and their associated
actions or commands, as well as how to work with AWS Management Console,
see [What Is AWS
Config?](http://docs.aws.amazon.com/config/latest/developerguide/WhatIsConfig.html)
in the *AWS Config Developer Guide*.
"""
@doc """
Deletes the specified AWS Config rule and all of its evaluation results.
AWS Config sets the state of a rule to `DELETING` until the deletion is
complete. You cannot update a rule while it is in this state. If you make a
`PutConfigRule` or `DeleteConfigRule` request for the rule, you will
receive a `ResourceInUseException`.
You can check the state of a rule by using the `DescribeConfigRules`
request.
"""
def delete_config_rule(client, input, options \\ []) do
request(client, "DeleteConfigRule", input, options)
end
@doc """
Deletes the configuration recorder.
After the configuration recorder is deleted, AWS Config will not record
resource configuration changes until you create a new configuration
recorder.
This action does not delete the configuration information that was
previously recorded. You will be able to access the previously recorded
information by using the `GetResourceConfigHistory` action, but you will
not be able to access this information in the AWS Config console until you
create a new configuration recorder.
"""
def delete_configuration_recorder(client, input, options \\ []) do
request(client, "DeleteConfigurationRecorder", input, options)
end
@doc """
Deletes the delivery channel.
Before you can delete the delivery channel, you must stop the configuration
recorder by using the `StopConfigurationRecorder` action.
"""
def delete_delivery_channel(client, input, options \\ []) do
request(client, "DeleteDeliveryChannel", input, options)
end
@doc """
Deletes the evaluation results for the specified Config rule. You can
specify one Config rule per request. After you delete the evaluation
results, you can call the `StartConfigRulesEvaluation` API to start
evaluating your AWS resources against the rule.
"""
def delete_evaluation_results(client, input, options \\ []) do
request(client, "DeleteEvaluationResults", input, options)
end
@doc """
Schedules delivery of a configuration snapshot to the Amazon S3 bucket in
the specified delivery channel. After the delivery has started, AWS Config
sends following notifications using an Amazon SNS topic that you have
specified.
<ul> <li> Notification of starting the delivery.
</li> <li> Notification of delivery completed, if the delivery was
successfully completed.
</li> <li> Notification of delivery failure, if the delivery failed to
complete.
</li> </ul>
"""
def deliver_config_snapshot(client, input, options \\ []) do
request(client, "DeliverConfigSnapshot", input, options)
end
@doc """
Indicates whether the specified AWS Config rules are compliant. If a rule
is noncompliant, this action returns the number of AWS resources that do
not comply with the rule.
A rule is compliant if all of the evaluated resources comply with it, and
it is noncompliant if any of these resources do not comply.
If AWS Config has no current evaluation results for the rule, it returns
`INSUFFICIENT_DATA`. This result might indicate one of the following
conditions:
<ul> <li> AWS Config has never invoked an evaluation for the rule. To check
whether it has, use the `DescribeConfigRuleEvaluationStatus` action to get
the `LastSuccessfulInvocationTime` and `LastFailedInvocationTime`.
</li> <li> The rule's AWS Lambda function is failing to send evaluation
results to AWS Config. Verify that the role that you assigned to your
configuration recorder includes the `config:PutEvaluations` permission. If
the rule is a custom rule, verify that the AWS Lambda execution role
includes the `config:PutEvaluations` permission.
</li> <li> The rule's AWS Lambda function has returned `NOT_APPLICABLE` for
all evaluation results. This can occur if the resources were deleted or
removed from the rule's scope.
</li> </ul>
"""
def describe_compliance_by_config_rule(client, input, options \\ []) do
request(client, "DescribeComplianceByConfigRule", input, options)
end
@doc """
Indicates whether the specified AWS resources are compliant. If a resource
is noncompliant, this action returns the number of AWS Config rules that
the resource does not comply with.
A resource is compliant if it complies with all the AWS Config rules that
evaluate it. It is noncompliant if it does not comply with one or more of
these rules.
If AWS Config has no current evaluation results for the resource, it
returns `INSUFFICIENT_DATA`. This result might indicate one of the
following conditions about the rules that evaluate the resource:
<ul> <li> AWS Config has never invoked an evaluation for the rule. To check
whether it has, use the `DescribeConfigRuleEvaluationStatus` action to get
the `LastSuccessfulInvocationTime` and `LastFailedInvocationTime`.
</li> <li> The rule's AWS Lambda function is failing to send evaluation
results to AWS Config. Verify that the role that you assigned to your
configuration recorder includes the `config:PutEvaluations` permission. If
the rule is a custom rule, verify that the AWS Lambda execution role
includes the `config:PutEvaluations` permission.
</li> <li> The rule's AWS Lambda function has returned `NOT_APPLICABLE` for
all evaluation results. This can occur if the resources were deleted or
removed from the rule's scope.
</li> </ul>
"""
def describe_compliance_by_resource(client, input, options \\ []) do
request(client, "DescribeComplianceByResource", input, options)
end
@doc """
Returns status information for each of your AWS managed Config rules. The
status includes information such as the last time AWS Config invoked the
rule, the last time AWS Config failed to invoke the rule, and the related
error for the last failure.
"""
def describe_config_rule_evaluation_status(client, input, options \\ []) do
request(client, "DescribeConfigRuleEvaluationStatus", input, options)
end
@doc """
Returns details about your AWS Config rules.
"""
def describe_config_rules(client, input, options \\ []) do
request(client, "DescribeConfigRules", input, options)
end
@doc """
Returns the current status of the specified configuration recorder. If a
configuration recorder is not specified, this action returns the status of
all configuration recorder associated with the account.
<note> Currently, you can specify only one configuration recorder per
region in your account.
</note>
"""
def describe_configuration_recorder_status(client, input, options \\ []) do
request(client, "DescribeConfigurationRecorderStatus", input, options)
end
@doc """
Returns the details for the specified configuration recorders. If the
configuration recorder is not specified, this action returns the details
for all configuration recorders associated with the account.
<note> Currently, you can specify only one configuration recorder per
region in your account.
</note>
"""
def describe_configuration_recorders(client, input, options \\ []) do
request(client, "DescribeConfigurationRecorders", input, options)
end
@doc """
Returns the current status of the specified delivery channel. If a delivery
channel is not specified, this action returns the current status of all
delivery channels associated with the account.
<note> Currently, you can specify only one delivery channel per region in
your account.
</note>
"""
def describe_delivery_channel_status(client, input, options \\ []) do
request(client, "DescribeDeliveryChannelStatus", input, options)
end
@doc """
Returns details about the specified delivery channel. If a delivery channel
is not specified, this action returns the details of all delivery channels
associated with the account.
<note> Currently, you can specify only one delivery channel per region in
your account.
</note>
"""
def describe_delivery_channels(client, input, options \\ []) do
request(client, "DescribeDeliveryChannels", input, options)
end
@doc """
Returns the evaluation results for the specified AWS Config rule. The
results indicate which AWS resources were evaluated by the rule, when each
resource was last evaluated, and whether each resource complies with the
rule.
"""
def get_compliance_details_by_config_rule(client, input, options \\ []) do
request(client, "GetComplianceDetailsByConfigRule", input, options)
end
@doc """
Returns the evaluation results for the specified AWS resource. The results
indicate which AWS Config rules were used to evaluate the resource, when
each rule was last used, and whether the resource complies with each rule.
"""
def get_compliance_details_by_resource(client, input, options \\ []) do
request(client, "GetComplianceDetailsByResource", input, options)
end
@doc """
Returns the number of AWS Config rules that are compliant and noncompliant,
up to a maximum of 25 for each.
"""
def get_compliance_summary_by_config_rule(client, input, options \\ []) do
request(client, "GetComplianceSummaryByConfigRule", input, options)
end
@doc """
Returns the number of resources that are compliant and the number that are
noncompliant. You can specify one or more resource types to get these
numbers for each resource type. The maximum number returned is 100.
"""
def get_compliance_summary_by_resource_type(client, input, options \\ []) do
request(client, "GetComplianceSummaryByResourceType", input, options)
end
@doc """
Returns a list of configuration items for the specified resource. The list
contains details about each state of the resource during the specified time
interval.
The response is paginated, and by default, AWS Config returns a limit of 10
configuration items per page. You can customize this number with the
`limit` parameter. The response includes a `nextToken` string, and to get
the next page of results, run the request again and enter this string for
the `nextToken` parameter.
<note> Each call to the API is limited to span a duration of seven days. It
is likely that the number of records returned is smaller than the specified
`limit`. In such cases, you can make another call, using the `nextToken`.
</note>
"""
def get_resource_config_history(client, input, options \\ []) do
request(client, "GetResourceConfigHistory", input, options)
end
@doc """
Accepts a resource type and returns a list of resource identifiers for the
resources of that type. A resource identifier includes the resource type,
ID, and (if available) the custom resource name. The results consist of
resources that AWS Config has discovered, including those that AWS Config
is not currently recording. You can narrow the results to include only
resources that have specific resource IDs or a resource name.
<note> You can specify either resource IDs or a resource name but not both
in the same request.
</note> The response is paginated, and by default AWS Config lists 100
resource identifiers on each page. You can customize this number with the
`limit` parameter. The response includes a `nextToken` string, and to get
the next page of results, run the request again and enter this string for
the `nextToken` parameter.
"""
def list_discovered_resources(client, input, options \\ []) do
request(client, "ListDiscoveredResources", input, options)
end
@doc """
Adds or updates an AWS Config rule for evaluating whether your AWS
resources comply with your desired configurations.
You can use this action for custom Config rules and AWS managed Config
rules. A custom Config rule is a rule that you develop and maintain. An AWS
managed Config rule is a customizable, predefined rule that AWS Config
provides.
If you are adding a new custom Config rule, you must first create the AWS
Lambda function that the rule invokes to evaluate your resources. When you
use the `PutConfigRule` action to add the rule to AWS Config, you must
specify the Amazon Resource Name (ARN) that AWS Lambda assigns to the
function. Specify the ARN for the `SourceIdentifier` key. This key is part
of the `Source` object, which is part of the `ConfigRule` object.
If you are adding a new AWS managed Config rule, specify the rule's
identifier for the `SourceIdentifier` key. To reference AWS managed Config
rule identifiers, see [Using AWS Managed Config
Rules](http://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html).
For any new rule that you add, specify the `ConfigRuleName` in the
`ConfigRule` object. Do not specify the `ConfigRuleArn` or the
`ConfigRuleId`. These values are generated by AWS Config for new rules.
If you are updating a rule that you added previously, you can specify the
rule by `ConfigRuleName`, `ConfigRuleId`, or `ConfigRuleArn` in the
`ConfigRule` data type that you use in this request.
The maximum number of rules that AWS Config supports is 50.
For more information about requesting a rule limit increase, see [AWS
Config
Limits](http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_config)
in the *AWS General Reference Guide*.
For more information about developing and using AWS Config rules, see
[Evaluating AWS Resource Configurations with AWS
Config](http://docs.aws.amazon.com/config/latest/developerguide/evaluate-config.html)
in the *AWS Config Developer Guide*.
"""
def put_config_rule(client, input, options \\ []) do
request(client, "PutConfigRule", input, options)
end
@doc """
Creates a new configuration recorder to record the selected resource
configurations.
You can use this action to change the role `roleARN` and/or the
`recordingGroup` of an existing recorder. To change the role, call the
action on the existing configuration recorder and specify a role.
<note> Currently, you can specify only one configuration recorder per
region in your account.
If `ConfigurationRecorder` does not have the **recordingGroup** parameter
specified, the default is to record all supported resource types.
</note>
"""
def put_configuration_recorder(client, input, options \\ []) do
request(client, "PutConfigurationRecorder", input, options)
end
@doc """
Creates a delivery channel object to deliver configuration information to
an Amazon S3 bucket and Amazon SNS topic.
Before you can create a delivery channel, you must create a configuration
recorder.
You can use this action to change the Amazon S3 bucket or an Amazon SNS
topic of the existing delivery channel. To change the Amazon S3 bucket or
an Amazon SNS topic, call this action and specify the changed values for
the S3 bucket and the SNS topic. If you specify a different value for
either the S3 bucket or the SNS topic, this action will keep the existing
value for the parameter that is not changed.
<note> You can have only one delivery channel per region in your account.
</note>
"""
def put_delivery_channel(client, input, options \\ []) do
request(client, "PutDeliveryChannel", input, options)
end
@doc """
Used by an AWS Lambda function to deliver evaluation results to AWS Config.
This action is required in every AWS Lambda function that is invoked by an
AWS Config rule.
"""
def put_evaluations(client, input, options \\ []) do
request(client, "PutEvaluations", input, options)
end
@doc """
Runs an on-demand evaluation for the specified Config rules against the
last known configuration state of the resources. Use
`StartConfigRulesEvaluation` when you want to test that a rule you updated
is working as expected. `StartConfigRulesEvaluation` does not re-record the
latest configuration state for your resources; it re-runs an evaluation
against the last known state of your resources.

You can specify up to 25 Config rules per request.

An existing `StartConfigRulesEvaluation` call must complete for the
specified rules before you can call the API again. If you chose to have AWS
Config stream to an Amazon SNS topic, you will receive a
`ConfigRuleEvaluationStarted` notification when the evaluation starts.

<note> You don't need to call the `StartConfigRulesEvaluation` API to run
an evaluation for a new rule. When you create a new rule, AWS Config
automatically evaluates your resources against the rule.

</note> The `StartConfigRulesEvaluation` API is useful if you want to run
on-demand evaluations, such as the following example:

<ol> <li> You have a custom rule that evaluates your IAM resources every 24
hours.

</li> <li> You update your Lambda function to add additional conditions to
your rule.

</li> <li> Instead of waiting for the next periodic evaluation, you call
the `StartConfigRulesEvaluation` API.

</li> <li> AWS Config invokes your Lambda function and evaluates your IAM
resources.

</li> <li> Your custom rule will still run periodic evaluations every 24
hours.

</li> </ol>
"""
def start_config_rules_evaluation(client, input, options \\ []) do
  # Thin wrapper: dispatches the "StartConfigRulesEvaluation" action.
  request(client, "StartConfigRulesEvaluation", input, options)
end
@doc """
Starts recording configurations of the AWS resources you have selected to
record in your AWS account.

You must have created at least one delivery channel to successfully start
the configuration recorder.
"""
def start_configuration_recorder(client, input, options \\ []) do
  # Thin wrapper: dispatches the "StartConfigurationRecorder" action.
  request(client, "StartConfigurationRecorder", input, options)
end
@doc """
Stops recording configurations of the AWS resources you have selected to
record in your AWS account.
"""
def stop_configuration_recorder(client, input, options \\ []) do
  # Thin wrapper: dispatches the "StopConfigurationRecorder" action.
  request(client, "StopConfigurationRecorder", input, options)
end
@spec request(map(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t | nil, Poison.Response.t} |
        {:error, Poison.Parser.t} |
        {:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
  # Pin the service name, derive host/URL, then sign and POST the JSON-RPC
  # payload. A 200 with an empty body yields {:ok, nil, response}; any other
  # status is decoded and returned as {:error, {type, message}}.
  service_client = %{client | service: "config"}
  host = get_host("config", service_client)
  url = get_url(host, service_client)
  payload = Poison.Encoder.encode(input, [])

  unsigned_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "StarlingDoveService.#{action}"}
  ]

  signed_headers = AWS.Request.sign_v4(service_client, "POST", url, unsigned_headers, payload)

  case HTTPoison.post(url, payload, signed_headers, options) do
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body), response}

    {:ok, %HTTPoison.Response{body: body}} ->
      decoded = Poison.Parser.parse!(body)
      {:error, {decoded["__type"], decoded["message"]}}

    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Builds the request host: "localhost" for the pseudo-region "local",
# otherwise "<prefix>.<region>.<endpoint>".
defp get_host(endpoint_prefix, client) do
  case client.region do
    "local" -> "localhost"
    region -> "#{endpoint_prefix}.#{region}.#{client.endpoint}"
  end
end
# Assembles the full endpoint URL from the client's protocol and port.
defp get_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/config.ex
| 0.890145
| 0.542984
|
config.ex
|
starcoder
|
defmodule FinancialSystem do
  alias FinancialSystem.Account, as: Account
  alias FinancialSystem.Converter, as: Converter

  @moduledoc """
  After you create accounts with the Account module, you can perform operations on them
  with the functions available in the FinancialSystem module.
  """

  @doc """
  Transfers a value from one account to a list of destination accounts. The accounts may
  have the same or different currencies; when the currencies differ, the value is
  converted into each destination's currency.

  ## Examples

      # transfer between accounts with the same currency, :BRL to :BRL
      {account1, [account4]} = FinancialSystem.transaction(account1, [account4], 10_00)

      # transfer between accounts with different currencies
      {account3, [account1, account2]} = FinancialSystem.transaction(account3, [account1, account2], 1000_00)
  """
  @spec transaction(Account.t(), [Account.t()], integer) ::
          {Account.t(), [Account.t()]} | {:error, String.t()}
  def transaction(from_account, to_accounts, value) when is_list(to_accounts) do
    case balance_enough?(from_account.balance, value) do
      true ->
        # `value` is denominated in cents; split it evenly across all
        # destinations and convert each share to the destination currency.
        value_float = value / 100
        split_value = value_float / length(to_accounts)

        values_transfer =
          Enum.map(to_accounts, fn accounts ->
            Converter.exchange(
              split_value,
              from_account.balance.currency,
              accounts.balance.currency
            )
            |> to_int()
          end)

        # Deposit each converted share into its destination account.
        # (The original wrapped this result in an identity `for` comprehension,
        # which is removed here as dead indirection.)
        updated_to_accounts =
          to_accounts
          |> Enum.zip(values_transfer)
          |> Enum.map(fn {accounts, values} ->
            deposit(accounts, accounts.balance, :balance, values)
          end)

        up_from_account = debit(from_account, from_account.balance, :balance, value)
        {up_from_account, updated_to_accounts}

      false ->
        {:error, "Not enough money. (balance #{from_account.balance.amount})"}
    end
  end

  @doc """
  Debits a value from a specific account.

  Takes an account structure, a `%Money{}`, the key to update, and an integer value
  in cents.

  ## Example

      account1 = FinancialSystem.debit(account1, account1.balance, :balance, 10_00)
  """
  @spec debit(Account.t(), Money.t(), atom, integer) :: Account.t() | {:error, String.t()}
  def debit(account, money, key, value) do
    case balance_enough?(money, value) do
      true ->
        current = Money.subtract(money, value)
        up_account(account, key, current)

      false ->
        {:error, "Not enough money. (balance #{money.amount})"}
    end
  end

  @doc """
  Deposits a value into a specified account.

  Takes the same arguments as `debit/4`.

  ## Example

      account1 = FinancialSystem.deposit(account1, account1.balance, :balance, 10_00)
  """
  @spec deposit(Account.t(), Money.t(), atom, integer) :: Account.t()
  def deposit(account, money, key, value) do
    current = Money.add(money, value)
    up_account(account, key, current)
  end

  @doc """
  Checks whether the account balance is sufficient for an operation.

  Returns `true` or `false`.

  ## Examples

      FinancialSystem.balance_enough?(account1.balance, 100_00)
  """
  @spec balance_enough?(Money.t(), integer) :: boolean
  def balance_enough?(balance, value) do
    # NOTE(review): the `or balance.amount < 0` branch makes ANY negative
    # balance pass the check, allowing further debits from overdrawn accounts.
    # Behavior is preserved from the original — confirm overdraft is intended.
    balance.amount >= value or balance.amount < 0
  end

  # Replaces the value under `key` in the account struct with `current`.
  @spec up_account(Account.t(), atom, Money.t()) :: Account.t()
  defp up_account(account, key, current) do
    Map.put(account, key, current)
  end

  @doc """
  Prints the account balance in a friendly format.

  ## Example

      FinancialSystem.consult(account1)
  """
  @spec consult(atom | %{balance: Money.t(), name: any}) :: :ok
  def consult(account) do
    IO.puts("#{account.name}, your balance is: #{Money.to_string(account.balance)}")
  end

  # Converts a float amount back to integer cents. Uses truncation, matching
  # the original behavior (fractional cents are dropped, not rounded).
  defp to_int(money), do: trunc(100 * money)
end
|
apps/financial_system/lib/financial_system.ex
| 0.811078
| 0.798226
|
financial_system.ex
|
starcoder
|
defmodule Sherbet.Service.Contact.Communication do
  @moduledoc """
  Provides interfaces to communications.

  Every public function delegates to `Communication.Method`. Note the
  argument-order difference: this module's API takes
  `(type, communication, identity)` while the backend takes
  `(type, identity, communication)`.
  """
  # NOTE(review): `import Ecto.Query` and `require Logger` appear unused in
  # this module — confirm before removing.
  import Ecto.Query

  require Logger

  alias Sherbet.Service.Contact.Communication

  # Identities are referenced by UUID strings throughout this module.
  @type uuid :: String.t

  @doc """
  Add a communication type to be associated with the given identity.

  Returns `:ok` on successful addition. Otherwise returns an error.
  """
  @spec add(atom, String.t, uuid) :: :ok | { :error, String.t }
  def add(type, communication, identity) do
    Communication.Method.add(type, identity, communication)
  end

  @doc """
  Add a communication type to be associated with the given identity, and set its
  priority.

  Returns `:ok` on successful addition. Otherwise returns an error.
  """
  @spec add(atom, String.t, :secondary | :primary, uuid) :: :ok | { :error, String.t }
  def add(type, communication, :secondary, identity), do: add(type, communication, identity)
  def add(type, communication, :primary, identity) do
    # Add first; only promote to :primary when the add succeeded.
    case Communication.Method.add(type, identity, communication) do
      :ok -> Communication.Method.set_priority(type, identity, communication, :primary)
      error -> error
    end
  end

  @doc """
  Remove a communication type from a given identity.

  Returns `:ok` on successful removal. Otherwise returns an error.
  """
  @spec remove(atom, String.t, uuid) :: :ok | { :error, String.t }
  def remove(type, communication, identity) do
    Communication.Method.remove(type, identity, communication)
  end

  @doc """
  Change a communication type associated with the identity to a new priority.

  Will turn any other primary communication of that type for that identity into a
  secondary communication option.

  Returns `:ok` on successful change. Otherwise returns an error.
  """
  @spec set_priority(atom, String.t, :secondary | :primary, uuid) :: :ok | { :error, String.t }
  def set_priority(type, communication, priority, identity) do
    Communication.Method.set_priority(type, identity, communication, priority)
  end

  @doc """
  Request a communication type associated with another identity be removed.

  Removal requests only apply to unverified communications.

  Returns `:ok` if the request was successful. Otherwise returns an error.
  """
  @spec request_removal(atom, String.t) :: :ok | { :error, String.t }
  def request_removal(type, communication) do
    Communication.Method.request_removal(type, communication)
  end

  @doc """
  Finalise the request that a communication type be removed.

  If the communication is verified, then it should return an error.

  Returns `:ok` if removal was successful. Otherwise returns an error.
  """
  @spec finalise_removal(atom, String.t, String.t) :: :ok | { :error, String.t }
  def finalise_removal(type, communication, key) do
    Communication.Method.finalise_removal(type, communication, key)
  end

  @doc """
  Check if a communication type belonging to the given identity has been verified.

  Returns `{ :ok, verified }` if the operation was successful, where `verified`
  is whether the email was verified (`true`) or not (`false`). Otherwise returns
  an error.
  """
  @spec verified?(atom, String.t, uuid) :: { :ok, boolean } | { :error, String.t }
  def verified?(type, communication, identity) do
    Communication.Method.verified?(type, identity, communication)
  end

  @doc """
  Request a communication type be verified.

  If the communication is already verified, then it should return an error.

  Returns `:ok` if the request was successful. Otherwise returns an error.
  """
  @spec request_verification(atom, String.t, uuid) :: :ok | { :error, String.t }
  def request_verification(type, communication, identity) do
    Communication.Method.request_verification(type, identity, communication)
  end

  @doc """
  Finalise the verification request for a communication type.

  If the communication is already verified, then it should return an error.

  Returns `:ok` if verification was successful. Otherwise returns an error.
  """
  @spec finalise_verification(atom, String.t, String.t, uuid) :: :ok | { :error, String.t }
  def finalise_verification(type, communication, key, identity) do
    Communication.Method.finalise_verification(type, identity, communication, key)
  end

  @doc """
  Get a list of communications of type associated with the given identity.

  Returns `{ :ok, contacts }` if the operation was successful, where `contacts` is
  the list of communication methods associated with the given identity and their
  current verification status and priority. Otherwise returns the reason of failure.
  """
  @spec contacts(atom, uuid) :: { :ok, [{ :unverified | :verified, :secondary | :primary, String.t }] } | { :error, String.t }
  def contacts(type, identity) do
    Communication.Method.contacts(type, identity)
  end

  @doc """
  Get the primary communication of type associated with the given identity.

  Returns `{ :ok, contact }` if the operation was successful, where `contact` is
  the primary communication method associated with the given identity and its
  current verification status. Otherwise returns the reason of failure.
  """
  @spec primary_contact(atom, uuid) :: { :ok, { :unverified | :verified, String.t } } | { :error, String.t }
  def primary_contact(type, identity) do
    Communication.Method.primary_contact(type, identity)
  end

  @doc """
  Get the owning identity for the specific communication of type.

  Returns `{ :ok, identity }` if the operation was successful. Otherwise returns
  the reason of failure.
  """
  @spec owner(atom, String.t) :: { :ok, uuid } | { :error, String.t }
  def owner(type, communication) do
    Communication.Method.owner(type, communication)
  end
end
|
apps/sherbet_service/lib/sherbet.service/contact/communication.ex
| 0.881608
| 0.444927
|
communication.ex
|
starcoder
|
defmodule BinFormat.FieldType.Util do
  @moduledoc """
  Implementations of the AST generator functions for builtin types with the
  BinFormat.FieldType.BuiltIn struct replaced by the relevant variables.

  These functions are useful for other types and are provided to reduce
  duplication.
  """

  @doc """
  Builds a struct definition entry for a simple field.

  Returns `{:ok, ast}` where `ast` is the equivalent of the `name: default`
  entry in a `defstruct` definition.
  """
  def standard_struct_def(name, default) do
    struct_def =
      quote do
        # Macro.escape lets arbitrary default values survive in the AST.
        {unquote(name), unquote(Macro.escape(default))}
      end

    {:ok, struct_def}
  end

  @doc """
  Builds a struct pattern entry for a simple field.

  Returns the equivalent of the `name: full_name` entry in a
  `%Module{... name: full_name, ...}` pattern, where `full_name` is `name`
  with `prefix` prepended. Can be used for both building and matching
  patterns.
  """
  def standard_struct_pattern(name, module, prefix) do
    var_name = prefixed_var(name, prefix, module)

    struct_pattern =
      quote do
        {unquote(name), unquote(var_name)}
      end

    {:ok, struct_pattern}
  end

  @doc """
  Builds a binary pattern for an Elixir built-in binary type.

  Returns the equivalent of `<< ... full_name :: type-option1-option2-size(s), ... >>`
  where `option1` and `option2` are members of the `options` list and `s` is
  the value of `size` (the size specifier is omitted when `size` is
  `:undefined`). Can be used for both building and matching patterns.
  """
  def standard_bin_pattern(name, type, size, options, module, prefix) do
    # Turn the option atoms into variables, with the type as the first option.
    option_vars = Enum.map([type | options], fn opt -> Macro.var(opt, module) end)

    # Append `size(s)` as a call at the end of the option list, unless the
    # size is :undefined.
    pattern_options =
      option_vars ++
        case size do
          :undefined -> []
          _ -> [quote(do: size(unquote(size)))]
        end

    var_name = prefixed_var(name, prefix, module)

    # Separate each option with a dash: type-opt1-opt2-size(s).
    merged_options =
      Enum.reduce(pattern_options, fn rhs, lhs ->
        quote do
          unquote(lhs) - unquote(rhs)
        end
      end)

    # Attach the merged options to the variable: full_name :: options.
    bin_pattern =
      quote do
        unquote(var_name) :: unquote(merged_options)
      end

    {:ok, bin_pattern}
  end

  @doc """
  Add an implementation of a field to the packet structure being built in
  `env.module`.

  The field should be an implementation of the BinFormat.Field protocol.
  """
  def add_field(field, env) do
    # NOTE(review): the field is registered both here (at macro-expansion
    # time) and again in the quoted code below (when the generated code is
    # executed). Behavior preserved from the original — confirm the double
    # registration is intended.
    BinFormat.FieldServer.add_field(env.module, field)

    quote do
      BinFormat.FieldServer.add_field(unquote(env.module), unquote(field))
    end
  end

  # Prepends `prefix` to `name` and returns the result as a quoted variable
  # in `module`'s context. Shared by the struct- and binary-pattern builders.
  defp prefixed_var(name, prefix, module) do
    full_name = String.to_atom(prefix <> Atom.to_string(name))
    Macro.var(full_name, module)
  end
end
|
lib/bin_format/field_type/util.ex
| 0.761671
| 0.697442
|
util.ex
|
starcoder
|
defmodule Genex.Evolution do
  alias Genex.Tools.{Crossover, Selection}
  alias Genex.Support.{Genealogy, HallOfFame}
  alias Genex.Types.Population

  @moduledoc """
  Evolution behaviour definition for Evolutionary algorithms.

  Evolutions begin with `init` after the population has been created. The purpose of `init` is to define metrics, initiate a genealogy tree, and create a hall of fame. `init` is like the "constructor" of the evolution.
  """

  @doc """
  Initialization of the evolution.
  """
  @callback init(population :: Population.t(), opts :: Keyword.t()) :: {:ok, Population.t()}

  @doc """
  Evaluates `population` according to `fitness_fn`.
  """
  @callback evaluation(
              population :: Population.t(),
              opts :: Keyword.t()
            ) :: {:ok, Population.t()}

  @doc """
  Select parents for variation. Must populate `selected` field in `Population`.
  """
  @callback selection(population :: Population.t(), opts :: Keyword.t()) :: {:ok, Population.t()}

  @doc """
  Perform variation such as Crossover, Mutation, and Migration.
  """
  @callback variation(population :: Population.t(), opts :: Keyword.t()) :: {:ok, Population.t()}

  @doc """
  Recombine population.
  """
  @callback reinsertion(population :: Population.t(), opts :: Keyword.t()) ::
              {:ok, Population.t()}

  @doc """
  Perform housekeeping before next generation. Includes Gene Repair.
  """
  @callback transition(population :: Population.t(), opts :: Keyword.t()) :: {:ok, Population.t()}

  @doc """
  Termination of the evolution.
  """
  @callback termination(population :: Population.t(), opts :: Keyword.t()) :: Population.t()

  # Injects default implementations of all callbacks into the using module.
  # Every injected function is overridable (see `defoverridable` at the end).
  defmacro __using__(_) do
    quote do
      alias Genex.Types.Chromosome
      alias Genex.Types.Population

      @behaviour Genex.Evolution

      @spec init(Population.t(), Keyword.t()) :: {:ok, Population.t()}
      def init(population, opts \\ []) do
        # Set up the visualizer, a fresh genealogy tree, and the hall of fame.
        visualizer = Keyword.get(opts, :visualizer, Genex.Visualizer.Text)
        visualizer.init(opts)
        genealogy = Genealogy.new()
        HallOfFame.new()
        {:ok, %Population{population | genealogy: genealogy}}
      end

      # NOTE(review): the spec advertises a bare Population.t() return, but
      # both branches below return {:ok, Population.t()} — confirm which
      # shape callers expect.
      @spec evolve(
              Population.t(),
              (Population.t() -> boolean()),
              Keyword.t()
            ) :: Population.t()
      def evolve(population, terminate?, opts \\ []) do
        visualizer = Keyword.get(opts, :visualizer, Genex.Visualizer.Text)
        # Check if the population meets termination criteria
        if terminate?.(population) do
          {:ok, population}
        else
          # One generation: select -> vary -> reinsert -> transition ->
          # evaluate, then recurse until `terminate?` is satisfied.
          with {:ok, population} <- selection(population, opts),
               {:ok, population} <- variation(population, opts),
               {:ok, population} <- reinsertion(population, opts),
               {:ok, population} <- transition(population, opts),
               {:ok, population} <- evaluation(population, opts) do
            visualizer.display(population, opts)
            evolve(population, terminate?, opts)
          else
            # NOTE(review): `raise` on a non-exception term (e.g. an
            # {:error, reason} tuple) raises ArgumentError — confirm the
            # failure values here are exceptions or strings.
            err -> raise err
          end
        end
      end

      @spec evaluation(Population.t(), Keyword.t()) ::
              {:ok, Population.t()}
      def evaluation(population, opts \\ []) do
        # Score every chromosome as weights * fitness-function result, then
        # sort descending by fitness so the strongest chromosome is first.
        chromosomes =
          population.chromosomes
          |> Enum.map(fn c ->
            fitness = c.weights * c.f.(c)
            %Chromosome{c | fitness: fitness}
          end)
          |> Enum.sort_by(& &1.fitness, &>=/2)

        strongest = hd(chromosomes)
        max_fitness = strongest.fitness

        pop = %Population{
          population
          | chromosomes: chromosomes,
            strongest: strongest,
            max_fitness: max_fitness
        }

        {:ok, pop}
      end

      @spec selection(Population.t(), Keyword.t()) :: {:ok, Population.t()}
      def selection(population, opts \\ []) do
        strategy = Keyword.get(opts, :selection_type, Selection.natural())
        rate = Keyword.get(opts, :selection_rate, 0.8)
        do_selection(population, strategy, rate)
      end

      @spec crossover(Population.t(), Keyword.t()) :: {:ok, Population.t()}
      def crossover(population, opts \\ []) do
        strategy = Keyword.get(opts, :crossover_type, Crossover.single_point())
        do_crossover(population, strategy)
      end

      @spec mutation(Population.t(), Keyword.t()) :: {:ok, Population.t()}
      def mutation(population, opts \\ []) do
        strategy = Keyword.get(opts, :mutation_type, :none)
        rate = Keyword.get(opts, :mutation_rate, 0.05)
        do_mutation(population, strategy, rate)
      end

      @spec reinsertion(Population.t(), Keyword.t()) :: {:ok, Population.t()}
      def reinsertion(population, opts \\ []) do
        selection_rate = Keyword.get(opts, :selection_rate, 0.8)
        survival_rate = Keyword.get(opts, :survival_rate, Float.round(1.0 - selection_rate, 1))
        strategy = Keyword.get(opts, :survival_type, :natural)

        # Survivor strategies may arrive as a 1..4-element tuple of
        # {fun_name, extra_args...}; a bare value is normalized to a 1-tuple.
        survivors =
          case strategy do
            {_} -> do_survivor_selection(population, strategy, survival_rate)
            {_, _} -> do_survivor_selection(population, strategy, survival_rate)
            {_, _, _} -> do_survivor_selection(population, strategy, survival_rate)
            {_, _, _, _} -> do_survivor_selection(population, strategy, survival_rate)
            _ -> do_survivor_selection(population, {strategy}, survival_rate)
          end

        new_chromosomes = population.children ++ survivors

        pop = %Population{
          population
          | size: length(new_chromosomes),
            chromosomes: new_chromosomes
        }

        {:ok, pop}
      end

      @spec transition(Population.t(), Keyword.t()) :: {:ok, Population.t()} | {:error, any()}
      def transition(population, opts \\ []) do
        # Housekeeping between generations: bump ages, repair genes, archive
        # the generation in the hall of fame, and clear transient fields.
        generation = population.generation + 1
        size = length(population.chromosomes)

        chromosomes =
          population.chromosomes
          |> do_update_ages()
          |> do_repair_chromosomes()

        HallOfFame.add(population)

        pop = %Population{
          population
          | chromosomes: chromosomes,
            size: size,
            generation: generation,
            selected: nil,
            children: nil,
            survivors: nil
        }

        {:ok, pop}
      end

      @spec termination(Population.t(), Keyword.t()) :: Population.t()
      def termination(population, opts \\ []) do
        # Final bookkeeping: sort by fitness, record the strongest, archive.
        chromosomes =
          population.chromosomes
          |> Enum.sort_by(& &1.fitness, &>=/2)

        strongest = hd(chromosomes)
        max_fitness = strongest.fitness
        HallOfFame.add(population)

        %Population{
          population
          | chromosomes: chromosomes,
            strongest: strongest,
            max_fitness: max_fitness
        }
      end

      # Picks `rate * size` chromosomes using `strategy`. `rate` may be a
      # number or a function of the population.
      defp do_selection(population, strategy, rate) do
        chromosomes = population.chromosomes

        n =
          if is_function(rate) do
            floor(rate.(population) * population.size)
          else
            floor(rate * population.size)
          end

        selected = strategy.(chromosomes, n)
        pop = %Population{population | selected: selected}
        {:ok, pop}
      end

      defp do_crossover(population, :none), do: {:ok, population}

      defp do_crossover(population, strategy) do
        # Pair off selected parents (an odd leftover is discarded), produce
        # two children per pair, and record both in the genealogy tree.
        parents = population.selected

        starting_children =
          case population.children do
            nil -> []
            chd -> chd
          end

        {children, genealogy} =
          parents
          |> Enum.chunk_every(2, 2, :discard)
          |> Enum.map(fn f -> List.to_tuple(f) end)
          |> Enum.reduce(
            {starting_children, population.genealogy},
            fn {p1, p2}, {chd, his} ->
              {c1, c2} = strategy.(p1, p2)

              new_his =
                his
                |> Genealogy.update(c1, p1, p2)
                |> Genealogy.update(c2, p1, p2)

              {[c1 | [c2 | chd]], new_his}
            end
          )

        pop = %Population{population | children: children, genealogy: genealogy, selected: nil}
        {:ok, pop}
      end

      # NOTE(review): `rate` is unused in this clause (compiler warning);
      # prefixing with `_` would silence it.
      defp do_mutation(population, :none, rate), do: {:ok, population}

      defp do_mutation(population, strategy, rate) do
        # Mutate each candidate with probability `u`, tracking mutants in the
        # genealogy. Operates on `selected` when present, otherwise on all
        # chromosomes.
        u = if is_function(rate), do: rate.(population), else: rate

        mutate =
          case population.selected do
            nil -> population.chromosomes
            _ -> population.selected
          end

        {mutants, new_his} =
          mutate
          |> Enum.reduce(
            {[], population.genealogy},
            fn c, {mut, his} ->
              if :rand.uniform() < u do
                mutant = strategy.(c)
                new_his = Genealogy.update(his, c, mutant)
                {[mutant | mut], new_his}
              else
                {[c | mut], his}
              end
            end
          )

        pop =
          case population.selected do
            nil -> %Population{population | chromosomes: mutants, genealogy: new_his}
            _ -> %Population{population | children: mutants, selected: nil, genealogy: new_his}
          end

        {:ok, pop}
      end

      defp do_survivor_selection(population, strategy, rate) do
        chromosomes = population.chromosomes

        n =
          if is_function(rate) do
            floor(rate.(population) * length(chromosomes))
          else
            floor(rate * length(chromosomes))
          end

        # A function strategy is called directly; a tuple strategy is
        # dispatched as {fun_name, extra_args...} onto the Selection module.
        survivors =
          if is_function(strategy) do
            strategy.(chromosomes, n)
          else
            [f | args] = Tuple.to_list(strategy)
            apply(Selection, f, [chromosomes, n] ++ args)
          end

        survivors
      end

      defp do_update_ages(chromosomes) do
        chromosomes
        |> Enum.map(fn c -> %Chromosome{c | age: c.age + 1} end)
      end

      # Re-applies each chromosome's collection constructor to its genes.
      # NOTE(review): assumes `c.collection` is a zero-arity fun returning a
      # one-arity repair fun — confirm against the Chromosome definition.
      defp do_repair_chromosomes(chromosomes) do
        chromosomes
        |> Enum.map(fn c -> %Chromosome{c | genes: c.collection.().(c.genes)} end)
      end

      defoverridable init: 2,
                     transition: 2,
                     evaluation: 2,
                     crossover: 2,
                     mutation: 2,
                     selection: 2,
                     evolve: 3,
                     termination: 2
    end
  end
end
|
lib/genex/evolution.ex
| 0.869271
| 0.631296
|
evolution.ex
|
starcoder
|
defmodule ExUssd.Nav do
  @moduledoc """
  USSD Nav module
  """

  @type t :: %__MODULE__{
          name: String.t(),
          match: String.t(),
          type: atom(),
          orientation: atom(),
          delimiter: String.t(),
          reverse: boolean(),
          top: integer(),
          bottom: integer(),
          right: integer(),
          left: integer(),
          show: boolean()
        }

  @enforce_keys [:name, :match, :type]

  defstruct [
    :name,
    :match,
    :type,
    orientation: :horizontal,
    delimiter: ":",
    reverse: false,
    top: 0,
    bottom: 0,
    right: 0,
    left: 0,
    show: true
  ]

  # Keys accepted by new/1; anything else in opts is silently dropped.
  @allowed_fields [
    :type,
    :name,
    :match,
    :delimiter,
    :orientation,
    :reverse,
    :top,
    :bottom,
    :right,
    :left,
    :show
  ]

  @doc """
  Creates a new ExUssd Nav menu.

  - **`:type`** - The type of the menu. ExUssd supports 3 types of nav
     - :home - Go back to the initial menu
     - :back - Go back to the previous menu
     - :next - Go to the nested menu
  - **`:name`** - The name of the nav.
  - **`:match`** - The match string to match the nav. example when the user enters "0" for `:back` type, the match string is "0"
  - **`:delimiter`** - The delimiter to split the match string. default is ":"
  - **`:orientation`** - The orientation of the nav. default is :horizontal
  - **`:reverse`** - Reverse the order of the nav. default is false
  - **`:top`** - The top position of the nav. default is 0
  - **`:bottom`** - The bottom position of the nav. default is 0
  - **`:right`** - The right position of the nav. default is 0
  - **`:left`** - The left position of the nav. default is 0
  - **`:show`** - Show the nav. default is true. if false, the nav will not be shown in the menu

  ## Example

  ```elixir
  iex> menu = ExUssd.new(name: "home")
  iex> ExUssd.set(menu, nav: Nav.new(type: :next, name: "MORE", match: "98"))

  iex> menu = ExUssd.new(name: "home")
  iex> ExUssd.set(menu, nav: [
        ExUssd.Nav.new(type: :home, name: "HOME", match: "00", reverse: true, orientation: :vertical)
        ExUssd.Nav.new(type: :back, name: "BACK", match: "0", right: 1),
        ExUssd.Nav.new(type: :next, name: "MORE", match: "98")
      ])
  ```
  """
  @spec new(keyword()) :: %ExUssd.Nav{}
  def new(opts) do
    # Only :home, :next and :back are valid nav types; anything else raises.
    if Keyword.get(opts, :type) in [:home, :next, :back] do
      struct!(__MODULE__, Keyword.take(opts, @allowed_fields))
    else
      raise %ArgumentError{message: "Invalid USSD navigation type: #{Keyword.get(opts, :type)}"}
    end
  end

  @doc """
  Convert the USSD navigation menu to string

  ## Example

  ```elixir
  iex> Nav.new(type: :next, name: "MORE", match: "98") |> Nav.to_string()
  "MORE:98"

  iex> nav = [
        ExUssd.Nav.new(type: :home, name: "HOME", match: "00", reverse: true, orientation: :vertical),
        ExUssd.Nav.new(type: :back, name: "BACK", match: "0", right: 1),
        ExUssd.Nav.new(type: :next, name: "MORE", match: "98")
      ]
  iex> ExUssd.Nav.to_string(nav)
  "HOME:00
  BACK:0 MORE:98"
  ```
  """
  @spec to_string([ExUssd.Nav.t()]) :: String.t()
  def to_string(nav) when is_list(nav) do
    # NOTE(review): the 4th argument (`max`) is `%{}` here, which flows into
    # `max + 1` in the struct clause below and would raise ArithmeticError —
    # confirm this entry point is exercised / that a numeric max was intended.
    to_string(nav, 1, Enum.map(1..10, & &1), %{}, 1, :vertical)
  end

  # Renders a list of navs by folding each one through reduce_nav/8, then
  # ensures the rendered block starts on its own line with no trailing
  # whitespace.
  def to_string(navs, depth, menu_list, max, level, orientation) when is_list(navs) do
    nav =
      navs
      |> Enum.reduce("", &reduce_nav(&1, &2, navs, menu_list, depth, max, level, orientation))
      |> String.trim_trailing()

    if String.starts_with?(nav, "\n") do
      nav
    else
      String.pad_leading(nav, String.length(nav) + 1, "\n")
      |> String.trim_trailing()
    end
  end

  # Renders a single nav struct. The anonymous `fun` below encodes visibility
  # rules (clause order matters): hidden navs, and navs that make no sense for
  # the current depth/level/orientation (e.g. :back or :home on the first
  # screen, :next when there is no next page) render as "". Otherwise the
  # label is "name<delimiter>match", or "match<delimiter>name" when reversed.
  def to_string(
        %ExUssd.Nav{} = nav,
        depth \\ 2,
        opts \\ %{},
        level \\ 1,
        orientation \\ :vertical
      ) do
    menu_list = Map.get(opts, :menu_list, [1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
    max = Map.get(opts, :max, 0)
    has_next = Enum.at(menu_list, max + 1)

    fun = fn
      _, %ExUssd.Nav{show: false} ->
        ""

      %{orientation: :vertical, depth: 1, has_next: nil, level: 1}, _nav ->
        ""

      %{orientation: :vertical, depth: 1, level: 1}, %ExUssd.Nav{type: :back} ->
        ""

      %{orientation: :vertical, depth: 1, level: 1}, %ExUssd.Nav{type: :home} ->
        ""

      %{orientation: :vertical, has_next: nil}, %ExUssd.Nav{type: :next} ->
        ""

      %{orientation: :horizontal, depth: 1, level: 1}, %ExUssd.Nav{type: :back} ->
        ""

      %{orientation: :horizontal, level: 1}, %ExUssd.Nav{type: :home} ->
        ""

      %{orientation: :horizontal, depth: depth, menu_length: menu_length},
      %ExUssd.Nav{type: :next}
      when depth >= menu_length ->
        ""

      _, %ExUssd.Nav{name: name, delimiter: delimiter, match: match, reverse: true} ->
        "#{match}#{delimiter}#{name}"

      _, %ExUssd.Nav{name: name, delimiter: delimiter, match: match} ->
        "#{name}#{delimiter}#{match}"
    end

    navigation =
      apply(fun, [
        %{
          orientation: orientation,
          depth: depth,
          has_next: has_next,
          level: level,
          max: max,
          menu_length: length(menu_list)
        },
        nav
      ])

    # Apply the nav's whitespace padding only when something was rendered.
    if String.equivalent?(navigation, "") do
      navigation
    else
      navigation
      |> padding(:left, nav)
      |> padding(:right, nav)
      |> padding(:top, nav)
      |> padding(:bottom, nav)
    end
  end

  # Pads the rendered label per the nav's top/bottom/left/right settings:
  # vertical navs get newline padding, horizontal navs get space padding.
  @spec padding(String.t(), atom(), ExUssd.Nav.t()) :: String.t()
  defp padding(string, direction, nav)

  defp padding(string, :left, %ExUssd.Nav{left: amount}) do
    String.pad_leading(string, String.length(string) + amount)
  end

  defp padding(string, :right, %ExUssd.Nav{orientation: :horizontal, right: amount}) do
    String.pad_trailing(string, String.length(string) + amount)
  end

  defp padding(string, :right, %ExUssd.Nav{orientation: :vertical}), do: string

  defp padding(string, :top, %ExUssd.Nav{orientation: :vertical, top: amount}) do
    # Vertical navs always start on a new line, plus `amount` extra newlines.
    padding = String.duplicate("\n", 1 + amount)
    IO.iodata_to_binary([padding, string])
  end

  defp padding(string, :top, %ExUssd.Nav{top: amount}) do
    padding = String.duplicate("\n", amount)
    IO.iodata_to_binary([padding, string])
  end

  defp padding(string, :bottom, %ExUssd.Nav{orientation: :vertical, bottom: amount}) do
    padding = String.duplicate("\n", 1 + amount)
    IO.iodata_to_binary([string, padding])
  end

  defp padding(string, :bottom, %ExUssd.Nav{orientation: :horizontal}), do: string

  # Renders the member of `nav` whose type matches the current element and
  # appends it to the accumulator.
  defp reduce_nav(%{type: type}, acc, nav, menu_list, depth, max, level, orientation) do
    navigation =
      to_string(
        Enum.find(nav, &(&1.type == type)),
        depth,
        %{max: max, menu_list: menu_list},
        level,
        orientation
      )

    IO.iodata_to_binary([acc, navigation])
  end
end
|
lib/ex_ussd/nav.ex
| 0.904961
| 0.796688
|
nav.ex
|
starcoder
|
defmodule JsonJanitor do
  @moduledoc """
  `JsonJanitor` helps sanitize elixir terms so that they can be serialized to
  JSON.

  It makes some opinionated decisions to keep as much structure as possible
  while guaranteeing that the resulting structure can be serialized to JSON.
  """

  @doc """
  Accepts anything and returns a structure that can be serialized to JSON.

  This is useful when you are not sure what a function may receive but you
  know the result must serialize to JSON.

  One example is a FallbackController in a Phoenix application: if a fallback
  payload is not serializable, returning it produces an internal server error
  instead of the intended response. Running the payload through
  `JsonJanitor.sanitize/1` first guarantees it will always send properly.

  Another example is reporting errors to an external service such as Sentry.
  If the attached metadata cannot be serialized, the reporting call itself can
  fail. `JsonJanitor.sanitize/1` guarantees the metadata will serialize.

  ## Examples

      iex> JsonJanitor.sanitize([:ok, <<128>>])
      [":ok", "<<128>>"]

  Tuples are converted to lists.

      iex> JsonJanitor.sanitize({'cat'})
      ['cat']

  Keyword lists are converted to maps.

      iex> JsonJanitor.sanitize([option: 42])
      %{option: 42}

  Atoms are converted to strings unless they are keys of maps.

      iex> JsonJanitor.sanitize(%{horse: :dog})
      %{horse: ":dog"}

  Map keys are converted to binary strings even if they are complex.

      iex> JsonJanitor.sanitize(%{%{cat: 3} => {}})
      %{"%{cat: 3}" => []}

  Map keys that are atoms are not converted to strings. They are left as atoms.

      iex> JsonJanitor.sanitize(%{cat: 3})
      %{cat: 3}

  Binaries which cannot be printed are converted to strings of the raw bit
  data.

      iex> JsonJanitor.sanitize(<<128>>)
      "<<128>>"

  Structs are converted to maps and then their struct type is added to the map
  in the `:struct_type` key.

      iex> JsonJanitor.sanitize(%TestStruct{})
      %{struct_type: "TestStruct", purpose: "to purr", sound: "meow"}

  nil values are left as nil.

      iex> JsonJanitor.sanitize(nil)
      nil
  """
  @spec sanitize(term()) :: term()
  # Structs: flatten to a plain map, record the struct module under
  # :struct_type, then sanitize as an ordinary map.
  def sanitize(%_{} = struct) do
    struct
    |> Map.from_struct()
    |> Map.put(:struct_type, struct.__struct__)
    |> sanitize()
  end

  def sanitize(map) when is_map(map) do
    Map.new(map, fn {key, value} -> {sanitized_key_for_map(key), sanitize(value)} end)
  end

  # nil and [] are handled before the atom/list clauses so they pass through
  # unchanged (nil is an atom; [] is keyword-shaped and would become %{}).
  def sanitize(nil), do: nil
  def sanitize([]), do: []

  def sanitize(list) when is_list(list) do
    case Keyword.keyword?(list) do
      true -> list |> Map.new() |> sanitize()
      false -> Enum.map(list, &sanitize/1)
    end
  end

  def sanitize(ref) when is_reference(ref), do: inspect(ref)

  def sanitize(tuple) when is_tuple(tuple) do
    tuple |> Tuple.to_list() |> sanitize()
  end

  def sanitize(binary) when is_binary(binary), do: printable_or_inspected(binary)

  def sanitize(atom) when is_atom(atom), do: inspect(atom)
  def sanitize(other), do: other

  # Map keys: printable binaries and atoms survive as-is; everything else
  # becomes its inspected string form.
  defp sanitized_key_for_map(key) when is_binary(key), do: printable_or_inspected(key)
  defp sanitized_key_for_map(key) when is_atom(key), do: key
  defp sanitized_key_for_map(key), do: inspect(key)

  # Printable binaries pass through untouched; unprintable ones become their
  # inspected `<<...>>` form.
  defp printable_or_inspected(binary) do
    if String.printable?(binary), do: binary, else: inspect(binary)
  end
end
|
lib/json_janitor.ex
| 0.834441
| 0.580382
|
json_janitor.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.