code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Bandit.HTTP2.Adapter do
  @moduledoc false

  # Backs a `Plug.Conn` with an HTTP/2 stream. All frame-level work is
  # delegated to the connection process (a GenServer) referenced by
  # `connection`; this struct only carries per-stream state.

  @behaviour Plug.Conn.Adapter

  defstruct connection: nil, peer: nil, stream_id: nil, end_stream: false, uri: nil

  @typedoc "A struct for backing a Plug.Conn.Adapter"
  @type t :: %__MODULE__{
          connection: Bandit.HTTP2.Connection.t(),
          peer: Plug.Conn.Adapter.peer_data(),
          stream_id: Bandit.HTTP2.Stream.stream_id(),
          end_stream: boolean(),
          uri: URI.t()
        }

  @impl Plug.Conn.Adapter
  # Reading the body after the stream has ended is a caller error
  def read_req_body(%__MODULE__{end_stream: true}, _opts), do: raise(Bandit.BodyAlreadyReadError)

  def read_req_body(%__MODULE__{} = adapter, opts) do
    timeout = Keyword.get(opts, :read_timeout, 15_000)
    length = Keyword.get(opts, :length, 8_000_000)

    # Body chunks arrive as messages sent to this process. Accumulate them by
    # prepending (reversing once at the end) until the stream ends, `length`
    # would be exceeded, or no message arrives within `timeout`.
    Stream.repeatedly(fn ->
      receive do
        msg -> msg
      after
        timeout -> :timeout
      end
    end)
    |> Enum.reduce_while([], fn
      {:data, data}, acc ->
        if byte_size(data) + IO.iodata_length(acc) <= length do
          {:cont, [data | acc]}
        else
          {:halt, {:more, [data | acc], adapter}}
        end

      :end_stream, acc ->
        {:halt, {:ok, acc, %{adapter | end_stream: true}}}

      :timeout, acc ->
        {:halt, {:more, acc, adapter}}
    end)
    |> case do
      {:ok, body, adapter} -> {:ok, body |> Enum.reverse() |> IO.iodata_to_binary(), adapter}
      {:more, body, adapter} -> {:more, body |> Enum.reverse() |> IO.iodata_to_binary(), adapter}
    end
  end

  @impl Plug.Conn.Adapter
  def send_resp(%__MODULE__{} = adapter, status, headers, body) do
    # An empty body can be collapsed into the HEADERS frame via end_stream
    if IO.iodata_length(body) == 0 do
      send_headers(adapter, status, headers, true)
    else
      send_headers(adapter, status, headers, false)
      send_data(adapter, body, true)
    end

    {:ok, nil, adapter}
  end

  @impl Plug.Conn.Adapter
  def send_file(%__MODULE__{} = adapter, status, headers, path, offset, length) do
    %File.Stat{type: :regular, size: size} = File.stat!(path)
    length = if length == :all, do: size - offset, else: length

    cond do
      offset == 0 && length == size ->
        # Whole-file case: stream the file in small chunks so we never hold
        # the entire file in memory, then send an empty chunk to end the
        # stream (see the note on chunk/2 below)
        send_chunked(adapter, status, headers)

        _ =
          File.stream!(path, [], 2048)
          |> Enum.reduce(adapter, fn chunk, adapter ->
            chunk(adapter, chunk)
          end)

        chunk(adapter, "")
        {:ok, nil, adapter}

      offset + length <= size ->
        # Partial-file case. Bug fix: the original condition was
        # `offset + length < size`, which incorrectly raised for valid
        # ranges ending exactly at EOF (offset > 0 with
        # offset + length == size). `<=` accepts those ranges.
        with {:ok, fd} <- :file.open(path, [:raw, :binary]) do
          try do
            with {:ok, data} <- :file.pread(fd, offset, length) do
              send_headers(adapter, status, headers, false)
              send_data(adapter, data, true)
              {:ok, nil, adapter}
            end
          after
            # Bug fix: the original never closed the descriptor, leaking an
            # fd on every ranged send_file call
            _ = :file.close(fd)
          end
        end

      true ->
        raise "Cannot read #{length} bytes starting at #{offset} as #{path} is only #{size} octets in length"
    end
  end

  @impl Plug.Conn.Adapter
  def send_chunked(%__MODULE__{} = adapter, status, headers) do
    send_headers(adapter, status, headers, false)
    {:ok, nil, adapter}
  end

  @impl Plug.Conn.Adapter
  def chunk(%__MODULE__{} = adapter, chunk) do
    # Sending an empty chunk implicitly ends the stream. This is a bit of an undefined corner of
    # the Plug.Conn.Adapter behaviour (see https://github.com/elixir-plug/plug/pull/535 for
    # details) and closing the stream here carves closest to the underlying HTTP/1.1 behaviour
    # (RFC7230§4.1). The whole notion of chunked encoding is moot in HTTP/2 anyway (RFC7540§8.1)
    # so this entire section of the API is a bit slanty regardless.
    send_data(adapter, chunk, chunk == <<>>)
    :ok
  end

  @impl Plug.Conn.Adapter
  def inform(adapter, status, headers) do
    headers = split_cookies(headers)
    headers = [{":status", to_string(status)} | headers]
    GenServer.call(adapter.connection, {:send_headers, adapter.stream_id, headers, false})
  end

  @impl Plug.Conn.Adapter
  def push(adapter, path, headers) do
    # Build the pseudo-headers for a PUSH_PROMISE; prepends mean :method ends
    # up first, as pseudo-headers must precede regular fields
    headers = split_cookies(headers)
    headers = [{":path", path} | headers]
    headers = [{":authority", adapter.uri.authority} | headers]
    headers = [{":scheme", adapter.uri.scheme} | headers]
    headers = [{":method", "GET"} | headers]
    GenServer.call(adapter.connection, {:send_push, adapter.stream_id, headers})
  end

  @impl Plug.Conn.Adapter
  def get_peer_data(%__MODULE__{peer: peer}), do: peer

  @impl Plug.Conn.Adapter
  def get_http_protocol(%__MODULE__{}), do: :"HTTP/2"

  defp send_headers(adapter, status, headers, end_stream) do
    headers = split_cookies(headers)
    headers = [{":status", to_string(status)} | headers]
    GenServer.call(adapter.connection, {:send_headers, adapter.stream_id, headers, end_stream})
  end

  defp send_data(adapter, data, end_stream) do
    # Data sends may block on HTTP/2 flow control, so wait indefinitely
    GenServer.call(
      adapter.connection,
      {:send_data, adapter.stream_id, data, end_stream},
      :infinity
    )
  end

  # HTTP/2 prefers cookie pairs split into separate header fields for better
  # HPACK compression (RFC7540§8.1.2.5)
  defp split_cookies(headers) do
    headers
    |> Enum.flat_map(fn
      {"cookie", cookie} ->
        cookie |> String.split("; ") |> Enum.map(fn crumb -> {"cookie", crumb} end)

      {header, value} ->
        [{header, value}]
    end)
  end
end
|
lib/bandit/http2/adapter.ex
| 0.855565
| 0.400192
|
adapter.ex
|
starcoder
|
defmodule Accent.Scopes.Operation do
  @moduledoc """
  Composable `Ecto.Query` scopes for filtering and ordering operations.

  Each scope returns the query unchanged when given a `nil` (or otherwise
  inapplicable) argument, so they can be chained unconditionally.
  """

  import Ecto.Query, only: [from: 2]

  @doc """
  Filters operations by the user that performed them. A `nil` user id leaves
  the query untouched.

  ## Examples

      iex> Accent.Scopes.Operation.filter_from_user(Accent.Operation, nil)
      Accent.Operation
      iex> Accent.Scopes.Operation.filter_from_user(Accent.Operation, "test")
      #Ecto.Query<from o0 in Accent.Operation, where: o0.user_id == ^"test">
  """
  @spec filter_from_user(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
  def filter_from_user(query, nil), do: query

  def filter_from_user(query, user_id) do
    from(query, where: [user_id: ^user_id])
  end

  @doc """
  Filters operations belonging to a project, either directly through the
  operation's `project_id` or indirectly through its revision's project.

  ## Examples

      iex> Accent.Scopes.Operation.filter_from_project(Accent.Operation, nil)
      Accent.Operation
      iex> Accent.Scopes.Operation.filter_from_project(Accent.Operation, "test")
      #Ecto.Query<from o0 in Accent.Operation, left_join: r1 in assoc(o0, :revision), where: r1.project_id == ^"test" or o0.project_id == ^"test">
  """
  @spec filter_from_project(Ecto.Queryable.t(), String.t() | nil) :: Ecto.Queryable.t()
  def filter_from_project(query, nil), do: query

  def filter_from_project(query, project_id) do
    # left_join keeps operations without a revision; those match via their own project_id
    from(o in query, left_join: r in assoc(o, :revision), where: r.project_id == ^project_id or o.project_id == ^project_id)
  end

  @doc """
  Filters operations by action name (e.g. "sync", "update"). A `nil` action
  leaves the query untouched.

  ## Examples

      iex> Accent.Scopes.Operation.filter_from_action(Accent.Operation, nil)
      Accent.Operation
      iex> Accent.Scopes.Operation.filter_from_action(Accent.Operation, "test")
      #Ecto.Query<from o0 in Accent.Operation, where: o0.action == ^"test">
  """
  @spec filter_from_action(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
  def filter_from_action(query, nil), do: query

  def filter_from_action(query, action) do
    from(query, where: [action: ^action])
  end

  @doc """
  Filters operations by their batch flag. Only applies when given a boolean;
  any non-boolean (including `nil`) leaves the query untouched. Operations
  that belong to a batch operation are always excluded.

  ## Examples

      iex> Accent.Scopes.Operation.filter_from_batch(Accent.Operation, nil)
      Accent.Operation
      iex> Accent.Scopes.Operation.filter_from_batch(Accent.Operation, "test")
      Accent.Operation
      iex> Accent.Scopes.Operation.filter_from_batch(Accent.Operation, true)
      #Ecto.Query<from o0 in Accent.Operation, where: o0.batch == ^true, where: is_nil(o0.batch_operation_id)>
  """
  @spec filter_from_batch(Ecto.Queryable.t(), any()) :: Ecto.Queryable.t()
  def filter_from_batch(query, nil), do: query
  def filter_from_batch(query, batch) when not is_boolean(batch), do: query

  def filter_from_batch(query, batch) do
    from(operations in query, where: [batch: ^batch], where: is_nil(operations.batch_operation_id))
  end

  @doc """
  Orders operations newest-first, with non-batch operations before batch ones
  on ties.

  ## Examples

      iex> Accent.Scopes.Operation.order_last_to_first(Accent.Operation)
      #Ecto.Query<from o0 in Accent.Operation, order_by: [desc: o0.inserted_at, asc: o0.batch]>
  """
  @spec order_last_to_first(Ecto.Queryable.t()) :: Ecto.Queryable.t()
  def order_last_to_first(query) do
    from(query, order_by: [desc: :inserted_at, asc: :batch])
  end

  @doc """
  When neither an action filter nor a batch filter is supplied, excludes
  operations that are part of a batch; otherwise leaves the query untouched
  (the caller's other filters take precedence).

  ## Examples

      iex> Accent.Scopes.Operation.ignore_actions(Accent.Operation, "action", nil)
      Accent.Operation
      iex> Accent.Scopes.Operation.ignore_actions(Accent.Operation, nil, true)
      Accent.Operation
      iex> Accent.Scopes.Operation.ignore_actions(Accent.Operation, nil, nil)
      #Ecto.Query<from o0 in Accent.Operation, where: is_nil(o0.batch_operation_id)>
  """
  @spec ignore_actions(Ecto.Queryable.t(), any(), any()) :: Ecto.Queryable.t()
  def ignore_actions(query, nil, nil) do
    from(o in query, where: is_nil(o.batch_operation_id))
  end

  def ignore_actions(query, _action_argument, _batch_argument), do: query
end
|
lib/accent/scopes/operation.ex
| 0.735167
| 0.415462
|
operation.ex
|
starcoder
|
defmodule Finch.Telemetry do
  @moduledoc """
  Telemetry integration.

  Unless specified, all times are in `:native` units.

  Finch executes the following events:

  * `[:finch, :queue, :start]` - Executed before checking out a connection from the pool.

    #### Measurements

    * `:system_time` - The system time

    #### Metadata:

    * `:pool` - The pool's pid
    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.

  * `[:finch, :queue, :stop]` - Executed after a connection is retrieved from the pool.

    #### Measurements

    * `:duration` - Duration to check out a pool connection.
    * `:idle_time` - Elapsed time since the connection was last checked in or initialized.

    #### Metadata

    * `:pool` - The pool's pid
    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.

  * `[:finch, :queue, :exception]` - Executed if checking out a connection throws an exception.

    #### Measurements

    * `:duration` - The time it took before raising an exception

    #### Metadata

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.
    * `:kind` - The type of exception.
    * `:error` - Error description or error data.
    * `:stacktrace` - The stacktrace

  * `[:finch, :connect, :start]` - Executed before opening a new connection.
    If a connection is being re-used this event will *not* be executed.

    #### Measurements

    * `:system_time` - The system time

    #### Metadata

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.

  * `[:finch, :connect, :stop]` - Executed after a connection is opened.

    #### Measurements

    * `:duration` - Duration to connect to the host.

    #### Metadata:

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.
    * `:error` - This value is optional. It includes any errors that occurred while opening the connection.

  * `[:finch, :request, :start]` - Executed before sending a request.

    #### Measurements:

    * `:system_time` - The system time
    * `:idle_time` - Elapsed time since the connection was last checked in or initialized.

    #### Metadata:

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.
    * `:path` - The request path.
    * `:method` - The request method.

  * `[:finch, :request, :stop]` - Executed after a request is finished.

    #### Measurements:

    * `:duration` - Duration to make the request.
    * `:idle_time` - Elapsed time since the connection was last checked in or initialized.

    #### Metadata:

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.
    * `:path` - The request path.
    * `:method` - The request method.
    * `:error` - This value is optional. It includes any errors that occurred while making the request.

  * `[:finch, :response, :start]` - Executed before receiving the response.

    #### Measurements:

    * `:system_time` - The system time
    * `:idle_time` - Elapsed time since the connection was last checked in or initialized.

    #### Metadata:

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.
    * `:path` - The request path.
    * `:method` - The request method.

  * `[:finch, :response, :stop]` - Executed after a response has been fully received.

    #### Measurements:

    * `:duration` - Duration to receive the response.
    * `:idle_time` - Elapsed time since the connection was last checked in or initialized.

    #### Metadata:

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.
    * `:path` - The request path.
    * `:method` - The request method.
    * `:error` - This value is optional. It includes any errors that occurred while receiving the response.

  * `[:finch, :reused_connection]` - Executed if an existing connection is reused. There are no measurements provided with this event.

    #### Metadata:

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.

  * `[:finch, :max_idle_time_exceeded]` - Executed if a connection was discarded because the max_idle_time had been reached.

    #### Measurements:

    * `:idle_time` - Elapsed time since the connection was last checked in or initialized.

    #### Metadata

    * `:scheme` - The scheme used in the connection. either `http` or `https`
    * `:host` - The host address
    * `:port` - the port to connect on.
  """

  @doc false
  # Emits a `start` telemetry event and returns the monotonic start time,
  # suitable for later passing to `stop/4` or `exception/7`. The timestamp is
  # captured *before* handlers run so handler latency is not charged to the
  # measured span.
  def start(event, meta \\ %{}, extra_measurements \\ %{}) do
    started_at = System.monotonic_time()
    measurements = Map.put(extra_measurements, :system_time, System.system_time())
    :telemetry.execute([:finch, event, :start], measurements, meta)
    started_at
  end

  @doc false
  # Emits a `stop` event carrying the elapsed duration since `start_time`.
  def stop(event, start_time, meta \\ %{}, extra_measurements \\ %{}) do
    duration = System.monotonic_time() - start_time
    measurements = Map.put(extra_measurements, :duration, duration)
    :telemetry.execute([:finch, event, :stop], measurements, meta)
  end

  @doc false
  # Emits an `exception` event, enriching the metadata with the exception
  # kind, reason, and stacktrace.
  def exception(event, start_time, kind, reason, stack, meta \\ %{}, extra_measurements \\ %{}) do
    duration = System.monotonic_time() - start_time
    measurements = Map.put(extra_measurements, :duration, duration)
    meta = Map.merge(meta, %{kind: kind, error: reason, stacktrace: stack})
    :telemetry.execute([:finch, event, :exception], measurements, meta)
  end

  @doc false
  # Used for reporting generic (non start/stop/exception) events.
  def event(event, measurements, meta) do
    :telemetry.execute([:finch, event], measurements, meta)
  end
end
|
lib/finch/telemetry.ex
| 0.867008
| 0.75401
|
telemetry.ex
|
starcoder
|
defmodule DatabaseAssertions do
  @moduledoc """
  ExUnit assertion helpers for verifying database state: direct permission
  and role grants, rule persistence/enablement, and group membership.
  """

  alias Cog.Repo

  import ExUnit.Assertions

  alias Cog.Models.Permission
  alias Cog.Models.Role

  @doc """
  Asserts that each given permission is directly associated to the
  grantee in question.

  Does NOT test permissions associated by recursive group membership.

  Example:

      assert_permission_is_granted(user, permission)
      assert_permission_is_granted(user, [perm1, perm2])
  """
  def assert_permission_is_granted(grantee, %Permission{} = permission),
    do: assert_permission_is_granted(grantee, [permission])

  def assert_permission_is_granted(grantee, permissions) when is_list(permissions) do
    %{permissions: granted} = Repo.preload(grantee, :permissions)
    Enum.each(permissions, fn permission -> assert permission in granted end)
  end

  @doc """
  Asserts that each given permission is NOT directly associated to the
  grantee in question.

  Does NOT test permissions associated by recursive group membership.

  Example:

      refute_permission_is_granted(user, permission)
      refute_permission_is_granted(user, [perm1, perm2])
  """
  def refute_permission_is_granted(grantee, %Permission{} = permission),
    do: refute_permission_is_granted(grantee, [permission])

  def refute_permission_is_granted(grantee, permissions) when is_list(permissions) do
    %{permissions: granted} = Repo.preload(grantee, :permissions)
    Enum.each(permissions, fn permission -> refute permission in granted end)
  end

  # Role comparisons go by id rather than full struct equality, since
  # preloaded structs may differ in association state.
  def assert_role_is_granted(grantee, %Role{} = role),
    do: assert_role_is_granted(grantee, [role])

  def assert_role_is_granted(grantee, roles) when is_list(roles) do
    %{roles: granted} = Repo.preload(grantee, :roles)
    granted_ids = Enum.map(granted, & &1.id)
    for role <- roles, do: assert(role.id in granted_ids)
  end

  def refute_role_is_granted(grantee, %Role{} = role),
    do: refute_role_is_granted(grantee, [role])

  def refute_role_is_granted(grantee, roles) when is_list(roles) do
    %{roles: granted} = Repo.preload(grantee, :roles)
    granted_ids = Enum.map(granted, & &1.id)
    for role <- roles, do: refute(role.id in granted_ids)
  end

  # Rules are looked up by their parsed JSON tree, since the raw text is not
  # what gets persisted.
  def assert_rule_is_persisted(id, rule_text) do
    {:ok, parse_tree, _} = Piper.Permissions.Parser.parse(rule_text, json: true)
    persisted = Repo.get_by!(Cog.Models.Rule, parse_tree: parse_tree)
    assert persisted.id == id
  end

  def refute_rule_is_persisted(rule_text) do
    {:ok, parse_tree, _} = Piper.Permissions.Parser.parse(rule_text, json: true)
    refute Repo.get_by(Cog.Models.Rule, parse_tree: parse_tree)
  end

  def assert_rule_is_enabled(rule_text) do
    {:ok, parse_tree, _} = Piper.Permissions.Parser.parse(rule_text, json: true)
    persisted = Repo.get_by(Cog.Models.Rule, parse_tree: parse_tree)
    assert persisted.enabled
  end

  def assert_rule_is_disabled(rule_text) do
    {:ok, parse_tree, _} = Piper.Permissions.Parser.parse(rule_text, json: true)
    persisted = Repo.get_by(Cog.Models.Rule, parse_tree: parse_tree)
    refute persisted.enabled
  end

  # Unlike assert_role_is_granted/2, this compares whole structs.
  def assert_role_was_granted(grantee, role) do
    %{roles: granted} = Repo.preload(grantee, :roles)
    assert role in granted
  end

  def assert_group_member_was_added(group, %Cog.Models.User{} = member) do
    %{users: users} = Repo.preload(group, :users)
    assert member in users
  end

  def assert_group_member_was_added(group, %Cog.Models.Group{} = member) do
    %{direct_group_members: members} = Repo.preload(group, :direct_group_members)
    assert member in members
  end
end
|
test/support/database_assertions.ex
| 0.748812
| 0.451508
|
database_assertions.ex
|
starcoder
|
defmodule Verk.Node do
  @moduledoc """
  Node data controller.
  """

  # Redis set holding the ids of every known node
  @verk_nodes_key "verk_nodes"

  @spec deregister!(String.t(), GenServer.t()) :: :ok
  def deregister!(verk_node_id, redis) do
    # Remove the node's liveness key, its queue set, and its membership in
    # the global node set in a single round-trip.
    commands = [
      ["DEL", verk_node_key(verk_node_id)],
      ["DEL", verk_node_queues_key(verk_node_id)],
      ["SREM", @verk_nodes_key, verk_node_id]
    ]

    Redix.pipeline!(redis, commands)
    :ok
  end

  @spec members(integer, non_neg_integer, GenServer.t()) ::
          {:ok, [String.t()]} | {:more, [String.t()], integer} | {:error, term}
  def members(cursor \\ 0, count \\ 25, redis) do
    # SSCAN returns cursor "0" once the iteration is complete; any other
    # cursor means there are more members to fetch.
    redis
    |> Redix.command(["SSCAN", @verk_nodes_key, cursor, "COUNT", count])
    |> case do
      {:ok, ["0", node_ids]} -> {:ok, node_ids}
      {:ok, [next_cursor, node_ids]} -> {:more, node_ids, next_cursor}
      {:error, _reason} = error -> error
    end
  end

  @spec ttl!(String.t(), GenServer.t()) :: integer
  def ttl!(verk_node_id, redis),
    do: Redix.command!(redis, ["PTTL", verk_node_key(verk_node_id)])

  @spec expire_in(String.t(), integer, GenServer.t()) :: {:ok, integer} | {:error, term}
  def expire_in(verk_node_id, ttl, redis),
    do: Redix.command(redis, ["PSETEX", verk_node_key(verk_node_id), ttl, "alive"])

  @spec queues!(String.t(), integer, non_neg_integer, GenServer.t()) ::
          {:ok, [String.t()]} | {:more, [String.t()], integer}
  def queues!(verk_node_id, cursor \\ 0, count \\ 25, redis) do
    command = ["SSCAN", verk_node_queues_key(verk_node_id), cursor, "COUNT", count]

    case Redix.command!(redis, command) do
      ["0", queues] -> {:ok, queues}
      [next_cursor, queues] -> {:more, queues, next_cursor}
    end
  end

  @doc """
  Redis command to add a queue to the set of queues that a node is processing.

      iex> Verk.Node.add_queue_redis_command("123", "default")
      ["SADD", "verk:node:123:queues", "default"]
  """
  @spec add_queue_redis_command(String.t(), String.t()) :: [String.t()]
  def add_queue_redis_command(verk_node_id, queue),
    do: ["SADD", verk_node_queues_key(verk_node_id), queue]

  @doc """
  Redis command to add a node to the set of known nodes.

      iex> Verk.Node.add_node_redis_command("123")
      ["SADD", "verk_nodes", "123"]
  """
  @spec add_node_redis_command(String.t()) :: [String.t()]
  def add_node_redis_command(verk_node_id),
    do: ["SADD", @verk_nodes_key, verk_node_id]

  defp verk_node_key(verk_node_id), do: "verk:node:#{verk_node_id}"
  defp verk_node_queues_key(verk_node_id), do: "verk:node:#{verk_node_id}:queues"
end
|
lib/verk/node.ex
| 0.784526
| 0.469581
|
node.ex
|
starcoder
|
defmodule Wonderland.Data.Maybe do
  use Calculus
  use Wonderland.TypeClass

  @moduledoc """
  Classic sum type which represents optional value

  - Functor
  - Monad
  - Applicative
  """

  @typep a :: term
  @type t(a) :: __MODULE__.t(a)

  # Internal representation of a present value. Defined as a macro so the
  # same constructor can be used both to build values and in the pattern
  # matches inside `defcalculus` below.
  defmacrop justp(x) do
    quote location: :keep do
      {:justp, unquote(x)}
    end
  end

  # Internal representation of the absent value.
  defmacrop nothingp, do: :nothingp

  # `defcalculus` (from the Calculus library) wraps `state` in an opaque
  # value that can only be inspected via the message clauses below; each
  # clause receives a dispatched method and returns through `calculus/1`.
  defcalculus state,
    export_return: false,
    generate_opaque: false,
    generate_return: false do
    # Both predicates share one clause; the result is whether the state
    # matches the predicate being asked about.
    method when method in [:is_just?, :is_nothing?] ->
      case state do
        justp(_) -> calculus(return: method == :is_just?)
        nothingp() -> calculus(return: method == :is_nothing?)
      end

    # Functor: map `f` over a present value; nothing maps to nothing.
    {:functor_fmap, f} ->
      case state do
        justp(x) -> calculus(return: just(f.(x)))
        nothingp() -> calculus(return: nothing())
      end

    # Monad: `f` returns a Maybe itself, so the result is not re-wrapped.
    {:monad_bind, f} ->
      case state do
        justp(x) -> calculus(return: f.(x))
        nothingp() -> calculus(return: nothing())
      end

    # Applicative: apply a wrapped function `mf` to a wrapped value. If
    # either side is nothing, the result is nothing.
    {:applicative_ap, mf} ->
      case is_just?(mf) do
        true ->
          case state do
            justp(x) -> calculus(return: just(unlift(mf).(x)))
            nothingp() -> calculus(return: nothing())
          end

        false ->
          calculus(return: nothing())
      end

    # Unwrap to a plain Elixir term; nothing unlifts to nil (the inverse of
    # wonder_lift/1 below).
    :wonder_unlift ->
      case state do
        justp(x) -> calculus(return: x)
        nothingp() -> calculus(return: nil)
      end
  end

  @doc """
  First constructor

  ## Examples

  ```
  iex> x = Maybe.just(1)
  iex> Maybe.is_just?(x)
  true
  ```
  """
  @spec just(a) :: t(a)
  def just(x), do: x |> justp() |> construct()

  @doc """
  Second constructor

  ## Examples

  ```
  iex> x = Maybe.nothing()
  iex> Maybe.is_nothing?(x)
  true
  ```
  """
  @spec nothing :: t(a)
  def nothing, do: nothingp() |> construct()

  @doc """
  If argument is `just(a)` then returns `true`
  If argument is `nothing()` then returns `false`
  Otherwise raise exception

  ## Examples

  ```
  iex> j = Maybe.just(1)
  iex> n = Maybe.nothing()
  iex> Maybe.is_just?(j)
  true
  iex> Maybe.is_just?(n)
  false
  ```
  """
  @spec is_just?(t(a)) :: boolean
  def is_just?(x), do: eval(x, :is_just?)

  @doc """
  If argument is `nothing()` then returns `true`
  If argument is `just(a)` then returns `false`
  Otherwise raise exception

  ## Examples

  ```
  iex> j = Maybe.just(1)
  iex> n = Maybe.nothing()
  iex> Maybe.is_nothing?(n)
  true
  iex> Maybe.is_nothing?(j)
  false
  ```
  """
  @spec is_nothing?(t(a)) :: boolean
  def is_nothing?(x), do: eval(x, :is_nothing?)

  # Type-class implementations: each callback simply dispatches into the
  # defcalculus clauses above.
  @behaviour Functor
  @impl true
  def functor_fmap(f, x), do: eval(x, {:functor_fmap, f})

  @behaviour Monad
  @impl true
  def monad_bind(x, f), do: eval(x, {:monad_bind, f})

  @behaviour Applicative
  @impl true
  def applicative_ap(mf, x), do: eval(x, {:applicative_ap, mf})

  @behaviour Wonder
  @impl true
  # Both nil and Erlang's :undefined lift to nothing; anything else is just.
  def wonder_lift(x) when x in [nil, :undefined], do: nothing()
  def wonder_lift(x), do: just(x)

  @impl true
  def wonder_unlift(x), do: eval(x, :wonder_unlift)
end
|
lib/wonderland/data/maybe.ex
| 0.862207
| 0.894375
|
maybe.ex
|
starcoder
|
defmodule EctoEnum do
  @moduledoc """
  Provides `defenum/2` macro for defining an Enum Ecto type.
  """

  @doc """
  Defines an enum custom `Ecto.Type`.

  It can be used like any other `Ecto.Type` by passing it to a field in your model's
  schema block. For example:

      import EctoEnum
      defenum StatusEnum, registered: 0, active: 1, inactive: 2, archived: 3

      defmodule User do
        use Ecto.Schema

        schema "users" do
          field :status, StatusEnum
        end
      end

  In the above example, the `:status` will behave like an enum and will allow you to
  pass an `integer`, `atom` or `string` to it. This applies to saving the model,
  invoking `Ecto.Changeset.cast/4`, or performing a query on the status field. Let's
  do a few examples:

      iex> user = Repo.insert!(%User{status: 0})
      iex> Repo.get(User, user.id).status
      :registered

      iex> %{changes: changes} = cast(%User{}, %{"status" => "Active"}, ~w(status), [])
      iex> changes.status
      :active

      iex> from(u in User, where: u.status == :registered) |> Repo.all() |> length
      1

  Passing an invalid value to a `Ecto.Changeset.cast/3` will add an error to `changeset.errors`
  field.

      iex> changeset = cast(%User{}, %{"status" => "retroactive"}, ~w(status), [])
      iex> changeset.errors
      [status: "is invalid"]

  Passing an invalid value directly into a model struct will in an error when calling
  `Repo` functions.

      iex> Repo.insert!(%User{status: :none})
      ** (Ecto.ChangeError) `"none"` is not a valid enum value for `EctoEnumTest.StatusEnum`.
      Valid enum values are `[0, 1, 2, 3, :registered, :active, :inactive, :archived, "active",
      "archived", "inactive", "registered"]`

  The enum type `StatusEnum` will also have a reflection function for inspecting the
  enum map in runtime.

      iex> StatusEnum.__enum_map__()
      [registered: 0, active: 1, inactive: 2, archived: 3]

  You may also use strings as backing storage rather than integers by specifying string
  values in place of integral ones.

      defenum RoleEnum, user: "string_for_user", admin: "string_for_admin"

  They expose all of the same functionality as integer backed storage.
  """

  # Raised at compile time when an enum keyword mixes integer and string
  # backing values, since a single Ecto type must map to one storage type.
  defmodule UndeterminableStorageError do
    defexception [:message]

    def exception(kw) do
      msg = "You have conflicting data types! EctoEnum can only store using " <>
            "one type (integers or strings but not both) for the same enum. " <>
            "Original keyword was #{inspect kw}"
      %__MODULE__{message: msg}
    end
  end

  # Three-argument form: delegates to the Postgres-native enum implementation.
  defmacro defenum(module, type, enum) do
    EctoEnum.Postgres.defenum(module, type, enum)
  end

  # Two-argument form: generates a full `Ecto.Type` module at compile time.
  # The lookup keyword/maps below are derived once and stored as module
  # attributes so casts/dumps/loads are constant-time at runtime.
  defmacro defenum(module, enum) do
    quote do
      kw = unquote(enum) |> Macro.escape

      # Determine whether the enum is integer- or string-backed; bail out at
      # compile time if the keyword mixes both.
      storage = EctoEnum.storage(kw)
      if storage == :indeterminate, do: raise(EctoEnum.UndeterminableStorageError, kw)

      defmodule unquote(module) do
        @behaviour Ecto.Type

        # atom => raw storage value, and the inverse / string-keyed variants
        @atom_rawval_kw kw
        @rawval_atom_map for {atom, rawval} <- kw, into: %{}, do: {rawval, atom}
        @string_rawval_map for {atom, rawval} <- kw, into: %{}, do: {Atom.to_string(atom), rawval}
        @string_atom_map for {atom, _} <- kw, into: %{}, do: {Atom.to_string(atom), atom}
        @valid_values Keyword.values(@atom_rawval_kw) ++ Keyword.keys(@atom_rawval_kw) ++ Map.keys(@string_rawval_map)
        @storage storage

        def type, do: @storage

        def cast(term) do
          EctoEnum.Type.cast(term, @rawval_atom_map, @string_atom_map)
        end

        def load(rawval) do
          Map.fetch(@rawval_atom_map, rawval)
        end

        # Unlike cast/load, a failed dump raises: by this point the value
        # should already have been validated.
        def dump(term) do
          case EctoEnum.Type.dump(term, @atom_rawval_kw, @string_rawval_map, @rawval_atom_map) do
            :error ->
              msg = "Value `#{inspect term}` is not a valid enum for `#{inspect __MODULE__}`. " <>
                    "Valid enums are `#{inspect __valid_values__()}`"
              raise Ecto.ChangeError,
                message: msg
            value ->
              value
          end
        end

        def valid_value?(value) do
          Enum.member?(@valid_values, value)
        end

        # Reflection
        def __enum_map__(), do: @atom_rawval_kw
        def __valid_values__(), do: @valid_values
      end
    end
  end

  # Classifies the backing storage of an enum keyword list: all-integer,
  # all-string, or :indeterminate when mixed.
  def storage(kw) do
    cond do
      Enum.all?(kw, &(is_integer(elem(&1, 1)))) -> :integer
      Enum.all?(kw, &(is_binary(elem(&1, 1)))) -> :string
      true -> :indeterminate
    end
  end

  # Shared cast/dump helpers used by every generated enum module.
  defmodule Type do
    @spec cast(any, map, map) :: {:ok, atom} | :error
    # Atom input: valid only if it is one of the enum's atoms.
    def cast(atom, rawval_atom_map, _) when is_atom(atom) do
      if atom in Map.values(rawval_atom_map) do
        {:ok, atom}
      else
        :error
      end
    end

    # Raw value or string input: try the raw-value map first, then the
    # string-name map.
    def cast(val, rawval_atom_map, string_atom_map) do
      if val in Map.keys(rawval_atom_map) do
        Map.fetch(rawval_atom_map, val)
      else
        Map.fetch(string_atom_map, val)
      end
    end

    @spec dump(any, [{atom(), any()}], map, map) :: {:ok, integer | String.t} | :error
    # Atom input dumps to its configured raw value.
    def dump(atom, atom_rawval_kw, _, _) when is_atom(atom) do
      Keyword.fetch(atom_rawval_kw, atom)
    end

    # Already-raw values pass through; string names dump to their raw value.
    def dump(val, _, string_rawval_map, rawval_atom_map) do
      if val in Map.keys(rawval_atom_map) do
        {:ok, val}
      else
        Map.fetch(string_rawval_map, val)
      end
    end
  end

  alias Ecto.Changeset

  @spec validate_enum(Ecto.Changeset.t, atom, ((atom, String.t, list(String.t | integer | atom)) -> String.t)) :: Ecto.Changeset.t
  # Changeset validation that checks `field`'s value against the enum type
  # declared in the changeset's schema; `error_msg` customizes the message.
  def validate_enum(changeset, field, error_msg \\ &default_error_msg/3) do
    Changeset.validate_change(changeset, field, :validate_enum, fn field, value ->
      type = changeset.types[field]
      error_msg = error_msg.(field, value, type.__valid_values__())

      if type.valid_value?(value) do
        []
      else
        Keyword.put([], field, error_msg)
      end
    end)
  end

  defp default_error_msg(field, value, valid_values) do
    "Value `#{inspect value}` is not a valid enum for `#{inspect field}` field. " <>
    "Valid enums are `#{inspect valid_values}`"
  end
end
|
lib/ecto_enum.ex
| 0.850794
| 0.42919
|
ecto_enum.ex
|
starcoder
|
defmodule Wild do
  @moduledoc """
  A wildcard matching library that implements unix-style blob pattern matching
  functionality for Elixir binaries. It works on all binary input and defaults
  to working with codepoint representations of binaries.

  The `match?/3` and `valid_pattern?/1` functions support all of the usual
  wildcard pattern mechanisms:

  - `*` matches none or many tokens
  - `?` matches exactly one token
  - `[abc]` matches a set of tokens
  - `[a-z]` matches a range of tokens
  - `[!...]` matches anything but a set of tokens
  """

  alias Wild.{Bash, Engine, Validator}

  require Logger

  @doc """
  Executes a unix-style wildcard blob pattern match on a binary with a given
  pattern. By default it tokenizes and runs on `codepoints` but can also be set
  to `byte` mode.

  ## Examples

      iex> Wild.match?("foobar", "foo*")
      true

      iex> Wild.match?("foobar", "fo[a-z]bar")
      true

      iex> Wild.match?(<<9, 97, 98>>, "?ab")
      true

      iex> Wild.match?("foobar", "bar*")
      false

      iex> Wild.match?(<<16, 196, 130, 4>>, "????", mode: :byte)
      true

  The options are:

  * `:mode` - The matching mode. This primarily affects tokenization and
    what is considered a single match for the `?` wildcard. Options are:
    * `:codepoint` (default) - Tokenize on printable String characters
    * `:byte` - Tokenize on each byte
    * `:bash` - Using an underlying bash script. Only for debugging

    The distinction is important for subject and patterns like the following,
    where the binary is represented by two bytes but only one codepoint:

    ```
    iex> Wild.match?("ā", "?", mode: :codepoint)
    true

    iex> Wild.match?("ā", "?", mode: :byte)
    false
    ```

    If we do an example tokenization of our `"ā"` subject we can see that
    depending on how you treat the binary you can produce different amounts
    of tokens:

    ```
    iex> String.codepoints("ā")
    ["ā"]

    iex> :binary.bin_to_list("ā")
    [196, 129]
    ```

    If you are dealing with user input from forms this is likely not
    something you will encounter and can keep the default value of
    `:codepoint`.

  * `:on_pattern_error` - What to do when the pattern is invalid. The
    options are:
    * `:fail` (default) - Similar to case statements in Bash where an
      invalid pattern won't match the subject, simply fail the match and return
      `false`
    * `:return` - Returns an `{:error, error}` tuple
    * `:raise` - Raise an error
  """
  @spec match?(binary(), binary()) :: boolean()
  @spec match?(binary(), binary(), keyword()) :: boolean() | {:error, String.t()}
  def match?(subject, pattern, opts \\ []) do
    opts = Keyword.put_new(opts, :mode, :codepoint)

    # The :bash mode shells out for debugging purposes; everything else is
    # handled by the native engine.
    if Keyword.fetch!(opts, :mode) == :bash do
      Bash.match?(subject, pattern, opts)
    else
      Engine.match?(subject, pattern, opts)
    end
  end

  @doc """
  Checks if the given pattern is a valid unix-style wildcard pattern. The most
  common invalid patterns arise because of invalid escape sequences. Mode can
  be either `:byte` or `:codepoint` (default).

  ## Examples

      iex> Wild.valid_pattern?("fo[a-z]b?r")
      true

      iex> Wild.valid_pattern?(<<?\\\\, ?a>>)
      false

      iex> Wild.valid_pattern?("hello", :codepoint)
      true

      iex> Wild.valid_pattern?(123)
      false
  """
  @spec valid_pattern?(binary()) :: boolean()
  @spec valid_pattern?(binary(), :byte | :codepoint) :: boolean()
  def valid_pattern?(pattern, mode \\ :codepoint)

  # Non-binary input is never a valid pattern rather than an error.
  def valid_pattern?(pattern, mode) when is_binary(pattern), do: Validator.valid?(pattern, mode)
  def valid_pattern?(_pattern, _mode), do: false
end
|
lib/wild.ex
| 0.93126
| 0.848972
|
wild.ex
|
starcoder
|
defmodule Rex.Fun do
  # Converts a quoted Rex word into a quoted Elixir
  # function that handles the state
  @moduledoc false

  # A Rex program state is a `{stack, queue}` pair; every function generated
  # here takes such a pair and returns a new one. Clauses below dispatch on
  # the AST shape of the Rex word being compiled.

  # `@[expr]` quotes an expression together with its macro env and pushes the
  # `{quoted, env}` pair onto the stack (to be run later via `dequote/0`).
  def to_fun({:@, _, [[expr]]}, env) do
    quoted_expr(expr, env)
  end

  # Any other word is compiled by fun_expr/1 below.
  def to_fun(expr, _env) do
    fun_expr(expr)
  end

  defp quoted_expr(quoted, env) do
    # Escape the {quoted, env} pair so it survives into the generated code
    expr = {quoted, env} |> Macro.escape

    quote do
      fn {stack, queue} -> {[unquote(expr) | stack], queue} end
    end
  end

  # Public (unlike the other helpers): pops a previously-quoted {quoted, env}
  # pair off the stack, compiles and evaluates it, and applies the resulting
  # function to the remaining state.
  def dequote do
    fn {[{quoted, env} | stack], queue} ->
      code = Rex.Core.rex_fn(quoted, env)
      {fun, _} = Code.eval_quoted(code, [], env)
      {stack, queue} |> fun.()
    end
  end

  # `^expr` pushes the value of an Elixir expression onto the stack.
  defp fun_expr({:^, _, [expr]}) do
    quote do
      fn {stack, queue} -> {[unquote(expr) | stack], queue} end
    end
  end

  # A remote zero-arg call (`Mod.name()`): treated as a Rex word that
  # transforms the whole state.
  defp fun_expr({ref = {:., _, [{_, _, _}, name]}, _, []}) when is_atom(name) do
    quote do
      fn state -> state |> unquote(ref)() end
    end
  end

  # `name/0`: push the bare variable/value onto the stack.
  defp fun_expr({:/, _, [expr = {name, _, nil}, 0]}) when is_atom(name) do
    quote do
      fn {stack, queue} -> {[unquote(expr) | stack], queue} end
    end
  end

  # `name/arity`: pop `arity` values off the stack, apply the local function,
  # and push its result. Popped values are reversed so the deepest stack
  # entry becomes the first argument.
  defp fun_expr({:/, _, [{name, _, nil}, arity]}) when is_atom(name) and arity > 0 do
    vars = for i <- 0..arity-1, do: Macro.var(:"v#{i}", nil)

    quote do
      fn {[unquote_splicing(Enum.reverse(vars)) | stack], queue} ->
        {[unquote(name)(unquote_splicing(vars)) | stack], queue}
      end
    end
  end

  # `Mod.fun/0` (remote capture-style ref): call it and push the result.
  defp fun_expr({:/, _, [{ref, _, []}, 0]}) do
    quote do
      fn {stack, queue} -> {[unquote(ref)() | stack], queue} end
    end
  end

  # `Mod.fun/arity`: same argument-popping convention as the local case above.
  defp fun_expr({:/, _, [{ref, _, []}, arity]}) do
    vars = for i <- 0..arity-1, do: Macro.var(:"v#{i}", nil)

    quote do
      fn {[unquote_splicing(Enum.reverse(vars)) | stack], queue} ->
        {[unquote(ref)(unquote_splicing(vars)) | stack], queue}
      end
    end
  end

  # `quote do ... end`: push the escaped AST itself onto the stack.
  defp fun_expr(expr = {:quote, _, _}) do
    code = expr |> Macro.escape

    quote do
      fn {stack, queue} -> {[unquote(code) | stack], queue} end
    end
  end

  # A bare word (no-parens local call): apply it to the whole state.
  defp fun_expr(word = {_, _, nil}) do
    quote do
      fn state -> state |> unquote(word) end
    end
  end

  # Fallback: any other expression is evaluated and its result pushed.
  defp fun_expr(expr) do
    quote do
      fn {stack, queue} -> {[unquote(expr) | stack], queue} end
    end
  end
end
|
lib/rex/fun.ex
| 0.564219
| 0.644561
|
fun.ex
|
starcoder
|
defmodule MuonTrap.Daemon do
  use GenServer

  require Logger

  alias MuonTrap.Options

  @moduledoc """
  Wrap an OS process in a GenServer so that it can be supervised.

  For example, in your children list add MuonTrap.Daemon like this:

  ```elixir
  children = [
    {MuonTrap.Daemon, ["myserver", ["--options", "foo"], [cd: "/somedirectory"]]}
  ]

  opts = [strategy: :one_for_one, name: MyApplication.Supervisor]
  Supervisor.start_link(children, opts)
  ```

  The same options as `MuonTrap.cmd/3` are available with the following additions:

  * `:log_output` - a `Logger` level; anything that the command sends to
    stdout is logged at that level
  """

  defmodule State do
    @moduledoc false
    # command:    the command string, used for log message prefixes
    # port:       the Erlang port wrapping the muontrap executable
    # group:      cgroup name used by cgget/cgset (may be nil)
    # log_output: Logger level for the command's stdout, or nil to ignore
    defstruct [:command, :port, :group, :log_output]
  end

  def child_spec(opts) do
    %{
      id: __MODULE__,
      # NOTE(review): `opts` is used directly as the MFA argument list here,
      # so callers must pass `[command, args, opts]` — confirm against usage.
      start: {__MODULE__, :start_link, opts},
      type: :worker,
      restart: :permanent,
      shutdown: 500
    }
  end

  @doc """
  Start/link a daemon GenServer for the specified command.
  """
  @spec start_link(binary(), [binary()], keyword()) :: GenServer.on_start()
  def start_link(command, args, opts \\ []) do
    GenServer.start_link(__MODULE__, [command, args, opts])
  end

  @doc """
  Get the value of the specified cgroup variable.
  """
  # NOTE(review): System.cmd/2 returns `{output, exit_status}`, so the reply
  # is a tuple even though the @spec says binary() — confirm intent.
  @spec cgget(GenServer.server(), binary(), binary()) :: binary()
  def cgget(server, controller, variable_name) do
    GenServer.call(server, {:cgget, controller, variable_name})
  end

  @doc """
  Modify a cgroup variable.
  """
  @spec cgset(GenServer.server(), binary(), binary(), binary()) :: :ok | no_return()
  def cgset(server, controller, variable_name, value) do
    GenServer.call(server, {:cgset, controller, variable_name, value})
  end

  @doc """
  Return the OS pid to the muontrap executable.
  """
  @spec os_pid(GenServer.server()) :: non_neg_integer()
  def os_pid(server) do
    GenServer.call(server, :os_pid)
  end

  def init([command, args, opts]) do
    group = Keyword.get(opts, :group)
    logging = Keyword.get(opts, :log_output)
    # :log_output is consumed here; the remaining opts go to muontrap/Port.
    opts = Keyword.drop(opts, [:log_output])
    {muontrap_args, leftover_opts} = Options.to_args(opts)
    updated_args = muontrap_args ++ ["--", command] ++ args
    # {:line, 256}: output is delivered line-by-line, lines over 256 bytes
    # arrive split into multiple messages.
    port_options = [:exit_status, {:args, updated_args}, {:line, 256} | leftover_opts]
    port = Port.open({:spawn_executable, to_charlist(MuonTrap.muontrap_path())}, port_options)
    {:ok, %State{command: command, port: port, group: group, log_output: logging}}
  end

  # NOTE(review): the conventional cgroup v1 mount point is /sys/fs/cgroup
  # (singular); these use /sys/fs/cgroups — confirm against the target system.
  def handle_call({:cgget, controller, variable_name}, _from, state) do
    result = System.cmd("cat", ["/sys/fs/cgroups/#{controller}/#{state.group}/#{variable_name}"])
    {:reply, result, state}
  end

  def handle_call({:cgset, controller, variable_name, value}, _from, state) do
    result = File.write!("/sys/fs/cgroups/#{controller}/#{state.group}/#{variable_name}", value)
    {:reply, result, state}
  end

  def handle_call(:os_pid, _from, state) do
    {:os_pid, os_pid} = Port.info(state.port, :os_pid)
    {:reply, os_pid, state}
  end

  # No :log_output configured: drop the command's output.
  def handle_info({_port, {:data, _}}, %State{log_output: nil} = state) do
    # Ignore output
    {:noreply, state}
  end

  # Log each output line at the configured level, prefixed with the command.
  def handle_info(
        {port, {:data, {_, message}}},
        %State{port: port, log_output: log_level} = state
      ) do
    _ = Logger.log(log_level, "#{state.command}: #{message}")
    {:noreply, state}
  end

  # The wrapped OS process exited: stop normally so the supervisor policy
  # (restart: :permanent) decides whether to restart the daemon.
  def handle_info({port, {:exit_status, status}}, %State{port: port} = state) do
    _ = Logger.error("#{state.command}: Process exited with status #{status}")
    {:stop, :normal, state}
  end
end
|
lib/muontrap/daemon.ex
| 0.755096
| 0.668147
|
daemon.ex
|
starcoder
|
defmodule ExAliyunOts.Utils do
  @moduledoc """
  Common tools.
  """

  # Matches a "longitude,latitude" decimal pair, e.g. "12.3,-45.6".
  # NOTE(review): unanchored, so it also matches inside a longer string —
  # confirm whether it should be anchored with ^...$ before tightening.
  @geo_point_reg ~r/\-?\d+\.?\d*,\-?\d+\.?\d*/

  defmodule Guards do
    @moduledoc false

    defguard is_valid_primary_key_type(type) when type in [:integer, :binary, :string]
    defguard is_valid_string(value) when value != nil and value != "" and is_bitstring(value)
    defguard is_valid_table_ttl(value) when is_integer(value) and (value == -1 or value >= 86_400)
    defguard is_valid_input_columns(columns) when is_list(columns) or is_map(columns)
  end

  @doc """
  Checks whether the given string contains a "x,y" decimal pair geo point.
  """
  def valid_geo_point?(point) when is_bitstring(point) do
    Regex.match?(@geo_point_reg, point)
  end

  @doc """
  Transfers a `{primary_keys, attributes}` row into a map with atom keys.

  Primary keys are `{name, value}` tuples; attributes are `{name, value,
  timestamp}` tuples whose timestamp is dropped. Raises
  `ExAliyunOts.RuntimeError` for any other shape.
  """
  def row_to_map({pks, attrs}) when is_list(pks) and is_list(attrs) do
    %{}
    |> do_reduce_pks(pks)
    |> do_reduce_attrs(attrs)
  end

  def row_to_map({pks, nil}) when is_list(pks) do
    do_reduce_pks(%{}, pks)
  end

  def row_to_map({nil, attrs}) when is_list(attrs) do
    do_reduce_attrs(%{}, attrs)
  end

  def row_to_map(nil) do
    %{}
  end

  def row_to_map(row) do
    raise ExAliyunOts.RuntimeError, "Fail to transfer invalid row: #{inspect(row)} to map."
  end

  @doc """
  Transfers a Map or Keyword of attributes into a list of
  `{string_name, value}` columns, skipping `nil` values and JSON-encoding
  map/list values. Raises `ExAliyunOts.RuntimeError` for other inputs.
  """
  def attrs_to_row(attrs) when is_list(attrs) or is_map(attrs) do
    # Prepend while reducing (O(1) per element) and reverse once at the end,
    # instead of appending with `++` inside the loop (O(n²) overall).
    attrs
    |> Enum.reduce([], &assemble_attribute_column/2)
    |> Enum.reverse()
  end

  def attrs_to_row(attrs) do
    raise ExAliyunOts.RuntimeError,
          "Fail to transfer invalid attributes: #{inspect(attrs)} to row attributes column(s), expect it is a Map or Keyword."
  end

  # Folds primary-key `{name, value}` tuples into the accumulator map.
  # NOTE(review): String.to_atom/1 creates atoms from row data; atoms are not
  # garbage-collected, so this assumes column names form a small trusted set.
  defp do_reduce_pks(acc, items) do
    Enum.reduce(items, acc, fn {k, v}, acc ->
      Map.put(acc, String.to_atom(k), v)
    end)
  end

  # Like do_reduce_pks/2, but attribute tuples carry a timestamp to drop.
  defp do_reduce_attrs(acc, items) do
    Enum.reduce(items, acc, fn {k, v, _ts}, acc ->
      Map.put(acc, String.to_atom(k), v)
    end)
  end

  # Map/list values are stored JSON-encoded; everything else is kept as-is.
  defp value_to_attribute_column(value) when is_map(value) or is_list(value) do
    Jason.encode!(value)
  end

  defp value_to_attribute_column(value) do
    value
  end

  # Accumulates one {string_key, value} column, skipping nil values.
  # Columns are prepended; the caller reverses the list to keep input order.
  defp assemble_attribute_column({key, value}, acc) when is_atom(key) do
    value = value_to_attribute_column(value)
    if value == nil, do: acc, else: [{Atom.to_string(key), value} | acc]
  end

  defp assemble_attribute_column({key, value}, acc) when is_bitstring(key) do
    value = value_to_attribute_column(value)
    if value == nil, do: acc, else: [{key, value} | acc]
  end

  # Anything that is not a 2-tuple is silently skipped (as before).
  defp assemble_attribute_column(_, acc) do
    acc
  end

  @doc """
  Downcases an atom, e.g. `:ABC` -> `:abc`.
  """
  def downcase_atom(atom), do: atom |> to_string() |> String.downcase() |> String.to_atom()
end
|
lib/ex_aliyun_ots/utils.ex
| 0.641085
| 0.471162
|
utils.ex
|
starcoder
|
defmodule Expublish.Semver do
  @moduledoc """
  Functions for manipulating [%Version{}](https://hexdocs.pm/elixir/Version.html).
  """

  # Pre-release identifiers, ordered alpha < beta < rc.
  @alpha "alpha"
  @beta "beta"
  @rc "rc"

  alias Expublish.Options

  require Logger

  @type level() :: :major | :minor | :patch | :rc | :beta | :alpha | :stable

  @spec increase!(Version.t(), level(), Options.t()) :: Version.t()
  @doc "Interfaces `Expublish.Semver` version increase functions."
  def increase!(version, level, options \\ %Options{})
  def increase!(version, :major, _options), do: major(version)
  def increase!(version, :minor, _options), do: minor(version)
  def increase!(version, :patch, _options), do: patch(version)
  def increase!(version, :stable, _options), do: stable(version)
  def increase!(version, :rc, options), do: rc(version, options)
  def increase!(version, :beta, options), do: beta(version, options)
  def increase!(version, :alpha, options), do: alpha(version, options)

  @doc "Bump major version."
  @spec major(Version.t()) :: Version.t()
  def major(%Version{} = version) do
    # Bumping major resets minor and patch; `pre` is left untouched.
    %{version | major: version.major + 1, minor: 0, patch: 0}
  end

  @doc "Bump minor version."
  @spec minor(Version.t()) :: Version.t()
  def minor(%Version{} = version) do
    %{version | minor: version.minor + 1, patch: 0}
  end

  @doc "Bump patch version."
  @spec patch(Version.t()) :: Version.t()
  def patch(%Version{} = version) do
    %{version | patch: version.patch + 1}
  end

  @doc "Remove current pre-release suffix and declare current version stable."
  @spec stable(Version.t()) :: Version.t()
  # Guard clause: a version without a pre-release is already stable.
  # Logs and exits the VM with a non-zero status instead of raising (CLI tool).
  def stable(%Version{pre: []} = version) do
    Logger.error("Can not create stable release from already stable version #{version}. Abort.")
    exit({:shutdown, 1})
  end

  def stable(%Version{} = version) do
    %{version | pre: []}
  end

  @doc "Bump release-candidate pre-release and patch version."
  @spec rc(Version.t(), Options.t()) :: Version.t()
  def rc(version, options \\ %Options{})

  def rc(%Version{} = version, %Options{as_major: true}) do
    %{major(version) | pre: [@rc]}
  end

  def rc(%Version{} = version, %Options{as_minor: true}) do
    %{minor(version) | pre: [@rc]}
  end

  # Promoting alpha/beta to rc keeps the version numbers, only the
  # pre-release tag changes.
  def rc(%Version{pre: [pre]} = version, _options) when pre in [@alpha, @beta] do
    %{version | pre: [@rc]}
  end

  def rc(%Version{} = version, _options) do
    %{patch(version) | pre: [@rc]}
  end

  @doc "Bump beta pre-release and patch version."
  @spec beta(Version.t(), Options.t()) :: Version.t()
  def beta(version, options \\ %Options{})

  # Guard clause first: downgrading an rc back to beta is rejected.
  def beta(%Version{pre: [pre]} = version, _) when pre in [@rc] do
    Logger.error("Can not create beta version from current #{pre} pre-release: #{version}.")
    exit({:shutdown, 1})
  end

  def beta(%Version{} = version, %Options{as_major: true}) do
    %{major(version) | pre: [@beta]}
  end

  def beta(%Version{} = version, %Options{as_minor: true}) do
    %{minor(version) | pre: [@beta]}
  end

  # Promoting alpha to beta keeps the version numbers.
  def beta(%Version{pre: [@alpha]} = version, _) do
    %{version | pre: [@beta]}
  end

  def beta(%Version{} = version, _options) do
    %{patch(version) | pre: [@beta]}
  end

  @doc "Bump alpha pre-release and patch version."
  @spec alpha(Version.t(), Options.t()) :: Version.t()
  def alpha(version, options \\ %Options{})

  # Guard clause first: downgrading beta/rc back to alpha is rejected.
  def alpha(%Version{pre: [pre]} = version, _) when pre in [@beta, @rc] do
    Logger.error("Can not create alpha version from current #{pre} pre-release: #{version}.")
    exit({:shutdown, 1})
  end

  def alpha(%Version{} = version, %Options{as_major: true}) do
    %{major(version) | pre: [@alpha]}
  end

  def alpha(%Version{} = version, %Options{as_minor: true}) do
    %{minor(version) | pre: [@alpha]}
  end

  def alpha(%Version{} = version, _options) do
    %{patch(version) | pre: [@alpha]}
  end
end
|
lib/expublish/semver.ex
| 0.802207
| 0.412264
|
semver.ex
|
starcoder
|
defmodule PlugRest.Router do
  @moduledoc ~S"""
  A DSL to supplement Plug Router with a resource-oriented routing algorithm.

  It provides a macro to generate routes that dispatch to specific
  resource handlers. For example:

      defmodule MyApp.Router do
        use PlugRest.Router

        plug :match
        plug :dispatch

        resource "/pages/:page", PageResource
      end

  The `resource/4` macro accepts a request of format `"/pages/VALUE"`
  and dispatches it to `PageResource`, which must be a Plug module.

  See `PlugRest.Resource` for information on how to write a Plug module that
  implements REST semantics.

  From `Plug.Router`:

  Notice the router contains a plug pipeline and by default it requires
  two plugs: `match` and `dispatch`. `match` is responsible for
  finding a matching route which is then forwarded to `dispatch`.
  This means users can easily hook into the router mechanism and add
  behaviour before match, before dispatch or after both.

  ## Routes

      resource "/hello", HelloResource

  The example above will route any requests for "/hello" to the
  `HelloResource` module.

  A route can also specify parameters which will be available to the
  resource:

      resource "/hello/:name", HelloResource

  The value of the dynamic path segment can be read inside the
  `HelloResource` module:

      %{"name" => name} = conn.params

  Routes allow globbing, which will match the end of the route. The glob
  can be discarded:

      # matches all routes starting with /hello
      resource "/hello/*_rest", HelloResource

  Or saved as a param for the resource to read:

      # matches all routes starting with /hello and saves the rest
      resource "/hello/*rest", HelloResource

  If we make a request to "/hello/value" then `conn.params` will include:

      %{"rest" => ["value"]}

  A request to "/hello/value/extra" will populate `conn.params` with:

      %{"rest" => ["value", "extra"]}
  """

  @typedoc "A URL path"
  @type path :: String.t

  @typedoc "A Plug Module"
  @type plug :: atom

  @typedoc "Options for the Plug"
  @type plug_opts :: any

  @typedoc "Options for a Router macro"
  @type options :: list

  @doc false
  defmacro __using__(_options) do
    quote location: :keep do
      use Plug.Router
      import PlugRest.Router
      # Injects the catch-all 404 route below AFTER the user's own routes,
      # so it only matches when nothing else did.
      @before_compile PlugRest.Router
    end
  end

  @doc false
  defmacro __before_compile__(_env) do
    quote do
      import Plug.Router, only: [match: 2]
      # Fallback route: any unmatched request gets an empty 404 response.
      match _ do
        send_resp(var!(conn), 404, "")
      end
    end
  end

  ## Resource

  @doc """
  Main API to define resource routes.

  It accepts an expression representing the path, a Plug module, the
  options for the plug, and options for the macro.

  ## Examples

      resource "/path", PlugModule, plug_options, macro_options

  ## Options

  `resource/4` accepts the following options:

  * `:host` - the host which the route should match. Defaults to `nil`,
    meaning no host match, but can be a string like "example.com" or a
    string ending with ".", like "subdomain." for a subdomain match.
  * `:private` - a map of private data to merge into the connection
  * `:assigns` - a map of data to merge into the connection

  The macro accepts options that it will pass to the Plug:

      resource "/pages/:page", PageResource, [p: 1]

  You can restrict the resource to only match requests for a specific
  host. If the plug doesn't take any options, pass an empty list as
  the third argument to the macro:

      resource "/pages/:page", PageResource, [], host: "host1.example.com"
  """
  @spec resource(path, plug, plug_opts, options) :: Macro.t
  defmacro resource(path, plug, plug_opts \\ [], options \\ []) do
    add_resource(path, plug, plug_opts, options)
  end

  ## Compiles the resource into a match macro from Plug.Router

  # Translates a `resource` call into a `Plug.Router.match/2` route that
  # dispatches `:to` the given plug with `:init_opts`.
  @spec add_resource(path, plug, plug_opts, options) :: Macro.t
  defp add_resource(path, plug, plug_opts, options) do
    options = options
    |> Keyword.put(:to, plug)
    |> Keyword.put(:init_opts, plug_opts)

    quote do
      match unquote(path), unquote(options)
    end
  end
end
|
lib/plug_rest/router.ex
| 0.876026
| 0.469581
|
router.ex
|
starcoder
|
defmodule MailgunEx do
  @moduledoc """
  A client API to the MailGun Email RESTful API.

  You can sign up for a free account at:

      https://signup.mailgun.com/new/signup

  And the latest API documentation is available at:

      https://documentation.mailgun.com/en/latest/

  To access direct calls to the service, you will want to use the
  `MailgunEx.Api` module. When making requests, you can provide
  several `opts`, all of which can be defaulted using `Mix.Config`.
  Here is an example of how to configure this library

      config :mailgun_ex,
        base: "https://api.mailgun.net/v3",
        mode: :live,
        domain: "namedb.org",
        api_key: "<KEY>",
        http_opts: %{
          timeout: 5000,
        }

  The configs use a library called [Deferred Config](https://hex.pm/packages/deferred_config)
  so that you can use environment variables that will be loaded at runtime, versus
  compiled into the release.

  Here's an example of how to use the system variables

      config :mailgun_ex,
        base: "https://api.mailgun.net/v3",
        mode: {:system, "MAILGUN_MODE", :live, {String, :to_atom}},
        domain: {:system, "MAILGUN_DOMAIN", "defaultname.com"} ,
        api_key: {:system, "MAILGUN_API_KEY"},
        http_opts: %{
          timeout: 5000,
        }

  Our default `mix test` tests will use [Bypass](https://hex.pm/packages/bypass)
  as the `base` service URL so that we will not hit your production MailGun
  account during testing.

  Here is an outline of all the configurations you can set.

  * `:base` - The base URL which defaults to `https://api.mailgun.net/v3`
  * `:mode` - Defaults to `:live`, but can be set to `:simulate` for testing, or `:ignore` for dev
  * `:domain` - The domain making the request (e.g. mailgun.yourdomain.com)
  * `:api_key` - Your mailgun API key, which looks like `<KEY>`
  * `:http_opts` - A passthrough map of options to send to HTTP request, more details below

  This client library uses [HTTPoison](https://hex.pm/packages/httpoison)
  for all HTTP communication, and we will pass through any `:http_opts` you provide,
  which we have shown below.

  * `:timeout` - timeout to establish a connection, in milliseconds. Default is 8000
  * `:recv_timeout` - timeout used when receiving a connection. Default is 5000
  * `:stream_to` - a PID to stream the response to
  * `:async` - if given :once, will only stream one message at a time, requires call to stream_next
  * `:proxy` - a proxy to be used for the request; it can be a regular url or a {Host, Port} tuple
  * `:proxy_auth` - proxy authentication {User, Password} tuple
  * `:ssl` - SSL options supported by the ssl erlang module
  * `:follow_redirect` - a boolean that causes redirects to be followed
  * `:max_redirect` - an integer denoting the maximum number of redirects to follow
  * `:params` - an enumerable consisting of two-item tuples that will be appended to the url as query string parameters

  If these are out of date, please push a Pull-Request to [mailgun_ex](https://github.com/work-samples/mailgun_ex)
  """

  @doc """
  Issues an HTTP request with the given method to the given url_opts.

  Args:
  * `method` - HTTP method as an atom (`:get`, `:head`, `:post`, `:put`, `:delete`, etc.)
  * `opts` - A keyword list of options to help create the URL, provide the body and/or query params

  The options above can be defaulted using `Mix.Config` configurations,
  as documented above.

  This function returns `{<status_code>, response}` if the request is successful, and
  `{:error, reason}` otherwise.

  ## Examples

      MailgunEx.request(:get, resource: "domains")
  """
  # Thin façade: the implementation lives in MailgunEx.Api.
  defdelegate request(method, opts \\ []), to: MailgunEx.Api

  @doc """
  Send an email.

  Options (`opts`):
  * `:to` - The recipient of the email
  * `:subject` - The subject of the email
  * `:from` - The sender of the email
  * `:text` - The body of the email, in TEXT
  * `:html` - The body of the email, but in HTML format

  Additional client functions are available at `MailgunEx.Client`.
  """
  defdelegate send_email(opts \\ []), to: MailgunEx.Client
end
|
lib/mailgun_ex.ex
| 0.825976
| 0.577108
|
mailgun_ex.ex
|
starcoder
|
defmodule Datix do
  @moduledoc """
  A date-time parser using `Calendar.strftime` format strings.
  """

  @type t :: %{
          optional(:am_pm) => :am | :pm,
          optional(:day) => pos_integer(),
          optional(:day_of_week) => pos_integer(),
          optional(:day_of_year) => pos_integer(),
          optional(:hour) => pos_integer(),
          optional(:hour_12) => pos_integer(),
          optional(:microsecond) => pos_integer(),
          optional(:minute) => pos_integer(),
          optional(:month) => pos_integer(),
          optional(:quarter) => pos_integer(),
          optional(:second) => pos_integer(),
          optional(:year) => pos_integer(),
          optional(:year_2_digit) => pos_integer(),
          optional(:zone_abbr) => String.t(),
          optional(:zone_offset) => integer()
        }

  @doc """
  Parses a date-time string according to the given `format`.

  See the Calendar.strftime documentation for how to specify a format string.

  ## Options

  * `:preferred_date` - a string for the preferred format to show dates,
    it can't contain the `%x` format and defaults to `"%Y-%m-%d"`
    if the option is not received
  * `:month_names` - a list of the month names, if the option is not received
    it defaults to a list of month names in English
  * `:abbreviated_month_names` - a list of abbreviated month names, if the
    option is not received it defaults to a list of abbreviated month names in
    English
  * `:day_of_week_names` - a list of day names, if the option is not received
    it defaults to a list of day names in English
  * `:abbreviated_day_of_week_names` - a list of abbreviated day names, if the
    option is not received it defaults to a list of abbreviated day names in
    English
  * `:preferred_time` - a string for the preferred format to show times,
    it can't contain the `%X` format and defaults to `"%H:%M:%S"`
    if the option is not received
  * `:am_pm_names` - a keyword list with the names of the period of the day,
    defaults to `[am: "am", pm: "pm"]`.

  ## Examples

  ```elixir
  iex> Datix.strptime("2021/01/10", "%Y/%m/%d")
  {:ok, %{day: 10, month: 1, year: 2021}}

  iex> Datix.strptime("21/01/10", "%y/%m/%d")
  {:ok, %{day: 10, month: 1, year_2_digit: 21}}

  iex> Datix.strptime("13/14/15", "%H/%M/%S")
  {:ok, %{hour: 13, minute: 14, second: 15}}

  iex> Datix.strptime("1 PM", "%-I %p")
  {:ok, %{am_pm: :pm, hour_12: 1}}

  iex> Datix.strptime("Tuesday", "%A")
  {:ok, %{day_of_week: 2}}

  iex> Datix.strptime("Tue", "%a")
  {:ok, %{day_of_week: 2}}

  iex> Datix.strptime("Di", "%a",
  ...>   abbreviated_day_of_week_names: ~w(Mo Di Mi Do Fr Sa So))
  {:ok, %{day_of_week: 2}}
  ```
  """
  @spec strptime(String.t(), String.t(), keyword()) ::
          {:ok, Datix.t()}
          | {:error, :invalid_input}
          | {:error, {:parse_error, expected: String.t(), got: String.t()}}
          | {:error, {:conflict, [expected: term(), got: term(), modifier: String.t()]}}
          | {:error, {:invalid_string, [modifier: String.t()]}}
          | {:error, {:invalid_integer, [modifier: String.t()]}}
          | {:error, {:invalid_modifier, [modifier: String.t()]}}
  def strptime(date_time_str, format_str, opts \\ []) when is_binary(format_str) do
    with {:ok, options} <- options(opts) do
      case parse(format_str, date_time_str, options, %{}) do
        # Leftover input after the format is exhausted is an error.
        {:ok, result, ""} -> {:ok, result}
        {:ok, _result, _rest} -> {:error, :invalid_input}
        error -> error
      end
    end
  end

  @doc """
  Parses a date-time string according to the given `format`, erroring out for
  invalid arguments.
  """
  @spec strptime!(String.t(), String.t(), keyword()) :: Datix.t()
  def strptime!(date_time_str, format_str, opts \\ []) do
    case strptime(date_time_str, format_str, opts) do
      {:ok, data} ->
        data

      {:error, :invalid_input} ->
        raise ArgumentError, "invalid input"

      {:error, {:parse_error, expected: exp, got: got}} ->
        raise ArgumentError, "parse error: expected #{inspect(exp)}, got #{inspect(got)}"

      {:error, {:conflict, [expected: exp, got: got, modifier: mod]}} ->
        raise ArgumentError, "expected #{inspect(exp)}, got #{inspect(got)} for #{mod}"

      {:error, {:invalid_string, [modifier: mod]}} ->
        raise ArgumentError, "invalid string for #{mod}"

      {:error, {:invalid_integer, [modifier: mod]}} ->
        raise ArgumentError, "invalid integer for #{mod}"

      {:error, {:invalid_modifier, [modifier: mod]}} ->
        raise ArgumentError, "invalid format: #{mod}"
    end
  end

  @doc false
  @spec calendar(keyword()) :: module()
  def calendar(opts), do: Keyword.get(opts, :calendar, Calendar.ISO)

  # Fills in defaults for the fields a Date/Time needs but the parsed data
  # may not contain.
  @doc false
  def assume(data, Date) do
    case Map.has_key?(data, :year) || Map.has_key?(data, :year_2_digit) do
      true -> Map.merge(%{month: 1, day: 1}, data)
      false -> Map.merge(%{year: 0, month: 1, day: 1}, data)
    end
  end

  def assume(data, Time) do
    case Map.has_key?(data, :hour) || Map.has_key?(data, :hour_12) do
      true -> Map.merge(%{minute: 0, second: 0, microsecond: {0, 0}}, data)
      false -> Map.merge(%{hour: 0, minute: 0, second: 0, microsecond: {0, 0}}, data)
    end
  end

  # parse/4 walks the format string and the input string in lockstep,
  # accumulating parsed fields into `acc`.
  defp parse("", date_time_rest, _opts, acc), do: {:ok, acc, date_time_rest}

  defp parse(_format_str, "", _opts, _acc), do: {:error, :invalid_input}

  defp parse("%" <> format_rest, date_time_str, opts, acc) do
    with {:ok, modifier, new_format_rest} <- parse_modifier(format_rest),
         {:ok, new_acc, date_time_rest} <- parse_date_time(modifier, date_time_str, opts, acc) do
      parse(new_format_rest, date_time_rest, opts, new_acc)
    end
  end

  # Literal characters in the format must match the input exactly.
  defp parse(<<char, format_rest::binary>>, <<char, date_time_rest::binary>>, opts, acc) do
    parse(format_rest, date_time_rest, opts, acc)
  end

  defp parse(<<expected, _format_rest::binary>>, <<got, _date_time_rest::binary>>, _opts, _acc) do
    {:error, {:parse_error, expected: to_string([expected]), got: to_string([got])}}
  end

  # Parses one `%` modifier into a {format_char, padding, width} triple.
  # FIX(review): the third parameter was named `with` (the Elixir special
  # form) in the bodiless head — clearly a typo for `width`, which is the
  # name every clause below uses.
  defp parse_modifier(format_str, padding \\ nil, width \\ nil)

  # "-": no padding at all.
  defp parse_modifier("-" <> format_rest, _padding, nil = width) do
    parse_modifier(format_rest, "", width)
  end

  # "_": pad with spaces.
  defp parse_modifier("_" <> format_rest, _padding, nil = width) do
    parse_modifier(format_rest, ?\s, width)
  end

  # "0": pad with zeros (only while no explicit width digits were seen yet).
  defp parse_modifier("0" <> format_rest, _padding, nil = width) do
    parse_modifier(format_rest, ?0, width)
  end

  # Accumulate explicit width digits.
  defp parse_modifier(<<digit, format_rest::binary>>, padding, width) when digit in ?0..?9 do
    parse_modifier(format_rest, padding, (width || 0) * 10 + (digit - ?0))
  end

  defp parse_modifier(<<format, format_rest::binary>>, padding, width) do
    {
      :ok,
      {format, padding || default_padding(format), width || default_width(format)},
      format_rest
    }
  end

  # Named-string fields: weekday/month/AM-PM names looked up in option lists.
  defp parse_date_time({format, padding, _width} = modifier, date_time_str, opts, acc)
       when format in 'aAbBpP' do
    with {:ok, value, rest} <- parse_string(date_time_str, padding, enumeration(format, opts)),
         {:ok, new_acc} <- put(acc, format, value) do
      {:ok, new_acc, rest}
    else
      error -> error(error, modifier)
    end
  end

  # Unsigned numeric fields.
  defp parse_date_time({format, padding, width} = modifier, date_time_str, _opts, acc)
       when format in 'dHIjmMqSu' do
    with {:ok, value, rest} <- parse_pos_integer(date_time_str, padding, width),
         {:ok, new_acc} <- put(acc, format, value) do
      {:ok, new_acc, rest}
    else
      error -> error(error, modifier)
    end
  end

  # Years may be negative.
  defp parse_date_time({format, padding, width} = modifier, date_time_str, _opts, acc)
       when format in 'yY' do
    with {:ok, value, rest} <- parse_integer(date_time_str, padding, width),
         {:ok, new_acc} <- put(acc, format, value) do
      {:ok, new_acc, rest}
    else
      error -> error(error, modifier)
    end
  end

  defp parse_date_time({?f, _padding, _width} = modifier, date_time_str, _opts, acc) do
    with {:ok, microsecond, rest} <- parse_pos_integer(date_time_str),
         {:ok, new_acc} <- put(acc, :microsecond, microsecond) do
      {:ok, new_acc, rest}
    else
      error -> error(error, modifier)
    end
  end

  # "+hhmm"/"-hhmm" offset, converted to seconds by zone_offset/1.
  defp parse_date_time({?z, padding, width} = modifier, date_time_str, _opts, acc) do
    with {:ok, zone_offset, rest} <- parse_signed_integer(date_time_str, padding, width),
         {:ok, new_acc} <- put(acc, :zone_offset, zone_offset(zone_offset)) do
      {:ok, new_acc, rest}
    else
      error -> error(error, modifier)
    end
  end

  defp parse_date_time({?Z, padding, _width} = modifier, date_time_str, _opts, acc) do
    with {:ok, zone_abbr, rest} <- parse_upcase_string(date_time_str, padding),
         {:ok, new_acc} <- put(acc, :zone_abbr, zone_abbr) do
      {:ok, new_acc, rest}
    else
      error -> error(error, modifier)
    end
  end

  # Detects a preferred format (%c/%x/%X) that expands to itself.
  defp parse_date_time(
         {format, _padding, _width} = modifier,
         _date_time_str,
         %{preferred: format},
         _acc
       ) do
    {:error, {:cycle, modifier_to_string(modifier)}}
  end

  # %c/%x/%X: recurse into the configured preferred format, remembering which
  # one is being expanded to catch cycles.
  defp parse_date_time({format, _padding, _width}, date_time_str, opts, acc)
       when format in 'cxX' do
    parse(preferred_format(format, opts), date_time_str, Map.put(opts, :preferred, format), acc)
  end

  # "%%" matches a literal percent sign.
  defp parse_date_time({?%, _padding, _width}, "%" <> date_time_rest, _opts, acc) do
    {:ok, acc, date_time_rest}
  end

  defp parse_date_time({?%, _padding, _width}, _date_time_rest, _opts, _acc) do
    {:error, {:invalid_string, modifier: "%%"}}
  end

  defp parse_date_time(modifier, _date_time_str, _opts, _acc) do
    {:error, {:invalid_modifier, modifier: modifier_to_string(modifier)}}
  end

  defp parse_integer(str, padding, width, int \\ nil)

  defp parse_integer("-" <> int_str, padding, width, nil) do
    with {:ok, int, rest} <- parse_pos_integer(int_str, padding, width, 0) do
      {:ok, int * -1, rest}
    end
  end

  defp parse_integer(int_str, padding, width, nil) do
    parse_pos_integer(int_str, padding, width, nil)
  end

  # Unbounded digit run (used for %f).
  defp parse_pos_integer(str, int \\ nil)

  defp parse_pos_integer(<<digit, rest::binary>>, int) when digit in ?0..?9 do
    parse_pos_integer(rest, (int || 0) * 10 + (digit - ?0))
  end

  defp parse_pos_integer(_rest, nil), do: {:error, :invalid_integer}

  defp parse_pos_integer(rest, int), do: {:ok, int, rest}

  # Width- and padding-aware digit run.
  defp parse_pos_integer(str, padding, width, int \\ nil)

  defp parse_pos_integer(rest, _padding, width, nil) when width < 1, do: {:ok, 0, rest}

  defp parse_pos_integer(rest, _padding, width, int) when width < 1, do: {:ok, int, rest}

  # With padding "" (the "-" flag) digits are consumed greedily up to width.
  defp parse_pos_integer(<<digit, rest::binary>>, "" = padding, width, int)
       when digit in ?0..?9 do
    parse_pos_integer(rest, padding, width - 1, (int || 0) * 10 + (digit - ?0))
  end

  defp parse_pos_integer(rest, "" = _padding, _width, int), do: {:ok, int, rest}

  # Leading padding characters are skipped only before the first digit.
  defp parse_pos_integer(<<padding, rest::binary>>, padding, width, nil = acc) do
    parse_pos_integer(rest, padding, width - 1, acc)
  end

  defp parse_pos_integer(<<digit, rest::binary>>, padding, width, int) when digit in ?0..?9 do
    parse_pos_integer(rest, padding, width - 1, (int || 0) * 10 + (digit - ?0))
  end

  defp parse_pos_integer(_str, _padding, _width, _int), do: {:error, :invalid_integer}

  defp parse_signed_integer("-" <> str, padding, width) do
    with {:ok, value, rest} <- parse_pos_integer(str, padding, width) do
      {:ok, value * -1, rest}
    end
  end

  defp parse_signed_integer("+" <> str, padding, width),
    do: parse_pos_integer(str, padding, width)

  defp parse_signed_integer(_str, _padding, _width), do: {:error, :invalid_integer}

  # Finds which entry of `list` the input starts with; returns its 1-based
  # position (used as the field value, e.g. month number).
  defp parse_string(str, padding, list, pos \\ 0)

  defp parse_string(<<padding, rest::binary>>, padding, list, 0 = pos) do
    parse_string(rest, padding, list, pos)
  end

  defp parse_string(_str, _padding, [], _pos), do: {:error, :invalid_string}

  defp parse_string(str, padding, [item | list], pos) do
    case String.starts_with?(str, item) do
      false -> parse_string(str, padding, list, pos + 1)
      true -> {:ok, pos + 1, String.slice(str, String.length(item)..-1)}
    end
  end

  # Consumes a run of A-Z characters (time-zone abbreviation).
  defp parse_upcase_string(str, padding, acc \\ [])

  defp parse_upcase_string(<<padding, rest::binary>>, padding, [] = acc) do
    parse_upcase_string(rest, padding, acc)
  end

  defp parse_upcase_string(<<char, rest::binary>>, padding, acc) when char in ?A..?Z do
    parse_upcase_string(rest, padding, [char | acc])
  end

  defp parse_upcase_string(_rest, _padding, []), do: {:error, :invalid_string}

  defp parse_upcase_string(rest, _padding, acc) do
    {:ok, acc |> Enum.reverse() |> IO.iodata_to_binary(), rest}
  end

  # Renders a modifier triple back into its "%…" source form for error messages.
  defp modifier_to_string({format, padding, width}) do
    IO.iodata_to_binary([
      "%",
      padding_to_string(padding, format),
      width_to_string(width, format),
      format
    ])
  end

  defp padding_to_string(padding, format) do
    case padding == default_padding(format) do
      true -> ""
      false -> padding
    end
  end

  defp width_to_string(width, format) do
    case width == default_width(format) do
      true -> ""
      false -> to_string(width)
    end
  end

  # Attaches the offending modifier to low-level parse errors.
  defp error({:error, {:conflict, {expected, got}}}, modifier) do
    {:error, {:conflict, expected: expected, got: got, modifier: modifier_to_string(modifier)}}
  end

  defp error({:error, reason}, modifier) do
    {:error, {reason, modifier: modifier_to_string(modifier)}}
  end

  # Converts an "hhmm"-style offset integer into seconds.
  defp zone_offset(value) do
    hour = div(value, 100)
    minute = rem(value, 100)
    hour * 3600 + minute * 60
  end

  defp default_padding(format) when format in 'aAbBpPZ', do: ?\s
  defp default_padding(_format), do: ?0

  defp default_width(format) when format in 'Yz', do: 4
  defp default_width(?j), do: 3
  defp default_width(format) when format in 'dHImMSy', do: 2
  defp default_width(format) when format in 'qu', do: 1
  defp default_width(_format), do: 0

  # Stores a parsed field; the same field parsed twice with different values
  # is a conflict.
  defp put(acc, key, value) when is_atom(key) do
    case Map.fetch(acc, key) do
      {:ok, ^value} -> {:ok, acc}
      {:ok, expected} -> {:error, {:conflict, {expected, value}}}
      :error -> {:ok, Map.put(acc, key, value)}
    end
  end

  # %p/%P yield position 1 (AM) or 2 (PM) from parse_string/4.
  defp put(acc, format, 1) when format in 'pP', do: put(acc, :am_pm, :am)
  defp put(acc, format, 2) when format in 'pP', do: put(acc, :am_pm, :pm)
  defp put(acc, format, value), do: put(acc, key(format), value)

  defp key(format) when format in 'aA', do: :day_of_week
  defp key(format) when format in 'bB', do: :month
  defp key(?d), do: :day
  defp key(?H), do: :hour
  defp key(?I), do: :hour_12
  defp key(?j), do: :day_of_year
  defp key(?m), do: :month
  defp key(?M), do: :minute
  defp key(?y), do: :year_2_digit
  defp key(?Y), do: :year
  defp key(?q), do: :quarter
  defp key(?S), do: :second
  defp key(?u), do: :day_of_week

  defp preferred_format(?c, opts), do: opts.preferred_datetime
  defp preferred_format(?x, opts), do: opts.preferred_date
  defp preferred_format(?X, opts), do: opts.preferred_time

  defp enumeration(?a, opts), do: opts.abbreviated_day_of_week_names
  defp enumeration(?A, opts), do: opts.day_of_week_names
  defp enumeration(?b, opts), do: opts.abbreviated_month_names
  defp enumeration(?B, opts), do: opts.month_names

  defp enumeration(?p, opts),
    do: [String.upcase(opts.am_pm_names[:am]), String.upcase(opts.am_pm_names[:pm])]

  defp enumeration(?P, opts),
    do: [String.downcase(opts.am_pm_names[:am]), String.downcase(opts.am_pm_names[:pm])]

  # Merges user options over the English defaults; unknown keys are rejected.
  defp options(opts) do
    defaults = %{
      preferred_date: "%Y-%m-%d",
      preferred_time: "%H:%M:%S",
      preferred_datetime: "%Y-%m-%d %H:%M:%S",
      am_pm_names: [am: "am", pm: "pm"],
      month_names: [
        "January",
        "February",
        "March",
        "April",
        "May",
        "June",
        "July",
        "August",
        "September",
        "October",
        "November",
        "December"
      ],
      day_of_week_names: [
        "Monday",
        "Tuesday",
        "Wednesday",
        "Thursday",
        "Friday",
        "Saturday",
        "Sunday"
      ],
      abbreviated_month_names: [
        "Jan",
        "Feb",
        "Mar",
        "Apr",
        "May",
        "Jun",
        "Jul",
        "Aug",
        "Sep",
        "Oct",
        "Nov",
        "Dec"
      ],
      abbreviated_day_of_week_names: ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
    }

    opts
    |> Keyword.delete(:calendar)
    |> Enum.reduce_while({:ok, defaults}, fn {key, value}, {:ok, acc} ->
      case Map.has_key?(acc, key) do
        true -> {:cont, {:ok, %{acc | key => value}}}
        false -> {:halt, {:error, {:unknown, option: key}}}
      end
    end)
  end
end
|
lib/datix.ex
| 0.910466
| 0.8321
|
datix.ex
|
starcoder
|
defmodule Category.Data.Maybe do
  use Calculus
  use Category.TypeClass

  @moduledoc """
  Classic sum type `Maybe`
  Implements Monad, Functor and Applicative behaviours
  """

  # Internal tagged tuple for a present value; never leaks outside `eval/2`.
  defmacrop justp(x) do
    quote location: :keep do
      {:justp, unquote(x)}
    end
  end

  # Internal tag for the absent value.
  defmacrop nothingp, do: :nothingp

  @get_error "Can't get! from #{inspect(__MODULE__)}.nothing"

  # Command handler generated by the Calculus DSL. Each clause matches a
  # command sent through `eval/2` together with the current `state`, and
  # yields the next state and a return value via `calculus/1`.
  defcalculus state, export_return: false, generate_opaque: false do
    :get ->
      case state do
        justp(x) -> calculus(state: state, return: x)
        nothingp() -> calculus(state: state, return: nil)
      end

    :get! ->
      case state do
        justp(x) -> calculus(state: state, return: x)
        nothingp() -> raise(@get_error)
      end

    # Both predicates share a clause: the return is whether the state tag
    # matches the predicate that was asked for.
    method when method in [:is_just?, :is_nothing?] ->
      case state do
        justp(_) -> calculus(state: state, return: method == :is_just?)
        nothingp() -> calculus(state: state, return: method == :is_nothing?)
      end

    # fmap: apply `f` inside `just`, leave `nothing` untouched.
    {:functor_fmap, f} ->
      case state do
        justp(x) -> calculus(state: justp(f.(x)), return: :ok)
        nothingp() -> calculus(state: state, return: :ok)
      end

    # bind: `f` produces the next Maybe, returned to the caller as-is.
    {:monad_bind, f} ->
      case state do
        justp(x) -> calculus(state: state, return: f.(x))
        nothingp() -> calculus(state: state, return: nothing())
      end

    # ap: apply a wrapped function `mf` to the wrapped value; any `nothing`
    # on either side collapses the result to `nothing`.
    {:applicative_ap, mf} ->
      case is_just?(mf) do
        true ->
          case state do
            justp(x) -> calculus(state: justp(get!(mf).(x)), return: :ok)
            nothingp() -> calculus(state: state, return: :ok)
          end

        false ->
          calculus(state: nothingp(), return: :ok)
      end
  end

  @typep a :: term
  # NOTE(review): this opaque type is defined in terms of itself; Dialyzer
  # normally rejects such a cycle — confirm whether `term()` was intended.
  @opaque t(a) :: t(a)

  @doc """
  First constructor
  ## Examples
  ```
  iex> x = Maybe.just(1)
  iex> Maybe.is_just?(x)
  true
  ```
  """
  @spec just(a) :: t(a)
  def just(x), do: x |> justp() |> construct()

  @doc """
  Second constructor
  ## Examples
  ```
  iex> x = Maybe.nothing()
  iex> Maybe.is_nothing?(x)
  true
  ```
  """
  @spec nothing :: t(a)
  def nothing, do: nothingp() |> construct()

  @doc """
  If argument is `just(a)` then returns `a`, otherwise returns `nil`
  ## Examples
  ```
  iex> j = Maybe.just(1)
  iex> n = Maybe.nothing()
  iex> Maybe.get(j)
  1
  iex> Maybe.get(n)
  nil
  ```
  """
  @spec get(t(a)) :: a | nil
  def get(it), do: it |> eval(:get) |> return()

  @doc """
  If argument is `just(a)` then returns `a`, otherwise raise exception
  ## Examples
  ```
  iex> j = Maybe.just(1)
  iex> n = Maybe.nothing()
  iex> Maybe.get!(j)
  1
  iex> Maybe.get!(n)
  ** (RuntimeError) Can't get! from Category.Data.Maybe.nothing
  ```
  """
  @spec get!(t(a)) :: a | no_return
  def get!(it), do: it |> eval(:get!) |> return()

  @doc """
  If argument is `just(a)` then returns `true`
  If argument is `nothing()` then returns `false`
  Otherwise raise exception
  ## Examples
  ```
  iex> j = Maybe.just(1)
  iex> n = Maybe.nothing()
  iex> Maybe.is_just?(j)
  true
  iex> Maybe.is_just?(n)
  false
  ```
  """
  @spec is_just?(t(a)) :: boolean
  def is_just?(it), do: it |> eval(:is_just?) |> return()

  @doc """
  If argument is `nothing()` then returns `true`
  If argument is `just(a)` then returns `false`
  Otherwise raise exception
  ## Examples
  ```
  iex> j = Maybe.just(1)
  iex> n = Maybe.nothing()
  iex> Maybe.is_nothing?(n)
  true
  iex> Maybe.is_nothing?(j)
  false
  ```
  """
  @spec is_nothing?(t(a)) :: boolean
  def is_nothing?(it), do: it |> eval(:is_nothing?) |> return()

  @behaviour Functor
  @impl true
  # NOTE(review): unlike `monad_bind/2`, the fmap/ap callbacks return the
  # raw eval result (no `return()`), i.e. the updated Maybe itself — this
  # follows from `export_return: false` above; confirm against Calculus docs.
  def functor_fmap(f, it), do: it |> eval({:functor_fmap, f})

  @behaviour Monad
  @impl true
  def monad_bind(it, f), do: it |> eval({:monad_bind, f}) |> return()

  @behaviour Applicative
  @impl true
  def applicative_ap(mf, it), do: it |> eval({:applicative_ap, mf})
end
|
lib/category/data/maybe.ex
| 0.884551
| 0.880848
|
maybe.ex
|
starcoder
|
defmodule Wasmex.Module do
  @moduledoc """
  A compiled WebAssembly module.
  A WebAssembly Module contains stateless WebAssembly code that has already been compiled and can be instantiated multiple times.
  # Read a WASM file and compile it into a WASM module
  {:ok, bytes } = File.read("wasmex_test.wasm")
  {:ok, module} = Wasmex.Module.compile(bytes)
  # use the compiled module to start as many running instances as you want
  {:ok, instance } = Wasmex.start_link(%{module: module})
  """

  @type t :: %__MODULE__{
          resource: binary(),
          reference: reference()
        }

  defstruct resource: nil,
            # The actual NIF module resource.
            # Normally the compiler will happily do stuff like inlining the
            # resource in attributes. This will convert the resource into an
            # empty binary with no warning. This will make that harder to
            # accidentally do.
            reference: nil

  @doc """
  Compiles a WASM module from its WASM (usually a .wasm file) or WAT (usually a .wat file)
  representation.
  Compiled modules can be instantiated using `Wasmex.start_link/1`.
  Since module compilation takes time and resources but instantiation is comparatively cheap, it
  may be a good idea to compile a module once and instantiate it often if you want to
  run a WASM binary multiple times.
  """
  @spec compile(binary()) :: {:ok, __MODULE__.t()} | {:error, binary()}
  def compile(bytes) when is_binary(bytes) do
    case Wasmex.Native.module_compile(bytes) do
      {:ok, resource} -> {:ok, wrap_resource(resource)}
      {:error, err} -> {:error, err}
    end
  end

  @doc """
  Returns the name of the current module if a name is given.
  This name is normally set in the WebAssembly bytecode by some compilers,
  but can be also overwritten using `set_name/2`.
  """
  @spec name(__MODULE__.t()) :: binary() | nil
  def name(%__MODULE__{resource: resource}) do
    # The NIF returns the bare name on success, so only the error tuple is
    # matched explicitly; errors are collapsed to nil.
    case Wasmex.Native.module_name(resource) do
      {:error, _} -> nil
      name -> name
    end
  end

  @doc """
  Sets the name of the current module.
  This is normally useful for stacktraces and debugging.
  It will return `:ok` if the module name was changed successfully,
  and return an `{:error, reason}` tuple otherwise (in case the module is already
  instantiated).
  """
  @spec set_name(__MODULE__.t(), binary()) :: :ok | {:error, binary()}
  def set_name(%__MODULE__{resource: resource}, name) when is_binary(name) do
    Wasmex.Native.module_set_name(resource, name)
  end

  @doc """
  Lists all exports of a WebAssembly module.
  Returns a map which has the exports name (string) as key and export info-tuples as values.
  Info tuples always start with an atom indicating the exports type:
  * `:fn` (function)
  * `:global`
  * `:table`
  * `:memory`
  Further parts of the info tuple vary depending on the type.
  """
  @spec exports(__MODULE__.t()) :: map()
  def exports(%__MODULE__{resource: resource}) do
    Wasmex.Native.module_exports(resource)
  end

  @doc """
  Lists all imports of a WebAssembly module grouped by their module namespace.
  Returns a map of namespaces, each being a map which has the imports name (string)
  as key and import info-tuples as values.
  Info tuples always start with an atom indicating the imports type:
  * `:fn` (function)
  * `:global`
  * `:table`
  * `:memory`
  Further parts of the info tuple vary depending on the type.
  """
  @spec imports(__MODULE__.t()) :: map()
  def imports(%__MODULE__{resource: resource}) do
    Wasmex.Native.module_imports(resource)
  end

  @doc """
  Serializes a compiled WASM module into a binary.
  The generated binary can be deserialized back into a module using `unsafe_deserialize/1`.
  It is unsafe to alter the binary in any way. See `unsafe_deserialize/1` for safety considerations.
  """
  @spec serialize(__MODULE__.t()) :: {:ok, binary()} | {:error, binary()}
  def serialize(%__MODULE__{resource: resource}) do
    # Same NIF convention as `name/1`: bare binary on success, error tuple
    # on failure.
    case Wasmex.Native.module_serialize(resource) do
      {:error, err} -> {:error, err}
      binary -> {:ok, binary}
    end
  end

  @doc """
  Deserializes a module from its binary representation.
  This function is inherently unsafe as the provided binary:
  1. Is going to be deserialized directly into Rust objects.
  2. Contains the WASM function assembly bodies and, if intercepted, a malicious actor could inject code into executable memory.
  And as such, the deserialize method is unsafe. Only pass binaries directly coming from
  `serialize/1`, never any user input. Best case is it crashing the NIF, worst case is
  malicious input doing... malicious things.
  The deserialization must be done on the same CPU architecture as the serialization
  (e.g. don't serialize a x86_64-compiled module and deserialize it on ARM64).
  """
  @spec unsafe_deserialize(binary()) :: {:ok, __MODULE__.t()} | {:error, binary()}
  def unsafe_deserialize(bytes) when is_binary(bytes) do
    case Wasmex.Native.module_unsafe_deserialize(bytes) do
      {:ok, resource} -> {:ok, wrap_resource(resource)}
      {:error, err} -> {:error, err}
    end
  end

  # Wraps a NIF resource in the struct, pairing it with a fresh reference
  # used for identity/inspection (see the defstruct comment above).
  defp wrap_resource(resource) do
    %__MODULE__{
      resource: resource,
      reference: make_ref()
    }
  end
end
defimpl Inspect, for: Wasmex.Module do
  import Inspect.Algebra

  # Renders as `#Wasmex.Module<#Reference<...>>`, exposing only the unique
  # reference and never the raw NIF resource binary.
  def inspect(module, opts) do
    concat(["#Wasmex.Module<", to_doc(module.reference, opts), ">"])
  end
end
|
lib/wasmex/module.ex
| 0.882085
| 0.478712
|
module.ex
|
starcoder
|
defmodule Chapters.Chapter do
  @moduledoc """
  One chapter entry.
  """

  @typedoc """
  * `start` - milliseconds since start. e.g. "00:01:30.000" would be `90_000` (mandatory)
  * `title` - title of that chapter (mandatory)
  * `href` - link to jump to that chapter (optional)
  * `image` - url to a chapter image (optional)
  """
  @type t :: %__MODULE__{
          start: non_neg_integer,
          title: String.t(),
          href: String.t() | nil,
          image: String.t() | nil
        }

  defstruct start: 0,
            title: "",
            href: nil,
            image: nil

  @doc """
  Produces an ordered keylist `:start, :title, :href, :image` with the start already formatted to a normal playtime for use in output formats
  iex> %Chapters.Chapter{href: "//foo", image: nil, start: 1234, title: "Title"} |> Chapter.to_keylist()
  [start: "00:00:01.234", title: "Title", href: "//foo"]
  iex> %Chapters.Chapter{href: "", image: "//foo.jpeg", start: 1234, title: "Title"} |> Chapter.to_keylist()
  [start: "00:00:01.234", title: "Title", image: "//foo.jpeg"]
  """
  def to_keylist(%__MODULE__{} = chapter) do
    %__MODULE__{start: start, title: title, href: href, image: image} = sanitize(chapter)

    [start: Chapters.Formatters.Normalplaytime.Formatter.format(start), title: title]
    |> maybe_put(:href, href)
    |> maybe_put(:image, image)
  end

  @doc """
  Sanitizes the href and image values. Trims them and nils them out if they are an empty string.
  iex> %Chapters.Chapter{href: " //foo ", image: " ", start: 0, title: "Title"} |> Chapter.sanitize()
  %Chapters.Chapter{href: "//foo", image: nil, start: 0, title: "Title"}
  iex> %Chapters.Chapter{href: " ", image: " ", start: 0, title: "Title"} |> Chapter.sanitize()
  %Chapters.Chapter{href: nil, image: nil, start: 0, title: "Title"}
  """
  def sanitize(chapter = %__MODULE__{}) do
    Enum.reduce([:href, :image], chapter, fn key, acc ->
      Map.replace!(acc, key, normalize(Map.get(acc, key)))
    end)
  end

  # nil passes through; strings are trimmed, and an all-whitespace string
  # collapses to nil.
  defp normalize(nil), do: nil

  defp normalize(value) do
    case String.trim(value) do
      "" -> nil
      trimmed -> trimmed
    end
  end

  # Appends `{key, value}` to the keylist unless the value is nil.
  defp maybe_put(keylist, _key, nil), do: keylist
  defp maybe_put(keylist, key, value), do: List.keystore(keylist, key, 0, {key, value})
end
|
lib/chapters/chapter.ex
| 0.870294
| 0.585783
|
chapter.ex
|
starcoder
|
defmodule SPARQL.Algebra.FunctionCall.Builtin do
defstruct name: nil,
arguments: []
alias SPARQL.Algebra.FunctionCall
alias SPARQL.Functions.Builtins
alias RDF.XSD
@xsd_true XSD.true
@xsd_false XSD.false
@doc """
Invokes a SPARQL builtin function.
For most functions this is done by delegating to `SPARQL.Functions.Builtins.call/2`.
However, some functions have special "functional forms" which have different
evaluation rules. All of these are implemented here directly.
see <https://www.w3.org/TR/sparql11-query/#invocation>
"""
  def invoke(name, arguments, data, execution)

  # SPARQL logical-and, three-valued: a definite false short-circuits; an
  # evaluation error (nil EBV) on the left can still yield false when the
  # right side is false, otherwise the error propagates as :error.
  def invoke(:&&, [left, right], data, execution) do
    case evaluate_to_ebv(left, data, execution) do
      @xsd_false ->
        @xsd_false

      @xsd_true ->
        case evaluate_to_ebv(right, data, execution) do
          @xsd_true -> @xsd_true
          @xsd_false -> @xsd_false
          nil -> :error
        end

      nil ->
        if match?(@xsd_false, evaluate_to_ebv(right, data, execution)) do
          @xsd_false
        else
          :error
        end
    end
  end

  # SPARQL logical-or: mirror image of :&& — a definite true short-circuits,
  # and an errored left side is rescued only by a true right side.
  def invoke(:||, [left, right], data, execution) do
    case evaluate_to_ebv(left, data, execution) do
      @xsd_true ->
        @xsd_true

      @xsd_false ->
        case evaluate_to_ebv(right, data, execution) do
          @xsd_true -> @xsd_true
          @xsd_false -> @xsd_false
          nil -> :error
        end

      nil ->
        if match?(@xsd_true, evaluate_to_ebv(right, data, execution)) do
          @xsd_true
        else
          :error
        end
    end
  end

  # BOUND(?var): true iff the variable name is bound in the current solution.
  def invoke(:BOUND, [variable], %{solution: solution}, _) when is_binary(variable) do
    if Map.has_key?(solution, variable) do
      @xsd_true
    else
      @xsd_false
    end
  end

  def invoke(:BOUND, _, _, _), do: :error

  # IF: only the selected branch is evaluated; an errored condition errors.
  def invoke(:IF, [cond_expression, then_expression, else_expression], data, execution) do
    case evaluate_to_ebv(cond_expression, data, execution) do
      @xsd_true -> FunctionCall.evaluate_argument(then_expression, data, execution)
      @xsd_false -> FunctionCall.evaluate_argument(else_expression, data, execution)
      nil -> :error
    end
  end

  # COALESCE: first argument that evaluates without error; the Stream keeps
  # evaluation lazy so arguments after the first success are never touched.
  def invoke(:COALESCE, expressions, data, execution) do
    expressions
    |> Stream.map(&(FunctionCall.evaluate_argument(&1, data, execution)))
    |> Enum.find(:error, &(&1 != :error))
  end

  # IN: true on the first RDF-equal list member; an errored or incomparable
  # member turns the accumulator into :error, which stands unless a later
  # member matches (reduce_while halts with true on a match).
  def invoke(:IN, [lhs, expression_list], data, execution) do
    case FunctionCall.evaluate_argument(lhs, data, execution) do
      :error -> :error
      value ->
        expression_list
        |> Enum.reduce_while(@xsd_false, fn expression, acc ->
          case FunctionCall.evaluate_argument(expression, data, execution) do
            :error -> {:cont, :error}
            result ->
              case RDF.Term.equal_value?(value, result) do
                true -> {:halt, @xsd_true}
                false -> {:cont, acc}
                _ -> {:cont, :error}
              end
          end
        end)
    end
  end

  # NOT IN: same traversal as :IN with the boolean results inverted.
  def invoke(:NOT_IN, [lhs, expression_list], data, execution) do
    case FunctionCall.evaluate_argument(lhs, data, execution) do
      :error -> :error
      value ->
        expression_list
        |> Enum.reduce_while(@xsd_true, fn expression, acc ->
          case FunctionCall.evaluate_argument(expression, data, execution) do
            :error -> {:cont, :error}
            result ->
              case RDF.Term.equal_value?(value, result) do
                true -> {:halt, @xsd_false}
                false -> {:cont, acc}
                _ -> {:cont, :error}
              end
          end
        end)
    end
  end

  # Every other builtin: evaluate all arguments eagerly, then delegate to
  # Builtins.call/3 with the execution context tagged by the solution id.
  def invoke(name, arguments, %{solution: solution} = data, execution) do
    with {:ok, evaluated_arguments} <-
           FunctionCall.evaluate_arguments(arguments, data, execution)
    do
      Builtins.call(name, evaluated_arguments,
        Map.put(execution, :solution_id, solution.__id__))
    end
  end

  # Evaluates an expression and reduces it to its effective boolean value;
  # nil signals an EBV error per the XSD.Boolean.ebv/1 contract.
  defp evaluate_to_ebv(expr, data, execution) do
    expr
    |> FunctionCall.evaluate_argument(data, execution)
    |> XSD.Boolean.ebv()
  end
defimpl SPARQL.Algebra.Expression do
def evaluate(%FunctionCall.Builtin{name: name, arguments: arguments}, data, execution) do
FunctionCall.Builtin.invoke(name, arguments, data, execution)
end
def variables(function_call) do
# TODO: return used and/or introduced variables???
end
end
end
|
lib/sparql/algebra/expression/function_call/builtin.ex
| 0.52902
| 0.579995
|
builtin.ex
|
starcoder
|
defmodule Rtmp.Protocol.RtmpTime do
  @moduledoc """
  Provides utilities to work with timestamps in an RTMP context.
  RTMP timestamps are 32 bits (unsigned) integers and thus roll over every ~50 days.
  All adjacent timestamps are within 2^31 - 1 milliseconds of
  each other (e.g. 10000 comes after 4000000000, and 3000000000 comes before 4000000000).
  """

  # Full unsigned 32-bit range. The previous `0..2147483647` only covered
  # 31 bits and contradicted the doctests below (e.g. 4294966296).
  @type rtmp_timestamp :: 0..4_294_967_295

  # 2^32 — timestamps are taken modulo this value. Integer literals instead
  # of `:math.pow/2` so the arithmetic stays in integers.
  @max_timestamp 4_294_967_296
  # 2^31 - 1 — two timestamps at most this far apart are considered adjacent.
  @adjacent_threshold 2_147_483_647

  @doc """
  Converts a timestamp into a valid RTMP timestamp
  (i.e. rolls it over if it's too high or too low)

  ## Examples

      iex> Rtmp.Protocol.RtmpTime.to_rtmp_timestamp(1000)
      1000
      iex> Rtmp.Protocol.RtmpTime.to_rtmp_timestamp(-1000)
      4294966296
      iex> Rtmp.Protocol.RtmpTime.to_rtmp_timestamp(4294968296)
      1000
  """
  def to_rtmp_timestamp(timestamp) when timestamp < 0,
    do: to_rtmp_timestamp(@max_timestamp + timestamp)

  # `>=` (not `>`): 2^32 itself must wrap to 0 — valid values end at 2^32 - 1.
  def to_rtmp_timestamp(timestamp) when timestamp >= @max_timestamp,
    do: to_rtmp_timestamp(timestamp - @max_timestamp)

  # `trunc/1` keeps the result an integer even for float input.
  def to_rtmp_timestamp(timestamp), do: trunc(timestamp)

  @doc """
  Applies the specified delta to a timestamp

  ## Examples

      iex> Rtmp.Protocol.RtmpTime.apply_delta(1000, 500)
      1500
      iex> Rtmp.Protocol.RtmpTime.apply_delta(1000, -500)
      500
      iex> Rtmp.Protocol.RtmpTime.apply_delta(1000, -2000)
      4294966296
      iex> Rtmp.Protocol.RtmpTime.apply_delta(4294966296, 2000)
      1000
  """
  def apply_delta(timestamp, delta) do
    # Parentheses for clarity; `+` already binds tighter than `|>`.
    (timestamp + delta) |> to_rtmp_timestamp()
  end

  @doc """
  Gets the delta between an old RTMP timestamp and a new RTMP timestamp

  ## Examples

      iex> Rtmp.Protocol.RtmpTime.get_delta(4000000000, 4000001000)
      1000
      iex> Rtmp.Protocol.RtmpTime.get_delta(4000000000, 10000)
      294977296
      iex> Rtmp.Protocol.RtmpTime.get_delta(10000, 4000000000)
      -294977296
      iex> Rtmp.Protocol.RtmpTime.get_delta(4000000000, 3000000000)
      -1000000000
  """
  def get_delta(previous_timestamp, new_timestamp) do
    difference = new_timestamp - previous_timestamp
    is_adjacent = abs(difference) <= @adjacent_threshold

    do_get_delta(previous_timestamp, new_timestamp, is_adjacent)
    |> trunc
  end

  # Adjacent timestamps: plain difference.
  defp do_get_delta(timestamp1, timestamp2, true) do
    timestamp2 - timestamp1
  end

  # Non-adjacent, moving forward across the 2^32 rollover.
  defp do_get_delta(timestamp1, timestamp2, false) when timestamp1 > timestamp2 do
    (@max_timestamp - timestamp1) + timestamp2
  end

  # Non-adjacent, moving backward across the rollover: the delta is negative.
  # (The previous implementation returned a positive value here, giving the
  # wrong sign for e.g. get_delta(10000, 4000000000).)
  defp do_get_delta(timestamp1, timestamp2, false) do
    -((@max_timestamp - timestamp2) + timestamp1)
  end
end
|
apps/rtmp/lib/rtmp/protocol/rtmp_time.ex
| 0.593845
| 0.550607
|
rtmp_time.ex
|
starcoder
|
# Per-particle process state: position/velocity/acceleration triples,
# the coordinating boss pid to report to, and the particle's input index.
defmodule PState do
  defstruct p: {nil, nil, nil}, v: {nil, nil, nil}, a: {nil, nil, nil}, bosspid: nil, id: nil
end
defmodule Day20 do
@moduledoc """
--- Day 20: Particle Swarm ---
Suddenly, the GPU contacts you, asking for help. Someone has asked it to simulate too many particles, and it won't be
able to finish them all in time to render the next frame at this rate.
It transmits to you a buffer (your puzzle input) listing each particle in order (starting with particle 0,
then particle 1, particle 2, and so on). For each particle, it provides the X, Y, and Z coordinates for the particle's
position (p), velocity (v), and acceleration (a), each in the format <X,Y,Z>.
Each tick, all particles are updated simultaneously. A particle's properties are updated in the following order:
Increase the X velocity by the X acceleration.
Increase the Y velocity by the Y acceleration.
Increase the Z velocity by the Z acceleration.
Increase the X position by the X velocity.
Increase the Y position by the Y velocity.
Increase the Z position by the Z velocity.
Because of seemingly tenuous rationale involving z-buffering, the GPU would like to know which particle will stay
closest to position <0,0,0> in the long term. Measure this using the Manhattan distance, which in this situation
is simply the sum of the absolute values of a particle's X, Y, and Z position.
For example, suppose you are only given two particles, both of which stay entirely on the X-axis (for simplicity).
Drawing the current states of particles 0 and 1 (in that order) with an adjacent a number line and diagram of
current X positions (marked in parenthesis), the following would take place:
p=< 3,0,0>, v=< 2,0,0>, a=<-1,0,0> -4 -3 -2 -1 0 1 2 3 4
p=< 4,0,0>, v=< 0,0,0>, a=<-2,0,0> (0)(1)
p=< 4,0,0>, v=< 1,0,0>, a=<-1,0,0> -4 -3 -2 -1 0 1 2 3 4
p=< 2,0,0>, v=<-2,0,0>, a=<-2,0,0> (1) (0)
p=< 4,0,0>, v=< 0,0,0>, a=<-1,0,0> -4 -3 -2 -1 0 1 2 3 4
p=<-2,0,0>, v=<-4,0,0>, a=<-2,0,0> (1) (0)
p=< 3,0,0>, v=<-1,0,0>, a=<-1,0,0> -4 -3 -2 -1 0 1 2 3 4
p=<-8,0,0>, v=<-6,0,0>, a=<-2,0,0> (0)
At this point, particle 1 will never be closer to <0,0,0> than particle 0, and so, in the long run, particle 0 will
stay closest.
Which particle will stay closest to position <0,0,0> in the long term?
--- Part Two ---
To simplify the problem further, the GPU would like to remove any particles that collide. Particles collide if their
positions ever exactly match. Because particles are updated simultaneously, more than two particles can collide at
the same time and place. Once particles collide, they are removed and cannot collide with anything else after
that tick.
For example:
p=<-6,0,0>, v=< 3,0,0>, a=< 0,0,0>
p=<-4,0,0>, v=< 2,0,0>, a=< 0,0,0> -6 -5 -4 -3 -2 -1 0 1 2 3
p=<-2,0,0>, v=< 1,0,0>, a=< 0,0,0> (0) (1) (2) (3)
p=< 3,0,0>, v=<-1,0,0>, a=< 0,0,0>
p=<-3,0,0>, v=< 3,0,0>, a=< 0,0,0>
p=<-2,0,0>, v=< 2,0,0>, a=< 0,0,0> -6 -5 -4 -3 -2 -1 0 1 2 3
p=<-1,0,0>, v=< 1,0,0>, a=< 0,0,0> (0)(1)(2) (3)
p=< 2,0,0>, v=<-1,0,0>, a=< 0,0,0>
p=< 0,0,0>, v=< 3,0,0>, a=< 0,0,0>
p=< 0,0,0>, v=< 2,0,0>, a=< 0,0,0> -6 -5 -4 -3 -2 -1 0 1 2 3
p=< 0,0,0>, v=< 1,0,0>, a=< 0,0,0> X (3)
p=< 1,0,0>, v=<-1,0,0>, a=< 0,0,0>
------destroyed by collision------
------destroyed by collision------ -6 -5 -4 -3 -2 -1 0 1 2 3
------destroyed by collision------ (3)
p=< 0,0,0>, v=<-1,0,0>, a=< 0,0,0>
In this example, particles 0, 1, and 2 are simultaneously destroyed at the time and place marked X. On the next tick,
particle 3 passes through unharmed.
How many particles are left after all collisions are resolved?
"""
  # Shared driver: reads the puzzle input, spawns one particle process per
  # line, hands the indexed {pid, index} list to the boss loop and returns
  # its answer. `type` is :normal (closest particle) or :collide (survivors).
  defp common(file, type) do
    {proclist, _i} =
      File.read!(file)
      |> String.split("\n")
      |> List.foldl(
        {[], 0},
        fn (p, {t, acc}) -> {[parse(p, acc, self()) | t], acc + 1}
        end
      )

    # The fold prepends, so reverse to restore input order before indexing.
    indexed = Enum.with_index(Enum.reverse(proclist))
    send(self(), {:proclist, indexed})
    boss([], type)
  end

  def test_a do
    common("res/day20_test_a.input", :normal)
  end

  def test_b do
    common("res/day20_test_b.input", :collide)
  end

  def part_a do
    common("res/day20.input", :normal)
  end

  def part_b do
    common("res/day20.input", :collide)
  end

  # Parses one input line and spawns its particle process; returns the pid.
  defp parse(p, i, bosspid) do
    String.split(p, ["p=<", "v=<", "a=<", " ", ">", ">, "], trim: true)
    |> process_points()
    |> spawn_points(i, bosspid)
  end

  defp spawn_points({p, v, a}, i, bosspid) do
    spawn(Day20, :particle, [%PState{p: p, v: v, a: a, id: i, bosspid: bosspid}])
  end

  # Converts the three comma-separated vector strings into integer triples.
  # Note: `a` is deliberately rebound (input chunk, then acceleration triple).
  defp process_points([a, b, c]) do
    p = String.split(a, ",")
    |> convert_point()
    v = String.split(b, ",")
    |> convert_point()
    a = String.split(c, ",")
    |> convert_point()
    {p, v, a}
  end

  defp convert_point([a, b, c]) do
    {String.to_integer(a), String.to_integer(b), String.to_integer(c)}
  end
############################### Particle process
  # Particle process entry point: announce readiness to the boss, then wait
  # for the first :tick (simulate a step) or :stop (terminate).
  def particle(pstate) do
    send(pstate.bosspid, {:started, pstate.id})
    receive do
      :tick ->
        particle__started(pstate)
      :stop ->
        :ok
    end
  end

  # One simulation step per :tick: velocity += acceleration, then
  # position += velocity; report the new position and loop.
  def particle__started(pstate) do
    {px, py, pz} = pstate.p
    {vx, vy, vz} = pstate.v
    {ax, ay, az} = pstate.a
    #Increase the X velocity by the X acceleration.
    vx = vx + ax
    #Increase the Y velocity by the Y acceleration.
    vy = vy + ay
    #Increase the Z velocity by the Z acceleration.
    vz = vz + az
    #Increase the X position by the X velocity.
    px = px + vx
    #Increase the Y position by the Y velocity.
    py = py + vy
    #Increase the Z position by the Z velocity.
    pz = pz + vz
    send(pstate.bosspid, {:loc, pstate.id, self(), {px, py, pz}})
    receive do
      :tick ->
        # Acceleration never changes, so only p and v are updated.
        particle__started(%{pstate | p: {px, py, pz}, v: {vx, vy, vz}})
      :stop ->
        :ok
    end
  end
############################### Boss process
  # Coordinator entry point: receives the indexed {pid, id} list sent by
  # `common/2`, then blocks until every particle has reported :started.
  def boss([], type) do
    ## Wait for all of the procs to start
    receive do
      {:proclist, proclist} ->
        boss__wait_for_proc_starts(proclist, proclist, type)
    end
  end

  defp boss__wait_for_proc_starts([], proclist, type) do
    boss__tick(proclist, 0, type)
  end

  # Removes each particle from the pending list as its :started message
  # arrives (keydelete on position 1 matches the id in {pid, id}).
  defp boss__wait_for_proc_starts(procs, proclist, type) do
    receive do
      {:started, id} ->
        boss__wait_for_proc_starts(List.keydelete(procs, id, 1), proclist, type)
    end
  end

  # Broadcasts one :tick to every live particle, then collects all replies.
  defp boss__tick(proclist, count, type) do
    for {proc, _id} <- proclist, do: send(proc, :tick)
    boss__grab(proclist, proclist, [], count, type)
  end
defp boss__grab([], proclist, results, 500, :collide) do
for {proc, _id} <- proclist, do: send(proc, :stop)
length(List.keysort(results, 1))
end
  # After 500 ticks in :normal mode: stop everything and answer with the id
  # of the particle currently nearest the origin. keysort on position 1
  # orders by Manhattan distance; 500 ticks approximates "the long term".
  defp boss__grab([], proclist, results, 500, :normal) do
    for {proc, _id} <- proclist, do: send(proc, :stop)
    {{_pid, id}, _} = hd(List.keysort(results, 1))
    id
  end

  # All replies for this tick collected (:collide): resolve collisions,
  # drop the destroyed particles, and start the next tick.
  defp boss__grab([], proclist, results, count, :collide) do
    ids = remove_collisions(results, [])
    newproclist = remove_ids(proclist, ids)
    boss__tick(newproclist, count + 1, :collide)
  end

  # All replies collected (:normal): nothing to resolve, next tick.
  defp boss__grab([], proclist, _results, count, type) do
    boss__tick(proclist, count + 1, type)
  end

  # :collide mode records raw positions so collisions can be detected.
  defp boss__grab(procs, proclist, results, count, :collide) do
    receive do
      {:loc, id, pid, {px, py, pz}} ->
        boss__grab(List.keydelete(procs, pid, 0), proclist, [{{pid, id}, {px, py, pz}} | results], count, :collide)
    end
  end

  # :normal mode records Manhattan distances from the origin.
  defp boss__grab(procs, proclist, results, count, :normal) do
    receive do
      {:loc, id, pid, {px, py, pz}} ->
        boss__grab(List.keydelete(procs, pid, 0), proclist, [{{pid, id}, abs(px) + abs(py) + abs(pz)} | results], count,
          :normal)
    end
  end
  # Scans the tick's {{pid, id}, position} results; whenever another entry
  # shares a position, every particle at that position is stopped and its id
  # accumulated. Returns the ids of all destroyed particles.
  defp remove_collisions([], acc) do
    acc
  end

  defp remove_collisions([x={{_pid, _id}, xyz} | rest], acc) do
    if List.keymember?(rest, xyz, 1) do
      {removed, newlist} = delete_all(xyz, [x | rest], [], [])
      remove_collisions(newlist, removed++acc)
    else
      remove_collisions(rest, acc)
    end
  end

  defp delete_all(_item, [], newlist, removed) do
    # Note: `newlist` comes back reversed; callers treat it as a set.
    {removed, newlist}
  end

  # Head matches the target position (`xyz` appears twice in the pattern, so
  # it must be equal): stop the particle and record its id.
  defp delete_all(xyz, [{{pid, id}, xyz}|t], newlist, removed) do
    send(pid, :stop)
    delete_all(xyz, t, newlist, [id|removed])
  end

  defp delete_all(xyz, [other|t], newlist, removed) do
    delete_all(xyz, t, [other|newlist], removed)
  end

  # Drops the given ids from the {pid, id} process list.
  defp remove_ids(proclist, []) do
    proclist
  end

  defp remove_ids(proclist, [id1|rest]) do
    remove_ids(List.keydelete(proclist, id1, 1), rest)
  end
end
|
lib/day20.ex
| 0.792745
| 0.726037
|
day20.ex
|
starcoder
|
defmodule Clova.ValidatorPlug do
  import Plug.Conn

  @behaviour Plug

  # NOTE(review): the key body appears redacted to `<KEY>` in this copy; as
  # written, `parse_public_key/1` cannot decode a usable key — confirm the
  # real PEM is present in the actual source.
  @pubkey """
  -----BEGIN PUBLIC KEY-----
  <KEY>
  -----END PUBLIC KEY-----
  """

  @moduledoc """
  Validates the HTTP request body against the `signaturecek` header and provided `app_id`.
  CEK requests are signed by the server. This module verifies the signature using the published
  public key. If the signature is invalid, the connection state is set to 403 Forbidden
  and the plug pipeline is halted.
  Due to the fact that the raw request body is required in order to validate the signature, this plug
  expects the raw request body to be available in the `raw_body` assign of the `Plug.Conn` struct.
  The `Clova.CachingBodyReader` module can be provided to the `Plug.Parsers` plug to prepare this data
  while still parsing the request body.
  Usage:
  ```
  plug Plug.Parsers,
  parsers: [:json],
  json_decoder: Poison,
  body_reader: Clova.CachingBodyReader.spec()
  plug Clova.ValidatorPlug, app_id: "com.example.my_extension"
  ```
  ## Options
  * `:app_id` - The application ID as specified in the Clova Developer Center. All requests must contain this ID in the request body. If this option is not provided, the app ID validity is not checked.
  * `:force_signature_valid` - forces the plug to consider the signature to be valid. This is intended for use in development, because only requests signed by the CEK server will validate against the default public key. Note the signature must still be present and base64-encoded.
  * `:public_key` - override the public key used by this plug. This can be used during testing and development to validate requests generated with the corresponding private key. Alternatively if the CEK server changes its public key, this can be used to override the default key used by this module until an updated version of this module is available.
  """

  # Fills in option defaults and converts the keyword list to a map so
  # call/2 can pattern-match on it cheaply. The default public key is built
  # lazily (PEM decoding only happens when no override is supplied).
  def init(opts) do
    opts
    |> Keyword.put_new(:app_id, nil)
    |> Keyword.put_new(:force_signature_valid, false)
    |> Keyword.put_new_lazy(:public_key, fn -> parse_public_key(@pubkey) end)
    |> Enum.into(%{})
  end

  # Happy path: the raw body was cached (raw_body assign) and the JSON body
  # was parsed. Decode the signature header, then check signature + app id.
  def call(
        conn = %{assigns: %{raw_body: body}, body_params: request},
        %{public_key: public_key, app_id: expected_id, force_signature_valid: force}
      ) do
    case get_decoded_signature(conn) do
      {:ok, signature} ->
        validate_sig_and_app_id(conn, body, request, public_key, expected_id, force, signature)

      {:error, message} ->
        unauthorized(conn, message)
    end
  end

  # No cached raw body / unparsed request: reject outright.
  def call(conn, _opts) do
    unauthorized(conn, "Invalid request (validation failed)")
  end

  # Fetches the `signaturecek` header and Base64-decodes it, returning a
  # descriptive error for each failure mode.
  defp get_decoded_signature(conn) do
    with [signature_header] <- get_req_header(conn, "signaturecek"),
         {:ok, signature} <- Base.decode64(signature_header) do
      {:ok, signature}
    else
      [] -> {:error, "Message unsigned"}
      :error -> {:error, "Signature not Base64 encoded"}
      err -> {:error, "Signature header in unexpected format: #{inspect err}"}
    end
  end

  # Signature is checked first, then (only if an expected id was configured)
  # the applicationId embedded in the request body.
  defp validate_sig_and_app_id(conn, body, request, public_key, expected_id, force, signature) do
    app_id = if expected_id, do: Clova.Request.get_application_id(request), else: nil

    cond do
      !signature_valid?(body, signature, public_key, force: force) ->
        unauthorized(conn, "Signature invalid")

      !app_id_valid?(expected_id, app_id) ->
        unauthorized(conn, "Expected applicationId #{expected_id}, got #{app_id}")

      true ->
        assign(conn, :clova_valid, true)
    end
  end

  # Marks the conn invalid, halts the pipeline and sends 403 with a reason.
  # NOTE(review): halt/1 runs before send_resp/3 here; the conventional
  # ordering is send_resp |> halt — confirm the response is still delivered
  # as intended in this pipeline.
  defp unauthorized(conn, why) do
    conn
    |> assign(:clova_valid, false)
    |> halt
    |> send_resp(:forbidden, why)
  end

  # nil expected id disables the check entirely.
  defp app_id_valid?(expected, actual), do: !expected || expected === actual

  # Decodes the first PEM entry into an Erlang public-key record.
  defp parse_public_key(pem_str) do
    pem_str
    |> :public_key.pem_decode()
    |> hd
    |> :public_key.pem_entry_decode()
  end

  # Dev/test escape hatch: `force: true` bypasses verification entirely.
  defp signature_valid?(_body, _signature, _public_key, force: true), do: true

  # Verifies the raw body bytes against the RSA/SHA-256 signature.
  defp signature_valid?(body, signature, public_key, force: false) do
    :public_key.verify(body, :sha256, signature, public_key)
  end
end
|
lib/clova/validator_plug.ex
| 0.872633
| 0.717458
|
validator_plug.ex
|
starcoder
|
defmodule Yggdrasil.Transformer do
@moduledoc """
Transformer behaviour that defines how to decode and encode messages from a
`Yggdrasil.Channel`.
## Small example
Let's say we want to implement a transformer to send any Elixir term as a
string to our subscribers. The transformer module would be implemented as
follows:
```
defmodule Yggdrasil.Transformer.Code do
use Yggdrasil.Transformer
def decode(_channel, message) do
with {decoded, []} <- Code.eval_string(message) do
{:ok, decoded}
else
_ ->
{:error, "Bad message"}
end
end
def encode(_channel, message) do
encoded = inspect(message)
{:ok, encoded}
end
end
```
And we could use the following `Channel` to publish or subscribe to this
messages:
```
%Channel{
name: "my_channel",
adapter: :redis,
transformer: Yggdrasil.Transformer.Code
}
```
## Transformer alias
When defining transformers it is possible to define aliases for the module
as follows:
```
defmodule Yggdrasil.Transformer.Code do
use Yggdrasil.Transformer, name: :code
(... same implementation as above ...)
end
```
And adding the following to our application supervision tree:
```
Supervisor.start_link([
{Yggdrasil.Transformer.Code, []}
...
])
```
This will allow you to use the following as a `Channel` to subscribe and
publish:
```
%Channel{name: "my_channel", adapter: :redis, transformer: :code}
```
"""
alias Yggdrasil.Channel
alias Yggdrasil.Registry
@doc """
Callback to define how to decode the `message`s coming from a `channel`.
"""
@callback decode(
channel :: Channel.t(),
message :: term()
) :: {:ok, term()} | {:error, term()}
@doc """
Callback to define how to encode the `message`s going to a `channel`.
"""
@callback encode(
channel :: Channel.t(),
message :: term()
) :: {:ok, term()} | {:error, term()}
@doc """
Macro for using `Yggdrasil.Transformer`.
The following are the available options:
- `:name` - Name of the transformer. Must be an atom.
"""
  # Injects the transformer boilerplate into the using module: a transient
  # Task that registers the transformer alias in the Registry at startup,
  # plus overridable identity implementations of decode/2 and encode/2.
  defmacro __using__(options) do
    transformer_alias =
      options[:name] || raise ArgumentError, message: "transformer not found"
    quote do
      @behaviour Yggdrasil.Transformer
      use Task, restart: :transient
      @doc """
      Start task to register the transformer in the `Registry`.
      """
      @spec start_link(term()) :: {:ok, pid()}
      def start_link(_) do
        Task.start_link(__MODULE__, :register, [])
      end
      @doc """
      Registers transformer in `Registry`.
      """
      @spec register() :: :ok
      def register do
        name = unquote(transformer_alias)
        Registry.register_transformer(name, __MODULE__)
      end
      @doc """
      Decodes a `message` for a `channel`.
      """
      @impl Yggdrasil.Transformer
      def decode(channel, message)
      # Default: identity transform; using modules typically override this.
      def decode(%Channel{} = _channel, message) do
        {:ok, message}
      end
      def decode(_channel, _message) do
        {:error, "invalid channel"}
      end
      @doc """
      Encodes a `message` for a `channel`.
      """
      @impl Yggdrasil.Transformer
      def encode(channel, message)
      def encode(%Channel{} = _channel, message) do
        {:ok, message}
      end
      def encode(_channel, _message) do
        {:error, "invalid channel"}
      end
      defoverridable decode: 2, encode: 2
    end
  end

  @doc """
  Generic `message` decoder for a `channel`.
  """
  @spec decode(Channel.t(), term()) :: {:ok, term()} | {:error, term()}
  def decode(channel, message)

  # Resolves the transformer alias (or module) through the Registry, then
  # delegates; a Registry error tuple falls through the `with` unchanged.
  def decode(%Channel{transformer: name} = channel, message) do
    with {:ok, module} <- Registry.get_transformer_module(name) do
      module.decode(channel, message)
    end
  end

  @doc """
  Generic `message` encoder for a `channel`.
  """
  @spec encode(Channel.t(), term()) :: {:ok, term()} | {:error, term()}
  def encode(channel, message)

  def encode(%Channel{transformer: name} = channel, message) do
    with {:ok, module} <- Registry.get_transformer_module(name) do
      module.encode(channel, message)
    end
  end
end
|
lib/yggdrasil/transformer.ex
| 0.942202
| 0.925162
|
transformer.ex
|
starcoder
|
defmodule SanbaseWeb.Graphql.Resolvers.SocialDataResolver do
  @moduledoc """
  Absinthe resolvers for social-data GraphQL queries: social volume and
  dominance, trending words, sentiment, gainers/losers and news.

  Most functions are thin delegations to `Sanbase.SocialData` and
  `Sanbase.TechIndicators`.
  """

  import SanbaseWeb.Graphql.Helpers.Async, only: [async: 1]
  import Absinthe.Resolution.Helpers, except: [async: 1]

  alias SanbaseWeb.Graphql.Helpers.Utils
  alias Sanbase.{SocialData, TechIndicators}
  alias SanbaseWeb.Graphql.SanbaseDataloader

  # Number of context words returned by `word_context/3` when resolved as a
  # subfield (no explicit size argument is available in that position).
  @context_words_default_size 10

  def popular_search_terms(_root, %{from: from, to: to}, _) do
    Sanbase.SocialData.PopularSearchTerm.get(from, to)
  end

  # Batched project lookup by slug via Dataloader (avoids N+1 queries).
  def project_from_slug(_root, _args, %{source: %{slug: slug}, context: %{loader: loader}}) do
    loader
    |> Dataloader.load(SanbaseDataloader, :project_by_slug, slug)
    |> on_load(fn loader ->
      {:ok, Dataloader.get(loader, SanbaseDataloader, :project_by_slug, slug)}
    end)
  end

  def twitter_mention_count(
        _root,
        %{ticker: ticker, from: from, to: to, interval: interval, result_size_tail: size},
        _resolution
      ) do
    TechIndicators.twitter_mention_count(ticker, from, to, interval, size)
  end

  def emojis_sentiment(
        _root,
        %{from: from, to: to, interval: interval, result_size_tail: size},
        _resolution
      ) do
    TechIndicators.emojis_sentiment(from, to, interval, size)
  end

  def social_volume(
        _root,
        %{slug: slug, from: from, to: to, interval: interval, social_volume_type: type},
        _resolution
      ) do
    # The `*_discussion_overview` are counting the total number of messages in a given medium
    # Deprecated. To be replaced with `getMetric(metric: "community_messages_count_*")` and
    # `getMetric(metric: "social_volume_*")`
    #
    # The source is derived from the volume type atom, e.g.
    # `:telegram_discussion_overview` -> "telegram"; the professional traders
    # chat is special-cased because its prefix contains underscores.
    source =
      case type do
        :professional_traders_chat_overview -> "professional_traders_chat"
        _ -> type |> Atom.to_string() |> String.split("_") |> hd
      end

    case type in [:telegram_discussion_overview, :discord_discussion_overview] do
      true ->
        SocialData.community_messages_count(%{slug: slug}, from, to, interval, source)

      false ->
        SocialData.social_volume(%{slug: slug}, from, to, interval, source)
    end
  end

  def social_volume_projects(_root, %{}, _resolution) do
    SocialData.social_volume_projects()
  end

  def topic_search(
        _root,
        %{source: source, search_text: search_text, from: from, to: to, interval: interval},
        _resolution
      ) do
    # Wraps the raw data points in a `chart_data` map expected by the schema.
    case SocialData.social_volume(%{text: search_text}, from, to, interval, source) do
      {:ok, data} -> {:ok, %{chart_data: data}}
      {:error, error} -> {:error, error}
    end
  end

  def get_trending_words(
        _root,
        %{from: from, to: to, interval: interval, size: size},
        _resolution
      ) do
    # Reshape `{datetime, words}` tuples into maps and sort chronologically.
    case SocialData.TrendingWords.get_trending_words(from, to, interval, size) do
      {:ok, result} ->
        result =
          result
          |> Enum.map(fn {datetime, top_words} -> %{datetime: datetime, top_words: top_words} end)
          |> Enum.sort_by(& &1.datetime, {:asc, DateTime})

        {:ok, result}

      {:error, error} ->
        {:error, error}
    end
  end

  def get_word_trending_history(
        _root,
        %{word: word, from: from, to: to, interval: interval, size: size},
        _resolution
      ) do
    SocialData.TrendingWords.get_word_trending_history(word, from, to, interval, size)
  end

  def get_project_trending_history(
        _root,
        %{slug: slug, from: from, to: to, interval: interval, size: size},
        _resolution
      ) do
    SocialData.TrendingWords.get_project_trending_history(slug, from, to, interval, size)
  end

  def trending_words(
        _root,
        %{source: source, size: size, hour: hour, from: from, to: to},
        _resolution
      ) do
    # Cap the requested size at 30.
    size = Enum.min([size, 30])
    SocialData.trending_words(source, size, hour, from, to)
  end

  def word_context(
        _root,
        %{word: word, source: source, size: size, from: from, to: to},
        _resolution
      ) do
    # Cap the requested size at 30.
    size = Enum.min([size, 30])
    SocialData.word_context(word, source, size, from, to)
  end

  # Subfield resolver: the word comes from the parent object and the time
  # range is extracted from the root `trending_words` query arguments.
  # Runs asynchronously so sibling fields resolve concurrently.
  def word_context(%{word: word}, _args, resolution) do
    %{source: source, from: from, to: to} =
      Utils.extract_root_query_args(resolution, "trending_words")

    async(fn ->
      SocialData.word_context(word, source, @context_words_default_size, from, to)
    end)
  end

  def word_trend_score(
        _root,
        %{word: word, source: source, from: from, to: to},
        _resolution
      ) do
    SocialData.word_trend_score(word, source, from, to)
  end

  def top_social_gainers_losers(_root, args, _resolution) do
    SocialData.top_social_gainers_losers(args)
  end

  def social_gainers_losers_status(_root, args, _resolution) do
    SocialData.social_gainers_losers_status(args)
  end

  def social_dominance(
        _root,
        %{slug: slug, from: from, to: to, interval: interval, source: source},
        _resolution
      ) do
    SocialData.social_dominance(%{slug: slug}, from, to, interval, source)
  end

  def news(_root, %{tag: tag, from: from, to: to, size: size}, _resolution) do
    SocialData.google_news(tag, from, to, size)
  end
end
|
lib/sanbase_web/graphql/resolvers/social_data_resolver.ex
| 0.856362
| 0.403391
|
social_data_resolver.ex
|
starcoder
|
defmodule OpentelemetryFunction do
  @moduledoc """
  Documentation for `OpentelemetryFunction`.
  This package provides functions to help propagating OpenTelemetry context
  across functions that are executed asynchronously.
  """
  require OpenTelemetry.Tracer

  @doc """
  Accepts a function and wraps it in a function which propagates OpenTelemetry
  context.
  This function supports functions with arity up to 9.
  ## Example
      # Before
      task = Task.async(func)
      Task.await(task, timeout)

      # With explicit context propagation
      ctx = OpenTelemetry.Ctx.get_current()
      task = Task.async(fn ->
        OpenTelemetry.Ctx.attach(ctx)
        func.()
      end)
      Task.await(task, timeout)

      # With this helper function
      task = Task.async(OpentelemetryFunction.wrap(func))
      Task.await(task, timeout)

  ## It is also possible to use this with MFA:
      # Before
      :jobs.enqueue(:tasks_queue, {mod, fun, args})
      # After
      wrapped_fun = OpenTelemetry.Function.wrap({mod, fun, args})
      :jobs.enqueue(:tasks_queue, wrapped_fun)
  """
  # Bodiless head declaring the default span name for all clauses below.
  def wrap(fun_or_mfa, span_name \\ "Function.wrap")

  @spec wrap(fun, binary) :: fun
  # Compile-time loop generating one `wrap/2` clause per arity 0..9.
  # `unquote(arity)` / `unquote_splicing(args)` here are "unquote fragments":
  # valid inside `def` during module compilation, without a surrounding quote.
  Enum.each(0..9, fn arity ->
    # NOTE(review): for arity == 0 the `arity > 0` filter yields `[]`; the
    # range `1..0` is decreasing, which newer Elixir versions warn about —
    # consider `1..arity//1`. TODO confirm target Elixir version.
    args = for _ <- 1..arity, arity > 0, do: Macro.unique_var(:arg, __MODULE__)

    def wrap(original_fun, span_name) when is_function(original_fun, unquote(arity)) do
      # The span is started eagerly (at wrap time) and the caller's context is
      # captured here, then both are attached when the wrapped function runs.
      span_ctx = OpenTelemetry.Tracer.start_span(span_name)
      ctx = OpenTelemetry.Ctx.get_current()

      fn unquote_splicing(args) ->
        OpenTelemetry.Ctx.attach(ctx)
        OpenTelemetry.Tracer.set_current_span(span_ctx)

        try do
          original_fun.(unquote_splicing(args))
        rescue
          exception ->
            # Record the failure on the span, then re-raise unchanged.
            OpenTelemetry.Span.record_exception(span_ctx, exception, __STACKTRACE__, [])
            OpenTelemetry.Tracer.set_status(OpenTelemetry.status(:error, ""))
            reraise(exception, __STACKTRACE__)
        after
          # Always close the span, whether the function returned or raised.
          OpenTelemetry.Span.end_span(span_ctx)
        end
      end
    end
  end)

  @spec wrap({module, atom, [term]}, binary()) :: fun
  # MFA variant: returns a zero-arity closure applying `mod.fun(args)`.
  def wrap({mod, fun, args}, span_name) do
    span_ctx = OpenTelemetry.Tracer.start_span(span_name)
    ctx = OpenTelemetry.Ctx.get_current()

    fn ->
      OpenTelemetry.Ctx.attach(ctx)
      OpenTelemetry.Tracer.set_current_span(span_ctx)

      try do
        apply(mod, fun, args)
      rescue
        exception ->
          OpenTelemetry.Span.record_exception(span_ctx, exception, __STACKTRACE__, [])
          OpenTelemetry.Tracer.set_status(OpenTelemetry.status(:error, ""))
          reraise(exception, __STACKTRACE__)
      after
        OpenTelemetry.Span.end_span(span_ctx)
      end
    end
  end
end
|
lib/opentelemetry_function.ex
| 0.866274
| 0.403831
|
opentelemetry_function.ex
|
starcoder
|
defmodule Membrane.Scissors do
  @moduledoc """
  Element for cutting the stream.

  Only buffers falling into the configured `intervals` (and passing the
  optional `filter`) are forwarded; everything else is dropped.
  """
  use Membrane.Filter

  alias Membrane.Buffer
  alias Membrane.Time

  def_input_pad :input, caps: :any, demand_unit: :buffers
  def_output_pad :output, caps: :any

  def_options intervals: [
                type: :list,
                spec: [{Time.t(), duration :: Time.t() | integer}] | Enumerable.t() | Stream.t(),
                description: """
                Enumerable containing `{start_time, duration}` tuples specifying
                parts of the stream that should be preserved. All other parts are
                cut off. Duration unit should conform to the `interval_duration_unit`
                option. Note that infinite streams are also supported.
                """
              ],
              buffer_duration: [
                type: :function,
                spec: (Buffer.t(), caps :: any -> Time.t()),
                description: """
                Function returning the duration of given buffer in Membrane Time units.
                """
              ],
              interval_duration_unit: [
                type: :atom,
                spec: :time | :buffers,
                default: :time,
                description: """
                Unit of the duration of each interval in the `intervals` option.
                If `:buffers` is passed, given amount of buffers is preserved,
                unless the next interval starts earlier. In that case, the stream
                is cut according to the subsequent intervals.
                """
              ],
              filter: [
                type: :function,
                spec: (Buffer.t(), caps :: any -> boolean),
                default: &__MODULE__.always_pass_filter/2,
                description: """
                Function for filtering buffers before they are cut. Each buffer
                is preserved iff it returns `true`. By default always returns `true`.
                """
              ]

  @doc false
  # Default `filter` option: keep every buffer.
  def always_pass_filter(_buffer, _caps), do: true

  @impl true
  def handle_init(opts) do
    # Keep a 2-interval lookahead (`next_intervals`) so we can detect when the
    # *next* interval starts before the current one ends; the rest of the
    # (possibly infinite) enumerable stays lazy in `intervals`.
    %__MODULE__{intervals: intervals} = opts
    {next_intervals, intervals} = StreamSplit.take_and_drop(intervals, 2)

    state =
      opts
      |> Map.from_struct()
      |> Map.merge(%{
        time: 0,
        buffers_count: 0,
        intervals: intervals,
        next_intervals: next_intervals
      })

    {:ok, state}
  end

  @impl true
  def handle_demand(:output, size, :buffers, _ctx, state) do
    {{:ok, demand: {:input, size}}, state}
  end

  @impl true
  def handle_process(:input, buffer, ctx, state) do
    # `use Ratio` so the `+` below works with rational Membrane timestamps.
    use Ratio
    %{caps: caps} = ctx.pads.input

    {forward?, state} =
      if state.filter.(buffer, caps) do
        forward?(state)
      else
        {false, state}
      end

    # Dropped buffers trigger a redemand so upstream keeps supplying data.
    actions = if forward?, do: [buffer: {:output, buffer}], else: [redemand: :output]
    # Time advances by the buffer's duration whether or not it was forwarded.
    state = Map.update!(state, :time, &(&1 + state.buffer_duration.(buffer, caps)))
    {{:ok, actions}, state}
  end

  # Decides whether the current buffer falls into a preserved interval,
  # advancing through the interval list as `time` progresses.
  defp forward?(state) do
    %{
      next_intervals: next_intervals,
      time: time,
      buffers_count: buffers_count,
      interval_duration_unit: interval_duration_unit
    } = state

    cond do
      # No intervals left: drop everything from here on.
      next_intervals == [] ->
        {false, state}

      # The interval *after* the current one has already started: the current
      # interval is cut short; advance and re-evaluate.
      time_for_next_interval?(next_intervals, time) ->
        state |> proceed_to_next_interval() |> forward?()

      # Before the current interval's start: drop.
      waiting_for_interval_start?(next_intervals, time) ->
        {false, state}

      # Inside the current interval: forward (counting buffers when the
      # interval length is expressed in buffers).
      within_current_interval?(next_intervals, time, buffers_count, interval_duration_unit) ->
        case interval_duration_unit do
          :time -> {true, state}
          :buffers -> {true, %{state | buffers_count: buffers_count + 1}}
        end

      # Past the current interval's end: advance and re-evaluate.
      true ->
        state |> proceed_to_next_interval() |> forward?()
    end
  end

  defp time_for_next_interval?([_interval0, {from, _size} | _], time), do: Ratio.gte?(time, from)
  defp time_for_next_interval?(_next_intervals, _time), do: false

  defp waiting_for_interval_start?([{from, _size} | _], time), do: Ratio.lt?(time, from)
  defp waiting_for_interval_start?(_next_intervals, _time), do: false

  defp within_current_interval?([{from, interval_duration} | _], time, _buffers_count, :time) do
    use Ratio
    Ratio.lt?(time, from + interval_duration)
  end

  defp within_current_interval?([{_from, interval_size} | _], _time, buffers_count, :buffers) do
    buffers_count < interval_size
  end

  defp within_current_interval?(_next_intervals, _time, _buf_cnt, _unit), do: false

  # Drops the finished interval, pulls one more from the lazy enumerable to
  # refill the lookahead, and resets the per-interval buffer counter.
  defp proceed_to_next_interval(%{next_intervals: next_intervals, intervals: intervals} = state) do
    {new_next_intervals, intervals} = StreamSplit.take_and_drop(intervals, 1)

    %{
      state
      | next_intervals: tl(next_intervals) ++ new_next_intervals,
        intervals: intervals,
        buffers_count: 0
    }
  end
end
|
lib/membrane_scissors.ex
| 0.916549
| 0.516474
|
membrane_scissors.ex
|
starcoder
|
import TypeClass

defclass Witchcraft.Arrow do
  @moduledoc """
  Arrows abstract the idea of computations, potentially with a context.
  Arrows are in fact an abstraction above monads, and can be used both to
  express all other type classes in Witchcraft. They also enable some nice
  flow-based reasoning about computation.
  For a nice illustrated explanation,
  see [Haskell/Understanding arrows](https://en.wikibooks.org/wiki/Haskell/Understanding_arrows)
  Arrows let you think diagrammatically, and is a powerful way of thinking
  about flow programming, concurrency, and more.

      ┌---> f --------------------------┐
      |                                 v
      input ---> split                unsplit ---> result
      |                                 ^
      |    ┌--- h ---┐                  |
      |    |         v                  |
      └---> g ---> split              unsplit ---┘
                     |          ^
                     └--- i ---┘

  ## Type Class
  An instance of `Witchcraft.Arrow` must also implement `Witchcraft.Category`,
  and define `Witchcraft.Arrow.arrowize/2`.
      Semigroupoid [compose/2, apply/2]
      ↓
      Category [identity/1]
      ↓
      Arrow [arrowize/2]
  """

  alias __MODULE__

  extend Witchcraft.Category

  use Witchcraft.Internal, deps: [Witchcraft.Category]
  use Witchcraft.Category

  @type t :: fun()

  where do
    @doc """
    Lift a function into an arrow, much like how `of/2` does with data.
    Essentially a label for composing functions end-to-end, where instances
    may have their own special idea of what composition means. The simplest example
    is a regular function. Others are possible, such as Kleisli arrows.
    ## Examples
        iex> use Witchcraft.Arrow
        ...> times_ten = arrowize(fn -> nil end, &(&1 * 10))
        ...> 5 |> pipe(times_ten)
        50
    """
    @spec arrowize(Arrow.t(), fun()) :: Arrow.t()
    def arrowize(sample, fun)
  end

  # Arrow laws, property-checked by TypeClass for every instance.
  properties do
    # arrowize(id) must behave as the identity arrow.
    def arrow_identity(sample) do
      a = generate(nil)

      left = Arrow.arrowize(sample, &Quark.id/1)
      right = &Quark.id/1

      equal?(a |> pipe(left), a |> pipe(right))
    end

    # arrowize distributes over composition.
    def arrow_composition(sample) do
      use Witchcraft.Category

      a = generate(nil)

      f = fn x -> "#{x}-#{x}" end
      g = &inspect/1

      left = Arrow.arrowize(sample, f) <|> Arrow.arrowize(sample, g)
      right = Arrow.arrowize(sample, f <|> g)

      equal?(pipe(a, left), pipe(a, right))
    end

    # first/1 commutes with arrowize.
    def first_commutativity(sample) do
      a = {generate(nil), generate(nil)}
      f = &inspect/1

      left = Witchcraft.Arrow.first(Arrow.arrowize(sample, f))
      right = Arrow.arrowize(sample, Witchcraft.Arrow.first(f))

      equal?(pipe(a, left), pipe(a, right))
    end

    # first/1 distributes over composition.
    def first_composition(sample) do
      a = {generate(nil), generate(nil)}

      f = Arrow.arrowize(sample, fn x -> "#{x}-#{x}" end)
      g = Arrow.arrowize(sample, &inspect/1)

      left = Witchcraft.Arrow.first(f <|> g)
      right = Witchcraft.Arrow.first(f) <|> Witchcraft.Arrow.first(g)

      equal?(pipe(a, left), pipe(a, right))
    end

    # second/1 commutes with arrowize.
    def second_arrow_commutativity(sample) do
      a = {generate(nil), generate(nil)}
      f = &inspect/1

      left = Witchcraft.Arrow.second(Arrow.arrowize(sample, f))
      right = Arrow.arrowize(sample, Witchcraft.Arrow.second(f))

      equal?(pipe(a, left), pipe(a, right))
    end

    # second/1 distributes over composition.
    def second_composition(sample) do
      a = {generate(nil), generate(nil)}

      f = Arrow.arrowize(sample, fn x -> "#{x}-#{x}" end)
      g = Arrow.arrowize(sample, &inspect/1)

      left = Witchcraft.Arrow.second(f <|> g)
      right = Witchcraft.Arrow.second(f) <|> Witchcraft.Arrow.second(g)

      equal?(pipe(a, left), pipe(a, right))
    end

    # product of arrowized functions equals arrowize of the product.
    def product_composition(sample) do
      a = {generate(nil), generate(nil)}

      f = &inspect/1
      g = fn x -> "#{inspect(x)}-#{inspect(x)}" end

      left =
        Witchcraft.Arrow.product(
          Arrow.arrowize(sample, f),
          Arrow.arrowize(sample, g)
        )

      right = Arrow.arrowize(sample, Witchcraft.Arrow.product(f, g))

      equal?(pipe(a, left), pipe(a, right))
    end

    # fanout of arrowized functions equals arrowize of the fanout.
    def fanout_composition(sample) do
      a = generate(nil)

      f = &inspect/1
      g = fn x -> "#{inspect(x)}-#{inspect(x)}" end

      left =
        Witchcraft.Arrow.fanout(
          Arrow.arrowize(sample, f),
          Arrow.arrowize(sample, g)
        )

      right = Arrow.arrowize(sample, Witchcraft.Arrow.fanout(f, g))

      equal?(pipe(a, left), pipe(a, right))
    end

    # NOTE(review): "reassociaton" is a typo kept as-is; the property name
    # feeds generated test names, so renaming would change the suite.
    def first_reassociaton(sample) do
      a = {{generate(nil), generate(nil)}, {generate(nil), generate(nil)}}
      f = fn x -> "#{inspect(x)}-#{inspect(x)}" end

      x = Witchcraft.Arrow.first(Arrow.arrowize(sample, f))
      y = Arrow.arrowize(sample, &Witchcraft.Arrow.reassociate/1)

      left = Witchcraft.Arrow.first(x) <~> y
      right = y <~> x

      equal?(a |> pipe(left), a |> pipe(right))
    end

    # Dropping the second element after first(f) equals applying f after drop.
    def first_identity(sample) do
      a = {generate(nil), generate(nil)}
      f = fn x -> "#{inspect(x)}-#{inspect(x)}" end

      left = Witchcraft.Arrow.first(f) <~> Arrow.arrowize(sample, fn {x, _} -> x end)
      right = Arrow.arrowize(sample, fn {x, _} -> x end) <~> f

      equal?(pipe(a, left), pipe(a, right))
    end

    # first(f) commutes with id ✕ g.
    def first_product_commutativity(sample) do
      a = {generate(nil), generate(nil)}

      f = &inspect/1
      g = fn x -> "#{inspect(x)}-#{inspect(x)}" end

      x = Arrow.arrowize(sample, Witchcraft.Arrow.product(&Quark.id/1, g))
      y = Witchcraft.Arrow.first(f)

      left = x <|> y
      right = y <|> x

      equal?(pipe(a, left), pipe(a, right))
    end
  end

  @doc """
  Take two arguments (as a 2-tuple), and run one function on the left side (first element),
  and run a different function on the right side (second element).

      ┌------> f.(a) = x -------┐
      |                         v
      {a, b}                 {x, y}
      |                         ^
      └------> g.(b) = y -------┘

  ## Examples
      iex> product(&(&1 - 10), &(&1 <> "!")).({42, "Hi"})
      {32, "Hi!"}
  """
  @spec product(Arrow.t(), Arrow.t()) :: Arrow.t()
  def product(arrow_f, arrow_g), do: first(arrow_f) <~> second(arrow_g)

  @doc """
  Alias for `product/2`, meant to invoke a spatial metaphor.
  ## Examples
      iex> beside(&(&1 - 10), &(&1 <> "!")).({42, "Hi"})
      {32, "Hi!"}
  """
  @spec beside(Arrow.t(), Arrow.t()) :: Arrow.t()
  defalias beside(a, b), as: :product

  @doc """
  Operator alias for `product/2`.
  ## Examples
      iex> arr = fn x -> x - 10 end ^^^ fn y -> y <> "!" end
      ...> arr.({42, "Hi"})
      {32, "Hi!"}

      iex> {42, "Hi"} |> (fn x -> x - 10 end ^^^ fn y -> y <> "!" end).()
      {32, "Hi!"}
  """
  @spec Arrow.t() ^^^ Arrow.t() :: Arrow.t()
  defalias a ^^^ b, as: :product

  @doc """
  Swap positions of elements in a tuple.
  ## Examples
      iex> swap({1, 2})
      {2, 1}
  """
  @spec swap({any(), any()}) :: {any(), any()}
  def swap({x, y}), do: {y, x}

  @doc """
  Target the first element of a tuple.
  ## Examples
      iex> first(fn x -> x * 50 end).({1, 1})
      {50, 1}
  """
  @spec first(Arrow.t()) :: Arrow.t()
  def first(arrow) do
    arrowize(arrow, fn {x, y} ->
      {
        x |> pipe(arrow),
        y |> pipe(id_arrow(arrow))
      }
    end)
  end

  @doc """
  Target the second element of a tuple.
  ## Examples
      iex> second(fn x -> x * 50 end).({1, 1})
      {1, 50}
  """
  @spec second(Arrow.t()) :: Arrow.t()
  def second(arrow) do
    arrowize(arrow, fn {x, y} ->
      {
        x |> pipe(id_arrow(arrow)),
        y |> pipe(arrow)
      }
    end)
  end

  @doc """
  The identity function lifted into an arrow of the correct type.
  ## Examples
      iex> id_arrow(fn -> nil end).(99)
      99
  """
  @spec id_arrow(Arrow.t()) :: (any() -> Arrow.t())
  def id_arrow(sample), do: arrowize(sample, &Quark.id/1)

  @doc """
  Duplicate incoming data into both halves of a 2-tuple, and run one function
  on the left copy, and a different function on the right copy.

      ┌------> f.(a) = x ------┐
      |                        v
      a ---> split = {a, a}  {x, y}
      |                        ^
      └------> g.(a) = y ------┘

  ## Examples
      iex> Witchcraft.Semigroupoid.pipe(42, fanout(&(&1 - 10), &(inspect(&1) <> "!")))
      {32, "42!"}
  """
  @spec fanout(Arrow.t(), Arrow.t()) :: Arrow.t()
  def fanout(arrow_f, arrow_g) do
    arrow_f |> arrowize(&split/1) <~> (arrow_f ^^^ arrow_g)
  end

  @doc """
  Operator alias for `fanout/2`.
  ## Examples
      iex> fanned = fn x -> x - 10 end &&& fn y -> inspect(y) <> "!" end
      ...> fanned.(42)
      {32, "42!"}

      iex> fanned =
      ...>   fn x -> x - 10 end
      ...>   &&& fn y -> inspect(y) <> "!" end
      ...>   &&& fn z -> inspect(z) <> "?" end
      ...>   &&& fn d -> inspect(d) <> inspect(d) end
      ...>   &&& fn e -> e / 2 end
      ...>
      ...> fanned.(42)
      {{{{32, "42!"}, "42?"}, "4242"}, 21.0}
  """
  @spec Arrow.t() &&& Arrow.t() :: Arrow.t()
  defalias a &&& b, as: :fanout

  @doc """
  Copy a single value into both positions of a 2-tuple.
  This is useful is you want to run functions on the input separately.
  ## Examples
      iex> split(42)
      {42, 42}

      iex> import Witchcraft.Semigroupoid, only: [<~>: 2]
      ...> 5
      ...> |> split()
      ...> |> (second(fn x -> x - 2 end)
      ...> <~> first(fn y -> y * 10 end)
      ...> <~> second(&inspect/1)).()
      {50, "3"}

      iex> use Witchcraft.Arrow
      ...> 5
      ...> |> split()
      ...> |> pipe(second(fn x -> x - 2 end))
      ...> |> pipe(first(fn y -> y * 10 end))
      ...> |> pipe(second(&inspect/1))
      {50, "3"}
  """
  @spec split(any()) :: {any(), any()}
  def split(x), do: {x, x}

  @doc """
  Merge two tuple values with a combining function.
  ## Examples
      iex> unsplit({1, 2}, &+/2)
      3
  """
  @spec unsplit({any(), any()}, (any(), any() -> any())) :: any()
  def unsplit({x, y}, combine), do: combine.(x, y)

  @doc """
  Switch the associativity of a nested tuple. Helpful since many arrows act
  on a subset of a tuple, and you may want to move portions in and out of that stream.
  ## Examples
      iex> reassociate({1, {2, 3}})
      {{1, 2}, 3}

      iex> reassociate({{1, 2}, 3})
      {1, {2, 3}}
  """
  @spec reassociate({any(), {any(), any()}} | {{any(), any()}, any()}) ::
          {{any(), any()}, any()} | {any(), {any(), any()}}
  def reassociate({{a, b}, c}), do: {a, {b, c}}
  def reassociate({a, {b, c}}), do: {{a, b}, c}

  @doc """
  Compose a function (left) with an arrow (right) to produce a new arrow.
  ## Examples
      iex> f = precompose(
      ...>   fn x -> x + 1 end,
      ...>   arrowize(fn _ -> nil end, fn y -> y * 10 end)
      ...> )
      ...> f.(42)
      430
  """
  @spec precompose(fun(), Arrow.t()) :: Arrow.t()
  def precompose(fun, arrow), do: arrowize(arrow, fun) <~> arrow

  @doc """
  Compose an arrow (left) with a function (right) to produce a new arrow.
  ## Examples
      iex> f = postcompose(
      ...>   arrowize(fn _ -> nil end, fn x -> x + 1 end),
      ...>   fn y -> y * 10 end
      ...> )
      ...> f.(42)
      430
  """
  @spec postcompose(Arrow.t(), fun()) :: Arrow.t()
  def postcompose(arrow, fun), do: arrow <~> arrowize(arrow, fun)
end
# Arrow instance for plain functions.
definst Witchcraft.Arrow, for: Function do
  use Quark

  # Plain functions are already arrows; lifting is just currying.
  def arrowize(_, fun), do: curry(fun)

  # Specialized `first/1`: apply the arrow to the first tuple element,
  # passing the second element through unchanged.
  def first(arrow), do: fn {target, unchanged} -> {arrow.(target), unchanged} end
end
|
lib/witchcraft/arrow.ex
| 0.857664
| 0.670979
|
arrow.ex
|
starcoder
|
defmodule Shared.Zeit do
  @moduledoc """
  Helpers for interpreting dates and times in the German time zone
  (Europe/Berlin).
  """

  alias Shared.Zeitperiode

  @spec mit_deutscher_zeitzone(datum :: Date.t(), zeit :: Time.t()) :: DateTime.t()
  @doc """
  Converts a date and a time into a `DateTime` struct with the German time
  zone. That is, the given time is assumed to have taken place in Germany.
  iex> Shared.Zeit.mit_deutscher_zeitzone(~D[2018-02-22], ~T[15:00:00])
  #DateTime<2018-02-22 15:00:00+01:00 CET Europe/Berlin>
  iex> Shared.Zeit.mit_deutscher_zeitzone(~N[2018-03-25 03:00:00])
  #DateTime<2018-03-25 03:00:00+02:00 CEST Europe/Berlin>
  iex> Shared.Zeit.mit_deutscher_zeitzone(~N[2018-10-28 02:30:00])
  #DateTime<2018-10-28 02:30:00+01:00 CET Europe/Berlin>
  """
  def mit_deutscher_zeitzone(%Date{} = date, %Time{} = time) do
    {:ok, datetime} = NaiveDateTime.new(date, time)

    datetime
    |> mit_deutscher_zeitzone()
  end

  @spec mit_deutscher_zeitzone(NaiveDateTime.t()) :: DateTime.t()
  def mit_deutscher_zeitzone(%NaiveDateTime{} = datetime) do
    datetime = datetime |> Timex.to_datetime("Europe/Berlin")

    # On ambiguous wall-clock times (DST fall-back), Timex returns an
    # AmbiguousDateTime; prefer the later interpretation ("winter time").
    case datetime do
      %Timex.AmbiguousDateTime{type: :ambiguous, after: winterzeit} -> winterzeit
      _ -> datetime
    end
  end

  @spec mit_deutscher_zeitzone(DateTime.t()) :: DateTime.t()
  # Reinterprets the wall-clock time as German local time, discarding the
  # original zone/offset entirely.
  def mit_deutscher_zeitzone(%DateTime{} = datetime) do
    datetime
    |> DateTime.to_naive()
    |> mit_deutscher_zeitzone()
  end

  @spec mit_deutscher_zeitzone(datum :: Date.t(), start :: Time.t(), ende :: Time.t()) ::
          Timex.Interval.t()
  # Builds a period from start/end times on `datum`, converting both endpoints
  # to German time. NOTE(review): the "Etc/UTC" argument's meaning depends on
  # Zeitperiode.new/3 (defined elsewhere) — confirm against that module.
  def mit_deutscher_zeitzone(%Date{} = datum, %Time{} = start, %Time{} = ende) do
    zeitperiode = Zeitperiode.new(datum, start, ende)

    Zeitperiode.new(
      Zeitperiode.von(zeitperiode) |> mit_deutscher_zeitzone(),
      Zeitperiode.bis(zeitperiode) |> mit_deutscher_zeitzone(),
      "Etc/UTC"
    )
  end

  @spec parse(binary) :: DateTime.t() | NaiveDateTime.t()
  # Parses an ISO:Extended timestamp. Zoned timestamps with a non-UTC offset
  # are shifted to Europe/Berlin; UTC and naive timestamps pass through.
  # NOTE: raises CaseClauseError when the input cannot be parsed.
  def parse(to_parse) when is_binary(to_parse) do
    case Timex.parse(to_parse, "{ISO:Extended}") do
      {:ok, %DateTime{} = date_time} ->
        if date_time.utc_offset != 0 do
          DateTime.shift_zone!(date_time, "Europe/Berlin")
        else
          date_time
        end

      {:ok, %NaiveDateTime{} = naive_date_time} ->
        naive_date_time
    end
  end

  @spec jetzt :: DateTime.t()
  # Current time truncated to seconds, in the system's local time zone
  # (Timex.local/0) — presumably expected to be German; verify deployment TZ.
  def jetzt do
    Timex.local() |> DateTime.truncate(:second)
  end

  defmodule Sigil do
    @spec sigil_G(term :: binary(), _modifiers :: charlist()) :: DateTime.t()
    @doc """
    Converts ISO8601 date and time strings into a `DateTime` with the German
    time zone.
    ## Examples
    iex> ~G[2018-04-03 17:20:00]
    #DateTime<2018-04-03 17:20:00+02:00 CEST Europe/Berlin>
    """
    def sigil_G(string, []) do
      naive = NaiveDateTime.from_iso8601!(string)
      Shared.Zeit.mit_deutscher_zeitzone(naive)
    end
  end
end
|
lib/zeit.ex
| 0.746324
| 0.482856
|
zeit.ex
|
starcoder
|
defmodule Harness.Tree do
  @moduledoc """
  Renders things as a tree
  See the original implementation in Mix
  [here](https://github.com/elixir-lang/elixir/blob/v1.10/lib/mix/lib/mix/utils.ex).
  The original implementation has an optimization for dependency trees which
  prevents showing the same dependency tree twice. That's great for printing
  small dependency trees, but for file trees, we want to see the entire tree
  every time, even if a file or directory name is present many times.
  The changes to the original implementation are shown as comments below:
  """

  @doc """
  Prints the given tree according to the callback.
  The callback will be invoked for each node and it
  must return a `{printed, children}` tuple.
  """
  def print_tree(nodes, callback, opts \\ []) do
    # Use Unicode box-drawing characters unless on Windows, or unless the
    # caller forces a mode via the `:format` option ("pretty" / "plain").
    pretty? =
      case Keyword.get(opts, :format) do
        "pretty" -> true
        "plain" -> false
        _other -> elem(:os.type(), 0) != :win32
      end

    print_tree(
      nodes,
      _depth = [],
      _parent = nil,
      _seen = MapSet.new(),
      pretty?,
      callback
    )

    :ok
  end

  defp print_tree(_nodes = [], _depth, _parent, seen, _pretty, _callback) do
    seen
  end

  # `depth` is a reversed list of booleans, one per ancestor level: `true`
  # means that ancestor has further siblings below (so a vertical bar must be
  # drawn at that column). `seen` is still threaded through for parity with
  # the Mix original but is never consulted to skip subtrees (see moduledoc).
  defp print_tree([node | nodes], depth, parent, seen, pretty?, callback) do
    {{name, info}, children} = callback.(node)
    # removed
    # key = {parent, name}
    info = if(info, do: " #{info}", else: "")

    Mix.shell().info(
      "#{depth(pretty?, depth)}#{prefix(pretty?, depth, nodes)}#{name}#{info}"
    )

    seen =
      print_tree(
        children,
        [nodes != [] | depth],
        name,
        # switched this next line (51) for the next (52)
        seen,
        # MapSet.put(seen, key),
        pretty?,
        callback
      )

    print_tree(nodes, depth, parent, seen, pretty?, callback)
  end

  # Leading indentation for a node: one entry per ancestor except the root.
  defp depth(_pretty?, []), do: ""

  defp depth(pretty?, depth),
    do: Enum.reverse(depth) |> tl |> Enum.map(&entry(pretty?, &1))

  # Column filler: a bar when that ancestor still has siblings, blank otherwise.
  defp entry(false, true), do: "|   "
  defp entry(false, false), do: "    "
  defp entry(true, true), do: "│   "
  defp entry(true, false), do: "    "

  # Branch connector: last-child vs middle-child, ASCII vs Unicode.
  defp prefix(false, [], _), do: ""
  defp prefix(false, _, []), do: "`-- "
  defp prefix(false, _, _), do: "|-- "
  defp prefix(true, [], _), do: ""
  defp prefix(true, _, []), do: "└── "
  defp prefix(true, _, _), do: "├── "
end
|
lib/harness/tree.ex
| 0.836204
| 0.503296
|
tree.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.CentralSceneSupportedReport do
  @moduledoc """
  This command is used to report the maximum number of supported scenes and the Key Attributes
  supported for each scene.
  Versions 1 and 2 are obsolete. Version 3+ fields are required.
  Params:
    * `:supported_scenes` - This field indicates the maximum number of scenes supported by the requested device. (required)
    * `:slow_refresh_support` - This field indicates whether the node supports the Slow Refresh capability. (required)
    * `:identical` - This field indicates if all scenes are supporting the same Key Attributes (required)
    * `:bit_mask_bytes` - This field advertises the size of each "Supported Key Attributes" field measured in bytes. Must be 1..3. (required)
    * `:supported_key_attributes` - This field advertises the attributes supported by the corresponding scene (required)
      A list of lists of key attributes where a key attribute
      is one of :key_pressed_1_time | :key_released | :key_held_down | :key_pressed_2_times
      | :key_pressed_3_times | :key_pressed_4_times | :key_pressed_5_times.
      If not identical, the first list of key attributes corresponds to scene 1, the second to scene 2 etc. for each of supported_scenes
      If identical, only the key attributes of scene 1 are to be listed
  """

  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave.{Command, DecodeError}
  alias Grizzly.ZWave.CommandClasses.CentralScene

  @type param ::
          {:supported_scenes, non_neg_integer}
          | {:slow_refresh_support, boolean}
          | {:identical, boolean}
          | {:bit_mask_bytes, 1..3}
          | {:supported_key_attributes, [CentralScene.key_attributes()]}

  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    command = %Command{
      name: :central_scene_supported_report,
      command_byte: 0x02,
      command_class: CentralScene,
      params: params,
      impl: __MODULE__
    }

    {:ok, command}
  end

  @impl true
  @spec encode_params(Command.t()) :: binary()
  def encode_params(command) do
    supported_scenes = Command.param!(command, :supported_scenes)
    identical? = Command.param!(command, :identical)
    identical_bit = identical? |> CentralScene.boolean_to_bit()

    slow_refresh_support_bit =
      Command.param!(command, :slow_refresh_support) |> CentralScene.boolean_to_bit()

    bit_mask_bytes = Command.param!(command, :bit_mask_bytes)

    supported_key_attributes =
      Command.param!(command, :supported_key_attributes)
      |> CentralScene.validate_supported_key_attributes(supported_scenes, identical?)

    supported_key_attributes_binary =
      supported_key_attributes_to_binary(bit_mask_bytes, supported_key_attributes)

    # Byte 2 layout (MSB first): slow refresh (1 bit), reserved (4 bits,
    # transmitted as zero), bit mask bytes (2 bits), identical (1 bit).
    <<supported_scenes, slow_refresh_support_bit::size(1), 0x00::size(4), bit_mask_bytes::size(2),
      identical_bit::size(1)>> <> supported_key_attributes_binary
  end

  @impl true
  @spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
  def decode_params(
        # The 4 reserved bits are ignored on receive (matching them against
        # 0x00, as before, made decoding fail with a FunctionClauseError for
        # any sender that sets reserved bits).
        <<supported_scenes, slow_refresh_support_bit::size(1), _reserved::size(4),
          bit_mask_bytes::size(2), identical_bit::size(1),
          supported_key_attributes_binary::binary>>
      ) do
    identical? = identical_bit == 1

    # A failed key-attribute decode falls through as {:error, %DecodeError{}}.
    with {:ok, supported_key_attributes} <-
           supported_key_attributes_from_binary(
             supported_key_attributes_binary,
             supported_scenes,
             bit_mask_bytes,
             identical?
           ) do
      {:ok,
       [
         supported_scenes: supported_scenes,
         slow_refresh_support: slow_refresh_support_bit == 1,
         identical: identical?,
         bit_mask_bytes: bit_mask_bytes,
         supported_key_attributes: supported_key_attributes
       ]}
    end
  end

  # Encodes one bit-mask group (bit_mask_bytes bytes) per scene, concatenated.
  defp supported_key_attributes_to_binary(bit_mask_bytes, supported_key_attributes) do
    Enum.reduce(supported_key_attributes, <<>>, fn scene_key_attributes, acc ->
      acc <> key_attributes_bit_masks_binary(bit_mask_bytes, scene_key_attributes)
    end)
  end

  # Builds the bit-mask bytes for a single scene's key attributes.
  defp key_attributes_bit_masks_binary(bit_mask_bytes, scene_key_attributes) do
    # [{byte_index, bit_index}, ...]
    bit_indices =
      for key_attribute <- scene_key_attributes,
          do: CentralScene.key_attribute_to_bit_index(key_attribute)

    # Group bit indices by byte, e.g. [[1,4,5], [], []]
    byte_bit_indices =
      for i <- 1..bit_mask_bytes do
        Enum.reduce(bit_indices, [], fn {byte_index, bit_index}, acc ->
          if byte_index == i, do: [bit_index | acc], else: acc
        end)
      end

    # Turn each group into a byte, MSB-first, e.g. [<<128>>, <<0>>, <<0>>]
    bit_masks =
      for per_byte_bit_indices <- byte_bit_indices do
        for bit_index <- 7..0, into: <<>> do
          if bit_index in per_byte_bit_indices, do: <<1::size(1)>>, else: <<0::size(1)>>
        end
      end

    # Concatenate all byte masks into one binary.
    for bit_mask <- bit_masks, into: <<>>, do: bit_mask
  end

  # Decodes the per-scene key attribute lists, validating that the number of
  # bit-mask groups matches `supported_scenes` (or 1 when `identical?`).
  defp supported_key_attributes_from_binary(
         supported_key_attributes_binary,
         supported_scenes,
         bit_mask_bytes,
         identical?
       ) do
    # One list of set-bit indices per byte, e.g. [ [0,2,3], [], [1,4], ...]
    all_bit_masks_as_lists = bit_masks_from_binary(supported_key_attributes_binary)
    # Group per scene, e.g. [ [ [0,2,3], [] ], ...]
    per_scene_bit_indices = Enum.chunk_every(all_bit_masks_as_lists, bit_mask_bytes)
    scene_count = Enum.count(per_scene_bit_indices)
    valid? = if identical?, do: scene_count == 1, else: scene_count == supported_scenes

    if valid? do
      supported_key_attributes =
        for scene_bit_indices <- per_scene_bit_indices do
          # Pair each byte's bit indices with its 1-based byte index,
          # e.g. [{[0,2,3], 1}, {[], 2}]
          byte_indexed_scene_bit_indices = Enum.with_index(scene_bit_indices, 1)

          Enum.reduce(
            byte_indexed_scene_bit_indices,
            [],
            fn {bit_indices, byte_index}, acc ->
              attribute_keys = attribute_keys_from_bit_indices(bit_indices, byte_index)
              acc ++ attribute_keys
            end
          )
          |> List.flatten()
        end

      {:ok, supported_key_attributes}
    else
      {:error,
       %DecodeError{
         param: :supported_key_attributes,
         value: supported_key_attributes_binary,
         command: :central_scene_supported_report
       }}
    end
  end

  # For each byte, returns the list of set-bit positions (0 = LSB).
  defp bit_masks_from_binary(supported_key_attributes_binary) do
    for byte <- :erlang.binary_to_list(supported_key_attributes_binary) do
      indexed_bit_list =
        for(<<(bit::size(1) <- <<byte>>)>>, do: bit) |> Enum.reverse() |> Enum.with_index()

      Enum.reduce(
        indexed_bit_list,
        [],
        fn {bit, bit_index}, acc ->
          case bit do
            0 -> acc
            1 -> [bit_index | acc]
          end
        end
      )
    end
  end

  # Maps set bits back to key attribute atoms, dropping unknown (:ignore) bits.
  defp attribute_keys_from_bit_indices(bit_indices, byte_index) do
    for(
      bit_index <- bit_indices,
      do: CentralScene.key_attribute_from_bit_index(byte_index, bit_index)
    )
    |> Enum.reject(&(&1 == :ignore))
  end
end
|
lib/grizzly/zwave/commands/central_scene_supported_report.ex
| 0.874433
| 0.558508
|
central_scene_supported_report.ex
|
starcoder
|
defmodule Day11 do
@grid_size 300
@type coords :: {number(), number()}
@spec part_one(number()) :: coords()
# Finds the top-left coordinates of the 3x3 square with the largest total
# power level on the grid (Advent of Code 2018, day 11, part 1).
def part_one(serial_number) do
  {square_top_left_coords, _total_power_level} =
    serial_number
    |> compute_power_level_by_coords()
    |> compute_summed_area_table(@grid_size)
    |> get_left_top_coords_and_power_level_of_max_power_level_square_for_size(3)

  square_top_left_coords
end
@spec part_two(any()) :: {coords(), integer()}
def part_two(serial_number) do
power_level_by_coords = compute_power_level_by_coords(serial_number)
summed_area_table = compute_summed_area_table(power_level_by_coords, @grid_size)
{size, {coords, _power_level}} =
1..300
|> Flow.from_enumerable()
|> Flow.map(fn size ->
{size, get_left_top_coords_and_power_level_of_max_power_level_square_for_size(summed_area_table, size)}
end)
# |> Flow.group_by(fn {size, {coords, power_level}} -> size end)
# |> Flow.take_sort(1, fn {_size1, {_coords1, power_level1}}, {_size2, {_coords2, power_level2}} ->
# power_level2 < power_level1
# end)
# |> Enum.to_list()
# |> hd()
|> Enum.max_by(fn {_size, {_coords, power_level}} -> power_level end)
{coords, size}
end
@spec get_left_top_coords_and_power_level_of_max_power_level_square_for_size(map(), integer()) ::
{coords(), integer()}
defp get_left_top_coords_and_power_level_of_max_power_level_square_for_size(summed_area_table, size) do
calculate_power_levels_in_squares_with_size(summed_area_table, size)
|> Enum.max_by(fn {_coords, power_level} -> power_level end)
end
@spec calculate_power_levels_in_squares_with_size(map(), integer()) :: [{coords(), integer()}]
defp calculate_power_levels_in_squares_with_size(summed_area_table, size) do
for x <- 1..(@grid_size - size), y <- 1..(@grid_size - size) do
power_level = calculate_sum_in_square(summed_area_table, {x, y}, size)
{{x, y}, power_level}
end
end
@doc """
See [wiki article](https://en.wikipedia.org/wiki/Summed-area_table).
Coordinates are 1-indexed.
"""
@spec compute_summed_area_table(map(), number()) :: map()
def compute_summed_area_table(power_level_by_coords, grid_size) do
coords = for x <- 1..grid_size, y <- 1..grid_size, do: {x, y}
Enum.reduce(coords, %{}, fn {x, y}, table ->
s =
Map.get(table, {x, y}, power_level_by_coords[{x, y}]) + Map.get(table, {x, y - 1}, 0) +
Map.get(table, {x - 1, y}, 0) - Map.get(table, {x - 1, y - 1}, 0)
Map.put(table, {x, y}, s)
end)
end
@spec calculate_sum_in_square(map(), coords(), number()) :: number()
def calculate_sum_in_square(summed_area_table, {x, y} = _top_left_coords, size) when is_map(summed_area_table) do
offset = size - 1
get_value_by_coords = fn {x, y} -> Map.get(summed_area_table, {x, y}, 0) end
bottom_right_value_D = get_value_by_coords.({x + offset, y + offset})
top_left_bounding_value_A = get_value_by_coords.({x - 1, y - 1})
top_right_bounding_value_B = get_value_by_coords.({x + offset, y - 1})
bottom_left_bounding_value_C = get_value_by_coords.({x - 1, y + offset})
top_left_bounding_value_A + bottom_right_value_D - top_right_bounding_value_B - bottom_left_bounding_value_C
end
@spec compute_power_level_by_coords(number()) :: map()
defp compute_power_level_by_coords(serial_number) do
for x <- 1..@grid_size, y <- 1..@grid_size, into: %{} do
{{x, y}, power_level({x, y}, serial_number)}
end
end
@spec power_level(coords(), number()) :: number()
def power_level({x, y}, serial_number) do
rack_id = x + 10
power_level = (rack_id * y + serial_number) * rack_id
hundreds_digit =
power_level
|> Integer.to_string()
|> String.codepoints()
|> Enum.reverse()
|> Enum.at(2, "0")
|> String.to_integer()
hundreds_digit - 5
end
end
|
lib/day11.ex
| 0.823186
| 0.563498
|
day11.ex
|
starcoder
|
defmodule CCSP.Chapter2.Maze do
  alias CCSP.Chapter2.MazeLocation
  alias CCSP.Chapter2.Node
  alias __MODULE__, as: T

  @moduledoc """
  Corresponds to CCSP in Python, Section 2.2 titled "Maze Solving"

  TODO: The constant use of `get_cell` causes heavy use of `List.slice` internally and does not scale well.
  """

  @type location :: {non_neg_integer, non_neg_integer}
  @type maze_state :: list(list(MazeLocation.t()))

  # Fix: was `@opaque t :: __MODULE__.t()`, a circular definition that never
  # resolves to anything; point it at the struct instead.
  @type t :: %__MODULE__{}

  defstruct state: [], total_rows: 0, total_columns: 0

  @doc """
  Maps a cell kind name to its single-character display value.
  Raises `CaseClauseError` for an unknown name.
  """
  @spec cell(String.t()) :: String.t()
  def cell(str) do
    case str do
      "EMPTY" -> " "
      "BLOCKED" -> "X"
      "START" -> "S"
      "GOAL" -> "G"
      "PATH" -> "*"
    end
  end

  @doc "Wraps an existing maze state and its dimensions in a `Maze` struct."
  @spec new(maze_state, non_neg_integer, non_neg_integer) :: t
  def new(state, total_rows, total_columns) do
    %T{state: state, total_rows: total_rows, total_columns: total_columns}
  end

  @doc """
  Builds a `(rows + 1) x (columns + 1)` maze with randomly blocked cells
  (probability `sparseness`), then marks the start and goal locations.
  """
  @spec init(integer, integer, float, location, location) :: t
  def init(rows \\ 9, columns \\ 9, sparseness \\ 0.2, start \\ {0, 0}, goal \\ {9, 9}) do
    randomly_fill_maze(rows, columns, sparseness)
    # empty_maze(rows, columns)
    |> List.update_at(
      elem(start, 0),
      &List.update_at(&1, elem(start, 1), fn location -> %{location | value: cell("START")} end)
    )
    |> List.update_at(
      elem(goal, 0),
      &List.update_at(&1, elem(goal, 1), fn location -> %{location | value: cell("GOAL")} end)
    )
    |> new(rows + 1, columns + 1)
  end

  @doc """
  Returns the cell at `{row, column}`, or `nil` when out of range on the
  column axis. NOTE(review): `Enum.at` interprets negative indices as counting
  from the end, so callers must bounds-check negative coordinates themselves
  (as `successors/2` does via short-circuiting guards).
  """
  @spec get_cell(t, integer, integer) :: MazeLocation.t()
  def get_cell(maze, row, column) do
    maze.state
    |> Enum.at(row, [])
    |> Enum.at(column, nil)
  end

  # Builds an all-EMPTY grid. Currently unused (see the commented-out call in
  # `init/5`); kept for experimentation.
  @spec empty_maze(integer, integer) :: maze_state
  defp empty_maze(rows, columns) do
    Enum.map(0..rows, fn row ->
      Enum.map(0..columns, fn column -> MazeLocation.new(cell("EMPTY"), row, column) end)
    end)
  end

  # Grid where each cell is independently BLOCKED with probability `sparseness`.
  @spec randomly_fill_maze(integer, integer, float) :: maze_state
  defp randomly_fill_maze(rows, columns, sparseness) do
    Enum.map(0..rows, fn row ->
      Enum.map(0..columns, fn column ->
        random_cell(row, column, sparseness)
      end)
    end)
  end

  # Hard-coded 10x10 maze, handy for deterministic testing. Currently unused.
  defp specific_maze() do
    [
      ["S", " ", " ", " ", " ", "X", " ", " ", " ", " "],
      [" ", " ", " ", " ", " ", " ", " ", " ", " ", " "],
      [" ", " ", " ", "X", " ", "X", "X", " ", " ", " "],
      [" ", "X", " ", " ", " ", " ", "X", " ", " ", " "],
      ["X", " ", " ", " ", " ", " ", "X", " ", " ", "X"],
      [" ", " ", " ", " ", " ", " ", " ", " ", " ", " "],
      [" ", "X", " ", " ", "X", " ", "X", " ", " ", "X"],
      [" ", " ", "X", "X", " ", " ", "X", " ", " ", " "],
      [" ", " ", " ", " ", " ", " ", " ", " ", "X", " "],
      [" ", " ", " ", " ", " ", " ", " ", " ", "X", "G"]
    ]
    |> Enum.with_index()
    |> Enum.map(fn {row, row_index} ->
      row
      |> Enum.with_index()
      |> Enum.map(fn {column, column_index} ->
        MazeLocation.new(column, row_index, column_index)
      end)
    end)
  end

  # BLOCKED with probability `value`, EMPTY otherwise.
  # Fix: uses :rand — the :random module was deprecated in OTP 19 and removed
  # in OTP 26, so :rand.uniform/0 is the supported equivalent.
  defp random_cell(row, column, value) do
    if :rand.uniform() < value do
      MazeLocation.new(cell("BLOCKED"), row, column)
    else
      MazeLocation.new(cell("EMPTY"), row, column)
    end
  end

  @doc """
  Unblocked neighbors (west, east, north, south) of `location`.
  Each direction fetches the cell first, but the bounds check on the left of
  `and` short-circuits before the (possibly nil / wrapped) cell is dereferenced.
  """
  @spec successors(t, MazeLocation.t()) :: list(MazeLocation.t())
  def successors(maze, location) do
    row = location.row
    column = location.column
    total_rows = maze.total_rows
    total_columns = maze.total_columns

    south = fn ->
      neighbor_cell = get_cell(maze, row + 1, column)

      if row + 1 < total_rows and neighbor_cell.value != cell("BLOCKED") do
        neighbor_cell
      end
    end

    north = fn ->
      neighbor_cell = get_cell(maze, row - 1, column)

      if row - 1 >= 0 and neighbor_cell.value != cell("BLOCKED") do
        neighbor_cell
      end
    end

    east = fn ->
      neighbor_cell = get_cell(maze, row, column + 1)

      if column + 1 < total_columns and neighbor_cell.value != cell("BLOCKED") do
        neighbor_cell
      end
    end

    west = fn ->
      neighbor_cell = get_cell(maze, row, column - 1)

      if column - 1 >= 0 and neighbor_cell.value != cell("BLOCKED") do
        neighbor_cell
      end
    end

    Enum.filter([west.(), east.(), north.(), south.()], &(&1 != nil))
  end

  @doc """
  Returns a heuristic function measuring Manhattan distance from any location
  to `goal`.
  """
  @spec manhattan_distance(MazeLocation.t()) :: (MazeLocation.t() -> non_neg_integer)
  def manhattan_distance(goal) do
    fn m1 ->
      x_distance = abs(m1.column - goal.column)
      y_distance = abs(m1.row - goal.row)
      x_distance + y_distance
    end
  end

  @doc "Prints the maze state to stdout, one row per line."
  @spec pretty_print(list(list(maze_state))) :: :ok
  def pretty_print(maze_state) do
    Enum.each(maze_state, fn row ->
      Enum.each(row, &IO.write(" #{&1.value} "))
      IO.puts("")
    end)
  end

  @doc """
  Marks every node on `path` as PATH, then re-marks `start` and `goal`
  (in that order, so the endpoints win over the path marks).
  """
  @spec mark(t, list(Node.t()), MazeLocation.t(), MazeLocation.t()) :: list(list(maze_state))
  def mark(maze, path, start, goal) do
    Enum.reduce(path, maze.state, fn n, acc ->
      List.update_at(
        acc,
        n.row,
        &List.update_at(&1, n.column, fn location -> %{location | value: cell("PATH")} end)
      )
    end)
    |> List.update_at(
      start.row,
      &List.update_at(&1, start.column, fn location -> %{location | value: cell("START")} end)
    )
    |> List.update_at(
      goal.row,
      &List.update_at(&1, goal.column, fn location -> %{location | value: cell("GOAL")} end)
    )
  end
end
|
lib/ccsp/chapter2/maze.ex
| 0.61057
| 0.632077
|
maze.ex
|
starcoder
|
defmodule Line do
  @moduledoc """
  A 2-D line segment between two integer points, parsed from strings like
  `"0,9 -> 5,9"` (Advent of Code 2021, day 5 input format).
  """

  # Fix: was `Nil`, which is the module alias (the atom :"Elixir.Nil"),
  # not the `nil` value.
  defstruct start: nil,
            end: nil

  @type point :: {integer(), integer()}
  # Fix: the original map type had trailing commas, which is a syntax error.
  @type t :: %__MODULE__{
          start: point,
          end: point
        }

  @doc ~S(Parses `"x1,y1 -> x2,y2"` into a `%Line{}`.)
  @spec from_string(String.t()) :: Line.t()
  def from_string(string) do
    [s, e] = String.split(string, " -> ", trim: true)
    [sx, sy] = s |> String.split(",", trim: true) |> Enum.map(&String.to_integer/1)
    [ex, ey] = e |> String.split(",", trim: true) |> Enum.map(&String.to_integer/1)

    %Line{
      start: {sx, sy},
      end: {ex, ey}
    }
  end

  @doc """
  All points on the 45-degree diagonal from `{a, b}` to `{c, d}`
  (assumes `abs(c - a) == abs(d - b)`).
  """
  @spec diagonal_points(integer(), integer(), integer(), integer()) :: list(point())
  def diagonal_points(a, b, c, d)

  # Normalize so the x coordinate always increases.
  def diagonal_points(a, b, c, d) when a > c do
    diagonal_points(c, d, a, b)
  end

  # Rising diagonal: step {+1, +1}.
  def diagonal_points(a, b, c, d) when c - a == d - b do
    Enum.zip(a..c, b..d)
  end

  # Falling diagonal: step {+1, -1} (stepped ranges need Elixir >= 1.12).
  def diagonal_points(a, b, c, d) do
    Enum.zip(a..c, b..d//-1)
  end

  @doc """
  Points covered by `line`. Horizontal and vertical segments are always
  expanded; diagonals only when `do_diagonal` is true (otherwise `[]`).
  """
  @spec get_points(Line.t(), boolean()) :: list(point())
  def get_points(line, do_diagonal \\ false)

  # Vertical segment (same x).
  def get_points(%{start: {a, b}, end: {a, d}}, _do_diagonal) do
    min(b, d)..max(b, d) |> Enum.map(fn y -> {a, y} end)
  end

  # Horizontal segment (same y).
  def get_points(%{start: {a, b}, end: {c, b}}, _do_diagonal) do
    min(a, c)..max(a, c) |> Enum.map(fn x -> {x, b} end)
  end

  def get_points(%{start: {a, b}, end: {c, d}}, true) do
    diagonal_points(a, b, c, d)
  end

  def get_points(_line, false) do
    []
  end
end
defmodule Vents do
  @moduledoc """
  A collection of `Line` segments (Advent of Code 2021, day 5) with a helper
  for finding points covered by more than one segment.
  """

  # Fix: was `Nil` (the module alias atom), not the `nil` value.
  defstruct lines: nil

  @type point :: {integer(), integer()}
  # Fix: the original map type had a trailing comma, which is a syntax error.
  @type t :: %__MODULE__{
          lines: list(Line.t())
        }

  @doc "Builds a `%Vents{}` from one input line per string."
  @spec from_strings(list(String.t())) :: Vents.t()
  def from_strings(strings) do
    %Vents{lines: Enum.map(strings, &Line.from_string/1)}
  end

  @doc """
  Unique points covered by at least two lines. Diagonal segments are only
  considered when `do_diagonal` is true.
  """
  @spec get_overlap(Vents.t(), boolean()) :: list(point)
  def get_overlap(vents, do_diagonal \\ false)

  def get_overlap(vents, do_diagonal) do
    all_points = Enum.flat_map(vents.lines, &Line.get_points(&1, do_diagonal))

    # Removing one occurrence of each unique point leaves exactly the
    # duplicates. NOTE(review): `--` is O(n*m); fine for puzzle-sized input,
    # but an Enum.frequencies-based filter would scale better.
    overlap_points = all_points -- Enum.uniq(all_points)
    Enum.uniq(overlap_points)
  end
end
|
lib/vents.ex
| 0.870005
| 0.500305
|
vents.ex
|
starcoder
|
defmodule Telnyx.MessagingProfiles do
  @moduledoc """
  Context for managing messaging profiles.

  List endpoints accept options as a keyword list of nested maps that are
  flattened into bracketed query parameters, e.g. `page: %{size: 10}`
  becomes `page[size]=10`.
  """

  alias Telnyx.Client

  @doc """
  Lists all messaging profiles.

  ## Examples

      api_key = "YOUR_API_KEY"
      Telnyx.MessagingProfiles.list(api_key, page: %{size: 10})

  Returns `{:ok, [profile_map]}` or `{:error, %Telnyx.Error{}}`.

  See https://developers.telnyx.com/docs/api/v2/messaging/Messaging-Profiles
  """
  @spec list(String.t(), Keyword.t()) :: {:ok, [map]} | {:error, %Telnyx.Error{}}
  def list(api_key, opts \\ []) do
    Client.get(api_key, "/messaging_profiles?#{build_query(opts)}")
  end

  @doc """
  Creates a messaging profile.

  ## Examples

      %{name: "My profile"}
      |> Telnyx.MessagingProfiles.create(api_key)

  Returns `{:ok, profile_map}` or `{:error, %Telnyx.Error{}}`.

  See https://developers.telnyx.com/docs/api/v2/messaging/Messaging-Profiles?#createMessagingProfile
  """
  @spec create(map, String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def create(params = %{}, api_key) do
    Client.post(api_key, params, "/messaging_profiles")
  end

  @doc """
  Deletes a messaging profile by `uuid`.

  Returns `{:ok, deleted_profile_map}` or `{:error, %Telnyx.Error{}}`.

  See https://developers.telnyx.com/docs/api/v2/messaging/Messaging-Profiles#deleteMessagingProfile
  """
  @spec delete(String.t(), String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def delete(uuid, api_key) do
    Client.delete(api_key, "/messaging_profiles/#{uuid}")
  end

  @doc """
  Retrieves a messaging profile by `uuid`.

  Returns `{:ok, profile_map}` or `{:error, %Telnyx.Error{}}`.

  See https://developers.telnyx.com/docs/api/v2/messaging/Messaging-Profiles#retrieveMessagingProfile
  """
  @spec retrieve(String.t(), String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def retrieve(uuid, api_key) do
    Client.get(api_key, "/messaging_profiles/#{uuid}")
  end

  @doc """
  Updates a messaging profile.

  ## Examples

      %{name: "Renamed profile"}
      |> Telnyx.MessagingProfiles.update("uuid", api_key)

  Returns `{:ok, updated_profile_map}` or `{:error, %Telnyx.Error{}}`.

  See https://developers.telnyx.com/docs/api/v2/messaging/Messaging-Profiles#updateMessagingProfile
  """
  @spec update(map, String.t(), String.t()) :: {:ok, map} | {:error, %Telnyx.Error{}}
  def update(params = %{}, uuid, api_key) do
    Client.patch(api_key, params, "/messaging_profiles/#{uuid}")
  end

  @doc """
  Lists phone numbers associated with the messaging profile `uuid`.

  ## Examples

      Telnyx.MessagingProfiles.list_messaging_profile_phone_numbers("uuid", api_key, page: %{size: 10})

  Returns `{:ok, [phone_number_map]}` or `{:error, %Telnyx.Error{}}`.

  See https://developers.telnyx.com/docs/api/v2/messaging/Messaging-Profiles#listMessagingProfilePhoneNumbers
  """
  @spec list_messaging_profile_phone_numbers(String.t(), String.t(), Keyword.t()) ::
          {:ok, [map]} | {:error, %Telnyx.Error{}}
  def list_messaging_profile_phone_numbers(uuid, api_key, opts \\ []) do
    Client.get(api_key, "/messaging_profiles/#{uuid}/phone_numbers?#{build_query(opts)}")
  end

  @doc """
  Lists short codes associated with the messaging profile `uuid`.

  ## Examples

      Telnyx.MessagingProfiles.list_messaging_profile_short_codes("uuid", api_key, page: %{size: 10})

  Returns `{:ok, [short_code_map]}` or `{:error, %Telnyx.Error{}}`.

  See https://developers.telnyx.com/docs/api/v2/messaging/Messaging-Profiles#listMessagingProfileShortCodes
  """
  @spec list_messaging_profile_short_codes(String.t(), String.t(), Keyword.t()) ::
          {:ok, [map]} | {:error, %Telnyx.Error{}}
  def list_messaging_profile_short_codes(uuid, api_key, opts \\ []) do
    Client.get(api_key, "/messaging_profiles/#{uuid}/short_codes?#{build_query(opts)}")
  end

  # Flattens `[type: %{key => value}]` options into "type[key]=value" pairs
  # joined with "&". This logic was previously duplicated verbatim in three
  # functions. Returns "" for empty opts (the original produced nil, which
  # also interpolated to "" in the URL).
  defp build_query(opts) do
    opts
    |> Enum.flat_map(fn {type, map} ->
      Enum.map(map, fn {key, value} -> build_query_string(type, key, value) end)
    end)
    |> Enum.join("&")
  end

  defp build_query_string(type, key, value), do: "#{type}[#{key}]=#{value}"
end
|
lib/telnyx/messaging_profiles.ex
| 0.731634
| 0.57678
|
messaging_profiles.ex
|
starcoder
|
defmodule Elixium.Block do
  alias Elixium.Block
  alias Elixium.Utilities
  alias Elixium.Transaction
  alias Elixium.Store.Ledger
  alias Decimal, as: D

  @moduledoc """
  Provides functions for creating blocks and mining new ones
  """

  defstruct index: nil,
            hash: nil,
            version: 1,
            previous_hash: nil,
            difficulty: nil,
            nonce: 0,
            timestamp: nil,
            merkle_root: nil,
            transactions: []

  # Struct type so specs can reference t() (the original specs used the bare
  # module name `Block`, which is not a valid typespec).
  @type t :: %__MODULE__{}

  @doc """
  When the first node on the Elixium network spins up, there won't be any
  blocks in the chain. In order to create a base from which all nodes can agree,
  we create a block called a genesis block. This block has the data structure
  that a block would have, but has hard-coded values. This block never needs
  to be verified by nodes, as it doesn't contain any actual data. The block
  mined after the genesis block must reference the hash of the genesis block
  as its previous_hash to be valid
  """
  @spec initialize() :: t()
  def initialize do
    block = %Block{
      index: 0,
      difficulty: 3_000_000,
      timestamp: DateTime.utc_now() |> DateTime.to_unix(),
      transactions: [
        %{
          inputs: [],
          outputs: [
            %{
              txoid: "79644A8F062F1BA9F7A32AF2242C04711A634D42F0628ADA6B985B3D21296EEA:0",
              data: "GENESIS BLOCK",
              addr: nil,
              amount: nil
            }
          ]
        }
      ]
    }

    %{block | hash: calculate_block_hash(block)}
  end

  @doc """
  Takes the previous block as an argument (This is the way we create every
  block except the genesis block)
  """
  @spec initialize(t()) :: t()
  def initialize(%{index: index, hash: previous_hash}) do
    block = %Block{
      index: index + 1,
      version: 1,
      previous_hash: previous_hash,
      timestamp: DateTime.utc_now() |> DateTime.to_unix()
    }

    # Difficulty depends on the partially-built block, so it is filled in
    # afterwards. (The original set a placeholder `difficulty: 4.0` that was
    # always overwritten here; the placeholder has been dropped.)
    %{block | difficulty: calculate_difficulty(block)}
  end

  @doc "Hash of the block header fields, as a Base16 string."
  @spec calculate_block_hash(t()) :: String.t()
  def calculate_block_hash(block) do
    %{
      index: index,
      version: version,
      previous_hash: previous_hash,
      timestamp: timestamp,
      nonce: nonce,
      merkle_root: merkle_root
    } = block

    # NOTE(review): timestamp is passed through unconverted while the other
    # integer fields are stringified — presumably Utilities.sha3_base16
    # accepts mixed terms; confirm before changing.
    Utilities.sha3_base16([
      Integer.to_string(index),
      Integer.to_string(version),
      previous_hash,
      timestamp,
      Integer.to_string(nonce),
      merkle_root
    ])
  end

  @doc """
  The process of mining consists of hashing the index of the block, the hash
  of the previous block (thus linking the current and previous block), the
  timestamp at which the block was generated, the merkle root of the transactions
  within the block, and a random nonce. We then check to see whether the number
  represented by the hash is lower than the mining difficulty. If the value of
  the hash is lower, it is a valid block, and we can broadcast the block to
  other nodes on the network.
  """
  @spec mine(t()) :: t()
  def mine(block) do
    %{nonce: nonce} = block
    block = Map.put(block, :hash, calculate_block_hash(block))

    if hash_beat_target?(block) do
      block
    else
      # Tail-recursive retry with the next nonce.
      mine(%{block | nonce: nonce + 1})
    end
  end

  @doc """
  Retrieves a block header from a given block
  """
  @spec header(t()) :: map
  def header(block) do
    %{
      hash: block.hash,
      index: block.index,
      version: block.version,
      previous_hash: block.previous_hash,
      merkle_root: block.merkle_root,
      nonce: block.nonce,
      timestamp: block.timestamp
    }
  end

  @doc """
  Because the hash is a Base16 string, and not an integer, we must first
  convert the hash to an integer, and afterwards compare it to the target
  """
  @spec hash_beat_target?(t()) :: boolean
  def hash_beat_target?(%{hash: hash, difficulty: difficulty}) do
    {integer_value_of_hash, _} = Integer.parse(hash, 16)
    integer_value_of_hash < calculate_target(difficulty)
  end

  @doc """
  The target is a number based off of the block difficulty. The higher the block
  difficulty, the lower the target. When a block is being mined, the goal is
  to find a hash that is lower in numerical value than the target. The maximum
  target (when the difficulty is 0) is
  115792089237316195423570985008687907853269984665640564039457584007913129639935,
  which means any hash is valid.
  """
  @spec calculate_target(float) :: number
  def calculate_target(difficulty), do: round(:math.pow(16, 64) / difficulty) - 1

  @doc """
  Calculates the block reward for a given block index, following our weighted
  smooth emission algorithm.
  Where x is total token supply, t is block at full emission, i is block index,
  and s is the sigma of the total_token_supply, the Smooth emission algorithm
  is as follows: (x * max{0, t - i}) / s
  """
  @spec calculate_block_reward(number) :: Decimal
  def calculate_block_reward(block_index) do
    sigma_full_emission = Application.get_env(:elixium_core, :sigma_full_emission)
    total_token_supply = Application.get_env(:elixium_core, :total_token_supply)
    block_at_full_emission = Application.get_env(:elixium_core, :block_at_full_emission)

    D.div(
      D.mult(
        D.new(total_token_supply),
        D.new(max(0, block_at_full_emission - block_index))
      ),
      D.new(sigma_full_emission)
    )
  end

  @doc "Sum of the fees of all given transactions, as a Decimal."
  @spec total_block_fees(list) :: Decimal
  def total_block_fees(transactions) do
    Enum.reduce(transactions, D.new(0), fn tx, acc -> D.add(acc, Transaction.calculate_fee(tx)) end)
  end

  @doc """
  Return a list of keys that differ between two given block headers.
  """
  @spec diff_header(t(), t()) :: list
  def diff_header(block1, block2) do
    block1
    |> header()
    |> Map.keys()
    |> Enum.filter(&(Map.get(block1, &1) != Map.get(block2, &1)))
  end

  @doc """
  Calculates the difficulty for a block using the WWHM difficulty algorithm
  described at https://getmasari.org/research-papers/wwhm.pdf
  """
  @spec calculate_difficulty(t()) :: number
  def calculate_difficulty(%{index: index}) when index < 11, do: 3_000_000

  def calculate_difficulty(block) do
    retargeting_window = Application.get_env(:elixium_core, :retargeting_window)
    target_solvetime = Application.get_env(:elixium_core, :target_solvetime)

    # If we don't have enough blocks to fill our retargeting window, the
    # algorithm won't run properly (difficulty will be set too high). Let's scale
    # the algo down until then.
    retargeting_window = min(block.index, retargeting_window)

    {weighted_solvetimes, summed_difficulties} =
      retargeting_window
      |> Ledger.last_n_blocks()
      |> weight_solvetimes_and_sum_difficulties()

    # Clamp so a burst of fast blocks can't collapse the difficulty.
    min_timespan = target_solvetime * retargeting_window / 2
    weighted_solvetimes = if weighted_solvetimes < min_timespan, do: min_timespan, else: weighted_solvetimes

    target = (retargeting_window + 1) / 2 * target_solvetime
    summed_difficulties * target / weighted_solvetimes
  end

  @doc """
  Walks the blocks in order, weighting each inter-block solvetime by its
  position (more recent blocks weigh more) and summing difficulties.
  Returns `{weighted_solvetimes, summed_difficulties}`.
  """
  def weight_solvetimes_and_sum_difficulties(blocks) do
    target_solvetime = Application.get_env(:elixium_core, :target_solvetime)
    max_solvetime = target_solvetime * 10

    # The first block only seeds the previous-timestamp; each later block
    # contributes a clamped solvetime weighted by its index.
    # (Enum.reduce replaces the original `Enum.scan |> List.last` — same fold,
    # without materializing every intermediate state.)
    {_, weighted_solvetimes, summed_difficulties, _} =
      Enum.reduce(blocks, {nil, 0, 0, 0}, fn block, {last_block_timestamp, weighted_solvetimes, sum_difficulties, i} ->
        if i == 0 do
          {block.timestamp, 0, 0, 1}
        else
          solvetime = block.timestamp - last_block_timestamp
          solvetime = if solvetime > max_solvetime, do: max_solvetime, else: solvetime
          solvetime = if solvetime == 0, do: 1, else: solvetime
          {block.timestamp, weighted_solvetimes + solvetime * i, sum_difficulties + block.difficulty, i + 1}
        end
      end)

    {weighted_solvetimes, summed_difficulties}
  end
end
|
lib/block.ex
| 0.818809
| 0.58673
|
block.ex
|
starcoder
|
defmodule Hitbtc.Http.Public do
  alias Hitbtc.Util.Api

  @moduledoc """
  Public API's for HitBtc API v2
  """

  @doc """
  Fetches the list of available symbols (currency pairs).

  Returns `{:ok, [symbol_map]}` where each map carries keys such as
  `:baseCurrency`, `:quoteCurrency`, `:id`, `:tickSize`, `:quantityIncrement`,
  or `{:error, reason}` on failure.
  """
  @spec symbol_list() :: {:ok, [map]} | {:error, term}
  def symbol_list do
    Api.get_body("/public/symbol")
  end

  @doc """
  Fetches info for a single symbol, e.g. `"ETHBTC"`.

  Returns `{:ok, symbol_map}`, or `{:error, error_map}` with `code: 2001`
  ("Symbol not found") when the symbol does not exist.
  """
  @spec symbol(String.t) :: {:ok, map} | {:error, term}
  def symbol(symbol) do
    Api.get_body("/public/symbol/#{symbol}")
  end

  @doc """
  Fetches the list of available currencies.

  Returns `{:ok, [currency_map]}` with fields such as `:id`, `:fullName`,
  `:crypto`, `:payinEnabled`, `:payoutEnabled`, or `{:error, reason}`.
  """
  @spec currency() :: {:ok, [map]} | {:error, term}
  def currency do
    Api.get_body("/public/currency")
  end

  @doc """
  Fetches info for a single currency, e.g. `"ADX"`.

  Returns `{:ok, currency_map}`, or `{:error, error_map}` with `code: 2002`
  ("Currency not found") when the currency does not exist.
  """
  @spec currency(String.t) :: {:ok, map} | {:error, term}
  def currency(currency) do
    Api.get_body("/public/currency/#{currency}")
  end

  @doc """
  Fetches last-24h ticker information for all symbols.

  Returns `{:ok, [ticker_map]}` with `:ask`, `:bid`, `:high`, `:low`, `:last`,
  `:open`, `:volume`, `:volumeQuote`, `:timestamp`, or `{:error, reason}`.
  """
  @spec ticker() :: {:ok, [map]} | {:error, term}
  def ticker do
    Api.get_body("/public/ticker")
  end

  @doc """
  Fetches last-24h ticker information for one symbol.

  Returns `{:ok, ticker_map}`, or `{:error, error_map}` ("Symbol not found")
  for an unknown symbol.
  """
  @spec ticker(String.t) :: {:ok, map} | {:error, term}
  def ticker(symbol) do
    Api.get_body("/public/ticker/#{symbol}")
  end

  @doc """
  Fetches trades for a symbol (currency pair).

  Supported params:

  - `sort` - Sort direction: `ASC`, `DESC`
  - `by` - Filter field. Values: `timestamp`, `id`
  - `from` / `till` - Range bound: datetime (ISO) or ms timestamp when
    filtering by timestamp, otherwise a trade id
  - `limit` / `offset` - Paging

  Returns `{:ok, [trade_map]}` (`:id`, `:price`, `:quantity`, `:side`,
  `:timestamp`), or `{:error, error_map}` for an unknown symbol.
  """
  @spec trades(String.t, [tuple]) :: {:ok, [map]} | {:error, term}
  def trades(symbol, params \\ []) do
    Api.get_body("/public/trades/#{symbol}", params)
  end

  @doc """
  Fetches the order book for a symbol, limited to `limit` price levels per
  side (default 100).

  Returns `{:ok, %{ask: [...], bid: [...]}}` where each level is a map with
  `:price` and `:size`, or `{:error, reason}`.
  """
  @spec order_book(String.t, integer) :: {:ok, [map]} | {:error, term}
  def order_book(symbol, limit \\ 100) do
    Api.get_body("/public/orderbook/#{symbol}", limit: limit)
  end

  @doc """
  Fetches candles (OHLC data) for a symbol.

  Supported params:

  - `limit` - Number of candles. Example: 100
  - `period` - Candle period. Values: `M1`, `M3`, `M5`, `M15`, `M30`, `H1`,
    `H4`, `D1`, `D7`, `1M`

  Returns `{:ok, [candle_map]}` (`:open`, `:close`, `:min`, `:max`,
  `:volume`, `:volumeQuote`, `:timestamp`), or `{:error, reason}`.
  """
  @spec candles(String.t, [tuple]) :: {:ok, [map]} | {:error, term}
  def candles(symbol, params \\ []) do
    Api.get_body("/public/candles/#{symbol}", params)
  end
end
|
lib/hitbtc/http/public.ex
| 0.924262
| 0.735683
|
public.ex
|
starcoder
|
defmodule Day11 do
# Runs the seating automaton with the adjacent-neighbor rules (part 1) until
# it stabilizes, then counts occupied seats.
def part1(input) do
  input
  |> parse_input()
  |> update_grid(&update_cell_part_1/1)
  |> Enum.count(fn {_coord, seat} -> seat == :occupied end)
end
def part2(input) do
grid = parse_input(input)
grid
|> update_grid(&update_cell_part_2/1)
|> Enum.count(fn {_, cell} -> cell == :occupied end)
end
def update_grid(grid, update_function) do
update_cell = update_function.(grid)
new_grid =
grid
|> Stream.map(update_cell)
|> Map.new()
unless new_grid == grid do
update_grid(new_grid, update_function)
else
new_grid
end
end
def update_cell_part_1(grid) do
fn {coord, cell} ->
{row, column} = coord
adjacent_coords =
for x <- [-1, 0, 1], y <- [-1, 0, 1], !(x == 0 and y == 0) do
{row + y, column + x}
end
occupied_seats =
adjacent_coords
|> Stream.map(&grid[&1])
|> Enum.count(&(&1 == :occupied))
cond do
cell == :empty and occupied_seats == 0 ->
{coord, :occupied}
cell == :occupied and occupied_seats >= 4 ->
{coord, :empty}
true ->
{coord, cell}
end
end
end
def update_cell_part_2(grid) do
fn {coord, cell} ->
first_possible_seats =
for x <- [-1, 0, 1], y <- [-1, 0, 1], !(x == 0 and y == 0) do
coord
|> Stream.iterate(fn {row, column} -> {row + y, column + x} end)
|> Stream.drop(1)
|> Stream.map(&grid[&1])
|> Enum.find(&(&1 != :floor))
end
occupied_seats = Enum.count(first_possible_seats, &(&1 == :occupied))
cond do
cell == :empty and occupied_seats == 0 ->
{coord, :occupied}
cell == :occupied and occupied_seats >= 5 ->
{coord, :empty}
true ->
{coord, cell}
end
end
end
def parse_input(input) do
input_stream = String.splitter(input, "", trim: true)
row_length = Enum.find_index(input_stream, &(&1 == "\n"))
input_stream
|> Stream.reject(&(&1 == "\n"))
|> Stream.map(fn
"L" -> :empty
"." -> :floor
"#" -> :occupied
end)
|> Stream.with_index()
|> Map.new(fn {row, index} -> {{div(index, row_length), rem(index, row_length)}, row} end)
end
def print_map(map, row_length) do
rows =
for row <- 0..row_length do
row =
for column <- 0..row_length do
case map[{row, column}] do
:occupied -> "#"
:empty -> "L"
:floor -> "."
end
end
Enum.join(row)
end
rows
|> Enum.join("\n")
|> IO.puts()
end
def time_part_2 do
input = File.read!("../input.txt")
total_time =
1..10
|> Stream.map(fn _ ->
start_time = Time.utc_now()
part2(input)
end_time = Time.utc_now()
Time.diff(end_time, start_time, :millisecond)
|> IO.inspect(label: "time")
end)
|> Enum.sum()
total_time / 10
end
end
|
2020/day11/ex/day11.ex
| 0.528777
| 0.678889
|
day11.ex
|
starcoder
|
defmodule Microdata.Helpers do
  @moduledoc """
  `Microdata.Helpers` is a module for generic parsing helpers (ie those not coupled to the parsing mechanism).

  For example, certain tags require their values to be absolute URLs.
  """

  @doc """
  Validates that URLs include scheme & host

  ## Examples
  ```
  iex> Microdata.Helpers.validate_url("foo")
  {:error, "No scheme"}

  iex> Microdata.Helpers.validate_url("http://")
  {:error, "No host"}

  iex> Microdata.Helpers.validate_url("http://foo.com")
  {:ok, "http://foo.com"}
  ```
  """
  @spec validate_url(String.t()) :: {:error, String.t()} | {:ok, String.t()}
  def validate_url(url) do
    case URI.parse(url) do
      %URI{scheme: nil} -> {:error, "No scheme"}
      %URI{host: nil} -> {:error, "No host"}
      _ -> {:ok, url}
    end
  end

  @doc """
  Helper function to determine if a passed URL is absolute. Returns true/false bools.

  ## Examples
  ```
  iex> Microdata.Helpers.absolute_url?(nil)
  false

  iex> Microdata.Helpers.absolute_url?("path/to/page")
  false

  iex> Microdata.Helpers.absolute_url?("/path/to/page")
  false

  iex> Microdata.Helpers.absolute_url?("https://google.com")
  true
  ```
  """
  @spec absolute_url?(String.t()) :: boolean
  def absolute_url?(nil), do: false

  def absolute_url?(url) do
    case Microdata.Helpers.validate_url(url) do
      {:ok, _} -> true
      _ -> false
    end
  end

  # Microdata.Item helpers

  @doc """
  Parse item types from a space-separated string.

  ## Examples
  ```
  iex> Microdata.Helpers.parse_item_types(nil)
  []

  iex> Microdata.Helpers.parse_item_types("foo")
  ["foo"]

  iex> Microdata.Helpers.parse_item_types("foo bar")
  ["foo", "bar"]

  iex> Microdata.Helpers.parse_item_types("\\n\\nfoo bar baz \\n ")
  ["foo", "bar", "baz"]
  ```
  """
  @spec parse_item_types(String.t()) :: [String.t()]
  def parse_item_types(nil), do: []
  def parse_item_types(string), do: string |> String.trim() |> String.split(" ")

  @doc """
  Parse item id from a provided string.

  ## Examples
  ```
  iex> Microdata.Helpers.parse_item_id(nil)
  nil

  iex> Microdata.Helpers.parse_item_id("\\r foo \\n")
  URI.parse("foo")

  iex> Microdata.Helpers.parse_item_id("https://google.com")
  URI.parse("https://google.com")
  ```
  """
  @spec parse_item_id(String.t()) :: URI.t()
  def parse_item_id(nil), do: nil
  def parse_item_id(string), do: string |> String.trim() |> URI.parse()

  # Microdata.Property helpers

  @doc """
  Parse property names from a provided string.

  ## Examples
  ```
  iex> Microdata.Helpers.parse_property_names(nil)
  nil

  iex> Microdata.Helpers.parse_property_names("bar", %Microdata.Item{types: MapSet.new(["foo"])})
  ["foo/bar"]

  iex> Microdata.Helpers.parse_property_names("\\rbar baz bar \\n", %Microdata.Item{types: MapSet.new(["foo"])})
  ["foo/bar", "foo/baz"]
  ```
  """
  @spec parse_property_names(String.t() | nil, Microdata.Item.t()) :: [String.t()] | nil
  # Arity-1 clause kept for backwards compatibility with existing callers.
  def parse_property_names(nil), do: nil

  # The two-argument form previously had no nil clause, so a missing
  # itemprop attribute crashed in String.trim/1 even though the @spec
  # suggested nil handling; match it explicitly here.
  def parse_property_names(nil, _item), do: nil

  def parse_property_names(string, item) do
    string
    |> String.trim()
    |> String.split(" ")
    |> Enum.reduce([], fn name, names ->
      parse_property_name(name, item, names)
    end)
    |> Enum.reverse()
  end

  # Resolves one property name against the item's vocabulary, prepending
  # it to `names` (caller reverses) while dropping duplicates.
  defp parse_property_name(name, item, names) do
    cond do
      Enum.member?(names, name) ->
        names

      Microdata.Helpers.absolute_url?(name) ->
        [name | names]

      true ->
        vocabulary = Microdata.Item.vocabulary(item)

        if vocabulary != nil do
          name = "#{vocabulary}#{name}"
          if Enum.member?(names, name), do: names, else: [name | names]
        else
          [name | names]
        end
    end
  end
end
|
lib/helpers.ex
| 0.889852
| 0.780453
|
helpers.ex
|
starcoder
|
defmodule APDS9960.Gesture do
  @moduledoc "The gesture detection."

  alias APDS9960.{Comm, Gesture, Sensor}
  use TypedStruct

  @type gesture_direction :: :down | :left | :right | :up
  @typep dataset :: {byte, byte, byte, byte}

  typedstruct do
    @typedoc "The gesture data accumulator in the gesture processing loop."
    field(:sensor, Sensor.t(), enforce: true)
    field(:up_count, non_neg_integer, default: 0)
    field(:down_count, non_neg_integer, default: 0)
    field(:left_count, non_neg_integer, default: 0)
    field(:right_count, non_neg_integer, default: 0)
    field(:deduced_gesture_direction, gesture_direction)
    field(:started_at_ms, integer)
    field(:updated_at_ms, integer)
  end

  @doc """
  Blocks until a gesture direction is deduced from the sensor's FIFO or
  the `:timeout` option (milliseconds, default 5000) elapses.
  """
  @spec read_gesture(Sensor.t(), Enum.t()) :: gesture_direction | {:error, any}
  def read_gesture(%Sensor{} = sensor, opts \\ []) do
    gesture = %Gesture{sensor: sensor, started_at_ms: System.monotonic_time(:millisecond)}
    timeout = Access.get(opts, :timeout, 5000)

    if valid?(sensor) do
      do_read_gesture(gesture, timeout)
    else
      {:error, "gesture not available"}
    end
  end

  @spec do_read_gesture(t(), non_neg_integer) :: gesture_direction | {:error, any}
  defp do_read_gesture(%Gesture{sensor: sensor} = gesture, timeout) do
    # Wait for new FIFO data.
    Process.sleep(30)

    # Read data from the Gesture FIFO.
    datasets = gesture_fifo(sensor, fifo_level(sensor))

    # Filter out useless datasets: photodiode deltas within the +-13
    # band are treated as noise.
    datasets =
      Enum.filter(datasets, fn {up, down, left, right} ->
        cond do
          up == down and left == right -> false
          (up - down) in -13..13 -> false
          (left - right) in -13..13 -> false
          true -> true
        end
      end)

    # `datasets == []` instead of `length(datasets) == 0`: no O(n) walk
    # just to test emptiness.
    if datasets == [] do
      timeout_or_retry(gesture, timeout)
    else
      [{fifo_up, fifo_down, fifo_left, fifo_right} | _] = datasets
      up_down_diff = fifo_up - fifo_down
      left_right_diff = fifo_left - fifo_right
      gesture = deduce_gesture_direction!(gesture, up_down_diff, left_right_diff)

      if gesture.deduced_gesture_direction do
        gesture.deduced_gesture_direction
      else
        timeout_or_retry(gesture, timeout)
      end
    end
  end

  # Gives up with an error once `timeout` ms have elapsed since the read
  # started; otherwise polls the FIFO again.
  @spec timeout_or_retry(t(), integer) :: gesture_direction | {:error, any}
  defp timeout_or_retry(%Gesture{} = gesture, timeout) do
    if System.monotonic_time(:millisecond) - gesture.started_at_ms > timeout do
      {:error, "timeout #{timeout} ms"}
    else
      do_read_gesture(gesture, timeout)
    end
  end

  # A direction is deduced once motion is seen in one sense of an axis
  # after having been counted in the opposite sense (enter then exit).
  @spec deduce_gesture_direction!(t(), integer, integer) :: t()
  defp deduce_gesture_direction!(%Gesture{} = gesture, up_down_diff, left_right_diff) do
    gesture =
      cond do
        up_down_diff < 0 ->
          if gesture.down_count > 0 do
            %{gesture | deduced_gesture_direction: :up}
          else
            %{gesture | up_count: gesture.up_count + 1}
          end

        up_down_diff > 0 ->
          if gesture.up_count > 0 do
            %{gesture | deduced_gesture_direction: :down}
          else
            %{gesture | down_count: gesture.down_count + 1}
          end

        true ->
          gesture
      end

    gesture =
      cond do
        left_right_diff < 0 ->
          if gesture.right_count > 0 do
            %{gesture | deduced_gesture_direction: :left}
          else
            %{gesture | left_count: gesture.left_count + 1}
          end

        left_right_diff > 0 ->
          if gesture.left_count > 0 do
            %{gesture | deduced_gesture_direction: :right}
          else
            %{gesture | right_count: gesture.right_count + 1}
          end

        true ->
          gesture
      end

    %{gesture | updated_at_ms: System.monotonic_time(:millisecond)}
  end

  @doc "Reads all gesture-engine configuration registers into one map."
  @spec settings(Sensor.t()) :: %{
          dimension: 0..3,
          enabled: boolean,
          exit_mask: byte,
          exit_persistence: 0..3,
          fifo_threshold: 0..3,
          gain: 0..3,
          interrupt_enabled: boolean,
          led_boost: 0..3,
          led_drive_strength: 0..3,
          offset: %{down: integer, left: integer, right: integer, up: integer},
          pulse: %{count: byte, length: 0..3},
          threshold: %{enter: byte, exit: byte},
          wait_time: 0..7
        }
  def settings(%Sensor{} = sensor) do
    %{
      enabled: enabled?(sensor),
      interrupt_enabled: interrupt_enabled?(sensor),
      threshold: get_threshold(sensor),
      fifo_threshold: get_fifo_threshold(sensor),
      exit_mask: get_gesture_exit_mask(sensor),
      exit_persistence: get_exit_persistence(sensor),
      gain: get_gain(sensor),
      led_drive_strength: get_led_drive_strength(sensor),
      wait_time: get_wait_time(sensor),
      led_boost: get_led_boost(sensor),
      offset: get_offset(sensor),
      pulse: get_pulse(sensor),
      dimension: get_dimension(sensor)
    }
  end

  @doc "Reads the gesture-related status flags."
  @spec status(Sensor.t()) :: %{
          fifo_overflow: boolean,
          saturation: boolean,
          valid: boolean
        }
  def status(%Sensor{transport: i2c}) do
    {:ok, s} = Comm.status(i2c)
    {:ok, gs} = Comm.gesture_status(i2c)

    %{
      saturation: s.proximity_or_gesture_saturation == 1,
      valid: gs.valid == 1,
      fifo_overflow: gs.fifo_overflow == 1
    }
  end

  ## Gesture Enable

  @spec enabled?(Sensor.t()) :: boolean
  def enabled?(%Sensor{transport: i2c}) do
    {:ok, %{gesture: value}} = Comm.get_enable(i2c)
    value == 1
  end

  @spec enable(Sensor.t(), 0 | 1) :: :ok
  def enable(%Sensor{transport: i2c}, value \\ 1) do
    Comm.set_enable(i2c, gesture: value)
  end

  ## Gesture Proximity Enter/Exit Threshold

  @spec get_threshold(Sensor.t()) :: %{enter: byte, exit: byte}
  def get_threshold(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_proximity_threshold(i2c)
    %{enter: x.enter, exit: x.exit}
  end

  @spec set_threshold(Sensor.t(), Enum.t()) :: :ok
  def set_threshold(%Sensor{transport: i2c}, opts) do
    Comm.set_gesture_proximity_threshold(i2c, opts)
  end

  ## Gesture FIFO Threshold

  @spec get_fifo_threshold(Sensor.t()) :: 0..3
  def get_fifo_threshold(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf1(i2c)
    x.fifo_threshold
  end

  @spec set_fifo_threshold(Sensor.t(), 0..3) :: :ok
  def set_fifo_threshold(%Sensor{transport: i2c}, value) do
    Comm.set_gesture_conf1(i2c, fifo_threshold: value)
  end

  ## Gesture Exit Mask

  @spec get_gesture_exit_mask(APDS9960.Sensor.t()) :: 0x0000..0x1111
  def get_gesture_exit_mask(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf1(i2c)
    x.exit_mask
  end

  @spec set_gesture_exit_mask(Sensor.t(), Enum.t()) :: :ok
  def set_gesture_exit_mask(%Sensor{transport: i2c}, opts) do
    Comm.set_gesture_conf1(i2c, opts)
  end

  ## Gesture Exit Persistence

  @spec get_exit_persistence(Sensor.t()) :: 0..3
  def get_exit_persistence(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf1(i2c)
    x.exit_persistence
  end

  @spec set_exit_persistence(Sensor.t(), 0..3) :: :ok
  def set_exit_persistence(%Sensor{transport: i2c}, value) do
    Comm.set_gesture_conf1(i2c, exit_persistence: value)
  end

  ## Gesture Gain Control

  @spec get_gain(Sensor.t()) :: 0..3
  def get_gain(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf2(i2c)
    x.gain
  end

  @spec set_gain(Sensor.t(), 0..3) :: :ok
  def set_gain(%Sensor{transport: i2c}, value) do
    Comm.set_gesture_conf2(i2c, gain: value)
  end

  ## Gesture LED Drive Strength

  @spec get_led_drive_strength(Sensor.t()) :: 0..3
  def get_led_drive_strength(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf2(i2c)
    x.led_drive_strength
  end

  @spec set_led_drive_strength(Sensor.t(), 0..3) :: :ok
  def set_led_drive_strength(%Sensor{transport: i2c}, value) do
    Comm.set_gesture_conf2(i2c, led_drive_strength: value)
  end

  ## Gesture Wait Time

  @spec get_wait_time(Sensor.t()) :: 0..7
  def get_wait_time(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf2(i2c)
    x.wait_time
  end

  @spec set_wait_time(Sensor.t(), 0..7) :: :ok
  def set_wait_time(%Sensor{transport: i2c}, value) do
    Comm.set_gesture_conf2(i2c, wait_time: value)
  end

  ## Gesture Saturation

  @spec saturation?(Sensor.t()) :: boolean
  def saturation?(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.status(i2c)
    x.proximity_or_gesture_saturation == 1
  end

  ## Gesture LED Boost

  @spec get_led_boost(Sensor.t()) :: 0..3
  def get_led_boost(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_config2(i2c)
    x.led_boost
  end

  @spec set_led_boost(Sensor.t(), 0..3) :: :ok
  def set_led_boost(%Sensor{transport: i2c}, value) do
    Comm.set_config2(i2c, led_boost: value)
  end

  ## Gesture Offset, UP/DOWN/LEFT/RIGHT

  @spec get_offset(Sensor.t()) ::
          %{down: -127..127, left: -127..127, right: -127..127, up: -127..127}
  def get_offset(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_offset(i2c)
    x
  end

  @spec set_offset(Sensor.t(), Enum.t()) :: :ok
  def set_offset(%Sensor{transport: i2c}, opts) do
    Comm.set_gesture_offset(i2c, opts)
  end

  ## Gesture Pulse

  @spec get_pulse(Sensor.t()) :: %{count: 0..63, length: 0..3}
  def get_pulse(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_pulse(i2c)
    %{count: x.pulse_count, length: x.pulse_length}
  end

  def set_pulse(%Sensor{transport: i2c}, opts) do
    Comm.set_gesture_pulse(i2c, opts)
  end

  ## Gesture Dimension Select

  @spec get_dimension(Sensor.t()) :: 0..3
  def get_dimension(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf3(i2c)
    x.dimension
  end

  @spec set_dimension(Sensor.t(), 0..3) :: :ok
  def set_dimension(%Sensor{transport: i2c}, value) do
    Comm.set_gesture_conf3(i2c, dimension: value)
  end

  ## Gesture Interrupt Enable

  @spec interrupt_enabled?(Sensor.t()) :: boolean
  def interrupt_enabled?(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf4(i2c)
    x.interrupt == 1
  end

  @spec enable_interrupt(Sensor.t(), 0 | 1) :: :ok
  def enable_interrupt(%Sensor{transport: i2c}, value \\ 1) do
    Comm.set_gesture_conf4(i2c, interrupt: value)
  end

  ## Gesture Mode

  @spec get_mode(Sensor.t()) :: 0 | 1
  def get_mode(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.get_gesture_conf4(i2c)
    x.mode
  end

  def set_mode(%Sensor{transport: i2c}, value) do
    Comm.set_gesture_conf4(i2c, mode: value)
  end

  ## Gesture FIFO Level

  @doc "The number of datasets that are currently available in the FIFO for read."
  @spec fifo_level(Sensor.t()) :: byte
  def fifo_level(%Sensor{transport: i2c}) do
    {:ok, <<dataset_count>>} = Comm.gesture_fifo_level(i2c)
    dataset_count
  end

  ## Gesture FIFO Overflow

  @spec fifo_overflow?(Sensor.t()) :: boolean
  def fifo_overflow?(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.gesture_status(i2c)
    x.fifo_overflow == 1
  end

  ## Gesture Valid

  @spec valid?(Sensor.t()) :: boolean
  def valid?(%Sensor{transport: i2c}) do
    {:ok, x} = Comm.gesture_status(i2c)
    x.valid == 1
  end

  ## Gesture FIFO UP/DOWN/LEFT/RIGHT

  @spec gesture_fifo(Sensor.t(), non_neg_integer()) :: [dataset]
  def gesture_fifo(%Sensor{transport: i2c}, dataset_count) do
    {:ok, datasets} = Comm.gesture_fifo(i2c, dataset_count)
    datasets
  end
end
|
lib/apds9960/gesture.ex
| 0.768603
| 0.497742
|
gesture.ex
|
starcoder
|
defmodule BitstylesPhoenix.Component.Badge do
  use BitstylesPhoenix.Component

  @moduledoc """
  The Badge component.
  """

  @doc ~s"""
  Render a badge to highlighted small texts, such as an item count or state indicator.

  ## Attributes

  - `variant` — Variant of the badge you want, from those available in the CSS classes e.g. `brand-1`, `danger`
  - `class` - Extra classes to pass to the badge. See `BitstylesPhoenix.Helper.classnames/1` for usage.
  - All other attributes are passed to the `span` tag.

  See [bitstyles badge docs](https://bitcrowd.github.io/bitstyles/?path=/docs/atoms-badge--badge) for examples, and for the default variants available.
  """

  story("Default badge", '''
      iex> assigns = %{}
      ...> render ~H"""
      ...> <.ui_badge>
      ...>   published
      ...> </.ui_badge>
      ...> """
      """
      <span class="a-badge u-h6 u-font--medium a-badge--gray">
        published
      </span>
      """
  ''')

  story("Badge variant brand-1", '''
      iex> assigns = %{}
      ...> render ~H"""
      ...> <.ui_badge variant="brand-1">
      ...>   new
      ...> </.ui_badge>
      ...> """
      """
      <span class="a-badge u-h6 u-font--medium a-badge--brand-1">
        new
      </span>
      """
  ''')

  story("Badge variant brand-2", '''
      iex> assigns = %{}
      ...> render ~H"""
      ...> <.ui_badge variant="brand-2">
      ...>   recommended
      ...> </.ui_badge>
      ...> """
      """
      <span class="a-badge u-h6 u-font--medium a-badge--brand-2">
        recommended
      </span>
      """
  ''')

  story("Badge variant danger", '''
      iex> assigns = %{}
      ...> render ~H"""
      ...> <.ui_badge variant="danger">
      ...>   deleted
      ...> </.ui_badge>
      ...> """
      """
      <span class="a-badge u-h6 u-font--medium a-badge--danger">
        deleted
      </span>
      """
  ''')

  story("Extra options and classes", '''
      iex> assigns = %{}
      ...> render ~H"""
      ...> <.ui_badge class="extra-class" data-foo="bar">
      ...>   published
      ...> </.ui_badge>
      ...> """
      """
      <span class="a-badge u-h6 u-font--medium a-badge--gray extra-class" data-foo="bar">
        published
      </span>
      """
  ''')

  def ui_badge(assigns) do
    # Default to the "gray" variant when the caller does not pass one.
    variant = assigns[:variant] || "gray"

    # Combine the base badge classes with any extra caller classes.
    class =
      classnames([
        "a-badge u-h6 u-font--medium a-badge--#{variant}",
        assigns[:class]
      ])

    # Everything except :class and :variant is forwarded to the <span>.
    extra = assigns_to_attributes(assigns, [:class, :variant])
    assigns = assign(assigns, class: class, extra: extra)

    ~H"<span class={@class} {@extra}><%= render_slot(@inner_block) %></span>"
  end
end
|
lib/bitstyles_phoenix/component/badge.ex
| 0.812682
| 0.486697
|
badge.ex
|
starcoder
|
defmodule Utils.Crypto.RSA do
  @moduledoc """
  Basic wrapper over the :public_key api. Please read up on how public key
  encryption works in general before using this lib
  """

  # ----------------------------------------------------------------------------
  # Module Types
  # ----------------------------------------------------------------------------
  @type t :: %{private: any, public: any}

  # ----------------------------------------------------------------------------
  # Public API
  # ----------------------------------------------------------------------------
  @doc """
  Encrypt some data using the public key info. The key info
  should come from the new API
  """
  @spec encryptPublic(String.t(), Utils.Crypto.RSA.t()) :: binary
  def encryptPublic(text, %{public: key}) do
    :public_key.encrypt_public(text, key)
  end

  @doc """
  Encrypt some data using the private key info. The key info
  should come from the new API
  """
  @spec encryptPrivate(String.t(), Utils.Crypto.RSA.t()) :: binary
  def encryptPrivate(text, %{private: key}) do
    :public_key.encrypt_private(text, key)
  end

  @doc """
  Decrypt some data using the Public key info. This info is assumed
  to have been encrypted with the private key version of this data
  """
  @spec decryptPublic(String.t(), Utils.Crypto.RSA.t()) :: String.t()
  def decryptPublic(text, %{public: key}) do
    :public_key.decrypt_public(text, key)
  end

  @doc """
  Decrypt some data using the Private key info. This info is assumed
  to have been encrypted with the public key version of this data
  """
  @spec decryptPrivate(String.t(), Utils.Crypto.RSA.t()) :: String.t()
  def decryptPrivate(text, %{private: key}) do
    :public_key.decrypt_private(text, key)
  end

  @doc """
  Convert a string version of a RSA Key into a format that the
  :public_key module can use

  Raises a `MatchError` if the PEM does not contain exactly one entry.
  """
  @spec decodeKey(String.t()) :: any
  def decodeKey(text) do
    [entry] = :public_key.pem_decode(text)
    :public_key.pem_entry_decode(entry)
  end

  @doc """
  Check to see if the key is in a valid format.

  NOTE: This check is not the best in the world right
  now because it just does the decode and inspects
  the results. I wonder if there is a faster way
  to do this.
  """
  def valid?(text), do: [] != :public_key.pem_decode(text)

  @doc """
  Create a new public key info map based on the string representation
  of the RSA key
  """
  @spec new(String.t(), nil | String.t()) :: Utils.Crypto.RSA.t()
  def new(publicKeyText, nil) do
    %{
      private: nil,
      public: decodeKey(publicKeyText)
    }
  end

  def new(publicKeyText, privateKeyText) do
    %{
      private: decodeKey(privateKeyText),
      public: decodeKey(publicKeyText)
    }
  end

  @doc """
  Create a new public/private key pair structure

  This API is typically used with the `generate`
  function
  """
  @spec new({String.t(), String.t()}) :: Utils.Crypto.RSA.t()
  def new({publicKeyText, privateKeyText}) do
    %{
      private: decodeKey(privateKeyText),
      public: decodeKey(publicKeyText)
    }
  end

  @doc """
  Create a RSA Key Pair. Note: This call is a
  bit expensive because you have to call openssl
  directly from the OS, then read/write the output

  The intermediate PEM files are written to the current working
  directory under a random name and are always deleted again — even if
  key generation or reading fails part-way through.

  In the future it would be good to look into creating
  these keys without using the commandline openssl.

  Returns:
    {publicKeyText, privateKeyText}
  """
  @spec generate(String.t()) ::
          {String.t(), String.t()}
  def generate(bits \\ "2048") do
    # Random, collision-resistant temp-file stem.  Uses OTP's :crypto
    # instead of the previous third-party UUID dependency.
    keyName = Base.encode16(:crypto.strong_rand_bytes(16), case: :lower)
    priKey = "#{keyName}_priv.pem"
    pubKey = "#{keyName}_pub.pem"

    try do
      {_, 0} = System.cmd("openssl", ["genrsa", "-out", priKey, bits], stderr_to_stdout: true)

      {_, 0} =
        System.cmd(
          "openssl",
          ["rsa", "-pubout", "-in", priKey, "-out", pubKey],
          stderr_to_stdout: true
        )

      {File.read!(pubKey), File.read!(priKey)}
    after
      # Best-effort cleanup; a file may not exist if openssl failed, so
      # the non-raising File.rm/1 is used.
      File.rm(priKey)
      File.rm(pubKey)
    end
  end

  @doc """
  Sign message with RSA private key
  """
  @spec sign(String.t(), Utils.Crypto.RSA.t(), atom) :: String.t()
  def sign(message, %{private: key}, digestType \\ :sha256) do
    :public_key.sign(message, digestType, key)
  end

  @doc """
  Verify signature with RSA public key
  """
  @spec verify(String.t(), binary, Utils.Crypto.RSA.t(), atom) :: boolean
  def verify(message, signature, %{public: key}, digestType \\ :sha256) do
    :public_key.verify(message, digestType, signature, key)
  end
end
|
src/apps/utils/lib/utils/crypto/rsa.ex
| 0.829871
| 0.444083
|
rsa.ex
|
starcoder
|
defmodule PSet do
  @moduledoc """
  Validation of a set of `Piece`s in a rummikub-style game.

  A set is valid when it contains at least three pieces and forms either
  a "group" (`:grp` — one number across distinct colors) or a "run"
  (`:run` — consecutive numbers).  Pieces with number/color `0` appear
  to act as jokers/wildcards — confirm against the `Piece` module.
  """
  require Piece
  defstruct [:pieces]
  # Bodiless head declaring defaults for the multi-clause validation
  # state machine below.  The extra arguments thread validation state
  # through the clauses: remaining pieces, current piece, its position,
  # the expected next piece, and the size/type/variety verdicts.
  def is_valid(set, pieces \\ nil, piece \\ nil, piece_pos \\ nil, expected \\ nil, valid_size \\ nil, type \\ nil, valid_pieces_variety \\ nil)
  # validate size
  def is_valid(set, pieces, piece, piece_pos, expected, nil, type, valid_pieces_variety) do
    is_valid(set, pieces, piece, piece_pos, expected, valid_size(length(set.pieces)), type, valid_pieces_variety)
  end
  def is_valid(_, _, _, _, _, false, _, _), do: false
  # define type and validate pieces variety
  def is_valid(%PSet{} = set, pieces, piece, piece_pos, expected, true, nil, nil) do
    # Collect the distinct colors and numbers present in the set.
    sets = Enum.reduce(
      set.pieces,
      %{colors: MapSet.new(), numbers: MapSet.new()},
      fn el, acc ->
        %{
          colors: MapSet.put(acc.colors, el.color),
          numbers: MapSet.put(acc.numbers, el.number)
        }
      end
    )
    # Zero values are excluded from the variety counts (wildcard?).
    colors_qnt = Enum.filter(sets.colors, fn el -> el != 0 end) |> length
    numbers_qnt = Enum.filter(sets.numbers, fn el -> el != 0 end) |> length
    type = def_type(colors_qnt)
    vpv = valid_pieces_variety(numbers_qnt, type)
    is_valid(set, pieces, piece, piece_pos, expected, true, type, vpv);
  end
  def is_valid(_, _, _, _, _, _, _, false), do: false
  # A group is valid when every piece carries a distinct color.
  def is_valid(set, _, _, _, _, true, :grp, true) when is_list(set.pieces) do
    colors_variety = Enum.reduce(set.pieces, MapSet.new(), fn el, acc -> MapSet.put(acc, el.color) end) |> MapSet.size
    colors_variety == length(set.pieces)
  end
  # Run validation entry point: start walking the pieces left to right.
  def is_valid(set, nil, nil, nil, expected, true, :run, true) do
    [piece | pieces] = set.pieces
    is_valid(set, pieces, piece, 0, expected, true, :run, true)
  end
  # Reject pieces the Piece module itself considers invalid.
  def is_valid(_, _, %Piece{} = piece, _, _, true, :run, true) when (not Piece.is_valid(piece.number, piece.color)), do: false
  # A numbered piece cannot sit later in the run than its number allows.
  def is_valid(_, _, %Piece{} = piece, piece_pos, _, true, :run, true) when piece.number != 0 and piece.number > piece_pos + 1, do: false
  # Runs cannot extend past 13.
  def is_valid(_, _, _, _, %Piece{} = expected, true, :run, true) when expected.number > 13, do: false
  # Number 0 appears to be a wildcard: it advances the position (and the
  # expected number, when one is known) without matching anything.
  def is_valid(set, pieces, %Piece{ number: 0 }, piece_pos, nil, true, :run, true) do
    [new_piece | new_piecies] = pieces
    is_valid(set, new_piecies, new_piece, piece_pos + 1, nil, true, :run, true)
  end
  def is_valid(set, pieces, %Piece{ number: 0 }, piece_pos, %Piece{} = expected, true, :run, true) do
    [new_piece | new_piecies] = pieces
    new_expected = %Piece{expected | number: expected.number + 1}
    is_valid(set, new_piecies, new_piece, piece_pos + 1, new_expected, true, :run, true)
  end
  # Mismatch against the expected consecutive number fails the run.
  def is_valid(_, _, %Piece{} = piece, _, %Piece{} = expected, true, :run, true) when piece.number != expected.number or expected.number > 13, do: false
  # All pieces consumed without a mismatch: the set is valid.
  def is_valid(_, [], _, _, _, true, _, true), do: true
  # Current piece matched; expect its successor next.
  def is_valid(%PSet{} = set, pieces, %Piece{} = piece, piece_pos, _, true, :run, true) do
    [new_piece | new_piecies] = pieces
    new_expected = %Piece{piece | number: piece.number + 1}
    is_valid(set, new_piecies, new_piece, piece_pos + 1, new_expected, true, :run, true)
  end
  # Anything that reaches here is invalid.
  def is_valid(_, _, _, _, _, _, _, _), do: false
  # A set needs at least three pieces.
  defp valid_size(size) when size >= 3, do: true
  defp valid_size(_), do: false
  # Groups: at most one distinct non-zero number; runs: 3..13 of them.
  defp valid_pieces_variety(qnt, :grp) when qnt == 1 or qnt == 0, do: true
  defp valid_pieces_variety(qnt, :run) when qnt >= 3 and qnt <= 13, do: true
  defp valid_pieces_variety(_, _), do: false
  # One (or zero) distinct non-zero colors means a run; otherwise a group.
  defp def_type(qnt) when qnt == 1 or qnt == 0, do: :run
  defp def_type(_), do: :grp
end
|
rmk/lib/pset.ex
| 0.616128
| 0.718002
|
pset.ex
|
starcoder
|
defmodule Estated.Property.Address do
  @moduledoc "Address details as provided by the assessor."
  @moduledoc since: "0.2.0"

  defstruct [
    :street_number,
    :street_pre_direction,
    :street_name,
    :street_suffix,
    :street_post_direction,
    :unit_type,
    :unit_number,
    :formatted_street_address,
    :city,
    :state,
    :zip_code,
    :zip_plus_four_code,
    :carrier_code,
    :latitude,
    :longitude,
    :geocoding_accuracy,
    :census_tract
  ]

  @typedoc "Address details as provided by the assessor."
  @typedoc since: "0.2.0"
  @type t :: %__MODULE__{
          street_number: street_number() | nil,
          street_pre_direction: street_pre_direction() | nil,
          street_name: street_name() | nil,
          street_suffix: street_suffix() | nil,
          street_post_direction: street_post_direction() | nil,
          unit_type: unit_type() | nil,
          unit_number: unit_number() | nil,
          formatted_street_address: formatted_street_address() | nil,
          city: city() | nil,
          state: state() | nil,
          zip_code: zip_code() | nil,
          zip_plus_four_code: zip_plus_four_code() | nil,
          carrier_code: carrier_code() | nil,
          latitude: latitude() | nil,
          longitude: longitude() | nil,
          geocoding_accuracy: geocoding_accuracy() | nil,
          census_tract: census_tract() | nil
        }

  @typedoc """
  Parsed street number.

  Eg. **3450**
  """
  @typedoc since: "0.2.0"
  @type street_number :: String.t()

  @typedoc "Directional appearing before the street name."
  @typedoc since: "0.2.0"
  @type street_pre_direction :: directional()

  @typedoc """
  A geographic directional.

  Eg. [**N**](https://estated.com/developers/docs/v4/property/enum-overview#Directionals)
  """
  @typedoc since: "0.2.0"
  @type directional :: String.t()

  @typedoc """
  Parsed street name.

  Numeric street names will include ordinal suffixes (st, nd, rd, th).

  Eg. **MAIN**
  """
  @typedoc since: "0.2.0"
  @type street_name :: String.t()

  @typedoc """
  Standardized and parsed street suffix abbreviation.

  Eg. [**ST**](https://estated.com/developers/docs/v4/property/enum-overview#street_suffix)
  """
  @typedoc since: "0.2.0"
  @type street_suffix :: String.t()

  @typedoc "Directional appearing after street suffix denoting quadrant."
  @typedoc since: "0.2.0"
  @type street_post_direction :: directional()

  @typedoc """
  Unit type abbreviation.

  Eg. [**APT**](https://estated.com/developers/docs/v4/property/enum-overview#unit_type)
  """
  @typedoc since: "0.2.0"
  @type unit_type :: String.t()

  @typedoc """
  Unit number (may be alphanumeric).

  Eg. **101A**
  """
  @typedoc since: "0.2.0"
  @type unit_number :: String.t()

  @typedoc """
  Combined street address (including unit).

  Eg. **1650 N 16TH ST SW APT 101**
  """
  @typedoc since: "0.2.0"
  @type formatted_street_address :: String.t()

  @typedoc """
  City name.

  Eg. **CHICAGO**
  """
  @typedoc since: "0.2.0"
  @type city :: String.t()

  @typedoc """
  State abbreviation.

  Eg. **IL**
  """
  @typedoc since: "0.2.0"
  @type state :: String.t()

  @typedoc """
  Zip code.

  Eg. **60614**
  """
  @typedoc since: "0.2.0"
  @type zip_code :: String.t()

  @typedoc """
  Four digit postal zip extension.

  Eg. **5505**
  """
  @typedoc since: "0.2.0"
  @type zip_plus_four_code :: String.t()

  @typedoc """
  USPS code for mail delivery services.

  Eg. **R001**
  """
  @typedoc since: "0.2.0"
  @type carrier_code :: String.t()

  @typedoc """
  Measured latitude for the property.

  Eg. **41.912406**
  """
  @typedoc since: "0.2.0"
  @type latitude :: float()

  @typedoc """
  Measured longitude for the property.

  Eg. **-87.649191**
  """
  @typedoc since: "0.2.0"
  @type longitude :: float()

  @typedoc """
  Describes the level of geocoding match.

  Eg. [**PARCEL CENTROID**](https://estated.com/developers/docs/v4/property/enum-overview#geocoding_accuracy)
  """
  @typedoc since: "0.2.0"
  @type geocoding_accuracy :: String.t()

  @typedoc """
  The census tract as designated by the Census Bureau.

  Eg. **10570200.002009**
  """
  @typedoc since: "0.2.0"
  @type census_tract :: String.t()

  @doc false
  @doc since: "0.2.0"
  @spec cast(map()) :: t()
  def cast(%{} = address) do
    Enum.reduce(address, %__MODULE__{}, &cast_field/2)
  end

  @spec cast(nil) :: nil
  def cast(nil) do
    nil
  end

  # API string keys recognised by the caster; each maps 1:1 onto the
  # struct field of the same name.  This whitelist replaces seventeen
  # near-identical `cast_field/2` clauses.
  @known_keys ~w(street_number street_pre_direction street_name street_suffix
                 street_post_direction unit_type unit_number formatted_street_address
                 city state zip_code zip_plus_four_code carrier_code latitude
                 longitude geocoding_accuracy census_tract)

  defp cast_field({key, value}, acc) when key in @known_keys do
    # `to_existing_atom` is safe here: every name in @known_keys already
    # exists as an atom, created by `defstruct` above.  `struct!/2`
    # would raise on an unknown field, but the guard prevents that.
    struct!(acc, [{String.to_existing_atom(key), value}])
  end

  # Unknown map entries are ignored, exactly as before.
  defp cast_field(_map_entry, acc) do
    acc
  end
end
|
lib/estated/property/address.ex
| 0.925002
| 0.521898
|
address.ex
|
starcoder
|
defmodule Depot.Adapter.InMemory do
  @moduledoc """
  Depot Adapter using an `Agent` for in memory storage.

  ## Direct usage

      iex> filesystem = Depot.Adapter.InMemory.configure(name: InMemoryFileSystem)
      iex> start_supervised(filesystem)
      iex> :ok = Depot.write(filesystem, "test.txt", "Hello World")
      iex> {:ok, "Hello World"} = Depot.read(filesystem, "test.txt")

  ## Usage with a module

      defmodule InMemoryFileSystem do
        use Depot.Filesystem,
          adapter: Depot.Adapter.InMemory
      end

      start_supervised(InMemoryFileSystem)

      InMemoryFileSystem.write("test.txt", "Hello World")
      {:ok, "Hello World"} = InMemoryFileSystem.read("test.txt")
  """

  # The agent state is a tree of `{contents, meta}` nodes: `contents` is a map
  # of child names for a directory, or a binary for a file; `meta` is a map of
  # metadata (this module stores `:visibility` in it). The root node is
  # `{%{}, %{}}`. See `accessor/2` for how paths become `Access` paths into
  # this structure.

  # Streams a stored file's contents in `chunk_size`-sized chunks
  # (Enumerable), and collects written iodata back into the file
  # (Collectable).
  defmodule AgentStream do
    @enforce_keys [:config, :path]
    defstruct config: nil, path: nil, chunk_size: 1024

    defimpl Enumerable do
      # Initial call: resolve the file once, then reduce over its chunks.
      # NOTE(review): on a read failure this returns `{:halted, []}` even
      # though the consumer did not halt — confirm all callers reach this
      # only through `Enum` functions that tolerate that shape.
      def reduce(%{config: config, path: path, chunk_size: chunk_size}, a, b) do
        case Depot.Adapter.InMemory.read(config, path) do
          {:ok, contents} ->
            contents
            |> Depot.chunk(chunk_size)
            |> reduce(a, b)

          _ ->
            {:halted, []}
        end
      end

      # Standard list-walking clauses implementing the Enumerable contract
      # (halt / suspend / done / step).
      def reduce(_list, {:halt, acc}, _fun), do: {:halted, acc}
      def reduce(list, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(list, &1, fun)}
      def reduce([], {:cont, acc}, _fun), do: {:done, acc}
      def reduce([head | tail], {:cont, acc}, fun), do: reduce(tail, fun.(head, acc), fun)

      # No fast paths available; defer to the reduce-based defaults.
      def count(_), do: {:error, __MODULE__}
      def slice(_), do: {:error, __MODULE__}
      def member?(_, _), do: {:error, __MODULE__}
    end

    defimpl Collectable do
      def into(%{config: config, path: path} = stream) do
        # Existing contents are kept, so collecting into the stream appends.
        original =
          case Depot.Adapter.InMemory.read(config, path) do
            {:ok, contents} -> contents
            _ -> ""
          end

        fun = fn
          # Accumulate chunks in reverse for O(1) prepends.
          list, {:cont, x} ->
            [x | list]

          # On :done, flatten once and write the full contents back.
          list, :done ->
            contents = original <> IO.iodata_to_binary(:lists.reverse(list))
            Depot.Adapter.InMemory.write(config, path, contents, [])
            stream

          _, :halt ->
            :ok
        end

        {[], fun}
      end
    end
  end

  use Agent

  defmodule Config do
    @moduledoc false
    # Adapter configuration; `:name` keys the Agent in `Depot.Registry`.
    defstruct name: nil
  end

  @behaviour Depot.Adapter

  @impl Depot.Adapter
  def starts_processes, do: true

  # Accepts the `{module, config}` tuple produced by `configure/1`.
  # NOTE(review): neither start_link clause carries `@impl` — confirm
  # whether `start_link/1` is a `Depot.Adapter` callback.
  def start_link({__MODULE__, %Config{} = config}) do
    start_link(config)
  end

  def start_link(%Config{} = config) do
    # Initial state: the empty root `{contents, meta}` node.
    Agent.start_link(fn -> {%{}, %{}} end, name: Depot.Registry.via(__MODULE__, config.name))
  end

  @impl Depot.Adapter
  def configure(opts) do
    config = %Config{
      name: Keyword.fetch!(opts, :name)
    }

    {__MODULE__, config}
  end

  @impl Depot.Adapter
  def write(config, path, contents, opts) do
    visibility = Keyword.get(opts, :visibility, :private)
    directory_visibility = Keyword.get(opts, :directory_visibility, :private)

    Agent.update(Depot.Registry.via(__MODULE__, config.name), fn state ->
      file = {IO.iodata_to_binary(contents), %{visibility: visibility}}
      # Passed as the accessor default so missing parent directories are
      # created on the way down (see `accessor/2`).
      directory = {%{}, %{visibility: directory_visibility}}
      put_in(state, accessor(path, directory), file)
    end)
  end

  @impl Depot.Adapter
  def write_stream(config, path, opts) do
    {:ok,
     %AgentStream{
       config: config,
       path: path,
       chunk_size: Keyword.get(opts, :chunk_size, 1024)
     }}
  end

  @impl Depot.Adapter
  def read(config, path) do
    Agent.get(Depot.Registry.via(__MODULE__, config.name), fn state ->
      # Only file nodes hold a binary; directories hold a map and therefore
      # also report :enoent here.
      case get_in(state, accessor(path)) do
        {binary, _meta} when is_binary(binary) -> {:ok, binary}
        _ -> {:error, :enoent}
      end
    end)
  end

  @impl Depot.Adapter
  def read_stream(config, path, opts) do
    {:ok,
     %AgentStream{
       config: config,
       path: path,
       chunk_size: Keyword.get(opts, :chunk_size, 1024)
     }}
  end

  @impl Depot.Adapter
  def delete(%Config{} = config, path) do
    Agent.update(Depot.Registry.via(__MODULE__, config.name), fn state ->
      # Deleting a missing path is a no-op; this always returns :ok.
      {_, state} = pop_in(state, accessor(path))
      state
    end)

    :ok
  end

  @impl Depot.Adapter
  def move(%Config{} = config, source, destination, opts) do
    visibility = Keyword.get(opts, :visibility, :private)
    directory_visibility = Keyword.get(opts, :directory_visibility, :private)

    Agent.get_and_update(Depot.Registry.via(__MODULE__, config.name), fn state ->
      case get_in(state, accessor(source)) do
        {binary, _meta} when is_binary(binary) ->
          # The destination gets the visibility from `opts`, not the source
          # file's original visibility.
          file = {binary, %{visibility: visibility}}
          directory = {%{}, %{visibility: directory_visibility}}

          {_, state} =
            state |> put_in(accessor(destination, directory), file) |> pop_in(accessor(source))

          {:ok, state}

        _ ->
          {{:error, :enoent}, state}
      end
    end)
  end

  @impl Depot.Adapter
  def copy(%Config{} = config, source, destination, opts) do
    visibility = Keyword.get(opts, :visibility, :private)
    directory_visibility = Keyword.get(opts, :directory_visibility, :private)

    Agent.get_and_update(Depot.Registry.via(__MODULE__, config.name), fn state ->
      case get_in(state, accessor(source)) do
        {binary, _meta} when is_binary(binary) ->
          file = {binary, %{visibility: visibility}}
          directory = {%{}, %{visibility: directory_visibility}}
          {:ok, put_in(state, accessor(destination, directory), file)}

        _ ->
          {{:error, :enoent}, state}
      end
    end)
  end

  # Cross-filesystem copy between two separate InMemory agents is not
  # implemented.
  @impl Depot.Adapter
  def copy(
        %Config{} = _source_config,
        _source,
        %Config{} = _destination_config,
        _destination,
        _opts
      ) do
    {:error, :unsupported}
  end

  @impl Depot.Adapter
  def file_exists(%Config{} = config, path) do
    Agent.get(Depot.Registry.via(__MODULE__, config.name), fn state ->
      # Directory nodes (map contents) report :missing — this checks for
      # *files* only.
      case get_in(state, accessor(path)) do
        {binary, _meta} when is_binary(binary) -> {:ok, :exists}
        _ -> {:ok, :missing}
      end
    end)
  end

  @impl Depot.Adapter
  def list_contents(%Config{} = config, path) do
    contents =
      Agent.get(Depot.Registry.via(__MODULE__, config.name), fn state ->
        # Missing or non-directory paths list as empty.
        paths =
          case get_in(state, accessor(path)) do
            {%{} = map, _meta} -> map
            _ -> %{}
          end

        for {path, {content, meta}} <- paths do
          struct =
            case content do
              %{} -> %Depot.Stat.Dir{size: 0}
              bin when is_binary(bin) -> %Depot.Stat.File{size: byte_size(bin)}
            end

          # This adapter does not track modification times; mtime is
          # always reported as 0.
          struct!(struct, name: path, mtime: 0, visibility: meta.visibility)
        end
      end)

    {:ok, contents}
  end

  @impl Depot.Adapter
  def create_directory(%Config{} = config, path, opts) do
    directory_visibility = Keyword.get(opts, :directory_visibility, :private)
    directory = {%{}, %{visibility: directory_visibility}}

    Agent.update(Depot.Registry.via(__MODULE__, config.name), fn state ->
      put_in(state, accessor(path, directory), directory)
    end)
  end

  @impl Depot.Adapter
  def delete_directory(%Config{} = config, path, opts) do
    recursive? = Keyword.get(opts, :recursive, false)

    Agent.get_and_update(Depot.Registry.via(__MODULE__, config.name), fn state ->
      case {recursive?, get_in(state, accessor(path))} do
        # Deleting a missing directory succeeds.
        {_, nil} ->
          {:ok, state}

        # Empty directories always delete; non-empty ones only when
        # :recursive was requested.
        {recursive?, {map, _meta}} when is_map(map) and (map_size(map) == 0 or recursive?) ->
          {_, state} = pop_in(state, accessor(path))
          {:ok, state}

        _ ->
          {{:error, :eexist}, state}
      end
    end)
  end

  @impl Depot.Adapter
  def clear(%Config{} = config) do
    # Reset the whole tree to a fresh, empty root node.
    Agent.update(Depot.Registry.via(__MODULE__, config.name), fn _ -> {%{}, %{}} end)
  end

  @impl Depot.Adapter
  def set_visibility(%Config{} = config, path, visibility) do
    Agent.get_and_update(Depot.Registry.via(__MODULE__, config.name), fn state ->
      case get_in(state, accessor(path)) do
        # Any `{contents, meta}` node matches — works for both files and
        # directories.
        {_, _} ->
          state =
            update_in(state, accessor(path), fn {contents, meta} ->
              {contents, Map.put(meta, :visibility, visibility)}
            end)

          {:ok, state}

        _ ->
          {{:error, :enoent}, state}
      end
    end)
  end

  @impl Depot.Adapter
  def visibility(%Config{} = config, path) do
    Agent.get(Depot.Registry.via(__MODULE__, config.name), fn state ->
      case get_in(state, accessor(path)) do
        {_, %{visibility: visibility}} -> {:ok, visibility}
        _ -> {:error, :enoent}
      end
    end)
  end

  # Builds an `Access` path into the `{contents, meta}` tree for `path`.
  # After the final `Enum.reverse/1`, each path segment contributes
  # `Access.elem(0)` (step into the node's contents) followed by
  # `Access.key(segment, default)`. Intermediate segments fall back to
  # `default` or an empty `{%{}, %{}}` node so writes can create missing
  # parent directories; the final segment uses `default` directly (nil for
  # reads, so missing paths resolve to nil).
  defp accessor(path, default \\ nil) when is_binary(path) do
    path
    |> Path.absname("/")
    |> Path.split()
    |> do_accessor([], default)
    |> Enum.reverse()
  end

  defp do_accessor([segment], acc, default) do
    [Access.key(segment, default), Access.elem(0) | acc]
  end

  defp do_accessor([segment | rest], acc, default) do
    intermediate_default = default || {%{}, %{}}
    do_accessor(rest, [Access.key(segment, intermediate_default), Access.elem(0) | acc], default)
  end
end
|
lib/depot/adapter/in_memory.ex
| 0.796055
| 0.412353
|
in_memory.ex
|
starcoder
|
defmodule Anagram do
  @dictionary "/usr/share/dict/web2"

  @moduledoc """
  Find anagrams of words, and words that can be built from a set of letters
  (a sort of sub-anagram).

  Every word in the dictionary is canonicalised by forcing case, removing
  whitespace and sorting its characters, so all words made of the same
  letters in any permuted order share one canonical form. The words are
  stored in a `Map` keyed by that canonical form; finding anagrams is just
  canonicalising the word we are after and looking up that key.

  For speed, load the dictionary once with `load_dictionary/1`, hang onto
  it, and pass it to `anagrams/3`.
  """

  @doc """
  Load the given dictionary for later use by our anagram finder.

  The result is a map where:
  - the keys are a normalised form of the word (lower case + sorted)
  - the values are a list of anagrams of these letters

  Finding anagrams is then a matter of normalising a candidate word and
  looking up all the anagrams from this map.
  """
  defdelegate load_dictionary(path \\ @dictionary), to: Anagram.Finder

  @doc """
  Find all anagrams of the given word (from our dictionary). Returns a list.

  For repeated searches it's highly recommended to preload the dictionary
  using `load_dictionary/1` and pass it via `anagrams/3`.

  The default dictionary is web2, i.e. this assumes a unixy OS. Load your
  own dictionary if web2 isn't present/wanted.
  """
  defdelegate anagrams(word, remove_self \\ false), to: Anagram.Finder
  defdelegate anagrams(word, remove_self, dict), to: Anagram.Finder

  @doc """
  Find all words in the given dictionary file which can be made from a
  subset of the given letters (case insensitive).

  Letters can be passed either as a bitstring or as a list of individual
  (bitstring) letters.

  The default dictionary is web2, i.e. this assumes a unixy OS. Specify
  your own dictionary if web2 isn't present/wanted.
  """
  defdelegate words_from(word, dictionary \\ @dictionary), to: Anagram.Finder
end
|
lib/anagram.ex
| 0.663233
| 0.726814
|
anagram.ex
|
starcoder
|
if Code.ensure_loaded?(:gun) do
  defmodule Tesla.Adapter.Gun do
    @moduledoc """
    Adapter for [gun] https://github.com/ninenines/gun

    Remember to add `{:gun, "~> 1.3"}` to dependencies.
    Also, you need to recompile tesla after adding `:gun` dependency:

    ```
    mix deps.clean tesla
    mix deps.compile tesla
    ```

    ### Example usage

    ```
    # set globally in config/config.exs
    config :tesla, :adapter, Tesla.Adapter.Gun

    # set per module
    defmodule MyClient do
      use Tesla
      adapter Tesla.Adapter.Gun
    end
    ```

    ### Options https://ninenines.eu/docs/en/gun/1.3/manual/gun/:

    * `connect_timeout` - Connection timeout.
    * `http_opts` - Options specific to the HTTP protocol.
    * `http2_opts` - Options specific to the HTTP/2 protocol.
    * `protocols` - Ordered list of preferred protocols. Defaults: [http2, http] - for :tls, [http] - for :tcp.
    * `trace` - Whether to enable dbg tracing of the connection process. Should only be used during debugging. Default: false.
    * `transport` - Whether to use TLS or plain TCP. The default varies depending on the port used. Port 443 defaults to tls.
      All other ports default to tcp.
    * `transport_opts` - Transport options. They are TCP options or TLS options depending on the selected transport. Default: [].
    * `ws_opts` - Options specific to the Websocket protocol. Default: %{}.
      * `compress` - Whether to enable permessage-deflate compression. This does not guarantee that compression will
        be used as it is the server that ultimately decides. Defaults to false.
      * `protocols` - A non-empty list enables Websocket protocol negotiation. The list of protocols will be sent
        in the sec-websocket-protocol request header.
        The handler module interface is currently undocumented and must be set to `gun_ws_h`.
    """

    @behaviour Tesla.Adapter
    alias Tesla.Multipart

    # Option keys forwarded to `:gun.open/3`; everything else in `opts` is
    # adapter-local (e.g. `:timeout`, `:max_body`).
    @gun_keys [
      :connect_timeout,
      :http_opts,
      :http2_opts,
      :protocols,
      :retry,
      :retry_timeout,
      :trace,
      :transport,
      :transport_opts,
      :ws_opts
    ]

    # Fallback `receive` timeout in milliseconds, used whenever
    # `opts[:timeout]` is not provided.
    @adapter_default_timeout 1_000

    @impl true
    @doc false
    def call(env, opts) do
      with {:ok, status, headers, body} <- request(env, opts) do
        {:ok, %{env | status: status, headers: format_headers(headers), body: body}}
      end
    end

    # Normalise gun's header pairs to downcased binary keys/values as Tesla
    # expects.
    defp format_headers(headers) do
      for {key, value} <- headers do
        {String.downcase(to_string(key)), to_string(value)}
      end
    end

    # Gun expects the HTTP method as an uppercase binary, e.g. "GET".
    defp format_method(method), do: String.upcase(to_string(method))

    # Re-assemble the request target (path + query) for `:gun.request/5`.
    defp format_url(nil, nil), do: ""
    defp format_url(nil, query), do: "?" <> query
    defp format_url(path, nil), do: path
    defp format_url(path, query), do: path <> "?" <> query

    defp request(env, opts) do
      request(
        format_method(env.method),
        Tesla.build_url(env.url, env.query),
        env.headers,
        env.body || "",
        Tesla.Adapter.opts(env, opts) |> Enum.into(%{})
      )
    end

    # Stream and function bodies are sent chunk by chunk.
    defp request(method, url, headers, %Stream{} = body, opts),
      do: request_stream(method, url, headers, body, opts)

    defp request(method, url, headers, body, opts) when is_function(body),
      do: request_stream(method, url, headers, body, opts)

    # Multipart bodies are flattened to extra headers + iodata, then
    # re-dispatched through the clauses above/below.
    defp request(method, url, headers, %Multipart{} = mp, opts) do
      headers = headers ++ Multipart.headers(mp)
      body = Multipart.body(mp)

      request(method, url, headers, body, opts)
    end

    defp request(method, url, headers, body, opts) do
      with {pid, f_url} <- open_conn(url, opts),
           stream <- open_stream(pid, method, f_url, headers, body, false) do
        read_response(pid, stream, opts)
      end
    end

    defp request_stream(method, url, headers, body, opts) do
      with {pid, f_url} <- open_conn(url, opts),
           stream <- open_stream(pid, method, f_url, headers, body, true) do
        read_response(pid, stream, opts)
      end
    end

    # Opens the gun connection (forcing TLS for https URLs) and returns the
    # connection pid along with the formatted path+query.
    defp open_conn(url, opts) do
      uri = URI.parse(url)
      opts = if uri.scheme == "https", do: Map.put(opts, :transport, :tls), else: opts
      {:ok, pid} = :gun.open(to_charlist(uri.host), uri.port, Map.take(opts, @gun_keys))
      {pid, format_url(uri.path, uri.query)}
    end

    # Streaming variant: open the request with an empty body, push each
    # chunk as :nofin, then terminate the stream with :fin.
    defp open_stream(pid, method, url, headers, body, true) do
      stream = :gun.request(pid, method, url, headers, "")
      for data <- body, do: :ok = :gun.data(pid, stream, :nofin, data)
      :gun.data(pid, stream, :fin, "")
      stream
    end

    defp open_stream(pid, method, url, headers, body, false),
      do: :gun.request(pid, method, url, headers, body)

    # Waits for the response head; reads the body only when gun signals
    # that more data follows (:nofin).
    defp read_response(pid, stream, opts) do
      receive do
        {:gun_response, ^pid, ^stream, :fin, status, headers} ->
          {:ok, status, headers, ""}

        {:gun_response, ^pid, ^stream, :nofin, status, headers} ->
          case read_body(pid, stream, opts) do
            {:ok, body} ->
              {:ok, status, headers, body}

            {:error, error} ->
              {:error, error}
          end

        # NOTE(review): this clause is not pinned to ^pid, so any
        # `{:error, _}` message in the mailbox will match — confirm this is
        # intended.
        {:error, error} ->
          {:error, error}

        # Connection state changes: keep waiting for the response.
        {:gun_up, ^pid, :http} ->
          read_response(pid, stream, opts)

        {:gun_error, ^pid, reason} ->
          {:error, reason}

        {:gun_down, ^pid, _, _, _, _} ->
          read_response(pid, stream, opts)

        {:DOWN, _, _, _, reason} ->
          {:error, reason}
      after
        opts[:timeout] || @adapter_default_timeout ->
          {:error, :timeout}
      end
    end

    # Accumulates body chunks until :fin, enforcing the optional
    # `opts[:max_body]` size limit on every chunk.
    defp read_body(pid, stream, opts, acc \\ "") do
      limit = opts[:max_body]

      receive do
        {:gun_data, ^pid, ^stream, :fin, body} ->
          check_body_size(acc, body, limit)

        {:gun_data, ^pid, ^stream, :nofin, part} ->
          case check_body_size(acc, part, limit) do
            {:ok, acc} -> read_body(pid, stream, opts, acc)
            {:error, error} -> {:error, error}
          end
      after
        opts[:timeout] || @adapter_default_timeout ->
          {:error, :timeout}
      end
    end

    # A nil limit means the body size is unbounded.
    defp check_body_size(acc, part, nil), do: {:ok, acc <> part}

    defp check_body_size(acc, part, limit) do
      body = acc <> part

      if limit - byte_size(body) >= 0 do
        {:ok, body}
      else
        {:error, :body_too_large}
      end
    end
  end
end
|
lib/tesla/adapter/gun.ex
| 0.844697
| 0.675136
|
gun.ex
|
starcoder
|
defmodule Membrane.Bin.RTP.Receiver do
  @moduledoc """
  A bin consuming one or more RTP streams on each input and outputting a stream from one ssrc on each output.

  Every stream is parsed and then (based on ssrc field) an
  appropriate rtp session is initiated. It notifies its parent about each new
  stream with a notification of the format `{:new_rtp_stream, ssrc, payload_type}`.
  Parent should then connect to RTP bin dynamic output pad instance that will
  have an id == `ssrc`.
  """
  use Membrane.Bin

  alias Membrane.Bin.RTP.Receiver
  alias Membrane.ParentSpec
  alias Membrane.Element.RTP

  # IANA RTP payload-type assignments shipped next to this source file; read
  # at compile time to generate the `fmt_to_pt/1` clauses below.
  @static_fmt_file "rtp-parameters-1.csv" |> Path.expand(__DIR__)

  # Back-pressure thresholds for each parser's input buffer.
  @bin_input_buffer_params [warn_size: 250, fail_size: 500]

  def_options fmt_mapping: [
                spec: %{integer => String.t()},
                default: %{},
                description: "Mapping of the custom payload types (form fmt > 95)"
              ],
              pt_to_depayloader: [
                spec: (String.t() -> module()),
                default: &__MODULE__.payload_type_to_depayloader/1,
                description: "Mapping from payload type to a depayloader module"
              ]

  def_input_pad :input, demand_unit: :buffers, caps: :any, availability: :on_request
  def_output_pad :output, caps: :any, demand_unit: :buffers, availability: :on_request

  defmodule State do
    @moduledoc false
    # fmt_mapping: custom fmt -> payload-type name (for dynamic fmts)
    # ssrc_pt_mapping: ssrc -> payload type, learned from :new_rtp_stream
    # pt_to_depayloader: payload-type name -> depayloader module
    # children_by_pads: pad ref -> child name, kept for pad-removal cleanup
    defstruct fmt_mapping: %{},
              ssrc_pt_mapping: %{},
              pt_to_depayloader: nil,
              children_by_pads: %{}
  end

  @impl true
  def handle_init(%{fmt_mapping: fmt_map, pt_to_depayloader: d_mapper}) do
    # Single shared router; parsers and sessions are added per pad later.
    children = [ssrc_router: Receiver.SSRCRouter]
    links = []
    spec = %ParentSpec{children: children, links: links}
    {{:ok, spec: spec}, %State{fmt_mapping: fmt_map, pt_to_depayloader: d_mapper}}
  end

  @impl true
  def handle_pad_added(Pad.ref(:input, _id) = pad, _ctx, state) do
    # Every input pad gets its own parser feeding the shared ssrc router.
    parser_ref = {:parser, make_ref()}
    children = [{parser_ref, RTP.Parser}]

    links = [
      link_bin_input(pad)
      |> via_in(:input, buffer: @bin_input_buffer_params)
      |> to(parser_ref)
      |> to(:ssrc_router)
    ]

    new_spec = %ParentSpec{children: children, links: links}
    children_by_pads = state.children_by_pads |> Map.put(pad, parser_ref)
    state = %{state | children_by_pads: children_by_pads}
    {{:ok, spec: new_spec}, state}
  end

  @impl true
  def handle_pad_added(
        Pad.ref(:output, ssrc) = pad,
        _ctx,
        %State{ssrc_pt_mapping: ssrc_pt_mapping} = state
      ) do
    # The output pad id is the ssrc; pick the depayloader from the payload
    # type recorded when the stream was first announced by the router.
    depayloader =
      ssrc_pt_mapping
      |> Map.get(ssrc)
      |> state.pt_to_depayloader.()

    rtp_session_name = {:rtp_session, make_ref()}
    new_children = [{rtp_session_name, %Receiver.Session{depayloader: depayloader}}]

    new_links = [
      link(:ssrc_router)
      |> via_out(Pad.ref(:output, ssrc))
      |> to(rtp_session_name)
      |> to_bin_output(pad)
    ]

    new_spec = %ParentSpec{children: new_children, links: new_links}
    new_children_by_pads = state.children_by_pads |> Map.put(pad, rtp_session_name)
    {{:ok, spec: new_spec}, %State{state | children_by_pads: new_children_by_pads}}
  end

  @impl true
  def handle_pad_removed(Pad.ref(:input, _id) = pad, _ctx, state) do
    # Tear down the parser that was created for this input pad.
    {parser_to_remove, new_children_by_pads} = state.children_by_pads |> Map.pop(pad)

    {{:ok, remove_child: parser_to_remove},
     %State{state | children_by_pads: new_children_by_pads}}
  end

  @impl true
  def handle_pad_removed(Pad.ref(:output, _ssrc) = pad, _ctx, state) do
    # Tear down the rtp session that was created for this output pad.
    {session_to_remove, new_children_by_pads} = state.children_by_pads |> Map.pop(pad)

    {{:ok, remove_child: session_to_remove},
     %State{state | children_by_pads: new_children_by_pads}}
  end

  @impl true
  def handle_notification({:new_rtp_stream, ssrc, fmt}, :ssrc_router, state) do
    # Resolve fmt -> payload type, remember it so the matching :output pad
    # can pick a depayloader, and re-notify the parent.
    # NOTE(review): `get_payload_type/2` may return `{:error, :not_found}`,
    # which would crash this match — confirm unknown fmts cannot reach here.
    {:ok, payload_type} = get_payload_type(fmt, state.fmt_mapping)
    %State{ssrc_pt_mapping: ssrc_pt_mapping} = state
    new_ssrc_pt_mapping = ssrc_pt_mapping |> Map.put(ssrc, payload_type)

    {{:ok, notify: {:new_rtp_stream, ssrc, payload_type}},
     %{state | ssrc_pt_mapping: new_ssrc_pt_mapping}}
  end

  # Compile-time generation of `fmt_to_pt/1`: one clause per concretely
  # assigned static payload type in the bundled IANA CSV (skipping the
  # header row and Unassigned/dynamic/Reserved entries).
  File.stream!(@static_fmt_file)
  |> CSV.decode!()
  |> Stream.drop(1)
  |> Enum.filter(fn [_, pt | _] ->
    pt != "Unassigned" and pt != "dynamic" and not String.starts_with?(pt, "Reserved")
  end)
  |> Enum.map(fn [fmt_s, pt | _] ->
    {fmt, ""} = fmt_s |> Integer.parse()
    defp fmt_to_pt(unquote(fmt)), do: {:ok, unquote(pt)}
  end)

  defp fmt_to_pt(_), do: {:error, :not_static}

  defp get_payload_type(fmt, fmt_mapping) do
    case fmt_to_pt(fmt) do
      {:ok, pt} ->
        {:ok, pt}

      {:error, :not_static} ->
        # Non-static (dynamic) fmts are resolved via the user-supplied
        # fmt_mapping option.
        if pt = fmt_mapping[fmt], do: {:ok, pt}, else: {:error, :not_found}
    end
  end

  @spec payload_type_to_depayloader(Receiver.SSRCRouter.payload_type()) :: module()
  def payload_type_to_depayloader("H264"), do: RTP.H264.Depayloader
  def payload_type_to_depayloader("MPA"), do: RTP.MPEGAudio.Depayloader
end
|
lib/membrane_bin_rtp/receiver.ex
| 0.834188
| 0.437343
|
receiver.ex
|
starcoder
|
defmodule Flex.Rule do
  alias Flex.Rule

  @moduledoc """
  An interface to create Linguistic Rules.
  """

  defstruct statement: nil,
            antecedent: nil,
            consequent: nil

  @typedoc """
  Linguistic Rule struct.
  - `:statement` - Rules behavior.
  - `:antecedent` - (list) Input variables.
  - `:consequent` - Output variable.
  """
  @type t :: %__MODULE__{
          statement: fun() | tuple(),
          antecedent: [Flex.Variable.t(), ...],
          consequent: Flex.Variable.t()
        }

  @doc """
  Creates a Linguistic Rule.

  The following options are required:
  - `:statement` - Defines the rule behavior.
  - `:antecedent` - (list) Defines the input variables.
  - `:consequent` - Defines the output variable.
  """
  def new(params) do
    %Rule{
      statement: Keyword.fetch!(params, :statement),
      antecedent: Keyword.fetch!(params, :antecedent),
      consequent: Keyword.fetch!(params, :consequent)
    }
  end

  @doc """
  Fuzzy AND operator (product).
  """
  def tau(lhs, rhs), do: lhs * rhs

  @doc """
  Fuzzy AND operator (minimum).
  """
  def lhs &&& rhs, do: min(lhs, rhs)

  @doc """
  Fuzzy OR operator (maximum).
  """
  def lhs ||| rhs, do: max(lhs, rhs)

  @doc """
  Fuzzy THEN operator: stores the rule strength on the consequent variable.
  """
  def rule_strength >>> fuzzy_var do
    case fuzzy_var.type do
      :antecedent ->
        raise("only the consequent variable can use the THEN operation")

      :consequent ->
        %{fuzzy_var | rule_output: rule_strength}
    end
  end

  @doc """
  Fuzzy IS operator: reads a membership grade from an antecedent variable,
  or accumulates the rule output under the given set tag on a consequent
  variable.
  """
  def fuzzy_var ~> set_tag do
    case fuzzy_var.type do
      :antecedent ->
        fuzzy_var.mf_values[set_tag]

      :consequent ->
        # Append this rule's output to any grades already collected for
        # the set tag (equivalent to Map.get/3 default [] plus ++).
        mf_values =
          Map.update(
            fuzzy_var.mf_values,
            set_tag,
            [fuzzy_var.rule_output],
            &(&1 ++ [fuzzy_var.rule_output])
          )

        %{fuzzy_var | mf_values: mf_values}
    end
  end

  @doc """
  Fuzzy Rules AST (Tuple): recursively evaluates a rule statement tree
  against the map of variables in `vars`.
  """
  def statement({lhs, rhs, "tau"}, vars), do: tau(statement(lhs, vars), statement(rhs, vars))
  def statement({lhs, rhs, "&&&"}, vars), do: statement(lhs, vars) &&& statement(rhs, vars)
  def statement({lhs, rhs, "|||"}, vars), do: statement(lhs, vars) ||| statement(rhs, vars)

  def statement({var_tag, set_tag, "~>"}, vars) when is_binary(var_tag) do
    # A binary tag names an antecedent variable to look up directly.
    Map.get(vars, var_tag, :error) ~> set_tag
  end

  def statement({consequent, set_tag, "~>"}, vars), do: statement(consequent, vars) ~> set_tag

  def statement({lhs, con_tag, ">>>"}, vars) do
    statement(lhs, vars) >>> Map.get(vars, con_tag)
  end

  # Any non-tuple node is a literal value (e.g. an already-computed grade).
  def statement(literal, _vars), do: literal

  @doc """
  Gets the arguments of the Fuzzy Rule: resolves each tag to its variable
  in `antecedent`, appended (in order) to `lt_ant_vars`.
  """
  def get_rule_parameters(tags, antecedent, lt_ant_vars) do
    lt_ant_vars ++ Enum.map(tags, &Map.get(antecedent, &1))
  end
end
|
lib/rule.ex
| 0.850251
| 0.55254
|
rule.ex
|
starcoder
|
defmodule MapRewire do
@moduledoc """
MapRewire makes it easier to rewire maps, such as might be done when
translating from an external API result to an internal value or taking the
output of one external API and transforming it to the input shape of an entirely
different external API.
To rewire a map, build transformation rules and call `rewire/3`, or if
MapRewire has been `import`ed, use the operator, `<~>`.
```
iex> map = %{"id" => "234923409", "title" => "asdf"}
iex> rules = "title=>name id=>shopify_id"
iex> map <~> rules
{%{"id" => "234923409", "title" => "asdf"}, %{"shopify_id" => "234923409", "name" => "asdf"}}
iex> MapRewire.rewire(map, rules) == (map <~> rules)
true
```
## Rewire Rules
The rewire rules have three basic forms.
1. A string containing string rename rules separated by whitespace.
```
iex> map = %{"id" => "234923409", "title" => "asdf"}
iex> rules = "title=>name id=>shopify_id"
iex> map <~> rules
{%{"id" => "234923409", "title" => "asdf"}, %{"shopify_id" => "234923409", "name" => "asdf"}}
```
Here, `rules` normalizes to: `[{"title", "name"}, {"id", "shopify_id"}]`.
2. A list of strings with one string rename rule in each string.
```
iex> map = %{"id" => "234923409", "title" => "asdf"}
iex> rules = ["title=>name", "id=>shopify_id"]
iex> map <~> rules
{%{"id" => "234923409", "title" => "asdf"}, %{"shopify_id" => "234923409", "name" => "asdf"}}
```
Here, rules normalizes to: `[{"title", "name"}, {"id", "shopify_id"}]`.
3. Any enumerable value that iterates as key/value tuples (map, keyword
list, or a list of 2-tuples). These may be either rename rules, or may
be more complex key transform rules.
```
iex> map = %{id: "234923409", title: "asdf"}
iex> rules = [title: :name, id: :shopify_id]
iex> map <~> rules
{%{id: "234923409", title: "asdf"}, %{shopify_id: "234923409", name: "asdf"}}
iex> map = %{"id" => "234923409", "title" => "asdf"}
iex> rules = [{"title", "name"}, {"id", "shopify_id"}]
iex> map <~> rules
{%{"id" => "234923409", "title" => "asdf"}, %{"shopify_id" => "234923409", "name" => "asdf"}}
iex> map = %{"id" => "234923409", "title" => "asdf"}
iex> rules = %{"title" => :name, "id" => :shopify_id}
iex> map <~> rules
{%{"id" => "234923409", "title" => "asdf"}, %{shopify_id: "234923409", name: "asdf"}}
# This is legal, but really ugly. Don't do it.
iex> map = %{"id" => "234923409", "title" => "asdf"}
iex> rules = ["title=>name", {"id", "shopify_id"}]
iex> map <~> rules
{%{"id" => "234923409", "title" => "asdf"}, %{"shopify_id" => "234923409", "name" => "asdf"}}
```
### Rename Rules
Rename rules take the value of the old key from the source map and write it
to the target map as the new key, like `"title=>name"`, `%{"title" =>
"name"}`, and `[title: :name]` that normalize to `{old_key, new_key}`. Both
`old_key` and `new_key` are typically atoms or strings, but may be any valid
Map key value, except for the forms noted below.
### Advanced Rules
There are two types of advanced rules (keys with options and producer
functions), which can only be provided when the rules are in an enumerable
format such as a keyword list, map, or list of tuples.
#### Keys with Options
The new key is provided as a tuple `{new_key, options}`. Supported options
are `:transform` (expecting a `t:transformer/0` function) and `:default`,
expecting any normal map value. The `:default` will work as the third
parameter of `Map.get/3` and be used instead of `key_missing/0`.
```
iex> map = %{"title" => "asdf"}
iex> rules = %{"title" => {:name, transform: &String.reverse/1}}
iex> map <~> rules
{%{"title" => "asdf"}, %{name: "fdsa"}}
# If "title" could be missing from the source map, the `transform` function
# should be written to handle `key_missing/0` values or have its own safe
# `default` value.
iex> map = %{}
iex> rules = %{"title" => {:name, default: "unknown", transform: &String.reverse/1}}
iex> map <~> rules
{%{}, %{name: "nwonknu"}}
```
#### Producer Functions
Producer functions (`t:producer/0`) take in the value and return zero or more
key/value tuples. It may be provided either as `producer` or `{producer,
options}` as shown below.
iex> dcs = fn value ->
...> unless MapRewire.key_missing?(value) do
...> [dept, class, subclass] =
...> value
...> |> String.split("-", parts: 3)
...> |> Enum.map(&String.to_integer/1)
...>
...> Enum.to_list(%{"department" => dept, "class" => class, "subclass" => subclass})
...> end
...> end
iex> map = %{"title" => "asdf", "dcs" => "1-3-5"}
iex> rules = %{"title" => "name", "dcs" => dcs}
iex> map <~> rules
{%{"title" => "asdf", "dcs" => "1-3-5"}, %{"name" => "asdf", "department" => 1, "class" => 3, "subclass" => 5}}
# If "title" could be missing from the source map, the `transform` function
# should be written to handle `key_missing/0` values or have its own safe
# `default` value.
iex> dcs = fn value ->
...> [dept, class, subclass] =
...> value
...> |> String.split("-", parts: 3)
...> |> Enum.map(&String.to_integer/1)
...>
...> Enum.to_list(%{"department" => dept, "class" => class, "subclass" => subclass})
...> end
iex> map = %{"title" => "asdf"}
iex> rules = %{"title" => "name", "dcs" => {dcs, default: "0-0-0"}}
iex> map <~> rules
{%{"title" => "asdf"}, %{"name" => "asdf", "department" => 0, "class" => 0, "subclass" => 0}}
"""
@transform_to "=>"
@key_missing "<~>NoMatch<~>" <> Base.encode16(:crypto.strong_rand_bytes(16))
require Logger
@typedoc """
A function that, given a map `value`, produces zero or more key/value tuples.
The `value` provided may be `key_missing/0`, so `key_missing?/1` should be
used to compare before blindly operating on `value`.
If no keys are to be produced (possibly because `value` is `key_missing/0`),
either `nil` or an empty list (`[]`) should be returned.
```
fn value ->
unless MapRewire.key_missing?(value) do
[dept, class, subclass] =
value
|> String.split("-", parts: 3)
|> Enum.map(&String.to_integer/1)
Enum.to_list(%{"department" => dept, "class" => class, "subclass" => subclass})
end
end
```
"""
@type producer ::
(Map.value() -> nil | {Map.key(), Map.value()} | list({Map.key(), Map.value()}))
@typedoc """
A function that, given a map `value`, transforms it before insertion into the
target map.
The `value` may be `key_missing/0`, so `key_missing?/1` should be used to
compare before blindly operating on `value`.
If the key should be omitted when `rewire/3` is called, `key_missing/0`
should be returned.
```
fn value ->
cond do
MapRewire.key_missing?(value) ->
value
is_binary(value) ->
String.reverse(value)
true ->
String.reverse(to_string(value))
end
end
```
"""
@type transformer :: (Map.value() -> Map.value())
@typedoc "Advanced rewire rule options"
@type rewire_rule_options :: [transform: transformer, default: Map.value()]
@typedoc "Rewire rule target values."
@type rewire_rule_target ::
Map.key()
| producer
| {producer, [default: Map.value()]}
| {Map.key(), rewire_rule_options}
@typedoc "A normalized rewire rule."
@type rewire_rule :: {old :: Map.key(), rewire_rule_target}
@typedoc """
The shape of MapRewire transformation rules.
Note that although keyword lists and maps may be used, the values must be
`t:rewire_rule_target/0` values.
"""
@type rewire_rules ::
String.t()
| list(String.t())
| keyword
| map
| list(rewire_rule)
# `use MapRewire` is deprecated; `import MapRewire` brings in `rewire/3` and
# the `<~>` operator directly. Pass `warn: false` to suppress the warning.
defmacro __using__(options) do
  if Keyword.get(options, :warn, true) do
    IO.warn(
      "use MapRewire is deprecated; import it directly instead",
      # Point the warning at the caller's `use MapRewire` site. `__ENV__`
      # here would be MapRewire's own compile-time environment, making the
      # warning blame this file instead of the user's module.
      Macro.Env.stacktrace(__CALLER__)
    )
  end

  quote(do: import(MapRewire))
end
@doc """
The operator form of `rewire/3`, which remaps the map `content` and replaces
the key if it matches with an item in `rewire_rules`.
"""
# Debug logging is explicitly disabled for the operator form; call
# `rewire/3` directly to honor the application-level `:debug?` setting.
def content <~> rewire_rules, do: rewire(content, rewire_rules, debug: false)
@doc """
Remaps the map `content` and replaces the key if it matches with an item in
`rules`.
Accepts two options:
- `:debug` controls the logging of the steps taken to transform `content`
using `rules`. The default is `Application.get_env(:map_rewire,
:debug?)`.
- `:compact` which controls the removal of values from the result map for
keys missing in the `content` map. The default is `true`.
```
iex> map = %{"title" => "asdf"}
iex> rules = %{"title" => :name, "missing" => :missing}
iex> rewire(map, rules, compact: true) # the default
{%{"title" => "asdf"}, %{name: "asdf"}}
iex> map = %{"title" => "asdf"}
iex> rules = %{"title" => :name, "missing" => :missing}
iex> rewire(map, rules, compact: false)
{%{"title" => "asdf"}, %{name: "asdf", missing: nil}}
```
"""
@spec rewire(map, rewire_rules, keyword) :: {old :: map, new :: map}
def rewire(content, rules, options \\ [])

def rewire(content, rules, options)
    when is_map(content) and (is_list(rules) or is_binary(rules) or is_map(rules)) do
  # `=== true` normalizes a nil/non-boolean app-env value to a plain boolean.
  debug = Keyword.get(options, :debug, Application.get_env(:map_rewire, :debug?)) === true
  log(debug, "[MapRewire]rewire#content: #{inspect(content)}")
  log(debug, "[MapRewire]rewire#rules: #{inspect(rules)}")
  log(debug, "[MapRewire]rewire#options: #{inspect(options)}")

  # Pipeline: normalize every rule to `{old, target}`, apply each rule to
  # `content` (one rule may yield several entries via producer functions),
  # then drop/keep placeholder entries per the :compact option, and finally
  # strip any entry still holding the missing-key sentinel.
  # NOTE(review): `compact/2` is defined later in this file (outside this
  # view) — presumably it maps sentinel values to nil when `compact: false`;
  # confirm against its definition.
  new =
    rules
    |> normalize_rules(debug)
    |> Enum.flat_map(&rewire_entry(&1, content, debug))
    |> compact(Keyword.get(options, :compact, true))
    |> Enum.reject(&match?({_, @key_missing}, &1))
    |> Enum.into(%{})

  # Returns both the untouched source map and the rewired map.
  {content, new}
end

def rewire(content, rules, _options) when is_map(content) do
  raise ArgumentError,
        "[MapRewire] expected rules to be a list, map, or string, got #{inspect(rules)}."
end
@doc """
The sentinel value used by rewire operations when an old key is absent from
the source map.

When the rewired map is built, entries holding this value are removed unless
`rewire/3` is called with `compact: false`, in which case they are replaced
with `nil`.

This sentinel may be handed to `t:producer/0` and `t:transformer/0`
functions, so use `key_missing?/1` inside them to detect it and respond
appropriately (see the documentation for those function types).

Note that the value of `key_missing/0` is a 45-byte binary with a 13-byte
fixed head (`"<~>NoMatch<~>"`) and a random tail that changes whenever
MapRewire is recompiled.
"""
@spec key_missing :: binary
def key_missing, do: @key_missing

@doc "Returns true if `value` is the same as `key_missing/0`."
@spec key_missing?(Map.value()) :: boolean
def key_missing?(value), do: value === @key_missing
# Normalize the user-supplied rules (string, list, or map) into a flat list
# of `{old, new}` rule tuples.
@spec normalize_rules(String.t(), boolean) :: list(rewire_rule)
defp normalize_rules(rules, debug) when is_binary(rules) do
  log(debug, "[MapRewire]normalize_rules#rules (String): #{inspect(rules)}")
  # A rule string is whitespace-separated; each token is parsed individually.
  for rule <- String.split(rules, ~r/\s/), do: normalize_rule(rule, debug)
end

@spec normalize_rules(list(String.t() | rewire_rule), boolean) :: list(rewire_rule)
defp normalize_rules(rules, debug) when is_list(rules) do
  log(debug, "[MapRewire]normalize_rules#rules (List): #{inspect(rules)}")
  for rule <- rules, do: normalize_rule(rule, debug)
end

@spec normalize_rules(map, boolean) :: list(rewire_rule)
defp normalize_rules(rules, debug) when is_map(rules) do
  log(debug, "[MapRewire]normalize_rules#rules (Map): #{inspect(rules)}")
  # A map is already a collection of `{old, new}` pairs.
  Map.to_list(rules)
end
# Normalize a single rule into an `{old, new}` tuple, raising on malformed
# input.
@spec normalize_rule(String.t() | rewire_rule | list, boolean) :: rewire_rule | no_return
defp normalize_rule({_old, _new} = rule, debug) do
  log(debug, "[MapRewire]normalize_rule#rule (Tuple): #{inspect(rule)}")
  rule
end

defp normalize_rule(rule, debug) when is_binary(rule) do
  log(debug, "[MapRewire]normalize_rule#rule (String): #{inspect(rule)}")

  # Fix: a string that did not split into exactly two halves used to yield a
  # malformed 1- or 3-tuple that only blew up later inside `rewire_entry/3`
  # with a confusing FunctionClauseError. Fail fast with the same error the
  # list clause raises.
  case String.split(rule, @transform_to) do
    [old, new] ->
      {old, new}

    _ ->
      raise ArgumentError, "[MapRewire] bad argument: invalid rule format #{inspect(rule)}"
  end
end

defp normalize_rule(rule, _) when is_list(rule) and length(rule) != 2 do
  raise ArgumentError, "[MapRewire] bad argument: invalid rule format #{inspect(rule)}"
end

defp normalize_rule(rule, debug) when is_list(rule) do
  log(debug, "[MapRewire]normalize_rule#rule (List-2): #{inspect(rule)}")
  List.to_tuple(rule)
end

defp normalize_rule(rule, _) do
  raise ArgumentError, "[MapRewire] bad argument: invalid rule format #{inspect(rule)}"
end
# Produce the rewired `{key, value}` entries for one rule against `source`.
@spec rewire_entry(rewire_rule, map, boolean) :: list({Map.key(), Map.value()})

# Producer function with options: the producer receives the looked-up value
# (or the `:default` option / the missing sentinel) and returns the entries.
defp rewire_entry({old, {producer, options}}, source, debug) when is_function(producer) do
  log(
    debug,
    "[MapRewire]rewire_entry: from #{inspect(old)} with a producer function and options #{inspect(options)}"
  )

  fallback = Keyword.get(options, :default, @key_missing)

  source
  |> Map.get(old, fallback)
  |> producer.()
  |> List.wrap()
end

# Plain rename with options: supports `:default` and `:transform`.
defp rewire_entry({old, {new, options}}, source, debug) do
  log(
    debug,
    "[MapRewire]rewire_entry: from #{inspect(old)} to #{inspect(new)} with options #{inspect(options)}"
  )

  fetched = Map.get(source, old, Keyword.get(options, :default, @key_missing))

  value =
    if transform = Keyword.get(options, :transform) do
      transform.(fetched)
    else
      fetched
    end

  [{new, value}]
end

# Producer function without options.
defp rewire_entry({old, producer}, source, debug) when is_function(producer) do
  log(debug, "[MapRewire]rewire_entry: from #{inspect(old)} with a producer function")

  source
  |> Map.get(old, @key_missing)
  |> producer.()
  |> List.wrap()
end

# Plain rename without options.
defp rewire_entry({old, new}, source, debug) do
  log(debug, "[MapRewire]rewire_entry: from #{inspect(old)} to #{inspect(new)}")
  [{new, Map.get(source, old, @key_missing)}]
end
# Post-process rewired entries according to the `:compact` option.
@spec compact(list({Map.key(), Map.value()}), boolean) :: list({Map.key(), Map.value()})
defp compact(rewired, true) do
  # Drop entries whose source key was absent.
  Enum.filter(rewired, fn {_key, value} -> value !== @key_missing end)
end

defp compact(rewired, false) do
  # Keep absent-key entries, but surface them as `nil`.
  Enum.map(rewired, fn
    {key, value} when value === @key_missing -> {key, nil}
    entry -> entry
  end)
end

# Emit a log line only when debugging is enabled.
@spec log(boolean, String.t() | function) :: any()
defp log(true, message), do: Logger.info(message)
defp log(false, _message), do: nil
end
|
lib/map_rewire.ex
| 0.840161
| 0.770141
|
map_rewire.ex
|
starcoder
|
defmodule Exenv.Adapters.Yaml do
  @moduledoc """
  Loads env vars from `.yml` files.

  You can use this adapter by adding it to your `:exenv` application config.
  The options available can be seen in the `load/1` function.

      config :exenv,
        adapters: [
          {Exenv.Adapters.Yaml, []}
        ]

  Below is a simple example of a `.yml` file:

      prod:
        key1: val
        key2: val
      dev:
        key1: val
        key2: val
      test:
        key1: val
        key2: val

  Assuming we have the above file in our project root directory, we would be
  able to access any of the above environment vars:

      System.get_env("KEY1")

  This adapter supports secrets encryption. Please see `Exenv.Encryption` for
  more details on how to get that set up.
  """

  use Exenv.Adapter

  alias Exenv.Adapters.Yaml.Parser

  # Default to the YAML section matching the current Mix environment.
  # NOTE: evaluated once, at compile time.
  @keys [to_string(Mix.env())]

  # A raw YAML value we know how to turn into an env-var string.
  defguardp is_env_val(val) when is_binary(val) or is_number(val) or is_boolean(val)

  @doc """
  Loads the system env vars from a `.yml` specified in the options.

  ## Options

    * `:file` - The file path or mfa that evaluates to a file path in which to
      read the `.yml` from. By default this is a `secrets.yml` file in your
      projects root directory.
    * `:keys` - A list of string keys within the `yml` file to use for the
      secrets. By default this is just the value from `Mix.env/0`.
    * `:encryption` - Options used to decrypt files. Please see
      `Exenv.read_file/2` for the options available.

  ## Example

      Exenv.Adapters.Yaml.load(file: "/path/to/file.yml", keys: ["common", "dev"])

      Exenv.Adapters.Yaml.load(file: {MyApp, :yaml_path, []}, keys: ["common", "dev"])

  """
  @impl true
  def load(opts) do
    opts = get_opts(opts)

    with {:ok, env_file} <- get_env_file(opts),
         {:ok, env_vars} <- parse(env_file, Keyword.get(opts, :keys)) do
      System.put_env(env_vars)
    end
  end

  # Merge caller options over the defaults.
  defp get_opts(opts) do
    Keyword.merge([file: Path.join(File.cwd!(), "secrets.yml"), keys: @keys], opts)
  end

  # Resolve and read the configured file (handles mfa and decryption opts).
  defp get_env_file(opts) do
    opts
    |> Keyword.get(:file)
    |> Exenv.read_file(opts)
  end

  defp parse(env_file, keys) do
    case Parser.read(env_file) do
      {:ok, yaml} -> parse_yaml(yaml, keys)
      error -> error
    end
  end

  # Collect the selected sections and keep only well-formed string pairs.
  defp parse_yaml(yaml, keys) when is_map(yaml) and is_list(keys) do
    env_vars =
      yaml
      |> Map.take(keys)
      |> Map.values()
      |> Enum.concat()
      |> Enum.map(&parse_var/1)
      |> Enum.filter(&valid_var?/1)

    {:ok, env_vars}
  end

  defp parse_yaml(_yaml, _keys), do: {:error, :malformed_yaml}

  # Turn one `{key, value}` YAML entry into an upcased env-var pair; anything
  # that fails to stringify falls through and is filtered out later.
  defp parse_var({key, val}) when is_binary(key) and is_env_val(val) do
    with {:ok, key} <- safe_stringify(key),
         {:ok, val} <- safe_stringify(val) do
      {String.upcase(key), val}
    end
  end

  defp parse_var(_var), do: :error

  defp valid_var?({key, val}), do: is_binary(key) and is_binary(val)
  defp valid_var?(_other), do: false

  # Best-effort stringification; returns nil instead of raising.
  defp safe_stringify(val) do
    {:ok, val |> to_string() |> String.trim()}
  rescue
    _error -> nil
  end
end
|
lib/adapters/yaml.ex
| 0.824179
| 0.511656
|
yaml.ex
|
starcoder
|
defmodule Maverick.Path do
@moduledoc """
Provides functionality for parsing paths to lists of path
nodes, identifying path variables for pattern matching on
incoming requests.
"""
@type path_node :: String.t() | {:variable, String.t()}
@type path :: [path_node]
@type raw_path :: String.t()
import NimbleParsec
@doc """
Parse a path string to a list of path nodes. A path node is either
a `String.t()` or a tuple of `{:variable, String.t()}`. Nodes
beginning with a colon character (":") will parse to a variable
tuple. At runtime, variable tuples are used to construct the
path params portion of a Maverick request.
"""
@spec parse(String.t()) :: path()
def parse(string) do
# A "/" is prepended so inputs with or without a leading slash parse the same.
case parse_path("/" <> string) do
{:ok, result, _, _, _, _} ->
result
{:error, label, path, _, _, _} ->
raise __MODULE__.ParseError, message: label, path: path
end
end
@doc """
Reads a path string and validates as a Maverick-compatible path,
including any colon (":") characters signifying a path variable.
Strips any extraneous forward slashes from the result.
"""
@spec validate(String.t()) :: raw_path()
def validate(string) do
case parse_raw_path("/" <> string) do
{:ok, [result], _, _, _, _} ->
# The parser joins nodes with "/" but drops the leading slash; restore it.
"/" <> result
{:error, label, path, _, _, _} ->
raise __MODULE__.ParseError, message: label, path: path
end
end
# NOTE(review): the range `?A..?z` also admits the ASCII characters between
# `Z` and `a` (`[`, `\`, `]`, `^`, `_`, backtick) — confirm this is intended.
url_file_safe_alphabet = [?A..?z, ?0..?9, ?-, ?_]
# A bare run of slashes with nothing after it parses as the root path.
root_slash = ignore(repeat(string("/"))) |> eos()
# One or more "/" between nodes, discarded from the result.
separator = ignore(times(string("/"), min: 1))
static = ascii_string(url_file_safe_alphabet, min: 1)
# Variables start with ":"; hyphen is excluded from variable names via `-- [?-]`.
variable =
ignore(ascii_char([?:]))
|> ascii_string(url_file_safe_alphabet -- [?-], min: 1)
|> unwrap_and_tag(:variable)
node =
separator
|> choice([
variable,
static
])
path =
choice([
repeat(node) |> eos(),
root_slash
])
|> label("only legal characters")
defparsecp(:parse_path, path)
# Raw variant: keeps ":" characters in place and rejoins nodes with "/".
raw_node =
separator
|> ascii_string(url_file_safe_alphabet ++ [?:], min: 1)
raw_path =
choice([
repeat(raw_node) |> eos(),
root_slash
])
|> reduce({Enum, :join, ["/"]})
|> label("only legal characters")
defparsecp(:parse_raw_path, raw_path)
defmodule ParseError do
@moduledoc """
The path could not be parsed due to illegal character(s)
"""
defexception message: "expected only legal characters", path: []
end
end
|
lib/maverick/path.ex
| 0.888172
| 0.475362
|
path.ex
|
starcoder
|
# Only define the controller integration when Phoenix is available.
if Code.ensure_loaded?(Phoenix) do
defmodule Aegis.Controller do
@moduledoc """
Wraps controllers with Aegis authorization functionality.
"""
@doc """
Authorizes a resource, for a user, for a given action, and marks the
connection as having had aegis authorization perfomed via the assignment of
a boolean value to `aegis_auth_performed` on the connection.
Returns a two-element tuple: if authorization check passes, then
`{:ok, conn}` is returned; otherwise, `{:error, conn}` is returned.
## Examples
defmodule Puppy do
defstruct [id: nil, user_id: nil, hungry: false]
end
defmodule Puppy.Policy do
@behaviour Aegis.Policy
def authorized?(_user, {:index, _puppy}), do: true
def authorized?(%User{id: id}, {:show, %Puppy{user_id: id}}), do: true
def authorized?(_user, {:show, _puppy}), do: false
end
iex> conn = %Plug.Conn{}
iex> user = :user
iex> resource = Puppy
iex> action = :index
iex> {:ok, conn} = Aegis.Controller.authorized?(conn, user, resource, action: action)
iex> conn.private[:aegis_auth_performed]
true
iex> conn = %Plug.Conn{}
iex> user = :user
iex> resource = Puppy
iex> action = :show
iex> {:error, conn} = Aegis.Controller.authorized?(conn, user, resource, action: action)
iex> conn.private[:aegis_auth_performed]
true
"""
@spec authorized?(Plug.Conn.t(), term, term, Keyword.t()) ::
{:ok, Plug.Conn.t()} | {:error, Plug.Conn.t()}
# NOTE(review): despite the `?` suffix this returns tagged tuples rather than
# a boolean — kept as-is for backward compatibility with existing callers.
def authorized?(conn, user, resource, opts) do
# Mark the conn first so `call_action_and_verify_authorized/4` can verify
# that an authorization check ran before the response is sent, regardless
# of the check's outcome.
conn = Plug.Conn.put_private(conn, :aegis_auth_performed, true)
# Action defaults (lazily) to the Phoenix controller action for this conn.
action = Keyword.get_lazy(opts, :action, fn -> Phoenix.Controller.action_name(conn) end)
policy = Keyword.get(opts, :policy)
if Aegis.authorized?(user, {action, resource}, policy) do
{:ok, conn}
else
{:error, conn}
end
end
@doc """
Calls controller action and performs a check on the connection in order to
determine whether or not Aegis authorization has been performed.
## Examples
TODO..
"""
@spec call_action_and_verify_authorized(module, atom, Plug.Conn.t(), term) :: Plug.t()
def call_action_and_verify_authorized(mod, actn, conn, user) do
# The check runs in a before_send callback, i.e. after the action has
# produced its response but before it is sent to the client.
conn =
Plug.Conn.register_before_send(conn, fn conn ->
if conn.private[:aegis_auth_performed] do
conn
else
raise Aegis.AuthorizationNotPerformedError
end
end)
# Actions wrapped by this helper take (conn, params, user).
apply(mod, actn, [conn, conn.params, user])
end
@doc """
Returns the set of accessible resources, for a user, for a given action.
## Examples
Suppose your library defines the following resource and resource policy:
defmodule Puppy do
defstruct [id: nil, user_id: nil, hungry: false]
end
defmodule Puppy.Policy do
@behaviour Aegis.Policy
...
# users should only be able to see puppies that belong to them..
def auth_scope(%User{id: user_id}, {:index, scope}) do
Enum.filter(scope, &(&1.user_id == user_id))
end
end
We can use `auth_scope/4` to appropriately limit access to puppies for a given user
iex> user = %User{id: 1}
iex> puppy_1 = %Puppy{id: 1, user_id: 1}
iex> puppy_2 = %Puppy{id: 2, user_id: 2}
iex> all_puppies = [puppy_1, puppy_2]
iex> Aegis.Controller.auth_scope(user, all_puppies, :index)
[%Puppy{id: 1, user_id: 1, hungry: false}]
iex> Aegis.Controller.auth_scope(user, all_puppies, :index, Puppy.Policy)
[%Puppy{id: 1, user_id: 1, hungry: false}]
In the above example, `Puppy.Policy.auth_scope` is written such that it
takes in and filters an enumerable. A more common use case--if you're
integrating Aegis with database-backed data--is to filter data via building
a query that limits what data is returned.
For example, assuming our app uses Ecto as a database wrapper,
`Puppy.Policy.auth_scope/2` can be rewritten as follows, so that data is
filtered as part of the querying process:
defmodule Puppy.Policy do
@behaviour Aegis.Policy
...
def auth_scope(%User{id: user_id}, {:index, scope}) do
import Ecto.Query
scope |> where([p], p.user_id == ^user_id)
end
end
"""
@spec auth_scope(term(), term(), atom(), module() | nil) :: list()
def auth_scope(user, scope, action, policy \\ nil) do
Aegis.auth_scope(user, {action, scope}, policy)
end
@doc """
Allows another module to inherit `Aegis.Controller` methods.
## Options
- `except` - list of actions to exclude from aegis authorization; defaults to an empty list
## Examples:
For Phoenix applications:
```
defmodule MyApp.PuppyController do
use MyApp, :controller
use Aegis.Controller
def current_user(conn) do
conn.assigns[:user]
end
end
```
if you want to allow some actions to skip authorization, just use the
`except` option:
```
defmodule MyApp.Controller do
use MyApp, :controller
use Aegis.Controller, except: [:custom_action]
def current_user(conn) do
conn.assigns[:user]
end
end
```
"""
defmacro __using__(opts \\ []) do
# `:except` is evaluated at compile time of the using module.
excluded_actions = Keyword.get(opts, :except, [])
quote do
# Overrides Phoenix's `action/2` plug entry point so every action is
# routed through the authorization-verification wrapper.
if Enum.empty?(unquote(excluded_actions)) do
def action(conn, _opts) do
call_action_and_verify_authorized(
__MODULE__,
action_name(conn),
conn,
current_user(conn)
)
end
else
def action(conn, _opts) do
conn
|> action_name()
|> case do
# Excluded actions are invoked directly with the plain
# (conn, params) signature and skip the before_send check.
actn when actn in unquote(excluded_actions) ->
apply(__MODULE__, actn, [conn, conn.params])
actn ->
call_action_and_verify_authorized(__MODULE__, actn, conn, current_user(conn))
end
end
end
# Users must override `current_user/1`; the default raises.
def current_user(_), do: raise("`current_user/1` not defined for #{__MODULE__}")
defoverridable current_user: 1
defdelegate call_action_and_verify_authorized(mod, actn, conn, user),
to: unquote(__MODULE__)
defdelegate authorized?(conn, user, resource, opts \\ []),
to: unquote(__MODULE__)
defdelegate auth_scope(user, scope, action, policy \\ nil),
to: unquote(__MODULE__)
end
end
end
end
|
lib/aegis/controller.ex
| 0.795181
| 0.637764
|
controller.ex
|
starcoder
|
defmodule BikeBrigade.Delivery do
import Ecto.Query, warn: false
alias BikeBrigade.Repo
alias BikeBrigade.LocalizedDateTime
import Geo.PostGIS, only: [st_distance: 2]
alias BikeBrigade.Riders.Rider
alias BikeBrigade.Messaging
alias BikeBrigade.Delivery.{Task, CampaignRider}
import BikeBrigade.Utils, only: [task_count: 1, humanized_task_count: 1]
@doc """
Returns the list of tasks, with `:campaigns` preloaded.
## Examples
iex> list_tasks()
[%Task{}, ...]
"""
def list_tasks do
  # Fix: `Repo.all/2` takes repo options (e.g. :timeout), not query options —
  # the original `Repo.all(Task, preload: [:campaigns])` never performed the
  # preload. Preload explicitly instead.
  Task
  |> Repo.all()
  |> Repo.preload([:campaigns])
end

@doc """
Gets a single task.
Returns `nil` if the Task does not exist.
## Examples
iex> get_task(123)
%Task{}
iex> get_task(456)
nil
"""
def get_task(id, opts \\ []) do
  # Fix: the option key was misspelled `:prelaod`, so caller-supplied
  # `:preload` lists were silently ignored and the default was always used.
  preload =
    Keyword.get(opts, :preload, [
      :assigned_rider,
      :task_items,
      :pickup_location,
      :dropoff_location
    ])

  from(t in Task,
    as: :task,
    where: t.id == ^id
  )
  |> task_load_location()
  |> Repo.one()
  |> Repo.preload(preload)
end
# Joins and preloads both task locations in a single query, and computes the
# pickup→dropoff distance into the virtual `delivery_distance` field.
# Expects `query` to have a named binding `:task`.
defp task_load_location(query) do
query
|> join(:inner, [task: t], pl in assoc(t, :pickup_location), as: :pickup_location)
|> join(:inner, [task: t], dl in assoc(t, :dropoff_location), as: :dropoff_location)
|> preload([pickup_location: pl, dropoff_location: dl],
pickup_location: pl,
dropoff_location: dl
)
|> select_merge([pickup_location: pl, dropoff_location: dl], %{
delivery_distance: st_distance(pl.coords, dl.coords)
})
end
@doc """
Creates a task from `attrs`.

Returns `{:ok, task}` on success or `{:error, changeset}` when the
attributes are invalid.
"""
def create_task(attrs \\ %{}) do
  Task
  |> struct()
  |> Task.changeset(attrs)
  |> Repo.insert()
end

@doc """
Updates `task` with `attrs`, broadcasting `:task_updated` on success.

Returns `{:ok, task}` or `{:error, changeset}`.
"""
def update_task(%Task{} = task, attrs, opts \\ []) do
  # TODO validate items unique index stuff
  result =
    task
    |> Task.changeset(attrs, opts)
    |> Repo.update()

  broadcast(result, :task_updated)
end

@doc """
Deletes `task`, broadcasting `:task_deleted` on success.

Returns `{:ok, task}` or `{:error, changeset}`.
"""
def delete_task(%Task{} = task) do
  task
  |> Repo.delete()
  |> broadcast(:task_deleted)
end

@doc """
Returns an `%Ecto.Changeset{}` for tracking task changes.
"""
def change_task(%Task{} = task, attrs \\ %{}, opts \\ []),
  do: Task.changeset(task, attrs, opts)
alias BikeBrigade.Delivery.Campaign
@doc """
Lists campaigns, newest delivery first; when `week` (a `Date`) is given,
restricts to campaigns whose delivery starts within the 7-day window
beginning on that date.
"""
def list_campaigns(week \\ nil, opts \\ []) do
  preload = Keyword.get(opts, :preload, [:program])

  query = from(c in Campaign, as: :campaign, order_by: [desc: c.delivery_start])

  query =
    case week do
      nil ->
        query

      week ->
        window_start = LocalizedDateTime.new!(week, ~T[00:00:00])
        window_end = week |> Date.add(6) |> LocalizedDateTime.new!(~T[23:59:59])

        where(
          query,
          [campaign: c],
          c.delivery_start >= ^window_start and c.delivery_start <= ^window_end
        )
    end

  query
  |> Repo.all()
  |> Repo.preload(preload)
end
alias BikeBrigade.Delivery.CampaignRider
# Looks up a CampaignRider by its delivery URL token, raising if not found.
# Loads the campaign (with program/location) and the rider with their
# assigned tasks for that campaign, tasks ordered by pickup→dropoff distance.
def get_campaign_rider!(token) do
query =
from cr in CampaignRider,
join: c in assoc(cr, :campaign),
join: r in assoc(cr, :rider),
left_join: t in assoc(c, :tasks),
on: t.assigned_rider_id == r.id,
# TODO make this join dor distance some kind of function
left_join: pl in assoc(t, :pickup_location),
left_join: dl in assoc(t, :dropoff_location),
order_by: st_distance(pl.coords, dl.coords),
where: cr.token == ^token,
preload: [
campaign: [:program, :location],
rider: {r, [:location, assigned_tasks: {t, [:dropoff_location, task_items: :item]}]}
]
Repo.one!(query)
end
@doc """
Signs a rider up for a campaign (upserting on `{rider_id, campaign_id}`)
and broadcasts `:campaign_rider_created` on success.
"""
def create_campaign_rider(attrs \\ %{}) do
  changeset = CampaignRider.changeset(%CampaignRider{}, attrs)

  changeset
  |> Repo.insert(
    # Re-signing up replaces the rider's signup details instead of failing.
    on_conflict: {:replace, [:rider_capacity, :notes, :pickup_window, :enter_building]},
    conflict_target: [:rider_id, :campaign_id]
  )
  |> broadcast(:campaign_rider_created)
end

@doc """
Removes a rider's signup for a campaign, broadcasting
`:campaign_rider_deleted` on success.
"""
def delete_campaign_rider(%CampaignRider{} = campaign_rider) do
  campaign_rider
  |> Repo.delete()
  |> broadcast(:campaign_rider_deleted)
end

@doc """
Creates a task attached to `campaign`, defaulting the pickup location to the
campaign's location, and broadcasts `:task_created` on success.
"""
def create_task_for_campaign(campaign, attrs \\ %{}, opts \\ []) do
  # TODO handle conflicts for multiple task items here
  # TODO this looks a lot like Task.changeset_for_campaign()
  base = %Task{pickup_location: campaign.location, campaign_id: campaign.id}

  base
  |> Task.changeset(attrs, opts)
  |> Repo.insert()
  |> broadcast(:task_created)
end
@doc """
Creates a campaign from `attrs`.

Returns `{:ok, campaign}` or `{:error, changeset}`.
"""
def create_campaign(attrs \\ %{}) do
  %Campaign{}
  |> Campaign.changeset(attrs)
  |> Repo.insert()
end

@doc """
Fetches a campaign by id, or `nil` when absent; preloads `:location` and
`:program` by default (override with the `:preload` option).
"""
def get_campaign(id, opts \\ []) do
  preload = Keyword.get(opts, :preload, [:location, :program])

  Campaign
  |> Repo.get(id)
  |> Repo.preload(preload)
end
@doc """
Returns a tuple of `{riders, tasks}` for a `campaign`. Riders are pre-loaded with their assigned tasks,
and tasks are pre-loaded with the assigned rider, and task_items/items.
"""
def campaign_riders_and_tasks(%Campaign{} = campaign) do
# The campaign's location coords are needed below for rider distances.
campaign =
campaign
|> Repo.preload(:location)
all_tasks =
from(t in Task,
as: :task,
where: t.campaign_id == ^campaign.id,
select: t
)
|> task_load_location()
|> Repo.all()
|> Repo.preload([:pickup_location, :dropoff_location, task_items: [:item]])
# Riders signed up for the campaign, with signup details merged onto each
# rider struct as virtual fields.
all_riders =
Repo.all(
from cr in CampaignRider,
join: r in assoc(cr, :rider),
join: l in assoc(r, :location),
where: cr.campaign_id == ^campaign.id,
order_by: r.name,
select: r,
select_merge: %{
distance: st_distance(l.coords, ^campaign.location.coords),
task_notes: cr.notes,
task_capacity: cr.rider_capacity,
task_enter_building: cr.enter_building,
pickup_window: cr.pickup_window,
delivery_url_token: cr.token
}
)
|> Repo.preload(:location)
# Does a nested preload to get tasks' assigned riders without doing an extra db query
tasks = Repo.preload(all_tasks, assigned_rider: fn _ -> all_riders end)
riders = Repo.preload(all_riders, assigned_tasks: fn _ -> all_tasks end)
{riders, tasks}
end
# TODO RENAME TO TODAYS TASKS
# Campaigns (with the rider's assigned tasks preloaded) whose delivery starts
# between the start of yesterday and the end of today, for `rider`.
def latest_campaign_tasks(rider) do
# This is hacky, better to refigure out how we present this
today = LocalizedDateTime.today()
end_of_today = LocalizedDateTime.new!(today, ~T[23:59:59])
start_of_yesterday = Date.add(today, -1) |> LocalizedDateTime.new!(~T[00:00:00])
query =
from c in Campaign,
join: t in assoc(c, :tasks),
join: cr in CampaignRider,
on: cr.rider_id == ^rider.id and cr.campaign_id == c.id,
where: c.delivery_start <= ^end_of_today and c.delivery_start >= ^start_of_yesterday,
where: t.assigned_rider_id == ^rider.id,
select: c,
select_merge: %{delivery_url_token: cr.token},
order_by: [desc: c.delivery_start, asc: t.id],
preload: [tasks: t]
Repo.all(query)
|> Repo.preload([:program, tasks: [:dropoff_location, task_items: :item]])
end
# Number of campaigns this rider has signed up for (all-time).
def campaigns_per_rider(rider) do
query = from c in CampaignRider, where: c.rider_id == ^rider.id, select: count(c.id)
Repo.one(query)
end
# Greedy auto-assignment of unassigned tasks to signed-up riders.
# Riders are visited highest-capacity first (ties broken by how much spare
# range they have relative to their distance from the campaign); each rider
# with remaining capacity is given the closest unassigned tasks, re-queried
# on every iteration so earlier assignments are respected.
def hacky_assign(%Campaign{} = campaign) do
riders_query =
from r in Rider,
join: cr in CampaignRider,
on: cr.rider_id == r.id and cr.campaign_id == ^campaign.id,
join: l in assoc(r, :location),
order_by: [
desc: cr.rider_capacity,
asc: r.max_distance - st_distance(l.coords, ^campaign.location.coords)
],
left_join: t in Task,
on: t.assigned_rider_id == r.id and t.campaign_id == ^campaign.id,
preload: [:location, assigned_tasks: {t, :task_items}],
select: {r, cr.rider_capacity}
riders = Repo.all(riders_query)
require Logger
for {rider, rider_capacity} <- riders do
if task_count(rider.assigned_tasks) < rider_capacity do
# Multi-task riders get tasks ordered by pickup→dropoff distance;
# single-task riders get the task whose dropoff is closest to them.
tasks =
if rider_capacity > 1 do
Repo.all(
from t in Task,
where: t.campaign_id == ^campaign.id and is_nil(t.assigned_rider_id),
join: pl in assoc(t, :pickup_location),
join: dl in assoc(t, :dropoff_location),
preload: [task_items: :item],
order_by: st_distance(pl.coords, dl.coords)
)
else
Repo.all(
from t in Task,
where: t.campaign_id == ^campaign.id and is_nil(t.assigned_rider_id),
join: dl in assoc(t, :dropoff_location),
preload: [task_items: :item],
order_by: st_distance(dl.coords, ^rider.location.coords)
)
end
|> Repo.preload([:assigned_rider])
{to_assign, _tasks} = Enum.split(tasks, rider_capacity)
Logger.info("Assigning #{Enum.count(to_assign)} items to #{rider.name}")
for task <- to_assign do
update_task(task, %{assigned_rider_id: rider.id})
end
end
end
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking campaign changes.
"""
def change_campaign(campaign, attrs \\ %{}), do: Campaign.changeset(campaign, attrs)

@doc """
Updates `campaign` with `attrs`, broadcasting `:campaign_updated` on success.
"""
def update_campaign(campaign, attrs, opts \\ []) do
  result =
    campaign
    |> Campaign.changeset(attrs, opts)
    |> Repo.update()

  broadcast(result, :campaign_updated)
end

@doc """
Deletes `campaign`.
"""
def delete_campaign(%Campaign{} = campaign), do: Repo.delete(campaign)
@doc """
Sends the campaign instructions message to every rider with assigned tasks.

Returns `{:ok, messages}`; raises (via the match) if any individual send
fails.
"""
def send_campaign_messages(%Campaign{} = campaign) do
  campaign = Repo.preload(campaign, [:location, :instructions_template, :program])
  {riders, _tasks} = campaign_riders_and_tasks(campaign)

  msgs =
    for rider <- riders, rider != nil, rider.assigned_tasks != [] do
      # The function head already guarantees `campaign` is a %Campaign{}, so
      # the redundant inline `%Campaign{} =` match from the original is gone.
      {:ok, msg} = send_campaign_message(campaign, rider)
      msg
    end

  {:ok, msgs}
end

@doc """
Renders the campaign's instructions template for `rider` and sends it,
optionally following up with a plain-text itinerary when the rider has
opted in via `text_based_itinerary`.
"""
def send_campaign_message(%Campaign{} = campaign, rider) do
  body =
    render_campaign_message_for_rider(campaign, campaign.instructions_template.body, rider)

  {:ok, msg} = Messaging.send_message_in_chunks(campaign, body, rider)

  if rider.text_based_itinerary do
    send_text_based_itinerary(rider, campaign)
  end

  {:ok, msg}
end
# Sends a fixed plain-text fallback itinerary. The placeholders are filled by
# `render_campaign_message_for_rider/3`; triple braces mean unescaped
# Mustache substitution.
defp send_text_based_itinerary(rider, campaign) do
template = """
1. Pickup
• {{{task_count}}}
• {{{pickup_address}}}
• {{{pickup_window}}}
2. Drop-off
{{{task_details}}}
{{{directions}}}
"""
body =
render_campaign_message_for_rider(
campaign,
template,
rider
)
Messaging.send_message_in_chunks(campaign, body, rider)
end
# Renders a Mustache `message` template with per-rider assigns (task details,
# pickup window, Google Maps directions, delivery-details URL). A nil
# template is treated as the empty string.
def render_campaign_message_for_rider(campaign, nil, rider),
do: render_campaign_message_for_rider(campaign, "", rider)
def render_campaign_message_for_rider(%Campaign{} = campaign, message, %Rider{} = rider)
when is_binary(message) do
tasks =
rider.assigned_tasks
|> Enum.sort_by(& &1.delivery_distance)
# TODO: referncing CampaignHelpers here is bad!
# need to move this into Task or Delivery
pickup_window = BikeBrigadeWeb.CampaignHelpers.pickup_window(campaign, rider)
# Route: campaign pickup location followed by each dropoff, in task order.
locations = [campaign.location | Enum.map(tasks, & &1.dropoff_location)]
task_details =
for task <- tasks do
"Name: #{task.dropoff_name}\nPhone: #{task.dropoff_phone}\nType: #{BikeBrigadeWeb.CampaignHelpers.request_type(task)}\nAddress: #{task.dropoff_location}\nNotes: #{task.rider_notes}"
end
|> Enum.join("\n\n")
# Last stop becomes the destination; the rest become waypoints.
{destination, waypoints} = List.pop_at(locations, -1)
# TODO: this is the same as DeliveryHelpers.directions_url
map_query =
URI.encode_query(%{
api: 1,
travelmode: "bicycling",
origin: rider.location,
waypoints: Enum.join(waypoints, "|"),
destination: destination
})
directions = "https://www.google.com/maps/dir/?#{map_query}"
delivery_details_url =
BikeBrigadeWeb.Router.Helpers.delivery_show_url(
BikeBrigadeWeb.Endpoint,
:show,
rider.delivery_url_token
)
assigns = %{
rider_name: rider.name |> String.split(" ") |> List.first(),
pickup_address: campaign.location,
task_details: task_details,
directions: directions,
task_count: humanized_task_count(tasks),
pickup_window: pickup_window,
delivery_details_url: delivery_details_url
}
Mustache.render(message, assigns)
end
@doc """
Returns the delivery URL token for `rider` on `campaign`, generating and
persisting one on first use. Returns `nil` when the rider is not signed up
for the campaign.
"""
def campaign_rider_token(%Campaign{} = campaign, %Rider{} = rider) do
  # TODO
  # this is very inefficient to look up each time, which we could cache these
  query =
    from cr in CampaignRider,
      where: cr.rider_id == ^rider.id and cr.campaign_id == ^campaign.id

  case Repo.one(query) do
    nil ->
      nil

    %CampaignRider{token: nil} = cr ->
      cr
      |> CampaignRider.gen_token_changeset()
      |> Repo.update!()
      |> Map.fetch!(:token)

    %CampaignRider{token: token} ->
      token
  end
end
@doc """
Removes `rider`'s signup for `campaign` (if any) and unassigns every task of
that campaign currently assigned to them.
"""
def remove_rider_from_campaign(campaign, rider) do
  case Repo.get_by(CampaignRider, campaign_id: campaign.id, rider_id: rider.id) do
    nil -> nil
    campaign_rider -> delete_campaign_rider(campaign_rider)
  end

  from(t in Task,
    where: t.campaign_id == ^campaign.id and t.assigned_rider_id == ^rider.id
  )
  |> Repo.all()
  |> Enum.map(&update_task(&1, %{assigned_rider_id: nil}))
end
alias BikeBrigade.Delivery.Program
@doc """
Returns the list of programs.
## Examples
iex> list_programs()
[%Program{}, ...]
"""
def list_programs(opts \\ []) do
preload = Keyword.get(opts, :preload, [])
query =
from p in Program,
as: :program,
order_by: [desc: p.active, asc: p.name]
# Optional case-insensitive name search. The term is parameterized (safe
# from injection), though `%`/`_` in it still act as LIKE wildcards.
query =
if search = opts[:search] do
query
|> where([program: p], ilike(p.name, ^"%#{search}%"))
else
query
end
# Optionally merge a per-program campaign count as a virtual field.
query =
if opts[:with_campaign_count] do
from p in query,
left_join: c in assoc(p, :campaigns),
group_by: p.id,
select_merge: %{campaign_count: count(c)}
else
query
end
Repo.all(query)
|> Repo.preload(preload)
end
@doc """
Gets a single program.

Raises `Ecto.NoResultsError` if the Program does not exist.

## Examples

    iex> get_program!(123)
    %Program{}

    iex> get_program!(456)
    ** (Ecto.NoResultsError)

"""
def get_program!(id, opts \\ []) do
  preload = Keyword.get(opts, :preload, [])

  Program
  |> Repo.get!(id)
  |> Repo.preload(preload)
end

@doc """
Fetches a program by its exact name, or `nil` when no program matches.
(`Repo.preload/2` passes `nil` through unchanged.)
"""
def get_program_by_name(name, opts \\ []) do
  preload = Keyword.get(opts, :preload, [])

  Repo.get_by(Program, name: name)
  |> Repo.preload(preload)
end
@doc """
Creates a program from `attrs`.

Returns `{:ok, program}` or `{:error, changeset}`.
"""
def create_program(attrs \\ %{}) do
  Program
  |> struct()
  |> Program.changeset(attrs)
  |> Repo.insert()
end

@doc """
Updates `program` with `attrs`.

Returns `{:ok, program}` or `{:error, changeset}`.
"""
def update_program(%Program{} = program, attrs) do
  Repo.update(Program.changeset(program, attrs))
end

@doc """
Deletes `program`.

Returns `{:ok, program}` or `{:error, changeset}`.
"""
def delete_program(%Program{} = program), do: Repo.delete(program)

@doc """
Returns an `%Ecto.Changeset{}` for tracking program changes.
"""
def change_program(%Program{} = program, attrs \\ %{}),
  do: Program.changeset(program, attrs)
@doc """
Subscribes the caller to delivery-domain PubSub events.
"""
def subscribe, do: Phoenix.PubSub.subscribe(BikeBrigade.PubSub, "delivery")

# Broadcasts `{event, struct}` on success and passes the repo result through
# untouched either way, so it can be tacked onto the end of a pipeline.
defp broadcast({:ok, struct} = ok, event) do
  Phoenix.PubSub.broadcast(BikeBrigade.PubSub, "delivery", {event, struct})
  ok
end

defp broadcast({:error, _reason} = error, _event), do: error
alias BikeBrigade.Delivery.Item
@doc """
Returns the list of items, ordered by name, with `:program` preloaded.
"""
def list_items do
  from(i in Item, order_by: [asc: i.name])
  |> Repo.all()
  |> Repo.preload(:program)
end

@doc """
Gets a single item.

Raises `Ecto.NoResultsError` if the Item does not exist.
"""
def get_item!(id), do: Repo.get!(Item, id)

@doc """
Creates an item from `attrs`.

Returns `{:ok, item}` or `{:error, changeset}`.
"""
def create_item(attrs \\ %{}) do
  Item
  |> struct()
  |> Item.changeset(attrs)
  |> Repo.insert()
end

@doc """
Updates `item` with `attrs`.

Returns `{:ok, item}` or `{:error, changeset}`.
"""
def update_item(%Item{} = item, attrs) do
  Repo.update(Item.changeset(item, attrs))
end

@doc """
Deletes `item`.

Returns `{:ok, item}` or `{:error, changeset}`.
"""
def delete_item(%Item{} = item), do: Repo.delete(item)

@doc """
Returns an `%Ecto.Changeset{}` for tracking item changes.
"""
def change_item(%Item{} = item, attrs \\ %{}), do: Item.changeset(item, attrs)
alias BikeBrigade.Delivery.Opportunity
@doc """
Returns the list of opportunities.
## Examples
iex> list_opportunities()
[%Opportunity{}, ...]
"""
def list_opportunities(opts \\ []) do
query =
from o in Opportunity,
as: :opportunity,
left_join: p in assoc(o, :program),
as: :program,
on: o.program_id == p.id,
where: ^opportunities_filter(opts)
# Sorting: two virtual fields need extra joins/bindings; any other atom is
# treated as a column on the opportunity itself.
query =
case {Keyword.get(opts, :sort_order, :asc), Keyword.get(opts, :sort_field, :delivery_start)} do
{order, :program_name} ->
query
|> order_by([{^order, as(:program).name}, asc: as(:opportunity).delivery_start])
{order, :program_lead} ->
query
|> join(:left, [o, p], l in assoc(p, :lead), as: :lead)
|> order_by([{^order, as(:lead).name}, asc: as(:opportunity).delivery_start])
{order, field} when order in [:asc, :desc] and is_atom(field) ->
query
|> order_by([{^order, ^field}])
end
preload = Keyword.get(opts, :preload, [])
Repo.all(query)
|> Repo.preload(preload)
end
# Builds a `dynamic/2` where-clause from the supported filter options
# (:published, :start_date, :end_date); unset options leave the filter as-is.
defp opportunities_filter(opts) do
filter = true
filter =
case Keyword.fetch(opts, :published) do
{:ok, true} -> dynamic([o], ^filter and o.published == true)
_ -> filter
end
filter =
case Keyword.fetch(opts, :start_date) do
{:ok, date} ->
date_time = LocalizedDateTime.new!(date, ~T[00:00:00])
dynamic([o], ^filter and o.delivery_start >= ^date_time)
_ ->
filter
end
filter =
case Keyword.fetch(opts, :end_date) do
{:ok, date} ->
date_time = LocalizedDateTime.new!(date, ~T[23:59:59])
dynamic([o], ^filter and o.delivery_start <= ^date_time)
_ ->
filter
end
filter
end
@doc """
Fetches an opportunity by id, preloading `:location` and `:program` by
default.

NOTE(review): despite the non-bang name this uses `Repo.get!` and raises
`Ecto.NoResultsError` for an unknown id (unlike `get_campaign/2`); kept
as-is to avoid changing caller-visible behavior.
"""
def get_opportunity(id, opts \\ []) do
  preload = Keyword.get(opts, :preload, [:location, :program])

  Opportunity
  |> Repo.get!(id)
  |> Repo.preload(preload)
end

@doc """
Creates an opportunity, broadcasting `:opportunity_created` on success.
"""
def create_opportunity(attrs \\ %{}) do
  Opportunity
  |> struct()
  |> Opportunity.changeset(attrs)
  |> Repo.insert()
  |> broadcast(:opportunity_created)
end

@doc """
Updates `opportunity`, broadcasting `:opportunity_updated` on success.
"""
def update_opportunity(%Opportunity{} = opportunity, attrs \\ %{}) do
  result =
    opportunity
    |> Opportunity.changeset(attrs)
    |> Repo.update()

  broadcast(result, :opportunity_updated)
end

@doc """
Updates `opportunity` when it is already persisted (has an id); otherwise
creates a new one from `attrs`.
"""
def create_or_update_opportunity(%Opportunity{} = opportunity, attrs \\ %{}) do
  case opportunity.id do
    nil -> create_opportunity(attrs)
    _id -> update_opportunity(opportunity, attrs)
  end
end

@doc """
Deletes `opportunity`, broadcasting `:opportunity_deleted` on success.
"""
def delete_opportunity(%Opportunity{} = opportunity) do
  opportunity
  |> Repo.delete()
  |> broadcast(:opportunity_deleted)
end

@doc """
Returns an `%Ecto.Changeset{}` for tracking opportunity changes.
"""
def change_opportunity(%Opportunity{} = opportunity, attrs \\ %{}),
  do: Opportunity.changeset(opportunity, attrs)
end
|
lib/bike_brigade/delivery.ex
| 0.62395
| 0.452596
|
delivery.ex
|
starcoder
|
defmodule MockeryExtras.Given do
  import Mockery
  alias MockeryExtras.MacroX
  alias MockeryExtras.Stubbery

  @moduledoc """
  This module provides a pretty notation for a common case that's not
  gracefully handled by [Mockery](https://hexdocs.pm/mockery/readme.html).

  Here is how you instruct Mockery to return the value `"5"` when
  `Map.get/3` is called with `%{}` and `:key`:

      use MockeryExtras.Given
      given Map.get(%{}, :key), return: "5"

  Note that the first argument to `given` looks like an ordinary function call.

  It is also common to have a "don't care" argument, like this:

      given Map.get(@any, :key), return: "5"

  You may also ask that the function return a different value each time it's called:

      given Map.get(@any, :key), stream: [1, 2, 3]

  See `given/2` for more.
  """

  @doc """
  Arrange for a function to return a stubbed value or a stream of stubbed values.

  The common case takes what looks like a function call, plus a return
  value, and arranges that such a function call will return the given
  value whenever it's made at a
  ["seam"](https://www.informit.com/articles/article.aspx?p=359417&seqNum=2)
  marked with
  [`Mockery.mockable`](https://hexdocs.pm/mockery/Mockery.Macro.html#mockable/2).

      # Code:
      ... mockable(Schema).changeset(struct, params) ...
      # Test:
      given Schema.changeset(%Schema{}, %{age: "3"}), return: %Changeset{...}

  The function's arguments and return value can be constants, as shown
  above, or they can be calculations or variables. That can be helpful
  when the `given` appears in a test helper:

      def helper(params, cast_value, something_else) do
        ...
        given Schema.changeset(%Schema{}, params), return: cast_value
        ...
        assert ...
      end

  A function argument can be the special value `@any` (defined when
  the module is `used`). That's useful when the argument is irrelevant
  and you don't want to have to type it out:

      given Schema.changeset(@any, params), return: cast_value

  `@any` expands to a function whose value is always `true`. More generally,
  any function used as an argument is not matched with equality. Instead, the
  call-time value is passed to the function, which should return a truthy value
  to indicate a match. So you can do this:

      given Module.f(5, &even/1), return: 8

  When the `stream:` keyword is used, each new matching call returns the
  next value in the list argument:

      given Map.get(@any, @any), stream: [3, 4]
      streamer(%{}, :key)   # returns 3
      streamer(%{}, :key)   # returns 4
      streamer(%{}, :key)   # assertion failure

  Notes:

  * You can provide return values for many arglist values.

        given Module.f(5, &even/1), return: 8
        given Module.f(6, @any), return: 9

  * If there's more than one match, the first is used.

  * If the same arglist is given twice, the second replaces the first.
    That lets you use ExUnit `setup` to establish defaults:

        def setup do
          given RunningExample.params(:a_runnable), return: %{}
          ...
        test "..."
          given RunningExample.params(:a_runnable), return: %{"a" => "1"}
          assert Steps.runnable(:a_runnable) == %{a: 1}
        end

  * If a function has a `given` value for one or more arglists, but none
    matched, an error is raised.

  * Despite the name, the value for `:stream` must be a `List`, not a `Stream`.
  """
  # Decomposes the call AST; only module-qualified calls are supported, so
  # anything else raises with a hint to use `__MODULE__`.
  defmacro given(funcall, return_description) do
    case MacroX.decompose_call_alt(funcall) do
      {:in_named_module, the_alias, name_and_arity, arglist_spec} ->
        expand(the_alias, name_and_arity, arglist_spec, return_description)
      _ ->
        raise """
        You can't use `given` with `#{Macro.to_string funcall}`.
        There has to be a module in the call. You can use `__MODULE__`
        if necessary:
            __MODULE__.#{Macro.to_string funcall}
        """
    end
  end

  @doc """
  The guts of `given/2` for use in your own macros.

  This function is convenient when you want to create a number of stubs at
  once. For example, suppose the `RunningExample` module has several single-argument
  getters. A `stub` macro can be more compact than several `givens`:

      stub(
        original_params: input,
        format: :phoenix,
        neighborhood: %{een(b: Module) => %{id: 383}})

  `stub` can be written like this:

      defmacro stub(kws) do
        for {key, val} <- kws do
          Given.expand(RunningExample, [{key, 1}], [:running], return: val)
        end
      end

  When calling `expand(module_alias, name_and_arity, arglist_spec,
  return_spec)`, know that:

  * `module_alias` can be a simple atom, like `RunningExample`,
    which is an alias for `EctoTestDSL.Run.RunningExample`. More generally, it
    can be the `:__aliases__` value from `Macro.decompose_call/1`.
  * `name_and_arity` is a function name and arity pair of the form `[get: 3]`.
  * `arglist_spec` is a list of values like `[5, @any]`.
  * `return_spec` should be either `return: <value>` or `stream: <list>`.
  """
  def expand(module_alias, name_and_arity, arglist_spec, return_spec)

  @keyword_error "There must be a single keyword, either `return:` or `stream:`"

  # Single-keyword form: register the stub in the process dictionary and point
  # Mockery's mock at a calculator that looks the stub up at call time.
  def expand(module_alias, name_and_arity, arglist_spec, [{return_type, value}]) do
    unless return_type in [:return, :stream], do: raise @keyword_error
    quote do
      module = MockeryExtras.MacroX.alias_to_module(unquote(module_alias), __ENV__)
      process_key = Stubbery.process_dictionary_key(module, unquote(name_and_arity))
      Stubbery.add_stub(process_key, unquote(arglist_spec), unquote(return_type), unquote(value))
      return_calculator = Stubbery.make__return_calculator(process_key, unquote(name_and_arity))
      mock(module, unquote(name_and_arity), return_calculator)
    end
  end

  # More than one keyword pair is ambiguous — reject it.
  def expand(_, _, _, [{_key, _value} | _]), do: raise @keyword_error

  # Backward compatibility: a bare value is treated as `return: value`.
  def expand(module_alias, name_and_arity, arglist_spec, return_value) do
    expand(module_alias, name_and_arity, arglist_spec, return: return_value)
  end

  @doc """
  This shows (as with `IO.inspect`) all the existing stubs.
  The format is not pretty.

      [
        {{Given, Date, [add: 2]},
         [
           {[~D[2001-02-03], 3], "return for 3",
            #Function<9.8563522/1 in MockeryExtras.Stubbery.make_matcher/1>}
         ]}
      ]
  """
  def inspect do
    Process.get
    |> Enum.filter(&Stubbery.given?/1)
    |> IO.inspect
  end

  defmacro __using__(_) do
    quote do
      import MockeryExtras.Given, only: [given: 2]
      # `@any` is a matcher function that accepts any call-time value.
      @any &Stubbery.any/1
    end
  end
end
|
lib/given.ex
| 0.87768
| 0.70163
|
given.ex
|
starcoder
|
defmodule PassiveSupport.Path.Sigil do
  @moduledoc """
  Quickly and idiomatically build filesystem paths by using `~P`

  ### The path sigil

  `import PassiveSupport.Path.Sigil` will make `~P` activate
  various functions within the `Path` module, for quick and idiomatic
  usage of filesystem paths. As its default behavior, `~P[path/to/something]`
  will expand to the output of `Path.absname("path/to/something")`, but other
  behaviors can be exposed depending on the modifier provided following the
  closing delimiter. The enabled modifiers are currently:

  - `'a'` for `Path.absname/1` (default)
  - `'b'` for `Path.basename/1`
  - `'d'` for `Path.dirname/1`
  - `'x'` for `Path.expand/1`
  - `'w'` for `Path.wildcard/1`
  - and `'wd'` for `Path.wildcard(path, match_dot: true)`

  Beyond that, there is no means of modifying function behavior any further,
  and interpolation through `~p` is not yet implemented. They both are on
  the roadmap for this module, but being that this maintainer fundamentally
  works on this library in his spare time, the ready date for those
  functionalities is decidedly TBD.

  ## Examples

      iex> ~P[foo]
      Path.absname("foo")
      iex> ~P[bar]a
      Path.absname("bar")
      iex> ~P[bat]b
      "bat"
      iex> ~P[bat/boy]b
      "boy"
      iex> ~P[fizz/buzz]d
      "fizz"
      iex> ~P[/etc/hosts]d
      "/etc"
      iex> ~P[/etc/config.d/nginx.conf]d
      "/etc/config.d"
      iex> ~P[./deps/phoenix/lib]x
      Path.expand("./deps/phoenix/lib")
      iex> ~P[~/.bash_profile]x
      Path.expand("~/.bash_profile")
      iex> ~P[**/*.txt]w
      Path.wildcard("**/*.txt")
  """

  # One clause per modifier; no modifier is the same as 'a' (absname).
  defmacro sigil_P(path, modifiers)

  defmacro sigil_P(path, []), do: quote(do: Path.absname(unquote(path)))
  defmacro sigil_P(path, 'a'), do: quote(do: Path.absname(unquote(path)))
  defmacro sigil_P(path, 'b'), do: quote(do: Path.basename(unquote(path)))
  defmacro sigil_P(path, 'd'), do: quote(do: Path.dirname(unquote(path)))
  defmacro sigil_P(path, 'x'), do: quote(do: Path.expand(unquote(path)))
  defmacro sigil_P(path, 'w'), do: quote(do: Path.wildcard(unquote(path)))
  defmacro sigil_P(path, 'wd'), do: quote(do: Path.wildcard(unquote(path), match_dot: true))

  # TODO: defmacro sigil_p
end
|
lib/passive_support/base/path/sigil.ex
| 0.627723
| 0.434101
|
sigil.ex
|
starcoder
|
defmodule Exoself do
  # NOTE(review): these aliases are no-ops (each aliases a top-level module to
  # itself); kept as a list of the collaborator modules used below.
  alias Neuron
  alias Sensor
  alias Actuator
  alias Cortex

  @doc """
  Reads a genotype from `file_name` and maps it onto a live process network.
  Mapping the genome can take some time, so the work runs inside a `Task`.
  """
  def map(file_name) do
    {:ok, genotype} = :file.consult(file_name)
    task = Task.async(fn -> map(file_name, genotype) end)
    Task.await(task)
  end

  @doc """
  Generates a network from the genotype and initializes the evolutionary
  procedure (`loop/13`).
  """
  def map(file_name, genotype) do
    # Seed the RNG. `:random` is deprecated in favour of `:rand`, but is kept
    # to preserve the original behaviour.
    {v1, v2, v3} = {:os.system_time(), :os.system_time(), :os.system_time()}
    :random.seed(v1, v2, v3)
    # ETS table linking ids and pids in both directions.
    ids_and_pids = :ets.new(:id_and_pids, [:set, :private])
    cortex = Genotype.read(genotype, :cortex)
    sensor_ids = cortex.sensor_ids
    actuator_ids = cortex.actuator_ids
    nids = cortex.nids
    # NOTE(review): `spawn_scapes/3` is not defined in this module — presumably
    # defined elsewhere in the project; TODO confirm.
    scape_pids = spawn_scapes(ids_and_pids, cortex, [cortex.id])
    spawn_cerebral_units(ids_and_pids, Cortex, [cortex.id])
    spawn_cerebral_units(ids_and_pids, Sensor, sensor_ids)
    spawn_cerebral_units(ids_and_pids, Actuator, actuator_ids)
    spawn_cerebral_units(ids_and_pids, Neuron, nids)
    cerebral_units = List.flatten([sensor_ids, actuator_ids, nids])
    # NOTE(review): `link_cerebral_units/2` pattern-matches on %Sensor{} /
    # %Actuator{} / %Neuron{} structs, while `cerebral_units` here holds ids —
    # TODO confirm the ids are in fact those structs.
    link_cerebral_units(cerebral_units, ids_and_pids)
    {spids, npids, apids} = link_cortex(cortex, ids_and_pids)
    cx_pid = :ets.lookup_element(ids_and_pids, cortex.id, 2)
    loop(file_name, genotype, ids_and_pids, cx_pid, spids, npids, apids, scape_pids, 0, 0, 0, 0, 1)
  end

  @doc """
  For each evaluation of the NN: back up its weights if fitness improved,
  otherwise restore the previously perturbed weights and count the attempt.
  """
  def loop(file_name, genotype, ids_and_pids, cx_pid, spids, npids, apids, scape_pids, highest_fitness, eval_acc, cycle_acc, time_acc, attempt) do
    receive do
      {^cx_pid, :evaluation_completed, fitness, _cycles, _time} ->
        {_new_highest_fitness, _new_attempt} =
          if fitness > highest_fitness do
            Enum.each(npids, fn npid -> send(npid, {self(), :weight_backup}) end)
            # Fitness increased: reset the attempt counter.
            {fitness, 0}
          else
            # BUGFIX(review): the original read `get(:perturbed)` (undefined)
            # and was missing the closing `end` of the anonymous function.
            # The process dictionary is the most plausible source of the
            # perturbed-neuron list — TODO confirm against the perturbing code.
            perturbed_npids = Process.get(:perturbed, [])
            Enum.each(perturbed_npids, fn npid -> send(npid, {self(), :weight_restore}) end)
            # Fitness did not improve: keep the old best, count the attempt.
            {highest_fitness, attempt + 1}
          end

        ## FIXME: Incomplete function — the updated fitness/attempt values
        ## should feed a recursive `loop/13` call (and terminate once
        ## `attempt` exceeds some max_attempts).
        :ok
    end
  end

  # Spawns one process per cerebral unit and records id<->pid both ways.
  def spawn_cerebral_units(ids_and_pids, cerebral_unit_type, [id | next_ids]) do
    pid = apply(cerebral_unit_type, :gen, [self()])
    :ets.insert(ids_and_pids, {id, pid})
    :ets.insert(ids_and_pids, {pid, id})
    spawn_cerebral_units(ids_and_pids, cerebral_unit_type, next_ids)
  end

  def spawn_cerebral_units(_ids_and_pids, _cerebral_unit_type, []), do: true

  # Links a sensor: resolve its fanout pids and send the init message.
  def link_cerebral_units([%Sensor{} = sensor | records], ids_and_pids) do
    sid = sensor.id
    spid = :ets.lookup_element(ids_and_pids, sid, 2)
    cx_pid = :ets.lookup_element(ids_and_pids, sensor.cx_id, 2)
    s_name = sensor.name
    fanout_ids = sensor.fanout_ids
    fanout_pids =
      Enum.map(fanout_ids, fn fanout_id ->
        :ets.lookup_element(ids_and_pids, fanout_id, 2)
      end)
    send(spid, {self(), {sid, cx_pid, s_name, sensor.vl, fanout_pids}})
    link_cerebral_units(records, ids_and_pids)
  end

  # Links an actuator: resolve its fanin pids and send the init message.
  def link_cerebral_units([%Actuator{} = actuator | records], ids_and_pids) do
    aid = actuator.id
    apid = :ets.lookup_element(ids_and_pids, aid, 2)
    cx_pid = :ets.lookup_element(ids_and_pids, actuator.cx_id, 2)
    a_name = actuator.name
    fanin_ids = actuator.fanin_ids
    fanin_pids =
      Enum.map(fanin_ids, fn fanin_id ->
        :ets.lookup_element(ids_and_pids, fanin_id, 2)
      end)
    send(apid, {self(), {aid, cx_pid, a_name, fanin_pids}})
    link_cerebral_units(records, ids_and_pids)
  end

  # Links a neuron: convert id-keyed weights to pid-keyed ones and init it.
  def link_cerebral_units([%Neuron{} = neuron | records], ids_and_pids) do
    nid = neuron.id
    npid = :ets.lookup_element(ids_and_pids, nid, 2)
    cx_pid = :ets.lookup_element(ids_and_pids, neuron.cx_id, 2)
    af_name = neuron.af
    input_idps = neuron.input_idps
    output_ids = neuron.output_ids
    input_pidps = convert_idps_to_pidps(ids_and_pids, input_idps, [])
    output_pids =
      Enum.map(output_ids, fn output_id ->
        :ets.lookup_element(ids_and_pids, output_id, 2)
      end)
    send(npid, {self(), {nid, cx_pid, af_name, input_pidps, output_pids}})
    link_cerebral_units(records, ids_and_pids)
  end

  def link_cerebral_units([], _ids_and_pids), do: :ok

  # End-case when only the bias remains in the fanin list.
  def convert_idps_to_pidps(_ids_and_pids, [{:bias, bias}], acc) do
    Enum.reverse([bias | acc])
  end

  # Look up the pid corresponding to each id to build {pid, weights} pairs.
  def convert_idps_to_pidps(ids_and_pids, [{id, weights} | next_fanin_idps], acc) do
    convert_idps_to_pidps(ids_and_pids, next_fanin_idps, [{:ets.lookup_element(ids_and_pids, id, 2), weights} | acc])
  end

  # Sends the cortex its init message and returns the pid groups.
  def link_cortex(cx, ids_and_pids) do
    cx_id = cx.id
    cx_pid = :ets.lookup_element(ids_and_pids, cx_id, 2)
    sids = cx.sensor_ids
    aids = cx.actuator_ids
    nids = cx.nids
    spids = Enum.map(sids, fn sid -> :ets.lookup_element(ids_and_pids, sid, 2) end)
    apids = Enum.map(aids, fn aid -> :ets.lookup_element(ids_and_pids, aid, 2) end)
    npids = Enum.map(nids, fn nid -> :ets.lookup_element(ids_and_pids, nid, 2) end)
    send(cx_pid, {self(), {cx_id, spids, apids, npids}, 1000})
    # BUGFIX(review): the original returned the `send/2` result (the message
    # itself), so `{spids, npids, apids} = link_cortex(...)` in map/2 bound
    # garbage. Return the pid groups explicitly.
    {spids, npids, apids}
  end

  # Saves the current neuron weights/connections back into the genotype.
  def update_genotype(ids_and_pids, genotype, [{n_id, pidps} | weightps]) do
    neuron_index = Enum.find_index(genotype, fn x -> x.id == n_id end)
    neuron = Enum.at(genotype, neuron_index)
    updated_input_idps = convert_pidps_to_idps(ids_and_pids, pidps, [])
    updated_neuron = %Neuron{neuron | input_idps: updated_input_idps}
    updated_genotype = List.replace_at(genotype, neuron_index, updated_neuron)
    update_genotype(ids_and_pids, updated_genotype, weightps)
  end

  def update_genotype(_ids_and_pids, genotype, []), do: genotype

  def convert_pidps_to_idps(ids_and_pids, [{pid, weights} | next_input_idps], acc) do
    convert_pidps_to_idps(ids_and_pids, next_input_idps, [{:ets.lookup_element(ids_and_pids, pid, 2), weights} | acc])
  end

  def convert_pidps_to_idps(_ids_and_pids, [bias], acc), do: Enum.reverse([{:bias, bias} | acc])
end
|
lib/exoself.ex
| 0.535584
| 0.510619
|
exoself.ex
|
starcoder
|
defmodule Emeck.Helper do
  @moduledoc """
  Utility macros for use in tests; thin wrappers around Erlang's `:meck`.
  """

  # Stubs `m.f` with the `proxy` function.
  # Usage: expect Mod.fun, fn args -> ... end
  defmacro expect({{:., _, [m, f]}, _, []}, proxy) do
    quote do
      :meck.expect(unquote(m), unquote(f), unquote(proxy))
    end
  end

  # Delegates the current call to the original (un-mocked) implementation.
  defmacro passthrough do
    quote do
      :meck.passthrough([])
    end
  end

  # Passthrough with an explicit argument list.
  defmacro passthrough(list) when is_list(list) do
    quote do
      :meck.passthrough(unquote(list))
    end
  end

  # Arity-specific conveniences that wrap the arguments into a list.
  defmacro passthrough(a) do
    quote do
      passthrough([unquote(a)])
    end
  end

  defmacro passthrough(a, b) do
    quote do
      passthrough([unquote(a), unquote(b)])
    end
  end

  defmacro passthrough(a, b, c) do
    quote do
      passthrough([unquote(a), unquote(b), unquote(c)])
    end
  end

  defmacro passthrough(a, b, c, d) do
    quote do
      passthrough([unquote(a), unquote(b), unquote(c), unquote(d)])
    end
  end

  defmacro passthrough(a, b, c, d, e) do
    quote do
      passthrough([unquote(a), unquote(b), unquote(c), unquote(d), unquote(e)])
    end
  end

  # Whether the mocked function was called; the expression may be any of:
  # called String.length
  # called String.length("foo")
  # called &String.length/1
  defmacro called(expr) do
    {m, f, a} = mfa(expr)
    quote do
      :meck.called(unquote(m), unquote(f), unquote(a))
    end
  end

  # Number of matching calls recorded by :meck.
  defmacro call_count(expr) do
    {m, f, a} = mfa(expr)
    quote do
      :meck.num_calls(unquote(m), unquote(f), unquote(a))
    end
  end

  # Clears the recorded call history for `mod`.
  defmacro reset_call(mod) do
    quote do
      :meck.reset(unquote(mod))
    end
  end

  # All recorded calls matching the expression, filtered from :meck's history.
  defmacro calls(expr) do
    {m, f, a} = mfa(expr)
    quote bind_quoted: [m: m, f: f, a: a] do
      list = :meck.history(m)
      Emeck.Util.filter_calls(list, f, a)
    end
  end

  # Arguments of the most recent matching call.
  defmacro call_args(expr) do
    quote do
      {args, _} = last_call(unquote(expr))
      args
    end
  end

  # Return value of the most recent matching call.
  defmacro call_return(expr) do
    quote do
      {_, result} = last_call(unquote(expr))
      result
    end
  end

  defmacro first_call(expr) do
    quote do
      unquote(expr) |> calls |> List.first
    end
  end

  defmacro last_call(expr) do
    quote do
      unquote(expr) |> calls |> List.last
    end
  end

  # Decomposes the supported call shapes into {module, function, args/arity};
  # a bare `Mod.fun` matches any arguments (:_).
  defp mfa({{:., _, [mod, fun]}, _, []}) do
    {mod, fun, :_}
  end

  defp mfa({{:., _, [mod, fun]}, _, args}) do
    {mod, fun, args}
  end

  # Capture syntax: &Mod.fun/arity.
  defp mfa({:&, _, [{:/, _, [{{:., _, [mod, fun]}, _, []}, arity]}]}) do
    {mod, fun, arity}
  end
end
|
lib/emeck/helper.ex
| 0.546859
| 0.551151
|
helper.ex
|
starcoder
|
defmodule Adventofcode.Day10MonitoringStation do
  @moduledoc """
  Advent of Code 2019, day 10: find the asteroid that can see the most other
  asteroids (part 1) and the 200th asteroid vaporized by a rotating laser
  from that location (part 2).
  """
  use Adventofcode

  alias __MODULE__.{
    Asteroids,
    BestLocation,
    MonitoringStation,
    Parser,
    Printer,
    VaporizationOrder
  }

  # Part 1: number of asteroids visible from the best location.
  def part_1(input) do
    input
    |> best_location()
    |> elem(1)
  end

  # Part 2: the 200th vaporized asteroid (index 199), encoded as x*100 + y.
  def part_2(input) do
    {x, y} = input |> vaporization_order() |> Enum.at(200 - 1)
    x * 100 + y
  end

  def best_location(input) do
    input
    |> Parser.parse()
    |> MonitoringStation.try_all()
    |> BestLocation.find()
    |> BestLocation.reachable_asteroids()
  end

  def vaporization_order(input) do
    input
    |> Parser.parse()
    |> MonitoringStation.try_all()
    |> BestLocation.find()
    |> VaporizationOrder.order()
  end

  defmodule Asteroids do
    # data: %{{x, y} => per-location payload}; max_x/max_y are grid bounds.
    @enforce_keys [:data, :max_x, :max_y]
    defstruct data: %{}, max_x: 0, max_y: 0

    def all(%Asteroids{data: data}), do: data

    def locations(%Asteroids{data: data}), do: Map.keys(data)

    # All asteroid locations except {x, y} itself.
    def other_locations(asteroids, {x, y}) do
      locations(asteroids) -- [{x, y}]
    end

    def get(asteroids, {x, y}) do
      {{x, y}, Map.get(asteroids.data, {x, y})}
    end
  end

  defmodule BestLocation do
    # The location with the most distinct angles = most visible asteroids
    # (only the nearest asteroid per angle is visible).
    def find(asteroids) do
      asteroids
      |> Asteroids.all()
      |> Enum.max_by(&length(elem(&1, 1)))
    end

    def reachable_asteroids({location, angles}), do: {location, length(angles)}
  end

  defmodule VaporizationOrder do
    # The laser sweeps angles in ascending order, vaporizing one asteroid per
    # angle per revolution. Zipping the per-angle distance-sorted lists (padded
    # with nil to equal length) produces exactly that interleaving.
    def order({_location, angles}) do
      angles
      |> Enum.sort_by(fn {angle, _asteroids} -> angle end)
      |> Enum.map(fn {_angle, asteroids} -> asteroids end)
      |> expand_lists_to_same_length
      |> List.zip()
      |> Enum.flat_map(&Tuple.to_list/1)
      |> Enum.reject(&is_nil/1)
    end

    # Pads shorter angle-lists with nil so List.zip/1 keeps every element.
    defp expand_lists_to_same_length(asteroids_in_angle_order) do
      max = asteroids_in_angle_order |> Enum.max_by(&length/1) |> length

      asteroids_in_angle_order
      |> Enum.map(&(&1 ++ List.duplicate(nil, max - length(&1))))
    end
  end

  defmodule MonitoringStation do
    # For every asteroid, group all other asteroids by angle (nearest first).
    def try_all(asteroids) do
      %{asteroids | data: do_find(asteroids)}
    end

    defp do_find(asteroids) do
      asteroids
      |> Asteroids.locations()
      |> Enum.map(&{&1, detect_asteroids(&1, asteroids)})
      |> Enum.into(%{})
    end

    defp detect_asteroids({x1, y1}, asteroids) do
      asteroids
      |> Asteroids.other_locations({x1, y1})
      |> Enum.group_by(&angle({{x1, y1}, &1}))
      |> Enum.map(&angle_asteroids_by_distance(&1, {x1, y1}))
    end

    defp angle_asteroids_by_distance({angle, asteroids}, {x1, y1}) do
      {angle, Enum.sort_by(asteroids, &manhattan_distance({x1, y1}, &1))}
    end

    # Angle in degrees, 0 pointing up the grid (dy < 0), increasing clockwise:
    # 90 = right, 180 = down, 270 = left.
    def angle({{x1, y1}, {x2, y2}}) do
      dx = x2 - x1
      dy = y2 - y1

      cond do
        dx == 0 && dy < 0 -> 0
        dy == 0 && dx > 0 -> 90
        dx == 0 && dy > 0 -> 180
        dy == 0 && dx < 0 -> 270
        true -> do_angle({dx, dy}) + 90
      end
    end

    # atan gives (-90, 90); add 180 for the left half-plane so the +90 shift
    # in angle/1 lands every vector in [0, 360).
    defp do_angle({dx, dy}) do
      radian = :math.atan(dy / dx)

      case radian * 180 / :math.pi() do
        degree when dx < 0 -> degree + 180
        degree -> degree
      end
    end

    defp manhattan_distance({x1, y1}, {x2, y2}) do
      abs(x2 - x1) + abs(y2 - y1)
    end
  end

  defmodule Parser do
    # Turns the ASCII grid into an Asteroids struct keyed by {x, y}
    # (x = column, y = row, both starting at 0 in the top-left corner).
    def parse(input) do
      input
      |> String.trim("\n")
      |> String.split("\n")
      |> Enum.map(&String.graphemes/1)
      |> Enum.map(&Enum.with_index/1)
      |> Enum.with_index()
      |> build_asteroids()
    end

    defp build_asteroids(lines) do
      %Asteroids{
        max_x: length(elem(hd(lines), 0)) - 1,
        max_y: length(lines) - 1,
        data: lines |> Enum.reduce(%{}, &build_data/2)
      }
    end

    defp build_data({line, y}, acc) do
      line
      |> Enum.reduce(acc, &parse_location(elem(&1, 0), {elem(&1, 1), y}, &2))
    end

    # "." is empty space; "#"/"X" (and any other char, e.g. digits used in the
    # puzzle examples) mark asteroids.
    defp parse_location(".", {_, _}, acc), do: acc
    defp parse_location("#", {x, y}, acc), do: Map.put(acc, {x, y}, [])
    defp parse_location("X", {x, y}, acc), do: Map.put(acc, {x, y}, [])
    defp parse_location(_num, {x, y}, acc), do: parse_location("#", {x, y}, acc)
  end

  defmodule Printer do
    import IO.ANSI

    # Debug helper: prints the grid highlighting position {x, y}; returns the
    # asteroids unchanged so it can sit inside a pipeline.
    def print(asteroids, {x, y}) do
      IO.puts("\n" <> print_lines({x, y}, asteroids))
      asteroids
    end

    defp print_lines({x, y}, asteroids) do
      0..asteroids.max_y
      |> Enum.to_list()
      |> Enum.map_join("\n", &print_line(&1, {x, y}, asteroids))
    end

    defp print_line(y, pos, asteroids) do
      0..asteroids.max_x
      |> Enum.to_list()
      |> Enum.map_join(&print_pos({&1, y}, pos, asteroids))
    end

    defp print_pos(pos, pos2, asteroids) do
      do_print_pos(Asteroids.get(asteroids, pos), Asteroids.get(asteroids, pos2))
    end

    defp do_print_pos({{_, _}, nil}, {{_, _}, _}), do: "."
    defp do_print_pos({{_, _}, _}, {{_, _}, nil}), do: "#"

    defp do_print_pos({{x, y}, _}, {{x, y}, _}),
      do: format([blue(), "X", reset()])

    # Green when detected from the highlighted position, red otherwise.
    defp do_print_pos({pos, _}, {{_, _}, detected}) do
      if pos in detected, do: format([green(), "#", reset()]), else: format([red(), "#", reset()])
    end

    # NOTE(review): unreachable — the previous clause matches every remaining
    # combination.
    defp do_print_pos({{_, _}, _}, {{_, _}, _}), do: " "
  end
end
|
lib/day_10_monitoring_station.ex
| 0.67694
| 0.524699
|
day_10_monitoring_station.ex
|
starcoder
|
defmodule Dwolla.MassPayment do
  @moduledoc """
  Functions for `mass-payments` endpoint.
  """

  alias Dwolla.Utils

  defstruct id: nil,
            created: nil,
            status: nil,
            metadata: nil,
            funding_transfer_id: nil,
            source_funding_source_id: nil,
            correlation_id: nil,
            total: nil,
            total_fees: nil

  @type t :: %__MODULE__{
          id: String.t(),
          created: String.t(),
          # "deferred" | "pending" | "processing" | "complete" |
          status: String.t(),
          metadata: map(),
          funding_transfer_id: String.t(),
          source_funding_source_id: String.t(),
          correlation_id: String.t(),
          total: Dwolla.Amount.t(),
          total_fees: Dwolla.Amount.t()
        }

  @type token :: String.t()
  @type id :: String.t()
  @type params :: %{required(atom) => any}
  @type error :: HTTPoison.Error.t() | Dwolla.Errors.t() | tuple
  @type location :: %{id: String.t()}

  @endpoint "mass-payments"

  defmodule Item do
    @moduledoc """
    Dwolla Mass Payment Item data structure
    """

    defstruct id: nil,
              dest_resource: nil,
              dest_resource_id: nil,
              amount: nil,
              metadata: nil,
              status: nil,
              errors: nil,
              transfer_id: nil

    @type t :: %__MODULE__{
            id: String.t(),
            dest_resource: String.t(),
            dest_resource_id: String.t(),
            amount: Dwolla.Amount.t(),
            metadata: map(),
            # "failed" | "pending" | "success"
            status: String.t(),
            errors: Dwolla.Errors.t(),
            transfer_id: String.t()
          }
  end

  @doc """
  Initiates a mass payment.

  The parameters are verbose because of the many options available to the user
  for setting the source and destination of the funds in the `href` field.

  Parameters
  ```
  %{
    _links: %{
      source: %{
        href: "https://api-uat.dwolla.com/funding-sources/..."
      }
    },
    items: [
      %{
        _links: %{
          destination: %{
            href: "https://api-uat.dwolla.com/funding-sources/..."
          }
        },
        amount: %{
          value: 1.00,
          currency: "USD"
        }
      }
    ],
    correlation_id: "3e17e4ad-4136-2fd3-a45d-adaa015d494c"
  }
  ```
  """
  @spec initiate(token, params, any) :: {:ok, location} | {:error, error}
  def initiate(token, params, idempotency_key) do
    # The idempotency key guards against duplicate submissions on retry.
    headers = Utils.idempotency_header(idempotency_key)

    Dwolla.make_request_with_token(:post, @endpoint, token, params, headers)
    |> Utils.handle_resp(:mass_payment)
  end

  @doc """
  Gets a mass payment by id.
  """
  @spec get(token, id) :: {:ok, Dwolla.MassPayment.t()} | {:error, error}
  def get(token, id) do
    endpoint = @endpoint <> "/#{id}"

    Dwolla.make_request_with_token(:get, endpoint, token)
    |> Utils.handle_resp(:mass_payment)
  end

  @doc """
  Gets items in a mass payment by id. Results paginated.

  Parameters
  ```
  %{limit: 25, offset: 0, status: "pending"}
  ```
  """
  @spec get_items(token, id, params) :: {:ok, [Dwolla.MassPayment.Item.t()]} | {:error, error}
  def get_items(token, id, params \\ %{}) do
    base = @endpoint <> "/#{id}/items"

    # map_size/1 is O(1); the original built the full key list with
    # Map.keys/1 just to test for emptiness.
    endpoint =
      if map_size(params) == 0 do
        base
      else
        base <> "?" <> Utils.encode_params(params)
      end

    Dwolla.make_request_with_token(:get, endpoint, token)
    |> Utils.handle_resp(:mass_payment_items)
  end
end
|
lib/dwolla/mass_payment.ex
| 0.785103
| 0.532668
|
mass_payment.ex
|
starcoder
|
defmodule Gherkin.ASTBuilder do
alias Gherkin.{ASTNode, Builder, Location, TableCell, TableRow, Tag, Token}
@behaviour Builder
@type t :: %__MODULE__{comments: [comment], stack: [ASTNode.t(), ...]}
@typep comment :: %{location: Location.t(), text: String.t(), type: :Comment}
defstruct comments: [], stack: [%ASTNode{rule_type: :None}]
@impl Builder
def build(%__MODULE__{} = builder, %Token{} = token) do
if token.matched_type === :Comment do
comment = %{location: get_location(token), text: token.matched_text, type: :Comment}
%{builder | comments: [comment | builder.comments]}
else
[current_node | stack] = builder.stack
new_node = ASTNode.add_child(current_node, token.matched_type, token)
%{builder | stack: [new_node | stack]}
end
end
@impl Builder
def end_rule(%__MODULE__{} = builder, rule_type) when is_atom(rule_type) do
[node1, node2 | stack] = builder.stack
comments = :lists.reverse(builder.comments)
new_node = ASTNode.add_child(node2, node1.rule_type, transform_node(node1, comments))
%{builder | stack: [new_node | stack]}
end
@spec transform_node(ASTNode.t(), [comment]) ::
%{required(:type) => atom, optional(atom) => term} | ASTNode.t() | nil
defp transform_node(ast_node, comments) do
case ast_node.rule_type do
:Background -> transform_background_node(ast_node)
:DataTable -> transform_data_table_node(ast_node)
:Description -> transform_description_node(ast_node)
:DocString -> transform_doc_string_node(ast_node)
:Examples_Definition -> transform_examples_definition_node(ast_node)
:Examples_Table -> transform_examples_table_node(ast_node)
:Feature -> transform_feature_node(ast_node)
:GherkinDocument -> transform_gherkin_document_node(ast_node, comments)
:Scenario_Definition -> transform_scenario_definition_node(ast_node)
:Step -> transform_step_node(ast_node)
_ -> ast_node
end
end
@spec transform_background_node(ASTNode.t()) :: %{
required(:type) => :Background,
optional(:description) => String.t(),
optional(:keyword) => String.t(),
optional(:location) => Location.t(),
optional(:name) => String.t(),
optional(:steps) => list
}
defp transform_background_node(ast_node) do
token = ASTNode.get_item(ast_node, :BackgroundLine)
reject_nils(%{
description: get_description(ast_node),
keyword: token.matched_keyword,
location: get_location(token),
name: token.matched_text,
steps: get_steps(ast_node),
type: :Background
})
end
@spec get_steps(ASTNode.t()) :: list
defp get_steps(ast_node), do: ASTNode.get_children(ast_node, :Step)
@spec transform_data_table_node(ASTNode.t()) :: %{
required(:type) => :DataTable,
optional(:location) => Location.t(),
optional(:rows) => [TableRow.t()]
}
defp transform_data_table_node(ast_node) do
[%{location: location} | _] = rows = get_table_rows(node)
reject_nils(%{location: location, rows: rows, type: :DataTable})
end
@spec get_table_rows(ASTNode.t()) :: [TableRow.t()]
defp get_table_rows(ast_node) do
tokens = ASTNode.get_children(ast_node, :TableRow)
rows = for t <- tokens, do: %{cells: get_cells(t), location: get_location(t), type: :TableRow}
ensure_cell_count(rows)
rows
end
@spec get_cells(Token.t()) :: [TableCell.t()]
defp get_cells(token) do
for item <- token.matched_items,
do: %{
location: get_location(token, item.column),
type: :TableCell,
value: item.text
}
end
@spec ensure_cell_count([TableRow.t()]) :: :ok | no_return
defp ensure_cell_count([]), do: :ok
defp ensure_cell_count([%{cells: cells} | rows]) do
cell_count = length(cells)
Enum.each(rows, fn row ->
if length(row.cells) !== cell_count do
raise ASTBuilderError,
location: row.location,
message: "inconsistent cell count within the table"
end
end)
end
@spec transform_description_node(ASTNode.t()) :: String.t()
defp transform_description_node(ast_node) do
ast_node
|> ASTNode.get_children(:Other)
|> Stream.take_while(&(&1.line.trimmed_line_text !== ""))
|> Enum.map_join("\n", & &1.matched_text)
end
@spec transform_doc_string_node(ASTNode.t()) :: %{
required(:type) => :DocString,
optional(:content) => String.t(),
optional(:content_type) => String.t(),
optional(:location) => Location.t()
}
defp transform_doc_string_node(ast_node) do
token = ASTNode.get_item(ast_node, :DocStringSeparator)
content =
ast_node
|> ASTNode.get_children(:Other)
|> Enum.map_join("\n", & &1.matched_text)
reject_nils(%{
content: content,
content_type: scrub(token.matched_text),
location: get_location(token),
type: :DocString
})
end
@spec scrub(String.t()) :: String.t() | nil
defp scrub(""), do: nil
defp scrub(string) when is_binary(string), do: string
@spec transform_examples_definition_node(ASTNode.t()) :: %{
required(:type) => :Examples_Definition,
optional(:description) => String.t(),
optional(:keyword) => String.t(),
optional(:location) => Location.t(),
optional(:name) => String.t(),
optional(:tableBody) => term,
optional(:tableHeader) => term,
optional(:tags) => [Tag.t()]
}
defp transform_examples_definition_node(ast_node) do
examples_node = ASTNode.get_child(ast_node, :Examples)
token = ASTNode.get_item(examples_node, :ExampleLine)
examples_table_node = ASTNode.get_child(examples_node, :Examples_Table)
reject_nils(%{
description: get_description(examples_node),
keyword: token.matched_keyword,
location: get_location(token),
name: token.matched_text,
tableBody: examples_table_node && examples_table_node.tableBody,
tableHeader: examples_table_node && examples_table_node.tableHeader,
tags: get_tags(ast_node),
type: examples_node.rule_type
})
end
@spec get_tags(ASTNode.t()) :: [Tag.t()]
defp get_tags(ast_node) do
if tags_node = ASTNode.get_child(ast_node, :Tags) do
for token <- ASTNode.get_children(ast_node, :TagLine),
tag_item <- token.matched_items,
do: %{
location: get_location(token, tag_item.column),
name: tag_item.text,
type: :Tag
}
else
[]
end
end
@spec get_description(ASTNode.t()) :: String.t() | nil
defp get_description(ast_node), do: ASTNode.get_child(ast_node, :Description)
@spec transform_examples_table_node(ASTNode.t()) :: %{
optional(:tableBody) => [TableRow.t()],
optional(:tableHeader) => TableRow.t()
}
defp transform_examples_table_node(ast_node) do
[header | body] = get_table_rows(ast_node)
reject_nils(%{tableBody: body, tableHeader: header})
end
@spec transform_feature_node(ASTNode.t()) ::
%{
required(:type) => :Feature,
optional(:children) => list,
optional(:description) => String.t(),
optional(:keyword) => String.t(),
optional(:language) => String.t(),
optional(:location) => Location.t(),
optional(:name) => String.t(),
optional(:tags) => [Tag.t()]
}
| nil
defp transform_feature_node(ast_node) do
if feature_header_node = ASTNode.get_child(ast_node, :Feature_Header) do
if token = ASTNode.get_item(feature_header_node, :FeatureLine) do
scenario = ASTNode.get_children(ast_node, :Scenario_Definition)
children =
if background_node = ASTNode.get_child(ast_node, :Background),
do: [background_node | scenario],
else: scenario
reject_nils(%{
children: children,
description: get_description(feature_header_node),
keyword: token.matched_keyword,
language: token.matched_gherkin_dialect,
location: get_location(token),
name: token.matched_text,
tags: get_tags(feature_header_node),
type: :Feature
})
end
end
end
@spec transform_gherkin_document_node(ASTNode.t(), [comment]) :: %{
required(:type) => :GherkinDocument,
optional(:comments) => [comment],
optional(:feature) => ASTNode.t()
}
defp transform_gherkin_document_node(ast_node, comments),
do:
reject_nils(%{
comments: comments,
feature: ASTNode.get_child(ast_node, :Feature),
type: :GherkinDocument
})
@spec transform_scenario_definition_node(ASTNode.t()) :: %{
required(:type) => atom,
optional(:description) => String.t(),
optional(:examples) => list,
optional(:keyword) => String.t(),
optional(:location) => Location.t(),
optional(:name) => String.t(),
optional(:steps) => list,
optional(:tags) => [Tag.t()]
}
defp transform_scenario_definition_node(ast_node) do
tags = get_tags(ast_node)
if scenario_node = ASTNode.get_child(ast_node, :Scenario) do
token = ASTNode.get_item(scenario_node, :ScenarioLine)
reject_nils(%{
description: get_description(scenario_node),
keyword: token.matched_keyword,
location: get_location(token),
name: token.matched_text,
steps: get_steps(scenario_node),
tags: tags,
type: scenario_node.rule_type
})
else
scenario_outline_node = ASTNode.get_child(ast_node, :ScenarioOutline)
if !scenario_outline_node do
raise "Internal grammar error"
end
token = ASTNode.get_item(scenario_outline_node, :ScenarioOutlineLine)
examples = ASTNode.get_children(scenario_outline_node, :Examples_Definition)
reject_nils(%{
description: get_description(scenario_outline_node),
examples: examples,
keyword: token.matched_keyword,
location: get_location(token),
name: token.matched_text,
steps: get_steps(scenario_outline_node),
tags: tags,
type: scenario_outline_node.rule_type
})
end
end
@spec transform_step_node(ASTNode.t()) :: %{
        required(:type) => :Step,
        optional(:argument) => term,
        optional(:keyword) => String.t(),
        optional(:location) => Location.t(),
        optional(:text) => String.t()
      }
defp transform_step_node(ast_node) do
  # A step carries at most one argument: a data table or a doc string.
  argument =
    ASTNode.get_child(ast_node, :DataTable) || ASTNode.get_child(ast_node, :DocString)

  token = ASTNode.get_item(ast_node, :StepLine)

  %{
    argument: argument,
    keyword: token.matched_keyword,
    location: get_location(token),
    text: token.matched_text,
    type: :Step
  }
  |> reject_nils()
end
@spec get_location(Token.t(), non_neg_integer) :: Location.t()
defp get_location(%{location: location}, column \\ 0) do
  # Column 0 means "use the token's own column"; any other value
  # overrides the column while keeping the line.
  if column == 0 do
    location
  else
    %{location | column: column}
  end
end
@spec reject_nils(map) :: map
defp reject_nils(map) do
  # Drop every key whose value is nil, keeping the rest of the map intact.
  map
  |> Enum.reject(fn {_key, value} -> is_nil(value) end)
  |> Map.new()
end
@impl Builder
def get_result(%__MODULE__{stack: [top | _rest]}) do
  # The finished document is attached to the node on top of the stack.
  ASTNode.get_child(top, :GherkinDocument)
end
@impl Builder
def start_rule(%__MODULE__{stack: stack} = builder, rule_type) when is_atom(rule_type) do
  # Push a fresh AST node for the newly-opened rule onto the stack.
  %{builder | stack: [%ASTNode{rule_type: rule_type} | stack]}
end
end
|
gherkin/elixir/lib/gherkin/ast_builder.ex
| 0.826537
| 0.490419
|
ast_builder.ex
|
starcoder
|
# Plain assertion failure carrying only a message; raised by `flunk/1` and
# the message-based `assert/2` / `refute/2` variants.
defexception ExUnit.AssertionError, message: "assertion failed"
# Rich expectation failure: records the expected/actual values (already
# rendered to strings), the assertion verb (e.g. "be equal to (==)"),
# whether the assertion was negated, an optional message prelude, and the
# source expression text when it is known.
defexception ExUnit.ExpectationError, expected: nil, actual: nil, assertion: "",
    negation: false, prelude: "Expected", expr: nil do
  # Builds the failure message. When the source expression (`expr`) is
  # available it is woven into the sentence; otherwise only the expected
  # and actual values are compared.
  def message(exception) do
    if desc = exception.expr do
      "#{exception.prelude} #{desc} #{exception.full_assertion} " <>
      "#{exception.expected}. Instead got #{exception.actual}"
    else
      "#{exception.prelude} #{exception.expected} " <>
      "#{exception.full_assertion} #{exception.actual}"
    end
  end

  # Joins "to" / "to not" with the assertion verb, honouring negation.
  def full_assertion(exception) do
    "to" <> if(exception.negation, do: " not ", else: " ") <> exception.assertion
  end
end
defmodule ExUnit.Assertions do
  @moduledoc """
  This module contains a set of assertion functions that are
  imported by default into your test cases.
  In general, a developer will want to use the general
  `assert` macro in tests. This macro tries to be smart
  and provide good reporting whenever there is a failure.
  For example, `assert some_fun() == 10` will fail (assuming
  `some_fun()` returns 13):
  Expected 10 to be equal to 13
  This module also provides other convenience functions
  like `assert_in_delta` and `assert_raise` to easily handle other
  common cases such as checking a floating point number or handling exceptions.
  """

  # NOTE(review): this module uses pre-1.0 Elixir syntax (`//` defaults,
  # record-based exceptions, `self <- msg`) — it targets an old compiler and
  # must not be modernized without also migrating the whole code base.

  @doc """
  Asserts the `expected` value is true.
  `assert` in general tries to be smart and provide good
  reporting whenever there is a failure. For example,
  `assert 10 > 15` is going to fail with the message:
  Expected 10 to be more than 15
  ## Examples
  assert true
  """
  defmacro assert(expected) do
    # Try to rewrite the expression into an operator-aware assertion with a
    # rich failure message; fall back to a plain truthiness check when no
    # rewrite applies.
    case translate_assertion(expected) do
      nil ->
        # Default message in case no transform was performed
        quote do
          value = unquote(expected)
          unless value do
            raise ExUnit.ExpectationError,
              expr: unquote(Macro.to_string(expected)),
              assertion: "be",
              expected: "true",
              actual: inspect(value)
          end
          value
        end
      value -> value
    end
  end

  @doc """
  Refutes the `expected` value is true.
  `refute` in general tries to be smart and provide good
  reporting whenever there is a failure.
  ## Examples
  refute false
  """
  defmacro refute(expected) do
    # Negation is implemented by translating `!expected` and then negating
    # the generated assertion's result once more at the end.
    contents = case translate_assertion({ :!, [], [expected] }) do
      nil ->
        # Default message in case no transform was performed
        quote do
          value = unquote(expected)
          if value do
            raise ExUnit.ExpectationError,
              expr: unquote(Macro.to_string(expected)),
              assertion: "be",
              expected: "false",
              actual: inspect(value)
          end
          true
        end
      value -> value
    end
    { :!, [], [contents] }
  end

  ## START HELPERS

  # Each translate_assertion/1 clause pattern-matches one recognised
  # operator in the quoted expression and returns a replacement AST with a
  # descriptive failure message, or nil (the fallback clause) when the
  # expression is not recognised.

  defp translate_assertion({ :=, _, [left, right] }) do
    quote do
      right = unquote(right)
      case right do
        unquote(left) ->
          right
        _ ->
          raise ExUnit.ExpectationError,
            expected: inspect(right),
            actual: unquote(Macro.to_string(left)),
            assertion: "match pattern (=)"
      end
    end
  end

  defp translate_assertion({ :==, _, [left, right] }) do
    assert_operator :==, left, right, "be equal to (==)"
  end

  defp translate_assertion({ :<, _, [left, right] }) do
    assert_operator :<, left, right, "be less than"
  end

  defp translate_assertion({ :>, _, [left, right] }) do
    assert_operator :>, left, right, "be more than"
  end

  defp translate_assertion({ :<=, _, [left, right] }) do
    assert_operator :<=, left, right, "be less than or equal to"
  end

  defp translate_assertion({ :>=, _, [left, right] }) do
    assert_operator :>=, left, right, "be more than or equal to"
  end

  defp translate_assertion({ :===, _, [left, right] }) do
    assert_operator :===, left, right, "be equal to (===)"
  end

  defp translate_assertion({ :!==, _, [left, right] }) do
    assert_operator :!==, left, right, "be not equal to (!==)"
  end

  defp translate_assertion({ :!=, _, [left, right] }) do
    assert_operator :!=, left, right, "be not equal to (!=)"
  end

  defp translate_assertion({ :=~, _, [left, right] }) do
    assert_operator :=~, left, right, "match (=~)"
  end

  defp translate_assertion({ :in, _, [left, right] }) do
    quote do
      left = unquote(left)
      right = unquote(right)
      assert Enum.member?(right, left), left, right, assertion: "be in"
    end
  end

  ## Negative versions

  defp translate_assertion({ :!, _, [{ :=, _, [left, right] }] }) do
    quote do
      right = unquote(right)
      case right do
        unquote(left) ->
          raise ExUnit.ExpectationError,
            expected: inspect(right),
            actual: unquote(Macro.to_string(left)),
            assertion: "match pattern (=)",
            negation: true
        _ ->
          nil
      end
    end
  end

  defp translate_assertion({ :!, _, [{ :=~, _, [left, right] }] }) do
    quote do
      left = unquote(left)
      right = unquote(right)
      assert !(left =~ right), left, right, assertion: "match (=~)", negation: true
    end
  end

  defp translate_assertion({ negation, _, [{ :in, _, [left, right] }] }) when negation in [:!, :not] do
    quote do
      left = unquote(left)
      right = unquote(right)
      assert !Enum.member?(right, left), left, right, assertion: "be in", negation: true
    end
  end

  ## Fallback

  defp translate_assertion(_expected) do
    nil
  end

  # Shared expansion for all binary comparison operators: evaluate both
  # sides once, then delegate to assert/4 with the rendered values.
  defp assert_operator(operator, expected, actual, text) do
    quote do
      left = unquote(expected)
      right = unquote(actual)
      assert unquote(operator)(left, right), left, right, unquote(text)
    end
  end

  ## END HELPERS

  @doc """
  Asserts the `expected` value is true.
  If it fails, raises the given `message`.
  ## Examples
  assert false, "it will never be true"
  """
  def assert(expected, message) when is_binary(message) do
    unless expected, do: raise(ExUnit.AssertionError, message: message)
    true
  end

  @doc """
  Asserts the `expected` value is true.
  If it fails, it raises an expectation error
  using the given `expected` and `actual` values.
  ## Examples
  assert this > that, this, that, "more than"
  """
  def assert(value, expected, actual, content) when is_binary(content) do
    assert(value, expected, actual, assertion: content)
  end

  def assert(value, expected, actual, opts) do
    unless value do
      raise ExUnit.ExpectationError,
        Keyword.merge([expected: inspect(expected), actual: inspect(actual)], opts)
    end
    true
  end

  @doc """
  Asserts a message was or is going to be received. Unlike
  `assert_received`, it has a default timeout of 100 milliseconds.
  The given `expected` argument has to be a pattern.
  ## Examples
  assert_receive :hello
  Asserts against a larger timeout:
  assert_receive :hello, 20_000
  You can also match against specific patterns:
  assert_receive { :hello, _ }
  x = 5
  assert_receive { :count, ^x }
  """
  defmacro assert_receive(expected, timeout // 100, message // nil) do
    do_assert_receive(expected, timeout, message)
  end

  @doc """
  Asserts a message was received and is in the current process' mailbox.
  Timeout is set to 0, so there is no waiting time.
  The given `expected` argument has to be a pattern.
  ## Examples
  self <- :hello
  assert_received :hello
  You can also match against specific patterns:
  self <- { :hello, "world" }
  assert_received { :hello, _ }
  """
  defmacro assert_received(expected, message // nil) do
    do_assert_receive(expected, 0, message)
  end

  # Generates a `receive` that returns the matching message or flunks after
  # `timeout` milliseconds with the pattern rendered into the message.
  defp do_assert_receive(expected, timeout, message) do
    binary = Macro.to_string(expected)
    quote do
      receive do
        unquote(expected) = received -> received
      after
        unquote(timeout) ->
          flunk unquote(message) || "Expected to have received message matching #{unquote binary}"
      end
    end
  end

  @doc """
  Asserts the `exception` is raised during `function` execution with
  the `expected_message`. Returns the rescued exception, fails otherwise.
  ## Examples
  assert_raise ArithmeticError, "bad argument in arithmetic expression", fn ->
  1 + "test"
  end
  """
  def assert_raise(exception, message, function) when is_function(function) do
    error = assert_raise(exception, function)
    # The message may be an exact binary or a regex to match against.
    is_match = case message do
      re when is_regex(re) -> error.message =~ re
      bin when is_binary(bin) -> error.message == bin
    end
    assert is_match, message, error.message,
      prelude: "Expected error message", assertion: "match"
    error
  end

  @doc """
  Asserts the `exception` is raised during `function` execution.
  Returns the rescued exception, fails otherwise.
  ## Examples
  assert_raise ArithmeticError, fn ->
  1 + "test"
  end
  """
  def assert_raise(exception, function) when is_function(function) do
    try do
      function.()
      flunk "Expected #{inspect exception} exception but nothing was raised"
    rescue
      error in [exception] -> error
      error ->
        # Re-raise ExUnit's own failures (including the flunk above) so they
        # are not swallowed by this rescue; anything else is a wrong-type
        # exception and fails the assertion.
        name = error.__record__(:name)
        if name in [ExUnit.AssertionError, ExUnit.ExpectationError] do
          raise(error)
        else
          flunk "Expected exception #{inspect exception}, got #{inspect name} (#{error.message})"
        end
    end
  end

  @doc """
  Asserts the `expected` and `received` are within `delta`.
  ## Examples
  assert_in_delta 1.1, 1.5, 0.2
  assert_in_delta 10, 15, 4
  """
  def assert_in_delta(expected, received, delta, message // nil) do
    diff = abs(expected - received)
    message = message ||
      "Expected |#{inspect expected} - #{inspect received}| (#{inspect diff}) to be < #{inspect delta}"
    assert diff < delta, message
  end

  @doc """
  Asserts the given `expression` will throw a value.
  Returns the thrown value or fails otherwise.
  ## Examples
  assert catch_throw(throw 1) == 1
  """
  defmacro catch_throw(expression) do
    do_catch(:throw, expression)
  end

  @doc """
  Asserts the given `expression` will exit.
  Returns the exit status/message or fails otherwise.
  ## Examples
  assert catch_exit(exit 1) == 1
  """
  defmacro catch_exit(expression) do
    do_catch(:exit, expression)
  end

  @doc """
  Asserts the given `expression` will cause an error.
  Returns the error or fails otherwise.
  ## Examples
  assert catch_error(error 1) == 1
  """
  defmacro catch_error(expression) do
    do_catch(:error, expression)
  end

  # Generates a try/catch that returns whatever was thrown/exited/errored
  # with the given `kind`, flunks when nothing is caught, and re-raises
  # ExUnit's own failures untouched.
  defp do_catch(kind, expr) do
    quote do
      try do
        unquote(expr)
        flunk "Expected to catch #{unquote(kind)}, got nothing"
      rescue
        e in [ExUnit.AssertionError, ExUnit.ExpectationError] -> raise(e)
      catch
        unquote(kind), what_we_got -> what_we_got
      end
    end
  end

  @doc """
  Asserts the `not_expected` value is `nil` or `false`.
  In case it is a truthy value, raises the given message.
  ## Examples
  refute true, "This will obviously fail"
  """
  def refute(not_expected, message) do
    # assert/2 returns true on success, so this returns false.
    not assert(!not_expected, message)
  end

  @doc """
  Asserts a message was not received and won't be within
  the `timeout` period.
  The `not_expected` argument must be a match pattern.
  ## Examples
  refute_receive :bye
  Refute received with a explicit timeout:
  refute_receive :bye, 1000
  """
  defmacro refute_receive(not_expected, timeout // 100, message // nil) do
    do_refute_receive(not_expected, timeout, message)
  end

  @doc """
  Asserts a message was not received (i.e. it is not in the current process mailbox).
  The `not_expected` argument must be a match pattern.
  Timeout is set to 0, so there is no waiting time.
  ## Examples
  self <- :hello
  refute_received :bye
  """
  defmacro refute_received(not_expected, message // nil) do
    do_refute_receive(not_expected, 0, message)
  end

  # Generates a `receive` that flunks if a matching message arrives within
  # `timeout` milliseconds and returns false otherwise.
  defp do_refute_receive(not_expected, timeout, message) do
    binary = Macro.to_string(not_expected)
    quote do
      receive do
        unquote(not_expected) = actual ->
          flunk unquote(message) || "Expected to not have received message matching #{unquote binary}, got #{inspect actual}"
      after
        unquote(timeout) -> false
      end
    end
  end

  @doc """
  Asserts the `expected` and `received` are not within `delta`.
  ## Examples
  refute_in_delta 1.1, 1.2, 0.2
  refute_in_delta 10, 11, 2
  """
  def refute_in_delta(expected, received, delta, message // nil) do
    diff = abs(expected - received)
    message = message ||
      "Expected |#{inspect expected} - #{inspect received}| (#{inspect diff}) to not be < #{inspect delta}"
    refute diff < delta, message
  end

  @doc """
  Fails with a message.
  ## Examples
  flunk "This should raise an error"
  """
  @spec flunk :: no_return
  @spec flunk(String.t) :: no_return
  def flunk(message // "Flunked!") do
    raise ExUnit.AssertionError, message: message
  end
end
|
lib/ex_unit/lib/ex_unit/assertions.ex
| 0.913121
| 0.758018
|
assertions.ex
|
starcoder
|
defmodule BSV.OpCode do
  @moduledoc """
  Module for accessing Bitcoin Script Op Codes.
  Bitcoin Script provides a number of operations or commands, known as Op Codes.
  When the script is evaluated, the Op Codes manipulate the stack in some way.
  Within a script, an Op Code is single byte integer. Op Codes can also be
  referenced by an atom representing the word or name of the Op Code.
  """

  @typedoc """
  Op Code
  Represented as either an `t:atom/0` or an `t:integer/0`.
  """
  @type t() :: atom() | integer()

  # Canonical name -> byte map. Note that some bytes have two names
  # (e.g. 0 is both OP_0 and OP_FALSE, 81 is OP_TRUE and OP_1), so the
  # integer -> atom direction is not unique for those values.
  @op_codes %{
    # push value
    OP_0: 0,
    OP_FALSE: 0,
    OP_PUSHDATA1: 76,
    OP_PUSHDATA2: 77,
    OP_PUSHDATA4: 78,
    OP_1NEGATE: 79,
    OP_RESERVED: 80,
    OP_TRUE: 81,
    OP_1: 81,
    OP_2: 82,
    OP_3: 83,
    OP_4: 84,
    OP_5: 85,
    OP_6: 86,
    OP_7: 87,
    OP_8: 88,
    OP_9: 89,
    OP_10: 90,
    OP_11: 91,
    OP_12: 92,
    OP_13: 93,
    OP_14: 94,
    OP_15: 95,
    OP_16: 96,
    # control
    OP_NOP: 97,
    OP_VER: 98,
    OP_IF: 99,
    OP_NOTIF: 100,
    OP_VERIF: 101,
    OP_VERNOTIF: 102,
    OP_ELSE: 103,
    OP_ENDIF: 104,
    OP_VERIFY: 105,
    OP_RETURN: 106,
    # stack ops
    OP_TOALTSTACK: 107,
    OP_FROMALTSTACK: 108,
    OP_2DROP: 109,
    OP_2DUP: 110,
    OP_3DUP: 111,
    OP_2OVER: 112,
    OP_2ROT: 113,
    OP_2SWAP: 114,
    OP_IFDUP: 115,
    OP_DEPTH: 116,
    OP_DROP: 117,
    OP_DUP: 118,
    OP_NIP: 119,
    OP_OVER: 120,
    OP_PICK: 121,
    OP_ROLL: 122,
    OP_ROT: 123,
    OP_SWAP: 124,
    OP_TUCK: 125,
    # splice ops
    OP_CAT: 126,
    OP_SPLIT: 127,
    OP_NUM2BIN: 128,
    OP_BIN2NUM: 129,
    OP_SIZE: 130,
    # bit logic
    OP_INVERT: 131,
    OP_AND: 132,
    OP_OR: 133,
    OP_XOR: 134,
    OP_EQUAL: 135,
    OP_EQUALVERIFY: 136,
    OP_RESERVED1: 137,
    OP_RESERVED2: 138,
    # numeric
    OP_1ADD: 139,
    OP_1SUB: 140,
    OP_2MUL: 141,
    OP_2DIV: 142,
    OP_NEGATE: 143,
    OP_ABS: 144,
    OP_NOT: 145,
    OP_0NOTEQUAL: 146,
    OP_ADD: 147,
    OP_SUB: 148,
    OP_MUL: 149,
    OP_DIV: 150,
    OP_MOD: 151,
    OP_LSHIFT: 152,
    OP_RSHIFT: 153,
    OP_BOOLAND: 154,
    OP_BOOLOR: 155,
    OP_NUMEQUAL: 156,
    OP_NUMEQUALVERIFY: 157,
    OP_NUMNOTEQUAL: 158,
    OP_LESSTHAN: 159,
    OP_GREATERTHAN: 160,
    OP_LESSTHANOREQUAL: 161,
    OP_GREATERTHANOREQUAL: 162,
    OP_MIN: 163,
    OP_MAX: 164,
    OP_WITHIN: 165,
    # crypto
    OP_RIPEMD160: 166,
    OP_SHA1: 167,
    OP_SHA256: 168,
    OP_HASH160: 169,
    OP_HASH256: 170,
    OP_CODESEPARATOR: 171,
    OP_CHECKSIG: 172,
    OP_CHECKSIGVERIFY: 173,
    OP_CHECKMULTISIG: 174,
    OP_CHECKMULTISIGVERIFY: 175,
    OP_CHECKLOCKTIMEVERIFY: 177,
    OP_CHECKSEQUENCEVERIFY: 178,
    # expansion
    OP_NOP1: 176,
    OP_NOP2: 177,
    OP_NOP3: 178,
    OP_NOP4: 179,
    OP_NOP5: 180,
    OP_NOP6: 181,
    OP_NOP7: 182,
    OP_NOP8: 183,
    OP_NOP9: 184,
    OP_NOP10: 185,
    # template matching params
    OP_SMALLDATA: 249,
    OP_SMALLINTEGER: 250,
    OP_PUBKEYS: 251,
    OP_PUBKEYHASH: 253,
    OP_PUBKEY: 254,
    OP_INVALIDOPCODE: 255
  }

  @doc """
  Returns a map of all Op Codes.
  """
  @spec all() :: map()
  def all(), do: @op_codes

  @doc """
  Returns an `t:atom/0` Op Code from the given value. Returns nil if the value
  is not a valid Op Code.
  ## Examples
  iex> BSV.OpCode.to_atom :OP_RETURN
  :OP_RETURN
  iex> BSV.OpCode.to_atom "op_return"
  :OP_RETURN
  iex> BSV.OpCode.to_atom <<106>>
  :OP_RETURN
  iex> BSV.OpCode.to_atom 106
  :OP_RETURN
  iex> BSV.OpCode.to_atom :UNKNOWN_CODE
  nil
  """
  @spec to_atom(atom() | binary() | String.t() | integer()) :: t() | nil
  def to_atom(op) when is_atom(op),
    do: Enum.find_value(@op_codes, fn {k, _v} -> if k == op, do: k end)

  # Single-byte binaries are treated as the opcode byte, not a name.
  def to_atom(<<op>>) when is_integer(op), do: to_atom(op)

  def to_atom(op) when is_binary(op) do
    # Fixed: previously this called String.to_existing_atom/1 unguarded, so
    # unknown names raised ArgumentError (and names of existing non-opcode
    # atoms were returned unvalidated) instead of returning nil as documented.
    case opcode_key(op) do
      nil -> nil
      key -> to_atom(key)
    end
  end

  # 0 maps to both OP_0 and OP_FALSE; pick OP_FALSE deterministically.
  def to_atom(0), do: :OP_FALSE

  def to_atom(op) when is_integer(op) and op in 0..255,
    do: Enum.find_value(@op_codes, fn {k, v} -> if v == op, do: k end)

  @doc """
  Returns an `t:atom/0` Op Code from the given value.
  As `to_atom/1` but raises an error if the value is not a valid Op Code.
  """
  @spec to_atom!(atom() | String.t() | binary() | integer()) :: t()
  def to_atom!(op) do
    case to_atom(op) do
      nil -> raise BSV.DecodeError, {:invalid_opcode, op}
      opcode -> opcode
    end
  end

  @doc """
  Returns an `t:integer/0` Op Code from the given value. Returns nil if the
  value is not a valid Op Code.
  ## Examples
  iex> BSV.OpCode.to_integer :OP_RETURN
  106
  iex> BSV.OpCode.to_integer "op_return"
  106
  iex> BSV.OpCode.to_integer <<106>>
  106
  iex> BSV.OpCode.to_integer 106
  106
  iex> BSV.OpCode.to_integer :UNKNOWN_CODE
  nil
  """
  @spec to_integer(atom() | binary() | String.t()) :: t() | nil
  def to_integer(op) when is_atom(op), do: @op_codes[op]

  # Fixed: delegate to the validating integer clause so an invalid byte
  # (e.g. <<200>>) returns nil instead of being passed through unchecked.
  def to_integer(<<op>>) when is_integer(op), do: to_integer(op)

  def to_integer(op) when is_binary(op) do
    # Fixed: same safety issue as to_atom/1 for binary names (see above).
    case opcode_key(op) do
      nil -> nil
      key -> to_integer(key)
    end
  end

  def to_integer(op) when is_integer(op) and op in 0..255,
    do: Enum.find_value(@op_codes, fn {_k, v} -> if v == op, do: v end)

  @doc """
  Returns an `t:integer/0` Op Code from the given value.
  As `to_integer/1` but raises an error if the value is not a valid Op Code.
  """
  @spec to_integer!(atom() | binary() | String.t()) :: t()
  def to_integer!(op) do
    case to_integer(op) do
      nil -> raise BSV.DecodeError, {:invalid_opcode, op}
      opcode -> opcode
    end
  end

  # Upcases `name` and returns the matching Op Code key atom, or nil when the
  # name does not correspond to a known Op Code. Uses
  # String.to_existing_atom/1 so no atoms are ever created from input.
  @spec opcode_key(binary()) :: atom() | nil
  defp opcode_key(name) do
    key =
      try do
        name |> String.upcase() |> String.to_existing_atom()
      rescue
        ArgumentError -> nil
      end

    if key != nil and Map.has_key?(@op_codes, key), do: key, else: nil
  end
end
|
lib/bsv/op_code.ex
| 0.877306
| 0.416381
|
op_code.ex
|
starcoder
|
defmodule ExWire.P2P.Manager do
  @moduledoc """
  P2P.Manager handles the logic of the TCP protocol in the P2P network.
  We track state with a `Connection` struct, deriving secrets during an
  auth phase and then handling incoming packets and deciding how to
  respond.
  """
  require Logger
  alias ExWire.Framing.Frame
  alias ExWire.{DEVp2p, Handshake, Packet, TCP}
  alias ExWire.DEVp2p.Session
  alias ExWire.Handshake.Struct.AuthMsgV4
  alias ExWire.P2P.Connection
  alias ExWire.Packet.PacketIdMap
  alias ExWire.Packet.Protocol.Disconnect
  alias ExWire.Struct.Peer
  @doc """
  Function to create an outbound connection with a peer. It expects a `socket`
  and a `peer` to be provided. This function starts the encrypted handshake with
  the `peer`.
  """
  @spec new_outbound_connection(Connection.t()) :: Connection.t()
  def new_outbound_connection(connection_state) do
    # As initiator, generate the auth message for the peer's remote id and
    # send it before any framing secrets exist (hence "unframed").
    handshake =
      connection_state.peer.remote_id
      |> Handshake.new()
      |> Handshake.generate_auth()
    :ok =
      send_unframed_data(
        handshake.encoded_auth_msg,
        connection_state.socket,
        connection_state.peer
      )
    %{connection_state | handshake: handshake}
  end
  @doc """
  Function to create an inbound connection with a peer. It expects a `socket`
  but not a `peer` at this moment. The full peer information will be obtained from
  the socket and the auth message when it arrives.
  """
  @spec new_inbound_connection(Connection.t()) :: Connection.t()
  def new_inbound_connection(connection_state) do
    # As responder we only prepare handshake state; we wait for the peer's
    # auth message before deriving anything.
    handshake = Handshake.new_response()
    %{connection_state | handshake: handshake}
  end
  @doc """
  Handle inbound messages from a peer node.
  First we must ensure we perform an encrypted handshake. If such a handshake
  has already occurred, then we should have derived the `secrets`. In that case,
  we take the message to be a `packet`.
  If we haven't yet completed the encrypted handshake, we'll await for an auth
  or an ack message as appropriate. If this is an outbound connection, then we
  assume we have sent the auth message, and we're looking for an ack response.
  If this is an inbound connection, we assume the peer will send an auth message
  first, so we await for that message.
  TODO: clients may send an auth before (or as) we do, and we should handle this
  case without error.
  """
  # handle inbound message
  # Secrets present -> handshake is done; treat the bytes as framed packets.
  def handle_message(conn = %{secrets: %ExWire.Framing.Secrets{}}, data) do
    handle_packet_data(data, %{conn | last_error: nil})
  end
  # handle outbound message
  # No secrets yet -> still in the encrypted-handshake phase.
  def handle_message(conn = %{handshake: %Handshake{}}, data) do
    conn
    |> handle_encrypted_handshake(data)
    |> prepare_devp2p_session()
  end
  @spec handle_encrypted_handshake(Connection.t(), binary()) :: Connection.t()
  defp handle_encrypted_handshake(conn = %Connection{handshake: handshake}, data) do
    # Initiator expects an ack (we already sent auth); responder expects auth.
    case handshake do
      %Handshake{initiator: true} ->
        handle_acknowledgement_received(data, conn)
      %Handshake{initiator: false} ->
        handle_auth_message_received(data, conn)
    end
  end
  @spec prepare_devp2p_session(Connection.t()) :: Connection.t()
  # Once secrets are derived, immediately start the DEVp2p session by
  # sending our Hello packet; otherwise leave the connection untouched.
  defp prepare_devp2p_session(conn = %Connection{secrets: %ExWire.Framing.Secrets{}}) do
    session = initiate_dev_p2p_session()
    conn
    |> Map.put(:session, session)
    |> send_packet(session.hello_sent)
  end
  defp prepare_devp2p_session(conn), do: conn
  @spec handle_packet_data(binary(), Connection.t()) :: Connection.t()
  defp handle_packet_data(data, conn) when byte_size(data) == 0, do: conn
  defp handle_packet_data(data, conn) do
    %Connection{peer: peer, secrets: secrets, session: session} = conn
    # Prepend any bytes left over from a previous partial frame.
    total_data = conn.queued_data <> data
    case Frame.unframe(total_data, secrets) do
      {:ok, message_id, packet_data, frame_rest, updated_secrets} ->
        # Frame decoded: rotate secrets and clear the queue before handling.
        conn_after_unframe = %{
          conn
          | secrets: updated_secrets,
            queued_data: <<>>
        }
        conn_after_handle =
          case get_packet(session, message_id, packet_data) do
            {:ok, packet_mod, packet} ->
              :ok =
                Logger.debug(fn ->
                  "[Network] [#{peer}] Got packet `#{inspect(packet_mod)}` from #{peer.host_name}"
                end)
              _ = notify_subscribers(packet, conn_after_unframe)
              new_conn = handle_packet(packet_mod, packet, conn_after_unframe)
              new_conn
            :unknown_packet_type ->
              :ok =
                Logger.error(fn ->
                  "[Network] [#{peer}] Got unknown packet `#{message_id}` from #{peer.host_name}"
                end)
              conn_after_unframe
          end
        # TODO: How does this work exactly? Is this for multiple frames?
        # (Recurses on the residual bytes so several frames received in one
        # TCP read are all processed; byte_size == 0 clause terminates.)
        handle_packet_data(frame_rest, conn_after_handle)
      {:error, :insufficient_data} ->
        # Not enough bytes for a full frame yet: stash and wait for more.
        %{conn | queued_data: total_data}
      {:error, reason} ->
        # NOTE(review): the trailing `)` inside this log string looks like a
        # typo, but it is runtime output so it is left untouched here.
        _ =
          Logger.error(
            "[Network] [#{peer}] Failed to read incoming packet from #{peer.host_name} `#{
              to_string(reason)
            }`)"
          )
        %{conn | last_error: reason}
    end
  end
  @spec handle_packet(module(), Packet.packet(), Connection.t()) :: Connection.t()
  defp handle_packet(packet_mod, packet, conn) do
    # Let the packet module interpret the packet, then dispatch on the
    # (session status, handler response) pair.
    packet_handle_response = packet_mod.handle(packet)
    session_status = if DEVp2p.session_active?(conn.session), do: :active, else: :inactive
    do_handle_packet(packet, session_status, packet_handle_response, conn)
  end
  # Peer judged useless: reply with a Disconnect packet.
  defp do_handle_packet(_, _, {:disconnect, :useless_peer}, conn) do
    disconnect_packet = Disconnect.new(:useless_peer)
    send_packet(conn, disconnect_packet)
  end
  # Same, but also record the peer's capabilities / p2p version first.
  defp do_handle_packet(_, _, {:disconnect, :useless_peer, caps, p2p_version}, conn) do
    disconnect_packet = Disconnect.new(:useless_peer)
    send_packet(
      %{conn | peer: %{conn.peer | p2p_version: p2p_version, caps: caps}},
      disconnect_packet
    )
  end
  # Handshake packet on an inactive session: try to activate the session.
  defp do_handle_packet(packet, :inactive, {:activate, caps, p2p_version}, conn) do
    new_session = attempt_session_activation(conn.session, packet)
    %{conn | peer: %{conn.peer | p2p_version: p2p_version, caps: caps}, session: new_session}
  end
  # Active session asked us to respond with a packet.
  defp do_handle_packet(_, :active, {:send, return_packet}, conn) do
    send_packet(conn, return_packet)
  end
  # Active session, nothing to do.
  defp do_handle_packet(_, :active, :ok, conn) do
    conn
  end
  # Peer is disconnecting: shut down our side of the socket.
  defp do_handle_packet(_, _, :peer_disconnect, conn) do
    _ = TCP.shutdown(conn.socket)
    conn
  end
  @spec attempt_session_activation(Session.t(), Packet.packet()) :: Session.t()
  defp attempt_session_activation(session, packet) do
    # Keep the old session (and just log) if the handshake isn't complete.
    case DEVp2p.handle_message(session, packet) do
      {:ok, updated_session} ->
        updated_session
      {:error, :handshake_incomplete} ->
        :ok =
          Logger.error(fn ->
            "Ignoring message #{inspect(packet)} due to handshake incomplete."
          end)
        session
    end
  end
  @spec get_packet(Session.t(), integer(), binary()) ::
          {:ok, module(), Packet.packet()} | :unknown_packet_type
  defp get_packet(session, message_id, packet_data) do
    # Resolve the wire message id to a packet module via the session's id
    # map, then deserialize the payload with that module.
    case PacketIdMap.get_packet_module(session.packet_id_map, message_id) do
      {:ok, packet_mod} ->
        {:ok, packet_mod, apply(packet_mod, :deserialize, [packet_data])}
      :unsupported_packet ->
        :unknown_packet_type
    end
  end
  @spec notify_subscribers(Packet.packet(), Connection.t()) :: list(any())
  defp notify_subscribers(packet, conn) do
    # Subscribers are either MFA tuples (called inline) or server pids
    # (sent a {:packet, packet, peer} message).
    for subscriber <- Map.get(conn, :subscribers, []) do
      case subscriber do
        {module, function, args} -> apply(module, function, [packet | args])
        {:server, server} -> send(server, {:packet, packet, conn.peer})
      end
    end
  end
  @spec handle_acknowledgement_received(binary(), Connection.t()) :: Connection.t()
  defp handle_acknowledgement_received(data, conn = %{peer: peer}) do
    # Initiator path: the peer's ack lets us derive the framing secrets.
    # Invalid data is logged and the connection state left unchanged.
    case Handshake.handle_ack(conn.handshake, data) do
      {:ok, handshake, secrets, queued_data} ->
        :ok =
          Logger.debug(fn ->
            "[Network] [#{peer}] Got ack from #{peer.host_name}, deriving secrets"
          end)
        Map.merge(conn, %{handshake: handshake, secrets: secrets, queued_data: queued_data})
      {:invalid, reason} ->
        :ok =
          Logger.warn(
            "[Network] [#{peer}] Failed to get handshake message when expecting ack - #{reason}"
          )
        conn
    end
  end
  @spec handle_auth_message_received(binary(), Connection.t()) :: Connection.t()
  defp handle_auth_message_received(data, conn = %{socket: socket}) do
    # Responder path: the auth message identifies the peer and lets us derive
    # secrets; we reply with our ack. Invalid data is logged and ignored.
    case Handshake.handle_auth(conn.handshake, data) do
      {:ok, handshake, secrets} ->
        peer = get_peer_info(handshake.auth_msg, socket)
        :ok = Logger.debug("[Network] Received auth. Sending ack.")
        :ok = send_unframed_data(handshake.encoded_ack_resp, socket, peer)
        Map.merge(conn, %{handshake: handshake, secrets: secrets, peer: peer})
      {:invalid, reason} ->
        :ok =
          Logger.warn(
            "[Network] Received unknown handshake message when expecting auth: #{reason}"
          )
        conn
    end
  end
  @doc """
  Function for sending a packet over to a peer.
  """
  @spec send_packet(Connection.t(), Packet.packet()) :: Connection.t()
  def send_packet(conn, packet) do
    %{socket: socket, secrets: secrets, peer: peer, session: session} = conn
    {:ok, message_id} = PacketIdMap.get_packet_id(session.packet_id_map, packet)
    {:ok, packet_mod} = PacketIdMap.get_packet_module(session.packet_id_map, message_id)
    :ok =
      Logger.debug(fn ->
        "[Network] [#{peer}] Sending packet #{inspect(packet_mod)} (#{
          inspect(message_id, base: :hex)
        }) to #{peer.host_name} (##{conn.sent_message_count + 1})"
      end)
    # Serialize, frame (which rotates the egress secrets), send, then record
    # the updated secrets and bumped message counter on the connection.
    packet_data = apply(packet_mod, :serialize, [packet])
    {frame, updated_secrets} = Frame.frame(message_id, packet_data, secrets)
    :ok = TCP.send_data(socket, frame)
    Map.merge(conn, %{
      secrets: updated_secrets,
      sent_message_count: conn.sent_message_count + 1
    })
  end
  @spec send_unframed_data(binary(), TCP.socket(), Peer.t()) :: :ok | {:error, any()}
  defp send_unframed_data(data, socket, peer) do
    # Used only during the handshake, before framing secrets exist.
    _ =
      Logger.debug(fn ->
        "[Network] [#{peer}] Sending raw data message of length #{byte_size(data)} byte(s) to #{
          peer.host_name
        }"
      end)
    TCP.send_data(socket, data)
  end
  @spec initiate_dev_p2p_session() :: Session.t()
  defp initiate_dev_p2p_session() do
    # Build a fresh session and mark our Hello as sent on it.
    session = DEVp2p.init_session()
    hello = DEVp2p.build_hello()
    DEVp2p.hello_sent(session, hello)
  end
  @spec get_peer_info(AuthMsgV4.t(), TCP.socket()) :: Peer.t()
  defp get_peer_info(auth_msg, socket) do
    # Peer identity comes from the socket address plus the initiator's
    # public key carried in the auth message.
    {host, port} = TCP.peer_info(socket)
    remote_id = Peer.hex_node_id(auth_msg.initiator_public_key)
    Peer.new(host, port, remote_id)
  end
end
|
apps/ex_wire/lib/ex_wire/p2p/manager.ex
| 0.728652
| 0.502747
|
manager.ex
|
starcoder
|
defmodule NewRelic.Config do
@moduledoc """
New Relic Agent Configuration
All configuration items can be set via Environment variables _or_ via `Application` config
"""
@doc """
**Required**
Configure your application name. May contain up to 3 names seperated by `;`
Application name can be configured in two ways:
* Environment variable: `NEW_RELIC_APP_NAME=MyApp`
* Application config: `config :new_relic_agent, app_name: "MyApp"`
"""
def app_name,
do: get(:app_name)
@doc """
**Required**
Configure your New Relic License Key.
License Key can be configured in two ways, though using Environment Variables is strongly
recommended to keep secrets out of source code:
* Environment variables: `NEW_RELIC_LICENSE_KEY=abc123`
* Application config: `config :new_relic_agent, license_key: "abc123"`
"""
def license_key,
do: get(:license_key)
@doc false
def host,
do: get(:host)
@doc """
Configure the Agent logging mechanism.
This controls how the Agent logs it's own behavior, and doesn't impact your
applications own logging at all.
Defaults to the File `"tmp/new_relic.log"`.
Options:
- `"stdout"` Write directly to Standard Out
- `"Logger"` Send Agent logs to Elixir's Logger
- `"file_name.log"` Write to a chosen file
Agent logging can be configured in two ways:
* Environment variable: `NEW_RELIC_LOG=stdout`
* Application config: `config :new_relic_agent, log: "stdout"`
"""
def logger,
do: get(:log)
@doc """
An optional list of key/value pairs that will be automatic custom attributes
on all event types reported (Transactions, etc). Values are determined at Agent
start.
Options:
- `{:system, "ENV_NAME"}` Read a System ENV variable
- `{module, function, args}` Call a function.
- `"foo"` A direct value
This feature is only configurable with `Application` config.
Example:
```
config :new_relic_agent,
automatic_attributes: [
environment: {:system, "APP_ENV"},
node_name: {Node, :self, []},
team_name: "Afterlife"
]
```
"""
def automatic_attributes,
do: get(:automatic_attributes)
@doc """
An optional list of labels that will be applied to the application.
Configured with a single string containing a list of key-value pairs:
`key1:value1;key2:value2`
The delimiting characters `;` and `:` are not allowed in the `key` or `value`
Labels can be configured in two ways:
* Environment variables: `NEW_RELIC_LABELS=region:west;env:prod`
* Application config: `config :new_relic_agent, labels: "region:west;env:prod"`
"""
def labels,
do: get(:labels)
@doc """
Some Agent features can be toggled via configuration.
### Security
* `:error_collector_enabled` (default `true`)
* Toggles collection of any Error traces or metrics
* `:db_query_collection_enabled` (default `true`)
* Toggles collection of Database query strings
* `function_argument_collection_enabled` (default `true`)
* Toggles collection of traced function arguments
### Instrumentation
Opting out of Instrumentation means that `:telemetry` handlers
will not be attached, reducing the performance impact to zero.
* `:plug_instrumentation_enabled` (default `true`)
* Controls all Plug instrumentation
* `:ecto_instrumentation_enabled` (default `true`)
* Controls all Ecto instrumentation
* `:redix_instrumentation_enabled` (default `true`)
* Controls all Redix instrumentation
* `:request_queuing_metrics_enabled`
* Controls collection of request queuing metrics
### Configuration
Each of these features can be configured in two ways, for example:
* Environment variables: `NEW_RELIC_ERROR_COLLECTOR_ENABLED=false`
* Application config: `config :new_relic_agent, error_collector_enabled: false`
"""
def feature?(toggleable_agent_feature)
def feature?(:error_collector) do
get(:features, :error_collector)
end
def feature?(:db_query_collection) do
get(:features, :db_query_collection)
end
def feature?(:plug_instrumentation) do
get(:features, :plug_instrumentation)
end
def feature?(:phoenix_instrumentation) do
get(:features, :phoenix_instrumentation)
end
def feature?(:ecto_instrumentation) do
get(:features, :ecto_instrumentation)
end
def feature?(:redix_instrumentation) do
get(:features, :redix_instrumentation)
end
def feature?(:function_argument_collection) do
get(:features, :function_argument_collection)
end
def feature?(:request_queuing_metrics) do
get(:features, :request_queuing_metrics)
end
@doc """
Some Agent features can be controlled via configuration.
### Logs In Context
This feature can be run in multiple "modes":
* `forwarder` The recommended mode which formats outgoing logs as JSON objects
ready to be picked up by a [Log Forwarder](https://docs.newrelic.com/docs/logs/enable-log-management-new-relic/enable-log-monitoring-new-relic/enable-log-management-new-relic)
* `direct` Logs are buffered in the agent and shipped directly to New Relic. Your logs
will continue being output to their normal destination.
* `disabled` (default)
Logs In Context can be configured in two ways:
* Environment variable `NEW_RELIC_LOGS_IN_CONTEXT=forwarder`
* Application config `config :new_relic_agent, logs_in_context: :forwarder`
### Infinite Tracing
[Infinite Tracing](https://docs.newrelic.com/docs/understand-dependencies/distributed-tracing/infinite-tracing/introduction-infinite-tracing)
gives you more control of sampling by collecting 100% of Spans and sending them
to a Trace Observer for processing.
You can configure your Trace Observer in two ways:
* Environment variable `NEW_RELIC_INFINITE_TRACING_TRACE_OBSERVER_HOST=trace-observer.host`
* Application config `config :new_relic_agent, infinite_tracing_trace_observer_host: "trace-observer.host"`
"""
def feature(configurable_agent_feature)
def feature(:logs_in_context) do
case System.get_env("NEW_RELIC_LOGS_IN_CONTEXT") do
nil -> Application.get_env(:new_relic_agent, :logs_in_context, :disabled)
"forwarder" -> :forwarder
"direct" -> :direct
other -> other
end
end
def feature(:infinite_tracing) do
get(:trace_mode)
end
@doc false
def enabled? do
  # Normalize the truthy chain (config values may be nil) to a strict boolean.
  if harvest_enabled?() && app_name() && license_key(), do: true, else: false
end
@doc false
def region_prefix do
  get(:region_prefix)
end
@doc false
def event_harvest_config() do
  # All four limits come from the same application, so share one lookup fn.
  limit = fn key, default -> Application.get_env(:new_relic_agent, key, default) end

  %{
    harvest_limits: %{
      analytic_event_data: limit.(:analytic_event_per_minute, 1000),
      custom_event_data: limit.(:custom_event_per_minute, 1000),
      error_event_data: limit.(:error_event_per_minute, 100),
      span_event_data: limit.(:span_event_per_minute, 1000)
    }
  }
end
defp harvest_enabled? do
  get(:harvest_enabled)
end
@doc false
def get(key) do
  # Access syntax (`config[key]`) returns nil on a missing key and works for
  # both map and keyword-list configuration terms.
  config = :persistent_term.get(:nr_config)
  config[key]
end
@doc false
def get(:features, key) do
  features = :persistent_term.get(:nr_features)
  features[key]
end
@doc false
def put(items) do
  :persistent_term.put(:nr_config, items)
end
@doc false
def put(:features, items) do
  :persistent_term.put(:nr_features, items)
end
# The VERSION file is read once at compile time; @external_resource makes the
# module recompile whenever that file changes.
@external_resource "VERSION"
@agent_version String.trim(File.read!("VERSION"))

@doc false
def agent_version do
  @agent_version
end
end
|
lib/new_relic/config.ex
| 0.898902
| 0.591133
|
config.ex
|
starcoder
|
defmodule StepFlow.WorkflowController do
  @moduledoc """
  Controller for listing, creating, showing and deleting workflows.

  Every action checks rights against the `:current_user` conn assign; requests
  without an authenticated user (or without the required right) are answered
  with a 403 `error.json` payload.
  """

  use StepFlow, :controller
  use BlueBird.Controller
  require Logger

  alias StepFlow.Controller.Helpers
  alias StepFlow.Metrics.WorkflowInstrumenter
  alias StepFlow.Repo
  alias StepFlow.Step
  alias StepFlow.Workflows
  alias StepFlow.Workflows.Workflow

  action_fallback(StepFlow.FallbackController)

  # Lists the workflows the current user is allowed to see, filtered by the
  # user's rights in addition to any request params.
  def index(%Plug.Conn{assigns: %{current_user: user}} = conn, params) do
    workflows =
      params
      |> Map.put("rights", user.rights)
      |> Workflows.list_workflows()

    conn
    |> put_view(StepFlow.WorkflowView)
    |> render("index.json", workflows: workflows)
  end

  # No authenticated user in assigns: refuse to list anything.
  def index(conn, _) do
    conn
    |> put_status(:forbidden)
    |> put_view(StepFlow.WorkflowDefinitionView)
    |> render("error.json",
      errors: %{reason: "Forbidden to view workflows."}
    )
  end

  # Persists a workflow from a fully-resolved description, records metrics and
  # status, launches the first step and broadcasts a notification.
  # Shared by `create/2` (rights are assumed to be checked by the caller).
  def create_workflow(conn, workflow_params) do
    case Workflows.create_workflow(workflow_params) do
      {:ok, %Workflow{} = workflow} ->
        WorkflowInstrumenter.inc(:step_flow_workflows_created, workflow.identifier)
        Workflows.Status.define_workflow_status(workflow.id, :created_workflow)
        Step.start_next(workflow)
        StepFlow.Notification.send("new_workflow", %{workflow_id: workflow.id})

        conn
        |> put_status(:created)
        |> put_view(StepFlow.WorkflowView)
        |> render("created.json", workflow: workflow)

      {:error, changeset} ->
        conn
        |> put_status(:unprocessable_entity)
        |> put_view(StepFlow.ChangesetView)
        |> render("error.json", changeset: changeset)
    end
  end

  # Creates a workflow from a registered workflow definition, overlaying the
  # request's reference and parameter values on the definition.
  def create(
        %Plug.Conn{assigns: %{current_user: user}} = conn,
        %{"workflow_identifier" => identifier} = workflow_params
      ) do
    case StepFlow.WorkflowDefinitions.get_workflow_definition(identifier) do
      nil ->
        conn
        |> put_status(:unprocessable_entity)
        |> put_view(StepFlow.WorkflowDefinitionView)
        |> render("error.json",
          errors: %{reason: "Unable to locate workflow with this identifier"}
        )

      workflow_definition ->
        if Helpers.has_right(workflow_definition, user, "create") do
          workflow_description =
            workflow_definition
            |> Map.put(:reference, Map.get(workflow_params, "reference"))
            |> Map.put(
              :parameters,
              merge_parameters(
                StepFlow.Map.get_by_key_or_atom(workflow_definition, :parameters),
                Map.get(workflow_params, "parameters", %{})
              )
            )
            |> Map.put(
              :rights,
              workflow_definition
              |> Map.get(:rights)
              |> Enum.map(fn right -> Map.from_struct(right) end)
            )
            |> Map.from_struct()

          create_workflow(conn, workflow_description)
        else
          conn
          |> put_status(:forbidden)
          |> put_view(StepFlow.WorkflowDefinitionView)
          |> render("error.json",
            errors: %{reason: "Forbidden to create workflow with this identifier"}
          )
        end
    end
  end

  # The workflow identifier is mandatory.
  def create(conn, _workflow_params) do
    conn
    |> put_status(:unprocessable_entity)
    |> put_view(StepFlow.WorkflowDefinitionView)
    |> render("error.json",
      errors: %{reason: "Missing Workflow identifier parameter"}
    )
  end

  # Overlays request-supplied values onto the definition's parameter list,
  # preserving definition order. Parameters with no matching entry (by "id")
  # in `request_parameters` are kept untouched; matching ones get a "value".
  defp merge_parameters(parameters, request_parameters) do
    Enum.map(parameters, fn parameter ->
      case Map.get(request_parameters, Map.get(parameter, "id")) do
        nil -> parameter
        parameter_value -> Map.put(parameter, "value", parameter_value)
      end
    end)
  end

  # Shows a single workflow (with its jobs preloaded) when the user has the
  # "view" right on it.
  def show(%Plug.Conn{assigns: %{current_user: user}} = conn, %{"id" => id}) do
    workflow =
      Workflows.get_workflow!(id)
      |> Repo.preload(:jobs)

    if Helpers.has_right(workflow, user, "view") do
      conn
      |> put_view(StepFlow.WorkflowView)
      |> render("show.json", workflow: workflow)
    else
      conn
      |> put_status(:forbidden)
      |> put_view(StepFlow.WorkflowDefinitionView)
      |> render("error.json",
        errors: %{reason: "Forbidden to view workflow with this identifier"}
      )
    end
  end

  def show(conn, _) do
    conn
    |> put_status(:forbidden)
    |> put_view(StepFlow.WorkflowDefinitionView)
    |> render("error.json",
      errors: %{reason: "Forbidden to show workflow with this identifier"}
    )
  end

  # NOTE(review): the identifier is currently ignored and an empty JSON object
  # is always returned (the original wrapped this in a catch-all `case`) —
  # confirm whether a real lookup was intended here.
  def get(conn, %{"identifier" => _workflow_identifier} = _params) do
    conn
    |> json(%{})
  end

  def get(conn, _params) do
    conn
    |> json(%{})
  end

  # Returns workflow history aggregated by the requested time scale
  # (defaults to "hour").
  def statistics(conn, params) do
    scale = Map.get(params, "scale", "hour")
    stats = Workflows.get_workflow_history(%{scale: scale})

    conn
    |> json(%{
      data: stats
    })
  end

  # Updating workflows is not supported through this controller.
  def update(conn, _params) do
    conn
    |> put_status(:forbidden)
    |> put_view(StepFlow.WorkflowDefinitionView)
    |> render("error.json",
      errors: %{reason: "Forbidden to update workflow with this identifier"}
    )
  end

  # Deletes a workflow when the user has the "delete" right on it.
  def delete(%Plug.Conn{assigns: %{current_user: user}} = conn, %{"id" => id}) do
    workflow = Workflows.get_workflow!(id)

    if Helpers.has_right(workflow, user, "delete") do
      with {:ok, %Workflow{}} <- Workflows.delete_workflow(workflow) do
        send_resp(conn, :no_content, "")
      end
    else
      conn
      |> put_status(:forbidden)
      |> put_view(StepFlow.WorkflowDefinitionView)
      |> render("error.json",
        # Fixed copy-paste: this is the delete action, but the message said
        # "update" (the no-user fallback clause below already said "delete").
        errors: %{reason: "Forbidden to delete workflow with this identifier"}
      )
    end
  end

  def delete(conn, _) do
    conn
    |> put_status(:forbidden)
    |> put_view(StepFlow.WorkflowDefinitionView)
    |> render("error.json",
      errors: %{reason: "Forbidden to delete workflow with this identifier"}
    )
  end
end
|
lib/step_flow/controllers/workflow_controller.ex
| 0.613931
| 0.433202
|
workflow_controller.ex
|
starcoder
|
defmodule Commanded.Event.Handler do
  @moduledoc """
  Defines the behaviour an event handler must implement and
  provides a convenience macro that implements the behaviour, allowing you to
  handle only the events you are interested in processing.

  You should start your event handlers using a [Supervisor](supervision.html) to
  ensure they are restarted on error.

  ## Example

      defmodule ExampleHandler do
        use Commanded.Event.Handler, name: "ExampleHandler"

        def handle(%AnEvent{..}, _metadata) do
          # ... process the event
          :ok
        end
      end

  Start your event handler process (or use a [Supervisor](supervision.html)):

      {:ok, _handler} = ExampleHandler.start_link()

  ## Event handler name

  The name you specify is used when subscribing to the event store. Therefore
  you *should not* change the name once the handler has been deployed. A new
  subscription will be created when you change the name, and your event handler
  will receive already handled events.

  You can use the module name of your event handler using the `__MODULE__`
  special form:

      defmodule ExampleHandler do
        use Commanded.Event.Handler,
          name: __MODULE__
      end

  ## Subscription options

  You can choose to start the event handler's event store subscription from
  `:origin`, `:current` position, or an exact event number using the
  `start_from` option. The default is to use the origin so your handler will
  receive *all* events.

  Use the `:current` position when you don't want newly created event handlers
  to go through all previous events. An example would be adding an event handler
  to send transactional emails to an already deployed system containing many
  historical events.

  ### Example

  Set the `start_from` option (`:origin`, `:current`, or an explicit event
  number) when using `Commanded.Event.Handler`:

      defmodule ExampleHandler do
        use Commanded.Event.Handler,
          name: "ExampleHandler",
          start_from: :origin
      end

  You can optionally override `:start_from` by passing it as option when
  starting your handler:

      {:ok, _handler} = ExampleHandler.start_link(start_from: :current)

  ### Subscribing to an individual stream

  By default event handlers will subscribe to all events appended to any stream.
  Provide a `subscribe_to` option to subscribe to a single stream.

      defmodule ExampleHandler do
        use Commanded.Event.Handler,
          name: __MODULE__,
          subscribe_to: "stream1234"
      end

  This will ensure the handler only receives events appended to that stream.

  ## `c:init/0` callback

  You can define an `c:init/0` function in your handler to be called once it has
  started and successfully subscribed to the event store.

  This callback function must return `:ok`, any other return value will
  terminate the event handler with an error.

      defmodule ExampleHandler do
        use Commanded.Event.Handler, name: "ExampleHandler"

        def init do
          # optional initialisation
          :ok
        end

        def handle(%AnEvent{..}, _metadata) do
          # ... process the event
          :ok
        end
      end

  ## `c:error/3` callback

  You can define an `c:error/3` callback function to handle any errors returned
  from your event handler's `handle/2` functions. The `c:error/3` function is
  passed the actual error (e.g. `{:error, :failure}`), the failed event, and a
  failure context.

  Use pattern matching on the error and/or failed event to explicitly handle
  certain errors or events. You can choose to retry, skip, or stop the event
  handler after an error.

  The default behaviour if you don't provide an `c:error/3` callback is to stop
  the event handler using the exact error reason returned from the `handle/2`
  function. You should supervise event handlers to ensure they are correctly
  restarted on error.

  ### Example error handling

      defmodule ExampleHandler do
        use Commanded.Event.Handler, name: __MODULE__

        require Logger

        alias Commanded.Event.FailureContext

        def handle(%AnEvent{}, _metadata) do
          # simulate event handling failure
          {:error, :failed}
        end

        def error({:error, :failed}, %AnEvent{} = event, %FailureContext{context: context}) do
          context = record_failure(context)

          case Map.get(context, :failures) do
            too_many when too_many >= 3 ->
              # skip bad event after third failure
              Logger.warn(fn -> "Skipping bad event, too many failures: " <> inspect(event) end)
              :skip

            _ ->
              # retry event, failure count is included in context map
              {:retry, context}
          end
        end

        defp record_failure(context) do
          Map.update(context, :failures, 1, fn failures -> failures + 1 end)
        end
      end

  ## Consistency

  For each event handler you can define its consistency, as one of either
  `:strong` or `:eventual`.

  This setting is used when dispatching commands and specifying the
  `consistency` option.

  When you dispatch a command using `:strong` consistency, after successful
  command dispatch the process will block until all event handlers configured to
  use `:strong` consistency have processed the domain events created by the
  command. This is useful when you have a read model updated by an event handler
  that you wish to query for data affected by the command dispatch. With
  `:strong` consistency you are guaranteed that the read model will be
  up-to-date after the command has successfully dispatched. It can be safely
  queried for data updated by any of the events created by the command.

  The default setting is `:eventual` consistency. Command dispatch will return
  immediately upon confirmation of event persistence, not waiting for any event
  handlers.

  ### Example

      defmodule ExampleHandler do
        use Commanded.Event.Handler,
          name: "ExampleHandler",
          consistency: :strong
      end
  """

  use GenServer
  use Commanded.Registration

  require Logger

  alias Commanded.Event.{FailureContext, Handler}
  alias Commanded.EventStore
  alias Commanded.EventStore.RecordedEvent
  alias Commanded.Subscriptions

  @type domain_event :: struct()
  @type metadata :: map()
  @type subscribe_from :: :origin | :current | non_neg_integer()
  @type consistency :: :eventual | :strong

  @doc """
  Optional initialisation callback function called when the handler starts.

  Can be used to start any related processes when the event handler is started.

  Return `:ok` on success, or `{:stop, reason}` to stop the handler process.
  """
  @callback init() :: :ok | {:stop, reason :: any()}

  @doc """
  Event handler behaviour to handle a domain event and its metadata.

  Return `:ok` on success, `{:error, :already_seen_event}` to ack and skip the
  event, or `{:error, reason}` on failure.
  """
  @callback handle(domain_event, metadata) ::
              :ok
              | {:error, :already_seen_event}
              | {:error, reason :: any()}

  @doc """
  Called when an event `handle/2` callback returns an error.

  The `c:error/3` function allows you to control how event handling failures
  are handled. The function is passed the error returned by the event handler
  (e.g. `{:error, :failure}`), the event causing the error, and a context map
  containing state passed between retries. Use the context map to track any
  transient state you need to access between retried failures.

  You can return one of the following responses depending upon the
  error severity:

  - `{:retry, context}` - retry the failed event, provide a context
    map containing any state passed to subsequent failures. This could be used
    to count the number of failures, stopping after too many.
  - `{:retry, delay, context}` - retry the failed event, after sleeping for
    the requested delay (in milliseconds). Context is a map as described in
    `{:retry, context}` above.
  - `:skip` - skip the failed event by acknowledging receipt.
  - `{:stop, reason}` - stop the event handler with the given reason.
  """
  @callback error(
              error :: term(),
              failed_event :: domain_event,
              failure_context :: FailureContext.t()
            ) ::
              {:retry, context :: map()}
              | {:retry, delay :: non_neg_integer(), context :: map()}
              | :skip
              | {:stop, reason :: term()}

  @doc """
  Macro as a convenience for defining an event handler.
  """
  defmacro __using__(opts) do
    quote location: :keep do
      @before_compile unquote(__MODULE__)
      @behaviour Commanded.Event.Handler

      @opts unquote(opts) || []
      @name Commanded.Event.Handler.parse_name(__MODULE__, @opts[:name])

      @doc false
      def start_link(opts \\ []) do
        opts = Commanded.Event.Handler.start_opts(__MODULE__, Keyword.drop(@opts, [:name]), opts)

        Commanded.Event.Handler.start_link(@name, __MODULE__, opts)
      end

      @doc """
      Provides a child specification to allow the event handler to be easily
      supervised.

      ## Example

          Supervisor.start_link([
            {ExampleHandler, []}
          ], strategy: :one_for_one)
      """
      def child_spec(opts) do
        default = %{
          id: {__MODULE__, @name},
          start: {__MODULE__, :start_link, [opts]},
          restart: :permanent,
          type: :worker
        }

        Supervisor.child_spec(default, [])
      end

      @doc false
      def __name__, do: @name

      @doc false
      def init, do: :ok

      defoverridable init: 0
    end
  end

  @doc false
  def parse_name(module, name) when name in [nil, ""],
    do: raise("#{inspect(module)} expects `:name` to be given")

  def parse_name(_module, name) when is_bitstring(name), do: name
  def parse_name(_module, name), do: inspect(name)

  @doc false
  def start_opts(module, module_opts, local_opts, additional_allowed_opts \\ []) do
    # Locally supplied opts take precedence over those given to `use`.
    {valid, invalid} =
      module_opts
      |> Keyword.merge(local_opts)
      |> Keyword.split([:consistency, :start_from, :subscribe_to] ++ additional_allowed_opts)

    if Enum.any?(invalid) do
      raise "#{inspect(module)} specifies invalid options: #{inspect(Keyword.keys(invalid))}"
    else
      valid
    end
  end

  # Include default `handle/2` and `error/3` callback functions in module
  @doc false
  defmacro __before_compile__(_env) do
    quote generated: true do
      @doc false
      def handle(_event, _metadata), do: :ok

      @doc false
      def error({:error, reason}, _failed_event, _failure_context), do: {:stop, reason}
    end
  end

  @doc false
  defstruct [
    :consistency,
    :handler_name,
    :handler_module,
    :last_seen_event,
    :subscribe_from,
    :subscribe_to,
    :subscription
  ]

  @doc false
  def start_link(handler_name, handler_module, opts \\ []) do
    name = name(handler_name)

    handler = %Handler{
      handler_name: handler_name,
      handler_module: handler_module,
      consistency: consistency(opts),
      subscribe_from: start_from(opts),
      subscribe_to: subscribe_to(opts)
    }

    Registration.start_link(name, __MODULE__, handler)
  end

  defp name(name), do: {__MODULE__, name}

  @doc false
  def init(%Handler{} = state) do
    # Defer the event store subscription so `start_link/3` returns promptly.
    :ok = GenServer.cast(self(), :subscribe_to_events)

    {:ok, state}
  end

  @doc false
  def handle_call(:last_seen_event, _from, %Handler{} = state) do
    %Handler{last_seen_event: last_seen_event} = state

    {:reply, last_seen_event, state}
  end

  @doc false
  def handle_call(:config, _from, %Handler{} = state) do
    %Handler{consistency: consistency, subscribe_from: subscribe_from, subscribe_to: subscribe_to} =
      state

    config = [consistency: consistency, start_from: subscribe_from, subscribe_to: subscribe_to]

    {:reply, config, state}
  end

  @doc false
  def handle_cast(:subscribe_to_events, %Handler{} = state) do
    {:noreply, subscribe_to_events(state)}
  end

  @doc false
  # Subscription to event store has successfully subscribed, init event handler
  def handle_info({:subscribed, subscription}, %Handler{subscription: subscription} = state) do
    Logger.debug(fn -> describe(state) <> " has successfully subscribed to event store" end)

    %Handler{
      consistency: consistency,
      handler_module: handler_module,
      handler_name: handler_name
    } = state

    case handler_module.init() do
      :ok ->
        # Register this event handler as a subscription with the given consistency
        :ok = Subscriptions.register(handler_name, consistency)

        {:noreply, state}

      {:stop, reason} ->
        Logger.debug(fn -> describe(state) <> " `init/0` callback has requested to stop" end)

        {:stop, reason, state}
    end
  end

  @doc false
  def handle_info({:events, events}, %Handler{} = state) do
    Logger.debug(fn -> describe(state) <> " received events: #{inspect(events)}" end)

    try do
      state = Enum.reduce(events, state, &handle_event/2)

      {:noreply, state}
    catch
      {:error, reason} ->
        # stop after event handling returned an error
        {:stop, reason, state}
    end
  end

  defp subscribe_to_events(%Handler{} = state) do
    %Handler{
      handler_name: handler_name,
      subscribe_from: subscribe_from,
      subscribe_to: subscribe_to
    } = state

    {:ok, subscription} =
      EventStore.subscribe_to(subscribe_to, handler_name, self(), subscribe_from)

    %Handler{state | subscription: subscription}
  end

  defp handle_event(event, handler, context \\ %{})

  # Ignore already seen event.
  defp handle_event(
         %RecordedEvent{event_number: event_number} = event,
         %Handler{last_seen_event: last_seen_event} = state,
         _context
       )
       when not is_nil(last_seen_event) and event_number <= last_seen_event do
    Logger.debug(fn -> describe(state) <> " has already seen event ##{inspect(event_number)}" end)

    confirm_receipt(event, state)
  end

  # Delegate event to handler module.
  defp handle_event(%RecordedEvent{} = event, %Handler{} = state, context) do
    case delegate_event_to_handler(event, state) do
      :ok ->
        confirm_receipt(event, state)

      {:error, :already_seen_event} ->
        confirm_receipt(event, state)

      {:error, reason} = error ->
        Logger.error(fn ->
          describe(state) <>
            " failed to handle event #{inspect(event)} due to: #{inspect(reason)}"
        end)

        handle_event_error(error, event, state, context)
    end
  end

  defp delegate_event_to_handler(%RecordedEvent{} = event, %Handler{} = state) do
    %RecordedEvent{data: data} = event
    %Handler{handler_module: handler_module} = state

    metadata = enrich_metadata(event)

    # Convert any exception raised by the user's `handle/2` into an error
    # tuple so it flows through the `c:error/3` callback machinery.
    try do
      handler_module.handle(data, metadata)
    rescue
      e ->
        {:error, e}
    end
  end

  defp handle_event_error(error, %RecordedEvent{} = failed_event, %Handler{} = state, context) do
    %RecordedEvent{data: data} = failed_event
    %Handler{handler_module: handler_module} = state

    failure_context = %FailureContext{
      context: context,
      metadata: enrich_metadata(failed_event)
    }

    case handler_module.error(error, data, failure_context) do
      {:retry, context} when is_map(context) ->
        # Retry the failed event
        Logger.info(fn -> describe(state) <> " is retrying failed event" end)

        handle_event(failed_event, state, context)

      {:retry, delay, context} when is_map(context) and is_integer(delay) and delay >= 0 ->
        # Retry the failed event after waiting for the given delay, in milliseconds
        Logger.info(fn ->
          describe(state) <> " is retrying failed event after #{inspect(delay)}ms"
        end)

        :timer.sleep(delay)

        handle_event(failed_event, state, context)

      :skip ->
        # Skip the failed event by confirming receipt
        Logger.info(fn -> describe(state) <> " is skipping event" end)

        confirm_receipt(failed_event, state)

      {:stop, reason} ->
        # Stop event handler
        Logger.warn(fn -> describe(state) <> " has requested to stop: #{inspect(reason)}" end)

        throw({:error, reason})

      invalid ->
        Logger.warn(fn ->
          describe(state) <> " returned an invalid error reponse: #{inspect(invalid)}"
        end)

        # Stop event handler with original error
        throw(error)
    end
  end

  # Confirm receipt of event
  defp confirm_receipt(%RecordedEvent{} = event, %Handler{} = state) do
    %RecordedEvent{event_number: event_number} = event

    Logger.debug(fn ->
      describe(state) <> " confirming receipt of event ##{inspect(event_number)}"
    end)

    ack_event(event, state)

    %Handler{state | last_seen_event: event_number}
  end

  defp ack_event(event, %Handler{} = state) do
    %Handler{
      consistency: consistency,
      handler_name: handler_name,
      subscription: subscription
    } = state

    :ok = EventStore.ack_event(subscription, event)
    :ok = Subscriptions.ack_event(handler_name, consistency, event)
  end

  @enrich_metadata_fields [
    :event_id,
    :event_number,
    :stream_id,
    :stream_version,
    :correlation_id,
    :causation_id,
    :created_at
  ]

  # Merges the event's envelope fields into its metadata map, so the user's
  # `handle/2` callback can see event numbers, ids and timestamps.
  defp enrich_metadata(%RecordedEvent{} = event) do
    %RecordedEvent{metadata: metadata} = event

    event
    |> Map.from_struct()
    |> Map.take(@enrich_metadata_fields)
    |> Map.merge(metadata || %{})
  end

  defp consistency(opts) do
    case opts[:consistency] || Application.get_env(:commanded, :default_consistency, :eventual) do
      consistency when consistency in [:eventual, :strong] -> consistency
      invalid -> raise "Invalid `consistency` option: #{inspect(invalid)}"
    end
  end

  defp start_from(opts) do
    case opts[:start_from] || :origin do
      start_from when start_from in [:origin, :current] -> start_from
      start_from when is_integer(start_from) -> start_from
      # Fixed: previously this branch *returned* the error-message string
      # instead of raising, silently passing an invalid value through as the
      # subscription start position. Now raises like `consistency/1`.
      invalid -> raise "Invalid `start_from` option: #{inspect(invalid)}"
    end
  end

  defp subscribe_to(opts) do
    case opts[:subscribe_to] || :all do
      :all -> :all
      stream when is_binary(stream) -> stream
      # Fixed: same defect as `start_from/1` — raise on invalid input rather
      # than returning the message string as the subscription target.
      invalid -> raise "Invalid `subscribe_to` option: #{inspect(invalid)}"
    end
  end

  defp describe(%Handler{handler_module: handler_module}),
    do: inspect(handler_module)
end
|
lib/commanded/event/handler.ex
| 0.93441
| 0.563738
|
handler.ex
|
starcoder
|
defmodule Timex.Interval do
  @moduledoc """
  This module is used for creating and manipulating DateTime intervals.
  """
  alias Timex.Duration

  defmodule FormatError do
    @moduledoc """
    Thrown when an error occurs with formatting an Interval
    """
    defexception message: "Unable to format interval!"

    def exception(message: message) do
      %FormatError{message: message}
    end
  end

  @enforce_keys [:from, :until]
  defstruct from: nil,
            until: nil,
            left_open: false,
            right_open: true,
            step: [days: 1]

  @doc """
  Create a new Interval struct.

  Note: By default intervals are right open.

  Valid keywords:
  - `from`: The date the interval starts at. Should be a DateTime.
  - `until`: Either a DateTime, or a time shift that will be applied to the `from` date.
  - `left_open`: Whether the interval is left open. See explanation below.
  - `right_open`: Whether the interval is right open. See explanation below.
  - `step`: The step to use when iterating the interval, defaults to `[days: 1]`

  The terms `left_open` and `right_open` come from the mathematical concept of intervals, the
  following excerpt from Wikipedia gives a good explanation of their meaning:

      "An interval is said to be left-open if and only if it has no minimum
      (an element that is smaller than all other elements); right-open if it has no maximum;
      and open if it has both properties. The interval [0,1) = {x | 0 ≤ x < 1}, for example,
      is left-closed and right-open. The empty set and the set of all reals are open intervals,
      while the set of non-negative reals, for example, is a right-open but not left-open interval.
      The open intervals coincide with the open sets of the real line in its standard topology."

  Note: `until` shifts delegate to `Timex.shift`, so the options provided should match its
  valid options.

  ## Examples

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: ~D[2014-09-29])
      ...> |> Interval.format!("%Y-%m-%d", :strftime)
      "[2014-09-22, 2014-09-29)"

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: [days: 7])
      ...> |> Interval.format!("%Y-%m-%d", :strftime)
      "[2014-09-22, 2014-09-29)"

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: [days: 7], left_open: true, right_open: false)
      ...> |> Interval.format!("%Y-%m-%d", :strftime)
      "(2014-09-22, 2014-09-29]"

      iex> use Timex
      ...> Interval.new(from: ~N[2014-09-22T15:30:00], until: [minutes: 20], right_open: false)
      ...> |> Interval.format!("%H:%M", :strftime)
      "[15:30, 15:50]"

  """
  def new(options \\ []) do
    # Everything is normalized to NaiveDateTime internally.
    from =
      case Keyword.get(options, :from) do
        nil -> Timex.Protocol.NaiveDateTime.now()
        %NaiveDateTime{} = d -> d
        d -> Timex.to_naive_datetime(d)
      end

    left_open = Keyword.get(options, :left_open, false)
    right_open = Keyword.get(options, :right_open, true)
    step = Keyword.get(options, :step, days: 1)

    until =
      case Keyword.get(options, :until, days: 1) do
        {:error, _} = err -> err
        x when is_list(x) -> Timex.shift(from, x)
        %NaiveDateTime{} = d -> d
        %DateTime{} = d -> Timex.to_naive_datetime(d)
        %Date{} = d -> Timex.to_naive_datetime(d)
        _ -> {:error, :invalid_until}
      end

    case until do
      {:error, _} = err ->
        err

      _ ->
        %__MODULE__{
          from: from,
          until: until,
          left_open: left_open,
          right_open: right_open,
          step: step
        }
    end
  end

  @doc """
  Return the interval duration, given a unit.

  When the unit is one of `:seconds`, `:minutes`, `:hours`, `:days`, `:weeks`, `:months`,
  `:years`, the result is an `integer`.

  When the unit is `:duration`, the result is a `Duration` struct.

  ## Example

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: [months: 5])
      ...> |> Interval.duration(:months)
      5

      iex> use Timex
      ...> Interval.new(from: ~N[2014-09-22T15:30:00], until: [minutes: 20])
      ...> |> Interval.duration(:duration)
      Duration.from_minutes(20)

  """
  def duration(%__MODULE__{from: from, until: until}, :duration) do
    Timex.diff(until, from, :microseconds) |> Duration.from_microseconds()
  end

  def duration(%__MODULE__{from: from, until: until}, unit) do
    Timex.diff(until, from, unit)
  end

  @doc """
  Change the step value for the provided interval.

  The step should be a keyword list valid for use with `Timex.Date.shift`.

  ## Examples

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: [days: 3], right_open: false)
      ...> |> Interval.with_step([days: 1]) |> Enum.map(&Timex.format!(&1, "%Y-%m-%d", :strftime))
      ["2014-09-22", "2014-09-23", "2014-09-24", "2014-09-25"]

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: [days: 3], right_open: false)
      ...> |> Interval.with_step([days: 2]) |> Enum.map(&Timex.format!(&1, "%Y-%m-%d", :strftime))
      ["2014-09-22", "2014-09-24"]

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: [days: 3], right_open: false)
      ...> |> Interval.with_step([days: 3]) |> Enum.map(&Timex.format!(&1, "%Y-%m-%d", :strftime))
      ["2014-09-22", "2014-09-25"]

  """
  def with_step(%__MODULE__{} = interval, step) do
    %__MODULE__{interval | :step => step}
  end

  @doc """
  Formats the interval as a human readable string.

  ## Examples

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: [days: 3])
      ...> |> Interval.format!("%Y-%m-%d %H:%M", :strftime)
      "[2014-09-22 00:00, 2014-09-25 00:00)"

      iex> use Timex
      ...> Interval.new(from: ~D[2014-09-22], until: [days: 3])
      ...> |> Interval.format!("%Y-%m-%d", :strftime)
      "[2014-09-22, 2014-09-25)"

  """
  def format(%__MODULE__{} = interval, format, formatter \\ nil) do
    case Timex.format(interval.from, format, formatter) do
      {:error, _} = err ->
        err

      {:ok, from} ->
        case Timex.format(interval.until, format, formatter) do
          {:error, _} = err ->
            err

          {:ok, until} ->
            # Bracket style reflects the open/closed flags: "(" / ")" for open ends.
            lopen = if interval.left_open, do: "(", else: "["
            ropen = if interval.right_open, do: ")", else: "]"
            {:ok, "#{lopen}#{from}, #{until}#{ropen}"}
        end
    end
  end

  @doc """
  Same as `format/3`, but raises a `Timex.Interval.FormatError` on failure.
  """
  def format!(%__MODULE__{} = interval, format, formatter \\ nil) do
    case format(interval, format, formatter) do
      {:ok, str} -> str
      {:error, e} -> raise FormatError, message: "#{inspect(e)}"
    end
  end

  defimpl Enumerable do
    def reduce(interval, acc, fun) do
      do_reduce(
        {get_starting_date(interval), interval.until, interval.right_open, interval.step},
        acc,
        fun
      )
    end

    def member?(%Timex.Interval{from: from, until: until}, value) do
      # Just tests for set membership (date is within the provided (inclusive) range)
      # Fixed: the lower-bound test previously used `< 1`, which wrongly
      # excluded `from` itself despite the documented inclusive semantics
      # (the upper bound already included `until`).
      # NOTE(review): `left_open`/`right_open` are intentionally not consulted
      # here, matching the original behaviour — confirm that is intended.
      result =
        cond do
          Timex.compare(value, from) < 0 -> false
          Timex.compare(value, until) > 0 -> false
          :else -> true
        end

      {:ok, result}
    end

    def count(_interval) do
      {:error, __MODULE__}
    end

    defp do_reduce(_state, {:halt, acc}, _fun), do: {:halted, acc}

    defp do_reduce(state, {:suspend, acc}, fun),
      do: {:suspended, acc, &do_reduce(state, &1, fun)}

    defp do_reduce({current_date, end_date, right_open, keywords}, {:cont, acc}, fun) do
      if has_recursion_ended?(current_date, end_date, right_open) do
        {:done, acc}
      else
        next_date = Timex.shift(current_date, keywords)
        do_reduce({next_date, end_date, right_open, keywords}, fun.(current_date, acc), fun)
      end
    end

    # A left-open interval starts iterating one step past `from`.
    defp get_starting_date(%Timex.Interval{from: from, step: step, left_open: true}),
      do: Timex.shift(from, step)

    defp get_starting_date(%Timex.Interval{from: from}), do: from

    # Right-open: stop before reaching `until`; right-closed: include `until`.
    defp has_recursion_ended?(current_date, end_date, true),
      do: Timex.compare(end_date, current_date) < 1

    defp has_recursion_ended?(current_date, end_date, false),
      do: Timex.compare(end_date, current_date) < 0
  end
end
|
elixir/codes-from-books/little-elixir/cap8/blitzy/deps/timex/lib/interval/interval.ex
| 0.931134
| 0.597461
|
interval.ex
|
starcoder
|
defmodule Operate.Adapter do
  @moduledoc """
  Operate adapter specification.

  An adapter is responsible for loading tapes and ops from a datasource -
  potentially a web API, a database or even a Bitcoin node. Operate ships with
  two default adapters, although these can be swapped out with any other adapter
  by changing the configuration:

      children = [
        {Operate, [
          tape_adapter: Operate.Adapter.Bob,
          op_adapter: Operate.Adapter.OpApi
        ]}
      ]
      Supervisor.start_link(children, strategy: :one_for_one)

  ## Creating an adapter

  An adapter must implement one or more of the following callbacks:

  * `c:fetch_tx/2` - function that takes a txid and returns a `t:Operate.BPU.Transaction.t/0`.
  * `c:fetch_tx_by/2` - function that takes a map and returns a list of `t:Operate.BPU.Transaction.t/0` tx.
  * `c:fetch_ops/2` - function that takes a list of Op references and returns a list of `t:Operate.Op.t/0` functions.

  Example:

      defmodule MyAdapter do
        use Operate.Adapter

        def fetch_tx(txid, opts) do
          key = Keyword.get(opts, :api_key)
          BitcoinApi.load_tx(txid, api_key: key)
          |> to_bpu
        end

        defp to_bpu(tx) do
          # Map tx object to `Operate.BPU.Transaction.t`
        end
      end

  Using the above example, Operate can be configured with:

      {Operate, [
        tape_adapter: {MyAdapter, [api_key: "myapikey"]}
      ]}
  """

  defmacro __using__(opts \\ []) do
    quote bind_quoted: [opts: opts] do
      @behaviour Operate.Adapter

      # Each callback gets a raising default implementation so an adapter only
      # needs to implement the functions it actually supports. The bang
      # variants delegate to the plain variants and raise on `{:error, _}`.
      def fetch_tx(_txid, _options \\ []),
        do: raise "#{__MODULE__}.fetch_tx/2 not implemented"

      def fetch_tx!(txid, options \\ []) do
        case fetch_tx(txid, options) do
          {:ok, tx} -> tx
          {:error, err} -> raise err
        end
      end

      def fetch_tx_by(_query, _options \\ []),
        do: raise "#{__MODULE__}.fetch_tx_by/2 not implemented"

      def fetch_tx_by!(query, options \\ []) do
        case fetch_tx_by(query, options) do
          {:ok, txns} -> txns
          {:error, err} -> raise err
        end
      end

      def fetch_ops(_refs, _options \\ []),
        do: raise "#{__MODULE__}.fetch_ops/2 not implemented"

      def fetch_ops!(refs, options \\ []) do
        case fetch_ops(refs, options) do
          {:ok, ops} -> ops
          {:error, err} -> raise err
        end
      end

      defoverridable fetch_tx: 1, fetch_tx: 2,
                     fetch_tx!: 1, fetch_tx!: 2,
                     fetch_tx_by: 1, fetch_tx_by: 2,
                     fetch_tx_by!: 1, fetch_tx_by!: 2,
                     fetch_ops: 1, fetch_ops: 2,
                     fetch_ops!: 1, fetch_ops!: 2
    end
  end

  # NOTE(review): the fetch_tx/fetch_tx_by specs previously returned
  # `Operate.Tape.t`, contradicting the moduledoc above which states these
  # callbacks return `Operate.BPU.Transaction.t`. Aligned with the moduledoc;
  # confirm against the Operate.BPU.Transaction module.

  @doc """
  Fetches a transaction by the given txid, and returns the result in an
  `:ok` / `:error` tuple pair.
  """
  @callback fetch_tx(String.t, keyword) ::
    {:ok, Operate.BPU.Transaction.t} |
    {:error, Exception.t}

  @doc """
  As `c:fetch_tx/2`, but returns the transaction or raises an exception.
  """
  @callback fetch_tx!(String.t, keyword) :: Operate.BPU.Transaction.t

  @doc """
  Fetches a list of transactions by the given query map, and returns the result
  in an `:ok` / `:error` tuple pair.
  """
  @callback fetch_tx_by(map, keyword) ::
    {:ok, [Operate.BPU.Transaction.t, ...]} |
    {:error, Exception.t}

  @doc """
  As `c:fetch_tx_by/2`, but returns the result or raises an exception.
  """
  @callback fetch_tx_by!(map, keyword) :: [Operate.BPU.Transaction.t, ...]

  @doc """
  Fetches a list of Ops by the given list of Op references. Returns the result
  in an `:ok` / `:error` tuple pair.
  """
  @callback fetch_ops(list, keyword) ::
    {:ok, [Operate.Op.t, ...]} |
    {:error, Exception.t}

  @doc """
  As `c:fetch_ops/2`, but returns the result or raises an exception.
  """
  @callback fetch_ops!(list, keyword) :: [Operate.Op.t, ...]
end
|
lib/operate/adapter.ex
| 0.734501
| 0.565869
|
adapter.ex
|
starcoder
|
defmodule Timex do
@moduledoc File.read!("README.md")
use Application
# Application callback: Timex requires the :tzdata application to be running
# before its (empty) supervision tree is started.
def start(_type, _args) do
  started = Enum.map(Application.started_applications(), fn {name, _desc, _vsn} -> name end)

  if :tzdata in started do
    Supervisor.start_link([], strategy: :one_for_one, name: Timex.Supervisor)
  else
    {:error, ":tzdata application not started! Ensure :timex is in your applications list!"}
  end
end
# `use Timex` injects the commonly-needed Timex aliases into the caller.
defmacro __using__(_) do
  quote do
    alias Timex.{
      AmbiguousDateTime,
      TimezoneInfo,
      AmbiguousTimezoneInfo,
      Interval,
      Duration,
      Timezone
    }
  end
end
alias Timex.{Duration, AmbiguousDateTime}
alias Timex.{Timezone, TimezoneInfo, AmbiguousTimezoneInfo}
alias Timex.{Types, Helpers, Translator}
alias Timex.{Comparable}
use Timex.Constants
import Timex.Macros
@doc """
Returns a Date representing the current day in UTC
"""
@spec today() :: Date.t()
def today() do
  # Take only the date portion of the current UTC wall-clock time.
  {{y, m, d}, _time} = :calendar.universal_time()
  %Date{year: y, month: m, day: d}
end
@doc """
Returns a DateTime representing the current moment in time in UTC
"""
@spec now() :: DateTime.t()
def now(), do: from_unix(:os.system_time(), :native)
@doc """
Returns a DateTime representing the current moment in time in the provided
timezone.
"""
@spec now(Types.valid_timezone()) :: DateTime.t() | AmbiguousDateTime.t() | {:error, term}
def now(tz), do: Timezone.convert(now(), tz)
@doc """
Returns a DateTime representing the current moment in time in the local timezone.
"""
@spec local() :: DateTime.t() | AmbiguousDateTime.t() | {:error, term}
def local() do
# Resolve the local timezone for the current wall-clock time. Around a DST
# transition the local zone can be ambiguous, in which case both candidate
# interpretations are returned rather than guessing.
case Timezone.local(:calendar.local_time()) do
%AmbiguousTimezoneInfo{after: a, before: b} ->
# Convert the same instant into both candidate zones so the caller can
# decide which interpretation applies.
d = now()
ad = Timezone.convert(d, a.full_name)
bd = Timezone.convert(d, b.full_name)
%AmbiguousDateTime{after: ad, before: bd}
%TimezoneInfo{full_name: tz} ->
now(tz)
{:error, _} = err ->
err
end
end
@doc """
Returns a DateTime representing the given date/time in the local timezone
"""
@spec local(Types.valid_datetime()) :: DateTime.t() | AmbiguousDateTime.t() | {:error, term}
# Converts the given date/time value into the local timezone; errors from
# timezone resolution are passed through unchanged.
def local(date) do
  case date |> to_erl() |> Timezone.local() do
    {:error, _} = error -> error
    tzinfo -> Timezone.convert(date, tzinfo.full_name)
  end
end
@doc """
Returns a Date representing the start of the UNIX epoch
"""
@spec epoch() :: Date.t()
def epoch(), do: %Date{year: 1970, month: 1, day: 1}
@doc """
Returns a Date representing the start of the Gregorian epoch
"""
@spec zero() :: Date.t()
def zero(), do: %Date{year: 0, month: 1, day: 1}
@doc """
Convert a date/time value to a Date struct.
"""
@spec to_date(Types.valid_datetime()) :: Date.t() | {:error, term}
defdelegate to_date(date), to: Timex.Protocol
@doc """
Convert a date/time value to a NaiveDateTime struct.
"""
@spec to_naive_datetime(Types.valid_datetime()) :: NaiveDateTime.t() | {:error, term}
defdelegate to_naive_datetime(date), to: Timex.Protocol
@doc """
Convert a date/time value and timezone name to a DateTime struct.
If the DateTime is ambiguous and cannot be resolved, an AmbiguousDateTime will be returned,
allowing the developer to choose which of the two choices is desired.
If no timezone is provided, "Etc/UTC" will be used
"""
@spec to_datetime(Types.valid_datetime()) :: DateTime.t() | {:error, term}
@spec to_datetime(Types.valid_datetime(), Types.valid_timezone()) ::
DateTime.t() | AmbiguousDateTime.t() | {:error, term}
def to_datetime(from), do: Timex.Protocol.to_datetime(from, "Etc/UTC")
defdelegate to_datetime(from, timezone), to: Timex.Protocol
@doc false
defdeprecated datetime(from, timezone), "use to_datetime/2 instead" do
to_datetime(from, timezone)
end
@doc """
Convert a date/time value to it's Erlang representation
"""
@spec to_erl(Types.valid_datetime()) :: Types.date() | Types.datetime() | {:error, term}
defdelegate to_erl(date), to: Timex.Protocol
@doc """
Convert a date/time value to a Julian calendar date number
"""
@spec to_julian(Types.valid_datetime()) :: integer | {:error, term}
defdelegate to_julian(datetime), to: Timex.Protocol
@doc """
Convert a date/time value to gregorian seconds (seconds since start of year zero)
"""
@spec to_gregorian_seconds(Types.valid_datetime()) :: non_neg_integer | {:error, term}
defdelegate to_gregorian_seconds(datetime), to: Timex.Protocol
@doc """
Convert a date/time value to gregorian microseconds (microseconds since start of year zero)
"""
@spec to_gregorian_microseconds(Types.valid_datetime()) :: non_neg_integer | {:error, term}
defdelegate to_gregorian_microseconds(datetime), to: Timex.Protocol
@doc """
Convert a date/time value to seconds since the UNIX epoch
"""
@spec to_unix(Types.valid_datetime()) :: non_neg_integer | {:error, term}
defdelegate to_unix(datetime), to: Timex.Protocol
@doc """
Delegates to `DateTime.from_unix!/2`. To recap the docs:
Converts the given Unix time to DateTime.
The integer can be given in different units according to `System.convert_time_unit/3`
and it will be converted to microseconds internally. Defaults to `:second`.
Unix times are always in UTC and therefore the DateTime will be returned in UTC.
"""
@spec from_unix(secs :: non_neg_integer, :native | Types.second_time_units()) ::
DateTime.t() | no_return
def from_unix(secs, unit \\ :second)

# Accept legacy plural unit names by normalizing them to the singular forms
# understood by `DateTime.from_unix!/2`.
def from_unix(secs, unit) when unit in [:seconds, :milliseconds, :microseconds, :nanoseconds] do
  singular =
    case unit do
      :seconds -> :second
      :milliseconds -> :millisecond
      :microseconds -> :microsecond
      :nanoseconds -> :nanosecond
    end

  from_unix(secs, singular)
end

def from_unix(secs, unit), do: DateTime.from_unix!(secs, unit)
@doc """
Formats a date/time value using the given format string (and optional formatter).
See Timex.Format.DateTime.Formatters.Default or Timex.Format.DateTime.Formatters.Strftime
for documentation on the syntax supported by those formatters.
To use the Default formatter, simply call format/2. To use the Strftime formatter, you
can either alias and pass Strftime by module name, or as a shortcut, you can pass :strftime
instead.
Formatting will convert other dates than Elixir date types (Date, DateTime, NaiveDateTime)
to a NaiveDateTime using `to_naive_datetime/1` before formatting.
## Examples
iex> date = ~D[2016-02-29]
...> Timex.format(date, "{YYYY}-{0M}-{D}")
{:ok, "2016-02-29"}
iex> datetime = Timex.to_datetime({{2016,2,29},{22,25,0}}, "Etc/UTC")
...> Timex.format(datetime, "{ISO:Extended}")
{:ok, "2016-02-29T22:25:00+00:00"}
"""
@spec format(Types.valid_datetime(), format :: String.t()) :: {:ok, String.t()} | {:error, term}
defdelegate format(datetime, format_string), to: Timex.Format.DateTime.Formatter
@doc """
Same as format/2, except using a custom formatter
## Examples
iex> use Timex
...> datetime = Timex.to_datetime({{2016,2,29},{22,25,0}}, "America/Chicago")
iex> Timex.format(datetime, "%FT%T%:z", :strftime)
{:ok, "2016-02-29T22:25:00-06:00"}
"""
@spec format(Types.valid_datetime(), format :: String.t(), formatter :: atom) ::
{:ok, String.t()} | {:error, term}
defdelegate format(datetime, format_string, formatter), to: Timex.Format.DateTime.Formatter
@doc """
Same as format/2, except takes a locale name to translate text to.
Translations only apply to units, relative time phrases, and only for the locales in the
list of supported locales in the Timex documentation.
"""
@spec lformat(Types.valid_datetime(), format :: String.t(), locale :: String.t()) ::
{:ok, String.t()} | {:error, term}
defdelegate lformat(datetime, format_string, locale), to: Timex.Format.DateTime.Formatter
@doc """
Same as lformat/3, except takes a formatter as it's last argument.
Translations only apply to units, relative time phrases, and only for the locales in the
list of supported locales in the Timex documentation.
"""
@spec lformat(
Types.valid_datetime(),
format :: String.t(),
locale :: String.t(),
formatter :: atom
) :: {:ok, String.t()} | {:error, term}
defdelegate lformat(datetime, format_string, locale, formatter),
to: Timex.Format.DateTime.Formatter
@doc """
Same as format/2, except it returns only the value (not a tuple) and raises on error.
## Examples
iex> date = ~D[2016-02-29]
...> Timex.format!(date, "{YYYY}-{0M}-{D}")
"2016-02-29"
"""
@spec format!(Types.valid_datetime(), format :: String.t()) :: String.t() | no_return
defdelegate format!(datetime, format_string), to: Timex.Format.DateTime.Formatter
@doc """
Same as format/3, except it returns only the value (not a tuple) and raises on error.
## Examples
iex> use Timex
...> datetime = Timex.to_datetime({{2016,2,29},{22,25,0}}, "America/Chicago")
iex> Timex.format!(datetime, "%FT%T%:z", :strftime)
"2016-02-29T22:25:00-06:00"
"""
@spec format!(Types.valid_datetime(), format :: String.t(), formatter :: atom) ::
String.t() | no_return
defdelegate format!(datetime, format_string, formatter), to: Timex.Format.DateTime.Formatter
@doc """
Same as lformat/3, except local_format! raises on error.
See lformat/3 docs for usage examples.
"""
@spec lformat!(Types.valid_datetime(), format :: String.t(), locale :: String.t()) ::
String.t() | no_return
defdelegate lformat!(datetime, format_string, locale), to: Timex.Format.DateTime.Formatter
@doc """
Same as lformat/4, except local_format! raises on error.
See lformat/4 docs for usage examples
"""
@spec lformat!(
Types.valid_datetime(),
format :: String.t(),
locale :: String.t(),
formatter :: atom
) :: String.t() | no_return
defdelegate lformat!(datetime, format_string, locale, formatter),
to: Timex.Format.DateTime.Formatter
@doc """
Formats a DateTime using a fuzzy relative duration, from now.
## Examples
iex> use Timex
...> Timex.from_now(Timex.shift(DateTime.utc_now(), days: 2, hours: 1))
"in 2 days"
iex> use Timex
...> Timex.from_now(Timex.shift(DateTime.utc_now(), days: -2))
"2 days ago"
"""
@spec from_now(Types.valid_datetime()) :: String.t() | {:error, term}
def from_now(datetime), do: from_now(datetime, Timex.Translator.default_locale())
@doc """
Formats a DateTime using a fuzzy relative duration, translated using given locale
## Examples
iex> use Timex
...> Timex.from_now(Timex.shift(DateTime.utc_now(), days: 2, hours: 1), "ru")
"через 2 дня"
iex> use Timex
...> Timex.from_now(Timex.shift(DateTime.utc_now(), days: -2), "ru")
"2 дня назад"
"""
@spec from_now(Types.valid_datetime(), String.t()) :: String.t() | {:error, term}
def from_now(datetime, locale) when is_binary(locale) do
  # Normalize to a NaiveDateTime, then render the "{relative}" phrase with
  # the relative formatter; any error tuple is passed through unchanged.
  case to_naive_datetime(datetime) do
    %NaiveDateTime{} = naive ->
      case lformat(naive, "{relative}", locale, :relative) do
        {:ok, phrase} -> phrase
        {:error, _} = error -> error
      end

    {:error, _} = error ->
      error
  end
end
# Formats a DateTime using a fuzzy relative duration, with a reference datetime other than now
@spec from_now(Types.valid_datetime(), Types.valid_datetime()) :: String.t() | {:error, term}
def from_now(datetime, reference_date),
do: from_now(datetime, reference_date, Timex.Translator.default_locale())
@doc """
Formats a DateTime using a fuzzy relative duration, with a reference datetime other than now,
translated using the given locale
"""
@spec from_now(Types.valid_datetime(), Types.valid_datetime(), String.t()) ::
String.t() | {:error, term}
def from_now(datetime, reference_date, locale) when is_binary(locale) do
  # Normalize the subject first; only convert the reference date once the
  # subject is known to be valid (preserves short-circuit error behavior).
  case to_naive_datetime(datetime) do
    {:error, _} = error ->
      error

    %NaiveDateTime{} = dt ->
      relative_phrase(dt, to_naive_datetime(reference_date), locale)
  end
end

# Renders the relative phrase between two naive datetimes, propagating a
# failed conversion of the reference date.
defp relative_phrase(_dt, {:error, _} = error, _locale), do: error

defp relative_phrase(dt, %NaiveDateTime{} = ref, locale) do
  case Timex.Format.DateTime.Formatters.Relative.relative_to(
         dt,
         ref,
         "{relative}",
         locale
       ) do
    {:ok, phrase} -> phrase
    {:error, _} = error -> error
  end
end
@doc """
Formats an Erlang timestamp using the ISO-8601 duration format, or optionally, with a custom
formatter of your choosing.
See Timex.Format.Duration.Formatters.Default or Timex.Format.Duration.Formatters.Humanized
for documentation on the specific formatter behaviour.
To use the Default formatter, simply call format_duration/2.
To use the Humanized formatter, you can either alias and pass Humanized by module name,
or as a shortcut, you can pass :humanized instead.
## Examples
iex> use Timex
...> duration = Duration.from_seconds(Timex.to_unix({2016, 2, 29}))
...> Timex.format_duration(duration)
"P46Y2M10D"
iex> use Timex
...> duration = Duration.from_seconds(Timex.to_unix({2016, 2, 29}))
...> Timex.format_duration(duration, :humanized)
"46 years, 2 months, 1 week, 3 days"
iex> use Timex
...> datetime = Duration.from_seconds(Timex.to_unix(~N[2016-02-29T22:25:00]))
...> Timex.format_duration(datetime, :humanized)
"46 years, 2 months, 1 week, 3 days, 22 hours, 25 minutes"
"""
@spec format_duration(Duration.t()) :: String.t() | {:error, term}
defdelegate format_duration(timestamp), to: Timex.Format.Duration.Formatter, as: :format
@doc """
Same as format_duration/1, except it also accepts a formatter
"""
@spec format_duration(Duration.t(), atom) :: String.t() | {:error, term}
defdelegate format_duration(timestamp, formatter),
to: Timex.Format.Duration.Formatter,
as: :format
@doc """
Same as format_duration/1, except takes a locale for use in translation
"""
@spec lformat_duration(Duration.t(), locale :: String.t()) :: String.t() | {:error, term}
defdelegate lformat_duration(timestamp, locale),
to: Timex.Format.Duration.Formatter,
as: :lformat
@doc """
Same as lformat_duration/2, except takes a formatter as an argument
"""
@spec lformat_duration(Duration.t(), locale :: String.t(), atom) :: String.t() | {:error, term}
defdelegate lformat_duration(timestamp, locale, formatter),
to: Timex.Format.Duration.Formatter,
as: :lformat
@doc """
Parses a datetime string into a DateTime struct, using the provided format string (and optional tokenizer).
See Timex.Format.DateTime.Formatters.Default or Timex.Format.DateTime.Formatters.Strftime
for documentation on the syntax supported in format strings by their respective tokenizers.
To use the Default tokenizer, simply call parse/2. To use the Strftime tokenizer, you
can either alias and pass Timex.Parse.DateTime.Tokenizer.Strftime by module name,
or as a shortcut, you can pass :strftime instead.
## Examples
iex> use Timex
...> {:ok, result} = Timex.parse("2016-02-29", "{YYYY}-{0M}-{D}")
...> result
~N[2016-02-29T00:00:00]
iex> use Timex
...> expected = Timex.to_datetime({{2016, 2, 29}, {22, 25, 0}}, "America/Chicago")
...> {:ok, result} = Timex.parse("2016-02-29T22:25:00-06:00", "{ISO:Extended}")
...> Timex.equal?(expected, result)
true
iex> use Timex
...> expected = Timex.to_datetime({{2016, 2, 29}, {22, 25, 0}}, "America/Chicago")
...> {:ok, result} = Timex.parse("2016-02-29T22:25:00-06:00", "%FT%T%:z", :strftime)
...> Timex.equal?(expected, result)
true
"""
@spec parse(String.t(), String.t()) :: {:ok, DateTime.t() | NaiveDateTime.t()} | {:error, term}
@spec parse(String.t(), String.t(), atom) ::
{:ok, DateTime.t() | NaiveDateTime.t()} | {:error, term}
defdelegate parse(datetime_string, format_string), to: Timex.Parse.DateTime.Parser
defdelegate parse(datetime_string, format_string, tokenizer), to: Timex.Parse.DateTime.Parser
@doc """
Same as parse/2 and parse/3, except parse! raises on error.
See parse/2 or parse/3 docs for usage examples.
"""
@spec parse!(String.t(), String.t()) :: DateTime.t() | NaiveDateTime.t() | no_return
@spec parse!(String.t(), String.t(), atom) :: DateTime.t() | NaiveDateTime.t() | no_return
defdelegate parse!(datetime_string, format_string), to: Timex.Parse.DateTime.Parser
defdelegate parse!(datetime_string, format_string, tokenizer), to: Timex.Parse.DateTime.Parser
@doc """
Given a format string, validates that the format string is valid for the Default formatter.
Given a format string and a formatter, validates that the format string is valid for that formatter.
## Examples
iex> use Timex
...> Timex.validate_format("{YYYY}-{M}-{D}")
:ok
iex> use Timex
...> Timex.validate_format("{YYYY}-{M}-{V}")
{:error, "Expected end of input at line 1, column 11"}
iex> use Timex
...> Timex.validate_format("%FT%T%:z", :strftime)
:ok
"""
@spec validate_format(String.t()) :: :ok | {:error, term}
@spec validate_format(String.t(), atom) :: :ok | {:error, term}
defdelegate validate_format(format_string), to: Timex.Format.DateTime.Formatter, as: :validate
defdelegate validate_format(format_string, formatter),
to: Timex.Format.DateTime.Formatter,
as: :validate
@doc """
Gets the current century
## Examples
iex> #{__MODULE__}.century
21
"""
@spec century() :: non_neg_integer | {:error, term}
def century(), do: century(:calendar.universal_time())
@doc """
Given a date, get the century this date is in.
## Examples
iex> Timex.today |> #{__MODULE__}.century
21
iex> Timex.now |> #{__MODULE__}.century
21
iex> #{__MODULE__}.century(2016)
21
"""
@spec century(Types.year() | Types.valid_datetime()) :: non_neg_integer | {:error, term}
def century(year) when is_integer(year) do
  # Years ending a century exactly (rem == 0) belong to that century;
  # all others fall in the following one (2000 -> 20, 2016 -> 21).
  completed = div(year, 100)

  case rem(year, 100) do
    0 -> completed
    _ -> completed + 1
  end
end

def century(date), do: Timex.Protocol.century(date)
@doc """
Convert an iso ordinal day number to the day it represents in the current year.
## Examples
iex> %Date{:year => year} = Timex.from_iso_day(180)
...> %Date{:year => todays_year} = Timex.today()
...> year == todays_year
true
"""
@spec from_iso_day(non_neg_integer) :: Date.t() | {:error, term}
def from_iso_day(day) when is_day_of_year(day) do
{{year, _, _}, _} = :calendar.universal_time()
from_iso_day(day, year)
end
def from_iso_day(_), do: {:error, {:from_iso_day, :invalid_iso_day}}
@doc """
Same as from_iso_day/1, except you can expect the following based on the second parameter:
- If an integer year is given, the result will be a Date struct
- For any date/time value, the result will be in the same format (i.e. Date -> Date)
In all cases, the resulting value will be the date representation of the provided ISO day in that year
## Examples
### Creating a Date from the given day
iex> use Timex
...> expected = ~D[2015-06-29]
...> (expected === Timex.from_iso_day(180, 2015))
true
### Creating a Date/DateTime from the given day
iex> use Timex
...> expected = Timex.to_datetime({{2015, 6, 29}, {0,0,0}}, "Etc/UTC")
...> beginning = Timex.to_datetime({{2015,1,1}, {0,0,0}}, "Etc/UTC")
...> (expected === Timex.from_iso_day(180, beginning))
true
### Shifting a Date/DateTime to the given day
iex> use Timex
...> date = Timex.to_datetime({{2015,6,26}, {12,0,0}}, "Etc/UTC")
...> expected = Timex.to_datetime({{2015, 6, 29}, {12,0,0}}, "Etc/UTC")
...> (Timex.from_iso_day(180, date) === expected)
true
"""
@spec from_iso_day(non_neg_integer, Types.year() | Types.valid_datetime()) ::
Types.valid_datetime() | {:error, term}
def from_iso_day(day, year) when is_day_of_year(day) and is_year(year) do
{year, month, day} = Helpers.iso_day_to_date_tuple(year, day)
%Date{year: year, month: month, day: day}
end
def from_iso_day(day, datetime), do: Timex.Protocol.from_iso_day(datetime, day)
@doc """
Return a pair {year, week number} (as defined by ISO 8601) that the given
Date/DateTime value falls on.
## Examples
iex> #{__MODULE__}.iso_week({1970, 1, 1})
{1970,1}
"""
@spec iso_week(Types.valid_datetime()) :: {Types.year(), Types.weeknum()} | {:error, term}
defdelegate iso_week(datetime), to: Timex.Protocol
@doc """
Same as iso_week/1, except this takes a year, month, and day as distinct arguments.
## Examples
iex> #{__MODULE__}.iso_week(1970, 1, 1)
{1970,1}
"""
@spec iso_week(Types.year(), Types.month(), Types.day()) ::
{Types.year(), Types.weeknum()} | {:error, term}
def iso_week(year, month, day) when is_date(year, month, day),
do: :calendar.iso_week_number({year, month, day})
def iso_week(_, _, _),
do: {:error, {:iso_week, :invalid_date}}
@doc """
Return a 3-tuple {year, week number, weekday} for the given Date/DateTime.
## Examples
iex> #{__MODULE__}.iso_triplet(Timex.epoch)
{1970, 1, 4}
"""
@spec iso_triplet(Types.valid_datetime()) ::
{Types.year(), Types.weeknum(), Types.weekday()} | {:error, term}
def iso_triplet(datetime) do
# Normalize to the Erlang representation first; `to_erl/1` may yield either
# a bare date tuple or a {date, time} tuple, each handled by its own clause.
case to_erl(datetime) do
{:error, _} = err ->
err
{y, m, d} = date ->
{iso_year, iso_week} = iso_week(y, m, d)
{iso_year, iso_week, Timex.weekday(date)}
{{y, m, d} = date, _} ->
# Datetime tuple: the time component is irrelevant to the ISO triplet.
{iso_year, iso_week} = iso_week(y, m, d)
{iso_year, iso_week, Timex.weekday(date)}
end
end
@doc """
Given an ISO triplet `{year, week number, weekday}`, convert it to a Date struct.
## Examples
iex> expected = Timex.to_date({2014, 1, 28})
iex> Timex.from_iso_triplet({2014, 5, 2}) === expected
true
"""
@spec from_iso_triplet(Types.iso_triplet()) :: Date.t() | {:error, term}
def from_iso_triplet({year, week, weekday})
when is_year(year) and is_week_of_year(week) and is_day_of_week(weekday, :mon) do
# ISO week 1 always contains January 4th, so that date's weekday anchors the
# offset between ordinal day numbers and ISO week numbering.
{_, _, jan4weekday} = iso_triplet({year, 1, 4})
offset = jan4weekday + 3
ordinal_day = week * 7 + weekday - offset
# The computed ordinal day may fall outside 1..365/366, meaning the ISO week
# year differs from the calendar year; normalize across the year boundary
# (leap years contribute 366 days, others 365).
{year, iso_day} =
case {year, ordinal_day} do
{year, ordinal_day} when ordinal_day < 1 and is_leap_year(year - 1) ->
{year - 1, ordinal_day + 366}
{year, ordinal_day} when ordinal_day < 1 ->
{year - 1, ordinal_day + 365}
{year, ordinal_day} when ordinal_day > 366 and is_leap_year(year) ->
{year + 1, ordinal_day - 366}
{year, ordinal_day} when ordinal_day > 365 and not is_leap_year(year) ->
{year + 1, ordinal_day - 365}
_ ->
{year, ordinal_day}
end
{year, month, day} = Helpers.iso_day_to_date_tuple(year, iso_day)
%Date{year: year, month: month, day: day}
end
def from_iso_triplet({_, _, _}), do: {:error, {:from_iso_triplet, :invalid_triplet}}
@doc """
Returns a list of all valid timezone names in the Olson database
"""
@spec timezones() :: [String.t()]
def timezones(), do: Tzdata.zone_list()
@doc """
Get a TimezoneInfo object for the specified offset or name.
When offset or name is invalid, exception is raised.
If no DateTime value is given for the second parameter, the current date/time
will be used (in other words, it will return the current timezone info for the
given zone). If one is provided, the timezone info returned will be based on
the provided DateTime (or Erlang datetime tuple) value.
## Examples
iex> date = Timex.to_datetime({2015, 4, 12})
...> tz = Timex.timezone(:utc, date)
...> tz.full_name
"Etc/UTC"
iex> tz = Timex.timezone("America/Chicago", {2015,4,12})
...> {tz.full_name, tz.abbreviation}
{"America/Chicago", "CDT"}
iex> tz = #{__MODULE__}.timezone(+2, {2015, 4, 12})
...> {tz.full_name, tz.abbreviation}
{"Etc/GMT-2", "+02"}
"""
@spec timezone(Types.valid_timezone() | TimezoneInfo.t(), Types.valid_datetime()) ::
TimezoneInfo.t() | AmbiguousTimezoneInfo.t() | {:error, term}
# UTC shortcuts: the default TimezoneInfo struct represents Etc/UTC, so no
# resolution against the tzdata database is needed.
def timezone(:utc, _), do: %TimezoneInfo{}
def timezone("UTC", _), do: %TimezoneInfo{}
def timezone("Etc/UTC", _), do: %TimezoneInfo{}
# Named zone: resolve the zone rules in effect at the given instant
# (expressed as gregorian seconds since year zero).
def timezone(tz, datetime) when is_binary(tz) do
case to_gregorian_seconds(datetime) do
{:error, _} = err ->
err
seconds_from_zeroyear ->
Timezone.resolve(tz, seconds_from_zeroyear)
end
end
def timezone(%TimezoneInfo{} = tz, datetime), do: Timezone.get(tz, datetime)
# Anything else (e.g. a numeric UTC offset): translate it to a zone name
# first, then resolve that name at the given instant as above.
def timezone(tz, datetime) do
case to_gregorian_seconds(datetime) do
{:error, _} = err ->
err
seconds_from_zeroyear ->
case Timezone.name_of(tz) do
{:error, _} = err ->
err
tzname ->
Timezone.resolve(tzname, seconds_from_zeroyear)
end
end
end
@doc """
Return a boolean indicating whether the given date is valid.
## Examples
iex> use Timex
...> Timex.is_valid?(~N[0001-01-01T01:01:01])
true
iex> use Timex
...> %Date{year: 1, day: 1, month: 13} |> #{__MODULE__}.is_valid?
false
"""
@spec is_valid?(Types.valid_datetime()) :: boolean | {:error, term}
defdelegate is_valid?(datetime), to: Timex.Protocol
@doc """
Returns a boolean indicating whether the provided term represents a valid time,
valid times are one of:
- `{hour, min, sec}`
- `{hour, min, sec, ms}`
"""
@spec is_valid_time?(term) :: boolean
def is_valid_time?({hour, min, sec}) when is_time(hour, min, sec), do: true
def is_valid_time?({hour, min, sec, ms}) when is_time(hour, min, sec, ms), do: true
def is_valid_time?(_), do: false
@doc """
Returns a boolean indicating whether the provided term represents a valid timezone,
valid timezones are one of:
- TimezoneInfo struct
- A timezone name as a string
- `:utc` as a shortcut for the UTC timezone
- `:local` as a shortcut for the local timezone
- A number representing an offset from UTC
"""
@spec is_valid_timezone?(term) :: boolean
def is_valid_timezone?(timezone) do
  # A timezone is valid exactly when it can be resolved to a canonical name.
  not match?({:error, _}, Timezone.name_of(timezone))
end
@doc """
Returns a boolean indicating whether the first `Timex.Comparable` occurs before the second
"""
@spec before?(Time, Time) :: boolean | {:error, term}
@spec before?(Comparable.comparable(), Comparable.comparable()) :: boolean | {:error, term}
def before?(a, b) do
  # compare/2 yields -1 | 0 | 1 or an error tuple; errors pass through.
  case compare(a, b) do
    {:error, _} = error -> error
    result -> result == -1
  end
end
@doc """
Returns a boolean indicating whether the first `Timex.Comparable` occurs after the second
"""
@spec after?(Time, Time) :: boolean | {:error, term}
@spec after?(Comparable.comparable(), Comparable.comparable()) :: boolean | {:error, term}
def after?(a, b) do
  # compare/2 yields -1 | 0 | 1 or an error tuple; errors pass through.
  case compare(a, b) do
    {:error, _} = error -> error
    result -> result == 1
  end
end
@doc """
Returns a boolean indicating whether the first `Timex.Comparable` occurs between the second
and third.
If an error occurs, an error tuple will be returned.
By default, the `start` and `end` bounds are *exclusive*. You can opt for inclusive bounds with the
`inclusive: true` option.
To set just one of the bounds as inclusive, use the
`inclusive: :start` or `inclusive: :end` option.
"""
@type between_options :: [
inclusive:
boolean
| :start
| :end
]
@spec between?(Time, Time, Time, between_options) :: boolean | {:error, term}
@spec between?(
Comparable.comparable(),
Comparable.comparable(),
Comparable.comparable(),
between_options
) :: boolean | {:error, term}
def between?(a, start, ending, options \\ []) do
# Encode inclusivity as minimum required comparison results: compare/2
# returns -1, 0 or 1, so a threshold of 0 admits equality (inclusive bound)
# while 1 requires strict ordering (exclusive bound, the default).
{start_test, ending_test} =
case Keyword.get(options, :inclusive, false) do
:start -> {0, 1}
:end -> {1, 0}
true -> {0, 0}
_ -> {1, 1}
end
in_bounds?(compare(a, start), compare(ending, a), start_test, ending_test)
end
# Propagate a comparison error from either bound before testing thresholds.
defp in_bounds?({:error, _e} = error, _, _, _), do: error
defp in_bounds?(_, {:error, _e} = error, _, _), do: error
defp in_bounds?(start_comparison, ending_comparison, start_test, ending_test) do
start_comparison >= start_test && ending_comparison >= ending_test
end
@doc """
Returns a boolean indicating whether the two `Timex.Comparable` values are equivalent.
Equality here implies that the two Comparables represent the same moment in time (with
the given granularity), not equality of the data structure.
The options for granularity is the same as for `compare/3`, defaults to `:seconds`.
## Examples
iex> date1 = ~D[2014-03-01]
...> date2 = ~D[2014-03-01]
...> #{__MODULE__}.equal?(date1, date2)
true
iex> date1 = ~D[2014-03-01]
...> date2 = Timex.to_datetime({2014, 3, 1}, "Etc/UTC")
...> #{__MODULE__}.equal?(date1, date2)
true
"""
@spec equal?(Time, Time, Comparable.granularity()) :: boolean | {:error, :badarg}
@spec equal?(Comparable.comparable(), Comparable.comparable(), Comparable.granularity()) ::
boolean | {:error, :badarg}
# Bodiless head declaring the default granularity (:seconds).
def equal?(a, a, granularity \\ :seconds)
# Fast path: structurally identical terms are equal at any granularity,
# skipping the (potentially costly) comparison entirely.
def equal?(a, a, _granularity), do: true
def equal?(a, b, granularity) do
case compare(a, b, granularity) do
0 -> true
{:error, _} = res -> res
_ -> false
end
end
@doc """
See docs for `compare/3`
"""
@spec compare(Time, Time) :: Comparable.compare_result()
def compare(%Time{} = a, %Time{} = b) do
compare(a, b, :microseconds)
end
@spec compare(Comparable.comparable(), Comparable.comparable()) :: Comparable.compare_result()
defdelegate compare(a, b), to: Timex.Comparable
@doc """
Compare two `Timex.Comparable` values, returning one of the following values:
* `-1` -- the first date comes before the second one
* `0` -- both arguments represent the same date when coalesced to the same timezone.
* `1` -- the first date comes after the second one
You can provide a few reference constants for the second argument:
- :epoch will compare the first parameter against the Date/DateTime of the first moment of the UNIX epoch
- :zero will compare the first parameter against the Date/DateTime of the first moment of year zero
- :distant_past will compare the first parameter against a date/time infinitely in the past (i.e. it will always return 1)
- :distant_future will compare the first parameter against a date/time infinitely in the future (i.e. it will always return -1)
You can optionally specify a comparison granularity, any of the following:
- :year
- :years
- :month
- :months
- :week
- :weeks
- :calendar_week (weeks of the calendar as opposed to actual weeks in terms of days)
- :calendar_weeks
- :day
- :days
- :hour
- :hours
- :minute
- :minutes
- :second
- :seconds
- :millisecond
- :milliseconds
- :microsecond (default)
- :microseconds
- :duration
and the dates will be compared with the cooresponding accuracy.
The default granularity is `:microsecond`.
## Examples
iex> date1 = ~D[2014-03-04]
iex> date2 = ~D[2015-03-04]
iex> Timex.compare(date1, date2, :year)
-1
iex> Timex.compare(date2, date1, :year)
1
iex> Timex.compare(date1, date1)
0
"""
@spec compare(Time, Time, Comparable.granularity()) :: Comparable.compare_result()
@spec compare(Comparable.comparable(), Comparable.comparable(), Comparable.granularity()) ::
Comparable.compare_result()
def compare(%Time{} = a, %Time{} = b, granularity),
do: Timex.Comparable.Utils.to_compare_result(diff(a, b, granularity))
defdelegate compare(a, b, granularity), to: Timex.Comparable
@doc """
See docs for `diff/3`
"""
@spec diff(Time, Time) :: Duration.t() | integer | {:error, term}
@spec diff(Comparable.comparable(), Comparable.comparable()) ::
Duration.t() | integer | {:error, term}
def diff(%Time{} = a, %Time{} = b), do: diff(a, b, :microseconds)
defdelegate diff(a, b), to: Timex.Comparable
@doc """
Calculate time interval between two dates. The result will be a signed integer, negative
if the first date/time comes before the second, and positive if the first date/time comes
after the second.
You must specify one of the following units:
- :year
- :years
- :month
- :months
- :week
- :weeks
- :calendar_week (weeks of the calendar as opposed to actual weeks in terms of days)
- :calendar_weeks
- :day
- :days
- :hour
- :hours
- :minute
- :minutes
- :second
- :seconds
- :millisecond
- :milliseconds
- :microsecond (default)
- :microseconds
- :duration
and the result will be an integer value of those units or a Duration.
"""
@spec diff(Time, Time, Comparable.granularity()) :: Duration.t() | integer | {:error, term}
@spec diff(Comparable.comparable(), Comparable.comparable(), Comparable.granularity()) ::
Duration.t() | integer | {:error, term}
# Two Time values carry no date component, so at day-or-coarser granularities
# the difference is always 0 by definition.
def diff(%Time{}, %Time{}, granularity)
when granularity in [
:day,
:days,
:week,
:weeks,
:calendar_week,
:calendar_weeks,
:month,
:months,
:year,
:years
] do
0
end
def diff(%Time{} = a, %Time{} = b, granularity) do
# Convert both times to a total-microseconds-since-midnight integer,
# then divide down to the requested granularity.
a = ((a.hour * 60 + a.minute) * 60 + a.second) * 1_000 * 1_000 + elem(a.microsecond, 0)
b = ((b.hour * 60 + b.minute) * 60 + b.second) * 1_000 * 1_000 + elem(b.microsecond, 0)
case granularity do
# NOTE(review): from_seconds/1 discards the sub-second remainder of the
# microsecond difference — presumably intentional; confirm against Duration docs.
:duration -> Duration.from_seconds(div(a - b, 1_000 * 1_000))
us when us in [:microseconds, :microsecond] -> a - b
ms when ms in [:milliseconds, :millisecond] -> div(a - b, 1_000)
s when s in [:seconds, :second] -> div(a - b, 1_000 * 1_000)
min when min in [:minutes, :minute] -> div(a - b, 1_000 * 1_000 * 60)
h when h in [:hours, :hour] -> div(a - b, 1_000 * 1_000 * 60 * 60)
_ -> {:error, {:invalid_granularity, granularity}}
end
end
# Non-Time comparables are handled by the Timex.Comparable protocol.
defdelegate diff(a, b, granularity), to: Timex.Comparable
@doc """
Get the day of the week corresponding to the given name.
The name can be given as a string of the weekday name or its first three characters
(lowercase or capitalized) or as a corresponding atom (lowercase only).
## Examples
iex> #{__MODULE__}.day_to_num("Monday")
1
iex> #{__MODULE__}.day_to_num("monday")
1
iex> #{__MODULE__}.day_to_num("Mon")
1
iex> #{__MODULE__}.day_to_num("mon")
1
iex> #{__MODULE__}.day_to_num(:mon)
1
iex> #{__MODULE__}.day_to_num(:sunday)
7
"""
@spec day_to_num(binary | atom()) :: Types.weekday() | {:error, :invalid_day_name}
# Compile-time metaprogramming: for every entry in @weekdays, generate one
# day_to_num/1 clause per accepted spelling (full name, lowercase, 3-letter
# abbreviation in both casings, and atom forms). Module.eval_quoted injects
# the clauses into this module during compilation.
Enum.each(@weekdays, fn {day_name, day_num} ->
lower = day_name |> String.downcase()
abbr_cased = day_name |> String.slice(0..2)
abbr_lower = lower |> String.slice(0..2)
abbr_atom = abbr_lower |> String.to_atom()
atom = lower |> String.to_atom()
day_quoted =
quote do
def day_to_num(unquote(day_name)), do: unquote(day_num)
def day_to_num(unquote(lower)), do: unquote(day_num)
def day_to_num(unquote(abbr_cased)), do: unquote(day_num)
def day_to_num(unquote(abbr_lower)), do: unquote(day_num)
def day_to_num(unquote(abbr_atom)), do: unquote(day_num)
def day_to_num(unquote(atom)), do: unquote(day_num)
end
Module.eval_quoted(__MODULE__, day_quoted, [], __ENV__)
end)
# Fallback clause for any input that matched none of the generated spellings.
def day_to_num(_), do: {:error, :invalid_day_name}
@doc """
Get the name of the day corresponding to the provided number
## Examples
iex> #{__MODULE__}.day_name(1)
"Monday"
iex> #{__MODULE__}.day_name(0)
{:error, :invalid_weekday_number}
"""
@spec day_name(Types.weekday()) :: String.t() | {:error, :invalid_weekday_number}
# Looks up the localized full weekday name (1 = Monday .. 7 = Sunday).
def day_name(number) when number in 1..7 do
  Translator.default_locale()
  |> Translator.get_weekdays()
  |> Map.get(number)
end

def day_name(_), do: {:error, :invalid_weekday_number}
@doc """
Get the short name of the day corresponding to the provided number
## Examples
iex> #{__MODULE__}.day_shortname(1)
"Mon"
iex> #{__MODULE__}.day_shortname(0)
{:error, :invalid_weekday_number}
"""
@spec day_shortname(Types.weekday()) :: String.t() | {:error, :invalid_weekday_number}
# Looks up the localized abbreviated weekday name (1 = Monday .. 7 = Sunday).
def day_shortname(number) when number in 1..7 do
  Translator.default_locale()
  |> Translator.get_weekdays_abbreviated()
  |> Map.get(number)
end

def day_shortname(_), do: {:error, :invalid_weekday_number}
@doc """
Get the number of the month corresponding to the given name.
## Examples
iex> #{__MODULE__}.month_to_num("January")
1
iex> #{__MODULE__}.month_to_num("january")
1
iex> #{__MODULE__}.month_to_num("Jan")
1
iex> #{__MODULE__}.month_to_num("jan")
1
iex> #{__MODULE__}.month_to_num(:jan)
1
"""
@spec month_to_num(binary) :: integer | {:error, :invalid_month_name}
# Compile-time metaprogramming: for every entry in @months, generate one
# month_to_num/1 clause per accepted spelling — full name, lowercase,
# 3-letter abbreviations, atom, and charlist forms.
Enum.each(@months, fn {month_name, month_num} ->
lower = month_name |> String.downcase()
abbr_cased = month_name |> String.slice(0..2)
abbr_lower = lower |> String.slice(0..2)
symbol = abbr_lower |> String.to_atom()
full_chars = month_name |> String.to_charlist()
abbr_chars = abbr_cased |> String.to_charlist()
# Account for months where full and abbr are equal
# (e.g. "May"): emitting both would produce duplicate clauses.
month_quoted =
if month_name == abbr_cased do
quote do
def month_to_num(unquote(month_name)), do: unquote(month_num)
def month_to_num(unquote(lower)), do: unquote(month_num)
def month_to_num(unquote(symbol)), do: unquote(month_num)
def month_to_num(unquote(full_chars)), do: unquote(month_num)
end
else
quote do
def month_to_num(unquote(month_name)), do: unquote(month_num)
def month_to_num(unquote(lower)), do: unquote(month_num)
def month_to_num(unquote(abbr_cased)), do: unquote(month_num)
def month_to_num(unquote(abbr_lower)), do: unquote(month_num)
def month_to_num(unquote(symbol)), do: unquote(month_num)
def month_to_num(unquote(full_chars)), do: unquote(month_num)
def month_to_num(unquote(abbr_chars)), do: unquote(month_num)
end
end
Module.eval_quoted(__MODULE__, month_quoted, [], __ENV__)
end)
# Fallback clause for any input that matched none of the generated spellings.
def month_to_num(_), do: {:error, :invalid_month_name}
@doc """
Get the name of the month corresponding to the provided number
## Examples
iex> #{__MODULE__}.month_name(1)
"January"
iex> #{__MODULE__}.month_name(0)
{:error, :invalid_month_number}
"""
@spec month_name(Types.month()) :: String.t() | {:error, :invalid_month_number}
# Looks up the localized full month name for a month number in 1..12.
def month_name(number) when number in 1..12 do
  Translator.default_locale()
  |> Translator.get_months()
  |> Map.get(number)
end

def month_name(_), do: {:error, :invalid_month_number}
@doc """
Get the short name of the month corresponding to the provided number
## Examples
iex> #{__MODULE__}.month_shortname(1)
"Jan"
iex> #{__MODULE__}.month_shortname(0)
{:error, :invalid_month_number}
"""
@spec month_shortname(Types.month()) :: String.t() | {:error, :invalid_month_number}
# Looks up the localized abbreviated month name for a month number in 1..12.
def month_shortname(number) when number in 1..12 do
  Translator.default_locale()
  |> Translator.get_months_abbreviated()
  |> Map.get(number)
end

def month_shortname(_), do: {:error, :invalid_month_number}
@doc """
Return weekday number (as defined by ISO 8601) of the specified date.
## Examples
iex> Timex.epoch |> #{__MODULE__}.weekday
4 # (i.e. Thursday)
"""
@spec weekday(Types.valid_datetime()) :: Types.weekday() | {:error, term}
# Dispatches to the type-specific implementation via the Timex.Protocol.
defdelegate weekday(datetime), to: Timex.Protocol
@doc """
Returns the ordinal day number of the date.
## Examples
iex> Timex.day(~D[2015-06-26])
177
"""
@spec day(Types.valid_datetime()) :: Types.daynum() | {:error, term}
defdelegate day(datetime), to: Timex.Protocol
@doc """
Return the number of days in the month which the date falls on.
## Examples
iex> Timex.days_in_month(~D[1970-01-01])
31
"""
@spec days_in_month(Types.valid_datetime()) :: Types.num_of_days() | {:error, term}
defdelegate days_in_month(datetime), to: Timex.Protocol
@doc """
Same as days_in_month/2, except takes year and month as distinct arguments
"""
@spec days_in_month(Types.year(), Types.month()) :: Types.num_of_days() | {:error, term}
# The year/month arity has no protocol receiver, so it lives in Timex.Helpers.
defdelegate days_in_month(year, month), to: Timex.Helpers
@doc """
Returns the week number of the date provided, starting at 1.
## Examples
iex> Timex.week_of_month(~D[2016-03-05])
1
iex> Timex.week_of_month(~N[2016-03-14T00:00:00Z])
3
"""
@spec week_of_month(Types.valid_datetime()) :: Types.week_of_month()
defdelegate week_of_month(datetime), to: Timex.Protocol
@doc """
Same as week_of_month/1, except takes year, month, and day as distinct arguments
## Examples
iex> Timex.week_of_month(2016, 3, 30)
5
"""
@spec week_of_month(Types.year(), Types.month(), Types.day()) :: Types.week_of_month()
# The week-of-month index is the distance (in ISO week numbers) between the
# given day and the first day of its month, shifted so the first week is 1.
def week_of_month(year, month, day) when is_date(year, month, day) do
  {_year, week_of_day} = iso_week(year, month, day)
  {_year, week_of_first} = iso_week(year, month, 1)
  week_of_day - week_of_first + 1
end

def week_of_month(_, _, _), do: {:error, :invalid_date}
@doc """
Given a date returns a date at the beginning of the month.
iex> date = Timex.to_datetime({{2015, 6, 15}, {12,30,0}}, "Europe/Paris")
iex> Timex.beginning_of_month(date)
Timex.to_datetime({{2015, 6, 1}, {0, 0, 0}}, "Europe/Paris")
"""
@spec beginning_of_month(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
# Dispatches to the type-specific implementation via the Timex.Protocol.
defdelegate beginning_of_month(datetime), to: Timex.Protocol
@doc """
Same as beginning_of_month/1, except takes year and month as distinct arguments
"""
@spec beginning_of_month(Types.year(), Types.month()) :: Date.t() | {:error, term}
# Fix: the guard previously read `is_year(month) and is_month(month)`,
# validating `month` twice and never validating `year`. It now checks
# `is_year(year) and is_month(month)`, matching the sibling end_of_month/2.
def beginning_of_month(year, month) when is_year(year) and is_month(month),
do: %Date{year: year, month: month, day: 1}
def beginning_of_month(_, _),
do: {:error, :invalid_year_or_month}
@doc """
Given a date returns a date at the end of the month.
iex> date = ~N[2015-06-15T12:30:00Z]
iex> Timex.end_of_month(date)
~N[2015-06-30T23:59:59Z]
"""
@spec end_of_month(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
# Dispatches to the type-specific implementation via the Timex.Protocol.
defdelegate end_of_month(datetime), to: Timex.Protocol
@doc """
Same as end_of_month/1, except takes year and month as distinct arguments
## Examples
iex> Timex.end_of_month(2016, 2)
~D[2016-02-29]
"""
@spec end_of_month(Types.year(), Types.month()) :: Date.t()
# Builds a Date on the 1st and lets the protocol implementation find
# the month's last day (handles leap years via days_in_month).
def end_of_month(year, month) when is_year(year) and is_month(month),
do: end_of_month(%Date{year: year, month: month, day: 1})
def end_of_month(_, _),
do: {:error, :invalid_year_or_month}
@doc """
Returns what quarter of the year the given date/time falls in.
## Examples
iex> Timex.quarter(4)
2
"""
@spec quarter(Types.month() | Types.valid_datetime()) :: 1..4 | {:error, term}
# For a month number 1..12, the quarter follows directly from integer
# division: months 1-3 -> 1, 4-6 -> 2, 7-9 -> 3, 10-12 -> 4.
def quarter(month) when is_month(month), do: div(month - 1, 3) + 1

def quarter(other) when is_integer(other), do: {:error, :invalid_month}

# Any non-integer input is treated as a date/time and dispatched to the protocol.
def quarter(datetime), do: Timex.Protocol.quarter(datetime)
@doc """
Given a date returns a date at the beginning of the quarter.
iex> date = Timex.to_datetime({{2015, 6, 15}, {12,30,0}}, "America/Chicago")
iex> Timex.beginning_of_quarter(date)
Timex.to_datetime({{2015, 4, 1}, {0, 0, 0}}, "America/Chicago")
"""
@spec beginning_of_quarter(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
# Dispatches to the type-specific implementation via the Timex.Protocol.
defdelegate beginning_of_quarter(datetime), to: Timex.Protocol
@doc """
Given a date or a year and month returns a date at the end of the quarter.
iex> date = ~N[2015-06-15T12:30:00]
...> Timex.end_of_quarter(date)
~N[2015-06-30T23:59:59]
iex> Timex.end_of_quarter(2015, 4)
~D[2015-06-30]
"""
@spec end_of_quarter(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
defdelegate end_of_quarter(datetime), to: Timex.Protocol
@doc """
Same as end_of_quarter/1, except takes year and month as distinct arguments
"""
@spec end_of_quarter(Types.year(), Types.month()) :: Date.t() | {:error, term}
# 3 * quarter(month) is the last month of the quarter; end_of_month then
# resolves that month's final day.
def end_of_quarter(year, month) when is_year(year) and is_month(month) do
end_of_month(%Date{year: year, month: 3 * quarter(month), day: 1})
end
def end_of_quarter(_, _), do: {:error, :invalid_year_or_month}
@doc """
Given a date or a number create a date at the beginning of that year
Examples
iex> date = ~N[2015-06-15T00:00:00]
iex> Timex.beginning_of_year(date)
~N[2015-01-01T00:00:00]
iex> Timex.beginning_of_year(2015)
~D[2015-01-01]
"""
@spec beginning_of_year(Types.year() | Types.valid_datetime()) ::
Types.valid_datetime() | {:error, term}
# A bare year yields January 1st of that year as a Date.
def beginning_of_year(year) when is_year(year),
do: %Date{year: year, month: 1, day: 1}
# Anything else is treated as a date/time and dispatched to the protocol.
def beginning_of_year(datetime), do: Timex.Protocol.beginning_of_year(datetime)
@doc """
Given a date or a number create a date at the end of that year
Examples
iex> date = ~N[2015-06-15T00:00:00]
iex> Timex.end_of_year(date)
~N[2015-12-31T23:59:59]
iex> Timex.end_of_year(2015)
~D[2015-12-31]
"""
@spec end_of_year(Types.year() | Types.valid_datetime()) ::
Types.valid_datetime() | {:error, term}
# A bare year yields December 31st of that year as a Date.
def end_of_year(year) when is_year(year),
do: %Date{year: year, month: 12, day: 31}
def end_of_year(datetime), do: Timex.Protocol.end_of_year(datetime)
@doc """
Number of days to the beginning of the week
The weekstart determines which is the first day of the week, defaults to monday. It can be a number
between 1..7 (1 is monday, 7 is sunday), or any value accepted by `day_to_num/1`.
## Examples
iex> date = ~D[2015-11-30] # Monday 30th November
iex> Timex.days_to_beginning_of_week(date)
0
iex> date = ~D[2015-11-30] # Monday 30th November
iex> Timex.days_to_beginning_of_week(date, :sun)
1
"""
@spec days_to_beginning_of_week(Types.valid_datetime(), Types.weekstart()) ::
        integer | {:error, term}
def days_to_beginning_of_week(date, weekstart \\ 1)

# Named weekstarts (atoms/strings) are first converted to their 1..7 number.
def days_to_beginning_of_week(date, weekstart)
    when is_atom(weekstart) or is_binary(weekstart) do
  days_to_beginning_of_week(date, Timex.day_to_num(weekstart))
end

def days_to_beginning_of_week(date, weekstart) when is_day_of_week(weekstart, :mon) do
  case weekday(date) do
    {:error, _} = err ->
      err

    day_number ->
      # The distance back to the week's first day, wrapped into 0..6.
      # Integer.mod/2 yields a non-negative result even when the raw
      # difference is negative, matching the original 7 + diff branch.
      Integer.mod(day_number - weekstart, 7)
  end
end

# Propagate a failed day_to_num/1 conversion from the clause above.
def days_to_beginning_of_week(_, {:error, _} = err), do: err
def days_to_beginning_of_week(_, _), do: {:error, :badarg}
@doc """
Number of days to the end of the week.
The weekstart can between 1..7, an atom e.g. :mon, or a string e.g. "Monday"
## Examples
Week starting Monday
iex> date = ~D[2015-11-30] # Monday 30th November
iex> Timex.days_to_end_of_week(date)
6
Week starting Sunday
iex> date = ~D[2015-11-30] # Monday 30th November
iex> Timex.days_to_end_of_week(date, :sun)
5
"""
@spec days_to_end_of_week(Types.valid_datetime(), Types.weekstart()) :: integer | {:error, term}
# The distance to the week's end is the complement (within a 7-day week) of
# the distance back to its beginning; error tuples pass through unchanged.
def days_to_end_of_week(date, weekstart \\ :mon) do
  with days when is_integer(days) <- days_to_beginning_of_week(date, weekstart) do
    6 - days
  end
end
@doc """
Shifts the date to the beginning of the week
The weekstart can between 1..7, an atom e.g. :mon, or a string e.g. "Monday"
## Examples
iex> date = ~N[2015-11-30T13:30:30] # Monday 30th November
iex> Timex.beginning_of_week(date)
~N[2015-11-30T00:00:00]
iex> date = ~D[2015-11-30] # Monday 30th November
iex> Timex.beginning_of_week(date, :sun)
~D[2015-11-29]
"""
@spec beginning_of_week(Types.valid_datetime(), Types.weekstart()) ::
Types.valid_datetime() | {:error, term}
defdelegate beginning_of_week(date, weekstart \\ :mon), to: Timex.Protocol
@doc """
Returns a Date or a DateTime representing the end of the week, depending on the input,
i.e. if you pass a date/time value which represents just a date, you will get back a Date,
if both a date and time are present, you will get back a DateTime
The weekstart can between 1..7, an atom e.g. :mon, or a string e.g. "Monday"
## Examples
iex> date = ~N[2015-11-30T13:30:30] # Monday 30th November
...> Timex.end_of_week(date)
~N[2015-12-06T23:59:59]
iex> date = ~D[2015-11-30] # Monday 30th November
...> Timex.end_of_week(date, :sun)
~D[2015-12-05]
"""
@spec end_of_week(Types.valid_datetime(), Types.weekstart()) ::
Types.valid_datetime() | {:error, term}
# NOTE(review): the default here is `1` while beginning_of_week/2 defaults to
# `:mon`; both resolve to Monday (day_to_num(:mon) == 1), so behavior matches,
# but the spelling is inconsistent.
defdelegate end_of_week(datetime, weekstart \\ 1), to: Timex.Protocol
@doc """
Returns a DateTime representing the beginning of the day
## Examples
iex> date = Timex.to_datetime({{2015, 1, 1}, {13, 14, 15}}, "Etc/UTC")
iex> Timex.beginning_of_day(date)
Timex.to_datetime({{2015, 1, 1}, {0, 0, 0}}, "Etc/UTC")
iex> date = ~D[2015-01-01]
...> Timex.beginning_of_day(date)
~D[2015-01-01]
"""
@spec beginning_of_day(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
defdelegate beginning_of_day(datetime), to: Timex.Protocol
@doc """
Returns a DateTime representing the end of the day
## Examples
iex> date = ~N[2015-01-01T13:14:15]
...> Timex.end_of_day(date)
~N[2015-01-01T23:59:59]
"""
@spec end_of_day(Types.valid_datetime()) :: Types.valid_datetime() | {:error, term}
defdelegate end_of_day(datetime), to: Timex.Protocol
@doc """
Return a boolean indicating whether the given year is a leap year. You may
pass a date or a year number.
## Examples
iex> Timex.epoch() |> #{__MODULE__}.is_leap?
false
iex> #{__MODULE__}.is_leap?(2012)
true
"""
@spec is_leap?(Types.valid_datetime() | Types.year()) :: boolean | {:error, term}
# A bare year number is checked directly against the Gregorian leap rules.
def is_leap?(year) when is_year(year), do: :calendar.is_leap_year(year)
# Date/time values are dispatched to the type-specific protocol implementation.
defdelegate is_leap?(date), to: Timex.Protocol
@doc """
Add time to a date using a Duration
Same as `shift(date, Duration.from_minutes(5), :duration)`.
"""
@spec add(Types.valid_datetime(), Duration.t()) ::
Types.valid_datetime() | AmbiguousDateTime.t() | {:error, term}
# Flattens the Duration's megaseconds into seconds (via the @million module
# attribute) and applies the total as a shift.
def add(date, %Duration{megaseconds: mega, seconds: sec, microseconds: micro}),
do: shift(date, seconds: mega * @million + sec, microseconds: micro)
@doc """
Subtract time from a date using a Duration
Same as `shift(date, Duration.from_minutes(5) |> Duration.invert, :timestamp)`.
"""
@spec subtract(Types.valid_datetime(), Duration.t()) ::
Types.valid_datetime() | AmbiguousDateTime.t() | {:error, term}
# Identical to add/2 with every Duration component negated.
def subtract(date, %Duration{megaseconds: mega, seconds: sec, microseconds: micro}),
do: shift(date, seconds: -mega * @million - sec, microseconds: -micro)
@doc """
A single function for adjusting the date using various units: duration,
microseconds, seconds, minutes, hours, days, weeks, months, years.
The result of applying the shift will be the same type as that of the input,
with the exception of shifting DateTimes, which may result in an AmbiguousDateTime
if the shift moves to an ambiguous time period for the zone of that DateTime.
Shifting by months will always return a date in the expected month. Because months
have different number of days, shifting to a month with fewer days may may not be
the same day of the month as the original date.
If an error occurs, an error tuple will be returned.
## Examples
### Shifting across timezone changes
iex> use Timex
...> datetime = Timex.to_datetime({{2016,3,13}, {1,0,0}}, "America/Chicago")
...> # 2-3 AM doesn't exist due to leap forward, shift accounts for this
...> %DateTime{hour: 3} = Timex.shift(datetime, hours: 1)
...> shifted = Timex.shift(datetime, hours: 2)
...> {datetime.zone_abbr, shifted.zone_abbr, shifted.hour}
{"CST", "CDT", 3}
### Shifting and leap days
iex> use Timex
...> date = ~D[2016-02-29]
...> Timex.shift(date, years: -1)
~D[2015-02-28]
### Shifting by months
iex> date = ~D[2016-01-15]
...> Timex.shift(date, months: 1)
~D[2016-02-15]
iex> date = ~D[2016-01-31]
...> Timex.shift(date, months: 1)
~D[2016-02-29]
iex> date = ~D[2016-01-31]
...> Timex.shift(date, months: 2)
~D[2016-03-31]
iex> date = ~D[2016-01-31]
...> Timex.shift(date, months: 1) |> Timex.shift(months: 1)
~D[2016-03-29]
"""
@type shift_options :: [
microseconds: integer,
milliseconds: integer,
seconds: integer,
minutes: integer,
hours: integer,
days: integer,
weeks: integer,
months: integer,
years: integer,
duration: Duration.t()
]
@spec shift(Types.valid_datetime(), shift_options) ::
Types.valid_datetime() | AmbiguousDateTime.t() | {:error, term}
# Dispatches to the type-specific implementation via the Timex.Protocol.
defdelegate shift(date, options), to: Timex.Protocol
@doc """
Return a new date/time value with the specified fields replaced by new values.
Values are automatically validated and clamped to good values by default. If
you wish to skip validation, perhaps for performance reasons, pass `validate: false`.
Values are applied in order, so if you pass `[datetime: dt, date: d]`, the date value
from `date` will override `datetime`'s date value.
Options which do not apply to the input value (for example, `:hour` against a `Date` struct),
will be ignored.
## Example
iex> use Timex
...> expected = ~D[2015-02-28]
...> result = Timex.set(expected, [month: 2, day: 30])
...> result == expected
true
iex> use Timex
...> expected = ~N[2016-02-29T23:30:00]
...> result = Timex.set(expected, [hour: 30])
...> result === expected
true
"""
@type set_options :: [
validate: boolean,
datetime: Types.datetime(),
date: Types.valid_date(),
time: Types.valid_time(),
year: Types.year(),
month: Types.month(),
day: Types.day(),
hour: Types.hour(),
minute: Types.minute(),
second: Types.second(),
microsecond: Types.microsecond()
]
@spec set(Types.valid_datetime(), set_options) :: Types.valid_datetime()
# Dispatches to the type-specific implementation via the Timex.Protocol.
defdelegate set(date, options), to: Timex.Protocol
@doc """
Given a unit to normalize, and the value to normalize, produces a valid
value for that unit, clamped to whatever boundaries are defined for that unit.
## Example
iex> Timex.normalize(:hour, 26)
23
"""
@spec normalize(:date, {integer, integer, integer}) :: Types.date()
@spec normalize(:time, {integer, integer, integer} | {integer, integer, integer, integer}) ::
        Types.time()
@spec normalize(:day, {integer, integer, integer}) :: non_neg_integer
@spec normalize(
        :year | :month | :day | :hour | :minute | :second | :millisecond | :microsecond,
        integer
      ) :: non_neg_integer
# Clamp the year/month first so the day can be clamped against the
# correct month length.
def normalize(:date, {year, month, day}) do
  y = normalize(:year, year)
  m = normalize(:month, month)
  {y, m, normalize(:day, {y, m, day})}
end

# Years below zero are clamped to 0; anything else is left alone.
def normalize(:year, year), do: max(year, 0)

def normalize(:month, month), do: month |> max(1) |> min(12)

def normalize(:time, {hour, min, sec}) do
  {normalize(:hour, hour), normalize(:minute, min), normalize(:second, sec)}
end

def normalize(:time, {hour, min, sec, ms}) do
  {h, m, s} = normalize(:time, {hour, min, sec})
  {h, m, s, normalize(:millisecond, ms)}
end

def normalize(:hour, hour), do: hour |> max(0) |> min(23)
def normalize(:minute, min), do: min |> max(0) |> min(59)
def normalize(:second, sec), do: sec |> max(0) |> min(59)
def normalize(:millisecond, ms), do: ms |> max(0) |> min(999)

# Microseconds carry a {value, precision} pair; only the value is clamped.
def normalize(:microsecond, {us, precision}),
  do: {us |> max(0) |> min(999_999), precision}

def normalize(:day, {year, month, day}) do
  y = normalize(:year, year)
  m = normalize(:month, month)

  # days_in_month always returns an integer for a normalized year/month;
  # matching on is_integer asserts that invariant.
  ndays =
    case Timex.days_in_month(y, m) do
      n when is_integer(n) -> n
    end

  day |> max(1) |> min(ndays)
end
end
|
lib/timex.ex
| 0.858881
| 0.540742
|
timex.ex
|
starcoder
|
defmodule EctoSchemaStruct do
  @moduledoc """
  Struct is defined with a combination of [TypedEctoSchema](https://github.com/bamorim/typed_ecto_schema)
  and [Domo](https://github.com/IvanRublev/Domo).
  The `enforce: true` given to TypedEctoSchema excludes default values from
  the struct.
  Domo automatically validates default values during the compile-time unless the
  `skip_defaults: true` flag is given.
  F.e. remove `default: "Joe"` option for the `:name` field in this file,
  and recompile the project. The compilation should fail because of `nil` that
  is not expected due to `enforce: true`.
  Or change the `:happy` field's default value to `nil`.
  Then the compilation should fail due to the precondition associated with `t()`.
  Or make the `:name` field's default value longer than 10 characters.
  Then the compilation should fail due to the precondition associated with `t()`.
  Domo and Domo.Changeset provides several helper functions for change set
  functions. See how they are used at the end of the file.
  """
  use TypedEctoSchema
  use Domo
  import Ecto.Changeset
  import Domo.Changeset
  # Field-level Domo preconditions: each named type's value must satisfy
  # the attached validation function.
  @type name :: String.t()
  precond name: &validate_required/1
  @type last_name :: String.t()
  precond last_name: &validate_required/1
  typed_schema "people" do
    field(:name, :string, default: "Joe", null: false)
    # `:: type` after a field overrides the type TypedEctoSchema would infer.
    field(:last_name, :string) :: last_name() | nil
    field(:age, :integer) :: non_neg_integer() | nil
    field(:happy, :boolean, default: true, null: false)
    field(:phone, :string)
    timestamps(type: :naive_datetime_usec)
  end
  # Struct-wide invariant, checked by Domo after the per-field preconditions.
  precond t: &validate_full_name/1
  # Local 1-arity precondition helper; does not clash with the imported
  # Ecto.Changeset.validate_required/2 (different arity).
  defp validate_required(name) when byte_size(name) == 0, do: {:error, "can't be empty string"}
  defp validate_required(_name), do: :ok
  defp validate_full_name(struct) do
    if String.length(struct.name) + String.length(struct.last_name || "") > 10 do
      {:error, "Summary length of :name and :last_name can't be greater than 10 bytes."}
    else
      :ok
    end
  end
  # See how the following functions by Domo used in `changeset/2` below:
  # typed_fields() - added to the struct's module
  # required_fields() - added to the struct's module
  # validate_type() - imported from Domo.Changeset
  def changeset(changeset, attrs) do
    changeset
    |> cast(attrs, typed_fields())
    |> validate_required(required_fields())
    |> validate_type()
  end
end
|
example_typed_integrations/lib/ecto_schema_struct.ex
| 0.781414
| 0.65321
|
ecto_schema_struct.ex
|
starcoder
|
defmodule OMG.Watcher.Signature do
  @moduledoc """
  Adapted from https://github.com/exthereum/blockchain.
  Defines helper functions for signing and getting the signature
  of a transaction, as defined in Appendix F of the Yellow Paper.
  For any of the following functions, if chain_id is specified,
  it's assumed that we're post-fork and we should follow the
  specification EIP-155 from:
  https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md
  """
  # Legacy (pre-EIP-155) signatures encode the recovery id as v = 27 or 28.
  @base_recovery_id 27
  # EIP-155 signatures encode it as v = chain_id * 2 + 35 (or + 36).
  @base_recovery_id_eip_155 35
  # r and s are each a 32-byte big-endian integer.
  @signature_len 32
  @type keccak_hash :: binary()
  @type public_key :: <<_::512>>
  @type private_key :: <<_::256>>
  @type hash_v :: integer()
  @type hash_r :: integer()
  @type hash_s :: integer()
  @type signature_len :: unquote(@signature_len)
  @doc """
  Recovers a public key from a signed hash.
  This implements Eq.(208) of the Yellow Paper, adapted from https://stackoverflow.com/a/20000007
  ## Example
  iex(1)> OMG.Watcher.Signature.recover_public(<<2::256>>,
  ...(1)> 28,
  ...(1)> 38_938_543_279_057_362_855_969_661_240_129_897_219_713_373_336_787_331_739_561_340_553_100_525_404_231,
  ...(1)> 23_772_455_091_703_794_797_226_342_343_520_955_590_158_385_983_376_086_035_257_995_824_653_222_457_926
  ...(1)> )
  {:ok,
  <<121, 190, 102, 126, 249, 220, 187, 172, 85, 160, 98, 149, 206, 135, 11, 7, 2, 155, 252, 219, 45, 206, 40,
  217, 89, 242, 129, 91, 22, 248, 23, 152, 72, 58, 218, 119, 38, 163, 196, 101, 93, 164, 251, 252, 14, 17,
  8, 168, 253, 23, 180, 72, 166, 133, 84, 25, 156, 71, 208, 143, 251, 16, 212, 184>>}
  """
  @spec recover_public(keccak_hash(), hash_v, hash_r, hash_s, integer() | nil) ::
          {:ok, public_key} | {:error, atom()}
  def recover_public(hash, v, r, s, chain_id \\ nil) do
    # Build the 64-byte compact signature: r and s, each left-padded to 32 bytes.
    signature =
      pad(:binary.encode_unsigned(r), @signature_len) <>
        pad(:binary.encode_unsigned(s), @signature_len)
    # Fork Ψ EIP-155
    # Undo whichever v-encoding was used to obtain the raw recovery id (0..3).
    recovery_id =
      if not is_nil(chain_id) and uses_chain_id?(v) do
        v - chain_id * 2 - @base_recovery_id_eip_155
      else
        v - @base_recovery_id
      end
    # The recovered key is prefixed with one format byte, which is stripped.
    case ExSecp256k1.recover_compact(hash, signature, recovery_id) do
      {:ok, <<_byte::8, public_key::binary()>>} -> {:ok, public_key}
      {:error, reason} -> {:error, reason}
    end
  end
  @doc """
  Recovers a public key from a signed hash.
  This implements Eq.(208) of the Yellow Paper, adapted from https://stackoverflow.com/a/20000007
  ## Example
  iex(1)> OMG.Watcher.Signature.recover_public(<<2::256>>, <<168, 39, 110, 198, 11, 113, 141, 8, 168, 151, 22, 210, 198, 150, 24, 111, 23,
  ...(1)> 173, 42, 122, 59, 152, 143, 224, 214, 70, 96, 204, 31, 173, 154, 198, 97, 94,
  ...(1)> 203, 172, 169, 136, 182, 131, 11, 106, 54, 190, 96, 128, 227, 222, 248, 231,
  ...(1)> 75, 254, 141, 233, 113, 49, 74, 28, 189, 73, 249, 32, 89, 165, 27>>)
  {:ok,
  <<233, 102, 200, 175, 51, 251, 139, 85, 204, 181, 94, 133, 233, 88, 251, 156,
  123, 157, 146, 192, 53, 73, 125, 213, 245, 12, 143, 102, 54, 70, 126, 35, 34,
  167, 2, 255, 248, 68, 210, 117, 183, 156, 4, 185, 77, 27, 53, 239, 10, 57,
  140, 63, 81, 87, 133, 241, 241, 210, 250, 35, 76, 232, 2, 153>>}
  """
  # Accepts the 65-byte packed form <<r::256, s::256, v::8>> and unpacks it.
  def recover_public(hash, <<r::integer-size(256), s::integer-size(256), v::integer-size(8)>>, chain_id \\ nil) do
    recover_public(hash, v, r, s, chain_id)
  end
  @spec uses_chain_id?(hash_v) :: boolean()
  # v values >= 35 can only be produced by the EIP-155 encoding.
  defp uses_chain_id?(v) do
    v >= @base_recovery_id_eip_155
  end
  @spec pad(binary(), signature_len()) :: binary()
  # Left-pads a binary with zero bytes up to exactly `desired_length` bytes.
  defp pad(binary, desired_length) do
    desired_bits = desired_length * 8
    case byte_size(binary) do
      0 ->
        <<0::size(desired_bits)>>
      x when x <= desired_length ->
        padding_bits = (desired_length - x) * 8
        <<0::size(padding_bits)>> <> binary
      _ ->
        raise "Binary too long for padding"
    end
  end
end
|
apps/omg_watcher/lib/omg_watcher/signature.ex
| 0.866528
| 0.408601
|
signature.ex
|
starcoder
|
defmodule Verk.QueueStats do
  @moduledoc """
  This process will update an :ets table with the following information per queue
  * Current amount of running jobs
  * Amount of finished jobs
  * Amount of failed jobs
  It will persist to redis from time to time
  It also holds information about the current status of queus. They can be:
  * running
  * idle
  * pausing
  * paused
  """
  use GenStage
  require Logger
  alias Verk.QueueStatsCounters

  defmodule State do
    @moduledoc false
    # queues: map of queue name (string) => last known status atom
    defstruct queues: %{}
  end

  # How often (ms) the counters are flushed to Redis.
  @persist_interval 10_000

  @doc false
  def start_link(_) do
    GenStage.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  @doc """
  Lists the queues and their stats searching for a `prefix` if provided
  """
  # Fix: the spec previously declared `Map.t()` but the call returns the
  # list of per-queue stat maps built in handle_call/3.
  @spec all(binary) :: [map()]
  def all(prefix \\ "") do
    GenServer.call(__MODULE__, {:all, prefix})
  end

  # A queue reported as :running with no running jobs is shown as :idle.
  defp status(queue, queues, running_counter) do
    status = queues[queue] || Verk.Manager.status(queue)
    if status == :running and running_counter == 0 do
      :idle
    else
      status
    end
  end

  @doc false
  def init(_) do
    QueueStatsCounters.init()
    # Schedule the first periodic persist; rescheduled in handle_info/2.
    Process.send_after(self(), :persist_stats, @persist_interval)
    {:consumer, %State{}, subscribe_to: [Verk.EventProducer]}
  end

  def handle_call({:all, prefix}, _from, state) do
    result =
      for {queue, running, finished, failed} <- QueueStatsCounters.all(prefix), is_list(queue) do
        queue = to_string(queue)
        %{
          queue: queue,
          status: status(queue, state.queues, running),
          running_counter: running,
          finished_counter: finished,
          failed_counter: failed
        }
      end

    # Cache the freshly computed statuses so later calls avoid Verk.Manager lookups.
    queues =
      for %{queue: queue, status: status} <- result, into: state.queues, do: {queue, status}

    {:reply, result, [], %State{queues: queues}}
  end

  def handle_events(events, _from, state) do
    new_state =
      Enum.reduce(events, state, fn event, state ->
        handle_event(event, state)
      end)

    {:noreply, [], new_state}
  end

  # Fix: dropped the `@doc false` that preceded this defp — documentation
  # attributes on private functions are discarded with a compiler warning.
  defp handle_event(%Verk.Events.JobStarted{job: job}, state) do
    QueueStatsCounters.register(:started, job.queue)
    state
  end

  defp handle_event(%Verk.Events.JobFinished{job: job}, state) do
    QueueStatsCounters.register(:finished, job.queue)
    state
  end

  defp handle_event(%Verk.Events.JobFailed{job: job}, state) do
    QueueStatsCounters.register(:failed, job.queue)
    state
  end

  defp handle_event(%Verk.Events.QueueRunning{queue: queue}, state) do
    QueueStatsCounters.reset_started(queue)
    %{state | queues: Map.put(state.queues, to_string(queue), :running)}
  end

  defp handle_event(%Verk.Events.QueuePausing{queue: queue}, state) do
    %{state | queues: Map.put(state.queues, to_string(queue), :pausing)}
  end

  defp handle_event(%Verk.Events.QueuePaused{queue: queue}, state) do
    %{state | queues: Map.put(state.queues, to_string(queue), :paused)}
  end

  @doc false
  def handle_info(:persist_stats, state) do
    # Best-effort persist: a Redis failure is logged, never crashes the stage.
    case QueueStatsCounters.persist() do
      :ok ->
        :ok
      {:error, reason} ->
        Logger.error("QueueStats failed to persist stats to Redis. Reason: #{inspect(reason)}")
    end
    Process.send_after(self(), :persist_stats, @persist_interval)
    {:noreply, [], state}
  end

  @doc false
  def handle_info(_, state) do
    {:noreply, [], state}
  end
end
|
lib/verk/queue_stats.ex
| 0.700792
| 0.502625
|
queue_stats.ex
|
starcoder
|
defmodule Timex.Format.DateTime.Formatters.Relative do
  @moduledoc """
  Relative time, based on Moment.js
  Uses localized strings.
  The format string should contain {relative}, which is where the phrase will be injected.
  | Range | Sample Output
  ---------------------------------------------------------------------
  | 0 seconds | now
  | 1 to 45 seconds | a few seconds ago
  | 45 to 90 seconds | a minute ago
  | 90 seconds to 45 minutes | 2 minutes ago ... 45 minutes ago
  | 45 to 90 minutes | an hour ago
  | 90 minutes to 22 hours | 2 hours ago ... 22 hours ago
  | 22 to 36 hours | a day ago
  | 36 hours to 25 days | 2 days ago ... 25 days ago
  | 25 to 45 days | a month ago
  | 45 to 345 days | 2 months ago ... 11 months ago
  | 345 to 545 days (1.5 years) | a year ago
  | 546 days+ | 2 years ago ... 20 years ago
  """
  use Timex.Format.DateTime.Formatter
  use Combine
  alias Timex.Format.FormatError
  alias Timex.{Types, Translator}

  # Tokenizes a relative format string into directives. Errors unless at
  # least one non-literal directive (i.e. `{relative}`) is present.
  @spec tokenize(String.t()) :: {:ok, [Directive.t()]} | {:error, term}
  def tokenize(format_string) do
    case Combine.parse(format_string, relative_parser()) do
      results when is_list(results) ->
        directives = results |> List.flatten() |> Enum.filter(fn x -> x !== nil end)

        case Enum.any?(directives, fn %Directive{type: type} -> type != :literal end) do
          false -> {:error, "Invalid format string, must contain at least one directive."}
          true -> {:ok, directives}
        end

      {:error, _} = err ->
        err
    end
  end

  @doc """
  Formats a date/time as a relative time formatted string
  ## Examples
  iex> #{__MODULE__}.format(Timex.shift(Timex.now, minutes: -1), "{relative}")
  {:ok, "1 minute ago"}
  """
  @spec format(Types.calendar_types(), String.t()) :: {:ok, String.t()} | {:error, term}
  def format(date, format_string), do: lformat(date, format_string, Translator.default_locale())

  @spec format!(Types.calendar_types(), String.t()) :: String.t() | no_return
  def format!(date, format_string), do: lformat!(date, format_string, Translator.default_locale())

  @spec lformat(Types.calendar_types(), String.t(), String.t()) ::
          {:ok, String.t()} | {:error, term}
  def lformat(date, format_string, locale) do
    case tokenize(format_string) do
      {:ok, []} ->
        {:error, "There were no formatting directives in the provided string."}

      {:ok, dirs} when is_list(dirs) ->
        # No explicit reference time given: phrase is relative to "now".
        do_format(
          locale,
          Timex.to_naive_datetime(date),
          Timex.Protocol.NaiveDateTime.now(),
          dirs,
          <<>>
        )

      {:error, reason} ->
        {:error, {:format, reason}}
    end
  end

  @spec lformat!(Types.calendar_types(), String.t(), String.t()) :: String.t() | no_return
  def lformat!(date, format_string, locale) do
    case lformat(date, format_string, locale) do
      {:ok, result} -> result
      {:error, reason} -> raise FormatError, message: reason
    end
  end

  @doc """
  Same as `format/2`, except the phrase is relative to the given reference
  date/time instead of the current time.
  """
  def relative_to(date, relative_to, format_string) do
    relative_to(date, relative_to, format_string, Translator.default_locale())
  end

  def relative_to(date, relative_to, format_string, locale) do
    case tokenize(format_string) do
      {:ok, []} ->
        {:error, "There were no formatting directives in the provided string."}

      {:ok, dirs} when is_list(dirs) ->
        do_format(
          locale,
          Timex.to_naive_datetime(date),
          Timex.to_naive_datetime(relative_to),
          dirs,
          <<>>
        )

      {:error, reason} ->
        {:error, {:format, reason}}
    end
  end

  # Unit sizes in seconds (months/years are approximations, per Moment.js).
  @minute 60
  @hour @minute * 60
  @day @hour * 24
  @month @day * 30
  @year @month * 12

  defp do_format(_locale, _date, _relative, [], result), do: {:ok, result}

  defp do_format(locale, date, relative, [%Directive{type: :literal, value: char} | dirs], result)
       when is_binary(char) do
    do_format(locale, date, relative, dirs, <<result::binary, char::binary>>)
  end

  defp do_format(locale, date, relative, [%Directive{type: :relative} | dirs], result) do
    # Positive diff => `date` is in the future relative to `relative`;
    # negative diff => in the past.
    diff = Timex.diff(date, relative, :seconds)

    phrase =
      cond do
        # future
        diff == 0 ->
          Translator.translate(locale, "relative_time", "now")

        diff > 0 && diff <= 45 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} second",
            "in %{count} seconds",
            diff
          )

        diff > 45 && diff < @minute * 2 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} minute",
            "in %{count} minutes",
            1
          )

        diff >= @minute * 2 && diff < @hour ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} minute",
            "in %{count} minutes",
            div(diff, @minute)
          )

        diff >= @hour && diff < @hour * 2 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} hour",
            "in %{count} hours",
            1
          )

        diff >= @hour * 2 && diff < @day ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} hour",
            "in %{count} hours",
            div(diff, @hour)
          )

        diff >= @day && diff < @day * 2 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} day",
            "in %{count} days",
            1
          )

        diff >= @day * 2 && diff < @month ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} day",
            "in %{count} days",
            div(diff, @day)
          )

        diff >= @month && diff < @month * 2 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} month",
            "in %{count} months",
            1
          )

        diff >= @month * 2 && diff < @year ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} month",
            "in %{count} months",
            div(diff, @month)
          )

        diff >= @year && diff < @year * 2 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} year",
            "in %{count} years",
            1
          )

        diff >= @year * 2 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "in %{count} year",
            "in %{count} years",
            div(diff, @year)
          )

        # past (negative diffs). NOTE: the left-hand bounds previously
        # lacked the `* -1` negation (e.g. `diff <= @minute * 2`), which
        # made them vacuously true — correctness relied solely on clause
        # ordering. The bounds are restored here to mirror the future
        # clauses; the selected ranges are unchanged.
        diff < 0 && diff >= -45 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} second ago",
            "%{count} seconds ago",
            diff * -1
          )

        diff < -45 && diff > @minute * 2 * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} minute ago",
            "%{count} minutes ago",
            1
          )

        diff <= @minute * 2 * -1 && diff > @hour * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} minute ago",
            "%{count} minutes ago",
            div(diff * -1, @minute)
          )

        diff <= @hour * -1 && diff > @hour * 2 * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} hour ago",
            "%{count} hours ago",
            1
          )

        diff <= @hour * 2 * -1 && diff > @day * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} hour ago",
            "%{count} hours ago",
            div(diff * -1, @hour)
          )

        diff <= @day * -1 && diff > @day * 2 * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} day ago",
            "%{count} days ago",
            1
          )

        diff <= @day * 2 * -1 && diff > @month * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} day ago",
            "%{count} days ago",
            div(diff * -1, @day)
          )

        diff <= @month * -1 && diff > @month * 2 * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} month ago",
            "%{count} months ago",
            1
          )

        diff <= @month * 2 * -1 && diff > @year * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} month ago",
            "%{count} months ago",
            div(diff * -1, @month)
          )

        diff <= @year * -1 && diff > @year * 2 * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} year ago",
            "%{count} years ago",
            1
          )

        diff <= @year * 2 * -1 ->
          Translator.translate_plural(
            locale,
            "relative_time",
            "%{count} year ago",
            "%{count} years ago",
            div(diff * -1, @year)
          )
      end

    do_format(locale, date, relative, dirs, <<result::binary, phrase::binary>>)
  end

  defp do_format(
         locale,
         date,
         relative,
         [%Directive{type: type, modifiers: mods, flags: flags, width: width} | dirs],
         result
       ) do
    case format_token(locale, type, date, mods, flags, width) do
      {:error, _} = err -> err
      formatted -> do_format(locale, date, relative, dirs, <<result::binary, formatted::binary>>)
    end
  end

  # Token parser
  defp relative_parser do
    many1(
      choice([
        between(char(?{), map(one_of(word(), ["relative"]), &map_directive/1), char(?})),
        map(none_of(char(), ["{", "}"]), &map_literal/1)
      ])
    )
  end

  # Gets/builds the Directives for a given token
  defp map_directive("relative"),
    do: %Directive{:type => :relative, :value => "relative"}

  # Generates directives for literal characters
  defp map_literal([]), do: nil

  defp map_literal(literals)
       when is_list(literals),
       do: Enum.map(literals, &map_literal/1)

  defp map_literal(literal), do: %Directive{type: :literal, value: literal, parser: char(literal)}
end
|
lib/format/datetime/formatters/relative.ex
| 0.877082
| 0.626181
|
relative.ex
|
starcoder
|
defmodule Stagger do
@moduledoc ~S"""
Point-to-point, durable message-queues as GenStage producers.
Stagger enables the creation of GenStage processes that enqueue terms to simple,
file-backed message-queues, allowing the producer and consumer to run independently
of each other, possibly at different times.
+----------+ +----------+ +----------+ +------------+
| Upstream | | MsgQueue | | MsgQueue | | Downstream |
| | -> | | <- | | <---> | |
| Client | | Producer | | Consumer | | Processing |
+----------+ +----------+ +----------+ +------------+
| | read
write | |
+------+
| FILE |
| |
| |
+------+
Your upstream client writes its events into the message-queue (provided by
Stagger), which persists them to local storage. Your (GenStage) consumer, subscribes
to the producer and receives events, via this local storage.
## Producers
Upstream clients must first open their message-queue, via `open/1`, and then use the
resulting process to enqueue writes, via `write/2`.
{:ok, pid} = Stagger.open("/path/to/msg/queue")
...
:ok = Stagger.write(pid, "foo")
:ok = Stagger.write(pid, "bar")
:ok = Stagger.write(pid, "baz")
The process created via `open/1` is the GenStage MsgQueue - by writing entries to it,
it will satisfy demand from a downstream consumer.
## Consumers
Downstream clients are GenStage consumers. They must also open the message-queue, via
`open/1` and then subscribe use existing GenStage subscription facilities:
def init(args) do
{:ok, pid} = Stagger.open("/path/to/msg/queue")
{:ok, sub} = GenStage.sync_subscribe(self(), to: pid, ack: last_processed())
...
end
def handle_events(events, _from, stage) do
...
end
## Sequence numbers
Sequence numbers are used to control the events seen by a subscriber. Every event
delivered to a consumer is a 2-tuple of `{seqno, msg}` and it is the consumer's
responsibility to successfully record this sequence number as having been
processed.
A consumer must indicate its last-processed sequence number by passing `ack: N` in
the subscription options (pass `ack: 0` when no such number exists) whenever it
(re)subscribes. Event delivery will resume from the Nth + 1 event.
Every message _written_ to the message-queue is assigned an incrementing sequence number
by the Stagger process. When an existing message queue is re-opened, the process will
first recover the last written number, using that as the base for any subsequent writes.
## Purging
In order to prevent unconstrained growth of the message-queue file, a consumer may
periodically purge the queue of old entries by passing a `purge: N` option when it
(re)subscribes e.g:
last = last_processed()
{:ok, sub} = GenStage.sync_subscribe(self(), to: pid, ack: last, purge: last)
All entries _up to and including_ N are removed from the head of message-queue file.
The value of N will be capped to no more than the value of the last ack'd message.
To summarize:
- `ack: N` determines that the next delivered message will have a seqno of N + 1
- `purge: M` is a hint to the producer to remove messages 1..M from the head of
the message queue.
## Why not RabbitMQ?
If you think you need something like RabbitMQ, you probably do :-). Stagger is
intended to be a lightweight durable message queue with minimal dependencies.
"""
@doc """
Open a message-queue file, returning the pid responsible for managing it.
The resulting pid can be used by upstream clients to enqueue messages via `write/2`, or
may be used as the target of a GenStage subscribe operation.
The following option may be passed to the function:
* `hibernate_after: N` - After a period of _N_ milliseconds, the returned process will
hibernate. If unspecified, defaults to 15 seconds. Pass `hibernate_after: :infinite`
to inhibit this behaviour. This option only takes effect if the process managing the
queue is created by the call to `open/2`.
"""
@spec open(binary, Keyword.t) :: :ignore | {:error, any} | {:ok, any} | {:ok, pid, any}
def open(path, opts \\ []) when is_binary(path) do
args = Map.new(opts) |> Map.put(:path, path)
case DynamicSupervisor.start_child(Stagger.MsgQueueSupervisor, {Stagger.MsgQueue, args}) do
{:ok, pid} ->
{:ok, pid}
{:error, {:already_started, pid}} ->
{:ok, pid}
error ->
error
end
end
@doc """
Write a term to the message-queue.
"""
@spec write(pid :: atom | pid | {atom, any} | {:via, atom, any}, term :: any, timeout :: :infinite | non_neg_integer()) :: :ok | {:error, any}
def write(pid, term, timeout \\ 5000) do
Stagger.MsgQueue.write(pid, term, timeout)
end
end
|
lib/stagger.ex
| 0.821223
| 0.484746
|
stagger.ex
|
starcoder
|
defmodule Solid.Filter do
import Kernel, except: [abs: 1, ceil: 1, round: 1, floor: 1]
@moduledoc """
Standard filters
"""
  @doc """
  Apply `filter` if it exists. Otherwise return the first input.
  iex> Solid.Filter.apply("upcase", ["ac"])
  "AC"
  iex> Solid.Filter.apply("no_filter_here", [1, 2, 3])
  1
  """
  def apply(filter, args) do
    # Custom filters (configured under `:custom_filters`) take precedence
    # over the built-in filters defined in this module.
    custom_module = Application.get_env(:solid, :custom_filters, __MODULE__)
    cond do
      filter_exists?({custom_module, filter, Enum.count(args)}) ->
        apply_filter({custom_module, filter, args})
      filter_exists?({__MODULE__, filter, Enum.count(args)}) ->
        apply_filter({__MODULE__, filter, args})
      # Unknown filter: pass the input value through unchanged.
      true ->
        List.first(args)
    end
  end
  # Invokes `m.f(a...)`; `f` arrives as a string from the template.
  defp apply_filter({m, f, a}) do
    Kernel.apply(m, String.to_existing_atom(f), a)
  end
  # True when `module` exports `function/arity`. `to_existing_atom/1` is
  # used so untrusted filter names cannot create new atoms; an unknown
  # name raises ArgumentError, which is treated as "does not exist".
  defp filter_exists?({module, function, arity}) do
    try do
      function = String.to_existing_atom(function)
      function_exported?(module, function, arity)
    rescue
      ArgumentError -> false
    end
  end
@doc """
Returns the absolute value of a number.
iex> Solid.Filter.abs(-17)
17
iex> Solid.Filter.abs(17)
17
iex> Solid.Filter.abs("-17.5")
17.5
"""
@spec abs(number | String.t()) :: number
def abs(input) when is_binary(input) do
{float, _} = Float.parse(input)
abs(float)
end
def abs(input), do: Kernel.abs(input)
@doc """
Concatenates two strings and returns the concatenated value.
iex> Solid.Filter.append("www.example.com", "/index.html")
"www.example.com/index.html"
"""
@spec append(any, any) :: String.t()
def append(input, string), do: "#{input}#{string}"
@doc """
Makes the first character of a string capitalized.
iex> Solid.Filter.capitalize("my great title")
"My great title"
iex> Solid.Filter.capitalize(1)
"1"
"""
@spec capitalize(any) :: String.t()
def capitalize(input), do: to_string(input) |> String.capitalize()
@doc """
Rounds the input up to the nearest whole number. Liquid tries to convert the input to a number before the filter is applied.
"""
@spec ceil(number | String.t()) :: number
def ceil(input) when is_binary(input) do
{float, _} = Float.parse(input)
ceil(float)
end
def ceil(input) when is_integer(input), do: input
def ceil(input), do: Float.ceil(input) |> trunc
@doc """
Allows you to specify a fallback in case a value doesn’t exist.
`default` will show its value if the left side is nil, false, or empty
iex> Solid.Filter.default(123, 456)
123
iex> Solid.Filter.default(nil, 456)
456
iex> Solid.Filter.default(false, 456)
456
iex> Solid.Filter.default([], 456)
456
"""
@spec default(any, any) :: any
def default(nil, value), do: value
def default(false, value), do: value
def default([], value), do: value
def default(input, _), do: input
@doc """
Divides a number by the specified number.
The result is rounded down to the nearest integer (that is, the floor) if the divisor is an integer.
{{ 16 | divided_by: 4 }}
iex> Solid.Filter.divided_by(16, 4)
4
iex> Solid.Filter.divided_by(5, 3)
1
iex> Solid.Filter.divided_by(20, 7)
2
"""
@spec divided_by(number, number) :: number
def divided_by(input, operand) when is_integer(operand) do
(input / operand) |> Float.floor() |> trunc
end
def divided_by(input, operand) when is_float(operand) do
input / operand
end
@doc """
Makes each character in a string uppercase.
It has no effect on strings which are already all uppercase.
iex> Solid.Filter.upcase("aBc")
"ABC"
iex> Solid.Filter.upcase(456)
"456"
iex> Solid.Filter.upcase(nil)
""
"""
@spec upcase(any) :: String.t()
def upcase(input), do: input |> to_string |> String.upcase()
@doc """
Makes each character in a string lowercase.
It has no effect on strings which are already all lowercase.
iex> Solid.Filter.downcase("aBc")
"abc"
iex> Solid.Filter.downcase(456)
"456"
iex> Solid.Filter.downcase(nil)
""
"""
@spec downcase(any) :: String.t()
def downcase(input), do: input |> to_string |> String.downcase()
@doc """
Returns the first item of an array.
iex> Solid.Filter.first([1, 2, 3])
1
iex> Solid.Filter.first([])
nil
"""
@spec first(list) :: any
def first(input), do: List.first(input)
@doc """
Rounds a number down to the nearest whole number.
Solid tries to convert the input to a number before the filter is applied.
iex> Solid.Filter.floor(1.2)
1
iex> Solid.Filter.floor(2.0)
2
iex> Solid.Filter.floor("3.5")
3
"""
@spec floor(number | String.t()) :: integer
def floor(input) when is_binary(input) do
{float, _} = Float.parse(input)
floor(float)
end
def floor(input), do: Float.floor(input) |> trunc
@doc """
Removes all occurrences of nil from a list
iex> Solid.Filter.compact([1, nil, 2, nil, 3])
[1, 2, 3]
"""
@spec compact(list) :: list
def compact(input) when is_list(input), do: Enum.reject(input, &(&1 == nil))
def compact(input, property) when is_list(input), do: Enum.reject(input, &(&1[property] == nil))
@doc """
Join a list of strings returning one String glued by `glue
iex> Solid.Filter.join(["a", "b", "c"])
"a b c"
iex> Solid.Filter.join(["a", "b", "c"], "-")
"a-b-c"
"""
@spec join(list, String.t()) :: String.t()
def join(input, glue \\ " ") when is_list(input), do: Enum.join(input, glue)
@doc """
Returns the last item of an array.
iex> Solid.Filter.last([1, 2, 3])
3
iex> Solid.Filter.last([])
nil
"""
@spec last(list) :: any
def last(input), do: List.last(input)
@doc """
Removes all whitespaces (tabs, spaces, and newlines) from the beginning of a string.
The filter does not affect spaces between words.
iex> Solid.Filter.lstrip(" So much room for activities! ")
"So much room for activities! "
"""
@spec lstrip(String.t()) :: String.t()
def lstrip(input), do: String.trim_leading(input)
@doc """
Split input string into an array of substrings separated by given pattern.
iex> Solid.Filter.split("a b c", " ")
~w(a b c)
iex> Solid.Filter.split("", " ")
[""]
"""
@spec split(any, String.t()) :: List.t()
def split(input, pattern), do: to_string(input) |> String.split(pattern)
@doc """
Map through a list of hashes accessing `property`
iex> Solid.Filter.map([%{"a" => "A"}, %{"a" => 1}], "a")
["A", 1]
"""
def map(input, property) when is_list(input) do
Enum.map(input, & &1[property])
end
@doc """
Subtracts a number from another number.
iex> Solid.Filter.minus(4, 2)
2
iex> Solid.Filter.minus(16, 4)
12
iex> Solid.Filter.minus(183.357, 12)
171.357
"""
@spec minus(number, number) :: number
def minus(input, number), do: input - number
@doc """
Adds a number to another number.
iex> Solid.Filter.plus(4, 2)
6
iex> Solid.Filter.plus(16, 4)
20
iex> Solid.Filter.plus(183.357, 12)
195.357
"""
@spec plus(number, number) :: number
def plus(input, number), do: input + number
@doc """
Adds the specified string to the beginning of another string.
iex> Solid.Filter.prepend("/index.html", "www.example.com")
"www.example.com/index.html"
"""
@spec prepend(any, any) :: String.t()
def prepend(input, string), do: "#{string}#{input}"
@doc """
Removes every occurrence of the specified substring from a string.
iex> Solid.Filter.remove("I strained to see the train through the rain", "rain")
"I sted to see the t through the "
"""
@spec remove(String.t(), String.t()) :: String.t()
def remove(input, string) do
String.replace(input, string, "")
end
@doc """
Removes only the first occurrence of the specified substring from a string.
iex> Solid.Filter.remove_first("I strained to see the train through the rain", "rain")
"I sted to see the train through the rain"
"""
@spec remove_first(String.t(), String.t()) :: String.t()
def remove_first(input, string) do
String.replace(input, string, "", global: false)
end
@doc """
Replaces every occurrence of an argument in a string with the second argument.
iex> Solid.Filter.replace("Take my protein pills and put my helmet on", "my", "your")
"Take your protein pills and put your helmet on"
"""
@spec replace(String.t(), String.t(), String.t()) :: String.t()
def replace(input, string, replacement \\ "") do
input |> to_string |> String.replace(string, replacement)
end
@doc """
Replaces only the first occurrence of the first argument in a string with the second argument.
iex> Solid.Filter.replace_first("Take my protein pills and put my helmet on", "my", "your")
"Take your protein pills and put my helmet on"
"""
@spec replace_first(String.t(), String.t(), String.t()) :: String.t()
def replace_first(input, string, replacement \\ "") do
input |> to_string |> String.replace(string, replacement, global: false)
end
@doc """
Reverses the order of the items in an array. reverse cannot reverse a string.
iex> Solid.Filter.reverse(["a", "b", "c"])
["c", "b", "a"]
"""
@spec reverse(list) :: List.t()
def reverse(input), do: Enum.reverse(input)
@doc """
Rounds an input number to the nearest integer or,
if a number is specified as an argument, to that number of decimal places.
iex> Solid.Filter.round(1.2)
1
iex> Solid.Filter.round(2.7)
3
iex> Solid.Filter.round(183.357, 2)
183.36
"""
@spec round(number) :: integer
def round(input, precision \\ nil)
def round(input, nil), do: Kernel.round(input)
def round(input, precision) do
p = :math.pow(10, precision)
Kernel.round(input * p) / p
end
@doc """
Removes all whitespace (tabs, spaces, and newlines) from the right side of a string.
iex> Solid.Filter.rstrip(" So much room for activities! ")
" So much room for activities!"
"""
@spec rstrip(String.t()) :: String.t()
def rstrip(input), do: String.trim_trailing(input)
@doc """
Returns the number of characters in a string or the number of items in an array.
iex> Solid.Filter.size("Ground control to Major Tom.")
28
iex> Solid.Filter.size(~w(ground control to Major Tom.))
5
"""
@spec size(String.t() | list) :: non_neg_integer
def size(input) when is_list(input), do: Enum.count(input)
def size(input), do: String.length(input)
@doc """
Returns a substring of 1 character beginning at the index specified by the argument passed in.
An optional second argument specifies the length of the substring to be returned.
String indices are numbered starting from 0.
iex> Solid.Filter.slice("Liquid", 0)
"L"
iex> Solid.Filter.slice("Liquid", 2)
"q"
iex> Solid.Filter.slice("Liquid", 2, 5)
"quid"
iex> Solid.Filter.slice("Liquid", -3, 2)
"ui"
"""
@spec slice(String.t(), integer, non_neg_integer) :: String.t()
def slice(input, offset, length \\ nil)
def slice(input, offset, nil), do: String.at(input, offset)
def slice(input, offset, length), do: String.slice(input, offset, length)
@doc """
Sorts items in an array by a property of an item in the array. The order of the sorted array is case-sensitive.
iex> Solid.Filter.sort(~w(zebra octopus giraffe SallySnake))
~w(SallySnake giraffe octopus zebra)
"""
@spec sort(List.t()) :: List.t()
def sort(input), do: Enum.sort(input)
@doc """
Sorts items in an array by a property of an item in the array. The order of the sorted array is case-sensitive.
iex> Solid.Filter.sort_natural(~w(zebra octopus giraffe SallySnake))
~w(giraffe octopus SallySnake zebra)
"""
@spec sort_natural(List.t()) :: List.t()
def sort_natural(input) do
Enum.sort(input, &(String.downcase(&1) <= String.downcase(&2)))
end
@doc """
Removes all whitespace (tabs, spaces, and newlines) from both the left and right side of a string.
It does not affect spaces between words.
iex> Solid.Filter.strip(" So much room for activities! ")
"So much room for activities!"
"""
@spec strip(String.t()) :: String.t()
def strip(input), do: String.trim(input)
@doc """
Multiplies a number by another number.
iex> Solid.Filter.times(3, 2)
6
iex> Solid.Filter.times(24, 7)
168
iex> Solid.Filter.times(183.357, 12)
2200.284
"""
@spec times(number, number) :: number
def times(input, operand), do: input * operand
@doc """
truncate shortens a string down to the number of characters passed as a parameter.
If the number of characters specified is less than the length of the string, an ellipsis (…) is appended to the string
and is included in the character count.
iex> Solid.Filter.truncate("Ground control to Major Tom.", 20)
"Ground control to..."
# Custom ellipsis
truncate takes an optional second parameter that specifies the sequence of characters to be appended to the truncated string.
By default this is an ellipsis (…), but you can specify a different sequence.
The length of the second parameter counts against the number of characters specified by the first parameter.
For example, if you want to truncate a string to exactly 10 characters, and use a 3-character ellipsis,
use 13 for the first parameter of truncate, since the ellipsis counts as 3 characters.
iex> Solid.Filter.truncate("Ground control to Major Tom.", 25, ", and so on")
"Ground control, and so on"
# No ellipsis
You can truncate to the exact number of characters specified by the first parameter
and show no trailing characters by passing a blank string as the second parameter:
iex> Solid.Filter.truncate("Ground control to Major Tom.", 20, "")
"Ground control to Ma"
"""
@spec truncate(String.t(), non_neg_integer, String.t()) :: String.t()
def truncate(input, length, ellipsis \\ "...") do
if String.length(input) > length do
length = max(0, length - String.length(ellipsis))
slice(input, 0, length) <> ellipsis
else
input
end
end
@doc """
Shortens a string down to the number of words passed as the argument.
If the specified number of words is less than the number of words in the string, an ellipsis (…) is appended to the string.
iex> Solid.Filter.truncatewords("Ground control to Major Tom.", 3)
"Ground control to..."
# Custom ellipsis
`truncatewords` takes an optional second parameter that specifies the sequence of characters to be appended to the truncated string.
By default this is an ellipsis (…), but you can specify a different sequence.
iex> Solid.Filter.truncatewords("Ground control to Major Tom.", 3, "--")
"Ground control to--"
# No ellipsis
You can avoid showing trailing characters by passing a blank string as the second parameter:
iex> Solid.Filter.truncatewords("Ground control to Major Tom.", 3, "")
"Ground control to"
"""
@spec truncatewords(String.t(), non_neg_integer, String.t()) :: String.t()
def truncatewords(input, max_words, ellipsis \\ "...") do
words = String.split(input, " ")
if length(words) > max_words do
Enum.take(words, max_words)
|> Enum.intersperse(" ")
|> to_string
|> Kernel.<>(ellipsis)
end
end
@doc """
Removes any duplicate elements in an array.
Output
iex> Solid.Filter.uniq(~w(ants bugs bees bugs ants))
~w(ants bugs bees)
"""
@spec uniq(list) :: list
def uniq(input), do: Enum.uniq(input)
end
|
lib/solid/filter.ex
| 0.843895
| 0.55097
|
filter.ex
|
starcoder
|
defmodule OpentelemetryEcto do
@moduledoc """
Telemetry handler for creating OpenTelemetry Spans from Ecto query events.
"""
require OpenTelemetry.Tracer
@doc """
Attaches the OpentelemetryEcto handler to your repo events. This should be called
from your application behaviour on startup.
Example:
OpentelemetryEcto.setup([:blog, :repo])
You may also supply the following options in the second argument:
* `:time_unit` - a time unit used to convert the values of query phase
timings, defaults to `:microsecond`. See `System.convert_time_unit/3`
* `:span_prefix` - the first part of the span name, as a `String.t`,
defaults to the concatenation of the event name with periods, e.g.
`"blog.repo.query"`. This will always be followed with a colon and the
source (the table name for SQL adapters).
"""
def setup(event_prefix, config \\ []) do
event = event_prefix ++ [:query]
:telemetry.attach({__MODULE__, event}, event, &__MODULE__.handle_event/4, config)
end
  @doc false
  # Telemetry callback: builds one OpenTelemetry span per Ecto query
  # event, back-dated so the span covers the measured query duration.
  def handle_event(
        event,
        measurements,
        %{query: query, source: source, result: query_result, repo: repo, type: type},
        config
      ) do
    # Doing all this even if the span isn't sampled so the sampler
    # could technically use the attributes to decide if it should sample or not
    total_time = measurements.total_time
    end_time = :opentelemetry.timestamp()
    # The span is created after the query completed, so derive its start
    # by subtracting the measured duration from "now".
    start_time = end_time - total_time
    database = repo.config()[:database]
    url =
      case repo.config()[:url] do
        nil ->
          # TODO: add port
          URI.to_string(%URI{scheme: "ecto", host: repo.config()[:hostname]})
        url ->
          url
      end
    # Span name is "<prefix>:<source>", the prefix defaulting to the
    # dotted event name (e.g. "blog.repo.query").
    span_name =
      case Keyword.fetch(config, :span_prefix) do
        {:ok, prefix} -> prefix
        :error -> Enum.join(event, ".")
      end <> ":#{source}"
    time_unit = Keyword.get(config, :time_unit, :microsecond)
    db_type =
      case type do
        :ecto_sql_query -> :sql
        _ -> type
      end
    # Mark the span as errored when the query result is not `{:ok, _}`.
    result =
      case query_result do
        {:ok, _} -> []
        _ -> [error: true]
      end
    # TODO: need connection information to complete the required attributes
    # net.peer.name or net.peer.ip and net.peer.port
    base_attributes =
      Keyword.merge(result,
        "db.type": db_type,
        "db.statement": query,
        source: source,
        "db.instance": database,
        "db.url": url,
        "total_time_#{time_unit}s": System.convert_time_unit(total_time, :native, time_unit)
      )
    # Convert whichever optional phase timings are present into attributes
    # in the configured time unit. The key set is fixed (decode/query/queue
    # x one time unit), so the dynamic atom creation below is bounded.
    attributes =
      measurements
      |> Enum.into(%{})
      |> Map.take(~w(decode_time query_time queue_time)a)
      |> Enum.reject(&is_nil(elem(&1, 1)))
      |> Enum.map(fn {k, v} ->
        {String.to_atom("#{k}_#{time_unit}s"), System.convert_time_unit(v, :native, time_unit)}
      end)
    s =
      OpenTelemetry.Tracer.start_span(span_name, %{
        start_time: start_time,
        attributes: attributes ++ base_attributes
      })
    OpenTelemetry.Span.end_span(s)
  end
end
|
instrumentation/opentelemetry_ecto/lib/opentelemetry_ecto.ex
| 0.712832
| 0.435962
|
opentelemetry_ecto.ex
|
starcoder
|
defmodule Mint.TransportError do
@moduledoc """
Represents an error with the transport used by an HTTP connection.
A `Mint.TransportError` struct is an exception, so it can be raised as any
other exception.
## Struct fields
This exception represents an error with the transport (TCP or SSL) used
by an HTTP connection. The exception struct itself is opaque, that is,
not all fields are public. The following are the public fields:
* `:reason` - a term representing the error reason. The value of this field
can be:
* `:timeout` - if there's a timeout in interacting with the socket.
* `:closed` - if the connection has been closed.
* `:protocol_not_negotiated` - if the ALPN protocol negotiation failed.
* `{:bad_alpn_protocol, protocol}` - when the ALPN protocol is not
one of the supported protocols, which are `http/1.1` and `h2`.
* `t::inet.posix/0` - if there's any other error with the socket,
such as `:econnrefused` or `:nxdomain`.
* `t::ssl.error_alert/0` - if there's an SSL error.
## Message representation
If you want to convert an error reason to a human-friendly message (for example
for using in logs), you can use `Exception.message/1`:
iex> {:error, %Mint.TransportError{} = error} = Mint.HTTP.connect(:http, "nonexistent", 80)
iex> Exception.message(error)
"non-existing domain"
"""
reason_type =
quote do
:timeout
| :closed
| :protocol_not_negotiated
| {:bad_alpn_protocol, String.t()}
| :inet.posix()
end
reason_type =
if System.otp_release() >= "21" do
quote do: unquote(reason_type) | :ssl.error_alert()
else
reason_type
end
@type t() :: %__MODULE__{reason: unquote(reason_type) | term()}
defexception [:reason]
  # Exception callback: renders the transport `:reason` as a
  # human-friendly message (used by `Exception.message/1`).
  def message(%__MODULE__{reason: reason}) do
    format_reason(reason)
  end
  ## Our reasons.
  defp format_reason(:protocol_not_negotiated) do
    "ALPN protocol not negotiated"
  end
  defp format_reason({:bad_alpn_protocol, protocol}) do
    "bad ALPN protocol #{inspect(protocol)}, supported protocols are \"http/1.1\" and \"h2\""
  end
  defp format_reason(:closed) do
    "socket closed"
  end
  defp format_reason(:timeout) do
    "timeout"
  end
  # :ssl.format_error/1 falls back to :inet.format_error/1 when the error is not an SSL-specific
  # error (at least since OTP 19+), so we can just use that.
  defp format_reason(reason) do
    case :ssl.format_error(reason) do
      # :ssl.format_error/1 returns a charlist; a "Unexpected error:"
      # prefix means it couldn't describe the reason, so fall back to
      # `inspect/1` of the raw term.
      'Unexpected error:' ++ _ -> inspect(reason)
      message -> List.to_string(message)
    end
  end
end
|
lib/mint/transport_error.ex
| 0.906169
| 0.439086
|
transport_error.ex
|
starcoder
|
defmodule Warpath.Filter.Predicate do
  @moduledoc false

  # Binary/boolean operators allowed at the head of a filter expression.
  # Each one is compiled below into a `resolve/2` clause that delegates to
  # the corresponding `Kernel` operator.
  @operators [
    :<,
    :>,
    :<=,
    :>=,
    :==,
    :!=,
    :===,
    :!==,
    :and,
    :or,
    :in
  ]

  # Guard-style predicate functions allowed in filter expressions; each is
  # compiled into a `resolve/2` clause delegating to the `Kernel` function.
  @functions [
    :is_atom,
    :is_binary,
    :is_boolean,
    :is_float,
    :is_integer,
    :is_list,
    :is_map,
    :is_nil,
    :is_number,
    :is_tuple,
    :not
  ]

  # The token representing the document node currently under evaluation.
  @current_node {:current_node, "@"}

  @type expression ::
          {:property, atom() | String.t()}
          | {:index_access, integer()}
          | {:current_node, String.t()}

  @doc """
  Evaluates a filter predicate `expression` against `context`.

  Literals evaluate to themselves; operator/function expressions are resolved
  recursively. Type errors raised while navigating the context (indexing a
  non-list, accessing a property on a non-container) make the predicate
  evaluate to `false` rather than raising.
  """
  @spec eval(boolean | {atom, expression}, any) :: boolean()
  def eval({:literal, false}, _), do: false
  def eval({:literal, true}, _), do: true

  # One `eval/2` clause per known operator/function, plus `:has_children?`.
  # The `catch` turns navigation errors thrown by `resolve/2` into `false`.
  for action <- [:has_children?] ++ @operators ++ @functions do
    def eval({unquote(action), _} = expression, context) do
      resolve(expression, context)
    catch
      error when error in [:not_indexable_type, :not_container_type] -> false
    end
  end

  for operator <- @operators do
    defp resolve({unquote(operator), [left, right]}, context) do
      unquote(operator)(resolve(left, context), resolve(right, context))
    end
  end

  for function <- @functions do
    defp resolve({unquote(function), expression}, context) do
      unquote(function)(resolve(expression, context))
    end
  end

  defp resolve(@current_node, context), do: context
  defp resolve({:literal, value}, _context), do: value

  # A subpath is applied left-to-right, each token navigating deeper into
  # the value produced by the previous one.
  defp resolve({:subpath_expression, tokens}, context) do
    Enum.reduce(tokens, context, fn token, acc -> resolve(token, acc) end)
  end

  # `has_children?` navigates all but the last token, then checks whether the
  # final property/index exists on the resulting container.
  defp resolve({:has_children?, {:subpath_expression, tokens}}, context) do
    {last_token, rest} = List.pop_at(tokens, -1)
    result = resolve({:subpath_expression, rest}, context)

    case {result, last_token} do
      {map, {:dot, {:property, key}}} when is_map(map) ->
        Map.has_key?(map, key)

      {list, {:dot, {:property, key}}} when is_list(list) and is_atom(key) ->
        Keyword.has_key?(list, key)

      {list, {:indexes, [index_access: index]}} when is_list(list) and index >= 0 ->
        length(list) > index

      {list, {:indexes, [index_access: index]}} when is_list(list) ->
        # Negative index: in bounds iff `length(list) + index >= 0`.
        count = length(list)
        count + index >= 0

      _ ->
        false
    end
  end

  defp resolve({:dot, {:property, name}}, context) do
    case {context, name} do
      {map = %{}, key} ->
        Map.get(map, key)

      {list, key} when is_list(list) and is_atom(key) ->
        Access.get(list, key)

      _ ->
        throw(:not_container_type)
    end
  end

  defp resolve({:indexes, [index_access: index]}, context) do
    case context do
      nil ->
        nil

      indexable when is_list(indexable) ->
        Enum.at(indexable, index)

      _ ->
        throw(:not_indexable_type)
    end
  end

  # A list of expressions resolves element-wise (e.g. the haystack of `in`).
  defp resolve(term, context) when is_list(term) do
    Enum.map(term, &resolve(&1, context))
  end
end
|
lib/warpath/filter/predicate.ex
| 0.561335
| 0.571199
|
predicate.ex
|
starcoder
|
defmodule RulEx do
  @moduledoc """
  RulEx is a rules engine and simplified expressions language for evaluating
  a set of conditions against a set of facts, yielding true or false on
  whether the provided facts adhere to the set of conditions given.

  ## RulEx expressions

  The expressions used by RulEx are inspired by Lisp, and are simply nested lists
  with the first element being an operand, think of it as a function, and the
  remaining elements arguments to this operand. Evaluating these expressions
  is done against a set of facts provided as a `RulEx.DataBag`, and the
  outcome is a boolean on whether or not the conditions match on
  the given facts.

  ### Supported operands

  #### Logical operands

  These operands can only be used in `RulEx.Behaviour.eval/2` callback, and only yield `true`
  or `false` results. They can be passed "facts" from outside the expressions using
  `RulEx.DataBag` and the value operands supported by RulEx.

  - The any operand `:|`, `"|"`, which matches any list of RulEx expressions and yields true
    if one of them yields true, false otherwise.
  - The all operand `:&`, `"&"`, which matches any list of RulEx expressions and yields true
    if none of them yields false, true otherwise.
  - The negation operand `:!`, which matches a single RulEx expression and yields the negation
    of whatever the input expression yields, e.g. true for false, and false for true.
  - The equality operand `:=`, `"="`, which matches two RulEx value expressions and yields
    true if they are equal, this operation is non strict, i.e. `1.0 = 1` is true.
  - The inequality operand `:!=`, `"!="`, which matches two RulEx value expressions and yields
    true if they are not equal, this operation is non strict, i.e. `1.0 != 1` is false.
  - The less than operand `:<`, `"<"`, which matches two RulEx value expressions and yields
    true if the one on the left hand side is less than the one on the right hand side.
  - The greater than operand `:>`, `">"`, which matches two RulEx value expressions and yields
    true if the one on the left hand side is greater than the one on the right hand side.
  - The less than or equals operand `:<=`, `"<="`, which matches two RulEx value expressions and yields
    true if the one on the left hand side is less than or equals the one on the right hand side.
  - The greater than or equals operand `:>=`, `">="`, which matches two RulEx value expressions and yields
    true if the one on the left hand side is greater than or equals the one on the right hand side.
  - The contains operand `:in`, `"in"`, which matches a "needle", a single RulEx value expression, and a
    "haystack", a list of normal values (not RulEx expressions), and yields true if the needle exists
    in the given haystack.

  #### Value operands

  Value operands are used to represent data in a RulEx expression; there are only two operands for this use case
  `:val` (`"val"`) and `:var` (`"var"`).

  `val` operand can be used in order to store exact values in the expressions, this includes the conditions
  you want to match when applying the logical operands.

  `var` operand can be used in order to pass facts from outside of the expressions, this is done by using
  any Elixir term that implements the `RulEx.DataBag` protocol.

  Both these operands accept 2 arguments, a data type, and a value; the value in a `val` expression is
  the actual value to be yielded back, while in a `var` expression it is the key used with `RulEx.DataBag`.
  The values are validated against the data type argument given, and would fail/be rejected if
  they don't match properly, i.e. you cannot return the value `"string"` when the data
  type expressed is `"number"`.

  The supported data types are,

  1. `"any"`, which will yield back any value without any validation.
  1. `"number"`, which will yield back any numeric value, regardless if an integer or float.
  1. `"integer"`, which will yield back only integer values.
  1. `"float"`, which will yield back only float values.
  1. `"string"`, which will yield back only strings.
  1. `"boolean"`, which will yield back only boolean values, i.e. `true` and `false`.
  1. `"list"`, which will yield back only lists of values, no validation is required on the values within the list.
  1. `"map"`, which will yield back only map of arbitrary keys and values, no validation is required on either the
     keys or the values within the map.
  1. `"time"`, which will yield back time values, it can be given string values and it will parse them, so long
     as those values are times as defined by ISO 8601.
  1. `"date"`, which will yield back date values, it can be given string values and it will parse them, so long
     as those values are dates as defined by ISO 8601.
  1. `"datetime"`, which will yield back date time values (naive datetime in Elixir terminology), it can be
     given string values and it will parse them, so long as those values are datetimes as defined by ISO 8601.

  Currently RulEx does not support any other additional types, and all of its operations regarding values are
  strictly typed, meaning it will reject to complete operations if types don't match.

  #### Reserved operands

  RulEx reserves all the previously defined expressions as defined in *Logical operands* and in
  *Value operands*, in both their string and atom formats. RulEx also disallows for any
  non-reserved operand that isn't a string. RulEx does however allow for adding
  *custom operands* as defined in Custom operand.

  #### Custom operands

  RulEx behaviour can be extended with any arbitrary custom operands by simply defining the `RulEx.Behaviour.operand/3`
  callback, this will receive the operand string, a list of arguments given to the operand, and the current data bag
  holding the facts being processed.

  ### Example expressions

  You can view the test suite for example expressions of a variety of types and forms, in short an expression is
  simply a list of arbitrarily nested RulEx valid values, this includes other RulEx expressions and normal
  Elixir terms, e.g. `[:=, [:val, "any", "hello"], [:val, "any", "world"]]` is a valid expression.

  Think of RulEx expressions very similarly to how [Lisp](https://en.wikipedia.org/wiki/Lisp_(programming_language))
  syntax works, these can be built and manipulated by code easily due to their easy to use structure. Some
  examples are given below, for more varied examples please check the test suite!

      [ :!
      , [ :=
        , [ :val
          , "string"
          , "some value"
          ]
        , [ :val
          , "string"
          , "some other value"
          ]
        ]
      ]

      [ :|
      , [ :=
        , [ :val
          , "string"
          , "some value"
          ]
        , [ :val
          , "string"
          , "some other value"
          ]
        ]
      , [ :=
        , [ :val
          , "number"
          , 10
          ]
        , [ :val
          , "number"
          , 10.0
          ]
        ]
      ]

  ### Storing and transporting these expressions

  RulEx provides the `RulEx.EncodeError` behaviour used to define converting RulEx expressions
  from and to Elixir terms, this can be helpful when you need to store these expressions
  and/or to transfer these expressions over the wire. By default, a JSON encoding is
  implemented for you.

  ## Usage

  Simply use `RulEx` as is, as it implements the `RulEx.Behaviour` fully. However, if you want to
  add custom operands to the set of supported rules, simply define the `RulEx.Behaviour` as is,
  and then run wild. To do so simply use `RulEx.Behaviour` in your module, then implement
  your custom operands by overriding the `RulEx.Behaviour.operand/3` callback.

  ## Caveats and quirks

  - If no arguments are given to the any (`:|`) operand, it will yield back `false`, [follow this issue in Elixir
    for a discussion around this behaviour](https://github.com/elixir-lang/elixir/issues/1508).
  - If no arguments are given to the all (`:&`) operand, it will yield back `true`.
  - Value expressions can be used in the `RulEx.Behaviour.eval/2` callback, the results will simply
    be converted based on their truthiness as defined by Elixir and Erlang (i.e. only `false`
    and `nil` are falsy values).
  - Logical expressions will always yield an error if passed to the `RulEx.Behaviour.value/2` callbacks.
  - Results coming back from custom `RulEx.Behaviour.operand/3` are treated like value expressions
    and are converted to booleans based on their truthiness.
  - If given a reserved expression but as a string instead of an atom, RulEx will convert it to an atom
    and use it as if it was passed as the atom for the reserved expression.
  - The comparison operands, `<`, `>`, `<=`, `>=`, `=`, and `!=` all validate that **both** arguments are
    `val` or `var` expressions of the same type before doing anything, and will yield an error otherwise.
  """

  @typedoc """
  The set of all supported operands by RulEx, these include a set of reserved
  operands, as well as any arbitrary strings that can be used to extend RulEx
  behaviour to match any custom domain that needs rules evaluation.
  """
  @type op ::
          :|
          | :&
          | :!
          | :=
          | :!=
          | :<
          | :>
          | :<=
          | :>=
          | :in
          # Data related operands
          | :val
          | :var
          # Any custom user defined operands; everything defined before this,
          # as well as their string equivalents, are all reserved by RulEx.
          | String.t()

  @typedoc "The set of all valid values that represent an \"argument\" in a RulEx expression."
  @type arg :: String.t() | number | DateTime.t() | NaiveDateTime.t() | Date.t() | list(arg) | any

  @typedoc """
  A RulEx expression is a list of operands and arguments, ideally these expressions *must* start with
  one operand and then be followed by any arbitrarily long set of arguments, the arguments can also
  be other RulEx expressions.

  In order to validate whether a given RulEx expression is valid or not you can use the callback
  `RulEx.Behaviour.expr?/1`. Operands and expressions can further be validated with the guards
  defined in the `RulEx.Guards` module.
  """
  @type t :: [op | arg | t]

  use RulEx.Behaviour
end
|
lib/rulex.ex
| 0.902508
| 0.868158
|
rulex.ex
|
starcoder
|
defmodule ExAdmin.Show do
  @moduledoc """
  Override the default show page for an ExAdmin resource.

  By default, ExAdmin renders the show page without any additional
  configuration. It renders each column in the model, except the id,
  inserted_at, and updated_at columns in an attributes table.

  To customize the show page, use the `show` macro.

  ## Examples

      register_resource Survey.Seating do
        show seating do
          attributes_table do
            row :id
            row :name
            row :image, [image: true, height: 100], &(ExAdminDemo.Image.url({&1.image, &1}, :thumb))
          end
          panel "Answers" do
            table_for(seating.answers) do
              column "Question", fn(answer) ->
                "#\{answer.question.name}"
              end
              column "Answer", fn(answer) ->
                "#\{answer.choice.name}"
              end
            end
          end
        end
      end
  """
  import ExAdmin.Helpers
  import ExAdmin.Repo, only: [repo: 0]
  # Kernel.div/2 is excluded because it clashes with the Xain `div` markup
  # element used in show-page blocks.
  import Kernel, except: [div: 2]
  use Xain

  defmacro __using__(_) do
    quote do
      import unquote(__MODULE__)
    end
  end

  @doc """
  Customize the show page.
  """
  defmacro show(resource, [do: contents]) do
    quote location: :keep do
      # Defines show_view/2 on the registering module. `var!` makes `conn`
      # and `resource` visible to the user-supplied block.
      def show_view(var!(conn), unquote(resource) = var!(resource)) do
        import ExAdmin.Utils
        import ExAdmin.ViewHelpers
        # Silences "unused variable" warnings when the block ignores resource.
        _ = var!(resource)

        markup safe: true do
          unquote(contents)
        end
      end
    end
  end

  @doc """
  Display a table of the model's attributes.

  When called with a block, the rows specified in the block will be
  displayed.

  When called without a block, the default attributes table will be
  displayed.
  """
  defmacro attributes_table(name \\ nil, do: block) do
    quote location: :keep do
      # `rows` is accumulated (in reverse) by the `row` macros run in `block`,
      # using a hygienic variable scoped to ExAdmin.Show.
      var!(rows, ExAdmin.Show) = []
      unquote(block)
      rows = var!(rows, ExAdmin.Show) |> Enum.reverse
      schema = %{name: unquote(name), rows: rows}
      ExAdmin.Table.attributes_table var!(conn), var!(resource), schema
    end
  end

  defmacro attributes_table do
    quote location: :keep do
      ExAdmin.Show.default_attributes_table(var!(conn), var!(resource))
    end
  end

  @doc """
  Display a table of a specific model's attributes.

  When called with a block, the rows specified in the block will be
  displayed.

  When called without a block, the default attributes table will be
  displayed.
  """
  defmacro attributes_table_for(resource, do: block) do
    quote location: :keep do
      var!(rows, ExAdmin.Show) = []
      unquote(block)
      rows = var!(rows, ExAdmin.Show) |> Enum.reverse
      resource = unquote(resource)
      schema = %{rows: rows}
      ExAdmin.Table.attributes_table_for var!(conn), resource, schema
    end
  end

  @doc """
  Adds a new panel to the show page.

  The block given must include one of two commands:

  * `table_for` - Displays a table for a `:has_many` association.
  * `contents` - Add HTML to a panel
  """
  defmacro panel(name \\ "", opts \\ [], do: block) do
    quote do
      # `elements` is accumulated by table_for/markup_contents run in `block`.
      var!(elements, ExAdmin.Show) = []
      unquote(block)

      ExAdmin.Table.panel(
        var!(conn),
        [ {:name, unquote(name)}, {:opts, unquote(opts)} | var!(elements, ExAdmin.Show) ], unquote(opts)
      )
    end
  end

  @doc """
  Add a table for a `:has_many` association.

  ## Examples

      show account do
        attributes_table do
          row :username
          row :email
          row :contact
        end
        panel "Inventory" do
          table_for account.inventory do
            column "Asset", &__MODULE__.inventory_name/1
            column "PO", &(&1.sales_order.po)
            column :quantity
          end
        end
      end
  """
  defmacro table_for(resources, opts, do: block) do
    # Sortable tables need a drag-handle column; inject one if missing.
    block = if Keyword.has_key?(opts, :sortable) do
      ensure_sort_handle_column(block)
    else
      block
    end

    quote do
      opts = unquote(opts) |> ExAdmin.Show.prepare_sortable_opts
      var!(columns, ExAdmin.Show) = []
      unquote(block)
      columns = var!(columns, ExAdmin.Show) |> Enum.reverse
      var!(elements, ExAdmin.Show) = var!(elements, ExAdmin.Show) ++ [{
        :table_for,
        %{resources: unquote(resources), columns: columns, opts: opts}
      }]
    end
  end

  defmacro table_for(resources, do: block) do
    quote do
      table_for unquote(resources), [], do: unquote(block)
    end
  end

  # Prepends a :sort_handle_column to the column block unless one is present.
  # NOTE(review): this only matches a `:__block__` AST (two or more
  # expressions); a sortable table_for with a single column would raise a
  # FunctionClauseError at compile time — confirm whether that is intended.
  defp ensure_sort_handle_column({:__block__, trace, cols} = block) do
    has_sort_handle_column = Enum.any?(cols, fn({ctype, _, _}) -> ctype == :sort_handle_column end)
    if has_sort_handle_column do
      block
    else
      {:__block__, trace, [{:sort_handle_column, [], nil} | cols]}
    end
  end

  # Expands a `sortable: [resource: _, assoc_name: _]` option into the table
  # class and data attribute consumed by the sortable JS; other opts pass through.
  def prepare_sortable_opts(opts) do
    case opts[:sortable] do
      [resource: resource, assoc_name: assoc_name] ->
        path = ExAdmin.Utils.admin_association_path(resource, assoc_name, :update_positions)

        [
          class: "table sortable",
          "data-sortable-link": path
        ] |> Keyword.merge(Keyword.drop(opts, [:sortable]))

      _ ->
        opts
    end
  end

  defmacro sortable_table_for(resource, assoc_name, do: block) do
    quote do
      resource = unquote(resource)
      assoc_name = unquote(assoc_name)
      resources = Map.get(resource, assoc_name)
      table_for resources, [sortable: [resource: resource, assoc_name: assoc_name]], do: unquote(block)
    end
  end

  @doc """
  Add a markup block to a form.

  Allows the use of the Xain markup to be used in a panel.

  ## Examples

      show user do
        attributes_table
        panel "Testing" do
          markup_contents do
            div ".my-class" do
              text "Testing"
            end
          end
        end
      end
  """
  defmacro markup_contents(do: block) do
    quote do
      content = markup :nested do
        unquote(block)
      end

      var!(elements, ExAdmin.Show) = var!(elements, ExAdmin.Show) ++ [{
        :contents, %{contents: content}
      }]
    end
  end

  @doc """
  Add a select box to add N:M associations to the resource on show page.

  *Note:* If you have custom keys in intersection table, please use association_filler/2 to specify them explicit.

  ## Examples

      show post do
        attributes_table
        panel "Tags" do
          table_for(post.post_tags) do
            column :tag
          end
          markup_contents do
            association_filler post, :tags, autocomplete: true
          end
        end
      end
  """
  defmacro association_filler(resource, assoc_name, opts) do
    quote bind_quoted: [resource: resource, assoc_name: assoc_name, opts: opts] do
      # Derive the intersection-table keys, then let explicit opts win.
      opts = ExAdmin.Schema.get_intersection_keys(resource, assoc_name)
      |> Keyword.merge([assoc_name: to_string(assoc_name)])
      |> Keyword.merge(opts)
      association_filler(resource, opts)
    end
  end

  @doc """
  Add a select box to add N:M associations to the resource on show page.

  ## Options

  * `resource_key` - foreign key in the intersection table for resource model
  * `assoc_name` - name of association
  * `assoc_key` - foreign key in the intersection table for association model
  * `assoc_model` - association Ecto model
  * `autocomplete` - preload all possible associations if `false` and use autocomplete if `true`

  ## Examples

      show post do
        attributes_table
        panel "Tags" do
          table_for(post.post_tags) do
            column :tag
          end
          markup_contents do
            association_filler(post, resource_key: "post_id", assoc_name: "tags",
              assoc_key: "tag_id", autocomplete: false)
          end
        end
      end
  """
  defmacro association_filler(resource, opts) do
    quote bind_quoted: [resource: resource, opts: opts] do
      required_opts = [:resource_key, :assoc_name, :assoc_key]
      unless MapSet.subset?(MapSet.new(required_opts), MapSet.new(Keyword.keys(opts))) do
        raise ArgumentError.exception("""
        `association_filler` macro requires following options:
        #{inspect(required_opts)}
        For example:
        association_filler(category, resource_key: "category_id", assoc_name: "properties",
          assoc_key: "property_id", autocomplete: false)
        """)
      end
      hr
      h4(opts[:label] || "Enter new #{opts[:assoc_name]}")
      ExAdmin.Show.build_association_filler_form(resource, opts[:autocomplete], opts)
    end
  end

  # Autocomplete variant: an empty select2 box populated via AJAX.
  def build_association_filler_form(resource, true = _autocomplete, opts) do
    path = ExAdmin.Utils.admin_association_path(resource, opts[:assoc_name], :add)

    markup do
      Xain.form class: "association_filler_form", name: "select_#{opts[:assoc_name]}", method: "post", action: path do
        Xain.input name: "_csrf_token", value: Plug.CSRFProtection.get_csrf_token, type: "hidden"
        Xain.input name: "resource_key", value: opts[:resource_key], type: "hidden"
        Xain.input name: "assoc_key", value: opts[:assoc_key], type: "hidden"
        Xain.select class: "association_filler", multiple: "multiple", name: "selected_ids[]" do
          option ""
        end
        Xain.input value: "Save", type: "submit", class: "btn btn-primary", style: "margin-left: 1em;"
      end

      associations_path = ExAdmin.Utils.admin_association_path(resource, opts[:assoc_name])

      script type: "text/javascript" do
        text """
        $(document).ready(function() {
          ExAdmin.association_filler_opts.ajax.url = "#{associations_path}";
          $(".association_filler").select2(ExAdmin.association_filler_opts);
        });
        """
      end
    end
  end

  # Non-autocomplete variant: preload every association not yet linked.
  def build_association_filler_form(resource, _autocomplete, opts) do
    assoc_name = String.to_existing_atom(opts[:assoc_name])
    assoc_defn = ExAdmin.get_registered_by_association(resource, assoc_name)
    path = ExAdmin.Utils.admin_association_path(resource, opts[:assoc_name], :add)

    Xain.form class: "association_filler_form", name: "select_#{opts[:assoc_name]}", method: "post", action: path do
      Xain.input name: "_csrf_token", value: Plug.CSRFProtection.get_csrf_token, type: "hidden"
      Xain.input name: "resource_key", value: opts[:resource_key], type: "hidden"
      Xain.input name: "assoc_key", value: opts[:assoc_key], type: "hidden"
      Xain.select class: "select2", multiple: "multiple", name: "selected_ids[]" do
        ExAdmin.Model.potential_associations_query(resource, assoc_defn.__struct__, assoc_name)
        |> repo.all
        |> Enum.each(fn(opt) ->
          option ExAdmin.Helpers.display_name(opt), value: ExAdmin.Schema.get_id(opt)
        end)
      end
      Xain.input value: "Save", type: "submit", class: "btn btn-primary", style: "margin-left: 1em;"
    end
  end

  @doc false
  def default_show_view(conn, resource) do
    markup safe: true do
      default_attributes_table conn, resource
    end
  end

  # Renders every schema field except :id and the timestamps.
  @doc false
  def default_attributes_table(conn, resource) do
    case conn.assigns.defn do
      nil ->
        throw :invalid_route

      %{__struct__: _} = defn ->
        columns = defn.resource_model.__schema__(:fields)
        |> Enum.filter(&(not &1 in [:id, :inserted_at, :updated_at]))
        |> Enum.map(&({translate_field(defn, &1), %{}}))
        |> Enum.filter(&(not is_nil(&1)))
        ExAdmin.Table.attributes_table conn, resource, %{rows: columns}
    end
  end
end
|
lib/ex_admin/show.ex
| 0.813646
| 0.451568
|
show.ex
|
starcoder
|
defmodule Spandex do
@moduledoc """
The functions here call the corresponding functions on the configured adapter.
"""
require Logger
alias Spandex.{
Span,
SpanContext,
Trace,
Tracer
}
@type headers :: [{atom, binary}] | [{binary, binary}] | %{binary => binary}
@typedoc "Used for Span and Trace IDs (type defined by adapters)"
@type id :: term()
@typedoc "Unix timestamp in nanoseconds"
@type timestamp :: non_neg_integer()
@doc """
Starts a new trace.
Span updates for the first span may be passed in. They are skipped if they are
invalid updates. As such, if you aren't sure if your updates are valid, it is
safer to perform a second call to `update_span/2` and check the return value.
"""
@spec start_trace(binary(), Tracer.opts()) ::
{:ok, Trace.t()}
| {:error, :disabled}
| {:error, :trace_running}
| {:error, [Optimal.error()]}
def start_trace(_, :disabled), do: {:error, :disabled}
def start_trace(name, opts) do
strategy = opts[:strategy]
if strategy.trace_active?(opts[:trace_key]) do
Logger.error("Tried to start a trace over top of another trace. name: #{inspect(name)}")
{:error, :trace_running}
else
do_start_trace(name, opts)
end
end
@doc """
Start a new span.
Span updates for that span may be passed in. They are skipped if they are
invalid updates. As such, if you aren't sure if your updates are valid, it is
safer to perform a second call to `update_span/2` and check the return value.
"""
@spec start_span(String.t(), Tracer.opts()) ::
{:ok, Span.t()}
| {:error, :disabled}
| {:error, :no_trace_context}
def start_span(_, :disabled), do: {:error, :disabled}
def start_span(name, opts) do
strategy = opts[:strategy]
case strategy.get_trace(opts[:trace_key]) do
{:error, :no_trace_context} = error ->
error
{:error, _} = error ->
error
{:ok, trace} ->
do_start_span(name, trace, opts)
end
end
@doc """
Updates the current span.
In the case of an invalid update, validation errors are returned.
"""
@spec update_span(Tracer.opts(), boolean()) ::
{:ok, Span.t()}
| {:error, :disabled}
| {:error, :no_trace_context}
| {:error, :no_span_context}
| {:error, [Optimal.error()]}
def update_span(opts, top? \\ false)
def update_span(:disabled, _), do: {:error, :disabled}
def update_span(opts, top?) do
strategy = opts[:strategy]
case strategy.get_trace(opts[:trace_key]) do
{:error, :no_trace_context} = error ->
error
{:ok, %Trace{stack: []}} ->
{:error, :no_span_context}
{:ok, trace} ->
do_update_span(trace, opts, top?)
{:error, _} = error ->
error
end
end
@doc """
Updates the top-most parent span.
Any spans that have already been started will not inherit any of the updates
from that span. For instance, if you change `service`, it will not be
reflected in already-started spans.
In the case of an invalid update, validation errors are returned.
"""
@spec update_top_span(Tracer.opts()) ::
{:ok, Span.t()}
| {:error, :disabled}
| {:error, :no_trace_context}
| {:error, [Optimal.error()]}
def update_top_span(:disabled), do: {:error, :disabled}
def update_top_span(opts), do: update_span(opts, true)
@doc """
Updates all spans, whether complete or in-progress.
In the case of an invalid update for any span, validation errors are returned.
"""
@spec update_all_spans(Tracer.opts()) ::
{:ok, Trace.t()}
| {:error, :disabled}
| {:error, :no_trace_context}
| {:error, [Optimal.error()]}
def update_all_spans(:disabled), do: {:error, :disabled}
def update_all_spans(opts) do
strategy = opts[:strategy]
with {:ok, %Trace{stack: stack, spans: spans} = trace} <- strategy.get_trace(opts[:trace_key]),
{:ok, new_spans} <- update_many_spans(spans, opts),
{:ok, new_stack} <- update_many_spans(stack, opts) do
strategy.put_trace(opts[:trace_key], %{trace | stack: new_stack, spans: new_spans})
end
end
@doc """
Finishes the current trace.
Span updates for the top span may be passed in. They are skipped if they are
invalid updates. As such, if you aren't sure if your updates are valid, it is
safer to perform a call to `update_span/2` and check the return value before
finishing the trace.
"""
@spec finish_trace(Tracer.opts()) ::
{:ok, Trace.t()}
| {:error, :disabled}
| {:error, :no_trace_context}
def finish_trace(:disabled), do: {:error, :disabled}
def finish_trace(opts) do
strategy = opts[:strategy]
adapter = opts[:adapter]
case strategy.get_trace(opts[:trace_key]) do
{:error, :no_trace_context} = error ->
Logger.error("Tried to finish a trace without an active trace.")
error
{:ok, %Trace{spans: spans, stack: stack} = trace} ->
unfinished_spans =
stack
|> List.update_at(0, &update_or_keep(&1, opts))
|> Enum.map(&ensure_completion_time_set(&1, adapter))
sender = opts[:sender] || adapter.default_sender()
# TODO: We need to define a behaviour for the Sender API.
sender.send_trace(%Trace{trace | spans: spans ++ unfinished_spans, stack: []})
strategy.delete_trace(opts[:trace_key])
{:error, _} = error ->
error
end
end
@doc """
Finishes the current span.
Span updates for that span may be passed in. They are skipped if they are
invalid updates. As such, if you aren't sure if your updates are valid, it is
safer to perform a call to `update_span/2` and check the return value before
finishing the span.
"""
@spec finish_span(Tracer.opts()) ::
{:ok, Span.t()}
| {:error, :disabled}
| {:error, :no_trace_context}
| {:error, :no_span_context}
def finish_span(:disabled), do: {:error, :disabled}
def finish_span(opts) do
strategy = opts[:strategy]
adapter = opts[:adapter]
case strategy.get_trace(opts[:trace_key]) do
{:error, :no_trace_context} = error ->
error
{:ok, %Trace{stack: []}} ->
Logger.error("Tried to finish a span without an active span.")
{:error, :no_span_context}
{:ok, %Trace{stack: [span | tail], spans: spans} = trace} ->
finished_span =
span
|> update_or_keep(opts)
|> ensure_completion_time_set(adapter)
strategy.put_trace(opts[:trace_key], %{
trace
| stack: tail,
spans: [finished_span | spans]
})
{:ok, finished_span}
{:error, _} = error ->
error
end
end
@doc """
Updates the current span with error details.
In the case of an invalid value, validation errors are returned.
"""
@spec span_error(Exception.t(), Enum.t(), Tracer.opts()) ::
{:ok, Span.t()}
| {:error, :disabled}
| {:error, :no_trace_context}
| {:error, :no_span_context}
| {:error, [Optimal.error()]}
def span_error(_error, _stacktrace, :disabled), do: {:error, :disabled}
def span_error(exception, stacktrace, opts) do
updates = [exception: exception, stacktrace: stacktrace]
update_span(Keyword.put_new(opts, :error, updates))
end
@doc """
Returns the id of the currently-running trace.
"""
@spec current_trace_id(Tracer.opts()) :: Spandex.id() | nil
def current_trace_id(:disabled), do: nil
def current_trace_id(opts) do
strategy = opts[:strategy]
case strategy.get_trace(opts[:trace_key]) do
{:ok, %Trace{id: id}} ->
id
{:error, _} ->
# TODO: Alter the return type of this interface to allow for returning
# errors from fetching the trace.
nil
end
end
@doc """
Returns the id of the currently-running span.
"""
@spec current_span_id(Tracer.opts()) :: Spandex.id() | nil
def current_span_id(:disabled), do: nil
def current_span_id(opts) do
case current_span(opts) do
nil -> nil
span -> span.id
end
end
@doc """
Returns the `%Span{}` struct for the currently-running span
"""
@spec current_span(Tracer.opts()) :: Span.t() | nil
def current_span(:disabled), do: nil
def current_span(opts) do
strategy = opts[:strategy]
case strategy.get_trace(opts[:trace_key]) do
{:ok, %Trace{stack: []}} ->
nil
{:ok, %Trace{stack: [span | _]}} ->
span
{:error, _} ->
# TODO: Alter the return type of this interface to allow for returning
# errors from fetching the trace.
nil
end
end
@doc """
Returns the current `%SpanContext{}` or an error.
### DEPRECATION WARNING
Expect changes to this in the future, as this will eventualy be refactored to
only ever return a `%SpanContext{}`, or at least to always return something
consistent.
"""
@spec current_context(Tracer.opts()) ::
{:ok, SpanContext.t()}
| {:error, :disabled}
| {:error, :no_span_context}
| {:error, :no_trace_context}
def current_context(:disabled), do: {:error, :disabled}
def current_context(opts) do
strategy = opts[:strategy]
case strategy.get_trace(opts[:trace_key]) do
{:ok, %Trace{id: trace_id, priority: priority, baggage: baggage, stack: [%Span{id: span_id} | _]}} ->
{:ok, %SpanContext{trace_id: trace_id, priority: priority, baggage: baggage, parent_id: span_id}}
{:ok, %Trace{stack: []}} ->
{:error, :no_span_context}
{:error, _} ->
{:error, :no_trace_context}
end
end
@doc """
Given a `%SpanContext{}`, resumes a trace from a different process or service.
Span updates for the top span may be passed in. They are skipped if they are
invalid updates. As such, if you aren't sure if your updates are valid, it is
safer to perform a second call to `update_span/2` and check the return value.
"""
@spec continue_trace(String.t(), SpanContext.t(), Keyword.t()) ::
{:ok, Trace.t()}
| {:error, :disabled}
| {:error, :trace_already_present}
def continue_trace(_, _, :disabled), do: {:error, :disabled}
def continue_trace(name, %SpanContext{} = span_context, opts) do
strategy = opts[:strategy]
if strategy.trace_active?(opts[:trace_key]) do
Logger.error("Tried to continue a trace over top of another trace. name: #{inspect(name)}")
{:error, :trace_already_present}
else
do_continue_trace(name, span_context, opts)
end
end
@doc """
Given a trace_id and span_id, resumes a trace from a different process or service.
Span updates for the top span may be passed in. They are skipped if they are
invalid updates. As such, if you aren't sure if your updates are valid, it is
safer to perform a second call to `update_span/2` and check the return value.
"""
@spec continue_trace(String.t(), Spandex.id(), Spandex.id(), Keyword.t()) ::
{:ok, Trace.t()}
| {:error, :disabled}
| {:error, :trace_already_present}
@deprecated "Use continue_trace/3 instead"
def continue_trace(_, _, _, :disabled), do: {:error, :disabled}
def continue_trace(name, trace_id, span_id, opts) do
continue_trace(name, %SpanContext{trace_id: trace_id, parent_id: span_id}, opts)
end
@doc """
Resumes a trace from a different process or service, using an existing span
struct as the parent.

Span updates for the top span may be passed in. Invalid updates are
silently skipped, so if you aren't sure your updates are valid, perform a
second call to `update_span/2` and check its return value.
"""
@spec continue_trace_from_span(String.t(), Span.t(), Tracer.opts()) ::
        {:ok, Trace.t()}
        | {:error, :disabled}
        | {:error, :trace_already_present}
def continue_trace_from_span(_name, _span, :disabled), do: {:error, :disabled}

def continue_trace_from_span(name, span, opts) do
  strategy = opts[:strategy]

  # Refuse to start when a trace is already active in this context.
  if !strategy.trace_active?(opts[:trace_key]) do
    do_continue_trace_from_span(name, span, opts)
  else
    Logger.error("Tried to continue a trace over top of another trace. name: #{inspect(name)}")
    {:error, :trace_already_present}
  end
end
@doc """
Returns the span context extracted from the given HTTP headers (or
`Plug.Conn`), as determined by the configured adapter.
"""
@spec distributed_context(Plug.Conn.t(), Tracer.opts()) :: {:ok, SpanContext.t()} | {:error, :disabled}
@spec distributed_context(headers(), Tracer.opts()) :: {:ok, SpanContext.t()} | {:error, :disabled}
def distributed_context(_, :disabled), do: {:error, :disabled}

def distributed_context(metadata, opts) do
  # Extraction is entirely adapter-specific (header names, encodings, etc.).
  opts[:adapter].distributed_context(metadata, opts)
end
@doc """
Alters the given headers to include the outgoing HTTP headers necessary to
continue a distributed trace, as determined by the adapter.
"""
@spec inject_context(headers(), SpanContext.t(), Tracer.opts()) :: headers()
def inject_context(headers, %SpanContext{} = span_context, opts) do
  # Injection is entirely adapter-specific; delegate without modification.
  opts[:adapter].inject_context(headers, span_context, opts)
end
# Private Helpers
# Updates every span in `spans` with `opts`, preserving order.
#
# Returns `{:ok, updated_spans}` when all updates succeed, or the first
# `{:error, reason}` encountered. Uses `Enum.reduce_while/3` so iteration
# stops at the first failure instead of threading an error tuple through
# the remaining elements as the previous hand-rolled reduce did.
defp update_many_spans(spans, opts) do
  spans
  |> Enum.reduce_while({:ok, []}, fn span, {:ok, acc} ->
    case Span.update(span, opts) do
      # Prepend and reverse once at the end (O(n) instead of O(n^2)).
      {:ok, updated} -> {:cont, {:ok, [updated | acc]}}
      {:error, _} = error -> {:halt, error}
    end
  end)
  |> case do
    {:ok, list} -> {:ok, Enum.reverse(list)}
    {:error, _} = error -> error
  end
end
# Builds the top span for a continued trace, sets Logger metadata, and
# stores the new %Trace{} via the configured strategy.
defp do_continue_trace(name, span_context, opts) do
  with {:ok, top_span} <- span(name, opts, span_context, opts[:adapter]) do
    Logger.metadata(trace_id: to_string(span_context.trace_id), span_id: to_string(top_span.id))

    # Carry over trace id, priority and baggage from the remote context.
    trace = %Trace{
      id: span_context.trace_id,
      priority: span_context.priority,
      baggage: span_context.baggage,
      stack: [top_span],
      spans: []
    }

    opts[:strategy].put_trace(opts[:trace_key], trace)
  end
end
# Creates a child of the given span and stores it as the sole entry of a
# brand-new trace (with a freshly generated trace id from the adapter).
defp do_continue_trace_from_span(name, span, opts) do
  adapter = opts[:adapter]

  with {:ok, child} <- Span.child_of(span, name, adapter.span_id(), adapter.now(), opts) do
    opts[:strategy].put_trace(
      opts[:trace_key],
      %Trace{id: adapter.trace_id(), stack: [child], spans: []}
    )
  end
end
# Starts a new span inside an existing trace.
#
# First clause: the trace already has spans on its stack, so the new span
# is created as a child of the current (top-of-stack) span and pushed onto
# the stack.
defp do_start_span(name, %Trace{stack: [current_span | _]} = trace, opts) do
  strategy = opts[:strategy]
  adapter = opts[:adapter]

  with {:ok, span} <- Span.child_of(current_span, name, adapter.span_id(), adapter.now(), opts),
       {:ok, _trace} <- strategy.put_trace(opts[:trace_key], %{trace | stack: [span | trace.stack]}) do
    # Only update Logger metadata after the trace was stored successfully.
    Logger.metadata(span_id: to_string(span.id), trace_id: to_string(trace.id))
    {:ok, span}
  end
end

# Second clause: the trace exists but its stack is empty, so a fresh root
# span is built from a minimal %SpanContext{} carrying only the trace id.
defp do_start_span(name, %Trace{stack: [], id: trace_id} = trace, opts) do
  strategy = opts[:strategy]
  adapter = opts[:adapter]
  span_context = %SpanContext{trace_id: trace_id}

  with {:ok, span} <- span(name, opts, span_context, adapter),
       {:ok, _trace} <- strategy.put_trace(opts[:trace_key], %{trace | stack: [span]}) do
    Logger.metadata(span_id: to_string(span.id), trace_id: to_string(trace_id))
    {:ok, span}
  end
end
# Starts a brand-new trace: generates a trace id via the adapter, builds
# its root span, sets Logger metadata, and stores the trace.
defp do_start_trace(name, opts) do
  adapter = opts[:adapter]
  trace_id = adapter.trace_id()

  with {:ok, root} <- span(name, opts, %SpanContext{trace_id: trace_id}, adapter) do
    Logger.metadata(trace_id: to_string(trace_id), span_id: to_string(root.id))
    opts[:strategy].put_trace(opts[:trace_key], %Trace{spans: [], stack: [root], id: trace_id})
  end
end
# Updates a span on the trace's stack and persists the modified trace.
#
# Third argument `true` means "update the top-level span", i.e. the span at
# the BOTTOM of the stack (index -1), which is the root of this trace.
defp do_update_span(%Trace{stack: stack} = trace, opts, true) do
  strategy = opts[:strategy]
  top_span = Enum.at(stack, -1)

  with {:ok, updated} <- Span.update(top_span, opts),
       new_stack <- List.replace_at(stack, -1, updated),
       {:ok, _trace} <- strategy.put_trace(opts[:trace_key], %{trace | stack: new_stack}) do
    {:ok, updated}
  end
end

# Third argument `false` means "update the current span", i.e. the most
# recently started span at the HEAD of the stack.
defp do_update_span(%Trace{stack: [current_span | other_spans]} = trace, opts, false) do
  strategy = opts[:strategy]

  with {:ok, updated} <- Span.update(current_span, opts),
       new_stack <- [updated | other_spans],
       {:ok, _trace} <- strategy.put_trace(opts[:trace_key], %{trace | stack: new_stack}) do
    {:ok, updated}
  end
end
# Stamps a completion time on the span if it does not already have one;
# spans with an existing completion time pass through unchanged.
defp ensure_completion_time_set(%Span{completion_time: nil} = span, adapter),
  do: update_or_keep(span, completion_time: adapter.now())

defp ensure_completion_time_set(%Span{} = span, _adapter), do: span
# Builds a new %Span{} from caller options plus required fields derived
# from the span context and adapter. `:name` only fills in when the caller
# did not provide one; the ids and start time always override caller input.
defp span(name, opts, span_context, adapter) do
  forced_fields = [
    trace_id: span_context.trace_id,
    parent_id: span_context.parent_id,
    start: adapter.now(),
    id: adapter.span_id()
  ]

  opts
  |> Keyword.put_new(:name, name)
  |> Keyword.merge(forced_fields)
  |> Span.new()
end
# Applies the updates in `opts` to `span`, falling back to the original
# span when the update is invalid (best-effort semantics).
defp update_or_keep(span, opts) do
  case Span.update(span, opts) do
    {:ok, updated} -> updated
    {:error, _reason} -> span
  end
end
end
|
lib/spandex.ex
| 0.790166
| 0.511717
|
spandex.ex
|
starcoder
|
defmodule Cldr.Unit.Math do
@moduledoc """
Simple arithmetic functions for the `Unit.t` type
"""
alias Cldr.Unit
alias Cldr.Unit.Conversion
import Kernel, except: [div: 2, round: 1, trunc: 1]
import Unit, only: [incompatible_units_error: 2]
@doc """
Adds two compatible `%Unit{}` types
## Options
* `unit_1` and `unit_2` are compatible Units
returned by `Cldr.Unit.new/2`
## Returns
* A `%Unit{}` of the same type as `unit_1` with a value
that is the sum of `unit_1` and the potentially converted
`unit_2` or
* `{:error, {IncompatibleUnitError, message}}`
## Examples
iex> Cldr.Unit.Math.add Cldr.Unit.new!(:foot, 1), Cldr.Unit.new!(:foot, 1)
#Cldr.Unit<:foot, 2>
iex> Cldr.Unit.Math.add Cldr.Unit.new!(:foot, 1), Cldr.Unit.new!(:mile, 1)
#Cldr.Unit<:foot, 5281>
iex> Cldr.Unit.Math.add Cldr.Unit.new!(:foot, 1), Cldr.Unit.new!(:gallon, 1)
{:error, {Cldr.Unit.IncompatibleUnitsError,
"Operations can only be performed between units of the same category. Received #Cldr.Unit<:foot, 1> and #Cldr.Unit<:gallon, 1>"}}
"""
@spec add(Unit.t(), Unit.t()) :: Unit.t() | {:error, {module(), String.t()}}
# Same unit on both sides, both plain numbers: ordinary arithmetic.
def add(%Unit{unit: unit, value: value_1}, %Unit{unit: unit, value: value_2})
    when is_number(value_1) and is_number(value_2) do
  Unit.new!(unit, value_1 + value_2)
end

# Same unit, both Decimal values: delegate to Decimal arithmetic.
def add(%Unit{unit: unit, value: %Decimal{} = value_1}, %Unit{
      unit: unit,
      value: %Decimal{} = value_2
    }) do
  Unit.new!(unit, Decimal.add(value_1, value_2))
end

# Mixed Decimal/number (Decimal first): promote the number to Decimal and
# recurse into the Decimal/Decimal clause above.
def add(%Unit{unit: unit, value: %Decimal{}} = unit_1, %Unit{unit: unit, value: value_2})
    when is_number(value_2) do
  add(unit_1, Unit.new!(unit, Decimal.new(value_2)))
end

# Mixed number/Decimal (Decimal second). NOTE: `unit_1` here is bound to
# the SECOND argument; addition is commutative so the swap is harmless.
def add(%Unit{unit: unit, value: value_2}, %Unit{unit: unit, value: %Decimal{}} = unit_1)
    when is_number(value_2) do
  add(unit_1, Unit.new!(unit, Decimal.new(value_2)))
end

# Mixed Ratio/number, both orders: Ratio.add/2 accepts a plain number.
def add(%Unit{unit: unit, value: %Ratio{} = value_1}, %Unit{unit: unit, value: value_2})
    when is_number(value_2) do
  Unit.new!(unit, Ratio.add(value_1, value_2))
end

def add(%Unit{unit: unit, value: value_2}, %Unit{unit: unit, value: %Ratio{} = value_1})
    when is_number(value_2) do
  Unit.new!(unit, Ratio.add(value_1, value_2))
end

# Different units: convert the second operand into the first operand's
# unit when the two units are compatible, otherwise return an error tuple.
def add(%Unit{unit: unit_category_1} = unit_1, %Unit{unit: unit_category_2} = unit_2) do
  if Unit.compatible?(unit_category_1, unit_category_2) do
    add(unit_1, Conversion.convert!(unit_2, unit_category_1))
  else
    {:error, incompatible_units_error(unit_1, unit_2)}
  end
end
@doc """
Adds two compatible `%Unit{}` types
and raises on error
## Options
* `unit_1` and `unit_2` are compatible Units
returned by `Cldr.Unit.new/2`
## Returns
* A `%Unit{}` of the same type as `unit_1` with a value
that is the sum of `unit_1` and the potentially converted
`unit_2` or
* Raises an exception
"""
@spec add!(Unit.t(), Unit.t()) :: Unit.t() | no_return()
def add!(unit_1, unit_2) do
  # `with` falls through returning the %Unit{} when add/2 succeeds; only an
  # error tuple matches the pattern and triggers the raise.
  with {:error, {exception, reason}} <- add(unit_1, unit_2) do
    raise exception, reason
  end
end
@doc """
Subtracts two compatible `%Unit{}` types
## Options
* `unit_1` and `unit_2` are compatible Units
returned by `Cldr.Unit.new/2`
## Returns
* A `%Unit{}` of the same type as `unit_1` with a value
that is the difference between `unit_1` and the potentially
converted `unit_2`
* `{:error, {IncompatibleUnitError, message}}`
## Examples
iex> Cldr.Unit.sub Cldr.Unit.new!(:kilogram, 5), Cldr.Unit.new!(:pound, 1)
#Cldr.Unit<:kilogram, -81900798833369519 <|> 18014398509481984>
iex> Cldr.Unit.sub Cldr.Unit.new!(:pint, 5), Cldr.Unit.new!(:liter, 1)
#Cldr.Unit<:pint, -36794683014431043834033898368027039378825884348261 <|> 12746616238742849396626455585282990375683527307233>
iex> Cldr.Unit.sub Cldr.Unit.new!(:pint, 5), Cldr.Unit.new!(:pint, 1)
#Cldr.Unit<:pint, 4>
"""
@spec sub(Unit.t(), Unit.t()) :: Unit.t() | {:error, {module(), String.t()}}
# Same unit on both sides, both plain numbers: ordinary arithmetic.
def sub(%Unit{unit: unit, value: value_1}, %Unit{unit: unit, value: value_2})
    when is_number(value_1) and is_number(value_2) do
  Unit.new!(unit, value_1 - value_2)
end

# Same unit, both Decimal values: delegate to Decimal arithmetic.
def sub(%Unit{unit: unit, value: %Decimal{} = value_1}, %Unit{
      unit: unit,
      value: %Decimal{} = value_2
    }) do
  Unit.new!(unit, Decimal.sub(value_1, value_2))
end

# Mixed Decimal/number (Decimal first): promote the number to Decimal.
def sub(%Unit{unit: unit, value: %Decimal{}} = unit_1, %Unit{unit: unit, value: value_2})
    when is_number(value_2) do
  sub(unit_1, Unit.new!(unit, Decimal.new(value_2)))
end

# Mixed number/Decimal (Decimal second). NOTE(review): `unit_1` is bound
# to the SECOND argument here, so the operands are swapped before the
# recursive call — for subtraction this reverses the sign of the result
# relative to the argument order; verify this is intended.
def sub(%Unit{unit: unit, value: value_2}, %Unit{unit: unit, value: %Decimal{}} = unit_1)
    when is_number(value_2) do
  sub(unit_1, Unit.new!(unit, Decimal.new(value_2)))
end

# Mixed Ratio/number: Ratio.sub/2 accepts a plain number operand.
def sub(%Unit{unit: unit, value: %Ratio{} = value_1}, %Unit{unit: unit, value: value_2})
    when is_number(value_2) do
  Unit.new!(unit, Ratio.sub(value_1, value_2))
end

# NOTE(review): as above, this clause computes ratio - number even though
# the Ratio value arrived as the second argument; confirm the intent.
def sub(%Unit{unit: unit, value: value_2}, %Unit{unit: unit, value: %Ratio{} = value_1})
    when is_number(value_2) do
  Unit.new!(unit, Ratio.sub(value_1, value_2))
end

# Different units: convert the second operand into the first operand's
# unit when compatible, otherwise return an error tuple.
def sub(%Unit{unit: unit_category_1} = unit_1, %Unit{unit: unit_category_2} = unit_2) do
  if Unit.compatible?(unit_category_1, unit_category_2) do
    sub(unit_1, Conversion.convert!(unit_2, unit_category_1))
  else
    {:error, incompatible_units_error(unit_1, unit_2)}
  end
end
@doc """
Subtracts two compatible `%Unit{}` types
and raises on error
## Options
* `unit_1` and `unit_2` are compatible Units
returned by `Cldr.Unit.new/2`
## Returns
* A `%Unit{}` of the same type as `unit_1` with a value
that is the difference between `unit_1` and the potentially
converted `unit_2`
* Raises an exception
"""
@spec sub!(Unit.t(), Unit.t()) :: Unit.t() | no_return()
def sub!(unit_1, unit_2) do
  # `with` falls through returning the %Unit{} when sub/2 succeeds; only an
  # error tuple matches the pattern and triggers the raise.
  with {:error, {exception, reason}} <- sub(unit_1, unit_2) do
    raise exception, reason
  end
end
@doc """
Multiplies two compatible `%Unit{}` types
## Options
* `unit_1` and `unit_2` are compatible Units
returned by `Cldr.Unit.new/2`
## Returns
* A `%Unit{}` of the same type as `unit_1` with a value
that is the product of `unit_1` and the potentially
converted `unit_2`
* `{:error, {IncompatibleUnitError, message}}`
## Examples
iex> Cldr.Unit.mult Cldr.Unit.new!(:kilogram, 5), Cldr.Unit.new!(:pound, 1)
#Cldr.Unit<:kilogram, 40855968570202005 <|> 18014398509481984>
iex> Cldr.Unit.mult Cldr.Unit.new!(:pint, 5), Cldr.Unit.new!(:liter, 1)
#Cldr.Unit<:pint, 134691990896416015745491897791939562497958760939520 <|> 12746616238742849396626455585282990375683527307233>
iex> Cldr.Unit.mult Cldr.Unit.new!(:pint, 5), Cldr.Unit.new!(:pint, 1)
#Cldr.Unit<:pint, 5>
"""
@spec mult(Unit.t(), Unit.t()) :: Unit.t() | {:error, {module(), String.t()}}
# Same unit on both sides, both plain numbers: ordinary arithmetic.
def mult(%Unit{unit: unit, value: value_1}, %Unit{unit: unit, value: value_2})
    when is_number(value_1) and is_number(value_2) do
  Unit.new!(unit, value_1 * value_2)
end

# Same unit, both Decimal values: delegate to Decimal arithmetic.
def mult(%Unit{unit: unit, value: %Decimal{} = value_1}, %Unit{
      unit: unit,
      value: %Decimal{} = value_2
    }) do
  Unit.new!(unit, Decimal.mult(value_1, value_2))
end

# Mixed Decimal/number (either order): promote the number to Decimal and
# recurse. Multiplication is commutative, so the operand swap is harmless.
def mult(%Unit{unit: unit, value: %Decimal{}} = unit_1, %Unit{unit: unit, value: value_2})
    when is_number(value_2) do
  mult(unit_1, Unit.new!(unit, Decimal.new(value_2)))
end

def mult(%Unit{unit: unit, value: value_2}, %Unit{unit: unit, value: %Decimal{}} = unit_1)
    when is_number(value_2) do
  mult(unit_1, Unit.new!(unit, Decimal.new(value_2)))
end

# Mixed Ratio/number (either order): Ratio.mult/2 accepts a plain number.
def mult(%Unit{unit: unit, value: %Ratio{} = value_1}, %Unit{unit: unit, value: value_2})
    when is_number(value_2) do
  Unit.new!(unit, Ratio.mult(value_1, value_2))
end

def mult(%Unit{unit: unit, value: value_2}, %Unit{unit: unit, value: %Ratio{} = value_1})
    when is_number(value_2) do
  Unit.new!(unit, Ratio.mult(value_1, value_2))
end

# Different units: convert the second operand into the first operand's
# unit when compatible, otherwise return an error tuple.
def mult(%Unit{unit: unit_category_1} = unit_1, %Unit{unit: unit_category_2} = unit_2) do
  if Unit.compatible?(unit_category_1, unit_category_2) do
    # Consistent with add/2, sub/2 and div/2: convert!/2 raises a proper
    # conversion error instead of crashing with a MatchError as the former
    # `{:ok, conversion} = Conversion.convert(...)` did on failure.
    mult(unit_1, Conversion.convert!(unit_2, unit_category_1))
  else
    {:error, incompatible_units_error(unit_1, unit_2)}
  end
end
@doc """
Multiplies two compatible `%Unit{}` types
and raises on error
## Options
* `unit_1` and `unit_2` are compatible Units
returned by `Cldr.Unit.new/2`
## Returns
* A `%Unit{}` of the same type as `unit_1` with a value
that is the product of `unit_1` and the potentially
converted `unit_2`
* Raises an exception
"""
@spec mult!(Unit.t(), Unit.t()) :: Unit.t() | no_return()
def mult!(unit_1, unit_2) do
  # `with` falls through returning the %Unit{} when mult/2 succeeds; only
  # an error tuple matches the pattern and triggers the raise.
  with {:error, {exception, reason}} <- mult(unit_1, unit_2) do
    raise exception, reason
  end
end
@doc """
Divides one compatible `%Unit{}` type by another
## Options
* `unit_1` and `unit_2` are compatible Units
returned by `Cldr.Unit.new/2`
## Returns
* A `%Unit{}` of the same type as `unit_1` with a value
that is the dividend of `unit_1` and the potentially
converted `unit_2`
* `{:error, {IncompatibleUnitError, message}}`
## Examples
iex> Cldr.Unit.div Cldr.Unit.new!(:kilogram, 5), Cldr.Unit.new!(:pound, 1)
#Cldr.Unit<:kilogram, 8171193714040401 <|> 90071992547409920>
iex> Cldr.Unit.div Cldr.Unit.new!(:pint, 5), Cldr.Unit.new!(:liter, 1)
#Cldr.Unit<:pint, 26938398179283203149098379558387912499591752187904 <|> 63733081193714246983132277926414951878417636536165>
iex> Cldr.Unit.div Cldr.Unit.new!(:pint, 5), Cldr.Unit.new!(:pint, 1)
#Cldr.Unit<:pint, 5.0>
"""
@spec div(Unit.t(), Unit.t()) :: Unit.t() | {:error, {module(), String.t()}}
# Same unit on both sides, both plain numbers. Uses `/`, so the result is
# always a float even for two integers (see the 5/1 -> 5.0 doc example).
def div(%Unit{unit: unit, value: value_1}, %Unit{unit: unit, value: value_2})
    when is_number(value_1) and is_number(value_2) do
  Unit.new!(unit, value_1 / value_2)
end

# Same unit, both Decimal values: delegate to Decimal arithmetic.
def div(%Unit{unit: unit, value: %Decimal{} = value_1}, %Unit{
      unit: unit,
      value: %Decimal{} = value_2
    }) do
  Unit.new!(unit, Decimal.div(value_1, value_2))
end

# Mixed Decimal/number (Decimal first): promote the number to Decimal.
def div(%Unit{unit: unit, value: %Decimal{}} = unit_1, %Unit{unit: unit, value: value_2})
    when is_number(value_2) do
  div(unit_1, Unit.new!(unit, Decimal.new(value_2)))
end

# Mixed number/Decimal (Decimal second). NOTE(review): `unit_1` is bound
# to the SECOND argument here, so the operands are swapped before the
# recursive call — for division this inverts the quotient relative to the
# argument order; verify this is intended.
def div(%Unit{unit: unit, value: value_2}, %Unit{unit: unit, value: %Decimal{}} = unit_1)
    when is_number(value_2) do
  div(unit_1, Unit.new!(unit, Decimal.new(value_2)))
end

# Mixed Ratio/number: Ratio.div/2 accepts a plain number operand.
def div(%Unit{unit: unit, value: %Ratio{} = value_1}, %Unit{unit: unit, value: value_2})
    when is_number(value_2) do
  Unit.new!(unit, Ratio.div(value_1, value_2))
end

# NOTE(review): as above, this clause computes ratio / number even though
# the Ratio value arrived as the second argument; confirm the intent.
def div(%Unit{unit: unit, value: value_2}, %Unit{unit: unit, value: %Ratio{} = value_1})
    when is_number(value_2) do
  Unit.new!(unit, Ratio.div(value_1, value_2))
end

# Different units: convert the second operand into the first operand's
# unit when compatible, otherwise return an error tuple.
def div(%Unit{unit: unit_category_1} = unit_1, %Unit{unit: unit_category_2} = unit_2) do
  if Unit.compatible?(unit_category_1, unit_category_2) do
    div(unit_1, Conversion.convert!(unit_2, unit_category_1))
  else
    {:error, incompatible_units_error(unit_1, unit_2)}
  end
end
@doc """
Divides one compatible `%Unit{}` type by another
and raises on error
## Options
* `unit_1` and `unit_2` are compatible Units
returned by `Cldr.Unit.new/2`
## Returns
* A `%Unit{}` of the same type as `unit_1` with a value
that is the dividend of `unit_1` and the potentially
converted `unit_2`
* Raises an exception
"""
@spec div!(Unit.t(), Unit.t()) :: Unit.t() | no_return()
def div!(unit_1, unit_2) do
  # `with` falls through returning the %Unit{} when div/2 succeeds; only an
  # error tuple matches the pattern and triggers the raise.
  with {:error, {exception, reason}} <- div(unit_1, unit_2) do
    raise exception, reason
  end
end
@doc """
Rounds the value of a unit.
## Options
* `unit` is any unit returned by `Cldr.Unit.new/2`
* `places` is the number of decimal places to round to. The default is `0`.
* `mode` is the rounding mode to be applied. The default is `:half_up`.
## Returns
* A `%Unit{}` of the same type as `unit` with a value
that is rounded to the specified number of decimal places
## Rounding modes
Directed roundings:
* `:down` - Round towards 0 (truncate), eg 10.9 rounds to 10.0
* `:up` - Round away from 0, eg 10.1 rounds to 11.0. (Non IEEE algorithm)
* `:ceiling` - Round toward +∞ - Also known as rounding up or ceiling
* `:floor` - Round toward -∞ - Also known as rounding down or floor
Round to nearest:
* `:half_even` - Round to nearest value, but in a tiebreak, round towards the
nearest value with an even (zero) least significant bit, which occurs 50%
of the time. This is the default for IEEE binary floating-point and the recommended
value for decimal.
* `:half_up` - Round to nearest value, but in a tiebreak, round away from 0.
This is the default algorithm for Erlang's Kernel.round/2
* `:half_down` - Round to nearest value, but in a tiebreak, round towards 0
(Non IEEE algorithm)
## Examples
iex> Cldr.Unit.round Cldr.Unit.new!(:yard, 1031.61), 1
#Cldr.Unit<:yard, 1031.6>
iex> Cldr.Unit.round Cldr.Unit.new!(:yard, 1031.61), 2
#Cldr.Unit<:yard, 1031.61>
iex> Cldr.Unit.round Cldr.Unit.new!(:yard, 1031.61), 1, :up
#Cldr.Unit<:yard, 1031.7>
"""
@spec round(
        unit :: Unit.t(),
        places :: non_neg_integer,
        mode :: :down | :up | :ceiling | :floor | :half_even | :half_up | :half_down
      ) :: Unit.t()
# Single bodiless head declaring the defaults for all clauses.
def round(unit, places \\ 0, mode \\ :half_up)

# Ratio values have no native rounding, so convert to float first.
# NOTE(review): this loses the Ratio's exact precision by design.
def round(%Unit{value: %Ratio{} = value} = unit, places, mode) do
  value = Ratio.to_float(value)
  round(%{unit | value: value}, places, mode)
end

# All other value types are delegated to Cldr.Math.round/3.
def round(%Unit{unit: unit, value: value}, places, mode) do
  rounded_value = Cldr.Math.round(value, places, mode)
  Unit.new!(unit, rounded_value)
end
@doc """
Truncates a unit's value toward zero.
"""
# Ratio values have no native truncation; go through a float first.
def trunc(%Unit{value: %Ratio{} = value} = unit) do
  value = Ratio.to_float(value)
  trunc(%{unit | value: value})
end

def trunc(%Unit{value: value} = unit) when is_float(value) do
  %{unit | value: Kernel.trunc(value)}
end

# Integers are already truncated.
def trunc(%Unit{value: value} = unit) when is_integer(value) do
  unit
end

def trunc(%Unit{value: %Decimal{} = value} = unit) do
  # :down rounds toward zero, matching Kernel.trunc/1 semantics for
  # negative values (:floor would turn -1.5 into -2 instead of -1).
  %{unit | value: Decimal.round(value, 0, :down)}
end
@doc """
Compare two units, converting to a common unit
type if required.
If conversion is performed, the results are both
rounded to a single decimal place before
comparison.
Returns `:gt`, `:lt`, or `:eq`.
## Example
iex> x = Cldr.Unit.new!(:kilometer, 1)
iex> y = Cldr.Unit.new!(:meter, 1000)
iex> Cldr.Unit.Math.compare x, y
:eq
"""
# Both Decimal with the same unit: use Decimal-aware comparison, since
# comparing Decimal structs with </>/== would use term ordering.
def compare(
      %Unit{unit: unit, value: %Decimal{}} = unit_1,
      %Unit{unit: unit, value: %Decimal{}} = unit_2
    ) do
  Cldr.Math.decimal_compare(unit_1.value, unit_2.value)
end

# Both Decimal, different units: convert unit_2 into unit_1's unit, then
# the clause above handles the comparison.
def compare(%Unit{value: %Decimal{}} = unit_1, %Unit{value: %Decimal{}} = unit_2) do
  unit_2 = Unit.Conversion.convert!(unit_2, unit_1.unit)
  compare(unit_1, unit_2)
end

# Same unit, at most one Decimal value. NOTE(review): if exactly one side
# is a Decimal struct, `==`/`<`/`>` here compare structurally by Erlang
# term order rather than numerically — confirm callers never mix a Decimal
# with a plain number under the same unit.
def compare(%Unit{unit: unit} = unit_1, %Unit{unit: unit} = unit_2) do
  cond do
    unit_1.value == unit_2.value -> :eq
    unit_1.value > unit_2.value -> :gt
    unit_1.value < unit_2.value -> :lt
  end
end

# Different units: convert unit_2 into unit_1's unit, rounding both to one
# decimal place (:half_even) to absorb conversion noise before comparing.
def compare(%Unit{} = unit_1, %Unit{} = unit_2) do
  unit_1 =
    unit_1
    |> round(1, :half_even)

  unit_2 =
    unit_2
    |> Unit.Conversion.convert!(unit_1.unit)
    |> round(1, :half_even)

  compare(unit_1, unit_2)
end
@deprecated "Please use Cldr.Unit.Math.compare/2"
# Thin backwards-compatibility shim; behaves exactly like compare/2.
def cmp(unit_1, unit_2) do
  compare(unit_1, unit_2)
end
end
|
lib/cldr/unit/math.ex
| 0.921016
| 0.723187
|
math.ex
|
starcoder
|
defmodule KerbalMaps.WaypointsParser do
@moduledoc false
require Logger
import NimbleParsec
# Entry point: a 3-tuple (no buffer yet) is normalized to the 4-tuple
# state {status, parsed_data, buffer, stream} used by all other clauses.
def parse({:ok, data, stream}), do: parse({:ok, data, "", stream})

# End of stream with an empty buffer: parsing is complete; flatten the
# accumulated waypoint maps into a single list.
def parse({:eof, data, "", _stream}), do: {:ok, List.flatten(data), ""}

# An IO error from reload_buffer/1 aborts the parse.
def parse({{:error, reason}, _, _, _}), do: {:error, reason}

# Buffer exhausted but not at :eof: pull the next chunk and continue.
def parse({:ok, _, "", _} = state), do: reload_buffer(state) |> parse

# Main dispatch on the front of the buffer.
def parse({_, data, buffer, stream} = state) do
  case buffer do
    # Line comment: discard everything up to the end of the line.
    "//" <> remaining ->
      {:ok, data, remaining, stream}
      |> strip_comment
      |> parse

    # A WAYPOINT block: parse it, then refill the buffer before looping.
    "WAYPOINT" <> remaining ->
      {:ok, data, remaining, stream}
      |> parse_waypoint
      |> reload_buffer
      |> parse

    # Anything else is assumed to be leading whitespace; strip and retry.
    _ ->
      state
      |> strip_whitespace
      |> parse
  end
end
# Discards buffer bytes up to and including the next line terminator.
# The \n clause also terminates \r\n sequences whose \r arrived in a
# previous chunk; \r alone (old-Mac line endings) ends a comment too.
def strip_comment({:ok, data, <<10, remaining::binary>>, stream}),
  do: {:ok, data, remaining, stream}

def strip_comment({:ok, data, <<13, 10, remaining::binary>>, stream}),
  do: {:ok, data, remaining, stream}

def strip_comment({:ok, data, <<13, remaining::binary>>, stream}),
  do: {:ok, data, remaining, stream}

# Buffer exhausted mid-comment: refill and keep stripping.
def strip_comment({:ok, _, "", _} = state), do: reload_buffer(state) |> strip_comment

# Any other byte is comment content; drop it and continue.
def strip_comment({:ok, data, <<_::bytes-size(1), remaining::binary>>, stream}),
  do: strip_comment({:ok, data, remaining, stream})
# Whitespace bytes recognized by the parser: nul, \t, \n, \f, \r, space.
@strip_whitespace_bytes [0, 9, 10, 12, 13, 32]

# Drops leading whitespace bytes from the buffer, refilling from the
# stream when the buffer runs dry; returns the state unchanged once a
# non-whitespace byte (or a non-:ok state) is reached.
def strip_whitespace({:ok, data, <<char, remaining::binary>>, stream})
    when char in @strip_whitespace_bytes,
    do: strip_whitespace({:ok, data, remaining, stream})

def strip_whitespace({:ok, _, "", _} = state), do: reload_buffer(state) |> strip_whitespace

def strip_whitespace(state), do: state
# nul, \t, \n, \f, \r, space respectively
whitespace_values = [0, 9, 10, 12, 13, 32]
whitespace_char = utf8_char(whitespace_values)

# \n, \r, } respectively — a value runs until end of line or closing brace.
value_terminator_values = [10, 13, ?}]
value_terminator_char = utf8_char(value_terminator_values)

# Key of a `name = value` pair: everything before the '=' sign, trimmed.
name =
  ignore(repeat(whitespace_char))
  |> repeat(lookahead_not(utf8_char([?=])) |> utf8_char([]))
  |> reduce({List, :to_string, []})
  |> map({String, :trim, []})
  |> unwrap_and_tag(:name)

# Value of a pair: everything up to a terminator character, trimmed.
value =
  repeat(lookahead_not(value_terminator_char) |> utf8_char([]))
  |> reduce({List, :to_string, []})
  |> map({String, :trim, []})
  |> unwrap_and_tag(:value)
  |> ignore(repeat(whitespace_char))

# Reduction callback for :pair — must be a public function so NimbleParsec
# can invoke it; converts the tagged name/value into a {name, value} tuple.
def resolve_pair([{:name, name}, {:value, value}]), do: [{name, value}]

# A single `name = value` pair inside a waypoint body.
defcombinatorp(
  :pair,
  name
  |> ignore(repeat(whitespace_char))
  |> ignore(utf8_char([?=]))
  |> concat(value)
  |> reduce(:resolve_pair)
)

# A `{ ... }` block of pairs, tagged :waypoint. Generates waypoint/1 used
# by parse_waypoint/1 below.
defparsec(
  :waypoint,
  ignore(repeat(whitespace_char))
  |> ignore(utf8_char([?{]))
  |> repeat(lookahead_not(utf8_char([?}])) |> parsec(:pair))
  |> ignore(utf8_char([?}]))
  |> tag(:waypoint)
  |> ignore(repeat(whitespace_char))
)
# Buffer exhausted before the waypoint body arrived: refill and retry.
def parse_waypoint({:ok, _, "", _} = state), do: reload_buffer(state) |> parse_waypoint

# Runs the generated waypoint/1 parsec against the buffer and appends the
# resulting map of name/value pairs to the accumulated data.
def parse_waypoint({:ok, data, buffer, stream}) do
  case waypoint(buffer) do
    {:ok, [waypoint: pairs_list], remainder, _, _, _} ->
      # Map.new/1 collapses the pairs into a map; with duplicate keys the
      # last pair wins, exactly as the previous Map.put/3 reduce did.
      {:ok, [data, Map.new(List.flatten(pairs_list))], remainder, stream}

    {:error, error_message, remainder, _, _, _} ->
      {:error, error_message, remainder, stream}
  end
end
# Appends up to 1024 bytes from the stream onto the buffer. The status
# element of the returned state reflects the read outcome: :ok on success,
# :eof at end of input, or the {:error, _} tuple itself on failure.
defp reload_buffer({_, data, buffer, stream}) do
  chunk = IO.binread(stream, 1024)

  case chunk do
    :eof -> {:eof, data, buffer, stream}
    {:error, _} = failure -> {failure, data, buffer, stream}
    bytes -> {:ok, data, buffer <> bytes, stream}
  end
end
end
|
lib/kerbal_maps/parsers/waypoints_parser.ex
| 0.627723
| 0.476762
|
waypoints_parser.ex
|
starcoder
|
defmodule Aoc2019Day2 do
@moduledoc """
https://adventofcode.com/2019/day/2
--- Day 2: 1202 Program Alarm ---
On the way to your gravity assist around the Moon, your ship computer beeps
angrily about a "1202 program alarm". On the radio, an Elf is already
explaining how to handle the situation: "Don't worry, that's perfectly
norma--" The ship computer bursts into flames.
You notify the Elves that the computer's magic smoke seems to have escaped.
"That computer ran Intcode programs like the gravity assist program it was
working on; surely there are enough spare parts up there to build a new
Intcode computer!"
An Intcode program is a list of integers separated by commas (like
1,0,0,3,99). To run one, start by looking at the first integer (called
position 0). Here, you will find an opcode - either 1, 2, or 99. The opcode
indicates what to do; for example, 99 means that the program is finished and
should immediately halt. Encountering an unknown opcode means something went
wrong.
Opcode 1 adds together numbers read from two positions and stores the result
in a third position. The three integers immediately after the opcode tell you
these three positions - the first two indicate the positions from which you
should read the input values, and the third indicates the position at which
the output should be stored.
For example, if your Intcode computer encounters 1,10,20,30, it should read
the values at positions 10 and 20, add those values, and then overwrite the
value at position 30 with their sum.
Opcode 2 works exactly like opcode 1, except it multiplies the two inputs
instead of adding them. Again, the three integers after the opcode indicate
where the inputs and outputs are, not their values.
Once you're done processing an opcode, move to the next one by stepping
forward 4 positions.
For example, suppose you have the following program:
1,9,10,3,2,3,11,0,99,30,40,50
For the purposes of illustration, here is the same program split into
multiple lines:
1,9,10,3,
2,3,11,0,
99,
30,40,50
The first four integers, 1,9,10,3, are at positions 0, 1, 2, and 3. Together,
they represent the first opcode (1, addition), the positions of the two inputs
(9 and 10), and the position of the output (3). To handle this opcode, you
first need to get the values at the input positions: position 9 contains 30,
and position 10 contains 40. Add these numbers together to get 70. Then, store
this value at the output position; here, the output position (3) is at
position 3, so it overwrites itself. Afterward, the program looks like this:
1,9,10,70,
2,3,11,0,
99,
30,40,50
Step forward 4 positions to reach the next opcode, 2. This opcode works just
like the previous, but it multiplies instead of adding. The inputs are at
positions 3 and 11; these positions contain 70 and 50 respectively.
Multiplying these produces 3500; this is stored at position 0:
3500,9,10,70,
2,3,11,0,
99,
30,40,50
Stepping forward 4 more positions arrives at opcode 99, halting the program.
Here are the initial and final states of a few more small programs:
1,0,0,0,99 becomes 2,0,0,0,99 (1 + 1 = 2).
2,3,0,3,99 becomes 2,3,0,6,99 (3 * 2 = 6).
2,4,4,5,99,0 becomes 2,4,4,5,99,9801 (99 * 99 = 9801).
1,1,1,4,99,5,6,0,99 becomes 30,1,1,4,2,5,6,0,99.
Once you have a working computer, the first step is to restore the gravity
assist program (your puzzle input) to the "1202 program alarm" state it had
just before the last computer caught fire. To do this, before running the
program, replace position 1 with the value 12 and replace position 2 with the
value 2. What value is left at position 0 after the program halts?
--- Part Two ---
"Good, the new computer seems to be working correctly! Keep it nearby during
this mission - you'll probably use it again. Real Intcode computers support
many more features than your new one, but we'll let you know what they are as
you need them."
"However, your current priority should be to complete your gravity assist
around the Moon. For this mission to succeed, we should settle on some
terminology for the parts you've already built."
Intcode programs are given as a list of integers; these values are used as
the initial state for the computer's memory. When you run an Intcode program,
make sure to start by initializing memory to the program's values. A position
in memory is called an address (for example, the first value in memory is at
"address 0").
Opcodes (like 1, 2, or 99) mark the beginning of an instruction. The values
used immediately after an opcode, if any, are called the instruction's
parameters. For example, in the instruction 1,2,3,4, 1 is the opcode; 2, 3,
and 4 are the parameters. The instruction 99 contains only an opcode and has
no parameters.
The address of the current instruction is called the instruction pointer; it
starts at 0. After an instruction finishes, the instruction pointer increases
by the number of values in the instruction; until you add more instructions
to the computer, this is always 4 (1 opcode + 3 parameters) for the add and
multiply instructions. (The halt instruction would increase the instruction
pointer by 1, but it halts the program instead.)
"With terminology out of the way, we're ready to proceed. To complete the
gravity assist, you need to determine what pair of inputs produces the output
19690720."
The inputs should still be provided to the program by replacing the values at
addresses 1 and 2, just like before. In this program, the value placed in
address 1 is called the noun, and the value placed in address 2 is called the
verb. Each of the two input values will be between 0 and 99, inclusive.
Once the program has halted, its output is available at address 0, also just
like before. Each time you try a pair of inputs, make sure you first reset
the computer's memory to the values in the program (your puzzle input) - in
other words, don't reuse memory from a previous attempt.
Find the input noun and verb that cause the program to produce the output
19690720. What is 100 * noun + verb? (For example, if noun=12 and verb=2, the
answer would be 1202.)
"""
@doc """
Ship's Intcode Computer.
## Examples
"""
@doc """
Writes `value` at position `index` of the Intcode memory, returning the
updated program. `index` is expected to be within bounds.
"""
def insert_value(instructions, value, index) do
  # List.replace_at/3 replaces the element in one pass; the previous
  # slice/concat version rebuilt the list from three pieces and relied on
  # the deprecated decreasing range `(index + 1)..-1`.
  List.replace_at(instructions, index, value)
end
# Reads the puzzle input file at path `data` and strips surrounding
# whitespace (the trailing newline in particular). Raises if unreadable.
def readInstructionFile(data) do
  data
  |> File.read!()
  |> String.trim()
end
# Parses a comma-separated Intcode program string into a list of integers.
def createInstructionList(instructionString) do
  instructionString
  |> String.split(",")
  |> Enum.map(&String.to_integer/1)
end
# Runs an Intcode program to completion, starting at instruction pointer 0,
# and returns the final memory state.
def process_instructions(instructions) do
  process_instructions(instructions, 0, Enum.at(instructions, 0))
end

# Opcode 99: halt and return the memory as-is.
def process_instructions(instructions, _i, 99), do: instructions

# Opcodes 1 (add) and 2 (multiply): read the values at the two input
# addresses, combine them, store at the output address, and step the
# instruction pointer forward by 4.
def process_instructions(instructions, i, opcode) when opcode in [1, 2] do
  lhs = Enum.at(instructions, Enum.at(instructions, i + 1))
  rhs = Enum.at(instructions, Enum.at(instructions, i + 2))
  result = if opcode == 1, do: lhs + rhs, else: lhs * rhs
  target = Enum.at(instructions, i + 3)

  updated_instructions = insert_value(instructions, result, target)
  process_instructions(updated_instructions, i + 4, Enum.at(updated_instructions, i + 4))
end
# Runs the program string and renders the final memory back as a
# comma-separated string.
def shipComputer(intcodeProgram) do
  intcodeProgram
  |> createInstructionList()
  |> process_instructions()
  |> Enum.join(",")
end

# Runs the program string and returns the value left at address 0.
def shipComputerOutput(intcodeProgram) do
  intcodeProgram
  |> createInstructionList()
  |> process_instructions()
  |> hd()
end
# Part 2 driver: brute-forces every noun/verb pair in 0..99 and prints any
# pair whose program output equals `target`. Results are reported via
# IO.inspect side effects only; the Enum.each return value (:ok) carries
# no information.
def searchForOutput(target, intcodeProgramString) do
  intcodeProgram = createInstructionList(intcodeProgramString)

  Enum.each(0..99, fn noun ->
    Enum.each(0..99, fn verb ->
      searchForOutput(target, intcodeProgram, noun, verb)
    end)
  end)
end

# Runs one attempt with addresses 1 and 2 replaced by noun and verb
# (memory is rebuilt fresh from `intcodeProgram` each call, as the puzzle
# requires) and prints the pair plus 100 * noun + verb on a match.
def searchForOutput(target, intcodeProgram, noun, verb) do
  modifiedIntcodeProgram = [hd(intcodeProgram)] ++ [noun, verb] ++ tl(tl(tl(intcodeProgram)))
  output = hd(process_instructions(modifiedIntcodeProgram))

  if target == output do
    IO.inspect([noun, verb, output])
    IO.inspect(100 * noun + verb)
  end
end
end
# D2.searchForOutput(19690720, D2.readInstructionFile('lib/data.txt'))
|
lib/aoc2019Day2.ex
| 0.800848
| 0.793306
|
aoc2019Day2.ex
|
starcoder
|
defmodule VintageNet.Interface.Classification do
  @moduledoc """
  Module for classifying and prioritizing network interfaces
  """

  @typedoc """
  Categorize interfaces based on their technology
  """
  @type interface_type :: :ethernet | :wifi | :mobile | :local | :unknown

  @typedoc """
  Interface connection status

  * `:disconnected` - The interface doesn't exist or it's not connected
  * `:lan` - The interface is connected to the LAN, but may not be able
    reach the Internet
  * `:internet` - Packets going through the interface should be able to
    reach the Internet
  """
  @type connection_status :: :lan | :internet | :disconnected

  @typedoc """
  Prioritization for using default gateways

  Examples

  * `{:ethernet, :internet}` - Wired ethernet that's Internet connected
  * `{:ethernet, :_}` - Wired ethernet with any status
  * `{:_, :internet}` - Any Internet-connected network interface
  """
  @type prioritization :: {interface_type() | :_, connection_status() | :_}

  @typedoc """
  A weight used to disambiguate interfaces that would otherwise have the same priority

  Low weights are higher priority.
  """
  @type weight :: 0..9

  # Ordered {prefix, type} lookup table for to_type/1. Order mirrors the
  # original function-clause order, so longer prefixes come before their
  # shorter counterparts (e.g. "wlan" before "wl").
  @prefix_table [
    {"eth", :ethernet},
    {"en", :ethernet},
    {"wlan", :wifi},
    {"wl", :wifi},
    {"ra", :wifi},
    {"ppp", :mobile},
    {"lo", :local},
    {"tap", :local}
  ]

  @doc """
  Classify a network type based on its name

  Examples

      iex> Classification.to_type("eth0")
      :ethernet

      iex> Classification.to_type("wlp5s0")
      :wifi

      iex> Classification.to_type("ppp0")
      :mobile
  """
  @spec to_type(VintageNet.ifname()) :: interface_type()
  def to_type(ifname) do
    Enum.find_value(@prefix_table, :unknown, fn {prefix, type} ->
      if String.starts_with?(ifname, prefix), do: type
    end)
  end

  @doc """
  Extract a number out of an interface name

  The result is the interface index for most interfaces seen
  on Nerves (eth0, eth1, ...), and something quite imperfect when using predictable
  interface naming (enp6s0, enp6s1).

  This is currently used to order priorities when there are two
  interfaces available of the same type that cannot be differentiated
  by other means. It has the one property of being easy to explain.
  """
  @spec to_instance(VintageNet.ifname()) :: non_neg_integer()
  def to_instance(ifname) do
    ifname
    |> String.to_charlist()
    |> Enum.reduce(0, fn
      # Fold each decimal digit into the running number; skip everything else.
      digit, acc when digit in ?0..?9 -> acc * 10 + digit - ?0
      _other, acc -> acc
    end)
  end

  @doc """
  Compute the routing metric for an interface with a status

  This uses the prioritization list to figure out what number should
  be used for the Linux routing table metric. It could also be `:disabled`
  to indicate that a route shouldn't be added to the Linux routing tables
  at all.
  """
  @spec compute_metric(interface_type(), connection_status(), weight(), [prioritization()]) ::
          pos_integer() | :disabled
  def compute_metric(_type, :disconnected, _weight, _prioritization), do: :disabled

  def compute_metric(type, status, weight, prioritization) when status in [:lan, :internet] do
    prioritization
    |> Enum.find_index(&option_match?(&1, type, status))
    |> case do
      nil ->
        :disabled

      index ->
        # Don't return 0, since that looks like the metric wasn't set. Also space out the
        # numbers. (Lower numbers are higher priority)
        (index + 1) * 10 + weight
    end
  end

  # An option matches on an exact pair, or a wildcard on exactly one side.
  # Clause order mirrors the original; note `{:_, :_}` is intentionally not
  # given special treatment beyond what these clauses provide.
  defp option_match?({type, status}, type, status), do: true
  defp option_match?({:_, status}, _type, status), do: true
  defp option_match?({type, :_}, type, _status), do: true
  defp option_match?(_option, _type, _status), do: false

  @doc """
  Return a reasonable default for prioritizing interfaces

  The logic is that Internet-connected interfaces are prioritized first
  and after than Ethernet is preferred over WiFi and WiFi over LTE.
  """
  @spec default_prioritization() :: [prioritization()]
  def default_prioritization() do
    [
      {:ethernet, :internet},
      {:wifi, :internet},
      {:mobile, :internet},
      {:_, :internet},
      {:ethernet, :lan},
      {:wifi, :lan},
      {:mobile, :lan},
      {:_, :lan}
    ]
  end
end
|
lib/vintage_net/interface/classification.ex
| 0.863464
| 0.465873
|
classification.ex
|
starcoder
|
defmodule Mix.Tasks.Scenic.Setup do
  @moduledoc """
  Does much of the work to set up Scenic in an existing project, such as Nerves.

  The typical use of this task is to install Scenic into a Nerves project. This assumes
  that you have already installed Nerves.
  [You should read use the Nerves Installation Guide.](https://hexdocs.pm/nerves/installation.html)

  ### Create a new project

  Then create a new Nerves project and set up Scenic within it

  ```bash
  mix nerves.new hello_nerves
  cd hello_nerves
  mix scenic.setup
  ```

  This also works to set up Scenic in a blank Elixir project

  ```bash
  mix new hello_world
  cd hello_world
  mix scenic.setup
  ```

  At this point, the main file structures are set up, but not completely hooked together.

  ### Set up the Scenic dependency

  Add the following lines the this list of deps in the project's mix.exs file. Notice that they
  are usable for all the Nerves targets. (Actually, the local driver doesn't work for bbb yet
  and is very slow and needs work on rpi4, but the point is that it works across host and
  the device target...)

  ```elixir
  {:scenic, "~> 0.11.0-beta.0"},
  {:scenic_driver_local, "~> 0.11.0-beta.0"},
  ```

  ### Make sure your application is configured to start

  This should be in your `mix.exs` file. If using Nerves, it will start `MyApplication.Application`

  ```elixir
  def application do
    [
      mod: {MyApplication, []},
      extra_applications: [:logger, :scenic]
    ]
  end
  ```

  ### Add Scenic to your app's supervisor

  Next, you need to add Scenic to your app's supervisor so that it starts scenic.
  Something like this should be in your `MyApp` or `MyApp.Application` module.

  ```elixir
  def start(_type, _args) do
    # start the application with the configured viewport
    viewport_config = Application.get_env(:<%= @app %>, :viewport)

    children = [
      {Scenic, [viewport_config]},
      <%= @mod %>.PubSub.Supervisor
    ]
    |> Supervisor.start_link( strategy: :one_for_one )
  end
  ```

  ### Configure your assets

  Add the following to your config.exs file. Change the app name as appropriate. This
  configuration is usually the same for all targets.

  ```elixir
  config :scenic, :assets, module: MyApplication.Assets
  ```

  ### Configure your ViewPort

  Next, you need to configure your ViewPort. This instructs Scenic how to draw
  to the screen, or window. This is typically different for the various Nerves
  targets.

  The following example would go in the host.exs file. Or, if this is just a regular
  elixir project running on a Mac/PC/Linux machine, it could go in config.exs

  ```elixir
  config :my_application, :viewport,
    size: {800, 600},
    theme: :dark,
    default_scene: MyApplication.Scene.Home,
    drivers: [
      [
        module: Scenic.Driver.Local,
        window: [title: "My Application"],
        on_close: :stop_system
      ]
    ]
  ```

  This configuration could be for a Nerves device. In this case an rpi3, that I've been
  using, but it could be any device with a fixed screen.

  ```elixir
  config :my_application, :viewport,
    size: {800, 600},
    theme: :dark,
    default_scene: MyApplication.Scene.Home,
    drivers: [
      [
        module: Scenic.Driver.Local,
        position: [scaled: true, centered: true, orientation: :normal],
      ]
    ]
  ```

  Scenic.Driver.Local has quite a few options you can set.
  Please see it's documentation for more.

  ### Get the Dependencies and Run

  You should now be ready to fetch the dependencies and run your project. Run these
  commands from within your project's main directory.

  ```bash
  mix deps.get
  iex -S mix
  ```

  ## The Starter Application

  The starter application created by the generator adds a minimal set of Scenic files
  displays information about the system it is running on.

  ## What to read next

  Next, you should read guides describing the overall Scenic structure. This is
  in the documentation for Scenic itself
  """

  use Mix.Task
  import Mix.Generator

  alias ScenicNew.Common

  @shortdoc "Creates a new Scenic v#{Common.scenic_version()} application"

  # Command-line switches accepted by the task.
  @switches [
    app: :string,
    module: :string
  ]

  # --------------------------------------------------------
  # Entry point: parses options, validates the app/module names, then
  # generates the Scenic starter files inside `path`.
  #
  # FIX: removed a leftover `import IEx` and `pry()` debugging breakpoint -
  # it dropped every invocation of the task into an interactive pry session
  # (or warned/failed outside of IEx), blocking normal use.
  @impl Mix.Task
  def run(argv) do
    {opts, argv} = OptionParser.parse!(argv, strict: @switches)
    Common.elixir_version_check!()

    path =
      case argv do
        [] -> "."
        [path | _] -> path
      end

    # Default the app name to the target directory name when --app is absent.
    app = opts[:app] || Path.basename(Path.expand(path))
    Common.check_application_name!(app, !opts[:app])
    mod = opts[:module] || Macro.camelize(app)
    Common.check_mod_name_validity!(mod)
    Common.check_mod_name_availability!(mod)

    File.cd!(path, fn ->
      generate(app, mod, path, opts)
    end)
  end

  # --------------------------------------------------------
  # Writes the Scenic starter files into the current directory, then prints
  # the manual configuration steps that remain.
  defp generate(app, mod, _path, _opts) do
    assigns = [
      app: app,
      mod: mod,
      elixir_version: get_version(System.version()),
      scenic_version: Common.scenic_version()
    ]

    create_file("lib/assets.ex", assets_template(assigns))
    create_directory("assets")
    create_file("assets/readme.txt", Common.assets_readme(assigns))
    create_directory("lib/scenes")
    create_file("lib/scenes/home.ex", scene_home_template(assigns))
    create_file("lib/scenes/readme.txt", Common.scene_readme(assigns))
    create_directory("lib/components")
    create_file("lib/components/readme.txt", Common.comp_readme(assigns))
    create_directory("lib/pubsub")
    create_file("lib/pubsub/supervisor.ex", pubsub_sup_template(assigns))
    create_file("lib/pubsub/readme.txt", Common.pubsub_readme(assigns))

    """
    The Scenic files were added successfully.
    You still need to configure it.
    ------
    Add the following lines the this list of deps in the project's mix.exs file. Notice that they
    are usable for all the Nerves targets. (Actually, the local driver doesn't work for bbb yet
    and is very slow and needs work on rpi4, but the point is that it works across host and
    the device target...)

      {:scenic, "~> #{Common.scenic_version()}"},
      {:scenic_driver_local, "~> #{Common.scenic_version()}"},

    ------
    Make sure your mix.exs file starts your application and scenic

      def application do
        [
          mod: {#{mod}, []},
          extra_applications: [:logger, :scenic]
        ]
      end

    ------
    Add Scenic to your app's supervisor so that it starts scenic.
    Something like this should be in your `#{app}.ex` module.

      def start(_type, _args) do
        # start the application with the configured viewport
        viewport_config = Application.get_env(:#{app}, :viewport)

        children = [
          {Scenic, [viewport_config]},
          #{mod}.PubSub.Supervisor
        ]
        |> Supervisor.start_link( strategy: :one_for_one )
      end

    ------
    Add the following to your config.exs file. Change the app name as appropriate. This
    configuration is usually the same for all targets.

      config :scenic, :assets, module: #{mod}.Assets

    ------
    Configure your ViewPort. This instructs Scenic how to draw
    to the screen, or window. This is typically different for the various Nerves
    targets. The following example would go in the host.exs file. Or, if this is
    just a regular elixir project running on a Mac/PC/Linux machine,
    it could go in config.exs

      config :#{app}, :viewport,
        size: {800, 600},
        theme: :dark,
        default_scene: #{mod}.Scene.Home,
        drivers: [
          [
            module: Scenic.Driver.Local,
            window: [title: "#{app}"],
            on_close: :stop_system
          ]
        ]

    This configuration could be for a Nerves device.

      config :#{app}, :viewport,
        size: {800, 600},
        theme: :dark,
        default_scene: #{mod}.Scene.Home,
        drivers: [
          [
            module: Scenic.Driver.Local,
            position: [scaled: true, centered: true, orientation: :normal],
          ]
        ]

    ------
    Finally, build and run your app:

      $ mix deps.get

    You can start your app with:

      $ mix scenic.run

    You can also run it interactively like this:

      $ iex -S mix
    """
    |> Mix.shell().info()
  end

  # --------------------------------------------------------
  # Formats an Elixir version string as "major.minor", keeping any
  # pre-release suffix (e.g. "1.12-rc").
  defp get_version(version) do
    {:ok, version} = Version.parse(version)

    "#{version.major}.#{version.minor}" <>
      case version.pre do
        [h | _] -> "-#{h}"
        [] -> ""
      end
  end

  # ============================================================================
  # template files
  templates = [
    # formatter: "templates/formatter.exs",
    # gitignore: "templates/gitignore",
    assets: "templates/new/lib/assets.ex.eex",
    pubsub_sup: "templates/new/lib/pubsub/supervisor.ex.eex",
    scene_home: "templates/new/lib/scenes/home.ex.eex"
  ]

  # Compile each template into a `<name>_template/1` function.
  Enum.each(templates, fn {name, content} ->
    embed_template(name, from_file: content)
  end)
end
|
lib/mix/tasks/setup.ex
| 0.773601
| 0.818483
|
setup.ex
|
starcoder
|
defmodule PigLatin do
  @doc """
  Given a `phrase`, translate it a word at a time to Pig Latin.

  Words beginning with consonants should have the consonant moved to the end of
  the word, followed by "ay".

  Words beginning with vowels (aeiou) should have "ay" added to the end of the
  word.

  Some groups of letters are treated like consonants, including "ch", "qu",
  "squ", "th", "thr", and "sch".

  Some groups are treated like vowels, including "yt" and "xr".
  """
  @spec translate(phrase :: String.t()) :: String.t()
  def translate(phrase) do
    phrase
    |> String.split()
    |> Enum.map(&translate_word/1)
    |> Enum.join(" ")
  end

  # Dispatches a single word based on the classification of its first letter.
  #
  # BUG FIX: the original passed the entire `phrase` to exception_xy/1 instead
  # of the current `word`, which broke any multi-word phrase containing a word
  # that starts with "x" or "y".
  defp translate_word(word) do
    case starts_vowel(word) do
      :ok -> word_inverter(word)
      ?x -> exception_xy(word)
      ?y -> exception_xy(word)
      ?u -> word_inverter(word)
      ?q -> exception_q(word, "")
      :error -> word_inverter_consonant(word, "")
    end
  end

  # Moves leading consonants (accumulated in `init`) to the end of the word,
  # recursing one grapheme at a time until a vowel is reached.
  defp word_inverter_consonant(word, init) do
    [head | tail] = String.graphemes(word)

    case starts_vowel(head) do
      ?q -> exception_q(List.to_string(tail), init <> head)
      ?u -> head <> List.to_string(tail) <> init <> "ay"
      ?x -> word_inverter_consonant(List.to_string(tail), init <> head)
      ?y -> word_inverter_consonant(List.to_string(tail), init <> head)
      :ok -> head <> List.to_string(tail) <> init <> "ay"
      :error -> word_inverter_consonant(List.to_string(tail), init <> head)
    end
  end

  # Handles "qu"-style clusters: a "u" following "q" travels with the leading
  # consonants instead of counting as the first vowel.
  defp exception_q(word, init) do
    [head | tail] = String.graphemes(word)

    case starts_vowel(head) do
      ?q -> exception_q(List.to_string(tail), init <> head)
      ?u -> word_inverter(List.to_string(tail), init <> head)
      ?x -> word_inverter_consonant(List.to_string(tail), init <> head)
      ?y -> word_inverter_consonant(List.to_string(tail), init <> head)
      :error -> word_inverter_consonant(word, init)
      :ok -> word <> init <> "ay"
    end
  end

  # Words starting with x/y act like vowels when followed by a consonant
  # (e.g. "xray", "yttria"); when followed by a vowel, the x/y behaves like a
  # regular leading consonant.
  defp exception_xy(word) do
    [head | tail] = String.graphemes(word)

    case starts_vowel(hd(tail)) do
      :error -> word <> "ay"
      ?y -> head <> Enum.join(tail) <> "ay"
      ?x -> head <> Enum.join(tail) <> "ay"
      ?q -> head <> Enum.join(tail) <> "ay"
      :ok -> Enum.join(tail) <> head <> "ay"
    end
  end

  # Appends the carried-over consonant prefix and the "ay" suffix.
  # (Kept public for compatibility with the original interface.)
  def word_inverter(body, initial) do
    body <> initial <> "ay"
  end

  # Vowel-initial words just get the "ay" suffix.
  defp word_inverter(body) do
    body <> "ay"
  end

  # Classifies the first character of `word` via classify_letter/1.
  defp starts_vowel(word) do
    word
    |> String.to_charlist()
    |> hd()
    |> classify_letter()
  end

  # :ok for a plain vowel, :error for a plain consonant, or the character
  # itself for letters that need special handling (u, y, x, q).
  defp classify_letter(letter) do
    case letter do
      ?a -> :ok
      ?e -> :ok
      ?i -> :ok
      ?o -> :ok
      ?u -> ?u
      ?y -> ?y
      ?x -> ?x
      ?q -> ?q
      _ -> :error
    end
  end
end
|
pig-latin/lib/pig_latin.ex
| 0.749637
| 0.48249
|
pig_latin.ex
|
starcoder
|
defmodule Legend.Event do
  @moduledoc """
  All: id, [{Legend.full_name, non_neg_integer, {Node.t, DateTime.t}}, ...], Legend.full_name, event, context, Logger.metadata()
  Legend.Hook:
    [:completed, :hook, name], event, hook_result
    [:skipped, :hook, :name], event
  Legend.Stage.Sync:
    [:starting, :sync, :transaction], effects_so_far
    [:completed, :sync, :transaction], transaction_result
    [:starting, :sync, :compensation], error_to_compensate, effect_to_compensate, effects_so_far
    [:completed, :sync, :compensation], compensation_result
    [:starting, :sync, :retry], retry_state, retry_opts
    [:completed, :sync, :retry], retry_result
    [:starting, :sync, :compensation, :error_handler, handler_name], error
    [:completed, :sync, :compensation, :error_handler, handler_name], compensation_error_handler_result
  Legend.Stage.Async:
    [:starting, :async, :transaction], effects_so_far
    [:completed, :async, :transaction] %{name => transaction_result}
    [:starting, :async, :compensation], errors_to_compensate, effects_to_compensate, effects_so_far
    [:completed, :async, :compensation] %{name => compensation_result}
    [:dependency_waiting, :async, :transactions], %{name => waiting_deps}
    [:starting, :async, :transactions] %{name => async_opts}
    [:started, :async, :transactions] %{name => Task.t}
    [:completed, :async, :transactions] %{name => task_result}
    [:dependency_waiting, :async, :compensations], %{name => waiting_deps}
    [:starting, :async, :compensations] %{name => async_opts}
    [:started, :async, :compensations] %{name => Task.t}
    [:completed, :async, :compensations] %{name => task_result}
  Legend.Stage.Mapper:
    [:starting, :mapper, :decomposer], effects_so_far, async_opts
    [:completed, :mapper, :decomposer], decomposer_result
    [:starting, :mapper, :stages], [{name, decomposer_effects, async_opts}]
    [:completed, :mapper, :stages], %{name => stage_result}
    [:starting, :mapper, :recomposer], mapper_effects
    [:completed, :mapper, :recomposer], recomposer_result
  Legend.Stage.Feedback:
    [:starting, :feedback, :init], effects_so_far, feedback_opts
    [:completed, :feedback, :init], feedback_init_result
    [:starting, :feedback, :transaction, :check], effects_so_far, feedback_state
    [:completed, :feedback, :transaction, :check], :continue | :complete, feedback_state
    [:starting, :feedback, :compensation, :check], effects_so_far, feedback_state
    [:completed, :feedback, :compensation, :check], :continue | :complete, feedback_state
    [:starting, :feedback, :transaction], effects_so_far
    [:completed, :feedback, :transaction], transaction_result
    [:starting, :feedback, :compensation], error_to_compensate, effect_to_compensate, effects_so_far
    [:completed, :feedback, :compensation], compensation_result
  Legend:
    [:starting, :legend], initial_effects, legend_opts
    [:starting, :legend, :transaction, name], effects_so_far
    [:completed, :legend, :transaction, name], transaction_result
    [:starting, :legend, :compensation, name], error_to_compensate, effect_to_compensate, effects_so_far
    [:completed, :legend, :compensation, name], compensation_result
    [:complete, :legend], legend_result
  """

  alias Legend.Utils

  @typedoc """
  A single lifecycle event emitted while a legend runs.
  """
  @type t :: %__MODULE__{
    id: Legend.id,
    #timestamp: [{Legend.full_name, non_neg_integer, {Node.t, DateTime.t}}, ...],
    timestamp: {Node.t, DateTime.t},
    stage_name: Legend.full_name,
    name: [atom],
    context: term,
    metadata: Keyword.t
  }

  # All fields default to nil.
  defstruct [:id, :timestamp, :stage_name, :name, :context, :metadata]

  # Default values applied to every freshly created or updated event:
  # a node-local timestamp and the current local metadata.
  @spec defaults() :: Keyword.t
  def defaults() do
    [
      timestamp: {Node.self(), DateTime.utc_now()},
      metadata: Utils.get_local_metadata()
    ]
  end

  # Builds a new event from the defaults merged with caller overrides.
  @spec create(Keyword.t) :: t
  def create(opts \\ []) do
    struct(__MODULE__, Keyword.merge(defaults(), opts))
  end

  # Applies defaults plus overrides to an existing event, ignoring any keys
  # the struct does not define.
  @spec update(t, Keyword.t) :: t
  def update(event, opts \\ []) do
    merged = Keyword.merge(defaults(), opts)

    Enum.reduce(merged, event, fn {key, value}, acc ->
      if Map.has_key?(acc, key) do
        Map.put(acc, key, value)
      else
        acc
      end
    end)
  end

  # Looks up the effect for this event's stage. The head of stage_name is the
  # legend root, so only the remainder is used as the path into `effects`.
  @spec get_effect(t, Legend.effects) :: Legend.effect | nil
  def get_effect(event, effects) do
    path = tl(event.stage_name)
    get_in(effects, path)
  end
end
|
lib/legend/event.ex
| 0.751192
| 0.562056
|
event.ex
|
starcoder
|
defmodule MotionBetweenImpacts do
  @moduledoc """
  Computes the time evolution of the system from one impact to the next
  """

  @typedoc """
  Tuple comprising an `:ImpactPoint`, a list which optionally contains `:StateOfMotion` instances for time steps
  from the previous impact and a function to evaluate a chatter counter.
  """
  @type point_with_states :: {%ImpactPoint{}, [%StateOfMotion{}], (number() -> any())}

  @doc """
  Gives the state of motion (position, velocity, time) at a given time after an impact

  `:t`: time into the simulation
  `:previous_impact`: the point on the impact surface corresponding to the previous impact
  `:coeffs`: the coefficients governing the motion after the previous impact

  Returns the `:StateOfMotion` at time `:t`
  """
  @spec motion_at_time(number(), %ImpactPoint{}, %EvolutionCoefficients{}) :: %StateOfMotion{}
  def motion_at_time(t, %ImpactPoint{} = previous_impact, %EvolutionCoefficients{} = coeffs) do
    # Time since the previous impact
    lambda = t - previous_impact.t

    result = %StateOfMotion{t: t}

    # Displacement
    result = %{
      result
      | x:
          coeffs.cos_coeff * :math.cos(lambda) + coeffs.sin_coeff * :math.sin(lambda) +
            coeffs.gamma *
              :math.cos(coeffs.omega * t)
    }

    # Velocity
    %{
      result
      | v:
          coeffs.sin_coeff * :math.cos(lambda) - coeffs.cos_coeff * :math.sin(lambda) -
            coeffs.omega *
              coeffs.gamma * :math.sin(coeffs.omega * t)
    }
  end

  @doc """
  Returns the next impact given the a specified starting impact.

  `:previous_impact`: the specified starting impact
  `:params`: the parameters governing the system behaviour
  `:record_states`: specifies whether intermediate states of motion will be returned
  `:step_size`: the initial time step size (the search algorithm will reduce this)

  Returns a `t:point_with_states/0` with the next impact point and optionally the intermediate states of motion
  """
  @spec next_impact(
          %ImpactPoint{},
          %SystemParameters{},
          (integer() -> {boolean(), (number() -> any())}),
          boolean(),
          number(),
          number()
        ) :: point_with_states
  def next_impact(
        %ImpactPoint{} = previous_impact,
        %SystemParameters{} = parameters,
        chatter_counter \\ Chatter.count_low_v(),
        record_states \\ false,
        step_size \\ 0.1,
        limit \\ 0.000001
      ) do
    # Equations of motion to next impact.
    # BUG FIX: the original bound `{status, coeffs}` and used a bare
    # `if status != :ok do ... end` for the error case. In Elixir that `if`
    # does not return early, so execution fell through and used the invalid
    # coefficients anyway. Branch explicitly instead.
    case EvolutionCoefficients.derive(parameters, previous_impact) do
      {:ok, coeffs} ->
        search_from_coeffs(
          previous_impact,
          parameters,
          coeffs,
          chatter_counter,
          record_states,
          step_size,
          limit
        )

      {_status, error} ->
        # TODO: this is a fudge! Report the failure and return an empty result.
        IO.inspect(error)
        {nil, [], 0}
    end
  end

  # Performs the search for the next impact once valid evolution coefficients
  # are available. Returns a `t:point_with_states/0`.
  defp search_from_coeffs(
         previous_impact,
         parameters,
         coeffs,
         chatter_counter,
         record_states,
         step_size,
         limit
       ) do
    # State immediately after the previous impact: at the obstacle with the
    # rebound velocity (-r * incoming velocity).
    start_state = %StateOfMotion{
      t: previous_impact.t,
      x: parameters.sigma,
      v: -parameters.r * previous_impact.v
    }

    # Check for chatter
    check_chatter = fn state, parameters, sticking_region ->
      Chatter.accumulation_state(state, parameters)
      |> (&if(elem(&1, 0) == :ok, do: elem(&1, 1), else: nil)).()
      |> (&StickingRegion.state_if_sticking(&1, sticking_region)).()
    end

    {chatter_impact, new_counter} = chatter_counter.(previous_impact.v)

    chatter_result =
      chatter_impact && check_chatter.(start_state, parameters, coeffs.sticking_region)

    states =
      if chatter_result do
        # We have chatter, so skip to the chatter result
        states_for_step(start_state, parameters.sigma, record_states) ++ [chatter_result]
      else
        # No chatter, so initiate the recursive search for the next impact
        find_next_impact(
          start_state,
          previous_impact,
          coeffs,
          parameters,
          record_states,
          step_size,
          limit
        )
      end

    # Convert state at impact to an `:ImpactPoint`
    {StateOfMotion.point_from_state(Enum.at(states, -1), parameters.omega), states, new_counter}
  end

  @spec states_for_step(%StateOfMotion{}, number(), boolean()) :: [%StateOfMotion{}]
  # If intermediate states are being recorded AND the current displacement is less than or equal to the obstacle offset,
  # returns a list containing the current state of motion. Otherwise, returns an empty list.
  defp states_for_step(%StateOfMotion{} = state, sigma, record_states) do
    if state.x <= sigma and record_states do
      [state]
    else
      []
    end
  end

  @spec find_next_impact(
          %StateOfMotion{},
          %ImpactPoint{},
          %EvolutionCoefficients{},
          %SystemParameters{},
          boolean(),
          number(),
          number()
        ) :: [%StateOfMotion{}]
  # For a given impact point and current state of motion, returns a list containing the state of motion corresponding to
  # the next impact. Optionally, the returned list will also contain the states corresponding to the intermediate time
  # steps. The current state of motion is needed because the function is recursive.
  defp find_next_impact(
         %StateOfMotion{} = state,
         %ImpactPoint{} = _previous_impact,
         %EvolutionCoefficients{} = _coeffs,
         %SystemParameters{} = _parameters,
         _record_states,
         step_size,
         limit
       )
       when abs(step_size) < limit do
    # Termination criterion: return the state of motion corresponding to the next impact
    [state]
  end

  defp find_next_impact(
         %StateOfMotion{} = state,
         %ImpactPoint{} = previous_impact,
         %EvolutionCoefficients{} = coeffs,
         %SystemParameters{} = parameters,
         record_states,
         step_size,
         limit
       ) do
    # Record current state if required
    states = states_for_step(state, parameters.sigma, record_states)

    # Check for sticking
    if StickingRegion.is_sticking_impact?(previous_impact, coeffs.sticking_region) do
      states ++ [StickingRegion.next_impact_state(state.t, state.x, coeffs.sticking_region)]
    else
      # Update step size if necessary. When we are close to the impact, this implements the bisection algorithm which
      # finds the impact time
      step_size = new_step_size(step_size, state.t - previous_impact.t, state.x, parameters.sigma)

      # Get state at new time
      new_time = state.t + step_size
      new_state = motion_at_time(new_time, previous_impact, coeffs)

      # Recurse
      states ++
        find_next_impact(
          new_state,
          previous_impact,
          coeffs,
          parameters,
          record_states,
          step_size,
          limit
        )
    end
  end

  @doc """
  Where appropriate, refines the step size and reverses its direction. This effectively implements the bisection
  algorithm which seeks the time of the next impact
  """
  @spec new_step_size(float, float, float, float) :: float
  def new_step_size(step_size, _time_diff, x, sigma) when x <= sigma and step_size < 0 do
    # If we get here then previous displacement was above the offset, so continue to apply bisection algorithm
    -0.5 * step_size
  end

  def new_step_size(step_size, time_diff, x, sigma) when x > sigma and step_size > 0 do
    # Displacement is above offset so apply bisection algorithm to seek impact time
    # BUT don't step farther back than the previous impact
    -0.5 * min(step_size, time_diff)
  end

  def new_step_size(step_size, time_diff, _x, _sigma) when step_size <= -time_diff do
    # Continue search in same direction
    # BUT don't step farther back than the previous impact
    -0.5 * time_diff
  end

  def new_step_size(step_size, _time_diff, _x, _sigma) do
    # Default behaviour: do nothing
    step_size
  end

  @doc """
  Generates a sequence of impacts from a specified starting impact

  `:start_impact`: the initial impact
  `:params`: the parameters governing the system behaviour
  `:num_iterations`: the number of impacts to compute

  Returns a list of `:num_iterations` impacts

  ## Example

      iex> initial_point = %ImpactPoint{phi: 0.5, t: 0.5, v: 0.5}
      %ImpactPoint{phi: 0.5, t: 0.5, v: 0.5}
      iex> params = %SystemParameters{omega: 2.8, r: 0.8, sigma: 0}
      %SystemParameters{omega: 2.8, r: 0.8, sigma: 0}
      iex> num_iterations = 10
      10
      iex> elem(MotionBetweenImpacts.iterate_impacts(initial_point, params, num_iterations), 0)
      [
        %ImpactPoint{phi: 0.5, t: 0.5, v: 0.5},
        %ImpactPoint{
          phi: 0.56107466894729,
          t: 3.5030433654785176,
          v: 0.6390091769528171
        },
        %ImpactPoint{
          phi: 0.9714358043484209,
          t: 6.667886352539055,
          v: 0.28162295347995525
        },
        %ImpactPoint{
          phi: 0.18654749416955352,
          t: 7.150595855712881,
          v: 0.1756490844536407
        },
        %ImpactPoint{
          phi: 0.5086073611379497,
          t: 10.117291259765604,
          v: 0.47786866340459266
        },
        %ImpactPoint{
          phi: 0.5977131170786586,
          t: 12.561238861083954,
          v: 0.13396902992973406
        },
        %ImpactPoint{
          phi: 0.15289532364504355,
          t: 13.807064819335901,
          v: 0.4898211314201739
        },
        %ImpactPoint{
          phi: 0.5147981307676757,
          t: 16.863167572021457,
          v: 0.6795397961462686
        },
        %ImpactPoint{
          phi: 0.8816824953803373,
          t: 19.930448913574235,
          v: 0.2379830673351312
        },
        %ImpactPoint{
          phi: 0.0669340795423185,
          t: 20.346152496337915,
          v: 0.20295138984978708
        }
      ]
  """
  @spec iterate_impacts(%ImpactPoint{}, %SystemParameters{}, integer(), boolean()) ::
          {[%ImpactPoint{}], [%StateOfMotion{}]}
  def iterate_impacts(
        %ImpactPoint{} = start_impact,
        %SystemParameters{} = params,
        num_iterations \\ 1000,
        record_states \\ false
      ) do
    chatter_counter = Chatter.count_low_v()

    stream =
      Stream.unfold(
        {start_impact, [], chatter_counter},
        &{&1, next_impact(elem(&1, 0), params, elem(&1, 2), record_states)}
      )

    Enum.take(stream, num_iterations)
    |> (&{Enum.reduce(&1, [], fn x, acc -> acc ++ [elem(x, 0)] end),
          Enum.reduce(&1, [], fn x, acc -> acc ++ elem(x, 1) end)}).()
  end
end
|
apps/imposc/lib/dynamics/motion.ex
| 0.892744
| 0.898321
|
motion.ex
|
starcoder
|
defmodule Shapt do
  @moduledoc """
  Use this to create your own feature toggle worker as in the example:
  ```elixir
  defmodule TestModule do
    use Shapt,
      adapter: {Shapt.Adapters.Env, []},
      toggles: [
        feature_x?: %{
          key: "MYAPP_FEATURE_X",
          deadline: ~D[2019-12-31]
        },
        feature_y?: %{
          deadline: ~D[2009-12-31]
        }
      ]
  end
  ```
  """

  @typedoc """
  Options to be passed when using `Shapt`.
  It's a keywordlist with the required keys `:adapter` and `:toggles`.
  """
  @type use_opts :: [adapter: {adapter(), adapter_opts()}, toggles: toggles()]

  @typedoc """
  A module that implements the `Shapt.Adapter` behaviour.
  """
  @type adapter :: module()

  @typedoc """
  Options to configure the adapter.
  Check the adapter documentation for more details.
  """
  @type adapter_opts :: keyword()

  @typedoc """
  A keywordlist with the toggles names and its configuration.
  """
  @type toggles :: [{toggle_name(), toggle_opts()}]

  @typedoc """
  The name of a toggle.
  This name gonna become a function on your module and gonna be name used to identify this toggle on all Shapt mix tasks.
  """
  @type toggle_name :: atom()

  @typedoc """
  It's a map with options to configure the individual toggle.
  The only option that Shapt defines is the `:deadline`.
  More options can be defined and used by the adapter.
  """
  @type toggle_opts :: %{deadline: deadline()}

  @typedoc """
  Defines a deadline for using the toggle.
  It's used by `Mix.Tasks.Shapt.Expired` task and the functions `expired?/1` and `expired_toggles/0`.
  """
  @type deadline :: Date.t()

  @doc false
  # Injects the toggle API into the using module: supervision-tree plumbing
  # (child_spec/1, start_link/1), toggle queries (enabled?/1, all_values/0,
  # reload/0), deadline helpers (expired?/1, expired_toggles/0), adapter
  # helpers (template/0, toggle/2), and one zero-arity predicate function per
  # configured toggle name.
  defmacro __using__(options) do
    # Adapter and toggle configuration are fixed at compile time of the
    # using module; `toggles` is just the list of configured toggle names.
    {adapter, adapter_conf} = options[:adapter]
    toggle_conf = options[:toggles]
    toggles = Enum.map(toggle_conf, &elem(&1, 0))

    [
      quote do
        # child_spec/1 with no overrides: supervise a Shapt.Worker configured
        # with the compile-time adapter and toggle list.
        def child_spec([]) do
          opts = [
            toggles: unquote(toggle_conf),
            adapter: {unquote(adapter), unquote(adapter_conf)},
            module: __MODULE__
          ]

          Shapt.Worker.child_spec(opts)
        end

        # child_spec/1 with params: allows the adapter to be swapped at
        # supervision time via `params[:adapter]`.
        def child_spec(params) do
          opts = [
            toggles: unquote(toggle_conf),
            adapter: params[:adapter],
            module: __MODULE__
          ]

          Shapt.Worker.child_spec(opts)
        end

        # start_link/1 with no overrides: boot the worker, registered under
        # the using module's name.
        def start_link([]) do
          opts = [
            toggle_conf: unquote(toggle_conf),
            adapter: unquote(adapter),
            adapter_conf: unquote(adapter_conf),
            name: __MODULE__
          ]

          Shapt.Worker.start_link(opts)
        end

        # start_link/1 with params: adapter and adapter options taken from
        # `params[:adapter]` instead of the compile-time configuration.
        def start_link(params) do
          {adapter, adapter_conf} = params[:adapter]

          opts = [
            toggle_conf: unquote(toggle_conf),
            adapter: adapter,
            adapter_conf: adapter_conf,
            name: __MODULE__
          ]

          Shapt.Worker.start_link(opts)
        end

        # Current values of every configured toggle.
        def all_values, do: Shapt.Worker.all_values(__MODULE__)

        # Ask the worker to re-read toggle state from the adapter.
        def reload, do: Shapt.Worker.reload(__MODULE__)

        # True/false for a known toggle; :error for an unknown toggle name.
        def enabled?(toggle) do
          if toggle in unquote(toggles) do
            Shapt.Worker.enabled?(__MODULE__, toggle)
          else
            :error
          end
        end

        # Whether the toggle's configured deadline has passed.
        def expired?(toggle) do
          Shapt.Helpers.do_expired(toggle, unquote(toggle_conf))
        end

        # All toggles whose deadline has passed.
        def expired_toggles do
          Shapt.Helpers.do_all_expired(unquote(toggle_conf))
        end

        # Adapter-specific configuration template for the configured toggles.
        def template do
          Shapt.Helpers.do_template(unquote(adapter), unquote(adapter_conf), unquote(toggle_conf))
        end

        # Evaluates the toggle and applies the matching branch from `opts`;
        # :error for an unknown toggle name.
        def toggle(name, opts) do
          if name in unquote(toggles) do
            name
            |> enabled?()
            |> Shapt.Helpers.apply_toggle(opts)
          else
            :error
          end
        end
      end
      # One zero-arity convenience function per toggle, e.g. `feature_x?/0`,
      # delegating to enabled?/1.
      | Enum.map(options[:toggles], fn {name, _opts} ->
          quote do
            def unquote(name)(), do: enabled?(unquote(name))
          end
        end)
    ]
  end
end
|
lib/shapt.ex
| 0.76432
| 0.812347
|
shapt.ex
|
starcoder
|
defmodule Estated.Property.Deed do
  @moduledoc "Sale and mortgage data which constitute a deed record."
  @moduledoc since: "0.2.0"

  import Estated.CastHelpers, only: [cast_date: 1]

  defstruct [
    :document_type,
    :recording_date,
    :original_contract_date,
    :deed_book,
    :deed_page,
    :document_id,
    :sale_price,
    :sale_price_description,
    :transfer_tax,
    :distressed_sale,
    :real_estate_owned,
    :seller_first_name,
    :seller_last_name,
    :seller2_first_name,
    :seller2_last_name,
    :seller_address,
    :seller_unit_number,
    :seller_city,
    :seller_state,
    :seller_zip_code,
    :seller_zip_plus_four_code,
    :buyer_first_name,
    :buyer_last_name,
    :buyer2_first_name,
    :buyer2_last_name,
    :buyer_address,
    :buyer_unit_type,
    :buyer_unit_number,
    :buyer_city,
    :buyer_state,
    :buyer_zip_code,
    :buyer_zip_plus_four_code,
    :lender_name,
    :lender_type,
    :loan_amount,
    :loan_type,
    :loan_due_date,
    :loan_finance_type,
    :loan_interest_rate
  ]

  @typedoc "Up to 20 years of sale and mortgage data which constitute a deed record."
  @typedoc since: "0.2.0"
  @type deeds :: [t()]

  @typedoc "Sale and mortgage data which constitute a deed record."
  @typedoc since: "0.2.0"
  @type t :: %__MODULE__{
          document_type: document_type() | nil,
          recording_date: recording_date() | nil,
          original_contract_date: original_contract_date() | nil,
          deed_book: deed_book() | nil,
          deed_page: deed_page() | nil,
          document_id: document_id() | nil,
          sale_price: sale_price() | nil,
          sale_price_description: sale_price_description() | nil,
          transfer_tax: transfer_tax() | nil,
          distressed_sale: distressed_sale() | nil,
          real_estate_owned: real_estate_owned() | nil,
          seller_first_name: seller_first_name() | nil,
          seller_last_name: seller_last_name() | nil,
          seller2_first_name: seller2_first_name() | nil,
          seller2_last_name: seller2_last_name() | nil,
          seller_address: seller_address() | nil,
          seller_unit_number: seller_unit_number() | nil,
          seller_city: seller_city() | nil,
          seller_state: seller_state() | nil,
          seller_zip_code: seller_zip_code() | nil,
          seller_zip_plus_four_code: seller_zip_plus_four_code() | nil,
          buyer_first_name: buyer_first_name() | nil,
          buyer_last_name: buyer_last_name() | nil,
          buyer2_first_name: buyer2_first_name() | nil,
          buyer2_last_name: buyer2_last_name() | nil,
          buyer_address: buyer_address() | nil,
          buyer_unit_type: buyer_unit_type() | nil,
          buyer_unit_number: buyer_unit_number() | nil,
          buyer_city: buyer_city() | nil,
          buyer_state: buyer_state() | nil,
          buyer_zip_code: buyer_zip_code() | nil,
          buyer_zip_plus_four_code: buyer_zip_plus_four_code() | nil,
          lender_name: lender_name() | nil,
          lender_type: lender_type() | nil,
          loan_amount: loan_amount() | nil,
          loan_type: loan_type() | nil,
          loan_due_date: loan_due_date() | nil,
          loan_finance_type: loan_finance_type() | nil,
          loan_interest_rate: loan_interest_rate() | nil
        }

  @typedoc """
  Type of deed document.
  Eg. [**WARRANTY DEED**](https://estated.com/developers/docs/v4/property/enum-overview#document_type)
  """
  @typedoc since: "0.2.0"
  @type document_type :: String.t()

  @typedoc """
  The official date the document was recorded.
  Eg. **2011-12-28**
  """
  @typedoc since: "0.2.0"
  @type recording_date :: Date.t()

  @typedoc """
  The date the original contract was signed.
  Eg. **2011-12-26**
  """
  @typedoc since: "0.2.0"
  @type original_contract_date :: Date.t()

  @typedoc """
  The physical book where the deed was recorded.
  Eg. **9056**
  """
  @typedoc since: "0.2.0"
  @type deed_book :: String.t()

  @typedoc """
  The physical page where the deed was recorded.
  Eg. **595**
  """
  @typedoc since: "0.2.0"
  @type deed_page :: String.t()

  @typedoc """
  Identifier assigned to document at the recording date.
  Eg. **93154L**
  """
  @typedoc since: "0.2.0"
  @type document_id :: String.t()

  @typedoc """
  The total sale price in dollars.
  Eg. **170000**
  """
  @typedoc since: "0.2.0"
  @type sale_price :: integer()

  @typedoc """
  A description of the sale.
  Eg. [**ESTIMATED**](https://estated.com/developers/docs/v4/property/enum-overview#sale_price_description)
  """
  @typedoc since: "0.2.0"
  @type sale_price_description :: String.t()

  @typedoc """
  The tax amount levied by the city, county, or a combination thereof.
  Eg. **142.03**
  """
  @typedoc since: "0.2.0"
  @type transfer_tax :: float()

  @typedoc """
  An indicator to determine if the sale was deemed to be distressed.
  Eg. **false**
  """
  @typedoc since: "0.2.0"
  @type distressed_sale :: boolean()

  @typedoc """
  An indicator used to determine the status of the transfer.
  Eg. [**NO**](https://estated.com/developers/docs/v4/property/enum-overview#real_estate_owned)
  """
  @typedoc since: "0.2.0"
  @type real_estate_owned :: String.t()

  @typedoc """
  Seller first name, or null if corporate owner.
  Eg. **JOHN**
  """
  @typedoc since: "0.2.0"
  @type seller_first_name :: String.t()

  @typedoc """
  Seller last name, or corporate name.
  Eg. **SMITH**
  """
  @typedoc since: "0.2.0"
  @type seller_last_name :: String.t()

  @typedoc """
  The second seller first name, or null if corporate owner.
  Eg. **JANE**
  """
  @typedoc since: "0.2.0"
  @type seller2_first_name :: String.t()

  @typedoc """
  The second seller last name, or corporate name.
  Eg. **SMITH**
  """
  @typedoc since: "0.2.0"
  @type seller2_last_name :: String.t()

  @typedoc """
  The seller mailing address.
  Eg. **123 MAIN ST**
  """
  @typedoc since: "0.2.0"
  @type seller_address :: String.t()

  @typedoc """
  The seller unit number.
  Eg. **1**
  """
  @typedoc since: "0.2.0"
  @type seller_unit_number :: String.t()

  @typedoc """
  The seller city.
  Eg. **PHOENIX**
  """
  @typedoc since: "0.2.0"
  @type seller_city :: String.t()

  @typedoc """
  The seller mailing state.
  Eg. **AZ**
  """
  @typedoc since: "0.2.0"
  @type seller_state :: String.t()

  @typedoc """
  Seller zip code.
  Eg. **85024**
  """
  @typedoc since: "0.2.0"
  @type seller_zip_code :: String.t()

  @typedoc """
  Seller four digit postal zip extension.
  Eg. **3019**
  """
  @typedoc since: "0.2.0"
  @type seller_zip_plus_four_code :: String.t()

  @typedoc """
  Buyer first name, or null if corporate owner.
  Eg. **JOHN**
  """
  @typedoc since: "0.2.0"
  @type buyer_first_name :: String.t()

  @typedoc """
  Buyer last name, or corporate name.
  Eg. **SMITH**
  """
  @typedoc since: "0.2.0"
  @type buyer_last_name :: String.t()

  @typedoc """
  Second buyer first name, or null if corporate owner.
  Eg. **JANE**
  """
  @typedoc since: "0.2.0"
  @type buyer2_first_name :: String.t()

  @typedoc """
  Second buyer last name, or corporate name.
  Eg. **SMITH**
  """
  @typedoc since: "0.2.0"
  @type buyer2_last_name :: String.t()

  @typedoc """
  Buyer mailing address.
  Eg. **123 MAIN ST**
  """
  @typedoc since: "0.2.0"
  @type buyer_address :: String.t()

  @typedoc """
  Buyer unit type.
  Eg. [**APT**](https://estated.com/developers/docs/v4/property/enum-overview#unit_type)
  """
  @typedoc since: "0.2.0"
  @type buyer_unit_type :: String.t()

  @typedoc """
  Buyer unit number.
  Eg. **1A**
  """
  @typedoc since: "0.2.0"
  @type buyer_unit_number :: String.t()

  @typedoc """
  Buyer mailing city.
  Eg. **DENVER**
  """
  @typedoc since: "0.2.0"
  @type buyer_city :: String.t()

  @typedoc """
  Buyer mailing state.
  Eg. **CO**
  """
  @typedoc since: "0.2.0"
  @type buyer_state :: String.t()

  @typedoc """
  Buyer mailing zip code.
  Eg. **80222**
  """
  @typedoc since: "0.2.0"
  @type buyer_zip_code :: String.t()

  @typedoc """
  Buyer four digit postal zip extension.
  Eg. **3019**
  """
  @typedoc since: "0.2.0"
  @type buyer_zip_plus_four_code :: String.t()

  @typedoc """
  Mortgage lender.
  Eg. **CITIMORTGAGE INC**
  """
  @typedoc since: "0.2.0"
  @type lender_name :: String.t()

  @typedoc """
  The type of lender.
  Eg. [**BANK**](https://estated.com/developers/docs/v4/property/enum-overview#lender_type)
  """
  @typedoc since: "0.2.0"
  @type lender_type :: String.t()

  @typedoc """
  Mortgage recorded in dollars.
  Eg. **74900**
  """
  @typedoc since: "0.2.0"
  @type loan_amount :: integer()

  @typedoc """
  Type of loan security.
  Eg. [**CLOSED MORTGAGE**](https://estated.com/developers/docs/v4/property/enum-overview#loan_type)
  """
  @typedoc since: "0.2.0"
  @type loan_type :: String.t()

  @typedoc """
  The date the mortgage will be paid in full.
  Eg. **2029-10-22**
  """
  @typedoc since: "0.2.0"
  @type loan_due_date :: Date.t()

  @typedoc """
  The interest rate type on the loan.
  Eg. [**FIXED RATE**](https://estated.com/developers/docs/v4/property/enum-overview#loan_finance_type)
  """
  @typedoc since: "0.2.0"
  @type loan_finance_type :: String.t()

  @typedoc """
  The interest rate of the loan.
  Eg. **2.97**
  """
  @typedoc since: "0.2.0"
  @type loan_interest_rate :: float()

  @doc false
  @doc since: "0.2.0"
  @spec cast_list([map()]) :: [t()]
  def cast_list(deeds) when is_list(deeds) do
    Enum.map(deeds, &cast/1)
  end

  @spec cast_list(nil) :: []
  def cast_list(nil) do
    []
  end

  # String keys whose values are parsed into `Date` structs via cast_date/1.
  @date_fields ~w(recording_date original_contract_date loan_due_date)

  # String keys whose values are copied verbatim into the struct. Every name
  # here corresponds 1:1 to a key declared in `defstruct` above.
  @verbatim_fields ~w(
    document_type deed_book deed_page document_id sale_price
    sale_price_description transfer_tax distressed_sale real_estate_owned
    seller_first_name seller_last_name seller2_first_name seller2_last_name
    seller_address seller_unit_number seller_city seller_state
    seller_zip_code seller_zip_plus_four_code
    buyer_first_name buyer_last_name buyer2_first_name buyer2_last_name
    buyer_address buyer_unit_type buyer_unit_number buyer_city buyer_state
    buyer_zip_code buyer_zip_plus_four_code
    lender_name lender_type loan_amount loan_type loan_finance_type
    loan_interest_rate
  )

  # Builds a Deed struct from a raw string-keyed map, field by field.
  defp cast(%{} = deed) do
    Enum.reduce(deed, %__MODULE__{}, &cast_field/2)
  end

  # Date fields are run through cast_date/1 before being stored.
  # String.to_existing_atom/1 is safe here: the atoms are guaranteed to
  # exist because every name in @date_fields is a defstruct key, and
  # struct!/2 raises on any key not present in the struct.
  defp cast_field({field, value}, acc) when field in @date_fields do
    struct!(acc, [{String.to_existing_atom(field), cast_date(value)}])
  end

  # All remaining known fields are stored unmodified.
  defp cast_field({field, value}, acc) when field in @verbatim_fields do
    struct!(acc, [{String.to_existing_atom(field), value}])
  end

  # Unrecognized keys are ignored so new API fields never break casting.
  defp cast_field(_map_entry, acc) do
    acc
  end
end
|
lib/estated/property/deed.ex
| 0.729327
| 0.483648
|
deed.ex
|
starcoder
|
defmodule Phoenix.Component do
  @moduledoc ~S'''
  API for function components.
  A function component is any function that receives
  an assigns map as argument and returns a rendered
  struct built with [the `~H` sigil](`Phoenix.LiveView.Helpers.sigil_H/2`).
  Here is an example:
  defmodule MyComponent do
  use Phoenix.Component
  # Optionally also bring the HTML helpers
  # use Phoenix.HTML
  def greet(assigns) do
  ~H"""
  <p>Hello, <%= assigns.name %></p>
  """
  end
  end
  The component can be invoked as a regular function:
  MyComponent.greet(%{name: "Jane"})
  But it is typically invoked using the function component
  syntax from the `~H` sigil:
  ~H"""
  <MyComponent.greet name="Jane" />
  """
  If the `MyComponent` module is imported or if the function
  is defined locally, you can skip the module name:
  ~H"""
  <.greet name="Jane" />
  """
  Similar to any HTML tag inside the `~H` sigil, you can
  interpolate attributes values too:
  ~H"""
  <.greet name={@user.name} />
  """
  You can learn more about the `~H` sigil [in its documentation](`Phoenix.LiveView.Helpers.sigil_H/2`).
  ## `use Phoenix.Component`
  Modules that define function components should call
  `use Phoenix.Component` at the top. Doing so will import
  the functions from both `Phoenix.LiveView` and
  `Phoenix.LiveView.Helpers` modules. `Phoenix.LiveView`
  and `Phoenix.LiveComponent` automatically invoke
  `use Phoenix.Component` for you.
  You must avoid defining a module for each component. Instead,
  we should use modules to group side-by-side related function
  components.
  ## Assigns
  While inside a function component, you must use `Phoenix.LiveView.assign/3`
  and `Phoenix.LiveView.assign_new/3` to manipulate assigns,
  so that LiveView can track changes to the assigns values.
  For example, let's imagine a component that receives the first
  name and last name and must compute the name assign. One option
  would be:
  def show_name(assigns) do
  assigns = assign(assigns, :name, assigns.first_name <> assigns.last_name)
  ~H"""
  <p>Your name is: <%= @name %></p>
  """
  end
  However, when possible, it may be cleaner to break the logic over function
  calls instead of precomputed assigns:
  def show_name(assigns) do
  ~H"""
  <p>Your name is: <%= full_name(@first_name, @last_name) %></p>
  """
  end
  defp full_name(first_name, last_name), do: first_name <> last_name
  Another example is making an assign optional by providing
  a default value:
  def field_label(assigns) do
  assigns = assign_new(assigns, :help, fn -> nil end)
  ~H"""
  <label>
  <%= @text %>
  <%= if @help do %>
  <span class="help"><%= @help %></span>
  <% end %>
  </label>
  """
  end
  ## Slots
  Slots is a mechanism to give HTML blocks to function components
  as in regular HTML tags.
  ### Default slots
  Any content you pass inside a component is assigned to a default slot
  called `@inner_block`. For example, imagine you want to create a button
  component like this:
  <.button>
  This renders <strong>inside</strong> the button!
  </.button>
  It is quite simple to do so. Simply define your component and call
  `render_slot(@inner_block)` where you want to inject the content:
  def button(assigns) do
  ~H"""
  <button class="btn">
  <%= render_slot(@inner_block) %>
  </button>
  """
  end
  In a nutshell, the contents given to the component is assigned to
  the `@inner_block` assign and then we use `Phoenix.LiveView.Helpers.render_slot/2`
  to render it.
  You can even have the component give a value back to the caller,
  by using the special attribute `:let` (note the leading `:`).
  Imagine this component:
  def unordered_list(assigns) do
  ~H"""
  <ul>
  <%= for entry <- @entries do %>
  <li><%= render_slot(@inner_block, entry) %></li>
  <% end %>
  </ul>
  """
  end
  And now you can invoke it as:
  <.unordered_list :let={entry} entries={~w(apple banana cherry)}>
  I like <%= entry %>
  </.unordered_list>
  You can also pattern match the arguments provided to the render block. Let's
  make our `unordered_list` component fancier:
  def unordered_list(assigns) do
  ~H"""
  <ul>
  <%= for entry <- @entries do %>
  <li><%= render_slot(@inner_block, %{entry: entry, gif_url: random_gif()}) %></li>
  <% end %>
  </ul>
  """
  end
  And now we can invoke it like this:
  <.unordered_list :let={%{entry: entry, gif_url: url}}>
  I like <%= entry %>. <img src={url} />
  </.unordered_list>
  ### Named slots
  Besides `@inner_block`, it is also possible to pass named slots
  to the component. For example, imagine that you want to create
  a modal component. The modal component has a header, a footer,
  and the body of the modal, which we would use like this:
  <.modal>
  <:header>
  This is the top of the modal.
  </:header>
  This is the body - everything not in a
  named slot goes to @inner_block.
  <:footer>
  <button>Save</button>
  </:footer>
  </.modal>
  The component itself could be implemented like this:
  def modal(assigns) do
  ~H"""
  <div class="modal">
  <div class="modal-header">
  <%= render_slot(@header) %>
  </div>
  <div class="modal-body">
  <%= render_slot(@inner_block) %>
  </div>
  <div class="modal-footer">
  <%= render_slot(@footer) %>
  </div>
  </div>
  """
  end
  If you want to make the `@header` and `@footer` optional,
  you can assign them a default of an empty list at the top:
  def modal(assigns) do
  assigns =
  assigns
  |> assign_new(:header, fn -> [] end)
  |> assign_new(:footer, fn -> [] end)
  ~H"""
  <div class="modal">
  ...
  end
  ### Named slots with attributes
  It is also possible to pass the same named slot multiple
  times and also give attributes to each of them.
  If multiple slot entries are defined for the same slot,
  `render_slot/2` will automatically render all entries,
  merging their contents. But sometimes we want more fine
  grained control over each individual slot, including access
  to their attributes. Let's see an example. Imagine we want
  to implement a table component
  For example, imagine a table component:
  <.table rows={@users}>
  <:col :let={user} label="Name">
  <%= user.name %>
  </:col>
  <:col :let={user} label="Address">
  <%= user.address %>
  </:col>
  </.table>
  At the top level, we pass the rows as an assign and we define
  a `:col` slot for each column we want in the table. Each
  column also has a `label`, which we are going to use in the
  table header.
  Inside the component, you can render the table with headers,
  rows, and columns:
  def table(assigns) do
  ~H"""
  <table>
  <tr>
  <%= for col <- @col do %>
  <th><%= col.label %></th>
  <% end %>
  </tr>
  <%= for row <- @rows do %>
  <tr>
  <%= for col <- @col do %>
  <td><%= render_slot(col, row) %></td>
  <% end %>
  </tr>
  <% end %>
  </table>
  """
  end
  Each named slot (including the `@inner_block`) is a list of maps,
  where the map contains all slot attributes, allowing us to access
  the label as `col.label`. This gives us complete control over how
  we render them.
  ## Attributes
  TODO
  ### Global Attributes
  Global attributes may be provided to any component that declares a
  `:global` attribute. By default, the supported global attributes are
  those common to all HTML elements. The full list can be found
  [here](https://developer.mozilla.org/en-US/docs/Web/HTML/Global_attributes)
  Custom attribute prefixes can be provided by the caller module with
  the `:global_prefixes` option to `use Phoenix.Component`. For example, the
  following would allow Alpine JS annotations, such as `x-on:click`,
  `x-data`, etc:
  use Phoenix.Component, global_prefixes: ~w(x-)
  '''

  # Attribute-name prefixes accepted as global attributes in addition to the
  # literal names listed in @globals (e.g. "phx-click", "aria-label", "data-x").
  @global_prefixes ~w(
    phx-
    aria-
    data-
  )

  # Literal attribute names common to all HTML elements (event handlers plus
  # the standard global attributes). Consumed by the __global__?/1 clauses
  # generated at compile time below.
  @globals ~w(
    xml:lang
    xml:base
    onabort
    onautocomplete
    onautocompleteerror
    onblur
    oncancel
    oncanplay
    oncanplaythrough
    onchange
    onclick
    onclose
    oncontextmenu
    oncuechange
    ondblclick
    ondrag
    ondragend
    ondragenter
    ondragleave
    ondragover
    ondragstart
    ondrop
    ondurationchange
    onemptied
    onended
    onerror
    onfocus
    oninput
    oninvalid
    onkeydown
    onkeypress
    onkeyup
    onload
    onloadeddata
    onloadedmetadata
    onloadstart
    onmousedown
    onmouseenter
    onmouseleave
    onmousemove
    onmouseout
    onmouseover
    onmouseup
    onmousewheel
    onpause
    onplay
    onplaying
    onprogress
    onratechange
    onreset
    onresize
    onscroll
    onseeked
    onseeking
    onselect
    onshow
    onsort
    onstalled
    onsubmit
    onsuspend
    ontimeupdate
    ontoggle
    onvolumechange
    onwaiting
    accesskey
    autocapitalize
    autofocus
    class
    contenteditable
    contextmenu
    dir
    draggable
    enterkeyhint
    exportparts
    hidden
    id
    inputmode
    is
    itemid
    itemprop
    itemref
    itemscope
    itemtype
    lang
    nonce
    part
    role
    slot
    spellcheck
    style
    tabindex
    title
    translate
  )

  @doc false
  # Returns true when `name` is a global attribute for components defined in
  # `module`. Consults the module's own compiled __global__?/1 first (it knows
  # any custom prefixes registered via `use Phoenix.Component`), then falls
  # back to the built-in prefixes/names defined in this module.
  def __global__?(module, name) when is_atom(module) and is_binary(name) do
    if function_exported?(module, :__global__?, 1) do
      module.__global__?(name) or __global__?(name)
    else
      __global__?(name)
    end
  end

  # Generate one clause per built-in prefix: matches "phx-…" etc. by binary
  # prefix pattern, which the compiler turns into an efficient dispatch.
  for prefix <- @global_prefixes do
    def __global__?(unquote(prefix) <> _), do: true
  end

  # Generate one clause per literal global attribute name.
  for name <- @globals do
    def __global__?(unquote(name)), do: true
  end

  def __global__?(_), do: false

  @doc false
  # Assigns injected by LiveView itself; these are never treated as
  # user-declared component attributes.
  def __reserved_assigns__, do: [:__changed__, :__slot__, :inner_block, :myself, :flash, :socket]

  @doc false
  defmacro __using__(opts \\ []) do
    # Avoid a circular import when Phoenix.LiveView.Helpers itself calls
    # `use Phoenix.Component`.
    conditional =
      if __CALLER__.module != Phoenix.LiveView.Helpers do
        quote do: import(Phoenix.LiveView.Helpers)
      end

    imports =
      quote bind_quoted: [opts: opts] do
        # def/defp are replaced by this module's macros (defined below) so
        # that component functions can have attr patterns injected.
        import Kernel, except: [def: 2, defp: 2]
        import Phoenix.Component
        import Phoenix.LiveView

        # Compile per-module __global__?/1 clauses for any custom prefixes
        # returned by __setup__/2 (plus the catch-all false clause).
        @doc false
        for prefix <- Phoenix.Component.__setup__(__MODULE__, opts) do
          def __global__?(unquote(prefix) <> _), do: true
        end

        def __global__?(_), do: false
      end

    [conditional, imports]
  end

  @doc false
  @valid_opts [:global_prefixes]
  # Validates `use Phoenix.Component` options, registers the compile-time
  # bookkeeping attributes, and hooks this module into the caller's
  # on_definition/before_compile callbacks. Returns the custom prefixes.
  def __setup__(module, opts) do
    {prefixes, invalid_opts} = Keyword.pop(opts, :global_prefixes, [])

    for prefix <- prefixes do
      unless String.ends_with?(prefix, "-") do
        raise ArgumentError,
              "global prefixes for #{inspect(module)} must end with a dash, got: #{inspect(prefix)}"
      end
    end

    if invalid_opts != [] do
      raise ArgumentError, """
      invalid options passed to #{inspect(__MODULE__)}.
      The following options are supported: #{inspect(@valid_opts)}, got: #{inspect(invalid_opts)}
      """
    end

    # :__attrs__ accumulates attr/3 declarations until the next function
    # definition consumes them; :__components__ maps name => metadata.
    Module.register_attribute(module, :__attrs__, accumulate: true)
    Module.register_attribute(module, :__components_calls__, accumulate: true)
    Module.put_attribute(module, :__components__, %{})
    Module.put_attribute(module, :on_definition, __MODULE__)
    Module.put_attribute(module, :before_compile, __MODULE__)
    prefixes
  end

  @doc """
  TODO.
  ## Options
  * `:required` - TODO
  * `:default` - TODO
  ## Validations
  LiveView performs some validation of attributes via the `:live_view`
  compiler. When attributes are defined, LiveView will warn at compilation
  time on the caller if:
  * if a required attribute of a component is missing
  * if an unknown attribute is given
  * if you specify a literal attribute (such as `value="string"` or `value`,
  but not `value={expr}`) and the type does not match
  Livebook does not perform any validation at runtime. This means the type
  information is mostly used for documentation and reflection purposes.
  On the side of the LiveView component itself, defining attributes provides
  the following quality of life improvements:
  * The default value of all attributes will be added to the `assigns`
  map upfront
  * Required struct types are annotated and emit compilation warnings.
  For example, if you specify `attr :user, User, required: true` and
  then you write `@user.non_valid_field` in your template, a warning
  will be emitted
  This list may increase in the future.
  """
  defmacro attr(name, type, opts \\ []) do
    quote bind_quoted: [name: name, type: type, opts: opts] do
      Phoenix.Component.__attr__!(__MODULE__, name, type, opts, __ENV__.line, __ENV__.file)
    end
  end

  @doc false
  # Validates an attr/3 declaration and pushes it onto the module's
  # accumulated :__attrs__ attribute for the next function definition.
  def __attr__!(module, name, type, opts, line, file) do
    cond do
      not is_atom(name) ->
        compile_error!(line, file, "attribute names must be atoms, got: #{inspect(name)}")

      not is_list(opts) ->
        compile_error!(line, file, """
        expected attr/3 to receive keyword list of options, but got #{inspect(opts)}\
        """)

      type == :global and Keyword.has_key?(opts, :required) ->
        compile_error!(line, file, "global attributes do not support the :required option")

      true ->
        :ok
    end

    {required, opts} = Keyword.pop(opts, :required, false)

    unless is_boolean(required) do
      compile_error!(line, file, ":required must be a boolean, got: #{inspect(required)}")
    end

    # :required and :default are mutually exclusive by design.
    if required and Keyword.has_key?(opts, :default) do
      compile_error!(line, file, "only one of :required or :default must be given")
    end

    type = validate_attr_type!(module, name, type, line, file)
    validate_attr_opts!(name, opts, line, file)

    Module.put_attribute(module, :__attrs__, %{
      name: name,
      type: type,
      required: required,
      opts: opts,
      line: line
    })
  end

  @builtin_types [:boolean, :integer, :float, :string, :atom, :list, :map, :global]
  @valid_types [:any] ++ @builtin_types

  # Rejects duplicate attribute names and a second :global attribute, then
  # normalizes the type: module names become {:struct, module}, builtin
  # atoms pass through, anything else is a compile error.
  defp validate_attr_type!(module, name, type, line, file) when is_atom(type) do
    attrs = get_attrs(module)

    cond do
      Enum.find(attrs, fn attr -> attr.name == name end) ->
        compile_error!(line, file, """
        a duplicate attribute with name #{inspect(name)} already exists\
        """)

      existing = type == :global && Enum.find(attrs, fn attr -> attr.type == :global end) ->
        compile_error!(line, file, """
        cannot define global attribute #{inspect(name)} because one is already defined under #{inspect(existing.name)}.
        Only a single global attribute may be defined.
        """)

      true ->
        :ok
    end

    # Atoms spelled "Elixir.…" are module names, i.e. struct types.
    case Atom.to_string(type) do
      "Elixir." <> _ -> {:struct, type}
      _ when type in @valid_types -> type
      _ -> bad_type!(name, type, line, file)
    end
  end

  defp validate_attr_type!(_module, name, type, line, file) do
    bad_type!(name, type, line, file)
  end

  defp compile_error!(line, file, msg) do
    raise CompileError, line: line, file: file, description: msg
  end

  defp bad_type!(name, type, line, file) do
    compile_error!(line, file, """
    invalid type #{inspect(type)} for attr #{inspect(name)}. \
    The following types are supported:
    * any Elixir struct, such as URI, MyApp.User, etc
    * one of #{Enum.map_join(@builtin_types, ", ", &inspect/1)}
    * :any for all other types
    """)
  end

  # NOTE: this intentionally shadows the earlier @valid_opts; module
  # attributes are read at their point of use during compilation, so
  # __setup__/2 above already captured the previous value.
  @valid_opts [:required, :default]
  defp validate_attr_opts!(name, opts, line, file) do
    for {key, _} <- opts, key not in @valid_opts do
      compile_error!(line, file, """
      invalid option #{inspect(key)} for attr #{inspect(name)}. \
      The supported options are: #{inspect(@valid_opts)}
      """)
    end
  end

  @doc false
  # Replacement for Kernel.def/2 (imported into user modules by __using__):
  # delegates to Kernel.def but first annotates single-arg heads so struct
  # patterns for required struct attrs are asserted at runtime.
  defmacro def(expr, body) do
    quote do
      Kernel.def(unquote(annotate_def(:def, expr)), unquote(body))
    end
  end

  @doc false
  # Same as def/2 above, for private functions.
  defmacro defp(expr, body) do
    quote do
      Kernel.defp(unquote(annotate_def(:defp, expr)), unquote(body))
    end
  end

  # Annotates the call head, preserving any `when` guard around it.
  defp annotate_def(kind, expr) do
    case expr do
      {:when, meta, [left, right]} -> {:when, meta, [annotate_call(kind, left), right]}
      left -> annotate_call(kind, left)
    end
  end

  # Heads with a default arg (\\) and heads with arity != 1 are passed
  # through untouched; only plain one-argument heads get the __pattern__!
  # wrapper (components must be arity-1 without defaults).
  defp annotate_call(_kind, {name, meta, [{:\\, _, _} = arg]}), do: {name, meta, [arg]}

  defp annotate_call(kind, {name, meta, [arg]}),
    do: {name, meta, [quote(do: unquote(__MODULE__).__pattern__!(unquote(kind), unquote(arg)))]}

  defp annotate_call(_kind, left),
    do: left

  # Expands inside the function head: registers the component and, for each
  # required struct-typed attr, injects a %Struct{} match so a wrong type
  # fails fast at the head instead of deep in the template.
  defmacro __pattern__!(kind, arg) do
    {name, 1} = __CALLER__.function
    attrs = register_component!(kind, __CALLER__, name, true)

    fields =
      for %{name: name, required: true, type: {:struct, struct}} <- attrs do
        {name, quote(do: %unquote(struct){})}
      end

    if fields == [] do
      arg
    else
      quote(do: %{unquote_splicing(fields)} = unquote(arg))
    end
  end

  @doc false
  # on_definition callback: bodiless arity-1 heads still register the
  # component; any other shape with pending attrs is a misplacement error.
  def __on_definition__(env, kind, name, args, _guards, body) do
    case args do
      [_] when body == nil ->
        register_component!(kind, env, name, false)

      _ ->
        attrs = pop_attrs(env)

        validate_misplaced_attrs!(attrs, env.file, fn ->
          case length(args) do
            1 ->
              "could not define attributes for function #{name}/1. " <>
                "Components cannot be dynamically defined or have default arguments"

            arity ->
              "cannot declare attributes for function #{name}/#{arity}. Components must be functions with arity 1"
          end
        end)
    end
  end

  @doc false
  # before_compile callback: for every registered component, generates an
  # overridable wrapper that merges attr defaults (and collects globals into
  # the declared :global attr) before calling the user's implementation via
  # super/1. Also emits __components__/0 and __components_calls__/0
  # reflection functions.
  defmacro __before_compile__(env) do
    # Any attrs still pending at this point never found a function.
    attrs = pop_attrs(env)

    validate_misplaced_attrs!(attrs, env.file, fn ->
      "cannot define attributes without a related function component"
    end)

    components = Module.get_attribute(env.module, :__components__)
    components_calls = Module.get_attribute(env.module, :__components_calls__) |> Enum.reverse()

    names_and_defs =
      for {name, %{kind: kind, attrs: attrs}} <- components do
        # Defaults for optional attrs that declared one.
        defaults =
          for %{name: name, required: false, opts: opts} <- attrs,
              Keyword.has_key?(opts, :default) do
            {name, Macro.escape(opts[:default])}
          end

        {global_name, global_default} =
          case Enum.find(attrs, fn attr -> attr.type == :global end) do
            %{name: name, opts: opts} -> {name, Macro.escape(Keyword.get(opts, :default, %{}))}
            nil -> {nil, nil}
          end

        known_keys = for(attr <- attrs, do: attr.name) ++ __reserved_assigns__()

        def_body =
          if global_name do
            quote do
              # Split off unknown keys as caller-provided globals and fold
              # them into the single :global attr.
              {assigns, caller_globals} = Map.split(assigns, unquote(known_keys))
              globals = Map.merge(unquote(global_default), caller_globals)
              merged = Map.merge(%{unquote_splicing(defaults)}, assigns)
              super(Phoenix.LiveView.assign(merged, unquote(global_name), globals))
            end
          else
            quote do
              super(Map.merge(%{unquote_splicing(defaults)}, assigns))
            end
          end

        merge =
          quote do
            Kernel.unquote(kind)(unquote(name)(assigns)) do
              unquote(def_body)
            end
          end

        {{name, 1}, merge}
      end

    {names, defs} = Enum.unzip(names_and_defs)

    overridable =
      if names != [] do
        quote do
          defoverridable unquote(names)
        end
      end

    def_components_ast =
      quote do
        def __components__() do
          unquote(Macro.escape(components))
        end
      end

    def_components_calls_ast =
      if components_calls != [] do
        quote do
          def __components_calls__() do
            unquote(Macro.escape(components_calls))
          end
        end
      end

    {:__block__, [], [def_components_ast, def_components_calls_ast, overridable | defs]}
  end

  # Consumes pending attrs for a new component, or returns the already
  # registered attrs when a later clause of the same component is defined.
  # Returns [] for plain functions with no attrs.
  defp register_component!(kind, env, name, check_if_defined?) do
    attrs = pop_attrs(env)

    cond do
      attrs != [] ->
        check_if_defined? and raise_if_function_already_defined!(env, name, attrs)

        components =
          env.module
          |> Module.get_attribute(:__components__)
          # Sort by name as this is used when they are validated
          |> Map.put(name, %{kind: kind, attrs: Enum.sort_by(attrs, & &1.name)})

        Module.put_attribute(env.module, :__components__, components)
        Module.put_attribute(env.module, :__last_component__, name)
        attrs

      Module.get_attribute(env.module, :__last_component__) == name ->
        Module.get_attribute(env.module, :__components__)[name].attrs

      true ->
        []
    end
  end

  # Raises on the first pending attr if any exist; no-op on [].
  defp validate_misplaced_attrs!(attrs, file, message_fun) do
    with [%{line: first_attr_line} | _] <- attrs do
      compile_error!(first_attr_line, file, message_fun.())
    end
  end

  defp get_attrs(module) do
    Module.get_attribute(module, :__attrs__) || []
  end

  # Returns the accumulated attrs in declaration order and clears them.
  defp pop_attrs(env) do
    attrs =
      env.module
      |> Module.get_attribute(:__attrs__)
      |> Enum.reverse()

    Module.delete_attribute(env.module, :__attrs__)
    attrs
  end

  # attr/3 must appear before the component's first clause; otherwise the
  # generated defaults wrapper could not override it.
  defp raise_if_function_already_defined!(env, name, attrs) do
    if Module.defines?(env.module, {name, 1}) do
      {:v1, _, meta, _} = Module.get_definition(env.module, {name, 1})
      [%{line: first_attr_line} | _] = attrs

      compile_error!(first_attr_line, env.file, """
      attributes must be defined before the first function clause at line #{meta[:line]}
      """)
    end
  end
end
|
lib/phoenix_component.ex
| 0.849191
| 0.615955
|
phoenix_component.ex
|
starcoder
|
defmodule Protocol.Consolidation do
  @moduledoc """
  Module responsible for consolidating protocols and helpers for
  extracting protocols and implementations from code paths for
  consolidation.
  """

  # NOTE(review): this module targets a legacy Elixir version — it uses
  # single-quoted charlists, `atom_to_list/1` and `to_char_list/1`, which
  # are deprecated or removed in modern Elixir. Do not modernize piecemeal.

  @doc """
  Extract all protocols from the given paths.
  The paths can be either a char list or a string. Internally
  they are worked on as char lists, so passing them as lists
  avoid extra conversion.

  ## Examples

      # Get Elixir's ebin and retrieve all protocols
      iex> path = :code.lib_dir(:elixir, :ebin)
      iex> mods = Protocol.Consolidation.extract_protocols([path])
      iex> Enumerable in mods
      true

  """
  @spec extract_protocols([char_list | String.t]) :: [atom]
  def extract_protocols(paths) do
    # A protocol module carries a `:protocol` attribute shaped as
    # [fallback_to_any: _, consolidated: _]; anything else is not a protocol.
    extract_matching_by_attribute paths, 'Elixir.',
      fn module, attributes ->
        case attributes[:protocol] do
          [fallback_to_any: _, consolidated: _] -> module
          _ -> nil
        end
      end
  end

  @doc """
  Extract all types implemented for the given protocol from
  the given paths.
  The paths can be either a char list or a string. Internally
  they are worked on as char lists, so passing them as lists
  avoid extra conversion.

  ## Examples

      # Get Elixir's ebin and retrieve all protocols
      iex> path = :code.lib_dir(:elixir, :ebin)
      iex> mods = Protocol.Consolidation.extract_impls(Enumerable, [path])
      iex> List in mods
      true

  """
  @spec extract_impls(module, [char_list | String.t]) :: [atom]
  def extract_impls(protocol, paths) when is_atom(protocol) do
    # Implementations are named <Protocol>.<Type>, so only scan beams whose
    # filename starts with the protocol name followed by a dot.
    prefix = atom_to_list(protocol) ++ '.'
    extract_matching_by_attribute paths, prefix, fn
      _mod, attributes ->
        case attributes[:impl] do
          [protocol: ^protocol, for: for] -> for
          _ -> nil
        end
    end
  end

  # Walks every .beam file under `paths` whose name starts with `prefix`
  # and collects the non-nil results of `callback.(module, attributes)`.
  defp extract_matching_by_attribute(paths, prefix, callback) do
    for path <- paths,
        file <- list_dir(path),
        mod = extract_from_file(path, file, prefix, callback),
        do: mod
  end

  # Lists a directory, returning [] when it cannot be read.
  defp list_dir(path) when is_list(path) do
    case :file.list_dir(path) do
      { :ok, files } -> files
      _ -> []
    end
  end

  defp list_dir(path), do: list_dir(to_char_list(path))

  # Only consider files matching the expected module-name prefix with a
  # .beam extension; other files yield nil and are filtered out above.
  defp extract_from_file(path, file, prefix, callback) do
    if :lists.prefix(prefix, file) and Path.extname(file) == '.beam' do
      extract_from_beam(Path.join(path, file), callback)
    end
  end

  # Reads the module attributes chunk from a beam file and hands them to
  # the callback; returns nil when the chunk cannot be read.
  defp extract_from_beam(file, callback) do
    case :beam_lib.chunks(file, [:attributes]) do
      {:ok, { module, [attributes: attributes] } } ->
        callback.(module, attributes)
      _ ->
        nil
    end
  end

  # Threads `{ :ok, value }` through `call`, short-circuiting any other
  # result (a poor man's result-monad bind used by `apply_to/2`).
  defmacrop if_ok(expr, call) do
    quote do
      case unquote(expr) do
        { :ok, var } -> unquote(Macro.pipe(quote(do: var), call))
        other -> other
      end
    end
  end

  @doc """
  Receives a protocol and a list of implementations and
  consolidates the given protocol. Consolidation happens
  by changing the protocol `impl_for` in the abstract
  format to have fast lookup rules.
  It returns the updated version of the protocol bytecode.
  A given bytecode or protocol implementation can be checked
  to be consolidated or not by analyzing the protocol
  attribute:

      Enumerable.__info__(:attributes)[:protocol]

  If the first element of the tuple is true, it means
  the protocol was consolidated.
  This function does not load the protocol at any point
  nor loads the new bytecode for the compiled module.
  """
  @spec apply_to(module, [module]) ::
    { :ok, binary } |
    { :error, :not_a_protocol } |
    { :error, :no_beam_info }
  def apply_to(protocol, types) when is_atom(protocol) do
    # Consolidation is deliberately disabled: this raise makes the pipeline
    # below unreachable. It was kept so the implementation can be re-enabled
    # once the record/struct transition mentioned in the message is done.
    raise ArgumentError, "consolidation is disabled as we can't consolidate records " <>
                         "and structs at once. Consolidation will be added back once " <>
                         "polymorphic records are removed"
    ensure_protocol(protocol)
    |> if_ok(change_debug_info types)
    |> if_ok(compile)
  end

  # Ensure the given module is loaded and is a protocol.
  # Returns { :ok, { protocol, fallback_to_any?, abstract_code } }.
  defp ensure_protocol(protocol) do
    case :beam_lib.chunks(beam_file(protocol), [:abstract_code, :attributes]) do
      { :ok, { ^protocol, [abstract_code: { _raw, abstract_code },
                           attributes: attributes] } } ->
        case attributes[:protocol] do
          [fallback_to_any: any, consolidated: _] ->
            { :ok, { protocol, any, abstract_code } }
          _ ->
            { :error, :not_a_protocol }
        end
      _ ->
        { :error, :no_beam_info }
    end
  end

  # Resolves the on-disk beam file for a module; falls back to the module
  # atom itself when not found (beam_lib will then report the error).
  defp beam_file(module) when is_atom(module) do
    case :code.which(module) do
      :non_existing -> module
      file -> file
    end
  end

  # Change the debug information to the optimized
  # impl_for/1 dispatch version.
  defp change_debug_info({ protocol, any, code }, types) do
    # Without fallback_to_any, an Any implementation must not be dispatched to.
    types = if any, do: types, else: List.delete(types, Any)
    all = [Any] ++ for { _guard, mod } <- Protocol.builtin, do: mod
    # Whatever is not builtin (nor Any) is assumed to be a struct type.
    structs = types -- all
    change_impl_for(code, protocol, types, structs, false, [])
  end

  # Rewrites the abstract forms: marks the :protocol attribute as
  # consolidated and replaces impl_for/1 and struct_impl_for/1 bodies.
  defp change_impl_for([{ :attribute, line, :protocol, opts }|t], protocol, types, structs, _, acc) do
    opts = [fallback_to_any: opts[:fallback_to_any], consolidated: true]
    change_impl_for(t, protocol, types, structs, true,
                    [{ :attribute, line, :protocol, opts }|acc])
  end

  defp change_impl_for([{ :function, line, :impl_for, 1, _ }|t], protocol, types, structs, is_protocol, acc) do
    fallback = if Any in types, do: Module.concat(protocol, Any), else: nil
    # One guard clause per builtin type that actually has an implementation.
    clauses = for { guard, mod } <- Protocol.builtin,
                  mod in types,
                  do: builtin_clause_for(mod, guard, protocol, line)
    clauses = [struct_clause_for(line)|clauses] ++
              [fallback_clause_for(fallback, protocol, line)]
    change_impl_for(t, protocol, types, structs, is_protocol,
                    [{ :function, line, :impl_for, 1, clauses }|acc])
  end

  defp change_impl_for([{ :function, line, :struct_impl_for, 1, _ }|t], protocol, types, structs, is_protocol, acc) do
    fallback = if Any in types, do: Module.concat(protocol, Any), else: nil
    clauses = for struct <- structs, do: each_struct_clause_for(struct, protocol, line)
    clauses = clauses ++ [fallback_clause_for(fallback, protocol, line)]
    change_impl_for(t, protocol, types, structs, is_protocol,
                    [{ :function, line, :struct_impl_for, 1, clauses }|acc])
  end

  # Pass-through for all other forms.
  defp change_impl_for([h|t], protocol, info, types, is_protocol, acc) do
    change_impl_for(t, protocol, info, types, is_protocol, [h|acc])
  end

  # End of forms: only succeed if the :protocol attribute was seen.
  defp change_impl_for([], protocol, _info, _types, is_protocol, acc) do
    if is_protocol do
      { :ok, { protocol, Enum.reverse(acc) } }
    else
      { :error, :not_a_protocol }
    end
  end

  # Erlang abstract-format clause: `impl_for(x) when :erlang.GUARD(x) -> Impl`.
  defp builtin_clause_for(mod, guard, protocol, line) do
    {:clause, line,
      [{:var, line, :x}],
      [[{:call, line,
          {:remote, line, {:atom, line, :erlang}, {:atom, line, guard}},
          [{:var, line, :x}],
      }]],
      [{:atom, line, Module.concat(protocol, mod)}]}
  end

  # Clause matching `%{__struct__: x} when is_atom(x)` that delegates to
  # struct_impl_for/1.
  defp struct_clause_for(line) do
    {:clause, line,
      [{:map, line, [
          {:map_field_exact, line, {:atom, line, :__struct__}, {:var, line, :x}}
      ]}],
      [[{:call, line,
          {:remote, line, {:atom, line, :erlang}, {:atom, line, :is_atom}},
          [{:var, line, :x}],
      }]],
      [{:call, line,
          {:atom, line, :struct_impl_for},
          [{:var, line, :x}]}]}
  end

  # Clause matching a concrete struct module atom.
  defp each_struct_clause_for(other, protocol, line) do
    {:clause, line, [{:atom, line, other}], [],
      [{:atom, line, Module.concat(protocol, other)}]}
  end

  # Catch-all clause returning the Any implementation or nil.
  defp fallback_clause_for(value, _protocol, line) do
    {:clause, line, [{:var, line, :_}], [],
      [{ :atom, line, value }]}
  end

  # Finally compile the module and emit its bytecode.
  defp compile({ protocol, code }) do
    opts = if Code.compiler_options[:debug_info], do: [:debug_info], else: []
    { :ok, ^protocol, binary, _warnings } = :compile.forms(code, [:return|opts])
    { :ok, binary }
  end
end
|
lib/elixir/lib/protocol/consolidation.ex
| 0.871427
| 0.406921
|
consolidation.ex
|
starcoder
|
defmodule BaseX do
  @moduledoc """
  coding for arbitrary alphabets and block sizes
  """

  @doc """
  prepare a coding module

  Returns the name of the module.
  The resulting module will appear in the BaseX namespace and have `encode`
  and `decode` functions available.

  Examples:
  ```
  iex> BaseX.prepare_module("Base2", "01", 4)
  BaseX.Base2
  iex> BaseX.Base2.encode("Hi!")
  "010010000110100100100001"
  iex> BaseX.Base2.decode("010010000110100100100001")
  "Hi!"
  ```

  These functions are only suitable for complete messages.
  Streaming applications should manage their own incomplete message state.
  The supplied module name should be both valid (by Elixir rules) and unique.
  Care should be taken when regenerating modules with the same name.
  Alphabets may be defined by `{"t","u","p","l","e"}`, `"string"`, `'charlist'` or
  `["l","i","s","t"]` as desired.
  """
  @spec prepare_module(String.t(), binary | list | tuple, pos_integer) :: term
  # Canonical entry: the alphabet is normalized to a tuple of binaries
  # by the clauses below before the module is generated.
  def prepare_module(name, alphabet, block_size) when is_tuple(alphabet) do
    full_name = Module.concat("BaseX", name)
    generate_module(full_name, alphabet, block_size)
    case Code.ensure_compiled(full_name) do
      {:module, module} -> module
      {:error, why} -> raise(why)
    end
  end

  # List of binaries -> tuple.
  def prepare_module(name, [a | bc], bs) when is_bitstring(a),
    do: prepare_module(name, [a | bc] |> List.to_tuple(), bs)

  # Charlist -> list of single-byte binaries.
  def prepare_module(name, [a | bc], bs) when is_integer(a),
    do: prepare_module(name, [a | bc] |> Enum.map(&:binary.encode_unsigned/1), bs)

  # String -> list of graphemes (empty artifacts of split("") removed).
  def prepare_module(name, abc, bs) when is_binary(abc),
    do:
      prepare_module(
        name,
        abc
        |> String.split("")
        |> Enum.filter(fn c -> c != "" end),
        bs
      )

  # Computes the coding parameters and expands the BaseX.ModuleMaker macro
  # to define the concrete encoder/decoder module at compile time.
  # Argument order matters to gen_module — do not reorder.
  defp generate_module(name, abc, s) do
    require BaseX.ModuleMaker
    a = tuple_size(abc)
    b = s * 8
    c = chars_for_bits(b, a)
    {vb, vc, vn} = valid_sizes(b, a, {%{}, %{}, %{}})
    BaseX.ModuleMaker.gen_module(
      name,
      Macro.escape(abc),
      a,
      b,
      c,
      abc |> index_map_from_tuple |> Macro.escape(),
      Macro.escape(vb),
      Macro.escape(vc),
      Macro.escape(vn),
      Macro.escape(single_octets?(abc))
    )
  end

  # True when every alphabet symbol fits in a single octet.
  defp single_octets?(chars), do: single_octets(Tuple.to_list(chars), true)
  defp single_octets([], acc), do: acc
  defp single_octets([h | t], acc), do: single_octets(t, acc and bit_size(h) <= 8)

  # Number of alphabet characters needed to encode `b` bits with an
  # alphabet of size `a`: ceil(b / log2(a)).
  defp chars_for_bits(b, a), do: trunc(Float.ceil(b / :math.log2(a)))

  # Largest unsigned integer representable in `b` bits.
  defp max_num_for_bits(b), do: (:math.pow(2, b) - 1) |> trunc

  # Builds three lookup maps for every octet-aligned size up to `b` bits:
  # bits -> chars, chars -> bits, and bits -> max value.
  defp valid_sizes(0, _a, acc), do: acc

  defp valid_sizes(b, a, acc) do
    c = chars_for_bits(b, a)
    valid_sizes(
      b - 8,
      a,
      {Map.put(elem(acc, 0), b, c), Map.put(elem(acc, 1), c, b),
       Map.put(elem(acc, 2), b, b |> max_num_for_bits)}
    )
  end

  # Maps each alphabet symbol to its index for decoding.
  defp index_map_from_tuple(tuple), do: map_elements(tuple |> Tuple.to_list(), 0, %{})
  defp map_elements([], _index, map), do: map

  defp map_elements([e | rest], index, map),
    do: map_elements(rest, index + 1, Map.put(map, e, index))
end
|
lib/basex.ex
| 0.86813
| 0.850407
|
basex.ex
|
starcoder
|
defmodule Mix.Tasks.Avrora.Reg.Schema do
  use Mix.Task

  @moduledoc """
  Register either one schema or all schemas in the `Avrora.Config.schemas_path`
  directory (or your private client schemas path).

      mix avrora.reg.schema [--all] [--name NAME] [--as NEW_NAME] [--module MODULE]

  The search of the schemas will be performed under path configured in `schemas_path`
  configuration option. One of either option must be given.

  ## Command line options

    * `--name` - the full name of the schema to register (exclusive with `--all`)
    * `--as` - the name which will be used to register schema (i.e subject)
    * `--all` - register all found schemas
    * `--module` - private Avrora client module (i.e MyClient)

  The `--module` option allows to use your private Avrora client module instead of
  the default `Avrora`.
  The `--as` option is possible to use only together with `--name`.
  The `--name` option expects that given schema name will comply to
  `Avrora.Storage.File` module rules.
  For example, if the schema name is `io.confluent.Payment` it should be stored
  as `<schemas path>/io/confluent/Payment.avsc`

  ## Usage

      mix avrora.reg.schema --name io.confluent.Payment
      mix avrora.reg.schema --name io.confluent.Payment --as MyCustomName
      mix avrora.reg.schema --all --module MyClient
      mix avrora.reg.schema --all
  """
  @shortdoc "Register schema(s) in the Confluent Schema Registry"

  alias Mix.Tasks

  # Recognized switches; anything else falls into the error branch of run/1.
  @cli_options [
    strict: [
      as: :string,
      all: :boolean,
      name: :string,
      module: :string
    ]
  ]

  @impl Mix.Task
  def run(argv) do
    Tasks.Loadpaths.run(["--no-elixir-version-check", "--no-archives-check"])
    {opts, _, _} = OptionParser.parse(argv, @cli_options)
    # Resolve the client module (default "Avrora") plus its Config and
    # Registrar submodules from the --module option.
    {module_name, opts} = Keyword.pop(opts, :module, "Avrora")
    module = Module.concat(Elixir, String.trim(module_name))
    config = Module.concat(module, Config)
    registrar = Module.concat(module, Utils.Registrar)
    {:ok, _} = Application.ensure_all_started(:avrora)
    {:ok, _} = module.start_link()
    # Dispatch on exactly which combination of options was given.
    case opts |> Keyword.keys() |> Enum.sort() do
      [:all] ->
        # Turn every <schemas_path>/a/b/Name.avsc into "a.b.Name" and register it.
        [config.self().schemas_path(), "**", "*.avsc"]
        |> Path.join()
        |> Path.wildcard()
        |> Enum.each(fn file_path ->
          file_path
          |> Path.relative_to(config.self().schemas_path())
          |> Path.rootname()
          |> String.replace("/", ".")
          |> register_schema_by_name(registrar)
        end)

      [:name] ->
        opts[:name] |> String.trim() |> register_schema_by_name(registrar)

      [:as, :name] ->
        opts[:name]
        |> String.trim()
        |> register_schema_by_name(registrar, as: String.trim(opts[:as]))

      _ ->
        message = """
        don't know how to handle `#{Enum.join(argv, " ")}'
        please use #{IO.ANSI.yellow()}mix help avrora.reg.schema#{IO.ANSI.reset()} for help
        """

        Mix.shell().error(message)
        exit({:shutdown, 1})
    end
  end

  # Registers one schema through the client's registrar, forcing
  # re-registration, and reports the outcome on the Mix shell.
  defp register_schema_by_name(name, registrar, opts \\ []) do
    opts = Keyword.merge(opts, force: true)

    case registrar.register_schema_by_name(name, opts) do
      {:ok, _} ->
        case Keyword.get(opts, :as) do
          nil -> Mix.shell().info("schema `#{name}' will be registered")
          new_name -> Mix.shell().info("schema `#{name}' will be registered as `#{new_name}'")
        end

      {:error, error} ->
        Mix.shell().error("schema `#{name}' will be skipped due to an error `#{error}'")
    end
  end
end
|
lib/mix/tasks/avrora.reg.schema.ex
| 0.77552
| 0.402744
|
avrora.reg.schema.ex
|
starcoder
|
defmodule Jsox.Parser do
  @moduledoc """
  A JSON parser according to ECMA-404.
  Standard ECMA-404: The JSON Data Interchange Format
  https://www.ecma-international.org/publications/standards/Ecma-404.htm
  """

  use Bitwise

  @type json :: map | list | String.t | integer | float | true | false | nil

  # Character classes used as sets in guards (charlists work with `in`).
  @digits '0123456789'
  @minus_digits '-0123456789'
  @whitespaces '\s\r\t\n'
  # Maps each JSON escape character to the charlist it decodes to.
  @escape_map %{
    ?\\ => '\\',
    ?" => '\"',
    ?n => '\n',
    ?r => '\r',
    ?b => '\b',
    ?f => '\f',
    ?t => '\t',
    ?/ => '\/'}
  @escape_chars [?u|Map.keys(@escape_map)]

  @doc """
  Parses a JSON document into Elixir terms.

  Returns `{:ok, term}` on success, or `{:error, token, position}` where
  `token` names the grammar rule that failed and `position` is an
  approximate character offset into the input.
  """
  # Fixed @spec: the error shape is a 3-tuple `{:error, atom, integer}`,
  # not `{:error, String.t}` as previously declared.
  @spec parse(iodata) :: {:ok, json} | {:error, atom, integer}
  def parse(data) do
    {result, data, pos} = json(data, 0)
    # Only trailing whitespace may remain after the top-level value.
    if data =~ ~r/^\s*$/ do
      {:ok, result}
    else
      {:error, :eof, pos}
    end
  catch
    {token, pos} -> {:error, token, pos}
  end

  # Top-level value dispatch: skip whitespace, then branch on the first
  # character (string, list, map, number, or literal).
  defp json(<<char>> <> data, pos)
    when char in @whitespaces,
    do: json(data, pos + 1)
  defp json(<<char>> <> data, pos)
    when char == ?",
    do: string(data, pos + 1, [])
  defp json(<<char>> <> data, pos)
    when char == ?[,
    do: list(data, pos + 1, [])
  defp json(<<char>> <> data, pos)
    when char == ?{,
    do: map(data, pos + 1, %{}, nil)
  defp json(<<char>> <> data, pos)
    when char in @minus_digits,
    do: number(data, pos + 1, [char], (if char == ?-, do: :minus, else: :digit))
  defp json("true" <> data, pos),
    do: {true, data, pos + 4}
  defp json("false" <> data, pos),
    do: {false, data, pos + 5}
  defp json("null" <> data, pos),
    do: {nil, data, pos + 4}
  defp json(_data, pos),
    do: throw {:json, pos + 1}

  # Object keys: skip whitespace then expect a string.
  defp key(<<char>> <> data, pos)
    when char in @whitespaces,
    do: key(data, pos + 1)
  defp key(<<char>> <> data, pos)
    when char == ?",
    do: string(data, pos + 1, [])
  defp key(_data, pos),
    do: throw {:key, pos}

  # Integer part. `chars` is iodata accumulated in order; the last argument
  # tracks the previous token (:minus/:digit) so `-.` and `-e` are rejected.
  defp number(<<char>> <> data, pos, chars, _last)
    when char in @digits,
    do: number(data, pos + 1, [chars, char], :digit)
  defp number(<<?.>> <> _data, pos, _chars, :minus),
    do: throw {:number, pos + 1}
  defp number(<<?.>> <> data, pos, chars, _last),
    do: float(data, pos + 1, [chars, ?.], :full_stop)
  defp number(<<char>> <> _data, pos, _chars, :minus)
    when char in 'eE',
    do: throw {:number, pos}
  defp number(<<char>> <> data, pos, chars, _last)
    # "12e3" is normalized to "12.0e3" so String.to_float/1 accepts it.
    when char in 'eE',
    do: exponential(data, pos + 1, [chars, '.0e'], :exp)
  defp number(_data, pos, _chars, :minus),
    do: throw {:number, pos}
  defp number(data, pos, chars, _last) do
    result = chars
      |> IO.iodata_to_binary
      |> String.to_integer
    {result, data, pos}
  end

  # Exponent part: optional minus sign then at least one digit.
  defp exponential(<<?->> <> data, pos, chars, :exp),
    do: exponential(data, pos + 1, [chars, ?-], :minus)
  defp exponential(<<char>> <> data, pos, chars, _last)
    when char in @digits,
    do: exponential(data, pos + 1, [chars, char], :digit)
  defp exponential(_data, pos, _chars, :exp),
    do: throw {:exponential, pos}
  defp exponential(_data, pos, _chars, :minus),
    do: throw {:exponential, pos}
  defp exponential(data, pos, chars, _last),
    do: {chars |> IO.iodata_to_binary |> String.to_float, data, pos}

  # Fractional part: at least one digit must follow the decimal point.
  defp float(<<char>> <> _data, pos, _chars, :full_stop)
    when char in 'eE',
    do: throw {:float, pos}
  defp float(<<char>> <> data, pos, chars, _last)
    when char in 'eE',
    do: exponential(data, pos + 1, [chars, 'e'], :exp)
  defp float(<<char>> <> data, pos, chars, _last)
    when char in @digits,
    do: float(data, pos + 1, [chars, char], :digit)
  defp float(data, pos, chars, _last),
    do: {chars |> IO.iodata_to_binary |> String.to_float, data, pos}

  # String scanning; `chars` accumulates decoded pieces in reverse order.
  defp string(<<?">> <> data, pos, chars),
    do: {chars |> Enum.reverse |> IO.iodata_to_binary, data, pos}
  defp string(<<?\\, char>> <> data, pos, chars) when char in @escape_chars do
    if char == ?u do
      unicode(data, pos + 1, chars)
    else
      string(data, pos + 2, [@escape_map[char]|chars])
    end
  end
  defp string(<<?\\>> <> _data, pos, _chars),
    do: throw {:string, pos}
  defp string(<<char>> <> data, pos, chars),
    do: string(data, pos + 1, [char|chars])

  # UTF-16 surrogate pair: \uD800-\uDBFF immediately followed by
  # \uDC00-\uDFFF decodes to one codepoint above U+FFFF.
  # BUG FIX: the high-surrogate guard was '89abAb' (duplicate ?b, missing
  # ?B), so e.g. "\uDB40\uDD00" skipped this clause and crashed building an
  # invalid <<0xDB40::utf8>> below. The low-surrogate set is also written
  # in canonical order ('cdefCDEF' — same set as before).
  defp unicode(<<a1, b1, c1, d1, ?\\, ?u, a2, b2, c2, d2>> <> data, pos, chars)
    when a1 in 'dD' and a2 in 'dD' and b1 in '89abAB' and b2 in 'cdefCDEF' do
    hi = List.to_integer([a1, b1, c1, d1], 16)
    lo = List.to_integer([a2, b2, c2, d2], 16)
    codepoint = 0x10000 + ((hi &&& 0x03FF) <<< 10) + (lo &&& 0x03FF)
    string(data, pos + 11, [<<codepoint :: utf8>>|chars])
  end
  # Plain \uXXXX escape in the Basic Multilingual Plane.
  defp unicode(<<seq :: binary-size(4)>> <> data, pos, chars),
    do: string(data, pos + 6, [<<String.to_integer(seq, 16) :: utf8>>|chars])

  # Arrays; elements are accumulated in reverse and flipped on ?].
  defp list(<<char>> <> data, pos, list)
    when char in @whitespaces,
    do: list(data, pos + 1, list)
  defp list(<<?]>> <> data, pos, list),
    do: {Enum.reverse(list), data, pos + 1}
  defp list(<<?,>> <> _data, pos, []),
    do: throw {:list, pos}
  defp list(<<?,>> <> data, pos, list) do
    {result, data, pos} = json(data, pos + 1)
    list(data, pos, [result|list])
  end
  defp list(data, pos, []) do
    {result, data, pos} = json(data, pos)
    list(data, pos, [result])
  end
  defp list(_data, pos, _list),
    do: throw{:list, pos}

  # Objects; `key` holds the pending key (nil when none is expected yet).
  defp map(<<char>> <> data, pos, map, key)
    when char in @whitespaces,
    do: map(data, pos + 1, map, key)
  defp map(<<?}>> <> data, pos, map, _key) do
    {map, data, pos + 1}
  end
  defp map(<<?,>> <> _data, pos, _map, nil),
    do: throw {:map, pos + 1}
  defp map(<<?,>> <> data, pos, map, _key) do
    {result, data, pos} = key(data, pos + 1)
    map(data, pos + 1, map, result)
  end
  defp map(<<?:>> <> _data, pos, _map, nil),
    do: throw {:map, pos + 1}
  defp map(<<?:>> <> data, pos, map, key) do
    {result, data, pos} = json(data, pos + 1)
    map(data, pos + 1, Map.put(map, key, result), key)
  end
  defp map(data, pos, _map, nil) do
    {result, data, pos} = key(data, pos)
    map(data, pos, %{}, result)
  end
  defp map(_data, pos, _map, _key),
    do: throw {:map, pos + 1}
end
|
lib/jsox/parser.ex
| 0.555194
| 0.576721
|
parser.ex
|
starcoder
|
defmodule Feedistiller.Limits do
  @moduledoc """
  Date range and count limits applied when retrieving feed items.

  - `from:` only items newer than this date are retrieved (default is `:oldest` for no limit)
  - `to:` only items older than this date are retrieved (default is `:latest` for not limit)
  - `max:` maximum number of items to retrieve (default is `:unlimited` for no limit)
  """

  defstruct from: :oldest,
            to: :latest,
            max: :unlimited

  @type t :: %__MODULE__{
          from: DateTime.t | :oldest,
          to: DateTime.t | :latest,
          max: integer | :unlimited
        }
end
defmodule Feedistiller.Filters do
  @moduledoc """
  Filtering rules applied to retrieved feed items.

  - `limits:` a `Limits` struct for date/number limits
  - `mime:` a list of `Regex` applied to the `content-type` of enclosures
  - `name:` a list of Regex applied to the `title` of feed items
  """

  defstruct limits: %Feedistiller.Limits{},
            mime: [],
            name: []

  @type t :: %__MODULE__{
          limits: Feedistiller.Limits.t,
          mime: [Regex.t],
          name: [Regex.t]
        }
end
defmodule Feedistiller.FeedAttributes do
  @moduledoc """
  The attributes of a feed to download.

  - `name:` a name for the feed
  - `url:` web address of the feed
  - `user:` user for protected feed
  - `password:` password for protected feed
  - `dir:` subdirectory where the downloaded items will be put under the `destination` directory
  - `destination:` the directory where to put the downloaded items (they will be put in a subdirectory
    with the same name as the feed). Default is `.` (current directory)
  - `max_simultaneous_downloads:` the maximum number of item to download at the same time (default is 3)
  - `filters:` the filters applied to the feed
  - `timeout:` timeout applied to http operations
  - `only_new:` only download items not already present on disk (default `false`)
  - `clean:` clean the destination directory instead of downloading (default `false`)
  """

  defstruct name: "", url: "", filters: %Feedistiller.Filters{}, dir: "",
            destination: ".", max_simultaneous_downloads: 3, user: "", password: "",
            only_new: false, timeout: 60, clean: false

  # Fixes applied to the typespec:
  # - `password` contained a redaction artifact (`<PASSWORD>`) that did not
  #   even parse; like `user`, it is a plain string.
  # - `Filters.t` is fully qualified: no `alias` is in scope here, so the
  #   bare name would have resolved to the non-existent `Elixir.Filters`.
  @type t :: %__MODULE__{
          name: String.t,
          url: String.t,
          filters: Feedistiller.Filters.t,
          dir: String.t,
          destination: String.t,
          max_simultaneous_downloads: :unlimited | integer,
          user: String.t,
          password: String.t,
          only_new: boolean,
          timeout: integer,
          clean: boolean
        }
end
defmodule Feedistiller.Event do
  @moduledoc """
  Events reported by the downloaders.
  """

  defstruct feed: %Feedistiller.FeedAttributes{},
            destination: "",
            entry: %Feedistiller.Feeder.Entry{},
            event: nil

  @type t :: %__MODULE__{
          feed: Feedistiller.FeedAttributes.t,
          destination: String.t,
          entry: Feedistiller.Feeder.Entry.t,
          event: nil | tuple
        }
end
defmodule Feedistiller.Util do
  # Human-readable rendering of a Timex duration, microseconds dropped.
  # Anything that is not a %Timex.Duration{} renders as an empty string.
  def tformat(%Timex.Duration{} = duration) do
    truncated = %Timex.Duration{duration | microseconds: 0}
    Timex.format_duration(truncated, :humanized)
  end

  def tformat(_), do: ""

  # ISO-like date rendering; nil renders as a placeholder.
  def dformat(%DateTime{} = date), do: Calendar.strftime(date, "%Y-%m-%d")
  def dformat(nil), do: "????-??-??"
end
defmodule Feedistiller do
  @moduledoc """
  Provides functions to downloads enclosures of rss/atom feeds.

  Features:
  - download multiple feeds at once and limit the number of downloads
    occurring at the same (globally or on per feed basis).
  - various filtering options:
    - content-type criteria
    - item name criteria
    - item date criteria

  `HTTPoison` must be started to use `Feedistiller` functions.
  """

  alias Feedistiller.FeedAttributes
  alias Feedistiller.Event
  alias Feedistiller.Http
  alias Feedistiller.Feeder
  alias Alambic.Semaphore
  alias Alambic.CountDown
  alias Alambic.BlockingQueue

  @doc "Download a set of feeds according to their settings."
  @spec download_feeds(list(FeedAttributes.t)) :: :ok
  def download_feeds(feeds) when is_list(feeds)
  do
    download_feeds(feeds, nil)
  end

  @doc """
  Download a set of feeds according to their settings, with `max` simultaneous
  downloads at the same time across all feeds.
  """
  @spec download_feeds(list(FeedAttributes.t), integer | Semaphore.t | nil) :: :ok
  def download_feeds(feeds, max)
  when is_list(feeds) and is_integer(max) and max > 0
  do
    # A shared semaphore caps concurrent downloads across every feed.
    semaphore = Semaphore.create_link(max)
    download_feeds(feeds, semaphore)
    Semaphore.destroy(semaphore)
  end

  # One async task per feed; all are awaited (no timeout) before returning.
  def download_feeds(feeds, semaphore)
  when is_list(feeds) and (is_map(semaphore) or is_nil(semaphore))
  do
    feeds
    |> Enum.map(&Task.async(fn -> download_feed(&1, semaphore) end))
    |> Enum.each(&Task.await(&1, :infinity))
  end

  @doc ~S"""
  Download enclosures of the given `feed` according to its settings.

  Attributes of the feed are:
  - `url:` the url of the feed. Redirect are auto followed.
  - `destination:` path for the downloaded files. Files are put in a subdirectory
    matching the feed name.
  - `max_simultaneous_downloads:` maximum number of simultaneous downloads for this file.
    Default is `3`. Can be set to `:unlimited` for no limit.
  - `only_new:` donwload only new files (files not already in the destination directory)
  - `filters:` a set of filters to apply to the downloaded files:
    - `limits:` limits on the number of files to download:
      - `to:` download files up to this date (default is `:latest`)
      - `from:` download files from this date (default is `:oldest`)
      - `max:` download at most `max` files (default is `:unlimited`)
    - `mime:` a list of regex to apply to the 'content-type' field of the enclosure. Only
      'content-type' passing those regex are downloaded.
    - `name:` a list of regex to apply to the name of the feed items. Only enclosure attached
      to names matching those regex are downloaded.
  """
  @spec download_feed(FeedAttributes.t, Semaphore.t | nil) :: :ok | {:error, String.t}
  def download_feed(feed = %FeedAttributes{}, global_sem \\ nil)
  when is_map(global_sem) or is_nil(global_sem)
  do
    # Check we can write to destination
    destination = Path.join(feed.destination, feed.dir) |> Path.expand
    try do
      :ok = File.mkdir_p(destination)
    rescue
      e ->
        Feedistiller.Reporter.notify(%Event{feed: feed, event: {:error_destination, destination}})
        raise e
    end
    if feed.clean do
      clean(destination, feed, global_sem)
    else
      # Two pipelines connected by blocking queues: raw http chunks flow
      # into `chunksq`, parsed+filtered entries flow into `entriesq`.
      chunksq = BlockingQueue.create(10)
      entriesq = BlockingQueue.create()
      entries = entriesq
      semaphores = [global_sem: global_sem, local_sem: get_sem(feed.max_simultaneous_downloads)]
      # Download feed and gather chunks in a shared queue
      spawn(fn -> generate_chunks_stream(chunksq, feed, semaphores) end)
      # Filters — applied in reverse order of this list construction, so the
      # cheap nil-enclosure check runs first and regex matching runs last.
      titlematch = for regex <- feed.filters.name, do: &Regex.match?(regex, &1.title)
      typematch = for regex <- feed.filters.mime, do: &Regex.match?(regex, &1.enclosure.type)
      filters = typematch ++ titlematch
      filters = [fn e -> !(feed.only_new and File.exists?(filename(e, destination))) end | filters]
      filters = [(&filter_feed_entry(&1, {feed.filters.limits.from, feed.filters.limits.to})) | filters]
      filters = [fn e -> !is_nil(e.enclosure) end | filters]
      # Parse the feed and filter/stream it to the entry queue
      spawn(fn -> generate_entries_stream(entriesq, chunksq, feed, filters) end)
      # and get all!
      get_enclosures(entries, destination, feed, semaphores)
      # clean up
      BlockingQueue.destroy(chunksq)
      BlockingQueue.destroy(entriesq)
    end
  end

  # Deletes `file` and reports the removal.
  defp remove_file(file, feed) do
    File.rm!(file)
    Feedistiller.Reporter.notify(%Event{feed: feed, event: {:clean, file}})
  end

  # Removes leftover .tmp files and files whose name does not match the
  # canonical sanitized form when that canonical file already exists.
  defp check_file(file, feed) do
    try do
      if Path.extname(file) == ".tmp" do
        remove_file(file, feed)
      else
        cfile = filename(Path.basename(file), Path.dirname(file))
        if cfile != file && File.exists?(cfile) do
          remove_file(file, feed)
        end
      end
    rescue
      _ -> Feedistiller.Reporter.notify(%Event{feed: feed, event: {:bad_clean, file}})
    end
  end

  # Clean mode: scan the destination directory instead of downloading.
  defp clean(destination, feed, sem) do
    acquire(sem)
    Feedistiller.Reporter.notify(%Event{feed: feed, event: :begin_clean})
    Path.wildcard(destination <> "/*") |> Enum.each(&check_file(&1, feed))
    Feedistiller.Reporter.notify(%Event{feed: feed, event: :end_clean})
  after
    release(sem)
  end

  # Times a function call, returning {duration, result}.
  defp measure(f), do: Timex.Duration.measure(f)

  # Producer: streams the feed document over http and enqueues each chunk.
  # Always completes the queue so the consumer terminates, even on error.
  defp generate_chunks_stream(chunks, feed, semaphores) do
    {t, _} = measure(fn ->
      try do
        acquire(semaphores)
        Feedistiller.Reporter.notify(%Event{feed: feed, event: :begin_feed})
        Http.stream_get!(feed.url,
          fn (chunk, chunks) ->
            :ok = BlockingQueue.enqueue(chunks, chunk)
            chunks
          end,
          chunks, feed.timeout, feed.user, feed.password)
      rescue
        _ -> Feedistiller.Reporter.notify(%Event{feed: feed, event: :bad_url})
      after
        BlockingQueue.complete(chunks)
        release(semaphores)
      end
    end)
    Feedistiller.Reporter.notify(%Event{feed: feed, event: {:end_feed, t}})
  end

  # True when `e` passes every predicate in `filters` (short-circuits).
  defp match_filters(_, []), do: true
  defp match_filters(e, [filter | filters]) do
    filter.(e) && match_filters(e, filters)
  end

  # Consumer/producer: parses chunks into feed entries, applies the filters
  # and the max-items limit, and enqueues accepted entries.
  defp generate_entries_stream(entries, chunks, feed, filters) do
    try do
      # :unlimited is mapped to a huge sentinel so a plain `<` works below.
      max = if feed.filters.limits.max == :unlimited, do: 999999999999, else: feed.filters.limits.max
      Feeder.stream(
        [
          event_state: {entries, 0},
          event_fun: fn
            (entry = %Feeder.Entry{}, {entries, count}) ->
              if count < max && match_filters(entry, filters) do
                BlockingQueue.enqueue(entries, entry)
                {entries, count + 1}
              else
                Feedistiller.Reporter.notify(%Event{feed: feed, entry: entry, event: {:ignored}})
                {entries, count}
              end
            (channel = %Feeder.Feed{}, state) ->
              Feedistiller.Reporter.notify(%Event{feed: feed, event: {:channel_complete, channel}})
              state
            (_, state) -> state
          end,
          continuation_state: chunks,
          continuation_fun: fn chunks ->
            case BlockingQueue.dequeue(chunks) do
              {:ok, chunk} -> {chunk, chunks}
              state ->
                if state == :error do
                  Feedistiller.Reporter.notify(%Event{feed: feed, event: :bad_feed})
                end
                # Empty binary signals end-of-input to the parser.
                {"", chunks}
            end
          end
        ])
    rescue
      _ -> Feedistiller.Reporter.notify(%Event{feed: feed, event: :bad_feed})
    after # whatever happens we complete the entry queue
      BlockingQueue.complete(entries)
    end
  end

  # Filter a feed entry according to date limits
  defp filter_feed_entry(entry, dates) do
    case dates do
      {:oldest, :latest} -> true
      {:oldest, to} -> DateTime.compare(entry.updated, to) in [:lt, :eq]
      {from, :latest} -> DateTime.compare(entry.updated, from) in [:gt, :eq]
      {from, to} ->
        DateTime.compare(entry.updated, to) in [:lt, :eq] and DateTime.compare(entry.updated, from) in [:gt, :eq]
    end
  end

  # nil-tolerant semaphore helpers: a nil semaphore means "no limit".
  defmacrop sem_acquire(s) do
    quote do
      if(!is_nil(unquote(s)), do: Alambic.Semaphore.acquire(unquote(s)))
    end
  end

  defmacrop sem_release(s) do
    quote do
      if(!is_nil(unquote(s)), do: Alambic.Semaphore.release(unquote(s)))
    end
  end

  # Acquire global before local; release in the reverse order below.
  defp acquire([global_sem: gsem, local_sem: lsem]) do
    sem_acquire(gsem)
    sem_acquire(lsem)
  end

  defp acquire(sem) do
    sem_acquire(sem)
  end

  defp release([global_sem: gsem, local_sem: lsem]) do
    sem_release(lsem)
    sem_release(gsem)
  end

  defp release(sem) do
    sem_release(sem)
  end

  # Full path for an entry: sanitized title + the enclosure url's extension.
  defp filename(entry = %Feeder.Entry{}, destination) do
    filename(entry.title, destination) <> Path.extname(URI.parse(entry.enclosure.url).path)
  end

  # Sanitizes a title into a filesystem-safe path under `destination`.
  defp filename(entry, destination) do
    title = String.replace(entry, ~r/\*|<|>|\/|\\|\||"|:/, "_")
    title = String.replace(title, "&", "-")
    title = String.replace(title, "?", "")
    title = String.trim(title)
    Path.join(destination, title)
  end

  # Download one enclosure
  # Blocks on the semaphores, then spawns a linked worker that downloads
  # the file and signals `countdown` when done.
  defp get_enclosure(entry, destination, feed, semaphores, countdown) do
    acquire(semaphores)
    CountDown.increase(countdown)
    spawn_link(fn ->
      filename = filename(entry, destination)
      Feedistiller.Reporter.notify(%Event{
        feed: feed,
        destination: destination,
        entry: entry,
        event: {:begin, filename}
      })
      get_enclosure(filename, entry, feed)
      CountDown.signal(countdown)
      release(semaphores)
    end)
  end

  # Progress is reported at most once per @chunk_limit bytes written.
  @chunk_limit 64 * 1024

  # Fetch an enclosure and save it
  # Writes to a .tmp file first and renames on success so partial
  # downloads never appear under the final name.
  defp get_enclosure(filename, entry, feed) do
    event = %Event{feed: feed, destination: Path.dirname(filename), entry: entry}
    tmp_filename = filename <> ".tmp"
    case File.open(tmp_filename, [:write]) do
      {:ok, file} ->
        try do
          {time, {:ok, {written, _}}} = measure(fn -> Http.stream_get!(
            entry.enclosure.url,
            fn chunk, {current_size, current_chunk} ->
              :ok = IO.binwrite(file, chunk)
              s = current_size + byte_size(chunk)
              c = current_chunk + byte_size(chunk)
              c = if c > @chunk_limit do
                Feedistiller.Reporter.notify(%{event | event: {:write, filename, s}})
                0
              else
                c
              end
              {s, c}
            end,
            {0, 0},
            feed.timeout, feed.user, feed.password)
          end)
          File.close(file)
          File.rename(tmp_filename, filename)
          Feedistiller.Reporter.notify(%{event | event: {:finish_write, filename, written, time}})
        rescue
          e -> Feedistiller.Reporter.notify(%{event | event: {:error_write, filename, e}})
               File.close(file)
               File.rm(tmp_filename)
        end
      # File.open error tuple: report and skip this enclosure.
      e -> Feedistiller.Reporter.notify(%{event | event: {:error_write, filename, e}})
    end
  end

  # Retrieve all enclosures
  defp get_enclosures(entries, destination, feed, semaphores) do
    {t, _} = measure(fn ->
      countdown = CountDown.create_link(0)
      # fetch all enclosures, up to 'max' at the same time
      Enum.each(entries, fn entry -> get_enclosure(entry, destination, feed, semaphores, countdown) end)
      CountDown.wait(countdown)
      CountDown.destroy(countdown)
    end)
    Feedistiller.Reporter.notify(%Event{feed: feed, event: {:end_enclosures, t}})
  end

  # Per-feed semaphore; :unlimited means no local limit (nil).
  defp get_sem(max) do
    case max do
      :unlimited -> nil
      _ -> Alambic.Semaphore.create(max)
    end
  end
end
|
lib/feedistiller.ex
| 0.889804
| 0.566618
|
feedistiller.ex
|
starcoder
|
defmodule DateTimeParser.DateTime do
  @moduledoc false
  import NimbleParsec
  import DateTimeParser.Combinators.Date
  import DateTimeParser.Combinators.DateTime
  import DateTimeParser.Formatters, only: [format_token: 2, clean: 1]

  # Parser for international/"formal" layouts, e.g. ISO-like dates, with an
  # optional leading vocal day name ("Monday, ...").
  defparsec(
    :parse,
    vocal_day()
    |> optional()
    |> choice([
      vocal_month_day_time_year(),
      formal_date_time(),
      formal_date()
    ])
  )

  # Same as :parse but preferring US ordering (month before day).
  defparsec(
    :parse_us,
    vocal_day()
    |> optional()
    |> choice([
      vocal_month_day_time_year(),
      us_date_time(),
      us_date()
    ])
  )

  # Builds a NaiveDateTime from parsed tokens. `:assume_time` controls what
  # happens when the time portion is missing: `false` passes nils through
  # (NaiveDateTime.new then returns an error), `true` assumes midnight, and
  # a %Time{} fills in its own components.
  def from_tokens(tokens, opts) do
    parsed_values =
      clean(%{
        year: format_token(tokens, :year),
        month: format_token(tokens, :month),
        day: format_token(tokens, :day),
        hour: format_token(tokens, :hour),
        minute: format_token(tokens, :minute),
        second: format_token(tokens, :second),
        microsecond: format_token(tokens, :microsecond)
      })

    case Keyword.get(opts, :assume_time, false) do
      false ->
        NaiveDateTime.new(
          parsed_values[:year],
          parsed_values[:month],
          parsed_values[:day],
          parsed_values[:hour],
          parsed_values[:minute],
          parsed_values[:second] || 0,
          parsed_values[:microsecond] || {0, 0}
        )

      %Time{} = assumed_time ->
        assume_time(parsed_values, assumed_time)

      true ->
        assume_time(parsed_values, ~T[00:00:00])
    end
  end

  # Fills any missing time component from `time` (date parts must be present).
  defp assume_time(parsed_values, %Time{} = time) do
    NaiveDateTime.new(
      parsed_values[:year],
      parsed_values[:month],
      parsed_values[:day],
      parsed_values[:hour] || time.hour,
      parsed_values[:minute] || time.minute,
      parsed_values[:second] || time.second,
      parsed_values[:microsecond] || time.microsecond
    )
  end

  # Resolves a Timex timezone from the parsed zone abbreviation or UTC
  # offset. NOTE(review): the `<-` matches always succeed; only the boolean
  # check can fail, in which case this returns `false` — callers must guard
  # (see the `%{} = timezone_info` match below).
  defp timezone_from_tokens(tokens, naive_datetime) do
    with zone <- format_token(tokens, :zone_abbr),
         offset <- format_token(tokens, :utc_offset),
         true <- Enum.any?([zone, offset]) do
      Timex.Timezone.get(offset || zone, naive_datetime)
    end
  end

  # Upgrades a NaiveDateTime to a DateTime when the tokens carried timezone
  # information; otherwise returns the NaiveDateTime unchanged.
  def from_naive_datetime_and_tokens(naive_datetime, tokens) do
    with timezone when not is_nil(timezone) <- tokens[:zone_abbr] || tokens[:utc_offset],
         %{} = timezone_info <- timezone_from_tokens(tokens, naive_datetime) do
      naive_datetime
      |> DateTime.from_naive!("Etc/UTC")
      |> Map.merge(%{
        std_offset: timezone_info.offset_std,
        utc_offset: timezone_info.offset_utc,
        zone_abbr: timezone_info.abbreviation,
        time_zone: timezone_info.full_name
      })
    else
      _ -> naive_datetime
    end
  end
end
|
lib/date_time.ex
| 0.654453
| 0.433322
|
date_time.ex
|
starcoder
|
defmodule Coherence do
@moduledoc """
Coherence is a full featured, configurable authentication and user
management system for Phoenix, providing a number of optional features
configured with an installation mix task:
* Database Authenticatable: handles hashing and storing an encrypted password in the database.
* Invitable: sends invites to new users with a sign-up link, allowing the user to create their account with their own password.
* Registerable: allows anonymous users to register a users email address and password.
* Confirmable: new accounts require clicking a link in a confirmation email.
* Recoverable: provides a link to generate a password reset link with token expiry.
* Trackable: saves login statistics like login counts, timestamps, and IP address for each user.
* Lockable: locks an account when a specified number of failed sign-in attempts has been exceeded.
* Unlockable With Token: provides a link to send yourself an unlock email.
* Rememberable: provides persistent login with 'Remember me?' check box on login page.
See the [README](readme.html) file for an overview, installation, and
setup instructions.
### Authenticatable
Handles hashing and storing an encrypted password in the database.
Provides `/sessions/new` and `/sessions/delete` routes for logging in and out with
the appropriate templates and view.
The following columns are added in the `<timestamp>_add_coherence_to_user.exs` migration:
* :password_hash, :string - the encrypted password
The following options can be customized (default shown):
config :coherence,
password_hash_field: :password_hash # you must recompile Coherence after changing this
### Invitable
Handles sending invites to new users with a sign-up link, allowing the user to create their account with their own password.
Provides `/invitations/new` and `invitations/edit` routes for creating a new invitation and creating a new account from the invite email.
These routes can be configured to require login by using the `coherence_routes :protected` macro in your router.ex file.
Invitation token timeout will be added in the future.
The following table is created by the generated `<timestamp>_create_coherence_invitable.exs` migration:
create table(:invitations) do
add :name, :string
add :email, :string
add :token, :string
end
### Registerable
Allows anonymous users to register a users email address and password.
Provides `/registrations/new` and `/registrations/create` routes for creating a new registration.
Adds a `Register New Account` to the log-in page.
It is recommended that the :confirmable option is used with :registerable to
ensure a valid email address is captured.
### Confirmable
Requires a new account be confirmed. During registration, a confirmation token is generated and sent to the registering email. This link must be clicked before the user can sign-in.
Provides `edit` action for the `/confirmations` route.
The following options can be customized (default shown):
config :coherence,
confirmation_token_expire_days: 5,
allow_unconfirmed_access_for: 0 # number of days. 0 to disable
### Recoverable
Allows users to reset their password using an expiring token sent by email.
Provides `new`, `create`, `edit`, `update` actions for the `/passwords` route.
Adds a "Forgot your password?" link to the log-in form. When clicked, the user provides their email address and if found, sends a reset password instructions email with a reset link.
The following options can be customized (default shown):
config :coherence,
reset_token_expire_days: 2
### Trackable
Saves login statistics like login counts, timestamps, and IP address for each user.
Adds the following database field to your User model with the generated migration:
add :sign_in_count, :integer, default: 0 # how many times the user has logged in
add :current_sign_in_at, :datetime # the current login timestamp
add :last_sign_in_at, :datetime # the timestamp of the previous login
add :current_sign_in_ip, :string # the current login IP address
add :last_sign_in_ip, :string # the IP address of the previous login
### Lockable
Locks an account when a specified number of failed sign-in attempts has been exceeded.
The following defaults can be changed with the following config entries:
The following options can be customized (default shown):
config :coherence,
unlock_timeout_minutes: 5,
max_failed_login_attempts: 5
Adds the following database field to your User model with the generated migration:
add :failed_attempts, :integer, default: 0
add :unlock_token, :string
add :locked_at, :datetime
### Unlockable with Token
Provides a link to send yourself an unlock email. When the user clicks the link, the user is presented a form to enter their email address and password. If the token has not expired and the email and password are valid, a unlock email is sent to the user's email address with an expiring token.
The following options can be customized (default shown):
config :coherence,
unlock_token_expire_minutes: 5
### Remember Me
The `rememberable` option provides persistent login when the 'Remember Me?' box is checked during login.
With this feature, you will automatically be logged in from the same browser when your current login session dies using a configurable expiring persistent cookie.
For security, both a token and series number stored in the cookie on initial login. Each new login creates a new token, but preserves the series number, providing protection against fraud. As well, both the token and series numbers are hashed before saving them to the database, providing protection if the database is compromised.
The following options can be customized (default shown):
config :coherence,
rememberable_cookie_expire_hours: 2*24,
login_cookie: "coherence_login"
The following table is created by the generated `<timestamp>_create_coherence_rememberable.exs` migration:
create table(:rememberables) do
add :series_hash, :string
add :token_hash, :string
add :token_created_at, :datetime
add :user_id, references(:users, on_delete: :delete_all)
timestamps
end
create index(:rememberables, [:user_id])
create index(:rememberables, [:series_hash])
create index(:rememberables, [:token_hash])
create unique_index(:rememberables, [:user_id, :series_hash, :token_hash])
The `--rememberable` install option is not provided in any of the installer group options. You must provide the `--rememberable` option to install the migration and its support.
## Mix Tasks
Backwards compatibility note: For versions of Phoenix previous to 1.3 use `mix coherence.<command>`. The new `mix coh.<command>` better understands the newer Phoenix patterns.
### Installer
The following examples illustrate various configuration scenarios for the install mix task:
# Install with only the `authenticatable` option
$ mix coh.install
# Install all the options except `confirmable` and `invitable`
$ mix coh.install --full
# Install all the options except `invitable`
$ mix coh.install --full-confirmable
# Install all the options except `confirmable`
$ mix coh.install --full-invitable
# Install the `full` options except `lockable` and `trackable`
$ mix coh.install --full --no-lockable --no-trackable
And some reinstall examples:
# Reinstall with defaults (--silent --no-migrations --no-config --confirm-once)
$ mix coh.install --reinstall
# Confirm to overwrite files, show instructions, and generate migrations
$ mix coh.install --reinstall --no-confirm-once --with-migrations
Run `$ mix help coh.install` for more information.
### Clean
The following examples illustrate how to remove the files created by the installer:
# Clean all the installed files
$ mix coh.clean --all
# Clean only the installed view and template files
$ mix coh.clean --views --templates
# Clean all but the models
$ mix coh.clean --all --no-models
# Prompt once to confirm the removal
$ mix coh.clean --all --confirm-once
After installation, if you later want to remove one or more options, here are a couple examples:
# Clean one option
$ mix coh.clean --options=recoverable
# Clean several options without confirmation
$ mix coh.clean --no-confirm --options="recoverable unlockable-with-token"
# Test the uninstaller without removing files
$ mix coh.clean --dry-run --options="recoverable unlockable-with-token"
Run `$ mix help coh.install` or `$ mix help coherence.install` for more information.
"""
use Application
alias Coherence.{Config, Messages}
@doc false
def start(_type, _args) do
Coherence.Supervisor.start_link()
end
@doc """
Get the currently logged in user data.
"""
def current_user(conn), do: conn.assigns[Config.assigns_key]
@doc """
Updates the user login data in the current session.
Other sessions belonging to the same user won't be updated.
Requires access to the `conn`, which means it can't be called outside of the context of a conn.
To update all sessions belonging to the user see `update_user_logins/1`.
"""
def update_user_login(conn, user) do
apply(Config.auth_module,
Config.update_login,
[conn, user, [id_key: Config.schema_key]])
end
@doc """
Updates the user login data in all sessions belonging to the user.
All sessions belonging to the same user will be updated.
Doesn't need access to the `conn`, which means it can be called anywhere.
To update only the current session see `update_user_login/2`
"""
def update_user_logins(user) do
# Handle a user's DBStore
Coherence.CredentialStore.Server.update_user_logins(user)
end
@doc """
Get the currently logged in user name.
"""
def current_user_name(conn, field \\ :name) do
(current_user(conn) || %{}) |> Map.get(field)
end
@doc """
Get the currently assigned user_token
"""
def user_token(conn), do: conn.assigns[Config.token_assigns_key]
@doc """
Verify a user token for channel authentication.
"""
def verify_user_token(socket, token, assign_fun) do
# Dispatch on how the app configured :verify_user_token: either an
# anonymous function or an {module, function, args} tuple; anything
# else is reported as a configuration error.
result = case Config.verify_user_token do
fun when is_function(fun) ->
fun.(socket, token)
{mod, fun, args} ->
apply(mod, fun, args)
error ->
{:error, Messages.backend().verify_user_token(user_token: Config.verify_user_token(), error: error)}
end
case result do
{:ok, user_id} -> {:ok, assign_fun.(socket, :user_id, user_id)}
error -> error
end
end
@doc """
Check if user is logged in.
"""
def logged_in?(conn), do: !!current_user(conn)
end
|
lib/coherence.ex
| 0.817793
| 0.730554
|
coherence.ex
|
starcoder
|
defmodule Alembic.Plugin.Behaviour do
  @moduledoc """
  Defines the callback functions that the main callback module of each
  Alembic plugin is expected to implement and provides a `__using__/1` helper
  macro that plugin authors can use to define fallback implementations of
  several of these callbacks.
  """

  alias Alembic.Request

  @doc false
  # `use Alembic.Plugin.Behaviour` invokes `__using__/1`; the previous
  # arity-2 definition (`__using__(_module, _opts)`) could never be
  # called by `use` and made the macro unusable.
  defmacro __using__(_opts) do
    quote do
      @behaviour Alembic.Plugin.Behaviour

      @doc """
      Fallback clause of the expected callback function `handle/2`. Used
      to ensure that a well-defined plugin's callback module will not
      throw a missing function clause error when this callback is invoked
      by the server's plugin manager.
      """
      def handle(_request, _client) do
        :continue
      end

      @doc """
      Fallback clause of the expected callback function `screen/2`. Used
      to ensure that a well-defined plugin's callback module will not
      throw a missing function clause error when this callback is invoked
      by the server's plugin manager.
      """
      def screen(_request, _client) do
        :ignore
      end

      # Without this, the catch-all defaults above would shadow any
      # `handle/2`/`screen/2` clauses the plugin module defines itself.
      defoverridable handle: 2, screen: 2
    end
  end

  @doc """
  Returns the current plugin's manifest. A valid plugin manifest is a keyword
  list with the following key-value pairs:
  * `name`: the name of the plugin, as a string
  * `version`: the plugin's SemVer-compliant version number, as a string
  """
  @callback alembic_plugin() :: [name: String.t(), version: String.t()]

  @doc """
  Invoked by the server's plugin manager when it is this plugin's turn to
  handle the specified request. Note that one or more other plugins may have
  already been given an opportunity to handle this request by the time this
  callback is invoked.
  This function should return either the atom `:continue` (if the request
  should also be passed down the line for the plugin with the next-highest
  handle priority to handle) or the atom `:consume` (if no further plugins
  should be allowed to handle the request).
  """
  @callback handle(Request.t(), pid) :: :continue | :consume

  @doc """
  Invoked by the server's plugin manager each time a new request is
  submitted. The value returned by a plugin's implementation of this function
  when passed a particular request is used to determine when, if ever, the
  plugin will be given an opportunity to handle that request.
  This function should return either an integer (with a higher integer
  indicating a stronger interest in being the first plugin to handle the
  specified request) or the atom `:ignore` (indicating that this plugin has
  no interest in handling the request at all).
  """
  @callback screen(Request.t(), pid) :: integer | :ignore
end
|
lib/plugin.ex
| 0.881021
| 0.478833
|
plugin.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.