code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule UTCDateTime.ISO do
  @moduledoc false
  # credo:disable-for-this-file
  # Copied from Elixir 1.9, since no longer in 1.10
  # Might need to look for them in a different spot.

  @doc false
  # Quoted fragments for matching an ISO 8601 calendar date ("YYYY-MM-DD").
  # Returns a three-element list meant to be spliced into caller-generated
  # code: [binary match pattern, digit guard, {year, month, day} builder].
  # The guard only verifies every byte is an ASCII digit; range validation
  # (month 1..12, day 1..31) is left to the caller.
  @spec __match_date__ :: [term]
  def __match_date__ do
    quote do
      [
        <<y1, y2, y3, y4, ?-, m1, m2, ?-, d1, d2>>,
        y1 >= ?0 and y1 <= ?9 and y2 >= ?0 and y2 <= ?9 and y3 >= ?0 and y3 <= ?9 and y4 >= ?0 and
          y4 <= ?9 and m1 >= ?0 and m1 <= ?9 and m2 >= ?0 and m2 <= ?9 and d1 >= ?0 and d1 <= ?9 and
          d2 >= ?0 and d2 <= ?9,
        {
          # Each matched byte is an ASCII digit; subtracting ?0 yields its value.
          (y1 - ?0) * 1000 + (y2 - ?0) * 100 + (y3 - ?0) * 10 + (y4 - ?0),
          (m1 - ?0) * 10 + (m2 - ?0),
          (d1 - ?0) * 10 + (d2 - ?0)
        }
      ]
    end
  end

  @doc false
  # Quoted fragments for matching an ISO 8601 time ("hh:mm:ss"); same shape
  # and caveats as `__match_date__/0`. Yields {hour, minute, second}.
  @spec __match_time__ :: [term]
  def __match_time__ do
    quote do
      [
        <<h1, h2, ?:, i1, i2, ?:, s1, s2>>,
        h1 >= ?0 and h1 <= ?9 and h2 >= ?0 and h2 <= ?9 and i1 >= ?0 and i1 <= ?9 and i2 >= ?0 and
          i2 <= ?9 and s1 >= ?0 and s1 <= ?9 and s2 >= ?0 and s2 <= ?9,
        {
          (h1 - ?0) * 10 + (h2 - ?0),
          (i1 - ?0) * 10 + (i2 - ?0),
          (s1 - ?0) * 10 + (s2 - ?0)
        }
      ]
    end
  end

  @doc false
  # Parses a fractional-second prefix (".123" or ",123") off the front of a
  # string. Returns {{microseconds, precision}, rest}, or :error when a
  # separator is present but not followed by any digit. Without a leading
  # "." or "," the input is returned untouched with {0, 0}.
  @spec parse_microsecond(String.t()) :: {{integer, integer}, String.t()} | :error
  def parse_microsecond("." <> rest) do
    case parse_microsecond(rest, 0, "") do
      {"", 0, _} ->
        :error

      {microsecond, precision, rest} when precision in 1..6 ->
        # Right-pad to six digits so e.g. ".5" becomes 500_000 microseconds.
        pad = String.duplicate("0", 6 - byte_size(microsecond))
        {{String.to_integer(microsecond <> pad), precision}, rest}

      {microsecond, _precision, rest} ->
        # More than six digits were given: truncate to microsecond precision.
        {{String.to_integer(binary_part(microsecond, 0, 6)), 6}, rest}
    end
  end

  # ISO 8601 also allows a comma as the decimal separator.
  def parse_microsecond("," <> rest) do
    parse_microsecond("." <> rest)
  end

  def parse_microsecond(rest) do
    {{0, 0}, rest}
  end

  # Accumulates consecutive ASCII digits, counting the precision as it goes.
  defp parse_microsecond(<<head, tail::binary>>, precision, acc) when head in ?0..?9,
    do: parse_microsecond(tail, precision + 1, <<acc::binary, head>>)

  defp parse_microsecond(rest, precision, acc), do: {acc, precision, rest}

  @doc false
  # Parses a UTC offset prefix ("Z", "±hh:mm", "±hhmm" or "±hh") into seconds.
  # Returns {offset_in_seconds, rest}, {nil, ""} for the empty string, or
  # :error. "-00:00" is rejected explicitly (RFC 3339 uses it to mean
  # "offset unknown").
  @spec parse_offset(String.t()) :: {integer | nil, String.t()} | :error
  def parse_offset(""), do: {nil, ""}
  def parse_offset("Z"), do: {0, ""}
  def parse_offset("-00:00"), do: :error

  def parse_offset(<<?+, hour::2-bytes, ?:, min::2-bytes, rest::binary>>),
    do: parse_offset(1, hour, min, rest)

  def parse_offset(<<?-, hour::2-bytes, ?:, min::2-bytes, rest::binary>>),
    do: parse_offset(-1, hour, min, rest)

  def parse_offset(<<?+, hour::2-bytes, min::2-bytes, rest::binary>>),
    do: parse_offset(1, hour, min, rest)

  def parse_offset(<<?-, hour::2-bytes, min::2-bytes, rest::binary>>),
    do: parse_offset(-1, hour, min, rest)

  # Hour-only offsets imply zero minutes.
  def parse_offset(<<?+, hour::2-bytes, rest::binary>>), do: parse_offset(1, hour, "00", rest)
  def parse_offset(<<?-, hour::2-bytes, rest::binary>>), do: parse_offset(-1, hour, "00", rest)
  def parse_offset(_), do: :error

  # Converts the matched hour/minute digit pairs into a signed offset in
  # seconds, rejecting out-of-range (hour >= 24, minute >= 60) or
  # non-numeric components via the `with` guards.
  defp parse_offset(sign, hour, min, rest) do
    with {hour, ""} when hour < 24 <- Integer.parse(hour),
         {min, ""} when min < 60 <- Integer.parse(min) do
      {(hour * 60 + min) * 60 * sign, rest}
    else
      _ -> :error
    end
  end
end
|
lib/utc_datetime/iso.ex
| 0.540924
| 0.533823
|
iso.ex
|
starcoder
|
defmodule Contentful.Delivery do
  @moduledoc """
  The Delivery API is the main access point for fetching data for your customers.
  The API is _read only_.

  If you wish to manipulate data, please have a look at the `Contentful.Management`.

  ## Basic interaction

  The `space_id`, the `environment` and your `access_token` can all be configured in
  `config/config.exs`:

  ```
  # config/config.exs
  config :contentful, delivery: [
    space_id: "<my_space_id>",
    environment: "<my_environment>",
    access_token: "<my_access_token_cda>"
  ]
  ```

  The space can then be fetched as a `Contentful.Space` via a simple query:

  ```
  import Contentful.Query
  alias Contentful.Delivery.Spaces
  {:ok, space} = Spaces |> fetch_one
  ```

  Retrieving items is then just a matter of importing `Contentful.Query`:

  ```
  import Contentful.Query
  alias Contentful.Delivery.Entries
  {:ok, entries, total: _total_count_of_entries} = Entries |> fetch_all
  ```

  You can create query chains to form more complex queries:

  ```
  import Contentful.Query
  alias Contentful.Delivery.Entries
  {:ok, entries, total: _total_count_of_entries} =
    Entries
    |> skip(2)
    |> limit(10)
    |> include(2)
    |> fetch_all
  ```

  Fetching individual entities is straight forward:

  ```
  import Contentful.Query
  alias Contentful.Delivery.Assets
  my_asset_id = "my_asset_id"
  {:ok, assets, total: _total_count_of_assets} = Assets |> fetch_one(my_asset_id)
  ```

  All query resolvers also support changing the `space_id`, `environment` and `access_token` at call
  time:

  ```
  import Contentful.Query
  alias Contentful.Delivery.Assets
  my_asset_id = "my_asset_id"
  {:ok, asset} =
    Assets
    |> fetch_one(my_asset_id)
  ```

  Note: If you want to pass the configuration at call time, you can pass these later as function
  parameters to the resolver call:

  ```
  import Contentful.Query
  alias Contentful.Delivery.Assets
  my_asset_id = "my_asset_id"
  my_space_id = "bmehzfuz4raf"
  my_environment = "staging"
  my_access_token = "<PASSWORD>"
  {:ok, asset} =
    Assets
    |> fetch_one(my_asset_id, my_space_id, my_environment, my_access_token)
  # also works for fetch_all:
  {:ok, assets, _} =
    Assets
    |> fetch_all(my_space_id, my_environment, my_access_token)
  # and for stream:
  [ asset | _ ] =
    Assets
    |> stream(my_space_id, my_environment, my_access_token)
    |> Enum.to_list
  ```

  ## Spaces as an exception

  Unfortunately, `Contentful.Delivery.Spaces` do not support complete collection behaviour:

  ```
  # doesn't exist in the Delivery API:
  {:error, _, _} = Contentful.Delivery.Spaces |> fetch_all
  # however, you can still retrieve a single `Contentful.Space`:
  {:ok, space} = Contentful.Delivery.Spaces |> fetch_one # the configured space
  {:ok, my_space} = Contentful.Delivery.Spaces |> fetch_one("my_space_id") # a passed space
  ```

  ## Further reading

  * [Contentful Delivery API docs](https://www.contentful.com/developers/docs/references/content-delivery-api/) (CDA).
  """

  import Contentful.Misc, only: [fallback: 2]
  import HTTPoison, only: [get: 2]

  alias Contentful.Configuration
  alias HTTPoison.Response

  @endpoint "cdn.contentful.com"
  @preview_endpoint "preview.contentful.com"
  @protocol "https"
  @separator "/"
  # Response header carrying the number of seconds to wait after a 429.
  @rate_limit_header "x-contentful-rate-limit-exceeded"

  @doc """
  Gets the json library for the Contentful Delivery API based
  on the config/config.exs.
  """
  @spec json_library :: module()
  def json_library do
    Contentful.json_library()
  end

  @doc """
  constructs the base url with protocol for the CDA

  ## Examples

      "https://cdn.contentful.com" = url()
  """
  @spec url() :: String.t()
  def url do
    "#{@protocol}://#{host_from_config()}"
  end

  @doc """
  constructs the base url with the space id that got configured in config.exs
  """
  @spec url(nil) :: String.t()
  def url(space) when is_nil(space) do
    # With no space configured either, fall back to the plain host url.
    case space_from_config() do
      nil ->
        url()

      space ->
        space |> url
    end
  end

  @doc """
  constructs the base url with the extension for a given space

  ## Examples

      "https://cdn.contentful.com/spaces/foo" = url("foo")
  """
  @spec url(String.t()) :: String.t()
  def url(space) do
    [url(), "spaces", space] |> Enum.join(@separator)
  end

  @doc """
  When explicitly given `nil`, will fetch the `environment` from the environments
  current config (see `config/config.exs`). Will fall back to `"master"` if no environment
  is set.

  ## Examples

      "https://cdn.contentful.com/spaces/foo/environments/master" = url("foo", nil)

      # With config set in config/config.exs
      config :contentful_delivery, environment: "staging"
      "https://cdn.contentful.com/spaces/foo/environments/staging" = url("foo", nil)
  """
  @spec url(String.t(), nil) :: String.t()
  def url(space, env) when is_nil(env) do
    [space |> url(), "environments", environment_from_config()]
    |> Enum.join(@separator)
  end

  @doc """
  constructs the base url for the delivery endpoint for a given space and environment

  ## Examples

      "https://cdn.contentful.com/spaces/foo/environments/bar" = url("foo", "bar")
  """
  def url(space, env) do
    [space |> url(), "environments", env] |> Enum.join(@separator)
  end

  @doc """
  Sends a request against the CDA. It's really just a wrapper around `HTTPoison.get/2`
  """
  @spec send_request({binary(), any()}) :: {:error, HTTPoison.Error.t()} | {:ok, Response.t()}
  def send_request({url, headers}) do
    get(url, headers)
  end

  @doc """
  Parses the response from the CDA and triggers a callback on success
  """
  @spec parse_response({:ok, Response.t()}, fun()) ::
          {:ok, struct()}
          | {:ok, list(struct()), total: non_neg_integer()}
          | {:error, :rate_limit_exceeded, wait_for: integer()}
          | {:error, atom(), original_message: String.t()}
  def parse_response(
        {:ok, %Response{status_code: code, body: body} = resp},
        callback
      ) do
    case code do
      200 ->
        body |> json_library().decode! |> callback.()

      401 ->
        body |> build_error(:unauthorized)

      404 ->
        body |> build_error(:not_found)

      _ ->
        # Covers 429 (rate limit) and any unexpected status code.
        resp |> build_error()
    end
  end

  @doc """
  catch_all for any errors during flight (connection loss, etc.)
  """
  @spec parse_response({:error, any()}, fun()) :: {:error, :unknown}
  def parse_response({:error, _}, _callback) do
    build_error()
  end

  @doc """
  Used to construct generic errors for calls against the CDA
  """
  @spec build_error(String.t(), atom()) ::
          {:error, atom(), original_message: String.t()}
  def build_error(response_body, status) do
    {:ok, %{"message" => message}} = response_body |> json_library().decode()
    {:error, status, original_message: message}
  end

  @doc """
  Used for the rate limit exceeded error, as it gives the user extra information on wait times.

  Any other unexpected response collapses into the generic `{:error, :unknown}`.
  """
  @spec build_error(Response.t()) ::
          {:error, :rate_limit_exceeded, wait_for: integer()}
          | {:error, :unknown}
  def build_error(%Response{status_code: 429, headers: headers}) do
    # Headers arrive in arbitrary order and number, so look the header up by
    # key instead of pattern matching an exact two-element list (the old
    # pattern raised FunctionClauseError on real responses).
    case List.keyfind(headers, @rate_limit_header, 0) do
      {_, seconds} -> {:error, :rate_limit_exceeded, wait_for: seconds}
      nil -> build_error()
    end
  end

  # Any other unexpected response (e.g. a 500) maps to the generic error
  # instead of raising a FunctionClauseError.
  def build_error(%Response{}) do
    build_error()
  end

  @doc """
  Used to make a generic error, in case the API Response is not what is expected
  """
  @spec build_error() :: {:error, :unknown}
  def build_error do
    {:error, :unknown}
  end

  # Reads the environment from config, defaulting to "master".
  defp environment_from_config do
    Configuration.get(:environment) |> fallback("master")
  end

  defp space_from_config do
    Configuration.get(:space)
  end

  # Selects the API host: default CDN, the preview host for `:preview`, or a
  # custom endpoint string.
  defp host_from_config do
    case Configuration.get(:endpoint) do
      nil -> @endpoint
      :preview -> @preview_endpoint
      value -> value
    end
  end
end
|
lib/contentful_delivery/delivery.ex
| 0.888735
| 0.887838
|
delivery.ex
|
starcoder
|
defmodule Oracleex.Protocol do
  @moduledoc """
  Implementation of `DBConnection` behaviour for `Oracleex.ODBC`.

  Handles translation of concepts to what ODBC expects and holds
  state for a connection.

  This module is not called directly, but rather through
  other `Oracleex` modules or `DBConnection` functions.
  """

  use DBConnection

  alias Oracleex.ODBC
  alias Oracleex.Result

  defstruct [pid: nil, oracle: :idle, conn_opts: []]

  @typedoc """
  Process state.

  Includes:

  * `:pid`: the pid of the ODBC process
  * `:oracle`: the transaction state. Can be `:idle` (not in a transaction),
    `:transaction` (in a transaction) or `:auto_commit` (connection in
    autocommit mode)
  * `:conn_opts`: the options used to set up the connection.
  """
  @type state :: %__MODULE__{pid: pid(),
                             oracle: :idle | :transaction | :auto_commit,
                             conn_opts: Keyword.t}
  @type query :: Oracleex.Query.t
  @type params :: [{:odbc.odbc_data_type(), :odbc.value()}]
  @type result :: Result.t
  @type cursor :: any

  @doc false
  # Builds an ODBC connection string from opts (falling back to the ORACLE_*
  # environment variables) and starts the ODBC worker process.
  @spec connect(opts :: Keyword.t) :: {:ok, state}
                                    | {:error, Exception.t}
  def connect(opts) do
    conn_opts = [
      {"DSN", opts[:dsn] || System.get_env("ORACLE_DSN") || "OracleODBC-12c"},
      {"DBQ", opts[:service] || System.get_env("ORACLE_SERVICE")},
      {"UID", opts[:username] || System.get_env("ORACLE_USR")},
      {"PWD", opts[:password] || System.get_env("ORACLE_PWD")}
    ]
    conn_str = Enum.reduce(conn_opts, "", fn {key, value}, acc ->
      acc <> "#{key}=#{value};" end)

    case ODBC.start_link(conn_str, opts) do
      {:ok, pid} -> {:ok, %__MODULE__{
        pid: pid,
        conn_opts: opts,
        oracle: if(opts[:auto_commit] == :on,
          do: :auto_commit,
          else: :idle)
      }}
      response -> response
    end
  end

  @doc false
  # NOTE(review): DBConnection expects `disconnect/2` to return `:ok`; the
  # `{:error, reason, state}` branch leaks the underlying ODBC error instead.
  # The spec below reflects what the code actually does.
  @spec disconnect(err :: Exception.t, state) :: :ok | {:error, term, state}
  def disconnect(_err, %{pid: pid} = state) do
    case ODBC.disconnect(pid) do
      :ok -> :ok
      {:error, reason} -> {:error, reason, state}
    end
  end

  @doc false
  # Tears the current connection down and connects again with `new_opts`;
  # used internally to toggle autocommit mode.
  @spec reconnect(new_opts :: Keyword.t, state) :: {:ok, state}
  def reconnect(new_opts, state) do
    with :ok <- disconnect("Reconnecting", state),
      do: connect(new_opts)
  end

  @doc false
  @spec checkout(state) :: {:ok, state}
                         | {:disconnect, Exception.t, state}
  def checkout(state) do
    {:ok, state}
  end

  @doc false
  @spec checkin(state) :: {:ok, state}
                        | {:disconnect, Exception.t, state}
  def checkin(state) do
    {:ok, state}
  end

  @doc false
  @spec handle_begin(opts :: Keyword.t, state) ::
    {:ok, result, state}
    | {:error | :disconnect, Exception.t, state}
  def handle_begin(opts, state) do
    case Keyword.get(opts, :mode, :transaction) do
      :transaction -> handle_transaction(:begin, opts, state)
      :savepoint -> handle_savepoint(:begin, opts, state)
    end
  end

  @doc false
  @spec handle_commit(opts :: Keyword.t, state) ::
    {:ok, result, state} |
    {:error | :disconnect, Exception.t, state}
  def handle_commit(opts, state) do
    case Keyword.get(opts, :mode, :transaction) do
      :transaction -> handle_transaction(:commit, opts, state)
      :savepoint -> handle_savepoint(:commit, opts, state)
    end
  end

  @doc false
  @spec handle_rollback(opts :: Keyword.t, state) ::
    {:ok, result, state} |
    {:error | :disconnect, Exception.t, state}
  def handle_rollback(opts, state) do
    case Keyword.get(opts, :mode, :transaction) do
      :transaction -> handle_transaction(:rollback, opts, state)
      :savepoint -> handle_savepoint(:rollback, opts, state)
    end
  end

  # Transitions the transaction state machine; only :idle may enter a
  # transaction.
  defp handle_transaction(:begin, _opts, state) do
    case state.oracle do
      :idle -> {:ok, %Result{num_rows: 0}, %{state | oracle: :transaction}}
      :transaction -> {:error,
        %Oracleex.Error{message: "Already in transaction"},
        state}
      :auto_commit -> {:error,
        %Oracleex.Error{message: "Transactions not allowed in autocommit mode"},
        state}
    end
  end
  defp handle_transaction(:commit, _opts, state) do
    case ODBC.commit(state.pid) do
      :ok -> {:ok, %Result{}, %{state | oracle: :idle}}
      {:error, reason} -> {:error, reason, state}
    end
  end
  defp handle_transaction(:rollback, _opts, state) do
    # A failed rollback leaves the connection in an unknown state, so
    # disconnect rather than just erroring.
    case ODBC.rollback(state.pid) do
      :ok -> {:ok, %Result{}, %{state | oracle: :idle}}
      {:error, reason} -> {:disconnect, reason, state}
    end
  end

  defp handle_savepoint(:begin, opts, state) do
    # BUG FIX: this previously compared against `:autocommit`, but the state
    # atom used everywhere else in this module is `:auto_commit`, so the
    # guard could never match and savepoints were attempted in autocommit
    # mode.
    if state.oracle == :auto_commit do
      {:error,
        %Oracleex.Error{message: "savepoint not allowed in autocommit mode"},
        state}
    else
      handle_execute(
        %Oracleex.Query{name: "", statement: "SAVEPOINT Oracleex_savepoint"},
        [], opts, state)
    end
  end
  defp handle_savepoint(:commit, _opts, state) do
    # Oracle savepoints need no explicit release; committing is a no-op.
    {:ok, %Result{}, state}
  end
  defp handle_savepoint(:rollback, opts, state) do
    handle_execute(
      %Oracleex.Query{name: "", statement: "ROLLBACK TO Oracleex_savepoint"},
      [], opts, state)
  end

  @doc false
  @spec handle_prepare(query, opts :: Keyword.t, state) ::
    {:ok, query, state} |
    {:error | :disconnect, Exception.t, state}
  def handle_prepare(query, _opts, state) do
    {:ok, query, state}
  end

  @doc false
  # Runs the query, then reconciles the connection state: commits immediately
  # when idle, leaves open transactions alone, and switches autocommit back
  # off after a do_query-triggered autocommit reconnect.
  @spec handle_execute(query, params, opts :: Keyword.t, state) ::
    {:ok, result, state} |
    {:error | :disconnect, Exception.t, state}
  def handle_execute(query, params, opts, state) do
    {status, message, new_state} = do_query(query, params, opts, state)

    case new_state.oracle do
      :idle ->
        with {:ok, _, post_commit_state} <- handle_commit(opts, new_state)
        do
          {status, message, post_commit_state}
        end
      :transaction -> {status, message, new_state}
      :auto_commit ->
        with {:ok, post_connect_state} <- switch_auto_commit(:off, new_state)
        do
          {status, message, post_connect_state}
        end
    end
  end

  # Executes the statement over ODBC. Statements that ODBC refuses inside a
  # transaction (e.g. DDL) trigger a reconnect in autocommit mode and a
  # retry; connection-level failures disconnect.
  defp do_query(query, params, opts, state) do
    case ODBC.query(state.pid, query.statement, params, opts) do
      {:error,
        %Oracleex.Error{odbc_code: :not_allowed_in_transaction} = reason} ->
        if state.oracle == :auto_commit do
          # Already in autocommit and still refused: give up.
          {:error, reason, state}
        else
          with {:ok, new_state} <- switch_auto_commit(:on, state),
            do: handle_execute(query, params, opts, new_state)
        end
      {:error,
        %Oracleex.Error{odbc_code: :connection_exception} = reason} ->
        {:disconnect, reason, state}
      {:error, reason} ->
        {:error, reason, state}
      {:selected, columns, rows} ->
        {:ok, %Result{columns: Enum.map(columns, &(to_string(&1))), rows: rows, num_rows: Enum.count(rows)}, state}
      {:updated, num_rows} ->
        {:ok, %Result{num_rows: num_rows}, state}
    end
  end

  # Reconnects with auto_commit toggled; ODBC has no way to flip the mode on
  # a live connection.
  defp switch_auto_commit(new_value, state) do
    reconnect(Keyword.put(state.conn_opts, :auto_commit, new_value), state)
  end

  @doc false
  @spec handle_close(query, opts :: Keyword.t, state) ::
    {:ok, result, state} |
    {:error | :disconnect, Exception.t, state}
  def handle_close(_query, _opts, state) do
    {:ok, %Result{}, state}
  end

  # Health check: runs a trivial SELECT and disconnects on failure.
  def ping(state) do
    query = %Oracleex.Query{name: "ping", statement: "SELECT 1 FROM DUAL"}
    case do_query(query, [], [], state) do
      {:ok, _, new_state} -> {:ok, new_state}
      {:error, reason, new_state} -> {:disconnect, reason, new_state}
      other -> other
    end
  end
end
|
lib/oracleex/protocol.ex
| 0.826327
| 0.427994
|
protocol.ex
|
starcoder
|
defmodule Markright.Parsers.Youtube do
  @moduledoc ~S"""
  Parses the input for the youtube video.

  ## Examples

      iex> input = "✇https://www.youtube.com/watch?v=noQcPIeW6tE&size=5"
      iex> Markright.Parsers.Youtube.to_ast(input)
      %Markright.Continuation{ast: {:iframe,
        %{allowfullscreen: nil, frameborder: 0, height: 315,
          src: "http://www.youtube.com/embed/noQcPIeW6tE", width: 560},
        "http://www.youtube.com/embed/noQcPIeW6tE"},
        bag: [tags: [], parser: Markright.Parsers.Generic],
        fun: nil, tail: ""}

      iex> "✇http://www.youtube.com/embed/noQcPIeW6tE"
      ...> |> Markright.to_ast()
      ...> |> XmlBuilder.generate(format: :none)
      "<article><p><iframe allowfullscreen=\"\" frameborder=\"0\" height=\"315\" src=\"http://www.youtube.com/embed/noQcPIeW6tE\" width=\"560\">http://www.youtube.com/embed/noQcPIeW6tE</iframe></p></article>"
  """

  use Markright.Continuation
  use Markright.Helpers.Magnet

  # Parses input that starts with the youtube magnet character (`@magnet`,
  # injected by `Markright.Helpers.Magnet`), normalizes the URL to an embed
  # URL via `code/1`, and wraps it in a 560x315 `:iframe` AST node.
  def to_ast(input, %Plume{} = plume \\ %Plume{}) when is_binary(input) do
    %Plume{ast: <<@magnet::binary, url::binary>>} = cont = astify(input, plume)
    url = code(url)
    iframe = {:iframe, %{width: 560, height: 315, src: url, frameborder: 0, allowfullscreen: nil}}
    Markright.Utils.continuation(:continuation, %Plume{cont | ast: url}, iframe)
  end

  # Builds the canonical embed URL for an extracted video id.
  defp youtubify(code), do: "http://www.youtube.com/embed/#{code}"

  # Binary pattern matches need a compile-time size, and youtube video ids
  # vary in length, so generate one `code/1` clause per candidate id length
  # (1..24) for each known URL shape (youtu.be, /v/, /embed/, watch?v=, in
  # both http and https). `unquote(i)` works outside a `quote` here as an
  # "unquote fragment", evaluated while the module compiles.
  1..24
  |> Enum.each(fn i ->
    defp code(<<"http://youtu.be/"::binary, code::binary-size(unquote(i))>>), do: youtubify(code)

    defp code(
           <<"http://youtu.be/"::binary, code::binary-size(unquote(i)), "?"::binary, _::binary>>
         ),
         do: youtubify(code)

    defp code(<<"http://www.youtube.com/v/"::binary, code::binary-size(unquote(i))>>),
      do: youtubify(code)

    defp code(
           <<"http://www.youtube.com/v/"::binary, code::binary-size(unquote(i)), "?"::binary,
             _::binary>>
         ),
         do: youtubify(code)

    defp code(<<"http://www.youtube.com/embed/"::binary, code::binary-size(unquote(i))>>),
      do: youtubify(code)

    defp code(
           <<"http://www.youtube.com/embed/"::binary, code::binary-size(unquote(i)), "?"::binary,
             _::binary>>
         ),
         do: youtubify(code)

    defp code(<<"http://www.youtube.com/watch?v="::binary, code::binary-size(unquote(i))>>),
      do: youtubify(code)

    defp code(
           <<"http://www.youtube.com/watch?v="::binary, code::binary-size(unquote(i)),
             "&"::binary, _::binary>>
         ),
         do: youtubify(code)

    defp code(<<"https://youtu.be/"::binary, code::binary-size(unquote(i))>>), do: youtubify(code)

    defp code(
           <<"https://youtu.be/"::binary, code::binary-size(unquote(i)), "?"::binary, _::binary>>
         ),
         do: youtubify(code)

    defp code(<<"https://www.youtube.com/v/"::binary, code::binary-size(unquote(i))>>),
      do: youtubify(code)

    defp code(
           <<"https://www.youtube.com/v/"::binary, code::binary-size(unquote(i)), "?"::binary,
             _::binary>>
         ),
         do: youtubify(code)

    defp code(<<"https://www.youtube.com/embed/"::binary, code::binary-size(unquote(i))>>),
      do: youtubify(code)

    defp code(
           <<"https://www.youtube.com/embed/"::binary, code::binary-size(unquote(i)), "?"::binary,
             _::binary>>
         ),
         do: youtubify(code)

    defp code(<<"https://www.youtube.com/watch?v="::binary, code::binary-size(unquote(i))>>),
      do: youtubify(code)

    defp code(
           <<"https://www.youtube.com/watch?v="::binary, code::binary-size(unquote(i)),
             "&"::binary, _::binary>>
         ),
         do: youtubify(code)
  end)

  # Fallback: unrecognised URLs pass through unchanged.
  defp code(code), do: code
end
|
lib/markright/parsers/youtube.ex
| 0.695028
| 0.48499
|
youtube.ex
|
starcoder
|
defmodule Kaffy.ResourceAdmin do
  alias Kaffy.ResourceSchema
  alias Kaffy.Utils

  @moduledoc """
  ResourceAdmin modules should be created for every schema you want to customize/configure in Kaffy.

  If you have a schema like `MyApp.Products.Product`, you should create an admin module with
  name `MyApp.Products.ProductAdmin` and add functions documented in this module to customize the behavior.

  All functions are optional.
  """

  @doc """
  `index/1` takes the schema module and should return a keyword list of fields and
  their options.

  Supported options are `:name` and `:value`.

  Both options can be a string or an anonymous function.

  If a function is provided, the current entry is passed to it.

  If index/1 is not defined, Kaffy will return all the fields of the schema and their default values.

  Example:

  ```elixir
  def index(_schema) do
    [
      id: %{name: "ID", value: fn post -> post.id + 100 end},
      title: nil, # this will render the default name for this field (Title) and its default value (post.title)
      views: %{name: "Hits", value: fn post -> post.views + 10 end},
      published: %{name: "Published?", value: fn post -> published?(post) end},
      comment_count: %{name: "Comments", value: fn post -> comment_count(post) end}
    ]
  end
  ```
  """
  def index(resource) do
    schema = resource[:schema]
    Utils.get_assigned_value_or_default(resource, :index, ResourceSchema.index_fields(schema))
  end

  @doc """
  form_fields/1 takes a schema and returns a keyword list of fields and their options for the new/edit form.

  Supported options are:

  `:label`, `:type`, `:choices`, and `:permission`

  `:type` can be any ecto type in addition to `:file` and `:textarea`

  If `:choices` is provided, it must be a keyword list and
  the field will be rendered as a `<select>` element regardless of the actual field type.

  Setting `:permission` to `:read` will make the field non-editable. It is `:write` by default.

  If you want to remove a field from being rendered, just remove it from the list.

  If form_fields/1 is not defined, Kaffy will return all the fields with
  their default types based on the schema.

  Example:

  ```elixir
  def form_fields(_schema) do
    [
      title: %{label: "Subject"},
      slug: nil,
      image: %{type: :file},
      status: %{choices: [{"Pending", "pending"}, {"Published", "published"}]},
      body: %{type: :textarea, rows: 3},
      views: %{permission: :read}
    ]
  end
  ```
  """
  def form_fields(resource) do
    schema = resource[:schema]

    Utils.get_assigned_value_or_default(
      resource,
      :form_fields,
      ResourceSchema.form_fields(schema)
    )
    |> set_default_field_options(schema)
  end

  # Merges each field's schema-derived default options underneath any
  # user-provided options (user options win).
  defp set_default_field_options(fields, schema) do
    Enum.map(fields, fn {f, o} ->
      default_options = Kaffy.ResourceSchema.default_field_options(schema, f)
      final_options = Map.merge(default_options, o || %{})
      {f, final_options}
    end)
  end

  @doc """
  `search_fields/1` takes a schema and must return a list of `:string` fields to search against when typing in the search box.

  If `search_fields/1` is not defined, Kaffy will return all the `:string` fields of the schema.

  Example:

  ```elixir
  def search_fields(_schema) do
    [:title, :slug, :body]
  end
  ```
  """
  def search_fields(resource) do
    Utils.get_assigned_value_or_default(
      resource,
      :search_fields,
      ResourceSchema.search_fields(resource)
    )
  end

  @doc """
  `ordering/1` takes a schema and returns how the entries should be ordered.

  If `ordering/1` is not defined, Kaffy will return `[desc: :id]`.

  Example:

  ```elixir
  def ordering(_schema) do
    [asc: :title]
  end
  ```
  """
  def ordering(resource) do
    Utils.get_assigned_value_or_default(resource, :ordering, desc: :id)
  end

  @doc """
  `authorized?/2` takes the schema and the current Plug.Conn struct and
  should return a boolean value.

  Returning false will prevent the access of this resource for the current user/request.

  If `authorized?/2` is not defined, Kaffy will return true.

  Example:

  ```elixir
  def authorized?(_schema, _conn) do
    true
  end
  ```
  """
  def authorized?(resource, conn) do
    Utils.get_assigned_value_or_default(resource, :authorized?, true, [conn])
  end

  @doc """
  `create_changeset/2` takes the record and the changes and should return a changeset for creating a new record.

  If `create_changeset/2` is not defined, Kaffy will try to call `schema.changeset/2`
  and if that's not defined, `Ecto.Changeset.change/2` will be called.

  Example:

  ```elixir
  def create_changeset(schema, attrs) do
    MyApp.Blog.Post.create_changeset(schema, attrs)
  end
  ```
  """
  def create_changeset(resource, changes) do
    schema = resource[:schema]
    schema_struct = schema.__struct__
    functions = schema.__info__(:functions)

    default =
      case Keyword.has_key?(functions, :changeset) do
        true ->
          schema.changeset(schema_struct, changes)

        false ->
          # NOTE(review): `change/2` after `cast/3` applies `changes` without
          # casting, which may bypass the form_fields filter — confirm intent.
          form_fields = Kaffy.ResourceSchema.form_fields(schema) |> Keyword.keys()

          schema_struct
          |> Ecto.Changeset.cast(changes, form_fields)
          |> Ecto.Changeset.change(changes)
      end

    Utils.get_assigned_value_or_default(
      resource,
      :create_changeset,
      default,
      [schema.__struct__, changes],
      false
    )
  end

  @doc """
  `update_changeset/2` takes the record and the changes and should return a changeset for updating an existing record.

  If `update_changeset/2` is not defined, Kaffy will try to call `schema.changeset/2`
  and if that's not defined, `Ecto.Changeset.change/2` will be called.

  Example:

  ```elixir
  def update_changeset(schema, attrs) do
    MyApp.Blog.Post.update_changeset(schema, attrs)
  end
  ```
  """
  def update_changeset(resource, entry, changes) do
    schema = resource[:schema]
    functions = schema.__info__(:functions)

    default =
      case Keyword.has_key?(functions, :changeset) do
        true ->
          schema.changeset(entry, changes)

        false ->
          # BUG FIX: a stray trailing `Ecto.Changeset.change(entry, changes)`
          # used to be the branch value here, silently discarding this
          # cast pipeline. Now mirrors `create_changeset/2`.
          form_fields = Kaffy.ResourceSchema.form_fields(schema) |> Keyword.keys()

          entry
          |> Ecto.Changeset.cast(changes, form_fields)
          |> Ecto.Changeset.change(changes)
      end

    Utils.get_assigned_value_or_default(
      resource,
      :update_changeset,
      default,
      [entry, changes],
      false
    )
  end

  @doc """
  This function should return a string for the singular name of a resource.

  If `singular_name/1` is not defined, Kaffy will use the name of
  the last part of the schema module (e.g. Post in MyApp.Blog.Post)

  This is useful for when you have a schema but you want to display its name differently.

  If you have "Post" and you want to display "Article" for example.

  Example:

  ```elixir
  def singular_name(_schema) do
    "Article"
  end
  ```
  """
  def singular_name(resource) do
    default = humanize_term(resource[:schema])
    Utils.get_assigned_value_or_default(resource, :singular_name, default)
  end

  # Turns a module name into a human-readable label:
  # MyApp.Blog.BlogPost -> "Blog Post".
  def humanize_term(term) do
    term
    |> to_string()
    |> String.split(".")
    |> Enum.at(-1)
    |> Macro.underscore()
    |> String.split("_")
    |> Enum.map(fn s -> String.capitalize(s) end)
    |> Enum.join(" ")
  end

  @doc """
  This is useful for names that cannot be plural by adding an "s" at the end.

  Like "Category" => "Categories" or "Person" => "People".

  If `plural_name/1` is not defined, Kaffy will use the singular
  name and add an "s" to it (e.g. Posts).

  Example:

  ```elixir
  def plural_name(_schema) do
    "Categories"
  end
  ```
  """
  def plural_name(resource) do
    default = singular_name(resource) <> "s"
    Utils.get_assigned_value_or_default(resource, :plural_name, default)
  end

  def resource_actions(resource, conn) do
    Utils.get_assigned_value_or_default(resource, :resource_actions, nil, [conn], false)
  end

  def list_actions(resource, conn) do
    Utils.get_assigned_value_or_default(resource, :list_actions, nil, [conn], false)
  end

  def widgets(resource, conn) do
    Utils.get_assigned_value_or_default(
      resource,
      :widgets,
      ResourceSchema.widgets(resource),
      [conn]
    )
  end

  # Gathers widgets across all contexts/schemas, applies per-type default
  # widths (tidbit: 3, chart: 12, flash: 4, otherwise 6) and sorts by :order.
  def collect_widgets(conn) do
    Enum.reduce(Kaffy.Utils.contexts(), [], fn c, all ->
      widgets =
        Enum.reduce(Kaffy.Utils.schemas_for_context(c), [], fn {_, resource}, all ->
          all ++ Kaffy.ResourceAdmin.widgets(resource, conn)
        end)
        |> Enum.map(fn widget ->
          width = Map.get(widget, :width)
          type = widget.type

          cond do
            is_nil(width) and type == "tidbit" -> Map.put(widget, :width, 3)
            is_nil(width) and type == "chart" -> Map.put(widget, :width, 12)
            is_nil(width) and type == "flash" -> Map.put(widget, :width, 4)
            true -> Map.put_new(widget, :width, 6)
          end
        end)

      all ++ widgets
    end)
    |> Enum.sort_by(fn w -> Map.get(w, :order, 999) end)
  end

  def custom_pages(resource, conn) do
    Utils.get_assigned_value_or_default(resource, :custom_pages, [], [conn])
  end

  # Gathers custom pages across all contexts/schemas, sorted by :order.
  def collect_pages(conn) do
    Enum.reduce(Kaffy.Utils.contexts(), [], fn c, all ->
      all ++
        Enum.reduce(Kaffy.Utils.schemas_for_context(c), [], fn {_, resource}, all ->
          all ++ Kaffy.ResourceAdmin.custom_pages(resource, conn)
        end)
    end)
    |> Enum.sort_by(fn p -> Map.get(p, :order, 999) end)
  end

  # Finds the first collected page matching the given slug, or nil.
  def find_page(conn, slug) do
    conn
    |> collect_pages()
    |> Enum.filter(fn p -> p.slug == slug end)
    |> Enum.at(0)
  end

  # Returns the resource's custom links, optionally filtered by location
  # (:top or :sub, defaulting to :sub), sorted and merged with link defaults.
  def custom_links(resource, location \\ nil) do
    links = Utils.get_assigned_value_or_default(resource, :custom_links, [])

    case location do
      nil -> links
      :top -> Enum.filter(links, fn l -> Map.get(l, :location, :sub) == :top end)
      :sub -> Enum.filter(links, fn l -> Map.get(l, :location, :sub) == :sub end)
    end
    |> Enum.sort_by(fn l -> Map.get(l, :order, 999) end)
    |> Enum.map(fn l -> Map.merge(%{target: "_self", icon: "link", method: :get}, l) end)
  end

  # Gathers custom links across all contexts/schemas for a given location.
  def collect_links(location) do
    contexts = Kaffy.Utils.contexts()

    Enum.reduce(contexts, [], fn c, all ->
      resources = Kaffy.Utils.schemas_for_context(c)

      Enum.reduce(resources, all, fn {_r, options}, all ->
        links =
          Kaffy.ResourceAdmin.custom_links(options)
          |> Enum.filter(fn link -> Map.get(link, :location, :sub) == location end)

        all ++ links
      end)
    end)
    |> Enum.sort_by(fn c -> Map.get(c, :order, 999) end)
  end
end
|
lib/kaffy/resource_admin.ex
| 0.862945
| 0.743401
|
resource_admin.ex
|
starcoder
|
defmodule Pointers.ULID do
@moduledoc """
An Ecto type for ULID strings.
"""
use Ecto.Type
require Logger
# Synthesises a ULID-shaped string from the first 26 bytes of the input by
# mapping each character onto its closest Crockford Base32 letter (via the
# private `synth/1`). Longer inputs are truncated to 26 characters.
#
# BUG FIX: the too-short clause used to return `Logger.error/1`'s `:ok`,
# violating the bang-function contract; it now logs and raises.
def synthesise!(x) when is_binary(x) and byte_size(x) == 26, do: synth(x)

def synthesise!(x) when is_binary(x) and byte_size(x) > 26,
  do: synthesise!(String.slice(x, 0, 26))

def synthesise!(x) when is_binary(x) and byte_size(x) < 26 do
  message = "Too short, need #{26 - byte_size(x)} chars."
  Logger.error(message)
  raise ArgumentError, message
end
# Rewrites each character of the binary through `synth_letter/1`; the empty
# binary terminates the recursion.
defp synth(""), do: ""
defp synth(<< c :: bytes-size(1), rest :: binary >>), do: synth_letter(c) <> synth(rest)
# Maps a single character onto its closest Crockford Base32 letter: digits
# pass through, letters are upper-cased, and the visually ambiguous
# substitutions i/l -> "1", o -> "0" and u -> "V" are applied (in both
# cases, since I/L/O/U are not part of the Crockford alphabet either).
# Any other character throws {:bad_letter, char}.
defp synth_letter(char) do
  cond do
    char in ["i", "I", "l", "L"] -> "1"
    char in ["o", "O"] -> "0"
    char in ["u", "U"] -> "V"
    char in ~w(0 1 2 3 4 5 6 7 8 9) -> char
    char in ~w(a b c d e f g h j k m n p q r s t v w x y z) -> String.upcase(char)
    char in ~w(A B C D E F G H J K M N P Q R S T V W X Y Z) -> char
    true -> throw({:bad_letter, char})
  end
end
@doc """
Returns the timestamp of a Crockford Base32 encoded ULID.

Decodes the 26-character string and reads its 48-bit millisecond
timestamp; a decoding failure is returned as `:error`.
(For a raw 16-byte binary ULID, use `bintimestamp/1` directly.)
"""
def timestamp(<<_::bytes-size(26)>> = encoded) do
  case decode(encoded) do
    {:ok, binary} -> bintimestamp(binary)
    error -> error
  end
end
@doc """
Extracts the embedded timestamp from a binary (16-byte) ULID.

The first 48 bits are an unsigned Unix timestamp in milliseconds; the
remaining bytes (entropy) are ignored. Returns `DateTime.from_unix/2`'s
result: `{:ok, %DateTime{}}` or `{:error, reason}` for out-of-range values.
"""
def bintimestamp(<<unix_ms::unsigned-size(48), _entropy::binary>>),
  do: DateTime.from_unix(unix_ms, :millisecond)
@doc """
The underlying schema type.
"""
# A ULID is stored as a 16-byte binary — the same wire format as a UUID —
# so the database-level :uuid type is reused.
def type, do: :uuid
@doc """
Casts a 26-byte encoded string to ULID or a 16-byte binary unchanged.

A raw 16-byte binary is passed through untouched; a 26-character string is
accepted only when every character is a valid Crockford Base32 digit.
Anything else yields `:error`.
"""
def cast(<<_::bytes-size(16)>> = raw), do: {:ok, raw}

def cast(<<_::bytes-size(26)>> = encoded) do
  # valid?/1 returns a strict boolean, so both arms are covered.
  case valid?(encoded) do
    true -> {:ok, encoded}
    false -> :error
  end
end

def cast(_other), do: :error
@doc """
Same as `cast/1` but raises `Ecto.CastError` on invalid arguments.
"""
def cast!(value) do
  # cast/1 only ever returns {:ok, _} or :error.
  case cast(value) do
    {:ok, ulid} -> ulid
    :error -> raise Ecto.CastError, type: __MODULE__, value: value
  end
end
@doc """
Converts a Crockford Base32 encoded ULID into a binary.

Ecto `dump` callback: accepts only a 26-byte encoded string and returns
`{:ok, binary}` or `:error` (from `decode/1`); any other shape is `:error`.
"""
def dump(<<_::bytes-size(26)>> = encoded), do: decode(encoded)
def dump(_), do: :error
@doc """
Converts a binary ULID into a Crockford Base32 encoded string.

Ecto `load` callback: accepts only a 16-byte (128-bit) binary and returns
`{:ok, encoded}` (from `encode/1`); any other shape is `:error`.
"""
def load(<<_::unsigned-size(128)>> = bytes), do: encode(bytes)
def load(_), do: :error
@doc false
# Ecto autogenerate hook: produces a freshly generated, text-encoded ULID.
def autogenerate, do: generate()
@doc """
Generates a Crockford Base32 encoded ULID.

If a value is provided for `timestamp`, the generated ULID will be for the
provided timestamp; otherwise a ULID is generated for the current time.

Arguments:
  * `timestamp`: A Unix timestamp with millisecond precision.
"""
def generate(timestamp \\ System.system_time(:millisecond)) do
  # encode/1 cannot fail on a freshly generated 16-byte binary, so the
  # match on {:ok, _} is an assertion.
  {:ok, encoded} = timestamp |> bingenerate() |> encode()
  encoded
end
@doc """
Generates a binary (16-byte) ULID.

If a value is provided for `timestamp`, the generated ULID will be for the
provided timestamp; otherwise a ULID is generated for the current time.

Arguments:
  * `timestamp`: A Unix timestamp with millisecond precision.
"""
def bingenerate(timestamp \\ System.system_time(:millisecond)) do
  # 48 bits of timestamp followed by 80 bits of cryptographic entropy.
  entropy = :crypto.strong_rand_bytes(10)
  <<timestamp::unsigned-size(48), entropy::binary>>
end
# Encodes a 128-bit binary ULID as a 26-character Crockford Base32 string.
# The 128 bits are consumed as one leading 3-bit group followed by
# twenty-five 5-bit groups (3 + 25 * 5 = 128), each mapped via e/1.
# Returns {:ok, encoded}; any other input shape yields :error.
#
# NOTE(review): the bitstring pattern in this clause head was reconstructed —
# the original text had been mangled by an automated scrubber. The 3-bit
# first group matches the intact `d(c1)::3` in decode/1 below.
defp encode(
       <<b1::3, b2::5, b3::5, b4::5, b5::5, b6::5, b7::5, b8::5, b9::5, b10::5, b11::5,
         b12::5, b13::5, b14::5, b15::5, b16::5, b17::5, b18::5, b19::5, b20::5, b21::5,
         b22::5, b23::5, b24::5, b25::5, b26::5>>
     ) do
  <<e(b1), e(b2), e(b3), e(b4), e(b5), e(b6), e(b7), e(b8), e(b9), e(b10), e(b11), e(b12),
    e(b13), e(b14), e(b15), e(b16), e(b17), e(b18), e(b19), e(b20), e(b21), e(b22), e(b23),
    e(b24), e(b25), e(b26)>>
catch
  :error -> :error
else
  encoded -> {:ok, encoded}
end

defp encode(_), do: :error
@compile {:inline, e: 1}
# Maps a value in 0..31 to its Crockford Base32 character: digits 0-9
# followed by the uppercase alphabet minus the ambiguous I, L, O and U.
# Values outside 0..31 raise FunctionClauseError, but callers only pass
# 3- and 5-bit groups extracted in encode/1.
defp e(0), do: ?0
defp e(1), do: ?1
defp e(2), do: ?2
defp e(3), do: ?3
defp e(4), do: ?4
defp e(5), do: ?5
defp e(6), do: ?6
defp e(7), do: ?7
defp e(8), do: ?8
defp e(9), do: ?9
defp e(10), do: ?A
defp e(11), do: ?B
defp e(12), do: ?C
defp e(13), do: ?D
defp e(14), do: ?E
defp e(15), do: ?F
defp e(16), do: ?G
defp e(17), do: ?H
defp e(18), do: ?J
defp e(19), do: ?K
defp e(20), do: ?M
defp e(21), do: ?N
defp e(22), do: ?P
defp e(23), do: ?Q
defp e(24), do: ?R
defp e(25), do: ?S
defp e(26), do: ?T
defp e(27), do: ?V
defp e(28), do: ?W
defp e(29), do: ?X
defp e(30), do: ?Y
defp e(31), do: ?Z
# Decodes a 26-character Crockford Base32 string into a 128-bit binary ULID.
# Each character byte is decoded via d/1; the first character contributes
# only 3 bits (3 + 25 * 5 = 128). d/1 throws :error for any character
# outside the alphabet, which is caught here and returned as :error.
#
# NOTE(review): the bitstring pattern in this clause head was reconstructed —
# the original text had been mangled by an automated scrubber. The body
# (`d(c1)::3, d(c2)::5, ...`) survived intact and fixes the layout.
defp decode(
       <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8, c9::8, c10::8, c11::8,
         c12::8, c13::8, c14::8, c15::8, c16::8, c17::8, c18::8, c19::8, c20::8, c21::8,
         c22::8, c23::8, c24::8, c25::8, c26::8>>
     ) do
  <<d(c1)::3, d(c2)::5, d(c3)::5, d(c4)::5, d(c5)::5, d(c6)::5, d(c7)::5, d(c8)::5, d(c9)::5,
    d(c10)::5, d(c11)::5, d(c12)::5, d(c13)::5, d(c14)::5, d(c15)::5, d(c16)::5, d(c17)::5,
    d(c18)::5, d(c19)::5, d(c20)::5, d(c21)::5, d(c22)::5, d(c23)::5, d(c24)::5, d(c25)::5,
    d(c26)::5>>
catch
  :error -> :error
else
  decoded -> {:ok, decoded}
end

defp decode(_), do: :error
@compile {:inline, d: 1}
# Decodes one Crockford Base32 character (as a byte) to its 5-bit value.
# Any character outside the alphabet throws :error, which decode/1 catches
# and converts into an :error return.
defp d(char) do
  case char do
    ?0 -> 0
    ?1 -> 1
    ?2 -> 2
    ?3 -> 3
    ?4 -> 4
    ?5 -> 5
    ?6 -> 6
    ?7 -> 7
    ?8 -> 8
    ?9 -> 9
    ?A -> 10
    ?B -> 11
    ?C -> 12
    ?D -> 13
    ?E -> 14
    ?F -> 15
    ?G -> 16
    ?H -> 17
    ?J -> 18
    ?K -> 19
    ?M -> 20
    ?N -> 21
    ?P -> 22
    ?Q -> 23
    ?R -> 24
    ?S -> 25
    ?T -> 26
    ?V -> 27
    ?W -> 28
    ?X -> 29
    ?Y -> 30
    ?Z -> 31
    _ -> throw(:error)
  end
end
# Checks that a 26-byte candidate string contains only Crockford Base32
# characters; used by cast/1 before accepting an encoded ULID.
#
# NOTE(review): the bitstring pattern in this clause head was reconstructed —
# the original text had been mangled by an automated scrubber. The v/1
# conjunction over c1..c26 survived intact and fixes the layout.
defp valid?(
       <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8, c9::8, c10::8, c11::8,
         c12::8, c13::8, c14::8, c15::8, c16::8, c17::8, c18::8, c19::8, c20::8, c21::8,
         c22::8, c23::8, c24::8, c25::8, c26::8>>
     ) do
  v(c1) && v(c2) && v(c3) && v(c4) && v(c5) && v(c6) && v(c7) && v(c8) && v(c9) && v(c10) &&
    v(c11) && v(c12) && v(c13) && v(c14) && v(c15) && v(c16) && v(c17) && v(c18) && v(c19) &&
    v(c20) && v(c21) && v(c22) && v(c23) && v(c24) && v(c25) && v(c26)
end

defp valid?(_), do: false
@compile {:inline, v: 1}
# True iff `char` is a valid Crockford Base32 digit: 0-9 or an uppercase
# letter excluding the ambiguous I, L, O and U. Everything else — including
# lowercase letters — is rejected.
defp v(char)
     when char in ?0..?9
     when char in ?A..?H
     when char in ?J..?K
     when char in ?M..?N
     when char in ?P..?T
     when char in ?V..?Z,
     do: true

defp v(_), do: false
end
|
lib/pointers_ulid.ex
| 0.749637
| 0.499939
|
pointers_ulid.ex
|
starcoder
|
defmodule AdventOfCode.Solutions.Day05 do
  @moduledoc """
  Solution for day 5 exercise.

  ### Exercise
  https://adventofcode.com/2021/day/5
  """
  require Logger

  @points_separator " -> "
  @coords_separator ","

  @doc """
  Counts points covered by at least two vent lines and prints the result.

  * `filename` - path to the puzzle input, one `x1,y1 -> x2,y2` row per line.
  * `mode` - `:full` (default) also considers 45-degree diagonal lines;
    any other value restricts the count to horizontal/vertical lines.

  Returns `:ok`; the count is written to stdout.
  """
  def overlap_points(filename, mode \\ :full) do
    result =
      filename
      |> File.read!()
      |> parse_file()
      |> filter_lines(mode)
      |> Enum.map(&expand_line/1)
      |> find_overlaps()

    mode_text = if mode == :full, do: "(including diagonals) ", else: ""
    IO.puts("Number of overlapping points #{mode_text}#{length(result)}")
  end

  @doc false
  # Misspelled original name kept for backward compatibility with existing
  # callers; new code should use overlap_points/2.
  def ovarlap_points(filename, mode \\ :full), do: overlap_points(filename, mode)

  # Parses the whole input into a list of lines, each `[[x1, y1], [x2, y2]]`.
  defp parse_file(file_content) do
    file_content
    |> String.replace("\r\n", "\n")
    |> String.split("\n", trim: true)
    |> Enum.map(&parse_line/1)
  end

  # "x1,y1 -> x2,y2" -> [[x1, y1], [x2, y2]]
  defp parse_line(line_str) do
    line_str
    |> String.split(@points_separator)
    |> Enum.map(&parse_point/1)
  end

  # "x,y" -> [x, y]
  defp parse_point(point_str) do
    point_str
    |> String.split(@coords_separator)
    |> Enum.map(&String.to_integer/1)
  end

  # Keeps vertical and horizontal lines unconditionally; diagonal lines only
  # when they are at exactly 45 degrees and `mode` is :full.
  defp filter_lines(lines, mode) do
    Enum.filter(lines, fn
      [[x, _y1], [x, _y2]] ->
        true

      [[_x1, y], [_x2, y]] ->
        true

      [[x1, y1], [x2, y2]] ->
        abs(x1 - x2) == abs(y1 - y2) && mode == :full
    end)
  end

  # Expands a line into the list of {x, y} points it covers.
  # Vertical line (same x).
  defp expand_line([[x, y1], [x, y2]]) do
    for y <- y1..y2, do: {x, y}
  end

  # Horizontal line (same y).
  defp expand_line([[x1, y], [x2, y]]) do
    for x <- x1..x2, do: {x, y}
  end

  # 45-degree diagonal. The two clauses above did not match, so x1 != x2 and
  # y1 != y2 here, which keeps direction_factor/2 total for this call site.
  defp expand_line([[x1, y1], [x2, y2]]) do
    diff = abs(x2 - x1)
    direction_x = direction_factor(x1, x2)
    direction_y = direction_factor(y1, y2)

    for i <- 0..diff do
      {x1 + direction_x * i, y1 + direction_y * i}
    end
  end

  # Step direction along one axis; only ever called with a != b (see above).
  defp direction_factor(a, b) when a > b, do: -1
  defp direction_factor(a, b) when a < b, do: 1

  # Returns the points covered by more than one line, as {{x, y}, count}.
  defp find_overlaps(lines) do
    lines
    |> List.flatten()
    |> Enum.frequencies()
    |> Enum.filter(fn {_k, v} -> v > 1 end)
  end
end
|
lib/advent_of_code/solutions/day05.ex
| 0.630685
| 0.519765
|
day05.ex
|
starcoder
|
defmodule X3m.System.Scheduler do
  @moduledoc """
  This behaviour should be used to schedule `X3m.System.Message` delivery
  at some point in time in the future. Implementation module should persist alarms so when process
  is respawned they can be reloaded into memory.

  Not all scheduled alarms are kept in memory. They are loaded in bulks
  each `in_memory_interval/0` milliseconds for next `2 * in_memory_interval/0` milliseconds.
  If message with its `X3m.System.Message.id` is already in memory (and scheduled for delivery)
  it is ignored.

  When message is processed, `service_responded/2` callback is invoked. It should return either
  `:ok` or `{:retry, in_milliseconds, X3m.System.Message}` if returned message should be redelivered
  in specified number of milliseconds.
  """

  @doc """
  This callback is invoked when `X3m.System.Message` should be saved as an alarm.
  Time when it should be dispatched is set in its `assigns.dispatch_at` as `DateTime`.
  3rd parameter (state) is the one that was set when Scheduler's `start_link/1` was
  called.

  If `{:ok, X3m.System.Message.t()}` is returned, then that message will be dispatched instead
  of original one. This can be used to inject something in message assigns during `save_alarm`.
  """
  @callback save_alarm(
              X3m.System.Message.t(),
              aggregate_id :: String.t(),
              state :: any()
            ) :: :ok | {:ok, X3m.System.Message.t()}

  @doc """
  Load alarms callback is invoked on Scheduler's init with `load_from` as `nil`,
  and after that it is invoked each `in_memory_interval/0` with `load_from`
  set to previous `load_until` value and new `load_until` will be
  `load_from = 2 * in_memory_interval/0`.
  """
  @callback load_alarms(
              load_from :: nil | DateTime.t(),
              load_until :: DateTime.t(),
              state :: any()
            ) ::
              {:ok, [X3m.System.Message.t()]}
              | {:error, term()}

  @doc """
  This callback is invoked when scheduled message is processed.

  It should return either `:ok` (and remove from persistence) so message delivery is not retried or
  amount of milliseconds in which delivery will be retried with potentially
  modified `X3m.System.Message`. Its `assigns` can be used to track number of retries for example.
  """
  @callback service_responded(X3m.System.Message.t(), state :: any()) ::
              :ok
              | {:retry, in_ms :: non_neg_integer(), X3m.System.Message.t()}

  @doc """
  This is optional callback that should return in which interval (in milliseconds)
  alarms should be loaded.
  """
  @callback in_memory_interval() :: milliseconds :: pos_integer()

  @doc """
  This optional callback defines timeout for response (in milliseconds). By default
  response is being waited for 5_000 milliseconds.
  """
  @callback dispatch_timeout(X3m.System.Message.t()) :: milliseconds :: pos_integer()

  @optional_callbacks in_memory_interval: 0, dispatch_timeout: 1

  defmodule State do
    # client_state      - opaque term handed to every behaviour callback
    # loaded_until      - upper bound of the already-loaded alarm window
    #                     (nil until the first :load_alarms pass completes)
    # scheduled_alarms  - in-memory alarms keyed by message id
    @type t() :: %__MODULE__{
            client_state: any(),
            loaded_until: nil | DateTime.t(),
            scheduled_alarms: %{String.t() => X3m.System.Message.t()}
          }

    @enforce_keys ~w(client_state loaded_until scheduled_alarms)a
    defstruct @enforce_keys
  end

  defmacro __using__(_opts) do
    quote do
      @moduledoc """
      This module should be used to schedule `X3m.System.Message` delivery
      at some point in time in the future. Alarms are persisted so when process
      is respawned they can be reloaded into memory.

      Not all scheduled alarms are kept in memory. They are loaded in bulks
      each `in_memory_interval/0` milliseconds for next `2 * in_memory_interval/0` milliseconds.
      If message with its `X3m.System.Message.id` is already in memory (and scheduled for delivery)
      it is ignored.

      When message is processed, `service_responded/2` callback is invoked. It should return either
      `:ok` or `{:retry, in_milliseconds, X3m.System.Message}` if returned message should be redelivered
      in specified number of milliseconds.
      """
      use GenServer
      alias X3m.System.{Scheduler, Dispatcher, Message}
      alias X3m.System.Scheduler.State

      @behaviour Scheduler
      @name __MODULE__

      @doc """
      Spawns Scheduler with given `state`. That one is provided in all
      callbacks as last parameter and can be used to provide Repo or any other detail needed.

      When new Scheduler is spawned it calls `load_alarms/3` callback with
      `load_from` set to `nil`.
      """
      @spec start_link(state :: any()) :: GenServer.on_start()
      def start_link(state \\ %{}),
        do: GenServer.start_link(__MODULE__, state, name: @name)

      @doc """
      Schedules dispatch of `msg` `in` given milliseconds or `at` given `DateTime`.
      It assigns `:dispatch_at` to the `msg` and that's the real `DateTime` when dispatch
      should occur.

      opts can be either:
      - `in` - in milliseconds
      - `at` - as `DateTime`
      """
      @spec dispatch(Message.t(), String.t(), opts :: Keyword.t()) :: :ok
      def dispatch(%Message{} = msg, aggregate_id, in: dispatch_in_ms) do
        dispatch_at = DateTime.add(_now(), dispatch_in_ms, :millisecond)

        GenServer.call(
          @name,
          {:schedule_dispatch, msg, aggregate_id, dispatch_at, dispatch_in_ms}
        )
      end

      def dispatch(%Message{} = msg, aggregate_id, at: %DateTime{} = dispatch_at) do
        dispatch_in_ms = DateTime.diff(dispatch_at, _now(), :millisecond)

        GenServer.call(
          @name,
          {:schedule_dispatch, msg, aggregate_id, dispatch_at, dispatch_in_ms}
        )
      end

      @spec _now() :: DateTime.t()
      defp _now() do
        {:ok, time} = DateTime.now("Etc/UTC", Tzdata.TimeZoneDatabase)
        time
      end

      @impl GenServer
      @doc false
      def init(client_state) do
        # Queue the first alarm load. This message is in the mailbox before
        # any client call can arrive, so loaded_until is set before it is
        # compared in handle_call below.
        send(self(), :load_alarms)
        {:ok, %State{client_state: client_state, loaded_until: nil, scheduled_alarms: %{}}}
      end

      @impl GenServer
      @doc false
      def handle_call(
            {:schedule_dispatch, %Message{} = msg, aggregate_id, dispatch_at, dispatch_in_ms},
            _from,
            %State{} = state
          ) do
        msg =
          msg
          |> Message.assign(:dispatch_at, dispatch_at)
          |> Message.assign(:dispatch_attempts, 0)

        # save_alarm/3 may replace the message (e.g. to enrich assigns).
        msg =
          msg
          |> save_alarm(aggregate_id, state.client_state)
          |> case do
            :ok -> msg
            {:ok, %Message{} = new_message} -> new_message
          end

        scheduled_alarms =
          cond do
            dispatch_in_ms < 0 ->
              # Alarm is already late: dispatch immediately, flagged as late.
              msg = Message.assign(msg, :late?, true)
              send(self(), {:dispatch, msg})
              Map.put(state.scheduled_alarms, msg.id, msg)

            DateTime.compare(state.loaded_until, dispatch_at) == :gt ->
              # Falls inside the already-loaded window: schedule in-process.
              Process.send_after(self(), {:dispatch, msg}, dispatch_in_ms)
              Map.put(state.scheduled_alarms, msg.id, msg)

            true ->
              # Beyond the loaded window: a later :load_alarms pass will pick
              # it up from persistence.
              state.scheduled_alarms
          end

        {:reply, :ok, %State{state | scheduled_alarms: scheduled_alarms}}
      end

      @impl GenServer
      @doc false
      def handle_info(:load_alarms, %State{} = state) do
        load_until =
          (state.loaded_until || DateTime.utc_now())
          |> DateTime.add(in_memory_interval() * 2, :millisecond)

        {:ok, alarms} = load_alarms(state.loaded_until, load_until, state.client_state)

        scheduled_alarms =
          alarms
          |> Enum.reduce(%{}, fn %Message{} = msg, acc ->
            dispatch_in_ms = DateTime.diff(msg.assigns.dispatch_at, _now(), :millisecond)

            cond do
              dispatch_in_ms < 0 ->
                msg = Message.assign(msg, :late?, true)
                send(self(), {:dispatch, msg})
                Map.put_new(acc, msg.id, msg)

              DateTime.compare(load_until, msg.assigns.dispatch_at) == :gt ->
                Process.send_after(@name, {:dispatch, msg}, dispatch_in_ms)
                Map.put_new(acc, msg.id, msg)

              true ->
                # BUGFIX: the accumulator is the plain map being built by this
                # reduce; the previous code evaluated a dead `:ok` and then
                # returned `acc.scheduled_alarms` (a missing key), crashing
                # whenever load_alarms/3 returned an alarm beyond load_until.
                acc
            end
          end)

        Process.send_after(self(), :load_alarms, in_memory_interval())
        {:noreply, %State{state | loaded_until: load_until, scheduled_alarms: scheduled_alarms}}
      end

      def handle_info({:dispatch, msg}, %State{} = state) do
        # Dispatch runs in a linked process so a slow service doesn't block
        # the scheduler.
        # BUGFIX: the spawned function previously *returned* {:noreply, ...}
        # tuples which were silently discarded — dispatched alarms were never
        # removed from (nor retries recorded in) the scheduler state. The
        # outcome is now reported back to the scheduler process instead.
        server = self()

        spawn_link(fn ->
          timeout = dispatch_timeout(msg)
          # Replies from the dispatched service go to this worker process.
          msg = %{msg | reply_to: self()}
          attempts = msg.assigns[:dispatch_attempts] || 0

          msg
          |> Message.assign(:dispatch_attempts, attempts + 1)
          |> Dispatcher.dispatch(timeout: timeout)
          |> service_responded(state.client_state)
          |> case do
            :ok ->
              send(server, {:alarm_dispatched, msg.id})

            {:retry, in_ms, %Message{} = msg} ->
              msg = _retry_message(msg)
              Process.send_after(server, {:dispatch, msg}, in_ms)
              send(server, {:alarm_rescheduled, msg})
          end
        end)

        {:noreply, state}
      end

      # An alarm was successfully handled; drop it from in-memory state.
      def handle_info({:alarm_dispatched, msg_id}, %State{} = state) do
        {_, scheduled_alarms} = Map.pop(state.scheduled_alarms, msg_id)
        {:noreply, %State{state | scheduled_alarms: scheduled_alarms}}
      end

      # An alarm delivery will be retried; keep the (possibly modified) message.
      def handle_info({:alarm_rescheduled, %Message{} = msg}, %State{} = state) do
        scheduled_alarms = Map.put(state.scheduled_alarms, msg.id, msg)
        {:noreply, %State{state | scheduled_alarms: scheduled_alarms}}
      end

      # Resets per-delivery fields before a retry so the message can travel
      # through the dispatcher again.
      @spec _retry_message(Message.t()) :: Message.t()
      defp _retry_message(%Message{} = msg) do
        %{
          msg
          | request: nil,
            valid?: true,
            response: nil,
            events: [],
            halted?: false
        }
      end

      @doc false
      @spec in_memory_interval() :: milliseconds :: pos_integer()
      def in_memory_interval,
        do: 6 * 60 * 60 * 1_000

      @doc false
      @spec dispatch_timeout(Message.t()) :: milliseconds :: pos_integer()
      def dispatch_timeout(%Message{}),
        do: 5_000

      defoverridable in_memory_interval: 0, dispatch_timeout: 1
    end
  end
end
|
lib/scheduler.ex
| 0.889271
| 0.433442
|
scheduler.ex
|
starcoder
|
defmodule Phoenix.LiveComponent do
@moduledoc """
Components are a mechanism to compartmentalize state, markup, and
events in LiveView.
Components are defined by using `Phoenix.LiveComponent` and are used
by calling `Phoenix.LiveView.Helpers.live_component/3` in a parent LiveView.
Components run inside the LiveView process, but may have their own
state and event handling.
The simplest component only needs to define a `render` function:
defmodule HeroComponent do
use Phoenix.LiveComponent
def render(assigns) do
~L\"""
<div class="hero"><%= @content %></div>
\"""
end
end
When `use Phoenix.LiveComponent` is used, all functions in
`Phoenix.LiveView` are imported. A component can be invoked as:
<%= live_component @socket, HeroComponent, content: @content %>
Components come in two shapes, stateless or stateful. The component
above is a stateless component. Of course, the component above is not
any different compared to a regular function. However, as we will see,
components do provide their own exclusive feature set.
## Stateless components life-cycle
When `live_component` is called, the following callbacks will be invoked
in the component:
mount(socket) -> update(assigns, socket) -> render(assigns)
First `c:mount/1` is called only with the socket. `mount/1` can be used
to set any initial state. Then `c:update/2` is invoked with all of the
assigns given to `live_component/2`. The default implementation of
`c:update/2` simply merges all assigns into the socket. Then, after the
component is updated, `c:render/1` is called with all assigns.
A stateless component is always mounted, updated, and rendered whenever
the parent template changes. That's why they are stateless: no state
is kept after the component.
However, any component can be made stateful by passing an `:id` assign.
## Stateful components life-cycle
A stateful component is a component that receives an `:id` on `live_component/2`:
<%= live_component @socket, HeroComponent, id: :hero, content: @content %>
Stateful components are identified by the component module and their ID.
Therefore, two different component modules with the same ID are different
components. This means we can often tie the component ID to some application
based ID:
<%= live_component @socket, UserComponent, id: @user.id, user: @user %>
Also note the given `:id` is not necessarily used as the DOM ID. If you
want to set a DOM ID, it is your responsibility to set it when rendering:
defmodule UserComponent do
use Phoenix.LiveComponent
def render(assigns) do
~L\"""
<div id="user-<%= @id %>" class="user"><%= @user.name %></div>
\"""
end
end
In stateful components, `c:mount/1` is called only once, when the
component is first rendered. Then for each rendering, the optional
`c:preload/1` and `c:update/2` callbacks are called before `c:render/1`.
## Targeting Component Events
Stateful components can also implement the `c:handle_event/3` callback
that works exactly the same as in LiveView. For a client event to
reach a component, the tag must be annotated with a `phx-target`
annotation which must be a query selector to an element inside the
component. For example, if the `UserComponent` above is started with
the `:id` of `13`, it will have the DOM ID of `user-13`. Using a query
selector, we can send an event to it with:
<a href="#" phx-click="say_hello" phx-target="#user-13">
Say hello!
</a>
Then `c:handle_event/3` will be called with the "say_hello" event.
When `c:handle_event/3` is called for a component, only the diff of
the component is sent to the client, making them extremely efficient.
Any valid query selector for `phx-target` is supported, provided the
matched nodes are children of a LiveView or LiveComponent, for example
to send the `close` event to multiple components:
<a href="#" phx-click="close" phx-target="#modal, #sidebar">
Dismiss
</a>
### Preloading and update
Every time a stateful component is rendered, both `c:preload/1` and
`c:update/2` are called. To understand why both callbacks are necessary,
imagine that you implement a component and the component needs to load
some state from the database. For example:
<%= live_component @socket, UserComponent, id: user_id %>
A possible implementation would be to load the user on the `c:update/2`
callback:
def update(assigns, socket) do
user = Repo.get! User, assigns.id
{:ok, assign(socket, :user, user)}
end
However, the issue with said approach is that, if you are rendering
multiple user components in the same page, you have a N+1 query problem.
The `c:preload/1` callback helps address this problem as it is invoked
with a list of assigns for all components of the same type. For example,
instead of implementing `c:update/2` as above, one could implement:
def preload(list_of_assigns) do
list_of_ids = Enum.map(list_of_assigns, & &1.id)
users =
from(u in User, where: u.id in ^list_of_ids, select: {u.id, u})
|> Repo.all()
|> Map.new()
Enum.map(list_of_assigns, fn assigns ->
Map.put(assigns, :user, users[assigns.id])
end)
end
Now only a single query to the database will be made. In fact, the
preloading algorithm is a breadth-first tree traversal, which means
that even for nested components, the amount of queries are kept to
a minimum.
Finally, note that `c:preload/1` must return an updated `list_of_assigns`,
keeping the assigns in the same order as they were given.
## Managing state
Now that we have learned how to define and use components, as well as
how to use `c:preload/1` as a data loading optimization, it is important
to talk about how to manage state in components.
Generally speaking, you want to avoid both the parent LiveView and the
LiveComponent working on two different copies of the state. Instead, you
should assume only one of them to be the source of truth. Let's discuss
these approaches in detail.
Imagine that the scenario we will explore is that we have a LiveView
representing a board, where each card in the board is a separate component.
Each card has a form that allows to update the form title directly in the
component. We will see how to organize the data flow keeping either the
view or the component as the source of truth.
### LiveView as the source of truth
If the LiveView is the source of truth, the LiveView will be responsible
for fetching all of the cards in a board. Then it will call `live_component/2`
for each card, passing the card struct as argument to CardComponent:
<%= for card <- @cards do %>
<%= live_component CardComponent, card: card, board_id: @id %>
<% end %>
Now, when the user submits a form inside the CardComponent to update the
card, `CardComponent.handle_event/3` will be triggered. However, if the
update succeeds, you must not change the card struct inside the component.
If you do so, the card struct in the component will get out of sync with
the LiveView. Since the LiveView is the source of truth, we should instead
tell the LiveView the card was updated.
Luckily, because the component and the view run in the same process,
sending a message from the component to the parent LiveView is as simple
as sending a message to self:
defmodule CardComponent do
...
def handle_event("update_title", %{"title" => title}, socket) do
send self(), {:updated_card, %{socket.assigns.card | title: title}}
{:noreply, socket}
end
end
The LiveView can receive this event using `handle_info`:
defmodule BoardView do
...
def handle_info({:updated_card, card}, socket) do
# update the list of cards in the socket
{:noreply, updated_socket}
end
end
As the list of cards in the parent socket was updated, the parent
will be re-rendered, sending the updated card to the component.
So in the end, the component does get the updated card, but always
driven from the parent.
Alternatively, instead of having the component directly send a
message to the parent, the component could broadcast the update
using `Phoenix.PubSub`. Such as:
defmodule CardComponent do
...
def handle_event("update_title", %{"title" => title}, socket) do
message = {:updated_card, %{socket.assigns.card | title: title}}
Phoenix.PubSub.broadcast(MyApp.PubSub, board_topic(socket), message)
{:noreply, socket}
end
defp board_topic(socket) do
"board:" <> socket.assigns.board_id
end
end
As long as the parent LiveView subscribes to the "board:ID" topic,
it will receive updates. The advantage of using PubSub is that we get
distributed updates out of the box. Now if any user connected to the
board changes a card, all other users will see the change.
### LiveComponent as the source of truth
If the component is the source of truth, then the LiveView must no
longer fetch all of the cards structs from the database. Instead,
the view must only fetch all of the card ids and render the component
only by passing the IDs:
<%= for card_id <- @card_ids do %>
<%= live_component CardComponent, card_id: card_id, board_id: @id %>
<% end %>
Now, each CardComponent loads their own card. Of course, doing so per
card would be expensive and lead to N queries, where N is the number
of components, so we must use the `c:preload/1` callback to make it
efficient.
Once all card components are started, they can fully manage each
card as a whole, without concerning themselves with the parent LiveView.
However, note that components do not have a `handle_info/2` callback.
Therefore, if you want to track distributed changes on a card, you
must have the parent LiveView receive those events and redirect them
to the appropriate card. For example, assuming card updates are sent
to the "board:ID" topic, and that the board LiveView is subscribed to
said topic, one could do:
def handle_info({:updated_card, card}, socket) do
send_update CardComponent, id: card.id, board_id: socket.assigns.id
{:noreply, socket}
end
With `send_update`, the CardComponent given by `id` will be invoked,
triggering both preload and update callbacks, which will load the
most up to date data from the database.
## Live component blocks
When `live_component` is invoked, it is also possible to pass a `do/end`
block:
<%= live_component @socket, GridComponent, entries: @entries do %>
New entry: <%= @entry %>
<% end %>
The `do/end` will be available as an anonymous function in an assign named
`@inner_content`. The anonymous function must be invoked passing a new set
of assigns that will be merged into the user assigns. For example, the grid
component above could be implemented as:
defmodule GridComponent do
use Phoenix.LiveComponent
def render(assigns) do
~L\"""
<div class="grid">
<%= for entry <- @entries do %>
<div class="column">
<%= @inner_content.(entry: entry) %>
</div>
<% end %>
</div>
\"""
end
end
Where the `:entry` assign was injected into the `do/end` block.
The approach above is the preferred one when passing blocks to `do/end`.
However, if you are outside of a .leex template and you want to invoke a
component passing `do/end` blocks, you will have to explicitly handle the
assigns by giving it a clause:
live_component @socket, GridComponent, entries: @entries do
new_assigns -> "New entry: " <> new_assigns[:entry]
end
## Live links and live redirects
A template rendered inside a component can use `live_link` calls. The
`live_link` is always handled by the parent `LiveView`, as components
do not provide `handle_params`. `live_redirect` from inside a component
is not currently supported. For such, you must send a message to the
LiveView itself, as mentioned above, which may then redirect.
## Limitations
Components must only contain HTML tags at their root. At least one HTML
tag must be present. It is not possible to have components that render
only text or text mixed with tags at the root.
Another limitation of components is that they must always be change
tracked. For example, if you render a component inside `form_for`, like
this:
<%= form_for @changeset, "#", fn f -> %>
<%= live_component @socket, SomeComponent, f: f %>
<% end %>
The component ends up enclosed by the form markup, where LiveView
cannot track it. In such cases, you may receive an error such as:
** (ArgumentError) cannot convert component SomeComponent to HTML.
A component must always be returned directly as part of a LiveView template
In this particular case, this can be addressed by using the `form_for`
variant without anonymous functions:
<%= f = form_for @changeset, "#" %>
<%= live_component @socket, SomeComponent, f: f %>
</form>
This issue can also happen with other helpers, such as `content_tag`:
<%= content_tag :div do %>
<%= live_component @socket, SomeComponent, f: f %>
<% end %>
In this case, the solution is to not use `content_tag` and rely on LiveEEx
to build the markup.
"""
alias Phoenix.LiveView.Socket
defmacro __using__(_) do
quote do
import Phoenix.LiveView
import Phoenix.LiveView.Helpers
@behaviour unquote(__MODULE__)
@doc false
def __live__, do: %{kind: :component, module: __MODULE__}
end
end
@callback mount(socket :: Socket.t()) ::
{:ok, Socket.t()} | {:ok, Socket.t(), keyword()}
@callback preload(list_of_assigns :: [Socket.assigns()]) ::
list_of_assigns :: [Socket.assigns()]
@callback update(assigns :: Socket.assigns(), socket :: Socket.t()) ::
{:ok, Socket.t()}
@callback render(assigns :: Socket.assigns()) :: Phoenix.LiveView.Rendered.t()
@callback handle_event(
event :: binary,
unsigned_params :: Socket.unsigned_params(),
socket :: Socket.t()
) ::
{:noreply, Socket.t()}
@optional_callbacks mount: 1, preload: 1, update: 2, handle_event: 3
end
|
lib/phoenix_live_component.ex
| 0.890758
| 0.700741
|
phoenix_live_component.ex
|
starcoder
|
defmodule EventStore.Streams.Stream do
@moduledoc false
alias EventStore.{EventData, RecordedEvent, Storage}
alias EventStore.Streams.Stream
defstruct [:stream_uuid, :stream_id, stream_version: 0]
# Appends `events` to the stream identified by `stream_uuid`, creating the
# stream first when needed. The `:serializer` option is popped before the
# remaining options are handed to storage. Non-matching results from
# stream_info/3 or prepare_stream/4 fall through unchanged.
def append_to_stream(conn, stream_uuid, expected_version, events, opts \\ []) do
  {serializer, opts} = Keyword.pop(opts, :serializer)

  with {:ok, stream} <- stream_info(conn, stream_uuid, opts),
       {:ok, stream} <- prepare_stream(conn, expected_version, stream, opts) do
    do_append_to_storage(conn, events, stream, serializer, opts)
  end
end
# Links existing events (or their ids) into the stream identified by
# `stream_uuid`. The `:serializer` option is popped (and ignored) so the
# remaining options match what storage expects. Non-matching results from
# stream_info/3 or prepare_stream/4 fall through unchanged.
def link_to_stream(conn, stream_uuid, expected_version, events_or_event_ids, opts \\ []) do
  {_serializer, opts} = Keyword.pop(opts, :serializer)

  with {:ok, stream} <- stream_info(conn, stream_uuid, opts),
       {:ok, stream} <- prepare_stream(conn, expected_version, stream, opts) do
    do_link_to_storage(conn, events_or_event_ids, stream, opts)
  end
end
def read_stream_forward(conn, stream_uuid, start_version, count, opts \\ []) do
{serializer, opts} = Keyword.pop(opts, :serializer)
with {:ok, stream} <- stream_info(conn, stream_uuid, opts) do
read_storage_forward(conn, start_version, count, stream, serializer, opts)
end
end
def stream_forward(conn, stream_uuid, start_version, read_batch_size, opts \\ []) do
{serializer, opts} = Keyword.pop(opts, :serializer)
with {:ok, stream} <- stream_info(conn, stream_uuid, opts) do
stream_storage_forward(conn, start_version, read_batch_size, stream, serializer, opts)
end
end
def start_from(conn, stream_uuid, start_from, opts \\ [])
def start_from(_conn, _stream_uuid, :origin, _opts), do: {:ok, 0}
def start_from(conn, stream_uuid, :current, opts),
do: stream_version(conn, stream_uuid, opts)
def start_from(_conn, _stream_uuid, start_from, _opts)
when is_integer(start_from),
do: {:ok, start_from}
def start_from(_conn, _stream_uuid, _start_from, _opts),
do: {:error, :invalid_start_from}
def stream_version(conn, stream_uuid, opts \\ []) do
with {:ok, _stream_id, stream_version} <- Storage.stream_info(conn, stream_uuid, opts) do
{:ok, stream_version}
end
end
defp stream_info(conn, stream_uuid, opts) do
with {:ok, stream_id, stream_version} <- Storage.stream_info(conn, stream_uuid, opts) do
stream = %Stream{
stream_uuid: stream_uuid,
stream_id: stream_id,
stream_version: stream_version
}
{:ok, stream}
end
end
defp prepare_stream(
conn,
expected_version,
%Stream{stream_uuid: stream_uuid, stream_id: stream_id, stream_version: 0} = state,
opts
)
when is_nil(stream_id) and expected_version in [0, :any_version, :no_stream] do
with {:ok, stream_id} <- Storage.create_stream(conn, stream_uuid, opts) do
{:ok, %Stream{state | stream_id: stream_id}}
end
end
defp prepare_stream(
_conn,
expected_version,
%Stream{stream_id: stream_id, stream_version: stream_version} = stream,
_opts
)
when not is_nil(stream_id) and
expected_version in [stream_version, :any_version, :stream_exists] do
{:ok, stream}
end
defp prepare_stream(
_conn,
expected_version,
%Stream{stream_id: stream_id, stream_version: 0} = stream,
_opts
)
when not is_nil(stream_id) and expected_version == :no_stream do
{:ok, stream}
end
defp prepare_stream(
_conn,
expected_version,
%Stream{stream_id: stream_id, stream_version: 0},
_opts
)
when is_nil(stream_id) and expected_version == :stream_exists do
{:error, :stream_does_not_exist}
end
defp prepare_stream(
_conn,
expected_version,
%Stream{stream_id: stream_id, stream_version: stream_version},
_opts
)
when not is_nil(stream_id) and stream_version != 0 and expected_version == :no_stream do
{:error, :stream_exists}
end
defp prepare_stream(_conn, _expected_version, _state, _opts),
do: {:error, :wrong_expected_version}
defp do_append_to_storage(conn, events, %Stream{} = stream, serializer, opts) do
prepared_events = prepare_events(events, stream, serializer)
write_to_stream(conn, prepared_events, stream, opts)
end
defp prepare_events(events, %Stream{} = stream, serializer) do
%Stream{stream_uuid: stream_uuid, stream_version: stream_version} = stream
events
|> Enum.map(&map_to_recorded_event(&1, utc_now(), serializer))
|> Enum.with_index(1)
|> Enum.map(fn {recorded_event, index} ->
%RecordedEvent{
recorded_event
| stream_uuid: stream_uuid,
stream_version: stream_version + index
}
end)
end
defp map_to_recorded_event(
%EventData{
data: %{__struct__: event_type},
event_type: nil
} = event,
created_at,
serializer
) do
%{event | event_type: Atom.to_string(event_type)}
|> map_to_recorded_event(created_at, serializer)
end
defp map_to_recorded_event(%EventData{} = event_data, created_at, serializer) do
%EventData{
causation_id: causation_id,
correlation_id: correlation_id,
event_type: event_type,
data: data,
metadata: metadata
} = event_data
%RecordedEvent{
event_id: UUID.uuid4(),
causation_id: causation_id,
correlation_id: correlation_id,
event_type: event_type,
data: serializer.serialize(data),
metadata: serializer.serialize(metadata),
created_at: created_at
}
end
defp do_link_to_storage(conn, events_or_event_ids, %Stream{stream_id: stream_id}, opts) do
Storage.link_to_stream(
conn,
stream_id,
Enum.map(events_or_event_ids, &extract_event_id/1),
opts
)
end
defp extract_event_id(%RecordedEvent{event_id: event_id}), do: event_id
defp extract_event_id(event_id) when is_bitstring(event_id), do: event_id
defp extract_event_id(invalid) do
raise ArgumentError, message: "Invalid event id, expected a UUID but got: #{inspect(invalid)}"
end
# Returns the current date time in UTC.
defp utc_now, do: DateTime.utc_now()
defp write_to_stream(conn, prepared_events, %Stream{} = stream, opts) do
%Stream{stream_id: stream_id} = stream
Storage.append_to_stream(conn, stream_id, prepared_events, opts)
end
defp read_storage_forward(
_conn,
_start_version,
_count,
%Stream{stream_id: stream_id},
_serializer,
_opts
)
when is_nil(stream_id),
do: {:error, :stream_not_found}
defp read_storage_forward(conn, start_version, count, %Stream{} = stream, serializer, opts) do
%Stream{stream_id: stream_id} = stream
case Storage.read_stream_forward(conn, stream_id, start_version, count, opts) do
{:ok, recorded_events} ->
deserialized_events = deserialize_recorded_events(recorded_events, serializer)
{:ok, deserialized_events}
{:error, _error} = reply ->
reply
end
end
defp stream_storage_forward(
_conn,
_start_version,
_read_batch_size,
%Stream{stream_id: stream_id},
_serializer,
_opts
)
when is_nil(stream_id),
do: {:error, :stream_not_found}
defp stream_storage_forward(conn, 0, read_batch_size, stream, serializer, opts),
do: stream_storage_forward(conn, 1, read_batch_size, stream, serializer, opts)
defp stream_storage_forward(
conn,
start_version,
read_batch_size,
%Stream{} = stream,
serializer,
opts
) do
Elixir.Stream.resource(
fn -> start_version end,
fn next_version ->
case read_storage_forward(conn, next_version, read_batch_size, stream, serializer, opts) do
{:ok, []} -> {:halt, next_version}
{:ok, events} -> {events, next_version + length(events)}
end
end,
fn _ -> :ok end
)
end
defp deserialize_recorded_events(recorded_events, serializer),
do: Enum.map(recorded_events, &RecordedEvent.deserialize(&1, serializer))
end
|
lib/event_store/streams/stream.ex
| 0.748076
| 0.417984
|
stream.ex
|
starcoder
|
defmodule Blinkchain do
alias Blinkchain.{
Color,
HAL,
Point
}
@moduledoc """
This module defines the canvas-based drawing API for controlling one or more
strips or arrays of NeoPixel-compatible RGB or RGBW LEDs. The virtual drawing
surface consists of a single rectangular plane where each NeoPixel can be
mapped onto particular coordinates on the surface. These assignments can be
"sparse" such that not every location on the virtual surface is associated
with a physical NeoPixel.
> NOTE: In the current implementation, when drawing to points on the virtual
> canvas that do not have physical NeoPixels assigned, the data is lost, such
> that subsequent calls to `copy/4` or `copy_blit/4` will result in these
> pixels behaving as if they had all color components set to `0`. This may
> change in a future release, such that the virtual drawing surface would be
> persistent, even if the given pixels are not associated with physical
> NeoPixels, to allow for "off-screen" sprite maps for use with
> `copy_blit/4`. In the meantime, this could be accomplished by configuring
> some extra pixels at the end of the chain or on a second channel that don't
> actually exist.
The Raspberry Pi supports two simultaneous Pulse-Width Modulation (PWM)
channels, which are used by `Blinkchain` to drive an arbitrary-length
chain of NeoPixels. Each chain must consist of a single type of device (i.e.
all devices in the chain must have the same number and order of color
components). Some drawing commands operate on the entire channel (e.g.
`set_brightness/2` and `set_gamma/2`), but otherwise, the position of the
pixels within the drawing surface is independent of the channel they're
driven by. A single drawing command can apply to one or both channels,
depending on how the channel topology has been mapped to the virtual
drawing surface.
"""
@typedoc "which PWM channel to use (0 or 1)"
@type channel_number :: 0 | 1
@typedoc "an RGB or RGBW color specification"
@type color ::
Color.t()
| {uint8, uint8, uint8}
| {uint8, uint8, uint8, uint8}
@typedoc "an X-Y point specification"
@type point ::
Point.t()
| {uint16, uint16}
@typedoc "unsigned 8-bit integer"
@type uint8 :: 0..255
@typedoc "unsigned 16-bit integer"
@type uint16 :: 0..65535
@doc """
This is used to scale the intensity of all pixels in a given channel by
`brightness/255`.
"""
@spec set_brightness(channel_number(), uint8()) ::
:ok
| {:error, :invalid, :channel}
| {:error, :invalid, :brightness}
def set_brightness(channel, brightness) do
with :ok <- validate_channel_number(channel),
:ok <- validate_uint8(brightness, :brightness),
do: GenServer.call(HAL, {:set_brightness, channel, brightness})
end
@doc """
Set the gamma curve to be used for the channel.
`gamma` is a list of 256 8-bit unsigned integers (one entry per possible
8-bit input value), which will be used as a look-up table to transform the
value of each color component for each pixel.
"""
@spec set_gamma(channel_number(), [uint8()]) ::
:ok
| {:error, :invalid, :channel}
| {:error, :invalid, :gamma}
def set_gamma(channel, gamma) do
with :ok <- validate_channel_number(channel),
:ok <- validate_gamma(gamma),
do: GenServer.call(HAL, {:set_gamma, channel, gamma})
end
@doc """
Set the color of the pixel at a given point on the virtual canvas.
"""
@spec set_pixel(point(), color()) ::
:ok
| {:error, :invalid, :point}
| {:error, :invalid, :color}
def set_pixel(%Point{} = point, %Color{} = color) do
with :ok <- validate_point(point),
:ok <- validate_color(color),
do: GenServer.call(HAL, {:set_pixel, point, color})
end
# Tuple forms are normalized to structs and re-dispatched to the clause above.
def set_pixel({x, y}, color), do: set_pixel(%Point{x: x, y: y}, color)
def set_pixel(point, {r, g, b}), do: set_pixel(point, %Color{r: r, g: g, b: b})
def set_pixel(point, {r, g, b, w}), do: set_pixel(point, %Color{r: r, g: g, b: b, w: w})
@doc """
Fill the region with `color`, starting at `origin` and extending to the right
by `width` pixels and down by `height` pixels.
"""
@spec fill(point(), uint16(), uint16(), color()) ::
:ok
| {:error, :invalid, :origin}
| {:error, :invalid, :width}
| {:error, :invalid, :height}
| {:error, :invalid, :color}
def fill(%Point{} = origin, width, height, %Color{} = color) do
with :ok <- validate_point(origin, :origin),
:ok <- validate_uint16(width, :width),
:ok <- validate_uint16(height, :height),
:ok <- validate_color(color),
do: GenServer.call(HAL, {:fill, origin, width, height, color})
end
def fill({x, y}, width, height, color), do: fill(%Point{x: x, y: y}, width, height, color)
def fill(origin, width, height, {r, g, b}), do: fill(origin, width, height, %Color{r: r, g: g, b: b})
def fill(origin, width, height, {r, g, b, w}), do: fill(origin, width, height, %Color{r: r, g: g, b: b, w: w})
@doc """
Copy the region of size `width` by `height` from `source` to `destination`.
"""
@spec copy(point(), point(), uint16(), uint16()) ::
:ok
| {:error, :invalid, :source}
| {:error, :invalid, :destination}
| {:error, :invalid, :width}
| {:error, :invalid, :height}
def copy(%Point{} = source, %Point{} = destination, width, height) do
with :ok <- validate_point(source, :source),
:ok <- validate_point(destination, :destination),
:ok <- validate_uint16(width, :width),
:ok <- validate_uint16(height, :height),
do: GenServer.call(HAL, {:copy, source, destination, width, height})
end
def copy({x, y}, destination, width, height), do: copy(%Point{x: x, y: y}, destination, width, height)
def copy(source, {x, y}, width, height), do: copy(source, %Point{x: x, y: y}, width, height)
@doc """
Copy the region of size `width` by `height` from `source` to `destination`,
ignoring pixels whose color components are all zero.
> Note: This is different than `copy/4` because it allows a simple
> transparency mask to be created by setting all of the color components to
> zero for the pixels that are intended to be transparent.
"""
@spec copy_blit(point(), point(), uint16(), uint16()) ::
:ok
| {:error, :invalid, :source}
| {:error, :invalid, :destination}
| {:error, :invalid, :width}
| {:error, :invalid, :height}
def copy_blit(%Point{} = source, %Point{} = destination, width, height) do
with :ok <- validate_point(source, :source),
:ok <- validate_point(destination, :destination),
:ok <- validate_uint16(width, :width),
:ok <- validate_uint16(height, :height),
do: GenServer.call(HAL, {:copy_blit, source, destination, width, height})
end
def copy_blit({x, y}, destination, width, height), do: copy_blit(%Point{x: x, y: y}, destination, width, height)
def copy_blit(source, {x, y}, width, height), do: copy_blit(source, %Point{x: x, y: y}, width, height)
@doc """
Copy the elements from the `data` list as pixel data, copying it to the
region of size `width` by `height` and origin of `destination`, ignoring
pixels whose color components are all zero.
`data` must be a list of `width` x `height` elements, where each element is
a `t:color/0`. Alternatively, `data` may be given as a binary of
`width * height * 4` bytes (4 bytes per pixel, in RGBW order).
> Note: Similar to `copy_blit/4`, this allows a simple transparency mask to
> be created by setting all of the color components to zero for the pixels
> that are intended to be transparent.
"""
@spec blit(point(), uint16(), uint16(), [color()]) ::
:ok
| {:error, :invalid, :destination}
| {:error, :invalid, :width}
| {:error, :invalid, :height}
| {:error, :invalid, :data}
def blit(%Point{} = destination, width, height, data) do
with :ok <- validate_point(destination, :destination),
:ok <- validate_uint16(width, :width),
:ok <- validate_uint16(height, :height),
:ok <- validate_data(data, width * height),
do: GenServer.call(HAL, {:blit, destination, width, height, normalize_data(data)})
end
def blit({x, y}, width, height, data), do: blit(%Point{x: x, y: y}, width, height, data)
@doc """
Render the current canvas state to the physical NeoPixels according to their
configured locations in the virtual canvas.
"""
@spec render() :: :ok
def render, do: GenServer.call(HAL, :render)
# Helpers
# Converts pixel data into a flat binary of 4 bytes per pixel (RGBW);
# binaries are assumed to already be in that form and pass through.
defp normalize_data(data) when is_binary(data), do: data
defp normalize_data(colors) when is_list(colors) do
colors
|> Enum.reduce(<<>>, fn color, acc -> acc <> normalize_color(color) end)
end
defp normalize_color(%Color{r: r, g: g, b: b, w: w}), do: <<r, g, b, w>>
# RGB tuples are padded with a zero white component.
defp normalize_color({r, g, b}), do: <<r, g, b, 0>>
defp normalize_color({r, g, b, w}), do: <<r, g, b, w>>
# Tagged variants wrap the plain validators with an `{:error, :invalid, tag}` reply.
defp validate_uint8(val, tag) do
val
|> validate_uint8()
|> case do
:ok -> :ok
:error -> {:error, :invalid, tag}
end
end
defp validate_uint16(val, tag) do
val
|> validate_uint16()
|> case do
:ok -> :ok
:error -> {:error, :invalid, tag}
end
end
defp validate_uint8(val) when val in 0..255, do: :ok
defp validate_uint8(_), do: :error
defp validate_uint16(val) when val in 0..65535, do: :ok
defp validate_uint16(_), do: :error
defp validate_channel_number(val) when val in 0..1, do: :ok
defp validate_channel_number(_), do: {:error, :invalid, :channel}
# A gamma table must contain exactly 256 entries (one per 8-bit input value).
defp validate_gamma(gamma) when is_list(gamma) and length(gamma) == 256 do
gamma
|> Enum.all?(&(validate_uint8(&1) == :ok))
|> case do
true -> :ok
false -> {:error, :invalid, :gamma}
end
end
defp validate_gamma(_), do: {:error, :invalid, :gamma}
defp validate_point(point, tag \\ :point)
defp validate_point(%Point{x: x, y: y}, _tag) when x in 0..65535 and y in 0..65535, do: :ok
defp validate_point(_point, tag), do: {:error, :invalid, tag}
defp validate_color({r, g, b}) when r in 0..255 and g in 0..255 and b in 0..255,
do: :ok
defp validate_color({r, g, b, w}) when r in 0..255 and g in 0..255 and b in 0..255 and w in 0..255,
do: :ok
defp validate_color(%Color{r: r, g: g, b: b, w: w}) when r in 0..255 and g in 0..255 and b in 0..255 and w in 0..255,
do: :ok
defp validate_color(_), do: {:error, :invalid, :color}
defp validate_data(data, expected_length) when is_list(data) and length(data) == expected_length do
data
|> Enum.all?(&(validate_color(&1) == :ok))
|> case do
true -> :ok
false -> {:error, :invalid, :data}
end
end
# Binary data must provide exactly 4 bytes (RGBW) per expected pixel.
defp validate_data(data, expected_length) when is_binary(data) and byte_size(data) == expected_length * 4, do: :ok
defp validate_data(_, _), do: {:error, :invalid, :data}
end
|
lib/blinkchain.ex
| 0.937132
| 0.753603
|
blinkchain.ex
|
starcoder
|
defmodule Opencensus.Absinthe do
  @moduledoc """
  Extends `Absinthe` to automatically create `opencensus` spans. Designed to work with whatever
  is producing spans upstream, e.g. `Opencensus.Plug`.
  ## Installation
  Assuming you're using `Absinthe.Plug`:
  Add `opencensus_absinthe` to your `deps` in `mix.exs`, using a tighter version constraint than:
  ```elixir
  {:absinthe_plug, ">= 0.0.0"},
  {:opencensus_absinthe, ">= 0.0.0"},
  ```
  Add a `:pipeline` to your `t:Absinthe.Plug.opts/0` to have it call
  `Opencensus.Absinthe.Plug.traced_pipeline/2`. If you're using `Phoenix.Router.forward/4`, for
  example:
  ``` elixir
  forward(
    path,
    Absinthe.Plug,
    # ... existing config ...
    pipeline: {Opencensus.Absinthe.Plug, :traced_pipeline}
  )
  ```
  If you already have a `pipeline`, you can define your own and call both to insert their phases.
  To work with `ApolloTracing`, for example:
  ```elixir
  def your_custom_pipeline(config, pipeline_opts \\\\ []) do
    config
    |> Absinthe.Plug.default_pipeline(pipeline_opts)
    |> ApolloTracing.Pipeline.add_phases()
    |> Opencensus.Absinthe.add_phases()
  end
  ```
  Worst case, you'll need to copy the code from the current `pipeline` target and add a call to
  `Opencensus.Absinthe.add_phases/1` as above.
  If you're using [`Dataloader`][dataloader], you will want to use the provided
  `Opencensus.Absinthe.Middleware.Dataloader` Absinthe plugin module in place of
  the default one for tracing batched resolutions. See the [module
  docs][internal_dataloader] for details.
  [dataloader]: https://github.com/absinthe-graphql/dataloader
  [internal_dataloader]: https://hexdocs.pm/opencensus_absinthe/Opencensus.Absinthe.Middleware.Dataloader.html
  If you are using `DocumentProvider` modules, you will need to integrate into
  their `pipeline/1` callback as well. If your `DocumentProvider` modules do not
  yet override this callback, then this is fairly straightforward:
  ```elixir
  def pipeline(%{pipeline: as_configured}) do
    as_configured
    |> Absinthe.Pipeline.from(__absinthe_plug_doc__(:remaining_pipeline))
    |> Opencensus.Absinthe.add_schema_phases()
  end
  ```
  If you already override the `pipeline/1` callback, just append this to the end:
  ```elixir
  # ... result
  |> Opencensus.Absinthe.add_schema_phases()
  ```
  """

  alias Absinthe.Middleware
  alias Absinthe.Type

  @doc """
  Adds the tracing phases to a blueprint (document) pipeline.

  ```elixir
  pipeline =
    Absinthe.Pipeline.for_document(schema, pipeline_opts)
    |> Opencensus.Absinthe.add_phases()
  ```
  """
  @spec add_phases(Absinthe.Pipeline.t()) :: Absinthe.Pipeline.t()
  def add_phases(pipeline) do
    insert_tracing_phases(pipeline, Absinthe.Phase.Blueprint, Opencensus.Absinthe.Phase.Push)
  end

  @doc """
  Adds the tracing phases to a schema pipeline.

  ```elixir
  pipeline =
    Absinthe.Pipeline.for_document(schema, pipeline_opts)
    |> Opencensus.Absinthe.add_schema_phases()
  ```
  """
  @spec add_schema_phases(Absinthe.Pipeline.t()) :: Absinthe.Pipeline.t()
  def add_schema_phases(pipeline) do
    insert_tracing_phases(pipeline, Absinthe.Phase.Schema, Opencensus.Absinthe.Phase.SchemaPush)
  end

  @doc """
  Prepends the tracing middleware to a field's middleware chain.

  The middleware is added only when the field has `trace` or
  `absinthe_telemetry` set in its metadata, e.g.:

  ```elixir
  field :users, list_of(:user), meta: [trace: true] do
    middleware(Middleware.Authorize, "superadmin")
    resolve(&Resolvers.Account.all_users/2)
  end
  ```
  """
  @spec middleware(
          [Middleware.spec(), ...],
          Type.Field.t(),
          Type.Object.t()
        ) :: [Middleware.spec(), ...]
  def middleware(middleware, field, _object) do
    if traced?(field) do
      [{Opencensus.Absinthe.Middleware, field: field} | middleware]
    else
      middleware
    end
  end

  # Inserts the given push phase after `anchor` and the pop phase after
  # the document result phase.
  @spec insert_tracing_phases(Absinthe.Pipeline.t(), module(), module()) ::
          Absinthe.Pipeline.t()
  defp insert_tracing_phases(pipeline, anchor, push_phase) do
    pipeline
    |> Absinthe.Pipeline.insert_after(anchor, push_phase)
    |> Absinthe.Pipeline.insert_after(
      Absinthe.Phase.Document.Result,
      Opencensus.Absinthe.Phase.Pop
    )
  end

  # True when the field's metadata opts into tracing.
  @spec traced?(Type.Field.t()) :: boolean()
  defp traced?(field) do
    Type.meta(field, :trace) == true or Type.meta(field, :absinthe_telemetry) == true
  end
end
|
lib/opencensus/absinthe.ex
| 0.928466
| 0.836555
|
absinthe.ex
|
starcoder
|
if Code.ensure_loaded?(Plug) do
  defmodule Authex.Plug.Authentication do
    @moduledoc """
    A plug to handle authentication.
    This plug must be passed an auth module in which to authenticate with. Otherwise,
    it will raise an `Authex.Error`.
    With it, we can easily authenticate a Phoenix controller:
        defmodule MyAppWeb.MyController do
          use MyAppWeb, :controller
          plug Authex.Plug.Authentication, with: MyApp.Auth
          def show(conn, _params) do
            with {:ok, %{id: id}} <- MyApp.Auth.current_user(conn),
                 {:ok, user} <- MyApp.Users.get(id)
            do
              render(conn, "show.json", user: user)
            end
          end
        end
    The plug looks for the `Authorization: Bearer mytoken` header by default. It
    will then verify and covert out token into a resource using the provided auth
    module.
    We can then access our current resource from the conn using `Authex.current_resource/1`.
    By default, if authentication fails, the plug sends the conn to the `Authex.Plug.Unauthorized`
    plug. This plug will put a `401` status into the conn with the body `"Unauthorized"`.
    We can configure our own unauthorized plug by passing it as an option to this plug.
    ## Options
    * `:with` - The auth module that will be used for verification and token conversion.
    * `:unauthorized` - The plug to call when the token is invalid - defaults to `Authex.Plug.Unauthorized`.
    * `:header` - The header to extract the token from - defaults to `"authorization"`.
    """
    @behaviour Plug

    import Plug.Conn, only: [get_req_header: 2, put_private: 3]

    @type option :: {:with, Authex.t()} | {:unauthorized, module()} | {:header, binary()}
    @type options :: [option()]

    @doc false
    @impl Plug
    def init(opts \\ []) do
      _ = verify_options(opts)
      build_options(opts)
    end

    @doc false
    @impl Plug
    def call(conn, opts) do
      case authenticate(conn, opts) do
        {:ok, conn} -> conn
        _failure -> unauthorized(conn, opts)
      end
    end

    # Runs the full authentication chain: extract the compact token, verify
    # it, store it on the conn, then resolve and store the current resource.
    # Returns `{:ok, conn}` on success; anything else means failure.
    defp authenticate(conn, opts) do
      with {:ok, compact} <- fetch_header_token(conn, opts),
           {:ok, token} <- Authex.verify(opts.with, compact) do
        conn
        |> put_private(:authex_token, token)
        |> put_current_resource(token, opts)
      end
    end

    # Extracts the compact token from the configured request header, using
    # the last space-separated part (e.g. the token of "Bearer <token>").
    defp fetch_header_token(conn, opts) do
      case get_req_header(conn, opts.header) do
        [value] -> {:ok, value |> String.split() |> List.last()}
        _other -> :error
      end
    end

    # Converts the verified token into a resource and stores it on the conn.
    defp put_current_resource(conn, token, opts) do
      case Authex.from_token(opts.with, token) do
        {:ok, resource} -> {:ok, put_private(conn, :authex_resource, resource)}
        {:error, _reason} -> :error
      end
    end

    # Delegates to the configured unauthorized plug. As in the original
    # implementation, the plug module is looked up again on the opts returned
    # by `init/1`, so a custom unauthorized plug's `init/1` must preserve the
    # `:unauthorized` key.
    defp unauthorized(conn, opts) do
      opts = opts.unauthorized.init(opts)
      opts.unauthorized.call(conn, opts)
    end

    # Raises unless the required `:with` option was provided.
    defp verify_options(opts) do
      if Keyword.has_key?(opts, :with) do
        :ok
      else
        raise Authex.Error, "Auth module missing. Please pass an auth module using the :with key."
      end
    end

    # Normalizes the keyword options into the map used at call time.
    defp build_options(opts) do
      %{
        with: Keyword.fetch!(opts, :with),
        header: Keyword.get(opts, :header, "authorization"),
        unauthorized: Keyword.get(opts, :unauthorized, Authex.Plug.Unauthorized)
      }
    end
  end
end
|
lib/authex/plug/authentication.ex
| 0.836421
| 0.402011
|
authentication.ex
|
starcoder
|
defmodule AOC.Day2.Intcode do
  @moduledoc false

  # Program memory: a map of address => value built from the program input.
  @type memory :: %{integer => integer}

  @doc """
  Solves part 1: restores the "1202 program alarm" state (noun 12, verb 2)
  and returns the value left at address 0 after the program halts.
  """
  def part1(path) do
    stream_input(path)
    |> input_to_map()
    |> update(1, 12)
    |> update(2, 2)
    |> compute()
  end

  @doc """
  Solves part 2: finds the `{noun, verb}` pair (each in `0..99`) that makes
  the program produce `output`, then returns `100 * noun + verb`.
  """
  def part2(path, output) do
    {noun, verb} =
      stream_input(path)
      |> input_to_map()
      |> brute_force(output, 0, 0)

    100 * noun + verb
  end

  @doc """
  Reads the program file at `path` and splits it into a list of integer
  strings.
  """
  def stream_input(path) do
    File.read!(path)
    |> String.trim()
    |> String.split(",")
  end

  @doc """
  Converts a list of integer strings into the memory map, keyed by
  zero-based position.
  """
  @spec input_to_map([String.t()]) :: memory
  def input_to_map(input) do
    input
    |> Stream.with_index()
    |> Stream.map(fn {value, index} -> {index, String.to_integer(value)} end)
    |> Map.new()
  end

  # Exhaustively searches noun/verb pairs in 0..99 for the given output.
  #
  # The bounds use `< 99` so the search stops at 99; the previous `<= 99`
  # comparisons recursed one step further and evaluated the out-of-range
  # values noun = 100 and verb = 100.
  @spec brute_force(memory, integer, integer, integer) :: {integer, integer} | :error
  defp brute_force(memory, output, noun, verb) do
    result =
      memory
      |> update(1, noun)
      |> update(2, verb)
      |> compute()

    cond do
      output == result -> {noun, verb}
      verb < 99 -> brute_force(memory, output, noun, verb + 1)
      noun < 99 -> brute_force(memory, output, noun + 1, 0)
      true -> :error
    end
  end

  @doc """
  Runs the Intcode program until opcode 99 (or an unknown opcode) is
  reached and returns the value at address 0.
  """
  @spec compute(memory) :: integer
  def compute(program) do
    # The range only bounds the iteration count; a well-formed program
    # always halts earlier via opcode 99 (or an unknown opcode).
    0..map_size(program)
    |> Enum.reduce_while({program, 0}, fn _i, {memory, instruction_pointer} ->
      with {op, num_params} <- instruction(memory, instruction_pointer),
           false <- is_atom(op),
           params <- read_params(memory, instruction_pointer, num_params),
           result <- apply(op, [memory | params]),
           false <- is_atom(result) do
        {:cont, {result, instruction_pointer + num_params}}
      else
        # Reached on `:error` (unknown opcode) or `:terminate` (opcode 99).
        _ -> {:halt, read(memory, 0)}
      end
    end)
  end

  @doc """
  Reads the parameter values following the opcode at `instruction_pointer`.

  `num_params` is the handler's arity, which counts the leading `memory`
  argument, so the instruction supplies `num_params - 1` parameters; the
  subtraction by 2 combined with the inclusive `0..n` range reads exactly
  that many values.
  """
  @spec read_params(memory, integer, integer) :: list(integer)
  def read_params(memory, instruction_pointer, num_params) do
    num_params = num_params - 2

    if num_params > 0 do
      Enum.map(0..num_params, fn i -> read(memory, i + instruction_pointer + 1) end)
    else
      []
    end
  end

  @doc """
  Reads the value at `address` (`nil` when the address is absent).
  """
  @spec read(memory, integer) :: integer
  def read(memory, address) do
    Map.get(memory, address)
  end

  @doc """
  Writes `value` at `address`. Raises `KeyError` if the address does not
  already exist, since Intcode programs only write within their memory.
  """
  @spec update(memory, integer, integer) :: memory
  def update(memory, address, value) do
    %{memory | address => value}
  end

  @doc """
  Decodes the opcode at `instruction_pointer` into a `{handler, arity}`
  pair, or `:error` for an unknown opcode.
  """
  @spec instruction(memory, integer) :: {(... -> memory), pos_integer} | :error
  def instruction(memory, instruction_pointer) do
    case read(memory, instruction_pointer) do
      1 -> with_arity(&add/4)
      2 -> with_arity(&multiply/4)
      99 -> with_arity(&terminate/1)
      _unknown -> :error
    end
  end

  # Pairs a handler function with its arity (used as the instruction width).
  defp with_arity(func) do
    {:arity, arity} = Function.info(func, :arity)
    {func, arity}
  end

  @doc """
  Opcode 1: stores the sum of the values at `param1` and `param2` into
  `param3`.
  """
  @spec add(memory, integer, integer, integer) :: memory
  def add(memory, param1, param2, param3) do
    value = read(memory, param1) + read(memory, param2)
    update(memory, param3, value)
  end

  @doc """
  Opcode 2: stores the product of the values at `param1` and `param2` into
  `param3`.
  """
  @spec multiply(memory, integer, integer, integer) :: memory
  def multiply(memory, param1, param2, param3) do
    value = read(memory, param1) * read(memory, param2)
    update(memory, param3, value)
  end

  @doc """
  Opcode 99: signals the interpreter to halt.
  """
  @spec terminate(memory) :: :terminate
  def terminate(_memory) do
    :terminate
  end
end
|
aoc-2019/lib/aoc/day2/intcode.ex
| 0.826852
| 0.500854
|
intcode.ex
|
starcoder
|
defmodule Uplink.Monitors.Phoenix do
use Uplink.Monitor
@default_buckets [
5,
10,
20,
50,
100,
200,
500,
:timer.seconds(1),
:timer.seconds(1.5),
:timer.seconds(2),
:timer.seconds(5),
:timer.seconds(10)
]
@default_socket_buckets [
1,
2,
5,
10,
20,
50,
100,
200,
500,
:timer.seconds(1),
:timer.seconds(1.5),
:timer.seconds(2),
:timer.seconds(5),
:timer.seconds(10),
:timer.seconds(15),
:timer.seconds(20),
:timer.seconds(25),
:timer.seconds(30)
]
@moduledoc """
Phoenix definitions. Include these if using Phoenix.
## Options
* `:buckets` - Buckets override. Default: #{inspect(@default_buckets)}
* `:socket_buckets` - Socket buckets override. Default: #{inspect(@default_socket_buckets)}
* `:channel_join_warn_threshold` - Slow channel join warning threshold. Time in ms. Default: 250
* `:channel_event_warn_threshold` - Slow channel event warning threshold. Time in ms. Default: 100
## Definitions
* `phoenix.endpoint.duration.ms` - Phoenix endpoint duration - Total time of the request
* Type: `Telemetry.Metrics.Distribution.t()`
* Tags: [:http_status, :method]
* Buckets: #{inspect(@default_buckets)}
* `phoenix.router_dispatch.duration.ms` - Phoenix endpoint duration - Total time of the request
* Type: `Telemetry.Metrics.Distribution.t()`
* Tags: [:http_status, :method, :route]
* Buckets: #{inspect(@default_buckets)}
* `phoenix.error_rendered.total` - Phoenix errors rendered total - Total number of errors rendered
* Type: `Telemetry.Metrics.Counter.t()`
* Tags: [:http_status]
* `phoenix.socket_connected.duration.ms` - Phoenix socket connected duration - Time spent connecting a socket
* Type: `Telemetry.Metrics.Distribution.t()`
* Tags: [:result]
* Buckets: #{inspect(@default_socket_buckets)}
* `phoenix.channel_joined.duration.ms` - Phoenix channel joined duration - Time spent joining a channel
* Type: `Telemetry.Metrics.Distribution.t()`
* Tags: [:channel, :result]
* Buckets: #{inspect(@default_socket_buckets)}
* `phoenix.channel_handled_in.duration.ms` - Phoenix channel handled in duration - Time spent handling an in event
* Type: `Telemetry.Metrics.Distribution.t()`
* Tags: [:channel, :event]
* Buckets: #{inspect(@default_socket_buckets)}
"""
require Logger
import Telemetry.Metrics, only: [counter: 2, distribution: 2]
# NOTE(review): returns the result of the last `:telemetry.attach/4` call
# via `attach_events/1` — confirm the `Uplink.Monitor` behaviour accepts
# that as the init result.
@impl true
def init(opts \\ []) do
attach_events(opts)
end
# Telemetry handler: logs channel joins slower than `config.threshold` ms
# as JSON at warn level; always returns :ok.
def handle_event([:phoenix, :channel_joined], measurements, meta, config) do
duration = System.convert_time_unit(measurements.duration, :native, :millisecond)
if duration > config.threshold do
log_data = %{
title: "Slow phoenix channel join",
duration: duration,
result: meta.result,
socket_info:
Map.take(meta.socket, [
:channel,
:endpoint,
:handler,
:id,
:join_ref,
:joined,
:pubsub_server,
:serializer,
:topic,
:transport
])
}
_ =
log_data
|> Jason.encode!()
|> Logger.warn()
:ok
end
:ok
end
# Telemetry handler: logs channel "handle_in" events slower than
# `config.threshold` ms as JSON at warn level; always returns :ok.
def handle_event([:phoenix, :channel_handled_in], measurements, meta, config) do
duration = System.convert_time_unit(measurements.duration, :native, :millisecond)
if duration > config.threshold do
log_data = %{
title: "Slow phoenix channel event",
duration: duration,
event: meta.event,
socket_info:
Map.take(meta.socket, [
:channel,
:endpoint,
:handler,
:id,
:join_ref,
:joined,
:pubsub_server,
:serializer,
:topic,
:transport
])
}
_ =
log_data
|> Jason.encode!()
|> Logger.warn()
:ok
end
:ok
end
defp default_options do
[
buckets: @default_buckets,
socket_buckets: @default_socket_buckets,
channel_join_warn_threshold: 250,
channel_event_warn_threshold: 100
]
end
# Attaches the slow-join and slow-event telemetry handlers, passing each
# its warning threshold (ms) as the handler config.
defp attach_events(opts) do
final_opts = Keyword.merge(default_options(), opts)
join_threshold = Keyword.fetch!(final_opts, :channel_join_warn_threshold)
:telemetry.attach(
"phoenix_slow_join_handler",
[:phoenix, :channel_joined],
&__MODULE__.handle_event/4,
%{threshold: join_threshold}
)
event_threshold = Keyword.fetch!(final_opts, :channel_event_warn_threshold)
:telemetry.attach(
"phoenix_slow_event_handler",
[:phoenix, :channel_handled_in],
&__MODULE__.handle_event/4,
%{threshold: event_threshold}
)
end
# Builds the Telemetry.Metrics definitions listed in the moduledoc,
# applying any bucket overrides from `opts`.
@impl true
def metric_definitions(opts \\ []) do
final_opts = Keyword.merge(default_options(), opts)
buckets = Keyword.fetch!(final_opts, :buckets)
socket_buckets = Keyword.fetch!(final_opts, :socket_buckets)
[
distribution("phoenix.endpoint.duration.ms",
event_name: [:phoenix, :endpoint, :stop],
measurement: :duration,
unit: {:native, :millisecond},
reporter_options: [buckets: buckets],
description: "Phoenix endpoint duration - Total time of the request",
tags: [:http_status, :method],
tag_values: fn %{conn: conn} ->
%{
http_status: conn.status,
method: conn.method
}
end
),
distribution("phoenix.router_dispatch.duration.ms",
event_name: [:phoenix, :router_dispatch, :stop],
measurement: :duration,
unit: {:native, :millisecond},
reporter_options: [buckets: buckets],
description: "Phoenix endpoint duration - Total time of the request",
tags: [:http_status, :method, :route],
tag_values: fn %{conn: conn, route: route} ->
%{
http_status: conn.status,
method: conn.method,
route: route
}
end
),
counter("phoenix.error_rendered.total",
event_name: [:phoenix, :error_rendered],
measurement: :duration,
unit: :"1",
description: "Phoenix errors rendered total - Total number of errors rendered",
tags: [:http_status],
tag_values: fn %{status: status} ->
%{
http_status: status
}
end
),
distribution("phoenix.socket_connected.duration.ms",
event_name: [:phoenix, :socket_connected],
measurement: :duration,
unit: {:native, :millisecond},
reporter_options: [buckets: socket_buckets],
description: "Phoenix socket connected duration - Time spent connected a socket",
tags: [:result]
),
distribution("phoenix.channel_joined.duration.ms",
event_name: [:phoenix, :channel_joined],
measurement: :duration,
unit: {:native, :millisecond},
reporter_options: [buckets: socket_buckets],
description: "Phoenix channel joined duration - Time spent joining a channel",
tags: [:channel, :result],
tag_values: fn %{result: result, socket: socket} ->
%{
result: result,
channel: socket.channel
}
end
),
distribution("phoenix.channel_handled_in.duration.ms",
event_name: [:phoenix, :channel_handled_in],
measurement: :duration,
unit: {:native, :millisecond},
reporter_options: [buckets: socket_buckets],
description: "Phoenix channel handled in duration - Time spent handling an in event",
tags: [:channel, :event],
tag_values: fn %{event: event, socket: socket} ->
%{
event: event,
channel: socket.channel
}
end
)
]
end
end
|
examples/org_uplink/lib/org_uplink/monitors/phoenix.ex
| 0.864825
| 0.498047
|
phoenix.ex
|
starcoder
|
defmodule Code.Identifier do
  @moduledoc false
  # NOTE(review): this module mirrors Elixir's own `Code.Identifier` — presumably
  # vendored from the standard library; confirm which upstream version it tracks
  # before editing, since clause order below is behavior-significant.
  @doc """
  Checks if the given identifier is an unary op.
  ## Examples
      iex> Code.Identifier.unary_op(:+)
      {:non_associative, 300}
  """
  @spec unary_op(atom) :: {:non_associative, precedence :: pos_integer} | :error
  def unary_op(op) do
    # Precedence table for unary operators; higher number binds tighter.
    cond do
      op in [:&] -> {:non_associative, 90}
      op in [:!, :^, :not, :+, :-, :~~~] -> {:non_associative, 300}
      op in [:@] -> {:non_associative, 320}
      true -> :error
    end
  end
  @doc """
  Checks if the given identifier is a binary op.
  ## Examples
      iex> Code.Identifier.binary_op(:+)
      {:left, 210}
  """
  @spec binary_op(atom) :: {:left | :right, precedence :: pos_integer} | :error
  def binary_op(op) do
    # Precedence/associativity table for binary operators, lowest to highest.
    cond do
      op in [:<-, :\\] -> {:left, 40}
      op in [:when] -> {:right, 50}
      op in [:"::"] -> {:right, 60}
      op in [:|] -> {:right, 70}
      op in [:=] -> {:right, 100}
      op in [:||, :|||, :or] -> {:left, 120}
      op in [:&&, :&&&, :and] -> {:left, 130}
      op in [:==, :!=, :=~, :===, :!==] -> {:left, 140}
      op in [:<, :<=, :>=, :>] -> {:left, 150}
      op in [:|>, :<<<, :>>>, :<~, :~>, :<<~, :~>>, :<~>, :<|>] -> {:left, 160}
      op in [:in] -> {:left, 170}
      op in [:^^^] -> {:left, 180}
      op in [:"//"] -> {:right, 190}
      op in [:++, :--, :.., :<>, :+++, :---] -> {:right, 200}
      op in [:+, :-] -> {:left, 210}
      op in [:*, :/] -> {:left, 220}
      op in [:.] -> {:left, 310}
      true -> :error
    end
  end
  @doc """
  Classifies the given atom into one of the following categories:
  * `:alias` - a valid Elixir alias, like `Foo`, `Foo.Bar` and so on
  * `:callable_local` - an atom that can be used as a local call;
    this category includes identifiers like `:foo`
  * `:callable_operator` - all callable operators, such as `:<>`. Note
    operators such as `:..` are not callable because of ambiguity
  * `:not_atomable` - callable operators that must be wrapped in quotes when
    defined as an atom. For example, `::` must be written as `:"::"` to avoid
    the ambiguity between the atom and the keyword identifier
  * `:not_callable` - an atom that cannot be used as a function call after the
    `.` operator. Those are typically AST nodes that are special forms (such as
    `:%{}` and `:<<>>>`) as well as nodes that are ambiguous in calls (such as
    `:..` and `:...`). This category also includes atoms like `:Foo`, since
    they are valid identifiers but they need quotes to be used in function
    calls (`Foo."Bar"`)
  * `:other` - any other atom (these are usually escaped when inspected, like
    `:"foo and bar"`)
  """
  def classify(atom) when is_atom(atom) do
    charlist = Atom.to_charlist(atom)
    # Clause order matters: special forms and quoted-only operators must be
    # caught before the generic operator lookups below.
    cond do
      atom in [:%, :%{}, :{}, :<<>>, :..., :.., :., :"..//", :->] ->
        :not_callable
      atom in [:"::", :"//"] ->
        :not_atomable
      unary_op(atom) != :error or binary_op(atom) != :error ->
        :callable_operator
      valid_alias?(charlist) ->
        :alias
      true ->
        # Fall back to the configured tokenizer to decide whether the charlist
        # is a plain identifier. `special` lists marker chars (e.g. ?@) that
        # disqualify it from being a local call.
        case :elixir_config.static(:identifier_tokenizer).tokenize(charlist) do
          {kind, _acc, [], _, _, special} ->
            if kind == :identifier and not :lists.member(?@, special) do
              :callable_local
            else
              :not_callable
            end
          _ ->
            :other
        end
    end
  end
  # Aliases are atoms whose charlist starts with the `Elixir` prefix followed
  # by dot-separated pieces, each starting with an uppercase letter.
  defp valid_alias?('Elixir' ++ rest), do: valid_alias_piece?(rest)
  defp valid_alias?(_other), do: false
  defp valid_alias_piece?([?., char | rest]) when char >= ?A and char <= ?Z,
    do: valid_alias_piece?(trim_leading_while_valid_identifier(rest))
  defp valid_alias_piece?([]), do: true
  defp valid_alias_piece?(_other), do: false
  # Consumes [a-zA-Z0-9_] characters, returning the remainder of the charlist.
  defp trim_leading_while_valid_identifier([char | rest])
       when char >= ?a and char <= ?z
       when char >= ?A and char <= ?Z
       when char >= ?0 and char <= ?9
       when char == ?_ do
    trim_leading_while_valid_identifier(rest)
  end
  defp trim_leading_while_valid_identifier(other) do
    other
  end
  @doc """
  Inspects the identifier as an atom.
  """
  # nil/true/false print without a leading colon.
  def inspect_as_atom(atom) when is_nil(atom) or is_boolean(atom) do
    Atom.to_string(atom)
  end
  def inspect_as_atom(atom) when is_atom(atom) do
    binary = Atom.to_string(atom)
    case classify(atom) do
      :alias ->
        # Strip the implicit "Elixir." prefix, except for the special cases
        # where the alias itself is (or starts with) the literal Elixir module.
        case binary do
          binary when binary in ["Elixir", "Elixir.Elixir"] -> binary
          "Elixir.Elixir." <> _rest -> binary
          "Elixir." <> rest -> rest
        end
      type when type in [:callable_local, :callable_operator, :not_callable] ->
        ":" <> binary
      _ ->
        # Anything else needs quoting and escaping, e.g. :"foo bar".
        {escaped, _} = escape(binary, ?")
        IO.iodata_to_binary([?:, ?", escaped, ?"])
    end
  end
  @doc """
  Inspects the given identifier as a key.
  """
  def inspect_as_key(atom) when is_atom(atom) do
    binary = Atom.to_string(atom)
    case classify(atom) do
      type when type in [:callable_local, :callable_operator, :not_callable] ->
        IO.iodata_to_binary([binary, ?:])
      _ ->
        {escaped, _} = escape(binary, ?")
        IO.iodata_to_binary([?", escaped, ?", ?:])
    end
  end
  @doc """
  Inspects the given identifier as a function name.
  """
  def inspect_as_function(atom) when is_atom(atom) do
    binary = Atom.to_string(atom)
    case classify(atom) do
      type when type in [:callable_local, :callable_operator, :not_atomable] ->
        binary
      type ->
        # Quoted function names (Foo."Bar") escape their contents unless the
        # name is already a valid (but not directly callable) identifier/alias.
        escaped =
          if type in [:not_callable, :alias] do
            binary
          else
            elem(escape(binary, ?"), 0)
          end
        IO.iodata_to_binary([?", escaped, ?"])
    end
  end
  @doc """
  Extracts the name and arity of the parent from the anonymous function identifier.
  """
  # Example of this format: -NAME/ARITY-fun-COUNT-
  def extract_anonymous_fun_parent(atom) when is_atom(atom) do
    # NAME may itself contain "/" — split on all of them, then take the last
    # segment (ARITY-fun-COUNT-) apart and rejoin the rest as the name.
    with "-" <> rest <- Atom.to_string(atom),
         [trailing | reversed] = rest |> String.split("/") |> Enum.reverse(),
         [arity, _inner, _count, ""] <- String.split(trailing, "-") do
      {reversed |> Enum.reverse() |> Enum.join("/") |> String.to_atom(), arity}
    else
      _ -> :error
    end
  end
  @doc """
  Escapes the given identifier.
  """
  def escape(other, char, count \\ :infinity, fun \\ &escape_map/1) do
    escape(other, char, count, [], fun)
  end
  # escape/5 returns {escaped_iodata, remaining_binary}; `count` limits how many
  # characters are escaped (:infinity for all). Clause order is significant:
  # the utf8 clause must come before the 4-bit fallback, which only matches
  # bytes that are not valid UTF-8.
  defp escape(<<_, _::binary>> = binary, _char, 0, acc, _fun) do
    {acc, binary}
  end
  defp escape(<<char, t::binary>>, char, count, acc, fun) do
    escape(t, char, decrement(count), [acc | [?\\, char]], fun)
  end
  defp escape(<<?#, ?{, t::binary>>, char, count, acc, fun) do
    # "#{" must be escaped so the result is not treated as interpolation.
    escape(t, char, decrement(count), [acc | '\\\#{'], fun)
  end
  defp escape(<<h::utf8, t::binary>>, char, count, acc, fun) do
    escaped = if value = fun.(h), do: value, else: escape_char(h)
    escape(t, char, decrement(count), [acc | escaped], fun)
  end
  defp escape(<<a::4, b::4, t::binary>>, char, count, acc, fun) do
    # Invalid UTF-8 byte: emit it as \xNN using its two nibbles.
    escape(t, char, decrement(count), [acc | ['\\x', to_hex(a), to_hex(b)]], fun)
  end
  defp escape(<<>>, _char, _count, acc, _fun) do
    {acc, <<>>}
  end
  defp escape_char(0), do: '\\0'
  # 65279 is the byte-order mark (U+FEFF), always escaped explicitly.
  defp escape_char(65279), do: '\\uFEFF'
  # Printable/valid ranges pass through as-is.
  defp escape_char(char)
       when char in 0x20..0x7E
       when char in 0xA0..0xD7FF
       when char in 0xE000..0xFFFD
       when char in 0x10000..0x10FFFF do
    <<char::utf8>>
  end
  defp escape_char(char) when char < 0x100 do
    <<a::4, b::4>> = <<char::8>>
    ['\\x', to_hex(a), to_hex(b)]
  end
  defp escape_char(char) when char < 0x10000 do
    <<a::4, b::4, c::4, d::4>> = <<char::16>>
    ['\\x{', to_hex(a), to_hex(b), to_hex(c), to_hex(d), ?}]
  end
  defp escape_char(char) when char < 0x1000000 do
    <<a::4, b::4, c::4, d::4, e::4, f::4>> = <<char::24>>
    ['\\x{', to_hex(a), to_hex(b), to_hex(c), to_hex(d), to_hex(e), to_hex(f), ?}]
  end
  # Standard single-character escape sequences; returns false when the
  # character has no shorthand (caller falls back to escape_char/1).
  defp escape_map(?\a), do: '\\a'
  defp escape_map(?\b), do: '\\b'
  defp escape_map(?\d), do: '\\d'
  defp escape_map(?\e), do: '\\e'
  defp escape_map(?\f), do: '\\f'
  defp escape_map(?\n), do: '\\n'
  defp escape_map(?\r), do: '\\r'
  defp escape_map(?\t), do: '\\t'
  defp escape_map(?\v), do: '\\v'
  defp escape_map(?\\), do: '\\\\'
  defp escape_map(_), do: false
  @compile {:inline, to_hex: 1, decrement: 1}
  defp to_hex(c) when c in 0..9, do: ?0 + c
  defp to_hex(c) when c in 10..15, do: ?A + c - 10
  defp decrement(:infinity), do: :infinity
  defp decrement(counter), do: counter - 1
end
|
lib/elixir/lib/code/identifier.ex
| 0.895739
| 0.719334
|
identifier.ex
|
starcoder
|
defmodule Mux.Token do
  @moduledoc """
  Helpers for working with Playback IDs that use `signed` playback
  policies. [API Documentation](https://docs.mux.com/docs/security-signed-urls)
  """
  @type signature_type :: :video | :thumbnail | :gif | :storyboard
  @type option ::
          {:type, signature_type}
          | {:expiration, integer}
          | {:token_id, String.t()}
          | {:token_secret, String.t()}
          | {:params, any}
  @type options :: [option]
  @doc """
  Create a signed URL token for a playback ID.
  `options` object can include:
  - `options.token_id`: Signing token ID (defaults to `Application.get_env(:mux, :signing_token_id)`)
  - `options.token_secret`: Signing token secret (defaults to `Application.get_env(:mux, :signing_token_secret)`)
  - `options.type`: Type of signature to create. Defaults to `:video`, options are: `:video, :gif, :thumbnail, :storyboard`
  - `options.expiration`: Seconds the token is valid for. Defaults to 7 days from now (604,800)
  - `options.params`: Map that includes any additional query params. For thumbnails this would be values like `height` or `time`.
  """
  @spec sign(String.t(), options()) :: String.t()
  def sign(playback_id, opts \\ []) do
    resolved = default_options(opts)
    # JOSE header: signing algorithm plus the Mux signing-key id.
    header = %{
      "typ" => "JWT",
      "alg" => "RS256",
      "kid" => resolved[:token_id]
    }
    expires_at = DateTime.to_unix(DateTime.utc_now()) + resolved[:expiration]
    # Extra params are merged on top, so callers may override base claims.
    payload =
      %{
        "aud" => type_to_aud(resolved[:type]),
        "sub" => playback_id,
        "exp" => expires_at
      }
      |> Map.merge(resolved[:params])
      |> Jason.encode!()
    resolved[:token_secret]
    |> jwt_signer()
    |> JOSE.JWS.sign(payload, header)
    |> JOSE.JWS.compact()
    |> elem(1)
  end
  @doc """
  Verifies a signed URL token against the configured signing secret.
  """
  def verify(token, opts \\ []) do
    opts
    |> default_options()
    |> Map.fetch!(:token_secret)
    |> jwt_signer()
    |> JOSE.JWS.verify(token)
  end
  # Builds a JWK signer from the (possibly base64-encoded) PEM secret.
  defp jwt_signer(secret) do
    secret
    |> get_private_key()
    |> JOSE.JWK.from_pem()
  end
  # Merges caller options over application-env defaults, returned as a map.
  defp default_options(opts) do
    [
      token_id: Application.get_env(:mux, :signing_token_id),
      token_secret: Application.get_env(:mux, :signing_token_secret),
      expiration: 604_800,
      type: :video,
      params: %{}
    ]
    |> Keyword.merge(opts)
    |> Map.new()
  end
  # Secrets may be given as raw PEM or base64-encoded PEM.
  defp get_private_key("-----BEGIN RSA PRIVATE KEY-----" <> _ = key), do: key
  defp get_private_key(key), do: Base.decode64!(key)
  # Maps the signature type to Mux's single-letter `aud` claim.
  defp type_to_aud(:video), do: "v"
  defp type_to_aud(:thumbnail), do: "t"
  defp type_to_aud(:gif), do: "g"
  defp type_to_aud(:storyboard), do: "s"
end
|
lib/mux/token.ex
| 0.829837
| 0.424412
|
token.ex
|
starcoder
|
defmodule Coxir.Struct.Role do
  @moduledoc """
  Defines methods used to interact with guild roles.
  Refer to [this](https://discord.com/developers/docs/topics/permissions#role-object)
  for a list of fields and a broader documentation.
  """
  @type role :: String.t | map

  use Coxir.Struct

  @doc false
  def get(id),
    do: super(id)

  @doc false
  def select(pattern)

  @doc """
  Modifies a given role.
  Returns a role object upon success
  or a map containing error information.
  #### Params
  Must be an enumerable with the fields listed below.
  - `name` - name of the role
  - `color` - RGB color value
  - `permissions` - bitwise of the permissions
  - `hoist` - whether the role should be displayed separately
  - `mentionable` - whether the role should be mentionable
  Refer to [this](https://discord.com/developers/docs/resources/guild#modify-guild-role)
  for a broader explanation on the fields and their defaults.
  """
  @spec edit(role, Enum.t) :: map
  def edit(%{id: id, guild_id: guild}, params) do
    edit(id, guild, params)
  end

  @doc """
  Modifies a given role.
  Refer to `edit/2` for more information.
  """
  @spec edit(String.t, String.t, Enum.t) :: map
  def edit(role, guild, params) do
    # The returned role payload lacks the guild id, so attach it here.
    response = API.request(:patch, "guilds/#{guild}/roles/#{role}", params)
    put(response, :guild_id, guild)
  end

  @doc """
  Changes the name of a given role.
  Returns a role object upon success
  or a map containing error information.
  """
  @spec set_name(role, String.t) :: map
  def set_name(%{id: id, guild_id: guild}, name) do
    set_name(id, guild, name)
  end

  @doc """
  Changes the name of a given role.
  Refer to `set_name/2` for more information.
  """
  @spec set_name(String.t, String.t, String.t) :: map
  def set_name(role, guild, name) do
    edit(role, guild, name: name)
  end

  @doc """
  Changes the color of a given role.
  Returns a role object upon success
  or a map containing error information.
  """
  @spec set_color(role, integer) :: map
  def set_color(%{id: id, guild_id: guild}, color) do
    set_color(id, guild, color)
  end

  @doc """
  Changes the color of a given role.
  Refer to `set_color/2` for more information.
  """
  @spec set_color(String.t, String.t, integer) :: map
  def set_color(role, guild, color) do
    edit(role, guild, color: color)
  end

  @doc """
  Changes the permissions of a given role.
  Returns a role object upon success
  or a map containing error information.
  """
  @spec set_permissions(role, integer) :: map
  def set_permissions(%{id: id, guild_id: guild}, permissions) do
    set_permissions(id, guild, permissions)
  end

  @doc """
  Changes the permissions of a given role.
  Refer to `set_permissions/2` for more information.
  """
  @spec set_permissions(String.t, String.t, integer) :: map
  def set_permissions(role, guild, permissions) do
    edit(role, guild, permissions: permissions)
  end

  @doc """
  Changes the hoist flag of a given role.
  Returns a role object upon success
  or a map containing error information.
  """
  @spec set_hoist(role, boolean) :: map
  def set_hoist(%{id: id, guild_id: guild}, bool) do
    set_hoist(id, guild, bool)
  end

  @doc """
  Changes the hoist flag of a given role.
  Refer to `set_hoist/2` for more information.
  """
  @spec set_hoist(String.t, String.t, boolean) :: map
  def set_hoist(role, guild, bool) do
    edit(role, guild, hoist: bool)
  end

  @doc """
  Changes the mentionable flag of a given role.
  Returns a role object upon success
  or a map containing error information.
  """
  @spec set_mentionable(role, boolean) :: map
  def set_mentionable(%{id: id, guild_id: guild}, bool) do
    set_mentionable(id, guild, bool)
  end

  @doc """
  Changes the mentionable flag of a given role.
  Refer to `set_mentionable/2` for more information.
  """
  @spec set_mentionable(String.t, String.t, boolean) :: map
  def set_mentionable(role, guild, bool) do
    edit(role, guild, mentionable: bool)
  end

  @doc """
  Deletes a given role.
  Returns the atom `:ok` upon success
  or a map containing error information.
  """
  @spec delete(role) :: :ok | map
  def delete(%{id: id, guild_id: guild}) do
    delete(id, guild)
  end

  @doc """
  Deletes a given role.
  Refer to `delete/1` for more information.
  """
  @spec delete(String.t, String.t) :: :ok | map
  def delete(role, guild),
    do: API.request(:delete, "guilds/#{guild}/roles/#{role}")
end
|
lib/coxir/struct/role.ex
| 0.90753
| 0.541833
|
role.ex
|
starcoder
|
defmodule Earmark.TagSpecificProcessors do
  @moduledoc """
  This struct represents a list of tuples `{tag, function}` from which a postprocessing function
  can be constructed
  General Usage Examples:
      iex(0)> tsp = new({"p", &Earmark.AstTools.merge_atts_in_node(&1, class: "one")})
      ...(0)> tsp = prepend_tag_function(tsp, "i", &Earmark.AstTools.merge_atts_in_node(&1, class: "two"))
      ...(0)> make_postprocessor(tsp).({"p", [], nil, nil})
      {"p", [{"class", "one"}], nil, nil}
      iex(1)> tsp = new({"p", &Earmark.AstTools.merge_atts_in_node(&1, class: "one")})
      ...(1)> tsp = prepend_tag_function(tsp, "i", &Earmark.AstTools.merge_atts_in_node(&1, class: "two"))
      ...(1)> make_postprocessor(tsp).({"i", [{"class", "i"}], nil, nil})
      {"i", [{"class", "two i"}], nil, nil}
      iex(2)> tsp = new({"p", &Earmark.AstTools.merge_atts_in_node(&1, class: "one")})
      ...(2)> tsp = prepend_tag_function(tsp, "i", &Earmark.AstTools.merge_atts_in_node(&1, class: "two"))
      ...(2)> make_postprocessor(tsp).({"x", [], nil, nil})
      {"x", [], nil, nil}
  """
  defstruct tag_functions: []

  @doc """
  Constructs a postprocessor function from this struct which will find the function associated
  to the tag of the node, and apply the node to it if such a function was found.
  """
  def make_postprocessor(%__MODULE__{tag_functions: tag_functions}) do
    fn
      # Only 4-tuples are AST nodes; everything else passes through untouched.
      {_, _, _, _} = ast_node -> _postprocess(ast_node, tag_functions)
      anything_else -> anything_else
    end
  end

  @doc """
  Convenience construction
      iex(3)> new()
      %Earmark.TagSpecificProcessors{}
  """
  def new, do: %__MODULE__{}
  def new({_, _} = tag_function), do: %__MODULE__{tag_functions: [tag_function]}
  def new(tag_functions), do: %__MODULE__{tag_functions: tag_functions}

  @doc """
  Prepends a tuple {tag, function} to the list of such tuples.
  """
  def prepend_tag_function(tsp, tag, function), do: prepend_tag_function(tsp, {tag, function})

  def prepend_tag_function(%__MODULE__{tag_functions: tag_functions} = tsp, tag_function) do
    %{tsp | tag_functions: [tag_function | tag_functions]}
  end

  # Applies the first function registered for the node's tag, if any;
  # unknown tags are returned unchanged.
  defp _postprocess({tag, _, _, _} = ast_node, tag_functions) do
    case Enum.find(tag_functions, fn {candidate, _fun} -> candidate == tag end) do
      {_tag, fun} -> fun.(ast_node)
      nil -> ast_node
    end
  end
end
# SPDX-License-Identifier: Apache-2.0
|
lib/earmark/tag_specific_processors.ex
| 0.785473
| 0.524943
|
tag_specific_processors.ex
|
starcoder
|
defmodule Aino.Middleware.Routes do
  @moduledoc """
  An Aino set of middleware for dealing with routes and routing
  ## Examples
  To use the routes middleware together, see the example below.
  ```elixir
  routes([
    get("/orders", &Orders.index/1, as: :orders),
    get("/orders/:id", [&Orders.authorize/1, &Order.show/1], as: :order),
    post("/orders", &Orders.create/1),
    post("/orders/:id", [&Orders.authorize/1, &Order.update/1])
  ])
  def handle(token) do
    middleware = [
      Aino.Middleware.common(),
      &Aino.Middleware.Routes.routes(&1, routes()),
      &Aino.Middleware.Routes.match_route/1,
      &Aino.Middleware.params/1,
      &Aino.Middleware.Routes.handle_route/1
    ]
    Aino.Token.reduce(token, middleware)
  end
  ```
  In the example above you can see why `match_route/1` and `handle_route/1` are
  separate functions, you can perform other middleware in between the two. In this
  example, params are merged together via `Aino.Middleware.params/1` before
  handling the route.
  """
  alias Aino.Token

  @doc """
  Configure routes for the handler
  Defines `routes/0` and `__MODULE__.Routes` for route helper functions
  When defining routes, provide the `:as` option to have `_path` and `_url` functions
  generated for you. E.g. `as: :sign_in` will generate `Routes.sign_in_path/2` and
  `Routes.sign_in_url/2`.
  Note that when defining routes, you must only define one `:as` a particular atom. For
  instance, if you have multiple routes pointing at the url `/orders/:id`, you should only
  add `as: :order` to the first route.
  ```elixir
  routes([
    get("/", &MyApp.Web.Page.root/1, as: :root),
    get("/sign-in", &MyApp.Web.Session.show/1, as: :sign_in),
    post("/sign-in", &MyApp.Web.Session.create/1),
    delete("/sign-out", &MyApp.Web.Session.delete/1, as: :sign_out),
    get("/orders", &MyApp.Web.Orders.index/1, as: :orders),
    get("/orders/:id", &MyApp.Web.Orders.show/1, as: :order)
  ])
  ```
  """
  defmacro routes(routes_list) do
    module = compile_routes_module(routes_list)

    quote do
      def routes(), do: unquote(routes_list)
      unquote(module)
    end
  end

  # Builds the AST for a nested `Routes` module containing `<as>_path/2` and
  # `<as>_url/2` helpers for every route that declared an `:as` name.
  def compile_routes_module(routes) do
    quote bind_quoted: [routes: routes] do
      defmodule Routes do
        @moduledoc false
        alias Aino.Middleware.Routes
        routes
        |> Enum.reject(fn route -> is_nil(route[:as]) end)
        |> Enum.map(fn route ->
          path = :"#{route[:as]}_path"
          url = :"#{route[:as]}_url"
          def unquote(path)(_token, params \\ %{}) do
            Routes.compile_path(unquote(route.path), params)
          end
          def unquote(url)(token, params \\ %{}) do
            Routes.compile_url(token, unquote(route.path), params)
          end
        end)
      end
    end
  end

  @doc false
  def compile_url(token, path, params) do
    path = compile_path(path, params)
    "#{token.scheme}://#{token.host}:#{token.port}#{path}"
  end

  @doc false
  def compile_path(path, params) do
    # Atom segments are route variables; substitute their value from `params`.
    # A missing param renders as "" (same as Enum.join on nil).
    "/" <>
      Enum.map_join(path, "/", fn
        part when is_atom(part) -> params[part]
        part -> part
      end)
  end

  @doc """
  Create a DELETE route
  ## Examples
  ```elixir
  routes = [
    delete("/orders/:id", [&Orders.authorize/1, &Order.delete/1], as: :order)
  ]
  ```
  """
  def delete(path, middleware, opts \\ []) do
    build_route(:delete, path, middleware, opts)
  end

  @doc """
  Create a GET route
  ## Examples
  ```elixir
  routes = [
    get("/orders", &Orders.index/1, as: :orders),
    get("/orders/:id", [&Orders.authorize/1, &Order.show/1], as: :order)
  ]
  ```
  """
  def get(path, middleware, opts \\ []) do
    build_route(:get, path, middleware, opts)
  end

  @doc """
  Create a POST route
  ## Examples
  ```elixir
  routes = [
    post("/orders", &Orders.create/1, as: :orders),
    post("/orders/:id", [&Orders.authorize/1, &Order.update/1], as: :order)
  ]
  ```
  """
  def post(path, middleware, opts \\ []) do
    build_route(:post, path, middleware, opts)
  end

  # Shared constructor for route maps; previously duplicated across
  # delete/3, get/3 and post/3.
  defp build_route(method, path, middleware, opts) do
    %{
      method: method,
      path: split_path(path),
      middleware: List.wrap(middleware),
      as: opts[:as]
    }
  end

  # Splits "/orders/:id" into ["orders", :id]; ":name" segments become atoms.
  # Safe use of String.to_atom/1: route paths are developer-written literals,
  # not user input, so the atom set is bounded.
  defp split_path(path) do
    path
    |> String.split("/")
    |> Enum.reject(fn part -> part == "" end)
    |> Enum.map(fn
      ":" <> variable -> String.to_atom(variable)
      part -> part
    end)
  end

  @doc """
  Set routes for the token
  Adds the following keys to the token `[:routes]`
  """
  def routes(token, routes) do
    default_assigns =
      Map.merge(token.default_assigns, %{
        routes: %{
          root_path: fn -> "/" end
        }
      })

    token
    |> Map.put(:routes, routes)
    |> Map.put(:default_assigns, default_assigns)
  end

  @doc """
  Matches the request against routes on the token
  _Must_ have routes set via `routes/2` before running this middleware.
  You _should_ run `handle_route/1` after matching the route, otherwise
  the route is not run.
  Adds the following keys to the token `[:path_params, :route_middleware]`
  """
  def match_route(token) do
    case find_route(token.routes, token.method, token.path) do
      {:ok, %{middleware: middleware}, path_params} ->
        token
        |> Map.put(:path_params, path_params)
        |> Map.put(:route_middleware, middleware)

      :error ->
        # No route matched: short-circuit with a 404 response.
        token
        |> Token.response_status(404)
        |> Token.response_header("Content-Type", "text/html")
        |> Token.response_body("Not found")
    end
  end

  @doc """
  Run the matched route from `match_route/1`
  If no route is present, nothing happens. If a route is present, the
  middleware stored on the token from the matched request is reduced over.
  """
  def handle_route(%{route_middleware: middleware} = token) do
    Aino.Token.reduce(token, middleware)
  end

  def handle_route(token), do: token

  @doc false
  # Linear scan over the route list; first method+path match wins.
  def find_route([route = %{method: method} | routes], method, path) do
    case check_path(path, route.path) do
      {:ok, path_params} ->
        {:ok, route, path_params}

      :error ->
        find_route(routes, method, path)
    end
  end

  def find_route([_route | routes], method, path) do
    find_route(routes, method, path)
  end

  def find_route([], _method, _path), do: :error

  @doc false
  # Walks the request path against the route pattern segment by segment,
  # collecting variable (atom) segments into the params map.
  def check_path(path, route_path, params \\ %{})

  def check_path([], [], params), do: {:ok, params}

  def check_path([value | path], [variable | route_path], params) when is_atom(variable) do
    params = Map.put(params, variable, value)
    check_path(path, route_path, params)
  end

  def check_path([part | path], [part | route_path], params) do
    check_path(path, route_path, params)
  end

  def check_path(_path, _route_path, _params), do: :error
end
|
lib/aino/routes.ex
| 0.921207
| 0.81648
|
routes.ex
|
starcoder
|
defmodule Elixlsx.Sheet do
  alias __MODULE__
  alias Elixlsx.Sheet
  alias Elixlsx.Util

  @moduledoc ~S"""
  Describes a single sheet with a given name. The name can be up to 31 characters long.
  The rows property is a list, each corresponding to a
  row (from the top), of lists, each corresponding to
  a column (from the left), of contents.
  Content may be
  - a String.t (unicode),
  - a number, or
  - a list [String|number, property_list...]
  The property list describes formatting options for that
  cell. See Font.from_props/1 for a list of options.
  """
  defstruct name: "", rows: [], col_widths: %{}, row_heights: %{}, merge_cells: [], pane_freeze: nil, show_grid_lines: true, hidden: false

  @type t :: %Sheet {
    name: String.t,
    rows: list(list(any())),
    col_widths: %{pos_integer => number},
    row_heights: %{pos_integer => number},
    merge_cells: [],
    pane_freeze: {number, number} | nil,
    show_grid_lines: boolean(),
    hidden: boolean()
  }

  @doc ~S"""
  Create a sheet with a sheet name.
  The name can be up to 31 characters long.
  """
  @spec with_name(String.t) :: Sheet.t
  def with_name(name), do: %Sheet{name: name}

  # A cell is either a bare value or [value | property_keyword_list];
  # returns {value, props}.
  defp split_cell_content_props(cell) when is_list(cell), do: {hd(cell), tl(cell)}
  defp split_cell_content_props(cell), do: {cell, []}

  @doc ~S"""
  Returns a "CSV" representation of the Sheet. This is mainly
  used for doctests and does not generate valid CSV (yet).
  """
  def to_csv_string(sheet) do
    Enum.map_join(sheet.rows, "\n", fn row ->
      Enum.map_join(row, ",", fn cell ->
        case split_cell_content_props(cell) do
          {nil, _props} -> ""
          {content, _props} -> to_string(content)
        end
      end)
    end)
  end

  @spec set_cell(Sheet.t, String.t, any(), [key: any]) :: Sheet.t
  @doc ~S"""
  Set a cell indexed by excel coordinates.
  ## Example
      iex> %Elixlsx.Sheet{} |>
      ...> Elixlsx.Sheet.set_cell("C1", "<NAME>",
      ...>                        bold: true, underline: true) |>
      ...> Elixlsx.Sheet.to_csv_string
      ",,Hello World"
  """
  def set_cell(sheet, index, content, opts \\ []) when is_binary(index) do
    {row_index, col_index} = Util.from_excel_coords0(index)
    set_at(sheet, row_index, col_index, content, opts)
  end

  @spec set_at(Sheet.t, non_neg_integer, non_neg_integer, any(), [key: any]) :: Sheet.t
  @doc ~S"""
  Set a cell at a given row/column index. Indizes start at 0.
  ## Example
      iex> %Elixlsx.Sheet{} |>
      ...> Elixlsx.Sheet.set_at(0, 2, "Hello World",
      ...>                      bold: true, underline: true) |>
      ...> Elixlsx.Sheet.to_csv_string
      ",,Hello World"
  """
  def set_at(sheet, rowidx, colidx, content, opts \\ [])
      when is_number(rowidx) and is_number(colidx) do
    row_count = length(sheet.rows)

    cond do
      row_count <= rowidx ->
        # Sheet is too short: pad with empty rows, then retry.
        row_padding = Enum.map(0..(rowidx - row_count), fn _ -> [] end)

        sheet
        |> Map.update!(:rows, fn rows -> rows ++ row_padding end)
        |> set_at(rowidx, colidx, content, opts)

      length(Enum.at(sheet.rows, rowidx)) <= colidx ->
        # Target row is too short: pad it with nil cells, then retry.
        row = Enum.at(sheet.rows, rowidx)
        col_padding = Enum.map(0..(colidx - length(row)), fn _ -> nil end)

        sheet
        |> Map.update!(:rows, fn rows -> List.replace_at(rows, rowidx, row ++ col_padding) end)
        |> set_at(rowidx, colidx, content, opts)

      true ->
        # Cell exists: store content together with its formatting options.
        Map.update!(sheet, :rows, fn rows ->
          List.update_at(rows, rowidx, fn cells ->
            List.replace_at(cells, colidx, [content | opts])
          end)
        end)
    end
  end

  @spec set_col_width(Sheet.t, String.t, number) :: Sheet.t
  @doc ~S"""
  Set the column width for a given column. Column is indexed by
  name ("A", ...)
  """
  def set_col_width(sheet, column, width) do
    Map.update!(sheet, :col_widths, fn widths ->
      Map.put(widths, Util.decode_col(column), width)
    end)
  end

  @spec set_row_height(Sheet.t, number, number) :: Sheet.t
  @doc ~S"""
  Set the row height for a given row. Row is indexed starting from 1
  """
  def set_row_height(sheet, row_idx, height) do
    Map.update!(sheet, :row_heights, fn heights ->
      Map.put(heights, row_idx, height)
    end)
  end

  @spec set_pane_freeze(Sheet.t, number, number) :: Sheet.t
  @doc ~S"""
  Set the pane freeze at the given row and column. Row and column are indexed starting from 1.
  Special value 0 means no freezing, e.g. {1, 0} will freeze first row and no columns.
  """
  def set_pane_freeze(sheet, row_idx, col_idx), do: %{sheet | pane_freeze: {row_idx, col_idx}}

  @spec remove_pane_freeze(Sheet.t) :: Sheet.t
  @doc ~S"""
  Removes any pane freezing that has been set
  """
  def remove_pane_freeze(sheet), do: %{sheet | pane_freeze: nil}

  @spec set_hidden(Sheet.t(), boolean) :: Sheet.t()
  @doc ~S"""
  Sets a sheet as hidden.
  """
  def set_hidden(sheet, is_hidden), do: %{sheet | hidden: is_hidden}
end
|
lib/elixlsx/sheet.ex
| 0.760206
| 0.544438
|
sheet.ex
|
starcoder
|
defmodule Lnrpc.GenSeedRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
aezeed_passphrase: String.t(),
seed_entropy: String.t()
}
defstruct [:aezeed_passphrase, :seed_entropy]
field(:aezeed_passphrase, 1, type: :bytes)
field(:seed_entropy, 2, type: :bytes)
end
defmodule Lnrpc.GenSeedResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
cipher_seed_mnemonic: [String.t()],
enciphered_seed: String.t()
}
defstruct [:cipher_seed_mnemonic, :enciphered_seed]
field(:cipher_seed_mnemonic, 1, repeated: true, type: :string)
field(:enciphered_seed, 2, type: :bytes)
end
defmodule Lnrpc.InitWalletRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
wallet_password: String.t(),
cipher_seed_mnemonic: [String.t()],
aezeed_passphrase: String.t(),
recovery_window: integer
}
defstruct [:wallet_password, :cipher_seed_mnemonic, :aezeed_passphrase, :recovery_window]
field(:wallet_password, 1, type: :bytes)
field(:cipher_seed_mnemonic, 2, repeated: true, type: :string)
field(:aezeed_passphrase, 3, type: :bytes)
field(:recovery_window, 4, type: :int32)
end
defmodule Lnrpc.InitWalletResponse do
@moduledoc false
use Protobuf, syntax: :proto3
defstruct []
end
defmodule Lnrpc.UnlockWalletRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
wallet_password: String.t(),
recovery_window: integer
}
defstruct [:wallet_password, :recovery_window]
field(:wallet_password, 1, type: :bytes)
field(:recovery_window, 2, type: :int32)
end
defmodule Lnrpc.UnlockWalletResponse do
@moduledoc false
use Protobuf, syntax: :proto3
defstruct []
end
defmodule Lnrpc.ChangePasswordRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
current_password: String.t(),
new_password: String.t()
}
defstruct [:current_password, :new_password]
field(:current_password, 1, type: :bytes)
field(:new_password, 2, type: :bytes)
end
defmodule Lnrpc.ChangePasswordResponse do
@moduledoc false
use Protobuf, syntax: :proto3
defstruct []
end
defmodule Lnrpc.Transaction do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
tx_hash: String.t(),
amount: integer,
num_confirmations: integer,
block_hash: String.t(),
block_height: integer,
time_stamp: integer,
total_fees: integer,
dest_addresses: [String.t()]
}
defstruct [
:tx_hash,
:amount,
:num_confirmations,
:block_hash,
:block_height,
:time_stamp,
:total_fees,
:dest_addresses
]
field(:tx_hash, 1, type: :string)
field(:amount, 2, type: :int64)
field(:num_confirmations, 3, type: :int32)
field(:block_hash, 4, type: :string)
field(:block_height, 5, type: :int32)
field(:time_stamp, 6, type: :int64)
field(:total_fees, 7, type: :int64)
field(:dest_addresses, 8, repeated: true, type: :string)
end
defmodule Lnrpc.GetTransactionsRequest do
@moduledoc false
use Protobuf, syntax: :proto3
defstruct []
end
defmodule Lnrpc.TransactionDetails do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
transactions: [Lnrpc.Transaction.t()]
}
defstruct [:transactions]
field(:transactions, 1, repeated: true, type: Lnrpc.Transaction)
end
defmodule Lnrpc.FeeLimit do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
limit: {atom, any}
}
defstruct [:limit]
oneof(:limit, 0)
field(:fixed, 1, type: :int64, oneof: 0)
field(:percent, 2, type: :int64, oneof: 0)
end
defmodule Lnrpc.SendRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
dest: String.t(),
dest_string: String.t(),
amt: integer,
payment_hash: String.t(),
payment_hash_string: String.t(),
payment_request: String.t(),
final_cltv_delta: integer,
fee_limit: Lnrpc.FeeLimit.t()
}
defstruct [
:dest,
:dest_string,
:amt,
:payment_hash,
:payment_hash_string,
:payment_request,
:final_cltv_delta,
:fee_limit
]
field(:dest, 1, type: :bytes)
field(:dest_string, 2, type: :string)
field(:amt, 3, type: :int64)
field(:payment_hash, 4, type: :bytes)
field(:payment_hash_string, 5, type: :string)
field(:payment_request, 6, type: :string)
field(:final_cltv_delta, 7, type: :int32)
field(:fee_limit, 8, type: Lnrpc.FeeLimit)
end
defmodule Lnrpc.SendResponse do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
payment_error: String.t(),
payment_preimage: String.t(),
payment_route: Lnrpc.Route.t()
}
defstruct [:payment_error, :payment_preimage, :payment_route]
field(:payment_error, 1, type: :string)
field(:payment_preimage, 2, type: :bytes)
field(:payment_route, 3, type: Lnrpc.Route)
end
defmodule Lnrpc.SendToRouteRequest do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
payment_hash: String.t(),
payment_hash_string: String.t(),
routes: [Lnrpc.Route.t()]
}
defstruct [:payment_hash, :payment_hash_string, :routes]
field(:payment_hash, 1, type: :bytes)
field(:payment_hash_string, 2, type: :string)
field(:routes, 3, repeated: true, type: Lnrpc.Route)
end
defmodule Lnrpc.ChannelPoint do
  @moduledoc false
  # Proto3 message: funding outpoint — txid as a oneof (bytes or string) plus output index.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          funding_txid: {atom, any},
          output_index: non_neg_integer
        }
  defstruct [:funding_txid, :output_index]
  oneof(:funding_txid, 0)
  field(:funding_txid_bytes, 1, type: :bytes, oneof: 0)
  field(:funding_txid_str, 2, type: :string, oneof: 0)
  field(:output_index, 3, type: :uint32)
end
defmodule Lnrpc.LightningAddress do
  @moduledoc false
  # Proto3 message: peer address as a pubkey string plus host string.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          pubkey: String.t(),
          host: String.t()
        }
  defstruct [:pubkey, :host]
  field(:pubkey, 1, type: :string)
  field(:host, 2, type: :string)
end
defmodule Lnrpc.SendManyRequest do
  @moduledoc false
  # Proto3 message: batched on-chain send. AddrToAmount is a proto map field
  # (encoded via the AddrToAmountEntry message). Tags 2 and 4 are unused here,
  # presumably reserved in the source proto — confirm against rpc.proto.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          AddrToAmount: %{String.t() => integer},
          target_conf: integer,
          sat_per_byte: integer
        }
  defstruct [:AddrToAmount, :target_conf, :sat_per_byte]
  field(:AddrToAmount, 1, repeated: true, type: Lnrpc.SendManyRequest.AddrToAmountEntry, map: true)
  field(:target_conf, 3, type: :int32)
  field(:sat_per_byte, 5, type: :int64)
end
defmodule Lnrpc.SendManyRequest.AddrToAmountEntry do
  @moduledoc false
  # Proto3 map-entry message: string key -> int64 value (backs SendManyRequest.AddrToAmount).
  use Protobuf, map: true, syntax: :proto3
  @type t :: %__MODULE__{
          key: String.t(),
          value: integer
        }
  defstruct [:key, :value]
  field(:key, 1, type: :string)
  field(:value, 2, type: :int64)
end
defmodule Lnrpc.SendManyResponse do
  @moduledoc false
  # Proto3 message: single txid string result.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          txid: String.t()
        }
  defstruct [:txid]
  field(:txid, 1, type: :string)
end
defmodule Lnrpc.SendCoinsRequest do
  @moduledoc false
  # Proto3 message: single on-chain send (addr/amount plus conf-target and fee-rate).
  # Tag 4 is unused, presumably reserved in the source proto.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          addr: String.t(),
          amount: integer,
          target_conf: integer,
          sat_per_byte: integer
        }
  defstruct [:addr, :amount, :target_conf, :sat_per_byte]
  field(:addr, 1, type: :string)
  field(:amount, 2, type: :int64)
  field(:target_conf, 3, type: :int32)
  field(:sat_per_byte, 5, type: :int64)
end
defmodule Lnrpc.SendCoinsResponse do
  @moduledoc false
  # Proto3 message: single txid string result.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          txid: String.t()
        }
  defstruct [:txid]
  field(:txid, 1, type: :string)
end
defmodule Lnrpc.NewAddressRequest do
  @moduledoc false
  # Proto3 message: selects the address kind via the AddressType enum (tag 1).
  # The generated typespec says `integer`; actual decoded value depends on the
  # protobuf-elixir version's enum handling.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          type: integer
        }
  defstruct [:type]
  field(:type, 1, type: Lnrpc.NewAddressRequest.AddressType, enum: true)
end
defmodule Lnrpc.NewAddressRequest.AddressType do
  @moduledoc false
  # Proto3 enum: WITNESS_PUBKEY_HASH = 0, NESTED_PUBKEY_HASH = 1.
  use Protobuf, enum: true, syntax: :proto3
  field(:WITNESS_PUBKEY_HASH, 0)
  field(:NESTED_PUBKEY_HASH, 1)
end
defmodule Lnrpc.NewAddressResponse do
  @moduledoc false
  # Proto3 message: the freshly generated address string.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          address: String.t()
        }
  defstruct [:address]
  field(:address, 1, type: :string)
end
defmodule Lnrpc.SignMessageRequest do
  @moduledoc false
  # Proto3 message: message to sign. `msg` is wire-type bytes even though the
  # typespec says String.t() (Elixir strings are binaries).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          msg: String.t()
        }
  defstruct [:msg]
  field(:msg, 1, type: :bytes)
end
defmodule Lnrpc.SignMessageResponse do
  @moduledoc false
  # Proto3 message: the resulting signature string.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          signature: String.t()
        }
  defstruct [:signature]
  field(:signature, 1, type: :string)
end
defmodule Lnrpc.VerifyMessageRequest do
  @moduledoc false
  # Proto3 message: message bytes plus signature string to verify.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          msg: String.t(),
          signature: String.t()
        }
  defstruct [:msg, :signature]
  field(:msg, 1, type: :bytes)
  field(:signature, 2, type: :string)
end
defmodule Lnrpc.VerifyMessageResponse do
  @moduledoc false
  # Proto3 message: verification verdict plus the recovered pubkey string.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          valid: boolean,
          pubkey: String.t()
        }
  defstruct [:valid, :pubkey]
  field(:valid, 1, type: :bool)
  field(:pubkey, 2, type: :string)
end
defmodule Lnrpc.ConnectPeerRequest do
  @moduledoc false
  # Proto3 message: target LightningAddress plus a `perm` (persistent) flag.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          addr: Lnrpc.LightningAddress.t(),
          perm: boolean
        }
  defstruct [:addr, :perm]
  field(:addr, 1, type: Lnrpc.LightningAddress)
  field(:perm, 2, type: :bool)
end
defmodule Lnrpc.ConnectPeerResponse do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.DisconnectPeerRequest do
  @moduledoc false
  # Proto3 message: peer identified by pub_key string.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          pub_key: String.t()
        }
  defstruct [:pub_key]
  field(:pub_key, 1, type: :string)
end
defmodule Lnrpc.DisconnectPeerResponse do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.HTLC do
  @moduledoc false
  # Proto3 message: in-flight HTLC — direction flag, amount, hash lock bytes, expiry height.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          incoming: boolean,
          amount: integer,
          hash_lock: String.t(),
          expiration_height: non_neg_integer
        }
  defstruct [:incoming, :amount, :hash_lock, :expiration_height]
  field(:incoming, 1, type: :bool)
  field(:amount, 2, type: :int64)
  field(:hash_lock, 3, type: :bytes)
  field(:expiration_height, 4, type: :uint32)
end
defmodule Lnrpc.Channel do
  @moduledoc false
  # Proto3 message: open-channel state — balances, commitment fee/weight,
  # cumulative totals, pending HTLCs, CSV delay, and privacy flag (tags 1-17).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          active: boolean,
          remote_pubkey: String.t(),
          channel_point: String.t(),
          chan_id: non_neg_integer,
          capacity: integer,
          local_balance: integer,
          remote_balance: integer,
          commit_fee: integer,
          commit_weight: integer,
          fee_per_kw: integer,
          unsettled_balance: integer,
          total_satoshis_sent: integer,
          total_satoshis_received: integer,
          num_updates: non_neg_integer,
          pending_htlcs: [Lnrpc.HTLC.t()],
          csv_delay: non_neg_integer,
          private: boolean
        }
  defstruct [
    :active,
    :remote_pubkey,
    :channel_point,
    :chan_id,
    :capacity,
    :local_balance,
    :remote_balance,
    :commit_fee,
    :commit_weight,
    :fee_per_kw,
    :unsettled_balance,
    :total_satoshis_sent,
    :total_satoshis_received,
    :num_updates,
    :pending_htlcs,
    :csv_delay,
    :private
  ]
  field(:active, 1, type: :bool)
  field(:remote_pubkey, 2, type: :string)
  field(:channel_point, 3, type: :string)
  field(:chan_id, 4, type: :uint64)
  field(:capacity, 5, type: :int64)
  field(:local_balance, 6, type: :int64)
  field(:remote_balance, 7, type: :int64)
  field(:commit_fee, 8, type: :int64)
  field(:commit_weight, 9, type: :int64)
  field(:fee_per_kw, 10, type: :int64)
  field(:unsettled_balance, 11, type: :int64)
  field(:total_satoshis_sent, 12, type: :int64)
  field(:total_satoshis_received, 13, type: :int64)
  field(:num_updates, 14, type: :uint64)
  field(:pending_htlcs, 15, repeated: true, type: Lnrpc.HTLC)
  field(:csv_delay, 16, type: :uint32)
  field(:private, 17, type: :bool)
end
defmodule Lnrpc.ListChannelsRequest do
  @moduledoc false
  # Proto3 message: four boolean filters for the channel listing.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          active_only: boolean,
          inactive_only: boolean,
          public_only: boolean,
          private_only: boolean
        }
  defstruct [:active_only, :inactive_only, :public_only, :private_only]
  field(:active_only, 1, type: :bool)
  field(:inactive_only, 2, type: :bool)
  field(:public_only, 3, type: :bool)
  field(:private_only, 4, type: :bool)
end
defmodule Lnrpc.ListChannelsResponse do
  @moduledoc false
  # Proto3 message: repeated Channel list. Note the unusual tag 11 — it matches
  # the upstream proto, so keep it as generated.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channels: [Lnrpc.Channel.t()]
        }
  defstruct [:channels]
  field(:channels, 11, repeated: true, type: Lnrpc.Channel)
end
defmodule Lnrpc.ChannelCloseSummary do
  @moduledoc false
  # Proto3 message: summary of a closed channel; close_type is the ClosureType enum.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channel_point: String.t(),
          chan_id: non_neg_integer,
          chain_hash: String.t(),
          closing_tx_hash: String.t(),
          remote_pubkey: String.t(),
          capacity: integer,
          close_height: non_neg_integer,
          settled_balance: integer,
          time_locked_balance: integer,
          close_type: integer
        }
  defstruct [
    :channel_point,
    :chan_id,
    :chain_hash,
    :closing_tx_hash,
    :remote_pubkey,
    :capacity,
    :close_height,
    :settled_balance,
    :time_locked_balance,
    :close_type
  ]
  field(:channel_point, 1, type: :string)
  field(:chan_id, 2, type: :uint64)
  field(:chain_hash, 3, type: :string)
  field(:closing_tx_hash, 4, type: :string)
  field(:remote_pubkey, 5, type: :string)
  field(:capacity, 6, type: :int64)
  field(:close_height, 7, type: :uint32)
  field(:settled_balance, 8, type: :int64)
  field(:time_locked_balance, 9, type: :int64)
  field(:close_type, 10, type: Lnrpc.ChannelCloseSummary.ClosureType, enum: true)
end
defmodule Lnrpc.ChannelCloseSummary.ClosureType do
  @moduledoc false
  # Proto3 enum: how a channel was closed (values 0-5).
  use Protobuf, enum: true, syntax: :proto3
  field(:COOPERATIVE_CLOSE, 0)
  field(:LOCAL_FORCE_CLOSE, 1)
  field(:REMOTE_FORCE_CLOSE, 2)
  field(:BREACH_CLOSE, 3)
  field(:FUNDING_CANCELED, 4)
  field(:ABANDONED, 5)
end
defmodule Lnrpc.ClosedChannelsRequest do
  @moduledoc false
  # Proto3 message: boolean filters mirroring the ClosureType enum cases.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          cooperative: boolean,
          local_force: boolean,
          remote_force: boolean,
          breach: boolean,
          funding_canceled: boolean,
          abandoned: boolean
        }
  defstruct [:cooperative, :local_force, :remote_force, :breach, :funding_canceled, :abandoned]
  field(:cooperative, 1, type: :bool)
  field(:local_force, 2, type: :bool)
  field(:remote_force, 3, type: :bool)
  field(:breach, 4, type: :bool)
  field(:funding_canceled, 5, type: :bool)
  field(:abandoned, 6, type: :bool)
end
defmodule Lnrpc.ClosedChannelsResponse do
  @moduledoc false
  # Proto3 message: repeated ChannelCloseSummary list.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channels: [Lnrpc.ChannelCloseSummary.t()]
        }
  defstruct [:channels]
  field(:channels, 1, repeated: true, type: Lnrpc.ChannelCloseSummary)
end
defmodule Lnrpc.Peer do
  @moduledoc false
  # Proto3 message: connected-peer stats. Tag 2 is skipped — presumably reserved
  # in the source proto; keep the numbering as generated.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          pub_key: String.t(),
          address: String.t(),
          bytes_sent: non_neg_integer,
          bytes_recv: non_neg_integer,
          sat_sent: integer,
          sat_recv: integer,
          inbound: boolean,
          ping_time: integer
        }
  defstruct [
    :pub_key,
    :address,
    :bytes_sent,
    :bytes_recv,
    :sat_sent,
    :sat_recv,
    :inbound,
    :ping_time
  ]
  field(:pub_key, 1, type: :string)
  field(:address, 3, type: :string)
  field(:bytes_sent, 4, type: :uint64)
  field(:bytes_recv, 5, type: :uint64)
  field(:sat_sent, 6, type: :int64)
  field(:sat_recv, 7, type: :int64)
  field(:inbound, 8, type: :bool)
  field(:ping_time, 9, type: :int64)
end
defmodule Lnrpc.ListPeersRequest do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.ListPeersResponse do
  @moduledoc false
  # Proto3 message: repeated Peer list.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          peers: [Lnrpc.Peer.t()]
        }
  defstruct [:peers]
  field(:peers, 1, repeated: true, type: Lnrpc.Peer)
end
defmodule Lnrpc.GetInfoRequest do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.GetInfoResponse do
  @moduledoc false
  # Proto3 message: node status snapshot. Tag 7 is skipped — presumably reserved
  # in the source proto.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          identity_pubkey: String.t(),
          alias: String.t(),
          num_pending_channels: non_neg_integer,
          num_active_channels: non_neg_integer,
          num_peers: non_neg_integer,
          block_height: non_neg_integer,
          block_hash: String.t(),
          synced_to_chain: boolean,
          testnet: boolean,
          chains: [String.t()],
          uris: [String.t()],
          best_header_timestamp: integer,
          version: String.t()
        }
  defstruct [
    :identity_pubkey,
    :alias,
    :num_pending_channels,
    :num_active_channels,
    :num_peers,
    :block_height,
    :block_hash,
    :synced_to_chain,
    :testnet,
    :chains,
    :uris,
    :best_header_timestamp,
    :version
  ]
  field(:identity_pubkey, 1, type: :string)
  field(:alias, 2, type: :string)
  field(:num_pending_channels, 3, type: :uint32)
  field(:num_active_channels, 4, type: :uint32)
  field(:num_peers, 5, type: :uint32)
  field(:block_height, 6, type: :uint32)
  field(:block_hash, 8, type: :string)
  field(:synced_to_chain, 9, type: :bool)
  field(:testnet, 10, type: :bool)
  field(:chains, 11, repeated: true, type: :string)
  field(:uris, 12, repeated: true, type: :string)
  field(:best_header_timestamp, 13, type: :int64)
  field(:version, 14, type: :string)
end
defmodule Lnrpc.ConfirmationUpdate do
  @moduledoc false
  # Proto3 message: confirmation progress — block hash bytes, height, confs remaining.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          block_sha: String.t(),
          block_height: integer,
          num_confs_left: non_neg_integer
        }
  defstruct [:block_sha, :block_height, :num_confs_left]
  field(:block_sha, 1, type: :bytes)
  field(:block_height, 2, type: :int32)
  field(:num_confs_left, 3, type: :uint32)
end
defmodule Lnrpc.ChannelOpenUpdate do
  @moduledoc false
  # Proto3 message: wraps the ChannelPoint of the newly opened channel.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channel_point: Lnrpc.ChannelPoint.t()
        }
  defstruct [:channel_point]
  field(:channel_point, 1, type: Lnrpc.ChannelPoint)
end
defmodule Lnrpc.ChannelCloseUpdate do
  @moduledoc false
  # Proto3 message: closing txid bytes plus success flag.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          closing_txid: String.t(),
          success: boolean
        }
  defstruct [:closing_txid, :success]
  field(:closing_txid, 1, type: :bytes)
  field(:success, 2, type: :bool)
end
defmodule Lnrpc.CloseChannelRequest do
  @moduledoc false
  # Proto3 message: channel to close (by ChannelPoint), force flag, fee parameters.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channel_point: Lnrpc.ChannelPoint.t(),
          force: boolean,
          target_conf: integer,
          sat_per_byte: integer
        }
  defstruct [:channel_point, :force, :target_conf, :sat_per_byte]
  field(:channel_point, 1, type: Lnrpc.ChannelPoint)
  field(:force, 2, type: :bool)
  field(:target_conf, 3, type: :int32)
  field(:sat_per_byte, 4, type: :int64)
end
defmodule Lnrpc.CloseStatusUpdate do
  @moduledoc false
  # Proto3 message: `update` oneof over pending / confirmation / final-close stages.
  # References Lnrpc.PendingUpdate, which is defined below — module names in
  # field/3 are atoms resolved at runtime, so forward references are fine.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          update: {atom, any}
        }
  defstruct [:update]
  oneof(:update, 0)
  field(:close_pending, 1, type: Lnrpc.PendingUpdate, oneof: 0)
  field(:confirmation, 2, type: Lnrpc.ConfirmationUpdate, oneof: 0)
  field(:chan_close, 3, type: Lnrpc.ChannelCloseUpdate, oneof: 0)
end
defmodule Lnrpc.PendingUpdate do
  @moduledoc false
  # Proto3 message: txid bytes plus output index of a pending operation.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          txid: String.t(),
          output_index: non_neg_integer
        }
  defstruct [:txid, :output_index]
  field(:txid, 1, type: :bytes)
  field(:output_index, 2, type: :uint32)
end
defmodule Lnrpc.OpenChannelRequest do
  @moduledoc false
  # Proto3 message: open-channel parameters. Tags start at 2 — tag 1 is unused,
  # presumably reserved in the source proto.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          node_pubkey: String.t(),
          node_pubkey_string: String.t(),
          local_funding_amount: integer,
          push_sat: integer,
          target_conf: integer,
          sat_per_byte: integer,
          private: boolean,
          min_htlc_msat: integer,
          remote_csv_delay: non_neg_integer,
          min_confs: integer,
          spend_unconfirmed: boolean
        }
  defstruct [
    :node_pubkey,
    :node_pubkey_string,
    :local_funding_amount,
    :push_sat,
    :target_conf,
    :sat_per_byte,
    :private,
    :min_htlc_msat,
    :remote_csv_delay,
    :min_confs,
    :spend_unconfirmed
  ]
  field(:node_pubkey, 2, type: :bytes)
  field(:node_pubkey_string, 3, type: :string)
  field(:local_funding_amount, 4, type: :int64)
  field(:push_sat, 5, type: :int64)
  field(:target_conf, 6, type: :int32)
  field(:sat_per_byte, 7, type: :int64)
  field(:private, 8, type: :bool)
  field(:min_htlc_msat, 9, type: :int64)
  field(:remote_csv_delay, 10, type: :uint32)
  field(:min_confs, 11, type: :int32)
  field(:spend_unconfirmed, 12, type: :bool)
end
defmodule Lnrpc.OpenStatusUpdate do
  @moduledoc false
  # Proto3 message: `update` oneof over pending / confirmation / open stages
  # (parallels CloseStatusUpdate).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          update: {atom, any}
        }
  defstruct [:update]
  oneof(:update, 0)
  field(:chan_pending, 1, type: Lnrpc.PendingUpdate, oneof: 0)
  field(:confirmation, 2, type: Lnrpc.ConfirmationUpdate, oneof: 0)
  field(:chan_open, 3, type: Lnrpc.ChannelOpenUpdate, oneof: 0)
end
defmodule Lnrpc.PendingHTLC do
  @moduledoc false
  # Proto3 message: HTLC awaiting resolution — direction, amount, outpoint, maturity, stage.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          incoming: boolean,
          amount: integer,
          outpoint: String.t(),
          maturity_height: non_neg_integer,
          blocks_til_maturity: integer,
          stage: non_neg_integer
        }
  defstruct [:incoming, :amount, :outpoint, :maturity_height, :blocks_til_maturity, :stage]
  field(:incoming, 1, type: :bool)
  field(:amount, 2, type: :int64)
  field(:outpoint, 3, type: :string)
  field(:maturity_height, 4, type: :uint32)
  field(:blocks_til_maturity, 5, type: :int32)
  field(:stage, 6, type: :uint32)
end
defmodule Lnrpc.PendingChannelsRequest do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.PendingChannelsResponse do
  @moduledoc false
  # Proto3 message: total limbo balance plus four repeated lists of pending-channel
  # variants (nested messages defined below).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          total_limbo_balance: integer,
          pending_open_channels: [Lnrpc.PendingChannelsResponse.PendingOpenChannel.t()],
          pending_closing_channels: [Lnrpc.PendingChannelsResponse.ClosedChannel.t()],
          pending_force_closing_channels: [Lnrpc.PendingChannelsResponse.ForceClosedChannel.t()],
          waiting_close_channels: [Lnrpc.PendingChannelsResponse.WaitingCloseChannel.t()]
        }
  defstruct [
    :total_limbo_balance,
    :pending_open_channels,
    :pending_closing_channels,
    :pending_force_closing_channels,
    :waiting_close_channels
  ]
  field(:total_limbo_balance, 1, type: :int64)
  field(:pending_open_channels, 2,
    repeated: true,
    type: Lnrpc.PendingChannelsResponse.PendingOpenChannel
  )
  field(:pending_closing_channels, 3,
    repeated: true,
    type: Lnrpc.PendingChannelsResponse.ClosedChannel
  )
  field(:pending_force_closing_channels, 4,
    repeated: true,
    type: Lnrpc.PendingChannelsResponse.ForceClosedChannel
  )
  field(:waiting_close_channels, 5,
    repeated: true,
    type: Lnrpc.PendingChannelsResponse.WaitingCloseChannel
  )
end
defmodule Lnrpc.PendingChannelsResponse.PendingChannel do
  @moduledoc false
  # Proto3 message: common fields shared by all pending-channel variants.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          remote_node_pub: String.t(),
          channel_point: String.t(),
          capacity: integer,
          local_balance: integer,
          remote_balance: integer
        }
  defstruct [:remote_node_pub, :channel_point, :capacity, :local_balance, :remote_balance]
  field(:remote_node_pub, 1, type: :string)
  field(:channel_point, 2, type: :string)
  field(:capacity, 3, type: :int64)
  field(:local_balance, 4, type: :int64)
  field(:remote_balance, 5, type: :int64)
end
defmodule Lnrpc.PendingChannelsResponse.PendingOpenChannel do
  @moduledoc false
  # Proto3 message: pending-open variant. Tag 3 is skipped — presumably reserved
  # in the source proto.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channel: Lnrpc.PendingChannelsResponse.PendingChannel.t(),
          confirmation_height: non_neg_integer,
          commit_fee: integer,
          commit_weight: integer,
          fee_per_kw: integer
        }
  defstruct [:channel, :confirmation_height, :commit_fee, :commit_weight, :fee_per_kw]
  field(:channel, 1, type: Lnrpc.PendingChannelsResponse.PendingChannel)
  field(:confirmation_height, 2, type: :uint32)
  field(:commit_fee, 4, type: :int64)
  field(:commit_weight, 5, type: :int64)
  field(:fee_per_kw, 6, type: :int64)
end
defmodule Lnrpc.PendingChannelsResponse.WaitingCloseChannel do
  @moduledoc false
  # Proto3 message: waiting-close variant — base channel plus limbo balance.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channel: Lnrpc.PendingChannelsResponse.PendingChannel.t(),
          limbo_balance: integer
        }
  defstruct [:channel, :limbo_balance]
  field(:channel, 1, type: Lnrpc.PendingChannelsResponse.PendingChannel)
  field(:limbo_balance, 2, type: :int64)
end
defmodule Lnrpc.PendingChannelsResponse.ClosedChannel do
  @moduledoc false
  # Proto3 message: cooperatively-closing variant — base channel plus closing txid.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channel: Lnrpc.PendingChannelsResponse.PendingChannel.t(),
          closing_txid: String.t()
        }
  defstruct [:channel, :closing_txid]
  field(:channel, 1, type: Lnrpc.PendingChannelsResponse.PendingChannel)
  field(:closing_txid, 2, type: :string)
end
defmodule Lnrpc.PendingChannelsResponse.ForceClosedChannel do
  @moduledoc false
  # Proto3 message: force-close variant with maturity/recovery info and pending HTLCs.
  # Tag 7 is skipped — presumably reserved in the source proto.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channel: Lnrpc.PendingChannelsResponse.PendingChannel.t(),
          closing_txid: String.t(),
          limbo_balance: integer,
          maturity_height: non_neg_integer,
          blocks_til_maturity: integer,
          recovered_balance: integer,
          pending_htlcs: [Lnrpc.PendingHTLC.t()]
        }
  defstruct [
    :channel,
    :closing_txid,
    :limbo_balance,
    :maturity_height,
    :blocks_til_maturity,
    :recovered_balance,
    :pending_htlcs
  ]
  field(:channel, 1, type: Lnrpc.PendingChannelsResponse.PendingChannel)
  field(:closing_txid, 2, type: :string)
  field(:limbo_balance, 3, type: :int64)
  field(:maturity_height, 4, type: :uint32)
  field(:blocks_til_maturity, 5, type: :int32)
  field(:recovered_balance, 6, type: :int64)
  field(:pending_htlcs, 8, repeated: true, type: Lnrpc.PendingHTLC)
end
defmodule Lnrpc.WalletBalanceRequest do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.WalletBalanceResponse do
  @moduledoc false
  # Proto3 message: total / confirmed / unconfirmed wallet balances (int64).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          total_balance: integer,
          confirmed_balance: integer,
          unconfirmed_balance: integer
        }
  defstruct [:total_balance, :confirmed_balance, :unconfirmed_balance]
  field(:total_balance, 1, type: :int64)
  field(:confirmed_balance, 2, type: :int64)
  field(:unconfirmed_balance, 3, type: :int64)
end
defmodule Lnrpc.ChannelBalanceRequest do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.ChannelBalanceResponse do
  @moduledoc false
  # Proto3 message: open-channel balance plus balance in pending-open channels.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          balance: integer,
          pending_open_balance: integer
        }
  defstruct [:balance, :pending_open_balance]
  field(:balance, 1, type: :int64)
  field(:pending_open_balance, 2, type: :int64)
end
defmodule Lnrpc.QueryRoutesRequest do
  @moduledoc false
  # Proto3 message: route query — destination pubkey, amount, route count, CLTV delta, fee cap.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          pub_key: String.t(),
          amt: integer,
          num_routes: integer,
          final_cltv_delta: integer,
          fee_limit: Lnrpc.FeeLimit.t()
        }
  defstruct [:pub_key, :amt, :num_routes, :final_cltv_delta, :fee_limit]
  field(:pub_key, 1, type: :string)
  field(:amt, 2, type: :int64)
  field(:num_routes, 3, type: :int32)
  field(:final_cltv_delta, 4, type: :int32)
  field(:fee_limit, 5, type: Lnrpc.FeeLimit)
end
defmodule Lnrpc.QueryRoutesResponse do
  @moduledoc false
  # Proto3 message: repeated Route list.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          routes: [Lnrpc.Route.t()]
        }
  defstruct [:routes]
  field(:routes, 1, repeated: true, type: Lnrpc.Route)
end
defmodule Lnrpc.Hop do
  @moduledoc false
  # Proto3 message: single route hop. Fields 3 and 4 are marked deprecated in
  # favor of the msat variants (tags 6 and 7).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          chan_id: non_neg_integer,
          chan_capacity: integer,
          amt_to_forward: integer,
          fee: integer,
          expiry: non_neg_integer,
          amt_to_forward_msat: integer,
          fee_msat: integer
        }
  defstruct [
    :chan_id,
    :chan_capacity,
    :amt_to_forward,
    :fee,
    :expiry,
    :amt_to_forward_msat,
    :fee_msat
  ]
  field(:chan_id, 1, type: :uint64)
  field(:chan_capacity, 2, type: :int64)
  field(:amt_to_forward, 3, type: :int64, deprecated: true)
  field(:fee, 4, type: :int64, deprecated: true)
  field(:expiry, 5, type: :uint32)
  field(:amt_to_forward_msat, 6, type: :int64)
  field(:fee_msat, 7, type: :int64)
end
defmodule Lnrpc.Route do
  @moduledoc false
  # Proto3 message: full payment route. total_fees/total_amt (tags 2, 3) are
  # deprecated in favor of the msat variants (tags 5, 6).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          total_time_lock: non_neg_integer,
          total_fees: integer,
          total_amt: integer,
          hops: [Lnrpc.Hop.t()],
          total_fees_msat: integer,
          total_amt_msat: integer
        }
  defstruct [:total_time_lock, :total_fees, :total_amt, :hops, :total_fees_msat, :total_amt_msat]
  field(:total_time_lock, 1, type: :uint32)
  field(:total_fees, 2, type: :int64, deprecated: true)
  field(:total_amt, 3, type: :int64, deprecated: true)
  field(:hops, 4, repeated: true, type: Lnrpc.Hop)
  field(:total_fees_msat, 5, type: :int64)
  field(:total_amt_msat, 6, type: :int64)
end
defmodule Lnrpc.NodeInfoRequest do
  @moduledoc false
  # Proto3 message: node lookup by pub_key string.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          pub_key: String.t()
        }
  defstruct [:pub_key]
  field(:pub_key, 1, type: :string)
end
defmodule Lnrpc.NodeInfo do
  @moduledoc false
  # Proto3 message: a LightningNode plus its channel count and total capacity.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          node: Lnrpc.LightningNode.t(),
          num_channels: non_neg_integer,
          total_capacity: integer
        }
  defstruct [:node, :num_channels, :total_capacity]
  field(:node, 1, type: Lnrpc.LightningNode)
  field(:num_channels, 2, type: :uint32)
  field(:total_capacity, 3, type: :int64)
end
defmodule Lnrpc.LightningNode do
  @moduledoc false
  # Proto3 message: graph node — last-update timestamp, pubkey, alias, addresses, color.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          last_update: non_neg_integer,
          pub_key: String.t(),
          alias: String.t(),
          addresses: [Lnrpc.NodeAddress.t()],
          color: String.t()
        }
  defstruct [:last_update, :pub_key, :alias, :addresses, :color]
  field(:last_update, 1, type: :uint32)
  field(:pub_key, 2, type: :string)
  field(:alias, 3, type: :string)
  field(:addresses, 4, repeated: true, type: Lnrpc.NodeAddress)
  field(:color, 5, type: :string)
end
defmodule Lnrpc.NodeAddress do
  @moduledoc false
  # Proto3 message: network name plus address string.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          network: String.t(),
          addr: String.t()
        }
  defstruct [:network, :addr]
  field(:network, 1, type: :string)
  field(:addr, 2, type: :string)
end
defmodule Lnrpc.RoutingPolicy do
  @moduledoc false
  # Proto3 message: per-direction channel policy — timelock delta, HTLC minimum,
  # fee parameters, disabled flag.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          time_lock_delta: non_neg_integer,
          min_htlc: integer,
          fee_base_msat: integer,
          fee_rate_milli_msat: integer,
          disabled: boolean
        }
  defstruct [:time_lock_delta, :min_htlc, :fee_base_msat, :fee_rate_milli_msat, :disabled]
  field(:time_lock_delta, 1, type: :uint32)
  field(:min_htlc, 2, type: :int64)
  field(:fee_base_msat, 3, type: :int64)
  field(:fee_rate_milli_msat, 4, type: :int64)
  field(:disabled, 5, type: :bool)
end
defmodule Lnrpc.ChannelEdge do
  @moduledoc false
  # Proto3 message: graph edge — both endpoints' pubkeys, capacity, and a
  # RoutingPolicy for each direction.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          channel_id: non_neg_integer,
          chan_point: String.t(),
          last_update: non_neg_integer,
          node1_pub: String.t(),
          node2_pub: String.t(),
          capacity: integer,
          node1_policy: Lnrpc.RoutingPolicy.t(),
          node2_policy: Lnrpc.RoutingPolicy.t()
        }
  defstruct [
    :channel_id,
    :chan_point,
    :last_update,
    :node1_pub,
    :node2_pub,
    :capacity,
    :node1_policy,
    :node2_policy
  ]
  field(:channel_id, 1, type: :uint64)
  field(:chan_point, 2, type: :string)
  field(:last_update, 3, type: :uint32)
  field(:node1_pub, 4, type: :string)
  field(:node2_pub, 5, type: :string)
  field(:capacity, 6, type: :int64)
  field(:node1_policy, 7, type: Lnrpc.RoutingPolicy)
  field(:node2_policy, 8, type: Lnrpc.RoutingPolicy)
end
defmodule Lnrpc.ChannelGraphRequest do
  @moduledoc false
  # Proto3 message: single flag controlling inclusion of unannounced channels.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          include_unannounced: boolean
        }
  defstruct [:include_unannounced]
  field(:include_unannounced, 1, type: :bool)
end
defmodule Lnrpc.ChannelGraph do
  @moduledoc false
  # Proto3 message: full graph snapshot — nodes plus edges.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          nodes: [Lnrpc.LightningNode.t()],
          edges: [Lnrpc.ChannelEdge.t()]
        }
  defstruct [:nodes, :edges]
  field(:nodes, 1, repeated: true, type: Lnrpc.LightningNode)
  field(:edges, 2, repeated: true, type: Lnrpc.ChannelEdge)
end
defmodule Lnrpc.ChanInfoRequest do
  @moduledoc false
  # Proto3 message: channel lookup by uint64 chan_id.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          chan_id: non_neg_integer
        }
  defstruct [:chan_id]
  field(:chan_id, 1, type: :uint64)
end
defmodule Lnrpc.NetworkInfoRequest do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.NetworkInfo do
  @moduledoc false
  # Proto3 message: aggregate graph statistics (degree averages are doubles).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          graph_diameter: non_neg_integer,
          avg_out_degree: float,
          max_out_degree: non_neg_integer,
          num_nodes: non_neg_integer,
          num_channels: non_neg_integer,
          total_network_capacity: integer,
          avg_channel_size: float,
          min_channel_size: integer,
          max_channel_size: integer
        }
  defstruct [
    :graph_diameter,
    :avg_out_degree,
    :max_out_degree,
    :num_nodes,
    :num_channels,
    :total_network_capacity,
    :avg_channel_size,
    :min_channel_size,
    :max_channel_size
  ]
  field(:graph_diameter, 1, type: :uint32)
  field(:avg_out_degree, 2, type: :double)
  field(:max_out_degree, 3, type: :uint32)
  field(:num_nodes, 4, type: :uint32)
  field(:num_channels, 5, type: :uint32)
  field(:total_network_capacity, 6, type: :int64)
  field(:avg_channel_size, 7, type: :double)
  field(:min_channel_size, 8, type: :int64)
  field(:max_channel_size, 9, type: :int64)
end
defmodule Lnrpc.StopRequest do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.StopResponse do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.GraphTopologySubscription do
  @moduledoc false
  # Proto3 message with no fields.
  use Protobuf, syntax: :proto3
  defstruct []
end
defmodule Lnrpc.GraphTopologyUpdate do
  @moduledoc false
  # Proto3 message: batched graph delta — node updates, edge updates, channel closes.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          node_updates: [Lnrpc.NodeUpdate.t()],
          channel_updates: [Lnrpc.ChannelEdgeUpdate.t()],
          closed_chans: [Lnrpc.ClosedChannelUpdate.t()]
        }
  defstruct [:node_updates, :channel_updates, :closed_chans]
  field(:node_updates, 1, repeated: true, type: Lnrpc.NodeUpdate)
  field(:channel_updates, 2, repeated: true, type: Lnrpc.ChannelEdgeUpdate)
  field(:closed_chans, 3, repeated: true, type: Lnrpc.ClosedChannelUpdate)
end
defmodule Lnrpc.NodeUpdate do
  @moduledoc false
  # Proto3 message: node announcement delta; global_features is raw bytes.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          addresses: [String.t()],
          identity_key: String.t(),
          global_features: String.t(),
          alias: String.t()
        }
  defstruct [:addresses, :identity_key, :global_features, :alias]
  field(:addresses, 1, repeated: true, type: :string)
  field(:identity_key, 2, type: :string)
  field(:global_features, 3, type: :bytes)
  field(:alias, 4, type: :string)
end
defmodule Lnrpc.ChannelEdgeUpdate do
  @moduledoc false
  # Proto3 message: channel policy/announcement delta with advertising and
  # connecting node pubkeys.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          chan_id: non_neg_integer,
          chan_point: Lnrpc.ChannelPoint.t(),
          capacity: integer,
          routing_policy: Lnrpc.RoutingPolicy.t(),
          advertising_node: String.t(),
          connecting_node: String.t()
        }
  defstruct [
    :chan_id,
    :chan_point,
    :capacity,
    :routing_policy,
    :advertising_node,
    :connecting_node
  ]
  field(:chan_id, 1, type: :uint64)
  field(:chan_point, 2, type: Lnrpc.ChannelPoint)
  field(:capacity, 3, type: :int64)
  field(:routing_policy, 4, type: Lnrpc.RoutingPolicy)
  field(:advertising_node, 5, type: :string)
  field(:connecting_node, 6, type: :string)
end
defmodule Lnrpc.ClosedChannelUpdate do
  @moduledoc false
  # Proto3 message: notification of a channel closed at closed_height.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          chan_id: non_neg_integer,
          capacity: integer,
          closed_height: non_neg_integer,
          chan_point: Lnrpc.ChannelPoint.t()
        }
  defstruct [:chan_id, :capacity, :closed_height, :chan_point]
  field(:chan_id, 1, type: :uint64)
  field(:capacity, 2, type: :int64)
  field(:closed_height, 3, type: :uint32)
  field(:chan_point, 4, type: Lnrpc.ChannelPoint)
end
defmodule Lnrpc.HopHint do
  @moduledoc false
  # Proto3 message: private-route hop hint — node, channel, fee and CLTV parameters.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          node_id: String.t(),
          chan_id: non_neg_integer,
          fee_base_msat: non_neg_integer,
          fee_proportional_millionths: non_neg_integer,
          cltv_expiry_delta: non_neg_integer
        }
  defstruct [:node_id, :chan_id, :fee_base_msat, :fee_proportional_millionths, :cltv_expiry_delta]
  field(:node_id, 1, type: :string)
  field(:chan_id, 2, type: :uint64)
  field(:fee_base_msat, 3, type: :uint32)
  field(:fee_proportional_millionths, 4, type: :uint32)
  field(:cltv_expiry_delta, 5, type: :uint32)
end
defmodule Lnrpc.RouteHint do
  @moduledoc false
  # Proto3 message: ordered list of HopHints forming one hinted route.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          hop_hints: [Lnrpc.HopHint.t()]
        }
  defstruct [:hop_hints]
  field(:hop_hints, 1, repeated: true, type: Lnrpc.HopHint)
end
defmodule Lnrpc.Invoice do
  @moduledoc false
  # Proto3 message: invoice record (tags 1-20). amt_paid (tag 18) is deprecated
  # in favor of amt_paid_sat / amt_paid_msat; bytes fields (receipt, preimage,
  # hash, description_hash) are typed String.t() in the generated spec.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          memo: String.t(),
          receipt: String.t(),
          r_preimage: String.t(),
          r_hash: String.t(),
          value: integer,
          settled: boolean,
          creation_date: integer,
          settle_date: integer,
          payment_request: String.t(),
          description_hash: String.t(),
          expiry: integer,
          fallback_addr: String.t(),
          cltv_expiry: non_neg_integer,
          route_hints: [Lnrpc.RouteHint.t()],
          private: boolean,
          add_index: non_neg_integer,
          settle_index: non_neg_integer,
          amt_paid: integer,
          amt_paid_sat: integer,
          amt_paid_msat: integer
        }
  defstruct [
    :memo,
    :receipt,
    :r_preimage,
    :r_hash,
    :value,
    :settled,
    :creation_date,
    :settle_date,
    :payment_request,
    :description_hash,
    :expiry,
    :fallback_addr,
    :cltv_expiry,
    :route_hints,
    :private,
    :add_index,
    :settle_index,
    :amt_paid,
    :amt_paid_sat,
    :amt_paid_msat
  ]
  field(:memo, 1, type: :string)
  field(:receipt, 2, type: :bytes)
  field(:r_preimage, 3, type: :bytes)
  field(:r_hash, 4, type: :bytes)
  field(:value, 5, type: :int64)
  field(:settled, 6, type: :bool)
  field(:creation_date, 7, type: :int64)
  field(:settle_date, 8, type: :int64)
  field(:payment_request, 9, type: :string)
  field(:description_hash, 10, type: :bytes)
  field(:expiry, 11, type: :int64)
  field(:fallback_addr, 12, type: :string)
  field(:cltv_expiry, 13, type: :uint64)
  field(:route_hints, 14, repeated: true, type: Lnrpc.RouteHint)
  field(:private, 15, type: :bool)
  field(:add_index, 16, type: :uint64)
  field(:settle_index, 17, type: :uint64)
  field(:amt_paid, 18, type: :int64, deprecated: true)
  field(:amt_paid_sat, 19, type: :int64)
  field(:amt_paid_msat, 20, type: :int64)
end
defmodule Lnrpc.AddInvoiceResponse do
  @moduledoc false
  # Proto3 message: invoice-add result. add_index uses tag 16, matching the tag
  # of Invoice.add_index in the source proto.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          r_hash: String.t(),
          payment_request: String.t(),
          add_index: non_neg_integer
        }
  defstruct [:r_hash, :payment_request, :add_index]
  field(:r_hash, 1, type: :bytes)
  field(:payment_request, 2, type: :string)
  field(:add_index, 16, type: :uint64)
end
defmodule Lnrpc.PaymentHash do
  @moduledoc false
  # Proto3 message: payment hash as string (tag 1) or bytes (tag 2).
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          r_hash_str: String.t(),
          r_hash: String.t()
        }
  defstruct [:r_hash_str, :r_hash]
  field(:r_hash_str, 1, type: :string)
  field(:r_hash, 2, type: :bytes)
end
defmodule Lnrpc.ListInvoiceRequest do
  @moduledoc false
  # Proto3 message: paginated invoice listing. Tags 2 and 3 are skipped —
  # presumably reserved in the source proto.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          pending_only: boolean,
          index_offset: non_neg_integer,
          num_max_invoices: non_neg_integer,
          reversed: boolean
        }
  defstruct [:pending_only, :index_offset, :num_max_invoices, :reversed]
  field(:pending_only, 1, type: :bool)
  field(:index_offset, 4, type: :uint64)
  field(:num_max_invoices, 5, type: :uint64)
  field(:reversed, 6, type: :bool)
end
defmodule Lnrpc.ListInvoiceResponse do
  @moduledoc false
  # Proto3 message: invoice page plus last/first index offsets for pagination.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          invoices: [Lnrpc.Invoice.t()],
          last_index_offset: non_neg_integer,
          first_index_offset: non_neg_integer
        }
  defstruct [:invoices, :last_index_offset, :first_index_offset]
  field(:invoices, 1, repeated: true, type: Lnrpc.Invoice)
  field(:last_index_offset, 2, type: :uint64)
  field(:first_index_offset, 3, type: :uint64)
end
defmodule Lnrpc.InvoiceSubscription do
  @moduledoc false
  # Proto3 message: subscription cursor — add and settle index offsets.
  use Protobuf, syntax: :proto3
  @type t :: %__MODULE__{
          add_index: non_neg_integer,
          settle_index: non_neg_integer
        }
  defstruct [:add_index, :settle_index]
  field(:add_index, 1, type: :uint64)
  field(:settle_index, 2, type: :uint64)
end
# Generated protobuf messages for payments (lnd `rpc.proto`). Do not edit by
# hand — regenerate from the .proto instead.
defmodule Lnrpc.Payment do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          payment_hash: String.t(),
          value: integer,
          creation_date: integer,
          path: [String.t()],
          fee: integer,
          payment_preimage: String.t(),
          value_sat: integer,
          value_msat: integer
        }
  defstruct [
    :payment_hash,
    :value,
    :creation_date,
    :path,
    :fee,
    :payment_preimage,
    :value_sat,
    :value_msat
  ]

  field(:payment_hash, 1, type: :string)
  # Deprecated upstream in favour of value_sat / value_msat below.
  field(:value, 2, type: :int64, deprecated: true)
  field(:creation_date, 3, type: :int64)
  field(:path, 4, repeated: true, type: :string)
  field(:fee, 5, type: :int64)
  field(:payment_preimage, 6, type: :string)
  field(:value_sat, 7, type: :int64)
  field(:value_msat, 8, type: :int64)
end

# Empty request/response messages — present so the RPC signatures stay typed.
defmodule Lnrpc.ListPaymentsRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3
  defstruct []
end

defmodule Lnrpc.ListPaymentsResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          payments: [Lnrpc.Payment.t()]
        }
  defstruct [:payments]

  field(:payments, 1, repeated: true, type: Lnrpc.Payment)
end

defmodule Lnrpc.DeleteAllPaymentsRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3
  defstruct []
end

defmodule Lnrpc.DeleteAllPaymentsResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3
  defstruct []
end
# Generated protobuf messages (lnd `rpc.proto`): channel abandonment, debug
# level control and payment-request decoding input.
defmodule Lnrpc.AbandonChannelRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          channel_point: Lnrpc.ChannelPoint.t()
        }
  defstruct [:channel_point]

  field(:channel_point, 1, type: Lnrpc.ChannelPoint)
end

defmodule Lnrpc.AbandonChannelResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3
  defstruct []
end

defmodule Lnrpc.DebugLevelRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          show: boolean,
          level_spec: String.t()
        }
  defstruct [:show, :level_spec]

  field(:show, 1, type: :bool)
  field(:level_spec, 2, type: :string)
end

defmodule Lnrpc.DebugLevelResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          sub_systems: String.t()
        }
  defstruct [:sub_systems]

  field(:sub_systems, 1, type: :string)
end

defmodule Lnrpc.PayReqString do
  @moduledoc false
  use Protobuf, syntax: :proto3

  # Wraps a BOLT-11 payment request string for the DecodePayReq RPC.
  @type t :: %__MODULE__{
          pay_req: String.t()
        }
  defstruct [:pay_req]

  field(:pay_req, 1, type: :string)
end
# Generated protobuf messages (lnd `rpc.proto`): decoded payment requests and
# fee reporting.
defmodule Lnrpc.PayReq do
  @moduledoc false
  use Protobuf, syntax: :proto3

  # Decoded form of a BOLT-11 payment request (DecodePayReq response).
  @type t :: %__MODULE__{
          destination: String.t(),
          payment_hash: String.t(),
          num_satoshis: integer,
          timestamp: integer,
          expiry: integer,
          description: String.t(),
          description_hash: String.t(),
          fallback_addr: String.t(),
          cltv_expiry: integer,
          route_hints: [Lnrpc.RouteHint.t()]
        }
  defstruct [
    :destination,
    :payment_hash,
    :num_satoshis,
    :timestamp,
    :expiry,
    :description,
    :description_hash,
    :fallback_addr,
    :cltv_expiry,
    :route_hints
  ]

  field(:destination, 1, type: :string)
  field(:payment_hash, 2, type: :string)
  field(:num_satoshis, 3, type: :int64)
  field(:timestamp, 4, type: :int64)
  field(:expiry, 5, type: :int64)
  field(:description, 6, type: :string)
  field(:description_hash, 7, type: :string)
  field(:fallback_addr, 8, type: :string)
  field(:cltv_expiry, 9, type: :int64)
  field(:route_hints, 10, repeated: true, type: Lnrpc.RouteHint)
end

defmodule Lnrpc.FeeReportRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3
  defstruct []
end

defmodule Lnrpc.ChannelFeeReport do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          chan_point: String.t(),
          base_fee_msat: integer,
          fee_per_mil: integer,
          fee_rate: float
        }
  defstruct [:chan_point, :base_fee_msat, :fee_per_mil, :fee_rate]

  field(:chan_point, 1, type: :string)
  field(:base_fee_msat, 2, type: :int64)
  field(:fee_per_mil, 3, type: :int64)
  field(:fee_rate, 4, type: :double)
end

defmodule Lnrpc.FeeReportResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          channel_fees: [Lnrpc.ChannelFeeReport.t()],
          day_fee_sum: non_neg_integer,
          week_fee_sum: non_neg_integer,
          month_fee_sum: non_neg_integer
        }
  defstruct [:channel_fees, :day_fee_sum, :week_fee_sum, :month_fee_sum]

  field(:channel_fees, 1, repeated: true, type: Lnrpc.ChannelFeeReport)
  field(:day_fee_sum, 2, type: :uint64)
  field(:week_fee_sum, 3, type: :uint64)
  field(:month_fee_sum, 4, type: :uint64)
end
# Generated protobuf messages (lnd `rpc.proto`): channel-policy updates and
# forwarding history.
defmodule Lnrpc.PolicyUpdateRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  # `scope` is a proto3 oneof: either `:global` (bool) or `:chan_point`.
  @type t :: %__MODULE__{
          scope: {atom, any},
          base_fee_msat: integer,
          fee_rate: float,
          time_lock_delta: non_neg_integer
        }
  defstruct [:scope, :base_fee_msat, :fee_rate, :time_lock_delta]

  oneof(:scope, 0)
  field(:global, 1, type: :bool, oneof: 0)
  field(:chan_point, 2, type: Lnrpc.ChannelPoint, oneof: 0)
  field(:base_fee_msat, 3, type: :int64)
  field(:fee_rate, 4, type: :double)
  field(:time_lock_delta, 5, type: :uint32)
end

defmodule Lnrpc.PolicyUpdateResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3
  defstruct []
end

defmodule Lnrpc.ForwardingHistoryRequest do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          start_time: non_neg_integer,
          end_time: non_neg_integer,
          index_offset: non_neg_integer,
          num_max_events: non_neg_integer
        }
  defstruct [:start_time, :end_time, :index_offset, :num_max_events]

  field(:start_time, 1, type: :uint64)
  field(:end_time, 2, type: :uint64)
  field(:index_offset, 3, type: :uint32)
  field(:num_max_events, 4, type: :uint32)
end

defmodule Lnrpc.ForwardingEvent do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          timestamp: non_neg_integer,
          chan_id_in: non_neg_integer,
          chan_id_out: non_neg_integer,
          amt_in: non_neg_integer,
          amt_out: non_neg_integer,
          fee: non_neg_integer
        }
  defstruct [:timestamp, :chan_id_in, :chan_id_out, :amt_in, :amt_out, :fee]

  field(:timestamp, 1, type: :uint64)
  field(:chan_id_in, 2, type: :uint64)
  # Tag 3 is reserved/unused upstream; numbering jumps to 4 on purpose.
  field(:chan_id_out, 4, type: :uint64)
  field(:amt_in, 5, type: :uint64)
  field(:amt_out, 6, type: :uint64)
  field(:fee, 7, type: :uint64)
end

defmodule Lnrpc.ForwardingHistoryResponse do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          forwarding_events: [Lnrpc.ForwardingEvent.t()],
          last_offset_index: non_neg_integer
        }
  defstruct [:forwarding_events, :last_offset_index]

  field(:forwarding_events, 1, repeated: true, type: Lnrpc.ForwardingEvent)
  field(:last_offset_index, 2, type: :uint32)
end
# Generated gRPC service/stub definitions for lnd's WalletUnlocker and
# Lightning services. `stream(...)` marks a streaming side of an RPC.
defmodule Lnrpc.WalletUnlocker.Service do
  @moduledoc false
  use GRPC.Service, name: "lnrpc.WalletUnlocker"

  rpc(:GenSeed, Lnrpc.GenSeedRequest, Lnrpc.GenSeedResponse)
  rpc(:InitWallet, Lnrpc.InitWalletRequest, Lnrpc.InitWalletResponse)
  rpc(:UnlockWallet, Lnrpc.UnlockWalletRequest, Lnrpc.UnlockWalletResponse)
  rpc(:ChangePassword, Lnrpc.ChangePasswordRequest, Lnrpc.ChangePasswordResponse)
end

defmodule Lnrpc.WalletUnlocker.Stub do
  @moduledoc false
  use GRPC.Stub, service: Lnrpc.WalletUnlocker.Service
end

defmodule Lnrpc.Lightning.Service do
  @moduledoc false
  use GRPC.Service, name: "lnrpc.Lightning"

  rpc(:WalletBalance, Lnrpc.WalletBalanceRequest, Lnrpc.WalletBalanceResponse)
  rpc(:ChannelBalance, Lnrpc.ChannelBalanceRequest, Lnrpc.ChannelBalanceResponse)
  rpc(:GetTransactions, Lnrpc.GetTransactionsRequest, Lnrpc.TransactionDetails)
  rpc(:SendCoins, Lnrpc.SendCoinsRequest, Lnrpc.SendCoinsResponse)
  rpc(:SubscribeTransactions, Lnrpc.GetTransactionsRequest, stream(Lnrpc.Transaction))
  rpc(:SendMany, Lnrpc.SendManyRequest, Lnrpc.SendManyResponse)
  rpc(:NewAddress, Lnrpc.NewAddressRequest, Lnrpc.NewAddressResponse)
  rpc(:SignMessage, Lnrpc.SignMessageRequest, Lnrpc.SignMessageResponse)
  rpc(:VerifyMessage, Lnrpc.VerifyMessageRequest, Lnrpc.VerifyMessageResponse)
  rpc(:ConnectPeer, Lnrpc.ConnectPeerRequest, Lnrpc.ConnectPeerResponse)
  rpc(:DisconnectPeer, Lnrpc.DisconnectPeerRequest, Lnrpc.DisconnectPeerResponse)
  rpc(:ListPeers, Lnrpc.ListPeersRequest, Lnrpc.ListPeersResponse)
  rpc(:GetInfo, Lnrpc.GetInfoRequest, Lnrpc.GetInfoResponse)
  rpc(:PendingChannels, Lnrpc.PendingChannelsRequest, Lnrpc.PendingChannelsResponse)
  rpc(:ListChannels, Lnrpc.ListChannelsRequest, Lnrpc.ListChannelsResponse)
  rpc(:ClosedChannels, Lnrpc.ClosedChannelsRequest, Lnrpc.ClosedChannelsResponse)
  rpc(:OpenChannelSync, Lnrpc.OpenChannelRequest, Lnrpc.ChannelPoint)
  rpc(:OpenChannel, Lnrpc.OpenChannelRequest, stream(Lnrpc.OpenStatusUpdate))
  rpc(:CloseChannel, Lnrpc.CloseChannelRequest, stream(Lnrpc.CloseStatusUpdate))
  rpc(:AbandonChannel, Lnrpc.AbandonChannelRequest, Lnrpc.AbandonChannelResponse)
  # Bidirectional streaming endpoints.
  rpc(:SendPayment, stream(Lnrpc.SendRequest), stream(Lnrpc.SendResponse))
  rpc(:SendPaymentSync, Lnrpc.SendRequest, Lnrpc.SendResponse)
  rpc(:SendToRoute, stream(Lnrpc.SendToRouteRequest), stream(Lnrpc.SendResponse))
  rpc(:SendToRouteSync, Lnrpc.SendToRouteRequest, Lnrpc.SendResponse)
  rpc(:AddInvoice, Lnrpc.Invoice, Lnrpc.AddInvoiceResponse)
  rpc(:ListInvoices, Lnrpc.ListInvoiceRequest, Lnrpc.ListInvoiceResponse)
  rpc(:LookupInvoice, Lnrpc.PaymentHash, Lnrpc.Invoice)
  rpc(:SubscribeInvoices, Lnrpc.InvoiceSubscription, stream(Lnrpc.Invoice))
  rpc(:DecodePayReq, Lnrpc.PayReqString, Lnrpc.PayReq)
  rpc(:ListPayments, Lnrpc.ListPaymentsRequest, Lnrpc.ListPaymentsResponse)
  rpc(:DeleteAllPayments, Lnrpc.DeleteAllPaymentsRequest, Lnrpc.DeleteAllPaymentsResponse)
  rpc(:DescribeGraph, Lnrpc.ChannelGraphRequest, Lnrpc.ChannelGraph)
  rpc(:GetChanInfo, Lnrpc.ChanInfoRequest, Lnrpc.ChannelEdge)
  rpc(:GetNodeInfo, Lnrpc.NodeInfoRequest, Lnrpc.NodeInfo)
  rpc(:QueryRoutes, Lnrpc.QueryRoutesRequest, Lnrpc.QueryRoutesResponse)
  rpc(:GetNetworkInfo, Lnrpc.NetworkInfoRequest, Lnrpc.NetworkInfo)
  rpc(:StopDaemon, Lnrpc.StopRequest, Lnrpc.StopResponse)
  rpc(:SubscribeChannelGraph, Lnrpc.GraphTopologySubscription, stream(Lnrpc.GraphTopologyUpdate))
  rpc(:DebugLevel, Lnrpc.DebugLevelRequest, Lnrpc.DebugLevelResponse)
  rpc(:FeeReport, Lnrpc.FeeReportRequest, Lnrpc.FeeReportResponse)
  rpc(:UpdateChannelPolicy, Lnrpc.PolicyUpdateRequest, Lnrpc.PolicyUpdateResponse)
  rpc(:ForwardingHistory, Lnrpc.ForwardingHistoryRequest, Lnrpc.ForwardingHistoryResponse)
end

defmodule Lnrpc.Lightning.Stub do
  @moduledoc false
  use GRPC.Stub, service: Lnrpc.Lightning.Service
end
|
lib/lnrpc/rpc.pb.ex
| 0.829008
| 0.451568
|
rpc.pb.ex
|
starcoder
|
defmodule Geocoder.Providers.GoogleMaps do
  @moduledoc """
  Geocoder provider backed by the Google Maps Geocoding API.

  Builds requests against `maps/api/geocode/json`, decodes the JSON body and
  converts results into `Geocoder.Coords`, `Geocoder.Bounds` and
  `Geocoder.Location` structs.
  """

  use HTTPoison.Base
  use Towel

  @endpoint "https://maps.googleapis.com/"
  @path "maps/api/geocode/json"

  # Query parameters understood by the Google Geocoding API; anything else the
  # caller passes is dropped before the request is made.
  @accepted_opts [
    :key,
    :address,
    :components,
    :bounds,
    :language,
    :region,
    :latlng,
    :placeid,
    :result_type,
    :location_type
  ]

  @doc "Geocodes the given opts (e.g. `:address`) and returns the first result."
  def geocode(opts) do
    request(@path, extract_opts(opts))
    |> fmap(&parse_geocode/1)
  end

  @doc "Geocodes the given opts and returns all results."
  def geocode_list(opts) do
    request_all(@path, extract_opts(opts))
    |> fmap(fn results -> Enum.map(results, &parse_geocode/1) end)
  end

  @doc "Reverse-geocodes the given opts (e.g. `:latlng`), first result only."
  def reverse_geocode(opts) do
    request(@path, extract_opts(opts))
    |> fmap(&parse_reverse_geocode/1)
  end

  @doc "Reverse-geocodes the given opts and returns all results."
  def reverse_geocode_list(opts) do
    request_all(@path, extract_opts(opts))
    |> fmap(fn results -> Enum.map(results, &parse_reverse_geocode/1) end)
  end

  # Whitelists the API parameters and normalizes :latlng for the query string.
  defp extract_opts(opts) do
    opts
    |> Keyword.take(@accepted_opts)
    |> normalize_latlng()
  end

  # The API expects `latlng` as a "lat,lng" string. A `{lat, lng}` tuple is
  # formatted, a nil value is removed entirely, anything else passes through.
  # FIX: replaces the previous Keyword.update/4 (which inserted a nil default
  # for an absent key) + Keyword.delete/3 combination; Keyword.delete/3
  # (key-and-value) is deprecated.
  defp normalize_latlng(opts) do
    case Keyword.fetch(opts, :latlng) do
      {:ok, {lat, lng}} -> Keyword.put(opts, :latlng, "#{lat},#{lng}")
      {:ok, nil} -> Keyword.delete(opts, :latlng)
      _ -> opts
    end
  end

  # A forward-geocode result: coordinates plus bounds and location details.
  defp parse_geocode(response) do
    coords = geocode_coords(response)
    bounds = geocode_bounds(response)
    location = geocode_location(response)
    %{coords | bounds: bounds, location: location}
  end

  # A reverse-geocode result: coordinates plus location details (no bounds).
  defp parse_reverse_geocode(response) do
    coords = geocode_coords(response)
    location = geocode_location(response)
    %{coords | location: location}
  end

  defp geocode_coords(%{"geometry" => %{"location" => coords}}) do
    %{"lat" => lat, "lng" => lon} = coords
    %Geocoder.Coords{lat: lat, lon: lon}
  end

  defp geocode_bounds(%{"geometry" => %{"bounds" => bounds}}) do
    %{
      "northeast" => %{"lat" => north, "lng" => east},
      "southwest" => %{"lat" => south, "lng" => west}
    } = bounds

    %Geocoder.Bounds{top: north, right: east, bottom: south, left: west}
  end

  # Not every result carries a viewport/bounds object.
  defp geocode_bounds(_), do: %Geocoder.Bounds{}

  # Address-component types we care about, in the API's vocabulary.
  @components [
    "locality",
    "administrative_area_level_1",
    "administrative_area_level_2",
    "country",
    "postal_code",
    "street",
    "street_number",
    "route"
  ]

  # API component type -> Geocoder.Location struct key.
  @map %{
    "street_number" => :street_number,
    "route" => :street,
    "street_address" => :street,
    "locality" => :city,
    "administrative_area_level_1" => :state,
    "administrative_area_level_2" => :county,
    "postal_code" => :postal_code,
    "country" => :country
  }

  # Folds the response's address_components into a Geocoder.Location struct.
  defp geocode_location(%{
         "address_components" => components,
         "formatted_address" => formatted_address
       }) do
    name = &Map.get(&1, "long_name")

    type = fn component ->
      component |> Map.get("types") |> Enum.find(&Enum.member?(@components, &1))
    end

    map = &{type.(&1), name.(&1)}

    reduce = fn {type, name}, location ->
      struct(location, [{@map[type], name}])
    end

    # The ISO country code comes from the country component's short_name.
    country =
      Enum.find(components, fn component ->
        component |> Map.get("types") |> Enum.member?("country")
      end)

    country_code =
      case country do
        nil ->
          nil

        %{"short_name" => name} ->
          name
      end

    location = %Geocoder.Location{
      country_code: country_code,
      formatted_address: formatted_address
    }

    components
    |> Enum.filter(type)
    |> Enum.map(map)
    |> Enum.reduce(location, reduce)
  end

  # Performs the HTTP request and unwraps the result list.
  # Returns {:ok, results} | {:error, reason}.
  defp request_all(path, params) do
    params = Keyword.merge(params, key: Application.get_env(:geocoder, :worker)[:key])
    httpoison_options = Application.get_env(:geocoder, Geocoder.Worker)[:httpoison_options] || []

    case get(path, [], Keyword.merge(httpoison_options, params: Enum.into(params, %{}))) do
      # API does not return a non-200 code when there is an error!
      {:ok, %{status_code: 200, body: %{"results" => [], "error_message" => error_message, "status" => _status}}} ->
        {:error, error_message}

      {:ok, %{status_code: 200, body: %{"status" => "OK", "results" => results}}} ->
        {:ok, List.wrap(results)}

      {_, response} ->
        {:error, response}
    end
  end

  @doc "Like `request_all/2` but keeps only the first result."
  def request(path, params) do
    request_all(path, params)
    |> fmap(&List.first/1)
  end

  # HTTPoison.Base callback: prefix every request with the API host.
  def process_url(url) do
    @endpoint <> url
  end

  # HTTPoison.Base callback: decode every response body as JSON.
  def process_response_body(body) do
    body |> Poison.decode!()
  end
end
|
lib/geocoder/providers/google_maps.ex
| 0.599485
| 0.470311
|
google_maps.ex
|
starcoder
|
defmodule Tesla.Middleware.DigestAuth do
  @moduledoc """
  Digest access authentication middleware.
  [Wiki on the topic](https://en.wikipedia.org/wiki/Digest_access_authentication)
  **NOTE**: Currently the implementation is incomplete and works only for MD5 algorithm
  and auth "quality of protection" (qop).
  ## Examples
  ```
  defmodule MyClient do
    use Tesla
    def client(username, password, opts \\ %{}) do
      Tesla.client([
        {Tesla.Middleware.DigestAuth, Map.merge(%{username: username, password: password}, opts)}
      ])
    end
  end
  ```
  ## Options
  - `:username` - username (defaults to `""`)
  - `:password` - password (defaults to `""`)
  - `:cnonce_fn` - custom function generating client nonce (defaults to `&Tesla.Middleware.DigestAuth.cnonce/0`)
  - `:nc` - nonce counter (defaults to `"00000000"`)
  """
  @behaviour Tesla.Middleware

  @impl Tesla.Middleware
  def call(env, next, opts) do
    # The handshake request (issued below from `authorization_vars/2`) carries
    # `digest_auth_handshake: true`, so it falls into this branch and does not
    # trigger a second handshake (recursion guard).
    if env.opts && Keyword.get(env.opts, :digest_auth_handshake) do
      Tesla.run(env, next)
    else
      opts = opts || %{}

      with {:ok, headers} <- authorization_header(env, opts) do
        env
        |> Tesla.put_headers(headers)
        |> Tesla.run(next)
      end
    end
  end

  # Computes the `authorization` header list for the request, or propagates
  # the error tuple returned by the handshake request.
  defp authorization_header(env, opts) do
    with {:ok, vars} <- authorization_vars(env, opts) do
      {:ok,
       vars
       |> calculated_authorization_values
       |> create_header}
    end
  end

  # Fires an unauthenticated "handshake" request to obtain the server's
  # `www-authenticate` challenge, then gathers everything needed to compute
  # the digest response (credentials, path, method, nonces).
  defp authorization_vars(env, opts) do
    with {:ok, unauthorized_response} <-
           env.__module__.request(
             env.__client__,
             method: env.opts[:pre_auth_method] || env.method,
             url: env.url,
             opts: Keyword.put(env.opts || [], :digest_auth_handshake, true)
           ) do
      {:ok,
       %{
         username: opts[:username] || "",
         password: opts[:password] || "",
         path: URI.parse(env.url).path,
         auth:
           Tesla.get_header(unauthorized_response, "www-authenticate")
           |> parse_www_authenticate_header,
         method: env.method |> to_string |> String.upcase(),
         client_nonce: (opts[:cnonce_fn] || (&cnonce/0)).(),
         nc: opts[:nc] || "00000000"
       }}
    end
  end

  # No challenge from the server -> no authorization values, no header.
  defp calculated_authorization_values(%{auth: auth}) when auth == %{}, do: []

  defp calculated_authorization_values(auth_vars) do
    [
      {"username", auth_vars.username},
      {"realm", auth_vars.auth["realm"]},
      {"uri", auth_vars[:path]},
      {"nonce", auth_vars.auth["nonce"]},
      {"nc", auth_vars.nc},
      {"cnonce", auth_vars.client_nonce},
      {"response", response(auth_vars)},
      # hard-coded, will not work for MD5-sess
      {"algorithm", "MD5"},
      # hard-coded, will not work for auth-int or unspecified
      {"qop", "auth"}
    ]
  end

  # nc/qop/algorithm are emitted as bare tokens per the digest grammar;
  # every other parameter is a quoted-string.
  defp single_header_val({k, v}) when k in ~w(nc qop algorithm), do: "#{k}=#{v}"
  defp single_header_val({k, v}), do: "#{k}=\"#{v}\""

  defp create_header([]), do: []

  defp create_header(calculated_authorization_values) do
    vals =
      calculated_authorization_values
      |> Enum.reduce([], fn val, acc -> [single_header_val(val) | acc] end)
      |> Enum.join(", ")

    [{"authorization", "Digest #{vals}"}]
  end

  # HA1 = MD5(username:realm:password) — MD5 algorithm only.
  defp ha1(%{username: username, auth: %{"realm" => realm}, password: password}) do
    md5("#{username}:#{realm}:#{password}")
  end

  # HA2 = MD5(method:uri) — valid for qop=auth only.
  defp ha2(%{method: method, path: path}) do
    md5("#{method}:#{path}")
  end

  # response = MD5(HA1:nonce:nc:cnonce:qop:HA2), with qop fixed to "auth".
  defp response(%{auth: %{"nonce" => nonce}, nc: nc, client_nonce: client_nonce} = auth_vars) do
    md5("#{ha1(auth_vars)}:#{nonce}:#{nc}:#{client_nonce}:auth:#{ha2(auth_vars)}")
  end

  defp parse_www_authenticate_header(nil), do: %{}

  # Extracts all `key="value"` pairs from the challenge header.
  # NOTE(review): unquoted parameters (e.g. `qop=auth`, `algorithm=MD5`) are
  # not captured by this regex — only quoted values survive parsing.
  defp parse_www_authenticate_header(header) do
    Regex.scan(~r/(\w+?)="(.+?)"/, header)
    |> Enum.reduce(%{}, fn [_, key, val], acc -> Map.merge(acc, %{key => val}) end)
  end

  defp md5(data), do: Base.encode16(:erlang.md5(data), case: :lower)

  defp cnonce, do: :crypto.strong_rand_bytes(4) |> Base.encode16(case: :lower)
end
|
lib/tesla/middleware/digest_auth.ex
| 0.850484
| 0.816772
|
digest_auth.ex
|
starcoder
|
defmodule NervesTime.SaneTime do
  # One of the ways that nerves_time determines whether a particular time is
  # possible is whether it's in a known good range.
  @default_earliest_time ~N[2020-07-25 00:00:00]
  @default_latest_time %{@default_earliest_time | year: @default_earliest_time.year + 20}

  @moduledoc false

  @doc """
  Figure out a guess of the real time based on the current system clock (possible_time)
  and the latest timestamp from the RTC.
  """
  @spec derive_time(NaiveDateTime.t(), NaiveDateTime.t()) :: NaiveDateTime.t()
  def derive_time(possible_time, rtc_time) do
    # Normalize both candidates into the allowed range, then keep whichever
    # is later (the RTC wins ties).
    sane_possible = make_sane(possible_time)
    sane_rtc = make_sane(rtc_time)

    case NaiveDateTime.compare(sane_possible, sane_rtc) do
      :gt -> sane_possible
      _ -> sane_rtc
    end
  end

  @doc """
  This function takes a guess at the current time and tries to adjust it so
  that it's not obviously wrong. Obviously wrong means that it is outside
  of the configured valid time range.
  If the time doesn't look right, set it to the earliest time. Why not set it
  to the latest allowed time if the time is in the future? The reason is
  that a cause of future times is RTC corruption. The logic is that the earliest
  allowed time is likely much closer to the actual time than the latest one.
  """
  @spec make_sane(NaiveDateTime.t()) :: NaiveDateTime.t()
  def make_sane(%NaiveDateTime{} = time) do
    {earliest, latest} = allowed_interval()

    cond do
      NaiveDateTime.compare(time, earliest) != :gt -> earliest
      NaiveDateTime.compare(time, latest) != :lt -> earliest
      true -> time
    end
  end

  # Fix anything bogus that's passed in. This does not feel very Erlang, but
  # crashing nerves_time causes more pain than it's worth for purity.
  def make_sane(_other) do
    {earliest, _latest} = allowed_interval()
    earliest
  end

  # The configured (or default) exclusive {earliest, latest} bounds.
  defp allowed_interval do
    {
      Application.get_env(:nerves_time, :earliest_time, @default_earliest_time),
      Application.get_env(:nerves_time, :latest_time, @default_latest_time)
    }
  end
end
|
lib/nerves_time/sane_time.ex
| 0.770292
| 0.626853
|
sane_time.ex
|
starcoder
|
defmodule Exhort.SAT.Expr do
  @moduledoc """
  Create an expression in the expression language.
  If the expression contains a comparison operator, it will be a constraint.
  Otherwise, it will be a linear expression.
  Use the `new/2` macro to create a new expression.
  Use the remaining functions to create variables and list-based constraints.
  """
  alias Exhort.SAT.DSL
  alias Exhort.SAT.BoolVar
  alias Exhort.SAT.Constraint
  alias Exhort.SAT.IntVar
  alias Exhort.SAT.IntervalVar

  # Operators that turn an expression into a constraint.
  @comparison [:<, :<=, :==, :>=, :>, :"abs=="]

  @doc """
  Create a new expression using the DSL.
  If the expression contains a comparison operator, it becomes a constraint.
  """
  defmacro new(expr, opts \\ [])

  defmacro new({op, _, [_lhs, _rhs]} = expr, opts) when op in @comparison do
    # Rewrite `lhs == abs(var)` into the dedicated :"abs==" operator before
    # transforming both sides through the DSL.
    expr =
      case expr do
        {:==, m1, [lhs, {:abs, _m2, [var]}]} ->
          {:"abs==", m1, [lhs, var]}

        expr ->
          expr
      end

    {op, _, [lhs, rhs]} = expr
    lhs = DSL.transform_expression(lhs)
    rhs = DSL.transform_expression(rhs)
    opts = Enum.map(opts, &DSL.transform_expression(&1))

    quote do
      %Constraint{defn: {unquote(lhs), unquote(op), unquote(rhs), unquote(opts)}}
    end
  end

  # Non-comparison expressions expand to a plain (linear) expression.
  defmacro new(expr, _opts) do
    expr = DSL.transform_expression(expr)

    quote do
      unquote(expr)
    end
  end

  @doc """
  Define a new integer variable. It must later be added to the model.
  """
  @spec def_int_var(
          name :: String.t(),
          domain :: {lower_bound :: integer(), upper_bound :: integer()} | integer()
        ) ::
          IntVar.t()
  defdelegate def_int_var(name, domain), to: IntVar, as: :new

  @doc """
  Define a new boolean variable. It must later be added to the model.
  """
  @spec def_bool_var(name :: String.t()) :: BoolVar.t()
  defdelegate def_bool_var(name), to: BoolVar, as: :new

  @doc """
  Define an interval variable. It must later be added to the model.
  See `Exhort.SAT.Builder.def_interval_var/6`.
  - `name` is the variable name
  - `start` is the start of the interval
  - `size` is the size of the interval
  - `stop` is the end of the interval
  """
  @spec def_interval_var(
          name :: String.t(),
          start :: atom() | String.t(),
          size :: integer(),
          stop :: atom() | String.t(),
          opts :: Keyword.t()
        ) ::
          IntervalVar.t()
  defdelegate def_interval_var(name, start, size, stop, opts \\ []), to: IntervalVar, as: :new

  @doc """
  Define a constant. It must later be added to the model.
  """
  @spec def_constant(
          name :: String.t() | atom(),
          value :: integer()
        ) :: IntVar.t()
  defdelegate def_constant(name, value), to: IntVar, as: :new

  @doc """
  Add an implication constraint where `bool1` implies `bool2`.
  """
  defmacro implication(bool1, bool2) do
    expr1 = DSL.transform_expression(bool1)
    expr2 = DSL.transform_expression(bool2)

    quote do
      Constraint.implication(unquote(expr1), unquote(expr2))
    end
  end

  @doc """
  Create a constraint on the list ensuring there are no overlap among the
  variables in the list.
  """
  @spec no_overlap(list(), Keyword.t()) :: Constraint.t()
  defdelegate no_overlap(list, opts \\ []), to: Constraint

  @doc """
  Create a constraint on the list ensuring that each variable in the list has a
  different value.
  """
  @spec all_different(list(), Keyword.t()) :: Constraint.t()
  defdelegate all_different(list, opts \\ []), to: Constraint

  @doc """
  Create logical AND constraint on the list of booleans.
  """
  defmacro bool_and(list) when is_list(list) do
    expr_list = Enum.map(list, &DSL.transform_expression(&1))

    quote do
      Constraint.bool_and(unquote(expr_list))
    end
  end

  @doc """
  Create a constraint that requires one of the booleans in the list to be true.
  """
  defmacro bool_or(list) when is_list(list) do
    expr_list = Enum.map(list, &DSL.transform_expression(&1))

    quote do
      Constraint.bool_or(unquote(expr_list))
    end
  end
end
|
lib/exhort/sat/expr.ex
| 0.815306
| 0.739611
|
expr.ex
|
starcoder
|
defmodule Mint.HTTP2.Frame do
@moduledoc false
use Bitwise, skip_operators: true
import Record
shared_stream = [:stream_id, {:flags, 0x00}]
shared_conn = [stream_id: 0, flags: 0x00]
defrecord :data, shared_stream ++ [:data, :padding]
defrecord :headers, shared_stream ++ [:exclusive?, :stream_dependency, :weight, :hbf, :padding]
defrecord :priority, shared_stream ++ [:exclusive?, :stream_dependency, :weight]
defrecord :rst_stream, shared_stream ++ [:error_code]
defrecord :settings, shared_conn ++ [:params]
defrecord :push_promise, shared_stream ++ [:promised_stream_id, :hbf, :padding]
defrecord :ping, shared_conn ++ [:opaque_data]
defrecord :goaway, shared_conn ++ [:last_stream_id, :error_code, :debug_data]
defrecord :window_update, shared_stream ++ [:window_size_increment]
defrecord :continuation, shared_stream ++ [:hbf]
@types %{
data: 0x00,
headers: 0x01,
priority: 0x02,
rst_stream: 0x03,
settings: 0x04,
push_promise: 0x05,
ping: 0x06,
goaway: 0x07,
window_update: 0x08,
continuation: 0x09
}
## Flag handling
@flags %{
data: [end_stream: 0x01, padded: 0x08],
headers: [end_stream: 0x01, end_headers: 0x04, padded: 0x08, priority: 0x20],
settings: [ack: 0x01],
push_promise: [end_headers: 0x04, padded: 0x08],
ping: [ack: 0x01],
continuation: [end_headers: 0x04]
}
@spec set_flags(byte(), atom(), [flag_name :: atom()]) :: byte()
def set_flags(initial_flags \\ 0x00, frame_name, flags_to_set)
when is_integer(initial_flags) and is_list(flags_to_set) do
Enum.reduce(flags_to_set, initial_flags, &set_flag(&2, frame_name, &1))
end
@spec flag_set?(byte(), atom(), atom()) :: boolean()
def flag_set?(flags, frame, flag_name)
for {frame, flags} <- @flags,
{flag_name, flag_value} <- flags do
defp set_flag(flags, unquote(frame), unquote(flag_name)), do: bor(flags, unquote(flag_value))
defp set_flag(unquote(frame), unquote(flag_name)), do: unquote(flag_value)
def flag_set?(flags, unquote(frame), unquote(flag_name)),
do: band(flags, unquote(flag_value)) == unquote(flag_value)
end
defmacrop is_flag_set(flags, flag) do
quote do
band(unquote(flags), unquote(flag)) == unquote(flag)
end
end
## Parsing
@doc """
Decodes the next frame of the given binary.
Returns `{:ok, frame, rest}` if successful, `{:error, reason}` if not.
"""
@spec decode_next(binary()) :: {:ok, tuple(), binary()} | :more | {:error, reason}
when reason:
{:frame_size_error, atom()}
| {:protocol_error, binary()}
| :payload_too_big
def decode_next(bin, max_frame_size \\ 16_384) when is_binary(bin) do
case decode_next_raw(bin) do
{:ok, {_type, _flags, _stream_id, payload}, _rest}
when byte_size(payload) > max_frame_size ->
{:error, :payload_too_big}
{:ok, {type, flags, stream_id, payload}, rest} ->
{:ok, decode_contents(type, flags, stream_id, payload), rest}
:more ->
:more
end
catch
:throw, {:mint, reason} -> {:error, reason}
end
defp decode_next_raw(<<
length::24,
type,
flags,
_reserved::1,
stream_id::31,
payload::size(length)-binary,
rest::binary
>>) do
{:ok, {type, flags, stream_id, payload}, rest}
end
defp decode_next_raw(_other) do
:more
end
for {frame, type} <- @types do
function = :"decode_#{frame}"
defp decode_contents(unquote(type), flags, stream_id, payload) do
unquote(function)(flags, stream_id, payload)
end
end
# Parsing of specific frames
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.1
defp decode_data(flags, stream_id, payload) do
{data, padding} = decode_padding(:data, flags, payload)
data(stream_id: stream_id, flags: flags, data: data, padding: padding)
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.2
defp decode_headers(flags, stream_id, payload) do
{data, padding} = decode_padding(:headers, flags, payload)
{exclusive?, stream_dependency, weight, data} =
if flag_set?(flags, :headers, :priority) do
<<exclusive::1, stream_dependency::31, weight::8, rest::binary>> = data
{exclusive == 1, stream_dependency, weight + 1, rest}
else
{nil, nil, nil, data}
end
headers(
stream_id: stream_id,
flags: flags,
padding: padding,
exclusive?: exclusive?,
stream_dependency: stream_dependency,
weight: weight,
hbf: data
)
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.3
defp decode_priority(_flags, _stream_id, payload) when byte_size(payload) != 5 do
throw({:mint, {:frame_size_error, :priority}})
end
defp decode_priority(flags, stream_id, payload) do
<<exclusive::1, stream_dependency::31, weight::8>> = payload
priority(
stream_id: stream_id,
flags: flags,
exclusive?: exclusive == 1,
stream_dependency: stream_dependency,
weight: weight + 1
)
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.4
defp decode_rst_stream(_flags, _stream_id, payload) when byte_size(payload) != 4 do
throw({:mint, {:frame_size_error, :rst_stream}})
end
defp decode_rst_stream(flags, stream_id, <<error_code::32>>) do
rst_stream(
stream_id: stream_id,
flags: flags,
error_code: humanize_error_code(error_code)
)
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.5
defp decode_settings(_flags, _stream_id, payload) when rem(byte_size(payload), 6) != 0 do
throw({:mint, {:frame_size_error, :settings}})
end
defp decode_settings(flags, stream_id, payload) do
settings(stream_id: stream_id, flags: flags, params: decode_settings_params(payload))
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.6
defp decode_push_promise(flags, stream_id, payload) do
{data, padding} = decode_padding(:push_promise, flags, payload)
<<_reserved::1, promised_stream_id::31, header_block_fragment::binary>> = data
push_promise(
stream_id: stream_id,
flags: flags,
promised_stream_id: promised_stream_id,
hbf: header_block_fragment,
padding: padding
)
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.7
defp decode_ping(_flags, _stream_id, payload) when byte_size(payload) != 8 do
throw({:mint, {:frame_size_error, :ping}})
end
defp decode_ping(flags, stream_id, payload) do
ping(stream_id: stream_id, flags: flags, opaque_data: payload)
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.8
defp decode_goaway(flags, stream_id, payload) do
<<_reserved::1, last_stream_id::31, error_code::32, debug_data::binary>> = payload
goaway(
stream_id: stream_id,
flags: flags,
last_stream_id: last_stream_id,
error_code: humanize_error_code(error_code),
debug_data: debug_data
)
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.9
defp decode_window_update(_flags, _stream_id, payload) when byte_size(payload) != 4 do
throw({:mint, {:frame_size_error, :window_update}})
end
defp decode_window_update(_flags, _stream_id, <<_reserved::1, 0::31>>) do
throw({:mint, {:protocol_error, "bad WINDOW_SIZE increment"}})
end
defp decode_window_update(flags, stream_id, <<_reserved::1, window_size_increment::31>>) do
window_update(
stream_id: stream_id,
flags: flags,
window_size_increment: window_size_increment
)
end
# http://httpwg.org/specs/rfc7540.html#rfc.section.6.10
defp decode_continuation(flags, stream_id, payload) do
continuation(stream_id: stream_id, flags: flags, hbf: payload)
end
defp decode_padding(frame, flags, <<pad_length, rest::binary>> = payload)
when is_flag_set(flags, unquote(@flags[:data][:padded])) do
if pad_length >= byte_size(payload) do
debug_data =
"the padding length of a #{inspect(frame)} frame is bigger than the payload length"
throw({:mint, {:protocol_error, debug_data}})
else
# 1 byte is for the space taken by pad_length
data_length = byte_size(payload) - pad_length - 1
<<data::size(data_length)-binary, padding::size(pad_length)-binary>> = rest
{data, padding}
end
end
defp decode_padding(_frame, _flags, payload) do
{payload, nil}
end
defp decode_settings_params(payload) do
decode_settings_params(payload, _acc = [])
end
defp decode_settings_params(<<>>, acc) do
Enum.reverse(acc)
end
defp decode_settings_params(<<identifier::16, value::32, rest::binary>>, acc) do
# From http://httpwg.org/specs/rfc7540.html#SettingValues:
# An endpoint that receives a SETTINGS frame with any unknown or unsupported identifier MUST
# ignore that setting.
acc =
case identifier do
0x01 -> [{:header_table_size, value} | acc]
0x02 -> [{:enable_push, value == 1} | acc]
0x03 -> [{:max_concurrent_streams, value} | acc]
0x04 -> [{:initial_window_size, value} | acc]
0x05 -> [{:max_frame_size, value} | acc]
0x06 -> [{:max_header_list_size, value} | acc]
_other -> acc
end
decode_settings_params(rest, acc)
end
## Encoding

@doc """
Encodes the given `frame`.

Accepts any of the frame records produced by this module and returns the
frame's wire representation (9-byte header plus payload) as iodata.
"""
@spec encode(tuple()) :: iodata()
def encode(frame)

# DATA frame without padding: the payload is the data itself.
def encode(data(stream_id: stream_id, flags: flags, data: data, padding: nil)) do
  encode_raw(@types[:data], flags, stream_id, data)
end

# DATA frame with padding: prepend the pad-length byte, append the padding,
# and set the PADDED flag.
def encode(data(stream_id: stream_id, flags: flags, data: data, padding: padding)) do
  flags = set_flags(flags, :data, [:padded])
  payload = [byte_size(padding), data, padding]
  encode_raw(@types[:data], flags, stream_id, payload)
end

# HEADERS frame. The priority fields (exclusive bit + 31-bit stream
# dependency + weight) are only emitted, with the PRIORITY flag, when all
# three are present; padding likewise only when given (with the PADDED flag).
def encode(headers() = frame) do
  headers(
    flags: flags,
    stream_id: stream_id,
    exclusive?: exclusive?,
    stream_dependency: stream_dependency,
    weight: weight,
    hbf: hbf,
    padding: padding
  ) = frame

  payload = hbf

  {payload, flags} =
    if stream_dependency && weight && is_boolean(exclusive?) do
      {
        # Weight is encoded as value - 1 on the wire.
        [<<if(exclusive?, do: 1, else: 0)::1, stream_dependency::31>>, weight - 1, payload],
        set_flags(flags, :headers, [:priority])
      }
    else
      {payload, flags}
    end

  {payload, flags} =
    if padding do
      {[byte_size(padding), payload, padding], set_flags(flags, :headers, [:padded])}
    else
      {payload, flags}
    end

  encode_raw(@types[:headers], flags, stream_id, payload)
end

# PRIORITY frame: exclusive bit, 31-bit stream dependency, weight - 1.
def encode(priority() = frame) do
  priority(
    stream_id: stream_id,
    flags: flags,
    exclusive?: exclusive?,
    stream_dependency: stream_dependency,
    weight: weight
  ) = frame

  payload = [
    <<if(exclusive?, do: 1, else: 0)::1, stream_dependency::31>>,
    weight - 1
  ]

  encode_raw(@types[:priority], flags, stream_id, payload)
end

# RST_STREAM frame: a single 32-bit error code.
def encode(rst_stream(stream_id: stream_id, flags: flags, error_code: error_code)) do
  payload = <<dehumanize_error_code(error_code)::32>>
  encode_raw(@types[:rst_stream], flags, stream_id, payload)
end

# SETTINGS frame: each parameter becomes a 16-bit identifier + 32-bit value.
def encode(settings(stream_id: stream_id, flags: flags, params: params)) do
  payload =
    Enum.map(params, fn
      {:header_table_size, value} -> <<0x01::16, value::32>>
      {:enable_push, value} -> <<0x02::16, if(value, do: 1, else: 0)::32>>
      {:max_concurrent_streams, value} -> <<0x03::16, value::32>>
      {:initial_window_size, value} -> <<0x04::16, value::32>>
      {:max_frame_size, value} -> <<0x05::16, value::32>>
      {:max_header_list_size, value} -> <<0x06::16, value::32>>
    end)

  encode_raw(@types[:settings], flags, stream_id, payload)
end

# PUSH_PROMISE frame: reserved bit + 31-bit promised stream id, then the
# header block fragment; optional padding sets the PADDED flag.
def encode(push_promise() = frame) do
  push_promise(
    stream_id: stream_id,
    flags: flags,
    promised_stream_id: promised_stream_id,
    hbf: hbf,
    padding: padding
  ) = frame

  payload = [<<0::1, promised_stream_id::31>>, hbf]

  {payload, flags} =
    if padding do
      {
        [byte_size(padding), payload, padding],
        set_flags(flags, :push_promise, [:padded])
      }
    else
      {payload, flags}
    end

  encode_raw(@types[:push_promise], flags, stream_id, payload)
end

# PING frame: always on stream 0; payload is the 8 bytes of opaque data.
def encode(ping(stream_id: 0, flags: flags, opaque_data: opaque_data)) do
  encode_raw(@types[:ping], flags, 0, opaque_data)
end

# GOAWAY frame: always on stream 0; reserved bit + last stream id, the error
# code, and any debug data.
def encode(goaway() = frame) do
  goaway(
    stream_id: 0,
    flags: flags,
    last_stream_id: last_stream_id,
    error_code: error_code,
    debug_data: debug_data
  ) = frame

  payload = [<<0::1, last_stream_id::31, dehumanize_error_code(error_code)::32>>, debug_data]
  encode_raw(@types[:goaway], flags, 0, payload)
end

# WINDOW_UPDATE frame: reserved bit + 31-bit window size increment.
def encode(window_update(stream_id: stream_id, flags: flags, window_size_increment: wsi)) do
  payload = <<0::1, wsi::31>>
  encode_raw(@types[:window_update], flags, stream_id, payload)
end

# CONTINUATION frame: the payload is the header block fragment as-is.
def encode(continuation(stream_id: stream_id, flags: flags, hbf: hbf)) do
  encode_raw(@types[:continuation], flags, stream_id, _payload = hbf)
end
# Builds a complete frame as iodata: the 9-byte header (24-bit payload
# length, 8-bit type, 8-bit flags, 1 reserved bit, 31-bit stream id)
# followed by the payload itself.
def encode_raw(type, flags, stream_id, payload) do
  payload_length = IO.iodata_length(payload)
  header = [<<payload_length::24>>, type, flags, <<0::1, stream_id::31>>]
  header ++ [payload]
end
## Helpers

# Wire error codes from RFC 7540 section 7 and the atoms we expose for them.
# Both lookup directions are compiled into function clauses below.
error_codes = [
  no_error: 0x00,
  protocol_error: 0x01,
  internal_error: 0x02,
  flow_control_error: 0x03,
  settings_timeout: 0x04,
  stream_closed: 0x05,
  frame_size_error: 0x06,
  refused_stream: 0x07,
  cancel: 0x08,
  compression_error: 0x09,
  connect_error: 0x0A,
  enhance_your_calm: 0x0B,
  inadequate_security: 0x0C,
  http_1_1_required: 0x0D
]

for {name, value} <- error_codes do
  defp humanize_error_code(unquote(value)), do: unquote(name)
  defp dehumanize_error_code(unquote(name)), do: unquote(value)
end
end
|
lib/mint/http2/frame.ex
| 0.736211
| 0.520374
|
frame.ex
|
starcoder
|
defmodule X509.Certificate.Validity do
  @moduledoc """
  Convenience functions for creating `:Validity` records for use in
  certificates. The `:Validity` record represents the X.509 Validity
  type, defining the validity of a certificate in terms of `notBefore`
  and `notAfter` timestamps.
  """
  import X509.ASN1

  @typedoc "X.509 Time type (UTCTime or GeneralizedTime)"
  @type time :: {:utcTime | :generalizedTime, charlist()}

  @typedoc "`:Validity` record, as used in Erlang's `:public_key` module"
  @opaque t :: X509.ASN1.record(:validity)

  @default_backdate_seconds 5 * 60
  @seconds_per_day 24 * 60 * 60

  @doc """
  Creates a new `:Validity` record with the given start and end timestamps
  in DateTime format.

  Sub-second precision, if present, is truncated: ASN.1 UTCTime and
  GeneralizedTime values are emitted with whole-second resolution.

  ## Examples:

      iex> {:ok, not_before, 0} = DateTime.from_iso8601("2018-01-01T00:00:00Z")
      iex> {:ok, not_after, 0} = DateTime.from_iso8601("2018-12-31T23:59:59Z")
      iex> X509.Certificate.Validity.new(not_before, not_after)
      {:Validity, {:utcTime, '180101000000Z'}, {:utcTime, '181231235959Z'}}

      iex> {:ok, not_before, 0} = DateTime.from_iso8601("2051-01-01T00:00:00Z")
      iex> {:ok, not_after, 0} = DateTime.from_iso8601("2051-12-31T23:59:59Z")
      iex> X509.Certificate.Validity.new(not_before, not_after)
      {:Validity, {:generalizedTime, '20510101000000Z'},
       {:generalizedTime, '20511231235959Z'}}
  """
  @spec new(DateTime.t(), DateTime.t()) :: t()
  def new(%DateTime{} = not_before, %DateTime{} = not_after) do
    validity(
      notBefore: to_asn1(not_before),
      notAfter: to_asn1(not_after)
    )
  end

  @doc """
  Creates a new `:Validity` record with an `notAfter` value a given number of
  days in the future. The `notBefore` value can be backdated (by default
  #{@default_backdate_seconds} seconds) to avoid newly issued certificates
  from being rejected by peers due to poorly synchronized clocks.

  For CA certificates, consider using `new/2` instead, with a `not_before`
  value that does not reveal the exact time when the keypair was generated.
  This minimizes information leakage about the state of the RNG.
  """
  @spec days_from_now(pos_integer(), non_neg_integer()) :: t()
  def days_from_now(days, backdate_seconds \\ @default_backdate_seconds) do
    not_before =
      DateTime.utc_now()
      |> shift(-backdate_seconds)

    not_after = shift(not_before, days * @seconds_per_day)
    new(not_before, not_after)
  end

  # Shifts a DateTime value by a number of seconds (positive or negative).
  # Going through Unix time also drops any sub-second precision.
  defp shift(datetime, seconds) do
    datetime
    |> DateTime.to_unix()
    |> Kernel.+(seconds)
    |> DateTime.from_unix!()
  end

  # Converts a DateTime value to ASN.1 UTCTime (for years prior to 2050) or
  # GeneralizedTime (for years starting with 2050).
  #
  # BUG FIX: the value is truncated to whole seconds first; otherwise a
  # DateTime with microsecond precision would render a fractional part in the
  # `:basic` ISO 8601 string and the Regex.run match below would fail with a
  # MatchError.
  defp to_asn1(%DateTime{year: year} = datetime) when year < 2050 do
    # UTCTime carries a two-digit year, so the century digits are dropped.
    iso =
      datetime
      |> DateTime.truncate(:second)
      |> DateTime.to_iso8601(:basic)

    [_, date, time] = Regex.run(~r/^\d\d(\d{6})T(\d{6})Z$/, iso)
    {:utcTime, '#{date}#{time}Z'}
  end

  defp to_asn1(datetime) do
    # GeneralizedTime carries the full four-digit year.
    iso =
      datetime
      |> DateTime.truncate(:second)
      |> DateTime.to_iso8601(:basic)

    [_, date, time] = Regex.run(~r/^(\d{8})T(\d{6})Z$/, iso)
    {:generalizedTime, '#{date}#{time}Z'}
  end
end
|
lib/x509/certificate/validity.ex
| 0.899442
| 0.557544
|
validity.ex
|
starcoder
|
defprotocol BehaviorTree.Node.Protocol do
  @moduledoc """
  A protocol so you can define your own custom behavior tree nodes.

  If you would like to make your own nodes with custom traversal behavior, you
  need to implement this protocol. A node is just a wrapper around a collection
  of children, that defines a context on how to traverse the tree when one of
  its children fails or succeeds.

  Note that any node that has _not_ explicitly implemented this protocol will
  be considered a leaf.

  `BehaviorTree` is backed by `ExZipper.Zipper`, so you will need to understand
  how that library works.

  Each function will provide your node as the first argument as per the
  protocol standard, but many will also include the current zipper as the
  second argument.

  For examples, look at the source of the standard `BehaviorTree.Node`s.

  Note that your nodes can be stateful if necessary, see the implementation for
  `BehaviorTree.Node.repeat_n/2` for an example.
  """

  # Any term without an explicit implementation dispatches to the Any impl,
  # which treats the term as a leaf.
  @fallback_to_any true

  @doc "Sets the node's children."
  @spec set_children(any(), list(any())) :: any()
  def set_children(data, children)

  @doc "Get the node's children."
  @spec get_children(any()) :: list(any())
  def get_children(data)

  @doc """
  Focus your node's first child.

  In most cases, this will be the first (left-most) child, but in some cases,
  like for a "random" node, it could be a different child.

  The supplied zipper will be focused on your node, and you need to advance it
  to the starting child. Usually `ExZipper.Zipper.down/1` would be desired.
  """
  @spec first_child(any(), ExZipper.Zipper.t()) :: ExZipper.Zipper.t()
  def first_child(data, zipper)

  @doc """
  What to do when one of your node's children fail.

  This is the meat of your custom node logic. You can move the zipper's focus
  to a different child (usually with `ExZipper.Zipper.right/1`), or signal that
  your entire node failed or succeeded by returning the special atom `:fail` or
  `:succeed`.

  Note that you will need to handle any of the `t:ExZipper.Zipper.error/0`
  types (like `:right_from_rightmost`) appropriately.
  """
  @spec on_fail(any(), ExZipper.Zipper.t()) :: ExZipper.Zipper.t() | :succeed | :fail
  def on_fail(data, zipper)

  @doc """
  What to do when one of your node's children succeeds.

  This is the meat of your custom node logic. You can move the zipper's focus
  to a different child (usually with `ExZipper.Zipper.right/1`), or signal that
  your entire node failed or succeeded by returning the special atom `:fail` or
  `:succeed`.

  Note that you will need to handle any of the `t:ExZipper.Zipper.error/0`
  types (like `:right_from_rightmost`) appropriately.
  """
  @spec on_succeed(any(), ExZipper.Zipper.t()) :: ExZipper.Zipper.t() | :succeed | :fail
  def on_succeed(data, zipper)
end
# Fallback implementation: a term without an explicit implementation is a
# leaf. A leaf has no children and never redirects traversal, so the zipper
# is returned unchanged and success/failure bubbles up to the parent node.
defimpl BehaviorTree.Node.Protocol, for: Any do
  def set_children(data, _children) do
    data
  end

  def get_children(_data) do
    []
  end

  def first_child(_data, zipper) do
    zipper
  end

  def on_fail(_data, zipper) do
    zipper
  end

  def on_succeed(_data, zipper) do
    zipper
  end
end
defmodule BehaviorTree.Node do
  @moduledoc """
  A collection of "standard" behavior tree nodes.

  By composing these nodes, you should be able to describe almost any behavior
  you need. The children of each node can be a mix of other nodes to create
  deeper trees, or any other value to create a leaf (an atom or function is
  recommended).

  These nodes implement `BehaviorTree.Node.Protocol`.
  """
  alias ExZipper.Zipper

  defstruct [:type, :children, repeat_count: 1, weights: []]

  @opaque t :: %__MODULE__{
            type:
              :select
              | :sequence
              | :repeat_until_succeed
              | :repeat_until_fail
              | :repeat_n
              | :random
              | :random_weighted
              | :always_succeed
              | :always_fail
              | :negate,
            children: nonempty_list(any()),
            repeat_count: pos_integer(),
            weights: list(pos_integer())
          }

  defimpl BehaviorTree.Node.Protocol do
    def set_children(%BehaviorTree.Node{} = data, children) do
      %BehaviorTree.Node{data | children: children}
    end

    def get_children(%BehaviorTree.Node{children: children}), do: children

    # A :random node starts at a uniformly chosen child rather than the first.
    def first_child(%BehaviorTree.Node{type: :random, children: children}, zipper) do
      random_index = :rand.uniform(Enum.count(children)) - 1
      nth_child(zipper, random_index)
    end

    # A :random_weighted node picks a child with probability proportional to
    # its weight: draw a number in 1..total and walk the weights until the
    # draw is exhausted.
    def first_child(%BehaviorTree.Node{type: :random_weighted, weights: weights}, zipper) do
      weighted_total = Enum.sum(weights)
      random_weighted_index = :rand.uniform(weighted_total)

      random_index =
        Enum.reduce_while(weights, {0, random_weighted_index}, fn weight, {i, remaining_weight} ->
          if remaining_weight - weight <= 0,
            do: {:halt, i},
            else: {:cont, {i + 1, remaining_weight - weight}}
        end)

      nth_child(zipper, random_index)
    end

    def first_child(_data, zipper), do: Zipper.down(zipper)

    def on_succeed(%BehaviorTree.Node{type: :sequence}, zipper) do
      case Zipper.right(zipper) do
        {:error, :right_from_rightmost} ->
          :succeed

        next ->
          next
      end
    end

    def on_succeed(%BehaviorTree.Node{type: :select}, _zipper), do: :succeed
    def on_succeed(%BehaviorTree.Node{type: :repeat_until_fail}, zipper), do: zipper
    def on_succeed(%BehaviorTree.Node{type: :repeat_until_succeed}, _zipper), do: :succeed

    # Stateful node: the remaining repeat count is decremented in place on the
    # node itself (via Zipper.edit on the parent) until it reaches 1.
    def on_succeed(%BehaviorTree.Node{type: :repeat_n, repeat_count: repeat_count}, zipper) do
      if repeat_count > 1 do
        zipper
        |> Zipper.up()
        |> Zipper.edit(&%BehaviorTree.Node{&1 | repeat_count: repeat_count - 1})
        |> Zipper.down()
      else
        :succeed
      end
    end

    def on_succeed(%BehaviorTree.Node{type: :random}, _zipper), do: :succeed
    def on_succeed(%BehaviorTree.Node{type: :random_weighted}, _zipper), do: :succeed
    def on_succeed(%BehaviorTree.Node{type: :always_succeed}, _zipper), do: :succeed
    def on_succeed(%BehaviorTree.Node{type: :always_fail}, _zipper), do: :fail
    def on_succeed(%BehaviorTree.Node{type: :negate}, _zipper), do: :fail

    def on_fail(%BehaviorTree.Node{type: :sequence}, _zipper), do: :fail

    def on_fail(%BehaviorTree.Node{type: :select}, zipper) do
      case Zipper.right(zipper) do
        {:error, :right_from_rightmost} ->
          :fail

        next ->
          next
      end
    end

    def on_fail(%BehaviorTree.Node{type: :repeat_until_fail}, _zipper), do: :succeed
    def on_fail(%BehaviorTree.Node{type: :repeat_until_succeed}, zipper), do: zipper

    def on_fail(%BehaviorTree.Node{type: :repeat_n, repeat_count: repeat_count}, zipper) do
      if repeat_count > 1 do
        zipper
        |> Zipper.up()
        |> Zipper.edit(&%BehaviorTree.Node{&1 | repeat_count: repeat_count - 1})
        |> Zipper.down()
      else
        :succeed
      end
    end

    def on_fail(%BehaviorTree.Node{type: :random}, _zipper), do: :fail
    def on_fail(%BehaviorTree.Node{type: :random_weighted}, _zipper), do: :fail
    def on_fail(%BehaviorTree.Node{type: :always_succeed}, _zipper), do: :succeed
    def on_fail(%BehaviorTree.Node{type: :always_fail}, _zipper), do: :fail
    def on_fail(%BehaviorTree.Node{type: :negate}, _zipper), do: :succeed

    # Focuses the child at the given zero-based index by stepping down to the
    # first child and then moving right `index` times.
    defp nth_child(zipper, 0), do: Zipper.down(zipper)

    defp nth_child(zipper, index) when index > 0 do
      Enum.reduce(1..index, Zipper.down(zipper), fn _, z -> Zipper.right(z) end)
    end
  end

  @doc """
  Create a "select" style node with the supplied children.

  This node always goes from left to right, moving on to the next child when the current one fails. Succeeds immediately if any child succeeds, fails if all children fail.

  ## Example

      iex> tree = Node.select([:a, :b])
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.value
      :b

      iex> tree = Node.select([
      ...>   Node.select([:a, :b]),
      ...>   :c
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.fail |> BehaviorTree.value
      :c

      iex> tree = Node.sequence([
      ...>   Node.select([:a, :b]),
      ...>   :c
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.value
      :c
  """
  @spec select(nonempty_list(any())) :: __MODULE__.t()
  def select(children) when is_list(children) and children != [] do
    %__MODULE__{type: :select, children: children}
  end

  @doc """
  Create a "sequence" style node with the supplied children.

  This node always goes from left to right, moving on to the next child when the current one succeeds. Succeeds if all children succeed, fails immediately if any child fails.

  ## Example

      iex> tree = Node.sequence([:a, :b])
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.value
      :b

      iex> tree = Node.sequence([
      ...>   Node.sequence([:a, :b]),
      ...>   :c
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.succeed |> BehaviorTree.value
      :c

      iex> tree = Node.select([
      ...>   Node.sequence([:a, :b]),
      ...>   :c
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.value
      :c
  """
  @spec sequence(nonempty_list(any())) :: __MODULE__.t()
  def sequence(children) when is_list(children) and children != [] do
    %__MODULE__{type: :sequence, children: children}
  end

  @doc """
  Create a "repeat_until_fail" style "decorator" node.

  This node only takes a single child, which it will repeatedly return until the child fails, at which point this node will succeed. This node never fails, but it may run forever if the child never fails.

  You may find it useful to nest one of the other nodes under this node if you want a collection of children to repeat.

  ## Example

      iex> tree = Node.sequence([
      ...>   Node.repeat_until_fail(:a),
      ...>   :b
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.succeed |> BehaviorTree.value
      :a

      iex> tree = Node.sequence([
      ...>   Node.repeat_until_fail(:a),
      ...>   :b
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.fail |> BehaviorTree.value
      :b

      iex> tree = Node.sequence([
      ...>   Node.repeat_until_fail(Node.select([:a, :b])),
      ...>   :c
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.fail |> BehaviorTree.value
      :c
  """
  @spec repeat_until_fail(any()) :: __MODULE__.t()
  def repeat_until_fail(child) do
    %__MODULE__{type: :repeat_until_fail, children: [child]}
  end

  @doc """
  Create a "repeat_until_succeed" style "decorator" node.

  This node only takes a single child, which it will repeatedly return until the child succeeds, at which point this node will succeed. This node never fails, but it may run forever if the child never succeeds.

  You may find it useful to nest one of the other nodes under this node if you want a collection of children to repeat.

  ## Example

      iex> tree = Node.sequence([
      ...>   Node.repeat_until_succeed(:a),
      ...>   :b
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.fail |> BehaviorTree.value
      :a

      iex> tree = Node.sequence([
      ...>   Node.repeat_until_succeed(:a),
      ...>   :b
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.succeed |> BehaviorTree.value
      :b

      iex> tree = Node.sequence([
      ...>   Node.repeat_until_succeed(Node.sequence([:a, :b])),
      ...>   :c
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.succeed |> BehaviorTree.value
      :c
  """
  @spec repeat_until_succeed(any()) :: __MODULE__.t()
  def repeat_until_succeed(child) do
    %__MODULE__{type: :repeat_until_succeed, children: [child]}
  end

  @doc """
  Create a "repeat_n" style "decorator" node.

  This node takes an integer greater than 1, and a single child, which it will repeatedly return n times, regardless of if the child fails or succeeds. After that, this node will succeed. This node never fails, and always runs n times.

  You may find it useful to nest one of the other nodes under this node if you want a collection of children to repeat.

  ## Example

      iex> tree = Node.sequence([
      ...>   Node.repeat_n(2, :a),
      ...>   :b
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.value
      :a
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.value
      :a
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.fail |> BehaviorTree.value
      :b
  """
  @spec repeat_n(pos_integer, any()) :: __MODULE__.t()
  def repeat_n(n, child) when n > 1 do
    %__MODULE__{type: :repeat_n, children: [child], repeat_count: n}
  end

  @doc """
  Create a "random" style "decorator" node.

  This node takes multiple children, from which it will randomly pick one to run (using `:rand.uniform/1`). If that child fails, this node fails, if the child succeeds, this node succeeds.

  ## Example

      Node.random([:a, :b, :c]) |> BehaviorTree.start |> BehaviorTree.value # will be one of :a, :b, or :c

      iex> tree = Node.sequence([
      ...>   Node.random([:a, :b, :c]),
      ...>   :d
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.value
      :d
  """
  @spec random(nonempty_list(any())) :: __MODULE__.t()
  def random(children) when is_list(children) and children != [] do
    %__MODULE__{type: :random, children: children}
  end

  @doc """
  Create a "random_weighted" style "decorator" node.

  This node takes multiple children with associated weights, from which it will randomly pick one to run, taking the weighting into account (using `:rand.uniform/1`). If that child fails, this node fails, if the child succeeds, this node succeeds.

  Note that `BehaviorTree.value` will return only the value (the first position of the supplied tuple).

  ## Example

      Node.random_weighted([{:a, 2}, {:b, 1}]) |> BehaviorTree.start() |> BehaviorTree.value()
      # :a will be returned twice as often as :b

      iex> tree = Node.sequence([
      ...>   Node.random_weighted([{:a, 2}, {:b, 1}]),
      ...>   :d
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.value
      :d
  """
  @spec random_weighted(nonempty_list({any(), pos_integer()})) :: __MODULE__.t()
  def random_weighted(children) when is_list(children) and children != [] do
    %__MODULE__{
      type: :random_weighted,
      children: Enum.map(children, &elem(&1, 0)),
      weights: Enum.map(children, &elem(&1, 1))
    }
  end

  @doc """
  Create an "always_succeed" style "decorator" node.

  This node takes a single child, and will always succeed, regardless of the outcome of the child.

  This may be useful when used in combination with the "random" nodes.

  ## Example

      iex> tree = Node.sequence([
      ...>   Node.always_succeed(:a),
      ...>   :b
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.value
      :a
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.value
      :b
  """
  @spec always_succeed(any()) :: __MODULE__.t()
  def always_succeed(child) do
    %__MODULE__{type: :always_succeed, children: [child]}
  end

  @doc """
  Create an "always_fail" style "decorator" node.

  This node takes a single child, and will always fail, regardless of the outcome of the child.

  This may be useful when used in combination with the "random" nodes.

  ## Example

      iex> tree = Node.sequence([
      ...>   Node.always_fail(:a),
      ...>   :b
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.value
      :a
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.value
      :a
  """
  @spec always_fail(any()) :: __MODULE__.t()
  def always_fail(child) do
    %__MODULE__{type: :always_fail, children: [child]}
  end

  @doc """
  Create a "negate" style "decorator" node.

  This node takes a single child. If the child fails, this node succeeds. If the child succeeds, this node fails.

  This may be useful to simplify handlers code.

  ## Example

      iex> tree = Node.sequence([
      ...>   Node.negate(:a),
      ...>   :b
      ...> ])
      iex> tree |> BehaviorTree.start |> BehaviorTree.value
      :a
      iex> tree |> BehaviorTree.start |> BehaviorTree.succeed |> BehaviorTree.value
      :a
      iex> tree |> BehaviorTree.start |> BehaviorTree.fail |> BehaviorTree.value
      :b
  """
  @spec negate(any()) :: __MODULE__.t()
  def negate(child) do
    %__MODULE__{type: :negate, children: [child]}
  end
end
|
lib/behavior_tree/node.ex
| 0.944702
| 0.640383
|
node.ex
|
starcoder
|
defmodule Flv.AudioData do
  @moduledoc "Represents a packet of audio data."

  @type sound_format ::
          :pcm_platform_endian
          | :adpcm
          | :mp3
          | :pcm_little_endian
          | :nelly_16khz
          | :nelly_8khz
          | :nelly
          | :g711_alaw
          | :g711_mulaw
          | :reserved
          | :aac
          | :speex
          | :mp3_8khz
          | :device_specific
  @type sample_rate :: 5 | 11 | 22 | 44
  @type sample_size :: 8 | 16
  @type channel_type :: :mono | :stereo
  @type aac_packet_type :: :sequence_header | :raw_data | :not_aac

  @type t :: %__MODULE__{
          format: sound_format,
          sample_rate_in_khz: sample_rate,
          sample_size_in_bits: sample_size,
          channel_type: channel_type,
          aac_packet_type: aac_packet_type,
          data: binary
        }

  defstruct format: nil,
            sample_rate_in_khz: nil,
            sample_size_in_bits: nil,
            channel_type: nil,
            aac_packet_type: :not_aac,
            data: <<>>

  # Lookup tables for the four header fields. Ids 12 and 13 are not assigned
  # a sound format, so they are absent from @formats and produce :error.
  @formats %{
    0 => :pcm_platform_endian,
    1 => :adpcm,
    2 => :mp3,
    3 => :pcm_little_endian,
    4 => :nelly_16khz,
    5 => :nelly_8khz,
    6 => :nelly,
    7 => :g711_alaw,
    8 => :g711_mulaw,
    9 => :reserved,
    10 => :aac,
    11 => :speex,
    14 => :mp3_8khz,
    15 => :device_specific
  }

  # Rate id 0 is nominally 5.5 kHz, but the typespec only allows integers,
  # so it is represented as 5.
  @rates %{0 => 5, 1 => 11, 2 => 22, 3 => 44}
  @sizes %{0 => 8, 1 => 16}
  @channels %{0 => :mono, 1 => :stereo}

  @spec parse(binary) :: {:ok, __MODULE__.t()} | :error
  @doc "Parses the provided binary into an flv video tag"
  def parse(binary) when is_binary(binary), do: do_parse(binary)

  # The first byte packs format (4 bits), rate (2), size (1) and channel (1).
  # Rate/size/channel ids are exhaustive for their bit widths; only the
  # format id can be unassigned, in which case the packet is rejected.
  defp do_parse(<<format_id::4, rate_id::2, size_id::1, type_id::1, rest::binary>>) do
    with {:ok, format} <- Map.fetch(@formats, format_id) do
      audio = %__MODULE__{
        format: format,
        sample_rate_in_khz: Map.fetch!(@rates, rate_id),
        sample_size_in_bits: Map.fetch!(@sizes, size_id),
        channel_type: Map.fetch!(@channels, type_id)
      }

      {:ok, apply_data(rest, audio)}
    end
  end

  defp do_parse(_), do: :error

  # For AAC packets the first body byte distinguishes a sequence header (0x00)
  # from raw data (0x01); everything else keeps the whole body as data.
  defp apply_data(<<0x00, rest::binary>>, %__MODULE__{format: :aac} = audio) do
    %{audio | aac_packet_type: :sequence_header, data: rest}
  end

  defp apply_data(<<0x01, rest::binary>>, %__MODULE__{format: :aac} = audio) do
    %{audio | aac_packet_type: :raw_data, data: rest}
  end

  defp apply_data(binary, audio), do: %{audio | data: binary}
end
|
apps/flv/lib/flv/audio_data.ex
| 0.75496
| 0.496643
|
audio_data.ex
|
starcoder
|
defmodule AWS.IoTSecureTunneling do
  @moduledoc """
  AWS IoT Secure Tunneling

  AWS IoT Secure Tunneling enables you to create remote connections to devices
  deployed in the field.

  For more information about how AWS IoT Secure Tunneling works, see the
  [User
  Guide](https://docs.aws.amazon.com/secure-tunneling/latest/ug/what-is-secure-tunneling.html).
  """

  @doc """
  Closes a tunnel identified by the unique tunnel id. When a `CloseTunnel`
  request is received, we close the WebSocket connections between the client
  and proxy server so no data can be transmitted.
  """
  def close_tunnel(client, input, options \\ []), do: request(client, "CloseTunnel", input, options)

  @doc """
  Gets information about a tunnel identified by the unique tunnel id.
  """
  def describe_tunnel(client, input, options \\ []), do: request(client, "DescribeTunnel", input, options)

  @doc """
  Lists the tags for the specified resource.
  """
  def list_tags_for_resource(client, input, options \\ []), do: request(client, "ListTagsForResource", input, options)

  @doc """
  List all tunnels for an AWS account. Tunnels are listed by creation time in
  descending order, newer tunnels will be listed before older tunnels.
  """
  def list_tunnels(client, input, options \\ []), do: request(client, "ListTunnels", input, options)

  @doc """
  Creates a new tunnel, and returns two client access tokens for clients to
  use to connect to the AWS IoT Secure Tunneling proxy server.
  """
  def open_tunnel(client, input, options \\ []), do: request(client, "OpenTunnel", input, options)

  @doc """
  A resource tag.
  """
  def tag_resource(client, input, options \\ []), do: request(client, "TagResource", input, options)

  @doc """
  Removes a tag from a resource.
  """
  def untag_resource(client, input, options \\ []), do: request(client, "UntagResource", input, options)

  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  # Signs and performs a single AWS JSON-1.1 POST for the given action,
  # decoding the JSON response body (when present) with Poison.
  defp request(client, action, input, options) do
    client = %{client | service: "IoTSecuredTunneling"}
    host = build_host("api.tunneling.iot", client)
    url = build_url(host, client)
    payload = Poison.Encoder.encode(input, %{})

    signed_headers =
      AWS.Request.sign_v4(
        client,
        "POST",
        url,
        [
          {"Host", host},
          {"Content-Type", "application/x-amz-json-1.1"},
          {"X-Amz-Target", "IoTSecuredTunneling.#{action}"}
        ],
        payload
      )

    case HTTPoison.post(url, payload, signed_headers, options) do
      {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
        # Successful call with an empty body: nothing to decode.
        {:ok, nil, response}

      {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body, %{}), response}

      {:ok, %HTTPoison.Response{body: body}} ->
        # Non-200 responses carry a JSON error document.
        {:error, Poison.Parser.parse!(body, %{})}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # The "local" pseudo-region targets a locally running endpoint.
  defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    Enum.join([endpoint_prefix, region, endpoint], ".")
  end

  defp build_url(host, %{proto: proto, port: port}) do
    "#{proto}://#{host}:#{port}/"
  end
end
|
lib/aws/iot_secure_tunneling.ex
| 0.785514
| 0.435481
|
iot_secure_tunneling.ex
|
starcoder
|
defmodule MarsWater.Algos.Heap do
  @moduledoc """
  Heap-based solver: measurements are processed in descending score order so
  that cells with the highest aggregate scores complete early, allowing the
  scan to halt before touching every measurement.
  """
  alias MarsWater.Util.MaxHeap

  # How often (in processed measurements) to pause and count complete cells.
  @count_solutions_every 100

  @doc """
  Runs the solver on a space-separated input of the form
  `"results_requested grid_size m1 m2 ..."` (row-major measurements) and
  returns the formatted top results, one per line.
  """
  def run(input) when is_binary(input) do
    [results_requested, grid_size | measurements] =
      input
      |> String.split(" ", trim: true)
      |> Enum.map(&(Integer.parse(&1) |> elem(0)))

    heap = build_heap(measurements, grid_size)

    measurements
    |> reduce_results_until(heap, results_requested, grid_size)
    |> prepare_results(results_requested, grid_size)
  end

  @doc """
  Builds a max-heap of `{score, {x, y}}` entries from the row-major
  measurement list.
  """
  def build_heap(measurements, grid_size) do
    measurements
    |> Enum.with_index()
    |> Enum.reduce(MaxHeap.new(), fn {score, index}, heap ->
      coords = {rem(index, grid_size), div(index, grid_size)}
      MaxHeap.insert(heap, {score, coords})
    end)
  end

  @doc """
  Pops measurements off the heap (highest score first), spreading each score
  onto its 3x3 neighbourhood. Periodically counts the cells that have received
  all of their expected contributions and halts early once enough cells are
  complete. Returns the accumulated `{x, y} => [scores]` map.
  """
  def reduce_results_until(measurements, heap, results_requested, grid_size) do
    {_heap, map, _index} =
      Enum.reduce_while(measurements, {heap, %{}, -1}, fn _measurement, {heap, map, index} ->
        index = index + 1

        # Get largest remaining score from the heap.
        {{score, {x, y}}, heap} = MaxHeap.delete_root(heap)

        # Add the score to all affected coords. Prepending instead of
        # appending is O(1) and safe: the score lists are only counted and
        # summed, so order is irrelevant.
        map =
          Enum.reduce(affected_coords(x, y), map, fn coords, acc ->
            Map.update(acc, coords, [score], &[score | &1])
          end)

        # Counting complete cells is expensive, so do it only every
        # @count_solutions_every measurements.
        # NOTE(review): the `index == grid_size - 1` end condition looks like
        # it was meant to cover the final measurement (index
        # `length(measurements) - 1`); left as-is pending confirmation.
        if rem(index, @count_solutions_every) == 0 or index == grid_size - 1 do
          complete_cells =
            Enum.filter(map, fn {{cell_x, cell_y}, scores} ->
              # BUG FIX: compare each cell against its OWN expected score
              # count. Previously the coords of the measurement popped above
              # (x, y) were used for every cell, so corner/edge/interior
              # cells were checked against the wrong expected count.
              length(scores) == expected_scores(cell_x, cell_y, grid_size)
            end)

          if length(complete_cells) >= results_requested do
            IO.puts "Calculated #{results_requested} results after processing #{index}/#{length(measurements)}"
            {:halt, {heap, map, index}}
          else
            {:cont, {heap, map, index}}
          end
        else
          {:cont, {heap, map, index}}
        end
      end)

    map
  end

  @doc """
  Sums the per-cell scores for in-grid cells and formats the top
  `results_requested` results, ordered by score (descending), then x, then y
  (both ascending).
  """
  def prepare_results(map, results_requested, grid_size) do
    map
    |> Enum.filter(fn {{x, y}, _scores} ->
      # Drop the out-of-grid cells that affected_coords/2 produces along the
      # grid borders.
      x >= 0 and y >= 0 and x < grid_size and y < grid_size
    end)
    |> Enum.map(fn {coords, scores} -> {coords, Enum.sum(scores)} end)
    # One composite-key sort replaces the original chain of three sorts with
    # strict comparators (by y, then x, then score desc); the composite key
    # makes the intended ordering explicit and deterministic.
    |> Enum.sort_by(fn {{x, y}, score} -> {-score, x, y} end)
    |> Enum.take(results_requested)
    |> Enum.map_join("\n", &format_result/1)
  end

  @doc """
  The 3x3 neighbourhood (including the cell itself) affected by a measurement
  at `{x, y}`. May include out-of-grid coordinates near the borders.
  """
  def affected_coords(x, y) do
    [
      {x - 1, y - 1}, {x, y - 1}, {x + 1, y - 1},
      {x - 1, y},     {x, y},     {x + 1, y},
      {x - 1, y + 1}, {x, y + 1}, {x + 1, y + 1}
    ]
  end

  @doc """
  Number of in-grid measurements whose 3x3 neighbourhood covers the cell at
  `{x, y}`: 4 for corners, 6 for edges, 9 for interior cells.
  """
  def expected_scores(x, y, grid_size) do
    max = grid_size - 1

    cond do
      x in [0, max] and y in [0, max] -> 4
      x in [0, max] or y in [0, max] -> 6
      true -> 9
    end
  end

  # Formats one result as " (x, y, score: s)".
  def format_result({{x, y}, score}) do
    " (#{x}, #{y}, score: #{score})"
  end
end
|
elixir/elixir-mars-water/lib/algos/heap.ex
| 0.671363
| 0.626681
|
heap.ex
|
starcoder
|
defmodule DeskClock.Faces.Lazy do
@moduledoc """
A face that optimizes writes to be minimal on every pass
"""
@behaviour DeskClock.Face
alias ExPaint.{Color, Font}
@impl DeskClock.Face
def create(upper_zone, lower_zone) do
  # Everything starts dirty (:background) so the first render paints the
  # whole frame; the last_* fields are nil until the first draw records them.
  %{
    upper_zone: upper_zone,
    lower_zone: lower_zone,
    label_font: Font.load("fixed6x12"),
    time_font: Font.load("Terminus22"),
    last_upper_time: nil,
    last_lower_time: nil,
    last_upper_label: nil,
    last_lower_label: nil,
    dirty_components: [:background]
  }
end
@impl DeskClock.Face
# Reads the configured time zone for either display slot out of the state.
def get_zone(slot, state) when slot in [:upper_zone, :lower_zone] do
  state[slot]
end
@impl DeskClock.Face
def set_zone(:upper_zone, zone, state) do
  mark_dirty(%{state | upper_zone: zone}, [:upper_time, :upper_label])
end

@impl DeskClock.Face
def set_zone(:lower_zone, zone, state) do
  mark_dirty(%{state | lower_zone: zone}, [:lower_time, :lower_label])
end

# Appends the given components to the state's dirty list so the next render
# pass redraws them.
defp mark_dirty(state, components) do
  %{state | dirty_components: state[:dirty_components] ++ components}
end
# Builds the drawlist for the given wall-clock time. Both times and labels are
# marked dirty every tick (on top of anything already queued, e.g. :background
# on the first pass or zone changes from set_zone/3); the per-component draw
# helpers diff against the last-drawn strings to keep actual writes minimal.
#
# Returns {drawlist, new_state} where new_state remembers what was just drawn
# and has an empty dirty list.
@impl DeskClock.Face
def build_drawlist_for_time(%DateTime{} = time, state) do
  # Convert the time into each configured zone and format it for display.
  # (formatted_time/1 and formatted_label/1 are defined elsewhere in this
  # module, outside this excerpt.)
  upper_time_as_time = Timex.Timezone.convert(time, state[:upper_zone])
  upper_time = formatted_time(upper_time_as_time)
  upper_label = formatted_label(upper_time_as_time)
  lower_time_as_time = Timex.Timezone.convert(time, state[:lower_zone])
  lower_time = formatted_time(lower_time_as_time)
  lower_label = formatted_label(lower_time_as_time)

  state = %{
    state
    | dirty_components:
        state[:dirty_components] ++ [:upper_time, :lower_time, :upper_label, :lower_label]
  }

  {build_dirty_components(upper_time, lower_time, upper_label, lower_label, state),
   %{
     state
     | last_upper_time: upper_time,
       last_lower_time: lower_time,
       last_upper_label: upper_label,
       last_lower_label: lower_label,
       dirty_components: []
   }}
end
defp build_dirty_components(upper_time, lower_time, upper_label, lower_label, state) do
state[:dirty_components]
|> Enum.flat_map(
&build_component(&1, upper_time, lower_time, upper_label, lower_label, state)
)
end
defp build_component(:background, _upper_time, _lower_time, _upper_label, _lower_label, _state) do
{:ok, image} = ExPaint.create(256, 64)
ExPaint.filled_rect(image, {0, 0}, {256, 64}, Color.black())
[{image, {0, 0}}]
end
defp build_component(:upper_label, _upper_time, _lower_time, upper_label, _lower_label, state) do
draw_label(upper_label, state[:last_upper_label], state[:label_font], {8, 8})
end
defp build_component(:lower_label, _upper_time, _lower_time, _upper_label, lower_label, state) do
draw_label(lower_label, state[:last_lower_label], state[:label_font], {8, 40})
end
defp build_component(:upper_time, upper_time, _lower_time, _upper_label, _lower_label, state) do
draw_time(upper_time, state[:last_upper_time], state[:time_font], {40, 1})
end
defp build_component(:lower_time, _upper_time, lower_time, _upper_label, _lower_label, state) do
draw_time(lower_time, state[:last_lower_time], state[:time_font], {40, 33})
end
defp draw_label(label, last_label, font, {x, y}) do
last_label =
case last_label do
nil -> String.duplicate("X", String.length(label))
other -> other
end
case label do
^last_label -> []
other -> [{draw_text(other, font, 0), {x, y}}]
end
end
defp draw_time(time, last_time, font, {x, y}) do
last_time =
case last_time do
nil -> String.duplicate("X", String.length(time))
other -> other
end
{glyph_width, _height} = Font.size(font)
first_changed_character =
Enum.zip(String.graphemes(time), String.graphemes(last_time))
|> Enum.with_index()
|> Enum.find(fn {{old, new}, _index} -> old != new end)
case first_changed_character do
nil ->
[]
{_, index} ->
substring_origin_x = x + glyph_width * index
aligned_substring_origin_x = substring_origin_x - rem(substring_origin_x, 4)
{slice_to_draw, x_offset} =
case substring_origin_x - aligned_substring_origin_x do
0 -> {index..-1, 0}
offset when offset > 0 -> {(index - 1)..-1, offset - glyph_width}
end
image = time |> String.slice(slice_to_draw) |> draw_text(font, x_offset)
[{image, {aligned_substring_origin_x, y}}]
end
end
defp draw_text(text, font, x_offset) do
{glyph_width, height} = Font.size(font)
width = x_offset + glyph_width * String.length(text)
# Pad width out to the next multiple of 4
width = width + (4 - rem(width, 4))
{:ok, image} = ExPaint.create(width, height)
ExPaint.filled_rect(image, {0, 0}, {width, height}, Color.black())
ExPaint.text(image, {x_offset, 0}, font, text, Color.white())
image
end
defp formatted_time(time) do
Timex.format!(time, "{ISOdate}T{h24}:{m}:{s}")
end
defp formatted_label(time) do
time
|> Timex.format!("{Zabbr}")
|> String.pad_leading(4)
end
end
|
lib/desk_clock/faces/lazy.ex
| 0.851891
| 0.508483
|
lazy.ex
|
starcoder
|
defmodule BroadwaySQS.Options do
  @moduledoc """
  Broadway Sqs Option definitions and custom validators.
  """

  @doc """
  Returns the option schema (NimbleOptions-style) for the SQS producer.

  Custom validators are referenced as `{:custom, module, function, args}`
  tuples; the extra args carry the option name so error messages can
  identify which option failed.
  """
  def definition() do
    [
      queue_url: [
        required: true,
        type: {
          :custom,
          __MODULE__,
          :type_non_empty_string,
          [[{:name, :queue_url}]]
        },
        doc: """
        The url for the SQS queue. *Note this does not have to be a
        regional endpoint*. For example, `https://sqs.amazonaws.com/0000000000/my_queue`.
        """
      ],
      sqs_client: [
        doc: """
        A module that implements the `BroadwaySQS.SQSClient`
        behaviour. This module is responsible for fetching and acknowledging the
        messages. Pay attention that all options passed to the producer will be forwarded
        to the client.
        """,
        default: BroadwaySQS.ExAwsClient
      ],
      receive_interval: [
        type: :non_neg_integer,
        doc: """
        The duration (in milliseconds) for which the producer
        waits before making a request for more messages.
        """,
        default: 5000
      ],
      on_success: [
        type: :atom,
        default: :ack,
        doc: """
        configures the acking behaviour for successful messages. See the
        "Acknowledgments" section below for all the possible values.
        """
      ],
      on_failure: [
        type: :atom,
        default: :noop,
        doc: """
        configures the acking behaviour for failed messages. See the
        "Acknowledgments" section below for all the possible values.
        """
      ],
      config: [
        type: :keyword_list,
        default: [],
        doc: """
        A set of options that overrides the default ExAws configuration
        options. The most commonly used options are: `:access_key_id`, `:secret_access_key`,
        `:scheme`, `:region` and `:port`. For a complete list of configuration options and
        their default values, please see the `ExAws` documentation.
        """
      ],
      max_number_of_messages: [
        type: {
          :custom,
          __MODULE__,
          :type_bounded_integer,
          [[{:name, :max_number_of_messages}, {:min, 1}, {:max, 10}]]
        },
        default: 10,
        doc: """
        The maximum number of messages to be fetched
        per request. This value must be between `1` and `10`, which is the maximum number
        allowed by AWS.
        """
      ],
      wait_time_seconds: [
        type: {
          :custom,
          __MODULE__,
          :type_bounded_integer,
          [[{:name, :wait_time_seconds}, {:min, 0}, {:max, 20}]]
        },
        doc: """
        The duration (in seconds) for which the call waits
        for a message to arrive in the queue before returning. This value must be
        between `0` and `20`, which is the maximum number allowed by AWS. For more
        information see ["WaitTimeSeconds" on the Amazon SQS documentation](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/API_ReceiveMessage.html).
        """
      ],
      visibility_timeout: [
        type: {
          :custom,
          __MODULE__,
          :type_bounded_integer,
          [[{:name, :visibility_timeout}, {:min, 0}, {:max, 43200}]]
        },
        doc: """
        The time period (in seconds) that a message will
        remain _invisible_ to other consumers whilst still on the queue and not acknowledged.
        This is passed to SQS when the message (or messages) are read.
        This value must be between 0 and 43200 (12 hours).
        """
      ],
      attribute_names: [
        type: {
          :custom,
          __MODULE__,
          :type_list_limited_member_or_all_atom,
          [
            [
              {:name, :attribute_names},
              {:allowed_members,
               [
                 :sender_id,
                 :sent_timestamp,
                 :approximate_receive_count,
                 :approximate_first_receive_timestamp,
                 :sequence_number,
                 :message_deduplication_id,
                 :message_group_id,
                 :aws_trace_header
               ]}
            ]
          ]
        },
        doc: """
        A list containing the names of attributes that should be
        attached to the response and appended to the `metadata` field of the message.
        Supported values are:
        * `:sender_id`
        * `:sent_timestamp`
        * `:approximate_receive_count`
        * `:approximate_first_receive_timestamp`
        * `:sequence_number`
        * `:message_deduplication_id`
        * `:message_group_id`
        * `:aws_trace_header`
        You can also use `:all` instead of the list if you want to retrieve all attributes
        """
      ],
      message_attribute_names: [
        type: {
          :custom,
          __MODULE__,
          :type_array_non_empty_string_or_all_atom,
          # Fixed: previously passed {:name, :queue_url}, so validation errors
          # for this option reported the wrong option name.
          [[{:name, :message_attribute_names}]]
        },
        doc: """
        A list containing the names of custom message attributes
        that should be attached to the response and appended to the `metadata` field of the
        message. Wildcards `[".*"]` and prefixes `["bar.*"]` will retrieve multiple fields.
        You can also use `:all` instead of the list if you want to retrieve all attributes.
        """
      ],
      test_pid: [
        type: :pid,
        doc: false
      ],
      message_server: [
        type: :pid,
        doc: false
      ]
    ]
  end

  @doc """
  Validates that `value` is an integer within `[min, max]` (inclusive).
  Returns `{:ok, value}` or `{:error, message}`.
  """
  def type_bounded_integer(value, [{:name, _}, {:min, min}, {:max, max}])
      when is_integer(value) and value >= min and value <= max do
    {:ok, value}
  end

  def type_bounded_integer(value, [{:name, name}, {:min, min}, {:max, max}]) do
    {:error,
     "expected :#{name} to be an integer between #{min} and #{max}, got: #{inspect(value)}"}
  end

  @doc """
  Validates that `value` is the atom `:all` or a list of non-empty strings.
  Returns `{:ok, value}` or `{:error, message}` listing the invalid members.
  """
  def type_array_non_empty_string_or_all_atom(:all, _) do
    {:ok, :all}
  end

  def type_array_non_empty_string_or_all_atom(value, [{:name, name}]) do
    # A member is invalid when it is not a binary at all (covers nil) or is empty.
    invalid_members = Enum.filter(value, &(not is_binary(&1) or &1 == ""))

    if invalid_members == [] do
      {:ok, value}
    else
      {:error,
       "expected :#{name} to be a list with non-empty strings, got: #{inspect(invalid_members)}"}
    end
  end

  @doc """
  Validates that `value` is a non-empty string.
  Returns `{:ok, value}` or `{:error, message}`.
  """
  def type_non_empty_string("", [{:name, name}]) do
    {:error, "expected :#{name} to be a non-empty string, got: \"\""}
  end

  # `is_binary/1` already rejects nil, so no separate nil check is needed.
  def type_non_empty_string(value, _) when is_binary(value) do
    {:ok, value}
  end

  def type_non_empty_string(value, [{:name, name}]) do
    {:error, "expected :#{name} to be a non-empty string, got: #{inspect(value)}"}
  end

  @doc """
  Validates that `value` is the atom `:all` or a list whose members are all
  drawn from `allowed_members`. Returns `{:ok, value}` or `{:error, message}`.
  """
  def type_list_limited_member_or_all_atom(:all, _) do
    {:ok, :all}
  end

  def type_list_limited_member_or_all_atom(value, [
        {:name, name},
        {:allowed_members, allowed_members}
      ]) do
    # List subtraction leaves only the members not present in allowed_members.
    if value -- allowed_members == [] do
      {:ok, value}
    else
      {:error,
       "expected :#{name} to be a list with possible members #{inspect(allowed_members)}, got: #{inspect(value)}"}
    end
  end
end
|
lib/broadway_sqs/options.ex
| 0.851228
| 0.4436
|
options.ex
|
starcoder
|
defmodule Noizu.MnesiaVersioning.SchemaBehaviour do
  @moduledoc """
  This method provides information about the changesets we will be running.
  Currently only the change_sets() method is provided which should return a list of
  changesets structures to execute. In the future we will provide support for directory scanning,
  Similiar to how the test folders works.
  """
  @doc """
  Return array of changesets to be applied/rolledback.
  @note the current logic is fairly crude.
  One may simply implement a module that returns an inline array of changesets.
  ```
  defmodule MyApp.SchemaVersioning do
  @behaviour Noizu.MnesiaVersioning.SchemaBehaviour
  def change_sets() do
  [
  %ChangeSet{...},
  %ChangeSet{...},
  ]
  end
  end
  ```
  If desired, however, one could ofcourse put changesets into seperate module files
  and simply concatenate them together here.
  defmodule MyApp.SchemaVersioning do
  @behaviour Noizu.MnesiaVersioning.SchemaBehaviour
  def change_sets() do
  # where the following each return an array of change sets.
  MyApp.SchemaVersioning.UserFeature.change_sets() ++ MyApp.SchemaVersioning.PokerGameFeature.change_sets()
  end
  end
  """
  @callback change_sets() :: [Noizu.MnesiaVersioning.ChangeSet]
  #-----------------------------------------------------------------------------
  # Using Implementation
  #-----------------------------------------------------------------------------
  # Injects Amnesia table helpers into the using module. Options:
  #   :default_timeout - table-wait timeout in ms for destroy_table (default 60_000)
  #   :default_cluster - default replica spec for create_table; :auto means
  #                      disk copies on the local node only.
  defmacro __using__(options) do
    default_timeout = options[:default_timeout] || 60_000
    default_cluster = options[:default_cluster] || :auto
    quote do
      import unquote(__MODULE__)
      @behaviour Noizu.MnesiaVersioning.SchemaBehaviour
      # Function-call style attribute definitions (equivalent to `@attr value`).
      @default_timeout(unquote(default_timeout))
      @default_cluster(unquote(default_cluster))
      use Amnesia
      # Creates `tab` unless it already exists. `dist` is the Amnesia replica
      # spec; :auto falls back to the module-level @default_cluster, which in
      # turn defaults to disk copies on the current node.
      def create_table(tab, dist \\ :auto) do
        dist = case dist do
          :auto ->
            case @default_cluster do
              :auto -> [disk: [node()]]
              v -> v
            end
          v -> v
        end
        if !Amnesia.Table.exists?(tab) do
          :ok = tab.create(dist)
        end
      end
      # Destroys `tab` if it exists, first waiting (up to `timeout` ms) for the
      # table to be available so the destroy does not race table creation.
      def destroy_table(tab, timeout \\ @default_timeout) do
        if Amnesia.Table.exists?(tab) do
          :ok = Amnesia.Table.wait([tab], timeout)
          :ok = Amnesia.Table.destroy(tab)
        end
      end
    end # end __using__
  end # end macro
end
|
lib/mnesia_versioning/behaviour/schema_behaviour.ex
| 0.761538
| 0.848471
|
schema_behaviour.ex
|
starcoder
|
defmodule Lifx.Types.HSBK do
  # Hue/Saturation/Brightness/Kelvin colour value. Each field is encoded as a
  # 16-bit unsigned little-endian integer on the wire (see the Lifx.Decoder
  # and Lifx.Encoder implementations below).
  defstruct hue: 65535, saturation: 100, brightness: 32768, kelvin: 5000
end
defmodule Lifx.Types.GetColor do
  # Zero-payload request for the device's current colour state.
  # `:ignored` exists only because defstruct requires at least one field.
  defstruct [:ignored]
end
defmodule Lifx.Types.SetColor do
  alias Lifx.Types.HSBK
  # Sets the device colour; `duration` is the fade time (milliseconds,
  # 32-bit unsigned little-endian on the wire).
  defstruct color: %HSBK{}, duration: 1000
end
defmodule Lifx.Types.StateColor do
  alias Lifx.Types.HSBK
  # Device colour-state response: current colour, power level (16-bit)
  # and the device label (32-byte padded string on the wire).
  defstruct color: %HSBK{}, power: 0, label: ""
end
defmodule Lifx.Types.GetLightPower do
  # Zero-payload request for the current light power level.
  # `:ignored` exists only because defstruct requires at least one field.
  defstruct [:ignored]
end
defmodule Lifx.Types.SetLightPower do
  # Sets the light power level (16-bit) with a fade `duration` in
  # milliseconds (32-bit unsigned little-endian on the wire).
  defstruct level: 0, duration: 1000
end
defmodule Lifx.Types.StateLightPower do
  # Light power-state response: current power level (16-bit on the wire).
  defstruct level: 0
end
defimpl Lifx.Decoder, for: [Lifx.Types.HSBK,
                            Lifx.Types.GetColor,
                            Lifx.Types.SetColor,
                            Lifx.Types.StateColor,
                            Lifx.Types.GetLightPower,
                            Lifx.Types.SetLightPower,
                            Lifx.Types.StateLightPower] do
  use Lifx.CodecHelper
  alias Lifx.Types.{GetColor, SetColor, StateColor,
                    GetLightPower, SetLightPower, StateLightPower, HSBK}
  # HSBK payload: four 16-bit unsigned little-endian fields, 8 bytes total.
  def decode!(%HSBK{}=hsbk, payload) do
    <<hue::integer-unsigned-little-size(16),
      saturation::integer-unsigned-little-size(16),
      brightness::integer-unsigned-little-size(16),
      kelvin::integer-unsigned-little-size(16)>> = payload
    %{hsbk | hue: hue, saturation: saturation, brightness: brightness, kelvin: kelvin}
  end
  # Get* requests carry no payload at all.
  def decode!(%GetColor{}, <<>>), do: %GetColor{}
  def decode!(%GetLightPower{}, <<>>), do: %GetLightPower{}
  # SetColor payload: 1 reserved byte, 8-byte HSBK, 32-bit duration (ms).
  def decode!(%SetColor{}=message, payload) do
    <<_reserved::binary-1,
      color::binary-8,
      duration::integer-unsigned-little-size(32)>> = payload
    %{message | duration: duration, color: Lifx.Decoder.decode!(%HSBK{}, color)}
  end
  # StateColor payload: 8-byte HSBK, 2 reserved bytes, 16-bit power,
  # 32-byte padded label, 8 reserved bytes. `unpad/1` strips label padding.
  def decode!(%StateColor{}=message, payload) do
    <<color::binary-8,
      _reserved::binary-2,
      power::integer-unsigned-little-size(16),
      label::binary-32,
      _reserved1::binary-8>> = payload
    %{message | power: power, label: unpad(label), color: Lifx.Decoder.decode!(%HSBK{}, color)}
  end
  # SetLightPower payload: 16-bit level, 32-bit duration (ms).
  def decode!(%SetLightPower{}=message, payload) do
    <<level::integer-unsigned-little-size(16),
      duration::integer-unsigned-little-size(32)>> = payload
    %{message | level: level, duration: duration}
  end
  # StateLightPower payload: a single 16-bit level.
  def decode!(%StateLightPower{}=message, payload) do
    <<level::integer-unsigned-little-size(16)>> = payload
    %{message | level: level}
  end
end
defimpl Lifx.Encoder, for: [Lifx.Types.HSBK,
                            Lifx.Types.GetColor,
                            Lifx.Types.SetColor,
                            Lifx.Types.StateColor,
                            Lifx.Types.GetLightPower,
                            Lifx.Types.SetLightPower,
                            Lifx.Types.StateLightPower] do
  use Lifx.CodecHelper
  alias Lifx.Types.{GetColor, SetColor, StateColor,
                    GetLightPower, SetLightPower, StateLightPower, HSBK}
  # HSBK: four 16-bit unsigned little-endian fields (8 bytes), mirroring
  # the Lifx.Decoder implementation above.
  def encode!(%HSBK{hue: hue, saturation: saturation, brightness: brightness, kelvin: kelvin}) do
    <<hue::integer-unsigned-little-size(16),
      saturation::integer-unsigned-little-size(16),
      brightness::integer-unsigned-little-size(16),
      kelvin::integer-unsigned-little-size(16)>>
  end
  # Get* requests carry no payload.
  def encode!(%GetColor{}), do: <<>>
  def encode!(%GetLightPower{}), do: <<>>
  # SetColor: 1 reserved byte, 8-byte HSBK, 32-bit duration — the byte-wise
  # mirror of decode!(%SetColor{}, ...). Fixed: the reserved field was `0::1`
  # (a single bit), which produced a non-byte-aligned bitstring instead of a
  # binary; the decoder reads `binary-1`, i.e. one full byte.
  def encode!(%SetColor{color: color, duration: duration}) do
    hsbk = Lifx.Encoder.encode!(color)
    <<0::8, hsbk::binary, duration::integer-unsigned-little-size(32)>>
  end
  # StateColor: 8-byte HSBK, 2 reserved bytes, 16-bit power, 32-byte padded
  # label, 8 reserved bytes — mirroring decode!(%StateColor{}, ...). Fixed:
  # the 2-byte reserved field had been mangled into garbage text (an
  # IPv6-looking token) and the trailing reserved field was `0::1` instead of
  # the 8 bytes the decoder expects.
  def encode!(%StateColor{color: color, power: power, label: label}) do
    hsbk = Lifx.Encoder.encode!(color)
    <<hsbk::binary, 0::16, power::integer-unsigned-little-size(16), (pad(label, 32))::binary-32, 0::64>>
  end
  # SetLightPower: 16-bit level, 32-bit duration (ms).
  def encode!(%SetLightPower{level: level, duration: duration}) do
    <<level::integer-unsigned-little-size(16),
      duration::integer-unsigned-little-size(32)>>
  end
  # StateLightPower: a single 16-bit level.
  def encode!(%StateLightPower{level: level}) do
    <<level::integer-unsigned-little-size(16)>>
  end
end
|
lib/lifx/types/light.ex
| 0.609873
| 0.493958
|
light.ex
|
starcoder
|
defmodule NewRelic.Transaction.Sidecar do
  use GenServer, restart: :temporary
  @moduledoc false
  # A per-transaction companion process. The transaction process (and any
  # processes it spawns) report attributes to the sidecar via casts; the
  # sidecar assembles them and runs transaction completion when done.
  # Two public ETS tables map processes to their sidecar (LookupStore) and
  # sidecars to trace context (ContextStore); a persistent_term counter
  # tracks the number of live sidecars.
  alias NewRelic.Transaction.ErlangTrace
  def setup_stores do
    :ets.new(__MODULE__.ContextStore, [:named_table, :set, :public, read_concurrency: true])
    :ets.new(__MODULE__.LookupStore, [:named_table, :set, :public, read_concurrency: true])
    :persistent_term.put({__MODULE__, :counter}, :counters.new(1, []))
  end
  # Starts a sidecar for the calling process and blocks until it is ready.
  def track(type) do
    # We use `GenServer.start` to avoid a bi-directional link
    # and guarantee that we never crash the Transaction process
    # even in the case of an unexpected bug. Additionally, this
    # blocks the Transaction process the smallest amount possible
    {:ok, sidecar} = GenServer.start(__MODULE__, {self(), type})
    store_sidecar(self(), sidecar)
    set_sidecar(sidecar)
    receive do
      :sidecar_ready -> :ok
    end
  end
  # Monitors the transaction ("parent") process so the sidecar can finish
  # the transaction when the parent exits (see the :DOWN handler below).
  def init({parent, type}) do
    Process.monitor(parent)
    send(parent, :sidecar_ready)
    counter(:add)
    {:ok,
     %{
       start_time: System.system_time(:millisecond),
       type: type,
       parent: parent,
       exclusions: [],
       offspring: MapSet.new(),
       attributes: %{}
     }}
  end
  # Attaches the calling process to an existing transaction's sidecar
  # (no-op when the caller is already connected or explicitly untracked).
  def connect(%{sidecar: nil}), do: :ignore
  def connect(%{sidecar: sidecar, parent: parent}) do
    with nil <- get_sidecar() do
      cast(sidecar, {:spawn, parent, self(), System.system_time(:millisecond)})
      store_sidecar(self(), sidecar)
      set_sidecar(sidecar)
      ErlangTrace.trace()
    end
  end
  # Opts the calling process out of tracking; :no_track in the process
  # dictionary short-circuits future lookups.
  def disconnect() do
    set_sidecar(:no_track)
    cleanup(lookup: self())
  end
  def tracking?() do
    is_pid(get_sidecar())
  end
  # Records that `parent` spawned `child`, propagating the parent's sidecar
  # to the child in LookupStore. Returns the sidecar pid, or the lookup miss.
  def track_spawn(parent, child, timestamp) do
    with parent_sidecar when is_pid(parent_sidecar) <- lookup_sidecar(parent) do
      store_sidecar(child, parent_sidecar)
      cast(parent_sidecar, {:spawn, parent, child, timestamp})
      parent_sidecar
    end
  end
  # Attribute reporting API: merge, numeric increment, and list append.
  def add(attrs) do
    cast({:add_attributes, attrs})
  end
  def incr(attrs) do
    cast({:incr_attributes, attrs})
  end
  def append(attrs) do
    cast({:append_attributes, attrs})
  end
  # Distributed-trace context is keyed by sidecar pid in ContextStore.
  def trace_context(context) do
    :ets.insert(__MODULE__.ContextStore, {{:context, get_sidecar()}, context})
  end
  def trace_context() do
    case :ets.lookup(__MODULE__.ContextStore, {:context, get_sidecar()}) do
      [{_, value}] -> value
      [] -> nil
    end
  end
  # Drops the whole transaction (sidecar stops without reporting).
  def ignore() do
    cast(:ignore)
    set_sidecar(:no_track)
  end
  # Excludes only the calling process's contributions from the transaction.
  def exclude() do
    cast({:exclude, self()})
    set_sidecar(:no_track)
  end
  # Marks the transaction finished; the sidecar reports and stops.
  def complete() do
    with sidecar when is_pid(sidecar) <- get_sidecar() do
      cleanup(context: sidecar)
      cleanup(lookup: self())
      clear_sidecar()
      cast(sidecar, :complete)
    end
  end
  # cast/1 targets the caller's own sidecar; cast/2 an explicit one.
  defp cast(message) do
    GenServer.cast(get_sidecar(), message)
  end
  defp cast(sidecar, message) do
    GenServer.cast(sidecar, message)
  end
  def handle_cast({:add_attributes, attrs}, state) do
    attributes = Map.merge(state.attributes, Map.new(attrs))
    {:noreply, %{state | attributes: attributes}}
  end
  def handle_cast({:incr_attributes, attrs}, state) do
    attributes =
      Enum.reduce(attrs, state.attributes, fn {key, val}, acc ->
        Map.update(acc, key, val, &(&1 + val))
      end)
    {:noreply, %{state | attributes: attributes}}
  end
  # Appended values accumulate in reverse (prepend) order.
  def handle_cast({:append_attributes, attrs}, state) do
    attributes =
      Enum.reduce(attrs, state.attributes, fn {key, val}, acc ->
        Map.update(acc, key, [val], &[val | &1])
      end)
    {:noreply, %{state | attributes: attributes}}
  end
  # Ignore spawn reports that pre-date this transaction's start.
  def handle_cast({:spawn, _parent, _child, timestamp}, %{start_time: start_time} = state)
      when timestamp < start_time do
    {:noreply, state}
  end
  # Monitor the child so its exit is recorded in :process_exits below.
  def handle_cast({:spawn, parent, child, timestamp}, state) do
    Process.monitor(child)
    spawn = {child, timestamp, parent, NewRelic.Util.process_name(child)}
    {:noreply,
     %{
       state
       | attributes: Map.update(state.attributes, :process_spawns, [spawn], &[spawn | &1]),
         offspring: MapSet.put(state.offspring, child)
     }}
  end
  def handle_cast({:offspring, child}, state) do
    {:noreply, %{state | offspring: MapSet.put(state.offspring, child)}}
  end
  def handle_cast({:exclude, pid}, state) do
    cleanup(lookup: pid)
    {:noreply, %{state | exclusions: [pid | state.exclusions]}}
  end
  # Ignored transaction: clean up ETS entries and stop without reporting.
  def handle_cast(:ignore, state) do
    cleanup(context: self())
    cleanup(lookup: state.parent)
    Enum.each(state.offspring, &cleanup(lookup: &1))
    {:stop, :normal, state}
  end
  def handle_cast(:complete, state) do
    {:noreply, state, {:continue, :complete}}
  end
  # Parent exit for :other (non-web) transactions finishes the transaction;
  # abnormal exits (not :shutdown, not marked expected) are recorded as errors.
  def handle_info(
        {:DOWN, _, _, parent, down_reason},
        %{type: :other, parent: parent} = state
      ) do
    end_time_mono = System.monotonic_time()
    attributes =
      with {reason, stack} when reason != :shutdown <- down_reason,
           false <- match?(%{expected: true}, reason) do
        Map.merge(state.attributes, %{
          error: true,
          error_kind: :exit,
          error_reason: inspect(reason),
          error_stack: inspect(stack)
        })
      else
        _ -> state.attributes
      end
      |> Map.put_new(:end_time_mono, end_time_mono)
    {:noreply, %{state | attributes: attributes}, {:continue, :complete}}
  end
  # Any other :DOWN is an offspring process exiting; record it.
  def handle_info({:DOWN, _, _, child, _}, state) do
    p_exit = {child, System.system_time(:millisecond)}
    {:noreply,
     %{
       state
       | attributes: Map.update(state.attributes, :process_exits, [p_exit], &[p_exit | &1])
     }}
  end
  def handle_info(_msg, state) do
    {:noreply, state}
  end
  # Final teardown: clear ETS entries, report the transaction, decrement the
  # live-sidecar counter, emit self-instrumentation metrics, then stop.
  def handle_continue(:complete, state) do
    cleanup(context: self())
    cleanup(lookup: state.parent)
    Enum.each(state.offspring, &cleanup(lookup: &1))
    run_complete(state)
    counter(:sub)
    report_stats()
    {:stop, :normal, :completed}
  end
  @kb 1024
  defp report_stats() do
    info = Process.info(self(), [:memory, :reductions])
    NewRelic.report_metric(
      {:supportability, :agent, "Sidecar/Process/MemoryKb"},
      value: info[:memory] / @kb
    )
    NewRelic.report_metric(
      {:supportability, :agent, "Sidecar/Process/Reductions"},
      value: info[:reductions]
    )
  end
  # The caller's sidecar is cached in its process dictionary under
  # :nr_tx_sidecar (pid | :no_track | nil).
  defp clear_sidecar() do
    Process.delete(:nr_tx_sidecar)
  end
  defp set_sidecar(pid) do
    Process.put(:nr_tx_sidecar, pid)
    pid
  end
  # Resolves the caller's sidecar. On a cache miss, searches linked $callers
  # then linked $ancestors for a process with a known sidecar; when found,
  # registers self as offspring and caches the result.
  def get_sidecar() do
    case Process.get(:nr_tx_sidecar) do
      nil ->
        with {:links, links} <- Process.info(self(), :links),
             sidecar when is_pid(sidecar) <-
               lookup_sidecar_in(linked_process_callers(links)) ||
                 lookup_sidecar_in(linked_process_ancestors(links)) do
          cast(sidecar, {:offspring, self()})
          store_sidecar(self(), sidecar)
          set_sidecar(sidecar)
        end
      :no_track ->
        nil
      sidecar ->
        sidecar
    end
  end
  defp lookup_sidecar_in(processes) do
    Enum.find_value(processes, &lookup_sidecar/1)
  end
  defp store_sidecar(_, nil), do: :no_sidecar
  defp store_sidecar(pid, sidecar) do
    :ets.insert(__MODULE__.LookupStore, {pid, sidecar})
  end
  defp lookup_sidecar(pid) when is_pid(pid) do
    case :ets.lookup(__MODULE__.LookupStore, pid) do
      [{_, sidecar}] -> sidecar
      [] -> nil
    end
  end
  defp lookup_sidecar(_named_process), do: nil
  # Only $callers/$ancestors that are also in our link set are considered.
  defp linked_process_callers(links) do
    for pid <- Process.get(:"$callers", []) |> Enum.reverse(),
        ^pid <- links do
      pid
    end
  end
  defp linked_process_ancestors(links) do
    for pid <- Process.get(:"$ancestors", []),
        ^pid <- links do
      pid
    end
  end
  defp cleanup(context: sidecar) do
    :ets.delete(__MODULE__.ContextStore, {:context, sidecar})
  end
  defp cleanup(lookup: root) do
    :ets.delete(__MODULE__.LookupStore, root)
  end
  def counter() do
    :counters.get(:persistent_term.get({__MODULE__, :counter}), 1)
  end
  defp counter(:add) do
    :counters.add(:persistent_term.get({__MODULE__, :counter}), 1, 1)
  end
  defp counter(:sub) do
    :counters.sub(:persistent_term.get({__MODULE__, :counter}), 1, 1)
  end
  defp run_complete(%{attributes: attributes} = state) do
    attributes
    |> process_exclusions(state.exclusions)
    |> NewRelic.Transaction.Complete.run(state.parent)
  end
  # Drops spawn records contributed by excluded processes before reporting.
  defp process_exclusions(attributes, exclusions) do
    attributes
    |> Map.update(:process_spawns, [], fn spawns ->
      Enum.reject(spawns, fn {pid, _, _, _} -> pid in exclusions end)
    end)
  end
end
|
lib/new_relic/transaction/sidecar.ex
| 0.729905
| 0.46393
|
sidecar.ex
|
starcoder
|
Describe the intended usage of this charm and anything unique about how this
charm relates to others here.
This README will be displayed in the Charm Store, it should be either Markdown
or RST. Ideal READMEs include instructions on how to use the charm, expected
usage, and charm features that your audience might be interested in. For an
example of a well written README check out Hadoop:
http://jujucharms.com/charms/precise/hadoop
Use this as a Markdown reference if you need help with the formatting of this
README: http://askubuntu.com/editing-help
This charm provides [service][]. Add a description here of what the service
itself actually does.
Also remember to check the [icon guidelines][] so that your charm looks good
in the Juju GUI.
# Usage
Step by step instructions on using the charm:
juju deploy servicename
and so on. If you're providing a web service or something that the end user
needs to go to, tell them here, especially if you're deploying a service that
might listen to a non-default port.
You can then browse to http://ip-address to configure the service.
## Scale out Usage
If the charm has any recommendations for running at scale, outline them in
examples here. For example if you have a memcached relation that improves
performance, mention it here.
## Known Limitations and Issues
This not only helps users but gives people a place to start if they want to help
you add features to your charm.
# Configuration
The configuration options will be listed on the charm store; however, if you're
making assumptions or opinionated decisions in the charm (like setting a default
administrator password), you should detail that here so the user knows how to
change it immediately, etc.
# Contact Information
Though this will be listed in the charm store itself don't assume a user will
know that, so include that information here:
## Upstream Project Name
- Upstream website
- Upstream bug tracker
- Upstream mailing list or contact information
- Feel free to add things if it's useful for users
[service]: http://example.com
[icon guidelines]: https://jujucharms.com/docs/stable/authors-charm-icon
|
voting-app/README.ex
| 0.621656
| 0.690429
|
README.ex
|
starcoder
|
defmodule Unicode.Transform.Rule.Transform do
  @moduledoc """
  #### 10.3.6 [Transform Rules](https://unicode.org/reports/tr35/tr35-general.html#Transform_Rules)
  Each transform rule consists of two colons followed by a transform name, which is of the form source-target. For example:
  ```
  :: NFD ;
  :: und_Latn-und_Greek ;
  :: Latin-Greek; # alternate form
  ```
  If either the source or target is 'und', it can be omitted, thus 'und_NFC' is equivalent to 'NFC'. For compatibility, the English names for scripts can be used instead of the und_Latn locale name, and "Any" can be used instead of "und". Case is not significant.
  The following transforms are defined not by rules, but by the operations in the Unicode Standard, and may be used in building any other transform:
  > **Any-NFC, Any-NFD, Any-NFKD, Any-NFKC** - the normalization forms defined by [[UAX15](https://www.unicode.org/reports/tr41/#UAX15)].
  >
  > **Any-Lower, Any-Upper, Any-Title** - full case transformations, defined by [[Unicode](https://unicode.org/reports/tr35/tr35-general.html#Transform_Rules)] Chapter 3.
  In addition, the following special cases are defined:
  > **Any-Null** - has no effect; that is, each character is left alone.
  > **Any-Remove** - maps each character to the empty string; that is, removes each character.
  The inverse of a transform rule uses parentheses to indicate what should be done when the inverse transform is used. For example:
  ```
  :: lower () ; # only executed for the normal
  :: (lower) ; # only executed for the inverse
  :: lower ; # executed for both the normal and the inverse
  ```
  """
  # Parsed rule: `forward`/`backward` are the transform names (either may be
  # the empty string when absent), `comment` is any trailing `#` comment text.
  defstruct [:forward, :backward, :comment]
  alias Unicode.Transform.Rule.Comment
  # Character class for a transform name (letters, underscore, hyphen).
  @transform "[A-Za-z_-]"
  # Captures: forward name, optional "(backward)" name, optional "# comment".
  @regex ~r/(?<forward>#{@transform}*)?\s*(\((?<backward>#{@transform}*)\))?\s*;\s*(\#\s*(?<comment>.*))?/u
  # Parses a ":: …" transform rule line; returns nil for any other line so
  # callers can try other rule parsers.
  def parse(<<"::">> <> rule) do
    rule =
      Regex.named_captures(@regex, String.trim(rule))
      |> Enum.map(fn {k, v} -> {String.to_atom(k), v} end)
    struct(__MODULE__, rule)
  end
  def parse(_other) do
    nil
  end
  defimpl Unicode.Transform.Rule do
    # An empty forward/backward name means the direction has no transform;
    # emit no code for it.
    def to_forward_code(%{forward: ""} = _rule) do
      []
    end
    def to_forward_code(rule) do
      [Comment.comment_from(rule), "transform(", inspect(rule.forward), ")", "\n"]
    end
    def to_backward_code(%{backward: ""} = _rule) do
      []
    end
    def to_backward_code(rule) do
      [Comment.comment_from(rule), "transform(", inspect(rule.backward), ")", "\n"]
    end
  end
end
|
lib/unicode/transform/rule/transform.ex
| 0.867696
| 0.891008
|
transform.ex
|
starcoder
|
defmodule Membrane.Clock do
@moduledoc """
Clock is a Membrane utility that allows elements to measure time according to
a particular clock, which can be e.g. a soundcard hardware clock.
Internally, Clock is a GenServer process that can receive _updates_ (see `t:update_message_t/0`),
which are messages containing amount of time until the next update.
For example, a sink playing audio to the sound card can send an update before
each write to the sound card buffer (for practical reasons that can be done every
100 or 1000 writes). Although it might be more intuitive to send updates with
the time passed, in practice the described approach turns out to be more convenient,
as it simplifies the first update.
Basing on updates, Clock calculates the `t:ratio_t/0` of its time to the reference
time. The reference time can be configured with `:time_provider` option. The ratio
is broadcasted (see `t:ratio_message_t/0`) to _subscribers_ (see `subscribe/2`)
- processes willing to synchronize to the custom clock. Subscribers can adjust
their timers according to received ratio - timers started with
`t:Membrane.Element.Action.start_timer_t/0` action in elements do it automatically.
Initial ratio is equal to 1, which means that if no updates are received,
Clock is synchronized to the reference time.
## Proxy mode
Clock can work in _proxy_ mode, which means it cannot receive updates, but
it receives ratio from another clock instead, and forwards it to subscribers.
Proxy mode is enabled with `proxy_for: pid` or `proxy: true` (no initial proxy)
option, and the proxy is set/changed using `proxy_for/2`.
"""
use Bunch
use GenServer
alias Membrane.Core.Message
alias Membrane.Time
@type t :: pid
@typedoc """
Ratio of the Clock time to the reference time.
"""
@type ratio_t :: Ratio.t() | non_neg_integer
@typedoc """
Update message received by the Clock. It should contain the time till the next
update.
"""
@type update_message_t ::
{:membrane_clock_update,
milliseconds ::
non_neg_integer
| Ratio.t()
| {numerator :: non_neg_integer, denominator :: pos_integer}}
@typedoc """
Ratio message sent by the Clock to all its subscribers. It contains the ratio
of the custom clock time to the reference time.
"""
@type ratio_message_t :: {:membrane_clock_ratio, clock :: pid, ratio_t}
@typedoc """
Options accepted by `start_link/2` and `start/2` functions.
They are the following:
- `time_provider` - function providing the reference time in milliseconds
- `proxy` - determines whether the Clock should work in proxy mode
- `proxy_for` - enables the proxy mode and sets proxied Clock to pid
Check the moduledoc for more details.
"""
@type option_t ::
{:time_provider, (() -> Time.t())}
| {:proxy, boolean}
| {:proxy_for, pid | nil}
  @spec start_link([option_t], GenServer.options()) :: GenServer.on_start()
  def start_link(options \\ [], gen_server_options \\ []) do
    GenServer.start_link(__MODULE__, options, gen_server_options)
  end
  # Unlinked variant of start_link/2; same options.
  @spec start([option_t], GenServer.options()) :: GenServer.on_start()
  def start(options \\ [], gen_server_options \\ []) do
    GenServer.start(__MODULE__, options, gen_server_options)
  end
  @doc """
  Subscribes `pid` for receiving `t:ratio_message_t/0` messages from the clock.
  This function can be called multiple times from the same process. To unsubscribe,
  `unsubscribe/2` should be called the same amount of times. The subscribed pid
  always receives one message, regardless of how many times it called `subscribe/2`.
  """
  @spec subscribe(t, subscriber :: pid) :: :ok
  def subscribe(clock, pid \\ self()) do
    # Asynchronous: the initial ratio message arrives later from the clock.
    GenServer.cast(clock, {:clock_subscribe, pid})
  end
  @doc """
  Unsubscribes `pid` from receiving `t:ratio_message_t/0` messages from the clock.
  For unsubscription to take effect, `unsubscribe/2` should be called the same
  amount of times as `subscribe/2`.
  """
  @spec unsubscribe(t, subscriber :: pid) :: :ok
  def unsubscribe(clock, pid \\ self()) do
    GenServer.cast(clock, {:clock_unsubscribe, pid})
  end
  @doc """
  Sets a new proxy clock to `clock_to_proxy_for`.
  """
  @spec proxy_for(t, clock_to_proxy_for :: pid | nil) :: :ok
  def proxy_for(clock, clock_to_proxy_for) do
    GenServer.cast(clock, {:proxy_for, clock_to_proxy_for})
  end
@impl GenServer
def init(options) do
proxy_opts = get_proxy_options(options[:proxy], options[:proxy_for])
state =
%{
ratio: 1,
subscribers: %{},
time_provider: options |> Keyword.get(:time_provider, fn -> Time.monotonic_time() end)
}
|> Map.merge(proxy_opts)
if pid = proxy_opts[:proxy_for], do: Message.send(pid, :clock_subscribe, self())
{:ok, state}
end
@impl GenServer
def handle_cast({:proxy_for, proxy_for}, %{proxy: true} = state) do
if state.proxy_for, do: unsubscribe(state.proxy_for)
state = %{state | proxy_for: proxy_for}
state =
if proxy_for do
subscribe(proxy_for)
state
else
broadcast_and_update_ratio(1, state)
end
{:noreply, state}
end
@impl GenServer
def handle_cast({:clock_subscribe, pid}, state) do
  state
  |> update_in([:subscribers, pid], fn
    nil ->
      # First subscription from this pid: immediately send the current ratio
      # and monitor the subscriber so it can be cleaned up when it dies.
      send_ratio(pid, state.ratio)
      monitor = Process.monitor(pid)
      %{monitor: monitor, subscriptions: 1}

    %{subscriptions: subs} = subscriber ->
      # Repeated subscription only bumps the counter; no extra message sent.
      %{subscriber | subscriptions: subs + 1}
  end)
  ~> {:noreply, &1}
end
@impl GenServer
def handle_cast({:clock_unsubscribe, pid}, state) do
  if Map.has_key?(state.subscribers, pid) do
    # Decrement the counter maintained by :clock_subscribe; the subscriber
    # is fully removed only when the counter reaches zero.
    {subs, state} =
      state |> Bunch.Access.get_updated_in([:subscribers, pid, :subscriptions], &(&1 - 1))

    if subs == 0, do: handle_unsubscribe(pid, state), else: state
  else
    state
  end
  ~> {:noreply, &1}
end
@impl GenServer
# Non-proxy clock: an update from the clock owner drives ratio recalculation.
def handle_info({:membrane_clock_update, till_next}, %{proxy: false} = state) do
  {:noreply, handle_clock_update(till_next, state)}
end

@impl GenServer
# Proxy clock: re-broadcast ratios coming from the currently proxied clock
# (note the pin on `pid` in the state match).
def handle_info({:membrane_clock_ratio, pid, ratio}, %{proxy: true, proxy_for: pid} = state) do
  {:noreply, broadcast_and_update_ratio(ratio, state)}
end

@impl GenServer
# When ratio from previously proxied clock comes in after unsubscribing
def handle_info({:membrane_clock_ratio, _pid, _ratio}, %{proxy: true} = state) do
  {:noreply, state}
end

@impl GenServer
# A monitored subscriber died - remove it regardless of its subscription count.
def handle_info({:DOWN, _ref, :process, pid, _reason}, state) do
  {:noreply, handle_unsubscribe(pid, state)}
end
# Resolves the :proxy / :proxy_for init options into the proxy-related part
# of the state. A pid in `proxy_for` implies proxy mode; `proxy: true` alone
# enables proxy mode without a target; otherwise the clock keeps its own
# timing fields.
defp get_proxy_options(proxy, proxy_for) do
  cond do
    is_pid(proxy_for) -> %{proxy: true, proxy_for: proxy_for}
    proxy == true -> %{proxy: true, proxy_for: nil}
    true -> %{init_time: nil, clock_time: 0, till_next: nil, proxy: false}
  end
end
# Removes a subscriber entirely: demonitor (flushing any already-queued
# :DOWN message) and drop its entry from the subscribers map.
defp handle_unsubscribe(pid, state) do
  Process.demonitor(state.subscribers[pid].monitor, [:flush])
  state |> Bunch.Access.delete_in([:subscribers, pid])
end
# Updates may arrive as a {numerator, denominator} pair - normalize to Ratio.
defp handle_clock_update({nom, denom}, state) do
  handle_clock_update(Ratio.new(nom, denom), state)
end

defp handle_clock_update(till_next, state) do
  # `use Ratio` overloads the arithmetic/comparison operators used below.
  use Ratio

  if till_next < 0 do
    raise "Clock update time cannot be negative, received: #{inspect(till_next)}"
  end

  till_next = till_next * Time.millisecond()

  case state.init_time do
    # First ever update: only record the reference point, there is nothing
    # to compare against yet.
    nil -> %{state | init_time: state.time_provider.(), till_next: till_next}
    _init_time -> do_handle_clock_update(till_next, state)
  end
end
# Recomputes the ratio of accumulated clock time to real (provider) time
# elapsed since init, then broadcasts it to subscribers.
defp do_handle_clock_update(till_next, state) do
  use Ratio
  %{till_next: from_previous, clock_time: clock_time} = state
  # Clock time advances by the duration announced in the previous update.
  clock_time = clock_time + from_previous
  ratio = clock_time / (state.time_provider.() - state.init_time)
  state = %{state | clock_time: clock_time, till_next: till_next}
  broadcast_and_update_ratio(ratio, state)
end
# Sends `ratio` to every subscriber and stores it as the current ratio.
defp broadcast_and_update_ratio(ratio, state) do
  state.subscribers |> Bunch.KVList.each_key(&send_ratio(&1, ratio))
  %{state | ratio: ratio}
end

defp send_ratio(pid, ratio), do: send(pid, {:membrane_clock_ratio, self(), ratio})
end
|
lib/membrane/clock.ex
| 0.930229
| 0.690523
|
clock.ex
|
starcoder
|
defmodule Concentrate.Parser.Helpers do
  @moduledoc """
  Helper functions for the GTFS-RT and GTFS-RT Enhanced parsers.
  """
  require Logger

  defmodule Options do
    @moduledoc false
    # struct module => map of that struct's default values for the dropped fields
    @type drop_fields :: %{module => map}
    @type t :: %Options{
            routes: :all | {:ok, MapSet.t()},
            excluded_routes: :none | {:ok, MapSet.t()},
            max_time: :infinity | non_neg_integer,
            drop_fields: drop_fields,
            feed_url: String.t() | nil
          }
    defstruct routes: :all,
              excluded_routes: :none,
              max_time: :infinity,
              drop_fields: %{},
              feed_url: nil
  end

  alias __MODULE__.Options

  @doc """
  Options for parsing a GTFS Realtime file.
  * routes: either :all (don't filter the routes) or {:ok, Enumerable.t} with the route IDs to include
  * excluded_routes: either :none (don't filter) or {:ok, Enumerable.t} with the route IDs to exclude
  * max_time: the maximum time in the future for a stop time update
  * drop_fields: an optional map of struct module to Enumerable.t with fields to drop from the struct
  """
  def parse_options(opts) do
    parse_options(opts, %Options{})
  end

  # Recursively consume the option list, one recognized keyword at a time.
  defp parse_options([{:routes, route_ids} | rest], acc) do
    parse_options(rest, %{acc | routes: {:ok, MapSet.new(route_ids)}})
  end

  defp parse_options([{:excluded_routes, route_ids} | rest], acc) do
    parse_options(rest, %{acc | excluded_routes: {:ok, MapSet.new(route_ids)}})
  end

  defp parse_options([{:drop_fields, %{} = fields} | rest], acc) do
    # create a partial map with the default values from the struct
    fields =
      for {mod, fields} <- fields, into: %{} do
        new_map = Map.take(struct!(mod), fields)
        {mod, new_map}
      end

    parse_options(rest, %{acc | drop_fields: fields})
  end

  defp parse_options([{:max_future_time, seconds} | rest], acc) do
    # Converted into an absolute deadline relative to "now" at parse time.
    max_time = :os.system_time(:seconds) + seconds
    parse_options(rest, %{acc | max_time: max_time})
  end

  defp parse_options([{:feed_url, url} | rest], acc) do
    parse_options(rest, %{acc | feed_url: url})
  end

  # Unrecognized options are silently ignored.
  defp parse_options([_ | rest], acc) do
    parse_options(rest, acc)
  end

  defp parse_options([], acc) do
    acc
  end

  @spec drop_fields(Enumerable.t(), Options.drop_fields()) :: Enumerable.t()
  @doc """
  Given a configuration map, optionally drop some fields from the given enumerable.
  If non-structs are a part of the enumerable, they will be removed.
  """
  def drop_fields(enum, map) when map_size(map) == 0 do
    enum
  end

  def drop_fields(enum, map) do
    # "Dropping" a field means resetting it to the struct's default value
    # (defaults prepared in parse_options/2). The comprehension's match on
    # %{__struct__: mod} also filters out non-struct elements.
    for %{__struct__: mod} = struct <- enum do
      case map do
        %{^mod => new_map} ->
          Map.merge(struct, new_map)

        _ ->
          struct
      end
    end
  end

  @spec valid_route_id?(Options.t(), String.t()) :: boolean
  @doc """
  Returns true if the given route ID is valid for the provided options.
  """
  def valid_route_id?(options, route_id)

  # Clause order makes the inclusion list take precedence: when
  # `routes: {:ok, ...}` is set, `excluded_routes` is never consulted.
  def valid_route_id?(%{routes: {:ok, route_ids}}, route_id) do
    route_id in route_ids
  end

  def valid_route_id?(%{excluded_routes: {:ok, route_ids}}, route_id) do
    not (route_id in route_ids)
  end

  def valid_route_id?(_, _) do
    true
  end

  @spec times_less_than_max?(
          non_neg_integer | nil,
          non_neg_integer | nil,
          non_neg_integer | :infinity
        ) :: boolean
  @doc """
  Returns true if the arrival or departure time is less than the provided maximum time.
  """
  def times_less_than_max?(arrival_time, departure_time, max_time)
  def times_less_than_max?(_, _, :infinity), do: true
  def times_less_than_max?(nil, nil, _), do: true
  def times_less_than_max?(time, nil, max), do: time <= max
  # NOTE(review): when both times are present this clause compares only the
  # departure time against the maximum - confirm that is intentional.
  def times_less_than_max?(_, time, max), do: time <= max

  @doc """
  Log a warning if the vehicle timestamp is greater than the feed timestamp.
  """
  @spec log_future_vehicle_timestamp(
          Options.t(),
          non_neg_integer | nil,
          non_neg_integer | nil,
          String.t()
        ) :: :ok
  def log_future_vehicle_timestamp(options, feed_timestamp, vehicle_timestamp, vehicle_id)

  def log_future_vehicle_timestamp(options, feed_timestamp, vehicle_timestamp, vehicle_id)
      when is_integer(vehicle_timestamp) and is_integer(feed_timestamp) and
             vehicle_timestamp > feed_timestamp do
    # `_ =` discards Logger's return value to keep the :ok contract explicit.
    _ =
      Logger.warn(
        "vehicle timestamp after feed timestamp feed_url=#{inspect(options.feed_url)} vehicle_id=#{
          inspect(vehicle_id)
        } feed_timestamp=#{inspect(feed_timestamp)} vehicle_timestamp=#{
          inspect(vehicle_timestamp)
        }"
      )

    :ok
  end

  def log_future_vehicle_timestamp(_options, _feed_timestamp, _vehicle_timestamp, _vehicle_id) do
    :ok
  end
end
|
lib/concentrate/parser/helpers.ex
| 0.859943
| 0.454714
|
helpers.ex
|
starcoder
|
import TypeClass

defclass Witchcraft.Applicative do
  @moduledoc """
  `Applicative` extends `Apply` with the ability to lift value into a
  particular data type or "context".

  This fills in the connection between regular function application and `Apply`

      data --------------- function ---------------> result
      |                    |                         |
      of(Container, data)  of(Container, function)   of(Container, result)
      ↓                    ↓                         ↓
      %Container<data> --- %Container<function> ---> %Container<result>

  ## Type Class

  An instance of `Witchcraft.Applicative` must also implement `Witchcraft.Apply`,
  and define `Witchcraft.Applicative.of/2`.

      Functor      [map/2]
          ↓
      Apply        [convey/2]
          ↓
      Applicative  [of/2]
  """

  alias __MODULE__

  # Applicative sits directly above Apply in the typeclass hierarchy.
  extend Witchcraft.Apply

  @type t :: any()

  defmacro __using__(opts \\ []) do
    quote do
      use Witchcraft.Apply, unquote(opts)
      import unquote(__MODULE__), unquote(opts)
    end
  end

  # The single operation an instance must provide.
  where do
    @doc """
    Bring a value into the same data type as some sample

    ## Examples

        iex> of([], 42)
        [42]

        iex> of([1, 2, 3], 42)
        [42]

        iex> of({"a", "b", 155}, 42)
        {"", "", 42}

        iex> of(fn -> nil end, 42).(55)
        42

        iex> of(fn(a, b, c) -> a + b - c end, 42).(55)
        42

        iex> import Witchcraft.Apply
        ...>
        ...> []
        ...> |> of(&+/2)
        ...> |> provide([1, 2, 3])
        ...> |> ap(of([], 42))
        [43, 44, 45]

    """
    @spec of(Applicative.t(), any()) :: Applicative.t()
    def of(sample, to_wrap)
  end

  @doc """
  Partially apply `of/2`, generally as a way to bring many values into the same context.

  ## Examples

      iex> {"very example", "much wow"}
      ...> |> of()
      ...> |> Witchcraft.Functor.across([42, "hello", [1, 2, 3]])
      [{"", 42}, {"", "hello"}, {"", [1, 2, 3]}]

  """
  @spec of(Applicative.t()) :: (any() -> Applicative.t())
  def of(sample), do: fn to_wrap -> of(sample, to_wrap) end

  @doc """
  Alias for `of/2`, for cases that this helps legibility or style.

  ## Example

      iex> wrap({":|", "^.~"}, 42)
      {"", 42}

      iex> [] |> wrap(42)
      [42]

  """
  @spec wrap(Applicative.t(), any()) :: Applicative.t()
  defalias wrap(sample, to_wrap), as: :of

  @doc """
  Alias for `of/2`, for cases that this helps legibility or style.

  ## Example

      iex> pure({"ohai", "thar"}, 42)
      {"", 42}

      iex> [] |> pure(42)
      [42]

  """
  @spec pure(Applicative.t(), any()) :: Applicative.t()
  defalias pure(sample, to_wrap), as: :of

  @doc """
  `of/2` with arguments reversed.

  ## Example

      iex> to(42, {"ohai", "thar"})
      {"", 42}

      iex> 42 |> to([])
      [42]

      42 |> to(%Algae.Id{})
      #=> %Algae.Id{id: 42}

  """
  @spec to(any(), Applicative.t()) :: Applicative.t()
  def to(to_wrap, sample), do: of(sample, to_wrap)

  @doc """
  Prime a value to be brought into other data types

  ## Example

      iex> make = to(42)
      ...> make.({"ohai", "thar"})
      {"", 42}
      ...> make.([])
      [42]

  """
  @spec to(any()) :: (Applicative.t() -> Applicative.t())
  def to(to_wrap), do: fn type -> of(type, to_wrap) end

  @doc """
  Alias for `of/2`, for cases that this helps legibility or style

  ## Example

      iex> unit({":)", ":("}, 42)
      {"", 42}

      iex> [] |> unit(42)
      [42]

  """
  @spec unit(Applicative.t(), any()) :: Applicative.t()
  defalias unit(sample, to_wrap), as: :of

  # Applicative laws, property-checked by TypeClass for every instance.
  properties do
    import Witchcraft.Functor
    import Witchcraft.Apply

    # Law: a ~>> of(id) == a
    def identity(data) do
      a = generate(data)
      f = &Quark.id/1
      equal?(a, a ~>> Applicative.of(a, f))
    end

    # Law: of(x) ~>> of(f) == of(f.(x))
    def homomorphism(data) do
      arg = 42
      a = generate(data)
      f = &inspect/1
      left = Applicative.of(a, arg) ~>> Applicative.of(a, f)
      right = Applicative.of(a, f.(arg))
      equal?(left, right)
    end

    # Law: of(x) ~>> fs == fs ~>> of(fn g -> g.(x) end)
    def interchange(data) do
      arg = 42
      as = generate(data)
      fs = replace(as, &inspect/1)
      left = Applicative.of(as, arg) ~>> fs
      right = fs ~>> Applicative.of(as, fn g -> g.(arg) end)
      equal?(left, right)
    end
  end
end
# Functions: `of` lifts a value into a constant function (the K combinator
# from Quark.SKI) that ignores its argument and returns the wrapped value.
definst Witchcraft.Applicative, for: Function do
  def of(_, unwrapped), do: &Quark.SKI.k(unwrapped, &1)
end
# Lists: `of` wraps the value in a singleton list.
definst Witchcraft.Applicative, for: List do
  def of(_, unwrapped), do: [unwrapped]
end
definst Witchcraft.Applicative, for: Tuple do
  # Property tests need sample tuples; generate a pair of random terms.
  custom_generator(_) do
    import TypeClass.Property.Generator, only: [generate: 1]
    {generate(0), generate(0)}
  end

  # Builds a tuple of the sample's size, filling every slot with the monoid
  # identity of the sample's first element and placing the wrapped value in
  # the last slot.
  def of(sample, unwrapped) do
    size = tuple_size(sample)

    sample
    |> elem(0)
    |> Witchcraft.Monoid.empty()
    |> Tuple.duplicate(size)
    |> put_elem(size - 1, unwrapped)
  end
end
|
lib/witchcraft/applicative.ex
| 0.839882
| 0.666316
|
applicative.ex
|
starcoder
|
defmodule QRCode.ByteMode do
  @moduledoc """
  Byte mode character capacities table.
  """
  alias QRCode.QR

  # Each table maps QR versions 1..40 to the maximum number of bytes that fit
  # at the given error-correction level, as `{capacity, version}` pairs in
  # ascending capacity order (so the first fitting entry is the smallest
  # usable version).
  @level_low [
    {17, 1}, {32, 2}, {53, 3}, {78, 4}, {106, 5},
    {134, 6}, {154, 7}, {192, 8}, {230, 9}, {271, 10},
    {321, 11}, {367, 12}, {425, 13}, {458, 14}, {520, 15},
    {586, 16}, {644, 17}, {718, 18}, {792, 19}, {858, 20},
    {929, 21}, {1003, 22}, {1091, 23}, {1171, 24}, {1273, 25},
    {1367, 26}, {1465, 27}, {1528, 28}, {1628, 29}, {1732, 30},
    {1840, 31}, {1952, 32}, {2068, 33}, {2188, 34}, {2303, 35},
    {2431, 36}, {2563, 37}, {2699, 38}, {2809, 39}, {2953, 40}
  ]

  @level_medium [
    {14, 1}, {26, 2}, {42, 3}, {62, 4}, {84, 5},
    {106, 6}, {122, 7}, {152, 8}, {180, 9}, {213, 10},
    {251, 11}, {287, 12}, {331, 13}, {362, 14}, {412, 15},
    {450, 16}, {504, 17}, {560, 18}, {624, 19}, {666, 20},
    {711, 21}, {779, 22}, {857, 23}, {911, 24}, {997, 25},
    {1059, 26}, {1125, 27}, {1190, 28}, {1264, 29}, {1370, 30},
    {1452, 31}, {1538, 32}, {1628, 33}, {1722, 34}, {1809, 35},
    {1911, 36}, {1989, 37}, {2099, 38}, {2213, 39}, {2331, 40}
  ]

  @level_quartile [
    {11, 1}, {20, 2}, {32, 3}, {46, 4}, {60, 5},
    {74, 6}, {86, 7}, {108, 8}, {130, 9}, {151, 10},
    {177, 11}, {203, 12}, {241, 13}, {258, 14}, {292, 15},
    {322, 16}, {364, 17}, {394, 18}, {442, 19}, {482, 20},
    {509, 21}, {565, 22}, {611, 23}, {661, 24}, {715, 25},
    {751, 26}, {805, 27}, {868, 28}, {908, 29}, {982, 30},
    {1030, 31}, {1112, 32}, {1168, 33}, {1228, 34}, {1283, 35},
    {1351, 36}, {1423, 37}, {1499, 38}, {1579, 39}, {1663, 40}
  ]

  @level_high [
    {7, 1}, {14, 2}, {24, 3}, {34, 4}, {44, 5},
    {58, 6}, {64, 7}, {84, 8}, {98, 9}, {119, 10},
    {137, 11}, {155, 12}, {177, 13}, {194, 14}, {220, 15},
    {250, 16}, {280, 17}, {310, 18}, {338, 19}, {382, 20},
    {403, 21}, {439, 22}, {461, 23}, {511, 24}, {535, 25},
    {593, 26}, {625, 27}, {658, 28}, {698, 29}, {742, 30},
    {790, 31}, {842, 32}, {898, 33}, {958, 34}, {983, 35},
    {1051, 36}, {1093, 37}, {1139, 38}, {1219, 39}, {1273, 40}
  ]

  @doc """
  Picks the smallest QR version able to hold `qr.orig` at the code's
  error-correction level and stores it in the `:version` field.

  Returns `{:error, reason}` when the input does not fit any version.
  """
  @spec put_version(QR.t()) :: Result.t(String.t(), QR.t())
  def put_version(%QR{ecc_level: :low} = qr), do: set_version(qr, @level_low)
  def put_version(%QR{ecc_level: :medium} = qr), do: set_version(qr, @level_medium)
  def put_version(%QR{ecc_level: :quartile} = qr), do: set_version(qr, @level_quartile)
  def put_version(%QR{ecc_level: :high} = qr), do: set_version(qr, @level_high)

  # Shared implementation for all four ECC levels (previously four
  # byte-identical clause bodies).
  defp set_version(%QR{orig: orig} = qr, capacities) do
    capacities
    |> find_version(byte_size(orig))
    |> Result.map(fn ver -> %{qr | version: ver} end)
  end

  # Returns {:ok, version} for the first entry whose capacity fits `bytes`,
  # or an error when even version 40 is too small.
  defp find_version(level, bytes) do
    Enum.reduce_while(level, {:error, "Input string can't be encoded"}, fn {max, ver}, acc ->
      if bytes <= max do
        {:halt, {:ok, ver}}
      else
        {:cont, acc}
      end
    end)
  end
end
|
lib/qr_code/byte_mode.ex
| 0.711932
| 0.463991
|
byte_mode.ex
|
starcoder
|
defmodule Delta.Op do
  @moduledoc false
  alias Delta.Attr

  # Builds an op map. Attributes are attached only when a non-empty map is
  # given; "delete" ops never carry attributes.
  def new(action, value, attr \\ false)
  def new("delete", length, _attr), do: %{"delete" => length}

  def new(action, value, %{} = attr) when map_size(attr) > 0 do
    %{action => value, "attributes" => attr}
  end

  def new(action, value, _attr), do: %{action => value}

  def insert(value, attr \\ false), do: new("insert", value, attr)
  def retain(value, attr \\ false), do: new("retain", value, attr)
  def delete(value), do: new("delete", value)

  def has_attributes?(%{"attributes" => %{}}), do: true
  def has_attributes?(_), do: false

  # Checks the op's action key and, optionally, the type of its value.
  def type?(op, action, value_type \\ nil)
  def type?(%{} = op, action, nil) when is_map_key(op, action), do: true
  def type?(%{} = op, action, :map), do: is_map(op[action])
  def type?(%{} = op, action, :string), do: is_binary(op[action])
  def type?(%{} = op, action, :number), do: is_integer(op[action])
  def type?(%{}, _action, _value_type), do: false

  def insert?(op, type \\ nil), do: type?(op, "insert", type)
  def delete?(op, type \\ nil), do: type?(op, "delete", type)
  def retain?(op, type \\ nil), do: type?(op, "retain", type)

  # Length of `text` in UTF-16 code units: convert UTF-8 -> UTF-16 and halve
  # the byte count.
  def text_size(text) do
    text
    |> :unicode.characters_to_binary(:utf8, :utf16)
    |> byte_size()
    |> div(2)
  end

  def size(%{"insert" => text}) when is_binary(text), do: text_size(text)
  def size(%{"delete" => len}) when is_integer(len), do: len
  def size(%{"retain" => len}) when is_integer(len), do: len
  # Embeds (map values) always count as length 1.
  def size(_op), do: 1

  # Embed inserts cannot be split - return the op whole, with no remainder.
  def take(op = %{"insert" => embed}, _length) when not is_bitstring(embed) do
    {op, false}
  end

  # Splits `op` at `length`; returns {taken, remainder-or-false}.
  # NOTE(review): assumes length <= size(op); callers go through next/2,
  # which passes min(size(a), size(b)).
  def take(op, length) do
    case size(op) - length do
      0 -> {op, false}
      _ -> take_partial(op, length)
    end
  end

  @spec get_embed_data!(map, map) :: {any, any, any}
  # Extracts {embed_type, a_value, b_value} from two single-key embed maps,
  # raising when either side is not a map or the embed types differ.
  def get_embed_data!(a, b) do
    cond do
      not is_map(a) ->
        raise("cannot retain #{inspect(a)}")

      not is_map(b) ->
        raise("cannot retain #{inspect(b)}")

      map_size(a) != 1 and Map.keys(a) != Map.keys(b) ->
        raise("embeds not matched: #{inspect(a: a, b: b)}")

      true ->
        [type] = Map.keys(a)
        {type, a[type], b[type]}
    end
  end

  # Composes two consecutive ops (a applied, then b) into one op.
  # Returns {composed_op | false, rest_of_a, rest_of_b}; `false` means the
  # pair produced no resulting op.
  def compose(a, b) do
    {op1, a, op2, b} = next(a, b)

    composed =
      case {info(op1), info(op2)} do
        {{"retain", _type}, {"delete", :number}} ->
          op2

        {{"retain", :map}, {"retain", :number}} ->
          attr = Attr.compose(op1["attributes"], op2["attributes"])
          retain(op1["retain"], attr)

        {{"retain", :number}, {"retain", _type}} ->
          attr = Attr.compose(op1["attributes"], op2["attributes"], true)
          retain(op2["retain"], attr)

        {{"insert", _type}, {"retain", :number}} ->
          attr = Attr.compose(op1["attributes"], op2["attributes"])
          insert(op1["insert"], attr)

        {{action, type}, {"retain", :map}} ->
          # Both sides reference an embed - delegate to its registered handler.
          {embed_type, embed1, embed2} = get_embed_data!(op1[action], op2["retain"])
          handler = Delta.get_handler!(embed_type)
          composed_embed = %{embed_type => handler.compose(embed1, embed2, action == "retain")}
          keep_nil? = action == "retain" && type == :number
          attr = Attr.compose(op1["attributes"], op2["attributes"], keep_nil?)
          new(action, composed_embed, attr)

        _other ->
          false
      end

    {composed, a, b}
  end

  # Transforms an index (position) against `op`, tracking `offset` into the
  # document. Inserts located before the index - or at it, when `priority`
  # is false - shift the index right by the op's length.
  def transform(offset, index, op, priority) when is_integer(index) do
    length = size(op)

    if insert?(op) and (offset < index or not priority) do
      {offset + length, index + length}
    else
      {offset + length, index}
    end
  end

  # Transforms op `b` against op `a`; returns {transformed, rest_a, rest_b}.
  def transform(a, b, priority) do
    {op1, a, op2, b} = next(a, b)

    transformed =
      cond do
        delete?(op1) ->
          false

        delete?(op2) ->
          op2

        # Delegate to embed handler if both are retain ops are
        # embeds of the same type
        retain?(op1, :map) && retain?(op2, :map) &&
            Map.keys(op1["retain"]) == Map.keys(op2["retain"]) ->
          {embed_type, embed1, embed2} = get_embed_data!(op1["retain"], op2["retain"])
          handler = Delta.get_handler!(embed_type)
          embed = %{embed_type => handler.transform(embed1, embed2, priority)}
          attrs = Attr.transform(op1["attributes"], op2["attributes"], priority)
          retain(embed, attrs)

        retain?(op1, :number) && retain?(op2, :map) ->
          attrs = Attr.transform(op1["attributes"], op2["attributes"], priority)
          retain(op2["retain"], attrs)

        true ->
          attrs = Attr.transform(op1["attributes"], op2["attributes"], priority)
          retain(size(op1), attrs)
      end

    {transformed, a, b}
  end

  # Cuts both ops down to their common (minimum) length so they can be
  # compared piecewise; leftovers are `false` when an op was consumed exactly.
  defp next(a, b) do
    size = min(size(a), size(b))
    {op1, a} = take(a, size)
    {op2, b} = take(b, size)
    {op1, a, op2, b}
  end

  # Splits a text insert at `len` UTF-16 code units: convert to UTF-16, cut
  # at byte offset len * 2, convert both halves back. If a cut lands inside a
  # surrogate pair the back-conversion does not return a binary and we raise.
  defp take_partial(%{"insert" => text} = op, len) do
    binary = :unicode.characters_to_binary(text, :utf8, :utf16)
    binary_length = byte_size(binary)

    left =
      binary
      |> Kernel.binary_part(0, len * 2)
      |> :unicode.characters_to_binary(:utf16, :utf8)

    right =
      binary
      |> Kernel.binary_part(len * 2, binary_length - len * 2)
      |> :unicode.characters_to_binary(:utf16, :utf8)

    case {is_binary(left), is_binary(right)} do
      {true, true} ->
        {insert(left, op["attributes"]), insert(right, op["attributes"])}

      _ ->
        raise "Encoding failed in take_partial #{inspect({text, op, len, left, right})}"
    end
  end

  defp take_partial(%{"delete" => full}, length) do
    {delete(length), delete(full - length)}
  end

  # Both halves of a split retain keep the original attributes.
  defp take_partial(%{"retain" => full} = op, length) do
    {retain(length, op["attributes"]), retain(full - length, op["attributes"])}
  end

  # Returns the op's {action, value_type} pair used for dispatch in compose/2.
  defp info(op) do
    action =
      case op do
        %{"insert" => _} -> "insert"
        %{"retain" => _} -> "retain"
        %{"delete" => _} -> "delete"
      end

    type =
      case op[action] do
        value when is_integer(value) -> :number
        value when is_binary(value) -> :string
        value when is_map(value) -> :map
      end

    {action, type}
  end
end
|
lib/delta/op.ex
| 0.616936
| 0.451145
|
op.ex
|
starcoder
|
defmodule Itest.Account do
  @moduledoc """
  Maintaining used accounts state so that we're able to run tests multiple times.
  """
  alias Itest.Reorg
  alias Itest.Transactions.Encoding

  import Itest.Poller, only: [wait_on_receipt_confirmed: 1]

  # Creates `number_of_accounts` funded, unlocked test accounts concurrently.
  # Each result entry is an {address, private_key_hex} tuple.
  @spec take_accounts(integer()) :: map()
  def take_accounts(number_of_accounts) do
    1..number_of_accounts
    |> Task.async_stream(fn _ -> account() end,
      timeout: 120_000,
      on_timeout: :kill_task,
      max_concurrency: System.schedulers_online() * 2
    )
    |> Enum.map(fn {:ok, result} -> result end)
  end

  # Creates one account: imports a fresh private key into the node, funds it
  # from the node's first account and unlocks it.
  defp account() do
    tick_acc = generate_entity()
    account_priv_enc = Base.encode16(tick_acc.priv)
    passphrase = "<PASSWORD>"
    {:ok, addr} = create_account_from_secret(account_priv_enc, passphrase)
    {:ok, [faucet | _]} = Ethereumex.HttpClient.eth_accounts()

    # Fund the new account with 1_000_000 * 10^14 = 10^20 wei from the faucet.
    data = %{from: faucet, to: addr, value: Encoding.to_hex(1_000_000 * trunc(:math.pow(10, 9 + 5)))}

    {:ok, receipt_hash} = Ethereumex.HttpClient.eth_send_transaction(data)
    wait_on_receipt_confirmed(receipt_hash)

    # Reorg runs unlock through the Reorg helper; otherwise unlock directly
    # on the node with duration 0 (i.e. until the node exits).
    if Application.get_env(:cabbage, :reorg) do
      Reorg.unlock_account(addr, passphrase)
    else
      {:ok, true} = Ethereumex.HttpClient.request("personal_unlockAccount", [addr, passphrase, 0], [])
    end

    {addr, account_priv_enc}
  end

  # Generates a fresh secp256k1 keypair and the derived Ethereum address.
  defp generate_entity() do
    {:ok, priv} = generate_private_key()
    {:ok, pub} = generate_public_key(priv)
    {:ok, address} = generate_address(pub)
    %{priv: priv, addr: address}
  end

  # 32 random bytes serve as the private key.
  defp generate_private_key(), do: {:ok, :crypto.strong_rand_bytes(32)}

  defp generate_public_key(<<priv::binary-size(32)>>) do
    {:ok, der_pub} = get_public_key(priv)
    {:ok, der_to_raw(der_pub)}
  end

  defp get_public_key(private_key) do
    case :libsecp256k1.ec_pubkey_create(private_key, :uncompressed) do
      {:ok, public_key} -> {:ok, public_key}
      {:error, reason} -> {:error, to_string(reason)}
    end
  end

  # Strips the 0x04 "uncompressed point" prefix byte from the public key.
  defp der_to_raw(<<4::integer-size(8), data::binary>>), do: data

  # Ethereum address: last 20 bytes of the Keccak hash of the raw public key.
  defp generate_address(<<pub::binary-size(64)>>) do
    <<_::binary-size(12), address::binary-size(20)>> = hash(pub)
    {:ok, address}
  end

  defp hash(message), do: ExthCrypto.Hash.hash(message, ExthCrypto.Hash.kec())

  defp create_account_from_secret(secret, passphrase) do
    if Application.get_env(:cabbage, :reorg) do
      Reorg.create_account_from_secret(secret, passphrase)
    else
      Ethereumex.HttpClient.request("personal_importRawKey", [secret, passphrase], [])
    end
  end
end
|
priv/cabbage/apps/itest/lib/account.ex
| 0.631935
| 0.419499
|
account.ex
|
starcoder
|
defmodule MeshxNode do
  @readme File.read!("docs/README.md") |> String.split("<!-- MDOC !-->") |> Enum.fetch!(1)

  @moduledoc """
  #{@readme}
  ## Configuration options
  * `:mesh_adapter` - Required. Specifies service mesh adapter module. Example: `mesh_adapter: MeshxConsul`.
  * `:service_params` - 2-arity function executed when distribution is started. First function argument is node "short name", second argument is host name. For example: `:mynode@myhost` translates to `(:mynode, 'myhost')`. Function should return first argument `params` passed to `c:Meshx.ServiceMesh.start/4`, for example: `"mynode@myhost"`. Default: `&MeshxNode.Default.service_params/2`.
  * `:service_reg` - service registration template passed as second argument to `c:Meshx.ServiceMesh.start/4`. Default: `[]`.
  * `:upstream_params` - 1-arity function executed when connection between nodes is setup. Function argument is remote node name: running `Node.connect(:node1@myhost)` will invoke function with `(:node1@myhost)`. Function should return first argument `params` passed to `c:Meshx.ServiceMesh.connect/3`, for example: `"node1@myhost"`. Default: `&MeshxNode.Default.upstream_params/1`.
  * `:upstream_reg` - upstream registration template passed as second argument to `c:Meshx.ServiceMesh.connect/3`. Default: `nil`.
  * `:upstream_proxy` - 2-arity function executed when connection between nodes is setup. Function arguments are `(:remote_node_name, :local_node_name)`. Function should return third argument `proxy` passed to `c:Meshx.ServiceMesh.connect/3`, for example: `{"node1@myhost", "node1@myhost"}`. Default: `&MeshxNode.Default.upstream_proxy/2`.
  * `:force_registration?` - boolean passed as third argument to `c:Meshx.ServiceMesh.start/4`. Default: `false`.
  * `:timeout` - timeout value passed as fourth argument to `c:Meshx.ServiceMesh.start/4`. Default: `5000`.
  ## Credits
  `MeshxNode` distribution module is based on [example code](https://github.com/erlang/otp/blob/master/lib/kernel/examples/erl_uds_dist/src/erl_uds_dist.erl) by <NAME>.
  """
  require Logger

  alias MeshxNode.App.C

  # Wait up to @retries * @sleep_time ms (10 s) for the mesh adapter app.
  @retries 100
  @sleep_time 100

  @doc """
  Turns node into a distributed and sets node magic cookie.
  Function checks if service mesh adapter application specified by `:mesh_adapter` configuration entry was already started. If mesh adapter is started `Node.start/3` and `Node.set_cookie/2` are executed. Otherwise it will sleep for 100msec and retry, retries limit is 100.
  Function arguments are same as `Node.start/3`.
  """
  @spec start(node :: node(), cookie :: atom(), :longnames | :shortnames, non_neg_integer()) ::
          {:ok, pid()} | {:error, term()}
  def start(node, cookie, type \\ :longnames, tick_time \\ 15000),
    do: start_retry(node, cookie, type, tick_time)

  @doc """
  Asynchronous version of `start/4`.
  Function spawns `start/4` using `Kernel.spawn/3`.
  """
  @spec spawn_start(node :: node(), cookie :: atom(), :longnames | :shortnames, non_neg_integer()) :: pid()
  def spawn_start(name, cookie, type \\ :longnames, tick_time \\ 15000),
    do: spawn(__MODULE__, :start, [name, cookie, type, tick_time])

  @doc false
  # True when the application owning the configured mesh adapter module is
  # present in Application.started_applications/0.
  def adapter_started?() do
    app = Application.get_application(C.mesh_adapter())

    if is_nil(app) do
      false
    else
      Application.started_applications()
      |> Enum.find(nil, fn {a, _desc, _ver} -> a == app end)
      |> is_nil()
      |> Kernel.not()
    end
  end

  # Retries until the adapter app is up, then starts distribution and sets
  # the cookie. Returns {:ok, pid} | {:error, term}.
  defp start_retry(name, cookie, type, tick_time, retry \\ 0) do
    if adapter_started?() do
      case Node.start(name, type, tick_time) do
        {:ok, pid} ->
          Node.set_cookie(name, cookie)
          {:ok, pid}

        e ->
          Logger.error("[#{__MODULE__}]: #{inspect(e)}")
          e
      end
    else
      if retry < @retries do
        Process.sleep(@sleep_time)
        start_retry(name, cookie, type, tick_time, retry + 1)
      else
        Logger.error(
          "[#{__MODULE__}]: User defined service mesh adapter [#{C.mesh_adapter()}] not started. Giving up starting node [#{name}]."
        )

        # BUG FIX: this branch previously returned the result of Logger.error/1
        # (`:ok`), violating the @spec of start/4 and making the give-up case
        # indistinguishable from success. Return an error tuple instead.
        {:error, :mesh_adapter_not_started}
      end
    end
  end
end
|
lib/meshx_node.ex
| 0.835013
| 0.622459
|
meshx_node.ex
|
starcoder
|
defmodule OMG.API.State.Transaction do
  @moduledoc """
  Internal representation of a spend transaction on Plasma chain
  """

  alias OMG.API.Crypto
  alias OMG.API.State.Transaction.Signed

  @zero_address Crypto.zero_address()
  # A transaction has exactly 2 input and 2 output slots; unused slots are
  # zero-padded in new/3.
  @max_inputs 2

  defstruct [
    :blknum1,
    :txindex1,
    :oindex1,
    :blknum2,
    :txindex2,
    :oindex2,
    :cur12,
    :newowner1,
    :amount1,
    :newowner2,
    :amount2
  ]

  @type t() :: %__MODULE__{
          blknum1: non_neg_integer(),
          txindex1: non_neg_integer(),
          oindex1: 0 | 1,
          blknum2: non_neg_integer(),
          txindex2: non_neg_integer(),
          oindex2: 0 | 1,
          cur12: currency(),
          newowner1: Crypto.address_t(),
          amount1: pos_integer(),
          newowner2: Crypto.address_t(),
          amount2: non_neg_integer()
        }

  @type currency :: Crypto.address_t()

  @doc """
  Creates transaction from utxos where first output belongs to receiver and second belongs to owner of utxos
  and the amount decreased by receiver's amount and the fee.
  assumptions:
  length(utxos) = 1 | 2
  """
  @spec create_from_utxos(
          %{address: Crypto.address_t(), utxos: map()},
          %{address: Crypto.address_t(), amount: pos_integer()},
          fee :: non_neg_integer()
        ) :: {:ok, t()} | {:error, atom()}
  def create_from_utxos(sender_utxos, receiver, fee \\ 0)
  def create_from_utxos(_utxos, _receiver, fee) when fee < 0, do: {:error, :invalid_fee}
  def create_from_utxos(%{utxos: utxos}, _, _) when length(utxos) > @max_inputs, do: {:error, :too_many_utxo}

  def create_from_utxos(%{utxos: utxos} = inputs, receiver, fee) do
    with {:ok, currency} <- validate_currency(utxos) do
      do_create_from_utxos(inputs, currency, receiver, fee)
    end
  end

  defp do_create_from_utxos(
         %{address: sender_address, utxos: utxos},
         currency,
         %{address: receiver_address, amount: amount},
         fee
       ) do
    total_amount =
      utxos
      |> Enum.map(&Map.fetch!(&1, :amount))
      |> Enum.sum()

    inputs =
      utxos
      |> Enum.map(fn %{blknum: blknum, txindex: txindex, oindex: oindex} ->
        {blknum, txindex, oindex}
      end)

    # Change returned to the sender: inputs minus receiver amount minus fee.
    amount2 = total_amount - amount - fee

    outputs = [
      {receiver_address, amount},
      {sender_address, amount2}
    ]

    with :ok <- validate_amount(amount),
         :ok <- validate_amount(amount2),
         do: {:ok, new(inputs, currency, outputs)}
  end

  # Both inputs must reference the same currency; a single input is always valid.
  defp validate_currency([%{currency: cur1}, %{currency: cur2}]) when cur1 != cur2,
    do: {:error, :currency_mixing_not_possible}

  defp validate_currency([%{currency: cur1} | _]), do: {:ok, cur1}

  defp validate_amount(output_amount) when output_amount < 0, do: {:error, :amount_negative_value}
  defp validate_amount(output_amount) when is_integer(output_amount), do: :ok

  @doc """
  assumptions:
  length(inputs) <= 2
  length(outputs) <= 2
  behavior:
  Adds empty (zeroes) inputs and/or outputs to reach the expected size
  of 2 inputs and 2 outputs.
  """
  @spec new(
          list({pos_integer, pos_integer, 0 | 1}),
          Crypto.address_t(),
          list({Crypto.address_t(), pos_integer})
        ) :: t()
  def new(inputs, currency, outputs) do
    # Pad with zero inputs / zero-address outputs up to the fixed slot count.
    inputs = inputs ++ List.duplicate({0, 0, 0}, @max_inputs - Kernel.length(inputs))
    outputs = outputs ++ List.duplicate({@zero_address, 0}, @max_inputs - Kernel.length(outputs))

    # Build keyed field maps (blknum1/txindex1/oindex1, blknum2/...); the
    # atoms already exist as struct keys, hence to_existing_atom is safe.
    inputs =
      inputs
      |> Enum.with_index(1)
      |> Enum.map(fn {{blknum, txindex, oindex}, index} ->
        %{
          String.to_existing_atom("blknum#{index}") => blknum,
          String.to_existing_atom("txindex#{index}") => txindex,
          String.to_existing_atom("oindex#{index}") => oindex
        }
      end)
      |> Enum.reduce(%{}, &Map.merge/2)

    outputs =
      outputs
      |> Enum.with_index(1)
      |> Enum.map(fn {{newowner, amount}, index} ->
        %{
          String.to_existing_atom("newowner#{index}") => newowner,
          String.to_existing_atom("amount#{index}") => amount
        }
      end)
      |> Enum.reduce(%{cur12: currency}, &Map.merge/2)

    struct(__MODULE__, Map.merge(inputs, outputs))
  end

  # The all-zero address is not a spendable account address.
  def account_address?(@zero_address), do: false
  def account_address?(address) when is_binary(address) and byte_size(address) == 20, do: true
  def account_address?(_), do: false

  # RLP-encodes the transaction fields in their canonical order.
  def encode(tx) do
    [
      tx.blknum1,
      tx.txindex1,
      tx.oindex1,
      tx.blknum2,
      tx.txindex2,
      tx.oindex2,
      tx.cur12,
      tx.newowner1,
      tx.amount1,
      tx.newowner2,
      tx.amount2
    ]
    |> ExRLP.encode()
  end

  # Hash of the RLP encoding; this is what gets signed.
  def hash(%__MODULE__{} = tx) do
    tx
    |> encode
    |> Crypto.hash()
  end

  @doc """
  private keys are in the form: <<54, 43, 207, 67, 140, 160, 190, 135, 18, 162, 70, 120, 36, 245, 106, 165, 5, 101, 183,
  55, 11, 117, 126, 135, 49, 50, 12, 228, 173, 219, 183, 175>>
  """
  @spec sign(t(), Crypto.priv_key_t(), Crypto.priv_key_t()) :: Signed.t()
  def sign(%__MODULE__{} = tx, priv1, priv2) do
    encoded_tx = encode(tx)
    signature1 = signature(encoded_tx, priv1)
    signature2 = signature(encoded_tx, priv2)

    transaction = %Signed{raw_tx: tx, sig1: signature1, sig2: signature2}
    %{transaction | signed_tx_bytes: Signed.encode(transaction)}
  end

  # An empty private key produces a zero signature (520 bits = 65 zero bytes),
  # used when only one of the two input slots is owned.
  defp signature(_encoded_tx, <<>>), do: <<0::size(520)>>
  defp signature(encoded_tx, priv), do: Crypto.signature(encoded_tx, priv)
end
|
apps/omg_api/lib/state/transaction.ex
| 0.826642
| 0.404772
|
transaction.ex
|
starcoder
|
defmodule Nostrum.Struct.Message.Component do
  @moduledoc """
  A component attached to a message.
  Note that the fields present depend on the `t:type/0` of the component object.
  See the [Discord API Component Object
  Documentation](https://discord.com/developers/docs/interactions/message-components#component-object)
  for more information.
  """
  @moduledoc since: "0.5.0"
  alias Nostrum.Struct.Emoji
  alias Nostrum.Util

  defstruct [
    :type,
    :custom_id,
    :disabled,
    :style,
    :label,
    :emoji,
    :url,
    :options,
    :placeholder,
    :min_values,
    :max_values,
    :components
  ]

  @typedoc """
  Component type.
  This field is always set.
  ## Values
  - ``1``: Action Row - A container for other components.
  - ``2``: Button - A button object.
  - ``3``: Select Menu - A select menu for picking from choices.
  ## References
  See [Discord Developer Portal: Component
  Types](https://discord.com/developers/docs/interactions/message-components#component-object-component-types).
  """
  @type type :: 1 | 2 | 3

  @typedoc """
  A developer-defined identifier for the component.
  Maximum of 100 characters. Only present for buttons and select menus.
  """
  @type custom_id :: String.t() | nil

  @typedoc """
  Whether the component is disabled.
  Only present for buttons and select menus.
  """
  @type disabled :: boolean() | nil

  @typedoc """
  An integer representing the style of the button.
  Only present for buttons.
  ## Values
  - ``1``: Primary - blurple, ``custom_id`` required.
  - ``2``: Secondary - grey, ``custom_id`` required.
  - ``3``: Success - green, ``custom_id`` required.
  - ``4``: Danger - red, ``custom_id`` required.
  - ``5``: Link - grey, ``url`` required, navigates to the URL.
  ## References
  See [Discord Developer Portal: Button
  Styles](https://discord.com/developers/docs/interactions/message-components#button-object-button-styles).
  """
  @type style :: 1 | 2 | 3 | 4 | 5 | nil

  @typedoc """
  Text that appears on the button.
  Maximum of 80 characters. Only present for buttons.
  """
  @type label :: String.t() | nil

  @typedoc """
  Partial emoji of the button.
  Only present for buttons. The following fields are set:
  - ``name``
  - ``id``
  - ``animated``
  """
  @type emoji :: Emoji.t() | nil

  @typedoc """
  URL for link-style buttons.
  Only present for buttons.
  """
  @type url :: String.t() | nil

  @typedoc """
  Choices of the select menu.
  Maximum of 25 options. Only present for select menus.
  ## References
  See [Discord Developer Portal: Select Option
  Structure](https://discord.com/developers/docs/interactions/message-components#select-menu-object-select-option-structure).
  """
  @type options ::
          [
            %{
              required(:label) => String.t(),
              required(:value) => String.t(),
              optional(:description) => String.t(),
              optional(:emoji) => %{
                id: Emoji.id(),
                name: Emoji.name(),
                animated: Emoji.animated()
              },
              optional(:default) => boolean()
            }
          ]
          | nil

  @typedoc """
  Custom placeholder text if nothing is selected.
  Maximum of 100 characters. Only present for select menus.
  """
  @type placeholder :: String.t() | nil

  @typedoc """
  Minimum number of items that must be chosen.
  Defaults to ``1``. Minimum of ``0``. Maximum of ``25``. Only present for select menus.
  """
  @type min_values :: 0..25 | nil

  @typedoc """
  Maximum number of items that can be chosen.
  Defaults to ``1``. Maximum of ``25``. Only present for select menus.
  """
  @type max_values :: 1..25 | nil

  @typedoc """
  Child components for action rows.
  Only present for action rows.
  """
  @type components :: [t()] | nil

  @typedoc "Represents a message component."
  @type t :: %__MODULE__{
          type: type(),
          custom_id: custom_id(),
          disabled: disabled(),
          style: style(),
          label: label(),
          emoji: emoji(),
          url: url(),
          options: options(),
          placeholder: placeholder(),
          min_values: min_values(),
          max_values: max_values(),
          components: components()
        }

  @doc false
  @spec to_struct(map()) :: t()
  def to_struct(map) do
    # `map.type` is asserted (always present); all other fields are optional
    # depending on component type, so bracket access yields nil when absent.
    %__MODULE__{
      type: map.type,
      custom_id: map[:custom_id],
      disabled: map[:disabled],
      style: map[:style],
      label: map[:label],
      emoji: Util.cast(map[:emoji], {:struct, Emoji}),
      url: map[:url],
      options: map[:options],
      placeholder: map[:placeholder],
      min_values: map[:min_values],
      max_values: map[:max_values],
      components: Util.cast(map[:components], {:list, {:struct, __MODULE__}})
    }
  end
end
|
lib/nostrum/struct/message/component.ex
| 0.880932
| 0.490541
|
component.ex
|
starcoder
|
defmodule Exfmt.Ast.Util do
  @moduledoc false

  @doc """
  Given the arguments of a function call node, split the `do end`
  block arguments off, assuming any are present.
  iex> split_do_block([])
  {[], []}
  iex> split_do_block([1])
  {[1], []}
  iex> split_do_block([1, 2])
  {[1, 2], []}
  iex> split_do_block([1, 2, 3])
  {[1, 2, 3], []}
  iex> split_do_block([1, 2, [do: 3]])
  {[1, 2], [do: 3]}
  iex> split_do_block([1, 2, [do: 3], 4])
  {[1, 2, [do: 3], 4], []}
  iex> split_do_block([1, [do: 2, else: 3]])
  {[1], [do: 2, else: 3]}
  iex> split_do_block([1, [do: 2, rescue: 3]])
  {[1], [do: 2, rescue: 3]}
  iex> split_do_block([[do: 1, else: 2]])
  {[], [do: 1, else: 2]}
  iex> split_do_block([1, [else: 2, do: 3]])
  {[1, [else: 2, do: 3]], []}
  iex> split_do_block([1, [do: 2, rescue: 3, else: 4]])
  {[1], [do: 2, rescue: 3, else: 4]}
  """
  def split_do_block([]) do
    {[], []}
  end

  def split_do_block([head | tail]) do
    do_split_do_block(tail, head, [])
  end

  # The last argument is a `do` block candidate only when it is a keyword
  # list whose first pair is `do:`. Even then it only counts as a block when
  # *every* pair is a known block keyword — `[else: 2, do: 3]` is a plain
  # keyword argument, not a block.
  defp do_split_do_block([], [{:do, _} | _] = prev, acc) do
    if Enum.all?(prev, &keyword_block?/1) do
      {Enum.reverse(acc), prev}
    else
      {Enum.reverse([prev | acc]), []}
    end
  end

  defp do_split_do_block([head | tail], prev, acc) do
    do_split_do_block(tail, head, [prev | acc])
  end

  defp do_split_do_block([], prev, acc) do
    {Enum.reverse([prev | acc]), []}
  end

  defp keyword_block?({word, _}) do
    word in [:do, :after, :else, :rescue, :catch]
  end

  # Previously only the literal `false` was matched here, so any other
  # non-2-tuple element (e.g. a bare value in a malformed keyword list)
  # raised FunctionClauseError. Treat every non-pair as "not a block
  # keyword" instead.
  defp keyword_block?(_other) do
    false
  end

  @doc """
  Given an AST node, determine if the node is a call with block arguments.
  iex> call_with_block?(1)
  false
  iex> call_with_block?("Hello")
  false
  iex> call_with_block?(quote do run(:ok) end)
  false
  iex> call_with_block?(quote do run :ok do nil end end)
  true
  """
  @spec call_with_block?(Macro.t) :: boolean
  def call_with_block?({_, _, args}) when is_list(args) do
    case List.last(args) do
      [{:do, _} | _] -> true
      _ -> false
    end
  end

  def call_with_block?(_) do
    false
  end
end
|
lib/exfmt/ast/util.ex
| 0.574156
| 0.405096
|
util.ex
|
starcoder
|
defmodule Tzdata.ReleaseReader do
  @moduledoc false
  # Read-only accessors for tzdata release data held in ETS.
  # The name of the currently active release is stored in the
  # :tzdata_current_release table; the release data itself lives in a table
  # named "tzdata_rel_<release_name>" (see table_name_for_release_name/1).

  # Each accessor fetches a single {key, value} entry from the current
  # release table and unwraps the value. `hd` raises on a missing key —
  # presumably the writer always fully populates the table; confirm with
  # the release-loading side.
  def rules, do: simple_lookup(:rules) |> hd |> elem(1)
  def zones, do: simple_lookup(:zones) |> hd |> elem(1)
  def links, do: simple_lookup(:links) |> hd |> elem(1)
  def zone_list, do: simple_lookup(:zone_list) |> hd |> elem(1)
  def link_list, do: simple_lookup(:link_list) |> hd |> elem(1)
  def zone_and_link_list, do: simple_lookup(:zone_and_link_list) |> hd |> elem(1)
  def archive_content_length, do: simple_lookup(:archive_content_length) |> hd |> elem(1)
  def release_version, do: simple_lookup(:release_version) |> hd |> elem(1)
  def leap_sec_data, do: simple_lookup(:leap_sec_data) |> hd |> elem(1)
  def by_group, do: simple_lookup(:by_group) |> hd |> elem(1)
  def modified_at, do: simple_lookup(:modified_at) |> hd |> elem(1)

  # Looks `key` up in the ETS table of the currently active release.
  defp simple_lookup(key) do
    :ets.lookup(current_release_from_table() |> table_name_for_release_name, key)
  end

  # Returns {:ok, zone_data | nil} — nil when the zone name is unknown.
  def zone(zone_name) do
    {:ok, zones()[zone_name]}
  end

  def rules_for_name(rules_name) do
    {:ok, rules()[rules_name]}
  end

  # Resolves link names (aliases) to their canonical zone before reading
  # periods; unknown names yield {:error, :not_found}.
  def periods_for_zone_or_link(zone) do
    if Enum.member?(zone_list(), zone) do
      {:ok, do_periods_for_zone(zone)}
    else
      case Enum.member?(link_list(), zone) do
        true -> periods_for_zone_or_link(links()[zone])
        _ -> {:error, :not_found}
      end
    end
  end

  # Older release tables may predate the :modified_at key.
  def has_modified_at? do
    simple_lookup(:modified_at) != []
  end

  defp do_periods_for_zone(zone) do
    case lookup_periods_for_zone(zone) do
      periods when is_list(periods) ->
        # Sort by the period's "from" delimiter (tuple position 1), with
        # :min/:max mapped onto sortable integers.
        periods
        |> Enum.sort_by(fn period -> elem(period, 1) |> delimiter_to_number() end)
      _ ->
        nil
    end
  end

  # Periods are stored under the zone name as an (existing) atom key.
  defp lookup_periods_for_zone(zone) when is_binary(zone),
    do: simple_lookup(String.to_existing_atom(zone))
  defp lookup_periods_for_zone(_), do: []

  @doc !"""
       Hack which is useful for sorting periods. Delimiters can be integers representing
       gregorian seconds or :min or :max. By converting :min and :max to integers, they are
       easier to sort. It is assumed that the fake numbers they are converted to are far beyond
       numbers used.
       TODO: Instead of doing this, do the sorting before inserting. When reading from a bag the order
       should be preserved.
       """
  @very_high_number_representing_gregorian_seconds 9_315_537_984_000
  @low_number_representing_before_year_0 -1
  def delimiter_to_number(:min), do: @low_number_representing_before_year_0
  def delimiter_to_number(:max), do: @very_high_number_representing_gregorian_seconds
  def delimiter_to_number(integer) when is_integer(integer), do: integer

  defp current_release_from_table do
    :ets.lookup(:tzdata_current_release, :release_version) |> hd |> elem(1)
  end

  defp table_name_for_release_name(release_name) do
    "tzdata_rel_#{release_name}" |> String.to_atom()
  end

  # Finds the period(s) covering `time_point` (gregorian seconds) for a
  # zone. `time_type` selects which pair of delimiters in the stored period
  # tuple is compared (:wall or :utc).
  def periods_for_zone_time_and_type(zone_name, time_point, time_type) do
    try do
      case do_periods_for_zone_time_and_type(zone_name, time_point, time_type) do
        {:ok, []} ->
          # If nothing was found, it could be that the zone name is not canonical.
          # E.g. "Europe/Jersey" which links to "Europe/London".
          # So we try with a link
          zone_name_to_use = links()[zone_name]
          case zone_name_to_use do
            nil -> {:ok, []}
            _ -> do_periods_for_zone_time_and_type(zone_name_to_use, time_point, time_type)
          end
        {:ok, list} ->
          {:ok, list}
      end
    rescue
      # String.to_existing_atom/1 raises ArgumentError for unknown zone
      # names; treat that as "no periods found".
      ArgumentError -> {:ok, []}
    end
  end

  # Around a wall-clock transition two periods may overlap (or zero during
  # a gap); UTC time maps to exactly one period.
  @max_possible_periods_for_wall_time 2
  @max_possible_periods_for_utc 1
  def do_periods_for_zone_time_and_type(zone_name, time_point, :wall) do
    # Match spec: $1 = wall "from" delimiter (pos 3), $2 = wall "until"
    # delimiter (pos 6); select periods with from <= time_point < until,
    # where :min/:max act as open-ended bounds.
    match_fun = [
      {{String.to_existing_atom(zone_name), :_, :"$1", :_, :_, :"$2", :_, :_, :_, :_},
       [
         {:andalso, {:orelse, {:"=<", :"$1", time_point}, {:==, :"$1", :min}},
          {:orelse, {:>, :"$2", time_point}, {:==, :"$2", :max}}}
       ], [:"$_"]}
    ]
    case :ets.select(
           current_release_from_table() |> table_name_for_release_name,
           match_fun,
           @max_possible_periods_for_wall_time
         ) do
      {ets_result, _} ->
        {:ok, ets_result}
      _ ->
        {:ok, []}
    end
  end

  def do_periods_for_zone_time_and_type(zone_name, time_point, :utc) do
    # Same shape as the :wall clause, but comparing the UTC delimiters
    # (tuple positions 2 and 5).
    match_fun = [
      {{String.to_existing_atom(zone_name), :"$1", :_, :_, :"$2", :_, :_, :_, :_, :_},
       [
         {:andalso, {:orelse, {:"=<", :"$1", time_point}, {:==, :"$1", :min}},
          {:orelse, {:>, :"$2", time_point}, {:==, :"$2", :max}}}
       ], [:"$_"]}
    ]
    case :ets.select(
           current_release_from_table() |> table_name_for_release_name,
           match_fun,
           @max_possible_periods_for_utc
         ) do
      {ets_result, _} ->
        {:ok, ets_result}
      _ ->
        {:ok, []}
    end
  end
end
|
lib/tzdata/release_reader.ex
| 0.51562
| 0.445771
|
release_reader.ex
|
starcoder
|
defmodule ExCypher.Statements.Generic do
  @moduledoc false
  # This module will provide the most generic AST conversion
  # functions that'll be shared between different commands.
  # Of course, such abstraction won't be possible to match
  # all kinds of statements, because some cypher commands
  # like the `WHERE` statement, have a unique syntax that is
  # very different from simpler ones, like the `RETURN`
  # statement.
  # The intent, in this way, is to combine functions in a
  # specialization way. Outer modules attempt to filter and
  # process their specific syntaxes and, whenever they can't,
  # use this module as a last attempt to convert those AST
  # nodes.
  # This way the core logic, which can include the caveat arround
  # elixir's function identification on unknown names, for example,
  # can be shared with other modules
  alias ExCypher.Graph.{Node, Relationship}
  alias ExCypher.Statements.Generic.Expression

  # Converts an Elixir AST node into its Cypher representation by first
  # classifying it (Expression.new/2) and then rendering per type.
  # NOTE(review): despite the spec, some clauses below return non-binary
  # terms (a list for :list, a quoted expression for :var) — presumably
  # flattened/evaluated by the callers; confirm before tightening the spec.
  @spec parse(ast :: term()) :: String.t()
  def parse(ast, env \\ nil)
  # Removing parenthesis from statements that elixir
  # attempts to resolve a name as a function.
  def parse(ast, env) do
    parse_expression(Expression.new(ast, env))
  end

  # `node.prop` style property access.
  defp parse_expression(expr = %Expression{type: :property}) do
    [first, last] = expr.args
    "#{parse(first, expr.env)}.#{parse(last, expr.env)}"
  end

  # Raw fragments: render each argument and strip double quotes so the
  # fragment text is embedded verbatim.
  defp parse_expression(expr = %Expression{type: :fragment}) do
    expr.args
    |> Enum.map(&parse(&1, expr.env))
    |> Enum.map(&String.replace(&1, "\"", ""))
    |> Enum.join(", ")
  end

  defp parse_expression(expr = %Expression{type: :node}),
    do: apply(Node, :node, expr.args)

  defp parse_expression(expr = %Expression{type: :relationship}),
    do: apply(Relationship, :rel, expr.args)

  # Associations carry a direction tag plus tagged from/to nodes, which
  # are rendered recursively before being handed to Relationship.assoc/2.
  defp parse_expression(expr = %Expression{type: :association}) do
    [association, {from_type, from}, {to_type, to}] = expr.args
    apply(Relationship, :assoc, [
      association,
      {
        {from_type, parse(from, expr.env)},
        {to_type, parse(to, expr.env)}
      }
    ])
  end

  defp parse_expression(%Expression{type: :null}),
    do: "NULL"

  defp parse_expression(expr = %Expression{type: :alias}),
    do: Atom.to_string(expr.args)

  # NOTE(review): returns the interspersed *list* (iodata-like), not a
  # joined binary — verify callers flatten it.
  defp parse_expression(expr = %Expression{type: :list}) do
    expr.args
    |> Enum.map(&parse(&1, expr.env))
    |> Enum.intersperse(",")
  end

  # Variables are only known at runtime, so this clause returns a quoted
  # expression that quotes binaries and passes other terms through.
  # NOTE(review): unlike the clauses above this yields AST, presumably
  # spliced into the caller's macro expansion — confirm.
  defp parse_expression(expr = %Expression{type: :var}) do
    term = expr.args
    quote bind_quoted: [term: term] do
      if is_binary(term) do
        "\"#{term}\""
      else
        term
      end
    end
  end

  # Fallback: render any other AST node as its source text.
  defp parse_expression(%Expression{args: args}), do: Macro.to_string(args)
end
|
lib/ex_cypher/statements/generic.ex
| 0.730194
| 0.634317
|
generic.ex
|
starcoder
|
defmodule Dune.Opts do
  @moduledoc """
  Defines and validates the options for `Dune`.
  The available options are explained below:
  ### Parsing restriction options
  - `atom_pool_size`:
  Defines the maximum total number of atoms that can be created.
  Must be an integer `>= 0`. Defaults to `5000`.
  See the [section below](#module-extra-note-about-atom_pool_size) for more information.
  - `max_length`:
  Defines the maximum length of code strings that can be parsed.
  Defaults to `5000`.
  ### Execution restriction options
  - `allowlist`:
  Defines which module and functions are considered safe or restricted.
  Should be a module implementing the `Dune.Allowlist` behaviour.
  Defaults to `Dune.Allowlist.Default`.
  - `max_heap_size`:
  Limits the memory usage of the evaluation process using the
  [`max_heap_size` flag](https://erlang.org/doc/man/erlang.html#process_flag_max_heap_size).
  Should be an integer `> 0`. Defaults to `30_000`.
  - `max_reductions`:
  Limits the number of CPU cycles of the evaluation process.
  The erlang pre-emptive scheduler is using reductions to measure work being done by processes,
  which is useful to prevent users to run CPU intensive code such as infinite loops.
  Should be an integer `> 0`. Defaults to `30_000`.
  - `timeout`:
  Limits the time the evaluation process is authorized to run (in milliseconds).
  Should be an integer `> 0`. Defaults to `50`.
  The evaluation process will still need to parse and execute the sanitized AST, so using
  too low limits here would leave only a small margin to actually run user code.
  ### Other options
  - `pretty`:
  Use pretty printing when inspecting the result.
  Should be a boolean. Defaults to `false`.
  ### Extra note about `atom_pool_size`
  Atoms are reused from one evaluation to the other so the total is not
  expected to grow. Atoms will not be leaked.
  Also, the atom pool is actually split into several pools: regular atoms, module names,
  unused variable names, ...
  So defining a value of `100` does not mean that `100` atoms will be available, but
  rather `25` of each type.
  Atoms being very lightweight, there is no need to use a low value, as long
  as there is an upper bound preventing atom leaks.
  """
  alias Dune.Allowlist

  @type t :: %__MODULE__{
          atom_pool_size: non_neg_integer,
          max_length: pos_integer,
          allowlist: module,
          max_heap_size: pos_integer,
          max_reductions: pos_integer,
          timeout: pos_integer,
          pretty: boolean
        }
  defstruct atom_pool_size: 5000,
            max_length: 5000,
            allowlist: Dune.Allowlist.Default,
            max_heap_size: 30_000,
            max_reductions: 30_000,
            timeout: 50,
            pretty: false

  @doc """
  Validates untrusted options from a keyword or a map and returns a `Dune.Opts` struct.
  ## Examples
      iex> Dune.Opts.validate!([])
      %Dune.Opts{
        allowlist: Dune.Allowlist.Default,
        atom_pool_size: 5000,
        max_heap_size: 30000,
        max_length: 5000,
        max_reductions: 30000,
        pretty: false,
        timeout: 50
      }
      iex> Dune.Opts.validate!(atom_pool_size: 10)
      %Dune.Opts{atom_pool_size: 10, allowlist: Dune.Allowlist.Default}
      iex> Dune.Opts.validate!(atom_pool_size: -10)
      ** (ArgumentError) atom_pool_size should be an integer >= 0
      iex> Dune.Opts.validate!(max_length: 0)
      ** (ArgumentError) max_length should be an integer > 0
      iex> Dune.Opts.validate!(allowlist: DoesNotExists)
      ** (ArgumentError) could not load module DoesNotExists due to reason :nofile
      iex> Dune.Opts.validate!(allowlist: List)
      ** (ArgumentError) List does not implement the Dune.Allowlist behaviour
      iex> Dune.Opts.validate!(max_reductions: 10_000, max_heap_size: 10_000, timeout: 20)
      %Dune.Opts{max_heap_size: 10_000, max_reductions: 10_000, timeout: 20}
      iex> Dune.Opts.validate!(max_heap_size: 0)
      ** (ArgumentError) max_heap_size should be an integer > 0
      iex> Dune.Opts.validate!(max_reductions: 0)
      ** (ArgumentError) max_reductions should be an integer > 0
      iex> Dune.Opts.validate!(timeout: "55")
      ** (ArgumentError) timeout should be an integer > 0
      iex> Dune.Opts.validate!(pretty: :maybe)
      ** (ArgumentError) pretty should be a boolean
  """
  @spec validate!(Keyword.t() | map) :: t
  def validate!(opts) do
    struct(__MODULE__, opts) |> do_validate()
  end

  # Each clause below rejects exactly one invalid option via a negated
  # guard; the final clause runs the allowlist behaviour check and returns
  # the validated struct.
  defp do_validate(%{atom_pool_size: atom_pool_size})
       when not (is_integer(atom_pool_size) and atom_pool_size >= 0) do
    raise ArgumentError, message: "atom_pool_size should be an integer >= 0"
  end

  defp do_validate(%{max_length: max_length})
       when not (is_integer(max_length) and max_length > 0) do
    # Fixed: this message previously said "atom_pool_size", which
    # misreported the offending option.
    raise ArgumentError, message: "max_length should be an integer > 0"
  end

  defp do_validate(%{allowlist: allowlist}) when not is_atom(allowlist) do
    raise ArgumentError, message: "allowlist should be a module"
  end

  defp do_validate(%{max_reductions: max_reductions})
       when not (is_integer(max_reductions) and max_reductions > 0) do
    raise ArgumentError, message: "max_reductions should be an integer > 0"
  end

  defp do_validate(%{max_heap_size: max_heap_size})
       when not (is_integer(max_heap_size) and max_heap_size > 0) do
    raise ArgumentError, message: "max_heap_size should be an integer > 0"
  end

  defp do_validate(%{timeout: timeout}) when not (is_integer(timeout) and timeout > 0) do
    raise ArgumentError, message: "timeout should be an integer > 0"
  end

  defp do_validate(%{pretty: pretty}) when not is_boolean(pretty) do
    raise ArgumentError, message: "pretty should be a boolean"
  end

  defp do_validate(opts = %{allowlist: allowlist}) do
    Allowlist.ensure_implements_behaviour!(allowlist)
    opts
  end
end
|
lib/dune/opts.ex
| 0.916918
| 0.760072
|
opts.ex
|
starcoder
|
defmodule Cldr.Unit.Conversions do
  @moduledoc false
  # Builds, at compile time, the unit-conversion lookup table used by
  # Cldr.Unit: CLDR conversion data plus any additional user-configured
  # conversions, normalized into %Conversion{} structs keyed by unit name.
  alias Cldr.Unit.Conversion
  alias Cldr.Unit.Parser

  # Compile-time pipeline:
  #   1. merge CLDR conversions with user additions and build Conversion structs;
  #   2. normalize :factor to a Ratio when it is not a plain number
  #      (presumably the raw data uses a numerator/denominator map — confirm);
  #   3. same normalization for :offset;
  #   4. wrap :base_unit in a list and nest each entry as {unit, [{unit, conversion}]}.
  @conversions Map.get(Cldr.Config.units(), :conversions)
               |> Kernel.++(Cldr.Unit.Additional.conversions())
               |> Enum.map(fn
                 {k, v} -> {k, struct(Conversion, v)}
               end)
               |> Enum.map(fn
                 {unit, %{factor: factor} = conversion} when is_number(factor) ->
                   {unit, conversion}
                 {unit, %{factor: factor} = conversion} ->
                   {unit, %{conversion | factor: Ratio.new(factor.numerator, factor.denominator)}}
               end)
               |> Enum.map(fn
                 {unit, %{offset: offset} = conversion} when is_number(offset) ->
                   {unit, conversion}
                 {unit, %{offset: offset} = conversion} ->
                   {unit, %{conversion | offset: Ratio.new(offset.numerator, offset.denominator)}}
               end)
               |> Enum.map(fn
                 {unit, %{base_unit: base_unit} = conversion} ->
                   {unit, [{unit, %{conversion | base_unit: [base_unit]}}]}
               end)
               |> Map.new()

  # Every base unit also converts to itself with factor 1 and offset 0, so
  # lookups on base units succeed without a special case.
  @identity_conversions Enum.map(@conversions, fn
                          {_k, [{_v, %Conversion{base_unit: [base_unit]}}]} ->
                            {base_unit,
                             [
                               {base_unit,
                                %Conversion{base_unit: [base_unit], offset: 0, factor: 1}}
                             ]}
                        end)
                        |> Map.new()
  @all_conversions Map.merge(@conversions, @identity_conversions)

  # Returns the full compile-time conversion table.
  def conversions do
    unquote(Macro.escape(@all_conversions))
  end

  # Direct atom lookup; unknown atoms fall back to parsing the unit name
  # (e.g. compound units like "kilometer_per_hour").
  def conversion_for(unit) when is_atom(unit) do
    case Map.fetch(conversions(), unit) do
      {:ok, conversion} ->
        {:ok, conversion}
      :error ->
        unit_string = Atom.to_string(unit)
        Parser.parse_unit(unit_string)
    end
  end

  # String names are converted with to_existing_atom to avoid creating
  # atoms from untrusted input; unknown names become {:error, ...}.
  def conversion_for(unit) when is_binary(unit) do
    unit
    |> String.to_existing_atom()
    |> conversion_for()
  rescue
    ArgumentError ->
      {:error, Cldr.Unit.unit_error(unit)}
  end

  # Bang variant: raises the error returned by conversion_for/1.
  def conversion_for!(unit) do
    case conversion_for(unit) do
      {:ok, conversion} -> conversion
      {:error, {exception, reason}} -> raise exception, reason
    end
  end
end
|
lib/cldr/unit/conversion/conversions.ex
| 0.824356
| 0.469763
|
conversions.ex
|
starcoder
|
defmodule AWS.LookoutEquipment do
  @moduledoc """
  Amazon Lookout for Equipment is a machine learning service that uses advanced
  analytics to identify anomalies in machines from sensor data for use in
  predictive maintenance.
  """

  alias AWS.Client
  alias AWS.Request

  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: "LookoutEquipment",
      api_version: "2020-12-15",
      content_type: "application/x-amz-json-1.0",
      credential_scope: nil,
      endpoint_prefix: "lookoutequipment",
      global?: false,
      protocol: "json",
      service_id: "LookoutEquipment",
      signature_version: "v4",
      signing_name: "lookoutequipment",
      target_prefix: "AWSLookoutEquipmentFrontendService"
    }
  end

  # Every operation of this service is a JSON POST that differs only in its
  # action name, so all public functions delegate here.
  defp request(%Client{} = client, action, input, options) do
    Request.request_post(client, metadata(), action, input, options)
  end

  @doc """
  Creates a container for a collection of data being ingested for analysis.
  The dataset contains the metadata describing where the data is and what the data
  actually looks like. In other words, it contains the location of the data
  source, the data schema, and other information. A dataset also contains any tags
  associated with the ingested data.
  """
  def create_dataset(%Client{} = client, input, options \\ []) do
    request(client, "CreateDataset", input, options)
  end

  @doc """
  Creates a scheduled inference.
  Scheduling an inference is setting up a continuous real-time inference plan to
  analyze new measurement data. When setting up the schedule, you provide an S3
  bucket location for the input data, assign it a delimiter between separate
  entries in the data, set an offset delay if desired, and set the frequency of
  inferencing. You must also provide an S3 bucket location for the output data.
  """
  def create_inference_scheduler(%Client{} = client, input, options \\ []) do
    request(client, "CreateInferenceScheduler", input, options)
  end

  @doc """
  Creates an ML model for data inference.
  A machine-learning (ML) model is a mathematical model that finds patterns in
  your data. In Amazon Lookout for Equipment, the model learns the patterns of
  normal behavior and detects abnormal behavior that could be potential equipment
  failure (or maintenance events). The models are made by analyzing normal data
  and abnormalities in machine behavior that have already occurred.
  Your model is trained using a portion of the data from your dataset and uses
  that data to learn patterns of normal behavior and abnormal patterns that lead
  to equipment failure. Another portion of the data is used to evaluate the
  model's accuracy.
  """
  def create_model(%Client{} = client, input, options \\ []) do
    request(client, "CreateModel", input, options)
  end

  @doc """
  Deletes a dataset and associated artifacts.
  The operation will check to see if any inference scheduler or data ingestion job
  is currently using the dataset, and if there isn't, the dataset, its metadata,
  and any associated data stored in S3 will be deleted. This does not affect any
  models that used this dataset for training and evaluation, but does prevent it
  from being used in the future.
  """
  def delete_dataset(%Client{} = client, input, options \\ []) do
    request(client, "DeleteDataset", input, options)
  end

  @doc """
  Deletes an inference scheduler that has been set up.
  Already processed output results are not affected.
  """
  def delete_inference_scheduler(%Client{} = client, input, options \\ []) do
    request(client, "DeleteInferenceScheduler", input, options)
  end

  @doc """
  Deletes an ML model currently available for Amazon Lookout for Equipment.
  This will prevent it from being used with an inference scheduler, even one that
  is already set up.
  """
  def delete_model(%Client{} = client, input, options \\ []) do
    request(client, "DeleteModel", input, options)
  end

  @doc """
  Provides information on a specific data ingestion job such as creation time,
  dataset ARN, status, and so on.
  """
  def describe_data_ingestion_job(%Client{} = client, input, options \\ []) do
    request(client, "DescribeDataIngestionJob", input, options)
  end

  @doc """
  Provides information on a specified dataset such as the schema location, status,
  and so on.
  """
  def describe_dataset(%Client{} = client, input, options \\ []) do
    request(client, "DescribeDataset", input, options)
  end

  @doc """
  Specifies information about the inference scheduler being used, including name,
  model, status, and associated metadata
  """
  def describe_inference_scheduler(%Client{} = client, input, options \\ []) do
    request(client, "DescribeInferenceScheduler", input, options)
  end

  @doc """
  Provides overall information about a specific ML model, including model name and
  ARN, dataset, training and evaluation information, status, and so on.
  """
  def describe_model(%Client{} = client, input, options \\ []) do
    request(client, "DescribeModel", input, options)
  end

  @doc """
  Provides a list of all data ingestion jobs, including dataset name and ARN, S3
  location of the input data, status, and so on.
  """
  def list_data_ingestion_jobs(%Client{} = client, input, options \\ []) do
    request(client, "ListDataIngestionJobs", input, options)
  end

  @doc """
  Lists all datasets currently available in your account, filtering on the dataset
  name.
  """
  def list_datasets(%Client{} = client, input, options \\ []) do
    request(client, "ListDatasets", input, options)
  end

  @doc """
  Lists all inference executions that have been performed by the specified
  inference scheduler.
  """
  def list_inference_executions(%Client{} = client, input, options \\ []) do
    request(client, "ListInferenceExecutions", input, options)
  end

  @doc """
  Retrieves a list of all inference schedulers currently available for your
  account.
  """
  def list_inference_schedulers(%Client{} = client, input, options \\ []) do
    request(client, "ListInferenceSchedulers", input, options)
  end

  @doc """
  Generates a list of all models in the account, including model name and ARN,
  dataset, and status.
  """
  def list_models(%Client{} = client, input, options \\ []) do
    request(client, "ListModels", input, options)
  end

  @doc """
  Lists all the tags for a specified resource, including key and value.
  """
  def list_tags_for_resource(%Client{} = client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end

  @doc """
  Starts a data ingestion job.
  Amazon Lookout for Equipment returns the job status.
  """
  def start_data_ingestion_job(%Client{} = client, input, options \\ []) do
    request(client, "StartDataIngestionJob", input, options)
  end

  @doc """
  Starts an inference scheduler.
  """
  def start_inference_scheduler(%Client{} = client, input, options \\ []) do
    request(client, "StartInferenceScheduler", input, options)
  end

  @doc """
  Stops an inference scheduler.
  """
  def stop_inference_scheduler(%Client{} = client, input, options \\ []) do
    request(client, "StopInferenceScheduler", input, options)
  end

  @doc """
  Associates a given tag to a resource in your account.
  A tag is a key-value pair which can be added to an Amazon Lookout for Equipment
  resource as metadata. Tags can be used for organizing your resources as well as
  helping you to search and filter by tag. Multiple tags can be added to a
  resource, either when you create it, or later. Up to 50 tags can be associated
  with each resource.
  """
  def tag_resource(%Client{} = client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end

  @doc """
  Removes a specific tag from a given resource.
  The tag is specified by its key.
  """
  def untag_resource(%Client{} = client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end

  @doc """
  Updates an inference scheduler.
  """
  def update_inference_scheduler(%Client{} = client, input, options \\ []) do
    request(client, "UpdateInferenceScheduler", input, options)
  end
end
|
lib/aws/generated/lookout_equipment.ex
| 0.895054
| 0.487978
|
lookout_equipment.ex
|
starcoder
|
defmodule JMES.Functions do
@moduledoc """
Contains builtin functions.
"""
  defmodule Handler do
    @moduledoc """
    Behaviour for modules that execute JMES functions.

    `call/3` receives the function name, the evaluated argument list and
    options, returning `{:ok, result}` on success or `{:error, reason}`
    (e.g. `:invalid_arity`, `:invalid_type`) on failure.
    """
    @callback call(String.t(), list, keyword) :: {:ok, term} | {:error, atom}
  end
@behaviour Handler
# ==============================================================================================
# abs
# ==============================================================================================
@doc """
Executes a function given a name and a list of arguments.
"""
@spec call(String.t(), list, keyword) :: {:ok, term} | {:error, atom}
def call("abs", args, _opts) when length(args) != 1 do
{:error, :invalid_arity}
end
def call("abs", [value], _opts) when not is_number(value) do
{:error, :invalid_type}
end
def call("abs", [value], _opts) do
{:ok, abs(value)}
end
# ==============================================================================================
# avg
# ==============================================================================================
def call("avg", args, _opts) when length(args) != 1 do
{:error, :invalid_arity}
end
def call("avg", [list], _opts) do
with :ok <- list_of(list, :number) do
{:ok, Enum.reduce(list, 0, &+/2) / length(list)}
end
end
# ==============================================================================================
# contains
# ==============================================================================================
def call("contains", args, _opts) when length(args) != 2 do
{:error, :invalid_arity}
end
def call("contains", [list, value], _opts) when is_list(list) do
{:ok, value in list}
end
def call("contains", [string, value], _opts) when is_binary(value) and is_binary(string) do
{:ok, String.contains?(string, value)}
end
def call("contains", _args, _opts) do
{:error, :invalid_type}
end
# ==============================================================================================
# ceil
# ==============================================================================================
def call("ceil", args, _opts) when length(args) != 1 do
{:error, :invalid_arity}
end
def call("ceil", [value], _opts) when not is_number(value) do
{:error, :invalid_type}
end
def call("ceil", [value], _opts) do
{:ok, ceil(value)}
end
# ==============================================================================================
# ends_with
# ==============================================================================================
def call("ends_with", args, _opts) when length(args) != 2 do
{:error, :invalid_arity}
end
def call("ends_with", [string, suffix], _opts)
when not (is_binary(string) and is_binary(suffix)) do
{:error, :invalid_type}
end
def call("ends_with", [string, suffix], _opts) do
{:ok, String.ends_with?(string, suffix)}
end
# ==============================================================================================
# floor
# ==============================================================================================
def call("floor", args, _opts) when length(args) != 1 do
{:error, :invalid_arity}
end
def call("floor", [value], _opts) when not is_number(value) do
{:error, :invalid_type}
end
def call("floor", [value], _opts) do
{:ok, floor(value)}
end
# ==============================================================================================
# join
# ==============================================================================================
def call("join", args, _opts) when length(args) != 2 do
{:error, :invalid_arity}
end
def call("join", [joiner, _list], _opts) when not is_binary(joiner) do
{:error, :invalid_type}
end
def call("join", [joiner, list], _opts) do
with :ok <- list_of(list, :string) do
{:ok, Enum.join(list, joiner)}
end
end
# ==============================================================================================
# keys
# ==============================================================================================
def call("keys", args, _opts) when length(args) != 1 do
{:error, :invalid_arity}
end
def call("keys", [value], _opts) when not is_map(value) do
{:error, :invalid_type}
end
def call("keys", [value], _opts) do
{:ok, Map.keys(value)}
end
# ==============================================================================================
# length
# ==============================================================================================
def call("length", args, _opts) when length(args) != 1 do
{:error, :invalid_arity}
end
def call("length", [value], _opts) when is_binary(value) do
{:ok, String.length(value)}
end
def call("length", [value], _opts) when is_list(value) do
{:ok, length(value)}
end
def call("length", [value], _opts) when is_map(value) do
{:ok, length(Map.keys(value))}
end
def call("length", _args, _opts) do
{:error, :invalid_type}
end
# ==============================================================================================
# map
# ==============================================================================================
# map(expr, list) -> list of `expr` evaluated against each element.
def call("map", args, _opts) when length(args) != 2 do
  {:error, :invalid_arity}
end
# The expression must be a compiled AST node (atom or tuple), not raw text.
def call("map", [expr, _list], _opts) when not is_atom(expr) and not is_tuple(expr) do
  {:error, :invalid_type}
end
def call("map", [_expr, list], _opts) when not is_list(list) do
  {:error, :invalid_type}
end
def call("map", [expr, list], opts) do
  # foldr preserves element order while building the result with cheap
  # prepends. Once a search fails, the error clause threads the same error
  # through the remaining (already-visited-from-the-right) elements.
  List.foldr(list, {:ok, []}, fn
    item, {:ok, list} ->
      case JMES.search(expr, item, opts) do
        {:ok, value} -> {:ok, [value | list]}
        err -> err
      end
    _expr, err ->
      err
  end)
end
# ==============================================================================================
# max
# ==============================================================================================
# max(list) -> largest element of an all-number or all-string list;
# nil for an empty list.
def call("max", args, _opts) when length(args) != 1, do: {:error, :invalid_arity}

def call("max", [list], _opts) do
  case one_of([&list_of(&1, :number), &list_of(&1, :string)], list) do
    :ok -> {:ok, Enum.max(list, fn -> nil end)}
    error -> error
  end
end
# ==============================================================================================
# max_by
# ==============================================================================================
# max_by(list, expr) -> the element whose `expr` projection is largest.
def call("max_by", args, _opts) when length(args) != 2 do
  {:error, :invalid_arity}
end

def call("max_by", [list, expr], opts) do
  eject = fn {_item, value} -> value end

  with {:ok, values} <- call("map", [expr, list], opts),
       :ok <- one_of([&list_of(&1, :number), &list_of(&1, :string)], values) do
    # Previously an empty list leaked a bare `nil` out of the `with` (the
    # {item, _value} match failed); wrap it so callers always get a tuple,
    # consistent with "max"/"min".
    case Enum.max_by(Enum.zip(list, values), eject, fn -> nil end) do
      nil -> {:ok, nil}
      {item, _value} -> {:ok, item}
    end
  end
end
# ==============================================================================================
# merge
# ==============================================================================================
# merge(obj1, obj2, ...) -> single object; later arguments win on key clashes.
def call("merge", args, _opts) when length(args) < 1, do: {:error, :invalid_arity}

def call("merge", maps, _opts) do
  case list_of(maps, :map) do
    :ok -> {:ok, Enum.reduce(maps, %{}, fn curr, acc -> Map.merge(acc, curr) end)}
    error -> error
  end
end
# ==============================================================================================
# min
# ==============================================================================================
# min(list) -> smallest element of an all-number or all-string list;
# nil for an empty list.
def call("min", args, _opts) when length(args) != 1, do: {:error, :invalid_arity}

def call("min", [list], _opts) do
  case one_of([&list_of(&1, :number), &list_of(&1, :string)], list) do
    :ok -> {:ok, Enum.min(list, fn -> nil end)}
    error -> error
  end
end
# ==============================================================================================
# min_by
# ==============================================================================================
# min_by(list, expr) -> the element whose `expr` projection is smallest.
def call("min_by", args, _opts) when length(args) != 2 do
  {:error, :invalid_arity}
end

def call("min_by", [list, expr], opts) do
  eject = fn {_item, value} -> value end

  with {:ok, values} <- call("map", [expr, list], opts),
       :ok <- one_of([&list_of(&1, :number), &list_of(&1, :string)], values) do
    # Previously an empty list leaked a bare `nil` out of the `with` (the
    # {item, _value} match failed); wrap it so callers always get a tuple,
    # consistent with "max"/"min".
    case Enum.min_by(Enum.zip(list, values), eject, fn -> nil end) do
      nil -> {:ok, nil}
      {item, _value} -> {:ok, item}
    end
  end
end
# ==============================================================================================
# not_null
# ==============================================================================================
# not_null(a, b, ...) -> first non-null argument, or nil when all are null.
def call("not_null", args, _opts) when length(args) < 1, do: {:error, :invalid_arity}

def call("not_null", args, _opts) do
  {:ok, Enum.find(args, fn arg -> not is_nil(arg) end)}
end
# ==============================================================================================
# reverse
# ==============================================================================================
# reverse(subject) -> reversed string (by grapheme) or reversed list.
def call("reverse", args, _opts) when length(args) != 1, do: {:error, :invalid_arity}
def call("reverse", [subject], _opts) when is_binary(subject), do: {:ok, String.reverse(subject)}
def call("reverse", [subject], _opts) when is_list(subject), do: {:ok, Enum.reverse(subject)}
def call("reverse", _args, _opts), do: {:error, :invalid_type}
# ==============================================================================================
# sort
# ==============================================================================================
# sort(list) -> ascending sort of an all-number or all-string list.
def call("sort", args, _opts) when length(args) != 1, do: {:error, :invalid_arity}

def call("sort", [list], _opts) do
  case one_of([&list_of(&1, :number), &list_of(&1, :string)], list) do
    :ok -> {:ok, Enum.sort(list)}
    error -> error
  end
end
# ==============================================================================================
# sort_by
# ==============================================================================================
# sort_by(list, expr) -> list sorted by each element's `expr` projection.
def call("sort_by", args, _opts) when length(args) != 2, do: {:error, :invalid_arity}

def call("sort_by", [list, expr], opts) do
  with {:ok, sort_keys} <- call("map", [expr, list], opts),
       :ok <- one_of([&list_of(&1, :number), &list_of(&1, :string)], sort_keys) do
    sorted =
      list
      |> Enum.zip(sort_keys)
      |> Enum.sort_by(fn {_item, key} -> key end)
      |> Enum.map(fn {item, _key} -> item end)

    {:ok, sorted}
  end
end
# ==============================================================================================
# starts_with
# ==============================================================================================
# starts_with(subject, prefix) -> true when `subject` begins with `prefix`.
def call("starts_with", args, _opts) when length(args) != 2, do: {:error, :invalid_arity}

def call("starts_with", [subject, prefix], _opts)
    when is_binary(subject) and is_binary(prefix) do
  {:ok, String.starts_with?(subject, prefix)}
end

def call("starts_with", [_subject, _prefix], _opts), do: {:error, :invalid_type}
# ==============================================================================================
# sum
# ==============================================================================================
# sum(list) -> arithmetic sum of an all-number list (0 for the empty list).
def call("sum", args, _opts) when length(args) != 1 do
  {:error, :invalid_arity}
end

def call("sum", [list], _opts) do
  with :ok <- list_of(list, :number) do
    # Enum.sum/1 replaces the hand-rolled `reduce(list, 0, &(&1 + &2))`;
    # identical result, clearer intent.
    {:ok, Enum.sum(list)}
  end
end
# ==============================================================================================
# to_array
# ==============================================================================================
# to_array(value) -> value unchanged for lists/null, otherwise wrapped
# in a single-element list.
def call("to_array", args, _opts) when length(args) != 1, do: {:error, :invalid_arity}
def call("to_array", [nil], _opts), do: {:ok, nil}
def call("to_array", [value], _opts) when is_list(value), do: {:ok, value}
def call("to_array", [value], _opts), do: {:ok, [value]}
def call("to_array", _args, _opts), do: {:error, :invalid_type}
# ==============================================================================================
# to_string
# ==============================================================================================
# to_string(value) -> the value itself when it is already a string,
# otherwise the value JSON-encoded via Poison.
def call("to_string", args, _opts) when length(args) != 1 do
  {:error, :invalid_arity}
end
def call("to_string", [value], _opts) when is_binary(value) do
  {:ok, value}
end
def call("to_string", [value], _opts) do
  # Poison.encode/1 already returns {:ok, json} | {:error, reason}, matching
  # this function's contract. NOTE(review): a Poison error reason is not one
  # of the atoms used elsewhere in this module — confirm callers handle it.
  Poison.encode(value)
end
# ==============================================================================================
# to_number
# ==============================================================================================
# to_number(value) -> the number itself, a string fully parsed as a number,
# or null for anything unparsable / non-numeric.
def call("to_number", args, _opts) when length(args) != 1 do
  {:error, :invalid_arity}
end

def call("to_number", [value], _opts) when is_number(value) do
  {:ok, value}
end

def call("to_number", [value], _opts) when is_binary(value) do
  # JMESPath requires the whole string to be a valid number, otherwise null.
  # The previous version ignored trailing garbage ("1abc" parsed as 1.0).
  case Float.parse(value) do
    {float, ""} -> {:ok, float}
    _partial_or_error -> {:ok, nil}
  end
end

def call("to_number", _args, _opts) do
  {:ok, nil}
end
# ==============================================================================================
# type
# ==============================================================================================
# type(value) -> the JMESPath type name of the value as a string.
# Guard order matters: booleans are atoms, so is_boolean/1 must run before
# the catch-all "null" clause (which also covers nil and other atoms).
def call("type", args, _opts) when length(args) != 1, do: {:error, :invalid_arity}
def call("type", [value], _opts) when is_number(value), do: {:ok, "number"}
def call("type", [value], _opts) when is_binary(value), do: {:ok, "string"}
def call("type", [value], _opts) when is_boolean(value), do: {:ok, "boolean"}
def call("type", [value], _opts) when is_list(value), do: {:ok, "array"}
def call("type", [value], _opts) when is_map(value), do: {:ok, "object"}
def call("type", _args, _opts), do: {:ok, "null"}
# ==============================================================================================
# values
# ==============================================================================================
# values(object) -> array of the object's values.
def call("values", args, _opts) when length(args) != 1, do: {:error, :invalid_arity}
def call("values", [object], _opts) when is_map(object), do: {:ok, Map.values(object)}
def call("values", [_other], _opts), do: {:error, :invalid_type}
# ==============================================================================================
# Fallback
# ==============================================================================================
# Fallback clause for any name not handled above: delegate to the module
# configured under opts[:custom_functions] (expected to export call/3 with
# this same contract), otherwise report the function as unknown.
def call(name, args, opts) do
  if module = opts[:custom_functions] do
    apply(module, :call, [name, args, opts])
  else
    {:error, :unknown_function}
  end
end
# ==============================================================================================
# Helpers
# ==============================================================================================
# Validates that `value` is a list whose elements are all of the given type.
@spec list_of(any, atom) :: :ok | {:error, atom}
defp list_of(value, _type) when not is_list(value) do
  {:error, :invalid_type}
end

defp list_of(value, :number), do: check_all(value, &is_number/1)
defp list_of(value, :string), do: check_all(value, &is_binary/1)
defp list_of(value, :map), do: check_all(value, &is_map/1)

# Shared element check: the three previous clauses were copy-pasted
# reduce_while loops differing only in the guard. Enum.all?/2 short-circuits
# on the first failing element, exactly like the original reduce_while.
defp check_all(list, valid?) do
  if Enum.all?(list, valid?), do: :ok, else: {:error, :invalid_type}
end
# Runs the value through each validator in turn; succeeds on the first :ok,
# otherwise returns the error produced by the last validator tried.
@spec one_of([(any -> :ok | {:error, atom})], any) :: :ok | {:error, atom}
defp one_of(validators, value) do
  Enum.reduce_while(validators, {:error, :invalid_type}, fn validator, _last_error ->
    case validator.(value) do
      :ok -> {:halt, :ok}
      error -> {:cont, error}
    end
  end)
end
end
|
lib/jmes/functions.ex
| 0.707304
| 0.42054
|
functions.ex
|
starcoder
|
defmodule Automaton.Types.TWEANN.ExoSelf do
  # Maps a tuple-encoded genotype read from disk onto a process-based
  # phenotype (one process per Cortex/Sensor/Actuator/Neuron), wires the
  # processes together via an ETS Id<->PId table, then waits for the Cortex
  # to report updated weights and persists them back to the file.
  alias Automaton.Types.TWEANN.Sensor
  alias Automaton.Types.TWEANN.Actuator
  alias Automaton.Types.TWEANN.Cortex
  alias Automaton.Types.TWEANN.Neuron
  require Logger

  @doc ~S"""
  The map/1 function maps the tuple encoded genotype into a process based
  phenotype.
  The map function expects the cortex record as the leading tuple
  in the tuple list it reads from the file_name. We create an ets table to map
  Ids to PIds and back again. Since the Cortex element contains all the Sensor
  Actuator, and Neuron Ids, we are able to spawn each neuron using its own gen
  function, and in the process construct a map from Ids to PIds. We then use
  link_cerebral_units to link all non Cortex elements to each other by sending
  each spawned process the information contained in its record, but with Ids
  converted to Pids where appropriate. Finally, we provide the Cortex process
  with all the PIds in the NN system by executing the link_cortex/2 function.
  Once the NN is up and running, exoself starts its wait until the NN has
  finished its job and is ready to backup. When the cortex initiates the backup
  process it sends exoself the updated Input_p_id_ps from its neurons. ExoSelf
  uses the update_genotype/3 function to update the old genotype with new
  weights, and then stores the updated version back to its file.
  """
  def map() do
    # Default genotype file; :file.consult/1 accepts atom file names.
    map(:neuro)
  end

  def map(file_name) do
    {:ok, genotype} = :file.consult(file_name)
    # Run the wiring in a Task so the mapping executes in its own process.
    # NOTE(review): Task.await/1 uses the default 5_000ms timeout — confirm
    # that is long enough for a full run before the backup message arrives.
    task = Task.async(fn -> map(file_name, genotype) end)
    Task.await(task)
  end

  def map(file_name, genotype) do
    # Private set table: only this process reads/writes the Id<->PId mapping.
    ids_n_pids = :ets.new(:ids_n_pids, [:set, :private])
    # The cortex record must be the first element of the genotype list.
    [cortex | cerebral_units] = genotype
    spawn_cerebral_units(ids_n_pids, Cortex, [cortex.id])
    spawn_cerebral_units(ids_n_pids, Sensor, cortex.sensor_ids)
    spawn_cerebral_units(ids_n_pids, Actuator, cortex.actuator_ids)
    spawn_cerebral_units(ids_n_pids, Neuron, cortex.n_ids)
    link_cerebral_units(cerebral_units, ids_n_pids)
    link_cortex(cortex, ids_n_pids)
    cortex_pid = :ets.lookup_element(ids_n_pids, cortex.id, 2)

    # Block until the cortex initiates a backup, then persist the updated
    # genotype, one Erlang term per line, readable again by :file.consult/1.
    receive do
      {^cortex_pid, :backup, neuron_ids_n_weights} ->
        u_genotype = update_genotype(ids_n_pids, genotype, neuron_ids_n_weights)
        {:ok, file} = :file.open(file_name, :write)
        :lists.foreach(fn x -> :io.format(file, "~p.~n", [x]) end, u_genotype)
        :file.close(file)
        Logger.debug("Finished updating to file: #{file_name}")
    end
  end

  @doc ~S"""
  We spawn the process for each element based on its type: cerebral_unit_type, and
  the gen function that belongs to the cerebral_unit_type module. We then enter
  the {Id, PId} tuple into our ETS table for later use.
  """
  def spawn_cerebral_units(ids_n_pids, cerebral_unit_type, [id | ids]) do
    pid = apply(cerebral_unit_type, :gen, [self()])
    # Insert both directions so lookups work by Id and by PId.
    :ets.insert(ids_n_pids, {id, pid})
    :ets.insert(ids_n_pids, {pid, id})
    spawn_cerebral_units(ids_n_pids, cerebral_unit_type, ids)
  end

  def spawn_cerebral_units(_ids_n_pids, _cerebral_unit_type, []) do
    true
  end

  @doc ~S"""
  The link_cerebral_units/2 converts the Ids to PIds using the created IdsNPids
  ETS table. At this point all the elements are spawned, and the processes are
  waiting for their initial states.
  """
  def link_cerebral_units([%Sensor{} = sensor | cerebral_units], ids_n_pids) do
    sensor_pid = :ets.lookup_element(ids_n_pids, sensor.id, 2)
    cortex_pid = :ets.lookup_element(ids_n_pids, sensor.cx_id, 2)
    fanout_pids = for id <- sensor.fanout_ids, do: :ets.lookup_element(ids_n_pids, id, 2)
    send(sensor_pid, {self(), {sensor.id, cortex_pid, sensor.name, sensor.vl, fanout_pids}})
    link_cerebral_units(cerebral_units, ids_n_pids)
  end

  def link_cerebral_units([%Actuator{} = actuator | cerebral_units], ids_n_pids) do
    actuator_pid = :ets.lookup_element(ids_n_pids, actuator.id, 2)
    cortex_pid = :ets.lookup_element(ids_n_pids, actuator.cx_id, 2)
    fanin_pids = for id <- actuator.fanin_ids, do: :ets.lookup_element(ids_n_pids, id, 2)
    send(actuator_pid, {self(), {actuator.id, cortex_pid, actuator.name, fanin_pids}})
    link_cerebral_units(cerebral_units, ids_n_pids)
  end

  def link_cerebral_units([%Neuron{} = neuron | cerebral_units], ids_n_pids) do
    neuron_pid = :ets.lookup_element(ids_n_pids, neuron.id, 2)
    cortex_pid = :ets.lookup_element(ids_n_pids, neuron.cx_id, 2)
    input_pid_ps = convert_id_ps2pid_ps(ids_n_pids, neuron.input_id_ps, [])
    output_pids = for id <- neuron.output_ids, do: :ets.lookup_element(ids_n_pids, id, 2)
    send(neuron_pid, {self(), {neuron.id, cortex_pid, neuron.af, input_pid_ps, output_pids}})
    link_cerebral_units(cerebral_units, ids_n_pids)
  end

  def link_cerebral_units([], _ids_n_pids), do: :ok

  @doc ~S"""
  convert_id_ps2pid_ps/3 converts the IdPs
  tuples into tuples that use PIds instead of Ids, such that the Neuron will
  know which weights are to be associated with which incoming vector signals.
  The last element is the bias, which is added to the list in a non tuple form.
  Afterwards, the list is reversed to take its proper order.
  """
  def convert_id_ps2pid_ps(_ids_n_pids, [{:bias, bias}], acc) do
    # Terminal clause: the bias is appended bare (no tuple), per the docs.
    Enum.reverse([bias | acc])
  end

  def convert_id_ps2pid_ps(ids_n_pids, [{id, weights} | fanin_id_ps], acc) do
    convert_id_ps2pid_ps(ids_n_pids, fanin_id_ps, [
      {:ets.lookup_element(ids_n_pids, id, 2), weights} | acc
    ])
  end

  @doc ~S"""
  The cortex is initialized to its proper state just as other elements. Because
  we have not yet implemented a learning algorithm for our NN system, we need to
  specify when the NN should shutdown. We do this by specifying the total number
  of cycles the NN should execute before terminating, which is 1000 in this
  case.
  """
  def link_cortex(cortex, ids_n_pids) do
    cortex_pid = :ets.lookup_element(ids_n_pids, cortex.id, 2)
    sensor_pids = for id <- cortex.sensor_ids, do: :ets.lookup_element(ids_n_pids, id, 2)
    actuator_pids = for id <- cortex.actuator_ids, do: :ets.lookup_element(ids_n_pids, id, 2)
    neuron_pids = for id <- cortex.n_ids, do: :ets.lookup_element(ids_n_pids, id, 2)
    # Trailing 1000 is the total cycle count before the cortex shuts down.
    send(cortex_pid, {self(), {cortex.id, sensor_pids, actuator_pids, neuron_pids}, 1000})
  end

  @doc ~S"""
  For every {neuron_id, p_id_ps} tuple the update_genotype/3 function extracts the
  neuron with the id: neuron_id, and updates its weights. The convert_p_id_ps2id_ps/3
  performs the conversion from PIds to Ids of every {PId, Weights} tuple in the
  Input_p_id_ps list. The updated genotype is then returned back to the caller.
  """
  def update_genotype(ids_n_pids, genotype, [{neuron_id, p_id_ps} | weight_ps]) do
    Logger.debug("genotype: #{inspect(genotype)}")
    Logger.debug("neuron_id: #{inspect(neuron_id)}")
    ## FIXME: genotype is a list of maps/structs not tuples/records. Find replacement.
    neuron_index = Enum.find_index(genotype, fn x -> x.id == neuron_id end)
    neuron = Enum.at(genotype, neuron_index)
    # neuron = :lists.keyfind(neuron_id, 2, genotype)
    Logger.debug("p_id_ps: #{inspect(p_id_ps)}")
    input_id_ps = convert_p_id_ps2id_ps(ids_n_pids, p_id_ps, [])
    Logger.debug("neuron: #{inspect(neuron)}")
    updated_neuron = %Neuron{neuron | input_id_ps: input_id_ps}
    updated_genotype = List.replace_at(genotype, neuron_index, updated_neuron)
    Logger.debug("neuron: #{inspect(neuron)}")
    Logger.debug("updated_neuron: #{inspect(updated_neuron)}")
    Logger.debug("genotype: #{inspect(genotype)}")
    Logger.debug("updated_genotype: #{inspect(updated_genotype)}")
    update_genotype(ids_n_pids, updated_genotype, weight_ps)
  end

  def update_genotype(_ids_n_pids, genotype, []) do
    genotype
  end

  # Inverse of convert_id_ps2pid_ps/3: maps each {PId, Weights} back to
  # {Id, Weights}, re-wrapping the trailing bare bias as {:bias, bias}.
  def convert_p_id_ps2id_ps(ids_n_pids, [{pid, weights} | input_id_ps], acc) do
    convert_p_id_ps2id_ps(ids_n_pids, input_id_ps, [
      {:ets.lookup_element(ids_n_pids, pid, 2), weights} | acc
    ])
  end

  def convert_p_id_ps2id_ps(_ids_n_pids, [bias], acc) do
    :lists.reverse([{:bias, bias} | acc])
  end
end
|
lib/automata/automaton_types/neuroevolution/exoself.ex
| 0.718693
| 0.480662
|
exoself.ex
|
starcoder
|
defmodule Playwright.Config do
@moduledoc """
Configuration for Playwright.
## Overview
config :playwright, ConnectOptions,
[...]
config :playwright, LaunchOptions,
[...]
config :playwright, PlaywrightTest,
[...]
## Details for `ConnectOptions`
Configuration for connecting to a running Playwright browser server over a
WebSocket.
### `ws_endpoint` (required)
A browser websocket endpoint to which the runner will connect.
This option is required when using the `:driver` transport to communicate with
a Playwright browser server.
e.g.,
config :playwright, ConnectOptions,
ws_endpoint: "ws://localhost:3000/playwright"
## Details for `LaunchOptions`
Configuration for Playwright browser server launch commands.
### `args` (optional)
Additional arguments to pass to the browser instance. The list of Chromium
flags may be found [online](http://peter.sh/experiments/chromium-command-line-switches/).
e.g.,
config :playwright, LaunchOptions,
args: [
"--use-fake-ui-for-media-stream",
"--use-fake-device-for-media-stream"
]
### `channel` (optional)
Browser distribution channel for Chromium. Supported values are:
- `chrome`
- `chrome-beta`
- `chrome-dev`
- `chrome-canary`
- `msedge`
- `msedge-beta`
- `msedge-dev`
- `msedge-canary`
Read more about using Google Chrome and Microsoft Edge
[online](https://playwright.dev/docs/browsers#google-chrome--microsoft-edge).
e.g.,
config :playwright, LaunchOptions,
channel: "chrome"
### `chromium_sandbox` (optional)
Enable Chromium sandboxing. Defaults to `false`.
e.g.,
config :playwright, LaunchOptions,
chromium_sandbox: true
### `devtools` (optional)
With Chromium, specifies whether to auto-open a "Developer Tools" panel for
each tab. If this option is `true`, the `headless` option will be set to
`false`.
Defaults to `false`.
e.g.,
config :playwright, LaunchOptions,
devtools: true
### `headless` (optional)
Specifies whether to run the browser in "headless" mode. See:
- [headless Chromium](https://developers.google.com/web/updates/2017/04/headless-chrome)
- [headless Firefox](https://developer.mozilla.org/en-US/docs/Mozilla/Firefox/Headless_mode)
Defaults to `true` unless the `devtools` option is `true`.
e.g.,
config :playwright, LaunchOptions,
headless: false # e.g., see a browser window pop up in "dev".
### `downloads_path` (optional)
**WARNING: not yet implemented**
If specified, accepted downloads are written to this directory. Otherwise, a
temporary directory is created and is removed when the browser is closed.
e.g.,
config :playwright, LaunchOptions,
downloads_path: "./doc/downloads"
### `env` (optional)
**WARNING: not yet implemented**
Environment variables that will be made visible to the browser. Defaults to
`System.get_env/0`.
e.g.,
config :playwright, LaunchOptions,
env: ["DEBUG", "true"]
### `executable_path` (optional)
A filesystem path to a browser executable to run instead of the bundled
browser. If `executable_path` is a relative path, then it is resolved relative
to the current working directory.
**Chromium-only**
Playwright can also be used to control the Google Chrome or Microsoft Edge
browsers, but it works best with the bundled version of Chromium. There is no
guarantee that it will work with any other version.
**Use `executable_path` option with extreme caution.**
e.g.,
config :playwright, LaunchOptions,
executable_path: "/Applications/..."
### `playwright_cli_path` (optional)
A filesystem path to the playwright cli.js file to use instead of the default
assets path.
**Chromium-only**
This can be helpful for packaged releases or systems where the node_module may
be located elsewhere on the filesystem.
**Use `playwright_cli_path` option with extreme caution.**
e.g.,
config :playwright, LaunchOptions,
playwright_cli_path: "/Cache/.../playwright/cli.js"
## Details for `PlaywrightTest`
Configuration for usage of `PlaywrightTest.Case`.
### `transport` (optional)
One of `:driver` or `:websocket`, defaults to `:driver`.
Additional configuration may be required depending on the transport
configuration:
- `LaunchOptions` for the `:driver` transport
- `ConnectOptions` for the `:websocket` transport
e.g.,
config :playwright, PlaywrightTest,
transport: :websocket
"""
alias Playwright.Config.Types
alias Playwright.Extra
@typedoc false
@type connect_options :: %{
ws_endpoint: String.t()
}
@typedoc false
@type launch_options :: %{
args: [String.t()],
channel: String.t(),
chromium_sandbox: boolean(),
devtools: boolean(),
downloads_path: String.t(),
env: any(),
executable_path: String.t(),
headless: boolean(),
playwright_cli_path: String.t()
}
@typedoc false
@type playwright_test :: %{
transport: atom()
}
defmodule Types do
  @moduledoc false
  # Struct containers for each configuration section. Fields default to nil
  # so that unconfigured options can be stripped out before use.

  defmodule ConnectOptions do
    @moduledoc false
    defstruct [:ws_endpoint, :playwright_cli_path]
  end

  defmodule LaunchOptions do
    @moduledoc false
    defstruct [
      :args,
      :channel,
      :chromium_sandbox,
      :devtools,
      :downloads_path,
      :executable_path,
      :headless,
      :playwright_cli_path
    ]
  end

  defmodule PlaywrightTest do
    @moduledoc false
    # :driver is the default transport (see the moduledoc).
    defstruct transport: :driver
  end
end
@doc false
# Reads the `config :playwright, ConnectOptions` section as a plain map.
@spec connect_options(boolean()) :: connect_options
def connect_options(camelcase \\ false) do
  case config_for(ConnectOptions, %Types.ConnectOptions{}, camelcase) do
    nil -> %{}
    options -> options
  end
end
@doc false
# Reads the `config :playwright, LaunchOptions` section as a plain map.
@spec launch_options(boolean()) :: map()
def launch_options(camelcase \\ false) do
  case config_for(LaunchOptions, %Types.LaunchOptions{}, camelcase) do
    nil -> %{}
    options -> options
  end
end
@doc false
# Reads the `config :playwright, PlaywrightTest` section as a plain map.
# The previous spec (`:: Types.PlaywrightTest`) denoted the literal module
# atom, not the returned value; config_for/3 returns a map (after clean/
# camelize), so declare map() — consistent with launch_options/1.
@spec playwright_test(boolean()) :: map()
def playwright_test(camelcase \\ false) do
  config_for(PlaywrightTest, %Types.PlaywrightTest{}, camelcase)
end
@doc false
# Resolves one config section: app env merged over struct defaults, with
# empty/nil fields removed; keys optionally camelized for the JS driver.
def config_for(key, mod, camelcase \\ false) do
  configured = Map.new(Application.get_env(:playwright, key, %{}))
  result = configured |> build(mod) |> clean()
  if camelcase, do: camelize(result), else: result
end
# private
# ----------------------------------------------------------------------------
# For each field of the defaults container `mod`, prefer the configured
# value from `source`, falling back to the default when the key is absent
# or explicitly nil.
defp build(source, mod) do
  resolved =
    for {key, default} <- Map.delete(mod, :__struct__), into: %{} do
      case Map.get(source, key) do
        nil -> {key, default}
        value -> {key, value}
      end
    end

  Map.merge(mod, resolved)
end
# Converts the struct to a plain map, dropping nil fields and empty lists
# so unset options never reach the browser launch command.
defp clean(source) do
  for {key, value} <- Map.from_struct(source),
      value != nil,
      value != [],
      into: %{} do
    {key, value}
  end
end
# Recursively camelizes map keys (delegates to Extra.Map) for consumption
# by the JavaScript-side Playwright driver.
defp camelize(source) do
  Extra.Map.deep_camelize_keys(source)
end
end
|
lib/playwright/config.ex
| 0.903826
| 0.422326
|
config.ex
|
starcoder
|
defmodule RethinkDB.Pseudotypes do
  @moduledoc false
  # Converts RethinkDB "pseudotype" maps (tagged with "$reql_type$") into
  # idiomatic Elixir structs, recursing through lists and plain maps.
  # Improvements over the previous version: Map.new/2 replaces the
  # Enum.map |> Enum.into(%{}) round trips, and the scalar catch-all
  # parameter is no longer misleadingly named `string` (it also receives
  # numbers, booleans, etc.).

  defmodule Binary do
    @moduledoc false
    defstruct data: nil

    # Decodes the base64 payload of a BINARY pseudotype into raw bytes.
    def parse(%{"$reql_type$" => "BINARY", "data" => data}) do
      %__MODULE__{data: :base64.decode(data)}
    end
  end

  defmodule Geometry do
    @moduledoc false

    defmodule Point do
      @moduledoc false
      defstruct coordinates: []
    end

    defmodule Line do
      @moduledoc false
      defstruct coordinates: []
    end

    defmodule Polygon do
      @moduledoc false
      defstruct coordinates: []
    end

    def parse(%{"$reql_type$" => "GEOMETRY", "coordinates" => [x, y], "type" => "Point"}) do
      %Point{coordinates: {x, y}}
    end

    def parse(%{"$reql_type$" => "GEOMETRY", "coordinates" => coords, "type" => "LineString"}) do
      %Line{coordinates: Enum.map(coords, &List.to_tuple/1)}
    end

    def parse(%{"$reql_type$" => "GEOMETRY", "coordinates" => coords, "type" => "Polygon"}) do
      # A polygon is a list of rings; each ring is a list of [x, y] points.
      %Polygon{coordinates: for(ring <- coords, do: Enum.map(ring, &List.to_tuple/1))}
    end
  end

  defmodule Time do
    @moduledoc false
    defstruct epoch_time: nil, timezone: nil

    def parse(%{"$reql_type$" => "TIME", "epoch_time" => epoch_time, "timezone" => timezone}) do
      %__MODULE__{epoch_time: epoch_time, timezone: timezone}
    end
  end

  # Recursively replaces pseudotype maps with their struct equivalents.
  def convert_reql_pseudotypes(nil), do: nil

  def convert_reql_pseudotypes(%{"$reql_type$" => "BINARY"} = data), do: Binary.parse(data)

  def convert_reql_pseudotypes(%{"$reql_type$" => "GEOMETRY"} = data), do: Geometry.parse(data)

  def convert_reql_pseudotypes(%{"$reql_type$" => "GROUPED_DATA"} = data),
    do: parse_grouped_data(data)

  def convert_reql_pseudotypes(%{"$reql_type$" => "TIME"} = data), do: Time.parse(data)

  def convert_reql_pseudotypes(list) when is_list(list) do
    Enum.map(list, &convert_reql_pseudotypes/1)
  end

  def convert_reql_pseudotypes(map) when is_map(map) do
    Map.new(map, fn {k, v} -> {k, convert_reql_pseudotypes(v)} end)
  end

  # Scalars (strings, numbers, booleans, atoms) pass through unchanged.
  def convert_reql_pseudotypes(value), do: value

  # GROUPED_DATA arrives as [[group, reduction], ...]; convert it to a map.
  def parse_grouped_data(%{"$reql_type$" => "GROUPED_DATA", "data" => data}) do
    Map.new(data, fn [group, reduction] -> {group, reduction} end)
  end

  # Inverse of parse_grouped_data/1: re-wraps a map as a GROUPED_DATA payload.
  def create_grouped_data(data) when is_map(data) do
    %{"$reql_type$" => "GROUPED_DATA", "data" => Enum.map(data, fn {k, v} -> [k, v] end)}
  end
end
|
lib/rethinkdb/pseudotypes.ex
| 0.611266
| 0.546194
|
pseudotypes.ex
|
starcoder
|
defmodule Nacha.Batch do
  @moduledoc """
  A struct that represents a batch, containing the Batch Header, Batch Control,
  and Entry Detail records.
  Also includes utility functions for building and managing batches.
  """

  # Shadow Kernel.to_string/1 so this module can define its own to_string/1.
  import Kernel, except: [to_string: 1]

  alias Nacha.Records.BatchHeader, as: Header
  alias Nacha.Records.BatchControl, as: Control
  alias Nacha.Records.EntryDetail
  alias Nacha.Entry

  # NACHA transaction codes: "22"/"32" = checking/savings credit,
  # "27"/"37" = checking/savings debit.
  @credit_codes ["22", "32"]
  @debit_codes ["27", "37"]
  @service_class_codes %{mixed: 200, credit_only: 220, debit_only: 225}

  defstruct [:header_record, :control_record, errors: [], entries: []]

  @typep entry_list :: list(Entry.t())

  @type t :: %__MODULE__{
          header_record: Header.t(),
          entries: entry_list,
          control_record: Control.t(),
          errors: list({atom, String.t()})
        }

  defmodule Offset do
    # Describes the bank account used to balance a batch whose credits and
    # debits do not net to zero.
    @type t :: %__MODULE__{
            routing_number: String.t(),
            account_number: String.t(),
            account_type: :checking | :savings
          }

    @enforce_keys [:routing_number, :account_number, :account_type]
    defstruct @enforce_keys
  end

  @doc """
  Build a valid batch with necessary generated values.
  """
  @spec build(entry_list, %{atom => any}, Offset.t() | nil) ::
          {:ok, t()} | {:error, t()}
  def build(entries, params, offset \\ nil) do
    params
    |> build_params(entries)
    |> do_build
    # Balance the batch with an offset entry only when one was supplied.
    |> (fn batch ->
          if is_nil(offset) do
            batch
          else
            add_offset(batch, offset)
          end
        end).()
    |> validate
  end

  @spec to_string(__MODULE__.t()) :: String.t()
  def to_string(%__MODULE__{} = batch),
    do: batch |> to_iolist |> Kernel.to_string()

  # Multiple batches are rendered newline-separated.
  @spec to_iolist(list(__MODULE__.t())) :: iolist
  def to_iolist([%__MODULE__{} | _] = batches),
    do: batches |> Stream.map(&to_iolist/1) |> Enum.intersperse("\n")

  # Render order is fixed by the NACHA format: header, entries, control.
  @spec to_iolist(__MODULE__.t()) :: iolist
  def to_iolist(%__MODULE__{} = batch) do
    [
      Header.to_iolist(batch.header_record),
      "\n",
      Entry.to_iolist(batch.entries),
      "\n",
      Control.to_iolist(batch.control_record)
    ]
  end

  # Derives all computed batch fields (counts, totals, hash, service class)
  # and merges them over the caller-supplied params.
  defp build_params(params, entries) do
    {credit_total, debit_total} = totals(entries)

    Map.merge(
      params,
      %{
        entries: entries,
        entry_count: length(entries),
        entry_hash: calculate_hash(entries),
        total_credits: credit_total,
        total_debits: debit_total,
        service_class_code: calculate_scc(credit_total, debit_total)
      }
    )
  end

  defp do_build(params) do
    %__MODULE__{
      header_record: build_header(params),
      entries: params.entries,
      control_record: build_control(params)
    }
  end

  @spec valid?(__MODULE__.t()) :: boolean
  def valid?(batch), do: match?({:ok, _}, validate(batch))

  defp build_header(params), do: Header |> struct(params)

  defp build_control(params), do: Control |> struct(params)

  # The offset entry's trace number is the last entry's trace number + 1.
  defp get_offset_trace_number(entries) when is_list(entries) do
    entries
    |> Enum.at(Enum.count(entries) - 1)
    |> (& &1.record.trace_number).()
    |> :erlang.+(1)
  end

  # no need to add offset entry if total_debits and total_credits are the same
  defp add_offset(
         %__MODULE__{
           control_record: %{
             total_debits: amount,
             total_credits: amount
           }
         } = batch,
         _
       ) do
    batch
  end

  # Appends a single balancing entry so credits equal debits, then rewrites
  # the header/control records as a mixed (200) batch with updated totals.
  defp add_offset(
         %__MODULE__{
           header_record: header_record,
           entries: entries,
           control_record:
             %{
               total_debits: total_debits,
               total_credits: total_credits
             } = control_record
         },
         %Offset{
           account_type: account_type
         } = offset
       ) do
    # Pick the transaction code by account type and by which side is short;
    # the offset amount is the absolute difference, and both totals end up
    # at the larger of the two (max_amount).
    {transaction_code, offset_amount, max_amount} =
      case {account_type, total_debits - total_credits} do
        {:checking, amount} when amount > 0 ->
          {"22", amount, total_debits}

        {:checking, amount} when amount < 0 ->
          {"27", -amount, total_credits}

        {:savings, amount} when amount > 0 ->
          {"32", amount, total_debits}

        {:savings, amount} when amount < 0 ->
          {"37", -amount, total_credits}
      end

    # A routing number is 8 digits of RDFI id plus a trailing check digit.
    {rdfi_id, check_digit} = String.split_at(offset.routing_number, -1)

    offset_entry_detail = %EntryDetail{
      transaction_code: transaction_code,
      rdfi_id: rdfi_id,
      check_digit: String.to_integer(check_digit),
      account_number: offset.account_number,
      amount: offset_amount,
      individual_id: "",
      individual_name: "OFFSET",
      standard_entry_class: header_record.standard_entry_class,
      # ODFI routing number
      trace_id: header_record.odfi_id,
      trace_number: get_offset_trace_number(entries)
    }

    new_entries = entries ++ [Entry.build(offset_entry_detail, [])]

    %__MODULE__{
      header_record: %{
        header_record
        | service_class_code: @service_class_codes.mixed
      },
      entries: new_entries,
      control_record: %{
        control_record
        | service_class_code: @service_class_codes.mixed,
          entry_hash: calculate_hash(new_entries),
          entry_count: length(new_entries),
          total_debits: max_amount,
          total_credits: max_amount
      }
    }
  end

  # Validates header, control, and every entry; on failure returns the batch
  # with the accumulated error list attached.
  defp validate(
         %{header_record: header, control_record: control, entries: entries} =
           batch
       ) do
    case {Header.validate(header), Control.validate(control),
          Enum.all?(entries, &Entry.valid?/1)} do
      {%{valid?: true} = header, %{valid?: true} = control, true} ->
        {:ok, %{batch | header_record: header, control_record: control}}

      {header, control, is_entries_valid} ->
        {:error, consolidate_errors(batch, header, control, is_entries_valid)}
    end
  end

  defp consolidate_errors(batch, header, control, is_entries_valid) do
    errors = Enum.uniq(header.errors ++ control.errors)

    errors =
      if is_entries_valid do
        errors
      else
        ["contain invalid entry" | errors]
      end

    %{
      batch
      | header_record: header,
        control_record: control,
        errors: errors
    }
  end

  # Returns {credit_total, debit_total} for the entries.
  defp totals(entries) do
    entries
    |> Enum.group_by(&credit_or_debit/1, &get_amount/1)
    |> sums()
  end

  # NACHA entry hash: sum of all RDFI ids, truncated to its last 10 digits.
  defp calculate_hash(entries) do
    entries
    |> Enum.map(&String.to_integer(&1.record.rdfi_id))
    |> Enum.sum()
    |> Integer.digits()
    |> Enum.take(-10)
    |> Integer.undigits()
  end

  defp calculate_scc(0, debits) when debits > 0,
    do: @service_class_codes.debit_only

  defp calculate_scc(credits, 0) when credits > 0,
    do: @service_class_codes.credit_only

  defp calculate_scc(_, _), do: @service_class_codes.mixed

  defp credit_or_debit(%{record: %{transaction_code: tx}})
       when tx in @credit_codes,
       do: :credit

  defp credit_or_debit(%{record: %{transaction_code: tx}})
       when tx in @debit_codes,
       do: :debit

  defp credit_or_debit(_), do: :error

  defp get_amount(%{record: %{amount: amount}}), do: amount

  defp sums(amounts), do: {sum(amounts, :credit), sum(amounts, :debit)}

  defp sum(amounts, type), do: amounts |> Map.get(type, []) |> Enum.sum()
end
|
lib/nacha/batch.ex
| 0.837088
| 0.488344
|
batch.ex
|
starcoder
|
defmodule MiniCBOR do
  @moduledoc """
  Wrapper for CBOR encoding library to work with values encoded using structures optimised by Rust
  https://twittner.gitlab.io/minicbor/minicbor_derive/index.html library

  Changes maps keys for encoded values to integers.
  Encodes atoms as index integers.

  Map keys optimization:
  Given data map `%{field1: 100, field2: "hi"}`
  and schema `{:map, [:field1, :field2]}` // same as `{:map, [{:field1, :noschema}, {:field2, :noschema}]}`
  optimizes keys as `%{1 => 100, 2 => "hi"}`

  Enum atoms optimization:
  Given atom value `:other_thing`
  and schema `{:enum, [:one_thing, :other_thing, :another_thing]}`
  optimizes value as `1`

  Supports nested schemas in map key mapping:
  With data map `%{path: "/resource", method: :get}`
  and schema `{:map, [:path, {:method, {:enum, [:get, :post]}}]}`
  optimizes map as `%{1 => "/resource", 2 => 0}`

  When encoding a map or struct, the field `0` is reserved for use of type-tags (the tag feature is currently disabled on rust,
  and not implemented on elixir)
  """

  # `{:list, schema()}` added: both schema compilers accept it (see
  # struct_schema/1 and optimized_schema/1) but it was missing from the type.
  @type schema() ::
          {:map, [atom() | {atom(), schema()}]}
          | {:enum, [atom()]}
          | {:list, schema()}
          | :noschema

  # Placeholder key occupying index 0 of every map schema; see @moduledoc.
  @reserved_tag_field :minicbor_tag_reserved

  @deprecated "Use Ockam.TypedCBOR instead"
  @doc """
  Rewrites `struct` keys and enum atoms to integers per `schema`, then
  CBOR-encodes the result.
  """
  def encode(struct, schema) do
    schema_map = struct_schema(schema)
    optimized = rekey_struct(struct, schema_map)
    CBOR.encode(optimized)
  end

  @deprecated "Use Ockam.TypedCBOR instead"
  @doc """
  Decodes CBOR `binary` and restores the original keys/atoms per `schema`.
  Returns `{:ok, value, rest}` or the CBOR decoder's error.
  """
  def decode(binary, schema) do
    with {:ok, optimized, rest} <- CBOR.decode(binary) do
      schema_map = optimized_schema(schema)
      struct = rekey_optimized(optimized, schema_map)
      {:ok, struct, rest}
    end
  end

  defp reserve_tag_field(keys) when is_list(keys) do
    # As a workaround, set this unused field at position 0.
    # Later we will use position 0 to carry tag information.
    [@reserved_tag_field | keys]
  end

  @doc """
  Compiles a schema for encoding: maps each key (or enum option) to its
  integer index, recursing into nested schemas.
  """
  def struct_schema({:map, keys}) when is_list(keys) do
    mapping =
      reserve_tag_field(keys)
      |> Enum.with_index(fn
        {key, inner_schema}, index -> {key, {index, struct_schema(inner_schema)}}
        key, index -> {key, index}
      end)
      |> Map.new()

    {:map, mapping}
  end

  def struct_schema({:enum, options}) when is_list(options) do
    mapping =
      options
      |> Enum.with_index()
      |> Map.new()

    {:enum, mapping}
  end

  def struct_schema({:list, schema}) do
    {:list, struct_schema(schema)}
  end

  # Bug fix: the moduledoc promises `{:field, :noschema}` is equivalent to a
  # bare `:field`, but the explicit form raised FunctionClauseError because
  # no :noschema clause existed. Pass :noschema through unchanged.
  def struct_schema(:noschema), do: :noschema

  @doc """
  Compiles a schema for decoding: maps each integer index back to its key
  (or enum option), recursing into nested schemas.
  """
  def optimized_schema({:map, keys}) when is_list(keys) do
    mapping =
      reserve_tag_field(keys)
      |> Enum.with_index(fn
        {key, inner_schema}, index -> {index, {key, optimized_schema(inner_schema)}}
        key, index -> {index, key}
      end)
      |> Map.new()

    {:map, mapping}
  end

  def optimized_schema({:enum, options}) when is_list(options) do
    mapping =
      options
      |> Enum.with_index(fn key, index -> {index, key} end)
      |> Map.new()

    {:enum, mapping}
  end

  def optimized_schema({:list, schema}) do
    {:list, optimized_schema(schema)}
  end

  # Bug fix: mirror of the struct_schema/1 :noschema clause above.
  def optimized_schema(:noschema), do: :noschema

  @doc """
  Replaces atom keys with their integer indexes (and enum atoms with their
  indexes) according to a compiled schema. Keys absent from the schema are
  dropped.
  """
  def rekey_struct(struct, :noschema) do
    struct
  end

  def rekey_struct(struct, {:list, schema}) do
    Enum.map(struct, fn val ->
      rekey_struct(val, schema)
    end)
  end

  def rekey_struct(struct, {:map, schema_map}) do
    struct
    # because enum is not implemented for structs
    |> as_map()
    # Just in case
    |> Map.delete(@reserved_tag_field)
    |> Enum.flat_map(fn {key, val} ->
      case Map.get(schema_map, key) do
        nil ->
          []

        index when is_integer(index) ->
          [{index, val}]

        {index, inner_schema} when is_integer(index) ->
          [{index, rekey_struct(val, inner_schema)}]
      end
    end)
    |> Map.new()
  end

  def rekey_struct(atom, {:enum, option_map}) when is_atom(atom) do
    Map.fetch!(option_map, atom)
  end

  @doc """
  Inverse of `rekey_struct/2`: restores atom keys and enum atoms from their
  integer indexes. Indexes absent from the schema are dropped.
  """
  def rekey_optimized(optimized, :noschema) do
    optimized
  end

  def rekey_optimized(optimized, {:list, schema}) do
    Enum.map(optimized, fn val ->
      rekey_optimized(val, schema)
    end)
  end

  def rekey_optimized(optimized, {:map, schema_map}) do
    Enum.flat_map(optimized, fn {index, val} ->
      case Map.get(schema_map, index) do
        nil ->
          []

        {key, inner_schema} ->
          [{key, rekey_optimized(val, inner_schema)}]

        key ->
          [{key, val}]
      end
    end)
    |> Map.new()
  end

  def rekey_optimized(index, {:enum, option_map}) when is_integer(index) do
    Map.fetch!(option_map, index)
  end

  # Structs don't implement Enumerable, so normalize them to plain maps.
  defp as_map(map) when is_struct(map) do
    Map.from_struct(map)
  end

  defp as_map(map) when is_map(map) do
    map
  end
end
|
implementations/elixir/ockam/ockam/lib/ockam/mini_cbor.ex
| 0.86923
| 0.681548
|
mini_cbor.ex
|
starcoder
|
defmodule Node do
  @moduledoc """
  Functions related to Erlang nodes.
  """

  @doc """
  Returns the current node. It returns the same as the built-in node().
  """
  def self do
    :erlang.node()
  end

  @doc """
  Returns true if the local node is alive; that is, if the node can be
  part of a distributed system. Otherwise, it returns false.
  """
  def alive? do
    :erlang.is_alive()
  end

  @doc """
  Returns a list of all visible nodes in the system, excluding
  the local node. Same as list(visible).
  """
  def list do
    :erlang.nodes()
  end

  @doc """
  Returns a list of nodes according to argument given. The result
  returned when the argument is a list, is the list of nodes
  satisfying the disjunction(s) of the list elements.
  See http://www.erlang.org/doc/man/erlang.html#nodes-1 for more info.
  """
  def list(args) do
    :erlang.nodes(args)
  end

  @doc """
  Monitors the status of the node. If flag is true, monitoring is
  turned on. If flag is false, monitoring is turned off.
  See http://www.erlang.org/doc/man/erlang.html#monitor_node-2 for more info.
  """
  def monitor(node, flag) do
    :erlang.monitor_node(node, flag)
  end

  @doc """
  Behaves as monitor_node/2 except that it allows an extra
  option to be given, namely :allow_passive_connect.
  See http://www.erlang.org/doc/man/erlang.html#monitor_node-3 for more info.
  """
  def monitor(node, flag, options) do
    :erlang.monitor_node(node, flag, options)
  end

  @doc """
  Forces the disconnection of a node. This will appear to the `node` as if
  the local node has crashed. This BIF is mainly used in the Erlang network
  authentication protocols. Returns true if disconnection succeeds, otherwise
  false. If the local node is not alive, the function returns ignored.
  See http://www.erlang.org/doc/man/erlang.html#disconnect_node-1 for more info.
  """
  def disconnect(node) do
    :erlang.disconnect_node(node)
  end

  @doc """
  Returns the pid of a new process started by the application of `fun`
  on `node`. If `node` does not exist, a useless pid is returned.
  Check http://www.erlang.org/doc/man/erlang.html#spawn_opt-4 for
  the list of available options.
  """
  # Fix: `opts // []` is pre-1.0 default-argument syntax and no longer
  # parses; the modern equivalent is `opts \\ []`.
  def spawn(node, fun, opts \\ []) do
    :erlang.spawn_opt(node, fun, opts)
  end

  @doc """
  Returns the pid of a new process started by the application of
  `module.function(args)` on `node`. If `node` does not exists, a useless
  pid is returned.
  Check http://www.erlang.org/doc/man/erlang.html#spawn_opt-4 for
  the list of available options.
  """
  def spawn(node, module, fun, args, opts \\ []) do
    # Bug fix: :erlang.spawn/5 does not exist — the options-taking remote
    # spawn is :erlang.spawn_opt/5, matching the documentation above and
    # the fun-based spawn/3 clause.
    :erlang.spawn_opt(node, module, fun, args, opts)
  end

  @doc """
  Returns the pid of a new process started by the application of `fun`
  on `node`. A link is created between the calling process and the
  new process, atomically. If `node` does not exist, a useless pid is returned
  (and due to the link, an exit signal with exit reason :noconnection will be
  received).
  """
  def spawn_link(node, fun) do
    :erlang.spawn_link(node, fun)
  end

  @doc """
  Returns the pid of a new process started by the application of
  `module.function(args)` on `node`. A link is created between the calling
  process and the new process, atomically. If `node` does not exist, a useless
  pid is returned (and due to the link, an exit signal with exit reason
  :noconnection will be received).
  """
  def spawn_link(node, module, fun, args) do
    :erlang.spawn_link(node, module, fun, args)
  end
end
|
lib/elixir/lib/node.ex
| 0.796253
| 0.531209
|
node.ex
|
starcoder
|
defmodule Xema.Ref do
  @moduledoc """
  This module contains a struct and functions to represent and handle
  references.
  """

  alias Xema.{Ref, Schema, Utils, Validator}

  require Logger

  @typedoc """
  A reference contains a `pointer` and an optional `uri`.
  """
  @type t :: %__MODULE__{
          pointer: String.t(),
          uri: URI.t() | nil
        }

  # pointer: JSON-pointer-style reference string; uri: resolved base URI
  # (nil for fragment-only references, see new/2).
  defstruct pointer: nil,
            uri: nil

  # Inlined: these two private helpers sit on the validation hot path.
  @compile {:inline, fetch_from_opts!: 2, fetch_by_key!: 3}

  @doc """
  Creates a new reference from the given `pointer`.
  """
  @spec new(String.t()) :: Ref.t()
  def new(pointer), do: %Ref{pointer: pointer}

  @doc """
  Creates a new reference from the given `pointer` and `uri`.
  """
  @spec new(String.t(), URI.t() | nil) :: Ref.t()
  # A fragment-only pointer ("#...") never carries a URI.
  def new("#" <> _ = pointer, _uri), do: new(pointer)

  def new(pointer, uri) when is_binary(pointer),
    do: %Ref{
      pointer: pointer,
      # Resolves the pointer against the base URI (see Xema.Utils.update_uri/2).
      uri: Utils.update_uri(uri, pointer)
    }

  @doc """
  Validates the given value with the referenced schema.
  """
  @spec validate(Ref.t(), any, keyword) ::
          :ok | {:error, map}
  def validate(ref, value, opts) do
    # Resolving the ref may change the :root in opts (remote schemas).
    {schema, opts} = fetch_from_opts!(ref, opts)
    Validator.validate(schema, value, opts)
  end

  @doc """
  Returns the schema and the root for the given `ref` and `xema`.
  """
  @spec fetch!(Ref.t(), struct, struct | nil) :: {struct | atom, struct}
  def fetch!(ref, master, root) do
    case fetch_by_key!(key(ref), master, root) do
      # Key resolved directly to a schema — done.
      {%Schema{}, _root} = schema ->
        schema

      # Key resolved to another xema: if the ref carries a fragment,
      # look the fragment up in that xema's refs, and that xema becomes
      # the new root.
      {xema, root} ->
        case fragment(ref) do
          nil ->
            {xema, root}

          fragment ->
            {Map.fetch!(xema.refs, fragment), xema}
        end
    end
  end

  @doc """
  Returns the reference key for a `Ref` or an `URI`.
  """
  @spec key(ref :: Ref.t() | URI.t()) :: String.t()
  def key(%Ref{pointer: pointer, uri: nil}), do: pointer

  def key(%Ref{uri: uri}), do: key(uri)

  # The key is the URI without its fragment; the fragment is handled
  # separately by fragment/1.
  def key(%URI{} = uri), do: uri |> Map.put(:fragment, nil) |> URI.to_string()

  # Returns the ref's URI fragment as "#<fragment>", or nil when there is
  # no (non-empty) fragment.
  def fragment(%Ref{uri: nil}), do: nil

  def fragment(%Ref{uri: %URI{fragment: nil}}), do: nil

  def fragment(%Ref{uri: %URI{fragment: ""}}), do: nil

  def fragment(%Ref{uri: %URI{fragment: fragment}}), do: "##{fragment}"

  # "#" alone points at the current root schema itself.
  defp fetch_from_opts!(%Ref{pointer: "#", uri: nil}, opts),
    do: {opts[:root], opts}

  # A local pointer (no URI) is looked up in the current root's refs.
  defp fetch_from_opts!(%Ref{pointer: pointer, uri: nil}, opts),
    do: {Map.fetch!(opts[:root].refs, pointer), opts}

  # A URI-carrying ref may resolve into a different (remote) schema;
  # whatever it resolves to becomes the new :root for nested validation.
  defp fetch_from_opts!(%Ref{} = ref, opts) do
    case fetch!(ref, opts[:master], opts[:root]) do
      {:root, root} ->
        {root, Keyword.put(opts, :root, root)}

      {%Schema{} = schema, root} ->
        {schema, Keyword.put(opts, :root, root)}

      {xema, _} ->
        {xema, Keyword.put(opts, :root, xema)}
    end
  end

  # With no root, "#" refers to the master schema itself.
  defp fetch_by_key!("#", master, nil), do: {master, master}

  defp fetch_by_key!("#", _master, root), do: {root, root}

  defp fetch_by_key!(key, master, nil),
    do: {Map.fetch!(master.refs, key), master}

  # Prefer the root's refs; fall back to the master's (raising if absent).
  defp fetch_by_key!(key, master, root) do
    case Map.get(root.refs, key) do
      nil -> {Map.fetch!(master.refs, key), master}
      schema -> {schema, root}
    end
  end
end
defimpl Inspect, for: Xema.Ref do
  # Renders a Xema.Ref like a named map, hiding any fields that are nil.
  def inspect(ref, opts) do
    populated_fields =
      ref
      |> Map.from_struct()
      |> Enum.reject(fn {_key, value} -> is_nil(value) end)
      |> Map.new()

    Inspect.Map.inspect(populated_fields, "Xema.Ref", opts)
  end
end
|
lib/xema/ref.ex
| 0.884461
| 0.639764
|
ref.ex
|
starcoder
|
defmodule Day16 do
  @moduledoc """
  Advent of Code 2017, day 16: a line of programs dances by spinning,
  exchanging (by position) and partnering (by name).
  """

  @doc """
  Runs the dance described by `input` once over the programs named by the
  string `progs` and returns the final ordering as a string.
  """
  def solveA(input, progs) do
    input
    |> parse_moves()
    |> dance(parse_progs(progs))
    |> Enum.map_join(&Atom.to_string/1)
  end

  @doc """
  Parses the comma-separated move list into `{:s, n}`, `{:x, a, b}` and
  `{:p, a, b}` tuples.
  """
  def parse_moves(input) do
    input
    |> String.trim()
    |> String.split(",")
    |> Enum.map(&parse_move/1)
  end

  # Spin: the last `count` programs move to the front.
  defp parse_move("s" <> count), do: {:s, String.to_integer(count)}

  # Exchange: swap the programs at two positions.
  defp parse_move("x" <> positions) do
    [a, b] =
      positions
      |> String.split("/")
      |> Enum.map(&String.to_integer/1)

    {:x, a, b}
  end

  # Partner: swap two programs by name.
  defp parse_move("p" <> partners) do
    [a, b] = String.split(partners, "/")
    {:p, String.to_atom(a), String.to_atom(b)}
  end

  @doc """
  Parses a program string like "abcde" into a list of name atoms.
  """
  def parse_progs(progs) do
    progs
    |> String.split("", trim: true)
    |> Enum.map(&String.to_atom/1)
  end

  @doc """
  Runs the dance `n` times, using cycle detection to fast-forward huge `n`.
  Returns the final ordering as a string.
  """
  def solveB(progs, input, n) do
    progs
    |> parse_progs()
    |> do_solveB(parse_moves(input), MapSet.new(), 0, n)
    |> Enum.map_join(&Atom.to_string/1)
  end

  @doc false
  def do_solveB(progs, moves, seen, i, n) do
    cond do
      i == n ->
        progs

      progs in seen ->
        # Bug fix: the original printed a hint and returned IO.puts's :ok,
        # which crashed solveB's Enum.map. Every move is a bijection, so the
        # first repeated state is the starting state and the cycle length is
        # `i`; finish only the remaining steps modulo that cycle.
        remaining = rem(n - i, i)

        Enum.reduce(List.duplicate(:step, remaining), progs, fn _step, acc ->
          dance(moves, acc)
        end)

      true ->
        # Fix: the original used `True` (the alias atom, accidentally
        # truthy) where `true` was meant.
        do_solveB(dance(moves, progs), moves, MapSet.put(seen, progs), i + 1, n)
    end
  end

  @doc """
  Applies each move in order to the program list and returns the result.
  """
  def dance([], progs), do: progs

  def dance([{:s, x} | rest], progs) do
    {front, back} = Enum.split(progs, -x)
    dance(rest, back ++ front)
  end

  def dance([{:x, pos_a, pos_b} | rest], progs) do
    a = Enum.at(progs, pos_a)
    b = Enum.at(progs, pos_b)

    progs =
      progs
      |> List.replace_at(pos_a, b)
      |> List.replace_at(pos_b, a)

    dance(rest, progs)
  end

  def dance([{:p, a, b} | rest], progs) do
    progs =
      Enum.map(progs, fn
        ^a -> b
        ^b -> a
        other -> other
      end)

    dance(rest, progs)
  end
end
|
2017/elixir/day16/lib/day16.ex
| 0.605449
| 0.576482
|
day16.ex
|
starcoder
|
defmodule Identicon do
  @moduledoc """
  Builds a identicon from an input string. An Identicon is a visual representation of a
  hash value, usually of an IP address, that serves to identify a user of a computer
  system as a form of avatar while protecting the users' privacy. The original
  Identicon was a 9-block graphic, and the representation has been extended to other
  graphic forms by third parties.
  This module generates a 250x250 identicon, where 50x50 is a dimension of a cell.

  ## Examples

      Identicon.create("Joe")
      #=> :ok
  """

  @doc """
  Creates an identicon png image from an input string. The filename of the image is equal
  to the input string.
  """
  def create(string) do
    string
    |> hash
    |> pick_color
    |> build_grid
    |> filter_odd_squares
    |> build_pixel_map
    |> draw_image
    |> save_image(string)
  end

  @doc """
  Returns a MD5 hash of a string.
  """
  def hash(string) do
    hex = :crypto.hash(:md5, string)
    |> :binary.bin_to_list

    %Identicon.Image{hex: hex}
  end

  @doc """
  Creates a image struct with RGB values from the MD5 hash (input).
  Red is the first, green the second and blue the third btye in the MD5 hash.
  """
  def pick_color(%Identicon.Image{hex: [red, green, blue | _tail]} = image) do
    %Identicon.Image{image | color: {red, green, blue}}
  end

  @doc """
  Creates a image struct with grid value from MD5 hash (input).
  """
  def build_grid(%Identicon.Image{hex: hex} = image) do
    grid =
      hex
      # Fix: Enum.chunk/2 is deprecated/removed. chunk_every with :discard
      # keeps the original behaviour of dropping the incomplete trailing
      # chunk (MD5 is 16 bytes, so the 16th byte is ignored — 5 rows of 3).
      |> Enum.chunk_every(3, 3, :discard)
      |> Enum.map(&mirror_row/1)
      |> List.flatten
      |> Enum.with_index

    %Identicon.Image{image | grid: grid}
  end

  @doc """
  Mirror a list around its middle value.

  ## Examples

      iex> Identicon.mirror_row([42, 23, 22])
      [42, 23, 22, 23, 42]
  """
  def mirror_row([first, second | _tail] = row) do
    row ++ [second, first]
  end

  @doc """
  Return an image struct where the grid only has even values.
  """
  def filter_odd_squares(%Identicon.Image{grid: grid} = image) do
    grid = Enum.filter grid, fn({code, _index}) ->
      rem(code, 2) == 0
    end

    %Identicon.Image{image | grid: grid}
  end

  @doc """
  Build a pixel map for the identicon. Each cell is represented by
  top left and bottom right corners (touple). The function returns
  all tuples needed to create the identicon image.
  """
  def build_pixel_map(%Identicon.Image{grid: grid} = image) do
    pixel_map = Enum.map grid, fn({_code, index}) ->
      cell_width = 50
      grid_width = 5

      horizontal = rem(index, grid_width) * cell_width
      vertical = div(index, grid_width) * cell_width

      top_left = {horizontal, vertical}
      # Renamed from the misleading `bottom_left` — this is the opposite
      # (bottom-right) corner of the cell.
      bottom_right = {horizontal + cell_width, vertical + cell_width}

      {top_left, bottom_right}
    end

    %Identicon.Image{image | pixel_map: pixel_map}
  end

  @doc """
  Uses the Image struct to draw the identicon in png format with EGD
  (Erlang Graphical Drawer). Check the official documentation for more
  information (http://erlang.org/doc/man/egd.html).
  """
  def draw_image(%Identicon.Image{color: color, pixel_map: pixel_map}) do
    image = :egd.create(250, 250)
    fill = :egd.color(color)

    Enum.each pixel_map, fn({top_left, bottom_right}) ->
      :egd.filledRectangle(image, top_left, bottom_right, fill)
    end

    :egd.render(image)
  end

  @doc """
  Takes an Image struct and a filename and saves the image in a png format.
  """
  def save_image(image, filename) do
    # Bug fix: the filename argument was ignored and a corrupted literal
    # ("#(unknown).png") was written; interpolate the requested name.
    File.write("#{filename}.png", image)
  end
end
|
lib/identicon.ex
| 0.868715
| 0.664662
|
identicon.ex
|
starcoder
|
defmodule Roman do
  @moduledoc """
  Functions to work with roman numerals in the `1..3999` range
  """

  # numerals.txt pairs each integer value with its numeral text; it is read
  # and parsed once at compile time into @numeral_pairs.
  @external_resource "lib/numerals.txt"
  @numeral_pairs @external_resource
                 |> File.stream!()
                 |> Stream.map(&String.split/1)
                 |> Stream.map(fn [val, num] -> {String.to_integer(val), num} end)
                 |> Enum.to_list()

  @type error :: {:error, error_tuple}
  @type error_tuple :: {atom, String.t()}
  @type numeral :: String.t()

  @doc false
  @spec numeral_pairs() :: [{integer, numeral}]
  def numeral_pairs, do: @numeral_pairs

  @doc """
  Decodes a roman numeral string into the corresponding integer value.
  Strings with non-uppercase letters will only be decoded if the `:ignore_case`
  option is set to `true`.

  ## Options

  * `:explain` (boolean) - if `true`, error reasons will be more specific in
    indicating what the problem with the provided numeral is (slightly
    degrades performance in error cases, as the numeral must be inspected to
    determine the cause of the error). If `false` (default), a generic error
    reason will be returned in most cases (see "Error reasons" below for more
    information).
  * `:ignore_case` (boolean) - if `true`, strings will be decoded regardless
    of casing. If `false` (default), strings containing a lowercase letter
    will return an error.
  * `:strict` (boolean) - if `true` (default), strings not conforming to
    [composition rules](composition_rules.html) will return an error. If `false` the numeral components
    will be decomposed and added, and the result will be returned.
  * `:zero` - if `true`, the numeral N will be decoded as 0. This option has
    no influence on decoding other numerals.

  Default values for options can be set in `config.exs` using the `:roman`
  application and the `:default_flags` key, for example:
  `config :roman, :default_flags, %{ignore_case: true}`
  The values will remain overrideable on a per-call basis by passing the
  desired option value.

  This function returns:
  - `{:ok, value}` - the integer value of the provided numeral.
  - `{:error, reason}` - the provided numeral is invalid.

  ## Error reasons

  Possible error reasons are listed below.

  When `:explain` is `false` (default value):
  - `{:empty_string, _}`: string is empty.
  - `{:invalid_numeral, _}`: string isn't a valid numeral.

  When `:explain` is `true`:
  - `{:empty_string, _}`: string is empty.
  - `{:invalid_letter, _}`: if the provided string contains a
    character that isn't one of I, V, X, L, C, D, M.
  - `{:repeated_vld, _}`: string contains more than one instance each
    of letters V, L, and D (i.e. numerals corresponding to numbers starting
    with a 5). Cannot happen if `:strict` is `false`.
  - `{:identical_letter_seq_too_long, _}`: string has a sequence of 4
    or more identical letters. Cannot happen if `:strict` is `false`.
  - `{:sequence_increasing, _}`: string contains a value greater than
    one appearing before it (rule applies to combined value in subtractive
    case). Cannot happen if `:strict` is `false`.
  - `{:value_greater_than_subtraction, _}`: string contains a value
    matching or exceeding a previously subtracted value. Cannot happen if
    `:strict` is `false`.

  For more information on how roman numerals should be composed according to
  the `:strict` rules, see the [composition rules](composition_rules.html)
  documentation page.

  ### Examples

      iex> Roman.decode("MMMDCCCXCVIII")
      {:ok, 3898}
      iex> Roman.decode("vi", ignore_case: true)
      {:ok, 6}
      iex> Roman.decode("N", zero: true)
      {:ok, 0}
      iex> Roman.decode("IIII", strict: false)
      {:ok, 4}
      iex> Roman.decode("LLVIV")
      {:error, {:invalid_numeral, "numeral is invalid"}}
      iex> Roman.decode("LLVIV", explain: true)
      {:error, {:repeated_vld,
      "letters V, L, and D can appear only once, but found several instances of L, V"}}
  """
  @spec decode(String.t(), keyword) :: {:ok, integer} | Roman.error()
  defdelegate decode(numeral, options \\ []), to: __MODULE__.Decoder

  @doc """
  Similar to `decode/1` but raises an error if the numeral could not be
  decoded.
  If it succeeds in decoding the numeral, it returns corresponding value.
  """
  @spec decode!(String.t(), keyword) :: integer | no_return
  def decode!(numeral, options \\ []) do
    case decode(numeral, options) do
      {:ok, val} ->
        val

      {:error, {_, message}} ->
        raise ArgumentError, message: message
    end
  end

  @doc """
  Encodes an integer into a roman numeral.
  Only values in the `1..3999` range can be encoded.

  This function returns:
  - `{:ok, numeral}` - the numeral corresponding to the provided integer.
  - `{:error, {:invalid_integer, message}}` - the provided integer is not within
    the acceptable `1..3999` range.

  ### Examples

      iex> Roman.encode(3898)
      {:ok, "MMMDCCCXCVIII"}
      iex> Roman.encode(4000)
      {:error, {:invalid_integer,
      "cannot encode values outside of range 1..3999"}}
  """
  @spec encode(integer) :: {:ok, Roman.numeral()} | Roman.error()
  defdelegate encode(integer), to: __MODULE__.Encoder

  @doc """
  Similar to `encode/1` but raises an error if the integer could not be
  encoded.
  If it succeeds in encoding the numeral, it returns the corresponding numeral.
  """
  @spec encode!(integer) :: Roman.numeral() | no_return
  def encode!(int) do
    case encode(int) do
      {:ok, numeral} ->
        numeral

      {:error, {_, message}} ->
        raise ArgumentError, message: message
    end
  end

  @doc """
  Returns a boolean indicating whether the provided string is a valid numeral.
  The return value indicates whether a call to `decode/2` would be successful.
  Accepts the same options and returns the same error values as `decode/2`.

  ```
  iex> Roman.numeral?("VI")
  true
  iex> Roman.numeral?("FOO")
  false
  iex> Roman.numeral?("x")
  false
  iex> Roman.numeral?("x", ignore_case: true)
  true
  iex> Roman.numeral?("VXL", strict: false)
  true
  ```
  """
  @spec numeral?(String.t(), keyword) :: boolean
  def numeral?(string, options \\ []) do
    case decode(string, options) do
      {:ok, _} -> true
      _ -> false
    end
  end
end
|
lib/roman.ex
| 0.929087
| 0.785679
|
roman.ex
|
starcoder
|
defmodule AWS.ServiceDiscovery do
@moduledoc """
AWS Cloud Map lets you configure public DNS, private DNS, or HTTP
namespaces that your microservice applications run in. When an instance of
the service becomes available, you can call the AWS Cloud Map API to
register the instance with AWS Cloud Map. For public or private DNS
namespaces, AWS Cloud Map automatically creates DNS records and an optional
health check. Clients that submit public or private DNS queries, or HTTP
requests, for the service receive an answer that contains up to eight
healthy records.
"""
@doc """
Creates an HTTP namespace. Service instances that you register using an
HTTP namespace can be discovered using a `DiscoverInstances` request but
can't be discovered using DNS.
For the current quota on the number of namespaces that you can create using
the same AWS account, see [AWS Cloud Map
quotas](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
in the *AWS Cloud Map Developer Guide*.
"""
def create_http_namespace(client, input, options \\ []) do
request(client, "CreateHttpNamespace", input, options)
end
@doc """
Creates a private namespace based on DNS, which will be visible only inside
a specified Amazon VPC. The namespace defines your service naming scheme.
For example, if you name your namespace `example.com` and name your service
`backend`, the resulting DNS name for the service will be
`backend.example.com`. For the current quota on the number of namespaces
that you can create using the same AWS account, see [AWS Cloud Map
Limits](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
in the *AWS Cloud Map Developer Guide*.
"""
def create_private_dns_namespace(client, input, options \\ []) do
request(client, "CreatePrivateDnsNamespace", input, options)
end
@doc """
Creates a public namespace based on DNS, which will be visible on the
internet. The namespace defines your service naming scheme. For example, if
you name your namespace `example.com` and name your service `backend`, the
resulting DNS name for the service will be `backend.example.com`. For the
current quota on the number of namespaces that you can create using the
same AWS account, see [AWS Cloud Map
Limits](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
in the *AWS Cloud Map Developer Guide*.
"""
def create_public_dns_namespace(client, input, options \\ []) do
request(client, "CreatePublicDnsNamespace", input, options)
end
@doc """
Creates a service, which defines the configuration for the following
entities:
<ul> <li> For public and private DNS namespaces, one of the following
combinations of DNS records in Amazon Route 53:
<ul> <li> `A`
</li> <li> `AAAA`
</li> <li> `A` and `AAAA`
</li> <li> `SRV`
</li> <li> `CNAME`
</li> </ul> </li> <li> Optionally, a health check
</li> </ul> After you create the service, you can submit a
[RegisterInstance](https://docs.aws.amazon.com/cloud-map/latest/api/API_RegisterInstance.html)
request, and AWS Cloud Map uses the values in the configuration to create
the specified entities.
For the current quota on the number of instances that you can register
using the same namespace and using the same service, see [AWS Cloud Map
Limits](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
in the *AWS Cloud Map Developer Guide*.
"""
def create_service(client, input, options \\ []) do
request(client, "CreateService", input, options)
end
@doc """
Deletes a namespace from the current account. If the namespace still
contains one or more services, the request fails.
"""
def delete_namespace(client, input, options \\ []) do
request(client, "DeleteNamespace", input, options)
end
@doc """
Deletes a specified service. If the service still contains one or more
registered instances, the request fails.
"""
def delete_service(client, input, options \\ []) do
request(client, "DeleteService", input, options)
end
@doc """
Deletes the Amazon Route 53 DNS records and health check, if any, that AWS
Cloud Map created for the specified instance.
"""
def deregister_instance(client, input, options \\ []) do
request(client, "DeregisterInstance", input, options)
end
@doc """
Discovers registered instances for a specified namespace and service. You
can use `DiscoverInstances` to discover instances for any type of
namespace. For public and private DNS namespaces, you can also use DNS
queries to discover instances.
"""
def discover_instances(client, input, options \\ []) do
request(client, "DiscoverInstances", input, options)
end
@doc """
Gets information about a specified instance.
"""
def get_instance(client, input, options \\ []) do
request(client, "GetInstance", input, options)
end
@doc """
Gets the current health status (`Healthy`, `Unhealthy`, or `Unknown`) of
one or more instances that are associated with a specified service.
<note> There is a brief delay between when you register an instance and
when the health status for the instance is available.
</note>
"""
def get_instances_health_status(client, input, options \\ []) do
request(client, "GetInstancesHealthStatus", input, options)
end
@doc """
Gets information about a namespace.
"""
def get_namespace(client, input, options \\ []) do
request(client, "GetNamespace", input, options)
end
@doc """
Gets information about any operation that returns an operation ID in the
response, such as a `CreateService` request.
<note> To get a list of operations that match specified criteria, see
[ListOperations](https://docs.aws.amazon.com/cloud-map/latest/api/API_ListOperations.html).
</note>
"""
def get_operation(client, input, options \\ []) do
request(client, "GetOperation", input, options)
end
@doc """
Gets the settings for a specified service.
"""
def get_service(client, input, options \\ []) do
request(client, "GetService", input, options)
end
@doc """
Lists summary information about the instances that you registered by using
a specified service.
"""
def list_instances(client, input, options \\ []) do
request(client, "ListInstances", input, options)
end
@doc """
Lists summary information about the namespaces that were created by the
current AWS account.
"""
def list_namespaces(client, input, options \\ []) do
request(client, "ListNamespaces", input, options)
end
@doc """
Lists operations that match the criteria that you specify.
"""
def list_operations(client, input, options \\ []) do
request(client, "ListOperations", input, options)
end
@doc """
Lists summary information for all the services that are associated with one
or more specified namespaces.
"""
def list_services(client, input, options \\ []) do
request(client, "ListServices", input, options)
end
@doc """
Lists tags for the specified resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Creates or updates one or more records and, optionally, creates a health
check based on the settings in a specified service. When you submit a
`RegisterInstance` request, the following occurs:
<ul> <li> For each DNS record that you define in the service that is
specified by `ServiceId`, a record is created or updated in the hosted zone
that is associated with the corresponding namespace.
</li> <li> If the service includes `HealthCheckConfig`, a health check is
created based on the settings in the health check configuration.
</li> <li> The health check, if any, is associated with each of the new or
updated records.
</li> </ul> <important> One `RegisterInstance` request must complete before
you can submit another request and specify the same service ID and instance
ID.
</important> For more information, see
[CreateService](https://docs.aws.amazon.com/cloud-map/latest/api/API_CreateService.html).
When AWS Cloud Map receives a DNS query for the specified DNS name, it
returns the applicable value:
<ul> <li> **If the health check is healthy**: returns all the records
</li> <li> **If the health check is unhealthy**: returns the applicable
value for the last healthy instance
</li> <li> **If you didn't specify a health check configuration**: returns
all the records
</li> </ul> For the current quota on the number of instances that you can
register using the same namespace and using the same service, see [AWS
Cloud Map
Limits](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
in the *AWS Cloud Map Developer Guide*.
"""
def register_instance(client, input, options \\ []) do
request(client, "RegisterInstance", input, options)
end
@doc """
Adds one or more tags to the specified resource.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes one or more tags from the specified resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Submits a request to change the health status of a custom health check to
healthy or unhealthy.
You can use `UpdateInstanceCustomHealthStatus` to change the status only
for custom health checks, which you define using `HealthCheckCustomConfig`
when you create a service. You can't use it to change the status for
Route 53 health checks, which you define using `HealthCheckConfig`.
For more information, see
[HealthCheckCustomConfig](https://docs.aws.amazon.com/cloud-map/latest/api/API_HealthCheckCustomConfig.html).
"""
def update_instance_custom_health_status(client, input, options \\ []) do
request(client, "UpdateInstanceCustomHealthStatus", input, options)
end
@doc """
Submits a request to perform the following operations:
<ul> <li> Update the TTL setting for existing `DnsRecords` configurations
</li> <li> Add, update, or delete `HealthCheckConfig` for a specified
service
<note> You can't add, update, or delete a `HealthCheckCustomConfig`
configuration.
</note> </li> </ul> For public and private DNS namespaces, note the
following:
<ul> <li> If you omit any existing `DnsRecords` or `HealthCheckConfig`
configurations from an `UpdateService` request, the configurations are
deleted from the service.
</li> <li> If you omit an existing `HealthCheckCustomConfig` configuration
from an `UpdateService` request, the configuration is not deleted from the
service.
</li> </ul> When you update settings for a service, AWS Cloud Map also
updates the corresponding settings in all the records and health checks
that were created by using the specified service.
"""
def update_service(client, input, options \\ []) do
request(client, "UpdateService", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, action, input, options) do
  # Every Cloud Map call goes through the same JSON-1.1 protocol endpoint;
  # the action is selected via the X-Amz-Target header.
  client = %{client | service: "servicediscovery"}
  host = build_host("servicediscovery", client)
  url = build_url(host, client)
  payload = encode!(client, input)

  unsigned_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "Route53AutoNaming_v20170314.#{action}"}
  ]

  # SigV4 signing must happen after the payload is final.
  signed_headers = AWS.Request.sign_v4(client, "POST", url, unsigned_headers, payload)
  post(client, url, payload, signed_headers, options)
end
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    {:ok, %{status_code: 200, body: ""} = response} ->
      # An empty body decodes to `nil`, matching the `map() | nil` contract.
      {:ok, nil, response}

    {:ok, %{status_code: 200, body: body} = response} ->
      {:ok, decode!(client, body), response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    {:error, _reason} = error ->
      error
  end
end
# "local" region with an explicit endpoint (e.g. a test double) wins,
# then plain localhost, then the real "<prefix>.<region>.<endpoint>" host.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}),
  do: Enum.join([endpoint_prefix, region, endpoint], ".")
# Full endpoint URL, e.g. "https://host:443/".
defp build_url(host, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}/"
# JSON (de)serialization is delegated to the client's configured codec.
defp encode!(client, payload), do: AWS.Client.encode!(client, payload, :json)
defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
|
lib/aws/generated/service_discovery.ex
| 0.902958
| 0.475484
|
service_discovery.ex
|
starcoder
|
defmodule Rondo.Component do
  # Base behaviour for components: `use Rondo.Component` injects the
  # action/element/event/store/stream plumbing plus overridable defaults
  # for the state/2, context/1 and render/1 callbacks.
  defmacro __using__(_) do
    quote do
      use Rondo.Action
      use Rondo.Element
      use Rondo.Event
      use Rondo.Store
      use Rondo.Stream

      # Default state: just the props that were passed in.
      def state(props, _) do
        props
      end

      # Default context contribution: none.
      def context(_) do
        %{}
      end

      # Default render: nothing.
      def render(_) do
        nil
      end

      defoverridable [state: 2, context: 1, render: 1]
    end
  end

  # A mounted component: the element it was created from, its resolved
  # state, the rendered tree, and the context it exposes to children.
  defstruct [:element,
             :state,
             :tree,
             :context]

  alias Rondo.State
  alias Rondo.Tree

  # Mounts (or re-mounts) the component at `path`, threading the state and
  # action stores through. Returns {component, state_store, action_store}.
  def mount(component = %{element: element, state: state}, path, context, state_store, action_store) do
    state_descriptor = get_state(element, context)
    case State.init(state, state_descriptor, path, state_store) do
      # State unchanged: re-init the tree for its effect on the action
      # store, but keep the component struct as-is.
      {^state, state_store} ->
        {_, action_store} = init_tree(component, path, action_store, state)
        {component, state_store, action_store}

      # State changed: rebuild context and tree and store them back.
      {state, state_store} ->
        {context, action_store} = init_context(component, path, action_store, state)
        {tree, action_store} = init_tree(component, path, action_store, state)
        component = %{component | state: state, context: context, tree: tree}
        {component, state_store, action_store}
    end
  rescue
    e in Rondo.Store.Reference.Error ->
      # Tag the error with the component type for better diagnostics.
      e = %{e | component_type: element.type}
      # NOTE(review): `System.stacktrace/0` is deprecated on Elixir >= 1.7;
      # inside `rescue` this should become `__STACKTRACE__` when upgrading.
      reraise e, System.stacktrace
  end

  # Builds the context this component provides to its children.
  def init_context(%{element: element, context: context}, path, action_store, state) do
    context_descriptor = get_context(element, state.root)
    Tree.init(context, context_descriptor, path, state, action_store)
  end

  # Renders the element and initializes the resulting tree.
  defp init_tree(%{element: element, tree: tree}, path, action_store, state) do
    tree_descriptor = get_tree(element, state.root)
    Tree.init(tree, tree_descriptor, path, state, action_store)
  end

  # Children are passed to the state callback as a pseudo-prop.
  defp get_state(%{type: type, props: props, children: children}, context) do
    props = Map.put(props, :children, children)
    Rondo.Element.Mountable.state(type, props, context)
  end

  defp get_context(%{type: type}, state) do
    Rondo.Element.Mountable.context(type, state)
  end

  defp get_tree(%{type: type}, state) do
    Rondo.Element.Mountable.render(type, state)
  end
end
defimpl Rondo.Diffable, for: Rondo.Component do
  # Same component type on both sides: diff the rendered trees node by node.
  def diff(%{element: %{type: type}, tree: %{root: current}},
           %{element: %{type: type}, tree: %{root: previous}},
           path) do
    Rondo.Diff.diff(current, previous, path)
  end

  # Different (or missing) type: replace the whole subtree at `path`.
  def diff(%{tree: %{root: current}}, _previous, path) do
    [Rondo.Operation.replace(path, current)]
  end
end
defimpl Inspect, for: Rondo.Component do
  import Inspect.Algebra

  # Mounted component: print type, props, the component-owned part of the
  # state, the context and the rendered tree.
  def inspect(%{element: %{type: type, props: props}, tree: %{root: tree}, state: %{root: state}, context: %{root: context}}, opts) do
    # Drop prop-derived keys (and :children) so only state the component
    # itself owns is printed.
    {_, state} = Map.split(state, [:children | Map.keys(props)])
    concat([
      "#Rondo.Component<",
      break(""),
      format_prop(
        "type=",
        type,
        opts
      ),
      format_prop(
        "props=",
        props,
        opts
      ),
      format_prop(
        "state=",
        state,
        opts
      ),
      format_prop(
        "context=",
        context,
        opts
      ),
      ">",
      format_tree(tree, opts)
    ])
    |> line("#Rondo.Component</>")
  end

  # Component that has not been mounted yet: only the element is known.
  def inspect(%{element: element}, opts) do
    concat([
      "#Rondo.Component<PENDING>",
      format_tree(element, opts)
    ])
    |> line("#Rondo.Component</>")
  end

  # Indents the tree on its own line under the component header.
  defp format_tree(tree, opts) do
    nest(line(empty(), to_doc(tree, opts)), 2)
  end

  defp format_prop(name, value, opts)

  # Empty maps are skipped entirely so "props=%{}" noise is never printed.
  # NOTE(review): bare `empty`/`break` rely on a pre-1.6 Inspect.Algebra
  # API (`empty/0` is deprecated in later Elixir) - confirm target version.
  defp format_prop(_, map, _) when map_size(map) == 0 do
    empty
  end

  defp format_prop(name, value, opts) do
    concat([
      name,
      to_doc(value, opts),
      break
    ])
  end
end
|
lib/rondo/component.ex
| 0.578091
| 0.41742
|
component.ex
|
starcoder
|
defmodule Extreme do
@moduledoc """
Extreme module is main communication point with EventStore using tcp connection. Extreme is implemented using
GenServer and is OTP compatible. If client is disconnected from server we are not trying to reconnect,
instead you should rely on your supervisor. For example:
defmodule MyApp.Supervisor do
use Supervisor
def start_link,
do: Supervisor.start_link __MODULE__, :ok
@event_store MyApp.EventStore
def init(:ok) do
event_store_settings = Application.get_env :my_app, :event_store
children = [
worker(Extreme, [event_store_settings, [name: @event_store]]),
# ... other workers / supervisors
]
supervise children, strategy: :one_for_one
end
end
You can manually start adapter as well (as you can see in test file):
{:ok, server} = Application.get_env(:extreme, :event_store) |> Extreme.start_link
From now on, `server` pid is used for further communication. Since we are relying on supervisor to reconnect,
it is wise to name `server` as we did in example above.
"""
use GenServer
alias Extreme.Request
require Logger
alias Extreme.Response
## Client API
@doc """
Starts connection to EventStore using `connection_settings` and optional `opts`.
Extreme can connect to single ES node or to cluster specified with node IPs and ports.
Example for connecting to single node:
config :extreme, :event_store,
db_type: :node,
host: "localhost",
port: 1113,
username: "admin",
password: "<PASSWORD>",
reconnect_delay: 2_000,
max_attempts: :infinity
* `db_type` - defaults to :node, thus it can be omitted
* `host` - check EXT IP setting of your EventStore
* `port` - check EXT TCP PORT setting of your EventStore
* `reconnect_delay` - in ms. Defaults to 1_000. If tcp connection fails this is how long it will wait for reconnection.
* `max_attempts` - Defaults to :infinity. Specifies how many times we'll try to connect to EventStore
Example for connecting to cluster:
config :extreme, :event_store,
db_type: :cluster,
gossip_timeout: 300,
nodes: [
%{host: "10.10.10.29", port: 2113},
%{host: "10.10.10.28", port: 2113},
%{host: "10.10.10.30", port: 2113}
],
username: "admin",
password: "<PASSWORD>"
* `gossip_timeout` - in ms. Defaults to 1_000. We are iterating through `nodes` list, asking for cluster member details.
This setting represents timeout for gossip response before we are asking next node from `nodes` list for cluster details.
* `nodes` - Mandatory for cluster connection. Represents list of nodes in the cluster as we know it
* `host` - should be EXT IP setting of your EventStore node
* `port` - should be EXT HTTP PORT setting of your EventStore node
Example of connection to cluster via DNS lookup
config :extreme, :event_store,
db_type: :cluster_dns,
gossip_timeout: 300,
host: "es-cluster.example.com", # accepts a charlist too; this should be a multi-A-record host entry in your nameserver
port: 2113, # the external gossip port
username: "admin",
password: "<PASSWORD>",
max_attempts: :infinity
When `cluster` mode is used, adapter goes thru `nodes` list and tries to gossip with node one after another
until it gets response about nodes. Based on nodes information from that response it ranks their statuses and chooses
the best candidate to connect to. For the way ranking is done, take a look at `lib/cluster_connection.ex`:
defp rank_state("Master"), do: 1
defp rank_state("PreMaster"), do: 2
defp rank_state("Slave"), do: 3
defp rank_state("Clone"), do: 4
defp rank_state("CatchingUp"), do: 5
defp rank_state("PreReplica"), do: 6
defp rank_state("Unknown"), do: 7
defp rank_state("Initializing"), do: 8
Note that the above works by the same procedure with `cluster_dns` mode turned on, since internally it resolves the IP addresses to which the same connection procedure is applied.
Once client is disconnected from EventStore, supervisor should respawn it and connection starts over again.
"""
def start_link(connection_settings, opts \\ []) do
  GenServer.start_link(__MODULE__, connection_settings, opts)
end
@doc """
Executes protobuf `message` against `server`. Returns:
- {:ok, protobuf_message} on success .
- {:error, :not_authenticated} on wrong credentials.
- {:error, error_reason, protobuf_message} on failure.
EventStore uses ProtoBuf for taking requests and sending responses back.
We are using [exprotobuf](https://github.com/bitwalker/exprotobuf) to deal with them.
List and specification of supported protobuf messages can be found in `include/event_store.proto` file.
Instead of wrapping each and every request in elixir function, we are using `execute/2` function that takes server pid and request message:
{:ok, response} = Extreme.execute server, write_events()
where `write_events` can be helper function like:
alias Extreme.Msg, as: ExMsg
defp write_events(stream \\ "people", events \\ [%PersonCreated{name: "<NAME>"}, %PersonChangedName{name: "Zika"}]) do
proto_events = Enum.map(events, fn event ->
ExMsg.NewEvent.new(
event_id: Extreme.Tools.gen_uuid(),
event_type: to_string(event.__struct__),
data_content_type: 0,
metadata_content_type: 0,
data: :erlang.term_to_binary(event),
meta: ""
) end)
ExMsg.WriteEvents.new(
event_stream_id: stream,
expected_version: -2,
events: proto_events,
require_master: false
)
end
This way you can fine tune your requests, i.e. choose your serialization. We are using erlang serialization in this case
`data: :erlang.term_to_binary(event)`, but you can do whatever suites you.
For more information about protobuf messages EventStore uses,
take a look at their [documentation](http://docs.geteventstore.com) or for common use cases
you can check `test/extreme_test.exs` file.
"""
def execute(server, message) do
  GenServer.call(server, {:execute, message})
end
@doc """
Reads events specified in `read_events`, sends them to `subscriber`
and leaves `subscriber` subscribed per `subscribe` message.
`subscriber` is process that will keep receiving {:on_event, event} messages.
`read_events` :: Extreme.Msg.ReadStreamEvents
`subscribe` :: Extreme.Msg.SubscribeToStream
Returns {:ok, subscription} when subscription is success.
If `stream` is hard deleted `subscriber` will receive message {:extreme, :error, :stream_hard_deleted, stream}
If `stream` is soft deleted `subscriber` will receive message {:extreme, :warn, :stream_soft_deleted, stream}.
In case of soft deleted stream, new event will recreate stream and it will be sent to `subscriber` as described above
Hard deleted streams can't be recreated so suggestion is not to handle this message but rather crash when it happens
## Examples:
defmodule MyApp.StreamSubscriber
use GenServer
def start_link(extreme, last_processed_event),
do: GenServer.start_link __MODULE__, {extreme, last_processed_event}
def init({extreme, last_processed_event}) do
stream = "people"
state = %{ event_store: extreme, stream: stream, last_event: last_processed_event }
GenServer.cast self(), :subscribe
{:ok, state}
end
def handle_cast(:subscribe, state) do
# read only unprocessed events and stay subscribed
{:ok, subscription} = Extreme.read_and_stay_subscribed state.event_store, self(), state.stream, state.last_event + 1
# we want to monitor when subscription is crashed so we can resubscribe
ref = Process.monitor subscription
{:noreply, %{state|subscription_ref: ref}}
end
def handle_info({:DOWN, ref, :process, _pid, _reason}, %{subscription_ref: ref} = state) do
GenServer.cast self(), :subscribe
{:noreply, state}
end
def handle_info({:on_event, push}, state) do
push.event.data
|> :erlang.binary_to_term
|> process_event
event_number = push.link.event_number
:ok = update_last_event state.stream, event_number
{:noreply, %{state|last_event: event_number}}
end
def handle_info(_msg, state), do: {:noreply, state}
defp process_event(event), do: IO.puts("Do something with event: " <> inspect(event))
defp update_last_event(_stream, _event_number), do: IO.puts("Persist last processed event_number for stream")
end
This way unprocessed events will be sent by Extreme, using `{:on_event, push}` message.
After all persisted messages are sent, new messages will be sent the same way as they arrive to stream.
Since there's a lot of boilerplate code here, you can use `Extreme.Listener` to reduce it and focus only
on business part of code.
"""
def read_and_stay_subscribed(server, subscriber, stream, from_event_number \\ 0, per_page \\ 4096, resolve_link_tos \\ true, require_master \\ false) do
  params = {stream, from_event_number, per_page, resolve_link_tos, require_master}
  GenServer.call(server, {:read_and_stay_subscribed, subscriber, params})
end
@doc """
Subscribe `subscriber` to `stream` using `server`.
`subscriber` is process that will keep receiving {:on_event, event} messages.
Returns {:ok, subscription} when subscription is success.
```NOTE: If `stream` is hard deleted, `subscriber` will NOT receive any message!```
## Example:
def subscribe(server, stream \\ "people"), do: Extreme.subscribe_to(server, self(), stream)
def handle_info({:on_event, event}, state) do
Logger.debug "New event added to stream 'people': " <> inspect(event)
{:noreply, state}
end
As `Extreme.read_and_stay_subscribed/7` has it's abstraction in `Extreme.Listener`, there's abstraction for this function
as well in `Extreme.FanoutListener` behaviour.
"""
def subscribe_to(server, subscriber, stream, resolve_link_tos \\ true) do
  GenServer.call(server, {:subscribe_to, subscriber, stream, resolve_link_tos})
end
@doc """
Connect the `subscriber` to an existing persistent subscription named `subscription` on `stream`
`subscriber` is process that will keep receiving {:on_event, event} messages.
Returns {:ok, subscription} when subscription is success.
"""
def connect_to_persistent_subscription(server, subscriber, subscription, stream, buffer_size \\ 1) do
  params = {subscription, stream, buffer_size}
  GenServer.call(server, {:connect_to_persistent_subscription, subscriber, params})
end
## Server Callbacks
def init(connection_settings) do
  # Credentials are mandatory; crash early if they are missing.
  user = Keyword.fetch!(connection_settings, :username)
  pass = Keyword.fetch!(connection_settings, :password)
  # Connect asynchronously so start_link returns immediately.
  GenServer.cast(self(), {:connect, connection_settings, 1})
  {:ok, subscriptions_sup} = Extreme.SubscriptionsSupervisor.start_link(self())
  {:ok, persistent_subscriptions_sup} = Extreme.PersistentSubscriptionsSupervisor.start_link(connection_settings)
  state = %{
    socket: nil,
    # correlation_id => caller waiting for a GenServer.reply
    pending_responses: %{},
    # correlation_id => subscription process
    subscriptions: %{},
    subscriptions_sup: subscriptions_sup,
    persistent_subscriptions_sup: persistent_subscriptions_sup,
    credentials: %{user: user, pass: pass},
    # buffer for a partially received tcp message
    received_data: <<>>,
    # expected byte size of the message currently being buffered
    should_receive: nil,
  }
  {:ok, state}
end
def handle_cast({:connect, connection_settings, attempt}, state) do
  # :db_type may come from config as a string; normalize to an atom.
  db_type = Keyword.get(connection_settings, :db_type, :node)
            |> cast_to_atom
  case connect(db_type, connection_settings, attempt) do
    {:ok, socket} -> {:noreply, %{state|socket: socket}}
    # Connection failed for good; stop and let the supervisor decide.
    error -> {:stop, error, state}
  end
end
# Resolve host/port for each db_type, then open the tcp connection.

# :cluster - gossip with the configured nodes and pick the best candidate.
defp connect(:cluster, connection_settings, attempt) do
  {:ok, host, port} = Extreme.ClusterConnection.get_node(connection_settings)
  connect(host, port, connection_settings, attempt)
end

# :node - single node, host and port come straight from the settings.
defp connect(:node, connection_settings, attempt) do
  host = Keyword.fetch!(connection_settings, :host)
  port = Keyword.fetch!(connection_settings, :port)
  connect(host, port, connection_settings, attempt)
end

# :cluster_dns - resolve cluster members via a DNS A-record lookup first.
defp connect(:cluster_dns, connection_settings, attempt) do
  {:ok, host, port} = Extreme.ClusterConnection.get_node(:cluster_dns, connection_settings)
  connect(host, port, connection_settings, attempt)
end
# Opens the tcp connection and schedules the first heartbeat ping.
# On failure, retries after :reconnect_delay ms until :max_attempts is
# exceeded, then gives up with {:error, :max_attempt_exceeded}.
defp connect(host, port, connection_settings, attempt) do
  Logger.info "Connecting Extreme to #{host}:#{port}"
  opts = [:binary, active: :once]
  # `String.to_char_list/1` is deprecated; `String.to_charlist/1` is the
  # supported spelling with identical behavior.
  case :gen_tcp.connect(String.to_charlist(host), port, opts) do
    {:ok, socket} ->
      Logger.info "Successfully connected to EventStore @ #{host}:#{port}"
      :timer.send_after(1_000, :send_ping)
      {:ok, socket}
    _ ->
      max_attempts = Keyword.get(connection_settings, :max_attempts, :infinity)
      reconnect = case max_attempts do
        :infinity -> true
        max when attempt <= max -> true
        _ -> false
      end
      if reconnect do
        reconnect_delay = Keyword.get connection_settings, :reconnect_delay, 1_000
        Logger.warn "Error connecting to EventStore @ #{host}:#{port}. Will retry in #{reconnect_delay} ms."
        # Blocking sleep is fine here: the GenServer is unusable anyway
        # until a connection exists.
        :timer.sleep(reconnect_delay)
        db_type = Keyword.get(connection_settings, :db_type, :node)
                  |> cast_to_atom
        connect(db_type, connection_settings, attempt + 1)
      else
        {:error, :max_attempt_exceeded}
      end
  end
end
# Send the request over tcp and reply later, when the response with the
# same correlation id arrives (see respond_with/3).
def handle_call({:execute, protobuf_msg}, from, state) do
  {message, correlation_id} = Request.prepare(protobuf_msg, state.credentials)
  #Logger.debug "Will execute #{inspect protobuf_msg}"
  :ok = :gen_tcp.send(state.socket, message)
  state = put_in(state.pending_responses, Map.put(state.pending_responses, correlation_id, from))
  {:noreply, state}
end

# Subscription work is delegated to a dedicated process under the
# subscriptions supervisor; the caller gets its pid back.
def handle_call({:read_and_stay_subscribed, subscriber, params}, _from, state) do
  {:ok, subscription} = Extreme.SubscriptionsSupervisor.start_subscription(state.subscriptions_sup, subscriber, params)
  #Logger.debug "Subscription is: #{inspect subscription}"
  {:reply, {:ok, subscription}, state}
end

def handle_call({:subscribe_to, subscriber, stream, resolve_link_tos}, _from, state) do
  {:ok, subscription} = Extreme.SubscriptionsSupervisor.start_subscription(state.subscriptions_sup, subscriber, stream, resolve_link_tos)
  #Logger.debug "Subscription is: #{inspect subscription}"
  {:reply, {:ok, subscription}, state}
end

def handle_call({:connect_to_persistent_subscription, subscriber, params}, _from, state) do
  {:ok, persistent_subscription} = Extreme.PersistentSubscriptionsSupervisor.start_persistent_subscription(state.persistent_subscriptions_sup, subscriber, params)
  {:reply, {:ok, persistent_subscription}, state}
end

# Internal: a subscription process registers itself here so pushed events
# for its correlation id can be routed to it (see respond_to_subscription/3).
def handle_call({:subscribe, subscriber, msg}, from, state) do
  #Logger.debug "Subscribing #{inspect subscriber} with: #{inspect msg}"
  {message, correlation_id} = Request.prepare(msg, state.credentials)
  :ok = :gen_tcp.send(state.socket, message)
  state = put_in(state.pending_responses, Map.put(state.pending_responses, correlation_id, from))
  state = put_in(state.subscriptions, Map.put(state.subscriptions, correlation_id, subscriber))
  {:noreply, state}
end

# Ack/nack for persistent subscriptions reuse an existing correlation id,
# so no pending response is recorded - reply :ok immediately.
def handle_call({:ack, protobuf_msg, correlation_id}, _from, state) do
  {message, _correlation_id} = Request.prepare(protobuf_msg, state.credentials, correlation_id)
  # Logger.debug(fn -> "Ack received event: #{inspect protobuf_msg}" end)
  :ok = :gen_tcp.send(state.socket, message)
  {:reply, :ok, state}
end

def handle_call({:nack, protobuf_msg, correlation_id}, _from, state) do
  {message, _correlation_id} = Request.prepare(protobuf_msg, state.credentials, correlation_id)
  Logger.debug(fn -> "Nack received event: #{inspect protobuf_msg}" end)
  :ok = :gen_tcp.send(state.socket, message)
  {:reply, :ok, state}
end
# Periodic keep-alive; the matching :pong response (see respond/2)
# schedules the next ping.
def handle_info(:send_ping, state) do
  message = Request.prepare(:ping)
  :ok = :gen_tcp.send(state.socket, message)
  {:noreply, state}
end

def handle_info({:tcp, socket, pkg}, state) do
  :inet.setopts(socket, active: :once) # Allow the socket to send us the next message
  state = process_package(pkg, state)
  {:noreply, state}
end

# Crash on socket close; the supervisor restarts the connection.
def handle_info({:tcp_closed, _port}, state), do: {:stop, :tcp_closed, state}
# This package carries a message from its start (4-byte little-endian
# length prefix). Process it and return the new `state`.
defp process_package(<<message_length :: 32-unsigned-little-integer, content :: binary>>, %{socket: _socket, received_data: <<>>} = state) do
  #Logger.debug "Processing package with message_length of: #{message_length}"
  slice_content(message_length, content)
  |> process_content(state)
end

# Continuation of an unfinished message: append to the buffer and retry
# slicing with the previously recorded expected length.
defp process_package(pkg, %{socket: _socket} = state) do
  #Logger.debug "Processing next package. We need #{state.should_receive} bytes and we have collected #{byte_size(state.received_data)} so far and we have #{byte_size(pkg)} more"
  slice_content(state.should_receive, state.received_data <> pkg)
  |> process_content(state)
end

# Splits `content` into one complete message plus the remainder, or tags
# it as :unfinished_message when not enough bytes have arrived yet.
defp slice_content(message_length, content) do
  if byte_size(content) < message_length do
    #Logger.debug "We have unfinished message of length #{message_length}(#{byte_size(content)}): #{inspect content}"
    {:unfinished_message, message_length, content}
  else
    case content do
      <<message :: binary - size(message_length), next_message :: binary>> -> {message, next_message}
      <<message :: binary - size(message_length)>> -> {message, <<>>}
    end
  end
end
# Not enough bytes yet: stash what we have and wait for the next package.
defp process_content({:unfinished_message, expected_message_length, data}, state) do
  %{state|should_receive: expected_message_length, received_data: data}
end

# Exactly one complete message: process it and reset the receive buffer.
defp process_content({message, <<>>}, state) do
  #Logger.debug "Processing single message: #{inspect message} and we have already received: #{inspect state.received_data}"
  state = process_message(message, state)
  #Logger.debug "After processing content state is #{inspect state}"
  %{state|should_receive: nil, received_data: <<>>}
end

# A complete message followed by more data: process the message, then
# treat the remainder as a fresh package (it starts with its own length).
defp process_content({message, rest}, state) do
  #Logger.debug "Processing message: #{inspect message}"
  #Logger.debug "But we have something else in package: #{inspect rest}"
  state = process_message(message, %{state|should_receive: nil, received_data: <<>>})
  process_package(rest, state)
end
# Decodes a complete wire message and dispatches it to respond/2.
defp process_message(message, state) do
  message
  |> Response.parse()
  |> respond(state)
end
# Our ping was answered; schedule the next one.
defp respond({:pong, _correlation_id}, state) do
  #Logger.debug "#{inspect self()} got :pong"
  :timer.send_after 1_000, :send_ping
  state
end

# The server probes us; echo a heartbeat response with the same id.
defp respond({:heartbeat_request, correlation_id}, state) do
  #Logger.debug "#{inspect self()} Tick-Tack"
  message = Request.prepare(:heartbeat_response, correlation_id)
  :ok = :gen_tcp.send(state.socket, message)
  %{state|pending_responses: state.pending_responses}
end

# Authentication failure is forwarded to whoever issued the request.
defp respond({:error, :not_authenticated, correlation_id}, state) do
  {:error, :not_authenticated}
  |> respond_with(correlation_id, state)
end

# Any other parsed response is routed by its correlation id.
defp respond({_auth, correlation_id, response}, state) do
  response
  |> respond_with(correlation_id, state)
end
# Reply either to a pending GenServer.call (pending_responses) or, when no
# caller is waiting, push the message to the registered subscription.
defp respond_with(response, correlation_id, state) do
  #Logger.debug "Responding with response: #{inspect response}"
  case Map.get(state.pending_responses, correlation_id) do
    nil ->
      respond_to_subscription(response, correlation_id, state.subscriptions)
      state
    from ->
      :ok = GenServer.reply(from, Response.reply(response, correlation_id))
      # The call is answered; drop it from the pending map.
      pending_responses = Map.delete(state.pending_responses, correlation_id)
      %{state|pending_responses: pending_responses}
  end
end

# Forward a pushed event to the subscription process registered under this
# correlation id; unknown ids are silently ignored.
defp respond_to_subscription(response, correlation_id, subscriptions) do
  # Logger.debug "Attempting to respond to subscription with response: #{inspect response}"
  case Map.get(subscriptions, correlation_id) do
    nil -> :ok #Logger.error "Can't find correlation_id #{inspect correlation_id} for response #{inspect response}"
    subscription -> GenServer.cast(subscription, Response.reply(response, correlation_id))
  end
end
@doc """
Cast the provided value to an atom if appropriate.

If the provided value is a string, convert it to an atom, otherwise return
it as-is.
"""
# Used only on trusted configuration values (`:db_type`), so the dynamic
# `String.to_atom/1` here is not an atom-exhaustion concern.
def cast_to_atom(value) when is_binary(value), do: String.to_atom(value)
def cast_to_atom(value), do: value
end
|
lib/extreme.ex
| 0.813868
| 0.425725
|
extreme.ex
|
starcoder
|
defmodule RDF.XML.Encoder do
@moduledoc """
An encoder for RDF/XML serializations of RDF.ex data structures.
As for all encoders of `RDF.Serialization.Format`s, you normally won't use these
functions directly, but via one of the `write_` functions on the `RDF.XML` format
module or the generic `RDF.Serialization` module.
## Options
- `:base`: : Allows to specify the base URI to be used for a `xml:base` declaration.
If not specified the one from the given graph is used or if there is also none
specified for the graph the `RDF.default_base_iri/0`.
- `:prefixes`: Allows to specify the prefixes to be used as a `RDF.PrefixMap` or
anything from which a `RDF.PrefixMap` can be created with `RDF.PrefixMap.new/1`.
If not specified the ones from the given graph are used or if these are also not
present the `RDF.default_prefixes/0`.
- `:implicit_base`: Allows to specify that the used base URI should not be encoded
in the generated serialization (default: `false`).
- `:use_rdf_id`: Allows to determine if `rdf:ID` should be used when possible.
You can either provide a boolean value or a function which should return a boolean
value for a given `RDF.Description`. (default: `false`)
- `:producer`: This option allows you to provide a producer function, which will get
the input data (usually a `RDF.Graph`) and should produce a stream of the descriptions
to be encoded. This allows you to control the order of the descriptions, apply filters
etc.
iex> RDF.Graph.new([
...> EX.S1 |> EX.p1(EX.O1),
...> EX.S2 |> EX.p2(EX.O2),
...> ])
...> |> RDF.XML.write_string!(
...> prefixes: [ex: EX],
...> producer: fn graph ->
...> {first, rest} = RDF.Graph.pop(graph, EX.S2)
...> Stream.concat([first], RDF.Graph.descriptions(rest))
...> end)
~S(<?xml version="1.0" encoding="utf-8"?><rdf:RDF xmlns:ex="http://example.com/">\
<rdf:Description rdf:about="http://example.com/S2"><ex:p2 rdf:resource="http://example.com/O2"/></rdf:Description>\
<rdf:Description rdf:about="http://example.com/S1"><ex:p1 rdf:resource="http://example.com/O1"/></rdf:Description>\
</rdf:RDF>)
"""
use RDF.Serialization.Encoder
alias RDF.{Description, Graph, Dataset, IRI, BlankNode, Literal, LangString, XSD, PrefixMap}
import RDF.Utils
import Saxy.XML
@doc """
Encodes the given RDF `data` structure to a RDF/XML string.
The result is returned in an `:ok` tuple or an `:error` tuple in case of an error.
For a description of the available options see the [module documentation](`RDF.XML.Encoder`).
"""
@impl RDF.Serialization.Encoder
@spec encode(Graph.t(), keyword) :: {:ok, String.t()} | {:error, any}
def encode(data, opts \\ []) do
  # :base_iri is accepted as a legacy alias for :base.
  base = opts |> Keyword.get(:base, Keyword.get(opts, :base_iri)) |> base_iri(data)
  prefixes = opts |> Keyword.get(:prefixes) |> prefix_map(data)
  use_rdf_id = Keyword.get(opts, :use_rdf_id) || false

  case document(data, base, prefixes, use_rdf_id, opts) do
    {:ok, root} -> {:ok, Saxy.encode!(root, version: "1.0", encoding: :utf8)}
    error -> error
  end
end
@doc """
Encodes the given RDF `data` structure to a RDF/XML stream.
By default the RDF/XML stream will emit single line strings for each of the
descriptions in the given `data`. But you can also receive the serialized RDF/XML
description as IO lists aka iodata by setting the `:mode` option to `:iodata`.
For a description of the other available options see the [module documentation](`RDF.XML.Encoder`).
"""
@impl RDF.Serialization.Encoder
@spec stream(Graph.t(), keyword) :: Enumerable.t()
def stream(data, opts \\ []) do
  # Same option resolution as encode/2; :base_iri is a legacy alias.
  base = Keyword.get(opts, :base, Keyword.get(opts, :base_iri)) |> base_iri(data)
  prefixes = Keyword.get(opts, :prefixes) |> prefix_map(data)
  use_rdf_id = Keyword.get(opts, :use_rdf_id, false)
  stream_mode = Keyword.get(opts, :mode, :string)
  input = input(data, opts)
  # Serialize an (almost) empty <rdf:RDF> element once, then pop its last
  # iodata chunk: that chunk is the closing tag, the rest the opening tag.
  # Descriptions are streamed in between the two.
  {rdf_close, rdf_open} =
    Saxy.encode_to_iodata!(
      {"rdf:RDF", ns_declarations(prefixes, base, implicit_base(opts)), [{:characters, "\n"}]}
    )
    |> List.pop_at(-1)
  # In :string mode each emitted chunk must be a binary, not iodata.
  {rdf_close, rdf_open} =
    if stream_mode == :string do
      {IO.iodata_to_binary(rdf_close), IO.iodata_to_binary(rdf_open)}
    else
      {rdf_close, rdf_open}
    end
  Stream.concat([
    [~s[<?xml version="1.0" encoding="utf-8"?>\n]],
    [rdf_open],
    description_stream(input, base, prefixes, use_rdf_id, stream_mode),
    [rdf_close]
  ])
end
# Applies the optional :producer function, which turns the input data into
# the enumerable of descriptions to encode.
defp input(data, opts) do
  case Keyword.get(opts, :producer) do
    nil -> data
    producer when is_function(producer) -> producer.(data)
  end
end

# Whether the base IRI should be left out of the serialization.
defp implicit_base(opts) do
  Keyword.get(opts, :implicit_base, false)
end
# Resolve the base IRI: explicit option > graph's base > RDF.default_base_iri/0.
defp base_iri(nil, %Graph{base_iri: base}) when not is_nil(base), do: validate_base_iri(base)
defp base_iri(nil, _), do: RDF.default_base_iri() |> validate_base_iri()
defp base_iri(base_iri, _), do: base_iri |> IRI.coerce_base() |> validate_base_iri()

defp validate_base_iri(nil), do: nil

# Strips any fragment, since a base IRI with a fragment is not usable.
defp validate_base_iri(base_iri) do
  uri = base_iri |> to_string() |> URI.parse()
  to_string(%{uri | fragment: nil})
end
# Resolve prefixes: explicit option > data structure's own > RDF defaults.
defp prefix_map(nil, %Graph{prefixes: prefixes}) when not is_nil(prefixes), do: prefixes

defp prefix_map(nil, %Dataset{} = dataset) do
  prefixes = Dataset.prefixes(dataset)
  # A dataset may yield an empty prefix map; fall back to the defaults then.
  if Enum.empty?(prefixes) do
    RDF.default_prefixes()
  else
    prefixes
  end
end

defp prefix_map(nil, _), do: RDF.default_prefixes()
defp prefix_map(prefixes, _), do: PrefixMap.new(prefixes)
# Builds the attribute list of the <rdf:RDF> element: the default prefix
# becomes a plain "xmlns", named prefixes become "xmlns:<prefix>".
defp ns_declarations(prefixes, nil, _) do
  Enum.map(prefixes, fn
    {nil, namespace} -> {"xmlns", to_string(namespace)}
    {prefix, namespace} -> {"xmlns:#{prefix}", to_string(namespace)}
  end)
end

# With :implicit_base the base IRI is not written out as xml:base.
defp ns_declarations(prefixes, _, true) do
  ns_declarations(prefixes, nil, true)
end

defp ns_declarations(prefixes, base, implicit_base) do
  [{"xml:base", to_string(base)} | ns_declarations(prefixes, nil, implicit_base)]
end
# Builds the Saxy simple-form tree of the whole document; errors from
# encoding any description propagate out unchanged.
defp document(graph, base, prefixes, use_rdf_id, opts) do
  with {:ok, descriptions} <-
         graph
         |> input(opts)
         |> descriptions(base, prefixes, use_rdf_id) do
    {:ok,
     element(
       "rdf:RDF",
       ns_declarations(prefixes, base, implicit_base(opts)),
       descriptions
     )}
  end
end
defp descriptions(%Graph{} = graph, base, prefixes, use_rdf_id) do
  graph
  |> Graph.descriptions()
  |> descriptions(base, prefixes, use_rdf_id)
end

# Encodes each description, stopping at the first {:error, _}.
defp descriptions(input, base, prefixes, use_rdf_id) do
  map_while_ok(input, &description(&1, base, prefixes, use_rdf_id))
end
defp description_stream(%Graph{} = graph, base, prefixes, use_rdf_id, stream_mode) do
  graph
  |> Graph.descriptions()
  |> description_stream(base, prefixes, use_rdf_id, stream_mode)
end

@dialyzer {:nowarn_function, description_stream: 5}
# Lazily encodes descriptions. Unlike descriptions/4, errors are raised
# here, since a Stream cannot return an error tuple mid-enumeration.
defp description_stream(input, base, prefixes, use_rdf_id, stream_mode) do
  Stream.map(input, fn description ->
    case description(description, base, prefixes, use_rdf_id) do
      {:ok, simple_form} when stream_mode == :string ->
        Saxy.encode!(simple_form) <> "\n"
      {:ok, simple_form} when stream_mode == :iodata ->
        [Saxy.encode_to_iodata!(simple_form) | "\n"]
      {:error, error} ->
        raise error
    end
  end)
end
# Encodes a single description, either as a typed element (when one of its
# rdf:type objects maps to a qname) or as a generic rdf:Description.
defp description(%Description{} = description, base, prefixes, use_rdf_id) do
  {type_node, stripped_description} = type_node(description, prefixes)
  with {:ok, predications} <- predications(stripped_description, base, prefixes) do
    {:ok,
     element(
       type_node || "rdf:Description",
       [description_id(description.subject, base, use_rdf_id, description)],
       predications
     )}
  end
end
# Picks the first rdf:type object that can be abbreviated to a qname; that
# qname becomes the element name and the triple is removed from the
# description (it is implied by the typed element).
defp type_node(description, prefixes) do
  description
  |> Description.get(RDF.type())
  |> List.wrap()
  |> Enum.find_value(fn object ->
    if qname = qname(object, prefixes) do
      {qname, object}
    end
  end)
  |> case do
    nil -> {nil, description}
    {qname, type} -> {qname, Description.delete(description, {RDF.type(), type})}
  end
end
# Subject attribute: blank nodes use rdf:nodeID, IRIs rdf:about or rdf:ID.
defp description_id(%BlankNode{value: bnode}, _base, _, _) do
  {"rdf:nodeID", bnode}
end

# :use_rdf_id may be a predicate function deciding per description.
defp description_id(%IRI{} = iri, base, fun, description) when is_function(fun) do
  description_id(iri, base, fun.(description), description)
end

defp description_id(%IRI{} = iri, base, true, _) do
  # A base-relative fragment IRI can be written as rdf:ID (sans the "#").
  case attr_val_uri(iri, base) do
    "#" <> value -> {"rdf:ID", value}
    value -> {"rdf:about", value}
  end
end

defp description_id(%IRI{} = iri, base, false, _) do
  {"rdf:about", attr_val_uri(iri, base)}
end
defp predications(description, base, prefixes) do
  flat_map_while_ok(description.predications, fn {predicate, objects} ->
    predications_for_property(predicate, objects, base, prefixes)
  end)
end

# A property must be abbreviatable to a qname, since XML element names
# cannot be full IRIs; otherwise encoding fails with an EncodeError.
defp predications_for_property(property, objects, base, prefixes) do
  if property_name = qname(property, prefixes) do
    {:ok,
     objects
     |> Map.keys()
     |> Enum.map(&statement(property_name, &1, base, prefixes))}
  else
    {:error,
     %RDF.XML.EncodeError{message: "no namespace declaration for property #{property} found"}}
  end
end
# Object encodings: IRIs become rdf:resource, blank nodes rdf:nodeID,
# XML literals inline markup via rdf:parseType="Literal", and all other
# literals character data with optional xml:lang/rdf:datatype attributes.
defp statement(property_name, %IRI{} = iri, base, _) do
  element(property_name, [{"rdf:resource", attr_val_uri(iri, base)}], [])
end

defp statement(property_name, %BlankNode{value: value}, _base, _) do
  element(property_name, [{"rdf:nodeID", value}], [])
end

@xml_literal IRI.to_string(RDF.XMLLiteral)

# TODO: This dialyzer exception rule can be removed when this got merged: https://github.com/qcam/saxy/pull/82
@dialyzer {:nowarn_function, statement: 4}
defp statement(property_name, %Literal{literal: %{datatype: @xml_literal}} = literal, _, _) do
  element(
    property_name,
    [{"rdf:parseType", "Literal"}],
    Literal.lexical(literal)
  )
end

defp statement(property_name, %Literal{} = literal, base, _) do
  element(
    property_name,
    literal_attributes(literal, base),
    [{:characters, Literal.lexical(literal)}]
  )
end
# Returns the XML attributes that encode a literal's language or datatype:
# language-tagged strings get xml:lang, plain xsd:string gets nothing, and
# any other datatype gets an rdf:datatype attribute.
defp literal_attributes(%Literal{literal: %LangString{language: language}}, _base) do
  [{"xml:lang", language}]
end

defp literal_attributes(%Literal{literal: %XSD.String{}}, _base) do
  []
end

defp literal_attributes(%Literal{literal: %datatype{}}, base) do
  [{"rdf:datatype", attr_val_uri(datatype.id(), base)}]
end

defp literal_attributes(_literal, _base), do: []
# Produces the attribute-value form of an IRI. Without a base IRI the value
# is passed through unchanged; with a base, the base prefix is stripped to
# yield a relative reference.
defp attr_val_uri(iri, nil) do
  iri
end

defp attr_val_uri(%IRI{value: value}, base) do
  attr_val_uri(value, base)
end

defp attr_val_uri(iri, base) do
  String.replace_prefix(iri, base, "")
end
# Abbreviates an IRI to a prefixed name under the given prefix map, or nil
# when no prefix matches. A name under the empty prefix (":name") is
# reduced to its local part.
defp qname(iri, prefixes) do
  with name when is_binary(name) <- PrefixMap.prefixed_name(prefixes, iri) do
    case name do
      ":" <> local_name -> local_name
      _ -> name
    end
  end
end
end
|
lib/rdf/xml/encoder.ex
| 0.888154
| 0.641015
|
encoder.ex
|
starcoder
|
defmodule Day06 do
  @moduledoc """
  Advent of Code 2018, Day 6: Chronal Coordinates.

  Part 1 finds the size of the largest *finite* region of grid points that
  are strictly closest (by Manhattan distance) to a single input coordinate.
  Part 2 counts the grid points whose total distance to all input
  coordinates is below a threshold.
  """

  @doc "Smoke-test helper; returns `:hello`."
  def hello() do
    :hello
  end

  @doc "Manhattan (taxicab) distance between two `{x, y}` points."
  def manhattan_distance({x1, y1}, {x2, y2}) do
    abs(x1 - x2) + abs(y1 - y2)
  end

  @doc """
  Parses puzzle input (one "x, y" pair per line) into a list of `{x, y}`
  integer tuples.
  """
  def string_to_coordinate(string) do
    string
    |> String.split("\n", trim: true)
    |> Enum.map(&String.trim/1)
    |> Enum.map(&String.split(&1, ", "))
    |> Enum.map(fn [x, y] -> {String.to_integer(x), String.to_integer(y)} end)
  end

  @doc """
  Bounding box of the coordinates as `{{min_x, min_y}, {max_x, max_y}}`.
  """
  def smallest_grid(coordinates) do
    {min_x, _} = Enum.min_by(coordinates, fn {x, _} -> x end)
    {_, min_y} = Enum.min_by(coordinates, fn {_, y} -> y end)
    {max_x, _} = Enum.max_by(coordinates, fn {x, _} -> x end)
    {_, max_y} = Enum.max_by(coordinates, fn {_, y} -> y end)
    {{min_x, min_y}, {max_x, max_y}}
  end

  @doc "True when `{x, y}` lies on an edge of the given bounding box."
  def boundery?({x, y}, {{min_x, min_y}, {max_x, max_y}}) do
    x == min_x || x == max_x || y == min_y || y == max_y
  end

  @doc "For each coordinate, whether it sits on the bounding-box edge."
  def boundery_indexes(coordinates) do
    max_min = smallest_grid(coordinates)
    Enum.map(coordinates, &boundery?(&1, max_min))
  end

  @doc "All `{x, y}` grid points inside the bounding box (inclusive)."
  def generate_grid_coords({{top_left_x, top_left_y}, {bottom_right_x, bottom_right_y}}) do
    for x <- top_left_x..bottom_right_x, y <- top_left_y..bottom_right_y, do: {x, y}
  end

  @doc """
  Part 1: the size of the largest finite area closest to a single
  coordinate.
  """
  def solve_part1(input) do
    coords = string_to_coordinate(input)
    max_min = smallest_grid(coords)
    grid_coords = generate_grid_coords(max_min)

    # Keep only grid points with a unique nearest coordinate (ties belong to
    # nobody) and record the index of that nearest coordinate.
    coords_closest_index =
      grid_coords
      |> Enum.map(fn point ->
        {point, Enum.map(coords, &manhattan_distance(&1, point))}
      end)
      |> Enum.filter(fn {_point, distances} ->
        min = Enum.min(distances)
        Enum.count(distances, &(&1 == min)) == 1
      end)
      |> Enum.map(fn {point, distances} ->
        {point, Enum.find_index(distances, &(&1 == Enum.min(distances)))}
      end)

    # A coordinate whose area touches the bounding box extends infinitely
    # and must be excluded from the answer.
    infinite_indexes =
      coords_closest_index
      |> Enum.filter(fn {point, _index} -> boundery?(point, max_min) end)
      |> Enum.map(fn {_point, index} -> index end)
      |> Enum.uniq()

    coords_closest_index
    |> Enum.group_by(fn {_point, index} -> index end)
    |> Enum.map(fn {index, points} -> {index, length(points)} end)
    |> Enum.reject(fn {index, _count} -> index in infinite_indexes end)
    |> Enum.max_by(fn {_index, count} -> count end)
    |> elem(1)
  end

  @doc "Sum of Manhattan distances from `location` to every coordinate."
  def total_distance(coordinates, location) do
    coordinates
    |> Enum.map(&manhattan_distance(&1, location))
    |> Enum.sum()
  end

  @doc """
  Part 2 core: counts grid points whose total distance to all coordinates
  is below `dist_limit`.
  """
  def find_region(coordinates, dist_limit \\ 32) do
    coordinates
    |> smallest_grid()
    |> generate_grid_coords()
    |> Enum.count(&(total_distance(coordinates, &1) < dist_limit))
  end

  @doc "Part 2: region size for the given input and distance limit."
  def solve_part2(inputstring, dist_limit \\ 10_000) do
    inputstring
    |> string_to_coordinate()
    |> find_region(dist_limit)
  end
end
|
lib/day06.ex
| 0.724286
| 0.731322
|
day06.ex
|
starcoder
|
defmodule VintageNetWiFi.WPASupplicantLL do
  use GenServer

  require Logger

  @moduledoc """
  This modules provides a low-level interface for interacting with the `wpa_supplicant`

  Example use:
  ```elixir
  iex> {:ok, ws} = VintageNetWiFi.WPASupplicantLL.start_link(path: "/tmp/vintage_net/wpa_supplicant/wlan0", notification_pid: self())
  {:ok, #PID<0.1795.0>}
  iex> VintageNetWiFi.WPASupplicantLL.control_request(ws, "ATTACH")
  {:ok, "OK\n"}
  iex> VintageNetWiFi.WPASupplicantLL.control_request(ws, "SCAN")
  {:ok, "OK\n"}
  iex> flush
  {VintageNetWiFi.WPASupplicant, 51, "CTRL-EVENT-SCAN-STARTED "}
  {VintageNetWiFi.WPASupplicant, 51, "CTRL-EVENT-BSS-ADDED 0 78:8a:20:87:7a:50"}
  {VintageNetWiFi.WPASupplicant, 51, "CTRL-EVENT-SCAN-RESULTS "}
  {VintageNetWiFi.WPASupplicant, 51, "CTRL-EVENT-NETWORK-NOT-FOUND "}
  :ok
  iex> VintageNetWiFi.WPASupplicantLL.control_request(ws, "BSS 0")
  {:ok,
  "id=0\nbssid=78:8a:20:82:7a:50\nfreq=2437\nbeacon_int=100\ncapabilities=0x0431\nqual=0\nnoise=-89\nlevel=-71\ntsf=0000333220048880\nage=14\nie=0008426f7062654c414e010882848b968c1298240301062a01003204b048606c0b0504000a00002d1aac011bffffff00000000000000000001000000000000000000003d1606080c000000000000000000000000000000000000007f080000000000000040dd180050f2020101000003a4000027a4000042435e0062322f00dd0900037f01010000ff7fdd1300156d00010100010237e58106788a20867a5030140100000fac040100000fac040100000fac020000\nflags=[WPA2-PSK-CCMP][ESS]\nssid=HelloWiFi\nsnr=18\nest_throughput=48000\nupdate_idx=1\nbeacon_ie=0008426f7062654c414e010882848b968c1298240301060504010300002a01003204b048606c0b0504000a00002d1aac011bffffff00000000000000000001000000000000000000003d1606080c000000000000000000000000000000000000007f080000000000000040dd180050f2020101000003a4000027a4000042435e0062322f00dd0900037f01010000ff7fdd1300156d00010100010237e58106788a20867a5030140100000fac040100000fac040100000fac020000\n"}
  ```
  """

  defmodule State do
    @moduledoc false
    # control_file:     path to wpa_supplicant's control socket
    # socket:           our local (Unix domain) UDP socket
    # request_queue:    queued {message, from} requests awaiting their turn
    # outstanding:      the single in-flight {message, from} request, or nil
    # notification_pid: receiver of unsolicited wpa_supplicant events
    # request_timer:    timeout timer for the in-flight request
    defstruct control_file: nil,
              socket: nil,
              request_queue: :queue.new(),
              outstanding: nil,
              notification_pid: nil,
              request_timer: nil
  end

  @doc """
  Start the WPASupplicant low-level interface

  Pass the path to the wpa_supplicant control file.

  Notifications from the wpa_supplicant are sent to the process that
  calls this.
  """
  @spec start_link(path: Path.t(), notification_pid: pid()) :: GenServer.on_start()
  def start_link(init_args) do
    GenServer.start_link(__MODULE__, init_args)
  end

  @doc """
  Send a request to the wpa_supplicant and wait for its response.
  """
  @spec control_request(GenServer.server(), binary()) :: {:ok, binary()} | {:error, any()}
  def control_request(server, request) do
    GenServer.call(server, {:control_request, request})
  end

  @impl GenServer
  def init(init_args) do
    path = Keyword.fetch!(init_args, :path)
    pid = Keyword.fetch!(init_args, :notification_pid)

    # Blindly create the control interface's directory in case we beat
    # wpa_supplicant.
    _ = File.mkdir_p(Path.dirname(path))

    # The path to our end of the socket so that wpa_supplicant can send us
    # notifications and responses
    our_path = path <> ".ex"

    # Blindly remove an old file just in case it exists from a previous run
    _ = File.rm(our_path)

    {:ok, socket} =
      :gen_udp.open(0, [:local, :binary, {:active, true}, {:ip, {:local, our_path}}])

    state = %State{
      control_file: path,
      socket: socket,
      notification_pid: pid
    }

    {:ok, state}
  end

  @impl GenServer
  def handle_call({:control_request, message}, from, state) do
    # Requests are serialized: queue the request and send it only when no
    # other request is in flight, so responses can be matched to requests.
    new_state =
      state
      |> enqueue_request(message, from)
      |> maybe_send_request()

    {:noreply, new_state}
  end

  @impl GenServer
  # Unsolicited event: wpa_supplicant prefixes notifications with "<n>"
  # where n is an ASCII priority digit. Forward to the notification pid.
  def handle_info(
        {:udp, socket, _, 0, <<?<, priority, ?>, notification::binary()>>},
        %{socket: socket, notification_pid: pid} = state
      ) do
    send(pid, {__MODULE__, priority - ?0, notification})
    {:noreply, state}
  end

  # Response to the outstanding request: cancel the timeout, reply to the
  # caller and move on to the next queued request.
  def handle_info({:udp, socket, _, 0, response}, %{socket: socket, outstanding: request} = state)
      when not is_nil(request) do
    {_message, from} = request
    _ = :timer.cancel(state.request_timer)
    GenServer.reply(from, {:ok, response})
    new_state = %{state | outstanding: nil} |> maybe_send_request()
    {:noreply, new_state}
  end

  # The outstanding request timed out: fail it and continue with the queue.
  def handle_info(:request_timeout, %{outstanding: request} = state)
      when not is_nil(request) do
    {_message, from} = request
    GenServer.reply(from, {:error, :timeout})
    new_state = %{state | outstanding: nil} |> maybe_send_request()
    {:noreply, new_state}
  end

  def handle_info(message, state) do
    Logger.error("wpa_supplicant_ll: unexpected message: #{inspect(message)}")
    {:noreply, state}
  end

  defp enqueue_request(state, message, from) do
    new_request_queue = :queue.in({message, from}, state.request_queue)
    %{state | request_queue: new_request_queue}
  end

  # Nothing in flight: pop the next queued request (if any) and send it.
  defp maybe_send_request(%{outstanding: nil} = state) do
    case :queue.out(state.request_queue) do
      {:empty, _} ->
        state

      {{:value, request}, new_queue} ->
        %{state | request_queue: new_queue}
        |> do_send_request(request)
    end
  end

  # A request is already in flight; wait for its response or timeout.
  defp maybe_send_request(state), do: state

  defp do_send_request(state, {message, from} = request) do
    case :gen_udp.send(state.socket, {:local, state.control_file}, 0, message) do
      :ok ->
        # Guard against wpa_supplicant never answering.
        {:ok, timer} = :timer.send_after(1000, :request_timeout)
        %{state | outstanding: request, request_timer: timer}

      error ->
        Logger.error("wpa_supplicant_ll: Error sending #{inspect(message)} (#{inspect(error)})")
        GenServer.reply(from, error)
        maybe_send_request(state)
    end
  end
end
|
lib/vintage_net_wifi/wpa_supplicant_ll.ex
| 0.790045
| 0.523725
|
wpa_supplicant_ll.ex
|
starcoder
|
defmodule BasketAnalysis do
  @moduledoc """
  Market-basket analysis: computes support, confidence and lift for pairs
  of adjacent products within transactions and writes the measures to CSV.
  """

  @doc "Runs the analysis with default (empty) arguments."
  def measure_support do
    # Was a bare `main` (deprecated parenthesis-less zero-arity call).
    main()
  end

  @doc """
  Entry point. Recognized switches:

    * `--src` / `-S` - source data file, loaded via `Data.load/1`
    * `--support` / `-s` - minimum support threshold (float)
    * `--target` / `-t` - target CSV path, written via `Print.to_csv/2`
  """
  def main(args \\ []) do
    {opts, _, _} =
      OptionParser.parse(args,
        switches: [src: :string, support: :float, target: :string],
        aliases: [S: :src, s: :support, t: :target]
      )

    items = Data.load(opts[:src])
    IO.puts("File loaded")

    baskets = get_baskets(items)
    IO.puts("Got Baskets")

    get_sets(
      %{},
      Enum.into(baskets, []),
      baskets,
      calculate_product_support(items, baskets, opts[:support])
    )
    |> Print.to_csv(opts[:target])
  end

  # Folds over every basket, accumulating measures for each pair of
  # adjacent items (sliding window of size 2).
  defp get_sets(sets, [{_, items} | t], baskets, product_support) do
    IO.puts("Calculating sets - Items left: #{Enum.count(t)} | Set size: #{map_size(sets)}")

    sets
    # Enum.chunk/3 is deprecated; chunk_every/4 with :discard is the
    # equivalent sliding window.
    |> get_sets_from_basket(Enum.chunk_every(items, 2, 1, :discard), baskets, product_support)
    |> get_sets(t, baskets, product_support)
  end

  defp get_sets(sets, [], _, _), do: sets

  # Adds measures for each candidate pair whose two products both meet the
  # minimum-support threshold; other pairs are skipped.
  defp get_sets_from_basket(sets, [h | t], baskets, product_support) do
    if Enum.all?(h, &Map.has_key?(product_support, &1)) do
      sets
      |> Map.put_new(h, calculate_measures(h, baskets, product_support))
      |> get_sets_from_basket(t, baskets, product_support)
    else
      get_sets_from_basket(sets, t, baskets, product_support)
    end
  end

  defp get_sets_from_basket(sets, [], _, _), do: sets

  # Groups raw [name, transaction_id] items into %{transaction_id => [names]}.
  defp get_baskets(items) do
    Enum.group_by(items, &get_transaction_id/1, &get_product/1)
  end

  defp get_product([name, _]), do: name

  defp get_transaction_id([_, transaction_id]), do: transaction_id

  # Accumulates the support of each individual product into a map.
  defp get_support(support, [h | t], baskets) do
    IO.puts(
      "Calculating support - Items remaining: #{Enum.count(t)} | Support set size: #{map_size(support)}"
    )

    support
    |> Map.put(h, calculate_support(h, baskets))
    |> get_support(t, baskets)
  end

  defp get_support(support, [], _), do: support

  # support(pair) = occurrences(pair) / |baskets|, merged into the measures map.
  defp calculate_support(measures, products, baskets) do
    Map.put(measures, :support, count_product_occurrences(products, baskets) / Enum.count(baskets))
  end

  # support(product) = occurrences(product) / |baskets|
  defp calculate_support(product, baskets) do
    count_product_occurrences(product, baskets) / Enum.count(baskets)
  end

  # Computes all three measures (support, confidence, lift) for a pair.
  defp calculate_measures(products, baskets, product_support) do
    %{}
    |> calculate_support(products, baskets)
    |> calculate_confidence(products, product_support)
    |> calculate_lift(products, product_support)
  end

  # Number of baskets containing both products of a pair...
  defp count_product_occurrences([first_product, second_product], baskets) do
    baskets
    |> Enum.filter(fn {_, products} ->
      first_product in products and second_product in products
    end)
    |> Enum.count()
  end

  # ...or a single product.
  defp count_product_occurrences(product, baskets) do
    baskets
    |> Enum.filter(fn {_, products} -> product in products end)
    |> Enum.count()
  end

  # confidence(A => B) = support(A, B) / support(A)
  defp calculate_confidence(measures, [first_product, _], product_support) do
    Map.put(measures, :confidence, measures[:support] / product_support[first_product])
  end

  # lift(A, B) = support(A, B) / (support(A) * support(B))
  defp calculate_lift(measures, [first_product, second_product], product_support) do
    Map.put(
      measures,
      :lift,
      measures[:support] / (product_support[first_product] * product_support[second_product])
    )
  end

  # Per-product support, keeping only products above the threshold.
  defp calculate_product_support(items, baskets, support) do
    get_support(
      %{},
      items |> Enum.map(&get_product/1) |> Enum.uniq(),
      baskets
    )
    |> Enum.filter(fn {_, v} -> v > support end)
    |> Enum.into(%{})
  end
end
|
lib/basket_analysis.ex
| 0.521959
| 0.632134
|
basket_analysis.ex
|
starcoder
|
defmodule Chronik.Store do
  @moduledoc """
  Chronik event Store API
  """

  @typedoc "The options given for reading events from the stream"
  @type options :: Keyword.t()

  @typedoc """
  The version of a given event record in the Store.
  A simple implementation is an integer starting from 0. The atom
  `:empty` is the initial version (without events yet).
  """
  @type version :: term() | :empty

  @typep events :: [Chronik.domain_event()]
  @typep event_record :: Chronik.EventRecord
  @typep event_records :: [event_record]

  @doc """
  Append a list of events to the Store.
  - `aggregate` is the aggregate that generated the events.
  - `events` is an enumerable with the events to append.
  - `options` is a keyword indicating the optimistic concurrency
  checks to perform at the moment of writing to the stream.
  ## Versioning
  Possible values are:
  - `:any`: (default value) no checks are performed, the events are
  always written
  - `:no_stream`: verifies that the target stream does not exist
  yet
  - any other value: the event number expected to currently be at
  The return values are `{:ok, last_inserted_version, records}` on
  success or `{:error, message}` in case of failure.
  """
  @callback append(aggregate :: Chronik.Aggregate, events :: events(), opts :: options()) ::
              {:ok, version(), event_records()}
              | {:error, String.t()}

  @doc """
  Retrieves all events from the store starting (but not including) at
  `version`.
  Possible `version` values are `:all` (default value) or a term
  indicating starting read position. Event at `version` is not
  included in the result.
  The return values are `{:ok, version, [event records]}` or `{:error,
  message}` in case of failure.
  If no records are found on the stream (starting at version) the
  function returns `{:ok, version, []}`.
  """
  @callback fetch(version :: version()) ::
              {:ok, version(), event_records()}
              | {:error, String.t()}

  @doc """
  Retrieves all events from the store for a given aggregate starting
  (but not including) at `version`.
  Possible `version` values are `:all` (default value) or a term
  indicating starting read position. Event at `version` is not
  included in the result.
  The return values are `{:ok, version, [event records]}` or `{:error,
  message}` in case of failure.
  If no records are found on the stream (starting at version) the
  function returns `{:ok, version, []}`.
  """
  @callback fetch_by_aggregate(aggregate :: Chronik.Aggregate, version :: version()) ::
              {:ok, version(), event_records()}
              | {:error, String.t()}

  @doc """
  This function allows the Projection module to compare versions of
  EventRecords coming from the PubSub bus.
  The implementation depends on the version type but a trivial
  implementation is to compare the integers and return the
  corresponding atoms.
  """
  @callback compare_version(version :: version(), version :: version()) ::
              :past
              | :next_one
              | :future
              | :equal

  @doc """
  This function creates a snapshot in the store for the given
  `aggregate`. The Store only stores the last snapshot.
  """
  @callback snapshot(
              aggregate :: Chronik.Aggregate,
              state :: Chronik.Aggregate.state(),
              version :: version()
            ) ::
              :ok
              | {:error, reason() :: String.t()}

  @doc """
  Remove all events for given `aggregate` and it's current snapshot
  """
  @callback remove_events(aggregate :: Chronik.Aggregate) :: :ok

  @doc """
  Retrieves a snapshot from the Store. If there is no snapshot it
  returns `nil`.
  If there is a snapshot this function should return a tuple
  `{version, state}` indicating the state of the snapshot and with
  which version of the aggregate it was created.
  """
  @callback get_snapshot(aggregate :: Chronik.Aggregate) :: {version(), Chronik.Aggregate.state()}

  @doc """
  Retrieves the current version of the store. If there are no records it returns :empty.
  """
  @callback current_version() :: version()

  @doc """
  Calls the `fun` function over a stream of domain events starting at version
  `version`.
  """
  @callback stream(fun :: (event_record(), any() -> any()), version :: version()) :: any()

  @doc """
  Calls the `fun` function over the `aggregate`'s domain event stream
  starting at version `version`.
  """
  @callback stream_by_aggregate(
              aggregate :: Chronik.Aggregate,
              fun :: (event_record(), any() -> any()),
              version :: version()
            ) :: any()

  defmacro __using__(opts) do
    quote bind_quoted: [opts: opts] do
      @behaviour Chronik.Store

      alias Chronik.Store.EventRecord
      alias Chronik.Config

      # Resolve the configured adapter at compile time and delegate the
      # whole Store API to it.
      {_cfg, adapter} = Config.fetch_config(__MODULE__, opts)

      @adapter adapter

      defdelegate current_version(), to: @adapter
      defdelegate append(aggregate, events, opts \\ [version: :any]), to: @adapter
      defdelegate snapshot(aggregate, state, version), to: @adapter
      defdelegate get_snapshot(aggregate), to: @adapter
      defdelegate fetch(version \\ :all), to: @adapter
      defdelegate fetch_by_aggregate(aggregate, version \\ :all), to: @adapter
      defdelegate remove_events(aggregate), to: @adapter
      defdelegate stream(fun, version \\ :all), to: @adapter
      defdelegate stream_by_aggregate(aggregate, fun, version \\ :all), to: @adapter
      defdelegate compare_version(version1, version2), to: @adapter
      defdelegate start_link(opts), to: @adapter
    end
  end

  # Starts the store adapter configured for the application.
  def start_link(opts \\ []) do
    {store, _pub_sub} = Chronik.Config.fetch_adapters()
    store.start_link(opts)
  end
end
|
lib/chronik/store.ex
| 0.910719
| 0.652338
|
store.ex
|
starcoder
|
defmodule Hedwig.Robot do
  @moduledoc """
  Defines a robot.

  Robots receive messages from a chat source (XMPP, Slack, Console, etc), and
  dispatch them to matching responders. See the documentation for
  `Hedwig.Responder` for details on responders.

  When used, the robot expects the `:otp_app` as option. The `:otp_app` should
  point to an OTP application that has the robot configuration. For example,
  the robot:

      defmodule MyApp.Robot do
        use Hedwig.Robot, otp_app: :my_app
      end

  Could be configured with:

      config :my_app, MyApp.Robot,
        adapter: Hedwig.Adapters.Console,
        name: "alfred"

  Most of the configuration that goes into the `config` is specific to the
  adapter. Be sure to check the documentation for the adapter in use for all
  of the available options.

  ## Robot configuration

    * `adapter` - the adapter module name.
    * `name` - the name the robot will respond to.
    * `aka` - an alias the robot will respond to.
    * `log_level` - the level to use when logging output.
    * `responders` - a list of responders specified in the following format:
      `{module, kwlist}`.
  """

  defstruct adapter: nil,
            aka: nil,
            name: "",
            opts: [],
            pid: nil,
            responders: []

  defmacro __using__(opts) do
    quote location: :keep, bind_quoted: [opts: opts] do
      use GenServer
      require Logger

      {otp_app, adapter, robot_config} =
        Hedwig.Robot.Supervisor.parse_config(__MODULE__, opts)

      @adapter adapter
      @before_compile adapter
      @config robot_config
      @log_level robot_config[:log_level] || :debug
      @otp_app otp_app

      def start_link(opts \\ []) do
        Hedwig.start_robot(__MODULE__, opts)
      end

      def stop(robot) do
        Hedwig.stop_robot(robot)
      end

      def config(opts \\ []) do
        Hedwig.Robot.Supervisor.config(__MODULE__, @otp_app, opts)
      end

      # Logs `msg` at the configured :log_level.
      def log(msg) do
        Logger.unquote(@log_level)(fn ->
          "#{inspect(msg)}"
        end, [])
      end

      def __adapter__, do: @adapter

      def init({robot, opts}) do
        opts = Keyword.merge(robot.config, opts)
        {:ok, adapter} = @adapter.start_link(robot, opts)
        {aka, opts} = Keyword.pop(opts, :aka)
        {name, opts} = Keyword.pop(opts, :name)
        responders = Keyword.get(opts, :responders, [])

        # Defer responder installation until after init returns.
        # Fixed: was `GenServer.cast(self, ...)` - bare zero-arity `self`
        # is deprecated in favor of explicit `self()`.
        if responders != [] do
          GenServer.cast(self(), :install_responders)
        end

        state = %Hedwig.Robot{
          adapter: adapter,
          aka: aka,
          name: name,
          opts: opts,
          pid: self()
        }

        {:ok, state}
      end

      # Default (overridable) hook invoked once the adapter connects.
      def after_connect(state) do
        Logger.warn """
        #{inspect(__MODULE__)}.after_connect/1 default handler invoked.
        """
        {:ok, state}
      end

      # Default (overridable) hook for adapter messages left to the user.
      def handle_in(msg, state) do
        Logger.warn """
        #{inspect(__MODULE__)}.handle_in/2 default handler invoked.
        """
        {:ok, state}
      end

      def handle_call(:after_connect, _from, state) do
        {:ok, state} = __MODULE__.after_connect(state)
        {:reply, :ok, state}
      end

      def handle_cast({:send, msg}, %{adapter: adapter} = state) do
        @adapter.send(adapter, msg)
        {:noreply, state}
      end

      def handle_cast({:reply, msg}, %{adapter: adapter} = state) do
        @adapter.reply(adapter, msg)
        {:noreply, state}
      end

      def handle_cast({:emote, msg}, %{adapter: adapter} = state) do
        @adapter.emote(adapter, msg)
        {:noreply, state}
      end

      def handle_cast({:register, name}, state) do
        Hedwig.Registry.register(name)
        {:noreply, state}
      end

      # Dispatch an incoming chat message to all installed responders. The
      # robot embedded in the message carries no responders to avoid
      # recursively large terms.
      def handle_cast(%Hedwig.Message{} = msg, %{responders: responders} = state) do
        Hedwig.Responder.run(%{msg | robot: %{state | responders: []}}, responders)
        {:noreply, state}
      end

      def handle_cast({:handle_in, msg}, state) do
        {:ok, state} = __MODULE__.handle_in(msg, state)
        {:noreply, state}
      end

      def handle_cast(:install_responders, %{opts: opts} = state) do
        responders =
          Enum.reduce opts[:responders], [], fn {mod, opts}, acc ->
            mod.install(state, opts) ++ acc
          end

        {:noreply, %{state | responders: responders}}
      end

      # Catch-all: ignore unknown messages (underscored to silence the
      # unused-variable warning the original emitted).
      def handle_info(_msg, state) do
        {:noreply, state}
      end

      def terminate(_reason, _state) do
        :ok
      end

      def code_change(_old, state, _extra) do
        {:ok, state}
      end

      defoverridable [
        {:after_connect, 1},
        {:terminate, 2},
        {:code_change, 3},
        {:handle_in, 2},
        {:handle_info, 2}
      ]
    end
  end

  @doc false
  def start_link(robot, opts) do
    GenServer.start_link(robot, {robot, opts})
  end

  @doc """
  Send a message via the robot.
  """
  def send(pid, msg) do
    GenServer.cast(pid, {:send, msg})
  end

  @doc """
  Send a reply message via the robot.
  """
  def reply(pid, msg) do
    GenServer.cast(pid, {:reply, msg})
  end

  @doc """
  Send an emote message via the robot.
  """
  def emote(pid, msg) do
    GenServer.cast(pid, {:emote, msg})
  end

  @doc """
  Handles invoking installed responders with a `Hedwig.Message`.

  This function should be called by an adapter when a message arrives. A message
  will be sent to each installed responder.
  """
  @spec handle_message(pid, Hedwig.Message.t) :: :ok
  def handle_message(robot, %Hedwig.Message{} = msg) do
    GenServer.cast(robot, msg)
  end

  @doc """
  Invokes a user defined `handle_in/2` function, if defined.

  This function should be called by an adapter when a message arrives but
  should be handled by the user.
  """
  @spec handle_in(pid, any) :: :ok
  def handle_in(robot, msg) do
    GenServer.cast(robot, {:handle_in, msg})
  end

  @doc """
  Invokes a user defined `after_connect/1` function, if defined.

  If the user has defined an `after_connect/1` in the robot module, it will be
  called with the robot's state. It is expected that the function return
  `{:ok, state}`.
  """
  @spec after_connect(pid, integer) :: :ok
  def after_connect(robot, timeout \\ 5000) do
    GenServer.call(robot, :after_connect, timeout)
  end

  @doc """
  Allows a robot to be registered by name.
  """
  @spec register(pid, any) :: :ok
  def register(robot, name) do
    GenServer.cast(robot, {:register, name})
  end
end
|
lib/hedwig/robot.ex
| 0.850748
| 0.549036
|
robot.ex
|
starcoder
|
defmodule Cronex.Parser do
  @moduledoc """
  Parses job frequencies and "HH:MM" times into cron-like
  `{minute, hour, day, month, day_of_week}` tuples.
  """

  @days_of_week [:monday, :tuesday, :wednesday, :thursday, :friday, :saturday, :sunday]

  @doc """
  Parses a given `frequency` and `time` to a tuple.

  ## Example

      iex> Cronex.Parser.parse_regular_frequency(:hour)
      {0, :*, :*, :*, :*}

      iex> Cronex.Parser.parse_regular_frequency(:day, "10:00")
      {0, 10, :*, :*, :*}

      iex> Cronex.Parser.parse_regular_frequency(:day, "12:10")
      {10, 12, :*, :*, :*}

      iex> Cronex.Parser.parse_regular_frequency(:wednesday, "12:00")
      {0, 12, :*, :*, 3}

      iex> Cronex.Parser.parse_regular_frequency([:friday, :saturday])
      {0, 0, :*, :*, [5, 6]}

      iex> Cronex.Parser.parse_regular_frequency(:non_existing_day)
      :invalid

      iex> Cronex.Parser.parse_regular_frequency(:monday, "invalid time")
      :invalid
  """
  def parse_regular_frequency(frequency, time \\ "00:00") do
    parsed_time = parse_time(time)
    do_parse_regular_frequency(frequency, parsed_time)
  end

  defp do_parse_regular_frequency(_, :invalid), do: :invalid

  defp do_parse_regular_frequency(frequency, {hour, minute}) do
    cond do
      frequency == :minute ->
        {:*, :*, :*, :*, :*}

      frequency == :hour ->
        {0, :*, :*, :*, :*}

      frequency == :day ->
        {minute, hour, :*, :*, :*}

      frequency == :month ->
        {minute, hour, 1, :*, :*}

      frequency == :year ->
        {minute, hour, 1, 1, :*}

      frequency in @days_of_week ->
        {minute, hour, :*, :*, day_of_week_index(frequency)}

      is_list(frequency) and Enum.all?(frequency, &(&1 in @days_of_week)) ->
        {minute, hour, :*, :*, Enum.map(frequency, &day_of_week_index/1)}

      true ->
        :invalid
    end
  end

  # 1-based weekday index (monday == 1 ... sunday == 7).
  defp day_of_week_index(day) do
    Enum.find_index(@days_of_week, &(&1 == day)) + 1
  end

  @doc """
  Parses a given `interval`, `frequency` and `time` to a tuple.

  The interval slot of the returned tuple holds a function that receives one
  argument and returns the remainder of dividing that argument by `interval`
  (i.e. it evaluates to `0` whenever the interval is due).

  ## Example

      iex> Cronex.Parser.parse_interval_frequency(2, :invalid_day)
      :invalid

  `parse_interval_frequency(2, :hour)` returns `{0, interval_fn, :*, :*, :*}`
  where `interval_fn` is the remainder function described above. (The
  previous doc showed this as a literal doctest, which could never pass
  since the tuple contains an anonymous function.)
  """
  def parse_interval_frequency(interval, frequency, time \\ "00:00") do
    parsed_time = parse_time(time)
    do_parse_interval_frequency(interval, frequency, parsed_time)
  end

  defp do_parse_interval_frequency(_, _, :invalid), do: :invalid

  defp do_parse_interval_frequency(interval, frequency, {hour, minute}) do
    interval_fn = fn arg -> rem(arg, interval) end

    cond do
      frequency == :minute ->
        {interval_fn, :*, :*, :*, :*}

      frequency == :hour ->
        {0, interval_fn, :*, :*, :*}

      frequency == :day ->
        {minute, hour, interval_fn, :*, :*}

      frequency == :month ->
        {minute, hour, 1, interval_fn, :*}

      true ->
        :invalid
    end
  end

  # Parses "HH:MM" into {hour, minute}. Any malformed or out-of-range time
  # yields :invalid. (Previously inputs like "7" or "99:99" slipped through
  # as odd tuples and only surfaced as errors later.)
  defp parse_time(time) when is_bitstring(time) do
    with [hour_string, minute_string] <- String.split(time, ":"),
         {hour, ""} <- Integer.parse(hour_string),
         {minute, ""} <- Integer.parse(minute_string),
         true <- hour in 0..23 and minute in 0..59 do
      {hour, minute}
    else
      _ -> :invalid
    end
  end
end
|
lib/cronex/parser.ex
| 0.902757
| 0.667619
|
parser.ex
|
starcoder
|
defmodule Runtime.Config.Helper do
@moduledoc """
## Helper
A utility for fetching environment variable values from System and parsing them into the correct types ready for
use in elixir without having to do all the parsing and validation yourself.
Helper.get_env("ENV_VAR_NAME", <options>)
## Valid options:
type:
:atom - The string read from the environment is converted to an atom.
:boolean - The string read from the environment must be in the Elixir set [true, false]
:charlist - The environment variable is converted to an erlang-compatible character list.
:float - The env variable is converted to a float.
:integer - The env variable is converted to an integer.
:module - The env variable is interpreted as an existing (loaded) module name.
:string - The env variable is left as a raw string (elixir binary()).
:list - The env variable is interpreted as a comma-separated list of values of the specified subtype (default: :string)
:tuple - The env variable is interpreted as a comma-separated tuple of values of the specified subtype (default: :string)
:map - The env variable is treated as a JSON string to interpret into a map() type.
subtype: For lists and tuples, a subtype may be specified indicating the type(s) of elements they are expected to contain.
:atom - the resulting list/tuple will be a set of atoms.
:boolean - the resulting list/tuple will be a set of boolean values.
:charlist - the resulting list/tuple will be a set of charlist elements (list of lists).
:float - the resulting list/tuple will be a set of floating point values.
:integer - the resulting list/tuple will be a set of integers.
:module - the resulting list/tuple will be a set of loaded modules.
:string - the resulting list/tuple will be a set of strings.
:map - the resulting list/tuple will be a set of map() objects.
tuple() - valid only for a type of tuple, the subtype of each element in the tuple can be expressed separately for
each element in the tuple. Note that the size of the subtype tuple must be the same as the number of elements
expressed in the comma-separated values of the environment variable parsed.
list() - the subtype of each element in the tuple or list can be expressed separately for each element in the
list or tuple. Note that the size of the subtype list must be the same as the number of elements
expressed in the comma-separated values of the environment variable parsed.
default: any() The default value returned when the specified variable is not provided in the system environment.
in_set: list() The value parsed from the environment variable must be in the specified list of values of the same
type.
in_range: range() The value parsed from the environment must be within the specified range of values.
regex: /regex/ pattern the (type: :string) variable must match.
custom: (tuple(): {Module, validator/2}) A module and function that takes a value and helper options for custom
validation of the value.
required: (boolean()), when true a value must be provided in the environment. A nil generates an {:error, msg} result.
"""
require Logger
require Jason
# TODO: New type candidates: :existing_atom? :map from JSON?
@valid_types [
:atom,
:boolean,
:charlist,
:float,
:integer,
:module,
:string,
:list,
:tuple,
:map
]
@valid_subtypes [
:atom,
:boolean,
:charlist,
:float,
:integer,
:module,
:string,
:map
]
# Testing hooks
defmodule Wrapper do
  @moduledoc false
  # Thin indirection over System.get_env/2 so tests can substitute a stub
  # implementation (selected via the :helper_system application env key).
  defmodule API do
    @callback get_env(String.t(), String.t() | nil) :: String.t() | nil
  end

  @behaviour API

  @impl API
  def get_env(varname, default \\ nil) do
    System.get_env(varname, default)
  end
end
@system_module Application.get_env(:runtime_config, :helper_system, Wrapper)
@doc """
"""
@spec get_env(binary(), keyword()) :: {:error, binary()} | any()
def get_env(var_name, opts \\ []) when is_binary(var_name) do
opts_map = Map.new(opts) |> Map.put_new(:_env_var, var_name)
@system_module.get_env(var_name, nil)
|> parse(determine_type(opts), opts_map)
|> validity_check(opts_map)
end
# Resolve the type used to parse the raw environment string.
# Precedence: an explicit :type option, then the type inferred from the
# :default value, then the type of the first :in_set member, and finally
# :string as the catch-all.
# TODO: consider inferring from :in_range as well.
defp determine_type(opts) do
  explicit = opts[:type]
  from_default = type_of(opts[:default])

  from_set =
    case opts[:in_set] do
      [first | _] -> type_of(first)
      _ -> nil
    end

  explicit || from_default || from_set || :string
end

## Error reporting

# Log the failure (prefixed with the variable name for traceability) and
# return a tagged tuple so it flows through the validation pipeline.
defp error(msg, opts) do
  Logger.error("#{opts._env_var}: #{msg}")
  {:error, msg}
end
# Determine type from value.
# Infers a :type atom from a literal Elixir value (used for defaults and
# :in_set members). Clause order matters: booleans and module names are
# atoms too, so they must be recognised before the generic atom clause.
defp type_of(nil) do
nil
end
defp type_of(value)
when is_boolean(value) do
:boolean
end
defp type_of(value)
when is_integer(value) do
:integer
end
defp type_of(value)
when is_float(value) do
:float
end
# An atom names a module when its Elixir.-prefixed form can be compiled
# or loaded; otherwise it is a plain atom.
defp type_of(value)
when is_atom(value) do
case parse(value, :module, []) |> Code.ensure_compiled() do
{:module, _} -> :module
_ -> :atom
end
end
defp type_of(value)
when is_tuple(value) do
:tuple
end
# NOTE(review): charlists are lists, so a charlist default infers :list.
defp type_of(value)
when is_list(value) do
:list
end
# Anything else (maps, binaries, ...) has no inferred type.
defp type_of(_value) do
nil
end
# Like type_of/1 but falls back to :string instead of nil.
defp safe_type_of(value) do
type_of(value) || :string
end
## Parse based on determined type
# Convert the raw environment string into the requested type. The first
# argument is the raw value (nil when the variable is unset); each clause
# returns the parsed value or an {:error, msg} tuple.
# Variable unset: fall back to the configured default.
defp parse(nil, _type, %{default: default} = _opts) do
default
end
# No type resolved: treat as a plain string.
defp parse(value, nil, opts) do
parse(value, :string, opts)
end
defp parse(value, :string, _opts) do
value
end
defp parse(value, :charlist, _opts) do
value |> String.to_charlist()
end
# NOTE(review): String.to_existing_atom/1 raises for anything other than
# a previously-created atom (e.g. "TRUE" or "yes"); the injected in_set
# check then constrains the result to true/false.
defp parse(value, :boolean, opts) do
value
|> String.to_existing_atom()
|> validity_check(Map.put_new(opts, :in_set, [true, false]))
end
# NOTE(review): String.to_integer/1 and String.to_float/1 raise on
# malformed input rather than returning {:error, msg}.
defp parse(value, :integer, _opts) do
value
|> String.to_integer()
end
defp parse(value, :float, _opts) do
value
|> String.to_float()
end
# A tuple is parsed as a comma-separated list, then converted.
# NOTE(review): if the list parse returns {:error, msg}, List.to_tuple/1
# will raise -- TODO confirm intended.
defp parse(value, :tuple, opts) do
value
|> parse(:list, opts)
|> List.to_tuple()
end
# Comma-separated values, each trimmed and parsed per the :subtype option.
defp parse(value, :list, opts) do
value
|> String.split(",")
|> Enum.map(fn elem -> String.trim(elem) end)
|> parse_subtypes(opts)
end
# Produce a module atom, adding the implicit Elixir. prefix when missing.
defp parse(value, :module, _opts) do
case value |> to_string() |> String.split(".") |> List.first() do
"Elixir" -> :"#{value}"
_ -> :"Elixir.#{value}"
end
end
# NOTE(review): String.to_atom/1 creates atoms from external input; atoms
# are never garbage collected, so untrusted env values are a DoS vector.
defp parse(value, :atom, _opts) do
value
|> String.to_atom()
end
defp parse(_value, type, opts) do
error("Unrecognized type (#{inspect(type)}), supported types: #{inspect(@valid_types)}", opts)
end
# Parse list and tuple elements
# Entry point: default the :subtype to :string, then walk values and
# subtypes in lockstep via the 3-arity clauses below.
defp parse_subtypes(list, opts) do
parse_subtypes(list, Map.put_new(opts, :subtype, :string), [])
end
# All values consumed: restore original order.
defp parse_subtypes([], %{}, acc) do
Enum.reverse(acc)
end
# A single subtype atom applies to every element: expand it to a list of
# matching length and re-dispatch.
defp parse_subtypes(values, %{subtype: subtype} = opts, acc) when subtype in @valid_subtypes and is_list(values) do
parse_subtypes(values, %{opts | subtype: List.duplicate(subtype, length(values))}, acc)
end
# A tuple of subtypes is normalised to a list of subtypes.
defp parse_subtypes(values, %{subtype: subtypes} = opts, acc) when is_tuple(subtypes) do
parse_subtypes(values, %{opts | subtype: Tuple.to_list(subtypes)}, acc)
end
# One subtype per element: parse head, recurse on tails (prepend, then the
# empty-list clause reverses). NOTE(review): a failed element parse embeds
# {:error, msg} in the result list rather than failing the whole parse.
defp parse_subtypes([hv | tv] = values, %{subtype: [hs | ts] = subtypes} = opts, acc)
when length(values) == length(subtypes) do
parse_subtypes(tv, %{opts | subtype: ts}, [parse(hv, hs, %{opts | type: hs})] ++ acc)
end
# Value list and subtype list lengths differ: report the mismatch.
defp parse_subtypes(values, %{subtype: subtypes} = opts, _acc) when is_list(subtypes) and is_list(values) do
error("value list (#{inspect(values)}) length #{length(values)} to " <>
"type list (#{inspect(subtypes)}) length #{length(subtypes)}", opts)
end
# Second, run built-in validators [:in_set, :in_range, :regex] and customs...
# Each check passes the value through unchanged on success; an
# {:error, msg} produced by any stage is propagated by the later stages.
defp validity_check(value, opts) when is_map(opts) do
value
|> check_type(opts)
|> check_set(opts)
|> check_range(opts)
|> check_regex(opts)
|> check_custom(opts)
|> check_required(opts)
end
# Required checks
# Runs last: a nil surviving all other checks means neither the
# environment nor the :default option supplied a value.
defp check_required(nil, %{required: true} = opts) do
error("value is required and no default was provided", opts)
end
# Propagate earlier failures untouched.
defp check_required({:error, _} = error, _opts) do
error
end
defp check_required(value, _opts) do
value
end
# Final result type checks.
# Verifies that the parsed value (or each element of a parsed list/tuple)
# matches the :type / :subtype options. {:error, _} inputs short-circuit.

# nil (no value, no default) passes through; check_required decides later.
defp check_type(nil, _opts) do
  nil
end

# Propagate an upstream parse error untouched.
defp check_type({:error, _} = error, _opts) do
  error
end

# An empty list trivially satisfies any subtype spec.
defp check_type([], _opts) do
  []
end

# A single subtype atom applies to every element: expand it to a list of
# matching length and re-dispatch.
defp check_type(list, %{subtype: subtype} = opts) when is_list(list) and subtype in @valid_subtypes do
  check_type(list, %{opts | subtype: List.duplicate(subtype, length(list))})
end

# Element-wise check: head value against head subtype, recurse on tails.
defp check_type([hv | tv], %{subtype: [hs | ts], type: type} = opts) when length(ts) == length(tv) do
  if safe_type_of(hv) == hs do
    case check_type(tv, %{opts | subtype: ts}) do
      list when is_list(list) -> [hv] ++ list
      error -> error
    end
  else
    error("value (#{inspect(hv)}) subtype (#{inspect(hs)}) mismatch in #{inspect(type)}", opts)
  end
end

# Subtype list / value list lengths differ: report the mismatch.
# (The length/1 guard fails harmlessly for non-lists, skipping this clause.)
defp check_type(values, %{subtype: types} = opts) when length(types) != length(values) do
  error("value list (#{inspect(values)}) length to " <>
    "type list (#{inspect(types)}) length mismatch in #{inspect(types)}", opts)
end

# Tuple with a single subtype atom. BUGFIX: the previous pattern `{} = value`
# matched ONLY the empty tuple, so tuples were never element-checked. Match
# any tuple, check it as a list, then restore the tuple shape.
defp check_type(value, %{subtype: subtype, type: :tuple} = opts)
     when is_tuple(value) and subtype in @valid_subtypes do
  case value |> Tuple.to_list() |> check_type(opts) do
    list when is_list(list) -> List.to_tuple(list)
    error -> error
  end
end

# Tuple with a tuple-of-subtypes spec (one subtype per element); same BUGFIX
# as above -- `%{subtype: {} = match}` only ever matched an empty spec.
defp check_type(value, %{subtype: match, type: :tuple} = opts)
     when is_tuple(value) and is_tuple(match) do
  case value |> Tuple.to_list() |> check_type(%{opts | subtype: Tuple.to_list(match)}) do
    list when is_list(list) -> List.to_tuple(list)
    error -> error
  end
end

# Scalar check: the inferred type must equal the requested type (a charlist
# is represented as a list, so accept that pairing too).
defp check_type(value, %{type: type} = opts) when type in @valid_types do
  Logger.debug("Validating type of #{inspect(value)}")
  checked_type = safe_type_of(value)

  if checked_type == type || (checked_type == :list && type == :charlist) do
    value
  else
    error("value (#{inspect(value)}) type (#{inspect(checked_type)}) is not type specified (#{inspect(type)})", opts)
  end
end

# No recognised :type option: nothing to validate.
defp check_type(value, _opts) do
  value
end
# Run custom validators.
# The :custom option is either a single {Module, function} pair or a list
# of them. Each validator is called as module.function(value, opts) and its
# return value becomes the new value (an {:error, _} halts checking).

# Nothing to validate.
defp check_custom(nil, _opts) do
  nil
end

# Propagate earlier failures untouched.
defp check_custom({:error, _} = error, _opts) do
  error
end

# A list of validators: fold the value through each in order.
defp check_custom(value, %{custom: custom_validators} = opts)
     when is_list(custom_validators) do
  iterate_customs(value, opts, custom_validators)
end

# A single {Module, function} validator.
# BUGFIX: Code.ensure_compiled?/1 is deprecated (removed in current Elixir);
# use Code.ensure_loaded?/1 plus function_exported?/3. The old `&&`-chain
# also misreported a validator legitimately returning nil/false as
# "validator missing" -- the explicit `if` keeps such results intact.
defp check_custom(value, %{custom: {module, function}} = opts)
     when is_atom(module) and is_atom(function) do
  if Code.ensure_loaded?(module) and function_exported?(module, function, 2) do
    Kernel.apply(module, function, [value, opts])
  else
    error("custom validator module #{module} or function #{function} missing", opts)
  end
end

# No :custom option (or unrecognised shape): pass the value through.
defp check_custom(value, _opts) do
  value
end

# Apply each validator in turn, threading the (possibly transformed) value.
defp iterate_customs(value, _opts, []) do
  value
end

defp iterate_customs(value, opts, [custom | the_rest]) do
  value
  |> check_custom(%{opts | custom: custom})
  |> iterate_customs(opts, the_rest)
end
## Check must-be-in-a-set validators for sets of atoms, numbers, strings, etc.

# Propagate earlier failures untouched.
defp check_set({:error, _} = error, _opts) do
  error
end

# The parsed value must be a member of the :in_set list.
# BUGFIX: interpolate with inspect/1 -- plain "#{value}" raises
# Protocol.UndefinedError for tuples (and mangles charlists), crashing the
# error path instead of reporting it.
defp check_set(value, %{in_set: set} = opts) when is_list(set) do
  if Enum.member?(set, value) do
    value
  else
    error("value #{inspect(value)} not in options #{inspect(set)}", opts)
  end
end

defp check_set(value, _opts) do
  value
end

## Check ordinal range validators...
defp check_range({:error, _} = error, _opts) do
  error
end

# The parsed value must fall inside the :in_range enumerable (e.g. 1..10).
defp check_range(value, %{in_range: range} = opts) do
  if Enum.member?(range, value) do
    value
  else
    error("value #{inspect(value)} not in specified range #{inspect(range)}", opts)
  end
end

defp check_range(value, _opts) do
  value
end

## Check regex validators...
defp check_regex({:error, _} = error, _opts) do
  error
end

# The value (stringified) must match the :regex option; intended for
# :string-typed variables.
defp check_regex(value, %{regex: regex} = opts) do
  if Regex.match?(regex, to_string(value)) do
    value
  else
    error("value #{inspect(value)} does not match regex #{inspect(regex)}", opts)
  end
end

defp check_regex(value, _opts) do
  value
end
end
|
lib/runtime/config/helper.ex
| 0.577376
| 0.567697
|
helper.ex
|
starcoder
|
defmodule DataMorph.Csv do
  @moduledoc ~S"""
  Functions for converting enumerable, or string of CSV to stream of rows.
  """

  # Default column separator; used by the 1-arity entry point and as the
  # fallback when an options list omits :separator.
  @default_separator ","

  @doc ~S"""
  Parse `csv` string, stream, or enumerable to stream of rows.

  ## Examples

  Convert blank string to empty headers and empty stream.

      iex> {headers, rows} = DataMorph.Csv.to_headers_and_rows_stream("")
      ...> rows
      ...> |> Enum.to_list
      []
      ...> headers
      []

  Map a string of lines separated by \n to headers, and a stream of rows as
  lists:

      iex> {headers, rows} = "name,iso\n" <>
      ...> "New Zealand,nz\n" <>
      ...> "United Kingdom,gb"
      ...> |> DataMorph.Csv.to_headers_and_rows_stream
      ...> rows
      ...> |> Enum.to_list
      [
      ["New Zealand","nz"],
      ["United Kingdom","gb"]
      ]
      ...> headers
      ["name","iso"]

  Map a stream of lines separated by \n to headers, and a stream of rows as
  lists:

      iex> {headers, rows} = "name,iso\n" <>
      ...> "New Zealand,nz\n" <>
      ...> "United Kingdom,gb"
      ...> |> String.split("\n")
      ...> |> Stream.map(& &1)
      ...> |> DataMorph.Csv.to_headers_and_rows_stream
      ...> rows
      ...> |> Enum.to_list
      [
      ["New Zealand","nz"],
      ["United Kingdom","gb"]
      ]
      ...> headers
      ["name","iso"]

  Map a string of tab-separated lines separated by \n to headers, and a stream
  of rows as lists:

      iex> {headers, rows} = "name\tiso\n" <>
      ...> "New Zealand\tnz\n" <>
      ...> "United Kingdom\tgb"
      ...> |> DataMorph.Csv.to_headers_and_rows_stream(separator: ?\t)
      ...> rows
      ...> |> Enum.to_list
      [
      ["New Zealand","nz"],
      ["United Kingdom","gb"]
      ]
      ...> headers
      ["name","iso"]
  """
  def to_headers_and_rows_stream(csv) do
    to_headers_and_rows_stream(csv, separator: @default_separator)
  end

  # A binary is split into lines first, then handled as an enumerable.
  def to_headers_and_rows_stream(csv, options) when is_binary(csv) do
    csv
    |> String.split("\n")
    |> to_headers_and_rows_stream(options)
  end

  # Enumerable of lines: first line supplies the headers, the rest the rows.
  def to_headers_and_rows_stream(stream, options) do
    # BUGFIX: default the separator when the caller passes options without
    # :separator; previously `nil` leaked through to CSV.decode!/2.
    separator = Keyword.get(options, :separator, @default_separator)
    first_line = stream |> Enum.at(0)
    headers = first_line |> to_headers(separator)
    rows = stream |> to_rows(separator, first_line)
    {headers, rows}
  end

  # A blank first line means no headers at all.
  defp to_headers("", _), do: []

  # Decode the single header line and return its list of column names.
  defp to_headers(line, separator) do
    [line]
    |> decode(separator)
    |> Enum.at(0)
  end

  # Drop the leading header line(s), then decode the remainder lazily.
  defp to_rows(stream, separator, first_line) do
    stream
    |> Stream.drop_while(&(&1 == first_line))
    |> decode(separator)
  end

  # The comma default matches the CSV library default, so it is passed
  # without options; any other separator is forwarded explicitly.
  defp decode(stream, ","), do: stream |> CSV.decode!()
  defp decode(stream, separator), do: stream |> CSV.decode!(separator: separator)
end
|
lib/data_morph/csv.ex
| 0.804713
| 0.409959
|
csv.ex
|
starcoder
|
defmodule Pandora.Parse do
  @moduledoc """
  Recursive-descent XML parser.

  All parsing functions thread a parser state `p` (anything implementing the
  `Parser` protocol) and return either `{p, result}` for infallible steps or
  `{:ok, {p, result}} | {:error, reason}` for fallible ones.
  """
  require Pandora.Data, as: Data
  require QueueWrapper, as: Queue

  @type result(t) :: {:ok, t} | {:error, any}
  @type parse_hint :: :text | :element | :cdata | :comment | :closing_node | :end_of_input

  defprotocol Parser do
    @spec move_cursor(parser :: any, amount :: pos_integer()) :: any
    def move_cursor(parser, amount)

    @spec slice_from_cursor_offset(
            parser :: any,
            offset :: pos_integer(),
            amount :: pos_integer()
          ) :: {any, String.t()}
    def slice_from_cursor_offset(parser, offset, amount)

    @spec lookup_from_cursor(parser :: any, index :: pos_integer()) :: {any, String.t()}
    def lookup_from_cursor(parser, index)

    @spec has_finished(parser :: any) :: {any, boolean}
    def has_finished(parser)
  end

  # Convenience: slice starting exactly at the cursor.
  def slice_from_cursor(p, amount) do
    Parser.slice_from_cursor_offset(p, 0, amount)
  end

  defmodule FromString do
    # In-memory parser backend: whole input held as one string plus a cursor.
    defstruct [:input, :cursor]

    def init(input) do
      %__MODULE__{input: input, cursor: 0}
    end
  end

  defimpl Parser, for: FromString do
    def move_cursor(%FromString{} = p, amount) do
      %FromString{p | cursor: p.cursor + amount}
    end

    def slice_from_cursor_offset(%FromString{} = p, offset, amount) do
      {p, String.slice(p.input, offset + p.cursor, amount)}
    end

    def lookup_from_cursor(%FromString{} = p, index) do
      {p, String.at(p.input, p.cursor + index)}
    end

    def has_finished(%FromString{} = p) do
      {p, p.cursor >= String.length(p.input)}
    end

    # NOTE(review): close/1 is not part of the Parser protocol; kept as-is.
    def close(_), do: nil
  end

  # Parse a complete XML document from an in-memory string.
  @spec from_string(string :: String.t()) :: result(Data.document())
  def from_string(string) do
    parser = FromString.init(string)

    with {:ok, {_parser, document}} <- parse_document(parser) do
      {:ok, document}
    end
  end

  # Top level: optional <?xml ...?> declaration, leading comments, optional
  # DOCTYPE, then the document's nodes.
  @spec parse_document(parser) :: result({parser, Data.document()}) when parser: any
  defp parse_document(parser) do
    with parser = skip_whitespace(parser),
         {:ok, {parser, declaration}} <- maybe_parse_declaration(parser),
         parser = skip_whitespace(parser),
         {:ok, {parser, comments}} <- parse_comments(parser),
         {:ok, {parser, doctype}} <- maybe_parse_doctype(parser),
         parser = skip_whitespace(parser),
         {:ok, {parser, nodes}} <- parse_nodes(parser) do
      nodes = Queue.join(comments, nodes)
      {:ok, {parser, Data.document(doctype: doctype, declaration: declaration, nodes: nodes)}}
    end
  end

  # Parse an optional "<?xml version=... encoding=... standalone=...?>"
  # declaration. Returns {:ok, {p, nil}} when the input does not start "<?".
  @spec maybe_parse_declaration(p) :: result({p, Data.doctype() | nil}) when p: any
  defp maybe_parse_declaration(p) do
    parse_xml_symbol = fn p ->
      case slice_from_cursor(p, 3) do
        {p, "xml"} ->
          p = Parser.move_cursor(p, 3)
          p = skip_whitespace(p)
          {:ok, p}

        {p, _} ->
          {:error, {:xml_declartion, p, :expected_xml_symbol_in_declaraction}}
      end
    end

    parse_declaraction_attributes = fn p ->
      supported_encodings = %{"UTF-8" => :utf8}
      standalone_swap = %{"YES" => true, "NO" => false}

      with {:ok, {p, attributes}} <- parse_attributes(p) do
        {v, attributes} = Map.pop(attributes, "version")
        {e, attributes} = Map.pop(attributes, "encoding")
        {s, attributes} = Map.pop(attributes, "standalone")
        e = if e != nil, do: String.upcase(e), else: nil
        s = if s != nil, do: String.upcase(s), else: nil

        if e == nil || Map.has_key?(supported_encodings, e) do
          if s == nil || Map.has_key?(standalone_swap, s) do
            e = Map.get(supported_encodings, e)
            s = Map.get(standalone_swap, s)
            # fails if the any attributes remain, since they're not
            # part of the XML standard.
            if attributes == %{},
              do: {:ok, {p, v, e, s}},
              else: {:error, {:xml_declartion, p, {:unexpected_attributes, attributes}}}
          else
            {:error, {:xml_declartion, p, {:invalid, e}}}
          end
        else
          {:error, {:xml_declartion, p, {:unsupported_encoding, e}}}
        end
      end
    end

    parse_closing_symbol = fn p ->
      case slice_from_cursor(p, 2) do
        {p, "?>"} ->
          p = skip_whitespace(p)
          p = Parser.move_cursor(p, 2)
          {:ok, p}

        {p, _} ->
          {:error, {:xml_declartion, p, :expected_xml_declaration_closing_tag}}
      end
    end

    case slice_from_cursor(p, 2) do
      {p, "<?"} ->
        p = Parser.move_cursor(p, 2)
        p = skip_whitespace(p)

        with {:ok, p} <- parse_xml_symbol.(p),
             {:ok, {p, v, e, s}} <- parse_declaraction_attributes.(p),
             {:ok, p} <- parse_closing_symbol.(p) do
          {:ok, {p, Data.declaration(version: v, standalone: s, encoding: e)}}
        end

      {p, _} ->
        {:ok, {p, nil}}
    end
  end

  # Parse an optional "<!DOCTYPE root [dtds...]>" header. Returns
  # {:ok, {p, nil}} for comments, CDATA, or anything that is not a DOCTYPE.
  @spec maybe_parse_doctype(p) :: result({p, Data.doctype() | nil}) when p: any
  defp maybe_parse_doctype(p) do
    parse_doctype_symbol = fn p ->
      {p, slice} = slice_from_cursor(p, 7)

      if String.upcase(slice) == "DOCTYPE" do
        p = Parser.move_cursor(p, 7)
        {:ok, p}
      else
        {:error, {:doctype_error, p, :expected_doctype_symbol}}
      end
    end

    parse_root_node_name = fn p ->
      {p, name} = parse_identifer_token(p)

      if name != "" do
        {:ok, {p, name}}
      else
        {:error, {:doctype_error, p, :expected_root_node_name}}
      end
    end

    # Scan an inline DTD ("[ ... ]") as raw text, preserving any embedded
    # comments verbatim.
    parse_inlined_dtd_impl = fn f, p, chunks, offset ->
      case Parser.slice_from_cursor_offset(p, offset, 1) do
        {p, "]"} ->
          {p, last_chunk} = slice_from_cursor(p, offset)
          p = Parser.move_cursor(p, offset + 1)
          {:ok, {p, join_in_reverse([last_chunk | chunks])}}

        {p, _} ->
          case Parser.slice_from_cursor_offset(p, offset, 4) do
            {p, "<!--"} ->
              {p, text_chunk} = slice_from_cursor(p, offset)
              p = Parser.move_cursor(p, offset)

              with {:ok, {p, {:comment, comment_body}}} <- parse_comment(p) do
                comment_chunk = "<!--#{comment_body}-->"
                chunks = [comment_chunk, text_chunk | chunks]
                f.(f, p, chunks, 0)
              end

            {p, _} ->
              f.(f, p, chunks, offset + 1)
          end
      end
    end

    parse_inlined_dtd = fn p ->
      p = Parser.move_cursor(p, 1)
      parse_inlined_dtd_impl.(parse_inlined_dtd_impl, p, [], 0)
    end

    # Collect any number of inline / SYSTEM / PUBLIC DTD references.
    maybe_parse_dtds = fn f, p, dtds ->
      case slice_from_cursor(p, 1) do
        {p, "["} ->
          with {:ok, {p, inlined}} <- parse_inlined_dtd.(p) do
            f.(f, p, [{:inlined, inlined} | dtds])
          end

        {p, _} ->
          {p, modifier} = parse_identifer_token(p)

          case String.upcase(modifier) do
            "" ->
              {:ok, {p, Enum.reverse(dtds)}}

            "SYSTEM" ->
              p = skip_whitespace(p)

              with {:ok, {p, dtd_url}} <- parse_quoted_string(p) do
                p = skip_whitespace(p)
                f.(f, p, [{:system, dtd_url} | dtds])
              end

            "PUBLIC" ->
              p = skip_whitespace(p)

              with {:ok, {p, dtd_location}} <- parse_quoted_string(p),
                   p = skip_whitespace(p),
                   {:ok, {p, dtd_url}} <- parse_quoted_string(p) do
                p = skip_whitespace(p)
                f.(f, p, [{:public, dtd_location, dtd_url} | dtds])
              end

            other ->
              {:error, {:doctype_error, p, {:unexpected_dtd_start_token, other, dtds}}}
          end
      end
    end

    parse_doctype_closing_token = fn p ->
      {p, token} = slice_from_cursor(p, 1)

      if token == ">" do
        p = Parser.move_cursor(p, 1)
        {:ok, p}
      else
        {:error, {:doctype_error, p, :expected_end_of_doctype}}
      end
    end

    case slice_from_cursor(p, 2) do
      {p, "<!"} ->
        case Parser.slice_from_cursor_offset(p, 2, 2) do
          {p, "--"} ->
            {:ok, {p, nil}}

          {p, "[C"} ->
            {:ok, {p, nil}}

          {p, _} ->
            p = Parser.move_cursor(p, 2)
            p = skip_whitespace(p)

            with {:ok, p} <- parse_doctype_symbol.(p),
                 p = skip_whitespace(p),
                 {:ok, {p, root_node}} <- parse_root_node_name.(p),
                 p = skip_whitespace(p),
                 {:ok, {p, dtds}} <- maybe_parse_dtds.(maybe_parse_dtds, p, []),
                 {:ok, p} <- parse_doctype_closing_token.(p) do
              {:ok, {p, Data.doctype(root_node: root_node, dtds: dtds)}}
            end
        end

      {p, _} ->
        {:ok, {p, nil}}
    end
  end

  # Parse a sequence of sibling nodes until a closing tag or end of input,
  # dispatching on the lookahead hint.
  @spec parse_nodes(p) :: result({p, Queue.t()}) when p: any
  defp parse_nodes(p) do
    recursive = fn f, p, nodes ->
      with_parsing_fn = fn p, parse_fn ->
        with {:ok, {p, node}} <- parse_fn.(p) do
          nodes = Queue.in_rear(node, nodes)
          f.(f, p, nodes)
        end
      end

      case parse_hint(p) do
        {:ok, {p, :element}} ->
          with_parsing_fn.(p, &parse_element/1)

        {:ok, {p, :text}} ->
          with_parsing_fn.(p, &parse_text/1)

        {:ok, {p, :comment}} ->
          with_parsing_fn.(p, &parse_comment/1)

        {:ok, {p, :cdata}} ->
          with_parsing_fn.(p, &parse_cdata/1)

        {:ok, {p, :closing_node}} ->
          {:ok, {p, nodes}}

        {:ok, {p, :end_of_input}} ->
          {:ok, {p, nodes}}
      end
    end

    recursive.(recursive, p, Queue.new())
  end

  # Parse one element: "<ns:name attrs/>" or "<ns:name attrs>children</ns:name>".
  @spec parse_element(p) :: result({p, Data.xml_node()}) when p: any
  defp parse_element(p) do
    consume_alpha_token = fn p ->
      consume_while(p, fn c ->
        c != " " && c != ":" && c != nil && c != "/" && c != ">"
      end)
    end

    # "a:b" yields {p, name, namespace}; bare "a" yields {p, a, nil}.
    parse_tag_name = fn p ->
      {p, token_one} = consume_alpha_token.(p)
      p = skip_whitespace(p)

      case slice_from_cursor(p, 1) do
        {p, ":"} ->
          # skip the ':'
          p = Parser.move_cursor(p, 1)
          p = skip_whitespace(p)
          {p, token_two} = consume_alpha_token.(p)
          {p, token_two, token_one}

        {p, _} ->
          {p, token_one, nil}
      end
    end

    # skip the '<'
    p = Parser.move_cursor(p, 1)
    p = skip_whitespace(p)
    {p, tag_name, namespace} = parse_tag_name.(p)
    p = skip_whitespace(p)

    with {:ok, {p, attributes}} <- parse_attributes(p) do
      p = skip_whitespace(p)

      case Parser.lookup_from_cursor(p, 0) do
        {p, "/"} ->
          # Self-closing element: no children.
          p = Parser.move_cursor(p, 1)
          p = skip_whitespace(p)

          with {:ok, p} <- skip_these_chunks(p, [">"]) do
            {:ok,
             {p,
              Data.element(
                name: tag_name,
                namespace: namespace,
                attributes: attributes
              )}}
          end

        {p, ">"} ->
          # Open tag: parse children, then require the matching close tag.
          p = Parser.move_cursor(p, 1)
          p = skip_whitespace(p)

          chunks =
            if namespace == nil,
              do: ["</", tag_name, ">"],
              else: ["</", namespace, ":", tag_name, ">"]

          with {:ok, {p, nodes}} <- parse_nodes(p),
               {:ok, p} <- skip_these_chunks(p, chunks) do
            {:ok,
             {p,
              Data.element(
                name: tag_name,
                namespace: namespace,
                attributes: attributes,
                children: nodes
              )}}
          end

        {_p, _c} ->
          {:error, :expected_end_of_node}
      end
    end
  end

  # Consume characters until the lookahead hint stops being :text.
  @spec parse_text(p) :: result({p, Data.xml_node()}) when p: any
  defp parse_text(p) do
    recursive = fn f, p, index ->
      case parse_hint(p, index) do
        {:ok, {p, :text}} ->
          f.(f, p, index + 1)

        {:ok, _} ->
          {p, text} = slice_from_cursor(p, index)
          p = Parser.move_cursor(p, index)
          {:ok, {p, Data.text(value: text)}}
      end
    end

    recursive.(recursive, p, 1)
  end

  # Parse "<!-- body -->"; the cursor is expected at the "<!--".
  @spec parse_comment(p) :: result({p, Data.xml_node()}) when p: any
  defp parse_comment(p) do
    p = Parser.move_cursor(p, 4)

    recursive = fn f, p, index ->
      case Parser.slice_from_cursor_offset(p, index, 3) do
        {p, "-->"} ->
          {p, body} = slice_from_cursor(p, index)
          p = Parser.move_cursor(p, index + 3)
          {:ok, {p, Data.comment(body: body)}}

        {p, chunk} ->
          # A short chunk means the input ended before the comment closed.
          if String.length(chunk) == 3 do
            f.(f, p, index + 1)
          else
            {:error, :unexpected_end_of_comment}
          end
      end
    end

    recursive.(recursive, p, 0)
  end

  # Parse zero or more consecutive comments (whitespace-separated).
  @spec parse_comments(p) :: result({p, Queue.t()}) when p: any
  defp parse_comments(p) do
    recursive = fn f, p, q ->
      case parse_hint(p) do
        {:ok, {p, :comment}} ->
          with {:ok, {p, comment}} <- parse_comment(p) do
            q = Queue.in_rear(comment, q)
            p = skip_whitespace(p)
            f.(f, p, q)
          end

        {:ok, {p, _}} ->
          {:ok, {p, q}}
      end
    end

    recursive.(recursive, p, Queue.new())
  end

  # Parse a CDATA section; adjacent "<![CDATA[" sections are merged into one
  # value so that escaped "]]>" split-encoding round-trips.
  @spec parse_cdata(p) :: result({p, Data.xml_node()}) when p: any
  defp parse_cdata(p) do
    # skip the <![CDATA[
    p = Parser.move_cursor(p, 9)

    recursive = fn f, p, read_chunks, read_from, read_offset ->
      read_start = read_from + read_offset

      case Parser.slice_from_cursor_offset(p, read_start, 3) do
        {p, "]]>"} ->
          {p, chunk} = Parser.slice_from_cursor_offset(p, read_from, read_offset)
          read_chunks = [chunk | read_chunks]

          case Parser.slice_from_cursor_offset(p, read_start + 3, 9) do
            {p, "<![CDATA["} ->
              read_from = read_start + 3 + 9
              f.(f, p, read_chunks, read_from, 0)

            {p, _} ->
              # move the cursor to the end of the CDATA
              p = Parser.move_cursor(p, read_start + 3)
              combined_chunks = join_in_reverse(read_chunks)
              {:ok, {p, Data.cdata(value: combined_chunks, encoded: true)}}
          end

        {p, _} ->
          f.(f, p, read_chunks, read_from, read_offset + 1)
      end
    end

    recursive.(recursive, p, [], 0, 0)
  end

  @spec parse_attributes(p) :: result({p, Data.attrs()}) when p: any
  defp parse_attributes(p) do
    parse_attributes(p, %{})
  end

  # Parse zero or more key="value" pairs; "ns:key" keys become {ns, key}
  # tuples. Duplicate keys are an error.
  @spec parse_attributes(p, attributes :: Data.attrs()) :: result({p, Data.attrs()}) when p: any
  defp parse_attributes(p, attributes) do
    consume_alpha_token = fn p ->
      consume_while(p, &(&1 != nil && is_alphanum(&1)))
    end

    parse_attribute_name = fn p ->
      {p, token_one} = consume_alpha_token.(p)
      p = skip_whitespace(p)

      case slice_from_cursor(p, 1) do
        {p, ":"} ->
          # skip the ':'
          p = Parser.move_cursor(p, 1)
          p = skip_whitespace(p)
          {p, token_two} = consume_alpha_token.(p)
          {p, {token_one, token_two}}

        {p, _} ->
          {p, token_one}
      end
    end

    {p, next_char} = Parser.lookup_from_cursor(p, 0)

    # may want to skip whitespace if
    # this isn't the first attribute.
    p =
      if attributes != %{},
        do: skip_whitespace(p),
        else: p

    if is_alpha(next_char) do
      {p, key} = parse_attribute_name.(p)

      if !Map.has_key?(attributes, key) do
        p = skip_whitespace(p)

        with {:ok, p} <- skip_these_chunks(p, ["="]),
             {:ok, {p, value}} <- parse_quoted_string(p) do
          parse_attributes(p, Map.put(attributes, key, value))
        end
      else
        {:error, {:attributes, :duplicate_attribute, key}}
      end
    else
      {:ok, {p, attributes}}
    end
  end

  @spec parse_hint(p) :: result({p, parse_hint}) when p: any
  def parse_hint(p) do
    parse_hint(p, 0)
  end

  # Classify what starts at cursor+start_from without consuming input:
  # :comment, :cdata, :closing_node, :element, :text or :end_of_input.
  @spec parse_hint(p, start_from :: non_neg_integer) :: result({p, parse_hint}) when p: any
  def parse_hint(p, start_from) do
    case Parser.lookup_from_cursor(p, 0 + start_from) do
      {p, nil} ->
        {:ok, {p, :end_of_input}}

      {p, "<"} ->
        case Parser.lookup_from_cursor(p, 1 + start_from) do
          {p, "!"} ->
            case Parser.lookup_from_cursor(p, 2 + start_from) do
              {p, "-"} ->
                # BUGFIX: must inspect index 3 for the SECOND dash of
                # "<!--"; the original re-checked index 2, so "<!-x" was
                # hinted as a comment and then misparsed.
                case Parser.lookup_from_cursor(p, 3 + start_from) do
                  {p, "-"} -> {:ok, {p, :comment}}
                  {p, _} -> {:ok, {p, :text}}
                end

              {p, "["} ->
                case are_these_the_next_chunks(p, ["CDATA["], 3 + start_from) do
                  {p, true, _} -> {:ok, {p, :cdata}}
                  {p, false, _} -> {:ok, {p, :text}}
                end

              {p, _} ->
                {:ok, {p, :text}}
            end

          {p, _} ->
            # Skip the legal whitespace before the character
            # without consuming it in the hint making function
            {p, bump} = amount_til_end_of_whitespace(p, 1 + start_from)

            case Parser.lookup_from_cursor(p, start_from + bump) do
              {p, "/"} ->
                {:ok, {p, :closing_node}}

              {p, "_"} ->
                {:ok, {p, :element}}

              {p, char} ->
                case is_alpha(char) do
                  true -> {:ok, {p, :element}}
                  false -> {:ok, {p, :text}}
                end
            end
        end

      {p, _} ->
        {:ok, {p, :text}}
    end
  end

  # Consume an identifier token (stops at space, '/', '>' or end of input).
  @spec parse_identifer_token(p) :: {p, String.t()} when p: any
  def parse_identifer_token(p) do
    consume_while(p, fn c ->
      c != " " && c != nil && c != "/" && c != ">"
    end)
  end

  # Consume a double-quoted string, returning its unquoted contents.
  @spec parse_quoted_string(p) :: result({p, String.t()}) when p: any
  def parse_quoted_string(p) do
    with {:ok, p} <- skip_these_chunks(p, ["\""]),
         {p, value} = consume_while(p, &(&1 != nil && &1 != "\"")),
         {:ok, p} <- skip_these_chunks(p, ["\""]) do
      {:ok, {p, value}}
    end
  end

  # Require the given chunks (whitespace-tolerant between them) and advance
  # past them, or fail without consuming anything.
  @spec skip_these_chunks(p, chunks :: [String.t()]) :: result(p) when p: any
  def skip_these_chunks(p, chunks) do
    {p, result, move_cursor_by} = are_these_the_next_chunks(p, chunks, 0)

    if result do
      p = Parser.move_cursor(p, move_cursor_by)
      {:ok, p}
    else
      {:error, {:unexpected_chunks, p, chunks}}
    end
  end

  # Non-consuming lookahead: do the chunks appear next (whitespace allowed
  # between them)? Returns the offset just past the match on success.
  @spec are_these_the_next_chunks(
          p,
          chunks :: nonempty_list(String.t()),
          offset :: non_neg_integer
        ) :: {p, boolean, non_neg_integer}
        when p: any
  def are_these_the_next_chunks(p, [], offset) do
    {p, true, offset}
  end

  def are_these_the_next_chunks(p, [head_chunk | remaining_chunks], offset) do
    chunk_length = String.length(head_chunk)
    {p, read_chunk} = Parser.slice_from_cursor_offset(p, offset, chunk_length)

    if read_chunk == head_chunk do
      {p, offset} = amount_til_end_of_whitespace(p, offset + chunk_length)
      are_these_the_next_chunks(p, remaining_chunks, offset)
    else
      {p, false, offset}
    end
  end

  # Count contiguous whitespace starting at cursor+index (non-consuming).
  @spec amount_til_end_of_whitespace(parser :: any, index :: non_neg_integer) ::
          {any, non_neg_integer}
  def amount_til_end_of_whitespace(p, index) do
    {p, character} = Parser.lookup_from_cursor(p, index)

    case character do
      " " -> amount_til_end_of_whitespace(p, index + 1)
      "\t" -> amount_til_end_of_whitespace(p, index + 1)
      "\n" -> amount_til_end_of_whitespace(p, index + 1)
      "\r" -> amount_til_end_of_whitespace(p, index + 1)
      "\f" -> amount_til_end_of_whitespace(p, index + 1)
      nil -> {p, index}
      _ -> {p, index}
    end
  end

  @spec amount_til_end_of_whitespace(parser :: any) :: {any, non_neg_integer}
  def amount_til_end_of_whitespace(p) do
    amount_til_end_of_whitespace(p, 0)
  end

  # Advance the cursor past any leading whitespace.
  @spec skip_whitespace(p) :: p when p: any
  def skip_whitespace(p) do
    {p, n} = amount_til_end_of_whitespace(p)
    Parser.move_cursor(p, n)
  end

  @spec consume_while(p, predicate :: (String.t() -> boolean)) :: {p, String.t()} when p: any
  defp consume_while(p, predicate) do
    consume_while(p, predicate, 0)
  end

  # Consume characters while predicate holds, returning the consumed slice
  # (empty string, cursor unmoved, when nothing matches).
  @spec consume_while(p, predicate :: (String.t() -> boolean), index :: non_neg_integer) ::
          {p, String.t()}
        when p: any
  defp consume_while(p, predicate, index) do
    {p, character} = Parser.lookup_from_cursor(p, index)

    cond do
      predicate.(character) ->
        consume_while(p, predicate, index + 1)

      index == 0 ->
        {p, ""}

      true ->
        {p, string} = slice_from_cursor(p, index)
        p = Parser.move_cursor(p, index)
        {p, string}
    end
  end

  # True for single-grapheme strings beginning with an ASCII letter.
  @spec is_alpha(character_s :: String.t()) :: boolean
  defp is_alpha(""), do: false

  defp is_alpha(character_s) do
    [character | _] = to_charlist(character_s)
    [lil_a, lil_z, big_a, big_z] = 'azAZ'

    cond do
      character >= lil_a && character <= lil_z -> true
      character >= big_a && character <= big_z -> true
      true -> false
    end
  end

  # Concatenate chunks accumulated in reverse (prepend-order) back into the
  # original order.
  @spec join_in_reverse(a :: list(String.t())) :: String.t()
  defp join_in_reverse(chunks) do
    Enum.reduce(chunks, "", &"#{&1}#{&2}")
  end

  # True for single-grapheme strings beginning with an ASCII letter or digit.
  @spec is_alphanum(character_s :: String.t()) :: boolean
  defp is_alphanum(""), do: false

  defp is_alphanum(character_s) do
    [character | _] = to_charlist(character_s)
    [lil_a, lil_z, big_a, big_z, zero, nine] = 'azAZ09'

    cond do
      character >= zero && character <= nine -> true
      character >= lil_a && character <= lil_z -> true
      character >= big_a && character <= big_z -> true
      true -> false
    end
  end
end
|
lib/pandora/parse.ex
| 0.796174
| 0.454533
|
parse.ex
|
starcoder
|
defmodule Grizzly.CommandClass.MultiChannelAssociation.Set do
@moduledoc """
Command module for working with MULTI_CHANNEL_ASSOCIATION_SET command.
Command Options:
* `:group` - The association group
* `:nodes` - List of node ids to receive messages about node events
* `:endpoints` - List of endpoints corresponding one-to-one to the nodes
* `:seq_number` - The sequence number used in the Z/IP packet
* `:retries` - The number of attempts to send the command (default 2)
"""
@behaviour Grizzly.Command
alias Grizzly.Packet
alias Grizzly.Command.{EncodeError, Encoding}
alias Grizzly.CommandClass.MultiChannelAssociation
@type t :: %__MODULE__{
group: byte,
nodes: MultiChannelAssociation.associated_nodes(),
endpoints: MultiChannelAssociation.endpoints(),
seq_number: Grizzly.seq_number(),
retries: non_neg_integer()
}
@type opts ::
{:group, byte}
| {:nodes, MultiChannelAssociation.associated_nodes()}
| {:endpoints, MultiChannelAssociation.endpoints()}
| {:seq_number, Grizzly.seq_number()}
| {:retries, non_neg_integer()}
defstruct group: nil,
nodes: [],
endpoints: [],
seq_number: nil,
retries: 2
# Build the command struct from the option list (struct/2 silently
# ignores unrecognised keys).
@spec init([opts]) :: {:ok, t}
def init(opts) do
{:ok, struct(__MODULE__, opts)}
end
# Encode to the on-wire binary: Z/IP header, command class 0x8E,
# command 0x01, group byte, node id bytes, marker byte, then the encoded
# node/endpoint pairs. Argument shapes are validated first; on failure
# Encoding.encode_and_validate_args/3 returns the EncodeError.
@spec encode(t) :: {:ok, binary} | {:error, EncodeError.t()}
def encode(
%__MODULE__{group: group, nodes: nodes, endpoints: endpoints, seq_number: seq_number} =
command
) do
with {:ok, _encoded} <-
Encoding.encode_and_validate_args(
command,
%{
group: {:range, 2, 255},
nodes: [:byte],
endpoints: [%{node_id: {:range, 1, 127}, endpoint: :byte}]
}
) do
encoded_nodes = :erlang.list_to_binary(nodes)
{:ok, encoded_node_endpoints} = MultiChannelAssociation.encode_endpoints(endpoints)
binary =
Packet.header(seq_number) <>
<<0x8E, 0x01, group>> <>
encoded_nodes <> <<MultiChannelAssociation.marker()>> <> encoded_node_endpoints
{:ok, binary}
end
end
@spec handle_response(t, Packet.t()) ::
{:continue, t()}
| {:done, {:error, :nack_response}}
| {:done, :ok}
| {:retry, t()}
| {:queued, t()}
# ACK with matching sequence number: the command succeeded.
def handle_response(
%__MODULE__{seq_number: seq_number},
%Packet{
seq_number: seq_number,
types: [:ack_response]
}
) do
{:done, :ok}
end
# NACK with no retries left: give up. This clause must precede the
# decrement clause below.
def handle_response(
%__MODULE__{seq_number: seq_number, retries: 0},
%Packet{
seq_number: seq_number,
types: [:nack_response]
}
) do
{:done, {:error, :nack_response}}
end
# NACK with retries remaining: decrement and resend.
def handle_response(
%__MODULE__{seq_number: seq_number, retries: n} = command,
%Packet{
seq_number: seq_number,
types: [:nack_response]
}
) do
{:retry, %{command | retries: n - 1}}
end
# NACK + nack_waiting: the destination node is sleeping. Queue the command
# when the packet advertises a wake-up delay, otherwise keep waiting.
def handle_response(
%__MODULE__{seq_number: seq_number} = command,
%Packet{
seq_number: seq_number,
types: [:nack_response, :nack_waiting]
} = packet
) do
if Packet.sleeping_delay?(packet) do
{:queued, command}
else
{:continue, command}
end
end
# Anything else (e.g. a non-matching sequence number): keep listening.
def handle_response(command, _), do: {:continue, command}
end
|
lib/grizzly/command_class/multi_channel_association/set.ex
| 0.861655
| 0.411613
|
set.ex
|
starcoder
|
defmodule Cldr.Number.System do
@moduledoc """
Functions to manage number systems which describe the numbering characteristics for a locale.
A number system defines the digits (if they exist in this number system) or
or rules (if the number system does not have decimal digits).
The system name is also used as a key to define the separators that are used
when formatting a number is this number_system. See
`Cldr.Number.Symbol.number_symbols_for/2`.
"""
alias Cldr.Locale
alias Cldr.Number.Symbol
alias Cldr.LanguageTag
alias Cldr.Math
@default_number_system_type :default
@type system_name :: atom()
@type types :: :default | :native | :traditional | :finance
defdelegate known_number_systems, to: Cldr
defdelegate known_number_system_types(backend), to: Cldr
@doc """
Return the default number system type name.
Currently this is `:default`. Note that this is
not the number system itself but the type of the
number system. It can be used to find the
default number system for a given locale with
`number_systems_for(locale)[default_number_system()]`.
## Example
iex> Cldr.Number.System.default_number_system_type
:default
"""
def default_number_system_type do
@default_number_system_type
end
@doc """
Return a map of all CLDR number systems and definitions.
## Example
iex> Cldr.Number.System.number_systems |> Enum.count
85
"""
@spec number_systems :: map()
@number_systems Cldr.Config.number_systems()
def number_systems do
@number_systems
end
@systems_with_digits Enum.reject(@number_systems, fn {_name, system} ->
is_nil(system[:digits])
end)
|> Map.new()
@doc """
Number systems that have their own digit characters defined.
"""
def systems_with_digits do
@systems_with_digits
end
@doc """
Returns the default number system from a language tag
or locale name.
## Arguments
* `locale` is any language tag returned by `Cldr.Locale.new/2`
or a locale name in the list returned by `Cldr.known_locale_names/1`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Returns
* A number system name as an atom
## Examples
iex> Cldr.Number.System.number_system_from_locale "en-US-u-nu-thai", MyApp.Cldr
:thai
iex> Cldr.Number.System.number_system_from_locale "en-US", MyApp.Cldr
:latn
"""
@spec number_system_from_locale(LanguageTag.t() | Cldr.Locale.locale_name(), Cldr.backend()) ::
        system_name
# The locale carries no explicit number system (no `u-nu` extension):
# fall back to the locale's default number system.
def number_system_from_locale(%LanguageTag{locale: %{number_system: nil}} = locale, backend) do
  systems = number_systems_for!(locale, backend)
  Map.fetch!(systems, default_number_system_type())
end

# The locale explicitly names a number system (e.g. "en-u-nu-thai").
def number_system_from_locale(%LanguageTag{locale: %{number_system: number_system}}, _backend) do
  number_system
end

# Any other language tag: resolve the default number system type.
def number_system_from_locale(%LanguageTag{} = locale, backend) do
  systems = number_systems_for!(locale, backend)
  Map.fetch!(systems, default_number_system_type())
end

# A string locale name is validated (and resolved to a LanguageTag) first.
def number_system_from_locale(locale_name, backend) when is_binary(locale_name) do
  with {:ok, locale} <- Cldr.validate_locale(locale_name, backend) do
    number_system_from_locale(locale, backend)
  end
end
@doc """
Returns the number system from a language tag
or locale name.
## Arguments
* `locale` is any language tag returned by `Cldr.Locale.new/2`
## Returns
* A number system name as an atom
## Examples
iex> {:ok, locale} = MyApp.Cldr.validate_locale("en-US-u-nu-thai")
iex> Cldr.Number.System.number_system_from_locale(locale)
:thai
iex> {:ok, locale} = MyApp.Cldr.validate_locale("en-US")
iex> Cldr.Number.System.number_system_from_locale locale
:latn
iex> Cldr.Number.System.number_system_from_locale("ar")
:arab
"""
# A bare locale name uses the default backend.
def number_system_from_locale(locale_name) when is_binary(locale_name) do
  number_system_from_locale(locale_name, Cldr.default_backend!())
end

# No explicit number system on the tag: resolve via its name and backend.
def number_system_from_locale(
      %LanguageTag{locale: %{number_system: nil}, cldr_locale_name: name, backend: backend}
    ) do
  number_system_from_locale(name, backend)
end

# The tag explicitly names a number system.
def number_system_from_locale(%LanguageTag{locale: %{number_system: number_system}}) do
  number_system
end

# Any other tag: delegate to the two-argument form.
def number_system_from_locale(%LanguageTag{cldr_locale_name: locale, backend: backend}) do
  number_system_from_locale(locale, backend)
end
@doc """
Returns the number systems available for a locale
or `{:error, message}` if the locale is not known.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by ``Cldr.Locale.new!/2``
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Examples
iex> Cldr.Number.System.number_systems_for "en"
{:ok, %{default: :latn, native: :latn}}
iex> Cldr.Number.System.number_systems_for "th"
{:ok, %{default: :latn, native: :thai}}
iex> Cldr.Number.System.number_systems_for "zz", TestBackend.Cldr
{:error, {Cldr.UnknownLocaleError, "The locale \\"zz\\" is not known."}}
"""
@spec number_systems_for(Cldr.Locale.locale_name() | LanguageTag.t(), Cldr.backend()) ::
        {:ok, map()} | {:error, {module(), String.t()}}
def number_systems_for(locale, backend) do
  # The lookup is delegated to the backend's generated Number.System module.
  backend_module = Module.concat(backend, Number.System)
  backend_module.number_systems_for(locale)
end
@doc false
# Variant of `number_systems_for/2` that uses the configured default backend.
# Parentheses added to the zero-arity remote call for consistency with the
# other `Cldr.default_backend!()` call sites in this module.
def number_systems_for(locale) when is_binary(locale) do
  number_systems_for(locale, Cldr.default_backend!())
end
@doc """
Returns the number systems available for a locale
or raises if the locale is not known.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by ``Cldr.Locale.new!/2``
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`. The default is `Cldr.default_backend!/0`.
## Examples
iex> Cldr.Number.System.number_systems_for! "en"
%{default: :latn, native: :latn}
iex> Cldr.Number.System.number_systems_for! "th", TestBackend.Cldr
%{default: :latn, native: :thai}
"""
@spec number_systems_for!(Cldr.Locale.locale_name() | LanguageTag.t(), Cldr.backend()) ::
        map() | no_return()
# Bang variant: unwraps the ok tuple or raises the returned exception.
def number_systems_for!(locale, backend) do
  case number_systems_for(locale, backend) do
    {:ok, systems} -> systems
    {:error, {exception, message}} -> raise exception, message
  end
end
@doc false
# Variant of `number_systems_for!/2` that uses the configured default backend.
def number_systems_for!(locale) when is_binary(locale) do
  number_systems_for!(locale, Cldr.default_backend!())
end
@doc """
Returns the actual number system from a number system type.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by ``Cldr.Locale.new!/2``
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Returns
* `{:ok, number_system_map}` or
* `{:error, {exception, reason}}`
## Notes
This function will decode a number system type into the actual
number system. If the number system provided can't be decoded
it is returned as is.
## Examples
iex> Cldr.Number.System.number_system_for "th", :latn, TestBackend.Cldr
{:ok, %{digits: "0123456789", type: :numeric}}
iex> Cldr.Number.System.number_system_for "en", :default, TestBackend.Cldr
{:ok, %{digits: "0123456789", type: :numeric}}
iex> Cldr.Number.System.number_system_for "he", :traditional, TestBackend.Cldr
{:ok, %{rules: "hebrew", type: :algorithmic}}
iex> Cldr.Number.System.number_system_for "en", :finance, TestBackend.Cldr
{
  :error,
  {Cldr.UnknownNumberSystemError,
   "The number system :finance is unknown for the locale named \\"en\\". Valid number systems are %{default: :latn, native: :latn}"}
}
iex> Cldr.Number.System.number_system_for "en", :native, TestBackend.Cldr
{:ok, %{digits: "0123456789", type: :numeric}}
"""
@spec number_system_for(
        Cldr.Locale.locale_name() | LanguageTag.t(),
        Cldr.Number.System.system_name(),
        Cldr.backend()
      ) ::
        {:ok, map()} | {:error, {module(), String.t()}}
def number_system_for(locale, system_name, backend) do
  # A failing clause falls through with its `{:error, reason}` unchanged,
  # so the previous identity `else` clause was redundant and is removed.
  with {:ok, locale} <- Cldr.validate_locale(locale, backend),
       {:ok, system_name} <- system_name_from(system_name, locale, backend) do
    {:ok, Map.get(number_systems(), system_name)}
  end
end
@doc """
Returns the names of the number systems available for
a locale or an `{:error, message}` tuple if the locale
is not known.
## Arguments
* `locale` is any locale returned by ``Cldr.Locale.new!/2`` or
a `Cldr.LanguageTag` struct
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Examples
iex> Cldr.Number.System.number_system_names_for("en", TestBackend.Cldr)
{:ok, [:latn]}
iex> Cldr.Number.System.number_system_names_for("th", TestBackend.Cldr)
{:ok, [:latn, :thai]}
iex> Cldr.Number.System.number_system_names_for("he", TestBackend.Cldr)
{:ok, [:latn, :hebr]}
iex> Cldr.Number.System.number_system_names_for("zz", TestBackend.Cldr)
{:error, {Cldr.UnknownLocaleError, "The locale \\"zz\\" is not known."}}
"""
@spec number_system_names_for(Cldr.Locale.locale_name() | LanguageTag.t(), Cldr.backend()) ::
        {:ok, list(atom())} | {:error, {module(), String.t()}}
def number_system_names_for(locale, backend) do
  # A failing clause falls through with its `{:error, reason}` unchanged,
  # so the previous identity `else` clause was redundant and is removed.
  with {:ok, locale} <- Cldr.validate_locale(locale, backend),
       {:ok, systems} <- number_systems_for(locale, backend) do
    {:ok, systems |> Map.values() |> Enum.uniq()}
  end
end
@doc """
Returns the names of the number systems available for
a locale or an `{:error, message}` tuple if the locale
is not known.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by ``Cldr.Locale.new!/2``
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Examples
iex> Cldr.Number.System.number_system_names_for!("en", TestBackend.Cldr)
[:latn]
iex> Cldr.Number.System.number_system_names_for!("th", TestBackend.Cldr)
[:latn, :thai]
iex> Cldr.Number.System.number_system_names_for!("he", TestBackend.Cldr)
[:latn, :hebr]
"""
@spec number_system_names_for!(Cldr.Locale.locale_name() | LanguageTag.t(), Cldr.backend()) ::
        list(system_name()) | no_return()
# Bang variant: unwraps the ok tuple or raises the returned exception.
def number_system_names_for!(locale, backend) do
  case number_system_names_for(locale, backend) do
    {:ok, names} -> names
    {:error, {exception, message}} -> raise exception, message
  end
end
@doc """
Returns a number system name for a given locale and number system reference.
## Arguments
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by ``Cldr.Locale.new!/2``
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Notes
Number systems can be referenced in one of two ways:
* As a number system type such as :default, :native, :traditional and
:finance. This allows references to a number system for a locale in a
consistent fashion for a given use
* With the number system name directly, such as :latn, :arab or any of the
other 70 or so
This function dereferences the supplied `system_name` and returns the
actual system name.
## Examples
iex> Cldr.Number.System.system_name_from(:default, "en", TestBackend.Cldr)
{:ok, :latn}
iex> Cldr.Number.System.system_name_from("latn", "en", TestBackend.Cldr)
{:ok, :latn}
iex> Cldr.Number.System.system_name_from(:native, "en", TestBackend.Cldr)
{:ok, :latn}
iex> Cldr.Number.System.system_name_from(:nope, "en", TestBackend.Cldr)
{
  :error,
  {Cldr.UnknownNumberSystemError, "The number system :nope is unknown"}
}
Note that return value is not guaranteed to be a valid
number system for the given locale as demonstrated in the third example.
"""
@spec system_name_from(system_name, Locale.locale_name() | LanguageTag.t(), Cldr.backend()) ::
        {:ok, atom()} | {:error, {module(), String.t()}}
def system_name_from(system_name, locale, backend) do
  # Doc fixes: "ex>" -> "iex>" (first doctest was silently disabled by the
  # typo), "WIth" -> "With". Redundant identity `else` clause removed: a
  # failing `with` clause already falls through unchanged.
  with {:ok, locale} <- Cldr.validate_locale(locale, backend),
       {:ok, number_system} <- validate_number_system_or_type(system_name, backend),
       {:ok, number_systems} <- number_systems_for(locale, backend) do
    cond do
      # A type (e.g. :default) is a key in the locale's systems map;
      # dereference it to the concrete system name.
      Map.has_key?(number_systems, number_system) ->
        {:ok, Map.get(number_systems, number_system)}

      # Already a concrete system name valid for this locale.
      number_system in Map.values(number_systems) ->
        {:ok, number_system}

      true ->
        {:error, unknown_number_system_for_locale_error(system_name, locale, number_systems)}
    end
  end
end
@doc """
Returns a number system name for a given locale and number system reference
and raises if the number system is not available for the given locale.
## Arguments
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by ``Cldr.Locale.new!/2``
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Examples
iex> Cldr.Number.System.system_name_from!(:default, "en", TestBackend.Cldr)
:latn
iex> Cldr.Number.System.system_name_from!("latn", "en", TestBackend.Cldr)
:latn
iex> Cldr.Number.System.system_name_from!(:traditional, "he", TestBackend.Cldr)
:hebr
"""
@spec system_name_from!(system_name, Locale.locale_name() | LanguageTag.t(), Cldr.backend()) ::
        atom() | no_return()
# Bang variant: unwraps the ok tuple or raises the returned exception.
def system_name_from!(system_name, locale, backend) do
  case system_name_from(system_name, locale, backend) do
    {:ok, name} -> name
    {:error, {exception, message}} -> raise exception, message
  end
end
@doc """
Returns locale and number systems that have the same digits and
separators as the supplied one.
## Arguments
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by ``Cldr.Locale.new!/2``
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Returns
* `{:ok, likes}` where `likes` is the result of scanning all known
locales, or `{:error, {exception, reason}}` if any lookup fails
## Notes
Transliterating between locale & number systems is expensive. To avoid
unnecessary transliteration we look for locale and number systems that have
the same digits and separators. Typically we are comparing to locale "en"
and number system "latn" since this is what the number formatting routines use
as placeholders.
## Examples
"""
@spec number_systems_like(LanguageTag.t() | Locale.locale_name(), system_name, Cldr.backend()) ::
{:ok, list()} | {:error, {module(), String.t()}}
# Validates the inputs, then scans every known locale/number-system pair
# for matching digits and symbols. NOTE(review): a failing `with` clause
# returns its `{:error, reason}` as-is (there is no `else`).
def number_systems_like(locale, number_system, backend) do
with {:ok, _} <- Cldr.validate_locale(locale, backend),
{:ok, %{digits: digits}} <- number_system_for(locale, number_system, backend),
{:ok, symbols} <- Symbol.number_symbols_for(locale, number_system, backend),
{:ok, names} <- number_system_names_for(locale, backend) do
likes = do_number_systems_like(digits, symbols, names, backend)
{:ok, likes}
end
end
# Scans every known locale and, for each, accumulates the number systems
# whose digits and symbols are identical to the supplied ones.
#
# Fixes two accumulator defects in the original:
#   * `acc ++ {locale, this_system}` appended a bare tuple with `++`,
#     producing an improper list (and, when `acc` was `[]`, replacing the
#     accumulator with the tuple itself);
#   * the `if` had no `else`, so a non-matching system replaced the
#     accumulator with `nil`, crashing the next `++`.
defp do_number_systems_like(digits, symbols, names, backend) do
  Enum.map(Cldr.known_locale_names(backend), fn this_locale ->
    Enum.reduce(names, [], fn this_system, acc ->
      locale = Locale.new!(this_locale, backend)

      case number_system_for(locale, this_system, backend) do
        {:error, _} ->
          acc

        {:ok, %{digits: these_digits}} ->
          {:ok, these_symbols} = Symbol.number_symbols_for(locale, this_system, backend)

          if digits == these_digits && symbols == these_symbols do
            # Append (rather than prepend) to preserve the original order.
            acc ++ [{locale, this_system}]
          else
            acc
          end
      end
    end)
  end)
  |> Enum.reject(&(is_nil(&1) || &1 == []))
end
@doc """
Returns `{:ok, digits}` for a number system, or an `{:error, message}` if the
number system is not known.
## Arguments
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
## Returns
* `{:ok, string_of_digits}` or
* `{:error, {exception, reason}}`
## Examples
iex> Cldr.Number.System.number_system_digits(:latn)
{:ok, "0123456789"}
iex> Cldr.Number.System.number_system_digits(:nope)
{:error, {Cldr.UnknownNumberSystemError, "The number system :nope is not known or does not have digits"}}
"""
@spec number_system_digits(system_name()) ::
        {:ok, String.t()} | {:error, {module(), String.t()}}
# Only systems with a :digits entry are searched; algorithmic systems
# (and unknown names) produce an error tuple.
def number_system_digits(system_name) do
  case Map.fetch(systems_with_digits(), system_name) do
    {:ok, system} -> {:ok, Map.get(system, :digits)}
    :error -> {:error, number_system_digits_error(system_name)}
  end
end
@doc """
Returns `digits` for a number system, or raises an exception if the
number system is not know.
## Arguments
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
## Returns
* A string of the number systems digits or
* raises an exception
## Examples
iex> Cldr.Number.System.number_system_digits! :latn
"0123456789"
Cldr.Number.System.number_system_digits! :nope
** (Cldr.UnknownNumberSystemError) The number system :nope is not known or does not have digits
"""
@spec number_system_digits!(system_name) :: String.t() | no_return()
# Bang variant: unwraps the ok tuple or raises the returned exception.
def number_system_digits!(system_name) do
  case number_system_digits(system_name) do
    {:error, {exception, message}} -> raise exception, message
    {:ok, digits} -> digits
  end
end
@doc """
Converts a number into the representation of
a non-latin number system.
This function converts numbers to a known
number system only, it does not provide number
formatting.
## Arguments
* `number` is a `float`, `integer` or `Decimal`
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Returns
* `{:ok, string_of_digits}` or
* `{:error, {exception, reason}}`
## Notes
There are two types of number systems in CLDR:
* `:numeric` in which the number system defines
a direct mapping between the latin digits `0..9`
into a the number system equivalent. In this case,
` to_system/3` invokes `Cldr.Number.Transliterate.transliterate_digits/3`
for the given number.
* `:algorithmic` in which the number system
does not have the same structure as the `:latn`
number system and therefore the conversion is
done algorithmically. For CLDR the algorithm
is implemented through `Cldr.Rbnf` rulesets.
These rulesets are considered by CLDR to be
less rigorous than the `:numeric` number systems
and caution and testing for a specific use case
is recommended.
## Examples
iex> Cldr.Number.System.to_system 123456, :hebr, TestBackend.Cldr
{:ok, "קכ״ג׳תנ״ו"}
iex> Cldr.Number.System.to_system 123, :hans, TestBackend.Cldr
{:ok, "一百二十三"}
iex> Cldr.Number.System.to_system 123, :hant, TestBackend.Cldr
{:ok, "一百二十三"}
iex> Cldr.Number.System.to_system 123, :hansfin, TestBackend.Cldr
{:ok, "壹佰贰拾叁"}
"""
@spec to_system(Math.number_or_decimal(), atom, Cldr.backend()) ::
        {:ok, binary()} | {:error, {module(), String.t()}}
def to_system(number, system_name, backend) do
  # Conversion is delegated to the backend's generated Number.System module.
  backend_module = Module.concat(backend, Number.System)
  backend_module.to_system(number, system_name)
end
@doc """
Converts a number into the representation of
a non-latin number system. Returns a converted
string or raises on error.
## Arguments
* `number` is a `float`, `integer` or `Decimal`
* `system_name` is any number system name returned by
`Cldr.known_number_systems/0` or a number system type
returned by `Cldr.known_number_system_types/0`
* `backend` is any `Cldr` backend. That is, any module that
contains `use Cldr`
## Returns
* `string_of_digits` or
* raises an exception
See `Cldr.Number.System.to_system/3` for further
information.
## Examples
iex> Cldr.Number.System.to_system! 123, :hans, TestBackend.Cldr
"一百二十三"
iex> Cldr.Number.System.to_system! 123, :hant, TestBackend.Cldr
"一百二十三"
iex> Cldr.Number.System.to_system! 123, :hansfin, TestBackend.Cldr
"壹佰贰拾叁"
"""
@spec to_system!(Math.number_or_decimal(), atom, Cldr.backend()) ::
        binary() | no_return()
# Bang variant: unwraps the ok tuple or raises the returned exception.
def to_system!(number, system_name, backend) do
  case to_system(number, system_name, backend) do
    {:error, {exception, reason}} -> raise exception, reason
    {:ok, string} -> string
  end
end
@doc """
Generate a transliteration map between two character classes
## Arguments
* `from` is any `String.t()` intended to represent the
digits of a number system but that's not a requirement.
* `to` is any `String.t()` that is the same length as `from`
intended to represent the digits of a number system.
## Returns
* A map where the keys are the graphemes in `from` and the
values are the graphemes in `to` or
* `{:error, {exception, reason}}`
## Examples
iex> Cldr.Number.System.generate_transliteration_map "0123456789", "9876543210"
%{
"0" => "9",
"1" => "8",
"2" => "7",
"3" => "6",
"4" => "5",
"5" => "4",
"6" => "3",
"7" => "2",
"8" => "1",
"9" => "0"
}
iex> Cldr.Number.System.generate_transliteration_map "0123456789", "987654321"
{:error,
{ArgumentError, "\\"0123456789\\" and \\"987654321\\" aren't the same length"}}
"""
def generate_transliteration_map(from, to) when is_binary(from) and is_binary(to) do
  do_generate_transliteration_map(from, to, String.length(from), String.length(to))
end

# Equal grapheme counts: zip the graphemes pairwise into a map. Matching
# the same variable twice in the head replaces the original `when` guard.
defp do_generate_transliteration_map(from, to, length, length) do
  Map.new(Enum.zip(String.graphemes(from), String.graphemes(to)))
end

# Differing lengths cannot form a one-to-one grapheme mapping.
defp do_generate_transliteration_map(from, to, _from_length, _to_length) do
  {:error, {ArgumentError, "#{inspect(from)} and #{inspect(to)} aren't the same length"}}
end
# Accepts either a concrete number system name or a number system type.
# Tries the concrete system first; on failure falls back to the type,
# and finally to an unknown-number-system error.
defp validate_number_system_or_type(number_system, backend) do
  case Cldr.validate_number_system(number_system) do
    {:ok, number_system} ->
      {:ok, number_system}

    {:error, _} ->
      case Cldr.validate_number_system_type(number_system, backend) do
        {:ok, number_system} -> {:ok, number_system}
        {:error, _reason} -> {:error, Cldr.unknown_number_system_error(number_system)}
      end
  end
end
@doc """
Returns an error tuple for a number system unknown to a given locale.
## Arguments
* `number_system` is any number system name **not** returned by `Cldr.known_number_systems/0`
* `locale` is any valid locale name returned by `Cldr.known_locale_names/0`
or a `Cldr.LanguageTag` struct returned by ``Cldr.Locale.new!/2``
* `valid_number_systems` is a map returned by `Cldr.Number.System.number_systems_for/2`
"""
# NOTE(review): `Cldr.locale_name(locale)` is interpolated without
# `inspect/1`, yet the doctest in `number_system_for/3` shows the locale
# name quoted (`named \"en\"`) — confirm `Cldr.locale_name/1` returns an
# already-quoted/inspected form.
def unknown_number_system_for_locale_error(number_system, locale, valid_number_systems) do
{
Cldr.UnknownNumberSystemError,
"The number system #{inspect(number_system)} is unknown " <>
"for the locale named #{Cldr.locale_name(locale)}. " <>
"Valid number systems are #{inspect(valid_number_systems)}"
}
end
@doc false
# Error tuple for a number system that is unknown or has no digit characters.
def number_system_digits_error(system_name) do
  message = "The number system #{inspect(system_name)} is not known or does not have digits"
  {Cldr.UnknownNumberSystemError, message}
end
end
|
lib/cldr/number/system.ex
| 0.933271
| 0.594375
|
system.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.FailedNodeRemoveStatus do
  @moduledoc """
  This command reports on the attempted removal of a presumed failed node.
  Params:
  * `:node_id` - the id of the node which removal for failure was attempted
  * `:seq_number` - the sequence number of the removal command
  * `:status` - whether the presumed failed node was removed
  When encoding the params you can encode for a specific command class version
  by passing the `:command_class_version` to the encode options
  ```elixir
  Grizzly.ZWave.Commands.FailedNodeRemoveStatus.encode_params(failed_node_remove_status, command_class_version: 3)
  ```
  If there is no command class version specified this will encode to version 4 of
  the `NetworkManagementInclusion` command class. This version supports the use
  of 16 bit node ids.
  """
  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave
  alias Grizzly.ZWave.{Command, DecodeError, NodeId}
  alias Grizzly.ZWave.CommandClasses.NetworkManagementInclusion

  @type status() :: :done | :failed_node_not_found | :failed_node_remove_fail
  @type param() :: {:node_id, char()} | {:seq_number, ZWave.seq_number()} | {:status, status}

  @impl Grizzly.ZWave.Command
  @spec new([param()]) :: {:ok, Command.t()}
  # Builds the command struct; params are validated lazily by `Command.param!/2`.
  def new(params) do
    command = %Command{
      name: :failed_node_remove_status,
      command_byte: 0x08,
      command_class: NetworkManagementInclusion,
      params: params,
      impl: __MODULE__
    }

    {:ok, command}
  end

  @impl Grizzly.ZWave.Command
  def encode_params(command, opts \\ []) do
    seq_number = Command.param!(command, :seq_number)
    node_id = Command.param!(command, :node_id)
    status_byte = Command.param!(command, :status) |> encode_status()

    # Version 4 supports 16-bit node ids (extended encoding); earlier
    # versions encode the node id as a single byte.
    case Keyword.get(opts, :command_class_version, 4) do
      4 ->
        <<seq_number, status_byte, NodeId.encode_extended(node_id)::binary>>

      v when v < 4 ->
        <<seq_number, status_byte, node_id>>
    end
  end

  @impl Grizzly.ZWave.Command
  @spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
  def decode_params(<<seq_number, status_byte, node_id_bin::binary>>) do
    # Fix: the previous `else` clause unwrapped `{:error, %DecodeError{}}`
    # and returned the bare struct, contradicting the @spec above. Letting
    # the `with` fall through preserves the tagged error tuple.
    with {:ok, status} <- decode_status(status_byte) do
      node_id = NodeId.parse(node_id_bin)
      {:ok, [seq_number: seq_number, node_id: node_id, status: status]}
    end
  end

  # Status byte values as defined for FAILED_NODE_REMOVE_STATUS.
  defp encode_status(:failed_node_not_found), do: 0x00
  defp encode_status(:done), do: 0x01
  defp encode_status(:failed_node_remove_fail), do: 0x02

  defp decode_status(0x00), do: {:ok, :failed_node_not_found}
  defp decode_status(0x01), do: {:ok, :done}
  defp decode_status(0x02), do: {:ok, :failed_node_remove_fail}

  defp decode_status(byte),
    do: {:error, %DecodeError{value: byte, param: :status, command: :failed_node_remove_status}}
end
|
lib/grizzly/zwave/commands/failed_node_remove_status.ex
| 0.868325
| 0.747316
|
failed_node_remove_status.ex
|
starcoder
|
defmodule Loom.AWORSet do
  @moduledoc """
  An add-removed (optimized) observed-remove set (without tombstones).
  This CRDT respects adds over removes in the event a simultaneous update. It
  most naturally matches what most users expect when they add/remove items. It
  also forms the foundation for other kinds of CRDT's, such as our AWORMap and
  MVRegister.
  """
  alias __MODULE__, as: Set
  alias Loom.Dots

  @type actor :: term
  @type value :: term
  @opaque t :: %Set{
            dots: Dots.t(),
            keep_delta: boolean,
            delta: Dots.t() | nil
          }
  defstruct dots: %Dots{}, keep_delta: true, delta: nil

  # Adds `value_` unless it is already a member, or it is a map with an `:id`
  # key whose id already exists among the member maps (id-based de-dup).
  def init(%Set{dots: d, delta: delta_dots} = set, actor, value_) do
    if member?(set, value_) or
         (is_map(value_) and Map.has_key?(value_, :id) and
            Enum.member?(Enum.map(value(set), &Map.get(&1, :id)), Map.get(value_, :id))) do
      set
    else
      with {new_dots, new_delta_dots} <- Dots.init({d, delta_dots}, actor, value_),
           do: %Set{set | dots: new_dots, delta: new_delta_dots}
    end
  end

  @doc """
  Creates a new AWORSet
  The identity value for this is `[]`, an empty set.
  iex> alias Loom.AWORSet, as: Set
  iex> Set.new |> Set.value
  []
  """
  @spec new() :: t
  def new(options \\ []) do
    initial_counter = Keyword.get(options, :initial_counter, 0)
    dots = Keyword.get(options, :dots, Dots.new(initial_counter: initial_counter))
    delta = Keyword.get(options, :delta, Dots.new(initial_counter: initial_counter))
    keep_delta = Keyword.get(options, :keep_delta, true)
    %Set{dots: dots, keep_delta: keep_delta, delta: delta}
  end

  @doc """
  Grab the delta from an AWORSet for lower-cost synchronization.
  """
  @spec delta(t) :: t
  def delta(%Set{delta: delta}), do: %Set{dots: delta}

  @doc """
  Clear the delta from an AWORSet to preserve space. Do this after you sync
  "enough".
  iex> alias Loom.AWORSet, as: Set
  iex> Set.new |> Set.add(:a, 5) |> Set.clear_delta |> Set.delta == Set.new |> Set.delta
  true
  """
  @spec clear_delta(t) :: t
  def clear_delta(%Set{} = set), do: %Set{set | delta: Dots.new()}

  @doc """
  Add an element to an AWORSet
  iex> alias Loom.AWORSet, as: Set
  iex> Set.new |> Set.add(:a, 1) |> Set.add(:b, 2) |> Set.value |> Enum.sort
  [1,2]
  """
  @spec add(t, actor, value) :: t
  def add(%Set{dots: d, delta: delta_dots} = set, actor, value) do
    {new_dots, new_delta_dots} =
      {d, delta_dots}
      |> Dots.remove(value)
      |> Dots.add(actor, value)

    %Set{set | dots: new_dots, delta: new_delta_dots}
  end

  @doc """
  Remove an element from an AWORSet
  iex> alias Loom.AWORSet, as: Set
  iex> Set.new
  ...> |> Set.add(:a, 1)
  ...> |> Set.add(:a, 2)
  ...> |> Set.remove(1)
  ...> |> Set.value
  [2]
  """
  @spec remove(t, value) :: t
  def remove(%Set{dots: d, delta: delta_dots} = set, value) do
    if member?(set, value) do
      {new_dots, new_delta_dots} = {d, delta_dots} |> Dots.remove(value)
      # Fix: use struct-update syntax (as `add/3` and `empty/1` do) so
      # `keep_delta` is preserved; previously a fresh %Set{} was built,
      # silently resetting `keep_delta` to its default.
      %Set{set | dots: new_dots, delta: new_delta_dots}
    else
      raise Loom.PreconditionError, unobserved: value
    end
  end

  @doc """
  Empties a CRDT of all elements
  iex> alias Loom.AWORSet, as: Set
  iex> Set.new
  ...> |> Set.add(:a, 1)
  ...> |> Set.empty
  ...> |> Set.value
  []
  """
  @spec empty(t) :: t
  def empty(%Set{dots: d, delta: delta_dots} = set) do
    {new_dots, new_delta_dots} = Dots.remove({d, delta_dots})
    %Set{set | dots: new_dots, delta: new_delta_dots}
  end

  @doc """
  Join 2 CRDTs together
  iex> alias Loom.AWORSet, as: Set
  iex> a = Set.new |> Set.add(:a, 1)
  iex> b = Set.new |> Set.add(:b, 2)
  iex> Set.join(a, b) |> Set.value |> Enum.sort
  [1,2]
  """
  @spec join(t, t) :: t
  def join(%Set{dots: d1, delta: delta_dots} = set, %Set{dots: d2}) do
    %Set{set | dots: Dots.join(d1, d2), delta: Dots.join(delta_dots, d2)}
  end

  @doc """
  Check to see if an element is a member of a set.
  iex> alias Loom.AWORSet, as: Set
  iex> Set.new
  ...> |> Set.add(:a, 1)
  ...> |> Set.member?(1)
  true
  """
  @spec member?(t, value) :: boolean
  def member?(%Set{dots: d}, value) do
    Dots.dots(d) |> Enum.any?(fn {_, v} -> v == value end)
  end

  @doc """
  Returns a list of set elements.
  See other examples for details.
  """
  @spec value(t) :: [value]
  def value(%Set{dots: d}) do
    for({_, v} <- Dots.dots(d), do: v) |> Enum.uniq()
  end

  # Replaces `old_value` with `new_value` when present; no-op otherwise.
  def update(set, actor, old_value, new_value) do
    if member?(set, old_value) do
      set |> remove(old_value) |> add(actor, new_value)
    else
      set
    end
  end
end
defimpl Loom.CRDT, for: Loom.AWORSet do
  alias Loom.AWORSet, as: Set

  @doc """
  Returns a description of the operations that this CRDT takes.
  Updates return a new CRDT, reads can return any natural datatype. This register
  returns a value.
  """
  def ops(_crdt) do
    update_ops = [add: [:actor, :value], remove: [:actor]]
    read_ops = [is_member: [:value], value: []]
    [update: update_ops, read: read_ops]
  end

  @doc """
  Applies a CRDT to a counter in an abstract way.
  This is for ops-based support.
  iex> alias Loom.CRDT
  iex> reg = Loom.AWORSet.new |> CRDT.apply({:add, :a, "test"}) |> CRDT.apply({:add, :a, "testing"}) |> CRDT.apply({:remove, "test"})
  iex> {CRDT.apply(reg, {:is_member, "testing"}), CRDT.apply(reg, {:is_member, "test"})}
  {true, false}
  """
  # Update operations delegate straight to the set implementation.
  def apply(crdt, {:add, actor, value}), do: Set.add(crdt, actor, value)
  def apply(crdt, {:remove, value}), do: Set.remove(crdt, value)
  # Read operations.
  def apply(crdt, {:is_member, value}), do: Set.member?(crdt, value)
  def apply(crdt, :value), do: Set.value(crdt)

  @doc """
  Joins 2 CRDT's of the same type.
  2 different types cannot mix (yet).
  iex> alias Loom.CRDT
  iex> a = Loom.AWORSet.new |> CRDT.apply({:add, :a, "test"})
  iex> b = Loom.AWORSet.new |> CRDT.apply({:add, :b, "test2"})
  iex> CRDT.join(a,b) |> CRDT.apply(:value) |> Enum.sort
  ["test","test2"]
  """
  def join(first, second), do: Set.join(first, second)

  @doc """
  Returns the most natural primitive value for a set, a list.
  iex> Loom.AWORSet.new |> Loom.CRDT.value
  []
  """
  def value(crdt), do: Set.value(crdt)
end
|
lib/loom/aworset.ex
| 0.881672
| 0.447219
|
aworset.ex
|
starcoder
|
defmodule Absinthe.Introspection do
  @moduledoc """
  Introspection support.
  You can introspect your schema using `__schema`, `__type`, and `__typename`,
  as [described in the specification](https://facebook.github.io/graphql/#sec-Introspection).
  ## Examples
  Seeing the names of the types in the schema:
  ```
  \"""
  {
    __schema {
      types {
        name
      }
    }
  }
  \"""
  |> Absinthe.run(MyApp.Schema)
  {:ok,
    %{data: %{
      "__schema" => %{
        "types" => [
          %{"name" => "Boolean"},
          %{"name" => "Float"},
          %{"name" => "ID"},
          %{"name" => "Int"},
          %{"name" => "String"},
          ...
        ]
      }
    }}
  }
  ```
  Getting the name of the queried type:
  ```
  \"""
  {
    profile {
      name
      __typename
    }
  }
  \"""
  |> Absinthe.run(MyApp.Schema)
  {:ok,
    %{data: %{
      "profile" => %{
        "name" => "Joe",
        "__typename" => "Person"
      }
    }}
  }
  ```
  Getting the name of the fields for a named type:
  ```
  \"""
  {
    __type(name: "Person") {
      fields {
        name
        type {
          kind
          name
        }
      }
    }
  }
  \"""
  |> Absinthe.run(MyApp.Schema)
  {:ok,
    %{data: %{
      "__type" => %{
        "fields" => [
          %{
            "name" => "name",
            "type" => %{"kind" => "SCALAR", "name" => "String"}
          },
          %{
            "name" => "age",
            "type" => %{"kind" => "SCALAR", "name" => "Int"}
          },
        ]
      }
    }}
  }
  ```
  (Note that you may have to nest several depths of `type`/`ofType`, as
  type information includes any wrapping layers of [List](https://facebook.github.io/graphql/#sec-List)
  and/or [NonNull](https://facebook.github.io/graphql/#sec-Non-null).)
  """

  alias Absinthe.Type

  # Determine if a term is an introspection type. Introspection object
  # types follow the GraphQL convention of a `__`-prefixed name.
  @doc false
  @spec type?(any) :: boolean
  def type?(candidate), do: match?(%Type.Object{name: "__" <> _}, candidate)
end
|
lib/absinthe/introspection.ex
| 0.919136
| 0.754802
|
introspection.ex
|
starcoder
|
defmodule GenMetricsBench.Pipeline do
  alias GenMetrics.GenStage.Pipeline
  alias GenMetricsBench.GenStage.Producer
  alias GenMetricsBench.GenStage.ProducerConsumer
  alias GenMetricsBench.GenStage.Consumer
  alias GenMetricsBench.Utils.Runtime

  @moduledoc """
  GenMetricsBench harness for GenStage Pipelines.
  This module provides a simple benchmark harness to
  load a simple GenStage pipeline with flexible message sizes
  and load volume.
  Using 'no_metrics/0` a benchmark can be run with GenMetrics
  data collection and reporting entirely disabled. This provides
  a baseline benchmark reading against which you can compare
  benchmarks run with GenMetrics activated.
  The following benchmarks can be run with various flavours
  of GenMetrics activated:
  - `summary_metrics/0'
  - `statistical_metrics/0`
  - `statsd_metrics/0`
  - `datadog_metrics/0`
  """

  @default_benchmark_load 10_000_000

  # NOTE(review): all four monitored benchmarks below register their
  # pipeline under the same name, "bench_summary_metrics". This looks like
  # a copy/paste slip, but the names are preserved here to avoid changing
  # reported metric identifiers — confirm intended names before renaming.

  @doc """
  Run benchmark with all metrics gathering disabled.
  """
  def no_metrics do
    do_run(build_benchmark())
  end

  @doc """
  Run benchmark with in-memory summary metrics gathering enabled.
  """
  def summary_metrics do
    monitor_and_run(%Pipeline{
      name: "bench_summary_metrics",
      producer: [Producer],
      producer_consumer: [ProducerConsumer],
      consumer: [Consumer]
    })
  end

  @doc """
  Run benchmark with in-memory statistical metrics gathering enabled.
  """
  def statistical_metrics do
    monitor_and_run(%Pipeline{
      name: "bench_summary_metrics",
      producer: [Producer],
      producer_consumer: [ProducerConsumer],
      consumer: [Consumer],
      opts: [statistics: true]
    })
  end

  @doc """
  Run benchmark with `statsd` statistical metrics gathering enabled.
  """
  def statsd_metrics do
    monitor_and_run(%Pipeline{
      name: "bench_summary_metrics",
      producer: [Producer],
      producer_consumer: [ProducerConsumer],
      consumer: [Consumer],
      opts: [statistics: :statsd]
    })
  end

  @doc """
  Run benchmark with `datadog` statistical metrics gathering enabled.
  """
  def datadog_metrics do
    monitor_and_run(%Pipeline{
      name: "bench_summary_metrics",
      producer: [Producer],
      producer_consumer: [ProducerConsumer],
      consumer: [Consumer],
      opts: [statistics: :datadog]
    })
  end

  # Registers `pipeline` with GenMetrics, then runs the benchmark load.
  # Extracted from four previously duplicated public functions.
  defp monitor_and_run(%Pipeline{} = pipeline) do
    bench = build_benchmark()
    {:ok, _mid} = GenMetrics.monitor_pipeline(pipeline)
    do_run(bench)
  end

  # Resolves the simulator, a sample message and the configured load volume.
  defp build_benchmark do
    simulator = Runtime.pipeline_simulator
    sim_msg = simulator.gen_msg

    sim_load =
      Application.get_env(:gen_metrics_bench, :benchmark_load, @default_benchmark_load)

    {simulator, sim_msg, sim_load}
  end

  # Starts the three-stage pipeline, wires the subscriptions and blocks
  # forever while the stages process `sim_load` messages.
  defp do_run({_simulator, _sim_msg, sim_load}) do
    {:ok, producer} = GenStage.start_link(Producer, [])
    {:ok, prodcon} = GenStage.start_link(ProducerConsumer, [])
    {:ok, consumer} = GenStage.start_link(Consumer, sim_load)
    GenStage.sync_subscribe(consumer, to: prodcon)
    GenStage.sync_subscribe(prodcon, to: producer)
    Process.sleep(:infinity)
  end
end
|
lib/pipeline.ex
| 0.830972
| 0.484075
|
pipeline.ex
|
starcoder
|
defmodule Mix.Tasks.Hex.Publish do
  use Mix.Task

  alias Mix.Tasks.Hex.Build

  @shortdoc "Publishes a new package version"

  @moduledoc """
  Publishes a new version of the package.

      $ mix hex.publish

  The current authenticated user will be the package owner. Only package
  owners can publish the package, new owners can be added with the
  `mix hex.owner` task.

  Packages and documentation sizes are limited to 8mb compressed, and 64mb uncompressed.

  ## Publishing documentation

  Documentation will be generated by running the `mix docs` task. `ex_doc`
  provides this task by default, but any library can be used. Or an alias can be
  used to extend the documentation generation. The expected result of the task
  is the generated documentation located in the `doc/` directory with an
  `index.html` file.

  The documentation will be accessible at `https://hexdocs.pm/my_package/1.0.0`,
  `https://hexdocs.pm/my_package` will always redirect to the latest published
  version.

  Documentation will be built and published automatically. To publish a package
  without documentation run `mix hex.publish package` or to only publish documentation
  run `mix hex.publish docs`.

  ## Reverting a package

  A new package can be reverted or updated within 24 hours of its initial publish,
  a new version of an existing package can be reverted or updated within one hour.
  Documentation has no limitations on when it can be updated.

  To update the package simply run the `mix hex.publish` task again. To revert run
  `mix hex.publish --revert VERSION` or to only revert the documentation run
  `mix hex.publish docs --revert VERSION`.

  If the last version is reverted, the package is removed.

  ## Command line options

  * `--organization ORGANIZATION` - Set this for private packages belonging to an organization
  * `--yes` - Publishes the package without any confirmation prompts
  * `--dry-run` - Builds package and performs local checks without publishing,
    use `mix hex.build --unpack` to inspect package contents before publishing
  * `--replace` - Allows overwriting an existing package version if it exists.
    Private packages can always be overwritten, public packages can only be
    overwritten within one hour after they were initially published.
  * `--revert VERSION` - Revert given version. If the last version is reverted,
    the package is removed.

  ## Configuration

  * `:app` - Package name (required).
  * `:version` - Package version (required).
  * `:deps` - List of package dependencies (see Dependencies below).
  * `:description` - Short description of the project.
  * `:package` - Hex specific configuration (see Package configuration below).

  ## Dependencies

  Dependencies are defined in mix's dependency format. But instead of using
  `:git` or `:path` as the SCM `:package` is used.

      defp deps() do
        [
          {:ecto, "~> 0.1.0"},
          {:postgrex, "~> 0.3.0"},
          {:cowboy, github: "extend/cowboy"}
        ]
      end

  As can be seen Hex package dependencies work alongside git dependencies.
  Important to note is that non-Hex dependencies will not be used during
  dependency resolution and neither will they be listed as dependencies of the
  package.

  ## Package configuration

  Additional metadata of the package can optionally be defined, but it is highly
  recommended to do so.

  * `:name` - Set this if the package name is not the same as the application
    name.
  * `:organization` - Set this if you are publishing to an organization instead
    of the default public hex.pm.
  * `:files` - List of files and directories to include in the package,
    can include wildcards. Defaults to `["lib", "priv", "mix.exs", "README*",
    "readme*", "LICENSE*", "license*", "CHANGELOG*", "changelog*", "src"]`.
  * `:licenses` - List of licenses used by the package.
  * `:links` - Map of links relevant to the package.
  * `:build_tools` - List of build tools that can build the package. Hex will
    try to automatically detect the build tools, it will do this based on the
    files in the package. If a "rebar" or "rebar.config" file is present Hex
    will mark it as able to build with rebar. This detection can be overridden
    by setting this field.
  """

  @behaviour Hex.Mix.TaskDescription

  # Recognized CLI switches. Both `organization` and `organisation` are
  # listed — presumably the British spelling is accepted as an alias; note
  # that only `opts[:organization]` is read below.
  @switches [
    revert: :string,
    progress: :boolean,
    organization: :string,
    organisation: :string,
    yes: :boolean,
    dry_run: :boolean,
    replace: :boolean
  ]

  @impl true
  def run(args) do
    Hex.Mix.check_deps()
    Hex.start()
    {opts, args} = Hex.OptionParser.parse!(args, strict: @switches)
    build = Build.prepare_package()
    revert_version = opts[:revert]
    revert = !!revert_version
    organization = opts[:organization] || build.organization

    # Dispatch on the positional argument; when --revert is present it takes
    # precedence over publishing for all three forms.
    case args do
      ["package"] when revert ->
        auth = Mix.Tasks.Hex.auth_info(:write)
        revert_package(build, organization, revert_version, auth)

      ["docs"] when revert ->
        auth = Mix.Tasks.Hex.auth_info(:write)
        revert_docs(build, organization, revert_version, auth)

      # Bare `mix hex.publish --revert VERSION` reverts the package release.
      [] when revert ->
        auth = Mix.Tasks.Hex.auth_info(:write)
        revert_package(build, organization, revert_version, auth)

      ["package"] ->
        case proceed_with_owner(build, organization, opts) do
          {:ok, owner} ->
            auth = Mix.Tasks.Hex.auth_info(:write)
            Hex.Shell.info("Publishing package...")
            create_release(build, organization, auth, opts)
            transfer_owner(build, owner, auth, opts)

          :error ->
            :ok
        end

      ["docs"] ->
        docs_task()
        auth = Mix.Tasks.Hex.auth_info(:write)
        create_docs(build, organization, auth, opts)

      [] ->
        create(build, organization, opts)

      _ ->
        Mix.raise("""
        Invalid arguments, expected one of:
        mix hex.publish
        mix hex.publish package
        mix hex.publish docs
        """)
    end
  end

  @impl true
  def tasks() do
    [
      {"", "Publishes a new package version"},
      {"package", "Publish current package"},
      {"docs", "Publish current docs"},
      {"package --revert VERSION", "Reverts package on given version"},
      {"docs --revert VERSION", "Reverts docs on given version"},
      {"--revert VERSION", "Reverts given version"}
    ]
  end

  # Full publish flow: confirm with the user, build docs, publish the release
  # and then the docs, finally transferring ownership when an organization
  # owner was selected at the prompt.
  defp create(build, organization, opts) do
    case proceed_with_owner(build, organization, opts) do
      {:ok, owner} ->
        Hex.Shell.info("Building docs...")
        docs_task()
        auth = Mix.Tasks.Hex.auth_info(:write)
        Hex.Shell.info("Publishing package...")

        # Docs are only published when the release itself succeeded.
        if :ok == create_release(build, organization, auth, opts) do
          Hex.Shell.info("Publishing docs...")
          create_docs(build, organization, auth, opts)
        end

        transfer_owner(build, owner, auth, opts)

      :error ->
        :ok
    end
  end

  # Tars up the generated docs directory and uploads it, unless --dry-run.
  # Raises when no index.html was produced by the docs task.
  defp create_docs(build, organization, auth, opts) do
    directory = docs_dir()
    name = build.meta.name
    version = build.meta.version

    unless File.exists?("#{directory}/index.html") do
      Mix.raise("File not found: #{directory}/index.html")
    end

    progress? = Keyword.get(opts, :progress, true)
    dry_run? = Keyword.get(opts, :dry_run, false)
    tarball = build_docs_tarball(directory)

    if dry_run? do
      :ok
    else
      send_tarball(organization, name, version, tarball, auth, progress?)
    end
  end

  # Runs `mix docs`; when the task does not exist (ex_doc missing) prints
  # actionable guidance and re-raises with the original stacktrace.
  defp docs_task() do
    try do
      Mix.Task.run("docs", [])
    rescue
      ex in [Mix.NoTaskError] ->
        require Hex.Stdlib
        stacktrace = Hex.Stdlib.stacktrace()

        Mix.shell().error("""
        Publication failed because the "docs" task is unavailable. You may resolve this by:
        1. Adding {:ex_doc, ">= 0.0.0", only: :dev, runtime: false} to your dependencies in your mix.exs and trying again
        2. If ex_doc was already added, make sure you run "mix hex.publish" in the same environment as the ex_doc package
        3. Publishing the package without docs by running "mix hex.publish package" (not recommended)
        """)

        reraise ex, stacktrace
    end
  end

  # Prints the package summary plus CoC / visibility notices, then asks the
  # user to confirm (possibly choosing an organization owner).
  # Returns {:ok, owner | nil} to proceed or :error to abort.
  defp proceed_with_owner(build, organization, opts) do
    meta = build.meta
    exclude_deps = build.exclude_deps
    package = build.package
    Hex.Shell.info("Building #{meta.name} #{meta.version}")
    Build.print_info(meta, organization, exclude_deps, package[:files])
    print_link_to_coc()
    print_public_private(organization)
    print_owner_prompt(build, organization, opts)
  end

  # Tells the user whether the publish targets the public or a private
  # repository, and which API endpoint is used when it is non-default.
  defp print_public_private(organization) do
    api_url = Hex.State.fetch!(:api_url)
    default_api_url? = api_url == Hex.State.default_api_url()

    using_api =
      if default_api_url? do
        ""
      else
        " using #{api_url}"
      end

    to_repository =
      cond do
        !organization and !default_api_url? ->
          ""

        public_organization?(organization) ->
          [" to ", :bright, "public", :reset, " repository hexpm"]

        true ->
          [" to ", :bright, "private", :reset, " repository #{organization}"]
      end

    Hex.Shell.info(
      Hex.Shell.format([
        "Publishing package",
        to_repository,
        using_api,
        "."
      ])
    )
  end

  # Shows the organization-owner selection prompt only for a brand-new public
  # package, when the user belongs to organizations and did not pass --yes;
  # otherwise falls back to a plain "Proceed?" confirmation.
  defp print_owner_prompt(build, organization, opts) do
    auth = Mix.Tasks.Hex.auth_info(:read)
    organizations = user_organizations(auth)

    owner_prompt? =
      public_organization?(organization) and
        not Keyword.get(opts, :yes, false) and
        organizations != [] and
        not package_exists?(build)

    Hex.Shell.info("")

    if owner_prompt? do
      do_print_owner_prompt(organizations)
    else
      if Keyword.get(opts, :yes, false) or Hex.Shell.yes?("Proceed?") do
        {:ok, nil}
      else
        :error
      end
    end
  end

  # Renders the numbered owner menu ([1] = yourself, [2..] = organizations)
  # and hands the selection map to owner_prompt_selection/1.
  defp do_print_owner_prompt(organizations) do
    Hex.Shell.info(
      "You are a member of one or multiple organizations. Would you like to publish " <>
        "the package with yourself as owner or an organization as owner? " <>
        "If you publish with an organization as owner your package will " <>
        "be public but managed by the selected organization."
    )

    Hex.Shell.info("")
    Hex.Shell.info("  [1] Yourself")
    numbers = Stream.map(Stream.iterate(2, &(&1 + 1)), &Integer.to_string/1)
    organizations = Stream.zip(numbers, organizations)

    Enum.each(organizations, fn {ix, organization} ->
      Hex.Shell.info("  [#{ix}] #{organization}")
    end)

    Hex.Shell.info("")
    owner_prompt_selection(Enum.into(organizations, %{}))
  end

  # Reads the user's menu choice; "1" means self-owned (nil owner); any other
  # valid index selects that organization; invalid input re-prompts.
  defp owner_prompt_selection(organizations) do
    selection = Hex.Stdlib.string_trim(Hex.Shell.prompt("Your selection:"))

    if selection == "1" do
      {:ok, nil}
    else
      case Map.fetch(organizations, selection) do
        {:ok, organization} -> {:ok, organization}
        :error -> owner_prompt_selection(organizations)
      end
    end
  end

  # True when the package already exists on hexpm. On API errors the result
  # defaults to true so the owner prompt is skipped rather than mis-shown.
  defp package_exists?(build) do
    case Hex.API.Package.get("hexpm", build.meta.name) do
      {:ok, {200, _body, _headers}} ->
        true

      {:ok, {404, _body, _headers}} ->
        false

      other ->
        Hex.Utils.print_error_result(other)
        true
    end
  end

  # Names of the organizations the authenticated user belongs to; empty list
  # on API failure (after printing the error).
  defp user_organizations(auth) do
    case Hex.API.User.me(auth) do
      {:ok, {200, body, _header}} ->
        Enum.map(body["organizations"], & &1["name"])

      other ->
        Hex.Utils.print_error_result(other)
        []
    end
  end

  # nil (no organization) and the default "hexpm" repository are both public.
  defp public_organization?(organization), do: organization in [nil, "hexpm"]

  # No owner selected at the prompt -> nothing to transfer.
  defp transfer_owner(_build, nil, _auth, _opts) do
    :ok
  end

  defp transfer_owner(build, owner, auth, opts) do
    Hex.Shell.info("Transferring ownership to #{owner}...")
    dry_run? = Keyword.get(opts, :dry_run, false)

    if dry_run? do
      :ok
    else
      case Hex.API.Package.Owner.add("hexpm", build.meta.name, owner, "full", true, auth) do
        {:ok, {status, _body, _header}} when status in 200..299 ->
          :ok

        other ->
          Hex.Shell.error("Failed to transfer ownership")
          Hex.Utils.print_error_result(other)
      end
    end
  end

  defp print_link_to_coc() do
    Hex.Shell.info(
      "Before publishing, please read the Code of Conduct: " <>
        "https://hex.pm/policies/codeofconduct\n"
    )
  end

  # Deletes a published release version via the Hex API.
  defp revert_package(build, organization, version, auth) do
    name = build.meta.name

    case Hex.API.Release.delete(organization, name, version, auth) do
      {:ok, {code, _, _}} when code in 200..299 ->
        Hex.Shell.info("Reverted #{name} #{version}")

      other ->
        Hex.Shell.error("Reverting #{name} #{version} failed")
        Hex.Utils.print_error_result(other)
    end
  end

  # Deletes published docs for a version; a 404 is reported as "no docs"
  # rather than as a failure.
  defp revert_docs(build, organization, version, auth) do
    name = build.meta.name

    case Hex.API.ReleaseDocs.delete(organization, name, version, auth) do
      {:ok, {code, _, _}} when code in 200..299 ->
        Hex.Shell.info("Reverted docs for #{name} #{version}")

      {:ok, {404, _, _}} ->
        Hex.Shell.info("Docs do not exist")

      other ->
        Hex.Shell.error("Reverting docs for #{name} #{version} failed")
        Hex.Utils.print_error_result(other)
    end
  end

  # Builds the docs tarball in memory after rejecting semver-like top-level
  # filenames (they would clash with hexdocs version URLs).
  defp build_docs_tarball(directory) do
    files = files(directory)
    raise_if_file_matches_semver(files)
    {:ok, data} = :mix_hex_tarball.create_docs(files)
    data
  end

  defp raise_if_file_matches_semver(files) do
    Enum.map(files, fn
      {filename, _contents} -> filename_matches_semver!(filename)
      filename -> filename_matches_semver!(filename)
    end)
  end

  # Raises when the first path segment parses as a semantic version.
  defp filename_matches_semver!(filename) do
    top_level = filename |> Path.split() |> List.first()

    case Version.parse(to_string(top_level)) do
      {:ok, _struct} ->
        Mix.raise("Invalid filename: top-level filenames cannot match a semantic version pattern")

      _ ->
        :ok
    end
  end

  # Uploads the docs tarball. On success prints either the API-provided
  # location (non-default endpoints) or the canonical hexdocs URL.
  defp send_tarball(organization, name, version, tarball, auth, progress?) do
    progress = progress_fun(progress?, byte_size(tarball))

    case Hex.API.ReleaseDocs.publish(organization, name, version, tarball, auth, progress) do
      {:ok, {code, _body, headers}} when code in 200..299 ->
        api_url = Hex.State.fetch!(:api_url)
        default_api_url? = api_url == Hex.State.default_api_url()

        location =
          # NOTE(review): header keys are looked up as charlists ('location'),
          # which assumes :httpc-style headers — confirm against the HTTP client.
          if !default_api_url? && headers['location'] do
            headers['location']
          else
            Hex.Utils.hexdocs_url(organization, name, version)
          end

        Hex.Shell.info("")
        Hex.Shell.info(["Docs published to ", location])
        :ok

      {:ok, {404, _, _}} ->
        Hex.Shell.info("")
        Hex.Shell.error("Publishing docs failed due to the package not being published yet")
        :error

      other ->
        Hex.Shell.info("")
        Hex.Shell.error("Publishing docs failed")
        Hex.Utils.print_error_result(other)
        :error
    end
  end

  # All regular files under the docs directory as {relative_charlist, contents}.
  defp files(directory) do
    "#{directory}/**"
    |> Path.wildcard()
    |> Enum.filter(&File.regular?/1)
    |> Enum.map(&{relative_path(&1, directory), File.read!(&1)})
  end

  defp relative_path(file, dir) do
    Path.relative_to(file, dir)
    |> Hex.Stdlib.string_to_charlist()
  end

  # Locates the generated docs: "doc" (ex_doc default) or "docs"; raises
  # otherwise.
  defp docs_dir do
    cond do
      File.exists?("doc") ->
        "doc"

      File.exists?("docs") ->
        "docs"

      true ->
        Mix.raise(
          "Documentation could not be found. " <>
            "Please ensure documentation is in the doc/ or docs/ directory"
        )
    end
  end

  # Creates the package tarball in memory and uploads it, unless --dry-run.
  defp create_release(build, organization, auth, opts) do
    meta = build.meta
    %{tarball: tarball, outer_checksum: checksum} = Hex.Tar.create!(meta, meta.files, :memory)
    dry_run? = Keyword.get(opts, :dry_run, false)

    if dry_run? do
      :ok
    else
      send_release(tarball, checksum, organization, auth, opts)
    end
  end

  # Publishes the release tarball; prints the package URL and the hex-encoded
  # outer checksum on success. (The String.downcase around a case: :lower
  # encode is redundant but harmless.)
  defp send_release(tarball, checksum, organization, auth, opts) do
    progress? = Keyword.get(opts, :progress, true)
    progress = progress_fun(progress?, byte_size(tarball))
    replace? = Keyword.get(opts, :replace, false)

    case Hex.API.Release.publish(organization, tarball, auth, progress, replace?) do
      {:ok, {code, body, _}} when code in 200..299 ->
        location = body["html_url"] || body["url"]
        checksum = String.downcase(Base.encode16(checksum, case: :lower))
        Hex.Shell.info("")
        Hex.Shell.info("Package published to #{location} (#{checksum})")
        :ok

      other ->
        Hex.Shell.info("")
        Hex.Shell.error("Publishing failed")
        Hex.Utils.print_error_result(other)
        :error
    end
  end

  # progress(nil) yields a no-op progress reporter.
  defp progress_fun(true, size), do: Mix.Tasks.Hex.progress(size)
  defp progress_fun(false, _size), do: Mix.Tasks.Hex.progress(nil)
end
|
lib/mix/tasks/hex.publish.ex
| 0.866514
| 0.658088
|
hex.publish.ex
|
starcoder
|
defmodule EctoCommons.DateTimeValidator do
  @moduledoc ~S"""
  This module provides validators for `DateTime`s.
  You can use the following checks:
  * `:is` to check if a `DateTime` is exactly some `DateTime`. You can also provide a `:delta` option (in seconds)
  to specify a delta around which the `DateTime` is still considered identical.
  * `:before` to check if a `DateTime` is before some `DateTime`
  * `:after` to check if a `DateTime` is after some `DateTime`
  You can also combine the given checks for complex checks. Errors won't be stacked though, the first error
  encountered will be returned and subsequent checks will be skipped.
  If you want to check everything at once, you'll need to call this validator multiple times.
  Also, instead of providing a `DateTime`, you can also provide some special atoms:
  * `:utc_now` will compare the given `DateTime` with the `DateTime` at runtime, by calling `DateTime.utc_now()`.
  ## Example:
      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate)
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [], data: %{}, valid?: true>

      # Using :is to ensure a date is identical to another date
      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, is: ~U[2016-05-24 13:26:08Z])
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [], data: %{}, valid?: true>

      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, is: ~U[2017-05-24 13:26:08Z])
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [birthdate: {"should be %{is}.", [validation: :datetime, kind: :is]}], data: %{}, valid?: false>

      # Using :is with :delta to ensure a date is near another date
      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, is: ~U[2016-05-24 13:46:08Z], delta: 3600)
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [], data: %{}, valid?: true>

      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 15:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, is: ~U[2016-05-24 13:26:08Z], delta: 3600)
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 15:26:08Z]}, errors: [birthdate: {"should be %{is}.", [validation: :datetime, kind: :is]}], data: %{}, valid?: false>

      # Using :before to ensure date is before given date
      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, before: ~U[2017-05-24 00:00:00Z])
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [], data: %{}, valid?: true>

      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, before: ~U[2015-05-24 00:00:00Z])
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [birthdate: {"should be before %{before}.", [validation: :datetime, kind: :before]}], data: %{}, valid?: false>

      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, before: :utc_now)
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [], data: %{}, valid?: true>

      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[3000-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, before: :utc_now)
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[3000-05-24 13:26:08Z]}, errors: [birthdate: {"should be before %{before}.", [validation: :datetime, kind: :before]}], data: %{}, valid?: false>

      # Using :after to ensure date is after given date
      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, after: ~U[2015-05-24 00:00:00Z])
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [], data: %{}, valid?: true>

      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[2016-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, after: ~U[2017-05-24 00:00:00Z])
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[2016-05-24 13:26:08Z]}, errors: [birthdate: {"should be after %{after}.", [validation: :datetime, kind: :after]}], data: %{}, valid?: false>

      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[3000-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, after: :utc_now)
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[3000-05-24 13:26:08Z]}, errors: [], data: %{}, valid?: true>

      iex> types = %{birthdate: :utc_datetime}
      iex> params = %{birthdate: ~U[1000-05-24 13:26:08Z]}
      iex> Ecto.Changeset.cast({%{}, types}, params, Map.keys(types))
      ...> |> validate_datetime(:birthdate, after: :utc_now)
      #Ecto.Changeset<action: nil, changes: %{birthdate: ~U[1000-05-24 13:26:08Z]}, errors: [birthdate: {"should be after %{after}.", [validation: :datetime, kind: :after]}], data: %{}, valid?: false>
  """

  import Ecto.Changeset

  @doc """
  Validates that the `DateTime` in `field` satisfies the given checks.

  Supported options: `:is` (with optional `:delta` in seconds), `:before`,
  `:after`, and `:message` to override the default error message. See the
  module documentation for examples.
  """
  def validate_datetime(changeset, field, opts \\ []) do
    validate_change(changeset, field, {:datetime, opts}, fn
      _, value ->
        # :utc_now is resolved here, at validation time.
        is = get_validation_value(opts[:is])
        afterr = get_validation_value(opts[:after])
        before = get_validation_value(opts[:before])

        # Checks run in order :is, :after, :before; the first failing check
        # produces the (only) error — subsequent checks are skipped.
        error =
          (is && wrong_datetime(value, is, opts[:delta], opts)) ||
            (afterr && too_soon(value, afterr, opts)) ||
            (before && too_late(value, before, opts))

        if error, do: [{field, error}], else: []
    end)
  end

  # Fast path: the value and the target are the exact same term.
  defp wrong_datetime(%DateTime{} = value, value, _delta, _opts), do: nil

  # No :delta given — the two DateTimes must compare equal.
  defp wrong_datetime(%DateTime{} = value, is, nil, opts) do
    case DateTime.compare(value, is) do
      :eq -> nil
      _ -> {message(opts, "should be %{is}."), validation: :datetime, kind: :is}
    end
  end

  # With :delta — accept any value within `delta` seconds (absolute) of `is`.
  defp wrong_datetime(%DateTime{} = value, is, delta, opts) do
    case DateTime.compare(value, is) do
      :eq ->
        nil

      _ ->
        case abs(DateTime.diff(value, is, :second)) do
          val when val > delta ->
            {message(opts, "should be %{is}."), validation: :datetime, kind: :is}

          _ ->
            nil
        end
    end
  end

  # Equal terms count as "after" (no error) per the first clause.
  defp too_soon(%DateTime{} = value, value, _opts), do: nil

  defp too_soon(%DateTime{} = value, afterr, opts) do
    case DateTime.compare(value, afterr) do
      :gt -> nil
      _ -> {message(opts, "should be after %{after}."), validation: :datetime, kind: :after}
    end
  end

  # Equal terms count as "before" (no error) per the first clause.
  defp too_late(%DateTime{} = value, value, _opts), do: nil

  defp too_late(%DateTime{} = value, before, opts) do
    case DateTime.compare(value, before) do
      :lt -> nil
      _ -> {message(opts, "should be before %{before}."), validation: :datetime, kind: :before}
    end
  end

  defp get_validation_value(nil), do: nil
  defp get_validation_value(:utc_now), do: DateTime.utc_now()
  defp get_validation_value(%DateTime{} = val), do: val

  # Default message can be overridden by callers via opts[:message]
  # (note the optional middle parameter).
  defp message(opts, key \\ :message, default) do
    Keyword.get(opts, key, default)
  end
end
|
lib/validators/date_time.ex
| 0.905469
| 0.494629
|
date_time.ex
|
starcoder
|
defmodule AWS.FMS do
@moduledoc """
AWS Firewall Manager
This is the *AWS Firewall Manager API Reference*.
This guide is for developers who need detailed information about the AWS
Firewall Manager API actions, data types, and errors. For detailed information
about AWS Firewall Manager features, see the [AWS Firewall Manager Developer Guide](https://docs.aws.amazon.com/waf/latest/developerguide/fms-chapter.html).
Some API actions require explicit resource permissions. For information, see the
developer guide topic [Firewall Manager required permissions for API actions](https://docs.aws.amazon.com/waf/latest/developerguide/fms-api-permissions-ref.html).
"""
alias AWS.Client
alias AWS.Request
# Static service descriptor for AWS Firewall Manager; passed as the second
# argument to `Request.request_post/5` by every action wrapper in this module.
def metadata do
  %AWS.ServiceMetadata{
    abbreviation: "FMS",
    api_version: "2018-01-01",
    content_type: "application/x-amz-json-1.1",
    credential_scope: nil,
    endpoint_prefix: "fms",
    global?: false,
    protocol: "json",
    service_id: "FMS",
    signature_version: "v4",
    signing_name: "fms",
    target_prefix: "AWSFMS_20180101"
  }
end
@doc """
Sets the AWS Firewall Manager administrator account.
AWS Firewall Manager must be associated with the master account of your AWS
organization or associated with a member account that has the appropriate
permissions. If the account ID that you submit is not an AWS Organizations
master account, AWS Firewall Manager will set the appropriate permissions for
the given member account.
The account that you associate with AWS Firewall Manager is called the AWS
Firewall Manager administrator account.
"""
# Thin wrapper: POSTs the "AssociateAdminAccount" action using `metadata/0`.
def associate_admin_account(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "AssociateAdminAccount", input, options)
end
@doc """
Permanently deletes an AWS Firewall Manager applications list.
"""
# Thin wrapper: POSTs the "DeleteAppsList" action using `metadata/0`.
def delete_apps_list(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteAppsList", input, options)
end
@doc """
Deletes an AWS Firewall Manager association with the IAM role and the Amazon
Simple Notification Service (SNS) topic that is used to record AWS Firewall
Manager SNS logs.
"""
# Thin wrapper: POSTs the "DeleteNotificationChannel" action using `metadata/0`.
def delete_notification_channel(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteNotificationChannel", input, options)
end
@doc """
Permanently deletes an AWS Firewall Manager policy.
"""
# Thin wrapper: POSTs the "DeletePolicy" action using `metadata/0`.
def delete_policy(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeletePolicy", input, options)
end
@doc """
Permanently deletes an AWS Firewall Manager protocols list.
"""
# Thin wrapper: POSTs the "DeleteProtocolsList" action using `metadata/0`.
def delete_protocols_list(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DeleteProtocolsList", input, options)
end
@doc """
Disassociates the account that has been set as the AWS Firewall Manager
administrator account.
To set a different account as the administrator account, you must submit an
`AssociateAdminAccount` request.
"""
# Thin wrapper: POSTs the "DisassociateAdminAccount" action using `metadata/0`.
def disassociate_admin_account(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "DisassociateAdminAccount", input, options)
end
@doc """
Returns the AWS Organizations master account that is associated with AWS
Firewall Manager as the AWS Firewall Manager administrator.
"""
# Thin wrapper: POSTs the "GetAdminAccount" action using `metadata/0`.
def get_admin_account(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetAdminAccount", input, options)
end
@doc """
Returns information about the specified AWS Firewall Manager applications list.
"""
# Thin wrapper: POSTs the "GetAppsList" action using `metadata/0`.
def get_apps_list(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetAppsList", input, options)
end
@doc """
Returns detailed compliance information about the specified member account.
Details include resources that are in and out of compliance with the specified
policy. Resources are considered noncompliant for AWS WAF and Shield Advanced
policies if the specified policy has not been applied to them. Resources are
considered noncompliant for security group policies if they are in scope of the
policy, they violate one or more of the policy rules, and remediation is
disabled or not possible. Resources are considered noncompliant for Network
Firewall policies if a firewall is missing in the VPC, if the firewall endpoint
isn't set up in an expected Availability Zone and subnet, if a subnet created by
the Firewall Manager doesn't have the expected route table, and for
modifications to a firewall policy that violate the Firewall Manager policy's
rules.
"""
# Thin wrapper: POSTs the "GetComplianceDetail" action using `metadata/0`.
def get_compliance_detail(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetComplianceDetail", input, options)
end
@doc """
Information about the Amazon Simple Notification Service (SNS) topic that is
used to record AWS Firewall Manager SNS logs.
"""
# Thin wrapper: POSTs the "GetNotificationChannel" action using `metadata/0`.
def get_notification_channel(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetNotificationChannel", input, options)
end
@doc """
Returns information about the specified AWS Firewall Manager policy.
"""
# Thin wrapper: POSTs the "GetPolicy" action using `metadata/0`.
def get_policy(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetPolicy", input, options)
end
@doc """
If you created a Shield Advanced policy, returns policy-level attack summary
information in the event of a potential DDoS attack.
Other policy types are currently unsupported.
"""
# Thin wrapper: POSTs the "GetProtectionStatus" action using `metadata/0`.
def get_protection_status(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetProtectionStatus", input, options)
end
@doc """
Returns information about the specified AWS Firewall Manager protocols list.
"""
# Thin wrapper: POSTs the "GetProtocolsList" action using `metadata/0`.
def get_protocols_list(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetProtocolsList", input, options)
end
@doc """
Retrieves violations for a resource based on the specified AWS Firewall Manager
policy and AWS account.
"""
# Thin wrapper: POSTs the "GetViolationDetails" action using `metadata/0`.
def get_violation_details(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "GetViolationDetails", input, options)
end
@doc """
Returns an array of `AppsListDataSummary` objects.
"""
# Thin wrapper: POSTs the "ListAppsLists" action using `metadata/0`.
def list_apps_lists(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListAppsLists", input, options)
end
@doc """
Returns an array of `PolicyComplianceStatus` objects.
Use `PolicyComplianceStatus` to get a summary of which member accounts are
protected by the specified policy.
"""
# Thin wrapper: POSTs the "ListComplianceStatus" action using `metadata/0`.
def list_compliance_status(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListComplianceStatus", input, options)
end
@doc """
Returns a `MemberAccounts` object that lists the member accounts in the
administrator's AWS organization.
The `ListMemberAccounts` must be submitted by the account that is set as the AWS
Firewall Manager administrator.
"""
# Thin wrapper: POSTs the "ListMemberAccounts" action using `metadata/0`.
def list_member_accounts(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListMemberAccounts", input, options)
end
@doc """
Returns an array of `PolicySummary` objects.
"""
# Thin wrapper: POSTs the "ListPolicies" action using `metadata/0`.
def list_policies(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListPolicies", input, options)
end
@doc """
Returns an array of `ProtocolsListDataSummary` objects.
"""
# Thin wrapper: POSTs the "ListProtocolsLists" action using `metadata/0`.
def list_protocols_lists(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListProtocolsLists", input, options)
end
@doc """
Retrieves the list of tags for the specified AWS resource.
"""
# Thin wrapper: POSTs the "ListTagsForResource" action using `metadata/0`.
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Creates an AWS Firewall Manager applications list.
"""
# Thin wrapper: POSTs the "PutAppsList" action using `metadata/0`.
def put_apps_list(%Client{} = client, input, options \\ []) do
  Request.request_post(client, metadata(), "PutAppsList", input, options)
end
@doc """
Designates the IAM role and Amazon Simple Notification Service (SNS) topic that
AWS Firewall Manager uses to record SNS logs.

To perform this action outside of the console, you must configure the SNS topic
to allow the Firewall Manager role `AWSServiceRoleForFMS` to publish SNS logs.
For more information, see [Firewall Manager required permissions for API actions](https://docs.aws.amazon.com/waf/latest/developerguide/fms-api-permissions-ref.html)
in the *AWS Firewall Manager Developer Guide*.
"""
def put_notification_channel(%Client{} = client, input, opts \\ []) do
  action = "PutNotificationChannel"
  Request.request_post(client, metadata(), action, input, opts)
end
@doc """
Creates an AWS Firewall Manager policy.

Firewall Manager provides the following types of policies:

  * An AWS WAF policy (type WAFV2), which defines rule groups to run
    first in the corresponding AWS WAF web ACL and rule groups to run last in the
    web ACL.
  * An AWS WAF Classic policy (type WAF), which defines a rule group.
  * A Shield Advanced policy, which applies Shield Advanced protection
    to specified accounts and resources.
  * A security group policy, which manages VPC security groups across
    your AWS organization.
  * An AWS Network Firewall policy, which provides firewall rules to
    filter network traffic in specified Amazon VPCs.

Each policy is specific to one of the types. If you want to enforce more than
one policy type across accounts, create multiple policies. You can create
multiple policies for each type.

You must be subscribed to Shield Advanced to create a Shield Advanced policy.
For more information about subscribing to Shield Advanced, see
[CreateSubscription](https://docs.aws.amazon.com/waf/latest/DDOSAPIReference/API_CreateSubscription.html).
"""
def put_policy(%Client{} = client, input, opts \\ []) do
  action = "PutPolicy"
  Request.request_post(client, metadata(), action, input, opts)
end
@doc """
Creates an AWS Firewall Manager protocols list.
"""
def put_protocols_list(%Client{} = client, input, opts \\ []) do
  action = "PutProtocolsList"
  Request.request_post(client, metadata(), action, input, opts)
end
@doc """
Adds one or more tags to an AWS resource.
"""
def tag_resource(%Client{} = client, input, opts \\ []) do
  action = "TagResource"
  Request.request_post(client, metadata(), action, input, opts)
end
@doc """
Removes one or more tags from an AWS resource.
"""
def untag_resource(%Client{} = client, input, opts \\ []) do
  action = "UntagResource"
  Request.request_post(client, metadata(), action, input, opts)
end
end
|
lib/aws/generated/fms.ex
| 0.841761
| 0.456773
|
fms.ex
|
starcoder
|
alias ExType.{Type, Typespec, Typespecable}
defprotocol ExType.Typespecable do
  @moduledoc false

  # Protocol over ExType's internal type structs: renders a type back into
  # quoted typespec AST, substitutes spec variables, and reports which
  # built-in protocol implementation module (if any) handles the type.

  # Converts the type struct into quoted Elixir AST for a typespec.
  @spec to_quote(Type.t()) :: any()
  def to_quote(x)

  # Replaces `Type.SpecVariable` occurrences in `x` using the `vars` map.
  @spec resolve_vars(Type.t(), %{optional(Type.SpecVariable.t()) => Type.t()}) :: Type.t()
  def resolve_vars(x, vars)

  # Returns `{:ok, module}` naming the protocol-dispatch module for this
  # type (e.g. `Integer`, `Tuple`), or `:error` when there is none.
  @spec get_protocol_path(Type.t()) :: {:ok, atom()} | :error
  def get_protocol_path(x)
end
defimpl Typespecable, for: Type.Any do
  # Renders as the builtin `any()` typespec.
  def to_quote(_), do: quote(do: any())

  # `any()` contains no spec variables to substitute.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: :error
end
defimpl Typespecable, for: Type.None do
  # Renders as the builtin `none()` typespec.
  def to_quote(_), do: quote(do: none())

  # `none()` contains no spec variables to substitute.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: :error
end
defimpl Typespecable, for: Type.Union do
  # Joins member types with `|`, e.g. `integer() | atom()`. The fold keeps
  # the same right-nesting as the original (`t3 | (t2 | t1)`).
  def to_quote(%Type.Union{types: types}) do
    [head | rest] = Enum.map(types, &Typespecable.to_quote/1)

    Enum.reduce(rest, head, fn quoted, acc ->
      quote do
        unquote(quoted) | unquote(acc)
      end
    end)
  end

  # Resolves vars in every member, then rebuilds a normalized union.
  def resolve_vars(%Type.Union{types: types}, vars) do
    types
    |> Enum.map(&Typespecable.resolve_vars(&1, vars))
    |> Typespec.union_types()
  end

  def get_protocol_path(_), do: :error
end
defimpl Typespecable, for: Type.Intersection do
  # Renders as `T.&({t1, t2, ...})` — ExType's intersection notation.
  def to_quote(%Type.Intersection{types: types}) do
    quoted = Enum.map(types, &Typespecable.to_quote/1)

    quote do
      T.&({unquote_splicing(quoted)})
    end
  end

  # Resolves vars in every member, then rebuilds a normalized intersection.
  def resolve_vars(%Type.Intersection{types: types}, vars) do
    types
    |> Enum.map(&Typespecable.resolve_vars(&1, vars))
    |> Typespec.intersect_types()
  end

  def get_protocol_path(_), do: :error
end
defimpl Typespecable, for: Type.SpecVariable do
  # A spec variable renders as a plain AST variable whose context is the
  # module the spec was declared in. (`quote do: unquote(var)` is just the
  # var itself, so we build it directly.)
  def to_quote(%Type.SpecVariable{name: name, spec: {module, _fun, _arity}}) do
    Macro.var(name, module)
  end

  # Looks the variable up in the substitution map; falls back to the
  # variable's own declared type when unbound.
  def resolve_vars(%Type.SpecVariable{} = spec_var, vars) do
    Map.get(vars, spec_var, spec_var.type)
  end

  def get_protocol_path(_), do: :error
end
defimpl Typespecable, for: Type.Protocol do
  # A protocol type renders as that protocol module's `t()` type.
  def to_quote(%Type.Protocol{module: module}) do
    quote(do: unquote(module).t())
  end

  # No spec variables inside a bare protocol reference.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: :error
end
defimpl Typespecable, for: Type.GenericProtocol do
  # Renders as `T.p(Proto, generic_t)` — ExType's parameterised-protocol
  # notation. Uses the aliased `Typespecable` for consistency with every
  # other implementation in this file (was fully-qualified
  # `ExType.Typespecable`, which resolves to the same module).
  def to_quote(%Type.GenericProtocol{module: module, generic: generic}) do
    quoted_generic = Typespecable.to_quote(generic)

    quote do
      T.p(unquote(module), unquote(quoted_generic))
    end
  end

  # Only the generic parameter can contain spec variables.
  def resolve_vars(%Type.GenericProtocol{generic: generic} = type, vars) do
    %{type | generic: Typespecable.resolve_vars(generic, vars)}
  end

  def get_protocol_path(_), do: :error
end
defimpl Typespecable, for: Type.Float do
  # Renders as the builtin `float()` typespec.
  def to_quote(%Type.Float{}), do: quote(do: float())

  # No spec variables inside a plain float type.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, Float}
end
defimpl Typespecable, for: Type.Integer do
  # Renders as the builtin `integer()` typespec.
  def to_quote(%Type.Integer{}), do: quote(do: integer())

  # No spec variables inside a plain integer type.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, Integer}
end
defimpl Typespecable, for: Type.Atom do
  # Literal atoms are emitted verbatim (an atom is valid typespec AST);
  # non-literals widen to the builtin `atom()`.
  def to_quote(%Type.Atom{literal: literal, value: value}) do
    if literal, do: value, else: quote(do: atom())
  end

  # No spec variables inside an atom type.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, Atom}
end
defimpl Typespecable, for: Type.Reference do
  # Renders as the builtin `reference()` typespec.
  def to_quote(%Type.Reference{}), do: quote(do: reference())

  # No spec variables inside a reference type.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, Reference}
end
defimpl Typespecable, for: Type.PID do
  # Renders as the builtin `pid()` typespec.
  def to_quote(%Type.PID{}), do: quote(do: pid())

  # No spec variables inside a pid type.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, PID}
end
defimpl Typespecable, for: Type.AnyFunction do
  # Renders as the any-arity function typespec `(... -> any())`.
  def to_quote(%Type.AnyFunction{}) do
    quote do
      ... -> any()
    end
  end

  # No spec variables inside the fully-general function type.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, Function}
end
defimpl Typespecable, for: Type.RawFunction do
  # A raw function only carries an arity, so it renders with that many
  # `any()` parameters and an `any()` result.
  def to_quote(%Type.RawFunction{arity: arity}) do
    args = List.duplicate(quote(do: any()), arity)

    quote do
      unquote_splicing(args) -> any()
    end
  end

  # Only the arity is tracked; nothing to substitute.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, Function}
end
defimpl Typespecable, for: Type.TypedFunction do
  # Renders as `(in1, in2, ... -> out)` with each side converted recursively.
  def to_quote(%Type.TypedFunction{inputs: inputs, output: output}) do
    ins = Enum.map(inputs, &Typespecable.to_quote/1)
    out = Typespecable.to_quote(output)

    quote do
      unquote_splicing(ins) -> unquote(out)
    end
  end

  # Substitutes variables in every input type and in the output type.
  def resolve_vars(%Type.TypedFunction{inputs: inputs, output: output}, vars) do
    %Type.TypedFunction{
      inputs: Enum.map(inputs, &Typespecable.resolve_vars(&1, vars)),
      output: Typespecable.resolve_vars(output, vars)
    }
  end

  def get_protocol_path(_), do: {:ok, Function}
end
defimpl Typespecable, for: Type.List do
  # Renders as `[elem_t]`, the proper-list typespec.
  def to_quote(%Type.List{type: type}) do
    quote(do: [unquote(Typespecable.to_quote(type))])
  end

  # Substitutes variables in the element type.
  def resolve_vars(%Type.List{type: type}, vars) do
    %Type.List{type: Typespecable.resolve_vars(type, vars)}
  end

  def get_protocol_path(_), do: {:ok, List}
end
defimpl Typespecable, for: Type.Map do
  # Renders as `%{required(key_t) => value_t}`.
  def to_quote(%Type.Map{key: key, value: value}) do
    k = Typespecable.to_quote(key)
    v = Typespecable.to_quote(value)

    quote do
      %{required(unquote(k)) => unquote(v)}
    end
  end

  # Substitutes variables in both the key and the value type.
  def resolve_vars(%Type.Map{key: key, value: value}, vars) do
    %Type.Map{
      key: Typespecable.resolve_vars(key, vars),
      value: Typespecable.resolve_vars(value, vars)
    }
  end

  def get_protocol_path(_), do: {:ok, Map}
end
defimpl Typespecable, for: Type.AnyTuple do
  # Renders as the builtin `tuple()` typespec.
  def to_quote(%Type.AnyTuple{}), do: quote(do: tuple())

  # No spec variables inside the fully-general tuple type.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, Tuple}
end
defimpl Typespecable, for: Type.TypedTuple do
  # Renders as a fixed-size tuple typespec `{t1, t2, ...}`.
  def to_quote(%Type.TypedTuple{types: types}) do
    elems = Enum.map(types, &Typespecable.to_quote/1)

    quote do
      {unquote_splicing(elems)}
    end
  end

  # Substitutes variables in every element type.
  def resolve_vars(%Type.TypedTuple{types: types}, vars) do
    %Type.TypedTuple{types: Enum.map(types, &Typespecable.resolve_vars(&1, vars))}
  end

  def get_protocol_path(_), do: {:ok, Tuple}
end
defimpl Typespecable, for: Type.BitString do
  # Renders as the builtin `bitstring()` typespec.
  def to_quote(%Type.BitString{}), do: quote(do: bitstring())

  # No spec variables inside a bitstring type.
  def resolve_vars(type, _vars), do: type

  def get_protocol_path(_), do: {:ok, BitString}
end
defimpl Typespecable, for: Type.Struct do
  # Renders as a struct typespec `%StructMod{field: t, ...}` with every
  # tracked field type converted recursively.
  def to_quote(%Type.Struct{struct: struct, types: types}) do
    quote do
      %unquote(struct){
        unquote_splicing(
          Enum.map(types, fn {key, value} ->
            {key, Typespecable.to_quote(value)}
          end)
        )
      }
    end
  end

  def resolve_vars(type, _) do
    # TODO: fix this
    # NOTE(review): field types are NOT resolved here, unlike the other
    # composite implementations (Map/List/Tuple) — the TODO above suggests
    # this is a known gap, not intentional.
    type
  end

  # Structs dispatch protocols via their own module.
  def get_protocol_path(%Type.Struct{struct: struct}) do
    {:ok, struct}
  end
end
|
lib/ex_type/typespecable.ex
| 0.779574
| 0.552419
|
typespecable.ex
|
starcoder
|
defmodule Color do
defstruct red: 0, blue: 0, green: 0
@colors %{
aliceBlue: [240, 248, 255],
antique_white1: [255, 239, 219],
antique_white2: [238, 223, 204],
antique_white3: [205, 192, 176],
antique_white4: [139, 131, 120],
antique_white: [250, 235, 215],
blanched_almond: [255, 235, 205],
blue_violet: [138, 43, 226],
cadet_blue1: [152, 245, 255],
cadet_blue2: [142, 229, 238],
cadet_blue3: [122, 197, 205],
cadet_blue4: [83, 134, 139],
cadet_blue: [95, 158, 160],
cornflower_blue: [100, 149, 237],
dark_blue: [0, 0, 139],
dark_cyan: [0, 139, 139],
dark_goldenrod1: [255, 185, 15],
dark_goldenrod2: [238, 173, 14],
dark_goldenrod3: [205, 149, 12],
dark_goldenrod4: [139, 101, 8],
dark_goldenrod: [184, 134, 11],
dark_gray: [169, 169, 169],
dark_green: [0, 100, 0],
dark_grey: [169, 169, 169],
dark_khaki: [189, 183, 107],
dark_magenta: [139, 0, 139],
dark_olive_green1: [202, 255, 112],
dark_olive_green2: [188, 238, 104],
dark_olive_green3: [162, 205, 90],
dark_olive_green4: [110, 139, 61],
dark_olive_green: [85, 107, 47],
dark_orange1: [255, 127, 0],
dark_orange2: [238, 118, 0],
dark_orange3: [205, 102, 0],
dark_orange4: [139, 69, 0],
dark_orange: [255, 140, 0],
dark_orchid1: [191, 62, 255],
dark_orchid2: [178, 58, 238],
dark_orchid3: [154, 50, 205],
dark_orchid4: [104, 34, 139],
dark_orchid: [153, 50, 204],
dark_red: [139, 0, 0],
dark_salmon: [233, 150, 122],
dark_sea_green1: [193, 255, 193],
dark_sea_green2: [180, 238, 180],
dark_sea_green3: [155, 205, 155],
dark_sea_green4: [105, 139, 105],
dark_sea_green: [143, 188, 143],
dark_slate_blue: [72, 61, 139],
dark_slate_gray1: [151, 255, 255],
dark_slate_gray2: [141, 238, 238],
dark_slate_gray3: [121, 205, 205],
dark_slate_gray4: [82, 139, 139],
dark_slate_gray: [47, 79, 79],
dark_slate_grey: [47, 79, 79],
dark_turquoise: [0, 206, 209],
dark_violet: [148, 0, 211],
deep_pink1: [255, 20, 147],
deep_pink2: [238, 18, 137],
deep_pink3: [205, 16, 118],
deep_pink4: [139, 10, 80],
deep_pink: [255, 20, 147],
deep_sky_blue1: [0, 191, 255],
deep_sky_blue2: [0, 178, 238],
deep_sky_blue3: [0, 154, 205],
deep_sky_blue4: [0, 104, 139],
deep_sky_blue: [0, 191, 255],
dim_gray: [105, 105, 105],
dim_grey: [105, 105, 105],
dodger_blue1: [30, 144, 255],
dodger_blue2: [28, 134, 238],
dodger_blue3: [24, 116, 205],
dodger_blue4: [16, 78, 139],
dodger_blue: [30, 144, 255],
floral_white: [255, 250, 240],
forest_green: [34, 139, 34],
ghost_white: [248, 248, 255],
green_yellow: [173, 255, 47],
hot_pink1: [255, 110, 180],
hot_pink2: [238, 106, 167],
hot_pink3: [205, 96, 144],
hot_pink4: [139, 58, 98],
hot_pink: [255, 105, 180],
indian_red1: [255, 106, 106],
indian_red2: [238, 99, 99],
indian_red3: [205, 85, 85],
indian_red4: [139, 58, 58],
indian_red: [205, 92, 92],
lavender_blush1: [255, 240, 245],
lavender_blush2: [238, 224, 229],
lavender_blush3: [205, 193, 197],
lavender_blush4: [139, 131, 134],
lavender_blush: [255, 240, 245],
lawn_green: [124, 252, 0],
lemon_chiffon1: [255, 250, 205],
lemon_chiffon2: [238, 233, 191],
lemon_chiffon3: [205, 201, 165],
lemon_chiffon4: [139, 137, 112],
lemon_chiffon: [255, 250, 205],
light_blue1: [191, 239, 255],
light_blue2: [178, 223, 238],
light_blue3: [154, 192, 205],
light_blue4: [104, 131, 139],
light_blue: [173, 216, 230],
light_coral: [240, 128, 128],
light_cyan1: [224, 255, 255],
light_cyan2: [209, 238, 238],
light_cyan3: [180, 205, 205],
light_cyan4: [122, 139, 139],
light_cyan: [224, 255, 255],
light_goldenrod1: [255, 236, 139],
light_goldenrod2: [238, 220, 130],
light_goldenrod3: [205, 190, 112],
light_goldenrod4: [139, 129, 76],
light_goldenrod: [238, 221, 130],
light_goldenrod_yellow: [250, 250, 210],
light_gray: [211, 211, 211],
light_green: [144, 238, 144],
light_grey: [211, 211, 211],
light_pink1: [255, 174, 185],
light_pink2: [238, 162, 173],
light_pink3: [205, 140, 149],
light_pink4: [139, 95, 101],
light_pink: [255, 182, 193],
light_salmon1: [255, 160, 122],
light_salmon2: [238, 149, 114],
light_salmon3: [205, 129, 98],
light_salmon4: [139, 87, 66],
light_salmon: [255, 160, 122],
light_sea_green: [32, 178, 170],
light_sky_blue1: [176, 226, 255],
light_sky_blue2: [164, 211, 238],
light_sky_blue3: [141, 182, 205],
light_sky_blue4: [96, 123, 139],
light_sky_blue: [135, 206, 250],
light_slate_blue: [132, 112, 255],
light_slate_gray: [119, 136, 153],
light_slate_grey: [119, 136, 153],
light_steel_blue1: [202, 225, 255],
light_steel_blue2: [188, 210, 238],
light_steel_blue3: [162, 181, 205],
light_steel_blue4: [110, 123, 139],
light_steel_blue: [176, 196, 222],
light_yellow1: [255, 255, 224],
light_yellow2: [238, 238, 209],
light_yellow3: [205, 205, 180],
light_yellow4: [139, 139, 122],
light_yellow: [255, 255, 224],
lime_green: [50, 205, 50],
medium_aquamarine: [102, 205, 170],
medium_blue: [0, 0, 205],
medium_orchid1: [224, 102, 255],
medium_orchid2: [209, 95, 238],
medium_orchid3: [180, 82, 205],
medium_orchid4: [122, 55, 139],
medium_orchid: [186, 85, 211],
medium_purple1: [171, 130, 255],
medium_purple2: [159, 121, 238],
medium_purple3: [137, 104, 205],
medium_purple4: [93, 71, 139],
medium_purple: [147, 112, 219],
medium_sea_green: [60, 179, 113],
medium_slate_blue: [123, 104, 238],
medium_spring_green: [0, 250, 154],
medium_turquoise: [72, 209, 204],
medium_violet_red: [199, 21, 133],
midnight_blue: [25, 25, 112],
mint_cream: [245, 255, 250],
misty_rose1: [255, 228, 225],
misty_rose2: [238, 213, 210],
misty_rose3: [205, 183, 181],
misty_rose4: [139, 125, 123],
misty_rose: [255, 228, 225],
navajo_white1: [255, 222, 173],
navajo_white2: [238, 207, 161],
navajo_white3: [205, 179, 139],
navajo_white4: [139, 121, 94],
navajo_white: [255, 222, 173],
navy_blue: [0, 0, 128],
old_lace: [253, 245, 230],
olive_drab1: [192, 255, 62],
olive_drab2: [179, 238, 58],
olive_drab3: [154, 205, 50],
olive_drab4: [105, 139, 34],
olive_drab: [107, 142, 35],
orange_red1: [255, 69, 0],
orange_red2: [238, 64, 0],
orange_red3: [205, 55, 0],
orange_red4: [139, 37, 0],
orange_red: [255, 69, 0],
pale_goldenrod: [238, 232, 170],
pale_green1: [154, 255, 154],
pale_green2: [144, 238, 144],
pale_green3: [124, 205, 124],
pale_green4: [84, 139, 84],
pale_green: [152, 251, 152],
pale_turquoise1: [187, 255, 255],
pale_turquoise2: [174, 238, 238],
pale_turquoise3: [150, 205, 205],
pale_turquoise4: [102, 139, 139],
pale_turquoise: [175, 238, 238],
pale_violet_red1: [255, 130, 171],
pale_violet_red2: [238, 121, 159],
pale_violet_red3: [205, 104, 137],
pale_violet_red4: [139, 71, 93],
pale_violet_red: [219, 112, 147],
papaya_whip: [255, 239, 213],
peach_puff1: [255, 218, 185],
peach_puff2: [238, 203, 173],
peach_puff3: [205, 175, 149],
peach_puff4: [139, 119, 101],
peach_puff: [255, 218, 185],
powder_blue: [176, 224, 230],
rosy_brown1: [255, 193, 193],
rosy_brown2: [238, 180, 180],
rosy_brown3: [205, 155, 155],
rosy_brown4: [139, 105, 105],
rosy_brown: [188, 143, 143],
royal_blue1: [72, 118, 255],
royal_blue2: [67, 110, 238],
royal_blue3: [58, 95, 205],
royal_blue4: [39, 64, 139],
royal_blue: [65, 105, 225],
saddle_brown: [139, 69, 19],
sandy_brown: [244, 164, 96],
sea_green1: [84, 255, 159],
sea_green2: [78, 238, 148],
sea_green3: [67, 205, 128],
sea_green4: [46, 139, 87],
sea_green: [46, 139, 87],
sky_blue1: [135, 206, 255],
sky_blue2: [126, 192, 238],
sky_blue3: [108, 166, 205],
sky_blue4: [74, 112, 139],
sky_blue: [135, 206, 235],
slate_blue1: [131, 111, 255],
slate_blue2: [122, 103, 238],
slate_blue3: [105, 89, 205],
slate_blue4: [71, 60, 139],
slate_blue: [106, 90, 205],
slate_gray1: [198, 226, 255],
slate_gray2: [185, 211, 238],
slate_gray3: [159, 182, 205],
slate_gray4: [108, 123, 139],
slate_gray: [112, 128, 144],
slate_grey: [112, 128, 144],
spring_green1: [0, 255, 127],
spring_green2: [0, 238, 118],
spring_green3: [0, 205, 102],
spring_green4: [0, 139, 69],
spring_green: [0, 255, 127],
steel_blue1: [99, 184, 255],
steel_blue2: [92, 172, 238],
steel_blue3: [79, 148, 205],
steel_blue4: [54, 100, 139],
steel_blue: [70, 130, 180],
violet_red1: [255, 62, 150],
violet_red2: [238, 58, 140],
violet_red3: [205, 50, 120],
violet_red4: [139, 34, 82],
violet_red: [208, 32, 144],
white_smoke: [245, 245, 245],
yellow_green: [154, 205, 50],
alice_blue: [240, 248, 255],
antique_white: [250, 235, 215],
aquamarine1: [127, 255, 212],
aquamarine2: [118, 238, 198],
aquamarine3: [102, 205, 170],
aquamarine4: [69, 139, 116],
aquamarine: [127, 255, 212],
azure1: [240, 255, 255],
azure2: [224, 238, 238],
azure3: [193, 205, 205],
azure4: [131, 139, 139],
azure: [240, 255, 255],
beige: [245, 245, 220],
bisque1: [255, 228, 196],
bisque2: [238, 213, 183],
bisque3: [205, 183, 158],
bisque4: [139, 125, 107],
bisque: [255, 228, 196],
black: [0, 0, 0],
blanched_almond: [255, 235, 205],
blue1: [0, 0, 255],
blue2: [0, 0, 238],
blue3: [0, 0, 205],
blue4: [0, 0, 139],
blue: [0, 0, 255],
blue_violet: [138, 43, 226],
brown1: [255, 64, 64],
brown2: [238, 59, 59],
brown3: [205, 51, 51],
brown4: [139, 35, 35],
brown: [165, 42, 42],
burlywood1: [255, 211, 155],
burlywood2: [238, 197, 145],
burlywood3: [205, 170, 125],
burlywood4: [139, 115, 85],
burlywood: [222, 184, 135],
cadet_blue: [95, 158, 160],
chartreuse1: [127, 255, 0],
chartreuse2: [118, 238, 0],
chartreuse3: [102, 205, 0],
chartreuse4: [69, 139, 0],
chartreuse: [127, 255, 0],
chocolate1: [255, 127, 36],
chocolate2: [238, 118, 33],
chocolate3: [205, 102, 29],
chocolate4: [139, 69, 19],
chocolate: [210, 105, 30],
coral1: [255, 114, 86],
coral2: [238, 106, 80],
coral3: [205, 91, 69],
coral4: [139, 62, 47],
coral: [255, 127, 80],
cornflower_blue: [100, 149, 237],
cornsilk1: [255, 248, 220],
cornsilk2: [238, 232, 205],
cornsilk3: [205, 200, 177],
cornsilk4: [139, 136, 120],
cornsilk: [255, 248, 220],
cyan1: [0, 255, 255],
cyan2: [0, 238, 238],
cyan3: [0, 205, 205],
cyan4: [0, 139, 139],
cyan: [0, 255, 255],
dark_blue: [0, 0, 139],
dark_cyan: [0, 139, 139],
dark_goldenrod: [184, 134, 11],
dark_gray: [169, 169, 169],
dark_green: [0, 100, 0],
dark_grey: [169, 169, 169],
dark_khaki: [189, 183, 107],
dark_magenta: [139, 0, 139],
dark_olive_green: [85, 107, 47],
dark_orange: [255, 140, 0],
dark_orchid: [153, 50, 204],
dark_red: [139, 0, 0],
dark_salmon: [233, 150, 122],
dark_sea_green: [143, 188, 143],
dark_slate_blue: [72, 61, 139],
dark_slate_gray: [47, 79, 79],
dark_slate_grey: [47, 79, 79],
dark_turquoise: [0, 206, 209],
dark_violet: [148, 0, 211],
deep_pink: [255, 20, 147],
deep_sky_blue: [0, 191, 255],
dim_gray: [105, 105, 105],
dim_grey: [105, 105, 105],
dodger_blue: [30, 144, 255],
firebrick1: [255, 48, 48],
firebrick2: [238, 44, 44],
firebrick3: [205, 38, 38],
firebrick4: [139, 26, 26],
firebrick: [178, 34, 34],
floral_white: [255, 250, 240],
forest_green: [34, 139, 34],
gainsboro: [220, 220, 220],
ghost_white: [248, 248, 255],
gold1: [255, 215, 0],
gold2: [238, 201, 0],
gold3: [205, 173, 0],
gold4: [139, 117, 0],
goldenrod1: [255, 193, 37],
goldenrod2: [238, 180, 34],
goldenrod3: [205, 155, 29],
goldenrod4: [139, 105, 20],
goldenrod: [218, 165, 32],
gray0: [0, 0, 0],
gray1: [3, 3, 3],
gray2: [5, 5, 5],
gray3: [8, 8, 8],
gray: [190, 190, 190],
green1: [0, 255, 0],
green2: [0, 238, 0],
green3: [0, 205, 0],
green4: [0, 139, 0],
green: [0, 255, 0],
green_yellow: [173, 255, 47],
grey0: [0, 0, 0],
grey1: [3, 3, 3],
grey2: [5, 5, 5],
grey3: [8, 8, 8],
grey: [190, 190, 190],
honeydew1: [240, 255, 240],
honeydew2: [224, 238, 224],
honeydew3: [193, 205, 193],
honeydew4: [131, 139, 131],
honeydew: [240, 255, 240],
hot_pink: [255, 105, 180],
indian_red: [205, 92, 92],
ivory1: [255, 255, 240],
ivory2: [238, 238, 224],
ivory3: [205, 205, 193],
ivory4: [139, 139, 131],
ivory: [255, 255, 240],
khaki1: [255, 246, 143],
khaki2: [238, 230, 133],
khaki3: [205, 198, 115],
khaki4: [139, 134, 78],
khaki: [240, 230, 140],
lavender: [230, 230, 250],
lavender_blush: [255, 240, 245],
lawn_green: [124, 252, 0],
lemon_chiffon: [255, 250, 205],
light_blue: [173, 216, 230],
light_coral: [240, 128, 128],
light_cyan: [224, 255, 255],
light_goldenrod: [238, 221, 130],
light_goldenrod_yellow: [250, 250, 210],
light_gray: [211, 211, 211],
light_green: [144, 238, 144],
light_grey: [211, 211, 211],
light_pink: [255, 182, 193],
light_salmon: [255, 160, 122],
light_sea_green: [32, 178, 170],
light_sky_blue: [135, 206, 250],
light_slate_blue: [132, 112, 255],
light_slate_gray: [119, 136, 153],
light_slate_grey: [119, 136, 153],
light_steel_blue: [176, 196, 222],
light_yellow: [255, 255, 224],
lime_green: [50, 205, 50],
linen: [250, 240, 230],
magenta1: [255, 0, 255],
magenta2: [238, 0, 238],
magenta3: [205, 0, 205],
magenta4: [139, 0, 139],
magenta: [255, 0, 255],
maroon1: [255, 52, 179],
maroon2: [238, 48, 167],
maroon3: [205, 41, 144],
maroon4: [139, 28, 98],
maroon: [176, 48, 96],
medium_aquamarine: [102, 205, 170],
medium_blue: [0, 0, 205],
medium_orchid: [186, 85, 211],
medium_purple: [147, 112, 219],
medium_sea_green: [60, 179, 113],
medium_slate_blue: [123, 104, 238],
medium_spring_green: [0, 250, 154],
medium_turquoise: [72, 209, 204],
medium_violet_red: [199, 21, 133],
midnight_blue: [25, 25, 112],
mint_cream: [245, 255, 250],
misty_rose: [255, 228, 225],
moccasin: [255, 228, 181],
navajo_white: [255, 222, 173],
navy: [0, 0, 128],
navy_blue: [0, 0, 128],
old_lace: [253, 245, 230],
olive_drab: [107, 142, 35],
orange1: [255, 165, 0],
orange2: [238, 154, 0],
orange3: [205, 133, 0],
orange4: [139, 90, 0],
orange: [255, 165, 0],
orange_red: [255, 69, 0],
orchid1: [255, 131, 250],
orchid2: [238, 122, 233],
orchid3: [205, 105, 201],
orchid4: [139, 71, 137],
orchid: [218, 112, 214],
pale_goldenrod: [238, 232, 170],
pale_green: [152, 251, 152],
pale_turquoise: [175, 238, 238],
pale_violet_red: [219, 112, 147],
papaya_whip: [255, 239, 213],
peach_puff: [255, 218, 185],
peru: [205, 133, 63],
pink1: [255, 181, 197],
pink2: [238, 169, 184],
pink3: [205, 145, 158],
pink4: [139, 99, 108],
pink: [255, 192, 203],
plum1: [255, 187, 255],
plum2: [238, 174, 238],
plum3: [205, 150, 205],
plum4: [139, 102, 139],
plum: [221, 160, 221],
powder_blue: [176, 224, 230],
purple1: [155, 48, 255],
purple2: [145, 44, 238],
purple3: [125, 38, 205],
purple4: [85, 26, 139],
purple: [160, 32, 240],
red1: [255, 0, 0],
red2: [238, 0, 0],
red3: [205, 0, 0],
red4: [139, 0, 0],
red: [255, 0, 0],
rosy_brown: [188, 143, 143],
royal_blue: [65, 105, 225],
saddle_brown: [139, 69, 19],
salmon1: [255, 140, 105],
salmon2: [238, 130, 98],
salmon3: [205, 112, 84],
salmon4: [139, 76, 57],
salmon: [250, 128, 114],
sandy_brown: [244, 164, 96],
sea_green: [46, 139, 87],
seashell1: [255, 245, 238],
seashell2: [238, 229, 222],
seashell3: [205, 197, 191],
seashell4: [139, 134, 130],
seashell: [255, 245, 238],
sienna1: [255, 130, 71],
sienna2: [238, 121, 66],
sienna3: [205, 104, 57],
sienna4: [139, 71, 38],
sienna: [160, 82, 45],
sky_blue: [135, 206, 235],
slate_blue: [106, 90, 205],
slate_gray: [112, 128, 144],
slate_grey: [112, 128, 144],
snow1: [255, 250, 250],
snow2: [238, 233, 233],
snow3: [205, 201, 201],
snow4: [139, 137, 137],
snow: [255, 250, 250],
spring_green: [0, 255, 127],
steel_blue: [70, 130, 180],
tan1: [255, 165, 79],
tan2: [238, 154, 73],
tan3: [205, 133, 63],
tan4: [139, 90, 43],
tan: [210, 180, 140],
thistle1: [255, 225, 255],
thistle2: [238, 210, 238],
thistle3: [205, 181, 205],
thistle4: [139, 123, 139],
thistle: [216, 191, 216],
tomato1: [255, 99, 71],
tomato2: [238, 92, 66],
tomato3: [205, 79, 57],
tomato4: [139, 54, 38],
tomato: [255, 99, 71],
turquoise1: [0, 245, 255],
turquoise2: [0, 229, 238],
turquoise3: [0, 197, 205],
turquoise4: [0, 134, 139],
turquoise: [64, 224, 208],
violet: [238, 130, 238],
violet_red: [208, 32, 144],
wheat1: [255, 231, 186],
wheat2: [238, 216, 174],
wheat3: [205, 186, 150],
wheat4: [139, 126, 102],
wheat: [245, 222, 179],
white: [255, 255, 255],
white_smoke: [245, 245, 245],
yellow1: [255, 255, 0],
yellow2: [238, 238, 0],
yellow3: [205, 205, 0],
yellow4: [139, 139, 0],
yellow: [255, 255, 0],
yellow_green: [154, 205, 50],
gold: [255, 215, 0],
gray100: [255, 255, 255],
gray10: [26, 26, 26],
gray11: [28, 28, 28],
gray12: [31, 31, 31],
gray13: [33, 33, 33],
gray14: [36, 36, 36],
gray15: [38, 38, 38],
gray16: [41, 41, 41],
gray17: [43, 43, 43],
gray18: [46, 46, 46],
gray19: [48, 48, 48],
gray20: [51, 51, 51],
gray21: [54, 54, 54],
gray22: [56, 56, 56],
gray23: [59, 59, 59],
gray24: [61, 61, 61],
gray25: [64, 64, 64],
gray26: [66, 66, 66],
gray27: [69, 69, 69],
gray28: [71, 71, 71],
gray29: [74, 74, 74],
gray30: [77, 77, 77],
gray31: [79, 79, 79],
gray32: [82, 82, 82],
gray33: [84, 84, 84],
gray34: [87, 87, 87],
gray35: [89, 89, 89],
gray36: [92, 92, 92],
gray37: [94, 94, 94],
gray38: [97, 97, 97],
gray39: [99, 99, 99],
gray40: [102, 102, 102],
gray41: [105, 105, 105],
gray42: [107, 107, 107],
gray43: [110, 110, 110],
gray44: [112, 112, 112],
gray45: [115, 115, 115],
gray46: [117, 117, 117],
gray47: [120, 120, 120],
gray48: [122, 122, 122],
gray49: [125, 125, 125],
gray4: [10, 10, 10],
gray50: [127, 127, 127],
gray51: [130, 130, 130],
gray52: [133, 133, 133],
gray53: [135, 135, 135],
gray54: [138, 138, 138],
gray55: [140, 140, 140],
gray56: [143, 143, 143],
gray57: [145, 145, 145],
gray58: [148, 148, 148],
gray59: [150, 150, 150],
gray5: [13, 13, 13],
gray60: [153, 153, 153],
gray61: [156, 156, 156],
gray62: [158, 158, 158],
gray63: [161, 161, 161],
gray64: [163, 163, 163],
gray65: [166, 166, 166],
gray66: [168, 168, 168],
gray67: [171, 171, 171],
gray68: [173, 173, 173],
gray69: [176, 176, 176],
gray6: [15, 15, 15],
gray70: [179, 179, 179],
gray71: [181, 181, 181],
gray72: [184, 184, 184],
gray73: [186, 186, 186],
gray74: [189, 189, 189],
gray75: [191, 191, 191],
gray76: [194, 194, 194],
gray77: [196, 196, 196],
gray78: [199, 199, 199],
gray79: [201, 201, 201],
gray7: [18, 18, 18],
gray80: [204, 204, 204],
gray81: [207, 207, 207],
gray82: [209, 209, 209],
gray83: [212, 212, 212],
gray84: [214, 214, 214],
gray85: [217, 217, 217],
gray86: [219, 219, 219],
gray87: [222, 222, 222],
gray88: [224, 224, 224],
gray89: [227, 227, 227],
gray8: [20, 20, 20],
gray90: [229, 229, 229],
gray91: [232, 232, 232],
gray92: [235, 235, 235],
gray93: [237, 237, 237],
gray94: [240, 240, 240],
gray95: [242, 242, 242],
gray96: [245, 245, 245],
gray97: [247, 247, 247],
gray98: [250, 250, 250],
gray99: [252, 252, 252],
gray9: [23, 23, 23],
grey100: [255, 255, 255],
grey10: [26, 26, 26],
grey11: [28, 28, 28],
grey12: [31, 31, 31],
grey13: [33, 33, 33],
grey14: [36, 36, 36],
grey15: [38, 38, 38],
grey16: [41, 41, 41],
grey17: [43, 43, 43],
grey18: [46, 46, 46],
grey19: [48, 48, 48],
grey20: [51, 51, 51],
grey21: [54, 54, 54],
grey22: [56, 56, 56],
grey23: [59, 59, 59],
grey24: [61, 61, 61],
grey25: [64, 64, 64],
grey26: [66, 66, 66],
grey27: [69, 69, 69],
grey28: [71, 71, 71],
grey29: [74, 74, 74],
grey30: [77, 77, 77],
grey31: [79, 79, 79],
grey32: [82, 82, 82],
grey33: [84, 84, 84],
grey34: [87, 87, 87],
grey35: [89, 89, 89],
grey36: [92, 92, 92],
grey37: [94, 94, 94],
grey38: [97, 97, 97],
grey39: [99, 99, 99],
grey40: [102, 102, 102],
grey41: [105, 105, 105],
grey42: [107, 107, 107],
grey43: [110, 110, 110],
grey44: [112, 112, 112],
grey45: [115, 115, 115],
grey46: [117, 117, 117],
grey47: [120, 120, 120],
grey48: [122, 122, 122],
grey49: [125, 125, 125],
grey4: [10, 10, 10],
grey50: [127, 127, 127],
grey51: [130, 130, 130],
grey52: [133, 133, 133],
grey53: [135, 135, 135],
grey54: [138, 138, 138],
grey55: [140, 140, 140],
grey56: [143, 143, 143],
grey57: [145, 145, 145],
grey58: [148, 148, 148],
grey59: [150, 150, 150],
grey5: [13, 13, 13],
grey60: [153, 153, 153],
grey61: [156, 156, 156],
grey62: [158, 158, 158],
grey63: [161, 161, 161],
grey64: [163, 163, 163],
grey65: [166, 166, 166],
grey66: [168, 168, 168],
grey67: [171, 171, 171],
grey68: [173, 173, 173],
grey69: [176, 176, 176],
grey6: [15, 15, 15],
grey70: [179, 179, 179],
grey71: [181, 181, 181],
grey72: [184, 184, 184],
grey73: [186, 186, 186],
grey74: [189, 189, 189],
grey75: [191, 191, 191],
grey76: [194, 194, 194],
grey77: [196, 196, 196],
grey78: [199, 199, 199],
grey79: [201, 201, 201],
grey7: [18, 18, 18],
grey80: [204, 204, 204],
grey81: [207, 207, 207],
grey82: [209, 209, 209],
grey83: [212, 212, 212],
grey84: [214, 214, 214],
grey85: [217, 217, 217],
grey86: [219, 219, 219],
grey87: [222, 222, 222],
grey88: [224, 224, 224],
grey89: [227, 227, 227],
grey8: [20, 20, 20],
grey90: [229, 229, 229],
grey91: [232, 232, 232],
grey92: [235, 235, 235],
grey93: [237, 237, 237],
grey94: [240, 240, 240],
grey95: [242, 242, 242],
grey96: [245, 245, 245],
grey97: [247, 247, 247],
grey98: [250, 250, 250],
grey99: [252, 252, 252],
grey9: [23, 23, 23]
}
# Compile-time generation: defines a zero-arity constructor for every
# entry in @colors, e.g. `Color.red/0` => %Color{red: 255, green: 0, blue: 0}.
# Duplicate keys in the @colors literal collapse in the map, so each name
# produces exactly one function.
for {name, [r, g, b]} <- @colors do
  def unquote(name)() do
    %Color{red: unquote(r), green: unquote(g), blue: unquote(b)}
  end
end
# Looks a color up by atom name at runtime.
# Raises `KeyError` when `name` is not a key of @colors.
def named(name) do
  [r, g, b] = Map.fetch!(@colors, name)
  %Color{red: r, blue: b, green: g}
end
# Converts a %Color{} struct into a flat channel list.
# NOTE(review): the result order is [blue, green, red] (BGR), not RGB —
# confirm downstream consumers expect BGR before changing.
def to_list(%Color{red: red, blue: blue, green: green}) do
  [blue, green, red]
end
end
|
lib/color.ex
| 0.709221
| 0.611353
|
color.ex
|
starcoder
|
defmodule FFprobe do
  @moduledoc """
  Execute ffprobe CLI commands.

  > `ffprobe` is a simple multimedia streams analyzer. You can use it to output
  all kinds of information about an input including duration, frame rate, frame size, etc.
  It is also useful for gathering specific information about an input to be used in a script.

  (from https://trac.ffmpeg.org/wiki/FFprobeTips)
  """

  # `Map.t()` is not a defined type (the Map module exports no t/0);
  # use the builtin `map()` so Dialyzer can resolve these.
  @type format_map :: map()
  @type streams_list :: [map()]

  @doc """
  Get the duration in seconds, as a float.

  If there is no duration (e.g., a still image) or ffprobe reports a
  non-numeric value such as `"N/A"`, returns `:no_duration`.
  """
  @spec duration(binary | format_map) :: float | :no_duration
  def duration(file_path) when is_binary(file_path) do
    duration(format(file_path))
  end

  def duration(format_map) when is_map(format_map) do
    case format_map["duration"] do
      nil ->
        :no_duration

      raw ->
        case Float.parse(raw) do
          {seconds, _rest} -> seconds
          # Previously crashed on unparseable values ("N/A"); treat as absent.
          :error -> :no_duration
        end
    end
  end

  @doc """
  Get a list of format names for the file.

  ffprobe reports a comma-separated list (e.g. `"mov,mp4,m4a,3gp,3g2,mj2"`),
  which is split into individual names.
  """
  @spec format_names(binary | format_map) :: [binary]
  def format_names(file_path) when is_binary(file_path) do
    format_names(format(file_path))
  end

  def format_names(format_map) when is_map(format_map) do
    String.split(format_map["format_name"], ",")
  end

  @doc """
  Get the "format" map, containing general info for the specified file,
  such as number of streams, duration, file size, and more.
  """
  @spec format(binary) :: format_map | no_return
  def format(file_path) do
    file_path
    |> probe("-show_format")
    |> Map.get("format", %{})
  end

  @doc """
  Get the list of stream maps for the specified file (one map per
  audio/video/subtitle stream).
  """
  @spec streams(binary) :: streams_list | no_return
  def streams(file_path) do
    file_path
    |> probe("-show_streams")
    |> Map.get("streams", [])
  end

  # Runs ffprobe with quiet JSON output plus the given `-show_*` flag and
  # decodes stdout. Raises (MatchError) if ffprobe exits non-zero.
  defp probe(file_path, show_flag) do
    cmd_args = ["-v", "quiet", "-print_format", "json", show_flag, file_path]
    {output, 0} = System.cmd(ffprobe_path(), cmd_args, stderr_to_stdout: true)
    Jason.decode!(output)
  end

  # Read ffprobe path from config. If unspecified, assume `ffprobe` is in env $PATH.
  defp ffprobe_path do
    case Application.get_env(:ffmpex, :ffprobe_path, nil) do
      nil -> System.find_executable("ffprobe")
      path -> path
    end
  end
end
|
lib/ffprobe.ex
| 0.784979
| 0.46721
|
ffprobe.ex
|
starcoder
|
defmodule QueryBuilder.Query.Where do
  @moduledoc false
  require Ecto.Query
  import QueryBuilder.Utils

  # Entry point: joins the requested associations (so filters may reference
  # fields on associated schemas), then applies each filter to the query.
  def where(query, assoc_fields, filters) do
    token = QueryBuilder.Token.token(query, assoc_fields)
    {query, token} = QueryBuilder.JoinMaker.make_joins(query, token)
    apply_filters(query, token, List.wrap(filters))
  end

  # Fold apply_filter/3 over the filter list.
  defp apply_filters(query, _token, []), do: query

  defp apply_filters(query, token, [filter | tail]) do
    query = apply_filter(query, token, filter)
    apply_filters(query, token, tail)
  end

  # {field, value} is shorthand for {field, :eq, value}.
  defp apply_filter(query, token, {field, value}) do
    apply_filter(query, token, {field, :eq, value})
  end

  # An atom on the right-hand side names *another field*, not a literal value,
  # so both sides are resolved to {field, binding} pairs.
  # NOTE(review): this means an atom literal cannot be used as a comparison
  # value in the 3-tuple form — confirm that is intended.
  defp apply_filter(query, token, {field1, operator, field2}) when is_atom(field2) do
    {field1, binding_field1} = find_field_and_binding_from_token(query, token, field1)
    {field2, binding_field2} = find_field_and_binding_from_token(query, token, field2)
    do_where(query, binding_field1, binding_field2, {field1, operator, field2})
  end

  # Field compared against a literal value.
  defp apply_filter(query, token, {field, operator, value}) do
    {field, binding} = find_field_and_binding_from_token(query, token, field)
    do_where(query, binding, {field, operator, value})
  end

  # --- field <operator> literal value -------------------------------------
  # One clause per supported operator; `^binding` selects the named join
  # binding the field was resolved to.

  defp do_where(query, binding, {field, :eq, value}) do
    Ecto.Query.where(query, [{^binding, x}], field(x, ^field) == ^value)
  end

  defp do_where(query, binding, {field, :ne, value}) do
    Ecto.Query.where(query, [{^binding, x}], field(x, ^field) != ^value)
  end

  defp do_where(query, binding, {field, :gt, value}) do
    Ecto.Query.where(query, [{^binding, x}], field(x, ^field) > ^value)
  end

  defp do_where(query, binding, {field, :ge, value}) do
    Ecto.Query.where(query, [{^binding, x}], field(x, ^field) >= ^value)
  end

  defp do_where(query, binding, {field, :lt, value}) do
    Ecto.Query.where(query, [{^binding, x}], field(x, ^field) < ^value)
  end

  defp do_where(query, binding, {field, :le, value}) do
    Ecto.Query.where(query, [{^binding, x}], field(x, ^field) <= ^value)
  end

  defp do_where(query, binding, {field, :like, value}) do
    # Wrap the value in %...% unless it already contains a wildcard.
    value = like_value(value)
    Ecto.Query.where(query, [{^binding, x}], like(field(x, ^field), ^value))
  end

  defp do_where(query, binding, {field, :ilike, value}) do
    value = like_value(value)
    Ecto.Query.where(query, [{^binding, x}], ilike(field(x, ^field), ^value))
  end

  # --- field <operator> field ----------------------------------------------
  # Same operators, but both operands are columns, possibly on different
  # join bindings.

  defp do_where(query, binding1, binding2, {field1, :eq, field2}) do
    Ecto.Query.where(
      query,
      [{^binding1, x}, {^binding2, y}],
      field(x, ^field1) == field(y, ^field2)
    )
  end

  defp do_where(query, binding1, binding2, {field1, :ne, field2}) do
    Ecto.Query.where(
      query,
      [{^binding1, x}, {^binding2, y}],
      field(x, ^field1) != field(y, ^field2)
    )
  end

  defp do_where(query, binding1, binding2, {field1, :gt, field2}) do
    Ecto.Query.where(
      query,
      [{^binding1, x}, {^binding2, y}],
      field(x, ^field1) > field(y, ^field2)
    )
  end

  defp do_where(query, binding1, binding2, {field1, :ge, field2}) do
    Ecto.Query.where(
      query,
      [{^binding1, x}, {^binding2, y}],
      field(x, ^field1) >= field(y, ^field2)
    )
  end

  defp do_where(query, binding1, binding2, {field1, :lt, field2}) do
    Ecto.Query.where(
      query,
      [{^binding1, x}, {^binding2, y}],
      field(x, ^field1) < field(y, ^field2)
    )
  end

  defp do_where(query, binding1, binding2, {field1, :le, field2}) do
    Ecto.Query.where(
      query,
      [{^binding1, x}, {^binding2, y}],
      field(x, ^field1) <= field(y, ^field2)
    )
  end

  defp do_where(query, binding1, binding2, {field1, :like, field2}) do
    # Column-vs-column LIKE: build the pattern in SQL by wrapping the
    # right-hand column in %...%; both sides are cast to string.
    Ecto.Query.where(
      query,
      [{^binding1, x}, {^binding2, y}],
      fragment("? LIKE ('%' || ? || '%')",
        field(x, ^field1) |> type(:string),
        field(y, ^field2) |> type(:string))
    )
  end

  defp do_where(query, binding1, binding2, {field1, :ilike, field2}) do
    # Case-insensitive variant of the clause above (PostgreSQL ILIKE).
    Ecto.Query.where(
      query,
      [{^binding1, x}, {^binding2, y}],
      fragment("? ILIKE ('%' || ? || '%')",
        field(x, ^field1) |> type(:string),
        field(y, ^field2) |> type(:string))
    )
  end

  # Wrap the value in % wildcards unless the caller already supplied one.
  defp like_value(value) when is_binary(value) do
    case :binary.match(value, "%") do
      :nomatch ->
        "%#{value}%"

      _ ->
        value
    end
  end

  defp like_value(value), do: "%#{value}%"
end
|
lib/query/where.ex
| 0.703957
| 0.606935
|
where.ex
|
starcoder
|
defmodule Blockchain.Blocktree do
@moduledoc """
Blocktree provides functions for adding blocks to the
overall blocktree and forming a consistent blockchain.
We have two important issues to handle after we get a new
unknown block:
1. Do we accept the block? Is it valid and does it connect to
a known parent?
2. After we've accepted it, is it (by total difficulty) the canonical block?
Does it become the canonical block after other blocks have been added
to the block chain?
TODO: Number 1.
"""
# Raised when a block cannot be placed into the tree
# (e.g. no path from its parent to the root).
defmodule InvalidBlockError do
  defexception [:message]
end
alias Blockchain.Block
# A node in the block tree:
#   * `block`            — the block at this node, or `:root` for the sentinel root
#   * `children`         — child subtrees keyed by block hash
#                          (NOTE(review): default is `[]` but every constructor
#                          in this module uses a map — confirm the default)
#   * `total_difficulty` — maximum total difficulty found in this subtree
#   * `parent_map`       — block hash => parent hash, used for path lookup;
#                          only maintained on the tree the caller holds
defstruct [
  block: nil,
  children: [],
  total_difficulty: 0,
  parent_map: %{},
]

@type t :: %__MODULE__{
  block: :root | Block.t,
  children: %{EVM.hash => t},
  total_difficulty: integer(),
  parent_map: %{EVM.hash => EVM.hash},
}
@doc """
Creates a new empty blocktree.

## Examples

    iex> Blockchain.Blocktree.new_tree()
    %Blockchain.Blocktree{
      block: :root,
      children: %{},
      total_difficulty: 0,
      parent_map: %{}
    }
"""
@spec new_tree() :: t
def new_tree() do
  # The sentinel `:root` node carries no block; everything else starts empty.
  struct(__MODULE__,
    block: :root,
    children: %{},
    total_difficulty: 0,
    parent_map: %{}
  )
end
# Builds a single-node subtree rooted at the given block. Used internally
# when attaching a new block to the tree; the subtree's total difficulty
# starts as the block's own difficulty.
@spec rooted_tree(Block.t) :: t
defp rooted_tree(gen_block) do
  struct(__MODULE__,
    block: gen_block,
    children: %{},
    total_difficulty: gen_block.header.difficulty,
    parent_map: %{}
  )
end
@doc """
Traverses a tree to find the most canonical block. This decides based on
blocks with the highest difficulty recursively walking down the tree.

## Examples

    iex> Blockchain.Blocktree.new_tree() |> Blockchain.Blocktree.get_canonical_block()
    :root

    iex> block_1 = %Blockchain.Block{block_hash: <<1>>, header: %Block.Header{number: 0, parent_hash: <<0::256>>, difficulty: 100}}
    iex> Blockchain.Blocktree.new_tree()
    ...> |> Blockchain.Blocktree.add_block(block_1)
    ...> |> Blockchain.Blocktree.get_canonical_block()
    %Blockchain.Block{block_hash: <<1>>, header: %Block.Header{difficulty: 100, number: 0, parent_hash: <<0::256>>}}

    iex> block_10 = %Blockchain.Block{block_hash: <<10>>, header: %Block.Header{number: 5, parent_hash: <<0::256>>, difficulty: 100}}
    iex> block_20 = %Blockchain.Block{block_hash: <<20>>, header: %Block.Header{number: 6, parent_hash: <<10>>, difficulty: 110}}
    iex> block_21 = %Blockchain.Block{block_hash: <<21>>, header: %Block.Header{number: 6, parent_hash: <<10>>, difficulty: 109}}
    iex> block_30 = %Blockchain.Block{block_hash: <<30>>, header: %Block.Header{number: 7, parent_hash: <<20>>, difficulty: 120}}
    iex> block_31 = %Blockchain.Block{block_hash: <<31>>, header: %Block.Header{number: 7, parent_hash: <<20>>, difficulty: 119}}
    iex> block_41 = %Blockchain.Block{block_hash: <<41>>, header: %Block.Header{number: 8, parent_hash: <<30>>, difficulty: 129}}
    iex> Blockchain.Blocktree.new_tree()
    ...> |> Blockchain.Blocktree.add_block(block_10)
    ...> |> Blockchain.Blocktree.add_block(block_20)
    ...> |> Blockchain.Blocktree.add_block(block_30)
    ...> |> Blockchain.Blocktree.add_block(block_31)
    ...> |> Blockchain.Blocktree.add_block(block_41)
    ...> |> Blockchain.Blocktree.add_block(block_21)
    ...> |> Blockchain.Blocktree.get_canonical_block()
    %Blockchain.Block{block_hash: <<41>>, header: %Block.Header{difficulty: 129, number: 8, parent_hash: <<30>>}}
"""
@spec get_canonical_block(t) :: Block.t
def get_canonical_block(blocktree) do
  if Enum.empty?(blocktree.children) do
    # Leaf: this node's block is the canonical tip of its branch.
    blocktree.block
  else
    # Follow the child subtree carrying the greatest total difficulty.
    blocktree.children
    |> Enum.max_by(fn {_hash, subtree} -> subtree.total_difficulty end)
    |> elem(1)
    |> get_canonical_block()
  end
end
@doc """
Verifies a block is valid, and if so, adds it to the block tree. This performs
four steps.
1. Find the parent block
2. Verify the block against its parent block
3. If valid, put the block into our DB
4. Add the block to our blocktree.

## Examples

    # For a genesis block
    iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
    iex> chain = Blockchain.Chain.load_chain(:ropsten)
    iex> gen_block = %Blockchain.Block{header: %Block.Header{number: 0, parent_hash: <<0::256>>, beneficiary: <<2, 3, 4>>, difficulty: 0x100000, gas_limit: 0x1000000, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>, state_root: <<33, 123, 11, 188, 251, 114, 226, 213, 126, 40, 243, 60, 179, 97, 185, 152, 53, 19, 23, 119, 85, 220, 63, 51, 206, 62, 112, 34, 237, 98, 183, 123>>}}
    iex> tree = Blockchain.Blocktree.new_tree()
    iex> {:ok, tree_1} = Blockchain.Blocktree.verify_and_add_block(tree, chain, gen_block, trie.db)
    iex> Blockchain.Blocktree.inspect_tree(tree_1)
    [:root, [{0, <<71, 157, 104, 174, 116, 127, 80, 187, 43, 230, 237, 165, 124,
      115, 132, 188, 112, 248, 218, 117, 191, 179, 180, 121, 118, 244,
      128, 207, 39, 194, 241, 152>>}]]

    # With a valid block
    iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
    iex> chain = Blockchain.Chain.load_chain(:ropsten)
    iex> parent = Blockchain.Block.gen_genesis_block(chain, trie.db)
    iex> child = Blockchain.Block.gen_child_block(parent, chain)
    iex> block_1 = %Blockchain.Block{header: %Block.Header{number: 0, parent_hash: <<0::256>>, beneficiary: <<2, 3, 4>>, difficulty: 1_048_576, timestamp: 0, gas_limit: 200_000, mix_hash: <<1>>, nonce: <<2>>, state_root: parent.header.state_root}}
    iex> block_2 = %Blockchain.Block{header: %Block.Header{number: 1, parent_hash: block_1 |> Blockchain.Block.hash, beneficiary: <<2::160>>, difficulty: 997_888, timestamp: 1_479_642_530, gas_limit: 200_000, mix_hash: <<1>>, nonce: <<2>>, state_root: child.header.state_root}} |> Blockchain.Block.add_rewards_to_block(trie.db)
    iex> tree = Blockchain.Blocktree.new_tree()
    iex> {:ok, tree_1} = Blockchain.Blocktree.verify_and_add_block(tree, chain, block_1, trie.db)
    iex> {:ok, tree_2} = Blockchain.Blocktree.verify_and_add_block(tree_1, chain, block_2, trie.db)
    iex> Blockchain.Blocktree.inspect_tree(tree_2)
    [:root,
      [{0,
        <<155, 169, 162, 94, 229, 198, 27, 192, 121, 15, 154, 160, 41, 76,
          199, 62, 154, 57, 121, 20, 34, 43, 200, 107, 54, 247, 204, 195,
          57, 60, 223, 204>>},
        [{1,
          <<46, 192, 123, 64, 63, 230, 19, 10, 150, 191, 251, 157, 226, 35,
            183, 69, 92, 177, 33, 66, 159, 174, 200, 202, 197, 69, 24, 216,
            9, 107, 151, 192>>}]]]

    # With a invalid block
    iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
    iex> chain = Blockchain.Chain.load_chain(:ropsten)
    iex> parent = Blockchain.Block.gen_genesis_block(chain, trie.db)
    iex> block_1 = %Blockchain.Block{header: %Block.Header{number: 0, parent_hash: <<0::256>>, beneficiary: <<2, 3, 4>>, difficulty: 1_048_576, timestamp: 11, gas_limit: 200_000, mix_hash: <<1>>, nonce: <<2>>, state_root: parent.header.state_root}}
    iex> block_2 = %Blockchain.Block{header: %Block.Header{number: 1, parent_hash: block_1 |> Blockchain.Block.hash, beneficiary: <<2, 3, 4>>, difficulty: 110, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}}
    iex> tree = Blockchain.Blocktree.new_tree()
    iex> {:ok, tree_1} = Blockchain.Blocktree.verify_and_add_block(tree, chain, block_1, trie.db)
    iex> Blockchain.Blocktree.verify_and_add_block(tree_1, chain, block_2, trie.db)
    {:invalid, [:invalid_difficulty, :invalid_gas_limit, :child_timestamp_invalid]}
"""
@spec verify_and_add_block(t, Chain.t, Block.t, MerklePatriciaTree.DB.db, boolean()) :: {:ok, t} | :parent_not_found | {:invalid, [atom()]}
def verify_and_add_block(blocktree, chain, block, db, do_validate \\ true) do
  case Blockchain.Block.get_parent_block(block, db) do
    :not_found ->
      # BUG FIX: previously this bound `parent = :parent_not_found` and fell
      # through into validation, so the spec'd `:parent_not_found` return was
      # unreachable. Return it to the caller instead.
      :parent_not_found

    parent_result ->
      # `:genesis` means the block has no parent (it is the genesis block).
      parent =
        case parent_result do
          :genesis -> nil
          {:ok, parent} -> parent
        end

      validation =
        if do_validate, do: Block.is_fully_valid?(block, chain, parent, db), else: :valid

      # Any non-:valid result (e.g. {:invalid, errors}) falls through unchanged.
      with :valid <- validation do
        {:ok, block_hash} = Block.put_block(block, db)

        # Cache computed block hash so add_block/2 need not recompute it.
        block = %{block | block_hash: block_hash}

        {:ok, add_block(blocktree, block)}
      end
  end
end
@doc """
Adds a block to our complete block tree. We should perform this action
only after we've verified the block is valid.

Note, if the block does not fit into the current tree (e.g. if the parent block
isn't known to us yet), then we will raise an exception.

TODO: Perhaps we should store the block until we encounter the parent block?

## Examples

    iex> block_1 = %Blockchain.Block{block_hash: <<1>>, header: %Block.Header{number: 5, parent_hash: <<0::256>>, difficulty: 100}}
    iex> block_2 = %Blockchain.Block{block_hash: <<2>>, header: %Block.Header{number: 6, parent_hash: <<1>>, difficulty: 110}}
    iex> Blockchain.Blocktree.new_tree()
    ...> |> Blockchain.Blocktree.add_block(block_1)
    ...> |> Blockchain.Blocktree.add_block(block_2)
    %Blockchain.Blocktree{
      block: :root,
      children: %{
        <<1>> => %Blockchain.Blocktree{
          block: %Blockchain.Block{block_hash: <<1>>, header: %Block.Header{difficulty: 100, number: 5, parent_hash: <<0::256>>}},
          children: %{
            <<2>> =>
              %Blockchain.Blocktree{
                block: %Blockchain.Block{block_hash: <<2>>, header: %Block.Header{difficulty: 110, number: 6, parent_hash: <<1>>}},
                children: %{},
                parent_map: %{},
                total_difficulty: 110
              }
          },
          total_difficulty: 110,
          parent_map: %{},
        }
      },
      total_difficulty: 110,
      parent_map: %{
        <<1>> => <<0::256>>,
        <<2>> => <<1>>,
      }
    }
"""
@spec add_block(t, Block.t) :: t
def add_block(blocktree, block) do
  # Use the cached hash when present; otherwise compute it.
  block_hash = block.block_hash || Block.hash(block)

  # Record the child -> parent edge so future path lookups can find this block.
  blocktree = %{
    blocktree
    | parent_map: Map.put(blocktree.parent_map, block_hash, block.header.parent_hash)
  }

  case get_path_to_root(blocktree, block_hash) do
    {:ok, path} ->
      do_add_block(blocktree, block, block_hash, path)

    :no_path ->
      # TODO: How we can better handle this case?
      raise InvalidBlockError, "No path to root"
  end
end
# Recursively walk tree and to add children block.
# `path` is the list of block hashes from (just below) the root down to the
# new block's parent; an empty path means the block attaches directly here.
@spec do_add_block(t, Block.t, EVM.hash, [EVM.hash]) :: t
defp do_add_block(blocktree, block, block_hash, path) do
  case path do
    [] ->
      # Attach the new block as a child of this node and recompute this
      # node's total difficulty from all of its children.
      tree = rooted_tree(block)
      new_children = Map.put(blocktree.children, block_hash, tree)
      %{blocktree | children: new_children, total_difficulty: max_difficulty(new_children)}
    [path_hash|rest] ->
      case blocktree.children[path_hash] do
        nil -> raise InvalidBlockError, "Invalid path to root, missing path #{inspect path_hash}" # this should be impossible unless the tree is missing nodes
        sub_tree ->
          # Recurse and update the children of this tree. Note, we may also need to adjust the total
          # difficulty of this subtree.
          new_child = do_add_block(sub_tree, block, block_hash, rest)
          # TODO: Does this parent_hash only exist at the root node?
          %{blocktree |
            children: Map.put(blocktree.children, path_hash, new_child),
            total_difficulty: max(blocktree.total_difficulty, new_child.total_difficulty),
          }
      end
  end
end
# Gets the maximum total difficulty amongst a set of child nodes.
# Raises Enum.EmptyError when `children` is empty (as before).
@spec max_difficulty(%{EVM.hash => t}) :: integer()
defp max_difficulty(children) do
  {_hash, best_child} = Enum.max_by(children, fn {_hash, child} -> child.total_difficulty end)
  best_child.total_difficulty
end
@doc """
Returns a path from the given block's parent all the way up to the root of the
tree. Runs in O(n) time with regards to the height of the tree.

Because the blocktree doesn't have structure based on retrieval, we store a map
of node hash to parent hash for each subtree. That way, we can always find the
correct path to traverse the tree. This obviously requires us to store a
significant extra amount of data about the tree.

## Examples

    iex> Blockchain.Blocktree.get_path_to_root(
    ...>   %Blockchain.Blocktree{parent_map: %{<<1>> => <<2>>, <<2>> => <<3>>, <<3>> => <<0::256>>}},
    ...>   <<1>>)
    {:ok, [<<3>>, <<2>>]}

    iex> Blockchain.Blocktree.get_path_to_root(
    ...>   %Blockchain.Blocktree{parent_map: %{<<20>> => <<10>>, <<10>> => <<0::256>>}},
    ...>   <<20>>)
    {:ok, [<<10>>]}

    iex> Blockchain.Blocktree.get_path_to_root(
    ...>   %Blockchain.Blocktree{parent_map: %{<<30>> => <<20>>, <<31>> => <<20>>, <<20>> => <<10>>, <<21 >> => <<10>>, <<10>> => <<0::256>>}},
    ...>   <<30>>)
    {:ok, [<<10>>, <<20>>]}

    iex> Blockchain.Blocktree.get_path_to_root(
    ...>   %Blockchain.Blocktree{parent_map: %{<<30>> => <<20>>, <<31>> => <<20>>, <<20>> => <<10>>, <<21 >> => <<10>>, <<10>> => <<0::256>>}},
    ...>   <<20>>)
    {:ok, [<<10>>]}

    iex> Blockchain.Blocktree.get_path_to_root(
    ...>   %Blockchain.Blocktree{parent_map: %{<<30>> => <<20>>, <<31>> => <<20>>, <<20>> => <<10>>, <<21 >> => <<10>>, <<10>> => <<0::256>>}},
    ...>   <<31>>)
    {:ok, [<<10>>, <<20>>]}

    iex> Blockchain.Blocktree.get_path_to_root(
    ...>   %Blockchain.Blocktree{parent_map: %{<<30>> => <<20>>, <<31>> => <<20>>, <<20>> => <<10>>, <<21 >> => <<10>>, <<10>> => <<0::256>>}},
    ...>   <<32>>)
    :no_path
"""
@spec get_path_to_root(t, EVM.hash) :: {:ok, [EVM.hash]} | :no_path
def get_path_to_root(blocktree, hash) do
  # The recursion builds the path tip-first; reverse it so the root comes
  # first. A :no_path result falls through the `with` unchanged.
  with {:ok, path} <- do_get_path_to_root(blocktree, hash) do
    {:ok, Enum.reverse(path)}
  end
end

@spec do_get_path_to_root(t, EVM.hash) :: {:ok, [EVM.hash]} | :no_path
defp do_get_path_to_root(blocktree, hash) do
  case Map.get(blocktree.parent_map, hash, :no_path) do
    :no_path ->
      :no_path

    <<0::256>> ->
      # The all-zero hash marks the genesis parent: we've reached the root.
      {:ok, []}

    parent_hash ->
      case do_get_path_to_root(blocktree, parent_hash) do
        {:ok, path} -> {:ok, [parent_hash | path]}
        :no_path -> :no_path
      end
  end
end
@doc """
Simple function to inspect the structure of a block tree.
Simply walks through the tree and prints the block number
and hash as a set of sub-lists.

Note: I don't believe this fits the rules for tail call
recursion, so we need to be careful to not use this for
excessively large trees.

# TODO: Add examples
"""
@spec inspect_tree(t) :: [any()]
def inspect_tree(blocktree) do
  # Label the node: the sentinel root stays :root, real blocks become
  # a {number, hash} pair.
  label =
    case blocktree.block do
      :root -> :root
      block -> {block.header.number, block.block_hash}
    end

  [label | Enum.map(blocktree.children, fn {_hash, child} -> inspect_tree(child) end)]
end
end
|
lib/blockchain/blocktree.ex
| 0.740925
| 0.656796
|
blocktree.ex
|
starcoder
|
defmodule Temple.Engine do
@behaviour Phoenix.Template.Engine
@moduledoc """
Temple provides a templating engine for use in Phoenix web applications.
You can configure your application to use Temple templates by adding the following configuration.
```elixir
# config.exs
config :phoenix, :template_engines, exs: Temple.Engine
# config/dev.exs
config :your_app, YourAppWeb.Endpoint,
live_reload: [
patterns: [
~r"lib/your_app_web/templates/.*(exs)$"
]
]
# your_app_web.ex
def view do
quote location: :keep do
# ...
use Temple # Replaces the call to import Phoenix.HTML
end
end
```
## Usage
Temple templates use the `.exs` extension, because they are written with pure Elixir!
`assigns` (@conn, etc) are handled the same as normal `Phoenix.HTML.Engine` templates.
Note: The `Temple.temple/1` macro is _not_ needed for Temple templates due to the engine taking care of that for you.
```
# app.html.exs
html lang: "en" do
head do
meta charset: "utf-8"
meta http_equiv: "X-UA-Compatible", content: "IE=edge"
meta name: "viewport", content: "width=device-width, initial-scale=1.0"
title "YourApp · Phoenix Framework"
link rel: "stylesheet", href: Routes.static_path(@conn, "/css/app.css")
end
body do
header do
section class: "container" do
nav role: "navigation" do
ul do
li do: a("Get Started", href: "https://hexdocs.pm/phoenix/overview.html")
end
end
a href: "http://phoenixframework.org/", class: "phx-logo" do
img src: Routes.static_path(@conn, "/images/phoenix.png"),
alt: "Phoenix Framework Logo"
end
end
end
main role: "main", class: "container" do
p get_flash(@conn, :info), class: "alert alert-info", role: "alert"
p get_flash(@conn, :error), class: "alert alert-danger", role: "alert"
partial render(@view_module, @view_template, assigns)
end
script type: "text/javascript", src: Routes.static_path(@conn, "/js/app.js")
end
end
```
"""
# Phoenix template engine callback: reads the template source, parses it
# into an Elixir AST (with `file:` set so errors point at the template),
# rewrites `@assign` references via handle_assigns/1, and wraps the result
# in a `temple` block so it renders through Temple.
def compile(path, _name) do
  template =
    path
    |> File.read!()
    |> Code.string_to_quoted!(file: path)
    |> handle_assigns()

  quote location: :keep do
    use Temple
    temple do: unquote(template)
  end
end
# Walks the template AST and replaces every `@key` node with code that
# fetches `key` from the `assigns` variable at render time, raising a
# descriptive ArgumentError when the assign is missing.
defp handle_assigns(quoted) do
  quoted
  |> Macro.prewalk(fn
    # `@key` in the template AST is the `@` operator applied to `key`.
    {:@, _, [{key, _, _}]} ->
      quote location: :keep do
        case Access.fetch(var!(assigns), unquote(key)) do
          {:ok, val} ->
            val

          :error ->
            raise ArgumentError, """
            assign @#{unquote(key)} not available in Temple template.
            Please make sure all proper assigns have been set. If this
            is a child template, ensure assigns are given explicitly by
            the parent template as they are not automatically forwarded.
            Available assigns: #{inspect(Enum.map(var!(assigns), &elem(&1, 0)))}
            """
        end
      end

    # Everything else passes through untouched.
    ast ->
      ast
  end)
end
end
|
lib/temple/engine.ex
| 0.73307
| 0.599221
|
engine.ex
|
starcoder
|
defmodule Exceptional.Value do
@moduledoc ~S"""
Provide an escape hatch for propagating unraised exceptions
## Convenience `use`s
Everything:
use Exceptional.Value
Only named functions (`exception_or_continue`):
use Exceptional.Value, only: :named_functions
Only operators (`~>`):
use Exceptional.Value, only: :operators
"""
# `use Exceptional.Value, only: :named_functions` —
# import everything except the `~>/2` operator.
defmacro __using__(only: :named_functions) do
  quote do
    import unquote(__MODULE__), except: [~>: 2]
  end
end

# `use Exceptional.Value, only: :operators` — import only the `~>/2` operator.
defmacro __using__(only: :operators) do
  quote do
    import unquote(__MODULE__), only: [~>: 2]
  end
end

# Plain `use Exceptional.Value` — import everything.
defmacro __using__(_) do
  quote do
    import unquote(__MODULE__)
  end
end
@doc ~S"""
If an exception, return the exception, otherwise continue computation.
Essentially an `Either` construct for `Exception`s.

Note that this does not catch `raise` or `throw`s. If you want that behaviour,
please see `Exceptional.Rescue`.

## Examples

    iex> 1 |> exception_or_continue(fn value -> value * 100 end.())
    100

    iex> %ArgumentError{message: "exception handled"}
    ...> |> exception_or_continue(fn value -> value * 100 end.())
    %ArgumentError{message: "exception handled"}

    iex> %ArgumentError{message: "exception handled"}
    ...> |> exception_or_continue(fn x -> x + 1 end.())
    ...> |> exception_or_continue(fn y -> y - 10 end.())
    %ArgumentError{message: "exception handled"}

    iex> %ArgumentError{message: "exception not caught"}
    ...> |> raise
    ...> |> exception_or_continue(fn value -> value * 100 end.())
    ** (ArgumentError) exception not caught

    iex> Enum.fetch!([], 9) |> exception_or_continue(fn v -> v * 10 end.())
    ** (Enum.OutOfBoundsError) out of bounds error
"""
@lint {Credo.Check.Design.AliasUsage, false}
@spec exception_or_continue(Exception.t | any, fun) :: Exception.t | any
defmacro exception_or_continue(maybe_exception, continue) do
  # Delegates to Exceptional.Control.branch/2: run `continue` on plain
  # values, and pass exception structs through unchanged (identity).
  quote do
    require Exceptional.Control
    Exceptional.Control.branch unquote(maybe_exception),
      value_do: unquote(continue),
      exception_do: fn exception -> exception end.()
  end
end
@doc ~S"""
Operator alias for `exception_or_continue`

## Examples

    iex> 1 ~> fn value -> value * 100 end.()
    100

    iex> exception = %Enum.OutOfBoundsError{message: "exception"}
    ...> exception ~> fn x -> x + 1 end.()
    %Enum.OutOfBoundsError{message: "exception"}

    ...> exception
    ...> ~> fn x -> x + 1 end.()
    ...> ~> fn y -> y - 10 end.()
    %Enum.OutOfBoundsError{message: "exception"}

    ...> raise(exception) ~> fn x -> x + 1 end.()
    ** (Enum.OutOfBoundsError) out of bounds error

    iex> Enum.fetch!([], 9) ~> fn x -> x + 1 end.()
    ** (Enum.OutOfBoundsError) out of bounds error
"""
@lint {Credo.Check.Design.AliasUsage, false}
defmacro maybe_exception ~> continue do
  # Same expansion as exception_or_continue/2, expressed as an infix operator.
  quote do
    require Exceptional.Control
    Exceptional.Control.branch unquote(maybe_exception),
      value_do: unquote(continue),
      exception_do: fn exception -> exception end.()
  end
end
end
|
deps/exceptional/lib/exceptional/value.ex
| 0.853226
| 0.40342
|
value.ex
|
starcoder
|
defmodule Data.Effect do
@moduledoc """
In game effects such as damage
Valid kinds of effects:
- "damage": Does an amount of damage
- "damage/type": Halves damage if the type does not align
- "damage/over-time": Does damage over time
- "recover": Heals an amount of health/skill/move points
- "stats": Modify base stats for the player
"""
import Data.Type
import Ecto.Changeset
alias Data.Effect
alias Data.Stats
# Arithmetic modes allowed for stat-modifying effects.
@modes ["add", "multiply", "subtract", "divide"]

# Effects are stored and passed around as plain maps.
@type t :: map

# "damage": flat damage of a given type.
@type damage :: %{
  type: String.t(),
  amount: integer
}

# "damage/type": halves damage when the type does not align.
@type damage_type :: %{
  types: [String.t()]
}

@typedoc """
Damage over time. Does damage every `every` milliseconds.
"""
@type damage_over_time :: %{
  type: String.t(),
  amount: integer(),
  every: integer(),
  count: integer()
}

@type heal :: %{
  amount: integer
}

# "recover": restores health/skill/move points.
@type recover :: %{
  type: atom(),
  amount: integer()
}

# "stats": permanent base-stat modification.
@type stats :: %{
  field: atom,
  mode: String.t(),
  amount: integer
}

# "stats/boost": temporary stat modification lasting `duration` ms.
@type stats_boost :: %{
  field: atom(),
  amount: integer(),
  mode: String.t(),
  duration: integer()
}

# Implements the Ecto.Type behaviour so effects can live in a :map column.
@behaviour Ecto.Type
@doc """
A list of all types in the system.
"""
# Canonical list of effect kinds; keep in sync with the starting_effect/1
# and valid?/1 clauses below.
def types() do
  [
    "damage",
    "damage/over-time",
    "damage/type",
    "recover",
    "stats",
    "stats/boost"
  ]
end
# Effects are persisted as map (JSON) columns.
@impl Ecto.Type
def type, do: :map

# Accept any map as-is; detailed shape validation happens in valid?/1.
@impl Ecto.Type
def cast(stats) when is_map(stats), do: {:ok, stats}
def cast(_), do: :error
@doc """
Load an effect from a stored map
Cast it properly

    iex> Data.Effect.load(%{"kind" => "damage", "type" => "slashing", "amount" => 10})
    {:ok, %{kind: "damage", type: "slashing", amount: 10}}

    iex> Data.Effect.load(%{"kind" => "damage/type", "types" => ["slashing"]})
    {:ok, %{kind: "damage/type", types: ["slashing"]}}

    iex> Data.Effect.load(%{"kind" => "damage/over-time", "type" => "slashing", "amount" => 10, "every" => 3})
    {:ok, %{kind: "damage/over-time", type: "slashing", amount: 10, every: 3}}

    iex> Data.Effect.load(%{"kind" => "stats", "field" => "agility"})
    {:ok, %{kind: "stats", field: :agility, mode: "add"}}
"""
@impl Ecto.Type
def load(effect) do
  # NOTE(review): String.to_atom/1 creates atoms at runtime; presumably safe
  # because keys come from our own database schema, not user input — confirm.
  effect = for {key, val} <- effect, into: %{}, do: {String.to_atom(key), val}
  # Kind-specific casting (atomize fields, backfill defaults).
  effect = effect.kind |> cast_vals(effect)
  {:ok, effect}
end

# NOTE(review): "heal" is not in types/0 (the closest kind is "recover");
# this clause looks like legacy support — confirm before removing.
defp cast_vals("heal", effect) do
  effect |> Map.put(:type, String.to_atom(effect.type))
end

defp cast_vals("stats", effect) do
  # Older rows may predate the `mode` key; default it to "add".
  effect
  |> Map.put(:field, String.to_atom(effect.field))
  |> ensure(:mode, "add")
end

defp cast_vals("stats/boost", effect) do
  effect |> Map.put(:field, String.to_atom(effect.field))
end

# All other kinds are stored in their final shape already.
defp cast_vals(_type, effect), do: effect
# Strip any struct tag so the persisted value is a plain map.
@impl Ecto.Type
def dump(stats) when is_map(stats), do: {:ok, Map.delete(stats, :__struct__)}
def dump(_), do: :error
@doc """
Get a starting effect, to fill out in the web interface. Just the structure,
the values won't mean anything.
"""
@spec starting_effect(String.t()) :: t()
def starting_effect("damage") do
  %{kind: "damage", type: "slashing", amount: 0}
end

def starting_effect("damage/type") do
  %{kind: "damage/type", types: []}
end

def starting_effect("damage/over-time") do
  %{kind: "damage/over-time", type: "slashing", amount: 0, every: 10, count: 2}
end

def starting_effect("recover") do
  %{kind: "recover", type: "health", amount: 0}
end

def starting_effect("stats") do
  # `mode` was missing here, so a freshly created "stats" effect failed
  # valid?/1 (which requires keys [:amount, :field, :kind, :mode]); default
  # matches load/1's `ensure(:mode, "add")`.
  %{kind: "stats", field: :agility, amount: 0, mode: "add"}
end

def starting_effect("stats/boost") do
  %{kind: "stats/boost", field: :agility, amount: 0, duration: 1000, mode: "add"}
end
@doc """
Validate an effect based on type

    iex> Data.Effect.valid?(%{kind: "damage", type: "slashing", amount: 10})
    true
    iex> Data.Effect.valid?(%{kind: "damage", type: "slashing", amount: :invalid})
    false

    iex> Data.Effect.valid?(%{kind: "damage/type", types: ["slashing"]})
    true

    iex> Data.Effect.valid?(%{kind: "damage/over-time", type: "slashing", amount: 10, every: 3, count: 3})
    true
    iex> Data.Effect.valid?(%{kind: "damage/over-time", type: "something", amount: :hi, every: 3, count: 3})
    false

    iex> Data.Effect.valid?(%{kind: "recover", type: "skill", amount: 10})
    true
    iex> Data.Effect.valid?(%{kind: "recover", type: "skill", amount: :invalid})
    false

    iex> Data.Effect.valid?(%{kind: "stats", field: :strength, amount: 10, mode: "add"})
    true
    iex> Data.Effect.valid?(%{kind: "stats", field: :strength, amount: :invalid})
    false
"""
@spec valid?(Stats.t()) :: boolean
def valid?(effect)

# Each clause first checks the *exact* key set for the kind (keys/1 returns
# sorted keys), then delegates to the kind-specific value validator.
def valid?(effect = %{kind: "damage"}) do
  keys(effect) == [:amount, :kind, :type] && valid_damage?(effect)
end

def valid?(effect = %{kind: "damage/type"}) do
  keys(effect) == [:kind, :types] && valid_damage_type?(effect)
end

def valid?(effect = %{kind: "damage/over-time"}) do
  keys(effect) == [:amount, :count, :every, :kind, :type] && valid_damage_over_time?(effect)
end

def valid?(effect = %{kind: "recover"}) do
  keys(effect) == [:amount, :kind, :type] && valid_recover?(effect)
end

def valid?(effect = %{kind: "stats"}) do
  keys(effect) == [:amount, :field, :kind, :mode] && valid_stats?(effect)
end

def valid?(effect = %{kind: "stats/boost"}) do
  keys(effect) == [:amount, :duration, :field, :kind, :mode] && valid_stats_boost?(effect)
end

# Unknown kinds (or maps without :kind) are invalid.
def valid?(_), do: false
@doc """
Validate if damage is right

    iex> Data.Effect.valid_damage?(%{type: "slashing", amount: 10})
    true

    iex> Data.Effect.valid_damage?(%{type: "slashing", amount: nil})
    false

    iex> Data.Effect.valid_damage?(%{type: "finger"})
    false
"""
@spec valid_damage?(Effect.t()) :: boolean
def valid_damage?(effect)

def valid_damage?(%{type: type, amount: amount}) when is_binary(type) and is_integer(amount) do
  true
end

def valid_damage?(_), do: false
@doc """
Validate if damage/type is right

    iex> Data.Effect.valid_damage_type?(%{types: ["slashing"]})
    true

    iex> Data.Effect.valid_damage_type?(%{types: :slashing})
    false
"""
@spec valid_damage_type?(Effect.t()) :: boolean
def valid_damage_type?(effect)

def valid_damage_type?(%{types: types}) when is_list(types) do
  Enum.all?(types, &is_binary/1)
end

def valid_damage_type?(_), do: false
@doc """
Validate if `damage/over-time` is right

    iex> Data.Effect.valid_damage_over_time?(%{type: "slashing", amount: 10, every: 3, count: 3})
    true

    iex> Data.Effect.valid_damage_over_time?(%{type: "slashing", amount: :ten, every: 3, count: 3})
    false

    iex> Data.Effect.valid_damage_over_time?(%{type: "slashing", amount: 10, every: :three, count: 3})
    false

    iex> Data.Effect.valid_damage_over_time?(%{type: "slashing", amount: 10, every: 3, count: :three})
    false
"""
@spec valid_damage_over_time?(Effect.t()) :: boolean
def valid_damage_over_time?(effect)

# `every` (milliseconds) and `count` (ticks) must be positive integers.
def valid_damage_over_time?(%{type: type, amount: amount, every: every, count: count})
    when is_binary(type) and is_integer(amount) and is_integer(every) and every > 0 and
           is_integer(count) and count > 0 do
  true
end

def valid_damage_over_time?(_), do: false
@doc """
Validate if recover is right

    iex> Data.Effect.valid_recover?(%{type: "health", amount: 10})
    true
    iex> Data.Effect.valid_recover?(%{type: "skill", amount: 10})
    true
    iex> Data.Effect.valid_recover?(%{type: "move", amount: 10})
    true

    iex> Data.Effect.valid_recover?(%{type: "skill", amount: :invalid})
    false
    iex> Data.Effect.valid_recover?(%{type: "other", amount: 10})
    false
"""
@spec valid_recover?(Effect.t()) :: boolean
def valid_recover?(effect)

def valid_recover?(%{type: type, amount: amount})
    when type in ["health", "skill", "move"] and is_integer(amount) do
  true
end

def valid_recover?(_), do: false
@doc """
Validate if the stats type is right

    iex> Data.Effect.valid_stats?(%{field: :strength, amount: 10, mode: "add"})
    true

    iex> Data.Effect.valid_stats?(%{field: :strength, amount: nil, mode: "add"})
    false

    iex> Data.Effect.valid_stats?(%{field: :strength, amount: 10, mode: "remove"})
    false

    iex> Data.Effect.valid_stats?(%{field: :head, amount: 10, mode: "add"})
    false

    iex> Data.Effect.valid_stats?(%{field: :strength})
    false
"""
@spec valid_stats?(Effect.t()) :: boolean
def valid_stats?(effect)

def valid_stats?(%{field: field, amount: amount, mode: mode}) do
  known_field? = field in Stats.fields()
  known_mode? = mode in @modes
  known_field? and known_mode? and is_integer(amount)
end

def valid_stats?(_), do: false
@doc """
Validate if the stats type is right

    iex> Data.Effect.valid_stats_boost?(%{field: :strength, amount: 10, duration: 10, mode: "add"})
    true

    iex> Data.Effect.valid_stats_boost?(%{field: :strength, amount: nil, duration: 10, mode: "add"})
    false

    iex> Data.Effect.valid_stats_boost?(%{field: :strength, amount: 10, duration: nil, mode: "add"})
    false

    iex> Data.Effect.valid_stats_boost?(%{field: :head, amount: 10, duration: 10})
    false

    iex> Data.Effect.valid_stats_boost?(%{field: :strength})
    false
"""
@spec valid_stats_boost?(Effect.t()) :: boolean
def valid_stats_boost?(effect)

def valid_stats_boost?(%{field: field, amount: amount, duration: duration, mode: mode}) do
  known_field? = field in Stats.fields()
  known_mode? = mode in @modes
  known_field? and known_mode? and is_integer(amount) and is_integer(duration)
end

def valid_stats_boost?(_), do: false
# Changeset helper: when the changeset is changing `effects`, ensure every
# entry passes Effect.valid?/1; otherwise pass the changeset through.
def validate_effects(changeset) do
  case changeset do
    %{changes: %{effects: effects}} when effects != nil ->
      _validate_effects(changeset)

    _ ->
      changeset
  end
end

defp _validate_effects(changeset = %{changes: %{effects: effects}}) do
  if Enum.all?(effects, &Effect.valid?/1) do
    changeset
  else
    add_error(changeset, :effects, "are invalid")
  end
end
@doc """
Check if an effect is continuous (keeps applying over time) or not.

    iex> Data.Effect.continuous?(%{kind: "damage/over-time"})
    true

    iex> Data.Effect.continuous?(%{kind: "stats/boost"})
    true

    iex> Data.Effect.continuous?(%{kind: "damage"})
    false
"""
@spec continuous?(Effect.t()) :: boolean()
def continuous?(effect)
def continuous?(%{kind: kind}), do: kind in ["damage/over-time", "stats/boost"]
def continuous?(_), do: false
@doc """
Check if an effect is continuous and should apply to every effect coming in

    iex> Data.Effect.applies_to_every_effect?(%{kind: "stats/boost"})
    true

    iex> Data.Effect.applies_to_every_effect?(%{kind: "damage/over-time"})
    false

    iex> Data.Effect.applies_to_every_effect?(%{kind: "damage"})
    false
"""
@spec applies_to_every_effect?(Effect.t()) :: boolean()
def applies_to_every_effect?(effect)
def applies_to_every_effect?(%{kind: "stats/boost"}), do: true
def applies_to_every_effect?(_), do: false
@doc """
Instantiate an effect by giving it an ID to track, for future callbacks.

Returns the effect with an `:id` key set to a fresh UUID.
"""
# Spec fixed: this returns the tagged effect, not a boolean (the previous
# `:: boolean()` spec was a copy-paste error).
@spec instantiate(Effect.t()) :: Effect.t()
def instantiate(effect) do
  Map.put(effect, :id, UUID.uuid4())
end
end
|
lib/data/effect.ex
| 0.903498
| 0.562898
|
effect.ex
|
starcoder
|
defmodule Mix.Tasks.Compile do
  use Mix.Task

  @shortdoc "Compile source files"
  @recursive true

  @moduledoc """
  A meta task that compiles source files. It simply runs the
  compilers registered in your project. At the end of compilation
  it ensures load paths are set.

  ## Configuration

  * `:compilers` - compilers to be run, defaults to:

    ```
    [:elixir, :app]
    ```

    It can be configured to handle custom compilers, for example:

    ```
    [compilers: [:elixir, :mycompiler, :app]]
    ```

  ## Command line options

  * `--list` - List all enabled compilers.

  Remaining options are forwarded to underlying compilers.
  """

  # NOTE(review): this module is written for pre-1.0 Elixir — `lc ... inlist`
  # is the old list-comprehension syntax, and `size/1` / `iolist_to_binary/1`
  # were removed in later releases. It is documented in place rather than
  # modernized, since a rewrite would not compile on the Elixir it targets.

  # Prints every `compile.*` task with its shortdoc, then the enabled compilers.
  def run(["--list"]) do
    Mix.Task.load_all
    shell = Mix.shell
    modules = Mix.Task.all_modules

    # Collect {task_name, shortdoc} pairs for all compile.* tasks that
    # declare a shortdoc.
    docs = lc module inlist modules,
           task = Mix.Task.task_name(module),
           match?("compile." <> _, task),
           doc = Mix.Task.shortdoc(module) do
      { task, doc }
    end

    # Width of the longest task name, used for column alignment below.
    max = Enum.reduce docs, 0, fn({ task, _ }, acc) ->
      max(size(task), acc)
    end

    sorted = Enum.sort(docs)

    Enum.each sorted, fn({ task, doc }) ->
      shell.info format('mix ~-#{max}s # ~ts', [task, doc])
    end

    shell.info "\nEnabled compilers: #{Enum.join get_compilers, ", "}"
  end

  @doc """
  Runs this compile task by recursively calling all registered compilers.
  """
  def run(args) do
    Mix.Task.run("loadpaths", args)

    res =
      Enum.map(get_compilers, fn(compiler) ->
        List.wrap Mix.Task.run("compile.#{compiler}", args)
      end)

    # Make the freshly compiled beam files available on the code path.
    Code.prepend_path Mix.Project.compile_path

    # :ok when at least one compiler actually compiled something, :noop otherwise.
    if Enum.any?(res, &(:ok in &1)), do: :ok, else: :noop
  end

  @doc """
  Returns manifests for all compilers.
  """
  def manifests do
    Enum.flat_map(get_compilers, fn(compiler) ->
      module = Mix.Task.get!("compile.#{compiler}")

      # Not every compiler tracks manifests; skip those that don't export it.
      if function_exported?(module, :manifests, 0) do
        module.manifests
      else
        []
      end
    end)
  end

  # Project-configured compilers, or the default pipeline.
  defp get_compilers do
    Mix.project[:compilers] ||
      [:yecc, :leex, :erlang, :elixir, :app]
  end

  # :io_lib.format/2 returns iodata; flatten it to a binary for the shell.
  defp format(expression, args) do
    :io_lib.format(expression, args) |> iolist_to_binary
  end
end
|
lib/mix/lib/mix/tasks/compile.ex
| 0.78535
| 0.672379
|
compile.ex
|
starcoder
|
defmodule CRC.Model do
  @moduledoc """
  In-memory representation of a CRC model (width, polynomial, init,
  reflection flags, xorout, check, residue), with helpers to decode model
  definitions from RevEng-style `key=value` text lines and to generate C
  stub/table source for them.
  """

  @type t() :: %__MODULE__{
          bits: 0x00..0xff,
          sick: boolean(),
          width: 0x00..0xff,
          poly: 0x0000000000000000..0xffffffffffffffff,
          init: 0x0000000000000000..0xffffffffffffffff,
          refin: boolean(),
          refout: boolean(),
          xorout: 0x0000000000000000..0xffffffffffffffff,
          check: 0x0000000000000000..0xffffffffffffffff,
          residue: 0x0000000000000000..0xffffffffffffffff,
          name: binary(),
          key: atom(),
          aliases: %{
            optional(atom()) => binary()
          },
          slow: boolean(),
          value: non_neg_integer()
        }

  defstruct [
    bits: nil,
    sick: nil,
    width: nil,
    poly: nil,
    init: nil,
    refin: nil,
    refout: nil,
    xorout: nil,
    check: nil,
    residue: nil,
    name: nil,
    key: nil,
    aliases: %{},
    slow: false,
    value: 0
  ]

  @doc """
  Decodes a single model definition (one line) from `binary`.

  Returns `{:ok, model, rest}` when width/poly/refin were all present, or
  `{:error, partial_model, rest}` otherwise.
  """
  def decode(binary) when is_binary(binary) do
    parse(binary, %__MODULE__{})
  end

  @doc """
  Reads `filename` and decodes every model definition in it.
  """
  def decode_file(filename) do
    with {:ok, content} <- :file.read_file(filename) do
      decode_list(content)
    end
  end

  @doc """
  Decodes every model definition (one per line) in `binary`.
  """
  def decode_list(binary) when is_binary(binary) do
    decode_list(binary, [])
  end

  @doc """
  Prints generated C source for one model or a list of models.
  """
  def dump_code(model_or_models) do
    IO.puts(gen_code(model_or_models))
  end

  @doc """
  Generates C source (stub entries followed by table entries) as iodata.
  """
  def gen_code(model = %__MODULE__{}) do
    [
      gen_stubs(model),
      "\n",
      gen_tables(model)
    ]
  end

  def gen_code(models = [%__MODULE__{} | _]) do
    # Sort by {width, key} and deduplicate so each model is emitted once.
    stubs =
      for model <- models, into: [] do
        {model.width, model.key, gen_stubs(model)}
      end
      |> :lists.usort()

    tables =
      for model <- models, into: [] do
        {model.width, model.key, gen_tables(model)}
      end
      |> :lists.usort()

    stubs = for {_, _, stub} <- stubs, into: [], do: stub
    tables = for {_, _, table} <- tables, into: [], do: table

    [
      stubs,
      "\n",
      tables
    ]
  end

  @doc """
  Generates the C stub entries for a model and its aliases, sorted by key.
  """
  def gen_stubs(%__MODULE__{ aliases: aliases, name: name, key: root_key }) do
    entries = [
      {root_key, "{{NULL, NULL}, false, 0, \"#{root_key}\", 0, \"#{root_key}\", \"#{name}\"},\n"}
      | (for {key, val} <- aliases, into: [] do
           {key, "{{NULL, NULL}, false, 0, \"#{root_key}\", 0, \"#{key}\", \"#{val}\"},\n"}
         end)
    ]

    entries =
      for {_, entry} <- Enum.sort_by(entries, fn ({key, _}) -> key end), into: [] do
        entry
      end

    [
      "/* #{root_key} */\n"
      | entries
    ]
  end

  @doc """
  Generates the C table entry for a model, including its 256-entry lookup
  table computed by the NIF (all zeros for "sick" models).
  """
  def gen_tables(model = %__MODULE__{ bits: bits }) do
    table =
      if model.sick do
        :lists.duplicate(256, 0)
      else
        context = :crc_nif.crc_init(model)
        {true, table} = :crc_nif.debug_table(context)
        table
      end

    # Each element is emitted as ",0x..", so drop the leading comma.
    [_ | table_values] = :lists.flatten(for n <- table, into: [] do
      [?,, encode_hex(n, bits)]
    end)

    width = "#{model.width}"
    poly = encode_hex(model.poly, bits)
    init = encode_hex(model.init, bits)
    refin = "#{model.refin}"
    # BUG FIX: this previously interpolated `model.refin`, so the generated
    # source always mirrored refin and ignored the model's refout flag.
    refout = "#{model.refout}"
    xorout = encode_hex(model.xorout, bits)
    check = encode_hex(model.check, bits)
    residue = encode_hex(model.residue, bits)
    name = "\"#{model.name}\""

    aliases =
      if map_size(model.aliases) == 0 do
        []
      else
        for {_, val} <- model.aliases, into: [] do
          [?\s, "alias=\"", val, ?\"]
        end
      end

    [
      if model.sick do
        ["/* width=", width, " poly=", poly, " init=", init, " sick=true check=", check, " name=", name, aliases, " */\n"]
      else
        ["/* width=", width, " poly=", poly, " init=", init, " refin=", refin, " refout=", refout, " xorout=", xorout, " check=", check, " residue=", residue, " name=", name, aliases, " */\n"]
      end,
      "{{{NULL, NULL}, true, 0, \"#{model.key}\", #{bits}},\n",
      " #{model.sick},\n",
      " ", width, ",\n",
      " ", poly, ",\n",
      " ", init, ",\n",
      " ", refin, ",\n",
      " ", refout, ",\n",
      " ", xorout, ",\n",
      " ", check, ",\n",
      " ", residue, ",\n",
      " {", table_values, "}},\n"
    ]
  end

  # Accumulates decoded models until input is exhausted.
  # (The `@doc false` attributes formerly on these private functions were
  # removed: @doc on defp is discarded and warns at compile time.)
  defp decode_list(data, acc) do
    case decode(data) do
      {:ok, model, rest} ->
        decode_list(rest, [model | acc])

      {:error, _, <<>>} ->
        {:ok, :lists.reverse(acc)}

      {:error, model, rest} ->
        {:error, model, rest}
    end
  end

  # Each clause consumes one `key=value` token; the `nil` match on the
  # accumulator field ensures a given key is only accepted once per line.
  defp parse("width=" <> rest, acc = %{ width: nil }) do
    {rest, value} = take_until_whitespace(rest, <<>>)
    value = :erlang.binary_to_integer(value)
    acc = %{ acc | width: value }
    parse(rest, acc)
  end

  defp parse("poly=" <> rest, acc = %{ poly: nil }) do
    {rest, "0x" <> value} = take_until_whitespace(rest, <<>>)
    value = decode_hex(value)
    acc = %{ acc | poly: value }
    parse(rest, acc)
  end

  defp parse("init=" <> rest, acc = %{ init: nil }) do
    {rest, "0x" <> value} = take_until_whitespace(rest, <<>>)
    value = decode_hex(value)
    acc = %{ acc | init: value }
    parse(rest, acc)
  end

  defp parse("xorout=" <> rest, acc = %{ xorout: nil }) do
    {rest, "0x" <> value} = take_until_whitespace(rest, <<>>)
    value = decode_hex(value)
    acc = %{ acc | xorout: value }
    parse(rest, acc)
  end

  defp parse("check=" <> rest, acc = %{ check: nil }) do
    {rest, "0x" <> value} = take_until_whitespace(rest, <<>>)
    value = decode_hex(value)
    acc = %{ acc | check: value }
    parse(rest, acc)
  end

  defp parse("residue=" <> rest, acc = %{ residue: nil }) do
    {rest, "0x" <> value} = take_until_whitespace(rest, <<>>)
    value = decode_hex(value)
    acc = %{ acc | residue: value }
    parse(rest, acc)
  end

  defp parse("refin=" <> rest, acc = %{ refin: nil }) do
    {rest, value} = take_until_whitespace(rest, <<>>)

    # The case asserts the parsed atom is a boolean; anything else crashes.
    value =
      case :erlang.binary_to_atom(value, :unicode) do
        val when is_boolean(val) -> val
      end

    acc = %{ acc | refin: value }
    parse(rest, acc)
  end

  defp parse("refout=" <> rest, acc = %{ refout: nil }) do
    {rest, value} = take_until_whitespace(rest, <<>>)

    value =
      case :erlang.binary_to_atom(value, :unicode) do
        val when is_boolean(val) -> val
      end

    acc = %{ acc | refout: value }
    parse(rest, acc)
  end

  defp parse("sick=" <> rest, acc = %{ sick: nil }) do
    {rest, value} = take_until_whitespace(rest, <<>>)

    value =
      case :erlang.binary_to_atom(value, :unicode) do
        val when is_boolean(val) -> val
      end

    acc = %{ acc | sick: value }
    parse(rest, acc)
  end

  defp parse("name=" <> rest, acc = %{ name: nil }) do
    {rest, value} = take_until_whitespace(rest, <<>>)
    name = strip_quotes(value, <<>>)
    # The lookup key is the lowercased/underscored name, e.g. "CRC-8" -> :crc_8.
    root_key = :erlang.binary_to_atom(underscore(value, <<>>), :unicode)
    acc = %{ acc | name: name, key: root_key }
    parse(rest, acc)
  end

  defp parse("alias=" <> rest, acc = %{ aliases: aliases }) do
    {rest, value} = take_until_whitespace(rest, <<>>)
    name = strip_quotes(value, <<>>)
    key = :erlang.binary_to_atom(underscore(value, <<>>), :unicode)
    aliases = Map.put(aliases, key, name)
    acc = %{ acc | aliases: aliases }
    parse(rest, acc)
  end

  # End of line: finalize the model. Requires width, refin and poly; rounds
  # the width up to the next supported table width (8/16/32/64 bits).
  defp parse(<< ?\n, rest :: binary() >>, acc) do
    acc =
      if is_nil(acc.sick) do
        %{ acc | sick: false }
      else
        acc
      end

    if is_nil(acc.width) or is_nil(acc.refin) or is_nil(acc.poly) do
      {:error, acc, rest}
    else
      bits = (div(acc.width, 8) + (if rem(acc.width, 8) == 0, do: 0, else: 1)) * 8

      bits =
        case bits do
          _ when bits in [8, 16, 32, 64] -> bits
          _ when bits < 8 -> 8
          _ when bits > 8 and bits < 16 -> 16
          _ when bits > 16 and bits < 32 -> 32
          _ when bits > 32 and bits < 64 -> 64
        end

      acc = %{ acc | bits: bits }
      {:ok, acc, rest}
    end
  end

  # Skip any other character (separators, unknown tokens).
  defp parse(<< _, rest :: binary() >>, acc) do
    parse(rest, acc)
  end

  # End of input: treat it as an implicit end of line.
  defp parse(<<>>, acc) do
    parse(<< ?\n >>, acc)
  end

  # Parses a hexadecimal string (without the "0x" prefix) to an integer.
  defp decode_hex(value) do
    :erlang.binary_to_integer(value, 16)
  end

  # Formats `value` as a fixed-width lowercase "0x.." literal of `bits` bits.
  defp encode_hex(value, bits) do
    "0x" <> Base.encode16(<< value :: unsigned-big-integer-unit(1)-size(bits) >>, case: :lower)
  end

  # Returns {rest, token}: everything up to (but not consuming) a newline,
  # or up to and consuming a single space, or the end of input.
  defp take_until_whitespace(<<>>, acc) do
    {<<>>, acc}
  end

  defp take_until_whitespace(rest = << ?\n, _ :: binary() >>, acc) do
    {rest, acc}
  end

  defp take_until_whitespace(<< ?\s, rest :: binary() >>, acc) do
    {rest, acc}
  end

  defp take_until_whitespace(<< c, rest :: binary() >>, acc) do
    take_until_whitespace(rest, << acc ::binary(), c >>)
  end

  # Lowercases A-Z, keeps a-z/0-9, maps '-' and '/' to '_', drops quotes and
  # any other character — yields an atom-safe key like crc_8.
  defp underscore(<< ?", rest :: binary() >>, acc) do
    underscore(rest, acc)
  end

  defp underscore(<< c, rest :: binary() >>, acc) when c in ?A..?Z do
    underscore(rest, << acc :: binary(), (c + 32) >>)
  end

  defp underscore(<< c, rest :: binary() >>, acc) when c in ?a..?z or c in ?0..?9 do
    underscore(rest, << acc :: binary(), c >>)
  end

  defp underscore(<< c, rest :: binary() >>, acc) when c in [?-, ?/] do
    underscore(rest, << acc :: binary(), ?_ >>)
  end

  defp underscore(<<>>, acc) do
    acc
  end

  # Removes every double-quote character from the binary.
  defp strip_quotes(<< ?", rest :: binary() >>, acc) do
    strip_quotes(rest, acc)
  end

  defp strip_quotes(<< c, rest :: binary() >>, acc) do
    strip_quotes(rest, << acc :: binary(), c >>)
  end

  defp strip_quotes(<<>>, acc) do
    acc
  end
end
|
lib/crc/model.ex
| 0.580471
| 0.402627
|
model.ex
|
starcoder
|
defmodule VintageNetMobile.Modem.QuectelBG96 do
  @behaviour VintageNetMobile.Modem

  require Logger

  @moduledoc """
  Quectel BG96 support

  The Quectel BG96 is an LTE Cat M1/Cat NB1/EGPRS module. Here's an example
  configuration:

  ```elixir
  VintageNet.configure(
    "ppp0",
    %{
      type: VintageNetMobile,
      vintage_net_mobile: %{
        modem: VintageNetMobile.Modem.QuectelBG96,
        service_providers: [%{apn: "super"}]
      }
    }
  )
  ```

  Options:

  * `:modem` - `VintageNetMobile.Modem.QuectelBG96`
  * `:service_providers` - A list of service provider information (only `:apn`
    providers are supported)
  * `:at_tty` - A tty for sending AT commands on. This defaults to `"ttyUSB2"`
    which works unless other USB serial devices cause Linux to set it to
    something different.
  * `:ppp_tty` - A tty for the PPP connection. This defaults to `"ttyUSB3"`
    which works unless other USB serial devices cause Linux to set it to
    something different.
  * `:scan` - Set this to the order that radio access technologies should be
    attempted when trying to connect. For example, `[:lte_cat_m1, :gsm]`
    would prevent the modem from trying LTE Cat NB1 and potentially save some
    time if you're guaranteed to not have Cat NB1 service.

  If multiple service providers are configured, this implementation only
  attempts to connect to the first one.

  Example of supported properties:

  ```elixir
  iex> VintageNet.get_by_prefix(["interface", "ppp0"])
  [
    {["interface", "ppp0", "addresses"],
     [
       %{
         address: {10, 64, 64, 64},
         family: :inet,
         netmask: {255, 255, 255, 255},
         prefix_length: 32,
         scope: :universe
       }
     ]},
    {["interface", "ppp0", "connection"], :internet},
    {["interface", "ppp0", "lower_up"], true},
    {["interface", "ppp0", "mobile", "access_technology"], "CAT-M1"},
    {["interface", "ppp0", "mobile", "band"], "LTE BAND 12"},
    {["interface", "ppp0", "mobile", "channel"], 5110},
    {["interface", "ppp0", "mobile", "cid"], 18677159},
    {["interface", "ppp0", "mobile", "lac"], 4319},
    {["interface", "ppp0", "mobile", "mcc"], 310},
    {["interface", "ppp0", "mobile", "mnc"], 410},
    {["interface", "ppp0", "mobile", "network"], "AT&T"},
    {["interface", "ppp0", "mobile", "signal_4bars"], 4},
    {["interface", "ppp0", "mobile", "signal_asu"], 25},
    {["interface", "ppp0", "mobile", "signal_dbm"], -63},
    {["interface", "ppp0", "present"], true},
    {["interface", "ppp0", "state"], :configured},
    {["interface", "ppp0", "type"], VintageNetMobile}
  ]
  ```

  ## Required Linux kernel options

  * CONFIG_USB_SERIAL=m
  * CONFIG_USB_SERIAL_WWAN=m
  * CONFIG_USB_SERIAL_OPTION=m
  * CONFIG_USB_WDM=m
  * CONFIG_USB_NET_QMI_WWAN=m
  """
  # (review) The `:ppp_tty` default above previously read `"ttyUSB2"`, which
  # contradicted the code below — `add_raw_config/3` defaults it to "ttyUSB3".

  alias VintageNet.Interface.RawConfig
  alias VintageNetMobile.{CellMonitor, Chatscript, ExChat, ModemInfo, PPPDConfig, SignalMonitor}
  alias VintageNetMobile.Modem.Utils

  @impl VintageNetMobile.Modem
  def normalize(config) do
    check_linux_version()

    config
    |> Utils.require_a_service_provider()
    |> normalize_mobile_opts()
  end

  defp normalize_mobile_opts(%{vintage_net_mobile: mobile} = config) do
    new_mobile =
      mobile
      |> normalize_scan()

    %{config | vintage_net_mobile: new_mobile}
  end

  # Filters the user's :scan list down to technologies the BG96 supports;
  # raises if nothing usable remains.
  defp normalize_scan(%{scan: rat_list} = mobile) when is_list(rat_list) do
    supported_list = [:lte_cat_m1, :lte_cat_nb1, :gsm]
    ok_rat_list = Enum.filter(rat_list, fn rat -> rat in supported_list end)

    if ok_rat_list == [] do
      raise ArgumentError,
            "Check your `:scan` list for technologies supported by the BG96: #{inspect(supported_list)} "
    end

    %{mobile | scan: ok_rat_list}
  end

  # No :scan option given — leave the config untouched.
  defp normalize_scan(mobile), do: mobile

  @impl VintageNetMobile.Modem
  def add_raw_config(raw_config, %{vintage_net_mobile: mobile} = _config, opts) do
    ifname = raw_config.ifname

    files = [{Chatscript.path(ifname, opts), chatscript(mobile)}]

    # Default ttys for the BG96's USB serial endpoints: AT commands on
    # ttyUSB2, PPP data on ttyUSB3 (may differ if other USB serial devices
    # are present).
    at_tty = Map.get(mobile, :at_tty, "ttyUSB2")
    ppp_tty = Map.get(mobile, :ppp_tty, "ttyUSB3")

    child_specs = [
      {ExChat, [tty: at_tty, speed: 9600]},
      {SignalMonitor, [ifname: ifname, tty: at_tty]},
      {CellMonitor, [ifname: ifname, tty: at_tty]},
      {ModemInfo, [ifname: ifname, tty: at_tty]}
    ]

    %RawConfig{
      raw_config
      | files: files,
        child_specs: child_specs
    }
    |> PPPDConfig.add_child_spec(ppp_tty, 9600, opts)
  end

  defp chatscript(mobile_config) do
    Chatscript.default(mobile_config, script_additions(mobile_config))
  end

  defp script_additions(nil), do: []

  defp script_additions(mobile_config) when is_map(mobile_config) do
    [
      scan_additions(Map.get(mobile_config, :scan))
    ]
  end

  defp scan_additions(nil) do
    # Reset to the factory default modes and search sequence
    scan_additions([:lte_cat_m1, :lte_cat_nb1, :gsm])
  end

  defp scan_additions(scan_list) when is_list(scan_list) do
    # This sets the sequence as specified and resets nwscanmode and iotop to be permissive
    [
      "OK AT+QCFG=\"nwscanseq\",",
      Enum.map(scan_list, &scan_to_nwscanseq/1),
      "\n",
      "OK AT+QCFG=\"nwscanmode\",",
      scan_to_nwscanmode(scan_list),
      "\n",
      "OK AT+QCFG=\"iotopmode\",",
      scan_to_iotopmode(scan_list),
      "\n"
    ]
  end

  # RAT codes for AT+QCFG="nwscanseq" — presumably from the Quectel BG96 AT
  # command manual; confirm against the manual before changing.
  defp scan_to_nwscanseq(:gsm), do: "01"
  defp scan_to_nwscanseq(:lte_cat_m1), do: "02"
  defp scan_to_nwscanseq(:lte_cat_nb1), do: "03"

  # AT+QCFG="nwscanmode": 0 = automatic, 1 = GSM only, 3 = LTE only.
  defp scan_to_nwscanmode(scan_list) do
    has_gsm = Enum.member?(scan_list, :gsm)
    has_lte = Enum.member?(scan_list, :lte_cat_m1) or Enum.member?(scan_list, :lte_cat_nb1)

    cond do
      has_gsm and has_lte -> "0"
      has_gsm -> "1"
      has_lte -> "3"
      true -> "0"
    end
  end

  # AT+QCFG="iotopmode": 0 = Cat M1, 1 = Cat NB1, 2 = both.
  defp scan_to_iotopmode(scan_list) do
    has_m1 = Enum.member?(scan_list, :lte_cat_m1)
    has_nb1 = Enum.member?(scan_list, :lte_cat_nb1)

    cond do
      has_m1 and has_nb1 -> "2"
      has_nb1 -> "1"
      true -> "0"
    end
  end

  @doc false
  @spec check_linux_version :: :ok
  def check_linux_version() do
    # Warns on kernels 5.4.53+ and 5.5+, which break BG96 PPP (see
    # linux_warning/0). NOTE(review): 6.x kernels fall through to :ok —
    # confirm whether the warning should also apply there.
    case :os.version() do
      {5, 4, patch} when patch > 52 -> linux_warning()
      {5, minor, _patch} when minor > 4 -> linux_warning()
      _ -> :ok
    end
  end

  defp linux_warning() do
    Logger.warn(
      "VintageNetMobile is broken on Linux 5.4.53+ when using Quectel modems unless you revert https://github.com/torvalds/linux/commit/2bb70f0a4b238323e4e2f392fc3ddeb5b7208c9e"
    )
  end
end
|
lib/vintage_net_mobile/modem/quectel_BG96.ex
| 0.858733
| 0.672923
|
quectel_BG96.ex
|
starcoder
|
defmodule Membrane.Opus.PacketUtils do
  @moduledoc false
  alias Membrane.Opus

  # Refer to https://tools.ietf.org/html/rfc6716#section-3.1
  # TOC `config` field (5 bits) -> {mode, audio bandwidth, frame duration in
  # microseconds}, as given by RFC 6716 Table 2.
  @toc_config_map %{
    0 => {:silk, :narrow, 10_000},
    1 => {:silk, :narrow, 20_000},
    2 => {:silk, :narrow, 40_000},
    3 => {:silk, :narrow, 60_000},
    4 => {:silk, :medium, 10_000},
    5 => {:silk, :medium, 20_000},
    6 => {:silk, :medium, 40_000},
    7 => {:silk, :medium, 60_000},
    8 => {:silk, :wide, 10_000},
    9 => {:silk, :wide, 20_000},
    10 => {:silk, :wide, 40_000},
    11 => {:silk, :wide, 60_000},
    12 => {:hybrid, :super_wide, 10_000},
    13 => {:hybrid, :super_wide, 20_000},
    14 => {:hybrid, :full, 10_000},
    15 => {:hybrid, :full, 20_000},
    16 => {:celt, :narrow, 2_500},
    17 => {:celt, :narrow, 5_000},
    18 => {:celt, :narrow, 10_000},
    19 => {:celt, :narrow, 20_000},
    20 => {:celt, :wide, 2_500},
    21 => {:celt, :wide, 5_000},
    22 => {:celt, :wide, 10_000},
    23 => {:celt, :wide, 20_000},
    24 => {:celt, :super_wide, 2_500},
    25 => {:celt, :super_wide, 5_000},
    26 => {:celt, :super_wide, 10_000},
    27 => {:celt, :super_wide, 20_000},
    28 => {:celt, :full, 2_500},
    29 => {:celt, :full, 5_000},
    30 => {:celt, :full, 10_000},
    31 => {:celt, :full, 20_000}
  }

  # Consumes the 1-byte TOC header (RFC 6716 section 3.1): 5-bit config,
  # 1-bit stereo flag, 2-bit frame-count code. Returns the decoded header
  # fields and the remaining data.
  @spec skip_toc(data :: binary) ::
          {:ok,
           %{
             mode: :silk | :hybrid | :celt,
             bandwidth: :narrow | :medium | :wide | :super_wide | :full,
             frame_duration: Membrane.Time.non_neg_t(),
             channels: Opus.channels_t(),
             code: 0..3
           }, data :: binary}
          | :end_of_data
  def skip_toc(<<config::5, stereo_flag::1, code::2, data::binary>>) do
    {mode, bandwidth, frame_duration} = Map.fetch!(@toc_config_map, config)

    {:ok,
     %{
       mode: mode,
       bandwidth: bandwidth,
       frame_duration: Membrane.Time.microseconds(frame_duration),
       channels: stereo_flag + 1,
       code: code
     }, data}
  end

  def skip_toc(_data), do: :end_of_data

  # Interprets the TOC frame-count code: codes 0-2 have a fixed layout
  # (1 CBR frame / 2 CBR frames / 2 VBR frames, no padding); code 3 reads a
  # frame-count byte — 1-bit VBR flag, 1-bit padding flag, 6-bit count —
  # followed by the optional padding length (RFC 6716 section 3.2.5).
  @spec skip_code(code :: integer, data :: binary) ::
          {:ok, :cbr | :vbr, frames_count :: integer, padding_len :: integer, data :: binary}
          | :end_of_data
  def skip_code(0, data), do: {:ok, :cbr, 1, 0, data}
  def skip_code(1, data), do: {:ok, :cbr, 2, 0, data}
  def skip_code(2, data), do: {:ok, :vbr, 2, 0, data}

  def skip_code(3, <<mode::1, padding_present?::1, frames::6, data::binary>>) do
    mode = if mode == 0, do: :cbr, else: :vbr

    skip_pad_len_result =
      if padding_present? == 1, do: skip_padding_length(data, 0), else: {:ok, 0, data}

    with {:ok, pad_len, data} <- skip_pad_len_result do
      {:ok, mode, frames, pad_len, data}
    end
  end

  def skip_code(_code, _data), do: :end_of_data

  # Consumes the encoded frame-size fields and returns the total byte size
  # of all frames. For CBR one size is read and multiplied by the frame
  # count; for VBR each frame carries its own size.
  # NOTE(review): the CBR branch assumes an explicit first frame size is
  # present (self-delimiting style) — confirm against callers.
  @spec skip_frame_sizes(mode :: :cbr | :vbr, data :: binary, frames_count :: integer) ::
          {:ok, frames_size :: integer, data :: binary} | :end_of_data
  def skip_frame_sizes(:cbr, data, frames_count) do
    # min/2 keeps a frames_count of 0 from reading any size byte.
    with {:ok, size, data} <- do_skip_frame_sizes(data, min(frames_count, 1), 0) do
      {:ok, frames_count * size, data}
    end
  end

  def skip_frame_sizes(:vbr, data, frames), do: do_skip_frame_sizes(data, frames, 0)

  # Frame length coding (RFC 6716 section 3.1): one byte 0..251 is the size
  # itself; a first byte of 252..255 means size = second_byte * 4 + first_byte.
  defp do_skip_frame_sizes(data, 0, acc), do: {:ok, acc, data}

  defp do_skip_frame_sizes(<<size, data::binary>>, frames, acc) when size <= 251,
    do: do_skip_frame_sizes(data, frames - 1, acc + size)

  defp do_skip_frame_sizes(<<size1, size2, data::binary>>, frames, acc) when size1 >= 252,
    do: do_skip_frame_sizes(data, frames - 1, acc + size1 + size2 * 4)

  defp do_skip_frame_sizes(_data, _frames, _total), do: :end_of_data

  # Drops `size` bytes from the front of `data`.
  @spec skip_data(size :: non_neg_integer(), data :: binary) ::
          {:ok, data :: binary()} | :end_of_data
  def skip_data(size, data) do
    case data do
      <<_to_skip::binary-size(size), data::binary>> -> {:ok, data}
      _data -> :end_of_data
    end
  end

  # Inverse of the frame length coding above; sizes above 1275 (the Opus
  # maximum frame size) have no encoding and raise FunctionClauseError.
  @spec encode_frame_size(pos_integer) :: binary
  def encode_frame_size(size) when size in 0..251, do: <<size>>

  def encode_frame_size(size) when size in 252..1275 do
    size = size - 252
    <<252 + rem(size, 4), div(size, 4)>>
  end

  # Padding length coding (RFC 6716 section 3.2.5): each 255 byte adds 254
  # bytes of padding and continues; any other byte adds its value and ends.
  defp skip_padding_length(<<255, data::binary>>, acc_padding),
    do: skip_padding_length(data, acc_padding + 254)

  defp skip_padding_length(<<padding, data::binary>>, acc_padding),
    do: {:ok, padding + acc_padding, data}

  defp skip_padding_length(<<>>, _acc_padding), do: :end_of_data
end
|
lib/membrane_opus/packet_utils.ex
| 0.768342
| 0.439447
|
packet_utils.ex
|
starcoder
|
% DateTime represents both Date and Time. Currently it is not aware of
% timezones, but that should be added in the future, while Date and Time
% objects should always ignore timezones.
%
% This implementation is based on Erlang's calendar module: http://erlang.org/doc/man/calendar.html
%
% NOTE(review): this file is written in the pre-1.0 prototype Elixir dialect
% (`module`/`%` comments/`#Mod::Behavior(...)` construction). It is documented
% in place rather than rewritten, since modern Elixir cannot parse it.
module DateTime
  % Wrap an Erlang {date, time} tuple in a DateTime object.
  def new(date_time)
    #DateTime::Behavior(date_time)
  end

  % Return the current time in UTC according to the value
  % returned by the operating system.
  def utc
    #DateTime::Behavior(Erlang.calendar.universal_time)
  end

  module Behavior
    % Bind the {date, time} pair to the @date and @time instance
    % variables, converted to tuples.
    def __bound__({date, time})
      @('date: date.to_tuple, 'time: time.to_tuple)
    end

    % Add the number of *seconds* to the DateTime object.
    %
    % ## Example
    %
    %     datetime = DateTime.utc
    %     datetime.to_s % => "2010-04-17 14:00:00"
    %
    %     % Add two days
    %     datetime + (2 * 86400) % => "2010-04-19 14:00:00"
    %
    def +(seconds)
      DateTime.new Erlang.calendar.gregorian_seconds_to_datetime(to_i + seconds)
    end

    % Subtract the number of *seconds* from the DateTime object.
    def -(seconds)
      DateTime.new Erlang.calendar.gregorian_seconds_to_datetime(to_i - seconds)
    end

    % Returns a string representation of the DateTime object.
    def inspect
      "#{formatted_date} #{formatted_time}"
    end

    % Converts the given time to a string according to the gregorian calendar (i.e. starting with year 0).
    % You can find more information about it on Erlang's calendar module: http://erlang.org/doc/man/calendar.html
    def to_i
      Erlang.calendar.datetime_to_gregorian_seconds({@date,@time})
    end

    % Returns the year
    def year
      @date[0]
    end

    % Returns the month
    def month
      @date[1]
    end

    % Returns the day
    def day
      @date[2]
    end

    % Returns the hours
    def hours
      @time[0]
    end

    % Returns the minutes
    def minutes
      @time[1]
    end

    % Returns the seconds
    def seconds
      @time[2]
    end

    % Returns a tuple where the first element is a tuple containing { year, month, day }
    % and the second is a tuple containing { hours, minutes, seconds }.
    def to_tuple
      { @date, @time }
    end

    % Returns time according to RFC1123, e.g. "Sat, 17-Apr-2010 14:00:00 GMT".
    % Always labels the time GMT since this object is not timezone aware.
    def rfc1123
      date = Date.new(@date)
      "#{date.weekday_name}, #{convert_to_double_digit(@date[2])}-#{date.month_name}-#{@date[0]} #{formatted_time} GMT"
    end

    private

    % "YYYY-MM-DD" with zero-padded month and day.
    def formatted_date
      "#{@date[0]}-#{convert_to_double_digit(@date[1])}-#{convert_to_double_digit(@date[2])}"
    end

    % "HH:MM:SS" with zero-padded fields.
    def formatted_time
      "#{convert_to_double_digit(@time[0])}:#{convert_to_double_digit(@time[1])}:#{convert_to_double_digit(@time[2])}"
    end

    % Zero-pads a single-digit unit to two characters.
    def convert_to_double_digit(unit)
      if unit < 10
        "0" + unit.to_s
      else
        unit.to_s
      end
    end
  end
end
|
lib/date_time.ex
| 0.809577
| 0.471832
|
date_time.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.