code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule EctoLtree.LabelTree do
  @moduledoc """
  This module defines the LabelTree struct.
  Implements the Ecto.Type behaviour.

  ## Fields

  * `labels` - the list of labels forming the dot-separated ltree path
  """
  use Ecto.Type

  alias EctoLtree.LabelTree, as: Ltree

  @type t :: %__MODULE__{
          labels: [String.t()]
        }

  defstruct labels: []

  # Postgres limits a full ltree label path to 2048 bytes.
  @labelpath_size_max 2048

  @doc """
  Provides custom casting rules from external data to internal representation.

  Accepts a dot-separated binary (e.g. `"Top.Science.Astronomy"`) or an
  already-built `%Ltree{}` struct. Returns `:error` for anything else, for
  oversized paths, or when any label contains a non-word character.
  """
  @spec cast(String.t()) :: {:ok, t} | :error
  def cast(string) when is_binary(string) and byte_size(string) <= @labelpath_size_max do
    labels_result =
      string
      |> String.split(".")
      |> Enum.map(&cast_label/1)

    if :error in labels_result do
      :error
    else
      {:ok, %Ltree{labels: Enum.map(labels_result, fn {:ok, label} -> label end)}}
    end
  end

  def cast(%Ltree{} = struct) do
    {:ok, struct}
  end

  def cast(_), do: :error

  # Postgres limits a single ltree label to 256 bytes.
  @label_size_max 256

  # Anchored so the WHOLE label must consist of word characters. The previous
  # unanchored regex compared the matched BYTE span against the GRAPHEME count
  # (`String.length/1`), which wrongly accepted labels whose trailing bytes
  # were combining characters (e.g. "a" <> U+0301 has length 1 but 3 bytes,
  # of which the regex matched 1 — and 1 == String.length/1 passed).
  @label_regex ~r/\A[A-Za-z0-9_]+\z/

  # Validates a single label: non-empty, within the size limit, and composed
  # exclusively of ASCII word characters.
  @spec cast_label(String.t()) :: {:ok, String.t()} | :error
  defp cast_label(string) when is_binary(string) and byte_size(string) <= @label_size_max do
    if Regex.match?(@label_regex, string) do
      {:ok, string}
    else
      :error
    end
  end

  defp cast_label(_), do: :error

  @doc """
  From internal representation to database.
  """
  @spec dump(t) :: {:ok, String.t()} | :error
  def dump(%Ltree{} = label_tree) do
    {:ok, decode(label_tree)}
  end

  def dump(_), do: :error

  # Renders the struct as the canonical dot-separated label path.
  @spec decode(t) :: String.t()
  def decode(%Ltree{} = label_tree) do
    Enum.join(label_tree.labels, ".")
  end

  @doc """
  From database to internal representation.
  """
  @spec load(String.t()) :: {:ok, t} | :error
  def load(labelpath) when is_binary(labelpath) do
    {:ok, %Ltree{labels: labelpath |> String.split(".")}}
  end

  def load(_), do: :error

  @doc """
  Returns the underlying schema type.
  """
  @spec type() :: :ltree
  def type, do: :ltree
end
defimpl String.Chars, for: EctoLtree.LabelTree do
  alias EctoLtree.LabelTree

  # Render the tree as its canonical dot-separated label path.
  def to_string(%LabelTree{} = tree), do: LabelTree.decode(tree)
end
|
lib/ecto_ltree/label_tree.ex
| 0.856647
| 0.475666
|
label_tree.ex
|
starcoder
|
defmodule Utils.Build do
  @moduledoc """
  This module basically acts as a way to define code that will be determined at
  compile time instead of runtime. It is kind of like using config.exs values
  but can do much more like defining different function implementations, etc.

  NOTE: PLEASE USE WITH EXTREME CAUTION

  By the nature of this code your production environments will be different
  then your dev or test builds. However it can come in handy for defining
  mocks etc.
  """

  # ----------------------------------------------------------------------------
  # Public API
  # ----------------------------------------------------------------------------

  @doc """
  Compiler macro that will only add the do block code if it is a dev build
  and will run the else block of code if it is something else (test or prod)
  Note: the else block is optional

  ## Example:

      import Utils.Build

      if_dev do
        @msg "I am a dev build"
      else
        @msg "I am NOT a dev build"
      end
  """
  @spec if_dev([{:do, any} | {:else, any}, ...]) :: any
  defmacro if_dev(do: t_block, else: f_block) do
    select(Mix.env() == :dev, t_block, f_block)
  end

  defmacro if_dev(do: t_block) do
    select(Mix.env() == :dev, t_block, nil)
  end

  @doc """
  Compiler macro that will only add the do block code if it is not a dev build
  (test or prod) and will run the else block of code if it is a dev build
  Note: the else block is optional

  ## Example:

      import Utils.Build

      if_not_dev do
        @msg "I am NOT a dev build"
      else
        @msg "I am a dev build"
      end
  """
  @spec if_not_dev([{:do, any} | {:else, any}, ...]) :: any
  defmacro if_not_dev(do: t_block, else: f_block) do
    select(Mix.env() != :dev, t_block, f_block)
  end

  defmacro if_not_dev(do: t_block) do
    select(Mix.env() != :dev, t_block, nil)
  end

  @doc """
  Compiler macro that will only add the do block code if it is a prod build
  and will run the else block of code if it is something else (test or dev)
  Note: the else block is optional

  ## Example:

      import Utils.Build

      if_prod do
        @msg "I am a prod build"
      else
        @msg "I am NOT a prod build"
      end
  """
  @spec if_prod([{:do, any} | {:else, any}, ...]) :: any
  defmacro if_prod(do: t_block, else: f_block) do
    select(Mix.env() == :prod, t_block, f_block)
  end

  defmacro if_prod(do: t_block) do
    select(Mix.env() == :prod, t_block, nil)
  end

  @doc """
  Compiler macro that will only add the do block code if it is not a prod build
  (test or dev) and will run the else block of code if it is a prod build
  Note: the else block is optional

  ## Example:

      import Utils.Build

      if_not_prod do
        @msg "I am NOT a prod build"
      else
        @msg "I am a prod build"
      end
  """
  @spec if_not_prod([{:do, any} | {:else, any}, ...]) :: any
  defmacro if_not_prod(do: t_block, else: f_block) do
    select(Mix.env() != :prod, t_block, f_block)
  end

  defmacro if_not_prod(do: t_block) do
    select(Mix.env() != :prod, t_block, nil)
  end

  @doc """
  Compiler macro that will only add the do block code if it is a test build
  and will run the else block of code if it is something else (prod or dev)
  Note: the else block is optional

  ## Example:

      import Utils.Build

      if_test do
        @msg "I am a test build"
      else
        @msg "I am NOT a test build"
      end
  """
  @spec if_test([{:do, any} | {:else, any}, ...]) :: any
  defmacro if_test(do: t_block, else: f_block) do
    select(Mix.env() == :test, t_block, f_block)
  end

  defmacro if_test(do: t_block) do
    select(Mix.env() == :test, t_block, nil)
  end

  @doc """
  Compiler macro that will only add the do block code if it is not a test build
  (prod or dev) and will run the else block of code if it is a test build
  Note: the else block is optional

  ## Example:

      import Utils.Build

      if_not_test do
        @msg "I am NOT a test build"
      else
        @msg "I am a test build"
      end
  """
  @spec if_not_test([{:do, any} | {:else, any}, ...]) :: any
  defmacro if_not_test(do: t_block, else: f_block) do
    select(Mix.env() != :test, t_block, f_block)
  end

  defmacro if_not_test(do: t_block) do
    select(Mix.env() != :test, t_block, nil)
  end

  # ----------------------------------------------------------------------------
  # Private helpers
  # ----------------------------------------------------------------------------

  # Selects which AST to splice into the call site, based on a condition
  # evaluated once at macro-expansion (i.e. compile) time. Returning `nil`
  # injects no code at all, which covers the optional `else` block (the
  # previous code's `quote do: unquote(block)` is exactly `block`, so this is
  # behavior-identical). This helper replaces eight near-identical
  # `case Mix.env()` macro bodies and also removes a stray empty `@doc` that
  # was attached to the second `if_test` clause.
  defp select(condition, do_block, else_block)
  defp select(true, do_block, _else_block), do: do_block
  defp select(false, _do_block, else_block), do: else_block
end
|
src/apps/utils/lib/utils/build.ex
| 0.7011
| 0.46217
|
build.ex
|
starcoder
|
defmodule Spell.Role do
  @moduledoc """
  The `Spell.Role` module defines the behaviour of a role in spell.
  A role specifies the logic for handling groups of commands. A peer is started
  with one or more roles, which the peer uses to configure its state and handle
  its messages.

  ## Callbacks

  A module must implement all `Spell.Role` behaviour callbacks, though the `use
  Spell.Role` directive provides a sane default implementation for each.

  * `get_features/1`
  * `init/2`
  * `on_open/2`
  * `on_close/2`
  * `handle_message/3`
  * `handle_call/4`
  """

  # Type Specs

  # Previously annotated "TODO: broken typespec": `Map.t` is not a defined
  # type, so the builtin `map()` is used for the features map instead.
  @type peer_options :: %{roles: %{features: map()}}

  # Macros

  defmacro __using__(_options) do
    quote do
      @behaviour Spell.Role

      # Default Role Callbacks

      def get_features(_options), do: nil

      # `_peer_options` is underscored: the previous bare `peer_options`
      # produced an unused-variable warning in every module using this role.
      def init(_peer_options, options), do: {:ok, options}

      def on_open(_peer, state), do: {:ok, state}
      def on_close(_peer, state), do: {:ok, state}
      def handle_message(_, _, state), do: {:ok, state}

      def handle_call(_message, _from, _peer, state) do
        {:ok, :ok, state}
      end

      defoverridable get_features: 1,
                     init: 2,
                     on_open: 2,
                     on_close: 2,
                     handle_message: 3,
                     handle_call: 4
    end
  end

  # Callback Definitions
  # Migrated from the deprecated `use Behaviour`/`defcallback` style to plain
  # `@callback` attributes; the generated behaviour info is the same.

  @doc """
  Get the key and features that this role announces. Returns `nil`
  if the features announces no features.
  """
  @callback get_features(options :: Keyword.t()) :: {atom, map}

  @doc """
  init callback for generating the role's initial state given `options`.
  """
  @callback init(peer_options :: peer_options, role_options :: Keyword.t()) ::
              {:ok, any} | {:error, any}

  @doc """
  Called after the connection is opened. Returns the state wrapped in an
  ok tuple or an error tuple.
  """
  @callback on_open(peer :: Peer.t(), state :: any) ::
              {:ok, any} | {:error, any}

  @doc """
  Called when the connection is being closed. Returns the state wrapped in
  an ok tuple or an error tuple
  """
  @callback on_close(peer :: Peer.t(), state :: any) ::
              {:ok, any} | {:error, any}

  @doc """
  Handle an incoming WAMP message.
  """
  @callback handle_message(message :: Message.t(), peer :: Peer.t(), state :: any) ::
              {:ok, any} | {:error, any}

  @doc """
  Handle a call from the peer.

  ## Return values

  * `{:ok, reply, new_state}`: return `reply`
  * `{:error, reason}`
  """
  @callback handle_call(message :: any, from :: pid, peer :: Peer.t(), state :: any) ::
              {:ok, any, any} | {:error, any}

  # Public Functions

  @doc """
  Normalize a list of role options by wrapping bare `module` items
  with an option tuple. If an unexpected role is encountered, an
  error tuple is returned.
  """
  @spec normalize_role_options([module | {module, Keyword.t()}]) ::
          {:ok, [{module, Keyword.t()}]} | {:error, {:bad_role, any}}
  def normalize_role_options(roles, acc \\ [])

  def normalize_role_options([], acc) do
    {:ok, Enum.reverse(acc)}
  end

  def normalize_role_options([module | roles], acc) when is_atom(module) do
    normalize_role_options(roles, [{module, []} | acc])
  end

  def normalize_role_options([{module, options} = role | roles], acc)
      when is_atom(module) and is_list(options) do
    normalize_role_options(roles, [role | acc])
  end

  def normalize_role_options([bad_role | _roles], _acc) do
    {:error, {:bad_role, bad_role}}
  end

  @doc """
  Returns a map with the features of the listed roles.
  """
  @spec collect_features([{module, any}]) :: %{optional(atom) => map}
  def collect_features(roles) do
    # TODO: overlapping keys will overwrite. Merging would be more usful
    roles
    |> Enum.map(fn {role, options} -> role.get_features(options) end)
    |> Enum.filter(&(&1 != nil))
    |> Enum.into(%{})
  end

  @doc """
  Call the `on_init` function for a list of roles. `peer_options` is the
  list of options which a peer was initialized with.
  """
  @spec map_init([{module, any}], Keyword.t()) ::
          {:ok, [{module, any}]} | {:error, any}
  def map_init(roles, peer_options) do
    map(roles, fn {role, role_options} ->
      role.init(peer_options, role_options)
    end)
  end

  @doc """
  Call the `on_open` function for a list of roles.
  """
  @spec map_on_open([{module, any}], Peer.t()) ::
          {:ok, [{module, any}]} | {:error, any}
  def map_on_open(roles, peer) do
    map(roles, fn {role, state} -> role.on_open(peer, state) end)
  end

  @doc """
  Call the `on_close` function for a list of roles.
  """
  @spec map_on_close([{module, any}], Peer.t()) ::
          {:ok, [{module, any}]} | {:error, any}
  def map_on_close(roles, peer) do
    map(roles, fn {role, state} -> role.on_close(peer, state) end)
  end

  @doc """
  Call the `handle_message` function for a list of roles.
  """
  @spec map_handle_message([{module, any}], Message.t(), Peer.t()) ::
          {:ok, [{module, any}]} | {:error, any}
  def map_handle_message(roles, message, peer) do
    map(roles, fn {r, s} -> r.handle_message(message, peer, s) end)
  end

  @doc """
  From `roles` call the `role`'s `send_message` function with the `message`
  and the role's state.
  """
  @spec call([{module, any}], module, any, pid, Peer.t()) ::
          {:ok, [{module, any}]} | {:error, :no_role}
  def call(roles, role, message, from, peer) do
    case Keyword.fetch(roles, role) do
      {:ok, role_state} ->
        case role.handle_call(message, from, peer, role_state) do
          {:ok, reply, role_state} ->
            {:ok, reply, Keyword.put(roles, role, role_state)}

          {:error, reason} ->
            {:error, reason}
        end

      :error ->
        {:error, :no_role}
    end
  end

  # Private Functions

  # Threads `function` over each `{module, state}` role pair, accumulating
  # the updated states. Any `{:close, reason, result}` results are collected
  # into `reasons` and reported together once the list is exhausted.
  @spec map([module], (any -> {:ok, any} | {:error, any}), Keyword.t(), Keyword.t()) ::
          {:ok, Keyword.t()} | {:close, Keyword.t(), Keyword.t()} | {:error, any}
  defp map(roles, function, results \\ [], reasons \\ [])

  defp map([], _function, results, []) do
    {:ok, Enum.reverse(results)}
  end

  defp map([], _function, results, reasons) do
    {:close, Enum.reverse(reasons), Enum.reverse(results)}
  end

  defp map([{role_module, _} = role | roles], function, results, reasons) do
    case function.(role) do
      {:ok, result} ->
        map(roles, function, [{role_module, result} | results], reasons)

      {:close, reason, result} ->
        map(roles, function,
            [{role_module, result} | results],
            [{role_module, reason} | reasons])

      {:error, reason} ->
        {:error, {role, reason}}
    end
  end
end
|
lib/spell/role.ex
| 0.753829
| 0.528533
|
role.ex
|
starcoder
|
defmodule Typelixir.FunctionsExtractor do
  @moduledoc false

  alias Typelixir.{PatternBuilder, TypeComparator, Utils}

  # extends the given functions env map with the module name and the functions it defines
  def extract_functions_file(path, env) do
    # NOTE(review): `Code.string_to_quoted/1` returns `{:ok, ast}` or
    # `{:error, reason}`, and the tagged tuple itself is walked below.
    # `Macro.prewalk/3` happens to traverse 2-tuples, so this works for the
    # happy path, but a parse error is walked silently instead of being
    # reported — confirm this is intended before tightening the match.
    ast = Code.string_to_quoted(File.read!(Path.absname(path)))
    {_ast, result} = Macro.prewalk(ast, env, &extract(&1, &2))
    Utils.prepare_result_data(result)
  end

  # MODULES
  # ---------------------------------------------------------------------------------------------------
  # {:defmodule, _, MODULE}
  defp extract({:defmodule, [line: line], [{:__aliases__, meta, module_name}, [do: block]]}, env) do
    # Return the module node with an EMPTY body so the outer prewalk does not
    # descend into it again; the body is walked explicitly below with the
    # updated module prefix.
    elem = {:defmodule, [line: line], [{:__aliases__, meta, module_name}, [do: {:__block__, [], []}]]}

    # Build the fully-qualified module name, honouring nesting via env[:prefix].
    name = Enum.map_join(module_name, ".", &Atom.to_string/1)
    new_mod_name = if env[:prefix], do: env[:prefix] <> "." <> name, else: name
    new_functions = Map.put(env[:functions], new_mod_name, Map.new())

    {_ast, result} = Macro.prewalk(block, %{env | functions: new_functions, prefix: new_mod_name}, &extract(&1, &2))

    {elem, %{env | state: result[:state], error_data: result[:error_data], functions: Map.merge(env[:functions], result[:functions])}}
  end

  # FUNCTIONS
  # ---------------------------------------------------------------------------------------------------
  # Handles `@spec name(args) :: return` attributes, registering the declared
  # type under `{name, arity}` for the current module prefix.
  defp extract({:@, [line: line], [{:spec, _, [{:::, _, [{fn_name, _, type_of_args}, type_of_return]}]}]} = elem, env) do
    # `type_of_args` is nil for zero-arity specs, hence the `|| []`.
    type_of_args = Enum.map(type_of_args || [], fn type -> PatternBuilder.type(type, %{}) end)

    case TypeComparator.has_type?(type_of_args, :error) do
      true -> {elem, %{env | state: :error, error_data: Map.put(env[:error_data], line, "Malformed type spec on #{fn_name}/#{length(type_of_args)} parameters")}}
      _ ->
        return_type = PatternBuilder.type(type_of_return, %{})

        case TypeComparator.has_type?(return_type, :error) do
          true -> {elem, %{env | state: :error, error_data: Map.put(env[:error_data], line, "Malformed type spec on #{fn_name}/#{length(type_of_args)} return")}}
          _ ->
            fn_type = {return_type, type_of_args}
            fn_key = {fn_name, length(type_of_args)}

            case env[:functions][env[:prefix]][fn_key] do
              nil ->
                # Consistency fix: reuse `fn_key` instead of rebuilding the
                # same `{fn_name, arity}` tuple inline.
                new_module_map = Map.put(env[:functions][env[:prefix]], fn_key, fn_type)
                new_functions = Map.put(env[:functions], env[:prefix], new_module_map)
                {elem, %{env | functions: new_functions}}

              _ ->
                {elem, %{env | state: :error, error_data: Map.put(env[:error_data], line, "#{fn_name}/#{length(type_of_args)} already has a defined type")}}
            end
        end
    end
  end

  # BASE CASE
  # ---------------------------------------------------------------------------------------------------
  # Any other AST node is passed through untouched.
  defp extract(elem, env), do: {elem, env}
end
|
lib/typelixir/functions_extractor.ex
| 0.694095
| 0.402686
|
functions_extractor.ex
|
starcoder
|
defmodule Sanbase.ExternalServices.Coinmarketcap.TickerFetcher do
  @moduledoc ~s"""
  A GenServer, which updates the data from coinmarketcap on a regular basis.
  Fetches only the current info and no historical data.
  On predefined intervals it will fetch the data from coinmarketcap and insert it
  into a local DB
  """
  use GenServer, restart: :permanent, shutdown: 5_000
  require Sanbase.Utils.Config, as: Config
  require Logger
  alias Sanbase.Repo
  alias Sanbase.DateTimeUtils
  alias Sanbase.Model.{LatestCoinmarketcapData, Project}
  alias Sanbase.ExternalServices.Coinmarketcap.{Ticker, PricePoint}
  alias Sanbase.Prices.Store
  # Name under which the realtime price points are handed to
  # Sanbase.KafkaExporter (see export_to_kafka/2 below). Presumably a
  # registered exporter process — confirm against the KafkaExporter setup.
  @prices_exporter :prices_exporter
  def start_link(_state) do
    GenServer.start_link(__MODULE__, :ok)
  end
  # Starts the periodic sync loop only when `:sync_enabled` is configured;
  # otherwise returns `:ignore` so the process is not started at all.
  def init(:ok) do
    if Config.get(:sync_enabled, false) do
      Store.create_db()
      # Trigger the first fetch immediately; subsequent runs are re-scheduled
      # from handle_info(:sync, _) every `update_interval` seconds.
      Process.send(self(), :sync, [:noconnect])
      update_interval = Config.get(:update_interval) |> String.to_integer()
      Logger.info(
        "[CMC] Starting TickerFetcher scraper. It will query coinmarketcap every #{update_interval} seconds."
      )
      {:ok, %{update_interval: update_interval}}
    else
      :ignore
    end
  end
  # Fetches the current tickers and fans the data out to postgres (projects +
  # latest data), influxdb (prices) and kafka (price points), in that order.
  # The `{:ok, tickers}` match is assertive on purpose: a failed fetch crashes
  # the GenServer, which is restarted because of `restart: :permanent`.
  def work(opts \\ []) do
    Logger.info("[CMC] Fetching realtime data from coinmarketcap")
    # Fetch current coinmarketcap data for many tickers
    {:ok, tickers} = Ticker.fetch_data(opts)
    # Create a map where the coinmarketcap_id is key and the values is the list of
    # santiment slugs that have that coinmarketcap_id
    cmc_id_to_slugs_mapping = coinmarketcap_to_santiment_slug_map()
    tickers = remove_not_valid_prices(tickers, cmc_id_to_slugs_mapping)
    # Create a project if it's a new one in the top projects and we don't have it
    tickers
    |> Enum.take(top_projects_to_follow())
    |> Enum.each(&insert_or_update_project/1)
    # Store the data in LatestCoinmarketcapData in postgres
    tickers
    |> Enum.each(&store_latest_coinmarketcap_data!/1)
    # Store the data in Influxdb
    if Application.get_env(:sanbase, :influx_store_enabled, true) do
      tickers
      |> Enum.flat_map(&Ticker.convert_for_importing(&1, cmc_id_to_slugs_mapping))
      |> Store.import()
    end
    tickers
    |> export_to_kafka(cmc_id_to_slugs_mapping)
    Logger.info(
      "[CMC] Fetching realtime data from coinmarketcap done. The data is imported in the database."
    )
  end
  # Builds a `%{coinmarketcap_id => [santiment_slug]}` lookup. A single CMC id
  # can map to several santiment slugs, hence the list accumulator.
  defp coinmarketcap_to_santiment_slug_map() do
    Project.List.projects_with_source("coinmarketcap", include_hidden: true)
    |> Enum.reduce(%{}, fn %Project{slug: slug} = project, acc ->
      Map.update(acc, Project.coinmarketcap_id(project), [slug], fn slugs -> [slug | slugs] end)
    end)
  end
  # Despite the name, this currently only LOGS validation failures and returns
  # `tickers` unchanged — deliberate, see the inline comment and the
  # commented-out filtering implementation below.
  defp remove_not_valid_prices(tickers, cmc_id_to_slugs_mapping) do
    tickers
    |> Enum.each(fn %{slug: cmc_slug, price_usd: price_usd, price_btc: price_btc} ->
      # This implementation does not remove/change anything. It will be deployed first
      # so we can observe the behaviour first.
      case Map.get(cmc_id_to_slugs_mapping, cmc_slug) do
        nil ->
          :ok
        slug ->
          # NOTE(review): `slug` bound here is the LIST of slugs stored in the
          # mapping, not a single slug — confirm Sanbase.Price.Validator
          # accepts a list, or this validates against an unexpected value.
          case Sanbase.Price.Validator.valid_price?(slug, "USD", price_usd) do
            {:error, error} -> Logger.info("[CMC] Price validation failed: #{error}")
            _ -> :ok
          end
          case Sanbase.Price.Validator.valid_price?(slug, "BTC", price_btc) do
            {:error, error} -> Logger.info("[CMC] Price validation failed: #{error}")
            _ -> :ok
          end
      end
    end)
    tickers
  end
  # TODO: Revert to this implementation after we the debug implementation is
  # tested and all potential issues are fixed
  # defp remove_not_valid_prices(tickers, cmc_id_to_slugs_mapping) do
  #   tickers
  #   |> Enum.map(fn %{slug: cmc_slug, price_usd: price_usd, price_btc: price_btc} = ticker ->
  #     case Map.get(cmc_id_to_slugs_mapping, cmc_slug) do
  #       nil ->
  #         ticker
  #       slug ->
  #         ticker
  #         |> then(fn t ->
  #           if true == Sanbase.Price.Validator.valid_price?(slug, "USD", price_usd),
  #             do: t,
  #             else: Map.put(t, :price_usd, nil)
  #         end)
  #         |> then(fn t ->
  #           if true == Sanbase.Price.Validator.valid_price?(slug, "BTC", price_btc),
  #             do: t,
  #             else: Map.put(t, :price_usd, nil)
  #         end)
  #     end
  #   end)
  # end
  # Converts each ticker into price points (one per mapped santiment slug) and
  # pushes them to the Kafka exporter. Any exception is rescued and logged so
  # a Kafka outage does not crash the whole sync cycle.
  defp export_to_kafka(tickers, cmc_id_to_slugs_mapping) do
    tickers
    |> Enum.flat_map(fn %Ticker{} = ticker ->
      case Map.get(cmc_id_to_slugs_mapping, ticker.slug, []) |> List.wrap() do
        [_ | _] = slugs ->
          price_point = Ticker.to_price_point(ticker) |> PricePoint.sanity_filters()
          Enum.map(slugs, fn slug -> PricePoint.json_kv_tuple(price_point, slug) end)
        _ ->
          []
      end
    end)
    |> Sanbase.KafkaExporter.persist_sync(@prices_exporter)
  rescue
    e ->
      Logger.error(
        "[CMC] Realtime exporter failed to export to Kafka. Reason: #{Exception.message(e)}"
      )
  end
  # Helper functions
  # GenServer callback driving the periodic loop: run one sync, then schedule
  # the next one `update_interval` seconds later.
  def handle_info(:sync, %{update_interval: update_interval} = state) do
    work()
    Process.send_after(self(), :sync, update_interval * 1000)
    {:noreply, state}
  end
  # Upserts the ticker snapshot into the LatestCoinmarketcapData table.
  # Bang function: raises (and crashes the sync) on an invalid changeset.
  defp store_latest_coinmarketcap_data!(%Ticker{} = ticker) do
    ticker.slug
    |> LatestCoinmarketcapData.get_or_build()
    |> LatestCoinmarketcapData.changeset(%{
      coinmarketcap_integer_id: ticker.id,
      market_cap_usd: ticker.market_cap_usd,
      name: ticker.name,
      price_usd: ticker.price_usd,
      price_btc: ticker.price_btc,
      rank: ticker.rank,
      volume_usd: ticker.volume_usd,
      available_supply: ticker.available_supply,
      total_supply: ticker.total_supply,
      symbol: ticker.symbol,
      percent_change_1h: ticker.percent_change_1h,
      percent_change_24h: ticker.percent_change_24h,
      percent_change_7d: ticker.percent_change_7d,
      update_time: DateTimeUtils.from_iso8601!(ticker.last_updated)
    })
    |> Repo.insert_or_update!()
  end
  # Creates the project when it is not yet known; for brand-new projects also
  # records the coinmarketcap source-slug mapping.
  defp insert_or_update_project(%Ticker{slug: slug, name: name, symbol: ticker}) do
    case find_or_init_project(%Project{name: name, slug: slug, ticker: ticker}) do
      {:not_existing_project, changeset} ->
        # If there is not id then the project was not returned from the DB
        # but initialized by the function
        project = changeset |> Repo.insert_or_update!()
        Project.SourceSlugMapping.create(%{
          source: "coinmarketcap",
          slug: project.slug,
          project_id: project.id
        })
      {:existing_project, changeset} ->
        Repo.insert_or_update!(changeset)
    end
  end
  # Looks the project up by slug; returns a tagged changeset telling the
  # caller whether it came from the DB or was freshly initialized.
  defp find_or_init_project(%Project{slug: slug} = project) do
    case Project.by_slug(slug) do
      nil ->
        {:not_existing_project, Project.changeset(project)}
      existing_project ->
        {:existing_project,
         Project.changeset(existing_project, %{
           slug: slug,
           ticker: project.ticker
         })}
    end
  end
  # How many of the top-ranked tickers get projects auto-created (default 25).
  defp top_projects_to_follow() do
    Config.get(:top_projects_to_follow, "25") |> String.to_integer()
  end
end
|
lib/sanbase/external_services/coinmarketcap/ticker_fetcher.ex
| 0.588534
| 0.538437
|
ticker_fetcher.ex
|
starcoder
|
defmodule DayEleven do
  @moduledoc """
  Advent of Code 2020, day 11 (part one): simulate the seating cellular
  automaton until it stabilises and count the occupied seats.
  """

  # Offsets of the 8 cells adjacent to a position.
  @neighbours [
    # three at the top
    {-1, -1},
    {0, -1},
    {1, -1},
    # two in the middle
    {-1, 0},
    {1, 0},
    # three at the bottom
    {-1, 1},
    {0, 1},
    {1, 1}
  ]

  # An occupied seat empties once at least this many neighbours are occupied.
  @max_count 4

  @doc "Parses `input` and returns the number of occupied seats once stable."
  def solve(input) do
    input
    |> build_map()
    |> step_until_stable()
    # |> pretty_print()
    |> count_occupied()
  end

  @doc """
  Parses the puzzle input into a map of `{x, y} => :empty | :occupied | :floor`.
  """
  def build_map(input) do
    input
    |> String.split("", trim: true)
    |> Enum.reduce({[], 0, 0}, fn
      "\n", {list, _x, y} ->
        {list, 0, y + 1}

      val, {list, x, y} ->
        {[{{x, y}, get_type(val)} | list], x + 1, y}
    end)
    |> elem(0)
    |> Map.new()
  end

  @doc "Applies `step/1` repeatedly until a generation no longer changes."
  def step_until_stable(map), do: step_until_stable(map, %{})

  defp step_until_stable(map, last) when map === last, do: map

  defp step_until_stable(map, _) do
    map
    |> step()
    |> step_until_stable(map)
  end

  @doc "Advances the automaton by one generation."
  def step(map) do
    map
    |> Enum.map(fn
      {pos, type} ->
        new_type = updated_type(type, get_neigbours_count(map, pos), @max_count)
        {pos, new_type}
    end)
    |> Map.new()
  end

  @doc "Prints the grid in puzzle notation and returns `map` for piping."
  def pretty_print(map) do
    # Row-major order: sort by y first, then x within a row.
    sorter = fn
      {ax, ay}, {bx, by} ->
        cond do
          ay < by -> true
          ay == by && ax < bx -> true
          true -> false
        end
    end

    map
    |> Enum.sort_by(fn {pos, _} -> pos end, sorter)
    |> Enum.map(fn
      {{0, _}, type} -> "\n" <> get_sym(type)
      {_, type} -> get_sym(type)
    end)
    |> Enum.join("")
    |> IO.puts()

    map
  end

  @doc "Transition rule for a single cell given its occupied-neighbour count."
  def updated_type(:empty, neighbours_count, _) when neighbours_count == 0,
    do: :occupied

  def updated_type(:occupied, neighbours_count, max_count) when neighbours_count >= max_count,
    do: :empty

  def updated_type(type, _, _), do: type

  @doc "Counts the cells currently holding an occupied seat."
  def count_occupied(map) do
    map
    |> Enum.reduce(0, fn
      {_, :occupied}, acc -> acc + 1
      {_, _}, acc -> acc
    end)
  end

  # Counts occupied seats among the 8 adjacent cells. Each neighbour is looked
  # up directly in the map (O(1) per cell) instead of filtering the whole grid
  # per cell, which made every generation quadratic in the number of cells.
  # The misspelled name ("neigbours") is kept: DayEleven.PartTwo's import list
  # refers to it.
  defp get_neigbours_count(map, pos) do
    pos
    |> neighbours()
    |> Enum.count(fn neighbour -> Map.get(map, neighbour) == :occupied end)
  end

  @doc "Maps a puzzle character to its cell type."
  def get_type("L"), do: :empty
  def get_type("#"), do: :occupied
  def get_type("."), do: :floor

  # which is more idiomatic?
  @doc "Maps a cell type back to its puzzle character."
  def get_sym(type) do
    case type do
      :floor -> "."
      :occupied -> "#"
      :empty -> "L"
    end
  end

  # Absolute coordinates of the 8 cells adjacent to `{x, y}`.
  defp neighbours({x, y}) do
    @neighbours
    |> Enum.map(fn
      {dx, dy} -> {x + dx, y + dy}
    end)
  end
end
defmodule DayEleven.PartTwo do
  @moduledoc """
  Advent of Code 2020, day 11 (part two): seats look past floor cells to the
  first VISIBLE seat in each of the 8 directions, and an occupied seat only
  empties at 5 or more visible occupied seats.
  """

  # `get_neigbours_count: 2` used to be listed in `:except` as well, but that
  # function is private in DayEleven and therefore never imported — excepting
  # it was a misleading no-op, so it is dropped. `step_until_stable/1` and
  # `step/1` must stay excepted because this module defines its own.
  import DayEleven, except: [step_until_stable: 1, step: 1]

  # Directions in which a seat looks for the first visible seat.
  @neighbour_dirs [
    # three at the top
    {-1, -1},
    {0, -1},
    {1, -1},
    # two in the middle
    {-1, 0},
    {1, 0},
    # three at the bottom
    {-1, 1},
    {0, 1},
    {1, 1}
  ]

  # Part two's higher crowding tolerance.
  @max_count 5

  @doc "Parses `input` and returns the number of occupied seats once stable."
  def solve(input) do
    input
    |> build_map()
    |> step_until_stable()
    # |> pretty_print()
    |> count_occupied()
  end

  @doc "Applies `step/1` repeatedly until a generation no longer changes."
  def step_until_stable(map), do: step_until_stable(map, %{})

  defp step_until_stable(map, last) when map === last, do: map

  defp step_until_stable(map, _) do
    map
    |> step()
    |> step_until_stable(map)
  end

  @doc "Advances the automaton by one generation using visible-seat counts."
  def step(map) do
    map
    |> Enum.map(fn
      {pos, type} ->
        new_type = updated_type(type, get_neigbours_count(map, pos), @max_count)
        {pos, new_type}
    end)
    |> Map.new()
  end

  @doc "Counts occupied seats visible from `pos` along the 8 directions."
  def get_neigbours_count(map, pos) do
    @neighbour_dirs
    |> Enum.map(fn dir -> walk_until_seat(dir, map, pos) end)
    |> Enum.count(&(&1 == :occupied))
  end

  @doc """
  Walks from a position along `dir` until the first seat (`:empty` or
  `:occupied`) is found, skipping `:floor`. Positions outside the grid default
  to `:empty`, which terminates the walk at the edge.
  """
  def walk_until_seat({dx, dy} = dir, map, {x, y}) do
    target_pos = {x + dx, y + dy}

    case Map.get(map, target_pos, :empty) do
      :empty -> :empty
      :occupied -> :occupied
      :floor -> walk_until_seat(dir, map, target_pos)
    end
  end
end
|
adv_2020/lib/day_11.ex
| 0.563018
| 0.620363
|
day_11.ex
|
starcoder
|
defmodule FaktoryWorker.Job do
  @moduledoc """
  The `FaktoryWorker.Job` module is used to perform jobs in the background by sending to and fetching from Faktory.

  To build a worker you must `use` the job module within a module in your application.

  ```elixir
  defmodule MyApp.SomeWorker do
    use FaktoryWorker.Job
  end
  ```

  This will bring in all of the functionality required to perform jobs via Faktory. The `MyApp.SomeWorker` will now
  have a `perform_async/2` function available for sending jobs to Faktory. Before this function can be called, a
  `perform` function must be defined. This function must accept the same number of arguments that are
  being sent to Faktory via the `perform_async/2` function.

  ```elixir
  defmodule MyApp.SomeWorker do
    use FaktoryWorker.Job

    def perform(job_arg) do
      do_some_work(job_arg)
    end
  end
  ```

  With this in place it is now possible to send work to Faktory and the `MyApp.SomeWorker` will fetch the job and call
  the `perform/1` function with the same job arguments that we sent.

  ```elixir
  > MyApp.SomeWorker.perform_async("job arg")
  :ok
  ```

  It is also possible to send multiple arguments for a single job by passing in a list of values to the `perform_async/2`
  function.

  ```elixir
  > MyApp.SomeWorker.perform_async(["job arg1", "job arg2"])
  :ok
  ```

  In order for the job to be performed correctly, a `perform/2` function needs to be defined within the `MyApp.SomeWorker`
  module.

  ```elixir
  defmodule MyApp.SomeWorker do
    use FaktoryWorker.Job

    def perform(job_arg1, job_arg2) do
      do_some_work(job_arg1, job_arg2)
    end
  end
  ```

  When defining `perform` functions, they must always accept one argument for each item in the list of values passed into
  `perform_async/2`.

  ## Synchronous job pushing

  Previous version used Broadway to send jobs and `:skip_pipeline` parameter was used to do it synchronously.
  `:skip_pipeline` is not supported anymore.
  Since Batch operations is a feature of Faktory Enterprise this library now sends any single job synchronously
  and makes HTTP call to faktory server (see `FaktoryWorker.Batch`).

  ## Worker Configuration

  A list of options can be specified when using the the `FaktoryWorker.Job` module. These options will be used when sending
  jobs to faktory and will apply to all jobs sent with the `perform_async/2` function.
  For a full list of configuration options see the [Worker Configuration](configuration.html#worker-configuration) documentation.

  ## Overriding Worker Configuration

  The `perform_async/2` function accepts a keyword list as its second argument. This list has the same options
  available that the `FaktoryWorker.Job` module accepts. Any options passed into this function override the options
  that have been set on the worker module.
  For a full list of configuration options see the [Worker Configuration](configuration.html#worker-configuration) documentation.

  ## Data Serialization

  Faktory expects all values to be serialized in JSON format. FaktoryWorker uses `Jason` for serialization. This
  means only values that implement the `Jason.Encoder` protocol are valid when calling the `perform_async/2` function.
  """
  alias FaktoryWorker.{Random, Telemetry, Sandbox}

  # Look at supporting the following optional fields when pushing a job
  # priority
  # backtrace
  # created_at
  @optional_job_fields [:jobtype, :queue, :custom, :retry, :reserve_for, :at]

  # Milliseconds to wait for the Faktory server to acknowledge a push.
  @default_push_timeout 5000

  defmacro __using__(using_opts \\ []) do
    alias FaktoryWorker.Job

    quote do
      # Sends `job` to Faktory. Options given here are merged over the options
      # the worker module was `use`d with.
      def perform_async(job, opts \\ []) do
        opts = Keyword.merge(unquote(using_opts), opts)

        __MODULE__
        |> Job.build_payload(job, opts)
        |> Job.perform_async(opts)
      end
    end
  end

  @doc false
  # Builds the Faktory payload map for `job` (a list of args). Optional fields
  # from `opts` are validated and appended; an invalid option turns the whole
  # payload into `{:error, message}` which `push/2` passes back to the caller.
  def build_payload(worker_module, job, opts) when is_list(job) do
    %{
      jid: Random.job_id(),
      jobtype: job_type_for_module(worker_module),
      args: normalize_job_args(job)
    }
    |> append_optional_fields(opts)
  end

  # Single-argument convenience: wrap it in a list.
  def build_payload(worker_module, job, opts) do
    build_payload(worker_module, [job], opts)
  end

  @doc false
  # In sandbox mode (tests) the job is recorded locally instead of pushed.
  def perform_async(payload, opts) do
    if Sandbox.active?() do
      Sandbox.enqueue_job(
        String.to_existing_atom("Elixir." <> payload.jobtype),
        payload.args,
        opts
      )

      {:ok, payload}
    else
      opts
      |> faktory_name()
      |> push(payload)
    end
  end

  @doc false
  # Structs are flattened to plain maps so they serialize via Jason.
  def normalize_job_args(args) when is_list(args) do
    Enum.map(args, fn
      %_{} = arg -> Map.from_struct(arg)
      arg -> arg
    end)
  end

  @doc false
  # A payload that failed option validation flows through unchanged.
  def push(_, {:error, _} = invalid_payload), do: invalid_payload

  def push(faktory_name, job) do
    {:push, job}
    |> FaktoryWorker.send_command(faktory_name: faktory_name, timeout: @default_push_timeout)
    |> handle_push_result(job)
  end

  # Folds each configured optional field into the payload, halting with an
  # `{:error, message}` tuple on the first invalid value.
  defp append_optional_fields(args, opts) do
    Enum.reduce_while(@optional_job_fields, args, fn field, args ->
      case Keyword.get(opts, field) do
        nil ->
          {:cont, args}

        value ->
          if valid_field_value?(field, value) do
            value = format_field_value(value)
            {:cont, Map.put(args, field, value)}
          else
            {:halt, {:error, field_error_message(field, value)}}
          end
      end
    end)
  end

  # Renamed from `is_valid_field_value?/2`: the `is_` prefix is reserved for
  # guard-safe macros by Elixir naming conventions; plain predicates end in `?`.
  defp valid_field_value?(:jobtype, value), do: is_binary(value)
  defp valid_field_value?(:queue, value), do: is_binary(value)
  defp valid_field_value?(:custom, value), do: is_map(value)
  defp valid_field_value?(:retry, value), do: is_integer(value)
  # Faktory requires reservations of at least 60 seconds.
  defp valid_field_value?(:reserve_for, value), do: is_integer(value) and value >= 60
  defp valid_field_value?(:at, %DateTime{}), do: true
  defp valid_field_value?(_, _), do: false

  # Scheduled-at datetimes are sent to Faktory in ISO8601 form.
  defp format_field_value(%DateTime{} = date_time) do
    DateTime.to_iso8601(date_time)
  end

  defp format_field_value(value), do: value

  defp field_error_message(field, value) do
    "The field '#{Atom.to_string(field)}' has an invalid value '#{inspect(value)}'"
  end

  defp faktory_name(opts) do
    Keyword.get(opts, :faktory_name, FaktoryWorker)
  end

  # Emits a telemetry event for successes and timeouts; other errors pass
  # through untouched.
  defp handle_push_result({:ok, _}, job) do
    Telemetry.execute(:push, :ok, job)
    {:ok, job}
  end

  defp handle_push_result({:error, :timeout}, job) do
    Telemetry.execute(:push, {:error, :timeout}, job)
    {:error, :timeout}
  end

  defp handle_push_result({:error, reason}, _) do
    {:error, reason}
  end

  # "Elixir.MyApp.SomeWorker" -> "MyApp.SomeWorker", the jobtype Faktory sees.
  defp job_type_for_module(module) do
    module
    |> to_string()
    |> String.trim_leading("Elixir.")
  end
end
|
lib/faktory_worker/job.ex
| 0.765111
| 0.850841
|
job.ex
|
starcoder
|
defmodule OpenExchangeRates do
  @moduledoc """
  This module contains all the helper methods for converting currencies.
  """
  use Application
  require Logger

  @doc false
  def start(_type, _args) do
    # NOTE(review): `Supervisor.Spec` is deprecated in modern Elixir.
    # Migrating to tuple child specs requires `OpenExchangeRates.Cache`
    # to expose `child_spec/1`, which cannot be confirmed from this file,
    # so the old-style spec is kept.
    import Supervisor.Spec, warn: false

    configuration_status = check_configuration()
    children = [worker(OpenExchangeRates.Cache, [configuration_status])]
    opts = [strategy: :one_for_one, name: OpenExchangeRates.Supervisor]
    Supervisor.start_link(children, opts)
  end

  @doc """
  Returns a list of all available currencies.

  ## example

      iex> OpenExchangeRates.available_currencies |> Enum.take(10)
      ["NAD", "AWG", "INR", "LAK", "QAR", "MOP", "BOB", "SDG", "TMT", "BRL"]
  """
  @spec available_currencies() :: [String.t()]
  def available_currencies, do: OpenExchangeRates.Cache.currencies()

  @doc """
  Returns the age of the cache in seconds.

  ## example

      OpenExchangeRates.cache_age
      25341
  """
  @spec cache_age() :: integer
  def cache_age, do: OpenExchangeRates.Cache.cache_age()

  @doc """
  Will convert a price from one currency to another.

  ## example

      iex> OpenExchangeRates.convert(100.00, :EUR, :GBP)
      {:ok, 84.81186252771619}
  """
  @spec convert(number, String.t() | atom, String.t() | atom) ::
          {:ok, float} | {:error, String.t()}
  def convert(value, from, to) when is_integer(value), do: convert(value / 1.0, from, to)

  def convert(value, from, to) when is_float(value) do
    with {:ok, rate_from} <- OpenExchangeRates.Cache.rate_for_currency(from),
         {:ok, rate_to} <- OpenExchangeRates.Cache.rate_for_currency(to) do
      # Rates are expressed against USD, so convert via the USD value.
      rate_usd = value / rate_from
      {:ok, rate_usd * rate_to}
    end
  end

  @doc """
  Bang version of convert/3.
  Will either return the result or raise when there was an error.

  ## examples

      iex> OpenExchangeRates.convert!(100.00, :EUR, :GBP)
      84.81186252771619

      iex> OpenExchangeRates.convert!(100, :EUR, :GBP)
      84.81186252771619
  """
  @spec convert!(number, String.t() | atom, String.t() | atom) :: float | no_return
  def convert!(value, from, to) when is_integer(value), do: convert!(value / 1.0, from, to)

  def convert!(value, from, to) when is_float(value) do
    case convert(value, from, to) do
      {:ok, result} -> result
      {:error, message} -> raise(message)
    end
  end

  @doc """
  Will convert cents from one currency to another.

  ## example

      iex> OpenExchangeRates.convert_cents(100, :GBP, :AUD)
      {:ok, 172}
  """
  @spec convert_cents(integer, String.t() | atom, String.t() | atom) ::
          {:ok, integer} | {:error, String.t()}
  def convert_cents(value, from, to) when is_integer(value) do
    case convert(value / 100, from, to) do
      # Round back to whole cents after converting the fractional amount.
      {:ok, result} -> {:ok, Kernel.round(result * 100)}
      error -> error
    end
  end

  @doc """
  Bang version of convert_cents/3.
  Will either return the result or raise when there was an error.

  ## example

      iex> OpenExchangeRates.convert_cents!(100, :GBP, :AUD)
      172
  """
  @spec convert_cents!(integer, String.t() | atom, String.t() | atom) :: integer | no_return
  def convert_cents!(value, from, to) when is_integer(value) do
    case convert_cents(value, from, to) do
      {:ok, result} -> result
      {:error, message} -> raise(message)
    end
  end

  @doc """
  Converts cents and returns a properly formatted string for the given currency.

  # Examples

      iex> OpenExchangeRates.convert_cents_and_format(1234567, :EUR, :CAD)
      {:ok, "$18,026.07"}

      iex> OpenExchangeRates.convert_cents_and_format(1234567, :EUR, :USD)
      {:ok, "$13,687"}

      iex> OpenExchangeRates.convert_cents_and_format(1234567, :USD, :EUR)
      {:ok, "β¬11.135,79"}

      iex> OpenExchangeRates.convert_cents_and_format(1234567, :EUR, :NOK)
      {:ok, "116.495,78kr"}
  """
  @spec convert_cents_and_format(integer, atom | String.t(), atom | String.t()) ::
          {:ok, String.t()} | {:error, String.t()}
  def convert_cents_and_format(value, from, to) when is_integer(value) do
    case convert_cents(value, from, to) do
      {:ok, result} -> {:ok, CurrencyFormatter.format(result, to)}
      error -> error
    end
  end

  @doc """
  Bang version of convert_cents_and_format/3.
  Will either return the result or raise when there was an error.

  # example

      iex> OpenExchangeRates.convert_cents_and_format!(1234567, :EUR, :USD)
      "$13,687"
  """
  @spec convert_cents_and_format!(integer, atom | String.t(), atom | String.t()) ::
          String.t() | no_return
  def convert_cents_and_format!(value, from, to) when is_integer(value) do
    case convert_cents_and_format(value, from, to) do
      {:ok, result} -> result
      {:error, message} -> raise(message)
    end
  end

  @doc """
  Converts a price and returns a properly formatted string for the given currency.

  # Examples

      iex> OpenExchangeRates.convert_and_format(1234, :EUR, :AUD)
      {:ok, "$1,795.10"}
  """
  @spec convert_and_format(number, atom | String.t(), atom | String.t()) ::
          {:ok, String.t()} | {:error, String.t()}
  def convert_and_format(value, from, to),
    do: convert_cents_and_format(Kernel.round(value * 100), from, to)

  @doc """
  Bang version of convert_and_format/3.
  Will either return the result or raise when there was an error.

  # example

      iex> OpenExchangeRates.convert_and_format!(1234567, :EUR, :USD)
      "$1,368,699.56"
  """
  @spec convert_and_format!(number, atom | String.t(), atom | String.t()) ::
          String.t() | no_return
  # Guard widened from `is_integer/1` to `is_number/1`: the spec and the
  # non-bang `convert_and_format/3` both accept floats, so the bang
  # variant must too.
  def convert_and_format!(value, from, to) when is_number(value) do
    case convert_and_format(value, from, to) do
      {:ok, result} -> result
      {:error, message} -> raise(message)
    end
  end

  @doc """
  Get the conversion rate between two currencies.

  ## Example

      iex> OpenExchangeRates.conversion_rate(:EUR, :GBP)
      {:ok, 0.8481186252771619}
  """
  @spec conversion_rate(String.t() | atom, String.t() | atom) ::
          {:ok, float} | {:error, String.t()}
  # `convert/3` already accepts both strings and atoms, so currencies are
  # passed through unchanged. The previous binary clause converted
  # caller-supplied strings with `String.to_atom/1`, which risks atom-table
  # exhaustion on untrusted input.
  def conversion_rate(from, to), do: convert(1.0, from, to)

  # Inspects the application environment once at startup and reports how
  # the cache worker should behave: updates disabled, API key missing
  # (logged loudly), or fully configured.
  defp check_configuration do
    cond do
      Application.get_env(:open_exchange_rates, :auto_update) == false ->
        :disable_updater

      Application.get_env(:open_exchange_rates, :app_id) == nil ->
        config_error_message()
        :missing_key

      true ->
        :ok
    end
  end

  # Logs setup instructions when no App ID is configured; the library then
  # falls back to (possibly outdated) cached exchange rate data.
  defp config_error_message do
    Logger.warn(~s[
    OpenExchangeRates :
    No App ID provided.
    Please check if your config.exs contains the following :
    config :open_exchange_rates,
    app_id: "MY_OPENEXCHANGE_RATES_ORG_API_KEY",
    cache_time_in_minutes: 1440,
    cache_file: File.cwd! <> "/priv/exchange_rate_cache.json",
    auto_update: true
    If you need an api key please sign up at https://openexchangerates.org/signup
    This module will continue to function but will use (outdated) cached exchange rates data...
    ])
  end
end
|
lib/open_exchange_rates.ex
| 0.88842
| 0.514583
|
open_exchange_rates.ex
|
starcoder
|
defmodule IpAccessControl do
  @behaviour Plug
  @moduledoc """
  This Plug restricts requests so that they must come from the range of IP
  addresses specified in the pipeline config. A request's IP address is deemed
  to be present as `%Plug.Conn{remote_ip: _}`.
  If the request IP is not allowed, the specified response code and body will
  be added to the Plug.Conn and the chain will be halted. Otherwise, the plug
  chain will continue.
  Include this module in your plug chain with its configuration.
  ## Configuration
  There are two main configuration options:
  - the allow list of IP addresses or CIDR ranges, which may be configured as
  a static list or as a function returning the list of IP addresses or CIDR
  ranges; and
  - a Plug (either module or function) to call when the remote IP address is not
  allowed.
  Note that each item in the allow list must be tested in turn, so a smaller
  list will outperform a larger list. Future versions of this Plug may include
  a way of caching results.
  ### Allow List Configuration
  The list of permitted IP addresses or CIDR ranges may be specified using
  _either_ the `module` option described below _or_ the `allow` parameter.
  The `allow` parameter must be one of the following:
  - a list of IP addresses or CIDR ranges, or
  - a 0-arity function that returns a list of IP addresses or CIDR ranges, or
  - a `{module, function}` tuple to a 0-arity function that returns a list of IP
  addresses or CIDR ranges.
  Formats supported include:
  - IPv4 string format;
  - IPv6 string format;
  - CIDRv4 string format; or
  - CIDRv6 string format.
  Examples:
      # Include after a plug which puts the request IP to the remote_ip
      # attribute on the Plug.Conn.
      plug IpAccessControl,
        allow: ["1.1.1.0/31", "1.1.0.0/24", "127.0.0.0/8"]

      plug IpAccessControl,
        allow: fn -> ["1.1.1.0/31", "1.1.0.0/24", "127.0.0.0/8"] end
  ### Blocked Action Configuration
  The action to take when the remote IP address is not allowed may be specified
  using the `module` option described below or the `on_blocked` option. If not
  specified, a default `on_blocked` implementation will be provided that uses
  `response_code_on_blocked` and `response_body_on_blocked`.
  When the remote IP address is blocked, the Plug pipeline is halted.
  - `on_blocked`: A Plug that will be called when the IP address is not allowed.
  It will be passed the options provided to the IpAccessControl plug.
  - `response_code_on_blocked`: The HTTP status code assigned to the response
  when the requestβs IP address is not allowed. Defaults to `401` if not
  specified.
  - `response_body_on_blocked`: The body assigned to the response when the
  request's IP address is not allowed. Defaults to `"Not Authenticated"` if
  not specified.
  Example:
      plug IpAccessControl,
        allow: ["1.1.1.0/31", "1.1.0.0/24", "127.0.0.0/8"],
        on_blocked: fn conn, opts ->
          Conn.send_resp(
            conn,
            opts[:response_code_on_blocked],
            String.reverse(opts[:response_body_on_blocked])
          )
        end
  ### Module Configuration
  A single configuration option can be provided as `module` that refers to
  a module that implements one or both of the functions `ip_access_allow_list/0`
  (this is a function that will be used for `allow`) and
  `ip_access_on_blocked/2` (this is a Plug function used for `on_blocked`).
  If provided, the configurations available through `module` will take priority
  over functions or values specified in `allow` or `on_blocked`.
  The IpAccessControl plug can be configured with any of the following options.
  Example:
      plug IpAccessControl, module: EmployeeAccess
  ## Installation
  Add `ip_access_control` to your dependencies. If your application is
  running behind a proxy, you will probably need to also include `remote_ip`
  as a dependency.
      def deps do
        {:ip_access_control, "~> 1.0"},
        {:remote_ip, "~> 1.0"} # Required if behind a proxy
      end
  """
  alias Plug.Conn

  # A pre-parsed list of CIDR blocks as produced by `parse_allow_list/1`.
  @typep ip_block_list :: BitwiseIp.Blocks.t()

  @doc "Initialize the plug with options."
  @spec init(IpAccessControl.Options.input_config()) :: IpAccessControl.Options.config()
  def init(options) do
    IpAccessControl.Options.pack(options)
  end

  @doc """
  Plug callback: passes the connection through when its `remote_ip` is in
  the configured allow list; otherwise invokes the configured `on_blocked`
  plug and halts the pipeline.
  """
  @spec call(Conn.t(), IpAccessControl.Options.config()) :: Conn.t()
  def call(conn, options) do
    options = IpAccessControl.Options.unpack(options)

    if allowed?(conn, options[:allow]) do
      conn
    else
      # `on_blocked` may be a 2-arity plug function; apply it, then halt.
      options[:on_blocked]
      |> apply([conn, options])
      |> Conn.halt()
    end
  end

  @doc """
  Default `on_blocked` implementation: responds with the configured
  `:response_code_on_blocked` status and `:response_body_on_blocked` body.
  """
  @spec ip_access_on_blocked(Conn.t(), IpAccessControl.Options.config()) :: Conn.t()
  def ip_access_on_blocked(conn, options) do
    Conn.send_resp(conn, options[:response_code_on_blocked], options[:response_body_on_blocked])
  end

  @doc """
  Returns `true` if the remote IP is in the given allow list. The remote IP
  address can be provided either as a Plug.Conn.t(), an IP address tuple, or
  an IP address string.
  If the remote IP is provided as a Plug.Conn.t(), the remote IP will be
  pulled from the Plug.Conn.t()'s `remote_ip`. If the remote IP is provided
  as a string, this function will return `false` if the IP address cannot be
  parsed.
  If neither the remote_ip nor allow list are provided, always returns
  `false`.
  """
  @spec allowed?(
          Conn.t() | binary() | :inet.ip_address() | nil | BitwiseIp.t(),
          [binary(), ...] | (() -> [binary(), ...]) | ip_block_list() | nil
        ) ::
          boolean
  # NOTE: clause order matters below — empty/nil short-circuits come first,
  # then the function/Conn/string/tuple normalizing clauses, and finally the
  # %BitwiseIp{} clause that performs the actual membership test.
  def allowed?(_, []) do
    false
  end

  def allowed?(_, nil) do
    false
  end

  def allowed?(nil, _) do
    false
  end

  def allowed?("", _) do
    false
  end

  # Lazily-evaluated allow list: call the 0-arity function and retry.
  def allowed?(remote_ip, allow_fn) when is_function(allow_fn, 0) do
    allowed?(remote_ip, allow_fn.())
  end

  # Extract the remote IP from a connection and retry.
  def allowed?(%Conn{remote_ip: remote_ip}, allow_list) do
    allowed?(remote_ip, allow_list)
  end

  # String IP: parse it first; an unparseable string is simply not allowed.
  def allowed?(remote_ip, allow_list) when is_binary(remote_ip) do
    case BitwiseIp.parse(remote_ip) do
      {:ok, remote_ip} -> allowed?(remote_ip, allow_list)
      _ -> false
    end
  end

  # :inet-style tuple: encode into a BitwiseIp and retry.
  def allowed?(remote_ip, allow_list) when is_tuple(remote_ip) do
    allowed?(BitwiseIp.encode(remote_ip), allow_list)
  end

  # Terminal clause: membership test of the encoded IP against the blocks.
  def allowed?(%BitwiseIp{} = remote_ip, allow_list) do
    BitwiseIp.Blocks.member?(parse_allow_list(allow_list), remote_ip)
  end

  def allowed?(_, _) do
    false
  end

  @doc false
  # Normalizes an allow list into parsed %BitwiseIp.Block{} structs;
  # already-parsed entries are passed through. Note this re-parses string
  # entries on every call (see the caching note in the moduledoc).
  def parse_allow_list(list) do
    Enum.map(list, fn item ->
      case item do
        %BitwiseIp.Block{} -> item
        _ -> BitwiseIp.Block.parse!(item)
      end
    end)
  end
end
|
lib/ip_access_control.ex
| 0.890648
| 0.494263
|
ip_access_control.ex
|
starcoder
|
defmodule Pbkdf2 do
  @moduledoc """
  Elixir wrapper for the Pbkdf2 password hashing function.
  For a lower-level API, see Pbkdf2.Base.
  ## Configuration
  The following parameter can be set in the config file:
  * rounds - computational cost
  * the number of rounds
  * 160_000 is the default
  If you are hashing passwords in your tests, it can be useful to add
  the following to the `config/test.exs` file:
      config :pbkdf2_elixir,
        rounds: 1
  NB. do not use this value in production.
  ## Pbkdf2
  Pbkdf2 is a password-based key derivation function
  that uses a password, a variable-length salt and an iteration
  count and applies a pseudorandom function to these to
  produce a key.
  The original implementation used SHA-1 as the pseudorandom function,
  but this version uses HMAC-SHA-512, the default, or HMAC-SHA-256.
  ## Warning
  It is recommended that you set a maximum length for the password
  when using Pbkdf2. This maximum length should not prevent valid users from setting
  long passwords. It is instead needed to combat denial-of-service attacks.
  As an example, Django sets the maximum length to 4096 bytes.
  For more information, see [this link](https://www.djangoproject.com/weblog/2013/sep/15/security/).
  """
  use Comeonin
  alias Pbkdf2.{Base, Tools}

  @doc """
  Generates a random salt.
  This function takes one optional argument - a keyword list (see below
  for options) or an integer with the salt length (in bytes).
  ## Options
  The following options are available:
  * `:salt_len` - the length of the random salt
    * the default is 16 bytes
    * for more information, see the 'Salt length recommendations' section below
  * `:format` - the output format of the salt
    * the default is `:modular` (modular crypt format)
    * the other available options are `:django` and `:hex`
  ## Examples
  Here is an example of generating a salt with the default salt length and format:
      Pbkdf2.gen_salt()
  To generate a different length salt:
      Pbkdf2.gen_salt(salt_len: 32)
  And to generate a salt in django output format:
      Pbkdf2.gen_salt(format: :django)
  ## Salt length recommendations
  In most cases, 16 bytes is a suitable length for the salt.
  It is not recommended to use a salt that is shorter than this
  (see below for details and references).
  According to the [Pbkdf2 standard](https://tools.ietf.org/html/rfc8018),
  the salt should be at least 8 bytes long, but according to [NIST
  recommendations](https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-132.pdf),
  the minimum salt length should be 16 bytes.
  """
  @spec gen_salt(keyword | integer) :: binary
  def gen_salt(opts \\ [])

  # Integer argument is shorthand for `salt_len:`.
  def gen_salt(salt_len) when is_integer(salt_len) do
    gen_salt(salt_len: salt_len)
  end

  def gen_salt(opts) do
    salt_len = Keyword.get(opts, :salt_len, 16)
    # Raises (via Tools) when the requested length is out of bounds.
    Tools.check_salt_length(salt_len)

    case opts[:format] do
      # Django stores the salt as printable characters rather than raw bytes.
      :django -> Tools.get_random_string(salt_len)
      _ -> :crypto.strong_rand_bytes(salt_len)
    end
  end

  @doc """
  Hashes a password with a randomly generated salt.
  ## Options
  In addition to the options for `gen_salt/1` (`:salt_len` and `:format`),
  this function also takes options that are then passed on to the
  `hash_password` function in the `Pbkdf2.Base` module.
  See the documentation for `Pbkdf2.Base.hash_password/3` for further details.
  ## Examples
  The following examples show how to hash a password with a randomly-generated
  salt and then verify a password:
      iex> hash = Pbkdf2.hash_pwd_salt("password")
      ...> Pbkdf2.verify_pass("password", hash)
      true

      iex> hash = Pbkdf2.hash_pwd_salt("password")
      ...> Pbkdf2.verify_pass("incorrect", hash)
      false
  The next examples show how to use some of the various available options:
      iex> hash = Pbkdf2.hash_pwd_salt("password", rounds: 100_000)
      ...> Pbkdf2.verify_pass("password", hash)
      true

      iex> hash = Pbkdf2.hash_pwd_salt("password", digest: :sha256)
      ...> Pbkdf2.verify_pass("password", hash)
      true

      iex> hash = Pbkdf2.hash_pwd_salt("password", digest: :sha256, format: :django)
      ...> Pbkdf2.verify_pass("password", hash)
      true
  """
  @impl true
  def hash_pwd_salt(password, opts \\ []) do
    Base.hash_password(password, gen_salt(opts), opts)
  end

  @doc """
  Verifies a password by hashing the password and comparing the hashed value
  with a stored hash.
  See the documentation for `hash_pwd_salt/2` for examples of using this function.
  """
  @impl true
  def verify_pass(password, stored_hash) do
    # Both supported stored formats split on "$" into exactly four
    # segments: algorithm, rounds, salt, hash (`trim: true` drops the
    # empty leading segment of the modular "$pbkdf2..." form). Any other
    # shape raises a MatchError.
    [alg, rounds, salt, hash] = String.split(stored_hash, "$", trim: true)
    # The digest is inferred from the algorithm segment; anything that is
    # not sha512 is treated as sha256.
    digest = if alg =~ "sha512", do: :sha512, else: :sha256
    Base.verify_pass(password, hash, salt, digest, rounds, output(stored_hash))
  end

  # Determines the stored-hash format from its prefix: modular crypt
  # format starts with "$pbkdf2", django format with "pbkdf2". Any other
  # prefix raises a FunctionClauseError.
  defp output("$pbkdf2" <> _), do: :modular
  defp output("pbkdf2" <> _), do: :django
end
|
lib/pbkdf2.ex
| 0.898755
| 0.599133
|
pbkdf2.ex
|
starcoder
|
defmodule Godfist.Summoner do
  @moduledoc """
  Module to interact with the Summoner endpoint.
  """

  alias Godfist.LeagueRates

  @endpoint "/lol/summoner/v3/summoners"

  @doc """
  Get a summoner's data by account id.
  ## Example
  ```elixir
  iex> Godfist.Summoner.by_id(:lan, id)
  ```
  """
  @spec by_id(atom, integer) :: {:ok, map} | {:error, String.t()}
  def by_id(region, account_id) do
    LeagueRates.handle_rate(region, "#{@endpoint}/by-account/#{account_id}", :other)
  end

  @doc """
  Get a summoner's data by account name.
  ## Example
  ```elixir
  iex> Godfist.Summoner.by_name(:oce, name)
  ```
  """
  @spec by_name(atom, String.t()) :: {:ok, map} | {:error, String.t()}
  def by_name(region, name) do
    LeagueRates.handle_rate(region, "#{@endpoint}/by-name/#{name}", :other)
  end

  @doc """
  Get a summoner's data by summoner id.
  Summoner id and Account id are not the same. They're given as `id` and
  `accountId` respectively.
  ## Example
  ```elixir
  iex> Godfist.Summoner.by_summid(:jp, summonerid)
  ```
  """
  @spec by_summid(atom, integer) :: {:ok, map} | {:error, String.t()}
  def by_summid(region, summoner_id) do
    # Same request shape as `by_id/2`, but keyed on the summoner id
    # instead of the account id.
    LeagueRates.handle_rate(region, "#{@endpoint}/#{summoner_id}", :other)
  end

  @doc """
  Get the account id of a player directly by name.
  ## Example
  ```elixir
  iex> Godfist.Summoner.get_id(:jp, name)
  ```
  """
  @spec get_id(atom, String.t()) :: {:ok, integer} | {:error, String.t()}
  def get_id(region, name) do
    # Look the summoner up by name, then extract just the account id.
    response = LeagueRates.handle_rate(region, "#{@endpoint}/by-name/#{name}", :other)

    case response do
      {:ok, "Not found"} -> {:error, "Summoner not found"}
      {:ok, summ} -> {:ok, summ["accountId"]}
      {:error, reason} -> {:error, reason}
    end
  end
end
|
lib/godfist/requests/summoner.ex
| 0.827131
| 0.733667
|
summoner.ex
|
starcoder
|
defmodule Streams do
  @moduledoc """
  Various implementations of the Caffeine Stream library.
  The take/1 function from Caffeine is the best way to retrieve elements.
  """

  alias Caffeine.Stream
  alias Caffeine.Element

  @doc """
  Returns a construct that repeats given number an infinite amount of times.
  ## Example
      iex> Streams.repeat(3.14) |> Caffeine.Stream.take(5)
      [3.14, 3.14, 3.14, 3.14, 3.14]
  """
  @spec repeat(number) :: Element.t()
  def repeat(n \\ 3.14) when is_number(n) do
    Stream.construct(n, fn -> repeat(n) end)
  end

  @doc """
  Returns a construct that streams consecutive natural numbers.
  ## Example
      iex> Streams.natural() |> Caffeine.Stream.take(5)
      [0, 1, 2, 3, 4]
  """
  @spec natural() :: Element.t()
  def natural, do: integers_from(0)

  @doc """
  Returns a construct that streams consecutive integers
  starting from the given number.
  ## Example
      iex> Streams.integers_from(5) |> Caffeine.Stream.take(5)
      [5, 6, 7, 8, 9]
  """
  @spec integers_from(integer) :: Element.t()
  def integers_from(n) when is_integer(n) do
    Stream.construct(n, fn -> integers_from(n + 1) end)
  end

  @doc """
  Returns a construct that streams numbers by optional `step`
  from `start` to `stop`. The stream ends when `stop` is met.
  ## Examples
      iex> Streams.range(0, 5, 1.8) |> Caffeine.Stream.take(5)
      [0, 1.8, 3.6, 5]

      iex> Streams.range(2, -4) |> Caffeine.Stream.take(5)
      [2, 1, 0, -1, -2]
  """
  @spec range(number, number, number) :: Element.t() | []
  def range(start, stop, step \\ 1)

  def range(start, stop, step) when is_number(start) and is_number(stop) and is_number(step) do
    if start < stop do
      ascending(start, stop, step)
    else
      descending(start, stop, step)
    end
  end

  # Upward-counting range: once the bound is reached or passed, emit the
  # bound itself as the final element and terminate.
  defp ascending(current, stop, _step) when current >= stop,
    do: Stream.construct(stop, fn -> [] end)

  defp ascending(current, stop, step),
    do: Stream.construct(current, fn -> ascending(current + step, stop, step) end)

  # Downward-counting range: mirror image of `ascending/3`.
  defp descending(current, stop, _step) when current <= stop,
    do: Stream.construct(stop, fn -> [] end)

  defp descending(current, stop, step),
    do: Stream.construct(current, fn -> descending(current - step, stop, step) end)

  @doc """
  Returns a construct that streams elements from the given list until it gets
  depleated.
  ## Examples
      iex> Streams.list_stream(Enum.to_list(0..3)) |> Caffeine.Stream.take(5)
      [0, 1, 2, 3]

      iex> Streams.list_stream(["I'm", "inside", "of", "a", "list", "!"])
      ...> |> Caffeine.Stream.take(5)
      ["I'm", "inside", "of", "a", "list"]
  """
  @spec list_stream(list) :: Element.t() | []
  def list_stream([]), do: []

  def list_stream([head | tail]),
    do: Stream.construct(head, fn -> list_stream(tail) end)

  @doc """
  Returns a construct that streams lines from the given file `path` until :eof.
  """
  @spec stream_lines(String.t()) :: Element.t() | []
  def stream_lines(path) do
    {:ok, device} = File.open(path, [:read])
    stream_lines(device, IO.read(device, :line))
  end

  # End of file: close the device and terminate the stream.
  defp stream_lines(device, :eof) do
    File.close(device)
    []
  end

  defp stream_lines(device, line),
    do: Stream.construct(line, fn -> stream_lines(device, IO.read(device, :line)) end)
end
|
learning/meetup_2018_03/streams/lib/streams.ex
| 0.928684
| 0.583678
|
streams.ex
|
starcoder
|
defmodule Minex.DSL do
  alias Minex.Config

  @doc """
  Set a (global) variable by keyword list. Returns the previous values or nil if unset
  ```
  set(a: value_a, b: value_b)
  ```
  """
  @spec set(keyword()) :: keyword()
  def set(keyword_list) when is_list(keyword_list), do: Config.set(keyword_list)

  @doc """
  Set a (global) variable. Returns the previous value or nil if unset
  ```
  set(:key, value)
  ```
  """
  @spec set(atom(), any() | (() -> any())) :: any()
  def set(key, value_or_fun), do: Config.set(key, value_or_fun)

  @doc """
  Read a (global) variable.
  ```
  val = get(:key)
  val = get(:key, "default")
  ```
  """
  @spec get(atom(), any()) :: any()
  def get(key, default \\ nil) do
    case Config.get(key) do
      # Unset keys fall back to the caller-supplied default.
      nil -> default
      # Stored functions are evaluated lazily on read.
      fun when is_function(fun) -> fun.()
      value -> value
    end
  end

  @doc """
  Define a public task. Public tasks can be called from the command line (or by `Minex.run(["task_name"])`)
  ```
  public_task(:task_name, fn ->
    command("ls")
  end)
  ```
  """
  @spec public_task(atom(), keyword(), fun()) :: :ok
  def public_task(name, options \\ [], fun) do
    # Appending keeps any caller-supplied :public value first, so it wins
    # on keyword lookup.
    task(name, options ++ [public: true], fun)
  end

  @doc """
  Tasks defined with `generate_script_task` can be exported to a bash script by the `generate_script` task.
  This is useful for when you need a full terminal support, instead of it emulated through elixir. For example
  to start a remote iex session.
  ```
  generate_script_task(:task_name, fn ->
    run(:remote, fn ->
      command("cd \#{get(:deploy_to)} && ./bin/\#{get(:name)} remote")
    end)
  end)
  ```
  ```
  mix minex generate_script target_path
  ```
  """
  @spec generate_script_task(atom(), keyword(), fun()) :: :ok
  def generate_script_task(name, options \\ [], fun) do
    task(name, options ++ [generate_script: true], fun)
  end

  @doc """
  Define an internal task. These tasks can only be run by calling `run(:task_name)`.
  ```
  task(:task_name, fn ->
    command("ls")
  end)
  ```
  """
  @spec task(atom(), keyword(), fun()) :: :ok
  def task(name, options \\ [], fun), do: Config.put_task(name, fun, options)

  @doc """
  Run a task by name. Returns whatever is returned in the task itself.
  ```
  run(:my_task)
  run(:my_task, single_arg)
  run(:my_task, [a, b, c])
  ```
  """
  def run(name) do
    fun = Config.fetch_task!(name)
    {:arity, arity} = Function.info(fun, :arity)

    # Zero-arity tasks are invoked bare; one-arity tasks receive an empty
    # argument list.
    case arity do
      0 -> fun.()
      1 -> fun.([])
    end
  end

  # A single non-list argument is wrapped before being handed to the task.
  def run(name, arg) when not is_list(arg), do: run(name, [arg])

  def run(name, args) do
    Config.fetch_task!(name).(args)
  end

  @doc """
  Shortcut for `run(:command, [cmd, options])`
  ```
  command("my_cmd")
  command("my_cmd", echo_cmd: false)
  ```
  """
  def command(cmd, options \\ []), do: run(:command, [cmd, options])

  @doc """
  Collect commands without executing them. Sets a mode that you can use to check
  in tasks if needed.
  ```
  task(:dry_run, fn [fun] ->
    collect(fun)
    |> Enum.join("\\n")
    |> IO.puts()
  end)
  task(:example_task, fn ->
    command("a")
    command("b")
    command("c")
  end)
  run(:dry_run, fn ->
    run(:example_task)
  end)
  ```
  """
  def collect(fun, mode \\ :collect) do
    # Save the surrounding collection state so nested collects compose.
    saved_commands = get(:__commands)
    saved_mode = get(:__mode)

    set(:__commands, [])
    set(:__mode, mode)
    fun.()
    collected = get(:__commands)

    # Restore the outer state before returning.
    set(:__mode, saved_mode)
    set(:__commands, saved_commands)

    # Commands are prepended as they are collected, so reverse once here.
    Enum.reverse(collected)
  end

  @doc """
  Check if mode is set
  """
  def mode?(mode), do: get(:__mode) == mode

  @doc """
  Add a single command to the list of collected commands. Stored in `:__commands`.
  """
  def collect_command(command), do: set(:__commands, [command | get(:__commands)])

  @doc """
  Get a list of all defined tasks. Returns a map with the task names as key (atoms) and the
  values as {fun, options}
  """
  @spec tasks() :: %{optional(atom()) => {fun(), keyword()}}
  def tasks, do: Config.tasks()

  @doc """
  Get a list of all defined tasks with a certain option set. Public tasks have the `:public`
  option set and generate script tasks have the `:generate_script` option set.
  """
  @spec tasks(atom()) :: %{optional(atom()) => {fun(), keyword()}}
  def tasks(option), do: Config.tasks(option)

  @doc """
  Fetch a task by name. Raises if the task is not defined.
  """
  @spec fetch_task!(atom()) :: fun()
  def fetch_task!(name) do
    task = Agent.get(Minex, fn state -> Map.get(state.tasks, name) end)

    case task do
      {fun, _options} -> fun
      nil -> raise("task '#{name}' is not defined")
    end
  end

  @doc """
  Helper to add default arguments from a list of arguments.
  ```
  args = []
  default_args(args, [1, 2]) # => [1, 2]
  args = [:a]
  default_args(args, [1, 2]) # => [:a, 2]
  ```
  """
  @spec default_args(nil | list(), list()) :: list()
  def default_args(nil, defaults), do: defaults
  def default_args([], defaults), do: defaults
  def default_args(args, []), do: args
  def default_args([arg | rest], [_default | defaults]), do: [arg | default_args(rest, defaults)]
end
|
lib/minex/dsl.ex
| 0.774796
| 0.82386
|
dsl.ex
|
starcoder
|
defmodule Pigpiox.GPIO do
  @moduledoc """
  This module exposes pigpiod's basic GPIO functionality.
  """

  # Mapping between friendly mode atoms and the numeric codes pigpiod expects.
  @gpio_modes_map %{
    input: 0,
    output: 1,
    alt0: 4,
    alt1: 5,
    alt2: 6,
    alt3: 7,
    alt4: 3,
    alt5: 2
  }
  @gpio_modes Map.keys(@gpio_modes_map)
  # Reverse lookup used when decoding a mode code reported by pigpiod.
  @inverted_gpio_modes_map Map.new(@gpio_modes_map, fn {key, val} -> {val, key} end)

  @typedoc """
  A mode that a GPIO pin can be in. Returned by `get_mode/1` and passed to `set_mode/2`.
  """
  @type mode :: :input | :output | :alt0 | :alt1 | :alt2 | :alt3 | :alt4 | :alt5

  @typedoc """
  The state of a GPIO pin - 0 for low, 1 for high.
  """
  @type level :: 0 | 1

  @doc """
  Sets a mode for a specific GPIO `pin`. `pin` must be a valid GPIO pin number for the device, with some exceptions.
  See pigpio's [documentation](http://abyz.co.uk/rpi/pigpio/index.html) for more details.
  `mode` can be any of `t:mode/0`.
  """
  @spec set_mode(pin :: integer, mode) :: :ok | {:error, atom}
  def set_mode(pin, mode) when mode in @gpio_modes do
    # Collapse a successful result to a bare :ok; errors pass through.
    with {:ok, _} <- Pigpiox.Socket.command(:set_mode, pin, @gpio_modes_map[mode]) do
      :ok
    end
  end

  @doc """
  Returns the current mode for a specific GPIO `pin`
  """
  @spec get_mode(pin :: integer) :: {:ok, mode | :unknown} | {:error, atom}
  def get_mode(pin) do
    case Pigpiox.Socket.command(:get_mode, pin) do
      # Any code we cannot map back to a mode atom is reported as :unknown.
      {:ok, code} -> {:ok, Map.get(@inverted_gpio_modes_map, code, :unknown)}
      error -> error
    end
  end

  @doc """
  Returns the current level for a specific GPIO `pin`
  """
  @spec read(pin :: integer) :: {:ok, level} | {:error, atom}
  def read(pin), do: Pigpiox.Socket.command(:gpio_read, pin)

  @doc """
  Sets the current level for a specific GPIO `pin`
  """
  @spec write(pin :: integer, level) :: :ok | {:error, atom}
  def write(pin, level) when level in [0, 1] do
    with {:ok, _} <- Pigpiox.Socket.command(:gpio_write, pin, level) do
      :ok
    end
  end

  @doc """
  Starts a watcher to monitor the level of a specific GPIO `pin`
  The calling process will receive a message with the current level of the pin,
  as well as a message every time the level of that pin changes.
  The message will be of the format:
  `{:gpio_leveL_change, gpio, level}`
  """
  @spec watch(integer) :: {:ok, pid} | {:error, atom}
  def watch(pin), do: Pigpiox.GPIO.WatcherSupervisor.start_watcher(pin, self())

  @doc """
  Returns the current servo pulsewidth for a specific GPIO `pin`
  """
  @spec get_servo_pulsewidth(pin :: integer) :: {:ok, non_neg_integer} | {:error, atom}
  def get_servo_pulsewidth(pin), do: Pigpiox.Socket.command(:get_servo_pulsewidth, pin)

  @doc """
  Sets the servo pulsewidth for a specific GPIO `pin`.
  The pulsewidths supported by servos varies and should probably
  be determined by experiment. A value of 1500 should always be
  safe and represents the mid-point of rotation.
  A pulsewidth of 0 will stop the servo.
  You can DAMAGE a servo if you command it to move beyond its
  limits.
  """
  @spec set_servo_pulsewidth(pin :: integer, width :: non_neg_integer) :: :ok | {:error, atom}
  def set_servo_pulsewidth(pin, width) when width >= 0 and width <= 2500 do
    with {:ok, _} <- Pigpiox.Socket.command(:set_servo_pulsewidth, pin, width) do
      :ok
    end
  end
end
|
lib/pigpiox/gpio.ex
| 0.83363
| 0.538741
|
gpio.ex
|
starcoder
|
defmodule Commissar.Authorization do
  @moduledoc """
  Authorizers add a convenient way of laying out policies in a manner that makes
  it easy to read. Defining policies in a module that uses
  `Commissar.Authorization` also adds a catch-all policy that returns `:continue`,
  allowing your own policies to simply focus on `:ok` and `:error` responses.
  This carries over when exporting policies from other authorizers. Anonymous
  functions will need to explicitly return `:continue` if they do not `:ok`
  or `:error`.
  Keep in mind, if you just want to roll with anonymous functions or organize
  your policies in some other manner, it's completely possible to use
  `Commissar` without authorizers at all as long as the functions being
  passed in conform to `Commissar.policy` type.
  """

  defmacro __using__(_) do
    quote do
      @behaviour unquote(__MODULE__)
      @before_compile unquote(__MODULE__)

      @doc """
      Similar to `authorize/3` but returns a boolean response instead. This
      should be used when you have no use for any potential denial reasons.
      """
      @spec allow?(any(), any(), any()) :: boolean()
      def allow?(subject, action, context) do
        Commissar.allow?(authorize(subject, action, context))
      end

      @doc """
      Checks to see whether a subject attempting an action is allowed to do so
      on a context with a given set of policies.
      """
      # Fixed typespec: the previous `{:error | any()}` described a 1-tuple
      # containing a union; error results are `{:error, reason}` 2-tuples.
      @spec authorize(any(), any(), any()) :: :ok | {:error, any()}
      def authorize(subject, action, context) do
        Commissar.authorize(subject, action, context, export_policies())
      end

      @doc """
      Exports a single policy from this authorizer to be used as a policy in
      another.
      """
      def export_policy(policy_name),
        do: Commissar.export_policy(__MODULE__, policy_name)

      @doc """
      Exports _all_ policies from this authorizer for use in another.
      """
      def export_policies, do: Commissar.export_policies(__MODULE__)
    end
  end

  @doc """
  Adds a catch-all so you only need to define actual allows and denies.
  """
  defmacro __before_compile__(_) do
    quote do
      # Fallback policy clause: anything not explicitly handled defers with
      # `:continue`, letting the next policy in the list run.
      def policy(_name, _action, _subject, _context), do: :continue
    end
  end

  @doc """
  A policy definition taking a name (an atom that can be used by
  `export_policy/1` and `policies/0`), an action, a subject, and a context.
  """
  @callback policy(atom(), any(), any(), any()) :: Commissar.policy_result()

  @doc """
  A list of policies in the form of either atoms or functions that will be
  authorized in order by `authorize/3`.
  """
  @callback policies() :: [atom() | [atom()] | Commissar.policy() | [Commissar.policy()]]

  @optional_callbacks [policy: 4]
end
|
lib/commissar/authorization.ex
| 0.82573
| 0.618305
|
authorization.ex
|
starcoder
|
defmodule Editor.Block.Selection do
  @moduledoc """
  Holds current selection in a block.
  Passed from client to backend and vice-versa when executing block commands.
  The ids are ids of cells in which a selection starts or ends.
  The offsets are indices within those cells where the selection starts or ends.
  That means a simple caret (a cursor somewhere in the block text) will have
  the same ids and same offsets.
  Similarly, a selection of a text within a single cell will have the same ids,
  but different offsets.
  Lastly, a selection across cells within a block will have different ids and
  different offsets.
  Selection across blocks is not possible. Only whole blocks can be selected and
  this is handled at a different level.
  """
  alias Editor.Block

  defstruct [:start_id, :end_id, :start_offset, :end_offset]

  @type t :: %__MODULE__{
          start_id: Block.Cell.id() | nil,
          end_id: Block.Cell.id() | nil,
          start_offset: non_neg_integer() | nil,
          end_offset: non_neg_integer() | nil
        }

  @doc """
  Converts a string-keyed map (as sent by the client) into a selection struct.

  Returns `nil` when given `nil`. Raises with a descriptive message when any
  of the four fields has the wrong type. A map that is missing required keys
  entirely raises `FunctionClauseError`.
  """
  def normalize!(nil), do: nil

  def normalize!(%{
        "start_id" => start_id,
        "end_id" => end_id,
        "start_offset" => start_offset,
        "end_offset" => end_offset
      })
      when is_binary(start_id) and is_binary(end_id) and is_integer(start_offset) and
             is_integer(end_offset) do
    %__MODULE__{
      start_id: start_id,
      end_id: end_id,
      start_offset: start_offset,
      end_offset: end_offset
    }
  end

  # FIX: the clauses below previously interpolated the raw value
  # ("got: #{value}"), which itself raises Protocol.UndefinedError for values
  # with no String.Chars implementation (maps, lists, tuples...), masking the
  # intended validation error. `inspect/1` renders any term safely.
  def normalize!(%{"start_id" => value}) when not is_binary(value) do
    raise "selection start_id must be a valid id string, got: #{inspect(value)}"
  end

  def normalize!(%{"end_id" => value}) when not is_binary(value) do
    raise "selection end_id must be a valid id string, got: #{inspect(value)}"
  end

  def normalize!(%{"start_offset" => value}) when not is_integer(value) do
    raise "selection start_offset must be a valid number, got: #{inspect(value)}"
  end

  def normalize!(%{"end_offset" => value}) when not is_integer(value) do
    raise "selection end_offset must be a valid number, got: #{inspect(value)}"
  end

  @doc "Returns a selection with no cells and no offsets set."
  def new_empty, do: %__MODULE__{}

  @doc "Returns a caret (collapsed selection) at the very start of the given cell."
  def new_start_of(%Block.Cell{id: id}) do
    %__MODULE__{
      start_id: id,
      end_id: id,
      start_offset: 0,
      end_offset: 0
    }
  end

  @doc "Returns a caret (collapsed selection) just after the last grapheme of the given cell."
  def new_end_of(%Block.Cell{id: id, text: text}) do
    # String.length/1 counts graphemes, matching client-side cursor offsets.
    offset = String.length(text)

    %__MODULE__{
      start_id: id,
      end_id: id,
      start_offset: offset,
      end_offset: offset
    }
  end
end
|
lib/philtre/block/selection.ex
| 0.811489
| 0.635279
|
selection.ex
|
starcoder
|
defmodule Unleash.Cache do
  @moduledoc """
  An ETS-backed cache for feature flags. Backing the cache with an ETS table
  allows many processes to read feature values concurrently while keeping
  network calls to a minimum.
  """

  @cache_table_name :unleash_cache

  # Expose the default table name so callers can reference it directly.
  def cache_table_name, do: @cache_table_name

  @doc """
  Creates a new ETS table (named `:unleash_cache` unless overridden) and
  seeds it with `existing_features`.
  """
  def init(existing_features \\ [], table_name \\ @cache_table_name) do
    :ets.new(table_name, [:named_table, read_concurrency: true])
    upsert_features(existing_features, table_name)
  end

  @doc """
  Returns the names of every feature currently stored in the cache.
  """
  def get_all_feature_names(table_name \\ @cache_table_name) do
    for {name, _feature} <- :ets.tab2list(table_name), do: name
  end

  @doc """
  Returns every feature currently stored in the cache.
  """
  def get_features(table_name \\ @cache_table_name) do
    for {_name, feature} <- :ets.tab2list(table_name), do: feature
  end

  @doc """
  Returns the feature stored under `name`, or `nil` when absent.
  Atom names are looked up by their string form.
  """
  def get_feature(name, table_name \\ @cache_table_name)

  def get_feature(name, table_name) when is_binary(name) do
    case :ets.lookup(table_name, name) do
      [{^name, feature}] -> feature
      [] -> nil
    end
  end

  def get_feature(name, table_name) when is_atom(name),
    do: get_feature(Atom.to_string(name), table_name)

  @doc """
  Upserts (creates or updates) the given features in the cache.
  All previously persisted entries are cleared first to prevent stale reads.
  """
  def upsert_features(features, table_name \\ @cache_table_name) do
    :ets.delete_all_objects(table_name)

    Enum.each(features, fn feature ->
      upsert_feature(feature.name, feature, table_name)
    end)
  end

  # Keys are always stored as strings; atoms are normalized on the way in.
  defp upsert_feature(name, value, table_name) when is_binary(name),
    do: :ets.insert(table_name, {name, value})

  defp upsert_feature(name, value, table_name) when is_atom(name),
    do: upsert_feature(Atom.to_string(name), value, table_name)
end
|
lib/unleash/cache.ex
| 0.73782
| 0.440469
|
cache.ex
|
starcoder
|
defmodule ExConfig do
  @moduledoc """
  Module enhancer for creating a nice place to get configuration data for
  your application
  To use, create a new module with something like
  defmodule MyApp.Config do
  use ExConfig
  end
  Configs under `:my_app` can be had via `MyApp.Config`'s `&fetch/2`,
  `&fetch!/2`, and `&get/3`.
  ## `use` Options
  The following options can be passed as a keyword list to the `use ExConfig`
  statement:
  * `:app` - The app atom. If undefined, this is assumed to be the first part
  of the using module's namespace, transformed with `&Macro.underscore/1`.
  * `:env_prefix` - A string which should be combined with "_ENV" to form the
  prefix of environment variables that are looked up. This is assumed to be
  the first part of the using module's namespace, transformed with
  `&String.upcase/1`.
  * `:valid_environments` - A list of atoms which are the environment settings
  that can be set (in string form, of course) in the application environment
  variable (eg. `SKYLAB_ENV`)
  * `:sections` - A list of atoms which are the config sections that should
  have functions of the same names dynamically added to the module. (if
  :foo_service is in the list, then `&foo_service/1` and `&foo_service!/1`
  will be defined.)
  * `:data_sources` - A list of data source modules, in the order that they
  should be evaluated to resolve any config values. By default, the order is:
  * `ExConfig.EnvironmentDataSource`
  * `ExConfig.EnvConfigDataSource`
  * `ExConfig.ApplicationEnvironmentDataSource`
  ## Macros
  Inside your config module, you may use the following macros.
  * `section(atom)` - Shortcut functions will be defined to allow easier access
  to the section by the name of the given atom. For instance, if the module
  has `section(:thing)`, then:
  * `&thing/1` will be defined. Call it with `:base_url` to get the same
  value as `get(:thing, :base_url)`.
  * `&thing!/1` will be defined. Call it with `:base_url` to get the same
  value as `fetch!(:thing, :base_url)`.
  ## Application Environment
  This library adds the concept of the application's (runtime) environment.
  The `:valid_environments` list has all the possible values and the first
  entry will be the default environment if none is set. In order to set one,
  simply define the relevant environment variable. For instance, if your
  `:env_prefix` is `"SKYLAB"` (and `:prod` is included in your
  `:valid_environments` list) then setting your `SKYLAB_ENV` environment
  variable to `"prod"` would set the application environment to `:prod`.
  This value is used when finding the needed value in the
  `ExConfig.EnvConfigDataSource` step of the cascading logic.
  ## Cascading Logic
  The cascading logic for finding a config value with the `section`
  `:some_service` and `key` `:base_url` would be as follows. If any step comes
  back with a value, the rest of the steps will be skipped and the value
  returned.
  * Look in the `SOME_SERVICE_BASE_URL` environment variable
  * Look in the application environment under `:my_app`, `:some_service`,
  `:env_configs` for a keyword list. Use the application environment atom as
  the key to find a keyword list which should then include the `key`.
  * Look in the application environment under `:my_app`, `:some_service`,
  `:base_url`
  """
  alias ExConfig.OptionNormalizer

  defmacro __using__(opts \\ []) do
    # Evaluate the options in the caller's context so expressions (not just
    # literals) may be passed, then normalize/fill defaults.
    opts =
      __CALLER__
      |> Module.eval_quoted(opts)
      |> elem(0)
      |> Keyword.put(:module, __CALLER__.module)
      |> OptionNormalizer.normalize_opts!()

    quote do
      import ExConfig, only: [section: 1]

      @doc "Get the configured data sources"
      @spec data_sources :: [module]
      def data_sources do
        unquote(Keyword.get(opts, :data_sources))
      end

      @doc "Get the application environment"
      # FIX: `ExConfig.get_env/2` returns an atom (either the default from
      # `hd(valid)` or `String.to_atom/1` of the env var) — the previous
      # `String.t()` spec was wrong.
      @spec env :: atom
      def env do
        ExConfig.get_env(
          unquote(Keyword.get(opts, :env_prefix)),
          unquote(Keyword.get(opts, :valid_environments))
        )
      end

      @doc "Get the atom for the app's config namespace"
      @spec app :: atom
      def app, do: unquote(Keyword.get(opts, :app))

      @doc "Get the all-caps string for the environment variables' prefix"
      @spec env_prefix :: String.t()
      def env_prefix, do: unquote(Keyword.get(opts, :env_prefix))

      @doc "Fetch a configuration key; raise if unset"
      @spec fetch!(atom, atom) :: any | no_return
      def fetch!(section, key),
        do: ExConfig.fetch!(__MODULE__, app(), section, key)

      @doc "Fetch a configuration key"
      @spec fetch(atom, atom) :: {:ok, any} | :error
      def fetch(section, key),
        do: ExConfig.fetch(__MODULE__, app(), section, key)

      @doc "Get a configuration key"
      @spec get(atom, atom, any) :: any
      def get(section, key, default \\ nil),
        do: ExConfig.get(__MODULE__, app(), section, key, default)

      # Define the per-section shortcut functions requested via `:sections`.
      Module.eval_quoted(
        __ENV__,
        Enum.map(
          unquote(Keyword.get(opts, :sections)),
          &ExConfig.section_fn_generator/1
        )
      )
    end
  end

  @doc """
  Create some shortcut functions for a given section
  """
  defmacro section(section) do
    section_fn_generator(section)
  end

  @doc "Get the application (runtime) environment"
  @spec get_env(String.t(), [atom]) :: atom
  def get_env(prefix, valid) do
    case System.get_env("#{prefix}_ENV") do
      nil ->
        # No env var set: the first valid environment is the default.
        hd(valid)

      val ->
        # Only convert to an atom when the value is whitelisted, so external
        # input can never create arbitrary atoms.
        if val in Enum.map(valid, &to_string/1),
          do: String.to_atom(val),
          else:
            raise("""
            Invalid #{prefix}_ENV (#{val}). Add `:#{val}` to the \
            `:valid_environments` option.\
            """)
    end
  end

  @doc "Get a configuration value, raise if unset"
  @spec fetch!(module, atom, atom, atom) :: any | no_return
  def fetch!(mod, app, section, key) do
    case fetch(mod, app, section, key) do
      :error ->
        raise RuntimeError,
              "Couldn't get #{inspect(section)} config: #{inspect(key)}"

      {:ok, val} ->
        val
    end
  end

  @doc "Get a configuration value"
  # Spec widened to the full arity so the `default` parameter is documented.
  @spec get(module, atom, atom, atom, any) :: any
  def get(mod, app, section, key, default \\ nil) do
    case fetch(mod, app, section, key) do
      {:ok, val} -> val
      :error -> default
    end
  end

  @doc "Fetch a configuration value"
  # FIX: the cascade resolves to a tagged `{:ok, val}` or `:error`, never a
  # bare value — the previous `any` spec hid that contract.
  @spec fetch(module, atom, atom, atom) :: {:ok, any} | :error
  def fetch(mod, app, section, key) do
    do_fetch(mod.data_sources(), mod, app, section, key)
  end

  @doc "Generates some section-specific functions"
  def section_fn_generator(sec) do
    quote do
      @doc "get a key from the `:#{unquote(sec)}` section"
      @spec unquote(sec)(atom, any) :: any
      def unquote(sec)(key, default \\ nil),
        do: get(unquote(sec), key, default)

      @doc "fetch a key from the `:#{unquote(sec)}` section; raise if unset"
      # FIX: this delegates to `fetch!/2`, which returns the raw value or
      # raises — the previous `{:ok, any} | :error` spec described `fetch/2`.
      @spec unquote(:"#{sec}!")(atom) :: any | no_return
      def unquote(:"#{sec}!")(key), do: fetch!(unquote(sec), key)
    end
  end

  # Walk the data sources in order; the first one that returns a non-`:error`
  # result wins and short-circuits the rest.
  defp do_fetch([source | tail], mod, app, section, key) do
    with :error <- source.fetch(mod, app, section, key) do
      do_fetch(tail, mod, app, section, key)
    end
  end

  defp do_fetch([], _, _, _, _) do
    :error
  end
end
|
lib/ex_config.ex
| 0.911666
| 0.586345
|
ex_config.ex
|
starcoder
|
defmodule Oban.Crontab.Parser do
@moduledoc false
@doc """
Parses the given `binary` as cron.
Returns `{:ok, [token], rest, context, position, byte_offset}` or
`{:error, reason, rest, context, line, byte_offset}` where `position`
describes the location of the cron (start position) as {line, column_on_line}.
## Options
* `:line` - the initial line, defaults to 1
* `:byte_offset` - the initial byte offset, defaults to 0
* `:context` - the initial context value. It will be converted
to a map
"""
@spec cron(binary, keyword) ::
{:ok, [term], rest, context, line, byte_offset}
| {:error, reason, rest, context, line, byte_offset}
when line: {pos_integer, byte_offset},
byte_offset: pos_integer,
rest: binary,
reason: String.t(),
context: map()
  # Entry point of the generated parser. Seeds line/offset/context from the
  # caller's options (see @doc above), runs the state machine starting at
  # cron__0/6, and reverses the token accumulator on success (tokens are
  # accumulated in reverse for O(1) prepends).
  def cron(binary, opts \\ []) when is_binary(binary) do
    line = Keyword.get(opts, :line, 1)
    offset = Keyword.get(opts, :byte_offset, 0)
    context = Map.new(Keyword.get(opts, :context, []))

    case(cron__0(binary, [], [], context, {line, offset}, offset)) do
      {:ok, acc, rest, context, line, offset} ->
        {:ok, :lists.reverse(acc), rest, context, line, offset}

      {:error, _, _, _, _, _} = error ->
        error
    end
  end
  # The `cron__N/6` functions below are machine-generated parser states
  # (they appear to be NimbleParsec combinator output committed to source —
  # do not edit by hand; regenerate instead). Each clause threads:
  # rest-of-input binary, token accumulator, a backtracking stack, the user
  # context, a {line, col} tuple, and the byte offset.
  # States cron__0..cron__20 parse the first (minutes) field's alternatives:
  # "," separators, "*" wildcards, "*/N" steps, and plain numbers.
  defp cron__0(rest, acc, stack, context, line, offset) do
    cron__1(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__1(rest, acc, stack, context, line, offset) do
    cron__32(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp cron__3(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    cron__4(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__3(rest, _acc, _stack, context, line, offset) do
    {:error,
     "expected byte in the range ?0..?9, followed by byte in the range ?0..?9, followed by string \"-\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*/\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*\" or string \",\"",
     rest, context, line, offset}
  end

  defp cron__4(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__2(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp cron__5(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    cron__3(rest, [], stack, context, line, offset)
  end

  defp cron__6(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    cron__7(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__6(rest, acc, stack, context, line, offset) do
    cron__5(rest, acc, stack, context, line, offset)
  end

  defp cron__7(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__2(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp cron__8(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    cron__6(rest, [], stack, context, line, offset)
  end

  defp cron__9(rest, acc, stack, context, line, offset) do
    cron__10(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__10(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    cron__11(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
  end

  defp cron__10(rest, _acc, stack, context, line, offset) do
    [acc | stack] = stack
    cron__8(rest, acc, stack, context, line, offset)
  end

  defp cron__11(rest, acc, stack, context, line, offset) do
    cron__12(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__12(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__13(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__12(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    cron__8(rest, acc, stack, context, line, offset)
  end

  defp cron__13(rest, acc, stack, context, line, offset) do
    cron__15(rest, acc, [1 | stack], context, line, offset)
  end

  defp cron__15(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__16(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__15(rest, acc, stack, context, line, offset) do
    cron__14(rest, acc, stack, context, line, offset)
  end

  defp cron__14(rest, acc, [_ | stack], context, line, offset) do
    cron__17(rest, acc, stack, context, line, offset)
  end

  defp cron__16(rest, acc, [1 | stack], context, line, offset) do
    cron__17(rest, acc, stack, context, line, offset)
  end

  defp cron__16(rest, acc, [count | stack], context, line, offset) do
    cron__15(rest, acc, [count - 1 | stack], context, line, offset)
  end

  defp cron__17(rest, user_acc, [acc | stack], context, line, offset) do
    cron__18(
      rest,
      (
        [head | tail] = :lists.reverse(user_acc)
        [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
      ) ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__18(rest, user_acc, [acc | stack], context, line, offset) do
    cron__19(
      rest,
      [
        step:
          case(:lists.reverse(user_acc)) do
            [one] ->
              one

            many ->
              raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
          end
      ] ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__19(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__2(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp cron__20(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    cron__9(rest, [], stack, context, line, offset)
  end
  # Generated states cron__21..cron__49 (same machine-generated origin as
  # above): parse literal numbers and "N-M" ranges for the first field,
  # tagging results as `literal:`, `step:` or `range:` tokens.
  defp cron__21(rest, acc, stack, context, line, offset) do
    cron__22(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__22(rest, acc, stack, context, line, offset) do
    cron__23(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__23(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__24(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__23(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    cron__20(rest, acc, stack, context, line, offset)
  end

  defp cron__24(rest, acc, stack, context, line, offset) do
    cron__26(rest, acc, [1 | stack], context, line, offset)
  end

  defp cron__26(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__27(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__26(rest, acc, stack, context, line, offset) do
    cron__25(rest, acc, stack, context, line, offset)
  end

  defp cron__25(rest, acc, [_ | stack], context, line, offset) do
    cron__28(rest, acc, stack, context, line, offset)
  end

  defp cron__27(rest, acc, [1 | stack], context, line, offset) do
    cron__28(rest, acc, stack, context, line, offset)
  end

  defp cron__27(rest, acc, [count | stack], context, line, offset) do
    cron__26(rest, acc, [count - 1 | stack], context, line, offset)
  end

  defp cron__28(rest, user_acc, [acc | stack], context, line, offset) do
    cron__29(
      rest,
      (
        [head | tail] = :lists.reverse(user_acc)
        [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
      ) ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__29(rest, user_acc, [acc | stack], context, line, offset) do
    cron__30(
      rest,
      [
        literal:
          case(:lists.reverse(user_acc)) do
            [one] ->
              one

            many ->
              raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
          end
      ] ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__30(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__2(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp cron__31(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    cron__21(rest, [], stack, context, line, offset)
  end

  defp cron__32(rest, acc, stack, context, line, offset) do
    cron__33(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__33(rest, acc, stack, context, line, offset) do
    cron__34(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__34(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__35(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__34(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    cron__31(rest, acc, stack, context, line, offset)
  end

  defp cron__35(rest, acc, stack, context, line, offset) do
    cron__37(rest, acc, [1 | stack], context, line, offset)
  end

  defp cron__37(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__38(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__37(rest, acc, stack, context, line, offset) do
    cron__36(rest, acc, stack, context, line, offset)
  end

  defp cron__36(rest, acc, [_ | stack], context, line, offset) do
    cron__39(rest, acc, stack, context, line, offset)
  end

  defp cron__38(rest, acc, [1 | stack], context, line, offset) do
    cron__39(rest, acc, stack, context, line, offset)
  end

  defp cron__38(rest, acc, [count | stack], context, line, offset) do
    cron__37(rest, acc, [count - 1 | stack], context, line, offset)
  end

  defp cron__39(rest, user_acc, [acc | stack], context, line, offset) do
    cron__40(
      rest,
      (
        [head | tail] = :lists.reverse(user_acc)
        [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
      ) ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__40(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    cron__41(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__40(rest, _acc, stack, context, line, offset) do
    [acc | stack] = stack
    cron__31(rest, acc, stack, context, line, offset)
  end

  defp cron__41(rest, acc, stack, context, line, offset) do
    cron__42(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__42(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__43(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__42(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    cron__31(rest, acc, stack, context, line, offset)
  end

  defp cron__43(rest, acc, stack, context, line, offset) do
    cron__45(rest, acc, [1 | stack], context, line, offset)
  end

  defp cron__45(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__46(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__45(rest, acc, stack, context, line, offset) do
    cron__44(rest, acc, stack, context, line, offset)
  end

  defp cron__44(rest, acc, [_ | stack], context, line, offset) do
    cron__47(rest, acc, stack, context, line, offset)
  end

  defp cron__46(rest, acc, [1 | stack], context, line, offset) do
    cron__47(rest, acc, stack, context, line, offset)
  end

  defp cron__46(rest, acc, [count | stack], context, line, offset) do
    cron__45(rest, acc, [count - 1 | stack], context, line, offset)
  end

  defp cron__47(rest, user_acc, [acc | stack], context, line, offset) do
    cron__48(
      rest,
      (
        [head | tail] = :lists.reverse(user_acc)
        [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
      ) ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__48(rest, user_acc, [acc | stack], context, line, offset) do
    cron__49(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
  end

  defp cron__49(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__2(rest, acc ++ previous_acc, stack, context, line, offset)
  end
  # Generated states cron__2 and cron__51..cron__82 (same machine-generated
  # origin): the repeat loop for additional comma-separated expressions in
  # the same field — the same alternatives (",", "*", "*/N", literal, range)
  # replayed with backtracking via the saved {rest, context, line, offset}
  # tuples on the stack.
  defp cron__2(rest, acc, stack, context, line, offset) do
    cron__51(rest, [], [{rest, acc, context, line, offset} | stack], context, line, offset)
  end

  defp cron__51(rest, acc, stack, context, line, offset) do
    cron__82(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp cron__53(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    cron__54(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__53(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    cron__50(rest, acc, stack, context, line, offset)
  end

  defp cron__54(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__52(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp cron__55(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    cron__53(rest, [], stack, context, line, offset)
  end

  defp cron__56(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    cron__57(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__56(rest, acc, stack, context, line, offset) do
    cron__55(rest, acc, stack, context, line, offset)
  end

  defp cron__57(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__52(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp cron__58(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    cron__56(rest, [], stack, context, line, offset)
  end

  defp cron__59(rest, acc, stack, context, line, offset) do
    cron__60(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__60(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    cron__61(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
  end

  defp cron__60(rest, _acc, stack, context, line, offset) do
    [acc | stack] = stack
    cron__58(rest, acc, stack, context, line, offset)
  end

  defp cron__61(rest, acc, stack, context, line, offset) do
    cron__62(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__62(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__63(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__62(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    cron__58(rest, acc, stack, context, line, offset)
  end

  defp cron__63(rest, acc, stack, context, line, offset) do
    cron__65(rest, acc, [1 | stack], context, line, offset)
  end

  defp cron__65(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__66(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__65(rest, acc, stack, context, line, offset) do
    cron__64(rest, acc, stack, context, line, offset)
  end

  defp cron__64(rest, acc, [_ | stack], context, line, offset) do
    cron__67(rest, acc, stack, context, line, offset)
  end

  defp cron__66(rest, acc, [1 | stack], context, line, offset) do
    cron__67(rest, acc, stack, context, line, offset)
  end

  defp cron__66(rest, acc, [count | stack], context, line, offset) do
    cron__65(rest, acc, [count - 1 | stack], context, line, offset)
  end

  defp cron__67(rest, user_acc, [acc | stack], context, line, offset) do
    cron__68(
      rest,
      (
        [head | tail] = :lists.reverse(user_acc)
        [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
      ) ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__68(rest, user_acc, [acc | stack], context, line, offset) do
    cron__69(
      rest,
      [
        step:
          case(:lists.reverse(user_acc)) do
            [one] ->
              one

            many ->
              raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
          end
      ] ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__69(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__52(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp cron__70(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    cron__59(rest, [], stack, context, line, offset)
  end

  defp cron__71(rest, acc, stack, context, line, offset) do
    cron__72(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__72(rest, acc, stack, context, line, offset) do
    cron__73(rest, [], [acc | stack], context, line, offset)
  end

  defp cron__73(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__74(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__73(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    cron__70(rest, acc, stack, context, line, offset)
  end

  defp cron__74(rest, acc, stack, context, line, offset) do
    cron__76(rest, acc, [1 | stack], context, line, offset)
  end

  defp cron__76(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    cron__77(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp cron__76(rest, acc, stack, context, line, offset) do
    cron__75(rest, acc, stack, context, line, offset)
  end

  defp cron__75(rest, acc, [_ | stack], context, line, offset) do
    cron__78(rest, acc, stack, context, line, offset)
  end

  defp cron__77(rest, acc, [1 | stack], context, line, offset) do
    cron__78(rest, acc, stack, context, line, offset)
  end

  defp cron__77(rest, acc, [count | stack], context, line, offset) do
    cron__76(rest, acc, [count - 1 | stack], context, line, offset)
  end

  defp cron__78(rest, user_acc, [acc | stack], context, line, offset) do
    cron__79(
      rest,
      (
        [head | tail] = :lists.reverse(user_acc)
        [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
      ) ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__79(rest, user_acc, [acc | stack], context, line, offset) do
    cron__80(
      rest,
      [
        literal:
          case(:lists.reverse(user_acc)) do
            [one] ->
              one

            many ->
              raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
          end
      ] ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp cron__80(rest, acc, [_, previous_acc | stack], context, line, offset) do
    cron__52(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp cron__81(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    cron__71(rest, [], stack, context, line, offset)
  end

  defp cron__82(rest, acc, stack, context, line, offset) do
    cron__83(rest, [], [acc | stack], context, line, offset)
  end
defp cron__83(rest, acc, stack, context, line, offset) do
cron__84(rest, [], [acc | stack], context, line, offset)
end
defp cron__84(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__85(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__84(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__81(rest, acc, stack, context, line, offset)
end
defp cron__85(rest, acc, stack, context, line, offset) do
cron__87(rest, acc, [1 | stack], context, line, offset)
end
defp cron__87(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__88(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__87(rest, acc, stack, context, line, offset) do
cron__86(rest, acc, stack, context, line, offset)
end
defp cron__86(rest, acc, [_ | stack], context, line, offset) do
cron__89(rest, acc, stack, context, line, offset)
end
defp cron__88(rest, acc, [1 | stack], context, line, offset) do
cron__89(rest, acc, stack, context, line, offset)
end
defp cron__88(rest, acc, [count | stack], context, line, offset) do
cron__87(rest, acc, [count - 1 | stack], context, line, offset)
end
defp cron__89(rest, user_acc, [acc | stack], context, line, offset) do
cron__90(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# NOTE(review): machine-generated parser code (appears to be NimbleParsec
# `defparsec` output). Each cron__N function is one parser state; arguments are
# (rest, acc, stack, context, line, offset). Left byte-identical — comments only.
#
# Section: tail of a "NN-NN" range match. cron__90 consumes the literal "-"
# separator; on failure it restores the saved accumulator and backtracks to
# cron__81 (defined earlier in this file — not visible here).
defp cron__90(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__91(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__90(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
cron__81(rest, acc, stack, context, line, offset)
end
# cron__91/92: push a fresh accumulator, then require one ASCII digit (?0..?9),
# storing its numeric value (x0 - 48).
defp cron__91(rest, acc, stack, context, line, offset) do
cron__92(rest, [], [acc | stack], context, line, offset)
end
defp cron__92(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__93(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__92(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__81(rest, acc, stack, context, line, offset)
end
# cron__93..96: optional second digit (a bounded repeat — max count 1 is pushed
# on the stack). Note the second digit is stored as the raw byte x0; the fold
# in cron__97 subtracts 48 for it.
defp cron__93(rest, acc, stack, context, line, offset) do
cron__95(rest, acc, [1 | stack], context, line, offset)
end
defp cron__95(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__96(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__95(rest, acc, stack, context, line, offset) do
cron__94(rest, acc, stack, context, line, offset)
end
defp cron__94(rest, acc, [_ | stack], context, line, offset) do
cron__97(rest, acc, stack, context, line, offset)
end
defp cron__96(rest, acc, [1 | stack], context, line, offset) do
cron__97(rest, acc, stack, context, line, offset)
end
defp cron__96(rest, acc, [count | stack], context, line, offset) do
cron__95(rest, acc, [count - 1 | stack], context, line, offset)
end
# cron__97: fold the collected digits into a single integer (head already
# converted; each later byte contributes x - 48 + acc * 10).
defp cron__97(rest, user_acc, [acc | stack], context, line, offset) do
cron__98(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# cron__98: wrap the two parsed bounds as {:range, [lo, hi]}.
defp cron__98(rest, user_acc, [acc | stack], context, line, offset) do
cron__99(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
# cron__99: merge this alternative's result into the enclosing accumulator and
# continue the repeat loop (cron__52).
defp cron__99(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__52(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Generated repeat-combinator pair for the comma-separated minutes expression.
# cron__50 is the loop exit: when an iteration fails, the snapshot tuple saved
# on the stack restores the last successful position and parsing proceeds to
# cron__100 (which tags the result). NOTE(review): additional cron__50 clauses,
# if any, would be earlier in the file — not visible in this chunk.
defp cron__50(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
cron__100(rest, acc, stack, context, line, offset)
end
# cron__52 is the loop step: commit the input consumed by the iteration, push a
# fresh snapshot, and re-enter the iteration body via cron__51 (defined earlier
# in the file).
defp cron__52(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
cron__51(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
# cron__100: wrap everything parsed for the first field as {:minutes, tokens}.
defp cron__100(rest, user_acc, [acc | stack], context, line, offset) do
cron__101(rest, [minutes: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
# cron__101: require a single space separating the minutes and hours fields;
# a miss here is a hard parse error (no backtracking).
defp cron__101(<<" ", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__102(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__101(rest, _acc, _stack, context, line, offset) do
{:error, "expected string \" \"", rest, context, line, offset}
end
# cron__102/103: enter the hours field — push accumulators plus a backtracking
# snapshot {rest, context, line, offset}, then try alternatives starting with
# the range parser (cron__134).
defp cron__102(rest, acc, stack, context, line, offset) do
cron__103(rest, [], [acc | stack], context, line, offset)
end
defp cron__103(rest, acc, stack, context, line, offset) do
cron__134(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
# Fallback chain for the hours-field choice. Alternatives are tried in order
# range -> step -> literal -> wildcard -> ","; cron__105 is the last resort:
# a "," matches (empty list item), anything else is a hard parse error whose
# message enumerates every alternative of the choice.
defp cron__105(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__106(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__105(rest, _acc, _stack, context, line, offset) do
{:error,
"expected byte in the range ?0..?9, followed by byte in the range ?0..?9, followed by string \"-\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*/\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*\" or string \",\"",
rest, context, line, offset}
end
defp cron__106(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__104(rest, acc ++ previous_acc, stack, context, line, offset)
end
# cron__107: on wildcard failure, rewind to the saved snapshot and try ",".
defp cron__107(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__105(rest, [], stack, context, line, offset)
end
# cron__108/109: "*" wildcard alternative, emitted as {:wild, "*"}.
defp cron__108(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__109(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__108(rest, acc, stack, context, line, offset) do
cron__107(rest, acc, stack, context, line, offset)
end
defp cron__109(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__104(rest, acc ++ previous_acc, stack, context, line, offset)
end
# cron__110: on literal failure, rewind to the snapshot and try "*".
defp cron__110(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__108(rest, [], stack, context, line, offset)
end
# Hours-field step alternative: "*/N" or "*/NN". Matches the literal "*/",
# then one required digit and one optional digit, folds them into an integer,
# and tags the single value as {:step, n}. Failures rewind via cron__110.
defp cron__111(rest, acc, stack, context, line, offset) do
cron__112(rest, [], [acc | stack], context, line, offset)
end
defp cron__112(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__113(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp cron__112(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
cron__110(rest, acc, stack, context, line, offset)
end
defp cron__113(rest, acc, stack, context, line, offset) do
cron__114(rest, [], [acc | stack], context, line, offset)
end
# First (required) digit.
defp cron__114(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__115(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__114(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__110(rest, acc, stack, context, line, offset)
end
# Optional second digit (bounded repeat, max 1).
defp cron__115(rest, acc, stack, context, line, offset) do
cron__117(rest, acc, [1 | stack], context, line, offset)
end
defp cron__117(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__118(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__117(rest, acc, stack, context, line, offset) do
cron__116(rest, acc, stack, context, line, offset)
end
defp cron__116(rest, acc, [_ | stack], context, line, offset) do
cron__119(rest, acc, stack, context, line, offset)
end
defp cron__118(rest, acc, [1 | stack], context, line, offset) do
cron__119(rest, acc, stack, context, line, offset)
end
defp cron__118(rest, acc, [count | stack], context, line, offset) do
cron__117(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits into a single integer.
defp cron__119(rest, user_acc, [acc | stack], context, line, offset) do
cron__120(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# unwrap_and_tag: exactly one integer expected; tag it as {:step, n}.
defp cron__120(rest, user_acc, [acc | stack], context, line, offset) do
cron__121(
rest,
[
step:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp cron__121(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__104(rest, acc ++ previous_acc, stack, context, line, offset)
end
# On literal-alternative failure, rewind to the snapshot and try "*/N".
defp cron__122(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__111(rest, [], stack, context, line, offset)
end
# Hours-field literal alternative: a bare "N" or "NN" value, folded into an
# integer and tagged {:literal, n}. Failures rewind via cron__122 (which then
# tries the "*/N" step alternative).
defp cron__123(rest, acc, stack, context, line, offset) do
cron__124(rest, [], [acc | stack], context, line, offset)
end
defp cron__124(rest, acc, stack, context, line, offset) do
cron__125(rest, [], [acc | stack], context, line, offset)
end
# First (required) digit.
defp cron__125(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__126(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__125(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__122(rest, acc, stack, context, line, offset)
end
# Optional second digit (bounded repeat, max 1).
defp cron__126(rest, acc, stack, context, line, offset) do
cron__128(rest, acc, [1 | stack], context, line, offset)
end
defp cron__128(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__129(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__128(rest, acc, stack, context, line, offset) do
cron__127(rest, acc, stack, context, line, offset)
end
defp cron__127(rest, acc, [_ | stack], context, line, offset) do
cron__130(rest, acc, stack, context, line, offset)
end
defp cron__129(rest, acc, [1 | stack], context, line, offset) do
cron__130(rest, acc, stack, context, line, offset)
end
defp cron__129(rest, acc, [count | stack], context, line, offset) do
cron__128(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits into a single integer.
defp cron__130(rest, user_acc, [acc | stack], context, line, offset) do
cron__131(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# unwrap_and_tag: exactly one integer expected; tag it as {:literal, n}.
defp cron__131(rest, user_acc, [acc | stack], context, line, offset) do
cron__132(
rest,
[
literal:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp cron__132(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__104(rest, acc ++ previous_acc, stack, context, line, offset)
end
# On range-alternative failure, rewind to the snapshot and try the literal.
defp cron__133(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__123(rest, [], stack, context, line, offset)
end
# Hours-field range alternative: "N[-N]N" i.e. one-or-two digits, a "-", then
# one-or-two digits; both bounds folded to integers and tagged
# {:range, [lo, hi]}. Any failure rewinds via cron__133 (literal next).
defp cron__134(rest, acc, stack, context, line, offset) do
cron__135(rest, [], [acc | stack], context, line, offset)
end
defp cron__135(rest, acc, stack, context, line, offset) do
cron__136(rest, [], [acc | stack], context, line, offset)
end
# Lower bound: required digit + optional second digit.
defp cron__136(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__137(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__136(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__133(rest, acc, stack, context, line, offset)
end
defp cron__137(rest, acc, stack, context, line, offset) do
cron__139(rest, acc, [1 | stack], context, line, offset)
end
defp cron__139(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__140(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__139(rest, acc, stack, context, line, offset) do
cron__138(rest, acc, stack, context, line, offset)
end
defp cron__138(rest, acc, [_ | stack], context, line, offset) do
cron__141(rest, acc, stack, context, line, offset)
end
defp cron__140(rest, acc, [1 | stack], context, line, offset) do
cron__141(rest, acc, stack, context, line, offset)
end
defp cron__140(rest, acc, [count | stack], context, line, offset) do
cron__139(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold lower-bound digits into a single integer.
defp cron__141(rest, user_acc, [acc | stack], context, line, offset) do
cron__142(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# "-" separator between the two bounds.
defp cron__142(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__143(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__142(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
cron__133(rest, acc, stack, context, line, offset)
end
# Upper bound: required digit + optional second digit.
defp cron__143(rest, acc, stack, context, line, offset) do
cron__144(rest, [], [acc | stack], context, line, offset)
end
defp cron__144(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__145(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__144(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__133(rest, acc, stack, context, line, offset)
end
defp cron__145(rest, acc, stack, context, line, offset) do
cron__147(rest, acc, [1 | stack], context, line, offset)
end
defp cron__147(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__148(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__147(rest, acc, stack, context, line, offset) do
cron__146(rest, acc, stack, context, line, offset)
end
defp cron__146(rest, acc, [_ | stack], context, line, offset) do
cron__149(rest, acc, stack, context, line, offset)
end
defp cron__148(rest, acc, [1 | stack], context, line, offset) do
cron__149(rest, acc, stack, context, line, offset)
end
defp cron__148(rest, acc, [count | stack], context, line, offset) do
cron__147(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold upper-bound digits into a single integer.
defp cron__149(rest, user_acc, [acc | stack], context, line, offset) do
cron__150(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# Tag both bounds as {:range, [lo, hi]} and merge into the field accumulator.
defp cron__150(rest, user_acc, [acc | stack], context, line, offset) do
cron__151(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
defp cron__151(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__104(rest, acc ++ previous_acc, stack, context, line, offset)
end
# After one hours-field expression succeeds, enter a repeat that consumes any
# additional comma-separated expressions. cron__104 saves a full snapshot
# (for loop exit via cron__152); cron__153 saves a lighter backtracking
# snapshot and tries the alternatives starting at cron__184.
defp cron__104(rest, acc, stack, context, line, offset) do
cron__153(rest, [], [{rest, acc, context, line, offset} | stack], context, line, offset)
end
defp cron__153(rest, acc, stack, context, line, offset) do
cron__184(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
# Fallback chain for the repeated (after-comma) hours expressions. Unlike the
# first expression (cron__105), a total miss here is NOT a hard error — the
# repeat simply stops via cron__152.
defp cron__155(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__156(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__155(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__152(rest, acc, stack, context, line, offset)
end
defp cron__156(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__154(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Rewind and try "," as the last alternative.
defp cron__157(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__155(rest, [], stack, context, line, offset)
end
# "*" wildcard alternative, emitted as {:wild, "*"}.
defp cron__158(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__159(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__158(rest, acc, stack, context, line, offset) do
cron__157(rest, acc, stack, context, line, offset)
end
defp cron__159(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__154(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Rewind and try "*" next.
defp cron__160(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__158(rest, [], stack, context, line, offset)
end
# Step alternative ("*/N" / "*/NN") inside the hours repeat loop; same shape
# as cron__111..cron__121 but failures rewind via cron__160.
defp cron__161(rest, acc, stack, context, line, offset) do
cron__162(rest, [], [acc | stack], context, line, offset)
end
defp cron__162(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__163(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp cron__162(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
cron__160(rest, acc, stack, context, line, offset)
end
defp cron__163(rest, acc, stack, context, line, offset) do
cron__164(rest, [], [acc | stack], context, line, offset)
end
# Required first digit.
defp cron__164(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__165(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__164(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__160(rest, acc, stack, context, line, offset)
end
# Optional second digit (bounded repeat, max 1).
defp cron__165(rest, acc, stack, context, line, offset) do
cron__167(rest, acc, [1 | stack], context, line, offset)
end
defp cron__167(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__168(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__167(rest, acc, stack, context, line, offset) do
cron__166(rest, acc, stack, context, line, offset)
end
defp cron__166(rest, acc, [_ | stack], context, line, offset) do
cron__169(rest, acc, stack, context, line, offset)
end
defp cron__168(rest, acc, [1 | stack], context, line, offset) do
cron__169(rest, acc, stack, context, line, offset)
end
defp cron__168(rest, acc, [count | stack], context, line, offset) do
cron__167(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits into a single integer.
defp cron__169(rest, user_acc, [acc | stack], context, line, offset) do
cron__170(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# Tag as {:step, n}.
defp cron__170(rest, user_acc, [acc | stack], context, line, offset) do
cron__171(
rest,
[
step:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp cron__171(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__154(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Rewind and try the step alternative next.
defp cron__172(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__161(rest, [], stack, context, line, offset)
end
# Literal alternative ("N"/"NN" -> {:literal, n}) inside the hours repeat
# loop; same shape as cron__123..cron__132 but failures rewind via cron__172.
defp cron__173(rest, acc, stack, context, line, offset) do
cron__174(rest, [], [acc | stack], context, line, offset)
end
defp cron__174(rest, acc, stack, context, line, offset) do
cron__175(rest, [], [acc | stack], context, line, offset)
end
# Required first digit.
defp cron__175(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__176(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__175(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__172(rest, acc, stack, context, line, offset)
end
# Optional second digit (bounded repeat, max 1).
defp cron__176(rest, acc, stack, context, line, offset) do
cron__178(rest, acc, [1 | stack], context, line, offset)
end
defp cron__178(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__179(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__178(rest, acc, stack, context, line, offset) do
cron__177(rest, acc, stack, context, line, offset)
end
defp cron__177(rest, acc, [_ | stack], context, line, offset) do
cron__180(rest, acc, stack, context, line, offset)
end
defp cron__179(rest, acc, [1 | stack], context, line, offset) do
cron__180(rest, acc, stack, context, line, offset)
end
defp cron__179(rest, acc, [count | stack], context, line, offset) do
cron__178(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits into a single integer.
defp cron__180(rest, user_acc, [acc | stack], context, line, offset) do
cron__181(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# Tag as {:literal, n}.
defp cron__181(rest, user_acc, [acc | stack], context, line, offset) do
cron__182(
rest,
[
literal:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp cron__182(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__154(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Rewind and try the literal alternative next.
defp cron__183(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__173(rest, [], stack, context, line, offset)
end
# Range alternative ("N[N]-N[N]" -> {:range, [lo, hi]}) inside the hours
# repeat loop; same shape as cron__134..cron__151 but failures rewind via
# cron__183.
defp cron__184(rest, acc, stack, context, line, offset) do
cron__185(rest, [], [acc | stack], context, line, offset)
end
defp cron__185(rest, acc, stack, context, line, offset) do
cron__186(rest, [], [acc | stack], context, line, offset)
end
# Lower bound: required digit + optional second digit.
defp cron__186(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__187(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__186(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__183(rest, acc, stack, context, line, offset)
end
defp cron__187(rest, acc, stack, context, line, offset) do
cron__189(rest, acc, [1 | stack], context, line, offset)
end
defp cron__189(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__190(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__189(rest, acc, stack, context, line, offset) do
cron__188(rest, acc, stack, context, line, offset)
end
defp cron__188(rest, acc, [_ | stack], context, line, offset) do
cron__191(rest, acc, stack, context, line, offset)
end
defp cron__190(rest, acc, [1 | stack], context, line, offset) do
cron__191(rest, acc, stack, context, line, offset)
end
defp cron__190(rest, acc, [count | stack], context, line, offset) do
cron__189(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold lower-bound digits into a single integer.
defp cron__191(rest, user_acc, [acc | stack], context, line, offset) do
cron__192(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# "-" separator between the two bounds.
defp cron__192(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__193(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__192(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
cron__183(rest, acc, stack, context, line, offset)
end
# Upper bound: required digit + optional second digit.
defp cron__193(rest, acc, stack, context, line, offset) do
cron__194(rest, [], [acc | stack], context, line, offset)
end
defp cron__194(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__195(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__194(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__183(rest, acc, stack, context, line, offset)
end
defp cron__195(rest, acc, stack, context, line, offset) do
cron__197(rest, acc, [1 | stack], context, line, offset)
end
defp cron__197(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__198(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__197(rest, acc, stack, context, line, offset) do
cron__196(rest, acc, stack, context, line, offset)
end
defp cron__196(rest, acc, [_ | stack], context, line, offset) do
cron__199(rest, acc, stack, context, line, offset)
end
defp cron__198(rest, acc, [1 | stack], context, line, offset) do
cron__199(rest, acc, stack, context, line, offset)
end
defp cron__198(rest, acc, [count | stack], context, line, offset) do
cron__197(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold upper-bound digits into a single integer.
defp cron__199(rest, user_acc, [acc | stack], context, line, offset) do
cron__200(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# Tag both bounds as {:range, [lo, hi]} and continue the repeat loop.
defp cron__200(rest, user_acc, [acc | stack], context, line, offset) do
cron__201(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
defp cron__201(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__154(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Repeat exit/loop pair for the comma-separated hours expressions, then the
# field tag. cron__152 restores the last successful snapshot when an
# iteration fails and proceeds to tagging.
defp cron__152(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
cron__202(rest, acc, stack, context, line, offset)
end
# cron__154: commit the iteration's consumed input, save a new snapshot, and
# loop back into cron__153.
defp cron__154(
inner_rest,
inner_acc,
[{rest, acc, context, line, offset} | stack],
inner_context,
inner_line,
inner_offset
) do
_ = {rest, acc, context, line, offset}
cron__153(
inner_rest,
[],
[{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
inner_context,
inner_line,
inner_offset
)
end
# cron__202: wrap everything parsed for this field as {:hours, tokens}.
defp cron__202(rest, user_acc, [acc | stack], context, line, offset) do
cron__203(rest, [hours: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
# cron__203: require a single space after the hours field; a miss is a hard
# parse error (no backtracking).
defp cron__203(<<" ", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__204(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__203(rest, _acc, _stack, context, line, offset) do
{:error, "expected string \" \"", rest, context, line, offset}
end
# cron__204/205: enter the next cron field — push accumulators plus a
# backtracking snapshot, then try alternatives starting with the range parser
# (cron__236).
defp cron__204(rest, acc, stack, context, line, offset) do
cron__205(rest, [], [acc | stack], context, line, offset)
end
defp cron__205(rest, acc, stack, context, line, offset) do
cron__236(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
# Fallback chain for this field's first expression. cron__207 is the last
# resort: "," matches (empty item), anything else is a hard parse error whose
# message enumerates all alternatives of the choice.
defp cron__207(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__208(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__207(rest, _acc, _stack, context, line, offset) do
{:error,
"expected byte in the range ?0..?9, followed by byte in the range ?0..?9, followed by string \"-\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*/\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*\" or string \",\"",
rest, context, line, offset}
end
defp cron__208(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__206(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Rewind and try "," last.
defp cron__209(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__207(rest, [], stack, context, line, offset)
end
# "*" wildcard alternative, emitted as {:wild, "*"}.
defp cron__210(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__211(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__210(rest, acc, stack, context, line, offset) do
cron__209(rest, acc, stack, context, line, offset)
end
defp cron__211(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__206(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Step alternative ("*/N" / "*/NN" -> {:step, n}) for this field's first
# expression; failures rewind via cron__212 toward the "*" alternative.
defp cron__212(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__210(rest, [], stack, context, line, offset)
end
defp cron__213(rest, acc, stack, context, line, offset) do
cron__214(rest, [], [acc | stack], context, line, offset)
end
defp cron__214(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__215(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp cron__214(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
cron__212(rest, acc, stack, context, line, offset)
end
defp cron__215(rest, acc, stack, context, line, offset) do
cron__216(rest, [], [acc | stack], context, line, offset)
end
# Required first digit.
defp cron__216(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__217(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__216(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__212(rest, acc, stack, context, line, offset)
end
# Optional second digit (bounded repeat, max 1).
defp cron__217(rest, acc, stack, context, line, offset) do
cron__219(rest, acc, [1 | stack], context, line, offset)
end
defp cron__219(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__220(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__219(rest, acc, stack, context, line, offset) do
cron__218(rest, acc, stack, context, line, offset)
end
defp cron__218(rest, acc, [_ | stack], context, line, offset) do
cron__221(rest, acc, stack, context, line, offset)
end
defp cron__220(rest, acc, [1 | stack], context, line, offset) do
cron__221(rest, acc, stack, context, line, offset)
end
defp cron__220(rest, acc, [count | stack], context, line, offset) do
cron__219(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits into a single integer.
defp cron__221(rest, user_acc, [acc | stack], context, line, offset) do
cron__222(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# Tag as {:step, n}.
defp cron__222(rest, user_acc, [acc | stack], context, line, offset) do
cron__223(
rest,
[
step:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp cron__223(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__206(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Rewind and try the step alternative next.
defp cron__224(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__213(rest, [], stack, context, line, offset)
end
# Literal alternative ("N"/"NN" -> {:literal, n}) for this field's first
# expression; failures rewind via cron__224.
defp cron__225(rest, acc, stack, context, line, offset) do
cron__226(rest, [], [acc | stack], context, line, offset)
end
defp cron__226(rest, acc, stack, context, line, offset) do
cron__227(rest, [], [acc | stack], context, line, offset)
end
# Required first digit.
defp cron__227(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__228(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__227(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__224(rest, acc, stack, context, line, offset)
end
# Optional second digit (bounded repeat, max 1).
defp cron__228(rest, acc, stack, context, line, offset) do
cron__230(rest, acc, [1 | stack], context, line, offset)
end
defp cron__230(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__231(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__230(rest, acc, stack, context, line, offset) do
cron__229(rest, acc, stack, context, line, offset)
end
defp cron__229(rest, acc, [_ | stack], context, line, offset) do
cron__232(rest, acc, stack, context, line, offset)
end
defp cron__231(rest, acc, [1 | stack], context, line, offset) do
cron__232(rest, acc, stack, context, line, offset)
end
defp cron__231(rest, acc, [count | stack], context, line, offset) do
cron__230(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits into a single integer.
defp cron__232(rest, user_acc, [acc | stack], context, line, offset) do
cron__233(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# Tag as {:literal, n}.
defp cron__233(rest, user_acc, [acc | stack], context, line, offset) do
cron__234(
rest,
[
literal:
case(:lists.reverse(user_acc)) do
[one] ->
one
many ->
raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
end
] ++ acc,
stack,
context,
line,
offset
)
end
defp cron__234(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__206(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Rewind and try the literal alternative next.
defp cron__235(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__225(rest, [], stack, context, line, offset)
end
# Range alternative ("N[N]-N[N]" -> {:range, [lo, hi]}) for this field's
# first expression; failures rewind via cron__235.
defp cron__236(rest, acc, stack, context, line, offset) do
cron__237(rest, [], [acc | stack], context, line, offset)
end
defp cron__237(rest, acc, stack, context, line, offset) do
cron__238(rest, [], [acc | stack], context, line, offset)
end
# Lower bound: required digit + optional second digit.
defp cron__238(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__239(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__238(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__235(rest, acc, stack, context, line, offset)
end
defp cron__239(rest, acc, stack, context, line, offset) do
cron__241(rest, acc, [1 | stack], context, line, offset)
end
defp cron__241(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__242(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__241(rest, acc, stack, context, line, offset) do
cron__240(rest, acc, stack, context, line, offset)
end
defp cron__240(rest, acc, [_ | stack], context, line, offset) do
cron__243(rest, acc, stack, context, line, offset)
end
defp cron__242(rest, acc, [1 | stack], context, line, offset) do
cron__243(rest, acc, stack, context, line, offset)
end
defp cron__242(rest, acc, [count | stack], context, line, offset) do
cron__241(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold lower-bound digits into a single integer.
defp cron__243(rest, user_acc, [acc | stack], context, line, offset) do
cron__244(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# "-" separator between the two bounds.
defp cron__244(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__245(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__244(rest, _acc, stack, context, line, offset) do
[acc | stack] = stack
cron__235(rest, acc, stack, context, line, offset)
end
# Upper bound: required digit + optional second digit.
defp cron__245(rest, acc, stack, context, line, offset) do
cron__246(rest, [], [acc | stack], context, line, offset)
end
defp cron__246(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__247(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__246(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__235(rest, acc, stack, context, line, offset)
end
defp cron__247(rest, acc, stack, context, line, offset) do
cron__249(rest, acc, [1 | stack], context, line, offset)
end
defp cron__249(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
when x0 >= 48 and x0 <= 57 do
cron__250(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__249(rest, acc, stack, context, line, offset) do
cron__248(rest, acc, stack, context, line, offset)
end
defp cron__248(rest, acc, [_ | stack], context, line, offset) do
cron__251(rest, acc, stack, context, line, offset)
end
defp cron__250(rest, acc, [1 | stack], context, line, offset) do
cron__251(rest, acc, stack, context, line, offset)
end
defp cron__250(rest, acc, [count | stack], context, line, offset) do
cron__249(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold upper-bound digits into a single integer.
defp cron__251(rest, user_acc, [acc | stack], context, line, offset) do
cron__252(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
# Tag both bounds as {:range, [lo, hi]} and merge into the field accumulator.
defp cron__252(rest, user_acc, [acc | stack], context, line, offset) do
cron__253(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
defp cron__253(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__206(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Generated (NimbleParsec). cron__206/255: enter the `repeat` loop for further
# comma-separated expressions in the current field. cron__206 pushes a loop
# frame {rest, acc, context, line, offset} so the loop exit (cron__254) can
# recover it; cron__255 pushes a backtrack frame before trying the alternatives.
defp cron__206(rest, acc, stack, context, line, offset) do
  cron__255(rest, [], [{rest, acc, context, line, offset} | stack], context, line, offset)
end
defp cron__255(rest, acc, stack, context, line, offset) do
  cron__286(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
defp cron__257(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__258(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__257(rest, _acc, stack, context, line, offset) do
[_, acc | stack] = stack
cron__254(rest, acc, stack, context, line, offset)
end
defp cron__258(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__256(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__259(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__257(rest, [], stack, context, line, offset)
end
defp cron__260(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
cron__261(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__260(rest, acc, stack, context, line, offset) do
cron__259(rest, acc, stack, context, line, offset)
end
defp cron__261(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__256(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__262(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__260(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). "*/N" step alternative: match the literal "*/",
# then one or two digit bytes (?0..?9), fold them into an integer, and wrap the
# single token as {:step, n}. On any failure the saved accumulator is restored
# and control falls back through cron__262 to the lower-priority alternatives.
defp cron__263(rest, acc, stack, context, line, offset) do
  cron__264(rest, [], [acc | stack], context, line, offset)
end
defp cron__264(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__265(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp cron__264(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__262(rest, acc, stack, context, line, offset)
end
defp cron__265(rest, acc, stack, context, line, offset) do
  cron__266(rest, [], [acc | stack], context, line, offset)
end
# First (required) digit of the step value.
defp cron__266(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__267(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__266(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__262(rest, acc, stack, context, line, offset)
end
# Optional second digit, bounded by the `1` repeat counter pushed here.
defp cron__267(rest, acc, stack, context, line, offset) do
  cron__269(rest, acc, [1 | stack], context, line, offset)
end
defp cron__269(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__270(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__269(rest, acc, stack, context, line, offset) do
  cron__268(rest, acc, stack, context, line, offset)
end
defp cron__268(rest, acc, [_ | stack], context, line, offset) do
  cron__271(rest, acc, stack, context, line, offset)
end
defp cron__270(rest, acc, [1 | stack], context, line, offset) do
  cron__271(rest, acc, stack, context, line, offset)
end
defp cron__270(rest, acc, [count | stack], context, line, offset) do
  cron__269(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits to an integer: head already converted, tail elements raw bytes.
defp cron__271(rest, user_acc, [acc | stack], context, line, offset) do
  cron__272(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# unwrap_and_tag(:step): expect exactly one folded token; tag it {:step, n}.
defp cron__272(rest, user_acc, [acc | stack], context, line, offset) do
  cron__273(
    rest,
    [
      step:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__273(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__256(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__274(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__263(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Bare-number alternative: one or two digit bytes
# folded into an integer and tagged {:literal, n}. Failure falls back through
# cron__274 to the "*/N" step alternative at the restored input position.
defp cron__275(rest, acc, stack, context, line, offset) do
  cron__276(rest, [], [acc | stack], context, line, offset)
end
defp cron__276(rest, acc, stack, context, line, offset) do
  cron__277(rest, [], [acc | stack], context, line, offset)
end
# Required first digit (?0..?9), converted on consumption.
defp cron__277(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__278(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__277(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__274(rest, acc, stack, context, line, offset)
end
# Optional second digit, bounded by the `1` repeat counter.
defp cron__278(rest, acc, stack, context, line, offset) do
  cron__280(rest, acc, [1 | stack], context, line, offset)
end
defp cron__280(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__281(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__280(rest, acc, stack, context, line, offset) do
  cron__279(rest, acc, stack, context, line, offset)
end
defp cron__279(rest, acc, [_ | stack], context, line, offset) do
  cron__282(rest, acc, stack, context, line, offset)
end
defp cron__281(rest, acc, [1 | stack], context, line, offset) do
  cron__282(rest, acc, stack, context, line, offset)
end
defp cron__281(rest, acc, [count | stack], context, line, offset) do
  cron__280(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits to an integer (head converted, tail raw).
defp cron__282(rest, user_acc, [acc | stack], context, line, offset) do
  cron__283(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# unwrap_and_tag(:literal): expect a single folded token.
defp cron__283(rest, user_acc, [acc | stack], context, line, offset) do
  cron__284(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__284(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__256(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__285(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__275(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Highest-priority alternative: an "N-N" range
# (1-2 digits, "-", 1-2 digits) folded into two integers and tagged
# {:range, [lo, hi]}. Any failure falls back through cron__285 to the
# bare-number alternative at the restored position.
defp cron__286(rest, acc, stack, context, line, offset) do
  cron__287(rest, [], [acc | stack], context, line, offset)
end
defp cron__287(rest, acc, stack, context, line, offset) do
  cron__288(rest, [], [acc | stack], context, line, offset)
end
# First digit of the lower bound.
defp cron__288(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__289(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__288(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__285(rest, acc, stack, context, line, offset)
end
# Optional second digit of the lower bound.
defp cron__289(rest, acc, stack, context, line, offset) do
  cron__291(rest, acc, [1 | stack], context, line, offset)
end
defp cron__291(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__292(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__291(rest, acc, stack, context, line, offset) do
  cron__290(rest, acc, stack, context, line, offset)
end
defp cron__290(rest, acc, [_ | stack], context, line, offset) do
  cron__293(rest, acc, stack, context, line, offset)
end
defp cron__292(rest, acc, [1 | stack], context, line, offset) do
  cron__293(rest, acc, stack, context, line, offset)
end
defp cron__292(rest, acc, [count | stack], context, line, offset) do
  cron__291(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold the lower bound's digits into an integer.
defp cron__293(rest, user_acc, [acc | stack], context, line, offset) do
  cron__294(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# Require the "-" separator, then the upper bound's digits.
defp cron__294(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__295(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__294(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__285(rest, acc, stack, context, line, offset)
end
defp cron__295(rest, acc, stack, context, line, offset) do
  cron__296(rest, [], [acc | stack], context, line, offset)
end
defp cron__296(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__297(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__296(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__285(rest, acc, stack, context, line, offset)
end
defp cron__297(rest, acc, stack, context, line, offset) do
  cron__299(rest, acc, [1 | stack], context, line, offset)
end
defp cron__299(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__300(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__299(rest, acc, stack, context, line, offset) do
  cron__298(rest, acc, stack, context, line, offset)
end
defp cron__298(rest, acc, [_ | stack], context, line, offset) do
  cron__301(rest, acc, stack, context, line, offset)
end
defp cron__300(rest, acc, [1 | stack], context, line, offset) do
  cron__301(rest, acc, stack, context, line, offset)
end
defp cron__300(rest, acc, [count | stack], context, line, offset) do
  cron__299(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold the upper bound's digits into an integer.
defp cron__301(rest, user_acc, [acc | stack], context, line, offset) do
  cron__302(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# Tag both bounds as {:range, [lo, hi]} and rejoin the alternative's caller.
defp cron__302(rest, user_acc, [acc | stack], context, line, offset) do
  cron__303(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
defp cron__303(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__256(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Generated (NimbleParsec). Repeat-loop bookkeeping for the days field:
# cron__254 exits the loop, restoring input/acc from the loop frame;
# cron__256 continues it, merging the iteration's tokens into the frame's
# accumulator and re-entering cron__255 at the new position.
defp cron__254(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
  cron__304(rest, acc, stack, context, line, offset)
end
defp cron__256(
       inner_rest,
       inner_acc,
       [{rest, acc, context, line, offset} | stack],
       inner_context,
       inner_line,
       inner_offset
     ) do
  # Discard the stale frame values (kept bound only to silence unused warnings).
  _ = {rest, acc, context, line, offset}
  cron__255(
    inner_rest,
    [],
    [{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
    inner_context,
    inner_line,
    inner_offset
  )
end
# Wrap everything collected for this field as {:days, tokens}.
defp cron__304(rest, user_acc, [acc | stack], context, line, offset) do
  cron__305(rest, [days: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
# Generated (NimbleParsec). Require the single space separating the days field
# from the next field; this separator is mandatory, so failure is a hard
# parse error rather than a backtrack.
defp cron__305(<<" ", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__306(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__305(rest, _acc, _stack, context, line, offset) do
  {:error, "expected string \" \"", rest, context, line, offset}
end
# Enter the next field (month, judging by the JAN..DEC alternatives below),
# pushing a fresh accumulator and a backtrack frame for its first expression.
defp cron__306(rest, acc, stack, context, line, offset) do
  cron__307(rest, [], [acc | stack], context, line, offset)
end
defp cron__307(rest, acc, stack, context, line, offset) do
  cron__359(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
# Generated (NimbleParsec). Month-field alternatives after the named months
# fail: "," separator and "*" wildcard. Since this expression is required,
# exhausting all alternatives yields a hard parse error listing every
# acceptable form (cron__311's second clause).
defp cron__309(rest, acc, stack, context, line, offset) do
  cron__340(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
defp cron__311(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__312(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__311(rest, _acc, _stack, context, line, offset) do
  {:error,
   "expected string \"JAN\" or string \"FEB\" or string \"MAR\" or string \"APR\" or string \"MAY\" or string \"JUN\" or string \"JUL\" or string \"AUG\" or string \"SEP\" or string \"OCT\" or string \"NOV\" or string \"DEC\" or byte in the range ?0..?9, followed by byte in the range ?0..?9, followed by string \"-\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*/\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*\" or string \",\"",
   rest, context, line, offset}
end
defp cron__312(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__310(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__313(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__311(rest, [], stack, context, line, offset)
end
defp cron__314(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__315(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__314(rest, acc, stack, context, line, offset) do
  cron__313(rest, acc, stack, context, line, offset)
end
defp cron__315(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__310(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__316(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__314(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Month-field "*/N" step alternative: literal "*/",
# then 1-2 digit bytes folded to an integer and tagged {:step, n}. Failure
# falls back through cron__316 to the "*" / "," alternatives.
defp cron__317(rest, acc, stack, context, line, offset) do
  cron__318(rest, [], [acc | stack], context, line, offset)
end
defp cron__318(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__319(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp cron__318(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__316(rest, acc, stack, context, line, offset)
end
defp cron__319(rest, acc, stack, context, line, offset) do
  cron__320(rest, [], [acc | stack], context, line, offset)
end
# Required first digit of the step value.
defp cron__320(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__321(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__320(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__316(rest, acc, stack, context, line, offset)
end
# Optional second digit, bounded by the `1` repeat counter.
defp cron__321(rest, acc, stack, context, line, offset) do
  cron__323(rest, acc, [1 | stack], context, line, offset)
end
defp cron__323(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__324(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__323(rest, acc, stack, context, line, offset) do
  cron__322(rest, acc, stack, context, line, offset)
end
defp cron__322(rest, acc, [_ | stack], context, line, offset) do
  cron__325(rest, acc, stack, context, line, offset)
end
defp cron__324(rest, acc, [1 | stack], context, line, offset) do
  cron__325(rest, acc, stack, context, line, offset)
end
defp cron__324(rest, acc, [count | stack], context, line, offset) do
  cron__323(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits (head converted, tail raw) into the step integer.
defp cron__325(rest, user_acc, [acc | stack], context, line, offset) do
  cron__326(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# unwrap_and_tag(:step).
defp cron__326(rest, user_acc, [acc | stack], context, line, offset) do
  cron__327(
    rest,
    [
      step:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__327(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__310(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__328(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__317(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Month-field bare-number alternative: 1-2 digit
# bytes folded into an integer and tagged {:literal, n}. Failure falls back
# through cron__328 to the "*/N" alternative.
defp cron__329(rest, acc, stack, context, line, offset) do
  cron__330(rest, [], [acc | stack], context, line, offset)
end
defp cron__330(rest, acc, stack, context, line, offset) do
  cron__331(rest, [], [acc | stack], context, line, offset)
end
defp cron__331(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__332(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__331(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__328(rest, acc, stack, context, line, offset)
end
defp cron__332(rest, acc, stack, context, line, offset) do
  cron__334(rest, acc, [1 | stack], context, line, offset)
end
defp cron__334(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__335(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__334(rest, acc, stack, context, line, offset) do
  cron__333(rest, acc, stack, context, line, offset)
end
defp cron__333(rest, acc, [_ | stack], context, line, offset) do
  cron__336(rest, acc, stack, context, line, offset)
end
defp cron__335(rest, acc, [1 | stack], context, line, offset) do
  cron__336(rest, acc, stack, context, line, offset)
end
defp cron__335(rest, acc, [count | stack], context, line, offset) do
  cron__334(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold the digits into an integer.
defp cron__336(rest, user_acc, [acc | stack], context, line, offset) do
  cron__337(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# unwrap_and_tag(:literal).
defp cron__337(rest, user_acc, [acc | stack], context, line, offset) do
  cron__338(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__338(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__310(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__339(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__329(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Month-field "N-N" range alternative (1-2 digits,
# "-", 1-2 digits) folded into two integers and tagged {:range, [lo, hi]}.
# Failure falls back through cron__339 to the bare-number alternative.
defp cron__340(rest, acc, stack, context, line, offset) do
  cron__341(rest, [], [acc | stack], context, line, offset)
end
defp cron__341(rest, acc, stack, context, line, offset) do
  cron__342(rest, [], [acc | stack], context, line, offset)
end
# First digit of the lower bound.
defp cron__342(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__343(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__342(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__339(rest, acc, stack, context, line, offset)
end
defp cron__343(rest, acc, stack, context, line, offset) do
  cron__345(rest, acc, [1 | stack], context, line, offset)
end
defp cron__345(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__346(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__345(rest, acc, stack, context, line, offset) do
  cron__344(rest, acc, stack, context, line, offset)
end
defp cron__344(rest, acc, [_ | stack], context, line, offset) do
  cron__347(rest, acc, stack, context, line, offset)
end
defp cron__346(rest, acc, [1 | stack], context, line, offset) do
  cron__347(rest, acc, stack, context, line, offset)
end
defp cron__346(rest, acc, [count | stack], context, line, offset) do
  cron__345(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold the lower bound's digits.
defp cron__347(rest, user_acc, [acc | stack], context, line, offset) do
  cron__348(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# Require "-" then the upper bound.
defp cron__348(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__349(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__348(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__339(rest, acc, stack, context, line, offset)
end
defp cron__349(rest, acc, stack, context, line, offset) do
  cron__350(rest, [], [acc | stack], context, line, offset)
end
defp cron__350(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__351(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__350(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__339(rest, acc, stack, context, line, offset)
end
defp cron__351(rest, acc, stack, context, line, offset) do
  cron__353(rest, acc, [1 | stack], context, line, offset)
end
defp cron__353(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__354(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__353(rest, acc, stack, context, line, offset) do
  cron__352(rest, acc, stack, context, line, offset)
end
defp cron__352(rest, acc, [_ | stack], context, line, offset) do
  cron__355(rest, acc, stack, context, line, offset)
end
defp cron__354(rest, acc, [1 | stack], context, line, offset) do
  cron__355(rest, acc, stack, context, line, offset)
end
defp cron__354(rest, acc, [count | stack], context, line, offset) do
  cron__353(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold the upper bound's digits.
defp cron__355(rest, user_acc, [acc | stack], context, line, offset) do
  cron__356(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# Tag as {:range, [lo, hi]} and rejoin the alternative's caller.
defp cron__356(rest, user_acc, [acc | stack], context, line, offset) do
  cron__357(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
defp cron__357(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__310(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Success path for any month alternative: merge tokens and continue via cron__308.
defp cron__310(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__308(rest, acc ++ previous_acc, stack, context, line, offset)
end
# Named-month alternatives failed: retry from the saved position with cron__309.
defp cron__358(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__309(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Entry to the named-month alternative (JAN..DEC):
# push a fresh accumulator before attempting the string matches in cron__360.
defp cron__359(rest, acc, stack, context, line, offset) do
  cron__360(rest, [], [acc | stack], context, line, offset)
end
# Generated (NimbleParsec). Match a three-letter month name and push its month
# number (JAN=1 .. DEC=12) onto the accumulator; advance the offset by 3 bytes.
# The final clause fires when no name matches: it restores the saved
# accumulator and falls back to the numeric alternatives via cron__358.
#
# Fix: the original spelled the replacements for AUG..DEC as single-quoted
# charlist literals ('\b', '\t', '\n', '\v', '\f' — i.e. [8]..[12]), which is
# both obscure and deprecated syntax in modern Elixir. They are rewritten as
# explicit lists, matching the [1]..[7] clauses; behavior is identical since
# '\b' ++ acc == [8] ++ acc, etc.
defp cron__360(<<"JAN", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [1] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"FEB", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [2] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"MAR", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [3] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"APR", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [4] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"MAY", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [5] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"JUN", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [6] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"JUL", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [7] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"AUG", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [8] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"SEP", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [9] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"OCT", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [10] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"NOV", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [11] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(<<"DEC", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__361(rest, [12] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__360(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__358(rest, acc, stack, context, line, offset)
end
# Generated (NimbleParsec). cron__361: unwrap_and_tag(:literal) — the matched
# month name produced exactly one integer token; tag it {:literal, n} (raises
# if the generator's single-token invariant were ever violated).
defp cron__361(rest, user_acc, [acc | stack], context, line, offset) do
  cron__362(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__362(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__308(rest, acc ++ previous_acc, stack, context, line, offset)
end
# cron__308: enter the repeat loop for further comma-separated expressions in
# this field, saving a loop frame for the eventual exit (handled past this chunk).
defp cron__308(rest, acc, stack, context, line, offset) do
  cron__364(rest, [], [{rest, acc, context, line, offset} | stack], context, line, offset)
end
# Generated (NimbleParsec). Repeat-loop alternatives for subsequent expressions
# in the same field: "," separator (cron__368) and "*" wildcard (cron__371).
# cron__368's failure clause pops three backtrack frames plus the saved
# accumulator before exiting the loop via cron__363 (defined outside this chunk).
defp cron__364(rest, acc, stack, context, line, offset) do
  cron__416(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
defp cron__366(rest, acc, stack, context, line, offset) do
  cron__397(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
defp cron__368(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__369(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__368(rest, _acc, stack, context, line, offset) do
  [_, _, _, acc | stack] = stack
  cron__363(rest, acc, stack, context, line, offset)
end
defp cron__369(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__367(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__370(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__368(rest, [], stack, context, line, offset)
end
defp cron__371(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__372(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__371(rest, acc, stack, context, line, offset) do
  cron__370(rest, acc, stack, context, line, offset)
end
defp cron__372(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__367(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__373(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__371(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Repeat-loop "*/N" step alternative: literal "*/",
# then 1-2 digit bytes folded to an integer and tagged {:step, n}. Failure
# falls back through cron__373 to the "*" / "," alternatives.
defp cron__374(rest, acc, stack, context, line, offset) do
  cron__375(rest, [], [acc | stack], context, line, offset)
end
defp cron__375(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__376(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp cron__375(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__373(rest, acc, stack, context, line, offset)
end
defp cron__376(rest, acc, stack, context, line, offset) do
  cron__377(rest, [], [acc | stack], context, line, offset)
end
# Required first digit of the step value.
defp cron__377(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__378(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__377(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__373(rest, acc, stack, context, line, offset)
end
# Optional second digit, bounded by the `1` repeat counter.
defp cron__378(rest, acc, stack, context, line, offset) do
  cron__380(rest, acc, [1 | stack], context, line, offset)
end
defp cron__380(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__381(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__380(rest, acc, stack, context, line, offset) do
  cron__379(rest, acc, stack, context, line, offset)
end
defp cron__379(rest, acc, [_ | stack], context, line, offset) do
  cron__382(rest, acc, stack, context, line, offset)
end
defp cron__381(rest, acc, [1 | stack], context, line, offset) do
  cron__382(rest, acc, stack, context, line, offset)
end
defp cron__381(rest, acc, [count | stack], context, line, offset) do
  cron__380(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold digits (head converted, tail raw) into the step integer.
defp cron__382(rest, user_acc, [acc | stack], context, line, offset) do
  cron__383(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# unwrap_and_tag(:step).
defp cron__383(rest, user_acc, [acc | stack], context, line, offset) do
  cron__384(
    rest,
    [
      step:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__384(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__367(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__385(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__374(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Repeat-loop bare-number alternative: 1-2 digit
# bytes folded into an integer and tagged {:literal, n}. Failure falls back
# through cron__385 to the "*/N" alternative.
defp cron__386(rest, acc, stack, context, line, offset) do
  cron__387(rest, [], [acc | stack], context, line, offset)
end
defp cron__387(rest, acc, stack, context, line, offset) do
  cron__388(rest, [], [acc | stack], context, line, offset)
end
defp cron__388(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__389(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__388(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__385(rest, acc, stack, context, line, offset)
end
defp cron__389(rest, acc, stack, context, line, offset) do
  cron__391(rest, acc, [1 | stack], context, line, offset)
end
defp cron__391(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__392(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__391(rest, acc, stack, context, line, offset) do
  cron__390(rest, acc, stack, context, line, offset)
end
defp cron__390(rest, acc, [_ | stack], context, line, offset) do
  cron__393(rest, acc, stack, context, line, offset)
end
defp cron__392(rest, acc, [1 | stack], context, line, offset) do
  cron__393(rest, acc, stack, context, line, offset)
end
defp cron__392(rest, acc, [count | stack], context, line, offset) do
  cron__391(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold the digits into an integer.
defp cron__393(rest, user_acc, [acc | stack], context, line, offset) do
  cron__394(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# unwrap_and_tag(:literal).
defp cron__394(rest, user_acc, [acc | stack], context, line, offset) do
  cron__395(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__395(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__367(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__396(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__386(rest, [], stack, context, line, offset)
end
# Generated (NimbleParsec). Repeat-loop "N-N" range alternative (1-2 digits,
# "-", 1-2 digits). Failure falls back through cron__396 to the bare-number
# alternative. The tag/merge tail of this chain continues past this chunk.
defp cron__397(rest, acc, stack, context, line, offset) do
  cron__398(rest, [], [acc | stack], context, line, offset)
end
defp cron__398(rest, acc, stack, context, line, offset) do
  cron__399(rest, [], [acc | stack], context, line, offset)
end
# First digit of the lower bound.
defp cron__399(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__400(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__399(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__396(rest, acc, stack, context, line, offset)
end
defp cron__400(rest, acc, stack, context, line, offset) do
  cron__402(rest, acc, [1 | stack], context, line, offset)
end
defp cron__402(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__403(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__402(rest, acc, stack, context, line, offset) do
  cron__401(rest, acc, stack, context, line, offset)
end
defp cron__401(rest, acc, [_ | stack], context, line, offset) do
  cron__404(rest, acc, stack, context, line, offset)
end
defp cron__403(rest, acc, [1 | stack], context, line, offset) do
  cron__404(rest, acc, stack, context, line, offset)
end
defp cron__403(rest, acc, [count | stack], context, line, offset) do
  cron__402(rest, acc, [count - 1 | stack], context, line, offset)
end
# Fold the lower bound's digits into an integer.
defp cron__404(rest, user_acc, [acc | stack], context, line, offset) do
  cron__405(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# Require "-" then the upper bound.
defp cron__405(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__406(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__405(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__396(rest, acc, stack, context, line, offset)
end
defp cron__406(rest, acc, stack, context, line, offset) do
  cron__407(rest, [], [acc | stack], context, line, offset)
end
defp cron__407(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__408(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__407(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__396(rest, acc, stack, context, line, offset)
end
defp cron__408(rest, acc, stack, context, line, offset) do
  cron__410(rest, acc, [1 | stack], context, line, offset)
end
defp cron__410(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__411(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__410(rest, acc, stack, context, line, offset) do
  cron__409(rest, acc, stack, context, line, offset)
end
defp cron__409(rest, acc, [_ | stack], context, line, offset) do
cron__412(rest, acc, stack, context, line, offset)
end
defp cron__411(rest, acc, [1 | stack], context, line, offset) do
cron__412(rest, acc, stack, context, line, offset)
end
defp cron__411(rest, acc, [count | stack], context, line, offset) do
cron__410(rest, acc, [count - 1 | stack], context, line, offset)
end
defp cron__412(rest, user_acc, [acc | stack], context, line, offset) do
cron__413(
rest,
(
[head | tail] = :lists.reverse(user_acc)
[:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
) ++ acc,
stack,
context,
line,
offset
)
end
defp cron__413(rest, user_acc, [acc | stack], context, line, offset) do
cron__414(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
defp cron__414(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__367(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__367(rest, acc, [_, previous_acc | stack], context, line, offset) do
cron__365(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__415(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
cron__366(rest, [], stack, context, line, offset)
end
# cron__416..cron__419: three-letter month-name literals mapped to their
# month numbers 1..12, tagged as {:literal, n}.
defp cron__416(rest, acc, stack, context, line, offset) do
  cron__417(rest, [], [acc | stack], context, line, offset)
end
defp cron__417(<<"JAN", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, [1] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"FEB", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, [2] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"MAR", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, [3] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"APR", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, [4] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"MAY", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, [5] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"JUN", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, [6] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"JUL", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, [7] ++ acc, stack, context, comb__line, comb__offset + 3)
end
# The charlist sugar below is the generated encoding of month numbers
# 8..12: '\b' == [8] (AUG), '\t' == [9] (SEP), '\n' == [10] (OCT),
# '\v' == [11] (NOV), '\f' == [12] (DEC).
defp cron__417(<<"AUG", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, '\b' ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"SEP", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, '\t' ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"OCT", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, '\n' ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"NOV", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, '\v' ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__417(<<"DEC", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__418(rest, '\f' ++ acc, stack, context, comb__line, comb__offset + 3)
end
# No month name matched — backtrack via cron__415.
defp cron__417(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__415(rest, acc, stack, context, line, offset)
end
# Tag the single month number as {:literal, n}.
defp cron__418(rest, user_acc, [acc | stack], context, line, offset) do
  cron__419(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__419(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__365(rest, acc ++ previous_acc, stack, context, line, offset)
end
# cron__363: repeat loop exhausted — restore the last successful frame
# and finish the month field (cron__420).
defp cron__363(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
  cron__420(rest, acc, stack, context, line, offset)
end
# cron__365: one month expression succeeded — fold its tokens into the
# saved frame and attempt another iteration via cron__364.
defp cron__365(
       inner_rest,
       inner_acc,
       [{rest, acc, context, line, offset} | stack],
       inner_context,
       inner_line,
       inner_offset
     ) do
  _ = {rest, acc, context, line, offset}
  cron__364(
    inner_rest,
    [],
    [{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
    inner_context,
    inner_line,
    inner_offset
  )
end
# Wrap all collected month tokens, back in input order, as {:months, tokens}.
defp cron__420(rest, user_acc, [acc | stack], context, line, offset) do
  cron__421(rest, [months: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
# Mandatory single space between the month and weekday fields; this is a
# hard (non-backtracking) parse error when absent.
defp cron__421(<<" ", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__422(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__421(rest, _acc, _stack, context, line, offset) do
  {:error, "expected string \" \"", rest, context, line, offset}
end
# Enter the weekday field: first token parsed via the name/numeric choice
# starting at cron__475.
defp cron__422(rest, acc, stack, context, line, offset) do
  cron__423(rest, [], [acc | stack], context, line, offset)
end
defp cron__423(rest, acc, stack, context, line, offset) do
  cron__475(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
# cron__425..cron__444: numeric alternatives for the first weekday token:
# a "," continuation, a "*" wildcard, a "*/NN" step, or a plain 1-2 digit
# literal. The failure chain ends in cron__427, which emits the combined
# error message for the whole weekday choice.
defp cron__425(rest, acc, stack, context, line, offset) do
  cron__456(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
defp cron__427(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__428(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
# Last alternative failed — report the aggregated expectation message.
defp cron__427(rest, _acc, _stack, context, line, offset) do
  {:error,
   "expected string \"MON\" or string \"TUE\" or string \"WED\" or string \"THU\" or string \"FRI\" or string \"SAT\" or string \"SUN\" or byte in the range ?0..?9, followed by byte in the range ?0..?9, followed by string \"-\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*/\", followed by byte in the range ?0..?9, followed by byte in the range ?0..?9 or string \"*\" or string \",\"",
   rest, context, line, offset}
end
defp cron__428(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__426(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__429(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__427(rest, [], stack, context, line, offset)
end
# "*" wildcard, emitted as {:wild, "*"}.
defp cron__430(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__431(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__430(rest, acc, stack, context, line, offset) do
  cron__429(rest, acc, stack, context, line, offset)
end
defp cron__431(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__426(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__432(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__430(rest, [], stack, context, line, offset)
end
# "*/NN" step: literal "*/" then a 1-2 digit integer, tagged {:step, n}.
defp cron__433(rest, acc, stack, context, line, offset) do
  cron__434(rest, [], [acc | stack], context, line, offset)
end
defp cron__434(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__435(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp cron__434(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__432(rest, acc, stack, context, line, offset)
end
defp cron__435(rest, acc, stack, context, line, offset) do
  cron__436(rest, [], [acc | stack], context, line, offset)
end
defp cron__436(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__437(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__436(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__432(rest, acc, stack, context, line, offset)
end
defp cron__437(rest, acc, stack, context, line, offset) do
  cron__439(rest, acc, [1 | stack], context, line, offset)
end
defp cron__439(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__440(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__439(rest, acc, stack, context, line, offset) do
  cron__438(rest, acc, stack, context, line, offset)
end
defp cron__438(rest, acc, [_ | stack], context, line, offset) do
  cron__441(rest, acc, stack, context, line, offset)
end
defp cron__440(rest, acc, [1 | stack], context, line, offset) do
  cron__441(rest, acc, stack, context, line, offset)
end
defp cron__440(rest, acc, [count | stack], context, line, offset) do
  cron__439(rest, acc, [count - 1 | stack], context, line, offset)
end
# Reduce step digits to an integer, then tag as {:step, n}.
defp cron__441(rest, user_acc, [acc | stack], context, line, offset) do
  cron__442(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__442(rest, user_acc, [acc | stack], context, line, offset) do
  cron__443(
    rest,
    [
      step:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__443(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__426(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__444(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__433(rest, [], stack, context, line, offset)
end
# cron__445..cron__454: plain 1-2 digit weekday literal, tagged
# {:literal, n}; failure backtracks via cron__444.
defp cron__445(rest, acc, stack, context, line, offset) do
  cron__446(rest, [], [acc | stack], context, line, offset)
end
defp cron__446(rest, acc, stack, context, line, offset) do
  cron__447(rest, [], [acc | stack], context, line, offset)
end
defp cron__447(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__448(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__447(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__444(rest, acc, stack, context, line, offset)
end
defp cron__448(rest, acc, stack, context, line, offset) do
  cron__450(rest, acc, [1 | stack], context, line, offset)
end
defp cron__450(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__451(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__450(rest, acc, stack, context, line, offset) do
  cron__449(rest, acc, stack, context, line, offset)
end
defp cron__449(rest, acc, [_ | stack], context, line, offset) do
  cron__452(rest, acc, stack, context, line, offset)
end
defp cron__451(rest, acc, [1 | stack], context, line, offset) do
  cron__452(rest, acc, stack, context, line, offset)
end
defp cron__451(rest, acc, [count | stack], context, line, offset) do
  cron__450(rest, acc, [count - 1 | stack], context, line, offset)
end
# Reduce the digits and tag as {:literal, n}.
defp cron__452(rest, user_acc, [acc | stack], context, line, offset) do
  cron__453(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__453(rest, user_acc, [acc | stack], context, line, offset) do
  cron__454(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__454(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__426(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__455(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__445(rest, [], stack, context, line, offset)
end
# cron__456..cron__463: lower bound of a "NN-NN" weekday range; failure
# backtracks via cron__455 into the plain-literal alternative.
defp cron__456(rest, acc, stack, context, line, offset) do
  cron__457(rest, [], [acc | stack], context, line, offset)
end
defp cron__457(rest, acc, stack, context, line, offset) do
  cron__458(rest, [], [acc | stack], context, line, offset)
end
defp cron__458(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__459(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__458(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__455(rest, acc, stack, context, line, offset)
end
defp cron__459(rest, acc, stack, context, line, offset) do
  cron__461(rest, acc, [1 | stack], context, line, offset)
end
defp cron__461(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__462(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__461(rest, acc, stack, context, line, offset) do
  cron__460(rest, acc, stack, context, line, offset)
end
defp cron__460(rest, acc, [_ | stack], context, line, offset) do
  cron__463(rest, acc, stack, context, line, offset)
end
defp cron__462(rest, acc, [1 | stack], context, line, offset) do
  cron__463(rest, acc, stack, context, line, offset)
end
defp cron__462(rest, acc, [count | stack], context, line, offset) do
  cron__461(rest, acc, [count - 1 | stack], context, line, offset)
end
# Reduce the lower-bound digits into one integer.
defp cron__463(rest, user_acc, [acc | stack], context, line, offset) do
  cron__464(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# cron__464..cron__473: "-" separator and upper bound of the weekday
# range, emitted as {:range, [lo, hi]}; failure backtracks via cron__455.
defp cron__464(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__465(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__464(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__455(rest, acc, stack, context, line, offset)
end
defp cron__465(rest, acc, stack, context, line, offset) do
  cron__466(rest, [], [acc | stack], context, line, offset)
end
defp cron__466(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__467(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__466(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__455(rest, acc, stack, context, line, offset)
end
defp cron__467(rest, acc, stack, context, line, offset) do
  cron__469(rest, acc, [1 | stack], context, line, offset)
end
defp cron__469(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__470(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__469(rest, acc, stack, context, line, offset) do
  cron__468(rest, acc, stack, context, line, offset)
end
defp cron__468(rest, acc, [_ | stack], context, line, offset) do
  cron__471(rest, acc, stack, context, line, offset)
end
defp cron__470(rest, acc, [1 | stack], context, line, offset) do
  cron__471(rest, acc, stack, context, line, offset)
end
defp cron__470(rest, acc, [count | stack], context, line, offset) do
  cron__469(rest, acc, [count - 1 | stack], context, line, offset)
end
# Reduce the upper-bound digits into one integer.
defp cron__471(rest, user_acc, [acc | stack], context, line, offset) do
  cron__472(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__472(rest, user_acc, [acc | stack], context, line, offset) do
  cron__473(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
defp cron__473(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__426(rest, acc ++ previous_acc, stack, context, line, offset)
end
# cron__426: a weekday alternative succeeded — merge and continue.
defp cron__426(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__424(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__474(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__425(rest, [], stack, context, line, offset)
end
# cron__475..cron__478: three-letter weekday names mapped to cron weekday
# numbers (SUN == 0, MON == 1 .. SAT == 6), tagged as {:literal, n}.
defp cron__475(rest, acc, stack, context, line, offset) do
  cron__476(rest, [], [acc | stack], context, line, offset)
end
defp cron__476(<<"MON", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__477(rest, [1] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__476(<<"TUE", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__477(rest, [2] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__476(<<"WED", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__477(rest, [3] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__476(<<"THU", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__477(rest, [4] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__476(<<"FRI", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__477(rest, [5] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__476(<<"SAT", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__477(rest, [6] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__476(<<"SUN", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__477(rest, [0] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__476(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__474(rest, acc, stack, context, line, offset)
end
defp cron__477(rest, user_acc, [acc | stack], context, line, offset) do
  cron__478(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__478(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__424(rest, acc ++ previous_acc, stack, context, line, offset)
end
# cron__424: one weekday token parsed — save a repeat frame and loop for
# further weekday tokens via cron__480.
defp cron__424(rest, acc, stack, context, line, offset) do
  cron__480(rest, [], [{rest, acc, context, line, offset} | stack], context, line, offset)
end
# cron__480..cron__501: repeat iteration for additional weekday tokens —
# the same choice of ",", "*", "*/NN", literal, range, or weekday name.
defp cron__480(rest, acc, stack, context, line, offset) do
  cron__532(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
defp cron__482(rest, acc, stack, context, line, offset) do
  cron__513(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
end
defp cron__484(<<",", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__485(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
# No further weekday token matched — unwind the choice frames and exit
# the repeat loop via cron__479.
defp cron__484(rest, _acc, stack, context, line, offset) do
  [_, _, _, acc | stack] = stack
  cron__479(rest, acc, stack, context, line, offset)
end
defp cron__485(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__483(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__486(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__484(rest, [], stack, context, line, offset)
end
defp cron__487(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__488(rest, [wild: "*"] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__487(rest, acc, stack, context, line, offset) do
  cron__486(rest, acc, stack, context, line, offset)
end
defp cron__488(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__483(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__489(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__487(rest, [], stack, context, line, offset)
end
# "*/NN" step alternative inside the repeat loop, tagged {:step, n}.
defp cron__490(rest, acc, stack, context, line, offset) do
  cron__491(rest, [], [acc | stack], context, line, offset)
end
defp cron__491(<<"*/", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__492(rest, [] ++ acc, stack, context, comb__line, comb__offset + 2)
end
defp cron__491(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__489(rest, acc, stack, context, line, offset)
end
defp cron__492(rest, acc, stack, context, line, offset) do
  cron__493(rest, [], [acc | stack], context, line, offset)
end
defp cron__493(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__494(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__493(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__489(rest, acc, stack, context, line, offset)
end
defp cron__494(rest, acc, stack, context, line, offset) do
  cron__496(rest, acc, [1 | stack], context, line, offset)
end
defp cron__496(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__497(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__496(rest, acc, stack, context, line, offset) do
  cron__495(rest, acc, stack, context, line, offset)
end
defp cron__495(rest, acc, [_ | stack], context, line, offset) do
  cron__498(rest, acc, stack, context, line, offset)
end
defp cron__497(rest, acc, [1 | stack], context, line, offset) do
  cron__498(rest, acc, stack, context, line, offset)
end
defp cron__497(rest, acc, [count | stack], context, line, offset) do
  cron__496(rest, acc, [count - 1 | stack], context, line, offset)
end
defp cron__498(rest, user_acc, [acc | stack], context, line, offset) do
  cron__499(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__499(rest, user_acc, [acc | stack], context, line, offset) do
  cron__500(
    rest,
    [
      step:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__500(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__483(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__501(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__490(rest, [], stack, context, line, offset)
end
# cron__502..cron__511: plain 1-2 digit weekday literal inside the repeat
# loop, tagged {:literal, n}; failure backtracks via cron__501.
defp cron__502(rest, acc, stack, context, line, offset) do
  cron__503(rest, [], [acc | stack], context, line, offset)
end
defp cron__503(rest, acc, stack, context, line, offset) do
  cron__504(rest, [], [acc | stack], context, line, offset)
end
defp cron__504(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__505(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__504(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__501(rest, acc, stack, context, line, offset)
end
defp cron__505(rest, acc, stack, context, line, offset) do
  cron__507(rest, acc, [1 | stack], context, line, offset)
end
defp cron__507(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__508(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__507(rest, acc, stack, context, line, offset) do
  cron__506(rest, acc, stack, context, line, offset)
end
defp cron__506(rest, acc, [_ | stack], context, line, offset) do
  cron__509(rest, acc, stack, context, line, offset)
end
defp cron__508(rest, acc, [1 | stack], context, line, offset) do
  cron__509(rest, acc, stack, context, line, offset)
end
defp cron__508(rest, acc, [count | stack], context, line, offset) do
  cron__507(rest, acc, [count - 1 | stack], context, line, offset)
end
defp cron__509(rest, user_acc, [acc | stack], context, line, offset) do
  cron__510(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__510(rest, user_acc, [acc | stack], context, line, offset) do
  cron__511(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__511(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__483(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__512(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__502(rest, [], stack, context, line, offset)
end
# cron__513..cron__520: lower bound of a "NN-NN" weekday range inside the
# repeat loop; failure backtracks via cron__512.
defp cron__513(rest, acc, stack, context, line, offset) do
  cron__514(rest, [], [acc | stack], context, line, offset)
end
defp cron__514(rest, acc, stack, context, line, offset) do
  cron__515(rest, [], [acc | stack], context, line, offset)
end
defp cron__515(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__516(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__515(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__512(rest, acc, stack, context, line, offset)
end
defp cron__516(rest, acc, stack, context, line, offset) do
  cron__518(rest, acc, [1 | stack], context, line, offset)
end
defp cron__518(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__519(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__518(rest, acc, stack, context, line, offset) do
  cron__517(rest, acc, stack, context, line, offset)
end
defp cron__517(rest, acc, [_ | stack], context, line, offset) do
  cron__520(rest, acc, stack, context, line, offset)
end
defp cron__519(rest, acc, [1 | stack], context, line, offset) do
  cron__520(rest, acc, stack, context, line, offset)
end
defp cron__519(rest, acc, [count | stack], context, line, offset) do
  cron__518(rest, acc, [count - 1 | stack], context, line, offset)
end
defp cron__520(rest, user_acc, [acc | stack], context, line, offset) do
  cron__521(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
# The "-" separator; absence backtracks into the literal alternative.
defp cron__521(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__522(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__521(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__512(rest, acc, stack, context, line, offset)
end
# cron__522..cron__530: upper bound of the weekday range inside the
# repeat loop, emitted as {:range, [lo, hi]}.
defp cron__522(rest, acc, stack, context, line, offset) do
  cron__523(rest, [], [acc | stack], context, line, offset)
end
defp cron__523(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__524(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__523(rest, _acc, stack, context, line, offset) do
  [_, acc | stack] = stack
  cron__512(rest, acc, stack, context, line, offset)
end
defp cron__524(rest, acc, stack, context, line, offset) do
  cron__526(rest, acc, [1 | stack], context, line, offset)
end
defp cron__526(<<x0::integer, rest::binary>>, acc, stack, context, comb__line, comb__offset)
     when x0 >= 48 and x0 <= 57 do
  cron__527(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1)
end
defp cron__526(rest, acc, stack, context, line, offset) do
  cron__525(rest, acc, stack, context, line, offset)
end
defp cron__525(rest, acc, [_ | stack], context, line, offset) do
  cron__528(rest, acc, stack, context, line, offset)
end
defp cron__527(rest, acc, [1 | stack], context, line, offset) do
  cron__528(rest, acc, stack, context, line, offset)
end
defp cron__527(rest, acc, [count | stack], context, line, offset) do
  cron__526(rest, acc, [count - 1 | stack], context, line, offset)
end
defp cron__528(rest, user_acc, [acc | stack], context, line, offset) do
  cron__529(
    rest,
    (
      [head | tail] = :lists.reverse(user_acc)
      [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)]
    ) ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__529(rest, user_acc, [acc | stack], context, line, offset) do
  cron__530(rest, [range: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
defp cron__530(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__483(rest, acc ++ previous_acc, stack, context, line, offset)
end
# cron__483: a repeat-loop alternative succeeded — merge and continue.
defp cron__483(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__481(rest, acc ++ previous_acc, stack, context, line, offset)
end
defp cron__531(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
  cron__482(rest, [], stack, context, line, offset)
end
# cron__532..cron__535: weekday-name literals inside the repeat loop
# (SUN == 0, MON == 1 .. SAT == 6), tagged {:literal, n}.
defp cron__532(rest, acc, stack, context, line, offset) do
  cron__533(rest, [], [acc | stack], context, line, offset)
end
defp cron__533(<<"MON", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__534(rest, [1] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__533(<<"TUE", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__534(rest, [2] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__533(<<"WED", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__534(rest, [3] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__533(<<"THU", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__534(rest, [4] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__533(<<"FRI", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__534(rest, [5] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__533(<<"SAT", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__534(rest, [6] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__533(<<"SUN", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
  cron__534(rest, [0] ++ acc, stack, context, comb__line, comb__offset + 3)
end
defp cron__533(rest, _acc, stack, context, line, offset) do
  [acc | stack] = stack
  cron__531(rest, acc, stack, context, line, offset)
end
defp cron__534(rest, user_acc, [acc | stack], context, line, offset) do
  cron__535(
    rest,
    [
      literal:
        case(:lists.reverse(user_acc)) do
          [one] ->
            one
          many ->
            raise("unwrap_and_tag/3 expected a single token, got: #{inspect(many)}")
        end
    ] ++ acc,
    stack,
    context,
    line,
    offset
  )
end
defp cron__535(rest, acc, [_, previous_acc | stack], context, line, offset) do
  cron__481(rest, acc ++ previous_acc, stack, context, line, offset)
end
# cron__479: repeat loop exhausted — restore the last successful frame
# and finish the weekday field.
defp cron__479(_, _, [{rest, acc, context, line, offset} | stack], _, _, _) do
  cron__536(rest, acc, stack, context, line, offset)
end
# cron__481: one repetition succeeded — fold its tokens into the saved
# frame and attempt another iteration via cron__480.
defp cron__481(
       inner_rest,
       inner_acc,
       [{rest, acc, context, line, offset} | stack],
       inner_context,
       inner_line,
       inner_offset
     ) do
  _ = {rest, acc, context, line, offset}
  cron__480(
    inner_rest,
    [],
    [{inner_rest, inner_acc ++ acc, inner_context, inner_line, inner_offset} | stack],
    inner_context,
    inner_line,
    inner_offset
  )
end
# Wrap all collected weekday tokens, back in input order, as
# {:weekdays, tokens}.
defp cron__536(rest, user_acc, [acc | stack], context, line, offset) do
  cron__537(rest, [weekdays: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
end
# Final accept state: return the parse result and any unconsumed input.
defp cron__537(rest, acc, _stack, context, line, offset) do
  {:ok, acc, rest, context, line, offset}
end
end
|
lib/oban/crontab/parser.ex
| 0.841289
| 0.441613
|
parser.ex
|
starcoder
|
defmodule Simplify do
  @moduledoc """
  Implementation of the [Ramer-Douglas-Peucker](https://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm)
  algorithm for reducing the number of points used to represent a curve.

  `simplify/2` accepts a list of `{x, y}` coordinate tuples and a tolerance,
  and removes points that lie less than the tolerance away from the
  simplified curve:

  ```elixir
  points = [{0, 0}, {0.05, 0.05}, {-0.05, 0.5}, {0, 1}, {0.05, 1.1}, {1, 1}, {0.5, 0.5}, {0, 0.0001}]
  Simplify.simplify(points, 0.1) # => [{0, 0}, {0.05, 1.1}, {1, 1}, {0, 0.0001}]
  ```

  It also accepts a `Geo.LineString` struct as produced by the conversion
  functions in the Geo project (https://github.com/bryanjos/geo), allowing
  easy import of GeoJSON or WKT/WKB formats. That variant returns a
  `Geo.LineString` of the simplified curve:

  ```elixir
  "{\\"type\\":\\"LineString\\",\\"coordinates\\":[[0,0],[0.05,0.05],[-0.05,0.5],[0,1],[0.05,1.1],[1,1],[0.5,0.5],[0,0.0001]]}"
  |> Jason.decode!
  |> Geo.JSON.decode
  |> Simplify.simplify(0.1)
  |> Geo.JSON.encode
  |> Jason.encode! # => "{\\"coordinates\\":[[0,0],[0.05,1.1],[1,1],[0,0.0001]],\\"type\\":\\"LineString\\"}"
  ```
  """

  @type point :: {number, number}

  @doc false
  @spec simplify(list(point), number) :: list(point)
  def simplify(coordinates, tolerance) when is_list(coordinates) do
    # The recursion compares squared distances, so square the tolerance once.
    douglas_peucker(coordinates, tolerance * tolerance)
  end

  @spec simplify(Geo.LineString.t(), number) :: Geo.LineString.t()
  def simplify(%Geo.LineString{coordinates: coordinates}, tolerance) do
    %Geo.LineString{coordinates: simplify(coordinates, tolerance)}
  end

  # Curves of fewer than three points cannot be simplified further.
  defp douglas_peucker([], _tolerance_squared), do: []
  defp douglas_peucker([_] = points, _tolerance_squared), do: points
  defp douglas_peucker([_, _] = points, _tolerance_squared), do: points

  # Recursive RDP step: find the interior point farthest from the
  # first-last chord; if it exceeds the tolerance, keep it and recurse on
  # both halves, otherwise collapse the whole segment to its endpoints.
  defp douglas_peucker([first | rest] = points, tolerance_squared) do
    {last, interior} = List.pop_at(rest, -1)
    {farthest, split_index, max_squared_dist} = farthest_from_chord(interior, first, last)

    if max_squared_dist > tolerance_squared do
      {left, right} = Enum.split(points, split_index + 1)
      simplified_left = douglas_peucker(left, tolerance_squared)
      # Drop the duplicated split point from the right half's result.
      [_ | simplified_right] = douglas_peucker([farthest | right], tolerance_squared)
      simplified_left ++ simplified_right
    else
      [first, last]
    end
  end

  # Scan the interior points, tracking the one with the greatest squared
  # distance from the segment a-b. Returns {point, index_in_full_list, dist}.
  defp farthest_from_chord(interior, a, b) do
    {_next_idx, point, index, dist} =
      Enum.reduce(interior, {1, nil, 1, 0}, fn candidate, {idx, best_point, best_idx, best_dist} ->
        candidate_dist = chord_distance(candidate, a, b)

        if candidate_dist >= best_dist do
          {idx + 1, candidate, idx, candidate_dist}
        else
          {idx + 1, best_point, best_idx, best_dist}
        end
      end)

    {point, index, dist}
  end

  # 3-tuples (x, y, z) are projected onto the plane before measuring.
  defp chord_distance({px, py, _}, {ax, ay, _}, {bx, by, _}),
    do: chord_distance({px, py}, {ax, ay}, {bx, by})

  defp chord_distance(p, a, b), do: Distance.segment_distance_squared(p, a, b)
end
|
lib/simplify.ex
| 0.923523
| 0.990678
|
simplify.ex
|
starcoder
|
defmodule Options.Depreciated do
  @moduledoc """
  Various functions for evaluating call options using the binomial method.

  NOTE: the module name keeps the historical (misspelled) `Depreciated`
  for backward compatibility with existing callers.
  """

  @doc """
  Converts an annual volatility to a growth rate per delta-t (in years).
  """
  def voltorate(volatility, dt) do
    :math.exp(volatility * :math.sqrt(dt))
  end

  @doc """
  Equity (stock) price progression, symmetric (down factor is 1/up factor).

      iex> Options.Depreciated.split(100, 2.0)
      [50.0, 200.0]
  """
  def split(s, gu), do: split(s, gu, 1 / gu)

  @doc """
  Equity (stock) price progression with explicit up/down growth factors.

      iex> Options.Depreciated.split(100, 2.0, 0.5)
      [50.0, 200.0]
  """
  def split(s, gu, gd), do: [s * gd, s * gu]

  @doc """
  Reverse split: recovers the prior price from a [down, up] pair.
  Returns `:error` if the two implied prior prices disagree by more
  than a relative tolerance of 1e-4.

      iex> Options.Depreciated.revsplit([0.125, 0.5], 2.0, 0.5)
      0.25
  """
  def revsplit([sfd, sfu], gu, gd) do
    s1 = sfu / gu
    s2 = sfd / gd

    if abs(s1 - s2) / s1 < 0.0001 do
      s1
    else
      :error
    end
  end

  @doc """
  Reverse split, symmetric (down factor is 1/up factor).

      iex> Options.Depreciated.revsplit([0.125, 0.5], 2.0)
      0.25
  """
  def revsplit([sfd, sfu], gu), do: revsplit([sfd, sfu], gu, 1 / gu)

  @doc """
  Present value of a bond future `bf` discounted at rate `r` over `dt` years.

      iex> Options.Depreciated.bondp(100, 0.05, 1.0)
      95.1229424500714
  """
  def bondp(bf, r, dt), do: bf * :math.exp(-r * dt)

  @doc """
  Call option future value for a [down, up] stock pair at strike `ex`.

      iex> Options.Depreciated.callf([50, 200], 100)
      [0, 100]
  """
  def callf([sd, su], ex), do: [max(0, sd - ex), max(0, su - ex)]

  # NOTE(review): the original source carried a dangling @doc for a
  # `bondf/2` function that was never defined; the attribute was silently
  # overwritten by the next @doc (compiler warning "redefining @doc").
  # Its doctest claimed: bondf([50, 200], [50, 200]) #=> 0.
  # Preserved here as a comment until the missing function is recovered.

  @doc """
  Adds one layer to a stock price progression tree.

      iex> [50.0, 200.0] |> Options.Depreciated.expand(2.0, 0.5)
      [[25.0, 100.0], [100.0, 400.0]]
  """
  def expand([d, u], gu, gd) do
    # Leaves are numbers; interior nodes are nested [down, up] lists.
    if is_number(d) do
      [split(d, gu, gd), split(u, gu, gd)]
    else
      [expand(d, gu, gd), expand(u, gu, gd)]
    end
  end

  @doc """
  Stock price progression to `n` levels, flattened, sorted and de-duplicated.

      iex> Options.Depreciated.spread(100.0, 2, 2.0, 0.5)
      [25.0, 100.0, 400.0]
  """
  def spread(s, n, gu, gd) do
    if n > 1 do
      1..(n - 1)
      |> Enum.reduce(split(s * 1.0, gu * 1.0, gd * 1.0), fn _x, acc -> expand(acc, gu, gd) end)
      |> List.flatten()
      |> Enum.sort()
      |> myuniq()
    else
      split(s, gu, gd)
    end
  end

  @doc """
  Eliminates close values (within 1% relative) retained due to rounding
  errors. Expects a sorted list; returns `[]` for an empty list.

      iex> Options.Depreciated.myuniq([100.0, 200.0, 200.01, 200.3, 300.0])
      [100.0, 200.0, 300.0]
  """
  def myuniq([]), do: []
  def myuniq([x]), do: [x]

  def myuniq([x | rest]) do
    if abs(x - hd(rest)) / x < 0.01 do
      [x | myuniq(shave(x, rest))]
    else
      [x | myuniq(rest)]
    end
  end

  @doc """
  Helper for `myuniq/1`: drops leading elements within 1% relative of `x`.
  Returns `[]` when every element is shaved (the original crashed here).

      iex> Options.Depreciated.shave(200.0, [200.0, 200.01, 200.3, 300.0])
      [300.0]
  """
  def shave(_x, []), do: []

  def shave(x, [h | rest] = remaining) do
    if abs(x - h) / x < 0.01 do
      shave(x, rest)
    else
      remaining
    end
  end

  @doc """
  Splits a future price distribution into consecutive ordered pairs.

      iex> [1,2,3,4,5] |> Options.Depreciated.pairs()
      [[1,2], [2,3], [3,4], [4,5]]
  """
  def pairs([a, b]), do: [[a, b]]
  def pairs([a, b | rest]), do: [[a, b] | pairs([b | rest])]

  @doc """
  Forward call value distribution for strike `ex`.

      iex> [0.125, 0.5, 0.5, 2.0, 0.5, 2.0, 2.0, 8.0] |> Options.Depreciated.calldist(1.0)
      [0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 7.0]
  """
  def calldist(stockdist, ex) do
    Enum.map(stockdist, &max(0.0, &1 - ex))
  end
end
|
lib/options/depreciated.ex
| 0.67104
| 0.530419
|
depreciated.ex
|
starcoder
|
defmodule Tai.Venues.Boot do
  @moduledoc """
  Coordinates the asynchronous hydration of a venue:

  - products
  - accounts
  - fees
  """

  alias __MODULE__

  @type venue :: Tai.Venue.t()

  @spec run(venue) :: {:ok, venue} | {:error, {venue, [reason :: term]}}
  def run(venue) do
    venue
    |> start_products_and_accounts()
    |> await_products()
    |> start_fees_positions_and_streams()
    |> await_remaining_tasks()
  end

  # Kick off product and account hydration concurrently.
  defp start_products_and_accounts(venue) do
    products_task = Task.async(Boot.Products, :hydrate, [venue])
    accounts_task = Task.async(Boot.Accounts, :hydrate, [venue])
    {venue, products_task, accounts_task}
  end

  # Products are a prerequisite for fees and streams, so block on them;
  # the accounts task keeps running and is drained later.
  defp await_products({venue, products_task, accounts_task}) do
    pending = [accounts: accounts_task]

    case Task.await(products_task, venue.timeout) do
      {:ok, products} -> {:ok, venue, pending, products}
      {:error, reason} -> {:error, venue, pending, [products: reason]}
    end
  end

  # With products in hand, launch fee/position hydration and the streams.
  defp start_fees_positions_and_streams({:ok, venue, pending, products}) do
    fees_task = Task.async(Boot.Fees, :hydrate, [venue, products])
    positions_task = Task.async(Boot.Positions, :hydrate, [venue])
    stream_task = Task.async(Boot.Stream, :start, [venue, products])

    pending = [streams: stream_task, positions: positions_task, fees: fees_task] ++ pending
    {:ok, venue, pending}
  end

  defp start_fees_positions_and_streams({:error, _, _, _} = error), do: error

  defp await_remaining_tasks({:ok, venue, pending}), do: drain(venue, pending, [])
  defp await_remaining_tasks({:error, venue, pending, reasons}), do: drain(venue, pending, reasons)

  # Await every outstanding task, accumulating any {:error, reason} results.
  defp drain(venue, [], []), do: {:ok, venue}
  defp drain(venue, [], reasons), do: {:error, {venue, reasons}}

  defp drain(venue, [{name, task} | rest], reasons) do
    case Task.await(task, venue.timeout) do
      {:error, reason} -> drain(venue, rest, [{name, reason} | reasons])
      _ -> drain(venue, rest, reasons)
    end
  end
end
|
apps/tai/lib/tai/venues/boot.ex
| 0.697712
| 0.45302
|
boot.ex
|
starcoder
|
defmodule Mix.Utils do
  @moduledoc """
  Utilities used throughout Mix and tasks.
  ## Conversions
  This module handles two types of conversions:
  * From command names to module names, i.e. how the command
    `deps.get` translates to `Deps.Get` and vice-versa;
  * From underscore to camelize, i.e. how the file path
    `my_project` translates to `MyProject`;
  """

  # NOTE(review): this file uses pre-1.0 Elixir syntax throughout —
  # `//` default arguments, `&1` implicit partial application, the `/>`
  # pipe operator, `%r` regex sigils, `to_binary`/`list_to_binary`, and
  # `<<h, t | :binary>>` binary patterns. It will NOT compile on a modern
  # Elixir compiler; do not reformat or "modernize" piecemeal.

  @doc """
  Gets the user home attempting to consider OS system differences.
  """
  # Checks MIXHOME first so it can override the platform defaults
  # (HOME on Unix, USERPROFILE on Windows).
  def user_home do
    System.get_env("MIXHOME") || System.get_env("HOME") || System.get_env("USERPROFILE") ||
      raise Mix.Error, message: "Nor MIXHOME, HOME or USERPROFILE env variables were set"
  end

  @doc """
  Gets the source location of a module as a binary.
  """
  def source(module) do
    compile = module.__info__(:compile)
    # Get the source of the compiled module. Due to a bug in Erlang
    # R15 and before, we need to look for the source first in the
    # options and then into the real source.
    # (keyfind position 1 is the tuple key in this old API usage)
    options =
      case List.keyfind(compile, :options, 1) do
        { :options, opts } -> opts
        _ -> []
      end
    source = List.keyfind(options, :source, 1) || List.keyfind(compile, :source, 1)
    # :source holds an Erlang charlist; convert to a binary, or nil if absent.
    case source do
      { :source, source } -> list_to_binary(source)
      _ -> nil
    end
  end

  @doc """
  Takes a `command` name and try to load a module
  with the command name converted to a module name
  in the given `at` scope.
  Returns `{ :module, module }` in case a module
  exists and is loaded, `{ :error, reason }` otherwise.
  ## Examples
      Mix.Utils.command_to_module("compile", Mix.Tasks)
      #=> { :module, Mix.Tasks.Compile }
  """
  # `// Elixir` is the pre-1.0 default-argument syntax (now `\\ Elixir`).
  def command_to_module(command, at // Elixir) do
    module = Module.concat(at, command_to_module_name(command))
    Code.ensure_loaded(module)
  end

  @doc """
  Returns true if any of `target` is stale compared to `source`.
  If `target` or `source` is a binary, it is expanded using `File.wildcard`.
  """
  def stale?(source, target) do
    source = expand_wildcard(source)
    target = expand_wildcard(target)
    # Sources must exist (stat!); targets may be missing, in which case
    # last_modified/1 falls back to the Unix epoch, making them stale.
    source_stats = Enum.map(source, fn(file) -> File.stat!(file).mtime end)
    # `last_modified(&1)` is old-style implicit partial application.
    last_modified = Enum.map(target, last_modified(&1))
    Enum.any?(source_stats, fn(source_stat) ->
      Enum.any?(last_modified, source_stat > &1)
    end)
  end

  defp expand_wildcard(wildcard) when is_binary(wildcard) do
    File.wildcard(wildcard)
  end

  defp expand_wildcard(list) when is_list(list) do
    list
  end

  # Missing files report the epoch so they always compare as older.
  defp last_modified(path) do
    case File.stat(path) do
      { :ok, stat } -> stat.mtime
      { :error, _ } -> { { 1970, 1, 1 }, { 0, 0, 0 } }
    end
  end

  @doc """
  Merges two configs recursively, merging keywords lists
  and concatenating normal lists.
  """
  # Keyword-vs-keyword merges recurse; plain lists concatenate; any other
  # pair of values resolves in favor of the new config.
  def config_merge(old, new) do
    Keyword.merge old, new, fn(_, x, y) ->
      if is_list(x) and is_list(y) do
        if is_keyword(x) and is_keyword(y) do
          config_merge(x, y)
        else
          x ++ y
        end
      else
        y
      end
    end
  end

  # Pre-1.0 naming: a predicate spelled `is_` without being a guard macro.
  defp is_keyword(x) do
    Enum.all? x, match?({ atom, _ } when is_atom(atom), &1)
  end

  @doc """
  Converts the given string to underscore format.
  ## Examples
      Mix.Utils.underscore "FooBar" #=> "foo_bar"
  In general, underscore can be thought as the reverse of
  camelize, however, in some cases formatting may be lost:
      Mix.Utils.underscore "SAPExample" #=> "sap_example"
      Mix.Utils.camelize "sap_example" #=> "SapExample"
  """
  # `<<h, t | :binary>>` is the old spelling of `<<h, t::binary>>`.
  def underscore(<<h, t | :binary>>) do
    <<to_lower_char(h)>> <> do_underscore(t, h)
  end

  # Uppercase run boundary (e.g. the "PE" in "SAPExample"): insert `_`
  # before the last uppercase letter of the run.
  defp do_underscore(<<h, t, rest | :binary>>, _) when h in ?A..?Z and not t in ?A..?Z do
    <<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
  end

  # Lower-to-upper transition: insert `_` before the uppercase letter.
  defp do_underscore(<<h, t | :binary>>, prev) when h in ?A..?Z and not prev in ?A..?Z do
    <<?_, to_lower_char(h)>> <> do_underscore(t, h)
  end

  # Dashes become underscores.
  defp do_underscore(<<?-, t | :binary>>, _) do
    <<?_>> <> do_underscore(t, ?-)
  end

  defp do_underscore(<<h, t | :binary>>, _) do
    <<to_lower_char(h)>> <> do_underscore(t, h)
  end

  defp do_underscore(<<>>, _) do
    <<>>
  end

  @doc """
  Converts the given string to camelize format.
  ## Examples
      Mix.Utils.camelize "foo_bar" #=> "FooBar"
  """
  # Leading underscores are dropped.
  def camelize(<<?_, t | :binary>>) do
    camelize(t)
  end

  def camelize(<<h, t | :binary>>) do
    <<to_upper_char(h)>> <> do_camelize(t)
  end

  # Collapse runs of underscores to a single boundary.
  defp do_camelize(<<?_, ?_, t | :binary>>) do
    do_camelize(<< ?_, t | :binary >>)
  end

  # Underscore followed by a lowercase letter: drop the `_`, upcase the letter.
  defp do_camelize(<<?_, h, t | :binary>>) when h in ?a..?z do
    <<to_upper_char(h)>> <> do_camelize(t)
  end

  # A trailing underscore is dropped.
  defp do_camelize(<<?_>>) do
    <<>>
  end

  defp do_camelize(<<h, t | :binary>>) do
    <<h>> <> do_camelize(t)
  end

  defp do_camelize(<<>>) do
    <<>>
  end

  @doc """
  Takes a module and converts it to a command. The nesting
  argument can be given in order to remove the nesting of
  module.
  ## Examples
      module_name_to_command(Mix.Tasks.Compile, 2)
      #=> "compile"
      module_name_to_command("Mix.Tasks.Compile.Elixir", 2)
      #=> "compile.elixir"
  """
  def module_name_to_command(module, nesting // 0)

  def module_name_to_command(module, nesting) when is_atom(module) do
    module_name_to_command(inspect(module), nesting)
  end

  # `/>` is the pre-1.0 pipe operator (now `|>`); `%r` the old regex sigil.
  def module_name_to_command(module, nesting) do
    t = Regex.split(%r/\./, to_binary(module))
    t /> Enum.drop(nesting) /> Enum.map(first_to_lower(&1)) /> Enum.join(".")
  end

  @doc """
  Takes a command and converts it to a module name format.
  ## Examples
      command_to_module_name("compile.elixir")
      #=> "Compile.Elixir"
  """
  def command_to_module_name(s) do
    Regex.split(%r/\./, to_binary(s)) />
      Enum.map(first_to_upper(&1)) />
      Enum.join(".")
  end

  # Upcase/downcase only the first byte of a segment (ASCII-only).
  defp first_to_upper(<<s, t|:binary>>), do: <<to_upper_char(s)>> <> t
  defp first_to_upper(<<>>), do: <<>>

  defp first_to_lower(<<s, t|:binary>>), do: <<to_lower_char(s)>> <> t
  defp first_to_lower(<<>>), do: <<>>

  # ASCII case conversion by offsetting by 32; non-letters pass through.
  defp to_upper_char(char) when char in ?a..?z, do: char - 32
  defp to_upper_char(char), do: char

  defp to_lower_char(char) when char in ?A..?Z, do: char + 32
  defp to_lower_char(char), do: char
end
|
lib/mix/lib/mix/utils.ex
| 0.819026
| 0.576005
|
utils.ex
|
starcoder
|
defmodule DataSpecs.Loader.Builtin do
  @moduledoc """
  Erlang builtin types loaders.

  Every loader takes the raw value, the map of custom type loaders and the
  list of type-parameter loaders, returning `{:ok, loaded}` or
  `{:error, reasons}`.
  """

  alias DataSpecs.Types

  @spec any(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, any()}
  def any(value, _custom_type_loaders, _type_params_loaders), do: {:ok, value}

  @spec atom(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, atom()}
  def atom(value, _custom_type_loaders, _type_params_loaders) when is_atom(value), do: {:ok, value}

  # Strings are only mapped to *existing* atoms, never freshly created ones.
  def atom(value, _custom_type_loaders, _type_params_loaders) when is_binary(value) do
    {:ok, String.to_existing_atom(value)}
  rescue
    ArgumentError ->
      {:error, ["can't convert #{inspect(value)} to an existing atom"]}
  end

  def atom(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to an atom"]}

  @spec boolean(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, boolean()}
  def boolean(value, _custom_type_loaders, _type_params_loaders) when is_boolean(value),
    do: {:ok, value}

  def boolean(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a boolean"]}

  @spec binary(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, binary()}
  def binary(value, _custom_type_loaders, _type_params_loaders) when is_binary(value),
    do: {:ok, value}

  def binary(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a binary"]}

  @spec binary(Types.value(), integer(), integer(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, binary()}
  # <<>> type: only the empty binary matches.
  def binary(<<>> = value, 0, 0, _custom_type_loaders, _type_params_loaders), do: {:ok, value}

  def binary(value, 0, 0, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a <<>>"]}

  # <<_::size, _::_*unit>> type: at least `size` bits, remainder divisible by `unit`
  # (Elixir's `or` short-circuits in guards, so rem/2 is never called with unit 0).
  def binary(value, size, unit, _custom_type_loaders, _type_params_loaders)
      when is_bitstring(value) and bit_size(value) >= size and
             (unit == 0 or rem(bit_size(value) - size, unit) == 0),
      do: {:ok, value}

  def binary(value, size, unit, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a <<_::#{size}, _::_*#{unit}>>"]}

  @spec bitstring(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, bitstring()}
  def bitstring(value, _custom_type_loaders, _type_params_loaders) when is_bitstring(value),
    do: {:ok, value}

  def bitstring(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a bitstring"]}

  @spec byte(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, byte()}
  def byte(value, _custom_type_loaders, _type_params_loaders)
      when is_integer(value) and 0 <= value and value <= 255,
      do: {:ok, value}

  def byte(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a byte"]}

  @spec char(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, char()}
  def char(value, _custom_type_loaders, _type_params_loaders)
      when is_integer(value) and 0 <= value and value <= 0x10FFFF,
      do: {:ok, value}

  def char(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a char"]}

  @spec arity(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, arity()}
  def arity(value, _custom_type_loaders, _type_params_loaders)
      when is_integer(value) and 0 <= value and value <= 255,
      do: {:ok, value}

  def arity(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a arity"]}

  @spec pid(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, pid()}
  def pid(value, _custom_type_loaders, _type_params_loaders) when is_pid(value), do: {:ok, value}

  def pid(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a pid"]}

  @spec reference(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, reference()}
  def reference(value, _custom_type_loaders, _type_params_loaders) when is_reference(value),
    do: {:ok, value}

  def reference(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a reference"]}

  @spec number(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, number()}
  def number(value, _custom_type_loaders, _type_params_loaders) when is_number(value),
    do: {:ok, value}

  def number(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a number"]}

  @spec float(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, float()}
  # Integers are widened to floats.
  def float(value, _custom_type_loaders, _type_params_loaders) when is_number(value),
    do: {:ok, :erlang.float(value)}

  def float(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a float"]}

  @spec integer(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, integer()}
  def integer(value, _custom_type_loaders, _type_params_loaders) when is_integer(value),
    do: {:ok, value}

  def integer(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to an integer"]}

  @spec neg_integer(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, neg_integer()}
  def neg_integer(value, _custom_type_loaders, _type_params_loaders)
      when is_integer(value) and value < 0,
      do: {:ok, value}

  def neg_integer(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a neg_integer"]}

  @spec non_neg_integer(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, non_neg_integer()}
  def non_neg_integer(value, _custom_type_loaders, _type_params_loaders)
      when is_integer(value) and value >= 0,
      do: {:ok, value}

  def non_neg_integer(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a non_neg_integer"]}

  @spec pos_integer(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, pos_integer()}
  def pos_integer(value, _custom_type_loaders, _type_params_loaders)
      when is_integer(value) and value > 0,
      do: {:ok, value}

  def pos_integer(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a pos_integer"]}

  @spec range(integer(), integer(), Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, integer()}
  def range(lower, upper, value, _custom_type_loaders, _type_params_loaders)
      when is_integer(value) and lower <= value and value <= upper,
      do: {:ok, value}

  def range(lower, upper, value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a range #{inspect(lower..upper)}"]}

  @spec union(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, any()}
  # Tries each member loader in turn, succeeding on the first match and
  # accumulating every member's errors otherwise.
  def union(value, custom_type_loaders, type_params_loaders) do
    result =
      Enum.reduce_while(type_params_loaders, {:error, []}, fn loader, {:error, acc_errors} ->
        case loader.(value, custom_type_loaders, []) do
          {:ok, _} = ok -> {:halt, ok}
          {:error, errors} -> {:cont, {:error, acc_errors ++ errors}}
        end
      end)

    case result do
      {:ok, _} = ok -> ok
      {:error, errors} -> {:error, ["can't convert #{inspect(value)} to a union", errors]}
    end
  end

  @spec empty_list(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, []}
  def empty_list([], _custom_type_loaders, _type_params_loaders), do: {:ok, []}

  def empty_list(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to an empty list"]}

  @spec nonempty_list(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, nonempty_list()}
  def nonempty_list([_ | _] = value, custom_type_loaders, type_params_loaders),
    do: list(value, custom_type_loaders, type_params_loaders)

  def nonempty_list(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a non empty list"]}

  @spec list(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, list()}
  def list(value, custom_type_loaders, type_params_loaders) when is_list(value),
    do: load_list(value, custom_type_loaders, type_params_loaders)

  def list(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a list"]}

  # Loads every element with the single item loader, halting on the first
  # failure and reporting its index.
  defp load_list(items, custom_type_loaders, [item_loader]) do
    items
    |> Enum.with_index()
    |> Enum.reduce_while([], fn {item, idx}, acc ->
      case item_loader.(item, custom_type_loaders, []) do
        {:ok, loaded} ->
          {:cont, [loaded | acc]}

        {:error, errors} ->
          {:halt, {:error, ["can't convert #{inspect(items)} to a list, bad item at index=#{idx}", errors]}}
      end
    end)
    |> case do
      {:error, _} = error -> error
      acc -> {:ok, Enum.reverse(acc)}
    end
  end

  @spec empty_map(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, %{}}
  def empty_map(value, _custom_type_loaders, []) when value == %{}, do: {:ok, value}

  def empty_map(value, _custom_type_loaders, []),
    do: {:error, ["can't convert #{inspect(value)} to an empty map"]}

  @spec map_field_required(map(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, {map(), map(), Types.reason()}}
  # Same as map_field_optional/3, but at least one k/v pair must load.
  def map_field_required(map, custom_type_loaders, [type_key_loader, type_value_loader]) do
    case map_field_optional(map, custom_type_loaders, [type_key_loader, type_value_loader]) do
      {:ok, {_map_rest, map_processed, errors}} when map_size(map_processed) == 0 ->
        {:error, ["can't convert #{inspect(map)} to a map, missing required k/v", errors]}

      other ->
        other
    end
  end

  @spec map_field_optional(map(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, {map(), map(), Types.reason()}}
  # Structs are loaded through their plain-map representation.
  def map_field_optional(map, custom_type_loaders, [type_key_loader, type_value_loader])
      when is_struct(map) do
    map
    |> Map.from_struct()
    |> map_field_optional(custom_type_loaders, [type_key_loader, type_value_loader])
  end

  # Walks every entry: pairs whose key AND value load are moved from the
  # "rest" map into the "processed" map; loader errors are accumulated.
  def map_field_optional(map, custom_type_loaders, [type_key_loader, type_value_loader])
      when is_map(map) do
    res =
      Enum.reduce(map, {map, %{}, []}, fn {map_key, map_value}, {map_rest, map_processed, errors} ->
        with {:ok, loaded_key} <- type_key_loader.(map_key, custom_type_loaders, []),
             {:ok, loaded_value} <- type_value_loader.(map_value, custom_type_loaders, []) do
          {Map.delete(map_rest, map_key), Map.put(map_processed, loaded_key, loaded_value), errors}
        else
          {:error, new_errors} ->
            {map_rest, map_processed, errors ++ new_errors}
        end
      end)

    {:ok, res}
  end

  def map_field_optional(map, _custom_type_loaders, [_type_key_loader, _type_value_loader]),
    do: {:error, ["can't convert #{inspect(map)} to a map"]}

  @spec tuple_any(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, tuple()}
  def tuple_any(value, _custom_type_loaders, _type_params_loaders) when is_tuple(value),
    do: {:ok, value}

  def tuple_any(value, _custom_type_loaders, _type_params_loaders),
    do: {:error, ["can't convert #{inspect(value)} to a tuple"]}

  @spec tuple(Types.value(), Types.custom_type_loaders(), [Types.type_loader_fun()]) ::
          {:error, Types.reason()} | {:ok, tuple()}
  # Accepts a tuple of the exact arity, or a list of matching length
  # (converted to a tuple first).
  def tuple(value, custom_type_loaders, type_params_loaders) do
    tuple_type_size = length(type_params_loaders)

    cond do
      is_tuple(value) and tuple_size(value) == tuple_type_size ->
        load_tuple(value, custom_type_loaders, type_params_loaders)

      is_list(value) and length(value) == tuple_type_size ->
        value
        |> List.to_tuple()
        |> tuple(custom_type_loaders, type_params_loaders)

      is_tuple(value) or is_list(value) ->
        {:error, ["can't convert #{inspect(value)} to a tuple of size #{tuple_type_size}"]}

      true ->
        {:error, ["can't convert #{inspect(value)} to a tuple"]}
    end
  end

  # Loads each element with its positional loader, halting on the first
  # failure and reporting its index.
  defp load_tuple(value, custom_type_loaders, type_params_loaders) do
    value
    |> Tuple.to_list()
    |> Enum.with_index()
    |> Enum.zip(type_params_loaders)
    |> Enum.reduce_while([], fn {{item, idx}, loader}, acc ->
      case loader.(item, custom_type_loaders, []) do
        {:ok, loaded} ->
          {:cont, [loaded | acc]}

        {:error, errors} ->
          {:halt, {:error, ["can't convert #{inspect(value)} to a tuple, bad item at index=#{idx}", errors]}}
      end
    end)
    |> case do
      {:error, _} = error ->
        error

      acc ->
        {:ok, acc |> Enum.reverse() |> List.to_tuple()}
    end
  end
end
|
lib/dataspecs/loader/builtin.ex
| 0.883588
| 0.487795
|
builtin.ex
|
starcoder
|
defmodule Gringotts.Gateways.Stripe do
@moduledoc """
Stripe gateway implementation. For reference see [Stripe's API documentation](https://stripe.com/docs/api).
The following features of Stripe are implemented:
| Action | Method |
| ------ | ------ |
| Pre-authorize | `authorize/3` |
| Capture | `capture/3` |
| Refund | `refund/3` |
| Reversal | `void/2` |
| Debit | `purchase/3` |
| Store | `store/2` |
| Unstore | `unstore/2` |
## The `opts` argument
Most `Gringotts` API calls accept an optional `Keyword` list `opts` to supply
optional arguments for transactions with the Stripe gateway. The following keys
are supported:
| Key | Status |
| ---- | ---- |
| `currency` | **Implemented** |
| `capture` | **Implemented** |
| `description` | **Implemented** |
| `metadata` | **Implemented** |
| `receipt_email` | **Implemented** |
| `shipping` | **Implemented** |
| `customer` | **Implemented** |
| `source` | **Implemented** |
| `statement_descriptor` | **Implemented** |
| `charge` | **Implemented** |
| `reason` | **Implemented** |
| `account_balance` | Not implemented |
| `business_vat_id` | Not implemented |
| `coupon` | Not implemented |
| `default_source` | Not implemented |
| `email` | Not implemented |
| `shipping` | Not implemented |
## Note
_This module can be used by both PCI-DSS compliant as well as non-compliant
merchants!_
### I'm not PCI-DSS compliant
No worries, both `authorize/3` and `purchase/3` accept a
"payment-source-identifier" (a `string`) instead of a `CreditCard.t`
struct. You'll have to generate this identifier using [Stripe.js and
Elements][stripe-js] client-side.
### I'm PCI-DSS compliant
In that case, you need not use [Stripe.js or Elements][stripe-js] and can
directly accept the client's card info and pass the `CreditCard.t` struct to
this module's functions.
[stripe-js]: https://stripe.com/docs/sources/cards
## Registering your Stripe account at `Gringotts`
After [making an account on Stripe](https://stripe.com/), head
to the dashboard and find your account `secrets` in the `API` section.
## Here's how the secrets map to the required configuration parameters for Stripe:
| Config parameter | Stripe secret |
| ------- | ---- |
| `:secret_key` | **Secret key** |
Your Application config must look something like this:
config :gringotts, Gringotts.Gateways.Stripe,
secret_key: "your_secret_key",
default_currency: "usd"
"""
@base_url "https://api.stripe.com/v1"
use Gringotts.Gateways.Base
use Gringotts.Adapter, required_config: [:secret_key]
alias Gringotts.{Address, CreditCard, Money}
@doc """
Performs a (pre) Authorize operation.
The authorization validates the card details with the banking network,
places a hold on the transaction amount in the customer's issuing bank and
also triggers risk management. Funds are not transferred.
Stripe returns an `charge_id` which should be stored at your side and can be
used later to:
* `capture/3` an amount.
* `void/2` a pre-authorization.
## Note
Uncaptured charges expire in 7 days. For more information, [see authorizing
charges and settling
later](https://support.stripe.com/questions/can-i-authorize-a-charge-and-then-wait-to-settle-it-later).
## Example
The following session shows how one would (pre) authorize a payment of $10 on
a sample `card`.
iex> card = %CreditCard{
first_name: "John",
last_name: "Smith",
number: "4242424242424242",
year: "2017",
month: "12",
verification_code: "123"
}
address = %Address{
street1: "123 Main",
city: "New York",
region: "NY",
country: "US",
postal_code: "11111"
}
iex> opts = [currency: "usd", address: address]
iex> amount = 10
iex> Gringotts.authorize(Gringotts.Gateways.Stripe, amount, card, opts)
"""
@spec authorize(Money.t(), CreditCard.t() | String.t(), keyword) :: map
def authorize(amount, payment, opts) do
params = create_params_for_auth_or_purchase(amount, payment, opts, false)
commit(:post, "charges", params, opts)
end
@doc """
Transfers amount from the customer to the merchant.
Stripe attempts to process a purchase on behalf of the customer, by debiting
amount from the customer's account by charging the customer's card.
## Example
The following session shows how one would process a payment in one-shot,
without (pre) authorization.
iex> card = %CreditCard{
first_name: "John",
last_name: "Smith",
number: "4242424242424242",
year: "2017",
month: "12",
verification_code: "123"
}
address = %Address{
street1: "123 Main",
city: "New York",
region: "NY",
country: "US",
postal_code: "11111"
}
iex> opts = [currency: "usd", address: address]
iex> amount = 5
iex> Gringotts.purchase(Gringotts.Gateways.Stripe, amount, card, opts)
"""
@spec purchase(Money.t(), CreditCard.t() | String.t(), keyword) :: map
def purchase(amount, payment, opts) do
params = create_params_for_auth_or_purchase(amount, payment, opts)
commit(:post, "charges", params, opts)
end
@doc """
Captures a pre-authorized amount.
Amount is transferred to the merchant account by Stripe when it is smaller or
equal to the amount used in the pre-authorization referenced by `charge_id`.
## Note
Stripe allows partial captures and release the remaining amount back to the
payment source. Thus, the same pre-authorisation `charge_id` cannot be used to
perform multiple captures.
## Example
The following session shows how one would (partially) capture a previously
authorized a payment worth $10 by referencing the obtained `charge_id`.
iex> id = "ch_1BYvGkBImdnrXiZwet3aKkQE"
iex> amount = 5
iex> opts = []
iex> Gringotts.capture(Gringotts.Gateways.Stripe, id, amount, opts)
"""
@spec capture(String.t(), Money.t(), keyword) :: map
def capture(id, amount, opts) do
params = optional_params(opts) ++ amount_params(amount)
commit(:post, "charges/#{id}/capture", params, opts)
end
@doc """
Voids the referenced payment.

This method attempts a reversal of either a previous `purchase/3` or
`authorize/3` referenced by `charge_id`.

As a consequence, the customer will never see any booking on his statement.

## Voiding a previous authorization

Stripe will reverse the authorization by sending a "reversal request" to the
payment source (card issuer) to clear the funds held against the
authorization.

## Voiding a previous purchase

Stripe will reverse the payment, by sending all the amount back to the
customer. Note that this is not the same as `refund/3`.

## Example

The following session shows how one would void a previous (pre)
authorization. Remember that our `capture/3` example only did a partial
capture.

    iex> id = "ch_1BYvGkBImdnrXiZwet3aKkQE"
    iex> opts = []
    iex> Gringotts.void(Gringotts.Gateways.Stripe, id, opts)
"""
@spec void(String.t(), keyword) :: map
def void(id, opts) do
  # Stripe has no dedicated "void" endpoint: a full refund of the charge
  # serves both the auth-reversal and the purchase-reversal case.
  commit(:post, "charges/#{id}/refund", optional_params(opts), opts)
end
@doc """
Refunds the amount to the customer's card with reference to a prior transfer.

Stripe processes a full or partial refund worth `amount`, referencing a
previous `purchase/3` or `capture/3`.

## Example

The following session shows how one would refund a previous purchase (and
similarly for captures).

    iex> amount = 5
    iex> id = "ch_1BYvGkBImdnrXiZwet3aKkQE"
    iex> opts = []
    iex> Gringotts.refund(Gringotts.Gateways.Stripe, amount, id, opts)
"""
@spec refund(Money.t(), String.t(), keyword) :: map
def refund(amount, id, opts) do
  refund_params = optional_params(opts) ++ amount_params(amount)
  commit(:post, "charges/#{id}/refund", refund_params, opts)
end
@doc """
Stores the payment-source data for later use.

Stripe can store the payment-source details, for example a card, which can
then be used to effectively process One-Click and Recurring payments, and
returns a `customer_id` for reference.

## Example

The following session shows how one would store a card (a payment-source) for
future use.

    iex> card = %CreditCard{
           first_name: "John",
           last_name: "Smith",
           number: "4242424242424242",
           year: "2017",
           month: "12",
           verification_code:  "123"
         }
         address = %Address{
           street1: "123 Main",
           city: "New York",
           region: "NY",
           country: "US",
           postal_code: "11111"
         }
    iex> opts = [address: address]
    iex> Gringotts.store(Gringotts.Gateways.Stripe, card, opts)
"""
@spec store(CreditCard.t() | String.t(), keyword) :: map
def store(payment, opts) do
  # `source_params/2` tokenizes a raw card first, so only a token/customer
  # reference (never raw card data) is attached to the customer record.
  commit(:post, "customers", optional_params(opts) ++ source_params(payment, opts), opts)
end
@doc """
Deletes previously stored payment-source data.

Deletes the already stored payment source, so that it cannot be used again
for capturing payments.

## Examples

The following session shows how one would unstore an already stored payment
source.

    iex> id = "cus_BwpLX2x4ecEUgD"
    iex> Gringotts.unstore(Gringotts.Gateways.Stripe, id, opts)
"""
@spec unstore(String.t(), keyword) :: map
def unstore(id, opts) do
  commit(:delete, "customers/#{id}", [], opts)
end
# Private methods

# Builds the form params shared by `authorize/3` and `purchase/3`.
# `capture: false` turns the charge into a pre-authorization.
defp create_params_for_auth_or_purchase(amount, payment, opts, capture \\ true) do
  Enum.concat([
    [capture: capture],
    optional_params(opts),
    amount_params(amount),
    source_params(payment, opts)
  ])
end
# Exchanges raw card params for a single-use Stripe token.
defp create_card_token(params, opts), do: commit(:post, "tokens", params, opts)
# Converts a `Money.t()` into Stripe's integer-minor-units representation.
defp amount_params(money) do
  {currency, int_value, _exponent} = Money.to_integer(money)
  [amount: int_value, currency: currency]
end
# Resolves the payment argument into Stripe request params.
#
# A binary id is either a token ("tok_...") → `source` param, or a customer
# id ("cus_...") → `customer` param. A `CreditCard` struct is first
# tokenized via the tokens endpoint, then resolved recursively. Anything
# else contributes no source params.
defp source_params(token_or_customer, _) when is_binary(token_or_customer) do
  # BUGFIX: `parts: 2` keeps ids whose random suffix itself contains "_"
  # intact; a plain `split/2` produced 3+ parts and crashed the
  # `[head, _]` match with a MatchError.
  [head | _] = String.split(token_or_customer, "_", parts: 2)

  case head do
    "tok" -> [source: token_or_customer]
    "cus" -> [customer: token_or_customer]
  end
end

defp source_params(%CreditCard{} = card, opts) do
  params = card_params(card) ++ address_params(opts[:address])
  response = create_card_token(params, opts)

  # On tokenization failure we send no source at all; Stripe then rejects
  # the charge with its own descriptive error in the `commit/4` response.
  if Map.has_key?(response, "error") do
    []
  else
    response
    |> Map.get("id")
    |> source_params(opts)
  end
end

defp source_params(_, _), do: []
# Maps a `CreditCard` struct onto Stripe's `card[...]` form fields.
defp card_params(%CreditCard{} = card) do
  [
    {:"card[name]", CreditCard.full_name(card)},
    {:"card[number]", card.number},
    {:"card[exp_year]", card.year},
    {:"card[exp_month]", card.month},
    {:"card[cvc]", card.verification_code}
  ]
end

defp card_params(_), do: []
# Maps an `Address` struct onto Stripe's `card[address_*]` form fields.
defp address_params(%Address{} = address) do
  [
    {:"card[address_line1]", address.street1},
    {:"card[address_line2]", address.street2},
    {:"card[address_city]", address.city},
    {:"card[address_state]", address.region},
    {:"card[address_zip]", address.postal_code},
    {:"card[address_country]", address.country}
  ]
end

defp address_params(_), do: []
# Issues the HTTP request against the Stripe API and normalizes the result.
# Expects `opts[:config][:secret_key]` to hold the API secret.
defp commit(method, path, params, opts) do
  headers = [
    {"Content-Type", "application/x-www-form-urlencoded"},
    {"Authorization", "Bearer " <> opts[:config][:secret_key]}
  ]

  method
  |> HTTPoison.request("#{@base_url}/#{path}", {:form, params}, headers)
  |> format_response()
end
# Strips the keys that are consumed internally (:config for auth,
# :address for tokenization) so they are never sent to Stripe.
defp optional_params(opts) do
  Keyword.drop(opts, [:config, :address])
end
# Decodes a successful HTTP response body; any transport-level failure is
# collapsed into a generic error map.
defp format_response({:ok, %HTTPoison.Response{body: body}}), do: Poison.decode!(body)
defp format_response(_error), do: %{"error" => "something went wrong, please try again later"}
end
|
lib/gringotts/gateways/stripe.ex
| 0.873606
| 0.65256
|
stripe.ex
|
starcoder
|
defmodule APDS9960.Register do
@moduledoc false
@doc """
Sets only specified bit values in a register value struct.
"""
@spec set_bits(struct, Enum.t()) :: struct
def set_bits(parsed_data, opts) when is_struct(parsed_data) do
  struct!(parsed_data, opts)
end

@doc """
Converts a register value struct to binary.
"""
@spec to_binary(struct) :: binary
def to_binary(parsed_data) when is_struct(parsed_data) do
  # Each register module serializes its own fields, so dispatch on the
  # struct's module and feed it the fields as an enumerable.
  register_module = parsed_data.__struct__

  parsed_data
  |> Map.from_struct()
  |> register_module.to_binary()
end
defmodule Utils do
  @moduledoc false

  # Decodes a sign-magnitude byte: a set high bit means negative.
  def offset_correction_factor(<<1::1, factor::7>>), do: -factor
  def offset_correction_factor(<<0::1, factor::7>>), do: factor

  # Encodes the sign bit for the sign-magnitude representation.
  def sign(value) when value < 0, do: 1
  def sign(_value), do: 0
end
# 0x80 ENABLE Read/Write Enable states and interrupts
defmodule ENABLE do
  @moduledoc false
  def address, do: 0x80
  use TypedStruct

  typedstruct do
    field(:gesture, 0 | 1, default: 0)
    field(:proximity_interrupt, 0 | 1, default: 0)
    field(:als_interrupt, 0 | 1, default: 0)
    field(:wait, 0 | 1, default: 0)
    field(:proximity, 0 | 1, default: 0)
    field(:als, 0 | 1, default: 0)
    field(:power, 0 | 1, default: 0)
  end

  # Bit 7 is reserved (always 0); bits 6..0 are GEN, PIEN, AIEN, WEN, PEN,
  # AEN, PON. BUGFIX: the bitstring segments had been mangled into
  # IPv6-looking garbage; reconstructed from the field list.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b6 = d.gesture
    b5 = d.proximity_interrupt
    b4 = d.als_interrupt
    b3 = d.wait
    b2 = d.proximity
    b1 = d.als
    b0 = d.power
    <<0::1, b6::1, b5::1, b4::1, b3::1, b2::1, b1::1, b0::1>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<0::1, b6::1, b5::1, b4::1, b3::1, b2::1, b1::1, b0::1>>) do
    %__MODULE__{
      gesture: b6,
      proximity_interrupt: b5,
      als_interrupt: b4,
      wait: b3,
      proximity: b2,
      als: b1,
      power: b0
    }
  end
end
# 0x81 ATIME Read/Write ADC integration time
defmodule ATIME do
  @moduledoc false

  # I2C register address.
  def address do
    0x81
  end
end

# 0x83 WTIME Read/Write Wait time (non-gesture)
defmodule WTIME do
  @moduledoc false

  # I2C register address.
  def address do
    0x83
  end
end
# 0x84 AILTL Read/Write ALS interrupt low/high thresholds
defmodule AILTL do
  @moduledoc false
  use TypedStruct

  def address, do: 0x84

  typedstruct do
    field(:low, 0..0xFFFF, default: 0)
    field(:high, 0..0xFFFF, default: 0)
  end

  # Thresholds are 16-bit little-endian, low threshold first.
  @spec to_binary(Enum.t()) :: <<_::32>>
  def to_binary(opts \\ []) do
    %__MODULE__{low: low, high: high} = struct!(__MODULE__, opts)
    <<low::little-16, high::little-16>>
  end

  @spec parse(<<_::32>>) :: t()
  def parse(<<low::little-16, high::little-16>>), do: %__MODULE__{low: low, high: high}
end
# 0x89 PILT Read/Write Proximity interrupt low/high thresholds
defmodule PILT do
  @moduledoc false
  use TypedStruct

  def address, do: 0x89

  typedstruct do
    field(:low, byte, default: 0)
    field(:high, byte, default: 0)
  end

  # One byte per threshold, low first.
  @spec to_binary(Enum.t()) :: <<_::16>>
  def to_binary(opts \\ []) do
    %__MODULE__{low: low, high: high} = struct!(__MODULE__, opts)
    <<low, high>>
  end

  @spec parse(<<_::16>>) :: t()
  def parse(<<low, high>>), do: %__MODULE__{low: low, high: high}
end
# 0x8C PERS Read/Write Interrupt persistence filters (non-gesture)
defmodule PERS do
  @moduledoc false
  def address, do: 0x8C
  use TypedStruct

  typedstruct do
    field(:proximity, 0..15, default: 0)
    field(:als, 0..15, default: 0)
  end

  # Bits 7:4 = PPERS (proximity), bits 3:0 = APERS (ALS).
  # BUGFIX: the first bitstring segment had been mangled; reconstructed to
  # mirror the intact `parse/1` clause below.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b74 = d.proximity
    b30 = d.als
    <<b74::4, b30::4>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<b74::4, b30::4>>) do
    %__MODULE__{
      proximity: b74,
      als: b30
    }
  end
end
# 0x8D CONFIG1 Read/Write Configuration register one
defmodule CONFIG1 do
  @moduledoc false
  def address, do: 0x8D
  use TypedStruct

  typedstruct do
    field(:wait_long, 0 | 1, default: 0)
  end

  # Bit 1 is WLONG; bits 6:5 are reserved and written as 1 (register value
  # 0x60 | WLONG<<1, matching reference drivers). BUGFIX: two mangled
  # segments reconstructed — TODO confirm against the APDS-9960 datasheet.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b1 = d.wait_long
    <<0::1, 1::1, 1::1, 0::1, 0::1, 0::1, b1::1, 0::1>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<0::1, 1::1, 1::1, 0::1, 0::1, 0::1, b1::1, 0::1>>) do
    %__MODULE__{
      wait_long: b1
    }
  end
end
# 0x8E PPULSE Read/Write Proximity pulse count and length
defmodule PPULSE do
  @moduledoc false
  def address, do: 0x8E
  use TypedStruct

  typedstruct do
    field(:pulse_length, 0..3, default: 1)
    field(:pulse_count, 0..63, default: 0)
  end

  # Bits 7:6 = pulse length, bits 5:0 = pulse count. BUGFIX: mangled
  # segments reconstructed from the identically-shaped GPULSE register.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b76 = d.pulse_length
    b50 = d.pulse_count
    <<b76::2, b50::6>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<b76::2, b50::6>>) do
    %__MODULE__{
      pulse_length: b76,
      pulse_count: b50
    }
  end
end
# 0x8F CONTROL Read/Write Gain control
defmodule CONTROL do
  @moduledoc false
  def address, do: 0x8F
  use TypedStruct

  typedstruct do
    field(:led_drive_strength, 0..3, default: 0)
    field(:proximity_gain, 0..3, default: 0)
    field(:als_and_color_gain, 0..3, default: 0)
  end

  # Bits 7:6 = LDRIVE, 3:2 = PGAIN, 1:0 = AGAIN; bits 5:4 reserved (0).
  # BUGFIX: mangled segments reconstructed from the field layout.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b76 = d.led_drive_strength
    b32 = d.proximity_gain
    b10 = d.als_and_color_gain
    <<b76::2, 0::2, b32::2, b10::2>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<b76::2, _::2, b32::2, b10::2>>) do
    %__MODULE__{
      led_drive_strength: b76,
      proximity_gain: b32,
      als_and_color_gain: b10
    }
  end
end
# 0x90 CONFIG2 Read/Write Configuration register two
defmodule CONFIG2 do
  @moduledoc false
  def address, do: 0x90
  use TypedStruct

  typedstruct do
    field(:proximity_saturation_interrupt, 0 | 1, default: 0)
    field(:als_saturation_interrupt, 0 | 1, default: 0)
    field(:led_boost, 0..3, default: 0)
  end

  # Bit 7 = PSIEN, bit 6 = CPSIEN, bits 5:4 = LED_BOOST; bit 0 is reserved
  # and written as 1 (the trailing `1::1` survived the mangling). BUGFIX:
  # remaining segments reconstructed — TODO confirm against the datasheet.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b7 = d.proximity_saturation_interrupt
    b6 = d.als_saturation_interrupt
    b54 = d.led_boost
    <<b7::1, b6::1, b54::2, 0::3, 1::1>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<b7::1, b6::1, b54::2, _::3, _::1>>) do
    %__MODULE__{
      proximity_saturation_interrupt: b7,
      als_saturation_interrupt: b6,
      led_boost: b54
    }
  end
end
# 0x92 ID Read-only Device ID
defmodule ID do
  @moduledoc false

  # I2C register address.
  def address do
    0x92
  end
end
# 0x93 STATUS Read-only Device status 0x00
defmodule STATUS do
  @moduledoc false
  def address, do: 0x93
  use TypedStruct

  typedstruct do
    field(:clear_photo_diode_saturation, 0 | 1, default: 0)
    field(:proximity_or_gesture_saturation, 0 | 1, default: 0)
    field(:proximity_interrupt, 0 | 1, default: 0)
    field(:als_interrupt, 0 | 1, default: 0)
    field(:gesture_interrupt, 0 | 1, default: 0)
    field(:proximity_valid, 0 | 1, default: 0)
    field(:als_valid, 0 | 1, default: 0)
  end

  # Bit 3 is skipped (reserved); bit 2 is the gesture interrupt. BUGFIX:
  # mangled segments reconstructed from the struct-field assignments below.
  @spec parse(<<_::8>>) :: t()
  def parse(<<b7::1, b6::1, b5::1, b4::1, _::1, b2::1, b1::1, b0::1>>) do
    %__MODULE__{
      clear_photo_diode_saturation: b7,
      proximity_or_gesture_saturation: b6,
      proximity_interrupt: b5,
      als_interrupt: b4,
      gesture_interrupt: b2,
      proximity_valid: b1,
      als_valid: b0
    }
  end
end
# 0x94 CDATAL Read-only RGBC data
defmodule CDATAL do
  @moduledoc false
  use TypedStruct

  def address, do: 0x94

  typedstruct do
    field(:red, 0..0xFFFF, enforce: true)
    field(:green, 0..0xFFFF, enforce: true)
    field(:blue, 0..0xFFFF, enforce: true)
    field(:clear, 0..0xFFFF, enforce: true)
  end

  # Data arrives clear-channel first, each channel little-endian 16-bit.
  @spec parse(<<_::64>>) :: t()
  def parse(<<clear::little-16, red::little-16, green::little-16, blue::little-16>>) do
    %__MODULE__{clear: clear, red: red, green: green, blue: blue}
  end
end
# 0x9C PDATA Read-only Proximity data
defmodule PDATA do
  @moduledoc false

  # I2C register address.
  def address do
    0x9C
  end
end
# 0x9D POFFSET_UR Read/Write Proximity offset for photodiodes
defmodule POFFSET_UR do
  @moduledoc false
  import APDS9960.Register.Utils
  use TypedStruct

  def address, do: 0x9D

  typedstruct do
    field(:up_right, -127..127, default: 0)
    field(:down_left, -127..127, default: 0)
  end

  # Each offset is stored sign-magnitude: 1 sign bit + 7 magnitude bits.
  @spec to_binary(Enum.t()) :: <<_::16>>
  def to_binary(opts \\ []) do
    %__MODULE__{up_right: ur, down_left: dl} = struct!(__MODULE__, opts)
    <<sign(ur)::1, abs(ur)::7, sign(dl)::1, abs(dl)::7>>
  end

  @spec parse(<<_::16>>) :: t()
  def parse(<<byte_ur, byte_dl>>) do
    %__MODULE__{
      up_right: offset_correction_factor(<<byte_ur>>),
      down_left: offset_correction_factor(<<byte_dl>>)
    }
  end
end
# 0x9F CONFIG3 Read/Write Configuration register three
defmodule CONFIG3 do
  @moduledoc false
  def address, do: 0x9F
  use TypedStruct

  typedstruct do
    field(:proximity_gain_compensation, 0 | 1, default: 0)
    field(:sleep_after_interrupt, 0 | 1, default: 0)
    field(:proximity_mask, 0b0000..0b1110, default: 0)
  end

  # Bit 5 = PCMP, bit 4 = SAI, bits 3:0 = PMSK. Note that PCMP is derived
  # from the mask (see `proximity_gain_compensation/1`), not taken from the
  # struct field. BUGFIX: mangled bitstring segments reconstructed.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b5 = proximity_gain_compensation(d.proximity_mask)
    b4 = d.sleep_after_interrupt
    b30 = d.proximity_mask
    <<0::2, b5::1, b4::1, b30::4>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<_::2, b5::1, b4::1, b30::4>>) do
    %__MODULE__{
      proximity_gain_compensation: b5,
      sleep_after_interrupt: b4,
      proximity_mask: b30
    }
  end

  # Gain compensation is enabled for these specific mask patterns.
  defp proximity_gain_compensation(0b0111), do: 1
  defp proximity_gain_compensation(0b1011), do: 1
  defp proximity_gain_compensation(0b1101), do: 1
  defp proximity_gain_compensation(0b1110), do: 1
  defp proximity_gain_compensation(0b0101), do: 1
  defp proximity_gain_compensation(0b1010), do: 1
  defp proximity_gain_compensation(_proximity_mask), do: 0
end
# 0xA0 GPENTH Read/Write Gesture proximity enter/exit threshold
defmodule GPENTH do
  @moduledoc false
  use TypedStruct

  def address, do: 0xA0

  typedstruct do
    field(:enter, byte, default: 0)
    field(:exit, byte, default: 0)
  end

  # One byte per threshold, enter first.
  @spec to_binary(Enum.t()) :: <<_::16>>
  def to_binary(opts \\ []) do
    %__MODULE__{enter: enter, exit: exit} = struct!(__MODULE__, opts)
    <<enter, exit>>
  end

  @spec parse(<<_::16>>) :: t()
  def parse(<<enter_byte, exit_byte>>), do: %__MODULE__{enter: enter_byte, exit: exit_byte}
end
# 0xA2 GCONF1 Read/Write Gesture configuration one
defmodule GCONF1 do
  @moduledoc false
  def address, do: 0xA2
  use TypedStruct

  typedstruct do
    field(:fifo_threshold, 0..3, default: 0)
    # BUGFIX: the mask occupies 4 bits, so the range is the *binary*
    # literal 0b1111 (15), not hex 0x1111 (4369) as previously typed.
    field(:exit_mask, 0b0000..0b1111, default: 0)
    field(:exit_persistence, 0..3, default: 0)
  end

  # Bits 7:6 = GFIFOTH, 5:2 = GEXMSK, 1:0 = GEXPERS. BUGFIX: mangled
  # bitstring segments reconstructed from the field layout.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b76 = d.fifo_threshold
    b52 = d.exit_mask
    b10 = d.exit_persistence
    <<b76::2, b52::4, b10::2>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<b76::2, b52::4, b10::2>>) do
    %__MODULE__{
      fifo_threshold: b76,
      exit_mask: b52,
      exit_persistence: b10
    }
  end
end
# 0xA3 GCONF2 Read/Write Gesture configuration two
defmodule GCONF2 do
  @moduledoc false
  def address, do: 0xA3
  use TypedStruct

  typedstruct do
    field(:gain, 0..3, default: 0)
    field(:led_drive_strength, 0..3, default: 0)
    field(:wait_time, 0..7, default: 0)
  end

  # Bit 7 reserved; bits 6:5 = GGAIN, 4:3 = GLDRIVE, 2:0 = GWTIME.
  # BUGFIX: mangled bitstring segments reconstructed.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    d = struct!(__MODULE__, opts)
    b65 = d.gain
    b43 = d.led_drive_strength
    b20 = d.wait_time
    <<0::1, b65::2, b43::2, b20::3>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<0::1, b65::2, b43::2, b20::3>>) do
    %__MODULE__{
      gain: b65,
      led_drive_strength: b43,
      wait_time: b20
    }
  end
end
## 0xA4 GOFFSET_U Read/Write Gesture UP offset register
defmodule GOFFSET_U do
  @moduledoc false
  import APDS9960.Register.Utils

  def address, do: 0xA4

  # Sign-magnitude encoding: 1 sign bit + 7 magnitude bits.
  @spec to_binary(-127..127) :: <<_::8>>
  def to_binary(value) when is_number(value), do: <<sign(value)::1, abs(value)::7>>

  @spec parse(<<_::8>>) :: -127..127
  def parse(<<byte>>), do: offset_correction_factor(<<byte>>)
end

## 0xA5 GOFFSET_D Read/Write Gesture DOWN offset register
defmodule GOFFSET_D do
  @moduledoc false
  import APDS9960.Register.Utils

  def address, do: 0xA5

  @spec to_binary(-127..127) :: <<_::8>>
  def to_binary(value) when is_number(value), do: <<sign(value)::1, abs(value)::7>>

  @spec parse(<<_::8>>) :: -127..127
  def parse(<<byte>>), do: offset_correction_factor(<<byte>>)
end

## 0xA7 GOFFSET_L Read/Write Gesture LEFT offset register
defmodule GOFFSET_L do
  @moduledoc false
  import APDS9960.Register.Utils

  def address, do: 0xA7

  @spec to_binary(-127..127) :: <<_::8>>
  def to_binary(value) when is_number(value), do: <<sign(value)::1, abs(value)::7>>

  @spec parse(<<_::8>>) :: -127..127
  def parse(<<byte>>), do: offset_correction_factor(<<byte>>)
end

## 0xA9 GOFFSET_R Read/Write Gesture RIGHT offset register
defmodule GOFFSET_R do
  @moduledoc false
  import APDS9960.Register.Utils

  def address, do: 0xA9

  @spec to_binary(-127..127) :: <<_::8>>
  def to_binary(value) when is_number(value), do: <<sign(value)::1, abs(value)::7>>

  @spec parse(<<_::8>>) :: -127..127
  def parse(<<byte>>), do: offset_correction_factor(<<byte>>)
end
# 0xA6 GPULSE Read/Write Gesture pulse count and length
defmodule GPULSE do
  @moduledoc false
  use TypedStruct

  def address, do: 0xA6

  typedstruct do
    field(:pulse_length, 0..3, default: 0)
    field(:pulse_count, 0..63, default: 0)
  end

  # Bits 7:6 = pulse length, bits 5:0 = pulse count.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    %__MODULE__{pulse_length: len, pulse_count: count} = struct!(__MODULE__, opts)
    <<len::2, count::6>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<len::2, count::6>>) do
    %__MODULE__{pulse_length: len, pulse_count: count}
  end
end
# 0xAA GCONF3 Read/Write Gesture configuration three
defmodule GCONF3 do
  @moduledoc false
  use TypedStruct

  def address, do: 0xAA

  typedstruct do
    field(:dimension, 0..3, default: 0)
  end

  # Only bits 1:0 (GDIMS) are used; the rest are written as 0.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    %__MODULE__{dimension: dims} = struct!(__MODULE__, opts)
    <<0::6, dims::2>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<0::6, dims::2>>), do: %__MODULE__{dimension: dims}
end
# 0xAB GCONF4 Read/Write Gesture configuration four
defmodule GCONF4 do
  @moduledoc false
  use TypedStruct

  def address, do: 0xAB

  typedstruct do
    field(:interrupt, 0 | 1, default: 0)
    field(:mode, 0 | 1, default: 0)
  end

  # Bit 1 = GIEN (interrupt enable), bit 0 = GMODE.
  @spec to_binary(Enum.t()) :: <<_::8>>
  def to_binary(opts \\ []) do
    %__MODULE__{interrupt: gien, mode: gmode} = struct!(__MODULE__, opts)
    <<0::6, gien::1, gmode::1>>
  end

  @spec parse(<<_::8>>) :: t()
  def parse(<<0::6, gien::1, gmode::1>>) do
    %__MODULE__{interrupt: gien, mode: gmode}
  end
end
# 0xAE GFLVL Read-only Gesture FIFO level
defmodule GFLVL do
  @moduledoc false

  # I2C register address.
  def address do
    0xAE
  end
end

# 0xAF GSTATUS Read-only Gesture status
defmodule GSTATUS do
  @moduledoc false
  use TypedStruct

  def address, do: 0xAF

  typedstruct do
    field(:fifo_overflow, 0 | 1, default: 0)
    field(:valid, 0 | 1, default: 0)
  end

  # Bit 1 = GFOV (FIFO overflow), bit 0 = GVALID.
  @spec parse(<<_::8>>) :: t()
  def parse(<<0::6, gfov::1, gvalid::1>>) do
    %__MODULE__{fifo_overflow: gfov, valid: gvalid}
  end
end
# 0xE4 IFORCE W Force interrupt
defmodule IFORCE do
  @moduledoc false

  def address do
    0xE4
  end
end

# 0xE5 PICLEAR W Proximity interrupt clear
defmodule PICLEAR do
  @moduledoc false

  def address do
    0xE5
  end
end

# 0xE6 CICLEAR W ALS clear channel interrupt clear
defmodule CICLEAR do
  @moduledoc false

  def address do
    0xE6
  end
end

# 0xE7 AICLEAR W All non-gesture interrupts clear
defmodule AICLEAR do
  @moduledoc false

  def address do
    0xE7
  end
end
# 0xFC GFIFO_U Read-only Gesture FIFO UP/DOWN/LEFT/RIGHT values
defmodule GFIFO_U do
  @moduledoc false

  def address, do: 0xFC

  # Splits the FIFO stream into 4-byte datasets; any trailing partial
  # dataset (fewer than 4 bytes) is discarded, matching the behaviour of a
  # binary comprehension generator.
  @spec parse(binary) :: [{byte, byte, byte, byte}]
  def parse(<<up, down, left, right, rest::binary>>) do
    [{up, down, left, right} | parse(rest)]
  end

  def parse(rest) when is_binary(rest), do: []
end
end
|
lib/apds9960/register.ex
| 0.675444
| 0.546073
|
register.ex
|
starcoder
|
defmodule Blunt.Data.Factories.Values.Prop do
  @moduledoc false
  # A `Prop` assigns a single `field` in the factory accumulator.
  # `path_func_or_value` may be a key-path into the accumulator (a list), a
  # 0- or 1-arity function, or a literal value. When `lazy` is true the
  # prop only runs if the field is not already present in the accumulator.
  @derive {Inspect, except: [:lazy]}
  defstruct [:field, :path_func_or_value, lazy: false]

  alias Blunt.Data.FactoryError
  alias Blunt.Data.Factories.Factory
  alias Blunt.Data.Factories.Values.Prop

  defimpl Blunt.Data.Factories.Value do
    def declared_props(%Prop{field: field}), do: [field]

    # Key-path props: look the value up inside the accumulator itself.
    def evaluate(%Prop{field: field, path_func_or_value: path, lazy: lazy}, acc, current_factory)
        when is_list(path) do
      if evaluate?(lazy, acc, field) do
        {acc, value} = resolve_path(acc, path)
        value = Factory.log_value(current_factory, value, field, lazy, "prop")
        Map.put(acc, field, value)
      else
        acc
      end
    end

    # 0-arity function props: call the function and unwrap its result.
    def evaluate(%Prop{field: field, path_func_or_value: func, lazy: lazy}, acc, current_factory)
        when is_function(func, 0) do
      if evaluate?(lazy, acc, field) do
        value = unwrap!(func.(), current_factory)
        value = Factory.log_value(current_factory, value, field, lazy, "prop")
        Map.put(acc, field, value)
      else
        acc
      end
    end

    # 1-arity function props: the function receives the accumulator.
    def evaluate(%Prop{field: field, path_func_or_value: func, lazy: lazy}, acc, current_factory)
        when is_function(func, 1) do
      if evaluate?(lazy, acc, field) do
        value = unwrap!(func.(acc), current_factory)
        value = Factory.log_value(current_factory, value, field, lazy, "prop")
        Map.put(acc, field, value)
      else
        acc
      end
    end

    # Literal props: store the value as-is.
    def evaluate(%Prop{field: field, path_func_or_value: value, lazy: lazy}, acc, current_factory) do
      if evaluate?(lazy, acc, field) do
        value = Factory.log_value(current_factory, value, field, lazy, "prop")
        Map.put(acc, field, value)
      else
        acc
      end
    end

    # A lazy prop only runs when the field is still missing.
    defp evaluate?(lazy, acc, field), do: not lazy or not Map.has_key?(acc, field)

    # Returns `{acc, value}`. For multi-key paths the accumulator may gain
    # an empty map under the head key so `get_in/2` never traverses `nil`.
    defp resolve_path(acc, []), do: {acc, []}

    defp resolve_path(acc, [key]) do
      # BUGFIX: `Kernel.get_in/2` requires a *list* of accessors; the
      # previous `get_in(acc, Access.key(key))` raised FunctionClauseError.
      {acc, get_in(acc, [Access.key(key)])}
    end

    defp resolve_path(acc, [head | rest]) do
      # Ensure that the first key in the path is not nil.
      acc =
        Map.update(acc, head, %{}, fn
          nil -> %{}
          other -> other
        end)

      keys = [Access.key(head, %{}) | Enum.map(rest, &Access.key/1)]
      {acc, get_in(acc, keys)}
    end

    # Unwraps {:ok, _}/{:error, _} results; for lists, one level deep only.
    defp unwrap!({:ok, result}, _factory), do: result

    defp unwrap!({:error, error}, factory),
      do: raise(FactoryError, reason: error, factory: factory)

    defp unwrap!(results, factory) when is_list(results) do
      Enum.map(results, fn
        {:ok, result} -> result
        {:error, error} -> raise FactoryError, reason: error, factory: factory
        other -> other
      end)
    end

    defp unwrap!(other, _factory), do: other
  end
end
|
apps/blunt_data/lib/blunt/data/factories/values/prop.ex
| 0.693784
| 0.493775
|
prop.ex
|
starcoder
|
defmodule Ktsllex.Schemas do
  @moduledoc """
  This sets up the schemas as required to run.
  """
  use Confex, otp_app: :ktsllex
  require Logger
  alias Ktsllex.FileJson

  @doc """
  Creates key and value schemas with schema_name on host, loading json schemas from base_schema_file

  ### Params

  * `host` - A kafka broker, eg localhost:8081
  * `schema_name` - The schema name to register the schemas as
    * Replaces the schema_name in the schema files with the one provided
  * `base_schema_file` - The path to the schema files
    * eg "./schemas/file"
    * Expects to find two files, one ending `-key.json` and one `-value.json`
    * eg "schemas/file-key.json"

  ### Example

      iex> Schemas.run("localhost:8081", "schema-name", "./schemas/file")

  The above would make two HTTP post requests to:

  * http://localhost:8081/subjects/schema-name-value/versions
  * http://localhost:8081/subjects/schema-name-key/versions

  With the schema loaded from `schemas/file-key.json` and `schemas/file-value.json`,
  in which the `namespace` within the schema is updated to `schema-name`

  More info on the API here:
  https://docs.confluent.io/current/schema-registry/docs/api.html#post--subjects-(string-%20subject)-versions

  A manual curl example:

  ```
  curl -X POST \
    http://localhost:8081/subjects/schema_name/versions \
    -H 'Content-Type: application/json' \
    -d '{
      "schema":
        "{ \"type\": \"record\", \"name\": \"Key\", \"namespace\": \"schema_name\", \"fields\": [ { \"name\": \"id\", \"type\": \"int\"} ], \"connect.name\": \"schema_name\" }"
  }'
  ```
  """
  def run(host, schema_name, base_schema_file) do
    Logger.info(
      "#{__MODULE__} Creating schemas on:#{inspect(host)} with name:#{inspect(schema_name)} from:#{
        inspect(base_schema_file)
      }"
    )

    Enum.map(["-key", "-value"], &process(host, schema_name, base_schema_file, &1))
  end

  # Registers one schema (key or value) with the registry.
  # Returns :error when the schema files cannot be read or the HTTP request
  # fails at the transport level.
  defp process(host, schema_name, base_schema_file, type) do
    url = build_url(host, schema_name, type)

    case build_schema(base_schema_file, schema_name, type) do
      :error ->
        :error

      schema ->
        url
        |> post(schema)
        |> extract_body()
        |> decode_and_report()
    end
  end

  defp decode_and_report(:error), do: :error

  defp decode_and_report(body) do
    body
    |> Poison.decode!()
    |> output_result()
  end

  defp build_url(host, schema_name, key_or_value) do
    host <> "/subjects/" <> schema_name <> key_or_value <> "/versions"
  end

  # Overwrite schema name in provided base schema with given schema name
  defp build_schema(base_schema_file, schema_name, type) do
    base_schema_file
    |> read_schema(type)
    |> update_namespace(schema_name)
    |> update_connect_name(schema_name, type)
  end

  defp update_namespace({:error, _}, _schema_name) do
    Logger.error("Error reading schema files")
    :error
  end

  defp update_namespace(schema, schema_name) do
    schema
    |> Map.put("namespace", schema_name)
  end

  defp update_connect_name(:error, _schema_name, _type), do: :error
  defp update_connect_name(schema, _schema_name, "-value"), do: schema

  defp update_connect_name(schema, schema_name, "-key") do
    schema
    |> Map.put("connect.name", schema_name <> ".Key")
  end

  defp read_schema(base_schema_file, type) do
    (base_schema_file <> type <> ".json")
    |> FileJson.read!()
  end

  defp post(url, schema) do
    # The registry expects {"schema": "<escaped json>"}: the double encode
    # deliberately produces the schema as an escaped JSON *string*.
    encoded_schema =
      schema
      |> Poison.encode!()
      |> Poison.encode!()

    body = ~s({"schema":) <> encoded_schema <> "}"
    http_client().post(url, body, [{"Content-Type", "application/json"}])
  end

  defp extract_body({:ok, %HTTPoison.Response{body: body}}), do: body

  # BUGFIX: transport failures previously raised FunctionClauseError here;
  # now they are logged and surfaced as :error.
  defp extract_body({:error, %HTTPoison.Error{reason: reason}}) do
    Logger.error("#{__MODULE__} schema registration request failed: #{inspect(reason)}")
    :error
  end

  defp output_result(result) do
    Logger.info("#{__MODULE__} created schema #{inspect(result)}")
  end

  defp http_client(), do: config()[:http_client] || HTTPoison
end
|
lib/ktsllex/schemas.ex
| 0.746971
| 0.654029
|
schemas.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.DoorLockConfigurationReport do
  @moduledoc """
  This command is used to advertise the configuration parameters of a door lock device.

  Params:

  * `:operation_type` - the operation type at the supporting node. One of :constant_operation and :timed_operation. (required)
  * `:manual_outside_door_handles` - List of outside handles (1..4) that can open locally (required)
  * `:manual_inside_door_handles` - List of inside handles (1..4) that can open locally (required)
  * `:lock_timeout` - The seconds that the supporting node must wait before returning to the secured
    mode when receiving timed operation modes in a Door Lock Operation Set Command (required)
  * `:auto_relock_time` - The time setting in seconds for auto-relock functionality. (v.4 only)
  * `:hold_and_release_time` - The time setting in seconds for letting the latch retracted after the
    supporting node's mode has been changed to unsecured. (v.4 only)
  * `:twist_assist?` - Indicates if the twist assist functionality is enabled. (v.4 only)
  * `:block_to_block?` - Indicates if the block-to-block functionality is enabled. (v.4 only)
  """
  @behaviour Grizzly.ZWave.Command
  alias Grizzly.ZWave.{Command, DecodeError}
  alias Grizzly.ZWave.CommandClasses.DoorLock
  @type param ::
          {:operation_type, DoorLock.operation_type()}
          | {:manual_outside_door_handles, [1..4]}
          | {:manual_inside_door_handles, [1..4]}
          | {:lock_timeout, non_neg_integer()}
          | {:auto_relock_time, non_neg_integer}
          | {:hold_and_release_time, non_neg_integer}
          | {:twist_assist?, boolean}
          | {:block_to_block?, boolean}
  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    command = %Command{
      name: :door_lock_configuration_report,
      command_byte: 0x06,
      command_class: DoorLock,
      params: params,
      impl: __MODULE__
    }
    {:ok, command}
  end
  @impl true
  @spec encode_params(Command.t()) :: binary()
  # Encodes the v1-3 fields always; the v4 tail (auto-relock, hold/release,
  # block-to-block and twist-assist flags) is appended only when
  # :auto_relock_time is present, in which case the other v4 params are
  # required (Command.param!/2 raises when they are missing).
  def encode_params(command) do
    operation_type_byte =
      Command.param!(command, :operation_type) |> DoorLock.operation_type_to_byte()
    manual_outside_door_handles_bitmask =
      Command.param!(command, :manual_outside_door_handles) |> DoorLock.door_handles_to_bitmask()
    manual_inside_door_handles_bitmask =
      Command.param!(command, :manual_inside_door_handles) |> DoorLock.door_handles_to_bitmask()
    # The wire format carries the timeout split into whole minutes + seconds.
    {lock_timeout_mins, lock_timeout_secs} =
      Command.param!(command, :lock_timeout) |> DoorLock.to_minutes_and_seconds()
    common_binary =
      <<operation_type_byte, manual_outside_door_handles_bitmask::size(4),
        manual_inside_door_handles_bitmask::size(4), lock_timeout_mins, lock_timeout_secs>>
    auto_relock_time = Command.param(command, :auto_relock_time)
    if auto_relock_time == nil do
      common_binary
    else
      # v.4
      hold_and_release_time = Command.param!(command, :hold_and_release_time)
      block_to_block_bit = if Command.param!(command, :block_to_block?), do: 0x01, else: 0x00
      twist_assist_bit = if Command.param!(command, :twist_assist?), do: 0x01, else: 0x00
      common_binary <>
        <<auto_relock_time::integer-unsigned-size(16),
          hold_and_release_time::integer-unsigned-size(16), 0x00::size(6),
          block_to_block_bit::size(1), twist_assist_bit::size(1)>>
    end
  end
  @impl true
  @spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
  # v1-3
  # The clause is selected by total payload length: exactly 5 bytes means
  # the report carries only the v1-3 fields.
  def decode_params(
        <<operation_type_byte, manual_outside_door_handles_bitmask::size(4),
          manual_inside_door_handles_bitmask::size(4), lock_timeout_mins, lock_timeout_secs>>
      ) do
    with {:ok, operation_type} <- DoorLock.operation_type_from_byte(operation_type_byte) do
      lock_timeout = 60 * lock_timeout_mins + lock_timeout_secs
      manual_outside_door_handles =
        DoorLock.door_handles_from_bitmask(manual_outside_door_handles_bitmask)
      manual_inside_door_handles =
        DoorLock.door_handles_from_bitmask(manual_inside_door_handles_bitmask)
      {:ok,
       [
         operation_type: operation_type,
         manual_outside_door_handles: manual_outside_door_handles,
         manual_inside_door_handles: manual_inside_door_handles,
         lock_timeout: lock_timeout
       ]}
    else
      {:error, %DecodeError{} = decode_error} ->
        # Tag the error with this command's name for the caller.
        {:error, %DecodeError{decode_error | command: :door_lock_configuration_report}}
    end
  end
  # v4
  # Same prefix as v1-3 plus the 16-bit auto-relock and hold-and-release
  # times and the two trailing flag bits.
  def decode_params(
        <<operation_type_byte, manual_outside_door_handles_bitmask::size(4),
          manual_inside_door_handles_bitmask::size(4), lock_timeout_mins, lock_timeout_secs,
          auto_relock_time::integer-unsigned-size(16),
          hold_and_release_time::integer-unsigned-size(16), _reserved::size(6),
          block_to_block_bit::size(1), twist_assist_bit::size(1)>>
      ) do
    with {:ok, operation_type} <- DoorLock.operation_type_from_byte(operation_type_byte) do
      lock_timeout = 60 * lock_timeout_mins + lock_timeout_secs
      manual_outside_door_handles =
        DoorLock.door_handles_from_bitmask(manual_outside_door_handles_bitmask)
      manual_inside_door_handles =
        DoorLock.door_handles_from_bitmask(manual_inside_door_handles_bitmask)
      {:ok,
       [
         operation_type: operation_type,
         manual_outside_door_handles: manual_outside_door_handles,
         manual_inside_door_handles: manual_inside_door_handles,
         lock_timeout: lock_timeout,
         auto_relock_time: auto_relock_time,
         hold_and_release_time: hold_and_release_time,
         block_to_block?: block_to_block_bit == 1,
         twist_assist?: twist_assist_bit == 1
       ]}
    else
      {:error, %DecodeError{} = decode_error} ->
        {:error, %DecodeError{decode_error | command: :door_lock_configuration_report}}
    end
  end
end
|
lib/grizzly/zwave/commands/door_lock_configuration_report.ex
| 0.80077
| 0.441733
|
door_lock_configuration_report.ex
|
starcoder
|
defmodule Astro.Lunar do
@moduledoc """
Calulates lunar phases.
Each of the phases of the Moon is defined by the
angle between the Moon and Sun in the sky. When the Moon
is in between the Earth and the Sun, so that there is nearly a
zero degree separation, we see a New Moon.
Because the orbit of the Moon is tilted in relation to the
Earthβs orbit around the Sun, a New Moon can still be as much
as 5.2 degrees away from the Sun, thus why there isn't a
solar eclipse every month.
A crescent moon is 45 degrees from the Sun, a quarter moon
is 90 degrees from the Sun, a gibbous moon is 135 degrees
from the Sun, and the Full Moon is 180 degrees away from
the Sun.
"""
alias Astro.{Math, Time, Solar}
import Astro.Math, only: [
deg: 1,
sin: 1,
cos: 1,
mt: 1,
asin: 1,
sigma: 2,
mod: 2,
degrees: 1,
poly: 2,
invert_angular: 4
]
import Astro.Time, only: [
j2000: 0,
julian_centuries_from_moment: 1,
mean_synodic_month: 0
]
import Astro.Earth, only: [
nutation: 1
]
@months_epoch_to_j2000 24_724
@average_distance_earth_to_moon 385_000_560.0
@doc """
Returns the date time of the new
moon before a given moment.
## Arguments
* a `t:Time.moment()` which is a float number of days
since `0000-01-01`
## Returns
* a `t:Time.moment()` which is a float number of days
since `0000-01-01`
## Example
iex> Astro.Lunar.date_time_new_moon_before 738390
738375.5757777032
"""
@doc since: "0.5.0"
@spec date_time_new_moon_before(Time.moment()) :: Time.moment()
def date_time_new_moon_before(t) when is_number(t) do
  # Moment of the 0th new moon, the epoch of the new-moon numbering.
  t0 = nth_new_moon(0)
  phi = lunar_phase_at(t)

  # Estimate the index of the new moon nearest to `t`. `round/1` already
  # returns an integer, so the previous `|> trunc()` was a redundant no-op
  # and has been removed; `deg(360.0)` matches the float literal used by the
  # sibling `date_time_new_moon_at_or_after/1`.
  n = round((t - t0) / mean_synodic_month() - phi / deg(360.0))

  # Search downward from the estimate for the last new moon strictly before `t`.
  nth_new_moon(Math.final(n - 1, &(nth_new_moon(&1) < t)))
end
@doc """
Returns the date time of the new
moon at or after a given date or
date time.
## Arguments
* a `t:Time.moment()` which is a float number of days
since `0000-01-01`
## Returns
* a `t:Time.moment()` which is a float number of days
since `0000-01-01`
## Example
iex> Astro.Lunar.date_time_new_moon_at_or_after 738390
738405.0352292997
"""
@doc since: "0.5.0"
@spec date_time_new_moon_at_or_after(Time.moment()) :: Time.moment()
def date_time_new_moon_at_or_after(t) when is_number(t) do
  # Current phase plus the epoch new moon give an initial index estimate.
  phase = lunar_phase_at(t)
  epoch = nth_new_moon(0)
  estimate = round((t - epoch) / mean_synodic_month() - phase / deg(360.0))

  # Scan forward from the estimate for the first new moon at or after `t`.
  estimate
  |> Math.next(fn index -> nth_new_moon(index) >= t end)
  |> nth_new_moon()
end
@doc """
Returns the lunar phase as a
float number of degrees at a given
moment.
## Arguments
* a `t:Time.moment()` which is a float number of days
  since `0000-01-01`
## Returns
* the lunar phase as a float number of
  degrees.
## Example
    iex> Astro.Lunar.lunar_phase_at 738389.5007195644
    180.00001498208536
    iex> Astro.Lunar.lunar_phase_at 738346.0544609067
    0.021567106773019873
"""
@doc since: "0.5.0"
@spec lunar_phase_at(Time.moment()) :: Time.moment()
def lunar_phase_at(t) when is_number(t) do
  # Primary estimate: difference of the ecliptic longitudes of moon and sun.
  phi = mod(lunar_longitude(t) - solar_longitude(t), 360)
  t0 = nth_new_moon(0)
  n = round((t - t0) / mean_synodic_month())
  # Secondary estimate: fraction of the current synodic month elapsed since
  # the nearest new moon, scaled to degrees.
  phi_prime = deg(360) * mod((t - nth_new_moon(n)) / mean_synodic_month(), 1)
  # Prefer the longitude-based value unless the two estimates disagree by
  # more than half a cycle, in which case the month-fraction estimate wins.
  # NOTE(review): the @spec return type says Time.moment() but this returns
  # a phase angle in degrees - looks like a spec oversight; confirm upstream.
  if abs(phi - phi_prime) > deg(180.0) do
    phi_prime
  else
    phi
  end
end
@doc """
Returns the date time of a given
lunar phase at or before a given
moment.
## Arguments
* a `t:Time.moment()` which is a float number of days
since `0000-01-01`
* `phase` is the required lunar phase expressed
as a float number of degrees between `0.0` and
`360.0`
## Returns
* a `t:Time.moment()` which is a float number of days
since `0000-01-01`
## Example
iex> Astro.Lunar.date_time_lunar_phase_at_or_before(738368, Astro.Lunar.new_moon())
738346.0524695957
"""
@doc since: "0.5.0"
@spec date_time_lunar_phase_at_or_before(Time.moment(), Astro.phase()) :: Time.moment()
def date_time_lunar_phase_at_or_before(t, phase) do
  # Days per degree of phase change, used to step back to an approximate hit.
  days_per_degree = mean_synodic_month() * (1.0 / deg(360.0))
  approx = t - days_per_degree * mod(lunar_phase_at(t) - phase, 360.0)

  # Numerically invert lunar_phase_at/1 inside a +/- 2 day window around the
  # approximation, capped so the result never exceeds `t`.
  lower = approx - 2
  upper = min(t, approx + 2)
  invert_angular(&lunar_phase_at/1, phase, lower, upper)
end
@doc """
Returns the date time of a given
lunar phase at or after a given
date time or date.
## Arguments
* a `moment` which is a float number of days
  since `0000-01-01`
* `phase` is the required lunar phase expressed
  as a float number of degrees between `0.0` and
  `360.0`
## Returns
* a `t:Time.moment()` which is a float number of days
  since `0000-01-01`
## Example
    iex> Astro.Lunar.date_time_lunar_phase_at_or_after(738368, Astro.Lunar.full_moon())
    738389.5007195254
"""
@doc since: "0.5.0"
@spec date_time_lunar_phase_at_or_after(Time.moment(), Astro.phase()) :: Time.moment()
def date_time_lunar_phase_at_or_after(t, phase) do
  # Linear estimate of when the requested phase is next reached.
  tau = t + mean_synodic_month() * (1 / deg(360.0)) * mod(phase - lunar_phase_at(t), 360.0)
  # Refine by inverting lunar_phase_at/1 in a +/- 2 day window clamped to >= t.
  a = max(t, tau - 2)
  b = tau + 2
  invert_angular(&lunar_phase_at/1, phase, a, b)
end
@doc """
Returns the lunar position at a given moment as a
`{right_ascension, declination, distance}` tuple,
with the distance expressed in meters.
"""
@doc since: "0.6.0"
@spec lunar_position(Time.moment()) :: {Astro.angle(), Astro.angle(), Astro.meters()}
def lunar_position(t) do
  # Ecliptic longitude/latitude, converted to equatorial coordinates below.
  lambda = lunar_longitude(t)
  beta = lunar_latitude(t)
  distance = lunar_distance(t)
  {Astro.right_ascension(t, beta, lambda), Astro.declination(t, beta, lambda), distance}
end
@doc """
Returns the fractional illumination of the moon
at a given time as a fraction between 0.0 and 1.0.
"""
@doc since: "0.6.0"
@spec illuminated_fraction_of_moon(Time.time()) :: float()
def illuminated_fraction_of_moon(t) do
  # Equatorial positions (right ascension, declination, distance) of the
  # moon and the sun at `t`.
  {a0, d0, r0} = lunar_position(t)
  {a, d, r} = Solar.solar_position(t)
  # Convert the solar distance to meters so it is commensurate with `r0`
  # (Math.au_to_m/1 converts astronomical units to meters).
  r = Math.au_to_m(r)
  # `phi` is the angular separation (elongation) of moon and sun.
  phi = :math.acos(sin(d0) * sin(d) + cos(d0) * cos(d) * cos(a0 - a))
  # `i` is the phase angle; the illuminated fraction follows from it.
  i = Math.atan_r(r * :math.sin(phi), r0 - r * :math.cos(phi))
  0.5 * (1 + :math.cos(i))
end
@doc """
The new moon lunar phase, expressed as a float
number of degrees (0.0).
"""
@doc since: "0.5.0"
@spec new_moon() :: Astro.phase()
def new_moon, do: deg(0.0)

@doc """
The full moon lunar phase, expressed as a float
number of degrees (180.0).
"""
@doc since: "0.5.0"
@spec full_moon() :: Astro.phase()
def full_moon, do: deg(180.0)

@doc """
The first quarter lunar phase, expressed as a float
number of degrees (90.0).
"""
@doc since: "0.5.0"
@spec first_quarter() :: Astro.phase()
def first_quarter, do: deg(90.0)

@doc """
The last quarter lunar phase, expressed as a float
number of degrees (270.0).
"""
@doc since: "0.5.0"
@spec last_quarter() :: Astro.phase()
def last_quarter, do: deg(270.0)
@doc false
@doc since: "0.5.0"
@spec lunar_longitude(Time.moment()) :: Astro.phase()
def lunar_longitude(t) do
c = julian_centuries_from_moment(t)
l = mean_lunar_longitude(c)
d = lunar_elongation(c)
m = solar_anomaly(c)
m_prime = lunar_anomaly(c)
f = moon_node(c)
e = poly(c, [1.0, -0.002516, -0.0000074])
args_lunar_elong = [
0, 2, 2, 0, 0, 0, 2, 2, 2, 2, 0, 1, 0, 2, 0, 0, 4, 0, 4, 2, 2, 1,
1, 2, 2, 4, 2, 0, 2, 2, 1, 2, 0, 0, 2, 2, 2, 4, 0, 3, 2, 4, 0, 2,
2, 2, 4, 0, 4, 1, 2, 0, 1, 3, 4, 2, 0, 1, 2
]
args_solar_anom = [
0, 0, 0, 0, 1, 0, 0, -1, 0, -1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1,
0, 1, -1, 0, 0, 0, 1, 0, -1, 0, -2, 1, 2, -2, 0, 0, -1, 0, 0, 1,
-1, 2, 2, 1, -1, 0, 0, -1, 0, 1, 0, 1, 0, 0, -1, 2, 1, 0
]
args_lunar_anom = [
1, -1, 0, 2, 0, 0, -2, -1, 1, 0, -1, 0, 1, 0, 1, 1, -1, 3, -2,
-1, 0, -1, 0, 1, 2, 0, -3, -2, -1, -2, 1, 0, 2, 0, -1, 1, 0,
-1, 2, -1, 1, -2, -1, -1, -2, 0, 1, 4, 0, -2, 0, 2, 1, -2, -3,
2, 1, -1, 3
]
args_moon_node = [
0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, -2, 2, -2, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, -2, 2, 0, 2, 0, 0, 0, 0,
0, 0, -2, 0, 0, 0, 0, -2, -2, 0, 0, 0, 0, 0, 0, 0
]
sine_coeff = [
6288774, 1274027, 658314, 213618, -185116, -114332,
58793, 57066, 53322, 45758, -40923, -34720, -30383,
15327, -12528, 10980, 10675, 10034, 8548, -7888,
-6766, -5163, 4987, 4036, 3994, 3861, 3665, -2689,
-2602, 2390, -2348, 2236, -2120, -2069, 2048, -1773,
-1595, 1215, -1110, -892, -810, 759, -713, -700, 691,
596, 549, 537, 520, -487, -399, -381, 351, -340, 330,
327, -323, 299, 294
]
correction = deg(1 / 1000000) * sigma(
[sine_coeff, args_lunar_elong, args_solar_anom, args_lunar_anom, args_moon_node],
fn [v,w,x,y,z] ->
v * :math.pow(e, abs(x)) * sin(w * d + x * m + y * m_prime + z * f)
end
)
venus =
deg(3958 / 1000000) *
sin(deg(119.75) + c * deg(131.849))
jupiter =
deg(318 / 1000000) *
sin(deg(53.09) + c * deg(479_264.29))
flat_earth =
deg(1962 / 1000000) *
sin(l - f)
mod(l + correction + venus + jupiter + flat_earth + nutation(c), 360)
end
@doc since: "0.6.0"
@spec lunar_latitude(Time.moment()) :: Astro.angle()
def lunar_latitude(t) do
c = julian_centuries_from_moment(t)
l = mean_lunar_longitude(c)
d = lunar_elongation(c)
m = solar_anomaly(c)
m_prime = lunar_anomaly(c)
f = moon_node(c)
e = poly(c, [1.0, -0.002516, -0.0000074])
lunar_elongation = [
0,0,0,2,2,2,2,0,2,0,2,2,2,2,2,2,2,0,4,0,0,0,
1,0,0,0,1,0,4,4,0,4,2,2,2,2,0,2,2,2,2,4,2,2,
0,2,1,1,0,2,1,2,0,4,4,1,4,1,4,2
]
solar_anomaly = [
0,0,0,0,0,0,0,0,0,0,-1,0,0,1,-1,-1,-1,1,0,1,
0,1,0,1,1,1,0,0,0,0,0,0,0,0,-1,0,0,0,0,1,1,
0,-1,-2,0,1,1,1,1,1,0,-1,1,0,-1,0,0,0,-1,-2]
lunar_anomaly = [
0,1,1,0,-1,-1,0,2,1,2,0,-2,1,0,-1,0,-1,-1,-1,
0,0,-1,0,1,1,0,0,3,0,-1,1,-2,0,2,1,-2,3,2,-3,
-1,0,0,1,0,1,1,0,0,-2,-1,1,-2,2,-2,-1,1,1,-2,
0,0
]
moon_node = [
1,1,-1,-1,1,-1,1,1,-1,-1,-1,-1,1,-1,1,1,-1,-1,
-1,1,3,1,1,1,-1,-1,-1,1,-1,1,-3,1,-3,-1,-1,1,
-1,1,-1,1,1,1,1,-1,3,-1,-1,1,-1,-1,1,-1,1,-1,
-1,-1,-1,-1,-1,1
]
sine_coeff = [
5128122, 280602, 277693, 173237, 55413, 46271, 32573,
17198, 9266, 8822, 8216, 4324, 4200, -3359, 2463, 2211,
2065, -1870, 1828, -1794, -1749, -1565, -1491, -1475,
-1410, -1344, -1335, 1107, 1021, 833, 777, 671, 607,
596, 491, -451, 439, 422, 421, -366, -351, 331, 315,
302, -283, -229, 223, 223, -220, -220, -185, 181,
-177, 176, 166, -164, 132, -119, 115, 107
]
beta = deg(1.0 / 1000000.0) * sigma(
[sine_coeff, lunar_elongation, solar_anomaly, lunar_anomaly, moon_node],
fn [v, w, x, y, z] ->
v * :math.pow(e, abs(x)) * sin(w*d + x*m + y*m_prime + z*f)
end
)
venus =
deg(175.0 / 1000000.0) *
sin(deg(119.75) + c * deg(131.849) + f) *
sin(deg(119.75) + c * deg(131.849) - f)
flat_earth =
deg(-2235.0 / 1000000.0) * sin(l) +
deg(127.0 / 1000000.0) * sin(l - m_prime) +
deg(-115.0 / 1000000.0) * sin(l + m_prime)
extra = deg(382.0 / 1000000.0) * sin(deg(313.45) + (c * deg(481266.484)))
beta + venus + flat_earth + extra
end
@doc since: "0.4.0"
@spec lunar_altitude(Time.moment(), Geo.PointZ.t()) :: Astro.angle()
# Geocentric lunar altitude for an observer at the given point.
# NOTE(review): `psi` is the first coordinate (longitude) and `phi` the
# second (latitude) per the Geo.PointZ {lng, lat, alt} layout - confirm.
def lunar_altitude(t, %Geo.PointZ{coordinates: {psi, phi, _alt}}) do
  # Ecliptic position of the moon, converted to equatorial coordinates.
  lambda = lunar_longitude(t)
  beta = lunar_latitude(t)
  alpha = Astro.right_ascension(t, beta, lambda)
  delta = Astro.declination(t, beta, lambda)
  # Local hour angle from sidereal time and the observer's longitude.
  theta = Time.sidereal_from_moment(t)
  h = mod(theta + psi - alpha, 360.0)
  altitude = asin(sin(phi) * sin(delta) + cos(phi) * cos(delta) * cos(h))
  # Normalize the angle into the range [-180.0, 180.0).
  mod(altitude + deg(180.0), 360.0) - deg(180.0)
end
@doc since: "0.6.0"
@spec lunar_distance(Time.moment()) :: Astro.meters()
def lunar_distance(t) do
c = Time.julian_centuries_from_moment(t)
d = lunar_elongation(c)
m = solar_anomaly(c)
m_prime = lunar_anomaly(c)
f = moon_node(c)
e = poly(c, [1.0, -0.002516, -0.0000074])
lunar_elongation = [
0,2,2,0,0,0,2,2,2,2,0,1,0,2,0,0,4,0,4,2,2,1,
1,2,2,4,2,0,2,2,1,2,0,0,2,2,2,4,0,3,2,4,0,2,
2,2,4,0,4,1,2,0,1,3,4,2,0,1,2,2
]
solar_anomaly = [
0,0,0,0,1,0,0,-1,0,-1,1,0,1,0,0,0,0,0,0,1,1,
0,1,-1,0,0,0,1,0,-1,0,-2,1,2,-2,0,0,-1,0,0,1,
-1,2,2,1,-1,0,0,-1,0,1,0,1,0,0,-1,2,1,0,0
]
lunar_anomaly = [
1,-1,0,2,0,0,-2,-1,1,0,-1,0,1,0,1,1,-1,3,-2,
-1,0,-1,0,1,2,0,-3,-2,-1,-2,1,0,2,0,-1,1,0,
-1,2,-1,1,-2,-1,-1,-2,0,1,4,0,-2,0,2,1,-2,-3,
2,1,-1,3,-1
]
moon_node = [
0,0,0,0,0,2,0,0,0,0,0,0,0,-2,2,-2,0,0,0,0,0,
0,0,0,0,0,0,0,2,0,0,0,0,0,0,-2,2,0,2,0,0,0,0,
0,0,-2,0,0,0,0,-2,-2,0,0,0,0,0,0,0,-2
]
cos_coeff = [
-20905355,-3699111,-2955968,-569925,48888,-3149,
246158,-152138,-170733,-204586,-129620,108743,
104755,10321,0,79661,-34782,-23210,-21636,24208,
30824,-8379,-16675,-12831,-10445,-11650,14403,
-7003,0,10056,6322,-9884,5751,0,-4950,4130,0,
-3958,0,3258,2616,-1897,-2117,2354,0,0,-1423,
-1117,-1571,-1739,0,-4421,0,0,0,0,1165,0,0,
8752
]
correction = sigma(
[cos_coeff, lunar_elongation, solar_anomaly, lunar_anomaly, moon_node],
fn [v, w, x, y, z] ->
v * :math.pow(e, abs(x)) * cos((w * d) + (x * m) + (y * m_prime) + (z * f))
end
)
mt(@average_distance_earth_to_moon) + correction
end
@doc false
@doc since: "0.4.0"
@spec nth_new_moon(number()) :: Time.moment()
def nth_new_moon(n) do
k = n - @months_epoch_to_j2000
c = k / 1_236.85
approx =
j2000() + poly(c, [
5.09766, mean_synodic_month() * 1236.85, 0.0001437, -0.000000150, 0.00000000073
])
e = poly(c, [
1, -0.002516, -0.0000074
])
solar_anomaly = poly(c, [
2.5534, 1236.85 * 29.10535669, -0.0000014, -0.00000011
])
lunar_anomaly = poly(c, [
201.5643, 385.81693528 * 1236.85,
0.0107582, 0.00001238, -0.000000058
])
moon_argument = poly(c, [
160.7108, 390.67050284 * 1236.85,
-0.0016118, -0.00000227, 0.000000011
])
omega = poly(c, [
124.7746, -1.56375588 * 1236.85,
0.0020672, 0.00000215
])
e_factor = [
0, 1, 0, 0, 1, 1, 2, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0
]
solar_coeff = [
0, 1, 0, 0, -1, 1, 2, 0, 0, 1, 0, 1, 1, -1, 2,
0, 3, 1, 0, 1, -1, -1, 1, 0
]
lunar_coeff = [
1, 0, 2, 0, 1, 1, 0, 1, 1, 2, 3, 0, 0, 2, 1, 2,
0, 1, 2, 1, 1, 1, 3, 4
]
moon_coeff = [
0, 0, 0, 2, 0, 0, 0, -2, 2, 0, 0, 2, -2, 0, 0,
-2, 0, -2, 2, 2, 2, -2, 0, 0
]
sine_coeff = [
-0.40720, 0.17241, 0.01608,
0.01039, 0.00739, -0.00514,
0.00208, -0.00111, -0.00057,
0.00056, -0.00042, 0.00042,
0.00038, -0.00024, -0.00007,
0.00004, 0.00004, 0.00003,
0.00003, -0.00003, 0.00003,
-0.00002, -0.00002, 0.00002
]
correction =
deg(-0.00017) * sin(omega) +
sigma(
[sine_coeff, e_factor, solar_coeff, lunar_coeff, moon_coeff],
fn [v,w,x,y,z] ->
v * :math.pow(e, w) *
sin((x * solar_anomaly) + (y * lunar_anomaly) + (z * moon_argument))
end
)
extra =
deg(0.000325) *
sin(poly(c, [299.77, 132.8475848, -0.009173]))
add_const = [
251.88, 251.83, 349.42, 84.66, 141.74, 207.14, 154.84,
34.52, 207.19, 291.34, 161.72, 239.56, 331.55
]
add_coeff = [
0.016321, 26.651886, 36.412478, 18.206239, 53.303771,
2.453732, 7.306860, 27.261239, 0.121824, 1.844379,
24.198154, 25.513099, 3.592518
]
add_factor = [
0.000165, 0.000164, 0.000126, 0.000110, 0.000062, 0.000060,
0.000056, 0.000047, 0.000042, 0.000040, 0.000037, 0.000035,
0.000023
]
additional = sigma([add_const, add_coeff, add_factor], fn [i,j,l] -> l * sin(i + j * k) end)
Time.universal_from_dynamical(approx + correction + extra + additional)
end
@doc """
Returns the lunar parallax angle at a given moment
for an observer at `location`.
"""
def lunar_parallax(t, location) do
  geo = lunar_altitude(t, location)
  delta = lunar_distance(t)
  # 6_378_140 is the Earth's equatorial radius in meters; the ratio to the
  # lunar distance gives the sine of the horizontal parallax.
  alt = mt(6_378_140) / delta
  arg = alt * cos(geo)
  asin(arg)
end
@doc """
Returns the topocentric (observer-relative) lunar altitude:
the geocentric altitude corrected for parallax.
"""
def topocentric_lunar_altitude(t, location) do
  lunar_altitude(t, location) - lunar_parallax(t, location)
end
# The polynomial coefficients below appear to follow the standard lunar
# theory series (presumably Meeus, Astronomical Algorithms) - TODO confirm.
# Each takes `c`, a moment in Julian centuries, and returns degrees.
@doc false
# Mean longitude of the moon.
def mean_lunar_longitude(c) do
  c
  |> poly([218.3164477, 481267.88123421, -0.0015786, 1 / 538841.0, -1 /65194000.0])
  |> degrees()
end
@doc false
# Mean elongation of the moon from the sun.
def lunar_elongation(c) do
  c
  |> poly([297.8501921, 445267.1114034, -0.0018819, 1/545868, -1 / 113065000.0])
  |> degrees()
end
@doc false
# Mean anomaly of the sun.
def solar_anomaly(c) do
  c
  |> poly([357.5291092, 35999.0502909, -0.0001536, 1 / 24490000.0])
  |> degrees()
end
@doc false
# Mean anomaly of the moon.
def lunar_anomaly(c) do
  c
  |> poly([134.9633964, 477198.8675055, 0.0087414, 1 / 69699.0, -1 / 14712000.0])
  |> degrees()
end
# Apparent ecliptic longitude of the sun at moment `t`, delegated to the
# solar module.
defp solar_longitude(t) do
  c = julian_centuries_from_moment(t)
  Astro.Solar.sun_apparent_longitude_alt(c)
end
@doc false
def lunar_node(t) do
  c = julian_centuries_from_moment(t)
  # NOTE(review): deg(90.0) is added to the Julian-century argument rather
  # than to the resulting angle - unusual; confirm against the reference
  # algorithm before relying on this function.
  moon_node(c + deg(90.0))
  |> mod(180.0)
  |> Kernel.-(90.0)
end
@doc false
# Argument of latitude of the moon, in degrees, for Julian century `c`.
def moon_node(c) do
  c
  |> poly([93.2720950, 483202.0175233, -0.0036539, -1 / 3526000.0, 1 / 863310000.0])
  |> degrees()
end
end
|
lib/astro/lunar.ex
| 0.954372
| 0.82741
|
lunar.ex
|
starcoder
|
defmodule Operate.Tape do
@moduledoc """
Module for working with Operate tapes.
An Operate program is a tape made up of one or more cells, where each cell
contains a single atomic procedure call (known as an "Op").
When a tape is run, each cell is executed in turn, with the result from each
cell is passed to the next cell. This is known as the "state". Each cell
returns a new state, until the final cell in the tape returns the result of
the tape.
## Examples
iex> {:ok, tape} = %Operate.Tape{cells: [
...> %Operate.Cell{op: "return function(state, a) return (state or 0) + a end", params: [2]},
...> %Operate.Cell{op: "return function(state, a) return (state or 0) + a end", params: [3]},
...> %Operate.Cell{op: "return function(state, a) return (state or 0) + a end", params: [4]}
...> ]}
...> |> Operate.Tape.run(Operate.VM.init)
...> tape.result
9
"""
alias Operate.{BPU, Cell, Op, VM}
@typedoc "Operate Tape"
@type t :: %__MODULE__{
tx: BPU.Transaction,
index: integer,
cells: [Cell.t, ...],
result: VM.lua_output,
error: binary
}
defstruct tx: nil, index: nil, cells: [], result: nil, error: nil
@doc """
Converts the given `t:Operate.BPU.Transaction.t/0` into a `t:Operate.Tape.t/0`.
Returns the result in an `:ok` / `:error` tuple pair.
Optionally specify the output index of the tape. If not specified, the first
`OP_RETURN` output is returned as the tape.
"""
@spec from_bpu(BPU.Transaction.t, integer | nil) ::
  {:ok, __MODULE__.t} |
  {:error, String.t}
def from_bpu(tx, index \\ nil)
# No index given: locate the first OP_RETURN output and recurse with its index.
def from_bpu(%BPU.Transaction{} = tx, index) when is_nil(index) do
  case Enum.find_index(tx.out, &op_return_output?/1) do
    nil -> {:error, "No tape found in transaction."}
    index -> from_bpu(tx, index)
  end
end
# String index: normalize to an integer and recurse.
def from_bpu(%BPU.Transaction{} = tx, index) when is_binary(index),
  do: from_bpu(tx, String.to_integer(index))
# Integer index: build the tape from the output's cells, skipping the
# OP_RETURN marker cell itself.
def from_bpu(%BPU.Transaction{} = tx, index) when is_integer(index) do
  with output when not is_nil(output) <- Enum.at(tx.out, index),
       true <- op_return_output?(output),
       cells when is_list(cells) <-
         output
         |> Map.get(:tape)
         |> Enum.reject(&op_return_cell?/1)
         |> Enum.map(&Cell.from_bpu!/1)
  do
    tape = struct(__MODULE__, [
      tx: tx,
      index: index,
      cells: cells
    ])
    {:ok, tape}
  else
    # Propagate an existing error tuple; anything else means no tape here.
    {:error, _} = error -> error
    _ -> {:error, "No tape found in transaction."}
  end
end
@doc """
As `from_bpu/1`, but returns the tape directly or raises an exception
when no tape can be found.
"""
@spec from_bpu!(BPU.Transaction.t, integer) :: __MODULE__.t
def from_bpu!(%BPU.Transaction{} = tx, index \\ nil) do
  tx
  |> from_bpu(index)
  |> case do
    {:ok, tape} -> tape
    {:error, reason} -> raise reason
  end
end
@doc """
Sets the given Ops into the cells of the given tape. If a map of
aliases is specified, it is used to reverse map any procedure scripts onto
aliased cells.
"""
@spec set_cell_ops(__MODULE__.t, [Op.t, ...], map) :: __MODULE__.t
def set_cell_ops(tape, ops, aliases \\ %{})

def set_cell_ops(%__MODULE__{} = tape, [], _aliases), do: tape

def set_cell_ops(%__MODULE__{} = tape, [%Op{} = op | rest], aliases) do
  # Every alias key pointing at this op's ref; fall back to the ref itself
  # when nothing is aliased to it.
  refs =
    case for {key, ref} <- aliases, ref == op.ref, do: key do
      [] -> [op.ref]
      keys -> keys
    end

  updated_cells = Enum.map(tape.cells, fn cell -> put_cell_op(cell, refs, op) end)
  set_cell_ops(%{tape | cells: updated_cells}, rest, aliases)
end
@doc """
Runs the tape in the given VM state.
## Options
The accepted options are:
* `:state` - Specify the state passed to the first cell procedure. Defaults to `nil`.
* `:strict` - By default the tape runs in strict mode - meaning if any cell has an error the entire tape fails. Disable strict mode by setting to `false`.
## Examples
    iex> {:ok, tape} = %Operate.Tape{cells: [
    ...>   %Operate.Cell{op: "return function(state, a) return (state or '') .. a end", params: ["b"]},
    ...>   %Operate.Cell{op: "return function(state, a) return (state or '') .. a end", params: ["c"]},
    ...>   %Operate.Cell{op: "return function(state) return string.reverse(state) end", params: []}
    ...> ]}
    ...> |> Operate.Tape.run(Operate.VM.init, state: "a")
    ...> tape.result
    "cba"
"""
@spec run(__MODULE__.t, VM.t, keyword) ::
  {:ok, __MODULE__.t} |
  {:error, __MODULE__.t}
def run(%__MODULE__{} = tape, vm, options \\ []) do
  state = Keyword.get(options, :state, nil)
  strict = Keyword.get(options, :strict, true)
  # Expose the transaction context to the cell procedures.
  vm = vm
  |> VM.set!("tx", tape.tx) # TODO - remove tx in v 0.1.0
  |> VM.set!("ctx.tx", tape.tx)
  |> VM.set!("ctx.tape_index", tape.index)
  # Thread the state through each cell; in strict mode the first cell error
  # halts execution, otherwise the failing cell is skipped and the previous
  # state is carried forward.
  case Enum.reduce_while(tape.cells, state, fn(cell, state) ->
    case Cell.exec(cell, vm, state: state) do
      {:ok, result} -> {:cont, result}
      {:error, error} ->
        if strict, do: {:halt, {:error, error}}, else: {:cont, state}
    end
  end) do
    # NOTE(review): a cell whose *successful* result is itself shaped like
    # {:error, _} would be misclassified as a failure here - confirm cell
    # procedures never legitimately return such a value.
    {:error, error} -> {:error, Map.put(tape, :error, error)}
    result -> {:ok, Map.put(tape, :result, result)}
  end
end
@doc """
As `run/3`, but returns the tape or raises an exception.
## Options
The accepted options are:
* `:state` - Specify the state passed to the first cell procedure. Defaults to `nil`.
* `:strict` - By default the tape runs in strict mode - meaning if any cell has an error the entire tape fails. Disable strict mode by setting to `false`.
"""
@spec run!(__MODULE__.t, VM.t, keyword) :: __MODULE__.t
def run!(%__MODULE__{} = tape, vm, options \\ []) do
  with {:error, failed_tape} <- run(tape, vm, options) do
    raise failed_tape.error
  else
    {:ok, tape} -> tape
  end
end
@doc """
Returns a list of Op references from the tape's cells. If a map of aliases is
specified, it is used to alias references to alternative values.
## Examples
    iex> %Operate.Tape{cells: [
    ...>   %Operate.Cell{ref: "aabbccdd"},
    ...>   %Operate.Cell{ref: "eeff1122"},
    ...>   %Operate.Cell{ref: "33445500"}
    ...> ]}
    ...> |> Operate.Tape.get_op_refs(%{"33445500" => "MyAliasReference"})
    ["aabbccdd", "eeff1122", "MyAliasReference"]
"""
@spec get_op_refs(__MODULE__.t, map) :: list
def get_op_refs(%__MODULE__{cells: cells}, aliases \\ %{}) do
  # Unique refs in first-seen order, each remapped through the alias table.
  for ref <- Enum.uniq(Enum.map(cells, & &1.ref)) do
    Map.get(aliases, ref, ref)
  end
end
@doc """
Validates the given tape. Returns true if all the tape's cells are valid.
"""
@spec valid?(__MODULE__.t) :: boolean
def valid?(%__MODULE__{cells: cells}), do: Enum.all?(cells, &Cell.valid?/1)
# Private: Returns true if the BPU Script is an OP_RETURN script, judged by
# its first cell.
defp op_return_output?(%BPU.Script{tape: tape}),
  do: List.first(tape) |> op_return_cell?
# Private: Returns true if any chunk in the cell carries opcode 106
# (0x6A, the Bitcoin OP_RETURN opcode); keys may be atoms or strings
# depending on how the BPU data was decoded.
defp op_return_cell?(%BPU.Cell{cell: cells}),
  do: cells |> Enum.any?(& get_in(&1, [:op]) == 106 || get_in(&1, ["op"]) == 106)
# Private: Puts the op's function into the cell when the cell's ref is one
# of the targeted refs; otherwise returns the cell unchanged.
defp put_cell_op(cell, refs, op) do
  if cell.ref in refs do
    Map.put(cell, :op, op.fn)
  else
    cell
  end
end
end
|
lib/operate/tape.ex
| 0.861261
| 0.704783
|
tape.ex
|
starcoder
|
defmodule Pathfinding.Grid do
@moduledoc ~S"""
Grid definition that calls to `Pathfinding.find_path` and
`Pathfinding.find_walkable` will search against. The grid is defined so its
easy to make repeated searches across it without repeatedly reconstructing it.
### Tiles, Walkability, Costs
%Pathfinding.Grid{
tiles: [
[1, 1, 0, 1, 1],
[1, 1, 0, 1, 1],
[1, 1, 0, 1, 1],
[1, 1, 1, 1, 1],
[1, 1, 1, 1, 1]
]
}
`tiles` is the tile definition that most of the functions in
`Pathfinding.Grid` and `Pathfinding` use for traversal. Whatever numbers you
choose for the tiles is unimportant, except that they are a superset of all
potential values in `walkable_tiles` and keys in `costs`.
Important to mix with `tiles` is `walkable_tiles` because it determines what
tiles are valid for pathing, without specifying `walkable_tiles` the grid
will not be pathable at all.
%Pathfinding.Grid{
tiles: [
[1, 0, 1],
[1, 0, 1],
[1, 1, 1]
],
walkable_tiles: [1]
}
In the example above, only `1` is walkable, so a path from (0, 0) to (0, 2)
will avoid (0, 1).
### Costs
%Pathfinding.Grid{
costs: %{
0 => 5
}
tiles: [
[1, 0, 1],
[1, 0, 1],
[1, 1, 1]
],
walkable_tiles: [0, 1]
}
Specifying a `costs` map will cause different tiles to have different weights,
a detail that can be much better explained by the [A-star algorithm](https://en.wikipedia.org/wiki/A*_search_algorithm).
Any tiles not specified in the `costs` map, or if a `costs` map is not
specified at all, will have a cost of 1.
### Extra Costs
%Pathfinding.Grid{
costs: %{
1 => 3
}
extra_costs: %{
1 => %{
2 => 2
}
}
tiles: [
[1, 0, 1],
[1, 0, 1],
[1, 1, 1]
],
walkable_tiles: [0, 1]
}
Similar to `costs`, `extra_costs` indicates a specific coordinate as an
increased weight associated with it. Any coord with an extra cost will have
its tile's cost overriden, so in the above example the cost of traversing
(1, 2) is 2.
Specifying this map is programmatically cumbersome, so two alternatives exist:
* `Pathfinding.Grid.add_extra_cost/4` and
`Pathfinding.Grid.remove_extra_cost/3` can be used to specify a
single unwalkable coordinate.
* `Pathfinding.Grid.to_coord_map/2` can convert a list of coordinates into
an `extra_costs` map.
## Unwalkable Coords
%Pathfinding.Grid{
tiles: [
[1, 0, 1],
[1, 0, 1],
[1, 1, 1]
],
unwalkable_coords: %{
1 => %{
2 => true
}
},
walkable_tiles: [1]
}
`unwalkable_coords` is a map that can be specified to mark a specific
coordinate invalid for pathing, even though the tile is normally pathable.
This is useful to simulate an obstruction that is not typically represented
in the grid of tiles. In the above example, the right-most column is
unreachable by the left-most column because (1, 2) is unwalkable.
Specifying this map is programmatically cumbersome, so two alternatives exist:
* `Pathfinding.Grid.add_unwalkable_coord/3` and
`Pathfinding.Grid.remove_unwalkable_coord/3` can be used to specify a
single unwalkable coordinate.
* `Pathfinding.Grid.to_coord_map/2` can convert a list of coordinates into
an `unwalkable_coords` map.
## Unstoppable Coords
%Pathfinding.Grid{
tiles: [
[1, 0, 1],
[1, 0, 1],
[1, 1, 1]
],
unstoppable_coords: %{
1 => %{
2 => true
}
},
walkable_tiles: [1]
}
In the same vein as `unwalkable_coords`, `unstoppable_coords` are pathable, but
not valid destinations. This is useful to simulate an obstruction can be moved
through. In the above example, (1, 2) cannot be pathed to, but it can be
passed through, making the the right-most column accessible from left-most
column.
Specifying this map is programmatically cumbersome, so two alternatives exist:
* `Pathfinding.Grid.add_unstoppable_coord/3` and
`Pathfinding.Grid.remove_unstoppable_coord/3` can be used to specify a
single unstoppable coordinate.
* `Pathfinding.Grid.to_coord_map/2` can convert a list of coordinates into
an `unstoppable_coords` map.
## Grid Type
The grid's `type` determines how the grid will be traversed. The examples
below specify the order in which a coordinate's neighbors are traversed for
each grid type.
:cardinal
1
4 x 2
3
:hex
1 2
6 x 3
5 4
:intercardinal
8 1 2
7 x 3
6 5 4
"""
alias Pathfinding.Grid
@enforce_keys []
defstruct costs: %{},
extra_costs: %{},
walkable_tiles: [],
unwalkable_coords: %{},
unstoppable_coords: %{},
tiles: [],
type: :cardinal # :hex, :intercardinal
@type t :: %Grid{}
@spec is_cardinal?(t) :: Boolean.t
def is_cardinal?(%Grid{type: type}), do: type == :cardinal

@spec is_hex?(t) :: Boolean.t
def is_hex?(%Grid{type: type}), do: type == :hex

@spec is_intercardinal?(t) :: Boolean.t
def is_intercardinal?(%Grid{type: type}), do: type == :intercardinal
@spec in_grid?(t, Number.t, Number.t) :: Boolean.t
# Negative coordinates are never inside the grid.
def in_grid?(%Grid{}, x, y) when x < 0 or y < 0, do: false

def in_grid?(%Grid{tiles: tiles}, x, y) do
  # BUG FIX: the previous implementation returned `true` when `y` was past
  # the last row (`case y < length(tiles) do false -> true`), reporting
  # out-of-bounds rows as inside the grid. A row index beyond the tile list
  # is out of the grid; within range, the column must fit the row's width.
  if y < length(tiles) do
    x < length(Enum.at(tiles, y))
  else
    false
  end
end
@spec is_coord_stoppable?(t, Number.t, Number.t) :: Boolean.t
def is_coord_stoppable?(%Grid{unstoppable_coords: unstoppable} = grid, x, y) do
  # A coord flagged unstoppable is never a valid destination; otherwise it
  # is stoppable exactly when it is walkable.
  flagged = unstoppable |> Map.get(y, %{}) |> Map.get(x)
  if flagged == nil, do: Grid.is_coord_walkable?(grid, x, y), else: false
end
@spec is_coord_walkable?(t, Number.t, Number.t) :: Boolean.t
def is_coord_walkable?(
      %Grid{tiles: tiles, walkable_tiles: walkable_tiles, unwalkable_coords: unwalkable_coords},
      x,
      y
    ) do
  # Tile value at (x, y); a `y` past the last row yields nil via the []
  # default, and nil is never in walkable_tiles.
  # NOTE(review): Enum.at/2 with a NEGATIVE index counts from the end of the
  # list, so negative x/y would silently read the wrong tile - callers
  # presumably check in_grid?/3 first; confirm.
  tile = tiles |> Enum.at(y, []) |> Enum.at(x)
  # An explicit unwalkable marker overrides the tile's walkability.
  case unwalkable_coords |> Map.get(y, %{}) |> Map.get(x) do
    nil -> Enum.member?(walkable_tiles, tile)
    _ -> false
  end
end
@doc """
Converts a list of coordinates into a map of nested coordinates,
keyed by `y` then `x`, with each leaf set to `value`.
"""
@spec to_coord_map([%{x: Number.t, y: Number.t}, ...], Map.t, Boolean.t) :: Map.t
def to_coord_map(coords, map \\ %{}, value \\ true) when is_list(coords) do
  Enum.reduce(coords, map, fn %{x: x, y: y}, acc ->
    # Create the row map on first sight of `y`, otherwise augment it.
    Map.update(acc, y, %{x => value}, &Map.put(&1, x, value))
  end)
end
@spec get_coord_cost(t, Number.t, Number.t) :: Number.t
def get_coord_cost(%Grid{tiles: tiles, costs: costs} = grid, x, y) do
  # An extra cost registered on the exact coordinate overrides the tile's
  # base cost; otherwise fall back to the tile cost (default 1).
  extra = Grid.get_extra_cost(grid, x, y)

  if extra do
    extra
  else
    tile = tiles |> Enum.at(y, []) |> Enum.at(x)
    Map.get(costs, tile, 1)
  end
end
@spec set_tile_cost(t, Number.t, Number.t) :: t
def set_tile_cost(%Grid{costs: costs} = grid, tile, cost),
  do: %Grid{grid | costs: Map.put(costs, tile, cost)}

@spec get_extra_cost(t, Number.t, Number.t) :: Number.t
def get_extra_cost(%Grid{extra_costs: extra_costs}, x, y),
  do: extra_costs |> Map.get(y, %{}) |> Map.get(x)

@spec add_extra_cost(t, Number.t, Number.t, Number.t) :: t
def add_extra_cost(%Grid{extra_costs: extra_costs} = grid, x, y, cost),
  do: add_coord(grid, :extra_costs, extra_costs, x, y, cost)

@spec remove_extra_cost(t, Number.t, Number.t) :: t
def remove_extra_cost(%Grid{extra_costs: extra_costs} = grid, x, y),
  do: remove_coord(grid, :extra_costs, extra_costs, x, y)

@spec clear_extra_costs(t) :: t
def clear_extra_costs(%Grid{} = grid), do: clear_coords(grid, :extra_costs)

@spec add_unwalkable_coord(t, Number.t, Number.t) :: t
def add_unwalkable_coord(%Grid{unwalkable_coords: unwalkable_coords} = grid, x, y),
  do: add_coord(grid, :unwalkable_coords, unwalkable_coords, x, y)

@spec remove_unwalkable_coord(t, Number.t, Number.t) :: t
def remove_unwalkable_coord(%Grid{unwalkable_coords: unwalkable_coords} = grid, x, y),
  do: remove_coord(grid, :unwalkable_coords, unwalkable_coords, x, y)

@spec clear_unwalkable_coords(t) :: t
def clear_unwalkable_coords(%Grid{} = grid), do: clear_coords(grid, :unwalkable_coords)

@spec add_unstoppable_coord(t, Number.t, Number.t) :: t
def add_unstoppable_coord(%Grid{unstoppable_coords: unstoppable_coords} = grid, x, y),
  do: add_coord(grid, :unstoppable_coords, unstoppable_coords, x, y)

@spec remove_unstoppable_coord(t, Number.t, Number.t) :: t
def remove_unstoppable_coord(%Grid{unstoppable_coords: unstoppable_coords} = grid, x, y),
  do: remove_coord(grid, :unstoppable_coords, unstoppable_coords, x, y)

@spec clear_unstoppable_coords(t) :: t
def clear_unstoppable_coords(%Grid{} = grid), do: clear_coords(grid, :unstoppable_coords)
# Private: Marks (x, y) in the nested `coords` map with `value` and writes
# the updated map back to `grid` under `key`.
defp add_coord(%Grid{} = grid, key, coords, x, y, value \\ true) do
  # `Map.update/4` collapses both branches of the original case: it creates
  # the row map when `y` is absent and augments the existing one otherwise.
  Map.put(grid, key, Map.update(coords, y, %{x => value}, &Map.put(&1, x, value)))
end
# Private: Removes the (x, y) marker from the nested `coords` map stored
# under `key`; a no-op when row `y` does not exist.
defp remove_coord(%Grid{} = grid, key, coords, x, y) do
  if row = Map.get(coords, y) do
    Map.put(grid, key, Map.put(coords, y, Map.delete(row, x)))
  else
    grid
  end
end
# Private: Resets the nested coordinate map stored under `key`.
defp clear_coords(%Grid{} = grid, key), do: Map.put(grid, key, %{})
end
|
lib/grid.ex
| 0.936387
| 0.80077
|
grid.ex
|
starcoder
|
defmodule Scitree.Config do
  @moduledoc """
  Training configuration for Scitree models.

  A configuration is created with `init/0` and refined with `learner/3`,
  `task/2`, `label/2` and `log_directory/2`.
  """

  @type t :: %__MODULE__{}
  @type learners :: :cart | :gradient_boosted_trees | :random_forest
  @type tasks :: :undefined | :classification | :regression | :ranking | :categorical_uplift

  # Learner options applied when the caller does not override them.
  @default_options [
    maximum_model_size_in_memory_in_bytes: -1.0,
    maximum_training_duration_seconds: -1.0,
    random_seed: 123_456
  ]

  defstruct learner: :gradient_boosted_trees,
            options: @default_options,
            task: :classification,
            label: "",
            log_directory: ""

  @doc """
  Initializes a new classification setting.

  ## Examples

      iex> Scitree.Config.init()
      %Scitree.Config{
        label: "",
        learner: :gradient_boosted_trees,
        log_directory: "",
        options: [
          maximum_model_size_in_memory_in_bytes: -1.0,
          maximum_training_duration_seconds: -1.0,
          random_seed: 123456
        ],
        task: :classification
      }
  """
  @spec init() :: t()
  def init(), do: %__MODULE__{}

  @doc """
  Defines which learner (training algorithm) will be used and its options.

  If you want to use the classic Random Forest model, you can use the
  following example as a basis.

  ## Examples

      iex> Scitree.Config.init() |> Scitree.Config.learner(:random_forest)
      %Scitree.Config{
        label: "",
        learner: :random_forest,
        log_directory: "",
        options: [
          random_seed: 123456,
          maximum_training_duration_seconds: -1.0,
          maximum_model_size_in_memory_in_bytes: -1.0
        ],
        task: :classification
      }

  Learner parameters can be changed, you can use the following options:
  (parameters that are not manually set will assume default values)

  * maximum_model_size_in_memory_in_bytes: Limit the size of the model when stored in ram.
  * maximum_training_duration_seconds: Maximum training duration of the model expressed in seconds.
  * random_seed: Random seed for the training of the model.

  To change default options, you can use the following example.

  ## Examples

      iex> Scitree.Config.init() |> Scitree.Config.learner(:random_forest, random_seed: 654321)
      %Scitree.Config{
        label: "",
        learner: :random_forest,
        log_directory: "",
        options: [
          maximum_model_size_in_memory_in_bytes: -1.0,
          maximum_training_duration_seconds: -1.0,
          random_seed: 654321
        ],
        task: :classification
      }
  """
  @spec learner(t(), learners(), list()) :: t()
  def learner(config, learner, opts \\ []) do
    # Keyword.validate!/2 rejects unknown option keys and fills in defaults.
    options = Keyword.validate!(opts, @default_options)
    %{config | options: options, learner: learner}
  end

  @doc "Sets the training task (e.g. `:classification`, `:regression`)."
  @spec task(t(), tasks()) :: t()
  def task(config, type), do: %{config | task: type}

  @doc "Sets the name of the label (target) column."
  @spec label(t(), String.t()) :: t()
  def label(config, label), do: %{config | label: label}

  @doc """
  Set a directory to save training logs

  ## Examples

      iex> Scitree.Config.init() |> Scitree.Config.log_directory("/path")
      %Scitree.Config{
        label: "",
        learner: :gradient_boosted_trees,
        log_directory: "/path",
        options: [
          maximum_model_size_in_memory_in_bytes: -1.0,
          maximum_training_duration_seconds: -1.0,
          random_seed: 123456
        ],
        task: :classification
      }
  """
  @spec log_directory(t(), String.t()) :: t()
  def log_directory(config, dir), do: %{config | log_directory: dir}
end
|
lib/scitree/config.ex
| 0.874232
| 0.494019
|
config.ex
|
starcoder
|
defmodule Exzeitable.Parameters do
  @moduledoc """
  Gets default parameters, replaces with module opts and then with the function opts.
  Validates that parameters are valid.
  """
  alias Exzeitable.HTML.Format
  alias Exzeitable.Parameters.{ParameterError, Validation}

  # Recognised table parameters: each is either required or carries a default.
  @parameters %{
    query: %{required: true},
    repo: %{required: true},
    routes: %{required: true},
    path: %{required: true},
    action_buttons: %{default: [:new, :show, :edit, :delete]},
    belongs_to: %{default: nil},
    per_page: %{default: 20},
    debounce: %{default: 300},
    refresh: %{default: false},
    disable_hide: %{default: false},
    pagination: %{default: [:top, :bottom]},
    parent: %{default: nil},
    assigns: %{default: %{}},
    text: %{default: Exzeitable.Text.Default},
    formatter: %{default: {Format, :format_field}}
  }

  # Baseline options for every field definition.
  @default_fields [
    label: nil,
    function: false,
    hidden: false,
    search: true,
    order: true,
    formatter: {Format, :format_field}
  ]

  # Options forced onto fields declared as virtual.
  @virtual_fields [
    function: true,
    search: false,
    order: false
  ]

  @doc "Gets fields from options and merges it into the defaults"
  @spec set_fields(keyword) :: [any]
  def set_fields(opts) do
    for {name, field_opts} <- Keyword.get(opts, :fields, []) do
      {name, merge_fields(field_opts)}
    end
  end

  # If virtual: true, a number of other options have to be overridden
  @spec merge_fields(keyword) :: keyword
  defp merge_fields(field) do
    merged = Keyword.merge(@default_fields, field)

    if Keyword.get(field, :virtual) do
      Keyword.merge(merged, @virtual_fields)
    else
      merged
    end
  end

  @doc "Resolves every parameter and builds the initial assigns map."
  @spec process(keyword, keyword, atom) :: map
  def process(function_opts, module_opts, calling_module) do
    fields = set_fields(module_opts)

    resolved =
      Map.new(Map.keys(@parameters), &get_key_value_pair(&1, function_opts, module_opts))

    static = %{
      "fields" => Enum.map(fields, fn {k, f} -> {k, Enum.into(f, %{})} end),
      "module" => calling_module,
      "page" => 1,
      "order" => nil,
      "count" => 0,
      "search" => "",
      "show_field_buttons" => false,
      "csrf_token" => Phoenix.Controller.get_csrf_token()
    }

    resolved
    |> Map.merge(static)
    |> Validation.paired_options()
  end

  # Resolution order: function opts win over module opts, which win over the
  # default; a missing required parameter raises. `nil` means "not given".
  defp get_key_value_pair(parameter, function_opts, module_opts) do
    key = Atom.to_string(parameter)
    spec = Map.fetch!(@parameters, parameter)
    function_value = Keyword.get(function_opts, parameter)
    module_value = Keyword.get(module_opts, parameter)

    cond do
      function_value != nil -> {key, function_value}
      module_value != nil -> {key, module_value}
      Map.get(spec, :required, false) -> raise ParameterError, parameter: parameter
      true -> {key, Map.get(spec, :default)}
    end
  end
end
|
lib/exzeitable/parameters.ex
| 0.744656
| 0.428174
|
parameters.ex
|
starcoder
|
defmodule FalconPlusApi.Api.Aggreator do
  # Thin HTTP client for the Open-Falcon aggregator endpoints. Every
  # function resolves the session signature, attaches it as the `Apitoken`
  # request header on a Maxwell connection and dispatches the request
  # through the project's `Api` helper.
  # NOTE(review): the module name is misspelled ("Aggreator"); renaming
  # would break callers, so it is left as-is.
  alias Maxwell.Conn
  alias FalconPlusApi.{Util, Sig, Api}

  @doc """
  * [Session](#/authentication) Required
  * numerator: numerator expression of the aggregation
  * denominator: denominator expression of the aggregation
  * step: reporting period, in seconds
  ### Request
  ```{
    "tags": "",
    "step": 60,
    "numerator": "$(cpu.idle)",
    "metric": "test.idle",
    "hostgroup_id": 343,
    "endpoint": "testenp",
    "denominator": "2"
  }```
  ### Response
  ```Status: 200```
  ```{
    "id": 16,
    "grp_id": 343,
    "numerator": "$(cpu.idle)",
    "denominator": "2",
    "endpoint": "testenp",
    "metric": "test.idle",
    "tags": "",
    "ds_type": "GAUGE",
    "step": 60,
    "creator": "root"
  }```
  """
  def create(sig, addr, opts \\ []) do
    # Resolve the session token used for the `Apitoken` header.
    sig = Sig.get_sig(sig)

    ~s</api/v1/aggregators>
    |> Util.url(addr)
    |> Conn.new()
    |> Api.set_opts(opts)
    |> Conn.put_req_header("Apitoken", sig)
    # NOTE(review): this issues a GET even though the doc above shows a
    # request body for creating an aggregator — confirm whether this should
    # be `Api.post` (cf. `update/3`, which uses `Api.put`).
    |> Api.get
    |> Api.get_result
  end

  @doc """
  * [Session](#/authentication) Required
  ### Response
  ```Status: 200```
  ```{"message":"aggregator:16 is deleted"}```
  """
  def delete(sig, addr, opts \\ []) do
    sig = Sig.get_sig(sig)

    # NOTE(review): the aggregator id `16` is hard-coded (copied from the
    # documentation example), so this function can only ever delete
    # aggregator 16 — it almost certainly needs an `id` parameter.
    ~s</api/v1/aggregator/16>
    |> Util.url(addr)
    |> Conn.new()
    |> Api.set_opts(opts)
    |> Conn.put_req_header("Apitoken", sig)
    |> Api.delete
    |> Api.get_result
  end

  @doc """
  * [Session](#/authentication) Required
  * ex. /api/v1/hostgroup/343/aggregators
  * numerator: numerator expression of the aggregation
  * denominator: denominator expression of the aggregation
  * step: reporting period, in seconds
  ### Response
  ```Status: 200```
  ```[
    {
      "id": 13,
      "grp_id": 343,
      "numerator": "$(cpu.idle)",
      "denominator": "2",
      "endpoint": "testenp",
      "metric": "test.idle",
      "tags": "",
      "ds_type": "GAUGE",
      "step": 60,
      "creator": "root"
    },
    {
      "id": 14,
      "grp_id": 343,
      "numerator": "$(cpu.idle)",
      "denominator": "2",
      "endpoint": "testenp",
      "metric": "test.idle",
      "tags": "",
      "ds_type": "GAUGE",
      "step": 60,
      "creator": "root"
    }
  ]```
  """
  # Lists all aggregators configured for the given hostgroup.
  def of_hostgroup(hostgroup_id, sig, addr, opts \\ []) do
    sig = Sig.get_sig(sig)

    ~s</api/v1/hostgroup/#{hostgroup_id}/aggregators>
    |> Util.url(addr)
    |> Conn.new()
    |> Api.set_opts(opts)
    |> Conn.put_req_header("Apitoken", sig)
    |> Api.get
    |> Api.get_result
  end

  @doc """
  * [Session](#/authentication) Required
  * numerator: numerator expression of the aggregation
  * denominator: denominator expression of the aggregation
  * step: reporting period, in seconds
  ### Request
  ```{
    "tags": "",
    "step": 60,
    "numerator": "$(cpu.idle)",
    "metric": "test.idle",
    "id": 16,
    "endpoint": "testenp",
    "denominator": "$#"
  }```
  ### Response
  ```Status: 200```
  ```{
    "id": 16,
    "grp_id": 343,
    "numerator": "$(cpu.idle)",
    "denominator": "$#",
    "endpoint": "testenp",
    "metric": "test.idle",
    "tags": "",
    "ds_type": "GAUGE",
    "step": 60,
    "creator": "root"
  }```
  """
  # Updates an aggregator; the target id travels in the request body.
  def update(sig, addr, opts \\ []) do
    sig = Sig.get_sig(sig)

    ~s</api/v1/aggregators>
    |> Util.url(addr)
    |> Conn.new()
    |> Api.set_opts(opts)
    |> Conn.put_req_header("Apitoken", sig)
    |> Api.put
    |> Api.get_result
  end
end
|
lib/falcon_plus_api/api/aggreator.ex
| 0.52902
| 0.73077
|
aggreator.ex
|
starcoder
|
defmodule Genex.Tools.Genotype do
  alias Statistics.Distributions

  @moduledoc """
  Contains functions for generating various Genotypes.
  These are most of the genotypes you will use in basic genetic algorithms.
  """

  # Distribution functions we know how to sample from.
  @distributions [
    :beta,
    :binomial,
    :chisq,
    :exponential,
    :f,
    :hypergeometric,
    :normal,
    :poisson,
    :t
  ]

  @doc """
  Creates a binary geneset.
  Returns `Enum.t()`.
  # Parameters
  - `size`: Size of the geneset.
  """
  def binary(size) when is_integer(size) and size > 0 do
    for _ <- 1..size, do: Enum.random(0..1)
  end

  # A non-positive size yields an empty geneset. (The previous
  # `0..(size - 1)` range counted *down* for size <= 0 and produced two
  # spurious genes.)
  def binary(size) when is_integer(size), do: []

  @doc """
  Creates a permutation genotype.
  Returns `Enum.t()`.
  # Parameters
  - `values`: Possible values in the permutation.
  """
  def permutation(values) when is_list(values), do: Enum.shuffle(values)
  def permutation(lo..hi), do: Enum.shuffle(lo..hi)
  def permutation(_), do: raise("Values must be enumerated.")

  @doc """
  Creates a bitstring geneset.
  Returns `Enum.t()`.
  # Parameters
  - `size`: Size of the geneset.
  - `alphabet`: Alphabet to use (`:alpha`, `:alphanumeric`, `:upcase`, `:downcase` or `:all`).
  """
  def bitstring(size, alphabet \\ :downcase) do
    alpha = "abcdefghijklmnopqrstuvwxyz"
    numeric = "1234567890"

    alphabets =
      case alphabet do
        :alpha -> alpha <> String.upcase(alpha)
        :alphanumeric -> alpha <> numeric
        :upcase -> String.upcase(alpha)
        :downcase -> alpha
        :all -> alpha <> numeric <> String.upcase(alpha)
        other -> raise ArgumentError, "unknown alphabet: #{inspect(other)}"
      end
      |> String.split("", trim: true)

    # Guard against non-positive sizes: `1..size` would count down and
    # yield two characters for size 0.
    if is_integer(size) and size > 0 do
      Enum.map(1..size, fn _ -> Enum.random(alphabets) end)
    else
      []
    end
  end

  @doc """
  Creates a geneset from given distribution.
  Returns `Enum.t()`.
  # Parameters
  - `size`: Size of the geneset.
  - `name`: Distribution name.
  - `args`: Optional arguments to provide to distribution.
  """
  def distribution(size, name \\ :normal, args \\ [])

  def distribution(size, name, args)
      when is_integer(size) and size > 0 and name in @distributions do
    # Every supported distribution is sampled the same way, so dispatch by
    # atom instead of repeating one `for` comprehension per branch.
    for _ <- 1..size, do: apply(Distributions, name, args)
  end

  def distribution(size, name, _args) when is_integer(size) and name in @distributions do
    # Non-positive sizes yield an empty geneset (see binary/1).
    []
  end
end
|
lib/genex/tools/genotype.ex
| 0.902827
| 0.750004
|
genotype.ex
|
starcoder
|
defmodule Ockam.Messaging.PipeChannel.Handshake do
  @moduledoc """
  Pipe channel handshake implementation
  1.
  Initiator creates a receiver and sends own inner address and receiver address
  in the handshake message.
  Return route of the handshake message contains a route to initiator receiver
  2.
  Responder creates a sender based on return route and saves the initiator address
  Responder creates a receiver and sends a handshake with own inner address and
  receiver address
  3.
  Initiator creates a sender using init route and responder receiver address
  and saves the responder address
  Handshake message format is in `Ockam.Messaging.PipeChannel.Metadata`
  """
  @behaviour Ockam.Session.Handshake

  alias Ockam.Message
  alias Ockam.Messaging.PipeChannel.Metadata

  require Logger

  # Step 1 (initiator side): create a local receiver worker and send the
  # initial handshake carrying this channel's inner address and the
  # receiver's address. The receiver is cached in state for later steps.
  def init(handshake_options, state) do
    Logger.info("Handshake init #{inspect(handshake_options)} #{inspect(state)}")
    init_route = Map.fetch!(state, :init_route)
    # The pipe module decides which sender/receiver implementations to use.
    pipe_mod = Keyword.fetch!(handshake_options, :pipe_mod)
    receiver_mod = pipe_mod.receiver()
    receiver_options = Keyword.get(handshake_options, :receiver_options, [])
    {:ok, receiver} = receiver_mod.create(receiver_options)
    handshake_msg = %{
      onward_route: init_route,
      return_route: [state.handshake_address],
      payload:
        Metadata.encode(%Metadata{
          channel_route: [state.worker_address],
          receiver_route: [receiver]
        })
    }
    {:next, handshake_msg, Map.put(state, :receiver, receiver)}
  end

  # Step 3 (initiator side): the responder's reply contains its inner
  # channel route and receiver address; build a sender pointed at the
  # responder's receiver and mark the session ready.
  def handle_initiator(handshake_options, message, state) do
    payload = Message.payload(message)
    %Metadata{
      channel_route: channel_route,
      receiver_route: remote_receiver_route
    } = Metadata.decode(payload)
    init_route = Map.fetch!(state, :init_route)
    receiver_route = make_receiver_route(init_route, remote_receiver_route)
    pipe_mod = Keyword.fetch!(handshake_options, :pipe_mod)
    sender_mod = pipe_mod.sender()
    sender_options = Keyword.get(handshake_options, :sender_options, [])
    {:ok, sender} =
      sender_mod.create(Keyword.merge([receiver_route: receiver_route], sender_options))
    ## TODO: replace sender and channel_route with a single route
    {:ready, [sender: sender, channel_route: channel_route], state}
  end

  # Step 2 (responder side): create both a sender (towards the initiator's
  # receiver, derived from the traced return route) and a local receiver,
  # then reply with this side's inner address and receiver address.
  def handle_responder(handshake_options, message, state) do
    payload = Message.payload(message)
    ## We ignore receiver route here and rely on return route tracing
    %Metadata{channel_route: channel_route, receiver_route: remote_receiver_route} =
      Metadata.decode(payload)
    return_route = Message.return_route(message)
    receiver_route = make_receiver_route(return_route, remote_receiver_route)
    sender_options = Keyword.get(handshake_options, :sender_options, [])
    receiver_options = Keyword.get(handshake_options, :receiver_options, [])
    pipe_mod = Keyword.fetch!(handshake_options, :pipe_mod)
    sender_mod = pipe_mod.sender()
    receiver_mod = pipe_mod.receiver()
    {:ok, receiver} = receiver_mod.create(receiver_options)
    {:ok, sender} =
      sender_mod.create(Keyword.merge([receiver_route: receiver_route], sender_options))
    response = %{
      onward_route: return_route,
      return_route: [state.handshake_address],
      payload:
        Metadata.encode(%Metadata{
          channel_route: [state.worker_address],
          receiver_route: [receiver]
        })
    }
    {:ready, response, [sender: sender, channel_route: channel_route], state}
  end

  # Replaces the final hop of `init_route` (the peer's handshake address)
  # with the peer's advertised receiver route.
  defp make_receiver_route(init_route, remote_receiver_route) do
    Enum.take(init_route, Enum.count(init_route) - 1) ++ remote_receiver_route
  end
end
|
implementations/elixir/ockam/ockam/lib/ockam/messaging/pipe_channel/handshake.ex
| 0.771628
| 0.435841
|
handshake.ex
|
starcoder
|
defmodule Twitter.User do
  @moduledoc """
  Represents the user and all the related informations
  Every user in the system is represented with a server, all users are
  identified by their name and so all processes are locally registered
  using the same name.
  When a user is mentioned for the first time a server is started
  automatically, so there isn't an explicit `start` function
  """
  alias Twitter.User
  alias Twitter.Timeline
  alias Twitter.Message
  alias Twitter.Clock

  use GenServer

  @opaque t :: %User{name: String.t,
                     timeline: Timeline.t,
                     followers: [String.t],
                     following: [String.t]}
  defstruct name: "", timeline: nil, followers: [], following: []

  @doc """
  Post a new message for the user
  Post a message `message` from the user `user` to the `user`'s timeline,
  consider the message created at time `at` (default: current local time)
  """
  @spec post(String.t, String.t, Clock.t) :: :ok
  def post(user, message, at \\ Clock.now) do
    GenServer.cast(locate(user), {:post, message, at})
  end

  @doc """
  Post a new message from another user to the user timeline
  Post a message `message` from the user `from` to the `user`'s timeline,
  consider the message created at time `at` (default: current local time)
  """
  @spec post(String.t, String.t, String.t, Clock.t) :: :ok
  def post(user, from, message, at) do
    GenServer.cast(locate(user), {:post, from, message, at})
  end

  @doc """
  Read all messages sent from the user
  Returns all messages sent from the user `user` before given time (default:
  current local time)
  """
  @spec read(String.t, Clock.t) :: [Message.t]
  def read(user, at \\ Clock.now) do
    GenServer.call(locate(user), {:read, at})
  end

  @doc """
  Follow a user
  User `user` will follow the user `who`. The user `user` will receive all the
  `who` messages and it's timeline will be integrated with all the previous
  `who` messages. The meaning of previous is related to the given time `at`
  (default: current local time)
  """
  @spec follow(String.t, String.t, Clock.t) :: :ok
  def follow(user, who, at \\ Clock.now) do
    GenServer.cast(locate(user), {:follow, who, at})
  end

  @doc false
  def followed_by(user, who, at \\ Clock.now) do
    GenServer.cast(locate(user), {:followed_by, who, at})
  end

  @doc """
  Read all messages in the timeline
  Returns all messages sent from the user `user` and from all the users he
  follows before the given time (default: current local time)
  """
  @spec wall(String.t, Clock.t) :: [Message.t]
  def wall(user, at \\ Clock.now) do
    GenServer.call(locate(user), {:wall, at})
  end

  @impl true
  def init(name) do
    {:ok, %User{name: name, timeline: Timeline.new}}
  end

  # Own post: store it and fan it out to every follower.
  @impl true
  def handle_cast({:post, message, at}, user) do
    timeline = Timeline.push(user.timeline, %Message{at: at, from: user.name, text: message})
    user.followers |> Enum.each(&User.post(&1, user.name, message, at))
    {:noreply, %User{user|timeline: timeline}}
  end

  # Post relayed from a followed user: only store it locally.
  def handle_cast({:post, from, message, at}, user) do
    timeline = Timeline.push(user.timeline, %Message{at: at, from: from, text: message})
    {:noreply, %User{user|timeline: timeline}}
  end

  def handle_cast({:follow, who, at}, user) do
    # Backfill the timeline with `who`'s messages up to `at`.
    # NOTE(review): nothing here calls `User.followed_by(who, user.name, at)`,
    # so `who` never learns about this follower and future posts won't be
    # relayed unless callers invoke `followed_by/3` themselves — confirm
    # whether that registration is expected to happen here.
    timeline = User.read(who, at) |> Enum.reduce(user.timeline, &Timeline.push(&2, &1))
    {:noreply, %User{user|timeline: timeline, following: [who|user.following]}}
  end

  def handle_cast({:followed_by, who, _at}, user) do
    {:noreply, %User{user|followers: [who|user.followers]}}
  end

  @impl true
  def handle_call({:read, at}, _, user) do
    {:reply, Timeline.from(user.timeline, user.name, at), user}
  end

  def handle_call({:wall, at}, _, user) do
    {:reply, Timeline.wall(user.timeline, at), user}
  end

  # Resolves a user reference (pid or name) to a pid, starting the server
  # on first mention.
  defp locate(pid) when is_pid(pid), do: pid
  defp locate(name) when is_binary(name) do
    register_name = String.to_atom(name)
    case Process.whereis(register_name) do
      nil ->
        # `whereis`/`start_link` races with concurrent callers: another
        # process may register the name first, so treat `:already_started`
        # as success instead of crashing with a MatchError.
        case start_link(name, register_name) do
          {:ok, pid} -> pid
          {:error, {:already_started, pid}} -> pid
        end
      pid ->
        pid
    end
  end

  defp start_link(name, register_name) do
    GenServer.start_link(User, name, name: register_name)
  end
end
|
lib/user.ex
| 0.716119
| 0.4436
|
user.ex
|
starcoder
|
defmodule MatchEngine do
  @moduledoc """
  MatchEngine is an in-memory matching/filtering engine with
  Mongo-like query syntax.
  The query language consists of nested Elixir "keyword list". Each
  component of the query consists of a *key* part and a *value*
  part. The key part is either a logic operator (and/or/not), or a
  reference to a field, the value part is either a plain value, or a
  value operator.
  When a query is run against a document, where each term is scored
  individually and then summed. (This implies "or"). Some example
  queries:
  Two ways of saying "Score all documents in which the title equals `"hoi"`":
  ```
  [title: "hoi"]
  [title: [_eq: "hoi"]]
  ```
  Combining various matchers with logic operators:
  ```
  [_and: [name: "Arjan", age: 36]]
  [_or: [name: "Arjan", age: 36]]
  [_not: [title: "foo"]]
  ```
  Performing matches in nested objects is also possible; the query
  simply follows the shape of the data.
  Given a document consisting of a nested structure, `%{"user" => %{"name" => "Arjan"}}`:
  "User name equals Arjan":
  ```
  [user: [name: "Arjan"]]
  ```
  "User name does not equal Arjan":
  ```
  [_not: [user: [name: "Arjan"]]]
  ```
  > Note that this is a different approach for nesting fields than MongoDB, which uses dot notation for field nesting.
  ## Query execution
  The queries can be run by calling `MatchEngine.score_all/2` or `MatchEngine.filter_all/2`.
  Queries are first preprocessed, and then executed on a list of search
  "documents". A "document" is just a normal Elixir map, with string
  keys.
  The preprocessing phase compiles any regexes, checks whether all
  operators exist, and de-nests nested field structures.
  The query phase runs the preprocessed query for each document in the
  list, by calculating the score for the given document, given the
  query. When using filter_all/2, documents with a zero score are
  removed from the input list. When using score_all, the list is
  sorted on score, descending, and this score, including any
  additional metadata, is returned in a `"_match"` map inside the
  document.
  ## Value operators
  *Value operators* work on an individual field. Various operators can
  be used to calculate a score for a given field.
  ### `_eq`
  Scores on the equality of the argument.
  ```
  [title: "hello"]
  [title: [_eq: "hello"]]
  ```
  ### `_ne`
  Scores on the *in*equality of the argument. ("Not equals")
  ```
  [title: [_ne: "hello"]]
  ```
  ### `_lt`, `_gt`, `_lte`, `_gte`
  Scores on using the comparison operators <, >, <= and >=.
  ```
  [age: [_gt: 18]]
  ```
  ### `_in`
  Scores when the document's value is a member of the given list.
  ```
  [role: [_in: ["developer", "freelancer"]]]
  ```
  ### `_nin`
  Scores when the document's value is *not* a member of the given list.
  ```
  [role: [_nin: ["recruiter"]]]
  ```
  ### `_sim`
  Normalized string similarity. The max of the Normalised Levenshtein
  distance and Jaro distance.
  ### `_regex`
  Match a regular expression. The input is a string, which gets compiled
  into a regex. This operator scores on the length of match divided by
  the total string length. It is possible to add named captures to the
  regex, which then get added to the `_match` metadata map, as seen in the following example:
  ```
  # regex matches entire string, 100% score
  assert %{"score" => 1} == score([title: [_regex: "foo"]], %{"title" => "foo"})
  # regex matches with a capture called 'name'. It is boosted by weight.
  assert %{"score" => 1.6, "name" => "food"} == score([title: [_regex: "(?P<name>foo[dl])", w: 4]], %{"title" => "foodtrucks"})
  ```
  The regex match can also be inversed, where the document value is
  treated as the regular expression, and the query input is treated as
  the string to be matched. (No captures are supported in this case).
  ```
  assert %{"score" => 0.5} == score([title: [_regex: "foobar", inverse: true]], %{"title" => "foo"})
  ```
  ### `_geo`
  Calculate document score based on its geographical distance to a given
  point. The geo distance (both in the operator and in the document) can
  be given as:
  - A regular list, e.g. `[4.56, 52.33]`
  - A keyword list, e.g. `[lat: 52.33, lon: 4.56]`
  - A map with atom keys, e.g. `%{lat: 52.33, lon: 4.56}`
  - A map with string keys, e.g. `%{"lat" => 52.33, "lon" => 4.56}`
  The calculated `distance` is returned in meters, as part of the `_match` map.
  An extra argument, `max_distance` can be given to the operator which
  specifies the maximum cutoff point. It defaults to 100km. (100_000).
  Distance is scored logarithmically with respect to the maximum
  distance.
  ```
  doc = %{"location" => %{"lat" => 52.340500999999996, "lon" => 4.8832816}}
  q = [location: [_geo: [lat: 52.340500999999996, lon: 4.8832816]]]
  assert %{"score" => 1, "distance" => 0.0} == score(q, doc)
  ```
  ### `_time`
  Score by an UTC timestamp, relative to the given time.
  ```
  t1 = "2018-02-19T15:29:53.672235Z"
  t2 = "2018-02-19T15:09:53.672235Z"
  assert %{"score" => s} = score([inserted_at: [_time: t1]], %{"inserted_at" => t2})
  ```
  This way, documents can be returned in order of recency.
  ## Logic operators
  ### `_and`
  Combine matchers, multiplying the score. When one of the matchers
  returns 0, the total score is 0 as well.
  ```
  [_and: [name: "Arjan", age: 36]]
  ```
  ### `_or`
  Combine matchers, adding the scores.
  ```
  [_or: [name: "Arjan", id: 12]]
  ```
  ### `_not`
  Reverse the score of the nested matchers. (when score > 0, return 0, otherwise, return 1.
  ```
  [_not: [title: "foo"]]
  ```
  ### Matcher weights
  `w: 10` can be added to a matcher term to boost its score by the given weight.
  ```
  [title: [_eq: "Pete", w: 5], summary: [_sim: "hello", w: 2]]
  ```
  `b: true` can be added to force a score of 1 when the score is > 0.
  ```
  [title: [_sim: "hello", b: true]]
  ```
  ## Map syntax for queries
  Instead of keyword lists, queries can also be specified as maps. In
  this case, the keys of the map need to be strings. Query maps are
  meant to be used from user-generated input, and can be easily created from JSON files.
  ```
  [_not: [title: "foo"]]
  # can also be written as:
  %{"_not" => %{"title" => "foo"}}
  [title: [_eq: "Pete", w: 5], summary: [_sim: "hello", w: 2]]
  # can also be written as:
  %{"title" => %{"_eq" => "Pete", "w" => 5}, "summary" => %{"_sim" => "hello", "w" => w}}
  ```
  """
  alias MatchEngine.{Query, Score, Scoring}

  @type query() :: [operator_pair] | map()
  @type operator_pair() :: {operator(), operator_arg()}
  @type operator_arg() :: any()
  @type operator() ::
          :_not | :_and | :_or | :_eq | :_ne | :_in | :_nin | :_sim | :_regex | :_geo | :_time
  @type score_match() :: map()
  @type doc() :: map()
  @type doc_with_match() :: map()

  @doc """
  Score a single document against the given query
  Top-level query operators are treated as `or` clauses. The return
  value includes `score` attribute which contains the actual score.
  """
  @spec score(query(), doc()) :: score_match()
  def score(query, doc) do
    query
    |> Query.preprocess()
    |> Score.score(doc)
  end

  @doc """
  Filter a single document against the given query
  Top-level query operators are treated as `and` clauses. The return
  value includes `score` attribute which contains the actual score.
  """
  # Fixed: the spec was previously (and wrongly) declared for `score/2`.
  @spec filter(query(), doc()) :: score_match()
  def filter(query, doc) do
    query
    |> Query.preprocess()
    |> Score.filter(doc)
  end

  @doc """
  Score all given documents against the given query.
  All documents are returned, even when their score is 0. The returned
  list of documents is sorted on their score, descending (best
  matching document first).
  The document contains a `_match` key which contains the `score`
  attribute. Some operators, e.g. `_geo`, add additional information
  to this match map, for instance, the geographical distance.
  """
  @spec score_all([doc()], query()) :: [doc_with_match()]
  def score_all(docs, query) do
    query = Query.preprocess(query)
    Scoring.score_all(docs, query)
  end

  @doc """
  Filter all given documents against the given query.
  Only the documents that have a positive (greater than 0) score are
  returned. The document order is preserved, no sorting on score is done.
  """
  # Fixed: the spec was previously (and wrongly) declared for `score_all/2`.
  @spec filter_all([doc()], query()) :: [doc_with_match()]
  def filter_all(docs, query) do
    query = Query.preprocess(query)
    Scoring.filter_all(docs, query)
  end
end
|
lib/match_engine.ex
| 0.936066
| 0.938632
|
match_engine.ex
|
starcoder
|
defmodule TwitterSpaceDL do
@moduledoc """
Twitter Space Audio Downloader
"""
require Logger
use GenServer
@user_agent "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.1 Safari/605.1.15"
@audio_space_metadata_endpoint "https://twitter.com/i/api/graphql/Uv5R_-Chxbn1FEkyUkSW2w/AudioSpaceById"
@live_video_stream_status_endpoint "https://twitter.com/i/api/1.1/live_video_stream/status/"
@user_by_screen_name_endpoint "https://twitter.com/i/api/graphql/1CL-tn62bpc-zqeQrWm4Kw/UserByScreenName"
@user_tweets_endpoint "https://twitter.com/i/api/graphql/jpCmlX6UgnPEZJknGKbmZA/UserTweets"
@filename_template "%{title}"
# ets table keys
@filename "filename"
@master_playlist "master_playlist"
@dyn_url "dyn_url"
@metadata "metadata"
@guest_token "guest_token"
@doc """
New Twitter Space downloader
- **source**: specify the space source
- `:space_url`.
For example, `"https://twitter.com/i/spaces/1OyJADqBEgDGb"`
- `:space_id`.
For example, `"1OyJADqBEgDGb"`
- **opts**: keyword options
- **show_ffmpeg_output**: forward FFmpeg output to IO.puts
Default value: `false`
- **save_dir**: set download directory
Default value: `__DIR__`
- **template**: filename template
Default value: `"%{title}"`. Valid keys are:
- `title`.
- `created_at`.
- `ended_at`.
- `rest_id`.
- `started_at`.
- `total_participated`.
- `total_replay_watched`.
- `updated_at`.
- **plugin_module**: name of the plugin module. The module should implement `TwitterSpaceDL.Plugin`
**Return**: `pid`
## Example
Download by space url
```elixir
space = TwitterSpaceDL.new!(:space_url, "https://twitter.com/i/spaces/1OyJADqBEgDGb")
# download synchronously
TwitterSpaceDL.download(space)
# download asynchronously
TwitterSpaceDL.async_download(space, self())
# iex > flush
# {#PID<0.368.0>, %{download_pid: #PID<0.370.0>, space_id: "1OyJADqBEgDGb"}}
# {#PID<0.368.0>, %{space_id: "1OyJADqBEgDGb"}, :ok}
# :ok
```
Download by space id and display ffmpeg output
```elixir
space = TwitterSpaceDL.new!(:space_id, "1OyJADqBEgDGb", show_ffmpeg_output: true)
# download synchronously
TwitterSpaceDL.download(space)
# download asynchronously
TwitterSpaceDL.async_download(space, self())
# iex > flush
# {#PID<0.368.0>, %{download_pid: #PID<0.370.0>, space_id: "1OyJADqBEgDGb"}}
# {#PID<0.368.0>, %{space_id: "1OyJADqBEgDGb"}, :ok}
# :ok
```
Download by space id, use custom filename template and save to `download` directory
```elixir
space = TwitterSpaceDL.new!(:space_id, "1OyJADqBEgDGb",
template: "space-%{title}-%{rest_id}-%{created_at}",
save_dir: "./download")
# download synchronously
TwitterSpaceDL.download(space)
# download asynchronously
TwitterSpaceDL.async_download(space, self())
# iex > flush
# {#PID<0.368.0>, %{download_pid: #PID<0.370.0>, space_id: "1OyJADqBEgDGb"}}
# {#PID<0.368.0>, %{space_id: "1OyJADqBEgDGb"}, :ok}
# :ok
```
Init by username, use custom filename template and use plugin module
```elixir
space = TwitterSpaceDL.new!(:user, "LaplusDarknesss",
template: "space-%{title}-%{rest_id}",
plugin_module: TwitterSpaceDL.Plugin.CLI)
# you can call this again to download new spaces (if space archive is available)
# download synchronously
TwitterSpaceDL.download(space)
# download asynchronously
TwitterSpaceDL.async_download(space, self())
# iex > flush
# {#PID<0.400.0>, %{download_pid: #PID<0.402.0>, username: "LaplusDarknesss"}}
# {#PID<0.400.0>, %{username: "LaplusDarknesss"}, [{"https://twitter.com/i/spaces/1mnGedeXloNKX", :ok}]}
# :ok
```
"""
# Bang variant of `new/3`: returns the pid directly and raises a
# descriptive error (instead of an opaque MatchError) when the underlying
# GenServer fails to start.
def new!(source, source_arg, opts \\ default_opts()) do
  case new(source, source_arg, opts) do
    {:ok, pid} -> pid
    {:error, reason} -> raise "failed to start TwitterSpaceDL: #{inspect(reason)}"
  end
end
@doc """
New Twitter Space downloader
Please check `new!` for full information
## Example
Download by space url
```elixir
{:ok, space} = TwitterSpaceDL.new(:space_url, "https://twitter.com/i/spaces/1OyJADqBEgDGb")
TwitterSpaceDL.download(space)
```
"""
def new(source, source_arg, opts \\ default_opts())

# All three sources start the same GenServer; only the key under which
# the argument is stored in the init map differs.
def new(source, source_arg, opts) when source in [:space_id, :space_url, :user] do
  key =
    case source do
      :space_id -> :from_space_id
      :space_url -> :from_space_url
      :user -> :from_username
    end

  GenServer.start(__MODULE__, %{key => source_arg, opts: sanitize_opts(opts)})
end
# Baseline options used when the caller omits them (see `new!/3` docs).
defp default_opts do
  [
    template: @filename_template,
    save_dir: __DIR__,
    show_ffmpeg_output: false,
    guest_token: nil
  ]
end
# Keeps only the known option keys, falling back to the defaults for any
# key the caller did not pass. Uses `Keyword.get/3` (presence check)
# instead of the previous `opts[k] || default`, which silently replaced
# an explicitly supplied `false`/`nil` value with the default.
defp sanitize_opts(opts) do
  for {key, default} <- default_opts() do
    {key, Keyword.get(opts, key, default)}
  end
end
# Raises unless an `ffmpeg` executable can be found on the PATH.
defp ensure_ffmpeg do
  if System.find_executable("ffmpeg") == nil do
    raise "cannot find ffmpeg"
  end
end
@doc """
Download Twitter Space audio recording
"""
def download(self_pid) do
ensure_ffmpeg()
GenServer.call(self_pid, :download, :infinity)
end
@doc """
Download Twitter Space audio recording asynchronously
"""
def async_download(self_pid, callback_pid) do
ensure_ffmpeg()
GenServer.cast(self_pid, {:download, callback_pid})
end
@impl true
def init(%{from_space_url: url} = arg) when is_binary(url) do
  # Resolve the space id from the URL up front; refuse to start when the
  # URL does not contain one.
  opts = Map.get(arg, :opts, default_opts())

  case from_space_url(url) do
    {:ok, space_id} -> {:ok, %{space_id: space_id, opts: opts}}
    {:error, reason} -> {:stop, reason}
  end
end

@impl true
def init(%{from_space_id: space_id} = arg) when is_binary(space_id) do
  {:ok, %{space_id: space_id, opts: Map.get(arg, :opts, default_opts())}}
end

@impl true
def init(%{from_username: username} = arg) when is_binary(username) do
  {:ok, %{username: username, opts: Map.get(arg, :opts, default_opts())}}
end
@impl true
def handle_call(:download, _from, state = %{space_id: _space_id}) do
  state |> download_by_id() |> to_download_reply(state)
end

@impl true
def handle_call(:download, _from, state = %{username: _username}) do
  state |> download_by_user() |> to_download_reply(state)
end

# Normalizes a download result into a GenServer reply. On success the ETS
# table is cached in state for reuse; any other shape becomes an error
# reply. Extracted because both `handle_call/3` clauses duplicated this
# exact case expression.
defp to_download_reply({:ok, download_results, ets_table}, state),
  do: {:reply, download_results, Map.put(state, :ets_table, ets_table)}

defp to_download_reply({:error, reason}, state), do: {:reply, {:error, reason}, state}
defp to_download_reply(other, state), do: {:reply, {:error, other}, state}
@impl true
# Asynchronous download (Space id mode). The work runs in a spawned
# process; `callback_pid` is first told the child pid via
# `{server_pid, %{space_id: id, download_pid: child}}`, then receives
# `{server_pid, %{space_id: id}, results}` when done.
def handle_cast({:download, callback_pid}, state = %{space_id: space_id}) do
  self_pid = self()

  child =
    spawn(fn ->
      case download_by_id(state) do
        {:ok, download_results, ets_table} ->
          # The cache table was created in this short-lived process;
          # drop it explicitly before reporting.
          :ets.delete(ets_table)
          send(callback_pid, {self_pid, %{space_id: space_id}, download_results})

        {:error, reason} ->
          send(callback_pid, {self_pid, %{space_id: space_id}, {:error, reason}})

        other ->
          send(callback_pid, {self_pid, %{space_id: space_id}, {:error, other}})
      end
    end)

  send(callback_pid, {self_pid, %{space_id: space_id, download_pid: child}})
  {:noreply, state}
end

@impl true
# Asynchronous download (username mode); same flow as above.
# NOTE(review): error payloads here are `%{error: reason}` maps while the
# space_id clause sends `{:error, reason}` tuples — confirm which shape
# callback consumers expect before unifying.
def handle_cast({:download, callback_pid}, state = %{username: username}) do
  self_pid = self()

  child =
    spawn(fn ->
      case download_by_user(state) do
        {:ok, download_results, ets_table} ->
          :ets.delete(ets_table)
          send(callback_pid, {self_pid, %{username: username}, download_results})

        {:error, reason} ->
          send(callback_pid, {self_pid, %{username: username}, %{error: reason}})

        other ->
          send(callback_pid, {self_pid, %{username: username}, %{error: other}})
      end
    end)

  send(callback_pid, {self_pid, %{username: username, download_pid: child}})
  {:noreply, state}
end
# Extracts the Space id (the word following "spaces/") from a Space URL.
# Returns `{:ok, space_id}` or `{:error, reason}` after logging.
defp from_space_url(url) when is_binary(url) do
  case Regex.run(~r/spaces\/(\w+)/, url) do
    [_, space_id | _] ->
      {:ok, space_id}

    _ ->
      reason = "cannot find space id from given url: #{url}"
      Logger.error(reason)
      {:error, reason}
  end
end
# Base ffmpeg argument list: overwrite the output, keep logs quiet but
# show progress stats, stream-copy the input, and tag the output with
# `title` metadata.
defp ffmpeg_arg(input, output, title) do
  flags = ~w(-hide_banner -y -stats -v warning)
  flags ++ ["-i", input, "-c", "copy", "-metadata", "title=#{title}", output]
end
# Downloads a single Space identified by `state.space_id`.
#
# Reuses (or creates) the ETS cache table, resolves the playlist, output
# filename, dynamic playlist and metadata, then hands everything to
# `_download/8`. Returns `{:ok, results, ets_table}` or `{:error, reason}`.
defp download_by_id(state = %{space_id: space_id, opts: opts}) do
  ets_table =
    case Map.get(state, :ets_table) do
      nil -> :ets.new(:twspace_dl, [:set, :protected])
      tab -> tab
    end

  template = opts[:template]
  save_dir = opts[:save_dir]
  File.mkdir_p!(save_dir)

  with {:ok, playlist} <- playlist_content(space_id, ets_table, opts),
       {:ok, filename} <- filename(space_id, ets_table, template, opts),
       {:ok, dyn_playlist} <- dyn_url(space_id, ets_table, opts),
       {:ok, %{data: %{audioSpace: %{metadata: %{state: space_state, title: title}}}}} <-
         metadata(space_id, ets_table, opts) do
    download_results =
      _download(
        System.find_executable("ffmpeg"),
        filename,
        playlist,
        dyn_playlist,
        title,
        space_state,
        save_dir,
        # BUG FIX: `_download/8` expects the boolean `show_ffmpeg_output`
        # flag here; previously the whole `opts` keyword list was passed,
        # which is always truthy and forced ffmpeg output on.
        opts[:show_ffmpeg_output]
      )

    {:ok, download_results, ets_table}
  else
    {:error, reason} -> {:error, reason}
    other -> other
  end
end
# Finds Space URLs in a user's recent tweets and downloads each one.
#
# Already-downloaded URLs are tracked in the ETS table so repeated calls
# on the same server skip them. Returns `{:ok, [{url, status}], ets_table}`
# or `{:error, reason}` when the user's rest_id cannot be resolved.
defp download_by_user(state = %{username: username, opts: opts}) do
  ets_table =
    case Map.get(state, :ets_table) do
      nil -> :ets.new(:twspace_dl, [:set, :protected])
      tab -> tab
    end

  with {:ok, %{data: %{user: %{result: %{rest_id: user_id}}}}} <-
         userinfo(username, ets_table, opts),
       {:ok, tweets} <- recent_tweets(user_id, ets_table, opts) do
    # Regex.scan returns one single-element list per match, hence the
    # `{[space_url], index}` destructuring below.
    case Regex.scan(~r/https:\/\/twitter.com\/i\/spaces\/\w*/, tweets) do
      [] ->
        Logger.info("no space tweets found for user: #{username}, user_id: #{user_id}")
        {:ok, [], ets_table}

      space_urls ->
        space_urls = Enum.uniq(space_urls)

        Logger.info("found #{Enum.count(space_urls)} space tweets for user: #{username}, user_id: #{user_id}")

        # Optional plugin hook may filter/rewrite the URL list.
        space_urls =
          to_plugin_module(opts[:plugin_module], {:space_urls, 0}, space_urls, username, nil)

        total = Enum.count(space_urls)

        results =
          space_urls
          |> Enum.with_index(1)
          |> Enum.map(fn {[space_url], index} ->
            Logger.info("[#{index}/#{total}] user: #{username}, user_id: #{user_id}, url: #{space_url}")

            # One child downloader server per Space URL; a URL is marked
            # in ETS only after a successful download.
            with {:ok, space} <- TwitterSpaceDL.new(:space_url, space_url, opts) do
              if Enum.count(:ets.lookup(ets_table, space_url)) == 0 do
                ret = TwitterSpaceDL.download(space)

                if ret == :ok do
                  :ets.insert(ets_table, {space_url, true})
                  {space_url, :ok}
                else
                  {space_url, ret}
                end
              else
                Logger.info(
                  "[#{index}/#{total}] user: #{username}, user_id: #{user_id}, url: #{space_url}, already downloaded"
                )

                {space_url, :already_downloaded}
              end
            else
              ret -> {space_url, ret}
            end
          end)

        {:ok, results, ets_table}
    end
  else
    _ ->
      # Lookup failed: discard the cache and report.
      :ets.delete(ets_table)
      reason = "cannot find rest_id for user: #{username}"
      Logger.error(reason)
      {:error, reason}
  end
end
# Builds and runs the ffmpeg pipeline for one Space.
#
# For an ended Space a single "download recorded" invocation suffices.
# For a running Space three invocations are queued: capture the live
# stream, download the recorded part, then concat-merge both into the
# final file listed in `<title>-concat.txt`.
#
# `show_ffmpeg_output` is the boolean flag forwarded to `_download/3`.
defp _download(
       ffmpeg,
       filename,
       playlist,
       dyn_playlist,
       title,
       space_state,
       save_dir,
       show_ffmpeg_output
     ) do
  m3u8_filename = write_playlist(filename, playlist)
  m4a_filename = filename <> ".m4a"
  m4a_live_filename = filename <> "_live.m4a"
  concat_txt = "#{title}-concat.txt"

  # Local .m3u8 input referencing https chunks needs an explicit
  # protocol whitelist.
  download_recorded =
    ffmpeg_arg(m3u8_filename, m4a_filename, title)
    |> List.insert_at(1, "-protocol_whitelist")
    |> List.insert_at(2, "file,https,tls,tcp")

  pipeline =
    if space_state == "Running" do
      # concat list: recorded part followed by the live capture.
      # NOTE(review): the list uses absolute paths under `save_dir` while
      # ffmpeg outputs are created with relative names — confirm the
      # working directory matches `save_dir` at runtime.
      {:ok, file} = File.open(concat_txt, [:write])
      save_dir_abs = Path.expand(save_dir)
      :ok = IO.binwrite(file, "file " <> Path.join(save_dir_abs, m4a_filename) <> "\n")
      :ok = IO.binwrite(file, "file " <> Path.join(save_dir_abs, m4a_live_filename) <> "\n")
      :ok = File.close(file)

      download_live = ffmpeg_arg(dyn_playlist, m4a_live_filename, title)

      merge_file =
        ffmpeg_arg(concat_txt, m4a_filename, title)
        |> List.insert_at(1, "-f")
        |> List.insert_at(2, "concat")
        |> List.insert_at(3, "-safe")
        |> List.insert_at(4, "0")

      [download_live, download_recorded, merge_file]
    else
      [download_recorded]
    end

  :ok = _download(ffmpeg, pipeline, show_ffmpeg_output)

  # cleanup
  if space_state == "Running" do
    File.rm!(concat_txt)
    File.rm!(m4a_live_filename)
  end

  :ok
end
# Recursion terminates once every ffmpeg invocation has run.
defp _download(_ffmpeg, [], _show_ffmpeg_output), do: :ok

# Runs each argument list through ffmpeg sequentially, waiting for each
# process to exit before starting the next one (the merge stage depends
# on files produced by the earlier stages).
defp _download(ffmpeg, [args | rest], show_ffmpeg_output) do
  port =
    Port.open(
      {:spawn_executable, ffmpeg},
      [:binary, :exit_status, args: args]
    )

  # BUG FIX: the previous implementation received a single message per
  # port, so a stdout `:data` message arriving first consumed the receive
  # and the next stage started while ffmpeg was still running. Drain
  # messages until the `:exit_status` notification instead.
  await_port(port, show_ffmpeg_output)

  _download(ffmpeg, rest, show_ffmpeg_output)
end

# Drains port messages until ffmpeg exits, optionally echoing its output.
defp await_port(port, show_ffmpeg_output) do
  receive do
    {^port, {:exit_status, 0}} ->
      :ok

    {^port, {:exit_status, status}} ->
      Logger.warn("ffmpeg exit with status: #{status}")
      :ok

    {^port, {:data, stdout}} ->
      if show_ffmpeg_output, do: IO.puts(Regex.replace(~r/\n/, stdout, "\r\n"))
      await_port(port, show_ffmpeg_output)
  end
end
# Resolves the output filename for a Space, caching it in ETS.
#
# On first use the `template`'s `%{key}` placeholders are rendered from
# the Space metadata and the result is stored under `@filename`.
# Returns `{:ok, filename}` or `{:error, reason}`.
defp filename(space_id, ets_table, template, opts) do
  with [{@filename, filename} | _] <- :ets.lookup(ets_table, @filename) do
    # BUG FIX: the cache-hit branch used to return the bare filename,
    # which failed every caller's `{:ok, filename} <- filename(...)` match.
    {:ok, filename}
  else
    [] ->
      case metadata(space_id, ets_table, opts) do
        {:error, reason} ->
          {:error, reason}

        {:ok, %{data: %{audioSpace: %{metadata: meta}}}} ->
          filename =
            ~r/\%\{(\w*)\}/
            |> Regex.scan(template)
            |> format_template(template, meta)

          true = :ets.insert(ets_table, {@filename, filename})
          {:ok, filename}
      end
  end
end
# Substitutes each `%{key}` placeholder found in `template` with the
# matching metadata value; unknown keys render as "".
#
# The first argument is the `Regex.scan/2` result for ~r/\%\{(\w*)\}/:
# a list of `[raw_match, key]` pairs consumed recursively.
defp format_template([], template, _meta), do: template

defp format_template([[raw, key] | rest], template, meta) do
  format_template(
    rest,
    # The raw match (e.g. "%{title}") contains only \w characters plus
    # `%{}`, which PCRE treats literally when compiled as a pattern.
    # NOTE(review): String.to_atom/1 creates atoms from template keys at
    # runtime — safe only while templates come from trusted config.
    raw
    |> Regex.compile!()
    |> Regex.replace(template, Map.get(meta, String.to_atom(key), "")),
    meta
  )
end
# Writes the playlist body to `<formatted_filename>.m3u8` and returns
# the written path.
#
# Uses `File.write!/2` instead of the manual open/binwrite/close
# sequence; behavior is the same (raises on any I/O failure).
defp write_playlist(formatted_filename, playlist) do
  output_filename = formatted_filename <> ".m3u8"
  File.write!(output_filename, playlist)
  output_filename
end
# Fetches the chunk playlist for a Space and rewrites the relative
# "chunk_" segment references into absolute URLs based on the master
# playlist's base URL. Returns `{:ok, body}` or `{:error, reason}`.
defp playlist_content(space_id, ets_table, opts) do
  ret_val =
    with {:ok, playlist_url_str} <- playlist_url(space_id, ets_table, opts),
         {:ok, master_url} <- master_url(space_id, ets_table, opts),
         url_base <- Regex.replace(~r/master_playlist.m3u8.*/, master_url, ""),
         %HTTPotion.Response{body: body, status_code: 200} <-
           HTTPotion.get(playlist_url_str, follow_redirects: true) do
      {:ok, Regex.replace(~r/chunk_/, body, "#{url_base}chunk_")}
    else
      {:error, reason} ->
        {:error, reason}

      _ ->
        reason = "cannot fetch playlist for space_id: #{space_id}"
        Logger.error(reason)
        {:error, reason}
    end

  # Optional plugin hook may observe/rewrite the result.
  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, nil, space_id)
end
# Derives the variant playlist URL from the master playlist: fetches the
# master playlist and takes its fourth line as the variant path
# (NOTE(review): position-based — assumes the master playlist layout is
# stable; verify against current responses), re-rooted on the master
# playlist's host. Returns `{:ok, url}` or `{:error, reason}`.
defp playlist_url(space_id, ets_table, opts) do
  ret_val =
    with {:ok, master_playlist} <- master_url(space_id, ets_table, opts),
         %HTTPotion.Response{body: body, status_code: 200} <-
           HTTPotion.get(master_playlist, follow_redirects: true),
         [_, _, _, suffix | _] <- String.split(body, "\n"),
         %URI{host: host} <- URI.parse(master_playlist),
         playlist <- "https://#{host}#{suffix}" do
      {:ok, playlist}
    else
      {:error, reason} ->
        {:error, reason}

      _ ->
        reason = "cannot get the playlist url"
        Logger.error(reason)
        {:error, reason}
    end

  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, nil, space_id)
end
# Returns the master playlist URL for a Space, caching it in ETS under
# `@master_playlist`. Derived from the dynamic playlist URL by replacing
# everything after "/audio-space/" with "master_playlist.m3u8".
defp master_url(space_id, ets_table, opts) do
  ret_val =
    with [{@master_playlist, master_playlist} | _] <- :ets.lookup(ets_table, @master_playlist) do
      {:ok, master_playlist}
    else
      [] ->
        with {:ok, dyn_url} <- dyn_url(space_id, ets_table, opts),
             master_playlist <-
               Regex.replace(
                 ~r/\/audio-space\/.*/,
                 dyn_url,
                 "/audio-space/master_playlist.m3u8"
               ),
             true <- :ets.insert(ets_table, {@master_playlist, master_playlist}) do
          {:ok, master_playlist}
        else
          {:error, reason} ->
            {:error, reason}

          _ ->
            reason = "cannot get dyn_url"
            Logger.error(reason)
            {:error, reason}
        end
    end

  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, nil, space_id)
end
# Resolves the dynamic (live/replay) HLS playlist URL for a Space,
# caching it in ETS under `@dyn_url`.
#
# Returns `{:ok, url}` or `{:error, reason}`. A Space that has ended
# without replay availability, or is in an unexpected state, is an error.
defp dyn_url(space_id, ets_table, opts) do
  ret_val =
    with [{@dyn_url, dyn_url} | _] <- :ets.lookup(ets_table, @dyn_url) do
      {:ok, dyn_url}
    else
      [] ->
        case metadata(space_id, ets_table, opts) do
          {:error, reason} ->
            {:error, reason}

          {:ok,
           %{
             data: %{
               audioSpace: %{metadata: %{state: "Ended", is_space_available_for_replay: false}}
             }
           }} ->
            reason = "Space has ended but it is not available for replay, #{space_id}"
            Logger.error(reason)
            {:error, reason}

          {:ok,
           %{
             data: %{
               audioSpace: %{
                 metadata: %{
                   state: state,
                   media_key: media_key
                 }
               }
             }
           }} ->
            if state != "Running" and state != "Ended" do
              reason = "Space is not running or ended, #{space_id}: #{state}"
              Logger.error(reason)
              {:error, reason}
            else
              # Ask the live_video_stream status endpoint for the media
              # location; needs the bearer token but no user auth cookie.
              status_url = @live_video_stream_status_endpoint <> media_key

              with %HTTPotion.Response{body: body, status_code: 200} <-
                     HTTPotion.get(status_url,
                       follow_redirects: true,
                       headers: [
                         authorization: get_authorization(opts),
                         cookie: "auth_token="
                       ]
                     ),
                   status <- Jason.decode!(body, keys: :atoms),
                   %{source: %{location: dyn_url}} <- status,
                   true <- :ets.insert(ets_table, {@dyn_url, dyn_url}) do
                {:ok, dyn_url}
              else
                _ ->
                  reason = "Space is not available, #{space_id}"
                  Logger.error(reason)
                  {:error, reason}
              end
            end

          meta ->
            # BUG FIX: interpolating the raw map via `#{IO.inspect(meta)}`
            # raised Protocol.UndefinedError (maps implement no
            # String.Chars) and printed to stdout as a side effect;
            # `inspect/1` is the correct way to embed a term in a string.
            reason = "Cannot match JSON structure, #{inspect(meta)}"
            Logger.error(reason)
            {:error, reason}
        end
    end

  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, nil, space_id)
end
# Fetches (and ETS-caches under `@metadata`) the AudioSpace GraphQL
# metadata for a Space. Returns `{:ok, decoded_map}` or `{:error, reason}`.
#
# NOTE(review): `Jason.decode!(body, keys: :atoms)` creates atoms from
# external JSON keys — a known memory-exhaustion footgun for untrusted
# responses; consider `keys: :atoms!` or string keys.
defp metadata(space_id, ets_table, opts) when is_binary(space_id) do
  ret_val =
    with [{@metadata, meta} | _] <- :ets.lookup(ets_table, @metadata) do
      {:ok, meta}
    else
      [] ->
        # GraphQL variables are JSON-encoded and fully URL-escaped into
        # the query string.
        get_url =
          @audio_space_metadata_endpoint <>
            "?variables=" <>
            (%{
               id: space_id,
               isMetatagsQuery: false,
               withSuperFollowsUserFields: true,
               withBirdwatchPivots: false,
               withDownvotePerspective: false,
               withReactionsMetadata: false,
               withReactionsPerspective: false,
               withSuperFollowsTweetFields: true,
               withReplays: true,
               withScheduledSpaces: true
             }
             |> Jason.encode!()
             |> URI.encode(fn _ -> false end))

        with %HTTPotion.Response{body: body, status_code: 200} <-
               HTTPotion.get(get_url,
                 follow_redirects: true,
                 headers: get_guest_header(ets_table, opts)
               ),
             meta <- Jason.decode!(body, keys: :atoms),
             %{data: %{audioSpace: %{metadata: %{media_key: _media_key}}}} <- meta,
             true <- :ets.insert(ets_table, {@metadata, meta}) do
          {:ok, meta}
        else
          _ ->
            reason = "cannot fetch metadata for space #{space_id}: #{get_url}"
            Logger.error(reason)
            {:error, reason}
        end
    end

  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, nil, space_id)
end
# Pass-through when no plugin module is configured.
defp to_plugin_module(nil, _func, result, _username, _space_id), do: result

# Gives an optional plugin module a chance to observe or rewrite `result`.
#
# `func_name` comes from `__ENV__.function` at the call site; the plugin
# callback has the same name with arity 3. A plugin may return
# `{:ok, result}` (possibly modified) or stop the process.
defp to_plugin_module(plugin_module, {func_name, _}, result, username, space_id) do
  # The nil case is handled by the first clause, so the previous
  # `plugin_module != nil and ...` guard was dead code.
  if function_exported?(plugin_module, func_name, 3) do
    case apply(plugin_module, func_name, [result, username, space_id]) do
      {:ok, maybe_modified_result} -> maybe_modified_result
      {:stop, reason} -> exit({:by_plugin_module, reason})
      :stop -> exit({:by_plugin_module, nil})
    end
  else
    result
  end
end
# Looks up a user's profile (including `rest_id`) via the
# UserByScreenName GraphQL endpoint. Returns `{:ok, decoded_map}` or
# `{:error, reason}`.
defp userinfo(username, ets_table, opts) do
  get_url =
    @user_by_screen_name_endpoint <>
      "?variables=" <>
      (%{
         screen_name: username,
         withSafetyModeUserFields: true,
         withSuperFollowsUserFields: true,
         withNftAvatar: false
       }
       |> Jason.encode!()
       |> URI.encode(fn _ -> false end))

  ret_val =
    with %HTTPotion.Response{body: body, status_code: 200} <-
           HTTPotion.get(get_url,
             follow_redirects: true,
             headers: get_guest_header(ets_table, opts)
           ),
         # NOTE(review): `keys: :atoms` creates atoms from external JSON.
         {:ok, info} <- Jason.decode(body, keys: :atoms) do
      {:ok, info}
    else
      _ ->
        reason = "cannot fetch userinfo for user: #{username}"
        Logger.error(reason)
        {:error, reason}
    end

  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, username, nil)
end
# Fetches the raw JSON body of a user's recent tweets (UserTweets
# GraphQL endpoint). The body is returned unparsed; callers regex-scan it
# for Space URLs. Returns `{:ok, body}` or `{:error, reason}`.
defp recent_tweets(user_id, ets_table, opts) do
  get_url =
    @user_tweets_endpoint <>
      "?variables=" <>
      (%{
         userId: user_id,
         count: 20,
         withTweetQuoteCount: true,
         includePromotedContent: true,
         withQuickPromoteEligibilityTweetFields: true,
         withSuperFollowsUserFields: true,
         withUserResults: true,
         withNftAvatar: false,
         withBirdwatchPivots: false,
         withReactionsMetadata: false,
         withReactionsPerspective: false,
         withSuperFollowsTweetFields: true,
         withVoice: true
       }
       |> Jason.encode!()
       |> URI.encode(fn _ -> false end))

  ret_val =
    with %HTTPotion.Response{body: body, status_code: 200} <-
           HTTPotion.get(get_url,
             follow_redirects: true,
             headers: get_guest_header(ets_table, opts)
           ) do
      {:ok, body}
    else
      _ ->
        reason = "cannot fetch recent tweets for user_id: #{user_id}"
        Logger.error(reason)
        {:error, reason}
    end

  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, nil, nil)
end
# Static public bearer token used by the Twitter web client for
# unauthenticated API access; a plugin module may substitute its own.
defp get_authorization(opts) do
  auth =
    "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"

  to_plugin_module(opts[:plugin_module], __ENV__.function, auth, nil, nil)
end
# Builds the HTTP headers (bearer token + guest token) used by the
# unauthenticated Twitter API endpoints.
#
# Returns a header keyword list, or `{:error, reason}` when no guest
# token could be obtained; the subsequent HTTP request then fails and is
# reported by the caller's own error handling.
defp get_guest_header(ets_table, opts) do
  ret_val =
    with {:ok, guest_token} <- guest_token(ets_table, opts) do
      [
        authorization: get_authorization(opts),
        "x-guest-token": "#{guest_token}"
      ]
    else
      # BUG FIX: the old `else` matched only `[]`, a value guest_token/2
      # never returns, so any token failure crashed with WithClauseError.
      {:error, reason} -> {:error, reason}
    end

  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, nil, nil)
end
# Returns `{:ok, guest_token}`, scraping twitter.com with up to
# `retry_times + 1` attempts (1s apart); the token is cached in ETS
# under `@guest_token` after the first success.
defp guest_token(ets_table, opts, retry_times \\ 5)

defp guest_token(ets_table, opts, retry_times) when retry_times >= 0 do
  ret_val =
    with [{@guest_token, guest_token} | _] <- :ets.lookup(ets_table, @guest_token) do
      Logger.info("cached guest_token: #{guest_token}")
      {:ok, guest_token}
    else
      _ ->
        # Scrape the `gt=<19 digits>` assignment out of the twitter.com
        # landing page and cache it.
        with %HTTPotion.Response{body: body, status_code: 200} <-
               HTTPotion.get("https://twitter.com/",
                 follow_redirects: true,
                 headers: ["User-Agent": @user_agent]
               ),
             [_, guest_token_str | _] <- Regex.run(~r/gt=(\d{19})/, body),
             true <- :ets.insert(ets_table, {@guest_token, guest_token_str}) do
          Logger.info("guest_token: #{guest_token_str}")
          {:ok, guest_token_str}
        else
          _ ->
            Logger.warn("guest_token not found, retrying... #{retry_times} times left")
            :timer.sleep(1000)
            guest_token(ets_table, opts, retry_times - 1)
        end
    end

  to_plugin_module(opts[:plugin_module], __ENV__.function, ret_val, nil, nil)
end

# All retries exhausted.
defp guest_token(_ets_table, _opts, retry_times) when retry_times < 0 do
  reason = "no guest_token found"
  Logger.error(reason)
  {:error, reason}
end
end
|
lib/twitter_space_dl.ex
| 0.775477
| 0.429669
|
twitter_space_dl.ex
|
starcoder
|
defmodule Annex.Data.List1D do
  @moduledoc """
  The Annex.Data.List is the most basic Annex.Data.

  A `List1D` is a flat list of numbers with shape `[n]`.
  """
  use Annex.Data

  alias Annex.{
    AnnexError,
    Data.List2D,
    Shape,
    Utils
  }

  import Utils, only: [is_pos_integer: 1]

  @type t() :: [float(), ...]

  defguard is_list1D(data) when Data.is_flat_data(data)

  @impl Data
  @spec cast(any, Shape.concrete()) :: t()
  # A size-1 dimension on either side is redundant for flat data.
  def cast(data, [1, n]), do: cast(data, [n])
  def cast(data, [n, 1]), do: cast(data, [n])

  # Validates that `data` holds exactly `n` elements; raises AnnexError
  # with diagnostic details otherwise.
  def cast(data, [n] = shape) when is_list1D(data) and is_integer(n) do
    elements_count = length(data)

    if elements_count != n do
      raise %AnnexError{
        message: """
        The number of items in the provided data did not match the required number of items of the given
        shape.
        """,
        details: [
          shape: shape,
          expected_count: n,
          actual_count: elements_count,
          data: data
        ]
      }
    end

    data
  end

  @impl Data
  @spec to_flat_list(t()) :: Data.flat_data()
  def to_flat_list(data) when is_list1D(data), do: data

  @impl Data
  @spec shape(t()) :: Shape.t()
  def shape(data) when is_list1D(data), do: [length(data)]

  @impl Data
  @spec is_type?(Data.data()) :: boolean
  def is_type?(data), do: is_list1D(data)

  @impl Data
  @spec apply_op(t(), Data.op(), Data.args()) :: Data.flat_data()
  # Dispatches the supported element-wise operations.
  def apply_op(data, op, args) do
    case {op, args} do
      {:map, [func]} -> Enum.map(data, func)
      {:subtract, [right]} -> subtract(data, right)
    end
  end

  @doc """
  Element-wise subtraction of `b` from `a`.
  """
  @spec subtract(t(), t()) :: t()
  def subtract(a, b) do
    Utils.zipmap(a, b, fn ax, bx -> ax - bx end)
  end

  @doc """
  Generates a list of `n` floats between -1.0 and 1.0.
  """
  @spec new_random(pos_integer()) :: t()
  def new_random(n) when is_pos_integer(n) do
    fn -> Utils.random_float() end
    |> Stream.repeatedly()
    |> Enum.take(n)
  end

  @doc """
  Generates a list of `n` ones.
  """
  @spec ones(pos_integer()) :: t()
  def ones(n) when is_pos_integer(n) do
    fn -> 1.0 end
    |> Stream.repeatedly()
    |> Enum.take(n)
  end

  @doc """
  Calculates the average of a 1D list.
  """
  @spec mean(any()) :: float()
  def mean([]), do: 0.0

  def mean(items) do
    # Single pass accumulating both count and total.
    {counted, totaled} =
      Enum.reduce(items, {0, 0.0}, fn item, {count, total} ->
        {count + 1, total + item}
      end)

    totaled / counted
  end

  # NOTE(review): this clause defines a separate `mean/2` with no other
  # clauses or visible callers; it looks like a leftover. Kept so any
  # external `mean([], _)` caller is unaffected.
  def mean([], _), do: 0.0

  @doc """
  Calculates the dot product which is the sum of element-wise multiplication of two enumerables.
  """
  @spec dot(t(), t()) :: float()
  def dot(a, b) when is_list1D(a) and is_list1D(b) do
    a
    |> Utils.zipmap(b, fn ax, bx -> ax * bx end)
    |> Enum.sum()
  end

  @spec transpose(t()) :: List2D.t()
  def transpose(data) when is_list1D(data) do
    Enum.map(data, fn f -> [f] end)
  end

  @doc """
  Turns a list of floats into floats between 0.0 and 1.0 at their respective ratio.
  """
  @spec normalize(t()) :: t()
  def normalize(data) when is_list1D(data) do
    {minimum, maximum} = Enum.min_max(data)

    case maximum - minimum do
      # BUG FIX: the float-literal pattern `0.0` does not match an
      # integer `0`, so all-equal integer input previously fell into the
      # division branch and raised ArithmeticError. A guard compares
      # across numeric types.
      diff when diff == 0 -> Enum.map(data, fn _ -> 1.0 end)
      diff -> Enum.map(data, fn item -> (item - minimum) / diff end)
    end
  end

  @doc """
  Turns a list of floats into their proportions.
  The sum of the output should be approximately 1.0.
  """
  @spec proportions(t()) :: t()
  def proportions(data) when is_list1D(data) do
    case Enum.sum(data) do
      # BUG FIX: same float-literal pattern issue as `normalize/1` — an
      # integer zero sum previously caused a divide-by-zero.
      sum when sum == 0 -> Enum.map(data, fn item -> item end)
      sum -> Enum.map(data, fn item -> item / sum end)
    end
  end
end
|
lib/annex/data/list_1d.ex
| 0.814754
| 0.706722
|
list_1d.ex
|
starcoder
|
defmodule Surface.Catalogue.Playground do
  @moduledoc """
  Experimental LiveView to create Playgrounds for catalogue tools.

  ## Options

  Besides the built-in options provided by the LiveView itself, a Playground also
  provides the following options:

  * `subject` - Required. The target component of the Playground.
  * `height` - Required. The initial height of the Playground.
  * `catalogue` - Optional. A module that implements the `Surface.Catalogue`
    providing additional information to the catalogue tool. Usually required
    if you want to share your components as a library.
  * `body` - Optional. Sets/overrides the attributes of the Playground's body tag.
    Useful to set a different background or padding.
  """

  import Phoenix.LiveView

  @pubsub Surface.Catalogue.PubSub

  # Injects the LiveView boilerplate into the using module and routes its
  # `mount/3` and `handle_info/2` callbacks back into this module.
  defmacro __using__(opts) do
    subject = Surface.Catalogue.fetch_subject!(opts, __MODULE__, __CALLER__)

    quote do
      use Surface.LiveView, unquote(opts)

      alias unquote(subject)
      require Surface.Catalogue.Data, as: Data

      @config unquote(opts)
      @before_compile unquote(__MODULE__)

      @impl true
      def mount(params, session, socket) do
        unquote(__MODULE__).__mount__(params, session, socket, unquote(subject))
      end

      @impl true
      def handle_info(message, socket) do
        unquote(__MODULE__).__handle_info__(message, socket)
      end
    end
  end

  # Retrieves or creates the window id that should be used to filter
  # PubSub messages from the Playground.
  @doc false
  def get_window_id(session, params) do
    key = "__window_id__"

    get_value_by_key(session, key) ||
      get_value_by_key(params, key) ||
      Base.encode16(:crypto.strong_rand_bytes(16))
  end

  # Subscribes to receive notification messages from the Playground.
  @doc false
  def subscribe(window_id) do
    Phoenix.PubSub.subscribe(@pubsub, topic(window_id))
  end

  # Broadcasts the initial subject/props/events snapshot to subscribers.
  defp notify_init(window_id, subject, props, events, props_values_with_events) do
    message = {:playground_init, self(), subject, props, events, props_values_with_events}
    Phoenix.PubSub.broadcast(@pubsub, topic(window_id), message)
  end

  # Broadcasts a component event received by the Playground.
  defp notify_event_received(window_id, event, value, props) do
    message = {:playground_event_received, event, value, props}
    Phoenix.PubSub.broadcast(@pubsub, topic(window_id), message)
  end

  # Per-window PubSub topic.
  defp topic(window_id) do
    "#{@pubsub}:#{window_id}"
  end

  # Emits catalogue metadata as @moduledoc and wraps any user-defined
  # handle_event/3 so the catalogue tool is notified of every event.
  defmacro __before_compile__(env) do
    config = Module.get_attribute(env.module, :config)
    subject = Keyword.fetch!(config, :subject)

    module_doc =
      quote do
        @moduledoc catalogue: [
                     type: :playground,
                     subject: unquote(subject),
                     config: unquote(config)
                   ]
      end

    if Module.defines?(env.module, {:handle_event, 3}) do
      quote do
        unquote(module_doc)

        defoverridable handle_event: 3

        @impl true
        def handle_event(event, value, socket) do
          # Run the user's handler first, then notify with its socket.
          result = super(event, value, socket)

          socket =
            case result do
              {:noreply, socket} -> socket
              {:reply, _map, socket} -> socket
            end

          unquote(__MODULE__).__handle_event__(event, value, socket)

          result
        end
      end
    else
      quote do
        unquote(module_doc)

        @impl true
        def handle_event(event, value, socket) do
          unquote(__MODULE__).__handle_event__(event, value, socket)
        end
      end
    end
  end

  @doc false
  # Once connected, splits the subject's props into events and plain
  # props, seeds default values, and notifies subscribed catalogue tools.
  def __mount__(params, session, socket, subject) do
    window_id = get_window_id(session, params)
    socket = assign(socket, :__window_id__, window_id)

    if connected?(socket) do
      {events, props} =
        subject.__props__()
        |> Enum.split_with(fn prop -> prop.type == :event end)

      events_props_values = generate_events_props(events)

      props_values =
        props
        |> get_props_default_values()
        |> Map.merge(socket.assigns.props)
        |> Map.merge(events_props_values)

      notify_init(window_id, subject, props, events, props_values)

      {:ok, assign(socket, :props, props_values)}
    else
      {:ok, socket}
    end
  end

  @doc false
  # Catalogue tool pushed new prop values.
  def __handle_info__({:update_props, values}, socket) do
    {:noreply, assign(socket, :props, values)}
  end

  # `:wake_up` is a no-op keep-alive message.
  def __handle_info__(:wake_up, socket) do
    {:noreply, socket}
  end

  @doc false
  def __handle_event__(event, value, socket) do
    window_id = socket.assigns[:__window_id__]
    notify_event_received(window_id, event, value, socket.assigns.props)

    {:noreply, socket}
  end

  defp get_value_by_key(map, key) when is_map(map) do
    map[key]
  end

  defp get_value_by_key(_map, _key) do
    nil
  end

  # Every event prop is pointed at the Playground LiveView itself.
  defp generate_events_props(events) do
    for %{name: name} <- events, into: %{} do
      {name, %{name: name, target: :live_view}}
    end
  end

  # Collects declared prop defaults into a name => value map.
  defp get_props_default_values(props) do
    for %{name: name, opts: opts} <- props,
        Keyword.has_key?(opts, :default),
        into: %{} do
      {name, opts[:default]}
    end
  end
end
|
lib/surface/catalogue/playground.ex
| 0.863075
| 0.484868
|
playground.ex
|
starcoder
|
defmodule Zaryn.Election.ValidationConstraints do
  @moduledoc """
  Represents the constraints for the validation nodes election
  """

  @default_min_validation_geo_patch 3
  @default_min_validations 3

  defstruct [
    :min_geo_patch,
    :min_validation_nodes,
    :validation_number
  ]

  alias Zaryn.TransactionChain.Transaction
  alias Zaryn.TransactionChain.TransactionData
  alias Zaryn.TransactionChain.TransactionData.Ledger
  alias Zaryn.TransactionChain.TransactionData.ZARYNLedger

  @typedoc """
  Each validation constraints represent a function which will be executed during the election algorithms:
  - min_geo_patch: Require number of distinct geographic patch for the elected validation nodes.
  This property ensure the geographical security of the transaction validation by spliting
  the computation in many place on the world.
  - min_validation_nodes: Require number of minimum validation nodes.
  - validation_number: Require number of validation nodes for a given transaction.
  """
  @type t :: %__MODULE__{
          min_geo_patch: (() -> non_neg_integer()),
          min_validation_nodes: (non_neg_integer() -> non_neg_integer()),
          validation_number: (Transaction.t(), non_neg_integer() -> non_neg_integer())
        }

  @doc """
  Builds a constraints struct. Each field holds a function so the
  election behavior can be customized (e.g. stubbed in tests); the
  module's own implementations are the defaults.
  """
  def new(
        min_geo_patch_fun \\ &min_geo_patch/0,
        min_validation_nodes_fun \\ &min_validation_nodes/1,
        validation_number_fun \\ &validation_number/2
      ) do
    %__MODULE__{
      min_geo_patch: min_geo_patch_fun,
      min_validation_nodes: min_validation_nodes_fun,
      validation_number: validation_number_fun
    }
  end

  @doc """
  Determine the minimum of geo patch to cover
  """
  @spec min_geo_patch :: non_neg_integer()
  def min_geo_patch, do: @default_min_validation_geo_patch

  @doc """
  Define the minimum of validations
  """
  @spec min_validation_nodes(non_neg_integer()) :: non_neg_integer()
  # With fewer authorized nodes than the default, use them all.
  def min_validation_nodes(nb_authorized_nodes)
      when nb_authorized_nodes < @default_min_validations,
      do: nb_authorized_nodes

  def min_validation_nodes(_), do: @default_min_validations

  @doc """
  Get the number of validations for a given transaction.
  """
  @spec validation_number(Transaction.t(), nb_authorized_nodes :: non_neg_integer()) ::
          non_neg_integer()
  def validation_number(
        %Transaction{
          data: %TransactionData{ledger: %Ledger{zaryn: %ZARYNLedger{transfers: transfers}}}
        },
        nb_authorized_nodes
      )
      when is_integer(nb_authorized_nodes) do
    total_transfers = Enum.reduce(transfers, 0.0, &(&2 + &1.amount))

    # Above a total transfer amount of 10, scale the validation count with
    # the order of magnitude (log10) of the total, capped by the number of
    # authorized nodes; otherwise fall back to the minimum.
    if total_transfers > 10 do
      validation_number =
        trunc(
          :math.floor(min_validation_nodes(nb_authorized_nodes) * :math.log10(total_transfers))
        )

      if validation_number > nb_authorized_nodes do
        nb_authorized_nodes
      else
        validation_number
      end
    else
      min_validation_nodes(nb_authorized_nodes)
    end
  end
end
|
lib/zaryn/election/constraints/validation.ex
| 0.871693
| 0.610366
|
validation.ex
|
starcoder
|
defmodule Cryptopunk.Key do
  @moduledoc """
  Utility functions to work with keys
  """
  # HD (hierarchical deterministic) key node: the raw key material plus
  # the chain code, tree position (depth/index) and parent fingerprint
  # used for extended-key serialization.
  defstruct [:type, :key, :chain_code, :depth, :index, :parent_fingerprint]

  alias Cryptopunk.Utils

  @type t :: %__MODULE__{}

  # HMAC key used to derive the master node from a seed.
  @master_hmac_key "Bitcoin seed"

  @doc """
  Builds a key struct from the given fields.

  When `:parent_key` is supplied, `depth` and `parent_fingerprint` are
  derived from it; otherwise both must be passed explicitly.
  """
  @spec new(Keyword.t()) :: t()
  def new(opts) do
    type = Keyword.fetch!(opts, :type)
    key = Keyword.fetch!(opts, :key)
    chain_code = Keyword.fetch!(opts, :chain_code)
    index = Keyword.fetch!(opts, :index)

    {depth, parent_fingerprint} =
      case Keyword.get(opts, :parent_key) do
        nil ->
          depth = Keyword.fetch!(opts, :depth)
          parent_fingerprint = Keyword.fetch!(opts, :parent_fingerprint)
          {depth, parent_fingerprint}

        parent_key ->
          depth = parent_key.depth + 1
          parent_fingerprint = fingerprint(parent_key)
          {depth, parent_fingerprint}
      end

    %__MODULE__{
      type: type,
      key: key,
      chain_code: chain_code,
      depth: depth,
      index: index,
      parent_fingerprint: parent_fingerprint
    }
  end

  @spec new_private(Keyword.t()) :: t()
  def new_private(opts) do
    opts
    |> Keyword.put(:type, :private)
    |> new()
  end

  @spec new_public(Keyword.t()) :: t()
  def new_public(opts) do
    opts
    |> Keyword.put(:type, :public)
    |> new()
  end

  # Master nodes sit at depth 0 with a zero parent fingerprint and index 0.
  @spec new_master_private(Keyword.t()) :: t()
  def new_master_private(opts) do
    opts
    |> Keyword.put(:depth, 0)
    |> Keyword.put(:parent_fingerprint, <<0::32>>)
    |> Keyword.put(:index, 0)
    |> new_private()
  end

  @spec new_master_public(Keyword.t()) :: t()
  def new_master_public(opts) do
    opts
    |> Keyword.put(:depth, 0)
    |> Keyword.put(:parent_fingerprint, <<0::32>>)
    |> Keyword.put(:index, 0)
    |> new_public()
  end

  @doc """
  Derives the master private key from a binary seed: the left 32 bytes of
  HMAC-SHA512("Bitcoin seed", seed) become the key, the right 32 bytes
  the chain code.
  """
  @spec master_key(binary()) :: t()
  def master_key(seed) do
    <<private_key::binary-32, chain_code::binary-32>> = Utils.hmac_sha512(@master_hmac_key, seed)

    new_master_private(key: private_key, chain_code: chain_code)
  end

  @doc """
  Derives the public key struct for a private key, keeping depth, index,
  chain code and parent fingerprint. Raises for a public-key input.
  """
  @spec public_from_private(t()) :: t() | no_return
  def public_from_private(%__MODULE__{
        key: key,
        chain_code: chain_code,
        depth: depth,
        parent_fingerprint: parent_fingerprint,
        index: index,
        type: :private
      }) do
    {:ok, public_key} = ExSecp256k1.create_public_key(key)

    new_public(
      key: public_key,
      chain_code: chain_code,
      depth: depth,
      parent_fingerprint: parent_fingerprint,
      index: index
    )
  end

  def public_from_private(%__MODULE__{type: :public}) do
    raise ArgumentError, message: "Can not create public key"
  end

  @doc """
  Serializes the key as a Base58Check-encoded extended key for the given
  4-byte `version` prefix (e.g. the prefixes rendered as "xprv"/"xpub").
  """
  @spec serialize(t(), binary()) :: String.t()
  def serialize(%__MODULE__{} = key, version) do
    key
    |> serialize_key()
    |> do_serialize(key, version)
    |> ExBase58.encode_check!()
  end

  @doc """
  Deserializes a Base58Check-encoded extended key; the textual prefix
  selects the key type.
  """
  @spec deserialize(binary()) :: t()
  def deserialize(<<"xpub", _rest::binary>> = encoded_key) do
    do_deserialize(encoded_key, :public)
  end

  def deserialize(<<"xprv", _rest::binary>> = encoded_key) do
    do_deserialize(encoded_key, :private)
  end

  # Unpacks the fixed-width extended-key layout:
  # version(4) | depth(1) | parent fingerprint(4) | index(4) |
  # chain code(32) | key(33).
  defp do_deserialize(encoded_key, type) do
    <<
      _version_number::binary-4,
      depth::8,
      fingerprint::binary-4,
      index::32,
      chain_code::binary-32,
      key::binary-33
    >> = ExBase58.decode_check!(encoded_key)

    %__MODULE__{
      type: type,
      key: deserialize_key(key, type),
      chain_code: chain_code,
      depth: depth,
      index: index,
      parent_fingerprint: fingerprint
    }
  end

  # A private key is stored as 0x00 followed by the 32-byte key; a public
  # key is stored compressed and is decompressed on read.
  defp deserialize_key(<<0::8, key::binary>>, :private), do: key

  defp deserialize_key(key, :public) do
    Utils.decompress_public_key(key)
  end

  defp serialize_key(%__MODULE__{type: :private, key: key}) do
    <<0::8, key::binary>>
  end

  defp serialize_key(%__MODULE__{type: :public} = public_key) do
    Utils.compress_public_key(public_key)
  end

  # Packs the struct fields around the already-serialized raw key.
  defp do_serialize(
         raw_key,
         %__MODULE__{
           chain_code: chain_code,
           depth: depth,
           index: index,
           parent_fingerprint: fingerprint
         },
         version
       ) do
    <<
      version::binary,
      depth::8,
      fingerprint::binary,
      index::32,
      chain_code::binary,
      raw_key::binary
    >>
  end

  # Fingerprint: first 4 bytes of RIPEMD160(SHA256(compressed pubkey)).
  defp fingerprint(%__MODULE__{type: :public} = key) do
    serialized = Utils.compress_public_key(key)
    sha256 = Utils.sha256_hash(serialized)
    <<fingerprint::binary-4, _rest::binary>> = Utils.ripemd160_hash(sha256)
    fingerprint
  end

  # A private key's fingerprint is that of its public counterpart.
  defp fingerprint(%__MODULE__{type: :private} = key) do
    key
    |> public_from_private()
    |> fingerprint()
  end
end
|
lib/cryptopunk/key.ex
| 0.826747
| 0.495972
|
key.ex
|
starcoder
|
defmodule Delta.Message do
  alias Updates.QueryAnalyzer.Types.Quad, as: Quad
  alias SparqlServer.Router.AccessGroupSupport, as: AccessGroupSupport

  @moduledoc """
  Contains code to construct the correct messenges for informing
  clients.
  """

  @typedoc """
  Type of the messages which can be sent to a client. Currently, this
  is a binary string.
  """
  @type t :: String.t()

  @doc """
  Constructs a new message which can be sent to the clients based on a
  quad delta.

  Each delta item becomes a change set annotated with the allowed access
  groups and the originating service, JSON-encoded as
  `%{"changeSets" => [...]}`.
  """
  @spec construct(Delta.delta(), AccessGroupSupport.decoded_json_access_groups(), String.t()) ::
          Delta.Message.t()
  def construct(delta, access_groups, origin) do
    # TODO we should include the current access rigths and an
    # identifier for the originating service. This would help
    # services ignore content which came from their end and would
    # allow services to perform updates in the name of a specific
    # user.
    json_model = %{
      "changeSets" =>
        Enum.map(delta, fn delta_item ->
          delta_item
          |> convert_delta_item
          |> add_allowed_groups(access_groups)
          |> add_origin(origin)
        end)
    }

    Poison.encode!(json_model)
  end

  # A delta item is a tagged insert or delete carrying a list of quads.
  defp convert_delta_item({:insert, quads}) do
    %{"insert" => Enum.map(quads, &convert_quad/1)}
  end

  defp convert_delta_item({:delete, quads}) do
    %{"delete" => Enum.map(quads, &convert_quad/1)}
  end

  @spec add_allowed_groups(Poison.Decoder.t(), AccessGroupSupport.decoded_json_access_groups()) ::
          Poison.Decoder.t()
  # `:sudo` bypasses group encoding and is forwarded verbatim.
  defp add_allowed_groups(map, :sudo) do
    Map.put(map, "allowedGroups", "sudo")
  end

  defp add_allowed_groups(map, access_groups) do
    json_access_groups = AccessGroupSupport.encode_json_access_groups(access_groups)
    Map.put(map, "allowedGroups", json_access_groups)
  end

  defp add_origin(map, origin) do
    Map.put(map, "origin", origin)
  end

  # Renders a quad's four terms as SPARQL result values.
  defp convert_quad(%Quad{graph: graph, subject: subject, predicate: predicate, object: object}) do
    [g, s, p, o] =
      Enum.map(
        [graph, subject, predicate, object],
        &Updates.QueryAnalyzer.P.to_sparql_result_value/1
      )

    %{"graph" => g, "subject" => s, "predicate" => p, "object" => o}
  end
end
|
lib/delta/message.ex
| 0.526586
| 0.417153
|
message.ex
|
starcoder
|
defmodule Cased.BypassTagHelper do
  @moduledoc """
  Provides helpers to support configuring Bypass in test `setup`.
  """

  @doc """
  Configure Bypass with options:

  ## Examples

  Don't do any configuration (no-op):

  ```
  @tag :bypass
  ```

  Configure Bypass to return the contents of `test/fixtures/foo.json`:

  ```
  @tag bypass: [fixture: "foo"]
  ```

  Configure Bypass to return a status of `502`:

  ```
  @tag bypass: [status: 502]
  ```

  Configure Bypass to parse page numbers and return the contents of `test/fixtures/foo.PAGE.json`:

  ```
  @tag bypass: [fixture: "foo", paginated: true]
  ```
  """
  # Support `@tag :bypass` β do nothing!
  def configure_bypass(_bypass, true), do: :noop

  # Support `@tag bypass: a_keyword_list`
  def configure_bypass(bypass, settings) when is_list(settings) do
    do_configure_bypass(bypass, Map.new(settings))
  end

  # Paginated mode: serve `fixture.PAGE.json` for the requested ?page=
  # and emit first/last/self Link headers for the pagination client.
  defp do_configure_bypass(bypass, %{paginated: true} = settings) do
    status = Map.get(settings, :status, 200)

    if status in 200..299 do
      Bypass.expect(bypass, fn conn ->
        %{"page" => page} = Plug.Conn.Query.decode(conn.query_string)
        fixture = File.read!("test/fixtures/#{settings.fixture}.#{page}.json")

        conn
        |> Plug.Conn.put_resp_header(
          "link",
          [
            ~s(<http://localhost:#{bypass.port}/#{settings.fixture}?page=1&per_page=25>; rel="first"),
            ~s(<http://localhost:#{bypass.port}/#{settings.fixture}?page=3&per_page=25>; rel="last"),
            ~s(<http://localhost:#{bypass.port}/#{settings.fixture}?page=#{page}&per_page=25>; rel="self")
          ]
          |> Enum.join(", ")
        )
        |> Plug.Conn.resp(status, fixture)
      end)
    else
      # Non-2xx: serve the optional :body verbatim for every request.
      Bypass.expect(bypass, fn conn ->
        Plug.Conn.resp(conn, status, Map.get(settings, :body, ""))
      end)
    end
  end

  # Single-response mode: serve a fixture, an empty body, a redirect, or
  # a plain status, each expected exactly once.
  defp do_configure_bypass(bypass, settings) do
    status = Map.get(settings, :status, 200)

    fixture =
      case Map.get(settings, :fixture) do
        nil ->
          nil

        :empty ->
          ""

        name ->
          File.read!("test/fixtures/#{name}.json")
      end

    cond do
      status in 200..299 ->
        Bypass.expect_once(bypass, fn conn ->
          Plug.Conn.resp(conn, status, fixture)
        end)

      status == 302 ->
        # Redirect once to :redirect_path, then serve the fixture there
        # with :redirect_status.
        redirect_url = "http://localhost:#{bypass.port}#{settings.redirect_path}"

        Bypass.expect_once(bypass, fn conn ->
          conn
          |> Plug.Conn.put_resp_header("location", redirect_url)
          |> Plug.Conn.resp(status, "")
        end)

        Bypass.expect_once(bypass, "GET", settings.redirect_path, fn conn ->
          Plug.Conn.resp(conn, settings.redirect_status, fixture)
        end)

      true ->
        Bypass.expect_once(bypass, fn conn ->
          Plug.Conn.resp(conn, status, Map.get(settings, :body, ""))
        end)
    end
  end
end
|
test/support/cased/bypass_tag_helper.ex
| 0.858511
| 0.778439
|
bypass_tag_helper.ex
|
starcoder
|
defmodule Mint.WebSocket.Frame do
  @moduledoc false
  # Functions and data structures for describing websocket frames.
  # https://tools.ietf.org/html/rfc6455#section-5.2
  import Record
  alias Mint.WebSocket.{Utils, Extension}
  alias Mint.WebSocketError
  @compile {:inline, apply_mask: 2, apply_mask: 3}
  # Fields shared by every frame record: the three RSV bits (zero unless an
  # extension claims them), the 4-byte masking key (nil for unmasked frames),
  # the payload, and the FIN bit.
  shared = [{:reserved, <<0::size(3)>>}, :mask, :data, :fin?]
  defrecord :continuation, shared
  defrecord :text, shared
  defrecord :binary, shared
  # > All control frames MUST have a payload length of 125 bytes or less
  # > and MUST NOT be fragmented.
  defrecord :close, shared ++ [:code, :reason]
  defrecord :ping, shared
  defrecord :pong, shared
  defguard is_control(frame)
           when is_tuple(frame) and
                  (elem(frame, 0) == :close or elem(frame, 0) == :ping or elem(frame, 0) == :pong)
  # tuple position 4 is the `fin?` field of the records above
  defguard is_fin(frame) when elem(frame, 4) == true
  # guards frames dealt with in the user-space (not records)
  defguardp is_friendly_frame(frame)
            when frame in [:ping, :pong, :close] or
                   (is_tuple(frame) and elem(frame, 0) in [:text, :binary, :ping, :pong] and
                      is_binary(elem(frame, 1))) or
                   (is_tuple(frame) and elem(frame, 0) == :close and is_integer(elem(frame, 1)) and
                      is_binary(elem(frame, 2)))
  # https://tools.ietf.org/html/rfc6455#section-7.4.1
  @invalid_status_codes [1_004, 1_005, 1_006, 1_016, 1_100, 2_000, 2_999]
  # https://tools.ietf.org/html/rfc6455#section-7.4.2
  defguardp is_valid_close_code(code)
            when code in 1_000..4_999 and code not in @invalid_status_codes
  # 4-bit wire opcodes, keyed by frame type.
  @opcodes %{
    # non-control opcodes:
    continuation: <<0x0::size(4)>>,
    text: <<0x1::size(4)>>,
    binary: <<0x2::size(4)>>,
    # 0x3-7 reserved for future non-control frames
    # control opcodes:
    close: <<0x8::size(4)>>,
    ping: <<0x9::size(4)>>,
    pong: <<0xA::size(4)>>
    # 0xB-F reserved for future control frames
  }
  @reverse_opcodes Map.new(@opcodes, fn {k, v} -> {v, k} end)
  @non_control_opcodes [:continuation, :text, :binary]
  def opcodes, do: Map.keys(@opcodes)
  # Client-to-server frames must be masked; generate a fresh random key per frame.
  def new_mask, do: :crypto.strong_rand_bytes(4)
  # Encodes a user-friendly frame (e.g. {:text, "hi"}) into wire bytes,
  # running it through the negotiated extensions first. Returns
  # {:ok, websocket, binary} or {:error, websocket, reason}.
  def encode(websocket, frame) when is_friendly_frame(frame) do
    {frame, extensions} =
      frame
      |> translate()
      |> Extension.encode(websocket.extensions)
    websocket = put_in(websocket.extensions, extensions)
    frame = encode_to_binary(frame)
    {:ok, websocket, frame}
  catch
    :throw, {:mint, reason} -> {:error, websocket, reason}
  end
  @spec encode_to_binary(tuple()) :: binary()
  # Serializes a frame record into the RFC 6455 wire format:
  # FIN | RSV(3) | opcode(4) | MASK | payload length | [mask] | payload.
  defp encode_to_binary(frame) do
    payload = payload(frame)
    mask = mask(frame)
    masked? = if mask == nil, do: 0, else: 1
    encoded_payload_length = encode_payload_length(elem(frame, 0), byte_size(payload))
    <<
      encode_fin(frame)::bitstring,
      reserved(frame)::bitstring,
      encode_opcode(frame)::bitstring,
      masked?::size(1),
      encoded_payload_length::bitstring,
      mask || <<>>::binary,
      apply_mask(payload, mask)::bitstring
    >>
  end
  defp payload(close(code: nil, reason: nil)) do
    <<>>
  end
  # Close payload: 2-byte status code followed by a UTF-8 reason.
  defp payload(close(code: code, reason: reason)) do
    code = code || 1_000
    reason = reason || ""
    <<code::unsigned-integer-size(8)-unit(2), reason::binary>>
  end
  for type <- Map.keys(@opcodes) -- [:close] do
    defp payload(unquote(type)(data: data)), do: data
  end
  # Record accessors generated for every frame type.
  for type <- Map.keys(@opcodes) do
    defp mask(unquote(type)(mask: mask)), do: mask
    defp reserved(unquote(type)(reserved: reserved)), do: reserved
  end
  defp encode_fin(text(fin?: false)), do: <<0b0::size(1)>>
  defp encode_fin(binary(fin?: false)), do: <<0b0::size(1)>>
  defp encode_fin(continuation(fin?: false)), do: <<0b0::size(1)>>
  defp encode_fin(_), do: <<0b1::size(1)>>
  defp encode_opcode(frame), do: @opcodes[elem(frame, 0)]
  # Payload length encoding (RFC 6455 §5.2): 7 bits, or 126 + 2 bytes,
  # or 127 + 8 bytes. Extended lengths are only legal on non-control frames.
  def encode_payload_length(_opcode, length) when length in 0..125 do
    <<length::integer-size(7)>>
  end
  def encode_payload_length(opcode, length)
      when length in 126..65_535 and opcode in @non_control_opcodes do
    <<126::integer-size(7), length::unsigned-integer-size(8)-unit(2)>>
  end
  def encode_payload_length(opcode, length)
      when length in 65_535..9_223_372_036_854_775_807 and opcode in @non_control_opcodes do
    <<127::integer-size(7), length::unsigned-integer-size(8)-unit(8)>>
  end
  def encode_payload_length(_opcode, _length) do
    throw({:mint, %WebSocketError{reason: :payload_too_large}})
  end
  # Mask the payload by bytewise XOR-ing the payload bytes against the mask
  # bytes (where the mask bytes repeat).
  # This is an "involution" function: applying the mask will mask
  # the data and applying the mask again will unmask it.
  def apply_mask(payload, mask, acc \\ <<>>)
  def apply_mask(payload, nil, _acc), do: payload
  # n=4 is the happy path
  # n=3..1 catches cases where the remaining byte_size/1 of the payload is shorter
  # than the mask
  for n <- 4..1 do
    def apply_mask(
          <<part_key::integer-size(8)-unit(unquote(n)), payload_rest::binary>>,
          <<mask_key::integer-size(8)-unit(unquote(n)), _::binary>> = mask,
          acc
        ) do
      apply_mask(
        payload_rest,
        mask,
        <<acc::binary, :erlang.bxor(mask_key, part_key)::integer-size(8)-unit(unquote(n))>>
      )
    end
  end
  def apply_mask(<<>>, _mask, acc), do: acc
  @spec decode(Mint.WebSocket.t(), binary()) ::
          {:ok, Mint.WebSocket.t(), [Mint.WebSocket.frame() | {:error, term()}]}
          | {:error, Mint.WebSocket.t(), any()}
  # Decodes raw wire bytes into user-friendly frames, running each frame
  # through the negotiated extensions. Undecodable frames surface as
  # {:error, reason} elements in the returned list.
  def decode(websocket, data) do
    {websocket, frames} = binary_to_frames(websocket, data)
    {websocket, frames} =
      Enum.reduce(frames, {websocket, []}, fn
        {:error, reason}, {websocket, acc} ->
          {websocket, [{:error, reason} | acc]}
        frame, {websocket, acc} ->
          {frame, extensions} = Extension.decode(frame, websocket.extensions)
          {put_in(websocket.extensions, extensions), [translate(frame) | acc]}
      end)
    {:ok, websocket, :lists.reverse(frames)}
  catch
    {:mint, reason} -> {:error, websocket, reason}
  end
  # Prepends any buffered partial frame bytes, decodes complete frames, and
  # stores any trailing partial frame back into the websocket's buffer.
  defp binary_to_frames(websocket, data) do
    case websocket.buffer |> Utils.maybe_concat(data) |> decode_raw(websocket, []) do
      {:ok, frames} ->
        {websocket, frames} = resolve_fragments(websocket, frames)
        {put_in(websocket.buffer, <<>>), frames}
      {:buffer, partial, frames} ->
        {websocket, frames} = resolve_fragments(websocket, frames)
        {put_in(websocket.buffer, partial), frames}
    end
  end
  # Peels one frame header off the front of `data` and recurses on the rest.
  defp decode_raw(
         <<fin::size(1), reserved::bitstring-size(3), opcode::bitstring-size(4), masked::size(1),
           payload_and_mask::bitstring>> = data,
         websocket,
         acc
       ) do
    case decode_payload_and_mask(payload_and_mask, masked == 0b1) do
      {:ok, payload, mask, rest} ->
        frame = decode_full_frame_binary(opcode, fin, reserved, mask, payload)
        decode_raw(rest, websocket, [frame | acc])
      {:error, reason} ->
        {:ok, :lists.reverse([{:error, reason} | acc])}
      :buffer ->
        {:buffer, data, :lists.reverse(acc)}
    end
  end
  defp decode_raw(<<>>, _websocket, acc), do: {:ok, :lists.reverse(acc)}
  # Fewer than two header bytes remain: buffer them for the next read.
  defp decode_raw(partial, _websocket, acc) when is_binary(partial) do
    {:buffer, partial, :lists.reverse(acc)}
  end
  # Extracts the (possibly extended) payload length, the mask if present,
  # and the payload bytes. Returns :buffer when the payload is incomplete.
  defp decode_payload_and_mask(payload, masked?) do
    with {:ok, payload_length, rest} <- decode_payload_length(payload),
         {:ok, mask, rest} <- decode_mask(rest, masked?),
         <<payload::binary-size(payload_length), more::bitstring>> <- rest do
      {:ok, payload, mask, more}
    else
      partial when is_binary(partial) -> :buffer
      :buffer -> :buffer
      {:error, reason} -> {:error, reason}
    end
  end
  defp decode_full_frame_binary(opcode, fin, reserved, mask, payload) do
    with {:ok, opcode} <- decode_opcode(opcode) do
      into_frame(
        opcode,
        _fin? = fin == 0b1,
        reserved,
        mask,
        # unmasking is the same XOR operation as masking
        apply_mask(payload, mask)
      )
    end
  end
  defp decode_opcode(opcode) do
    with :error <- Map.fetch(@reverse_opcodes, opcode) do
      {:error, {:unsupported_opcode, opcode}}
    end
  end
  # Inverse of encode_payload_length/2: 127 => 8-byte length, 126 => 2-byte
  # length, otherwise the 7 bits are the length. Clauses with only the marker
  # byte present return :buffer to await more data.
  defp decode_payload_length(
         <<127::integer-size(7), payload_length::unsigned-integer-size(8)-unit(8),
           rest::bitstring>>
       ),
       do: {:ok, payload_length, rest}
  defp decode_payload_length(<<127::integer-size(7)>>), do: :buffer
  defp decode_payload_length(
         <<126::integer-size(7), payload_length::unsigned-integer-size(8)-unit(2),
           rest::bitstring>>
       ),
       do: {:ok, payload_length, rest}
  defp decode_payload_length(<<126::integer-size(7)>>), do: :buffer
  defp decode_payload_length(<<payload_length::integer-size(7), rest::bitstring>>)
       when payload_length in 0..125,
       do: {:ok, payload_length, rest}
  defp decode_payload_length(malformed) do
    {:error, {:malformed_payload_length, malformed}}
  end
  defp decode_mask(payload, masked?)
  # 8 units of 4 bits = the 4-byte masking key.
  defp decode_mask(<<mask::binary-size(8)-unit(4), rest::bitstring>>, true) do
    {:ok, mask, rest}
  end
  defp decode_mask(payload, false) do
    {:ok, nil, payload}
  end
  # MASK bit set but fewer than 4 bytes available.
  defp decode_mask(payload, _masked?) do
    {:error, {:missing_mask, payload}}
  end
  for data_type <- [:continuation, :text, :binary, :ping, :pong] do
    def into_frame(unquote(data_type), fin?, reserved, mask, payload) do
      unquote(data_type)(
        fin?: fin?,
        reserved: reserved,
        mask: mask,
        data: payload
      )
    end
  end
  # Close frames with a payload must carry a valid status code and a reason
  # of at most 123 bytes of valid UTF-8 (125-byte control limit minus the code).
  def into_frame(
        :close,
        fin?,
        reserved,
        mask,
        <<code::unsigned-integer-size(8)-unit(2), reason::binary>> = payload
      )
      when byte_size(reason) in 0..123 and is_valid_close_code(code) do
    if String.valid?(reason) do
      close(reserved: reserved, mask: mask, code: code, reason: reason, fin?: fin?)
    else
      {:error, {:invalid_close_payload, payload}}
    end
  end
  # An empty close payload defaults to the normal-closure code 1000.
  def into_frame(
        :close,
        fin?,
        reserved,
        mask,
        <<>>
      ) do
    close(reserved: reserved, mask: mask, code: 1_000, reason: "", fin?: fin?)
  end
  def into_frame(
        :close,
        _fin?,
        _reserved,
        _mask,
        payload
      ) do
    {:error, {:invalid_close_payload, payload}}
  end
  # translate from user-friendly tuple into record defined in this module
  # (and the reverse)
  @spec translate(Mint.WebSocket.frame() | Mint.WebSocket.shorthand_frame()) :: tuple()
  @spec translate(tuple) :: Mint.WebSocket.frame()
  # Any frame arriving with non-zero RSV bits (after extension decoding) is
  # malformed.
  for opcode <- Map.keys(@opcodes) do
    def translate(unquote(opcode)(reserved: <<reserved::bitstring>>))
        when reserved != <<0::size(3)>> do
      {:error, {:malformed_reserved, reserved}}
    end
  end
  def translate({:error, reason}), do: {:error, reason}
  def translate({:text, text}) do
    text(fin?: true, mask: new_mask(), data: text)
  end
  # Inbound text must be valid UTF-8 per RFC 6455 §8.1.
  def translate(text(fin?: true, data: data)) do
    if String.valid?(data) do
      {:text, data}
    else
      {:error, {:invalid_utf8, data}}
    end
  end
  def translate({:binary, binary}) do
    binary(fin?: true, mask: new_mask(), data: binary)
  end
  def translate(binary(fin?: true, data: data)), do: {:binary, data}
  def translate(:ping), do: translate({:ping, <<>>})
  def translate({:ping, body}) do
    ping(mask: new_mask(), data: body)
  end
  def translate(ping(data: data)), do: {:ping, data}
  def translate(:pong), do: translate({:pong, <<>>})
  def translate({:pong, body}) do
    pong(mask: new_mask(), data: body)
  end
  def translate(pong(data: data)), do: {:pong, data}
  def translate(:close) do
    translate({:close, nil, nil})
  end
  def translate({:close, code, reason}) do
    close(mask: new_mask(), code: code, reason: reason, data: <<>>)
  end
  def translate(close(code: code, reason: reason)) do
    {:close, code, reason}
  end
  @doc """
  Emits frames for any finalized fragments and stores any unfinalized fragments
  in the `:fragment` key in the websocket data structure
  """
  def resolve_fragments(websocket, frames, acc \\ [])
  def resolve_fragments(websocket, [], acc) do
    {websocket, :lists.reverse(acc)}
  end
  def resolve_fragments(websocket, [{:error, reason} | rest], acc) do
    resolve_fragments(websocket, rest, [{:error, reason} | acc])
  end
  # Control frames may be interleaved within a fragmented message and pass
  # straight through without touching the fragment state.
  def resolve_fragments(websocket, [frame | rest], acc)
      when is_control(frame) and is_fin(frame) do
    resolve_fragments(websocket, rest, [frame | acc])
  end
  # A FIN data frame completes any in-progress fragment.
  def resolve_fragments(websocket, [frame | rest], acc) when is_fin(frame) do
    frame = combine(websocket.fragment, frame)
    put_in(websocket.fragment, nil)
    |> resolve_fragments(rest, [frame | acc])
  end
  # A non-FIN data frame extends (or starts) the stored fragment.
  def resolve_fragments(websocket, [frame | rest], acc) do
    case combine(websocket.fragment, frame) do
      {:error, reason} ->
        put_in(websocket.fragment, nil)
        |> resolve_fragments(rest, [{:error, reason} | acc])
      frame ->
        put_in(websocket.fragment, frame)
        |> resolve_fragments(rest, acc)
    end
  end
  # A continuation with no preceding fragment is a protocol error.
  defp combine(nil, continuation(fin?: true)), do: {:error, :insular_continuation}
  defp combine(nil, frame), do: frame
  for type <- [:continuation, :text, :binary] do
    defp combine(
           unquote(type)(data: frame_data) = frame,
           continuation(data: continuation_data, fin?: fin?)
         ) do
      unquote(type)(frame, data: Utils.maybe_concat(frame_data, continuation_data), fin?: fin?)
    end
  end
  defp combine(a, b), do: {:error, {:cannot_combine_frames, a, b}}
end
|
lib/mint/web_socket/frame.ex
| 0.712932
| 0.52342
|
frame.ex
|
starcoder
|
defmodule HedwigTrivia.Logic do
  @moduledoc """
  A home for the business logic of fetching/answering questions.
  """
  alias HedwigTrivia.{
    Answer,
    GameState,
    Question
  }
  @type force_new :: boolean()
  # Prefixes randomly prepended to wrong-guess responses.
  @incorrect_prefixes [
    "No. Sorry ",
    "I'm afraid "
  ]
  # Prefixes randomly prepended to correct-guess responses.
  @correct_prefixes [
    "Yes! ",
    "Bingo! ",
    "You got it! "
  ]
  @doc """
  Fetch a new question if need be, but return a state with the correct game
  information.
  """
  @spec question(GameState.t(), force_new()) ::
          {:ok | :error | :not_answered, GameState.t()}
  def question(%{answered: answered} = state, false) do
    # Only fetch a new question once the current one has been answered.
    if answered do
      Question.fetch(state)
    else
      {:not_answered, state}
    end
  end
  # force a new question fetch
  def question(state, true), do: Question.fetch(state)
  @doc """
  Determine if the user-supplied guess matches the answer.
  """
  @spec guess(GameState.t(), String.t()) ::
          {:ok | :error | :already_answered, GameState.t()}
  def guess(%{answer: answer, answered: false} = state, guess) do
    if Answer.correct_or_close_enough?(answer, guess) do
      state = %{state | answered: true}
      {:ok, state}
    else
      # Just to be sure, let's set the answered state to false
      state = %{state | answered: false}
      {:error, state}
    end
  end
  def guess(%{answered: true} = state, _guess), do: {:already_answered, state}
  @doc """
  Mark the question as answered and return the game state so the answer can
  be revealed
  """
  @spec solution(GameState.t()) :: {:ok | :error, GameState.t()}
  def solution(%{answer: answer} = state) when is_nil(answer) or answer == "" do
    # No answer on file: still mark as answered so a new question can be fetched.
    {:error, %{state | answered: true}}
  end
  def solution(state), do: {:ok, %{state | answered: true}}
  @doc """
  Build up the full response from the category, value, and question
  """
  @spec compose_full_question(GameState.t()) :: String.t()
  def compose_full_question(%{
        question: question,
        category_name: category_name,
        value: value
      }) do
    "_#{category_name}_[$#{value}] #{question}"
  end
  @doc """
  Given a guess, return a string representing a response from the bot.
  """
  @spec incorrect(String.t()) :: String.t()
  def incorrect(guess) do
    Enum.random(@incorrect_prefixes) <> guess <> " is incorrect"
  end
  @doc """
  Given a guess, return a string representing a response from the bot.
  """
  @spec correct(String.t()) :: String.t()
  def correct(guess) do
    Enum.random(@correct_prefixes) <> guess <> " is correct"
  end
end
|
lib/hedwig_trivia/logic.ex
| 0.64512
| 0.546012
|
logic.ex
|
starcoder
|
defmodule Cryptopunk.Mnemonic do
  @moduledoc """
  Implements mnemonic generation logic.
  See https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki
  """
  # BIP-39 table: word count => entropy size in bits.
  @word_number_to_entropy_bits %{12 => 128, 15 => 160, 18 => 192, 21 => 224, 24 => 256}
  @word_numbers Map.keys(@word_number_to_entropy_bits)
  # The 2048-entry BIP-39 word list, read from priv/words at compile time.
  @words :cryptopunk
         |> :code.priv_dir()
         |> Path.join("words")
         |> File.stream!()
         |> Stream.map(&String.trim/1)
         |> Enum.to_list()
  @doc """
  Generates a random mnemonic of `word_number` words (defaults to the maximum,
  24). Raises `ArgumentError` when `word_number` is not one of 12, 15, 18, 21
  or 24.
  """
  @spec create(non_neg_integer()) :: String.t() | no_return
  def create(word_number \\ Enum.max(@word_numbers))
  def create(word_number) when word_number not in @word_numbers do
    raise ArgumentError,
      message:
        "Number of words #{inspect(word_number)} is not supported, please use one of the #{inspect(@word_numbers)} "
  end
  def create(word_number) do
    entropy_bits = Map.fetch!(@word_number_to_entropy_bits, word_number)
    entropy_bits
    |> create_entropy()
    |> do_create_from_entropy(entropy_bits)
  end
  @doc """
  Builds a mnemonic from externally supplied entropy bytes.
  The entropy must be 16, 20, 24, 28 or 32 bytes long (matching one of the
  supported word counts); otherwise an `ArgumentError` is raised.
  """
  @spec create_from_entropy(binary()) :: String.t() | no_return
  def create_from_entropy(entropy) do
    # Look up which supported entropy size (in bits) matches the given bytes.
    found_entropy_bits =
      Enum.find(@word_number_to_entropy_bits, fn {_number, bits} ->
        div(bits, 8) == byte_size(entropy)
      end)
    case found_entropy_bits do
      {_, entropy_bits} ->
        do_create_from_entropy(entropy, entropy_bits)
      _ ->
        raise ArgumentError,
          message: "Entropy size is invalid"
    end
  end
  defp do_create_from_entropy(entropy, entropy_bits) do
    entropy
    |> append_checksum(entropy_bits)
    |> to_mnemonic()
  end
  defp create_entropy(entropy_bits) do
    entropy_bits
    |> div(8)
    |> :crypto.strong_rand_bytes()
  end
  # BIP-39 checksum: the first entropy_bits/32 bits of SHA-256(entropy),
  # appended to the entropy itself.
  defp append_checksum(entropy, entropy_bits) do
    checksum_size = div(entropy_bits, 32)
    <<checksum::bits-size(checksum_size), _::bits>> = :crypto.hash(:sha256, entropy)
    <<entropy::bits, checksum::bits>>
  end
  # Splits the entropy+checksum bitstring into 11-bit chunks; each chunk
  # indexes one word in the 2048-word list.
  defp to_mnemonic(bytes) do
    words =
      for <<chunk::size(11) <- bytes>> do
        Enum.at(@words, chunk)
      end
    Enum.join(words, " ")
  end
end
|
lib/cryptopunk/mnemonic.ex
| 0.876667
| 0.470493
|
mnemonic.ex
|
starcoder
|
defmodule BlurHash do
  @moduledoc """
  Pure Elixir implementation of Blurhash algorithm with no additional dependencies.
  Blurhash is an algorithm by <NAME> that decodes an image to a very compact (~ 20-30 bytes) ASCII string representation, which can be then decoded into a blurred placeholder image. See the main repo (https://github.com/woltapp/blurhash) for the rationale and details.
  This library supports only encoding.
  More details on https://blurha.sh/
  """
  @moduledoc since: "1.0.0"
  # The base-83 alphabet defined by the Blurhash specification.
  @digit_characters "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz#$%*+,-.:;=?@[]^_{|}~"
  @doc """
  Calculates the blur hash from the given pixels
  Returns Blurhash string
  ## Examples
      iex> BlurHash.encode(pixels, 30, 30, 4, 3)
      "LEHV6nWB2yk8pyo0adR*.7kCMdnj"
  """
  @doc since: "1.0.0"
  @type pixels :: [integer()]
  @type width :: integer()
  @type height :: integer()
  @type components_y :: integer()
  @type components_x :: integer()
  @type hash :: String.t()
  @spec encode(pixels, width, height, components_y, components_x) :: hash
  def encode(pixels, width, height, components_y, components_x) do
    # First base-83 digit packs both component counts.
    size_flag = components_x - 1 + (components_y - 1) * 9
    # DC is the average colour; AC are the cosine-transform components.
    [dc | ac] = get_factors(pixels, width, height, components_y, components_x)
    hash = encode_83(size_flag, 1)

    if ac == [] do
      # No AC components: the quantised maximum value is simply 0.
      hash <> encode_83(0, 1) <> encode_83(encode_dc(dc), 4)
    else
      actual_maximum_value =
        ac
        |> Enum.map(&Enum.max/1)
        |> Enum.max()

      # Quantise the maximum AC value into 0..82 (one base-83 digit).
      quantised_maximum_value =
        floor(Enum.max([0.0, Enum.min([82.0, floor(actual_maximum_value * 166 - 0.5)])]) / 1)

      maximum_value = (quantised_maximum_value + 1) / 166
      hash = hash <> encode_83(quantised_maximum_value, 1) <> encode_83(encode_dc(dc), 4)

      # Each AC factor is quantised against the maximum and becomes two digits.
      Enum.reduce(ac, hash, fn factor, acc ->
        acc <> (factor |> encode_ac(maximum_value) |> encode_83(2))
      end)
    end
  end
  # Computes the DCT factors for every (x, y) component pair, one Task per
  # component. `pixels` is a flat RGBA byte list (4 bytes per pixel; alpha is
  # ignored). Returns [dc | ac] in row-major component order.
  defp get_factors(pixels, width, height, components_y, components_x) do
    bytes_per_pixel = 4
    bytes_per_row = width * bytes_per_pixel
    scale = 1 / (width * height)
    tasks =
      for y <- 0..(components_y - 1),
          x <- 0..(components_x - 1),
          reduce: [] do
        acc ->
          # The DC component (x = y = 0) is not doubled.
          normalisation = if x === 0 && y === 0, do: 1, else: 2
          acc ++
            [
              Task.async(fn ->
                [total_r, total_g, total_b] =
                  for x1 <- 0..(width - 1),
                      y1 <- 0..(height - 1),
                      reduce: [0, 0, 0] do
                    rgb ->
                      basis =
                        normalisation *
                          :math.cos(:math.pi() * x * x1 / width) *
                          :math.cos(:math.pi() * y * y1 / height)
                      [r, g, b] = rgb
                      [
                        r +
                          basis *
                            s_rgb_to_linear(
                              Enum.fetch!(pixels, bytes_per_pixel * x1 + 0 + bytes_per_row * y1)
                            ),
                        g +
                          basis *
                            s_rgb_to_linear(
                              Enum.fetch!(pixels, bytes_per_pixel * x1 + 1 + bytes_per_row * y1)
                            ),
                        b +
                          basis *
                            s_rgb_to_linear(
                              Enum.fetch!(pixels, bytes_per_pixel * x1 + 2 + bytes_per_row * y1)
                            )
                      ]
                  end
                [total_r * scale, total_g * scale, total_b * scale]
              end)
            ]
      end
    tasks
    |> Task.yield_many(60_000)
    |> Enum.map(fn {_, {:ok, result}} -> result end)
  end
  # Encodes `value` as exactly `length` base-83 digits, most significant first.
  defp encode_83(_, 0), do: ""
  defp encode_83(value, length) do
    for i <- 1..length,
        reduce: "" do
      hash ->
        digit =
          floor(
            rem(
              floor(floor(value / 1) / :math.pow(83, length - i)),
              83
            ) / 1
          )
        hash <> String.at(@digit_characters, digit)
    end
  end
  # Packs the DC (average colour) into a single 24-bit RGB integer.
  defp encode_dc([r, g, b]) do
    r = linear_to_s_rgb(r)
    g = linear_to_s_rgb(g)
    b = linear_to_s_rgb(b)
    r * 0x10000 + g * 0x100 + b
  end
  # Quantises one AC factor's three channels into a single 0..6858 value
  # (three digits in base 19 packed together).
  defp encode_ac([r, g, b], maximum_value) do
    quant = fn value ->
      sign = if value / maximum_value < 0, do: -1, else: 1
      floor(
        Enum.max([
          0.0,
          Enum.min([
            18.0,
            floor(sign * :math.pow(abs(value / maximum_value), 0.5) * 9 + 9.5)
          ])
        ]) / 1
      )
    end
    quant.(r) * 19 * 19 + quant.(g) * 19 + quant.(b)
  end
  # Converts an sRGB byte (0..255) to a linear 0.0..1.0 value.
  defp s_rgb_to_linear(value) do
    v = value / 255.0
    if v <= 0.04045 do
      v / 12.92
    else
      :math.pow((v + 0.055) / 1.055, 2.4)
    end
  end
  # Converts a linear 0.0..1.0 value back to an sRGB byte (0..255).
  # `trunc(x + 0.5)` implements round-half-up; the previous `round(x + 0.5)`
  # rounded twice, mapping pure white to 256 (out of byte range).
  defp linear_to_s_rgb(value) do
    v = max(0, min(1, value))
    if v <= 0.0031308 do
      trunc(v * 12.92 * 255 + 0.5)
    else
      trunc((1.055 * :math.pow(v, 1 / 2.4) - 0.055) * 255 + 0.5)
    end
  end
end
|
lib/blur_hash.ex
| 0.857723
| 0.555134
|
blur_hash.ex
|
starcoder
|
defmodule Mix.Tasks.Sfc.Gen.Init do
  @moduledoc """
  Generates a Surface component.
  """
  use Mix.Task
  @switches [template: :boolean, namespace: :string, demo: :boolean, context_app: :string]
  @default_opts [template: true, namespace: "components", demo: true]
  @aliases [t: :template, n: :namespace, d: :demo]
  @doc false
  def run(args) do
    opts = parse_opts(args)
    namespace_parts = validate_namespace!(opts[:namespace])
    # Build template assigns from the namespace plus a fixed "card" demo name.
    assigns = Mix.SfcGenLive.inflect(namespace_parts, "card") ++ opts
    # Wire Surface into the project: formatter, live-reload, and the web view macro.
    inject_in_formatter_exs()
    inject_live_reload_config(assigns[:web_path])
    inject_in_app_web_view_macro(assigns[:web_module_path])
    maybe_include_demo(opts, assigns)
  end
  # Parses CLI args against @switches/@aliases and layers them over the defaults.
  defp parse_opts(args) do
    {opts, _parsed} =
      OptionParser.parse!(args,
        strict: @switches,
        aliases: @aliases
      )
    merged_opts =
      @default_opts
      |> Keyword.merge(opts)
      |> Mix.SfcGenLive.put_context_app(opts[:context_app])
    merged_opts
  end
  # Splits the namespace into parts and aborts with usage help when it is not
  # a valid module path.
  defp validate_namespace!(namespace) do
    namespace_parts = Mix.SfcGenLive.split_name(namespace)
    cond do
      not Mix.SfcGenLive.valid_namespace?(namespace_parts) ->
        raise_with_help(
          "Expected the namespace, #{inspect(namespace)}, to be a valid module name"
        )
      true ->
        namespace_parts
    end
  end
  @spec raise_with_help(String.t()) :: no_return()
  defp raise_with_help(msg) do
    Mix.raise("""
    #{msg}
    mix sfc.gen.init takes
    - a `--[no-]demo` option, default true, that controls whether
      a demo component will be generated in the app.
    - a `--[no-]template` boolean option, default true, which specifies whether the
      demo component template will be in a `.sface` file or in a `~H` sigil in
      the component module
    - an optional `--namespace` option that is a relative path
      in `lib/my_app_web` where the demo card component will be created. The default
      value is `components`. The `--namespace` option is ignored if
      `--no-demo` is passed.
    For example:
        mix sfc.gen.init --namespace my_components
    will create `lib/my_app_web/my_components/card.ex` and `lib/my_app_web/my_components/card.sface`
    """)
  end
  # Copies the demo card component (and optionally its .sface template) into
  # the web app, unless --no-demo was given.
  defp maybe_include_demo(opts, assigns) do
    if opts[:demo] do
      paths = Mix.SfcGenLive.generator_paths()
      files = [
        {:eex, "card.ex", Path.join(assigns[:web_path], "#{assigns[:path]}.ex")}
      ]
      template_files = [
        {:eex, "card.sface", Path.join(assigns[:web_path], "#{assigns[:path]}.sface")}
      ]
      Mix.Phoenix.copy_from(paths, "priv/templates/sfc.gen.init", assigns, files)
      if opts[:template] do
        Mix.Phoenix.copy_from(paths, "priv/templates/sfc.gen.init", assigns, template_files)
      end
    end
  end
  # Adds :surface to the import_deps list in .formatter.exs, once.
  def inject_in_formatter_exs() do
    file_path = ".formatter.exs"
    file = File.read!(file_path)
    unless Regex.match?(~r/import_deps:[^]]+:surface/, file) do
      Mix.shell().info([:green, "* injecting ", :reset, Path.relative_to_cwd(file_path)])
      file = String.replace(file, ~r/(import_deps:\s*\[[^]]+)\]/, "\\1, :surface]")
      File.write!(file_path, file)
    end
  end
  # Adds a .sface pattern to the live_reload watch list in config/dev.exs, once.
  def inject_live_reload_config(web_path) do
    file_path = "config/dev.exs"
    file = File.read!(file_path)
    unless Regex.match?(~r/live_reload:[^]]+\(sface\)/s, file) do
      Mix.shell().info([:green, "* injecting ", :reset, Path.relative_to_cwd(file_path)])
      file =
        String.replace(
          file,
          ~r/(live_reload: .*\n)( *~r)([^]]+")(\s*)\]/s,
          "\\1\\2\\3,\n\\2\"#{web_path}/live/.*(sface)$\"\\4]"
        )
      File.write!(file_path, file)
    end
  end
  # Inserts `import Surface` into the `view` quote block of the web module.
  def inject_in_app_web_view_macro(web_module_path) do
    file = File.read!(web_module_path)
    file =
      Mix.SfcGenLive.insert_in_blocks_matching_fragment(
        file,
        "def view do",
        "import Surface",
        :quote,
        :first
      )
    File.write!(web_module_path, file)
  end
end
|
lib/mix/tasks/sfc.gen.init.ex
| 0.677154
| 0.401834
|
sfc.gen.init.ex
|
starcoder
|
defmodule ExDebugger.Tokenizer.NestedModule do
  @moduledoc false
  use CommendableComments
  @modulecomment """
  Nested Modules require special attention when employing `use ExDebugger`. This is because by default, every module
  imports `Kernel` which includes `def` and `defp` which in the interest of hijacking the same requires `use ExDebugger`
  to import selectively:
  ```
  import Kernel, except: [def: 2, defp: 2]
  import ExDebugger, only: [def: 2, defp: 2]
  ```
  `import` is lexcially scoped, which means that accordingly, every nested module will import the functionality as done
  by the parent module. For example the following works:
  ```elixir
  defmodule A do
    def a, do: 1
  end
  defmodule B do
    import A
    def b, do: a()
    defmodule C do
      def c, do: a()
    end
  end
  iex(_)> B.C.c
  1
  ```
  This means that automatically each nested module would import the modified macros from `ExDebugger` and end up
  crashing as `d/5` is not incorporated in their module definitions(as opposed to the parent module because `use` is not
  lexically scoped).
  It is not sensible to fix this crashing by automatically incorporating `d/5` in the nested modules as well as a
  `use ExDebugger` on a parent module level having effect on the nested modules occurring therein is too implicit of
  behaviour. As such, in order to maintain the existing convenience without crashing stuff this module has been coined
  to collect all nested modules that explicitly employ `use ExDebugger` so we can detect whether or not definitions in a
  nested module need to be annotated or not.
  """
  @doc false
  # Entry point: walks a token stream and returns the names of all nested
  # modules that explicitly `use ExDebugger`.
  def usage_ex_debugger(tokens) do
    usage_ex_debugger(tokens, %{ls: [], module_name: nil})
  end
  @doc false
  # A `defmodule` token: remember the module name currently being defined.
  def usage_ex_debugger([{:identifier, _, :defmodule} | tl], acc) do
    {module_name, remainder} = name(tl)
    usage_ex_debugger(remainder, %{module_name: module_name, ls: acc.ls})
  end
  # A `use ExDebugger` token pair: record the enclosing module's name.
  def usage_ex_debugger([{:identifier, _, :use}, {:alias, _, :ExDebugger} | tl], acc) do
    usage_ex_debugger(tl, %{module_name: nil, ls: [acc.module_name | acc.ls]})
  end
  def usage_ex_debugger([_ | tl], acc), do: usage_ex_debugger(tl, acc)
  def usage_ex_debugger([], %{ls: ls}), do: ls
  @doc false
  # Consumes the alias tokens that follow `defmodule`, returning the composed
  # module name plus the remaining tokens (everything after `do`).
  def name(ls) when is_list(ls) do
    {acc, tl} = name(ls, [])
    {
      acc
      |> Enum.reverse()
      |> Module.concat(),
      tl
    }
  end
  # NOTE: no `@doc` before these clauses — `@doc` on a private function is
  # always discarded and triggers a compiler warning.
  defp name([{:alias, _, name_portion} | tl], acc), do: name(tl, [name_portion | acc])
  defp name([{:., _} | tl], acc), do: name(tl, acc)
  defp name([{:do, _} | tl], acc), do: {acc, tl}
end
|
lib/ex_debugger/tokenizer/nested_module.ex
| 0.799051
| 0.845241
|
nested_module.ex
|
starcoder
|
defmodule Asteroid.Crypto.Key do
@moduledoc """
Convenience module to work with cryptographic keys
"""
import Asteroid.Utils
alias JOSE.JWK
defmodule InvalidUseError do
@moduledoc """
Error returned when a `t:key_config_entry/0` is invalid because its `:use` is invalid
`:use` must be one atom specified in `t:key_use`.
"""
defexception []
@impl true
def message(_), do: "Invalid `:use` option (must be one of: [:sign, :enc])"
end
defmodule NoSuitableKeyError do
@moduledoc """
Error returned when no suitable key was found
This can be returned, for instance, when trying to find an encryption key for a client that
has published only signing keys.
"""
defexception []
@impl true
def message(_), do: "no suitable key was found"
end
  @typedoc """
  A JSON web key in its raw map format
  ## Example
  ```elixir
  %{
    "d" => "<KEY>",
    "dp" => "BwKfV3Akq5_MFZDFZ<KEY>",
    "dq" => "<KEY>",
    "e" => "AQAB",
    "kty" => "RSA",
    "n" => "<KEY> <KEY> <KEY> MTBQY4uDZlxvb3qCo5ZwKh9kG4LT6_I5IhlJH7aGhyxXFvUK-DWNmoudF8\n NAco9_h9iaGNj8q2ethFkMLs91kzk2PAcDTW9gb54h4FRWyuXpoQ",
    "p" => "<KEY>",
    "q" => "<KEY>",
    "qi" => "<KEY>"
  }
  ```
  """
  @type t :: map()
  @type name :: String.t()
  @typedoc """
  Key config options
  A key config entry can have the following values:
  - `{:pem_file, Keyword.t()}` to load a PEM file stored in the disc, and that includes
  a private key. The options of the `Keyword.t()` are:
    - `:path`: the path to the file. **Mandatory**
    - `:password`: the password of the file to decrypt it, if any
    - `:use`: a `t:key_use/0`. **Mandatory**
    - `:advertise`: a boolean to determine whether the key should be advertised on the
    jwk URI endpoint or not. Defaults to `true`
  - `{:map, Keyword.t()}`: a `JOSE.JWK` converted to a map (using for instance
  `JOSE.JWK.to_map/1`). The options of the `Keyword.t()` are:
    - `:key`: a key as returned by `JOSE.JWK.to_map/1`, for instance:
    `{%{kty: :jose_jwk_kty_oct},
    %{"k" => "P9dGnU_We5thJOOigUGtl00WmubLVAAr1kYsAUP80Sc", "kty" => "oct"}}`. **Mandatory**
    - `:use`: a `t:key_use/0`. **Mandatory**
    - `:advertise`: a boolean to determine whether the key should be advertised on the
    jwk URI endpoint or not. Defaults to `true`
  - `{:auto_gen, any()}`: configuration to automatically generated a key on startup.
  The options of the `Keyword.t()` are:
    - `params`: the parameters that will be passed to `JOSE.JWK.generate_key/1`. **Mandatory**
    - `:use`: a `t:key_use/0`. **Mandatory**
    - `:advertise`: a boolean to determine whether the key should be advertised on the
    jwk URI endpoint or not. Defaults to `true`
  """
  @type key_config_entry ::
          {:pem_file, Keyword.t()}
          | {:map, Keyword.t()}
          | {:auto_gen, Keyword.t()}
  @typedoc """
  Key config entry
  Each key requires a name and its associated config. Keys are referred and used by their
  names.
  """
  @type key_config :: %{required(name()) => key_config_entry()}
  @type key_use :: :sig | :enc
  @typedoc """
  JOSE JWS signature algorithm
  Example of output of `JOSE.JWA.supports/0`:
  ```elixir
  iex> JOSE.JWA.supports() |> Enum.find(fn {:jws, _} -> true; _ -> false end) |> elem(1)
  {:alg,
  ["ES256", "ES384", "ES512", "HS256", "HS384", "HS512", "PS256", "PS384",
  "PS512", "RS256", "RS384", "RS512"]}
  ```
  """
  # NOTE(review): these algorithm whitelists are not referenced in this chunk
  # of the module — presumably used by validation code further down; verify.
  @jws_alg [
    "ES256",
    "ES384",
    "ES512",
    "Ed25519",
    "Ed25519ph",
    "Ed448",
    "Ed448ph",
    "HS256",
    "HS384",
    "HS512",
    "PS256",
    "PS384",
    "PS512",
    "Poly1305",
    "RS256",
    "RS384",
    "RS512"
  ]
  @type jws_alg :: String.t()
  @typedoc """
  JOSE JWE algorithm
  Example of output of `JOSE.JWA.supports/0`:
  ```elixir
  iex> JOSE.JWA.supports() |> Enum.find(fn {:jwe, _, _, _} -> true; _ -> false end) |> elem(1)
  {:alg,
  ["A128GCMKW", "A128KW", "A192GCMKW", "A192KW", "A256GCMKW", "A256KW",
  "ECDH-ES", "ECDH-ES+A128KW", "ECDH-ES+A192KW", "ECDH-ES+A256KW",
  "PBES2-HS256+A128KW", "PBES2-HS384+A192KW", "PBES2-HS512+A256KW", "RSA1_5",
  "dir"]}
  ```
  """
  @jwe_alg [
    "A128GCMKW",
    "A128KW",
    "A192GCMKW",
    "A192KW",
    "A256GCMKW",
    "A256KW",
    "ECDH-ES",
    "ECDH-ES+A128KW",
    "ECDH-ES+A192KW",
    "ECDH-ES+A256KW",
    "PBES2-HS256+A128KW",
    "PBES2-HS384+A192KW",
    "PBES2-HS512+A256KW",
    "RSA-OAEP",
    "RSA-OAEP-256",
    "RSA1_5",
    "dir"
  ]
  @type jwe_alg :: String.t()
  @typedoc """
  JOSE JWE encryption algorithm
  Example of output of `JOSE.JWA.supports/0`:
  ```elixir
  iex> JOSE.JWA.supports() |> Enum.find(fn {:jwe, _, _, _} -> true; _ -> false end) |> elem(2)
  {:enc,
  ["A128CBC-HS256", "A128GCM", "A192CBC-HS384", "A192GCM", "A256CBC-HS512",
  "A256GCM"]}
  ```
  """
  @jwe_enc [
    "A128CBC-HS256",
    "A128GCM",
    "A192CBC-HS384",
    "A192GCM",
    "A256CBC-HS512",
    "A256GCM",
    "ChaCha20/Poly1305"
  ]
  @type jwe_enc :: String.t()
@spec load_from_config!() :: :ok
def load_from_config!() do
  {cache_module, cache_opts} = astrenv(:crypto_keys_cache)

  # Boot the cache if it exposes a start function; `start_link/1` wins over
  # `start/1`, and a cache exposing neither is assumed to be already running.
  cond do
    function_exported?(cache_module, :start_link, 1) ->
      cache_module.start_link(cache_opts)

    function_exported?(cache_module, :start, 1) ->
      cache_module.start(cache_opts)

    true ->
      nil
  end

  # Names currently present in the cache.
  existing_keys =
    Enum.map(cache_module.get_all(cache_opts), fn {key_name, _key} -> key_name end)

  # Build and store every configured key, collecting the names we inserted.
  inserted_keys =
    Enum.map(astrenv(:crypto_keys, []), fn {key_name, key_config} ->
      jwk = prepare!(key_config)
      :ok = cache_module.put(key_name, jwk, cache_opts)
      key_name
    end)

  # Drop cached keys that are no longer part of the configuration.
  Enum.each(existing_keys -- inserted_keys, &cache_module.delete(&1, cache_opts))

  :ok
end
@spec prepare!(key_config_entry()) :: %JOSE.JWK{}
def prepare!({:pem_file, params}) do
  ensure_use!(params)

  # An optional password unlocks an encrypted PEM file
  # (JOSE.JWK.from_pem_file/2 takes the password first).
  jwk =
    if params[:password] do
      JWK.from_pem_file(params[:password], params[:path])
    else
      JWK.from_pem_file(params[:path])
    end

  finalize_key(jwk, params)
end

def prepare!({:map, params}) do
  ensure_use!(params)

  params[:key]
  |> JWK.from_map()
  |> finalize_key(params)
end

def prepare!({:auto_gen, params}) do
  ensure_use!(params)

  params[:params]
  |> JWK.generate_key()
  |> finalize_key(params)
end

# Raises when the mandatory `:use` option is missing.
defp ensure_use!(params) do
  if params[:use] == nil do
    raise InvalidUseError
  end
end

# Common post-processing for every key source: tag the key use, derive the
# "kid" from the key's thumbprint, and record the advertisement flag.
defp finalize_key(jwk, params) do
  jwk
  |> set_key_use(params[:use])
  |> set_key_id()
  |> set_advertised(params[:advertise])
end
@spec get(name()) :: {:ok, %JOSE.JWK{}} | {:error, Exception.t()}
def get(key_name) do
  # Look the key up in the configured key cache.
  {cache, opts} = astrenv(:crypto_keys_cache)
  cache.get(key_name, opts)
end
@doc """
Returns all the keys
Note that it returns the private keys
"""
@spec get_all() :: [%JOSE.JWK{}]
def get_all() do
{cache_module, cache_opts} = astrenv(:crypto_keys_cache)
for {_key_name, jwk} <- cache_module.get_all(cache_opts) do
JOSE.JWK.from(jwk)
end
end
@doc """
Returns all the public keys
"""
@spec get_all_public() :: [%JOSE.JWK{}]
def get_all_public() do
{cache_module, cache_opts} = astrenv(:crypto_keys_cache)
for {_key_name, jwk} <- cache_module.get_all(cache_opts) do
JOSE.JWK.to_public(jwk)
end
end
@spec set_key_use(%JOSE.JWK{}, key_use()) :: %JOSE.JWK{}
def set_key_use(%JOSE.JWK{} = jwk, key_use) when key_use in [:sig, :enc] do
  # "use" is the standard JWK member for key use (signature or encryption).
  fields = Map.put(jwk.fields, "use", Atom.to_string(key_use))
  %{jwk | fields: fields}
end
@spec set_key_ops(%JOSE.JWK{}, [String.t()]) :: %JOSE.JWK{}
def set_key_ops(%JOSE.JWK{} = jwk, key_ops) when is_list(key_ops) do
  # Store the permitted operations under the standard "key_ops" member.
  fields = Map.put(jwk.fields, "key_ops", key_ops)
  %{jwk | fields: fields}
end
@spec set_key_id(%JOSE.JWK{}) :: %JOSE.JWK{}
def set_key_id(jwk) do
  # Derive a stable key id from the key's thumbprint.
  kid = JOSE.JWK.thumbprint(jwk)
  %{jwk | fields: Map.put(jwk.fields, "kid", kid)}
end
@spec set_key_sig_alg(%JOSE.JWK{}, jws_alg()) :: %JOSE.JWK{}
def set_key_sig_alg(jwk, jws_alg) when jws_alg in @jws_alg do
  # Record the JWS signature algorithm in the "alg" member.
  fields = Map.put(jwk.fields, "alg", jws_alg)
  %{jwk | fields: fields}
end
@spec set_key_enc_alg(%JOSE.JWK{}, jwe_alg()) :: %JOSE.JWK{}
def set_key_enc_alg(jwk, jwe_alg) when jwe_alg in @jwe_alg do
  # Record the JWE key-management algorithm in the "alg" member.
  fields = Map.put(jwk.fields, "alg", jwe_alg)
  %{jwk | fields: fields}
end
@spec set_key_enc_enc(%JOSE.JWK{}, jwe_enc()) :: %JOSE.JWK{}
def set_key_enc_enc(jwk, jwe_enc) when jwe_enc in @jwe_enc do
  # Record the JWE content-encryption algorithm in the "enc" member.
  fields = Map.put(jwk.fields, "enc", jwe_enc)
  %{jwk | fields: fields}
end
@spec set_advertised(%JOSE.JWK{}, boolean() | nil) :: %JOSE.JWK{}
defp set_advertised(jwk, advertise?) do
  # Keys are advertised by default: anything other than an explicit `false`
  # (including `nil`, i.e. option not given) counts as `true`.
  %{jwk | fields: Map.put(jwk.fields, "advertise", advertise? != false)}
end
end
|
lib/asteroid/crypto/key.ex
| 0.95363
| 0.834272
|
key.ex
|
starcoder
|
defmodule Phoenix.Tracker do
  @moduledoc ~S"""
  Provides distributed Presence tracking to processes.

  The `Tracker` API is used as a facade for a pool of `Phoenix.Tracker.Shard`s.
  The responsibility of which calls go to which `Shard` is determined based on
  the topic, on which a given function is called.

  Tracker shards use a heartbeat protocol and CRDT to replicate presence
  information across a cluster in an eventually consistent, conflict-free
  manner. Under this design, there is no single source of truth or global
  process. Each node runs a pool of `Phoenix.Tracker.Shard`s and node-local
  changes are replicated across the cluster and handled locally as a diff of
  changes.

    * `tracker` - The name of the tracker handler module implementing the
      `Phoenix.Tracker` behaviour
    * `tracker_opts` - The list of options to pass to the tracker handler
    * `pool_opts` - The list of options used to construct the shard pool

  ## Required `pool_opts`:

    * `:name` - The name of the server, such as: `MyApp.Tracker`
      This will also form the common prefix for all shard names
    * `:pubsub_server` - The name of the PubSub server, such as: `MyApp.PubSub`

  ## Optional `pool_opts`:

    * `:broadcast_period` - The interval in milliseconds to send delta broadcasts
      across the cluster. Default `1500`
    * `:max_silent_periods` - The max integer of broadcast periods for which no
      delta broadcasts have been sent. Default `10` (15s heartbeat)
    * `:down_period` - The interval in milliseconds to flag a replica
      as temporarily down. Default `broadcast_period * max_silent_periods * 2`
      (30s down detection). Note: This must be at least 2x the `broadcast_period`.
    * `:permdown_period` - The interval in milliseconds to flag a replica
      as permanently down, and discard its state.
      Note: This must be at least greater than the `down_period`.
      Default `1_200_000` (20 minutes)
    * `:clock_sample_periods` - The numbers of heartbeat windows to sample
      remote clocks before collapsing and requesting transfer. Default `2`
    * `:max_delta_sizes` - The list of delta generation sizes to keep before
      falling back to sending entire state. Defaults `[100, 1000, 10_000]`.
    * `:log_level` - The log level to log events, defaults `:debug` and can be
      disabled with `false`
    * `:pool_size` - The number of tracker shards to launch. Default `1`

  ## Implementing a Tracker

  To start a tracker, first add the tracker to your supervision tree:

      worker(MyTracker, [[name: MyTracker, pubsub_server: MyPubSub]])

  Next, implement `MyTracker` with support for the `Phoenix.Tracker`
  behaviour callbacks. An example of a minimal tracker could include:

      defmodule MyTracker do
        @behaviour Phoenix.Tracker

        def start_link(opts) do
          opts = Keyword.merge([name: __MODULE__], opts)
          Phoenix.Tracker.start_link(__MODULE__, opts, opts)
        end

        def init(opts) do
          server = Keyword.fetch!(opts, :pubsub_server)
          {:ok, %{pubsub_server: server, node_name: Phoenix.PubSub.node_name(server)}}
        end

        def handle_diff(diff, state) do
          for {topic, {joins, leaves}} <- diff do
            for {key, meta} <- joins do
              IO.puts "presence join: key \"#{key}\" with meta #{inspect meta}"
              msg = {:join, key, meta}
              Phoenix.PubSub.direct_broadcast!(state.node_name, state.pubsub_server, topic, msg)
            end
            for {key, meta} <- leaves do
              IO.puts "presence leave: key \"#{key}\" with meta #{inspect meta}"
              msg = {:leave, key, meta}
              Phoenix.PubSub.direct_broadcast!(state.node_name, state.pubsub_server, topic, msg)
            end
          end
          {:ok, state}
        end
      end

  Trackers must implement `start_link/1`, `init/1`, and `handle_diff/2`.
  The `init/1` callback allows the tracker to manage its own state when
  running within the `Phoenix.Tracker` server. The `handle_diff` callback
  is invoked with a diff of presence join and leave events, grouped by
  topic. As replicas heartbeat and replicate data, the local tracker state is
  merged with the remote data, and the diff is sent to the callback. The
  handler can use this information to notify subscribers of events, as
  done above.

  ## Special Considerations

  Operations within `handle_diff/2` happen *in the tracker server's context*.
  Therefore, blocking operations should be avoided when possible, and offloaded
  to a supervised task when required. Also, a crash in the `handle_diff/2` will
  crash the tracker server, so operations that may crash the server should be
  offloaded with a `Task.Supervisor` spawned process.
  """
  use Supervisor
  import Supervisor.Spec
  alias Phoenix.Tracker.Shard
  require Logger

  @type presence :: {key :: String.t, meta :: map}
  @type topic :: String.t

  @callback init(Keyword.t) :: {:ok, state :: term} | {:error, reason :: term}
  @callback handle_diff(%{topic => {joins :: [presence], leaves :: [presence]}}, state :: term) :: {:ok, state :: term}

  ## Client

  @doc """
  Tracks a presence.

    * `server_name` - The registered name of the tracker server
    * `pid` - The Pid to track
    * `topic` - The `Phoenix.PubSub` topic for this presence
    * `key` - The key identifying this presence
    * `meta` - The map of metadata to attach to this presence

  A process may be tracked multiple times, provided the topic and key pair
  are unique for any prior calls for the given process.

  ## Examples

      iex> Phoenix.Tracker.track(MyTracker, self(), "lobby", u.id, %{stat: "away"})
      {:ok, "1WpAofWYIAA="}

      iex> Phoenix.Tracker.track(MyTracker, self(), "lobby", u.id, %{stat: "away"})
      {:error, {:already_tracked, #PID<0.56.0>, "lobby", "123"}}
  """
  @spec track(atom, pid, topic, term, map) :: {:ok, ref :: binary} | {:error, reason :: term}
  def track(tracker_name, pid, topic, key, meta) when is_pid(pid) and is_map(meta) do
    tracker_name
    |> Shard.name_for_topic(topic, pool_size(tracker_name))
    |> GenServer.call({:track, pid, topic, key, meta})
  end

  @doc """
  Untracks a presence.

    * `server_name` - The registered name of the tracker server
    * `pid` - The Pid to untrack
    * `topic` - The `Phoenix.PubSub` topic to untrack for this presence
    * `key` - The key identifying this presence

  All presences for a given Pid can be untracked by calling the
  `Phoenix.Tracker.untrack/2` signature of this function.

  ## Examples

      iex> Phoenix.Tracker.untrack(MyTracker, self(), "lobby", u.id)
      :ok
      iex> Phoenix.Tracker.untrack(MyTracker, self())
      :ok
  """
  @spec untrack(atom, pid, topic, term) :: :ok
  def untrack(tracker_name, pid, topic, key) when is_pid(pid) do
    tracker_name
    |> Shard.name_for_topic(topic, pool_size(tracker_name))
    |> GenServer.call({:untrack, pid, topic, key})
  end
  def untrack(tracker_name, pid) when is_pid(pid) do
    # Untracking by pid alone must reach every shard, since the pid may be
    # tracked under topics routed to different shards.
    shard_multicall(tracker_name, {:untrack, pid})
    :ok
  end

  @doc """
  Updates a presence's metadata.

    * `server_name` - The registered name of the tracker server
    * `pid` - The Pid being tracked
    * `topic` - The `Phoenix.PubSub` topic to update for this presence
    * `key` - The key identifying this presence
    * `meta` - Either a new map of metadata to attach to this presence,
      or a function. The function will receive the current metadata as
      input and the return value will be used as the new metadata

  ## Examples

      iex> Phoenix.Tracker.update(MyTracker, self(), "lobby", u.id, %{stat: "zzz"})
      {:ok, "1WpAofWYIAA="}

      iex> Phoenix.Tracker.update(MyTracker, self(), "lobby", u.id, fn meta -> Map.put(meta, :away, true) end)
      {:ok, "1WpAofWYIAA="}
  """
  @spec update(atom, pid, topic, term, map | (map -> map)) :: {:ok, ref :: binary} | {:error, reason :: term}
  def update(tracker_name, pid, topic, key, meta) when is_pid(pid) and (is_map(meta) or is_function(meta)) do
    tracker_name
    |> Shard.name_for_topic(topic, pool_size(tracker_name))
    |> GenServer.call({:update, pid, topic, key, meta})
  end

  @doc """
  Lists all presences tracked under a given topic.

    * `server_name` - The registered name of the tracker server
    * `topic` - The `Phoenix.PubSub` topic

  Returns a lists of presences in key/metadata tuple pairs.

  ## Examples

      iex> Phoenix.Tracker.list(MyTracker, "lobby")
      [{123, %{name: "user 123"}}, {456, %{name: "user 456"}}]
  """
  @spec list(atom, topic) :: [presence]
  def list(tracker_name, topic) do
    tracker_name
    |> Shard.name_for_topic(topic, pool_size(tracker_name))
    |> Phoenix.Tracker.Shard.list(topic)
  end

  @doc """
  Gets presences tracked under a given topic and key pair.

    * `server_name` - The registered name of the tracker server
    * `topic` - The `Phoenix.PubSub` topic
    * `key` - The key of the presence

  Returns a lists of presence metadata.

  ## Examples

      iex> Phoenix.Tracker.get_by_key(MyTracker, "lobby", "user1")
      [{#PID<0.88.0>, %{name: "User 1"}}, {#PID<0.89.0>, %{name: "User 1"}}]
  """
  def get_by_key(tracker_name, topic, key) do
    tracker_name
    |> Shard.name_for_topic(topic, pool_size(tracker_name))
    |> Phoenix.Tracker.Shard.get_by_key(topic, key)
  end

  @doc """
  Gracefully shuts down by broadcasting permdown to all replicas.

  ## Examples

      iex> Phoenix.Tracker.graceful_permdown(MyTracker)
      :ok
  """
  @spec graceful_permdown(atom) :: :ok
  def graceful_permdown(tracker_name) do
    shard_multicall(tracker_name, :graceful_permdown)
    Supervisor.stop(tracker_name)
  end

  def start_link(tracker, tracker_opts, pool_opts) do
    name = Keyword.fetch!(pool_opts, :name)
    Supervisor.start_link(__MODULE__,
                          [tracker, tracker_opts, pool_opts, name],
                          name: name)
  end

  # Fix: the 3-element clause previously read `[tracker. tracker_opts, opts]`
  # (a `.` instead of a `,`), which is invalid in a pattern and broke compilation.
  def init([tracker, tracker_opts, opts]) do
    server_name = Keyword.fetch!(opts, :name)
    init([tracker, tracker_opts, opts, server_name])
  end
  def init([tracker, tracker_opts, opts, name]) do
    pool_size = Keyword.get(opts, :pool_size, 1)
    # The pool size is stored in a public ETS table so the client API can
    # route topics to shards without calling into the supervisor.
    ^name = :ets.new(name, [:set, :named_table, read_concurrency: true])
    true = :ets.insert(name, {:pool_size, pool_size})

    shards =
      for n <- 0..(pool_size - 1) do
        shard_name = Shard.name_for_number(name, n)
        shard_opts = Keyword.put(opts, :shard_number, n)
        worker(Phoenix.Tracker.Shard, [tracker, tracker_opts, shard_opts],
               id: shard_name)
      end

    supervise(shards, strategy: :one_for_one,
                      max_restarts: pool_size * 2,
                      max_seconds: 1)
  end

  # Reads the pool size recorded in ETS by `init/1`.
  defp pool_size(tracker_name) do
    [{:pool_size, size}] = :ets.lookup(tracker_name, :pool_size)
    size
  end

  # Sends `message` synchronously to every shard of the pool and collects
  # the replies.
  defp shard_multicall(tracker_name, message) do
    for shard_number <- 0..(pool_size(tracker_name) - 1) do
      tracker_name
      |> Shard.name_for_number(shard_number)
      |> GenServer.call(message)
    end
  end
end
|
lib/phoenix/tracker.ex
| 0.921623
| 0.745225
|
tracker.ex
|
starcoder
|
defmodule Plausible.Stats.Query do
  @moduledoc """
  Builds a stats query (date range, step granularity and filters) from
  request parameters.
  """

  defstruct date_range: nil, step_type: nil, period: nil, steps: nil, filters: %{}

  @doc """
  Shifts the query's date range back by its own length, producing the
  "previous period" query used for comparisons.
  """
  def shift_back(%__MODULE__{period: "day"} = query) do
    new_date = query.date_range.first |> Timex.shift(days: -1)
    Map.put(query, :date_range, Date.range(new_date, new_date))
  end

  def shift_back(query) do
    # `diff` is negative (first < last), so shifting by it moves backwards.
    diff = Timex.diff(query.date_range.first, query.date_range.last, :days) - 1
    new_first = query.date_range.first |> Timex.shift(days: diff)
    new_last = query.date_range.last |> Timex.shift(days: diff)
    Map.put(query, :date_range, Date.range(new_first, new_last))
  end

  @doc """
  Builds a query from request params. `tz` is the site timezone used to
  resolve "today"; unknown periods fall back to the last 30 days.
  """
  def from(_tz, %{"period" => "day", "date" => date} = params) do
    date = Date.from_iso8601!(date)

    %__MODULE__{
      period: "day",
      date_range: Date.range(date, date),
      step_type: "hour",
      filters: parse_filters(params)
    }
  end

  def from(tz, %{"period" => "day"} = params) do
    date = today(tz)

    %__MODULE__{
      period: "day",
      date_range: Date.range(date, date),
      step_type: "hour",
      filters: parse_filters(params)
    }
  end

  def from(tz, %{"period" => "7d"} = params) do
    end_date = today(tz)
    # NOTE(review): shifting by -7 yields an 8-day inclusive range — confirm
    # whether the window is intentionally "today plus the 7 previous days".
    start_date = end_date |> Timex.shift(days: -7)

    %__MODULE__{
      period: "7d",
      date_range: Date.range(start_date, end_date),
      step_type: "date",
      filters: parse_filters(params)
    }
  end

  def from(tz, %{"period" => "30d"} = params) do
    end_date = today(tz)
    start_date = end_date |> Timex.shift(days: -30)

    %__MODULE__{
      period: "30d",
      date_range: Date.range(start_date, end_date),
      step_type: "date",
      filters: parse_filters(params)
    }
  end

  def from(tz, %{"period" => "60d"} = params) do
    end_date = today(tz)
    start_date = end_date |> Timex.shift(days: -60)

    %__MODULE__{
      period: "60d",
      date_range: Date.range(start_date, end_date),
      step_type: "date",
      filters: parse_filters(params)
    }
  end

  def from(_tz, %{"period" => "month", "date" => date} = params) do
    start_date = Date.from_iso8601!(date) |> Timex.beginning_of_month()
    end_date = Timex.end_of_month(start_date)

    %__MODULE__{
      period: "month",
      date_range: Date.range(start_date, end_date),
      step_type: "date",
      # NOTE(review): Timex.diff(start, end, :days) is negative when
      # start < end — verify consumers expect this sign for `steps`.
      steps: Timex.diff(start_date, end_date, :days),
      filters: parse_filters(params)
    }
  end

  def from(tz, %{"period" => "6mo"} = params) do
    start_date =
      Timex.shift(today(tz), months: -5)
      |> Timex.beginning_of_month()

    %__MODULE__{
      period: "6mo",
      date_range: Date.range(start_date, today(tz)),
      step_type: "month",
      steps: 6,
      filters: parse_filters(params)
    }
  end

  def from(tz, %{"period" => "12mo"} = params) do
    start_date =
      Timex.shift(today(tz), months: -11)
      |> Timex.beginning_of_month()

    %__MODULE__{
      period: "12mo",
      date_range: Date.range(start_date, today(tz)),
      step_type: "month",
      steps: 12,
      filters: parse_filters(params)
    }
  end

  def from(_tz, %{"period" => "custom", "from" => from, "to" => to} = params) do
    from_date = Date.from_iso8601!(from)
    to_date = Date.from_iso8601!(to)

    %__MODULE__{
      period: "custom",
      date_range: Date.range(from_date, to_date),
      step_type: "date",
      filters: parse_filters(params)
    }
  end

  # Catch-all: unknown or missing period defaults to the last 30 days.
  # Note that any filters in the original params are dropped here.
  def from(tz, _) do
    __MODULE__.from(tz, %{"period" => "30d"})
  end

  defp today(tz) do
    Timex.now(tz) |> Timex.to_date()
  end

  # Fix: previously returned `nil` when no "filters" param was present,
  # contradicting the struct's `filters: %{}` default. Always return a map.
  defp parse_filters(params) do
    case params["filters"] do
      nil -> %{}
      filters -> Jason.decode!(filters)
    end
  end
end
|
lib/plausible/stats/query.ex
| 0.786295
| 0.516656
|
query.ex
|
starcoder
|
defmodule ExActivity do
  alias ExActivity.{Activity, Log}

  @moduledoc """
  Enables to log activity in a structured way to a MySQL database. The logs are saved in a *non blocking way*, to minimize overhead in your application when logging.

  The actual insertion in the database is done by using Elixir's `Task` functionality.

  The `log` functions accept a `ExActivity.Log` struct which is then saved to the database. Why the extra struct? Because the compiler is your friend.

  Log like so:

      ExActivity.log(%ExActivity.Log{
        type: "AuthController",
        action: "login",
        result: "ERROR",
        details: "invalid_email"
      })

  ## Properties

  * `type` - gives more context about the action you are logging; it could be the name of the controller where you log from, the context, ...
  * `action` - method name, sub-action in method, ...
  * `result` - outcome of the action such as ERROR, SUCCESS, WRONG_CODE, ...
  * `details` - more like metadata about current state
  * `data` - raw data, request parameters, db result, ...

  _For now everything should be string or integer (for some fields), we will try to expand this to accept more data/struct like structures, and encode them to JSON._

  ## It's a web world

  Next to the `ExActivity.log/1` method, there is also a `ExActivity.log/2` method that accepts a `Plug.Conn` conn as the second parameter. This way, the IP address and user agent will get appended to the log. For us, this is helpful information.
  """

  @doc """
  Accepts a `ExActivity.Log` struct and saves it to the database.

  Returns the result of `Task.start_link/1` for the asynchronous insert
  (the insert itself happens in the spawned task).
  """
  @spec log(Log.t) :: {:ok, pid}
  def log(%Log{} = log) do
    attrs = Map.from_struct(log)
    process_log(attrs)
  end

  @doc """
  Accepts a `ExActivity.Log` struct and a `Plug.Conn` conn and saves it to the database. The `conn` provides extra metadata like the IP address and user agent.
  """
  @spec log(Log.t, any) :: {:ok, pid}
  def log(%Log{} = log, conn) do
    log
    |> Map.from_struct()
    |> Map.merge(parse_conn(conn))
    |> process_log()
  end

  # Normalizes the attributes and inserts them asynchronously so logging
  # never blocks the caller.
  defp process_log(attrs) do
    attrs = cast(attrs)
    Task.start_link(fn -> Activity.log(attrs) end)
  end

  # Formats the remote IP of the connection as a string (e.g. "127.0.0.1").
  defp get_ip(conn) do
    to_string(:inet_parse.ntoa(conn.remote_ip))
  end

  # Extracts the metadata we append for web requests.
  defp parse_conn(conn) do
    %{
      user_agent: get_conn_header(conn, "user-agent"),
      ip: get_ip(conn)
    }
  end

  # Returns the value of a request header, or "" when absent.
  # Building a map means a later duplicate header wins, matching the
  # previous `Enum.into/2`-based implementation.
  defp get_conn_header(conn, header) do
    headers = Map.new(conn.req_headers)
    headers[header] || ""
  end

  defp cast(attrs) do
    attrs
    |> cast_integers()
  end

  # The `details` column is a string; stringify integer values up front.
  defp cast_integers(attrs) do
    if is_integer(Map.get(attrs, :details)) do
      Map.update!(attrs, :details, &Integer.to_string/1)
    else
      attrs
    end
  end
end
|
lib/ex_activity.ex
| 0.81899
| 0.553686
|
ex_activity.ex
|
starcoder
|
defmodule Ptolemy.Engines.KV do
  @moduledoc """
  `Ptolemy.Engines.KV` provides a public facing API for CRUD operations for the Vault KV2 engine.
  """
  alias Ptolemy.Engines.KV.Engine
  alias Ptolemy.Server

  @doc """
  Fetches all of a secret's keys and value via the `:kv_engine` configuration.

  See `fetch/2` for the description of the silent and version options.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.KV.read(:production, :engine1, :ptolemy)
  {:ok, %{
    "test" => "i am some value"
    ...
    }
  }
  ```
  """
  @spec read(atom(), atom(), atom(), boolean(), integer()) ::
          {:ok, String.t()} | {:error, String.t()}
  def read(server_name, engine_name, secret, silent \\ false, version \\ 0) do
    path = get_kv_path!(server_name, engine_name, secret, "data")
    path_read(server_name, path, silent, version)
  end

  @doc """
  Fetches all of a secret's keys and value via the `:kv_engine` configuration, errors out if an error occurs.
  """
  @spec read!(atom(), atom(), atom(), boolean(), integer()) :: any() | no_return()
  def read!(server_name, engine_name, secret, silent \\ false, version \\ 0) do
    case read(server_name, engine_name, secret, silent, version) do
      {:error, msg} -> raise RuntimeError, message: msg
      {:ok, resp} -> resp
    end
  end

  @doc """
  Fetches all of a given secret's key and values from a KV engine via the specified path.

  This function returns the full response of the remote vault server; enabling the silent option will only return a map with the key and value of the secret.
  The version option will allow you to fetch a specific version of the target secret.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.KV.path_read(:production, "secret/data/ptolemy")
  {:ok, %{
    "Foo" => "test"
    ...
    }
  }
  ```
  """
  @spec path_read(atom(), String.t(), boolean(), integer()) ::
          {:ok, String.t()} | {:error, String.t()}
  def path_read(server_name, secret, silent \\ false, version \\ 0) when is_bitstring(secret) do
    client = create_client(server_name)
    opts = [version: version]
    # NOTE(review): the result tag from Engine.read_secret/3 is discarded —
    # an `{:error, %{...}}` payload would match the `%{}` clause below and be
    # treated as a success. Confirm Engine's error shape before tightening.
    {_err, resp} = Engine.read_secret(client, secret, opts)

    case resp do
      %{} ->
        case silent do
          true ->
            {:ok,
             resp
             |> Map.get("data")
             |> Map.get("data")}

          false ->
            {:ok, resp}
        end

      _ ->
        {:error, "Read from kv engine failed"}
    end
  end

  @doc """
  Updates an already existing secret via the `:kv_engine` configuration.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.KV.update(:production, :engine1, :ptolemy, %{test: "i am a new value from config"})
  {:ok, "KV secret updated"}
  ```
  """
  @spec update(atom(), atom(), atom(), map(), integer() | nil) ::
          {:ok, String.t()} | {:error, String.t()}
  def update(server_name, engine_name, secret, payload, cas \\ nil) do
    path = get_kv_path!(server_name, engine_name, secret, "data")
    path_update(server_name, path, payload, cas)
  end

  @doc """
  Updates an already existing secret via the `:kv_engine` configuration, errors out if an error occurs.
  """
  @spec update!(atom(), atom(), atom(), map(), integer() | nil) :: :ok | no_return()
  def update!(server_name, engine_name, secret, payload, cas \\ nil) do
    case update(server_name, engine_name, secret, payload, cas) do
      {:error, msg} -> raise RuntimeError, message: msg
      _resp -> :ok
    end
  end

  @doc """
  Updates an already existing secret via the specified path.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.KV.path_update(:production, "secret/data/ptolemy", %{test: "i am up-to-date from path"}, 1)
  {:ok, "KV secret updated"}
  ```
  """
  # Fix: the spec previously declared 3 parameters for this 4-arity function
  # (the `cas \\ nil` argument was missing).
  @spec path_update(atom(), String.t(), map(), integer() | nil) ::
          {:ok, String.t()} | {:error, String.t()}
  def path_update(server_name, secret, payload, cas \\ nil) when is_bitstring(secret) do
    # In KV2 an update is a create with a check-and-set value.
    case path_create(server_name, secret, payload, cas) do
      {:ok, _} -> {:ok, "KV secret updated"}
      err -> err
    end
  end

  @doc """
  Creates a secret according to the path specified in the `:kv_engine` specification.

  ## Example
  ```
  iex(2)> Ptolemy.Engines.KV.create(:production, :engine1, :ptolemy, %{test: "i was created from config"})
  {:ok, "KV secret created"}
  ```
  """
  @spec create(atom(), atom(), atom(), map(), integer() | nil) ::
          {:ok, String.t()} | {:error, String.t()}
  def create(server_name, engine_name, secret, payload, cas \\ nil) do
    path = get_kv_path!(server_name, engine_name, secret, "data")
    path_create(server_name, path, payload, cas)
  end

  @doc """
  Creates a secret according to the path specified in the ":kv_engine" specification, errors out if an error occurs.
  """
  @spec create!(atom(), atom(), atom(), map(), integer() | nil) :: :ok | no_return()
  def create!(server_name, engine_name, secret, payload, cas \\ nil) do
    case create(server_name, engine_name, secret, payload, cas) do
      {:error, msg} -> raise RuntimeError, message: msg
      _resp -> :ok
    end
  end

  @doc """
  Creates a new secret via a KV engine at the specified path.

  ## Example
  ```elixir
  iex(2)> Ptolemy.Engines.KV.path_create(:production, "secret/data/new", %{test: "i am created from path"})
  {:ok, "KV secret created"}
  ```
  """
  @spec path_create(atom(), String.t(), map(), integer() | nil) ::
          {:ok, String.t()} | {:error, String.t()}
  def path_create(server_name, secret, payload, cas \\ nil) when is_bitstring(secret) do
    client = create_client(server_name)
    Engine.create_secret(client, secret, payload, cas)
  end

  @doc """
  Deletes a specific version of a secret via the `:kv_engine` configuration.

  Specifying false under the destroy parameter will "delete" the secret (secret will be sent to the recycling bin),
  specifying true will permanently destroy the secret.
  ```elixir
  iex(2)> Ptolemy.Engines.KV.delete(:production, :engine1, :ptolemy, [1,2], false)
  {:ok, "KV secret deleted"}
  ```
  """
  @spec delete(atom(), atom(), atom(), nonempty_list(integer()), boolean()) ::
          {:ok, String.t()} | {:error, String.t()}
  def delete(server_name, engine_name, secret, vers, destroy \\ false) do
    case destroy do
      true ->
        path = get_kv_path!(server_name, engine_name, secret, "destroy")
        path_destroy(server_name, path, vers)

      false ->
        path = get_kv_path!(server_name, engine_name, secret, "delete")
        path_delete(server_name, path, vers)
    end
  end

  @doc """
  Deletes a specific version of a secret via the `:kv_engine` configuration, errors out if an error occurs.
  """
  @spec delete!(atom(), atom(), atom(), nonempty_list(integer()), boolean()) :: :ok | no_return()
  def delete!(server_name, engine_name, secret, vers, destroy \\ false) do
    case delete(server_name, engine_name, secret, vers, destroy) do
      {:error, msg} -> raise RuntimeError, message: msg
      _resp -> :ok
    end
  end

  @doc """
  Deletes a specific version of a secret at a specified path.
  ```elixir
  iex(2)> Ptolemy.Engines.KV.path_delete(:production, "secret/delete/ptolemy", [1,2])
  {:ok, "KV secret deleted"}
  ```
  """
  @spec path_delete(atom(), String.t(), nonempty_list(integer())) ::
          {:ok, String.t()} | {:error, String.t()}
  def path_delete(server_name, secret, vers) do
    client = create_client(server_name)
    Engine.delete(client, secret, vers)
  end

  @doc """
  Destroys a specific version of a secret via the `:kv_engine` configuration.
  ```elixir
  iex(2)> Ptolemy.Engines.KV.destroy(:production, :engine1, :ptolemy, [1,2])
  {:ok, "KV secret destroyed"}
  ```
  """
  @spec destroy(atom(), atom(), String.t(), nonempty_list(integer())) ::
          {:ok, String.t()} | {:error, String.t()}
  def destroy(server_name, engine_name, secret, vers) do
    path = get_kv_path!(server_name, engine_name, secret, "destroy")
    path_destroy(server_name, path, vers)
  end

  @doc """
  Destroys a specific version of a secret via the `:kv_engine` configuration, errors out if an error occurs.
  """
  @spec destroy!(atom(), atom(), String.t(), nonempty_list(integer())) :: :ok | no_return()
  def destroy!(server_name, engine_name, secret, vers) do
    case destroy(server_name, engine_name, secret, vers) do
      {:error, msg} -> raise RuntimeError, message: msg
      _resp -> :ok
    end
  end

  @doc """
  Destroys a specific version of a secret at a specified path.
  ```elixir
  iex(2)> Ptolemy.Engines.KV.path_destroy(:production, "secret/destroy/ptolemy", [1,2])
  {:ok, "KV secret destroyed"}
  ```
  """
  @spec path_destroy(atom(), String.t(), nonempty_list(integer())) ::
          {:ok, String.t()} | {:error, String.t()}
  def path_destroy(server_name, secret, vers) do
    client = create_client(server_name)
    Engine.destroy(client, secret, vers)
  end

  # Tesla client function: builds an authenticated HTTP client for the
  # server's Vault endpoint (TLS 1.2 only, 10s receive timeout).
  defp create_client(server_name) do
    creds = Server.fetch_credentials(server_name)
    {:ok, http_opts} = Server.get_data(server_name, :http_opts)
    {:ok, url} = Server.get_data(server_name, :vault_url)

    Tesla.client(
      [
        {Tesla.Middleware.BaseUrl, "#{url}/v1"},
        {Tesla.Middleware.Headers, creds},
        {Tesla.Middleware.Opts, http_opts},
        {Tesla.Middleware.JSON, []}
      ],
      {Tesla.Adapter.Hackney, [ssl_options: [{:versions, [:"tlsv1.2"]}], recv_timeout: 10_000]}
    )
  end

  # Helper functions to make paths.
  # When `secret` is an atom, its path is looked up in the engine's
  # configured `secrets` map.
  defp get_kv_path!(server_name, engine_name, secret, operation) when is_atom(secret) do
    with {:ok, conf} <- Server.get_data(server_name, :engines),
         {:ok, kv_conf} <- Keyword.fetch(conf, engine_name),
         %{engine_path: path, secrets: secrets} <- kv_conf do
      {:ok, secret_path} = Map.fetch(secrets, secret)
      make_kv_path!(path, secret_path, operation)
    else
      {:error, "Not found!"} -> raise("#{server_name} does not have a kv_engine config")
      :error -> raise("Could not find engine_name in specified config")
    end
  end

  # When `secret` is a string, it is used as the secret path directly.
  defp get_kv_path!(server_name, engine_name, secret, operation) when is_bitstring(secret) do
    with {:ok, conf} <- Server.get_data(server_name, :engines),
         {:ok, kv_conf} <- Keyword.fetch(conf, engine_name),
         %{engine_path: path, secrets: _secrets} <- kv_conf do
      make_kv_path!(path, secret, operation)
    else
      {:error, "Not found!"} -> raise("#{server_name} does not have a kv_engine config")
      :error -> raise("Could not find engine_name in specified config")
    end
  end

  # Assembles "/<engine_path><operation><secret_path>"; assumes engine_path
  # ends with "/" and secret_path starts with "/" — TODO confirm in config docs.
  defp make_kv_path!(engine_path, secret_path, operation) do
    "/#{engine_path}#{operation}#{secret_path}"
  end
end
|
lib/engines/kv/kv.ex
| 0.932707
| 0.782455
|
kv.ex
|
starcoder
|
defmodule Membrane.RTP.JitterBuffer do
@moduledoc """
Element that buffers and reorders RTP packets based on `sequence_number`.
"""
use Membrane.Filter
use Bunch
alias Membrane.{RTP, Time}
alias Membrane.RTP.Utils
alias __MODULE__.{BufferStore, Record}
require Bitwise
require Membrane.Logger
@type packet_index :: non_neg_integer()
@timestamp_limit Bitwise.bsl(1, 32)
def_output_pad :output, caps: RTP, demand_mode: :auto
def_input_pad :input, caps: RTP, demand_mode: :auto
@default_latency 200 |> Time.milliseconds()
def_options clock_rate: [type: :integer, spec: RTP.clock_rate_t()],
latency: [
type: :time,
default: @default_latency,
description: """
Delay introduced by JitterBuffer
"""
]
defmodule State do
  @moduledoc false
  use Bunch.Access

  defstruct store: %BufferStore{},
            clock_rate: nil,
            latency: nil,
            waiting?: true,
            max_latency_timer: nil,
            timestamp_base: nil,
            previous_timestamp: nil

  # Fix: the type previously omitted `timestamp_base` and
  # `previous_timestamp`, which are part of the struct.
  @type t :: %__MODULE__{
          store: BufferStore.t(),
          clock_rate: RTP.clock_rate_t(),
          latency: Time.t(),
          waiting?: boolean(),
          max_latency_timer: reference() | nil,
          timestamp_base: non_neg_integer() | nil,
          previous_timestamp: non_neg_integer() | nil
        }
end
@impl true
def handle_init(%__MODULE__{latency: latency, clock_rate: clock_rate}) do
  # The buffer cannot operate without a configured latency.
  # Fix: the raise message previously misspelled "Latency" as "Latancy".
  if latency == nil do
    raise "Latency cannot be nil"
  end

  {:ok, %State{latency: latency, clock_rate: clock_rate}}
end
@impl true
def handle_start_of_stream(:input, _context, state) do
  # Schedule the end of the initial buffering window: until
  # :initial_latency_passed arrives, incoming buffers are only stored
  # (see the `waiting?: true` clause of handle_process/4).
  Process.send_after(
    self(),
    :initial_latency_passed,
    state.latency |> Time.to_milliseconds()
  )

  {:ok, %{state | waiting?: true}}
end
@impl true
def handle_end_of_stream(:input, _context, %State{store: store} = state) do
  # Drain everything left in the store before forwarding end_of_stream,
  # then reset the store.
  {actions, state} =
    store
    |> BufferStore.dump()
    |> Enum.map_reduce(state, &record_to_action/2)

  {{:ok, actions ++ [end_of_stream: :output]}, %State{state | store: %BufferStore{}}}
end
@impl true
# While `waiting?` is set we are still inside the initial latency window:
# buffers are accumulated but nothing is emitted yet.
def handle_process(:input, buffer, _context, %State{store: store, waiting?: true} = state) do
  state =
    case BufferStore.insert_buffer(store, buffer) do
      {:ok, result} ->
        %State{state | store: result}

      {:error, :late_packet} ->
        # The packet's slot was already flushed; drop it.
        Membrane.Logger.debug("Late packet has arrived")
        state
    end

  {:ok, state}
end

@impl true
# Normal operation: store the buffer and flush whatever is ready.
def handle_process(:input, buffer, _context, %State{store: store} = state) do
  case BufferStore.insert_buffer(store, buffer) do
    {:ok, result} ->
      state = %State{state | store: result}
      send_buffers(state)

    {:error, :late_packet} ->
      Membrane.Logger.debug("Late packet has arrived")
      {:ok, state}
  end
end
@impl true
# Events are forwarded unchanged via the default implementation.
def handle_event(pad, event, ctx, state), do: super(pad, event, ctx, state)
@impl true
def handle_other(:initial_latency_passed, _context, state) do
state = %State{state | waiting?: false}
send_buffers(state)
end
@impl true
def handle_other(:send_buffers, _context, state) do
state = %State{state | max_latency_timer: nil}
send_buffers(state)
end
# Emits all records that are due (older than the configured latency, plus any
# ordered run following them), then re-arms the flush timer.
defp send_buffers(%State{store: store} = state) do
  # Flushes buffers that stayed in queue longer than latency and any gaps before them
  {too_old_records, store} = BufferStore.flush_older_than(store, state.latency)
  # Additionally, flush buffers as long as there are no gaps
  {buffers, store} = BufferStore.flush_ordered(store)
  {actions, state} = (too_old_records ++ buffers) |> Enum.map_reduce(state, &record_to_action/2)
  state = %{state | store: store} |> set_timer()
  {{:ok, actions}, state}
end
@spec set_timer(State.t()) :: State.t()
# Arms a one-shot :send_buffers timer for the moment the oldest stored buffer
# exceeds the latency budget. No-op (second clause) when a timer is already
# pending; no timer is set when the store is empty.
defp set_timer(%State{max_latency_timer: nil, latency: latency} = state) do
  new_timer =
    case BufferStore.first_record_timestamp(state.store) do
      nil ->
        nil
      buffer_ts ->
        # Time already spent in the store, measured against monotonic time.
        since_insertion = Time.monotonic_time() - buffer_ts
        send_after_time = max(0, latency - since_insertion) |> Time.to_milliseconds()
        Process.send_after(self(), :send_buffers, send_after_time)
    end
  %State{state | max_latency_timer: new_timer}
end
defp set_timer(%State{max_latency_timer: timer} = state) when timer != nil, do: state
# A nil record represents a gap in the sequence: emit a discontinuity event.
defp record_to_action(nil, state) do
  action = {:event, {:output, %Membrane.Event.Discontinuity{}}}
  {action, state}
end
# Converts a stored record to a buffer action, translating the RTP timestamp
# into a stream pts and tracking timestamp rollovers.
# NOTE(review): @timestamp_limit is defined elsewhere in this module (not
# visible here); presumably 2^32 for 32-bit RTP timestamps - confirm.
defp record_to_action(%Record{buffer: buffer}, state) do
  %{timestamp: rtp_timestamp} = buffer.metadata.rtp
  # First buffer establishes the base; later pts values are relative to it.
  timestamp_base = state.timestamp_base || rtp_timestamp
  previous_timestamp = state.previous_timestamp || rtp_timestamp
  # timestamps in RTP don't have to be monotonic therefore there can be
  # a situation where in 2 consecutive packets the latter packet will have smaller timestamp
  # than the previous one while not overflowing the timestamp number
  # https://datatracker.ietf.org/doc/html/rfc3550#section-5.1
  timestamp_base =
    case Utils.from_which_rollover(previous_timestamp, rtp_timestamp, @timestamp_limit) do
      :next -> timestamp_base - @timestamp_limit
      :previous -> timestamp_base + @timestamp_limit
      :current -> timestamp_base
    end
  # Convert RTP clock ticks to Membrane time units.
  timestamp = div((rtp_timestamp - timestamp_base) * Time.second(), state.clock_rate)
  action = {:buffer, {:output, %{buffer | pts: timestamp}}}
  state = %{state | timestamp_base: timestamp_base, previous_timestamp: rtp_timestamp}
  {action, state}
end
end
|
lib/membrane/rtp/jitter_buffer.ex
| 0.841142
| 0.405625
|
jitter_buffer.ex
|
starcoder
|
defmodule PhoenixIntegration.Form.Tag do
  alias PhoenixIntegration.Form.Common
  @moduledoc false
  # A `Tag` is a representation of a value-providing HTML tag within a
  # Phoenix-style HTML form. Tags live on the leaves of a tree (nested
  # `Map`) representing the whole form. See [DESIGN.md](./DESIGN.md) for
  # more.
  # There are two types of tags.
  # - some tags are associated with an list of values. Those tags
  #   will have named ending in `[]`: `name="animal[nicknames][]`.
  # - others have one value, or occasionally zero values (such as an
  #   unchecked checkbox).
  defstruct has_list_value: false,
            # To accommodate the different tags, values are always stored in a
            # list. The empty list represents a tag without a value.
            values: [],
            # The name is as given in the HTML tag.
            name: nil,
            # The path is the name split up into a list of symbols representing
            # the tree structure implied by the[bracketed[name]].
            path: [],
            # The tag itself, like `"input"` or "textarea".
            tag: "",
            # Where relevant, the value of the "type=" attribute of the tag.
            # Otherwise should be unused.
            type: nil,
            # Whether the particular value is checked (checkboxes, selects).
            checked: false,
            # The original Floki tag.
            original: nil

  # Like `new/1`, but raises (via a failed match) when the Floki tag cannot
  # be converted - for callers that treat a malformed tag as a bug.
  def new!(floki_tag) do
    {:ok, %__MODULE__{} = tag} = new(floki_tag)
    tag
  end

  # Builds a Tag from a Floki tag. Returns `{:ok, tag}`, or a warning tuple
  # when the tag has no `name` attribute or the name contains no usable text.
  def new(floki_tag) do
    with(
      [name] <- Floki.attribute(floki_tag, "name"),
      :ok <- check_name(name)
    ) do
      {:ok, safe_new(floki_tag, name)}
    else
      [] ->
        {:warning, :tag_has_no_name, floki_tag}
      :empty_name ->
        {:warning, :empty_name, floki_tag}
    end
  end

  # Constructs the struct once the name is known to be usable.
  defp safe_new(floki_tag, name) do
    type =
      case Floki.attribute(floki_tag, "type") do
        # Placeholder value: `type=` is only meaningful for some tags.
        [] -> "`type` irrelevant for `#{name}`"
        [x] -> x
      end
    checked = Floki.attribute(floki_tag, "checked") != []
    %__MODULE__{tag: tag_name(floki_tag),
                original: floki_tag,
                type: type,
                name: name,
                checked: checked
    }
    |> add_fields_that_depend_on_name
    |> add_values
  end

  # ----------------------------------------------------------------------------
  # A trailing `[]` in the name marks a list-valued tag; the path is built
  # from the name with that suffix removed.
  defp add_fields_that_depend_on_name(incomplete_tag) do
    has_list_value = String.ends_with?(incomplete_tag.name, "[]")
    path =
      case has_list_value do
        false -> path_to(incomplete_tag.name)
        true -> path_to(String.trim_trailing(incomplete_tag.name, "[]"))
      end
    %{ incomplete_tag |
       path: path,
       has_list_value: has_list_value}
  end

  # ----------------------------------------------------------------------------
  # A textarea's value is its flattened text content.
  defp add_values(%{tag: "textarea"} = incomplete_tag) do
    raw_value = Floki.FlatText.get(incomplete_tag.original)
    %{incomplete_tag | values: [raw_value]}
  end

  defp add_values(%{tag: "select"} = incomplete_tag) do
    selected_values = fn selected_options ->
      case Floki.attribute(selected_options, "value") do
        [] ->
          # "if no value attribute is included, the value defaults to the
          # text contained inside the element" -
          # https://developer.mozilla.org/en-US/docs/Web/HTML/Element/select
          [Floki.FlatText.get(selected_options)]
        values ->
          values
      end
    end
    value_when_no_option_is_selected = fn select ->
      multiple? = Floki.attribute(select, "multiple") != []
      options = Floki.find(select, "option")
      case {multiple?, options} do
        # I don't see it explicitly stated, but the value of a
        # non-multiple `select` with no selected option is the value
        # of the first option.
        {false, [first|_rest]} -> selected_values.(first)
        {true, _} -> []
        # A `select` with no options is pretty silly. Nevertheless.
        {_, []} -> []
      end
    end
    values =
      case Floki.find(incomplete_tag.original, "option[selected]") do
        [] ->
          value_when_no_option_is_selected.(incomplete_tag.original)
        selected_options ->
          selected_values.(selected_options)
      end
    %{incomplete_tag | values: values}
  end

  defp add_values(%{tag: "input"} = incomplete_tag) do
    raw_values = Floki.attribute(incomplete_tag.original, "value")
    %{incomplete_tag | values: apply_input_special_cases(incomplete_tag, raw_values)}
  end

  # ----------------------------------------------------------------------------
  # Special cases for `input` tags as described in
  # https://developer.mozilla.org/en-US/docs/Web/HTML/Element/Input/checkbox
  # https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input/radio
  defp apply_input_special_cases(%{type: "checkbox"} = incomplete_tag, values),
    do: tags_with_checked_attribute(incomplete_tag, values)

  defp apply_input_special_cases(%{type: "radio"} = incomplete_tag, values),
    do: tags_with_checked_attribute(incomplete_tag, values)

  # This catches the zillion variants of the type="text" tag.
  defp apply_input_special_cases(_incomplete_tag, []), do: [""]
  defp apply_input_special_cases(_incomplete_tag, values), do: values

  # ----------------------------------------------------------------------------
  # A checked control without an explicit value submits "on" (the browser
  # default); an unchecked one submits nothing.
  defp tags_with_checked_attribute(incomplete_tag, values) do
    case {incomplete_tag.checked, values} do
      {true,[]} -> ["on"]
      {true,values} -> values
      {false,_} -> []
    end
  end

  # ----------------------------------------------------------------------------
  # "user[name]" -> [:user, :name]
  defp path_to(name) do
    name
    |> separate_name_pieces
    |> Enum.map(&(List.first(&1) |> Common.symbolize))
  end

  # A name with no bracket-free segments is unusable.
  defp check_name(name) do
    case separate_name_pieces(name) do
      [] ->
        :empty_name
      _ ->
        :ok
    end
  end

  # Splits "a[b][c]" into [["a"], ["b"], ["c"]] (Regex.scan match lists).
  defp separate_name_pieces(name), do: Regex.scan(~r/[^\[\]]+/, name)

  # Floki allows tags to come in two forms
  defp tag_name([floki_tag]), do: tag_name(floki_tag)
  defp tag_name({name, _, _}), do: name
end
|
lib/phoenix_integration/form/tag.ex
| 0.78785
| 0.538012
|
tag.ex
|
starcoder
|
defmodule Node do
  @moduledoc """
  Functions related to Erlang nodes.
  """

  @type t :: atom

  @doc """
  Returns the current node. It returns the same as the built-in `node()`.
  """
  @spec self :: t
  def self do
    :erlang.node()
  end

  @doc """
  Returns `true` if the local node is alive; that is, if the node can be
  part of a distributed system. Otherwise, it returns `false`.
  """
  @spec alive? :: boolean
  def alive? do
    :erlang.is_alive()
  end

  @doc """
  Returns a list of all visible nodes in the system, excluding
  the local node. Same as `list(:visible)`.
  """
  @spec list :: [t]
  def list do
    :erlang.nodes()
  end

  @doc """
  Returns a list of nodes according to argument given. The result
  returned when the argument is a list, is the list of nodes
  satisfying the disjunction(s) of the list elements.
  See http://www.erlang.org/doc/man/erlang.html#nodes-1 for more info.
  """
  @typep list_arg :: :visible | :hidden | :connected | :this | :known
  @spec list(list_arg | [list_arg]) :: [t]
  def list(args) do
    :erlang.nodes(args)
  end

  @doc """
  Monitors the status of the node. If `flag` is `true`, monitoring is
  turned on. If `flag` is `false`, monitoring is turned off.
  See http://www.erlang.org/doc/man/erlang.html#monitor_node-2 for more info.
  """
  @spec monitor(t, boolean) :: true
  def monitor(node, flag) do
    :erlang.monitor_node(node, flag)
  end

  @doc """
  Behaves as `monitor/2` except that it allows an extra
  option to be given, namely `:allow_passive_connect`.
  See http://www.erlang.org/doc/man/erlang.html#monitor_node-3 for more info.
  """
  @spec monitor(t, boolean, [:allow_passive_connect]) :: true
  def monitor(node, flag, options) do
    :erlang.monitor_node(node, flag, options)
  end

  @doc """
  Forces the disconnection of a node. This will appear to the `node` as if
  the local node has crashed. This BIF is mainly used in the Erlang network
  authentication protocols. Returns `true` if disconnection succeeds, otherwise
  `false`. If the local node is not alive, the function returns `:ignored`.
  See http://www.erlang.org/doc/man/erlang.html#disconnect_node-1 for more info.
  """
  @spec disconnect(t) :: boolean | :ignored
  def disconnect(node) do
    :erlang.disconnect_node(node)
  end

  @doc """
  Establishes a connection to `node`. Returns `true` if successful,
  `false` if not, and the atom `:ignored` if the local node is not
  alive.
  See http://erlang.org/doc/man/net_kernel.html#connect_node-1 for more info.
  """
  @spec connect(t) :: boolean | :ignored
  def connect(node) do
    :net_kernel.connect_node(node)
  end

  @doc """
  Returns the pid of a new process started by the application of `fun`
  on `node`. If `node` does not exist, a useless pid is returned.
  Check http://www.erlang.org/doc/man/erlang.html#spawn-2 for
  the list of available options.
  """
  @spec spawn(t, (() -> any)) :: pid
  def spawn(node, fun) do
    :erlang.spawn(node, fun)
  end

  @doc """
  Returns the pid of a new process started by the application of `fun`
  on `node`. If `node` does not exist, a useless pid is returned.
  Check http://www.erlang.org/doc/man/erlang.html#spawn_opt-3 for
  the list of available options.
  """
  @spec spawn(t, (() -> any), Process.spawn_opts) :: pid | {pid, reference}
  def spawn(node, fun, opts) do
    :erlang.spawn_opt(node, fun, opts)
  end

  @doc """
  Returns the pid of a new process started by the application of
  `module.function(args)` on `node`. If `node` does not exists,
  a useless pid is returned.
  Check http://www.erlang.org/doc/man/erlang.html#spawn-4 for
  the list of available options.
  """
  @spec spawn(t, module, atom, [any]) :: pid
  def spawn(node, module, fun, args) do
    :erlang.spawn(node, module, fun, args)
  end

  @doc """
  Returns the pid of a new process started by the application of
  `module.function(args)` on `node`. If `node` does not exists,
  a useless pid is returned.
  Check http://www.erlang.org/doc/man/erlang.html#spawn_opt-5 for
  the list of available options.
  """
  @spec spawn(t, module, atom, [any], Process.spawn_opts) :: pid | {pid, reference}
  def spawn(node, module, fun, args, opts) do
    :erlang.spawn_opt(node, module, fun, args, opts)
  end

  @doc """
  Returns the pid of a new process started by the application of `fun`
  on `node`. A link is created between the calling process and the
  new process, atomically. If `node` does not exist, a useless pid is returned
  (and due to the link, an exit signal with exit reason `:noconnection` will be
  received).
  """
  @spec spawn_link(t, (() -> any)) :: pid
  def spawn_link(node, fun) do
    :erlang.spawn_link(node, fun)
  end

  @doc """
  Returns the pid of a new process started by the application of
  `module.function(args)` on `node`. A link is created between the calling
  process and the new process, atomically. If `node` does not exist, a useless
  pid is returned (and due to the link, an exit signal with exit reason
  `:noconnection` will be received).
  """
  @spec spawn_link(t, module, atom, [any]) :: pid
  def spawn_link(node, module, fun, args) do
    :erlang.spawn_link(node, module, fun, args)
  end

  @doc """
  Sets the magic cookie of `node` to the atom `cookie`. The default node
  is `Node.self`, the local node. If `node` is the local node, the function also
  sets the cookie of all other unknown nodes to `cookie`.
  This function will raise `FunctionClauseError` if the given `node` is not alive.
  """
  # FIX: the pre-1.0 default-argument operator `//` is invalid in modern
  # Elixir; defaults are declared with `\\`. Also added the missing @spec.
  @spec set_cookie(t, atom) :: true
  def set_cookie(node \\ Node.self(), cookie) when is_atom(cookie) do
    :erlang.set_cookie(node, cookie)
  end

  @doc """
  Returns the magic cookie of the local node, if the node is alive;
  otherwise `:nocookie`.
  """
  @spec get_cookie() :: atom
  def get_cookie() do
    :erlang.get_cookie()
  end
end
|
lib/elixir/lib/node.ex
| 0.830594
| 0.517754
|
node.ex
|
starcoder
|
defmodule Tinymesh.Config do
  import Tinymesh.Config.Packer
  # Exception raised by pack/unpack; carries the failing parameter / address
  # so the rescue clauses below can build `{:error, [what, message]}` tuples.
  defmodule Error do
    defexception type: nil, parameter: nil, addr: nil, message: ""
  end
  @serializedefaults %{
    :addr => false,
    :vsn => nil,
    :ignorero => false
  }
  @unserializedefaults %{addr: false, vsn: nil}
  @doc """
  Serialize a configuration into a Tinymesh Configuration blob.
  ## Options
    * `:addr` - Dictates if the output blob should contain (addr, val)
      pairs. Defaults to false, using the byte's position
      in the blob as the address (0 based index).
    * `:vsn` - defines the fw revision used for the config
    * `:ignorero` - Allow modification of read only bytes
    * `:zerofill` - Initialize the buffer with nil values of size `n`,
      only used when `:addr` := false.
  """
  def serialize(config), do: serialize(config, %{})
  # Accept a proplist-style config by first collapsing it into a map.
  def serialize([_|_] = config, opts), do:
    serialize(
      Enum.reduce(config, %{}, fn({k, v}, acc) -> Map.put(acc, k, v) end),
      opts)
  def serialize(%{} = config, opts) do
    vsn = getvsn config, opts
    part = Map.get config, ["device", "part"], ""
    opts = Map.merge @serializedefaults, Map.put(opts, :vsn, vsn)
    opts = Map.put opts, :part, part
    try do
      # Seed the accumulator: a nil-filled positional buffer when :zerofill
      # is given (and :addr is false), an empty list otherwise.
      acc = case {opts[:zerofill], opts[:addr]} do
        {_, true} -> []
        {nil,_} -> []
        {size, false} ->
          List.duplicate nil, size
      end
      # :unsafe bypasses read-only/validation checks in the packer.
      unsafe? = true === opts[:unsafe]
      buf = Enum.reduce config, acc, fn
        ({k, v}, acc) when unsafe? -> packunsafe {k, v}, acc, opts
        ({k, v}, acc) when not unsafe? -> pack {k, v}, acc, opts
      end
      # acc returns a list with possible `nil` values if no data could
      # be filled in, convert this to zeros for backwards compatibility
      buf = :erlang.iolist_to_binary(Enum.map(buf, fn
        (nil) -> 0
        (v) -> v
      end))
      case buf do
        buf when byte_size(buf) > 32 ->
          {:error, :max_params}
        buf ->
          {:ok, buf}
      end
    rescue e in Tinymesh.Config.Packer.Error ->
      {:error, [e.parameter, e.message]}
    end
  end
  @doc """
  Unserialize a Tinymesh Configuration blob.
  ## Options
    * `:addr` - Dictates if the blob contains (addr, val) pairs.
      Defaults to false, using the byte's position in the blob
      as the address (0 based index)
    * `:vsn` - defines the revision used for the config
  """
  def unserialize(buf, opts \\ %{}) do
    opts = Map.merge @unserializedefaults, opts
    try do
      {res, _p} = chunk(buf, opts[:addr]) |> Enum.reduce({%{}, %{}}, fn(i, acc) ->
        unpack(i, acc, opts)
      end)
      vsn = getvsn res, opts
      {:ok, filtervsn(res, vsn)}
    rescue e in Tinymesh.Config.Packer.Error ->
      {:error, [e.addr, e.message]}
    end
  end
  # Drop parameters not applicable to the resolved firmware revision.
  def filtervsn(res, vsn) do
    # Merge into with empty map to return a map instead of proplist
    res
    |> Enum.filter(&Tinymesh.Config.Packer.vsnfilter(&1, vsn))
    |> Enum.reduce(%{}, fn({k, v}, acc) -> Map.put(acc, k, v) end)
  end
  # Resolve the firmware revision: an explicit :vsn option wins; otherwise
  # fall back to the config's ["device", "fw_revision"] entry (may be nil).
  defp getvsn(config, opts) do
    case {opts[:vsn], Map.fetch(config, ["device", "fw_revision"])} do
      {vsn, {:ok, _v}} when nil !== vsn ->
        vsn
      {_vsn, {:ok, v}} ->
        v
      {vsn, _} ->
        vsn
    end
  end
  # Split the blob into {addr, value} pairs. Positional mode (addr? = false)
  # uses the byte index as the address; addressed mode reads explicit pairs
  # and treats <<0, 0>> as a terminator. NOTE(review): pairs are accumulated
  # by prepending, so the returned list is in reverse blob order - the
  # callers reduce it into a map, where order only matters for duplicates.
  defp chunk(buf, addr?), do: chunk(buf, addr?, {0, []})
  defp chunk("", _addr?, {_, parts}), do: parts
  defp chunk(<<byte>> <> rest, false = addr?, {p, parts}), do:
    chunk(rest, addr?, {p + 1, [{p, byte} | parts]})
  defp chunk(<<0, 0>> <> _, true, {_p, parts}), do:
    parts
  defp chunk(<<addr, val>> <> rest, true = addr?, {p, parts}), do:
    chunk(rest, addr?, {p, [{addr, val} | parts]})
end
|
lib/tinymesh/config.ex
| 0.715921
| 0.55658
|
config.ex
|
starcoder
|
defmodule CredoModuleFunctionOrdering.Rule do
  @moduledoc """
  In a module, functions should be ordered to provide better readability
  across the code base by exposing the most important functions definition types
  first (e.g public ones) followed by the private functions
  The order of function heirarchy in a module from top to bottom is as follows:
  defstruct
  defexception
  defguard
  def
  defdelegate
  defmacro
  defguardp
  defp
  defmacrop
  defoverridable
  """
  @explanation [
    check: @moduledoc
  ]
  use Credo.Check, category: :readability

  # Rank of each definition kind; lower ranks must appear earlier.
  @function_weighings %{
    defstruct: 0,
    defexception: 1,
    defguard: 2,
    def: 3,
    defdelegate: 4,
    defmacro: 5,
    defguardp: 6,
    defp: 7,
    defmacrop: 8,
    defoverridable: 9
  }

  @doc """
  Runs the check over `source_file`, returning issues for definitions that
  appear out of the required order.
  """
  @spec run(Credo.SourceFile.t(), Keyword.t()) :: List.t()
  def run(source_file, params \\ []) do
    issue_meta = IssueMeta.for(source_file, params)
    Credo.Code.prewalk(
      source_file,
      &traverse(&1, &2, issue_meta)
    )
  end

  # The definitions sorted into the required order. Enum.sort/2 is stable,
  # so definitions of equal rank keep their source order.
  defp get_weighed_function_definitions(ast),
    do:
      get_all_function_definitions(ast)
      |> Enum.sort(fn a, b ->
        Map.get(@function_weighings, a.function_type) <=
          Map.get(@function_weighings, b.function_type)
      end)

  # For each module node, diff the actual definition order against the
  # sorted order; every insertion in the diff is a misplaced definition.
  defp traverse(
         {:defmodule, _meta, _arguments} = ast,
         issues,
         issue_meta
       ) do
    get_all_function_definitions(ast)
    |> List.myers_difference(get_weighed_function_definitions(ast))
    |> Keyword.get(:ins, [])
    |> case do
      [] ->
        {ast, issues}
      found_issues ->
        formatted_issues =
          Enum.map(found_issues, fn x -> issue_for(issue_meta, x.line_number) end)
        # BUG FIX: the original prepended the whole list as a single element
        # (`[formatted_issues | issues]`), producing a nested issue list;
        # concatenate so the accumulator stays a flat list of issues.
        {ast, formatted_issues ++ issues}
    end
  end

  defp traverse(ast, issues, _issue_meta) do
    {ast, issues}
  end

  # Collect %{function_type, line_number} for AST nodes whose head atom is
  # one of the ranked definition kinds.
  defp traverse_for_function_definitions({function_type, metadata, _} = node, acc) do
    if Map.has_key?(@function_weighings, function_type) do
      {node,
       acc ++ [%{function_type: function_type, line_number: Keyword.get(metadata, :line)}]}
    else
      {node, acc}
    end
  end

  defp traverse_for_function_definitions(node, acc), do: {node, acc}

  # Post-order traversal gathering all ranked definitions in source order.
  defp get_all_function_definitions(ast) do
    pre_traversal = fn node, acc ->
      {node, acc}
    end
    {_, acc} =
      ast |> Macro.traverse([], pre_traversal, &traverse_for_function_definitions(&1, &2))
    acc
  end

  defp issue_for(issue_meta, line_no) do
    format_issue(
      issue_meta,
      message: """
      Functions should be ordered in a module as follows:
      defstruct > defexception > defguard > def > defdelegate >
      defmacro > defguardp > defp > defmacrop > defoverridable
      """,
      line_no: line_no
    )
  end
end
|
lib/rule.ex
| 0.688992
| 0.564729
|
rule.ex
|
starcoder
|
defmodule URI do
  @moduledoc """
  Utilities for working with and creating URIs.
  """
  # NOTE(review): this is an early-Elixir (~1.0 era) module; it uses `Dict`,
  # `String.lstrip/rstrip` and variable rebinding inside `if` blocks, all of
  # which are deprecated or removed in modern Elixir. Comments below flag
  # the spots - confirm the target Elixir version before modernizing.
  defstruct scheme: nil, path: nil, query: nil,
            fragment: nil, authority: nil,
            userinfo: nil, host: nil, port: nil
  @type t :: %__MODULE__{}
  import Bitwise
  @doc """
  Returns the default port for a given scheme.
  If the scheme is unknown to URI, returns `nil`.
  Any scheme may be registered via `default_port/2`.
  ## Examples
      iex> URI.default_port("ftp")
      21
      iex> URI.default_port("ponzi")
      nil
  """
  def default_port(scheme) when is_binary(scheme) do
    # Registered scheme->port pairs live in the :elixir_config registry.
    :elixir_config.get({:uri, scheme})
  end
  @doc """
  Registers a scheme with a default port.
  It is recommended for this function to be invoked in your
  application start callback in case you want to register
  new URIs.
  """
  def default_port(scheme, port) when is_binary(scheme) and port > 0 do
    :elixir_config.put({:uri, scheme}, port)
  end
  @doc """
  Encodes an enumerable into a query string.
  Takes an enumerable (containing a sequence of two-item tuples)
  and returns a string of the form "key1=value1&key2=value2..." where
  keys and values are URL encoded as per `encode/2`.
  Keys and values can be any term that implements the `String.Chars`
  protocol, except lists which are explicitly forbidden.
  ## Examples
      iex> hd = %{"foo" => 1, "bar" => 2}
      iex> URI.encode_query(hd)
      "bar=2&foo=1"
  """
  def encode_query(l), do: Enum.map_join(l, "&", &pair/1)
  @doc """
  Decodes a query string into a dictionary (by default uses a map).
  Given a query string of the form "key1=value1&key2=value2...", produces a
  map with one entry for each key-value pair. Each key and value will be a
  binary. Keys and values will be percent-unescaped.
  Use `query_decoder/1` if you want to iterate over each value manually.
  ## Examples
      iex> URI.decode_query("foo=1&bar=2")
      %{"bar" => "2", "foo" => "1"}
  """
  def decode_query(q, dict \\ %{}) when is_binary(q) do
    case do_decode_query(q) do
      nil -> dict
      # NOTE(review): `Dict` is deprecated in modern Elixir (use Map).
      {{k, v}, q} -> decode_query(q, Dict.put(dict, k, v))
    end
  end
  @doc """
  Returns an iterator function over the query string that decodes
  the query string in steps.
  ## Examples
      iex> URI.query_decoder("foo=1&bar=2") |> Enum.map(&(&1))
      [{"foo", "1"}, {"bar", "2"}]
  """
  def query_decoder(q) when is_binary(q) do
    Stream.unfold(q, &do_decode_query/1)
  end
  defp do_decode_query("") do
    nil
  end
  # Pops one key-value pair off the front of the query string.
  defp do_decode_query(q) do
    {first, next} =
      case :binary.split(q, "&") do
        [first, rest] -> {first, rest}
        [first] -> {first, ""}
      end
    current =
      case :binary.split(first, "=") do
        [key, value] ->
          {decode_www_form(key), decode_www_form(value)}
        # A bare key with no "=" decodes to {key, nil}.
        [key] ->
          {decode_www_form(key), nil}
      end
    {current, next}
  end
  defp pair({k, _}) when is_list(k) do
    raise ArgumentError, "encode_query/1 keys cannot be lists, got: #{inspect k}"
  end
  defp pair({_, v}) when is_list(v) do
    raise ArgumentError, "encode_query/1 values cannot be lists, got: #{inspect v}"
  end
  defp pair({k, v}) do
    encode_www_form(Kernel.to_string(k)) <>
      "=" <> encode_www_form(Kernel.to_string(v))
  end
  @doc """
  Checks if the character is a "reserved" character in a URI.
  Reserved characters are specified in [RFC3986, section 2.2](http://tools.ietf.org/html/rfc3986#section-2.2).
  """
  def char_reserved?(c) do
    c in ':/?#[]@!$&\'()*+,;='
  end
  @doc """
  Checks if the character is a "unreserved" character in a URI.
  Unreserved characters are specified in [RFC3986, section 2.3](http://tools.ietf.org/html/rfc3986#section-2.3).
  """
  def char_unreserved?(c) do
    c in ?0..?9 or
      c in ?a..?z or
      c in ?A..?Z or
      c in '~_-.'
  end
  @doc """
  Checks if the character is allowed unescaped in a URI.
  This is the default used by `URI.encode/2` where both
  reserved and unreserved characters are kept unescaped.
  """
  def char_unescaped?(c) do
    char_reserved?(c) or char_unreserved?(c)
  end
  @doc """
  Percent-escapes a URI.
  Accepts `predicate` function as an argument to specify if char can be left as is.
  ## Example
      iex> URI.encode("ftp://s-ite.tld/?value=put it+й")
      "ftp://s-ite.tld/?value=put%20it+%D0%B9"
  """
  def encode(str, predicate \\ &char_unescaped?/1) when is_binary(str) do
    for <<c <- str>>, into: "", do: percent(c, predicate)
  end
  @doc """
  Encodes a string as "x-www-urlencoded".
  ## Example
      iex> URI.encode_www_form("put: it+й")
      "put%3A+it%2B%D0%B9"
  """
  def encode_www_form(str) when is_binary(str) do
    for <<c <- str>>, into: "" do
      # In form encoding a space becomes "+" rather than "%20".
      case percent(c, &char_unreserved?/1) do
        "%20" -> "+"
        pct -> pct
      end
    end
  end
  defp percent(c, predicate) do
    if predicate.(c) do
      <<c>>
    else
      "%" <> hex(bsr(c, 4)) <> hex(band(c, 15))
    end
  end
  defp hex(n) when n <= 9, do: <<n + ?0>>
  defp hex(n), do: <<n + ?A - 10>>
  @doc """
  Percent-unescapes a URI.
  ## Examples
      iex> URI.decode("http%3A%2F%2Felixir-lang.org")
      "http://elixir-lang.org"
  """
  def decode(uri) do
    unpercent(uri, "", false)
  catch
    :malformed_uri ->
      raise ArgumentError, "malformed URI #{inspect uri}"
  end
  @doc """
  Decodes a string as "x-www-urlencoded".
  ## Examples
      iex> URI.decode_www_form("%3Call+in%2F")
      "<all in/"
  """
  def decode_www_form(str) do
    unpercent(str, "", true)
  catch
    :malformed_uri ->
      raise ArgumentError, "malformed URI #{inspect str}"
  end
  # `spaces` selects form decoding, where "+" maps back to a space.
  defp unpercent(<<?+, tail::binary>>, acc, spaces = true) do
    unpercent(tail, <<acc::binary, ?\s>>, spaces)
  end
  defp unpercent(<<?%, hex_1, hex_2, tail::binary>>, acc, spaces) do
    unpercent(tail, <<acc::binary, bsl(hex_to_dec(hex_1), 4) + hex_to_dec(hex_2)>>, spaces)
  end
  # A "%" not followed by two hex digits is malformed.
  defp unpercent(<<?%, _::binary>>, _acc, _spaces), do: throw(:malformed_uri)
  defp unpercent(<<head, tail::binary>>, acc, spaces) do
    unpercent(tail, <<acc::binary, head>>, spaces)
  end
  defp unpercent(<<>>, acc, _spaces), do: acc
  defp hex_to_dec(n) when n in ?A..?F, do: n - ?A + 10
  defp hex_to_dec(n) when n in ?a..?f, do: n - ?a + 10
  defp hex_to_dec(n) when n in ?0..?9, do: n - ?0
  defp hex_to_dec(_n), do: throw(:malformed_uri)
  @doc """
  Parses a well-formed URI reference into its components.
  Note this function expects a well-formed URI and does not perform
  any validation. See the examples section below of how `URI.parse/1`
  can be used to parse a wide range of relative URIs.
  This function uses the parsing regular expression as defined
  in the [Appendix B of RFC3986](http://tools.ietf.org/html/rfc3986#appendix-B).
  When a URI is given without a port, the values registered via
  `URI.default_port/1` and `URI.default_port/2` are used.
  ## Examples
      iex> URI.parse("http://elixir-lang.org/")
      %URI{scheme: "http", path: "/", query: nil, fragment: nil,
           authority: "elixir-lang.org", userinfo: nil,
           host: "elixir-lang.org", port: 80}
      iex> URI.parse("//elixir-lang.org/")
      %URI{authority: "elixir-lang.org", fragment: nil, host: "elixir-lang.org",
           path: "/", port: nil, query: nil, scheme: nil, userinfo: nil}
      iex> URI.parse("/foo/bar")
      %URI{authority: nil, fragment: nil, host: nil, path: "/foo/bar",
           port: nil, query: nil, scheme: nil, userinfo: nil}
      iex> URI.parse("foo/bar")
      %URI{authority: nil, fragment: nil, host: nil, path: "foo/bar",
           port: nil, query: nil, scheme: nil, userinfo: nil}
  """
  def parse(%URI{} = uri), do: uri
  def parse(s) when is_binary(s) do
    # From http://tools.ietf.org/html/rfc3986#appendix-B
    regex = ~r/^(([a-z][a-z0-9\+\-\.]*):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?/i
    parts = nillify(Regex.run(regex, s))
    destructure [_, _, scheme, _, authority, path, _, query, _, fragment], parts
    {userinfo, host, port} = split_authority(authority)
    # NOTE(review): rebinding `authority`/`port` inside `if` relies on
    # pre-1.x scoping rules; modern compilers reject this. Left untouched.
    if authority do
      authority = ""
      if userinfo, do: authority = authority <> userinfo <> "@"
      if host, do: authority = authority <> host
      if port, do: authority = authority <> ":" <> Integer.to_string(port)
    end
    scheme = normalize_scheme(scheme)
    if is_nil(port) and not is_nil(scheme) do
      port = default_port(scheme)
    end
    %URI{
      scheme: scheme, path: path, query: query,
      fragment: fragment, authority: authority,
      userinfo: userinfo, host: host, port: port
    }
  end
  # Split an authority into its userinfo, host and port parts.
  defp split_authority(s) do
    s = s || ""
    components = Regex.run ~r/(^(.*)@)?(\[[a-zA-Z0-9:.]*\]|[^:]*)(:(\d*))?/, s
    destructure [_, _, userinfo, host, _, port], nillify(components)
    port = if port, do: String.to_integer(port)
    # Strips the square brackets around an IPv6 host literal.
    # NOTE(review): String.lstrip/rstrip were removed in later Elixir.
    host = if host, do: host |> String.lstrip(?[) |> String.rstrip(?])
    {userinfo, host, port}
  end
  defp normalize_scheme(nil), do: nil
  defp normalize_scheme(scheme), do: String.downcase(scheme)
  # Regex.run returns empty strings sometimes. We want
  # to replace those with nil for consistency.
  defp nillify(l) do
    for s <- l do
      if byte_size(s) > 0, do: s, else: nil
    end
  end
  @doc """
  Converts the URI to string.
      iex> URI.to_string(URI.parse("http://google.com"))
      "http://google.com"
  """
  defdelegate to_string(uri), to: String.Chars.URI
end
defimpl String.Chars, for: URI do
  # Renders a %URI{} back to a string, omitting the port when it equals the
  # scheme's registered default. NOTE(review): like URI.parse/1 above, this
  # rebinds variables inside `if` blocks (pre-1.x scoping); modern compilers
  # reject it. Left untouched.
  def to_string(uri) do
    scheme = uri.scheme
    if scheme && (port = URI.default_port(scheme)) do
      if uri.port == port, do: uri = %{uri | port: nil}
    end
    # Based on http://tools.ietf.org/html/rfc3986#section-5.3
    if uri.host do
      authority = uri.host
      if uri.userinfo, do: authority = uri.userinfo <> "@" <> authority
      if uri.port, do: authority = authority <> ":" <> Integer.to_string(uri.port)
    else
      authority = uri.authority
    end
    result = ""
    if uri.scheme, do: result = result <> uri.scheme <> ":"
    if authority, do: result = result <> "//" <> authority
    if uri.path, do: result = result <> uri.path
    if uri.query, do: result = result <> "?" <> uri.query
    if uri.fragment, do: result = result <> "#" <> uri.fragment
    result
  end
end
|
lib/elixir/lib/uri.ex
| 0.907358
| 0.510741
|
uri.ex
|
starcoder
|
defmodule Telegex.Marked.InlineParser do
  @moduledoc """
  Parsing implementation of inline nodes.
  """
  use Telegex.Marked.Parser
  alias Telegex.Marked.{
    BoldRule,
    UnderlineRule,
    ItalicRule,
    StrikethroughRule,
    LinkRule,
    InlineCodeRule
  }
  # Rules are tried in this order; the first one that matches wins.
  @rule_modules [BoldRule, UnderlineRule, ItalicRule, StrikethroughRule, LinkRule, InlineCodeRule]
  @doc """
  Parse inline elements in Markdown text.
  """
  @spec parse(String.t(), keyword()) :: Telegex.Marked.document()
  def parse(markdown, _options \\ []) do
    lines = markdown |> String.split("\n")
    lines_count = length(lines)
    lastline? = fn index -> index + 1 == lines_count end
    lines
    |> Enum.with_index()
    |> Enum.map(fn {line, index} -> parse_line(line, lastline?.(index), 0) end)
  end
  @spec parse_line(String.t(), boolean(), integer(), [Node.t()]) :: [Node.t()]
  @doc """
  Parse single-line Markdown text.
  """
  def parse_line(line, lastline?, pos, nodes \\ [])
      when is_binary(line) and is_boolean(lastline?) and is_integer(pos) and is_list(nodes) do
    init_state = InlineState.new(line, pos)
    case parse_node(init_state) do
      {:match, state} ->
        new_nodes = expand_children(state.nodes)
        # Continue from where the matched rule left off, or finish the line
        # (appending a newline node unless this is the last line).
        if state.pos <= init_state.line.len - 1 do
          parse_line(line, lastline?, state.pos + 1, nodes ++ new_nodes)
        else
          if lastline?, do: nodes ++ new_nodes, else: nodes ++ new_nodes ++ [newline_node()]
        end
      {:nomatch, state} ->
        nomatch(line, state, nodes, state.pos <= init_state.line.len - 1, lastline?)
    end
  end
  # Tries each rule in @rule_modules order; stops at the first :match.
  @spec parse_node(InlineState.t()) :: {Telegex.Marked.Rule.match_status(), InlineState.t()}
  defp parse_node(%InlineState{} = state) do
    @rule_modules
    |> Enum.reduce_while({:nomatch, state}, fn rule_module, result ->
      {status, state} = rule_module.match(state)
      if status == :match, do: {:halt, {:match, state}}, else: {:cont, result}
    end)
  end
  # If no rule matches at the current position, continue matching
  # character by character.
  @spec nomatch(String.t(), InlineState.t(), [Node.t()], boolean(), boolean()) :: [Node.t()]
  defp nomatch(line, %InlineState{} = state, nodes, not_ending?, lastline?) do
    if not_ending? do
      len = length(nodes)
      this_char = String.at(line, state.pos)
      nodes =
        if len > 0 do
          {last_node, nodes} = nodes |> List.pop_at(len - 1)
          case last_node do
            # To avoid character-by-character matching producing many
            # consecutive single-character nodes, merge this character
            # into the trailing string node when there is one.
            %Node{type: :string, data: data} ->
              nodes ++ [string_node(data <> this_char)]
            _ ->
              nodes ++ [last_node, string_node(this_char)]
          end
        else
          nodes ++ [string_node(this_char)]
        end
      parse_line(line, lastline?, state.pos + 1, nodes)
    else
      if lastline?, do: nodes ++ state.nodes, else: nodes ++ state.nodes ++ [newline_node()]
    end
  end
  # Recursively parses a node's children unless it is a plain string/newline
  # leaf (whose children are already a binary).
  @spec expand_children(Node.t()) :: Node.t()
  defp expand_children(%Node{} = node) do
    if node.type in [:string, :newline] && is_binary(node.children) do
      node
    else
      %{node | children: parse_line(node.children, true, 0)}
    end
  end
  @spec expand_children([Node.t()]) :: [Node.t()]
  defp expand_children(nodes) when is_list(nodes) do
    nodes |> Enum.map(&expand_children/1)
  end
end
|
lib/telegex/marked/parsers/inline_parser.ex
| 0.553747
| 0.509459
|
inline_parser.ex
|
starcoder
|
require Record
defmodule JOSE.JWK do
@moduledoc ~S"""
JWK stands for JSON Web Key which is defined in [RFC 7517](https://tools.ietf.org/html/rfc7517).
"""
# Extract the fields of the Erlang `:jose_jwk` record at compile time and
# mirror them on this struct, so the struct and the record stay in sync.
# `pairs`/`vals` are quoted {key, var} fragments spliced into the
# to_record/from_record clauses below.
record = Record.extract(:jose_jwk, from_lib: "jose/include/jose_jwk.hrl")
keys = :lists.map(&elem(&1, 0), record)
vals = :lists.map(&{&1, [], nil}, keys)
pairs = :lists.zip(keys, vals)
defstruct keys
@type t :: %__MODULE__{}
@doc """
Converts a `JOSE.JWK` struct to a `:jose_jwk` record.
"""
def to_record(%JOSE.JWK{unquote_splicing(pairs)}) do
{:jose_jwk, unquote_splicing(vals)}
end
def to_record(list) when is_list(list), do: for element <- list, into: [], do: to_record(element)
@doc """
Converts a `:jose_jwk` record into a `JOSE.JWK`.
"""
def from_record(jose_jwk)
def from_record({:jose_jwk, unquote_splicing(vals)}) do
%JOSE.JWK{unquote_splicing(pairs)}
end
def from_record(list) when is_list(list), do: for element <- list, into: [], do: from_record(element)
## Decode API
@doc """
Converts a binary or map into a `JOSE.JWK`.
iex> JOSE.JWK.from(%{"k" => "", "kty" => "oct"})
%JOSE.JWK{fields: %{}, keys: :undefined, kty: {:jose_jwk_kty_oct, ""}}
iex> JOSE.JWK.from("{\"k\":\"\",\"kty\":\"oct\"}")
%JOSE.JWK{fields: %{}, keys: :undefined, kty: {:jose_jwk_kty_oct, ""}}
The `"kty"` field may be overridden with a custom module that implements the `:jose_jwk` and `:jose_jwk_kty` behaviours.
For example:
iex> JOSE.JWK.from({%{ kty: MyCustomKey }, %{ "kty" => "custom" }})
%JOSE.JWK{fields: %{}, keys: :undefined, kty: {MyCustomKey, :state}}
"""
def from(list) when is_list(list), do: for element <- list, into: [], do: from(element)
def from(jwk=%JOSE.JWK{}), do: from(to_record(jwk))
def from(any), do: :jose_jwk.from(any) |> from_record()
@doc """
Decrypts an encrypted binary or map into a `JOSE.JWK` using the specified `password`.
iex> JOSE.JWK.from("password", "<KEY>")
{%JOSE.JWE{alg: {:jose_jwe_alg_pbes2,
{:jose_jwe_alg_pbes2, :sha256, 128,
<<80, 66, 69, 83, 50, 45, 72, 83, 50, 53, 54, 43, 65, 49, 50, 56, 75, 87, 0, 156, 208, 149, 48, 2, 62, 231, 159, 80, 66, 105, 157, 18, 186, 101, 117>>,
4096}},
enc: {:jose_jwe_enc_aes,
{:jose_jwe_enc_aes, {:aes_gcm, 128}, 128, 16, 12, :undefined, :undefined,
:undefined, :undefined}}, fields: %{"cty" => "jwk+json"}, zip: :undefined},
%JOSE.JWK{fields: %{}, keys: :undefined, kty: {:jose_jwk_kty_oct, "secret"}}}
"""
def from(password, list) when is_list(list), do: for element <- list, into: [], do: from(password, element)
def from(password, jwk=%JOSE.JWK{}), do: from(password, to_record(jwk))
def from(password, any), do: :jose_jwk.from(password, any) |> from_encrypted_record()
@doc """
Converts a binary into a `JOSE.JWK`.
"""
def from_binary(list) when is_list(list), do: for element <- list, into: [], do: from_binary(element)
def from_binary(binary), do: :jose_jwk.from_binary(binary) |> from_record()
@doc """
Decrypts an encrypted binary into a `JOSE.JWK` using `password`. See `from/2`.
"""
def from_binary(password, list) when is_list(list), do: for element <- list, into: [], do: from_binary(password, element)
def from_binary(password, binary), do: :jose_jwk.from_binary(password, binary) |> from_encrypted_record()
@doc """
Reads file and calls `from_binary/1` to convert into a `JOSE.JWK`.
"""
def from_file(file), do: :jose_jwk.from_file(file) |> from_record()
@doc """
Reads encrypted file and calls `from_binary/2` to convert into a `JOSE.JWK` using `password`. See `from/2`.
"""
def from_file(password, file), do: :jose_jwk.from_file(password, file) |> from_encrypted_record()
@doc """
Converts Firebase certificate public keys into a map of `JOSE.JWK`.
"""
def from_firebase(any), do: :maps.fold(fn (k, v, a) -> :maps.put(k, from_record(v), a) end, %{}, :jose_jwk.from_firebase(any))
@doc """
Converts Erlang records for `:ECPrivateKey`, `:ECPublicKey`, `:RSAPrivateKey`, and `:RSAPublicKey` into a `JOSE.JWK`.
"""
def from_key(list) when is_list(list), do: for element <- list, into: [], do: from_key(element)
def from_key(key), do: :jose_jwk.from_key(key) |> from_record()
@doc """
Converts a map into a `JOSE.JWK`.
"""
def from_map(list) when is_list(list), do: for element <- list, into: [], do: from_map(element)
def from_map(map), do: :jose_jwk.from_map(map) |> from_record()
@doc """
Decrypts an encrypted map into a `JOSE.JWK` using `password`. See `from/2`.
"""
def from_map(password, list) when is_list(list), do: for element <- list, into: [], do: from_map(password, element)
def from_map(password, map), do: :jose_jwk.from_map(password, map) |> from_encrypted_record()
@doc """
Converts an arbitrary binary into a `JOSE.JWK` with `"kty"` of `"oct"`.
"""
def from_oct(list) when is_list(list), do: for element <- list, into: [], do: from_oct(element)
def from_oct(oct), do: :jose_jwk.from_oct(oct) |> from_record()
@doc """
Decrypts an encrypted arbitrary binary into a `JOSE.JWK` with `"kty"` of `"oct"` using `password`. See `from/2`.
"""
def from_oct(password, list) when is_list(list), do: for element <- list, into: [], do: from_oct(password, element)
def from_oct(password, oct), do: :jose_jwk.from_oct(password, oct) |> from_encrypted_record()
@doc """
Reads file and calls `from_oct/1` to convert into a `JOSE.JWK`.
"""
def from_oct_file(file), do: :jose_jwk.from_oct_file(file) |> from_record()
@doc """
Reads encrypted file and calls `from_oct/2` to convert into a `JOSE.JWK` using `password`. See `from/2`.
"""
def from_oct_file(password, file), do: :jose_jwk.from_oct_file(password, file) |> from_encrypted_record()
@doc """
Converts an octet key pair into a `JOSE.JWK` with `"kty"` of `"OKP"`.
"""
def from_okp(list) when is_list(list), do: for element <- list, into: [], do: from_okp(element)
def from_okp(okp), do: :jose_jwk.from_okp(okp) |> from_record()
@doc """
Converts an openssh key into a `JOSE.JWK` with `"kty"` of `"OKP"`.
"""
def from_openssh_key(list) when is_list(list), do: for element <- list, into: [], do: from_openssh_key(element)
def from_openssh_key(openssh_key), do: :jose_jwk.from_openssh_key(openssh_key) |> from_record()
@doc """
Reads file and calls `from_openssh_key/1` to convert into a `JOSE.JWK`.
"""
def from_openssh_key_file(file), do: :jose_jwk.from_openssh_key_file(file) |> from_record()
@doc """
Converts a PEM (Privacy Enhanced Email) binary into a `JOSE.JWK`.
"""
def from_pem(list) when is_list(list), do: for element <- list, into: [], do: from_pem(element)
def from_pem(pem), do: :jose_jwk.from_pem(pem) |> from_record()
@doc """
Decrypts an encrypted PEM (Privacy Enhanced Email) binary into a `JOSE.JWK` using `password`.
"""
def from_pem(password, list) when is_list(list), do: for element <- list, into: [], do: from_pem(password, element)
def from_pem(password, pem), do: :jose_jwk.from_pem(password, pem) |> from_record()
@doc """
Reads file and calls `from_oct/1` to convert into a `JOSE.JWK`.
"""
def from_pem_file(file), do: :jose_jwk.from_pem_file(file) |> from_record()
@doc """
Reads encrypted file and calls `from_pem/2` to convert into a `JOSE.JWK` using `password`.
"""
def from_pem_file(password, file), do: :jose_jwk.from_pem_file(password, file) |> from_record()
defp from_encrypted_record({jwe, jwk}) when is_tuple(jwe) and is_tuple(jwk),
do: {JOSE.JWE.from_record(jwe), from_record(jwk)}
defp from_encrypted_record(any), do: any
## Encode API
@doc """
Converts a `JOSE.JWK` into a binary.
"""
def to_binary(list) when is_list(list), do: for element <- list, into: [], do: to_binary(element)
def to_binary(jwk=%JOSE.JWK{}), do: to_binary(to_record(jwk))
def to_binary(jwk), do: :jose_jwk.to_binary(jwk)
@doc """
Encrypts a `JOSE.JWK` into a binary using `password` and the default `jwe` for the key type. See `to_binary/3`.
"""
def to_binary(password, list) when is_list(list), do: for element <- list, into: [], do: to_binary(password, element)
def to_binary(password, jwk=%JOSE.JWK{}), do: to_binary(password, to_record(jwk))
def to_binary(password, jwk), do: :jose_jwk.to_binary(password, jwk)
@doc """
Encrypts a `JOSE.JWK` into a binary using `password` and `jwe`.
"""
def to_binary(password, jwe=%JOSE.JWE{}, jwk), do: to_binary(password, JOSE.JWE.to_record(jwe), jwk)
def to_binary(password, jwe, jwk=%JOSE.JWK{}), do: to_binary(password, jwe, to_record(jwk))
def to_binary(password, jwe, jwk), do: :jose_jwk.to_binary(password, jwe, jwk)
@doc """
Calls `to_binary/1` on a `JOSE.JWK` and then writes the binary to file.
"""
def to_file(file, jwk=%JOSE.JWK{}), do: to_file(file, to_record(jwk))
def to_file(file, jwk), do: :jose_jwk.to_file(file, jwk)
@doc """
Calls `to_binary/2` on a `JOSE.JWK` and then writes the encrypted binary to file.
"""
def to_file(password, file, jwk=%JOSE.JWK{}), do: to_file(password, file, to_record(jwk))
def to_file(password, file, jwk), do: :jose_jwk.to_file(password, file, jwk)
@doc """
Calls `to_binary/3` on a `JOSE.JWK` and then writes the encrypted binary to file.
"""
def to_file(password, file, jwe=%JOSE.JWE{}, jwk), do: to_file(password, file, JOSE.JWE.to_record(jwe), jwk)
def to_file(password, file, jwe, jwk=%JOSE.JWK{}), do: to_file(password, file, jwe, to_record(jwk))
def to_file(password, file, jwe, jwk), do: :jose_jwk.to_file(password, file, jwe, jwk)
@doc """
Converts a `JOSE.JWK` into the raw key format.
"""
def to_key(list) when is_list(list), do: for element <- list, into: [], do: to_key(element)
def to_key(jwk=%JOSE.JWK{}), do: to_key(to_record(jwk))
def to_key(jwk), do: :jose_jwk.to_key(jwk)
@doc """
Converts a `JOSE.JWK` into a map.
"""
def to_map(list) when is_list(list), do: for element <- list, into: [], do: to_map(element)
def to_map(jwk=%JOSE.JWK{}), do: to_map(to_record(jwk))
def to_map(jwk), do: :jose_jwk.to_map(jwk)
@doc """
Encrypts a `JOSE.JWK` into a map using `password` and the default `jwe` for the key type. See `to_map/3`.
"""
def to_map(password, list) when is_list(list), do: for element <- list, into: [], do: to_map(password, element)
def to_map(password, jwk=%JOSE.JWK{}), do: to_map(password, to_record(jwk))
def to_map(password, jwk), do: :jose_jwk.to_map(password, jwk)
@doc """
Encrypts a `JOSE.JWK` into a map using `password` and `jwe`.
"""
def to_map(password, jwe=%JOSE.JWE{}, jwk), do: to_map(password, JOSE.JWE.to_record(jwe), jwk)
def to_map(password, jwe, jwk=%JOSE.JWK{}), do: to_map(password, jwe, to_record(jwk))
def to_map(password, jwe, jwk), do: :jose_jwk.to_map(password, jwe, jwk)
@doc """
Converts a `JOSE.JWK` into a raw binary octet.
"""
def to_oct(list) when is_list(list), do: for element <- list, into: [], do: to_oct(element)
def to_oct(jwk=%JOSE.JWK{}), do: to_oct(to_record(jwk))
def to_oct(jwk), do: :jose_jwk.to_oct(jwk)
@doc """
Encrypts a `JOSE.JWK` into a raw binary octet using `password` and the default `jwe` for the key type. See `to_oct/3`.
"""
def to_oct(password, list) when is_list(list), do: for element <- list, into: [], do: to_oct(password, element)
def to_oct(password, jwk=%JOSE.JWK{}), do: to_oct(password, to_record(jwk))
def to_oct(password, jwk), do: :jose_jwk.to_oct(password, jwk)
@doc """
Encrypts a `JOSE.JWK` into a raw binary octet using `password` and `jwe`.
"""
def to_oct(password, jwe=%JOSE.JWE{}, jwk), do: to_oct(password, JOSE.JWE.to_record(jwe), jwk)
def to_oct(password, jwe, jwk=%JOSE.JWK{}), do: to_oct(password, jwe, to_record(jwk))
def to_oct(password, jwe, jwk), do: :jose_jwk.to_oct(password, jwe, jwk)
@doc """
Calls `to_oct/1` on a `JOSE.JWK` and then writes the binary to file.
"""
def to_oct_file(file, jwk=%JOSE.JWK{}), do: to_oct_file(file, to_record(jwk))
def to_oct_file(file, jwk), do: :jose_jwk.to_oct_file(file, jwk)
@doc """
Calls `to_oct/2` on a `JOSE.JWK` and then writes the encrypted binary to file.
"""
def to_oct_file(password, file, jwk=%JOSE.JWK{}), do: to_oct_file(password, file, to_record(jwk))
def to_oct_file(password, file, jwk), do: :jose_jwk.to_oct_file(password, file, jwk)
@doc """
Calls `to_oct/3` on a `JOSE.JWK` and then writes the encrypted binary to file.
"""
def to_oct_file(password, file, jwe=%JOSE.JWE{}, jwk), do: to_oct_file(password, file, JOSE.JWE.to_record(jwe), jwk)
def to_oct_file(password, file, jwe, jwk=%JOSE.JWK{}), do: to_oct_file(password, file, jwe, to_record(jwk))
def to_oct_file(password, file, jwe, jwk), do: :jose_jwk.to_oct_file(password, file, jwe, jwk)
@doc """
Converts a `JOSE.JWK` into an octet key pair.
"""
def to_okp(list) when is_list(list), do: for element <- list, into: [], do: to_okp(element)
def to_okp(jwk=%JOSE.JWK{}), do: to_okp(to_record(jwk))
def to_okp(jwk), do: :jose_jwk.to_okp(jwk)
@doc """
Converts a `JOSE.JWK` into an OpenSSH key binary.
"""
def to_openssh_key(list) when is_list(list), do: for element <- list, into: [], do: to_openssh_key(element)
def to_openssh_key(jwk=%JOSE.JWK{}), do: to_openssh_key(to_record(jwk))
def to_openssh_key(jwk), do: :jose_jwk.to_openssh_key(jwk)
@doc """
Calls `to_openssh_key/1` on a `JOSE.JWK` and then writes the binary to file.
"""
def to_openssh_key_file(file, jwk=%JOSE.JWK{}), do: to_openssh_key_file(file, to_record(jwk))
def to_openssh_key_file(file, jwk), do: :jose_jwk.to_openssh_key_file(file, jwk)
@doc """
Converts a `JOSE.JWK` into a PEM (Privacy Enhanced Email) binary.
"""
def to_pem(list) when is_list(list), do: for element <- list, into: [], do: to_pem(element)
def to_pem(jwk=%JOSE.JWK{}), do: to_pem(to_record(jwk))
def to_pem(jwk), do: :jose_jwk.to_pem(jwk)
@doc """
Encrypts a `JOSE.JWK` into a PEM (Privacy Enhanced Email) encrypted binary using `password`.
"""
def to_pem(password, list) when is_list(list), do: for element <- list, into: [], do: to_pem(password, element)
def to_pem(password, jwk=%JOSE.JWK{}), do: to_pem(password, to_record(jwk))
def to_pem(password, jwk), do: :jose_jwk.to_pem(password, jwk)
@doc """
Calls `to_pem/1` on a `JOSE.JWK` and then writes the binary to file.
"""
def to_pem_file(file, jwk=%JOSE.JWK{}), do: to_pem_file(file, to_record(jwk))
def to_pem_file(file, jwk), do: :jose_jwk.to_pem_file(file, jwk)
@doc """
Calls `to_pem/2` on a `JOSE.JWK` and then writes the encrypted binary to file.
"""
def to_pem_file(password, file, jwk=%JOSE.JWK{}), do: to_pem_file(password, file, to_record(jwk))
def to_pem_file(password, file, jwk), do: :jose_jwk.to_pem_file(password, file, jwk)
@doc """
Converts a private `JOSE.JWK` into a public `JOSE.JWK`.
iex> jwk_rsa = JOSE.JWK.generate_key({:rsa, 256})
%JOSE.JWK{fields: %{}, keys: :undefined,
kty: {:jose_jwk_kty_rsa,
{:RSAPrivateKey, :"two-prime",
89657271283923333213688956979801646886488725937927826421780028977595670900943,
65537,
49624301670095289515744590467755999498582844809776145284365095264133428741569,
336111124810514302695156165996294214367,
266748895426976520545002702829665062929,
329628611699439793965634256329704106687,
266443630200356088742496100410997365601,
145084675516165292189647528713269147163, :asn1_NOVALUE}}}
iex> JOSE.JWK.to_public(jwk_rsa)
%JOSE.JWK{fields: %{}, keys: :undefined,
kty: {:jose_jwk_kty_rsa,
{:RSAPublicKey,
89657271283923333213688956979801646886488725937927826421780028977595670900943,
65537}}}
"""
def to_public(list) when is_list(list), do: for element <- list, into: [], do: to_public(element)
def to_public(jwk=%JOSE.JWK{}), do: to_public(to_record(jwk))
def to_public(jwk), do: :jose_jwk.to_public(jwk) |> from_record()
@doc """
Calls `to_public/1` and then `to_file/2` on a `JOSE.JWK`.
"""
def to_public_file(file, jwk=%JOSE.JWK{}), do: to_public_file(file, to_record(jwk))
def to_public_file(file, jwk), do: :jose_jwk.to_public_file(file, jwk)
@doc """
Calls `to_public/1` and then `to_key/1` on a `JOSE.JWK`.
"""
def to_public_key(list) when is_list(list), do: for element <- list, into: [], do: to_public_key(element)
def to_public_key(jwk=%JOSE.JWK{}), do: to_public_key(to_record(jwk))
def to_public_key(jwk), do: :jose_jwk.to_public_key(jwk)
@doc """
Calls `to_public/1` and then `to_map/1` on a `JOSE.JWK`.
"""
def to_public_map(list) when is_list(list), do: for element <- list, into: [], do: to_public_map(element)
def to_public_map(jwk=%JOSE.JWK{}), do: to_public_map(to_record(jwk))
def to_public_map(jwk), do: :jose_jwk.to_public_map(jwk)
@doc """
Converts a `JOSE.JWK` into a map that can be used by `thumbprint/1` and `thumbprint/2`.
"""
def to_thumbprint_map(list) when is_list(list), do: for element <- list, into: [], do: to_thumbprint_map(element)
def to_thumbprint_map(jwk=%JOSE.JWK{}), do: to_thumbprint_map(to_record(jwk))
def to_thumbprint_map(jwk), do: :jose_jwk.to_thumbprint_map(jwk)
## API
@doc """
Decrypts the `encrypted` binary or map using the `jwk`. See `JOSE.JWE.block_decrypt/2`.
"""
def block_decrypt(encrypted, jwk=%JOSE.JWK{}), do: block_decrypt(encrypted, to_record(jwk))
def block_decrypt(encrypted, {your_public_jwk=%JOSE.JWK{}, my_private_jwk}), do: block_decrypt(encrypted, {to_record(your_public_jwk), my_private_jwk})
def block_decrypt(encrypted, {your_public_jwk, my_private_jwk=%JOSE.JWK{}}), do: block_decrypt(encrypted, {your_public_jwk, to_record(my_private_jwk)})
def block_decrypt(encrypted, jwk) do
case :jose_jwk.block_decrypt(encrypted, jwk) do
{plain_text, jwe} when is_tuple(jwe) ->
{plain_text, JOSE.JWE.from_record(jwe)}
error ->
error
end
end
@doc """
Encrypts the `plain_text` using the `jwk` and the default `jwe` based on the key type. See `block_encrypt/3`.
"""
def block_encrypt(plain_text, jwk=%JOSE.JWK{}), do: block_encrypt(plain_text, to_record(jwk))
def block_encrypt(plain_text, {your_public_jwk=%JOSE.JWK{}, my_private_jwk}), do: block_encrypt(plain_text, {to_record(your_public_jwk), my_private_jwk})
def block_encrypt(plain_text, {your_public_jwk, my_private_jwk=%JOSE.JWK{}}), do: block_encrypt(plain_text, {your_public_jwk, to_record(my_private_jwk)})
def block_encrypt(plain_text, jwk), do: :jose_jwk.block_encrypt(plain_text, jwk)
@doc """
Encrypts the `plain_text` using the `jwk` and algorithms specified by the `jwe`. See `JOSE.JWE.block_encrypt/3`.
"""
def block_encrypt(plain_text, jwe=%JOSE.JWE{}, jwk), do: block_encrypt(plain_text, JOSE.JWE.to_record(jwe), jwk)
def block_encrypt(plain_text, jwe, jwk=%JOSE.JWK{}), do: block_encrypt(plain_text, jwe, to_record(jwk))
def block_encrypt(plain_text, jwe, {your_public_jwk=%JOSE.JWK{}, my_private_jwk}), do: block_encrypt(plain_text, jwe, {to_record(your_public_jwk), my_private_jwk})
def block_encrypt(plain_text, jwe, {your_public_jwk, my_private_jwk=%JOSE.JWK{}}), do: block_encrypt(plain_text, jwe, {your_public_jwk, to_record(my_private_jwk)})
def block_encrypt(plain_text, jwe, jwk), do: :jose_jwk.block_encrypt(plain_text, jwe, jwk)
@doc """
Returns a block encryptor map for the key type.
"""
def block_encryptor(list) when is_list(list), do: for element <- list, into: [], do: block_encryptor(element)
def block_encryptor(jwk=%JOSE.JWK{}), do: block_encryptor(to_record(jwk))
def block_encryptor(jwk), do: :jose_jwk.block_encryptor(jwk)
@doc """
Key Agreement decryption of the `encrypted` binary or map using `my_private_jwk`. See `box_encrypt/2` and `JOSE.JWE.block_decrypt/2`.
"""
def box_decrypt(encrypted, my_private_jwk=%JOSE.JWK{}), do: box_decrypt(encrypted, to_record(my_private_jwk))
def box_decrypt(encrypted, {your_public_jwk=%JOSE.JWK{}, my_private_jwk}), do: box_decrypt(encrypted, {to_record(your_public_jwk), my_private_jwk})
def box_decrypt(encrypted, {your_public_jwk, my_private_jwk=%JOSE.JWK{}}), do: box_decrypt(encrypted, {your_public_jwk, to_record(my_private_jwk)})
def box_decrypt(encrypted, my_private_jwk) do
case :jose_jwk.box_decrypt(encrypted, my_private_jwk) do
{plain_text, jwe} when is_tuple(jwe) ->
{plain_text, JOSE.JWE.from_record(jwe)}
error ->
error
end
end
@doc """
Key Agreement encryption of `plain_text` by generating an ephemeral private key based on `other_public_jwk` curve. See `box_encrypt/3`.
# bob wants alice to send him a secret, so he first sends alice his public key:
bob_public_jwk = JOSE.JWK.from(%{"crv" => "P-256", "kty" => "EC",
"x" => "<KEY>",
"y" => "<KEY>"})
# alice uses bob's public key to generate an ephemeral private key used to encrypt the secret:
iex> {enc_alice2bob_tuple, alice_private_jwk} = JOSE.JWK.box_encrypt("secret", bob_public_jwk)
{{%{alg: :jose_jwe_alg_ecdh_es, enc: :jose_jwe_enc_aes},
%{"ciphertext" => "zcIIZLDB", "encrypted_key" => "",
"iv" => "9p8c7YJV5htz8zLI",
"protected" => "<KEY>",
"tag" => "MHtfyNub8vG84ER0MPynuA"}},
%JOSE.JWK{fields: %{}, keys: :undefined,
kty: {:jose_jwk_kty_ec,
{:ECPrivateKey, 1,
<<138, 8, 179, 41, 203, 0, 127, 144, 178, 132, 66, 96, 50, 161, 103, 50, 4, 119, 71, 57, 63, 63, 33, 29, 69, 201, 182, 210, 106, 37, 196, 183>>,
{:namedCurve, {1, 2, 840, 10045, 3, 1, 7}},
<<4, 32, 94, 196, 214, 201, 7, 35, 109, 41, 7, 138, 103, 251, 237, 85, 198, 228, 49, 84, 140, 73, 247, 124, 38, 30, 22, 49, 76, 155, 85, 20, 213, 109, 62, 219, 195, 182, 133, 229, 237, 215, ...>>}}}}
# alice compacts the encrypted message and sends it to bob which contains alice's public key:
iex> enc_alice2bob_binary = JOSE.JWE.compact(enc_alice2bob_tuple) |> elem(1)
"<KEY>"
# bob can then decrypt the encrypted message using his private key:
bob_private_jwk = JOSE.JWK.from(%{"crv" => "P-256", "d" => "<KEY>",
"kty" => "EC", "x" => "<KEY>",
"y" => "fEHj1ehsIJ7PP-qon-oON<KEY>ZT7xqs"})
iex> JOSE.JWK.box_decrypt(enc_alice2bob_binary, bob_private_jwk)
{"secret",
%JOSE.JWE{alg: {:jose_jwe_alg_ecdh_es,
{:jose_jwe_alg_ecdh_es,
{{{:ECPoint,
<<4, 32, 94, 196, 214, 201, 7, 35, 109, 41, 7, 138, 103, 251, 237, 85, 198, 228, 49, 84, 140, 73, 247, 124, 38, 30, 22, 49, 76, 155, 85, 20, 213, 109, 62, 219, 195, 182, 133, 229, 237, 215, ...>>},
{:namedCurve, {1, 2, 840, 10045, 3, 1, 7}}}, %{}},
<<132, 118, 229, 43, 87, 210, 84, 36, 28, 44, 211, 1, 37, 109, 180, 203, 98, 120, 78, 205, 234, 30, 49, 160, 241, 223, 97, 173, 175, 236, 119, 104>>,
<<83, 205, 137, 34, 84, 12, 40, 5, 90, 127, 217, 174, 165, 32, 122, 176, 16, 196, 140, 112, 147, 214, 255, 200, 72, 205, 117, 10, 87, 115, 80, 247>>,
:undefined}},
enc: {:jose_jwe_enc_aes,
{:jose_jwe_enc_aes, {:aes_gcm, 128}, 128, 16, 12, :undefined, :undefined,
:undefined, :undefined}}, fields: %{}, zip: :undefined}}
"""
def box_encrypt(plain_text, other_public_jwk=%JOSE.JWK{}), do: box_encrypt(plain_text, to_record(other_public_jwk))
def box_encrypt(plain_text, other_public_jwk) do
case :jose_jwk.box_encrypt(plain_text, other_public_jwk) do
{encrypted, my_private_jwk} when is_tuple(my_private_jwk) ->
{encrypted, from_record(my_private_jwk)}
error ->
error
end
end
@doc """
Key Agreement encryption of `plain_text` using `my_private_jwk`, `other_public_jwk`, and the default `jwe` based on the key types. See `box_encrypt/4`.
"""
def box_encrypt(plain_text, other_public_jwk=%JOSE.JWK{}, my_private_jwk), do: box_encrypt(plain_text, to_record(other_public_jwk), my_private_jwk)
def box_encrypt(plain_text, other_public_jwk, my_private_jwk=%JOSE.JWK{}), do: box_encrypt(plain_text, other_public_jwk, to_record(my_private_jwk))
def box_encrypt(plain_text, other_public_jwk, my_private_jwk), do: :jose_jwk.box_encrypt(plain_text, other_public_jwk, my_private_jwk)
@doc """
Key Agreement encryption of `plain_text` using `my_private_jwk`, `other_public_jwk`, and the algorithms specified by the `jwe`.
# let's
"""
def box_encrypt(plain_text, jwe=%JOSE.JWE{}, other_public_jwk, my_private_jwk), do: box_encrypt(plain_text, JOSE.JWE.to_record(jwe), other_public_jwk, my_private_jwk)
def box_encrypt(plain_text, jwe, other_public_jwk=%JOSE.JWK{}, my_private_jwk), do: box_encrypt(plain_text, jwe, to_record(other_public_jwk), my_private_jwk)
def box_encrypt(plain_text, jwe, other_public_jwk, my_private_jwk=%JOSE.JWK{}), do: box_encrypt(plain_text, jwe, other_public_jwk, to_record(my_private_jwk))
def box_encrypt(plain_text, jwe, other_public_jwk, my_private_jwk), do: :jose_jwk.box_encrypt(plain_text, jwe, other_public_jwk, my_private_jwk)
@doc """
Generates a new `JOSE.JWK` based on another `JOSE.JWK` or from initialization params provided.
Passing another `JOSE.JWK` results in different behavior depending on the `"kty"`:
* `"EC"` - uses the same named curve to generate a new key
* `"oct"` - uses the byte size to generate a new key
* `"OKP"` - uses the same named curve to generate a new key
* `"RSA"` - uses the same modulus and exponent sizes to generate a new key
The following initialization params may also be used:
* `{:ec, "P-256" | "P-384" | "P-521"}` - generates an `"EC"` key using the `"P-256"`, `"P-384"`, or `"P-521"` curves
* `{:oct, bytes}` - generates an `"oct"` key made of a random `bytes` number of bytes
* `{:okp, :Ed25519 | :Ed25519ph | :Ed448 | :Ed448ph | :X25519 | :X448}` - generates an `"OKP"` key using the specified EdDSA or ECDH edwards curve
* `{:rsa, modulus_size} | {:rsa, modulus_size, exponent_size}` - generates an `"RSA"` key using the `modulus_size` and `exponent_size`
"""
def generate_key(jwk=%JOSE.JWK{}), do: jwk |> to_record() |> generate_key()
def generate_key(parameters), do: :jose_jwk.generate_key(parameters) |> from_record()
@doc """
Merges map on right into map on left.
"""
def merge(left=%JOSE.JWK{}, right), do: merge(left |> to_record(), right)
def merge(left, right=%JOSE.JWK{}), do: merge(left, right |> to_record())
def merge(left, right), do: :jose_jwk.merge(left, right) |> from_record()
@doc """
Computes the shared secret between two keys. Currently only works for `"EC"` keys and `"OKP"` keys with `"crv"` set to `"X25519"` or `"X448"`.
"""
def shared_secret(your_jwk=%JOSE.JWK{}, my_jwk), do: shared_secret(to_record(your_jwk), my_jwk)
def shared_secret(your_jwk, my_jwk=%JOSE.JWK{}), do: shared_secret(your_jwk, to_record(my_jwk))
def shared_secret(your_jwk, my_jwk), do: :jose_jwk.shared_secret(your_jwk, my_jwk)
@doc """
Signs the `plain_text` using the `jwk` and the default signer algorithm `jws` for the key type. See `sign/3`.
"""
def sign(plain_text, jwk=%JOSE.JWK{}), do: sign(plain_text, to_record(jwk))
def sign(plain_text, key_list) when is_list(key_list) do
keys = for key <- key_list, into: [] do
case key do
%JOSE.JWK{} ->
JOSE.JWK.to_record(key)
_ ->
key
end
end
:jose_jwk.sign(plain_text, keys)
end
def sign(plain_text, jwk), do: :jose_jwk.sign(plain_text, jwk)
@doc """
Signs the `plain_text` using the `jwk` and the algorithm specified by the `jws`. See `JOSE.JWS.sign/3`.
"""
def sign(plain_text, jws=%JOSE.JWS{}, jwk), do: sign(plain_text, JOSE.JWS.to_record(jws), jwk)
def sign(plain_text, jws, jwk=%JOSE.JWK{}), do: sign(plain_text, jws, to_record(jwk))
def sign(plain_text, signer_list, key_list) when is_list(signer_list) and is_list(key_list) and length(signer_list) === length(key_list) do
signers = for signer <- signer_list, into: [] do
case signer do
%JOSE.JWS{} ->
JOSE.JWS.to_record(signer)
_ ->
signer
end
end
keys = for key <- key_list, into: [] do
case key do
%JOSE.JWK{} ->
JOSE.JWK.to_record(key)
_ ->
key
end
end
:jose_jwk.sign(plain_text, signers, keys)
end
def sign(plain_text, jws, key_list) when is_list(key_list) and not is_list(jws) do
keys = for key <- key_list, into: [] do
case key do
%JOSE.JWK{} ->
JOSE.JWK.to_record(key)
_ ->
key
end
end
:jose_jwk.sign(plain_text, jws, keys)
end
def sign(plain_text, jws, jwk), do: :jose_jwk.sign(plain_text, jws, jwk)
@doc """
Returns a signer map for the key type.
"""
def signer(list) when is_list(list), do: for element <- list, into: [], do: signer(element)
def signer(jwk=%JOSE.JWK{}), do: signer(to_record(jwk))
def signer(jwk), do: :jose_jwk.signer(jwk)
@doc """
Returns the unique thumbprint for a `JOSE.JWK` using the `:sha256` digest type. See `thumbprint/2`.
"""
def thumbprint(list) when is_list(list), do: for element <- list, into: [], do: thumbprint(element)
def thumbprint(jwk=%JOSE.JWK{}), do: thumbprint(to_record(jwk))
def thumbprint(jwk), do: :jose_jwk.thumbprint(jwk)
@doc """
Returns the unique thumbprint for a `JOSE.JWK` using the `digest_type`.
# let's define two different keys that will have the same thumbprint
jwk1 = JOSE.JWK.from_oct("secret")
jwk2 = JOSE.JWK.from(%{ "use" => "sig", "k" => "c2VjcmV0", "kty" => "oct" })
iex> JOSE.JWK.thumbprint(jwk1)
"DWBh0SEIAPYh1x5uvot4z3AhaikHkxNJa3Ada2fT-Cg"
iex> JOSE.JWK.thumbprint(jwk2)
"<KEY>"
iex> JOSE.JWK.thumbprint(:md5, jwk1)
"Kldz8k5PQm7y1E3aNBlMiA"
iex> JOSE.JWK.thumbprint(:md5, jwk2)
"Kldz8k5PQm7y1E3aNBlMiA"
See JSON Web Key (JWK) Thumbprint [RFC 7638](https://tools.ietf.org/html/rfc7638) for more information.
"""
def thumbprint(digest_type, list) when is_list(list), do: for element <- list, into: [], do: thumbprint(digest_type, element)
def thumbprint(digest_type, jwk=%JOSE.JWK{}), do: thumbprint(digest_type, to_record(jwk))
def thumbprint(digest_type, jwk), do: :jose_jwk.thumbprint(digest_type, jwk)
@doc """
Returns a verifier algorithm list for the key type.
"""
def verifier(list) when is_list(list), do: for element <- list, into: [], do: verifier(element)
def verifier(jwk=%JOSE.JWK{}), do: verifier(to_record(jwk))
def verifier(jwk), do: :jose_jwk.verifier(jwk)
@doc """
Verifies the `signed` using the `jwk`. See `JOSE.JWS.verify_strict/3`.
"""
def verify(signed, jwk=%JOSE.JWK{}), do: verify(signed, to_record(jwk))
def verify(signed, jwk=[%JOSE.JWK{} | _]) do
verify(signed, for k <- jwk do
case k do
%JOSE.JWK{} ->
JOSE.JWK.to_record(k)
_ ->
k
end
end)
end
def verify(signed, jwk) do
try do
case :jose_jwk.verify(signed, jwk) do
{verified, payload, jws} when is_tuple(jws) ->
{verified, payload, JOSE.JWS.from_record(jws)}
list when is_list(list) ->
for {jwk, verifications} <- list do
{JOSE.JWK.from_record(jwk), Enum.map(verifications, fn
{verified, jwt, jws} when is_tuple(jwt) and is_tuple(jws) ->
{verified, from_record(jwt), JOSE.JWS.from_record(jws)}
other ->
other
end)}
end
error ->
error
end
catch
class, reason ->
{class, reason}
end
end
@doc """
Verifies the `signed` using the `jwk` and whitelists the `"alg"` using `allow`. See `JOSE.JWS.verify/2`.
"""
def verify_strict(signed, allow, jwk=%JOSE.JWK{}), do: verify_strict(signed, allow, to_record(jwk))
def verify_strict(signed, allow, jwk=[%JOSE.JWK{} | _]) do
verify_strict(signed, allow, for k <- jwk do
case k do
%JOSE.JWK{} ->
JOSE.JWK.to_record(k)
_ ->
k
end
end)
end
def verify_strict(signed, allow, jwk) do
try do
case :jose_jwk.verify_strict(signed, allow, jwk) do
{verified, payload, jws} when is_tuple(jws) ->
{verified, payload, JOSE.JWS.from_record(jws)}
list when is_list(list) ->
for {jwk, verifications} <- list do
{JOSE.JWK.from_record(jwk), Enum.map(verifications, fn
{verified, jwt, jws} when is_tuple(jwt) and is_tuple(jws) ->
{verified, from_record(jwt), JOSE.JWS.from_record(jws)}
other ->
other
end)}
end
error ->
error
end
catch
class, reason ->
{class, reason}
end
end
end
|
backend/deps/jose/lib/jose/jwk.ex
| 0.826292
| 0.421195
|
jwk.ex
|
starcoder
|
defmodule IRC.Server do
  use GenServer
  require Logger

  # ===========================================================================
  # Internal API (GenServer callbacks)
  # ===========================================================================

  @impl true
  def init(state) do
    {:ok, state}
  end

  @doc """
  Process's state:
  ```
  %{
    clients: %{
      [nickname]: [client pid]
    },
    channels: %{
      [name]: [data struct]
    },
  }
  ```
  """
  def start_link(_) do
    Logger.info("Starting server")
    GenServer.start_link(__MODULE__, %{clients: %{}, channels: %{}}, name: __MODULE__)
  end

  # Dispatch a parsed command to its handler module (IRC.Commands.<Suffix>)
  # in a separate Task so a slow/crashing command never blocks the server.
  @impl true
  def handle_cast({:command, client_state, command, parameters}, state) do
    Logger.debug("Starting processing of command #{command}")
    {_, module_suffix, _} = IRC.Parsers.Message.Commands.matching_value(command)

    Task.start(fn ->
      atom_name = "Elixir.IRC.Commands.#{module_suffix}"

      try do
        # to_existing_atom avoids creating atoms from input; it raises
        # ArgumentError when no such handler module was ever compiled.
        apply(String.to_existing_atom(atom_name), :run, [
          parameters,
          client_state,
          state
        ])
      rescue
        _e in ArgumentError ->
          Logger.warn("Could not find module & function for: #{atom_name}")
      end
    end)

    {:noreply, state}
  end

  # Send `message` to every connected client's socket.
  @impl true
  def handle_cast({:broadcast, message}, state) do
    Logger.debug("Broadcasting message: #{message}")

    Enum.each(Map.values(state.clients), fn client_pid ->
      client_state = IRC.ClientConnection.get_state(client_pid)
      IRC.ClientConnection.send_to_client(client_state.socket, message)
    end)

    {:noreply, state}
  end

  # Get the state
  @impl true
  def handle_call(:get_state, _from, state) do
    {:reply, state, state}
  end

  # Store a client pid in the state
  @impl true
  def handle_call({:connect_client, client_pid, nickname}, _from, state) do
    new_clients = Map.put(state.clients, nickname, client_pid)
    new_state = %{state | clients: new_clients}
    Logger.info("Server state updated with new client #{nickname}")
    {:reply, :ok, new_state}
  end

  # Change a client's nickname in the state
  @impl true
  def handle_call({:change_nickname, from, to}, _from, state) do
    {client_pid, new_clients} = Map.pop!(state.clients, from)
    new_clients = Map.put(new_clients, to, client_pid)
    new_state = %{state | clients: new_clients}
    Logger.debug("Server state updated with client rename #{from} -> #{to}")
    {:reply, :ok, new_state}
  end

  # Remove a client from the state.
  # Fix: Enum.find/2 returns nil when the pid is unknown (e.g. the client was
  # already forgotten); previously that nil was destructured as {nick, _} and
  # crashed the server with a MatchError. Treat it as a no-op instead.
  @impl true
  def handle_call({:forget_client, client_pid}, _from, state) do
    case Enum.find(state.clients, fn {_, pid} -> pid == client_pid end) do
      nil ->
        Logger.debug("Ignoring forget_client for unknown pid #{inspect(client_pid)}")
        {:reply, :ok, state}

      {nick_to_remove, _} ->
        Logger.debug("Removing \"#{nick_to_remove}\" from server state")
        new_clients = Map.delete(state.clients, nick_to_remove)
        {:reply, :ok, %{state | clients: new_clients}}
    end
  end

  # ===========================================================================
  # Public API
  # ===========================================================================

  @doc """
  Send a command to the "server". The message has already
  reached the server at this point, but this function is for
  having the server handle the command that's
  1. reached the server from the user's client, and
  2. been processed into a valid command and parameters.
  """
  @spec send_command(
          client_state :: map(),
          command :: String.t(),
          parameters :: list()
        ) :: :ok
  def send_command(client_state, command, parameters) do
    GenServer.cast(__MODULE__, {:command, client_state, command, parameters})
  end

  @doc """
  Get the server's stored state.
  """
  @spec get_state() :: map()
  def get_state() do
    GenServer.call(__MODULE__, :get_state)
  end

  @doc """
  Connect a client process to this server.
  """
  @spec connect_client(client_pid :: pid(), nickname :: String.t()) :: :ok
  def connect_client(client_pid, nickname) do
    GenServer.call(__MODULE__, {:connect_client, client_pid, nickname})
  end

  @doc """
  Change a client's nickname.
  """
  @spec change_nickname(from :: String.t(), to :: String.t()) :: :ok
  def change_nickname(from, to) do
    GenServer.call(__MODULE__, {:change_nickname, from, to})
  end

  @doc """
  Remove a client from the state. Unknown pids are ignored.
  """
  @spec forget_client(client_pid :: pid()) :: :ok
  def forget_client(client_pid) do
    GenServer.call(__MODULE__, {:forget_client, client_pid})
  end

  @doc """
  Broadcast a message to all connected clients.
  """
  @spec broadcast_message(message :: String.t()) :: :ok
  def broadcast_message(message) do
    GenServer.cast(__MODULE__, {:broadcast, message})
  end
end
|
lib/server.ex
| 0.646572
| 0.498962
|
server.ex
|
starcoder
|
defmodule P3 do
@moduledoc """
## Examples
iex> P3.solve(5)
4
iex> P3.solve(11)
9
iex> P3.solve(4)
-1
"""
  # Reads one integer from stdin, solves it, and prints the result.
  def main do
    IO.read(:line) |> String.trim() |> String.to_integer() |> solve() |> IO.puts()
  end
  defmodule Heap do
    # A comparator-ordered binary heap.  `data` is `nil` (empty) or a
    # size-annotated node tuple `{size, value, left, right}` where `left` and
    # `right` are sub-heaps of the same shape.  `comparator.(a, b)` returns
    # true when `a` should leave the heap before `b`.
    defstruct data: nil, comparator: nil
    # Builds an empty heap ordered by `comparator`.
    def new(comparator), do: %__MODULE__{comparator: comparator}
    def empty?(%__MODULE__{data: nil}), do: true
    def empty?(%__MODULE__{}), do: false
    # O(1): the element count is cached in every node tuple.
    def size(%__MODULE__{data: nil}), do: 0
    def size(%__MODULE__{data: {size, _value, _left, _right}}), do: size
    # Returns the next element without removing it (`nil` when empty).
    def top(%__MODULE__{data: nil}), do: nil
    def top(%__MODULE__{data: {_size, value, _left, _right}}), do: value
    # Removes the top element; popping an empty heap is a no-op.
    def pop(%__MODULE__{data: data, comparator: comp} = heap) do
      %{ heap | data: do_pop(comp, data)}
    end
    defp do_pop(_comparator, nil), do: nil
    # Replaces the root's value with the winning child's value, then
    # recursively deletes that value from the winning child's subtree.
    # `swap_on_pop/3` orders the children so the winner is always first.
    defp do_pop(comparator, {size, _v0, left, right}) do
      with nil <- swap_on_pop(comparator, left, right) do
        nil
      else
        {v1, left, right} ->
          {size - 1, v1, do_pop(comparator, left), right}
      end
    end
    # Returns `{winning_value, winning_subtree, other_subtree}`, or `nil`
    # when both children are missing (the removed node was a leaf).
    defp swap_on_pop(comparator, left, right)
    defp swap_on_pop(_comparator, nil, nil), do: nil
    defp swap_on_pop(_comparator, left, nil), do: {elem(left, 1), left, nil}
    defp swap_on_pop(_comparator, nil, right), do: {elem(right, 1), right, nil}
    defp swap_on_pop(comparator, left, right),
      do: if comparator.(elem(left, 1), elem(right, 1)),
        do: {elem(left, 1), left, right},
        else: {elem(right,1), right, left}
    # Inserts `value` while preserving the heap order.
    def push(%__MODULE__{data: data, comparator: comp} = heap, value) do
      %{
        heap |
        data: do_push(value, comp, data)
      }
    end
    defp do_push(value, comparator, data \\ nil)
    defp do_push(v0, _comparator, nil), do: {1, v0, nil, nil}
    # At each node the comparator-winner of (incoming value, node value)
    # stays here and the loser sinks: into an empty slot if one exists,
    # otherwise into the smaller subtree so the tree stays roughly balanced.
    defp do_push(v0, comparator, {size, v1, nil, nil}) do
      {v0, v1} = swap_on_push(v0, v1, comparator)
      {size + 1, v0, do_push(v1, comparator), nil}
    end
    defp do_push(v0, comparator, {size, v1, left, nil}) do
      {v0, v1} = swap_on_push(v0, v1, comparator)
      {size + 1, v0, left, do_push(v1, comparator)}
    end
    defp do_push(v0, comparator, {size, v1, nil, right}) do
      {v0, v1} = swap_on_push(v0, v1, comparator)
      {size + 1, v0, do_push(v1, comparator), right}
    end
    defp do_push(v0, comparator, {size, v1, {ls, _, _, _} = left, {rs, _, _, _} = right}) do
      {v0, v1} = swap_on_push(v0, v1, comparator)
      if rs < ls do
        {size + 1, v0, left, do_push(v1, comparator, right)}
      else
        {size + 1, v0, do_push(v1, comparator, left), right}
      end
    end
    # Orders a pair so the comparator-winner comes first.
    defp swap_on_push(v0, v1, comparator) do
      if comparator.(v0, v1) do
        {v0, v1}
      else
        {v1, v0}
      end
    end
    # Collecting into a heap pushes each element.
    defimpl Collectable do
      def into(heap) do
        {
          heap,
          fn
            heap, {:cont, v} -> Heap.push(heap, v)
            heap, :done -> heap
            _heap, :halt -> :ok
          end
        }
      end
    end
    # Enumerating a heap repeatedly pops a local copy, yielding elements in
    # comparator order (a heap sort); the original heap value is unaffected.
    defimpl Enumerable do
      def count(heap), do: {:ok, Heap.size(heap)}
      def member?(_, _), do: {:error, __MODULE__}
      def slice(_), do: {:error, __MODULE__}
      def reduce(_heap, {:halt, acc}, _fun), do: {:halted, acc}
      def reduce(heap, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(heap, &1, fun)}
      def reduce(%Heap{data: nil}, {:cont, acc}, _fun), do: {:done, acc}
      def reduce(heap, {:cont, acc}, fun) do
        reduce(Heap.pop(heap), fun.(Heap.top(heap), acc), fun)
      end
    end
  end
  # Entry point of the search: greedily expands from the start state
  # `{step = 1, position = 1}`, then continues with a best-first search over
  # the remaining frontier, ordered by fewest steps (ties: larger position
  # first).  Returns the step count, or -1 when `n` is unreachable.
  def solve(n) do
    {sets, seen} = greedy({1, 1}, n)
    sets
    |> Enum.into(
      Heap.new(
        fn {s0, p0}, {s1, p1} ->
          s0 < s1 || (s0 == s1 && p0 > p1)
        end
      )
    )
    |> do_solve(n, seen)
  end
  # Depth-biased expansion: the heap prefers the state with the MOST steps
  # (note `s0 > s1`, the opposite of `solve/1`'s frontier ordering), so the
  # newest successors are expanded first.  Stops when the current top has no
  # unvisited successors and returns `{frontier_as_list, seen}`, where `seen`
  # maps position -> fewest steps recorded so far.
  defp greedy(seed, n, seen \\ %{}) do
    fn {s0, p0}, {s1, p1} ->
      s0 > s1 || (s0 == s1 && p0 > p1)
    end
    |> Heap.new()
    |> Heap.push(seed)
    |> do_greedy(n, seen)
  end
  # Expands the heap's top state while it has successors; once the top has
  # none left, dumps the heap contents (in heap order) together with `seen`.
  # Assumes the heap is non-empty: `greedy/3` always seeds it, and recursion
  # only continues after pushing a non-empty successor list.
  defp do_greedy(heap, n, seen) do
    with set <- Heap.top(heap),
         [] <- next(set, n, seen) do
      {Enum.to_list(heap), seen}
    else
      arr when is_list(arr) ->
        {step, position} = Heap.top(heap)
        arr
        |> Enum.into(Heap.pop(heap))
        |> do_greedy(n, Map.put(seen, position, step))
    end
  end
  # Best-first search loop over the frontier heap:
  #
  #   * empty frontier            -> no path exists, return -1
  #   * top is `{step, n}`        -> target reached, return `step`
  #   * exactly one successor     -> switch back to greedy expansion from it
  #   * otherwise                 -> push the successors and keep searching
  def do_solve(heap, n, seen \\ %{}) do
    with false <- Heap.empty?(heap),
         {step, position} when position == n <- Heap.top(heap) do
      step
    else
      true ->
        -1
      {step, position} = set ->
        with [seed] <- next(set, n, seen) do
          {sets, seen} = greedy(seed, n, Map.put(seen, position, step))
          sets
          |> Enum.into(Heap.pop(heap))
          |> do_solve(n, seen)
        else
          sets ->
            sets
            |> Enum.into(Heap.pop(heap))
            |> do_solve(n, Map.put(seen, position, step))
        end
    end
  end
defp next({_s, p}, n, _seen) when n <= p, do: []
defp next({s, p}, n, seen) do
p
|> to_displacement()
|> (fn d ->
[{s + 1, p - d}, {s + 1, p + d}]
|> Enum.filter(fn
{step, position} when 1 < position and position <= n ->
is_nil(seen[position]) || step < seen[position]
_ ->
false
end)
end).()
end
defp to_displacement(n), do: n |> Integer.digits(2) |> Enum.sum()
end
"""
defmodule Main do
def main do
IO.read(:line) |> String.trim() |> String.to_integer() |> solve() |> IO.puts()
end
defmodule Heap do
defstruct data: nil, comparator: nil
def new(comparator), do: %__MODULE__{comparator: comparator}
def empty?(%__MODULE__{data: nil}), do: true
def empty?(%__MODULE__{}), do: false
def top(%__MODULE__{data: nil}), do: nil
def top(%__MODULE__{data: {_size, value, _left, _right}}), do: value
def pop(%__MODULE__{data: data, comparator: comp} = heap) do
%{ heap | data: do_pop(comp, data)}
end
defp do_pop(_comparator, nil), do: nil
defp do_pop(comparator, {size, _v0, left, right}) do
with nil <- swap_on_pop(comparator, left, right) do
nil
else
{v1, left, right} ->
{size - 1, v1, do_pop(comparator, left), right}
end
end
defp swap_on_pop(comparator, left, right)
defp swap_on_pop(_comparator, nil, nil), do: nil
defp swap_on_pop(_comparator, left, nil), do: {elem(left, 1), left, nil}
defp swap_on_pop(_comparator, nil, right), do: {elem(right, 1), right, nil}
defp swap_on_pop(comparator, left, right),
do: if comparator.(elem(left, 1), elem(right, 1)),
do: {elem(left, 1), left, right},
else: {elem(right,1), right, left}
def push(%__MODULE__{data: data, comparator: comp} = heap, value) do
%{heap | data: do_push(value, comp, data)}
end
defp do_push(value, comparator, data \\ nil)
defp do_push(v0, _comparator, nil), do: {1, v0, nil, nil}
defp do_push(v0, comparator, {size, v1, nil, nil}) do
{v0, v1} = swap_on_push(v0, v1, comparator)
{size + 1, v0, do_push(v1, comparator), nil}
end
defp do_push(v0, comparator, {size, v1, left, nil}) do
{v0, v1} = swap_on_push(v0, v1, comparator)
{size + 1, v0, left, do_push(v1, comparator)}
end
defp do_push(v0, comparator, {size, v1, nil, right}) do
{v0, v1} = swap_on_push(v0, v1, comparator)
{size + 1, v0, do_push(v1, comparator), right}
end
defp do_push(v0, comparator, {size, v1, {ls, _, _, _} = left, {rs, _, _, _} = right}) do
{v0, v1} = swap_on_push(v0, v1, comparator)
if rs < ls do
{size + 1, v0, left, do_push(v1, comparator, right)}
else
{size + 1, v0, do_push(v1, comparator, left), right}
end
end
defp swap_on_push(v0, v1, comparator) do
if comparator.(v0, v1), do: {v0, v1}, else: {v1, v0}
end
defimpl Collectable do
def into(heap) do
{
heap,
fn
heap, {:cont, v} -> Heap.push(heap, v)
heap, :done -> heap
_heap, :halt -> :ok
end
}
end
end
defimpl Enumerable do
def count(heap), do: {:ok, Heap.size(heap)}
def member?(_, _), do: {:error, __MODULE__}
def slice(_), do: {:error, __MODULE__}
def reduce(_heap, {:halt, acc}, _fun), do: {:halted, acc}
def reduce(heap, {:suspend, acc}, fun), do: {:suspended, acc, &reduce(heap, &1, fun)}
def reduce(%Heap{data: nil}, {:cont, acc}, _fun), do: {:done, acc}
def reduce(heap, {:cont, acc}, fun) do
reduce(Heap.pop(heap), fun.(Heap.top(heap), acc), fun)
end
end
end
def solve(n) do
{sets, seen} = greedy({1, 1}, n)
sets
|> Enum.into(
Heap.new(
fn {s0, p0}, {s1, p1} ->
s0 < s1 || (s0 == s1 && p0 > p1)
end
)
)
|> do_solve(n, seen)
end
defp greedy(seed, n, seen \\ %{}) do
fn {s0, p0}, {s1, p1} ->
s0 > s1 || (s0 == s1 && p0 > p1)
end
|> Heap.new()
|> Heap.push(seed)
|> do_greedy(n, seen)
end
defp do_greedy(heap, n, seen) do
with set <- Heap.top(heap),
[] <- next(set, n, seen) do
{Enum.to_list(heap), seen}
else
arr when is_list(arr) ->
{step, position} = Heap.top(heap)
arr
|> Enum.into(Heap.pop(heap))
|> do_greedy(n, Map.put(seen, position, step))
end
end
def do_solve(heap, n, seen \\ %{}) do
with false <- Heap.empty?(heap),
{step, position} when position == n <- Heap.top(heap) do
step
else
true ->
-1
{step, position} = set ->
with [seed] <- next(set, n, seen) do
{sets, seen} = greedy(seed, n, Map.put(seen, position, step))
sets
|> Enum.into(Heap.pop(heap))
|> do_solve(n, seen)
else
sets ->
sets
|> Enum.into(Heap.pop(heap))
|> do_solve(n, Map.put(seen, position, step))
end
end
end
defp next({_s, p}, n, _seen) when n <= p, do: []
defp next({s, p}, n, seen) do
p
|> to_displacement()
|> (fn d ->
[{s + 1, p - d}, {s + 1, p + d}]
|> Enum.filter(fn
{step, position} when 1 < position and position <= n ->
is_nil(seen[position]) || step < seen[position]
_ ->
false
end)
end).()
end
defp to_displacement(n), do: n |> Integer.digits(2) |> Enum.sum()
end
"""
|
lib/100/p3.ex
| 0.754644
| 0.561876
|
p3.ex
|
starcoder
|
defmodule Homework.Transactions do
@moduledoc """
The Transactions context.
"""
import Ecto.Query, warn: false
import Paginator
alias Homework.Repo
alias Homework.Transactions.Transaction
@doc """
Returns the list of transactions.
## Examples
iex> list_transactions([])
[%Transaction{}, ...]
"""
@spec list_transactions(any) :: [Transaction]
def list_transactions(_args) do
Repo.all(Transaction)
end
@doc """
Gets paginated list of transactions
"""
@spec list_transactions_paginated(Ecto.Query, integer, integer) :: [Transaction]
def list_transactions_paginated(_args, limit, skip) do
Paginator.paginate((from t in Transaction), limit, skip)
end
@doc """
Gets a single transaction.
Raises `Ecto.NoResultsError` if the Transaction does not exist.
## Examples
iex> get_transaction!(123)
%Transaction{}
iex> get_transaction!(456)
** (Ecto.NoResultsError)
"""
@spec get_transaction!(String.t) :: Transaction | Ecto.NoResultsError
def get_transaction!(id), do: Repo.get!(Transaction, id)
# TODO method to get all transactions for a merchant, user, etc
# TODO parameter to choose to query for inserted_at or updated_at (macro?)
@doc """
Gets all transactions where inserted_at field is between a passed start and end date time
Start and end date times are in Naive DateTime format
"""
@spec get_transactions_time_range(NaiveDateTime, NaiveDateTime) :: [Transaction]
def get_transactions_time_range(start_date_time, end_date_time) do
query = from t in Transaction,
where: t.inserted_at >= ^start_date_time,
where: t.inserted_at < ^end_date_time
Repo.all(query)
end
@doc """
Get all transactions with amount between range (decimal notation dollars.cents)
## Examples
iex> get_transactions_amount_range(0.41, 0.45)
[%Transaction{...amount: 0.43...}]
"""
@spec get_transactions_amount_range(integer, integer) :: [Transaction]
# When I added the ecto type for converted amounts I expected to need to convert min and max in this query.
# I was surprised to find out that ecto will convert the min and max based on my defined dump methods, so I don't need
# to have multiple conversion methods - super cool!
def get_transactions_amount_range(min, max) do
query = from t in Transaction,
where: t.amount >= ^min,
where: t.amount < ^max
Repo.all(query)
end
@doc """
Creates a transaction.
## Examples
iex> create_transaction(%{field: value})
{:ok, %Transaction{}}
iex> create_transaction(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec create_transaction(Transaction) :: Transaction
def create_transaction(attrs \\ %{}) do
%Transaction{}
|> Transaction.changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a transaction.
## Examples
iex> update_transaction(transaction, %{field: new_value})
{:ok, %Transaction{}}
iex> update_transaction(transaction, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
@spec update_transaction(Transaction, %{}) :: {:ok, Transaction} | {:error, Ecto.Changeset}
def update_transaction(%Transaction{} = transaction, attrs) do
transaction
|> Transaction.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a transaction.
## Examples
iex> delete_transaction(transaction)
{:ok, %Transaction{}}
iex> delete_transaction(transaction)
{:error, %Ecto.Changeset{}}
"""
@spec delete_transaction(Transaction) :: {:ok, Transaction} | {:error, Ecto.Changeset}
def delete_transaction(%Transaction{} = transaction) do
Repo.delete(transaction)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking transaction changes.
## Examples
iex> change_transaction(transaction)
%Ecto.Changeset{data: %Transaction{}}
"""
@spec change_transaction(Transaction, %{}) :: Ecto.Changeset
def change_transaction(%Transaction{} = transaction, attrs \\ %{}) do
Transaction.changeset(transaction, attrs)
end
end
|
elixir/lib/homework/transactions.ex
| 0.741019
| 0.403097
|
transactions.ex
|
starcoder
|
defmodule QueueWrapper do
@type t() :: :queue.queue()
@moduledoc """
Elixir bindings to the erlang queue library with
a few additions, such as reduce & a means to do
equality. See the [erlang docs][docs] for more info
on the module.
In some cases function names have been given more
explict names, such as `in`, `out`, `drop`, & `len`.
Personally I find the name names such as `in` & `in_r`
unhelpful, when `in` is rear but `in_r` is the one with
the `r` suffix. But for `out`, the operation on the rear
has the suffix... There's likely a method to the madness
of those methods names, but they're personally unhelpful
for me.
[docs]: http://erlang.org/doc/man/queue.html
"""
defdelegate filter(fun, queue), to: :queue
defdelegate from_list(list), to: :queue
defdelegate is_empty(queue), to: :queue
defdelegate is_queue(term), to: :queue
defdelegate join(q1, q2), to: :queue
defdelegate member(item, queue), to: :queue
defdelegate new, to: :queue
defdelegate reverse(queue), to: :queue
defdelegate split(n, queue), to: :queue
defdelegate to_list(queue), to: :queue
defdelegate length(queue), to: :queue, as: :len
defdelegate in_rear(item, queue), to: :queue, as: :in
defdelegate in_front(item, queue), to: :queue, as: :in_r
defdelegate out_rear(queue), to: :queue, as: :out_r
defdelegate out_front(queue), to: :queue, as: :out
defdelegate drop_rear(queue), to: :queue, as: :drop_r
defdelegate drop_front(queue), to: :queue, as: :drop
defdelegate get_rear(queue), to: :queue, as: :get_r
defdelegate get_front(queue), to: :queue, as: :get
defdelegate peek_rear(queue), to: :queue, as: :peek_r
defdelegate peek_front(queue), to: :queue, as: :peek
  # Note: the function `lait/1` is not delegated here because it is
  # deprecated (a historical misspelling of `liat/1`) and will be removed
  # in a future release; use `liat/1` (delegated below) instead.
defdelegate cons(item, queue), to: :queue
defdelegate init(queue), to: :queue
defdelegate last(queue), to: :queue
defdelegate liat(queue), to: :queue
defdelegate snoc(queue, item), to: :queue
defdelegate tail(queue), to: :queue
def replace_at(items, index, value) do
{left, right} = :queue.split(index, items)
right = :queue.drop(right)
left = :queue.in(value, left)
:queue.join(left, right)
end
def reduce_while(items, state, transform) do
case :queue.len(items) do
0 ->
state
_ ->
{{:value, value}, items} = :queue.out(items)
case transform.(value, state) do
{:cont, state} -> reduce_while(items, state, transform)
{:halt, state} -> state
end
end
end
def reduce(items, state, transform) do
case :queue.len(items) do
0 ->
state
_ ->
{{:value, value}, items} = :queue.out(items)
state = transform.(value, state)
reduce(items, state, transform)
end
end
def equal(left, right) do
equal(left, right, fn a, b -> a == b end)
end
def equal(left, right, eq) do
left_len = :queue.len(left)
right_len = :queue.len(right)
if left_len == right_len do
value =
reduce_while(left, right, fn left_value, right ->
{{:value, right_value}, right} = :queue.out(right)
if eq.(left_value, right_value) do
{:cont, right}
else
{:halt, :error}
end
end)
case value do
:error -> false
_ -> true
end
else
false
end
end
end
|
lib/queue_wrapper.ex
| 0.789599
| 0.619399
|
queue_wrapper.ex
|
starcoder
|
if Enum.any?(Application.loaded_applications(), fn {dep_name, _, _} -> dep_name === :plug end) do
defmodule Stripe.WebhookPlug do
@moduledoc """
Helper `Plug` to process webhook events and send them to a custom handler.
## Installation
To handle webhook events, you must first configure your application's endpoint.
Add the following to `endpoint.ex`, **before** `Plug.Parsers` is loaded.
```elixir
plug Stripe.WebhookPlug,
at: "/webhook/stripe",
handler: MyAppWeb.StripeHandler,
secret: "whsec_******"
```
If you have not yet added a webhook to your Stripe account, you can do so
by visiting `Developers > Webhooks` in the Stripe dashboard. Use the route
you configured in the endpoint above and copy the webhook secret into your
app's configuration.
### Supported options
- `at`: The URL path your application should listen for Stripe webhooks on.
Configure this to match whatever you set in the webhook.
- `handler`: Custom event handler module that accepts `Stripe.Event` structs
and processes them within your application. You must create this module.
- `secret`: Webhook secret starting with `whsec_` obtained from the Stripe
dashboard. This can also be a function or a tuple for runtime configuration.
- `tolerance`: Maximum age (in seconds) allowed for the webhook event.
See `Stripe.Webhook.construct_event/4` for more information.
## Handling events
You will need to create a custom event handler module to handle events.
Your event handler module should implement the `Stripe.WebhookHandler`
behavior, defining a `handle_event/1` function which takes a `Stripe.Event`
struct and returns either `{:ok, term}` or `:ok`. This will mark the event as
successfully processed. Alternatively handler can signal an error by returning
`:error` or `{:error, reason}` tuple, where reason is an atom or a string.
HTTP status code 400 will be used for errors.
### Example
```elixir
# lib/myapp_web/stripe_handler.ex
defmodule MyAppWeb.StripeHandler do
@behaviour Stripe.WebhookHandler
@impl true
def handle_event(%Stripe.Event{type: "charge.succeeded"} = event) do
# TODO: handle the charge.succeeded event
end
@impl true
def handle_event(%Stripe.Event{type: "invoice.payment_failed"} = event) do
# TODO: handle the invoice.payment_failed event
end
# Return HTTP 200 for unhandled events
@impl true
def handle_event(_event), do: :ok
end
```
## Configuration
You can configure the webhook secret in your app's own config file.
For example:
```elixir
config :myapp,
# [...]
stripe_webhook_secret: "whsec_******"
```
You may then include the secret in your endpoint:
```elixir
plug Stripe.WebhookPlug,
at: "/webhook/stripe",
handler: MyAppWeb.StripeHandler,
secret: Application.get_env(:myapp, :stripe_webhook_secret)
```
### Runtime configuration
If you're loading config dynamically at runtime (eg with `runtime.exs`
or an OTP app) you must pass a tuple or function as the secret.
```elixir
# With a tuple
plug Stripe.WebhookPlug,
at: "/webhook/stripe",
handler: MyAppWeb.StripeHandler,
secret: {Application, :get_env, [:myapp, :stripe_webhook_secret]}
# Or, with a function
plug Stripe.WebhookPlug,
at: "/webhook/stripe",
handler: MyAppWeb.StripeHandler,
secret: fn -> Application.get_env(:myapp, :stripe_webhook_secret) end
```
"""
import Plug.Conn
alias Plug.Conn
@behaviour Plug
@impl true
def init(opts) do
path_info = String.split(opts[:at], "/", trim: true)
opts
|> Enum.into(%{})
|> Map.put_new(:path_info, path_info)
end
@impl true
def call(
%Conn{method: "POST", path_info: path_info} = conn,
%{
path_info: path_info,
secret: secret,
handler: handler
} = opts
) do
secret = parse_secret!(secret)
with [signature] <- get_req_header(conn, "stripe-signature"),
{:ok, payload, _} = Conn.read_body(conn),
{:ok, %Stripe.Event{} = event} <- construct_event(payload, signature, secret, opts),
:ok <- handle_event!(handler, event) do
send_resp(conn, 200, "Webhook received.") |> halt()
else
{:handle_error, reason} -> send_resp(conn, 400, reason) |> halt()
_ -> send_resp(conn, 400, "Bad request.") |> halt()
end
end
@impl true
def call(%Conn{path_info: path_info} = conn, %{path_info: path_info}) do
send_resp(conn, 400, "Bad request.") |> halt()
end
@impl true
def call(conn, _), do: conn
defp construct_event(payload, signature, secret, %{tolerance: tolerance}) do
Stripe.Webhook.construct_event(payload, signature, secret, tolerance)
end
defp construct_event(payload, signature, secret, _opts) do
Stripe.Webhook.construct_event(payload, signature, secret)
end
    # Invokes the user-supplied handler and normalizes its result:
    # `:ok` / `{:ok, _}` mean success; `:error` / `{:error, reason}` become
    # `{:handle_error, binary_reason}`, which call/2 turns into a 400
    # response.  Any other return value violates the WebhookHandler contract
    # and raises.
    defp handle_event!(handler, %Stripe.Event{} = event) do
      case handler.handle_event(event) do
        {:ok, _} ->
          :ok
        :ok ->
          :ok
        {:error, reason} when is_binary(reason) ->
          {:handle_error, reason}
        {:error, reason} when is_atom(reason) ->
          # Atom reasons are stringified so send_resp/3 gets a binary body.
          {:handle_error, Atom.to_string(reason)}
        :error ->
          {:handle_error, ""}
        resp ->
          raise """
          #{inspect(handler)}.handle_event/1 returned an invalid response. Expected {:ok, term}, :ok, {:error, reason} or :error
          Got: #{inspect(resp)}
          Event data: #{inspect(event)}
          """
      end
    end
defp parse_secret!({m, f, a}), do: apply(m, f, a)
defp parse_secret!(fun) when is_function(fun), do: fun.()
defp parse_secret!(secret) when is_binary(secret), do: secret
defp parse_secret!(secret) do
raise """
The Stripe webhook secret is invalid. Expected a string, tuple, or function.
Got: #{inspect(secret)}
If you're setting the secret at runtime, you need to pass a tuple or function.
For example:
plug Stripe.WebhookPlug,
at: "/webhook/stripe",
handler: MyAppWeb.StripeHandler,
secret: {Application, :get_env, [:myapp, :stripe_webhook_secret]}
"""
end
end
end
|
lib/stripe/webhook_plug.ex
| 0.717606
| 0.553686
|
webhook_plug.ex
|
starcoder
|
defmodule AWS.IoTSiteWise do
@moduledoc """
Welcome to the AWS IoT SiteWise API Reference. AWS IoT SiteWise is an AWS
service that connects [Industrial Internet of Things
(IIoT)](https://en.wikipedia.org/wiki/Internet_of_things#Industrial_applications)
devices to the power of the AWS Cloud. For more information, see the [AWS
IoT SiteWise User
Guide](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/). For
information about AWS IoT SiteWise quotas, see
[Quotas](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/quotas.html)
in the *AWS IoT SiteWise User Guide*.
"""
@doc """
Associates a child asset with the given parent asset through a hierarchy
defined in the parent asset's model. For more information, see [Associating
assets](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/add-associated-assets.html)
in the *AWS IoT SiteWise User Guide*.
"""
def associate_assets(client, asset_id, input, options \\ []) do
path_ = "/assets/#{URI.encode(asset_id)}/associate"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Associates a group (batch) of assets with an AWS IoT SiteWise Monitor
project.
"""
def batch_associate_project_assets(client, project_id, input, options \\ []) do
path_ = "/projects/#{URI.encode(project_id)}/assets/associate"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Disassociates a group (batch) of assets from an AWS IoT SiteWise Monitor
project.
"""
def batch_disassociate_project_assets(client, project_id, input, options \\ []) do
path_ = "/projects/#{URI.encode(project_id)}/assets/disassociate"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 200)
end
@doc """
Sends a list of asset property values to AWS IoT SiteWise. Each value is a
timestamp-quality-value (TQV) data point. For more information, see
[Ingesting data using the
API](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/ingest-api.html)
in the *AWS IoT SiteWise User Guide*.
To identify an asset property, you must specify one of the following:
<ul> <li> The `assetId` and `propertyId` of an asset property.
</li> <li> A `propertyAlias`, which is a data stream alias (for example,
`/company/windfarm/3/turbine/7/temperature`). To define an asset property's
alias, see
[UpdateAssetProperty](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_UpdateAssetProperty.html).
</li> </ul> <important> With respect to Unix epoch time, AWS IoT SiteWise
accepts only TQVs that have a timestamp of no more than 15 minutes in the
past and no more than 5 minutes in the future. AWS IoT SiteWise rejects
timestamps outside of the inclusive range of [-15, +5] minutes and returns
a `TimestampOutOfRangeException` error.
For each asset property, AWS IoT SiteWise overwrites TQVs with duplicate
timestamps unless the newer TQV has a different quality. For example, if
you store a TQV `{T1, GOOD, V1}`, then storing `{T1, GOOD, V2}` replaces
the existing TQV.
</important> AWS IoT SiteWise authorizes access to each
`BatchPutAssetPropertyValue` entry individually. For more information, see
[BatchPutAssetPropertyValue
authorization](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/security_iam_service-with-iam.html#security_iam_service-with-iam-id-based-policies-batchputassetpropertyvalue-action)
in the *AWS IoT SiteWise User Guide*.
"""
def batch_put_asset_property_value(client, input, options \\ []) do
path_ = "/properties"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Creates an access policy that grants the specified identity (AWS SSO user,
AWS SSO group, or IAM user) access to the specified AWS IoT SiteWise
Monitor portal or project resource.
"""
def create_access_policy(client, input, options \\ []) do
path_ = "/access-policies"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates an asset from an existing asset model. For more information, see
[Creating
assets](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/create-assets.html)
in the *AWS IoT SiteWise User Guide*.
"""
def create_asset(client, input, options \\ []) do
path_ = "/assets"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 202)
end
@doc """
Creates an asset model from specified property and hierarchy definitions.
You create assets from asset models. With asset models, you can easily
create assets of the same type that have standardized definitions. Each
asset created from a model inherits the asset model's property and
hierarchy definitions. For more information, see [Defining asset
models](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/define-models.html)
in the *AWS IoT SiteWise User Guide*.
"""
def create_asset_model(client, input, options \\ []) do
path_ = "/asset-models"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 202)
end
@doc """
Creates a dashboard in an AWS IoT SiteWise Monitor project.
"""
def create_dashboard(client, input, options \\ []) do
path_ = "/dashboards"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates a gateway, which is a virtual or edge device that delivers
industrial data streams from local servers to AWS IoT SiteWise. For more
information, see [Ingesting data using a
gateway](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/gateway-connector.html)
in the *AWS IoT SiteWise User Guide*.
"""
def create_gateway(client, input, options \\ []) do
path_ = "/20200301/gateways"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Creates a portal, which can contain projects and dashboards. AWS IoT
SiteWise Monitor uses AWS SSO or IAM to authenticate portal users and
manage user permissions.
<note> Before you can sign in to a new portal, you must add at least one
identity to that portal. For more information, see [Adding or removing
portal
administrators](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/administer-portals.html#portal-change-admins)
in the *AWS IoT SiteWise User Guide*.
</note>
"""
def create_portal(client, input, options \\ []) do
path_ = "/portals"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 202)
end
@doc """
Creates a pre-signed URL to a portal. Use this operation to create URLs to
portals that use AWS Identity and Access Management (IAM) to authenticate
users. An IAM user with access to a portal can call this API to get a URL
to that portal. The URL contains a session token that lets the IAM user
access the portal.
"""
def create_presigned_portal_url(client, portal_id, session_duration_seconds \\ nil, options \\ []) do
path_ = "/portals/#{URI.encode(portal_id)}/presigned-url"
headers = []
query_ = []
query_ = if !is_nil(session_duration_seconds) do
[{"sessionDurationSeconds", session_duration_seconds} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, 200)
end
@doc """
Creates a project in the specified portal.
"""
def create_project(client, input, options \\ []) do
path_ = "/projects"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, 201)
end
@doc """
Deletes an access policy that grants the specified identity access to the
specified AWS IoT SiteWise Monitor resource. You can use this operation to
revoke access to an AWS IoT SiteWise Monitor resource.
"""
def delete_access_policy(client, access_policy_id, input, options \\ []) do
path_ = "/access-policies/#{URI.encode(access_policy_id)}"
headers = []
{query_, input} =
[
{"clientToken", "clientToken"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Deletes an asset. This action can't be undone. For more information, see
[Deleting assets and
models](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/delete-assets-and-models.html)
in the *AWS IoT SiteWise User Guide*.
<note> You can't delete an asset that's associated to another asset. For
more information, see
[DisassociateAssets](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_DisassociateAssets.html).
</note>
"""
def delete_asset(client, asset_id, input, options \\ []) do
path_ = "/assets/#{URI.encode(asset_id)}"
headers = []
{query_, input} =
[
{"clientToken", "clientToken"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, 202)
end
@doc """
Deletes an asset model. This action can't be undone. You must delete all
assets created from an asset model before you can delete the model. Also,
you can't delete an asset model if a parent asset model exists that
contains a property formula expression that depends on the asset model that
you want to delete. For more information, see [Deleting assets and
models](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/delete-assets-and-models.html)
in the *AWS IoT SiteWise User Guide*.
"""
def delete_asset_model(client, asset_model_id, input, options \\ []) do
  # Lift the optional "clientToken" field from the input body into the query string.
  {query_, body} = AWS.Request.build_params([{"clientToken", "clientToken"}], input)
  request(client, :delete, "/asset-models/#{URI.encode(asset_model_id)}", query_, [], body, options, 202)
end
@doc """
Deletes a dashboard from AWS IoT SiteWise Monitor.
"""
def delete_dashboard(client, dashboard_id, input, options \\ []) do
  # Lift the optional "clientToken" field from the input body into the query string.
  {query_, body} = AWS.Request.build_params([{"clientToken", "clientToken"}], input)
  request(client, :delete, "/dashboards/#{URI.encode(dashboard_id)}", query_, [], body, options, 204)
end
@doc """
Deletes a gateway from AWS IoT SiteWise. When you delete a gateway, some of
the gateway's files remain in your gateway's file system.
"""
def delete_gateway(client, gateway_id, input, options \\ []) do
  # Success status nil: any of 200/202/204 is accepted by perform_request/7.
  request(client, :delete, "/20200301/gateways/#{URI.encode(gateway_id)}", [], [], input, options, nil)
end
@doc """
Deletes a portal from AWS IoT SiteWise Monitor.
"""
def delete_portal(client, portal_id, input, options \\ []) do
  path_ = "/portals/#{URI.encode(portal_id)}"
  headers = []
  # Fix: the input-field name was corrupted to "<PASSWORD>Token" by an
  # anonymization pass. The SiteWise DeletePortal operation takes a
  # "clientToken" idempotency token, mapped query-key -> input-field exactly
  # like every other delete operation in this module.
  {query_, input} =
    [
      {"clientToken", "clientToken"},
    ]
    |> AWS.Request.build_params(input)
  request(client, :delete, path_, query_, headers, input, options, 202)
end
@doc """
Deletes a project from AWS IoT SiteWise Monitor.
"""
def delete_project(client, project_id, input, options \\ []) do
  path_ = "/projects/#{URI.encode(project_id)}"
  headers = []
  # Fix: the input-field name was corrupted to "<PASSWORD>Token" by an
  # anonymization pass. The SiteWise DeleteProject operation takes a
  # "clientToken" idempotency token, mapped query-key -> input-field exactly
  # like every other delete operation in this module.
  {query_, input} =
    [
      {"clientToken", "clientToken"},
    ]
    |> AWS.Request.build_params(input)
  request(client, :delete, path_, query_, headers, input, options, 204)
end
@doc """
Describes an access policy, which specifies an identity's access to an AWS
IoT SiteWise Monitor portal or project.
"""
def describe_access_policy(client, access_policy_id, options \\ []) do
  # Plain GET, no query parameters, expects 200.
  request(client, :get, "/access-policies/#{URI.encode(access_policy_id)}", [], [], nil, options, 200)
end
@doc """
Retrieves information about an asset.
"""
def describe_asset(client, asset_id, options \\ []) do
  # Plain GET, no query parameters; any 200/202/204 accepted.
  request(client, :get, "/assets/#{URI.encode(asset_id)}", [], [], nil, options, nil)
end
@doc """
Retrieves information about an asset model.
"""
def describe_asset_model(client, asset_model_id, options \\ []) do
  # Plain GET, no query parameters; any 200/202/204 accepted.
  request(client, :get, "/asset-models/#{URI.encode(asset_model_id)}", [], [], nil, options, nil)
end
@doc """
Retrieves information about an asset property.
<note> When you call this operation for an attribute property, this
response includes the default attribute value that you define in the asset
model. If you update the default value in the model, this operation's
response includes the new default value.
</note> This operation doesn't return the value of the asset property. To
get the value of an asset property, use
[GetAssetPropertyValue](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_GetAssetPropertyValue.html).
"""
def describe_asset_property(client, asset_id, property_id, options \\ []) do
  # Both path segments are URI-encoded independently.
  path = "/assets/#{URI.encode(asset_id)}/properties/#{URI.encode(property_id)}"
  request(client, :get, path, [], [], nil, options, nil)
end
@doc """
Retrieves information about a dashboard.
"""
def describe_dashboard(client, dashboard_id, options \\ []) do
  # Plain GET, no query parameters, expects 200.
  request(client, :get, "/dashboards/#{URI.encode(dashboard_id)}", [], [], nil, options, 200)
end
@doc """
Retrieves information about a gateway.
"""
def describe_gateway(client, gateway_id, options \\ []) do
  # Gateway routes live under the date-versioned /20200301 prefix.
  request(client, :get, "/20200301/gateways/#{URI.encode(gateway_id)}", [], [], nil, options, nil)
end
@doc """
Retrieves information about a gateway capability configuration. Each
gateway capability defines data sources for a gateway. A capability
configuration can contain multiple data source configurations. If you
define OPC-UA sources for a gateway in the AWS IoT SiteWise console, all of
your OPC-UA sources are stored in one capability configuration. To list all
capability configurations for a gateway, use
[DescribeGateway](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_DescribeGateway.html).
"""
def describe_gateway_capability_configuration(client, capability_namespace, gateway_id, options \\ []) do
  # Gateway id comes first in the path even though the namespace is the
  # first function argument.
  path = "/20200301/gateways/#{URI.encode(gateway_id)}/capability/#{URI.encode(capability_namespace)}"
  request(client, :get, path, [], [], nil, options, nil)
end
@doc """
Retrieves the current AWS IoT SiteWise logging options.
"""
def describe_logging_options(client, options \\ []) do
  # Account-level resource: fixed path, no parameters.
  request(client, :get, "/logging", [], [], nil, options, nil)
end
@doc """
Retrieves information about a portal.
"""
def describe_portal(client, portal_id, options \\ []) do
  # Plain GET, no query parameters, expects 200.
  request(client, :get, "/portals/#{URI.encode(portal_id)}", [], [], nil, options, 200)
end
@doc """
Retrieves information about a project.
"""
def describe_project(client, project_id, options \\ []) do
  # Plain GET, no query parameters, expects 200.
  request(client, :get, "/projects/#{URI.encode(project_id)}", [], [], nil, options, 200)
end
@doc """
Disassociates a child asset from the given parent asset through a hierarchy
defined in the parent asset's model.
"""
def disassociate_assets(client, asset_id, input, options \\ []) do
  # The child asset / hierarchy to detach is described in the POST body.
  request(client, :post, "/assets/#{URI.encode(asset_id)}/disassociate", [], [], input, options, nil)
end
@doc """
Gets aggregated values for an asset property. For more information, see
[Querying
aggregates](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/query-industrial-data.html#aggregates)
in the *AWS IoT SiteWise User Guide*.
To identify an asset property, you must specify one of the following:
<ul> <li> The `assetId` and `propertyId` of an asset property.
</li> <li> A `propertyAlias`, which is a data stream alias (for example,
`/company/windfarm/3/turbine/7/temperature`). To define an asset property's
alias, see
[UpdateAssetProperty](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_UpdateAssetProperty.html).
</li> </ul>
"""
def get_asset_property_aggregates(client, aggregate_types, asset_id \\ nil, end_date, max_results \\ nil, next_token \\ nil, property_alias \\ nil, property_id \\ nil, qualities \\ nil, resolution, start_date, time_ordering \\ nil, options \\ []) do
  # Prepend each present parameter; the scan order below reproduces the
  # query-string ordering of the original generated code.
  query_ =
    [
      {"timeOrdering", time_ordering},
      {"startDate", start_date},
      {"resolution", resolution},
      {"qualities", qualities},
      {"propertyId", property_id},
      {"propertyAlias", property_alias},
      {"nextToken", next_token},
      {"maxResults", max_results},
      {"endDate", end_date},
      {"assetId", asset_id},
      {"aggregateTypes", aggregate_types}
    ]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/properties/aggregates", query_, [], nil, options, nil)
end
@doc """
Gets an asset property's current value. For more information, see [Querying
current
values](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/query-industrial-data.html#current-values)
in the *AWS IoT SiteWise User Guide*.
To identify an asset property, you must specify one of the following:
<ul> <li> The `assetId` and `propertyId` of an asset property.
</li> <li> A `propertyAlias`, which is a data stream alias (for example,
`/company/windfarm/3/turbine/7/temperature`). To define an asset property's
alias, see
[UpdateAssetProperty](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_UpdateAssetProperty.html).
</li> </ul>
"""
def get_asset_property_value(client, asset_id \\ nil, property_alias \\ nil, property_id \\ nil, options \\ []) do
  # Prepend each present parameter; scan order preserves the original
  # query-string ordering.
  query_ =
    [
      {"propertyId", property_id},
      {"propertyAlias", property_alias},
      {"assetId", asset_id}
    ]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/properties/latest", query_, [], nil, options, nil)
end
@doc """
Gets the history of an asset property's values. For more information, see
[Querying historical
values](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/query-industrial-data.html#historical-values)
in the *AWS IoT SiteWise User Guide*.
To identify an asset property, you must specify one of the following:
<ul> <li> The `assetId` and `propertyId` of an asset property.
</li> <li> A `propertyAlias`, which is a data stream alias (for example,
`/company/windfarm/3/turbine/7/temperature`). To define an asset property's
alias, see
[UpdateAssetProperty](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_UpdateAssetProperty.html).
</li> </ul>
"""
def get_asset_property_value_history(client, asset_id \\ nil, end_date \\ nil, max_results \\ nil, next_token \\ nil, property_alias \\ nil, property_id \\ nil, qualities \\ nil, start_date \\ nil, time_ordering \\ nil, options \\ []) do
  # Prepend each present parameter; scan order preserves the original
  # query-string ordering.
  query_ =
    [
      {"timeOrdering", time_ordering},
      {"startDate", start_date},
      {"qualities", qualities},
      {"propertyId", property_id},
      {"propertyAlias", property_alias},
      {"nextToken", next_token},
      {"maxResults", max_results},
      {"endDate", end_date},
      {"assetId", asset_id}
    ]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/properties/history", query_, [], nil, options, nil)
end
@doc """
Retrieves a paginated list of access policies for an identity (an AWS SSO
user, an AWS SSO group, or an IAM user) or an AWS IoT SiteWise Monitor
resource (a portal or project).
"""
def list_access_policies(client, iam_arn \\ nil, identity_id \\ nil, identity_type \\ nil, max_results \\ nil, next_token \\ nil, resource_id \\ nil, resource_type \\ nil, options \\ []) do
  # Prepend each supplied filter; scan order preserves the original
  # query-string ordering.
  query_ =
    [
      {"resourceType", resource_type},
      {"resourceId", resource_id},
      {"nextToken", next_token},
      {"maxResults", max_results},
      {"identityType", identity_type},
      {"identityId", identity_id},
      {"iamArn", iam_arn}
    ]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/access-policies", query_, [], nil, options, 200)
end
@doc """
Retrieves a paginated list of summaries of all asset models.
"""
def list_asset_models(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Pagination parameters are only sent when supplied.
  query_ =
    [{"nextToken", next_token}, {"maxResults", max_results}]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/asset-models", query_, [], nil, options, nil)
end
@doc """
Retrieves a paginated list of asset summaries.
You can use this operation to do the following:
<ul> <li> List assets based on a specific asset model.
</li> <li> List top-level assets.
</li> </ul> You can't use this operation to list all assets. To retrieve
summaries for all of your assets, use
[ListAssetModels](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_ListAssetModels.html)
to get all of your asset model IDs. Then, use ListAssets to get all assets
for each asset model.
"""
def list_assets(client, asset_model_id \\ nil, filter \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Prepend each supplied filter; scan order preserves the original
  # query-string ordering.
  query_ =
    [
      {"nextToken", next_token},
      {"maxResults", max_results},
      {"filter", filter},
      {"assetModelId", asset_model_id}
    ]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/assets", query_, [], nil, options, nil)
end
@doc """
Retrieves a paginated list of associated assets.
You can use this operation to do the following:
<ul> <li> List child assets associated to a parent asset by a hierarchy
that you specify.
</li> <li> List an asset's parent asset.
</li> </ul>
"""
def list_associated_assets(client, asset_id, hierarchy_id \\ nil, max_results \\ nil, next_token \\ nil, traversal_direction \\ nil, options \\ []) do
  # Prepend each supplied filter; scan order preserves the original
  # query-string ordering.
  query_ =
    [
      {"traversalDirection", traversal_direction},
      {"nextToken", next_token},
      {"maxResults", max_results},
      {"hierarchyId", hierarchy_id}
    ]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/assets/#{URI.encode(asset_id)}/hierarchies", query_, [], nil, options, nil)
end
@doc """
Retrieves a paginated list of dashboards for an AWS IoT SiteWise Monitor
project.
"""
def list_dashboards(client, max_results \\ nil, next_token \\ nil, project_id, options \\ []) do
  # project_id is required by the API but still guarded against nil, as in
  # the original generated code.
  query_ =
    [
      {"projectId", project_id},
      {"nextToken", next_token},
      {"maxResults", max_results}
    ]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/dashboards", query_, [], nil, options, 200)
end
@doc """
Retrieves a paginated list of gateways.
"""
def list_gateways(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Pagination parameters are only sent when supplied.
  query_ =
    [{"nextToken", next_token}, {"maxResults", max_results}]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/20200301/gateways", query_, [], nil, options, nil)
end
@doc """
Retrieves a paginated list of AWS IoT SiteWise Monitor portals.
"""
def list_portals(client, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Pagination parameters are only sent when supplied.
  query_ =
    [{"nextToken", next_token}, {"maxResults", max_results}]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/portals", query_, [], nil, options, 200)
end
@doc """
Retrieves a paginated list of assets associated with an AWS IoT SiteWise
Monitor project.
"""
def list_project_assets(client, project_id, max_results \\ nil, next_token \\ nil, options \\ []) do
  # Pagination parameters are only sent when supplied.
  query_ =
    [{"nextToken", next_token}, {"maxResults", max_results}]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/projects/#{URI.encode(project_id)}/assets", query_, [], nil, options, 200)
end
@doc """
Retrieves a paginated list of projects for an AWS IoT SiteWise Monitor
portal.
"""
def list_projects(client, max_results \\ nil, next_token \\ nil, portal_id, options \\ []) do
  # portal_id is required by the API but still guarded against nil, as in
  # the original generated code.
  query_ =
    [
      {"portalId", portal_id},
      {"nextToken", next_token},
      {"maxResults", max_results}
    ]
    |> Enum.reduce([], fn
      {_name, nil}, acc -> acc
      param, acc -> [param | acc]
    end)

  request(client, :get, "/projects", query_, [], nil, options, 200)
end
@doc """
Retrieves the list of tags for an AWS IoT SiteWise resource.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
  # The target ARN is passed as a query parameter, not a path segment.
  query_ = if is_nil(resource_arn), do: [], else: [{"resourceArn", resource_arn}]
  request(client, :get, "/tags", query_, [], nil, options, nil)
end
@doc """
Sets logging options for AWS IoT SiteWise.
"""
def put_logging_options(client, input, options \\ []) do
  # Account-level resource: fixed path, options in the PUT body.
  request(client, :put, "/logging", [], [], input, options, nil)
end
@doc """
Adds tags to an AWS IoT SiteWise resource. If a tag already exists for the
resource, this operation updates the tag's value.
"""
def tag_resource(client, input, options \\ []) do
  # Lift the "resourceArn" field from the input body into the query string.
  {query_, body} = AWS.Request.build_params([{"resourceArn", "resourceArn"}], input)
  request(client, :post, "/tags", query_, [], body, options, nil)
end
@doc """
Removes a tag from an AWS IoT SiteWise resource.
"""
def untag_resource(client, input, options \\ []) do
  # Both the target ARN and the tag keys travel in the query string.
  {query_, body} =
    AWS.Request.build_params([{"resourceArn", "resourceArn"}, {"tagKeys", "tagKeys"}], input)

  request(client, :delete, "/tags", query_, [], body, options, nil)
end
@doc """
Updates an existing access policy that specifies an identity's access to an
AWS IoT SiteWise Monitor portal or project resource.
"""
def update_access_policy(client, access_policy_id, input, options \\ []) do
  # Full-replace PUT; expects 200.
  request(client, :put, "/access-policies/#{URI.encode(access_policy_id)}", [], [], input, options, 200)
end
@doc """
Updates an asset's name. For more information, see [Updating assets and
models](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/update-assets-and-models.html)
in the *AWS IoT SiteWise User Guide*.
"""
def update_asset(client, asset_id, input, options \\ []) do
  # Asynchronous update; 202 Accepted signals the change was queued.
  request(client, :put, "/assets/#{URI.encode(asset_id)}", [], [], input, options, 202)
end
@doc """
Updates an asset model and all of the assets that were created from the
model. Each asset created from the model inherits the updated asset model's
property and hierarchy definitions. For more information, see [Updating
assets and
models](https://docs.aws.amazon.com/iot-sitewise/latest/userguide/update-assets-and-models.html)
in the *AWS IoT SiteWise User Guide*.
<important> This operation overwrites the existing model with the provided
model. To avoid deleting your asset model's properties or hierarchies, you
must include their IDs and definitions in the updated asset model payload.
For more information, see
[DescribeAssetModel](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_DescribeAssetModel.html).
If you remove a property from an asset model, AWS IoT SiteWise deletes all
previous data for that property. If you remove a hierarchy definition from
an asset model, AWS IoT SiteWise disassociates every asset associated with
that hierarchy. You can't change the type or data type of an existing
property.
</important>
"""
def update_asset_model(client, asset_model_id, input, options \\ []) do
  # Asynchronous update; 202 Accepted signals the change was queued.
  request(client, :put, "/asset-models/#{URI.encode(asset_model_id)}", [], [], input, options, 202)
end
@doc """
Updates an asset property's alias and notification state.
<important> This operation overwrites the property's existing alias and
notification state. To keep your existing property's alias or notification
state, you must include the existing values in the UpdateAssetProperty
request. For more information, see
[DescribeAssetProperty](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_DescribeAssetProperty.html).
</important>
"""
def update_asset_property(client, asset_id, property_id, input, options \\ []) do
  # Both path segments are URI-encoded independently.
  path = "/assets/#{URI.encode(asset_id)}/properties/#{URI.encode(property_id)}"
  request(client, :put, path, [], [], input, options, nil)
end
@doc """
Updates an AWS IoT SiteWise Monitor dashboard.
"""
def update_dashboard(client, dashboard_id, input, options \\ []) do
  # Full-replace PUT; expects 200.
  request(client, :put, "/dashboards/#{URI.encode(dashboard_id)}", [], [], input, options, 200)
end
@doc """
Updates a gateway's name.
"""
def update_gateway(client, gateway_id, input, options \\ []) do
  # Gateway routes live under the date-versioned /20200301 prefix.
  request(client, :put, "/20200301/gateways/#{URI.encode(gateway_id)}", [], [], input, options, nil)
end
@doc """
Updates a gateway capability configuration or defines a new capability
configuration. Each gateway capability defines data sources for a gateway.
A capability configuration can contain multiple data source configurations.
If you define OPC-UA sources for a gateway in the AWS IoT SiteWise console,
all of your OPC-UA sources are stored in one capability configuration. To
list all capability configurations for a gateway, use
[DescribeGateway](https://docs.aws.amazon.com/iot-sitewise/latest/APIReference/API_DescribeGateway.html).
"""
def update_gateway_capability_configuration(client, gateway_id, input, options \\ []) do
  # POST creates or replaces the capability configuration; 201 on success.
  path = "/20200301/gateways/#{URI.encode(gateway_id)}/capability"
  request(client, :post, path, [], [], input, options, 201)
end
@doc """
Updates an AWS IoT SiteWise Monitor portal.
"""
def update_portal(client, portal_id, input, options \\ []) do
  # Asynchronous update; 202 Accepted signals the change was queued.
  request(client, :put, "/portals/#{URI.encode(portal_id)}", [], [], input, options, 202)
end
@doc """
Updates an AWS IoT SiteWise Monitor project.
"""
def update_project(client, project_id, input, options \\ []) do
  # Full-replace PUT; expects 200.
  request(client, :put, "/projects/#{URI.encode(project_id)}", [], [], input, options, 200)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
# Builds, signs (SigV4) and dispatches a single request against the
# "iotsitewise" service endpoint.
defp request(client, method, path, query, headers, input, options, success_status_code) do
  client = %{client | service: "iotsitewise"}
  host = build_host("iotsitewise", client)

  url =
    host
    |> build_url(path, client)
    |> add_query(query, client)

  headers =
    AWS.Request.add_headers(
      [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}],
      headers
    )

  payload = encode!(client, input)
  signed_headers = AWS.Request.sign_v4(client, method, url, headers, payload)
  perform_request(client, method, url, payload, signed_headers, options, success_status_code)
end
# Executes the HTTP request and normalizes the result.
# A response is successful when its status equals `success_status_code`,
# or — when no explicit code was given (nil) — when it is any of
# 200/202/204. Empty bodies decode to nil.
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
  case AWS.Client.request(client, method, url, payload, headers, options) do
    {:ok, %{status_code: status, body: raw_body} = response}
    when (is_nil(success_status_code) and status in [200, 202, 204]) or
           status == success_status_code ->
      decoded = if raw_body != "", do: decode!(client, raw_body)
      {:ok, decoded, response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    {:error, _reason} = error ->
      error
  end
end
# Resolves the hostname for the service endpoint.
# A "local" region with an explicit endpoint uses that endpoint verbatim;
# a bare "local" region targets localhost; otherwise the standard
# <prefix>.<region>.<endpoint> AWS hostname is composed. Clause order
# matters: the "local" clauses must be tried first.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Composes the absolute URL from the client's protocol and port plus the
# resolved host and request path.
defp build_url(host, path, %{:proto => proto, :port => port}) do
  "#{proto}://#{host}:#{port}#{path}"
end
# Appends the querystring to the URL; an empty query leaves it untouched.
defp add_query(url, [], _client), do: url

defp add_query(url, query, client) do
  url <> "?" <> encode!(client, query, :query)
end
# Encodes a payload via the client's configured codec; JSON by default,
# :query for querystring encoding.
defp encode!(client, payload, format \\ :json), do: AWS.Client.encode!(client, payload, format)
# All responses from this service are decoded as JSON.
defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
|
lib/aws/generated/iot_site_wise.ex
| 0.892691
| 0.53437
|
iot_site_wise.ex
|
starcoder
|
defmodule Noizu.SimpleObject do
  @doc """
  Begin configuring a Simple Object.

  Registers the annotation attributes consumed by the scaffolding internals
  and copies any nmid/auto-generate options into module attributes of the
  using module.

  ## Example

      defmodule Container do
        use Noizu.SimpleObject
        Noizu.SimpleObject.noizu_struct() do
          public_field :contents
        end
      end
  """
  defmacro __using__(options \\ nil) do
    # Options are read at macro-expansion time. Unset entries are nil and
    # are simply never written to module attributes in the quoted block.
    nmid_generator = options[:nmid_generator]
    nmid_sequencer = options[:nmid_sequencer]
    nmid_index = options[:nmid_index]
    auto_generate = options[:auto_generate]
    caller = __CALLER__
    quote do
      require Noizu.SimpleObject
      require Noizu.AdvancedScaffolding.Internal.Helpers
      #-------------------------
      # Declare Annotation Attributes
      #-------------------------
      # :index, :persistence_layer and the json group attributes may be
      # declared repeatedly (accumulate: true); the others hold one value.
      Module.register_attribute(__MODULE__, :index, accumulate: true)
      Module.register_attribute(__MODULE__, :persistence_layer, accumulate: true)
      Module.register_attribute(__MODULE__, :__nzdo__meta, accumulate: false)
      Module.register_attribute(__MODULE__, :json_white_list, accumulate: false)
      Module.register_attribute(__MODULE__, :json_format_group, accumulate: true)
      Module.register_attribute(__MODULE__, :json_field_group, accumulate: true)
      #---------------------
      # Ensure Single Call
      #---------------------
      # Guards against `use Noizu.SimpleObject` being invoked more than once
      # in the same module; the @file override points error reports at the
      # caller's location.
      @file unquote(__ENV__.file) <> ":#{unquote(__ENV__.line)}" <> "(via #{__ENV__.file}:#{__ENV__.line})"
      Noizu.AdvancedScaffolding.Internal.Helpers.insure_single_use(:__nzdo__simple_defined, unquote(caller))
      #-----------------
      # Set Annotation fields if caller passed in options
      #-----------------
      if v = unquote(nmid_generator), do: Module.put_attribute(__MODULE__, :nmid_generator, v)
      if v = unquote(nmid_sequencer), do: Module.put_attribute(__MODULE__, :nmid_sequencer, v)
      if v = unquote(nmid_index), do: Module.put_attribute(__MODULE__, :nmid_index, v)
      if unquote(auto_generate) != nil, do: Module.put_attribute(__MODULE__, :auto_generate, unquote(auto_generate))
    end
  end
  @doc """
  Define simple object fields/settings.

  Delegates to the scaffolding internals, which expand the given block
  (e.g. `public_field` declarations) into the struct definition.

  ## Example

      defmodule Container do
        use Noizu.SimpleObject
        Noizu.SimpleObject.noizu_struct() do
          public_field :contents
        end
      end
  """
  defmacro noizu_struct(options \\ [], [do: block]) do
    Noizu.AdvancedScaffolding.Internal.SimpleObject.Base.__noizu_struct__(__CALLER__, options, block)
  end
end
|
lib/scaffolding/simple_object.ex
| 0.702734
| 0.474631
|
simple_object.ex
|
starcoder
|
defmodule BitPal.ExchangeRate do
  @moduledoc """
  An exchange rate between a pair of currencies.

  Wraps a `Decimal` rate together with its `{base, quote}` currency pair and
  provides creation, request, normalization and PubSub subscription helpers.
  """
  alias BitPal.ExchangeRateSupervisor
  alias BitPal.ExchangeRateSupervisor.Result
  alias BitPalSchemas.Currency
  alias Phoenix.PubSub
  @pubsub BitPal.PubSub
  @type pair :: {Currency.id(), Currency.id()}
  @type t :: %__MODULE__{
          rate: Decimal.t(),
          pair: pair
        }
  defstruct [:rate, :pair]
  # Creation
  @spec new!(Money.t(), Money.t()) :: t
  @spec new!(Decimal.t(), pair) :: t
  # Like `new/2` but raises ArgumentError instead of returning :error.
  def new!(x, y) do
    case new(x, y) do
      {:ok, res} -> res
      _ -> raise ArgumentError, "invalid params to ExchangeRate.new"
    end
  end
  @spec new(Decimal.t(), pair) :: {:ok, t} | :error
  # A pair of identical currencies has no meaningful rate.
  def new(_, {a, a}) do
    :error
  end
  # Builds a rate from a Decimal and a currency pair; rejects negative rates
  # and currencies `normalize_currency/1` cannot resolve.
  def new(rate, {a, b}) do
    with false <- Decimal.lt?(rate, Decimal.new(0)),
         {:ok, a} <- normalize_currency(a),
         {:ok, b} <- normalize_currency(b) do
      {:ok,
       %__MODULE__{
         rate: rate,
         pair: {a, b}
       }}
    else
      _ -> :error
    end
  end
  @spec new(Money.t(), Money.t()) :: {:ok, t} | :error
  # Derives the rate from two Money amounts (b per unit of a). Rejects
  # identical currencies and a zero base amount (avoiding division by zero).
  def new(a, b) do
    cond do
      a.currency == b.currency ->
        :error
      Money.zero?(a) ->
        :error
      true ->
        {:ok,
         %__MODULE__{
           rate: Decimal.div(Money.to_decimal(b), Money.to_decimal(a)),
           pair: {a.currency, b.currency}
         }}
    end
  end
  # Requests
  @spec request(pair(), keyword) :: {:ok, t()} | {:error, term}
  # Fetches the current rate for `pair` via the supervisor.
  def request(pair, opts \\ []) do
    ExchangeRateSupervisor.request(pair, opts)
  end
  @spec request!(pair(), keyword) :: t()
  def request!(pair, opts \\ []) do
    ExchangeRateSupervisor.request!(pair, opts)
  end
  # Handling
  @spec normalize(t, Money.t(), Money.t()) ::
          {:ok, Money.t(), Money.t()}
          | {:error, :mismatched_exchange_rate}
          | {:error, :bad_params}
  # Fills in the missing side of an amount pair using the exchange rate, or
  # validates both sides when given. Amounts may arrive in either order.
  def normalize(exchange_rate, a, b) do
    {ex_a, ex_b} = exchange_rate.pair
    case {a, b} do
      # Only the first (base) amount given: derive the second via rate * a.
      {%Money{currency: ^ex_a}, nil} ->
        {:ok, a,
         Money.parse!(
           Decimal.mult(exchange_rate.rate, Money.to_decimal(a)),
           elem(exchange_rate.pair, 1)
         )}
      # Only the second (quote) amount given: derive the first via b / rate.
      {nil, %Money{currency: ^ex_b}} ->
        {:ok,
         Money.parse!(
           Decimal.div(Money.to_decimal(b), exchange_rate.rate),
           elem(exchange_rate.pair, 0)
         ), b}
      # Amounts given in reversed order: retry with them swapped.
      {%Money{currency: ^ex_b}, %Money{currency: ^ex_a}} ->
        normalize(exchange_rate, b, a)
      # Both amounts given in pair order: verify they imply the same rate.
      {%Money{currency: ^ex_a}, %Money{currency: ^ex_b}} ->
        case new(a, b) do
          {:ok, rate} ->
            if eq?(exchange_rate, rate) do
              {:ok, a, b}
            else
              {:error, :mismatched_exchange_rate}
            end
          _ ->
            {:error, :bad_params}
        end
      _ ->
        {:error, :bad_params}
    end
  end
  @spec eq?(t, t) :: boolean
  # Rates are equal when the pairs match and the Decimal values compare
  # equal (Decimal.eq?/2, not structural ==, so 1.0 equals 1.00).
  def eq?(a, b) do
    a.pair == b.pair && Decimal.eq?(a.rate, b.rate)
  end
  # Subscriptions
  @spec subscribe(pair()) :: :ok
  # Subscribes the caller to rate updates for `pair` and kicks off an async
  # refresh so a fresh rate is broadcast shortly after subscribing.
  def subscribe(pair, opts \\ []) do
    :ok = PubSub.subscribe(@pubsub, topic(pair))
    ExchangeRateSupervisor.async_request(pair, opts)
    :ok
  end
  @spec unsubscribe(pair()) :: :ok
  def unsubscribe(pair) do
    PubSub.unsubscribe(@pubsub, topic(pair))
  end
  @spec broadcast(pair(), Result.t()) :: :ok | {:error, term}
  # Publishes `{:exchange_rate, rate}` to all subscribers of this pair.
  def broadcast(pair, res) do
    PubSub.broadcast(@pubsub, topic(pair), {:exchange_rate, res.rate})
  end
  # Topic name is the module name concatenated with both currency ids.
  defp topic({from, to}) do
    Atom.to_string(__MODULE__) <> Atom.to_string(from) <> Atom.to_string(to)
  end
  @spec normalize_currency(Currency.id()) :: {:ok, atom} | :error
  # Converts a currency id to its canonical atom; the rescue turns any
  # exception from Money.Currency.to_atom/1 (unknown currency) into :error.
  defp normalize_currency(currency) do
    {:ok, Money.Currency.to_atom(currency)}
  rescue
    _ -> :error
  end
end
|
lib/bitpal/exchange_rate/exchange_rate.ex
| 0.865878
| 0.54583
|
exchange_rate.ex
|
starcoder
|
defmodule Astarte.Flow.Blocks.VirtualDevicePool do
  @moduledoc """
  This is a consumer block that takes `data` from incoming `Message`s and publishes it as an Astarte device,
  interpreting the `key` as <realm>/<device_id>/<interface><path>.

  The list of supported devices is configured using `start_link/1`.
  """

  use GenStage

  require Logger

  alias Astarte.Device
  alias Astarte.Flow.Compat
  alias Astarte.Flow.Message
  alias Astarte.Flow.VirtualDevicesSupervisor

  @doc """
  Starts the `VirtualDevicePool`.

  ## Options

  * `:pairing_url` (required) - base URL of the Astarte Pairing API instance the devices will
    connect to, e.g. `https://astarte.api.example.com/pairing` or `http://localhost:4003` for a
    local installation. URL containing the API version suffix (i.e. `/v1`) are *deprecated* and
    will be removed in a future release.
  * `:devices` (required) - A list of supported devices, each represented by its `device_options` (see "Device options" below).
  * `:ignore_ssl_errors` - A boolean to indicate whether devices have to ignore SSL errors when connecting to the broker. Defaults to `false`.

  ## Device options

  * `:realm` (required)
  * `:device_id` (required)
  * `:credentials_secret` (required)
  * `:interface_provider` (required)

  See `Astarte.Device.start_link/1` for more documentation.
  """
  @spec start_link(options) :: GenServer.on_start()
        when options: [option],
             option:
               {:pairing_url, pairing_url :: String.t()}
               | {:devices, devices}
               | {:ignore_ssl_errors, ignore_ssl_errors :: boolean()},
             devices: [device_options],
             device_options: [device_option],
             device_option:
               {:realm, realm :: String.t()}
               | {:device_id, device_id :: String.t()}
               | {:credentials_secret, credentials_secret :: String.t()}
               | {:interface_provider, {module(), term()} | String.t()}
  def start_link(opts) do
    GenStage.start_link(__MODULE__, opts)
  end

  # Callbacks

  @impl true
  def init(opts) do
    # Normalize the pairing URL (e.g. strip the deprecated `/v1` suffix).
    pairing_url =
      Keyword.fetch!(opts, :pairing_url)
      |> Compat.normalize_device_pairing_url()

    devices = Keyword.fetch!(opts, :devices)
    ignore_ssl_errors = Keyword.get(opts, :ignore_ssl_errors, false)

    # Options shared by all devices; per-device options are merged on top.
    base_opts = [
      pairing_url: pairing_url,
      ignore_ssl_errors: ignore_ssl_errors
    ]

    # Start every configured device and wait for all of them to connect
    # before declaring ourselves ready to consume messages.
    with {:ok, devices} <- start_devices(base_opts, devices),
         :ok <- wait_for_device_connections(devices) do
      {:consumer, nil}
    else
      {:error, reason} ->
        {:stop, reason}
    end
  end

  # Starts one `Astarte.Device` per entry under the shared DynamicSupervisor.
  #
  # Returns `{:ok, pids}` with the started (or already running) device pids,
  # or `{:error, reason}` as soon as one device fails to start.
  defp start_devices(base_opts, devices) do
    full_device_options =
      for device_options <- devices do
        # Per-device options take precedence over the shared base options.
        Keyword.merge(base_opts, device_options)
      end

    result =
      Enum.reduce_while(full_device_options, [], fn opts, acc ->
        case DynamicSupervisor.start_child(VirtualDevicesSupervisor, {Device, opts}) do
          {:ok, pid} ->
            {:cont, [pid | acc]}

          {:error, {:already_started, pid}} ->
            # Someone else is already using this device, but that's fine
            {:cont, [pid | acc]}

          {:error, reason} ->
            {:halt, {:error, reason}}
        end
      end)

    # `reduce_while` yields either the accumulated pid list or the error
    # tuple produced by the `:halt` branch.
    case result do
      {:error, reason} ->
        {:error, reason}

      devices ->
        {:ok, devices}
    end
  end

  # Waits for every started device to report a connection.
  #
  # `Enum.each/2` always returns `:ok`, which is what `init/1` matches on.
  # NOTE(review): any error value returned by `Device.wait_for_connection/1`
  # is ignored here — confirm that is intended.
  defp wait_for_device_connections(devices) do
    Enum.each(devices, fn device_pid ->
      Device.wait_for_connection(device_pid)
    end)
  end

  @impl true
  # Consumer callback: handles each event for its side effects only and
  # emits nothing downstream.
  def handle_events(events, _from, state) do
    Enum.each(events, &handle_message/1)
    {:noreply, [], state}
  end

  # Publishes a single message to the matching virtual device.
  #
  # The key selects the device plus target interface/path, and the message
  # timestamp (microseconds) is forwarded as the datastream timestamp.
  # Failures are logged and returned as `{:error, reason}`.
  # NOTE(review): `Logger.warn/2` is deprecated in recent Elixir versions
  # in favor of `Logger.warning/2`.
  defp handle_message(message) do
    %Message{
      key: key,
      data: data,
      timestamp: timestamp_micros
    } = message

    with {:ok, {realm, device_id, interface, path}} <- parse_key(key),
         {:ok, pid} <- fetch_device(realm, device_id),
         {:ok, timestamp} <- DateTime.from_unix(timestamp_micros, :microsecond),
         :ok <-
           Device.send_datastream(pid, interface, path, data, timestamp: timestamp) do
      :ok
    else
      {:error, reason} ->
        _ = Logger.warn("Error handling message: #{inspect(reason)}", message: message)
        {:error, reason}
    end
  end

  # Splits a key of the form "realm/device_id/interface/path/..." into its
  # components; everything after the interface becomes the path, with its
  # leading "/" restored.
  # NOTE(review): a key with no path segment ("realm/dev/iface") leaves
  # `path_tokens` empty and `Path.join/1` raises on an empty list —
  # confirm such keys cannot occur upstream.
  defp parse_key(key) do
    case String.split(key, "/") do
      [realm, device_id, interface | path_tokens] ->
        path = "/" <> Path.join(path_tokens)
        {:ok, {realm, device_id, interface, path}}

      _ ->
        {:error, :invalid_astarte_key}
    end
  end

  # Looks up the registered device process for the realm/device_id pair.
  defp fetch_device(realm, device_id) do
    case Astarte.Device.get_pid(realm, device_id) do
      pid when is_pid(pid) ->
        {:ok, pid}

      nil ->
        {:error, :device_not_found}
    end
  end
end
|
lib/astarte_flow/blocks/virtual_device_pool.ex
| 0.898632
| 0.465752
|
virtual_device_pool.ex
|
starcoder
|
defprotocol Recurly.XML.Parser do
  @moduledoc """
  Protocol responsible for parsing xml into resources

  TODO - This still has some refactoring that can be done.
  """

  @doc """
  Parses an xml document into the given resource

  ## Parameters

  - `resource` empty resource struct to parse into
  - `xml_doc` String xml document
  - `list` boolean value, use true if top level xml is a list

  ## Examples

  ```
  xml_doc = "<account><account_code>myaccount</account_code></account>"
  account = Recurly.XML.Parser.parse(%Recurly.Account{}, xml_doc, false)
  ```
  """
  def parse(resource, xml_doc, list)
end
defimpl Recurly.XML.Parser, for: Any do
  import SweetXml

  alias Recurly.XML.Types
  alias Recurly.XML.Schema
  alias Recurly.XML.Field

  # Parses a document whose top level is a list: selects every
  # `//<resource_name>` node and parses each one as a single resource.
  def parse(resource, xml_doc, true) do
    type = resource.__struct__
    path = to_charlist("//#{type.__resource_name__}")
    path = %SweetXpath{path: path, is_list: true}

    xml_doc
    |> xpath(path)
    |> Enum.map(fn xml_node ->
      parse(resource, xml_node, false)
    end)
  end

  # Parses a single resource node into its struct, then attaches the
  # `<a/>` action links to the struct's `__meta__`.
  def parse(resource, xml_doc, false) do
    type = resource.__struct__
    path = "/#{type.__resource_name__}/"

    xml_doc
    |> to_struct(type, path)
    |> insert_actions(xml_doc, path)
  end

  # Extracts the `<a name=... href=... method=...>` action links under
  # `string_path` and stores them in `__meta__.actions` as a map of
  # `name => [method, href]`.
  #
  # NOTE(review): `String.to_atom/1` is called on names/methods coming
  # from the XML payload — atoms are never garbage collected, so this is
  # unsafe for untrusted input. Confirm the API responses are trusted.
  defp insert_actions(resource_struct, xml_doc, string_path) do
    path = %SweetXpath{path: to_charlist(string_path <> "a"), is_list: true}
    meta = resource_struct.__meta__

    actions =
      xml_doc
      |> xmap(
        actions: [
          path,
          name: ~x"./@name"s,
          href: ~x"./@href"s,
          method: ~x"./@method"s
        ]
      )
      |> Map.get(:actions)
      |> Enum.reduce(%{}, fn (action, acc) ->
        name = action |> Map.get(:name) |> String.to_atom
        method = action |> Map.get(:method) |> String.to_atom
        action = [method, Map.get(action, :href)]
        Map.put(acc, name, action)
      end)

    %{resource_struct | __meta__: Map.put(meta, :actions, actions)}
  end

  # Walks the child nodes of `xml_node`, matches each against the type's
  # schema, converts them into `{attr_name, value}` pairs and builds the
  # struct. Tags with no matching schema field are dropped.
  defp to_struct(xml_node, type, string_path) do
    schema = Schema.get(type)
    href_attr = parse_xml_attribute(xml_node, string_path, "href")
    path = %SweetXpath{path: to_charlist(string_path <> "*"), is_list: true}

    xml_node
    |> xpath(path)
    |> Enum.map(fn xml_node ->
      # NOTE(review): `String.to_atom/1` on tag names from the payload —
      # same unbounded-atom concern as in `insert_actions/3`.
      attr_name = xml_node |> xpath(~x"name(.)"s) |> String.downcase |> String.to_atom
      field = Schema.find_field(schema, attr_name)
      xml_attributes = parse_xml_attributes(xml_node)
      {attr_name, xml_node, field, xml_attributes}
    end)
    |> Enum.map(&to_attribute/1)
    |> Enum.reject(&is_nil/1)
    |> Enum.concat([{:__meta__, %{href: href_attr}}])
    |> from_map(type)
  end

  # Turns the xml node tuple into a resource attribute tuple

  # No schema field matched this tag -> attribute is discarded.
  defp to_attribute({_attr_name, _xml_node, nil, _xml_attrs}) do
    nil
  end

  # Explicit nil markers (`nil="nil"` / `nil="true"`) -> nil value.
  defp to_attribute({attr_name, _xml_node, _field, %{"nil" => "nil"}}) do
    {attr_name, nil}
  end

  defp to_attribute({attr_name, _xml_node, _field, %{"nil" => "true"}}) do
    {attr_name, nil}
  end

  # `type="array"` -> parse each child element as an embedded resource of
  # the field's type.
  defp to_attribute({attr_name, xml_node, field, %{"type" => "array"}}) do
    path = %SweetXpath{path: './*', is_list: true}

    resources =
      xml_node
      |> xpath(path)
      |> Enum.map(fn element_xml_node ->
        to_struct(element_xml_node, field.type, "./")
      end)

    {attr_name, resources}
  end

  # Date-time fields: empty text -> nil, otherwise parsed as ISO8601
  # (raises on malformed input).
  defp to_attribute({attr_name, xml_node, %Field{type: :date_time}, _xml_attrs}) do
    case text_value(xml_node) do
      nil -> {attr_name, nil}
      val -> {attr_name, NaiveDateTime.from_iso8601!(val)}
    end
  end

  # Boolean fields accept only "true"/"false" (case-insensitive).
  # NOTE(review): `text_value/1` returns nil for an empty node, which
  # would make `String.downcase/1` raise here — confirm empty boolean
  # nodes cannot occur.
  defp to_attribute({attr_name, xml_node, %Field{type: :boolean}, _xml_attrs}) do
    val = text_value(xml_node) |> String.downcase

    case val do
      "true" -> {attr_name, true}
      "false" -> {attr_name, false}
      _ -> raise ArgumentError, message: "Invalid boolean value #{inspect({attr_name, val})}"
    end
  end

  # Fallback clause: association links, primitives, or embedded resources.
  defp to_attribute({attr_name, xml_node, field, xml_attrs}) do
    href = Map.get(xml_attrs, "href")
    childless = Map.get(xml_attrs, "childless")

    cond do
      # A childless node carrying an href is a lazy association link.
      childless && href && String.length(href) > 0 ->
        {
          attr_name,
          %Recurly.Association{
            href: href,
            resource_type: field.type,
            paginate: Field.pageable?(field)
          }
        }

      Types.primitive?(field.type) -> # Can be parsed and cast to a primitive type
        path = %SweetXpath{path: './text()', cast_to: field.type}
        value = xpath(xml_node, path)
        # TODO a better way to detect nil
        if value == "" do
          nil
        else
          {attr_name, value}
        end

      true -> # Is embedded and must parse out the children attributes
        {attr_name, to_struct(xml_node, field.type, "./")}
    end
  end

  # Builds the struct from the accumulated attribute pairs.
  defp from_map(enum, type) do
    struct(type, enum)
  end

  # gives you a map of xml attributes and values for a node
  # will also have childless => true if node has no children
  defp parse_xml_attributes(xml_node) do
    attrs =
      ~w(href type nil)
      |> Enum.map(fn key ->
        {key, parse_xml_attribute(xml_node, "./", key)}
      end)
      |> Enum.into(%{})

    Map.put(attrs, "childless", xpath(xml_node, ~x"./*") == nil)
  end

  # parses a single xml attribute given a key
  defp parse_xml_attribute(xml_node, path, attribute_key) do
    text_value(xml_node, to_charlist("#{path}@#{attribute_key}"))
  end

  # Parses the text value of the xml node, optional path.
  # Returns nil when the node has no text.
  defp text_value(xml_node, path \\ './text()') do
    path = %SweetXpath{path: path, cast_to: :string}
    value = xpath(xml_node, path)

    case value do
      "" -> nil
      _ -> value
    end
  end
end
|
lib/recurly/xml/parser.ex
| 0.679072
| 0.435781
|
parser.ex
|
starcoder
|
defmodule Openflow.Action.NxFlowSpecLoad do
  @moduledoc """
  A single `load` flow spec of the Nicira extended `learn` action.

  Serializes and deserializes a 16-bit spec header followed by the source
  (either a match-field reference or an immediate value) and the
  destination field.

  NOTE(review): the wire layout appears to mirror Open vSwitch's learn
  action flow_mod specs — confirm against the OVS `nicira-ext` headers.
  """

  defstruct(
    src: nil,
    dst: nil,
    n_bits: 0,
    src_offset: 0,
    dst_offset: 0
  )

  # Header field values: the src-type bit distinguishes a match field (0)
  # from an immediate value (1); dst type 1 selects "load into field".
  @learn_src_field 0
  @learn_src_immediate 1
  @learn_dst 1

  alias __MODULE__

  @type t :: %NxFlowSpecLoad{
          src: atom(),
          dst: atom(),
          n_bits: non_neg_integer(),
          src_offset: non_neg_integer(),
          dst_offset: non_neg_integer()
        }

  @doc """
  Builds a new flow spec.

  `:dst` and `:src` are required (raises when missing). `:n_bits`
  defaults to the full bit width of the destination field; both offsets
  default to `0`.
  """
  @spec new(
          src: atom(),
          dst: atom(),
          n_bits: non_neg_integer(),
          src_offset: non_neg_integer(),
          dst_offset: non_neg_integer()
        ) :: t()
  def new(options) do
    dst = options[:dst] || raise(":dst must be specified")
    src = options[:src] || raise(":src must be specified")
    n_bits = options[:n_bits] || Openflow.Match.n_bits_of(dst)

    %NxFlowSpecLoad{
      src: src,
      dst: dst,
      n_bits: n_bits,
      src_offset: options[:src_offset] || 0,
      dst_offset: options[:dst_offset] || 0
    }
  end

  @doc """
  Serializes the flow spec to its wire format.

  Header layout (16 bits): 2 reserved bits, 1 src-type bit, 2 dst-type
  bits, 11 bits for `n_bits`. A field source is a 4-byte field header
  followed by its 16-bit offset; an immediate source carries the raw
  value and no src offset.
  """
  def to_binary(%NxFlowSpecLoad{} = fsm) do
    %NxFlowSpecLoad{
      dst: dst_field,
      n_bits: n_bits,
      src_offset: src_ofs,
      dst_offset: dst_ofs
    } = fsm

    {src_code, src_bin} = codec_src(fsm)
    dst_bin = Openflow.Match.codec_header(dst_field)

    case src_code do
      @learn_src_immediate ->
        <<0::2, src_code::1, @learn_dst::2, n_bits::11, src_bin::bytes, dst_bin::4-bytes,
          dst_ofs::16>>

      @learn_src_field ->
        <<0::2, src_code::1, @learn_dst::2, n_bits::11, src_bin::4-bytes, src_ofs::16,
          dst_bin::4-bytes, dst_ofs::16>>
    end
  end

  @doc """
  Deserializes a flow spec, returning `{flow_spec, rest}`.

  The first clause handles a field source; the second handles an
  immediate source, whose value is padded up to a whole number of bytes
  (presumably — see `Openflow.Utils.pad_length/2`) and decoded against
  the destination field's type.
  """
  def read(
        <<_::2, @learn_src_field::1, @learn_dst::2, n_bits::11, src_bin::4-bytes, src_ofs::16,
          dst_bin::4-bytes, dst_ofs::16, rest::bitstring>>
      ) do
    src = Openflow.Match.codec_header(src_bin)
    dst = Openflow.Match.codec_header(dst_bin)

    flow_spec = %NxFlowSpecLoad{
      src: src,
      dst: dst,
      n_bits: n_bits,
      src_offset: src_ofs,
      dst_offset: dst_ofs
    }

    {flow_spec, rest}
  end

  # Immediate-source clause: no src offset on the wire, so `src_offset`
  # keeps its struct default of 0.
  def read(<<_::2, @learn_src_immediate::1, @learn_dst::2, n_bits::11, binary::bitstring>>) do
    rounded_up_len = Openflow.Utils.pad_length(n_bits, 8)
    rounded_up_nbits = n_bits + rounded_up_len

    <<src_bin::size(rounded_up_nbits)-bits, dst_bin::4-bytes, dst_ofs::16, rest::bitstring>> =
      binary

    dst = Openflow.Match.codec_header(dst_bin)
    src = Openflow.Match.decode_value(src_bin, dst)
    flow_spec = %NxFlowSpecLoad{src: src, dst: dst, n_bits: n_bits, dst_offset: dst_ofs}
    {flow_spec, rest}
  end

  # private functions

  # An atom src is a match-field reference.
  defp codec_src(%NxFlowSpecLoad{src: src_field}) when is_atom(src_field) do
    src_bin = Openflow.Match.codec_header(src_field)
    {@learn_src_field, src_bin}
  end

  # Any other src is an immediate value, encoded against the dst field's
  # type.
  defp codec_src(%NxFlowSpecLoad{src: src, dst: dst_field}) do
    src_bin = Openflow.Match.encode_value(src, dst_field)
    {@learn_src_immediate, src_bin}
  end
end
|
lib/openflow/actions/nx_flow_spec_load.ex
| 0.710025
| 0.465752
|
nx_flow_spec_load.ex
|
starcoder
|
defmodule HumanName do
  @moduledoc """
  Convenience wrappers around `HumanName.Native` for parsing and
  formatting human names.

  Each function expects the full name as a non-empty string; any other
  input yields `{:error, "No valid name found"}`.
  """

  # A usable name is a binary with at least one byte.
  defguardp is_present(name) when is_binary(name) and byte_size(name) > 0

  @doc """
  Returns the initial for the first (given) name.

  ## Example

      iex> HumanName.first_initial("<NAME>")
      {:ok, "J"}

      iex> HumanName.first_initial("Dr. Alibaster Cornelius Juniper III")
      {:ok, "A"}

      iex> HumanName.first_initial(12345)
      {:error, "No valid name found"}

      iex> HumanName.first_initial("")
      {:error, "No valid name found"}

      iex> HumanName.first_initial(nil)
      {:error, "No valid name found"}
  """
  def first_initial(full_name) when is_present(full_name) do
    HumanName.Native.first_initial(full_name)
  end

  def first_initial(_invalid), do: invalid_name_error()

  @doc """
  Returns the initial for the first (given) name and the full last name (surname).

  ## Example

      iex> HumanName.first_initial_last("<NAME>")
      {:ok, "<NAME>"}

      iex> HumanName.first_initial_last("Dr. Alibaster Cornelius Juniper III")
      {:ok, "<NAME>"}

      iex> HumanName.first_initial_last("")
      {:error, "No valid name found"}
  """
  def first_initial_last(full_name) when is_present(full_name) do
    HumanName.Native.first_initial_last(full_name)
  end

  def first_initial_last(_invalid), do: invalid_name_error()

  @doc """
  Returns just the formatted first (given) name.

  ## Example

      iex> HumanName.first_name("<NAME>")
      {:ok, "Jimmy"}

      iex> HumanName.first_name("Dr. Alibaster Cornelius Juniper III")
      {:ok, "Alibaster"}

      iex> HumanName.first_name("")
      {:error, "No valid name found"}
  """
  def first_name(full_name) when is_present(full_name) do
    HumanName.Native.first_name(full_name)
  end

  def first_name(_invalid), do: invalid_name_error()

  @doc """
  Returns the combined initials for the first (given) name and last (surname) name.

  ## Example

      iex> HumanName.initials("<NAME>")
      {:ok, "JJ"}

      iex> HumanName.initials("Dr. Alibaster Cornelius Juniper III")
      {:ok, "AJ"}

      iex> HumanName.initials("")
      {:error, "No valid name found"}
  """
  def initials(full_name) when is_present(full_name) do
    HumanName.Native.initials(full_name)
  end

  def initials(_invalid), do: invalid_name_error()

  @doc """
  Returns just the formatted last name (surname).

  ## Example

      iex> HumanName.last_name("<NAME>")
      {:ok, "Jenkins"}

      iex> HumanName.last_name("Dr. Alibaster Cornelius Juniper III")
      {:ok, "Juniper"}

      iex> HumanName.last_name("")
      {:error, "No valid name found"}
  """
  def last_name(full_name) when is_present(full_name) do
    HumanName.Native.last_name(full_name)
  end

  def last_name(_invalid), do: invalid_name_error()

  @doc """
  Returns a formatted version of just the first (given) and last (surname) names.

  ## Example

      iex> HumanName.normalize("<NAME>")
      {:ok, "<NAME>"}

      iex> HumanName.normalize("Dr. Alibaster Cornelius Juniper III")
      {:ok, "Alibaster Juniper"}

      iex> HumanName.normalize("fred mccalister ")
      {:ok, "<NAME>"}

      iex> HumanName.normalize("")
      {:error, "No valid name found"}
  """
  def normalize(full_name) when is_present(full_name) do
    HumanName.Native.normalize(full_name)
  end

  def normalize(_invalid), do: invalid_name_error()

  @doc """
  Returns a formatted version of the entire name.

  ## Example

      iex> HumanName.normalize_full("<NAME>")
      {:ok, "<NAME>"}

      iex> HumanName.normalize_full("Dr. <NAME> III")
      {:ok, "<NAME>, III"}

      iex> HumanName.normalize_full("")
      {:error, "No valid name found"}
  """
  def normalize_full(full_name) when is_present(full_name) do
    HumanName.Native.normalize_full(full_name)
  end

  def normalize_full(_invalid), do: invalid_name_error()

  # Shared error result for anything that is not a non-empty string.
  defp invalid_name_error, do: {:error, "No valid name found"}
end
|
lib/human_name.ex
| 0.735262
| 0.435001
|
human_name.ex
|
starcoder
|
defmodule Snitch.Data.Schema.StockLocation do
  @moduledoc """
  Models a store location or a warehouse where stock is stored, ready to be
  shipped.
  """

  use Snitch.Data.Schema

  alias Snitch.Data.Schema.{Country, State, StockItem}

  @typedoc """
  ## Fields

  1. `:propagate_all_variants`
     If this is set to `true` when creating a new `StockLocation`, then a
     `StockItem` entry with `0` `:count_on_hand` and this `StockLocation` is
     created for all currently existing variants.
  """
  @type t :: %__MODULE__{}

  schema "snitch_stock_locations" do
    field(:name, :string)
    # Internal system name
    field(:admin_name, :string)
    # Default-location flag; no uniqueness constraint is applied in this
    # schema.
    field(:default, :boolean, default: false)
    field(:address_line_1, :string)
    field(:address_line_2, :string)
    field(:city, :string)
    field(:zip_code, :string)
    field(:phone, :string)
    field(:propagate_all_variants, :boolean, default: true)
    field(:backorderable_default, :boolean, default: false)
    field(:active, :boolean, default: true)

    has_many(:stock_items, StockItem)
    has_many(:stock_movements, through: [:stock_items, :stock_movements])
    belongs_to(:state, State)
    belongs_to(:country, Country)

    timestamps()
  end

  @required_fields ~w(name address_line_1 state_id country_id)a
  @cast_fields ~w(admin_name address_line_2 city zip_code phone propagate_all_variants)a ++
                 ~w(backorderable_default active)a ++ @required_fields

  @doc """
  Returns a changeset for creating a `StockLocation`.

  Create and update currently share the same validations — see
  `changeset/2` below.
  """
  @spec create_changeset(t, map) :: Ecto.Changeset.t()
  def create_changeset(%__MODULE__{} = stock_location, params),
    do: changeset(stock_location, params)

  @doc """
  Returns a changeset for updating a `StockLocation`.
  """
  @spec update_changeset(t, map) :: Ecto.Changeset.t()
  def update_changeset(%__MODULE__{} = stock_location, params),
    do: changeset(stock_location, params)

  # Shared validations:
  # * address_line_1 must be at least 10 characters long
  # * phone, when present, must be exactly 10 digits (Ecto's
  #   validate_format skips missing/nil fields)
  # * state and country must reference existing rows
  defp changeset(stock_location, params) do
    stock_location
    |> cast(params, @cast_fields)
    |> validate_required(@required_fields)
    |> validate_length(:address_line_1, min: 10)
    |> validate_format(:phone, ~r/^\d{10}$/)
    |> foreign_key_constraint(:state_id)
    |> foreign_key_constraint(:country_id)
  end
end
|
apps/snitch_core/lib/core/data/schema/stock/stock_location.ex
| 0.86411
| 0.409486
|
stock_location.ex
|
starcoder
|
defmodule Asteroid.Config do
@moduledoc """
Specification of configuration options and callbacks
"""
require Asteroid.Config.Builder
alias Asteroid.Client
alias Asteroid.Crypto
alias Asteroid.OIDC
alias Asteroid.Subject
@typedoc """
A map describing scope configuration
The map keys are the scope's names. The map values are `Keyword.t/0` with the following
options:
- `:auto`: if true, the scope is automatically granted
## Example
```elixir
%{
"scope-a" => [auto: true],
"scope-b" => [auto: true],
"scope-c" => [auto: false],
"scope-d" => [],
"scope-f" => [auto: true],
}
```
"""
@type scope_config :: map()
Asteroid.Config.Builder.defconfig do
@doc """
Access token store configuration
#### Options
- `:module`: the name of the module implementing the token's behaviours. No default,
**mandatory**
- `:opts`: options that will be passed to the all token's implementation functions. Refer to
the implementation documentation. Defaults to `[]`
- `:auto_install`: `boolean()` indicating whether the `install/1` callback of the
impementation should be called at Asteroid startup. Defaults to `true`
- `:auto_start`: `boolean()` indicating whether the `start_link/1` or `start/1` callback of
the Implementation should be called at Asteroid startup. Defaults to `true`
#### Example
```elixir
config :asteroid, :object_store_access_token, [
module: Asteroid.ObjectStore.AccessToken.Riak,
opts: [bucket_type: "ephemeral_token", purge_interval: 10]
]
```
"""
@type object_store_access_token :: Keyword.t()
field(:object_store_access_token,
config_time: :runtime
)
@doc """
Refresh token store configuration
#### Options
- `:module`: the name of the module implementing the token's behaviours. No default,
**mandatory**
- `:opts`: options that will be passed to the all token's implementation functions. Refer to
the implementation documentation. Defaults to `[]`
- `:auto_install`: `boolean()` indicating whether the `install/1` callback of the
impementation should be called at Asteroid startup. Defaults to `true`
- `:auto_start`: `boolean()` indicating whether the `start_link/1` or `start/1` callback of
the Implementation should be called at Asteroid startup. Defaults to `true`
#### Example
```elixir
config :asteroid, :object_store_refresh_token, [
module: Asteroid.ObjectStore.RefreshToken.Mnesia
]
```
"""
@type object_store_refresh_token :: Keyword.t()
field(:object_store_refresh_token,
config_time: :runtime
)
@doc """
Authorization code store configuration
#### Options
- `:module`: the name of the module implementing the token's behaviours. No default,
**mandatory**
- `:opts`: options that will be passed to the all token's implementation functions. Refer to
the implementation documentation. Defaults to `[]`
- `:auto_install`: `boolean()` indicating whether the `install/1` callback of the
impementation should be called at Asteroid startup. Defaults to `true`
- `:auto_start`: `boolean()` indicating whether the `start_link/1` or `start/1` callback of
the Implementation should be called at Asteroid startup. Defaults to `true`
#### Example
```elixir
config :asteroid, :object_store_authorization_code, [
module: Asteroid.ObjectStore.AuthorizationCode.Mnesia
]
```
"""
@type object_store_authorization_code :: Keyword.t()
field(:object_store_authorization_code,
config_time: :runtime
)
@doc """
Device code store configuration
#### Options
- `:module`: the name of the module implementing the token's behaviours. No default,
**mandatory**
- `:opts`: options that will be passed to the all token's implementation functions. Refer to
the implementation documentation. Defaults to `[]`
- `:auto_install`: `boolean()` indicating whether the `install/1` callback of the
impementation should be called at Asteroid startup. Defaults to `true`
- `:auto_start`: `boolean()` indicating whether the `start_link/1` or `start/1` callback of
the Implementation should be called at Asteroid startup. Defaults to `true`
#### Example
```elixir
config :asteroid, :object_store_device_code, [
module: Asteroid.ObjectStore.DeviceCode.Mnesia
]
```
"""
@type object_store_device_code :: Keyword.t()
field(:object_store_device_code,
config_time: :runtime
)
@doc """
Request object store configuration
#### Options
- `:module`: the name of the module implementing the token's behaviours. No default,
**mandatory**
- `:opts`: options that will be passed to the all token's implementation functions. Refer to
the implementation documentation. Defaults to `[]`
- `:auto_install`: `boolean()` indicating whether the `install/1` callback of the
impementation should be called at Asteroid startup. Defaults to `true`
- `:auto_start`: `boolean()` indicating whether the `start_link/1` or `start/1` callback of
the Implementation should be called at Asteroid startup. Defaults to `true`
#### Example
```elixir
config :asteroid, :object_store_request_object, [
module: Asteroid.ObjectStore.GenericKV.Mnesia
]
```
"""
@type object_store_request_object :: Keyword.t()
field(:object_store_request_object,
config_time: :runtime
)
@doc """
Authenticated session store configuration
#### Options
- `:module`: the name of the module implementing the object's behaviours. No default,
**mandatory**
- `:opts`: options that will be passed to the all object's implementation functions. Refer
to the implementation documentation. Defaults to `[]`
- `:auto_install`: `boolean()` indicating whether the `install/1` callback of the
impementation should be called at Asteroid startup. Defaults to `true`
- `:auto_start`: `boolean()` indicating whether the `start_link/1` or `start/1` callback of
the Implementation should be called at Asteroid startup. Defaults to `true`
#### Example
```elixir
config :asteroid, :object_store_authenticated_session, [
module: Asteroid.ObjectStore.AuthenticatedSession.Mnesia
]
```
"""
@type object_store_authenticated_session :: Keyword.t()
field(:object_store_authenticated_session,
config_time: :runtime
)
@doc """
Authentication event store configuration
#### Options
- `:module`: the name of the module implementing the object's behaviours. No default,
**mandatory**
- `:opts`: options that will be passed to the all object's implementation functions. Refer
to the implementation documentation. Defaults to `[]`
- `:auto_install`: `boolean()` indicating whether the `install/1` callback of the
impementation should be called at Asteroid startup. Defaults to `true`
- `:auto_start`: `boolean()` indicating whether the `start_link/1` or `start/1` callback of
the Implementation should be called at Asteroid startup. Defaults to `true`
#### Example
```elixir
config :asteroid, :object_store_authenticated_session, [
module: Asteroid.ObjectStore.AuthenticationEvent.Mnesia
]
```
"""
@type object_store_authentication_event :: Keyword.t()
field(:object_store_authentication_event,
config_time: :runtime
)
@doc """
Callback invoked before storing a refresh token
"""
@type object_store_refresh_token_before_store_callback ::
(Asteroid.Token.RefreshToken.t(), Asteroid.Context.t() ->
Asteroid.Token.RefreshToken.t())
field(:object_store_refresh_token_before_store_callback,
config_time: :runtime
)
@doc """
Callback invoked before storing an access token
"""
@type object_store_access_token_before_store_callback ::
(Asteroid.Token.AccessToken.t(), Asteroid.Context.t() ->
Asteroid.Token.RefreshToken.t())
field(:object_store_access_token_before_store_callback,
config_time: :runtime
)
@doc """
Callback invoked before storing an authorization code
"""
@type object_store_authorization_code_before_store_callback ::
(Asteroid.Token.AuthorizationCode.t(), Asteroid.Context.t() ->
Asteroid.Token.AuthorizationCode.t())
field(:object_store_authorization_code_before_store_callback,
config_time: :runtime
)
@doc """
Callback invoked before storing a device code
"""
@type object_store_device_code_before_store_callback ::
(Asteroid.Token.DeviceCode.t(), Asteroid.Context.t() ->
Asteroid.Token.DeviceCode.t())
field(:object_store_device_code_before_store_callback,
config_time: :runtime
)
@doc """
Callback invoked before storing authenticated session
"""
@type object_store_authenticated_session_before_store_callback ::
(Asteroid.OIDC.AuthenticatedSession.t(), Asteroid.Context.t() ->
Asteroid.OIDC.AuthenticatedSession.t())
field(:object_store_authenticated_session_before_store_callback,
config_time: :runtime
)
@doc """
Callback invoked before storing authentication event
"""
@type object_store_authentication_event_before_store_callback ::
(Asteroid.OIDC.AuthenticationEvent.t(), Asteroid.Context.t() ->
Asteroid.OIDC.AuthenticationEvent.t())
field(:object_store_authentication_event_before_store_callback,
config_time: :runtime
)
@doc """
Plugs installed on `"/api/oauth2"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_oauth2_plugs :: [{module(), Keyword.t()}]
field(:api_oauth2_plugs,
config_time: :compile
)
@doc """
Plugs installed on `"/api/oauth2/token"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_oauth2_endpoint_token_plugs :: [{module(), Keyword.t()}]
field(:api_oauth2_endpoint_token_plugs,
config_time: :compile
)
@doc """
Plugs installed on `"/api/oauth2/introspect"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_oauth2_endpoint_introspect_plugs :: [{module(), Keyword.t()}]
field(:api_oauth2_endpoint_introspect_plugs,
config_time: :compile
)
@doc """
Plugs installed on `"/api/oauth2/revoke"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_oauth2_endpoint_revoke_plugs :: [{module(), Keyword.t()}]
field(:api_oauth2_endpoint_revoke_plugs,
config_time: :compile
)
@doc """
Plugs installed on `"/api/oauth2/register"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_oauth2_endpoint_register_plugs :: [{module(), Keyword.t()}]
field(:api_oauth2_endpoint_register_plugs,
config_time: :compile
)
@doc """
Plugs installed on `"/api/oauth2/device_authorization"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_oauth2_endpoint_device_authorization_plugs :: [{module(), Keyword.t()}]
field(:api_oauth2_endpoint_device_authorization_plugs,
config_time: :compile
)
@doc """
Plugs installed on `/.well-known/`
See also [protecting APIs](protecting-apis.html)
"""
@type well_known_plugs :: [{module(), Keyword.t()}]
field(:well_known_plugs,
config_time: :compile
)
@doc """
Plugs installed on `/discovery/`
See also [protecting APIs](protecting-apis.html)
"""
@type discovery_plugs :: [{module(), Keyword.t()}]
field(:discovery_plugs,
config_time: :compile
)
@doc """
Plugs installed on browser pathes
See also [protecting APIs](protecting-apis.html)
"""
@type browser_plugs :: [{module(), Keyword.t()}]
field(:browser_plugs,
config_time: :compile
)
@doc """
List of enabled grant types
It is used in OAuth2 APIs (such as `/token`) so as to determine support, and for metadata
generation.
"""
@type oauth2_grant_types_enabled :: [Asteroid.OAuth2.grant_type()]
field(:oauth2_grant_types_enabled,
config_time: :runtime
)
@doc """
List of enabled response types
It is used in OAuth2 web authentication flows (`/authorize`) so as to determine support,
and for metadata generation.
"""
@type oauth2_response_types_enabled :: [Asteroid.OAuth2.response_type()]
field(:oauth2_response_types_enabled,
config_time: :runtime
)
@doc """
Callback to verify username and password in the ROPC flow.
"""
@typedoc """
Callback function for the `:oauth2_flow_ropc_username_password_verify_callback` configuration
option.
Calls the callback `callback` with `callback.(conn, username, password)`
"""
@type oauth2_ropc_username_password_verify_callback ::
(Plug.Conn.t(), String.t(), String.t() ->
{:ok, Asteroid.Subject.t()} | {:error, Exception.t()})
field(:oauth2_flow_ropc_username_password_verify_callback,
config_time: :runtime
)
@doc """
Verbosity level for the API's error messages
The `:debug` level can return information useful to attackers. The `:minimal` level can
break the specification's support.
"""
@type api_error_response_verbosity :: :debug | :normal | :minimal
field(:api_error_response_verbosity,
config_time: :runtime
)
@doc """
Scope configuration for the ROPC flow
"""
@type oauth2_flow_ropc_scope_config :: scope_config()
field(:oauth2_flow_ropc_scope_config,
config_time: :runtime,
used_by: [:oauth2_scope_callback]
)
@doc """
Callback called to set scopes according to the configuration
"""
@type oauth2_scope_callback ::
(OAuth2Utils.Scope.Set.t(), Asteroid.Context.t() -> OAuth2Utils.Scope.Set.t())
field(:oauth2_scope_callback,
config_time: :runtime,
uses: [
:oauth2_flow_ropc_scope_config,
:oauth2_flow_client_credentials_scope_config,
:oauth2_flow_device_authorization_scope_config,
:oidc_flow_authorization_code_scope_config,
:oidc_flow_implicit_scope_config,
:oidc_flow_hybrid_scope_config
]
)
@doc """
Defines whether a refresh token should be issued when initiating an ROPC flow
"""
@type oauth2_flow_ropc_issue_refresh_token_init :: boolean()
field(:oauth2_flow_ropc_issue_refresh_token_init,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines whether a refresh token should be issued when refreshing tokens in the ROPC flow
"""
@type oauth2_flow_ropc_issue_refresh_token_refresh :: boolean()
field(:oauth2_flow_ropc_issue_refresh_token_refresh,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Callback called to determine whether a refresh token should be issued
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oauth2_issue_refresh_token_callback :: (Asteroid.Context.t() -> boolean())
field(:oauth2_issue_refresh_token_callback,
config_time: :runtime,
uses: [
:oauth2_issue_refresh_token_init,
:oauth2_issue_refresh_token_refresh,
:oauth2_flow_ropc_issue_refresh_token_init,
:oauth2_flow_ropc_issue_refresh_token_refresh,
:oauth2_flow_client_credentials_issue_refresh_token_init,
:oauth2_flow_client_credentials_issue_refresh_token_refresh,
:oauth2_flow_authorization_code_issue_refresh_token_init,
:oauth2_flow_authorization_code_issue_refresh_token_refresh,
:oauth2_flow_device_authorization_issue_refresh_token_init,
:oauth2_flow_device_authorization_issue_refresh_token_refresh,
:oidc_flow_authorization_code_issue_refresh_token_init,
:oidc_flow_authorization_code_issue_refresh_token_refresh,
:oidc_flow_hybrid_issue_refresh_token_init,
:oidc_flow_hybrid_issue_refresh_token_refresh
]
)
@doc """
Defines the lifetime of a refresh token in the ROPC flow
"""
@type oauth2_flow_ropc_refresh_token_lifetime :: non_neg_integer()
field(:oauth2_flow_ropc_refresh_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_refresh_token_lifetime_callback],
unit: "seconds"
)
@doc """
Callback called to determine the lifetime of a refresh token
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oauth2_refresh_token_lifetime_callback :: (Asteroid.Context.t() -> non_neg_integer())
field(:oauth2_refresh_token_lifetime_callback,
config_time: :runtime,
uses: [
:oauth2_refresh_token_lifetime,
:oauth2_flow_ropc_refresh_token_lifetime,
:oauth2_flow_client_credentials_refresh_token_lifetime,
:oauth2_flow_authorization_code_refresh_token_lifetime,
:oauth2_flow_device_authorization_refresh_token_lifetime,
:oidc_flow_authorization_code_refresh_token_lifetime,
:oidc_flow_hybrid_refresh_token_lifetime
]
)
@doc """
Defines the lifetime of an access token in the ROPC flow
"""
@type oauth2_flow_ropc_access_token_lifetime :: non_neg_integer()
field(:oauth2_flow_ropc_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the serialization format of an access token in the ROPC flow
"""
@type oauth2_flow_ropc_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oauth2_flow_ropc_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the signing key name of an access token in the ROPC flow
"""
@type oauth2_flow_ropc_access_token_signing_key :: Crypto.Key.name()
field(:oauth2_flow_ropc_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the signing algorithm of an access token in the ROPC flow
"""
@type oauth2_flow_ropc_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oauth2_flow_ropc_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
Callback called to determine the lifetime of an access token
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oauth2_access_token_lifetime_callback :: (Asteroid.Context.t() -> non_neg_integer())
field(:oauth2_access_token_lifetime_callback,
config_time: :runtime,
uses: [
:oauth2_access_token_lifetime,
:oauth2_flow_ropc_access_token_lifetime,
:oauth2_flow_client_credentials_access_token_lifetime,
:oauth2_flow_authorization_code_access_token_lifetime,
:oauth2_flow_implicit_access_token_lifetime,
:oauth2_flow_device_authorization_access_token_lifetime,
:oidc_flow_authorization_code_access_token_lifetime,
:oidc_flow_implicit_access_token_lifetime,
:oidc_flow_hybrid_access_token_lifetime
]
)
@doc """
Callback called to determine the serialization format of an access token
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oauth2_access_token_serialization_format_callback ::
(Asteroid.Context.t() -> Asteroid.Token.serialization_format())
field(:oauth2_access_token_serialization_format_callback,
config_time: :runtime,
uses: [
:oauth2_access_token_serialization_format,
:oauth2_flow_ropc_access_token_serialization_format,
:oauth2_flow_client_credentials_access_token_serialization_format,
:oauth2_flow_authorization_code_access_token_serialization_format,
:oauth2_flow_implicit_access_token_serialization_format,
:oauth2_flow_device_authorization_access_token_serialization_format,
:oidc_flow_authorization_code_access_token_serialization_format,
:oidc_flow_implicit_access_token_serialization_format,
:oidc_flow_hybrid_access_token_serialization_format
]
)
@doc """
Callback called to determine the signing key name of an access token
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oauth2_access_token_signing_key_callback ::
(Asteroid.Context.t() -> Crypto.Key.name())
field(:oauth2_access_token_signing_key_callback,
config_time: :runtime,
uses: [
:oauth2_access_token_signing_key,
:oauth2_flow_ropc_access_token_signing_key,
:oauth2_flow_client_credentials_access_token_signing_key,
:oauth2_flow_authorization_code_access_token_signing_key,
:oauth2_flow_implicit_access_token_signing_key,
:oauth2_flow_device_authorization_access_token_signing_key,
:oidc_flow_authorization_code_access_token_signing_key,
:oidc_flow_implicit_access_token_signing_key,
:oidc_flow_hybrid_access_token_signing_key
]
)
@doc """
Callback called to determine the signing algorithm of an access token
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oauth2_access_token_signing_alg_callback ::
(Asteroid.Context.t() -> Crypto.Key.jws_alg())
field(:oauth2_access_token_signing_alg_callback,
config_time: :runtime,
uses: [
:oauth2_access_token_signing_alg,
:oauth2_flow_ropc_access_token_signing_alg,
:oauth2_flow_client_credentials_access_token_signing_alg,
:oauth2_flow_authorization_code_access_token_signing_alg,
:oauth2_flow_implicit_access_token_signing_alg,
:oauth2_flow_device_authorization_access_token_signing_alg,
:oidc_flow_authorization_code_access_token_signing_alg,
:oidc_flow_implicit_access_token_signing_alg,
:oidc_flow_hybrid_access_token_signing_alg
]
)
@doc """
Callback called to determine the lifetime of an authorization code
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oauth2_authorization_code_lifetime_callback ::
(Asteroid.Context.t() -> non_neg_integer())
field(:oauth2_authorization_code_lifetime_callback,
config_time: :runtime,
uses: [
:oauth2_authorization_code_lifetime,
:oauth2_flow_authorization_code_authorization_code_lifetime,
:oidc_flow_authorization_code_authorization_code_lifetime,
:oidc_flow_hybrid_authorization_code_lifetime
]
)
@doc """
Defines the lifetime of an authorization code in the code flow
"""
@type oauth2_flow_authorization_code_authorization_code_lifetime :: non_neg_integer()
field(:oauth2_flow_authorization_code_authorization_code_lifetime,
config_time: :runtime,
used_by: [:oauth2_authorization_code_lifetime_callback],
unit: "seconds"
)
@doc """
Callback invoked on the json response when the grant_type is "password"
"""
@type oauth2_endpoint_token_grant_type_password_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oauth2_endpoint_token_grant_type_password_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response when the grant_type is "password"
"""
@type oauth2_endpoint_token_grant_type_password_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_token_grant_type_password_before_send_conn_callback,
config_time: :runtime
)
@doc """
Callback invoked on the json response when the grant_type is "refresh_token"
"""
@type oauth2_endpoint_token_grant_type_refresh_token_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oauth2_endpoint_token_grant_type_refresh_token_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response when the grant_type is "refresh_token"
"""
@type oauth2_endpoint_token_grant_type_refresh_token_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_token_grant_type_refresh_token_before_send_conn_callback,
config_time: :runtime
)
@doc """
Callback invoked to determine if a client is authorized to introspect tokens on the
`"/introspect"` endpoint
"""
@type oauth2_endpoint_introspect_client_authorized ::
(Asteroid.Client.t() -> boolean())
field(:oauth2_endpoint_introspect_client_authorized,
config_time: :runtime
)
@doc """
Defines the default claims to be returned from the `"/introspect"` endpoint
Note that client's configuration takes precedence over this configuration option.
"""
@type oauth2_endpoint_introspect_claims_resp :: [String.t()]
field(:oauth2_endpoint_introspect_claims_resp,
config_time: :runtime,
used_by: [:oauth2_endpoint_introspect_claims_resp_callback]
)
@doc """
Callback invoked to determine the claims to be returned from the `"/introspect"` endpoint
"""
@type oauth2_endpoint_introspect_claims_resp_callback ::
(Asteroid.Client.t() -> [String.t()])
field(:oauth2_endpoint_introspect_claims_resp_callback,
config_time: :runtime,
uses: [:oauth2_endpoint_introspect_claims_resp]
)
@doc """
Callback invoked on the json response on the `"/introspect"` endpoint
"""
@type oauth2_endpoint_introspect_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oauth2_endpoint_introspect_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response on the `"/introspect"` endpoint
"""
@type oauth2_endpoint_introspect_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_introspect_before_send_conn_callback,
config_time: :runtime
)
@doc """
Scope configuration for the client credentials flow
"""
@type oauth2_flow_client_credentials_scope_config :: scope_config()
field(:oauth2_flow_client_credentials_scope_config,
config_time: :runtime,
used_by: [:oauth2_scope_callback]
)
@doc """
Defines whether a refresh token should be issued when initiating a client credentials
flow
Note that, according to the specification, you should not release a refresh token in
this flow.
"""
@type oauth2_flow_client_credentials_issue_refresh_token_init :: boolean()
field(:oauth2_flow_client_credentials_issue_refresh_token_init,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines whether a refresh token should be issued when refreshing tokens
"""
@type oauth2_flow_client_credentials_issue_refresh_token_refresh :: boolean()
field(:oauth2_flow_client_credentials_issue_refresh_token_refresh,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines the lifetime of a refresh token in the client credentials flow
"""
@type oauth2_flow_client_credentials_refresh_token_lifetime :: non_neg_integer()
field(:oauth2_flow_client_credentials_refresh_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_refresh_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an access token in the client credentials flow
"""
@type oauth2_flow_client_credentials_access_token_lifetime :: non_neg_integer()
field(:oauth2_flow_client_credentials_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the serialization format of an access token in the client credentials flow
"""
@type oauth2_flow_client_credentials_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oauth2_flow_client_credentials_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the signing key name of an access token in the client credentials flow
"""
@type oauth2_flow_client_credentials_access_token_signing_key :: Crypto.Key.name()
field(:oauth2_flow_client_credentials_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the signing algorithm of an access token in the client credentials flow
"""
@type oauth2_flow_client_credentials_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oauth2_flow_client_credentials_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
Callback invoked on the json response when the grant_type is `"client_credentials"`
"""
@type oauth2_endpoint_token_grant_type_client_credentials_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oauth2_endpoint_token_grant_type_client_credentials_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response when the grant_type is
`"client_credentials"`
"""
@type oauth2_endpoint_token_grant_type_client_credentials_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_token_grant_type_client_credentials_before_send_conn_callback,
config_time: :runtime
)
@doc """
Default callback invoked on the `/authorize` endpoint to trigger the web authorization
process flow for the OAuth2 authorization code flow
This workflow is in charge of authenticating and authorizing (scopes...) the user with regard
to the request. It will typically involve several steps, i.e. display of web pages. It
returns a `Plug.Conn.t()` to Phoenix, but not to Asteroid directly. At the end of the process,
one of these callbacks shall be called:
- `AsteroidWeb.AuthorizeController.authorization_granted/2`
- `AsteroidWeb.AuthorizeController.authorization_denied/2`
"""
@type oauth2_flow_authorization_code_web_authorization_callback ::
AsteroidWeb.AuthorizeController.web_authorization_callback()
field(:oauth2_flow_authorization_code_web_authorization_callback,
config_time: :runtime,
used_by: [:web_authorization_callback]
)
@doc """
Callback invoked on the `t:Asteroid.OAuth2.RedirectUri.t/0` response on the `/authorize`
endpoint
"""
@type oauth2_endpoint_authorize_before_send_redirect_uri_callback ::
(Asteroid.OAuth2.RedirectUri.t(), Asteroid.Context.t() ->
Asteroid.OAuth2.RedirectUri.t())
field(:oauth2_endpoint_authorize_before_send_redirect_uri_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response on the `/authorize` endpoint
The connection is redirected immediately after this callback returns.
"""
@type oauth2_endpoint_authorize_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_authorize_before_send_conn_callback,
config_time: :runtime
)
@doc """
Defines whether a refresh token should be issued when submitting an authorization code
in the authorization code flow
"""
@type oauth2_flow_authorization_code_issue_refresh_token_init :: boolean()
field(:oauth2_flow_authorization_code_issue_refresh_token_init,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines whether a refresh token should be issued when refreshing tokens in the authorization
code flow
"""
@type oauth2_flow_authorization_code_issue_refresh_token_refresh :: boolean()
field(:oauth2_flow_authorization_code_issue_refresh_token_refresh,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines the lifetime of a refresh token in the authorization code flow
"""
@type oauth2_flow_authorization_code_refresh_token_lifetime :: non_neg_integer()
field(:oauth2_flow_authorization_code_refresh_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_refresh_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an access token in the authorization code flow
"""
@type oauth2_flow_authorization_code_access_token_lifetime :: non_neg_integer()
field(:oauth2_flow_authorization_code_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the serialization format of an access token in the authorization code flow
"""
@type oauth2_flow_authorization_code_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oauth2_flow_authorization_code_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the signing key name of an access token in the authorization code flow
"""
@type oauth2_flow_authorization_code_access_token_signing_key :: Crypto.Key.name()
field(:oauth2_flow_authorization_code_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the signing algorithm of an access token in the authorization code flow
"""
@type oauth2_flow_authorization_code_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oauth2_flow_authorization_code_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
Callback invoked on the json response when the grant_type is "authorization_code"
"""
@type oauth2_endpoint_token_grant_type_authorization_code_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oauth2_endpoint_token_grant_type_authorization_code_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response when the grant_type is
"authorization_code"
"""
@type oauth2_endpoint_token_grant_type_authorization_code_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_token_grant_type_authorization_code_before_send_conn_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` in the `/revoke` endpoint
"""
@type oauth2_endpoint_revoke_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_revoke_before_send_conn_callback,
config_time: :runtime
)
@doc """
Default callback invoked on the `/authorize` endpoint to trigger the web authorization
process flow for the OAuth2 implicit flow
This workflow is in charge of authenticating and authorizing (scopes...) the user with regard
to the request. It will typically involve several steps, i.e. display of web pages. It
returns a `Plug.Conn.t()` to Phoenix, but not to Asteroid directly. At the end of the process,
one of these callbacks shall be called:
- `AsteroidWeb.AuthorizeController.authorization_granted/2`
- `AsteroidWeb.AuthorizeController.authorization_denied/2`
"""
@type oauth2_flow_implicit_web_authorization_callback ::
(Plug.Conn.t(), AsteroidWeb.AuthorizeController.Request.t() -> Plug.Conn.t())
field(:oauth2_flow_implicit_web_authorization_callback,
config_time: :runtime,
used_by: [
:web_authorization_callback
]
)
@doc """
Defines the lifetime of an access token in the implicit flow
"""
@type oauth2_flow_implicit_access_token_lifetime :: non_neg_integer()
field(:oauth2_flow_implicit_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the serialization format of an access token in the implicit flow
"""
@type oauth2_flow_implicit_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oauth2_flow_implicit_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the signing key name of an access token in the implicit flow
"""
@type oauth2_flow_implicit_access_token_signing_key :: Crypto.Key.name()
field(:oauth2_flow_implicit_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the signing algorithm of an access token in the implicit flow
"""
@type oauth2_flow_implicit_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oauth2_flow_implicit_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
The PKCE policy
This configuration option can have 3 values:
- `:disabled`: PKCE support is disabled
- `:mandatory`: all requests using the authorization code flow must use PKCE
- `:optional`: use of PKCE is optional, except for clients marked as forced to use it
"""
@type oauth2_pkce_policy :: :disabled | :optional | :mandatory
field(:oauth2_pkce_policy,
config_time: :runtime
)
@doc """
Code challenge methods supported
Supported methods are the following atoms:
- `:plain`
- `:S256`
"""
@type oauth2_pkce_allowed_methods :: [atom()]
field(:oauth2_pkce_allowed_methods,
config_time: :runtime
)
@doc """
Returns `true` if the client has to use PKCE, `false` otherwise
Used only when the `:oauth2_pkce_policy` configuration option is set to `:optional`
"""
@type oauth2_pkce_must_use_callback ::
(Client.t() -> boolean())
field(:oauth2_pkce_must_use_callback,
config_time: :runtime
)
@doc """
Callback called to determine whether a client is authorized to create new clients on
the register endpoint or not
"""
@type oauth2_endpoint_register_authorization_callback ::
(Plug.Conn.t(), Asteroid.Client.t() ->
:ok | {:error, Exception.t()})
field(:oauth2_endpoint_register_authorization_callback,
config_time: :runtime,
uses: [:oauth2_endpoint_register_authorization_policy]
)
@doc """
The client registration policy
This configuration option can have 3 values:
- `:all`: all clients are allowed to register new clients. Be careful when using this
value because public clients and the clients created by these public clients could DDOS the
client registration endpoint. You might consider severely rate-limiting these requests in
this case
- `:authenticated_clients`: only authenticated clients are allowed to create new clients
- `:authorized_clients`: clients that have the `"asteroid.register"` scope set or that
authenticate to that endpoint with an access token containing that scope
"""
@type oauth2_endpoint_register_authorization_policy ::
:all
| :authenticated_clients
| :authorized_clients
field(:oauth2_endpoint_register_authorization_policy,
config_time: :runtime,
used_by: [:oauth2_endpoint_register_authorization_callback]
)
@doc """
Additional fields that are saved when registering new clients
Note that this option is overridden by client configuration, if present.
"""
@type oauth2_endpoint_register_additional_metadata_field :: [String.t()]
field(:oauth2_endpoint_register_additional_metadata_field,
config_time: :runtime
)
@doc """
Callback invoked on the json response when on the register endpoint
"""
@type oauth2_endpoint_register_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oauth2_endpoint_register_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response on the register endpoint
"""
@type oauth2_endpoint_register_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_register_before_send_conn_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Asteroid.Client.t/0` before it is saved
"""
@type oauth2_endpoint_register_client_before_save_callback ::
(Client.t(), Asteroid.Context.t() -> Client.t())
field(:oauth2_endpoint_register_client_before_save_callback,
config_time: :runtime
)
@doc """
Callback invoked to generate the client id of a newly created client
The callback should ensure that the client id does not already exists.
"""
@type oauth2_endpoint_register_gen_client_id_callback ::
(map(), Asteroid.Context.t() -> String.t())
field(:oauth2_endpoint_register_gen_client_id_callback,
config_time: :runtime
)
@doc """
Callback invoked to generate the client *resource* id of a newly created client
"""
@type oauth2_endpoint_register_gen_client_resource_id_callback ::
(map(), Asteroid.Context.t() -> AttributeRepository.resource_id())
field(:oauth2_endpoint_register_gen_client_resource_id_callback,
config_time: :runtime
)
@doc """
Callback invoked to determine the client type
"""
@type oauth2_endpoint_register_client_type_callback ::
(Client.t() -> Asteroid.OAuth2.Client.type())
field(:oauth2_endpoint_register_client_type_callback,
config_time: :runtime
)
@doc """
Callback called to determine the supported authentication of the token endpoint
"""
@type oauth2_endpoint_token_auth_methods_supported_callback ::
(() -> [Asteroid.OAuth2.Endpoint.auth_method()])
field(:oauth2_endpoint_token_auth_methods_supported_callback,
config_time: :runtime
)
@doc """
OAuth2 metadata service documentation URL
"""
@type oauth2_endpoint_metadata_service_documentation :: String.t()
field(:oauth2_endpoint_metadata_service_documentation,
config_time: :runtime
)
@doc """
OAuth2 metadata UI locales supported
"""
@type oauth2_endpoint_metadata_ui_locales_supported :: [String.t()]
field(:oauth2_endpoint_metadata_ui_locales_supported,
config_time: :runtime
)
@doc """
OAuth2 metadata OP policy URL
"""
@type oauth2_endpoint_metadata_op_policy_uri :: String.t()
field(:oauth2_endpoint_metadata_op_policy_uri,
config_time: :runtime
)
@doc """
OAuth2 metadata OP tos URL
"""
@type oauth2_endpoint_metadata_op_tos_uri :: String.t()
field(:oauth2_endpoint_metadata_op_tos_uri,
config_time: :runtime
)
@doc """
Metadata fields to be signed
The configuration option can have 3 values:
- `:disabled`: no metadata fields are signed
- `:all`: all fields are signed
- `[String.t()]`: a list of fields to be included in the signed statement
"""
@type oauth2_endpoint_metadata_signed_fields :: :disabled | :all | [String.t()]
field(:oauth2_endpoint_metadata_signed_fields,
config_time: :runtime
)
@doc """
Key name for the signed metadata fields
"""
@type oauth2_endpoint_metadata_signing_key :: Crypto.Key.name()
field(:oauth2_endpoint_metadata_signing_key,
config_time: :runtime
)
@doc """
Key algorithm for the signed metadata fields
"""
@type oauth2_endpoint_metadata_signing_alg :: Crypto.Key.jws_alg()
field(:oauth2_endpoint_metadata_signing_alg,
config_time: :runtime
)
@doc """
Callback invoked on the json response on the `/.well-known/oauth-authorization-server`
endpoint
Note that this callback is called before optional signature of metadata fields, so that
added fields can be signed as well.
"""
@type oauth2_endpoint_metadata_before_send_resp_callback :: (map() -> map())
field(:oauth2_endpoint_metadata_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response on the
`/.well-known/oauth-authorization-server` endpoint
"""
@type oauth2_endpoint_metadata_before_send_conn_callback ::
(Plug.Conn.t() -> Plug.Conn.t())
field(:oauth2_endpoint_metadata_before_send_conn_callback,
config_time: :runtime
)
@doc """
Callback invoked on the json response on the `/discovery/keys` endpoint
"""
@type oauth2_endpoint_discovery_keys_before_send_resp_callback :: (map() -> map())
field(:oauth2_endpoint_discovery_keys_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response on the `/discovery/keys` endpoint
"""
@type oauth2_endpoint_discovery_keys_before_send_conn_callback ::
(Plug.Conn.t() -> Plug.Conn.t())
field(:oauth2_endpoint_discovery_keys_before_send_conn_callback,
config_time: :runtime
)
@doc """
Cryptographic keys configuration
Refer to `t:Asteroid.Crypto.Key.key_config/0` for more information.
**Security consideration**: consider storing keys in a separate configuration file
(such as `secret.exs`).
"""
@type crypto_keys :: Crypto.Key.key_config()
field(:crypto_keys,
config_time: :runtime
)
@doc """
Cryptographic keys cache store
The first element is a module implementing the `Asteroid.Crypto.Key.Cache` behaviour, and
the second element are the module's options.
"""
@type crypto_keys_cache :: {module(), Crypto.Key.Cache.opts()}
field(:crypto_keys_cache,
config_time: :runtime
)
@doc """
Determines whether the `"none"` JWS algorithm is supported
It is set using the `JOSE.JWA.unsecured_signing/1` function on Asteroid startup. Defaults
to `false`.
"""
@type crypto_jws_none_alg_enabled :: boolean()
field(:crypto_jws_none_alg_enabled,
config_time: :runtime
)
@doc """
Scope configuration for the device authorization flow
"""
@type oauth2_flow_device_authorization_scope_config :: scope_config()
field(:oauth2_flow_device_authorization_scope_config,
config_time: :runtime,
used_by: [:oauth2_scope_callback]
)
@doc """
Callback invoked on the json response when the grant_type is
"urn:ietf:params:oauth:grant-type:device_code"
"""
@type oauth2_endpoint_device_authorization_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oauth2_endpoint_device_authorization_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response when the grant_type is
"urn:ietf:params:oauth:grant-type:device_code"
"""
@type oauth2_endpoint_device_authorization_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_device_authorization_before_send_conn_callback,
config_time: :runtime
)
@doc """
Defines the lifetime of a device code in the device authorization flow
"""
@type oauth2_flow_device_authorization_device_code_lifetime :: non_neg_integer()
field(:oauth2_flow_device_authorization_device_code_lifetime,
config_time: :runtime,
unit: "seconds"
)
@doc """
callback to generate the user code
"""
@type oauth2_flow_device_authorization_user_code_callback ::
(Asteroid.Context.t() -> String.t())
field(:oauth2_flow_device_authorization_user_code_callback,
config_time: :runtime,
unit: "seconds"
)
@doc """
Callback invoked on the `/device` endpoint to trigger the web authorization process flow
for the OAuth2 device authorization flow
This workflow is in charge of validating the user code, as well as authenticating and
authorizing (scopes...) the request. It will typically involve several steps, i.e.
user code confirmation, authentication and optionally accepting scopes through web pages.
It returns a `Plug.Conn.t()` to Phoenix, but not to Asteroid directly. At the end of the
process, one of these callbacks shall be called:
- `AsteroidWeb.DeviceController.authorization_granted/2`
- `AsteroidWeb.DeviceController.authorization_denied/2`
"""
@type oauth2_flow_device_authorization_web_authorization_callback ::
(Plug.Conn.t(), AsteroidWeb.DeviceController.Request.t() -> Plug.Conn.t())
field(:oauth2_flow_device_authorization_web_authorization_callback,
config_time: :runtime
)
@doc """
Defines whether a refresh token should be issued when initiating a device authorization
flow
"""
@type oauth2_flow_device_authorization_issue_refresh_token_init :: boolean()
field(:oauth2_flow_device_authorization_issue_refresh_token_init,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines whether a refresh token should be issued when refreshing tokens in the device
authorization flow
"""
@type oauth2_flow_device_authorization_issue_refresh_token_refresh :: boolean()
field(:oauth2_flow_device_authorization_issue_refresh_token_refresh,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines the lifetime of a refresh token in the device authorization flow
"""
@type oauth2_flow_device_authorization_refresh_token_lifetime :: non_neg_integer()
field(:oauth2_flow_device_authorization_refresh_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_refresh_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the serialization format of an access token in the device authorization flow
"""
@type oauth2_flow_device_authorization_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oauth2_flow_device_authorization_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the signing key name of an access token in the device authorization flow
"""
@type oauth2_flow_device_authorization_access_token_signing_key :: Crypto.Key.name()
field(:oauth2_flow_device_authorization_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the signing algorithm of an access token in the device authorization flow
"""
@type oauth2_flow_device_authorization_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oauth2_flow_device_authorization_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
Defines the lifetime of an access token in the device authorization flow
"""
@type oauth2_flow_device_authorization_access_token_lifetime :: non_neg_integer()
field(:oauth2_flow_device_authorization_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Callback invoked on the json response when the grant_type is
"urn:ietf:params:oauth:grant-type:device_code"
"""
@type oauth2_endpoint_token_grant_type_device_code_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oauth2_endpoint_token_grant_type_device_code_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response when the grant_type is
"urn:ietf:params:oauth:grant-type:device_code"
"""
@type oauth2_endpoint_token_grant_type_device_code_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oauth2_endpoint_token_grant_type_device_code_before_send_conn_callback,
config_time: :runtime
)
@doc """
Rate limiter module and options for the device authorization flow
The module throttles the incoming requests on `/api/oauth2/token` on the device code
parameter.
"""
@type oauth2_flow_device_authorization_rate_limiter ::
{module(), Asteroid.OAuth2.DeviceAuthorization.RateLimiter.opts()}
field(:oauth2_flow_device_authorization_rate_limiter,
config_time: :runtime
)
@doc """
Interval in seconds between 2 requests on the `/api/oauth2/token` with the same device code
in the device authorization flow
"""
@type oauth2_flow_device_authorization_rate_limiter_interval :: non_neg_integer()
field(:oauth2_flow_device_authorization_rate_limiter_interval,
config_time: :runtime,
unit: "seconds"
)
@doc """
JWT Secured Authorization Request (JAR) enabling flag
The possible values are:
- `:disabled`: JAR is disabled
- `:request_only`: on ly the `"request"` parameter is enabled
- `:request_uri_only`: only the `"request_uri"` is enabled
- `:enabled`: both the `"request"` and `"request_uri"` parameters are enabled
"""
@type oauth2_jar_enabled :: :disabled | :request_only | :request_uri_only | :enabled
field(:oauth2_jar_enabled,
config_time: :runtime
)
@doc """
Plugs installed on `"/api/request_object"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_request_object_plugs :: [{module(), Keyword.t()}]
field(:api_request_object_plugs,
config_time: :compile
)
@doc """
Defines the lifetime of a request object stored internally
"""
@type oauth2_jar_request_object_lifetime :: non_neg_integer()
field(:oauth2_jar_request_object_lifetime,
config_time: :runtime,
unit: "seconds"
)
@doc """
Set the options of the HTTP request to retrieve external JAR request objects
The options are request options of `HTTPoison.Request`
"""
@type oauth2_jar_request_uri_get_opts :: Keyword.t()
field(:oauth2_jar_request_uri_get_opts,
config_time: :runtime
)
@doc """
List of supported signing algorithms for JAR request objects
"""
@type oauth2_jar_request_object_signing_alg_values_supported :: [Crypto.Key.jws_alg()]
field(:oauth2_jar_request_object_signing_alg_values_supported,
config_time: :runtime
)
@doc """
List of supported encryption algorithms for JAR request objects
"""
@type oauth2_jar_request_object_encryption_alg_values_supported :: [Crypto.Key.jwe_alg()]
field(:oauth2_jar_request_object_encryption_alg_values_supported,
config_time: :runtime
)
@doc """
List of supported encryption encryption algorithms for JAR request objects
"""
@type oauth2_jar_request_object_encryption_enc_values_supported :: [Crypto.Key.jwe_enc()]
field(:oauth2_jar_request_object_encryption_enc_values_supported,
config_time: :runtime
)
@doc """
Determines whether the audience should be checked when the request object is signed
Checks that the audience (one one of them) is the `"issuer"` of the server, using the
`Asteroid.OAuth2.issuer/0` function.
Defaults to `true`. As per the specification, there's no checking when the JWT is not
signed.
"""
@type oauth2_jar_request_object_verify_audience :: boolean()
field(:oauth2_jar_request_object_verify_audience,
config_time: :runtime
)
@doc """
Determines whether the issuer should be checked when the request object is signed
Defaults to `true`. As per the specification, there's no checking when the JWT is not
signed.
"""
@type oauth2_jar_request_object_verify_issuer :: boolean()
field(:oauth2_jar_request_object_verify_issuer,
config_time: :runtime
)
@doc """
Configuration of ACRs
"""
@type oidc_acr_config :: OIDC.ACR.config()
field(:oidc_acr_config,
config_time: :runtime,
used_by: [:web_authorization_callback]
)
@doc """
Scope configuration for the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_scope_config :: scope_config()
field(:oidc_flow_authorization_code_scope_config,
config_time: :runtime,
used_by: [:oauth2_scope_callback]
)
@doc """
Scope configuration for the OIDC implicit flow
"""
@type oidc_flow_implicit_scope_config :: scope_config()
field(:oidc_flow_implicit_scope_config,
config_time: :runtime,
used_by: [:oauth2_scope_callback]
)
@doc """
Scope configuration for the OIDC hybrid flow
"""
@type oidc_flow_hybrid_scope_config :: scope_config()
field(:oidc_flow_hybrid_scope_config,
config_time: :runtime,
used_by: [:oauth2_scope_callback]
)
@doc """
Callback invoked on the `/authorize` endpoint to trigger the web authorization process flow
for the OAuth2 authorization code flow
This workflow is in charge of authenticating and authorizing (scopes...) the user in regards
to the request. It will typically involve several step, i.e. display of web pages. It does
returns a `Plug.Conn.t()` to Phoenix but not to Asteroid directly. At the end of the process,
one of these callback shall be called:
- `AsteroidWeb.AuthorizeController.authorization_granted/2`
- `AsteroidWeb.AuthorizeController.authorization_denied/2`
"""
@type web_authorization_callback ::
AsteroidWeb.AuthorizeController.web_authorization_callback()
field(:web_authorization_callback,
config_time: :runtime,
uses: [
:oauth2_flow_authorization_code_web_authorization_callback,
:oauth2_flow_implicit_web_authorization_callback,
:oidc_acr_config,
:oidc_flow_authorization_code_web_authorization_callback,
:oidc_flow_implicit_web_authorization_callback,
:oidc_flow_hybrid_web_authorization_callback
]
)
@doc """
Callback invoked on the `/authorize` endpoint to trigger the web authorization
process flow for the OpenID Connect authorization code flow, if the
`:oidc_acr_config` configuration option is not used.
This workflow is in charge of authenticating and authorizing (scopes...) the user in regards
to the request. It will typically involve several step, i.e. display of web pages. It does
returns a `Plug.Conn.t()` to Phoenix but not to Asteroid directly. At the end of the process,
one of these callback shall be called:
- `AsteroidWeb.AuthorizeController.authorization_granted/2`
- `AsteroidWeb.AuthorizeController.authorization_denied/2`
"""
@type oidc_flow_authorization_code_web_authorization_callback ::
AsteroidWeb.AuthorizeController.web_authorization_callback()
field(:oidc_flow_authorization_code_web_authorization_callback,
config_time: :runtime,
used_by: [:web_authorization_callback]
)
@doc """
Callback invoked on the `/authorize` endpoint to trigger the web authorization
process flow for the OpenID Connect implicit flow, if the
`:oidc_acr_config` configuration option is not used.
This workflow is in charge of authenticating and authorizing (scopes...) the user in regards
to the request. It will typically involve several step, i.e. display of web pages. It does
returns a `Plug.Conn.t()` to Phoenix but not to Asteroid directly. At the end of the process,
one of these callback shall be called:
- `AsteroidWeb.AuthorizeController.authorization_granted/2`
- `AsteroidWeb.AuthorizeController.authorization_denied/2`
"""
@type oidc_flow_implicit_web_authorization_callback ::
AsteroidWeb.AuthorizeController.web_authorization_callback()
field(:oidc_flow_implicit_web_authorization_callback,
config_time: :runtime,
used_by: [:web_authorization_callback]
)
@doc """
Callback invoked on the `/authorize` endpoint to trigger the web authorization
process flow for the OpenID Connect hybrid flow, if the
`:oidc_acr_config` configuration option is not used.
This workflow is in charge of authenticating and authorizing (scopes...) the user in regards
to the request. It will typically involve several step, i.e. display of web pages. It does
returns a `Plug.Conn.t()` to Phoenix but not to Asteroid directly. At the end of the process,
one of these callback shall be called:
- `AsteroidWeb.AuthorizeController.authorization_granted/2`
- `AsteroidWeb.AuthorizeController.authorization_denied/2`
"""
@type oidc_flow_hybrid_web_authorization_callback ::
AsteroidWeb.AuthorizeController.web_authorization_callback()
field(:oidc_flow_hybrid_web_authorization_callback,
config_time: :runtime,
used_by: [:web_authorization_callback]
)
@doc """
Defines whether a refresh token should be issued when submitting an authorization code
in the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_issue_refresh_token_init :: boolean()
field(:oidc_flow_authorization_code_issue_refresh_token_init,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines whether a refresh token should be issued when refreshing tokens in the OIDC
authorization code flow
"""
@type oidc_flow_authorization_code_issue_refresh_token_refresh :: boolean()
field(:oidc_flow_authorization_code_issue_refresh_token_refresh,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines whether a refresh token should be issued when submitting an authorization code
in the OIDC hybrid flow
"""
@type oidc_flow_hybrid_issue_refresh_token_init :: boolean()
field(:oidc_flow_hybrid_issue_refresh_token_init,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines whether a refresh token should be issued when refreshing tokens in the OIDC
hybrid flow
"""
@type oidc_flow_hybrid_issue_refresh_token_refresh :: boolean()
field(:oidc_flow_hybrid_issue_refresh_token_refresh,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines the lifetime of a refresh token in the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_refresh_token_lifetime :: non_neg_integer()
field(:oidc_flow_authorization_code_refresh_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_refresh_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of a refresh token in the OIDC hybrid flow
"""
@type oidc_flow_hybrid_refresh_token_lifetime :: non_neg_integer()
field(:oidc_flow_hybrid_refresh_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_refresh_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an access token in the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_access_token_lifetime :: non_neg_integer()
field(:oidc_flow_authorization_code_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an access token in the OIDC implicit flow
"""
@type oidc_flow_implicit_access_token_lifetime :: non_neg_integer()
field(:oidc_flow_implicit_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an access token in the OIDC hybrid flow
"""
@type oidc_flow_hybrid_access_token_lifetime :: non_neg_integer()
field(:oidc_flow_hybrid_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Callback called to determine the lifetime of an ID token
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oidc_id_token_lifetime_callback :: (Asteroid.Context.t() -> non_neg_integer())
field(:oauth2_access_token_lifetime_callback,
config_time: :runtime,
uses: [
:oidc_id_token_lifetime,
:oidc_flow_authorization_code_id_token_lifetime,
:oidc_flow_implicit_id_token_lifetime,
:oidc_flow_hybrid_id_token_lifetime
]
)
@doc """
Defines the lifetime of an ID token in the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_id_token_lifetime :: non_neg_integer()
field(:oidc_flow_authorization_code_id_token_lifetime,
config_time: :runtime,
used_by: [:oidc_id_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an ID token in the OIDC implicit flow
"""
@type oidc_flow_implicit_id_token_lifetime :: non_neg_integer()
field(:oidc_flow_implicit_id_token_lifetime,
config_time: :runtime,
used_by: [:oidc_id_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an ID token in the OIDC hybrid flow
"""
@type oidc_flow_hybrid_id_token_lifetime :: non_neg_integer()
field(:oidc_flow_hybrid_id_token_lifetime,
config_time: :runtime,
used_by: [:oidc_id_token_lifetime_callback],
unit: "seconds"
)
@doc """
List of acceptable signature `alg` algorithms to sign ID tokens
"""
@type oidc_id_token_signing_alg_values_supported :: [Crypto.Key.jws_alg()]
field(:oidc_id_token_signing_alg_values_supported,
config_time: :runtime
)
@doc """
List of acceptable encryption `alg` algorithms to encrypt ID tokens
"""
@type oidc_id_token_encryption_alg_values_supported :: [Crypto.Key.jwe_alg()]
field(:oidc_id_token_encryption_alg_values_supported,
config_time: :runtime
)
@doc """
List of acceptable encryption `enc` algorithms to encrypt ID tokens
"""
@type oidc_id_token_encryption_enc_values_supported :: [Crypto.Key.jwe_enc()]
field(:oidc_id_token_encryption_enc_values_supported,
config_time: :runtime
)
@doc """
Callback invoked before serializing an ID token
"""
@type token_id_token_before_serialize_callback ::
(Asteroid.Token.IDToken.t(), Asteroid.Context.t() -> Asteroid.Token.IDToken.t())
field(:token_id_token_before_serialize_callback,
config_time: :runtime
)
@doc """
Callback called to determine whether a new ID token should be issued when renewing
tokens on `/token` with a refresh token grant type
Note that client configuration takes precedence over configuration options. See
`Asteroid.Client` fields.
"""
@type oidc_issue_id_token_on_refresh_callback :: (Asteroid.Context.t() -> boolean())
field(:oidc_issue_id_token_on_refresh_callback,
config_time: :runtime,
uses: [
:oidc_issue_id_token_refresh,
:oidc_flow_authorization_code_issue_id_token_refresh,
:oidc_flow_hybrid_issue_id_token_refresh
]
)
@doc """
Defines whether an ID token should be issued when refreshing tokens in the OIDC
authorization code flow
"""
@type oidc_flow_authorization_code_issue_id_token_refresh :: boolean()
field(:oidc_flow_authorization_code_issue_id_token_refresh,
config_time: :runtime,
used_by: [:oidc_issue_id_token_on_refresh_callback]
)
@doc """
Defines whether an ID token should be issued when refreshing tokens in the OIDC
hybrid flow
"""
@type oidc_flow_hybrid_issue_id_token_refresh :: boolean()
field(:oidc_flow_hybrid_issue_id_token_refresh,
config_time: :runtime,
used_by: [:oidc_issue_id_token_on_refresh_callback]
)
@doc """
Defines the lifetime of an authorization code in the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_authorization_code_lifetime :: non_neg_integer()
field(:oidc_flow_authorization_code_authorization_code_lifetime,
config_time: :runtime,
used_by: [:oauth2_authorization_code_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an authorization code in the OIDC hybrid code flow
"""
@type oidc_flow_hybrid_authorization_code_lifetime :: non_neg_integer()
field(:oidc_flow_hybrid_authorization_code_lifetime,
config_time: :runtime,
used_by: [:oauth2_authorization_code_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the serialization format of an access token in the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oidc_flow_authorization_code_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the serialization format of an access token in the OIDC implicit flow
"""
@type oidc_flow_implicit_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oidc_flow_implicit_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the serialization format of an access token in the OIDC hybrid flow
"""
@type oidc_flow_hybrid_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oidc_flow_hybrid_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the signing key name of an access token in the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_access_token_signing_key :: Crypto.Key.name()
field(:oidc_flow_authorization_code_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the signing key name of an access token in the OIDC implicit flow
"""
@type oidc_flow_implicit_access_token_signing_key :: Crypto.Key.name()
field(:oidc_flow_implicit_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the signing key name of an access token in the OIDC hybrid flow
"""
@type oidc_flow_hybrid_access_token_signing_key :: Crypto.Key.name()
field(:oidc_flow_hybrid_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the signing algorithm of an access token in the OIDC authorization code flow
"""
@type oidc_flow_authorization_code_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oidc_flow_authorization_code_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
Defines the signing algorithm of an access token in the OIDC implicit flow
"""
@type oidc_flow_implicit_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oidc_flow_implicit_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
Defines the signing algorithm of an access token in the OIDC hybrid flow
"""
@type oidc_flow_hybrid_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oidc_flow_hybrid_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
Plugs installed on `"/api/ooidc"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_oidc_plugs :: [{module(), Keyword.t()}]
field(:api_oidc_plugs,
config_time: :compile
)
@doc """
Plugs installed on `"/api/oidc/userinfo"`
See also [protecting APIs](protecting-apis.html)
"""
@type api_oidc_endpoint_userinfo_plugs :: [{module(), Keyword.t()}]
field(:api_oidc_endpoint_userinfo_plugs,
config_time: :compile
)
@doc """
Callback invoked on the json response on the `/userinfo` endpoint
"""
@type oidc_endpoint_userinfo_before_send_resp_callback ::
(map(), Asteroid.Context.t() -> map())
field(:oidc_endpoint_userinfo_before_send_resp_callback,
config_time: :runtime
)
@doc """
Callback invoked on the `t:Plug.Conn.t/0` response on the `/userinfo` endpoint
"""
@type oidc_endpoint_userinfo_before_send_conn_callback ::
(Plug.Conn.t(), Asteroid.Context.t() -> Plug.Conn.t())
field(:oidc_endpoint_userinfo_before_send_conn_callback,
config_time: :runtime
)
@doc """
List of acceptable signature `alg` algorithms for the signature response on the
`/api/oidc/userinfo` endpoint
"""
@type oidc_endpoint_userinfo_signature_alg_values_supported :: [Crypto.Key.jws_alg()]
field(:oidc_endpoint_userinfo_signature_alg_values_supported,
config_time: :runtime
)
@doc """
List of acceptable encryption `alg` algorithms for the encrypted response on the
`/api/oidc/userinfo` endpoint
"""
@type oidc_endpoint_userinfo_encryption_alg_values_supported :: [Crypto.Key.jwe_alg()]
field(:oidc_endpoint_userinfo_encryption_alg_values_supported,
config_time: :runtime
)
@doc """
List of acceptable encryption `enc` algorithms for the encrypted response on the
`/api/oidc/userinfo` endpoint
"""
@type oidc_endpoint_userinfo_encryption_enc_values_supported :: [Crypto.Key.jwe_enc()]
field(:oidc_endpoint_userinfo_encryption_enc_values_supported,
config_time: :runtime
)
@doc """
Claims supported (declarative)
This is only used for publishing it on the discovery endpoint.
"""
@type oidc_claims_supported :: [OIDC.claim_name()]
field(:oidc_claims_supported,
config_time: :runtime
)
@doc """
Policy for response mode selection
3 values are possible:
- `:disabled`: the `"response_mode"` param is not processed, and the default response
mode for the flow is choosen
- `:oidc_only`: the `"response_mode"` param is used for OIDC flows only
- `:enabled`: the `"response_mode"` param is used for all flows
"""
@type oauth2_response_mode_policy :: :disabled | :oidc_only | :enabled
field(:oauth2_response_mode_policy,
config_time: :runtime
)
@doc """
Callback invoked to calculate the `"sub"` returned in OpenID Connect ID tokens and
on the `/userinfo` endpoint
"""
@type oidc_subject_identifier_callback ::
(Subject.t(), Client.t() -> String.t())
field(:oidc_subject_identifier_callback,
config_time: :runtime
)
@doc """
Salt for the pairwise subject identifier type
By default, a random value set at compile time, which means that the pairwise `"subs"`
**will change** when compiling again, and **won't be stable**. To have stability, change
this configuration option with a random value (which doesn't need to be particularly
protected against theft), for example generating it with the following command:
```elixir
$ mix phx.gen.secret
<KEY>
```
"""
@type oidc_subject_identifier_pairwise_salt :: String.t()
field(:oidc_subject_identifier_pairwise_salt,
config_time: :runtime
)
@doc """
OIDC display values supported
Voluntary information to be published on the metadata endpoint. It is not used otherwise.
"""
@type oidc_endpoint_metadata_display_values_supported :: [String.t()]
field(:oidc_endpoint_metadata_display_values_supported,
config_time: :runtime
)
@doc """
Defines the lifetime of an access token
"""
@type oauth2_access_token_lifetime :: non_neg_integer()
field(:oauth2_access_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_access_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the serialization format of an access token
"""
@type oauth2_access_token_serialization_format ::
Asteroid.Token.serialization_format()
field(:oauth2_access_token_serialization_format,
config_time: :runtime,
used_by: [:oauth2_access_token_serialization_format_callback]
)
@doc """
Defines the signing algorithm of an access token
"""
@type oauth2_access_token_signing_alg :: Crypto.Key.jws_alg()
field(:oauth2_access_token_signing_alg,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_alg_callback]
)
@doc """
Defines the signing key name of an access token
"""
@type oauth2_access_token_signing_key :: Crypto.Key.name()
field(:oauth2_access_token_signing_key,
config_time: :runtime,
used_by: [:oauth2_access_token_signing_key_callback]
)
@doc """
Defines the lifetime of an authorization code
"""
@type oauth2_authorization_code_lifetime :: non_neg_integer()
field(:oauth2_authorization_code_lifetime,
config_time: :runtime,
used_by: [:oauth2_authorization_code_lifetime_callback],
unit: "seconds"
)
@doc """
Defines whether a refresh token should be issued
"""
@type oauth2_issue_refresh_token_init :: boolean()
field(:oauth2_issue_refresh_token_init,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines whether a refresh token should be issued when refreshing tokens
"""
@type oauth2_issue_refresh_token_refresh :: boolean()
field(:oauth2_issue_refresh_token_refresh,
config_time: :runtime,
used_by: [:oauth2_issue_refresh_token_callback]
)
@doc """
Defines the lifetime of a refresh token
"""
@type oauth2_refresh_token_lifetime :: non_neg_integer()
field(:oauth2_refresh_token_lifetime,
config_time: :runtime,
used_by: [:oauth2_refresh_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines the lifetime of an ID token
"""
@type oidc_id_token_lifetime :: non_neg_integer()
field(:oidc_id_token_lifetime,
config_time: :runtime,
used_by: [:oidc_id_token_lifetime_callback],
unit: "seconds"
)
@doc """
Defines whether an ID token should be issued when refreshing tokens
"""
@type oidc_issue_id_token_refresh :: boolean()
field(:oidc_issue_id_token_refresh,
config_time: :runtime,
used_by: [:oidc_issue_id_token_on_refresh_callback]
)
### end of configuration options
end
@doc """
Returns the markdown link to the documentation of a configuration option
"""
@spec link_to_option(atom()) :: String.t()
def link_to_option(configuration_option) do
configuration_option = to_string(configuration_option)
"[`:#{configuration_option}`](Asteroid.Config.html#module-#{configuration_option})"
end
end
|
lib/asteroid/config.ex
| 0.967533
| 0.810816
|
config.ex
|
starcoder
|
defmodule Calendar.Date.Parse do
  @doc """
  Parses ISO 8601 date strings.
  The function accepts both the extended and the basic format.
  ## Examples
      # Extended format
      iex> iso8601("2016-01-05")
      {:ok, %Date{year: 2016, month: 1, day: 5}}
      # Basic format (the basic format does not have dashes)
      iex> iso8601("20160105")
      {:ok, %Date{year: 2016, month: 1, day: 5}}
      iex> iso8601("2016-99-05")
      {:error, :invalid_date}
  """
  def iso8601(string) do
    # Delegate to the NaiveDateTime parser by appending a midnight time part,
    # then strip the time component from the parsed result.
    Calendar.NaiveDateTime.Parse.iso8601(string <> "T00:00:00")
    |> iso8601_result
  end

  # Maps the NaiveDateTime parse result onto a date parse result.
  # (Helper renamed from `iso8610result` — a digit-transposition typo of 8601.)
  defp iso8601_result({:ok, ndt, _}), do: {:ok, ndt |> Calendar.NaiveDateTime.to_date}
  defp iso8601_result({:error, :invalid_datetime, _}), do: {:error, :invalid_date}
  # Pass any other tagged result through unchanged, dropping the third element.
  defp iso8601_result({first, second, _}), do: {first, second}

  @doc """
  Parses ISO 8601 date strings.
  Like `iso8601/1`, but returns the result untagged and raises
  in case of an error.
  ## Examples
      # Extended format
      iex> iso8601!("2016-01-05")
      %Date{year: 2016, month: 1, day: 5}
  """
  def iso8601!(string) do
    {:ok, result} = iso8601(string)
    result
  end

  @doc """
  Parses ISO 8601 week date strings.
  ## Examples
      iex> iso_week_date("2004-W53-6")
      {:ok, %Date{year: 2005, month: 1, day: 1}}
      iex> iso_week_date("2008-W01-2")
      {:ok, %Date{year: 2008, month: 1, day: 1}}
      iex> iso_week_date("2004-W53-6D")
      {:ok, %Date{year: 2005, month: 1, day: 1}}
      iex> iso_week_date("2004-W53-9")
      :error
      iex> iso_week_date("2004-W54-9")
      :error
      iex> iso_week_date("2004-W0-9")
      :error
  """
  def iso_week_date(string) do
    # Malformed input (wrong shape, week 0, day outside 1..7, ...) raises
    # somewhere below; normalize every such failure to :error.
    try do
      string
      |> String.replace("D", "")
      |> do_iso_week_date
    rescue
      _ -> :error
    end
  end

  # Matches the "YYYY-Www-D" shape and resolves it via the list of dates
  # belonging to that ISO week. Asserts clean integer parses with `{_, ""}`.
  defp do_iso_week_date(<<binyear::4-bytes, ?-, ?W, binweek::2-bytes, ?-, bday::1-bytes>>) do
    {year, ""} = binyear |> Integer.parse
    {week, ""} = binweek |> Integer.parse
    {day, ""} = bday |> Integer.parse
    # elem/2 raises for day > 7, which the caller rescues into :error.
    date = Calendar.Date.dates_for_week_number(year, week)
    |> List.to_tuple
    |> elem((day - 1))
    {:ok, date}
  end

  @doc """
  Parses ISO 8601 week date strings. Like `iso_week_date/1`,
  but returns the result untagged and raises in case of an error.
  ## Examples
      iex> iso_week_date!("2004-W53-6")
      %Date{year: 2005, month: 1, day: 1}
  """
  def iso_week_date!(string) do
    {:ok, result} = iso_week_date(string)
    result
  end
end
|
lib/calendar/date/parse.ex
| 0.829837
| 0.500366
|
parse.ex
|
starcoder
|
defmodule Sugar.Controller do
@moduledoc """
Controllers facilitate some separation of concerns for your application's logic.
All handler actions should have an arity of 2, with the first argument being
a `Plug.Conn` representing the current connection and the second argument
being a `Keyword` list of any parameters captured in the route path.
`Sugar.Controller` imports `Plug.Conn`, the `plug/1` and `plug/2` macros from
`Plug.Builder`, `Sugar.Controller`, and `Sugar.Controller.Helpers` for
convenience when creating handlers for your applications
## Example
defmodule Controllers.Pages do
use Sugar.Controller
@doc false
def index(conn, []) do
# Somehow get our content
pages = Queries.Page.all
render conn, pages
end
@doc false
def show(conn, args) do
result = case Integer.parse args["page_id"] do
:error ->
%Error{ id: "no_page_id",
message: "A valid page_id is required." }
{i, _} ->
Queries.Page.get i
end
render conn, result
end
@doc false
def create(conn, args) do
render conn, Queries.Page.create args, status: :created
end
@doc false
def update(conn, args) do
result = case Integer.parse args["page_id"] do
:error ->
%Error{ id: "no_page_id",
message: "A valid page_id is required." }
{i, _} ->
Queries.Page.update i, args
end
render conn, result
end
end
"""
@doc false
# Injected into every controller. Imports Plug.Conn and the plug/1-2 macros,
# registers the accumulating :plugs module attribute, and hooks
# __before_compile__/1 below so the plug stacks are compiled into the module.
defmacro __using__(_) do
quote do
import Plug.Conn
import Plug.Builder, only: [plug: 1, plug: 2]
import Sugar.Controller
import Sugar.Controller.Helpers
@before_compile Sugar.Controller
@behaviour Plug
Module.register_attribute(__MODULE__, :plugs, accumulate: true)
end
end
@doc false
# Compiles all accumulated plugs into per-action before/after stacks and
# defines the Plug behaviour entry points (init/1 and call/2, overridable).
defmacro __before_compile__(env) do
# Plugs without an explicit :run option default to running :before the action.
plugs = Module.get_attribute(env.module, :plugs)
|> Enum.map(fn { plug, opts, guard } ->
{ plug, Keyword.put_new(opts, :run, :before), guard }
end)
plug_stacks = build_plug_stacks env, plugs
quote do
def init(opts) do
opts
end
# Run the :before stack, invoke the routed action, then the :after stack.
def call(conn, opts) do
conn = do_call(conn, :before, opts[:action])
conn = apply(__MODULE__, opts[:action], [ conn, opts[:args] ])
do_call(conn, :after, opts[:action])
end
defoverridable [init: 1, call: 2]
unquote(plug_stacks)
end
end
# Builds do_call/3 clauses for every action mentioned in an :only option,
# plus a final catch-all (nil) pair matching any other action.
defp build_plug_stacks(env, plugs) do
only_actions = get_only_actions plugs
Enum.map only_actions ++ [nil], fn action ->
build_plug_stacks_for(action, env, plugs)
end
end
# Emits the :before and :after do_call/3 clauses for one action
# (or the catch-all clauses when action is nil).
defp build_plug_stacks_for(action, env, plugs) do
before_body = build_calls_for(:before, action, env, plugs)
after_body = build_calls_for(:after, action, env, plugs)
quote do
unquote(before_body)
unquote(after_body)
end
end
# Catch-all clause: only plugs without an :only restriction apply here.
defp build_calls_for(before_or_after, nil, env, plugs) do
plugs = plugs
|> Enum.filter(fn { _, opts, _ } ->
opts[:only] === nil
end)
|> Enum.filter(fn { _, opts, _ } ->
opts[:run] === before_or_after
end)
{ conn, body } = env |> Plug.Builder.compile(plugs, [])
quote do
defp do_call(unquote(conn), unquote(before_or_after), _) do
unquote(body)
end
end
end
# Action-specific clause: a plug applies when it is unrestricted, or when the
# action equals (or is listed in) its :only option.
defp build_calls_for(before_or_after, action, env, plugs) do
plugs = plugs
|> Enum.filter(fn { _, opts, _ } ->
opts[:only] === nil ||
action === opts[:only] ||
action in opts[:only]
end)
|> Enum.filter(fn { _, opts, _ } ->
opts[:run] === before_or_after
end)
{ conn, body } = env |> Plug.Builder.compile(plugs, [])
quote do
defp do_call(unquote(conn), unquote(before_or_after), unquote(action)) do
unquote(body)
end
end
end
# Collects the distinct action names referenced by any plug's :only option.
# NOTE(review): flat_map assumes :only is a list here, while build_calls_for/4
# also supports a bare-atom :only — a bare atom would crash flat_map; confirm.
defp get_only_actions(plugs) do
plugs
|> Enum.filter(fn { _, opts, _ } ->
opts[:only] != nil
end)
|> Enum.flat_map(fn { _, opts, _ } ->
opts[:only]
end)
|> Enum.uniq
end
end
|
lib/sugar/controller.ex
| 0.693369
| 0.430806
|
controller.ex
|
starcoder
|
defmodule RDF.BlankNode do
  @moduledoc """
  An RDF blank node (aka bnode) is a local node of a graph without an IRI.
  This module can also be used as `RDF.Resource.Generator` for the generation
  of random identifiers, which is using the `new/0` function.
  For the generation of value-based blank nodes, you can use `RDF.BlankNode.Generator`.
  see <https://www.w3.org/TR/rdf11-primer/#section-blank-node>
  and <https://www.w3.org/TR/rdf11-concepts/#section-blank-nodes>
  """

  @type t :: %__MODULE__{value: String.t()}

  @enforce_keys [:value]
  defstruct [:value]

  use RDF.Resource.Generator

  alias RDF.Resource.Generator

  @doc """
  Creates a random `RDF.BlankNode`.
  """
  @spec new :: t
  def new, do: new(:erlang.unique_integer([:positive]))

  @doc """
  Creates a `RDF.BlankNode` with a user-defined value for its identity.
  ## Examples
      iex> RDF.bnode(:foo)
      %RDF.BlankNode{value: "foo"}
  """
  @spec new(reference | String.t() | atom | integer) :: t
  def new(identity)

  # Strings are taken as-is; atoms, integers and references are first
  # normalized to a string representation of the identity.
  def new(identity) when is_binary(identity), do: %__MODULE__{value: identity}
  def new(identity) when is_atom(identity), do: identity |> Atom.to_string() |> new()
  def new(identity) when is_integer(identity), do: new("b" <> Integer.to_string(identity))

  def new(identity) when is_reference(identity) do
    # :erlang.ref_to_list/1 yields a charlist shaped like "#Ref<0.1.2.3>";
    # the match asserts the prefix, then the trailing ">" is dropped.
    "#Ref<" <> inner = identity |> :erlang.ref_to_list() |> List.to_string()
    inner |> String.trim_trailing(">") |> new()
  end

  @doc """
  Returns the internal string representation of a blank node.
  """
  def value(%__MODULE__{value: value}), do: value

  @doc """
  Tests for value equality of blank nodes.
  Returns `nil` when the given arguments are not comparable as blank nodes.
  """
  @spec equal_value?(t, t) :: boolean | nil
  def equal_value?(left, right)

  def equal_value?(%__MODULE__{} = left, %__MODULE__{} = right),
    do: left.value == right.value

  def equal_value?(_, _), do: nil

  @impl RDF.Resource.Generator
  def generate(_), do: new()

  @impl RDF.Resource.Generator
  def generate(_, _) do
    raise(
      Generator.ConfigError,
      """
      Value-based resource generation is not supported by RDF.BlankNode.
      Use RDF.BlankNode.Generator or another generator.
      """
    )
  end

  defimpl String.Chars do
    def to_string(%{value: value}), do: "_:#{value}"
  end
end
|
lib/rdf/blank_node.ex
| 0.915842
| 0.612498
|
blank_node.ex
|
starcoder
|
defmodule Scenic.Scrollable.Hotkeys do
  @moduledoc """
  This module handles key mappings and keypress events for `Scenic.Scrollable` components.
  """

  @typedoc """
  A keycode represented by a string.
  The string corresponds to the character as seen on the keyboard, rather than a numeric keycode.
  Special keys are generally spelled out in lower case characters, such as "enter" or "escape".
  """
  @type keycode :: String.t()

  @typedoc """
  The hotkey settings which can be passed in as style when creating a scrollable component.
  Hotkeys are optional and available for 'up', 'down', 'left' and 'right' scroll movements.
  """
  @type settings :: %{
          optional(:up) => keycode,
          optional(:down) => keycode,
          optional(:left) => keycode,
          optional(:right) => keycode
        }

  @typedoc """
  The keymap used internally to determine if, and what key is set for a certain movement.
  TODO support multiple keys for a single movement.
  """
  @type key_map :: %{
          up: :none | {:some, keycode},
          down: :none | {:some, keycode},
          left: :none | {:some, keycode},
          right: :none | {:some, keycode}
        }

  @typedoc """
  Button state containing information on if a key is currently pressed or released.
  """
  @type key_pressed_state :: :released | :pressed

  @typedoc """
  Data structure containing information on the pressed state of all available hotkeys.
  """
  @type key_pressed_states :: %{
          up: key_pressed_state,
          down: key_pressed_state,
          left: key_pressed_state,
          right: key_pressed_state
        }

  @typedoc """
  The data structure used as state by this module. It contains information on what keys are mapped to what functionality,
  and which keys are currently being pressed.
  """
  @type t :: %__MODULE__{
          key_map: key_map,
          key_pressed_states: key_pressed_states
        }

  defstruct key_map: %{
              up: :none,
              down: :none,
              left: :none,
              right: :none
            },
            key_pressed_states: %{
              up: :released,
              down: :released,
              left: :released,
              right: :released
            }

  @doc """
  Initialize the state this module acts upon, by passing it the `t:Scenic.Scrollable.Hotkeys.settings/0` settings map.
  When nil is passed as argument, the default settings will be used.
  Returns a `t:Scenic.Scrollable.Hotkeys.t/0`.
  """
  @spec init(settings | nil) :: t
  def init(nil), do: %__MODULE__{}

  def init(settings) do
    # key_pressed_states falls back to the defstruct default (all :released).
    %__MODULE__{
      key_map: %{
        up: parse_keycode(settings[:up]),
        down: parse_keycode(settings[:down]),
        left: parse_keycode(settings[:left]),
        right: parse_keycode(settings[:right])
      }
    }
  end

  @doc """
  Modifies the hotkey state accordingly to the key mappings and the keycode passed.
  When the passed keycode is set as one of the mapped keys, that key will be flagged as being pressed.
  """
  @spec handle_key_press(t, keycode) :: t
  def handle_key_press(%{key_map: %{up: up, down: down, left: left, right: right}} = state, key) do
    case {:some, key} do
      ^up -> press(state, :up)
      ^down -> press(state, :down)
      ^left -> press(state, :left)
      ^right -> press(state, :right)
      _ -> state
    end
  end

  @doc """
  Modifies the hotkey state accordingly to the key mappings and the keycode passed.
  When the passed keycode is set as one of the mapped keys, that key will be flagged as being released.
  """
  # Spec added for consistency with handle_key_press/2, which already had one.
  @spec handle_key_release(t, keycode) :: t
  def handle_key_release(%{key_map: %{up: up, down: down, left: left, right: right}} = state, key) do
    case {:some, key} do
      ^up -> release(state, :up)
      ^down -> release(state, :down)
      ^left -> release(state, :left)
      ^right -> release(state, :right)
      _ -> state
    end
  end

  @doc """
  Obtain the current scroll direction based on the keys currently being pressed as a `t:Scenic.Math.vector_2`.
  For example, when the 'left' key and the 'up' key are currently being pressed, the vector {-1, 1} describing the corresponding direction will be returned.
  """
  @spec direction(t) :: Scenic.Math.vector_2()
  def direction(%{key_pressed_states: pressed_states}) do
    # Opposing keys pressed simultaneously cancel out to 0 on that axis.
    x =
      case pressed_states do
        %{left: :pressed, right: :released} -> -1
        %{left: :released, right: :pressed} -> 1
        _ -> 0
      end

    y =
      case pressed_states do
        %{up: :pressed, down: :released} -> 1
        %{up: :released, down: :pressed} -> -1
        _ -> 0
      end

    {x, y}
  end

  @doc """
  Verify if one or more hotkeys are currently being pressed.
  """
  @spec is_any_key_pressed?(t) :: boolean
  def is_any_key_pressed?(%{key_pressed_states: pressed_states}) do
    pressed_states
    |> Map.values()
    |> Enum.any?(&(&1 == :pressed))
  end

  # Flags the key as pressed in the `t:Scrollable.Hotkeys.t` state.
  @spec press(t, :up | :down | :left | :right) :: t
  defp press(state, key) do
    %{state | key_pressed_states: Map.put(state.key_pressed_states, key, :pressed)}
  end

  # Flags the key as released in the `t:Scrollable.Hotkeys.t` state.
  @spec release(t, :up | :down | :left | :right) :: t
  defp release(state, key) do
    %{state | key_pressed_states: Map.put(state.key_pressed_states, key, :released)}
  end

  # Converts a keycode passed in as `t:Scrollable.Hotkeys.settings` to conform to the `Scenic` key press event key naming,
  # and wraps it in an `t:OptionEx.t` for internal use.
  @spec parse_keycode(keycode) :: {:some, keycode} | :none
  defp parse_keycode(keycode) do
    keycode
    |> OptionEx.return()
    |> OptionEx.map(&upcase_single_char/1)
  end

  # Converts single lower case characters to upper case,
  # and multiple character upper case strings to lower case,
  # to conform to the `Scenic` key press event naming.
  @spec upcase_single_char(keycode) :: keycode
  defp upcase_single_char(keycode) do
    if String.length(keycode) == 1 do
      String.upcase(keycode)
    else
      String.downcase(keycode)
    end
  end
end
|
lib/utility/hotkeys.ex
| 0.819893
| 0.569104
|
hotkeys.ex
|
starcoder
|
defmodule Harald.Transport.UART.Framing do
  @moduledoc """
  A framer module that defines a frame as a HCI packet.
  Reference: Version 5.0, Vol 2, Part E, 5.4
  """

  alias Circuits.UART.Framing

  defmodule State do
    @moduledoc false
    # frame           - the (partial) HCI packet accumulated so far
    # remaining_bytes - payload bytes still expected, or nil when the packet
    #                   type/length have not been resolved yet
    defstruct frame: <<>>, remaining_bytes: nil
  end

  @behaviour Framing

  @impl Framing
  def init(_args), do: {:ok, %State{}}

  @impl Framing
  # Outgoing data is passed through unframed.
  def add_framing(data, state), do: {:ok, data, state}

  @impl Framing
  # Flushing the receive direction discards any partially accumulated frame.
  def flush(:transmit, state), do: state
  def flush(:receive, _state), do: %State{}
  def flush(:both, _state), do: %State{}

  @impl Framing
  # NOTE(review): the Circuits.UART.Framing behaviour expects
  # `{:ok, partial_frames, new_state}`; here the in-progress %State{} itself is
  # emitted as the partial frame and an empty binary is used as the new state —
  # confirm downstream consumers expect this shape.
  def frame_timeout(state), do: {:ok, [state], <<>>}

  @impl Framing
  def remove_framing(new_data, state), do: process_data(new_data, state)

  @doc """
  Returns a tuple like `{remaining_desired_length, part_of_bin, rest_of_bin}`.
  iex> binary_split(<<1, 2, 3, 4>>, 0)
  {0, <<>>, <<1, 2, 3, 4>>}
  iex> binary_split(<<1, 2, 3, 4>>, 2)
  {0, <<1, 2>>, <<3, 4>>}
  iex> binary_split(<<1, 2, 3, 4>>, 4)
  {0, <<1, 2, 3, 4>>, <<>>}
  iex> binary_split(<<1, 2, 3, 4>>, 6)
  {2, <<1, 2, 3, 4>>, <<>>}
  """
  def binary_split(bin, desired_length) do
    bin_length = byte_size(bin)

    if bin_length < desired_length do
      {desired_length - bin_length, bin, <<>>}
    else
      # binary_part/3 with a negative length reads backwards from `bin_length`,
      # so the second part is the trailing `bin_length - desired_length` bytes.
      {0, binary_part(bin, 0, desired_length),
       binary_part(bin, bin_length, desired_length - bin_length)}
    end
  end

  # `process_data/3` attempts to determine the type and length of a packet and will be called as
  # data is received
  defp process_data(data, state, messages \\ [])

  # recursion base case
  # Completed frames were prepended as they were recognized, so reverse to
  # restore arrival order before returning.
  defp process_data(<<>>, state, messages) do
    {process_status(state), Enum.reverse(messages), state}
  end

  # HCI ACL Data Packet
  # NOTE(review): unlike the event-packet clause below, the 4 header bytes are
  # not retained in `state.frame` — confirm consumers only expect the payload
  # for ACL packets.
  defp process_data(
         <<2, _::size(16), length::size(16)>> <> data,
         %State{frame: <<>>} = state,
         messages
       ) do
    process_data(data, length, state, messages)
  end

  # HCI Synchronous Data Packet
  # NOTE(review): header bytes are likewise not retained here — verify intent.
  defp process_data(
         <<3, _::size(16), length::size(8)>> <> data,
         %State{frame: <<>>} = state,
         messages
       ) do
    process_data(data, length, state, messages)
  end

  # HCI Event Packet
  # The 3 header bytes (indicator, event code, length) are kept in the frame.
  defp process_data(
         <<4, event_code::size(8), parameter_total_length::size(8), event_parameters::bits>>,
         %State{frame: <<>>} = state,
         messages
       ) do
    process_data(
      event_parameters,
      parameter_total_length,
      %{state | frame: <<4, event_code, parameter_total_length>>},
      messages
    )
  end

  # bad packet type
  # An unrecognized indicator byte aborts processing of the whole chunk and
  # reports it as an error message.
  defp process_data(
         <<indicator, _::bits>> = data,
         %State{frame: <<>>} = state,
         messages
       )
       when indicator not in 2..4 do
    process_data(<<>>, state, [{:error, {:bad_packet_type, data}} | messages])
  end

  # pull data off the binary - already in a packet, however that does not mean the packet type and
  # length have been resolved yet
  defp process_data(data, state, messages) do
    process_data(data, state.remaining_bytes, state, messages)
  end

  # `process_data/4` appends data to the frame until it has satisfied the remaining bytes
  defp process_data(data, remaining_bytes, state, messages)

  # no data, we don't know how many bytes we want yet
  defp process_data(<<>> = data, nil, state, messages) do
    process_data(data, state, messages)
  end

  # there is data, we don't know how many bytes we want yet, and the frame is empty, move the data
  # in-frame
  defp process_data(data, nil, %State{frame: <<>>} = state, messages) do
    process_data(<<>>, %{state | frame: data}, messages)
  end

  # there is data, we don't know how many bytes we want yet, and the frame is not empty, append
  # the data to the frame
  # Re-enters process_data/3 with the combined binary so the header clauses get
  # another chance to resolve the packet type.
  defp process_data(data, nil, state, messages) do
    process_data(state.frame <> data, %{state | frame: <<>>}, messages)
  end

  # there is data, we know how many bytes we want, append to the frame
  defp process_data(data, remaining_bytes, state, messages) do
    case binary_split(data, remaining_bytes) do
      # the current remaining_bytes has been satisfied
      {0, message, remaining_data} ->
        process_data(remaining_data, %State{}, [state.frame <> message | messages])

      # the current remaining_bytes has not been satisfied
      {remaining_bytes, frame, <<>> = remaining_data} ->
        process_data(
          remaining_data,
          %{state | remaining_bytes: remaining_bytes, frame: state.frame <> frame},
          messages
        )
    end
  end

  # :ok when no partial frame is buffered; :in_frame while mid-packet.
  defp process_status(%State{frame: <<>>, remaining_bytes: nil}), do: :ok
  defp process_status(_state), do: :in_frame
end
|
lib/harald/transport/uart/framing.ex
| 0.782746
| 0.545528
|
framing.ex
|
starcoder
|
defmodule EVM.MachineCode do
  @moduledoc """
  Functions for helping read a contract's machine code.
  """

  alias EVM.{ExecEnv, MachineState, Operation}
  alias EVM.Operation.Metadata

  @type t :: binary()

  @doc """
  Returns the current instruction being executed. In the
  Yellow Paper, this is often referred to as `w`, and is
  defined in Eq.(125) and again in Eq.(221).

  ## Examples

      iex> EVM.MachineCode.current_operation(%EVM.MachineState{program_counter: 0}, %EVM.ExecEnv{machine_code: <<0x15::8, 0x11::8, 0x12::8>>})
      %EVM.Operation.Metadata{args: [], description: "Simple not operator.", fun: nil, group: :comparison_and_bitwise_logic, id: 21, input_count: 1, machine_code_offset: 0, output_count: 1, sym: :iszero}

      iex> EVM.MachineCode.current_operation(%EVM.MachineState{program_counter: 1}, %EVM.ExecEnv{machine_code: <<0x15::8, 0x11::8, 0x12::8>>})
      %EVM.Operation.Metadata{args: [], description: "Greater-than comparision.", fun: nil, group: :comparison_and_bitwise_logic, id: 17, input_count: 2, machine_code_offset: 0, output_count: 1, sym: :gt}

      iex> EVM.MachineCode.current_operation(%EVM.MachineState{program_counter: 2}, %EVM.ExecEnv{machine_code: <<0x15::8, 0x11::8, 0x12::8>>})
      %EVM.Operation.Metadata{args: [], description: "Signed less-than comparision.", fun: nil, group: :comparison_and_bitwise_logic, id: 18, input_count: 2, machine_code_offset: 0, output_count: 1, sym: :slt}
  """
  @spec current_operation(MachineState.t(), ExecEnv.t()) :: Metadata.t()
  def current_operation(machine_state, exec_env) do
    exec_env.machine_code
    |> Operation.get_operation_at(machine_state.program_counter)
    |> Operation.metadata()
  end

  @doc """
  Returns true if the given new program_counter is a valid jump
  destination for the machine code, false otherwise.
  TODO: Memoize

  ## Examples

      iex> EVM.MachineCode.valid_jump_dest?(0, EVM.MachineCode.compile([:push1, 3, :push1, 5, :jumpdest, :add, :return, :jumpdest, :stop]))
      false

      iex> EVM.MachineCode.valid_jump_dest?(4, EVM.MachineCode.compile([:push1, 3, :push1, 5, :jumpdest, :add, :return, :jumpdest, :stop]))
      true

      iex> EVM.MachineCode.valid_jump_dest?(6, EVM.MachineCode.compile([:push1, 3, :push1, 5, :jumpdest, :add, :return, :jumpdest, :stop]))
      false

      iex> EVM.MachineCode.valid_jump_dest?(7, EVM.MachineCode.compile([:push1, 3, :push1, 5, :jumpdest, :add, :return, :jumpdest, :stop]))
      true

      iex> EVM.MachineCode.valid_jump_dest?(100, EVM.MachineCode.compile([:push1, 3, :push1, 5, :jumpdest, :add, :return, :jumpdest, :stop]))
      false
  """
  @spec valid_jump_dest?(MachineState.program_counter(), t) :: boolean()
  def valid_jump_dest?(program_counter, machine_code) do
    # TODO: This should be sorted for quick lookup
    Enum.member?(machine_code |> valid_jump_destinations, program_counter)
  end

  @doc """
  Returns the legal jump locations in the given machine code.
  TODO: Memoize

  ## Example

      iex> EVM.MachineCode.valid_jump_destinations(EVM.MachineCode.compile([:push1, 3, :push1, 5, :jumpdest, :add, :return, :jumpdest, :stop]))
      [4, 7]
  """
  @spec valid_jump_destinations(t) :: [MachineState.program_counter()]
  def valid_jump_destinations(machine_code) do
    do_valid_jump_destinations(machine_code, 0)
  end

  # Returns the valid jump destinations by scanning through
  # entire set of machine code
  #
  # Bounds check first: previously the operation at `pos` was decoded even when
  # `pos` was already past the end of the code, only to be discarded by a later
  # `cond` branch. Checking the bound in a guard avoids that wasted (and
  # potentially out-of-range) lookup while producing identical results.
  defp do_valid_jump_destinations(machine_code, pos) when pos >= byte_size(machine_code), do: []

  defp do_valid_jump_destinations(machine_code, pos) do
    operation =
      machine_code
      |> Operation.get_operation_at(pos)
      |> Operation.decode()

    next_pos = Operation.next_instr_pos(pos, operation)

    if operation == :jumpdest do
      [pos | do_valid_jump_destinations(machine_code, next_pos)]
    else
      do_valid_jump_destinations(machine_code, next_pos)
    end
  end

  @doc """
  Builds machine code for a given set of instructions and data.

  ## Examples

      iex> EVM.MachineCode.compile([:push1, 3, :push1, 5, :add, :return])
      <<0x60, 0x03, 0x60, 0x05, 0x01, 0xf3>>

      iex> EVM.MachineCode.compile([])
      <<>>
  """
  @spec compile([atom() | integer()]) :: binary()
  def compile(code) do
    for n <- code, into: <<>> do
      num =
        case n do
          x when is_atom(x) -> EVM.Operation.encode(n)
          x when is_integer(x) -> x
        end

      :binary.encode_unsigned(num)
    end
  end

  # Moved above the @doc so the @doc attaches to decompile/2 rather than
  # sitting between the function and its documentation.
  @type decompile_option :: {:strict, true | false}

  @doc """
  Decompiles machine code.

  ## Options

  * `:strict` (boolean) - if `true`, decompilation will raise an exception when unknown opcodes are encountered. If `false`, an `{:unknown, integer()}` will appear in place of the decoded op. Defaults to `true`.

  ## Examples

      iex> EVM.MachineCode.decompile(<<0x60, 0x03, 0x60, 0x05, 0x01, 0xf3>>)
      [:push1, 3, :push1, 5, :add, :return]

      iex> EVM.MachineCode.decompile(<<97, 0, 4, 128, 97, 0, 14, 96, 0, 57, 97, 0, 18, 86, 96, 0, 53, 255, 91, 96, 0, 243>>)
      [:push2, 0, 4, :dup1, :push2, 0, 14, :push1, 0, :codecopy, :push2, 0, 18, :jump, :push1, 0, :calldataload, :selfdestruct, :jumpdest, :push1, 0, :return]

      iex> EVM.MachineCode.decompile(<<>>)
      []

      iex> EVM.MachineCode.decompile(<<0x68, 0x00, 0x29>>)
      [:push9, 0, 41, 0, 0, 0, 0, 0, 0, 0]

      iex> EVM.MachineCode.decompile(<<0xfe, 0xf3>>, strict: false)
      [:invalid, :return]

      iex> EVM.MachineCode.decompile(<<0xfee, 0xf3>>, strict: false)
      [{:unknown, 238}, :return]
  """
  @spec decompile(binary(), [decompile_option]) :: [atom() | integer()]
  def decompile(bytecode, opts \\ []), do: decompile([], bytecode, opts)

  # Ops are accumulated in reverse; restore order at the end.
  defp decompile(acc, <<>>, _), do: Enum.reverse(acc)

  defp decompile(acc, <<opcode::8, bytecode::binary()>>, opts) do
    {op, rest_of_bytecode} =
      decompile_opcode(opcode, EVM.Operation.metadata(opcode), bytecode, opts)

    decompile(op ++ acc, rest_of_bytecode, opts)
  end

  # Unknown opcode: raise in strict mode, otherwise tag it.
  defp decompile_opcode(opcode, nil, bytecode, opts) do
    if Keyword.get(opts, :strict, true) do
      raise ArgumentError, "unknown opcode 0x#{Integer.to_string(opcode, 16)} encountered"
    else
      {[{:unknown, opcode}], bytecode}
    end
  end

  defp decompile_opcode(_opcode, %{sym: sym, machine_code_offset: args_size}, bytecode, _opts) do
    decompile_instr(sym, args_size, bytecode)
  end

  defp decompile_instr(sym, nil, bytecode), do: {[sym], bytecode}
  defp decompile_instr(sym, 0, bytecode), do: {[sym], bytecode}

  defp decompile_instr(sym, args_size, bytecode) do
    {encoded_argdata, rest_of_bytecode} = consume_instr_args(bytecode, args_size)
    argdata = :binary.bin_to_list(encoded_argdata)
    # Reversed because the caller prepends this list onto a reversed accumulator.
    {Enum.reverse([sym | argdata]), rest_of_bytecode}
  end

  # Truncated trailing push arguments are zero-padded to the declared size.
  defp consume_instr_args(bytecode, args_size) when args_size > byte_size(bytecode) do
    pad_by_bits = (args_size - byte_size(bytecode)) * 8
    {bytecode <> <<0::size(pad_by_bits)>>, <<>>}
  end

  defp consume_instr_args(bytecode, args_size) do
    <<op_args::binary-size(args_size), rest::binary()>> = bytecode
    {op_args, rest}
  end
end
|
apps/evm/lib/evm/machine_code.ex
| 0.766206
| 0.497925
|
machine_code.ex
|
starcoder
|
defmodule Ratatouille.App do
  @moduledoc """
  Defines the `Ratatouille.App` behaviour. It provides the structure for
  architecting both large and small terminal applications. This structure
  allows you to render views and update them over time or in response to user
  input.
  ## A Simple Example
  defmodule Counter.App do
  @behaviour Ratatouille.App
  def model(_context), do: 0
  def update(model, msg) do
  case msg do
  {:event, %{ch: ?+}} -> model + 1
  {:event, %{ch: ?-}} -> model - 1
  _ -> model
  end
  end
  def render(model) do
  view do
  label(content: "Counter is \#{model} (+/-)")
  end
  end
  end
  ## Architecture
  You may have recognized this pattern from [the Elm Architecture][1]. That's because
  `Ratatouille.App` is just a close translation of this architecture to Elixir.
  Because Elixir and Elm are both functional programming languages, the pattern
  also works very well in Elixir. It helps to centralize state, which makes
  applications easier to reason about.
  The architecture cleanly separates logic into the following three parts:
  * **Model:** the state of your application
  * **Update:** a way to update your state
  * **View:** a way to view your state as a `%Ratatouille.Element{}` tree (think
  HTML).
  The behaviour callbacks map to these three parts:
  * `c:init/1` defines your initial model, using provided context if needed.
  * `c:update/2` handles a message and returns a new model.
  * `c:render/1` receives a model and builds a view (an element tree) to view it.
  See the documentation for each callback below for additional details.
  ## Runtime
  As long as you implement the behaviour callbacks, Ratatouille can handle the
  rest. It provides a runtime (`Ratatouille.Runtime`), which will handle
  actually running your application. That means setting up the window, rendering
  the view, subscribing to events and passing these on to the application's
  `update/2` callback, and making sure the view is always re-rendered when the
  application's model changes.
  ## Fetching Data & Other Expensive Calls
  A good rule of thumb for developing responsive UIs in any language is to never
  block the UI thread. Here, we have a runtime process, but the same rule
  applies.
  In Ratatouille, your application should use `Ratatouille.Runtime.Command` in
  order to start asynchronous tasks and get the result in the `update/2`
  callback once finished.
  [1]: https://guide.elm-lang.org/architecture/
  """

  alias Ratatouille.Renderer.Element

  @optional_callbacks subscribe: 1

  @type context :: map()
  @type model :: term
  @type msg :: term

  # NOTE(review): `Command.t()` and `Subscription.t()` in the callback specs
  # below presumably refer to Ratatouille.Runtime.Command and
  # Ratatouille.Runtime.Subscription, which are not aliased in this module —
  # Dialyzer will report them as unknown types; confirm and alias if intended.

  @doc """
  The `init/1` callback defines the initial model. This model can be defined
  based on the runtime context. See the "Runtime Context" section under
  `Ratatouille.Runtime` for details on what context is provided.
  """
  @callback init(context) :: model | {model, Command.t()}

  @doc """
  The `update/2` callback defines how to update the model in reaction to a
  message (for example, an event or a tick).
  The following messages are currently passed to `update/2` by the runtime:
  * `{:event, event}` - A keyboard or click event.
  * `{:refresh, event}` - A resize event.
  The callback should always return the model. It can be the same model or an
  updated one. If the model changes, the runtime will know to re-render the
  model and update the window.
  """
  @callback update(model, msg) :: model | {model, Command.t()}

  @doc """
  The optional `subscribe/1` callback defines a subscription for the application
  given the initial model. Subscriptions are fulfilled via the `update/2`
  callback.
  For example, a subscription can be used to update the view every second (see
  `Ratatouille.Runtime.Subscription.interval/2`):
  @impl true
  def subscribe(_model) do
  Subscription.interval(1_000, :tick)
  end
  It's also possible to subscribe to multiple things via
  `Ratatouille.Runtime.Subscription.batch/1`. See
  `Ratatouille.Runtime.Subscription` for more details.
  """
  @callback subscribe(model) :: Subscription.t()

  @doc """
  The `render/1` callback defines how to render the model as a view.
  It should return a `%Ratatouille.Element{}` with the `:view` tag. For example:
  @impl true
  def render(model) do
  view do
  label(content: "Hello, \#{model.name}!")
  end
  end
  """
  @callback render(model) :: Element.t()
end
|
lib/ratatouille/app.ex
| 0.90799
| 0.666339
|
app.ex
|
starcoder
|
defmodule Nosedrum.Storage do
  @moduledoc """
  Storages contain commands and are used by command invokers to look up commands.
  How you start a storage is up to the module itself - what is
  expected is that storage modules implement the behaviours
  documented in this module.
  The public-facing API of storage modules takes an optional argument,
  the storage process or other information used to identify the storage
  such as an ETS table name.
  """

  @typedoc """
  A single command module or mapping of subcommand names to command groups.
  In addition to subcommand names, the key `:default` can be specified by
  the module. `:default` should be invoked when none of the subcommands in the
  map match.
  """
  # `module()` is the built-in type for module atoms; the previous `Module.t()`
  # referenced a type that `Module` does not define, which Dialyzer reports as
  # an unknown type.
  @type command_group ::
          module()
          | %{optional(:default) => command_group, required(String.t()) => command_group}

  @typedoc """
  The "invocation path" of the command.
  The public-facing API of storage modules should use this in order
  to allow users to identify the command they want to operate on.
  ## Usage
  To identify a single command, use a single element list, such as `["echo"]`.
  To identify a subcommand, use a pair, such as `["infraction", "search"]`.
  To identify the default subcommand invoked when no matching subcommand is
  found, specify the group name first, then `:default`, such as
  `["tags", :default]`.
  """
  @type command_path :: [String.t() | :default, ...]

  @doc """
  Look up a command group under the specified `name`.
  If the command was not found, `nil` should be returned.
  """
  @callback lookup_command(name :: String.t(), storage :: reference) :: command_group | nil

  @doc """
  Add a new command under the given `path`.
  If a command has the c:Nosedrum.Command.aliases/0 callback defined,
  they will also be added under `path`. If the command already exists,
  no error should be returned.
  """
  @callback add_command(path :: command_path, command :: module(), storage :: reference) ::
              :ok | {:error, String.t()}

  @doc """
  Remove the command under the given `path`.
  If a command has the c:Nosedrum.Command.aliases/0 callback defined,
  they will also be removed under `path`. If the command does not exist,
  no error should be returned.
  """
  @callback remove_command(path :: command_path, storage :: reference) ::
              :ok | {:error, String.t()}

  @doc """
  Return a mapping of command names to `t:command_group/0`s.
  For top-level commands, the value should be a string, otherwise,
  a mapping of subcommand names to subcommand modules as described
  on `t:command_group/0`s documentation should be returned.
  """
  @callback all_commands(storage :: reference) :: %{String.t() => command_group}
end
|
lib/nosedrum/storage.ex
| 0.850531
| 0.548613
|
storage.ex
|
starcoder
|
defmodule Blurhash do
  @external_resource "README.md"
  @moduledoc File.read!("README.md")

  @type blurhash :: String.t()

  # Raw 8-bit RGB pixel data: a binary whose size is any multiple of 8 bits.
  # The previous `<<_::8>>` denoted a binary of exactly one byte, which
  # contradicts "raw pixels" (3 bytes per pixel, many pixels).
  @type pixels :: <<_::_*8>>

  @type color :: {0..255, 0..255, 0..255}

  @doc "Decode a blurhash. Returns raw pixels (8bit RGB) and average color."
  @spec decode(blurhash, pos_integer(), pos_integer()) ::
          {:ok, pixels(), color()} | {:error, :unexpected_components | :unexpected_end}
  def decode(blurhash, width, height) do
    with {:ok, pixels_iodata, average_color} <- Blurhash.Decoder.decode(blurhash, width, height) do
      {:ok, IO.iodata_to_binary(pixels_iodata), average_color}
    end
  end

  @type pixels_iodata :: pixels | [pixels | pixels_iodata]

  @doc "Same as `decode/3`, except returns pixels as iodata."
  @spec decode_to_iodata(String.t(), pos_integer(), pos_integer()) ::
          {:ok, pixels_iodata(), color()} | {:error, :unexpected_components | :unexpected_end}
  def decode_to_iodata(blurhash, width, height) do
    Blurhash.Decoder.decode(blurhash, width, height)
  end

  @doc "Encodes a blurhash from raw pixels (8bit RGB)."
  @spec encode(pixels(), pos_integer(), pos_integer(), 1..9, 1..9) ::
          {:ok, blurhash()}
          | {:error, :too_many_components | :too_little_components | :malformed_pixels}
  def encode(pixels, width, height, components_x, components_y) do
    Blurhash.Encoder.encode(pixels, width, height, components_x, components_y)
  end

  @doc "Downscale the image to 32 pixels wide and convert it to raw pixels, making it ready for Blurhash encoding. Returns path to image, width and height in case of success. Requires `Mogrify` package and ImageMagick to be installed on the system."
  @spec downscale_image(Path.t()) ::
          {:ok, Path.t(), pos_integer(), pos_integer()} | {:error, any()}
  def downscale_image(path) do
    try do
      # XXX: Convert to downscaled png first, so we can get width/height information,
      # since we are retaining aspect ratio
      %{path: resized_path, width: resized_width, height: resized_height} =
        path
        |> Mogrify.open()
        |> Mogrify.custom("thumbnail", "32x")
        |> Mogrify.format("png")
        |> Mogrify.save()
        |> Mogrify.verbose()

      %{path: converted_path} =
        resized_path
        |> Mogrify.open()
        |> Mogrify.format("rgb")
        |> Mogrify.custom("depth", "8")
        |> Mogrify.save()

      # The intermediate png is no longer needed once the rgb file exists.
      File.rm!(resized_path)
      {:ok, converted_path, resized_width, resized_height}
    rescue
      # Boundary call into ImageMagick/Mogrify; surface any failure as an
      # error tuple rather than crashing the caller.
      e -> {:error, e}
    end
  end

  @doc "Downscale the image using `&downscale_image/1` and encode a blurhash for it."
  @spec downscale_and_encode(Path.t(), pos_integer(), pos_integer()) ::
          {:ok, blurhash()} | {:error, any()}
  def downscale_and_encode(path, components_x, components_y) do
    with {:ok, path, width, height} <- downscale_image(path),
         {:ok, pixels} <- File.read(path) do
      try do
        encode(pixels, width, height, components_x, components_y)
      rescue
        e ->
          reraise e, __STACKTRACE__
      after
        # Always clean up the temporary rgb file, even when encoding raises.
        File.rm!(path)
      end
    end
  end
end
|
lib/blurhash.ex
| 0.840684
| 0.504944
|
blurhash.ex
|
starcoder
|
defmodule AmqpDirector.Client do
  @moduledoc """
  The AMQP RPC Client.
  This module contains functionality for an RPC client. See `AmqpDirector.client_child_spec/3` for details on how to start the RPC client.
  """
  alias AmqpDirector.Definitions

  @typedoc """
  Options for an RPC request.
  * `:ttl` - Specified the TTL of the message on AMQP.
  * `:timeout` - Time the client awaits response. Only valid for `call/6`. This is the `gen_server` timeout. Should be larger than the value for `:ttl`
  * `:persistent` - Specifies the delivery mode for the AMQP messages. Setting this to `true` will make the broker log the
  messages on disk. See AMQP specification for more information. Defaults to `false`
  * `:return_headers` - Specified if the caller is interested into the amqp message header table
  """
  @type request_options ::
          {:timeout, pos_integer | :infinity}
          | {:persistent, boolean}
          | {:ttl, pos_integer}
          | {:return_headers, boolean}

  # All client operations are thin wrappers around the Erlang `:ad_client`
  # module; expressed as delegations to make that explicit.

  @doc """
  Await until the client is started.
  This will block the caller for the specified time or until the RPC client starts. This can be use to ensure that the client is running before
  requests are made.
  """
  @spec await(client :: atom | pid, timeout :: pos_integer | :infinity) :: any
  defdelegate await(client, timeout \\ :infinity), to: :ad_client

  @doc """
  Send an asynchronous request.
  Sends an asynchronous request to an AMQP broker without expecting any response. It is send to the specified exchange using the specified routing key.
  The `content_type` specifies the type of the payload while the `type` parameters refers to the AMQP message type header. See the AMQP reference for
  details.
  """
  @spec cast(
          client :: atom | pid,
          exchange :: String.t(),
          routing_key :: String.t(),
          payload :: binary | {binary, Definitions.amqp_table()},
          content_type :: String.t(),
          type :: String.t(),
          options :: list(request_options)
        ) :: :ok
  defdelegate cast(client, exchange, routing_key, payload, content_type, type, options \\ []),
    to: :ad_client

  @typedoc "Error types for the synchronous RPC call."
  @type call_error_reason :: :no_route | :no_consumers | {:reply_code, number}

  @doc """
  Send a synchronous request.
  Sends a synchronous request to an AMQP broker and awaits for response for the length of `:timeout` option (Defaults to 5 seconds). It sends the message
  to the specified exchange with the specified routing key. The `content_type` specifies the type of the payload. AMQP message type is always set to
  `"request"` in case of a call.
  """
  @spec call(
          client :: atom | pid,
          exchange :: String.t(),
          routing_key :: String.t(),
          payload :: binary | {binary, Definitions.amqp_table()},
          content_type :: String.t(),
          options :: list(request_options)
        ) ::
          {:ok, content :: binary | {binary, Definitions.amqp_table()},
           content_type :: String.t()}
          | {:error, call_error_reason}
  defdelegate call(client, exchange, routing_key, payload, content_type,
                options \\ [timeout: 5000]),
    to: :ad_client
end
|
lib/amqp_director/client.ex
| 0.888532
| 0.434041
|
client.ex
|
starcoder
|
defmodule ExPolars.Series do
@moduledoc """
Documentation for `Series`.
"""
import Kernel, except: [+: 2, -: 2, *: 2, /: 2, ==: 2, <>: 2, >: 2, >=: 2, <: 2, <=: 2]
alias ExPolars.Native
  # Wrapper struct holding a reference to the native (Rust/Polars) series.
  defstruct [:inner]

  # NOTE(review): these alias module names rather than struct types
  # (`ExPolars.DataFrame.t()` / `%__MODULE__{}`) — presumably placeholders;
  # confirm the intended typespec targets.
  @type t :: ExPolars.DataFrame
  @type s :: ExPolars.Series

  # Maps the numeric dtype id returned by the native layer to its dtype name.
  # Id 18 ("object") doubles as the fallback in dtype_str/1.
  @dtype_strs %{
    0 => "i8",
    1 => "i16",
    2 => "i32",
    3 => "i64",
    4 => "u8",
    5 => "u16",
    6 => "u32",
    7 => "u64",
    8 => "f32",
    9 => "f64",
    10 => "bool",
    11 => "str",
    12 => "list",
    13 => "date32",
    14 => "date64",
    15 => "time64_nanosecond",
    16 => "duration_nanosecond",
    17 => "duration_millisecond",
    18 => "object"
  }

  # Maps the same numeric dtype ids to Vega-Lite measurement types
  # (quantitative / nominal / ordinal / temporal) for charting.
  @dtype_vega %{
    0 => "quantitative",
    1 => "quantitative",
    2 => "quantitative",
    3 => "quantitative",
    4 => "quantitative",
    5 => "quantitative",
    6 => "quantitative",
    7 => "quantitative",
    8 => "quantitative",
    9 => "quantitative",
    10 => "quantitative",
    11 => "nominal",
    12 => "ordinal",
    13 => "temporal",
    14 => "temporal",
    15 => "temporal",
    16 => "temporal",
    17 => "temporal",
    18 => "nominal"
  }
@spec new(String.t(), list(String.t() | integer() | float() | boolean())) ::
{:ok, s()} | {:error, term}
def new(name, data) do
[first | _] = data
cond do
is_integer(first) -> Native.s_new_i64(name, data)
is_float(first) -> Native.s_new_f64(name, data)
is_boolean(first) -> Native.s_new_bool(name, data)
is_binary(first) -> Native.s_new_str(name, data)
true -> raise "Unspported datetype: #{inspect(first)}"
end
end
@spec new_duration_ns(String.t(), list(integer())) :: {:ok, s()} | {:error, term}
defdelegate new_duration_ns(name, data), to: Native, as: :s_new_duration_ns
@spec to_list(s() | {:ok, s()}) :: {:ok, list()} | {:error, term}
def to_list({:ok, s}), do: to_list(s)
def to_list(s) do
with {:ok, json} <- Native.s_to_json(s),
{:ok, data} <- Jason.decode(json) do
{:ok, data}
else
e -> e
end
end
@spec rechunk(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def rechunk({:ok, s}), do: rechunk(s)
defdelegate rechunk(s), to: Native, as: :s_rechunk
@spec chunk_lengths(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def chunk_lengths({:ok, s}), do: chunk_lengths(s)
defdelegate chunk_lengths(s), to: Native, as: :s_chunk_lengths
@spec name(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def name({:ok, s}), do: name(s)
defdelegate name(s), to: Native, as: :s_name
@spec rename(s() | {:ok, s()}, String.t()) :: {:ok, s()} | {:error, term}
def rename({:ok, s}, name), do: rename(s, name)
defdelegate rename(s, name), to: Native, as: :s_rename
@spec dtype(s() | {:ok, s()}) :: {:ok, integer()} | {:error, term}
def dtype({:ok, s}), do: dtype(s)
defdelegate dtype(s), to: Native, as: :s_dtype
  @spec dtype_str(s() | {:ok, s()}) :: String.t()
  def dtype_str({:ok, s}), do: dtype_str(s)

  # Looks up the dtype name; falls back to "object" (id 18) when the dtype
  # call fails. Uses a function-level rescue so both a raised error and a
  # failed `{:ok, t}` match (MatchError) hit the fallback.
  def dtype_str(s) do
    {:ok, t} = dtype(s)
    @dtype_strs[t]
  rescue
    _ ->
      @dtype_strs[18]
  end
  @spec dtype_vega(s() | {:ok, s()}) :: String.t()
  def dtype_vega({:ok, s}), do: dtype_vega(s)

  # Looks up the Vega-Lite measurement type; falls back to "nominal" (id 18)
  # when the dtype call fails, mirroring dtype_str/1's rescue-based fallback.
  def dtype_vega(s) do
    {:ok, t} = dtype(s)
    @dtype_vega[t]
  rescue
    _ ->
      @dtype_vega[18]
  end
# As above, the `{:ok, _}` heads let these functions be piped without
# unwrapping; two-argument functions unwrap either or both arguments.
@spec n_chunks(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
@doc """
Get the number of chunks that this Series contains.
"""
def n_chunks({:ok, s}), do: n_chunks(s)
defdelegate n_chunks(s), to: Native, as: :s_n_chunks
@spec limit(s() | {:ok, s()}, integer()) :: {:ok, s()} | {:error, term}
@doc """
Take n elements from this Series.
Parameters
----------
num_elements
Amount of elements to take.
"""
def limit({:ok, s}, num_elements), do: limit(s, num_elements)
defdelegate limit(s, num_elements), to: Native, as: :s_limit
@spec slice(s() | {:ok, s()}, integer(), integer()) :: {:ok, s()} | {:error, term}
@doc """
Get a slice of this Series
Parameters
----------
offset
Offset index.
length
Length of the slice.
"""
def slice({:ok, s}, offset, length), do: slice(s, offset, length)
defdelegate slice(s, offset, length), to: Native, as: :s_slice
@spec append(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
@doc """
Append a Series to this one.
Parameters
----------
other
Series to append
"""
# Three unwrap heads cover every {:ok, _} combination of the two arguments.
def append({:ok, s}, {:ok, other}), do: append(s, other)
def append(s, {:ok, other}), do: append(s, other)
def append({:ok, s}, other), do: append(s, other)
defdelegate append(s, other), to: Native, as: :s_append
@spec filter(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
@doc """
Filter elements by a boolean mask
Parameters
----------
filter
Boolean mask
"""
def filter({:ok, s}, {:ok, filter}), do: filter(s, filter)
def filter(s, {:ok, filter}), do: filter(s, filter)
def filter({:ok, s}, filter), do: filter(s, filter)
defdelegate filter(s, filter), to: Native, as: :s_filter
@spec add(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def add({:ok, s}, {:ok, other}), do: add(s, other)
def add({:ok, s}, other), do: add(s, other)
def add(s, {:ok, other}), do: add(s, other)
defdelegate add(s, other), to: Native, as: :s_add
@spec sub(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def sub({:ok, s}, {:ok, other}), do: sub(s, other)
def sub({:ok, s}, other), do: sub(s, other)
def sub(s, {:ok, other}), do: sub(s, other)
defdelegate sub(s, other), to: Native, as: :s_sub
@spec mul(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def mul({:ok, s}, {:ok, other}), do: mul(s, other)
def mul({:ok, s}, other), do: mul(s, other)
def mul(s, {:ok, other}), do: mul(s, other)
defdelegate mul(s, other), to: Native, as: :s_mul
@spec divide(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def divide({:ok, s}, {:ok, other}), do: divide(s, other)
def divide({:ok, s}, other), do: divide(s, other)
def divide(s, {:ok, other}), do: divide(s, other)
defdelegate divide(s, other), to: Native, as: :s_div
@spec head(s() | {:ok, s()}, integer()) :: {:ok, s()} | {:error, term}
def head(s, length \\ 5)
def head({:ok, s}, length), do: head(s, length)
def head(s, {:ok, length}), do: head(s, length)
defdelegate head(s, length), to: Native, as: :s_head
@spec tail(s() | {:ok, s()}, integer()) :: {:ok, s()} | {:error, term}
def tail(s, length \\ 5)
def tail({:ok, s}, length), do: tail(s, length)
defdelegate tail(s, length), to: Native, as: :s_tail
@spec sort(s() | {:ok, s()}, boolean()) :: {:ok, s()} | {:error, term}
def sort(s, reverse \\ false)
def sort({:ok, s}, reverse), do: sort(s, reverse)
defdelegate sort(s, reverse), to: Native, as: :s_sort
@spec argsort(s() | {:ok, s()}, boolean()) :: {:ok, s()} | {:error, term}
def argsort(s, reverse \\ false)
def argsort({:ok, s}, reverse), do: argsort(s, reverse)
defdelegate argsort(s, reverse), to: Native, as: :s_argsort
@spec unique(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def unique({:ok, s}), do: unique(s)
defdelegate unique(s), to: Native, as: :s_unique
@spec value_counts(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def value_counts({:ok, s}), do: value_counts(s)
defdelegate value_counts(s), to: Native, as: :s_value_counts
@spec arg_unique(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def arg_unique({:ok, s}), do: arg_unique(s)
defdelegate arg_unique(s), to: Native, as: :s_arg_unique
@spec take(s() | {:ok, s()}, list(integer())) :: {:ok, s()} | {:error, term}
def take({:ok, s}, indeces), do: take(s, indeces)
defdelegate take(s, indeces), to: Native, as: :s_take
@spec take_with_series(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def take_with_series({:ok, s}, {:ok, indeces}), do: take_with_series(s, indeces)
def take_with_series(s, {:ok, indeces}), do: take_with_series(s, indeces)
def take_with_series({:ok, s}, indeces), do: take_with_series(s, indeces)
defdelegate take_with_series(s, indeces), to: Native, as: :s_take_with_series
@spec null_count(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def null_count({:ok, s}), do: null_count(s)
defdelegate null_count(s), to: Native, as: :s_null_count
@spec is_null(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def is_null({:ok, s}), do: is_null(s)
defdelegate is_null(s), to: Native, as: :s_is_null
@spec is_not_null(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def is_not_null({:ok, s}), do: is_not_null(s)
defdelegate is_not_null(s), to: Native, as: :s_is_not_null
@spec is_unique(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def is_unique({:ok, s}), do: is_unique(s)
defdelegate is_unique(s), to: Native, as: :s_is_unique
@spec arg_true(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def arg_true({:ok, s}), do: arg_true(s)
defdelegate arg_true(s), to: Native, as: :s_arg_true
@spec sample(s() | {:ok, s()}, integer() | float(), boolean()) :: {:ok, s()} | {:error, term}
def sample(s, n_or_frac, with_replacement \\ false)
def sample({:ok, s}, n_or_frac, with_replacement), do: sample(s, n_or_frac, with_replacement)
def sample(s, n_or_frac, with_replacement) do
case is_integer(n_or_frac) do
true -> Native.s_sample_n(s, n_or_frac, with_replacement)
_ -> Native.s_sample_frac(s, n_or_frac, with_replacement)
end
end
@spec is_duplicated(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def is_duplicated({:ok, s}), do: is_duplicated(s)
defdelegate is_duplicated(s), to: Native, as: :s_is_duplicated
@spec explode(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def explode({:ok, s}), do: explode(s)
defdelegate explode(s), to: Native, as: :s_explode
@spec take_every(s() | {:ok, s()}, integer()) :: {:ok, s()} | {:error, term}
def take_every({:ok, s}, n), do: take_every(s, n)
defdelegate take_every(s, n), to: Native, as: :s_take_every
# NOTE(review): series_equal/3 presumably yields a boolean result, not a
# series — the {:ok, s()} in the @spec looks copy-pasted; confirm.
@spec series_equal(s() | {:ok, s()}, s() | {:ok, s()}, boolean()) :: {:ok, s()} | {:error, term}
def series_equal(s, other, null_equal \\ false)
def series_equal({:ok, s}, {:ok, other}, null_equal), do: series_equal(s, other, null_equal)
def series_equal(s, {:ok, other}, null_equal), do: series_equal(s, other, null_equal)
def series_equal({:ok, s}, other, null_equal), do: series_equal(s, other, null_equal)
defdelegate series_equal(s, other, null_equal), to: Native, as: :s_series_equal
# Element-wise comparisons between two Series; each returns a boolean
# mask Series, with the usual three unwrap heads per operation.
@spec eq(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def eq({:ok, s}, {:ok, rhs}), do: eq(s, rhs)
def eq(s, {:ok, rhs}), do: eq(s, rhs)
def eq({:ok, s}, rhs), do: eq(s, rhs)
defdelegate eq(s, rhs), to: Native, as: :s_eq
@spec neq(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def neq({:ok, s}, {:ok, rhs}), do: neq(s, rhs)
def neq(s, {:ok, rhs}), do: neq(s, rhs)
def neq({:ok, s}, rhs), do: neq(s, rhs)
defdelegate neq(s, rhs), to: Native, as: :s_neq
@spec gt(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def gt({:ok, s}, {:ok, rhs}), do: gt(s, rhs)
def gt(s, {:ok, rhs}), do: gt(s, rhs)
def gt({:ok, s}, rhs), do: gt(s, rhs)
defdelegate gt(s, rhs), to: Native, as: :s_gt
@spec gt_eq(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def gt_eq({:ok, s}, {:ok, rhs}), do: gt_eq(s, rhs)
def gt_eq(s, {:ok, rhs}), do: gt_eq(s, rhs)
def gt_eq({:ok, s}, rhs), do: gt_eq(s, rhs)
defdelegate gt_eq(s, rhs), to: Native, as: :s_gt_eq
@spec lt(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def lt({:ok, s}, {:ok, rhs}), do: lt(s, rhs)
def lt(s, {:ok, rhs}), do: lt(s, rhs)
def lt({:ok, s}, rhs), do: lt(s, rhs)
defdelegate lt(s, rhs), to: Native, as: :s_lt
@spec lt_eq(s() | {:ok, s()}, s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def lt_eq({:ok, s}, {:ok, rhs}), do: lt_eq(s, rhs)
def lt_eq(s, {:ok, rhs}), do: lt_eq(s, rhs)
def lt_eq({:ok, s}, rhs), do: lt_eq(s, rhs)
defdelegate lt_eq(s, rhs), to: Native, as: :s_lt_eq
# Trailing underscore avoids clashing with Kernel.not/1.
@spec not_(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def not_({:ok, s}), do: not_(s)
defdelegate not_(s), to: Native, as: :s_not
@spec len(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def len({:ok, s}), do: len(s)
defdelegate len(s), to: Native, as: :s_len
@spec drop_nulls(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def drop_nulls({:ok, s}), do: drop_nulls(s)
defdelegate drop_nulls(s), to: Native, as: :s_drop_nulls
# `strategy` is a string naming the fill strategy understood by the NIF —
# NOTE(review): valid values are not visible here; confirm against Native.
@spec fill_none(s() | {:ok, s()}, String.t()) :: {:ok, s()} | {:error, term}
def fill_none({:ok, s}, strategy), do: fill_none(s, strategy)
defdelegate fill_none(s, strategy), to: Native, as: :s_fill_none
@spec clone(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def clone({:ok, s}), do: clone(s)
defdelegate clone(s), to: Native, as: :s_clone
@spec shift(s() | {:ok, s()}, integer()) :: {:ok, s()} | {:error, term}
def shift({:ok, s}, periods), do: shift(s, periods)
defdelegate shift(s, periods), to: Native, as: :s_shift
# Three arguments means seven unwrap heads — one per non-trivial
# combination of {:ok, _}-wrapped arguments — before the delegate.
@spec zip_with(s() | {:ok, s()}, s() | {:ok, s()}, s() | {:ok, s()}) ::
{:ok, s()} | {:error, term}
def zip_with({:ok, s}, {:ok, mask}, {:ok, other}), do: zip_with(s, mask, other)
def zip_with(s, {:ok, mask}, {:ok, other}), do: zip_with(s, mask, other)
def zip_with({:ok, s}, mask, {:ok, other}), do: zip_with(s, mask, other)
def zip_with({:ok, s}, {:ok, mask}, other), do: zip_with(s, mask, other)
def zip_with(s, mask, {:ok, other}), do: zip_with(s, mask, other)
def zip_with(s, {:ok, mask}, other), do: zip_with(s, mask, other)
def zip_with({:ok, s}, mask, other), do: zip_with(s, mask, other)
defdelegate zip_with(s, mask, other), to: Native, as: :s_zip_with
# String namespace: element-wise string operations on utf8 Series.
@spec str_lengths(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def str_lengths({:ok, s}), do: str_lengths(s)
defdelegate str_lengths(s), to: Native, as: :s_str_lengths
@spec str_contains(s() | {:ok, s()}, String.t()) :: {:ok, s()} | {:error, term}
def str_contains({:ok, s}, pat), do: str_contains(s, pat)
defdelegate str_contains(s, pat), to: Native, as: :s_str_contains
@spec str_replace(s() | {:ok, s()}, String.t(), String.t()) :: {:ok, s()} | {:error, term}
def str_replace({:ok, s}, pat, val), do: str_replace(s, pat, val)
defdelegate str_replace(s, pat, val), to: Native, as: :s_str_replace
@spec str_replace_all(s() | {:ok, s()}, String.t(), String.t()) :: {:ok, s()} | {:error, term}
def str_replace_all({:ok, s}, pat, val), do: str_replace_all(s, pat, val)
defdelegate str_replace_all(s, pat, val), to: Native, as: :s_str_replace_all
@spec str_to_uppercase(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def str_to_uppercase({:ok, s}), do: str_to_uppercase(s)
defdelegate str_to_uppercase(s), to: Native, as: :s_str_to_uppercase
@spec str_to_lowercase(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def str_to_lowercase({:ok, s}), do: str_to_lowercase(s)
defdelegate str_to_lowercase(s), to: Native, as: :s_str_to_lowercase
# Date parsing/formatting; `fmt` is a strftime-style format string.
@spec str_parse_date32(s() | {:ok, s()}, String.t()) :: {:ok, s()} | {:error, term}
def str_parse_date32({:ok, s}, fmt), do: str_parse_date32(s, fmt)
defdelegate str_parse_date32(s, fmt), to: Native, as: :s_str_parse_date32
@spec str_parse_date64(s() | {:ok, s()}, String.t()) :: {:ok, s()} | {:error, term}
def str_parse_date64({:ok, s}, fmt), do: str_parse_date64(s, fmt)
defdelegate str_parse_date64(s, fmt), to: Native, as: :s_str_parse_date64
@spec datetime_str_fmt(s() | {:ok, s()}, String.t()) :: {:ok, s()} | {:error, term}
def datetime_str_fmt({:ok, s}, fmt), do: datetime_str_fmt(s, fmt)
defdelegate datetime_str_fmt(s, fmt), to: Native, as: :s_datetime_str_fmt
@spec as_duration(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def as_duration({:ok, s}), do: as_duration(s)
defdelegate as_duration(s), to: Native, as: :s_as_duration
# Returns a DataFrame (t()) of one-hot columns, not a Series.
@spec to_dummies(s() | {:ok, s()}) :: {:ok, t()} | {:error, term}
def to_dummies({:ok, s}), do: to_dummies(s)
defdelegate to_dummies(s), to: Native, as: :s_to_dummies
# Unlike the rest of the API, get_list/2 returns the bare value (or nil),
# not an {:ok, _} tuple — see the @spec.
@spec get_list(s() | {:ok, s()}, integer()) :: s() | {:ok, s()} | nil
def get_list({:ok, s}, index), do: get_list(s, index)
defdelegate get_list(s, index), to: Native, as: :s_get_list
@spec rolling_sum(s() | {:ok, s()}, integer(), nil | list(float()), boolean()) ::
{:ok, s()} | {:error, term}
def rolling_sum(s, window_size, weight \\ nil, ignore_null \\ false)
def rolling_sum({:ok, s}, window_size, weight, ignore_null),
do: rolling_sum(s, window_size, weight, ignore_null)
defdelegate rolling_sum(s, window_size, weight, ignore_null), to: Native, as: :s_rolling_sum
@spec rolling_mean(s() | {:ok, s()}, integer(), nil | list(float()), boolean()) ::
{:ok, s()} | {:error, term}
def rolling_mean(s, window_size, weight \\ nil, ignore_null \\ false)
def rolling_mean({:ok, s}, window_size, weight, ignore_null),
do: rolling_mean(s, window_size, weight, ignore_null)
defdelegate rolling_mean(s, window_size, weight, ignore_null), to: Native, as: :s_rolling_mean
@spec rolling_max(s() | {:ok, s()}, integer(), nil | list(float()), boolean()) ::
{:ok, s()} | {:error, term}
def rolling_max(s, window_size, weight \\ nil, ignore_null \\ false)
def rolling_max({:ok, s}, window_size, weight, ignore_null),
do: rolling_max(s, window_size, weight, ignore_null)
defdelegate rolling_max(s, window_size, weight, ignore_null), to: Native, as: :s_rolling_max
@spec rolling_min(s() | {:ok, s()}, integer(), nil | list(float()), boolean()) ::
{:ok, s()} | {:error, term}
def rolling_min(s, window_size, weight \\ nil, ignore_null \\ false)
def rolling_min({:ok, s}, window_size, weight, ignore_null),
do: rolling_min(s, window_size, weight, ignore_null)
defdelegate rolling_min(s, window_size, weight, ignore_null), to: Native, as: :s_rolling_min
# Temporal accessors: extract one date/time component per element from a
# date/datetime Series.
@spec year(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def year({:ok, s}), do: year(s)
defdelegate year(s), to: Native, as: :s_year
@spec month(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def month({:ok, s}), do: month(s)
defdelegate month(s), to: Native, as: :s_month
@spec day(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def day({:ok, s}), do: day(s)
defdelegate day(s), to: Native, as: :s_day
@spec ordinal_day(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def ordinal_day({:ok, s}), do: ordinal_day(s)
defdelegate ordinal_day(s), to: Native, as: :s_ordinal_day
@spec hour(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def hour({:ok, s}), do: hour(s)
defdelegate hour(s), to: Native, as: :s_hour
@spec minute(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def minute({:ok, s}), do: minute(s)
defdelegate minute(s), to: Native, as: :s_minute
@spec second(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def second({:ok, s}), do: second(s)
defdelegate second(s), to: Native, as: :s_second
@spec nanosecond(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def nanosecond({:ok, s}), do: nanosecond(s)
defdelegate nanosecond(s), to: Native, as: :s_nanosecond
@spec set(s() | {:ok, s()}, s() | {:ok, s()}, float() | integer()) ::
{:ok, s()} | {:error, term}
@doc """
Set masked values.
Parameters
----------
filter
Boolean mask
value
Value to replace the the masked values with.
"""
def set({:ok, s}, {:ok, filter}, value), do: set(s, filter, value)
def set(s, {:ok, filter}, value), do: set(s, filter, value)
def set({:ok, s}, filter, value), do: set(s, filter, value)
def set(s, filter, value) do
t = dtype_str(s)
apply(Native, :"s_set_with_mask_#{t}", [s, filter, value])
end
@spec get(s() | {:ok, s()}, float() | integer()) :: {:ok, s()} | {:error, term}
def get({:ok, s}, index), do: get(s, index)
def get(s, index) do
t = dtype_str(s)
apply(Native, :"s_get_#{t}", [s, index])
end
@spec cast(s() | {:ok, s()}, :integer | :float | :str) :: {:ok, s()} | {:error, term}
def cast({:ok, s}, data_type), do: cast(s, data_type)
def cast(s, data_type) do
f =
cond do
data_type == :integer -> :s_cast_i64
data_type == :float -> :s_cast_f64
data_type == :str -> :s_cast_str
true -> :s_cast_str
end
apply(Native, f, [s])
end
# Operator overloads: these redefine Kernel's +, -, *, /, == and <> to act
# element-wise on Series. NOTE(review): this only compiles if the module
# header (above this chunk) does `import Kernel, except: [...]` for these
# operators — confirm. When the right-hand side is not a Series struct,
# the dtype-specific scalar NIF is used instead (same safety note as set/3:
# the interpolated atom comes from the fixed dtype table).
@spec (s() | {:ok, s()}) + (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} + {:ok, other}, do: s + other
def s + {:ok, other}, do: s + other
def {:ok, s} + other, do: s + other
def s + other when is_struct(s) and is_struct(other) do
add(s, other)
end
def s + other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_add_#{t}", [s, other])
end
def _s + _other, do: {:error, "Not supported"}
@spec (s() | {:ok, s()}) - (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} - {:ok, other}, do: s - other
def s - {:ok, other}, do: s - other
def {:ok, s} - other, do: s - other
def s - other when is_struct(s) and is_struct(other) do
sub(s, other)
end
def s - other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_sub_#{t}", [s, other])
end
def _s - _other, do: {:error, "Not supported"}
@spec (s() | {:ok, s()}) * (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} * {:ok, other}, do: s * other
def s * {:ok, other}, do: s * other
def {:ok, s} * other, do: s * other
def s * other when is_struct(s) and is_struct(other) do
mul(s, other)
end
def s * other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_mul_#{t}", [s, other])
end
def _s * _other, do: {:error, "Not supported"}
@spec (s() | {:ok, s()}) / (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} / {:ok, other}, do: s / other
def s / {:ok, other}, do: s / other
def {:ok, s} / other, do: s / other
def s / other when is_struct(s) and is_struct(other) do
divide(s, other)
end
def s / other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_div_#{t}", [s, other])
end
def _s / _other, do: {:error, "Not supported"}
# `==` here returns an element-wise boolean-mask Series (via eq/2), not a
# boolean — very different from Kernel.==/2.
@spec (s() | {:ok, s()}) == (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} == {:ok, other}, do: s == other
def {:ok, s} == other, do: s == other
def s == {:ok, other}, do: s == other
def s == other when is_struct(s) and is_struct(other) do
eq(s, other)
end
def s == other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_eq_#{t}", [s, other])
end
def _s == _other, do: {:error, "Not supported"}
# `<>` is repurposed as element-wise not-equal (neq/2), not string
# concatenation.
@spec (s() | {:ok, s()}) <> (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} <> {:ok, other}, do: s <> other
def {:ok, s} <> other, do: s <> other
def s <> {:ok, other}, do: s <> other
def s <> other when is_struct(s) and is_struct(other) do
neq(s, other)
end
def s <> other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_neq_#{t}", [s, other])
end
def _s <> _other, do: {:error, "Not supported"}
# Ordering operator overloads: element-wise >, >=, < and <= returning a
# boolean-mask Series; same override/import caveat as the operators above.
@spec (s() | {:ok, s()}) > (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} > {:ok, other}, do: s > other
def {:ok, s} > other, do: s > other
def s > {:ok, other}, do: s > other
def s > other when is_struct(s) and is_struct(other) do
gt(s, other)
end
def s > other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_gt_#{t}", [s, other])
end
def _s > _other, do: {:error, "Not supported"}
@spec (s() | {:ok, s()}) >= (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} >= {:ok, other}, do: s >= other
def {:ok, s} >= other, do: s >= other
def s >= {:ok, other}, do: s >= other
def s >= other when is_struct(s) and is_struct(other) do
gt_eq(s, other)
end
def s >= other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_gt_eq_#{t}", [s, other])
end
def _s >= _other, do: {:error, "Not supported"}
@spec (s() | {:ok, s()}) < (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} < {:ok, other}, do: s < other
def {:ok, s} < other, do: s < other
def s < {:ok, other}, do: s < other
def s < other when is_struct(s) and is_struct(other) do
lt(s, other)
end
def s < other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_lt_#{t}", [s, other])
end
def _s < _other, do: {:error, "Not supported"}
@spec (s() | {:ok, s()}) <= (s() | {:ok, s()}) :: {:ok, s()} | {:error, term()}
def {:ok, s} <= {:ok, other}, do: s <= other
def {:ok, s} <= other, do: s <= other
def s <= {:ok, other}, do: s <= other
def s <= other when is_struct(s) and is_struct(other) do
lt_eq(s, other)
end
def s <= other when is_struct(s) do
t = dtype_str(s)
apply(Native, :"s_lt_eq_#{t}", [s, other])
end
def _s <= _other, do: {:error, "Not supported"}
@spec sum(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
@doc """
Reduce this Series to the sum value.
"""
def sum({:ok, s}), do: sum(s)
# Narrow dtypes are widened before dispatch (bool->u32, u8->u64, i8->i64)
# because the NIF only exposes sum for the wider accumulator types.
def sum(s) do
t = dtype_str(s)
t =
case t do
"bool" -> "u32"
"u8" -> "u64"
"i8" -> "i64"
_ -> t
end
apply(Native, :"s_sum_#{t}", [s])
end
@spec mean(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def mean({:ok, s}), do: mean(s)
def mean(s) do
t = dtype_str(s)
apply(Native, :"s_mean_#{t}", [s])
end
@spec min(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def min({:ok, s}), do: min(s)
def min(s) do
t = dtype_str(s)
apply(Native, :"s_min_#{t}", [s])
end
@spec max(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def max({:ok, s}), do: max(s)
def max(s) do
t = dtype_str(s)
apply(Native, :"s_max_#{t}", [s])
end
# std/1 and var/1 are placeholders: not yet backed by the NIF.
@spec std(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def std({:ok, s}), do: std(s)
def std(_s) do
{:error, "Not implemented"}
end
@spec var(s() | {:ok, s()}) :: {:ok, s()} | {:error, term}
def var({:ok, s}), do: var(s)
def var(_s) do
{:error, "Not implemented"}
end
end
defimpl Inspect, for: ExPolars.Series do
  alias ExPolars.Native

  # Render a series via the NIF's string formatter; fall back to a fixed
  # message when the native call does not return {:ok, string}.
  def inspect(series, _opts) do
    with {:ok, rendered} <- Native.s_as_str(series) do
      rendered
    else
      _ -> "Cannot output series"
    end
  end
end
|
lib/ex_polars/series.ex
| 0.861858
| 0.5835
|
series.ex
|
starcoder
|
defmodule Construct do
@moduledoc """
Construct internally divided into three components:
* `Construct` — defining structures;
* `Construct.Cast` — making structure instances;
* `Construct.Type` — type-coercion and custom type behaviour.
## Construct definition
defmodule StructureName do
use Construct, struct_opts
structure do
include AnotherStructure
field name, type, options
end
end
`struct_opts` is options passed to `c:make/2` and `c:make!/2`, described in `Construct.Cast.make/3`.
When you `use Construct`, the library defines a few functions following the `Construct` behaviour:
* `c:make/2` — just an alias to `Construct.Cast.make/3`;
* `c:make!/2` — alias to `c:make/2` but throws `Construct.MakeError` exception if provided params are invalid;
* `c:cast/2` — alias to `c:make/2` too, to follow the `Construct.Type` behaviour and use a defined structure as a type.
"""
@type t :: struct
@doc """
Alias to `Construct.Cast.make/3`.
"""
@callback make(params :: map, opts :: Keyword.t) :: {:ok, t} | {:error, term}
@doc """
Alias to `c:make/2`, but raises an `Construct.MakeError` exception if params have errors.
"""
@callback make!(params :: map, opts :: Keyword.t) :: t
@doc """
Alias to `c:make/2`, used to follow `c:Construct.Type.cast/1` callback.
To use this structure as custom type.
"""
@callback cast(params :: map, opts :: Keyword.t) :: {:ok, t} | {:error, term}
@doc false
defmacro __using__(opts \\ [])
# `use Construct, %{...}` shorthand: derive the whole structure definition
# from a literal types map.
defmacro __using__({:%{}, _, _} = types) do
quote do
use Construct do
unquote(Construct.Compiler.define_from_types(types))
end
end
end
defmacro __using__(opts) when is_list(opts) do
{pre_ast, opts} = Construct.Compiler.pre(opts)
quote do
@behaviour Construct
@construct_opts unquote(opts)
unquote(pre_ast)
# Note the merge order: options given at `use Construct` time override
# options passed to make/2 at the call site.
def make(params \\ %{}, opts \\ []) do
Construct.Cast.make(__MODULE__, params, Keyword.merge(opts, unquote(opts)))
end
def make!(params \\ %{}, opts \\ []) do
case make(params, opts) do
{:ok, structure} -> structure
{:error, reason} -> raise Construct.MakeError, %{reason: reason, params: params}
end
end
def cast(params, opts \\ []) do
make(params, opts)
end
# NOTE(review): only make/2 is overridable; the default-argument heads
# make/0 and make/1 still dispatch into the overridden make/2 — confirm
# this is the intended override surface.
defoverridable make: 2
end
end
@doc """
Defines a structure.
"""
defmacro structure([do: ast]) do
Construct.Compiler.define(ast)
end
@doc """
Includes provided structure and checks definition for validity at compile-time.
## Options
* `:only` - (list of field names) specify fields that should be taken from included module,
throws an error when field doesn't exist in provided module.
If included structure is invalid for some reason — this macro throws an
`Construct.DefinitionError` exception with detailed reason.
"""
@spec include(t, keyword) :: Macro.t()
defmacro include(module, opts \\ []) do
Construct.Compiler.define_include(module, opts)
end
@doc """
Defines field on the structure with given name, type and options.
Checks definition validity at compile time by name, type and options.
For custom types checks for module existence and `c:Construct.Type.cast/1` callback.
If field definition is invalid for some reason — it throws an `Construct.DefinitionError`
exception with detailed reason.
## Options
* `:default` — sets default value for that field:
* The default value is calculated at compilation time, so don't use expressions like
DateTime.utc_now or Ecto.UUID.generate as they would then be the same for all structures;
* Value from params is compared with default value before and after type cast;
* If you pass `field :a, type, default: nil` and `make(%{a: nil})` — type coercion will
not be used, `nil` compares with default value and just appends that value to structure;
* If field doesn't exist in params, it will use default value.
By default this option is unset. Notice that you can't use functions as a default value.
"""
@spec field(atom(), Construct.Type.t(), Keyword.t()) :: Macro.t()
defmacro field(name, type \\ :string, opts \\ [])
# Nested-structure forms: `field :name, opts do ... end` and
# `field :name do ... end` define an inline sub-structure.
defmacro field(name, opts, [do: _] = ast) do
Construct.Compiler.define_nested_field(name, ast, opts)
end
defmacro field(name, [do: _] = ast, _opts) do
Construct.Compiler.define_nested_field(name, ast, [])
end
defmacro field(name, type, opts) do
quote do
Construct.Compiler.define_field(__MODULE__, unquote(name), unquote(type), unquote(opts))
end
end
@doc """
No doc at this time, should be written for 3.0.0 release
"""
defmacro structure_compile_hook(type, [do: ast]) do
Construct.Compiler.define_structure_compile_hook(type, ast)
end
@doc """
Collect types from defined Construct module to map
"""
def types_of!(module) do
if construct?(module) do
Construct.Compiler.collect_types(module)
else
raise ArgumentError, "not a Construct definition"
end
end
@doc """
Checks if provided module is Construct module
"""
def construct?(module) do
Construct.Compiler.construct_module?(module)
end
end
|
lib/construct.ex
| 0.855097
| 0.611382
|
construct.ex
|
starcoder
|
defmodule Ecto.Query.Builder.Select do
@moduledoc false
alias Ecto.Query.Builder
@doc """
Escapes a select.
It allows tuples, lists and variables at the top level or a
single `assoc(x, y)` expression.
## Examples
iex> escape({1, 2}, [])
{{:{}, [], [:{}, [], [1, 2]]}, %{}}
iex> escape([1, 2], [])
{[1, 2], %{}}
iex> escape(quote(do: x), [x: 0])
{{:{}, [], [:&, [], [0]]}, %{}}
iex> escape(quote(do: ^123), [])
{{:{}, [], [:^, [], [0]]}, %{0 => {123, :any}}}
"""
# Returns {escaped_ast, params} where params maps interpolation index to
# {value, type} for each ^pinned expression encountered.
@spec escape(Macro.t, Keyword.t) :: {Macro.t, %{}}
def escape({:assoc, _, args} = assoc, vars) when is_list(args) do
escape_assoc(assoc, %{}, vars)
end
def escape(other, vars), do: do_escape(other, %{}, vars)
# Tuple: normalize a 2-element tuple into the 3-element {:{}, _, _} AST
# form so the next clause handles both arities uniformly.
defp do_escape({left, right}, params, vars) do
do_escape({:{}, [], [left, right]}, params, vars)
end
# Tuple: the double {:{}, ...} wrapping escapes the escape — when this
# quoted result is injected back into code it rebuilds the tuple literal.
defp do_escape({:{}, _, list}, params, vars) do
{list, params} = Enum.map_reduce(list, params, &do_escape(&1, &2, vars))
expr = {:{}, [], [:{}, [], list]}
{expr, params}
end
# List
defp do_escape(list, params, vars) when is_list(list) do
Enum.map_reduce(list, params, &do_escape(&1, &2, vars))
end
# var - where var is bound
defp do_escape({var, _, context}, params, vars)
when is_atom(var) and is_atom(context) do
expr = Builder.escape_var(var, vars)
{expr, params}
end
# Everything else (operators, interpolations, fields) is delegated to the
# generic Builder escape with type :any.
defp do_escape(other, params, vars) do
Builder.escape(other, :any, params, vars)
end
# assoc/2
defp escape_assoc({:assoc, _, [{var, _, context}, list]}, params, vars)
when is_atom(var) and is_atom(context) and is_list(list) do
var = Builder.escape_var(var, vars)
{list, params} = Enum.map_reduce(list, params,
&escape_assoc_fields(&1, &2, vars))
expr = {:{}, [], [:assoc, [], [var, list]]}
{expr, params}
end
defp escape_assoc(other, _params, _vars) do
Builder.error! "invalid expression `#{Macro.to_string(other)}` inside `assoc/2` selector"
end
# `field: var` pair inside assoc/2.
defp escape_assoc_fields({field, {assoc_var, _, assoc_ctxt}}, params, vars)
when is_atom(field) and is_atom(assoc_var) and is_atom(assoc_ctxt) do
expr = {field, Builder.escape_var(assoc_var, vars)}
{expr, params}
end
# `field: assoc(...)` — nested association selector.
defp escape_assoc_fields({field, other}, params, vars)
when is_atom(field) do
{expr, params} = escape_assoc(other, params, vars)
{{field, expr}, params}
end
# Anything else inside assoc/2 is an error, reported by escape_assoc/3.
defp escape_assoc_fields(other, params, vars) do
escape_assoc(other, params, vars)
end
@doc """
Builds a quoted expression.
The quoted expression should evaluate to a query at runtime.
If possible, it does all calculations at compile time to avoid
runtime work.
"""
@spec build(Macro.t, [Macro.t], Macro.t, Macro.Env.t) :: Macro.t
def build(query, binding, expr, env) do
binding = Builder.escape_binding(binding)
{expr, params} = escape(expr, binding)
params = Builder.escape_params(params)
select = quote do: %Ecto.Query.QueryExpr{
expr: unquote(expr),
params: unquote(params),
file: unquote(env.file),
line: unquote(env.line)}
Builder.apply_query(query, __MODULE__, [select], env)
end
@doc """
The callback applied by `build/4` to build the query.
"""
@spec apply(Ecto.Queryable.t, term) :: Ecto.Query.t
def apply(query, select) do
query = Ecto.Queryable.to_query(query)
if query.select do
Builder.error! "only one select expression is allowed in query"
else
%{query | select: select}
end
end
end
|
lib/ecto/query/builder/select.ex
| 0.806586
| 0.482368
|
select.ex
|
starcoder
|
defmodule GGity.Scale.Shape do
  @moduledoc false

  alias GGity.{Draw, Labels}
  alias GGity.Scale.Shape

  @palette [:circle, :square, :diamond, :triangle]

  defstruct transform: nil,
            levels: nil,
            labels: :waivers,
            guide: :legend

  @type t() :: %__MODULE__{}

  # Build a Shape scale struct from the given options.
  @spec new(keyword()) :: Shape.t()
  def new(options \\ []), do: struct(Shape, options)

  # Assign one palette marker per level (cycling the palette when there are
  # more levels than markers) and store a level -> marker lookup closure.
  @spec train(Shape.t(), list(binary())) :: Shape.t()
  def train(scale, [level | _other_levels] = levels) when is_list(levels) and is_binary(level) do
    lookup =
      levels
      |> Enum.zip(Stream.cycle(@palette))
      |> Map.new()

    # Non-binary values are stringified before lookup, so atoms and numbers
    # resolve to the same marker as their string form.
    transform = fn value -> lookup[to_string(value)] end
    struct(scale, levels: levels, transform: transform)
  end

  # Render the legend for this scale: a title followed by one keyed entry
  # per level. Scales with a suppressed guide or a single level render
  # nothing.
  @spec draw_legend(Shape.t(), binary(), number()) :: iolist()
  def draw_legend(%Shape{guide: :none}, _label, _key_height), do: []
  def draw_legend(%Shape{levels: [_]}, _label, _key_height), do: []

  def draw_legend(%Shape{levels: levels} = scale, label, key_height) do
    title =
      Draw.text(
        "#{label}",
        x: "0",
        y: "-5",
        class: "gg-text gg-legend-title",
        text_anchor: "left"
      )

    entries =
      levels
      |> Enum.with_index()
      |> Enum.map(fn indexed_level -> draw_legend_item(scale, indexed_level, key_height) end)

    [title, entries]
  end

  # One legend row: background key rect, the level's marker, and its label.
  defp draw_legend_item(scale, {level, index}, key_height) do
    offset = key_height * index

    [
      Draw.rect(
        x: "0",
        y: "#{offset}",
        height: key_height,
        width: key_height,
        class: "gg-legend-key"
      ),
      Draw.marker(
        scale.transform.(level),
        {key_height / 2, key_height / 2 + offset},
        key_height / 3,
        fill: "black",
        fill_opacity: "1"
      ),
      Draw.text(
        "#{Labels.format(scale, level)}",
        x: "#{5 + key_height}",
        y: "#{10 + offset}",
        class: "gg-text gg-legend-text",
        text_anchor: "left"
      )
    ]
  end
end
|
lib/ggity/scale/shape.ex
| 0.840324
| 0.419916
|
shape.ex
|
starcoder
|
defmodule CCSP.Chapter3.WordSearch do
  alias CCSP.Chapter3.GridLocation

  @moduledoc """
  Corresponds to CCSP in Python, Section 3.4 titled "Word Search".
  """

  # Fixed: the original `@type t :: __MODULE__.t()` was self-referential
  # (it defined `t` in terms of itself). Describe the struct instead.
  @type t :: %__MODULE__{
          value: term() | nil,
          row: non_neg_integer() | nil,
          column: non_neg_integer() | nil
        }

  # Cells are charlists (lists of codepoints): random_string/1 returns the
  # raw codepoints from Enum.random/1, so the original `String.t()` claim
  # in these specs was wrong. Interpolation in display_grid/1 still prints
  # them as letters.
  @type grid :: list(list(charlist()))
  @type row :: non_neg_integer
  @type column :: non_neg_integer

  @alphabet Enum.to_list(?A..?Z)

  defstruct value: nil, row: nil, column: nil

  @doc """
  Builds a `rows` x `columns` grid of random single-letter cells.
  """
  @spec generate_grid(row, column) :: grid
  def generate_grid(rows, columns) do
    Enum.map(0..(rows - 1), fn _ ->
      Enum.map(0..(columns - 1), fn _ ->
        random_string(1)
      end)
    end)
  end

  @doc """
  Returns `length` random uppercase codepoints as a charlist.
  """
  @spec random_string(non_neg_integer) :: charlist()
  def random_string(length) do
    Stream.repeatedly(fn ->
      Enum.random(@alphabet)
    end)
    |> Enum.take(length)
  end

  @doc """
  Lists every candidate run of grid locations where `word` could be
  placed (left-to-right, top-to-bottom and both diagonals).
  """
  @spec generate_domain(String.t(), grid) :: list(list(GridLocation.t()))
  def generate_domain(word, grid) do
    height = length(grid) - 1
    width = length(Enum.at(grid, 0)) - 1
    length = String.length(word)

    # NOTE(review): `col..(col + length)` is inclusive, so each candidate
    # run holds `length + 1` cells, while the Python original uses an
    # exclusive range — confirm whether the extra trailing cell is intended
    # before changing placement behaviour.
    domain =
      Enum.reduce(0..height, [], fn row, acc ->
        Enum.reduce(0..width, acc, fn col, acc ->
          columns = col..(col + length)
          rows = row..(row + length)
          potential_locations(acc, row, col, rows, columns, width, height, length)
        end)
      end)

    Enum.reverse(domain)
  end

  @spec potential_locations(
          list,
          non_neg_integer,
          non_neg_integer,
          Range.t(),
          Range.t(),
          non_neg_integer,
          non_neg_integer,
          non_neg_integer
        ) :: list(list(GridLocation.t()))
  def potential_locations(acc, row, col, rows, columns, width, height, length) do
    # We leverage the `with`s and how they short circuit to progressively build up the `locations`.
    locations = acc

    locations =
      with true <- col + length <= width,
           locations <- left_to_right(columns, row, locations),
           true <- row + length <= height,
           locations <- diagonal_towards_bottom_right(rows, col, row, locations) do
        locations
      else
        _ -> locations
      end

    locations =
      with true <- row + length <= height,
           locations <- top_to_bottom(rows, col, locations),
           true <- col - length >= 0,
           locations <- diagonal_towards_bottom_left(rows, col, row, locations) do
        locations
      else
        _ -> locations
      end

    locations
  end

  # Horizontal run on row `row` across `columns`.
  defp left_to_right(columns, row, locations) do
    Enum.map(columns, fn c ->
      GridLocation.new(row, c)
    end)
    |> (&[&1 | locations]).()
  end

  # Diagonal run going down-right from {row, col}.
  defp diagonal_towards_bottom_right(rows, col, row, locations) do
    Enum.map(rows, fn r ->
      GridLocation.new(r, col + (r - row))
    end)
    |> (&[&1 | locations]).()
  end

  # Vertical run in column `col` down `rows`.
  defp top_to_bottom(rows, col, locations) do
    Enum.map(rows, fn r ->
      GridLocation.new(r, col)
    end)
    |> (&[&1 | locations]).()
  end

  # Diagonal run going down-left from {row, col}.
  defp diagonal_towards_bottom_left(rows, col, row, locations) do
    Enum.map(rows, fn r ->
      GridLocation.new(r, col - (r - row))
    end)
    |> (&[&1 | locations]).()
  end

  @doc """
  Prints the grid to stdout, one row per line.
  """
  @spec display_grid(grid) :: :ok
  def display_grid(grid) do
    Enum.each(grid, fn row ->
      Enum.each(row, &IO.write(" #{&1} "))
      IO.puts("")
    end)
  end
end
|
lib/ccsp/chapter3/word_search.ex
| 0.8398
| 0.555013
|
word_search.ex
|
starcoder
|
defmodule GoCounting do
  @type position :: {integer, integer}
  @type owner :: %{owner: atom, territory: [position]}
  @type territories :: %{white: [position], black: [position], none: [position]}

  @doc """
  Return the owner and territory around a position
  """
  @spec territory(board :: String.t(), position :: position) ::
          {:ok, owner} | {:error, String.t()}
  def territory(board, {x, y} = pos) do
    # Board width is read from the first line; assumes a rectangular board
    # with no leading newline.
    size_x = String.split(board, "\n") |> hd |> String.length()
    size_y = String.split(board, "\n", trim: true) |> length()

    if x < 0 or x >= size_x or y < 0 or y >= size_y do
      {:error, "Invalid coordinate"}
    else
      owner =
        board
        |> make_graph
        |> expand_territory([pos])
        |> get_owner

      {:ok, owner}
    end
  end

  @doc """
  Return all white, black and neutral territories
  """
  @spec territories(board :: String.t()) :: territories
  def territories(board) do
    graph = make_graph(board)
    # every empty intersection is a candidate starting point
    empties = for {pos, {:none, _neighbors}} <- graph, do: pos
    territories(graph, empties, %{white: [], black: [], none: []})
  end

  # Walks the remaining empty positions, expanding each into a full
  # territory and dropping the covered positions from the worklist.
  defp territories(_graph, [], territories), do: territories

  defp territories(graph, [pos | positions], territories) do
    %{owner: owner, territory: territory} =
      graph
      |> expand_territory([pos])
      |> get_owner

    positions = Enum.reject(positions, &(&1 in territory))
    territories = %{territories | owner => Enum.sort(territories[owner] ++ territory)}
    territories(graph, positions, territories)
  end

  defp to_color(?W), do: :white
  defp to_color(?B), do: :black
  defp to_color(?_), do: :none

  # Builds a map of position => {color, [neighbor positions]} by merging
  # horizontal and vertical adjacency; single-row and single-column boards
  # are special-cased because zipping with the tail is impossible there.
  defp make_graph(board) do
    board =
      board
      |> String.split("\n", trim: true)
      |> Enum.map(fn row -> row |> to_charlist |> Enum.map(&to_color/1) end)

    left_right_edges =
      for {[color], r} <- Enum.with_index(board) do
        # For rows with a single column we cannot use zip
        %{{0, r} => {color, []}}
      end ++
        for {row, r} <- Enum.with_index(board),
            {{cell, right_cell}, c} <- Enum.zip(row, tl(row)) |> Enum.with_index() do
          # For rows with multiple columns, we zip
          %{{c, r} => {cell, [{c + 1, r}]}, {c + 1, r} => {right_cell, [{c, r}]}}
        end

    top_down_edges =
      case board do
        [row] ->
          for {color, c} <- Enum.with_index(row), do: %{{c, 0} => {color, []}}

        _ ->
          for {{row, row_below}, r} <- Enum.zip(board, tl(board)) |> Enum.with_index(),
              {{cell, below_cell}, c} <- Enum.zip(row, row_below) |> Enum.with_index() do
            %{{c, r} => {cell, [{c, r + 1}]}, {c, r + 1} => {below_cell, [{c, r}]}}
          end
      end

    Enum.reduce(
      left_right_edges ++ top_down_edges,
      %{},
      &Map.merge(&1, &2, fn _key, {cell, n1}, {cell, n2} -> {cell, n1 ++ n2} end)
    )
  end

  # Flood fill from the starting positions: empty cells contribute their
  # unvisited neighbours to the worklist; stones terminate the expansion.
  defp expand_territory(graph, positions, visited \\ MapSet.new())
  defp expand_territory(_graph, [], _visited), do: []

  defp expand_territory(graph, [pos | positions], visited) do
    # BUGFIX: a position can be queued twice (it may be an unvisited
    # neighbour of two cells processed before it), which used to produce
    # duplicate entries in the resulting territory. Skip repeats.
    if MapSet.member?(visited, pos) do
      expand_territory(graph, positions, visited)
    else
      {color, neighbors} =
        case graph[pos] do
          {:white, _neighbors} ->
            {:white, []}

          {:black, _neighbors} ->
            {:black, []}

          {:none, neighbors} ->
            {:none, Enum.reject(neighbors, &(&1 in visited))}
        end

      [{pos, color} | expand_territory(graph, neighbors ++ positions, MapSet.put(visited, pos))]
    end
  end

  # A territory belongs to a color only when all bordering stones share
  # that color; mixed or absent stones make it neutral.
  defp get_owner(territory) do
    empties = for {pos, :none} <- territory, do: pos
    colors = for {_pos, color} when color != :none <- territory, do: color

    %{
      territory: Enum.sort(empties),
      owner:
        case {Enum.empty?(empties), Enum.uniq(colors)} do
          {false, [:white]} -> :white
          {false, [:black]} -> :black
          _ -> :none
        end
    }
  end
end
|
exercises/practice/go-counting/.meta/example.ex
| 0.838415
| 0.497376
|
example.ex
|
starcoder
|
defmodule EventLog do
  @moduledoc """
  Can host multiple streams. Stream has a log and index.

  * Entries are being appended to log. There is no deletion operation.
  * Entry contains the actual data, timestamp, crc and some meta data.
  * Log is partitioned to segments that are named by it's least offset.
  * Each segment has a coresponding index file which contains offset's position in segment.
  * Log scales by splitting to more segments and has constant memory footprint.
  * Stream is a log + index.
  * There are no NIFs and other dependencies (pure Elixir).
  * Goal: get to max. write throughput given Elixir/Erlang capablities
  """
  use GenServer

  require Logger

  alias EventLog.{Appender, Reader}

  def start_link(dir, opts \\ []) do
    GenServer.start_link(__MODULE__, [dir, opts])
  end

  # State layout:
  #   %{dir: root_dir, opts: opts,
  #     streams: %{name => %{appender: pid, stream_dir: path}}}
  def init([dir, opts]) do
    File.mkdir_p!(dir)
    {:ok, %{dir: dir, opts: opts, streams: %{}}}
  end

  @doc """
  Creates a stream.
  """
  def create_stream(pid, name, opts \\ []) do
    GenServer.call(pid, {:create_stream, name, opts})
  end

  @doc """
  Deletes a stream.
  """
  def delete_stream(pid, name) do
    GenServer.call(pid, {:delete_stream, name})
  end

  @doc """
  Appends data entry to the end of the stream's log.
  """
  def append(pid, stream, data) do
    GenServer.call(pid, {:append, stream, data})
  end

  @doc """
  Basic info about underlying streams.
  """
  def info(pid) do
    GenServer.call(pid, :info)
  end

  @doc """
  Returns a reader that can read log. Initalized at a given offset, by default at the very
  beginning.
  """
  def get_reader(pid, stream, offset \\ 0) do
    GenServer.call(pid, {:get_reader, stream, offset})
  end

  @doc """
  Syncs all buffers to disk. Should be called if read is right after append.
  """
  def fsync(pid) do
    GenServer.call(pid, :fsync)
  end

  @doc """
  Closes all file descriptors.
  """
  def close(pid) do
    GenServer.call(pid, :close)
  end

  # --- server callbacks ---

  def handle_call({:create_stream, name, _opts}, _from, %{dir: dir, streams: streams} = state) do
    if streams[name] do
      {:reply, {:error, :stream_exists}, state}
    else
      stream_dir = Path.join(dir, name)
      File.mkdir_p!(stream_dir)
      {:ok, appender} = Appender.start_link(stream_dir)
      stream = %{appender: appender, stream_dir: stream_dir}
      {:reply, {:ok, stream}, %{state | streams: Map.put(streams, name, stream)}}
    end
  end

  def handle_call({:delete_stream, name}, _from, %{streams: streams} = state) do
    case streams[name] do
      %{stream_dir: stream_dir} ->
        File.rm_rf!(stream_dir)

        # BUGFIX: previously `Map.delete(state, name)` removed `name` from
        # the top-level state map (which only has :dir/:opts/:streams keys),
        # leaving the stream registered. Delete it from the streams map.
        # NOTE(review): the stream's appender process is left running —
        # presumably it should be stopped here too; verify.
        {:reply, {:ok, 1}, %{state | streams: Map.delete(streams, name)}}

      _ ->
        {:reply, {:error, :stream_not_exists}, state}
    end
  end

  def handle_call({:append, name, data}, _from, %{streams: streams} = state) do
    case streams[name] do
      %{appender: appender} ->
        Appender.append(appender, data)
        {:reply, {:ok, 1}, state}

      _ ->
        {:reply, {:error, :stream_not_exists}, state}
    end
  end

  def handle_call({:get_reader, name, offset}, _from, %{streams: streams} = state) do
    case streams[name] do
      %{stream_dir: stream_dir} ->
        {:ok, reader} = Reader.start_link(stream_dir, offset)
        {:reply, {:ok, reader}, state}

      _ ->
        {:reply, {:error, :stream_not_exists}, state}
    end
  end

  def handle_call(:info, _from, %{streams: streams} = state) do
    {:reply, {:ok, %{streams: streams |> Map.keys()}}, state}
  end

  def handle_call(:fsync, _from, %{streams: streams} = state) do
    for {_name, %{appender: appender}} <- streams do
      Appender.fsync(appender)
    end

    {:reply, :ok, state}
  end

  # NOTE(review): despite the doc on `close/1`, no file descriptors are
  # closed here; the `{:ok}` reply shape is kept for caller compatibility.
  def handle_call(:close, _from, state) do
    {:reply, {:ok}, state}
  end
end
|
lib/event_log.ex
| 0.778355
| 0.509764
|
event_log.ex
|
starcoder
|
defmodule OMG.Performance do
  @moduledoc """
  OMG network child chain server performance test entrypoint. Setup and runs performance tests.
  # Usage
  Always `cd apps/omg_performance` before running performance tests
  ## start_simple_perftest runs test with 5 transactions for each 3 senders and default options.
  ```> mix run -e 'OMG.Performance.start_simple_perftest(5, 3)'```
  ## start_extended_perftest runs test with 100 transactions for one specified account and default options.
  ## extended test is run on testnet make sure you followed instruction in `README.md` and both `geth` and `omg_api` are running
  ```> mix run -e 'OMG.Performance.start_extended_perftest(100, [%{ addr: <<192, 206, 18, ...>>, priv: <<246, 22, 164, ...>>}], "0xbc5f ...")'```
  ## Parameters passed are: 1. number of transaction each sender will send, 2. list of senders (see: TestHelper.generate_entity()) and 3. `contract` address
  # Note:
  `:fprof` will print a warning:
  ```
  Warning: {erlang, trace, 3} called in "<0.514.0>" - trace may become corrupt!
  ```
  It is caused by using `procs: :all` in options. So far we're not using `:erlang.trace/3` in our code,
  so it has been ignored. Otherwise it's easy to reproduce and report if anyone has the nerve
  (github.com/erlang/otp and the JIRA it points you to).
  """
  use OMG.API.LoggerExt

  alias OMG.API.Crypto
  alias OMG.API.Integration.DepositHelper
  alias OMG.API.TestHelper
  alias OMG.API.Utxo

  require Utxo

  # pseudo-address representing Ether as the currency of test transactions
  @eth OMG.Eth.RootChain.eth_pseudo_address()

  @doc """
  start_simple_perf runs test with {ntx_to_send} tx for each {nspenders} senders with given options.
  Default options:
  ```
  %{
  destdir: ".", # directory where the results will be put
  profile: false,
  block_every_ms: 2000 # how often do you want the tester to force a block being formed
  }
  ```
  """
  @spec start_simple_perftest(pos_integer(), pos_integer(), map()) :: :ok
  def start_simple_perftest(ntx_to_send, nspenders, opts \\ %{}) do
    _ =
      Logger.info(
        "PerfTest number of spenders: #{inspect(nspenders)}, number of tx to send per spender: #{inspect(ntx_to_send)}."
      )

    DeferredConfig.populate(:omg_rpc)

    # caller-supplied opts win over the defaults
    defaults = %{destdir: ".", profile: false, block_every_ms: 2000}
    opts = Map.merge(defaults, opts)

    # Start a minimal in-process child chain, fund the spenders, run the
    # test, then tear everything down — the ordering of these steps matters.
    {:ok, started_apps, api_children_supervisor} = setup_simple_perftest(opts)

    spenders = create_spenders(nspenders)
    utxos = create_utxos_for_simple_perftest(spenders, ntx_to_send)

    run({ntx_to_send, utxos, opts, opts[:profile]})

    cleanup_simple_perftest(started_apps, api_children_supervisor)
  end

  @doc """
  Runs test with {ntx_to_send} transactions for each {spenders}.
  Initial deposits for each account will be made on passed {contract_addr}.
  Default options:
  ```
  %{
  destdir: ".", # directory where the results will be put
  geth: System.get_env("ETHEREUM_RPC_URL"),
  child_chain: "http://localhost:9656"
  }
  ```
  """
  @spec start_extended_perftest(
          pos_integer(),
          list(TestHelper.entity()),
          Crypto.address_t(),
          map()
        ) :: :ok
  def start_extended_perftest(ntx_to_send, spenders, contract_addr, opts \\ %{}) do
    _ =
      Logger.info(
        "PerfTest number of spenders: #{inspect(length(spenders))}, number of tx to send per spender: #{
          inspect(ntx_to_send)
        }."
      )

    DeferredConfig.populate(:omg_rpc)

    # Read the configured child chain URL; the config entry may be absent
    # (nil) or a keyword list carrying :child_chain_url.
    url =
      Application.get_env(:omg_rpc, OMG.RPC.Client, "http://localhost:9656")
      |> case do
        nil -> nil
        opts -> Keyword.get(opts, :child_chain_url)
      end

    defaults = %{destdir: ".", geth: System.get_env("ETHEREUM_RPC_URL") || "http://localhost:8545", child_chain: url}
    opts = Map.merge(defaults, opts)

    {:ok, started_apps} = setup_extended_perftest(opts, contract_addr)

    utxos = create_utxos_for_extended_perftest(spenders, ntx_to_send)

    # extended perftest is never profiled (last tuple element is false)
    run({ntx_to_send, utxos, opts, false})

    cleanup_extended_perftest(started_apps)
  end

  # Boots the minimal set of child chain components against a throwaway
  # leveldb directory; returns the started apps and the supervisor pid so
  # cleanup can undo everything in order.
  @spec setup_simple_perftest(map()) :: {:ok, list, pid}
  defp setup_simple_perftest(opts) do
    {:ok, _} = Application.ensure_all_started(:briefly)
    {:ok, dbdir} = Briefly.create(directory: true, prefix: "leveldb")
    Application.put_env(:omg_db, :leveldb_path, dbdir, persistent: true)
    _ = Logger.info("Perftest leveldb path: #{inspect(dbdir)}")

    :ok = OMG.DB.init()

    # hackney is http-client httpoison's dependency
    started_apps = ensure_all_started([:omg_db, :cowboy, :hackney])

    # select just necessary components to run the tests
    children = [
      %{
        id: Phoenix.PubSub.PG2,
        start: {Phoenix.PubSub.PG2, :start_link, [:eventer, []]},
        type: :supervisor
      },
      {OMG.API.State, []},
      {OMG.API.FreshBlocks, []},
      {OMG.API.FeeServer, []},
      {OMG.RPC.Web.Endpoint, []}
    ]

    {:ok, api_children_supervisor} = Supervisor.start_link(children, strategy: :one_for_one)

    # periodically forces block formation at the configured interval
    _ = OMG.Performance.BlockCreator.start_link(opts[:block_every_ms])

    {:ok, started_apps, api_children_supervisor}
  end

  # Points ethereumex at the external geth node and registers the root
  # chain contract address; only HTTP clients are started locally.
  @spec setup_extended_perftest(map(), Crypto.address_t()) :: {:ok, list}
  defp setup_extended_perftest(opts, contract_addr) do
    {:ok, _} = Application.ensure_all_started(:ethereumex)

    # hackney is http-client httpoison's dependency
    started_apps = ensure_all_started([:hackney])

    # long-running test: disable HTTP timeouts towards the Ethereum node
    Application.put_env(:ethereumex, :request_timeout, :infinity)
    Application.put_env(:ethereumex, :http_options, recv_timeout: :infinity)
    Application.put_env(:ethereumex, :url, opts[:geth])

    {:ok, contract_addr_enc} = Crypto.encode_address(contract_addr)
    Application.put_env(:omg_eth, :contract_addr, contract_addr_enc)

    {:ok, started_apps}
  end

  # Stops the supervised components first, then the apps in reverse start
  # order, and clears the leveldb path set during setup.
  @spec cleanup_simple_perftest([], pid) :: :ok
  defp cleanup_simple_perftest(started_apps, api_children_supervisor) do
    :ok = Supervisor.stop(api_children_supervisor)

    started_apps |> Enum.reverse() |> Enum.each(&Application.stop/1)

    _ = Application.stop(:briefly)

    Application.put_env(:omg_db, :leveldb_path, nil)
    :ok
  end

  @spec cleanup_extended_perftest([]) :: :ok
  defp cleanup_extended_perftest(started_apps) do
    started_apps |> Enum.reverse() |> Enum.each(&Application.stop/1)
    :ok
  end

  # Delegates the actual load generation to the Runner and logs its result.
  @spec run({pos_integer(), list(), %{atom => any()}, boolean()}) :: :ok
  defp run(args) do
    {:ok, data} = OMG.Performance.Runner.run(args)
    _ = Logger.info("#{inspect(data)}")
    :ok
  end

  # We're not basing on mix to start all neccessary test's components.
  # Accumulates every application actually started so cleanup can stop them.
  defp ensure_all_started(app_list) do
    app_list
    |> Enum.reduce([], fn app, list ->
      {:ok, started_apps} = Application.ensure_all_started(app)
      list ++ started_apps
    end)
  end

  @spec create_spenders(pos_integer()) :: list(TestHelper.entity())
  defp create_spenders(nspenders) do
    1..nspenders
    |> Enum.map(fn _nspender -> TestHelper.generate_entity() end)
  end

  # Credits each spender with exactly enough funds (one unit per tx) via a
  # direct deposit into the in-process child chain state; blknum == index.
  @spec create_utxos_for_simple_perftest(list(TestHelper.entity()), pos_integer()) :: list()
  defp create_utxos_for_simple_perftest(spenders, ntx_to_send) do
    spenders
    |> Enum.with_index(1)
    |> Enum.map(fn {spender, index} ->
      {:ok, _} = OMG.API.State.deposit([%{owner: spender.addr, currency: @eth, amount: ntx_to_send, blknum: index}])

      utxo_pos = Utxo.position(index, 0, 0) |> Utxo.Position.encode()
      %{owner: spender, utxo_pos: utxo_pos, amount: ntx_to_send}
    end)
  end

  # Funds spenders through real root chain deposits (10x headroom).
  @spec create_utxos_for_extended_perftest(list(TestHelper.entity()), pos_integer()) :: list()
  defp create_utxos_for_extended_perftest(spenders, ntx_to_send) do
    make_deposits(10 * ntx_to_send, spenders)
    |> Enum.map(fn {:ok, owner, blknum, amount} ->
      utxo_pos = Utxo.position(blknum, 0, 0) |> Utxo.Position.encode()
      %{owner: owner, utxo_pos: utxo_pos, amount: amount}
    end)
  end

  # Runs all deposits concurrently and waits (without timeout) for each.
  defp make_deposits(value, accounts) do
    deposit = fn account ->
      deposit_blknum = DepositHelper.deposit_to_child_chain(account.addr, value)

      {:ok, account, deposit_blknum, value}
    end

    accounts
    |> Enum.map(&Task.async(fn -> deposit.(&1) end))
    |> Enum.map(fn task -> Task.await(task, :infinity) end)
  end
end
|
apps/omg_performance/lib/performance.ex
| 0.873882
| 0.882731
|
performance.ex
|
starcoder
|
defmodule EWallet.ComputedBalanceFetcher do
  @moduledoc """
  Handles the retrieval and formatting of balances from the local ledger.
  """
  alias EWalletDB.{User, MintedToken}
  alias LocalLedger.Balance

  @doc """
  Retrieves every balance held by the user identified by
  `provider_user_id` and formats them for EWalletAPI.

  Returns `{:ok, %{address: address, balances: balances}}` on success,
  `{:error, :provider_user_id_not_found}` when no such user exists, or
  the error tuple propagated from the local ledger.
  """
  def all(%{"provider_user_id" => provider_user_id}) do
    case User.get_by_provider_user_id(provider_user_id) do
      nil ->
        {:error, :provider_user_id_not_found}

      user ->
        primary = User.get_primary_balance(user)
        format_all(primary.address)
    end
  end

  @doc """
  Retrieves every balance held at the given ledger `address` and formats
  them for EWalletAPI.
  """
  def all(%{"address" => address}), do: format_all(address)

  @doc """
  Retrieves the given `minted_token`'s balance on the `user`'s primary
  balance address, formatted for EWalletAPI.
  """
  def get(%User{} = user, %MintedToken{} = minted_token) do
    primary = User.get_primary_balance(user)
    get(minted_token.friendly_id, primary.address)
  end

  @doc """
  Retrieves the balance identified by `friendly_id` at the given ledger
  `address`, formatted for EWalletAPI.
  """
  def get(friendly_id, address) do
    friendly_id
    |> Balance.get(address)
    |> process_response(address, :one)
  end

  # Fetches all balances at `address` from the ledger and formats them.
  defp format_all(address) do
    address
    |> Balance.all()
    |> process_response(address, :all)
  end

  # On success, pairs the ledger amounts with their minted tokens; any
  # non-ok response from the ledger is passed through untouched.
  defp process_response({:ok, data}, address, type) do
    balances =
      type
      |> load_minted_tokens(data)
      |> map_minted_tokens(data)

    {:ok, %{address: address, balances: balances}}
  end

  defp process_response(error, _address, _type), do: error

  # :all loads every known minted token; :one loads only the tokens that
  # appear as keys in the amounts map.
  defp load_minted_tokens(:all, _), do: MintedToken.all()

  defp load_minted_tokens(:one, amounts) do
    amounts
    |> Map.keys()
    |> MintedToken.get_all()
  end

  # Emits one %{minted_token: token, amount: amount} entry per token,
  # defaulting the amount to 0 when the ledger reported none for it.
  defp map_minted_tokens(minted_tokens, amounts) do
    Enum.map(minted_tokens, fn minted_token ->
      %{minted_token: minted_token, amount: amounts[minted_token.friendly_id] || 0}
    end)
  end
end
|
apps/ewallet/lib/ewallet/fetchers/computed_balance_fetcher.ex
| 0.741019
| 0.407333
|
computed_balance_fetcher.ex
|
starcoder
|
defmodule DocGen.Content.Random do
  use Private

  @moduledoc """
  Gives random videos based on the weight and lengths of videos and tags.
  """

  # NOTE(review): compile-time config — the multiplier is frozen when this
  # module is compiled; changing the config requires a recompile.
  @keyword_multiplier Application.fetch_env!(:doc_gen, :keyword_multiplier)

  alias DocGen.{Content, Repo}

  @doc """
  Gives a random set of videos given a number of clips and a list of keywords.

  Returns `{video_ids, total_duration}` where `video_ids` are the ids of
  the selected videos and `total_duration` is the sum of their durations.
  """
  @spec give([String.t()], [non_neg_integer()]) ::
          {[non_neg_integer()], non_neg_integer()}
  def give(keywords, number_per_segment) do
    Content.list_segments_with_videos()
    |> Enum.map(& &1.videos)
    # pair each segment's video pool with how many clips to take from it
    |> Enum.zip(number_per_segment)
    |> Enum.map(&take_random(&1, keywords))
    |> List.flatten()
    |> Enum.reject(&is_nil/1)
    |> Enum.reduce({[], 0}, fn v, {video_ids, length} ->
      {[v.id | video_ids], length + v.duration}
    end)
  end

  private do
    # take `n` videos randomly proportional to the keyword matches
    @spec take_random({[%Content.Video{}], non_neg_integer()}, [String.t()]) ::
            [%{}]
    defp take_random({videos, number_to_take}, keywords) do
      # Draws without replacement: each pick is removed from the pool so
      # the same video cannot be chosen twice; stops early on empty pool.
      {videos, _left_behind_videos} =
        Enum.reduce(1..number_to_take, {[], videos}, fn
          _n, {_taken, []} = acc ->
            acc

          _n, {taken, videos} ->
            hot_take = take_a_random(videos, keywords)
            {[hot_take | taken], Enum.reject(videos, &(&1.id == hot_take.id))}
        end)

      videos
    end

    # Weighted random pick: each video is repeated `score` times and one
    # entry is drawn uniformly. NOTE(review): a video with score 0 is never
    # selectable, and if every score is 0 `Enum.random/1` raises on the
    # empty list — presumably scores are always positive; verify.
    defp take_a_random(videos, keywords) do
      videos
      |> Enum.map(&score(&1, keywords))
      |> Enum.map(&repeat(&1, &1.score, []))
      |> List.flatten()
      |> Enum.random()
    end

    # Preloads the video's tags and computes its keyword-weighted score,
    # returning the video as a plain map with a :score key added.
    @spec score(%Content.Video{}, [String.t()]) :: %{}
    defp score(video, keywords) do
      video
      |> Repo.preload(:tags)
      |> Map.from_struct()
      |> multiply_keywords(keywords)
    end

    # Builds a list containing `video` repeated `n` times.
    @spec repeat(%{}, non_neg_integer(), [%{}]) :: [%{}]
    defp repeat(_v, 0, acc), do: acc
    defp repeat(video, n, acc), do: repeat(video, n - 1, [video | acc])

    # Sums the video's tag weights, boosting tags whose name matches a
    # requested keyword by @keyword_multiplier.
    @spec multiply_keywords(%{}, [String.t()]) :: %{score: non_neg_integer()}
    defp multiply_keywords(%{tags: tags} = video, keywords) do
      score =
        Enum.reduce(tags, 0, fn %{name: name, weight: weight}, acc ->
          multiplier = if name in keywords, do: @keyword_multiplier, else: 1
          weight * multiplier + acc
        end)

      Map.put(video, :score, score)
    end
  end
end
|
lib/doc_gen/content/random.ex
| 0.857649
| 0.490175
|
random.ex
|
starcoder
|
defmodule Akd.SecureConnection do
  require Logger

  @moduledoc """
  This module defines helper functions that are used by `Akd` to execute
  a set of commands through the Secure channel, examples: ssh and scp
  """

  @doc """
  Runs `cmds` on the given destination over ssh, after first changing
  into the destination's path. Output is streamed to standard output.

  ## Examples

      iex> Akd.SecureConnection.securecmd(Akd.Destination.local(), "echo hi")
      {:error, %IO.Stream{device: :standard_io, line_or_bytes: :line, raw: false}}
  """
  def securecmd(dest, cmds) do
    # prepend a `cd` so every command runs relative to the destination path
    ssh(dest.user, dest.host, "cd #{dest.path}\n" <> cmds, true)
  end

  @doc """
  Executes `operations` on `scoped_ip` as `user` via the system `ssh`
  binary. When `stdio` is true, output is streamed to standard output and
  the stream struct is what comes back in the result tuple.

  ## Examples

      iex> Akd.SecureConnection.ssh(:current, :local, "echo hi")
      {:error, ""}

      iex> Akd.SecureConnection.ssh(:current, :local, "echo hi", true)
      {:error, %IO.Stream{device: :standard_io, line_or_bytes: :line, raw: false}}
  """
  def ssh(user, scoped_ip, operations, stdio \\ false) do
    Logger.info("ssh #{user}@#{scoped_ip}")
    Logger.info("running: #{operations}")

    opts = if stdio, do: [into: IO.stream(:stdio, :line)], else: []

    case System.cmd("ssh", ["#{user}@#{scoped_ip}", operations], opts) do
      {output, 0} -> {:ok, output}
      {error, _exit_status} -> {:error, error}
    end
  end

  @doc """
  Copies `src` to `dest` using the system `scp` binary, streaming output
  to standard output. Extra `opts` are passed through as scp flags.

  ## Examples

      iex> src = "user@host:~/path"
      iex> dest = "user2@host2:~/path2"
      iex> Akd.SecureConnection.scp(src, dest)
      {:error, %IO.Stream{device: :standard_io, line_or_bytes: :line, raw: false}}

      iex> src = "user@host:~/path"
      iex> dest = "user2@host2:~/path2"
      iex> Akd.SecureConnection.scp(src, dest, ["-p a"])
      {:error, %IO.Stream{device: :standard_io, line_or_bytes: :line, raw: false}}
  """
  def scp(src, dest, opts \\ []) do
    Logger.info("scp #{src} #{dest}")

    args = opts ++ [src, dest]

    case System.cmd("scp", args, into: IO.stream(:stdio, :line)) do
      {output, 0} -> {:ok, output}
      {error, _exit_status} -> {:error, error}
    end
  end
end
|
lib/akd/helpers/secure_connection.ex
| 0.774157
| 0.448547
|
secure_connection.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.