| code (string, 114-1.05M chars) | path (string, 3-312 chars) | quality_prob (float64, 0.5-0.99) | learning_prob (float64, 0.2-1) | filename (string, 3-168 chars) | kind (1 class) |
|---|---|---|---|---|---|
defmodule GameApp.Server do
@moduledoc """
`GameApp.Server` provides a stateful process that maintains internal game
state and exposes a public API for interacting with the game.
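## Example
A minimal usage sketch; `player` and `other_player` stand for
`GameApp.Player` structs, and the `:games_table` ETS table and
`GameApp.Registry` are assumed to be set up by the application:
```
{:ok, _pid} = GameApp.Server.start_link("abcd", player)
:ok = GameApp.Server.join("abcd", other_player)
summary = GameApp.Server.summary("abcd")
```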
"""
use GenServer
alias __MODULE__, as: Server
alias GameApp.{Game, Player}
alias GameApp.Config, as: GameConfig
require Logger
@spec start_link(String.t(), Player.t(), GameConfig.t()) ::
{:ok, pid()} | :ignore | {:error, {:already_started, pid()} | term()}
def start_link(shortcode, player, config \\ %GameConfig{}) do
GenServer.start_link(Server, {shortcode, player, config}, name: via_tuple(shortcode))
end
### Client API
@doc """
Returns a summary of the game state for a game with the given shortcode.
"""
@spec summary(String.t()) :: Game.t()
def summary(shortcode) do
GenServer.call(via_tuple(shortcode), :summary)
end
@doc """
Joins a player to the game with the given shortcode.
"""
@spec join(String.t(), Player.t()) :: :ok
def join(shortcode, player) do
GenServer.cast(via_tuple(shortcode), {:player_join, player})
end
@doc """
Removes a player from the game with the given shortcode.
"""
@spec leave(String.t(), Player.t()) :: :ok
def leave(shortcode, player) do
GenServer.cast(via_tuple(shortcode), {:player_leave, player})
end
@doc """
Starts the game with the given shortcode.
"""
@spec start_game(String.t()) :: :ok
def start_game(shortcode) do
GenServer.cast(via_tuple(shortcode), :start_game)
end
@doc """
Starts the next round for the game with the given shortcode.
"""
@spec start_round(String.t(), pid()) :: :ok
def start_round(shortcode, channel_pid) do
GenServer.cast(via_tuple(shortcode), {:start_round, channel_pid})
end
@doc """
Selects a prompt to set for the current round.
"""
@spec select_prompt(String.t(), String.t(), pid()) :: :ok
def select_prompt(shortcode, prompt, channel_pid) do
GenServer.cast(via_tuple(shortcode), {:select_prompt, prompt, channel_pid})
end
@doc """
Selects a reaction for the given player in the current round.
"""
@spec select_reaction(String.t(), Player.t(), String.t(), pid()) :: :ok
def select_reaction(shortcode, player, reaction, channel_pid) do
GenServer.cast(via_tuple(shortcode), {:select_reaction, player, reaction, channel_pid})
end
@doc """
Selects a winner for the current round.
"""
@spec select_winner(String.t(), Player.t() | nil, pid()) :: :ok
def select_winner(shortcode, winner, channel_pid) do
GenServer.cast(via_tuple(shortcode), {:select_winner, winner, channel_pid})
end
### Server API
@impl true
def init({shortcode, player, %GameConfig{game_timeout: game_timeout} = config}) do
# TODO(shawk): dialyzer reports there may be a race condition when attempting
# to do a :ets.insert after an :ets.lookup. Ignoring for now, revisit later.
game =
case :ets.lookup(:games_table, shortcode) do
[] ->
game = Game.create(shortcode: shortcode, creator: player, config: config)
:ets.insert(:games_table, {shortcode, game})
game
[{^shortcode, game}] ->
game
end
_ = Logger.info("Spawned game server process '#{game.shortcode}'.")
{:ok, game, game_timeout}
end
@impl true
def handle_call(:summary, _from, game) do
{:reply, Game.summary(game), game, game.config.game_timeout}
end
@impl true
def handle_cast({:player_join, player}, game) do
next = Game.player_join(game, player)
update_ets(next)
{:noreply, next, game.config.game_timeout}
end
@impl true
def handle_cast({:player_leave, player}, game) do
next = Game.player_leave(game, player)
update_ets(next)
{:noreply, next, game.config.game_timeout}
end
@impl true
def handle_cast(:start_game, game) do
next = Game.start_game(game)
update_ets(next)
{:noreply, next, game.config.game_timeout}
end
@impl true
def handle_cast({:start_round, channel_pid}, game) do
next = Game.start_round(game)
update_ets(next)
# Advance to prompt selection after timeout
Process.send_after(
self(),
{:round_start_timeout, channel_pid},
game.config.round_start_timeout
)
{:noreply, next, game.config.game_timeout}
end
@impl true
def handle_cast({:select_prompt, prompt, channel_pid}, game) do
next = Game.select_prompt(game, prompt)
update_ets(next)
_ =
if game.config.reaction_selection_timeout > 0 do
Process.send_after(
self(),
{:reaction_timeout, channel_pid},
game.config.reaction_selection_timeout
)
end
{:noreply, next, game.config.game_timeout}
end
@impl true
def handle_cast({:select_reaction, player, reaction, channel_pid}, game) do
next = Game.select_reaction(game, player, reaction)
update_ets(next)
# Advance to winner_selection after a reaction is selected, if all reactions selected
_ =
if Game.all_players_reacted?(next) do
Process.send_after(
self(),
{:all_players_reacted, channel_pid},
game.config.winner_selection_timeout
)
end
{:noreply, next, game.config.game_timeout}
end
@impl true
def handle_cast({:select_winner, winner, channel_pid}, game) do
next = Game.select_winner(game, winner)
update_ets(next)
# Advance to next round after game.config.round_end_timeout
Process.send_after(
self(),
{:select_winner_timeout, channel_pid},
game.config.round_end_timeout
)
{:noreply, next, game.config.game_timeout}
end
# Info Callbacks
@impl true
def handle_info({:round_start_timeout, channel_pid}, game) do
next = Game.start_prompt_selection(game)
update_ets(next)
send(channel_pid, :broadcast_update)
{:noreply, next, game.config.game_timeout}
end
@impl true
def handle_info({message, channel_pid}, %{phase: :reaction_selection} = game)
when message in [:reaction_timeout, :all_players_reacted] do
next = Game.start_winner_selection(game)
update_ets(next)
send(channel_pid, :broadcast_update)
{:noreply, next, game.config.game_timeout}
end
def handle_info({message, _channel_pid}, game)
when message in [:reaction_timeout, :all_players_reacted] do
{:noreply, game, game.config.game_timeout}
end
@impl true
def handle_info({:select_winner_timeout, channel_pid}, game) do
game =
case Game.final_round?(game) do
true ->
next = Game.finalize(game)
update_ets(next)
next
false ->
Server.start_round(game.shortcode, channel_pid)
game
end
send(channel_pid, :broadcast_update)
{:noreply, game, game.config.game_timeout}
end
@impl true
def handle_info(:timeout, game) do
{:stop, {:shutdown, :timeout}, game}
end
@impl true
def terminate({:shutdown, :empty_game}, game) do
:ets.delete(:games_table, game.shortcode)
_ = Logger.info("Server '#{game.shortcode}' shutdown because all players left.")
:ok
end
def terminate({:shutdown, :timeout}, game) do
:ets.delete(:games_table, game.shortcode)
_ = Logger.info("Shutting down game '#{game.shortcode}', timed out.")
:ok
end
def terminate(_reason, game) do
_ = Logger.info("Game server process terminated '#{game.shortcode}'.")
:ok
end
### Helpers
@doc """
Returns a tuple used to register and lookup a game server process by name.
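For example, `via_tuple("abcd")` returns
`{:via, Registry, {GameApp.Registry, "game:abcd"}}`.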
"""
@spec via_tuple(String.t()) :: {:via, Registry, {GameApp.Registry, String.t()}}
def via_tuple(shortcode) do
{:via, Registry, {GameApp.Registry, "game:" <> shortcode}}
end
@doc """
Returns the `pid` of the game server process registered under the
given `shortcode`, or `nil` if no process is registered.
"""
@spec game_pid(String.t()) :: pid() | nil
def game_pid(shortcode) do
shortcode
|> via_tuple()
|> GenServer.whereis()
end
@doc """
Generates a 4-letter code used as the identifier for a game server.
"""
@spec generate_shortcode() :: String.t()
def generate_shortcode() do
code =
shortcode_string()
|> String.downcase()
# ensure no duplicates
case game_pid(code) do
nil ->
code
_ ->
generate_shortcode()
end
end
defp update_ets(next) do
:ets.insert(:games_table, {next.shortcode, next})
end
defp shortcode_string() do
<<a, b, c, d>> = :crypto.strong_rand_bytes(4)
to_string([
65 + rem(a, 26),
65 + rem(b, 26),
65 + rem(c, 26),
65 + rem(d, 26)
])
end
end
| apps/game/lib/game/server.ex | 0.822973 | 0.485661 | server.ex | starcoder |
defmodule Cldr.Number.Formatter.Currency do
@moduledoc """
Number formatter for the `:currency` `:long` format.
This formatter implements formatting a currency in a long form. This
is not the same as decimal formatting with a currency placeholder.
To explain the difference, look at the following examples:
iex> Cldr.Number.to_string 123, TestBackend.Cldr, format: :currency, currency: "USD"
{:ok, "$123.00"}
iex> Cldr.Number.to_string 123, TestBackend.Cldr, format: :long, currency: "USD"
{:ok, "123 US dollars"}
In the first example the format is defined by a decimal mask. In this example
the format mask comes from:
iex> {:ok, formats} = Cldr.Number.Format.all_formats_for("en", TestBackend.Cldr)
...> formats.latn.currency
"¤#,##0.00"
In the second example we are using a format that combines the number with
a language translation of the currency name. In this example the format
comes from:
iex> {:ok, formats} = Cldr.Number.Format.all_formats_for("en", TestBackend.Cldr)
...> formats.latn.currency_long
%{one: [0, " ", 1], other: [0, " ", 1]}
Where "{0}" is replaced with the number formatted using the `:standard`
decimal format and "{1} is replaced with locale-specific name of the
currency adjusted for the locales plural rules."
**This module is not part of the public API and is subject
to change at any time.**
"""
alias Cldr.Number.{Format, System}
alias Cldr.{Substitution, Currency}
alias Cldr.Number.Format.Options
alias Cldr.Number.Formatter.Decimal
import Cldr.Number.Formatter.Decimal, only: [is_currency: 1]
import DigitalToken, only: [is_digital_token: 1]
@doc false
def to_string(number, _format, _backend, _options) when is_binary(number) do
{:error,
{
ArgumentError,
"Not a number: #{inspect number}. Currency long formats only support number or Decimal arguments"
}
}
end
# The format :currency_long_with_symbol is a composition of :currency_long
# and the default :currency format.
def to_string(number, :currency_long_with_symbol, backend, options) do
decimal_options = decimal_options(options, backend)
decimal_format = decimal_options.format
number
|> Cldr.Number.to_string!(backend, long_options(options))
|> Decimal.to_string(decimal_format, backend, decimal_options)
end
def to_string(number, :currency_long, backend, options) do
locale = options.locale
number_system = System.system_name_from!(options.number_system, locale, backend)
cardinal = Module.concat(backend, Number.Cardinal)
formats = Format.formats_for!(locale, number_system, backend).currency_long
if !formats do
raise ArgumentError,
message:
"No :currency_long format known for " <>
"locale #{inspect(locale)} and number system #{inspect(number_system)}."
end
options =
options
|> Map.put(:format, :standard)
|> set_fractional_digits(options.currency, options.fractional_digits)
|> Options.resolve_standard_format(backend)
currency_string = currency_string(number, options.currency, cardinal, locale, backend)
number_string = Cldr.Number.to_string!(number, backend, options)
format = cardinal.pluralize(number, locale, formats)
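# `format` is a substitution list such as [0, " ", 1] (see the moduledoc):
# index 0 is replaced by the number string and index 1 by the currency
# string, yielding an iolist like ["123", " ", "US dollars"].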
Substitution.substitute([number_string, currency_string], format)
|> :erlang.iolist_to_binary()
end
defp currency_string(number, currency, cardinal, locale, backend) when is_currency(currency) do
{:ok, currency} = Currency.currency_for_code(currency, backend, locale: locale)
cardinal.pluralize(number, locale, currency.count)
end
defp currency_string(_number, currency, _cardinal, _locale, _backend) when is_digital_token(currency) do
{:ok, currency_string} = DigitalToken.long_name(currency)
currency_string
end
defp set_fractional_digits(options, currency, nil) when is_currency(currency) do
Map.put(options, :fractional_digits, 0)
end
defp set_fractional_digits(options, _currency, _digits) do
options
end
defp long_options(options) do
options
|> Map.put(:format, :decimal_long)
|> Map.put(:currency, nil)
end
defp decimal_options(options, backend) do
currency_format = Currency.currency_format_from_locale(options.locale)
options = Map.put(options, :format, currency_format)
Options.resolve_standard_format(options, backend)
end
end
| lib/cldr/number/formatter/currency_formatter.ex | 0.891445 | 0.77675 | currency_formatter.ex | starcoder |
defmodule Whistle.Program do
alias Whistle.Socket
require Whistle.Html
alias Whistle.Program.Instance
@json_library Application.get_env(:whistle, :json_library, Jason)
defmacro __using__(_opts) do
quote do
@behaviour Whistle.Program
alias Whistle.Html
require Whistle.Html
import Whistle.Socket
import Whistle.Html.Parser, only: [sigil_H: 2]
end
end
@doc """
Receives parameters from the route, it should return the initial state or an error.
The parameters are taken from the program route:
```
defmodule Router do
use Whistle.Router, path: "/ws"
match("chat:*room", ChatProgram, %{"other" => true})
end
defmodule ChatProgram do
use Program
# when joining `chat:1`
def init(%{"room" => "1", "other" => true}) do
{:ok, %{}}
end
end
```
"""
@callback init(map()) :: {:ok, Whistle.state()} | {:error, any()}
@callback route(list(String.t()), Whistle.state(), Whistle.Session.t(), map()) ::
{:ok, Whistle.state()} | {:error, any()}
@doc """
The terminate callback will be called when the program instance shuts down; it will receive the state.
Remember that Programs will be automatically respawned if they crash, so there is no need to try to restart them yourself. This callback could be useful to serialize the state and then load it later in the `init/1` callback.
"""
@callback terminate(Whistle.state()) :: any()
@doc """
The authorize callback will be called on a running program when a client tries to access it.
It receives the current state, the client's socket and the client's params, and must return an updated socket and an initial session, or an error with a reason.
You could send a bearer token and verify it here to authorize a client.
```
def authorize(state, socket, %{"token" => token}) do
case MyApp.Guardian.decode_and_verify(token) do
{:ok, claims} ->
{:ok, socket, claims}
{:error, reason} ->
{:error, reason}
end
end
```
"""
@callback authorize(Whistle.state(), Socket.t(), map()) ::
{:ok, Socket.t(), Whistle.Session.t()} | {:error, any()}
@doc """
The update callback is called every time an event handler is triggered; it will receive the message, the current state and the session of the client who triggered it.
```
defmodule CounterProgram do
use Program
def init(_args) do
{:ok, 0}
end
def update(:increase, state, session) do
{:ok, state + 1, session}
end
def view(state, session) do
Html.div([], [
Html.p([], to_string(state)),
Html.button([on: [click: :increase]], "Increase")
])
end
end
```
"""
@callback update(Whistle.message(), Whistle.state(), Socket.Session.t()) ::
{:ok, Whistle.state(), Whistle.Session.t()}
@doc """
`handle_info/2` is similar to how `GenServer.handle_info/2` works: it will receive a message and the current state, and it expects a new updated state in return. This callback can be triggered by sending Erlang messages to the program instance.
```
defmodule TimeProgram do
use Program
def init(_args) do
Process.send_after(self(), :tick, 1_000)
{:ok, DateTime.utc_now()}
end
def handle_info(:tick, state) do
Process.send_after(self(), :tick, 1_000)
{:ok, DateTime.utc_now()}
end
def view(time, session) do
Html.p([], DateTime.to_string(time))
end
end
```
"""
@callback handle_info(any(), Whistle.state()) :: {:ok, Whistle.state()}
@doc """
The view receives the programs state and the session of the client we are rendering the view for.
It must return a Dom tree, which looks like this:
```
# {key, {tag, {attributes, children}}}
{0, {"div", {[class: "red"], [
{0, {"p", {[], ["some text"]}}
]}}}
```
You can use the `Whistle.Html` helpers to generate this tree:
```
Html.div([class: "red"], [
Html.p([], "some text")
])
```
Or the `Whistle.Html.Parser.sigil_H/2` if you want to write plain HTML:
```
text = "some text"
~H"\""
<div class="red">
<p>{{ text }}</p>
</div>
"\""
```
Both the HTML helpers and the sigil will expand to a DOM tree at compile time.
"""
@callback view(Whistle.state(), Whistle.Session.t()) :: Whistle.Html.Dom.t()
@optional_callbacks [handle_info: 2, authorize: 3, terminate: 1, route: 4]
defp authorize(conn, router, program_name, params) do
channel_path = String.split(program_name, ":")
socket = Whistle.Socket.new(conn)
with {:ok, program, program_params} <- router.__match(channel_path),
{:ok, _} <-
Whistle.Program.Registry.ensure_started(router, program_name, program, program_params) do
Whistle.Program.Instance.authorize(
router,
program_name,
socket,
Map.merge(program_params, params)
)
end
end
defp render(conn, router, program_name, params) do
case authorize(conn, router, program_name, params) do
{:ok, _new_socket, new_session} ->
Whistle.Program.Instance.view(router, program_name, new_session)
end
end
@doc """
A fullscreen `Whistle.Program` renders the whole HTML document, this is useful if you want to also handle navigation in your program through the `Whistle.Program.route/4` callback.
When the JavaScript library executes, it will automatically connect to the Program and become interactive, giving you both a static HTTP page and an interactive web page for free.
Remember to include the JavaScript library via a `<script>` tag or module import.
Call in a `Plug` or a `Phoenix.Controller` action:
```
def index(conn, _opts) do
fullscreen(conn, MyProgramRouter, "counter")
end
```
Example of a program:
```
def route(["user", user_id], _state, session, _query_params) do
{:ok, %{session | route: {:user, user_id}}}
end
def view(state, %{route: {:user, user_id}}) do
view_document("You're viewing user ##\{user_id}")
end
def view(state, session) do
view_document("It Works!")
end
defp view_document(body) do
~H"\""
<html>
<head>
<title>My Whistle App</title>
<script src="/js/whistle.js"></script>
</head>
<body>
<h1>It works!</h1>
</body>
</html>
"\""
end
```
"""
def fullscreen(
conn = %{query_params: query_params, path_info: path},
router,
program_name,
params \\ %{}
) do
encoded_params =
params
|> @json_library.encode!()
|> Plug.HTML.html_escape()
with {:authorize, {:ok, _new_socket, new_session}} <-
{:authorize, authorize(conn, router, program_name, params)},
{:route, {:ok, routed_session}} <-
{:route, Instance.route(router, program_name, new_session, path, query_params)},
{:view, {0, {"html", {attributes, children}}}} <-
{:view, Instance.view(router, program_name, routed_session)} do
new_attributes =
attributes
|> Keyword.put(:"data-whistle-socket", Whistle.Router.url(conn, router))
|> Keyword.put(:"data-whistle-program", program_name)
|> Keyword.put(:"data-whistle-params", encoded_params)
new_children =
Enum.map(children, fn child ->
embed_programs(conn, router, child)
end)
view = Whistle.Html.Dom.node_to_string({0, Whistle.Html.html(new_attributes, new_children)})
resp = "<!DOCTYPE html>#{view}"
conn
|> Plug.Conn.put_resp_content_type("text/html")
|> Plug.Conn.send_resp(200, resp)
else
{:authorize, {:error, :not_found}} ->
# TODO: make this configurable
Plug.Conn.send_resp(conn, 403, "Forbidden")
{:route, {:error, :not_found}} ->
# TODO: make this configurable
Plug.Conn.send_resp(conn, 404, "Not found")
{:view, _} ->
raise """
Fullscreen programs must return an <html> tag as its root element.
"""
end
end
@doc """
Use `embed/4` to embed a Program in a view. It will render the view in plain HTML. When the JavaScript library executes, it will automatically connect to the Program and become interactive.
In a Phoenix template:
```html
<!-- lib/my_app_web/templates/page/index.html.eex -->
<div>
<%= embed(conn, MyProgramRouter, "counter") |> raw %>
</div>
```
In a `Plug` or a `Phoenix.Controller` action:
```
def index(conn, _opts) do
resp = embed(conn, MyProgramRouter, "counter")
conn
|> Plug.Conn.put_resp_content_type("text/html")
|> Plug.Conn.send_resp(200, resp)
end
```
"""
def embed(conn, router, program_name, params \\ %{}) do
embed_programs(conn, router, {0, Whistle.Html.program(program_name, params)})
|> Whistle.Html.Dom.node_to_string()
end
defp embed_programs(conn, router, {key, {:program, {name, params}}}) do
encoded_params =
params
|> @json_library.encode!()
|> Plug.HTML.html_escape()
{0, initial_view} = render(conn, router, name, params)
attributes = [
{"data-whistle-socket", Whistle.Router.url(conn, router)},
{"data-whistle-program", name},
{"data-whistle-params", encoded_params}
]
{key, Whistle.Html.node("whistle-program", attributes, [initial_view])}
end
defp embed_programs(_conn, _router, node = {_key, text}) when is_binary(text) do
node
end
defp embed_programs(conn, router, {key, {tag, {attributes, children}}}) do
new_children =
Enum.map(children, fn child ->
embed_programs(conn, router, child)
end)
{key, Whistle.Html.node(tag, attributes, new_children)}
end
end
| lib/whistle/program.ex | 0.80837 | 0.799931 | program.ex | starcoder |
defmodule Pushex.Validators.Type do
@moduledoc """
Ensure the value has the correct type.
The type can be provided in the following form:
* `type`: An atom representing the type.
It can be any of the `TYPE` in Elixir `is_TYPE` functions.
`:any` is treated as a special case and accepts any type.
* `[type]`: A list of types as described above. When a list is passed,
the value will be valid if it matches any of the types in the list.
* `type: inner_type`: Type should be either `map`, `list`, `tuple`, or `function`.
The usages are as follows:
* `function: arity`: checks if the function has the correct arity.
* `map: {key_type, value_type}`: checks keys and values in the map with the provided types.
* `list: type`: checks every element in the list for the given types.
* `tuple: {type_a, type_b}`: checks each element of the tuple with the provided types;
the types tuple should be the same size as the tuple itself.
## Options
* `:is`: Required. The type of the value, in the format described above.
* `:message`: Optional. A custom error message. May be in EEx format
and use the fields described in "Custom Error Messages," below.
## Examples
iex> Vex.Validators.Type.validate(1, is: :binary)
{:error, "must be of type :binary"}
iex> Vex.Validators.Type.validate(1, is: :number)
:ok
iex> Vex.Validators.Type.validate(nil, is: nil)
:ok
iex> Vex.Validators.Type.validate(1, is: :integer)
:ok
iex> Vex.Validators.Type.validate("foo"", is: :binary)
:ok
iex> Vex.Validators.Type.validate([1, 2, 3], is: [list: :integer])
:ok
iex> Vex.Validators.Type.validate(%{:a => 1, "b" => 2, 3 => 4}, is: :map)
:ok
iex> Vex.Validators.Type.validate(%{:a => 1, "b" => 2}, is: [map: {[:binary, :atom], :any}])
:ok
iex> Vex.Validators.Type.validate(%{"b" => 2, 3 => 4}, is: [map: {[:binary, :atom], :any}])
{:error, "must be of type {:map, {[:binary, :atom], :any}}"}
## Custom Error Messages
Custom error messages (in EEx format), provided as :message, can use the following values:
iex> Vex.Validators.Type.__validator__(:message_fields)
[value: "The bad value"]
An example:
iex> Vex.Validators.Type.validate([1], is: :binary, message: "<%= inspect value %> is not a string")
{:error, "[1] is not a string"}
"""
use Vex.Validator
@message_fields [value: "The bad value"]
@doc """
Validates the value against the given type.
See the module documentation for more info.
"""
@spec validate(any, Keyword.t) :: :ok | {:error, String.t}
def validate(value, options) when is_list(options) do
acceptable_types = Keyword.get(options, :is, [])
if do_validate(value, acceptable_types) do
:ok
else
message = "must be of type #{acceptable_type_str(acceptable_types)}"
{:error, message(options, message, value: value)}
end
end
# Allow any type, useful for composed types
defp do_validate(_value, :any), do: true
# Handle nil
defp do_validate(nil, nil), do: true
defp do_validate(nil, :atom), do: false
# Simple types
defp do_validate(value, :atom) when is_atom(value), do: true
defp do_validate(value, :number) when is_number(value), do: true
defp do_validate(value, :integer) when is_integer(value), do: true
defp do_validate(value, :float) when is_float(value), do: true
defp do_validate(value, :boolean) when is_boolean(value), do: true
defp do_validate(value, :binary) when is_binary(value), do: true
defp do_validate(value, :bitstring) when is_bitstring(value), do: true
defp do_validate(value, :tuple) when is_tuple(value), do: true
defp do_validate(value, :list) when is_list(value), do: true
defp do_validate(value, :map) when is_map(value), do: true
defp do_validate(value, :function) when is_function(value), do: true
defp do_validate(value, :reference) when is_reference(value), do: true
defp do_validate(value, :port) when is_port(value), do: true
defp do_validate(value, :pid) when is_pid(value), do: true
defp do_validate(%{__struct__: module}, module), do: true
# Complex types
defp do_validate(value, :string) when is_binary(value) do
String.valid?(value)
end
defp do_validate(value, function: arity) when is_function(value, arity), do: true
defp do_validate(list, list: type) when is_list(list) do
Enum.all?(list, &(do_validate(&1, type)))
end
defp do_validate(value, map: {key_type, value_type}) when is_map(value) do
Enum.all? value, fn {k, v} ->
do_validate(k, key_type) && do_validate(v, value_type)
end
end
defp do_validate(tuple, tuple: types)
when is_tuple(tuple) and is_tuple(types) and tuple_size(tuple) == tuple_size(types) do
Enum.all? Enum.zip(Tuple.to_list(tuple), Tuple.to_list(types)), fn {value, type} ->
do_validate(value, type)
end
end
# Accept multiple types
defp do_validate(value, acceptable_types) when is_list(acceptable_types) do
Enum.any?(acceptable_types, &(do_validate(value, &1)))
end
# Fail if nothing above matched
defp do_validate(_value, _type), do: false
defp acceptable_type_str([acceptable_type]), do: inspect(acceptable_type)
defp acceptable_type_str(acceptable_types) when is_list(acceptable_types) do
last_type = acceptable_types |> List.last |> inspect
but_last =
acceptable_types
|> Enum.take(Enum.count(acceptable_types) - 1)
|> Enum.map(&inspect/1)
|> Enum.join(", ")
"#{but_last} or #{last_type}"
end
defp acceptable_type_str(acceptable_type), do: inspect(acceptable_type)
end
| lib/pushex/validators/type.ex | 0.806434 | 0.7495 | type.ex | starcoder |
defmodule LogjamAgent.Channel do
alias LogjamAgent.Instrumentation
@moduledoc """
Use this module if you want to activate Logjam reporting for `Phoenix` channel implementations.
This will automatically instrument the `join`, `handle_in` and `handle_out` functions.
Join is called when a client attempts to "join" a channel. `handle_in` is called whenever
a message is sent to a channel server from a socket client. `handle_out` is called
after a channel broadcast in the `Phoenix` application and before a message is forwarded to a
socket client.
__Important !!!__: Please remove the `use Phoenix.Channel` from your module when using
this functionality. This module takes care of the necessary code generation.
## Example:
```elixir
defmodule UserChannel do
use LogjamAgent.Channel
def join(_topic, _params, socket) do
{:ok, socket}
end
def handle_in(_topic, _payload, socket) do
{:noreply, socket}
end
def handle_out(_topic, _payload, socket) do
{:noreply, socket}
end
end
```
"""
defmacro __using__(opts \\ []) do
quote do
opts = unquote(opts)
@behaviour Phoenix.Channel
@before_compile unquote(__MODULE__)
@on_definition unquote(__MODULE__)
@phoenix_intercepts []
@logjam_assigns_to_log []
@phoenix_log_join Keyword.get(opts, :log_join, :info)
@phoenix_log_handle_in Keyword.get(opts, :log_handle_in, :debug)
import unquote(__MODULE__)
import Phoenix.Socket, only: [assign: 3]
import Phoenix.Channel, except: [intercept: 1]
require Logger
Module.register_attribute(__MODULE__, :logjam_enabled_functions, accumulate: true)
def __socket__(:private) do
%{log_join: @phoenix_log_join,
log_handle_in: @phoenix_log_handle_in}
end
def code_change(_old, socket, _extra), do: {:ok, socket}
def handle_info(_message, socket), do: {:noreply, socket}
def terminate(_reason, _socket), do: :ok
defoverridable code_change: 3, handle_info: 2, terminate: 2
end
end
@supported_functions [:join, :handle_in, :handle_out]
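# Capture the bodies of join/3, handle_in/3 and handle_out/3 as they are
# defined, so that __before_compile__/1 can re-emit them wrapped in Logjam
# instrumentation.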
def __on_definition__(env, kind, name, args, guards, body)
def __on_definition__(_env, :def, name, _args, _guards, nil) when name in @supported_functions, do: nil
def __on_definition__(%{module: mod}, :def, name, args, guards, [do: body]) when name in @supported_functions and length(args) == 3 do
definition = %Instrumentation.Definition{
name: name,
args: args,
guards: guards,
body: body
}
Module.put_attribute(mod, :logjam_enabled_functions, definition)
end
def __on_definition__(_env, _kind, _name, _args, _guards, _body), do: nil
defmacro intercept(events) do
quote do
@phoenix_intercepts unquote(events)
end
end
@doc """
Allows you to explicitly add values from
the socket `assigns` map to the map
of request headers which is sent to logjam.
## Example:
```elixir
defmodule UserChannel do
use LogjamAgent.Channel
log_assigns [:auth_token]
def handle_in(_topic, _payload, socket) do
{:noreply, socket}
end
end
```
"""
defmacro log_assigns(assigns) when is_list(assigns) do
quote do
@logjam_assigns_to_log unquote(assigns)
end
end
defmacro __before_compile__(%{module: mod}) do
logjam_enabled_functions = Module.get_attribute(mod, :logjam_enabled_functions)
logjam_assigns_to_log = Module.get_attribute(mod, :logjam_assigns_to_log)
instrumented_functions = Instrumentation.instrument_all(
mod,
logjam_enabled_functions,
Instrumentation.Channel,
log_assigns: logjam_assigns_to_log)
quote do
def __intercepts__, do: @phoenix_intercepts
unquote_splicing(instrumented_functions)
end
end
end
| lib/logjam_agent/channel.ex | 0.893733 | 0.773815 | channel.ex | starcoder |
defmodule EarmarkTagCloud do
@moduledoc ~S"""
- Make Tag Clouds from a simple DSL added as annotations to paragraphs
e.g.
```
12 16 100 # translates to style="color: #000000; font-size: 16pt; font-weight: 100;"
#ffdd00 3em bb # style="color: #ffdd00; font-size: 3em; font-weight: 800;"
```
- Elixir Tools to create Tag clouds
iex(1)> dsl_to_attributes("12 16 100")
[{"style", "color: #000000; font-size: 16pt; font-weight: 100;"}]
- Earmark Integration (needs v1.4.16-pre2 or greater)
The most general way to integrate with Earmark is with `make_tag_clouds`
iex(2)> markdown = [
...(2)> "Elixir %tc: 12 20 800", "",
...(2)> "Erlang %tc: 10/red 2em", "",
...(2)> "Phoenix %tc: 8/sandybrown" ]
...(2)> render_html(markdown)
...(2)> markdown
...(2)> |> Earmark.as_ast!(annotations: "%tc:", inner_html: true)
...(2)> |> make_tag_clouds
...(2)> |> Earmark.transform
"<span style=\"color: #000000; font-size: 20pt; font-weight: 800;\">\nElixir </span>\n<span style=\"color: #ff7171; font-size: 2em;\">\nErlang </span>\n<span style=\"color: #ed6d00;\">\nPhoenix </span>\n"
We can render to html directly with `render_html`, which is a shortcut for the above
iex(3)> markdown = [
...(3)> "Elixir %tc: 12 20 800", "",
...(3)> "Erlang %tc: 10/red 2em", "",
...(3)> "Phoenix %tc: 8/sandybrown" ]
...(3)> render_html(markdown)
"<span style=\"color: #000000; font-size: 20pt; font-weight: 800;\">\nElixir </span>\n<span style=\"color: #ff7171; font-size: 2em;\">\nErlang </span>\n<span style=\"color: #ed6d00;\">\nPhoenix </span>\n"
Or just transform the AST
iex(4)> markdown = [
...(4)> "Elixir %tc: 12 20 800", "",
...(4)> "Erlang %tc: 10/red 2em", "",
...(4)> "Phoenix %tc: 8/sandybrown" ]
...(4)> render_ast(markdown)
[
{"span", [{"style", "color: #000000; font-size: 20pt; font-weight: 800;"}], ["Elixir "], %{annotation: "%tc: 12 20 800"}},
{"span", [{"style", "color: #ff7171; font-size: 2em;"}], ["Erlang "], %{annotation: "%tc: 10/red 2em"}},
{"span", [{"style", "color: #ed6d00;"}], ["Phoenix "], %{annotation: "%tc: 8/sandybrown"}}
]
which is a shortcut for this
iex(5)> markdown = [
...(5)> "Elixir %tc: 12 20 800", "",
...(5)> "Erlang %tc: 10/red 2em", "",
...(5)> "Phoenix %tc: 8/sandybrown" ]
...(5)> markdown
...(5)> |> Earmark.as_ast!(annotations: "%tc:", inner_html: true)
...(5)> |> make_tag_clouds
[
{"span", [{"style", "color: #000000; font-size: 20pt; font-weight: 800;"}], ["Elixir "], %{annotation: "%tc: 12 20 800"}},
{"span", [{"style", "color: #ff7171; font-size: 2em;"}], ["Erlang "], %{annotation: "%tc: 10/red 2em"}},
{"span", [{"style", "color: #ed6d00;"}], ["Phoenix "], %{annotation: "%tc: 8/sandybrown"}}
]
Of course, blocks that are not annotated are not affected
iex(6)> markdown = [
...(6)> "Elixir %tc: 12 20 800", "",
...(6)> "Erlang", "",
...(6)> "Phoenix %tc: 8/sandybrown" ]
...(6)> render_ast(markdown)
[
{"span", [{"style", "color: #000000; font-size: 20pt; font-weight: 800;"}], ["Elixir "], %{annotation: "%tc: 12 20 800"}},
{"p", [], ["Erlang"], %{}},
{"span", [{"style", "color: #ed6d00;"}], ["Phoenix "], %{annotation: "%tc: 8/sandybrown"}}
]
And different annotations can be used, but then `make_tag_clouds` becomes a _NOP_
iex(7)> markdown = [
...(7)> "Elixir %%%: 12 20 800", "",
...(7)> "Erlang %%%: 10/red 2em", "",
...(7)> "Phoenix %%%: 8/sandybrown" ]
...(7)> markdown
...(7)> |> Earmark.as_ast!(annotations: "%%%:", inner_html: true)
...(7)> |> make_tag_clouds
[
{"p", [], ["Elixir "], %{annotation: "%%%: 12 20 800"}},
{"p", [], ["Erlang "], %{annotation: "%%%: 10/red 2em"}},
{"p", [], ["Phoenix "], %{annotation: "%%%: 8/sandybrown"}}
]
"""
defdelegate dsl_to_attributes(description), to: TagCloud.Compiler, as: :ast_style
defdelegate make_tag_clouds(ast, options \\ [annotation: "%tc:"]), to: EarmarkTagCloud.EarmarkAst
defdelegate render_ast(input), to: EarmarkTagCloud.EarmarkAst
defdelegate render_html(input), to: EarmarkTagCloud.EarmarkAst
@doc """
A convenience method to access this library's version
iex(8)> {:ok, _} = Version.parse(version())
"""
@spec version :: binary()
def version do
:application.ensure_started(:earmark_tag_cloud)
with {:ok, version} = :application.get_key(:earmark_tag_cloud, :vsn), do: to_string(version)
end
end
# SPDX-License-Identifier: Apache-2.0
| lib/earmark_tag_cloud.ex | 0.726329 | 0.812682 | earmark_tag_cloud.ex | starcoder |
defmodule Talib.Indicator do
alias Talib.MovingAverage
alias Talib.Utility
@moduledoc ~S"""
Module containing indicator functions, such as the RSI.
"""
@doc """
Gets the MACD of a list.
Version: 1.0
Source: http://stockcharts.com/school/doku.php?id=chart_school:technical_indicators:moving_average_convergence_divergence_macd
Audited by:
| Name | Title |
| :----------- | :---------------- |
| | |
"""
@spec macd([number], integer, integer, integer) :: [{number, number}, ...] | nil
def macd(data, long_period \\ 26, short_period \\ 12, signal_period \\ 9),
do: calculate_macd(data, long_period, short_period, signal_period)
@spec calculate_macd([number], integer, integer, integer) ::
[{number, number}, ...] | nil
defp calculate_macd([], _long, _short, _signal), do: nil
defp calculate_macd(_data, 0, _short, _signal), do: nil
defp calculate_macd(_data, _long, 0, _signal), do: nil
defp calculate_macd(_data, _long, _short, 0), do: nil
defp calculate_macd(data, long, short, signal)
when length(data) < long or
length(data) < short or
length(data) < signal do
nil
end
defp calculate_macd(data, long, short, signal) do
long_ema = MovingAverage.exponential(data, long)
short_ema = MovingAverage.exponential(data, short)
signal_ema = MovingAverage.exponential(data, signal)
short_long_ema = Enum.zip([long_ema, short_ema, signal_ema])
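# MACD line = short-period EMA - long-period EMA; each emitted tuple pairs
# the MACD value with the corresponding signal-line value.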
for {long_average, short_average, signal_average} <- short_long_ema do
{short_average - long_average, signal_average}
end
end
@doc """
Gets the RSI of a list.
Version: 1.0
Source: http://stockcharts.com/school/doku.php?id=chart_school:technical_indicators:relative_strength_index_rsi
Audited by:
| Name | Title |
| :----------- | :---------------- |
| | |
"""
@spec rsi([number], integer) :: [number, ...] | nil
def rsi(data, period \\ 14), do: calculate_rsi(data, period)
@spec calculate_rsi([number], integer) :: [number, ...] | nil
def calculate_rsi(data, period) when length(data) <= period, do: nil
def calculate_rsi(_data, 0), do: nil
def calculate_rsi(data, period) do
avg_gain = data
|> Utility.gain
|> MovingAverage.smoothed(period)
avg_loss = data
|> Utility.loss
|> MovingAverage.smoothed(period)
avg_gain_loss = Enum.zip(avg_gain, avg_loss)
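# Wilder's RSI: RS = average gain / average loss, RSI = 100 - 100 / (1 + RS);
# a zero average loss pins RSI at 100.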
for {average_gain, average_loss} <- avg_gain_loss do
relative_strength = case average_loss do
0 -> 100
_ -> average_gain / average_loss
end
100 - 100 / (relative_strength + 1)
end
end
end
| lib/talib/indicator.ex | 0.845177 | 0.561004 | indicator.ex | starcoder |
defmodule Aoc2021.Day11 do
@moduledoc """
See https://adventofcode.com/2021/day/11
"""
defmodule Reader do
@moduledoc false
def read_input(path) do
path
|> File.stream!()
|> parse_map()
end
def parse_map(stream) do
{map, _} =
stream
|> Stream.map(&String.trim/1)
|> Stream.reject(&empty_line?/1)
|> Stream.map(&String.graphemes/1)
|> Stream.map(&parse_numbers/1)
|> Enum.reduce({%{}, 0}, &parse_line/2)
map
end
defp empty_line?(""), do: true
defp empty_line?(_), do: false
defp parse_numbers(line) do
Enum.map(line, &String.to_integer/1)
end
defp parse_line(line, {map, row}) do
{new_map, _} =
Enum.reduce(line, {map, 0}, fn x, {map, col} ->
{Map.put(map, {row, col}, x), col + 1}
end)
{new_map, row + 1}
end
end
@spec solve_part1() :: non_neg_integer()
@spec solve_part1(Path.t()) :: non_neg_integer()
def solve_part1(path \\ "priv/day11/input.txt") do
map = Reader.read_input(path)
{_, count} =
Enum.reduce(1..100, {map, 0}, fn _, {map, flash_count} ->
new_map = step(map)
{reset_flashed(new_map), flash_count + count_flashes(new_map)}
end)
count
end
defp count_flashes(map) do
Enum.count(map, fn {_, v} -> v > 9 end)
end
defp reset_flashed(map) do
map
|> Enum.map(fn
{k, v} when v > 9 -> {k, 0}
{k, v} -> {k, v}
end)
|> Map.new()
end
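# One step: every octopus's energy increases by 1; any octopus that rises
# above 9 flashes and cascades +1 to its eight neighbours. Flashed octopuses
# keep energies above 9 until reset_flashed/1 zeroes them.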
defp step(map) do
{new_map, _} =
map
|> Map.keys()
|> Enum.reduce({map, MapSet.new()}, &increase_energy/2)
new_map
end
defp increase_energy(pos, {map, seen}) do
new_seen = MapSet.put(seen, pos)
case Map.get(map, pos, :outside) do
:outside ->
{map, seen}
x when x > 9 ->
# already flashed, no change
{map, new_seen}
9 = x ->
# flash! recurse to neighbours
new_map = Map.put(map, pos, x + 1)
pos
|> neighbours()
|> Enum.reduce({new_map, new_seen}, &increase_energy/2)
x when x < 9 ->
{Map.put(map, pos, x + 1), new_seen}
end
end
defp neighbours({row, col}) do
for dr <- -1..1, dc <- -1..1, {dr, dc} != {0, 0} do
{row + dr, col + dc}
end
end
@spec solve_part2() :: non_neg_integer()
@spec solve_part2(Path.t()) :: non_neg_integer()
def solve_part2(path \\ "priv/day11/input.txt") do
map = Reader.read_input(path)
first_all_flash_step(map)
end
defp first_all_flash_step(map) do
first_all_flash_step(map, 0, all_flash?(map))
end
defp first_all_flash_step(_, s, true), do: s
defp first_all_flash_step(map, s, _) do
new_map = step(map)
new_map
|> reset_flashed()
|> first_all_flash_step(s + 1, all_flash?(new_map))
end
def all_flash?(map) do
count_flashes(map) == map_size(map)
end
end
| lib/aoc2021/day11.ex | 0.697815 | 0.500793 | day11.ex | starcoder |
defmodule Toolshed.Log do
@moduledoc """
Utilities for attaching and detaching to the log
These utilities configure Elixir's console backend to attach
to the current group leader. This makes it work over `ssh` sessions
and play well with the IEx prompt.
"""
@doc """
Attach the current session to the Elixir logger
This forwards incoming log messages to the terminal. Call `detach/0` to stop
the messages.
Behind the scenes, this uses Elixir's built-in console logger and can be
configured similarly. See the [Logger console backend
documentation](https://hexdocs.pm/logger/Logger.html#module-console-backend)
for details. The following are useful options:
* `:level` - the minimum log level to report. E.g., specify `level: :warning`
to only see warnings and errors.
* `:metadata` - a list of metadata keys to show or `:all`
Unspecified options use either the console backend's default or those found
in the application environment for the `:console` Logger backend.
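## Examples
A sketch; both options are documented above and pass through to the
console backend:
    log_attach(level: :warning)
    log_attach(metadata: [:module, :line])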
"""
@spec log_attach(keyword()) :: {:error, any} | {:ok, :undefined | pid}
def log_attach(options \\ []) do
case Process.get(__MODULE__) do
nil ->
all_options = Keyword.put(options, :device, Process.group_leader())
backend = {Logger.Backends.Console, all_options}
{:ok, pid} = GenServer.start(Toolshed.Log.Watcher, {Process.group_leader(), backend})
Process.put(__MODULE__, {pid, backend})
Logger.add_backend(backend)
_other ->
{:error, :detach_first}
end
end
@doc """
Detach the current session from the Elixir logger
"""
@spec log_detach :: :ok | {:error, :not_attached | :not_found}
def log_detach() do
case Process.get(__MODULE__) do
nil ->
{:error, :not_attached}
{pid, backend} ->
Process.delete(__MODULE__)
GenServer.stop(pid)
Logger.remove_backend(backend)
end
end
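# Monitors the group leader of the attaching process and removes the console
# backend automatically if that process exits (e.g. the ssh session closes).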
defmodule Watcher do
@moduledoc false
use GenServer
@impl GenServer
def init({watch_pid, backend}) do
Process.monitor(watch_pid)
{:ok, backend}
end
@impl GenServer
def handle_info({:DOWN, _ref, :process, _pid, _reason}, backend) do
_ = Logger.remove_backend(backend)
{:stop, :normal, backend}
end
end
end
| lib/toolshed/log.ex | 0.63409 | 0.464841 | log.ex | starcoder |
defmodule Rex2048.Game do
defstruct [:board, :score]
alias Rex2048.Board
alias Rex2048.Game
@doc """
iex> Rex2048.Game.init(2)
%Rex2048.Game{board: [2, 2, 0, 0], score: 0}
iex> Rex2048.Game.init(3)
%Rex2048.Game{board: [2, 0, 0, 0, 0, 0, 0, 0, 2], score: 0}
"""
def init(size) when size > 1 do
board = Board.empty(size)
|> Board.insert_at_random
|> Board.insert_at_random
%Game{board: board, score: 0}
end
@doc """
iex> board = %Rex2048.Game{board: [0, 2, 0, 4, 2, 4, 4, 4, 4], score: 0}
...> Rex2048.Game.move(board, :left)
%Rex2048.Game{board: [2, 2, 0, 4, 2, 4, 8, 4, 0], score: 8}
iex> board = %Rex2048.Game{board: [0, 2, 0, 4, 2, 4, 4, 4, 4], score: 8}
...> Rex2048.Game.move(board, :right)
%Rex2048.Game{board: [2, 0, 2, 4, 2, 4, 0, 4, 8], score: 16}
iex> board = %Rex2048.Game{board: [8, 4, 8, 2, 4, 0, 0, 0, 4], score: 10}
...> Rex2048.Game.move(board, :up)
%Rex2048.Game{board: [8, 8, 8, 2, 2, 4, 0, 0, 0], score: 18}
iex> board = %Rex2048.Game{board: [0, 2, 0, 4, 2, 4, 4, 4, 4], score: 10}
...> Rex2048.Game.move(board, :down)
%Rex2048.Game{board: [2, 0, 0, 0, 4, 0, 8, 4, 8], score: 30}
iex> board = %Rex2048.Game{board: [0, 0, 2, 2], score: 0}
...> Rex2048.Game.move(board, :down)
%Rex2048.Game{board: [0, 0, 2, 2], score: 0}
"""
def move(%Game{board: board, score: score}, direction) do
updated_board = Board.push(board, direction)
if updated_board == board do
%Game{board: board, score: score}
else
points = Board.calculate_points(board, updated_board)
updated_board = Board.insert_at_random(updated_board)
%Game{board: updated_board, score: score + points}
end
end
@doc """
iex> Rex2048.Game.won?(%Rex2048.Game{board: [2,0,0,0], score: 0})
false
iex> Rex2048.Game.won?(%Rex2048.Game{board: [2048,2,4,4], score: 0})
true
"""
def won?(%Game{board: board, score: _}) do
Board.reached_2048?(board)
end
@doc """
iex> Rex2048.Game.lost?(%Rex2048.Game{board: [2,0,0,0], score: 0})
false
iex> Rex2048.Game.lost?(%Rex2048.Game{board: [2,4,8,16], score: 0})
true
"""
def lost?(%Game{board: board, score: _}) do
!Board.can_move?(board)
end
end
defimpl String.Chars, for: Rex2048.Game do
def to_string(%Rex2048.Game{board: board, score: score}) do
stringified_board = board
|> Enum.map(fn number ->
number
|> tile_to_string
|> String.pad_leading(4)
end)
|> Rex2048.Board.rows
|> Enum.join("\r\n")
stringified_board <> "\r\n\r\nScore: #{score}"
end
defp tile_to_string(0), do: "."
defp tile_to_string(1024), do: "1k"
defp tile_to_string(2048), do: "2k"
defp tile_to_string(number), do: Integer.to_string(number)
end
| lib/rex2048/game.ex | 0.610337 | 0.671024 | game.ex | starcoder |
defmodule Militerm.ECS.EctoComponent do
@moduledoc """
The component service tracks component data for entities with a backing store in an Ecto repo.
For example, if you have a component named MyGame.Components.Health, you
can start a copy of it with:
Militerm.ECS.EctoComponent.start_link(name: MyGame.Components.Health)
Then, later, you can set or get the data for the component:
Militerm.ECS.EctoComponent.set(MyGame.Components.Health, entity_id, %{hp: 100, max_hp: 100})
%{hp: hp, max_hp: hp} = Militerm.ECS.Component.get(MyGame.Components.Health, entity_id)
Each component defines its persistence mechanisms through the `store/2`, `update/3`, `fetch/1`,
`delete/1`, and `clear/0` functions. These are required and do not have default definitions.
By default, the component uses the Ecto repo set in the militerm configuration.
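For example, a component backed by a custom schema (the module names and
option values here are illustrative):
    defmodule MyGame.Components.Health do
      use Militerm.ECS.EctoComponent,
        default: %{"hp" => 100},
        schema: MyGame.Schema.Health
    end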
"""
import Ecto.Query
@callback process_record(term) :: term
@callback primary_keys(term) :: Keyword.t() | map()
@callback write_data(term, term) :: term
@callback read_data(map) :: term
defmacro __using__(opts) do
default = Keyword.get(opts, :default)
repo = Keyword.get_lazy(opts, :repo, &Militerm.Config.repo/0)
schema = Keyword.fetch!(opts, :schema)
quote do
use Militerm.ECS.Component, unquote(opts)
@behaviour Militerm.ECS.EctoComponent
@schema unquote(schema)
@repo unquote(repo)
@impl true
def store(entity_id, data) do
Militerm.ECS.EctoComponent.ecto_store(
primary_keys(entity_id),
data,
@repo,
@schema,
__MODULE__
)
end
@impl true
def update(entity_id, nil, new_data) do
Militerm.ECS.EctoComponent.ecto_store(
primary_keys(entity_id),
new_data,
@repo,
@schema,
__MODULE__
)
end
def update(entity_id, old_data, new_data) do
Militerm.ECS.EctoComponent.ecto_update(
primary_keys(entity_id),
old_data,
new_data,
@repo,
@schema,
__MODULE__
)
end
@impl true
def fetch(entity_id) do
entity_id
|> primary_keys()
|> Militerm.ECS.EctoComponent.ecto_fetch(@repo, @schema)
|> __MODULE__.read_data()
|> __MODULE__.process_record()
end
@impl true
def delete(entity_id),
do: Militerm.ECS.EctoComponent.ecto_delete(primary_keys(entity_id), @repo, @schema)
@impl true
def clear(), do: Militerm.ECS.EctoComponent.ecto_clear(@repo, @schema)
def process_record(nil), do: @default
def process_record(record), do: record
def primary_keys(entity_id), do: [entity_id: entity_id]
def write_data(map, nil), do: map
def write_data(map, data), do: Map.put(map, :data, data)
def read_data(nil), do: nil
def read_data(map), do: Map.get(map, :data)
defoverridable process_record: 1, primary_keys: 1, write_data: 2, read_data: 1
end
end
def ecto_store(key, data, repo, schema, module) do
case ecto_fetch(key, repo, schema) do
nil ->
schema
|> struct
|> schema.changeset(
key
|> Enum.reduce(%{}, fn {k, v}, acc -> Map.put(acc, k, v) end)
|> module.write_data(data)
|> atoms_to_strings
)
|> repo.insert!()
record ->
ecto_update(key, record, data, repo, schema, module)
end
end
def ecto_update(key, _old_data, new_data, repo, schema, module) do
updates =
%{}
|> module.write_data(new_data)
|> strings_to_atoms()
|> Map.to_list()
key
|> Enum.reduce(schema, fn {k, v}, q ->
where(q, [i], field(i, ^k) == ^v)
end)
|> repo.update_all(set: updates)
end
def ecto_fetch(key, repo, schema) do
key
|> Enum.reduce(schema, fn {k, v}, q ->
where(q, [i], field(i, ^k) == ^v)
end)
|> repo.one
end
def ecto_delete(key, repo, schema) do
result =
key
|> Enum.reduce(schema, fn {k, v}, q ->
where(q, [i], field(i, ^k) == ^v)
end)
|> repo.delete_all
case result do
{:ok, _} -> :ok
_ -> :error
end
end
def ecto_clear(repo, schema) do
repo.delete_all(schema)
end
def atoms_to_strings(map) when is_map(map) do
map
|> Enum.map(fn {k, v} -> {atoms_to_strings(k), atoms_to_strings(v)} end)
|> Enum.into(%{})
end
def atoms_to_strings(list) when is_list(list) do
Enum.map(list, fn v -> atoms_to_strings(v) end)
end
def atoms_to_strings(atom) when is_atom(atom), do: to_string(atom)
def atoms_to_strings(otherwise), do: otherwise
def strings_to_atoms(atom) when is_atom(atom), do: atom
def strings_to_atoms(map) when is_map(map) do
map
|> Enum.map(fn {k, v} -> {strings_to_atoms(k), v} end)
|> Enum.into(%{})
end
def strings_to_atoms(string) when is_binary(string), do: String.to_atom(string)
def strings_to_atoms(otherwise), do: otherwise
end
| lib/militerm/ecs/ecto_component.ex | 0.769124 | 0.421135 | ecto_component.ex | starcoder |
defmodule Hobot do
@moduledoc """
A bot framework running on the Erlang VM (BEAM)
## Examples
For example, we can create an echo bot like this:
```
bot_name = "EchoBot"
adapter = %{module: Hobot.Plugin.Adapter.Shell}
handlers = [%{module: Hobot.Plugin.Handler.Echo, args: [["on_message"]]}]
{:ok, bot_pid} = Hobot.create(bot_name, adapter, handlers)
# Check behavior created bot
context = Hobot.context(bot_pid)
adapter_pid = Hobot.pid(context.adapter)
send(adapter_pid, "hello") # => "hello"
```
"""
def create(name, adapter, handlers, options \\ []) do
application_process = %Hobot.ApplicationProcess{
logger: Keyword.get(options, :logger, Logger),
name_registry: Keyword.get(options, :name_registry, Hobot.Application.name_registry()),
pub_sub: Keyword.get(options, :pub_sub, Hobot.Application.pub_sub()),
task_supervisor: Keyword.get(options, :task_supervisor, Hobot.Application.task_supervisor())
}
handlers_with_index = Enum.with_index(handlers)
Hobot.Supervisor.start_child(%{
name: name,
adapter: adapter,
handlers: handlers_with_index,
application_process: application_process
})
end
def context(value, name_registry \\ Hobot.Application.name_registry())
def context(name, name_registry) when is_binary(name) do
Agent.get({:via, Registry, {name_registry, Hobot.Bot.context(name)}}, & &1)
catch
:exit, _ ->
nil
end
def context(pid, name_registry) when is_pid(pid) do
children = Supervisor.which_children(pid)
case Enum.find(children, fn {process_name, _, _, _} ->
Regex.match?(~r"Context", process_name)
end) do
{context_process_name, _, _, _} ->
Agent.get({:via, Registry, {name_registry, context_process_name}}, & &1)
_ ->
nil
end
end
def pid(name, name_registry \\ Hobot.Application.name_registry()) do
case Registry.lookup(name_registry, name) do
[{pid, _}] -> pid
[] -> nil
end
end
def child_spec(opts \\ []) do
%{
id: __MODULE__,
start: {__MODULE__, :create, opts},
restart: :permanent,
shutdown: 5000,
type: :worker
}
end
end
| lib/hobot.ex | 0.665628 | 0.666353 | hobot.ex | starcoder |
defmodule TypoKiller.Files do
@moduledoc """
Find files under a path, joining each entry onto its folder path, to generate a complete list of files ready to be read
"""
@default_options [
# Defaults to 5KiB
max_size: 1024 * 5,
ignore_dot_files: true,
allowed_extensions: [],
blocked_extensions: [],
allowed_paths: [],
blocked_paths: []
]
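# A sketch of typical usage; option keys are the ones listed above:
#
#     find_files_on_folder("lib", allowed_extensions: ["ex", "exs"], max_size: 1024 * 50)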
@doc """
Find files inside path.
- When path is a file, it returns itself inside a list
- When path is a directory, scan the entire folder and subfolders for files
- When path is something different, it returns an empty list
"""
@spec find_files_on_folder(path :: String.t()) :: list(String.t()) | []
def find_files_on_folder(path \\ ".", options \\ []) do
options_map = build_options_map(options)
find_files(path, options_map)
end
defp build_options_map(options) do
@default_options
|> Keyword.merge(options)
|> Map.new(fn {key, value} ->
case value do
value when is_list(value) ->
{key, MapSet.new(value)}
value ->
{key, value}
end
end)
end
defp find_files(path, %{max_size: max_size} = options) do
cond do
file_in_size_range?(path, max_size) and allowed_file_extension?(path, options) ->
[path]
File.dir?(path) ->
File.ls!(path)
|> Enum.filter(&filter_dot_files(&1, options))
|> Enum.map(&Path.join(path, &1))
|> Enum.map(&find_files(&1, options))
|> Enum.concat()
|> Enum.filter(&allowed_path?(&1, options))
true ->
[]
end
end
defp filter_dot_files(file, %{ignore_dot_files: ignore_it?}) do
!(ignore_it? and String.starts_with?(file, "."))
end
defp file_in_size_range?(file, max_size, min_size \\ 0) do
%File.Stat{size: size, type: type} = File.stat!(file)
type == :regular and size <= max_size and size >= min_size
end
defp allowed_file_extension?(file, %{allowed_extensions: allowed, blocked_extensions: blocked}) do
check_allow_and_block_list(file, allowed, blocked, &extension_in_map?/2)
end
defp allowed_path?(path, %{allowed_paths: allowed, blocked_paths: blocked}) do
check_allow_and_block_list(path, allowed, blocked, &check_dir/2)
end
defp extension_in_map?(extensions_map, file) do
extension =
file
|> String.split(".")
|> Enum.reverse()
|> Enum.at(0)
MapSet.member?(extensions_map, extension)
end
defp check_dir(dir_map, dir) do
Enum.any?(dir_map, &String.contains?(dir, &1))
end
defp check_allow_and_block_list(item, allow_list, block_list, function) do
cond do
MapSet.size(allow_list) > 0 ->
function.(allow_list, item)
MapSet.size(block_list) > 0 ->
!function.(block_list, item)
true ->
true
end
end
end
| lib/typo_killer/files.ex | 0.611614 | 0.444987 | files.ex | starcoder |
defmodule Roulette do
@moduledoc ~S"""
Scalable PubSub client library which uses HashRing-ed gnatsd-cluster
## Prepare your own PubSub module
```elixir
defmodule MyApp.PubSub do
use Roulette, otp_app: :my_app
end
```
## Configuration
Setup configuration like following
```elixir
config :my_app, MyApp.PubSub,
servers: [
[host: "gnatsd1.example.org", port: 4222],
[host: "gnatsd2.example.org", port: 4222],
[host: "gnatsd3.example.org", port: 4222]
]
# ...
```
## Application
Append your PubSub module onto your application's supervisor
```elixir
defmodule MyApp.Application do
use Application
def start(_type, _args) do
children = [
{MyApp.PubSub, []}
# ... other children
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
end
```
## Simple Usage
```elixir
defmodule MyApp.Session do
use GenServer
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(opts) do
username = Keyword.fetch!(opts, :username)
MyApp.PubSub.sub!(username)
{:ok, %{username: username}}
end
def handle_info({:pubsub_message, topic, msg, pid}, state) do
# handle msg
{:noreply, state}
end
def terminate(reason, state) do
:ok
end
end
```
Anywhere else you want to publish message in your app.
```elixir
MyApp.PubSub.pub!("foobar", data)
```
## Premised gnatsd Network Architecture
gnatsd supports cluster-mode. This works as a full-mesh, one-hop messaging system to sync events.
[gnatsd's full-mesh architecture](https://github.com/nats-io/gnatsd#full-mesh-required)

Roulette assumes that you put a load-balancer like AWS NLB in front of each gnatsd-cluster.
Roulette is not responsible for health-checking and load-balancing between the gnatsd-servers
within a single gnatsd-cluster;
Roulette assumes that is the load-balancer's responsibility.

Roulette connects to each gnatsd-server through load-balancers,
and doesn't mind which endpoint it connects to.
However, if your application servers send `PUBLISH` messages heavily,
it'll eventually cause trouble.
Roulette resolves this problem with `Consistent Hashing`.
Set up multiple gnatsd-clusters beforehand; when your app sends a
`PUBLISH` or `SUBSCRIBE` message,
which cluster your app sends the message to is decided by the `topic`.

## Detailed Usage
### Publish
ok/error style.
```elixir
topic = "foobar"
case MyApp.PubSub.pub(topic, data) do
:ok -> :ok
:error -> :error
end
```
If you don't mind error handling(not recommended on production),
you can use `pub!/2` instead
```elixir
topic = "foobar"
MyApp.PubSub.pub!(topic, data)
```
### Subscribe
ok/error style.
`sub/1` returns Supervisor.on_start()
```elixir
topic = "foobar"
case MyApp.PubSub.sub("foobar") do
{:ok, _pid} -> :ok
other ->
Logger.warn "failed to sub: #{inspect other}"
:error
end
```
If you don't mind error handling(not recommended on production),
you can use `sub!/1` instead
```elixir
MyApp.PubSub.sub!(topic)
```
### Unsubscribe
ok/error style.
`unsub/1` returns `:ok` or `{:error, :not_found}`
```elixir
topic = "foobar"
case MyApp.PubSub.unsub("foobar") do
:ok -> :ok
{:error, :not_found} -> :ok
end
```
If you don't mind error handling(not recommended on production),
you can use `unsub!/1` instead
```elixir
MyApp.PubSub.unsub!(topic)
```
In the following example, you don't need to call `unsub/1` in `terminate/2`,
because unsubscription is handled automatically when the process that called `sub` terminates.
```elixir
defmodule MyApp.Session do
use GenServer
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(opts) do
username = Keyword.fetch!(opts, :username)
MyApp.PubSub.sub!(username)
{:ok, %{username: username}}
end
def handle_info({:pubsub_message, topic, msg, pid}, state) do
# handle msg
{:noreply, state}
end
def terminate(reason, state) do
# You don't need this line
# MyApp.PubSub.unsub(state.username)
:ok
end
end
```
"""
defmacro __using__(opts \\ []) do
quote location: :keep, bind_quoted: [opts: opts] do
@config Roulette.Config.load(__MODULE__, opts)
@spec pub(String.t, any) :: :ok | :error
def pub(topic, data) do
Roulette.Publisher.pub(__MODULE__, topic, data)
end
@spec pub!(String.t, any) :: :ok
def pub!(topic, data) do
case pub(topic, data) do
:ok -> :ok
:error -> raise Roulette.Error, "failed to pub: #{topic}"
end
end
@spec sub(String.t) :: Supervisor.on_start
def sub(topic) do
Roulette.Subscriber.sub(__MODULE__, topic)
end
@spec sub!(String.t) :: pid
def sub!(topic) do
case sub(topic) do
{:ok, pid} -> pid
other -> raise Roulette.Error, "failed to sub: #{inspect other}"
end
end
@spec unsub(String.t | pid) :: :ok | {:error, :not_found}
def unsub(topic_or_pid) do
Roulette.Subscriber.unsub(__MODULE__, topic_or_pid)
end
@spec unsub!(String.t | pid) :: :ok
def unsub!(topic_or_pid) do
case unsub(topic_or_pid) do
:ok -> :ok
other -> raise Roulette.Error, "failed to unsub: #{inspect other}"
end
end
@spec child_spec(any) :: Supervisor.child_spec
def child_spec(_opts) do
Roulette.Supervisor.child_spec(__MODULE__, @config)
end
end
end
end
# (end of lib/roulette.ex)
defmodule Stripe.Connect.OAuth do
@moduledoc """
Work with Stripe Connect.
You can:
- generate the URL for starting the OAuth workflow
- authorize a new connected account with a token
- deauthorize an existing connected account
Stripe API reference: https://stripe.com/docs/connect/reference
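A typical flow looks like this (a sketch; `code` arrives via the OAuth
redirect, and all values shown are illustrative):
```
# 1. Send the user to Stripe to connect their account.
url = Stripe.Connect.OAuth.authorize_url(%{state: "opaque-csrf-token"})
# 2. After the redirect back, exchange the code for credentials.
{:ok, %Stripe.Connect.OAuth.TokenResponse{stripe_user_id: connected_account_id}} =
  Stripe.Connect.OAuth.token(code)
```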
"""
alias Stripe.{Config, Converter}
@callback token(code :: String.t()) :: {:ok, map}
@callback authorize_url(map) :: String.t()
@callback deauthorize(stripe_user_id :: String.t()) :: {:ok, map}
@authorize_url_valid_keys [
:always_prompt,
:client_id,
:redirect_uri,
:response_type,
:scope,
:state,
:stripe_landing,
:stripe_user
]
defmodule AuthorizeResponse do
defstruct [
:access_token,
:livemode,
:refresh_token,
:scope,
:stripe_user_id,
:stripe_publishable_key,
:token_type
]
end
defmodule TokenResponse do
defstruct [
:access_token,
:livemode,
:refresh_token,
:scope,
:stripe_user_id,
:stripe_publishable_key,
:token_type
]
end
defmodule DeauthorizeResponse do
defstruct [
:stripe_user_id
]
end
@doc """
Execute the OAuth callback to Stripe using the code supplied in the request parameter of the oauth redirect at the end of the onboarding workflow.
## Example
```
iex(1)> {:ok, resp} = Stripe.Connect.OAuth.token(code)
...(1)> IO.inspect resp
%Stripe.Connect.OAuth.TokenResponse{
access_token: "ACCESS_TOKEN",
livemode: false,
refresh_token: "<PASSWORD>_TOKEN",
scope: "read_write",
stripe_publishable_key: "PUBLISHABLE_KEY",
stripe_user_id: "USER_ID",
token_type: "bearer"
}
```
"""
@spec token(String.t(), Stripe.options()) :: {:ok, map} | {:error, %Stripe.Error{}}
def token(code, opts \\ []) do
endpoint = "token"
{api_key, _} = Keyword.pop(opts, :api_key)
body = %{
client_secret: api_key || get_client_secret(),
code: code,
grant_type: "authorization_code"
}
case Stripe.API.oauth_request(:post, endpoint, body) do
{:ok, result} -> {:ok, Converter.convert_result(result)}
{:error, error} -> {:error, error}
end
end
@doc """
De-authorizes the connected account.
Requires the customer to re-establish the link using the onboarding workflow.
## Example
```
iex(1)> {:ok, result} = Stripe.Connect.OAuth.deauthorize(stripe_user_id)
```
"""
@spec deauthorize(String.t()) :: {:ok, map} | {:error, %Stripe.Error{}}
def deauthorize(stripe_user_id) do
endpoint = "deauthorize"
body = %{
client_id: get_client_id(),
stripe_user_id: stripe_user_id
}
case Stripe.API.oauth_request(:post, endpoint, body) do
{:ok, result} -> {:ok, Converter.convert_result(result)}
{:error, error} -> {:error, error}
end
end
@doc ~S"""
Generate the URL to start a Stripe workflow.
## Parameter Map Keys
The parameter map keys are derived from the [valid request parameters](https://stripe.com/docs/connect/reference)
for the Stripe Connect authorize endpoint. A parameter only needs to be provided if
you wish to override the default.
- `:always_prompt`
- `:client_id`
- `:redirect_uri`
- `:response_type`
- `:scope`
- `:state`
- `:stripe_landing`
- `:stripe_user`
For ease of use, any parameters you provide are merged into the
following map of sensible defaults. This also means you can call the
function with no parameters and it will fall back to this map:
```
%{
client_id: client_id, # :connect_client_id from configuration
response_type: "code",
scope: "read_write"
}
```
## Example
```
connect_opts = %{
state: "2686e7a93156ff5af76a83262ac653",
stripe_user: %{
"email" => "<EMAIL>",
"url" => "http://local.example.net",
"country" => "US",
"phone_number" => "5555555678",
"business_name" => "Jeanine & <NAME>",
"businessy_type" => "llc",
"first_name" => "Jeanine",
"last_name" => "Smith",
"dob_day" => 29,
"dob_month" => 1,
"dob_year" => 1983,
"street_address" => "123 Main St.",
"product_category" => "food_and_restuarants"
}
}
url = Stripe.Connect.OAuth.authorize_url(connect_opts)
```
"""
@spec authorize_url(map, :standard | :express) :: String.t()
def authorize_url(param_options \\ %{}, account_type \\ :standard) do
domain = "https://connect.stripe.com"
base_url =
case account_type do
:standard ->
domain <> "/oauth/authorize?"
:express ->
domain <> "/express/oauth/authorize?"
end
param_string =
get_default_authorize_map()
|> Map.merge(param_options)
|> Map.take(@authorize_url_valid_keys)
|> Stripe.URI.encode_query()
base_url <> param_string
end
@spec get_client_id() :: String.t()
defp get_client_id() do
Config.resolve(:connect_client_id)
end
@spec get_client_secret() :: String.t()
defp get_client_secret() do
Config.resolve(:api_key)
end
@spec get_default_authorize_map() :: map
defp get_default_authorize_map() do
%{
client_id: get_client_id(),
response_type: "code",
scope: "read_write"
}
end
end
# (end of lib/stripe/connect/oauth.ex)
defmodule Serum.HeaderParser do
@moduledoc false
_moduledocp = """
This module takes care of parsing headers of page (or post) source files.
Header is where all page or post metadata goes into, and has the following
format:
```
---
key: value
...
---
```
where `---` on the first and last lines delimits the beginning and the end of
the header area, and between these two lines are one or more key-value pairs
delimited by a colon, where the key is the name of a metadata item and the
value is its actual value.
"""
require Serum.Result, as: Result
alias Serum.Error
alias Serum.HeaderParser.Extract
alias Serum.HeaderParser.ValueTransformer
@type options :: [{atom(), value_type()}]
@type value_type :: :string | :integer | :datetime | {:list, value_type()}
@type value :: binary() | integer() | DateTime.t() | [binary()] | [integer()] | [DateTime.t()]
@typep parse_result :: Result.t({map(), map(), binary(), integer()})
@doc """
Reads lines from a binary `data` and extracts the header into a map.
`options` is a keyword list which specifies the names and types of metadata the
header parser expects, so a typical `options` value looks like this:
    [key1: type1, key2: type2, ...]
See the "Types" section for available value types.
The `required` argument is a list of required keys (as atoms). If the header parser
cannot find all required keys in the header area, it returns an error.
## Types
Currently the HeaderParser supports following types:
* `:string` - A line of string. It can contain spaces.
* `:integer` - A decimal integer.
* `:datetime` - Date and time. Must be specified in the format of
`YYYY-MM-DD hh:mm:ss`. This data will be interpreted as a local time.
* `{:list, <type>}` - A list of multiple values separated by commas. Every
value must have the same type, either `:string`, `:integer`, or `:datetime`.
You cannot make a list of lists.
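For example (a sketch; assumes `file` is a `%Serum.File{}` whose `in_data`
has already been loaded):
    parse_header(file, [title: :string, tags: {:list, :string}], [:title])
On success this returns `{:ok, {parsed, extras, rest, next_line}}`, where
`parsed` holds the recognized keys and `extras` holds any unrecognized ones.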
"""
@spec parse_header(Serum.File.t(), options(), [atom()]) :: parse_result()
def parse_header(file, options, required \\ [])
def parse_header(%Serum.File{in_data: nil} = file, _, _) do
Result.fail(Simple, ["cannot parse header: the file is not loaded"], file: file)
end
def parse_header(file, options, required) do
Result.run do
{kvs, rest, next_line} <- Extract.extract_header(file.in_data)
key_strings = options |> Keyword.keys() |> Enum.map(&to_string/1)
kv_groups = Enum.group_by(kvs, &(elem(elem(&1, 0), 0) in key_strings))
accepted_kv = kv_groups[true] || []
extras = kv_groups |> Map.get(false, []) |> Enum.map(&elem(&1, 0))
find_missing(accepted_kv, required, next_line)
parsed <- transform_values(accepted_kv, options)
Result.return({Map.new(parsed), Map.new(extras), rest, next_line})
else
{:error, %Error{} = e} -> {:error, Error.prewalk(e, &%Error{&1 | file: file})}
end
end
@spec find_missing([{binary(), binary()}], [atom()], integer()) :: Result.t()
defp find_missing(kv_list, required, line) do
req_strings = required |> Enum.map(&to_string/1) |> MapSet.new()
keys = kv_list |> Enum.map(&elem(elem(&1, 0), 0)) |> MapSet.new()
req_strings
|> MapSet.difference(keys)
|> MapSet.to_list()
|> case do
[] -> Result.return()
missings -> Result.fail(Simple, [missing_message(missings)], line: line)
end
end
@spec missing_message([binary()]) :: binary()
defp missing_message(missings)
defp missing_message([missing]), do: "`#{missing}` is required, but missing"
defp missing_message(missings) do
repr = missings |> Enum.map(&"`#{&1}`") |> Enum.reverse() |> Enum.join(", ")
"#{repr} are required, but missing"
end
@spec transform_values([{{binary(), binary()}, integer()}], keyword(atom())) ::
Result.t([{atom(), value()}])
defp transform_values(kvs, options) do
kvs
|> Enum.map(fn {{key, _value} = kv, line} ->
atom_key = String.to_existing_atom(key)
case ValueTransformer.transform_value(kv, options[atom_key], line) do
{:ok, value} -> Result.return({atom_key, value})
{:error, %Error{}} = error -> error
end
end)
|> Result.aggregate("failed to parse the header:")
end
end
# (end of lib/serum/header_parser.ex)
defmodule Curve448 do
import Bitwise
@moduledoc """
Curve448 Diffie-Hellman functions
"""
@typedoc """
public or secret key
"""
@type key :: <<_::448>>
@p 726_838_724_295_606_890_549_323_807_888_004_534_353_641_360_687_318_060_281_490_199_180_612_328_166_730_772_686_396_383_698_676_545_930_088_884_461_843_637_361_053_498_018_365_439
@a 156_326
defp clamp(c) do
c
|> band(~~~3)
|> bor(128 <<< (8 * 55))
end
# :math.pow/2 yields floats, and we only need squaring here
defp square(x), do: x * x
defp expmod(_b, 0, _m), do: 1
defp expmod(b, e, m) do
t = b |> expmod(div(e, 2), m) |> square |> rem(m)
case e &&& 1 do
1 -> rem(t * b, m)
_ -> t
end
end
defp inv(x), do: x |> expmod(@p - 2, @p)
defp add({xn, zn}, {xm, zm}, {xd, zd}) do
x = (xm * xn - zm * zn) |> square |> (&(&1 * 4 * zd)).()
z = (xm * zn - zm * xn) |> square |> (&(&1 * 4 * xd)).()
{rem(x, @p), rem(z, @p)}
end
defp double({xn, zn}) do
x = (square(xn) - square(zn)) |> square
z = 4 * xn * zn * (square(xn) + @a * xn * zn + square(zn))
{rem(x, @p), rem(z, @p)}
end
def curve448(n, base) do
one = {base, 1}
two = double(one)
{{x, z}, _} = nth_mult(n, {one, two})
rem(x * inv(z), @p)
end
defp nth_mult(1, basepair), do: basepair
defp nth_mult(n, {one, two}) do
{pm, pm1} = n |> div(2) |> nth_mult({one, two})
case n &&& 1 do
1 -> {add(pm, pm1, one), double(pm1)}
_ -> {double(pm), add(pm, pm1, one)}
end
end
@doc """
Generate a secret/public key pair
Returned tuple contains `{random_secret_key, derived_public_key}`
"""
@spec generate_key_pair :: {key, key}
def generate_key_pair do
# This algorithm is supposed to be resilient against poor RNG, but use the best we can
secret = :crypto.strong_rand_bytes(56)
{secret, derive_public_key(secret)}
end
@doc """
Derive a shared secret for a secret and public key
Given our secret key and our partner's public key, returns a
shared secret which can be derived by the partner in a complementary way.
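For example, a minimal two-party sketch (names are illustrative):
    {alice_secret, alice_public} = Curve448.generate_key_pair()
    {bob_secret, bob_public} = Curve448.generate_key_pair()
    shared = Curve448.derive_shared_secret(alice_secret, bob_public)
    ^shared = Curve448.derive_shared_secret(bob_secret, alice_public)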
"""
@spec derive_shared_secret(key, key) :: key | :error
def derive_shared_secret(<<our_secret::little-size(448)>>, <<their_public::little-size(448)>>) do
shared_secret =
our_secret
|> clamp
|> curve448(their_public)
<<shared_secret::little-size(448)>>
end
def derive_shared_secret(_ours, _theirs), do: :error
@doc """
Derive the public key from a secret key
"""
@spec derive_public_key(key) :: key | :error
def derive_public_key(<<our_secret::little-size(448)>>) do
public_key =
our_secret
|> clamp
|> curve448(5)
<<public_key::little-size(448)>>
end
def derive_public_key(_ours), do: :error
end
# (end of lib/curve448.ex)
defmodule Riak.Ecto.Connection do
@moduledoc false
alias Riak.Ecto.NormalizedQuery.SearchQuery
alias Riak.Ecto.NormalizedQuery.FetchQuery
alias Riak.Ecto.NormalizedQuery.CountQuery
alias Riak.Ecto.NormalizedQuery.WriteQuery
## Worker
## Callbacks for adapter
def all(pool, query, opts \\ [])
def all(pool, %FetchQuery{} = query, _opts) do
coll = query.coll
_projection = query.projection
case Riak.fetch_type(pool, coll, query.id) do
{:ok, map} ->
[map
|> :riakc_map.value()
|> crdt_to_map
|> Map.merge(%{id: query.id, context: %{map: map, total_count: 1}})]
{:error, :not_found} ->
[]
end
end
def all(pool, %SearchQuery{} = query, opts) do
coll = query.coll
_projection = query.projection
opts = query.opts ++ opts
filter = query.filter
order = query.order
query = query.query
opts = [{:filter, filter} | opts] ++ [{:sort, order}]
case Riak.search(pool, coll, query, opts) do
{:ok, {results, total_count}} ->
Enum.map(results, fn result ->
result
|> solr_to_map
|> Map.merge(%{context: %{map: nil, total_count: total_count}})
end)
end
end
def all(pool, %CountQuery{} = query, opts) do
coll = query.coll
_projection = query.projection
opts = query.opts ++ opts
filter = query.filter
query = "*:*" #query.query
opts = [filter: filter, rows: 0, start: 0] ++ opts
case Riak.search(pool, coll, query, opts) do
{:ok, {_, total_count}} ->
[%{"value" => total_count}]
end
end
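# Convert a fetched :riakc_map's values into a plain map, tagging counters
# and sets so their CRDT type survives the round trip.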
defp crdt_to_map(values) do
  Enum.reduce(values, %{}, fn
    {{k, :flag}, v}, m -> Map.put(m, k, v)
    {{k, :register}, v}, m -> Map.put(m, k, v)
    {{k, :counter}, v}, m -> Map.put(m, k, {:counter, v})
    {{k, :set}, v}, m -> Map.put(m, k, {:set, v})
    {{k, :map}, v}, m -> Map.put(m, k, crdt_to_map(v))
  end)
end
@ignore_fields ~w(_yz_id _yz_rb _yz_rt score)
defp solr_to_map({_, fields}) do
  Enum.reduce(fields, %{}, fn
    {field, _}, map when field in @ignore_fields -> map
    {"_yz_rk", value}, map -> Map.put(map, :id, value)
    {key, value}, map -> map_solr_field(key, value, map)
  end)
end
defp map_solr_field(key, value, map) do
  case String.split(key, ".", parts: 2) do
    [k] -> map_solr_field_value(k, value, "", map)
    [k | [rest]] -> map_solr_field_value(k, value, rest, map)
  end
end
defp map_solr_field_value(key, value, key_rest, map) do
  # The `U` (ungreedy) modifier makes `.*` match as little as possible.
  case Regex.scan(~r/(.*)_(map|register|counter|flag|set)/U, key, capture: :all_but_first) do
    [[field, "register"]] -> Map.put(map, field, value)
    [[field, "flag"]] -> Map.put(map, field, value == "true")
    [[field, "counter"]] -> Map.put(map, field, {:counter, String.to_integer(value)})
    [[field, "map"]] -> Map.update(Map.put_new(map, field, %{}), field, %{}, &map_solr_field(key_rest, value, &1))
    _ -> map
  end
end
@riak_types [:register, :flag, :map, :set]
defp erase_key_unless_type(map, key, exclude \\ [])
defp erase_key_unless_type(map, key, exclude) do
Enum.reduce(@riak_types -- exclude, map, fn type, acc ->
if :riakc_map.is_key({key, type}, acc) do
:riakc_map.erase({key, type}, acc)
else
acc
end
end)
end
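# The apply_change/2 clauses below translate one changed field into :riakc_map
# operations, mapping Elixir values (nil/empty, booleans, binaries, counters,
# sets, maps, and lists) onto the corresponding Riak CRDT type. Stale entries
# of other CRDT types under the same key are erased first.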
defp apply_change(map, {key, empty}) when empty in [nil, []] do
erase_key_unless_type(map, key)
end
defp apply_change(map, {key, false}) do
map = erase_key_unless_type(map, key, [:flag])
:riakc_map.update({key, :flag}, &:riakc_flag.disable(&1), map)
end
defp apply_change(map, {k, true}) do
map = erase_key_unless_type(map, k, [:flag])
:riakc_map.update({k, :flag}, &:riakc_flag.enable(&1), map)
end
defp apply_change(map, {k, value}) when is_binary(value) do
map = erase_key_unless_type(map, k, [:register])
:riakc_map.update({k, :register}, &:riakc_register.set(value, &1), map)
end
defp apply_change(map, {k, {:counter, _value, increment}}) do
map = erase_key_unless_type(map, k, [:counter])
:riakc_map.update({k, :counter}, &:riakc_counter.increment(increment, &1), map)
end
defp apply_change(map, {k, {:set, value}}) when is_list(value) do
map = erase_key_unless_type(map, k, [:set])
:riakc_map.update({k, :set}, fn set ->
dirty_value = :riakc_set.value(set)
to_add = value -- dirty_value
to_rem = dirty_value -- value
set = Enum.reduce(to_add, set, &:riakc_set.add_element(&1, &2))
set = Enum.reduce(to_rem, set, &:riakc_set.del_element(&1, &2))
set
end, map)
end
defp apply_change(crdt_map, {key, value_map}) when is_map(value_map) do
crdt_map = erase_key_unless_type(crdt_map, key, [:map])
:riakc_map.update({key, :map}, fn inner_crdt_map ->
value_map_keys = Map.keys(value_map) |> Enum.map(&to_string/1)
inner_crdt_map =
Enum.reduce(:riakc_map.fetch_keys(inner_crdt_map), inner_crdt_map, fn {k, dt}, acc1 ->
if(k in value_map_keys, do: acc1, else: :riakc_map.erase({k, dt}, acc1))
end)
Enum.reduce(value_map, inner_crdt_map, fn {k, v}, acc ->
apply_change(acc, {to_string(k), v})
end)
end, crdt_map)
end
defp apply_change(crdt_map, {key, [%{id: _id} | _] = value_list}) when is_list(value_list) do
crdt_map = erase_key_unless_type(crdt_map, key, [:map])
crdt_map =
:riakc_map.update({key, :map}, fn inner_crdt_map ->
ids = Enum.map(value_list, &Map.fetch!(&1, :id)) |> Enum.map(&to_string/1)
Enum.reduce(:riakc_map.fetch_keys(inner_crdt_map), inner_crdt_map, fn {k, dt}, acc ->
if(k in ids, do: acc, else: :riakc_map.erase({k, dt}, acc))
end)
end, crdt_map)
Enum.reduce(value_list, crdt_map, fn %{id: id} = item, acc ->
item = Map.delete(item, :id)
:riakc_map.update({key, :map}, &apply_change(&1, {to_string(id), item}), acc)
end)
end
defp apply_change(crdt_map, {key, value_list}) when is_list(value_list) do
crdt_map = erase_key_unless_type(crdt_map, key, [:map])
Enum.reduce(value_list, crdt_map, fn item, acc ->
:riakc_map.update({key, :map}, &apply_change(&1, {to_string(:erlang.phash2(item)), item}), acc)
end)
end
defp apply_changes(crdt_map, updates) do
Enum.reduce(updates, crdt_map || :riakc_map.new, fn {key, new_value}, acc ->
apply_change(acc, {to_string(key), new_value})
end)
end
def update(pool, %WriteQuery{} = query, opts) do
coll = query.coll
command = query.command
context = query.context || %{}
_ = query.opts ++ opts
query = query.query
map = apply_changes(Map.get(context, :map), Keyword.fetch!(command, :set))
op = :riakc_map.to_op(map)
case Riak.update_type(pool, coll, query[:id], op) do
:ok -> {:ok, []}
_ -> {:error, :stale}
end
end
def insert(pool, %WriteQuery{} = query, opts) do
coll = query.coll
command = query.command
context = query.context || %{}
_ = query.opts ++ opts
id = command[:id] || :undefined
map = apply_changes(Map.get(context, :map), command)
case Riak.update_type(pool, coll, id, :riakc_map.to_op(map)) do
:ok -> {:ok, 1}
{:ok, id} -> {:ok, %{inserted_id: id}}
end
end
def delete(pool, %WriteQuery{} = query, opts) do
coll = query.coll
_ = query.context
_ = query.opts ++ opts
query = query.query
id = Keyword.fetch!(query, :id)
case Riak.delete(pool, coll, id) do
:ok -> {:ok, []}
_ -> {:error, :stale}
end
end
end
# (end of lib/riak_ecto/connection.ex)
defmodule ExJack.Server do
@moduledoc """
A GenServer module that interfaces with JACK audio API I/O.
There are two methods for outputting sound to JACK:
1. Calling `send_frames/1`
2. Setting an output function using `set_output_func/1`, which JACK
calls every time it wants frames.
At the moment, there is only one method of retrieving input data, which is to set
an input callback using `set_input_func/1`.
Latency will vary, and on a busy machine you should expect xruns. "xrun" is
shorthand for overrun and underrun, which occur when you send either too many
frames or not enough frames. If the CPU is busy with other work and neglects
to send frames to the soundcard, the soundcard's buffer runs out of frames to
play, causing an underrun. Conversely, if you send the soundcard more frames
than its buffers can hold, the excess data is lost. This is an overrun.
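For example, a minimal output callback that fills every request with silence
(the callback receives a `Range` of absolute frame indices and must return
one float per frame):
    ExJack.Server.set_output_func(fn frames ->
      Enum.map(frames, fn _frame -> 0.0 end)
    end)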
"""
use GenServer
defstruct handler: nil,
shutdown_handler: nil,
current_frame: 0,
buffer_size: 0,
sample_rate: 44100,
output_func: &ExJack.Server.noop/1,
input_func: &ExJack.Server.noop/1
@type t :: %__MODULE__{
handler: any(),
shutdown_handler: any(),
current_frame: pos_integer(),
buffer_size: buffer_size_t,
sample_rate: sample_rate_t,
output_func: output_func_t,
input_func: input_func_t
}
@type sample_rate_t :: pos_integer()
@type buffer_size_t :: pos_integer()
@type frames_t :: list(float())
@type output_func_t :: (Range.t() -> frames_t)
@type input_func_t :: (frames_t -> any())
@type options_t :: %{
name: String.t(),
use_callback: boolean(),
auto_connect: boolean()
}
@doc """
Start the server.
JACK NIF will start a thread that runs the JACK client.
It will auto-connect to two standard channels which you can modify
through JACK.
## Parameters
- name: Used to name the JACK node (suffixed with `:in` and `:out`)
e.g. If you pass `%{name: "HelloWorld"}`, you can interface with this
connection within JACK through `HelloWorld:in` and `HelloWorld:out`.
"""
@spec start_link(options_t) :: GenServer.on_start()
def start_link(%{name: _name} = opts) do
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
end
@doc """
Set the callback function that JACK will call when it requests more frames.
"""
@spec set_output_func(output_func_t) :: :ok
def set_output_func(output_func) do
GenServer.cast(__MODULE__, {:set_output_func, output_func})
end
@doc """
Returns the size of JACK's buffer
"""
@spec get_buffer_size() :: buffer_size_t()
def get_buffer_size() do
GenServer.call(__MODULE__, :buffer_size)
end
@doc """
Returns the sample rate in Hz that JACK is operating with
"""
@spec get_sample_rate() :: sample_rate_t()
def get_sample_rate() do
GenServer.call(__MODULE__, :sample_rate)
end
@doc """
Set the callback function that will receive input data from JACK each cycle.
The output of the function is currently not used for anything.
"""
@spec set_input_func(input_func_t) :: :ok
def set_input_func(input_func) do
GenServer.cast(__MODULE__, {:set_input_func, input_func})
end
@doc """
Sends a list of frames for JACK to play during its next cycle.
"""
@spec send_frames(frames_t) :: :ok
def send_frames(frames) do
unless Enum.empty?(frames) do
GenServer.cast(__MODULE__, {:send_frames, frames})
end
end
@impl true
def init(opts) do
{:ok, handler, shutdown_handler, %{buffer_size: buffer_size, sample_rate: sample_rate}} =
ExJack.Native.start(opts)
{:ok,
%__MODULE__{
handler: handler,
shutdown_handler: shutdown_handler,
current_frame: 0,
buffer_size: buffer_size,
sample_rate: sample_rate
}}
end
@impl true
@spec handle_call(:buffer_size, any(), t()) :: {:reply, buffer_size_t(), t()}
def handle_call(:buffer_size, _from, %{buffer_size: buffer_size} = state) do
{:reply, buffer_size, state}
end
@impl true
@spec handle_call(:sample_rate, any(), t()) :: {:reply, sample_rate_t(), t()}
def handle_call(:sample_rate, _from, %{sample_rate: sample_rate} = state) do
{:reply, sample_rate, state}
end
@impl true
@spec handle_cast({:set_output_func, output_func_t}, t()) :: {:noreply, t()}
def handle_cast({:set_output_func, output_func}, state) do
{:noreply, %{state | output_func: output_func}}
end
@impl true
@spec handle_cast({:set_input_func, input_func_t}, t()) :: {:noreply, t()}
def handle_cast({:set_input_func, input_func}, state) do
{:noreply, %{state | input_func: input_func}}
end
@impl true
@spec handle_cast({:send_frames, frames_t}, t()) :: {:noreply, t()}
def handle_cast({:send_frames, frames}, %{handler: handler} = state) do
ExJack.Native.send_frames(handler, frames)
{:noreply, state}
end
@impl true
@spec handle_info({:in_frames, frames_t}, t()) :: {:noreply, t()}
def handle_info({:in_frames, frames}, %{input_func: input_func} = state) do
input_func.(frames)
{:noreply, state}
end
@impl true
@spec handle_info({:request, pos_integer()}, t()) :: {:noreply, t()}
def handle_info(
{:request, requested_frames},
%{current_frame: current_frame, output_func: output_func} = state
) do
end_frames = current_frame + requested_frames - 1
send_frames(output_func.(current_frame..end_frames))
{:noreply, %{state | current_frame: end_frames + 1}}
end
@impl true
def terminate(_reason, %{shutdown_handler: shutdown_handler}) do
ExJack.Native.stop(shutdown_handler)
:ok
end
@doc false
def noop(_) do
[]
end
end
# (end of lib/ex_jack/server.ex)
defmodule Absinthe.Relay.Node.IDTranslator do
@moduledoc """
An ID translator handles encoding and decoding a global ID
used in a Relay node.
This module provides the behaviour for implementing an ID Translator.
An example use case of this module would be a translator that encrypts the
global ID.
There are two ways to use an ID Translator in your schema.
#### Inline Config
```
defmodule MyApp.Schema do
use Absinthe.Schema
use Absinthe.Relay.Schema, [
flavor: :modern,
global_id_translator: MyApp.Absinthe.IDTranslator
]
# ...
end
```
#### Mix Config
```
config Absinthe.Relay, MyApp.Schema,
global_id_translator: MyApp.Absinthe.IDTranslator
```
## Example ID Translator
A basic example that encodes the global ID by joining the `type_name` and
`source_id` with `":"`.
```
defmodule MyApp.Absinthe.IDTranslator do
@behaviour Absinthe.Relay.Node.IDTranslator
def to_global_id(type_name, source_id, _schema) do
{:ok, "\#{type_name}:\#{source_id}"}
end
def from_global_id(global_id, _schema) do
case String.split(global_id, ":", parts: 2) do
[type_name, source_id] ->
{:ok, type_name, source_id}
_ ->
{:error, "Could not extract value from ID `\#{inspect global_id}`"}
end
end
end
```
"""
@doc """
Converts a node's type name and ID to a globally unique ID.
Returns `{:ok, global_id}` on success.
Returns `{:error, binary}` on failure.
"""
@callback to_global_id(
type_name :: binary,
source_id :: binary | integer,
schema :: Absinthe.Schema.t()
) :: {:ok, global_id :: Absinthe.Relay.Node.global_id()} | {:error, binary}
@doc """
Converts a globally unique ID to a node's type name and ID.
Returns `{:ok, type_name, source_id}` on success.
Returns `{:error, binary}` on failure.
"""
@callback from_global_id(
global_id :: Absinthe.Relay.Node.global_id(),
schema :: Absinthe.Schema.t() | nil
) :: {:ok, type_name :: binary, source_id :: binary} | {:error, binary}
end
# (end of lib/absinthe/relay/node/id_translator.ex)
defmodule Bamboo.AliyunAdapter do
@moduledoc """
Bamboo adapter for sending emails through [Aliyun’s Direct Mail API](https://www.aliyun.com/product/directmail?spm=5176.8142029.388261.228.dKDNYN).
## Example config
```elixir
# In config/config.exs, or config.prod.exs, etc.
config :my_app, MyApp.Mailer,
adapter: Bamboo.AliyunAdapter,
uri: "https://dm.aliyuncs.com",
version: "2015-11-23",
region_id: "cn-hangzhou",
access_key_id: "sample",
access_key_secret: "secret",
address_type: 1,
reply_to_address: true,
click_trace: 1
```
"""
@behaviour Bamboo.Adapter
@aliyun_dm_fields ~w(Action AccountName ReplyToAddress AddressType ToAddress FromAlias Subject HtmlBody TextBody ClickTrace)a
@service_name "Aliyun"
alias Bamboo.Email
import Bamboo.ApiError
@impl Bamboo.Adapter
def deliver(email, config) do
headers = [
{"Content-Type", "application/x-www-form-urlencoded"}
]
body =
email
|> to_aliyun_body(config)
|> append_shared_info(config)
|> sign(config)
case :hackney.post(config.uri, headers, URI.encode_query(body), [:with_body]) do
{:ok, status, _headers, response} when status > 299 ->
raise_api_error(@service_name, response, body)
{:ok, status, headers, response} ->
%{status_code: status, headers: headers, body: response}
{:error, reason} ->
raise_api_error(inspect(reason))
end
end
@impl Bamboo.Adapter
def handle_config(config) do
for setting <- [
:uri,
:version,
:region_id,
:access_key_id,
:access_key_secret,
:address_type,
:reply_to_address,
:click_trace
] do
if config[setting] in [nil, ""] do
raise_missing_setting_error(config, setting)
end
end
config
end
defp raise_missing_setting_error(config, setting) do
raise ArgumentError, """
There was no #{setting} set for the Aliyun adapter.
Here are the config options that were passed in:
#{inspect(config)}
"""
end
defp append_shared_info(body, config) do
body
|> Keyword.put(:Action, "SingleSendMail")
|> Keyword.put(:Format, "JSON")
|> Keyword.put(:Version, config.version)
|> Keyword.put(:AccessKeyId, config.access_key_id)
|> Keyword.put(:SignatureMethod, "HMAC-SHA1")
|> Keyword.put(:SignatureVersion, "1.0")
|> Keyword.put(:SignatureNonce, gen_nonce())
|> Keyword.put(:Timestamp, DateTime.utc_now() |> DateTime.to_iso8601())
|> Keyword.put(:RegionId, config.region_id)
end
# Sign logic from the official PHP SDK:
# aliyun-php-sdk-core/RpcAcsRequest.php
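# The signature is computed in three steps:
# 1. sort the params and percent-encode each key/value pair, joined with "&";
# 2. build the string-to-sign as "POST" <> "&%2F&" <> the encoded query;
# 3. HMAC-SHA1 the string with the access key secret plus a trailing "&",
#    then Base64-encode the digest.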
defp sign(req, config) do
signature =
req
|> Enum.sort()
|> Enum.map(fn {key, item} -> "#{percent_encode(key)}=#{percent_encode(item)}" end)
|> Enum.join("&")
signature = "POST" <> "&%2F&" <> percent_encode(signature)
signature =
:sha
|> :crypto.hmac(config.access_key_secret <> "&", signature)
|> Base.encode64()
req
|> Keyword.put_new(:Signature, signature)
end
defp percent_encode(str) when is_binary(str) do
str
|> URI.encode_www_form()
|> String.replace("+", "%20")
|> String.replace("*", "%2A")
|> String.replace("%7E", "~")
end
defp percent_encode(value) do
value
|> to_string()
|> percent_encode()
end
defp gen_nonce do
24
|> :crypto.strong_rand_bytes()
|> :base64.encode()
end
defp to_aliyun_body(%Email{} = email, config) do
email
|> Map.from_struct()
|> put_subject(email)
|> put_from(email)
|> put_to(email)
|> put_html_body(email)
|> put_text_body(email)
|> Map.put(:AddressType, config.address_type)
|> Map.put(:ReplyToAddress, config.reply_to_address)
|> Map.put(:ClickTrace, config.click_trace)
|> filter_non_aliyun_dm_fields()
end
defp put_subject(body, %Email{subject: subject}) do
body
|> Map.put(:Subject, subject)
end
defp put_from(body, %Email{from: from}) do
case from do
{nil, email} ->
body
|> Map.put(:AccountName, email)
{name, email} ->
body
|> Map.put(:FromAlias, name)
|> Map.put(:AccountName, email)
end
end
defp put_to(body, %Email{to: to}) do
email = do_transform_email(to)
body
|> Map.put(:ToAddress, email)
end
defp do_transform_email(list) when is_list(list) do
list
|> Enum.map(&do_transform_email/1)
|> Enum.join(",")
end
defp do_transform_email({_name, email}) do
# name is not supported
email
end
defp put_html_body(body, %Email{html_body: html_body}), do: Map.put(body, :HtmlBody, html_body)
defp put_text_body(body, %Email{text_body: text_body}), do: Map.put(body, :TextBody, text_body)
defp filter_non_aliyun_dm_fields(map) do
Enum.filter(map, fn {key, value} ->
key in @aliyun_dm_fields && !(value in [nil, "", []])
end)
end
end
# (end of lib/bamboo/adapters/aliyun_adapter.ex)
defmodule AWS.KinesisVideoArchivedMedia do
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2017-09-30",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "kinesisvideo",
global?: false,
protocol: "rest-json",
service_id: "Kinesis Video Archived Media",
signature_version: "v4",
signing_name: "kinesisvideo",
target_prefix: nil
}
end
@doc """
Downloads an MP4 file (clip) containing the archived, on-demand media from the
specified video stream over the specified time range.
Both the `StreamName` and the `StreamARN` parameters are optional, but you must
specify either the `StreamName` or the `StreamARN` when invoking this API operation.
As a prerequisite to using the GetClip API, you must obtain an endpoint using
`GetDataEndpoint`, specifying `GET_CLIP` for the `APIName` parameter.
An Amazon Kinesis video stream has the following requirements for providing data
through MP4:
* The media must contain h.264 or h.265 encoded video and, optionally,
AAC or G.711 encoded audio. Specifically, the codec ID of track 1 should be
`V_MPEG/ISO/AVC` (for h.264) or `V_MPEGH/ISO/HEVC` (for H.265). Optionally, the
codec ID of track 2 should be `A_AAC` (for AAC) or `A_MS/ACM` (for G.711).
* Data retention must be greater than 0.
* The video track of each fragment must contain codec private data in
the Advanced Video Coding (AVC) for H.264 format and HEVC for H.265 format. For
more information, see [MPEG-4 specification ISO/IEC 14496-15](https://www.iso.org/standard/55980.html). For information about
adapting stream data to a given format, see [NAL Adaptation Flags](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/producer-reference-nal.html).
* The audio track (if present) of each fragment must contain codec
private data in the AAC format ([AAC specification ISO/IEC 13818-7](https://www.iso.org/standard/43345.html)) or the [MS Wave format](http://www-mmsp.ece.mcgill.ca/Documents/AudioFormats/WAVE/WAVE.html).
You can monitor the amount of outgoing data by monitoring the
`GetClip.OutgoingBytes` Amazon CloudWatch metric. For information about using
CloudWatch to monitor Kinesis Video Streams, see [Monitoring Kinesis Video Streams](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/monitoring.html).
For pricing information, see [Amazon Kinesis Video Streams Pricing](https://aws.amazon.com/kinesis/video-streams/pricing/) and [AWS Pricing](https://aws.amazon.com/pricing/). Charges for outgoing AWS data apply.
"""
def get_clip(%Client{} = client, input, options \\ []) do
url_path = "/getClip"
headers = []
query_params = []
options =
Keyword.put(
options,
:response_header_parameters,
[{"Content-Type", "ContentType"}]
)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves an MPEG Dynamic Adaptive Streaming over HTTP (DASH) URL for the
stream.
You can then open the URL in a media player to view the stream contents.
Both the `StreamName` and the `StreamARN` parameters are optional, but you must
specify either the `StreamName` or the `StreamARN` when invoking this API
operation.
An Amazon Kinesis video stream has the following requirements for providing data
through MPEG-DASH:
* The media must contain h.264 or h.265 encoded video and,
optionally, AAC or G.711 encoded audio. Specifically, the codec ID of track 1
should be `V_MPEG/ISO/AVC` (for h.264) or V_MPEGH/ISO/HEVC (for H.265).
Optionally, the codec ID of track 2 should be `A_AAC` (for AAC) or A_MS/ACM (for
G.711).
* Data retention must be greater than 0.
* The video track of each fragment must contain codec private data
in the Advanced Video Coding (AVC) for H.264 format and HEVC for H.265 format.
For more information, see [MPEG-4 specification ISO/IEC 14496-15](https://www.iso.org/standard/55980.html). For information about
adapting stream data to a given format, see [NAL Adaptation Flags](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/producer-reference-nal.html).
* The audio track (if present) of each fragment must contain codec
private data in the AAC format ([AAC specification ISO/IEC 13818-7](https://www.iso.org/standard/43345.html)) or the [MS Wave format](http://www-mmsp.ece.mcgill.ca/Documents/AudioFormats/WAVE/WAVE.html).
The following procedure shows how to use MPEG-DASH with Kinesis Video Streams:
1. Get an endpoint using
[GetDataEndpoint](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/API_GetDataEndpoint.html), specifying `GET_DASH_STREAMING_SESSION_URL` for the `APIName` parameter.
2. Retrieve the MPEG-DASH URL using `GetDASHStreamingSessionURL`.
Kinesis Video Streams creates an MPEG-DASH streaming session to be used for
accessing content in a stream using the MPEG-DASH protocol.
`GetDASHStreamingSessionURL` returns an authenticated URL (that includes an
encrypted session token) for the session's MPEG-DASH *manifest* (the root
resource needed for streaming with MPEG-DASH).
Don't share or store this token where an unauthorized entity can access it. The
token provides access to the content of the stream. Safeguard the token with the
same measures that you use with your AWS credentials.
The media that is made available through the manifest consists only of the
requested stream, time range, and format. No other media data (such as frames
outside the requested window or alternate bitrates) is made available.
3. Provide the URL (containing the encrypted session token) for the
MPEG-DASH manifest to a media player that supports the MPEG-DASH protocol.
Kinesis Video Streams makes the initialization fragment and media fragments
available through the manifest URL. The initialization fragment contains the
codec private data for the stream, and other data needed to set up the video or
audio decoder and renderer. The media fragments contain encoded video frames or
encoded audio samples.
4. The media player receives the authenticated URL and requests
stream metadata and media data normally. When the media player requests data, it
calls the following actions:
* **GetDASHManifest:** Retrieves an MPEG DASH manifest,
which contains the metadata for the media that you want to playback.
* **GetMP4InitFragment:** Retrieves the MP4
initialization fragment. The media player typically loads the initialization
fragment before loading any media fragments. This fragment contains the "`fytp`"
and "`moov`" MP4 atoms, and the child atoms that are needed to initialize the
media player decoder.
The initialization fragment does not correspond to a fragment in a Kinesis video
stream. It contains only the codec private data for the stream and respective
track, which the media player needs to decode the media frames.
* **GetMP4MediaFragment:** Retrieves MP4 media
fragments. These fragments contain the "`moof`" and "`mdat`" MP4 atoms and their
child atoms, containing the encoded fragment's media frames and their
timestamps.
After the first media fragment is made available in a streaming session, any
fragments that don't contain the same codec private data cause an error to be
returned when those different media fragments are loaded. Therefore, the codec
private data should not change between fragments in a session. This also means
that the session fails if the fragments in a stream change from having only
video to having both audio and video.
Data retrieved with this action is billable. See
[Pricing](https://aws.amazon.com/kinesis/video-streams/pricing/) for details.
For restrictions that apply to MPEG-DASH sessions, see [Kinesis Video Streams Limits](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/limits.html).
You can monitor the amount of data that the media player consumes by monitoring
the `GetMP4MediaFragment.OutgoingBytes` Amazon CloudWatch metric. For
information about using CloudWatch to monitor Kinesis Video Streams, see
[Monitoring Kinesis Video Streams](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/monitoring.html).
For pricing information, see [Amazon Kinesis Video Streams Pricing](https://aws.amazon.com/kinesis/video-streams/pricing/) and [AWS Pricing](https://aws.amazon.com/pricing/). Charges for both MPEG-DASH sessions and
outgoing AWS data apply.
If an error is thrown after invoking a Kinesis Video Streams archived media API,
in addition to the HTTP status code and the response body, it includes the
following pieces of information:
`x-amz-ErrorType` HTTP header – contains a more specific error type
in addition to what the HTTP status code provides.
`x-amz-RequestId` HTTP header – if you want to report an issue to
AWS, the support team can better diagnose the problem if given the Request Id.
Both the HTTP status code and the ErrorType header can be utilized to make
programmatic decisions about whether errors are retry-able and under what
conditions, as well as provide information on what actions the client programmer
might need to take in order to successfully try again.
For more information, see the **Errors** section at the bottom of this topic, as
well as [Common Errors](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/CommonErrors.html).
"""
def get_dash_streaming_session_url(%Client{} = client, input, options \\ []) do
url_path = "/getDASHStreamingSessionURL"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Retrieves an HTTP Live Streaming (HLS) URL for the stream.
You can then open the URL in a browser or media player to view the stream
contents.
Both the `StreamName` and the `StreamARN` parameters are optional, but you must
specify either the `StreamName` or the `StreamARN` when invoking this API
operation.
An Amazon Kinesis video stream has the following requirements for providing data
through HLS:
* The media must contain h.264 or h.265 encoded video and,
optionally, AAC encoded audio. Specifically, the codec ID of track 1 should be
`V_MPEG/ISO/AVC` (for h.264) or `V_MPEG/ISO/HEVC` (for h.265). Optionally, the
codec ID of track 2 should be `A_AAC`.
* Data retention must be greater than 0.
* The video track of each fragment must contain codec private data
in the Advanced Video Coding (AVC) for H.264 format or HEVC for H.265 format
([MPEG-4 specification ISO/IEC 14496-15](https://www.iso.org/standard/55980.html)). For information about
adapting stream data to a given format, see [NAL Adaptation Flags](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/producer-reference-nal.html).
* The audio track (if present) of each fragment must contain codec
private data in the AAC format ([AAC specification ISO/IEC 13818-7](https://www.iso.org/standard/43345.html)).
Kinesis Video Streams HLS sessions contain fragments in the fragmented MPEG-4
form (also called fMP4 or CMAF) or the MPEG-2 form (also called TS chunks, which
the HLS specification also supports). For more information about HLS fragment
types, see the [HLS specification](https://tools.ietf.org/html/draft-pantos-http-live-streaming-23).
The following procedure shows how to use HLS with Kinesis Video Streams:
1. Get an endpoint using
[GetDataEndpoint](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/API_GetDataEndpoint.html), specifying `GET_HLS_STREAMING_SESSION_URL` for the `APIName` parameter.
2. Retrieve the HLS URL using `GetHLSStreamingSessionURL`. Kinesis
Video Streams creates an HLS streaming session to be used for accessing content
in a stream using the HLS protocol. `GetHLSStreamingSessionURL` returns an
authenticated URL (that includes an encrypted session token) for the session's
HLS *master playlist* (the root resource needed for streaming with HLS).
Don't share or store this token where an unauthorized entity could access it.
The token provides access to the content of the stream. Safeguard the token with
the same measures that you would use with your AWS credentials.
The media that is made available through the playlist consists only of the
requested stream, time range, and format. No other media data (such as frames
outside the requested window or alternate bitrates) is made available.
3. Provide the URL (containing the encrypted session token) for the
HLS master playlist to a media player that supports the HLS protocol. Kinesis
Video Streams makes the HLS media playlist, initialization fragment, and media
fragments available through the master playlist URL. The initialization fragment
contains the codec private data for the stream, and other data needed to set up
the video or audio decoder and renderer. The media fragments contain
H.264-encoded video frames or AAC-encoded audio samples.
4. The media player receives the authenticated URL and requests
stream metadata and media data normally. When the media player requests data, it
calls the following actions:
* **GetHLSMasterPlaylist:** Retrieves an HLS master
playlist, which contains a URL for the `GetHLSMediaPlaylist` action for each
track, and additional metadata for the media player, including estimated bitrate
and resolution.
* **GetHLSMediaPlaylist:** Retrieves an HLS media
playlist, which contains a URL to access the MP4 initialization fragment with
the `GetMP4InitFragment` action, and URLs to access the MP4 media fragments with
the `GetMP4MediaFragment` actions. The HLS media playlist also contains metadata
about the stream that the player needs to play it, such as whether the
`PlaybackMode` is `LIVE` or `ON_DEMAND`. The HLS media playlist is typically
static for sessions with a `PlaybackType` of `ON_DEMAND`. The HLS media playlist
is continually updated with new fragments for sessions with a `PlaybackType` of
`LIVE`. There is a distinct HLS media playlist for the video track and the audio
track (if applicable) that contains MP4 media URLs for the specific track.
* **GetMP4InitFragment:** Retrieves the MP4
initialization fragment. The media player typically loads the initialization
fragment before loading any media fragments. This fragment contains the "`fytp`"
and "`moov`" MP4 atoms, and the child atoms that are needed to initialize the
media player decoder.
The initialization fragment does not correspond to a fragment in a Kinesis video
stream. It contains only the codec private data for the stream and respective
track, which the media player needs to decode the media frames.
* **GetMP4MediaFragment:** Retrieves MP4 media
fragments. These fragments contain the "`moof`" and "`mdat`" MP4 atoms and their
child atoms, containing the encoded fragment's media frames and their
timestamps.
After the first media fragment is made available in a streaming session, any
fragments that don't contain the same codec private data cause an error to be
returned when those different media fragments are loaded. Therefore, the codec
private data should not change between fragments in a session. This also means
that the session fails if the fragments in a stream change from having only
video to having both audio and video.
Data retrieved with this action is billable. See
[Pricing](https://aws.amazon.com/kinesis/video-streams/pricing/) for details.
* **GetTSFragment:** Retrieves MPEG TS fragments
containing both initialization and media data for all tracks in the stream.
If the `ContainerFormat` is `MPEG_TS`, this API is used instead of
`GetMP4InitFragment` and `GetMP4MediaFragment` to retrieve stream media.
Data retrieved with this action is billable. For more information, see [Kinesis Video Streams pricing](https://aws.amazon.com/kinesis/video-streams/pricing/).
A streaming session URL must not be shared between players. The service might
throttle a session if multiple media players are sharing it. For connection
limits, see [Kinesis Video Streams Limits](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/limits.html).
You can monitor the amount of data that the media player consumes by monitoring
the `GetMP4MediaFragment.OutgoingBytes` Amazon CloudWatch metric. For
information about using CloudWatch to monitor Kinesis Video Streams, see
[Monitoring Kinesis Video Streams](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/monitoring.html).
For pricing information, see [Amazon Kinesis Video Streams Pricing](https://aws.amazon.com/kinesis/video-streams/pricing/) and [AWS Pricing](https://aws.amazon.com/pricing/). Charges for both HLS sessions and
outgoing AWS data apply.
For more information about HLS, see [HTTP Live Streaming](https://developer.apple.com/streaming/) on the [Apple Developer site](https://developer.apple.com).
If an error is thrown after invoking a Kinesis Video Streams archived media API,
in addition to the HTTP status code and the response body, it includes the
following pieces of information:
`x-amz-ErrorType` HTTP header – contains a more specific error type
in addition to what the HTTP status code provides.
`x-amz-RequestId` HTTP header – if you want to report an issue to
AWS, the support team can better diagnose the problem if given the Request Id.
Both the HTTP status code and the ErrorType header can be utilized to make
programmatic decisions about whether errors are retry-able and under what
conditions, as well as provide information on what actions the client programmer
might need to take in order to successfully try again.
For more information, see the **Errors** section at the bottom of this topic, as
well as [Common Errors](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/CommonErrors.html).
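## Example
A sketch, assuming `client` is an `AWS.Client` built for the endpoint returned
by `GetDataEndpoint`, and a stream named "my-stream" (field names follow the
Kinesis Video Streams API):
    input = %{"StreamName" => "my-stream", "PlaybackMode" => "LIVE"}
    {:ok, %{"HLSStreamingSessionURL" => url}, _http_response} =
      AWS.KinesisVideoArchivedMedia.get_hls_streaming_session_url(client, input)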
"""
def get_hls_streaming_session_url(%Client{} = client, input, options \\ []) do
url_path = "/getHLSStreamingSessionURL"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Gets media for a list of fragments (specified by fragment number) from the
archived data in an Amazon Kinesis video stream.
You must first call the `GetDataEndpoint` API to get an endpoint. Then send the
`GetMediaForFragmentList` requests to this endpoint using the [--endpoint-url parameter](https://docs.aws.amazon.com/cli/latest/reference/).
For limits, see [Kinesis Video Streams Limits](http://docs.aws.amazon.com/kinesisvideostreams/latest/dg/limits.html).
If an error is thrown after invoking a Kinesis Video Streams archived media API,
in addition to the HTTP status code and the response body, it includes the
following pieces of information:
`x-amz-ErrorType` HTTP header – contains a more specific error type
in addition to what the HTTP status code provides.
`x-amz-RequestId` HTTP header – if you want to report an issue to
AWS, the support team can better diagnose the problem if given the Request Id.
Both the HTTP status code and the ErrorType header can be utilized to make
programmatic decisions about whether errors are retry-able and under what
conditions, as well as provide information on what actions the client programmer
might need to take in order to successfully try again.
For more information, see the **Errors** section at the bottom of this topic, as
well as [Common Errors](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/CommonErrors.html).
"""
def get_media_for_fragment_list(%Client{} = client, input, options \\ []) do
url_path = "/getMediaForFragmentList"
headers = []
query_params = []
options =
Keyword.put(
options,
:response_header_parameters,
[{"Content-Type", "ContentType"}]
)
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
@doc """
Returns a list of `Fragment` objects from the specified stream and timestamp
range within the archived data.
Listing fragments is eventually consistent. This means that even if the producer
receives an acknowledgment that a fragment is persisted, the result might not be
returned immediately from a request to `ListFragments`. However, results are
typically available in less than one second.
You must first call the `GetDataEndpoint` API to get an endpoint. Then send the
`ListFragments` requests to this endpoint using the [--endpoint-url parameter](https://docs.aws.amazon.com/cli/latest/reference/).
If an error is thrown after invoking a Kinesis Video Streams archived media API,
in addition to the HTTP status code and the response body, it includes the
following pieces of information:
`x-amz-ErrorType` HTTP header – contains a more specific error type
in addition to what the HTTP status code provides.
`x-amz-RequestId` HTTP header – if you want to report an issue to
AWS, the support team can better diagnose the problem if given the Request Id.
Both the HTTP status code and the ErrorType header can be utilized to make
programmatic decisions about whether errors are retry-able and under what
conditions, as well as provide information on what actions the client programmer
might need to take in order to successfully try again.
For more information, see the **Errors** section at the bottom of this topic, as
well as [Common Errors](https://docs.aws.amazon.com/kinesisvideostreams/latest/dg/CommonErrors.html).
"""
def list_fragments(%Client{} = client, input, options \\ []) do
url_path = "/listFragments"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
nil
)
end
end
# (end of lib/aws/generated/kinesis_video_archived_media.ex)
defmodule OT.Text.Application do
@moduledoc """
The application of a text operation to a piece of text.
"""
alias OT.Text, as: Text
alias Text.Operation
@typedoc """
The result of an `apply/2` function call, representing either success or error
in application of an operation
"""
@type apply_result :: {:ok, OT.Text.datum}
                    | {:error, :delete_too_long | :retain_too_long}
@doc """
Apply an operation to a piece of text.
Given a piece of text and an operation, iterate over each component in the
operation and apply it to the given text. If the operation is valid, the
function will return `{:ok, new_state}` where `new_state` is the text with
the operation applied to it. If the operation is invalid, an
`{:error, atom}` tuple will be returned.
## Examples
iex> OT.Text.Application.apply([1,2,3], [3, %{i: [100, 101, 102, 103]}])
{:ok, [1, 2, 3, 100, 101, 102, 103]}
iex> OT.Text.Application.apply([1,2,3], [-4])
{:error, :delete_too_long}
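A delete component removes elements from the front of the remaining text:
iex> OT.Text.Application.apply([1, 2, 3], [-2])
{:ok, [3]}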
## Errors
- `:delete_too_long` A delete component would remove more elements than remain
in the text
- `:retain_too_long` A retain component skipped past the end of the text
"""
@spec apply(Text.datum, Operation.t) :: apply_result
def apply(text, op), do: do_apply(text, op)
@doc """
Same as `apply/2`, but raises if the application fails.
"""
@spec apply!(Text.datum, Operation.t) :: Text.datum | no_return
def apply!(text, op) do
with {:ok, result} <- __MODULE__.apply(text, op) do
result
else
{:error, error} -> raise to_string(error)
end
end
@spec do_apply(Text.datum, Operation.t, Text.datum) :: apply_result
defp do_apply(text, op, result \\ [])
defp do_apply(text, [], result) do
{:ok, result ++ text}
end
defp do_apply(text, [%{i: ins} | op], result) do
text
|> do_apply(op, result ++ ins)
end
defp do_apply(text, [ret | op], result) when is_integer(ret) and ret >= 0 do
  if ret <= length(text) do
    # Enum.split/2 also handles ret == 0 correctly (retains nothing), where
    # slicing with 0..-1 would wrongly retain the entire text.
    {retained, rest} = Enum.split(text, ret)
    do_apply(rest, op, result ++ retained)
  else
    {:error, :retain_too_long}
  end
end
defp do_apply(text, [del | op], result) when is_integer(del) and del < 0 do
  if abs(del) <= length(text) do
    text
    |> Enum.drop(-del)
    |> do_apply(op, result)
  else
    {:error, :delete_too_long}
  end
end
end
# (end of lib/ot/text/application.ex)
use Croma
defmodule Antikythera.TermUtil do
@moduledoc """
Utils for calculating the actual size of terms.
These utils traverse terms and accumulate their size, including the actual size of any binaries they reference.
"""
@doc """
Returns the actual size of `term` in bytes.
"""
defun size(term :: term) :: non_neg_integer do
:erts_debug.flat_size(term) * :erlang.system_info(:wordsize) + total_binary_size(term)
end
defunp total_binary_size(term :: term) :: non_neg_integer do
b when is_bitstring(b) -> byte_size(b)
t when is_tuple(t) -> total_binary_size_in_list(Tuple.to_list(t))
l when is_list(l) -> total_binary_size_in_list(l)
m when is_map(m) -> total_binary_size_in_list(Map.to_list(m)) # including non-enumerable structs
_ -> 0
end
defunp total_binary_size_in_list(list :: v[[term]]) :: non_neg_integer do
Enum.reduce(list, 0, fn(e, acc) -> total_binary_size(e) + acc end)
end
@doc """
Returns whether the actual size of `term` is at most `limit` bytes.
This is more efficient than computing `size/1` and comparing, because this
function returns immediately once `limit` is exceeded instead of traversing
the entire term.
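For example:

    iex> Antikythera.TermUtil.size_smaller_or_equal?({:ok, "abc"}, 1_000)
    true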
"""
defun size_smaller_or_equal?(term :: term, limit :: v[non_neg_integer]) :: boolean do
case limit - :erts_debug.flat_size(term) * :erlang.system_info(:wordsize) do
new_limit when new_limit >= 0 -> limit_minus_total_binary_size(term, new_limit) >= 0
_ -> false
end
end
defunp limit_minus_total_binary_size(term :: term, limit :: v[non_neg_integer]) :: integer do
case term do
b when is_bitstring(b) -> limit - byte_size(b)
t when is_tuple(t) -> reduce_while_positive(Tuple.to_list(t), limit)
l when is_list(l) -> reduce_while_positive(l, limit)
m when is_map(m) -> reduce_while_positive(Map.to_list(m), limit) # including non-enumerable structs
_ -> limit
end
end
defunp reduce_while_positive(list :: v[[term]], limit :: v[non_neg_integer]) :: integer do
Enum.reduce_while(list, limit, fn(e, l1) ->
l2 = limit_minus_total_binary_size(e, l1)
if l2 < 0, do: {:halt, l2}, else: {:cont, l2}
end)
end
end
# (end of lib/util/term_util.ex)
defmodule Spreedly do
@moduledoc """
An Elixir client implementation of the Spreedly API.
For more info visit the [Spreedly API docs](https://docs.spreedly.com/reference/api/v1/)
for a detailed listing of available API methods.
## Usage
API interactions happen with a `Spreedly.Environment`.
iex> env = Spreedly.Environment.new(environment_key, access_secret)
Once you have an environment, you can use it to interact with the API.
### Run a purchase using a credit card
You can pattern match on the response.
iex> case Spreedly.purchase(env, "R8AKGmYwkZrrj2BpWcPge", "RjTFFZQp4MrH2HJNfPwK", 2344) do
...>   {:ok, %{succeeded: true}} ->
...>     IO.puts "Success!"
...>   {:ok, %{succeeded: false, message: msg}} ->
...>     IO.puts "Declined: \#{msg}"
...>   {:error, reason} ->
...>     IO.inspect reason
...> end
### Show a Transaction
iex> Spreedly.show_transaction(env, "7f6837d1d22e049f8a47a8cc1fa9")
{:ok,
%{created_at: "2016-01-10T16:36:14Z", currency_code: nil, description: nil,
...
state: "gateway_processing_failed", succeeded: false,
token: "7f6837d1d22e049f8a47a8cc1fa9", transaction_type: "Verification",
updated_at: "2016-01-10T16:36:14Z"}}
iex> Spreedly.find_transaction(env, "NonExistentToken")
{:error, "Unable to find the transaction NonExistentToken."}
"""
import Spreedly.{Base, Path, RequestBody}
alias Spreedly.Environment
@spec add_gateway(Environment.t(), String.t(), map()) :: {:ok, any} | {:error, any}
def add_gateway(env, gateway_type, gateway_params \\ %{}) do
post_request(env, add_gateway_path(), add_gateway_body(gateway_type, gateway_params))
end
@spec add_receiver(Environment.t(), String.t(), Keyword.t()) :: {:ok, any} | {:error, any}
def add_receiver(env, receiver_type, options \\ []) do
post_request(env, add_receiver_path(), add_receiver_body(receiver_type, options))
end
@spec add_credit_card(Environment.t(), Keyword.t()) :: {:ok, any} | {:error, any}
def add_credit_card(env, options) do
post_request(env, add_payment_method_path(), add_credit_card_body(options))
end
@spec retain_payment_method(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def retain_payment_method(env, token) do
put_request(env, retain_payment_method_path(token))
end
@spec redact_gateway(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def redact_gateway(env, token) do
put_request(env, redact_gateway_method_path(token))
end
@spec redact_payment_method(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def redact_payment_method(env, token) do
put_request(env, redact_payment_method_path(token))
end
@spec store_payment_method(Environment.t(), String.t(), String.t()) :: {:ok, any} | {:error, any}
def store_payment_method(env, gateway_token, payment_method_token) do
post_request(env, store_payment_method_path(gateway_token), store_payment_method_body(payment_method_token))
end
@doc """
Make a purchase for the provided gateway token and payment method token
with optional request body data specified as a keyword list.
Amount should be provided as a positive integer in cents.
## Examples
purchase(env, "gateway_token", "payment_method_token", 100)
purchase(env, "gateway_token", "payment_method_token", 100, "USD", [order_id: "44", description: "My purchase"])
"""
@spec purchase(Environment.t(), String.t(), String.t(), pos_integer, String.t(), Keyword.t()) ::
{:ok, any} | {:error, any}
def purchase(env, gateway_token, payment_method_token, amount, currency_code \\ "USD", options \\ []) do
post_request(
env,
purchase_path(gateway_token),
auth_or_purchase_body(payment_method_token, amount, currency_code, options)
)
end
@doc """
Authorize a payment method to be charged a specific amount for the provided
gateway token with optional request body data specified as a keyword list.
Amount should be provided as a positive integer in cents.
## Examples
authorization(env, "gateway_token", "payment_method_token", 100)
authorization(env, "gateway_token", "payment_method_token", 100, "USD", [order_id: "44", description: "My auth"])
"""
@spec authorization(Environment.t(), String.t(), String.t(), pos_integer, String.t(), Keyword.t()) ::
{:ok, any} | {:error, any}
def authorization(env, gateway_token, payment_method_token, amount, currency_code \\ "USD", options \\ []) do
post_request(
env,
authorization_path(gateway_token),
auth_or_purchase_body(payment_method_token, amount, currency_code, options)
)
end
@spec capture(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def capture(env, transaction_token) do
post_request(env, capture_path(transaction_token))
end
@spec void(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def void(env, transaction_token) do
post_request(env, void_path(transaction_token))
end
@spec credit(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def credit(env, transaction_token) do
post_request(env, credit_path(transaction_token))
end
@spec credit(Environment.t(), String.t(), pos_integer, String.t()) :: {:ok, any} | {:error, any}
def credit(env, transaction_token, amount, currency_code) do
post_request(env, credit_path(transaction_token), credit_body(amount, currency_code))
end
@doc """
Determine if a credit card is a chargeable card and available for purchases,
with optional request body data specified as a keyword list.
## Examples
verify(env, "gateway_token", "payment_method_token")
verify(env, "gateway_token", "payment_method_token", "USD", [retain_on_success: true])
"""
@spec verify(Environment.t(), String.t(), String.t(), String.t() | nil, Keyword.t()) :: {:ok, any} | {:error, any}
def verify(env, gateway_token, payment_method_token, currency_code \\ nil, options \\ []) do
post_request(env, verify_path(gateway_token), verify_body(payment_method_token, currency_code, options))
end
@spec dispatch(Environment.t(), String.t(), String.t(), pos_integer, String.t(), Keyword.t()) :: {:ok, any} | {:error, any}
def dispatch(env, gateway_token, payment_method_token, amount, currency_code \\ "USD", options \\ []) do
post_request(
env,
dispatch_path(),
dispatch_body(gateway_token, payment_method_token, amount, currency_code, options)
)
end
@spec show_gateway(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def show_gateway(env, gateway_token) do
get_request(env, show_gateway_path(gateway_token))
end
@spec show_receiver(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def show_receiver(env, receiver_token) do
get_request(env, show_receiver_path(receiver_token))
end
@spec show_payment_method(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def show_payment_method(env, payment_method_token) do
get_request(env, show_payment_method_path(payment_method_token))
end
@spec show_transaction(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def show_transaction(env, transaction_token) do
get_request(env, show_transaction_path(transaction_token))
end
@spec show_transcript(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def show_transcript(env, transaction_token) do
get_request(env, show_transcript_path(transaction_token), [], &transcript_response/1)
end
@spec show_dispatch(Environment.t(), String.t()) :: {:ok, any} | {:error, any}
def show_dispatch(env, dispatch_token) do
get_request(env, show_dispatch_path(dispatch_token))
end
@doc """
List transactions for the provided payment method token with
optional query params specified as a keyword list.
## Examples
list_payment_method_transactions(env, "token")
list_payment_method_transactions(env, "token", [order: :desc, since_token: "token"])
"""
@spec list_payment_method_transactions(Environment.t(), String.t(), Keyword.t()) :: {:ok, any} | {:error, any}
def list_payment_method_transactions(env, payment_method_token, params \\ []) do
get_request(env, list_payment_method_transactions_path(payment_method_token), params)
end
@doc """
List transactions for the provided gateway token with
optional query params specified as a keyword list.
## Examples
list_gateway_transactions(env, "token")
list_gateway_transactions(env, "token", order: :desc, since_token: "token"])
"""
@spec list_gateway_transactions(Environment.t(), String.t(), Keyword.t()) :: {:ok, any} | {:error, any}
def list_gateway_transactions(env, gateway_token, params \\ []) do
get_request(env, list_gateway_transactions_path(gateway_token), params)
end
@doc """
Retrieve a list of all transactions for the authenticated environment.
The list of transactions can be ordered and paginated by providing
optional query params specified as a keyword list.
## Params
* `:order` - The order of the returned list. Default is `asc`, which returns
the oldest records first. To list newer records first, use `desc`.
* `:since_token` - The token of the item to start from (e.g., the last token
received in the previous list if iterating through records).
* `:count` - The number of transactions to return. By default returns 20,
maximum allowed is 100.
## Examples
list_transactions(env)
list_transactions(env, order: :desc, since_token: "token", count: 100)
"""
@spec list_transactions(Environment.t(), Keyword.t()) :: {:ok, any} | {:error, any}
def list_transactions(env, params \\ []) do
get_request(env, list_transactions_path(), params)
end
@doc """
Retrieve a list of created gateways in the environment with optional
query params specified as a keyword list.
## Examples
list_created_gateways(env)
list_created_gateways(env, [order: :desc, since_token: "token"])
"""
@spec list_created_gateways(Environment.t(), Keyword.t()) :: {:ok, any} | {:error, any}
def list_created_gateways(env, params \\ []) do
get_request(env, list_created_gateways_path(), params)
end
@doc """
Retrieve a list of all gateways, and their properties, supported by Spreedly.
"""
def list_supported_gateways do
get_request(nil, list_supported_gateways_path())
end
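# The handlers below normalize raw HTTPoison results for transcript requests:
# a 200 body is passed through as-is (no decoding), while transport errors and
# non-200 responses are mapped to {:error, _}.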
defp transcript_response({:error, %HTTPoison.Error{reason: reason}}), do: {:error, reason}
defp transcript_response({:ok, %HTTPoison.Response{status_code: 200, body: body}}) do
{:ok, body}
end
defp transcript_response({:ok, %HTTPoison.Response{status_code: _, body: body}}), do: {:error, body}
end
# --- lib/spreedly.ex ---
defmodule ChallengeGov.Challenges.Challenge do
@moduledoc """
Challenge schema
"""
use Ecto.Schema
import Ecto.Changeset
alias ChallengeGov.Accounts.User
alias ChallengeGov.Agencies.Agency
alias ChallengeGov.Challenges
alias ChallengeGov.Challenges.ChallengeOwner
alias ChallengeGov.Challenges.FederalPartner
alias ChallengeGov.Challenges.NonFederalPartner
alias ChallengeGov.Challenges.Phase
alias ChallengeGov.Challenges.TimelineEvent
alias ChallengeGov.Submissions.SubmissionExport
alias ChallengeGov.SupportingDocuments.Document
alias ChallengeGov.Timeline.Event
@type t :: %__MODULE__{}
schema "challenges" do
field(:uuid, Ecto.UUID, autogenerate: true)
# Associations
belongs_to(:user, User)
belongs_to(:agency, Agency)
belongs_to(:sub_agency, Agency)
has_many(:events, Event, on_replace: :delete, on_delete: :delete_all)
has_many(:supporting_documents, Document, on_delete: :delete_all)
has_many(:challenge_owners, ChallengeOwner, on_delete: :delete_all)
has_many(:challenge_owner_users, through: [:challenge_owners, :user])
has_many(:federal_partners, FederalPartner, on_delete: :delete_all)
has_many(:federal_partner_agencies, through: [:federal_partners, :agency])
has_many(:non_federal_partners, NonFederalPartner, on_replace: :delete, on_delete: :delete_all)
has_many(:phases, Phase, on_replace: :delete)
has_many(:submission_exports, SubmissionExport)
embeds_many(:timeline_events, TimelineEvent, on_replace: :delete)
# Array fields. Pseudo associations
field(:primary_type, :string)
field(:types, {:array, :string}, default: [])
field(:other_type, :string)
# Images
field(:logo_key, Ecto.UUID)
field(:logo_extension, :string)
field(:winner_image_key, Ecto.UUID)
field(:winner_image_extension, :string)
field(:resource_banner_key, Ecto.UUID)
field(:resource_banner_extension, :string)
# Fields
field(:status, :string, default: "draft")
field(:sub_status, :string)
field(:last_section, :string)
field(:challenge_manager, :string)
field(:challenge_manager_email, :string)
field(:poc_email, :string)
field(:agency_name, :string)
field(:title, :string)
field(:custom_url, :string)
field(:external_url, :string)
field(:tagline, :string)
field(:type, :string)
field(:description, :string)
field(:description_delta, :string)
field(:brief_description, :string)
field(:brief_description_delta, :string)
field(:brief_description_length, :integer, virtual: true)
field(:how_to_enter, :string)
field(:fiscal_year, :string)
field(:start_date, :utc_datetime)
field(:end_date, :utc_datetime)
field(:archive_date, :utc_datetime)
field(:multi_phase, :boolean)
field(:number_of_phases, :string)
field(:phase_descriptions, :string)
field(:phase_dates, :string)
field(:judging_criteria, :string)
field(:prize_type, :string)
field(:prize_total, :integer, default: 0)
field(:non_monetary_prizes, :string)
field(:prize_description, :string)
field(:prize_description_delta, :string)
field(:eligibility_requirements, :string)
field(:eligibility_requirements_delta, :string)
field(:rules, :string)
field(:rules_delta, :string)
field(:terms_and_conditions, :string)
field(:terms_and_conditions_delta, :string)
field(:legal_authority, :string)
field(:faq, :string)
field(:faq_delta, :string)
field(:winner_information, :string)
field(:captured_on, :date)
field(:auto_publish_date, :utc_datetime)
field(:published_on, :date)
field(:rejection_message, :string)
field(:how_to_enter_link, :string)
field(:announcement, :string)
field(:announcement_datetime, :utc_datetime)
field(:gov_delivery_topic, :string)
field(:gov_delivery_subscribers, :integer, default: 0)
field(:upload_logo, :boolean)
field(:is_multi_phase, :boolean)
field(:terms_equal_rules, :boolean)
# Virtual Fields
field(:logo, :string, virtual: true)
field(:imported, :boolean)
# Meta Timestamps
field(:deleted_at, :utc_datetime)
timestamps(type: :utc_datetime_usec)
end
# - Challenge owner starts the form → saves it as a draft - Draft
# - Challenge owner submit for review from PMO - GSA Review
# - (2a) GSA Admin approves the challenge (waiting to be published according to date specified)- Approved
# - (2b) GSA Admin requests edits from Challenge Owner (i.e. date is wrong) - Edits Requested
# - Challenge Owner updates the edits and re-submit to GSA Admin - GSA Review
# - Challenge goes Live - Published
# - Challenge is archived - Archived
# - Published status but updating Winners & FAQ and submitted to GSA Admin - GSA Review
# - Challenge Owner updates an Archived challenge posting - goes to "GSA Review" -> GSA Admin approves -> status back to Archived
@statuses [
%{id: "draft", label: "Draft"},
%{id: "gsa_review", label: "GSA review"},
%{id: "approved", label: "Approved"},
%{id: "edits_requested", label: "Edits requested"},
%{id: "unpublished", label: "Unpublished"},
%{id: "published", label: "Published"},
%{id: "archived", label: "Archived"}
]
@doc """
List of all challenge statuses
"""
def statuses(), do: @statuses
def status_ids() do
Enum.map(@statuses, & &1.id)
end
@doc """
Sub statuses that a published challenge can have
"""
@sub_statuses [
"open",
"closed",
"archived"
]
def sub_statuses(), do: @sub_statuses
@challenge_types [
"Software and apps",
"Creative (multimedia & design)",
"Ideas",
"Technology demonstration and hardware",
"Nominations",
"Business plans",
"Analytics, visualizations, algorithms",
"Scientific"
]
@doc """
List of all challenge types
"""
def challenge_types(), do: @challenge_types
@legal_authority [
"America COMPETES",
"Agency Prize Authority - DOT",
"Direct Prize Authority",
"Direct Prize Authority - DOD",
"Direct Prize Authority - DOE",
"Direct Prize Authority - USAID",
"Space Act",
"Grants and Cooperative Agreements",
"Necessary Expense Doctrine",
"Authority to Provide Non-Monetary Support to Prize Competitions",
"Procurement Authority",
"Other Transactions Authority",
"Agency Partnership Authority",
"Public-Private Partnership Authority",
"Other"
]
@doc """
List of all legal authority options
"""
def legal_authority(), do: @legal_authority
@sections [
%{id: "general", label: "General Info"},
%{id: "details", label: "Details"},
%{id: "timeline", label: "Timeline"},
%{id: "prizes", label: "Prizes"},
%{id: "rules", label: "Rules"},
%{id: "judging", label: "Judging"},
%{id: "how_to_enter", label: "How to enter"},
%{id: "resources", label: "Resources"},
%{id: "review", label: "Review and submit"}
]
@doc """
List of all valid sections
"""
def sections(), do: @sections
@doc false
def section_index(section) do
sections = sections()
Enum.find_index(sections, fn s -> s.id == section end)
end
@doc false
def curr_section(section) do
sections = sections()
curr_index = section_index(section)
Enum.at(sections, curr_index)
end
@doc false
def next_section(section) do
sections = sections()
curr_index = section_index(section)
if curr_index < length(sections) do
Enum.at(sections, curr_index + 1)
end
end
@doc false
def prev_section(section) do
sections = sections()
curr_index = section_index(section)
if curr_index > 0 do
Enum.at(sections, curr_index - 1)
end
end
@doc false
def to_section(section, action) do
case action do
"next" -> next_section(section)
"back" -> prev_section(section)
_ -> curr_section(section)
end
end
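# e.g. to_section("general", "next") returns the %{id: "details", ...} section
# map (see @sections above).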
def changeset(struct, params) do
struct
|> cast(params, [
:user_id,
:agency_id,
:sub_agency_id,
:status,
:sub_status,
:challenge_manager,
:challenge_manager_email,
:poc_email,
:agency_name,
:title,
:custom_url,
:external_url,
:tagline,
:description,
:description_delta,
:brief_description,
:brief_description_delta,
:brief_description_length,
:how_to_enter,
:fiscal_year,
:start_date,
:end_date,
:multi_phase,
:number_of_phases,
:phase_descriptions,
:phase_dates,
:judging_criteria,
:non_monetary_prizes,
:prize_description,
:prize_description_delta,
:eligibility_requirements,
:eligibility_requirements_delta,
:rules,
:rules_delta,
:terms_and_conditions,
:terms_and_conditions_delta,
:legal_authority,
:faq,
:faq_delta,
:winner_information,
:primary_type,
:types,
:other_type,
:auto_publish_date,
:upload_logo,
:is_multi_phase,
:terms_equal_rules,
:prize_type,
:how_to_enter_link,
:announcement,
:announcement_datetime
])
|> cast_assoc(:non_federal_partners, with: &NonFederalPartner.draft_changeset/2)
|> cast_assoc(:events)
|> cast_assoc(:phases, with: &Phase.draft_changeset/2)
|> validate_timeline_events_draft(params)
|> validate_terms_draft(params)
|> maybe_set_start_end_dates(params)
|> unique_constraint(:custom_url, name: "challenges_custom_url_index")
end
def import_changeset(struct, params) do
struct
|> cast(params, [
:id,
:user_id,
:agency_id,
:status,
:sub_status,
:challenge_manager,
:challenge_manager_email,
:poc_email,
:agency_name,
:title,
:custom_url,
:external_url,
:tagline,
:description,
:brief_description,
:how_to_enter,
:fiscal_year,
:start_date,
:end_date,
:multi_phase,
:number_of_phases,
:phase_descriptions,
:phase_dates,
:judging_criteria,
:prize_total,
:non_monetary_prizes,
:prize_description,
:eligibility_requirements,
:rules,
:terms_and_conditions,
:legal_authority,
:faq,
:winner_information,
:primary_type,
:types,
:other_type,
:auto_publish_date,
:upload_logo,
:is_multi_phase,
:imported
])
|> unique_constraint(:id, name: :challenges_pkey)
|> cast_assoc(:non_federal_partners, with: &NonFederalPartner.draft_changeset/2)
|> cast_assoc(:events)
|> cast_assoc(:phases, with: &Phase.draft_changeset/2)
|> unique_constraint(:custom_url, name: "challenges_custom_url_index")
|> validate_phases(params)
|> maybe_set_start_end_dates(params)
end
def draft_changeset(struct, params = %{"section" => section}, action) do
struct
|> changeset(params)
|> put_change(:status, "draft")
|> put_change(:last_section, to_section(section, action).id)
end
def section_changeset(struct, params = %{"section" => section}, action) do
struct
|> changeset(params)
|> put_change(:last_section, to_section(section, action).id)
|> section_changeset_selector(params)
end
defp section_changeset_selector(struct, params = %{"section" => "general"}),
do: general_changeset(struct, params)
defp section_changeset_selector(struct, params = %{"section" => "details"}),
do: details_changeset(struct, params)
defp section_changeset_selector(struct, params = %{"section" => "timeline"}),
do: timeline_changeset(struct, params)
defp section_changeset_selector(struct, params = %{"section" => "prizes"}),
do: prizes_changeset(struct, params)
defp section_changeset_selector(struct, params = %{"section" => "rules"}),
do: rules_changeset(struct, params)
defp section_changeset_selector(struct, params = %{"section" => "judging"}),
do: judging_changeset(struct, params)
defp section_changeset_selector(struct, params = %{"section" => "how_to_enter"}),
do: how_to_enter_changeset(struct, params)
defp section_changeset_selector(struct, params = %{"section" => "resources"}),
do: resources_changeset(struct, params)
defp section_changeset_selector(struct, params = %{"section" => "review"}),
do: review_changeset(struct, params)
defp section_changeset_selector(struct, _), do: struct
def general_changeset(struct, _params) do
struct
|> validate_required([
:challenge_manager,
:challenge_manager_email,
:poc_email,
:agency_id,
:fiscal_year
])
|> cast_assoc(:non_federal_partners)
|> force_change(:fiscal_year, fetch_field!(struct, :fiscal_year))
|> validate_format(:challenge_manager_email, ~r/.+@.+\..+/)
|> validate_format(:poc_email, ~r/.+@.+\..+/)
|> validate_format(:fiscal_year, ~r/\bFY[0-9]{2}\b/)
end
def details_changeset(struct, params) do
struct
|> validate_required([
:title,
:tagline,
:primary_type,
:brief_description,
:description,
:auto_publish_date,
:upload_logo,
:is_multi_phase
])
|> validate_length(:title, max: 90)
|> validate_length(:tagline, max: 90)
|> validate_rich_text_length(:brief_description, 200)
|> validate_length(:description, max: 4000)
|> validate_length(:other_type, max: 45)
|> validate_inclusion(:primary_type, @challenge_types)
|> maybe_validate_types(params)
|> validate_upload_logo(params)
|> validate_auto_publish_date(params)
|> validate_custom_url(params)
|> validate_phases(params)
end
def validate_rich_text_length(struct, field, length) do
field_length = String.to_existing_atom("#{field}_length")
value = get_field(struct, field_length)
case value do
nil ->
struct
_ ->
if value > length do
add_error(struct, field, "can't be greater than #{length} characters")
else
struct
end
end
end
def timeline_changeset(struct, params) do
struct
|> validate_timeline_events(params)
end
def prizes_changeset(struct, params) do
struct
|> validate_required([
:prize_type
])
|> validate_prizes(params)
|> force_change(:prize_description, fetch_field!(struct, :prize_description))
|> validate_length(:prize_description, max: 1500)
end
def parse_currency(struct, %{"prize_total" => prize_total}) do
case Money.parse(prize_total, :USD) do
{:ok, money} ->
case money.amount <= 0 do
true ->
add_error(struct, :prize_total, "must be more than $0")
false ->
put_change(struct, :prize_total, money.amount)
end
:error ->
add_error(struct, :prize_total, "Invalid currency formatting")
end
end
def rules_changeset(struct, params) do
struct
|> validate_required([
:terms_equal_rules,
:eligibility_requirements,
:rules,
:legal_authority
])
|> validate_terms(params)
end
def judging_changeset(struct, _params) do
struct
|> cast_assoc(:phases, with: &Phase.judging_changeset/2)
end
def how_to_enter_changeset(struct, _params) do
struct
|> cast_assoc(:phases, with: &Phase.how_to_enter_changeset/2)
end
def resources_changeset(struct, _params) do
struct
|> force_change(:faq, fetch_field!(struct, :faq))
|> validate_length(:faq, max: 4000)
end
def review_changeset(struct, params) do
struct
|> general_changeset(params)
|> details_changeset(params)
|> timeline_changeset(params)
|> prizes_changeset(params)
|> rules_changeset(params)
|> judging_changeset(params)
|> how_to_enter_changeset(params)
|> resources_changeset(params)
|> submit_changeset()
end
def create_changeset(struct, params, _user) do
struct
|> changeset(params)
|> cast_assoc(:non_federal_partners)
|> put_change(:status, "gsa_review")
|> put_change(:captured_on, Date.utc_today())
|> validate_required([
:user_id,
:agency_id,
:challenge_manager,
:challenge_manager_email,
:poc_email,
:non_federal_partners,
:title,
:tagline,
:description,
:brief_description,
:auto_publish_date
])
|> foreign_key_constraint(:agency)
|> unique_constraint(:custom_url, name: "challenges_custom_url_index")
|> validate_inclusion(:status, status_ids())
|> validate_auto_publish_date(params)
end
def update_changeset(struct, params) do
struct
|> changeset(params)
|> cast_assoc(:non_federal_partners)
|> validate_required([
:user_id,
:agency_id,
:challenge_manager,
:challenge_manager_email,
:poc_email,
:non_federal_partners,
:title,
:tagline,
:description,
:brief_description,
:auto_publish_date
])
|> foreign_key_constraint(:agency)
|> unique_constraint(:custom_url, name: "challenges_custom_url_index")
|> validate_inclusion(:status, status_ids())
|> validate_auto_publish_date(params)
end
def create_announcement_changeset(struct, announcement) do
struct
|> change()
|> put_change(:announcement, announcement)
|> put_change(:announcement_datetime, DateTime.truncate(DateTime.utc_now(), :second))
|> validate_length(:announcement, max: 150)
end
def remove_announcement_changeset(struct) do
struct
|> change()
|> put_change(:announcement, nil)
|> put_change(:announcement_datetime, nil)
end
# Same as update_changeset/2, but additionally lets an admin reassign the owning user.
def admin_update_changeset(struct, params) do
struct
|> cast(params, [:user_id])
|> update_changeset(params)
end
def approve_changeset(struct) do
struct
|> change()
|> put_change(:status, "approved")
|> put_change(:published_on, Date.utc_today())
|> validate_inclusion(:status, status_ids())
end
def publish_changeset(struct) do
struct
|> change()
|> put_change(:status, "published")
|> put_change(:published_on, Date.utc_today())
|> validate_inclusion(:status, status_ids())
end
def unpublish_changeset(struct) do
struct
|> change()
|> put_change(:status, "unpublished")
|> put_change(:published_on, Date.utc_today())
|> validate_inclusion(:status, status_ids())
end
def reject_changeset(struct, message) do
struct
|> change()
|> put_change(:rejection_message, message)
|> put_change(:status, "edits_requested")
|> validate_inclusion(:status, status_ids())
end
def submit_changeset(struct) do
struct
|> change()
|> put_change(:status, "gsa_review")
|> validate_inclusion(:status, status_ids())
end
# Image changesets
def logo_changeset(struct, key, extension) do
struct
|> change()
|> put_change(:logo_key, key)
|> put_change(:logo_extension, extension)
end
def winner_image_changeset(struct, key, extension) do
struct
|> change()
|> put_change(:winner_image_key, key)
|> put_change(:winner_image_extension, extension)
end
def resource_banner_changeset(struct, key, extension) do
struct
|> change()
|> put_change(:resource_banner_key, key)
|> put_change(:resource_banner_extension, extension)
end
defp maybe_validate_types(struct, %{"types" => types}) do
Enum.reduce(types, struct, fn type, struct ->
if type != "" and !Enum.member?(@challenge_types, type) do
add_error(struct, :types, "A value selected for an optional challenge type is invalid")
else
struct
end
end)
end
defp maybe_validate_types(struct, _params), do: struct
defp validate_upload_logo(struct, params = %{"upload_logo" => "true"}),
do: validate_logo(struct, params)
defp validate_upload_logo(struct, %{"upload_logo" => "false"}) do
struct
|> put_change(:logo_key, nil)
|> put_change(:logo_extension, nil)
end
defp validate_upload_logo(struct, _params), do: struct
defp validate_logo(struct, %{"logo" => logo}) when is_nil(logo),
do: add_error(struct, :logo, "Must upload a logo")
defp validate_logo(struct, %{"logo" => _logo}), do: struct
defp validate_logo(struct = %{data: %{logo_key: logo_key}}, _params) when is_nil(logo_key),
do: add_error(struct, :logo, "Must upload a logo")
defp validate_logo(struct, _params), do: struct
# defp validate_start_and_end_dates(struct, params) do
# with {:ok, start_date} <- Map.fetch(params, "start_date"),
# {:ok, end_date} <- Map.fetch(params, "end_date"),
# {:ok, start_date} <- Timex.parse(start_date, "{ISO:Extended}"),
# {:ok, end_date} <- Timex.parse(end_date, "{ISO:Extended}"),
# 1 <- Timex.compare(end_date, start_date) do
# struct
# else
# tc when tc == -1 or tc == 0 ->
# add_error(struct, :end_date, "must come after start date")
# _ ->
# add_error(struct, :start_and_end_date, "start and end date are required")
# end
# end
defp validate_auto_publish_date(struct, %{"auto_publish_date" => date, "challenge_id" => id})
when not is_nil(date) do
{:ok, date} = Timex.parse(date, "{ISO:Extended}")
{:ok, %{status: status}} = Challenges.get(id)
if status === "published", do: struct, else: check_auto_publish_date(struct, date)
end
defp validate_auto_publish_date(
struct = %{data: %{auto_publish_date: date, status: status}},
_params
)
when not is_nil(date) do
case status do
"published" ->
struct
_ ->
check_auto_publish_date(struct, date)
end
end
defp validate_auto_publish_date(struct, _params) do
struct
end
defp check_auto_publish_date(struct, date) do
now = Timex.now()
case Timex.compare(date, now) do
1 ->
struct
tc when tc == -1 or tc == 0 ->
add_error(struct, :auto_publish_date, "must be in the future")
_error ->
add_error(struct, :auto_publish_date, "is required")
end
end
defp validate_custom_url(struct, params) do
custom_url = Map.get(params, "custom_url")
challenge_title = Map.get(params, "title")
cond do
custom_url != "" && custom_url != nil ->
put_change(struct, :custom_url, create_custom_url_slug(custom_url))
challenge_title != "" && challenge_title != nil ->
put_change(struct, :custom_url, create_custom_url_slug(challenge_title))
true ->
struct
end
end
defp create_custom_url_slug(value) do
value
|> String.trim()
|> String.downcase()
|> String.replace(" ", "-")
end
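# e.g. create_custom_url_slug("  My Great Challenge ") -> "my-great-challenge"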
defp maybe_set_start_end_dates(struct, %{"phases" => phases}) do
struct
|> set_start_date(phases)
|> set_end_date(phases)
|> set_archive_date(phases)
|> put_change(:sub_status, nil)
end
defp maybe_set_start_end_dates(struct, _params), do: struct
defp set_start_date(struct, phases) do
if Enum.any?(phases, fn {_, phase} -> dates_exist?(phase) end) do
{_, start_phase} =
phases
|> Enum.filter(fn {_, p} ->
p["open_to_submissions"] === "true" or p["open_to_submissions"] === true
end)
|> Enum.min_by(fn {_, p} -> p["start_date"] end)
{:ok, start_date} =
start_phase
|> Map.fetch!("start_date")
|> Timex.parse("{ISO:Extended}")
put_change(struct, :start_date, DateTime.truncate(start_date, :second))
else
struct
end
end
defp set_end_date(struct, phases) do
if Enum.any?(phases, fn {_, phase} -> dates_exist?(phase) end) do
{_, end_phase} =
phases
|> Enum.filter(fn {_, p} ->
p["open_to_submissions"] === "true" or p["open_to_submissions"] === true
end)
|> Enum.max_by(fn {_, p} -> p["end_date"] end)
{:ok, end_date} =
end_phase
|> Map.fetch!("end_date")
|> Timex.parse("{ISO:Extended}")
put_change(struct, :end_date, DateTime.truncate(end_date, :second))
else
struct
end
end
defp set_archive_date(struct, phases) do
if Enum.any?(phases, fn {_, phase} -> dates_exist?(phase) end) do
{_, end_phase} =
phases
|> Enum.max_by(fn {_, p} -> p["end_date"] end)
{:ok, end_date} =
end_phase
|> Map.fetch!("end_date")
|> Timex.parse("{ISO:Extended}")
put_change(struct, :archive_date, DateTime.truncate(end_date, :second))
else
struct
end
end
defp dates_exist?(%{"open_to_submissions" => "true", "start_date" => "", "end_date" => ""}),
do: false
defp dates_exist?(%{"open_to_submissions" => "true", "start_date" => "", "end_date" => _}),
do: false
defp dates_exist?(%{"open_to_submissions" => "true", "start_date" => _, "end_date" => ""}),
do: false
defp dates_exist?(%{"open_to_submissions" => true, "start_date" => "", "end_date" => ""}),
do: false
defp dates_exist?(%{"open_to_submissions" => true, "start_date" => "", "end_date" => _}),
do: false
defp dates_exist?(%{"open_to_submissions" => true, "start_date" => _, "end_date" => ""}),
do: false
defp dates_exist?(%{"open_to_submissions" => "true", "start_date" => _, "end_date" => _}),
do: true
defp dates_exist?(%{"open_to_submissions" => true, "start_date" => _, "end_date" => _}),
do: true
defp dates_exist?(_phase), do: false
defp validate_phases(struct, %{"is_multi_phase" => "true", "phases" => phases}) do
struct = cast_assoc(struct, :phases, with: &Phase.multi_phase_changeset/2)
phases
|> Enum.map(fn {index, phase} ->
overlap_check =
phases
|> Enum.reject(fn {i, _p} -> i === index end)
|> Enum.map(fn {_i, p} ->
date_range_overlaps(phase, p)
end)
|> Enum.any?()
!overlap_check && validate_phase_start_and_end(phase)
end)
|> Enum.all?()
|> case do
true ->
struct
false ->
add_error(
struct,
:phase_dates,
"Please check your phase dates for overlaps or invalid date ranges"
)
end
end
defp validate_phases(struct, %{"is_multi_phase" => "false", "phases" => phases}) do
struct = cast_assoc(struct, :phases, with: &Phase.save_changeset/2)
{_index, phase} = Enum.at(phases, 0)
phase
|> validate_phase_start_and_end
|> case do
true ->
struct
false ->
add_error(
struct,
:phase_dates,
"Please make sure you end date comes after your start date"
)
end
end
defp validate_phases(struct, _params), do: struct
defp validate_phase_start_and_end(%{"start_date" => "", "end_date" => ""}), do: false
defp validate_phase_start_and_end(%{"start_date" => start_date, "end_date" => end_date}) do
with {:ok, start_date} <- Timex.parse(start_date, "{ISO:Extended}"),
{:ok, end_date} <- Timex.parse(end_date, "{ISO:Extended}") do
Timex.compare(start_date, end_date) < 0
else
_ -> false
end
end
defp validate_phase_start_and_end(_phase), do: false
# Returns true when the two phases' date ranges overlap, false otherwise.
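# (Illustrative: for a = Jan 1..Jan 31 and b = Jan 15..Feb 15, b_start falls
# within a's range, so the first condition below is true and the phases are
# flagged as overlapping.)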
defp date_range_overlaps(%{"start_date" => a_start, "end_date" => a_end}, %{
"start_date" => b_start,
"end_date" => b_end
}) do
with {:ok, a_start} <- Timex.parse(a_start, "{ISO:Extended}"),
{:ok, a_end} <- Timex.parse(a_end, "{ISO:Extended}"),
{:ok, b_start} <- Timex.parse(b_start, "{ISO:Extended}"),
{:ok, b_end} <- Timex.parse(b_end, "{ISO:Extended}") do
if (Timex.compare(a_start, b_start) <= 0 && Timex.compare(b_start, a_end) <= 0) ||
(Timex.compare(a_start, b_end) <= 0 && Timex.compare(b_end, a_end) <= 0) ||
(Timex.compare(b_start, a_start) < 0 && Timex.compare(a_end, b_end) < 0) do
true
else
false
end
else
_ ->
false
end
end
defp date_range_overlaps(_, _), do: true
defp validate_timeline_events(struct, %{"timeline_events" => ""}),
do: put_change(struct, :timeline_events, [])
defp validate_timeline_events(struct, %{"timeline_events" => _timeline_events}),
do:
cast_embed(struct, :timeline_events,
with: {TimelineEvent, :save_changeset, [Challenges.find_start_date(struct.data)]}
)
defp validate_timeline_events(struct, _), do: struct
defp validate_timeline_events_draft(struct, %{"timeline_events" => ""}),
do: put_change(struct, :timeline_events, [])
defp validate_timeline_events_draft(struct, %{"timeline_events" => _timeline_events}),
do: cast_embed(struct, :timeline_events, with: &TimelineEvent.draft_changeset/2)
defp validate_timeline_events_draft(struct, _), do: struct
defp validate_terms(struct, %{
"terms_equal_rules" => "true",
"rules" => rules,
"rules_delta" => rules_delta
}) do
struct
|> put_change(:terms_and_conditions, rules)
|> put_change(:terms_and_conditions_delta, rules_delta)
end
defp validate_terms(struct, _params), do: validate_required(struct, [:terms_and_conditions])
defp validate_terms_draft(struct, %{
"terms_equal_rules" => "true",
"rules" => rules,
"rules_delta" => rules_delta
}) do
struct
|> put_change(:terms_and_conditions, rules)
|> put_change(:terms_and_conditions_delta, rules_delta)
end
defp validate_terms_draft(struct, _params), do: struct
defp validate_prizes(struct, params = %{"prize_type" => "monetary"}) do
struct
|> parse_currency(params)
|> validate_required([:prize_total])
|> put_change(:non_monetary_prizes, nil)
end
defp validate_prizes(struct, _params = %{"prize_type" => "non_monetary"}) do
struct
|> validate_required([:non_monetary_prizes])
|> put_change(:prize_total, 0)
end
defp validate_prizes(struct, params = %{"prize_type" => "both"}) do
struct
|> parse_currency(params)
|> validate_required([
:prize_total,
:non_monetary_prizes
])
end
defp validate_prizes(struct, _params), do: struct
end
# --- lib/challenge_gov/challenges/challenge.ex ---
defmodule BiMap do
@moduledoc """
Bi-directional map implementation backed by two maps.
> In computer science, a bidirectional map, or hash bag, is an associative data
> structure in which the `(key, value)` pairs form a one-to-one correspondence.
> Thus the binary relation is functional in each direction: `value` can also
> act as a key to `key`. A pair `(a, b)` thus provides a unique coupling
> between `a` and `b` so that `b` can be found when `a` is used as a key and
> `a` can be found when `b` is used as a key.
>
> ~[Wikipedia](https://en.wikipedia.org/wiki/Bidirectional_map)
Entries in a bimap do not follow any particular order.
BiMaps do not impose any restriction on the key and value type: anything can be
a key in a bimap, and also anything can be a value. As a bidirectional
key-value structure, bimaps do not allow duplicated keys and values. This means
it is not possible to store `[(A, B), (A, C)]` or `[(X, Z), (Y, Z)]` in
the bimap. If you only need to forbid duplicated key-value pairs, rather than
duplicated keys and values individually, check out `BiMultiMap`.
Keys and values are compared using the exact-equality operator (`===`).
## Example
iex> bm = BiMap.new(a: 1, b: 2)
#BiMap<[a: 1, b: 2]>
iex> BiMap.get(bm, :a)
1
iex> BiMap.get_key(bm, 2)
:b
iex> BiMap.put(bm, :a, 3)
#BiMap<[a: 3, b: 2]>
iex> BiMap.put(bm, :c, 2)
#BiMap<[a: 1, c: 2]>
## Protocols
`BiMap` implements `Enumerable`, `Collectable` and `Inspect` protocols.
"""
@typedoc "Key type"
@type k :: any
@typedoc "Value type"
@type v :: any
@opaque t(k, v) :: %BiMap{
keys: %{optional(k) => v},
values: %{optional(v) => k}
}
@type t :: t(any, any)
defstruct keys: %{}, values: %{}
@doc """
Creates a new bimap.
## Examples
iex> BiMap.new
#BiMap<[]>
"""
@spec new :: t
def new, do: %BiMap{}
@doc """
Creates a bimap from `enumerable` of key-value pairs.
Duplicated pairs are removed; the latest one prevails.
## Examples
iex> BiMap.new([a: "foo", b: "bar"])
#BiMap<[a: "foo", b: "bar"]>
"""
@spec new(Enum.t()) :: t
def new(enumerable)
def new(%BiMap{} = bimap), do: bimap
def new(enum) do
Enum.reduce(enum, new(), fn pair, bimap ->
BiMap.put(bimap, pair)
end)
end
@doc """
Creates a bimap from `enumerable` via transform function returning key-value
pairs.
## Examples
iex> BiMap.new([1, 2, 1], fn x -> {x, x * 2} end)
#BiMap<[{1, 2}, {2, 4}]>
"""
@spec new(Enum.t(), (term -> {k, v})) :: t
def new(enumerable, transform)
def new(enum, f) do
Enum.reduce(enum, new(), fn term, bimap ->
BiMap.put(bimap, f.(term))
end)
end
@doc """
Returns the number of elements in `bimap`.
The size of a bimap is the number of key-value pairs that the map contains.
## Examples
iex> BiMap.size(BiMap.new)
0
iex> bimap = BiMap.new([a: "foo", b: "bar"])
iex> BiMap.size(bimap)
2
"""
@spec size(t) :: non_neg_integer
def size(bimap)
def size(%BiMap{keys: keys}) do
map_size(keys)
end
@doc """
Returns `key ➜ value` mapping of `bimap`.
## Examples
iex> bimap = BiMap.new([a: "foo", b: "bar"])
iex> BiMap.left(bimap)
%{a: "foo", b: "bar"}
"""
@spec left(t) :: %{k => v}
def left(bimap)
def left(%BiMap{keys: keys}), do: keys
@doc """
Returns `value ➜ key` mapping of `bimap`.
## Examples
iex> bimap = BiMap.new([a: "foo", b: "bar"])
iex> BiMap.right(bimap)
%{"foo" => :a, "bar" => :b}
"""
@spec right(t) :: %{v => k}
def right(bimap)
def right(%BiMap{values: values}), do: values
@doc """
Returns all keys from `bimap`.
## Examples
iex> bimap = BiMap.new([a: 1, b: 2])
iex> BiMap.keys(bimap)
[:a, :b]
"""
@spec keys(t) :: [k]
def keys(bimap)
def keys(%BiMap{keys: keys}), do: Map.keys(keys)
@doc """
Returns all values from `bimap`.
## Examples
iex> bimap = BiMap.new([a: 1, b: 2])
iex> BiMap.values(bimap)
[1, 2]
"""
@spec values(t) :: [v]
def values(bimap)
def values(%BiMap{values: values}), do: Map.keys(values)
@doc """
Checks if `bimap` contains `{key, value}` pair.
## Examples
iex> bimap = BiMap.new([a: "foo", b: "bar"])
iex> BiMap.member?(bimap, :a, "foo")
true
iex> BiMap.member?(bimap, :a, "bar")
false
"""
@spec member?(t, k, v) :: boolean
def member?(bimap, key, value)
def member?(%BiMap{keys: keys}, key, value) do
Map.has_key?(keys, key) and keys[key] === value
end
@doc """
Convenience shortcut for `member?/3`.
"""
@spec member?(t, {k, v}) :: boolean
def member?(bimap, kv)
def member?(bimap, {key, value}), do: member?(bimap, key, value)
@doc """
Checks if `bimap` contains `key`.
## Examples
iex> bimap = BiMap.new([a: "foo", b: "bar"])
iex> BiMap.has_key?(bimap, :a)
true
iex> BiMap.has_key?(bimap, :x)
false
"""
@spec has_key?(t, k) :: boolean
def has_key?(bimap, key)
def has_key?(%BiMap{keys: keys}, left) do
Map.has_key?(keys, left)
end
@doc """
Checks if `bimap` contains `value`.
## Examples
iex> bimap = BiMap.new([a: "foo", b: "bar"])
iex> BiMap.has_value?(bimap, "foo")
true
iex> BiMap.has_value?(bimap, "moo")
false
"""
@spec has_value?(t, v) :: boolean
def has_value?(bimap, value)
def has_value?(%BiMap{values: values}, value) do
Map.has_key?(values, value)
end
@doc """
Checks if two bimaps are equal.
Two bimaps are considered to be equal if they contain the same keys and those
keys contain the same values.
## Examples
iex> BiMap.equal?(BiMap.new([a: 1, b: 2]), BiMap.new([b: 2, a: 1]))
true
iex> BiMap.equal?(BiMap.new([a: 1, b: 2]), BiMap.new([b: 1, a: 2]))
false
"""
@spec equal?(t, t) :: boolean
def equal?(bimap1, bimap2)
def equal?(%BiMap{keys: keys1}, %BiMap{keys: keys2}) do
Map.equal?(keys1, keys2)
end
@doc """
Gets the value for specific `key` in `bimap`
If `key` is present in `bimap` with value `value`, then `value` is returned.
Otherwise, `default` is returned (which is `nil` unless specified otherwise).
## Examples
iex> BiMap.get(BiMap.new(), :a)
nil
iex> bimap = BiMap.new([a: 1])
iex> BiMap.get(bimap, :a)
1
iex> BiMap.get(bimap, :b)
nil
iex> BiMap.get(bimap, :b, 3)
3
"""
@spec get(t, k, v) :: v
def get(bimap, key, default \\ nil)
def get(%BiMap{keys: keys}, key, default) do
Map.get(keys, key, default)
end
@doc """
Gets the key for specific `value` in `bimap`
This function is exact mirror of `get/3`.
## Examples
iex> BiMap.get_key(BiMap.new, 1)
nil
iex> bimap = BiMap.new([a: 1])
iex> BiMap.get_key(bimap, 1)
:a
iex> BiMap.get_key(bimap, 2)
nil
iex> BiMap.get_key(bimap, 2, :b)
:b
"""
@spec get_key(t, v, k) :: k
def get_key(bimap, value, default \\ nil)
def get_key(%BiMap{values: values}, value, default) do
Map.get(values, value, default)
end
@doc """
Fetches the value for specific `key` in `bimap`
If `key` is present in `bimap` with value `value`, then `{:ok, value}` is
returned. Otherwise, `:error` is returned.
## Examples
iex> BiMap.fetch(BiMap.new(), :a)
:error
iex> bimap = BiMap.new([a: 1])
iex> BiMap.fetch(bimap, :a)
{:ok, 1}
iex> BiMap.fetch(bimap, :b)
:error
"""
@spec fetch(t, k) :: {:ok, v} | :error
def fetch(bimap, key)
def fetch(%BiMap{keys: keys}, key) do
Map.fetch(keys, key)
end
@doc """
Fetches the value for specific `key` in `bimap`.
Raises `ArgumentError` if the key is absent.
## Examples
iex> bimap = BiMap.new([a: 1])
iex> BiMap.fetch!(bimap, :a)
1
"""
@spec fetch!(t, k) :: v
def fetch!(bimap, key)
def fetch!(bimap, key) do
case fetch(bimap, key) do
{:ok, value} -> value
:error -> raise ArgumentError, "key #{inspect(key)} not found in: #{inspect(bimap)}"
end
end
@doc """
Fetches the key for specific `value` in `bimap`
This function is exact mirror of `fetch/2`.
## Examples
iex> BiMap.fetch_key(BiMap.new, 1)
:error
iex> bimap = BiMap.new([a: 1])
iex> BiMap.fetch_key(bimap, 1)
{:ok, :a}
iex> BiMap.fetch_key(bimap, 2)
:error
"""
@spec fetch_key(t, v) :: {:ok, k} | :error
def fetch_key(bimap, value)
def fetch_key(%BiMap{values: values}, value) do
Map.fetch(values, value)
end
@doc """
Fetches the key for specific `value` in `bimap`.
Raises `ArgumentError` if the value is absent. This function is exact mirror of `fetch!/2`.
## Examples
iex> bimap = BiMap.new([a: 1])
iex> BiMap.fetch_key!(bimap, 1)
:a
"""
@spec fetch_key!(t, v) :: k
def fetch_key!(bimap, value)
def fetch_key!(bimap, value) do
case fetch_key(bimap, value) do
{:ok, key} -> key
:error -> raise ArgumentError, "value #{inspect(value)} not found in: #{inspect(bimap)}"
end
end
@doc """
Inserts `{key, value}` pair into `bimap`.
If either `key` or `value` is already in `bimap`, any overlapping bindings are
deleted.
## Examples
iex> bimap = BiMap.new
#BiMap<[]>
iex> bimap = BiMap.put(bimap, :a, 0)
#BiMap<[a: 0]>
iex> bimap = BiMap.put(bimap, :a, 1)
#BiMap<[a: 1]>
iex> BiMap.put(bimap, :b, 1)
#BiMap<[b: 1]>
"""
@spec put(t, k, v) :: t
def put(%BiMap{} = bimap, key, value) do
%{keys: keys, values: values} = bimap |> BiMap.delete_key(key) |> BiMap.delete_value(value)
%{bimap | keys: Map.put(keys, key, value), values: Map.put(values, value, key)}
end
@doc """
Convenience shortcut for `put/3`
"""
@spec put(t, {k, v}) :: t
def put(bimap, kv)
def put(bimap, {key, value}), do: put(bimap, key, value)
@doc """
Inserts `{key, value}` pair into `bimap` if `key` is not already in `bimap`.
If `key` already exists in `bimap`, `bimap` is returned unchanged.
If `key` does not exist and `value` is already in `bimap`, any overlapping bindings are
deleted.
## Examples
iex> bimap = BiMap.new
#BiMap<[]>
iex> bimap = BiMap.put_new_key(bimap, :a, 0)
#BiMap<[a: 0]>
iex> bimap = BiMap.put_new_key(bimap, :a, 1)
#BiMap<[a: 0]>
iex> BiMap.put_new_key(bimap, :b, 1)
#BiMap<[a: 0, b: 1]>
iex> BiMap.put_new_key(bimap, :c, 1)
#BiMap<[a: 0, c: 1]>
"""
@spec put_new_key(t, k, v) :: t
def put_new_key(%BiMap{} = bimap, key, value) do
if BiMap.has_key?(bimap, key) do
bimap
else
put(bimap, key, value)
end
end
@doc """
Inserts `{key, value}` pair into `bimap` if `value` is not already in `bimap`.
If `value` already exists in `bimap`, `bimap` is returned unchanged.
If `value` does not exist and `key` is already in `bimap`, any overlapping bindings are
deleted.
## Examples
iex> bimap = BiMap.new
#BiMap<[]>
iex> bimap = BiMap.put_new_value(bimap, :a, 0)
#BiMap<[a: 0]>
iex> bimap = BiMap.put_new_value(bimap, :a, 1)
#BiMap<[a: 1]>
iex> BiMap.put_new_value(bimap, :b, 1)
#BiMap<[a: 1]>
iex> BiMap.put_new_value(bimap, :c, 2)
#BiMap<[a: 1, c: 2]>
"""
@spec put_new_value(t, k, v) :: t
def put_new_value(%BiMap{} = bimap, key, value) do
if BiMap.has_value?(bimap, value) do
bimap
else
put(bimap, key, value)
end
end
@doc """
Deletes `{key, value}` pair from `bimap`.
If the `key` does not exist, or `value` does not match, returns `bimap`
unchanged.
## Examples
iex> bimap = BiMap.new([a: 1, b: 2])
iex> BiMap.delete(bimap, :b, 2)
#BiMap<[a: 1]>
iex> BiMap.delete(bimap, :c, 3)
#BiMap<[a: 1, b: 2]>
iex> BiMap.delete(bimap, :b, 3)
#BiMap<[a: 1, b: 2]>
"""
@spec delete(t, k, v) :: t
def delete(%BiMap{keys: keys, values: values} = bimap, key, value) do
case Map.fetch(keys, key) do
{:ok, ^value} ->
%{bimap | keys: Map.delete(keys, key), values: Map.delete(values, value)}
_ ->
bimap
end
end
@doc """
Deletes `{key, _}` pair from `bimap`.
If the `key` does not exist, returns `bimap` unchanged.
## Examples
iex> bimap = BiMap.new([a: 1, b: 2])
iex> BiMap.delete_key(bimap, :b)
#BiMap<[a: 1]>
iex> BiMap.delete_key(bimap, :c)
#BiMap<[a: 1, b: 2]>
"""
@spec delete_key(t, k) :: t
def delete_key(%BiMap{keys: keys, values: values} = bimap, key) do
case Map.fetch(keys, key) do
{:ok, value} ->
%{bimap | keys: Map.delete(keys, key), values: Map.delete(values, value)}
:error ->
bimap
end
end
@doc """
Deletes `{_, value}` pair from `bimap`.
If the `value` does not exist, returns `bimap` unchanged.
## Examples
iex> bimap = BiMap.new([a: 1, b: 2])
iex> BiMap.delete_value(bimap, 2)
#BiMap<[a: 1]>
iex> BiMap.delete_value(bimap, 3)
#BiMap<[a: 1, b: 2]>
"""
@spec delete_value(t, v) :: t
def delete_value(%BiMap{keys: keys, values: values} = bimap, value) do
case Map.fetch(values, value) do
{:ok, key} ->
%{bimap | keys: Map.delete(keys, key), values: Map.delete(values, value)}
:error ->
bimap
end
end
@doc """
Convenience shortcut for `delete/3`.
"""
@spec delete(t, {k, v}) :: t
def delete(bimap, kv)
def delete(bimap, {key, value}), do: delete(bimap, key, value)
@doc """
Returns list of unique key-value pairs in `bimap`.
## Examples
iex> bimap = BiMap.new([a: "foo", b: "bar"])
iex> BiMap.to_list(bimap)
[a: "foo", b: "bar"]
"""
@spec to_list(t) :: [{k, v}]
def to_list(bimap)
def to_list(%BiMap{keys: keys}) do
Map.to_list(keys)
end
defimpl Enumerable do
def reduce(bimap, acc, fun) do
Enumerable.List.reduce(BiMap.to_list(bimap), acc, fun)
end
def member?(bimap, val) do
{:ok, BiMap.member?(bimap, val)}
end
def count(bimap) do
{:ok, BiMap.size(bimap)}
end
def slice(_bimap) do
{:error, __MODULE__}
end
end
defimpl Collectable do
def into(original) do
{original,
fn
bimap, {:cont, pair} -> BiMap.put(bimap, pair)
bimap, :done -> bimap
_, :halt -> :ok
end}
end
end
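# With `Collectable` implemented, a bimap can be built from any enumerable of
# pairs (illustrative):
#
#     iex> Enum.into([a: 1, b: 2], BiMap.new())
#     #BiMap<[a: 1, b: 2]>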
defimpl Inspect do
import Inspect.Algebra
def inspect(bimap, opts) do
concat(["#BiMap<", Inspect.List.inspect(BiMap.to_list(bimap), opts), ">"])
end
end
end
# --- lib/bimap.ex ---
defmodule OpentelemetryTelemetry do
@moduledoc """
`OpentelemetryTelemetry` provides conveniences for leveraging `telemetry`
events for `OpenTelemetry` bridge libraries.
## OpenTelemetry Contexts
`opentelemetry` does not automatically set current span context when ending
another span. Since `telemetry` events are executed in separate handlers with
no shared context, correlating individual events requires a mechanism to do so.
Additionally, when ending telemetry-based spans, the user must set the correct
parent context back as the current context. This ensures sibling spans are
correctly correlated to the shared parent span.
This library provides helper functions to manage contexts automatically with
`start_telemetry_span/4`, `set_current_telemetry_span/2`, and `end_telemetry_span/2`
to give bridge library authors a mechanism for working with these challenges. Once
`start_telemetry_span/4` or `set_current_telemetry_span/2` are called, users
can use all of `OpenTelemetry` as normal. By providing the application tracer id
and the event's metadata, the provided span functions will identify and manage
span contexts automatically.
### Example Telemetry Event Handlers
```
def handle_event(_event,
%{system_time: start_time},
metadata,
%{type: :start, tracer_id: tracer_id, span_name: name}) do
start_opts = %{start_time: start_time}
OpentelemetryTelemetry.start_telemetry_span(tracer_id, name, metadata, start_opts)
:ok
end
def handle_event(_event,
%{duration: duration},
metadata,
%{type: :stop, tracer_id: tracer_id}) do
OpentelemetryTelemetry.set_current_telemetry_span(tracer_id, metadata)
OpenTelemetry.Tracer.set_attribute(:duration, duration)
OpentelemetryTelemetry.end_telemetry_span(tracer_id, metadata)
:ok
end
def handle_event(_event,
%{duration: duration},
%{kind: kind, reason: reason, stacktrace: stacktrace} = metadata,
%{type: :exception, tracer_id: tracer_id}) do
ctx = OpentelemetryTelemetry.set_current_telemetry_span(tracer_id, metadata)
status = OpenTelemetry.status(:error, inspect(reason))
OpenTelemetry.Span.record_exception(ctx, kind, reason, stacktrace, [duration: duration])
OpenTelemetry.Tracer.set_status(status)
OpentelemetryTelemetry.end_telemetry_span(tracer_id, metadata)
:ok
end
def handle_event(_event, _measurements, _metadata, _config), do: :ok
```
### Limitations
Span contexts are currently stored in the process dictionary, so spans can only
be correlated within a single process at this time. This covers the primary use
case where library authors have implemented `telemetry:with_span` or the pattern
established in said function. Non-library authors should use opentelemetry directly
wherever possible.
If the `event_metadata` includes a `telemetry_span_context` (introduced in telemetry
`v0.4.3`), contexts are correlated by the `telemetry_span_context` id to guarantee
the correct otel span context. For earlier versions of `telemetry`, span contexts
are stored in a stack per `tracer_id` to lessen the likelihood of inadvertently
closing the wrong span.
"""
@typedoc """
A span ctx for a telemetry-based span.
"""
@type telemetry_span_ctx() :: :opentelemetry.span_ctx()
@typedoc """
The parent span ctx for a telemetry-based span. This is what the current span ctx was
at the time of starting a telemetry-based span.
"""
@type parent_span_ctx() :: :opentelemetry.span_ctx()
@type ctx_set() :: {parent_span_ctx(), telemetry_span_ctx()}
@typep tracer_id() :: atom()
@doc """
Start a telemetry-based span.
"""
@spec start_telemetry_span(
tracer_id(),
:opentelemetry.span_name(),
:telemetry.event_metadata(),
OpenTelemetry.Span.start_opts()
) :: OpenTelemetry.span_ctx()
defdelegate start_telemetry_span(tracer_id, span_name, event_metadata, start_opts),
to: :otel_telemetry
@doc """
Set the current span ctx based on the tracer_id and telemetry event metadata.
"""
@spec set_current_telemetry_span(tracer_id(), :telemetry.event_metadata()) ::
OpenTelemetry.span_ctx()
defdelegate set_current_telemetry_span(tracer_id, event_metadata), to: :otel_telemetry
@doc """
End a telemetry-based span based on the `tracer_id` and telemetry event metadata
and restore the current ctx to the span's parent ctx.
"""
@spec end_telemetry_span(tracer_id(), :telemetry.event_metadata()) :: :ok
defdelegate end_telemetry_span(tracer_id, event_metadata), to: :otel_telemetry
@doc false
defdelegate trace_application(app), to: :otel_telemetry
@doc false
defdelegate trace_application(app, opts), to: :otel_telemetry
end
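# A minimal sketch of wiring handlers like the moduledoc examples to telemetry
# events (the event names, handler ids, and MyBridge module are illustrative):
#
#     :telemetry.attach("my-query-start", [:my_app, :query, :start], &MyBridge.handle_event/4,
#       %{type: :start, tracer_id: :my_bridge, span_name: "my_app.query"})
#     :telemetry.attach("my-query-stop", [:my_app, :query, :stop], &MyBridge.handle_event/4,
#       %{type: :stop, tracer_id: :my_bridge})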
# --- utilities/opentelemetry_telemetry/lib/opentelemetry_telemetry.ex ---
defmodule Cqrs.Documentation do
@moduledoc false
alias Cqrs.Documentation
defmacro option_docs(options) do
quote bind_quoted: [options: options] do
docs =
options
|> Enum.sort_by(&elem(&1, 0))
|> Enum.map(fn
{name, {:enum, possible_values}, opts} ->
default = Documentation.option_default(opts)
description = Documentation.option_description(opts)
values =
possible_values
|> Enum.map(&"`#{&1}`")
|> Enum.join(" | ")
"* `#{name}`: `:enum`.#{description}Possible values: #{values}. Defaults to `#{inspect(default)}`."
{name, hint, opts} ->
default = Documentation.option_default(opts)
description = Documentation.option_description(opts)
hint =
cond do
is_binary(hint) -> hint
true -> inspect(hint)
end
"* `#{name}` (#{hint}) - #{description}Defaults to `#{inspect(default)}`"
end)
if length(docs) > 0 do
"""
## Options
#{Enum.join(docs, "\n")}
"""
else
""
end
end
end
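# For example (illustrative), an option spec of
#   {:timeout, :integer, default: 5_000, description: "Request timeout"}
# renders as:
#   * `timeout` (:integer) - Request timeout. Defaults to `5000`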
def option_default(opts) do
Keyword.get(opts, :default)
end
def option_description(opts) do
case Keyword.get(opts, :description) do
nil -> ""
desc -> " #{String.trim_trailing(desc, ".")}. "
end
end
defmacro query_binding_docs(bindings) do
quote do
docs =
Enum.map(unquote(bindings), fn {name, schema} ->
"""
* `#{name}` (#{inspect(schema)})
"""
end)
if length(docs) > 0 do
"""
## Named Bindings
#{docs}
"""
else
""
end
end
end
defmacro field_docs(title, fields, required_fields) do
quote bind_quoted: [title: title, fields: fields, required_fields: required_fields] do
{required_fields, optional_fields} =
Enum.split_with(fields, fn {name, _type, _opts} ->
Enum.member?(required_fields, name)
end)
required_field_docs = Documentation.__fields_docs__(required_fields, "Required")
optional_field_docs = Documentation.__fields_docs__(optional_fields, "Optional")
"""
## #{title}
#{required_field_docs}
#{optional_field_docs}
"""
end
end
defmacro __fields_docs__(fields, title) do
quote do
field_docs =
unquote(fields)
|> Enum.reject(fn {_name, _type, opts} -> Keyword.get(opts, :internal, false) end)
|> Enum.sort_by(&elem(&1, 0))
|> Enum.map(fn {name, type, opts} ->
description =
case Keyword.get(opts, :description) do
nil -> ""
desc -> ". " <> String.trim_trailing(desc, ".") <> "."
end
field_type =
case type do
Ecto.Enum -> opts |> Keyword.fetch!(:values) |> Enum.join(" | ")
type when is_tuple(type) -> inspect(type)
_ -> type
end
defaults =
case Keyword.get(opts, :default) do
nil -> nil
default -> "Defaults to `#{inspect(default)}`."
end
"""
* `#{name}` (#{field_type})#{description} #{defaults}
"""
end)
if length(field_docs) > 0 do
"""
### #{unquote(title)}
#{field_docs}
"""
end
end
end
end
# --- lib/cqrs/documentation.ex ---
defmodule AWS.Config do
@moduledoc """
AWS Config
AWS Config provides a way to keep track of the configurations of all the AWS
resources associated with your AWS account.
You can use AWS Config to get the current and historical configurations of each
AWS resource and also to get information about the relationship between the
resources. An AWS resource can be an Amazon Compute Cloud (Amazon EC2) instance,
an Elastic Block Store (EBS) volume, an elastic network Interface (ENI), or a
security group. For a complete list of resources currently supported by AWS
Config, see [Supported AWS Resources](https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html#supported-resources).
You can access and manage AWS Config through the AWS Management Console, the AWS
Command Line Interface (AWS CLI), the AWS Config API, or the AWS SDKs for AWS
Config. This reference guide contains documentation for the AWS Config API and
the AWS CLI commands that you can use to manage AWS Config. The AWS Config API
uses the Signature Version 4 protocol for signing requests. For more information
about how to sign a request with this protocol, see [Signature Version 4 Signing Process](https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
For detailed information about AWS Config features and their associated actions
or commands, as well as how to work with AWS Management Console, see [What Is AWS
Config](https://docs.aws.amazon.com/config/latest/developerguide/WhatIsConfig.html)
in the *AWS Config Developer Guide*.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "Config Service",
api_version: "2014-11-12",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "config",
global?: false,
protocol: "json",
service_id: "Config Service",
signature_version: "v4",
signing_name: "config",
target_prefix: "StarlingDoveService"
}
end
@doc """
Returns the current configuration items for resources that are present in your
AWS Config aggregator.
The operation also returns a list of resources that are not processed in the
current request. If there are no unprocessed resources, the operation returns an
empty `unprocessedResourceIdentifiers` list.
The API does not return results for deleted resources.
The API does not return tags and relationships.
"""
def batch_get_aggregate_resource_config(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "BatchGetAggregateResourceConfig", input, options)
end
@doc """
Returns the current configuration for one or more requested resources.
The operation also returns a list of resources that are not processed in the
current request. If there are no unprocessed resources, the operation returns an
  empty `unprocessedResourceKeys` list.
The API does not return results for deleted resources.
The API does not return any tags for the requested resources. This
  information is filtered out of the `supplementaryConfiguration` section of the API
response.
"""
def batch_get_resource_config(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "BatchGetResourceConfig", input, options)
end
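  # A hedged usage sketch, not part of the generated module. The client
  # construction follows the aws-elixir README, and the `resourceKeys` shape
  # follows the BatchGetResourceConfig API reference; ids are placeholders:
  #
  #     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
  #
  #     input = %{
  #       "resourceKeys" => [
  #         %{"resourceType" => "AWS::EC2::Instance", "resourceId" => "i-0123456789abcdef0"}
  #       ]
  #     }
  #
  #     {:ok, body, _response} = AWS.Config.batch_get_resource_config(client, input)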
@doc """
Deletes the authorization granted to the specified configuration aggregator
account in a specified region.
"""
def delete_aggregation_authorization(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAggregationAuthorization", input, options)
end
@doc """
Deletes the specified AWS Config rule and all of its evaluation results.
AWS Config sets the state of a rule to `DELETING` until the deletion is
complete. You cannot update a rule while it is in this state. If you make a
`PutConfigRule` or `DeleteConfigRule` request for the rule, you will receive a
`ResourceInUseException`.
You can check the state of a rule by using the `DescribeConfigRules` request.
"""
def delete_config_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteConfigRule", input, options)
end
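  # A hedged sketch (assumes a `client` built with `AWS.Client.create/3` as in
  # the sketch above; the rule name is a placeholder):
  #
  #     {:ok, _body, _response} =
  #       AWS.Config.delete_config_rule(client, %{"ConfigRuleName" => "my-config-rule"})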
@doc """
Deletes the specified configuration aggregator and the aggregated data
associated with the aggregator.
"""
def delete_configuration_aggregator(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteConfigurationAggregator", input, options)
end
@doc """
Deletes the configuration recorder.
After the configuration recorder is deleted, AWS Config will not record resource
configuration changes until you create a new configuration recorder.
This action does not delete the configuration information that was previously
recorded. You will be able to access the previously recorded information by
using the `GetResourceConfigHistory` action, but you will not be able to access
this information in the AWS Config console until you create a new configuration
recorder.
"""
def delete_configuration_recorder(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteConfigurationRecorder", input, options)
end
@doc """
Deletes the specified conformance pack and all the AWS Config rules, remediation
actions, and all evaluation results within that conformance pack.
AWS Config sets the conformance pack to `DELETE_IN_PROGRESS` until the deletion
is complete. You cannot update a conformance pack while it is in this state.
"""
def delete_conformance_pack(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteConformancePack", input, options)
end
@doc """
Deletes the delivery channel.
Before you can delete the delivery channel, you must stop the configuration
recorder by using the `StopConfigurationRecorder` action.
"""
def delete_delivery_channel(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteDeliveryChannel", input, options)
end
@doc """
Deletes the evaluation results for the specified AWS Config rule.
You can specify one AWS Config rule per request. After you delete the evaluation
results, you can call the `StartConfigRulesEvaluation` API to start evaluating
your AWS resources against the rule.
"""
def delete_evaluation_results(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteEvaluationResults", input, options)
end
@doc """
Deletes the specified organization config rule and all of its evaluation results
from all member accounts in that organization.
Only a master account and a delegated administrator account can delete an
organization config rule. When calling this API with a delegated administrator,
  you must ensure AWS Organizations `ListDelegatedAdministrators` permissions are
added.
AWS Config sets the state of a rule to DELETE_IN_PROGRESS until the deletion is
complete. You cannot update a rule while it is in this state.
"""
def delete_organization_config_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteOrganizationConfigRule", input, options)
end
@doc """
Deletes the specified organization conformance pack and all of the config rules
and remediation actions from all member accounts in that organization.
Only a master account or a delegated administrator account can delete an
organization conformance pack. When calling this API with a delegated
  administrator, you must ensure AWS Organizations `ListDelegatedAdministrators`
permissions are added.
AWS Config sets the state of a conformance pack to DELETE_IN_PROGRESS until the
deletion is complete. You cannot update a conformance pack while it is in this
state.
"""
def delete_organization_conformance_pack(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteOrganizationConformancePack", input, options)
end
@doc """
Deletes pending authorization requests for a specified aggregator account in a
specified region.
"""
def delete_pending_aggregation_request(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePendingAggregationRequest", input, options)
end
@doc """
Deletes the remediation configuration.
"""
def delete_remediation_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRemediationConfiguration", input, options)
end
@doc """
Deletes one or more remediation exceptions mentioned in the resource keys.
AWS Config generates a remediation exception when a problem occurs executing a
  remediation action for a specific resource. Remediation exceptions block
  auto-remediation until the exception is cleared.
"""
def delete_remediation_exceptions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRemediationExceptions", input, options)
end
@doc """
Records the configuration state for a custom resource that has been deleted.
This API records a new ConfigurationItem with a ResourceDeleted status. You can
retrieve the ConfigurationItems recorded for this resource in your AWS Config
History.
"""
def delete_resource_config(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteResourceConfig", input, options)
end
@doc """
Deletes the retention configuration.
"""
def delete_retention_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRetentionConfiguration", input, options)
end
@doc """
Deletes the stored query for a single AWS account and a single AWS Region.
"""
def delete_stored_query(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteStoredQuery", input, options)
end
@doc """
Schedules delivery of a configuration snapshot to the Amazon S3 bucket in the
specified delivery channel.
After the delivery has started, AWS Config sends the following notifications
using an Amazon SNS topic that you have specified.
* Notification of the start of the delivery.
* Notification of the completion of the delivery, if the delivery
was successfully completed.
* Notification of delivery failure, if the delivery failed.
"""
def deliver_config_snapshot(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeliverConfigSnapshot", input, options)
end
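  # A hedged sketch; `deliveryChannelName` follows the DeliverConfigSnapshot
  # reference, and `configSnapshotId` is the documented response field:
  #
  #     {:ok, %{"configSnapshotId" => snapshot_id}, _response} =
  #       AWS.Config.deliver_config_snapshot(client, %{"deliveryChannelName" => "default"})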
@doc """
Returns a list of compliant and noncompliant rules with the number of resources
for compliant and noncompliant rules.
The results can return an empty result page, but if you have a `nextToken`, the
results are displayed on the next page.
"""
def describe_aggregate_compliance_by_config_rules(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeAggregateComplianceByConfigRules",
input,
options
)
end
@doc """
Returns a list of authorizations granted to various aggregator accounts and
regions.
"""
def describe_aggregation_authorizations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAggregationAuthorizations", input, options)
end
@doc """
Indicates whether the specified AWS Config rules are compliant.
If a rule is noncompliant, this action returns the number of AWS resources that
do not comply with the rule.
A rule is compliant if all of the evaluated resources comply with it. It is
noncompliant if any of these resources do not comply.
If AWS Config has no current evaluation results for the rule, it returns
`INSUFFICIENT_DATA`. This result might indicate one of the following conditions:
* AWS Config has never invoked an evaluation for the rule. To check
whether it has, use the `DescribeConfigRuleEvaluationStatus` action to get the
`LastSuccessfulInvocationTime` and `LastFailedInvocationTime`.
* The rule's AWS Lambda function is failing to send evaluation
results to AWS Config. Verify that the role you assigned to your configuration
recorder includes the `config:PutEvaluations` permission. If the rule is a
custom rule, verify that the AWS Lambda execution role includes the
`config:PutEvaluations` permission.
* The rule's AWS Lambda function has returned `NOT_APPLICABLE` for
all evaluation results. This can occur if the resources were deleted or removed
from the rule's scope.
"""
def describe_compliance_by_config_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeComplianceByConfigRule", input, options)
end
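  # A hedged sketch of scoping the query to specific rules and compliance
  # types (field names per the DescribeComplianceByConfigRule reference; the
  # rule name is a placeholder):
  #
  #     input = %{
  #       "ConfigRuleNames" => ["my-config-rule"],
  #       "ComplianceTypes" => ["NON_COMPLIANT"]
  #     }
  #
  #     AWS.Config.describe_compliance_by_config_rule(client, input)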
@doc """
Indicates whether the specified AWS resources are compliant.
If a resource is noncompliant, this action returns the number of AWS Config
rules that the resource does not comply with.
A resource is compliant if it complies with all the AWS Config rules that
evaluate it. It is noncompliant if it does not comply with one or more of these
rules.
If AWS Config has no current evaluation results for the resource, it returns
`INSUFFICIENT_DATA`. This result might indicate one of the following conditions
about the rules that evaluate the resource:
* AWS Config has never invoked an evaluation for the rule. To check
whether it has, use the `DescribeConfigRuleEvaluationStatus` action to get the
`LastSuccessfulInvocationTime` and `LastFailedInvocationTime`.
* The rule's AWS Lambda function is failing to send evaluation
results to AWS Config. Verify that the role that you assigned to your
configuration recorder includes the `config:PutEvaluations` permission. If the
rule is a custom rule, verify that the AWS Lambda execution role includes the
`config:PutEvaluations` permission.
* The rule's AWS Lambda function has returned `NOT_APPLICABLE` for
all evaluation results. This can occur if the resources were deleted or removed
from the rule's scope.
"""
def describe_compliance_by_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeComplianceByResource", input, options)
end
@doc """
Returns status information for each of your AWS managed Config rules.
The status includes information such as the last time AWS Config invoked the
rule, the last time AWS Config failed to invoke the rule, and the related error
for the last failure.
"""
def describe_config_rule_evaluation_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConfigRuleEvaluationStatus", input, options)
end
@doc """
Returns details about your AWS Config rules.
"""
def describe_config_rules(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConfigRules", input, options)
end
@doc """
Returns status information for sources within an aggregator.
The status includes information about the last time AWS Config verified
authorization between the source account and an aggregator account. In case of a
failure, the status contains the related error code or message.
"""
def describe_configuration_aggregator_sources_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeConfigurationAggregatorSourcesStatus",
input,
options
)
end
@doc """
Returns the details of one or more configuration aggregators.
If the configuration aggregator is not specified, this action returns the
details for all the configuration aggregators associated with the account.
"""
def describe_configuration_aggregators(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConfigurationAggregators", input, options)
end
@doc """
Returns the current status of the specified configuration recorder.
If a configuration recorder is not specified, this action returns the status of
all configuration recorders associated with the account.
Currently, you can specify only one configuration recorder per region in your
account.
"""
def describe_configuration_recorder_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeConfigurationRecorderStatus",
input,
options
)
end
@doc """
Returns the details for the specified configuration recorders.
If the configuration recorder is not specified, this action returns the details
for all configuration recorders associated with the account.
Currently, you can specify only one configuration recorder per region in your
account.
"""
def describe_configuration_recorders(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConfigurationRecorders", input, options)
end
@doc """
Returns compliance details for each rule in that conformance pack.
You must provide exact rule names.
"""
def describe_conformance_pack_compliance(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConformancePackCompliance", input, options)
end
@doc """
  Provides the deployment status of one or more conformance packs.
  If there are no conformance packs, then you will see an empty result.
"""
def describe_conformance_pack_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConformancePackStatus", input, options)
end
@doc """
Returns a list of one or more conformance packs.
"""
def describe_conformance_packs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeConformancePacks", input, options)
end
@doc """
Returns the current status of the specified delivery channel.
If a delivery channel is not specified, this action returns the current status
of all delivery channels associated with the account.
Currently, you can specify only one delivery channel per region in your account.
"""
def describe_delivery_channel_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDeliveryChannelStatus", input, options)
end
@doc """
Returns details about the specified delivery channel.
If a delivery channel is not specified, this action returns the details of all
delivery channels associated with the account.
Currently, you can specify only one delivery channel per region in your account.
"""
def describe_delivery_channels(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeDeliveryChannels", input, options)
end
@doc """
Provides organization config rule deployment status for an organization.
  The status is not considered successful until the organization config rule is
  successfully deployed in all the member accounts, with the exception of
  excluded accounts.
  When you specify the limit and the next token, you receive a paginated response.
  Limit and next token are not applicable if you specify organization config rule
  names. They are only applicable when you request all the organization config
  rules.
"""
def describe_organization_config_rule_statuses(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeOrganizationConfigRuleStatuses",
input,
options
)
end
@doc """
Returns a list of organization config rules.
When you specify the limit and the next token, you receive a paginated response.
  Limit and next token are not applicable if you specify organization config rule
  names. They are only applicable when you request all the organization config
  rules.
"""
def describe_organization_config_rules(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeOrganizationConfigRules", input, options)
end
@doc """
Provides organization conformance pack deployment status for an organization.
  The status is not considered successful until the organization conformance pack
  is successfully deployed in all the member accounts, with the exception of
  excluded accounts.
  When you specify the limit and the next token, you receive a paginated response.
  Limit and next token are not applicable if you specify organization conformance
  pack names. They are only applicable when you request all the organization
  conformance packs.
"""
def describe_organization_conformance_pack_statuses(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeOrganizationConformancePackStatuses",
input,
options
)
end
@doc """
Returns a list of organization conformance packs.
When you specify the limit and the next token, you receive a paginated response.
  Limit and next token are not applicable if you specify organization conformance
  pack names. They are only applicable when you request all the organization
  conformance packs.
"""
def describe_organization_conformance_packs(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"DescribeOrganizationConformancePacks",
input,
options
)
end
@doc """
Returns a list of all pending aggregation requests.
"""
def describe_pending_aggregation_requests(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePendingAggregationRequests", input, options)
end
@doc """
Returns the details of one or more remediation configurations.
"""
def describe_remediation_configurations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRemediationConfigurations", input, options)
end
@doc """
Returns the details of one or more remediation exceptions.
  The response provides a detailed view of a remediation exception for a set of
  resources, including an explanation of the exception and the time when the
  exception will be deleted.
  AWS Config generates a remediation exception when a problem occurs executing a
  remediation action for a specific resource. Remediation exceptions block
  auto-remediation until the exception is cleared.
  When you specify the limit and the next token, you receive a paginated response.
  Limit and next token are not applicable if you request resources in batch; they
  are only applicable when you request all resources.
"""
def describe_remediation_exceptions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRemediationExceptions", input, options)
end
@doc """
Provides a detailed view of a Remediation Execution for a set of resources
including state, timestamps for when steps for the remediation execution occur,
and any error messages for steps that have failed.
When you specify the limit and the next token, you receive a paginated response.
"""
def describe_remediation_execution_status(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRemediationExecutionStatus", input, options)
end
@doc """
Returns the details of one or more retention configurations.
If the retention configuration name is not specified, this action returns the
details for all the retention configurations for that account.
Currently, AWS Config supports only one retention configuration per region in
your account.
"""
def describe_retention_configurations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRetentionConfigurations", input, options)
end
@doc """
  Returns the evaluation results for the specified AWS Config rule for a specific
  resource.
The results indicate which AWS resources were evaluated by the rule, when each
resource was last evaluated, and whether each resource complies with the rule.
The results can return an empty result page. But if you have a `nextToken`, the
results are displayed on the next page.
"""
def get_aggregate_compliance_details_by_config_rule(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetAggregateComplianceDetailsByConfigRule",
input,
options
)
end
@doc """
Returns the number of compliant and noncompliant rules for one or more accounts
and regions in an aggregator.
  The results can return an empty result page, but if you have a `nextToken`, the
results are displayed on the next page.
"""
def get_aggregate_config_rule_compliance_summary(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetAggregateConfigRuleComplianceSummary",
input,
options
)
end
@doc """
Returns the resource counts across accounts and regions that are present in your
AWS Config aggregator.
You can request the resource counts by providing filters and GroupByKey.
For example, if the input contains accountID 12345678910 and region us-east-1 in
filters, the API returns the count of resources in account ID 12345678910 and
region us-east-1. If the input contains ACCOUNT_ID as a GroupByKey, the API
returns resource counts for all source accounts that are present in your
aggregator.
"""
def get_aggregate_discovered_resource_counts(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetAggregateDiscoveredResourceCounts",
input,
options
)
end
@doc """
  Returns the configuration item that is aggregated for your specific resource in a
specific source account and region.
"""
def get_aggregate_resource_config(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAggregateResourceConfig", input, options)
end
@doc """
Returns the evaluation results for the specified AWS Config rule.
The results indicate which AWS resources were evaluated by the rule, when each
resource was last evaluated, and whether each resource complies with the rule.
"""
def get_compliance_details_by_config_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetComplianceDetailsByConfigRule", input, options)
end
@doc """
Returns the evaluation results for the specified AWS resource.
The results indicate which AWS Config rules were used to evaluate the resource,
when each rule was last used, and whether the resource complies with each rule.
"""
def get_compliance_details_by_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetComplianceDetailsByResource", input, options)
end
@doc """
Returns the number of AWS Config rules that are compliant and noncompliant, up
to a maximum of 25 for each.
"""
def get_compliance_summary_by_config_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetComplianceSummaryByConfigRule", input, options)
end
@doc """
Returns the number of resources that are compliant and the number that are
noncompliant.
You can specify one or more resource types to get these numbers for each
resource type. The maximum number returned is 100.
"""
def get_compliance_summary_by_resource_type(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetComplianceSummaryByResourceType", input, options)
end
@doc """
  Returns compliance details of a conformance pack for all AWS resources that are
  monitored by the conformance pack.
"""
def get_conformance_pack_compliance_details(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetConformancePackComplianceDetails",
input,
options
)
end
@doc """
Returns compliance details for the conformance pack based on the cumulative
compliance results of all the rules in that conformance pack.
"""
def get_conformance_pack_compliance_summary(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetConformancePackComplianceSummary",
input,
options
)
end
@doc """
Returns the resource types, the number of each resource type, and the total
number of resources that AWS Config is recording in this region for your AWS
account.
## Example
1. AWS Config is recording three resource types in the US East
(Ohio) Region for your account: 25 EC2 instances, 20 IAM users, and 15 S3
buckets.
2. You make a call to the `GetDiscoveredResourceCounts` action and
specify that you want all resource types.
3. AWS Config returns the following:
* The resource types (EC2 instances, IAM users, and S3
buckets).
* The number of each resource type (25, 20, and 15).
* The total number of all resources (60).
The response is paginated. By default, AWS Config lists 100 `ResourceCount`
objects on each page. You can customize this number with the `limit` parameter.
The response includes a `nextToken` string. To get the next page of results, run
the request again and specify the string for the `nextToken` parameter.
If you make a call to the `GetDiscoveredResourceCounts` action, you might not
immediately receive resource counts in the following situations:
  * You are a new AWS Config customer.
  * You just enabled resource recording.
It might take a few minutes for AWS Config to record and count your resources.
Wait a few minutes and then retry the `GetDiscoveredResourceCounts` action.
"""
def get_discovered_resource_counts(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetDiscoveredResourceCounts", input, options)
end
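  # Many of the actions above page their results with a `nextToken`. A minimal
  # sketch of draining every page, assuming the `{:ok, body, response}` success
  # shape and the `resourceCounts`/`nextToken` keys documented for
  # GetDiscoveredResourceCounts:
  #
  #     defp all_resource_counts(client, input, acc \\ []) do
  #       {:ok, body, _response} =
  #         AWS.Config.get_discovered_resource_counts(client, input)
  #
  #       acc = acc ++ Map.get(body, "resourceCounts", [])
  #
  #       case Map.get(body, "nextToken") do
  #         nil -> acc
  #         token -> all_resource_counts(client, Map.put(input, "nextToken", token), acc)
  #       end
  #     end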
@doc """
Returns detailed status for each member account within an organization for a
given organization config rule.
"""
def get_organization_config_rule_detailed_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetOrganizationConfigRuleDetailedStatus",
input,
options
)
end
@doc """
Returns detailed status for each member account within an organization for a
given organization conformance pack.
"""
def get_organization_conformance_pack_detailed_status(%Client{} = client, input, options \\ []) do
Request.request_post(
client,
metadata(),
"GetOrganizationConformancePackDetailedStatus",
input,
options
)
end
@doc """
Returns a list of configuration items for the specified resource.
The list contains details about each state of the resource during the specified
time interval. If you specified a retention period to retain your
`ConfigurationItems` between a minimum of 30 days and a maximum of 7 years (2557
days), AWS Config returns the `ConfigurationItems` for the specified retention
period.
The response is paginated. By default, AWS Config returns a limit of 10
configuration items per page. You can customize this number with the `limit`
parameter. The response includes a `nextToken` string. To get the next page of
results, run the request again and specify the string for the `nextToken`
parameter.
Each call to the API is limited to span a duration of seven days. It is likely
that the number of records returned is smaller than the specified `limit`. In
such cases, you can make another call, using the `nextToken`.
"""
def get_resource_config_history(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetResourceConfigHistory", input, options)
end
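  # A hedged sketch of pulling recent history for one resource (field names per
  # the GetResourceConfigHistory reference; the resource id is a placeholder):
  #
  #     input = %{
  #       "resourceType" => "AWS::EC2::Instance",
  #       "resourceId" => "i-0123456789abcdef0",
  #       "limit" => 10
  #     }
  #
  #     {:ok, %{"configurationItems" => items}, _response} =
  #       AWS.Config.get_resource_config_history(client, input)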
@doc """
Returns the details of a specific stored query.
"""
def get_stored_query(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetStoredQuery", input, options)
end
@doc """
Accepts a resource type and returns a list of resource identifiers that are
aggregated for a specific resource type across accounts and regions.
A resource identifier includes the resource type, ID, (if available) the custom
resource name, source account, and source region. You can narrow the results to
include only resources that have specific resource IDs, or a resource name, or
source account ID, or source region.
For example, if the input consists of accountID 12345678910 and the region is
us-east-1 for resource type `AWS::EC2::Instance` then the API returns all the
EC2 instance identifiers of accountID 12345678910 and region us-east-1.
"""
def list_aggregate_discovered_resources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListAggregateDiscoveredResources", input, options)
end
@doc """
Accepts a resource type and returns a list of resource identifiers for the
resources of that type.
A resource identifier includes the resource type, ID, and (if available) the
custom resource name. The results consist of resources that AWS Config has
discovered, including those that AWS Config is not currently recording. You can
narrow the results to include only resources that have specific resource IDs or
a resource name.
You can specify either resource IDs or a resource name, but not both, in the
same request.
The response is paginated. By default, AWS Config lists 100 resource identifiers
on each page. You can customize this number with the `limit` parameter. The
response includes a `nextToken` string. To get the next page of results, run the
request again and specify the string for the `nextToken` parameter.
"""
def list_discovered_resources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListDiscoveredResources", input, options)
end
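  # A hedged sketch of listing one page of discovered S3 buckets (field names
  # per the ListDiscoveredResources reference); pass the returned `nextToken`
  # back in `input` to fetch the next page:
  #
  #     input = %{"resourceType" => "AWS::S3::Bucket", "limit" => 100}
  #
  #     {:ok, %{"resourceIdentifiers" => ids} = body, _response} =
  #       AWS.Config.list_discovered_resources(client, input)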
@doc """
Lists the stored queries for a single AWS account and a single AWS Region.
  The default maximum number of queries returned per call is 100.
"""
def list_stored_queries(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListStoredQueries", input, options)
end
@doc """
  Lists the tags for an AWS Config resource.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Authorizes the aggregator account and region to collect data from the source
account and region.
"""
def put_aggregation_authorization(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutAggregationAuthorization", input, options)
end
@doc """
Adds or updates an AWS Config rule for evaluating whether your AWS resources
comply with your desired configurations.
You can use this action for custom AWS Config rules and AWS managed Config
rules. A custom AWS Config rule is a rule that you develop and maintain. An AWS
managed Config rule is a customizable, predefined rule that AWS Config provides.
If you are adding a new custom AWS Config rule, you must first create the AWS
Lambda function that the rule invokes to evaluate your resources. When you use
the `PutConfigRule` action to add the rule to AWS Config, you must specify the
Amazon Resource Name (ARN) that AWS Lambda assigns to the function. Specify the
ARN for the `SourceIdentifier` key. This key is part of the `Source` object,
which is part of the `ConfigRule` object.
If you are adding an AWS managed Config rule, specify the rule's identifier for
the `SourceIdentifier` key. To reference AWS managed Config rule identifiers,
see [About AWS Managed Config Rules](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html).
For any new rule that you add, specify the `ConfigRuleName` in the `ConfigRule`
object. Do not specify the `ConfigRuleArn` or the `ConfigRuleId`. These values
are generated by AWS Config for new rules.
If you are updating a rule that you added previously, you can specify the rule
by `ConfigRuleName`, `ConfigRuleId`, or `ConfigRuleArn` in the `ConfigRule` data
type that you use in this request.
The maximum number of rules that AWS Config supports is 150.
For information about requesting a rule limit increase, see [AWS Config Limits](http://docs.aws.amazon.com/general/latest/gr/aws_service_limits.html#limits_config)
in the *AWS General Reference Guide*.
For more information about developing and using AWS Config rules, see
[Evaluating AWS Resource Configurations with AWS Config](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config.html)
in the *AWS Config Developer Guide*.
"""
def put_config_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutConfigRule", input, options)
end
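  # A hedged sketch of the `ConfigRule` input described above, using an AWS
  # managed rule identifier (assumes a `client` as in earlier sketches; the
  # rule name is a placeholder):
  #
  #     input = %{
  #       "ConfigRule" => %{
  #         "ConfigRuleName" => "required-tags-example",
  #         "Source" => %{
  #           "Owner" => "AWS",
  #           "SourceIdentifier" => "REQUIRED_TAGS"
  #         }
  #       }
  #     }
  #
  #     AWS.Config.put_config_rule(client, input)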
@doc """
Creates and updates the configuration aggregator with the selected source
accounts and regions.
The source account can be individual account(s) or an organization.
  The `accountIds` that you pass replace the existing accounts in the aggregator.
  If you want to add additional accounts into the aggregator, call
  `DescribeConfigurationAggregators` to get the previous accounts and then append
  the new ones.
AWS Config should be enabled in source accounts and regions you want to
aggregate.
If your source type is an organization, you must be signed in to the management
account or a registered delegated administrator and all the features must be
enabled in your organization. If the caller is a management account, AWS Config
calls `EnableAwsServiceAccess` API to enable integration between AWS Config and
AWS Organizations. If the caller is a registered delegated administrator, AWS
Config calls `ListDelegatedAdministrators` API to verify whether the caller is a
valid delegated administrator.
To register a delegated administrator, see [Register a Delegated Administrator](https://docs.aws.amazon.com/config/latest/developerguide/set-up-aggregator-cli.html#register-a-delegated-administrator-cli)
in the AWS Config developer guide.
"""
def put_configuration_aggregator(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutConfigurationAggregator", input, options)
end
@doc """
Creates a new configuration recorder to record the selected resource
configurations.
You can use this action to change the role `roleARN` or the `recordingGroup` of
an existing recorder. To change the role, call the action on the existing
configuration recorder and specify a role.
Currently, you can specify only one configuration recorder per region in your
account.
If `ConfigurationRecorder` does not have the **recordingGroup** parameter
specified, the default is to record all supported resource types.
"""
def put_configuration_recorder(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutConfigurationRecorder", input, options)
end
@doc """
Creates or updates a conformance pack.
A conformance pack is a collection of AWS Config rules that can be easily
deployed in an account and a region and across AWS Organization.
This API creates a service linked role `AWSServiceRoleForConfigConforms` in your
account. The service linked role is created only when the role does not exist in
your account.
You must specify either the `TemplateS3Uri` or the `TemplateBody` parameter, but
  not both. If you provide both, AWS Config uses the `TemplateS3Uri` parameter and
ignores the `TemplateBody` parameter.
"""
def put_conformance_pack(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutConformancePack", input, options)
end
@doc """
Creates a delivery channel object to deliver configuration information to an
Amazon S3 bucket and Amazon SNS topic.
Before you can create a delivery channel, you must create a configuration
recorder.
You can use this action to change the Amazon S3 bucket or an Amazon SNS topic of
the existing delivery channel. To change the Amazon S3 bucket or an Amazon SNS
topic, call this action and specify the changed values for the S3 bucket and the
SNS topic. If you specify a different value for either the S3 bucket or the SNS
topic, this action will keep the existing value for the parameter that is not
changed.
You can have only one delivery channel per region in your account.
"""
def put_delivery_channel(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutDeliveryChannel", input, options)
end
@doc """
Used by an AWS Lambda function to deliver evaluation results to AWS Config.
This action is required in every AWS Lambda function that is invoked by an AWS
Config rule.
"""
def put_evaluations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutEvaluations", input, options)
end
@doc """
  Adds or updates the evaluations for process checks.
This API checks if the rule is a process check when the name of the AWS Config
rule is provided.
"""
def put_external_evaluation(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutExternalEvaluation", input, options)
end
@doc """
  Adds or updates an organization config rule for your entire organization to
  evaluate whether your AWS resources comply with your desired configurations.
  Only a master account and a delegated administrator can create or update an
  organization config rule. When calling this API with a delegated administrator,
  you must ensure AWS Organizations `ListDelegatedAdministrators` permissions are
  added.
This API enables organization service access through the
`EnableAWSServiceAccess` action and creates a service linked role
`AWSServiceRoleForConfigMultiAccountSetup` in the master or delegated
administrator account of your organization. The service linked role is created
only when the role does not exist in the caller account. AWS Config verifies the
existence of role with `GetRole` action.
To use this API with delegated administrator, register a delegated administrator
  by calling the AWS Organizations `register-delegated-administrator` command for
`config-multiaccountsetup.amazonaws.com`.
You can use this action to create both custom AWS Config rules and AWS managed
  Config rules. If you are adding a new custom AWS Config rule, you must first
  create, in the master account or a delegated administrator account, the AWS
  Lambda function that the rule invokes to evaluate your resources. When you use the
`PutOrganizationConfigRule` action to add the rule to AWS Config, you must
specify the Amazon Resource Name (ARN) that AWS Lambda assigns to the function.
If you are adding an AWS managed Config rule, specify the rule's identifier for
the `RuleIdentifier` key.
  The maximum number of organization config rules that AWS Config supports is
  150, and you can have up to 3 delegated administrators per organization.
Prerequisite: Ensure you call `EnableAllFeatures` API to enable all features in
an organization.
Specify either `OrganizationCustomRuleMetadata` or
`OrganizationManagedRuleMetadata`.
"""
def put_organization_config_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutOrganizationConfigRule", input, options)
end
@doc """
Deploys conformance packs across member accounts in an AWS Organization.
Only a master account and a delegated administrator can call this API. When
calling this API with a delegated administrator, you must ensure AWS
  Organizations `ListDelegatedAdministrators` permissions are added.
This API enables organization service access for
`config-multiaccountsetup.amazonaws.com` through the `EnableAWSServiceAccess`
action and creates a service linked role
`AWSServiceRoleForConfigMultiAccountSetup` in the master or delegated
administrator account of your organization. The service linked role is created
only when the role does not exist in the caller account. To use this API with
  delegated administrator, register a delegated administrator by calling the AWS
  Organizations `register-delegated-administrator` command for
  `config-multiaccountsetup.amazonaws.com`.
Prerequisite: Ensure you call `EnableAllFeatures` API to enable all features in
an organization.
You must specify either the `TemplateS3Uri` or the `TemplateBody` parameter, but
  not both. If you provide both, AWS Config uses the `TemplateS3Uri` parameter and
ignores the `TemplateBody` parameter.
AWS Config sets the state of a conformance pack to CREATE_IN_PROGRESS and
UPDATE_IN_PROGRESS until the conformance pack is created or updated. You cannot
update a conformance pack while it is in this state.
  You can create up to 6 conformance packs, with 25 AWS Config rules in each
  pack, and up to 3 delegated administrators per organization.
"""
def put_organization_conformance_pack(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutOrganizationConformancePack", input, options)
end
@doc """
Adds or updates the remediation configuration with a specific AWS Config rule
with the selected target or action.
The API creates the `RemediationConfiguration` object for the AWS Config rule.
The AWS Config rule must already exist for you to add a remediation
configuration. The target (SSM document) must exist and have permissions to use
the target.
If you make backward incompatible changes to the SSM document, you must call
this again to ensure the remediations can run.
This API does not support adding remediation configurations for service-linked
AWS Config Rules such as Organization Config rules, the rules deployed by
conformance packs, and rules deployed by AWS Security Hub.
"""
def put_remediation_configurations(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutRemediationConfigurations", input, options)
end
@doc """
A remediation exception is when a specific resource is no longer considered for
auto-remediation.
This API adds a new exception or updates an existing exception for a specific
resource with a specific AWS Config rule.
  AWS Config generates a remediation exception when a problem occurs executing a
  remediation action for a specific resource. Remediation exceptions block
  auto-remediation until the exception is cleared.
"""
def put_remediation_exceptions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutRemediationExceptions", input, options)
end
@doc """
Records the configuration state for the resource provided in the request.
The configuration state of a resource is represented in AWS Config as
Configuration Items. Once this API records the configuration item, you can
retrieve the list of configuration items for the custom resource type using
existing AWS Config APIs.
The custom resource type must be registered with AWS CloudFormation. This API
accepts the configuration item registered with AWS CloudFormation.
When you call this API, AWS Config only stores configuration state of the
resource provided in the request. This API does not change or remediate the
configuration of the resource.
  Write-only schema properties are not recorded as part of the published
configuration item.
"""
def put_resource_config(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutResourceConfig", input, options)
end
@doc """
Creates and updates the retention configuration with details about retention
period (number of days) that AWS Config stores your historical information.
The API creates the `RetentionConfiguration` object and names the object as
**default**. When you have a `RetentionConfiguration` object named **default**,
calling the API modifies the default object.
Currently, AWS Config supports only one retention configuration per region in
your account.
"""
def put_retention_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutRetentionConfiguration", input, options)
end
@doc """
Saves a new query or updates an existing saved query.
The `QueryName` must be unique for a single AWS account and a single AWS Region.
  You can create up to 300 queries in a single AWS account and a single AWS Region.
"""
def put_stored_query(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutStoredQuery", input, options)
end
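  # A hedged sketch of saving a query (the `StoredQuery` shape follows the
  # PutStoredQuery reference; the name and expression are placeholders):
  #
  #     input = %{
  #       "StoredQuery" => %{
  #         "QueryName" => "running-instances",
  #         "Expression" => "SELECT resourceId WHERE resourceType = 'AWS::EC2::Instance'",
  #         "Description" => "All recorded EC2 instances"
  #       }
  #     }
  #
  #     {:ok, %{"QueryArn" => _arn}, _response} = AWS.Config.put_stored_query(client, input)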
@doc """
Accepts a structured query language (SQL) SELECT command and an aggregator to
query configuration state of AWS resources across multiple accounts and regions,
performs the corresponding search, and returns resource configurations matching
the properties.
  For more information about query components, see the [Query
  Components](https://docs.aws.amazon.com/config/latest/developerguide/query-components.html)
  section in the *AWS Config Developer Guide*.
"""
def select_aggregate_resource_config(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SelectAggregateResourceConfig", input, options)
end
@doc """
Accepts a structured query language (SQL) `SELECT` command, performs the
corresponding search, and returns resource configurations matching the
properties.
  For more information about query components, see the [Query
  Components](https://docs.aws.amazon.com/config/latest/developerguide/query-components.html)
  section in the *AWS Config Developer Guide*.
"""
def select_resource_config(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SelectResourceConfig", input, options)
end
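  # A hedged sketch of a SELECT query against recorded configuration state
  # (field names per the SelectResourceConfig reference; `Results` is the
  # documented list of JSON-encoded rows):
  #
  #     input = %{
  #       "Expression" => "SELECT resourceId, resourceType WHERE resourceType = 'AWS::IAM::Role'",
  #       "Limit" => 50
  #     }
  #
  #     {:ok, %{"Results" => rows}, _response} =
  #       AWS.Config.select_resource_config(client, input)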
@doc """
Runs an on-demand evaluation for the specified AWS Config rules against the last
known configuration state of the resources.
Use `StartConfigRulesEvaluation` when you want to test that a rule you updated
is working as expected. `StartConfigRulesEvaluation` does not re-record the
latest configuration state for your resources. It re-runs an evaluation against
the last known state of your resources.
You can specify up to 25 AWS Config rules per request.
An existing `StartConfigRulesEvaluation` call for the specified rules must
complete before you can call the API again. If you chose to have AWS Config
stream to an Amazon SNS topic, you will receive a `ConfigRuleEvaluationStarted`
notification when the evaluation starts.
You don't need to call the `StartConfigRulesEvaluation` API to run an evaluation
for a new rule. When you create a rule, AWS Config evaluates your resources
against the rule automatically.
The `StartConfigRulesEvaluation` API is useful if you want to run on-demand
evaluations, such as the following example:
1. You have a custom rule that evaluates your IAM resources every 24
hours.
2. You update your Lambda function to add additional conditions to
your rule.
3. Instead of waiting for the next periodic evaluation, you call the
`StartConfigRulesEvaluation` API.
4. AWS Config invokes your Lambda function and evaluates your IAM
resources.
5. Your custom rule will still run periodic evaluations every 24
hours.
"""
def start_config_rules_evaluation(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartConfigRulesEvaluation", input, options)
end
@doc """
Starts recording configurations of the AWS resources you have selected to record
in your AWS account.
You must have created at least one delivery channel to successfully start the
configuration recorder.
"""
def start_configuration_recorder(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartConfigurationRecorder", input, options)
end
@doc """
Runs an on-demand remediation for the specified AWS Config rules against the
last known remediation configuration.
It runs an execution against the current state of your resources. Remediation
execution is asynchronous.
You can specify up to 100 resource keys per request. An existing
StartRemediationExecution call for the specified resource keys must complete
before you can call the API again.
"""
def start_remediation_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartRemediationExecution", input, options)
end
@doc """
Stops recording configurations of the AWS resources you have selected to record
in your AWS account.
"""
def stop_configuration_recorder(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopConfigurationRecorder", input, options)
end
@doc """
Associates the specified tags to a resource with the specified resourceArn.
If existing tags on a resource are not specified in the request parameters, they
are not changed. When a resource is deleted, the tags associated with that
resource are deleted as well.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Deletes specified tags from a resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
end
# Source file: lib/aws/generated/config.ex
defmodule Meeseeks do
alias Meeseeks.{Context, Document, Error, Parser, Result, Select, Selector, TupleTree}
@moduledoc """
Meeseeks is an Elixir library for parsing and extracting data from HTML and
XML with CSS or XPath selectors.
```elixir
import Meeseeks.CSS
html = HTTPoison.get!("https://news.ycombinator.com/").body
for story <- Meeseeks.all(html, css("tr.athing")) do
title = Meeseeks.one(story, css(".title a"))
%{title: Meeseeks.text(title),
url: Meeseeks.attr(title, "href")}
end
#=> [%{title: "...", url: "..."}, %{title: "...", url: "..."}, ...]
```
## Getting Started
### Parse
Start by parsing a source (HTML/XML string or `Meeseeks.TupleTree`) into
a `Meeseeks.Document` so that it can be queried.
`Meeseeks.parse/1` parses the source as HTML, but `Meeseeks.parse/2`
accepts a second argument of either `:html` or `:xml` that specifies how
the source is parsed.
```elixir
document = Meeseeks.parse("<div id=main><p>1</p><p>2</p><p>3</p></div>")
#=> Meeseeks.Document<{...}>
```
The selection functions accept an unparsed source, parsing it as HTML, but
  parsing is expensive, so parse ahead of time when running multiple
selections on the same document.
### Select
Next, use one of Meeseeks's selection functions - `fetch_all`, `all`,
`fetch_one`, or `one` - to search for nodes.
All these functions accept a queryable (a source, a document, or a
`Meeseeks.Result`), one or more `Meeseeks.Selector`s, and optionally an
initial context.
`all` returns a (possibly empty) list of results representing every node
matching one of the provided selectors, while `one` returns a result
representing the first node to match a selector (depth-first) or nil if
there is no match.
`fetch_all` and `fetch_one` work like `all` and `one` respectively, but
wrap the result in `{:ok, ...}` if there is a match or return
`{:error, %Meeseeks.Error{type: :select, reason: :no_match}}` if there is
not.
To generate selectors, use the `css` macro provided by `Meeseeks.CSS` or
the `xpath` macro provided by `Meeseeks.XPath`.
```elixir
import Meeseeks.CSS
result = Meeseeks.one(document, css("#main p"))
#=> #Meeseeks.Result<{ <p>1</p> }>
import Meeseeks.XPath
result = Meeseeks.one(document, xpath("//*[@id='main']//p"))
#=> #Meeseeks.Result<{ <p>1</p> }>
```
### Extract
Retrieve information from the `Meeseeks.Result` with an extraction
function.
The extraction functions are `attr`, `attrs`, `data`, `dataset`, `html`,
`own_text`, `tag`, `text`, `tree`.
```elixir
Meeseeks.tag(result)
#=> "p"
Meeseeks.text(result)
#=> "1"
Meeseeks.tree(result)
#=> {"p", [], ["1"]}
```
The extraction functions `html` and `tree` work on `Meeseeks.Document`s in
addition to `Meeseeks.Result`s.
```elixir
Meeseeks.html(document)
#=> "<html><head></head><body><div id=\\"main\\"><p>1</p><p>2</p><p>3</p></div></body></html>"
```
"""
@type queryable :: Parser.source() | Document.t() | Result.t()
@type extractable :: Document.t() | Result.t() | nil
@type selectors :: Selector.t() | [Selector.t()]
# Parse
@doc """
Parses a string or `Meeseeks.TupleTree` into a `Meeseeks.Document`.
`parse/1` parses as HTML, while `parse/2` accepts a second argument of
  either `:html`, `:xml`, or `:tuple_tree` that specifies how the source is
parsed.
## Examples
iex> Meeseeks.parse("<div id=main><p>Hello, Meeseeks!</p></div>")
#Meeseeks.Document<{...}>
iex> Meeseeks.parse("<book><author>GGK</author></book>", :xml)
#Meeseeks.Document<{...}>
iex> Meeseeks.parse({"div", [{"id", "main"}], [{"p", [], ["Hello, Meeseeks!"]}]}, :tuple_tree)
#Meeseeks.Document<{...}>
"""
@spec parse(Parser.source()) :: Document.t() | {:error, Error.t()}
def parse(source) do
Parser.parse(source)
end
@spec parse(Parser.source(), Parser.type()) :: Document.t() | {:error, Error.t()}
def parse(source, parser) do
Parser.parse(source, parser)
end
# Select
@doc """
  Returns `{:ok, [Result, ...]}` if one or more nodes in the queryable match
a selector, or `{:error, %Meeseeks.Error{type: :select, reason: :no_match}}`
if none do.
Optionally accepts a `Meeseeks.Context` map.
  Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
  and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a parse
  error.
  If multiple selections are being run on the same unparsed source, parse
  first to avoid unnecessary computation.
## Examples
iex> import Meeseeks.CSS
iex> Meeseeks.fetch_all("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p")) |> elem(1) |> List.first()
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec fetch_all(queryable, selectors) :: {:ok, [Result.t()]} | {:error, Error.t()}
def fetch_all(queryable, selectors) do
fetch_all(queryable, selectors, %{})
end
@spec fetch_all(queryable, selectors, Context.t()) :: {:ok, [Result.t()]} | {:error, Error.t()}
def fetch_all(queryable, selectors, context)
def fetch_all({:error, _} = error, _selectors, _context), do: error
def fetch_all(%Document{} = queryable, selectors, context) do
Select.fetch_all(queryable, selectors, context)
end
def fetch_all(%Result{} = queryable, selectors, context) do
Select.fetch_all(queryable, selectors, context)
end
def fetch_all(source, selectors, context) do
case parse(source) do
{:error, reason} -> {:error, reason}
document -> Select.fetch_all(document, selectors, context)
end
end
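  # A small usage sketch of branching on `fetch_all`'s return values. The HTML
  # string is illustrative; the error struct shape comes from the docs above:
  #
  #     import Meeseeks.CSS
  #
  #     case Meeseeks.fetch_all("<ul><li>a</li><li>b</li></ul>", css("li")) do
  #       {:ok, results} -> Enum.map(results, &Meeseeks.text/1)
  #       {:error, %Meeseeks.Error{type: :select, reason: :no_match}} -> []
  #     end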
@doc """
Returns `[Result, ...]` if one or more nodes in the queryable match a
selector, or `[]` if none do.
Optionally accepts a `Meeseeks.Context` map.
  Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
  and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a parse
  error.
  If multiple selections are being run on the same unparsed source, parse
  first to avoid unnecessary computation.
## Examples
iex> import Meeseeks.CSS
iex> Meeseeks.all("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p")) |> List.first()
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec all(queryable, selectors) :: [Result.t()] | {:error, Error.t()}
def all(queryable, selectors) do
all(queryable, selectors, %{})
end
@spec all(queryable, selectors, Context.t()) :: [Result.t()] | {:error, Error.t()}
def all(queryable, selectors, context)
def all({:error, _} = error, _selectors, _context), do: error
def all(%Document{} = queryable, selectors, context) do
Select.all(queryable, selectors, context)
end
def all(%Result{} = queryable, selectors, context) do
Select.all(queryable, selectors, context)
end
def all(source, selectors, context) do
case parse(source) do
{:error, reason} -> {:error, reason}
document -> Select.all(document, selectors, context)
end
end
@doc """
Returns `{:ok, Result}` for the first node in the queryable (depth-first)
matching a selector, or
`{:error, %Meeseeks.Error{type: :select, reason: :no_match}}` if none do.
Optionally accepts a `Meeseeks.Context` map.
  Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
  and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a parse
  error.
  If multiple selections are being run on the same unparsed source, parse
  first to avoid unnecessary computation.
## Examples
iex> import Meeseeks.CSS
iex> Meeseeks.fetch_one("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p")) |> elem(1)
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec fetch_one(queryable, selectors) :: {:ok, Result.t()} | {:error, Error.t()}
def fetch_one(queryable, selectors) do
fetch_one(queryable, selectors, %{})
end
@spec fetch_one(queryable, selectors, Context.t()) :: {:ok, Result.t()} | {:error, Error.t()}
def fetch_one(queryable, selectors, context)
def fetch_one({:error, _} = error, _selectors, _context), do: error
def fetch_one(%Document{} = queryable, selectors, context) do
Select.fetch_one(queryable, selectors, context)
end
def fetch_one(%Result{} = queryable, selectors, context) do
Select.fetch_one(queryable, selectors, context)
end
def fetch_one(source, selectors, context) do
case parse(source) do
{:error, reason} -> {:error, reason}
document -> Select.fetch_one(document, selectors, context)
end
end
@doc """
Returns a `Result` for the first node in the queryable (depth-first)
matching a selector, or `nil` if none do.
Optionally accepts a `Meeseeks.Context` map.
Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a parse
error.
If multiple selections are being run on the same unparsed source, parse
first to avoid unnecessary computation.
## Examples
iex> import Meeseeks.CSS
iex> Meeseeks.one("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p"))
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec one(queryable, selectors) :: Result.t() | nil | {:error, Error.t()}
def one(queryable, selectors) do
one(queryable, selectors, %{})
end
@spec one(queryable, selectors, Context.t()) :: Result.t() | nil | {:error, Error.t()}
def one(queryable, selectors, context)
def one({:error, _} = error, _selectors, _context), do: error
def one(%Document{} = queryable, selectors, context) do
Select.one(queryable, selectors, context)
end
def one(%Result{} = queryable, selectors, context) do
Select.one(queryable, selectors, context)
end
def one(source, selectors, context) do
case parse(source) do
{:error, reason} -> {:error, reason}
document -> Select.one(document, selectors, context)
end
end
@doc """
Returns the accumulated result of walking the queryable, accumulating nodes
that match a selector. Prefer `all` or `one`; `select` should only be used
when a custom accumulator is required.
Requires that a `Meeseeks.Accumulator` has been added to the context via
`Meeseeks.Context.add_accumulator/2`, and will raise an error if it hasn't.
Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a parse
error.
If multiple selections are being run on the same unparsed source, parse
first to avoid unnecessary computation.
## Examples
iex> import Meeseeks.CSS
iex> accumulator = %Meeseeks.Accumulator.One{}
iex> context = Meeseeks.Context.add_accumulator(%{}, accumulator)
iex> Meeseeks.select("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p"), context)
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec select(queryable, selectors, Context.t()) :: any | {:error, Error.t()}
def select(queryable, selectors, context)
def select({:error, _} = error, _selectors, _context), do: error
def select(%Document{} = queryable, selectors, context) do
Select.select(queryable, selectors, context)
end
def select(%Result{} = queryable, selectors, context) do
Select.select(queryable, selectors, context)
end
def select(source, selectors, context) do
case parse(source) do
{:error, reason} -> {:error, reason}
document -> Select.select(document, selectors, context)
end
end
# Extract
@doc """
Returns the value of an attribute in a result, or `nil` if there isn't one.
Nil input returns `nil`.
## Examples
iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div id=example>Hi</div>", css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.attr(result, "id")
"example"
"""
@spec attr(extractable, String.t()) :: String.t() | nil
def attr(extractable, attribute)
def attr(nil, _), do: nil
def attr(%Result{} = result, attribute), do: Result.attr(result, attribute)
def attr(x, _attribute), do: raise_cannot_extract(x, "attr/2")
@doc """
Returns a result's attributes list, which may be empty, or nil if the
result represents a node without attributes.
Nil input returns `nil`.
## Examples
iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div id=example>Hi</div>", css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.attrs(result)
[{"id", "example"}]
"""
@spec attrs(extractable) :: [{String.t(), String.t()}] | nil
def attrs(extractable)
def attrs(nil), do: nil
def attrs(%Result{} = result), do: Result.attrs(result)
def attrs(x), do: raise_cannot_extract(x, "attrs/1")
@doc """
Returns the combined data of a result or the result's children, which may
be an empty string.
Once the data has been combined the whitespace is compacted by replacing
all instances of more than one whitespace character with a single space
and then trimmed.
Data is the content of `<script>` or `<style>` tags, or the content of
comments starting with "[CDATA[" and ending with "]]". The latter behavior
is to support the extraction of CDATA from HTML, since HTML5 parsers parse
CDATA as comments.
Nil input returns `nil`.
## Options
* `:collapse_whitespace` - Boolean determining whether or not to replace
blocks of whitespace with a single space character. Defaults to `true`.
* `:trim` - Boolean determining whether or not to trim the resulting
text. Defaults to `true`.
## Examples
iex> import Meeseeks.CSS
iex> result1 = Meeseeks.one("<div id=example>Hi</div>", css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.data(result1)
""
iex> result2 = Meeseeks.one("<script id=example>Hi</script>", css("#example"))
#Meeseeks.Result<{ <script id="example">Hi</script> }>
iex> Meeseeks.data(result2)
"Hi"
"""
@spec data(extractable, Keyword.t()) :: String.t() | nil
def data(extractable, opts \\ [])
def data(nil, _), do: nil
def data(%Result{} = result, opts), do: Result.data(result, opts)
def data(x, _), do: raise_cannot_extract(x, "data/1")
@doc """
Returns a map of a result's data attributes, or nil if the result
represents a node without attributes.
Behaves like HTMLElement.dataset; only valid data attributes are included,
and attribute names have "data-" removed and are converted to camelCase.
See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/dataset
Nil input returns `nil`.
## Examples
iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div id=example data-x-val=1 data-y-val=2></div>", css("#example"))
#Meeseeks.Result<{ <div id="example" data-x-val="1" data-y-val="2"></div> }>
iex> Meeseeks.dataset(result)
%{"xVal" => "1", "yVal" => "2"}
"""
@spec dataset(extractable) :: %{optional(String.t()) => String.t()} | nil
def dataset(extractable)
def dataset(nil), do: nil
def dataset(%Result{} = result), do: Result.dataset(result)
def dataset(x), do: raise_cannot_extract(x, "dataset/1")
@doc """
Returns a string representing the combined HTML of a document or result
and its descendants.
Nil input returns `nil`.
## Examples
iex> import Meeseeks.CSS
iex> document = Meeseeks.parse("<div id=example>Hi</div>")
iex> Meeseeks.html(document)
"<html><head></head><body><div id=\\"example\\">Hi</div></body></html>"
iex> result = Meeseeks.one(document, css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.html(result)
"<div id=\\"example\\">Hi</div>"
"""
@spec html(extractable) :: String.t() | nil
def html(extractable)
def html(nil), do: nil
def html(%Document{} = document), do: Document.html(document)
def html(%Result{} = result), do: Result.html(result)
def html(x), do: raise_cannot_extract(x, "html/1")
@doc """
Returns the combined text of a result or the result's children, which may
be an empty string.
Once the text has been combined the whitespace is compacted by replacing
all instances of more than one whitespace character with a single space
and then trimmed.
Nil input returns `nil`.
## Options
* `:collapse_whitespace` - Boolean determining whether or not to replace
blocks of whitespace with a single space character. Defaults to `true`.
* `:trim` - Boolean determining whether or not to trim the resulting
text. Defaults to `true`.
## Examples
iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div>Hello, <b>World!</b></div>", css("div"))
#Meeseeks.Result<{ <div>Hello, <b>World!</b></div> }>
iex> Meeseeks.own_text(result)
"Hello,"
"""
@spec own_text(extractable, Keyword.t()) :: String.t() | nil
def own_text(extractable, opts \\ [])
def own_text(nil, _), do: nil
def own_text(%Result{} = result, opts), do: Result.own_text(result, opts)
def own_text(x, _), do: raise_cannot_extract(x, "own_text/1")
@doc """
Returns a result's tag, or `nil` if the result represents a node without a
tag.
Nil input returns `nil`.
## Examples
iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div id=example>Hi</div>", css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.tag(result)
"div"
"""
@spec tag(extractable) :: String.t() | nil
def tag(extractable)
def tag(nil), do: nil
def tag(%Result{} = result), do: Result.tag(result)
def tag(x), do: raise_cannot_extract(x, "tag/1")
@doc """
Returns the combined text of a result or the result's descendants, which
may be an empty string.
Once the text has been combined the whitespace is compacted by replacing
all instances of more than one whitespace character with a single space
and then trimmed.
Nil input returns `nil`.
## Options
* `:collapse_whitespace` - Boolean determining whether or not to replace
blocks of whitespace with a single space character. Defaults to `true`.
* `:trim` - Boolean determining whether or not to trim the resulting
text. Defaults to `true`.
## Examples
iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div>Hello, <b>World!</b></div>", css("div"))
#Meeseeks.Result<{ <div>Hello, <b>World!</b></div> }>
iex> Meeseeks.text(result)
"Hello, World!"
"""
@spec text(extractable, Keyword.t()) :: String.t() | nil
def text(extractable, opts \\ [])
def text(nil, _), do: nil
def text(%Result{} = result, opts), do: Result.text(result, opts)
def text(x, _), do: raise_cannot_extract(x, "text/1")
@doc """
Returns the `Meeseeks.TupleTree` of a document or result and its
descendants.
Nil input returns `nil`.
## Examples
iex> import Meeseeks.CSS
iex> document = Meeseeks.parse("<div id=example>Hi</div>")
iex> Meeseeks.tree(document)
[{"html", [],
[{"head", [], []},
{"body", [], [{"div", [{"id", "example"}], ["Hi"]}]}]}]
iex> result = Meeseeks.one(document, css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.tree(result)
{"div", [{"id", "example"}], ["Hi"]}
"""
@spec tree(extractable) :: TupleTree.t() | nil
def tree(extractable)
def tree(nil), do: nil
def tree(%Document{} = document), do: Document.tree(document)
def tree(%Result{} = result), do: Result.tree(result)
def tree(x), do: raise_cannot_extract(x, "tree/1")
defp raise_cannot_extract(target, extractor) do
raise "Cannot run Meeseeks.#{extractor} on #{inspect(target)}"
end
end
|
lib/meeseeks.ex
| 0.8339
| 0.706861
|
meeseeks.ex
|
starcoder
|
defmodule EVM.ExecEnv do
alias EVM.AccountRepo
alias EVM.{BlockHeaderInfo, Configuration}
@moduledoc """
Stores information about the execution environment which led
to this EVM being called. This is, for instance, the sender of
a payment or message to a contract, or a sub-contract call.
We've added our interfaces for interacting with contracts
and accounts to this struct as well.
This generally relates to `I` in the Yellow Paper, defined in Section 9.3.
"""
defstruct address: nil,
originator: nil,
gas_price: nil,
data: nil,
sender: nil,
value_in_wei: nil,
machine_code: <<>>,
stack_depth: 0,
account_repo: nil,
block_header_info: nil,
config: Configuration.Frontier.new(),
static: false
@typedoc """
Terms from Yellow Paper:
- I_a: address
- I_o: originator
- I_p: gas_price
- I_d: data
- I_s: sender
- I_v: value_in_wei
- I_b: machine_code
- I_e: stack_depth
- I_H (via a behaviour): block_header_info
"""
@type t :: %__MODULE__{
address: EVM.address(),
originator: EVM.address(),
gas_price: EVM.Gas.gas_price(),
data: binary(),
sender: EVM.address(),
value_in_wei: EVM.Wei.t(),
machine_code: EVM.MachineCode.t(),
stack_depth: integer(),
block_header_info: BlockHeaderInfo.t(),
account_repo: AccountRepo.t(),
config: Configuration.t(),
static: boolean()
}
@spec put_storage(t(), integer(), integer()) :: t()
def put_storage(
exec_env = %{account_repo: account_repo, address: address},
key,
value
) do
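# AccountRepo.repo/1 resolves the concrete repo module, so the storage
# write is delegated to whichever backend this exec env carries.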
account_repo = AccountRepo.repo(account_repo).put_storage(account_repo, address, key, value)
Map.put(exec_env, :account_repo, account_repo)
end
@spec get_storage(t(), integer()) :: atom() | {:ok, integer()}
def get_storage(%{account_repo: account_repo, address: address}, key) do
AccountRepo.repo(account_repo).get_storage(account_repo, address, key)
end
@spec get_initial_storage(t(), integer()) :: atom() | {:ok, integer()}
def get_initial_storage(%{account_repo: account_repo, address: address}, key) do
AccountRepo.repo(account_repo).get_initial_storage(account_repo, address, key)
end
@spec get_balance(t()) :: EVM.Wei.t()
def get_balance(%{account_repo: account_repo, address: address}) do
AccountRepo.repo(account_repo).get_account_balance(account_repo, address)
end
@spec remove_storage(t(), integer()) :: t()
def remove_storage(exec_env = %{account_repo: account_repo, address: address}, key) do
account_repo = AccountRepo.repo(account_repo).remove_storage(account_repo, address, key)
Map.put(exec_env, :account_repo, account_repo)
end
@spec clear_account_balance(t()) :: t()
def clear_account_balance(exec_env = %{account_repo: account_repo, address: address}) do
account_repo = AccountRepo.repo(account_repo).clear_balance(account_repo, address)
Map.put(exec_env, :account_repo, account_repo)
end
@spec transfer_balance_to(t(), EVM.Address.t()) :: t()
def transfer_balance_to(exec_env, to) do
%{account_repo: account_repo, address: address} = exec_env
balance = AccountRepo.repo(account_repo).get_account_balance(account_repo, address)
transfer_wei_to(exec_env, to, balance)
end
@spec transfer_wei_to(t(), EVM.Address.t(), integer()) :: t()
def transfer_wei_to(exec_env, to, value) do
account_repo =
AccountRepo.repo(exec_env.account_repo).transfer(
exec_env.account_repo,
exec_env.address,
to,
value
)
%{exec_env | account_repo: account_repo}
end
@spec non_existent_account?(t(), EVM.Address.t()) :: boolean()
def non_existent_account?(exec_env, address) do
!AccountRepo.repo(exec_env.account_repo).account_exists?(
exec_env.account_repo,
address
)
end
@spec non_existent_or_empty_account?(t(), EVM.Address.t()) :: boolean()
def non_existent_or_empty_account?(exec_env, address) do
is_empty_account =
AccountRepo.repo(exec_env.account_repo).empty_account?(
exec_env.account_repo,
address
)
is_empty_account || non_existent_account?(exec_env, address)
end
end
|
apps/evm/lib/evm/exec_env.ex
| 0.846784
| 0.431345
|
exec_env.ex
|
starcoder
|
defmodule Yamlixir do
@moduledoc ~S"""
Simple YAML parser for Elixir.
"""
@type yaml :: String.t() | charlist
@type options :: keyword
@type decoded :: [any]
@type error :: Yamlixir.DecodingError.t()
@default_options [
detailed_constr: true,
str_node_as_binary: true
]
@doc ~S"""
Decodes a string of valid YAML into Elixir data.
Returns `{:ok, decoded}` on success and `{:error, %Yamlixir.DecodingError{}}` on failure.
## Options
* `:at` - Returns only the document at the given position in the list of documents. Expects input to be an integer.
* `:keys` - Controls how keys in maps are decoded. Defaults to strings. Possible values are:
* `:atoms` - keys are converted to atoms using `String.to_atom/1`
* `:atoms!` - keys are converted to atoms using `String.to_existing_atom/1`
## Examples
iex> Yamlixir.decode("")
{:ok, []}
iex> Yamlixir.decode("---")
{:ok, [%{}]}
iex> Yamlixir.decode(":")
{:error, %Yamlixir.DecodingError{}}
iex> Yamlixir.decode("a: b\nc: d")
{:ok, [%{"a" => "b", "c" => "d"}]}
iex> Yamlixir.decode("---\na: b\nc: d\n---\ne: f\ng: h")
{:ok, [%{"a" => "b", "c" => "d"}, %{"e" => "f", "g" => "h"}]}
iex> Yamlixir.decode("---\na: b\nc: d\n---\ne: f\ng: h", at: 0)
{:ok, %{"a" => "b", "c" => "d"}}
iex> Yamlixir.decode("---\na: b\nc: d\n---\ne: f\ng: h", at: -1)
{:ok, %{"e" => "f", "g" => "h"}}
iex> Yamlixir.decode("---\na: b\nc: d\n---\ne: f\ng: h", at: -1, keys: :atoms)
{:ok, %{e: "f", g: "h"}}
"""
@spec decode(yaml, options) :: {:ok, decoded} | {:error, error}
def decode(yaml, options \\ []), do: do_decode(yaml, options)
@doc ~S"""
The same as `decode/2` but raises a `Yamlixir.DecodingError` exception if it fails.
Returns the decoded YAML otherwise.
## Examples
iex> Yamlixir.decode!("")
[]
iex> Yamlixir.decode!("---")
[%{}]
iex> Yamlixir.decode!(":")
** (Yamlixir.DecodingError) decoding error
iex> Yamlixir.decode!("a: b\nc: d")
[%{"a" => "b", "c" => "d"}]
iex> Yamlixir.decode!("---\na: b\nc: d\n---\ne: f\ng: h")
[%{"a" => "b", "c" => "d"}, %{"e" => "f", "g" => "h"}]
iex> Yamlixir.decode!("---\na: b\nc: d\n---\ne: f\ng: h", at: 0)
%{"a" => "b", "c" => "d"}
iex> Yamlixir.decode!("---\na: b\nc: d\n---\ne: f\ng: h", at: -1)
%{"e" => "f", "g" => "h"}
iex> Yamlixir.decode!("---\na: b\nc: d\n---\ne: f\ng: h", at: -1, keys: :atoms)
%{e: "f", g: "h"}
"""
@spec decode!(yaml, options) :: decoded
def decode!(yaml, options \\ []) do
case do_decode(yaml, options) do
{:ok, decoded} -> decoded
{:error, exception} -> raise exception
end
end
@doc """
Handles the sigil `~y` for decoding YAML.
It passes the string to `decode!/2`, returning the decoded data. Raises a
`Yamlixir.DecodingError` exception when given invalid YAML.
## Modifiers
* `a`: keys are converted to atoms using `String.to_existing_atom/1`
## Examples
iex> ~y\"\"\"
...> a: b
...> c: d
...> \"\"\"
[%{"a" => "b", "c" => "d"}]
iex> ~y\"\"\"
...> a: b
...> c: d
...> \"\"\"a
[%{a: "b", c: "d"}]
"""
@spec sigil_y(yaml, list) :: decoded
def sigil_y(yaml, []), do: decode!(yaml)
def sigil_y(yaml, [?a]), do: decode!(yaml, keys: :atoms!)
@spec do_decode(yaml, options) :: {:ok, decoded} | {:error, error}
defp do_decode(yaml, options) do
options = Keyword.merge(options, @default_options)
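# Note: Keyword.merge/2 gives precedence to its second argument, so
# @default_options always override caller-supplied values here.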
decoded =
yaml
|> :yamerl_constr.string(options)
|> Yamlixir.YamerlParser.parse(options)
|> at(options[:at])
{:ok, decoded}
rescue
FunctionClauseError -> {:error, %Yamlixir.DecodingError{}}
catch
{:yamerl_exception, [{_, _, message, _, _, :no_matching_anchor, _, _}]} ->
{:error, %Yamlixir.DecodingError{message: List.to_string(message)}}
end
defp at(decoded, at) when is_integer(at), do: Enum.at(decoded, at)
defp at(decoded, nil), do: decoded
defp at(_decoded, _), do: raise(ArgumentError, "value given to option `:at` must be an integer")
end
|
lib/yamlixir.ex
| 0.933952
| 0.402451
|
yamlixir.ex
|
starcoder
|
defmodule Wand.WandEncoder do
alias WandCore.WandFile
alias WandCore.WandFile.Dependency
alias WandCore.Poison.Encoder
@moduledoc """
A [Poison](https://github.com/devinus/poison#encoder) encoder for `WandCore.WandFile`
It differs from the normal JSON encoding of a struct in the following ways:
1. The dependencies map is sorted by key name
2. Options are inlined and not pretty-printed, even though the rest of the object is
3. Atoms in a `WandCore.WandFile.Dependency` are encoded as `:atom_name`
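For example, a wand file with a single dependency might encode as
(an illustrative sketch; the version and dependency are hypothetical):

    {
      "version": "1.0",
      "dependencies": {
        "poison": "~> 3.1"
      }
    }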
"""
defimpl WandCore.Poison.Encoder, for: WandFile do
@default_indent 2
@default_offset 0
def encode(%WandFile{version: version, dependencies: dependencies}, options) do
indent = indent(options)
offset = offset(options) + indent
options = offset(options, offset)
[
{"version", version},
{"dependencies", dependencies}
]
|> Enum.map(fn {key, value} ->
{parse(key, options), parse(value, options)}
end)
|> create_map_body(offset)
|> wrap_map(offset, indent)
end
defp parse([], _options), do: "{}"
defp parse(dependencies, options) when is_list(dependencies) do
indent = indent(options)
offset = offset(options) + indent
options = offset(options, offset)
dependencies =
Enum.sort_by(dependencies, & &1.name)
|> Enum.map(fn dependency ->
{parse(dependency.name, options), parse(dependency, options)}
end)
create_map_body(dependencies, offset)
|> wrap_map(offset, indent)
end
defp parse(%Dependency{requirement: requirement, opts: opts}, options) when opts == %{} do
Encoder.BitString.encode(requirement, options)
end
defp parse(%Dependency{requirement: nil, opts: opts}, options) do
options = Keyword.drop(options, [:pretty])
[WandCore.Opts.encode(opts)]
|> Encoder.List.encode(options)
end
defp parse(%Dependency{requirement: requirement, opts: opts}, options) do
options = Keyword.drop(options, [:pretty])
[requirement, WandCore.Opts.encode(opts)]
|> Encoder.List.encode(options)
end
defp parse(key, options) when is_binary(key) do
to_string(key)
|> Encoder.BitString.encode(options)
end
defp wrap_map(body, offset, indent) do
["{\n", body, ?\n, spaces(offset - indent), ?}]
end
defp create_map_body(enumerable, offset) do
Enum.reverse(enumerable)
|> Enum.reduce([], fn {key, value}, acc ->
[
",\n",
spaces(offset),
key,
": ",
value
| acc
]
end)
|> tl
end
defp indent(options) do
Keyword.get(options, :indent, @default_indent)
end
defp offset(options) do
Keyword.get(options, :offset, @default_offset)
end
defp offset(options, value) do
Keyword.put(options, :offset, value)
end
defp spaces(count) do
:binary.copy(" ", count)
end
end
end
|
lib/wand_encoder.ex
| 0.671794
| 0.518912
|
wand_encoder.ex
|
starcoder
|
defmodule JSONRPC2.Servers.TCP do
@moduledoc """
A server for JSON-RPC 2.0 using a line-based TCP transport.
"""
alias JSONRPC2.Servers.TCP.Protocol
@default_timeout 1000 * 60 * 60
@doc """
Start a server with the given `handler` on `port` with `opts`.
Available options:
* `name` - a unique name that can be used to stop the server later. Defaults to the value of
`handler`.
* `num_acceptors` - number of acceptor processes to start. Defaults to 100.
* `transport` - ranch transport to use. Defaults to `:ranch_tcp`.
* `transport_opts` - ranch transport options. For `:ranch_tcp`, see
[here](http://ninenines.eu/docs/en/ranch/1.7/manual/ranch_tcp/).
* `timeout` - disconnect after this amount of milliseconds without a packet from a client.
Defaults to 1 hour.
* `line_packet` - by default, packets consist of a 4 byte header containing an unsigned integer
in big-endian byte order specifying the number of bytes in the packet, followed by that
number of bytes (equivalent to the
[erlang inet packet type `4`](https://erlang.org/doc/man/inet.html#packet)). If set to
`true`, packets will instead be terminated by line-endings, for compatibility with older
implementations.
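For example, with the default framing a client could build a packet like
this (an illustrative sketch):

    payload = ~s({"jsonrpc":"2.0","method":"ping","id":1})
    packet = <<byte_size(payload)::big-unsigned-integer-size(32), payload::binary>>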
"""
@spec start_listener(module, :inet.port_number(), Keyword.t()) :: {:ok, pid}
def start_listener(handler, port, opts \\ []) do
apply(:ranch, :start_listener, ranch_args(handler, port, opts))
end
@doc """
Returns a supervisor child spec for the given `handler` on `port` with `opts`.
Allows you to embed a server directly in your app's supervision tree, rather
than letting Ranch handle it.
See `start_listener/3` for available options.
"""
@spec child_spec(module, :inet.port_number(), Keyword.t()) :: :supervisor.child_spec()
def child_spec(handler, port, opts \\ []) do
apply(:ranch, :child_spec, ranch_args(handler, port, opts))
end
@doc """
Stop the server with `name`.
"""
@spec stop(atom) :: :ok | {:error, :not_found}
def stop(name) do
:ranch.stop_listener(name)
end
defp ranch_args(handler, port, opts) do
name = Keyword.get(opts, :name, handler)
num_acceptors = Keyword.get(opts, :num_acceptors, 100)
transport = Keyword.get(opts, :transport, :ranch_tcp)
transport_opts = [port: port] ++ Keyword.get(opts, :transport_opts, [])
timeout = Keyword.get(opts, :timeout, @default_timeout)
line_packet = !!Keyword.get(opts, :line_packet)
protocol_opts = {handler, timeout, line_packet}
[name, num_acceptors, transport, transport_opts, Protocol, protocol_opts]
end
end
|
lib/jsonrpc2/servers/tcp.ex
| 0.85931
| 0.441733
|
tcp.ex
|
starcoder
|
defmodule Talan.BloomFilter do
@moduledoc """
Bloom filter implementation with **concurrent accessibility**,
powered by [:atomics](http://erlang.org/doc/man/atomics.html) module.
"A Bloom filter is a space-efficient probabilistic data structure,
conceived by <NAME> in 1970,
that is used to test whether an element is a member of a set"
[Bloom filter on Wikipedia](https://en.wikipedia.org/wiki/Bloom_filter#CITEREFZhiwangJungangJian2010)
## Credit
Partly inspired by [Blex](https://github.com/gyson/blex)
## Features
* Fixed size Bloom filter
* Concurrent reads & writes
* Custom & default hash functions
* Merge multiple Bloom filters into one
* Intersect multiple Bloom filters into one
* Estimate number of unique elements
* Estimate current false positive probability
## Examples
iex> b = Talan.BloomFilter.new(1000)
iex> b |> Talan.BloomFilter.put("Barna")
iex> b |> Talan.BloomFilter.member?("Barna")
true
iex> b |> Talan.BloomFilter.member?("Kovacs")
false
"""
alias __MODULE__, as: BF
@enforce_keys [:atomics_ref, :filter_length, :hash_functions]
defstruct [:atomics_ref, :filter_length, :hash_functions]
@type t :: %__MODULE__{
atomics_ref: reference,
filter_length: non_neg_integer,
hash_functions: list
}
@doc """
Returns a new `%Talan.BloomFilter{}` for the desired `cardinality`.
`cardinality` is the expected number of unique items; duplicate items may
be inserted without limit.
## Options
* `:false_positive_probability` - a float, defaults to 0.01
* `:hash_functions` - a list of hash functions, defaults to randomly seeded murmur
## Examples
iex> bloom_filter = Talan.BloomFilter.new(1_000_000)
iex> bloom_filter |> Talan.BloomFilter.put("Barna Kovacs")
:ok
"""
@spec new(pos_integer, list) :: t
def new(cardinality, options \\ []) when is_integer(cardinality) and cardinality > 0 do
false_positive_probability = options |> Keyword.get(:false_positive_probability, 0.01)
hash_functions = options |> Keyword.get(:hash_functions, [])
if false_positive_probability <= 0 || false_positive_probability >= 1 do
raise ArgumentError, """
false_positive_probability must be a float between 0 and 1.
E.g. 0.01
Got: #{inspect(false_positive_probability)}
"""
end
hash_functions =
case hash_functions do
[] ->
hash_count = required_hash_function_count(false_positive_probability)
Talan.seed_n_murmur_hash_fun(hash_count)
list ->
list
end
filter_length = required_filter_length(cardinality, false_positive_probability)
atomics_arity = max(div(filter_length, 64), 1)
atomics_ref = :atomics.new(atomics_arity, signed: false)
%BF{
atomics_ref: atomics_ref,
filter_length: atomics_arity * 64,
hash_functions: hash_functions
}
end
@doc """
Returns the number of hash functions required for the
given `false_positive_probability`.
[Wikipedia - Bloom filter - Optimal number of hash functions](https://en.wikipedia.org/wiki/Bloom_filter#Optimal_number_of_hash_functions)
## Examples
iex> Talan.BloomFilter.required_hash_function_count(0.01)
7
iex> Talan.BloomFilter.required_hash_function_count(0.001)
10
iex> Talan.BloomFilter.required_hash_function_count(0.0001)
14
"""
@spec required_hash_function_count(float) :: non_neg_integer
def required_hash_function_count(false_positive_probability) do
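# Optimal number of hash functions: k = ceil(-log2(p)).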
-:math.log2(false_positive_probability)
|> Float.ceil()
|> round()
end
@doc """
Returns the required bit count given
* `cardinality` - Number of unique elements that will be inserted
* `false_positive_probability` - Desired false positive probability of membership
[Wikipedia - Bloom filter - Optimal number of hash functions](https://en.wikipedia.org/wiki/Bloom_filter#Optimal_number_of_hash_functions)
## Examples
iex> Talan.BloomFilter.required_filter_length(10_000, 0.01)
95851
"""
@spec required_filter_length(non_neg_integer, float) :: non_neg_integer
def required_filter_length(cardinality, false_positive_probability)
when is_integer(cardinality) and cardinality > 0 and false_positive_probability > 0 and
false_positive_probability < 1 do
import :math, only: [log: 1, pow: 2]
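# Optimal size in bits: m = ceil(-n * ln(p) / (ln 2)^2).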
Float.ceil(-cardinality * log(false_positive_probability) / pow(log(2), 2))
|> round()
end
@doc """
Puts `term` into `bloom_filter`, a `%Talan.BloomFilter{}` struct.
After this the `member?` function will always return `true`
for the membership of `term`.
Returns `:ok`.
## Examples
iex> b = Talan.BloomFilter.new(1000)
iex> b |> Talan.BloomFilter.put("<NAME>")
:ok
iex> b |> Talan.BloomFilter.put("<NAME>")
:ok
"""
@spec put(t, any) :: :ok
def put(%BF{} = bloom_filter, term) do
hashes = hash_term(bloom_filter, term)
put_hashes(bloom_filter, hashes)
:ok
end
@doc false
def put_hashes(%BF{atomics_ref: atomics_ref}, hashes) when is_list(hashes) do
hashes
|> Enum.each(fn hash ->
Abit.set_bit_at(atomics_ref, hash, 1)
end)
end
@doc """
Checks for membership of `term` in `bloom_filter`.
Returns `false` if `term` is definitely not a member.
Returns `true` if `term` is possibly a member.
## Examples
iex> b = Talan.BloomFilter.new(1000)
iex> b |> Talan.BloomFilter.member?("<NAME>")
false
iex> b |> Talan.BloomFilter.put("<NAME>")
iex> b |> Talan.BloomFilter.member?("<NAME>")
true
"""
@spec member?(t, any) :: boolean
def member?(%BF{atomics_ref: atomics_ref} = bloom_filter, term) do
hashes = hash_term(bloom_filter, term)
do_member?(atomics_ref, hashes)
end
defp do_member?(atomics_ref, [hash | hashes_tl]) do
if Abit.bit_at(atomics_ref, hash) == 1 do
do_member?(atomics_ref, hashes_tl)
else
false
end
end
defp do_member?(_, []), do: true
@doc """
Hashes `term` with all `hash_functions` of `%Talan.BloomFilter{}`.
Returns a list of hashed values.
## Examples
b = Talan.BloomFilter.new(1000)
Talan.BloomFilter.hash_term(b, :any_term_can_be_hashed)
[9386, 8954, 8645, 4068, 5445, 6914, 2844]
"""
@spec hash_term(t, any) :: list(integer)
def hash_term(%BF{filter_length: filter_length, hash_functions: hash_functions}, term) do
do_hash_term(filter_length, hash_functions, term)
end
defp do_hash_term(filter_length, hash_functions, term, acc \\ [])
defp do_hash_term(filter_length, [hash_fun | tl], term, acc) do
new_acc = [rem(hash_fun.(term), filter_length) | acc]
do_hash_term(filter_length, tl, term, new_acc)
end
defp do_hash_term(_, [], _, acc), do: acc
@doc """
Merges the atomics of multiple `%Talan.BloomFilter{}` structs into one new struct.
Note: to work correctly, all filters must have identical sizes and hash functions.
Returns a new `%Talan.BloomFilter{}` struct whose set bits are the union of the
set bits of the bloom filters in the `list`.
## Examples
iex> hash_functions = Talan.seed_n_murmur_hash_fun(7)
iex> b1 = Talan.BloomFilter.new(1000, hash_functions: hash_functions)
iex> b1 |> Talan.BloomFilter.put("GitHub")
iex> b2 = Talan.BloomFilter.new(1000, hash_functions: hash_functions)
iex> b2 |> Talan.BloomFilter.put("Octocat")
:ok
iex> b3 = Talan.BloomFilter.merge([b1, b2])
iex> b3 |> Talan.BloomFilter.member?("GitHub")
true
iex> b3 |> Talan.BloomFilter.member?("Octocat")
true
"""
@spec merge(nonempty_list(t)) :: t
def merge(list = [first = %BF{atomics_ref: first_atomics_ref} | _tl]) do
%{size: size} = :atomics.info(first_atomics_ref)
new_atomics_ref = :atomics.new(size, signed: false)
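# OR each filter's bits into the fresh atomics array to form the union.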
list
|> Enum.reduce(
new_atomics_ref,
fn %BF{atomics_ref: atomics_ref}, acc ->
Abit.merge(acc, atomics_ref)
end
)
%BF{first | atomics_ref: new_atomics_ref}
end
@doc """
Intersects the atomics of multiple `%Talan.BloomFilter{}` structs into one new struct.
Note: to work correctly, all filters must have identical sizes and hash functions.
Returns a new `%Talan.BloomFilter{}` struct whose set bits are the intersection of
the set bits of the bloom filters in the `list`.
## Examples
iex> hash_functions = Talan.seed_n_murmur_hash_fun(7)
iex> b1 = Talan.BloomFilter.new(1000, hash_functions: hash_functions)
iex> b1 |> Talan.BloomFilter.put("GitHub")
iex> b2 = Talan.BloomFilter.new(1000, hash_functions: hash_functions)
iex> b2 |> Talan.BloomFilter.put("GitHub")
iex> b2 |> Talan.BloomFilter.put("Octocat")
:ok
iex> b3 = Talan.BloomFilter.intersection([b1, b2])
iex> b3 |> Talan.BloomFilter.member?("GitHub")
true
iex> b3 |> Talan.BloomFilter.member?("Octocat")
false
"""
@spec intersection(nonempty_list(t)) :: t
def intersection(list = [first = %BF{atomics_ref: first_atomics_ref} | _tl]) do
%{size: size} = :atomics.info(first_atomics_ref)
new_atomics_ref = :atomics.new(size, signed: false)
Abit.merge(new_atomics_ref, first_atomics_ref)
list
|> Enum.reduce(
new_atomics_ref,
fn %BF{atomics_ref: atomics_ref}, acc ->
Abit.intersect(acc, atomics_ref)
end
)
%BF{first | atomics_ref: new_atomics_ref}
end
@doc """
Returns a non-negative integer representing the
estimated cardinality (count of unique elements) in the filter.
## Examples
iex> b = Talan.BloomFilter.new(1000)
iex> b |> Talan.BloomFilter.cardinality()
0
iex> b |> Talan.BloomFilter.put("Barna")
iex> b |> Talan.BloomFilter.cardinality()
1
iex> b |> Talan.BloomFilter.put("Barna")
iex> b |> Talan.BloomFilter.cardinality()
1
iex> b |> Talan.BloomFilter.put("Kovacs")
iex> b |> Talan.BloomFilter.cardinality()
2
"""
@spec cardinality(t) :: non_neg_integer
def cardinality(%BF{
atomics_ref: atomics_ref,
filter_length: filter_length,
hash_functions: hash_functions
}) do
set_bits_count = Abit.set_bits_count(atomics_ref)
hash_function_count = length(hash_functions)
cond do
set_bits_count < hash_function_count ->
0
set_bits_count == hash_function_count ->
1
filter_length == set_bits_count ->
round(filter_length / hash_function_count)
true ->
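# Standard estimate: n ≈ -(m / k) * ln(1 - X / m), where m is the
# filter length, k the hash count, and X the number of set bits.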
est = :math.log(filter_length - set_bits_count) - :math.log(filter_length)
round(filter_length * -est / hash_function_count)
end
end
@doc """
Returns a float representing the current estimated
false positive probability.
## Examples
iex> b = Talan.BloomFilter.new(1000)
iex> b |> Talan.BloomFilter.false_positive_probability()
0.0 # fpp zero when bloom filter is empty
iex> b |> Talan.BloomFilter.put("Barna") # fpp increase
iex> b |> Talan.BloomFilter.put("Kovacs")
iex> fpp = b |> Talan.BloomFilter.false_positive_probability()
iex> fpp > 0 && fpp < 1
true
"""
@spec false_positive_probability(t()) :: float()
def false_positive_probability(%BF{
atomics_ref: atomics_ref,
filter_length: filter_length,
hash_functions: hash_functions
}) do
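# Current fpp ≈ (1 - u / m)^k, with u unset bits, m total bits, k hash functions.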
bits_not_set_count = filter_length - Abit.set_bits_count(atomics_ref)
hash_function_count = length(hash_functions)
:math.pow(1 - bits_not_set_count / filter_length, hash_function_count)
end
@doc """
Returns a map representing the bit state of the `atomics_ref`.
Use this for debugging purposes.
## Examples
iex> b = Talan.BloomFilter.new(1000)
iex> b |> Talan.BloomFilter.bits_info()
%{total_bits: 9536, set_bits_count: 0, set_ratio: 0.0}
"""
@spec bits_info(t()) :: map()
def bits_info(%BF{atomics_ref: atomics_ref, filter_length: filter_length}) do
set_bits_count = Abit.set_bits_count(atomics_ref)
%{
total_bits: filter_length,
set_bits_count: set_bits_count,
set_ratio: set_bits_count / filter_length
}
end
end
|
lib/talan/bloom_filter.ex
| 0.920473
| 0.720983
|
bloom_filter.ex
|
starcoder
|
if Code.ensure_loaded?(Finch) do
defmodule Tesla.Adapter.Finch do
@moduledoc """
Adapter for [finch](https://github.com/keathley/finch).
Remember to add `{:finch, "~> 0.3"}` to dependencies. Also, you need to
recompile tesla after adding the `:finch` dependency:
```
mix deps.clean tesla
mix compile
```
## Examples
In order to use Finch, you must start it and provide a `:name`. For example,
in your supervision tree:
```elixir
children = [
{Finch, name: MyFinch}
]
```
You must provide the same name to this adapter:
```
# set globally in config/config.exs
config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}
# set per module
defmodule MyClient do
use Tesla
adapter Tesla.Adapter.Finch, name: MyFinch
end
```
## Adapter specific options
* `:name` - The `:name` provided to Finch (**required**).
## [Finch options](https://hexdocs.pm/finch/Finch.html#request/3)
* `:pool_timeout` - This timeout is applied when a connection is checked
out from the pool. Default value is `5_000`.
* `:receive_timeout` - The maximum time to wait for a response before
returning an error. Default value is `15_000`.
"""
@behaviour Tesla.Adapter
alias Tesla.Multipart
@impl Tesla.Adapter
def call(%Tesla.Env{} = env, opts) do
opts = Tesla.Adapter.opts(env, opts)
name = Keyword.fetch!(opts, :name)
url = Tesla.build_url(env.url, env.query)
req_opts = Keyword.take(opts, [:pool_timeout, :receive_timeout])
case request(name, env.method, url, env.headers, env.body, req_opts) do
{:ok, %Finch.Response{status: status, headers: headers, body: body}} ->
{:ok, %Tesla.Env{env | status: status, headers: headers, body: body}}
{:error, mint_error} ->
{:error, Exception.message(mint_error)}
end
end
defp request(name, method, url, headers, %Multipart{} = mp, opts) do
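# Multipart: append the generated multipart headers and materialize the body stream.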
headers = headers ++ Multipart.headers(mp)
body = Multipart.body(mp) |> Enum.to_list()
request(name, method, url, headers, body, opts)
end
defp request(_name, _method, _url, _headers, %Stream{}, _opts) do
raise "Streaming is not supported by this adapter!"
end
defp request(name, method, url, headers, body, opts) do
Finch.build(method, url, headers, body)
|> Finch.request(name, opts)
end
end
end
|
lib/tesla/adapter/finch.ex
| 0.866048
| 0.850655
|
finch.ex
|
starcoder
|
defmodule TuneWeb.ExplorerLive do
@moduledoc """
Main view used in the application. Covers:
- Search
- Suggestions
- Displaying details for artists, albums, etc.
- Mini player
## Mounting and authentication
When mounting, `TuneWeb.ExplorerLive` uses the session data to start a
Spotify session. Note that we use a private function `spotify_session/0` to load the appropriate
behaviour (for example in tests we use a mock).
`mount/3` always loads user profile data, player status and currently available
devices, as such information is always displayed irrespective of the section.
When connected, `mount/3` also subscribes to session events for the relevant session id.
## Routing
Depending on the route, `handle_params/3` dispatches to different internal functions which
take care of loading additional data specific for each section.
## Events
Events generated by subscriptions are handled in `handle_info/2` and
primarily take care of updating the socket assigns. When appropriate, changes
are forwarded to the progress bar so that it gets updated. This mechanism ensures that:
- when the same song plays and only elapsed time changes, only the progress bar is re-rendered
- when the song changes or is played/paused, we also re-render other parts of
the UI (e.g. if the currently playing song is visualized in its album's
tracklist)
Events generated by the UI are all handled via `handle_event/3`.
"""
use TuneWeb, :live_view
alias Tune.Spotify.Schema.{Album, Device, Player, Track, User}
alias TuneWeb.{
AlbumView,
ArtistView,
MiniPlayerComponent,
PaginationView,
ProgressBarComponent,
SearchView,
ShowView,
SuggestionsView
}
@default_time_range "short_term"
@initial_state [
q: nil,
type: :track,
results: %{items: [], total: 0},
user: nil,
now_playing: %Player{},
item: :not_fetched,
artist_albums_group: :all,
per_page: 24,
page: 1,
suggestions_playlist: :not_fetched,
suggestions_recently_played_albums: :not_fetched,
suggestions_top_albums: :not_fetched,
suggestions_top_albums_time_range: @default_time_range,
suggestions_recommended_tracks: :not_fetched,
suggestions_recommended_tracks_time_range: @default_time_range
]
@impl true
def mount(_params, session, socket) do
case Tune.Auth.load_user(session) do
{:authenticated, session_id, user} ->
now_playing = spotify_session().now_playing(session_id)
devices = spotify_session().get_devices(session_id)
socket =
case spotify_session().get_player_token(session_id) do
{:ok, token} ->
assign(socket, :player_token, token)
error ->
handle_spotify_session_result(error, socket)
end
if connected?(socket) do
spotify_session().subscribe(session_id)
end
{:ok,
socket
|> assign(@initial_state)
|> assign(:static_changed, static_changed?(socket))
|> assign_new(:device_name, &generate_device_name/0)
|> assign(
session_id: session_id,
user: user,
premium?: User.premium?(user),
now_playing: now_playing,
devices: devices
)}
_error ->
{:ok, redirect(socket, to: "/auth/logout")}
end
end
@impl true
def handle_params(params, url, socket) do
case socket.assigns.live_action do
:suggestions -> handle_suggestions(params, url, socket)
:search -> handle_search(params, url, socket)
:artist_details -> handle_artist_details(params, url, socket)
:album_details -> handle_album_details(params, url, socket)
:show_details -> handle_show_details(params, url, socket)
:episode_details -> handle_episode_details(params, url, socket)
end
end
@impl true
def handle_event("toggle_play_pause", %{"key" => " "}, socket) do
spotify_session().toggle_play(socket.assigns.session_id)
{:noreply, socket}
end
def handle_event("toggle_play_pause", %{"key" => _}, socket) do
{:noreply, socket}
end
def handle_event("toggle_play_pause", _params, socket) do
socket.assigns.session_id
|> spotify_session().toggle_play()
|> handle_spotify_session_result(socket)
end
def handle_event("play", %{"uri" => uri, "context-uri" => context_uri}, socket) do
socket.assigns.session_id
|> spotify_session().play(uri, context_uri)
|> handle_spotify_session_result(socket)
end
def handle_event("play", %{"uri" => uri}, socket) do
socket.assigns.session_id
|> spotify_session().play(uri)
|> handle_spotify_session_result(socket)
end
def handle_event("next", _params, socket) do
socket.assigns.session_id
|> spotify_session().next()
|> handle_spotify_session_result(socket)
end
def handle_event("prev", _params, socket) do
socket.assigns.session_id
|> spotify_session().prev()
|> handle_spotify_session_result(socket)
end
def handle_event("seek", %{"position_ms" => position_ms}, socket) do
socket.assigns.session_id
|> spotify_session().seek(position_ms)
|> handle_spotify_session_result(socket)
end
def handle_event("search", params, socket) do
q = Map.get(params, "q")
type = Map.get(params, "type", "track")
{:noreply, push_patch(socket, to: Routes.explorer_path(socket, :search, q: q, type: type))}
end
def handle_event("set_top_albums_time_range", %{"time-range" => time_range}, socket) do
case get_top_tracks(socket.assigns.session_id, time_range) do
{:ok, top_tracks} ->
{:noreply,
assign(socket,
suggestions_top_albums: Album.from_tracks(top_tracks),
suggestions_top_albums_time_range: time_range
)}
error ->
handle_spotify_session_result(error, socket)
end
end
def handle_event("set_recommended_tracks_time_range", %{"time-range" => time_range}, socket) do
with {:ok, top_tracks} <- get_top_tracks(socket.assigns.session_id, time_range),
{:ok, recommended_tracks} <- get_recommendations(socket.assigns.session_id, top_tracks) do
{:noreply,
assign(socket,
suggestions_recommended_tracks: recommended_tracks,
suggestions_recommended_tracks_time_range: time_range
)}
else
error ->
handle_spotify_session_result(error, socket)
end
end
def handle_event("transfer_playback", %{"device" => device_id}, socket) do
case spotify_session().transfer_playback(socket.assigns.session_id, device_id) do
:ok ->
{:noreply, socket}
error ->
handle_spotify_session_result(error, socket)
end
end
def handle_event("inc_volume", %{}, socket) do
case socket.assigns.now_playing.device do
nil ->
{:noreply, socket}
device ->
volume_percent = min(device.volume_percent + 10, 100)
set_volume(volume_percent, socket)
end
end
def handle_event("dec_volume", %{}, socket) do
case socket.assigns.now_playing.device do
nil ->
{:noreply, socket}
device ->
volume_percent = max(device.volume_percent - 10, 0)
set_volume(volume_percent, socket)
end
end
def handle_event("set_volume", %{"volume_percent" => volume_percent}, socket) do
set_volume(volume_percent, socket)
end
def handle_event("refresh_devices", _params, socket) do
:ok = spotify_session().refresh_devices(socket.assigns.session_id)
{:noreply, socket}
end
@impl true
def handle_info({:now_playing, player}, socket) do
changes = Player.changes(socket.assigns.now_playing, player)
cond do
changes == [] ->
{:noreply, socket}
[:progress_ms] == changes ->
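# Only the elapsed time changed: update just the progress bar component
# instead of re-rendering the rest of the view.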
send_update(ProgressBarComponent, id: :progress_bar, progress_ms: player.progress_ms)
{:noreply, socket}
:item in changes ->
case spotify_session().recently_played_tracks(socket.assigns.session_id, limit: 50) do
{:ok, recently_played_tracks} ->
{:noreply,
assign(socket,
suggestions_recently_played_albums: Album.from_tracks(recently_played_tracks),
now_playing: player
)}
error ->
handle_spotify_session_result(error, socket)
end
true ->
{:noreply, assign(socket, :now_playing, player)}
end
end
def handle_info({:player_token, token}, socket) do
{:noreply, assign(socket, :player_token, token)}
end
def handle_info({:devices, devices}, socket) do
{:noreply, assign(socket, :devices, devices)}
end
defp spotify_session, do: Application.get_env(:tune, :spotify_session)
defp handle_suggestions(_params, _url, socket) do
socket = assign(socket, :page_title, gettext("Suggestions"))
with {:ok, playlist} <- get_suggestions_playlist(socket.assigns.session_id),
{:ok, top_tracks} <-
get_top_tracks(
socket.assigns.session_id,
socket.assigns.suggestions_top_albums_time_range
),
{:ok, recently_played_tracks} <-
spotify_session().recently_played_tracks(socket.assigns.session_id, limit: 50),
{:ok, recommended_tracks} <-
get_recommendations(socket.assigns.session_id, top_tracks) do
{:noreply,
assign(socket,
suggestions_playlist: playlist,
suggestions_top_albums: Album.from_tracks(top_tracks),
suggestions_recently_played_albums: Album.from_tracks(recently_played_tracks),
suggestions_recommended_tracks: recommended_tracks
)}
else
{:error, :not_present} ->
{:noreply, assign(socket, :suggestions_playlist, :not_present)}
error ->
handle_spotify_session_result(error, socket)
end
end
defp handle_search(params, _url, socket) do
q = Map.get(params, "q", "")
type = Map.get(params, "type", "track")
page = Map.get(params, "page", "1")
per_page = Map.get(params, "per_page", "24")
socket = assign(socket, :page_title, gettext("Search results"))
if String.length(q) >= 1 do
type = parse_type(type)
page = String.to_integer(page)
limit = String.to_integer(per_page)
offset = max(page - 1, 0) * limit
socket =
socket
|> assign(:q, q)
|> assign(:type, type)
|> assign(:page, page)
|> assign(:per_page, limit)
|> assign(:page_title, gettext("Search results for %{q}", %{q: q}))
search_opts = [types: [type], limit: limit, offset: offset]
case spotify_session().search(socket.assigns.session_id, q, search_opts) do
{:ok, results} ->
{:noreply, assign(socket, :results, Map.get(results, type))}
error ->
handle_spotify_session_result(error, socket)
end
else
{:noreply,
socket
|> assign(:q, nil)
|> assign(:type, type)
|> assign(:results, %{items: [], total: 0})}
end
end
defp handle_artist_details(%{"artist_id" => artist_id} = params, _url, socket) do
album_group =
params
|> Map.get("album_group", "all")
|> parse_album_group()
page =
params
|> Map.get("page", "1")
|> String.to_integer()
limit =
params
|> Map.get("per_page", "24")
|> String.to_integer()
offset = max(page - 1, 0) * limit
socket = assign(socket, :page_title, gettext("Artist details"))
with {:ok, artist} <- spotify_session().get_artist(socket.assigns.session_id, artist_id),
{:ok, %{albums: albums, total: total_albums}} <-
spotify_session().get_artist_albums(socket.assigns.session_id, artist_id,
limit: limit,
offset: offset,
album_group: album_group
) do
artist = %{artist | albums: albums, total_albums: total_albums}
{:noreply,
assign(socket, %{
item: artist,
artist_albums_group: album_group,
page: page,
per_page: limit,
page_title: gettext("Artist details for %{name}", %{name: artist.name})
})}
else
error ->
handle_spotify_session_result(error, socket)
end
end
defp handle_album_details(%{"album_id" => album_id}, _url, socket) do
socket = assign(socket, :page_title, gettext("Album details"))
case spotify_session().get_album(socket.assigns.session_id, album_id) do
{:ok, album} ->
{:noreply,
assign(socket,
item: album,
page_title: gettext("Album details for %{name}", %{name: album.name})
)}
error ->
handle_spotify_session_result(error, socket)
end
end
defp handle_show_details(%{"show_id" => show_id}, _url, socket) do
socket = assign(socket, :page_title, gettext("Show details"))
with {:ok, show} <- spotify_session().get_show(socket.assigns.session_id, show_id),
{:ok, episodes} <- spotify_session().get_episodes(socket.assigns.session_id, show_id) do
show = %{show | episodes: episodes}
{:noreply,
assign(socket,
item: show,
page_title: gettext("Show details for %{name}", %{name: show.name})
)}
else
error ->
handle_spotify_session_result(error, socket)
end
end
defp handle_episode_details(%{"episode_id" => episode_id}, _url, socket) do
socket = assign(socket, :page_title, gettext("Show details"))
with {:ok, episode} <- spotify_session().get_episode(socket.assigns.session_id, episode_id),
{:ok, episodes} <-
spotify_session().get_episodes(socket.assigns.session_id, episode.show.id) do
show = %{episode.show | episodes: episodes}
{:noreply,
assign(socket,
item: show,
page_title: gettext("Show details for %{name}", %{name: show.name})
)}
else
error ->
handle_spotify_session_result(error, socket)
end
end
defp parse_type("track"), do: :track
defp parse_type("album"), do: :album
defp parse_type("artist"), do: :artist
defp parse_type("episode"), do: :episode
defp parse_type("show"), do: :show
defp parse_album_group("all"), do: :all
defp parse_album_group("album"), do: :album
defp parse_album_group("single"), do: :single
defp parse_album_group("appears_on"), do: :appears_on
defp parse_album_group("compilation"), do: :compilation
defp handle_spotify_session_result(:ok, socket), do: {:noreply, socket}
defp handle_spotify_session_result({:error, 404}, socket) do
{:noreply, put_flash(socket, :error, gettext("No available devices"))}
end
defp handle_spotify_session_result({:error, reason}, socket) do
error_message = gettext("Spotify error: %{reason}", %{reason: inspect(reason)})
{:noreply, put_flash(socket, :error, error_message)}
end
@suggestions_playlist_name "Release Radar"
defp get_suggestions_playlist(session_id) do
with {:ok, results} <-
spotify_session().search(session_id, @suggestions_playlist_name,
types: [:playlist],
limit: 1
),
simplified_playlist when is_struct(simplified_playlist) <-
get_in(results, [:playlists, :items, Access.at(0)]) do
spotify_session().get_playlist(session_id, simplified_playlist.id)
else
nil -> {:error, :not_present}
error -> error
end
end
@top_tracks_limit 24
defp get_top_tracks(session_id, time_range) do
opts = [limit: @top_tracks_limit, time_range: time_range]
spotify_session().top_tracks(session_id, opts)
end
defp get_recommendations(session_id, tracks) do
artist_ids =
tracks
|> Track.artist_ids()
|> Enum.shuffle()
|> Enum.take(5)
spotify_session().get_recommendations_from_artists(session_id, artist_ids)
end
defp set_volume(volume_percent, socket) do
case spotify_session().set_volume(socket.assigns.session_id, volume_percent) do
:ok ->
{:noreply, socket}
error ->
handle_spotify_session_result(error, socket)
end
end
defp generate_device_name do
"tune-" <> Device.generate_name()
end
end
|
lib/tune_web/live/explorer_live.ex
| 0.815747
| 0.518485
|
explorer_live.ex
|
starcoder
|
defmodule Exchema.Predicates do
@moduledoc """
Exchema default predicates library
"""
@type error :: {:error, any}
@type failure :: false | error | [error, ...]
@type success :: :ok | true | []
@type result :: failure | success
@doc """
Just applies the function as if it were a predicate.
It also rescues exceptions, so simpler functions can be used.
## Examples
iex> Exchema.Predicates.fun(1, &is_integer/1)
true
iex> Exchema.Predicates.fun("1", &is_integer/1)
false
iex> Exchema.Predicates.fun(1, &(&1 > 0))
true
iex> Exchema.Predicates.fun(0, &(&1 > 0))
false
iex> Exchema.Predicates.fun(1, fn _ -> {:error, :custom_error} end)
{:error, :custom_error}
iex> Exchema.Predicates.fun(1, fn _ -> raise RuntimeError end)
{:error, :thrown}
"""
@spec fun(any, ((any) -> result)) :: result
def fun(val, fun) do
fun.(val)
rescue
_ -> {:error, :thrown}
end
@doc """
Checks the list type.
It can also check the types of the elements of the list by
passing the `:element_type` param.
## Examples
iex> Exchema.Predicates.list("", :any)
{:error, :not_a_list}
iex> Exchema.Predicates.list([], :any)
:ok
iex> Exchema.Predicates.list(["",1,""], Exchema.Types.Integer)
{:error, {
:nested_errors,
[
{0, [{{Exchema.Predicates, :is}, :integer, :not_an_integer}]},
{2, [{{Exchema.Predicates, :is}, :integer, :not_an_integer}]}
]}
}
iex> Exchema.Predicates.list([1,2,3], Exchema.Types.Integer)
:ok
"""
def list(list, _) when not is_list(list) do
{:error, :not_a_list}
end
def list(_list, :any), do: :ok
def list(list, element_type) do
list
|> Enum.with_index
|> Enum.map(fn {e, idx} -> {idx, Exchema.errors(e, element_type)} end)
|> Enum.filter(fn {_, err} -> length(err) > 0 end)
|> nested_errors
end
defp nested_errors(errors, error_key \\ :nested_errors)
defp nested_errors([], _), do: :ok
defp nested_errors(errors, error_key) do
{:error, {error_key, errors}}
end
@doc """
Checks whether or not the given value is a struct or a specific struct.
Note: It's named `is_struct` to avoid conflict with `Kernel.struct`.
## Examples
iex> Exchema.Predicates.is_struct(%{}, [])
{:error, :not_a_struct}
iex> Exchema.Predicates.is_struct(nil, [])
{:error, :not_a_struct}
Also, keep in mind that many internal types are actually structs
iex> Exchema.Predicates.is_struct(DateTime.utc_now, nil)
:ok
iex> Exchema.Predicates.is_struct(NaiveDateTime.utc_now, nil)
:ok
iex> Exchema.Predicates.is_struct(DateTime.utc_now, DateTime)
:ok
iex> Exchema.Predicates.is_struct(DateTime.utc_now, NaiveDateTime)
{:error, :invalid_struct}
iex> Exchema.Predicates.is_struct(NaiveDateTime.utc_now, DateTime)
{:error, :invalid_struct}
iex> Exchema.Predicates.is_struct(DateTime.utc_now, [NaiveDateTime, DateTime])
:ok
iex> Exchema.Predicates.is_struct(Date.utc_today, [NaiveDateTime, DateTime])
{:error, :invalid_struct}
"""
def is_struct(%{__struct__: real}, expected), do: check_struct(real, expected)
def is_struct(_, _), do: {:error, :not_a_struct}
defp check_struct(real, expected) when expected == real, do: :ok
defp check_struct(_, expected) when expected in [nil, :any], do: :ok
defp check_struct(real, alloweds) when is_list(alloweds) do
if real in alloweds, do: :ok, else: {:error, :invalid_struct}
end
defp check_struct(_,_), do: {:error, :invalid_struct}
@doc """
Checks the key types of a map
## Examples
iex> Exchema.Predicates.key_type("", :any)
{:error, :not_a_map}
iex > Exchema.Predicates.key_type(%{1 => "value"}, Exchema.Types.Integer)
:ok
iex > Exchema.Predicates.key_type(%{"key" => 1}, Exchema.Types.Integer)
{:error, {
:key_errors,
[{"key", [{{Exchema.Predicates, :is}, :integer, :not_an_integer}]}]
}}
"""
def key_type(%{} = map, type) do
map
|> Map.keys
|> Enum.flat_map(& Exchema.errors(&1, type))
|> nested_errors(:key_errors)
end
def key_type(_, _), do: {:error, :not_a_map}
@doc """
Checks the types of the values of a Map or Keyword List
## Examples
iex > Exchema.Predicates.value_type(%{"key" => 1}, Exchema.Types.Integer)
:ok
iex > Exchema.Predicates.value_type([key: 1], Exchema.Types.Integer)
:ok
iex > Exchema.Predicates.value_type(%{1 => "value"}, Exchema.Types.Integer)
{:error, {
:nested_errors,
[{1, [{{Exchema.Predicates, :is}, :integer, :not_an_integer}]}]
}}
iex > Exchema.Predicates.value_type([foo: :bar], Exchema.Types.Integer)
{:error, {
:nested_errors,
[{:foo, [{{Exchema.Predicates, :is}, :integer, :not_an_integer}]}]
}}
"""
def value_type(%{} = map, type), do: do_value_type(Map.to_list(map), type)
def value_type(kwlist, type) do
if Keyword.keyword?(kwlist) do
do_value_type(kwlist, type)
else
{:error, :not_list_or_map}
end
end
defp do_value_type(tuple_list, type) do
tuple_list
|> Enum.flat_map(fn {key, value} ->
case Exchema.errors(value, type) do
[] -> []
errs -> [{key, errs}]
end
end)
|> nested_errors(:nested_errors)
end
@doc """
Checks the types of specific fields of a Map or Keyword List
## Examples
iex> Exchema.Predicates.fields(%{foo: 1}, foo: Exchema.Types.Integer)
:ok
iex> Exchema.Predicates.fields([foo: 1], foo: Exchema.Types.Integer)
:ok
iex> Exchema.Predicates.fields(%{foo: :bar}, foo: Exchema.Types.Integer)
{:error, {
:nested_errors,
[{:foo, [{{Exchema.Predicates, :is}, :integer, :not_an_integer}]}]
}}
"""
def fields(%{} = map, fields), do: do_fields(map, &Map.get/2, fields)
def fields(kwlist, fields) do
if Keyword.keyword?(kwlist) do
do_fields(kwlist, &Keyword.get/2, fields)
else
{:error, :not_list_or_map}
end
end
defp do_fields(collection, get_fn, fields) do
fields
|> Enum.flat_map(fn {key, type} ->
case Exchema.errors(get_fn.(collection, key), type) do
[] -> []
errs -> [{key, errs}]
end
end)
|> nested_errors(:nested_errors)
end
@doc """
Checks against system guards like `is_integer` or `is_float`.
## Examples
iex> Exchema.Predicates.is(1, :integer)
:ok
iex> Exchema.Predicates.is(1.0, :float)
:ok
iex> Exchema.Predicates.is(1, :nil)
{:error, :not_nil}
iex> Exchema.Predicates.is(1, :atom)
{:error, :not_an_atom}
iex> Exchema.Predicates.is(nil, :binary)
{:error, :not_a_binary}
iex> Exchema.Predicates.is(nil, :bitstring)
{:error, :not_a_bitstring}
iex> Exchema.Predicates.is(nil, :boolean)
{:error, :not_a_boolean}
iex> Exchema.Predicates.is(nil, :float)
{:error, :not_a_float}
iex> Exchema.Predicates.is(nil, :function)
{:error, :not_a_function}
iex> Exchema.Predicates.is(nil, :integer)
{:error, :not_an_integer}
iex> Exchema.Predicates.is(nil, :list)
{:error, :not_a_list}
iex> Exchema.Predicates.is(nil, :map)
{:error, :not_a_map}
iex> Exchema.Predicates.is(nil, :number)
{:error, :not_a_number}
iex> Exchema.Predicates.is(nil, :pid)
{:error, :not_a_pid}
iex> Exchema.Predicates.is(nil, :port)
{:error, :not_a_port}
iex> Exchema.Predicates.is(nil, :reference)
{:error, :not_a_reference}
iex> Exchema.Predicates.is(nil, :tuple)
{:error, :not_a_tuple}
"""
# Explicit nil case because Kernel.is_nil/1 is a macro and cannot be called via apply/3
def is(nil, nil), do: :ok
def is(_, nil), do: {:error, :not_nil}
def is(val, key) do
if apply(Kernel, :"is_#{key}", [val]) do
:ok
else
{:error, is_error_msg(key)}
end
end
defp is_error_msg(:atom), do: :not_an_atom
defp is_error_msg(:integer), do: :not_an_integer
defp is_error_msg(key), do: :"not_a_#{key}"
@doc """
Ensure the value is in a list of values
## Examples
iex> Exchema.Predicates.inclusion("apple", ["apple", "banana"])
:ok
iex> Exchema.Predicates.inclusion(5, 1..10)
:ok
iex> Exchema.Predicates.inclusion("horse", ["apple", "banana"])
{:error, :invalid}
"""
def inclusion(val, values) do
if val in values, do: :ok, else: {:error, :invalid}
end
@doc """
Ensure the value is not in a list of values
## Examples
iex> Exchema.Predicates.exclusion("apple", ["apple", "banana"])
{:error, :invalid}
iex> Exchema.Predicates.exclusion(5, 1..10)
{:error, :invalid}
iex> Exchema.Predicates.exclusion("horse", ["apple", "banana"])
:ok
"""
def exclusion(val, values) do
if val in values, do: {:error, :invalid}, else: :ok
end
@doc """
Checks against a specific regex format
## Examples
iex> Exchema.Predicates.format("starts-with", ~r/^starts-/)
:ok
iex> Exchema.Predicates.format("does-not-starts-with", ~r/^starts-/)
{:error, :invalid}
"""
def format(val, regex) when is_binary(val) do
if Regex.match?(regex, val), do: :ok, else: {:error, :invalid}
end
def format(_, _), do: {:error, :invalid}
@doc """
Checks the length of the input. You can pass a max, a min, a range, or a specific length.
Works on lists, strings, and tuples.
## Examples
iex> Exchema.Predicates.length("123", 3)
:ok
iex> Exchema.Predicates.length([1,2,3], 3)
:ok
iex> Exchema.Predicates.length({1,2,3}, 3)
:ok
iex> Exchema.Predicates.length([1,2,3], min: 2)
:ok
iex> Exchema.Predicates.length([1,2,3], max: 3)
:ok
iex> Exchema.Predicates.length([1,2,3], 2..4)
:ok
iex> Exchema.Predicates.length([1,2,3], min: 2, max: 4)
:ok
iex> Exchema.Predicates.length([1,2,3], min: 4)
{:error, :invalid_length}
iex> Exchema.Predicates.length([1,2,3], max: 2)
{:error, :invalid_length}
iex> Exchema.Predicates.length([1,2,3], min: 1, max: 2)
{:error, :invalid_length}
iex> Exchema.Predicates.length([1,2,3], 2)
{:error, :invalid_length}
iex> Exchema.Predicates.length([1,2,3], 1..2)
{:error, :invalid_length}
"""
def length(val, opts) when is_binary(val) do
compare_length(String.length(val), length_bounds(opts))
end
def length(val, opts) when is_tuple(val) do
compare_length(tuple_size(val), length_bounds(opts))
end
def length(val, opts) when is_list(val) do
compare_length(length(val), length_bounds(opts))
end
def length(_, _), do: {:error, :invalid}
defp length_bounds(n) when is_integer(n), do: {n, n}
defp length_bounds(%{__struct__: Range, first: min, last: max}), do: {min, max}
defp length_bounds(opts) when is_list(opts) do
{Keyword.get(opts, :min), Keyword.get(opts, :max)}
end
defp length_bounds(_), do: {nil, nil}
defp compare_length(_, {nil, nil}), do: :ok
defp compare_length(l, {min, nil}) do
if min > l, do: {:error, :invalid_length}, else: :ok
end
defp compare_length(l, {nil, max}) do
if max < l, do: {:error, :invalid_length}, else: :ok
end
defp compare_length(l, {min, max}) when min > l or max < l, do: {:error, :invalid_length}
defp compare_length(_, _), do: :ok
@doc """
Checks if something is greater than a value
iex> Exchema.Predicates.gt(2, 1)
:ok
iex> Exchema.Predicates.gt(2, 2)
{:error, :not_greater}
iex> Exchema.Predicates.gt(2, 3)
{:error, :not_greater}
iex> Exchema.Predicates.gt("b", "a")
:ok
iex> Exchema.Predicates.gt("a", "b")
{:error, :not_greater}
"""
def gt(a, b) when a > b, do: :ok
def gt(_, _), do: {:error, :not_greater}
@doc """
Checks if something is greater than or equal to a value
iex> Exchema.Predicates.gte(2, 1)
:ok
iex> Exchema.Predicates.gte(2, 2)
:ok
iex> Exchema.Predicates.gte(2, 3)
{:error, :not_greater_or_equal}
iex> Exchema.Predicates.gte("b", "a")
:ok
iex> Exchema.Predicates.gte("a", "b")
{:error, :not_greater_or_equal}
"""
def gte(a, b) when a >= b, do: :ok
def gte(_, _), do: {:error, :not_greater_or_equal}
@doc """
Checks if something is less than a value
iex> Exchema.Predicates.lt(1, 2)
:ok
iex> Exchema.Predicates.lt(2, 2)
{:error, :not_lesser}
iex> Exchema.Predicates.lt(3, 2)
{:error, :not_lesser}
iex> Exchema.Predicates.lt("a", "b")
:ok
iex> Exchema.Predicates.lt("b", "a")
{:error, :not_lesser}
"""
def lt(a, b) when a < b, do: :ok
def lt(_, _), do: {:error, :not_lesser}
@doc """
Checks if something is less than or equal to a value
iex> Exchema.Predicates.lte(1, 2)
:ok
iex> Exchema.Predicates.lte(2, 2)
:ok
iex> Exchema.Predicates.lte(3, 2)
{:error, :not_lesser_or_equal}
iex> Exchema.Predicates.lte("a", "b")
:ok
iex> Exchema.Predicates.lte("b", "a")
{:error, :not_lesser_or_equal}
"""
def lte(a, b) when a <= b, do: :ok
def lte(_, _), do: {:error, :not_lesser_or_equal}
end
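# Usage sketch (not part of the library): a demo module exercising a few of
# the predicates above, with expected results taken from their doctests.
defmodule Exchema.Predicates.UsageSketch do
  @moduledoc false
  alias Exchema.Predicates

  def demo do
    :ok = Predicates.length("abc", min: 1, max: 5)
    :ok = Predicates.inclusion(3, 1..10)
    {:error, :invalid_struct} = Predicates.is_struct(DateTime.utc_now(), NaiveDateTime)
    :ok
  end
end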
# (file: lib/exchema/predicates.ex)
defmodule Ueberauth.Strategy.Mailchimp do
@moduledoc """
Implements a Ueberauth strategy for authentication with mailchimp.com.
When configuring the strategy in the Ueberauth providers, you can specify some defaults:
```elixir
config :ueberauth, Ueberauth,
  providers: [
    mailchimp: { Ueberauth.Strategy.Mailchimp, [] }
  ]
```
"""
use Ueberauth.Strategy,
uid_field: :email,
oauth2_module: Ueberauth.Strategy.Mailchimp.OAuth
alias Ueberauth.Auth.Credentials
# When handling the request just redirect to Mailchimp
@doc false
def handle_request!(conn) do
opts = Keyword.put([], :redirect_uri, callback_url(conn))
module = option(conn, :oauth2_module)
redirect!(conn, apply(module, :authorize_url!, [opts]))
end
# When handling the callback, if there were no errors we exchange the
# received code for an access token and store it in the private section
# of the conn so that it is available later to build the auth struct.
@doc false
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
module = option(conn, :oauth2_module)
params = [code: code]
redirect_uri = get_redirect_uri(conn)
options = %{
options: [
client_options: [redirect_uri: redirect_uri]
]
}
token = apply(module, :get_token!, [params, options])
if token.access_token == nil do
set_errors!(conn, [
error(token.other_params["error"], token.other_params["error_description"])
])
else
conn
|> store_token(token)
end
end
# If we don't match code, then we have an issue
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
# We store the token for use later when fetching the mailchimp auth and user and constructing the auth struct.
defp store_token(conn, token) do
put_private(conn, :mailchimp_token, token)
end
# Remove the temporary storage in the conn for our data. Run after the auth struct has been built.
@doc false
def handle_cleanup!(conn) do
conn
|> put_private(:mailchimp_token, nil)
end
@doc false
def credentials(conn) do
token = conn.private.mailchimp_token
%Credentials{
token: token.access_token,
refresh_token: token.refresh_token,
expires_at: token.expires_at,
token_type: token.token_type,
expires: !!token.expires_at,
scopes: [],
other: %{}
}
end
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
defp get_redirect_uri(%Plug.Conn{} = conn) do
config = Application.get_env(:ueberauth, Ueberauth)
redirect_uri = Keyword.get(config, :redirect_uri)
if is_nil(redirect_uri) do
callback_url(conn)
else
redirect_uri
end
end
end
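# Configuration sketch (hedged): the provider entry matches the moduledoc
# above; the credential keys for Ueberauth.Strategy.Mailchimp.OAuth are an
# assumption, since that module is not defined in this file.
#
# In config/config.exs:
#
#   config :ueberauth, Ueberauth,
#     providers: [mailchimp: {Ueberauth.Strategy.Mailchimp, []}]
#
#   # Assumed credential keys for the OAuth module:
#   config :ueberauth, Ueberauth.Strategy.Mailchimp.OAuth,
#     client_id: System.get_env("MAILCHIMP_CLIENT_ID"),
#     client_secret: System.get_env("MAILCHIMP_CLIENT_SECRET")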
# (file: lib/ueberauth/strategy/mailchimp.ex)
defmodule Heroicons do
@moduledoc """
This package adds a convenient way of using [Heroicons](https://heroicons.com) with your Phoenix, Phoenix LiveView and Surface applications.
Heroicons is "A set of 450+ free MIT-licensed high-quality SVG icons for you to use in your web projects."
Created by the amazing folks at [Tailwind Labs](https://github.com/tailwindlabs).
You can find the original docs [here](https://heroicons.com) and repo [here](https://github.com/tailwindlabs/heroicons).
## Installation
Add `ex_heroicons` to the list of dependencies in `mix.exs`:
def deps do
[
{:ex_heroicons, "~> 0.6.0"}
]
end
Then run `mix deps.get`.
## Usage
#### With Eex or Leex
<%= Heroicons.icon("academic-cap", type: "outline", class: "h-4 w-4") %>
#### With Heex
<Heroicons.LiveView.icon name="academic-cap" type="outline" class="h-4 w-4" />
#### With Surface
<Heroicons.Surface.Icon name="academic-cap" type="outline" class="h-4 w-4" />
## Config
Defaults can be set in the `Heroicons` application configuration.
config :ex_heroicons, type: "outline"
"""
alias __MODULE__.Icon
icon_paths = "node_modules/heroicons/**/*.svg" |> Path.wildcard()
icons =
for icon_path <- icon_paths do
@external_resource Path.relative_to_cwd(icon_path)
Icon.parse!(icon_path)
end
types = icons |> Enum.map(& &1.type) |> Enum.uniq()
names = icons |> Enum.map(& &1.name) |> Enum.uniq()
@types types
@names names
@doc "Returns a list of available icon types"
@spec types() :: [String.t()]
def types(), do: @types
@doc "Returns a list of available icon names"
@spec names() :: [String.t()]
def names(), do: @names
@doc false
def default_type() do
case Application.get_env(:ex_heroicons, :type) do
nil ->
nil
type when is_binary(type) ->
if type in types() do
type
else
raise ArgumentError,
"expected default type to be one of #{inspect(types())}, got: #{inspect(type)}"
end
type ->
raise ArgumentError,
"expected default type to be one of #{inspect(types())}, got: #{inspect(type)}"
end
end
@doc """
Generates an icon.
Options may be passed through to the SVG tag for custom attributes.
## Options
* `:type` - the icon type. Accepted values are #{inspect(types)}. Required if default type is not configured.
* `:class` - the css class added to the SVG tag
## Examples
icon("academic-cap", type: "outline", class: "h-4 w-4")
#=> <svg class="h-4 w-4" xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path d="M12 14l9-5-9-5-9 5 9 5z"/>
<path d="M12 14l6.16-3.422a12.083 12.083 0 01.665 6.479A11.952 11.952 0 0012 20.055a11.952 11.952 0 00-6.824-2.998 12.078 12.078 0 01.665-6.479L12 14z"/>
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 14l9-5-9-5-9 5 9 5zm0 0l6.16-3.422a12.083 12.083 0 01.665 6.479A11.952 11.952 0 0012 20.055a11.952 11.952 0 00-6.824-2.998 12.078 12.078 0 01.665-6.479L12 14zm-4 6v-7.5l4-2.222"/>
</svg>
"""
@spec icon(String.t(), keyword) :: Phoenix.HTML.safe()
def icon(name, opts \\ []) when is_binary(name) and is_list(opts) do
{type, opts} = Keyword.pop(opts, :type, default_type())
unless type do
raise ArgumentError,
"expected type in options, got: #{inspect(opts)}"
end
unless type in types() do
raise ArgumentError,
"expected type to be one of #{inspect(types())}, got: #{inspect(type)}"
end
icon(type, name, opts)
end
for %Icon{type: type, name: name, file: file} <- icons do
defp icon(unquote(type), unquote(name), opts) do
attrs = Icon.opts_to_attrs(opts)
Icon.insert_attrs(unquote(file), attrs)
end
end
defp icon(type, name, _opts) do
raise ArgumentError,
"icon #{inspect(name)} with type #{inspect(type)} does not exist."
end
end
# (file: lib/heroicons.ex)
defmodule Farmbot.Regimen.Manager do
@moduledoc "Manages a Regimen"
use Farmbot.Logger
use GenServer
alias Farmbot.CeleryScript
alias Farmbot.Asset
alias Asset.Regimen
import Farmbot.System.ConfigStorage, only: [get_config_value: 3]
defmodule Error do
@moduledoc false
defexception [:epoch, :regimen, :message]
end
defmodule Item do
@moduledoc false
@type t :: %__MODULE__{
name: binary,
time_offset: integer,
sequence: CeleryScript.AST.t
}
defstruct [:time_offset, :sequence, :name]
def parse(%{time_offset: offset, sequence_id: sequence_id})
do
sequence = fetch_sequence(sequence_id)
{:ok, ast} = CeleryScript.AST.decode(sequence)
ast_with_label = %{ast | args: Map.put(ast.args, :label, sequence.name)}
%__MODULE__{
name: sequence.name,
time_offset: offset,
sequence: ast_with_label}
end
def fetch_sequence(id), do: Asset.get_sequence_by_id!(id)
end
@doc false
def start_link(regimen, time) do
GenServer.start_link(__MODULE__, [regimen, time], name: :"regimen-#{regimen.id}")
end
def init([regimen, time]) do
# parse and sort the regimen items
items = filter_items(regimen)
first_item = List.first(items)
regimen = %{regimen | regimen_items: items}
epoch = build_epoch(time) || raise Error,
message: "Could not determine EPOCH because no timezone was supplied.",
epoch: :error, regimen: regimen
initial_state = %{
next_execution: nil,
regimen: regimen,
epoch: epoch,
timer: nil
}
if first_item do
state = build_next_state(regimen, first_item, self(), initial_state)
{:ok, state}
else
Logger.warn 2, "[#{regimen.name}] has no regimen items."
:ignore
end
end
def handle_info(:execute, state) do
{item, regimen} = pop_item(state.regimen)
if item do
do_item(item, regimen, state)
else
complete(regimen, state)
end
end
def handle_info(:skip, state) do
{item, regimen} = pop_item(state.regimen)
if item do
do_item(nil, regimen, state)
else
complete(regimen, state)
end
end
defp complete(regimen, state) do
Logger.success 2, "[#{regimen.name}] is complete!"
# spawn fn() ->
# RegSup.remove_child(regimen)
# end
{:stop, :normal, state}
# {:noreply, :finished}
end
defp filter_items(regimen) do
regimen.regimen_items
|> Enum.map(&Item.parse(&1))
|> Enum.sort(&(&1.time_offset <= &2.time_offset))
end
defp do_item(item, regimen, state) do
if item do
Logger.busy 2, "[#{regimen.name}] is going to execute: #{item.name}"
CeleryScript.execute(item.sequence)
end
next_item = List.first(regimen.regimen_items)
if next_item do
new_state = build_next_state(regimen, next_item, self(), state)
{:noreply, new_state}
else
complete(regimen, state)
end
end
def build_next_state(
%Regimen{} = regimen,
%Item{} = nx_itm,
pid, state)
do
next_dt = Timex.shift(state.epoch, milliseconds: nx_itm.time_offset)
timezone = get_config_value(:string, "settings", "timezone")
now = Timex.now(timezone)
offset_from_now = Timex.diff(next_dt, now, :milliseconds)
timer = if offset_from_now < -60_000 do
Logger.info 3, "[#{regimen.name}] #{nx_itm.name} was scheduled " <>
"more than one minute ago (#{offset_from_now} ms). Skipping it."
Process.send_after(pid, :skip, 1000)
else
{msg, real_offset} = ensure_not_negative(offset_from_now)
Process.send_after(pid, msg, real_offset)
end
timestr = "#{next_dt.month}/#{next_dt.day}/#{next_dt.year} " <>
"at: #{next_dt.hour}:#{next_dt.minute} (#{offset_from_now} milliseconds)"
Logger.debug 3, "[#{regimen.name}] next item will execute on #{timestr}"
%{state | timer: timer,
regimen: regimen,
next_execution: next_dt}
end
defp ensure_not_negative(offset) when offset < -60_000, do: {:skip, 1000}
defp ensure_not_negative(offset) when offset < 0, do: {:execute, 1000}
defp ensure_not_negative(offset), do: {:execute, offset}
@spec pop_item(Regimen.t) :: {Item.t | nil, Regimen.t}
# pop the next item off the regimen, if there is one
defp pop_item(%Regimen{regimen_items: [do_this_one | items]} = r) do
{do_this_one, %Regimen{r | regimen_items: items}}
end
defp pop_item(%Regimen{regimen_items: []} = r), do: {nil, r}
# returns midnight of today
@spec build_epoch(DateTime.t) :: DateTime.t
def build_epoch(time) do
tz = get_config_value(:string, "settings", "timezone")
n = Timex.Timezone.convert(time, tz)
Timex.shift(n, hours: -n.hour, seconds: -n.second, minutes: -n.minute)
end
end
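# Scheduling decision sketch, derived directly from ensure_not_negative/1
# above (private, so shown as comments):
#
#   ensure_not_negative(-120_000) #=> {:skip, 1000}      more than a minute late
#   ensure_not_negative(-5_000)   #=> {:execute, 1000}   slightly late, run soon
#   ensure_not_negative(30_000)   #=> {:execute, 30_000} on time, wait it out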
# (file: lib/farmbot/regimen/manager.ex)
defmodule RedixPool do
@moduledoc """
This module provides an API for using `Redix` through a pool of workers.
## Overview
`RedixPool` is very simple, it is merely wraps `Redix` with a pool of `Poolboy`
workers. All function calls get passed through to a `Redix` connection.
Please see the [redix](https://github.com/whatyouhide/redix) library for
more in-depth documentation. Many of the examples in this documentation are
pulled directly from the `Redix` docs.
"""
use Application
alias RedixPool.Config
@type command :: [binary]
# The default timeout is hard-coded into the poolboy calls below. Because
# we are inferring configuration here, we don't want to be doing this after
# the pool has started. Ways we could make this configurable:
# - store the computed value back into the Application env
# - use the Ecto.Repo pattern, and let the developer decide how to
#   supply this config
@default_timeout 5000
@doc "Start the default pool if args is empty"
def start(type, args) when args == [], do: start(type, [[]])
@doc """
Pass a list of pool specs to start
Example
```elixir
def application do
[mod: {RedixPool,[
[pool: :redix_default],
[pool: :sessions_ro, pool_name: :session_ro]]}]
end
```
```elixir
config :redix_pool, :redix_default, []
config :redix_pool, :sessions_ro, []
```
"""
def start(_type, _args) do
children = Config.starting_pools
|> Enum.map(&__MODULE__.redix_pool_spec/1)
# |> IO.inspect
opts = [strategy: :one_for_one, name: RedixPool.Supervisor]
Supervisor.start_link(children, opts)
end
@doc "Convenience helper for starting a pool supervisor"
def start_pool(pool_name) when is_atom(pool_name), do: start_pool(pool: pool_name)
def start_pool(args) when is_list(args) do
children = [RedixPool.redix_pool_spec(args)]
{:ok, pid} = Supervisor.start_link(children, strategy: :one_for_one)
{:ok, pid}
end
@doc "Returns a poolboy child spec based upon parsing configs"
def redix_pool_spec(pool_name) when is_atom(pool_name), do: redix_pool_spec(pool: pool_name)
def redix_pool_spec(args) when is_list(args) do
%{
pool_name: pool_name,
redix_opts: redix_opts,
pool_size: pool_size,
pool_max_overflow: pool_max_overflow
} = Config.config_map(args)
pool_options = [
name: {:local, pool_name},
worker_module: Redix,
size: pool_size,
max_overflow: pool_max_overflow
]
:poolboy.child_spec(pool_name, pool_options, redix_opts)
end
@doc """
Returns a child spec for a single worker based upon parsing configs.
"""
def redix_worker_spec(args) do
%{
redix_opts: redix_opts,
} = Config.config_map(args)
Redix.child_spec(redix_opts)
end
@doc """
Normalizes the Redix worker args so that it is compatible with poolboy.
Extracted from the Redix source code.
"""
def normalize_redix_spec({uri, other_opts}) do
uri
|> Redix.URI.opts_from_uri
|> Keyword.merge(other_opts)
end
@doc"""
Wrapper to call `Redix.command/3` inside a poolboy transaction.
## Examples
iex> RedixPool.command(:redix_default, ["SET", "k", "foo"])
{:ok, "OK"}
iex> RedixPool.command(:redix_default, ["GET", "k"])
{:ok, "foo"}
"""
@spec command(atom, command, Keyword.t) ::
{:ok, [Redix.Protocol.redis_value]} | {:error, atom | Redix.Error.t}
def command(pool_name, args, opts \\ []) do
:poolboy.transaction(
pool_name,
fn(conn) -> Redix.command(conn, args, opts) end,
poolboy_timeout(pool_name)
)
end
@doc"""
Wrapper to call `Redix.command!/3` inside a poolboy transaction, raising if
there's an error.
## Examples
iex> RedixPool.command!(:redix_default, ["SET", "k", "foo"])
"OK"
iex> RedixPool.command!(:redix_default, ["GET", "k"])
"foo"
"""
@spec command!(atom, command, Keyword.t) :: Redix.Protocol.redis_value | no_return
def command!(pool_name, args, opts \\ []) do
:poolboy.transaction(
pool_name,
fn(conn) -> Redix.command!(conn, args, opts) end,
poolboy_timeout(pool_name)
)
end
@doc"""
Wrapper to call `Redix.pipeline/3` inside a poolboy transaction.
## Examples
iex> RedixPool.pipeline(:redix_default, [["INCR", "mykey"], ["INCR", "mykey"], ["DECR", "mykey"]])
{:ok, [1, 2, 1]}
iex> RedixPool.pipeline(:redix_default, [["SET", "k", "foo"], ["INCR", "k"], ["GET", "k"]])
{:ok, ["OK", %Redix.Error{message: "ERR value is not an integer or out of range"}, "foo"]}
"""
@spec pipeline(atom, [command], Keyword.t) ::
{:ok, [Redix.Protocol.redis_value]} | {:error, atom}
def pipeline(pool_name, args, opts \\ []) do
:poolboy.transaction(
pool_name,
fn(conn) -> Redix.pipeline(conn, args, opts) end,
poolboy_timeout(pool_name)
)
end
@doc"""
Wrapper to call `Redix.pipeline!/3` inside a poolboy transaction, raising if there
are errors issuing the commands (but not if the commands are successfully
issued and result in errors).
## Examples
iex> RedixPool.pipeline!(:redix_default, [["INCR", "mykey"], ["INCR", "mykey"], ["DECR", "mykey"]])
[1, 2, 1]
iex> RedixPool.pipeline!(:redix_default, [["SET", "k", "foo"], ["INCR", "k"], ["GET", "k"]])
["OK", %Redix.Error{message: "ERR value is not an integer or out of range"}, "foo"]
"""
@spec pipeline!(atom, [command], Keyword.t) :: [Redix.Protocol.redis_value] | no_return
def pipeline!(pool_name, args, opts \\ []) do
:poolboy.transaction(
pool_name,
fn(conn) -> Redix.pipeline!(conn, args, opts) end,
poolboy_timeout(pool_name)
)
end
# Looks up the configured timeout for a pool, falling back to the default.
defp poolboy_timeout(pool_name) do
Application.get_env(:redix_pool, pool_name)
|> Access.get(:timeout, @default_timeout)
end
end
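# Usage sketch based on the doctests above; assumes the :redix_default pool
# is configured (config :redix_pool, :redix_default, []) and Redis is running.
defmodule RedixPool.UsageSketch do
  @moduledoc false
  def demo do
    {:ok, _pid} = RedixPool.start_pool(:redix_default)
    {:ok, "OK"} = RedixPool.command(:redix_default, ["SET", "k", "foo"])
    {:ok, "foo"} = RedixPool.command(:redix_default, ["GET", "k"])
  end
end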
# (file: lib/redix_pool.ex)
defmodule Rummage.Phoenix.SearchController do
@moduledoc """
`SearchController` a controller helper in `Rummage.Phoenix` which stores
helpers for Search hook in `Rummage`. This formats params before `index`
action into a format that is expected by the default `Rummage.Ecto`'s search
hook: `Rummage.Ecto.Search`
"""
@doc """
This function formats params into `rummage` params that are expected by
`Rummage.Ecto`'s default search hook:
## Examples
When the `rummage` passed is an empty `Map`, it returns
an empty `Map`:
iex> alias Rummage.Phoenix.SearchController
iex> rummage = %{}
iex> SearchController.rummage(rummage)
%{}
When `rummage` passed is not an empty `Map`, but
doesn't have a `"search"` key, it returns
an empty `Map`:
iex> alias Rummage.Phoenix.SearchController
iex> rummage = %{"pizza" => "eat"}
iex> SearchController.rummage(rummage)
%{}
When `rummage` passed is not an empty `Map`, but
the value corresponding to `"search"` key is an empty `String`,
it returns an empty `Map`:
iex> alias Rummage.Phoenix.SearchController
iex> rummage = %{"search" => ""}
iex> SearchController.rummage(rummage)
%{}
When `rummage` passed is not an empty `Map`, but
the value corresponding to `"search"` key is a `Map`,
it returns the `Map` itself, with the association converted
to a one-element list (this will change in the future when `Rummage.Phoenix`
adds support for deeper `associations`):
iex> alias Rummage.Phoenix.SearchController
iex> rummage = %{"search" => %{"field" => %{"assoc" => "assoc"}}}
iex> SearchController.rummage(rummage)
%{"field" => %{"assoc" => ["assoc"]}}
"""
def rummage(rummage) do
search_params = Map.get(rummage, "search")
case search_params do
s when s in [nil, "", %{}] -> %{}
_ ->
search_params
|> Map.to_list()
|> Enum.map(&change_assoc_to_a_one_member_list/1)
|> Enum.reject(fn x -> elem(x, 1) == "" end)
|> Enum.into(%{})
end
end
# This is temporary until we figure out how to add multiple assoc to a form
defp change_assoc_to_a_one_member_list({field, params}) do
case params["assoc"] do
nil -> {field, Map.put(params, "assoc", [])}
assoc when assoc == "" -> {field, Map.put(params, "assoc", [])}
assoc when is_binary(assoc) -> {field, Map.put(params, "assoc", String.split(assoc, " -> "))}
_ -> {field, params}
end
end
end
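# Association-splitting sketch: rummage/1 turns a string "assoc" into a
# path list by splitting on " -> " (see change_assoc_to_a_one_member_list/1):
#
#   iex> rummage = %{"search" => %{"name" => %{"assoc" => "posts -> comments"}}}
#   iex> Rummage.Phoenix.SearchController.rummage(rummage)
#   %{"name" => %{"assoc" => ["posts", "comments"]}}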
# (file: lib/rummage_phoenix/hooks/controllers/search_controller.ex)
defmodule Day25 do
use Bitwise
def part1(input) do
Interpreter.new(input)
|> Map.put(:out_handler, {&analyse_signal/2, nil})
|> find_value(0)
end
defp find_value(interpreter, value) do
result = interpreter
|> Map.put(:a, value)
|> Interpreter.execute
case result do
:ok -> value
:error -> find_value(interpreter, value + 1)
end
end
# The accumulator is {bits_checked, expected_value}. Accept the first bit
# and expect its complement next.
defp analyse_signal(value, nil) do
next = (value + 1) &&& 1
{:cont, {0, next}}
end
# After 64 alternating bits, accept the signal as a valid clock.
defp analyse_signal(_, {64, _}) do
{:halt, :ok}
end
# The observed bit matches the expected one; expect its complement next.
defp analyse_signal(value, {num_bits, value}) do
next = (value + 1) &&& 1
{:cont, {num_bits + 1, next}}
end
# Any mismatch means this seed value does not produce a clock signal.
defp analyse_signal(_, _) do
{:halt, :error}
end
end
defmodule Interpreter do
use Bitwise
def new(program, c \\ 0) do
machine(program)
|> Map.put(:c, c)
end
defp machine(program) do
read_program(program)
|> Map.put(:a, 0)
|> Map.put(:b, 0)
|> Map.put(:c, 0)
|> Map.put(:d, 0)
|> Map.put(:ip, 0)
|> Map.put(:out_handler, {fn _, _ -> {:cont, 0} end, 0})
end
def execute(memory), do: execute(memory, 0)
def execute(memory, ip) do
instr = Map.get(memory, ip, :done)
case instr do
{:cpy, src, dst} when is_atom(dst) ->
memory = Map.replace!(memory, dst, get_value(memory, src))
execute(memory, ip + 1)
{:cpy, _src, dst} when is_integer(dst) ->
execute(memory, ip + 1)
{:inc, dst} ->
value = Map.fetch!(memory, dst)
memory = Map.replace!(memory, dst, value + 1)
execute(memory, ip + 1)
{:dec, dst} ->
value = Map.fetch!(memory, dst)
memory = Map.replace!(memory, dst, value - 1)
execute(memory, ip + 1)
{:jnz, src, offset} ->
case get_value(memory, src) do
0 -> execute(memory, ip + 1)
_ -> execute(memory, ip + get_value(memory, offset))
end
{:tgl, src} ->
address = ip + get_value(memory, src)
memory = toggle(memory, address)
execute(memory, ip + 1)
{:out, src} ->
value = get_value(memory, src)
true = value in 0..1
{f, acc} = Map.fetch!(memory, :out_handler)
case f.(value, acc) do
{:cont, acc} ->
memory = %{memory | out_handler: {f, acc}}
execute(memory, ip + 1)
{:halt, return} ->
return
end
:done ->
memory
end
end
defp toggle(memory, address) do
case memory do
%{^address => instr} ->
instr = toggle_instr(instr)
Map.replace!(memory, address, instr)
%{} ->
memory
end
end
defp toggle_instr(instr) do
case instr do
{:inc, dst} ->
{:dec, dst}
{_, arg} ->
{:inc, arg}
{:jnz, arg1, arg2} ->
{:cpy, arg1, arg2}
{_, arg1, arg2} ->
{:jnz, arg1, arg2}
end
end
defp get_value(memory, src) do
if is_atom(src) do
Map.fetch!(memory, src)
else
src
end
end
defp read_program(input) do
input
|> Enum.map(fn instr ->
[name | args] = String.split(instr, " ")
args = Enum.map(args, fn arg ->
case Integer.parse(arg) do
:error -> String.to_atom(arg)
{val, ""} -> val
end
end)
List.to_tuple([String.to_atom(name) | args])
end)
|> Stream.with_index
|> Stream.map(fn {code, index} -> {index, code} end)
|> Map.new
end
end
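# Smoke-test sketch for the interpreter above: copy 7 into register :a,
# decrement twice, and check the final machine state.
defmodule Interpreter.UsageSketch do
  @moduledoc false
  def demo do
    machine = Interpreter.new(["cpy 7 a", "dec a", "dec a"])
    %{a: 5} = Interpreter.execute(machine)
    :ok
  end
end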
# (file: day25/lib/day25.ex)
defmodule Jason.OrderedObject do
@doc """
Struct implementing a JSON object retaining order of properties.
A wrapper around a keyword (that supports non-atom keys) allowing for
proper protocol implementations.
Implements the `Access` behaviour and `Enumerable` protocol with
complexity similar to keywords/lists.
"""
@behaviour Access
@type t :: %__MODULE__{values: [{String.Chars.t(), term()}]}
defstruct values: []
def new(values) when is_list(values) do
%__MODULE__{values: values}
end
@impl Access
def fetch(%__MODULE__{values: values}, key) do
case :lists.keyfind(key, 1, values) do
{_, value} -> {:ok, value}
false -> :error
end
end
@impl Access
def get_and_update(%__MODULE__{values: values} = obj, key, function) do
{result, new_values} = get_and_update(values, [], key, function)
{result, %{obj | values: new_values}}
end
@impl Access
def pop(%__MODULE__{values: values} = obj, key, default \\ nil) do
case :lists.keyfind(key, 1, values) do
{_, value} -> {value, %{obj | values: delete_key(values, key)}}
false -> {default, obj}
end
end
defp get_and_update([{key, current} | t], acc, key, fun) do
case fun.(current) do
{get, value} ->
{get, :lists.reverse(acc, [{key, value} | t])}
:pop ->
{current, :lists.reverse(acc, t)}
other ->
raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}"
end
end
defp get_and_update([{_, _} = h | t], acc, key, fun), do: get_and_update(t, [h | acc], key, fun)
defp get_and_update([], acc, key, fun) do
case fun.(nil) do
{get, update} ->
{get, [{key, update} | :lists.reverse(acc)]}
:pop ->
{nil, :lists.reverse(acc)}
other ->
raise "the given function must return a two-element tuple or :pop, got: #{inspect(other)}"
end
end
defp delete_key([{key, _} | tail], key), do: delete_key(tail, key)
defp delete_key([{_, _} = pair | tail], key), do: [pair | delete_key(tail, key)]
defp delete_key([], _key), do: []
end
defimpl Enumerable, for: Jason.OrderedObject do
def count(%{values: []}), do: {:ok, 0}
def count(_obj), do: {:error, __MODULE__}
def member?(%{values: []}, _value), do: {:ok, false}
def member?(_obj, _value), do: {:error, __MODULE__}
def slice(%{values: []}), do: {:ok, 0, fn _, _ -> [] end}
def slice(_obj), do: {:error, __MODULE__}
def reduce(%{values: values}, acc, fun), do: Enumerable.List.reduce(values, acc, fun)
end
defimpl Jason.Encoder, for: Jason.OrderedObject do
def encode(%{values: values}, opts) do
Jason.Encode.keyword(values, opts)
end
end
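# Usage sketch: Access-style reads preserve insertion order, and so does
# enumeration; Jason.encode!/1 is assumed available in the host application.
defmodule Jason.OrderedObject.UsageSketch do
  @moduledoc false
  def demo do
    obj = Jason.OrderedObject.new([{"b", 1}, {"a", 2}])
    {:ok, 1} = Jason.OrderedObject.fetch(obj, "b")
    [{"b", 1}, {"a", 2}] = Enum.to_list(obj)
    # Jason.encode!(obj) #=> ~s({"b":1,"a":2})
    :ok
  end
end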
# (file: lib/ordered_object.ex)
defmodule AWS.Signer do
@moduledoc """
With code signing for IoT, you can sign code that you create for any IoT device
that is supported by Amazon Web Services (AWS).
Code signing is available through [Amazon FreeRTOS](http://docs.aws.amazon.com/freertos/latest/userguide/) and [AWS IoT Device Management](http://docs.aws.amazon.com/iot/latest/developerguide/), and
integrated with [AWS Certificate Manager (ACM)](http://docs.aws.amazon.com/acm/latest/userguide/). In order to sign code,
you import a third-party code signing certificate with ACM that is used to sign
updates in Amazon FreeRTOS and AWS IoT Device Management. For general
information about using code signing, see the [Code Signing for IoT Developer Guide](http://docs.aws.amazon.com/signer/latest/developerguide/Welcome.html).
"""
@doc """
Changes the state of an `ACTIVE` signing profile to `CANCELED`.
A canceled profile is still viewable with the `ListSigningProfiles` operation,
but it cannot perform new signing jobs, and is deleted two years after
cancelation.
"""
def cancel_signing_profile(client, profile_name, input, options \\ []) do
path_ = "/signing-profiles/#{URI.encode(profile_name)}"
headers = []
query_ = []
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Returns information about a specific code signing job.
You specify the job by using the `jobId` value that is returned by the
`StartSigningJob` operation.
"""
def describe_signing_job(client, job_id, options \\ []) do
path_ = "/signing-jobs/#{URI.encode(job_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns information on a specific signing platform.
"""
def get_signing_platform(client, platform_id, options \\ []) do
path_ = "/signing-platforms/#{URI.encode(platform_id)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns information on a specific signing profile.
"""
def get_signing_profile(client, profile_name, options \\ []) do
path_ = "/signing-profiles/#{URI.encode(profile_name)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists all your signing jobs.
You can use the `maxResults` parameter to limit the number of signing jobs that
are returned in the response. If additional jobs remain to be listed, code
signing returns a `nextToken` value. Use this value in subsequent calls to
`ListSigningJobs` to fetch the remaining values. You can continue calling
`ListSigningJobs` with your `maxResults` parameter and with new values that code
signing returns in the `nextToken` parameter until all of your signing jobs have
been returned.
"""
def list_signing_jobs(client, max_results \\ nil, next_token \\ nil, platform_id \\ nil, requested_by \\ nil, status \\ nil, options \\ []) do
path_ = "/signing-jobs"
headers = []
query_ = []
query_ = if !is_nil(status) do
[{"status", status} | query_]
else
query_
end
query_ = if !is_nil(requested_by) do
[{"requestedBy", requested_by} | query_]
else
query_
end
query_ = if !is_nil(platform_id) do
[{"platformId", platform_id} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists all signing platforms available in code signing that match the request
parameters.
If additional jobs remain to be listed, code signing returns a `nextToken`
value. Use this value in subsequent calls to `ListSigningJobs` to fetch the
remaining values. You can continue calling `ListSigningJobs` with your
`maxResults` parameter and with new values that code signing returns in the
`nextToken` parameter until all of your signing jobs have been returned.
"""
def list_signing_platforms(client, category \\ nil, max_results \\ nil, next_token \\ nil, partner \\ nil, target \\ nil, options \\ []) do
path_ = "/signing-platforms"
headers = []
query_ = []
query_ = if !is_nil(target) do
[{"target", target} | query_]
else
query_
end
query_ = if !is_nil(partner) do
[{"partner", partner} | query_]
else
query_
end
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(category) do
[{"category", category} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Lists all available signing profiles in your AWS account.
Returns only profiles with an `ACTIVE` status unless the `includeCanceled`
request field is set to `true`. If additional jobs remain to be listed, code
signing returns a `nextToken` value. Use this value in subsequent calls to
`ListSigningJobs` to fetch the remaining values. You can continue calling
`ListSigningJobs` with your `maxResults` parameter and with new values that code
signing returns in the `nextToken` parameter until all of your signing jobs have
been returned.
"""
def list_signing_profiles(client, include_canceled \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
path_ = "/signing-profiles"
headers = []
query_ = []
query_ = if !is_nil(next_token) do
[{"nextToken", next_token} | query_]
else
query_
end
query_ = if !is_nil(max_results) do
[{"maxResults", max_results} | query_]
else
query_
end
query_ = if !is_nil(include_canceled) do
[{"includeCanceled", include_canceled} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Returns a list of the tags associated with a signing profile resource.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Creates a signing profile.
A signing profile is a code signing template that can be used to carry out a
pre-defined signing job. For more information, see
[http://docs.aws.amazon.com/signer/latest/developerguide/gs-profile.html](http://docs.aws.amazon.com/signer/latest/developerguide/gs-profile.html)
"""
def put_signing_profile(client, profile_name, input, options \\ []) do
path_ = "/signing-profiles/#{URI.encode(profile_name)}"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Initiates a signing job to be performed on the code provided.
Signing jobs are viewable by the `ListSigningJobs` operation for two years after
they are performed. Note the following requirements:
* You must create an Amazon S3 source bucket. For more information,
see [Create a Bucket](http://docs.aws.amazon.com/AmazonS3/latest/gsg/CreatingABucket.html) in
the *Amazon S3 Getting Started Guide*.
* Your S3 source bucket must be version enabled.
* You must create an S3 destination bucket. Code signing uses your
S3 destination bucket to write your signed code.
* You specify the name of the source and destination buckets when
calling the `StartSigningJob` operation.
* You must also specify a request token that identifies your request
to code signing.
You can call the `DescribeSigningJob` and the `ListSigningJobs` actions after
you call `StartSigningJob`.
For a Java example that shows how to use this action, see
[http://docs.aws.amazon.com/acm/latest/userguide/](http://docs.aws.amazon.com/acm/latest/userguide/)
"""
def start_signing_job(client, input, options \\ []) do
path_ = "/signing-jobs"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Adds one or more tags to a signing profile.
Tags are labels that you can use to identify and organize your AWS resources.
Each tag consists of a key and an optional value. To specify the signing
profile, use its Amazon Resource Name (ARN). To specify the tag, use a key-value
pair.
"""
def tag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Removes one or more tags from a signing profile.
To remove the tags, specify a list of tag keys.
"""
def untag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_, input} =
[
{"tagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
client = %{client | service: "signer"}
host = build_host("signer", client)
url = host
|> build_url(path, client)
|> add_query(query, client)
additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
headers = AWS.Request.add_headers(additional_headers, headers)
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, method, url, headers, payload)
perform_request(client, method, url, payload, headers, options, success_status_code)
end
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
case AWS.Client.request(client, method, url, payload, headers, options) do
{:ok, %{status_code: status_code, body: body} = response}
when is_nil(success_status_code) and status_code in [200, 202, 204]
when status_code == success_status_code ->
body = if(body != "", do: decode!(client, body))
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, path, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}#{path}"
end
defp add_query(url, [], _client) do
url
end
defp add_query(url, query, client) do
querystring = encode!(client, query, :query)
"#{url}?#{querystring}"
end
defp encode!(client, payload, format \\ :json) do
AWS.Client.encode!(client, payload, format)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
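# Call sketch (hedged): AWS.Client.create/3 is an assumption about the
# surrounding aws-elixir library; it is not defined in this file.
#
#   client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#   {:ok, body, _response} = AWS.Signer.list_signing_jobs(client)
#   {:ok, _profile, _response} = AWS.Signer.get_signing_profile(client, "my-profile")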
# (file: lib/aws/generated/signer.ex)
defmodule IntSort.CLI do
@moduledoc """
Handles the command line and options parsing logic
"""
alias LargeSort.Shared.CLI
alias IntSort.CLI.Args
alias IntSort.CLI.Options
alias IntSort.Chunk
@type parsed_args() :: {keyword(), list(String.t()), list()}
@type output_func() :: (IO.chardata() | String.Chars.t() -> :ok)
# The number of times the progress bar will update between 0% and 100%
@progress_updates 1000
# The generation number for the chunking process
@chunk_gen 1
# The maximum number of intermediate files to be merged at once
@merge_files 10
# This is the main entry point to the application
def main(argv) do
argv
|> Args.parse_args()
|> process()
end
# Starts the application processing based on the parsing of the arguments
defp process({:ok, :help}) do
output_usage_info(&IO.puts/1)
System.halt(0)
end
defp process({:error, messages}) do
# Retrieve the function to use for output
output = &IO.puts/1
Enum.each(messages, output)
output.("")
output_usage_info(output)
end
# Gets called when the options are parse correctly in order to run the main process
defp process({:ok, options}) do
# Retrieve the function to use for output
output = output_func(options)
time_description = CLI.measure(fn -> process(options, output) end)
|> CLI.ellapsed_time_description()
output.("")
output.(time_description)
end
# Runs the main process with the given options
@spec process(Options.t(), output_func()) :: :ok
defp process(options, output) do
# Calculate the number of integers and chunks in the input file
display_integer_counting_message(output)
{num_integers, num_chunks} =
integer_chunk_counts(options.input_file, options.chunk_size)
|> display_integer_counting_result(output)
# Create the chunk files
chunk_files = create_chunks(options, num_chunks, output, options.silent) |> Enum.to_list()
output.("#{Enum.count(chunk_files)} Gen 1 intermediate files were generated")
# Merge the chunk files
output.("")
output.("Merging Gen 1 intermediate files")
merge_file =
merge_chunks(options, chunk_files, num_integers, Path.dirname(options.output_file), output)
# Move the final merge file to the file specified in the parameters
File.rename!(merge_file, options.output_file)
end
# Returns the function used for output
@spec output_func(Options.t()) :: output_func()
defp output_func(%Options{silent: true}) do
fn _ -> :ok end
end
defp output_func(_) do
fn output -> IO.puts(output) end
end
# Prints usage information
@spec output_usage_info(output_func()) :: :ok
defp output_usage_info(output) do
output.("""
usage: int_sort [--help] --input-file <file> --chunk-size <integer> [--keep-intermediate] [--silent] <file>
example: int_sort --input-file "data/random_integers.txt" --chunk-size 1000 "sorted_integers.txt"
""")
end
@spec create_chunks(Options.t(), non_neg_integer(), output_func(), boolean()) :: Enum.t()
defp create_chunks(options, num_chunks, output, silent) do
output.("Creating Chunk Files")
# Calculate the progress update frequency for chunk creation
update_frequency = progress_update_frequency(num_chunks, @progress_updates)
# Create the chunk files
IntSort.create_chunk_files(
options.input_file,
Path.dirname(options.output_file),
options.chunk_size,
@chunk_gen
)
# Number each chunk
|> Stream.with_index(1)
# Update the progress bar after each chunk has been processed
|> Stream.each(fn {_, chunk_num} ->
update_progress_bar(chunk_num, num_chunks, update_frequency, silent)
end)
# Transform the stream back into chunk file names
|> Stream.map(fn {chunk_file, _} -> chunk_file end)
end
# Merges the chunk files and returns the path to the final merged file
@spec merge_chunks(Options.t(), Enum.t(), non_neg_integer(), String.t(), output_func()) ::
String.t()
defp merge_chunks(_, [], _, output_dir, output) do
output.("There were no integers to merge. Creating an empty output file.")
empty_file = Path.join(output_dir, IntSort.gen_file_name(1, 1))
File.write!(empty_file, "")
empty_file
end
defp merge_chunks(options, chunk_files, num_integers, output_dir, output) do
# Calculate the progress update frequency for integer merging
update_frequency = progress_update_frequency(num_integers, @progress_updates)
# Perform the merge
gen_file_name = fn gen, count -> Path.join(output_dir, IntSort.gen_file_name(gen, count)) end
merge_status = fn _, count ->
file_merge_status(count, num_integers, update_frequency, options.silent)
end
merge_gen_completed = fn gen, file_count -> merge_gen_completed(gen, file_count, output) end
remove_files = remove_files_func(not options.keep_intermediate)
merged_file =
IntSort.total_merge(
chunk_files,
@merge_files,
gen_file_name,
&IntSort.merge_intermediate_files/4,
remove_files,
merge_status,
merge_gen_completed
)
merged_file
end
# Returns a function that removes a file or does nothing, depending on whether we want
# to remove files
@spec remove_files_func(boolean()) :: (Enum.t() -> :ok)
defp remove_files_func(true) do
fn files -> Enum.each(files, &File.rm!/1) end
end
defp remove_files_func(false) do
fn _ -> :ok end
end
# Outputs the current status of the ongoing file merge
@spec file_merge_status(non_neg_integer(), non_neg_integer(), non_neg_integer(), boolean()) ::
:ok
defp file_merge_status(count, total_count, update_frequency, silent) do
update_progress_bar(count, total_count, update_frequency, silent)
end
# Outputs to the screen when a merge generation is completed
@spec merge_gen_completed(non_neg_integer(), non_neg_integer(), output_func()) :: :ok
defp merge_gen_completed(gen, file_count, output) when file_count > 1 do
output.("Gen #{gen - 1} files were merged into #{file_count} Gen #{gen} files")
output.("")
output.("Merging Gen #{gen} intermediate files")
end
defp merge_gen_completed(gen, _, output) do
output.("Gen #{gen - 1} files were merged into a single output file")
end
defp integer_chunk_counts(input_file, chunk_size) do
integer_count = IntSort.integer_count(input_file)
chunk_count = Chunk.num_chunks(integer_count, chunk_size)
{integer_count, chunk_count}
end
@spec display_integer_counting_message(output_func()) :: :ok
defp display_integer_counting_message(output) do
output.("Determining the number of integers and chunks in the input file...")
end
@spec display_integer_counting_result({non_neg_integer(), non_neg_integer()}, output_func()) ::
{non_neg_integer(), non_neg_integer()}
defp display_integer_counting_result(data = {integers, chunks}, output) do
output.("Number of Integers: #{integers}")
output.("Number of Chunks: #{chunks}")
data
end
# Calculates the progress update frequency (the number of items that pass between
# updates) based on the total number of items and the number of updates that
# are to be made to the progress bar
@spec progress_update_frequency(non_neg_integer(), non_neg_integer()) :: non_neg_integer()
defp progress_update_frequency(total_count, num_updates) do
ceil(total_count / num_updates)
end
# Updates the current progress bar
# This clause updates the progress bar occasionally when a larger number of items
# are being processed so that the program doesn't spend all its time on progress
# bar updates
@spec update_progress_bar(non_neg_integer(), non_neg_integer(), non_neg_integer(), boolean()) ::
:ok
defp update_progress_bar(current_count, total_count, update_frequency, false)
when rem(current_count, update_frequency) == 0 do
ProgressBar.render(current_count, total_count, progress_bar_format())
end
# Updates the progress bar when all the items have finished being processed.
# Otherwise, it won't show at 100% unless the total happens to be evenly
# divisible by the update frequency
defp update_progress_bar(current_count, total_count, _, false)
when current_count == total_count do
ProgressBar.render(current_count, total_count, progress_bar_format())
end
# If the current item count does not match the update frequency or if the
# silent option is enabled, don't update the progress bar
defp update_progress_bar(_, _, _, _), do: :ok
# Returns the format of the progress bar
defp progress_bar_format() do
[
suffix: :count
]
end
@doc """
Retrieves the number of files merged at the same time
"""
def merge_files(), do: @merge_files
end
# (file: int_sort/lib/cli/cli.ex)
defmodule Plymio.Fontais.Vekil.ProxyForomDict do
# A vekil dictionary where the proxies are atoms and the foroms are quoted
# forms, i.e. the dictionary used by Plymio.Vekil.Form. It has functions
# that mirror the Plymio.Fontais.Vekil protocol but does *not* implement
# the protocol.
@moduledoc false
use Plymio.Fontais.Attribute
@type form :: Plymio.Fontais.form()
@type forms :: Plymio.Fontais.forms()
@type error :: Plymio.Fontais.error()
import Plymio.Fontais.Error,
only: [
new_error_result: 1
]
import Plymio.Fontais.Option,
only: [
opts_validate: 1,
opts_fetch: 2
]
import Plymio.Fontais.Form,
only: [
forms_normalise: 1,
forms_edit: 2
]
import Plymio.Fontais.Funcio,
only: [
map_collate0_enum: 2,
map_gather0_enum: 2
]
@doc false
@since "0.1.0"
defdelegate normalise_proxies(proxies), to: Plymio.Fontais.Utility, as: :normalise_keys
@doc false
@since "0.1.0"
defdelegate validate_proxy(proxy), to: Plymio.Fontais.Utility, as: :validate_key
@doc false
@since "0.1.0"
defdelegate validate_proxies(proxies), to: Plymio.Fontais.Utility, as: :validate_keys
@since "0.1.0"
@spec normalise_proxy_forom_dict(any) :: {:ok, map} | {:error, error}
defp normalise_proxy_forom_dict(vekil)
defp normalise_proxy_forom_dict(vekil) do
cond do
is_map(vekil) -> vekil
Keyword.keyword?(vekil) -> vekil |> Enum.into(%{})
true -> vekil
end
|> validate_proxy_forom_dict
end
@since "0.1.0"
@spec validate_proxy_forom_dict(any) :: {:ok, map} | {:error, error}
def validate_proxy_forom_dict(vekil)
def validate_proxy_forom_dict(vekil) when is_map(vekil) do
with {:ok, _} <- vekil |> Map.keys() |> validate_proxies do
{:ok, vekil}
else
{:error, %{__exception__: true} = error} ->
new_error_result(m: "vekil invalid", v: error)
end
end
def validate_proxy_forom_dict(vekil) do
new_error_result(m: "vekil invalid", v: vekil)
end
@doc false
@spec validate_proxy_forom_dict!(any) :: map | no_return
def validate_proxy_forom_dict!(value) do
with {:ok, dict} <- value |> validate_proxy_forom_dict do
dict
else
{:error, error} -> raise error
end
end
@since "0.1.0"
@spec transform_proxy_forom_dict(any) :: {:ok, map} | {:error, error}
def transform_proxy_forom_dict(vekil, opts \\ [])
def transform_proxy_forom_dict(vekil, []) do
vekil |> validate_proxy_forom_dict
end
def transform_proxy_forom_dict(vekil, opts) do
with {:ok, vekil} <- vekil |> validate_proxy_forom_dict,
{:ok, opts} <- opts |> opts_validate do
opts
|> Enum.reduce_while(vekil, fn
{:transform_k = _verb, fun}, vekil when is_function(fun, 1) ->
{:cont, vekil |> Enum.map(fn {k, v} -> {k |> fun.(), v} end)}
{:transform_v = _verb, fun}, vekil when is_function(fun, 1) ->
{:cont, vekil |> Enum.map(fn {k, v} -> {k, v |> fun.()} end)}
{:transform_kv = _verb, fun}, vekil when is_function(fun, 1) ->
{:cont, vekil |> Enum.map(fn kv -> kv |> fun.() end)}
x, _vekil ->
{:halt, new_error_result(m: "vekil transform invalid", v: x)}
end)
|> case do
{:error, %{__struct__: _}} = result -> result
vekil -> vekil |> normalise_proxy_forom_dict
end
else
{:error, %{__struct__: _}} = result -> result
end
end
@doc false
@since "0.1.0"
@spec create_proxy_forom_dict(any) :: {:ok, map} | {:error, error}
def create_proxy_forom_dict(value) do
cond do
Keyword.keyword?(value) -> [value]
true -> value |> List.wrap()
end
|> map_collate0_enum(fn
v when is_atom(v) ->
{:ok, apply(v, :__vekil__, [])}
v ->
{:ok, v}
end)
|> case do
{:error, %{__struct__: _}} = result ->
result
{:ok, dicts} ->
dicts
|> map_collate0_enum(fn
%{__struct__: _} = v ->
v
|> Map.get(@plymio_fontais_key_dict)
|> case do
x when is_map(x) ->
{:ok, x}
x ->
new_error_result(m: "struct dict invalid", v: x)
end
v when is_map(v) ->
{:ok, v}
v when is_list(v) ->
case v |> Keyword.keyword?() do
true ->
{:ok, v |> Enum.into(%{})}
_ ->
new_error_result(m: "proxy forom dict invalid", v: v)
end
v ->
new_error_result(m: "proxy forom dict", v: v)
end)
end
|> case do
{:error, %{__struct__: _}} = result ->
result
{:ok, dicts} ->
dicts
|> Enum.reduce(%{}, fn m, s -> Map.merge(s, m) end)
|> validate_proxy_forom_dict
end
end
@doc false
@spec create_proxy_forom_dict!(any) :: map | no_return
def create_proxy_forom_dict!(value) do
with {:ok, dict} <- value |> create_proxy_forom_dict do
dict
else
{:error, error} -> raise error
end
end
defp reduce_gather_opts(gather_opts) do
with {:ok, gather_opts} <- gather_opts |> Plymio.Fontais.Option.opts_validate() do
gather_opts
|> Plymio.Fontais.Funcio.gather_opts_error_get()
|> case do
{:ok, []} ->
gather_opts |> Plymio.Fontais.Funcio.gather_opts_ok_get()
{:ok, error_tuples} ->
error_tuples
|> case do
[{_proxy, error}] -> {:error, error}
tuples -> new_error_result(m: "proxies invalid", v: tuples |> Keyword.keys())
end
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@since "0.1.0"
defp resolve_forom_in_proxy_forom_dict(proxy_forom_dict, forom, seen_proxies)
defp resolve_forom_in_proxy_forom_dict(_proxy_forom_dict, nil, _seen_proxies) do
{:ok, nil}
end
# must be a proxy
defp resolve_forom_in_proxy_forom_dict(proxy_forom_dict, forom, seen_proxies)
when is_atom(forom) do
proxy_forom_dict |> resolve_proxy_in_proxy_forom_dict(forom, seen_proxies)
end
defp resolve_forom_in_proxy_forom_dict(proxy_forom_dict, forom, seen_proxies)
when is_list(forom) do
forom
|> map_collate0_enum(fn value ->
proxy_forom_dict |> resolve_forom_in_proxy_forom_dict(value, seen_proxies)
end)
end
# anything else is treated as a form and passed through unchanged
defp resolve_forom_in_proxy_forom_dict(_proxy_forom_dict, forom, _seen_proxies) do
{:ok, forom}
end
@since "0.2.0"
defp resolve_proxy_in_proxy_forom_dict(proxy_forom_dict, proxy, seen_proxies)
defp resolve_proxy_in_proxy_forom_dict(proxy_forom_dict, proxy, seen_proxies)
when is_map(proxy_forom_dict) and is_atom(proxy) do
seen_proxies
|> Map.has_key?(proxy)
|> case do
true ->
new_error_result(m: "proxy seen before", v: proxy)
_ ->
proxy_forom_dict
|> Map.fetch(proxy)
|> case do
{:ok, forom} ->
# mark seen
seen_proxies = seen_proxies |> Map.put(proxy, nil)
with {:ok, form} <-
proxy_forom_dict |> resolve_forom_in_proxy_forom_dict(forom, seen_proxies) do
{:ok, form}
else
{:error, %{__exception__: true}} = result -> result
end
_ ->
new_error_result(m: "proxy invalid", v: proxy)
end
end
end
@doc false
@since "0.2.0"
@spec resolve_proxies(any, any) :: {:ok, forms} | {:error, error}
def resolve_proxies(proxy_forom_dict, proxies) do
with {:ok, proxy_forom_dict} <- proxy_forom_dict |> validate_proxy_forom_dict,
true <- true do
proxies
|> List.wrap()
|> map_gather0_enum(fn
proxy when is_atom(proxy) ->
proxy_forom_dict |> resolve_proxy_in_proxy_forom_dict(proxy, %{})
value ->
# must be a form
{:ok, value}
end)
|> case do
{:error, %{__struct__: _}} = result ->
result
{:ok, gather_opts} ->
gather_opts
|> reduce_gather_opts
|> case do
{:error, %{__struct__: _}} = result ->
result
{:ok, ok_tuples} ->
{:ok, ok_tuples}
end
end
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc false
@since "0.1.0"
@spec produce_proxies(any, any) :: {:ok, forms} | {:error, error}
def produce_proxies(proxies, opts \\ [])
def produce_proxies(proxies, opts) when is_list(opts) do
with {:ok, opts} <- opts |> opts_validate,
{:ok, vekil} <- opts |> opts_fetch(@plymio_fontais_key_dict),
{:ok, tuples} <- vekil |> resolve_proxies(proxies),
forms <- tuples |> Keyword.values(),
{:ok, forms} <- forms |> forms_normalise,
{:ok, _forms} = result <-
forms
|> forms_edit(opts |> Keyword.get(@plymio_fontais_key_forms_edit, [])) do
result
else
{:error, %{__exception__: true}} = result -> result
end
end
@doc false
@since "0.1.0"
defmacro reify_proxies(proxies, opts \\ []) do
quote bind_quoted: [proxies: proxies, opts: opts] do
with {:ok, forms} <- proxies |> Plymio.Fontais.Vekil.ProxyForomDict.produce_proxies(opts) do
forms
|> Code.eval_quoted([], __ENV__)
else
{:error, %{__exception__: true} = error} -> raise error
end
end
end
end
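# Resolution sketch (hedged; the gather/collate helpers come from
# Plymio.Fontais.Funcio and are not defined here, so the exact result shape
# is an assumption): a proxy resolves either to a quoted form or,
# recursively, to other proxies, with cycles rejected.
#
#   dict = %{add_one: quote(do: x + 1), chain: :add_one}
#   {:ok, [add_one: _form]} =
#     Plymio.Fontais.Vekil.ProxyForomDict.resolve_proxies(dict, :add_one)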
# (file: lib/fontais/vekil/proxy_forom_dict.ex)
defmodule DreaeQL do
@moduledoc """
A library for parsing a simple query language into an abstract syntax tree.
"""
@doc """
Parses a string into a DreaeQL AST. Returns a tuple containing the AST
and a list of any unconsumed tokens.
## Examples
```
iex> DreaeQL.parse("foo = 123 and bar = \\"456\\"")
{%DreaeQL.Operators.And{
left_side: %DreaeQL.Operators.Equals{
left_side: %DreaeQL.Terms.Identifier{ident: "foo", term: :ident},
operator: :binary,
right_side: %DreaeQL.Terms.LiteralInt{term: :int, value: 123}
},
operator: :binary,
right_side: %DreaeQL.Operators.Equals{
left_side: %DreaeQL.Terms.Identifier{ident: "bar", term: :ident},
operator: :binary,
right_side: %DreaeQL.Terms.LiteralString{term: :string, value: "456"}
}
}, []}
```
"""
@spec parse(String.t) :: {expression(), list}
def parse(string), do: parse_tokens(tokenize(string))
@doc false
def tokenize(data) do
tokenize(data, [])
end
defp tokenize(" " <> data, tokens), do: tokenize(data, tokens)
defp tokenize(<<t::unsigned-8>> <> data, tokens) when t >= ?0 and t <= ?9, do: consume_number(data, <<t::unsigned-8>>, tokens)
defp tokenize(<<t::unsigned-8>> <> data, tokens) when (t >= ?A and t <= ?Z) or (t >= ?a and t <= ?z), do: consume_identifier(data, <<t::unsigned-8>>, tokens)
defp tokenize("\"" <> data, tokens), do: consume_string(data, "", tokens)
defp tokenize("-" <> data, tokens), do: consume_number(data, "-", tokens)
defp tokenize("(" <> data, tokens), do: tokenize(data, [:open_paren | tokens])
defp tokenize(")" <> data, tokens), do: tokenize(data, [:close_paren | tokens])
defp tokenize("=" <> data, tokens), do: tokenize(data, [:equals | tokens])
defp tokenize("!=" <> data, tokens), do: tokenize(data, [:not_equals | tokens])
defp tokenize("<" <> data, tokens), do: tokenize(data, [:lt | tokens])
defp tokenize("<=" <> data, tokens), do: tokenize(data, [:le | tokens])
defp tokenize(">" <> data, tokens), do: tokenize(data, [:gt | tokens])
defp tokenize(">=" <> data, tokens), do: tokenize(data, [:ge | tokens])
defp tokenize("", tokens), do: Enum.reverse(tokens)
defp consume_number(<<t::unsigned-8>> <> data, token, tokens) when t >= ?0 and t <= ?9, do: consume_number(data, token <> <<t::unsigned-8>>, tokens)
defp consume_number("_" <> data, token, tokens), do: consume_number(data, token, tokens)
defp consume_number("." <> data, token, tokens), do: consume_float(data, token <> ".", tokens)
defp consume_number(data, token, tokens), do: tokenize(data, [finalize_number(token) | tokens])
defp finalize_number(token) do
{num, ""} = Integer.parse(token)
[:literal, :int, num]
end
defp consume_float(<<t::unsigned-8>> <> data, token, tokens) when t >= ?0 and t <= ?9, do: consume_float(data, token <> <<t::unsigned-8>>, tokens)
defp consume_float("_" <> data, token, tokens), do: consume_float(data, token, tokens)
defp consume_float(data, token, tokens), do: tokenize(data, [finalize_float(token) | tokens])
defp finalize_float(token) do
{num, ""} = Float.parse(token)
[:literal, :float, num]
end
defp consume_identifier(<<t::unsigned-8>> <> data, token, tokens)
when (t >= ?A and t <= ?Z)
or (t >= ?a and t <= ?z)
or (t == ?. or t == ?_)
do
consume_identifier(data, token <> <<t::unsigned-8>>, tokens)
end
defp consume_identifier(data, token, tokens), do: tokenize(data, [finalize_identifier(token) | tokens])
defp consume_string("\"" <> data, buffer, tokens), do: tokenize(data, [[:literal, :string, buffer] | tokens])
defp consume_string("\\" <> <<c::unsigned-8>> <> data, buffer, tokens), do: consume_string(data, buffer <> <<c::unsigned-8>>, tokens)
defp consume_string(<<c::unsigned-8>> <> data, buffer, tokens), do: consume_string(data, buffer <> <<c::unsigned-8>>, tokens)
defp finalize_identifier("and"), do: :and
defp finalize_identifier("or"), do: :or
defp finalize_identifier("not"), do: :not
defp finalize_identifier("true"), do: [:literal, :bool, :true]
defp finalize_identifier("false"), do: [:literal, :bool, :false]
defp finalize_identifier(token), do: [:identifier, token]
defp parse_tokens(tokens) do
parse_query(tokens)
end
@type expression :: DreaeQL.dreaeql_operator | DreaeQL.dreaeql_term
use DreaeQL.Terms
use DreaeQL.Operators
defp parse_term([[:identifier, _ident] = ident | tokens]), do: {parse_term_ident(ident), tokens}
defp parse_term([[:literal, _t, _d] = literal | tokens]), do: {parse_term_literal(literal), tokens}
defp parse_term([:open_paren | tokens]) do
{term, [:close_paren | tokens]} = parse_expression(tokens, 0)
{term, tokens}
end
defp parse_term_literal([:literal, :int, num]), do: %Terms.LiteralInt{value: num}
defp parse_term_literal([:literal, :float, num]), do: %Terms.LiteralFloat{value: num}
defp parse_term_literal([:literal, :string, value]), do: %Terms.LiteralString{value: value}
defp parse_term_literal([:literal, :bool, :true]), do: %Terms.LiteralBool{value: true}
defp parse_term_literal([:literal, :bool, :false]), do: %Terms.LiteralBool{value: false}
defp parse_term_ident([:identifier, ident]), do: %Terms.Identifier{ident: ident}
# This is effectively a Pratt parser
defp parse_expression([op | tokens] = token_stream, min_bp) do
{lhs, tokens} = case operator_precedence(op) do
{0, r_bp} ->
{rhs, tokens} = parse_expression(tokens, r_bp)
{finalize_operator(op, rhs), tokens}
_ -> parse_term(token_stream)
end
parse_expression(lhs, tokens, min_bp)
end
defp parse_expression(lhs, [op | tokens] = token_stream, min_bp) do
case operator_precedence(op) do
{l_bp, r_bp} ->
if l_bp < min_bp do
{lhs, token_stream}
else
{rhs, tokens} = parse_expression(tokens, r_bp)
parse_expression(finalize_operator(op, lhs, rhs), tokens, min_bp)
end
_ -> {lhs, token_stream}
end
end
defp parse_expression(lhs, [], _), do: {lhs, []}
defp operator_precedence(:and), do: {1, 2}
defp operator_precedence(:or), do: {1, 2}
defp operator_precedence(:equals), do: {4, 3}
defp operator_precedence(:not_equals), do: {4, 3}
defp operator_precedence(:lt), do: {4, 3}
defp operator_precedence(:gt), do: {4, 3}
defp operator_precedence(:le), do: {4, 3}
defp operator_precedence(:ge), do: {4, 3}
defp operator_precedence(:not), do: {0, 3}
defp operator_precedence(_), do: nil
defp finalize_operator(:equals, lhs, rhs), do: %Operators.Equals{left_side: lhs, right_side: rhs}
defp finalize_operator(:not_equals, lhs, rhs), do: %Operators.NotEquals{left_side: lhs, right_side: rhs}
defp finalize_operator(:gt, lhs, rhs), do: %Operators.GreaterThan{left_side: lhs, right_side: rhs}
defp finalize_operator(:ge, lhs, rhs), do: %Operators.GreaterThanEquals{left_side: lhs, right_side: rhs}
defp finalize_operator(:lt, lhs, rhs), do: %Operators.LessThan{left_side: lhs, right_side: rhs}
defp finalize_operator(:le, lhs, rhs), do: %Operators.LessThanEquals{left_side: lhs, right_side: rhs}
defp finalize_operator(:and, lhs, rhs), do: %Operators.And{left_side: lhs, right_side: rhs}
defp finalize_operator(:or, lhs, rhs), do: %Operators.Or{left_side: lhs, right_side: rhs}
defp finalize_operator(:not, rhs), do: %Operators.Not{expr: rhs}
defp parse_query(tokens) do
parse_expression(tokens, 0)
end
end
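# A minimal sketch of the token stream the Pratt parser above consumes,
# for the illustrative query "not foo <= 10":
#
#   DreaeQL.tokenize("not foo <= 10")
#   #=> [:not, [:identifier, "foo"], :le, [:literal, :int, 10]]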
|
lib/dreaeql.ex
| 0.861567
| 0.888227
|
dreaeql.ex
|
starcoder
|
defmodule ExPool.State do
@moduledoc """
The internal state of the pool.
This module defines a `ExPool.State` struct and the main functions
for working with pool internal state.
## Fields
* `config` - pool configuration (e.g. the pool size)
* `factory` - worker creation and tracking state
* `stash` - stash of available workers
* `monitors` - store for the monitored references
* `queue` - queue to store the waiting requests
"""
alias ExPool.State
alias ExPool.State.Config
alias ExPool.State.Factory
alias ExPool.State.Stash
alias ExPool.State.Monitors
alias ExPool.State.Queue
@type config :: Config.t
@type factory :: Factory.t
@type stash :: Stash.t
@type worker :: Stash.worker
@type monitors :: Monitors.t
@type item :: Monitors.item
@type ref :: Monitors.ref
@type queue :: Queue.t
@type t :: %__MODULE__{
config: config,
factory: factory,
stash: stash,
monitors: monitors,
queue: queue
}
defstruct config: nil,
factory: nil,
stash: nil,
monitors: nil,
queue: nil
@doc """
Creates a new pool state with the given configuration.
## Configuration options
* `:worker_mod` - (Required) The worker module. It has to fit in a
supervision tree (like a GenServer).
* `:size` - (Optional) The size of the pool (default 5).
"""
@spec new(opts :: Keyword.t) :: t
def new(opts) do
%State{
config: Config.new(opts),
factory: Factory.new(opts),
stash: Stash.new(opts),
monitors: Monitors.new(opts),
queue: Queue.new(opts)
}
end
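# A minimal usage sketch, assuming a `MyWorker` module that fits in a
# supervision tree:
#
#   state = ExPool.State.new(worker_mod: MyWorker, size: 2)
#   {worker, state} = ExPool.State.create_worker(state)
#   state = ExPool.State.return_worker(state, worker)
#   {:ok, {_worker, _state}} = ExPool.State.get_worker(state)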
## Config
@doc """
Returns the total number of workers.
"""
@spec size(t) :: non_neg_integer
def size(%State{config: config}), do: config.size
## Factory
@spec create_worker(t) :: {worker, t}
def create_worker(%State{factory: factory} = state) do
{worker, factory} = Factory.create(factory)
{worker, %{state|factory: factory}}
end
@spec total_workers(t) :: non_neg_integer
def total_workers(%State{factory: factory}), do: factory.total
@spec destroy_worker(t, worker) :: t
def destroy_worker(%State{factory: factory} = state, worker),
do: %{state|factory: Factory.destroy(factory, worker)}
@spec report_dead_worker(t) :: t
def report_dead_worker(%State{factory: factory} = state),
do: %{state|factory: Factory.report_death(factory)}
## Stash
@doc """
Returns the number of available workers.
"""
@spec available_workers(t) :: non_neg_integer
def available_workers(%State{stash: stash}), do: Stash.available(stash)
@doc """
Gets a worker from the pool if there is any available.
"""
@spec get_worker(t) :: {:ok, {worker, t}} | {:empty, t}
def get_worker(%State{stash: stash} = state) do
case Stash.get(stash) do
{:ok, {worker, new_stash}} -> {:ok, {worker, %{state|stash: new_stash}}}
{:empty, _stash} -> {:empty, state}
end
end
@doc """
Returns a worker to the pool.
"""
@spec return_worker(t, worker) :: t
def return_worker(%State{stash: stash} = state, worker),
do: %{state|stash: Stash.return(stash, worker)}
## Monitors
@doc """
Stores the given item and its associated reference.
"""
@spec add_monitor(t, item, ref) :: t
def add_monitor(%State{monitors: monitors} = state, item, ref) do
monitors = Monitors.add(monitors, item, ref)
%{state|monitors: monitors}
end
@doc """
Gets an item from its reference.
"""
@spec item_from_ref(t, ref) :: {:ok, item} | :not_found
def item_from_ref(%State{monitors: monitors}, ref),
do: Monitors.item_from_ref(monitors, ref)
@doc """
Gets a reference from its item.
"""
@spec ref_from_item(t, item) :: {:ok, ref} | :not_found
def ref_from_item(%State{monitors: monitors}, item),
do: Monitors.ref_from_item(monitors, item)
@doc """
Removes the given item and its associated reference.
"""
@spec remove_monitor(t, item) :: t
def remove_monitor(%State{monitors: monitors} = state, item) do
monitors = Monitors.forget(monitors, item)
%{state|monitors: monitors}
end
## Queue
@doc """
Returns the number of waiting processes.
"""
@spec queue_size(t) :: non_neg_integer
def queue_size(%State{queue: queue}), do: Queue.size(queue)
@doc """
Adds an item to the queue.
"""
@spec enqueue(t, item) :: t
def enqueue(%State{queue: queue} = state, item),
do: %{state|queue: Queue.push(queue, item)}
@doc """
Keeps in the queue only the items for which the given function returns `true`.
"""
@spec keep_on_queue(t, (item -> boolean)) :: t
def keep_on_queue(%State{queue: queue} = state, filter),
do: %{state|queue: Queue.keep(queue, filter)}
@doc """
Pops an item from the queue.
"""
@spec pop_from_queue(t) :: {:ok, {item, t}} | {:empty, t}
def pop_from_queue(%State{queue: queue} = state) do
case Queue.pop(queue) do
{:ok, {item, new_queue}} -> {:ok, {item, %{state|queue: new_queue}}}
{:empty, _queue} -> {:empty, state}
end
end
end
|
lib/ex_pool/state.ex
| 0.878386
| 0.476762
|
state.ex
|
starcoder
|
defmodule ChalkAuthorization do
@moduledoc """
Chalk is an authorization module with support for roles that can handle configurable custom actions and permissions. It also supports user and group based authorization.
It is inspired in the Unix file permissions.
"""
@doc """
Chalk provides the following functions to handle the authorization system.
## `permission_map`
Get the translation between a permission and its integer representation.
## `permissions_changeset(item, attrs)`
Get the changeset to update the permissions.
- `item` can be an atom or a string.
- `attrs` can be a map.
## `can?(user, permission, element)`
Check if a user has permission to perform an action on a specific element.
### Parameters
- `user` can be a map or nil.
- `permission` and `element` can each be an atom or a string.
## `get_permissions(user, element)`
Get the permissions of a user on an element.
### Parameters
- `user` can be a map.
- `element` can be an atom or a string.
## `add_group(user, group)`
Add a user to a specific group.
### Parameters
- `user` can be a map.
- `group` can be a string or a list of groups.
## `remove_group(user, group)`
Remove a user from a specific group.
### Parameters
- `user` can be a map.
- `group` can be a string or a list of groups.
## `is_a?(user, groups)`
Check if the user is in a specific group.
### Parameters
- `user` can be a map.
- `groups` can be a string, a bitstring or a list of groups.
## `set_permissions(user, element, value)`
Grant permissions to a user on an item.
### Parameters
- `user` can be a map.
- `element` can be an atom or a string.
- `value` can be an integer or a string.
## `set_superuser(user, boolean)`
Grant or revoke a user the role of superuser (all permissions).
### Parameters
- `user` can be a map.
- `boolean` can be `true` or `false`.
"""
defmacro __using__(repo: repo, group_permissions: group_permissions) do
quote do
@doc """
Get the translation between a permission and its integer representation
"""
def permission_map,
do: Application.get_env(:chalk_authorization, :permission_map, %{c: 1, r: 2, u: 4, d: 8})
@doc """
Get the changeset to update the permissions.
## Parameters
- `item`: can be an atom or a string.
- `attrs`: can be a map.
"""
def permissions_changeset(item, attrs),
do: cast(item, attrs, [:superuser, :groups, :permissions])
@doc """
Check if a user has permission to perform an action on a specific element.
## Parameters
- `user`: can be a map or `nil`.
- `permission`: can be an atom or a string.
- `element`: can be an atom or a string.
## Returns
`true` or `false`.
"""
def can?(nil, _permission, _element),
do: false
def can?(user, permission, element) when is_atom(permission),
do: can?(user, Atom.to_string(permission), element)
def can?(%{superuser: true}, _permission, _element),
do: true
def can?(%{groups: [group | groups]} = user, permission, element),
do:
user
|> get_group_permissions(group)
|> Map.put(:groups, groups)
|> can?(permission, element)
# `get_permissions/2` normalizes atom elements itself, so the element is
# passed through unchanged (a string element would crash `Atom.to_string/1`).
def can?(user, permission, element),
do:
user
|> get_permissions(element)
|> permissions_int_to_string
|> String.contains?(permission)
"""
Get the permissions of a specific group and upgrade the user's
permissions according to the ones given to the group.
"""
defp get_group_permissions(),
do: unquote(group_permissions) || %{}
defp get_group_permissions(user, group),
do:
if(
Map.has_key?(get_group_permissions(), group) &&
Enum.member?(user.groups, group),
do: upgrade_to_group(user, Map.get(get_group_permissions(), group)),
else: user
)
"""
Upgrade the users permissions according to the ones given
to the group. If the user has higher permissions than the
group given or the user is in a group with higher permissions,
the permissions aren't upgraded.
"""
defp upgrade_to_group(%{permissions: permissions} = user, group_permissions),
do:
Map.put(
user,
:permissions,
upgrade_to_group(permissions, Map.to_list(group_permissions))
)
defp upgrade_to_group(permissions, []),
do: permissions
defp upgrade_to_group(permissions, [{permission, value} | group_permissions]),
do:
if(
Map.has_key?(permissions, permission) &&
permissions[permission] >= value,
do: permissions,
else:
Map.put(permissions, permission, value)
|> upgrade_to_group(group_permissions)
)
@doc """
Get the permissions of a user on an element.
## Parameters
- `user` can be a map.
- `element` can be an atom or a string.
## Returns
The permission of the user or `0`.
"""
def get_permissions(user, element) when is_atom(element),
do: get_permissions(user, Atom.to_string(element))
def get_permissions(user, element),
do:
if(Map.has_key?(user.permissions, element),
do: user.permissions[element],
else: 0
)
@doc """
Add a user to a specific group.
## Parameters
- `user` can be a map.
- `group` can be a string or a list of groups.
## Returns
The group or groups added to the user.
"""
def add_group(user, []),
do: user
def add_group(user, [group | groups]),
do: add_group(add_group(user, group), groups)
def add_group(user, group) when not is_bitstring(group),
do: add_group(user, "#{group}")
def add_group(%{groups: groups} = user, group),
do:
user
|> __MODULE__.permissions_changeset(%{
groups: (groups ++ [group]) |> Enum.sort() |> Enum.uniq()
})
|> unquote(repo).update()
|> elem(1)
@doc """
Remove a user from a specific group.
## Parameters
- `user` can be a map.
- `group` can be a string, a bitstring or a list of groups.
## Returns
The group or groups added to the user.
"""
def remove_group(user, []),
do: user
def remove_group(user, [group | groups]),
do: remove_group(remove_group(user, group), groups)
def remove_group(user, group) when not is_bitstring(group),
do: remove_group(user, "#{group}")
def remove_group(%{groups: groups} = user, group),
do:
user
|> __MODULE__.permissions_changeset(%{
groups: Enum.reject(groups, fn g -> g == group end)
})
|> unquote(repo).update()
|> elem(1)
@doc """
Check if the user is in a specific group.
## Parameters
- `user` can be a map.
- `groups` can be a string, a bitstring or a list of groups.
## Returns
`true` or `false`.
"""
def is_a?(user, groups) when is_list(groups),
do: groups |> Enum.all?(fn g -> user |> is_a?(g) end)
def is_a?(user, group) when not is_bitstring(group),
do: is_a?(user, "#{group}")
def is_a?(%{groups: groups}, group),
do: Enum.member?(groups, group)
@doc """
Grant permissions to a user on an item.
## Parameters
- `user` can be a map.
- `element` can be an atom or a string. `value` can
be an integer or a string.
## Returns
The `repo` updated or an error.
"""
def set_permissions(user, element, value) when is_atom(element),
do: set_permissions(user, Atom.to_string(element), value)
def set_permissions(user, element, value) when is_integer(value) do
if value >= 0 and value <= Enum.sum(Map.values(permission_map())) do
user
|> __MODULE__.permissions_changeset(%{
permissions: Map.put(user.permissions, element, value)
})
|> unquote(repo).update()
else
{:error, user}
end
end
def set_permissions(user, element, value) do
case value do
"+" <> permissions ->
set_permissions(
user,
element,
get_permissions(user, element) + permissions_string_to_int(permissions)
)
"-" <> permissions ->
set_permissions(
user,
element,
get_permissions(user, element) - permissions_string_to_int(permissions)
)
_ ->
set_permissions(user, element, permissions_string_to_int(value))
end
end
"""
Convert a string of permissions into an integer.
"""
defp permissions_string_to_int(string) do
string
|> String.graphemes()
|> Enum.uniq()
|> Enum.map(fn p -> permission_map()[String.to_atom(p)] end)
|> Enum.sum()
end
"""
Convert an integer permission into a string.
"""
defp permissions_int_to_string(int) when is_integer(int) do
keys =
Map.keys(permission_map())
|> Enum.sort_by(fn k -> permission_map()[k] end)
|> Enum.reverse()
permissions_int_to_string(int, keys, [])
end
defp permissions_int_to_string(rest, [], acc) do
cond do
rest == 0 ->
acc |> Enum.map(fn a -> Atom.to_string(a) end) |> Enum.join()
true ->
:error
end
end
defp permissions_int_to_string(rest, [key | tail], acc) do
if rest - permission_map()[key] >= 0 do
permissions_int_to_string(rest - permission_map()[key], tail, [key | acc])
else
permissions_int_to_string(rest, tail, acc)
end
end
@doc """
Grant or revoke a user the role of superuser (all permissions).
## Parameters
- `user` can be a map.
- `boolean` can be `true` or `false`.
## Returns
`true` or `false`.
"""
def set_superuser(%{superuser: _} = user, boolean) when is_boolean(boolean),
do:
user
|> __MODULE__.permissions_changeset(%{superuser: boolean})
|> unquote(repo).update()
|> elem(1)
end
end
end
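# A minimal integration sketch; `MyApp.Repo` and the group map are
# assumptions, and the using schema is expected to have `cast/3` in scope
# (e.g. via `import Ecto.Changeset`) plus `:superuser`, `:groups` and
# `:permissions` fields:
#
#   defmodule MyApp.Accounts.User do
#     use Ecto.Schema
#     import Ecto.Changeset
#
#     use ChalkAuthorization,
#       repo: MyApp.Repo,
#       group_permissions: %{"admin" => %{"posts" => 15}}
#   end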
|
lib/chalk_authorization.ex
| 0.901821
| 0.693439
|
chalk_authorization.ex
|
starcoder
|
defmodule Riffed.Enumeration do
@moduledoc """
Provides enumeration semantics, but with an Elixir flavor.
## Usage
Thrift enums are not handled well by the erlang thrift bindings. They're turned into
ints and left to fend for themselves. This is no way to treat an Enum. The `Riffed.Enum`
module brings them back into the fold so you can have familiar enumeration semantics.
To (re)define an enum, use the defenum macro like this:
defenum UserState do
:active -> 1
:inactive -> 2
:banned -> 3
end
Then, for all structs that use the enum, use the corresponding `enumerize_struct` macro:
enumerize_struct(User, state: UserState)
Riffed will then change the state field into a UserState enum whenever it deserializes a User
struct. Similarly, UserState enums will be serialized as ints.
### Using Enumerations
Enums are just Elixir structs in a module that defines functions for turning ints into enums
and enums into ints. Every enum module has a `value/1` function that converts an integer into an Enumeration, a constructor function named after each ordinal (such as `UserState.banned`), and `ordinals/0`, `values/0` and `mappings/0` introspection functions.
Like all structs, enums can be pattern matched against:
def ban_user(%User{state: %UserState{ordinal: :banned}}) do
{:error, :already_banned}
end
def ban_user(user=%User{}) do
User.ban(user)
end
"""
defmodule Output do
@moduledoc false
defstruct conversion_fns: [], modules: [], fn_args_conversions: %{}
end
defmodule ArgConversion do
@moduledoc false
defstruct args: nil, return_type: nil, fn_name: nil
def new(call, return_type) do
{fn_name, args} = Macro.decompose_call(call)
%ArgConversion{fn_name: fn_name, args: args, return_type: return_type}
end
end
defmacro __using__(_opts) do
Module.register_attribute(__CALLER__.module, :enums, accumulate: true)
Module.register_attribute(__CALLER__.module, :enums_orig, accumulate: true)
Module.register_attribute(__CALLER__.module, :enum_conversions, accumulate: true)
Module.register_attribute(__CALLER__.module, :enum_conversions_orig, accumulate: true)
Module.register_attribute(__CALLER__.module, :enum_arg_conversion, accumulate: true)
Module.register_attribute(__CALLER__.module, :enum_arg_conversion_orig, accumulate: true)
quote do
require Riffed.Enumeration
import Riffed.Enumeration, only: [defenum: 2,
enumerize_struct: 2,
enumerize_function: 1,
enumerize_function: 2,
]
end
end
@doc """
Defines an enum. Enums are a series of mappings from atoms to an integer value.
They are specified much like cases in a cond statement, like this:
defenum ResponseStatus do
:success -> 200
:server_error -> 500
:not_found -> 404
end
"""
defmacro defenum(enum_name, do: mappings) do
mapping_kwargs = Enum.map(
mappings,
fn
({:->, _, [[k], v]}) when is_atom(k) ->
{k, v}
(other) ->
x = Macro.to_string(other)
raise "#{x} is not in the form :key -> value"
end)
Module.put_attribute(__CALLER__.module, :enums, {enum_name, mapping_kwargs})
Module.put_attribute(__CALLER__.module, :enums_orig, {enum_name, mappings})
end
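# A sketch of what the ResponseStatus example above provides once the enum
# module is built (see build_enum_module/1 below):
#
#   ResponseStatus.success()  #=> %ResponseStatus{ordinal: :success, value: 200}
#   ResponseStatus.value(404) #=> %ResponseStatus{ordinal: :not_found, value: 404}
#   ResponseStatus.ordinals() #=> [:success, :server_error, :not_found]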
@doc """
Tells Riffed that a struct has enums that need to be converted.
Assume you have a struct that represents a user and they have a field named
state that is a UserState enum.
enumerize_struct(Structs.MyStruct, state: UserState)
"""
defmacro enumerize_struct(struct_name, fields) do
Module.put_attribute(__CALLER__.module, :enum_conversions, {struct_name, fields})
Module.put_attribute(__CALLER__.module, :enum_conversions_orig, {struct_name, fields})
end
@doc """
Tells Riffed to convert argument of the named function.
The `fn_call` argument is a function signature to match, and you mark arguments to
be converted to enums. For example:
enumerize_function my_thrift_function(_, _, EnumOne, EnumTwo)
"""
defmacro enumerize_function(fn_call) do
Module.put_attribute(__CALLER__.module,
:enum_arg_conversion,
ArgConversion.new(fn_call, nil))
Module.put_attribute(__CALLER__.module,
:enum_arg_conversion_orig, {fn_call, nil})
end
@doc """
Tells Riffed to convert both arguments and return values of the named function to a struct.
The `fn_call` argument is a function signature to match, and you mark arguments to be
converted into enums. You can also provide a `returns:` keyword to mark the return value of
the function to be converted into an enum. For example:
enumerize_function get_enumeration(), returns: MyDefinedEnum
"""
defmacro enumerize_function(fn_call, return_kwargs) do
{return_type, _} = Code.eval_quoted(return_kwargs)
Module.put_attribute(__CALLER__.module,
:enum_arg_conversion,
ArgConversion.new(fn_call, return_type[:returns]))
Module.put_attribute(__CALLER__.module,
:enum_arg_conversion_orig, {fn_call, return_kwargs})
end
@doc false
def reconstitute(parent_module) do
enum_declarations = Module.get_attribute(parent_module, :enums_orig)
|> Enum.map(fn({enum_name, mapping_kwargs}) ->
quote do
defenum(unquote(enum_name), do: unquote(List.flatten(mapping_kwargs)))
end
end)
conversion_declarations = Module.get_attribute(parent_module, :enum_conversions_orig)
|> Enum.map(fn({struct_name, field_name}) ->
quote do
enumerize_struct(unquote(struct_name), unquote(field_name))
end
end)
List.flatten([enum_declarations, conversion_declarations])
end
@doc false
def build_cast_return_value_to_erlang(struct_module) do
get_overrides(struct_module).functions
|> Enum.reduce(
[],
fn({_fn_name, conversion=%ArgConversion{}}, acc) ->
Enum.reduce(
conversion.args, acc,
fn
({_arg_name, :none}, acc) ->
acc
({_arg_name, conversion}, acc) ->
quoted = process_arg(struct_module, conversion)
[quoted | acc]
end)
end)
end
@doc false
def get_overridden_type(fn_name, :return_type, overrides, type_spec) do
fn_overrides = Map.get(overrides, fn_name)
if fn_overrides do
fn_overrides.return_type || type_spec
else
type_spec
end
end
def get_overridden_type(fn_name, arg_name, overrides, type_spec) do
fn_overrides = Map.get(overrides, fn_name)
if fn_overrides do
case Keyword.get(fn_overrides.args, arg_name) do
:none -> type_spec
other -> other
end
else
type_spec
end
end
defp process_arg(struct_module, conversion) do
enum_module = Module.concat(struct_module, conversion)
quote do
def to_erlang(enum=%unquote(enum_module){}, _) do
enum.value()
end
end
end
@doc false
def get_overrides(container_module) do
{enum_field_conversions, _} = container_module
|> Module.get_attribute(:enum_conversions)
|> Code.eval_quoted
enum_function_conversions = container_module
|> Module.get_attribute(:enum_arg_conversion)
|> Enum.map(fn(conv=%ArgConversion{}) ->
args = conv.args
|> Enum.with_index
|> Enum.map(fn
({{_, _, nil}, idx}) ->
# underscore case ( enumerize_function my_fn(_) )
{:"arg_#{idx + 1}", :none}
({other, idx}) ->
{rv_type, _} = Code.eval_quoted(other)
{:"arg_#{idx + 1}", rv_type}
end)
%ArgConversion{conv | args: args}
end)
structs = enum_field_conversions
|> Enum.reduce(%{}, fn({struct_name, mappings}, acc) ->
fq_struct_name = Module.concat(:Elixir, Module.concat(container_module, struct_name))
Map.put(acc, fq_struct_name, mappings)
end)
functions = enum_function_conversions
|> Enum.reduce(%{}, fn(conversion, acc) ->
Map.put(acc, conversion.fn_name, conversion)
end)
%{structs: structs, functions: functions}
end
@doc false
def build(container_module) do
enums = Module.get_attribute(container_module, :enums)
enum_modules = Enum.map(enums, &build_enum_module/1)
int_to_enums = Enum.map(enums,
&(build_erlang_to_enum_function(container_module, &1)))
enum_to_ints = Enum.map(enums,
&(build_enum_to_erlang_function(container_module, &1)))
enum_conversion_fns = Enum.concat(int_to_enums, enum_to_ints)
%Output{conversion_fns: enum_conversion_fns, modules: enum_modules}
end
defp build_enum_module({enum_name, mappings}) do
mapping = Macro.expand(mappings, __ENV__)
fns = Enum.map(
mapping,
fn({k, v}) ->
quote do
def unquote(k)() do
%unquote(enum_name){ordinal: unquote(k), value: unquote(v)}
end
def value(unquote(v)) do
%unquote(enum_name){ordinal: unquote(k), value: unquote(v)}
end
end
end)
quote do
defmodule unquote(enum_name) do
defstruct ordinal: nil, value: nil
unquote_splicing(fns)
def ordinals do
unquote(Keyword.keys mapping)
end
def values do
unquote(Keyword.values mapping)
end
def mappings do
unquote(mapping)
end
end
end
end
defp build_enum_to_erlang_function(container_module, enum_decl) do
enum_name = extract_enum_name(enum_decl)
enum_alias = {:__aliases__, [alias: false], [enum_name]}
fq_enum_name = Module.concat(container_module, enum_name)
quote do
def to_erlang(enum=%unquote(fq_enum_name){}, unquote(enum_alias)) do
enum.value()
end
def to_erlang(enum=%unquote(fq_enum_name){}, _) do
enum.value()
end
end
end
defp build_erlang_to_enum_function(container_module, enum_decl) do
enum_name = extract_enum_name(enum_decl)
enum_alias = {:__aliases__, [alias: false], [enum_name]}
fq_enum_name = Module.concat(container_module, enum_name)
quote do
def to_elixir(erlang_value, unquote(enum_alias)) do
unquote(fq_enum_name).value(erlang_value)
end
end
end
defp extract_enum_name(enum_decl) do
case enum_decl do
{{_, _, [enum_name]}, _} ->
enum_name
{{_, _, enum_name}, _} when is_list(enum_name) ->
Module.concat(enum_name)
end
end
end
|
lib/riffed/enumeration.ex
| 0.835651
| 0.505432
|
enumeration.ex
|
starcoder
|
defmodule Faqcheck.Sources.StringHelpers do
alias Faqcheck.Referrals.OperatingHours
@doc """
Try to extract business hours from a description string.
## Examples
iex> extract_hours("Something containing no hours")
[]
iex> extract_hours("Pantry Hours are Monday-Friday 10:00 am - 3:30 pm.")
Enum.map(
Enum.map(0..4, &Faqcheck.Referrals.OperatingHours.Weekday.from/1),
&%Faqcheck.Referrals.OperatingHours{
weekday: &1,
opens: ~T[10:00:00],
closes: ~T[15:30:00],
})
iex> extract_hours("M-TH 8-5 & F 10-3")
[
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Monday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Tuesday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Wednesday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Thursday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Friday,
opens: ~T[10:00:00],
closes: ~T[15:00:00],
},
]
"""
def extract_hours(desc) do
desc
|> String.split("&")
|> Enum.map(&capture_hours/1)
|> Enum.filter(fn x -> !is_nil(x) end)
|> Enum.flat_map(fn captures ->
OperatingHours.from_description(
captures["start_day"],
captures["end_day"],
captures["opens"],
captures["closes"])
end)
end
def extract_irregular_hours(weekday, desc) do
captures = capture_irregular_hours(desc)
{opens, closes} = OperatingHours.parse_hours(captures["hours"])
%OperatingHours{
weekday: weekday,
opens: opens,
closes: closes,
week_regularity: extract_week_regularity(captures["regularity"]),
}
end
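@doc """
Parses the week-regularity capture ("1st", "2nd", ...) into an integer,
or `nil` when the capture is empty.
## Examples
iex> extract_week_regularity("3")
3
iex> extract_week_regularity("")
nil
"""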
def extract_week_regularity(r) do
if r == "" do
nil
else
String.to_integer(r)
end
end
@extract_hours_regex ~r/(?P<start_day>M(o|on|onday)?|T(u|ue|ues|uesday)?|W(ed|eds|ednesday)?|T(H|h|hu|hursday)?|F(r|ri|riday)?|S(a|at|aturday)?|S(u|un|unday)?)\s*(-\s*(?P<end_day>M(o|on|onday)?|T(u|ue|ues|uesday)?|W(ed|eds|ednesday)?|T(H|h|hu|hursday)?|F(r|ri|riday)?|S(a|at|aturday)?|S(u|un|unday)?))?\s+(?P<opens>(?<=\s)(?:(?:2[0-3])|(?:[01]?[0-9]))(?:\:[0-5][0-9])?)\s*(am)?\s*-\s*(?P<closes>(?:(?:2[0-3])|(?:[01]?[0-9]))(?:\:[0-5][0-9])?)/
@doc """
Capture business hours fields using a regex search.
## Examples
iex> capture_hours("M-TH 8-5")
%{"start_day" => "M", "end_day" => "TH", "opens" => "8", "closes" => "5"}
"""
def capture_hours(desc) do
Regex.named_captures(@extract_hours_regex, desc)
end
@extract_irregular_hours_regex ~r/((?P<regularity>\d)(st|nd|rd|th)[^\(]*\(?)?(?P<hours>(?:(?:2[0-3])|(?:[01]?[0-9]))(?:\:[0-5][0-9])?\s*(am|pm)?(\s*-\s*(?:(?:2[0-3])|(?:[01]?[0-9]))(?:\:[0-5][0-9])?\s*(am|pm)?)?)/
@doc """
Capture operating hours, including 1st/2nd/3rd of the month, using a regex search.
## Examples
iex> capture_irregular_hours("3rd Thursday (9:45 am)")
%{"regularity" => "3", "hours" => "9:45 am"}
iex> capture_irregular_hours("1st Thursday (10:00 am - 2:00)")
%{"regularity" => "1", "hours" => "10:00 am - 2:00"}
"""
def capture_irregular_hours(hours) do
Regex.named_captures(@extract_irregular_hours_regex, hours)
end
@doc """
Build business hours using a simple syntax.
## Examples
iex> parse_hours("M, T, TH, F: 8am-5pm & W: 8am-7pm")
[
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Monday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Tuesday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Wednesday,
opens: ~T[08:00:00],
closes: ~T[19:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Thursday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Friday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
]
iex> parse_hours("M: 8am-12pm, 1pm-5pm")
[
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Monday,
opens: ~T[08:00:00],
closes: ~T[12:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Monday,
opens: ~T[13:00:00],
closes: ~T[17:00:00],
},
]
iex> parse_hours("M-W 8AM-5PM")
[
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Monday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Tuesday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Wednesday,
opens: ~T[08:00:00],
closes: ~T[17:00:00],
},
]
iex> parse_hours("M-T 8am - 5:30pm ; F: 8am-12pm")
[
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Monday,
opens: ~T[08:00:00],
closes: ~T[17:30:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Tuesday,
opens: ~T[08:00:00],
closes: ~T[17:30:00],
},
%Faqcheck.Referrals.OperatingHours{
weekday: Faqcheck.Referrals.OperatingHours.Weekday.Friday,
opens: ~T[08:00:00],
closes: ~T[12:00:00],
},
]
"""
def parse_hours(desc) do
desc = String.trim(desc)
cond do
desc == "" ->
[]
desc == "24/7" or desc == "24 hours" ->
[OperatingHours.always_open]
true ->
String.split(desc, ~r/[;&]/)
|> Enum.flat_map(fn g ->
[days_str, hours_str] = String.split(g, ~r/(:|\s+(?=\d))/, parts: 2)
days = String.split(days_str, ~r/(,|and)/)
|> Enum.flat_map(&OperatingHours.parse_days/1)
hours = String.split(hours_str, ~r/(,|and)/)
|> Enum.filter(fn h -> String.trim(h) != "" end)
|> Enum.map(&OperatingHours.parse_hours/1)
for d <- days, {opens, closes} <- hours, do: %OperatingHours{weekday: d, opens: opens, closes: closes}
end)
|> Enum.sort_by(fn h -> h.weekday.value end)
end
end
end
|
apps/faqcheck/lib/faqcheck/sources/helpers/string_helpers.ex
| 0.626353
| 0.556882
|
string_helpers.ex
|
starcoder
|
defmodule Day14 do
use Bitwise
def part1(input) do
0..127
|> Stream.map(fn n ->
input <> "-" <> Integer.to_string(n)
end)
|> Stream.map(&KnotHash.hash/1)
|> Stream.map(fn h ->
count_bits(h, 128, 0)
end)
|> Enum.sum()
end
def part2(input) do
chart = 0..127
|> Stream.map(fn n ->
input <> "-" <> Integer.to_string(n)
end)
|> Stream.map(&KnotHash.hash/1)
|> Stream.with_index()
|> Stream.flat_map(fn {n, row} ->
{used, _} =
Enum.reduce(0..127, {[], n}, fn col, {acc, bits} ->
case band(bits, 1) do
0 -> {acc, bits >>> 1}
1 -> {[{{row, col}, :used} | acc], bits >>> 1}
end
end)
used
end)
|> Map.new
count_regions(chart, 0)
end
defp count_bits(_n, 0, acc), do: acc
defp count_bits(n, left, acc) do
count_bits(n >>> 1, left - 1, acc + band(n, 1))
end
defp count_regions(chart, num_regions) do
case Map.keys(chart) do
[pos | _] ->
chart = remove_region(chart, pos)
count_regions(chart, num_regions + 1)
[] ->
num_regions
end
end
defp remove_region(chart, pos) do
case Map.has_key?(chart, pos) do
false ->
chart
true ->
chart = Map.drop(chart, [pos])
Enum.reduce(neighbors(pos), chart, fn pos, chart ->
remove_region(chart, pos)
end)
end
end
defp neighbors({row, col}) do
[{row - 1, col}, {row + 1, col}, {row, col - 1}, {row, col + 1}]
end
end
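# The worked example from Advent of Code 2017, day 14 (puzzle input "flqrgnkx"):
#
#   Day14.part1("flqrgnkx")  #=> 8108 used squares
#   Day14.part2("flqrgnkx")  #=> 1242 regions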
defmodule KnotHash do
use Bitwise
def hash(string) do
lengths = :erlang.binary_to_list(string) ++ [17, 31, 73, 47, 23]
state = new_state()
state = Enum.reduce(1..64, state, fn _, acc ->
Enum.reduce(lengths, acc, &tie/2)
end)
sparse_hash = rotate(state)
{hashes, []} = Enum.map_reduce(1..16, sparse_hash, fn _, acc ->
{block, acc} = Enum.split(acc, 16)
hash = Enum.reduce(block, &Bitwise.bxor/2)
{hash, acc}
end)
Enum.reduce(hashes, 0, fn h, acc ->
bor(acc <<< 8, h)
end)
end
defp new_state(length \\ 256) do
%{list: Enum.to_list(0..length-1),
skip_size: 0,
current_pos: 0}
end
defp tie(length, state) do
%{list: list, skip_size: skip_size, current_pos: current_pos} = state
{first, last} = Enum.split(list, length)
list = last ++ Enum.reverse(first)
{first, last} = Enum.split(list, skip_size)
list = last ++ first
current_pos = current_pos + length + skip_size
%{state | list: list, skip_size: rem(skip_size + 1, 256), current_pos: current_pos}
end
defp rotate(state) do
%{list: list, current_pos: current_pos} = state
current_pos = rem(current_pos, length(list))
{first, last} = Enum.split(list, - current_pos)
last ++ first
end
end
|
day14/lib/day14.ex
| 0.505615
| 0.478346
|
day14.ex
|
starcoder
|
defmodule CTE do
@moduledoc """
The Closure Table for Elixir strategy, CTE for short, is a simple and elegant way of storing and working with hierarchies. It involves storing all paths through a tree, not just those with a direct parent-child relationship. You may want to choose this model over the [Nested Sets model](https://en.wikipedia.org/wiki/Nested_set_model) should you need referential integrity and to assign nodes to multiple trees.
With CTE you can navigate through hierarchies using a simple [API](CTE.Adapter.html#functions), such as: finding the ascendants and descendants of a node, inserting and deleting nodes, moving entire sub-trees or print them as a digraph (.dot) file.
### Quick example.
For this example we're using the in-[Memory Adapter](CTE.Adapter.Memory.html#content). This `Adapter` is useful for prototyping or with data structures that can easily fit in memory; their persistence being taken care of by other components. For more involved use cases, CTE integrates with Ecto using a simple API.
When used from a module, CTE expects the `:otp_app` and `:adapter` attributes to be defined. The `:otp_app` should point to an OTP application that might provide additional configuration. The `:nodes` and `:paths` attributes are equally required. With the [Memory Adapter](CTE.Adapter.Memory.html#content), the `:nodes` attribute is a Map defining your nodes, while the `:paths` attribute is a list of lists containing the tree paths between the nodes. For example:
defmodule CTM do
use CTE,
otp_app: :ct_empty,
adapter: CTE.Adapter.Memory,
nodes: %{
1 => %{id: 1, author: "Olie", comment: "Is Closure Table better than the Nested Sets?"},
2 => %{id: 2, author: "Rolie", comment: "It depends. Do you need referential integrity?"},
3 => %{id: 3, author: "Polie", comment: "Yeah."}
},
paths: [[1, 1], [1, 2], [1, 3], [2, 2], [2, 3], [3, 3]]
end
When using the `CTE.Adapter.Ecto`, the `:nodes` attribute will be a Schema, i.e. `Post`, `TreePath`, etc. In our initial implementation, the node definitions must have at least an `:id` property. This caveat will be lifted in a later implementation.
Add the `CTM` module to your main supervision tree:
defmodule CTM.Application do
@moduledoc false
use Application
def start(_type, _args) do
opts = [strategy: :one_for_one, name: CTM.Supervisor]
Supervisor.start_link([CTM], opts)
end
end
Using `iex -S mix`, for quickly experimenting with the CTE API:
- find the descendants of comment #1
```elixir
iex» CTM.descendants(1)
{:ok, [3, 2]}
```
- find the ancestors
```elixir
iex» CTM.ancestors(2)
{:ok, [1]}
iex» CTM.ancestors(3)
{:ok, [1]}
```
- find the ancestors, with information about the node:
```elixir
iex» CTM.ancestors(2, nodes: true)
{:ok,
[
%{
author: "Olie",
comment: "Is Closure Table better than the Nested Sets?",
id: 1
}
]}
```
- move leafs/subtrees around. From being a child of comment #1, to becoming a
child of comment #2, in the following example:
```elixir
iex» CTM.move(3, 2, limit: 1)
:ok
iex» CTM.descendants(2)
{:ok, [3]}
```
Please check the docs, the tests, and the examples folder, for more details.
"""
@type config :: Keyword.t()
@type table :: String.t() | atom
@type nodes :: map() | table
@type paths :: [list()] | table
@type repo :: Ecto.Repo
@type t :: %__MODULE__{
adapter: any() | nil,
nodes: nodes | nil,
paths: paths | nil,
repo: repo | nil
}
defstruct [:nodes, :paths, :adapter, :repo]
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@default_adapter CTE.Adapter.Memory
@default_config [nodes: [], paths: [], adapter: @default_adapter, repo: nil]
@default_dynamic_supervisor opts[:default_dynamic_supervisor] || __MODULE__
@otp_app Keyword.fetch!(opts, :otp_app)
@adapter Keyword.fetch!(opts, :adapter)
@opts opts
@doc false
def config(), do: parse_config(@opts)
@doc false
def __adapter__ do
{{:repo, _repo}, %{pid: adapter}} = CTE.Registry.lookup(get_dynamic_supervisor())
adapter
end
@doc false
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
@doc false
def start_link(_opts \\ []) do
CTE.Supervisor.start_link(__MODULE__, @otp_app, @adapter, config())
end
@compile {:inline, get_dynamic_supervisor: 0}
def get_dynamic_supervisor() do
Process.get({__MODULE__, :dynamic_supervisor}, @default_dynamic_supervisor)
end
def put_dynamic_supervisor(dynamic) when is_atom(dynamic) or is_pid(dynamic) do
Process.put({__MODULE__, :dynamic_supervisor}, dynamic) || @default_dynamic_supervisor
end
def insert(leaf, ancestor, opts \\ [])
def insert(leaf, ancestor, opts), do: @adapter.insert(__adapter__(), leaf, ancestor, opts)
def tree(leaf, opts \\ [])
def tree(leaf, opts), do: @adapter.tree(__adapter__(), leaf, opts)
def ancestors(descendant, opts \\ [])
def ancestors(descendant, opts), do: @adapter.ancestors(__adapter__(), descendant, opts)
def descendants(ancestor, opts \\ [])
def descendants(ancestor, opts), do: @adapter.descendants(__adapter__(), ancestor, opts)
@doc """
When `limit: 1` (the default), delete only the leaf; otherwise delete the entire subtree.
"""
def delete(leaf, ops \\ [])
def delete(leaf, opts), do: @adapter.delete(__adapter__(), leaf, opts)
def move(leaf, ancestor, opts \\ [])
def move(leaf, ancestor, opts), do: @adapter.move(__adapter__(), leaf, ancestor, opts)
defp parse_config(config), do: CTE.parse_config(@otp_app, __MODULE__, @opts, config)
end
end
@doc false
def parse_config(otp_app, adapter, adapter_config, dynamic_config) do
conf =
Application.get_env(otp_app, adapter, [])
|> Keyword.merge(adapter_config)
|> Keyword.merge(dynamic_config)
|> CTE.interpolate_env_vars()
%CTE{
nodes: Keyword.get(conf, :nodes, []),
paths: Keyword.get(conf, :paths, []),
repo: Keyword.get(conf, :repo, nil),
adapter: Keyword.get(conf, :adapter)
}
end
@doc false
def interpolate_env_vars(config) do
Enum.map(config, fn
{key, {:system, env_var}} -> {key, System.get_env(env_var)}
{key, value} -> {key, value}
end)
end
end
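# A minimal sketch of the `{:system, var}` interpolation performed during
# parse_config/4 (the env var name is hypothetical):
#
#   System.put_env("CTE_REPO", "MyApp.Repo")
#   CTE.interpolate_env_vars(repo: {:system, "CTE_REPO"}, adapter: CTE.Adapter.Memory)
#   #=> [repo: "MyApp.Repo", adapter: CTE.Adapter.Memory]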
|
lib/cte.ex
| 0.863204
| 0.90355
|
cte.ex
|
starcoder
|
defmodule Datacop do
@moduledoc """
An authorization library with `Dataloader` and `Absinthe` support.
"""
@typedoc """
Option
* `:subject` – any value you want to access in the authorize/3 callback.
* `:loader` – an initialized Dataloader struct with loaded sources.
"""
@type option :: {:subject, any()} | {:loader, Dataloader.t()}
@typedoc "Any term which describes an action you want to perform. Usually atoms are used."
@type action :: term()
@typedoc "Usually your user struct"
@type actor :: term()
@doc """
Authorize an action.
Processes `c:Datacop.Policy.authorize/3` result.
## Examples
> Datacop.permit(MyApp.Accounts, :view_email, current_user, subject: other_user)
:ok
> Datacop.permit(MyApp.Accounts, :view_email, current_user, subject: other_user, loader: loader)
{:error, %Datacop.UnauthorizedError{message: "Unauthorized"}}
"""
@spec permit(policy :: module(), action(), actor(), opts :: [option()]) ::
:ok | {:error, Datacop.UnauthorizedError.t()}
def permit(module, action, actor, opts \\ []) do
subject = Keyword.get(opts, :subject)
case module.authorize(action, actor, subject) do
{:dataloader, %{source_name: source_name, batch_key: batch_key, inputs: inputs}} ->
loader = Keyword.get_lazy(opts, :loader, fn -> default_loader(module) end)
loader
|> Dataloader.load(source_name, batch_key, inputs)
|> Dataloader.run()
|> Dataloader.get(source_name, batch_key, inputs)
|> Datacop.Policy.normalize_output()
result ->
Datacop.Policy.normalize_output(result)
end
end
@doc """
The same as `permit/4`, but returns a `boolean`.
"""
@spec permit?(policy :: module(), action(), actor(), opts :: [option()]) :: boolean()
def permit?(module, action, actor, opts \\ []) do
module
|> permit(action, actor, Keyword.delete(opts, :callback))
|> case do
:ok -> true
_ -> false
end
end
@doc """
Returns initialized dataloader struct with the source.
It requires `data/0` function to be defined for the particular module.
"""
def default_loader(module) do
if Kernel.function_exported?(module, :data, 0) do
Dataloader.new() |> Dataloader.add_source(module, module.data())
else
raise ArgumentError,
"Cannot automatically determine the source of #{inspect(module)} - " <>
"specify the `data/0` function OR pass loader explicitly"
end
end
end
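# A minimal policy sketch; `MyApp.Accounts` and its rules are assumptions,
# and it presumes plain boolean results are among the values that
# `Datacop.Policy.normalize_output/1` accepts:
#
#   defmodule MyApp.Accounts do
#     def authorize(:view_email, %{id: id}, %{id: id}), do: true
#     def authorize(:view_email, %{admin?: admin?}, _subject), do: admin?
#     def authorize(_action, _actor, _subject), do: false
#   end
#
#   Datacop.permit?(MyApp.Accounts, :view_email, current_user, subject: other_user)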
|
lib/datacop.ex
| 0.887516
| 0.453988
|
datacop.ex
|
starcoder
|
defmodule Microdata.Strategy.JSONLD do
@moduledoc """
`Microdata.Strategy.JSONLD` defines a strategy to extract linked data from a `Meeseeks.Document`, based on the W3C [JSON-LD standard](https://www.w3.org/TR/json-ld/).
### Caveats
- Only a small fraction of section 6 of the [JSON-LD specification](https://www.w3.org/TR/json-ld/) is implemented
"""
@behaviour Microdata.Strategy
import Meeseeks.XPath
alias Microdata.{Item, Property}
@impl true
def parse_items(doc, _base_uri \\ nil) do
doc
|> Meeseeks.all(xpath("//script[@type=\"application/ld+json\"]"))
|> Enum.map(&parse_result/1)
|> List.flatten()
|> Enum.reject(&is_nil/1)
end
defp parse_result(result) do
result
|> Meeseeks.data()
|> Microdata.json_library().decode()
|> case do
{:ok, object} -> parse_object(object, %{})
{:error, _} -> nil
end
end
def parse_object(nil, _parent_context), do: nil
def parse_object(object, parent_context) when is_list(object) do
object
|> Enum.map(&parse_object(&1, parent_context))
end
def parse_object(object, parent_context) do
id = object["@id"]
context =
object
|> extract_context()
|> normalize_definitions(parent_context)
|> Enum.into(parent_context)
types =
object
|> extract_types()
|> normalize_types(context)
|> Enum.into(MapSet.new())
object
|> extract_properties()
|> map_terms(context)
|> normalize_values(context)
|> List.flatten()
|> to_property_list()
|> case do
[] ->
parse_object(object["@graph"], context)
properties ->
%Item{
id: id,
types: types,
properties: properties
}
end
end
defp extract_context(object) do
object
|> Map.get("@context")
|> case do
context when is_map(context) ->
context
context when is_binary(context) ->
context
|> download_context()
|> Microdata.json_library().decode!()
|> extract_context()
_ ->
%{}
end
end
defp download_context("https://schema.org/") do
read_locally("schema.org.json")
end
defp download_context("http://schema.org/") do
read_locally("schema.org.json")
end
defp download_context("https://schema.org") do
read_locally("schema.org.json")
end
defp download_context("http://schema.org") do
read_locally("schema.org.json")
end
defp download_context(context) do
HTTPoison.get!(context, [Accept: "application/ld+json"], follow_redirect: true).body
end
defp read_locally(file) do
:code.priv_dir(:microdata)
|> Path.join("schemas")
|> Path.join(file)
|> File.read!()
end
defp normalize_definitions(context, parent_context) do
lookup_context = Map.merge(context, parent_context)
context
|> Enum.map(fn {term, mapping} ->
case mapping do
iri when is_binary(iri) -> {term, enlarge_iri(iri, lookup_context)}
%{"@id" => iri} -> {term, enlarge_iri(iri, lookup_context)}
end
end)
end
defp enlarge_iri(iri, context) do
# Split on the first colon only, and expand "prefix:suffix" IRIs whose
# prefix is a known context term; other IRIs pass through unchanged.
iri
|> String.split(":", parts: 2)
|> case do
[iri] -> iri
[prefix, suffix] ->
case context[prefix] do
nil -> iri
expanded -> "#{expanded}#{suffix}"
end
end
end
defp extract_types(object) do
case object["@type"] do
types when is_list(types) -> types
nil -> []
type -> [type]
end
end
defp normalize_types(types, context) do
types
|> Enum.map(fn type -> context[type] || type end)
end
defp extract_properties(object) do
object
|> Enum.reject(fn {term, _value} ->
term |> String.starts_with?("@")
end)
end
defp map_terms(properties, context) do
properties
|> Enum.map(fn {term, value} ->
{context[term] || term, value}
end)
end
defp normalize_values(properties, context) do
properties
|> Enum.map(fn {term, value} ->
case value do
list when is_list(list) ->
list |> Enum.map(&{term, &1}) |> normalize_values(context)
%{"@id" => value} ->
{term, value}
%{} = value ->
{term, parse_object(value, context)}
value ->
{term, value}
end
end)
end
defp to_property_list(properties) do
properties
|> Enum.map(fn {term, value} ->
%Property{
names: [term] |> MapSet.new(),
value: value
}
end)
end
end
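# A minimal usage sketch, assuming Meeseeks.parse/1 and a page that embeds
# a schema.org object:
#
#   html = ~s(<script type="application/ld+json">
#     {"@context": "https://schema.org", "@type": "Person", "name": "Olie"}
#   </script>)
#
#   html
#   |> Meeseeks.parse()
#   |> Microdata.Strategy.JSONLD.parse_items()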
|
lib/microdata/strategy/json_ld.ex
| 0.858778
| 0.532729
|
json_ld.ex
|
starcoder
|
defmodule Lemma.En.Verbs do
@moduledoc false
@verbs_set """
aah abacinate abandon abase abash abate abbreviate abdicate abduce abduct
aberrate abet abhor abide abjure ablactate ablate abnegate abolish abominate
abort abound about-face abrade abrase abreact abridge abrogate abscise abscond
abseil absent absolve absorb absquatulate abstain abstract abuse abut aby abye
accede accelerate accent accentuate accept access accession acclaim acclimate
acclimatise acclimatize accommodate accompany accomplish accord accost account
accouter accoutre accredit accrete accrue acculturate accumulate accurse accuse
accustom ace acerbate acetify acetylate acetylise acetylize ache achieve
achromatise achromatize acid-wash acidify acidulate acknowledge acquaint
acquiesce acquire acquit act action activate actualise actualize actuate
acuminate ad-lib adapt add addict addle address adduce adduct adhere adjoin
adjourn adjudge adjudicate adjure adjust admeasure administer administrate
admire admit admix admonish adolesce adopt adore adorn adsorb adulate adulterate
adumbrate advance advantage advect adventure advert advertise advertize advise
advocate aerate aerify aerosolise aerosolize aestivate affect affiance affiliate
affirm affix afflict afford afforest affranchise affright affront age agenise
agenize agglomerate agglutinate aggrade aggrandise aggrandize aggravate
aggregate aggress aggrieve aggroup agitate agnise agnize agonise agonize agree
aid ail aim air air-condition air-cool air-drop air-freight air-ship air-slake
airbrush airfreight airlift airmail airt alarm alchemise alchemize alcoholise
alcoholize alert alibi alien alienate alight align aliment aline alkalify
alkalinise alkalinize alkalise alkalize allay allege allegorise allegorize
alleviate alligator alliterate allocate allot allow allowance alloy allude
allure ally alphabetise alphabetize alter altercate alternate aluminise
aluminize amalgamate amass amaze ambition amble ambulate ambuscade ambush
ameliorate amend amerce americanise americanize ammoniate ammonify amnesty
amortise amortize amount amplify amputate amuse anaesthetise anaesthetize
anagram anagrammatise anagrammatize analogise analogize analyse analyze
anastomose anathematise anathematize anathemise anathemize anatomise anatomize
anchor ancylose anele anesthetise anesthetize anger angle angle-park anglicise
anglicize anguish angulate animadvert animalise animalize animate animise
animize ankylose anneal annex annihilate annotate announce annoy annul
annunciate anodise anodize anoint answer antagonise antagonize ante antecede
antedate anthologise anthologize anthropomorphise anthropomorphize antic
anticipate antiquate antique antisepticize ape aphorise aphorize apologise
apologize apostatise apostatize apostrophise apostrophize apotheose apotheosise
apotheosize appal appall apparel appeal appear appease append apperceive
appertain applaud applique apply appoint apportion appose appraise appreciate
apprehend apprentice apprise apprize approach approbate appropriate approve
approximate aquaplane aquatint arbitrage arbitrate arborise arborize arc arch
archaise archaize archive argue argufy arise arm armor armour aromatise
aromatize arouse arraign arrange array arrest arrive arrogate arterialise
arterialize article articulate ascend ascertain ascribe ash ask asperse asphalt
asphyxiate aspirate aspire assail assassinate assault assay assemble assent
assert assess asseverate assibilate assign assimilate assist associate assoil
assonate assort assuage assume assure asterisk astonish astound astringe
astrogate atom-bomb atomise atomize atone atrophy attach attack attain attaint
attemper attempt attend attenuate attest attire attitudinise attitudinize attorn
attract attribute attune auction auctioneer audit audition augment augur aurify
auscultate auspicate authenticate author authorise authorize autoclave autograph
automate automatise automatize automobile autopsy autotomise autotomize avail
avalanche avenge aver average avert avianise avianize aviate avoid avouch avow
avulse await awake awaken award awe ax axe azure baa babbitt babble baby baby-
sit bach bachelor back backbite backcross backdate backfire background backhand
backlash backlog backpack backpedal backscatter backslap backslide backspace
backstitch backstop backstroke backtrack bacterise bacterize badge badger
badmouth baffle bag bail bait bake balance bald bale balk balkanise balkanize
ball ballast balloon ballot ballyhoo ballyrag bamboozle ban band bandage bandy
bang banish bank bankroll bankrupt banquet banter baptise baptize bar barb
barbarise barbarize barbecue barbeque barber bard barde bare barf bargain barge
bark barnstorm baronetise baronetize barrack barrage barrel barricade barricado
barter base bash basify bask basset bastardise bastardize baste bastinado bat
batch bate batfowl bath bathe batik batten batter battle baulk bawl bay bayonet
be beach beacon bead beak beam bean bear beard beat beatify beautify beaver
bebop becalm bechance becharm beckon becloud become bed bed-hop bedamn bedaub
bedaze bedazzle bedeck bedevil bedew bedhop bedight bedim bedizen bedraggle beef
beep beeswax beetle befall befit befog befool befoul befriend befuddle beg beget
beggar begild begin begrime begrudge beguile behave behead behold behoove behove
bejewel belabor belabour belay belch beleaguer belie believe belittle bell
bellow belly belly-flop belly-land bellyache bellylaugh belong belt bemire
bemoan bemock bemuse bench bend benday benefact benefice beneficiate benefit
benficiate benight benumb beplaster bequeath berate bereave berry berth beseech
beseem beset beshrew besiege besmear besmirch besot bespangle bespatter bespeak
bespeckle bespot besprinkle best bestialise bestialize bestir bestow bestrew
bestride bet bethink betide betoken betray betroth better bevel bewail beware
bewhisker bewilder bewitch bewray bias bib bicker bicycle bid bide biff
bifurcate bight bike bilge bilk bill billet billow bin bind binge bioassay
biodegrade birch bird bird-nest birdie birdlime birdnest birdwatch birl birle
birr birth bisect bitch bite bitt bitter bituminise bituminize bivouac blab
blabber black blackball blackberry blacken blackguard blackjack blacklead
blackleg blacklist blackmail blacktop blackwash blame blanch blandish blank
blanket blare blarney blaspheme blast blat blate blather blaze blazon bleach
blear bleat bleed bleep blemish blench blend bless blether blight blind
blindfold blindside blink blinker blister blither blitz blitzkrieg bloat blob
block blockade blog blood bloody bloom blossom blot blotch bloviate blow blow-
dry blub blubber bludgeon blue blue-pencil blueprint bluff blunder blunt blur
blurt blush bluster board boast boat boat-race bob bobble bobsled bode bodge
body body-surf bodypaint bog bogey boggle boil boldface bollix bollocks
bolshevise bolshevize bolster bolt bomb bombard bombilate bombinate bombproof
bond bonderise bonderize bone bong bonk bonnet boo boob boogie book boom
boomerang boondoggle boost boot bootleg bootlick bootstrap booze bop border bore
borrow bosom boss botanise botanize botch bother bottle bottlefeed bottleneck
bottom bounce bound bourgeon bouse bow bowdlerise bowdlerize bower bowl bowse
box boycott brabble brace brachiate bracket brad brag braid brail braille brain
brainstorm brainwash braise brake branch brand brandish brandmark brattice
brattle brave bravo brawl bray braze brazen breach bread break break-dance
breakfast bream breast breastfeed breaststroke breathalyse breathalyze breathe
brecciate breed breeze brevet brew bribe bridge bridle brief brigade brighten
brim brine bring brisk brisken bristle broach broadcast broaden broadside
brocade broider broil broker bromate brominate bronze brooch brood brook broom
browbeat brown brown-nose browse bruise bruit brunch brush brutalise brutalize
bubble buccaneer buck bucket buckle buckram bud budge budget buff buffalo buffer
buffet bug bugger bugle build bulge bulk bull bulldog bulldoze bulletin
bulletproof bullshit bully bullyrag bulwark bum bumble bump bunch bunco bundle
bung bungle bunk bunker bunt buoy bur burble burden burgeon burglarise
burglarize burgle burke burl burlesque burn burnish burp burr burrow burst
burthen bury bus bush bushel bushwhack busk buss bust bustle busy butcher butt
butt-weld butter butterfly button buttonhole buttress buttweld butylate buy buzz
by-bid bypass ca-ca cab cabal cabbage cabin cable cache cachinnate cackle
caddie caddy cadge cage cajole cake cakewalk calcify calcimine calcine calculate
calendar calender calibrate caliper calk call calligraph calliper callous callus
calm calumniate calve camber camouflage camp campaign camphorate can canal
canalise canalize cancel candle candy cane canker cannibalise cannibalize cannon
cannonade cannulate cannulise cannulize canoe canonise canonize canoodle canopy
cant canter cantilever cantillate canton canulate canvas canvass cap capacitate
caparison caper capitalise capitalize capitulate caponise caponize capriole
capsize capsulate capsule capsulise capsulize captain caption captivate capture
caracole caramelise caramelize caravan carbonate carbonise carbonize carboxylate
carburet carburise carburize card care careen career caress caricature carjack
cark carmine carnalise carnalize carnify carol carom carouse carp carpenter
carpet carry cart cartoon cartwheel carve cascade case caseate cash cashier
casket cast castigate castle castrate cat catabolise catabolize catalog
catalogue catalyse catalyze catapult catcall catch catechise catechize
categorise categorize catenate catenulate cater caterwaul cathect catheterise
catheterize catholicise catholicize catnap caucus caulk cause causeway cauterise
cauterize caution cave cavern cavil cavort caw cease cede celebrate cement cense
censor censure census center centralise centralize centre centrifugate
centrifuge cere cerebrate certificate certify cha-cha chafe chaff chaffer
chagrin chain chain-smoke chair chairman chalk challenge chamber chamfer champ
champion chance chandelle change channel channel-surf channelise channelize
chant chap chaperon chaperone char character characterise characterize charcoal
charge chariot charleston charm chart charter chase chasse chasten chastise chat
chatter chauffeur chaw cheapen cheat check checker checkmate checkrow cheek
cheep cheer cheerlead cheese chelate chemisorb cheque chequer cherish chevvy
chevy chew chicane chicken-fight chickenfight chide child-proof childproof chill
chime chin chine chink chip chirk chiromance chirp chirr chirrup chisel
chit-chat chitchat chitter chivvy chivy chlorinate chloroform chock choir choke chomp
chondrify choose chop chord choreograph chortle chorus chouse christen
christianise christianize chrome chromium-plate chronicle chronologise
chronologize chuck chuckle chuff chug chunk church churn churr chute cicatrise
cicatrize cinch cinematise cinematize cipher circle circuit circularise
circularize circulate circumambulate circumcise circumfuse circumnavigate
circumscribe circumstantiate circumvallate circumvent circumvolute circumvolve
cite citify citrate civilise civilize clabber clack claim clam clamber clamor
clamour clamp clang clangor clangour clank clap clapboard clapperclaw claret
clarify clarion clash clasp class classicise classicize classify clatter claver
claw claxon clean cleanse clear clear-cut cleat cleave clench clerk clew click
climax climb clinch cling clink clinker clip clitter cloak clobber clock clog
cloister clomp clone clop close closet closure clot clothe cloture cloud clout
clown cloy club cluck clue clump clunk cluster clutch clutter co-author
co-educate co-occur co-opt co-sign co-star co-vary coach coact coagulate coal
coalesce coapt coarsen coast coat coax cobble cobblestone cocainise cocainize
cock cocker cockle coconspire cocoon cod coddle code codify coeducate coerce
coexist coffin cog coggle cogitate cognise cognize cohabit cohere coif coiffe
coiffure coil coin coincide coinsure coke cold-cream cold-shoulder coldcock
coldwork collaborate collapse collar collate collateralize collect collectivise
collectivize collide colligate collimate collocate collogue collude colly
colonise colonize color colorcast colorise colorize colour colourise colourize
comb combat combine combust come comfit comfort command commandeer commemorate
commence commend comment commentate commercialise commercialize comminate
commingle comminute commiserate commission commit commix commove communalise
communalize commune communicate communise communize commutate commute compact
companion company compare comparison-shop compart compartmentalise
compartmentalize compass compassionate compel compensate compere compete compile
complain complect complement complete complexify complexion complicate
compliment complot comply comport compose compost compound comprehend compress
comprise compromise compute computerise computerize con concatenate conceal
concede conceive concenter concentrate concentre conceptualise conceptualize
concern concert concertina concertise concertize conciliate conclude concoct
concord concrete concretise concretize concur concuss condemn condense
condescend condition condole condone conduce conduct cone confab confabulate
confect confection confederate confer confess confide configure confine confirm
confiscate conflagrate conflate conflict conform confound confront confuse
confute conga conge congeal congee congest conglobate conglobe conglomerate
conglutinate congratulate congregate conjecture conjoin conjugate conjure conk
conn connect connive connote conquer conscript consecrate consent conserve
consider consign consist consociate console consolidate consonate consort
conspire constellate consternate constipate constitute constitutionalise
constitutionalize constrain constrict constringe construct construe
consubstantiate consult consume consummate contact contain containerise
containerize contaminate contemn contemplate contemporise contemporize contend
content contest continue contort contour contract contradance contradict
contradistinguish contraindicate contrast contravene contredanse contribute
contrive control controvert contuse convalesce convect convene conventionalise
conventionalize converge converse convert convey convict convince convoke
convolute convolve convoy convulse coo cooccur cook cool cooper cooperate
coordinate cop cope copolymerise copolymerize copper copper-bottom copulate copy
copyedit copyread copyright coquet coquette corbel cord corduroy core cork
corkscrew corn corner cornice coronate corrade corral correct correlate
correspond corroborate corrode corrugate corrupt corset coruscate cosh cosign
cosponsor cosset cost costume cotton couch cough counsel count countenance
counter counter-drill counteract counterattack counterbalance counterchallenge
counterchange countercheck counterclaim counterfeit countermand countermarch
countermine counterplot counterpoint counterpoise counterpose countersign
countersink counterstrike countervail counterweight country-dance couple course
court court-martial covenant cover covet cow cower cowhide cowl cox cozen crab
crack crackle cradle craft cram cramp cranch crane crank crap crape crash
crash-dive crate craunch crave crawfish crawl crayon craze creak cream crease create
credit creep cremate crenel crenelate crenellate creolize creosote crepe
crepitate crescendo crest crew crib crick cricket criminalise criminalize
criminate crimp crimson cringe crinkle cripple crisp crispen crisscross
criticise criticize critique croak crochet crock cronk crook croon crop croquet
cross cross-check cross-dress cross-fertilise cross-fertilize cross-file
cross-index cross-link cross-pollinate cross-refer crossbreed crosscut crosshatch
crossruff crouch crow crowd crown crucify cruise crumb crumble crump crumple
crunch crusade crush crust cry crystalise crystalize crystallise crystallize cub
cube cuckold cuckoo cuddle cudgel cue cuff cull culminate cultivate culture
cumber cumulate cup curb curdle cure curl curry currycomb curse curtail curtain
curtsey curtsy curve curvet cushion cuss custom-make customise customize cut
cutinize cybernate cycle cyclostyle cypher dab dabble dado daisy-chain dally dam
damage damascene damn damp dampen dance dandify dandle dangle dapple dare darken
darn dart dash date date-mark dateline datemark daub daunt dawdle dawn daydream
daze dazzle de-access de-aerate de-emphasise de-emphasize de-energise
de-energize de-escalate de-ice de-iodinate de-ionate deaccession deactivate deaden
deaerate deaf deafen deal deaminate deaminize debar debark debase debate debauch
debilitate debit debone debouch debrief debug debunk debut decaffeinate
decalcify decamp decant decapitate decarbonate decarbonise decarbonize
decarboxylate decarburise decarburize decay decease deceive decelerate
decentralise decentralize decerebrate decertify dechlorinate decide decimalise
decimalize decimate decipher deck declaim declare declassify declaw decline
declutch decoct decode decoke decollate decolonise decolonize decolor decolorise
decolorize decolour decolourise decolourize decommission decompose decompress
deconcentrate deconsecrate deconstruct decontaminate decontrol decorate
decorticate decouple decoy decrease decree decrepitate decrescendo decriminalise
decriminalize decry decrypt decussate dedicate dedifferentiate deduce deduct
deem deep-dye deep-fat-fry deep-fry deep-six deepen deface defalcate defame
defang defat default defeat defecate defect defeminise defeminize defend
defenestrate defer defervesce defibrillate defibrinate defile define deflagrate
deflate deflect deflower defog defoliate deforest deform defraud defray defrock
defrost defuse defy degas degauss degenerate deglaze deglycerolise deglycerolize
degrade degrease degust dehisce dehorn dehumanise dehumanize dehumidify
dehydrate dehydrogenate deice deify deign deionize deject delay delegate delete
deliberate delight delimit delimitate delineate deliquesce delist deliver
delocalize delouse delude deluge delve demagnetise demagnetize demand demarcate
demasculinise demasculinize dematerialise dematerialize demean demilitarise
demilitarize demineralise demineralize demise demist demo demob demobilise
demobilize democratise democratize demodulate demolish demonetise demonetize
demonise demonize demonstrate demoralise demoralize demote demulsify demur
demyelinate demystify demythologise demythologize denationalise denationalize
denaturalise denaturalize denature denazify denigrate denitrify denominate
denote denounce dent denudate denude deny deodorise deodorize deodourise
deoxidise deoxidize deoxygenate depart depend depersonalise depersonalize depict
depilate deplane deplete deplore deploy deplumate deplume depolarise depolarize
depone depopulate deport depose deposit deprave deprecate depreciate depress
depressurise depressurize deprive depute deputise deputize deracinate derail
derange derate derecognise derecognize deregulate derequisition derestrict
deride derive derogate desacralize desalinate desalinise desalinize desalt
descale descant descend describe descry desecrate desegregate desensitise
desensitize desert deserve desex desexualise desexualize desiccate design
designate desire desist desolate desorb despair despatch despise despoil despond
desquamate destabilise destabilize destain destalinise destalinize destine
destress destroy destruct desulfurize desulphurize desynchronise desynchronize
detach detail detain detect deter deterge deteriorate determine detest dethaw
dethrone detick detonate detour detox detoxicate detoxify detract detrain
detransitivise detransitivize detribalise detribalize detusk devaluate devalue
devastate devein develop deviate devil devilise devilize devise devitalise
devitalize devitrify devoice devolve devote devour diabolise diabolize diagnose
diagonalise diagonalize diagram dial dialyse dialyze diazotize dibble dice
dichotomise dichotomize dicker dictate didder diddle die diet differ
differentiate diffract diffuse dig digest digitalise digitalize digitise
digitize dignify digress dike dilapidate dilate dilly-dally dillydally dilute
dim dimension diminish dimple din dine ding dingdong dinge dip diphthongise
diphthongize direct dirty disable disabuse disaccord disadvantage disaffect
disafforest disagree disallow disambiguate disappear disappoint disapprove
disarm disarrange disarray disarticulate disassemble disassociate disavow
disband disbar disbelieve disbud disburden disburse disc-jockey discard discase
discern discerp discharge discipline disclaim disclose disco discolor
discolorise discolorize discolour discolourise discombobulate discomfit
discommode discompose disconcert disconnect discontent discontinue discord
discount discountenance discourage discourse discover discredit discriminate
discuss disdain disembark disembarrass disembody disembowel disembroil disenable
disenchant disencumber disenfranchise disengage disentangle disestablish
disesteem disfavor disfavour disfigure disforest disfranchise disgorge disgrace
disgruntle disguise disgust dish disharmonize dishearten dishevel dishonor
dishonour disillusion disincarnate disincline disinfect disinfest disinherit
disintegrate disinter disinvest disinvolve disjoin disjoint disk disk-jockey
dislike dislocate dislodge dismantle dismay dismember dismiss dismount disobey
disoblige disorder disorganise disorganize disorient disorientate disown
disparage dispatch dispel dispense disperse dispirit displace display displease
displume disport dispose dispossess dispread disprove dispute disqualify
disquiet disregard disrespect disrobe disrupt diss dissatisfy dissect dissemble
disseminate dissent dissertate dissever dissimilate dissimulate dissipate
dissociate dissolve dissonate dissuade distance distemper distend distil distill
distinguish distort distract distrain distress distribute district distrust
disturb disunify disunite ditch dither ditto divagate divaricate dive dive-bomb
diverge diversify divert divest divide divine divorce divulge dizen dizzy dj do
dock docket doctor document dodder dodge doff dog dogfight dogmatise dogmatize
dogsled domesticate domesticise domesticize domicile domiciliate dominate
domineer don donate dong doodle doom dope dose doss dot dote double double-check
double-date double-glaze double-park double-space double-team doubt douche douse
dovetail dower down downgrade download downplay downsize dowse doze draft drag
draggle dragoon drain dramatise dramatize drape draught draw drawl dread dream
dredge drench dress dribble drift drill drink drip drip-dry drive drivel drizzle
drone drool droop drop drop-kick dropforge dropkick drown drowse drub drudge
drug drum dry dry-dock dry-nurse dry-rot dry-wall drydock dub duck duel dulcify
dulcorate dull dumbfound dummy dump dun dung dunk dupe duplex duplicate dusk
dust dwarf dwell dwindle dye dyke dynamise dynamite dynamize e-mail eagle
earmark earn earth ease eat eavesdrop ebb ebonise ebonize echo eclipse economise
economize eddy edge edify edit editorialise editorialize educate educe
edulcorate eff efface effect effectuate effeminise effeminize effervesce
effloresce effuse egest egg egotrip egress ejaculate eject elaborate elapse
elate elbow elect electioneer electrify electrocute electroplate elegise elegize
elevate elicit elide eliminate elocute elongate elope elucidate elude elute
emaciate email emanate emancipate emasculate embalm embank embargo embark
embarrass embattle embed embellish embezzle embitter emblazon embody embolden
emboss embower embrace embrangle embrittle embrocate embroider embroil embrown
emcee emend emerge emigrate emit emote empale empanel empathise empathize
emphasise emphasize emplace emplane employ empower empty empurple emulate
emulsify enable enact enamel enamor enamour encamp encapsulate encase enchain
enchant encipher encircle enclose enclothe encode encompass encore encounter
encourage encrimson encroach encrust encrypt encumber end endanger endear
endeavor endeavour endorse endow endue endure energise energize enervate
enfeeble enfeoff enfilade enfold enforce enfranchise engage engender engild
engineer englut engorge engraft engrave engross engulf enhance enjoin enjoy
enkindle enlace enlarge enlighten enlist enliven enmesh ennoble enounce enplane
enquire enrage enrapture enrich enrobe enrol enroll ensconce enshrine enshroud
ensile ensky enslave ensnare ensnarl ensue ensure entail entangle enter
entertain enthral enthrall enthrone enthuse entice entitle entomb entrain
entrance entrap entreat entrench entrust entwine enucleate enumerate enunciate
envelop envenom environ envisage envision envy enwrap epilate epitomise
epitomize epoxy equal equalise equalize equate equilibrate equilibrise
equilibrize equip equivocate eradicate erase erect erode eroticize err eruct
erupt escalade escalate escallop escape eschew escort espouse espy essay
establish esteem esterify estimate estivate estrange etch eternalise eternalize
eternise eternize etherealize etherialise etherify etherise etherize etiolate
etymologise etymologize eulogise eulogize euphemise euphemize europeanise
europeanize evacuate evade evaluate evanesce evangelise evangelize evaporate
even eventuate evert evict evidence evince eviscerate evoke evolve exacerbate
exact exaggerate exalt examine exasperate excavate exceed excel except excerpt
exchange excise excite exclaim exclude excogitate excommunicate excoriate
excrete excruciate exculpate excuse execrate execute exemplify exempt exenterate
exercise exert exfoliate exhale exhaust exhibit exhilarate exhort exhume exile
exist exit exonerate exorcise exorcize expand expatiate expatriate expect
expectorate expedite expel expend expense experience experiment expiate expire
explain explicate explode exploit explore export expose exposit expostulate
expound express express-mail expropriate expunge expurgate exsert exsiccate
extemporise extemporize extend extenuate exteriorise exteriorize exterminate
externalise externalize extinguish extirpate extol extort extract extradite
extrapolate extravasate extricate extrude exuberate exudate exude exult exuviate
eye eyeball eyewitness fabricate face face-harden face-lift facilitate facsimile
factor factorise factorize fade fag faggot fagot fail faint fair fake falcon
fall falsify falter familiarise familiarize famish fan fancify fancy fantasise
fantasize fantasy farce fare farm farrow fart fascinate fashion fast fasten fat
fate father fathom fatigue fatten fault favor favour fawn fax faze fear feast
feather featherbed feature fecundate federalise federalize federate fee fee-tail
feed feel feign feint felicitate fell fellate felt feminise feminize fence fend
ferment ferret ferry fertilise fertilize fester festinate festoon fetch fete
fetishize fetter fettle feud feudalize fib fibrillate fictionalise fictionalize
fiddle fidget field field-test fight figure filch file filet filiate filibuster
filigree fill fillet film film-make filter filtrate fin finagle finalise
finalize finance financier find fine fine-tune finedraw finger finger-paint
finger-spell fingerprint fingerspell finish fink fire firebomb fireproof firm
fish fishtail fissure fistfight fit fix fixate fizz fizzle flabbergast flag
flagellate flail flake flambe flame flank flap flare flash flash-freeze flat-hat
flatten flatter flaunt flavor flavour flaw flay fleck fledge flee fleece fleer
fleet flense flesh flex flick flicker flight flim-flam flinch fling flip
flip-flop flirt flit flitter float flocculate flock flog flood floodlight floor flop
floss flounce flounder flour flourish flout flow flower flub fluctuate fluff
flummox flump flunk fluoresce fluoridate fluoridise fluoridize flurry flush
fluster flute flutter flux fly fly-fish flyfish foal foam fob focalise focalize
focus fodder fog foil foist fold foliate follow foment fondle fool foolproof
foot footle footnote footslog forage foray forbear forbid force force-feed
force-land ford forearm forebode forecast foreclose foredate foredoom forefend
foregather forego foreground foreknow foreordain foresee foreshadow foreshorten
foreshow forest forestall foreswear foretell forewarn forfeit forfend forgather
forge forget forgive forgo fork form formalise formalize format formicate
formularise formularize formulate fornicate forsake forswear fort fortify
forward fossilise fossilize foster foul found founder fowl fox foxhunt foxtrot
fraction fractionate fracture fragment fragmentise fragmentize frame franchise
frank frap fraternise fraternize fray frazzle freak freckle free free-associate
free-base freelance freeload freewheel freeze freeze-dry freight french
french-fry frenchify frequent fresco freshen fress fret fricassee fright frighten
fringe frisk fritter frivol frizz frizzle frock frog frogmarch frolic front
frost froth frown fructify fruit frustrate fry ftp fuck fuddle fudge fuel fulfil
fulfill full fullback fulminate fumble fume fumigate function fund fund-raise
fundraise funk funnel furbish furcate furl furlough furnish furrow further fuse
fusillade fuss fustigate g.i. gab gabble gad gag gage gaggle gain gainsay gall
gallivant gallop galumph galvanise galvanize gamble gambol game gang gang-rape
gangrene gaol gap gape garage garb garble garden gargle garland garment garner
garnish garnishee garotte garrison garrote garrotte garter gas gasconade gash
gasify gasp gate gate-crash gather gauffer gauge gawk gawp gaze gazette gazump
gear gee gel gelatinise gelatinize geld geminate general generalise generalize
generate gentle gentrify genuflect germinate gerrymander gestate gesticulate
gesture get geyser ghettoise ghettoize ghost ghostwrite gi gibber gibbet gibe
gift gift-wrap giggle gild gimp gin ginger gip gird girdle girth give glaciate
gladden glamorise glamorize glamour glamourise glamourize glance glare glass
glaze gleam glean glide glide-bomb glimmer glimpse glint glissade glisten
glitter gloat globalise globalize globe-trot glom glorify glory gloss glow
glower glue glug glut gluttonise gluttonize glycerolise glycerolize gnarl gnash
gnaw go goad gobble goffer goggle gold-plate goldbrick goldplate golf gong goof
goof-proof goofproof google goose gore gorge gormandise gormandize gossip gouge
gourmandize govern gown grab grace gradate grade graduate graft grain grandstand
grant granulate graph grapple grasp grass grate gratify grave gravel gravitate
gray graze grease green greet grey griddle grieve grill grimace grime grin grind
grip gripe grit grizzle groak groan groin grok groom groove grope gross grouch
ground group grouse grout grovel grow growl grub grubstake grudge grumble grunt
grunt-hoot gruntle guarantee guard guess guesstimate guffaw guggle guide
guillotine gull gulp gum gun gurgle gush gut gutter guttle guy guzzle gybe gyp
gyrate habilitate habit habituate hack hackle haemagglutinate haggle hail hale
halloo hallow hallucinate halt halter halve ham hammer hamper hamstring hand
hand-build hand-dye hand-pick hand-wash handbuild handcolor handcolour handcraft
handcuff handicap handle handstamp handwash handwrite hang hanker hap happen
harangue harass harbinger harbor harbour harden hare hark harken harlequin harm
harmonise harmonize harness harp harpoon harrow harry harshen harvest hash hasp
hassle hasten hat hatch hatchel hate haul haunt have haw hawk hay hazard haze
head headbutt headline headquarter heal heap hear hearken hearten heat heave
heckle hectograph hector hedge hedgehop hee-haw heed heel heft heighten heist
heliograph helm help hem hemagglutinate hemorrhage hemstitch hen-peck henna
herald herd hero-worship hesitate heterodyne hew hex hibachi hibernate hiccough
hiccup hide hie higgle high-tail highjack highlight hightail hijack hike hill
hinder hinge hint hire hiss hit hitch hitchhike hive hoard hoax hob hobble
hobnail hobnob hock hoe hog hog-tie hoist hold hole holiday holler hollo hollow
holystone home home-school homer homestead homogenise homogenize homologise
homologize hone honey honeycomb honeymoon honk honor honour hood hoodoo hoodwink
hoof hook hoop hoot hoover hop hop-skip hope hopple horn hornswoggle horrify
horripilate horse horse-race horse-trade horseshoe horsewhip hose hospitalise
hospitalize host hot-dog hot-wire hot-work hotfoot hound house house-train
housebreak houseclean housekeep hover howl huckster huddle hue huff hug hulk
hull hum humanise humanize humble humbug humidify humify humiliate humor humour
hump hunch hunger hunker hunt hurdle hurl hurrah hurry hurt hurtle husband hush
husk hustle hybridise hybridize hydrate hydrogen-bomb hydrogenate hydrolise
hydrolize hydrolyse hydrolyze hydroplane hygienise hygienize hymn hype
hyperbolise hyperbolize hyperextend hypertrophy hyperventilate hyphen hyphenate
hypnotise hypnotize hypophysectomise hypophysectomize hypostatise hypostatize
hypothecate hypothesise hypothesize ice idealise idealize ideate identify idle
idolise idolize ignite ignore ill-treat ill-use illegalise illegalize illume
illuminate illumine illustrate image imagine imbed imbibe imbricate imbrue imbue
imitate immaterialise immaterialize immerse immigrate immingle immix immobilise
immobilize immolate immortalise immortalize immunise immunize immure impact
impair impale impanel impart impeach impede impel impend imperil impersonate
impinge implant implement implicate implode implore imply import importune
impose impound impoverish imprecate impregnate impress imprint imprison improve
improvise improvize impugn impulse-buy impute inactivate inaugurate incandesce
incapacitate incarcerate incarnadine incarnate incase incense inch incinerate
incise incite incline inclose include incommode inconvenience incorporate
increase incriminate incrust incubate inculcate inculpate incur incurvate
indemnify indent indenture index indicate indict indispose indite individualise
individualize individuate indoctrinate indorse induce induct indue indulge
indurate industrialise industrialize indwell inebriate infatuate infect infer
infest infiltrate infix inflame inflate inflect inflict influence inform infract
infringe infuriate infuscate infuse ingeminate ingest ingraft ingrain ingratiate
ingurgitate inhabit inhale inhere inherit inhibit inhume initial initialise
initialize initiate inject injure ink inlay inmarry innervate innovate inoculate
inosculate input inquire inscribe inseminate insert inset insinuate insist
insolate inspan inspect inspire inspirit inspissate instal install instance
instantiate instigate instil instill institute institutionalise institutionalize
instruct instrument instrumentate insufflate insulate insult insure integrate
intend intensify inter interact interbreed intercalate intercede intercept
interchange intercommunicate interconnect interdepend interdict interest
interfere interiorise interiorize interject interlace interlard interleave
interlink interlock interlope interlude intermarry intermediate intermingle
intermit intermix intern internalise internalize internationalise
internationalize interpellate interpenetrate interpolate interpose interpret
interrelate interrogate interrupt intersect intersperse interstratify intertwine
intervene interview interweave intimate intimidate intonate intone intoxicate
intransitivise intransitivize intrench intrigue introduce introject intromit
introspect introvert intrude intrust intubate intuit intumesce intussuscept
inunct inundate inure invade invaginate invalid invalidate inveigh inveigle
invent inventory invert invest investigate invigilate invigorate invite invoice
invoke involve inweave iodinate iodise iodize ionate ionise ionize iridesce irk
iron irradiate irrigate irritate irrupt islamise islamize isolate isomerise
isomerize issue italicise italicize itch itemise itemize iterate itinerate jab
jabber jack jacket jackknife jacklight jackrabbit jactitate jade jag jail jam
jampack jangle japan jar jaundice jaunt jaw jawbone jaywalk jazz jeer jell
jellify jelly jeopardise jeopardize jerk jest jet jettison jewel jib jibe jig
jiggle jilt jimmy jingle jingle-jangle jinx jitterbug jive job jockey jog joggle
join joint joke jollify jolly jolt josh jostle jot jounce journey joust joy
joyride jubilate judder judge jug juggle julienne jumble jump jump-start
jumpstart junk junket junketeer justify jut juxtapose kayak kayo keel keen keep
kennel keratinise keratinize kern key keynote kibbitz kibitz kibosh kick
kick-start kid kidnap kill kindle kink kip kiss kit kite kitten knap knead kneecap
kneel knell knife knight knit knock knot know knuckle kotow kowtow kvetch label
labialise labialize labor labour lace lacerate lack lacquer lactate ladder lade
laden ladle lag laicise laicize lallygag lam lamb lambast lambaste lame lament
laminate lampoon lance land landscape languish lap lapidate lapidify lapse lard
lark larn larrup lash lasso last latch lateralize lather latinise latinize laud
laugh launch launder lave lavish lay layer laze leach lead leaf league leak lean
leap leapfrog learn lease leash leather leave leaven lecture leech leer legalise
legalize legislate legitimate legitimatise legitimatize legitimise legitimize
lend lengthen lenify lessen let letter levant level lever leverage levitate levy
lexicalise lexicalize liaise libel liberalise liberalize liberate librate
licence license lick lie lifehack lift ligate light lighten lighter lignify like
liken lilt limber lime limit limn limp line linearise linearize linger link
lionise lionize lip-read lip-sync lip-synch lipread lipstick liquefy liquidate
liquidise liquidize liquify lisp list listen literalise literalize lithograph
litigate litter live liven load loaf loan loathe lob lobby localise localize
locate lock locomote lodge loft log log-in logroll loiter loll lollop lollygag
long look loom loop loose loosen loot lop lope lord lose lot louden lounge lour
love low lowball lower lube lubricate lucubrate luff lug luge lull lumber
luminesce lump lunch lunge lurch lure lurk lust lustrate luxate luxuriate lynch
lyophilise lyophilize lyric lysogenize macadamise macadamize macerate
machicolate machinate machine machine-wash macrame maculate madden madder
madrigal magnetise magnetize magnify mail maim mainline maintain major make
maledict malfunction malign malinger malnourish malt maltreat malversate mambo
man manacle manage mandate manducate maneuver mangle manhandle manicure manifest
manifold manipulate manoeuver manoeuvre mantle manufacture manumit manure map
mapquest mar maraud marble marbleise marbleize marcel march marginalise
marginalize marinade marinate mark market maroon marry marshal martyr martyrise
martyrize marvel masculinise masculinize mash mask masquerade mass mass-produce
massacre massage master mastermind masticate masturbate mat match mate
materialise materialize matriculate matt-up matte matter maturate mature maul
maunder maximise maximize mean meander measure mechanise mechanize meddle
mediate medicate medicine meditate meet melanise melanize meld meliorate mellow
melodise melodize melt memorialise memorialize memorise memorize menace mend
menstruate mensurate mention mentor meow mercerise mercerize merchandise merge
merit mesh mesmerise mesmerize mess message metabolise metabolize metal metalize
metallize metamorphose metastasise metastasize meter metricate metricise
metricize metrify mew mewl miaou miaow micro-cook microcopy microfilm microwave
micturate middle miff migrate mildew militarise militarize militate milk mill
mime mimeo mimeograph mimic mince mind mine mineralize mingle miniate
miniaturise miniaturize minify minimise minimize minister minstrel mint mire
mirror misaddress misadvise misalign misally misapply misapprehend
misappropriate misbehave misbelieve miscalculate miscall miscarry miscast
miscegenate misconceive misconduct misconstrue miscount miscreate misdate
misdeal misdeliver misdemean misdirect misdo misestimate misfire misfunction
misgauge misgive misgovern misguide mishandle misidentify misinform misinterpret
misjudge mislay mislead mismanage mismarry mismatch mismate misname misperceive
misplace misplay misprint mispronounce misquote misread misremember misrepresent
miss misspeak misspell misspend misstate mist mistake mistime mistranslate
mistreat mistrust misunderstand misuse miter mitigate mix mizzle moan mob
mobilise mobilize mock model moderate modernise modernize modify modulate moil
moisten moisturise moisturize mold molder molest mollify mollycoddle molt
monetise monetize monger mongrelise mongrelize monish monitor monkey monologuise
monologuize monopolise monopolize monumentalise monumentalize moo mooch moon
moonlight moonshine moor moot mop mope moralise moralize morph mortar mortgage
mortice mortify mortise mosey mosh mothball mother mothproof motion motivate
motley motor motorbike motorboat motorcycle motorise motorize mottle mould
moulder moult mound mount mountaineer mourn mouse mousse mouth move mow muck
muckrake mud mud-wrestle muddle muddy mudwrestle muff muffle mug mulch mulct
mull multiply mumble mummify munch munition murder murk murmur muscle muse mush
mushroom muss mussitate muster mutate mute mutilate mutiny mutter muzzle mystify
mythicise mythicize mythologise mythologize nab nag nail name namedrop nap
narcotise narcotize nark narrate narrow nasalise nasalize nationalise
nationalize natter naturalise naturalize nauseate navigate nazify near neaten
necessitate neck necrose need needle negate negative neglect negociate negotiate
neigh neighbor neighbour nerve nest nestle net netmail nett nettle network
neuter neutralise neutralize nibble nick nickel nickel-and-dime nicker nickname
nictate nictitate niggle nigrify nip nitpick nitrate nitrify nitrogenise
nitrogenize nix nobble nock nod noise nol.pros. nominate nonplus noose normalise
normalize nose nosedive nosh notarise notarize notate notch note notice notify
nourish novate novelise novelize nucleate nudge nuke nullify numb number
numerate nurse nurture nut nutate nutrify nuzzle o.d. o.k. obey obfuscate object
objectify objurgate obligate oblige obliterate obnubilate obscure observe obsess
obsolesce obstinate obstipate obstruct obtain obtrude obtund obturate obviate
occasion occidentalise occidentalize occlude occult occupy occur odorize
odourise off offend offer officer officialise officialize officiate offload
offsaddle offset ogle oil oink okay omen omit one-step ooh ooze opacify opalesce
opalise opalize open operate opine oppose oppress oppugn opsonize opt optimise
optimize orate orb orbit orchestrate ordain order ordinate organise organize
orient orientalise orientalize orientate originate ornament orphan oscillate
osculate ossify ostentate ostracise ostracize oust out out-herod outbalance
outbid outbrave outcall outclass outcrop outcry outdistance outdo outdraw
outface outfight outfit outflank outfox outgeneral outgo outgrow outguess
outlast outlaw outline outlive outmaneuver outmanoeuvre outmarch outmatch
outmode outnumber outpace outperform outplay outpoint output outrage outrange
outrank outride outrival outroar outrun outsail outscore outsell outshine
outshout outsmart outsource outspan outstare outstay outstrip outvie outvote
outwear outweigh outwit ovenbake over-correct over-refine overachieve overact
overarch overawe overbalance overbear overbid overboil overburden overcapitalise
overcapitalize overcast overcharge overclothe overcloud overcome overcompensate
overcook overcrop overcrowd overcultivate overdo overdose overdramatise
overdramatize overdraw overdress overdrive overeat overemphasise overemphasize
overestimate overexert overexploit overexpose overextend overfatigue overfeed
overfill overflow overfly overgeneralise overgeneralize overgorge overgrow
overhang overhaul overhear overheat overindulge overjoy overlap overlay overleap
overlie overload overlook overmaster overpay overplay overpopulate overpower
overpraise overprice overprint overproduce overprotect overrate overreach
overreact overrefine override overrule overrun oversee oversew overshadow
overshoot oversimplify oversleep overspecialise overspecialize overspend
overspread overstate overstay overstep overstock overstrain overstress
overstretch overstuff oversupply overtake overtax overthrow overtire overtop
overtrump overturn overuse overvalue overweary overwhelm overwinter overwork
overwrite ovulate owe own oxidate oxidise oxidize oxygenate oxygenise oxygenize
oyster ozonise ozonize pace pacify pack package pad paddle padlock paganise
paganize page paginate pain paint pair pal palatalise palatalize palaver pale
palisade pall palliate palm palpate palpebrate palpitate palsy palter pamper pan
pan-broil pan-fry pander panel panhandle panic pant pant-hoot pantomime paper
par parachute parade paragraph parallel parallel-park parallelize paralyse
paralyze paraphrase parboil parcel parch pardon pare parent parget park parlay
parley parody parole parrot parry parse part partake parti-color participate
particularise particularize partition partner party pass paste pasteurise
pasteurize pasture pat patch patent patinate patinise patinize patrol patronage
patronise patronize patter pattern pattern-bomb pauperise pauperize pause pave
paw pawn pay peach peak peal pearl peck peculate pedal peddle pedicure pee
pee-pee peek peel peep peer peeve peg pelt pen penalise penalize pencil penetrate
penny-pinch pension people pepper peptise peptize perambulate perceive perch
percolate percuss peregrinate perennate perfect perforate perform perfume
perfuse peril perish perjure perk perm permeate permit permute perorate peroxide
perpetrate perpetuate perplex persecute perseverate persevere persist
personalise personalize personate personify perspire persuade pertain perturb
peruse pervade pervaporate pervert pester pestle pet petition petrify pettifog
phase philander philosophise philosophize phlebotomise phlebotomize phonate
phone phosphoresce photocopy photograph photosensitise photosensitize photostat
phrase pick picket pickle picnic picture piddle piece piece-dye pierce piffle
pig pigeonhole piggyback pigment pile pilfer pillage pillory pillow pilot pimp
pin pinch pine ping pinion pink pinkify pinnacle pinpoint pioneer pip pipe
pipe-clay pique pirate pirouette piss pistol-whip pit pitch pitchfork pith
pitter-patter pity pivot placard placate place place-kick plagiarise plagiarize plague
plain plait plan plane plank plant plash plaster plasticise plasticize
plastinate plat plate platinize platitudinize play playact plea-bargain pleach
plead please pleat pledge plicate plight plod plonk plop plot plough plow pluck
plug plumb plume plummet plump plunder plunge plunk pluralise pluralize ply
poach pock pocket pockmark pod podcast poetise poetize point poise poison poke
polarise polarize pole poleax poleaxe polemicise polemicize polemise polemize
police polish politicise politicize politick polka poll pollard pollenate
pollinate pollute polychrome polychromise polychromize polymerise polymerize
pomade pommel pompadour ponder poniard pontificate pooch pooh-pooh pool pop
popularise popularize populate porcelainize pore port portend porter portion
portray pose posit position possess post postdate postmark postpone postpose
postulate posture pot potentiate pother potter pouch poultice pounce pound pour
pout powder powderise powderize power power-dive powerwash powwow practice
practise praise prance prang prank prate prattle prawn pray preach preachify
preamble prearrange preassemble precede precess precipitate precis preclude
preconceive precondition precook predate predecease predestinate predestine
predetermine predicate predict predigest predispose predominate preempt preen
preexist prefabricate preface prefer prefigure prefix preform preheat prehend
preisolate prejudge prejudice prelude premeditate premier premiere premise
premiss preoccupy preordain prepare prepay preponderate prepose prepossess
prerecord presage prescribe present preserve preside press pressure
pressure-cook pressure-wash pressurise pressurize presume presuppose pretend pretermit
prettify prevail prevaricate prevent preview previse prey price prick prickle
pride prim prime primp prink print prioritise prioritize prise privatise
privatize privilege prize prizefight probate probe proceed process proclaim
procrastinate procreate proctor procure prod produce profane profess
professionalise professionalize proffer profile profit profiteer prognosticate
program programme progress prohibit project prolapse proliferate prologise
prologize prologuize prolong promenade promise promote prompt promulgate pronate
pronk pronounce proof proofread prop propagandise propagandize propagate propel
prophesy propitiate proportion propose proposition propound prorate prorogue
proscribe prosecute proselytise proselytize prospect prosper prostitute
prostrate protect protest protract protrude protuberate prove provide provision
provoke prowl prune pry psalm psychoanalyse psychoanalyze ptyalise ptyalize
pub-crawl publicise publicize publish pucker puddle puff puke pule pull pullulate
pulp pulsate pulse pulverise pulverize pumice pummel pump pun punch punctuate
puncture punish punt pup pupate purchase puree purge purify purl purloin purple
purport purpose purpurate purr purse pursue purvey push pussyfoot put putrefy
putt putter putty puzzle pyramid quack quadruple quadruplicate quaff quail quake
qualify quantify quantise quantize quarantine quarrel quarry quarter quarterback
quash quaver queen queer quell quench query quest question quetch queue quibble
quick-freeze quicken quickstep quiesce quiet quieten quilt quintuple quip quirk
quit quiver quiz quote rabbet rabbit race rack racket racketeer raddle radiate
radicalize radio radiolocate raffle raft rafter rag rage raid rail railroad
raiment rain raise rake rally ram ramble ramify ramp rampage ranch randomise
randomize range rank rankle ransack ransom rant rap rape rappel rarefy rarify
rase rasp rasterize rat ratchet rate ratify ratiocinate ration rationalise
rationalize rattle ravage rave ravel raven ravish ray raze razor razz re-address
re-afforest re-argue re-arm re-assume re-create re-emerge re-emphasise
re-emphasize re-enter re-equip re-examine re-explain re-explore re-incorporate
re-introduce re-start reabsorb reach reacquaint react reactivate read readapt
readjust readmit ready reaffirm realign realine realise realize reallocate
reallot ream reanimate reap reappear reapportion reappraise rear rear-end rearm
rearrange reason reassail reassemble reassert reassess reassign reassure
reattribute reave reawaken rebate rebel rebind reboot rebound rebroadcast rebuff
rebuild rebuke rebury rebut recalcitrate recalculate recall recant recap
recapitulate recapture recast recede receipt receive recess recharge recidivate
reciprocate recite reckon reclaim reclassify recline recode recognise recognize
recoil recollect recombine recommence recommend recommit recompense reconcile
recondition reconfirm reconnoiter reconnoitre reconquer reconsecrate reconsider
reconstitute reconstruct reconvene reconvert reconvict recopy record recount
recoup recover recreate recriminate recrudesce recruit rectify recumb recuperate
recur recurve recuse recycle red-eye red-ink redact redden rede redecorate
rededicate redeem redefine redeploy redeposit redesign redetermine redevelop
redirect rediscover redispose redistribute redline redo redouble redound redress
reduce reduplicate reecho reef reek reel reelect reenact reenforce reestablish
reevaluate reeve reexamine reface refashion refer referee reference refill
refinance refine refinish refit reflate reflect reflectorise reflectorize
refloat refocus reforest reforge reform reformulate refract refracture refrain
refresh refreshen refrigerate refuel refund refurbish refurnish refuse refute
regain regale regard regenerate regiment register regorge regress regret regroup
regrow regularise regularize regulate regurgitate rehabilitate reharmonise
reharmonize rehash rehear rehearse reheat reheel rehouse reify reign reignite
reimburse reimpose rein reincarnate reinforce reinstall reinstate reinsure
reintegrate reinterpret reintroduce reinvent reinvigorate reissue reiterate
reject rejig rejoice rejoin rejuvenate rekindle relace relapse relate relativise
relativize relax relay relearn release relegate relent relieve reline relinquish
relish relive reload relocate rely relyric remain remainder remake remand remark
remarry remediate remedy remember remilitarise remilitarize remind reminisce
remit remodel remold remonstrate remould remount remove remunerate rename rend
render render-set rendezvous renegade renege renegociate renegotiate renew
renormalise renormalize renounce renovate rent reopen reorder reorganise
reorganize reorient reorientate repaint repair repatriate repay repeal repeat
repel repent repercuss rephrase repine replace replant replay replenish replete
replicate reply repoint report repose reposit reposition repossess repot
reprehend represent repress reprieve reprimand reprint reprise reprize reproach
reprobate reprocess reproduce reproof reprove republish repudiate repugn repulse
repurchase repute request require requisition requite reread rerun rescale
reschedule rescind rescue reseal research reseat resect reseed resell resemble
resent reserve reset resettle resew reshape reship reshoot reshuffle reside
resift resign resile resinate resist resize resmudge resole resolve resonate
resorb resort resound respect respire respite resplend respond rest restart
restate restitute restock restore restrain restrengthen restrict restructure
resublime resubmit result resume resurface resurge resurrect resuscitate
resuspend ret retail retain retake retaliate retard retch retell rethink retick
reticulate retie retire retool retort retouch retrace retract retrain
retranslate retransmit retread retreat retrench retrieve retrofit retroflex
retrograde retrogress retrospect retrovert retry return reunify reunite reuse
rev revalue revamp reveal revel revenge reverberate revere reverence reverse
revert revet review revile revise revisit revitalise revitalize revive revivify
revoke revolt revolutionise revolutionize revolve reward rewire reword rework
rewrite rhapsodise rhapsodize rhumba rhyme rib rice rick ricochet rid riddle
ride ridge ridicule riff riffle rifle rig right rigidify rile rim rime ring
rinse riot rip ripen riposte ripple rise risk ritualise ritualize rival rive
rivet roach roam roar roast rob robe rock rocket roil roister roleplay roll
rollerblade rollick romance romanise romanize romanticise romanticize romp roneo
roof rook room roost root rootle rope rosin rot rotate rouge rough rough-dry
rough-hew rough-house rough-sand roughcast roughen roughhouse round rouse rout
route rove row rub rubber rubberise rubberize rubberneck rubberstamp rubbish
rubify rubric rubricate ruck ruckle ruddle rue ruff ruffle ruggedise ruggedize
ruin rule rumba rumble ruminate rummage rumor rumour rumple rumpus run rupture
rush rust rusticate rustle rut saber sabotage sabre saccharify sack sacrifice
sadden saddle safeguard sag sail sailplane saint salaam salinate salivate sallow
salt saltate salute salvage salve samba sample sanctify sanction sand sandbag
sandblast sandpaper sandwich sanitate sanitise sanitize sap saponify sashay sass
sate satellite satiate satirise satirize satisfice satisfise satisfy saturate
sauce saunter saute savage save savor savour savvy saw say scab scaffold scald
scale scallop scalp scam scamp scamper scan scandalise scandalize scant scar
scare scarf scarify scarper scat scatter scavenge scend scent schedule
schematise schematize scheme schlep schmoose schmooze schnorr school schuss
scintillate scissor sclaff scoff scold scollop scoop scoot scorch score scorn
scotch scour scourge scout scowl scrabble scrag scram scramble scranch scrap
scrape scratch scraunch scrawl screak scream screech screen screw scribble
scribe scrimmage scrimp scrimshank script scroll scrounge scrub scrunch scruple
scrutinise scrutinize scry scud scuff scuffle scull sculpt sculpture scum
scupper scurry scuttle scythe seal seam seaplane sear search season seat secede
secern secernate seclude second second-guess secrete section sectionalise
sectionalize secularise secularize secure sedate sediment seduce see seed seek
seel seem seep seesaw seethe segment segregate segue seine seize select
self-destroy self-destruct sell sellotape semaphore semi-automatise semi-automatize
send senesce sense sensibilise sensibilize sensify sensitise sensitize
sensualise sensualize sentence sentimentalise sentimentalize sentimentise
sentimentize separate sequence sequester sequestrate serenade serialise
serialize sermonise sermonize serrate serve service set settle sever severalise
severalize sew sex sexualise sexualize shack shackle shade shadow shadowbox
shaft shag shake shallow sham shamanise shamanize shamble shame shampoo shanghai
shank shape share shark sharpen sharpshoot shatter shave she-bop shear sheathe
shed sheer sheet shell shellac shellack shelter shelve shepherd shew shield
shift shill shillyshally shimmer shimmy shin shine shingle shinny ship shipwreck
shirk shirr shirt shit shiver shlep shmoose shmooze shnorr shoal shock shoe
shoehorn shoetree shoo shoot shop shoplift shore short short-change
short-circuit shorten shortlist shoulder shout shove shovel show shower shred shriek
shrill shrimp shrine shrink shrinkwrap shrive shrivel shroud shrug shuck shudder
shuffle shun shunt shush shut shutter shuttle shuttlecock shy sibilate sic sick
sicken side side-slip sideline sidestep sideswipe sidetrack sidle sieve sift
sigh sight sight-read sight-sing sightread sightsee sightsing sign signal
signalise signalize signify signpost silence silhouette silkscreen silt silver
silver-plate silverplate simmer simonise simonize simper simplify simulate sin
sing singe single single-foot singsong singularise singularize sink sinter sip
siphon sire siss sit site situate size sizz sizzle skank skate skateboard
skedaddle sketch skew skewer ski skid skim skimcoat skimp skin skin-dive
skinny-dip skip skip-bomb skipper skirl skirmish skirt skitter skittle skive skreak
skreigh skulk skunk sky skydive skyjack skylark skyrocket slabber slack slacken
slag slake slalom slam slam-dunk slander slang slant slap slash slat slate
slather slaughter slave slaver slay sled sledge sledgehammer sleek sleep
sleepwalk sleet sleigh slenderise slenderize sleuth slew slice slick slide
slight slim slime sling slink slip slit slither sliver slobber slog sloganeer
slop slope slosh slot slouch slough slow slue slug sluice slum slumber slump
slur slurp slush smack smart smash smatter smear smell smelt smile smirch smirk
smite smock smoke smolder smooch smooth smoothen smother smoulder smudge smuggle
smut smutch snack snaffle snafu snag snail snake snap snare snarf snarl snatch
sneak sneer sneeze snick snicker sniff sniffle snigger snip snipe snitch snivel
snog snooker snoop snooze snore snorkel snort snow snow-blind snowball snowboard
snowmobile snowshoe snub snuff snuffle snuggle soak soap soar sob sober
socialise socialize sock sod sodomise sodomize soft-pedal soft-soap soft-solder
soften soil sojourn solace solarise solarize solder soldier sole solemnise
solemnize solicit solidify soliloquise soliloquize solmizate solo solvate solve
somersault somnambulate sonnet soot soothe sop sophisticate sorb sorcerise
sorcerize sorrow sort sough sound soundproof soup sour source souse sovietise
sovietize sow space spacewalk spade spam span spang spangle spank spar spare
sparge spark sparkle spat spatchcock spatter spawn spay speak spear spearhead
specialise specialize speciate specify speck speckle spectate speculate
speech-read speechify speed spell spellbind spelunk spend spew sphacelate spice spiel
spike spill spin spin-dry spiral spirit spiritise spiritize spiritualise
spiritualize spirt spit spite splash splat splatter splay splice splint splinter
split splosh splotch splurge splutter spoil spondaise spondaize sponge sponsor
spoof spook spool spoon spoonfeed sport sportscast sporulate spot spot-check
spot-weld spotlight spotweld spout sprain sprawl spray spray-dry spread
spread-eagle spreadeagle spree spring spring-clean sprinkle sprint spritz sprout spruce
spud spue spume spur spurn spurt sputter spy squabble squall squander square
squash squat squawk squeak squeal squeegee squeeze squelch squinch squint squire
squirm squirt squish stab stabilise stabilize stable stack staff stag stage
stagger stagnate stain stake stale stalemate stalinise stalinize stalk stall
stammer stamp stampede stanch stand standardise standardize staple star
starboard starch stare stargaze start startle starve stash state station staunch
stave stay steady steal steam steam-heat steamer steamroll steamroller steel
steep steepen steer stem stencil stenograph step stereotype sterilise sterilize
stet stew stick stickle stiffen stifle stigmatise stigmatize still still-fish
still-hunt stimulate sting stink stint stipple stipulate stir stitch stock
stock-take stockade stockpile stoke stomach stomp stone stone-wash stonewall
stonewash stooge stool stoop stop stopper stopple store storm stow straddle
strafe straggle straighten strain straiten strand strangle strangulate strap
stratify straw stray streak stream streamline street-walk streetwalk strengthen
stress stretch strew striate strickle stride stridulate strike string strip
strip-search stripe strive stroke stroll strong-arm strop structure struggle
strum strut stub stucco stud study stuff stultify stumble stump stun stunt
stupefy stutter style stylise stylize stymie stymy sub subcontract subdivide
subdue subedit subject subjoin subjugate sublease sublet sublimate sublime
subluxate submarine submerge submerse submit subordinate suborn subpoena
subrogate subscribe subserve subside subsidise subsidize subsist substantiate
substitute subsume subtend subtilise subtilize subtitle subtract suburbanise
suburbanize subvent subvention subvert subvocalise subvocalize succeed succor
succour succumb succuss suck suckle suction sudate suds sue suffer suffice
suffix suffocate suffuse sugar sugarcoat suggest suit sulfate sulfur sulfurette
sulk sully sulphur sulphurette sum summarise summarize summate summer summerise
summerize summit summon summons sun sunbathe sunburn sunder suntan sup
superannuate supercede supercharge superfetate superimpose superinfect
superintend superordinate superpose superscribe supersede supervene supervise
supinate supplant supple supplement supplicate supply support suppose suppress
suppurate surcharge surf surface surface-mine surfboard surfeit surge surmise
surmount surpass surprise surrender surround surtax surveil survey survive
suspect suspend suspire sustain susurrate suture swab swaddle swag swage swagger
swallow swamp swan swank swap swarm swash swat swathe swatter sway swear sweat
sweep sweet-talk sweeten swell swelter swerve swig swill swim swindle swing
swinge swipe swirl swish switch switch-hit swivel swob swoon swoop swoosh swop
swosh swot syllabicate syllabify syllabise syllabize syllogise syllogize
symbolise symbolize symmetrise symmetrize sympathise sympathize symphonise
symphonize sync synchronise synchronize syncopate syncretise syncretize
syndicate synthesise synthesize syphon syringe systematise systematize systemise
systemize table taboo tabularise tabularize tabulate tack tackle tag tail
tailgate tailor tailor-make taint take talc talk tally tame tamp tamper tampon
tan tangle tango tank tantalise tantalize tap tapdance tape taper tar
tar-and-feather target tariff tarmac tarnish tarry task taste tat tattle tattoo taunt
tauten tax taxi teach team tear teargas tease tee teem teeter teeter-totter
teetertotter teethe teetotal telecast telecommunicate telefax telegraph
telepathise telepathize telephone teleport telescope televise telex tell temper
temporise temporize tempt tenant tend tender tenderise tenderize tense tent
tenure tergiversate term terminate terrace terrasse terrify territorialise
territorialize terrorise terrorize tessellate test testify tether thank thatch
thaw theme theologise theologize theorise theorize thermostat thicken thieve
thin think thirst thoriate thrash thread threaten thresh thrill thrive throb
thrombose throne throng throttle throw thrum thrust thud thumb thumbtack thump
thunder thurify thwack thwart tick ticket tickle ticktack ticktock tide tidy tie
tie-dye tighten tile till tiller tilt time tin tinct tincture ting tinge tingle
tink tinker tinkle tinsel tint tintinnabulate tip tipple tippytoe tiptoe tire
tissue tithe titillate titivate title titrate titter tittivate tittle-tattle
tittup toady toast toboggan toddle toe toenail tog toggle toil toilet-train
tolerate toll tomahawk tone tongue tongue-tie tonsure tool toot tootle top
topdress tope topicalize topple torch torment torpedo torture toss tot total
totalise totalize tote totter touch touch-type toughen tour tourney tousle tout
tow towel tower toy trace track trade trademark traduce traffic trail train
traipse tram trammel tramp trample trance tranquilize tranquillise tranquillize
transact transaminate transcend transcribe transduce transect transfer
transfigure transfix transform transfuse transgress transistorise transistorize
transit transition transitivise transitivize translate transliterate translocate
transmigrate transmit transmogrify transmute transpirate transpire transplant
transport transpose transship transubstantiate transude trap trash traumatise
traumatize travail travel traverse travesty trawl tread treadle treasure treat
treble tree trek trellis tremble tremor trench trend trepan trephine trespass
triangulate tribulate trice trick trickle trifle trifurcate trigger trill trim
trip triple triple-space triple-tongue triplicate trisect triumph trivialise
trivialize troat troll troop trot trouble trouble-shoot troubleshoot trounce
trowel truck truckle trudge true trump trumpet truncate trundle truss trust try
tsk tube tuck tucker tug tumble tumefy tumesce tune tunnel turf turn turtle tusk
tussle tut tut-tut tutor twaddle twang tweak tweedle tweet tweeze twiddle twig
twill twin twine twinge twinkle twirl twirp twist twit twitch twitter two-step
two-time type typecast typeset typewrite typify tyrannise tyrannize uglify
ulcerate ultracentrifuge ululate umpire unarm unbalance unbar unbelt unbend
unbind unblock unbolt unbosom unbox unbrace unbraid unbridle unbuckle unburden
unbutton uncase unchain unchurch unclasp unclip uncloak unclog unclothe
unclutter uncoil uncompress uncork uncouple uncover uncrate uncross uncurl
undeceive underachieve underact underbid undercharge undercoat undercut
underdevelop underdress underestimate underexpose undergird undergo undergrow
underlay underlie underline undermine undernourish underpay underperform
underpin underplay underprice underproduce underquote underrate underscore
undersell undershoot undersign underspend understand understate understock
understudy undertake undervalue underwrite undo undock undrape undress undulate
unearth unfasten unfit unfold unfreeze unfrock unfurl unhallow unhand unharness
unhinge unhitch unhook unhorse unicycle uniform uniformise uniformize unify
unionise unionize unite unitise unitize universalise universalize unknot unlace
unlade unlash unlax unlearn unleash unlive unload unlock unloose unloosen unmake
unman unmask unmuzzle unnerve unpack unpick unpin unplug unravel unreel unroll
unsaddle unsanctify unsay unscramble unscrew unseal unseat unsettle unsex
unsheathe unsnarl unsolder unspell unstaple unstrain unstrap unstring unstuff
unsubstantialise unsubstantialize untangle unteach unthaw untie untune untwine
untwist unveil unweave unwind unwire unwrap unyoke unzip up upbraid upchuck
update upend upgrade upheave uphold upholster uplift upload upraise uprise
uproot upset upstage urbanise urbanize urge urinate urticate use usher usurp
utilise utilize utter vacate vacation vaccinate vacillate vacuum vacuum-clean
vagabond valet validate valuate value vamoose vamp vandalise vandalize vanish
vanquish vaporise vaporize variegate varnish vary vascularise vascularize
vasectomise vasectomize vaticinate vault vaunt veer vegetate veil vein velcro
vellicate vend veneer venerate venesect vent ventilate venture verbalise
verbalize verbify verdigris verge verify vermiculate vermilion verse versify
vesicate vesiculate vest vesture vet veto vex vibrate victimise victimize
victual videotape vie view vilify vilipend vindicate vinify violate virilise
virilize visa visit visualise visualize vitalise vitalize vitaminise vitaminize
vitiate vitrify vitriol vituperate vivify vivisect vocalise vocalize vociferate
voice void volatilise volatilize volley volunteer vomit voodoo vote vouch
vouchsafe vow vowelise vowelize voyage vroom vulcanise vulcanize vulgarise
vulgarize wad waddle wade waffle waft wag wage wager waggle wail wait waitress
waive wake waken walk wall wallop wallow wallpaper waltz wamble wan wander wane
wangle wank want wanton war warble ward ware warehouse warm warn warp warrant
wash wassail waste watch water water-wash watercolor watercolour waterproof
wattle waul wave waver wawl wax waylay weaken wean weaponize wear weary weather
weatherproof weatherstrip weave web wed wedel wedge wee wee-wee weed weekend
weep weigh weight weight-lift weightlift welch welcome weld well welsh welt
welter wench wend westernise westernize wet wet-nurse whack whale wham whang
whap wharf wheedle wheel wheelbarrow wheeze whelk whelm whelp whet whicker whiff
whimper whine whinny whip whipsaw whir whirl whirligig whirlpool whirr whish
whisk whisker whisper whistle whistlestop white white-out whiten whiteout
whitewash whittle whiz whizz wholesale whomp whoop whoosh whop whore widen widow
wield wiggle wigwag will wilt win wince winch wind window-dress window-shop
windsurf wine wing wink winkle winnow winter winterise winterize wipe wire
wiretap wisecrack wish witch withdraw wither withhold withstand witness wive
wobble wolf wolf-whistle womanise womanize wonder woo woolgather woosh word work
worm worry worsen worship worst wound wow wrack wrangle wrap wrawl wreak wreathe
wreck wrench wrest wrestle wrick wriggle wring wrinkle write writhe wrong x-ray
xerox yacht yack yak yammer yank yap yarn yarn-dye yaup yaw yawl yawn yawp yearn
yell yellow yelp yen yield yip yodel yoke yowl zap zero zest zigzag zinc zip
zipper zone zoom
""" |> String.split(~r/\s+/) |> MapSet.new(&String.trim/1)
def all do
@verbs_set
end
end
|
lib/en/verbs.ex
| 0.522933
| 0.402891
|
verbs.ex
|
starcoder
|
defmodule FusionAuth.Plugs.AuthorizeJWT do
@moduledoc """
The `FusionAuth.Plugs.AuthorizeJWT` module provides authentication of JWT tokens on incoming requests.
## Examples
```
config/{env}.exs
config :fusion_auth,
token_header_key: "authorization"
```
```
lib/my_web_server/router.ex
defmodule MyWebServer.Router do
use MyWebServer, :router
pipeline :protected do
plug(FusionAuth.Plugs.AuthorizeJWT)
end
end
```
## Plug Options
- client :: FusionAuth.client(String.t(), String.t(), String.t()) // default FusionAuth.client()
- conn_key :: atom() // default :user
- atomize_keys :: boolean() // default true
- case_format :: :underscore | :camelcase // default :underscore
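For example, a sketch of overriding the defaults (the option values shown are illustrative):
```
plug(FusionAuth.Plugs.AuthorizeJWT, conn_key: :current_user, atomize_keys: false)
```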
"""
alias FusionAuth.Utils
@default_options [
client: nil,
conn_key: :user,
atomize_keys: true,
case_format: :underscore
]
@formatter [
underscore: &Recase.to_snake/1,
camelcase: &Recase.to_camel/1
]
@spec init(keyword()) :: keyword()
def init(opts \\ []), do: opts
@spec call(%Plug.Conn{}, keyword()) :: %Plug.Conn{}
def call(conn, opts \\ []) do
options = Keyword.merge(@default_options, opts)
client = options[:client] || FusionAuth.client()
with {:ok, token} <- Utils.fetch_token(conn),
{:ok, claims} <- verify_token(client, token) do
Plug.Conn.assign(
conn,
options[:conn_key],
format(claims, options[:atomize_keys], options[:case_format])
)
else
_ ->
conn
|> Plug.Conn.halt()
|> Plug.Conn.send_resp(401, "Unauthorized")
end
end
defp format(claims, false, key_format),
do:
claims
|> Recase.Enumerable.convert_keys(@formatter[key_format])
defp format(claims, true, key_format),
do:
claims
|> Recase.Enumerable.atomize_keys(@formatter[key_format])
defp verify_token(client, token) do
case FusionAuth.JWT.validate_jwt(client, token) do
{:ok, %{"jwt" => claims}, _} -> {:ok, claims}
_ -> :error
end
end
end
|
lib/fusion_auth/plugs/authorize_jwt.ex
| 0.881079
| 0.676874
|
authorize_jwt.ex
|
starcoder
|
defmodule Fares do
@moduledoc """
Handling logic around fares.
"""
alias Routes.Route
alias Schedules.Trip
alias Stops.Stop
alias Zones.Zone
@silver_line_rapid_transit ~w(741 742 743 746)
@silver_line_rapid_transit_set MapSet.new(@silver_line_rapid_transit)
@express_routes ~w(170 325 326 351 352 354 426 428 434 450 459 501 502 503 504 505)
@express_route_set MapSet.new(@express_routes)
@foxboro_reverse_commute ~w(741 743 745 750 752 754 756)
@foxboro_reverse_commute_set MapSet.new(@foxboro_reverse_commute)
@terminus_stops ["place-sstat", "place-north", "North Station", "South Station"]
@type ferry_name ::
:ferry_cross_harbor
| :ferry_inner_harbor
| :commuter_ferry_logan
| :commuter_ferry
| :ferry_george
@type fare_type :: :highest_one_way_fare | :lowest_one_way_fare | :reduced_one_way_fare
@doc """
Calculate the fare between a pair of stops.
NB: origin and destination can be Stop IDs or names.
"""
@spec fare_for_stops(
:commuter_rail | :ferry,
String.t(),
String.t(),
String.t() | Trip.t() | nil
) ::
{:ok, Fares.Fare.fare_name()}
| :error
def fare_for_stops(route_type_atom, origin, destination, trip_details \\ nil)
def fare_for_stops(:commuter_rail, origin, destination, trip_details) do
with origin_zone when not is_nil(origin_zone) <- zone_for_stop(origin),
dest_zone when not is_nil(dest_zone) <- zone_for_stop(destination) do
cond do
foxboro_pilot?(trip_details) ->
{:ok, calculate_foxboro_zones(origin_zone, dest_zone)}
Zone.combo_zone?(origin_zone) ->
{:ok, calculate_combo(origin_zone, dest_zone, destination)}
Zone.combo_zone?(dest_zone) ->
{:ok, calculate_combo(dest_zone, origin_zone, origin)}
true ->
{:ok, calculate_commuter_rail(origin_zone, dest_zone)}
end
else
_ -> :error
end
end
def fare_for_stops(:ferry, origin, destination, _) do
{:ok, calculate_ferry(origin, destination)}
end
defp zone_for_stop(stop_id) do
case Stops.Repo.get(stop_id) do
%{zone: zone} -> zone
_ -> nil
end
end
@spec calculate_commuter_rail(any, any) :: {:interzone, binary} | {:zone, any}
def calculate_commuter_rail(start_zone, "1A") do
{:zone, start_zone}
end
def calculate_commuter_rail("1A", end_zone) do
{:zone, end_zone}
end
def calculate_commuter_rail(start_zone, end_zone) do
# we need to include all zones travelled through, i.e. zone 3 -> zone 5 spans 3 zones
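# e.g. start_zone "3", end_zone "5": abs(3 - 5) + 1 = 3, giving {:interzone, "3"}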
total_zones = abs(String.to_integer(start_zone) - String.to_integer(end_zone)) + 1
{:interzone, "#{total_zones}"}
end
@spec calculate_combo(Zone.t(), Zone.t(), Stop.id_t()) :: {:interzone, binary} | {:zone, any}
defp calculate_combo(combo_zone, _other_zone, other_stop_id)
when other_stop_id in @terminus_stops do
{:zone, Zone.terminus_zone(combo_zone)}
end
defp calculate_combo(combo_zone, other_zone, _other_stop_id) do
general_combo_zone = Zone.general_zone(combo_zone)
general_other_zone = Zone.general_zone(other_zone)
calculate_commuter_rail(general_combo_zone, general_other_zone)
end
def calculate_foxboro_zones(start_zone, "1A") when start_zone != "1A" do
calculate_commuter_rail(start_zone, "1")
end
def calculate_foxboro_zones("1A", end_zone) when end_zone != "1A" do
calculate_commuter_rail("1", end_zone)
end
def calculate_foxboro_zones(start_zone, end_zone) do
calculate_commuter_rail(start_zone, end_zone)
end
@spec calculate_ferry(String.t(), String.t()) :: ferry_name
defp calculate_ferry(origin, destination)
when "Boat-George" in [origin, destination] do
:ferry_george
end
defp calculate_ferry(origin, destination)
when "Boat-Charlestown" in [origin, destination] and "Boat-Logan" in [origin, destination] do
:ferry_cross_harbor
end
defp calculate_ferry(origin, destination)
when "Boat-Long" in [origin, destination] and "Boat-Logan" in [origin, destination] do
:ferry_cross_harbor
end
defp calculate_ferry(origin, destination)
when "Boat-Charlestown" in [origin, destination] and
"Boat-Long-South" in [origin, destination] do
:ferry_inner_harbor
end
defp calculate_ferry(origin, destination) when "Boat-Logan" in [origin, destination] do
:commuter_ferry_logan
end
defp calculate_ferry(_origin, _destination) do
:commuter_ferry
end
@spec foxboro_pilot?(Trip.t() | nil) :: boolean
defp foxboro_pilot?(%Trip{name: id, id: "CR-Weekday-Fall-19" <> _}),
do: id in @foxboro_reverse_commute_set
defp foxboro_pilot?(_), do: false
@spec silver_line_rapid_transit?(Route.id_t()) :: boolean
def silver_line_rapid_transit?(<<id::binary>>),
do: id in @silver_line_rapid_transit_set
@spec silver_line_airport_stop?(Route.id_t(), Stop.id_t() | nil) :: boolean
def silver_line_airport_stop?(route_id, origin_id)
def silver_line_airport_stop?(_, nil), do: false
def silver_line_airport_stop?("741", "17091"), do: true
def silver_line_airport_stop?("741", "27092"), do: true
def silver_line_airport_stop?("741", "17093"), do: true
def silver_line_airport_stop?("741", "17094"), do: true
def silver_line_airport_stop?("741", "17095"), do: true
def silver_line_airport_stop?(<<_route_id::binary>>, <<_origin_id::binary>>), do: false
@spec express?(Route.id_t()) :: boolean
def express?(<<id::binary>>), do: id in @express_route_set
def silver_line_rapid_transit, do: @silver_line_rapid_transit
def express, do: @express_routes
@type fare_atom :: Route.gtfs_route_type() | :express_bus
@spec to_fare_atom(fare_atom | Route.id_t() | Route.t()) :: fare_atom
def to_fare_atom(route_or_atom) do
case route_or_atom do
%Route{type: 3, id: id} ->
cond do
silver_line_rapid_transit?(id) -> :subway
express?(id) -> :express_bus
true -> :bus
end
%Route{} ->
Route.type_atom(route_or_atom)
<<id::binary>> ->
Routes.Repo.get(id) |> to_fare_atom
_ ->
route_or_atom
end
end
@spec get_fare_by_type(TripPlan.Leg.t(), fare_type) :: Fares.Fare.t() | nil
def get_fare_by_type(leg, fare_type) do
leg
|> Kernel.get_in([
Access.key(:mode, %{}),
Access.key(:fares, %{}),
Access.key(fare_type)
])
end
end
|
apps/fares/lib/fares.ex
| 0.709523
| 0.42185
|
fares.ex
|
starcoder
|
defmodule Optimal do
@moduledoc """
Optimal validates keyword lists of options against a schema. Build a schema with
`Optimal.schema/1`, then check options with `validate/2` or `validate!/2`.
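For example:
iex> Optimal.validate!([reticulate_splines?: true], opts: [:reticulate_splines?])
[reticulate_splines?: true]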
"""
@type schema() :: Optimal.Schema.t()
@type error :: {atom, String.t()}
@type validation_result :: {:ok, Keyword.t()} | {:error, [error]}
@doc "See `Optimal.Schema.new/1`"
defdelegate schema(opts), to: Optimal.Schema, as: :new
defdelegate schema(), to: Optimal.Schema, as: :new
@doc "See `Optimal.Schema.merge/2`"
defdelegate merge(left, right), to: Optimal.Schema
defdelegate merge(left, right, opts), to: Optimal.Schema
@doc "See `Optimal.Doc.document/2`"
defdelegate document(schema, opts), to: Optimal.Doc
defdelegate document(schema), to: Optimal.Doc
@doc """
Validates opts according to a schema or the constructor for a schema. Raises on invalid opts.
iex> Optimal.validate!([reticulate_splines?: true], opts: [:reticulate_splines?])
[reticulate_splines?: true]
iex> Optimal.validate!([reticulate_splines?: true], opts: [:load_textures?], extra_keys?: true)
[reticulate_splines?: true]
iex> schema = Optimal.schema(opts: [:reticulate_splines?], required: [:reticulate_splines?], extra_keys?: true)
...> Optimal.validate!([reticulate_splines?: true, hack_interwebs?: true], schema)
[reticulate_splines?: true, hack_interwebs?: true]
iex> Optimal.validate!([], opts: [:reticulate_splines?], required: [:reticulate_splines?])
** (ArgumentError) Opt Validation Error: reticulate_splines? - is required
"""
@spec validate!(opts :: Keyword.t(), schema :: schema()) :: Keyword.t() | no_return
def validate!(opts, schema = %Optimal.Schema{}) do
case validate(opts, schema) do
{:ok, opts} ->
opts
{:error, errors} ->
message = message(errors)
raise ArgumentError, message
end
end
def validate!(opts, schema_config) do
validate!(opts, schema(schema_config))
end
@spec validate(opts :: Keyword.t(), schema :: Optimal.Schema.t()) ::
{:ok, Keyword.t()} | {:error, [error]}
def validate(opts, schema) when is_map(opts) do
validate(Enum.into(opts, []), schema)
end
def validate(opts, schema) when is_list(opts) do
with_defaults =
Enum.reduce(schema.defaults, opts, fn {default, value}, opts ->
Keyword.put_new(opts, default, value)
end)
{:ok, with_defaults}
|> validate_required(with_defaults, schema)
|> validate_types(with_defaults, schema)
|> validate_extra_keys(with_defaults, schema)
|> validate_custom(schema)
end
def validate(_opts, _schema) do
{:error, [{:opts, "opts must be a keyword list or a map."}]}
end
defp validate_custom(
validation_result = {:ok, opts},
schema = %{custom: [{field, custom} | rest]}
) do
result =
case custom.(opts[field], field, opts, schema) do
true ->
validation_result
false ->
add_errors(validation_result, {field, "failed a custom validation"})
:ok ->
validation_result
{:ok, updated_opts} ->
{:ok, updated_opts}
{:error, error_or_errors} ->
add_errors(validation_result, error_or_errors)
[] ->
validation_result
errors when is_list(errors) ->
add_errors(validation_result, errors)
end
validate_custom(result, %{schema | custom: rest})
end
defp validate_custom(validation_result, _schema), do: validation_result
defp validate_types(validation_result, opts, %{types: types}) do
Enum.reduce(types, validation_result, fn {field, type}, result ->
cond do
!Keyword.has_key?(opts, field) ->
result
match?(%Optimal.Schema{}, type) ||
match?({nested_type, %Optimal.Schema{}} when nested_type in [:keyword, :list], type) ->
validate_nested_schema(result, type, opts, field)
Optimal.Type.matches_type?(type, opts[field]) ->
result
true ->
message = "must be of type " <> sanitize_type(type)
add_errors(result, {field, message})
end
end)
end
defp validate_nested_schema(result, type, opts, field) do
case do_nested_schema_validation(type, opts[field]) do
{:ok, value} ->
update_result_key(result, field, value)
{:error, message} ->
add_errors(result, {field, message})
end
end
defp do_nested_schema_validation({:list, _schema}, value)
when not is_list(value) do
{:error, "expected a list of keywords conforming to a subschema"}
end
defp do_nested_schema_validation({:list, schema}, value) do
nested_opts_result =
value
|> Enum.with_index()
|> Enum.reduce({[], []}, fn {keyword, index}, {opts_acc, errors_acc} ->
case validate(keyword, schema) do
{:ok, new_opts} ->
{[new_opts | opts_acc], errors_acc}
{:error, errors} ->
message = nested_error_message(index, errors)
{opts_acc, [message | errors_acc]}
end
end)
case nested_opts_result do
{opts_acc, []} ->
{:ok, Enum.reverse(opts_acc)}
{_, errors} ->
message = Enum.join(errors, ", ")
{:error, message}
end
end
defp do_nested_schema_validation({:keyword, schema}, value) do
nested_opts_result =
value
|> Enum.reduce({[], []}, fn {key, keyword}, {opts_acc, errors_acc} ->
case validate(keyword, schema) do
{:ok, new_opts} ->
{[{key, new_opts} | opts_acc], errors_acc}
{:error, errors} ->
message = nested_error_message(key, errors)
{opts_acc, [message | errors_acc]}
end
end)
case nested_opts_result do
{opts_acc, []} ->
{:ok, Enum.reverse(opts_acc)}
{_, errors} ->
message = Enum.join(errors, ", ")
{:error, message}
end
end
defp do_nested_schema_validation(schema = %Optimal.Schema{}, value) do
case validate(value, schema) do
{:ok, new_opts} ->
{:ok, new_opts}
{:error, errors} ->
message =
Enum.map_join(errors, ", ", fn {nested_field, message} ->
"nested field #{nested_field} #{message}"
end)
{:error, message}
end
end
defp nested_error_message(nesting, errors) do
Enum.map_join(errors, ", ", fn {nested_field, message} ->
"nested field [#{nesting}][#{nested_field}] #{message}"
end)
end
defp sanitize_type(%struct{}), do: sanitize_type({:struct, struct})
defp sanitize_type(term), do: inspect(term)
defp validate_required(validation_result, opts, %{required: required}) do
Enum.reduce(required, validation_result, fn key, result ->
if is_nil(opts[key]) do
add_errors(result, {key, "is required"})
else
result
end
end)
end
defp validate_extra_keys(validation_result, _opts, %{extra_keys?: true}),
do: validation_result
defp validate_extra_keys(validation_result, opts, %{opts: keys}) do
extra_keys =
opts
|> Keyword.keys()
|> Kernel.--(keys)
Enum.reduce(
extra_keys,
validation_result,
&add_errors(&2, {&1, "is not allowed (no extra keys)"})
)
end
defp update_result_key({:ok, opts}, field, value) do
{:ok, Keyword.put(opts, field, value)}
end
defp update_result_key(other, _, _) do
other
end
defp add_errors({:ok, _opts}, error_or_errors) do
add_errors({:error, []}, error_or_errors)
end
defp add_errors({:error, existing_errors}, error_or_errors) do
errors = List.wrap(error_or_errors)
{:error, errors ++ existing_errors}
end
defp message(errors) do
short_messages =
errors
|> Enum.map(&short_message/1)
|> Enum.join(", ")
"Opt Validation Error: " <> "#{short_messages}"
end
defp short_message({path, message}) when is_list(path) do
path =
path
|> Enum.with_index()
|> Enum.map(fn {elem, i} ->
if i == 0 do
elem
else
"[:#{elem}]"
end
end)
|> Enum.join()
"#{path} - #{message}"
end
defp short_message({field, message}) do
"#{field} - #{message}"
end
end
|
lib/optimal.ex
| 0.842766
| 0.533397
|
optimal.ex
|
starcoder
|
defmodule Chunkr.PaginationPlanner do
@moduledoc """
Macros for establishing your pagination strategies.
For example:
defmodule MyApp.PaginationPlanner do
use Chunkr.PaginationPlanner
# Sort by a single column.
paginate_by :username do
sort :asc, as(:user).username
end
# Sort by DESC `user.inserted_at`, with ASC `user.id` as a tiebreaker.
# In this case, `user.id` is explicitly called out as a UUID.
paginate_by :user_created_at do
sort :desc, as(:user).inserted_at
sort :asc, as(:user).id, type: :binary_id
end
# Sort names in ASC order.
# Coalesce any `NULL` name values so they're at the end of the result set.
# Use `user.id` as the tiebreaker.
paginate_by :last_name do
sort :asc, fragment("coalesce(?, 'zzz')", as(:user).last_name)
sort :asc, fragment("coalesce(?, 'zzz')", as(:user).first_name)
sort :desc, as(:user).id
end
end
The `paginate_by/2` macro above takes a name for the pagination strategy along with the
fields to sort by in their desired order. The fields can be actual table columns or
dynamically-generated values via Ecto fragments. Fragments are especially handy for
implementing case-insensitive sorts, coalescing `NULL` values, and so forth.
Each call to `sort` requires a sort direction (`:asc` or `:desc`), any valid Ecto fragment
or field (using [`:as`](https://hexdocs.pm/ecto/Ecto.Query.html#module-named-bindings)), and an
optional [`:type`](https://hexdocs.pm/ecto/3.7.1/Ecto.Query.API.html#type/2) keyword.
If `:type` is provided, the relevant cursor value will be cast to that type when filtering records.
When you register these pagination strategies, Chunkr defines, at compile time, the
functions needed to extend future queries for each strategy: functions that sort,
filter, and limit your queries, plus functions that select both the cursor fields
and the records themselves.
## Ordering
It is essential that your results are deterministically ordered, otherwise you will see
unexpected results. Therefore, the final column used for sorting (i.e. the ultimate tie-breaker)
must _always_ be unique and non-NULL.
## Named bindings
Because these sort clauses must reference bindings that have not yet been established,
we use [`:as`](https://hexdocs.pm/ecto/Ecto.Query.html#module-named-bindings)
to take advantage of Ecto's late binding. The column referenced by `:as` must then be
explicitly provided within your query, or the query will fail.
## Always coalesce `NULL` values!
SQL cannot reasonably compare `NULL` to a non-`NULL` value using operators like `<` and `>`.
However, when filtering records against our cursor values, it's not uncommon to find ourselves
in a situation where our sorted fields may include `NULL` values. Without intervention, any
records that contain a `NULL` value in one of the sort fields would be entirely dropped from the
result set, which is almost surely _not_ the intention.
To work around this awkwardness, you'll need to pick a value that is almost sure to come before
or after the rest of your results (depending on whether you want `NULL` values to sort to the
beginning or the end of your results respectively) and coalesce any `NULL` values in sorted
fields so that these records sort to the desired location. With keyset-based pagination,
it's not enough to use a strategy like ordering by `NULLS LAST` or `NULLS FIRST`.
Remember, it's not the ordering itself where this is problematic; it's the efficient filtering
of records (via comparison to a cursor) where records with `NULL` values would get dropped.
Note that you only need to coalesce values within your actual pagination strategy, and the
coalesced values will only be used behind the scenes (for cursor values and when filtering
records against cursors). You **_do not_** need to coalesce values in the query that you
provide to `Chunkr.Pagination.paginate/4`, and you need not worry about values somehow being
altered by Chunkr in the records that are returned in each page of results.
## Indexes
In order to get maximum performance from your paginated queries, you'll want to
create database indexes that align with your pagination strategy. When sorting by multiple
columns, you will need to have an index in place that includes each of those columns with
sort orders matching your strategy. However, you shouldn't need to include the inverse order
as the database should be able to recognize and automatically reverse the index order when
necessary. By providing an index that matches your pagination strategy, you should be able to
take advantage of [efficient pipelined top-N queries](https://use-the-index-luke.com/sql/partial-results/top-n-queries).
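For example, a hypothetical Ecto migration backing the `:user_created_at` strategy above
(table and index names are illustrative):
execute("CREATE INDEX users_inserted_at_id_idx ON users (inserted_at DESC, id ASC)")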
## Limitations
Chunkr limits the number of `sort` clauses to 4.
"""
@doc false
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
require Ecto.Query
def apply_limit(query, limit) do
Ecto.Query.limit(query, ^limit)
end
end
end
@doc """
Implements the functions necessary for pagination.
paginate_by :user_id do
sort :asc, as(:user).id
end
"""
defmacro paginate_by(query_name, do: {:sort, _, args}) do
sorts = [parse_sorts(args)]
implement(query_name, sorts)
end
defmacro paginate_by(query_name, do: {:__block__, _, sorts}) do
sorts = Enum.map(sorts, fn {:sort, _, args} -> parse_sorts(args) end)
implement(query_name, sorts)
end
@doc false
def parse_sorts([dir, field]), do: {dir, field, nil}
def parse_sorts([dir, field, [type: type]]), do: {dir, field, type}
@doc false
def with_cursor_fields_func(query_name, fields) do
quote do
def apply_select(query, unquote(query_name)) do
Ecto.Query.select(query, [record], {unquote(fields), record})
end
end
end
@doc false
def with_order_func(query_name, primary_sort_dir, order_bys) do
inverted_sort_dir = invert(primary_sort_dir)
quote do
def apply_order(query, unquote(query_name), unquote(primary_sort_dir), :forward) do
Ecto.Query.order_by(query, unquote(order_bys))
end
def apply_order(query, unquote(query_name), unquote(primary_sort_dir), :backward) do
Ecto.Query.order_by(query, unquote(order_bys))
|> Ecto.Query.reverse_order()
end
def apply_order(query, unquote(query_name), unquote(inverted_sort_dir), :forward) do
Ecto.Query.order_by(query, unquote(order_bys))
|> Ecto.Query.reverse_order()
end
def apply_order(query, unquote(query_name), unquote(inverted_sort_dir), :backward) do
Ecto.Query.order_by(query, unquote(order_bys))
end
end
end
@doc false
def implement(query_name, sorts) when length(sorts) == 1 do
[{dir1, f1, t1}] = sorts
rdir1 = invert(dir1)
operators = derive_operators([dir1])
[op1] = operators
[rop1] = operators |> Enum.map(&invert/1)
order_bys = Enum.map(sorts, fn {dir, field, _type} -> {dir, field} end)
fields = Enum.map(sorts, fn {_dir, field, _type} -> field end)
quote do
def beyond_cursor(query, unquote(query_name), unquote(dir1), :forward, cursor_values) do
[cv1] = cursor_values
Ecto.Query.where(query, compare(unquote(f1), unquote(op1), cv1, unquote(t1)))
end
def beyond_cursor(query, unquote(query_name), unquote(dir1), :backward, cursor_values) do
[cv1] = cursor_values
Ecto.Query.where(query, compare(unquote(f1), unquote(rop1), cv1, unquote(t1)))
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :forward, cursor_values) do
[cv1] = cursor_values
Ecto.Query.where(query, compare(unquote(f1), unquote(rop1), cv1, unquote(t1)))
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :backward, cursor_values) do
[cv1] = cursor_values
Ecto.Query.where(query, compare(unquote(f1), unquote(op1), cv1, unquote(t1)))
end
unquote(with_order_func(query_name, dir1, order_bys))
unquote(with_cursor_fields_func(query_name, fields))
end
end
def implement(query_name, sorts) when length(sorts) == 2 do
[{dir1, f1, t1}, {dir2, f2, t2}] = sorts
rdir1 = invert(dir1)
operators = derive_operators([dir1, dir2])
[op1, op2, op3, op4] = operators
[rop1, rop2, rop3, rop4] = Enum.map(operators, &invert/1)
order_bys = Enum.map(sorts, fn {dir, field, _type} -> {dir, field} end)
fields = Enum.map(sorts, fn {_dir, field, _type} -> field end)
quote do
def beyond_cursor(query, unquote(query_name), unquote(dir1), :forward, cursor_values) do
[cv1, cv2] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
(compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(dir1), :backward, cursor_values) do
[cv1, cv2] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
(compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :forward, cursor_values) do
[cv1, cv2] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
(compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :backward, cursor_values) do
[cv1, cv2] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
(compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))))
)
end
unquote(with_order_func(query_name, dir1, order_bys))
unquote(with_cursor_fields_func(query_name, fields))
end
end
@doc false
def implement(query_name, sorts) when length(sorts) == 3 do
[{dir1, f1, t1}, {dir2, f2, t2}, {dir3, f3, t3}] = sorts
rdir1 = invert(dir1)
operators = derive_operators([dir1, dir2, dir3])
[op1, op2, op3, op4, op5, op6, op7] = operators
[rop1, rop2, rop3, rop4, rop5, rop6, rop7] = Enum.map(operators, &invert/1)
order_bys = Enum.map(sorts, fn {dir, field, _type} -> {dir, field} end)
fields = Enum.map(sorts, fn {_dir, field, _type} -> field end)
quote do
def beyond_cursor(query, unquote(query_name), unquote(dir1), :forward, cursor_values) do
[cv1, cv2, cv3] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))) or
(compare(unquote(f1), unquote(op5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op7), cv3, unquote(t3)))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(dir1), :backward, cursor_values) do
[cv1, cv2, cv3] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))) or
(compare(unquote(f1), unquote(rop5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop7), cv3, unquote(t3)))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :forward, cursor_values) do
[cv1, cv2, cv3] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))) or
(compare(unquote(f1), unquote(rop5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop7), cv3, unquote(t3)))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :backward, cursor_values) do
[cv1, cv2, cv3] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))) or
(compare(unquote(f1), unquote(op5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op7), cv3, unquote(t3)))))
)
end
unquote(with_order_func(query_name, dir1, order_bys))
unquote(with_cursor_fields_func(query_name, fields))
end
end
def implement(query_name, sorts) when length(sorts) == 4 do
[{dir1, f1, t1}, {dir2, f2, t2}, {dir3, f3, t3}, {dir4, f4, t4}] = sorts
rdir1 = invert(dir1)
order_bys = Enum.map(sorts, fn {dir, field, _type} -> {dir, field} end)
fields = Enum.map(sorts, fn {_dir, field, _type} -> field end)
operators = derive_operators([dir1, dir2, dir3, dir4])
[op1, op2, op3, op4, op5, op6, op7, op8, op9, op10, op11] = operators
[rop1, rop2, rop3, rop4, rop5, rop6, rop7, rop8, rop9, rop10, rop11] =
Enum.map(operators, &invert/1)
quote do
def beyond_cursor(query, unquote(query_name), unquote(dir1), :forward, cursor_values) do
[cv1, cv2, cv3, cv4] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))) or
((compare(unquote(f1), unquote(op5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op7), cv3, unquote(t3))) or
(compare(unquote(f1), unquote(op8), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op9), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op10), cv3, unquote(t3)) and
compare(unquote(f4), unquote(op11), cv4, unquote(t4))))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(dir1), :backward, cursor_values) do
[cv1, cv2, cv3, cv4] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))) or
((compare(unquote(f1), unquote(rop5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop7), cv3, unquote(t3))) or
(compare(unquote(f1), unquote(rop8), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop9), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop10), cv3, unquote(t3)) and
compare(unquote(f4), unquote(rop11), cv4, unquote(t4))))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :forward, cursor_values) do
[cv1, cv2, cv3, cv4] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(rop1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(rop2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(rop3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop4), cv2, unquote(t2))) or
((compare(unquote(f1), unquote(rop5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop7), cv3, unquote(t3))) or
(compare(unquote(f1), unquote(rop8), cv1, unquote(t1)) and
compare(unquote(f2), unquote(rop9), cv2, unquote(t2)) and
compare(unquote(f3), unquote(rop10), cv3, unquote(t3)) and
compare(unquote(f4), unquote(rop11), cv4, unquote(t4))))))
)
end
def beyond_cursor(query, unquote(query_name), unquote(rdir1), :backward, cursor_values) do
[cv1, cv2, cv3, cv4] = cursor_values
query
|> Ecto.Query.where(
compare(unquote(f1), unquote(op1), cv1, unquote(t1)) and
(compare(unquote(f1), unquote(op2), cv1, unquote(t1)) or
((compare(unquote(f1), unquote(op3), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op4), cv2, unquote(t2))) or
((compare(unquote(f1), unquote(op5), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op6), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op7), cv3, unquote(t3))) or
(compare(unquote(f1), unquote(op8), cv1, unquote(t1)) and
compare(unquote(f2), unquote(op9), cv2, unquote(t2)) and
compare(unquote(f3), unquote(op10), cv3, unquote(t3)) and
compare(unquote(f4), unquote(op11), cv4, unquote(t4))))))
)
end
unquote(with_order_func(query_name, dir1, order_bys))
unquote(with_cursor_fields_func(query_name, fields))
end
end
@doc false
def derive_operators([single_sort_dir]), do: [comparison_operator(single_sort_dir)]
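# e.g. derive_operators([:asc, :desc]) returns [:gte, :gt, :eq, :lt]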
def derive_operators([dir1 | _rest] = multiple_sort_dirs) do
multiple_sort_dirs
|> Enum.with_index(1)
|> Enum.reduce(index_friendly_comparison_operator(dir1), fn {dir, sort_col_num}, operators ->
[operators, operators_for_sort_field(sort_col_num, dir)]
end)
|> List.flatten()
end
defp operators_for_sort_field(sort_col_num, sort_col_dir) do
[
List.duplicate(:eq, sort_col_num - 1),
comparison_operator(sort_col_dir)
]
end
@doc false
def invert(:asc), do: :desc
def invert(:desc), do: :asc
def invert(:eq), do: :eq
def invert(:gt), do: :lt
def invert(:gte), do: :lte
def invert(:lt), do: :gt
def invert(:lte), do: :gte
@doc false
def index_friendly_comparison_operator(:asc), do: :gte
def index_friendly_comparison_operator(:desc), do: :lte
@doc false
def comparison_operator(:asc), do: :gt
def comparison_operator(:desc), do: :lt
@doc false
defmacro compare(field, :gte, value, nil) do
quote do: unquote(field) >= ^unquote(value)
end
defmacro compare(field, :gte, value, type) do
quote do: unquote(field) >= type(^unquote(value), unquote(type))
end
defmacro compare(field, :gt, value, nil) do
quote do: unquote(field) > ^unquote(value)
end
defmacro compare(field, :gt, value, type) do
quote do: unquote(field) > type(^unquote(value), unquote(type))
end
defmacro compare(field, :eq, value, nil) do
quote do: unquote(field) == ^unquote(value)
end
defmacro compare(field, :eq, value, type) do
quote do: unquote(field) == type(^unquote(value), unquote(type))
end
defmacro compare(field, :lt, value, nil) do
quote do: unquote(field) < ^unquote(value)
end
defmacro compare(field, :lt, value, type) do
quote do: unquote(field) < type(^unquote(value), unquote(type))
end
defmacro compare(field, :lte, value, nil) do
quote do: unquote(field) <= ^unquote(value)
end
defmacro compare(field, :lte, value, type) do
quote do: unquote(field) <= type(^unquote(value), unquote(type))
end
end
|
lib/chunkr/pagination_planner.ex
| 0.841468
| 0.729411
|
pagination_planner.ex
|
starcoder
|
defmodule Asteroid.ObjectStore.AccessToken.Mnesia do
@moduledoc """
Mnesia implementation of the `Asteroid.ObjectStore.AccessToken` behaviour
## Options
The options (`Asteroid.ObjectStore.AccessToken.opts()`) are:
- `:table_name`: an `atom()` for the table name. Defaults to `:asteroid_access_token`
- `:tab_def`: Mnesia's table definitions of the `:mnesia.create_table/2` function. Defaults to
the options below. User-defined `:tab_def` will be merged on a key basis, i.e. defaults will
not be erased. One can use it to add additional indexes for clients or devices, e.g.:
`tab_def: [index: [:refresh_token_id, :subject_id, :client_id]]`
- `:purge_interval`: the `integer()` interval in seconds the purge process will be triggered,
or `:no_purge` to disable purge. Defaults to `300` (5 minutes)
## Default Mnesia table definition
```elixir
[
attributes: [:id, :refresh_token_id, :subject_id, :client_id, :device_id, :data],
index: [:refresh_token_id]
]
```
## Purge process
The purge process uses the `Singleton` library. Therefore the purge process will be unique
per cluster (and that's probably what you want if you use Mnesia).
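A minimal usage sketch (option values shown are illustrative):
```elixir
:ok = Asteroid.ObjectStore.AccessToken.Mnesia.install(table_name: :asteroid_access_token)
Asteroid.ObjectStore.AccessToken.Mnesia.start_link(purge_interval: 60)
```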
"""
require Logger
alias Asteroid.Token.AccessToken
@behaviour Asteroid.ObjectStore.AccessToken
@impl true
def install(opts) do
:mnesia.stop()
:mnesia.create_schema([node()])
:mnesia.start()
table_name = opts[:table_name] || :asteroid_access_token
tab_def =
[
attributes: [:id, :refresh_token_id, :subject_id, :client_id, :device_id, :data],
index: [:refresh_token_id]
]
|> Keyword.merge(opts[:tab_def] || [])
case :mnesia.create_table(table_name, tab_def) do
{:atomic, :ok} ->
Logger.info("#{__MODULE__}: created access token store #{table_name}")
:ok
{:aborted, {:already_exists, _}} ->
Logger.info("#{__MODULE__}: access token store #{table_name} already exists")
:ok
{:aborted, reason} ->
Logger.error(
"#{__MODULE__}: failed to create access token store #{table_name} " <>
"(reason: #{inspect(reason)})"
)
{:error, reason}
end
end
@impl true
def start_link(opts) do
case :mnesia.start() do
:ok ->
opts = Keyword.merge([purge_interval: 300], opts)
# we launch the purge process anyway because we need to return a process,
# but the singleton does nothing when `:purge_interval` is `:no_purge`
Singleton.start_child(__MODULE__.Purge, opts, __MODULE__)
{:error, _} = error ->
error
end
end
@impl true
def get(access_token_id, opts) do
table_name = opts[:table_name] || :asteroid_access_token
case :mnesia.dirty_read(table_name, access_token_id) do
[] ->
Logger.debug(
"#{__MODULE__}: getting access token `#{access_token_id}`, " <> "value: `nil`"
)
{:ok, nil}
[
{^table_name, ^access_token_id, refresh_token_id, _subject_id, _client_id, _device_id,
data}
] ->
access_token =
AccessToken.new(
id: access_token_id,
refresh_token_id: refresh_token_id,
data: data
)
Logger.debug(
"#{__MODULE__}: getting access token `#{access_token_id}`, " <>
"value: `#{inspect(access_token)}`"
)
{:ok, access_token}
_ ->
{:error, "Multiple results from Mnesia"}
end
catch
:exit, reason ->
{:error, reason}
end
@impl true
def get_from_refresh_token_id(refresh_token_id, opts) do
table_name = opts[:table_name] || :asteroid_access_token
{:ok,
for {_table_name, access_token_id, _refresh_token_id, _subject_id, _client_id, _device_id,
_data} <- :mnesia.dirty_match_object({table_name, :_, refresh_token_id, :_, :_, :_, :_}) do
access_token_id
end}
catch
:exit, reason ->
{:error, reason}
end
@impl true
def get_from_subject_id(subject_id, opts) do
table_name = opts[:table_name] || :asteroid_access_token
{:ok,
for {_table_name, access_token_id, _refresh_token_id, _subject_id, _client_id, _device_id,
_data} <- :mnesia.dirty_match_object({table_name, :_, :_, subject_id, :_, :_, :_}) do
access_token_id
end}
catch
:exit, reason ->
{:error, reason}
end
@impl true
def get_from_client_id(client_id, opts) do
table_name = opts[:table_name] || :asteroid_access_token
{:ok,
for {_table_name, access_token_id, _refresh_token_id, _subject_id, _client_id, _device_id,
_data} <- :mnesia.dirty_match_object({table_name, :_, :_, :_, client_id, :_, :_}) do
access_token_id
end}
catch
:exit, reason ->
{:error, reason}
end
@impl true
def get_from_device_id(device_id, opts) do
table_name = opts[:table_name] || :asteroid_access_token
{:ok,
for {_table_name, access_token_id, _refresh_token_id, _subject_id, _client_id, _device_id,
_data} <- :mnesia.dirty_match_object({table_name, :_, :_, :_, :_, device_id, :_}) do
access_token_id
end}
catch
:exit, reason ->
{:error, reason}
end
@impl true
def put(access_token, opts) do
table_name = opts[:table_name] || :asteroid_access_token
record = {
table_name,
access_token.id,
access_token.refresh_token_id,
access_token.data["sub"],
access_token.data["client_id"],
access_token.data["device_id"],
access_token.data
}
:mnesia.dirty_write(table_name, record)
Logger.debug(
"#{__MODULE__}: stored access token `#{access_token.id}`, " <>
"value: `#{inspect(access_token)}`"
)
:ok
catch
:exit, reason ->
{:error, reason}
end
@impl true
def delete(access_token_id, opts) do
table_name = opts[:table_name] || :asteroid_access_token
:mnesia.dirty_delete(table_name, access_token_id)
Logger.debug("#{__MODULE__}: deleted access token `#{access_token_id}`")
:ok
catch
:exit, reason ->
{:error, reason}
end
end
|
lib/asteroid/object_store/access_token/mnesia.ex
| 0.874627
| 0.782891
|
mnesia.ex
|
starcoder
|
defmodule DataBuffer.Telemetry do
@moduledoc """
DataBuffer produces multiple telemetry events.
## Events
* `[:data_buffer, :insert, :start]` - Called when a buffer insert starts.
#### Measurements
* `:system_time` - The system time at which the insert started.
#### Metadata
* `:buffer` - The name of the buffer.
* `[:data_buffer, :insert, :stop]` - Called when a buffer insert stops.
#### Measurements
* `:duration` - The amount of time taken to insert data to the buffer.
#### Metadata
* `:buffer` - The name of the buffer.
* `[:data_buffer, :insert, :exception]` - Called when a buffer insert has an exception.
#### Measurements
* `:duration` - The amount of time before the error occurred.
#### Metadata
* `:buffer` - The name of the buffer.
* `:kind` - The kind of error raised.
* `:reason` - The reason for the error.
* `:stacktrace` - The stacktrace of the error.
* `[:data_buffer, :flush, :start]` - Called when a buffer flush starts.
#### Measurements
* `:system_time` - The system time at which the flush started.
#### Metadata
* `:buffer` - The name of the buffer.
* `:size` - The buffer size.
* `[:data_buffer, :flush, :stop]` - Called when a buffer flush stops.
#### Measurements
* `:duration` - The amount of time taken to flush the buffer.
#### Metadata
* `:buffer` - The name of the buffer.
* `[:data_buffer, :flush, :exception]` - Called when a buffer flush has an exception.
#### Measurements
* `:duration` - The amount of time before the error occurred.
#### Metadata
* `:buffer` - The name of the buffer.
* `:kind` - The kind of error raised.
* `:reason` - The reason for the error.
* `:stacktrace` - The stacktrace of the error.
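A minimal sketch of consuming one of these events via `:telemetry.attach/4` (the handler
id and handler logic are illustrative):
:telemetry.attach(
"log-data-buffer-flush",
[:data_buffer, :flush, :stop],
fn _event, %{duration: duration}, %{buffer: buffer}, _config ->
IO.inspect({buffer, duration}, label: "buffer flush")
end,
nil
)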
"""
@doc false
@spec span(atom(), map(), (() -> {any, map})) :: any()
def span(name, meta, fun) do
:telemetry.span([:data_buffer, name], meta, fun)
end
end
|
lib/data_buffer/telemetry.ex
| 0.848612
| 0.754712
|
telemetry.ex
|
starcoder
|
defmodule SPARQL.Functions.Cast do
alias RDF.Literal
defmodule Integer do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:integer` XPath constructor function.
See:
- <https://www.w3.org/TR/sparql11-query/#FunctionMapping>
- <https://www.w3.org/TR/xpath-functions/#casting-to-numerics>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#integer"
def call(_, [%Literal{} = literal], _, _) do
RDF.Integer.cast(literal) || :error
end
def call(_, _, _, _), do: :error
end
defmodule Decimal do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:decimal` XPath constructor function.
See:
- <https://www.w3.org/TR/sparql11-query/#FunctionMapping>
- <https://www.w3.org/TR/xpath-functions/#casting-to-numerics>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#decimal"
def call(_, [%Literal{} = literal], _, _) do
RDF.Decimal.cast(literal) || :error
end
def call(_, _, _, _), do: :error
end
defmodule Float do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:float` XPath constructor function.
See:
- <https://www.w3.org/TR/sparql11-query/#FunctionMapping>
- <https://www.w3.org/TR/xpath-functions/#casting-to-numerics>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#float"
def call(_, [%Literal{} = literal], _, _) do
# TODO: Remove this hack of reusing the RDF.Double.cast function until we have a proper RDF.Float datatype
with %Literal{} = double_literal <- RDF.Double.cast(literal) do
double_literal
|> Literal.lexical()
|> Literal.new(datatype: name())
else
_ -> :error
end
end
def call(_, _, _, _), do: :error
end
defmodule Double do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:double` XPath constructor function.
See:
- <https://www.w3.org/TR/sparql11-query/#FunctionMapping>
- <https://www.w3.org/TR/xpath-functions/#casting-to-numerics>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#double"
def call(_, [%Literal{} = literal], _, _) do
RDF.Double.cast(literal) || :error
end
def call(_, _, _, _), do: :error
end
defmodule String do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:string` XPath constructor function.
See:
- <https://www.w3.org/TR/sparql11-query/#FunctionMapping>
- <https://www.w3.org/TR/xpath-functions/#casting-to-string>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#string"
def call(_, [value], _, _) do
RDF.String.cast(value) || :error
end
def call(_, _, _, _), do: :error
end
defmodule Boolean do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:boolean` XPath constructor function.
See:
- <https://www.w3.org/TR/sparql11-query/#FunctionMapping>
- <https://www.w3.org/TR/xpath-functions/#casting-boolean>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#boolean"
def call(_, [%Literal{} = literal], _, _) do
RDF.Boolean.cast(literal) || :error
end
def call(_, _, _, _), do: :error
end
defmodule DateTime do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:dateTime` XPath constructor function.
See:
- <https://www.w3.org/TR/sparql11-query/#FunctionMapping>
- <https://www.w3.org/TR/xpath-functions/#casting-to-datetimes>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#dateTime"
def call(_, [%Literal{} = literal], _, _) do
RDF.DateTime.cast(literal) || :error
end
def call(_, _, _, _), do: :error
end
defmodule Date do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:date` XPath constructor function.
Note: This is not required to be implemented on all SPARQL processing engines,
so a query using this function might not be portable.
See: <https://www.w3.org/TR/xpath-functions/#casting-to-datetimes>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#date"
def call(_, [%Literal{} = literal], _, _) do
RDF.Date.cast(literal) || :error
end
def call(_, _, _, _), do: :error
end
defmodule Time do
@moduledoc """
A `SPARQL.ExtensionFunction` for the `xsd:time` XPath constructor function.
Note: This is not required to be implemented on all SPARQL processing engines,
so a query using this function might not be portable.
See: <https://www.w3.org/TR/xpath-functions/#casting-to-datetimes>
"""
use SPARQL.ExtensionFunction,
name: "http://www.w3.org/2001/XMLSchema#time"
def call(_, [%Literal{} = literal], _, _) do
RDF.Time.cast(literal) || :error
end
def call(_, _, _, _), do: :error
end
end
|
lib/sparql/functions/cast.ex
| 0.633864
| 0.64377
|
cast.ex
|
starcoder
|
defmodule HTMLParser.TreeBuilder do
@moduledoc """
Builds an HTML node tree from a parsed list of tags with depth counts.
"""
alias HTMLParser.{HTMLCommentNode, HTMLNodeTree, HTMLTextNode, ParseState}
@spec build(ParseState.tags()) :: {:ok, [HTMLNodeTree.t()] | HTMLNodeTree.t()} | {:error, any()}
def build(tags) do
tags
|> validate_node_list()
|> case do
:ok ->
case do_build(tags) do
[node] -> {:ok, node}
nodes -> {:ok, nodes}
end
{:error, reason} ->
{:error, reason}
end
end
defp validate_node_list(nodes) do
nodes
|> Enum.reject(&match?({:comment, _comment}, &1))
|> Enum.reject(&match?({:text, _comment}, &1))
|> Enum.filter(&is_tuple/1)
|> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
|> Enum.reduce([], fn {tag, nodes}, acc ->
too_many_closing = Enum.filter(nodes, fn %{depth_count: depth_count} -> depth_count < 0 end)
case too_many_closing do
[] -> acc
[error_node | _] -> [{tag, {:extra_closing_tag, error_node}} | acc]
end
end)
|> case do
[] ->
:ok
error_nodes ->
errors =
error_nodes
|> Enum.map(fn {tag, {error, meta}} ->
{tag, {error, meta.newline_count, meta.char_count}}
end)
{:error, errors}
end
end
defp do_build([]), do: []
defp do_build([{:"!DOCTYPE", _} | elements]) do
do_build(elements)
end
defp do_build([{:"!doctype", _} | elements]) do
do_build(elements)
end
defp do_build([{:comment, element} | elements]) do
[HTMLCommentNode.new(element) | do_build(elements)]
end
defp do_build([{:text, element} | elements]) do
[HTMLTextNode.new(element) | do_build(elements)]
end
defp do_build([{tag, %{attrs: attrs, depth_count: depth_count}} | elements]) do
node = tag |> HTMLNodeTree.new() |> HTMLNodeTree.put_attrs(attrs)
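# Split the remaining elements at the close tag matching this tag and depth;
# everything before it becomes this node's children.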
case Enum.split_while(elements, ¬_matching_tag?(tag, depth_count, &1)) do
{remaining, [_close_tag | siblings]} ->
tree = HTMLNodeTree.add_children(node, do_build(remaining))
[tree] ++ do_build(siblings)
# Self-closing / empty tag
{remaining, []} ->
node = HTMLNodeTree.put_empty(node)
[node] ++ do_build(remaining)
end
end
defp do_build([_element | elements]) do
do_build(elements)
end
defp not_matching_tag?(tag, depth_count, {other_tag, other_meta}) do
other_tag != tag or other_meta.depth_count != depth_count
end
defp not_matching_tag?(_tag, _depth_count, _text), do: true
end
|
lib/html_parser/tree_builder.ex
| 0.811153
| 0.416174
|
tree_builder.ex
|
starcoder
|
defmodule SgfParsing do
# Used to make recursive parsers lazy
defmacro lazy(parser) do
quote do
fn string -> unquote(parser).(string) end
end
end
defmodule Sgf do
defstruct properties: %{}, children: []
end
@type sgf :: %Sgf{properties: map, children: [sgf]}
@doc """
Parse a string into a Smart Game Format tree
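A minimal illustration:
parse("(;FF[4])")
#=> {:ok, %Sgf{properties: %{"FF" => ["4"]}, children: []}}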
"""
@spec parse(encoded :: String.t()) :: {:ok, sgf} | {:error, String.t()}
def parse(encoded) do
parser = parse_tree_paren() |> eof()
with {:ok, tree, ""} <- run_parser(parser, encoded) do
{:ok, tree}
else
{:error, err, _rest} -> {:error, err}
end
end
# TREE PARSER
def parse_tree() do
parse_properties =
char(?;)
|> error("tree with no nodes")
|> drop_and(many(parse_property()))
|> map(&Map.new/1)
parse_children =
one_of([
map(parse_tree(), &List.wrap/1),
many(parse_tree_paren())
])
|> lazy()
lift2(&%Sgf{properties: &1, children: &2}, parse_properties, parse_children)
end
def parse_tree_paren() do
char(?()
|> error("tree missing")
|> drop_and(parse_tree())
|> drop(char(?)))
end
def parse_property() do
parse_name =
some(satisfy(&(&1 not in '[();')))
|> map(&Enum.join(&1, ""))
|> validate(&(&1 == String.upcase(&1)), "property must be in uppercase")
parse_attributes =
some(
char(?[)
|> error("properties without delimiter")
|> drop_and(many(escaped(&(&1 != ?]))))
|> drop(char(?]))
|> map(&Enum.join(&1, ""))
)
lift2(&{&1, &2}, parse_name, parse_attributes)
end
def escaped(p) do
one_of([
lift2(&escape/2, char(?\\), satisfy(&(&1 in 'nt]['))),
satisfy(p)
])
end
def escape("\\", "n"), do: "\n"
def escape("\\", "t"), do: "\t"
def escape("\\", "]"), do: "]"
def escape("\\", "["), do: "["
# PARSER COMBINATORS LIBRARY
# Inspired by Haskell libraries like Parsec
# and https://serokell.io/blog/parser-combinators-in-elixir
def run_parser(parser, string), do: parser.(string)
def eof(parser) do
fn string ->
with {:ok, _, ""} = ok <- parser.(string) do
ok
else
{:ok, _a, rest} -> {:error, "Not end of file", rest}
err -> err
end
end
end
def satisfy(p) do
fn
<<char, rest::bitstring>> = string ->
if p.(char) do
{:ok, <<char>>, rest}
else
{:error, "unexpected #{char}", string}
end
"" ->
{:error, "unexpected end of string", ""}
end
end
def char(c), do: satisfy(&(&1 == c)) |> error("expected character #{<<c>>}")
def string(str) do
str
|> to_charlist
|> Enum.map(&char/1)
|> Enum.reduce(inject(""), &lift2(fn a, b -> a <> b end, &1, &2))
end
def some(parser) do
fn input ->
with {:ok, result, rest} <- parser.(input),
{:ok, results, rest} <- many(parser).(rest) do
{:ok, [result | results], rest}
end
end
end
def many(parser) do
fn input ->
with {:ok, result, rest} <- some(parser).(input) do
{:ok, result, rest}
else
{:error, _err, ^input} -> {:ok, [], input}
err -> err
end
end
end
def one_of(parsers) when is_list(parsers) do
fn string ->
Enum.reduce_while(parsers, {:error, "no parsers", string}, fn
_parser, {:ok, _, _} = result -> {:halt, result}
parser, _err -> {:cont, parser.(string)}
end)
end
end
def map(parser, f) do
fn string ->
with {:ok, a, rest} <- parser.(string) do
{:ok, f.(a), rest}
end
end
end
def error(parser, err) do
fn string ->
with {:error, _err, rest} <- parser.(string) do
{:error, err, rest}
end
end
end
def drop(p1, p2) do
fn string ->
with {:ok, a, rest} <- p1.(string),
{:ok, _, rest} <- p2.(rest) do
{:ok, a, rest}
end
end
end
def drop_and(p1, p2) do
fn string ->
with {:ok, _, rest} <- p1.(string) do
p2.(rest)
end
end
end
def inject(a) do
fn string -> {:ok, a, string} end
end
def lift2(pair, p1, p2) do
fn string ->
with {:ok, a, rest} <- p1.(string),
{:ok, b, rest} <- p2.(rest) do
{:ok, pair.(a, b), rest}
end
end
end
def validate(parser, p, err) do
fn string ->
with {:ok, result, rest} <- parser.(string) do
if p.(result) do
{:ok, result, rest}
else
{:error, err, rest}
end
end
end
end
end
|
exercises/practice/sgf-parsing/.meta/example.ex
| 0.652241
| 0.407481
|
example.ex
|
starcoder
|
defprotocol FileStore do
@moduledoc """
FileStore allows you to read, write, upload, download, and interact
with files, regardless of where they are stored.
## Adapters
This package ships with the following adapters:
* `FileStore.Adapters.Disk`
* `FileStore.Adapters.S3`
* `FileStore.Adapters.Memory`
* `FileStore.Adapters.Null`
The documentation for each adapter includes an example that demonstrates
its usage.
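## Example
A minimal sketch, assuming `store` is a struct built by one of the adapters
above (see that adapter's docs for how to create one):
iex> FileStore.write(store, "foo", "hello world")
:ok
iex> FileStore.read(store, "foo")
{:ok, "hello world"}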
"""
@type key :: binary()
@type list_opts :: [{:prefix, binary()}]
@type delete_all_opts :: [{:prefix, binary()}]
@type write_opts :: [
{:content_type, binary()}
| {:disposition, binary()}
]
@type public_url_opts :: [
{:content_type, binary()}
| {:disposition, binary()}
]
@type signed_url_opts :: [
{:content_type, binary()}
| {:disposition, binary()}
| {:expires_in, integer()}
]
@doc """
Write a file to the store. If a file with the given `key`
already exists, it will be overwritten.
## Options
* `:content_type` - Sets the content type hint for the adapter.
* `:disposition` - Sets the content disposition hint for the adapter.
## Examples
iex> FileStore.write(store, "foo", "hello world")
:ok
"""
@spec write(t, key, binary, write_opts) :: :ok | {:error, term}
def write(store, key, content, opts \\ [])
@doc """
Read the contents of a file in the store into memory.
## Examples
iex> FileStore.read(store, "foo")
{:ok, "hello world"}
"""
@spec read(t, key) :: {:ok, binary} | {:error, term}
def read(store, key)
@doc """
Upload a file to the store. If a file with the given `key`
already exists, it will be overwritten.
## Examples
iex> FileStore.upload(store, "/path/to/bar.txt", "foo")
:ok
"""
@spec upload(t, Path.t(), key) :: :ok | {:error, term}
def upload(store, source, key)
@doc """
Download a file from the store and save it to the given `path`.
## Examples
iex> FileStore.download(store, "foo", "/path/to/bar.txt")
:ok
"""
@spec download(t, key, Path.t()) :: :ok | {:error, term}
def download(store, key, destination)
@doc """
Retrieve information about a file from the store.
## Examples
iex> FileStore.stat(store, "foo")
{:ok, %FileStore.Stat{key: "foo", etag: "2e5pd429", size: 24}}
"""
@spec stat(t, key) :: {:ok, FileStore.Stat.t()} | {:error, term}
def stat(store, key)
@doc """
Delete a file from the store.
## Examples
iex> FileStore.delete(store, "foo")
:ok
"""
@spec delete(t, key) :: :ok | {:error, term}
def delete(store, key)
@doc """
Delete files in bulk.
## Options
* `:prefix` - Only delete keys matching the given prefix.
## Examples
iex> FileStore.delete_all(store)
:ok
iex> FileStore.delete_all(store, prefix: "foo/")
:ok
"""
@spec delete_all(t, delete_all_opts) :: :ok | {:error, term}
def delete_all(store, opts \\ [])
@doc """
Copy a file to a new location.
## Examples
iex> FileStore.copy(store, "path/foo.txt", "path/bar.txt")
:ok
"""
@spec copy(t(), key(), key()) :: :ok | {:error, term()}
def copy(store, src, dest)
@doc """
Renames a file from one name to another.
**Note**: Some underlying adapters cannot do this atomically.
## Examples
iex> FileStore.rename(store, "path/foo.txt", "path/bar.txt")
:ok
"""
@spec rename(t(), key(), key()) :: :ok | {:error, term()}
def rename(store, src, dest)
@doc """
Get the URL for your file, assuming that the file is publicly accessible.
## Options
* `:content_type` - Force the `Content-Type` of the response.
* `:disposition` - Force the `Content-Disposition` of the response.
## Examples
iex> FileStore.get_public_url(store, "foo")
"https://mybucket.s3-us-east-1.amazonaws.com/foo"
"""
@spec get_public_url(t, key, public_url_opts) :: binary
def get_public_url(store, key, opts \\ [])
@doc """
Generate a signed URL for your file. Any user with this URL should be able
to access the file.
## Options
* `:expires_in` - The number of seconds before the URL expires.
* `:content_type` - Force the `Content-Type` of the response.
* `:disposition` - Force the `Content-Disposition` of the response.
## Examples
iex> FileStore.get_signed_url(store, "foo")
{:ok, "https://s3.amazonaws.com/mybucket/foo?X-AMZ-Expires=3600&..."}
"""
@spec get_signed_url(t, key, signed_url_opts) :: {:ok, binary} | {:error, term}
def get_signed_url(store, key, opts \\ [])
@doc """
List files in the store.
## Options
* `:prefix` - Only return keys matching the given prefix.
## Examples
iex> Enum.to_list(FileStore.list!(store))
["bar", "foo/bar"]
iex> Enum.to_list(FileStore.list!(store, prefix: "foo"))
["foo/bar"]
"""
@spec list!(t, list_opts) :: Enumerable.t()
def list!(store, opts \\ [])
end
|
lib/file_store.ex
| 0.918681
| 0.414573
|
file_store.ex
|
starcoder
|
defmodule Bandit do
@moduledoc """
Bandit is an HTTP server for Plug apps.
As an HTTP server, Bandit's primary goal is to act as 'glue' between client connections managed
by [Thousand Island](https://github.com/mtrudel/thousand_island) and application code defined
via the [Plug API](https://github.com/elixir-plug/plug). As such there really isn't a whole lot
of user-visible surface area to Bandit, and as a consequence the API documentation presented here
is somewhat sparse. This is by design! Bandit is intended to 'just work' in almost all cases;
the only thought users typically have to put into Bandit comes in the choice of which options (if
any) they would like to change when starting a Bandit server. The sparseness of the Bandit API
should not be taken as an indicator of the comprehensiveness or robustness of the project.
## Basic Usage
Usage of Bandit is very straightforward. Assuming you have a Plug module implemented already, you can
host it within Bandit by adding something similar to the following to your application's
`Application.start/2` function:
```elixir
def start(_type, _args) do
children = [
{Bandit, plug: MyApp.MyPlug, scheme: :http, options: [port: 4000]}
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
```
## Writing Plug Applications
For details about writing Plug based applications, consult the excellent [Plug
documentation](https://hexdocs.pm/plug/) for plenty of examples & tips to get started. Note that
while Bandit supports the complete Plug API & should work correctly with any Plug-based
application you may write, it does not currently support Phoenix applications due to our lack of
support for WebSocket connections. Early support for Phoenix will be coming to Bandit in the
0.4.x release series (likely Q4'21), with full support landing in the 0.7.x release series
(likely Q1'22).
## Config Options
Bandit takes a number of options at startup:
* `plug`: The plug to handle connections. Can be specified as `MyPlug` or `{MyPlug, plug_opts}`
* `scheme`: One of `:http` or `:https`. If `:https` is specified, you will need
to specify `certfile` and `keyfile` in the `transport_options` subsection of `options`.
* `read_timeout`: How long to wait for data from the client before timing out and closing the
connection, specified in milliseconds. Defaults to 60_000
* `options`: Options to pass to `ThousandIsland`. For an exhaustive list of options see the
`ThousandIsland` documentation, however some common options are:
* `port`: The port to bind to. Defaults to 4000
* `num_acceptors`: The number of acceptor processes to run. This is mostly a performance
tuning knob and can usually be left at the default value of 10
* `transport_module`: The name of the module which provides basic socket functions.
This overrides any value set for `scheme` and is intended for cases where control
over the socket at a fundamental level is needed.
* `transport_options`: A keyword list of options to be passed into the transport socket's listen function
## Setting up an HTTPS Server
By far the most common stumbling block encountered with configuration involves setting up an
HTTPS server. Bandit is comparatively easy to set up in this regard, with a working example
looking similar to the following:
```elixir
def start(_type, _args) do
bandit_options = [
port: 4000,
transport_options: [
certfile: Path.join(__DIR__, "path/to/cert.pem"),
keyfile: Path.join(__DIR__, "path/to/key.pem")
]
]
children = [
{Bandit, plug: MyApp.MyPlug, scheme: :https, options: bandit_options}
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
```
"""
@typedoc "A Plug definition"
@type plug :: {module(), keyword()}
@spec child_spec(keyword()) :: Supervisor.child_spec()
def child_spec(arg) do
{options, illegal_options} =
arg
|> Keyword.get(:options, [])
|> Keyword.split(~w(port num_acceptors transport_module transport_options)a)
if illegal_options != [] do
raise "Unsupported option(s) in Bandit config: #{inspect(illegal_options)}"
end
{transport_module, extra_transport_options} =
case Keyword.get(arg, :scheme, :http) do
:http -> {ThousandIsland.Transports.TCP, []}
:https -> {ThousandIsland.Transports.SSL, alpn_preferred_protocols: ["h2", "http/1.1"]}
end
read_timeout = Keyword.get(arg, :read_timeout, 60_000)
options =
options
|> Keyword.put_new(:transport_module, transport_module)
|> Keyword.update(
:transport_options,
extra_transport_options,
&Keyword.merge(&1, extra_transport_options)
)
|> Keyword.put(:handler_module, Bandit.DelegatingHandler)
|> Keyword.put(:handler_options, %{
plug: plug(arg),
handler_module: Bandit.InitialHandler,
read_timeout: read_timeout
})
%{id: Bandit, start: {ThousandIsland, :start_link, [options]}}
end
defp plug(arg) do
arg
|> Keyword.fetch!(:plug)
|> case do
{plug, plug_options} -> {plug, plug.init(plug_options)}
plug -> {plug, plug.init([])}
end
end
end
|
lib/bandit.ex
| 0.884139
| 0.915469
|
bandit.ex
|
starcoder
|
defmodule Ecto.Adapters.Riak.Util do
@type entity :: Ecto.Entity.t
## ----------------------------------------------------------------------
## Search Schema and Buckets
## ----------------------------------------------------------------------
@doc """
Returns the bucket name for an Ecto Model.
Each bucket should only store one type of Ecto Model.
"""
@spec bucket(atom | entity) :: binary
def bucket(x) when is_atom(x) do
if function_exported?(x, :__model__, 1) do
x.__model__(:source)
else
nil
end
end
def bucket(entity) do
bucket(entity.model)
end
@doc """
Returns the search index for an Ecto Model.
Each model should have its own search index.
"""
@spec search_index(atom) :: binary
def search_index(model) do
bucket(model)
end
@doc """
Returns the name of the default Yokozuna search index
which comes pre-built in Riak 2.0pre5 and later
"""
def default_search_schema(), do: "_yz_default"
def default_bucket_type(), do: "ecto_riak"
## ----------------------------------------------------------------------
## Misc Helpers
## ----------------------------------------------------------------------
## Turns anything that implements
## the String.Chars protocol into an atom
@spec to_atom(term) :: atom
def to_atom(x) when is_atom(x), do: x
def to_atom(x) do
try do
to_string(x) |> binary_to_existing_atom
catch
## case where there isn't an existing atom
_,_ -> to_string(x) |> binary_to_atom
end
end
## ----------------------------------------------------------------------
## Entity Helpers
## ----------------------------------------------------------------------
@doc """
Returns a keyword list of all fields of an entity.
"""
@spec entity_keyword(entity) :: Keyword.t
def entity_keyword(entity) do
elem(entity, 0).__entity__(:keywords, entity, primary_key: true)
end
@doc """
Returns the type of a specified entity field.
"""
@spec entity_field_type(entity, atom) :: atom
def entity_field_type(entity, field) do
elem(entity, 0).__entity__(:field_type, field)
end
@spec entity_name_from_model(binary | atom) :: atom
def entity_name_from_model(name) when is_binary(name) or is_atom(name) do
name_str = to_string(name)
suffix = ".Entity"
if String.ends_with?(name_str, suffix) do
name |> to_atom
else
(name_str <> suffix) |> to_atom
end
end
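# For illustration (a sketch; assumes old-style Ecto models where each
# model module has a nested Entity module):
#
#     entity_name_from_model(MyApp.User)          #=> MyApp.User.Entity
#     entity_name_from_model("Elixir.MyApp.User") #=> MyApp.User.Entity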
@doc """
Checks if an entity passes a basic Riak Entity validation.
(has the minimum of information to be persisted/migrated on Riak)
Returns an empty list if there are no validation errors.
"""
@spec entity_validate(entity) :: [] | [term]
def entity_validate(entity) do
version =
try do
if is_integer(entity.version) && entity.version >= 0 do
[]
else
[version: "version must be a non-negative integer"]
end
rescue x -> [version: x.message] end
id =
try do
if is_binary(entity.id) do
[]
else
[id: "ID must be a globally unique string"]
end
rescue x -> [id: x.message] end
version ++ id
end
end
|
lib/ecto/adapters/riak/util.ex
| 0.722821
| 0.455804
|
util.ex
|
starcoder
|
defmodule ExPng.Chunks.ImageData do
@moduledoc """
Stores the raw data of an IDAT image data chunk from a PNG image.
Since PNG images can be encoded with the image data split between multiple
IDAT chunks to allow generating an image in a streaming manner, `merge/1`
provides support for merging multiple chunks into one before being fully
decoded by `ExPng`.
"""
use ExPng.Constants
alias ExPng.{Chunks.Header, Color, Image}
alias ExPng.Image.{Filtering, Pixelation}
import ExPng.Utilities, only: [reduce_to_binary: 1]
@type t :: %__MODULE__{
data: binary,
type: :IDAT
}
defstruct [:data, type: :IDAT]
@doc """
Returns a new `ImageData` struct with the provided data.
"""
@spec new(:IDAT, binary) :: {:ok, __MODULE__.t()}
def new(:IDAT, data) do
{:ok, %__MODULE__{data: data}}
end
@doc """
Merges a list of ImageData chunks into one
"""
@spec merge([__MODULE__.t(), ...]) :: __MODULE__.t()
def merge([%__MODULE__{} | _] = data_chunks) do
data =
data_chunks
|> Enum.map(& &1.data)
|> reduce_to_binary()
|> inflate()
|> reduce_to_binary()
%__MODULE__{data: data}
end
@behaviour ExPng.Encodeable
@impl true
def to_bytes(%__MODULE__{data: data}, encoding_options \\ []) do
compression = Keyword.get(encoding_options, :compression, 6)
data = deflate(data, compression)
length = byte_size(data)
type = <<73, 68, 65, 84>>
crc = :erlang.crc32([type, data])
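# PNG chunk wire format: 4-byte big-endian payload length, 4-byte chunk
# type ("IDAT"), the compressed payload, then a CRC32 over type + payload.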
<<length::32>> <> type <> data <> <<crc::32>>
end
@doc """
Takes an image, a header, and optionally, a filter type and palette,
and returns an `ImageData` struct containing the image data translated into
a bytestring.
"""
@spec from_pixels(
ExPng.maybe(Image.t()),
Header.t(),
ExPng.maybe(ExPng.filter()),
ExPng.maybe(Image.row())
) :: __MODULE__.t()
def from_pixels(image, header, filter_type \\ @filter_none, palette \\ nil)
def from_pixels(nil, _, _, _), do: %__MODULE__{data: nil}
def from_pixels(image, header, filter_type, palette) do
lines =
Enum.map(image.pixels, fn line ->
Task.async(fn ->
Pixelation.from_pixels(line, header.bit_depth, header.color_mode, palette)
end)
end)
|> Enum.map(fn task ->
Task.await(task)
end)
pixel_size = Color.pixel_bytesize(header.color_mode, header.bit_depth)
data = apply_filter(lines, pixel_size, filter_type)
%__MODULE__{data: data}
end
## PRIVATE
defp apply_filter([head | _] = lines, pixel_size, filter_type) do
pad =
Stream.cycle([<<0>>])
|> Enum.take(byte_size(head))
|> Enum.reduce(&Kernel.<>/2)
Enum.chunk_every([pad | lines], 2, 1, :discard)
|> Enum.map(fn [prev, line] ->
Filtering.apply_filter({filter_type, line}, pixel_size, prev)
end)
|> Enum.map(fn line -> <<filter_type>> <> line end)
|> reduce_to_binary()
end
defp inflate(data) do
zstream = :zlib.open()
:zlib.inflateInit(zstream)
inflated_data = :zlib.inflate(zstream, data)
:zlib.inflateEnd(zstream)
:zlib.close(zstream)
inflated_data
end
defp deflate(data, compression) do
zstream = :zlib.open()
:zlib.deflateInit(zstream, compression)
deflated_data = :zlib.deflate(zstream, data, :finish)
:zlib.deflateEnd(zstream)
:zlib.close(zstream)
deflated_data
|> reduce_to_binary()
end
end
|
lib/ex_png/chunks/image_data.ex
| 0.907035
| 0.796015
|
image_data.ex
|
starcoder
|
defmodule Okta do
@moduledoc """
This library provides an Elixir API for accessing the [Okta Developer APIs](https://developer.okta.com/docs/reference/).
Currently implemented are:
* [Users API](https://developer.okta.com/docs/reference/api/users/)
* [Groups API](https://developer.okta.com/docs/reference/api/groups/)
* [Apps API](https://developer.okta.com/docs/reference/api/apps/)
* [Trusted Origins API](https://developer.okta.com/docs/reference/api/trusted-origins/)
* [Event Hook Handler](https://developer.okta.com/docs/concepts/event-hooks/)
The API access uses the [Tesla](https://github.com/teamon/tesla) library and
relies on the caller passing in an Okta base URL and an API Key to create a
client. The client is then passed into all API calls.
The API returns a 3-element tuple. If the API HTTP status code is less
than 300 (i.e. succeeded), it returns `:ok`, the HTTP body as a map, and the
full Tesla Env in case you need more data about the response. If the API HTTP
status code is 300 or greater, it returns `:error`, the HTTP body, and the
Tesla Env. If the API doesn't return at all, it returns `:error`, a blank
map, and the error from Tesla.
client = Okta.client("https://dev-000000.okta.com", "thisismykeycreatedinokta")
profile = %{
firstName: "test",
lastName: "user",
}
case Okta.Users.create_user(client, profile) do
{:ok, %{"id" => id, "status" => status}, _env} ->
update_user(%{okta_id: id, okta_status: status})
{:error, %{"errorSummary" => errorSummary}, _env} ->
Logger.error(errorSummary)
end
## Installation
If [available in Hex](https://hex.pm/docs/publish), the package can be
installed by adding `okta` to your list of dependencies in `mix.exs`:
def deps do
[
{:okta_api, "~> 0.1.14"},
]
end
Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at [https://hexdocs.pm/okta_api](https://hexdocs.pm/okta_api).
"""
@type client() :: Tesla.Client.t()
@type result() :: {:ok, map() | String.t(), Tesla.Env.t()} | {:error, map(), any}
@spec client(String.t(), String.t()) :: client()
def client(base_url, api_key) do
middleware = [
{Tesla.Middleware.BaseUrl, base_url},
Tesla.Middleware.JSON,
{Tesla.Middleware.Headers, [{"authorization", "SSWS " <> api_key}]}
]
Tesla.client(middleware, adapter())
end
@spec result({:ok, Tesla.Env.t()}) :: result()
def result({:ok, %{status: status, body: body} = env}) when status < 300 do
{:ok, body, env}
end
def result({:ok, %{status: status, body: body} = env}) when status >= 300 do
{:error, body, env}
end
@spec result({:error, any}) :: result()
def result({:error, any}), do: {:error, %{}, any}
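# The Tesla adapter is configurable; for example (a sketch, assuming the
# chosen adapter's dependency is installed):
#
#     config :okta_api, tesla: [adapter: Tesla.Adapter.Mint]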
@doc false
def adapter do
case Application.get_env(:okta_api, :tesla) do
nil -> {Tesla.Adapter.Hackney, [recv_timeout: 30_000]}
tesla -> tesla[:adapter]
end
end
end
|
lib/okta.ex
| 0.811788
| 0.756582
|
okta.ex
|
starcoder
|
defmodule QLib.LeasedQueue do
@moduledoc """
LeasedQueue is a simple, lease-based abstraction around state.
The items stored in a LeasedQueue have a limited lifetime: the queue sets a
lease time (60 seconds by default) which is used to automatically
remove expired items.
An item expires when it has been in the queue for longer than the lease time,
counted from its processing time, i.e. the moment the item was
stored in the queue.
A LeasedQueue guarantees that a pop call never returns an expired item.
## Examples
iex(1)> {:ok, q} = QLib.LeasedQueue.new 5_000
{:ok, #PID<0.152.0>}
iex(2)> QLib.LeasedQueue.push(q, 1)
:ok
iex(3)> :timer.sleep(1_000)
:ok
iex(4)> QLib.LeasedQueue.push(q, 2)
:ok
iex(5)> :timer.sleep(5_000)
:ok
iex(6)> QLib.LeasedQueue.size(q)
0
iex(7)> QLib.LeasedQueue.push(q, 3)
:ok
iex(8)> QLib.LeasedQueue.push(q, 4)
:ok
iex(9)> QLib.LeasedQueue.size(q)
2
iex(10)> :timer.sleep(5_000)
:ok
iex(11)> QLib.LeasedQueue.size(q)
0
iex(12)> QLib.LeasedQueue.destroy(q)
:ok
"""
@doc """
Creates a new empty LeasedQueue using the `lease` value as lease time.
"""
def new(lease \\ 60_000) do
GenServer.start_link(QLib.LeasedServer, lease)
end
@doc """
Destroy the LeasedQueue.
"""
def destroy(queue) do
GenServer.stop(queue)
end
@doc """
Inserts the `item` in the LeasedQueue.
"""
def push(queue, item) do
GenServer.cast(queue, {:push, item})
end
@doc """
Removes and return the first item in the LeasedQueue.
"""
def pop(queue, timeout \\ 5000) do
GenServer.call(queue, :pop, timeout)
end
@doc """
Removes all the items in the LeasedQueue.
"""
def clear(queue) do
GenServer.cast(queue, :clear)
end
@doc """
Returns the size of the LeasedQueue.
"""
def size(queue, timeout \\ 5000) do
GenServer.call(queue, :size, timeout)
end
end
|
lib/leased_queue.ex
| 0.735737
| 0.427158
|
leased_queue.ex
|
starcoder
|
defmodule Backchain do
import KB
import Utils
@doc """
Applies backward chaining (reasoning) on a query (phrased as a predicate).
If the query subject(s) contain only constants, returns true or false.
E.g.,
Query: man(socrates) -> true
Query: mortal(hypatia) -> true
If the query subject(s) contain variables, returns all solutions,
i.e. lists of constants for which the query holds.
E.g.,
Query: man(X) -> [["sartre"], ["socrates"]]
Query: mortal(X) -> [["sartre"], ["socrates"], ["beauvoir"], ["hypatia"]]
Query: city_of(X, canada) -> [["toronto", "canada"], ["montreal", "canada"], ...]
"""
def backchain(kb, query) do
{:predicate, word, subjects} = query
if not query_valid?(kb, query) do
:invalid_query
else
if not includes_variable?(subjects) do
# Query only contains constants (no variables) - no substitutions required
bc(kb, [query])
else
# Query contains variables - search for constants that make query true
if matches_fact?(kb, word) do
# Query is a fact
backchain_fact(kb, word)
else
# Query is a rule
backchain_rule(kb, word)
end
end
end
end
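# For illustration, against a knowledge base holding the facts from the
# moduledoc (a sketch; `kb` is built via the KB module):
#
#     backchain(kb, {:predicate, "man", ["socrates"]})
#     #=> true
#     backchain(kb, {:predicate, "man", ["X"]})
#     #=> [["sartre"], ["socrates"]]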
def backchain_fact(kb, word) do
List.foldl(
possible_subjects(kb, word),
[],
fn(subjects, solutions) ->
if backchain(kb, {:predicate, word, subjects}) do
solutions ++ [subjects]
else
solutions
end
end
)
end
def backchain_rule(kb, word) do
List.foldl(
lookup_rule(kb, word),
[],
fn({:rule, _, antecedents}, solutions) ->
List.foldl(
antecedents,
solutions,
fn(antecedent, solutions) -> solutions ++ backchain(kb, antecedent) end
)
end
)
end
def bc(kb, stack) do
if Stack.empty?(stack) do
# Base case: all goals satisfied
true
else
{goal, stack} = Stack.pop(stack)
if kb_fact?(kb, {:fact, goal}) do
# Current goal satisfied, test the rest
bc(kb, stack)
else
# If no facts match, go through matching rules and test their antecedents
results = List.foldl(
matching_rules(kb, goal),
[],
fn({:rule, {:predicate, _, rule_subjects}, rule_body}, results) ->
{:predicate, _, goal_subjects} = goal
{_, matches} = unify(rule_subjects, goal_subjects)
# For each match, replace all instances of the variable with the constant
antecedents = List.foldl(
matches,
rule_body,
fn({var, const}, antecedents) ->
replace_vars_with_const(antecedents, var, const)
end
)
stack = Stack.push_multiple(stack, antecedents)
[bc(kb, stack) | results]
end
)
Enum.any?(results)
end
end
end
@doc """
Checks if the subject is a constant (e.g. "socrates").
"""
def constant?(subject), do: starts_with_lowercase?(subject)
@doc """
Checks if the subject is a variable (e.g. "Person").
"""
def variable?(subject), do: starts_with_uppercase?(subject)
@doc """
Checks if the given subjects includes a variable.
"""
def includes_variable?(subjects), do: Enum.any?(subjects, &variable?/1)
@doc """
Checks if the query is valid, i.e. matches a fact or rule in the knowledge base.
"""
def query_valid?(kb, {:predicate, word, _}) do
matches_fact?(kb, word) || matches_rule?(kb, word)
end
end
|
lib/backchain.ex
| 0.68658
| 0.4831
|
backchain.ex
|
starcoder
|
defmodule AntlUtilsElixir.DateTime.Comparison do
@moduledoc """
Little wrapper around DateTime
"""
@doc ~S"""
Returns whether datetime1 is greater than datetime2
## Examples
iex> Comparison.gt?(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"))
false
iex> Comparison.gt?(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
false
iex> Comparison.gt?(DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
true
"""
@spec gt?(DateTime.t(), DateTime.t()) :: boolean()
def gt?(%DateTime{} = dt1, %DateTime{} = dt2) do
DateTime.compare(dt1, dt2) == :gt
end
@doc ~S"""
Returns whether datetime1 is greater than or equal to datetime2
## Examples
iex> Comparison.gte?(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"))
false
iex> Comparison.gte?(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
true
iex> Comparison.gte?(DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
true
"""
@spec gte?(DateTime.t(), DateTime.t()) :: boolean()
def gte?(%DateTime{} = dt1, %DateTime{} = dt2) do
DateTime.compare(dt1, dt2) in [:eq, :gt]
end
@doc ~S"""
Returns whether datetime1 is less than datetime2
## Examples
iex> Comparison.lt?(DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
false
iex> Comparison.lt?(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
false
iex> Comparison.lt?(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"))
true
"""
@spec lt?(DateTime.t(), DateTime.t()) :: boolean()
def lt?(%DateTime{} = dt1, %DateTime{} = dt2) do
DateTime.compare(dt1, dt2) == :lt
end
@doc ~S"""
Returns whether datetime1 is less than or equal to datetime2
## Examples
iex> Comparison.lte?(DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
false
iex> Comparison.lte?(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
true
iex> Comparison.lte?(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"))
true
"""
@spec lte?(DateTime.t(), DateTime.t()) :: boolean()
def lte?(%DateTime{} = dt1, %DateTime{} = dt2) do
DateTime.compare(dt1, dt2) in [:eq, :lt]
end
@doc ~S"""
Returns the min date between datetime1 and datetime2
## Examples
iex> Comparison.min(DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC")
iex> Comparison.min(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC")
iex> Comparison.min(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"))
DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC")
"""
@spec min(DateTime.t(), DateTime.t()) :: DateTime.t()
def min(%DateTime{} = dt1, %DateTime{} = dt2) do
if lt?(dt1, dt2) do
dt1
else
dt2
end
end
@doc ~S"""
Returns the max date between datetime1 and datetime2
## Examples
iex> Comparison.max(DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC")
iex> Comparison.max(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"))
DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC")
iex> Comparison.max(DateTime.from_naive!(~N[2018-01-01 00:00:00], "Etc/UTC"), DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC"))
DateTime.from_naive!(~N[2018-01-02 00:00:00], "Etc/UTC")
"""
@spec max(DateTime.t(), DateTime.t()) :: DateTime.t()
def max(%DateTime{} = dt1, %DateTime{} = dt2) do
if gt?(dt1, dt2) do
dt1
else
dt2
end
end
end
|
lib/datetime/comparison.ex
| 0.842102
| 0.411673
|
comparison.ex
|
starcoder
|
defmodule ReactPhoenix.ClientSide do
@moduledoc """
Functions to make rendering React components easy in Phoenix views.
Combined with the javascript also included in this package, rendering React
components in your Phoenix views can be much easier. The module was built
with Brunch in mind (vs Webpack). Since Phoenix uses Brunch by default, this
package can make getting React into your application much faster than
switching over to a different system.
"""
import Phoenix.HTML.Tag
@doc """
Generate a div containing the named React component with no props or options.
Returns safe html: `{:safe, [60, "div", ...]}`.
You can utilize this in your Phoenix views:
```
<%= ReactPhoenix.ClientSide.react_component("MyComponent") %>
```
The resulting `<div>` tag is formatted specifically for the included javascript
helper to then turn into your named React component.
"""
@spec react_component(name :: String.t()) :: Phoenix.HTML.safe()
def react_component(name), do: react_component(name, %{})
@doc """
Generate a div containing the named React component and pass it props.
Returns safe html: `{:safe, [60, "div", ...]}`.
Props can be passed in as a Map or a List.
You can utilize this in your Phoenix views:
```
<%= ReactPhoenix.ClientSide.react_component("MyComponent", %{language: "elixir", awesome: true}) %>
```
The resulting `<div>` tag is formatted specifically for the included javascript
helper to then turn into your named React component and then pass in the props specified.
"""
@spec react_component(name :: String.t(), props :: list | map) :: Phoenix.HTML.safe()
def react_component(name, props) when is_list(props) do
react_component(name, Enum.into(props, %{}))
end
def react_component(name, props) when is_map(props) do
props = Jason.encode!(props)
content_tag(:div, "", [{:data, [react_class: name, react_props: props]}])
end
@doc """
Generate a div containing the named React component and pass it props and options.
Returns safe html: `{:safe, [60, "div", ...]}`.
For now, props MUST be passed in as a Map. The only option currently accepted is `target_id`.
If you pass in a `target_id`, the resulting `<div>` tag will tell the javascript helper
which HTML element you'd like to render the React component into. This is helpful in scenarios
like server-side rendering of a component.
You can utilize this in your Phoenix views:
```
<%= ReactPhoenix.ClientSide.react_component(
"MyComponent", # <- component name
%{language: "elixir", awesome: true}, # <- props
target_id: "react-div" # <- options
) %>
```
The resulting `<div>` tag is formatted specifically for the included javascript
helper to then turn into your named React component and then pass in the props specified.
"""
@spec react_component(name :: String.t(), props :: map, opts :: [target_id: String.t()]) ::
Phoenix.HTML.safe()
def react_component(name, props, opts) when is_map(props) do
props = Jason.encode!(props)
content_tag(:div, "", [
{:data, [react_class: name, react_props: props, react_target_id: opts[:target_id]]}
])
end
end
|
lib/react_phoenix/client_side.ex
| 0.87339
| 0.790692
|
client_side.ex
|
starcoder
|
defmodule AWS.ElasticLoadBalancingv2 do
@moduledoc """
Elastic Load Balancing
A load balancer distributes incoming traffic across targets, such as your EC2
instances.
This enables you to increase the availability of your application. The load
balancer also monitors the health of its registered targets and ensures that it
routes traffic only to healthy targets. You configure your load balancer to
accept incoming traffic by specifying one or more listeners, which are
configured with a protocol and port number for connections from clients to the
load balancer. You configure a target group with a protocol and port number for
connections from the load balancer to the targets, and with health check
settings to be used when checking the health status of the targets.
Elastic Load Balancing supports the following types of load balancers:
Application Load Balancers, Network Load Balancers, Gateway Load Balancers, and
Classic Load Balancers. This reference covers the following load balancer types:
* Application Load Balancer - Operates at the application layer
(layer 7) and supports HTTP and HTTPS.
* Network Load Balancer - Operates at the transport layer (layer 4)
and supports TCP, TLS, and UDP.
* Gateway Load Balancer - Operates at the network layer (layer 3).
For more information, see the [Elastic Load Balancing User Guide](https://docs.aws.amazon.com/elasticloadbalancing/latest/userguide/).
All Elastic Load Balancing operations are idempotent, which means that they
complete at most one time. If you repeat an operation, it succeeds.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "Elastic Load Balancing v2",
api_version: "2015-12-01",
content_type: "application/x-www-form-urlencoded",
credential_scope: nil,
endpoint_prefix: "elasticloadbalancing",
global?: false,
protocol: "query",
service_id: "Elastic Load Balancing v2",
signature_version: "v4",
signing_name: "elasticloadbalancing",
target_prefix: nil
}
end
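# For illustration, calling one of the generated operations below (a sketch;
# the credentials and region are placeholders):
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#     {:ok, result, _http_response} =
#       AWS.ElasticLoadBalancingv2.describe_load_balancers(client, %{})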
@doc """
Adds the specified SSL server certificate to the certificate list for the
specified HTTPS or TLS listener.
If the certificate is already in the certificate list, the call is successful
but the certificate is not added again.
For more information, see [HTTPS listeners](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/create-https-listener.html)
in the *Application Load Balancers Guide* or [TLS listeners](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/create-tls-listener.html)
in the *Network Load Balancers Guide*.
"""
def add_listener_certificates(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddListenerCertificates", input, options)
end
@doc """
Adds the specified tags to the specified Elastic Load Balancing resource.
You can tag your Application Load Balancers, Network Load Balancers, Gateway
Load Balancers, target groups, listeners, and rules.
Each tag consists of a key and an optional value. If a resource already has a
tag with the same key, `AddTags` updates its value.
"""
def add_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "AddTags", input, options)
end
@doc """
Creates a listener for the specified Application Load Balancer, Network Load
Balancer, or Gateway Load Balancer.
For more information, see the following:
* [Listeners for your Application Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-listeners.html)
* [Listeners for your Network Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-listeners.html)
* [Listeners for your Gateway Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/gateway/gateway-listeners.html)
This operation is idempotent, which means that it completes at most one time. If
you attempt to create multiple listeners with the same settings, each call
succeeds.
"""
def create_listener(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateListener", input, options)
end
@doc """
Creates an Application Load Balancer, Network Load Balancer, or Gateway Load
Balancer.
For more information, see the following:
* [Application Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html)
* [Network Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/network-load-balancers.html)
* [Gateway Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/gateway/gateway-load-balancers.html)
This operation is idempotent, which means that it completes at most one time. If
you attempt to create multiple load balancers with the same settings, each call
succeeds.
"""
def create_load_balancer(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateLoadBalancer", input, options)
end
@doc """
Creates a rule for the specified listener.
The listener must be associated with an Application Load Balancer.
Each rule consists of a priority, one or more actions, and one or more
conditions. Rules are evaluated in priority order, from the lowest value to the
highest value. When the conditions for a rule are met, its actions are
performed. If the conditions for no rules are met, the actions for the default
rule are performed. For more information, see [Listener rules](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-listeners.html#listener-rules)
in the *Application Load Balancers Guide*.
"""
def create_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateRule", input, options)
end
@doc """
Creates a target group.
For more information, see the following:
* [Target groups for your Application Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-target-groups.html)
* [Target groups for your Network Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html)
* [Target groups for your Gateway Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/gateway/target-groups.html)
This operation is idempotent, which means that it completes at most one time. If
you attempt to create multiple target groups with the same settings, each call
succeeds.
"""
def create_target_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateTargetGroup", input, options)
end
@doc """
Deletes the specified listener.
Alternatively, your listener is deleted when you delete the load balancer to
which it is attached.
"""
def delete_listener(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteListener", input, options)
end
@doc """
Deletes the specified Application Load Balancer, Network Load Balancer, or
Gateway Load Balancer.
Deleting a load balancer also deletes its listeners.
You can't delete a load balancer if deletion protection is enabled. If the load
balancer does not exist or has already been deleted, the call succeeds.
Deleting a load balancer does not affect its registered targets. For example,
your EC2 instances continue to run and are still registered to their target
groups. If you no longer need these EC2 instances, you can stop or terminate
them.
"""
def delete_load_balancer(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteLoadBalancer", input, options)
end
@doc """
Deletes the specified rule.
You can't delete the default rule.
"""
def delete_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRule", input, options)
end
@doc """
Deletes the specified target group.
You can delete a target group if it is not referenced by any actions. Deleting a
target group also deletes any associated health checks. Deleting a target group
does not affect its registered targets. For example, any EC2 instances continue
to run until you stop or terminate them.
"""
def delete_target_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteTargetGroup", input, options)
end
@doc """
Deregisters the specified targets from the specified target group.
After the targets are deregistered, they no longer receive traffic from the load
balancer.
"""
def deregister_targets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeregisterTargets", input, options)
end
@doc """
Describes the current Elastic Load Balancing resource limits for your AWS
account.
For more information, see the following:
* [Quotas for your Application Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-limits.html)
* [Quotas for your Network Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-limits.html)
* [Quotas for your Gateway Load Balancers](https://docs.aws.amazon.com/elasticloadbalancing/latest/gateway/quotas-limits.html)
"""
def describe_account_limits(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeAccountLimits", input, options)
end
@doc """
Describes the default certificate and the certificate list for the specified
HTTPS or TLS listener.
If the default certificate is also in the certificate list, it appears twice in
the results (once with `IsDefault` set to true and once with `IsDefault` set to
false).
For more information, see [SSL certificates](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/create-https-listener.html#https-listener-certificates)
in the *Application Load Balancers Guide* or [Server certificates](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/create-tls-listener.html#tls-listener-certificate)
in the *Network Load Balancers Guide*.
"""
def describe_listener_certificates(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeListenerCertificates", input, options)
end
@doc """
Describes the specified listeners or the listeners for the specified Application
Load Balancer, Network Load Balancer, or Gateway Load Balancer.
You must specify either a load balancer or one or more listeners.
"""
def describe_listeners(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeListeners", input, options)
end
@doc """
Describes the attributes for the specified Application Load Balancer, Network
Load Balancer, or Gateway Load Balancer.
For more information, see the following:
* [Load balancer attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#load-balancer-attributes)
in the *Application Load Balancers Guide*
* [Load balancer attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/network-load-balancers.html#load-balancer-attributes)
in the *Network Load Balancers Guide*
* [Load balancer attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/gateway/gateway-load-balancers.html#load-balancer-attributes)
in the *Gateway Load Balancers Guide*
"""
def describe_load_balancer_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeLoadBalancerAttributes", input, options)
end
@doc """
Describes the specified load balancers or all of your load balancers.
"""
def describe_load_balancers(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeLoadBalancers", input, options)
end
@doc """
Describes the specified rules or the rules for the specified listener.
You must specify either a listener or one or more rules.
"""
def describe_rules(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRules", input, options)
end
@doc """
Describes the specified policies or all policies used for SSL negotiation.
For more information, see [Security policies](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/create-https-listener.html#describe-ssl-policies)
in the *Application Load Balancers Guide* or [Security policies](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/create-tls-listener.html#describe-ssl-policies)
in the *Network Load Balancers Guide*.
"""
def describe_ssl_policies(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeSSLPolicies", input, options)
end
@doc """
Describes the tags for the specified Elastic Load Balancing resources.
You can describe the tags for one or more Application Load Balancers, Network
Load Balancers, Gateway Load Balancers, target groups, listeners, or rules.
"""
def describe_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTags", input, options)
end
@doc """
Describes the attributes for the specified target group.
For more information, see the following:
* [Target group attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/application/load-balancer-target-groups.html#target-group-attributes)
in the *Application Load Balancers Guide*
* [Target group attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/network/load-balancer-target-groups.html#target-group-attributes)
in the *Network Load Balancers Guide*
* [Target group attributes](https://docs.aws.amazon.com/elasticloadbalancing/latest/gateway/target-groups.html#target-group-attributes)
in the *Gateway Load Balancers Guide*
"""
def describe_target_group_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTargetGroupAttributes", input, options)
end
@doc """
Describes the specified target groups or all of your target groups.
By default, all target groups are described. Alternatively, you can specify one
of the following to filter the results: the ARN of the load balancer, the names
of one or more target groups, or the ARNs of one or more target groups.
"""
def describe_target_groups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTargetGroups", input, options)
end
@doc """
Describes the health of the specified targets or all of your targets.
"""
def describe_target_health(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeTargetHealth", input, options)
end
@doc """
Replaces the specified properties of the specified listener.
Any properties that you do not specify remain unchanged.
Changing the protocol from HTTPS to HTTP, or from TLS to TCP, removes the
security policy and default certificate properties. If you change the protocol
from HTTP to HTTPS, or from TCP to TLS, you must add the security policy and
default certificate properties.
To add an item to a list, remove an item from a list, or update an item in a
list, you must provide the entire list. For example, to add an action, specify a
list with the current actions plus the new action.
"""
def modify_listener(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ModifyListener", input, options)
end
@doc """
Modifies the specified attributes of the specified Application Load Balancer,
Network Load Balancer, or Gateway Load Balancer.
If any of the specified attributes can't be modified as requested, the call
fails. Any existing attributes that you do not modify retain their current
values.
"""
def modify_load_balancer_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ModifyLoadBalancerAttributes", input, options)
end
@doc """
Replaces the specified properties of the specified rule.
Any properties that you do not specify are unchanged.
To add an item to a list, remove an item from a list, or update an item in a
list, you must provide the entire list. For example, to add an action, specify a
list with the current actions plus the new action.
"""
def modify_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ModifyRule", input, options)
end
@doc """
Modifies the health checks used when evaluating the health state of the targets
in the specified target group.
"""
def modify_target_group(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ModifyTargetGroup", input, options)
end
@doc """
Modifies the specified attributes of the specified target group.
"""
def modify_target_group_attributes(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ModifyTargetGroupAttributes", input, options)
end
@doc """
Registers the specified targets with the specified target group.
If the target is an EC2 instance, it must be in the `running` state when you
register it.
By default, the load balancer routes requests to registered targets using the
protocol and port for the target group. Alternatively, you can override the port
for a target when you register it. You can register each EC2 instance or IP
address with the same target group multiple times using different ports.
With a Network Load Balancer, you cannot register instances by instance ID if
they have the following instance types: C1, CC1, CC2, CG1, CG2, CR1, CS1, G1,
G2, HI1, HS1, M1, M2, M3, and T1. You can register instances of these types by
IP address.
"""
def register_targets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RegisterTargets", input, options)
end
@doc """
Removes the specified certificate from the certificate list for the specified
HTTPS or TLS listener.
"""
def remove_listener_certificates(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveListenerCertificates", input, options)
end
@doc """
Removes the specified tags from the specified Elastic Load Balancing resources.
You can remove the tags for one or more Application Load Balancers, Network Load
Balancers, Gateway Load Balancers, target groups, listeners, or rules.
"""
def remove_tags(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveTags", input, options)
end
@doc """
Sets the type of IP addresses used by the subnets of the specified Application
Load Balancer or Network Load Balancer.
"""
def set_ip_address_type(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetIpAddressType", input, options)
end
@doc """
Sets the priorities of the specified rules.
You can reorder the rules as long as there are no priority conflicts in the new
order. Any existing rules that you do not specify retain their current priority.
"""
def set_rule_priorities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetRulePriorities", input, options)
end
@doc """
Associates the specified security groups with the specified Application Load
Balancer.
The specified security groups override the previously associated security
groups.
You can't specify a security group for a Network Load Balancer or Gateway Load
Balancer.
"""
def set_security_groups(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetSecurityGroups", input, options)
end
@doc """
Enables the Availability Zones for the specified public subnets for the
specified Application Load Balancer or Network Load Balancer.
The specified subnets replace the previously enabled subnets.
When you specify subnets for a Network Load Balancer, you must include all
subnets that were enabled previously, with their existing configurations, plus
any additional subnets.
"""
def set_subnets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SetSubnets", input, options)
end
end
|
lib/aws/generated/elastic_load_balancingv2.ex
| 0.899449
| 0.703822
|
elastic_load_balancingv2.ex
|
starcoder
|
defmodule Exquisitle.Impl.Game do
alias Exquisitle.Impl.Game.{Guess, Tally}
alias Exquisitle.Type
@type t :: %__MODULE__{
state: Type.state(),
guessed_words: list(Type.hints()),
absent_letters: MapSet.t(String.t()),
present_letters: MapSet.t(String.t()),
correct_letters: MapSet.t(String.t()),
answers: MapSet.t(String.t()),
dictionary: MapSet.t(String.t())
}
defstruct state: :initialized,
guessed_words: [],
absent_letters: MapSet.new(),
present_letters: MapSet.new(),
correct_letters: MapSet.new(),
answers: MapSet.new(),
dictionary: MapSet.new()
@spec new_easy :: t()
def new_easy do
answer = Enum.random(Dictionary.common_words())
words = Dictionary.all_words()
%__MODULE__{
answers: MapSet.new([answer]),
dictionary: words
}
end
@spec new_hard :: t()
def new_hard do
answers = Dictionary.common_words() |> MapSet.new()
words = Dictionary.all_words()
%__MODULE__{
answers: answers,
dictionary: words
}
end
@spec make_move(t(), term()) :: {t(), Type.tally()}
def make_move(game, guess) do
game
|> Guess.make_guess(guess)
|> update_game(game)
|> Tally.call()
end
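# Typical flow (a sketch; the accepted guess shape is defined by
# Guess.make_guess/2, a word string is assumed here):
#
#     game = Exquisitle.Impl.Game.new_easy()
#     {game, tally} = Exquisitle.Impl.Game.make_move(game, "crane")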
defp update_game({:noop, _, _}, game), do: game
defp update_game({:bad_guess, _, _}, game), do: %{game | state: :bad_guess}
defp update_game({:good_guess, guess_with_hints, answers}, game) do
hints = Enum.group_by(guess_with_hints, &List.last(&1), &List.first(&1))
game
|> Map.put(:answers, answers)
|> Map.update(:guessed_words, [], &(&1 ++ [guess_with_hints]))
|> Map.update(:absent_letters, [], &update_letters(&1, hints[:absent]))
|> Map.update(:present_letters, [], &update_letters(&1, hints[:present]))
|> Map.update(:correct_letters, [], &update_letters(&1, hints[:correct]))
|> maybe_won(guess_with_hints)
end
defp update_letters(current, nil), do: current
defp update_letters(current, values), do: MapSet.union(current, MapSet.new(values))
defp maybe_won(game, hints) do
if Enum.all?(hints, &(List.last(&1) == :correct)) do
%{game | state: :won}
else
%{game | state: :good_guess}
end
end
end
|
apps/exquisitle/lib/impl/game.ex
| 0.600774
| 0.476275
|
game.ex
|
starcoder
|
defmodule Gig.Recipe.RefreshEvents do
@moduledoc """
This recipe takes a location id
and returns a list of Songkick events close to the
specified location.
"""
use Recipe
alias Gig.{Event,
Songkick.ApiClient}
# Set up a rate limit of 60 calls per minute
@rate_limit_scale 60_000
@rate_limit 60
@type metro_area :: pos_integer
@type step :: :check_rate_limit
| :fetch_data
| :parse_metro_area
| :parse_events
| :parse_artists
| :queue_releases
| :store_location
| :store_events
@type assigns :: %{coords: {ApiClient.lat, ApiClient.lng},
response: map,
metro_area: metro_area,
artists: list,
events: [Event.t]}
@type state :: %Recipe{assigns: assigns}
@type success :: {metro_area, [Event.t]}
@doc false
@spec steps :: [step]
def steps, do: [:check_rate_limit,
:fetch_data,
:parse_metro_area,
:parse_events,
:parse_artists,
:queue_releases,
:store_location,
:store_events]
@doc false
@spec handle_result(state) :: success
def handle_result(state) do
{state.assigns.metro_area, state.assigns.events}
end
@doc false
@spec handle_error(step, term, state) :: term
def handle_error(_step, error, _state), do: error
@doc """
Given lat and lng, returns a list of Songkick events
"""
@spec run(ApiClient.lat, ApiClient.lng) :: {:ok, success}
| {:error, term}
def run(lat, lng) do
Gig.Recipe.run(__MODULE__, initial_state(lat, lng))
end
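# For example (a sketch; the coordinates are placeholders):
#
#     {:ok, {metro_area, events}} = Gig.Recipe.RefreshEvents.run(51.5, -0.12)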
@doc """
Returns the initial state for the recipe.
"""
@spec initial_state(ApiClient.lat, ApiClient.lng) :: state
def initial_state(lat, lng) do
Recipe.initial_state()
|> Recipe.assign(:coords, {lat, lng})
end
@doc false
@spec check_rate_limit(state) :: {:ok, state} | {:error, {:rate_limit_reached, pos_integer}}
def check_rate_limit(state) do
case ExRated.check_rate(__MODULE__, @rate_limit_scale, @rate_limit) do
{:ok, _} ->
{:ok, state}
{:error, limit} ->
{:error, {:rate_limit_reached, limit}}
end
end
@doc false
@spec fetch_data(state) :: {:ok, state} | {:error, term}
def fetch_data(state) do
{lat, lng} = state.assigns.coords
case ApiClient.get_events(lat, lng) do
{:ok, response} ->
{:ok, Recipe.assign(state, :response, response)}
error ->
error
end
end
@doc false
@spec parse_metro_area(state) :: {:ok, state}
def parse_metro_area(state) do
metro_area = get_in(state.assigns.response, ["resultsPage",
"clientLocation",
"metroAreaId"])
{:ok, Recipe.assign(state, :metro_area, metro_area)}
end
@doc false
@spec parse_events(state) :: {:ok, state}
def parse_events(state) do
events = state.assigns.response
|> get_in(["resultsPage", "results"])
|> Map.get("event", [])
|> Enum.map(&Event.from_api_response/1)
{:ok, Recipe.assign(state, :events, events)}
end
@doc false
@spec parse_artists(state) :: {:ok, state}
def parse_artists(state) do
artists = state.assigns.events
|> Enum.flat_map(fn(e) -> e.artists end)
|> Enum.uniq
{:ok, Recipe.assign(state, :artists, artists)}
end
@doc false
@spec queue_releases(state) :: {:ok, state}
def queue_releases(state) do
state.assigns.artists
|> Enum.filter(fn(a) -> a.mbid end)
|> Enum.each(fn(a) ->
case Gig.Store.find(Gig.Store.Release, a.mbid) do
{:ok, _artist} ->
Gig.Store.extend(Gig.Store.Release, a.mbid)
_error ->
Gig.Release.Throttle.queue(a.mbid)
end
end)
{:ok, state}
end
@doc false
@spec store_location(state) :: {:ok, state}
def store_location(state) do
%{coords: coords,
metro_area: metro_area,
events: events} = state.assigns
event_ids = Enum.map(events, fn(e) -> e.id end)
location = Gig.Location.new(coords, metro_area, event_ids)
true = Gig.Store.save(Gig.Store.Location, location, coords)
{:ok, state}
end
@doc false
@spec store_events(state) :: {:ok, state}
def store_events(state) do
true = Gig.Store.save(Gig.Store.Event, state.assigns.events)
{:ok, state}
end
end
|
lib/gig/recipe/refresh_events.ex
| 0.729616
| 0.421135
|
refresh_events.ex
|
starcoder
|
defmodule ResponseSnapshot do
@moduledoc """
ResponseSnapshot is a testing tool for Elixir that captures the output of responses
and ensures that they do not change in between test runs. The output is saved to disk,
meant to be checked into source control. This output can be used by frontend and other tests
to ensure proper integration between frontend and backend code, or to ensure that endpoint
responses do not change over time.
## Usage
The most basic usage is a simple call to `store_and_compare!/2`, as such:
```
response_json
|> ResponseSnapshot.store_and_compare!(path: "test/location/i/want/output.json")
```
This will cause the output to be written to disk the first time, and then compared
using exact match in all future tests.
## Options
* path - The path of the fixture on disk
* mode - The comparison mode of the diff algorithm. Values must be: :exact, :keys
* ignored_keys - Keys to ignore during comparison. Can be exact or wildcard matches
## Application Config
In addition to being able to set configuration for each call, certain configuration
can be achieved using the Application module. The following options are available:
* path_base - The base of the path that all fixture paths will be relative to
* mode - Same as mode option
* ignored_keys - Same as the ignored_keys option; the lists are combined
Option values passed into the `store_and_compare!/2` function are used over the
Application config values.
## Comparison Modes
The `store_and_compare!/2` interface has 2 different modes, exact and keys. The `:exact`
mode is default and requires both key and value of the comparison to match the stored
snapshot. The `:keys` mode requires only the keys of the comparison to match the stored
snapshot. This can be useful in testing that the shape of an endpoint doesn't change
over time, without worrying about the test input.
## Ignored Keys
It is possible to ignore keys that will change between test runs. This is most common
for dynamic fields such as ids, timestamps, etc. Ignored keys can be specified via an exact
string comparison, or a wildcard-like implementation.
```
response_json
|> ResponseSnapshot.store_and_compare!(path: path, ignored_keys: ["exact.example", {"partial", :any_nesting}])
```
The `exact.example` key only matches the exact JSON path `exact -> example`. The `partial` key
allows for matches such as "partial", "partial.nested", or "nested.partial".
Ignored keys will only ignore value changes, not key additions or removals. This is
due to an addition or removal affecting the output shape, which would go against the
goals of this library.
"""
alias ResponseSnapshot.{Changes, Config, Diff, FileManager, SnapshotMismatchError}
@doc """
Compares the data to an existing fixture, following the mode and ignored keys.
If the fixture doesn't exist, it will be written and no assertions will occur.
"""
@spec store_and_compare!(any(), Keyword.t()) :: any()
def store_and_compare!(data, opts) do
path = Config.get_path(opts)
mode = Config.get_mode(opts)
ignored_keys = Config.get_ignored_keys(opts)
case FileManager.fixture_exists?(path) do
true ->
compare_existing_fixture(data, path: path, mode: mode, ignored_keys: ignored_keys)
false ->
FileManager.write_fixture(path, data: data)
end
data
end
defp compare_existing_fixture(data, path: path, mode: mode, ignored_keys: ignored_keys) do
%{"data" => existing_data} = FileManager.read_fixture(path)
changes =
Diff.compare(data, existing_data)
|> adjust_changes_for_mode(mode)
|> adjust_changes_for_ignored_keys(ignored_keys)
case changes == Changes.empty() do
true ->
:ok
false ->
raise SnapshotMismatchError,
path: path,
changes: changes,
existing_data: existing_data,
new_data: data
end
end
defp adjust_changes_for_mode(changes, :exact), do: changes
defp adjust_changes_for_mode(changes, :keys), do: changes |> Changes.clear(:modifications)
defp adjust_changes_for_ignored_keys(changes, ignored_keys) when is_list(ignored_keys) do
changes
|> remove_ignored_keys_from_changes(:modifications, ignored_keys)
end
defp remove_ignored_keys_from_changes(changes, field, ignored_keys) do
modified_list =
Map.get(changes, field)
|> Enum.reject(fn path ->
Enum.find(ignored_keys, fn ignored_key ->
ignored_key_matches_path?(ignored_key, path)
end)
end)
Map.put(changes, field, modified_list)
end
defp ignored_key_matches_path?(ignored_key, path) when is_bitstring(ignored_key) do
ignored_key == path
end
defp ignored_key_matches_path?({ignored_key, :any_nesting}, path) when is_bitstring(ignored_key) do
# start of string or . followed by ignored key followed by . or end of string
path =~ Regex.compile!("(^|\\.)(#{ignored_key})(\\.|$)")
end
end
|
lib/response_snapshot.ex
| 0.91938
| 0.896115
|
response_snapshot.ex
|
starcoder
|
defmodule Routeguide.Point do
use Protobuf
@type t :: %__MODULE__{
latitude: integer,
longitude: integer
}
defstruct [:latitude, :longitude]
field :latitude, 1, optional: true, type: :int32
field :longitude, 2, optional: true, type: :int32
end
defmodule Routeguide.Rectangle do
use Protobuf
@type t :: %__MODULE__{
lo: Routeguide.Point.t(),
hi: Routeguide.Point.t()
}
defstruct [:lo, :hi]
field :lo, 1, optional: true, type: Routeguide.Point
field :hi, 2, optional: true, type: Routeguide.Point
end
defmodule Routeguide.Feature do
use Protobuf
@type t :: %__MODULE__{
name: String.t(),
location: Routeguide.Point.t()
}
defstruct [:name, :location]
field :name, 1, optional: true, type: :string
field :location, 2, optional: true, type: Routeguide.Point
end
defmodule Routeguide.RouteNote do
use Protobuf
@type t :: %__MODULE__{
location: Routeguide.Point.t(),
message: String.t()
}
defstruct [:location, :message]
field :location, 1, optional: true, type: Routeguide.Point
field :message, 2, optional: true, type: :string
end
defmodule Routeguide.RouteSummary do
use Protobuf
@type t :: %__MODULE__{
point_count: integer,
feature_count: integer,
distance: integer,
elapsed_time: integer
}
defstruct [:point_count, :feature_count, :distance, :elapsed_time]
field :point_count, 1, optional: true, type: :int32
field :feature_count, 2, optional: true, type: :int32
field :distance, 3, optional: true, type: :int32
field :elapsed_time, 4, optional: true, type: :int32
end
defmodule Routeguide.RouteGuide.Service do
use GRPC.Service, name: "routeguide.RouteGuide"
rpc :GetFeature, Routeguide.Point, Routeguide.Feature
rpc :ListFeatures, Routeguide.Rectangle, stream(Routeguide.Feature)
rpc :RecordRoute, stream(Routeguide.Point), Routeguide.RouteSummary
rpc :RouteChat, stream(Routeguide.RouteNote), stream(Routeguide.RouteNote)
rpc :AsyncRouteChat, stream(Routeguide.RouteNote), stream(Routeguide.RouteNote)
end
defmodule Routeguide.RouteGuide.Stub do
use GRPC.Stub, service: Routeguide.RouteGuide.Service
end
|
test/support/route_guide.pb.ex
| 0.737347
| 0.498779
|
route_guide.pb.ex
|
starcoder
|
defmodule BitPal.Crypto.Base32 do
@moduledoc """
This module implements the Base32 scheme used in various places in various
cryptocurrencies. Note that this is *not* the standard Base32 encoding, the
alphabet used here is different.
In addition to Base32 encoding/decoding, we also provide checksums using the
polymod function usually used in Bitcoin/Bitcoin Cash if desired. It is
convenient to provide this functionality alongside the encoding/decoding since
checksumming is done on the 5-bit chunks rather than 8-bit chunks.
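## Example
A minimal round trip using the polymod checksum (the input bytes are
illustrative):
    encoded = BitPal.Crypto.Base32.encode(<<1, 2, 3>>, :polymod)
    <<1, 2, 3>> = BitPal.Crypto.Base32.decode(encoded, :polymod)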
"""
use Bitwise
@doc """
Decode a base32-string into a binary representation. No checksumming.
"""
def decode(data) do
from_5bit(from_ascii(data))
end
@doc """
Decode a base32-string into a binary representation. Use checksum as specified:
- :none - no checksum
- :polymod - default for Bitcoin Cash
"""
def decode(data, checksum) do
decode(data, checksum, insert: <<>>)
end
@doc """
Decode a base32-string into a binary representation with the given checksum.
There are two variants: either provide a prefix that is expected to be found
in the beginning of "data" (prefix:), or assume that some data was already in
the beginning (insert:). Neither of the two is included in the decoded data.
Returns: :error on failure.
"""
def decode(data, checksum, prefix: prefix) do
prefix_size = byte_size(prefix)
<<first::binary-size(prefix_size), rest::binary>> = data
if first == prefix do
decode(rest, checksum, insert: prefix)
else
:error
end
end
def decode(data, checksum, insert: insert) do
data = from_ascii(data)
s = byte_size(data) - hash_size(checksum)
if s <= 0 do
:error
else
<<payload::binary-size(s), hash::binary>> = data
if hash_message(checksum, insert <> payload) == hash do
from_5bit(payload)
else
:error
end
end
end
@doc """
Encode some data into a base32-string. No checksum is appended.
"""
def encode(data) do
to_ascii(to_5bit(data))
end
@doc """
Encode data into a base32-string with some checksum:
- :none - no checksum
- :polymod - default for Bitcoin Cash
"""
def encode(data, checksum) do
encode(data, checksum, insert: <<>>)
end
@doc """
Encode data into a base32-string with the given checksum.
There are two variants: either provide a prefix that is prepended to the output
and included in the checksum (prefix:), or assume that some data was already in
the beginning (insert:), in which case it is included in the checksum but not
in the output.
"""
def encode(data, checksum, prefix: prefix) do
prefix <> encode(data, checksum, insert: prefix)
end
def encode(data, checksum, insert: insert) do
data = to_5bit(data)
to_ascii(data <> hash_message(checksum, insert <> data))
end
# Hash size (base32 encoded size)
def hash_size(:none), do: 0
def hash_size(:polymod), do: 8
# Hash data.
def hash_message(:none, _message) do
<<>>
end
def hash_message(:polymod, message) do
checksum = polymod(message <> <<0, 0, 0, 0, 0, 0, 0, 0>>)
to_5bit(<<
checksum >>> (4 * 8) &&& 0xFF,
checksum >>> (3 * 8) &&& 0xFF,
checksum >>> (2 * 8) &&& 0xFF,
checksum >>> (1 * 8) &&& 0xFF,
checksum >>> (0 * 8) &&& 0xFF
>>)
end
@doc """
Decode base32 into 5-bit numbers. This is a semi low-level operation, but it
is useful as a building block for other primitives in case the standard
polymod checksum is not suitable for some reason.
"""
def from_ascii(binary) do
binary
|> :binary.bin_to_list()
|> Enum.map(fn x -> digit_to_num(x) end)
|> :binary.list_to_bin()
end
@doc """
Convert a binary of 5-bit numbers to a binary with 8-bit numbers. Raises an error on non-zero padding.
"""
def from_5bit(binary) do
from_5bit_i(binary, 0, 0)
end
# Helper to 'from_5bit'
defp from_5bit_i(numbers, bits_from_prev, val_from_prev) do
case numbers do
<<first, rest::binary>> ->
bits = bits_from_prev + 5
val = (val_from_prev <<< 5) + first
if bits >= 8 do
here = val >>> (bits - 8)
<<here>> <> from_5bit_i(rest, bits - 8, val &&& (1 <<< (bits - 8)) - 1)
else
from_5bit_i(rest, bits, val)
end
<<>> ->
# Check so that the padding is zero!
if val_from_prev != 0 do
raise("Invalid base32 data! Padding must be zero: " <> inspect(val_from_prev))
end
<<>>
end
end
@doc """
Convert a sequence of 8-bit values to a sequence of 5-bit values. Inserts padding as necessary.
"""
def to_5bit(binary) do
to_5bit_i(binary, 0, 0)
end
# Helper
defp to_5bit_i(binary, spare_bits, spare_count) do
# Shave off any bits that are too large.
spare_bits = spare_bits &&& (1 <<< spare_count) - 1
case binary do
<<first, rest::binary>> ->
spare_bits = spare_bits <<< 8 ||| first
spare_count = spare_count + 8
if spare_count >= 10 do
insert = spare_bits >>> (spare_count - 10)
<<insert >>> 5, insert &&& 0x1F>> <> to_5bit_i(rest, spare_bits, spare_count - 10)
else
insert = spare_bits >>> (spare_count - 5)
<<insert>> <> to_5bit_i(rest, spare_bits, spare_count - 5)
end
<<>> ->
# Add padding if needed.
if spare_count > 0 do
<<spare_bits <<< (5 - spare_count)>>
else
<<>>
end
end
end
@doc """
Encode a sequence of 5-bit numbers into the base32 alphabet.
"""
def to_ascii(binary) do
binary
|> :binary.bin_to_list()
|> Enum.map(fn x -> num_to_digit(x) end)
|> :binary.list_to_bin()
end
@doc """
Compute the checksum typically used in Bitcoin/Bitcoin Cash (Polymod).
From here: https://www.bitcoincash.org/spec/cashaddr.html
Operates on a binary of 5-bit integers and returns a 64-bit integer.
"""
def polymod(binary) do
polymod_i(binary, 1)
end
# Helper for the polymod function.
defp polymod_i(binary, c) do
case binary do
<<first, rest::binary>> ->
c0 = c >>> 35 &&& 0xFF
# Note: We support binaries containing something other than 5-bit numbers by
# slicing anything higher than 0x1F off.
c = bxor((c &&& 0x07FFFFFFFF) <<< 5, first &&& 0x1F)
c = if (c0 &&& 0x01) != 0, do: bxor(c, 0x98F2BC8E61), else: c
c = if (c0 &&& 0x02) != 0, do: bxor(c, 0x79B76D99E2), else: c
c = if (c0 &&& 0x04) != 0, do: bxor(c, 0xF33E5FB3C4), else: c
c = if (c0 &&& 0x08) != 0, do: bxor(c, 0xAE2EABE2A8), else: c
c = if (c0 &&& 0x10) != 0, do: bxor(c, 0x1E4F43E470), else: c
polymod_i(rest, c)
<<>> ->
bxor(c, 1)
end
end
# Decode a single base32 digit as specified by Bitcoin and Bitcoin Cash.
defp digit_to_num(value) do
case value do
?q -> 0
?p -> 1
?z -> 2
?r -> 3
?y -> 4
?9 -> 5
?x -> 6
?8 -> 7
?g -> 8
?f -> 9
?2 -> 10
?t -> 11
?v -> 12
?d -> 13
?w -> 14
?0 -> 15
?s -> 16
?3 -> 17
?j -> 18
?n -> 19
?5 -> 20
?4 -> 21
?k -> 22
?h -> 23
?c -> 24
?e -> 25
?6 -> 26
?m -> 27
?u -> 28
?a -> 29
?7 -> 30
?l -> 31
end
end
# Encode a single base32 digit as specified by Bitcoin and Bitcoin Cash.
defp num_to_digit(value) do
case value do
0 -> ?q
1 -> ?p
2 -> ?z
3 -> ?r
4 -> ?y
5 -> ?9
6 -> ?x
7 -> ?8
8 -> ?g
9 -> ?f
10 -> ?2
11 -> ?t
12 -> ?v
13 -> ?d
14 -> ?w
15 -> ?0
16 -> ?s
17 -> ?3
18 -> ?j
19 -> ?n
20 -> ?5
21 -> ?4
22 -> ?k
23 -> ?h
24 -> ?c
25 -> ?e
26 -> ?6
27 -> ?m
28 -> ?u
29 -> ?a
30 -> ?7
31 -> ?l
end
end
end
|
lib/bitpal/crypto/base32.ex
| 0.754373
| 0.590277
|
base32.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.NetworkManagementMultiChannelCapabilityReport do
@moduledoc """
Command to query the capabilities of an individual end point or an aggregated
end point.
Params:
* `:seq_number` - the sequence number for this command
* `:node_id` - the node id that has the end point to query
* `:end_point` - the end point to query
* `:generic_device_class` - the generic device class
* `:specific_device_class` - the specific device class
* `:command_classes` - the command class list
Sending this command to a device with the end point `0` will return
information about the device itself. This is the same as sending the
`Grizzly.ZWave.Commands.NodeInfoCachedGet` command.
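## Example
A minimal sketch with illustrative values; the device classes are omitted
here (they encode as `0`):
    {:ok, command} =
      Grizzly.ZWave.Commands.NetworkManagementMultiChannelCapabilityReport.new(
        seq_number: 0x01,
        node_id: 4,
        end_point: 1,
        generic_device_class: nil,
        specific_device_class: nil,
        command_classes: []
      )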
"""
@behaviour Grizzly.ZWave.Command
alias Grizzly.ZWave
alias Grizzly.ZWave.{Command, CommandClasses, DeviceClasses, NodeId}
alias Grizzly.ZWave.CommandClasses.NetworkManagementProxy
@type param() ::
{:seq_number, ZWave.seq_number()}
| {:node_id, ZWave.node_id()}
| {:end_point, 0..127}
| {:generic_device_class, DeviceClasses.generic_device_class()}
| {:specific_device_class, DeviceClasses.specific_device_class()}
| {:command_classes, list()}
@impl Grizzly.ZWave.Command
@spec new([param()]) :: {:ok, Command.t()}
def new(params \\ []) do
command = %Command{
name: :network_management_multi_channel_capability_report,
command_byte: 0x08,
command_class: NetworkManagementProxy,
params: params,
impl: __MODULE__
}
{:ok, command}
end
@impl Grizzly.ZWave.Command
def encode_params(command, encode_opts \\ []) do
seq_number = Command.param!(command, :seq_number)
node_id = Command.param!(command, :node_id)
end_point = Command.param!(command, :end_point)
generic_device_class = Command.param!(command, :generic_device_class)
specific_device_class_byte = encode_specific_device_class(generic_device_class, command)
generic_device_class_byte = encode_generic_device_class(generic_device_class)
cc_binary =
command
|> Command.param!(:command_classes)
|> CommandClasses.command_class_list_to_binary()
case Keyword.get(encode_opts, :command_class_version, 4) do
4 ->
delimiter =
<<cc_list_byte_size(cc_binary), 0::1, end_point::7, generic_device_class_byte,
specific_device_class_byte, cc_binary::binary>>
<<seq_number, NodeId.encode_extended(node_id, delimiter: delimiter)::binary>>
v when v < 4 ->
<<seq_number, NodeId.encode(node_id)::binary, cc_list_byte_size(cc_binary), end_point,
generic_device_class_byte, specific_device_class_byte, cc_binary::binary>>
end
end
defp cc_list_byte_size(<<0>>), do: 0
defp cc_list_byte_size(binary), do: byte_size(binary)
defp encode_generic_device_class(0), do: 0
defp encode_generic_device_class(nil), do: 0
defp encode_generic_device_class(gen_dev_class),
do: DeviceClasses.generic_device_class_to_byte(gen_dev_class)
defp encode_specific_device_class(0, _command), do: 0
defp encode_specific_device_class(nil, _command), do: 0
defp encode_specific_device_class(gen_dev_class, command) do
case Command.param(command, :specific_device_class) do
nil ->
0
0 ->
0
spec_dev_class ->
DeviceClasses.specific_device_class_to_byte(gen_dev_class, spec_dev_class)
end
end
@impl Grizzly.ZWave.Command
def decode_params(<<seq_number, node_id, 0x00, _reserved::1, end_point::7, 0x00, 0x00, 0x00>>) do
{:ok,
[
seq_number: seq_number,
node_id: node_id,
generic_device_class: :unknown,
specific_device_class: :unknown,
command_classes: [],
end_point: end_point
]}
end
def decode_params(<<seq_number, params::binary>>) do
<<_node_id, cc_len, _reserved::1, end_point::7, generic_device_class, specific_device_class,
command_classes::binary-size(cc_len), _rest::binary>> = params
{:ok, generic_device_class} =
DeviceClasses.generic_device_class_from_byte(generic_device_class)
{:ok, specific_device_class} =
DeviceClasses.specific_device_class_from_byte(
generic_device_class,
specific_device_class
)
delimiter_size = cc_len + 4
{:ok,
[
seq_number: seq_number,
node_id: NodeId.parse(params, delimiter_size: delimiter_size),
end_point: end_point,
generic_device_class: generic_device_class,
specific_device_class: specific_device_class,
command_classes: CommandClasses.command_class_list_from_binary(command_classes)
]}
end
end
|
lib/grizzly/zwave/commands/network_management_multi_channel_capability_report.ex
| 0.861829
| 0.441312
|
network_management_multi_channel_capability_report.ex
|
starcoder
|
defmodule MangoPay.Wallet do
@moduledoc """
Functions for MangoPay [wallet](https://docs.mangopay.com/endpoints/v2.01/wallets#e20_the-wallet-object).
"""
use MangoPay.Query.Base
set_path "wallets"
@doc """
Get a wallet.
## Examples
{:ok, wallet} = MangoPay.Wallet.get(id)
"""
def get id do
_get id
end
@doc """
Get a wallet.
## Examples
wallet = MangoPay.Wallet.get!(id)
"""
def get! id do
_get! id
end
@doc """
Create a wallet.
## Examples
params = %{
"Tag": "custom meta",
"Owners": [ "8494514" ],
"Description": "My big project",
"Currency": "EUR"
}
{:ok, wallet} = MangoPay.Wallet.create(params)
"""
def create params do
_create params
end
@doc """
Create a wallet.
## Examples
params = %{
"Tag": "custom meta",
"Owners": [ "8494514" ],
"Description": "My big project",
"Currency": "EUR"
}
wallet = MangoPay.Wallet.create!(params)
"""
def create! params do
_create! params
end
@doc """
Update a wallet.
## Examples
params = %{
"Tag": "custom meta",
"Description": "My big project"
}
{:ok, wallet} = MangoPay.Wallet.update(id, params)
"""
def update id, params do
_update params, id
end
@doc """
Update a wallet.
## Examples
params = %{
"Tag": "custom meta",
"Description": "My big project"
}
wallet = MangoPay.Wallet.update!(id, params)
"""
def update! id, params do
_update! params, id
end
@doc """
List all wallets by user.
## Examples
user_id = Id of a user
query = %{
"Page": 1,
"Per_Page": 25,
"Sort": "CreationDate:DESC"
}
{:ok, wallets} = MangoPay.Wallet.all_by_user(user_id, query)
"""
def all_by_user id, query \\ %{} do
_all [MangoPay.User.path(id), MangoPay.Wallet.path()], query
end
@doc """
List all wallets by user.
## Examples
user_id = Id of a user
query = %{
"Page": 1,
"Per_Page": 25,
"Sort": "CreationDate:DESC"
}
wallets = MangoPay.Wallet.all_by_user!(user_id, query)
"""
def all_by_user! id, query \\ %{} do
_all! [MangoPay.User.path(id), MangoPay.Wallet.path()], query
end
end
|
lib/mango_pay/wallet.ex
| 0.657868
| 0.40295
|
wallet.ex
|
starcoder
|
if Code.ensure_loaded?(Ecto) do
defmodule Dictator.Policies.BelongsTo do
@moduledoc """
Policy definition commonly used in typical `belongs_to` associations.
This policy assumes the users can read (`:show`, `:index`, `:new`,
`:create`) any information but only write (`:edit`, `:update`, `:delete`)
their own.
As an example, in a typical Twitter-like application, a user `has_many`
posts and a post `belongs_to` a user. You can define a policy to let users
manage their own posts but read all others by doing the following:
```
defmodule MyAppWeb.Policies.Post do
alias MyApp.{Post, User}
use Dictator.Policies.EctoSchema, for: Post
def can?(_, action, _) when action in [:index, :show, :new, :create], do: true
def can?(%User{id: id}, action, %{resource: %Post{user_id: id}})
when action in [:edit, :update, :delete],
do: true
def can?(_, _, _), do: false
end
```
This scenario is so common, it is abstracted completely through this module
and you can simply `use Dictator.Policies.BelongsTo, for: Post` to make
use of it. The following example is equivalent to the previous one:
```
defmodule MyAppWeb.Policies.Post do
use Dictator.Policies.BelongsTo, for: MyApp.Post
end
```
## Allowed Options
All options available in `Dictator.Policies.EctoSchema` plus the following:
* `foreign_key`: foreign key of the current user in the resource being
accessed. If a Post belongs to a User, this option would typically be
`:user_id`. Defaults to `:user_id`.
* `owner_key`: primary key of the current user. Defaults to `:id`
## Examples
Assuming a typical `User` schema, with an `:id` primary key, and a typical
`Post` schema, with a `belongs_to` association to a `User`:
```
# lib/my_app_web/policies/post.ex
defmodule MyAppWeb.Policies.Post do
use Dictator.Policies.BelongsTo, for: MyApp.Post
end
```
If, however, the user has a `uuid` primary key and the post has an
`admin_id` key instead of the typical `user_id`, you should do the
following:
```
# lib/my_app_web/policies/post.ex
defmodule MyAppWeb.Policies.Post do
use Dictator.Policies.BelongsTo, for: MyApp.Post, owner_key: :uuid,
foreign_key: :admin_id
end
```
"""
defmacro __using__(opts) do
quote do
use Dictator.Policies.EctoSchema, unquote(opts)
@foreign_key Keyword.get(unquote(opts), :foreign_key, :user_id)
@owner_key Keyword.get(unquote(opts), :owner_key, :id)
alias Dictator.Policy
@impl Policy
def can?(_, action, _) when action in [:index, :show, :new, :create], do: true
@impl Policy
def can?(%{@owner_key => owner_id}, action, %{
resource: %@schema{@foreign_key => owner_id}
})
when action in [:edit, :update, :delete],
do: true
@impl Policy
def can?(_user, _action, _params), do: false
end
end
end
end
|
lib/dictator/policies/belongs_to.ex
| 0.846371
| 0.844985
|
belongs_to.ex
|
starcoder
|
defmodule ExtraEnum.Mailbox do
@moduledoc """
Enumerates against the mailbox by matching.
Can match a single pattern or multiple patterns. Arbitrary code can be
executed when using the multiple pattern syntax, giving feature parity with
`Kernel.SpecialForms.receive/1`.
Stops enumerating when the end of the mailbox is reached. Can be asked to
delay momentarily to allow late messages to filter in.
## Examples
### Single Pattern Match
iex> require ExtraEnum.Mailbox, as: Mailbox
iex> send(self(), {:message, :test1})
{:message, :test1}
iex> send(self(), {:message, :test2})
{:message, :test2}
iex> Mailbox.match({:message, _}) |> Enum.to_list()
[{:message, :test1}, {:message, :test2}]
### Multiple Pattern Match
iex> require ExtraEnum.Mailbox, as: Mailbox
iex> send(self(), {:message, :test1})
{:message, :test1}
iex> send(self(), {:message, :test2})
{:message, :test2}
iex> Mailbox.match do
...> {:message, :test1} -> 1
...> {:message, :test2} -> 2
...> end |> Enum.to_list()
[1, 2]
"""
# === boilerplate ===
# Struct holds a dynamically built receiver function. Must be dynamically
# built to capture environment at call site in its closure.
defstruct [:receiver]
# === Macros ===
@doc """
Create an enumerable that will match a given pattern in the process mailbox.
Accepts as parameters:
* a single pattern as the first argument
* multiple patterns and result values as a `do` block (like
`Kernel.SpecialForms.case/2`)
* a keyword list of options
It's not allowed to use the single and multiple pattern syntax at the same
time.
## Options:
* `delay` - waits a given number of milliseconds before ending enumeration
to allow late messages to trickle in.
"""
defmacro match(arg1, arg2 \\ nil) do
# Arguments move around weirdly due to quirks in default argument handling.
# This seemed to be the easiest way to sort it out.
case {arg1, arg2} do
{[do: multi_pattern], nil} ->
# do block, no opts
build_match_many(multi_pattern, [])
{opts, [do: multi_pattern]} when is_list(opts) ->
# do block, opts
build_match_many(multi_pattern, opts)
{single_pattern, nil} ->
# simple match, no opts
build_match_single(single_pattern, [])
{single_pattern, opts} when is_list(opts) ->
# simple match, opts
build_match_single(single_pattern, opts)
end
end
@doc """
Like `match/2`, but only accepts the single match syntax.
"""
defmacro match_single(single_pattern, opts \\ []) when is_list(opts) do
build_match_single(single_pattern, opts)
end
@doc """
Like `match/2`, but only accepts the multiple match syntax.
"""
defmacro match_many(opts \\ [], do: multi_pattern) when is_list(opts) do
build_match_many(multi_pattern, opts)
end
# === helper functions ===
defp build_match_single(single_pattern, opts) when is_list(opts) do
delay = Keyword.get(opts, :delay, 0)
quote location: :keep do
%unquote(__MODULE__){
receiver: fn timeout_ref ->
receive do
unquote(single_pattern) = msg ->
msg
after
unquote(delay) ->
timeout_ref
end
end
}
end
end
defp build_match_many(multi_pattern, opts) when is_list(opts) do
delay = Keyword.get(opts, :delay, 0)
quote location: :keep do
%unquote(__MODULE__){
receiver: fn timeout_ref ->
receive do
unquote(multi_pattern)
after
unquote(delay) ->
timeout_ref
end
end
}
end
end
# === protocol implementation ===
defimpl Enumerable do
alias ExtraEnum.Mailbox
def reduce(%Mailbox{} = _mailbox, {:halt, acc}, fun) when is_function(fun), do: {:halted, acc}
def reduce(%Mailbox{} = mailbox, {:suspend, acc}, fun) when is_function(fun),
do: {:suspended, acc, &reduce(mailbox, &1, fun)}
def reduce(%Mailbox{} = mailbox, {:cont, acc}, fun) when is_function(fun) do
timeout_ref = make_ref()
case mailbox.receiver.(timeout_ref) do
^timeout_ref ->
{:done, acc}
msg ->
reduce(mailbox, fun.(msg, acc), fun)
end
end
def slice(%Mailbox{}), do: {:error, __MODULE__}
def count(%Mailbox{}), do: {:error, __MODULE__}
def member?(%Mailbox{}, _), do: {:error, __MODULE__}
end
end
|
lib/extra_enum/mailbox.ex
| 0.84497
| 0.474631
|
mailbox.ex
|
starcoder
|
defmodule Calixir.HolidaysTableMaker do
@moduledoc """
This module generates Elixir data from the Calixir-4.0 holidays data.
"""
@doc """
Reads the given CSV file (resolved relative to `./assets/`) and returns the list of holiday data lists.
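## Example
An illustrative call; the file name is hypothetical:
    holiday_dates_from_file("holidays.csv")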
"""
def holiday_dates_from_file(file) do
file
|> check_path
|> File.read!
|> String.trim
|> String.split("\n")
|> tl # ignore the header line
|> Enum.map(&(holiday_date_from_csv(&1)))
end
defp holiday_date_from_csv(csv_string) do
# Transform a csv_string with holiday data into a line of Elixir data.
[holiday_name, csv_function | csv_values] = holiday_line_from_csv(csv_string)
holiday_function = holiday_function_from_csv(csv_function)
holiday_data = holiday_values_from_csv(csv_values)
[holiday_function, holiday_name] ++ holiday_data
end
defp holiday_line_from_csv(csv_line) do
csv_line
|> String.split(",")
|> Enum.map(&(String.trim(&1)))
end
defp holiday_function_from_csv(csv_function) do
csv_function
|> String.replace("-", "_")
|> String.to_atom
end
defp holiday_values_from_csv(csv_values) do
csv_values
|> Enum.map(&(value_from_csv(&1)))
|> Enum.chunk_every(2)
|> Enum.zip(2000..2103)
|> Enum.map(&(holiday_value_from_chunk(&1)))
end
defp holiday_value_from_chunk({[nil, nil], year}) do
{year, 0, 0}
end
defp holiday_value_from_chunk({[month, day], year}) do
{year, month, day}
end
defp value_from_csv(csv_value) do
# Transforms a single csv value into an Elixir value.
s = String.trim(csv_value)
cond do
String.match?(s, ~r/^[0-9+-]+$/) -> String.to_integer(s)
String.match?(s, ~r/^[0-9.+-]+$/) -> String.to_float(s)
s == "f" -> false
s == "t" -> true
s == "none" -> nil
s == "" -> nil
s == "bogus" -> "bogus"
true -> s |> String.downcase |> String.to_atom
end
end
defp check_path(files) when is_list(files) do
# Checks if all the files in the list exist.
# Aborts on error, else returns a list with the full paths.
Enum.map(files, &(check_path(&1)))
end
defp check_path(file) do
# Checks if the file exists.
# Aborts on error, else returns the path.
path = Path.expand("./assets/#{file}")
if not File.exists?(path) do
raise "error: file <#{path}> does not exist"
end
path
end
end
|
lib/calixir/holidays_table_maker.ex
| 0.699357
| 0.472744
|
holidays_table_maker.ex
|
starcoder
|
defmodule Membrane.RTP.MPEGAudio.Depayloader do
@moduledoc """
Parses RTP payloads into parsable MPEG chunks based on [RFC 2038](https://tools.ietf.org/html/rfc2038#section-3.5)
```
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| MBZ | Frag_offset |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
```
MBZ: Must be zero, reserved for future use.
Frag_offset: Byte offset into the audio frame for the data in this packet.
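A payload therefore starts with this 4-byte header. For an unfragmented
frame both fields are zero, e.g. (illustrative):
    <<0::16, 0::16, mpeg_frame::binary>>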
"""
use Membrane.Filter
alias Membrane.{Buffer, RTP, RemoteStream}
alias Membrane.Caps.Audio.MPEG
@default_demand 1
def_input_pad :input, caps: RTP, demand_unit: :buffers
def_output_pad :output, caps: {RemoteStream, content_format: MPEG, type: :packetized}
defmodule State do
@moduledoc false
defstruct [:buffer_size]
@type t :: %State{
buffer_size: pos_integer()
}
end
@impl true
def handle_caps(:input, _caps, _context, state) do
caps = %RemoteStream{content_format: MPEG, type: :packetized}
{{:ok, caps: {:output, caps}}, state}
end
@impl true
def handle_process(:input, buffer, context, %State{buffer_size: nil}) do
buffer_size = byte_size(buffer.payload)
handle_process(:input, buffer, context, %State{buffer_size: buffer_size})
end
def handle_process(:input, buffer, _ctx, state) do
with %Buffer{
payload: <<_mbz::16, _offset::16, depayloaded::binary>>
} <- buffer do
{{:ok, buffer: {:output, %Buffer{buffer | payload: depayloaded}}, redemand: :output}, state}
else
%Buffer{} -> {{:error, :invalid_payload}, state}
end
end
@impl true
def handle_demand(:output, size, :buffers, _ctx, state) do
{{:ok, demand: {:input, size}}, state}
end
def handle_demand(:output, _size, :bytes, _context, %State{buffer_size: nil} = state) do
{{:ok, demand: {:input, @default_demand}}, state}
end
def handle_demand(:output, size, :bytes, _context, %State{buffer_size: buffer_size} = state)
when is_number(buffer_size) do
# The demand in buffers must be large enough to cover the demand in bytes
demand_size =
(size / buffer_size)
|> Float.ceil()
|> trunc()
{{:ok, demand: {:input, demand_size}}, state}
end
end
|
lib/membrane_element_mpeg_audio/depayloader.ex
| 0.766949
| 0.753036
|
depayloader.ex
|
starcoder
|
defmodule Blunt.Data.Factories.Values do
alias Blunt.Data.Factories.Values
@doc """
The `field` of the factory source data will be assigned
the given `value`.
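## Example
An illustrative use inside a factory definition:
    const(:status, :active)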
"""
defmacro const(field, value) do
quote do
%Values.Constant{field: unquote(field), value: unquote(value)}
end
end
@doc """
If the `field` is not supplied to the factory,
the given `message` will be dispatched with the returned
data to be put into the factory source data under the `field` key.
"""
defmacro data(field, message) do
create_data(field, message, [], lazy: false)
end
defmacro data(field, message, do: body) do
values = extract_values(body)
create_data(field, message, values, lazy: false)
end
defmacro data(field, message, opts, do: body) do
values = extract_values(body)
opts = Keyword.put(opts, :lazy, false)
create_data(field, message, values, opts)
end
@doc """
Same as `data`, but the value is resolved lazily.
"""
defmacro lazy_data(field, message) do
create_data(field, message, [], lazy: true)
end
defmacro lazy_data(field, message, do: body) do
values = extract_values(body)
create_data(field, message, values, lazy: true)
end
defmacro lazy_data(field, message, opts, do: body) do
values = extract_values(body)
opts = Keyword.put(opts, :lazy, true)
create_data(field, message, values, opts)
end
defp extract_values({:__block__, _meta, elements}), do: elements
defp extract_values(nil), do: []
defp extract_values(list) when is_list(list), do: list
defp extract_values(element), do: [element]
defp create_data(field, message, values, opts) do
{lazy, opts} = Keyword.pop!(opts, :lazy)
{operation, message, values} = data_props(message, values)
quote do
%Values.Data{
lazy: unquote(lazy),
field: unquote(field),
factory: %{
values: unquote(values),
message: unquote(message),
operation: unquote(operation),
opts: unquote(opts)
}
}
end
end
defp data_props(message, values) do
case message do
{operation, {message, values}} -> {operation, message, values}
{operation, message} -> {operation, message, values}
message -> {:dispatch, message, values}
end
end
@doc """
The `field` of the factory source data will be assigned
the value of `path_func_or_value` in the factory source.
"""
defmacro prop(field, path_func_or_value) do
quote do
%Values.Prop{field: unquote(field), path_func_or_value: unquote(path_func_or_value)}
end
end
@doc """
The `field` of the factory source data will be assigned
the value of `path_func_or_value` in the factory source, resolved lazily.
"""
defmacro lazy_prop(field, path_func_or_value) do
quote do
%Values.Prop{field: unquote(field), path_func_or_value: unquote(path_func_or_value), lazy: true}
end
end
@doc """
Merges a key from input into the current factory data.
This will not overwrite any existing data.
"""
defmacro merge_input(key, opts \\ []) do
quote do
%Values.MergeInput{key: unquote(key), opts: unquote(opts)}
end
end
defmacro map(func) do
quote do
%Values.Mapper{func: unquote(func)}
end
end
defmacro child(field, factory_name) do
quote do
%Values.Build{field: unquote(field), factory_name: unquote(factory_name)}
end
end
defmacro defaults(values) do
quote do
%Values.Defaults{values: Enum.into(unquote(values), %{})}
end
end
defmacro required_prop(field) when is_atom(field) do
quote do
%Values.RequiredProp{field: unquote(field)}
end
end
defmacro remove(fields) when is_atom(fields) or is_list(fields) do
fields = List.wrap(fields)
quote do
%Values.RemoveProp{fields: unquote(fields)}
end
end
defmacro input(props) when is_list(props) do
quote do
%Values.Input{props: unquote(props)}
end
end
defmacro inspect_props(:declared) do
quote do
%Values.InspectProps{props: :declared}
end
end
defmacro inspect_props(props) when is_list(props) do
quote do
%Values.InspectProps{props: unquote(props)}
end
end
defmacro inspect_props do
quote do
%Values.InspectProps{props: :all}
end
end
end
|
apps/blunt_data/lib/blunt/data/factories/values.ex
| 0.77768
| 0.778523
|
values.ex
|
starcoder
|
defmodule Tradehub.Ticker do
@moduledoc """
Enable features to work with tickers endpoints.
"""
import Tradehub.Raising
@typedoc "Candlestick resolution allowed value"
@type resolution :: 1 | 5 | 30 | 60 | 360 | 1440
@doc """
Requests candlesticks for the given market.
## Parameters
- **market**: the market symbol, e.g. `swth_eth1`
- **resolution**: the candlestick period in minutes, possible values are: 1, 5, 30, 60, 360, 1440
- **from**: the start of time range for data in epoch `seconds`
- **to**: the end of time range for data in epoch `seconds`
## Returns
- a list of `Tradehub.candlestick()` as expected
- a string that represents an error
- an error if something goes wrong with the connection
## Examples
iex> Tradehub.Ticker.candlesticks("swth_eth1", 5, 1610203000, 1610203000)
"""
@spec candlesticks(String.t(), resolution(), integer, integer) ::
{:ok, list(Tradehub.candlestick())} | String.t() | {:error, HTTPoison.Error.t()}
@spec candlesticks!(String.t(), resolution(), integer, integer) ::
list(Tradehub.candlestick()) | String.t()
def candlesticks(market, resolution, from, to) do
case Tradehub.get(
"candlesticks",
params: %{
market: market,
resolution: resolution,
from: from,
to: to
}
) do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:candlesticks, market, resolution, from, to)
@doc """
Get recent ticker prices of the given market.
## Returns
- an object of type `Tradehub.ticker_prices()` as expected
- an error if something goes wrong with the connection
## Note
The `GET /get_prices` endpoint is not completely implemented; it always responds with
an object of type `Tradehub.ticker_prices()` even when the market param is invalid
## Examples
iex> Tradehub.Ticker.prices("swth_eth1")
"""
@spec prices(String.t()) :: {:ok, Tradehub.ticker_prices()} | {:error, HTTPoison.Error.t()}
@spec prices!(String.t()) :: Tradehub.ticker_prices()
def prices(market) do
request =
Tradehub.get(
"get_prices",
params: %{market: market}
)
case request do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:prices, market)
@doc """
Requests latest statistics information about the given market or all markets
## Returns
- a list of `Tradehub.market_stats()` as expected
- a string that represents an error
- an error if something goes wrong with the connection
## Examples
iex> Tradehub.Ticker.market_stats
iex> Tradehub.Ticker.market_stats("swth_eth1")
"""
@spec market_stats(nil) ::
{:ok, list(Tradehub.market_stats())} | String.t() | {:error, HTTPoison.Error.t()}
@spec market_stats(String.t()) ::
{:ok, list(Tradehub.market_stats())} | String.t() | {:error, HTTPoison.Error.t()}
@spec market_stats!(String.t()) :: list(Tradehub.market_stats()) | String.t()
def market_stats(market \\ nil) do
request =
Tradehub.get(
"get_market_stats",
params: %{market: market}
)
case request do
{:ok, response} -> {:ok, response.body}
other -> other
end
end
raising(:market_stats)
raising(:market_stats, market)
end
|
lib/tradehub/ticker.ex
| 0.954942
| 0.697274
|
ticker.ex
|
starcoder
|
defmodule Aegis do
@moduledoc """
Lightweight, flexible authorization.
## Example
As an example, suppose your library defines the following resources:
defmodule User do
defstruct [id: nil]
end
defmodule Puppy do
defstruct [id: nil, user_id: nil, hungry: false]
end
defmodule Kitten do
defstruct [id: nil, user_id: nil, hungry: false]
end
If you want to define access to the `Puppy` resource, the first step is to define a
policy for the puppy resource. Maybe something like:
defmodule Puppy.Policy do
@behaviour Aegis.Policy
def authorized?(_user, {:index, _puppy}), do: true
def authorized?(%User{id: id}, {:show, %Puppy{user_id: id}}), do: true
def authorized?(_user, {:show, _puppy}), do: false
end
For the purposes of our example, the puppy policy definition above specifies that:
* any user has access to the "index" page of puppy data, and
* only users who own particular puppies (i.e. the puppy's `user_id` matches the users `id`) will be able to access the data corresponding to their "show" page
With this, we can check for authorization of puppies by making calls to
`Aegis.authorized?/3` with the appropriate arguments:
iex> Aegis.authorized?(%User{id: 1}, {:index, Puppy}, Puppy.Policy)
true
iex> Aegis.authorized?(%User{id: 2}, {:index, Puppy}, Puppy.Policy)
true
iex> Aegis.authorized?(%User{id: 1}, {:show, %Puppy{user_id: 1}}, Puppy.Policy)
true
iex> Aegis.authorized?(%User{id: 1}, {:show, %Puppy{user_id: 2}}, Puppy.Policy)
false
At this point, you may have noticed that we haven't defined a policy
for our `Kitten` resource. As such, if we attempt to check for
authorization, we will receive an error that lets us know that a
corresponding policy wasn't found via a lookup based on the policy naming
convention:
iex> Aegis.authorized?(:user, {:index, Kitten})
** (Aegis.PolicyNotFoundError) Policy not found: Elixir.Kitten.Policy
If we really don't want to define a policy for the `Kitten` resource, one way
we can get around this error is to explicitly pass the policy via which the
kitten resource should be authorized. For the purpose of this example, we'll
just specify that the kitten "index" page can refer to the `Puppy.Policy`:
iex> Aegis.authorized?(:user, {:index, Kitten}, Puppy.Policy)
true
## Configuration
The following configuration options are available for assignment:
* `policy_finder`- Aegis uses this value to determine how it policies are found. Defaults to `Aegis.DefaultPolicyFinder`.
```elixir
config :aegis, :policy_finder, MyPolicyFinder
```
"""
defmodule Accessor do
@moduledoc false
@type t :: pid() | any()
end
defmodule Accessible do
@moduledoc false
@type t :: tuple() | fun()
end
@type accessor :: Accessor.t()
@type accessible :: Accessible.t()
@doc """
Returns `true` if an accessor is authorized to access a resource or perform an
operation. The responsibility for determining whether or not a given resource
or operation is accessible to an accessor is delegated to a policy module.
The policy module for any given authorization check can be explicitly
specified, or, if no policy is provided, an attempt is made to locate
a policy for the accessible via a search based on conventional policy naming.
"""
@spec authorized?(__MODULE__.accessor(), __MODULE__.accessible(), module()) :: boolean
def authorized?(accessor, accessible, policy \\ nil)
def authorized?(accessor, accessible, nil) do
authorized?(accessor, accessible, fetch_policy_module(accessible))
end
def authorized?(accessor, accessible, policy) do
apply(policy, :authorized?, [accessor, accessible])
end
@spec auth_scope(__MODULE__.accessor(), __MODULE__.accessible(), module()) :: list()
def auth_scope(accessor, accessible, policy \\ nil)
def auth_scope(accessor, accessible, nil) do
auth_scope(accessor, accessible, fetch_policy_module(accessible))
end
def auth_scope(accessor, accessible, policy) do
apply(policy, :auth_scope, [accessor, accessible])
end
def policy_finder do
Application.get_env(:aegis, :policy_finder, __MODULE__.DefaultPolicyFinder)
end
defmodule PolicyNotFoundError do
defexception [:message]
end
defp fetch_policy_module(arg) do
case policy_finder().call(arg) do
{:error, nil} -> raise PolicyNotFoundError, "No Policy for nil object"
{:error, mod} -> raise PolicyNotFoundError, "Policy not found: #{mod}"
{:ok, mod} -> mod
end
end
end
|
lib/aegis.ex
| 0.86431
| 0.760117
|
aegis.ex
|
starcoder
|
defmodule AwsExRay.Segment do
@moduledoc ~S"""
This module provides the data structure which represents an X-Ray **segment**.
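## Example
A minimal sketch; `trace` is assumed to be an `AwsExRay.Trace` struct:
    segment =
      trace
      |> AwsExRay.Segment.new("my-app")
      |> AwsExRay.Segment.finish()
    json = AwsExRay.Segment.to_json(segment)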
"""
alias AwsExRay.Config
alias AwsExRay.Segment.Formatter
alias AwsExRay.Trace
alias AwsExRay.Util
alias AwsExRay.Record.HTTPRequest
alias AwsExRay.Record.HTTPResponse
alias AwsExRay.Record.Error
@type t :: %__MODULE__{
id: String.t,
name: String.t,
version: String.t,
trace: Trace.t,
start_time: float,
end_time: float,
annotation: map,
metadata: map,
error: map | nil,
http: map
}
defstruct id: "",
name: "",
version: "",
trace: nil,
start_time: 0.0,
end_time: 0.0,
error: %{},
annotation: %{},
metadata: %{},
http: %{}
@spec new(trace :: Trace.t, name :: String.t) :: t
def new(trace, name) do
%__MODULE__{
id: Util.generate_model_id(),
name: name,
version: Config.service_version(),
trace: trace,
start_time: Util.now(),
error: nil,
end_time: 0.0,
annotation: %{} |> Map.merge(Config.default_annotation),
metadata: %{
tracing_sdk: %{
name: Config.library_name,
version: Config.library_version,
}
} |> Map.merge(Config.default_metadata),
http: %{
request: nil,
response: nil
}
}
end
@spec add_annotations(
seg :: t,
annotations :: map
) :: t
def add_annotations(seg, annotations) do
annotations
|> Enum.reduce(seg, fn {key, value}, seg ->
add_annotation(seg, key, value)
end)
end
@spec add_annotation(
seg :: t,
key :: atom | String.t,
value :: any
) :: t
def add_annotation(seg, key, value) do
annotation = seg.annotation
annotation = Map.put(annotation, key, value)
Map.put(seg, :annotation, annotation)
end
@spec set_http_request(seg :: t, req :: HTTPRequest.t) :: t
def set_http_request(seg, req) do
put_in(seg.http.request, req)
end
@spec set_http_response(seg :: t, res :: HTTPResponse.t) :: t
def set_http_response(seg, res) do
put_in(seg.http.response, res)
end
@spec set_error(seg :: t, error :: Error.t) :: t
def set_error(seg, error) do
Map.put(seg, :error, error)
end
@spec generate_trace_value(seg :: t) :: String.t
def generate_trace_value(seg) do
trace = seg.trace
trace = %{trace|parent: seg.id}
Trace.to_string(trace)
end
@spec sampled?(seg :: t) :: boolean
def sampled?(seg) do
seg.trace.sampled
end
@spec finished?(seg :: t) :: boolean
def finished?(seg) do
seg.end_time > 0
end
@spec finish(seg :: t) :: t
def finish(seg) do
if finished?(seg) do
seg
else
%{seg|end_time: Util.now()}
end
end
@spec to_json(seg :: t) :: String.t
def to_json(seg), do: Formatter.to_json(seg)
end
|
lib/aws_ex_ray/segment.ex
| 0.832134
| 0.404655
|
segment.ex
|
starcoder
|
defmodule Haex.Data.TypeConstructorBuilder do
@moduledoc """
Generates the AST representation of `Haex.Data.TypeConstructor` to return from
the `Haex.data/1` macro.
"""
alias Haex.Ast
alias Haex.Data.DataConstructor
alias Haex.Data.DataConstructorBuilder
alias Haex.Data.TypeConstructor
@spec build(TypeConstructor.t(), [DataConstructor.t()]) :: Macro.output()
def build(%TypeConstructor{name: name} = tc, data_constructors) do
quote do
defmodule unquote(Ast.mod(name)) do
unquote(type_t(tc, data_constructors))
unquote(Enum.map(data_constructors, &DataConstructorBuilder.build/1))
unquote(Enum.map(data_constructors, &build_helper(tc, &1)))
end
end
end
@spec build_helper(TypeConstructor.t(), DataConstructor.t()) :: Macro.output()
def build_helper(
%TypeConstructor{} = tc,
%DataConstructor{record?: false, name: name, params: dc_params} = dc
) do
helper_name = DataConstructor.helper_name(dc)
type_fields = DataConstructorBuilder.type_fields(dc)
args = Macro.generate_arguments(length(dc_params), nil)
helper_type_t = helper_type_t(tc, dc)
helper_when_clause = helper_when_clause(tc, dc)
mod = Ast.mod(name)
quote do
@spec unquote(helper_name)(unquote_splicing(type_fields)) :: unquote(helper_type_t)
when unquote(helper_when_clause)
def unquote(helper_name)(unquote_splicing(args)),
do: unquote(mod).new(unquote_splicing(args))
end
end
def build_helper(
%TypeConstructor{} = tc,
%DataConstructor{record?: true, name: name} = dc
) do
helper_name = DataConstructor.helper_name(dc)
type_fields = DataConstructorBuilder.type_fields(dc)
type_field_names = type_fields |> Enum.map(fn {name, _field} -> name end)
type_field_args =
type_fields
|> Enum.map(fn {name, type} -> quote(do: unquote({name, [], Elixir}) :: unquote(type)) end)
args = Enum.map(type_field_names, fn name -> {name, [], Elixir} end)
helper_type_t = helper_type_t(tc, dc)
helper_when_clause = helper_when_clause(tc, dc)
mod = Ast.mod(name)
quote do
@spec unquote(helper_name)(unquote_splicing(type_field_args)) :: unquote(helper_type_t)
when unquote(helper_when_clause)
def unquote(helper_name)(unquote_splicing(args)),
do: unquote(mod).new(unquote_splicing(args))
end
end
@spec type_t(TypeConstructor.t(), [DataConstructor.t()]) :: Macro.output()
defp type_t(%TypeConstructor{params: params} = tc, data_constructors) do
quoted_params = Enum.map(params, fn param -> {param, [], Elixir} end)
dc_type_ts =
data_constructors
|> Enum.map(&DataConstructorBuilder.qualified_type_t(tc, &1))
|> Ast.or_pipe_join()
quote do
@type t(unquote_splicing(quoted_params)) :: unquote(dc_type_ts)
end
end
@spec helper_type_t(TypeConstructor.t(), DataConstructor.t()) :: Macro.output()
defp helper_type_t(%TypeConstructor{params: params}, %DataConstructor{} = dc) do
quoted_params =
params
|> Enum.map(&underscore_unused_param(&1, dc))
|> Enum.map(fn param -> {param, [], Elixir} end)
quote do
t(unquote_splicing(quoted_params))
end
end
defp helper_when_clause(%TypeConstructor{params: params}, %DataConstructor{} = dc) do
Enum.map(params, fn param -> {underscore_unused_param(param, dc), {:var, [], Elixir}} end)
end
defp underscore_unused_param(param, %DataConstructor{} = dc) do
if DataConstructor.has_variable?(dc, param) do
param
else
:"_#{param}"
end
end
end
|
lib/haex/data/type_constructor_builder.ex
| 0.74055
| 0.465266
|
type_constructor_builder.ex
|
starcoder
|
defmodule Lab42.SimpleStateMachine do
use Lab42.SimpleStateMachine.Types
@moduledoc """
## A simple state machine.
`SimpleStateMachine` is a minimalistic approach to write _State Machines_ which operate on a list of inputs.
The machine is defined by a map, called the `transition_map`, which maps each state to its transitions.
%{ start: [ transition, ... ],
some_state: [transition, ...] }
### Format of a Transition
`{trigger, transition_fn, new_state}` or, if the state is not supposed to change, `{trigger, transition_fn}`
For each input in the list of inputs the machine will try to match the input with the `trigger` from the transition.
If such a match is found the `transition_fn` is called with a `%SimpleStateMachine.Match` struct which will give access
to the following fields:
%SimpleStateMachine.Match{
input: "The element from the input list",
data: "Injected value when the state machine is run, like an accumulator in Enum.reduce",
matched: "A value depending on what kind of trigger was used"}
The return value of the `transition_fn` will be injected into the `Match` struct's `data:` field for the next
loop.
#### Types of triggers
* Function triggers
...are the most versatile triggers; when a function trigger matches an input it returns a truthy
value that is passed into the `Match` struct's `matched:` field.
* Regex triggers
...can, obviously, only be used with `String` inputs. The result of `Regex.run(trigger, input)` is passed into
the `Match` struct's `matched:` field.
* `true` trigger
... always matches; `true` is passed into the `matched:` field.
* `:end` trigger
... never matches; however, its associated `transition_fn` is called, and its result will be the result
of the machine's run. See also the `end:` state below.
#### Special States
Two states are special in the sense that their names are fixed.
* `:start` state
Is defined like any other state but is the machine's initial `current_state`. It must therefore be
present in the `transition_map`.
* `:end` state
Can only have one `transition_fn` which is invoked at the end of the input list.
#### Reserved States
* `:halt` state
No transition definitions for this state will ever be read. If the `current_state` of the machine becomes
the `:halt` state, it stops and returns the `Match` struct's `data:` field.
No `:end` state or trigger treatment is performed.
* `:error` state and states starting with `:_`
Reserved for future use.
### Some Detailed Examples
Let's start with a single state machine.
iex(0)> parse_and_add = fn(string, data) ->
...(0)> {n, _} = Integer.parse(string)
...(0)> %{data|sum: data.sum + n} end
...(0)> add_error = fn(%{input: input, data: data}) ->
...(0)> %{data|errors: [input|data.errors]} end
...(0)> states = %{
...(0)> start: [
...(0)> {~r(\\d+), fn %{matched: [d], data: data} -> parse_and_add.(d, data) end},
...(0)> {true, add_error},
...(0)> ],
...(0)> end: fn %{data: %{errors: errors, sum: sum}} -> {sum, Enum.reverse(errors)} end }
...(0)> run(~w{12 error 30 incorrect}, %{sum: 0, errors: []}, states)
{42, ~w(error incorrect)}
If the data is initially nil it need not be passed into `run`, and if the `transition_fn` is a nop, it can be designated
by `nil`.
iex(1)> states = %{
...(1)> start: [
...(1)> { ~r{(\\d+)}, fn %{matched: [_, d]} -> d end, :halt },
...(1)> { true, nil } ]}
...(1)> run(~w{ hello 42 84 }, states)
"42"
The difference between `:halt` and `:end` can be demonstrated with these slightly modified machines
iex(2)> sm1 = %{
...(2)> start: [
...(2)> { ~r{(\\d+)}, fn %{matched: [_, d]} -> d end, :halt },
...(2)> { true, nil } ],
...(2)> end: fn %{data: x} -> {n, _} = Integer.parse(x); n end }
...(2)> sm2 = %{
...(2)> start: [
...(2)> { ~r{(\\d+)}, fn %{matched: [_, d]} -> d end, :end },
...(2)> { true, nil } ],
...(2)> end: fn %{data: x} -> {n, _} = Integer.parse(x); n end }
...(2)> { run(~w{ hello 42 84 }, sm1), run(~w{ hello 42 84 }, sm2) }
{"42", 42}
So far we have only seen `Regex` and `true` triggers, the next example uses function triggers
iex(3)> odd? = &(rem(&1, 2) == 1)
...(3)> states = %{
...(3)> start: [
...(3)> {odd?, fn %{input: n, data: sum} -> sum + n end},
...(3)> {true} ] }
...(3)> run(1..6|>Enum.into([]), 0, states)
9
Some might suggest that the `{true}` transition should be a default, but we prefer to raise an error
if no transition matches
iex(4)> odd? = &(rem(&1, 2) == 1)
...(4)> states = %{
...(4)> start: [
...(4)> {odd?, fn %{input: n, data: sum} -> sum + n end} ]}
...(4)> run(1..6|>Enum.into([]), 0, states)
** (RuntimeError) No transition found in state :start, on input 2
An even more obvious exception is raised if a state has no transitions defined; that holds for the predefined
`:start` state as well as for any other state.
iex(5)> states=%{}
...(5)> run(~w[alpha beta], states)
** (RuntimeError) No transitions defined for state :start
iex(6)> states=%{
...(6)> start: [
...(6)> {true, nil, :second} ]}
...(6)> run(~w[alpha beta], states)
** (RuntimeError) No transitions defined for state :second
"""
import Lab42.SimpleStateMachine.Data
import Lab42.SimpleStateMachine.Runner
@spec run( list(), any(), maybe(map()) ) :: any()
def run(input, data_or_states, states_or_nil \\ nil)
def run(input, states, nil), do: run(:start, input, from_data(nil) , states)
def run(input, data, states),do: run(:start, input, from_data(data), states)
end
|
lib/lab42/simple_state_machine.ex
| 0.833528
| 0.898855
|
simple_state_machine.ex
|
starcoder
|
defmodule Scenic.Primitive.Style.Input do
@moduledoc """
Flags whether or not to track cursor events (such as `cursor_button`) on this primitive.
Example:
```elixir
graph
|> rect( {100, 200}, id: :my_rect, input: :cursor_button )
```
### Data Format
The data for the input style is the type of input you want to receive when the cursor
is positioned over the primitive. This can be any single member or combination (in a list)
of the following input types
* `:cursor_button` - Sent when a button on the cursor (mouse) was used.
* `:cursor_pos` - Sent when the cursor moves over the primitive.
* `:cursor_scroll` - Sent when the cursor's scroll wheel moves.
```elixir
graph
|> rect( {100, 200}, id: :my_rect, input: [:cursor_button, :cursor_pos] )
```
"""
use Scenic.Primitive.Style
alias Scenic.ViewPort
# ============================================================================
@doc false
def validate(input_type) when is_atom(input_type), do: validate([input_type])
def validate(input_types) when is_list(input_types) do
valid_types = ViewPort.Input.positional_inputs()
inputs = Enum.uniq(input_types)
Enum.all?(inputs, &Enum.member?(valid_types, &1))
|> case do
true -> {:ok, inputs}
false -> invalid_types(input_types)
end
end
def validate(data) do
{
:error,
"""
#{IO.ANSI.red()}Invalid Input specification
Received: #{inspect(data)}
#{IO.ANSI.yellow()}
The :input style must be any of #{inspect(ViewPort.Input.positional_inputs())}
or a list containing any combination of those input types.#{IO.ANSI.default_color()}
"""
}
end
defp invalid_types(input_types) do
{
:error,
"""
#{IO.ANSI.red()}Invalid Input specification
Received: #{inspect(input_types)}
#{IO.ANSI.yellow()}
The :input style must be any of #{inspect(ViewPort.Input.positional_inputs())}
or a list containing any combination of those input types.#{IO.ANSI.default_color()}
"""
}
end
end
|
lib/scenic/primitive/style/input.ex
| 0.871653
| 0.784567
|
input.ex
|
starcoder
|
defmodule PixelFont.TableSource.OS_2.Enums do
weight_classes = [
thin: 100,
extra_light: 200,
light: 300,
normal: 400,
medium: 500,
semi_bold: 600,
bold: 700,
extra_bold: 800,
black: 900
]
weight_class_type_expr =
weight_classes
|> Keyword.keys()
|> Enum.reverse()
|> Enum.reduce({:non_neg_integer, [], []}, &{:|, [], [&1, &2]})
@type weight_class :: unquote(weight_class_type_expr)
@spec weight_class(weight_class()) :: integer()
def weight_class(value)
Enum.each(weight_classes, fn {key, value} ->
def weight_class(unquote(key)), do: unquote(value)
end)
def weight_class(value) when is_integer(value), do: value
width_classes = [
ultra_condensed: 1,
extra_condensed: 2,
condensed: 3,
semi_condensed: 4,
medium: 5,
semi_expanded: 6,
expanded: 7,
extra_expanded: 8,
ultra_expanded: 9
]
width_class_type_expr =
width_classes
|> Keyword.keys()
|> Enum.reverse()
|> Enum.reduce(&{:|, [], [&1, &2]})
@type width_class :: unquote(width_class_type_expr)
@spec width_class(width_class()) :: integer()
def width_class(value)
Enum.each(width_classes, fn {key, value} ->
def width_class(unquote(key)), do: unquote(value)
end)
family_class_data = %{
oldstyle_serif:
{0x0100,
%{
ibm_rounded_legibility: 1,
garande: 2,
venetian: 3,
modified_venetian: 4,
dutch_modern: 5,
dutch_traditional: 6,
contemporary: 7,
calligraphic: 8
}},
transitional_serif:
{0x0200,
%{
direct_line: 1,
script: 2
}},
modern_serif:
{0x0300,
%{
italian: 1,
script: 2
}},
clarendon_serif:
{0x0400,
%{
clarendon: 1,
modern: 2,
traditional: 3,
newspaper: 4,
stub_serif: 5,
monotone: 6,
typewriter: 7
}},
slab_serif:
{0x0500,
%{
monotone: 1,
humanist: 2,
geometric: 3,
swiss: 4,
typewriter: 5
}},
freeform_serif:
{0x0700,
%{
modern: 1
}},
sans_serif:
{0x0800,
%{
ibm_neo_grotesque_gothic: 1,
humanist: 2,
low_x_round_geometric: 3,
high_x_round_geometric: 4,
neo_grotesque_gothic: 5,
modified_neo_grotesque_gothic: 6,
typewriter_gothic: 9,
matrix: 10
}},
ornamental:
{0x0900,
%{
engraver: 1,
black_letter: 2,
decorative: 3,
three_dimentional: 4
}},
script:
{0x0A00,
%{
uncial: 1,
brush_joined: 2,
formal_joined: 3,
monotone_joined: 4,
calligraphic: 5,
brush_unjoined: 6,
formal_unjoined: 7,
monotone_unjoined: 8
}},
symbolic:
{0x0C00,
%{
mixed_serif: 3,
oldstyle_serif: 6,
neo_grotesque_sans_serif: 7
}}
}
@spec family_class({atom(), atom()}) :: integer()
def family_class(value)
def family_class({:no_classification, _}), do: 0x0000
Enum.each(family_class_data, fn {class, {base_value, subclasses}} ->
def family_class({unquote(class), :no_classification}), do: unquote(base_value)
Enum.each(subclasses, fn {subclass, value} ->
def family_class({unquote(class), unquote(subclass)}), do: unquote(base_value + value)
end)
def family_class({unquote(class), :miscellaneous}), do: unquote(base_value + 15)
end)
end
| lib/pixel_font/table_source/os_2/enums.ex | 0.68215 | 0.472501 | enums.ex | starcoder |
defmodule GenStage.Utils do
@moduledoc false
@doc """
Validates the argument is a list.
"""
def validate_list(opts, key, default) do
{value, opts} = Keyword.pop(opts, key, default)
if is_list(value) do
{:ok, value, opts}
else
{:error, "expected #{inspect(key)} to be a list, got: #{inspect(value)}"}
end
end
@doc """
Validates the given option is one of the values.
"""
def validate_in(opts, key, default, values) do
{value, opts} = Keyword.pop(opts, key, default)
if value in values do
{:ok, value, opts}
else
{:error, "expected #{inspect(key)} to be one of #{inspect(values)}, got: #{inspect(value)}"}
end
end
@doc """
Validates the given option is an integer between `min` and `max`,
allowing `:infinity` when `infinity?` is true.
"""
def validate_integer(opts, key, default, min, max, infinity?) do
{value, opts} = Keyword.pop(opts, key, default)
cond do
value == :infinity and infinity? ->
{:ok, value, opts}
not is_integer(value) ->
error_message = "expected #{inspect(key)} to be an integer, got: #{inspect(value)}"
{:error, error_message}
value < min ->
error_message =
"expected #{inspect(key)} to be equal to or greater than #{min}, got: #{inspect(value)}"
{:error, error_message}
value > max ->
error_message =
"expected #{inspect(key)} to be equal to or less than #{max}, got: #{inspect(value)}"
{:error, error_message}
true ->
{:ok, value, opts}
end
end
@doc """
Validates there are no options left.
"""
def validate_no_opts(opts) do
if opts == [] do
:ok
else
{:error, "unknown options #{inspect(opts)}"}
end
end
@doc """
Helper to check if a shutdown is transient.
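The expansion uses only guard-safe expressions, so the macro may also be
used in guard clauses.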
"""
defmacro is_transient_shutdown(value) do
quote do
unquote(value) == :normal or unquote(value) == :shutdown or
(is_tuple(unquote(value)) and tuple_size(unquote(value)) == 2 and
elem(unquote(value), 0) == :shutdown)
end
end
@doc """
Returns the name of the current process or self.
"""
def self_name() do
case :erlang.process_info(self(), :registered_name) do
{:registered_name, name} when is_atom(name) -> name
_ -> self()
end
end
@doc """
Splits a list of events into batches bounded by `min`, `max`, and the
available demand.
"""
def split_batches(events, from, min, max, demand) do
split_batches(events, from, min, max, demand, demand, [])
end
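# `old_demand` tracks how much was originally asked for (to detect events
# received in excess), while `new_demand` tracks when to ask for more.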
defp split_batches([], _from, _min, _max, _old_demand, new_demand, batches) do
{new_demand, :lists.reverse(batches)}
end
defp split_batches(events, from, min, max, old_demand, new_demand, batches) do
{events, batch, batch_size} = split_events(events, max - min, 0, [])
# Adjust the batch size to whatever is left of the demand in case of excess.
{old_demand, batch_size} =
case old_demand - batch_size do
diff when diff < 0 ->
error_msg = 'GenStage consumer ~tp has received ~tp events in excess from: ~tp~n'
:error_logger.error_msg(error_msg, [self_name(), abs(diff), from])
{0, old_demand}
diff ->
{diff, batch_size}
end
# In case we've reached min, we will ask for more events.
{new_demand, batch_size} =
case new_demand - batch_size do
diff when diff <= min ->
{max, max - diff}
diff ->
{diff, 0}
end
split_batches(events, from, min, max, old_demand, new_demand, [{batch, batch_size} | batches])
end
defp split_events(events, limit, limit, acc), do: {events, :lists.reverse(acc), limit}
defp split_events([], _limit, counter, acc), do: {[], :lists.reverse(acc), counter}
defp split_events([event | events], limit, counter, acc) do
split_events(events, limit, counter + 1, [event | acc])
end
end
| deps/gen_stage/lib/gen_stage/utils.ex | 0.785884 | 0.499634 | utils.ex | starcoder |
defprotocol Phoenix.HTML.Safe do
@moduledoc """
Defines the HTML safe protocol.
In order to promote HTML safety, Phoenix templates
do not use `Kernel.to_string/1` to convert data types to
strings in templates. Instead, Phoenix uses this
protocol, which must be implemented by data structures
and guarantees that an HTML safe representation is returned.
Furthermore, this protocol relies on iodata, which provides
better performance when sending or streaming data to the client.
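
For example, the `BitString` implementation escapes reserved HTML
characters, so `to_iodata("<b>")` yields iodata that renders as
`&lt;b&gt;`.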
"""
def to_iodata(data)
end
defimpl Phoenix.HTML.Safe, for: Atom do
def to_iodata(nil), do: ""
def to_iodata(atom), do: Phoenix.HTML.Engine.html_escape(Atom.to_string(atom))
end
defimpl Phoenix.HTML.Safe, for: BitString do
defdelegate to_iodata(data), to: Phoenix.HTML.Engine, as: :html_escape
end
defimpl Phoenix.HTML.Safe, for: Time do
defdelegate to_iodata(data), to: Time, as: :to_iso8601
end
defimpl Phoenix.HTML.Safe, for: Date do
defdelegate to_iodata(data), to: Date, as: :to_iso8601
end
defimpl Phoenix.HTML.Safe, for: NaiveDateTime do
defdelegate to_iodata(data), to: NaiveDateTime, as: :to_iso8601
end
defimpl Phoenix.HTML.Safe, for: DateTime do
def to_iodata(data) do
# Call escape in case someone can inject reserved
# characters in the timezone or its abbreviation
Phoenix.HTML.Engine.html_escape(DateTime.to_iso8601(data))
end
end
defimpl Phoenix.HTML.Safe, for: List do
def to_iodata([h | t]) do
[to_iodata(h) | to_iodata(t)]
end
def to_iodata([]) do
[]
end
def to_iodata(?<), do: "&lt;"
def to_iodata(?>), do: "&gt;"
def to_iodata(?&), do: "&amp;"
def to_iodata(?"), do: "&quot;"
def to_iodata(?'), do: "&#39;"
def to_iodata(h) when is_integer(h) and h <= 255 do
h
end
def to_iodata(h) when is_integer(h) do
raise ArgumentError,
"lists in Phoenix.HTML templates only support iodata, and not chardata. Integers may only represent bytes. " <>
"It's likely you meant to pass a string with double quotes instead of a char list with single quotes."
end
def to_iodata(h) when is_binary(h) do
Phoenix.HTML.Engine.html_escape(h)
end
def to_iodata({:safe, data}) do
data
end
def to_iodata(other) do
raise ArgumentError,
"lists in Phoenix.HTML and templates may only contain integers representing bytes, binaries or other lists, " <>
"got invalid entry: #{inspect(other)}"
end
end
defimpl Phoenix.HTML.Safe, for: Integer do
def to_iodata(data), do: Integer.to_string(data)
end
defimpl Phoenix.HTML.Safe, for: Float do
def to_iodata(data) do
IO.iodata_to_binary(:io_lib_format.fwrite_g(data))
end
end
defimpl Phoenix.HTML.Safe, for: Tuple do
def to_iodata({:safe, data}), do: data
def to_iodata(value), do: raise(Protocol.UndefinedError, protocol: @protocol, value: value)
end
| lib/phoenix_html/safe.ex | 0.830972 | 0.692902 | safe.ex | starcoder |
defmodule Livebook.JSInterop do
@moduledoc false
alias Livebook.Delta
@doc """
Returns the result of applying `delta` to `string`.
The delta operation lengths (retain, delete) are treated such that
they match the JavaScript strings behavior.
JavaScript uses UTF-16 encoding, in which every character is stored
as either one or two 16-bit code units. JS treats the number of units
as string length and this also impacts position-based functions like
`String.slice`. To match this behavior we first convert normal UTF-8
string into a list of UTF-16 code points, then apply the delta to this
list and finally convert back to a UTF-8 string.
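
For example (an illustrative sketch using this module's `Delta` API),
`"🚀"` takes two UTF-16 code units, so a retain of 2 skips the whole emoji:

    delta = Delta.new() |> Delta.retain(2) |> Delta.insert("!")
    apply_delta_to_string(delta, "🚀abc")
    #=> "🚀!abc"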
"""
@spec apply_delta_to_string(Delta.t(), String.t()) :: String.t()
def apply_delta_to_string(delta, string) do
code_units = string_to_utf16_code_units(string)
delta.ops
|> apply_to_code_units(code_units)
|> utf16_code_units_to_string()
end
defp apply_to_code_units([], code_units), do: code_units
defp apply_to_code_units([{:retain, n} | ops], code_units) do
{left, right} = Enum.split(code_units, n)
left ++ apply_to_code_units(ops, right)
end
defp apply_to_code_units([{:insert, inserted} | ops], code_units) do
string_to_utf16_code_units(inserted) ++ apply_to_code_units(ops, code_units)
end
defp apply_to_code_units([{:delete, n} | ops], code_units) do
apply_to_code_units(ops, Enum.drop(code_units, n))
end
@doc """
Computes Myers Difference between the given strings and returns its
`Delta` representation.
The diff is computed on UTF-16 code units and the resulting delta
is JavaScript-compatible. See `apply_delta_to_string/2` for more
details.
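
For example (illustrative), `diff("cat", "cats")` produces a delta that
retains 3 code units and inserts `"s"`.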
"""
@spec diff(String.t(), String.t()) :: Delta.t()
def diff(string1, string2) do
units1 = string_to_utf16_code_units(string1)
units2 = string_to_utf16_code_units(string2)
units1
|> List.myers_difference(units2)
|> Enum.reduce(Delta.new(), fn
{:eq, units}, delta -> Delta.retain(delta, length(units))
{:ins, units}, delta -> Delta.insert(delta, utf16_code_units_to_string(units))
{:del, units}, delta -> Delta.delete(delta, length(units))
end)
|> Delta.trim()
end
@doc """
Returns a column number in the Elixir string corresponding to
the given column interpreted in terms of UTF-16 code units.
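For example, in the line `"🚀x"` JS column 3 (the emoji spans two code
units) maps to Elixir column 2.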
"""
@spec js_column_to_elixir(pos_integer(), String.t()) :: pos_integer()
def js_column_to_elixir(column, line) do
line
|> string_to_utf16_code_units()
|> Enum.take(column - 1)
|> utf16_code_units_to_string()
|> String.length()
|> Kernel.+(1)
end
@doc """
Returns a column represented in terms of UTF-16 code units
corresponding to the given column number in Elixir string.
"""
@spec elixir_column_to_js(pos_integer(), String.t()) :: pos_integer()
def elixir_column_to_js(column, line) do
line
|> string_take(column - 1)
|> string_to_utf16_code_units()
|> length()
|> Kernel.+(1)
end
defp string_take(_string, 0), do: ""
defp string_take(string, n) when n > 0, do: String.slice(string, 0..(n - 1))
# UTF-16 helpers
defp string_to_utf16_code_units(string) do
string
|> :unicode.characters_to_binary(:utf8, :utf16)
|> utf16_binary_to_code_units([])
|> Enum.reverse()
end
defp utf16_binary_to_code_units(<<>>, code_units), do: code_units
defp utf16_binary_to_code_units(<<code_unit::size(16), rest::binary>>, code_units) do
utf16_binary_to_code_units(rest, [code_unit | code_units])
end
defp utf16_code_units_to_string(code_units) do
code_units
|> Enum.reverse()
|> code_units_to_utf16_binary(<<>>)
|> :unicode.characters_to_binary(:utf16, :utf8)
end
defp code_units_to_utf16_binary([], utf16_binary), do: utf16_binary
defp code_units_to_utf16_binary([code_unit | code_units], utf16_binary) do
code_units_to_utf16_binary(code_units, <<code_unit::size(16), utf16_binary::binary>>)
end
end
| lib/livebook/js_interop.ex | 0.867443 | 0.792865 | js_interop.ex | starcoder |