code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Custodian.Github.Processor do
  @moduledoc """
  Processing functions for each type of event from GitHub's webhook that we
  respond to.

  Labels are defined according to the NewAperio [spec]. More information can
  be found in the GitHub repo, which includes a programmatic tool for generating
  our standard labels in a repo.

  [spec]: https://github.com/newaperio/github-labels
  """

  alias Custodian.Bots
  alias Custodian.Bots.Bot
  alias Custodian.Repo
  alias Ecto.Multi

  # GitHub API client module, injected via config so tests can swap in a mock.
  # NOTE(review): read at compile time — runtime config changes have no
  # effect; `Application.compile_env/3` would make that intent explicit.
  @github Application.get_env(:custodian, :github_api)

  @doc """
  Processes installation events.

  ## Created/Added

  Processes an **installation**. This happens when a user adds the app to one
  or more bots. It iterates over every bot and makes a new record for each.

  ## Deleted

  Processes an **uninstallation**. This happens when a user removes the app
  from a bot. It deletes the given record in the database by its repo id.

  Any other installation action is ignored and returns `:ok`.
  """
  @spec installation(map) :: {:ok, [Bot.t()]} | :ok
  def installation(%{"action" => "created"} = params) do
    Appsignal.increment_counter("event_installation_created_count", 1)
    create_bots(params["installation"]["id"], params["repositories"])
  end

  def installation(%{"action" => "added"} = params) do
    Appsignal.increment_counter("event_installation_added_count", 1)
    create_bots(params["installation"]["id"], params["repositories_added"])
  end

  def installation(%{"action" => "removed"} = params) do
    # Counter name intentionally left as "deleted" to preserve continuity of
    # existing metric dashboards, even though the webhook action is "removed".
    Appsignal.increment_counter("event_installation_deleted_count", 1)
    delete_bots(params["repositories_removed"])
  end

  # Catch-all so unexpected installation actions don't crash the webhook
  # handler with a FunctionClauseError; mirrors pr/1, review/1 and repo/1.
  def installation(_params), do: :ok

  @doc """
  Processes pull request events.

  ## Opened

  Processes an "opened" pull request.

  - Adds `needs-review` label unless `ready-to-merge`/`in-progress`

  ## Opened (draft)

  Processes an "opened" draft pull request.

  - Adds `in-progress` label unless `needs-review`/`ready-to-merge`

  ## Closed

  Processes a **closed** pull request. This is either a merged PR or one that
  was closed manually.

  - Removes all labels
  - Deletes branch

  ## Reopened

  Processes a **reopened** pull request.

  - Removes all labels

  Any other pull request action is ignored and returns `:ok`.
  """
  @spec pr(map) :: {:ok, Bot.t()} | :ok
  def pr(%{"action" => "opened", "pull_request" => %{"draft" => true}} = params) do
    Appsignal.increment_counter("event_pr_open_count", 1)
    bot = Bots.get_bot_by!(repo_id: params["repository"]["id"])
    labels = @github.Labels.all({bot, params["pull_request"]["number"]})

    # Only label the draft "in-progress" if no workflow label is present yet.
    if !Enum.member?(labels, "ready-to-merge") &&
         !Enum.member?(labels, "needs-review") do
      @github.Labels.add(
        {bot, params["pull_request"]["number"]},
        "in-progress"
      )
    end

    {:ok, bot}
  end

  def pr(%{"action" => "opened"} = params) do
    Appsignal.increment_counter("event_pr_open_count", 1)
    bot = Bots.get_bot_by!(repo_id: params["repository"]["id"])
    labels = @github.Labels.all({bot, params["pull_request"]["number"]})

    # Only request review if no workflow label is present yet.
    if !Enum.member?(labels, "ready-to-merge") &&
         !Enum.member?(labels, "in-progress") do
      @github.Labels.add(
        {bot, params["pull_request"]["number"]},
        "needs-review"
      )
    end

    {:ok, bot}
  end

  def pr(%{"action" => "closed"} = params) do
    Appsignal.increment_counter("event_pr_closed_count", 1)
    bot = Bots.get_bot_by!(repo_id: params["repository"]["id"])
    branch = params["pull_request"]["head"]["ref"]

    # Labels.remove/2 pipes the {bot, number} tuple through each call.
    {bot, params["pull_request"]["number"]}
    |> @github.Labels.remove("in-progress")
    |> @github.Labels.remove("needs-review")
    |> @github.Labels.remove("ready-to-merge")

    # Clean up the merged/closed head branch.
    @github.References.remove(bot, branch)

    {:ok, bot}
  end

  def pr(%{"action" => "reopened"} = params) do
    Appsignal.increment_counter("event_pr_reopened_count", 1)
    bot = Bots.get_bot_by!(repo_id: params["repository"]["id"])

    # Reset the workflow labels; the PR starts its lifecycle over.
    {bot, params["pull_request"]["number"]}
    |> @github.Labels.remove("in-progress")
    |> @github.Labels.remove("needs-review")
    |> @github.Labels.remove("ready-to-merge")

    {:ok, bot}
  end

  def pr(_), do: :ok

  @doc """
  Processes pull request review events.

  ## Approved

  Processes an **approved** pull request review.

  - Removes needs-review and in-progress labels
  - Adds ready-to-merge label
  - Merges changes from the base branch to keep head updated

  ## Changes Requested

  Processes a **changes requested** pull request review.

  - Removes needs-review and ready-to-merge labels
  - Adds in-progress label

  ## Commented

  For reviews that are just **comments**, we ignore the payload and return
  `:ok`. No action is taken.
  """
  @spec review(map) :: {:ok, Bot.t()} | :ok
  def review(%{
        "review" => %{"state" => "approved"},
        "pull_request" => pr_params,
        "repository" => %{"id" => repo_id}
      }) do
    Appsignal.increment_counter("event_review_approved_count", 1)
    bot = Bots.get_bot_by!(repo_id: repo_id)

    {bot, pr_params["number"]}
    |> @github.Labels.remove("needs-review")
    |> @github.Labels.remove("in-progress")
    |> @github.Labels.add("ready-to-merge")

    # Keep the head branch up to date with its base while it waits to merge.
    @github.Merges.merge(
      bot,
      pr_params["head"]["ref"],
      pr_params["base"]["ref"]
    )

    {:ok, bot}
  end

  def review(%{"review" => %{"state" => "changes_requested"}} = params) do
    Appsignal.increment_counter("event_review_changes_requested_count", 1)
    bot = Bots.get_bot_by!(repo_id: params["repository"]["id"])

    {bot, params["pull_request"]["number"]}
    |> @github.Labels.remove("needs-review")
    |> @github.Labels.remove("ready-to-merge")
    |> @github.Labels.add("in-progress")

    {:ok, bot}
  end

  def review(_params), do: :ok

  @doc """
  Process repository events.

  - Updates bot name when repository is renamed

  Any other repository action is ignored and returns `:ok`.
  """
  @spec repo(map) :: {:ok, Bot.t()} | {:error, Ecto.Changeset.t()} | :ok
  def repo(%{"action" => "renamed"} = params) do
    %{repo_id: params["repository"]["id"]}
    |> Bots.get_bot_by!()
    |> Bots.update_bot(%{name: params["repository"]["name"]})
  end

  def repo(_params), do: :ok

  # Inserts one Bot record per repository in a single transaction.
  @spec create_bots(integer, [map]) :: {:ok, [Bot.t()]}
  defp create_bots(installation_id, bots) do
    multi =
      Enum.reduce(Enum.with_index(bots), Multi.new(), fn {bot, index}, multi ->
        # "full_name" is "owner/name"; split it into the two columns.
        [owner, name] = bot["full_name"] |> String.split("/")

        Multi.insert(
          multi,
          index,
          Bot.changeset(%Bot{}, %{
            owner: owner,
            name: name,
            repo_id: bot["id"],
            installation_id: installation_id
          })
        )
      end)

    Appsignal.increment_counter("bot_count", multi |> Multi.to_list() |> length)
    Repo.transaction(multi)
  end

  # Deletes the Bot record for each repository in a single transaction.
  # Raises if a repo id has no matching bot (Bots.get_bot_by!/1).
  @spec delete_bots([map]) :: {:ok, [Bot.t()]}
  defp delete_bots(bots) do
    multi =
      Enum.reduce(Enum.with_index(bots), Multi.new(), fn {bot, index}, multi ->
        bot = Bots.get_bot_by!(%{repo_id: bot["id"]})
        Multi.delete(multi, index, bot)
      end)

    Appsignal.increment_counter(
      "bot_count",
      -(multi |> Multi.to_list() |> length)
    )

    Repo.transaction(multi)
  end
end
|
lib/custodian/github/processor.ex
| 0.691602
| 0.544983
|
processor.ex
|
starcoder
|
defmodule PolyPartition.Helpers do
  alias PolyPartition.Geometry

  @moduledoc """
  Helper functions for PolyPartition
  """

  @doc """
  Calculates the determinant of two points provided as a segment

  ## Examples

      iex> PolyPartition.Helpers.det_seg([ [2, 3], [1, -1] ])
      -5

  """
  def det_seg(seg) do
    [p, q] = seg
    det(p, q)
  end

  # 2x2 determinant of the matrix whose rows are the points p and q.
  defp det(p, q) do
    [x1, y1] = p
    [x2, y2] = q
    x1 * y2 - x2 * y1
  end

  @doc """
  Takes two numbers, returns true if their signs differ, false otherwise

  Note: zero is treated as having no sign, so any pair involving 0 is `false`.

  ## Examples

      iex> PolyPartition.Helpers.sgn_to_bool(-1, 2)
      true

      iex> PolyPartition.Helpers.sgn_to_bool(3, 3)
      false

      iex> PolyPartition.Helpers.sgn_to_bool(3, 0)
      false

  """
  def sgn_to_bool(a, b) do
    # Product is negative exactly when the signs strictly differ.
    a * b < 0
  end

  @doc """
  Find the index of the vertex to use to split the polygon

  `split_coord` assumes there is a vertex that is

  - not a neighbor of the first vertex, and
  - is "line of sight" from the first vertex.

  I believe this is always the case for polygons with more than three vertices,
  but haven't proven it.

  `split_coord` will start by testing the vertex farthest away (in circular order)
  from the first vertex and step out one vertex at a time alternating left and right.
  The `step` parameter is incremented by a private function `next`.

  ## Examples

      iex> poly = [[0,1], [1, 0], [2, 0], [3,1], [2,2], [1,2]]
      iex> PolyPartition.Helpers.split_coord(poly, 0)
      3

      iex> poly = [[0,1], [1, 0], [2, 0], [3,1], [2,2], [2,0.5]]
      iex> PolyPartition.Helpers.split_coord(poly, 0)
      2

  """
  def split_coord(poly, step) do
    # Start opposite the first vertex, then fan out via next/1.
    opp_index = round(:math.floor(length(poly) / 2)) + step

    case Geometry.good_cut?(poly, opp_index) do
      false -> split_coord(poly, next(step))
      _ -> opp_index
    end
  end

  # Alternating offset sequence: 0 -> -1 -> 1 -> -2 -> 2 -> ...
  defp next(n) do
    cond do
      n < 0 -> -1 * n
      true -> -1 * (n + 1)
    end
  end

  @doc """
  Add a vertex at the midpoint of a polygon's longest side

  If we have a triangle, we need to add a vertex to make a split. We choose the
  longest side to keep the polygon as "fat" as possible

  The generated point will have float coordinates, regardless of input

  ## Examples

      iex> poly = [[0,1], [1,0], [2,1]]
      iex> PolyPartition.Helpers.split_side(poly)
      [[1.0, 1.0], [0, 1], [1, 0], [2, 1]]

  """
  def split_side(poly) do
    # For each side (vertex i -> vertex i+1, wrapping), compute its squared
    # length and midpoint, then keep the longest side's midpoint and index.
    {_, pt, ind} =
      poly
      |> Stream.with_index()
      |> Enum.map(fn {point, index} ->
        next_pt = Enum.at(poly, rem(index + 1, length(poly)))
        {Geometry.sq_length([point, next_pt]), Geometry.midpoint([point, next_pt]), index}
      end)
      |> List.foldr({0.0, 0, 0}, fn {side_len, _, _} = candidate, {best_len, _, _} = best ->
        if side_len > best_len, do: candidate, else: best
      end)

    # Insert the midpoint between the two endpoints of the longest side.
    List.insert_at(poly, rem(ind + 1, length(poly)), pt)
  end

  @doc """
  Takes a polygon and returns a list of two polygons forming a partition of the first

  If any degenerate polygons are created, we retry with a different initial vertex

  ## Examples

      iex> poly = [[0,1], [1, 0], [2, 0], [3,1], [2,2], [1,2]]
      iex> PolyPartition.Helpers.split(poly, 0)
      [[[0,1], [1,0], [2,0], [3,1]], [[3,1], [2,2], [1,2], [0,1]]]

  """
  def split(poly, retries) do
    if retries >= length(poly) do
      # All starting vertices exhausted: give up and return the input
      # polygon unchanged (note: NOT wrapped in a two-element list).
      poly
    else
      # Triangles have no non-adjacent vertex pair, so add one first.
      p =
        case length(poly) do
          3 -> split_side(poly)
          _ -> poly
        end

      opp_index = split_coord(p, 0)

      # Two pieces: vertices 0..opp_index, and opp_index..end closed back
      # to the first vertex. rotate_list/1 on a one-element list is the
      # identity; kept for parity with the original implementation.
      r =
        rotate_list([Enum.slice(p, 0..opp_index)]) ++
          [Enum.slice(p, opp_index..length(p)) ++ [hd(p)]]

      # Smallest area among the pieces; 0.0 flags a degenerate polygon.
      min_area =
        r
        |> Enum.map(fn piece -> Geometry.area(piece) end)
        |> List.foldr(1, fn x, acc -> min(x, acc) end)

      case {min_area, retries >= length(poly) - 1} do
        # Split failed (degenerate piece): retry from a rotated vertex.
        {0.0, false} -> split(rotate_list(poly), retries + 1)
        _ -> r
      end
    end
  end

  # Rotates a list left by one position: [a, b, c] -> [b, c, a].
  defp rotate_list(list) do
    list
    |> Stream.with_index()
    |> Enum.map(fn {_, index} -> Enum.at(list, rem(index + 1, length(list))) end)
  end
end
|
lib/Helpers.ex
| 0.884999
| 0.816845
|
Helpers.ex
|
starcoder
|
defmodule StepFlow.Amqp.CompletedConsumer do
  @moduledoc """
  Consumer of all job with completed status.
  """

  require Logger

  alias StepFlow.Amqp.CompletedConsumer
  alias StepFlow.Jobs
  alias StepFlow.Metrics.JobInstrumenter
  alias StepFlow.Workflows
  alias StepFlow.Workflows.StepManager

  # CommonConsumer wires this module to the "job_completed" AMQP queue with a
  # prefetch of 1 (one unacked message at a time) and dispatches deliveries
  # to consume/4. NOTE(review): `Basic` (used below unqualified) is
  # presumably brought into scope by this macro — confirm in CommonConsumer.
  use StepFlow.Amqp.CommonConsumer, %{
    queue: "job_completed",
    prefetch_count: 1,
    consumer: &CompletedConsumer.consume/4
  }

  @doc """
  Consume messages with completed topic, update Job status and continue the workflow.

  Expects the payload to carry at least `"job_id"` and `"status"`. If the
  referenced job no longer exists, the message is rejected without requeue.
  """
  def consume(
        channel,
        tag,
        _redelivered,
        %{
          "job_id" => job_id,
          "status" => status
        } = payload
      ) do
    case Jobs.get_job(job_id) do
      nil ->
        # Unknown job: drop the message permanently (no requeue).
        Basic.reject(channel, tag, requeue: false)

      job ->
        workflow =
          job
          |> Map.get(:workflow_id)
          |> Workflows.get_workflow!()

        # Persist artifacts reported by the worker before advancing state.
        set_generated_destination_paths(payload, job)
        set_output_parameters(payload, workflow)

        JobInstrumenter.inc(:step_flow_jobs_completed, job.name)
        {:ok, job_status} = Jobs.Status.set_job_status(job_id, status)
        Workflows.Status.define_workflow_status(job.workflow_id, :job_completed, job_status)
        Workflows.notification_from_job(job_id)
        # Kick the workflow engine to evaluate/start the next step.
        StepManager.check_step_status(%{job_id: job_id})
        # Ack only after all processing succeeded.
        Basic.ack(channel, tag)
    end
  end

  # Fallback for payloads missing "job_id"/"status": log and drop.
  def consume(channel, tag, _redelivered, payload) do
    Logger.error("Job completed #{inspect(payload)}")
    Basic.reject(channel, tag, requeue: false)
  end

  # Copies the worker-reported "destination_paths" (if any) into the job's
  # parameter list as an "array_of_strings" parameter and persists it.
  defp set_generated_destination_paths(payload, job) do
    case StepFlow.Map.get_by_key_or_atom(payload, "destination_paths") do
      nil ->
        nil

      destination_paths ->
        job_parameters =
          job.parameters ++
            [
              %{
                id: "destination_paths",
                type: "array_of_strings",
                value: destination_paths
              }
            ]

        Jobs.update_job(job, %{parameters: job_parameters})
    end
  end

  # Appends the worker-reported "parameters" (if any) to the workflow's
  # parameter list and persists the workflow.
  defp set_output_parameters(payload, workflow) do
    case StepFlow.Map.get_by_key_or_atom(payload, "parameters") do
      nil ->
        nil

      parameters ->
        parameters = workflow.parameters ++ parameters
        Workflows.update_workflow(workflow, %{parameters: parameters})
    end
  end
end
|
lib/step_flow/amqp/completed_consumer.ex
| 0.666605
| 0.415877
|
completed_consumer.ex
|
starcoder
|
defmodule Unpoly do
  @moduledoc """
  A Plug adapter and helpers for Unpoly, the unobtrusive JavaScript framework.

  ## Options

  * `:cookie_name` - the cookie name where the request method is echoed to. Defaults to
    `"_up_method"`.
  * `:cookie_opts` - additional options to pass to method cookie.
    See `Plug.Conn.put_resp_cookie/4` for all available options.
  """

  @doc """
  Alias for `Unpoly.unpoly?/1`
  """
  @spec up?(Plug.Conn.t()) :: boolean()
  def up?(conn), do: unpoly?(conn)

  @doc """
  Returns whether the current request is a [page fragment update](https://unpoly.com/up.replace)
  triggered by an Unpoly frontend.
  """
  @spec unpoly?(Plug.Conn.t()) :: boolean()
  def unpoly?(conn), do: target(conn) !== nil

  @doc """
  Returns the CSS selector for a fragment that Unpoly will update in
  case of a successful response (200 status code).

  The Unpoly frontend will expect an HTML response containing an element
  that matches this selector.

  Server-side code is free to optimize its successful response by only returning HTML
  that matches this selector.
  """
  @spec target(Plug.Conn.t()) :: String.t() | nil
  def target(conn), do: get_req_header(conn, "x-up-target")

  @doc """
  Returns the CSS selector for a fragment that Unpoly will update in
  case of an failed response. Server errors or validation failures are
  all examples for a failed response (non-200 status code).

  The Unpoly frontend will expect an HTML response containing an element
  that matches this selector.

  Server-side code is free to optimize its response by only returning HTML
  that matches this selector.
  """
  @spec fail_target(Plug.Conn.t()) :: String.t() | nil
  def fail_target(conn), do: get_req_header(conn, "x-up-fail-target")

  @doc """
  Returns whether the given CSS selector is targeted by the current fragment
  update in case of a successful response (200 status code).

  Note that the matching logic is very simplistic and does not actually know
  how your page layout is structured. It will return `true` if
  the tested selector and the requested CSS selector matches exactly, or if the
  requested selector is `body` or `html`.

  Always returns `true` if the current request is not an Unpoly fragment update.
  """
  @spec target?(Plug.Conn.t(), String.t()) :: boolean()
  def target?(conn, tested_target), do: query_target(conn, target(conn), tested_target)

  @doc """
  Returns whether the given CSS selector is targeted by the current fragment
  update in case of a failed response (non-200 status code).

  Note that the matching logic is very simplistic and does not actually know
  how your page layout is structured. It will return `true` if
  the tested selector and the requested CSS selector matches exactly, or if the
  requested selector is `body` or `html`.

  Always returns `true` if the current request is not an Unpoly fragment update.
  """
  @spec fail_target?(Plug.Conn.t(), String.t()) :: boolean()
  def fail_target?(conn, tested_target), do: query_target(conn, fail_target(conn), tested_target)

  @doc """
  Returns whether the given CSS selector is targeted by the current fragment
  update for either a success or a failed response.

  Note that the matching logic is very simplistic and does not actually know
  how your page layout is structured. It will return `true` if
  the tested selector and the requested CSS selector matches exactly, or if the
  requested selector is `body` or `html`.

  Always returns `true` if the current request is not an Unpoly fragment update.
  """
  @spec any_target?(Plug.Conn.t(), String.t()) :: boolean()
  def any_target?(conn, tested_target),
    do: target?(conn, tested_target) || fail_target?(conn, tested_target)

  @doc """
  Returns whether the current form submission should be
  [validated](https://unpoly.com/input-up-validate) (and not be saved to the database).
  """
  @spec validate?(Plug.Conn.t()) :: boolean()
  def validate?(conn), do: validate_name(conn) !== nil

  @doc """
  If the current form submission is a [validation](https://unpoly.com/input-up-validate),
  this returns the name attribute of the form field that has triggered
  the validation.
  """
  @spec validate_name(Plug.Conn.t()) :: String.t() | nil
  def validate_name(conn), do: get_req_header(conn, "x-up-validate")

  @doc """
  Forces Unpoly to use the given string as the document title when processing
  this response.

  This is useful when you skip rendering the `<head>` in an Unpoly request.
  """
  @spec put_title(Plug.Conn.t(), String.t()) :: Plug.Conn.t()
  def put_title(conn, new_title), do: Plug.Conn.put_resp_header(conn, "x-up-title", new_title)

  # Plug

  @doc false
  def init(opts \\ []) do
    cookie_name = Keyword.get(opts, :cookie_name, "_up_method")
    cookie_opts = Keyword.get(opts, :cookie_opts, http_only: false)
    {cookie_name, cookie_opts}
  end

  @doc false
  def call(conn, {cookie_name, cookie_opts}) do
    conn
    |> Plug.Conn.fetch_cookies()
    |> echo_request_headers()
    |> append_method_cookie(cookie_name, cookie_opts)
  end

  @doc """
  Sets the value of the "x-up-location" response header.
  """
  @spec put_resp_location_header(Plug.Conn.t(), String.t()) :: Plug.Conn.t()
  def put_resp_location_header(conn, value) do
    Plug.Conn.put_resp_header(conn, "x-up-location", value)
  end

  @doc """
  Sets the value of the "x-up-method" response header.
  """
  @spec put_resp_method_header(Plug.Conn.t(), String.t()) :: Plug.Conn.t()
  def put_resp_method_header(conn, value) do
    Plug.Conn.put_resp_header(conn, "x-up-method", value)
  end

  # Echoes the request URL and method back so the Unpoly frontend can
  # update the browser location after a fragment update.
  defp echo_request_headers(conn) do
    conn
    |> put_resp_location_header(Phoenix.Controller.current_url(conn))
    |> put_resp_method_header(conn.method)
  end

  # Persists the request method in a cookie for non-GET full-page loads so
  # Unpoly can detect it on the next request, and clears the cookie otherwise.
  defp append_method_cookie(conn, cookie_name, cookie_opts) do
    cond do
      conn.method != "GET" && !up?(conn) ->
        Plug.Conn.put_resp_cookie(conn, cookie_name, conn.method, cookie_opts)

      # Bug fix: previously checked the hardcoded key "_up_method", so a
      # custom :cookie_name was set but never cleaned up.
      Map.has_key?(conn.req_cookies, cookie_name) ->
        Plug.Conn.delete_resp_cookie(conn, cookie_name, cookie_opts)

      true ->
        conn
    end
  end

  ## Helpers

  # Returns the first value of the given request header, or nil when absent.
  defp get_req_header(conn, key),
    do: Plug.Conn.get_req_header(conn, key) |> List.first()

  # Simplistic selector match: exact match, or "html"/"body" targets match
  # everything except head/title/meta. Non-Unpoly requests always match.
  defp query_target(conn, actual_target, tested_target) do
    if up?(conn) do
      cond do
        actual_target == tested_target -> true
        actual_target == "html" -> true
        actual_target == "body" && tested_target not in ["head", "title", "meta"] -> true
        true -> false
      end
    else
      true
    end
  end
end
|
lib/unpoly.ex
| 0.841451
| 0.580322
|
unpoly.ex
|
starcoder
|
defmodule Faker.Pokemon.En do
  import Faker, only: [sampler: 2]

  @moduledoc """
  Functions for Pokemon names in English
  """

  @doc """
  Returns a Pokemon name

  ## Examples

      iex> Faker.Pokemon.En.name()
      "Fraxure"
      iex> Faker.Pokemon.En.name()
      "Shellos"
      iex> Faker.Pokemon.En.name()
      "Ambipom"
      iex> Faker.Pokemon.En.name()
      "Forretress"
  """
  @spec name() :: String.t()
  # Uniformly samples one entry from the species list below (includes base
  # species plus regional/mega/totem and other named forms). The doctest
  # values depend on Faker's deterministic seed — do not reorder entries.
  sampler(:name, [
    "Bulbasaur",
    "Ivysaur",
    "Venusaur",
    "Charmander",
    "Charmeleon",
    "Charizard",
    "Squirtle",
    "Wartortle",
    "Blastoise",
    "Caterpie",
    "Metapod",
    "Butterfree",
    "Weedle",
    "Kakuna",
    "Beedrill",
    "Pidgey",
    "Pidgeotto",
    "Pidgeot",
    "Rattata",
    "Raticate",
    "Spearow",
    "Fearow",
    "Ekans",
    "Arbok",
    "Pikachu",
    "Raichu",
    "Sandshrew",
    "Sandslash",
    "Nidoran",
    "Nidorina",
    "Nidoqueen",
    "Nidoran",
    "Nidorino",
    "Nidoking",
    "Clefairy",
    "Clefable",
    "Vulpix",
    "Ninetales",
    "Jigglypuff",
    "Wigglytuff",
    "Zubat",
    "Golbat",
    "Oddish",
    "Gloom",
    "Vileplume",
    "Paras",
    "Parasect",
    "Venonat",
    "Venomoth",
    "Diglett",
    "Dugtrio",
    "Meowth",
    "Persian",
    "Psyduck",
    "Golduck",
    "Mankey",
    "Primeape",
    "Growlithe",
    "Arcanine",
    "Poliwag",
    "Poliwhirl",
    "Poliwrath",
    "Abra",
    "Kadabra",
    "Alakazam",
    "Machop",
    "Machoke",
    "Machamp",
    "Bellsprout",
    "Weepinbell",
    "Victreebel",
    "Tentacool",
    "Tentacruel",
    "Geodude",
    "Graveler",
    "Golem",
    "Ponyta",
    "Rapidash",
    "Slowpoke",
    "Slowbro",
    "Magnemite",
    "Magneton",
    "Farfetch'd",
    "Doduo",
    "Dodrio",
    "Seel",
    "Dewgong",
    "Grimer",
    "Muk",
    "Shellder",
    "Cloyster",
    "Gastly",
    "Haunter",
    "Gengar",
    "Onix",
    "Drowzee",
    "Hypno",
    "Krabby",
    "Kingler",
    "Voltorb",
    "Electrode",
    "Exeggcute",
    "Exeggutor",
    "Cubone",
    "Marowak",
    "Hitmonlee",
    "Hitmonchan",
    "Lickitung",
    "Koffing",
    "Weezing",
    "Rhyhorn",
    "Rhydon",
    "Chansey",
    "Tangela",
    "Kangaskhan",
    "Horsea",
    "Seadra",
    "Goldeen",
    "Seaking",
    "Staryu",
    "Starmie",
    "Mr. Mime",
    "Scyther",
    "Jynx",
    "Electabuzz",
    "Magmar",
    "Pinsir",
    "Tauros",
    "Magikarp",
    "Gyarados",
    "Lapras",
    "Ditto",
    "Eevee",
    "Vaporeon",
    "Jolteon",
    "Flareon",
    "Porygon",
    "Omanyte",
    "Omastar",
    "Kabuto",
    "Kabutops",
    "Aerodactyl",
    "Snorlax",
    "Articuno",
    "Zapdos",
    "Moltres",
    "Dratini",
    "Dragonair",
    "Dragonite",
    "Mewtwo",
    "Mew",
    "Chikorita",
    "Bayleef",
    "Meganium",
    "Cyndaquil",
    "Quilava",
    "Typhlosion",
    "Totodile",
    "Croconaw",
    "Feraligatr",
    "Sentret",
    "Furret",
    "Hoothoot",
    "Noctowl",
    "Ledyba",
    "Ledian",
    "Spinarak",
    "Ariados",
    "Crobat",
    "Chinchou",
    "Lanturn",
    "Pichu",
    "Cleffa",
    "Igglybuff",
    "Togepi",
    "Togetic",
    "Natu",
    "Xatu",
    "Mareep",
    "Flaaffy",
    "Ampharos",
    "Bellossom",
    "Marill",
    "Azumarill",
    "Sudowoodo",
    "Politoed",
    "Hoppip",
    "Skiploom",
    "Jumpluff",
    "Aipom",
    "Sunkern",
    "Sunflora",
    "Yanma",
    "Wooper",
    "Quagsire",
    "Espeon",
    "Umbreon",
    "Murkrow",
    "Slowking",
    "Misdreavus",
    "Unown",
    "Wobbuffet",
    "Girafarig",
    "Pineco",
    "Forretress",
    "Dunsparce",
    "Gligar",
    "Steelix",
    "Snubbull",
    "Granbull",
    "Qwilfish",
    "Scizor",
    "Shuckle",
    "Heracross",
    "Sneasel",
    "Teddiursa",
    "Ursaring",
    "Slugma",
    "Magcargo",
    "Swinub",
    "Piloswine",
    "Corsola",
    "Remoraid",
    "Octillery",
    "Delibird",
    "Mantine",
    "Skarmory",
    "Houndour",
    "Houndoom",
    "Kingdra",
    "Phanpy",
    "Donphan",
    "Porygon2",
    "Stantler",
    "Smeargle",
    "Tyrogue",
    "Hitmontop",
    "Smoochum",
    "Elekid",
    "Magby",
    "Miltank",
    "Blissey",
    "Raikou",
    "Entei",
    "Suicune",
    "Larvitar",
    "Pupitar",
    "Tyranitar",
    "Lugia",
    "Ho-oh",
    "Celebi",
    "Treecko",
    "Grovyle",
    "Sceptile",
    "Torchic",
    "Combusken",
    "Blaziken",
    "Mudkip",
    "Marshtomp",
    "Swampert",
    "Poochyena",
    "Mightyena",
    "Zigzagoon",
    "Linoone",
    "Wurmple",
    "Silcoon",
    "Beautifly",
    "Cascoon",
    "Dustox",
    "Lotad",
    "Lombre",
    "Ludicolo",
    "Seedot",
    "Nuzleaf",
    "Shiftry",
    "Taillow",
    "Swellow",
    "Wingull",
    "Pelipper",
    "Ralts",
    "Kirlia",
    "Gardevoir",
    "Surskit",
    "Masquerain",
    "Shroomish",
    "Breloom",
    "Slakoth",
    "Vigoroth",
    "Slaking",
    "Nincada",
    "Ninjask",
    "Shedinja",
    "Whismur",
    "Loudred",
    "Exploud",
    "Makuhita",
    "Hariyama",
    "Azurill",
    "Nosepass",
    "Skitty",
    "Delcatty",
    "Sableye",
    "Mawile",
    "Aron",
    "Lairon",
    "Aggron",
    "Meditite",
    "Medicham",
    "Electrike",
    "Manectric",
    "Plusle",
    "Minun",
    "Volbeat",
    "Illumise",
    "Roselia",
    "Gulpin",
    "Swalot",
    "Carvanha",
    "Sharpedo",
    "Wailmer",
    "Wailord",
    "Numel",
    "Camerupt",
    "Torkoal",
    "Spoink",
    "Grumpig",
    "Spinda",
    "Trapinch",
    "Vibrava",
    "Flygon",
    "Cacnea",
    "Cacturne",
    "Swablu",
    "Altaria",
    "Zangoose",
    "Seviper",
    "Lunatone",
    "Solrock",
    "Barboach",
    "Whiscash",
    "Corphish",
    "Crawdaunt",
    "Baltoy",
    "Claydol",
    "Lileep",
    "Cradily",
    "Anorith",
    "Armaldo",
    "Feebas",
    "Milotic",
    "Castform",
    "Kecleon",
    "Shuppet",
    "Banette",
    "Duskull",
    "Dusclops",
    "Tropius",
    "Chimecho",
    "Absol",
    "Wynaut",
    "Snorunt",
    "Glalie",
    "Spheal",
    "Sealeo",
    "Walrein",
    "Clamperl",
    "Huntail",
    "Gorebyss",
    "Relicanth",
    "Luvdisc",
    "Bagon",
    "Shelgon",
    "Salamence",
    "Beldum",
    "Metang",
    "Metagross",
    "Regirock",
    "Regice",
    "Registeel",
    "Latias",
    "Latios",
    "Kyogre",
    "Groudon",
    "Rayquaza",
    "Jirachi",
    "Deoxys-normal",
    "Turtwig",
    "Grotle",
    "Torterra",
    "Chimchar",
    "Monferno",
    "Infernape",
    "Piplup",
    "Prinplup",
    "Empoleon",
    "Starly",
    "Staravia",
    "Staraptor",
    "Bidoof",
    "Bibarel",
    "Kricketot",
    "Kricketune",
    "Shinx",
    "Luxio",
    "Luxray",
    "Budew",
    "Roserade",
    "Cranidos",
    "Rampardos",
    "Shieldon",
    "Bastiodon",
    "Burmy",
    "Wormadam-plant",
    "Mothim",
    "Combee",
    "Vespiquen",
    "Pachirisu",
    "Buizel",
    "Floatzel",
    "Cherubi",
    "Cherrim",
    "Shellos",
    "Gastrodon",
    "Ambipom",
    "Drifloon",
    "Drifblim",
    "Buneary",
    "Lopunny",
    "Mismagius",
    "Honchkrow",
    "Glameow",
    "Purugly",
    "Chingling",
    "Stunky",
    "Skuntank",
    "Bronzor",
    "Bronzong",
    "Bonsly",
    "Mime-jr",
    "Happiny",
    "Chatot",
    "Spiritomb",
    "Gible",
    "Gabite",
    "Garchomp",
    "Munchlax",
    "Riolu",
    "Lucario",
    "Hippopotas",
    "Hippowdon",
    "Skorupi",
    "Drapion",
    "Croagunk",
    "Toxicroak",
    "Carnivine",
    "Finneon",
    "Lumineon",
    "Mantyke",
    "Snover",
    "Abomasnow",
    "Weavile",
    "Magnezone",
    "Lickilicky",
    "Rhyperior",
    "Tangrowth",
    "Electivire",
    "Magmortar",
    "Togekiss",
    "Yanmega",
    "Leafeon",
    "Glaceon",
    "Gliscor",
    "Mamoswine",
    "Porygon-z",
    "Gallade",
    "Probopass",
    "Dusknoir",
    "Froslass",
    "Rotom",
    "Uxie",
    "Mesprit",
    "Azelf",
    "Dialga",
    "Palkia",
    "Heatran",
    "Regigigas",
    "Giratina-altered",
    "Cresselia",
    "Phione",
    "Manaphy",
    "Darkrai",
    "Shaymin-land",
    "Arceus",
    "Victini",
    "Snivy",
    "Servine",
    "Serperior",
    "Tepig",
    "Pignite",
    "Emboar",
    "Oshawott",
    "Dewott",
    "Samurott",
    "Patrat",
    "Watchog",
    "Lillipup",
    "Herdier",
    "Stoutland",
    "Purrloin",
    "Liepard",
    "Pansage",
    "Simisage",
    "Pansear",
    "Simisear",
    "Panpour",
    "Simipour",
    "Munna",
    "Musharna",
    "Pidove",
    "Tranquill",
    "Unfezant",
    "Blitzle",
    "Zebstrika",
    "Roggenrola",
    "Boldore",
    "Gigalith",
    "Woobat",
    "Swoobat",
    "Drilbur",
    "Excadrill",
    "Audino",
    "Timburr",
    "Gurdurr",
    "Conkeldurr",
    "Tympole",
    "Palpitoad",
    "Seismitoad",
    "Throh",
    "Sawk",
    "Sewaddle",
    "Swadloon",
    "Leavanny",
    "Venipede",
    "Whirlipede",
    "Scolipede",
    "Cottonee",
    "Whimsicott",
    "Petilil",
    "Lilligant",
    "Basculin-red-striped",
    "Sandile",
    "Krokorok",
    "Krookodile",
    "Darumaka",
    "Darmanitan-standard",
    "Maractus",
    "Dwebble",
    "Crustle",
    "Scraggy",
    "Scrafty",
    "Sigilyph",
    "Yamask",
    "Cofagrigus",
    "Tirtouga",
    "Carracosta",
    "Archen",
    "Archeops",
    "Trubbish",
    "Garbodor",
    "Zorua",
    "Zoroark",
    "Minccino",
    "Cinccino",
    "Gothita",
    "Gothorita",
    "Gothitelle",
    "Solosis",
    "Duosion",
    "Reuniclus",
    "Ducklett",
    "Swanna",
    "Vanillite",
    "Vanillish",
    "Vanilluxe",
    "Deerling",
    "Sawsbuck",
    "Emolga",
    "Karrablast",
    "Escavalier",
    "Foongus",
    "Amoonguss",
    "Frillish",
    "Jellicent",
    "Alomomola",
    "Joltik",
    "Galvantula",
    "Ferroseed",
    "Ferrothorn",
    "Klink",
    "Klang",
    "Klinklang",
    "Tynamo",
    "Eelektrik",
    "Eelektross",
    "Elgyem",
    "Beheeyem",
    "Litwick",
    "Lampent",
    "Chandelure",
    "Axew",
    "Fraxure",
    "Haxorus",
    "Cubchoo",
    "Beartic",
    "Cryogonal",
    "Shelmet",
    "Accelgor",
    "Stunfisk",
    "Mienfoo",
    "Mienshao",
    "Druddigon",
    "Golett",
    "Golurk",
    "Pawniard",
    "Bisharp",
    "Bouffalant",
    "Rufflet",
    "Braviary",
    "Vullaby",
    "Mandibuzz",
    "Heatmor",
    "Durant",
    "Deino",
    "Zweilous",
    "Hydreigon",
    "Larvesta",
    "Volcarona",
    "Cobalion",
    "Terrakion",
    "Virizion",
    "Tornadus-incarnate",
    "Thundurus-incarnate",
    "Reshiram",
    "Zekrom",
    "Landorus-incarnate",
    "Kyurem",
    "Keldeo-ordinary",
    "Meloetta-aria",
    "Genesect",
    "Chespin",
    "Quilladin",
    "Chesnaught",
    "Fennekin",
    "Braixen",
    "Delphox",
    "Froakie",
    "Frogadier",
    "Greninja",
    "Bunnelby",
    "Diggersby",
    "Fletchling",
    "Fletchinder",
    "Talonflame",
    "Scatterbug",
    "Spewpa",
    "Vivillon",
    "Litleo",
    "Pyroar",
    "Flabebe",
    "Floette",
    "Florges",
    "Skiddo",
    "Gogoat",
    "Pancham",
    "Pangoro",
    "Furfrou",
    "Espurr",
    "Meowstic-male",
    "Honedge",
    "Doublade",
    "Aegislash-shield",
    "Spritzee",
    "Aromatisse",
    "Swirlix",
    "Slurpuff",
    "Inkay",
    "Malamar",
    "Binacle",
    "Barbaracle",
    "Skrelp",
    "Dragalge",
    "Clauncher",
    "Clawitzer",
    "Helioptile",
    "Heliolisk",
    "Tyrunt",
    "Tyrantrum",
    "Amaura",
    "Aurorus",
    "Sylveon",
    "Hawlucha",
    "Dedenne",
    "Carbink",
    "Goomy",
    "Sliggoo",
    "Goodra",
    "Klefki",
    "Phantump",
    "Trevenant",
    "Pumpkaboo-average",
    "Gourgeist-average",
    "Bergmite",
    "Avalugg",
    "Noibat",
    "Noivern",
    "Xerneas",
    "Yveltal",
    "Zygarde",
    "Diancie",
    "Hoopa",
    "Volcanion",
    "Rowlet",
    "Dartrix",
    "Decidueye",
    "Litten",
    "Torracat",
    "Incineroar",
    "Popplio",
    "Brionne",
    "Primarina",
    "Pikipek",
    "Trumbeak",
    "Toucannon",
    "Yungoos",
    "Gumshoos",
    "Grubbin",
    "Charjabug",
    "Vikavolt",
    "Crabrawler",
    "Crabominable",
    "Oricorio-baile",
    "Cutiefly",
    "Ribombee",
    "Rockruff",
    "Lycanroc-midday",
    "Wishiwashi-solo",
    "Mareanie",
    "Toxapex",
    "Mudbray",
    "Mudsdale",
    "Dewpider",
    "Araquanid",
    "Fomantis",
    "Lurantis",
    "Morelull",
    "Shiinotic",
    "Salandit",
    "Salazzle",
    "Stufful",
    "Bewear",
    "Bounsweet",
    "Steenee",
    "Tsareena",
    "Comfey",
    "Oranguru",
    "Passimian",
    "Wimpod",
    "Golisopod",
    "Sandygast",
    "Palossand",
    "Pyukumuku",
    "Type-null",
    "Silvally",
    "Minior-red-meteor",
    "Komala",
    "Turtonator",
    "Togedemaru",
    "Mimikyu-disguised",
    "Bruxish",
    "Drampa",
    "Dhelmise",
    "Jangmo-o",
    "Hakamo-o",
    "Kommo-o",
    "Tapu-koko",
    "Tapu-lele",
    "Tapu-bulu",
    "Tapu-fini",
    "Cosmog",
    "Cosmoem",
    "Solgaleo",
    "Lunala",
    "Nihilego",
    "Buzzwole",
    "Pheromosa",
    "Xurkitree",
    "Celesteela",
    "Kartana",
    "Guzzlord",
    "Necrozma",
    "Magearna",
    "Marshadow",
    "Poipole",
    "Naganadel",
    "Stakataka",
    "Blacephalon",
    "Zeraora",
    "Deoxys-attack",
    "Deoxys-defense",
    "Deoxys-speed",
    "Wormadam-sandy",
    "Wormadam-trash",
    "Shaymin-sky",
    "Giratina-origin",
    "Rotom-heat",
    "Rotom-wash",
    "Rotom-frost",
    "Rotom-fan",
    "Rotom-mow",
    "Castform-sunny",
    "Castform-rainy",
    "Castform-snowy",
    "Basculin-blue-striped",
    "Darmanitan-zen",
    "Meloetta-pirouette",
    "Tornadus-therian",
    "Thundurus-therian",
    "Landorus-therian",
    "Kyurem-black",
    "Kyurem-white",
    "Keldeo-resolute",
    "Meowstic-female",
    "Aegislash-blade",
    "Pumpkaboo-small",
    "Pumpkaboo-large",
    "Pumpkaboo-super",
    "Gourgeist-small",
    "Gourgeist-large",
    "Gourgeist-super",
    "Venusaur-mega",
    "Charizard-mega-x",
    "Charizard-mega-y",
    "Blastoise-mega",
    "Alakazam-mega",
    "Gengar-mega",
    "Kangaskhan-mega",
    "Pinsir-mega",
    "Gyarados-mega",
    "Aerodactyl-mega",
    "Mewtwo-mega-x",
    "Mewtwo-mega-y",
    "Ampharos-mega",
    "Scizor-mega",
    "Heracross-mega",
    "Houndoom-mega",
    "Tyranitar-mega",
    "Blaziken-mega",
    "Gardevoir-mega",
    "Mawile-mega",
    "Aggron-mega",
    "Medicham-mega",
    "Manectric-mega",
    "Banette-mega",
    "Absol-mega",
    "Garchomp-mega",
    "Lucario-mega",
    "Abomasnow-mega",
    "Floette-eternal",
    "Latias-mega",
    "Latios-mega",
    "Swampert-mega",
    "Sceptile-mega",
    "Sableye-mega",
    "Altaria-mega",
    "Gallade-mega",
    "Audino-mega",
    "Sharpedo-mega",
    "Slowbro-mega",
    "Steelix-mega",
    "Pidgeot-mega",
    "Glalie-mega",
    "Diancie-mega",
    "Metagross-mega",
    "Kyogre-primal",
    "Groudon-primal",
    "Rayquaza-mega",
    "Pikachu-rock-star",
    "Pikachu-belle",
    "Pikachu-pop-star",
    "Pikachu-phd",
    "Pikachu-libre",
    "Pikachu-cosplay",
    "Hoopa-unbound",
    "Camerupt-mega",
    "Lopunny-mega",
    "Salamence-mega",
    "Beedrill-mega",
    "Rattata-alola",
    "Raticate-alola",
    "Raticate-totem-alola",
    "Pikachu-original-cap",
    "Pikachu-hoenn-cap",
    "Pikachu-sinnoh-cap",
    "Pikachu-unova-cap",
    "Pikachu-kalos-cap",
    "Pikachu-alola-cap",
    "Raichu-alola",
    "Sandshrew-alola",
    "Sandslash-alola",
    "Vulpix-alola",
    "Ninetales-alola",
    "Diglett-alola",
    "Dugtrio-alola",
    "Meowth-alola",
    "Persian-alola",
    "Geodude-alola",
    "Graveler-alola",
    "Golem-alola",
    "Grimer-alola",
    "Muk-alola",
    "Exeggutor-alola",
    "Marowak-alola",
    "Greninja-battle-bond",
    "Greninja-ash",
    "Zygarde-10",
    "Zygarde-50",
    "Zygarde-complete",
    "Gumshoos-totem",
    "Vikavolt-totem",
    "Oricorio-pom-pom",
    "Oricorio-pau",
    "Oricorio-sensu",
    "Lycanroc-midnight",
    "Wishiwashi-school",
    "Lurantis-totem",
    "Salazzle-totem",
    "Minior-orange-meteor",
    "Minior-yellow-meteor",
    "Minior-green-meteor",
    "Minior-blue-meteor",
    "Minior-indigo-meteor",
    "Minior-violet-meteor",
    "Minior-red",
    "Minior-orange",
    "Minior-yellow",
    "Minior-green",
    "Minior-blue",
    "Minior-indigo",
    "Minior-violet",
    "Mimikyu-busted",
    "Mimikyu-totem-disguised",
    "Mimikyu-totem-busted",
    "Kommo-o-totem",
    "Magearna-original",
    "Pikachu-partner-cap",
    "Marowak-totem",
    "Ribombee-totem",
    "Rockruff-own-tempo",
    "Lycanroc-dusk",
    "Araquanid-totem",
    "Togedemaru-totem",
    "Necrozma-dusk",
    "Necrozma-dawn",
    "Necrozma-ultra"
  ])

  @doc """
  Returns a location from Pokemon universe

  ## Examples

      iex> Faker.Pokemon.En.location()
      "Vaniville Town"
      iex> Faker.Pokemon.En.location()
      "Slateport City"
      iex> Faker.Pokemon.En.location()
      "Shalour City"
      iex> Faker.Pokemon.En.location()
      "Solaceon Town"
  """
  @spec location() :: String.t()
  # Uniformly samples one town/city/area name from the main-series games.
  # Doctest values depend on Faker's deterministic seed — do not reorder.
  sampler(:location, [
    "Accumula Town",
    "Ambrette Town",
    "Anistar City",
    "Anville Town",
    "Aquacorde Town",
    "Aspertia City",
    "Azalea Town",
    "Black City",
    "Blackthorn City",
    "Camphrier Town",
    "Canalave City",
    "Castelia City",
    "Celadon City",
    "Celestic Town",
    "Cerulean City",
    "Cherrygrove City",
    "Cianwood City",
    "Cinnabar Island",
    "Coumarine City",
    "Couriway Town",
    "Cyllage City",
    "Dendemille Town",
    "Dewford Town",
    "Driftveil City",
    "Ecruteak City",
    "Eterna City",
    "Ever Grande City",
    "Fallarbor Town",
    "Fight Area",
    "Five Island",
    "Floaroma Town",
    "Floccesy Town",
    "Fortree City",
    "Four Island",
    "Frontier Access",
    "Fuchsia City",
    "Geosenge Town",
    "Goldenrod City",
    "Hearthome City",
    "Humilau City",
    "Icirrus City",
    "Jubilife City",
    "Kiloude City",
    "Lacunosa Town",
    "Lavaridge Town",
    "Lavender Town",
    "Laverre City",
    "Lentimas Town",
    "Littleroot Town",
    "Lilycove City",
    "Lumiose City",
    "Mahogany Town",
    "Mauville City",
    "Mistralton City",
    "Mossdeep City",
    "Nacrene City",
    "New Bark Town",
    "Nimbasa City",
    "Nuvema Town",
    "Oldale Town",
    "Olivine City",
    "One Island",
    "Opelucid City",
    "Oreburgh City",
    "Pacifidlog Town",
    "Pallet Town",
    "Pastoria City",
    "Petalburg City",
    "Pewter City",
    "Resort Area",
    "Rustboro City",
    "Safari Zone Gate",
    "Saffron City",
    "Sandgem Town",
    "Santalune City",
    "Striaton City",
    "Seven Island",
    "Shalour City",
    "Six Island",
    "Slateport City",
    "Snowbelle City",
    "Snowpoint City",
    "Solaceon Town",
    "Sootopolis City",
    "Sunyshore City",
    "Survival Area",
    "Three Island",
    "Twinleaf Town",
    "Two Island",
    "Undella Town",
    "Vaniville Town",
    "Veilstone City",
    "Verdanturf Town",
    "Vermilion City",
    "Violet City",
    "Virbank City",
    "Viridian City",
    "White Forest"
  ])
end
|
lib/faker/pokemon/en.ex
| 0.63114
| 0.422088
|
en.ex
|
starcoder
|
defmodule Chiton do
  @moduledoc """
  Lowest-total-risk path search over a grid of risk levels (Advent of Code
  2021, day 15: "Chiton").

  The grid is a map from `{x, y}` coordinates to integer risk values. The
  search runs A* from `{0, 0}` towards the maximal coordinate using a
  Manhattan-distance heuristic.
  """

  @type coordinate :: {integer(), integer()}

  @doc """
  Tiles the input map into the full 5x5 expanded map (part 2 of the puzzle).

  Each tile copy adds its tile offsets to every cell's risk; values above 9
  wrap back around to 1 (`rem(v + dx + dy - 1, 9) + 1`).
  """
  @spec expand_map(%{coordinate() => integer()}) :: %{coordinate() => integer()}
  def expand_map(map) do
    {mx, my} = Map.keys(map) |> Enum.sort() |> List.last()
    tile_coords = Map.to_list(map)

    for x_xp <- 0..4 do
      for y_xp <- 0..4 do
        Enum.map(tile_coords, fn {{x, y}, v} ->
          {
            {(mx + 1) * x_xp + x, (my + 1) * y_xp + y},
            # Risk grows by the summed tile offsets and wraps from 9 to 1.
            rem(v + x_xp + y_xp - 1, 9) + 1
          }
        end)
      end
    end
    |> List.flatten()
    |> Map.new()
  end

  @doc """
  Returns the orthogonal neighbours of `{x, y}` that lie inside the bounds
  `{0, 0}`..`{mx, my}` (inclusive).
  """
  @spec get_neighbours(coordinate(), coordinate()) :: [coordinate()]
  def get_neighbours({x, y}, {mx, my}) do
    [{x - 1, y}, {x + 1, y}, {x, y - 1}, {x, y + 1}]
    |> Enum.reject(fn {c, d} -> c < 0 or c > mx or d < 0 or d > my end)
  end

  @doc """
  Reads the distance to the bottom-right corner out of a distance map, or
  `:infinity` when the corner was never reached.
  """
  @spec get_corner_distance(%{coordinate() => integer()}) :: integer() | :infinity
  def get_corner_distance(path_map) do
    tgt = Map.keys(path_map) |> Enum.sort() |> List.last()
    Map.get(path_map, tgt, :infinity)
  end

  @doc """
  Manhattan distance between two coordinates.

  For this specific use case we only measure towards the goal, so `c >= x`
  and `d >= y` are assumed and no absolute values are taken.
  """
  @spec manhattan(coordinate(), coordinate()) :: integer()
  def manhattan({x, y}, {c, d}) do
    (c - x) + (d - y)
  end

  @doc """
  Runs A* from `{0, 0}` and returns the map of best-known distances
  (g-scores); the goal's entry is the answer.
  """
  @spec compute_all_paths(%{coordinate() => integer()}) :: %{coordinate() => integer()}
  def compute_all_paths(map) do
    start = {0, 0}
    goal = Map.keys(map) |> Enum.sort() |> List.last()
    openset = [start]
    came_from = %{}
    g_score = %{start => 0}
    f_score = %{start => manhattan(start, goal)}
    a_star(map, came_from, g_score, f_score, goal, openset)
  end

  # Relaxes every listed neighbour of `current`, threading the A*
  # bookkeeping through the recursion. Returns the updated bookkeeping as a
  # 4-tuple (the original @spec incorrectly declared a bare map here).
  @spec a_star_inner(
          %{coordinate() => integer()},
          coordinate(),
          coordinate(),
          %{coordinate() => coordinate()},
          %{coordinate() => integer()},
          %{coordinate() => integer()},
          [coordinate()],
          [coordinate()]
        ) ::
          {%{coordinate() => coordinate()}, %{coordinate() => integer()},
           %{coordinate() => integer()}, [coordinate()]}
  def a_star_inner(map, current, goal, came_from, g_score, f_score, openset, neighbours)

  def a_star_inner(_map, _current, _goal, came_from, g_score, f_score, openset, []) do
    {came_from, g_score, f_score, openset}
  end

  def a_star_inner(map, current, goal, came_from, g_score, f_score, openset, [nb | nb_tl]) do
    # NB: integers compare below :infinity in Erlang term order, so the
    # atom works as "not yet scored".
    tentative_g_score = g_score[current] + map[nb]

    if tentative_g_score < Map.get(g_score, nb, :infinity) do
      # Found a strictly better route to `nb`: record it and re-open `nb`.
      a_star_inner(
        map,
        current,
        goal,
        Map.put(came_from, nb, current),
        Map.put(g_score, nb, tentative_g_score),
        Map.put(f_score, nb, tentative_g_score + manhattan(nb, goal)),
        [nb | openset],
        nb_tl
      )
    else
      a_star_inner(map, current, goal, came_from, g_score, f_score, openset, nb_tl)
    end
  end

  @doc """
  A* main loop: pops the open-set node with the lowest f-score until the
  goal is reached (or the open set empties) and returns the g-score map.
  """
  @spec a_star(
          %{coordinate() => integer()},
          %{coordinate() => coordinate()},
          %{coordinate() => integer()},
          %{coordinate() => integer()},
          coordinate(),
          [coordinate()]
        ) :: %{coordinate() => integer()}
  def a_star(map, came_from, g_score, f_score, goal, openset)

  def a_star(_map, _came_from, g_score, _f_score, _goal, []) do
    g_score
  end

  # Repeating `goal` in the pattern makes this clause match only when the
  # head of the open set *is* the goal.
  def a_star(_map, _came_from, g_score, _f_score, goal, [goal | _]) do
    g_score
  end

  def a_star(map, came_from, g_score, f_score, goal, [current | openset]) do
    {came_from, g_score, f_score, openset} =
      a_star_inner(
        map,
        current,
        goal,
        came_from,
        g_score,
        f_score,
        openset,
        get_neighbours(current, goal)
      )

    # Keep the open set ordered by f-score so the head is the best candidate.
    openset = openset |> Enum.sort(fn a, b -> f_score[a] <= f_score[b] end) |> Enum.uniq()
    a_star(map, came_from, g_score, f_score, goal, openset)
  end
end
|
lib/chiton.ex
| 0.69451
| 0.564098
|
chiton.ex
|
starcoder
|
defmodule Welcome2Game.Game do
  @moduledoc """
  Core game logic for Welcome2: deck setup, drawing, and every player move
  (permits, building, effects, commit/rollback).

  All moves are pure transformations of a `State` struct; `commit/1`
  finalizes a move and `rollback/1` restores the pre-move checkpoint.
  """

  alias Welcome2Game.{
    Card,
    Plan,
    State,
    Tableau,
    MoveFinder,
    EstateMaker,
    EstatePlanner,
    GameEnder
  }

  use Gex.Game

  # Gex.Game callback: a fresh game is both the default and the random state.
  def default_state() do
    Welcome2Game.Game.new_game()
  end

  def random_state() do
    Welcome2Game.Game.new_game()
  end

  # Builds a new game: decodes and shuffles the card deck, picks one random
  # plan from each of the three plan groups, splits the deck into three
  # equal piles, and draws the first card onto each pile's shown stack.
  def new_game do
    deck =
      Welcome2Constants.deck()
      |> Poison.decode!(as: [%Card{}])
      |> Enum.shuffle()

    [plan1, plan2, plan3] =
      Enum.map(
        Welcome2Constants.plans(),
        fn json ->
          Poison.decode!(json, as: [%Welcome2Game.Plan{}])
          |> Enum.shuffle()
          |> hd
          |> Plan.clean_needs()
        end
      )

    size = deck |> length |> div(3)

    %State{
      state: :playing,
      plan1: plan1,
      plan2: plan2,
      plan3: plan3,
      deck1: deck |> Enum.slice(0 * size, size),
      deck2: deck |> Enum.slice(1 * size, size),
      deck3: deck |> Enum.slice(2 * size, size),
      shown1: [],
      shown2: [],
      shown3: [],
      player: %Tableau{}
    }
    |> draw
  end

  # Flips the top card of each of the three decks onto its shown pile.
  # Raises (MatchError) if any deck is empty — see the note in the cond.
  def draw(state) do
    %{
      deck1: [drawn_card1 | remainder_deck1],
      deck2: [drawn_card2 | remainder_deck2],
      deck3: [drawn_card3 | remainder_deck3],
      shown1: shown1,
      shown2: shown2,
      shown3: shown3
    } =
      cond do
        # commented out, because end of game should usually occur if the decks are empty
        # length(state.deck1) <= 1 -> shuffle(state)
        true -> state
      end

    %State{
      state
      | deck1: remainder_deck1,
        deck2: remainder_deck2,
        deck3: remainder_deck3,
        shown1: [drawn_card1 | shown1],
        shown2: [drawn_card2 | shown2],
        shown3: [drawn_card3 | shown3]
    }
  end

  # Reshuffles all decks and shown piles back into three fresh piles and
  # draws again. (Currently unreferenced by draw/1 — kept for reuse.)
  def shuffle(state) do
    deck =
      (state.deck1 ++ state.shown1 ++ state.deck2 ++ state.shown2 ++ state.deck3 ++ state.shown3)
      |> Enum.shuffle()

    len = deck |> length
    size = div(len, 3)

    %State{
      state
      | deck1: deck |> Enum.slice(0 * size, size),
        deck2: deck |> Enum.slice(1 * size, size),
        deck3: deck |> Enum.slice(2 * size, size),
        shown1: [],
        shown2: [],
        shown3: []
    }
    |> draw
  end

  # Takes the top shown card of pile `number` as the active permit. Sets a
  # checkpoint (for rollback) if one is not already in progress.
  def permit(state, number) do
    %State{
      state
      | permit: state |> Map.get(:"shown#{number}") |> hd,
        checkpoint: state.checkpoint || state,
        current_move: [{:permit, number} | state.current_move]
    }
  end

  # Declines to play a permit this turn, counting a refusal on the tableau.
  def refuse(state) do
    %State{
      state
      | player: struct(state.player, %{refusals: state.player.refusals + 1}),
        permit: :refused,
        checkpoint: state.checkpoint || state,
        current_move: [:refuse | state.current_move]
    }
  end

  # Writes the active permit's face value into house {row, index}.
  def build(state, row, index) do
    %State{
      state
      | player: struct(state.player, %{:"row#{row}#{index}number" => state.permit.face}),
        built: {row, index},
        current_move: [{:build, row, index} | state.current_move]
    }
  end

  # Pool effect: marks a pool on the just-built house and bumps the count.
  def pool(state = %{built: {row, index}}) do
    effect = :pool

    %State{
      state
      | player:
          struct(state.player, %{
            :pools => state.player.pools + 1,
            :"row#{row}#{index}pool" => true
          }),
        effect: effect,
        current_move: [effect | state.current_move]
    }
  end

  # Real-estate agent effect: advances the score track for estates of
  # `size`, clamping at the track's end (next_estate returns nil there).
  def agent(state, size) do
    effect = {:agent, size}
    old_value = state.player |> Map.get(:"estate#{size}")
    new_value = MoveFinder.next_estate(size, old_value) || old_value

    %State{
      state
      | player: struct(state.player, %{:"estate#{size}" => new_value}),
        effect: effect,
        current_move: [effect | state.current_move]
    }
  end

  # Park effect: advances the park track of the row that was just built in.
  def park(state = %{built: {row, _}}) do
    effect = :park
    old_value = Map.get(state.player, :"park#{row}")
    new_value = MoveFinder.next_park(row, old_value)

    %State{
      state
      | player: struct(state.player, %{:"park#{row}" => new_value}),
        effect: effect,
        current_move: [effect | state.current_move]
    }
  end

  # Fence effect: raises the fence at {row, index}.
  def fence(state, row, index) do
    effect = {:fence, row, index}

    %State{
      state
      | player: struct(state.player, %{:"fence#{row}#{index}" => true}),
        effect: effect,
        current_move: [effect | state.current_move]
    }
  end

  # Bis effect: duplicates a neighbouring house number (at index + offset)
  # into {row, index} and counts the bis penalty.
  def bis(state, row, index, offset) do
    effect = {:bis, row, index, offset}

    %State{
      state
      | player:
          struct(state.player, %{
            :"row#{row}#{index}bis" => true,
            :bis => state.player.bis + 1,
            :"row#{row}#{index}number" =>
              Map.get(state.player, :"row#{row}#{index + offset}number")
          }),
        effect: effect,
        current_move: [effect | state.current_move]
    }
  end

  # Temp-agency effect: builds at the permit's face shifted by `offset`
  # and counts a temp use.
  def temp(state, row, index, offset) do
    effect = {:temp, row, index, offset}

    %State{
      state
      | player:
          struct(state.player, %{
            :"row#{row}#{index}number" => state.permit.face + offset,
            :temps => state.player.temps + 1
          }),
        built: {row, index},
        effect: effect,
        current_move: [effect | state.current_move]
    }
  end

  # Finalizes the in-progress move: clears the transient fields, archives
  # the move list, runs the derived-state updaters, then draws new cards.
  def commit(state) do
    %State{
      state
      | permit: nil,
        built: nil,
        effect: nil,
        moves: state.current_move ++ [:commit] ++ state.moves,
        current_move: [],
        checkpoint: nil
    }
    |> EstateMaker.update()
    |> EstatePlanner.update()
    |> GameEnder.update()
    |> draw
  end

  # Abandons the in-progress move by restoring the saved checkpoint.
  # NOTE(review): returns nil when no move is in progress — callers must
  # guard against that.
  def rollback(state) do
    state.checkpoint
  end

  # Projects the state into the client-facing view: visible cards, plans,
  # tableau, and the list of currently legal moves.
  def view(state) do
    %{
      winner: state.winner,
      player: state.player |> Map.from_struct(),
      plan1: state.plan1,
      plan2: state.plan2,
      plan3: state.plan3,
      plan1_used: state.plan1_used,
      plan2_used: state.plan2_used,
      plan3_used: state.plan3_used,
      deck1_suit: state.deck1 |> top |> Map.get(:suit),
      deck2_suit: state.deck2 |> top |> Map.get(:suit),
      deck3_suit: state.deck3 |> top |> Map.get(:suit),
      deck1_length: state.deck1 |> length,
      deck2_length: state.deck2 |> length,
      deck3_length: state.deck3 |> length,
      shown1: state.shown1 |> top,
      shown2: state.shown2 |> top,
      shown3: state.shown3 |> top,
      state: state.state,
      permit: state.permit,
      built: state.built,
      moves: MoveFinder.moves(state)
    }
  end

  # Head of a list, or nil for an empty one (unlike hd/1, which raises).
  defp top([]) do
    nil
  end

  defp top(list) do
    list |> hd
  end
end
|
apps/welcome2_game/lib/welcome2_game/game.ex
| 0.520009
| 0.48182
|
game.ex
|
starcoder
|
defmodule Day15 do
  @moduledoc """
  Advent of Code 2019, Day 15: Oxygen System — entry point.

  Loads the Intcode program from the puzzle input and runs both parts.
  """

  alias Day15.{Part1, Part2}

  @doc """
  Reads the puzzle input and parses it into an Intcode program
  (a list of integers).
  """
  def get_program() do
    raw =
      Path.join(__DIR__, "inputs/day15.txt")
      |> File.read!()
      |> String.trim()

    for field <- String.split(raw, ","), do: String.to_integer(field)
  end

  @doc """
  Solves both parts and prints the answers.
  """
  def execute() do
    program = get_program()
    IO.puts("Part 1: #{Part1.run(program)}")
    IO.puts("Part 2: #{Part2.run(program)}")
  end
end
defmodule Queue do
  @moduledoc """
  Thin FIFO wrapper around Erlang's `:queue`, with shorter names and the
  queue as the first argument for pipelining.
  """

  # A fresh, empty queue.
  def new() do
    :queue.new()
  end

  # Pushes `val` onto the back of `queue`.
  def enq(queue, val) do
    :queue.in(val, queue)
  end

  # Pops from the front: `{{:value, v}, rest}` or `{:empty, queue}`.
  def deq(queue) do
    :queue.out(queue)
  end

  # True when the queue holds no elements.
  def empty?(queue) do
    :queue.is_empty(queue)
  end
end
defmodule Day15.Part1 do
  # BFS over the unexplored area: every queued entry replays one movement
  # command through a fresh Intcode "repair droid" started from the entry's
  # saved program state. BFS guarantees the first time the droid reports the
  # oxygen system, the command series is a shortest path, so its length is
  # the answer.
  def run(program) do
    program
    |> queue_initial_coords()
    |> find_path_to_oxygen_system()
    |> (&elem(&1, 0)).()
    |> length()
  end

  # Seeds the search queue with the four cells adjacent to {x, y}. Each
  # entry is {reversed_command_series, coords, %{program: state}}; the
  # commands 1..4 correspond to the four unit moves (see move_coords/2).
  def queue_initial_coords(program, {x, y} \\ {0, 0}) do
    Queue.new()
    |> Queue.enq({[1], {x, y + 1}, %{program: program}})
    |> Queue.enq({[2], {x, y - 1}, %{program: program}})
    |> Queue.enq({[3], {x - 1, y}, %{program: program}})
    |> Queue.enq({[4], {x + 1, y}, %{program: program}})
  end

  # Pops the next frontier entry and steps the droid once in the entry's
  # most recent direction. Droid status: 0 = wall, 1 = moved, 2 = reached
  # the oxygen system. Returns {series, {coords, program_state}} on success.
  def find_path_to_oxygen_system(discovered, visited \\ MapSet.new()) do
    {{:value, {series, coords, program_state}}, discovered} = Queue.deq(discovered)
    direction = List.first(series)
    visited = MapSet.put(visited, coords)

    case run_repair_droid(direction, program_state) do
      {0, _} ->
        # Hit a wall: this branch dies; keep searching the rest.
        find_path_to_oxygen_system(discovered, visited)

      {1, program_state} ->
        queue_newly_discovered(discovered, visited, direction, series, coords, program_state)
        |> find_path_to_oxygen_system(visited)

      {2, program_state} ->
        {series, {coords, program_state}}
    end
  end

  # Walking back the way we came is never useful, so each direction's
  # opposite is pruned during expansion.
  @opposites %{1 => 2, 2 => 1, 3 => 4, 4 => 3}

  # Enqueues the unvisited neighbours of `coords`, extending the command
  # series with the direction that reaches each one.
  def queue_newly_discovered(discovered, visited, direction, series, coords, program_state) do
    [1, 2, 3, 4]
    |> List.delete(@opposites[direction])
    |> Enum.map(fn next_direction ->
      new_coords = move_coords(coords, next_direction)

      if not MapSet.member?(visited, new_coords),
        do: {[next_direction | series], new_coords, program_state},
        else: nil
    end)
    |> Enum.reject(&(&1 == nil))
    |> Enum.reduce(discovered, &Queue.enq(&2, &1))
  end

  # Boots a throwaway Intcode server, restores the saved program state,
  # feeds it one movement command, and captures the resulting state.
  # NOTE(review): assumes the project's Intcode GenServer supports the
  # {:set_state, _}, {:run, _} and {:get_state} calls — confirm against
  # that module.
  def run_repair_droid(direction, program_state) do
    GenServer.start_link(Intcode, [], name: RepairDroid)
    GenServer.call(RepairDroid, {:set_state, program_state})
    {:output, status} = GenServer.call(RepairDroid, {:run, [direction]})
    state = GenServer.call(RepairDroid, {:get_state})
    GenServer.stop(RepairDroid)
    {status, state}
  end

  # Translates a movement command into a coordinate step.
  defp move_coords({x, y}, direction) do
    case direction do
      1 -> {x, y + 1}
      2 -> {x, y - 1}
      3 -> {x - 1, y}
      4 -> {x + 1, y}
    end
  end
end
defmodule Day15.Part2 do
  alias Day15.Part1

  # Part 2: first locate the oxygen system with Part 1's BFS, then run the
  # same BFS again starting from the oxygen system's coordinates. The
  # length of the last path the exhaustive BFS produces is the number of
  # minutes oxygen needs to fill the whole area.
  def run(program) do
    {oxygen_coords, %{program: program}} =
      program
      |> Part1.queue_initial_coords()
      |> Part1.find_path_to_oxygen_system()
      |> (&elem(&1, 1)).()

    program
    |> Part1.queue_initial_coords(oxygen_coords)
    |> find_longest_path(MapSet.new([oxygen_coords]))
    |> length()
  end

  @doc """
  A BFS traversal where we return the last (and longest, by BFS definition) path
  from the oxygen tank to a coordinate in the area.
  """
  def find_longest_path(discovered, visited) do
    {{:value, {series, coords, program_state}}, discovered} = Queue.deq(discovered)
    [direction | series_tail] = series
    {code, program_state} = Part1.run_repair_droid(direction, program_state)

    discovered =
      case code do
        0 ->
          # Wall: nothing new reachable through this entry.
          discovered

        1 ->
          Part1.queue_newly_discovered(
            discovered,
            visited,
            direction,
            series,
            coords,
            program_state
          )
      end

    visited = MapSet.put(visited, coords)

    # Once the frontier is exhausted, the current series is the longest
    # path. If the final step hit a wall (code != 1), that last command is
    # dropped from the returned path.
    if Queue.empty?(discovered),
      do: if(code == 1, do: series, else: series_tail),
      else: find_longest_path(discovered, visited)
  end
end
|
lib/day15.ex
| 0.715126
| 0.453867
|
day15.ex
|
starcoder
|
defmodule Day14 do
  @moduledoc """
  AoC 2019, Day 14 - Space Stoichiometry
  """

  # Search state: `need` maps chemicals to outstanding amounts, `stock` to
  # available leftovers, `steps` records the reactions applied, and
  # `ore_used` accumulates the ORE consumed so far.
  defmodule State do
    defstruct ore_used: 0, steps: [], need: %{}, stock: %{}
  end

  @doc """
  Amount of ORE required by the nanofactory to produce 1 FUEL
  """
  def part1 do
    Util.priv_file(:day14, "day14_input.txt")
    |> File.read!()
    |> min_ore()
  end

  @doc """
  Compute the max fuel that can be produced with 1 trillion ore
  """
  def part2 do
    Util.priv_file(:day14, "day14_input.txt")
    |> File.read!()
    |> max_fuel()
  end

  @doc """
  Compute maximum fuel that can be produced with 1 trillion ore
  """
  def max_fuel(str) do
    rules = parse_str(str)
    # Bracket the answer between 1 fuel and a deliberately huge upper
    # guess, then binary-search on the fuel amount.
    binary_search(rules, run(rules, 1), run(rules, 50_000_000_000))
  end

  # Runs the production search for `target` fuel and returns
  # {target, ore_consumed}. The oversized ORE stock acts as "unlimited
  # ore"; the oversized best.ore_used acts as a worst-case pruning bound.
  defp run(rules, target) do
    result =
      generate(
        rules,
        rules,
        %State{need: %{FUEL: target}, stock: %{ORE: 100_000_000_000_000_000_000}},
        %State{ore_used: 300_000_000_000_0000}
      )
    ore = Map.get(result, :ore_used)
    {target, ore}
  end

  # Ore available in the cargo hold for part 2 (1 trillion).
  @cargo_ore 1_000_000_000_000

  # Binary search over fuel amounts for the largest target whose ORE cost
  # fits in @cargo_ore. Converges when the bracket collapses.
  def binary_search(_rules, {low_fuel, _low_ore}, {high_fuel, _high_ore})
      when low_fuel >= high_fuel do
    low_fuel
  end

  def binary_search(rules, low = {low_fuel, _low_ore}, high = {high_fuel, _high_ore}) do
    target = low_fuel + Integer.floor_div(high_fuel - low_fuel, 2)

    if low_fuel == target do
      low_fuel
    else
      new = {_actual, ore} = run(rules, target)

      cond do
        ore == @cargo_ore -> target
        ore < @cargo_ore -> binary_search(rules, new, high)
        ore > @cargo_ore -> binary_search(rules, low, new)
      end
    end
  end

  @doc """
  Compute the minimum required ore to produce 1 fuel from the given machine
  """
  def min_ore(str) do
    rules = parse_str(str)
    generate(rules, rules, %State{need: %{FUEL: 1}, stock: %{ORE: 100_000_000_000_000}}, %State{
      ore_used: 300_000_000
    })
    |> Map.get(:ore_used)
  end

  # Parses reaction lines like "7 A, 1 B => 1 C" into
  # {[{7, :A}, {1, :B}], {1, :C}} — inputs list plus the produced output.
  defp parse_str(str) do
    String.split(str, "\n", trim: true)
    |> Enum.map(&String.replace(&1, ["=", ">", ","], ""))
    |> Enum.map(&String.split/1)
    |> Enum.map(&Enum.chunk_every(&1, 2))
    |> Enum.map(&Enum.map(&1, fn [n, s] -> {String.to_integer(n), String.to_atom(s)} end))
    |> Enum.map(&Enum.reverse/1)
    |> Enum.map(fn [h | rest] -> {rest, h} end)
  end

  # Prune: current branch already costs more ore than the best known.
  defp generate(_a, _c, %State{ore_used: c}, best = %State{ore_used: b}) when c > b, do: best
  # Done: nothing left to produce.
  defp generate(_all_rules, _curr_rules, s = %State{need: n}, _best) when map_size(n) == 0, do: s
  # No rule matched any outstanding need.
  defp generate(_all_rules, [], _curr, _best), do: nil

  # Tries the next rule: if its product `p` is needed, applies the rule as
  # many whole times as the need allows (at least once), moving the costs
  # into `need`/`stock` bookkeeping, then restarts the rule scan.
  # NOTE(review): negative entries in needs/stock are swapped across via
  # add_costs — effectively netting surplus against demand; confirm against
  # the puzzle's expected results before relying on edge cases.
  defp generate(all_rules, [{lst, {cnt, p}} | rest], state, best) do
    if Map.has_key?(state.need, p) do
      need_val = Map.get(state.need, p)
      mult = Integer.floor_div(need_val, cnt)
      mult = if mult == 0, do: 1, else: mult
      {cnt, p} = {cnt * mult, p}
      lst = Enum.map(lst, fn {cnt, k} -> {cnt * mult, k} end)
      needs = subtract_generated(state.need, {cnt, p})
      neg_needs = Enum.filter(needs, fn {_k, v} -> v < 0 end)
      needs = remove_less_than_zero(needs)
      stock = Enum.reduce(lst, state.stock, &reduce_stock/2)
      neg_stock = Enum.filter(stock, fn {_k, v} -> v < 0 end)
      stock = remove_less_than_zero(stock)
      needs = add_costs(needs, neg_stock)
      stock = add_costs(stock, neg_needs)
      steps = [{lst, {cnt, p}} | state.steps]
      ore = Enum.reduce(lst, state.ore_used, &add_ore_used/2)
      generate(
        all_rules,
        all_rules,
        %State{state | need: needs, stock: stock, steps: steps, ore_used: ore},
        best
      )
    else
      generate(all_rules, rest, state, best)
    end
  end

  # Only ORE inputs contribute to the consumed-ore tally.
  defp add_ore_used({cnt, :ORE}, acc), do: acc + cnt
  defp add_ore_used(_cost, acc), do: acc

  # Deducts an input cost from stock; unknown chemicals go negative
  # (i.e. become a new need, resolved later via add_costs).
  defp reduce_stock({cnt, kind}, map) do
    if Map.has_key?(map, kind) do
      Map.update!(map, kind, &(&1 - cnt))
    else
      Map.put(map, kind, -1 * cnt)
    end
  end

  # Subtracts the freshly produced amount from the outstanding need.
  defp subtract_generated(map, {cnt, k}) do
    Map.update!(map, k, &(&1 - cnt))
  end

  # Flips negative balances from one ledger into positive ones on the
  # other (negatives arrive as {key, negative_count} pairs).
  defp add_costs(map, []), do: map
  defp add_costs(map, [{k, cnt} | rest]) do
    Map.update(map, k, -1 * cnt, &(&1 + -1 * cnt))
    |> add_costs(rest)
  end

  # Drops entries that are zero or negative.
  defp remove_less_than_zero(map) do
    map
    |> Enum.filter(fn {_k, v} -> v > 0 end)
    |> Enum.into(%{})
  end
end
|
apps/day14/lib/day14.ex
| 0.736969
| 0.465509
|
day14.ex
|
starcoder
|
defmodule CanvasAPI.TeamService do
  @moduledoc """
  A service for viewing and manipulating teams.
  """

  use CanvasAPI.Web, :service

  alias CanvasAPI.{Account, WhitelistedSlackDomain, Team}
  import CanvasAPI.UUIDMatch

  # Associations loaded with every team returned by this service.
  @preload [:oauth_tokens]

  @doc """
  Insert a team.

  ## Examples

  ```elixir
  TeamService.insert(params, type: :personal)
  ```
  """
  @spec insert(map, Keyword.t) :: {:ok, %Team{}} | {:error, Ecto.Changeset.t}
  def insert(params, type: :personal) do
    %Team{}
    |> Team.create_changeset(params, type: :personal)
    |> Repo.insert
  end

  # Slack teams are additionally gated by a domain whitelist when the
  # DOMAIN_WHITELISTING env var is set (checked at runtime).
  def insert(params, type: :slack) do
    if !System.get_env("DOMAIN_WHITELISTING") ||
        domain_whitelisted?(params["domain"]) do
      %Team{}
      |> Team.create_changeset(params, type: :slack)
      |> Repo.insert
    else
      {:error, {:domain_not_whitelisted, params["domain"]}}
    end
  end

  @doc """
  List teams for a given account.

  Options:

  - `filter`: `map` A string-keyed filter map
    - `domain`: `String.t` A domain to filter teams by

  ## Examples

  ```elixir
  TeamService.list(account, filter: %{"domain" => "usecanvas"})
  ```
  """
  @spec list(%Account{}, Keyword.t) :: [%Team{}]
  def list(account, opts) do
    # Slack teams (non-nil slack_id) sort before personal ones, then by name.
    from(t in assoc(account, :teams),
      order_by: [desc: is_nil(t.slack_id), asc: :name],
      preload: ^@preload)
    |> filter(opts[:filter])
    |> Repo.all
  end

  @doc """
  Show a specific team by ID or domain.

  Options:

  - `account`: `%Account{}` An account to scope the team find to

  ## Examples

  ```elixir
  TeamService.show("usecanvas")
  ```
  """
  @spec show(String.t, Keyword.t) :: {:ok, %Team{}} | {:error, :not_found}
  def show(id, opts \\ [])

  def show(id, account: account) do
    from(assoc(account, :teams), preload: ^@preload)
    |> do_get(id)
  end

  def show(id, _opts) do
    from(Team, preload: ^@preload)
    |> do_get(id)
  end

  @doc """
  Update a team (currently only allows changing domain of personal teams).

  ## Examples

  ```elixir
  TeamService.update(team, %{"domain" => "my-domain"})
  ```
  """
  @spec update(%Team{}, map) :: {:ok, %Team{}} | {:error, Ecto.Changeset.t}
  def update(team, params) do
    team
    |> Team.update_changeset(params)
    |> Repo.update
  end

  # Identifiers that look like UUIDs are primary-key lookups; anything else
  # is treated as a domain (second clause below).
  @spec do_get(Ecto.Queryable.t, String.t) :: {:ok, %Team{}}
                                            | {:error, :not_found}
  defp do_get(queryable, id = match_uuid()) do
    Repo.get(queryable, id)
    |> case do
      nil -> {:error, :not_found}
      team -> {:ok, team}
    end
  end

  defp do_get(queryable, domain) do
    queryable
    |> where(domain: ^domain)
    |> Repo.one
    |> case do
      nil -> {:error, :not_found}
      team -> {:ok, team}
    end
  end

  @doc """
  Find the user associated with `team` for `account` and add it to `team` as
  `account_user`.

  ## Examples

  ```elixir
  TeamService.add_account_user(team, account)
  ```
  """
  @spec add_account_user(%Team{}, %Account{} | nil) :: %Team{}
  def add_account_user(team, nil), do: Map.put(team, :account_user, nil)

  def add_account_user(team, account) do
    user =
      from(assoc(account, :users),
        where: [team_id: ^team.id])
      |> Repo.one

    Map.put(team, :account_user, user)
  end

  # Applies the optional "domain" filter to a team query.
  @spec filter(Ecto.Queryable.t, map | nil) :: [%Team{}]
  defp filter(queryable, %{"domain" => domain}),
    do: where(queryable, domain: ^domain)
  defp filter(queryable, _), do: queryable

  # True when `domain` is present in the Slack-domain whitelist table.
  @spec domain_whitelisted?(String.t | nil) :: boolean
  defp domain_whitelisted?(nil), do: false

  defp domain_whitelisted?(domain) do
    from(WhitelistedSlackDomain, where: [domain: ^domain])
    |> Repo.one
    |> case do
      nil -> false
      _ -> true
    end
  end
end
|
lib/canvas_api/services/team_service.ex
| 0.841337
| 0.800302
|
team_service.ex
|
starcoder
|
defmodule Result do
  @moduledoc """
  Tools for working with result tuples.
  Influenced by the Rust Option/Result implementation.
  """

  @type value :: any
  @type error :: any
  @type ok(value_type) :: {:ok, value_type}
  @type ok :: ok(any)
  @type err(error_type) :: {:error, error_type}
  @type err :: err(any)
  @type t(value_type, error_type) :: ok(value_type) | err(error_type)
  @type t :: t(any, any)

  @doc """
  Wraps a value into an ok-result.

  A value that already is an ok-result passes through untouched.

  ## Examples

      iex> Result.ok(:a)
      {:ok, :a}
      iex> Result.ok(Result.ok(:a))
      {:ok, :a}
  """
  @spec ok(value | ok) :: ok
  def ok(value_or_ok_result)
  def ok({:ok, _} = already_ok), do: already_ok
  def ok(plain_value), do: {:ok, plain_value}

  @doc """
  Wraps an error into an error-result.

  An error that already is an error-result passes through untouched.

  ## Examples

      iex> Result.err(:a)
      {:error, :a}
      iex> Result.err(Result.err(:a))
      {:error, :a}
  """
  @spec err(error | err) :: err
  def err(error_or_err_result)
  def err({:error, _} = already_err), do: already_err
  def err(plain_error), do: {:error, plain_error}

  @doc """
  Tells whether a result carries a contained value.

  ## Examples

      iex> Enum.filter([ok: 1, error: 2, ok: 3], &Result.ok?/1)
      [ok: 1, ok: 3]
      iex> Enum.split_with([ok: 1, error: 2, ok: 3], &Result.ok?/1)
      {[ok: 1, ok: 3], [error: 2]}
  """
  @spec ok?(t) :: boolean
  def ok?(result)
  def ok?({:ok, _contained}), do: true
  def ok?({:error, _reason}), do: false

  @doc """
  Tells whether a result carries a contained error — the negation of `ok?/1`.

  ## Examples

      iex> Enum.filter([ok: 1, error: 2, ok: 3], &Result.err?/1)
      [error: 2]
  """
  @spec err?(t) :: boolean
  def err?(result), do: not ok?(result)

  @doc """
  Transforms the contained value with `value_to_value_fn`; error-results are
  passed through untouched.

  The given function must return a plain value. Use `Result.and_then/2` when
  the function itself returns a result.

  ## Examples

      iex> {:ok, :a} |> Result.map(fn :a -> :b end)
      {:ok, :b}
      iex> {:error, :a} |> Result.map(fn :a -> :b end)
      {:error, :a}
  """
  @spec map(t, (value -> value)) :: t
  def map(result, value_to_value_fn)
  def map({:ok, contained}, mapper), do: {:ok, mapper.(contained)}
  def map({:error, _} = failure, _mapper), do: failure

  @doc """
  Transforms the contained error with `error_to_error_fn`; ok-results are
  passed through untouched.

  ## Examples

      iex> {:error, :a} |> Result.map_err(fn :a -> :b end)
      {:error, :b}
      iex> {:ok, :a} |> Result.map_err(fn :a -> :b end)
      {:ok, :a}
  """
  @spec map_err(t, (error -> error)) :: t
  def map_err(result, error_to_error_fn)
  def map_err({:error, reason}, mapper), do: {:error, mapper.(reason)}
  def map_err({:ok, _} = success, _mapper), do: success

  @doc """
  Chains a result-returning function onto the contained value; error-results
  short-circuit untouched.

  Use `Result.map/2` when the function returns a plain value instead.

  ## Examples

      iex> {:ok, :a} |> Result.and_then(fn :a -> {:ok, :b} end)
      {:ok, :b}
      iex> {:ok, :a} |> Result.and_then(fn :a -> {:error, :b} end)
      {:error, :b}
      iex> {:error, :a} |> Result.and_then(fn :a -> {:ok, :b} end)
      {:error, :a}
  """
  @spec and_then(t, (value -> t)) :: t
  def and_then(result, value_to_result_fn)
  def and_then({:ok, contained}, chained), do: chained.(contained)
  def and_then({:error, _} = failure, _chained), do: failure

  @doc """
  Chains a result-returning function onto the contained error; ok-results
  short-circuit untouched.

  Use `Result.map_err/2` when the function returns a plain value instead.

  ## Examples

      iex> {:ok, :a} |> Result.or_else(fn :a -> {:ok, :b} end)
      {:ok, :a}
      iex> {:error, :a} |> Result.or_else(fn :a -> {:ok, :b} end)
      {:ok, :b}
      iex> {:error, :a} |> Result.or_else(fn :a -> {:error, :b} end)
      {:error, :b}
  """
  @spec or_else(t, (error -> t)) :: t
  def or_else(result, error_to_result_fn)
  def or_else({:ok, _} = success, _fallback), do: success
  def or_else({:error, reason}, fallback), do: fallback.(reason)

  @doc """
  Returns the contained value or throw an error.
  """
  @spec unwrap(ok, expectation :: String.t() | nil) :: value
  def unwrap(ok_result, expectation \\ nil)
  def unwrap({:ok, contained}, _expectation), do: contained

  def unwrap({:error, _} = failure, nil),
    do: raise(ArgumentError, "Not a value result: #{inspect(failure)}")

  def unwrap({:error, _} = failure, expectation),
    do: raise(ArgumentError, ~s(Expected "#{expectation}": #{inspect(failure)}))

  @doc """
  Returns the contained value or a default.
  """
  @spec unwrap_or(t, default :: value) :: value
  def unwrap_or(result, default_value)
  def unwrap_or({:ok, contained}, _default), do: contained
  def unwrap_or({:error, _reason}, default), do: default

  @doc """
  Returns the contained error or throw an error.
  """
  @spec unwrap_err(err) :: error
  def unwrap_err(err_result)
  def unwrap_err({:error, reason}), do: reason

  def unwrap_err({:ok, _} = success),
    do: raise(ArgumentError, "Not an error result: #{inspect(success)}")

  @doc """
  Returns the contained error or a default.
  """
  @spec unwrap_err_or(t, default :: value | error) :: value | error
  def unwrap_err_or(result, default_value)
  def unwrap_err_or({:error, reason}, _default), do: reason
  def unwrap_err_or({:ok, _contained}, default), do: default

  @doc """
  Unwraps ok-results and rejects error-results.

  ## Examples

      iex> [ok: :a, ok: :b, error: :c] |> Result.filter_and_unwrap()
      [:a, :b]
  """
  @spec filter_and_unwrap([t]) :: [value]
  def filter_and_unwrap(results) when is_list(results) do
    Enum.flat_map(results, fn
      {:ok, contained} -> [contained]
      {:error, _reason} -> []
    end)
  end

  @doc """
  Unwraps error-results and rejects ok-results.

  ## Examples

      iex> [ok: :a, ok: :b, error: :c] |> Result.filter_and_unwrap_err()
      [:c]
  """
  @spec filter_and_unwrap_err([t]) :: [error]
  def filter_and_unwrap_err(results) when is_list(results) do
    Enum.flat_map(results, fn
      {:error, reason} -> [reason]
      {:ok, _contained} -> []
    end)
  end

  @doc """
  Turns a list of results into a result with lists of either values or errors.

  As soon as there is at least one error in the given list, the result is an
  error-type Result.

  ## Example

      iex> [ok: :a, ok: :b] |> Result.list_to_result()
      {:ok, [:a, :b]}
      iex> [ok: :a, ok: :b, error: :c, error: :d] |> Result.list_to_result()
      {:error, [:c, :d]}
  """
  @type fmt_errors :: ([error] -> any)
  @spec list_to_result([t], fmt_errors) :: ok(list) | err(list)
  def list_to_result(results, fmt_errors \\ & &1) when is_list(results) do
    case filter_and_unwrap_err(results) do
      [] -> results |> filter_and_unwrap() |> ok()
      errors -> err(fmt_errors.(errors))
    end
  end

  @doc """
  Turns a result that holds a list of values or errors into a list of results.

  ## Example

      iex> {:ok, [:a, :b]} |> Result.result_to_list()
      [ok: :a, ok: :b]
      iex> {:error, [:c, :d]} |> Result.result_to_list()
      [error: :c, error: :d]
  """
  @spec result_to_list(t) :: [ok] | [err]
  def result_to_list({:ok, values}), do: for(v <- values, do: ok(v))
  def result_to_list({:error, errors}), do: for(e <- errors, do: err(e))
end
|
apps/rig/lib/result.ex
| 0.919895
| 0.560493
|
result.ex
|
starcoder
|
defmodule Brainfux.Executor do
  @moduledoc """
  The actual execution functions are here.
  Functions in this module are called at runtime.

  The machine state carries the tape split in two: `forward` holds the
  current cell (its head) plus everything to its right, and `back` holds the
  cells to the left (nearest first). `input` is a list of integers consumed
  by `,`; `output` is a binary appended to by `.`.
  """
  alias Brainfux.State

  @doc """
  Executes brainfuck `code` against `state` and returns the final state.

  One command is consumed per clause via binary prefix matching. Characters
  that are not brainfuck commands are skipped (conventional brainfuck treats
  them as comments; previously they raised `FunctionClauseError`).
  """
  @spec execute(State.t, String.t) :: State.t
  def execute(state, "") do
    state
  end

  # `+` / `-`: adjust the current cell (head of `forward`).
  def execute(state, "+" <> rest) do
    [head | tail] = state.forward
    new_state = %{state | forward: [head + 1 | tail]}
    execute(new_state, rest)
  end

  def execute(state, "-" <> rest) do
    [head | tail] = state.forward
    new_state = %{state | forward: [head - 1 | tail]}
    execute(new_state, rest)
  end

  # `>`: move right; the tape grows with a fresh zero cell on demand.
  def execute(state, ">" <> rest) do
    [head | tail] = state.forward
    tail = if Enum.empty?(tail), do: [0], else: tail
    new_state = %{%{state | back: [head | state.back]} | forward: tail}
    execute(new_state, rest)
  end

  # `<`: move left, symmetric to `>`.
  def execute(state, "<" <> rest) do
    [head | tail] = state.back
    tail = if Enum.empty?(tail), do: [0], else: tail
    new_state = %{%{state | back: tail} | forward: [head | state.forward]}
    execute(new_state, rest)
  end

  # `,`: overwrite the current cell with the next input value.
  # Raises MatchError when input is exhausted (unchanged behavior).
  def execute(state, "," <> rest) do
    [input_head | input_tail] = state.input
    [_ | forward_tail] = state.forward
    next_forward = [input_head | forward_tail]
    new_state = %{%{state | input: input_tail} | forward: next_forward}
    execute(new_state, rest)
  end

  # `.`: append the current cell (as one byte) to the output.
  def execute(state, "." <> rest) do
    [head | _] = state.forward
    new_state = %{state | output: state.output <> <<head>>}
    execute(new_state, rest)
  end

  # `[`: skip the loop body when the current cell is 0; otherwise run the
  # body once and re-test by re-entering this clause.
  def execute(state, "[" <> rest) do
    if hd(state.forward) == 0 do
      {_, rest} = find_matching_bracket(rest)
      execute(state, rest)
    else
      {block, _} = find_matching_bracket(rest)
      new_state = execute(state, block)
      execute(new_state, "[" <> rest)
    end
  end

  # Any other character is not a brainfuck command: ignore it. This clause
  # must stay last so it never shadows the command clauses above.
  def execute(state, <<_ignored::utf8, rest::binary>>) do
    execute(state, rest)
  end

  # Splits `code` at the `]` matching an already-consumed `[`, returning
  # {loop_body, remainder_after_the_bracket}.
  @spec find_matching_bracket(String.t) :: {String.t, String.t}
  defp find_matching_bracket(code) do
    find_matching_bracket("", code, 0)
  end

  # `depth` counts the `[` nested inside the scan so far.
  # NOTE(review): the regex `.` does not match newlines, so loop bodies
  # containing newlines may not be bracket-matched — confirm inputs are
  # single-line (unchanged from the original implementation).
  @spec find_matching_bracket(String.t, String.t, non_neg_integer) ::
          {String.t, String.t}
  defp find_matching_bracket(block, code, depth) do
    case Regex.run(~R/([^\[\]]*)([\[\]])(.*)/, code) do
      nil ->
        {block, code}

      [_, before, "]", rest] ->
        if depth == 0 do
          {block <> before, rest}
        else
          find_matching_bracket(block <> before <> "]", rest, depth - 1)
        end

      [_, before, "[", rest] ->
        find_matching_bracket(block <> before <> "[", rest, depth + 1)
    end
  end
end
|
lib/brainfux/executor.ex
| 0.740456
| 0.536616
|
executor.ex
|
starcoder
|
defmodule AWS.Organizations do
@moduledoc """
AWS Organizations
"""
@doc """
Sends a response to the originator of a handshake agreeing to the action
proposed by the handshake request.
This operation can be called only by the following principals when they also
have the relevant IAM permissions:
* **Invitation to join** or ## Approve all features request
handshakes: only a principal from the member account.
The user who calls the API for an invitation to join must have the
`organizations:AcceptHandshake` permission. If you enabled all features in the
organization, the user must also have the `iam:CreateServiceLinkedRole`
permission so that AWS Organizations can create the required service-linked role
named `AWSServiceRoleForOrganizations`. For more information, see [AWS Organizations and Service-Linked
Roles](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_integration_services.html#orgs_integration_service-linked-roles)
in the *AWS Organizations User Guide*.
* **Enable all features final confirmation** handshake: only a
principal from the master account.
For more information about invitations, see [Inviting an AWS Account to Join Your
Organization](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_invites.html)
in the *AWS Organizations User Guide.* For more information about requests to
enable all features in the organization, see [Enabling All Features in Your Organization](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_org_support-all-features.html)
in the *AWS Organizations User Guide.*
After you accept a handshake, it continues to appear in the results of relevant
APIs for only 30 days. After that, it's deleted.
"""
def accept_handshake(client, input, options \\ []) do
request(client, "AcceptHandshake", input, options)
end
@doc """
Attaches a policy to a root, an organizational unit (OU), or an individual
account.
How the policy affects accounts depends on the type of policy. Refer to the *AWS
Organizations User Guide* for information about each policy type:
*
[AISERVICES_OPT_OUT_POLICY](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_ai-opt-out.html) *
[BACKUP_POLICY](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_backup.html)
*
[SERVICE_CONTROL_POLICY](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_scp.html) *
[TAG_POLICY](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_tag-policies.html)
This operation can be called only from the organization's master account.
"""
def attach_policy(client, input, options \\ []) do
request(client, "AttachPolicy", input, options)
end
@doc """
Cancels a handshake.
Canceling a handshake sets the handshake state to `CANCELED`.
This operation can be called only from the account that originated the
handshake. The recipient of the handshake can't cancel it, but can use
`DeclineHandshake` instead. After a handshake is canceled, the recipient can no
longer respond to that handshake.
After you cancel a handshake, it continues to appear in the results of relevant
APIs for only 30 days. After that, it's deleted.
"""
def cancel_handshake(client, input, options \\ []) do
request(client, "CancelHandshake", input, options)
end
@doc """
Creates an AWS account that is automatically a member of the organization whose
credentials made the request.
This is an asynchronous request that AWS performs in the background. Because
`CreateAccount` operates asynchronously, it can return a successful completion
message even though account initialization might still be in progress. You might
need to wait a few minutes before you can successfully access the account. To
check the status of the request, do one of the following:
* Use the `OperationId` response element from this operation to
provide as a parameter to the `DescribeCreateAccountStatus` operation.
* Check the AWS CloudTrail log for the `CreateAccountResult` event.
For information on using AWS CloudTrail with AWS Organizations, see [Monitoring the Activity in Your
Organization](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_monitoring.html)
in the *AWS Organizations User Guide.*
The user who calls the API to create an account must have the
`organizations:CreateAccount` permission. If you enabled all features in the
organization, AWS Organizations creates the required service-linked role named
`AWSServiceRoleForOrganizations`. For more information, see [AWS Organizations and Service-Linked
Roles](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrate_services.html#orgs_integrate_services-using_slrs)
in the *AWS Organizations User Guide*.
If the request includes tags, then the requester must have the
`organizations:TagResource` permission.
AWS Organizations preconfigures the new member account with a role (named
`OrganizationAccountAccessRole` by default) that grants users in the master
account administrator permissions in the new member account. Principals in the
master account can assume the role. AWS Organizations clones the company name
and address information for the new account from the organization's master
account.
This operation can be called only from the organization's master account.
For more information about creating accounts, see [Creating an AWS Account in Your
Organization](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_create.html)
in the *AWS Organizations User Guide.*
When you create an account in an organization using the AWS
Organizations console, API, or CLI commands, the information required for the
account to operate as a standalone account, such as a payment method and signing
the end user license agreement (EULA) is *not* automatically collected. If you
must remove an account from your organization later, you can do so only after
you provide the missing information. Follow the steps at [ To leave an organization as a member
account](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_remove.html#leave-without-all-info)
in the *AWS Organizations User Guide*.
If you get an exception that indicates that you exceeded your
account limits for the organization, contact [AWS Support](https://console.aws.amazon.com/support/home#/).
If you get an exception that indicates that the operation failed
because your organization is still initializing, wait one hour and then try
again. If the error persists, contact [AWS Support](https://console.aws.amazon.com/support/home#/).
Using `CreateAccount` to create multiple temporary accounts isn't
recommended. You can only close an account from the Billing and Cost Management
Console, and you must be signed in as the root user. For information on the
requirements and process for closing an account, see [Closing an AWS Account](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_close.html)
in the *AWS Organizations User Guide*.
When you create a member account with this operation, you can choose whether to
create the account with the ## IAM User and Role Access to Billing Information
switch enabled. If you enable it, IAM users and roles that have appropriate
permissions can view billing information for the account. If you disable it,
only the account root user can access billing information. For information about
how to disable this switch for an account, see [Granting Access to Your Billing Information and
Tools](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/grantaccess.html).
"""
def create_account(client, input, options \\ []) do
request(client, "CreateAccount", input, options)
end
@doc """
This action is available if all of the following are true:
* You're authorized to create accounts in the AWS GovCloud (US)
Region.
For more information on the AWS GovCloud (US) Region, see the [ *AWS GovCloud User
Guide*.](http://docs.aws.amazon.com/govcloud-us/latest/UserGuide/welcome.html)
* You already have an account in the AWS GovCloud (US) Region that
is associated with your master account in the commercial Region.
* You call this action from the master account of your organization
in the commercial Region.
* You have the `organizations:CreateGovCloudAccount` permission.
AWS Organizations automatically creates the required service-linked role named
`AWSServiceRoleForOrganizations`. For more information, see [AWS Organizations and Service-Linked
Roles](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrate_services.html#orgs_integrate_services-using_slrs)
in the *AWS Organizations User Guide.*
AWS automatically enables AWS CloudTrail for AWS GovCloud (US) accounts, but you
should also do the following:
* Verify that AWS CloudTrail is enabled to store logs.
* Create an S3 bucket for AWS CloudTrail log storage.
For more information, see [Verifying AWS CloudTrail Is Enabled](http://docs.aws.amazon.com/govcloud-us/latest/UserGuide/verifying-cloudtrail.html)
in the *AWS GovCloud User Guide*.
If the request includes tags, then the requester must have the
`organizations:TagResource` permission. The tags are attached to the commercial
account associated with the GovCloud account, rather than the GovCloud account
itself. To add tags to the GovCloud account, call the `TagResource` operation in
the GovCloud Region after the new GovCloud account exists.
You call this action from the master account of your organization in the
commercial Region to create a standalone AWS account in the AWS GovCloud (US)
Region. After the account is created, the master account of an organization in
the AWS GovCloud (US) Region can invite it to that organization. For more
information on inviting standalone accounts in the AWS GovCloud (US) to join an
organization, see [AWS Organizations](http://docs.aws.amazon.com/govcloud-us/latest/UserGuide/govcloud-organizations.html)
in the *AWS GovCloud User Guide.*
Calling `CreateGovCloudAccount` is an asynchronous request that AWS performs in
the background. Because `CreateGovCloudAccount` operates asynchronously, it can
return a successful completion message even though account initialization might
still be in progress. You might need to wait a few minutes before you can
successfully access the account. To check the status of the request, do one of
the following:
* Use the `OperationId` response element from this operation to
provide as a parameter to the `DescribeCreateAccountStatus` operation.
* Check the AWS CloudTrail log for the `CreateAccountResult` event.
For information on using AWS CloudTrail with Organizations, see [Monitoring the Activity in Your
Organization](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_monitoring.html)
in the *AWS Organizations User Guide.*
When you call the `CreateGovCloudAccount` action, you create two accounts: a
standalone account in the AWS GovCloud (US) Region and an associated account in
the commercial Region for billing and support purposes. The account in the
commercial Region is automatically a member of the organization whose
credentials made the request. Both accounts are associated with the same email
address.
A role is created in the new account in the commercial Region that allows the
master account in the organization in the commercial Region to assume it. An AWS
GovCloud (US) account is then created and associated with the commercial account
that you just created. A role is also created in the new AWS GovCloud (US)
account that can be assumed by the AWS GovCloud (US) account that is associated
with the master account of the commercial organization. For more information and
to view a diagram that explains how account access works, see [AWS Organizations](http://docs.aws.amazon.com/govcloud-us/latest/UserGuide/govcloud-organizations.html)
in the *AWS GovCloud User Guide.*
For more information about creating accounts, see [Creating an AWS Account in Your
Organization](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_create.html)
in the *AWS Organizations User Guide.*
When you create an account in an organization using the AWS
Organizations console, API, or CLI commands, the information required for the
account to operate as a standalone account is *not* automatically collected.
This includes a payment method and signing the end user license agreement
(EULA). If you must remove an account from your organization later, you can do
so only after you provide the missing information. Follow the steps at [ To leave an organization as a member
account](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_remove.html#leave-without-all-info)
in the *AWS Organizations User Guide.*
If you get an exception that indicates that you exceeded your
account limits for the organization, contact [AWS Support](https://console.aws.amazon.com/support/home#/).
If you get an exception that indicates that the operation failed
because your organization is still initializing, wait one hour and then try
again. If the error persists, contact [AWS Support](https://console.aws.amazon.com/support/home#/).
Using `CreateGovCloudAccount` to create multiple temporary accounts
isn't recommended. You can only close an account from the AWS Billing and Cost
Management console, and you must be signed in as the root user. For information
on the requirements and process for closing an account, see [Closing an AWS Account](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_close.html)
in the *AWS Organizations User Guide*.
When you create a member account with this operation, you can choose whether to
create the account with the ## IAM User and Role Access to Billing Information
switch enabled. If you enable it, IAM users and roles that have appropriate
permissions can view billing information for the account. If you disable it,
only the account root user can access billing information. For information about
how to disable this switch for an account, see [Granting Access to Your Billing Information and
Tools](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/grantaccess.html).
"""
def create_gov_cloud_account(client, input, options \\ []) do
request(client, "CreateGovCloudAccount", input, options)
end
@doc """
Creates an AWS organization.
The account whose user is calling the `CreateOrganization` operation
automatically becomes the [master account](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_getting-started_concepts.html#account)
of the new organization.
This operation must be called using credentials from the account that is to
become the new organization's master account. The principal must also have the
relevant IAM permissions.
By default (or if you set the `FeatureSet` parameter to `ALL`), the new
organization is created with all features enabled and service control policies
automatically enabled in the root. If you instead choose to create the
organization supporting only the consolidated billing features by setting the
`FeatureSet` parameter to `CONSOLIDATED_BILLING"`, no policy types are enabled
by default, and you can't use organization policies
"""
def create_organization(client, input, options \\ []) do
request(client, "CreateOrganization", input, options)
end
@doc """
Creates an organizational unit (OU) within a root or parent OU.
An OU is a container for accounts that enables you to organize your accounts to
apply policies according to your business requirements. The number of levels
deep that you can nest OUs is dependent upon the policy types enabled for that
root. For service control policies, the limit is five.
For more information about OUs, see [Managing Organizational Units](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_ous.html)
in the *AWS Organizations User Guide.*
If the request includes tags, then the requester must have the
`organizations:TagResource` permission.
This operation can be called only from the organization's master account.
"""
def create_organizational_unit(client, input, options \\ []) do
request(client, "CreateOrganizationalUnit", input, options)
end
@doc """
Creates a policy of a specified type that you can attach to a root, an
organizational unit (OU), or an individual AWS account.
For more information about policies and their use, see [Managing Organization Policies](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies.html).
If the request includes tags, then the requester must have the
`organizations:TagResource` permission.
This operation can be called only from the organization's master account.
"""
def create_policy(client, input, options \\ []) do
request(client, "CreatePolicy", input, options)
end
@doc """
Declines a handshake request.
This sets the handshake state to `DECLINED` and effectively deactivates the
request.
This operation can be called only from the account that received the handshake.
The originator of the handshake can use `CancelHandshake` instead. The
originator can't reactivate a declined request, but can reinitiate the process
with a new handshake request.
After you decline a handshake, it continues to appear in the results of relevant
APIs for only 30 days. After that, it's deleted.
"""
def decline_handshake(client, input, options \\ []) do
request(client, "DeclineHandshake", input, options)
end
@doc """
Deletes the organization.
You can delete an organization only by using credentials from the master
account. The organization must be empty of member accounts.
"""
def delete_organization(client, input, options \\ []) do
request(client, "DeleteOrganization", input, options)
end
@doc """
Deletes an organizational unit (OU) from a root or another OU.
You must first remove all accounts and child OUs from the OU that you want to
delete.
This operation can be called only from the organization's master account.
"""
def delete_organizational_unit(client, input, options \\ []) do
request(client, "DeleteOrganizationalUnit", input, options)
end
@doc """
Deletes the specified policy from your organization.
Before you perform this operation, you must first detach the policy from all
organizational units (OUs), roots, and accounts.
This operation can be called only from the organization's master account.
"""
def delete_policy(client, input, options \\ []) do
request(client, "DeletePolicy", input, options)
end
@doc """
Removes the specified member AWS account as a delegated administrator for the
specified AWS service.
Deregistering a delegated administrator can have unintended impacts on the
functionality of the enabled AWS service. See the documentation for the enabled
service before you deregister a delegated administrator so that you understand
any potential impacts.
You can run this action only for AWS services that support this feature. For a
current list of services that support it, see the column *Supports Delegated
Administrator* in the table at [AWS Services that you can use with AWS Organizations](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrated-services-list.html)
in the *AWS Organizations User Guide.*
This operation can be called only from the organization's master account.
"""
def deregister_delegated_administrator(client, input, options \\ []) do
request(client, "DeregisterDelegatedAdministrator", input, options)
end
@doc """
Retrieves AWS Organizations-related information about the specified account.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def describe_account(client, input, options \\ []) do
request(client, "DescribeAccount", input, options)
end
@doc """
Retrieves the current status of an asynchronous request to create an account.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def describe_create_account_status(client, input, options \\ []) do
request(client, "DescribeCreateAccountStatus", input, options)
end
@doc """
Returns the contents of the effective policy for specified policy type and
account.
The effective policy is the aggregation of any policies of the specified type
that the account inherits, plus any policy of that type that is directly
attached to the account.
This operation applies only to policy types *other* than service control
policies (SCPs).
For more information about policy inheritance, see [How Policy Inheritance Works](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies-inheritance.html)
in the *AWS Organizations User Guide*.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def describe_effective_policy(client, input, options \\ []) do
request(client, "DescribeEffectivePolicy", input, options)
end
@doc """
Retrieves information about a previously requested handshake.
The handshake ID comes from the response to the original
`InviteAccountToOrganization` operation that generated the handshake.
You can access handshakes that are `ACCEPTED`, `DECLINED`, or `CANCELED` for
only 30 days after they change to that state. They're then deleted and no longer
accessible.
This operation can be called from any account in the organization.
"""
def describe_handshake(client, input, options \\ []) do
request(client, "DescribeHandshake", input, options)
end
@doc """
Retrieves information about the organization that the user's account belongs to.
This operation can be called from any account in the organization.
Even if a policy type is shown as available in the organization, you can disable
it separately at the root level with `DisablePolicyType`. Use `ListRoots` to see
the status of policy types for a specified root.
"""
def describe_organization(client, input, options \\ []) do
request(client, "DescribeOrganization", input, options)
end
@doc """
Retrieves information about an organizational unit (OU).
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def describe_organizational_unit(client, input, options \\ []) do
request(client, "DescribeOrganizationalUnit", input, options)
end
@doc """
Retrieves information about a policy.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def describe_policy(client, input, options \\ []) do
request(client, "DescribePolicy", input, options)
end
@doc """
Detaches a policy from a target root, organizational unit (OU), or account.
If the policy being detached is a service control policy (SCP), the changes to
permissions for AWS Identity and Access Management (IAM) users and roles in
affected accounts are immediate.
Every root, OU, and account must have at least one SCP attached. If you want to
replace the default `FullAWSAccess` policy with an SCP that limits the
permissions that can be delegated, you must attach the replacement SCP before
you can remove the default SCP. This is the authorization strategy of an "[allow list](https://docs.aws.amazon.com/organizations/latest/userguide/SCP_strategies.html#orgs_policies_allowlist)".
If you instead attach a second SCP and leave the `FullAWSAccess` SCP still
attached, and specify `"Effect": "Deny"` in the second SCP to override the
`"Effect": "Allow"` in the `FullAWSAccess` policy (or any other attached SCP),
you're using the authorization strategy of a "[deny list](https://docs.aws.amazon.com/organizations/latest/userguide/SCP_strategies.html#orgs_policies_denylist)".
This operation can be called only from the organization's master account.
"""
def detach_policy(client, input, options \\ []) do
request(client, "DetachPolicy", input, options)
end
@doc """
Disables the integration of an AWS service (the service that is specified by
`ServicePrincipal`) with AWS Organizations.
When you disable integration, the specified service no longer can create a
[service-linked role](http://docs.aws.amazon.com/IAM/latest/UserGuide/using-service-linked-roles.html)
in *new* accounts in your organization. This means the service can't perform
operations on your behalf on any new accounts in your organization. The service
can still perform operations in older accounts until the service completes its
clean-up from AWS Organizations.
We recommend that you disable integration between AWS Organizations and the
specified AWS service by using the console or commands that are provided by the
specified service. Doing so ensures that the other service is aware that it can
clean up any resources that are required only for the integration. How the
service cleans up its resources in the organization's accounts depends on that
service. For more information, see the documentation for the other AWS service.
After you perform the `DisableAWSServiceAccess` operation, the specified service
can no longer perform operations in your organization's accounts unless the
operations are explicitly permitted by the IAM policies that are attached to
your roles.
For more information about integrating other services with AWS Organizations,
including the list of services that work with Organizations, see [Integrating AWS Organizations with Other AWS
Services](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrate_services.html)
in the *AWS Organizations User Guide.*
This operation can be called only from the organization's master account.
"""
def disable_a_w_s_service_access(client, input, options \\ []) do
request(client, "DisableAWSServiceAccess", input, options)
end
@doc """
Disables an organizational policy type in a root.
A policy of a certain type can be attached to entities in a root only if that
type is enabled in the root. After you perform this operation, you no longer can
attach policies of the specified type to that root or to any organizational unit
(OU) or account in that root. You can undo this by using the `EnablePolicyType`
operation.
This is an asynchronous request that AWS performs in the background. If you
disable a policy type for a root, it still appears enabled for the organization
if [all features](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_org_support-all-features.html)
are enabled for the organization. AWS recommends that you first use `ListRoots`
to see the status of policy types for a specified root, and then use this
operation.
This operation can be called only from the organization's master account.
To view the status of available policy types in the organization, use
`DescribeOrganization`.
"""
def disable_policy_type(client, input, options \\ []) do
request(client, "DisablePolicyType", input, options)
end
@doc """
Enables the integration of an AWS service (the service that is specified by
`ServicePrincipal`) with AWS Organizations.
When you enable integration, you allow the specified service to create a
[service-linked role](http://docs.aws.amazon.com/IAM/latest/UserGuide/using-service-linked-roles.html)
in all the accounts in your organization. This allows the service to perform
operations on your behalf in your organization and its accounts.
We recommend that you enable integration between AWS Organizations and the
specified AWS service by using the console or commands that are provided by the
specified service. Doing so ensures that the service is aware that it can create
the resources that are required for the integration. How the service creates
those resources in the organization's accounts depends on that service. For more
information, see the documentation for the other AWS service.
For more information about enabling services to integrate with AWS
Organizations, see [Integrating AWS Organizations with Other AWS Services](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrate_services.html)
in the *AWS Organizations User Guide.*
This operation can be called only from the organization's master account and
only if the organization has [enabled all features](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_org_support-all-features.html).
"""
def enable_a_w_s_service_access(client, input, options \\ []) do
request(client, "EnableAWSServiceAccess", input, options)
end
@doc """
Enables all features in an organization.
This enables the use of organization policies that can restrict the services and
actions that can be called in each account. Until you enable all features, you
have access only to consolidated billing, and you can't use any of the advanced
account administration features that AWS Organizations supports. For more
information, see [Enabling All Features in Your Organization](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_org_support-all-features.html)
in the *AWS Organizations User Guide.*
This operation is required only for organizations that were created explicitly
with only the consolidated billing features enabled. Calling this operation
sends a handshake to every invited account in the organization. The feature set
change can be finalized and the additional features enabled only after all
administrators in the invited accounts approve the change by accepting the
handshake.
After you enable all features, you can separately enable or disable individual
policy types in a root using `EnablePolicyType` and `DisablePolicyType`. To see
the status of policy types in a root, use `ListRoots`.
After all invited member accounts accept the handshake, you finalize the feature
set change by accepting the handshake that contains `"Action":
"ENABLE_ALL_FEATURES"`. This completes the change.
After you enable all features in your organization, the master account in the
organization can apply policies on all member accounts. These policies can
restrict what users and even administrators in those accounts can do. The master
account can apply policies that prevent accounts from leaving the organization.
Ensure that your account administrators are aware of this.
This operation can be called only from the organization's master account.
"""
def enable_all_features(client, input, options \\ []) do
request(client, "EnableAllFeatures", input, options)
end
@doc """
Enables a policy type in a root.
After you enable a policy type in a root, you can attach policies of that type
to the root, any organizational unit (OU), or account in that root. You can undo
this by using the `DisablePolicyType` operation.
This is an asynchronous request that AWS performs in the background. AWS
recommends that you first use `ListRoots` to see the status of policy types for
a specified root, and then use this operation.
This operation can be called only from the organization's master account.
You can enable a policy type in a root only if that policy type is available in
the organization. To view the status of available policy types in the
organization, use `DescribeOrganization`.
"""
def enable_policy_type(client, input, options \\ []) do
request(client, "EnablePolicyType", input, options)
end
@doc """
Sends an invitation to another account to join your organization as a member
account.
AWS Organizations sends email on your behalf to the email address that is
associated with the other account's owner. The invitation is implemented as a
`Handshake` whose details are in the response.
You can invite AWS accounts only from the same seller as the master
account. For example, if your organization's master account was created by
Amazon Internet Services Pvt. Ltd (AISPL), an AWS seller in India, you can
invite only other AISPL accounts to your organization. You can't combine
accounts from AISPL and AWS or from any other AWS seller. For more information,
see [Consolidated Billing in India](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/useconsolidatedbilliing-India.html).
If you receive an exception that indicates that you exceeded your
account limits for the organization or that the operation failed because your
organization is still initializing, wait one hour and then try again. If the
error persists after an hour, contact [AWS Support](https://console.aws.amazon.com/support/home#/).
If the request includes tags, then the requester must have the
`organizations:TagResource` permission.
This operation can be called only from the organization's master account.
"""
def invite_account_to_organization(client, input, options \\ []) do
request(client, "InviteAccountToOrganization", input, options)
end
@doc """
Removes a member account from its parent organization.
This version of the operation is performed by the account that wants to leave.
To remove a member account as a user in the master account, use
`RemoveAccountFromOrganization` instead.
This operation can be called only from a member account in the organization.
The master account in an organization with all features enabled can
set service control policies (SCPs) that can restrict what administrators of
member accounts can do. This includes preventing them from successfully calling
`LeaveOrganization` and leaving the organization.
You can leave an organization as a member account only if the
account is configured with the information required to operate as a standalone
account. When you create an account in an organization using the AWS
Organizations console, API, or CLI commands, the information required of
standalone accounts is *not* automatically collected. For each account that you
want to make standalone, you must perform the following steps. If any of the
steps are already completed for this account, that step doesn't appear.
Choose a support plan
Provide and verify the required contact information
Provide a current payment method
AWS uses the payment method to charge for any billable (not free tier) AWS
activity that occurs while the account isn't attached to an organization. Follow
the steps at [ To leave an organization when all required account information has not yet been
provided](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_remove.html#leave-without-all-info)
in the *AWS Organizations User Guide.*
You can leave an organization only after you enable IAM user access
to billing in your account. For more information, see [Activating Access to the Billing and Cost Management
Console](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/grantaccess.html#ControllingAccessWebsite-Activate)
in the *AWS Billing and Cost Management User Guide.*
After the account leaves the organization, all tags that were
attached to the account object in the organization are deleted. AWS accounts
outside of an organization do not support tags.
"""
# Thin wrapper over the shared request pipeline; the full caller contract
# is described in the @doc immediately above.
def leave_organization(client, input, options \\ []),
  do: request(client, "LeaveOrganization", input, options)
@doc """
Returns a list of the AWS services that you enabled to integrate with your
organization.
After a service on this list creates the resources that it requires for the
integration, it can perform operations on your organization and its accounts.
For more information about integrating other services with AWS Organizations,
including the list of services that currently work with Organizations, see
[Integrating AWS Organizations with Other AWS Services](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrate_services.html)
in the *AWS Organizations User Guide.*
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_a_w_s_service_access_for_organization(client, input, options \\ []) do
request(client, "ListAWSServiceAccessForOrganization", input, options)
end
@doc """
Lists all the accounts in the organization.
To request only the accounts in a specified root or organizational unit (OU),
use the `ListAccountsForParent` operation instead.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_accounts(client, input, options \\ []) do
request(client, "ListAccounts", input, options)
end
@doc """
Lists the accounts in an organization that are contained by the specified target
root or organizational unit (OU).
If you specify the root, you get a list of all the accounts that aren't in any
OU. If you specify an OU, you get a list of all the accounts in only that OU and
not in any child OUs. To get a list of all accounts in the organization, use the
`ListAccounts` operation.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_accounts_for_parent(client, input, options \\ []) do
request(client, "ListAccountsForParent", input, options)
end
@doc """
Lists all of the organizational units (OUs) or accounts that are contained in
the specified parent OU or root.
This operation, along with `ListParents` enables you to traverse the tree
structure that makes up this root.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_children(client, input, options \\ []) do
request(client, "ListChildren", input, options)
end
@doc """
Lists the account creation requests that match the specified status that is
currently being tracked for the organization.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_create_account_status(client, input, options \\ []) do
request(client, "ListCreateAccountStatus", input, options)
end
@doc """
Lists the AWS accounts that are designated as delegated administrators in this
organization.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_delegated_administrators(client, input, options \\ []) do
request(client, "ListDelegatedAdministrators", input, options)
end
@doc """
List the AWS services for which the specified account is a delegated
administrator.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_delegated_services_for_account(client, input, options \\ []) do
request(client, "ListDelegatedServicesForAccount", input, options)
end
@doc """
Lists the current handshakes that are associated with the account of the
requesting user.
Handshakes that are `ACCEPTED`, `DECLINED`, or `CANCELED` appear in the results
of this API for only 30 days after changing to that state. After that, they're
deleted and no longer accessible.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called from any account in the organization.
"""
def list_handshakes_for_account(client, input, options \\ []) do
request(client, "ListHandshakesForAccount", input, options)
end
@doc """
Lists the handshakes that are associated with the organization that the
requesting user is part of.
The `ListHandshakesForOrganization` operation returns a list of handshake
structures. Each structure contains details and status about a handshake.
Handshakes that are `ACCEPTED`, `DECLINED`, or `CANCELED` appear in the results
of this API for only 30 days after changing to that state. After that, they're
deleted and no longer accessible.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_handshakes_for_organization(client, input, options \\ []) do
request(client, "ListHandshakesForOrganization", input, options)
end
@doc """
Lists the organizational units (OUs) in a parent organizational unit or root.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_organizational_units_for_parent(client, input, options \\ []) do
request(client, "ListOrganizationalUnitsForParent", input, options)
end
@doc """
Lists the root or organizational units (OUs) that serve as the immediate parent
of the specified child OU or account.
This operation, along with `ListChildren` enables you to traverse the tree
structure that makes up this root.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
In the current release, a child can have only a single parent.
"""
def list_parents(client, input, options \\ []) do
request(client, "ListParents", input, options)
end
@doc """
Retrieves the list of all policies in an organization of a specified type.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_policies(client, input, options \\ []) do
request(client, "ListPolicies", input, options)
end
@doc """
Lists the policies that are directly attached to the specified target root,
organizational unit (OU), or account.
You must specify the policy type that you want included in the returned list.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_policies_for_target(client, input, options \\ []) do
request(client, "ListPoliciesForTarget", input, options)
end
@doc """
Lists the roots that are defined in the current organization.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
Policy types can be enabled and disabled in roots. This is distinct from whether
they're available in the organization. When you enable all features, you make
policy types available for use in that organization. Individual policy types can
then be enabled and disabled in a root. To see the availability of a policy type
in an organization, use `DescribeOrganization`.
"""
def list_roots(client, input, options \\ []) do
request(client, "ListRoots", input, options)
end
@doc """
Lists tags that are attached to the specified resource.
You can attach tags to the following resources in AWS Organizations.
* AWS account
* Organization root
* Organizational unit (OU)
* Policy (any type)
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Lists all the roots, organizational units (OUs), and accounts that the specified
policy is attached to.
Always check the `NextToken` response parameter for a `null` value when calling
a `List*` operation. These operations can occasionally return an empty set of
results even when there are more results available. The `NextToken` response
parameter value is `null` *only* when there are no more results to display.
This operation can be called only from the organization's master account or by a
member account that is a delegated administrator for an AWS service.
"""
def list_targets_for_policy(client, input, options \\ []) do
request(client, "ListTargetsForPolicy", input, options)
end
@doc """
Moves an account from its current source parent root or organizational unit (OU)
to the specified destination parent root or OU.
This operation can be called only from the organization's master account.
"""
def move_account(client, input, options \\ []) do
request(client, "MoveAccount", input, options)
end
@doc """
Enables the specified member account to administer the Organizations features of
the specified AWS service.
It grants read-only access to AWS Organizations service data. The account still
requires IAM permissions to access and administer the AWS service.
You can run this action only for AWS services that support this feature. For a
current list of services that support it, see the column *Supports Delegated
Administrator* in the table at [AWS Services that you can use with AWS Organizations](https://docs.aws.amazon.com/organizations/latest/userguide/orgs_integrated-services-list.html)
in the *AWS Organizations User Guide.*
This operation can be called only from the organization's master account.
"""
def register_delegated_administrator(client, input, options \\ []) do
request(client, "RegisterDelegatedAdministrator", input, options)
end
@doc """
Removes the specified account from the organization.
The removed account becomes a standalone account that isn't a member of any
organization. It's no longer subject to any policies and is responsible for its
own bill payments. The organization's master account is no longer charged for
any expenses accrued by the member account after it's removed from the
organization.
This operation can be called only from the organization's master account. Member
accounts can remove themselves with `LeaveOrganization` instead.
You can remove an account from your organization only if the
account is configured with the information required to operate as a standalone
account. When you create an account in an organization using the AWS
Organizations console, API, or CLI commands, the information required of
standalone accounts is *not* automatically collected. For an account that you
want to make standalone, you must choose a support plan, provide and verify the
required contact information, and provide a current payment method. AWS uses the
payment method to charge for any billable (not free tier) AWS activity that
occurs while the account isn't attached to an organization. To remove an account
that doesn't yet have this information, you must sign in as the member account
and follow the steps at [ To leave an organization when all required account information has not yet been
provided](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_remove.html#leave-without-all-info)
in the *AWS Organizations User Guide.*
After the account leaves the organization, all tags that were
attached to the account object in the organization are deleted. AWS accounts
outside of an organization do not support tags.
"""
def remove_account_from_organization(client, input, options \\ []) do
request(client, "RemoveAccountFromOrganization", input, options)
end
@doc """
Adds one or more tags to the specified resource.
Currently, you can attach tags to the following resources in AWS Organizations.
* AWS account
* Organization root
* Organizational unit (OU)
* Policy (any type)
This operation can be called only from the organization's master account.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes any tags with the specified keys from the specified resource.
You can attach tags to the following resources in AWS Organizations.
* AWS account
* Organization root
* Organizational unit (OU)
* Policy (any type)
This operation can be called only from the organization's master account.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Renames the specified organizational unit (OU).
The ID and ARN don't change. The child OUs and accounts remain in place, and any
attached policies of the OU remain attached.
This operation can be called only from the organization's master account.
"""
def update_organizational_unit(client, input, options \\ []) do
request(client, "UpdateOrganizationalUnit", input, options)
end
@doc """
Updates an existing policy with a new name, description, or content.
If you don't supply any parameter, that value remains unchanged. You can't
change a policy's type.
This operation can be called only from the organization's master account.
"""
def update_policy(client, input, options \\ []) do
request(client, "UpdatePolicy", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, action, input, options) do
  # Organizations pins every request to us-east-1 regardless of the
  # client's configured region.
  client = %{client | service: "organizations", region: "us-east-1"}

  host = build_host("organizations", client)
  url = build_url(host, client)
  payload = encode!(client, input)

  # The unsigned headers identify the JSON 1.1 target action; sign_v4
  # returns them extended with the SigV4 authorization headers.
  signed_headers =
    AWS.Request.sign_v4(
      client,
      "POST",
      url,
      [
        {"Host", host},
        {"Content-Type", "application/x-amz-json-1.1"},
        {"X-Amz-Target", "AWSOrganizationsV20161128.#{action}"}
      ],
      payload
    )

  post(client, url, payload, signed_headers, options)
end
# Issues the signed POST and normalizes the transport result into the
# {:ok, decoded_body | nil, response} / {:error, reason} contract.
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    {:ok, %{status_code: 200, body: ""} = response} ->
      # A 200 with an empty body yields a nil decoded body.
      {:ok, nil, response}

    {:ok, %{status_code: 200, body: body} = response} ->
      {:ok, decode!(client, body), response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    {:error, _reason} = error ->
      error
  end
end
# Resolves the request host. The "local" region targets a development
# endpoint (or plain localhost when none is configured); otherwise the
# service prefix is joined to the client's endpoint suffix.
defp build_host(_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_prefix, %{region: "local"}), do: "localhost"
defp build_host(prefix, %{endpoint: endpoint}), do: "#{prefix}.#{endpoint}"
# Assembles the full endpoint URL from the client's scheme and port.
defp build_url(host, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}/"
# JSON-encodes a request payload via the client's configured encoder.
defp encode!(client, payload), do: AWS.Client.encode!(client, payload, :json)
# JSON-decodes a response body via the client's configured decoder.
defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
|
lib/aws/generated/organizations.ex
| 0.832066
| 0.569912
|
organizations.ex
|
starcoder
|
defmodule Turbo.Ecto.Services.BuildSearchQuery do
@moduledoc """
`Turbo.Ecto.Services.BuildSearchQuery` is a service module which serves the search hook.
`@search_types` is a collection of all the valid `search_types` that come shipped with
`Turbo.Ecto`'s default search hook. The types are:
* [x] `eq`: equal. (SQL: `col = 'value'`)
* [x] `not_eq`: not equal. (SQL: col != 'value')
* [x] `lt`: less than. (SQL: col < 1024)
* [x] `lteq`: less than or equal. (SQL: col <= 1024)
* [x] `gt`: greater than. (SQL: col > 1024)
* [x] `gteq`: greater than or equal. (SQL: col >= 1024)
* [x] `is_present`: not null and not empty. (SQL: col is not null AND col != '')
* [x] `is_blank`: is null or empty. (SQL: col is null OR col = '')
* [x] `is_null`: is null when given a truthy value, is not null otherwise. (SQL: col is null)
* [x] `is_not_null`: is not null. (SQL: col is not null)
* [x] `is_true` is true. (SQL: col is true)
* [x] `is_not_true` is not true. (SQL: col is false)
* [x] `is_false` is false. (SQL: col is false)
* [x] `is_not_false` is true. (SQL: col is true)
* [x] `like`: contains term value. (SQL: col like "%value%")
* [x] `not_like`: not contains value. (SQL: col not like '%value%')
* [x] `ilike`: contains value in a case insensitive fashion. (SQL: col ilike '%value%')
* [x] `not_ilike`: not contains value in a case insensitive fashion. (SQL: col not ilike '%value%')
* [x] `in` contains. (SQL: col in ['1024', '1025'])
* [x] `not_in` not contains. (SQL: col not in ['1024', '1025'])
* [x] `start_with` start with. (SQL: col like 'value%')
* [x] `not_start_with` not start with. (SQL: col not like 'value%')
* [x] `end_with` end with. (SQL: col like '%value')
* [x] `not_end_with` (SQL: col not like '%value')
* [x] `between`: between begin and end, exclusive. (SQL: begin < col and col < end)
"""
alias Turbo.Ecto.Hooks.Search.Attribute
# Every search predicate recognized by the search hook, as strings; see
# the module doc for the SQL each one maps to.
@search_types ~w(eq
not_eq
lt
lteq
gt
gteq
is_true
is_not_true
is_false
is_not_false
is_present
is_blank
is_null
is_not_null
like
not_like
ilike
not_ilike
in
not_in
start_with
not_start_with
end_with
not_end_with
between
)
# Values treated as "true" by the boolean predicates' guards — integers,
# charlists, and binaries are all tolerated.
@true_values [1, '1', 'T', 't', true, 'true', 'TRUE', "1", "T", "t", "true", "TRUE"]
# Values treated as "false" by the boolean predicates' guards.
@false_values [0, '0', 'F', 'f', false, 'false', 'FALSE', "0", "F", "f", "false", "FALSE"]
@doc """
Returns the list of supported search types (as strings).
"""
def search_types, do: @search_types
@doc """
## Examples
When `search_type` is `:eq`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:eq, %Attribute{name: :price, parent: :query}, ["10"])
{:==, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], ["10"]}]}
When `search_type` is `:not_eq`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:not_eq, %Attribute{name: :price, parent: :query}, ["10"])
{:!=, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], ["10"]}]}
When `search_type` is `:lt`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:lt, %Attribute{name: :price, parent: :query}, ["10"])
{:<, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], ["10"]}]}
When `search_type` is `:lteq`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:lteq, %Attribute{name: :price, parent: :query}, ["10"])
{:<=, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], ["10"]}]}
When `search_type` is `:gt`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:gt, %Attribute{name: :price, parent: :query}, ["10"])
{:>, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], ["10"]}]}
When `search_type` is `:gteq`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:gteq, %Attribute{name: :price, parent: :query}, ["10"])
{:>=, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], ["10"]}]}
When `search_type` is `:like`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:like, %Attribute{name: :title, parent: :query}, ["elixir"])
{:like, [], [{:field, [], [{:query, [], Elixir}, :title]}, "%elixir%"]}
When `search_type` is `:not_like`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:not_like, %Attribute{name: :title, parent: :query}, ["elixir"])
{:not, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:like, [], [{:field, [], [{:query, [], Elixir}, :title]}, "%elixir%"]}]}
When `search_type` is `:ilike`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:ilike, %Attribute{name: :title, parent: :query}, ["elixir"])
{:ilike, [], [{:field, [], [{:query, [], Elixir}, :title]}, "%elixir%"]}
When `search_type` is `:not_ilike`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:not_ilike, %Attribute{name: :title, parent: :query}, ["elixir"])
{:not, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:ilike, [], [{:field, [], [{:query, [], Elixir}, :title]}, "%elixir%"]}]}
When `search_type` is `:in`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:in, %Attribute{name: :price, parent: :query}, ["10", "20"])
{:in, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], [["10", "20"]]}]}
When `search_type` is `:not_in`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:not_in, %Attribute{name: :price, parent: :query}, ["10", "20"])
{
:not,
[context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel],
[{:in, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], [["10", "20"]]}]}]
}
When `search_type` is `:start_with`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:start_with, %Attribute{name: :title, parent: :query}, ["elixir"])
{:ilike, [], [{:field, [], [{:query, [], Elixir}, :title]}, "elixir%"]}
When `search_type` is `:not_start_with`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:not_start_with, %Attribute{name: :title, parent: :query}, ["elixir"])
{:not, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:ilike, [], [{:field, [], [{:query, [], Elixir}, :title]}, "elixir%"]}]}
When `search_type` is `:end_with`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:end_with, %Attribute{name: :title, parent: :query}, ["elixir"])
{:ilike, [], [{:field, [], [{:query, [], Elixir}, :title]}, "%elixir%"]}
When `search_type` is `:not_end_with`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:not_end_with, %Attribute{name: :title, parent: :query}, ["elixir"])
{:not, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:ilike, [], [{:field, [], [{:query, [], Elixir}, :title]}, "%elixir%"]}]}
When `search_type` is `:is_true`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:is_true, %Attribute{name: :available, parent: :query}, [true])
{:==, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}, {:^, [], [true]}]}
iex> BuildSearchQuery.handle_expr(:is_true, %Attribute{name: :available, parent: :query}, [false])
{:!=, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}, {:^, [], [true]}]}
When `search_type` is `:is_not_true`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:is_not_true, %Attribute{name: :available, parent: :query}, [true])
{:==, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}, {:^, [], [false]}]}
iex> BuildSearchQuery.handle_expr(:is_not_true, %Attribute{name: :available, parent: :query}, [false])
{:!=, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}, {:^, [], [false]}]}
When `search_type` is `:is_false`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:is_false, %Attribute{name: :price, parent: :query}, [true])
{:==, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], [false]}]}
iex> BuildSearchQuery.handle_expr(:is_false, %Attribute{name: :price, parent: :query}, [false])
{:!=, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], [false]}]}
When `search_type` is `:is_not_false`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:is_not_false, %Attribute{name: :price, parent: :query}, [true])
{:!=, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], [false]}]}
iex> BuildSearchQuery.handle_expr(:is_not_false, %Attribute{name: :price, parent: :query}, [false])
{:==, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], [false]}]}
When `search_type` is `:is_null`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:is_null, %Attribute{name: :available, parent: :query}, [true])
{:is_nil, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}]}
iex> BuildSearchQuery.handle_expr(:is_null, %Attribute{name: :available, parent: :query}, [false])
{:not, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:is_nil, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}]}]}
When `search_type` is `:is_not_null`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:is_not_null, %Attribute{name: :available, parent: :query}, [true])
{:not, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:is_nil, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}]}]}
iex> BuildSearchQuery.handle_expr(:is_not_null, %Attribute{name: :available, parent: :query}, [false])
{:not, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:is_nil, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}]}]}
When `search_type` is `:is_present`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:is_present, %Attribute{name: :available, parent: :query}, [true])
{
:not,
[context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel],
[{:or, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:is_nil, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}]}, {:==, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}, {:^, [], [""]}]}]}]
}
iex> BuildSearchQuery.handle_expr(:is_present, %Attribute{name: :available, parent: :query}, [false])
{
:or,
[context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel],
[{:not, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:is_nil, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}]}]}, {:!=, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}, {:^, [], [""]}]}]
}
When `search_type` is `:is_blank`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:is_blank, %Attribute{name: :available, parent: :query}, [true])
{:or, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:is_nil, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}]}, {:==, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:field, [], [{:query, [], Elixir}, :available]}, {:^, [], [""]}]}]}
iex> BuildSearchQuery.handle_expr(:is_blank, %Attribute{name: :available, parent: :query}, [false])
{:or,
[context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel],
[
{:not,
[context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel],
[
{:is_nil,
[
context: Turbo.Ecto.Services.BuildSearchQuery,
import: Kernel
], [{:field, [], [{:query, [], Elixir}, :available]}]}
]},
{:!=,
[context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel],
[
{:field, [], [{:query, [], Elixir}, :available]},
{:^, [], [""]}
]}
]}
When `search_type` is `:between`:
iex> alias Turbo.Ecto.Services.BuildSearchQuery
iex> alias Turbo.Ecto.Hooks.Search.Attribute
iex> BuildSearchQuery.handle_expr(:between, %Attribute{name: :price, parent: :query}, ["10", "20"])
{
:and,
[context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel],
[
{:<, [context: Turbo.Ecto.Services.BuildSearchQuery, import: Kernel], [{:^, [], ["10"]}, {:field, [], [{:query, [], Elixir}, :price]}]},
{:<, [{:context, Turbo.Ecto.Services.BuildSearchQuery}, {:import, Kernel}], [{:field, [], [{:query, [], Elixir}, :price]}, {:^, [], ["20"]}]}
]
}
"""
# Builds the quoted Ecto expression for a single search condition.
#
# Takes the search type (`:eq`, `:like`, `:between`, ...), the attribute the
# condition applies to, and the list of user-supplied values, and returns a
# quoted expression the caller splices into an Ecto `where` clause.
@spec handle_expr(atom(), %Attribute{}, list()) :: tuple()
def handle_expr(:eq, attribute, [value | _]) do
  quote(do: unquote(field_expr(attribute)) == ^unquote(value))
end

def handle_expr(:not_eq, attribute, [value | _]) do
  quote do: unquote(field_expr(attribute)) != ^unquote(value)
end

# Case-insensitive "contains" match: pattern is %value%.
def handle_expr(:ilike, attribute, [value | _]) do
  quote do: ilike(unquote(field_expr(attribute)), unquote("%#{value}%"))
end

def handle_expr(:not_ilike, attribute, [value | _]) do
  quote do: not ilike(unquote(field_expr(attribute)), unquote("%#{value}%"))
end

def handle_expr(:lt, attribute, [value | _]) do
  quote do: unquote(field_expr(attribute)) < ^unquote(value)
end

def handle_expr(:lteq, attribute, [value | _]) do
  quote do: unquote(field_expr(attribute)) <= ^unquote(value)
end

def handle_expr(:gt, attribute, [value | _]) do
  quote do: unquote(field_expr(attribute)) > ^unquote(value)
end

def handle_expr(:gteq, attribute, [value | _]) do
  quote do: unquote(field_expr(attribute)) >= ^unquote(value)
end

# Boolean search types: the user-supplied value selects whether the
# predicate is asserted or negated (`@true_values` / `@false_values` are
# module attributes defined elsewhere in this module).
def handle_expr(:is_true, attribute, [value | _]) when value in @true_values do
  handle_expr(:eq, attribute, [true])
end

def handle_expr(:is_true, attribute, [value | _]) when value in @false_values do
  handle_expr(:not_eq, attribute, [true])
end

def handle_expr(:is_not_true, attribute, [value | _]) when value in @true_values do
  handle_expr(:eq, attribute, [false])
end

def handle_expr(:is_not_true, attribute, [value | _]) when value in @false_values do
  handle_expr(:not_eq, attribute, [false])
end

def handle_expr(:is_false, attribute, [value | _]) when value in @true_values do
  handle_expr(:eq, attribute, [false])
end

def handle_expr(:is_false, attribute, [value | _]) when value in @false_values do
  handle_expr(:not_eq, attribute, [false])
end

def handle_expr(:is_not_false, attribute, [value | _]) when value in @true_values do
  handle_expr(:not_eq, attribute, [false])
end

def handle_expr(:is_not_false, attribute, [value | _]) when value in @false_values do
  handle_expr(:eq, attribute, [false])
end

# :is_present is the negation of :is_blank.
def handle_expr(:is_present, attribute, [value | _] = values) when value in @true_values do
  quote(do: not unquote(handle_expr(:is_blank, attribute, values)))
end

def handle_expr(:is_present, attribute, [value | _] = values) when value in @false_values do
  quote(do: unquote(handle_expr(:is_blank, attribute, values)))
end

# Blank means NULL or the empty string.
def handle_expr(:is_blank, attribute, [value | _]) when value in @true_values do
  quote(do: is_nil(unquote(field_expr(attribute))) or unquote(field_expr(attribute)) == ^"")
end

def handle_expr(:is_blank, attribute, [value | _]) when value in @false_values do
  # NOTE(review): negating "is_nil OR == ''" should logically yield
  # "NOT is_nil AND != ''"; the `or` here admits empty strings. Left as-is
  # because the module doctest pins this exact AST — confirm the intended
  # semantics before changing either.
  quote(do: not is_nil(unquote(field_expr(attribute))) or unquote(field_expr(attribute)) != ^"")
end

def handle_expr(:is_null, attribute, [value | _]) when value in @true_values do
  quote(do: is_nil(unquote(field_expr(attribute))))
end

def handle_expr(:is_null, attribute, [value | _]) when value in @false_values do
  quote(do: not is_nil(unquote(field_expr(attribute))))
end

def handle_expr(:is_not_null, attribute, [value | _] = values) when value in @true_values do
  quote(do: not unquote(handle_expr(:is_null, attribute, values)))
end

def handle_expr(:is_not_null, attribute, [value | _] = values) when value in @false_values do
  quote(do: unquote(handle_expr(:is_null, attribute, values)))
end

# Case-sensitive "contains" match.
def handle_expr(:like, attribute, [value | _]) do
  quote do: like(unquote(field_expr(attribute)), unquote("%#{value}%"))
end

def handle_expr(:not_like, attribute, [value | _]) do
  quote do: not like(unquote(field_expr(attribute)), unquote("%#{value}%"))
end

def handle_expr(:in, attribute, values) do
  quote do: unquote(field_expr(attribute)) in ^unquote(values)
end

def handle_expr(:not_in, attribute, values) do
  quote do: not (unquote(field_expr(attribute)) in ^unquote(values))
end

# :matches passes the user pattern through verbatim (caller supplies % / _).
def handle_expr(:matches, attribute, [value | _]) do
  quote do: ilike(unquote(field_expr(attribute)), ^unquote(value))
end

def handle_expr(:does_not_match, attribute, [value | _]) do
  quote do: not ilike(unquote(field_expr(attribute)), ^unquote(value))
end

# Case-insensitive prefix match: pattern is value%.
def handle_expr(:start_with, attribute, [value | _]) do
  quote do: ilike(unquote(field_expr(attribute)), unquote("#{value}%"))
end

def handle_expr(:not_start_with, attribute, [value | _]) do
  quote do: not ilike(unquote(field_expr(attribute)), unquote("#{value}%"))
end

# Case-insensitive suffix match: pattern is %value, mirroring :start_with's
# value%. (Previously "%value%", which matched any substring rather than a
# suffix.)
def handle_expr(:end_with, attribute, [value | _]) do
  quote do: ilike(unquote(field_expr(attribute)), unquote("%#{value}"))
end

def handle_expr(:not_end_with, attribute, [value | _]) do
  quote do: not ilike(unquote(field_expr(attribute)), unquote("%#{value}"))
end

# Exclusive range: ^lower < field < ^upper. Note: `hd` is bound as a local
# here, but the parenthesised call `hd(last)` still resolves to Kernel.hd/1.
def handle_expr(:between, attribute, [hd | last] = values) when length(values) == 2 do
  quote(
    do:
      ^unquote(hd) < unquote(field_expr(attribute)) and
        unquote(field_expr(attribute)) < ^unquote(hd(last))
  )
end

# A single "lo..hi" string is split into its two bounds and re-dispatched.
def handle_expr(:between, attribute, [value | _]) when is_binary(value) do
  result = String.split(value, "..")
  handle_expr(:between, attribute, result)
end

# Quoted `field(parent, name)` accessor for the attribute, e.g.
# field(query, :price).
defp field_expr(%Attribute{name: name, parent: parent}) do
  quote do: field(unquote(Macro.var(parent, Elixir)), unquote(name))
end
end
|
lib/turbo_ecto/services/build_search_query.ex
| 0.808294
| 0.610541
|
build_search_query.ex
|
starcoder
|
defmodule Filtrex.Utils.Encoder do
  @moduledoc """
  Helper methods for implementing the `Filtrex.Encoder` protocol.
  """

  @doc """
  This macro allows a simple creation of encoders using a simple DSL.
  Example:
  ```
  encoder "equals", "does not equal", "column = ?", &(&1)
  ```
  In this example, a comparator and its reverse are passed in followed
  by an expression where "column" is substituted for the actual column
  name from the struct. The final argument is a function (which is not
  necessary in this case since it is the default value) that takes the
  raw value being passed in and returns the transformed value to be
  injected as a value into the fragment expression.
  """
  # Macro arguments arrive pre-quoted, so the default is raw AST.
  # NOTE(review): the doc above says the default is `&(&1)`, but this AST
  # quotes a capture that wraps the value in a LIST — presumably intentional,
  # since `intersperse_column_refs/2` below expects a list of values; confirm
  # before relying on the documented form.
  defmacro encoder(comparator, reverse_comparator, expression, values_function \\ {:&, [], [[{:&, [], [1]}]]}) do
    quote do
      import Filtrex.Utils.Encoder

      # Inverse conditions are encoded by flipping to the reverse comparator
      # and recursing with `inverse: false`.
      def encode(condition = %{comparator: unquote(comparator), inverse: true}) do
        condition |> struct(inverse: false, comparator: unquote(reverse_comparator)) |> encode
      end

      # Normal (non-inverse) case: transform the raw value, interleave column
      # references, and emit a fragment with "column" replaced by "?" so the
      # column ref is passed as a fragment argument (see
      # `intersperse_column_refs/2` for why).
      def encode(%{column: column, comparator: unquote(comparator), value: value}) do
        values =
          unquote(values_function).(value)
          |> intersperse_column_refs(column)

        %Filtrex.Fragment{
          expression: String.replace(unquote(expression), "column", "?"),
          values: values
        }
      end
    end
  end

  @doc """
  Intersperses proper Ecto column references between values to be
  queried.

  ## Examples

      intersperse_column_refs(["post"], "title")
      # => [s.title, "post"]

      intersperse_column_refs(["best", "post"], "title")
      # => [s.title, "best", s.title, "post"]

  ## Background

  Ecto queries support string query fragments, but fields referenced in
  these fragments need to specifically reference fields, or you will get
  "Ambiguous column" errors for some queries.
  In other words:

      # Good
      where(query, [s], fragment("lower(?) = lower(?)", s.title, "post")

      # Bad
      where(query, [s], fragment("lower(title) = lower(?)", "post"))

  Interpolating `s.title` into the fragment arguments ensures that joined
  tables which also have the `title` column will not conflict.
  See `Ecto.Query.API.fragment/1` for more details.
  """
  def intersperse_column_refs(values, column) do
    # to_existing_atom: columns come from user input; never mint new atoms.
    column = String.to_existing_atom(column)

    # Cycle the quoted `s.column` ref, pair one ref with each value, then
    # flatten pairs into [ref, value, ref, value, ...].
    [quote do: s.unquote(column)]
    |> Stream.cycle
    |> Enum.take(length(values))
    |> Enum.zip(values)
    |> Enum.map(&Tuple.to_list/1)
    |> List.flatten
  end
end
|
lib/filtrex/utils/encoder.ex
| 0.919254
| 0.89974
|
encoder.ex
|
starcoder
|
defmodule Livebook.Utils.ANSI do
  @moduledoc false

  # One visual attribute in effect for a span of output text.
  @type modifier ::
          {:font_weight, :bold | :light}
          | {:font_style, :italic}
          | {:text_decoration, :underline | :line_through | :overline}
          | {:foreground_color, color()}
          | {:background_color, color()}

  # Colors are either the 16 named colors, a 24-step grayscale, or a 6x6x6
  # RGB cube entry (the xterm 256-color palette, see color_from_code/1).
  @type color :: basic_color() | {:grayscale24, 0..23} | {:rgb6, 0..5, 0..5, 0..5}

  @type basic_color ::
          :black
          | :red
          | :green
          | :yellow
          | :blue
          | :magenta
          | :cyan
          | :white
          | :light_black
          | :light_red
          | :light_green
          | :light_yellow
          | :light_blue
          | :light_magenta
          | :light_cyan
          | :light_white

  @doc """
  Takes a string with ANSI escape codes and parses it
  into a list of `{modifiers, string}` parts.
  """
  @spec parse_ansi_string(String.t()) :: list({list(modifier()), String.t()})
  def parse_ansi_string(string) do
    # Split on ESC: `head` has no escape prefix; every other element begins
    # with the remainder of an escape sequence.
    [head | ansi_prefixed_strings] = String.split(string, "\e")

    # Each part has the form of {modifiers, string}
    # map_reduce threads the currently-active modifier map through the parts,
    # so each text chunk is tagged with the modifiers in effect when it was
    # printed.
    {tail_parts, _} =
      Enum.map_reduce(ansi_prefixed_strings, %{}, fn string, modifiers ->
        {modifiers, rest} =
          case ansi_prefix_to_modifiers(string) do
            {:ok, new_modifiers, rest} ->
              modifiers = Enum.reduce(new_modifiers, modifiers, &apply_modifier(&2, &1))
              {modifiers, rest}

            :error ->
              # Unrecognized sequence: keep the literal ESC in the output.
              {modifiers, "\e" <> string}
          end

        {{Map.to_list(modifiers), rest}, modifiers}
      end)

    parts = [{[], head} | tail_parts]

    # Drop empty chunks (e.g. back-to-back escapes), then merge neighbors
    # that ended up with identical modifiers.
    parts
    |> Enum.reject(fn {_modifiers, string} -> string == "" end)
    |> merge_adjacent_parts([])
  end

  defp merge_adjacent_parts([], acc), do: Enum.reverse(acc)

  # The repeated `modifiers` variable in this head is an equality match:
  # the clause only fires when both parts carry the same modifier list.
  defp merge_adjacent_parts([{modifiers, string1}, {modifiers, string2} | parts], acc) do
    merge_adjacent_parts([{modifiers, string1 <> string2} | parts], acc)
  end

  defp merge_adjacent_parts([part | parts], acc) do
    merge_adjacent_parts(parts, [part | acc])
  end

  # Cursor-movement and screen-clearing sequences carry no styling; they are
  # recognized (so the ESC is consumed) but contribute no modifiers.
  defp ansi_prefix_to_modifiers("[1A" <> rest), do: {:ok, [:ignored], rest}
  defp ansi_prefix_to_modifiers("[1B" <> rest), do: {:ok, [:ignored], rest}
  defp ansi_prefix_to_modifiers("[1C" <> rest), do: {:ok, [:ignored], rest}
  defp ansi_prefix_to_modifiers("[1D" <> rest), do: {:ok, [:ignored], rest}
  defp ansi_prefix_to_modifiers("[2J" <> rest), do: {:ok, [:ignored], rest}
  defp ansi_prefix_to_modifiers("[2K" <> rest), do: {:ok, [:ignored], rest}
  defp ansi_prefix_to_modifiers("[H" <> rest), do: {:ok, [:ignored], rest}

  # "\e(B" is RFC1468's switch to ASCII character set and can be ignored. This
  # can appear even when JIS character sets aren't in use
  defp ansi_prefix_to_modifiers("(B" <> rest), do: {:ok, [:ignored], rest}

  # SGR sequence: "\e[<args>m" where args are semicolon-separated integers.
  defp ansi_prefix_to_modifiers("[" <> rest) do
    with [args_string, rest] <- String.split(rest, "m", parts: 2),
         {:ok, args} <- parse_ansi_args(args_string),
         {:ok, modifiers} <- ansi_args_to_modifiers(args, []) do
      {:ok, modifiers, rest}
    else
      _ -> :error
    end
  end

  defp ansi_prefix_to_modifiers(_string), do: :error

  defp parse_ansi_args(args_string) do
    args_string
    |> String.split(";")
    |> Enum.reduce_while([], fn arg, parsed ->
      case parse_ansi_arg(arg) do
        {:ok, n} -> {:cont, [n | parsed]}
        :error -> {:halt, :error}
      end
    end)
    |> case do
      :error -> :error
      parsed -> {:ok, Enum.reverse(parsed)}
    end
  end

  # An empty SGR argument is equivalent to 0 (reset).
  defp parse_ansi_arg(""), do: {:ok, 0}

  defp parse_ansi_arg(string) do
    case Integer.parse(string) do
      {n, ""} -> {:ok, n}
      _ -> :error
    end
  end

  defp ansi_args_to_modifiers([], acc), do: {:ok, Enum.reverse(acc)}

  defp ansi_args_to_modifiers(args, acc) do
    case ansi_args_to_modifier(args) do
      {:ok, modifier, args} -> ansi_args_to_modifiers(args, [modifier | acc])
      :error -> :error
    end
  end

  @colors [:black, :red, :green, :yellow, :blue, :magenta, :cyan, :white]

  # Consumes one SGR code (possibly multi-argument, e.g. 38;5;n) from the
  # front of the list and returns the corresponding modifier plus leftovers.
  defp ansi_args_to_modifier(args) do
    case args do
      [0 | args] ->
        {:ok, :reset, args}

      [1 | args] ->
        {:ok, {:font_weight, :bold}, args}

      [2 | args] ->
        {:ok, {:font_weight, :light}, args}

      [3 | args] ->
        {:ok, {:font_style, :italic}, args}

      [4 | args] ->
        {:ok, {:text_decoration, :underline}, args}

      [9 | args] ->
        {:ok, {:text_decoration, :line_through}, args}

      # 22/23/24 reset a single attribute group; apply_modifier/2 deletes
      # the corresponding key.
      [22 | args] ->
        {:ok, {:font_weight, :reset}, args}

      [23 | args] ->
        {:ok, {:font_style, :reset}, args}

      [24 | args] ->
        {:ok, {:text_decoration, :reset}, args}

      # 30-37: standard foreground colors.
      [n | args] when n in 30..37 ->
        color = Enum.at(@colors, n - 30)
        {:ok, {:foreground_color, color}, args}

      # 38;5;n: 256-color foreground.
      [38, 5, bit8 | args] when bit8 in 0..255 ->
        color = color_from_code(bit8)
        {:ok, {:foreground_color, color}, args}

      [39 | args] ->
        {:ok, {:foreground_color, :reset}, args}

      # 40-47: standard background colors.
      [n | args] when n in 40..47 ->
        color = Enum.at(@colors, n - 40)
        {:ok, {:background_color, color}, args}

      # 48;5;n: 256-color background.
      [48, 5, bit8 | args] when bit8 in 0..255 ->
        color = color_from_code(bit8)
        {:ok, {:background_color, color}, args}

      [49 | args] ->
        {:ok, {:background_color, :reset}, args}

      [53 | args] ->
        {:ok, {:text_decoration, :overline}, args}

      [55 | args] ->
        {:ok, {:text_decoration, :reset}, args}

      # 90-97 / 100-107: bright ("light") foreground / background colors.
      [n | args] when n in 90..97 ->
        color = Enum.at(@colors, n - 90)
        {:ok, {:foreground_color, :"light_#{color}"}, args}

      [n | args] when n in 100..107 ->
        color = Enum.at(@colors, n - 100)
        {:ok, {:background_color, :"light_#{color}"}, args}

      # Any other code up to 107 is valid-but-unsupported: consume and ignore.
      [n | args] when n <= 107 ->
        {:ok, :ignored, args}

      _ ->
        :error
    end
  end

  # xterm 256-color palette: 0-7 standard, 8-15 bright, 16-231 a 6x6x6 RGB
  # cube, 232-255 a 24-step grayscale ramp.
  defp color_from_code(code) when code in 0..7 do
    Enum.at(@colors, code)
  end

  defp color_from_code(code) when code in 8..15 do
    color = Enum.at(@colors, code - 8)
    :"light_#{color}"
  end

  defp color_from_code(code) when code in 16..231 do
    rgb_code = code - 16
    b = rgb_code |> rem(6)
    g = rgb_code |> div(6) |> rem(6)
    r = rgb_code |> div(36)
    {:rgb6, r, g, b}
  end

  defp color_from_code(code) when code in 232..255 do
    level = code - 232
    {:grayscale24, level}
  end

  # Fold one parsed modifier into the active-modifier map.
  defp apply_modifier(modifiers, :ignored), do: modifiers
  defp apply_modifier(_modifiers, :reset), do: %{}
  defp apply_modifier(modifiers, {key, :reset}), do: Map.delete(modifiers, key)
  defp apply_modifier(modifiers, {key, value}), do: Map.put(modifiers, key, value)
end
|
lib/livebook/utils/ansi.ex
| 0.834845
| 0.538316
|
ansi.ex
|
starcoder
|
defmodule AWS.AppStream do
  @moduledoc """
  Amazon AppStream 2.0

  This is the *Amazon AppStream 2.0 API Reference*.
  This documentation provides descriptions and syntax for each of the actions and
  data types in AppStream 2.0. AppStream 2.0 is a fully managed, secure
  application streaming service that lets you stream desktop applications to users
  without rewriting applications. AppStream 2.0 manages the AWS resources that are
  required to host and run your applications, scales automatically, and provides
  access to your users on demand.

  You can call the AppStream 2.0 API operations by using an interface VPC endpoint
  (interface endpoint). For more information, see [Access AppStream 2.0 API Operations and CLI Commands Through an Interface VPC
  Endpoint](https://docs.aws.amazon.com/appstream2/latest/developerguide/access-api-cli-through-interface-vpc-endpoint.html)
  in the *Amazon AppStream 2.0 Administration Guide*.

  To learn more about AppStream 2.0, see the following resources:

  * [Amazon AppStream 2.0 product page](http://aws.amazon.com/appstream2)

  * [Amazon AppStream 2.0 documentation](http://aws.amazon.com/documentation/appstream2)
  """

  # NOTE(review): this module follows the aws-elixir generated-client layout
  # (static metadata + one thin Request.request_post/5 wrapper per API
  # action) — presumably machine-generated; prefer regenerating over
  # hand-editing. Confirm against the project's codegen setup.
  alias AWS.Client
  alias AWS.Request

  # Static service descriptor consumed by AWS.Request to sign (SigV4) and
  # route every call below.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2016-12-01",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "appstream2",
      global?: false,
      protocol: "json",
      service_id: "AppStream",
      signature_version: "v4",
      signing_name: "appstream",
      target_prefix: "PhotonAdminProxyService"
    }
  end

  @doc """
  Associates the specified fleet with the specified stack.
  """
  def associate_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "AssociateFleet", input, options)
  end

  @doc """
  Associates the specified users with the specified stacks.

  Users in a user pool cannot be assigned to stacks with fleets that are joined to
  an Active Directory domain.
  """
  def batch_associate_user_stack(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "BatchAssociateUserStack", input, options)
  end

  @doc """
  Disassociates the specified users from the specified stacks.
  """
  def batch_disassociate_user_stack(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "BatchDisassociateUserStack", input, options)
  end

  @doc """
  Copies the image within the same region or to a new region within the same AWS
  account.

  Note that any tags you added to the image will not be copied.
  """
  def copy_image(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CopyImage", input, options)
  end

  @doc """
  Creates a Directory Config object in AppStream 2.0.

  This object includes the configuration information required to join fleets and
  image builders to Microsoft Active Directory domains.
  """
  def create_directory_config(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateDirectoryConfig", input, options)
  end

  @doc """
  Creates a fleet.

  A fleet consists of streaming instances that run a specified image.
  """
  def create_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateFleet", input, options)
  end

  @doc """
  Creates an image builder.

  An image builder is a virtual machine that is used to create an image.
  The initial state of the builder is `PENDING`. When it is ready, the state is
  `RUNNING`.
  """
  def create_image_builder(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateImageBuilder", input, options)
  end

  @doc """
  Creates a URL to start an image builder streaming session.
  """
  def create_image_builder_streaming_url(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateImageBuilderStreamingURL", input, options)
  end

  @doc """
  Creates a stack to start streaming applications to users.

  A stack consists of an associated fleet, user access policies, and storage
  configurations.
  """
  def create_stack(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateStack", input, options)
  end

  @doc """
  Creates a temporary URL to start an AppStream 2.0 streaming session for the
  specified user.

  A streaming URL enables application streaming to be tested without user setup.
  """
  def create_streaming_url(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateStreamingURL", input, options)
  end

  @doc """
  Creates a new image with the latest Windows operating system updates, driver
  updates, and AppStream 2.0 agent software.

  For more information, see the "Update an Image by Using Managed AppStream 2.0
  Image Updates" section in [Administer Your AppStream 2.0 Images](https://docs.aws.amazon.com/appstream2/latest/developerguide/administer-images.html),
  in the *Amazon AppStream 2.0 Administration Guide*.
  """
  def create_updated_image(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateUpdatedImage", input, options)
  end

  @doc """
  Creates a usage report subscription.

  Usage reports are generated daily.
  """
  def create_usage_report_subscription(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateUsageReportSubscription", input, options)
  end

  @doc """
  Creates a new user in the user pool.
  """
  def create_user(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "CreateUser", input, options)
  end

  @doc """
  Deletes the specified Directory Config object from AppStream 2.0.

  This object includes the information required to join streaming instances to an
  Active Directory domain.
  """
  def delete_directory_config(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteDirectoryConfig", input, options)
  end

  @doc """
  Deletes the specified fleet.
  """
  def delete_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteFleet", input, options)
  end

  @doc """
  Deletes the specified image.

  You cannot delete an image when it is in use. After you delete an image, you
  cannot provision new capacity using the image.
  """
  def delete_image(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteImage", input, options)
  end

  @doc """
  Deletes the specified image builder and releases the capacity.
  """
  def delete_image_builder(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteImageBuilder", input, options)
  end

  @doc """
  Deletes permissions for the specified private image.

  After you delete permissions for an image, AWS accounts to which you previously
  granted these permissions can no longer use the image.
  """
  def delete_image_permissions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteImagePermissions", input, options)
  end

  @doc """
  Deletes the specified stack.

  After the stack is deleted, the application streaming environment provided by
  the stack is no longer available to users. Also, any reservations made for
  application streaming sessions for the stack are released.
  """
  def delete_stack(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteStack", input, options)
  end

  @doc """
  Disables usage report generation.
  """
  def delete_usage_report_subscription(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteUsageReportSubscription", input, options)
  end

  @doc """
  Deletes a user from the user pool.
  """
  def delete_user(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DeleteUser", input, options)
  end

  @doc """
  Retrieves a list that describes one or more specified Directory Config objects
  for AppStream 2.0, if the names for these objects are provided.

  Otherwise, all Directory Config objects in the account are described. These
  objects include the configuration information required to join fleets and image
  builders to Microsoft Active Directory domains.
  Although the response syntax in this topic includes the account password, this
  password is not returned in the actual response.
  """
  def describe_directory_configs(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeDirectoryConfigs", input, options)
  end

  @doc """
  Retrieves a list that describes one or more specified fleets, if the fleet names
  are provided.

  Otherwise, all fleets in the account are described.
  """
  def describe_fleets(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeFleets", input, options)
  end

  @doc """
  Retrieves a list that describes one or more specified image builders, if the
  image builder names are provided.

  Otherwise, all image builders in the account are described.
  """
  def describe_image_builders(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeImageBuilders", input, options)
  end

  @doc """
  Retrieves a list that describes the permissions for shared AWS account IDs on a
  private image that you own.
  """
  def describe_image_permissions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeImagePermissions", input, options)
  end

  @doc """
  Retrieves a list that describes one or more specified images, if the image names
  or image ARNs are provided.

  Otherwise, all images in the account are described.
  """
  def describe_images(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeImages", input, options)
  end

  @doc """
  Retrieves a list that describes the streaming sessions for a specified stack and
  fleet.

  If a UserId is provided for the stack and fleet, only streaming sessions for
  that user are described. If an authentication type is not provided, the default
  is to authenticate users using a streaming URL.
  """
  def describe_sessions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeSessions", input, options)
  end

  @doc """
  Retrieves a list that describes one or more specified stacks, if the stack names
  are provided.

  Otherwise, all stacks in the account are described.
  """
  def describe_stacks(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeStacks", input, options)
  end

  @doc """
  Retrieves a list that describes one or more usage report subscriptions.
  """
  def describe_usage_report_subscriptions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeUsageReportSubscriptions", input, options)
  end

  @doc """
  Retrieves a list that describes the UserStackAssociation objects.

  You must specify either or both of the following:

  * The stack name

  * The user name (email address of the user associated with the
  stack) and the authentication type for the user
  """
  def describe_user_stack_associations(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeUserStackAssociations", input, options)
  end

  @doc """
  Retrieves a list that describes one or more specified users in the user pool.
  """
  def describe_users(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeUsers", input, options)
  end

  @doc """
  Disables the specified user in the user pool.

  Users can't sign in to AppStream 2.0 until they are re-enabled. This action does
  not delete the user.
  """
  def disable_user(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DisableUser", input, options)
  end

  @doc """
  Disassociates the specified fleet from the specified stack.
  """
  def disassociate_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DisassociateFleet", input, options)
  end

  @doc """
  Enables a user in the user pool.

  After being enabled, users can sign in to AppStream 2.0 and open applications
  from the stacks to which they are assigned.
  """
  def enable_user(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "EnableUser", input, options)
  end

  @doc """
  Immediately stops the specified streaming session.
  """
  def expire_session(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ExpireSession", input, options)
  end

  @doc """
  Retrieves the name of the fleet that is associated with the specified stack.
  """
  def list_associated_fleets(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListAssociatedFleets", input, options)
  end

  @doc """
  Retrieves the name of the stack with which the specified fleet is associated.
  """
  def list_associated_stacks(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListAssociatedStacks", input, options)
  end

  @doc """
  Retrieves a list of all tags for the specified AppStream 2.0 resource.

  You can tag AppStream 2.0 image builders, images, fleets, and stacks.
  For more information about tags, see [Tagging Your Resources](https://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html)
  in the *Amazon AppStream 2.0 Administration Guide*.
  """
  def list_tags_for_resource(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ListTagsForResource", input, options)
  end

  @doc """
  Starts the specified fleet.
  """
  def start_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "StartFleet", input, options)
  end

  @doc """
  Starts the specified image builder.
  """
  def start_image_builder(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "StartImageBuilder", input, options)
  end

  @doc """
  Stops the specified fleet.
  """
  def stop_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "StopFleet", input, options)
  end

  @doc """
  Stops the specified image builder.
  """
  def stop_image_builder(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "StopImageBuilder", input, options)
  end

  @doc """
  Adds or overwrites one or more tags for the specified AppStream 2.0 resource.

  You can tag AppStream 2.0 image builders, images, fleets, and stacks.
  Each tag consists of a key and an optional value. If a resource already has a
  tag with the same key, this operation updates its value.
  To list the current tags for your resources, use `ListTagsForResource`. To
  disassociate tags from your resources, use `UntagResource`.
  For more information about tags, see [Tagging Your Resources](https://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html)
  in the *Amazon AppStream 2.0 Administration Guide*.
  """
  def tag_resource(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "TagResource", input, options)
  end

  @doc """
  Disassociates one or more specified tags from the specified AppStream 2.0
  resource.

  To list the current tags for your resources, use `ListTagsForResource`.
  For more information about tags, see [Tagging Your Resources](https://docs.aws.amazon.com/appstream2/latest/developerguide/tagging-basic.html)
  in the *Amazon AppStream 2.0 Administration Guide*.
  """
  def untag_resource(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UntagResource", input, options)
  end

  @doc """
  Updates the specified Directory Config object in AppStream 2.0.

  This object includes the configuration information required to join fleets and
  image builders to Microsoft Active Directory domains.
  """
  def update_directory_config(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateDirectoryConfig", input, options)
  end

  @doc """
  Updates the specified fleet.

  If the fleet is in the `STOPPED` state, you can update any attribute except the
  fleet name. If the fleet is in the `RUNNING` state, you can update the
  `DisplayName`, `ComputeCapacity`, `ImageARN`, `ImageName`,
  `IdleDisconnectTimeoutInSeconds`, and `DisconnectTimeoutInSeconds` attributes.
  If the fleet is in the `STARTING` or `STOPPING` state, you can't update it.
  """
  def update_fleet(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateFleet", input, options)
  end

  @doc """
  Adds or updates permissions for the specified private image.
  """
  def update_image_permissions(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateImagePermissions", input, options)
  end

  @doc """
  Updates the specified fields for the specified stack.
  """
  def update_stack(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateStack", input, options)
  end
end
|
lib/aws/generated/app_stream.ex
| 0.888124
| 0.443661
|
app_stream.ex
|
starcoder
|
defmodule Zstream.EncryptionCoder.Traditional do
  @moduledoc """
  Implements the tradition encryption.
  """

  @behaviour Zstream.EncryptionCoder

  use Bitwise

  defmodule State do
    @moduledoc false
    # key0/key1/key2 are the running key-schedule registers, seeded with the
    # constants from the traditional (ZipCrypto) scheme; `header` is the
    # 12-byte encryption header emitted before the first data chunk.
    defstruct key0: 0x12345678,
              key1: 0x23456789,
              key2: 0x34567890,
              header: nil,
              header_sent: false
  end

  # Seeds the key registers with the password and builds the 12-byte header:
  # 10 random bytes followed by the little-endian DOS time (used as the
  # password check bytes).
  def init(options) do
    password = Keyword.fetch!(options, :password)
    mtime = Keyword.fetch!(options, :mtime)
    header = :crypto.strong_rand_bytes(10) <> <<dos_time(mtime)::little-size(16)>>
    update_keys(%State{header: header}, password)
  end

  # Encrypts a chunk; the first call prepends the encryption header.
  def encode(chunk, %State{header_sent: false} = state) do
    encrypt(%{state | header_sent: true}, IO.iodata_to_binary([state.header, chunk]))
  end

  def encode(chunk, state) do
    encrypt(state, IO.iodata_to_binary(chunk))
  end

  # The traditional cipher is a plain stream cipher; nothing to flush.
  def close(_state) do
    []
  end

  def general_purpose_flag, do: 0x0001

  defp encrypt(state, plain), do: do_encrypt(plain, state, [])

  defp do_encrypt(<<>>, state, acc), do: {Enum.reverse(acc), state}

  defp do_encrypt(<<byte, rest::binary>>, state, acc) do
    # Derive the keystream byte from key2, XOR it with the plaintext byte,
    # then advance the key schedule with the *plaintext* byte.
    t0 = (state.key2 ||| 2) &&& 0x0000FFFF
    keystream = (t0 * Bitwise.bxor(t0, 1)) >>> 8 &&& 0x000000FF
    cipher = <<Bitwise.bxor(byte, keystream)::integer-size(8)>>
    do_encrypt(rest, update_keys(state, <<byte>>), [cipher | acc])
  end

  # Folds each byte into the three key registers per the ZipCrypto schedule.
  defp update_keys(state, <<>>), do: state

  defp update_keys(state, <<byte, rest::binary>>) do
    key0 = crc32(state.key0, <<byte>>)
    key1 = (state.key1 + (key0 &&& 0x000000FF)) * 134_775_813 + 1 &&& 0xFFFFFFFF
    key2 = crc32(state.key2, <<key1 >>> 24::integer-size(8)>>)
    update_keys(%{state | key0: key0, key1: key1, key2: key2}, rest)
  end

  # CRC32 step with pre/post complement, as the key schedule requires.
  defp crc32(current, data) do
    current
    |> Bitwise.bxor(0xFFFFFFFF)
    |> :erlang.crc32(data)
    |> Bitwise.bxor(0xFFFFFFFF)
  end

  # Packs h/m/s into DOS 16-bit time format (2-second resolution).
  defp dos_time(t) do
    round(t.second / 2 + (t.minute <<< 5) + (t.hour <<< 11))
  end
end
|
lib/zstream/encryption_coder/traditional.ex
| 0.613121
| 0.473536
|
traditional.ex
|
starcoder
|
defmodule ElixirRigidPhysics.Util.List do
  @moduledoc """
  List utils module for functions and things that should've been in the stdlib but aren't.
  """

  @doc """
  Function to find every pair of elements in a list.

  ## Examples

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([])
      []

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([1])
      []

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([1,2])
      [{1,2}]

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([1,2,3])
      [{1,2}, {1,3}, {2,3}]
  """
  @spec generate_pairs([any()]) :: [{any(), any()}]
  def generate_pairs(elements) do
    # Pairing without a filter is just pairing with an always-true predicate.
    generate_pairs(elements, fn _left, _right -> true end)
  end

  @doc """
  Function to find every pair of elements in a list, keeping only pairs
  accepted by the predicate.

  ## Examples

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([], fn(_a,_b) -> true end)
      []

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([1], fn(_a,_b) -> true end)
      []

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([1,2], fn(_a,_b) -> true end)
      [{1,2}]

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([1,2], fn(_a,_b) -> false end)
      []

      iex> alias ElixirRigidPhysics.Util.List
      iex> List.generate_pairs([1,2,3], fn(a,b) -> a * b > 3 end)
      [{2,3}]
  """
  @spec generate_pairs([any()], (any(), any() -> boolean())) :: [{any(), any()}]
  def generate_pairs([], _predicate), do: []
  def generate_pairs([_], _predicate), do: []

  def generate_pairs([head | tail], predicate) do
    # Pair the head with every later element that passes the predicate,
    # then recurse on the tail for the remaining pairs.
    head_pairs = for other <- tail, predicate.(head, other), do: {head, other}
    head_pairs ++ generate_pairs(tail, predicate)
  end
end
|
lib/util/list.ex
| 0.647464
| 0.406008
|
list.ex
|
starcoder
|
defmodule Absinthe.Type.Interface do
  @moduledoc """
  A defined interface type that represents a list of named fields and their
  arguments.

  Fields on an interface have the same rules as fields on an
  `Absinthe.Type.Object`.

  If an `Absinthe.Type.Object` lists an interface in its `:interfaces` entry,
  it guarantees that it defines the same fields and arguments that the
  interface does.

  Because it is sometimes necessary for the interface to determine the
  implementing type of a resolved object, you must either:

  * Provide a `:resolve_type` function on the interface
  * Provide a `:is_type_of` function on each implementing type

  ```
  interface :named_entity do
    field :name, :string
    resolve_type fn
      %{age: _}, _ -> :person
      %{employee_count: _}, _ -> :business
      _, _ -> nil
    end
  end

  object :person do
    field :name, :string
    field :age, :string
    interface :named_entity
  end

  object :business do
    field :name, :string
    field :employee_count, :integer
    interface :named_entity
  end
  ```
  """

  use Absinthe.Introspection.TypeKind, :interface

  alias Absinthe.Type
  alias Absinthe.Schema

  @typedoc """
  * `:name` - The name of the interface type. Should be a TitleCased `binary`. Set automatically.
  * `:description` - A nice description for introspection.
  * `:fields` - A map of `Absinthe.Type.Field` structs. See `Absinthe.Schema.Notation.field/4` and
  * `:args` - A map of `Absinthe.Type.Argument` structs. See `Absinthe.Schema.Notation.arg/2`.
  * `:resolve_type` - A function used to determine the implementing type of a resolved object. See also `Absinthe.Type.Object`'s `:is_type_of`.

  The `:resolve_type` function will be passed two arguments; the object whose type needs to be identified, and the `Absinthe.Execution` struct providing the full execution context.

  The `__private__` and `:__reference__` keys are for internal use.
  """
  @type t :: %__MODULE__{
          name: binary,
          description: binary,
          fields: map,
          identifier: atom,
          interfaces: [Absinthe.Type.Interface.t()],
          __private__: Keyword.t(),
          definition: module,
          __reference__: Type.Reference.t()
        }

  defstruct name: nil,
            description: nil,
            fields: nil,
            identifier: nil,
            resolve_type: nil,
            interfaces: [],
            __private__: [],
            definition: nil,
            __reference__: nil

  @doc false
  defdelegate functions, to: Absinthe.Blueprint.Schema.InterfaceTypeDefinition

  @doc """
  Resolves the concrete type for `obj` against the given interface.

  Uses the interface's `:resolve_type` function when one is defined;
  otherwise scans the schema's implementors for the first one whose
  `:is_type_of` accepts the object. Returns `nil` when nothing matches.

  With `lookup: true` (the default) the resolved identifier is looked up
  in the schema and the full type struct is returned; with `lookup: false`
  the bare identifier is returned instead.
  """
  # The spec mirrors the full 4-arity head (the trailing options list has a
  # default) so both resolve_type/3 and resolve_type/4 are covered.
  @spec resolve_type(Type.Interface.t(), any, Absinthe.Resolution.t(), Keyword.t()) ::
          Type.t() | nil
  def resolve_type(type, obj, env, opts \\ [lookup: true])

  def resolve_type(interface, obj, %{schema: schema} = env, opts) do
    if resolver = Type.function(interface, :resolve_type) do
      case resolver.(obj, env) do
        nil ->
          nil

        ident when is_atom(ident) ->
          if opts[:lookup] do
            Absinthe.Schema.lookup_type(schema, ident)
          else
            ident
          end
      end
    else
      # No :resolve_type on the interface: fall back to each implementor's
      # :is_type_of predicate.
      type_name =
        Schema.implementors(schema, interface.identifier)
        |> Enum.find(fn type ->
          Absinthe.Type.function(type, :is_type_of).(obj)
        end)

      if opts[:lookup] do
        Absinthe.Schema.lookup_type(schema, type_name)
      else
        type_name
      end
    end
  end

  @doc """
  Whether the interface (or implementors) are correctly configured to resolve
  objects.
  """
  @spec type_resolvable?(Schema.t(), t) :: boolean
  def type_resolvable?(schema, %{resolve_type: nil} = iface) do
    # Without :resolve_type on the interface, every implementor must carry
    # its own :is_type_of.
    Schema.implementors(schema, iface)
    |> Enum.all?(& &1.is_type_of)
  end

  def type_resolvable?(_, %{resolve_type: _}) do
    true
  end

  @doc false
  @spec member?(t, Type.t()) :: boolean
  def member?(%{identifier: ident}, %{interfaces: ifaces}) do
    ident in ifaces
  end

  def member?(_, _) do
    false
  end
end
|
lib/absinthe/type/interface.ex
| 0.880598
| 0.890865
|
interface.ex
|
starcoder
|
defmodule ApiWeb.RateLimiter do
  @moduledoc """
  Tracks user requests for rate limiting.

  The rate limiter server counts the number of requests a given user has made
  within a given interval. An error is returned for a user if they attempt to
  make a request after they've reached their allotted request amount.
  """
  @limiter ApiWeb.config(:rate_limiter, :limiter)
  @clear_interval ApiWeb.config(:rate_limiter, :clear_interval)
  @wait_time_ms ApiWeb.config(:rate_limiter, :wait_time_ms)
  # How many clear intervals fit into one day; daily limits are divided by
  # this to produce a per-interval quota.
  @intervals_per_day div(86_400_000, @clear_interval)
  @max_anon_per_interval ApiWeb.config(:rate_limiter, :max_anon_per_interval)
  @max_registered_per_interval ApiWeb.config(:rate_limiter, :max_registered_per_interval)

  @type limit_data :: {non_neg_integer, non_neg_integer, non_neg_integer}
  @type log_result :: :ok | {:ok | :rate_limited, limit_data}

  ## Client

  def start_link(_opts \\ []) do
    @limiter.start_link(clear_interval: @clear_interval)
  end

  @doc """
  Logs that the user is making a request to a given resource. If the user
  has already reached their allotted request amount, an error tuple is returned.

  Requests are counted in #{@intervals_per_day} #{@clear_interval}ms intervals per day. The max requests per user
  per interval vary based on the type of user and whether they have requested a limit increase.

  | `ApiWeb.User` `type` | Requests Tracked By | `ApiWeb.User.t` `limit` | Max Requests Per Interval |
  |-------------------|---------------------|----------------------|--------------------------------------|
  | `:anon` | IP Address | `nil` | `#{@max_anon_per_interval}` |
  | `:registered` | `ApiWeb.User.t` `id` | `nil` | `#{@max_registered_per_interval}` |
  | `:registered` | `ApiWeb.User.t` `id` | integer | `user.limit / #{@intervals_per_day}` |
  """
  @spec log_request(any, String.t()) :: log_result
  def log_request(_, "/_health" <> _), do: :ok

  def log_request(user, _request_path) do
    allowed = max_requests(user)
    {bucket_key, reset_ms} = key_and_reset_time(user)

    case @limiter.rate_limited?(bucket_key, allowed) do
      {:remaining, remaining} ->
        {:ok, {allowed, remaining, reset_ms}}

      :rate_limited ->
        # Make rate-limited clients wait so they can't hammer the server.
        Process.sleep(@wait_time_ms)
        {:rate_limited, {allowed, 0, reset_ms}}
    end
  end

  # Derives the counter key for the current time bucket, along with the time
  # (ms since the epoch) at which that bucket - and thus the limit - resets.
  defp key_and_reset_time(user) do
    bucket = div(System.system_time(:millisecond), @clear_interval)
    {"#{user.id}_#{bucket}", (bucket + 1) * @clear_interval}
  end

  @doc false
  def clear_interval, do: @clear_interval

  @doc false
  def intervals_per_day, do: @intervals_per_day

  if Mix.env() == :test do
    @doc "Helper function for testing, to clear the limiter state."
    def force_clear do
      @limiter.clear()
    end

    @doc "Helper function for testing, to list the active IDs."
    def list do
      @limiter.list()
    end
  end

  @doc false
  def max_anon_per_interval, do: @max_anon_per_interval

  @doc false
  def max_registered_per_interval, do: @max_registered_per_interval

  @doc """
  Returns the maximum number of requests a key can make over the interval.
  """
  @spec max_requests(ApiWeb.User.t()) :: non_neg_integer
  def max_requests(%ApiWeb.User{type: :anon}) do
    @max_anon_per_interval
  end

  def max_requests(%ApiWeb.User{type: :registered, limit: nil}) do
    @max_registered_per_interval
  end

  def max_requests(%ApiWeb.User{type: :registered, limit: daily_limit}) do
    div(daily_limit, @intervals_per_day)
  end
end
|
apps/api_web/lib/api_web/rate_limiter.ex
| 0.828973
| 0.440168
|
rate_limiter.ex
|
starcoder
|
defmodule Riptide.Interceptor do
  @moduledoc """
  Riptide Interceptors let you define simple rules using Elixir's pattern matching that trigger conditionally when data is written or read. Each one is defined as a module that can be added to your Riptide configuration for easy enabling/disabling.

  ```elixir
  config :riptide,
    interceptors: [
      TodoList.Permissions,
      TodoList.Todo.Created,
      TodoList.Todo.Alert
    ]
  ```

  Every Interceptor in this list is called in order for every Mutation and Query processed

  ## Mutation Interceptors

  Mutation interceptors run as a mutation is being processed. The callbacks are called for each part of the paths in the mutation so you can define a pattern to match any kind of mutation. The arguments passed to them are

  - `path`: A string list representing the path where the data is being written
  - `layer`: The `merge` and `delete` that is occuring at the path
  - `mut`: The full, original mutation
  - `state`: The state of the connection which can be used to store things like the currently authed user

  ### `mutation_before`

  This runs before a mutation is written. It's best used to perform validations to make sure the data looks right, augmenting mutations with information that is known by the server only, or data denormalization. Here is an example that keeps track of the time when a Todo was marked complete

  ```elixir
  defmodule Todo.Created do
    use Riptide.Interceptor

    def mutation_before(["todos", _key], %{merge: %{"complete" => true}}, _mut, _state) do
      {
        :merge,
        %{
          "times" => %{
            "completed" => :os.system_time(:millisecond)
          }
        }
      }
    end
  end
  ```

  The valid responses are

  - `:ok` - Returns successfully but doesn't modify anything
  - `{:error, err}` - Halts processing of interceptors and returns the error
  - `{:combine, mut}` - Combines `mut` with the input mutation before writing
  - `{:merge, map}` - Convenience version of `:combine` that merges `map` at the current path
  - `{:delete, map}` - Convenience version of `:combine` that deletes `map` at the current path

  ### `mutation_effect`

  This interceptor can be used to schedule work to be done after a mutation is successfully written. It can be used to trigger side effects like sending an email or syncing data with a third party system.

  ```elixir
  defmodule Todo.Alert do
    use Riptide.Interceptor

    def mutation_effect(["todos", key], %{merge: %{"complete" => true}}, _mut, _state) do
      {:notify_complete, [key]}
    end

    def notify_complete(key) do
      # deliver a notification about todo `key`
      :ok
    end
  end
  ```

  The valid responses are

  - `:ok` - Returns successfully but doesn't schedule any work
  - `{fun, args}` - Calls `fun` in the current module with `args`
  - `{module, fun, args}` - Calls `fun` in `module` with `args`

  ## Query Interceptors

  Query interceptors run as a query is being processed. They can be used to allow/disallow access to certain paths or even expose third party data sources. Unlike the mutation interceptors they are called only once for each path requested by a query. The arguments passed to them are

  - `path`: A string list representing the full path where the data is being written
  - `opts`: The options for the query at this path
  - `state`: The state of the connection which can be used to store things like the currently authed user

  ### `query_before`

  This runs before data is read. A common way to use it is to control access to data

  ```elixir
  defmodule Todo.Permissions do
    use Riptide.Interceptor

    def query_before(["secrets" | _rest], _opts, state) do
      cond do
        state.user === "bad-guy" -> {:error, :auth_error}
        true -> :ok
      end
    end
  end
  ```

  The valid responses are

  - `:ok` - Returns successfully
  - `{:error, err}` - Halts processing of interceptors and returns the error

  ### `query_resolve`

  This is run before data is fetched from the store. This interceptor allows you to return data for the query and skip reading from the store. They effectively create virtual paths.

  ```elixir
  defmodule Todo.Twilio do
    use Riptide.Interceptor

    def query_resolve(["twilio", "numbers" | rest], _opts, _state) do
      TwilioApi.numbers()
      |> case do
        {:ok, result} -> Kernel.get_in(result, rest)
        {:error, err} -> {:error, err}
      end
    end
  end
  ```

  The valid responses are

  - `nil` - Skips this interceptor and continues processing
  - `any_value` - Returns `any_value` as the data under the requested path
  """
  require Logger

  @doc """
  Trigger `query_before` callback on configured interceptors for given query
  """
  def query_before(query, state),
    do: query_before(query, state, Riptide.Config.riptide_interceptors())

  @doc """
  Trigger `query_before` callback on interceptors for given query
  """
  def query_before(query, state, interceptors) do
    query
    |> query_trigger(interceptors, :query_before, [state])
    |> Enum.find_value(fn
      {_mod, _, nil} -> nil
      {_mod, _, :ok} -> nil
      {_, _, result} -> result
    end)
    |> case do
      nil -> :ok
      result -> result
    end
  end

  @doc """
  Trigger `query_resolve` callback on configured interceptors for given query
  """
  def query_resolve(query, state),
    do: query_resolve(query, state, Riptide.Config.riptide_interceptors())

  @doc """
  Trigger `query_resolve` callback on interceptors for given query
  """
  def query_resolve(query, state, interceptors) do
    query
    |> query_trigger(interceptors, :query_resolve, [state])
    |> Stream.filter(fn
      {_mod, _path, nil} -> false
      _ -> true
    end)
    |> Enum.map(fn {_mod, path, value} -> {path, value} end)
  end

  # Lazily applies `fun` on every interceptor for every layer of the query,
  # yielding {module, path, result} tuples.
  defp query_trigger(query, interceptors, fun, args) do
    layers = Riptide.Query.flatten(query)

    interceptors
    |> Stream.flat_map(fn mod ->
      Stream.map(layers, fn {path, opts} ->
        result = apply(mod, fun, [path, opts | args])

        if logging?() and result != nil,
          do: Logger.info("#{mod} #{fun} #{inspect(path)} -> #{inspect(result)}")

        {mod, path, result}
      end)
    end)
  end

  @doc """
  Trigger `mutation_effect` callback on configured interceptors for given mutation
  """
  def mutation_effect(mutation, state),
    do: mutation_effect(mutation, state, Riptide.Config.riptide_interceptors())

  @doc """
  Trigger `mutation_effect` callback on interceptors for given mutation
  """
  def mutation_effect(mutation, state, interceptors) do
    mutation
    |> mutation_trigger(interceptors, :mutation_effect, [mutation, state])
    |> Stream.map(fn {mod, _path, result} ->
      case result do
        {fun, args} -> Riptide.Scheduler.schedule_in(0, mod, fun, args)
        {mod_other, fun, args} -> Riptide.Scheduler.schedule_in(0, mod_other, fun, args)
        _ -> Riptide.Mutation.new()
      end
    end)
    |> Riptide.Mutation.combine()
    |> Riptide.Mutation.combine(mutation)
  end

  @doc """
  Trigger `mutation_before` callback on configured interceptors for given mutation
  """
  @spec mutation_before(Riptide.Mutation.t(), any()) ::
          {:ok, Riptide.Mutation.t()} | {:error, any()}
  def mutation_before(mutation, state),
    do: mutation_before(mutation, state, Riptide.Config.riptide_interceptors())

  @doc """
  Trigger `mutation_before` callback on interceptors for given mutation
  """
  @spec mutation_before(Riptide.Mutation.t(), any(), [atom()]) ::
          {:ok, Riptide.Mutation.t()} | {:error, any()}
  def mutation_before(mutation, state, interceptors) do
    mutation
    |> mutation_trigger(interceptors, :mutation_before, [
      mutation,
      state
    ])
    |> Enum.reduce_while({:ok, mutation}, fn {mod, path, item}, {:ok, collect} ->
      case item do
        nil ->
          {:cont, {:ok, collect}}

        :ok ->
          {:cont, {:ok, collect}}

        {:delete, delete} ->
          {:cont,
           {:ok, Riptide.Mutation.combine(collect, Riptide.Mutation.put_delete(path, delete))}}

        {:merge, merge} ->
          {:cont,
           {:ok, Riptide.Mutation.combine(collect, Riptide.Mutation.put_merge(path, merge))}}

        {:combine, next} ->
          {:cont, {:ok, Riptide.Mutation.combine(collect, next)}}

        result = {:error, _} ->
          {:halt, result}

        _ ->
          {:halt, {:error, {:invalid_interceptor, mod}}}
      end
    end)
  end

  @doc false
  def mutation_after(mutation, state),
    do: mutation_after(mutation, state, Riptide.Config.riptide_interceptors())

  @doc false
  def mutation_after(mutation, state, interceptors) do
    mutation
    |> mutation_trigger(interceptors, :mutation_after, [
      mutation,
      state
    ])
    |> Enum.find(fn
      {_mod, _path, :ok} -> false
      {_mod, _path, nil} -> false
      {_mod, _path, {:error, _}} -> true
    end)
    |> case do
      nil -> :ok
      # BUGFIX: Enum.find returns a {mod, path, result} 3-tuple; the old
      # {_mod, result} clause could never match and raised CaseClauseError
      # whenever an interceptor returned an error.
      {_mod, _path, result} -> result
    end
  end

  # Lazily applies `fun` on every interceptor (plus the scheduler's) for
  # every layer of the mutation, yielding {module, path, result} tuples.
  defp mutation_trigger(mut, interceptors, fun, args) do
    layers = Riptide.Mutation.layers(mut)

    (interceptors ++ [Riptide.Scheduler.Interceptor])
    |> Stream.flat_map(fn mod ->
      Stream.map(layers, fn {path, data} ->
        result = apply(mod, fun, [path, data | args])

        if logging?() and result != nil,
          do: Logger.info("#{mod} #{fun} #{inspect(path)} -> #{inspect(result)}")

        {mod, path, result}
      end)
    end)
  end

  @doc false
  def logging?() do
    Keyword.get(Logger.metadata(), :interceptor) == true
  end

  @doc false
  def logging_enable() do
    Logger.metadata(interceptor: true)
  end

  @doc false
  def logging_disable() do
    Logger.metadata(interceptor: false)
  end

  @callback query_resolve(path :: list(String.t()), opts :: map, state :: any) ::
              {:ok, any} | {:error, term} | nil
  @callback query_before(path :: list(String.t()), opts :: map, state :: any) ::
              :ok | {:error, term} | nil
  @callback mutation_before(
              path :: list(String.t()),
              layer :: Riptide.Mutation.t(),
              mut :: Riptide.Mutation.t(),
              state :: String.t()
            ) :: :ok | {:error, term} | {:combine, Riptide.Mutation.t()} | nil
  @doc false
  @callback mutation_after(
              path :: list(String.t()),
              layer :: Riptide.Mutation.t(),
              mut :: Riptide.Mutation.t(),
              state :: String.t()
            ) :: :ok | nil
  @callback mutation_effect(
              path :: list(String.t()),
              layer :: Riptide.Mutation.t(),
              mut :: Riptide.Mutation.t(),
              state :: String.t()
            ) :: :ok | {atom(), atom(), list()} | {atom(), list()} | nil

  defmacro __using__(_opts) do
    quote do
      @behaviour Riptide.Interceptor
      @before_compile Riptide.Interceptor
    end
  end

  defmacro __before_compile__(_env) do
    quote do
      def mutation_before(_path, _layer, _mutation, _state), do: nil
      def mutation_after(_path, _layer, _mutation, _state), do: nil
      def mutation_effect(_path, _layer, _mutation, _state), do: nil
      def query_before(_path, _opts, _state), do: nil
      def query_resolve(_path, _opts, _state), do: nil
    end
  end
end
|
packages/elixir/lib/riptide/store/interceptor.ex
| 0.943007
| 0.934813
|
interceptor.ex
|
starcoder
|
defmodule Genex.Tools.Benchmarks.SingleObjectiveContinuous do
  @moduledoc """
  Provides benchmark functions for Single Objective Continuous optimization problems.

  The definitions follow the standard benchmark formulations (as popularized
  by the DEAP benchmark suite).
  """

  @doc """
  Cigar objective function: `x0^2 + 10^6 * sum(xi^2)`.

  # Parameters
  - `xs`: `Enum`.
  """
  def cigar([x0 | xi]) do
    # The tail terms are scaled by 1e6 and *added* to x0^2 (the previous
    # implementation multiplied, which collapsed to 0 for a single element).
    :math.pow(x0, 2) + 1_000_000 * squared_sum(xi)
  end

  @doc """
  Plane objective function: returns the first coordinate.

  # Parameters
  - `xs`: `Enum`.
  """
  def plane([x0 | _]), do: x0

  @doc """
  Sphere objective function: `sum(xi^2)`.

  # Parameters
  - `xs`: `Enum`.
  """
  def sphere(xs), do: squared_sum(xs)

  @doc """
  Random objective function: a uniform random number in `(0, 1]`.

  # Parameters
  - `xs`: `Enum` (ignored).
  """
  def rand(_), do: :rand.uniform()

  @doc """
  Ackley objective function:
  `20 - 20 exp(-0.2 sqrt(sum(xi^2)/n)) + e - exp(sum(cos(2 pi xi))/n)`.

  # Parameters
  - `xs`: `Enum`.
  """
  def ackley(xs) do
    n = Enum.count(xs)
    g_x = -0.2 * :math.sqrt(squared_sum(xs) / n)
    h_x = (xs |> Enum.map(&:math.cos(2 * :math.pi() * &1)) |> Enum.sum()) / n
    20 - 20 * :math.exp(g_x) + :math.exp(1) - :math.exp(h_x)
  end

  @doc """
  Bohachevsky objective function, summed over consecutive coordinate pairs:
  `sum(xi^2 + 2 x(i+1)^2 - 0.3 cos(3 pi xi) - 0.4 cos(4 pi x(i+1)) + 0.7)`.

  # Parameters
  - `xs`: `Enum`.
  """
  def bohachevsky(xs) do
    xs
    |> consecutive_pairs()
    |> Enum.map(fn {xi, xiplus1} ->
      :math.pow(xi, 2) + 2 * :math.pow(xiplus1, 2) -
        0.3 * :math.cos(3 * :math.pi() * xi) -
        0.4 * :math.cos(4 * :math.pi() * xiplus1) + 0.7
    end)
    |> Enum.sum()
  end

  @doc """
  Griewank objective function:
  `sum(xi^2)/4000 - prod(cos(xi / sqrt(i))) + 1` with 1-based index `i`.

  # Parameters
  - `xs`: `Enum`.
  """
  def griewank(xs) do
    sum = squared_sum(xs)

    # 1-based index: with_index(0) would both divide by sqrt(0) and swap the
    # roles of element and index in the reducer.
    product =
      xs
      |> Enum.with_index(1)
      |> Enum.reduce(1, fn {xi, i}, acc -> acc * :math.cos(xi / :math.sqrt(i)) end)

    1 / 4000 * sum - product + 1
  end

  @doc """
  h1 objective function:
  `(sin(x - y/8)^2 + sin(y + x/8)^2) / (sqrt((x - 8.6998)^2 + (y - 6.7665)^2) + 1)`.

  # Parameters
  - `x`: `number`.
  - `y`: `number`.
  """
  def h1(x, y) do
    num = :math.pow(:math.sin(x - y / 8), 2) + :math.pow(:math.sin(y + x / 8), 2)
    # Both distance components are squared (the previous version forgot to
    # square the x term).
    denom = :math.sqrt(:math.pow(x - 8.6998, 2) + :math.pow(y - 6.7665, 2)) + 1
    num / denom
  end

  @doc """
  Himmelblau objective function: `(x^2 + y - 11)^2 + (x + y^2 - 7)^2`.

  # Parameters
  - `x`: `number`.
  - `y`: `number`.
  """
  def himmelblau(x, y) do
    :math.pow(:math.pow(x, 2) + y - 11, 2) + :math.pow(x + :math.pow(y, 2) - 7, 2)
  end

  @doc """
  Rastrigin objective function: `10n + sum(xi^2 - 10 cos(2 pi xi))`.

  # Parameters
  - `xs`: `Enum`.
  """
  def rastrigin(xs) do
    10 * Enum.count(xs) +
      (xs
       |> Enum.map(&(:math.pow(&1, 2) - 10 * :math.cos(2 * :math.pi() * &1)))
       |> Enum.sum())
  end

  @doc """
  Rosenbrock objective function, summed over consecutive coordinate pairs:
  `sum((1 - xi)^2 + 100 (x(i+1) - xi^2)^2)`.

  # Parameters
  - `xs`: `Enum`.
  """
  def rosenbrock(xs) do
    xs
    |> consecutive_pairs()
    |> Enum.map(fn {xi, xiplus1} ->
      :math.pow(1 - xi, 2) + 100 * :math.pow(xiplus1 - :math.pow(xi, 2), 2)
    end)
    |> Enum.sum()
  end

  def schaffer, do: :ok
  def schwefel, do: :ok
  def shekel, do: :ok

  # Sum of squared elements (0 for an empty list).
  defp squared_sum(xs) do
    xs |> Enum.map(&:math.pow(&1, 2)) |> Enum.sum()
  end

  # Overlapping pairs {x_i, x_(i+1)}; a lone trailing element is discarded.
  # The previous chunking matched list chunks against tuple patterns and
  # crashed for inputs longer than two elements.
  defp consecutive_pairs(xs) do
    xs
    |> Enum.chunk_every(2, 1, :discard)
    |> Enum.map(fn [a, b] -> {a, b} end)
  end
end
|
lib/genex/tools/benchmarks/single_objective_continuous.ex
| 0.873485
| 0.927822
|
single_objective_continuous.ex
|
starcoder
|
defmodule SSD1322.SPIConnection do
  @moduledoc """
  Provides a high-level hardware interface to SSD1322-style SPI interfaces.
  """

  defstruct spi: nil, dc: nil, reset: nil

  # Maximum number of bytes sent in a single SPI transfer.
  @data_chunk_size 4096

  @doc """
  Initializes the SPI / GPIO connection to the display, but does not reset it
  or otherwise communicate with it in any way.

  Can take an optional keyword list to configure the connection details. Valid keys include:

  * `spi_dev`: The name of the spi device to connect to. Defaults to `spidev0.0`
  * `dc_pin`: The GPIO pin number of the line to use for D/C select. Defaults to 24
  * `reset_pin`: The GPIO pin number of the line to use for reset. Defaults to 25
  """
  def init(opts \\ []) do
    device = Keyword.get(opts, :spi_dev, "spidev0.0")
    dc_pin = Keyword.get(opts, :dc_pin, 24)
    reset_pin = Keyword.get(opts, :reset_pin, 25)

    {:ok, spi} = Circuits.SPI.open(device, speed_hz: 8_000_000, delay_us: 5)
    {:ok, dc} = Circuits.GPIO.open(dc_pin, :output)
    {:ok, reset} = Circuits.GPIO.open(reset_pin, :output)

    %__MODULE__{spi: spi, dc: dc, reset: reset}
  end

  @doc """
  Issues a hardware reset to the display
  """
  def reset(%__MODULE__{reset: reset}) do
    Circuits.GPIO.write(reset, 0)
    Circuits.GPIO.write(reset, 1)
  end

  @doc """
  Sends the given command to the display, along with optional data
  """
  def command(%__MODULE__{spi: spi, dc: dc} = conn, command, data \\ nil) do
    # D/C low selects the command register.
    Circuits.GPIO.write(dc, 0)
    Circuits.SPI.transfer(spi, command)
    if data, do: data(conn, data)
  end

  @doc """
  Sends data to the display, chunking it into runs of at most `data_chunk_size` bytes
  """
  def data(%__MODULE__{spi: spi, dc: dc}, data) do
    # D/C high selects the data register.
    Circuits.GPIO.write(dc, 1)
    data_chunked(spi, data)
  end

  defp data_chunked(spi, <<head::binary-size(@data_chunk_size), tail::binary>>) do
    Circuits.SPI.transfer(spi, head)
    data_chunked(spi, tail)
  end

  defp data_chunked(spi, remainder) do
    Circuits.SPI.transfer(spi, remainder)
  end
end
|
lib/ssd1322/spi_connection.ex
| 0.770724
| 0.427068
|
spi_connection.ex
|
starcoder
|
defmodule MangoPay.Mandate do
  @moduledoc """
  Functions for MangoPay [mandate](https://docs.mangopay.com/endpoints/v2.01/mandates#e230_the-mandate-object).
  """
  use MangoPay.Query.Base
  set_path "mandates"

  @doc """
  Get a mandate.

  ## Examples
      {:ok, mandate} = MangoPay.Mandate.get(id)
  """
  def get(id), do: _get(id)

  @doc """
  Get a mandate. Raises on failure.

  ## Examples
      mandate = MangoPay.Mandate.get!(id)
  """
  def get!(id), do: _get!(id)

  @doc """
  Create a mandate.

  ## Examples
      params = %{
        "Tag": "custom meta",
        "BankAccountId": "14213351",
        "Culture": "EN",
        "ReturnURL": "http://www.my-site.com/returnURL/"
      }
      {:ok, mandate} = MangoPay.Mandate.create(params)
  """
  def create(params) do
    _create(params, ["mandates", "directdebit/web"])
  end

  @doc """
  Create a mandate. Raises on failure.

  ## Examples
      params = %{
        "Tag": "custom meta",
        "BankAccountId": "14213351",
        "Culture": "EN",
        "ReturnURL": "http://www.my-site.com/returnURL/"
      }
      mandate = MangoPay.Mandate.create!(params)
  """
  def create!(params) do
    _create!(params, ["mandates", "directdebit/web"])
  end

  @doc """
  List all mandates.

  ## Examples
      query = %{
        "Page": 1,
        "Per_Page": 25,
        "Sort": "CreationDate:DESC",
        "BeforeDate": 1463440221,
        "AfterDate": 1431817821
      }
      {:ok, mandates} = MangoPay.Mandates.all(query)
  """
  def all(query \\ %{}), do: _all(nil, query)

  @doc """
  List all mandates. Raises on failure.

  ## Examples
      query = %{
        "Page": 1,
        "Per_Page": 25,
        "Sort": "CreationDate:DESC",
        "BeforeDate": 1463440221,
        "AfterDate": 1431817821
      }
      mandates = MangoPay.Mandates.all!(query)
  """
  def all!(query \\ %{}), do: _all!(nil, query)

  @doc """
  Cancel a mandate object.

  ## Examples
      {:ok, mandate} = MangoPay.Mandate.cancel("mandate_id")
  """
  def cancel(mandate_id) do
    _update(%{}, [resource(), "#{mandate_id}", "cancel"])
  end

  @doc """
  Cancel a mandate object. Raises on failure.

  ## Examples
      mandate = MangoPay.Mandate.cancel!("mandate_id")
  """
  def cancel!(mandate_id) do
    _update!(%{}, [resource(), "#{mandate_id}", "cancel"])
  end

  @doc """
  List all mandates for a user.

  ## Examples
      user_id = Id of user object
      query = %{
        "Page": 1,
        "Per_Page": 25,
        "Sort": "CreationDate:DESC",
        "BeforeDate": 1463440221,
        "AfterDate": 1431817821
      }
      {:ok, mandates} = MangoPay.Mandate.all_by_user(user_id, query)
  """
  def all_by_user(user_id, query \\ %{}) do
    _all([MangoPay.User.path(user_id), resource()], query)
  end

  @doc """
  List all mandates for a user. Raises on failure.

  ## Examples
      user_id = Id of a user object
      query = %{
        "Page": 1,
        "Per_Page": 25,
        "Sort": "CreationDate:DESC",
        "BeforeDate": 1463440221,
        "AfterDate": 1431817821
      }
      mandates = MangoPay.Mandate.all_by_user!(user_id, query)
  """
  def all_by_user!(user_id, query \\ %{}) do
    _all!([MangoPay.User.path(user_id), resource()], query)
  end

  @doc """
  List all mandates for a bank account.

  ## Examples
      user_id = Id of a User object
      bank_account_id = Id of a BankAccount object
      query = %{
        "Page": 1,
        "Per_Page": 25,
        "Sort": "CreationDate:DESC",
        "BeforeDate": 1463440221,
        "AfterDate": 1431817821
      }
      {:ok, mandates} = MangoPay.Mandate.all_by_bank_account(user_id, bank_account_id)
  """
  def all_by_bank_account(user_id, bank_account_id, query \\ %{}) do
    _all([MangoPay.User.path(user_id), MangoPay.BankAccount.path(bank_account_id), resource()], query)
  end

  @doc """
  List all mandates for a bank account. Raises on failure.

  ## Examples
      user_id = Id of a user object
      bank_account_id = Id of a bank account object
      query = %{
        "Page": 1,
        "Per_Page": 25,
        "Sort": "CreationDate:DESC",
        "BeforeDate": 1463440221,
        "AfterDate": 1431817821
      }
      mandates = MangoPay.Mandate.all_by_bank_account!(user_id, bank_account_id, query)
  """
  def all_by_bank_account!(user_id, bank_account_id, query \\ %{}) do
    _all!([MangoPay.User.path(user_id), MangoPay.BankAccount.path(bank_account_id), resource()], query)
  end
end
|
lib/mango_pay/mandate.ex
| 0.748536
| 0.464294
|
mandate.ex
|
starcoder
|
defmodule RList.Ruby do
@moduledoc """
Summarized all of Ruby's Array functions.
Functions corresponding to the following patterns are not implemented
- When a function with the same name already exists in Elixir.
- When a method name includes `!`.
- &, *, +, -, <<, <=>, ==, [], []=.
"""
@spec __using__(any) :: list
defmacro __using__(_opts) do
RUtils.define_all_functions!(__MODULE__)
end
@type type_pattern :: number() | String.t() | Range.t() | Regex.t()
@type type_enumerable :: Enumerable.t()
import REnum.Support
# https://ruby-doc.org/core-3.1.0/Array.html
# [:all?, :any?, :append, :assoc, :at, :bsearch, :bsearch_index, :clear, :collect, :collect!, :combination, :compact, :compact!, :concat, :count, :cycle, :deconstruct, :delete, :delete_at, :delete_if, :difference, :dig, :drop, :drop_while, :each, :each_index, :empty?, :eql?, :fetch, :fill, :filter, :filter!, :find_index, :first, :flatten, :flatten!, :hash, :include?, :index, :initialize_copy, :insert, :inspect, :intersect?, :intersection, :join, :keep_if, :last, :length, :map, :map!, :max, :min, :minmax, :none?, :old_to_s, :one?, :pack, :permutation, :pop, :prepend, :product, :push, :rassoc, :reject, :reject!, :repeated_combination, :repeated_permutation, :replace, :reverse, :reverse!, :reverse_each, :rindex, :rotate, :rotate!, :sample, :select, :select!, :shift, :shuffle, :shuffle!, :size, :slice, :slice!, :sort, :sort!, :sort_by!, :sum, :take, :take_while, :to_a, :to_ary, :to_h, :to_s, :transpose, :union, :uniq, :uniq!, :unshift, :values_at, :zip]
# |> RUtils.required_functions([List, REnum])
# ✔ append
# ✔ assoc
# × bsearch
# × bsearch_index
# ✔ clear
# ✔ combination
# × deconstruct
# ✔ delete_if
# ✔ difference
# ✔ dig
# ✔ each_index
# ✔ eql?
# ✔ fill
# hash TODO: Low priority
# ✔ index
# × initialize_copy
# ✔ insert
# ✔ inspect
# ✔ intersect?
# ✔ intersection
# ✔ keep_if
# ✔ length
# × old_to_s
# pack TODO: Low priority
# ✔ permutation
# ✔ pop
# ✔ prepend
# ✔ push
# ✔ rassoc
# ✔ repeated_combination
# ✔ repeated_permutation
# × replace
# ✔ rindex
# ✔ rotate
# ✔ sample
# ✔ shift
# ✔ size
# ✔ to_ary
# ✔ to_s
# ✔ transpose
# ✔ union
# ✔ unshift
# ✔ values_at
@doc """
Appends trailing elements.
## Examples
iex> [:foo, 'bar', 2]
iex> |> RList.push([:baz, :bat])
[:foo, 'bar', 2, :baz, :bat]
iex> [:foo, 'bar', 2]
iex> |> RList.push(:baz)
[:foo, 'bar', 2, :baz]
"""
@spec push(list(), list() | any) :: list()
def push(list, elements_or_element) do
list ++ List.wrap(elements_or_element)
end
@doc """
Returns [].
## Examples
iex> [[:foo, 0], [2, 4], [4, 5, 6], [4, 5]]
iex> |> RList.clear()
[]
"""
@spec clear(list()) :: []
def clear(list) when is_list(list), do: []
@doc """
Returns Stream that is each repeated combinations of elements of given list. The order of combinations is indeterminate.
## Examples
iex> RList.combination([1, 2, 3, 4], 1)
iex> |> Enum.to_list()
[[1],[2],[3],[4]]
iex> RList.combination([1, 2, 3, 4], 3)
iex> |> Enum.to_list()
[[1,2,3],[1,2,4],[1,3,4],[2,3,4]]
iex> RList.combination([1, 2, 3, 4], 0)
iex> |> Enum.to_list()
[[]]
iex> RList.combination([1, 2, 3, 4], 5)
iex> |> Enum.to_list()
[]
"""
@spec combination(list(), non_neg_integer()) :: type_enumerable
def combination(list, length) do
_combination(list, length) |> REnum.lazy()
end
@doc """
Calls the function with combinations of elements of given list; returns :ok. The order of combinations is indeterminate.
## Examples
iex> RList.combination([1, 2, 3, 4], 1, &(IO.inspect(&1)))
# [1]
# [2]
# [3]
# [4]
:ok
iex> RList.combination([1, 2, 3, 4], 3, &(IO.inspect(&1)))
# [1, 2, 3]
# [1, 2, 4]
# [1, 3, 4]
# [2, 3, 4]
:ok
iex> RList.combination([1, 2, 3, 4], 0, &(IO.inspect(&1)))
# []
:ok
iex> RList.combination([1, 2, 3, 4], 5, &(IO.inspect(&1)))
:ok
"""
@spec combination(list(), non_neg_integer, function()) :: :ok
def combination(list, n, func) do
combination(list, n) |> Enum.each(fn el -> func.(el) end)
end
def _combination(_elements, 0), do: [[]]
def _combination([], _), do: []
def _combination([head | tail], n) do
for(comb <- _combination(tail, n - 1), do: [head | comb]) ++ _combination(tail, n)
end
@doc """
Returns Stream that is each repeated combinations of elements of given list. The order of combinations is indeterminate.

## Examples
    iex> RList.repeated_combination([1, 2, 3], 1)
    iex> |> Enum.to_list()
    [[1], [2], [3]]
    iex> RList.repeated_combination([1, 2, 3], 2)
    iex> |> Enum.to_list()
    [[1, 1], [1, 2], [1, 3], [2, 2], [2, 3], [3, 3]]
    iex> RList.repeated_combination([1, 2, 3], 3)
    iex> |> Enum.to_list()
    [[1, 1, 1], [1, 1, 2], [1, 1, 3], [1, 2, 2], [1, 2, 3], [1, 3, 3], [2, 2, 2], [2, 2, 3], [2, 3, 3], [3, 3, 3]]
    iex> RList.repeated_combination([1, 2, 3], 0)
    iex> |> Enum.to_list()
    [[]]
    iex> RList.repeated_combination([1, 2, 3], 5)
    iex> |> Enum.to_list()
    [
    [1, 1, 1, 1, 1],
    [1, 1, 1, 1, 2],
    [1, 1, 1, 1, 3],
    [1, 1, 1, 2, 2],
    [1, 1, 1, 2, 3],
    [1, 1, 1, 3, 3],
    [1, 1, 2, 2, 2],
    [1, 1, 2, 2, 3],
    [1, 1, 2, 3, 3],
    [1, 1, 3, 3, 3],
    [1, 2, 2, 2, 2],
    [1, 2, 2, 2, 3],
    [1, 2, 2, 3, 3],
    [1, 2, 3, 3, 3],
    [1, 3, 3, 3, 3],
    [2, 2, 2, 2, 2],
    [2, 2, 2, 2, 3],
    [2, 2, 2, 3, 3],
    [2, 2, 3, 3, 3],
    [2, 3, 3, 3, 3],
    [3, 3, 3, 3, 3]
    ]
"""
@spec repeated_combination(list(), non_neg_integer()) :: type_enumerable
def repeated_combination(list, length) do
  # Build the combinations-with-repetition eagerly, expose lazily.
  list
  |> _repeated_combination(length)
  |> REnum.lazy()
end
@doc """
Calls the function with each repeated combinations of elements of given list; returns :ok. The order of combinations is indeterminate.

## Examples
    iex> RList.repeated_combination([1, 2, 3, 4], 2, &(IO.inspect(&1)))
    # [1, 1]
    # [1, 2]
    # [1, 3]
    # [2, 2]
    # [2, 3]
    # [3, 3]
    :ok
"""
@spec repeated_combination(list(), non_neg_integer, function()) :: :ok
def repeated_combination(list, n, func) do
  # Pass func straight to Enum.each/2; the previous wrapper
  # `fn el -> func.(el) end` added nothing.
  list
  |> _repeated_combination(n)
  |> Enum.each(func)
end
# Combinations with repetition: the head may be reused (pair it with every
# (n-1)-pick taken from the *whole* list) or retired for good (n-picks from
# the tail only). Picking 0 yields one empty pick; picking from nothing
# yields no picks.
def _repeated_combination(_list, 0), do: [[]]
def _repeated_combination([], _n), do: []

def _repeated_combination([head | rest] = whole, n) do
  reusing_head = for combo <- _repeated_combination(whole, n - 1), do: [head | combo]
  reusing_head ++ _repeated_combination(rest, n)
end
@doc """
Returns Stream that is each repeated permutations of elements of given list. The order of permutations is indeterminate.

## Examples
    iex> RList.permutation([1, 2, 3], 1)
    iex> |> Enum.to_list()
    [[1],[2],[3]]
    iex> RList.permutation([1, 2, 3], 2)
    iex> |> Enum.to_list()
    [[1,2],[1,3],[2,1],[2,3],[3,1],[3,2]]
    iex> RList.permutation([1, 2, 3])
    iex> |> Enum.to_list()
    [[1,2,3],[1,3,2],[2,1,3],[2,3,1],[3,1,2],[3,2,1]]
    iex> RList.permutation([1, 2, 3], 0)
    iex> |> Enum.to_list()
    [[]]
    iex> RList.permutation([1, 2, 3], 4)
    iex> |> Enum.to_list()
    []
"""
@spec permutation(list(), non_neg_integer() | nil) :: type_enumerable
def permutation(list, length \\ nil) do
  # Count once; the previous version recomputed Enum.count/1 in
  # every cond branch.
  len = Enum.count(list)
  n = length || len

  cond do
    # Ruby yields a single empty permutation for n <= 0.
    n <= 0 ->
      [[]]

    # Full-length permutations: no truncation or dedup needed.
    n == len ->
      _permutation(list)

    # More positions requested than elements available: no permutations.
    n > len ->
      []

    # Partial permutations: build full ones, cut each to n, drop duplicates.
    true ->
      _permutation(list)
      |> Enum.map(&Enum.take(&1, n))
      |> Enum.uniq()
  end
  |> REnum.lazy()
end

# Classic recursive permutations: pick each element as head, permute the
# rest. Recurses through the public permutation/1, so the inner
# enumerables go through REnum.lazy/1 as well.
defp _permutation(list),
  do: for(elem <- list, rest <- permutation(list -- [elem]), do: [elem | rest])
@doc """
Returns Stream that is each repeated permutations of elements of given list. The order of permutations is indeterminate.

## Examples
    iex> RList.repeated_permutation([1, 2], 1)
    iex> |> Enum.to_list()
    [[1],[2]]
    iex> RList.repeated_permutation([1, 2], 2)
    iex> |> Enum.to_list()
    [[1,1],[1,2],[2,1],[2,2]]
    iex> RList.repeated_permutation([1, 2], 3)
    iex> |> Enum.to_list()
    [[1,1,1],[1,1,2],[1,2,1],[1,2,2],[2,1,1],[2,1,2],[2,2,1],[2,2,2]]
    iex> RList.repeated_permutation([1, 2], 0)
    iex> |> Enum.to_list()
    [[]]
"""
@spec repeated_permutation(list(), non_neg_integer()) :: type_enumerable
def repeated_permutation(list, length) do
  _repeated_permutation(list, length) |> REnum.lazy()
end

# Length 0 has exactly one (empty) permutation. This clause must come
# before the empty-list clause so repeated_permutation([], 0) == [[]].
defp _repeated_permutation(_list, 0), do: [[]]
# An empty list has no repeated permutations of positive length, matching
# Ruby's [].repeated_permutation(n). The previous clause order returned
# [[]] here, which was incorrect.
defp _repeated_permutation([], _length), do: []

defp _repeated_permutation(list, length),
  do: for(x <- list, y <- _repeated_permutation(list, length - 1), do: [x | y])
@doc """
Returns differences between list1 and list2.

## Examples
    iex> [0, 1, 1, 2, 1, 1, 3, 1, 1]
    iex> |> RList.difference([1])
    [0, 2, 3]
    iex> [0, 1, 2]
    iex> |> RList.difference([4])
    [0, 1, 2]
"""
@spec difference(list(), list()) :: list()
def difference(list1, list2) do
  # Keep only the elements of list1 that do not appear in list2.
  Enum.reject(list1, &(&1 in list2))
end
@doc """
Finds and returns the element in nested elements that is specified by index and identifiers.

## Examples
    iex> [:foo, [:bar, :baz, [:bat, :bam]]]
    iex> |> RList.dig(1)
    [:bar, :baz, [:bat, :bam]]
    iex> [:foo, [:bar, :baz, [:bat, :bam]]]
    iex> |> RList.dig(1, [2])
    [:bat, :bam]
    iex> [:foo, [:bar, :baz, [:bat, :bam]]]
    iex> |> RList.dig(1, [2, 0])
    :bat
    iex> [:foo, [:bar, :baz, [:bat, :bam]]]
    iex> |> RList.dig(1, [2, 3])
    nil
"""
@spec dig(list(), integer, list()) :: any
def dig(list, index, identifiers \\ []) do
  element = Enum.at(list, index)

  # Each remaining identifier digs one level deeper; when none are left,
  # the element found at this level is the answer.
  case identifiers do
    [] -> element
    [next | rest] -> dig(element, next, rest)
  end
end
@doc """
Returns the index of a specified element.

## Examples
    iex> [:foo, "bar", 2, "bar"]
    iex> |> RList.index("bar")
    1
    iex> [2, 4, 6, 8]
    iex> |> RList.index(5..7)
    2
    iex> [2, 4, 6, 8]
    iex> |> RList.index(&(&1 == 8))
    3
"""
@spec index(list(), type_pattern | function()) :: any
def index(list, finder) when is_function(finder), do: Enum.find_index(list, finder)

# Non-function arguments (values, ranges, regexes, ...) are converted to a
# predicate via match_function/1 and dispatched to the clause above.
def index(list, pattern), do: index(list, match_function(pattern))
@doc """
Returns true if list1 == list2.

## Examples
    iex> [:foo, 'bar', 2]
    iex> |> RList.eql?([:foo, 'bar', 2])
    true
    iex> [:foo, 'bar', 2]
    iex> |> RList.eql?([:foo, 'bar', 3])
    false
"""
@spec eql?(list(), list()) :: boolean()
def eql?(list1, list2), do: list1 == list2
@doc """
Returns true if the list1 and list2 have at least one element in common, otherwise returns false.

## Examples
    iex> [1, 2, 3]
    iex> |> RList.intersect?([3, 4, 5])
    true
    iex> [1, 2, 3]
    iex> |> RList.intersect?([5, 6, 7])
    false
"""
@spec intersect?(list(), list()) :: boolean()
def intersect?(list1, list2) do
  # Short-circuits on the first common element instead of materializing
  # the whole intersection and counting it.
  set2 = MapSet.new(list2)
  Enum.any?(list1, &MapSet.member?(set2, &1))
end
@doc """
Returns a new list containing each element found both in list1 and in all of the given list2; duplicates are omitted.
The result preserves the order of `list1`, like Ruby's `Array#&`.

## Examples
    iex> [1, 2, 3]
    iex> |> RList.intersection([3, 4, 5])
    [3]
    iex> [1, 2, 3]
    iex> |> RList.intersection([5, 6, 7])
    []
    iex> [1, 2, 3]
    iex> |> RList.intersection([1, 2, 3])
    [1, 2, 3]
"""
@spec intersection(list(), list()) :: list()
def intersection(list1, list2) do
  # Filter list1 against a set built from list2 so the result keeps
  # list1's order deterministically. The previous implementation used
  # MapSet.intersection/2 and converted back to a list, whose ordering
  # is undefined — the doctests only passed by accident.
  set2 = MapSet.new(list2)

  list1
  |> Enum.filter(&MapSet.member?(set2, &1))
  |> Enum.uniq()
end
@doc """
Returns one or more random elements.

With no count (or a count of 1) a single element is returned, or `nil`
for an empty list. With a count greater than 1 a list of up to `n`
elements is always returned — even when fewer than `n` elements are
available — mirroring Ruby's `Array#sample`.
"""
def sample(list, n \\ 1)

# Ruby's [].sample returns nil; the previous version crashed on a failed
# [head | _] match here.
def sample([], 1), do: nil

def sample(list, 1), do: Enum.random(list)

def sample(list, n) do
  # For n > 1 always return a list. The previous version unwrapped a
  # single survivor to a bare element, so sample([x], 3) returned x
  # instead of [x].
  list
  |> Enum.shuffle()
  |> Enum.take(n)
end
# The @doc block is only attached when the running Elixir version supports
# the constructs used in the examples (gated by VersionManager); the fill
# clauses below are always compiled regardless.
if(VersionManager.support_version?()) do
@doc """
Fills the list with the provided value. The filler can be either a function or a fixed value.
## Examples
iex> RList.fill(~w[a b c d], "x")
["x", "x", "x", "x"]
iex> RList.fill(~w[a b c d], "x", 0..1)
["x", "x", "c", "d"]
iex> RList.fill(~w[a b c d], fn _, i -> i * i end)
[0, 1, 4, 9]
iex> RList.fill(~w[a b c d], fn _, i -> i * 2 end, 0..1)
[0, 2, "c", "d"]
"""
end
# fill/2 with a function: every element is replaced by
# filler_fun.(element, index) via Enum.with_index/2's function form.
@spec fill(list(), any) :: list()
def fill(list, filler_fun) when is_function(filler_fun) do
Enum.with_index(list, filler_fun)
end
# fill/2 with a plain value: every element becomes that value.
def fill(list, filler), do: Enum.map(list, fn _ -> filler end)
# fill/3: only indices inside the (inclusive) range are replaced; all
# other elements pass through unchanged.
@spec fill(list(), any, Range.t()) :: list()
def fill(list, filler_fun, a..b) when is_function(filler_fun) do
Enum.with_index(list, fn
x, i when i >= a and i <= b -> filler_fun.(x, i)
x, _i -> x
end)
end
# Constant filler over a range reuses the function clause above.
def fill(list, filler, fill_range), do: fill(list, fn _, _ -> filler end, fill_range)
@doc """
Returns a list containing the elements in list corresponding to the given selector(s).
The selectors may be either integer indices or ranges.

## Examples
    iex> RList.values_at(~w[a b c d e f], [1, 3, 5])
    ["b", "d", "f"]
    iex> RList.values_at(~w[a b c d e f], [1, 3, 5, 7])
    ["b", "d", "f", nil]
    iex> RList.values_at(~w[a b c d e f], [-1, -2, -2, -7])
    ["f", "e", "e", nil]
    iex> RList.values_at(~w[a b c d e f], [4..6, 3..5])
    ["e", "f", nil, "d", "e", "f"]
    iex> RList.values_at(~w[a b c d e f], 4..6)
    ["e", "f", nil]
"""
@spec values_at(list(), [integer | Range.t()] | Range.t()) :: list()
def values_at(list, indices) do
  # Expand every selector to plain integer indices (a bare integer maps
  # to itself, a range to all of its members), then look each one up.
  # Enum.at/2 yields nil for out-of-bounds indices, matching Ruby.
  indices
  |> Enum.flat_map(fn
    index when is_integer(index) -> [index]
    range -> Enum.to_list(range)
  end)
  |> Enum.map(&Enum.at(list, &1))
end
@doc """
Returns a new list by joining two lists, excluding any duplicates and preserving the order from the given lists.

## Examples
    iex> RList.union(["a", "b", "c"], [ "c", "d", "a"])
    ["a", "b", "c", "d"]
    iex> ["a"] |> RList.union(["e", "b"]) |> RList.union(["a", "c", "b"])
    ["a", "e", "b", "c"]
"""
@spec union(list(), list()) :: list()
def union(list_a, list_b) do
  # Concatenate then dedupe; Enum.uniq/1 keeps the first occurrence,
  # which preserves the order of list_a followed by list_b.
  Enum.uniq(list_a ++ list_b)
end
@doc """
Prepends elements to the front of the list, moving other elements upwards.

## Examples
    iex> RList.unshift(~w[b c d], "a")
    ["a", "b", "c", "d"]
    iex> RList.unshift(~w[b c d], [1, 2])
    [1, 2, "b", "c", "d"]
"""
@spec unshift(list(), any) :: list()
def unshift(list, prepend) when is_list(prepend) do
  # A list argument is spliced in front element-by-element.
  prepend ++ list
end

def unshift(list, prepend) do
  # Any other value is prepended as a single element.
  [prepend | list]
end
@doc """
Splits the list into the first n elements and the rest. Returns nil if the list is empty.

## Examples
    iex> RList.shift([])
    nil
    iex> RList.shift(~w[-m -q -filename])
    {["-m"], ["-q", "-filename"]}
    iex> RList.shift(~w[-m -q -filename], 2)
    {["-m", "-q"], ["-filename"]}
"""
@spec shift(list(), integer) :: {list(), list()} | nil
def shift(list, count \\ 1)

# Ruby's [].shift returns nil rather than an empty split.
def shift([], _count), do: nil

def shift(list, count) do
  Enum.split(list, count)
end
@doc """
Splits the list into the last n elements and the rest. Returns nil if the list is empty.

## Examples
    iex> RList.pop([])
    nil
    iex> RList.pop(~w[-m -q -filename test.txt])
    {["test.txt"], ["-m", "-q", "-filename"]}
    iex> RList.pop(~w[-m -q -filename test.txt], 2)
    {["-filename", "test.txt"], ["-m", "-q"]}
"""
@spec pop(list(), integer) :: {list(), list()} | nil
def pop(list, count \\ 1)

# Ruby's [].pop returns nil rather than an empty split.
def pop([], _count), do: nil

def pop(list, count) do
  # Take from the tail by reversing, splitting off `count` elements, and
  # restoring the original order of each half.
  {taken, rest} =
    list
    |> Enum.reverse()
    |> Enum.split(count)

  {Enum.reverse(taken), Enum.reverse(rest)}
end
@doc """
Returns the first element that is a List whose last element `==` the specified term.

## Examples
    iex> [{:foo, 0}, [2, 4], [4, 5, 6], [4, 5]]
    iex> |> RList.rassoc(4)
    [2, 4]
    iex> [{:foo, 0}, [2, 4], [4, 5, 6], [4, 5]]
    iex> |> RList.rassoc(0)
    {:foo, 0}
    iex> [[1, "one"], [2, "two"], [3, "three"], ["ii", "two"]]
    iex> |> RList.rassoc("two")
    [2, "two"]
    iex> [[1, "one"], [2, "two"], [3, "three"], ["ii", "two"]]
    iex> |> RList.rassoc("four")
    nil
    iex> [] |> RList.rassoc(4)
    nil
    iex> [[]] |> RList.rassoc(4)
    nil
    iex> [{}] |> RList.rassoc(4)
    nil
"""
@spec rassoc([list | tuple], any) :: list | nil
def rassoc(list, key) do
  # Empty/nil entries can never match and would crash the last-element
  # lookup below, so they are rejected up front.
  Enum.find(list, fn
    nil -> false
    [] -> false
    {} -> false
    element when is_tuple(element) -> elem(element, tuple_size(element) - 1) == key
    element -> List.last(element) == key
  end)
end
@doc """
Returns the first element in list that is an List whose first key == obj:

## Examples
    iex> [{:foo, 0}, [2, 4], [4, 5, 6], [4, 5]]
    iex> |> RList.assoc(4)
    [4, 5, 6]
    iex> [[:foo, 0], [2, 4], [4, 5, 6], [4, 5]]
    iex> |> RList.assoc(1)
    nil
    iex> [[:foo, 0], [2, 4], %{a: 4, b: 5, c: 6}, [4, 5]]
    iex> |> RList.assoc({:a, 4})
    %{a: 4, b: 5, c: 6}
"""
@spec assoc(list(), any) :: any
def assoc(list, key) do
  # Empty/nil entries can never match and would crash the first-element
  # lookup below, so they are rejected up front. Maps are compared via
  # their first {key, value} pair (Enum.to_list/1).
  Enum.find(list, fn
    nil -> false
    [] -> false
    {} -> false
    element when is_tuple(element) -> elem(element, 0) == key
    element -> element |> Enum.to_list() |> List.first() == key
  end)
end
@doc """
Returns the index of the last element found in in the list. Returns nil if no match is found.

## Examples
    iex> RList.rindex(~w[a b b b c], "b")
    3
    iex> RList.rindex(~w[a b b b c], "z")
    nil
    iex> RList.rindex(~w[a b b b c], fn x -> x == "b" end)
    3
"""
@spec rindex(list(), any) :: integer | nil
def rindex(list, finder) when is_function(finder) do
  # Search the reversed list, then translate the hit position back to an
  # index in the original list.
  case list |> Enum.reverse() |> Enum.find_index(finder) do
    nil -> nil
    idx_from_end -> length(list) - 1 - idx_from_end
  end
end

# A plain value is matched by equality through the function clause above.
def rindex(list, value), do: rindex(list, &(&1 == value))
@doc """
Rotate the list so that the element at count is the first element of the list.
Counts whose magnitude exceeds the list length wrap around, matching
Ruby's `Array#rotate`.

## Examples
    iex> RList.rotate(~w[a b c d])
    ["b", "c", "d", "a"]
    iex> RList.rotate(~w[a b c d], 2)
    ["c", "d", "a", "b"]
    iex> RList.rotate(~w[a b c d], -3)
    ["b", "c", "d", "a"]
    iex> RList.rotate(~w[a b c d], 6)
    ["c", "d", "a", "b"]
"""
@spec rotate(list(), integer) :: list()
def rotate(list, count \\ 1)

# Guard against rem-by-zero below; rotating an empty list is a no-op.
def rotate([], _count), do: []

def rotate(list, count) do
  # Reduce the shift modulo the length so counts beyond the list length
  # wrap around (Ruby semantics); the previous version returned the list
  # unchanged for count >= length. rem/2 keeps the sign of count, and
  # Enum.split/2 already handles negative amounts from the tail.
  {first, last} = Enum.split(list, rem(count, length(list)))
  last ++ first
end
@doc """
Returns list.

## Examples
    iex> RList.to_ary(["c", "d", "a", "b"])
    ["c", "d", "a", "b"]
"""
@spec to_ary(list()) :: list()
def to_ary(list) do
  # Identity, provided only for Ruby Array API parity.
  list
end
# Ruby-flavored aliases for existing Elixir functions.
defdelegate append(list, elements), to: __MODULE__, as: :push
defdelegate delete_if(list, func), to: Enum, as: :reject
defdelegate keep_if(list, func), to: Enum, as: :filter
defdelegate length(list), to: Enum, as: :count
defdelegate size(list), to: Enum, as: :count
defdelegate to_s(list), to: Kernel, as: :inspect
defdelegate inspect(list), to: Kernel, as: :inspect
defdelegate each_index(list, func), to: Enum, as: :with_index
defdelegate insert(list, index, element), to: List, as: :insert_at

@doc """
Transposes rows and columns, like Ruby's `Array#transpose`.

## Examples
    iex> RList.transpose([[1, 2], [3, 4]])
    [[1, 3], [2, 4]]
"""
@spec transpose([list()]) :: [list()]
def transpose(list_of_lists) do
  # Previously delegated straight to List.zip/1, which returns tuples
  # ([{1, 3}, {2, 4}]); Ruby's transpose returns lists, so each tuple is
  # converted back to a list.
  list_of_lists
  |> List.zip()
  |> Enum.map(&Tuple.to_list/1)
end

# NOTE(review): Ruby's Array#prepend is an alias of unshift, but this
# delegates to shift — confirm this naming is intentional before changing
# it, since callers may rely on the current behavior.
defdelegate prepend(list, count \\ 1), to: __MODULE__, as: :shift
defdelegate all_combination(list, length), to: __MODULE__, as: :repeated_combination
defdelegate all_combination(list, length, func), to: __MODULE__, as: :repeated_combination
end
|
lib/r_list/ruby.ex
| 0.667473
| 0.57063
|
ruby.ex
|
starcoder
|
defmodule Combination do
@moduledoc """
Provide a set of algorithms to generate combinations and permutations.
For source collection containing non-distinct elements, pipe the resultant list through `Enum.uniq/1`
to remove duplicate elements.
"""
@doc """
Generate combinations based on given collection.
## Examples
iex> 1..3 |> Combination.combine(2)
[[3, 2], [3, 1], [2, 1]]
"""
@spec combine(Enum.t(), non_neg_integer) :: [list]
def combine(collection, k) when is_integer(k) and k >= 0 do
list = Enum.to_list(collection)
list_length = Enum.count(list)
# Choosing more elements than available is treated as an error here
# (rather than returning []).
if k > list_length do
raise Enum.OutOfBoundsError
else
do_combine(list, list_length, k, [], [])
end
end
# Choosing 0 elements yields exactly one (empty) combination.
defp do_combine(_list, _list_length, 0, _pick_acc, _acc), do: [[]]
# optimization
defp do_combine(list, _list_length, 1, _pick_acc, _acc), do: list |> Enum.map(&[&1])
# General case: `pick_acc` holds the elements chosen so far, `acc` the
# finished combinations. Stream.unfold pairs each element with the
# sublist that follows it, so each element is only ever combined with
# later elements (no duplicate picks).
defp do_combine(list, list_length, k, pick_acc, acc) do
list
|> Stream.unfold(fn [h | t] -> {{h, t}, t} end)
|> Enum.take(list_length)
|> Enum.reduce(acc, fn {x, sublist}, acc ->
sublist_length = Enum.count(sublist)
pick_acc_length = Enum.count(pick_acc)
if k > pick_acc_length + 1 + sublist_length do
# insufficient elements in sublist to generate new valid combinations
acc
else
new_pick_acc = [x | pick_acc]
new_pick_acc_length = pick_acc_length + 1
case new_pick_acc_length do
# Reached the target size: record the finished combination.
^k -> [new_pick_acc | acc]
# Otherwise keep extending the pick from the remaining sublist.
_ -> do_combine(sublist, sublist_length, k, new_pick_acc, acc)
end
end
end)
end
@doc """
Generate all permutation of the collection, filtered by `filter` function.
The `filter` function filters the generated permutation before it is added
to the result list. It returns true by default, thus allowing all permutations.
## Example
iex> 1..3 |> Combination.permutate
[[1, 2, 3], [2, 1, 3], [3, 1, 2], [1, 3, 2], [2, 3, 1], [3, 2, 1]]
iex> 1..3 |> Combination.permutate(fn p -> Enum.at(p, 0) == 1 end)
[[1, 2, 3], [1, 3, 2]]
"""
@spec permutate(Enum.t(), (list -> as_boolean(term))) :: [list]
def permutate(collection, filter \\ fn _p -> true end) do
collection
|> Enum.to_list()
|> do_permutate(filter, [], [])
end
# All elements consumed: `pick_acc` is a complete permutation; keep it
# only if the filter accepts it.
defp do_permutate([], filter, pick_acc, acc) do
if filter.(pick_acc), do: [pick_acc | acc], else: acc
end
# Stream.unfold rotates through the list pairing each element with the
# remaining elements (note `t ++ [h]` keeps the rotation cycling), then
# each pairing recurses with that element appended to the pick.
defp do_permutate(list, filter, pick_acc, acc) do
list
|> Stream.unfold(fn [h | t] -> {{h, t}, t ++ [h]} end)
|> Enum.take(Enum.count(list))
|> Enum.reduce(acc, fn {h, t}, acc ->
do_permutate(t, filter, [h | pick_acc], acc)
end)
end
end
|
lib/combination.ex
| 0.890538
| 0.538134
|
combination.ex
|
starcoder
|
defmodule RateTheDub.Jikan do
@moduledoc """
Fetches information from the [Jikan](https://jikan.moe) API for MyAnimeList.
It is important to cache information from the API to be a good user and not hit
JIkan too hard. This will also make RateTheDub faster too!
"""
# Only characters with this role are considered when extracting voice
# actors (see get_characters_json!/1).
@character_role "Main"
# Upper bound on how many characters are pulled per series, to stay
# within Jikan/MAL terms of service.
@characters_taken 5
use Tesla
alias RateTheDub.Anime.AnimeSeries
alias RateTheDub.Characters.Character
alias RateTheDub.VoiceActors.Actor
# Tesla middleware stack: base URL, 2s timeout, retries, redirect
# following, logging, a local ETag cache, and JSON response decoding.
plug Tesla.Middleware.BaseUrl, "https://api.jikan.moe/v3"
plug Tesla.Middleware.Timeout, timeout: 2_000
plug Tesla.Middleware.Retry
plug Tesla.Middleware.FollowRedirects
plug Tesla.Middleware.Logger, debug: false
plug RateTheDub.ETagCache
plug Tesla.Middleware.DecodeJson
@doc """
Returns the Jikan search results of a given set of search terms terms, parsed
into an array of `AnimeSeries` structs.
## Examples
iex> search!("cowboy bebop")
[%AnimeSeries{}, ...]
"""
@spec search!(terms :: String.t()) :: [map]
def search!(""), do: []
def search!(nil), do: []
def search!(terms) when is_binary(terms) do
# Normalize the query, then keep only results that carry a MAL id.
terms = terms |> String.trim() |> String.downcase()
get!("/search/anime", query: [q: terms, page: 1, limit: 10]).body
|> Map.get("results", [])
|> Stream.filter(&Map.get(&1, "mal_id"))
|> Enum.map(&jikan_to_series/1)
end
@doc """
Returns the JSON data of an Anime Series from Jikan parsed into Elixir but not
yet into known structures.
"""
@spec get_series_json!(id :: integer) :: map
def get_series_json!(id) when is_integer(id) do
# cache: false bypasses the ETag cache for this request.
get!("/anime/#{id}", opts: [cache: false]).body
end
@doc """
Returns the JSON data of the characters associated with an Anime Series from Jikan.
From this the voice actor information will be pulled out.
In order to properly respect Jikan and MAL's terms of service this function
intentionally limits to only "Main" characters and a limit set by
`@characters_taken`.
"""
@spec get_characters_json!(id :: integer) :: [map]
def get_characters_json!(id) when is_integer(id) do
get!("/anime/#{id}/characters_staff", opts: [cache: false])
|> Map.get(:body)
|> Map.get("characters")
|> Stream.filter(&(&1["role"] == @character_role))
|> Enum.take(@characters_taken)
end
@doc """
Returns all the information for the series with the given MAL ID, this
function is the primary output of this entire module.
This returns a tuple of the series, characters, actors, and the relations
between the characters and actors.
"""
@spec get_series_everything!(id :: integer) ::
{%AnimeSeries{}, [%Character{}], [%Actor{}], [Keyword.t()]}
def get_series_everything!(id) when is_integer(id) do
# One characters fetch feeds both the character structs and the
# actor/relation extraction; the dubbed-in languages are derived from
# the actors and folded into the series struct.
char_json = get_characters_json!(id)
characters = jikan_to_characters(char_json)
{actors, relations} = jikan_to_voice_actors(char_json)
langs = actors_to_languages(actors)
series =
get_series_json!(id)
|> jikan_to_series(langs)
{series, characters, actors, relations}
end
# Private data transformation functions
# Maps a raw Jikan series JSON map onto an AnimeSeries struct; `langs`
# becomes the dubbed_in list.
@spec jikan_to_series(series :: map, langs :: [String.t()]) :: %AnimeSeries{}
defp jikan_to_series(series, langs \\ []) do
%AnimeSeries{
mal_id: series["mal_id"],
title: series["title"],
# TODO get translated titles
title_tr: %{},
poster_url: series["image_url"],
streaming: %{},
featured_in: nil,
dubbed_in: langs,
url: series["url"]
}
end
# Maps raw Jikan character JSON maps onto Character structs.
@spec jikan_to_characters(char_json :: [map]) :: [%Character{}]
defp jikan_to_characters(char_json) do
char_json
|> Enum.map(fn c ->
%Character{
mal_id: c["mal_id"],
name: c["name"],
picture_url: c["image_url"],
url: c["url"]
}
end)
end
# Builds Actor structs plus [character_id:, actor_id:] relation keyword
# lists from the character JSON, returned as two parallel lists.
@spec jikan_to_voice_actors(char_json :: [map]) :: {[%Actor{}], [Keyword.t()]}
defp jikan_to_voice_actors(char_json) do
char_json
|> Stream.flat_map(&chara_to_voice_actors/1)
|> Enum.map(fn {c, a} ->
{
%Actor{
mal_id: a["mal_id"],
picture_url: a["image_url"],
name: a["name"],
language: RateTheDub.Locale.en_name_to_code(a["language"]),
url: a["url"]
},
[character_id: c, actor_id: a["mal_id"]]
}
end)
|> Enum.unzip()
end
# Helper functions
# Groups a character's voice actors into consecutive same-language chunks
# and keeps only the first actor of each chunk, paired with the
# character's MAL id.
@spec chara_to_voice_actors(chara :: map) :: [map]
defp chara_to_voice_actors(chara) do
chara
|> Map.get("voice_actors")
|> Stream.chunk_while(
[],
&chunk_by_language/2,
&{:cont, &1}
)
|> Enum.map(&{chara["mal_id"], List.first(&1)})
end
# Chunking step: start a new chunk whenever the language differs from the
# current chunk's language; assumes same-language actors are adjacent in
# the Jikan payload.
defp chunk_by_language(a, []), do: {:cont, [a]}
@spec chunk_by_language(a :: map, acc :: [map]) :: {:cont, [map]} | {:cont, [map], [map]}
defp chunk_by_language(a, [f | _] = acc) do
if a["language"] == f["language"] do
{:cont, [a | acc]}
else
{:cont, Enum.reverse(acc), [a]}
end
end
# Unique list of language codes across the given actors.
@spec actors_to_languages(actors :: [%Actor{}]) :: [String.t()]
defp actors_to_languages(actors) do
actors
|> Stream.map(&Map.get(&1, :language))
|> Enum.uniq()
end
end
|
lib/ratethedub/jikan.ex
| 0.775095
| 0.527864
|
jikan.ex
|
starcoder
|
defmodule Ash.Changeset.ManagedRelationshipHelpers do
@moduledoc """
Tools for introspecting managed relationships.
Extensions can use this to look at an argument that will be passed
to a `manage_relationship` change and determine what their behavior
should be. For example, AshAdmin uses these to find out what kind of
nested form it should offer for each argument that manages a relationship.
"""
# Normalizes the on_no_match/on_missing/on_match/on_lookup options into
# fully-expanded tuples: shorthand atoms are resolved to the primary
# action names of the involved resources, with extra join-resource action
# names added for many_to_many relationships.
def sanitize_opts(relationship, opts) do
[
on_no_match: :ignore,
on_missing: :ignore,
on_match: :ignore,
on_lookup: :ignore
]
|> Keyword.merge(opts)
|> Keyword.update!(:on_no_match, fn
:create when relationship.type == :many_to_many ->
action = Ash.Resource.Info.primary_action!(relationship.destination, :create)
join_action = Ash.Resource.Info.primary_action!(relationship.through, :create)
{:create, action.name, join_action.name, []}
{:create, action_name} when relationship.type == :many_to_many ->
join_action = Ash.Resource.Info.primary_action!(relationship.through, :create)
{:create, action_name, join_action.name, []}
:create ->
action = Ash.Resource.Info.primary_action!(relationship.destination, :create)
{:create, action.name}
other ->
other
end)
|> Keyword.update!(:on_missing, fn
:destroy when relationship.type == :many_to_many ->
action = Ash.Resource.Info.primary_action!(relationship.destination, :destroy)
join_action = Ash.Resource.Info.primary_action!(relationship.through, :destroy)
{:destroy, action.name, join_action.name}
{:destroy, action_name} when relationship.type == :many_to_many ->
join_action = Ash.Resource.Info.primary_action!(relationship.through, :destroy)
{:destroy, action_name, join_action.name}
:destroy ->
action = Ash.Resource.Info.primary_action!(relationship.destination, :destroy)
{:destroy, action.name}
:unrelate ->
{:unrelate, nil}
other ->
other
end)
|> Keyword.update!(:on_match, fn
:update when relationship.type == :many_to_many ->
update = Ash.Resource.Info.primary_action!(relationship.destination, :update)
join_update = Ash.Resource.Info.primary_action!(relationship.through, :update)
{:update, update.name, join_update.name, []}
{:update, update} when relationship.type == :many_to_many ->
join_update = Ash.Resource.Info.primary_action!(relationship.through, :update)
{:update, update, join_update.name, []}
{:update, update, join_update} when relationship.type == :many_to_many ->
{:update, update, join_update, []}
:update ->
action = Ash.Resource.Info.primary_action!(relationship.destination, :update)
{:update, action.name}
:unrelate ->
{:unrelate, nil}
other ->
other
end)
|> Keyword.update!(:on_lookup, fn
# on_lookup expansion depends on the relationship type: many_to_many
# resolves a create on the join resource, has_many/has_one an update on
# the destination, and the fallback (belongs_to) an update on the source.
key when relationship.type == :many_to_many and key in [:relate, :relate_and_update] ->
read = Ash.Resource.Info.primary_action(relationship.destination, :read)
create = Ash.Resource.Info.primary_action(relationship.through, :create)
{key, create.name, read.name}
{key, action}
when relationship.type == :many_to_many and
key in [:relate, :relate_and_update] ->
read = Ash.Resource.Info.primary_action(relationship.destination, :read)
{key, action, read.name}
{key, action, read}
when relationship.type == :many_to_many and
key in [:relate, :relate_and_update] ->
{key, action, read}
key
when relationship.type in [:has_many, :has_one] and key in [:relate, :relate_and_update] ->
read = Ash.Resource.Info.primary_action(relationship.destination, :read)
update = Ash.Resource.Info.primary_action(relationship.destination, :update)
{key, update.name, read.name}
{key, update}
when relationship.type in [:has_many, :has_one] and key in [:relate, :relate_and_update] ->
read = Ash.Resource.Info.primary_action(relationship.destination, :read)
{key, update, read.name}
key when key in [:relate, :relate_and_update] ->
read = Ash.Resource.Info.primary_action(relationship.destination, :read)
update = Ash.Resource.Info.primary_action(relationship.source, :update)
{key, update.name, read.name}
{key, update} when key in [:relate, :relate_and_update] ->
read = Ash.Resource.Info.primary_action(relationship.destination, :read)
{key, update, read.name}
other ->
other
end)
end
# Returns the destination/join actions implied by the on_match option,
# or nil when matching performs no destination action.
def on_match_destination_actions(opts, relationship) do
opts = sanitize_opts(relationship, opts)
cond do
opts[:on_match] in [:ignore, :error] ->
nil
unwrap(opts[:on_match]) == :unrelate ->
nil
# :no_match / :missing defer to the corresponding option's actions.
opts[:on_match] == :no_match ->
on_no_match_destination_actions(opts, relationship)
opts[:on_match] == :missing ->
on_missing_destination_actions(opts, relationship)
unwrap(opts[:on_match]) == :update ->
case opts[:on_match] do
:update ->
all(destination(primary_action_name(relationship.destination, :update)))
{:update, action_name} ->
all(destination(action_name))
{:update, action_name, join_table_action_name, keys} ->
all([destination(action_name), join(join_table_action_name, keys)])
end
end
end
# Returns the destination/join actions implied by the on_no_match option,
# or nil when no creation happens.
def on_no_match_destination_actions(opts, relationship) do
opts = sanitize_opts(relationship, opts)
case opts[:on_no_match] do
value when value in [:ignore, :error] ->
nil
:create ->
all(destination(primary_action_name(relationship.destination, :create)))
{:create, action_name} ->
all(destination(action_name))
{:create, _action_name, join_table_action_name, :all} ->
all([join(join_table_action_name, :all)])
{:create, action_name, join_table_action_name, keys} ->
all([destination(action_name), join(join_table_action_name, keys)])
end
end
# Returns the destination/join actions implied by the on_missing option,
# or nil when missing records are left alone.
def on_missing_destination_actions(opts, relationship) do
opts = sanitize_opts(relationship, opts)
case opts[:on_missing] do
:destroy ->
all(destination(primary_action_name(relationship.destination, :destroy)))
{:destroy, action_name} ->
all(destination(action_name))
{:destroy, action_name, join_resource_action_name} ->
all([destination(action_name), join(join_resource_action_name, [])])
_ ->
nil
end
end
# Returns the update action used when a lookup relates-and-updates,
# or nil for plain :relate / :ignore lookups.
def on_lookup_update_action(opts, relationship) do
opts = sanitize_opts(relationship, opts)
if unwrap(opts[:on_lookup]) not in [:relate, :ignore] do
case opts[:on_lookup] do
:relate_and_update when relationship.type == :many_to_many ->
join(primary_action_name(relationship.through, :create), [])
{:relate_and_update, action_name} when relationship.type == :many_to_many ->
join(action_name, action_name)
{:relate_and_update, action_name, _} when relationship.type == :many_to_many ->
join(action_name, [])
{:relate_and_update, action_name, _, keys} when relationship.type == :many_to_many ->
join(action_name, keys)
:relate_and_update when relationship.type in [:has_one, :has_many] ->
destination(primary_action_name(relationship.destination, :update))
:relate_and_update when relationship.type in [:belongs_to] ->
source(primary_action_name(relationship.source, :update))
{:relate_and_update, action_name} ->
destination(action_name)
{:relate_and_update, action_name, _} ->
destination(action_name)
end
end
end
# Drops nil entries; returns nil instead of an empty list.
defp all(values) do
case Enum.filter(List.wrap(values), & &1) do
[] -> nil
values -> values
end
end
# Tag helpers; each returns nil when no action name is available.
defp source(nil), do: nil
defp source(action), do: {:source, action}
defp destination(nil), do: nil
defp destination(action), do: {:destination, action}
defp join(nil, _), do: nil
defp join(action_name, keys), do: {:join, action_name, keys}
# Predicates used by callers to decide which records must be considered.
def could_handle_missing?(opts) do
opts[:on_missing] not in [:ignore, :error]
end
def could_lookup?(opts) do
opts[:on_lookup] != :ignore
end
def could_create?(opts) do
unwrap(opts[:on_no_match]) == :create || unwrap(opts[:on_match]) == :no_match
end
def could_update?(opts) do
unwrap(opts[:on_match]) not in [:ignore, :no_match, :missing]
end
# The related records only need to be loaded when something other than
# ignoring/creating could happen to them.
def must_load?(opts) do
only_creates_or_ignores? =
unwrap(opts[:on_match]) in [:no_match, :ignore] &&
unwrap(opts[:on_no_match]) in [:create, :ignore]
can_skip_load? = opts[:on_missing] == :ignore && only_creates_or_ignores?
not can_skip_load?
end
# Looks up a primary action's name; nil when the resource has none.
defp primary_action_name(resource, type) do
primary_action = Ash.Resource.Info.primary_action(resource, type)
if primary_action do
primary_action.name
else
primary_action
end
end
# Extracts the leading tag from an option that may be an atom or a tuple.
defp unwrap(value) when is_atom(value), do: value
defp unwrap(tuple) when is_tuple(tuple), do: elem(tuple, 0)
defp unwrap(value), do: value
end
|
lib/ash/changeset/managed_relationship_helpers.ex
| 0.799129
| 0.427516
|
managed_relationship_helpers.ex
|
starcoder
|
defmodule Phoenix.NotAcceptableError do
@moduledoc """
Raised when one of the `accept*` headers is not accepted by the server.
This exception is commonly raised by `Phoenix.Controller.accepts/2`
which negotiates the media types the server is able to serve with
the contents the client is able to render.
If you are seeing this error, you should check if you are listing
the desired formats in your `:accepts` plug or if you are setting
the proper accept header in the client. The exception contains the
acceptable mime types in the `accepts` field.
"""
# plug_status: 406 makes Plug render this as HTTP 406 Not Acceptable;
# `accepts` carries the mime types the server could have served.
defexception message: nil, accepts: [], plug_status: 406
end
defmodule Phoenix.MissingParamError do
  @moduledoc """
  Raised when a key is expected to be present in the request parameters,
  but is not.
  This exception is raised by `Phoenix.Controller.scrub_params/2` which:
  * Checks to see if the required_key is present (can be empty)
  * Changes all empty parameters to nils ("" -> nil).
  If you are seeing this error, you should handle the error and surface it
  to the end user. It means that there is a parameter missing from the request.
  """
  defexception [:message, plug_status: 400]

  # Builds the exception from a `[key: value]` argument; plug_status 400
  # makes Plug render it as HTTP 400 Bad Request.
  def exception(key: value) do
    %__MODULE__{
      message:
        "expected key #{inspect(value)} to be present in params, " <>
          "please send the expected key or adapt your scrub_params/2 call"
    }
  end
end
# Raised when no clause of a controller action matches the request; the
# plug_status of 400 makes Plug render it as HTTP 400 Bad Request.
defmodule Phoenix.ActionClauseError do
defexception [message: nil, plug_status: 400]
# Builds the message from the :controller, :action and :params entries of
# the given options; all three are required.
def exception(opts) do
controller = Keyword.fetch!(opts, :controller)
action = Keyword.fetch!(opts, :action)
params = Keyword.fetch!(opts, :params)
msg = """
could not find a matching #{inspect controller}.#{action} clause
to process request. This typically happens when there is a
parameter mismatch but may also happen when any of the other
action arguments do not match. The request parameters are:
#{inspect params}
"""
%Phoenix.ActionClauseError{message: msg}
end
end
|
lib/phoenix/exceptions.ex
| 0.749271
| 0.542439
|
exceptions.ex
|
starcoder
|
defmodule Data.Parser.BuiltIn do
@moduledoc """
Parsers for built-in Elixir data types.
"""
alias Error
alias FE.Result
alias Data.Parser
@doc """
Creates a parser that successfully parses `integer`s, and returns the
domain error `:not_an_integer` for all other inputs.
## Examples
    iex> Data.Parser.BuiltIn.integer().(1)
    {:ok, 1}
    iex> {:error, e} = Data.Parser.BuiltIn.integer().(1.0)
    ...> Error.reason(e)
    :not_an_integer
    iex> {:error, e} = Data.Parser.BuiltIn.integer().(:hi)
    ...> Error.reason(e)
    :not_an_integer
"""
@spec integer() :: Parser.t(integer, Error.t())
def integer do
  # Accept anything the is_integer/1 guard admits; everything else is a
  # domain error.
  fn
    value when is_integer(value) ->
      Result.ok(value)

    _ ->
      :not_an_integer
      |> Error.domain()
      |> Result.error()
  end
end
@doc """
Creates a parser that successfully parses `float`s, and returns the
domain error `:not_a_float` for all other inputs.
## Examples
    iex> Data.Parser.BuiltIn.float().(1.0)
    {:ok, 1.0}
    iex> {:error, e} = Data.Parser.BuiltIn.float().(1)
    ...> Error.reason(e)
    :not_a_float
    iex> {:error, e} = Data.Parser.BuiltIn.float().(:hi)
    ...> Error.reason(e)
    :not_a_float
"""
@spec float() :: Parser.t(float, Error.t())
def float do
  # Accept anything the is_float/1 guard admits (the old local name
  # `int` was misleading); everything else is a domain error.
  fn
    value when is_float(value) ->
      Result.ok(value)

    _ ->
      :not_a_float
      |> Error.domain()
      |> Result.error()
  end
end
@doc """
Creates a parser that succesfully parses `String.t`s (a.k.a binaries), and
returns the domain error `:not_a_string` for all other inputs.
## Examples
    iex> Data.Parser.BuiltIn.string().("hi")
    {:ok, "hi"}
    iex> {:error, e} = Data.Parser.BuiltIn.string().('hi')
    ...> Error.reason(e)
    :not_a_string
    iex> {:error, e} = Data.Parser.BuiltIn.string().(:hi)
    ...> Error.reason(e)
    :not_a_string
"""
@spec string() :: Parser.t(String.t(), Error.t())
def string() do
  # Strings are binaries, so is_binary/1 is the right guard here;
  # charlists (e.g. 'hi') are lists and are rejected.
  fn
    value when is_binary(value) ->
      Result.ok(value)

    _ ->
      :not_a_string
      |> Error.domain()
      |> Result.error()
  end
end
@doc """
Creates a parser that successfully parses `boolean`s, and returns the
domain error `:not_a_boolean` for all other inputs.
## Examples
iex> Data.Parser.BuiltIn.boolean().(true)
{:ok, true}
iex> Data.Parser.BuiltIn.boolean().(false)
{:ok, false}
iex> {:error, e} = Data.Parser.BuiltIn.boolean().(1.0)
...> Error.reason(e)
:not_a_boolean
iex> {:error, e} = Data.Parser.BuiltIn.boolean().([:truth, :or, :dare])
...> Error.reason(e)
:not_a_boolean
"""
@spec boolean() :: Parser.t(boolean(), Error.t())
def boolean do
fn
bool when is_boolean(bool) -> Result.ok(bool)
_other -> Error.domain(:not_a_boolean) |> Result.error()
end
end
@doc """
Creates a parser that successfully parses `atom`s, and returns the
domain error `:not_an_atom` for all other inputs.
## Examples
iex> Data.Parser.BuiltIn.atom().(:atom)
{:ok, :atom}
iex> Data.Parser.BuiltIn.atom().(:other_atom)
{:ok, :other_atom}
iex> {:error, e} = Data.Parser.BuiltIn.atom().(1.0)
...> Error.reason(e)
:not_an_atom
iex> {:error, e} = Data.Parser.BuiltIn.atom().(["truth", "or", "dare"])
...> Error.reason(e)
:not_an_atom
"""
@spec atom() :: Parser.t(atom(), Error.t())
def atom do
fn
atom when is_atom(atom) -> Result.ok(atom)
_other -> Error.domain(:not_an_atom) |> Result.error()
end
end
@doc """
Creates a parser that successfully parses `Date.t`s or `String.t` that
represent legitimate `Date.t`s.
Returns a domain error representing the parse failure if
the string input cannot be parsed, and the domain error `:not_a_date`
for all other inputs.
## Examples
iex> {:ok, d} = Data.Parser.BuiltIn.date().(~D[1999-12-31])
...> d
~D[1999-12-31]
iex> {:ok, d} = Data.Parser.BuiltIn.date().("1999-12-31")
...> d
~D[1999-12-31]
iex> {:error, e} = Data.Parser.BuiltIn.date().("19991232")
...> Error.reason(e)
:invalid_format
iex> {:error, e} = Data.Parser.BuiltIn.date().("1999-12-32")
...> Error.reason(e)
:invalid_date
iex> {:error, e} = Data.Parser.BuiltIn.date().(123456789)
...> Error.reason(e)
:not_a_date
"""
@spec date() :: Parser.t(Date.t(), Error.t())
def date do
fn
%Date{} = date ->
Result.ok(date)
string when is_binary(string) ->
case Date.from_iso8601(string) do
{:ok, d} -> Result.ok(d)
{:error, reason} -> Error.domain(reason) |> Result.error()
end
_other ->
Error.domain(:not_a_date) |> Result.error()
end
end
@doc """
Creates a parser that successfully parses `DateTime.t`s or `String.t` that
represent legitimate `DateTime.t`s.
Returns a domain error representing the parse failure if the string input
cannot be parsed, and the domain error `:not_a_datetime` for all other inputs.
## Examples
iex> Data.Parser.BuiltIn.datetime().(~U[1999-12-31 23:59:59Z])
{:ok, ~U[1999-12-31 23:59:59Z]}
iex> Data.Parser.BuiltIn.datetime().("1999-12-31 23:59:59Z")
{:ok, ~U[1999-12-31 23:59:59Z]}
iex> {:error, e} = Data.Parser.BuiltIn.datetime().("1999-12-32 23:59:59Z")
...> Error.reason(e)
:invalid_date
iex> {:error, e} = Data.Parser.BuiltIn.datetime().("1999-12-31 23:59:99Z")
...> Error.reason(e)
:invalid_time
iex> {:error, e} = Data.Parser.BuiltIn.datetime().("1999-12-31 23:59:59")
...> Error.reason(e)
:missing_offset
iex> {:error, e} = Data.Parser.BuiltIn.datetime().(123456789)
...> Error.reason(e)
:not_a_datetime
"""
@spec datetime() :: Parser.t(DateTime.t(), Error.t())
def datetime do
fn
%DateTime{} = datetime ->
Result.ok(datetime)
string when is_binary(string) ->
case DateTime.from_iso8601(string) do
{:ok, dt, _offset} -> Result.ok(dt)
{:error, reason} -> Error.domain(reason) |> Result.error()
end
_other ->
Error.domain(:not_a_datetime) |> Result.error()
end
end
@doc """
Creates a parser that successfully parses `NaiveDateTime.t`s or `String.t` that
represent legitimate `NaiveDateTime.t`s.
Returns a domain error representing the parse failure if the string input
cannot be parsed, and the domain error `:not_a_naive_datetime` for all other
inputs.
## Examples
iex> Data.Parser.BuiltIn.naive_datetime.(~N[1999-12-31 23:59:59])
{:ok, ~N[1999-12-31 23:59:59]}
iex> Data.Parser.BuiltIn.naive_datetime.("1999-12-31 23:59:59")
{:ok, ~N[1999-12-31 23:59:59]}
iex> {:error, e} = Data.Parser.BuiltIn.naive_datetime.("1999-12-32 23:59:59")
...> Error.reason(e)
:invalid_date
iex> {:error, e} = Data.Parser.BuiltIn.naive_datetime.("1999-12-31 23:59:99")
...> Error.reason(e)
:invalid_time
iex> {:error, e} = Data.Parser.BuiltIn.naive_datetime.(123456789)
...> Error.reason(e)
:not_a_naive_datetime
"""
@spec naive_datetime() :: Parser.t(NaiveDateTime.t(), Error.t())
def naive_datetime do
fn
%NaiveDateTime{} = naive_datetime ->
Result.ok(naive_datetime)
string when is_binary(string) ->
case NaiveDateTime.from_iso8601(string) do
{:ok, ndt} -> Result.ok(ndt)
{:error, reason} -> Error.domain(reason) |> Result.error()
end
_other ->
Error.domain(:not_a_naive_datetime) |> Result.error()
end
end
@doc """
Creates a parser that successfully parses strings representing either
`Integer`s or `Float`s.
Returns a domain error detailing the parse failure on bad inputs.
Look out! Partial results, such as that of Integer.parse("abc123"), still
count as errors!
## Examples
iex> Data.Parser.BuiltIn.string_of(Float).("1.1")
{:ok, 1.1}
iex> {:error, e} = Data.Parser.BuiltIn.string_of(Float).("abc")
...> Error.reason(e)
:not_parseable_as_float
...> Error.details(e)
%{input: "abc", native_parser_output: :error}
iex> Data.Parser.BuiltIn.string_of(Integer).("1234567890")
{:ok, 1234567890}
iex> {:error, e} = Data.Parser.BuiltIn.string_of(Integer).("123abc")
...> Error.reason(e)
:not_parseable_as_integer
...> Error.details(e)
%{input: "123abc", native_parser_output: {123, "abc"}}
iex> {:error, e} = Data.Parser.BuiltIn.string_of(Integer).([])
...> Error.reason(e)
:not_a_string
"""
@spec string_of(Integer | Float) :: Parser.t(integer() | float(), Error.t())
def string_of(mod) when mod in [Integer, Float] do
mod_downcase = String.downcase("#{inspect(mod)}")
fn input ->
case is_binary(input) && mod.parse(input) do
{n, ""} ->
Result.ok(n)
false ->
Error.domain(:not_a_string, %{input: input})
|> Result.error()
output ->
Error.domain(
:"not_parseable_as_#{mod_downcase}",
%{input: input, native_parser_output: output}
)
|> Result.error()
end
end
end
end
|
lib/data/parser/built_in.ex
| 0.920616
| 0.562867
|
built_in.ex
|
starcoder
|
defmodule ExCO2Mini.Decoder do
  require Logger
  use Bitwise

  @moduledoc """
  Decodes packets from the CO₂Mini device.
  """

  # NOTE(review): the eight-byte key literal was redacted to "<KEY>" in this
  # copy of the file (likely by a secret scrubber). The line below will not
  # compile until the original list of eight integers is restored.
  @key [<KEY>

  @doc """
  Returns a list of eight integers, representing the eight-byte key used to
  communicate with the device.
  """
  def key, do: @key

  @doc """
  Decodes (and checksums) an eight-byte data packet received from the device.
  Returns `{key, value}`, where `key` is an 8-bit integer and `value` is a
  16-bit integer.
  Raises an error if the data packet does not pass the checksum routine.
  """
  def decode(<<data::bytes-size(8)>>) do
    data
    |> decrypt(@key)
    |> checksum!()
    |> extract()
  end

  # The bytes of "Htemp99e" with each byte's two nibbles swapped; used as the
  # per-byte subtraction table in the final step of decrypt/2.
  @ctmp [0x48, 0x74, 0x65, 0x6D, 0x70, 0x39, 0x39, 0x65]
        |> Enum.map(fn n -> (n >>> 4 ||| n <<< 4) &&& 0xFF end)

  # Byte-reordering table: output byte i is taken from input byte @shuffle[i].
  @shuffle [2, 4, 0, 7, 1, 6, 5, 3]

  # Honestly, this is pretty voodoo.
  # It's taken directly from the hackaday.io project,
  # but reformulated to more Elixir-style transformations.
  defp decrypt(<<data::bytes-size(8)>>, key) do
    @shuffle
    # Use @shuffle as a list of indices to extract:
    |> Enum.map(&:binary.at(data, &1))
    # XOR with the key:
    |> Enum.zip(key)
    |> Enum.map(fn {p1, key} -> p1 ^^^ key end)
    # Zip indices [7, 0..6] (shifted) with indices [0..7] (plain)
    |> zip_with_shifted_list(1)
    # Take shifted (sx) + plain (x) and do some bitwise math on them:
    |> Enum.map(fn {sx, x} -> (x >>> 3 ||| sx <<< 5) &&& 0xFF end)
    # Zip with @ctmp and do more bitwise math:
    |> Enum.zip(@ctmp)
    |> Enum.map(fn {x, ct} -> 0x100 + x - ct &&& 0xFF end)
    # Finally, dump to a binary (note: not `String`!) again.
    |> :erlang.list_to_binary()
  end

  # Pairs each element with the element `shift` positions earlier (wrapping
  # around the end of the list).
  defp zip_with_shifted_list(list, shift) do
    shift_list(list, shift)
    |> Enum.zip(list)
  end

  # Rotates `list` right by `shift` positions, e.g. [0..7] -> [7, 0..6].
  defp shift_list(list, shift) when shift > 0 do
    Enum.take(list, -shift) ++ Enum.drop(list, -shift)
  end

  # Validates the decrypted packet: byte 5 must be 0x0D, byte 4 must equal the
  # low byte of b1+b2+b3, and the trailing three bytes must be zero. Returns
  # the packet unchanged on success, raises on any violation.
  defp checksum!(<<b1, b2, b3, b4, b5, b6, b7, b8>> = data) do
    cond do
      b5 != 0x0D ->
        raise "Checksum failed (b5): #{inspect(data)}"

      (b1 + b2 + b3 &&& 0xFF) != b4 ->
        raise "Checksum failed (b123 vs b4): #{inspect(data)}"

      {b6, b7, b8} != {0, 0, 0} ->
        raise "Checksum failed (b678): #{inspect(data)}"

      true ->
        data
    end
  end

  # Pulls {key, value} out of a validated packet: byte 1 is the key, bytes 2-3
  # are the big-endian 16-bit value; the remaining bytes were checked above.
  defp extract(<<key, value::size(16), _chksum, _0x0D, 0, 0, 0>>) do
    {key, value}
  end
end
|
lib/ex_co2_mini/decoder.ex
| 0.859678
| 0.576125
|
decoder.ex
|
starcoder
|
defmodule Mix.Tasks.Cloak.Migrate do
  @moduledoc """
  Migrates a schema table to a new encryption cipher.

  ## Rationale

  Cloak vaults will automatically decrypt fields which were encrypted
  by a retired key, and reencrypt them with the new key when they change.
  However, this usually is not enough for key rotation. Usually, you want
  to proactively reencrypt all your fields with the new key, so that the
  old key can be decommissioned.

  This task allows you to do just that.

  ## Strategy

  This task will migrate a table following this strategy:

  - Query for minimum ID in the table
  - Query for maximum ID in the table
  - For each ID between, attempt to:
    - Fetch the row with that ID, locking it
    - If present, reencrypt all Cloak fields with the new cipher
    - Write the row, unlocking it

  The queries are issued in parallel to maximize speed. Each row is fetched
  and written back as quickly as possible to reduce the amount of time the
  row is locked.

  ## Warnings

  1. **IMPORTANT: `mix cloak.migrate` only works on tables with an integer, sequential
  `:id` field. This is the default setting for Ecto schemas, so it shouldn't be a
  problem for most users.**

  2. Because `mix cloak.migrate` issues queries in parallel, it can consume
  all your database connections. For this reason, you may wish to use a
  separate `Repo` with a limited `:pool` just for Cloak migrations. This will
  allow you to prevent any performance impact by throttling Cloak to use only
  a limited number of database connections.

  ## Configuration

  Ensure that you have configured your vault to use the new cipher by default!

      # If using mix configuration...
      config :my_app, MyApp.Vault,
        ciphers: [
          default: {Cloak.Ciphers.AES.GCM, tag: "NEW", key: <<...>>},
          retired: {Cloak.Ciphers.AES.CTR, tag: "OLD", key: <<...>>}
        ]

      # If configuring in the `init/1` callback:
      defmodule MyApp.Vault do
        use Cloak.Vault, otp_app: :my_app

        @impl Cloak.Vault
        def init(config) do
          config =
            Keyword.put(config, :ciphers, [
              default: {Cloak.Ciphers.AES.GCM, tag: "NEW", key: <<...>>},
              retired: {Cloak.Ciphers.AES.CTR, tag: "OLD", key: <<...>>}
            ])
          {:ok, config}
        end
      end

  If you want to migrate multiple schemas at once, you may find it convenient
  to specify the schemas in your `config/config.exs`:

      config :my_app,
        cloak_repo: [MyApp.Repo],
        cloak_schemas: [MyApp.Schema1, MyApp.Schema2]

  ## Usage

  To run against only a specific repo and schema, use the `-r` and `-s` flags:

      mix cloak.migrate -r MyApp.Repo -s MyApp.Schema

  If you've configured multiple schemas at once, as shown above, you can simply
  run:

      mix cloak.migrate
  """

  use Mix.Task

  import IO.ANSI, only: [yellow: 0, green: 0, reset: 0]

  alias Cloak.Migrator

  @doc false
  def run(args) do
    # Boot the host application so its repos (and vault processes) are running
    # before any migration queries are issued.
    Mix.Task.run("app.start", [])

    # Parsed as %{app => %{repo: ..., schemas: [...]}} from CLI flags/config.
    configs = Mix.Cloak.parse_config(args)

    # Reencrypt every configured schema of every configured app in turn.
    for {_app, config} <- configs,
        schema <- config.schemas do
      Mix.shell().info("Migrating #{yellow()}#{inspect(schema)}#{reset()}...")
      Migrator.migrate(config.repo, schema)
      Mix.shell().info(green() <> "Migration complete!" <> reset())
    end

    :ok
  end
end
|
lib/mix/tasks/cloak.migrate.ex
| 0.876957
| 0.42919
|
cloak.migrate.ex
|
starcoder
|
defmodule ExInsights.Utils do
  @moduledoc false

  @doc ~S"""
  Convert ms to c# time span format ("[d.]hh:mm:ss.fff…"). Ported from
  https://github.com/Microsoft/ApplicationInsights-node.js/blob/68e217e6c6646114d8df0952437590724070204f/Library/Util.ts#L122

  Non-numeric or negative input is treated as `0`.

  ### Examples:

      iex> ExInsights.Utils.ms_to_timespan(1000)
      "00:00:01.000"

      iex> ExInsights.Utils.ms_to_timespan(600000)
      "00:10:00.000"

  """
  @spec ms_to_timespan(number :: number) :: String.t()
  def ms_to_timespan(value) when not is_number(value), do: ms_to_timespan(0)
  def ms_to_timespan(value) when value < 0, do: ms_to_timespan(0)

  def ms_to_timespan(value) do
    "#{days_part(value)}#{hours_part(value)}:#{minutes_part(value)}:#{seconds_part(value)}"
  end

  # Seconds within the minute, with 3 to 7 decimal places (e.g. "01.000").
  defp seconds_part(value) do
    formatted =
      (value / 1000)
      |> mod(60)
      |> to_fixed(7)
      |> String.replace(~r/0{0,4}$/, "")

    # Left-pad single-digit seconds: "1.000" -> "01.000".
    if index_of(formatted, ".") < 2 do
      "0" <> formatted
    else
      formatted
    end
  end

  # Minutes within the hour, zero-padded to two digits.
  defp minutes_part(value) do
    value
    |> whole_units(1000 * 60)
    |> mod(60)
    |> to_string()
    |> pad2()
  end

  # Hours within the day, zero-padded to two digits.
  defp hours_part(value) do
    value
    |> whole_units(1000 * 60 * 60)
    |> mod(24)
    |> to_string()
    |> pad2()
  end

  # Day prefix such as "2." when at least one full day elapsed, "" otherwise.
  defp days_part(value) do
    case whole_units(value, 1000 * 60 * 60 * 24) do
      days when days > 0 -> "#{days}."
      _ -> ""
    end
  end

  # Number of whole `unit_ms`-sized units contained in `value` milliseconds.
  defp whole_units(value, unit_ms) do
    (value / unit_ms) |> Float.floor() |> round()
  end

  defp pad2(digits) when byte_size(digits) < 2, do: "0" <> digits
  defp pad2(digits), do: digits

  # Formats a number with exactly `decimals` decimal places.
  defp to_fixed(n, decimals) when is_integer(n), do: to_fixed(n * 1.0, decimals)
  defp to_fixed(n, decimals), do: :erlang.float_to_binary(n, decimals: decimals)

  # Zero-based index of the first occurrence of `pattern` in `subject`.
  defp index_of(subject, pattern) do
    {position, _length} = :binary.match(subject, pattern)
    position
  end

  # Modulo that preserves the fractional part of float arguments.
  defp mod(a, b) when is_integer(a), do: rem(a, b)

  defp mod(a, b) do
    whole = trunc(Float.floor(a))
    rem(whole, b) + (a - whole)
  end

  # Numeric codes for each severity level; anything unrecognised maps to 1.
  @severity_codes %{verbose: 0, warning: 2, error: 3, critical: 4}

  @doc ~S"""
  Converts the severity level to the appropriate numeric value.

  ### Examples:

      iex> ExInsights.Utils.convert(:info)
      1

      iex> ExInsights.Utils.convert(:verbose)
      0

  """
  @spec convert(severity_level :: ExInsights.severity_level()) :: integer
  def convert(severity_level), do: Map.get(@severity_codes, severity_level, 1)

  # Millisecond difference between two Erlang-style {mega, sec, micro} stamps.
  # 1 megasecond = 1.0e9 ms, 1 second = 1.0e3 ms, 1 microsecond = 1.0e-3 ms.
  @spec diff_timestamp_millis({integer, integer, integer}, {integer, integer, integer}) :: float
  def diff_timestamp_millis({megasecs1, secs1, microsecs1}, {megasecs2, secs2, microsecs2}) do
    (megasecs2 - megasecs1) * 1_000_000_000 +
      (secs2 - secs1) * 1_000 +
      (microsecs2 - microsecs1) / 1_000
  end

  @doc """
  Returns true if the given arg looks like a stacktrace, see stacktrace_entry() in `Exception`
  """
  def stacktrace?([]), do: true

  def stacktrace?([entry | rest]) do
    case entry do
      {_mod, _fun, _args, _location} -> stacktrace?(rest)
      {_fun, _args, _location} -> stacktrace?(rest)
      _other -> false
    end
  end

  def stacktrace?(_other), do: false

  @doc """
  Maps stacktrace entries to maps with level/method/assembly/fileName/line keys.
  """
  def parse_stack_trace(stack_trace) do
    stack_trace
    |> Enum.with_index()
    |> Enum.map(&frame_to_map/1)
  end

  # One {entry, index} pair -> frame map; the index becomes the `level`.
  defp frame_to_map({{module, function, arity, location}, index}) do
    %{
      level: index,
      method: Exception.format_mfa(module, function, arity),
      assembly: to_string(Application.get_application(module)),
      fileName: Keyword.get(location, :file),
      line: Keyword.get(location, :line)
    }
  end
end
|
lib/utils.ex
| 0.858303
| 0.469155
|
utils.ex
|
starcoder
|
defmodule Petrovich.Parser do
  @moduledoc """
  Parser receives the name and case to inflect it to.
  Then it parses the rules to find the appropriate modifications.
  It then calls `Applier` to modify the value.
  This module should not be used directly. Use `Petrovich` module instead.
  """

  alias Petrovich.{Applier, Detector, NameStore}
  alias Petrovich.Exceptions.ParseException
  alias Petrovich.Utils.ResultJoiner

  @doc """
  Parses name and gets modifiers for the given case.
  Then it passes the name and modification rules to the `Applier`.

  ## Examples

      iex> Parser.parse("Николай", :firstname, :dative, :male)
      {:ok, "Николаю"}

      iex> Parser.parse("Пирогов", :lastname, :instrumental, :male)
      {:ok, "Пироговым"}

  """
  @spec parse(String.t(), atom(), atom(), atom() | nil) ::
          {:ok, String.t()} | :error
  # NOTE(review): ":nomenative" looks like a misspelling of ":nominative".
  # Renaming the atom would change the public contract, so confirm what
  # callers actually pass before fixing.
  def parse(data, _, :nomenative, _), do: {:ok, data}

  def parse(data, type, case_, gender) do
    # When no gender is supplied, try to detect it from the name itself.
    gender = maybe_detect_gender(gender, data, type)
    apply_rule(data, to_string(type), case_, to_string(gender))
  end

  @doc """
  Pretty much the same as `parse/4`, but raises exception instead.

  ## Examples

      iex> Parser.parse!("Николай", :firstname, :dative, :male)
      "Николаю"

      iex> Parser.parse!("Пирогов", :lastname, :instrumental, :male)
      "Пироговым"

  """
  @spec parse!(String.t(), atom(), atom(), atom() | nil) :: String.t()
  def parse!(data, type, case_, gender) do
    case parse(data, type, case_, gender) do
      {:ok, value} -> value
      :error -> raise ParseException
    end
  end

  # Uses the provided gender when present; otherwise asks Detector, falling
  # back to :androgynous when detection fails.
  defp maybe_detect_gender(gender, data, type) do
    with nil <- gender,
         {:ok, parsed_gender} <- Detector.detect_gender(data, type) do
      parsed_gender
    else
      :error -> :androgynous
      _ -> gender
    end
  end

  # Looks up the exception/suffix rule sets for `type`, inflects each part of
  # a (possibly hyphenated) name, and joins the parts back with "-".
  defp apply_rule(values, type, case_, gender) do
    %{"exceptions" => exceptions, "suffixes" => suffixes} = NameStore.get(type)

    values
    |> String.split("-")
    |> Enum.map(fn item ->
      prepare_value(item, case_, gender, exceptions, suffixes)
    end)
    |> ResultJoiner.join_all_results(&join_callback/1)
  end

  # Finds the first matching rule (exceptions take precedence over suffix
  # rules) and applies it to the original, non-downcased value.
  defp prepare_value(value, case_, gender, exceptions, suffixes) do
    value
    |> String.downcase()
    |> maybe_exception(gender, exceptions)
    |> maybe_rule(gender, suffixes)
    |> case do
      {:error, _} -> :error
      {:ok, res} -> {:ok, Applier.apply(value, case_, res)}
    end
  end

  # Joins the successfully inflected parts back into one hyphenated name.
  defp join_callback(results) do
    results
    |> Enum.map(fn {_, item} -> item end)
    |> Enum.join("-")
  end

  # {:ok, rule} when the name is listed verbatim in the exception rules,
  # {:error, name} otherwise.
  defp maybe_exception(name, gender, exceptions) do
    exceptions
    |> Enum.filter(fn item -> fits?(:e, name, gender, item) end)
    |> pick_one(name)
  end

  # An exception rule already matched: pass it straight through.
  defp maybe_rule({:ok, rule}, _, _), do: {:ok, rule}

  # No exception matched: fall back to the suffix-based rules.
  defp maybe_rule({:error, name}, gender, suffixes) do
    suffixes
    |> Enum.filter(fn item -> fits?(:r, name, gender, item) end)
    |> pick_one(name)
  end

  # :e (exception) rules match the whole name; :r (regular) rules match by
  # comparing each test string against the name's trailing characters.
  defp fits?(:e, name, gender, rule) do
    gender?(gender, rule) and name in rule["test"]
  end

  defp fits?(:r, name, gender, rule) do
    name_len = String.length(name)

    gender?(gender, rule) and
      Enum.any?(rule["test"], fn test ->
        test_len = String.length(test)
        test == String.slice(name, name_len - test_len, test_len)
      end)
  end

  # "androgynous" rules apply regardless of the requested gender.
  defp gender?(gender, rule) do
    gender == rule["gender"] or rule["gender"] == "androgynous"
  end

  # First matching rule wins; no match yields {:error, name}.
  defp pick_one(items, name) do
    items
    |> Enum.at(0)
    |> case do
      nil -> {:error, name}
      rule -> {:ok, rule}
    end
  end
end
|
lib/petrovich/parser.ex
| 0.852874
| 0.631083
|
parser.ex
|
starcoder
|
defmodule GGity.Docs.Theme do
@moduledoc false
@doc false
@spec examples() :: iolist()
# Code snippets (as strings) demonstrating Plot.theme/2 options; the snippet
# text is returned verbatim, so the heredoc contents below must not change.
def examples do
[
"""
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
""",
"""
# Examples below assume that element constructors are imported
# e.g. `import GGity.Element.{Line, Rect, Text}
# Plot formatting
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
|> Plot.theme(plot_title: element_text(size: 10))
""",
"""
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
|> Plot.theme(plot_background: element_rect(fill: "green"))
""",
"""
# Panel formatting
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
|> Plot.theme(panel_background: element_rect(fill: "white", color: "grey"))
""",
# TODO: Major gridlines should be drawn on top of minor gridlines.
# Unfortunately we draw each axis and gridlines set as one SVG group,
# so this will required material changes to the axis-drawing internals.
"""
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
|> Plot.theme(panel_grid_major: element_line(color: "black"))
""",
"""
# Axis formatting
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
|> Plot.theme(axis_line: element_line(size: 6, color: "grey"))
""",
"""
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
|> Plot.theme(axis_text: element_text(color: "blue"))
""",
"""
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
|> Plot.theme(axis_ticks: element_line(size: 4))
""",
"""
# Turn the x-axis ticks inward
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point()
|> Plot.labs(title: "Fuel economy declines as weight decreases")
|> Plot.theme(axis_ticks_length_x: -2)
""",
"""
# GGity does not support legend position, but legend key boxes
# and text can be styled as you would expect
# Default styling
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{color: :cyl, shape: :vs})
|> Plot.labs(
  x: "Weight (1000 lbs)",
  y: "Fuel economy (mpg)",
  color: "Cylinders",
  shape: "Transmission"
)
""",
"""
# Style legend keys
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{color: :cyl, shape: :vs})
|> Plot.labs(
  x: "Weight (1000 lbs)",
  y: "Fuel economy (mpg)",
  color: "Cylinders",
  shape: "Transmission"
)
|> Plot.theme(legend_key: element_rect(fill: "white", color: "black"))
""",
"""
# Style legend text
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{color: :cyl, shape: :vs})
|> Plot.labs(
  x: "Weight (1000 lbs)",
  y: "Fuel economy (mpg)",
  color: "Cylinders",
  shape: "Transmission"
)
|> Plot.theme(legend_text: element_text(size: 4, color: "red"))
""",
"""
# Style legend title
Examples.mtcars()
|> Plot.new(%{x: :wt, y: :mpg})
|> Plot.geom_point(%{color: :cyl, shape: :vs})
|> Plot.labs(
  x: "Weight (1000 lbs)",
  y: "Fuel economy (mpg)",
  color: "Cylinders",
  shape: "Transmission"
)
|> Plot.theme(legend_title: element_text(face: "bold"))
"""
]
end
end
|
lib/mix/tasks/doc_examples/theme.ex
| 0.761583
| 0.514705
|
theme.ex
|
starcoder
|
defmodule QueryBuilder.JoinMaker do
  @moduledoc false

  require Ecto.Query

  @doc ~S"""
  Options may be:
  * `:mode`: if set to `:if_preferable`, schemas are joined only if it is better
  performance-wise; this happens only for one case: when the association has a
  one-to-one cardinality, it is better to join and include the association's result
  in the result set of the query, rather than emitting a new DB query.
  * `:type`: see `Ecto.Query.join/5`'s qualifier argument for possible values.
  """
  # NOTE(review): the @doc above describes :mode/:type options, but
  # make_joins/2 takes no options argument — the doc looks stale; confirm
  # against the project's history before relying on it.
  def make_joins(ecto_query, assoc_list) do
    do_make_joins(ecto_query, assoc_list, [], [], assoc_list)
    # returns {ecto_query, new_assoc_list}
  end

  # Recursively walks the association tree, joining each association when
  # appropriate and rebuilding the assoc list with updated state.
  defp do_make_joins(ecto_query, [], _, new_assoc_list, _original_assoc_list),
    do: {ecto_query, new_assoc_list}

  defp do_make_joins(ecto_query, [assoc_data | tail], bindings, new_assoc_list, original_assoc_list) do
    {ecto_query, assoc_data, bindings} =
      maybe_join(ecto_query, assoc_data, bindings, original_assoc_list)

    # Only descend into nested associations when the parent was joined;
    # otherwise the nested list is kept as-is.
    {ecto_query, nested_assocs} =
      if assoc_data.has_joined do
        do_make_joins(ecto_query, assoc_data.nested_assocs, bindings, [], original_assoc_list)
      else
        {ecto_query, assoc_data.nested_assocs}
      end

    assoc_data = %{assoc_data | nested_assocs: nested_assocs}

    {ecto_query, new_assoc_list} =
      do_make_joins(ecto_query, tail, bindings, new_assoc_list, original_assoc_list)

    {ecto_query, [assoc_data | new_assoc_list]}
  end

  # A to-many association marked :inner_if_cardinality_is_one is left
  # unjoined (its rows are not folded into this query's result set).
  defp maybe_join(ecto_query, %{cardinality: :many, join_type: :inner_if_cardinality_is_one} = assoc_data, bindings, _original_assoc_list),
    do: {ecto_query, assoc_data, bindings}

  defp maybe_join(ecto_query, assoc_data, bindings, original_assoc_list) do
    %{
      source_binding: source_binding,
      source_schema: source_schema,
      assoc_binding: assoc_binding,
      assoc_field: assoc_field,
      assoc_schema: assoc_schema,
      join_type: join_type
    } = assoc_data

    # Guard against the same named binding being added twice.
    if Ecto.Query.has_named_binding?(ecto_query, assoc_binding) do
      raise "has already joined"
    end

    # Everything that isn't an explicit :left join becomes an :inner join.
    join_type = if(join_type == :left, do: :left, else: :inner)

    # Combine all join filters into a single dynamic `on` condition; an empty
    # list means "no extra condition".
    on =
      if assoc_data.join_filters != [] do
        assoc_data.join_filters
        |> Enum.map(fn [filters, or_filters] ->
          QueryBuilder.Query.Where.build_dynamic_query(ecto_query, original_assoc_list, filters, or_filters)
        end)
        |> Enum.reduce(&Ecto.Query.dynamic(^&1 and ^&2))
      else
        []
      end

    unless Enum.member?(bindings, assoc_binding) do
      # see schema.ex's module doc in order to understand what's going on here
      ecto_query =
        if String.contains?(to_string(assoc_binding), "__") do
          source_schema._join(ecto_query, join_type, source_binding, assoc_field, on)
        else
          assoc_schema._join(ecto_query, join_type, source_binding, assoc_field, on)
        end

      {
        ecto_query,
        %{assoc_data | has_joined: true},
        [assoc_binding | bindings]
      }
    else
      {ecto_query, assoc_data, bindings}
    end
  end
end
|
lib/join_maker.ex
| 0.724286
| 0.461927
|
join_maker.ex
|
starcoder
|
defmodule Meeseeks do
alias Meeseeks.{Context, Document, Error, Parser, Result, Select, Selector, TupleTree}
@moduledoc """
Meeseeks is an Elixir library for parsing and extracting data from HTML and
XML with CSS or XPath selectors.
```elixir
import Meeseeks.CSS
html = HTTPoison.get!("https://news.ycombinator.com/").body
for story <- Meeseeks.all(html, css("tr.athing")) do
title = Meeseeks.one(story, css(".title a"))
%{title: Meeseeks.text(title),
url: Meeseeks.attr(title, "href")}
end
#=> [%{title: "...", url: "..."}, %{title: "...", url: "..."}, ...]
```
## Features
- Friendly API
- Browser-grade HTML5 parser
- Permissive XML parser
- CSS and XPath selectors
- Rich, extensible selector architecture
- Helpers to extract data from selections
## Why?
Meeseeks exists in the same space as an earlier library called Floki, so
why was Meeseeks created and why would you use it instead of Floki?
#### Floki is a couple years older than Meeseeks, so why does Meeseeks even exist?
Meeseeks exists because Floki used to be unable to do what I needed.
When I started learning Elixir I reimplemented a small project I had
written in another language. Part of that project involved extracting data
from HTML, and unbeknownst to me some of the HTML I needed to extract data
from was malformed.
This had never been a problem before because the HTML parser I was using
in the other language was HTML5 spec compliant and handled the malformed
HTML just as well as a browser. Unfortunately for me, Floki used (and still
uses by default) the `:mochiweb_html` parser which is nowhere near HTML5
spec compliant, and just silently dropped the data I needed when parsing.
Meeseeks started out as an attempt to write an HTML5 spec compliant parser
in Elixir (spoiler: it's really hard), then switched to using Mozilla's
[html5ever](https://github.com/servo/html5ever) via Rustler after
[Hans](https://github.com/hansihe) wrote `html5ever_elixir`.
Floki gained optional support for using `html5ever_elixir` as its parser
around the same time, but it still used `:mochiweb_html` (which doesn't
require Rust to be part of the build process) by default and I released
Meeseeks as a safer alternative.
#### Why should I use Meeseeks instead of Floki?
When Meeseeks was released it came with a safer default HTML parser, a more
complete collection of CSS selectors, and a more extensible selector
architecture than Floki.
Since then Meeseeks has been further expanded with functionality Floki
just doesn't have, such as an XML parser and XPath selectors.
It won't matter to most users, but the selection architecture is much
richer than Floki's, and permits the creation all kinds of interesting
custom, stateful selectors (in fact, both the CSS and XPath selector
strings compile down to the same selector structs that anybody can define).
What probably will matter more to users is the friendly API, extensive
documentation, and the attention to the details of usability seen in such
places as the custom formatting for result structs
(`#Meeseeks.Result<{ <p>1</p> }>`) and the descriptive errors.
#### Is Floki ever a better choice than Meeseeks?
Yes, there are two main cases when Floki is clearly a better choice than
Meeseeks.
Firstly, if you absolutely can't include Rust in your build process AND you
know that the HTML you'll be working with is well-formed and won't require
an HTML5 spec compliant parser then using Floki with the `:mochiweb_html`
parser is a reasonable choice.
However, if you have any doubts about the HTML you'll be parsing you should
probably figure out a way to use a better parser because using
`:mochiweb_html` in that situation may be a timebomb.
Secondly, if you want to make updates to an HTML document then Floki
provides facilities to do so while Meeseeks, which is entirely focused on
selecting and extracting data, does not.
#### How does performance compare between Floki and Meeseeks?
Performance is similar enough between the two that it's probably not worth
choosing one over the other for that reason.
For details and benchmarks, see [Meeseeks vs. Floki Performance
](https://github.com/mischov/meeseeks_floki_bench).
## Compatibility
Meeseeks is tested with a minimum combination of Elixir 1.4.0 and
Erlang/OTP 19.3, and a maximum combination of Elixir 1.8.1 and
Erlang/OTP 21.0.
## Dependencies
Meeseeks depends on [html5ever](https://github.com/servo/html5ever) via
[meeseeks_html5ever](https://github.com/mischov/meeseeks_html5ever).
Because html5ever is a Rust library, you will need to have the Rust
compiler [installed](https://www.rust-lang.org/en-US/install.html).
This dependency is necessary because there are no HTML5 spec compliant
parsers written in Elixir/Erlang.
## Getting Started
### Parse
Start by parsing a source (HTML/XML string or `Meeseeks.TupleTree`) into
a `Meeseeks.Document` so that it can be queried.
`Meeseeks.parse/1` parses the source as HTML, but `Meeseeks.parse/2`
accepts a second argument of either `:html` or `:xml` that specifies how
the source is parsed.
```elixir
document = Meeseeks.parse("<div id=main><p>1</p><p>2</p><p>3</p></div>")
#=> Meeseeks.Document<{...}>
```
The selection functions accept an unparsed source, parsing it as HTML, but
parsing is expensive so parse ahead of time when running multiple
selections on the same document.
### Select
Next, use one of Meeseeks's selection functions - `fetch_all`, `all`,
`fetch_one`, or `one` - to search for nodes.
All these functions accept a queryable (a source, a document, or a
`Meeseeks.Result`), one or more `Meeseeks.Selector`s, and optionally an
initial context.
`all` returns a (possibly empty) list of results representing every node
matching one of the provided selectors, while `one` returns a result
representing the first node to match a selector (depth-first) or nil if
there is no match.
`fetch_all` and `fetch_one` work like `all` and `one` respectively, but
wrap the result in `{:ok, ...}` if there is a match or return
`{:error, %Meeseeks.Error{type: :select, reason: :no_match}}` if there is
not.
To generate selectors, use the `css` macro provided by `Meeseeks.CSS` or
the `xpath` macro provided by `Meeseeks.XPath`.
```elixir
import Meeseeks.CSS
result = Meeseeks.one(document, css("#main p"))
#=> #Meeseeks.Result<{ <p>1</p> }>
import Meeseeks.XPath
result = Meeseeks.one(document, xpath("//*[@id='main']//p"))
#=> #Meeseeks.Result<{ <p>1</p> }>
```
### Extract
Retrieve information from the `Meeseeks.Result` with an extraction
function.
The extraction functions are `attr`, `attrs`, `data`, `dataset`, `html`,
`own_text`, `tag`, `text`, `tree`.
```elixir
Meeseeks.tag(result)
#=> "p"
Meeseeks.text(result)
#=> "1"
Meeseeks.tree(result)
#=> {"p", [], ["1"]}
```
The extraction functions `html` and `tree` work on `Meeseeks.Document`s in
addition to `Meeseeks.Result`s.
```elixir
Meeseeks.html(document)
#=> "<html><head></head><body><div id=\\"main\\"><p>1</p><p>2</p><p>3</p></div></body></html>"
```
## Custom Selectors
Meeseeks is designed to have extremely extensible selectors, and creating
a custom selector is as easy as defining a struct that implements the
`Meeseeks.Selector` behaviour.
```elixir
defmodule CommentContainsSelector do
use Meeseeks.Selector
alias Meeseeks.Document
defstruct value: ""
def match(selector, %Document.Comment{} = node, _document, _context) do
String.contains?(node.content, selector.value)
end
def match(_selector, _node, _document, _context) do
false
end
end
selector = %CommentContainsSelector{value: "TODO"}
Meeseeks.one("<!-- TODO: Close vuln! -->", selector)
#=> #Meeseeks.Result<{ <!-- TODO: Close vuln! --> }>
```
To learn more, check the documentation for `Meeseeks.Selector` and
`Meeseeks.Selector.Combinator`
"""
# Anything selection functions accept: an unparsed source (string or
# tuple tree), an already-parsed Document, or a prior selection Result.
@type queryable :: Parser.source() | Document.t() | Result.t()
# Anything extraction functions accept; `nil` is passed through as `nil`.
@type extractable :: Document.t() | Result.t() | nil
# A single selector or a list of selectors.
@type selectors :: Selector.t() | [Selector.t()]
# Parse
@doc """
Parses a string or `Meeseeks.TupleTree` into a `Meeseeks.Document`.

`parse/1` parses as HTML, while `parse/2` accepts a second argument of
either `:html`, `:xml`, or `:tuple_tree` that specifies how the source is
parsed.

## Examples

iex> Meeseeks.parse("<div id=main><p>Hello, Meeseeks!</p></div>")
#Meeseeks.Document<{...}>

iex> Meeseeks.parse("<book><author>GGK</author></book>", :xml)
#Meeseeks.Document<{...}>

iex> Meeseeks.parse({"div", [{"id", "main"}], [{"p", [], ["Hello, Meeseeks!"]}]}, :tuple_tree)
#Meeseeks.Document<{...}>
"""
@spec parse(Parser.source()) :: Document.t() | {:error, Error.t()}
def parse(source) do
Parser.parse(source)
end

# Same as `parse/1`, but with an explicitly chosen parser type.
@spec parse(Parser.source(), Parser.type()) :: Document.t() | {:error, Error.t()}
def parse(source, parser) do
Parser.parse(source, parser)
end
# Select
@doc """
Returns `{:ok, [Result, ...]}` if one or more nodes in the queryable match
a selector, or `{:error, %Meeseeks.Error{type: :select, reason: :no_match}}`
if none do.

Optionally accepts a `Meeseeks.Context` map.

Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a
parse error.

If multiple selections are being run on the same unparsed source, parse
first to avoid unnecessary computation.

## Examples

iex> import Meeseeks.CSS
iex> Meeseeks.fetch_all("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p")) |> elem(1) |> List.first()
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec fetch_all(queryable, selectors) :: {:ok, [Result.t()]} | {:error, Error.t()}
def fetch_all(queryable, selectors), do: fetch_all(queryable, selectors, %{})

@spec fetch_all(queryable, selectors, Context.t()) :: {:ok, [Result.t()]} | {:error, Error.t()}
def fetch_all(queryable, selectors, context)

# An upstream parse error given as the queryable is passed straight through.
def fetch_all({:error, _} = error, _selectors, _context), do: error

# Already-parsed inputs are selected over directly.
def fetch_all(%queryable_struct{} = queryable, selectors, context)
    when queryable_struct in [Document, Result] do
  Select.fetch_all(queryable, selectors, context)
end

# Raw sources are parsed first; a parse error short-circuits.
def fetch_all(source, selectors, context) do
  case parse(source) do
    {:error, _} = error -> error
    document -> Select.fetch_all(document, selectors, context)
  end
end
@doc """
Returns `[Result, ...]` if one or more nodes in the queryable match a
selector, or `[]` if none do.

Optionally accepts a `Meeseeks.Context` map.

Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a
parse error.

If multiple selections are being run on the same unparsed source, parse
first to avoid unnecessary computation.

## Examples

iex> import Meeseeks.CSS
iex> Meeseeks.all("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p")) |> List.first()
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec all(queryable, selectors) :: [Result.t()] | {:error, Error.t()}
def all(queryable, selectors) do
all(queryable, selectors, %{})
end

@spec all(queryable, selectors, Context.t()) :: [Result.t()] | {:error, Error.t()}
def all(queryable, selectors, context)
# Propagate an upstream parse error untouched.
def all({:error, _} = error, _selectors, _context), do: error
# Already-parsed inputs are selected over directly.
def all(%Document{} = queryable, selectors, context) do
Select.all(queryable, selectors, context)
end
def all(%Result{} = queryable, selectors, context) do
Select.all(queryable, selectors, context)
end
# Unparsed source: parse first, then select (or return the parse error).
def all(source, selectors, context) do
case parse(source) do
{:error, reason} -> {:error, reason}
document -> Select.all(document, selectors, context)
end
end
@doc """
Returns `{:ok, Result}` for the first node in the queryable (depth-first)
matching a selector, or
`{:error, %Meeseeks.Error{type: :select, reason: :no_match}}` if none do.

Optionally accepts a `Meeseeks.Context` map.

Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a
parse error.

If multiple selections are being run on the same unparsed source, parse
first to avoid unnecessary computation.

## Examples

iex> import Meeseeks.CSS
iex> Meeseeks.fetch_one("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p")) |> elem(1)
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec fetch_one(queryable, selectors) :: {:ok, Result.t()} | {:error, Error.t()}
def fetch_one(queryable, selectors), do: fetch_one(queryable, selectors, %{})

@spec fetch_one(queryable, selectors, Context.t()) :: {:ok, Result.t()} | {:error, Error.t()}
def fetch_one(queryable, selectors, context)

# An upstream parse error given as the queryable is passed straight through.
def fetch_one({:error, _} = error, _selectors, _context), do: error

# Already-parsed inputs are selected over directly.
def fetch_one(%queryable_struct{} = queryable, selectors, context)
    when queryable_struct in [Document, Result] do
  Select.fetch_one(queryable, selectors, context)
end

# Raw sources are parsed first; a parse error short-circuits.
def fetch_one(source, selectors, context) do
  case parse(source) do
    {:error, _} = error -> error
    document -> Select.fetch_one(document, selectors, context)
  end
end
@doc """
Returns a `Result` for the first node in the queryable (depth-first)
matching a selector, or `nil` if none do.

Optionally accepts a `Meeseeks.Context` map.

Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a
parse error.

If multiple selections are being run on the same unparsed source, parse
first to avoid unnecessary computation.

## Examples

iex> import Meeseeks.CSS
iex> Meeseeks.one("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p"))
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec one(queryable, selectors) :: Result.t() | nil | {:error, Error.t()}
def one(queryable, selectors) do
one(queryable, selectors, %{})
end

@spec one(queryable, selectors, Context.t()) :: Result.t() | nil | {:error, Error.t()}
def one(queryable, selectors, context)
# Propagate an upstream parse error untouched.
def one({:error, _} = error, _selectors, _context), do: error
# Already-parsed inputs are selected over directly.
def one(%Document{} = queryable, selectors, context) do
Select.one(queryable, selectors, context)
end
def one(%Result{} = queryable, selectors, context) do
Select.one(queryable, selectors, context)
end
# Unparsed source: parse first, then select (or return the parse error).
def one(source, selectors, context) do
case parse(source) do
{:error, reason} -> {:error, reason}
document -> Select.one(document, selectors, context)
end
end
@doc """
Returns the accumulated result of walking the queryable, accumulating nodes
that match a selector. Prefer `all` or `one` - `select` should only be used
when a custom accumulator is required.

Requires that a `Meeseeks.Accumulator` has been added to the context via
`Meeseeks.Context.add_accumulator/2`, and will raise an error if it hasn't.

Parses the source if it is not a `Meeseeks.Document` or `Meeseeks.Result`,
and may return `{:error, %Meeseeks.Error{type: :parser}}` if there is a
parse error.

If multiple selections are being run on the same unparsed source, parse
first to avoid unnecessary computation.

## Examples

iex> import Meeseeks.CSS
iex> accumulator = %Meeseeks.Accumulator.One{}
iex> context = Meeseeks.Context.add_accumulator(%{}, accumulator)
iex> Meeseeks.select("<div id=main><p>1</p><p>2</p><p>3</p></div>", css("#main p"), context)
#Meeseeks.Result<{ <p>1</p> }>
"""
@spec select(queryable, selectors, Context.t()) :: any | {:error, Error.t()}
def select(queryable, selectors, context)
# Propagate an upstream parse error untouched.
def select({:error, _} = error, _selectors, _context), do: error
# Already-parsed inputs are selected over directly.
def select(%Document{} = queryable, selectors, context) do
Select.select(queryable, selectors, context)
end
def select(%Result{} = queryable, selectors, context) do
Select.select(queryable, selectors, context)
end
# Unparsed source: parse first, then select (or return the parse error).
def select(source, selectors, context) do
case parse(source) do
{:error, reason} -> {:error, reason}
document -> Select.select(document, selectors, context)
end
end
# Extract
@doc """
Returns the value the result's node carries for the given attribute, or
`nil` when the attribute is absent.

Nil input returns `nil`; any other non-result input raises.

## Examples

iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div id=example>Hi</div>", css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.attr(result, "id")
"example"
"""
@spec attr(extractable, String.t()) :: String.t() | nil
def attr(extractable, attribute)
def attr(nil, _attribute), do: nil
def attr(%Result{} = result, attribute), do: Result.attr(result, attribute)
def attr(other, _attribute), do: raise_cannot_extract(other, "attr/2")
@doc """
Returns a result's attributes list, which may be empty, or nil if the
result represents a node without attributes.

Nil input returns `nil`.

## Examples

iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div id=example>Hi</div>", css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.attrs(result)
[{"id", "example"}]
"""
@spec attrs(extractable) :: [{String.t(), String.t()}] | nil
def attrs(extractable)
def attrs(nil), do: nil
def attrs(%Result{} = result), do: Result.attrs(result)
# Anything other than nil or a Result is a caller error.
def attrs(x), do: raise_cannot_extract(x, "attrs/1")
@doc """
Returns the combined data of a result or the result's children, which may
be an empty string.

Once the data has been combined the whitespace is compacted by replacing
all instances of more than one whitespace character with a single space
and then trimmed (both steps can be disabled via options).

Data is the content of `<script>` or `<style>` tags, or the content of
comments starting with "[CDATA[" and ending with "]]". The latter behavior
is to support the extraction of CDATA from HTML, since HTML5 parsers parse
CDATA as comments.

Nil input returns `nil`.

## Options

* `:collapse_whitespace` - Boolean determining whether or not to replace
blocks of whitespace with a single space character. Defaults to `true`.
* `:trim` - Boolean determining whether or not to trim the resulting
text. Defaults to `true`.

## Examples

iex> import Meeseeks.CSS
iex> result1 = Meeseeks.one("<div id=example>Hi</div>", css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.data(result1)
""
iex> result2 = Meeseeks.one("<script id=example>Hi</script>", css("#example"))
#Meeseeks.Result<{ <script id="example">Hi</script> }>
iex> Meeseeks.data(result2)
"Hi"
"""
@spec data(extractable, Keyword.t()) :: String.t() | nil
def data(extractable, opts \\ [])
def data(nil, _), do: nil
def data(%Result{} = result, opts), do: Result.data(result, opts)
# Report the actual arity ("data/2") in the error, matching the "attr/2"
# convention above; previously this misreported the function as data/1.
def data(x, _), do: raise_cannot_extract(x, "data/2")
@doc """
Returns a map of a result's data attributes, or nil if the result
represents a node without attributes.

Behaves like HTMLElement.dataset; only valid data attributes are included,
and attribute names have "data-" removed and are converted to camelCase.

See: https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement/dataset

Nil input returns `nil`.

## Examples

iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div id=example data-x-val=1 data-y-val=2></div>", css("#example"))
#Meeseeks.Result<{ <div id="example" data-x-val="1" data-y-val="2"></div> }>
iex> Meeseeks.dataset(result)
%{"xVal" => "1", "yVal" => "2"}
"""
@spec dataset(extractable) :: %{optional(String.t()) => String.t()} | nil
def dataset(extractable)
def dataset(nil), do: nil
def dataset(%Result{} = result), do: Result.dataset(result)
# Anything other than nil or a Result is a caller error.
def dataset(x), do: raise_cannot_extract(x, "dataset/1")
@doc """
Returns a string representing the combined HTML of a document or result
and its descendants.

Nil input returns `nil`.

## Examples

iex> import Meeseeks.CSS
iex> document = Meeseeks.parse("<div id=example>Hi</div>")
iex> Meeseeks.html(document)
"<html><head></head><body><div id=\\"example\\">Hi</div></body></html>"
iex> result = Meeseeks.one(document, css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.html(result)
"<div id=\\"example\\">Hi</div>"
"""
@spec html(extractable) :: String.t() | nil
def html(extractable)
def html(nil), do: nil
# Unlike most extractors, html/1 also accepts a whole Document.
def html(%Document{} = document), do: Document.html(document)
def html(%Result{} = result), do: Result.html(result)
def html(x), do: raise_cannot_extract(x, "html/1")
@doc """
Returns the text directly contained in a result's node - excluding text
nested inside child elements - which may be an empty string.

Once the text has been combined the whitespace is compacted by replacing
all instances of more than one whitespace character with a single space
and then trimmed (both steps can be disabled via options).

Nil input returns `nil`.

## Options

* `:collapse_whitespace` - Boolean determining whether or not to replace
blocks of whitespace with a single space character. Defaults to `true`.
* `:trim` - Boolean determining whether or not to trim the resulting
text. Defaults to `true`.

## Examples

iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div>Hello, <b>World!</b></div>", css("div"))
#Meeseeks.Result<{ <div>Hello, <b>World!</b></div> }>
iex> Meeseeks.own_text(result)
"Hello,"
"""
@spec own_text(extractable, Keyword.t()) :: String.t() | nil
def own_text(extractable, opts \\ [])
def own_text(nil, _), do: nil
def own_text(%Result{} = result, opts), do: Result.own_text(result, opts)
# Report the actual arity ("own_text/2") in the error, matching the
# "attr/2" convention; previously this misreported the function's arity.
def own_text(x, _), do: raise_cannot_extract(x, "own_text/2")
@doc """
Returns the result's tag name, or `nil` if the result represents a node
that has no tag.

Nil input returns `nil`; any other non-result input raises.

## Examples

iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div id=example>Hi</div>", css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.tag(result)
"div"
"""
@spec tag(extractable) :: String.t() | nil
def tag(extractable)
def tag(nil), do: nil
def tag(%Result{} = result), do: Result.tag(result)
def tag(other), do: raise_cannot_extract(other, "tag/1")
@doc """
Returns the combined text of a result or the result's descendants, which
may be an empty string.

Once the text has been combined the whitespace is compacted by replacing
all instances of more than one whitespace character with a single space
and then trimmed (both steps can be disabled via options).

Nil input returns `nil`.

## Options

* `:collapse_whitespace` - Boolean determining whether or not to replace
blocks of whitespace with a single space character. Defaults to `true`.
* `:trim` - Boolean determining whether or not to trim the resulting
text. Defaults to `true`.

## Examples

iex> import Meeseeks.CSS
iex> result = Meeseeks.one("<div>Hello, <b>World!</b></div>", css("div"))
#Meeseeks.Result<{ <div>Hello, <b>World!</b></div> }>
iex> Meeseeks.text(result)
"Hello, World!"
"""
@spec text(extractable, Keyword.t()) :: String.t() | nil
def text(extractable, opts \\ [])
def text(nil, _), do: nil
def text(%Result{} = result, opts), do: Result.text(result, opts)
# Report the actual arity ("text/2") in the error, matching the "attr/2"
# convention; previously this misreported the function as text/1.
def text(x, _), do: raise_cannot_extract(x, "text/2")
@doc """
Returns the `Meeseeks.TupleTree` of a document or result and its
descendants.

Nil input returns `nil`.

## Examples

iex> import Meeseeks.CSS
iex> document = Meeseeks.parse("<div id=example>Hi</div>")
iex> Meeseeks.tree(document)
[{"html", [],
[{"head", [], []},
{"body", [], [{"div", [{"id", "example"}], ["Hi"]}]}]}]
iex> result = Meeseeks.one(document, css("#example"))
#Meeseeks.Result<{ <div id="example">Hi</div> }>
iex> Meeseeks.tree(result)
{"div", [{"id", "example"}], ["Hi"]}
"""
@spec tree(extractable) :: TupleTree.t() | nil
def tree(extractable)
def tree(nil), do: nil
# Like html/1, tree/1 also accepts a whole Document.
def tree(%Document{} = document), do: Document.tree(document)
def tree(%Result{} = result), do: Result.tree(result)
def tree(x), do: raise_cannot_extract(x, "tree/1")
# Shared failure path for extraction functions: raises with the attempted
# extractor's name and an inspect of the offending value.
defp raise_cannot_extract(target, extractor) do
raise "Cannot run Meeseeks.#{extractor} on #{inspect(target)}"
end
end
|
lib/meeseeks.ex
| 0.724481
| 0.716591
|
meeseeks.ex
|
starcoder
|
defmodule Topo.Util do
  @moduledoc false

  @type point :: {number, number}

  # Z-component of the cross product of vectors (a - c) and (b - c):
  # zero when the three points are collinear.
  @spec cross(point, point, point) :: number
  def cross({ax, ay}, {bx, by}, {cx, cy}) do
    {ax - cx, ay - cy}
    |> Vector.cross({bx - cx, by - cy})
    |> Vector.component(:z)
  end

  # True when a, b and c lie on a single line (zero cross product).
  @spec collinear?(point, point, point) :: boolean
  def collinear?(a, b, c) do
    cross(a, b, c) == 0
  end

  # Signed test for which side of segment a->b the point p falls on;
  # 0 means p is on the line through a and b.
  @spec side(point, point, point) :: number
  def side({ax, ay}, {bx, by}, {px, py}) do
    (px - ax) * (by - ay) - (py - ay) * (bx - ax)
  end

  # Range check: does p fall within the bounding range of segment a-b?
  # (Callers pair this with collinear?/3 - see assert_no_collinear/1.)
  #
  # Degenerate segment (a == b): p is "between" only when p equals a.
  # BUG FIX: the guard previously used `and` (px != ax and py != ay), so a
  # point sharing exactly one coordinate with a == b slipped through to the
  # range clauses below and was wrongly reported as between. `or` correctly
  # rejects any point differing from a in either coordinate.
  @spec between?(point, point, point) :: boolean
  def between?({ax, ay}, {bx, by}, {px, py})
      when ax == bx and ay == by and (px != ax or py != ay),
      do: false

  # Vertical segment: only the y-range is informative.
  def between?({ax, ay}, {bx, by}, {_, py}) when ax == bx and ay != by,
    do: (ay <= py && py <= by) || (ay >= py && py >= by)

  # General case: compare against the x-range.
  def between?({ax, _}, {bx, _}, {px, _}), do: (ax <= px && px <= bx) || (ax >= px && px >= bx)

  # Drops redundant ring vertices: whenever b is collinear with and lies
  # between its neighbours a and c, b is removed and the scan resumes at a.
  @spec assert_no_collinear(list) :: list
  def assert_no_collinear([a, b, c | rest]) do
    if collinear?(a, c, b) && between?(a, c, b) do
      assert_no_collinear([a, c | rest])
    else
      [a] ++ assert_no_collinear([b, c | rest])
    end
  end

  def assert_no_collinear(ring), do: ring

  # Midpoint of segment a-b.
  @spec midpoint(point, point) :: point
  def midpoint(a, b) do
    Vector.divide(Vector.add(a, b), 2)
  end

  # True when some edge pair (one edge from each vertex list) does NOT have
  # the given SegSeg intersection relationship.
  @spec any_edge_pair_not?(list, list, atom) :: boolean
  def any_edge_pair_not?(a, b, rel) do
    do_any_edge_pair?(a, b, fn a1, a2, b1, b2 ->
      elem(SegSeg.intersection(a1, a2, b1, b2), 1) != rel
    end)
  end

  # True when some edge pair (one edge from each vertex list) has the given
  # SegSeg intersection relationship.
  @spec any_edge_pair?(list, list, atom) :: boolean
  def any_edge_pair?(a, b, rel) do
    do_any_edge_pair?(a, b, fn a1, a2, b1, b2 ->
      elem(SegSeg.intersection(a1, a2, b1, b2), 1) == rel
    end)
  end

  # Walks each consecutive edge of `b`, testing it against every edge of `a`.
  @spec do_any_edge_pair?(list, list, function) :: boolean
  defp do_any_edge_pair?(_, [_], _), do: false

  defp do_any_edge_pair?(a, [b1, b2 | rest], fun) do
    any_edge?(a, [b1, b2], fun) || do_any_edge_pair?(a, [b2 | rest], fun)
  end

  # Tests each consecutive edge of `a` against the single edge [b1, b2].
  @spec any_edge?(list, list, function) :: boolean
  defp any_edge?([_], _, _), do: false

  defp any_edge?([a1, a2 | rest], [b1, b2], fun) do
    fun.(a1, a2, b1, b2) || any_edge?([a2 | rest], [b1, b2], fun)
  end
end
|
lib/topo/util.ex
| 0.836821
| 0.649342
|
util.ex
|
starcoder
|
if Code.ensure_loaded?(Phoenix) do
defmodule PromEx.Plugins.Phoenix do
@moduledoc """
This plugin captures metrics emitted by Phoenix. Specifically, it captures HTTP request metrics and
Phoenix channel metrics.
## Plugin options
This plugin supports the following options:
- `metric_prefix`: This option is OPTIONAL and is used to override the default metric prefix of
`[otp_app, :prom_ex, :phoenix]`. If this changes you will also want to set `phoenix_metric_prefix`
in your `dashboard_assigns` to the snakecase version of your prefix, the default
`phoenix_metric_prefix` is `{otp_app}_prom_ex_phoenix`.
### Single Endpoint/Router
- `endpoint`: **Required** This is the full module name of your Phoenix Endpoint (e.g MyAppWeb.Endpoint).
- `router`: **Required** This is the full module name of your Phoenix Router (e.g MyAppWeb.Router).
- `event_prefix`: **Optional** Allows you to set the event prefix for the Telemetry events. This
value should align with what you pass to `Plug.Telemetry` in your `endpoint.ex` file (see the plug docs
for more information https://hexdocs.pm/plug/Plug.Telemetry.html)
- `additional_routes`: **Optional** This option allows you to specify route path labels for applications routes
not defined in your Router module.
For example, if you want to track telemetry events for a plug in your
`endpoint.ex` file, you can provide a keyword list with the structure `[some-route: ~r(\/some-path)]` and any
time that the route is called and the plug handles the call, the path label for this particular Prometheus metric
will be set to `some-route`. You can pass in either a regular expression or a string to match the incoming
request.
e.g
```elixir
{
PromEx.Plugins.Phoenix,
endpoint: MyApp.Endpoint,
router: MyAppWeb.Public.Router,
event_prefix: [:admin, :endpoint]
}
```
### Multiple Endpoints/Router
- `endpoints`: This accepts a list of per Phoenix Endpoint options `{endpoint_name, endpoint_opts}`
- `endpoint_name`: **Required** This is the full module name of your Phoenix Endpoint (e.g MyAppWeb.Endpoint).
- `endpoint_opts`: Per endpoint plugin options:
- `:routers`: **Required** List of routers modules for the endpoint, the HTTP metrics will be augmented with controller/action/path information from the routers.
- `:event_prefix`: **Optional** Allows you to set the event prefix for the Telemetry events. This
value should align with what you pass to `Plug.Telemetry` in the corresponding endpoint module (see the plug docs
for more information https://hexdocs.pm/plug/Plug.Telemetry.html)
- `:additional_routes`: This option allows you to specify route path labels for applications routes
not defined in your Router modules for the corresponding endpoint.
e.g
```elixir
{
PromEx.Plugins.Phoenix,
endpoints: [
{MyApp.Endpoint, routers: [MyAppWeb.Public.Router]},
{MyApp.Endpoint2, routers: [MyAppWeb.Admin.Router], event_prefix: [:admin, :endpoint]}
]
}
```
## Metric Groups
This plugin exposes the following metric groups:
- `:phoenix_http_event_metrics`
- `:phoenix_channel_event_metrics`
- `:phoenix_socket_event_metrics`
- `:phoenix_endpoint_manual_metrics`
## Usage
To use plugin in your application, add the following to your PromEx module:
```elixir
defmodule WebApp.PromEx do
use PromEx, otp_app: :web_app
@impl true
def plugins do
[
...
{
PromEx.Plugins.Phoenix,
endpoint: MyApp.Endpoint,
router: MyAppWeb.Public.Router
}
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "phoenix.json"}
]
end
end
```
When working with multiple Phoenix routers use the `endpoints` option instead:
```elixir
defmodule WebApp.PromEx do
use PromEx, otp_app: :web_app
@impl true
def plugins do
[
...
{
PromEx.Plugins.Phoenix,
endpoints: [
{MyApp.Endpoint, routers: [MyAppWeb.Public.Router]},
{MyApp.Endpoint2, routers: [MyAppWeb.Admin.Router], event_prefix: [:admin, :endpoint]}
]
}
]
end
@impl true
def dashboards do
[
...
{:prom_ex, "phoenix.json"}
]
end
end
```
"""
use PromEx.Plugin
require Logger
alias Phoenix.Socket
alias Plug.Conn
# Proxy event name that all configured Phoenix endpoint :stop events are
# re-emitted under (see set_up_telemetry_proxy/1 below).
@stop_event [:prom_ex, :plugin, :phoenix, :stop]
@impl true
def event_metrics(opts) do
otp_app = Keyword.fetch!(opts, :otp_app)
# A user-supplied :metric_prefix overrides the default derived prefix.
metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :phoenix))
phoenix_event_prefixes = fetch_event_prefixes!(opts)
# Attach one telemetry handler per configured prefix so that every
# endpoint's stop event funnels into the single @stop_event used below.
set_up_telemetry_proxy(phoenix_event_prefixes)
# Event metrics definitions
[
http_events(metric_prefix, opts),
channel_events(metric_prefix),
socket_events(metric_prefix)
]
end
@impl true
def manual_metrics(opts) do
  otp_app = Keyword.fetch!(opts, :otp_app)

  # Honor a user-supplied :metric_prefix, mirroring event_metrics/1.
  # Previously manual metrics always used the default prefix, producing
  # inconsistent metric names when a custom prefix was configured.
  metric_prefix = Keyword.get(opts, :metric_prefix, PromEx.metric_prefix(otp_app, :phoenix))

  [
    endpoint_info(metric_prefix, opts)
  ]
end
# Builds the manual metrics group reporting each endpoint's configured URL
# and port, populated by execute_phoenix_endpoint_info/1 once the endpoint
# process is up.
defp endpoint_info(metric_prefix, opts) do
# Fetch user options
# Single-endpoint config supplies :endpoint; multi-endpoint config
# supplies :endpoints ({endpoint, opts} tuples).
phoenix_endpoint = Keyword.get(opts, :endpoint) || Keyword.get(opts, :endpoints)
Manual.build(
:phoenix_endpoint_manual_metrics,
{__MODULE__, :execute_phoenix_endpoint_info, [phoenix_endpoint]},
[
last_value(
metric_prefix ++ [:endpoint, :url, :info],
event_name: [:prom_ex, :plugin, :phoenix, :endpoint_url],
description: "The configured URL of the Endpoint module.",
measurement: :status,
tags: [:url, :endpoint]
),
last_value(
metric_prefix ++ [:endpoint, :port, :info],
event_name: [:prom_ex, :plugin, :phoenix, :endpoint_port],
description: "The configured port of the Endpoint module.",
measurement: :status,
tags: [:port, :endpoint]
)
]
)
end
@doc false
# Polls for each endpoint process and, once registered, emits telemetry
# events carrying its URL and port so the manual last_value metrics above
# get populated.
def execute_phoenix_endpoint_info(endpoint) do
# TODO: This is a bit of a hack until Phoenix supports an init telemetry event to
# reliably get the configuration.
# Anonymous function that recurses on itself (it is passed in as its own
# third argument): up to 10 attempts, sleeping 1s between each, waiting
# for the endpoint process to appear in the process registry.
endpoint_init_checker = fn
count, endpoint_module, endpoint_init_checker_function when count < 10 ->
case Process.whereis(endpoint_module) do
pid when is_pid(pid) ->
# The same %{status: 1} measurement map is reused for both events.
measurements = %{status: 1}
url_metadata = %{url: endpoint_module.url(), endpoint: normalize_module_name(endpoint_module)}
:telemetry.execute([:prom_ex, :plugin, :phoenix, :endpoint_url], measurements, url_metadata)
%URI{port: port} = endpoint_module.struct_url()
port_metadata = %{port: port, endpoint: normalize_module_name(endpoint_module)}
:telemetry.execute([:prom_ex, :plugin, :phoenix, :endpoint_port], measurements, port_metadata)
_ ->
# Endpoint not registered yet; pause and retry.
Process.sleep(1_000)
endpoint_init_checker_function.(count + 1, endpoint_module, endpoint_init_checker_function)
end
_, _, _ ->
# Gave up after 10 attempts; endpoint info metrics are simply not emitted.
:noop
end
# List input means multi-endpoint config ({endpoint_module, opts} tuples);
# each endpoint is polled in its own Task so one slow endpoint does not
# block the others.
if is_list(endpoint) do
endpoint
|> Enum.each(fn {endpoint_module, _} ->
Task.start(fn ->
endpoint_init_checker.(0, endpoint_module, endpoint_init_checker)
end)
end)
else
Task.start(fn ->
endpoint_init_checker.(0, endpoint, endpoint_init_checker)
end)
end
end
# Builds the HTTP request metrics group (duration, response size, request
# count), all driven by the proxied endpoint @stop_event.
defp http_events(metric_prefix, opts) do
routers = fetch_routers!(opts)
additional_routes = fetch_additional_routes!(opts)
# Shared label set across all three HTTP metrics.
http_metrics_tags = [:status, :method, :path, :controller, :action]
Event.build(
:phoenix_http_event_metrics,
[
# Capture request duration information
distribution(
metric_prefix ++ [:http, :request, :duration, :milliseconds],
event_name: @stop_event,
measurement: :duration,
description: "The time it takes for the application to respond to HTTP requests.",
reporter_options: [
buckets: exponential!(1, 2, 12)
],
tag_values: get_conn_tags(routers, additional_routes),
tags: http_metrics_tags,
unit: {:native, :millisecond}
),
# Capture response payload size information
distribution(
metric_prefix ++ [:http, :response, :size, :bytes],
event_name: @stop_event,
description: "The size of the HTTP response payload.",
reporter_options: [
buckets: exponential!(1, 4, 12)
],
# resp_body may be nil (e.g. no body sent); treat that as 0 bytes.
measurement: fn _measurements, metadata ->
case metadata.conn.resp_body do
nil -> 0
_ -> :erlang.iolist_size(metadata.conn.resp_body)
end
end,
tag_values: get_conn_tags(routers, additional_routes),
tags: http_metrics_tags,
unit: :byte
),
# Capture the number of requests that have been serviced
counter(
metric_prefix ++ [:http, :requests, :total],
event_name: @stop_event,
description: "The number of requests have been serviced.",
tag_values: get_conn_tags(routers, additional_routes),
tags: http_metrics_tags
)
]
)
end
# Builds the Phoenix channel metrics group (join counts and handle_in
# durations), driven by Phoenix's built-in channel telemetry events.
defp channel_events(metric_prefix) do
Event.build(
:phoenix_channel_event_metrics,
[
# Capture the number of channel joins that have occurred
counter(
metric_prefix ++ [:channel, :joined, :total],
event_name: [:phoenix, :channel_joined],
description: "The number of channel joins that have occurred.",
tag_values: fn %{result: result, socket: %Socket{transport: transport}} ->
%{
transport: transport,
result: result
}
end,
tags: [:result, :transport]
),
# Capture channel handle_in duration
distribution(
metric_prefix ++ [:channel, :handled_in, :duration, :milliseconds],
event_name: [:phoenix, :channel_handled_in],
measurement: :duration,
description: "The time it takes for the application to respond to channel messages.",
reporter_options: [
buckets: exponential!(1, 2, 12)
],
unit: {:native, :millisecond}
)
]
)
end
# Builds the Phoenix socket metrics group (connection durations), driven
# by Phoenix's built-in socket telemetry event.
defp socket_events(metric_prefix) do
Event.build(
:phoenix_socket_event_metrics,
[
# Capture socket connection duration
distribution(
metric_prefix ++ [:socket, :connected, :duration, :milliseconds],
event_name: [:phoenix, :socket_connected],
measurement: :duration,
description: "The time it takes for the application to establish a socket connection.",
reporter_options: [
buckets: exponential!(1, 2, 12)
],
tags: [:result, :transport],
unit: {:native, :millisecond}
)
]
)
end
# Returns a tag_values function that labels an HTTP metric from the Conn
# in the telemetry metadata. With no additional routes, unmatched requests
# fall back to "Unknown" path/controller/action tags.
defp get_conn_tags(routers, []) do
fn
%{conn: %Conn{} = conn} ->
default_route_tags = %{
path: "Unknown",
controller: "Unknown",
action: "Unknown"
}
conn
|> do_get_router_info(routers, default_route_tags)
|> Map.merge(%{
status: conn.status,
method: conn.method
})
_ ->
# TODO: Change this to warning as warn is deprecated as of Elixir 1.11
# NOTE(review): this branch returns the Logger call's result rather
# than a tag map - confirm downstream telemetry handles that value.
Logger.warn("Could not resolve path for request")
end
end
# Variant used when :additional_routes are configured: unmatched requests
# are first checked against the additional route specs before defaulting
# to "Unknown" tags.
defp get_conn_tags(routers, additional_routes) do
fn
%{conn: %Conn{} = conn} ->
default_route_tags = handle_additional_routes_check(conn, additional_routes)
conn
|> do_get_router_info(routers, default_route_tags)
|> Map.merge(%{
status: conn.status,
method: conn.method
})
_ ->
# TODO: Change this to warning as warn is deprecated as of Elixir 1.11
Logger.warn("Could not resolve path for request")
end
end
# Resolves path/controller/action tags for a request by asking each router
# in turn; falls back to `default_route_tags` when no router matches.
defp do_get_router_info(conn, routers, default_route_tags) do
  Enum.find_value(routers, default_route_tags, fn router ->
    case Phoenix.Router.route_info(router, conn.method, conn.request_path, "") do
      %{route: path, plug: controller, plug_opts: action} ->
        %{
          path: path,
          controller: normalize_module_name(controller),
          action: normalize_action(action)
        }

      :error ->
        false
    end
  end)
end
# Checks the request path against the user-configured additional routes.
# A spec may be a literal path string or a regex; the first match supplies
# the :path label (controller/action become "NA"). No match yields the
# "Unknown" defaults.
defp handle_additional_routes_check(%Conn{request_path: request_path}, additional_routes) do
  unknown_tags = %{
    path: "Unknown",
    controller: "Unknown",
    action: "Unknown"
  }

  Enum.find_value(additional_routes, unknown_tags, fn {path_label, route_check} ->
    matched? =
      case route_check do
        %Regex{} = regex -> Regex.match?(regex, request_path)
        path when is_binary(path) -> path == request_path
        _ -> false
      end

    if matched? do
      %{path: path_label, controller: "NA", action: "NA"}
    else
      false
    end
  end)
end
# Attaches one proxy handler per configured event prefix; each handler
# re-emits that prefix's :stop event under the shared @stop_event name.
defp set_up_telemetry_proxy(phoenix_event_prefixes) do
  for telemetry_prefix <- phoenix_event_prefixes do
    :telemetry.attach(
      # Handler id must be unique per prefix.
      [:prom_ex, :phoenix, :proxy] ++ telemetry_prefix,
      telemetry_prefix ++ [:stop],
      &__MODULE__.handle_proxy_phoenix_event/4,
      %{}
    )
  end

  :ok
end
@doc false
# Proxy handler: forwards any attached endpoint stop event unchanged under
# the shared @stop_event name so all endpoints feed the same metrics.
def handle_proxy_phoenix_event(_event_name, event_measurement, event_metadata, _config) do
:telemetry.execute(@stop_event, event_measurement, event_metadata)
end
# Renders a module atom as a readable name without the "Elixir." prefix
# (e.g. MyAppWeb.PageController -> "MyAppWeb.PageController");
# non-atom values pass through untouched.
defp normalize_module_name(name) when is_atom(name),
  do: name |> Atom.to_string() |> String.trim_leading("Elixir.")

defp normalize_module_name(name), do: name
# Plug opts for router-dispatched requests are the action atom; anything
# else (non-Phoenix plugs) is labeled "Unknown".
defp normalize_action(action) when is_atom(action), do: action
defp normalize_action(_action), do: "Unknown"
# Collects the distinct additional route specs across the configuration,
# handling both the multi-endpoint (:endpoints) and single-router shapes.
defp fetch_additional_routes!(opts) do
  case fetch_either!(opts, :router, :endpoints) do
    endpoints when is_list(endpoints) ->
      endpoints
      |> Enum.flat_map(fn {_endpoint, endpoint_opts} ->
        Keyword.get(endpoint_opts, :additional_routes, [])
      end)
      |> MapSet.new()
      |> MapSet.to_list()

    _router ->
      Keyword.get(opts, :additional_routes, [])
  end
end
# Collects the distinct telemetry event prefixes across the configuration;
# [:phoenix, :endpoint] is the default when none is supplied.
defp fetch_event_prefixes!(opts) do
  prefixes =
    case fetch_either!(opts, :router, :endpoints) do
      endpoints when is_list(endpoints) ->
        Enum.map(endpoints, fn {_endpoint, endpoint_opts} ->
          Keyword.get(endpoint_opts, :event_prefix, [:phoenix, :endpoint])
        end)

      _router ->
        [Keyword.get(opts, :event_prefix, [:phoenix, :endpoint])]
    end

  prefixes
  |> MapSet.new()
  |> MapSet.to_list()
end
# Collects the distinct router modules across the configuration; a
# single-router config yields a one-element list.
defp fetch_routers!(opts) do
  case fetch_either!(opts, :router, :endpoints) do
    endpoints when is_list(endpoints) ->
      endpoints
      |> Enum.flat_map(fn {_endpoint, endpoint_opts} ->
        Keyword.fetch!(endpoint_opts, :routers)
      end)
      |> MapSet.new()
      |> MapSet.to_list()

    router ->
      [router]
  end
end
# Fetches whichever of the two keys is present (key1 wins when both are),
# raising a KeyError when neither is configured.
defp fetch_either!(keywordlist, key1, key2) do
  cond do
    Keyword.has_key?(keywordlist, key1) ->
      keywordlist[key1]

    Keyword.has_key?(keywordlist, key2) ->
      keywordlist[key2]

    true ->
      raise KeyError, "Neither #{inspect(key1)} nor #{inspect(key2)} found in #{inspect(keywordlist)}"
  end
end
end
else
# Stub compiled when Phoenix is not loadable as a dependency; using the
# plugin then raises a descriptive "missing dependency" error.
defmodule PromEx.Plugins.Phoenix do
@moduledoc false
use PromEx.Plugin
@impl true
def event_metrics(_opts) do
PromEx.Plugin.no_dep_raise(__MODULE__, "Phoenix")
end
end
end
|
lib/prom_ex/plugins/phoenix.ex
| 0.884127
| 0.643014
|
phoenix.ex
|
starcoder
|
defmodule WordsWithEnemies.GameChannel do
@moduledoc """
Contains callbacks that control the actual gameplay.
"""
use WordsWithEnemies.Web, :channel
import WordsWithEnemies.WordFinder, only: [word_list: 0, using: 2]
alias WordsWithEnemies.{Game, Letters, WordFinder, Hints, Player}
alias WordsWithEnemies.Game.Registry, as: GameRegistry
alias WordsWithEnemies.Game.Server, as: GameServer
@minimum_players 2
@doc """
Connect to a standard single player game. The connection is always
accepted.
"""
def join("games:ai", _payload, socket), do: {:ok, socket}
@doc """
Connect to the multiplayer game. The player will already
be a member if they've joined the game through the lobby.
"""
def join("games:" <> game_id, _payload, socket) do
# Topic carries the numeric game id, e.g. "games:42".
game_id = String.to_integer(game_id)
user_id = socket.assigns.user_id
if can_join?(game_id, user_id) do
# Message this channel process after the join reply; the matching
# handle_info({:begin_game?, _}, socket) clause lives elsewhere in
# this module.
send(self(), {:begin_game?, game_id})
{:ok, socket}
else
{:error, %{reason: "unauthorised"}}
end
end
def can_join?(game_id, player_id) do
case GameRegistry.lookup(game_id) do
{:ok, pid} ->
%Game{players: players} = GameServer.lookup(pid)
Enum.any?(players, &(&1.id === player_id))
{:error, _reason} ->
false
end
end
@doc """
Sends the initial set of letters, and hints for words that can
be made from these letters, to the client.
"""
def handle_in("games:start", %{"difficulty" => difficulty}, socket) do
letters = get_hints_and_letters(difficulty)
{:reply, {:ok, %{letters: letters}}, socket}
end
def handle_in("games:change_letters", %{"difficulty" => difficulty}, socket) do
letters = get_hints_and_letters(difficulty)
{:reply, {:ok, %{letters: letters}}, socket}
end
def handle_in("games:new_hints", %{"difficulty" => difficulty, "letters" => letters}, socket) do
main_pid = self()
spawn(fn -> get_hints(main_pid, letters, difficulty) end)
{:reply, :ok, socket}
end
def get_hints_and_letters(difficulty) do
main_pid = self() # keep reference for spawned function.
letters = Letters.generate_set(:player, difficulty)
# Get the hints in another process so we don't block everything else.
spawn(fn -> get_hints(main_pid, letters, difficulty) end)
letters
end
defp get_hints(main_pid, letters, difficulty) when is_pid(main_pid) do
opts = hint_strength(difficulty)
send(main_pid, {:send_hints, Hints.from_letters(letters, opts)})
end
defp hint_strength("easy"), do: [min: 5]
defp hint_strength("medium"), do: [min: 4, max: 8]
defp hint_strength("hard"), do: [min: 3, max: 5]
@doc """
Adds a new letter to the client's current set.
"""
def handle_in("games:add_letter", %{"letters" => letters}, socket) do
new_letter = Letters.add_letter(letters)
{:reply, {:ok, %{letter: new_letter}}, socket}
end
@doc """
Called when the user submits a word; returns either `true` or `false`
depending on whether it's in the dictionary.
"""
def handle_in("games:check_validity", %{"word" => word}, socket) do
valid? = WordFinder.valid?(word)
{:reply, {:ok, %{valid: valid?}}, socket}
end
@doc """
Sends the client all the word that can be made from `letters`.
"""
def handle_in("games:get_words", %{"letters" => letters}, socket) do
words =
word_list()
|> using(letters)
|> Enum.to_list
|> sort_by_length
{:reply, {:ok, %{words: words}}, socket}
end
defp sort_by_length(list) when is_list(list) do
Enum.sort(list, &(String.length(&1) < String.length(&2)))
end
@doc """
Informs all connected clients that a letter has been moved from the
word bank to the word. Used in multiplayer games.
"""
def handle_in("games:letter_to_word", %{"letter_id" => letter_id}, socket) do
if valid_move?(letter_id, socket) do
broadcast_from(socket, "letter_to_word", %{letter_id: letter_id})
end
{:reply, :ok, socket}
end
@doc """
Informs all connected clients that a letter has been moved back from the
word to the word bank. Used in multiplayer games.
"""
def handle_in("games:letter_to_bank", %{"letter_id" => letter_id}, socket) do
if valid_move?(letter_id, socket) do
broadcast_from(socket, "letter_to_bank", %{letter_id: letter_id})
end
end
def handle_in("games:generate_word", %{"difficulty" => difficulty, "user_word" => user_word}, socket) do
letters = Letters.generate_set(:ai, difficulty)
word = word_list |> using(letters) |> Enum.random
{:reply, {:ok, %{word: word}}, socket}
end
@doc """
"""
def handle_info({:begin_game?, game_id}, socket) do
{:ok, pid} = GameRegistry.lookup(game_id)
%Game{players: players} = GameServer.lookup(pid)
if length(players) >= @minimum_players do
GameServer.begin_game(pid)
GameServer.distribute_letters(pid)
%Game{players: players} = GameServer.lookup(pid)
letter_indexes = map_letters_to_index(players)
letters = %{
indexedLetters: letter_indexes |> key_by_index |> keys_to_string,
playerLetters: build_key_list(players, key_by_letter(letter_indexes), %{})
}
broadcast(socket, "begin_game", letters)
end
{:noreply, socket}
end
defp map_letters_to_index(players) do
players
|> Enum.map(fn(%Player{letters: letters}) -> letters end)
|> List.flatten()
|> Enum.with_index()
end
defp key_by_index(letters) do
Enum.reduce(letters, %{}, fn({letter, index}, letters) ->
Map.put(letters, index, letter)
end)
end
defp key_by_letter(letters) do
Enum.reduce(letters, %{}, fn({letter, index}, letters) ->
Map.update(letters, letter, [index], &(&1 ++ [index]))
end)
end
defp build_key_list([], _keys, results), do: results
defp build_key_list([%Player{id: id, letters: letters} | rest], keys, results) do
%{keys: keys, letter_keys: letter_keys} = do_build_key_list(letters, keys, [])
results = Map.put(results, to_string(id), letter_keys)
build_key_list(rest, keys, results)
end
defp do_build_key_list([], keys, letter_keys) do
%{keys: keys, letter_keys: letter_keys}
end
defp do_build_key_list([letter|rest], keys, letter_keys) do
[key | other_keys] = keys[letter]
keys = Map.put(keys, letter, other_keys)
do_build_key_list(rest, keys, letter_keys ++ [key])
end
defp keys_to_string(map) do
Map.new(map, fn({k, v}) -> {to_string(k), v} end)
end
@doc """
Sends hints for a set of letters to the client when they've
been generated.
"""
def handle_info({:send_hints, hints}, socket) do
push(socket, "receive_hints", %{hints: hints})
{:noreply, socket}
end
defp valid_move?(letter, socket) do
true
end
end
|
web/channels/game_channel.ex
| 0.672332
| 0.489686
|
game_channel.ex
|
starcoder
|
defmodule KittenBlue.JWK do
  @moduledoc """
  Structure containing `kid`, `alg`, `JOSE.JWK` and handling functions
  """
  require Logger

  defstruct [
    :kid,
    :alg,
    :key
  ]

  @type t :: %__MODULE__{kid: String.t(), alg: String.t(), key: JOSE.JWK.t()}

  # Set the default value here to avoid compilation errors where Configuration does not exist.
  @http_client (case(Application.fetch_env(:kitten_blue, __MODULE__)) do
                  {:ok, config} ->
                    config |> Keyword.fetch!(:http_client)

                  _ ->
                    Scratcher.HttpClient
                end)

  # NOTE: from_compact/to_conpact does not support Poly1305
  @algs_for_oct ["HS256", "HS384", "HS512"]
  @algs_for_pem [
    "ES256",
    "ES384",
    "ES512",
    "Ed25519",
    "Ed25519ph",
    "Ed448",
    "Ed448ph",
    "PS256",
    "PS384",
    "PS512",
    "RS256",
    "RS384",
    "RS512"
  ]

  @doc """
  ```Elixir
  kid = "sample_201804"
  alg = "RS256"
  key = JOSE.JWK.from_pem_file("rsa-2048.pem")
  kb_jwk = KittenBlue.JWK.new([kid, alg, key])
  kb_jwk = KittenBlue.JWK.new([kid: kid, alg: alg, key: key])
  kb_jwk = KittenBlue.JWK.new(%{kid: kid, alg: alg, key: key})
  ```
  """
  # Fix: specs previously used nonexistent types Keywords.t(), List.t()
  # and Map.t(); replaced with Keyword.t(), list() and map().
  @spec new(params :: Keyword.t()) :: t
  def new(params = [kid: _, alg: _, key: _]) do
    struct(__MODULE__, Map.new(params))
  end

  @spec new(params :: list()) :: t
  def new([kid, alg, key]) do
    struct(__MODULE__, %{kid: kid, alg: alg, key: key})
  end

  @spec new(params :: map()) :: t
  def new(params = %{kid: _, alg: _, key: _}) do
    struct(__MODULE__, params)
  end

  @doc """
  Convert `KittenBlue.JWK` list to `JSON Web Key Sets` format public keys.

  ```Elixir
  kb_jwk_list = [kb_jwk]
  public_jwk_sets = KittenBlue.JWK.list_to_public_jwk_sets(kb_jwk_list)
  ```
  """
  @spec list_to_public_jwk_sets(jwk_list :: [t()]) :: map | nil
  def list_to_public_jwk_sets([]) do
    nil
  end

  def list_to_public_jwk_sets(jwk_list) when is_list(jwk_list) do
    %{
      "keys" =>
        jwk_list
        |> Enum.map(fn jwk -> to_public_jwk_set(jwk) end)
        # Keys that cannot be converted are silently dropped.
        |> Enum.filter(&(!is_nil(&1)))
    }
  end

  @doc """
  Convert `KittenBlue.JWK` to `JSON Web Key Sets` format public key.

  ```Elixir
  public_jwk_set = KittenBlue.JWK.to_public_jwk_set(kb_jwk)
  ```
  """
  @spec to_public_jwk_set(jwk :: t) :: map | nil
  def to_public_jwk_set(jwk = %__MODULE__{}) do
    jwk.key
    |> JOSE.JWK.to_public()
    |> JOSE.JWK.to_map()
    |> elem(1)
    |> Map.put("alg", jwk.alg)
    |> Map.put("kid", jwk.kid)
  end

  def to_public_jwk_set(_) do
    nil
  end

  @doc """
  Convert `JSON Web Key Sets` format public keys to `KittenBlue.JWK` list.

  ```
  kb_jwk_list = KittenBlue.JWK.public_jwk_sets_to_list(public_jwk_sets)
  ```
  """
  @spec public_jwk_sets_to_list(public_json_web_key_sets :: map) :: [t()]
  def public_jwk_sets_to_list(_public_json_web_key_sets = %{"keys" => public_jwk_sets})
      when is_list(public_jwk_sets) do
    public_jwk_sets
    |> Enum.map(fn public_jwk_set -> from_public_jwk_set(public_jwk_set) end)
    |> Enum.filter(&(!is_nil(&1)))
  end

  def public_jwk_sets_to_list(_public_json_web_key_sets) do
    []
  end

  @doc """
  Convert `JSON Web Key Sets` format public key to `KittenBlue.JWK`.

  ```
  kb_jwk = KittenBlue.JWK.from_public_jwk_set(public_jwk_set)
  ```
  """
  @spec from_public_jwk_set(public_json_web_key_set :: map) :: t | nil
  def from_public_jwk_set(jwk_map) when is_map(jwk_map) do
    try do
      with alg when alg != nil <- jwk_map["alg"],
           kid when kid != nil <- jwk_map["kid"],
           key = %JOSE.JWK{} <- jwk_map |> JOSE.JWK.from_map() do
        new(kid: kid, alg: alg, key: key)
      else
        _ -> nil
      end
    rescue
      # JOSE raises on malformed key maps; treat any such key as absent.
      _ -> nil
    end
  end

  def from_public_jwk_set(_) do
    nil
  end

  @doc """
  Convert `KittenBlue.JWK` List to compact storable format for configration.

  ```
  kb_jwk_list = [kb_jwk]
  kb_jwk_list_config = KittenBlue.JWK.list_to_compact(kb_jwk_list)
  ```
  """
  @spec list_to_compact(jwk_list :: [t()], opts :: Keyword.t()) :: list()
  def list_to_compact(jwk_list, opts \\ []) do
    jwk_list
    |> Enum.map(fn jwk -> to_compact(jwk, opts) end)
  end

  @doc """
  Convert `KittenBlue.JWK` to compact storable format for configration.

  ```
  kb_jwk_config = KittenBlue.JWK.to_compact(kb_jwk)
  ```
  """
  @spec to_compact(jwk :: t(), opts :: Keyword.t()) :: list()
  def to_compact(jwk, opts \\ []) do
    case {jwk.alg, opts[:use_map]} do
      {_, true} ->
        [jwk.kid, jwk.alg, jwk.key |> JOSE.JWK.to_map() |> elem(1)]

      {alg, nil} when alg in @algs_for_oct ->
        # Octet keys are binary, so Base64-encode them for storage.
        [
          jwk.kid,
          jwk.alg,
          jwk.key |> JOSE.JWK.to_oct() |> elem(1) |> Base.encode64(padding: false)
        ]

      {alg, nil} when alg in @algs_for_pem ->
        [jwk.kid, jwk.alg, jwk.key |> JOSE.JWK.to_pem() |> elem(1)]

      {_, _} ->
        []
    end
  end

  @doc """
  Convert compact storable format to `KittenBlue.JWK`.

  ```
  kb_jwk_list = KittenBlue.JWK.compact_to_list(kb_jwk_list_config)
  ```
  """
  # Fix: spec previously claimed a single t() but this returns a list.
  @spec compact_to_list(jwk_compact_list :: list()) :: [t()]
  def compact_to_list(jwk_compact_list) when is_list(jwk_compact_list) do
    jwk_compact_list
    |> Enum.map(fn jwk_compact -> from_compact(jwk_compact) end)
    |> Enum.filter(&(!is_nil(&1)))
  end

  @doc """
  Convert compact storable format to `KittenBlue.JWK`.

  ```
  kb_jwk = KittenBlue.JWK.from_compact(kb_jwk_config)
  ```
  """
  @spec from_compact(jwk_compact :: list()) :: t() | nil
  def from_compact(_jwk_compact = [kid, alg, key]) do
    cond do
      is_map(key) ->
        [kid, alg, key |> JOSE.JWK.from_map()] |> new()

      alg in @algs_for_oct ->
        [kid, alg, key |> Base.decode64!(padding: false) |> JOSE.JWK.from_oct()] |> new()

      alg in @algs_for_pem ->
        [kid, alg, key |> JOSE.JWK.from_pem()] |> new()

      true ->
        nil
    end
  end

  @doc """
  Fetch jwks uri and return jwk list.

  ```
  kb_jwk_list = KittenBlue.JWK.fetch!(jwks_uri)
  ```

  NOTE: The HTTP client must be implemented using Scratcher.HttpClient as the Behavior.

  * [hexdocs](https://hexdocs.pm/scratcher/Scratcher.HttpClient.html)
  * [github](https://github.com/ritou/elixir-scratcher/blob/master/lib/scratcher/http_client.ex)
  """
  @spec fetch!(jwks_uri :: String.t()) :: [t()] | nil
  def fetch!(jwks_uri) do
    case @http_client.request(:get, jwks_uri, "", [], []) do
      {:ok, %{status_code: 200, body: body}} ->
        Jason.decode!(body) |> __MODULE__.public_jwk_sets_to_list()

      {:ok, %{} = res} ->
        # NOTE(review): Logger.warn/1 is deprecated on Elixir >= 1.15 in
        # favour of Logger.warning/1; kept for compatibility.
        Logger.warn("HTTP Client returned {:ok, #{inspect(res)}}")
        nil

      {:error, %{reason: _} = error} ->
        Logger.warn("HTTP Client returned {:error, #{inspect(error)}}")
        nil
    end
  end

  @doc """
  Convert config format to `KittenBlue.JWK` for main issuerance.
  For JWT (JWS) signatures, there are cases where a single key is used to issue a signature and multiple keys are used for verification.
  You can easily get the issuing key from the config with the following description.

  ```elixir
  config :your_app, Your.Module,
    kid: "kid20200914",
    keys: [["kid20200914", "HS256", "<KEY>"]]
  ```

  The key specified by `:kid` must be included in `:keys`.

  ```elixir
  @config Application.fetch_env!(:your_app, Your.Module)
  kb_jwk_to_issue = find_key_to_issue(@config)
  ```
  """
  @spec find_key_to_issue(config :: Keyword.t()) :: t() | nil
  def find_key_to_issue(config) do
    with keys <- config |> Keyword.fetch!(:keys) |> KittenBlue.JWK.compact_to_list(),
         kid <- config |> Keyword.fetch!(:kid) do
      Enum.find(keys, fn kb_jwk -> kb_jwk.kid == kid end)
    end
  end
end
|
lib/kitten_blue/jwk.ex
| 0.819749
| 0.767646
|
jwk.ex
|
starcoder
|
defmodule Range do
  @moduledoc """
  Defines a Range.

  A Range is represented internally as a struct. However,
  the most common form of creating and matching on ranges
  is via the `../2` macro, auto-imported from Kernel:

      iex> range = 1..3
      1..3
      iex> first .. last = range
      iex> first
      1
      iex> last
      3
  """

  defstruct first: nil, last: nil

  @type t :: %Range{}
  @type t(first, last) :: %Range{first: first, last: last}

  @doc """
  Builds a new range running from `first` to `last`.
  """
  def new(first, last), do: %__MODULE__{first: first, last: last}

  @doc """
  Returns true if the given argument is a range.

  ## Examples

      iex> Range.range?(1..3)
      true
      iex> Range.range?(0)
      false
  """
  def range?(%__MODULE__{}), do: true
  def range?(_), do: false
end
defprotocol Range.Iterator do
  @moduledoc """
  A protocol used for iterating range elements.
  """

  @doc """
  Returns the function that calculates the next item.
  """
  # Dispatches on `first` (the range's starting value); the returned
  # one-argument function advances a value one step through `range`.
  def next(first, range)

  @doc """
  Counts how many items are in the range.
  """
  def count(first, range)
end
defimpl Enumerable, for: Range do
  # Entry point of the Enumerable protocol for ranges. Precomputes the
  # stepper function and the direction flag (`last >= first`) once, then
  # hands off to the clause-based reducer below.
  def reduce(first .. last = range, acc, fun) do
    reduce(first, last, acc, fun, Range.Iterator.next(first, range), last >= first)
  end

  # Caller requested a halt: stop immediately with the accumulator.
  defp reduce(_x, _y, {:halt, acc}, _fun, _next, _up) do
    {:halted, acc}
  end

  # Caller suspended the reduction: return a continuation capturing state.
  defp reduce(x, y, {:suspend, acc}, fun, next, up) do
    {:suspended, acc, &reduce(x, y, &1, fun, next, up)}
  end

  # Ascending range (up == true): step while x has not passed y.
  defp reduce(x, y, {:cont, acc}, fun, next, true) when x <= y do
    reduce(next.(x), y, fun.(x, acc), fun, next, true)
  end

  # Descending range (up == false): step while x has not passed y.
  defp reduce(x, y, {:cont, acc}, fun, next, false) when x >= y do
    reduce(next.(x), y, fun.(x, acc), fun, next, false)
  end

  # Bounds crossed in either direction: the range is exhausted.
  defp reduce(_, _, {:cont, acc}, _fun, _next, _up) do
    {:done, acc}
  end

  # Membership check normalises the bounds so direction doesn't matter.
  def member?(first .. last, value) do
    if first <= last do
      {:ok, first <= value and value <= last}
    else
      {:ok, last <= value and value <= first}
    end
  end

  # Size is delegated to the Range.Iterator protocol.
  def count(first .. _ = range) do
    {:ok, Range.Iterator.count(first, range)}
  end
end
defimpl Range.Iterator, for: Integer do
  # Stepper for integer ranges: +1 for ascending ranges, -1 for descending.
  def next(first, _ .. last) when is_integer(last) do
    step = if last >= first, do: 1, else: -1
    &(&1 + step)
  end

  # Both bounds are inclusive, so the size is the absolute span plus one.
  def count(first, _ .. last) when is_integer(last) do
    abs(last - first) + 1
  end
end
defimpl Inspect, for: Range do
  import Inspect.Algebra

  # Renders a range as "<first>..<last>" using the algebra document API.
  def inspect(first .. last, opts) do
    first
    |> to_doc(opts)
    |> concat("..")
    |> concat(to_doc(last, opts))
  end
end
|
lib/elixir/lib/range.ex
| 0.868423
| 0.615146
|
range.ex
|
starcoder
|
defmodule Sammal.Tokenizer do
  @moduledoc """
  Tokenizer and various helper methods.
  """
  alias Sammal.{Expr, SammalError}

  # Matches a quote/paren, a (possibly unterminated) string literal, or a
  # run of symbol/number characters.
  @tokenizer_regex ~r/(['()]|"[^"]*"?|[\w-+\/.#]+)/

  @doc ~S"""
  Split a line of raw input into a list of Node structs.

  ## Example

      iex> Sammal.Tokenizer.tokenize("(define x 10)")
      {:ok, [%Sammal.Expr{lex: "(", line: 0, row: 0, val: :"(", ctx: "(define x 10)"},
      %Sammal.Expr{lex: "define", line: 0, row: 1, val: :define, ctx: "(define x 10)"},
      %Sammal.Expr{lex: "x", line: 0, row: 8, val: :x, ctx: "(define x 10)"},
      %Sammal.Expr{lex: "10", line: 0, row: 10, val: 10, ctx: "(define x 10)"},
      %Sammal.Expr{lex: ")", line: 0, row: 12, val: :")", ctx: "(define x 10)"}]}
  """
  def tokenize(line, line_index \\ 0)
  # Lines starting with ";" are comments and produce no tokens.
  def tokenize(";" <> _, _), do: {:ok, []}
  def tokenize(line, line_index) do
    tokens =
      @tokenizer_regex
      |> Regex.scan(line, capture: :first, return: :index)
      |> Enum.map(fn [{row, n}] ->
        # `row` is the offset of the lexeme within `line`, `n` its length.
        lex = String.slice(line, row, n)
        token = %Expr{ctx: line, row: row, lex: lex, line: line_index}
        case lexeme_to_value(lex) do
          {:ok, val} -> %{token | val: val}
          # Abort the whole line on the first bad lexeme; caught below.
          {:error, error} -> throw SammalError.new(error, token)
        end
      end)
    {:ok, tokens}
  catch
    %SammalError{} = error -> {:error, error}
  end

  @doc ~S"""
  Given a lexeme, return a matching Elixir value (or an error).

  ## Example

      iex> Sammal.Tokenizer.lexeme_to_value("12")
      {:ok, 12}
      iex> Sammal.Tokenizer.lexeme_to_value("12.12")
      {:ok, 12.12}
      iex> Sammal.Tokenizer.lexeme_to_value("\"12\"")
      {:ok, "12"}
  """
  def lexeme_to_value("#t"), do: {:ok, true}
  def lexeme_to_value("#f"), do: {:ok, false}
  # The opening quote was consumed by the pattern; a valid string literal
  # must still carry its closing quote.
  def lexeme_to_value("\"" <> tail) do
    if String.ends_with?(tail, "\"") do
      {:ok, String.slice(tail, 0..-2)}
    else
      {:error, :ending_quote}
    end
  end
  # Try integer first, then float; anything else becomes a symbol (atom).
  # NOTE(review): String.to_atom on arbitrary input can exhaust the atom
  # table; acceptable for a toy interpreter, revisit for untrusted input.
  def lexeme_to_value(lex) do
    case Integer.parse(lex) do
      {val, ""} -> {:ok, val}
      :error -> {:ok, String.to_atom(lex)}
      # Partial integer parse (e.g. "12.5" -> {12, ".5"}): retry as float.
      {val, _} -> case Float.parse(lex) do
        {val, ""} -> {:ok, val}
        _ -> {:ok, String.to_atom(lex)}
      end
    end
  end
end
|
lib/tokenizer.ex
| 0.619932
| 0.435962
|
tokenizer.ex
|
starcoder
|
defmodule URI do
  # NOTE(review): this module is written in pre-1.0 Elixir syntax
  # (defrecord, `//` default arguments, `lc`/`bc` comprehensions, `%r`
  # regex sigil, partial application like `pair(&1)`). It will not compile
  # on modern Elixir; the comments below document intent only.

  # Erlang on_load hook: runs preload_parsers/0 when the module is loaded.
  @on_load :preload_parsers

  defrecord Info, [scheme: nil, path: nil, query: nil,
                   fragment: nil, authority: nil,
                   userinfo: nil, host: nil, port: nil,
                   specifics: nil]

  import Bitwise

  @moduledoc """
  Utilities for working with and creating URIs.
  """

  @doc """
  Takes an enumerable (containing a sequence of two-item tuples)
  and returns a string of k=v&k2=v2... where keys and values are
  URL encoded as per encode. Keys and values can be any term
  that implements the Binary.Chars protocol (i.e. can be converted
  to binary).
  """
  def encode_query(l), do: Enum.map_join(l, "&", pair(&1))

  @doc """
  Given a query string of the form "key1=value1&key=value2...", produces an
  orddict with one entry for each key-value pair. Each key and value will be a
  binary. It also does percent-unescaping of both keys and values.

  Use decoder/1 if you want to customize or iterate each value manually.
  """
  def decode_query(q, dict // HashDict.new) when is_binary(q) do
    Enum.reduce query_decoder(q), dict, fn({ k, v }, acc) -> Dict.put(acc, k, v) end
  end

  @doc """
  Returns an iterator function over the query string that decodes
  the query string in steps.
  """
  def query_decoder(q) when is_binary(q) do
    fn(acc, fun) ->
      do_decoder(q, acc, fun)
    end
  end

  # Nothing left to decode: the fold is finished.
  defp do_decoder("", acc, _fun) do
    acc
  end

  # Splits off one `key=value` pair per step and folds it into `acc`.
  # NOTE(review): relies on `first` leaking out of the case clauses, which
  # pre-1.2 Elixir allowed.
  defp do_decoder(q, acc, fun) do
    next =
      case :binary.split(q, "&") do
        [first, rest] -> rest
        [first] -> ""
      end
    current =
      case :binary.split(first, "=") do
        [ key, value ] -> { decode(key), decode(value) }
        [ key ] -> { decode(key), nil }
      end
    do_decoder(next, fun.(current, acc), fun)
  end

  # Renders a single key/value pair as "key=value", percent-encoding both.
  defp pair({k, v}) do
    encode(to_binary(k)) <> "=" <> encode(to_binary(v))
  end

  @doc """
  Percent (URL) encodes a URI.
  """
  # `bc ... inbits` is an archaic bitstring comprehension: percent/1 is
  # applied to every byte of `s`.
  def encode(s), do: bc <<c>> inbits s, do: <<percent(c) :: binary>>

  # Space becomes "+"; unreserved characters pass through unchanged.
  defp percent(32), do: <<?+>>
  defp percent(?-), do: <<?->>
  defp percent(?_), do: <<?_>>
  defp percent(?.), do: <<?.>>
  defp percent(c)
      when c >= ?0 and c <= ?9
      when c >= ?a and c <= ?z
      when c >= ?A and c <= ?Z do
    <<c>>
  end
  # Everything else is escaped as "%XY" with uppercase hex digits.
  defp percent(c), do: "%" <> hex(bsr(c, 4)) <> hex(band(c, 15))

  defp hex(n) when n <= 9, do: <<n + ?0>>
  defp hex(n), do: <<n + ?A - 10>>

  @doc """
  Unpercent (URL) decodes a URI.
  """
  def decode(<<?%, hex1, hex2, tail :: binary >>) do
    << bsl(hex2dec(hex1), 4) + hex2dec(hex2) >> <> decode(tail)
  end
  def decode(<<head, tail :: binary >>) do
    <<check_plus(head)>> <> decode(tail)
  end
  def decode(<<>>), do: <<>>

  # NOTE(review): lowercase hex digits (a-f) are not handled here, so an
  # escape like "%2f" would fail to match — confirm whether intended.
  defp hex2dec(n) when n in ?A..?F, do: n - ?A + 10
  defp hex2dec(n) when n in ?0..?9, do: n - ?0

  # "+" in query strings decodes back to a space (32).
  defp check_plus(?+), do: 32
  defp check_plus(c), do: c

  @doc """
  Parses a URI into components.

  URIs have portions that are handled specially for the
  particular scheme of the URI. For example, http and https
  have different default ports. Sometimes the parsing
  of portions themselves are different. This parser
  is extensible via behavior modules. If you have a
  module named URI.MYSCHEME with a function called
  'parse' that takes a single argument, the generically
  parsed URI, that function will be called when this
  parse function is passed a URI of that scheme. This
  allows you to build on top of what the URI library
  currently offers. You also need to define default_port
  which takes 0 arguments and returns the default port
  for that particular scheme. Take a look at URI.HTTPS for an
  example of one of these extension modules.
  """
  def parse(s) when is_binary(s) do
    # From http://tools.ietf.org/html/rfc3986#appendix-B
    regex = %r/^(([^:\/?#]+):)?(\/\/([^\/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?/
    parts = nillify(Regex.run(regex, s))

    destructure [_, _, scheme, _, authority, path, _, query, _, fragment], parts
    { userinfo, host, port } = split_authority(authority)

    # Rebuild the authority string from its parsed pieces so it comes out
    # normalised as userinfo@host:port.
    if authority do
      authority = ""

      if userinfo, do: authority = authority <> userinfo <> "@"
      if host, do: authority = authority <> host
      if port, do: authority = authority <> ":" <> integer_to_binary(port)
    end

    info = URI.Info[
      scheme: scheme && String.downcase(scheme), path: path, query: query,
      fragment: fragment, authority: authority,
      userinfo: userinfo, host: host, port: port
    ]

    scheme_specific(scheme, info)
  end

  @doc false
  def scheme_module(scheme) do
    if scheme do
      # safe_concat only resolves already-existing module atoms, so
      # arbitrary scheme strings cannot create new atoms.
      module =
        try do
          Module.safe_concat(URI, String.upcase(scheme))
        rescue
          ArgumentError -> nil
        end

      if module && Code.ensure_loaded?(module) do
        module
      end
    end
  end

  # Delegates to the scheme's extension module when one exists.
  defp scheme_specific(scheme, info) do
    if module = scheme_module(scheme) do
      module.parse(default_port(info, module))
    else
      info
    end
  end

  # Fills in the scheme module's default port when none was parsed.
  defp default_port(info, module) do
    if info.port, do: info, else: info.port(module.default_port)
  end

  # Split an authority into its userinfo, host and port parts.
  defp split_authority(s) do
    s = s || ""
    components = Regex.run %r/(^(.*)@)?([^:]*)(:(\d*))?/, s

    destructure [_, _, userinfo, host, _, port], nillify(components)
    port = if port, do: binary_to_integer(port)

    { userinfo, host && String.downcase(host), port }
  end

  # Regex.run returns empty strings sometimes. We want
  # to replace those with nil for consistency.
  defp nillify(l) do
    lc s inlist l do
      if size(s) > 0 do
        s
      else
        nil
      end
    end
  end

  # Reference parsers so the parse/1 doesn't fail
  # on safe_concat.
  defp preload_parsers do
    parsers = [URI.FTP, URI.HTTP, URI.HTTPS, URI.LDAP, URI.SFTP, URI.TFTP]
    Enum.each parsers, Code.ensure_loaded(&1)
    :ok
  end
end
defimpl Binary.Chars, for: URI.Info do
  # Renders a parsed URI record back into a binary, omitting the port when
  # it equals the scheme's default. NOTE(review): pre-1.0 Elixir semantics —
  # rebinding `result`/`uri` inside `if` leaked to the enclosing scope.
  def to_binary(uri) do
    result = ""
    if module = URI.scheme_module(uri.scheme) do
      if module.default_port == uri.port, do: uri = uri.port(nil)
    end
    if uri.scheme, do: result = result <> uri.scheme <> "://"
    if uri.userinfo, do: result = result <> uri.userinfo <> "@"
    if uri.host, do: result = result <> uri.host
    if uri.port, do: result = result <> ":" <> integer_to_binary(uri.port)
    if uri.path, do: result = result <> uri.path
    if uri.query, do: result = result <> "?" <> uri.query
    if uri.fragment, do: result = result <> "#" <> uri.fragment
    result
  end
end
|
lib/elixir/lib/uri.ex
| 0.76708
| 0.646097
|
uri.ex
|
starcoder
|
defmodule GEMS.MatrixStore do
  @moduledoc """
  Persistence layer for the current Matrix (public)
  Used to retrieve the initial state of the matrix when a user
  connects.
  Note: there are a lot of ways to implement this, for now
  I've chosen to leverage Presence, aka: Phoenix.Tracker, since
  it's battletested. Behind the scenes it uses PG2 (or just PG).
  """

  # The state held by this server and published through Presence metadata.
  @type state() :: %{board: any(), updated_at: DateTime.t() | nil}

  use GenServer
  import GEMS.Util.Time
  alias GEMSWeb.PubSub
  alias GEMSWeb.Presence

  # Registered process name and the Presence/PubSub topic (one entry per node).
  @name :"store:matrix"
  @topic "store:matrix"

  def start_link(_default) do
    GenServer.start_link(__MODULE__, %{}, name: @name)
  end

  @impl true
  def init(_default) do
    state = build_state()
    # Subscribe so presence_diff and matrix_update events reach handle_info/2.
    PubSub.subscribe(@topic)
    # Track this node's copy of the state so peers can recover it on join.
    Presence.track(self(), @topic, Node.self(), state)
    {:ok, state}
  end

  # Replaces the stored board (fire-and-forget).
  def update(board, updated_at), do: GenServer.cast(@name, {:update, board, updated_at})

  # Returns the current board synchronously.
  def get(), do: GenServer.call(@name, :get)

  @impl true
  def handle_call(:get, _from, %{board: board} = state) do
    {:reply, board, state}
  end

  @impl true
  def handle_cast({:update, board, updated_at}, _prev_state) do
    # The previous state is discarded wholesale; Presence is refreshed so
    # other nodes see the new board.
    new_state = build_state(board, updated_at)
    update_presence(new_state)
    {:noreply, new_state}
  end

  @impl true
  def handle_info(%{event: "presence_diff"}, _state) do
    # Cluster membership changed: adopt the newest tracked state, or start
    # empty if nothing is tracked yet.
    {:noreply, get_latest_state(@topic) || build_state()}
  end

  def handle_info(
        {:matrix_update, %{board: b}},
        state
      ) do
    # Broadcast from elsewhere: publish the new board via Presence but keep
    # our local state; the resulting presence_diff will sync it.
    update_presence(build_state(b, now()))
    {:noreply, state}
  end

  @doc """
  Iterate tracked presences and return the newest state.
  """
  @spec get_latest_state(String.t()) :: state()
  def get_latest_state(topic) do
    Presence.list(topic)
    |> Map.values()
    |> Stream.map(fn
      %{
        metas: [
          %{
            board: _board,
            updated_at: _updated_at
          } = s
        ]
      } ->
        s
    end)
    |> Stream.filter(fn %{updated_at: d} -> d != nil end)
    # Newest first, then take the head; nil when no node has state yet.
    |> Enum.sort(fn %{updated_at: d1}, %{updated_at: d2} -> DateTime.compare(d1, d2) != :lt end)
    |> List.first()
  end

  defp build_state(board \\ nil, updated_at \\ nil), do: %{board: board, updated_at: updated_at}

  # Refreshes this node's Presence metadata with `data`.
  defp update_presence(data) do
    Presence.update(Process.whereis(@name), @topic, Node.self(), data)
  end
end
|
lib/gems/matrix_store.ex
| 0.826642
| 0.504211
|
matrix_store.ex
|
starcoder
|
defmodule Versioned.Absinthe do
  @moduledoc """
  Helpers for Absinthe schemas.
  """
  alias Versioned.Helpers

  @doc """
  Declare an object, versioned compliment, and interface, based off name `name`.

  The caller should `use Absinthe.Schema.Notation` as here we return code
  which invokes its `object` macro.

  Both objects belong to an interface which encompasses the common fields.
  All common fields (except `:id` and `:inserted_at`) are included under an
  interface, named by the entity name and suffixed `_base`.

  The generated object will have the following fields:

  * `:id` - ID of the record.
  * `:version_id` - ID of the most recent record's version.
  * `:inserted_at` - Timestamp when the record was created.
  * `:updated_at` - Timestamp when the record was last updated.
  * Additionally, all fields declared in the block.

  The generated version object will have the following fields:

  * `:id` - ID of the version record.
  * `:foo_id` - If the entity was `:foo`, then this would be the id of the main
    record for which this version is based.
  * `:is_deleted` - Boolean indicating if the record was deleted as of this version.
  * `:inserted_at` - Timestamp when the version record was created.

  Additionally, all fields declared in the block.

  ## Declaring Fields for Version Object Only

  In order for a field to appear only in the version object, use the
  `:version_fields` option in the (optional) keyword list before the do block:

      versioned_object :car,
        version_fields: [person_versions: non_null(list_of(non_null(:person_version)))] do
        ...
      end
  """
  defmacro versioned_object(name, opts \\ [], do: block) do
    # Normalize so a single-expression block is still a :__block__ AST node.
    {:__block__, _m, lines_ast} = Helpers.normalize_block(block)
    {version_fields, opts} = Keyword.pop(opts, :version_fields)

    quote do
      # Base object: caller's fields plus id/version/timestamp bookkeeping.
      object unquote(name), unquote(opts) do
        field :id, non_null(:id)
        field :version_id, :id
        field :inserted_at, non_null(:datetime)
        field :updated_at, non_null(:datetime)
        unquote(drop_version_fields(block))
        interface(unquote(:"#{name}_base"))
      end

      # Version object `<name>_version`, pointing back at the base record.
      object unquote(:"#{name}_version") do
        field :id, non_null(:id)
        field unquote(:"#{name}_id"), :id
        field :is_deleted, :boolean
        field :inserted_at, :datetime
        version_fields(unquote(version_fields))
        version_lines(unquote(lines_ast))
        interface(unquote(:"#{name}_base"))
      end

      # Shared interface; concrete type resolved by which discriminating
      # key is present on the map.
      interface unquote(:"#{name}_base") do
        unquote(block)
        resolve_type(fn
          %{version_id: _}, _ -> unquote(name)
          %{unquote(:"#{name}_id") => _}, _ -> unquote(:"#{name}_version")
          _, _ -> nil
        end)
      end
    end
  end

  # Drop `version_field` lines for the base (non-version) object.
  @spec drop_version_fields(Macro.t()) :: Macro.t()
  defp drop_version_fields({:__block__, top_m, lines}) do
    lines = Enum.reject(lines, &match?({:version_field, _, _}, &1))
    {:__block__, top_m, lines}
  end

  # Expands the `:version_fields` option (nil-safe) into `field` calls.
  defmacro version_fields(fields) do
    do_field = fn key, type, opts ->
      quote do
        field unquote(key), unquote(type), unquote(opts)
      end
    end
    Enum.map(fields || [], fn
      {key, {type, opts}} -> do_field.(key, type, opts)
      {key, type} -> do_field.(key, type, [])
    end)
  end

  @doc """
  Convert a list of ast lines into ast lines to be used for the version object.
  """
  defmacro version_lines(lines_ast) do
    lines_ast
    |> Enum.reduce([], fn
      # `version_field` declarations become plain `field`s on the version object.
      {:version_field, m, a}, acc -> [{:field, m, a} | acc]
      other, acc -> [other | acc]
    end)
    |> Enum.reverse()
  end
end
|
lib/versioned/absinthe.ex
| 0.776453
| 0.548794
|
absinthe.ex
|
starcoder
|
defmodule GitHooks.Tasks.MFA do
  @moduledoc """
  Represents a `{module, function, arity}` (a.k.a. `mfa`) that will be evaluated
  by the Kernel module.
  An `mfa` should be configured as `{module, function, arity}`. The function of
  the module **will always receive the hook arguments** and the arity is
  expected to match the same number to avoid any unexpected behaviour.
  See [Elixir documentation](https://hexdocs.pm/elixir/typespecs.html#types-and-their-syntax) for more information.
  For example:
  ```elixir
  config :git_hooks,
    hooks: [
      pre_commit: [
        {MyModule, :my_function, 1}
      ]
    ]
  ```
  """

  @typedoc """
  Represents an `mfa` to be executed.
  """
  @type t :: %__MODULE__{
          module: atom,
          function: atom,
          args: [any],
          result: term
        }

  defstruct [:module, :function, args: [], result: nil]

  @doc """
  Creates a new `mfa` struct.
  ### Examples
      iex> #{__MODULE__}.new({MyModule, :my_function, 1}, :pre_commit, ["commit message"])
      %#{__MODULE__}{module: MyModule, function: :my_function, args: ["commit message"]}
  """
  @spec new(mfa(), GitHooks.git_hook_type(), GitHooks.git_hook_args()) :: __MODULE__.t()
  def new({module, function, arity}, git_hook_type, git_hook_args) do
    # The configured arity must match the number of hook arguments the
    # function will actually receive.
    git_hook_args
    |> length()
    |> case do
      ^arity ->
        %__MODULE__{module: module, function: function, args: git_hook_args}

      expected_arity ->
        raise """
        Invalid #{module}.#{function} arity for #{git_hook_type}, expected #{expected_arity} but got #{arity}. Check the Git hooks documentation to fix the expected parameters.
        """
    end
  end
end
defimpl GitHooks.Task, for: GitHooks.Tasks.MFA do
  alias GitHooks.Tasks.MFA
  alias GitHooks.Printer

  # Runs the configured {module, function} with the hook args, storing the
  # result (or the raised exception) on the struct.
  # Kernel.apply will throw a error if something fails
  def run(
        %MFA{
          module: module,
          function: function,
          args: args
        } = mfa,
        _opts
      ) do
    result = Kernel.apply(module, function, args)
    # Fix: use struct-update syntax instead of Map.put/3 on a struct,
    # which bypasses struct key guarantees.
    %{mfa | result: result}
  rescue
    error ->
      IO.warn(inspect(error))
      %{mfa | result: error}
  end

  # A task is successful only when the function returned :ok.
  def success?(%MFA{result: :ok}), do: true
  def success?(%MFA{result: _}), do: false

  # Prints a success/failure line and returns the task unchanged.
  def print_result(%MFA{module: module, function: function, result: :ok} = mix_task) do
    Printer.success("`#{module}.#{function}` was successful")
    mix_task
  end

  def print_result(%MFA{module: module, function: function, result: _} = mix_task) do
    Printer.error("`#{module}.#{function}` execution failed")
    mix_task
  end
end
|
lib/tasks/mfa.ex
| 0.876039
| 0.90261
|
mfa.ex
|
starcoder
|
defmodule Pact do
  @moduledoc """
  A module for managing dependecies in your applicaiton without having to
  "inject" dependencies all the way down your aplication. Pact allows you to
  set and get dependencies in your application code, and generate fakes and
  replace modules in your tests.
  To use Pact, define a module in your application that has `use Pact` in it,
  and then call `start_link` on it to start registering your dependencies.
  ## Usage
  ```
  defmodule MyApp.Pact do
    use Pact

    register "http", HTTPoison
  end
  MyApp.Pact.start_link
  defmodule MyApp.Users do
    def all do
      MyApp.Pact.get("http").get("http://foobar.com/api/users")
    end
  end
  ```
  Then in your tests you can use Pact to replace the module easily:
  ```
  defmodule MyAppTest do
    use ExUnit.Case
    require MyApp.Pact

    test "requests the corrent endpoint" do
      fakeHTTP = MyApp.Pact.generate :http do
        def get(url) do
          send self(), {:called, url}
        end
      end

      MyApp.Pact.replace "http", fakeHTTP do
        MyApp.Users.all
      end

      assert_receive {:called, "http://foobar.com/api/users"}
    end
  end
  ```
  ## Functions / Macros
  * `generate(name, block)` - Generates an anonymous module that's body is
  block`.
  * `replace(name, module, block)` - Replaces `name` with `module` in the given
  `block` only.
  * `register(name, module)` - Registers `name` as `module`.
  * `get(name)` - Get registed module for `name`.
  """

  defmacro __using__(_) do
    quote do
      import Pact
      use GenServer

      # Compile-time registry of defaults, filled by `register/2` calls in
      # the using module's body and read by __before_compile__ below.
      @modules %{}
      @before_compile Pact

      # Builds a uniquely-named module whose body is `block`, so each
      # generated fake is independent; expands to the new module's name.
      defmacro generate(name, do: block) do
        string_name = to_string(name)
        uid = :base64.encode(:crypto.strong_rand_bytes(5))
        module_name = String.to_atom("#{__MODULE__}.Fakes.#{string_name}.#{uid}")
        module = Module.create(module_name, block, Macro.Env.location(__ENV__))
        quote do
          unquote(module_name)
        end
      end

      # Swaps `name` for `module` only for the duration of `block`,
      # restoring the previous registration afterwards.
      defmacro replace(name, module, do: block) do
        quote do
          existing_module = unquote(__MODULE__).get(unquote(name))
          unquote(__MODULE__).register(unquote(name), unquote(module))
          unquote(block)
          unquote(__MODULE__).register(unquote(name), existing_module)
        end
      end

      def register(name, module) do
        GenServer.cast(__MODULE__, {:register, name, module})
      end

      def get(name) do
        GenServer.call(__MODULE__, {:get, name})
      end

      # Genserver implementation
      def init(container) do
        {:ok, container}
      end

      def handle_cast({:register, name, module}, state) do
        modules = Map.put(state.modules, name, module)
        {:noreply, %{state | modules: modules}}
      end

      def handle_call({:get, name}, _from, state) do
        module = get_in(state, [:modules, name])
        {:reply, module, state}
      end
    end
  end

  @doc false
  # Accumulates a compile-time default registration into @modules.
  defmacro register(name, module) do
    quote do
      @modules Map.put(@modules, unquote(name), unquote(module))
    end
  end

  # Injected last into the using module: starts the registry GenServer
  # seeded with the compile-time @modules map.
  defmacro __before_compile__(_env) do
    quote do
      def start_link do
        GenServer.start_link(__MODULE__, %{modules: @modules}, name: __MODULE__)
      end
    end
  end
end
|
lib/pact.ex
| 0.737347
| 0.647164
|
pact.ex
|
starcoder
|
defmodule AWS.Organizations do
@moduledoc """
AWS Organizations API Reference
AWS Organizations is a web service that enables you to consolidate your
multiple AWS accounts into an *organization* and centrally manage your
accounts and their resources.
This guide provides descriptions of the Organizations API. For more
information about using this service, see the [AWS Organizations User
Guide](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_introduction.html).
**API Version**
This version of the Organizations API Reference documents the Organizations
API version 2016-11-28.
<note> As an alternative to using the API directly, you can use one of the
AWS SDKs, which consist of libraries and sample code for various
programming languages and platforms (Java, Ruby, .NET, iOS, Android, and
more). The SDKs provide a convenient way to create programmatic access to
AWS Organizations. For example, the SDKs take care of cryptographically
signing requests, managing errors, and retrying requests automatically. For
more information about the AWS SDKs, including how to download and install
them, see [Tools for Amazon Web Services](http://aws.amazon.com/tools/).
</note> We recommend that you use the AWS SDKs to make programmatic API
calls to Organizations. However, you also can use the Organizations Query
API to make direct calls to the Organizations web service. To learn more
about the Organizations Query API, see [Making Query
Requests](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_query-requests.html)
in the *AWS Organizations User Guide*. Organizations supports GET and POST
requests for all actions. That is, the API does not require you to use GET
for some actions and POST for others. However, GET requests are subject to
the limitation size of a URL. Therefore, for operations that require larger
sizes, use a POST request.
**Signing Requests**
When you send HTTP requests to AWS, you must sign the requests so that AWS
can identify who sent them. You sign requests with your AWS access key,
which consists of an access key ID and a secret access key. We strongly
recommend that you do not create an access key for your root account.
Anyone who has the access key for your root account has unrestricted access
to all the resources in your account. Instead, create an access key for an
IAM user account that has administrative privileges. As another option, use
AWS Security Token Service to generate temporary security credentials, and
use those credentials to sign requests.
To sign requests, we recommend that you use [Signature Version
4](http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html).
If you have an existing application that uses Signature Version 2, you do
not have to update it to use Signature Version 4. However, some operations
now require Signature Version 4. The documentation for operations that
require version 4 indicate this requirement.
When you use the AWS Command Line Interface (AWS CLI) or one of the AWS
SDKs to make requests to AWS, these tools automatically sign the requests
for you with the access key that you specify when you configure the tools.
In this release, each organization can have only one root. In a future
release, a single organization will support multiple roots.
**Support and Feedback for AWS Organizations**
We welcome your feedback. Send your comments to
[<EMAIL>](mailto:<EMAIL>)
or post your feedback and questions in our private [AWS Organizations
support forum](http://forums.aws.amazon.com/forum.jspa?forumID=219). If you
don't have access to the forum, send a request for access to the email
address, along with your forum user ID. For more information about the AWS
support forums, see [Forums Help](http://forums.aws.amazon.com/help.jspa).
**Endpoint to Call When Using the CLI or the AWS API**
For the current release of Organizations, you must specify the `us-east-1`
region for all AWS API and CLI calls. You can do this in the CLI by using
these parameters and commands:
<ul> <li> Use the following parameter with each command to specify both the
endpoint and its region:
`--endpoint-url https://organizations.us-east-1.amazonaws.<EMAIL>`
</li> <li> Use the default endpoint, but configure your default region with
this command:
`aws configure set default.region us-east-1`
</li> <li> Use the following parameter with each command to specify the
endpoint:
`--region us-east-1`
</li> </ul> For the various SDKs used to call the APIs, see the
documentation for the SDK of interest to learn how to direct the requests
to a specific endpoint. For more information, see [Regions and
Endpoints](http://docs.aws.amazon.com/general/latest/gr/rande.html#sts_region)
in the *AWS General Reference*.
**How examples are presented**
The JSON returned by the AWS Organizations service as response to your
requests is returned as a single long string without line breaks or
formatting whitespace. Both line breaks and whitespace are included in the
examples in this guide to improve readability. When example input
parameters also would result in long strings that would extend beyond the
screen, we insert line breaks to enhance readability. You should always
submit the input as a single JSON text string.
**Recording API Requests**
AWS Organizations supports AWS CloudTrail, a service that records AWS API
calls for your AWS account and delivers log files to an Amazon S3 bucket.
By using information collected by AWS CloudTrail, you can determine which
requests were successfully made to Organizations, who made the request,
when it was made, and so on. For more about AWS Organizations and its
support for AWS CloudTrail, see [Logging AWS Organizations Events with AWS
CloudTrail](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_cloudtrail-integration.html)
in the *AWS Organizations User Guide*. To learn more about CloudTrail,
including how to turn it on and find your log files, see the [AWS
CloudTrail User
Guide](http://docs.aws.amazon.com/awscloudtrail/latest/userguide/what_is_cloud_trail_top_level.html).
"""
@doc """
Sends a response to the originator of a handshake agreeing to the action
proposed by the handshake request.
This operation can be called only by the following principals when they
also have the relevant IAM permissions:
<ul> <li> **Invitation to join** or **Approve all features request**
handshakes: only a principal from the member account.
</li> <li> **Enable all features final confirmation** handshake: only a
principal from the master account.
For more information about invitations, see [Inviting an AWS Account to
Join Your
Organization](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_invites.html)
in the *AWS Organizations User Guide*. For more information about requests
to enable all features in the organization, see [Enabling All Features in
Your
Organization](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_org_support-all-features.html)
in the *AWS Organizations User Guide*.
</li> </ul>
"""
def accept_handshake(client, input, options \\ []) do
request(client, "AcceptHandshake", input, options)
end
@doc """
Attaches a policy to a root, an organizational unit, or an individual
account. How the policy affects accounts depends on the type of policy:
<ul> <li> **Service control policy (SCP)** - An SCP specifies what
permissions can be delegated to users in affected member accounts. The
scope of influence for a policy depends on what you attach the policy to:
<ul> <li> If you attach an SCP to a root, it affects all accounts in the
organization.
</li> <li> If you attach an SCP to an OU, it affects all accounts in that
OU and in any child OUs.
</li> <li> If you attach the policy directly to an account, then it affects
only that account.
</li> </ul> SCPs essentially are permission "filters". When you attach one
SCP to a higher level root or OU, and you also attach a different SCP to a
child OU or to an account, the child policy can further restrict only the
permissions that pass through the parent filter and are available to the
child. An SCP that is attached to a child cannot grant a permission that is
not already granted by the parent. For example, imagine that the parent SCP
allows permissions A, B, C, D, and E. The child SCP allows C, D, E, F, and
G. The result is that the accounts affected by the child SCP are allowed to
use only C, D, and E. They cannot use A or B because they were filtered out
by the child OU. They also cannot use F and G because they were filtered
out by the parent OU. They cannot be granted back by the child SCP; child
SCPs can only filter the permissions they receive from the parent SCP.
AWS Organizations attaches a default SCP named `FullAWSAccess` to every
root, OU, and account. This default SCP allows all services and actions,
enabling any new child OU or account to inherit the permissions of the
parent root or OU. If you detach the default policy, you must replace it
with a policy that specifies the permissions that you want to allow in that
OU or account.
For more information about how Organizations policies permissions work, see
[Using Service Control
Policies](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_scp.html)
in the *AWS Organizations User Guide*.
</li> </ul> This operation can be called only from the organization's
master account.
"""
def attach_policy(client, input, options \\ []) do
  request(client, "AttachPolicy", input, options)
end
@doc """
Cancels a handshake. Canceling a handshake sets the handshake state to
`CANCELED`.
This operation can be called only from the account that originated the
handshake. The recipient of the handshake can't cancel it, but can use
`DeclineHandshake` instead. After a handshake is canceled, the recipient
can no longer respond to that handshake.
"""
def cancel_handshake(client, input, options \\ []) do
request(client, "CancelHandshake", input, options)
end
@doc """
Creates an AWS account that is automatically a member of the organization
whose credentials made the request. This is an asynchronous request that
AWS performs in the background. If you want to check the status of the
request later, you need the `OperationId` response element from this
operation to provide as a parameter to the `DescribeCreateAccountStatus`
operation.
AWS Organizations preconfigures the new member account with a role (named
`OrganizationAccountAccessRole` by default) that grants administrator
permissions to the new account. Principals in the master account can assume
the role. AWS Organizations clones the company name and address information
for the new account from the organization's master account.
For more information about creating accounts, see [Creating an AWS Account
in Your
Organization](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_accounts_create.html)
in the *AWS Organizations User Guide*.
<important> You cannot remove accounts that are created with this operation
from an organization. That also means that you cannot delete an
organization that contains an account that is created with this operation.
</important> <note> When you create a member account with this operation,
the account is created with the **IAM User and Role Access to Billing
Information** switch enabled. This allows IAM users and roles that are
granted appropriate permissions to view billing information. If this is
disabled, then only the account root user can access billing information.
For information about how to disable this for an account, see [Granting
Access to Your Billing Information and
Tools](http://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/grantaccess.html).
</note> This operation can be called only from the organization's master
account.
"""
def create_account(client, input, options \\ []) do
request(client, "CreateAccount", input, options)
end
@doc """
Creates an AWS organization. The account whose user is calling the
CreateOrganization operation automatically becomes the [master
account](http://docs.aws.amazon.com/IAM/latest/UserGuide/orgs_getting-started_concepts.html#account)
of the new organization.
This operation must be called using credentials from the account that is to
become the new organization's master account. The principal must also have
the relevant IAM permissions.
By default (or if you set the `FeatureSet` parameter to `ALL`), the new
organization is created with all features enabled and service control
policies automatically enabled in the root. If you instead choose to create
the organization supporting only the consolidated billing features by
setting the `FeatureSet` parameter to `CONSOLIDATED_BILLING`, then no
policy types are enabled by default and you cannot use organization
policies.
"""
def create_organization(client, input, options \\ []) do
  request(client, "CreateOrganization", input, options)
end
@doc """
Creates an organizational unit (OU) within a root or parent OU. An OU is a
container for accounts that enables you to organize your accounts to apply
policies according to your business requirements. The number of levels deep
that you can nest OUs is dependent upon the policy types enabled for that
root. For service control policies, the limit is five.
For more information about OUs, see [Managing Organizational
Units](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_ous.html)
in the *AWS Organizations User Guide*.
This operation can be called only from the organization's master account.
"""
def create_organizational_unit(client, input, options \\ []) do
request(client, "CreateOrganizationalUnit", input, options)
end
@doc """
Creates a policy of a specified type that you can attach to a root, an
organizational unit (OU), or an individual AWS account.
For more information about policies and their use, see [Managing
Organization
Policies](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies.html).
This operation can be called only from the organization's master account.
"""
def create_policy(client, input, options \\ []) do
request(client, "CreatePolicy", input, options)
end
@doc """
Declines a handshake request. This sets the handshake state to `DECLINED`
and effectively deactivates the request.
This operation can be called only from the account that received the
handshake. The originator of the handshake can use `CancelHandshake`
instead. The originator can't reactivate a declined request, but can
re-initiate the process with a new handshake request.
"""
def decline_handshake(client, input, options \\ []) do
request(client, "DeclineHandshake", input, options)
end
@doc """
Deletes the organization. You can delete an organization only by using
credentials from the master account. The organization must be empty of
member accounts, OUs, and policies.
<important> If you create any accounts using Organizations operations or
the Organizations console, you can't remove those accounts from the
organization, which means that you can't delete the organization.
</important>
"""
def delete_organization(client, input, options \\ []) do
request(client, "DeleteOrganization", input, options)
end
@doc """
Deletes an organizational unit from a root or another OU. You must first
remove all accounts and child OUs from the OU that you want to delete.
This operation can be called only from the organization's master account.
"""
def delete_organizational_unit(client, input, options \\ []) do
request(client, "DeleteOrganizationalUnit", input, options)
end
@doc """
Deletes the specified policy from your organization. Before you perform
this operation, you must first detach the policy from all OUs, roots, and
accounts.
This operation can be called only from the organization's master account.
"""
def delete_policy(client, input, options \\ []) do
request(client, "DeletePolicy", input, options)
end
@doc """
Retrieves Organizations-related information about the specified account.
This operation can be called only from the organization's master account.
"""
def describe_account(client, input, options \\ []) do
request(client, "DescribeAccount", input, options)
end
@doc """
Retrieves the current status of an asynchronous request to create an
account.
This operation can be called only from the organization's master account.
"""
def describe_create_account_status(client, input, options \\ []) do
request(client, "DescribeCreateAccountStatus", input, options)
end
@doc """
Retrieves information about a previously requested handshake. The handshake
ID comes from the response to the original `InviteAccountToOrganization`
operation that generated the handshake.
This operation can be called from any account in the organization.
"""
def describe_handshake(client, input, options \\ []) do
request(client, "DescribeHandshake", input, options)
end
@doc """
Retrieves information about the organization that the user's account
belongs to.
This operation can be called from any account in the organization.
"""
def describe_organization(client, input, options \\ []) do
request(client, "DescribeOrganization", input, options)
end
@doc """
Retrieves information about an organizational unit (OU).
This operation can be called only from the organization's master account.
"""
def describe_organizational_unit(client, input, options \\ []) do
request(client, "DescribeOrganizationalUnit", input, options)
end
@doc """
Retrieves information about a policy.
This operation can be called only from the organization's master account.
"""
def describe_policy(client, input, options \\ []) do
request(client, "DescribePolicy", input, options)
end
@doc """
Detaches a policy from a target root, organizational unit, or account. If
the policy being detached is a service control policy (SCP), the changes to
permissions for IAM users and roles in affected accounts are immediate.
**Note:** Every root, OU, and account must have at least one SCP attached.
If you want to replace the default `FullAWSAccess` policy with one that
limits the permissions that can be delegated, then you must attach the
replacement policy before you can remove the default one. This is the
authorization strategy of
[whitelisting](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_about-scps.html#orgs_policies_whitelist).
If you instead attach a second SCP and leave the `FullAWSAccess` SCP still
attached, and specify `"Effect": "Deny"` in the second SCP to override the
`"Effect": "Allow"` in the `FullAWSAccess` policy (or any other attached
SCP), then you are using the authorization strategy of
[blacklisting](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_policies_about-scps.html#orgs_policies_blacklist).
This operation can be called only from the organization's master account.
"""
def detach_policy(client, input, options \\ []) do
request(client, "DetachPolicy", input, options)
end
@doc """
Disables an organizational control policy type in a root. A policy of a
certain type can be attached to entities in a root only if that type is
enabled in the root. After you perform this operation, you no longer can
attach policies of the specified type to that root or to any OU or account
in that root. You can undo this by using the `EnablePolicyType` operation.
This operation can be called only from the organization's master account.
"""
def disable_policy_type(client, input, options \\ []) do
  request(client, "DisablePolicyType", input, options)
end
@doc """
Enables all features in an organization. This enables the use of
organization policies that can restrict the services and actions that can
be called in each account. Until you enable all features, you have access
only to consolidated billing, and you can't use any of the advanced account
administration features that AWS Organizations supports. For more
information, see [Enabling All Features in Your
Organization](http://docs.aws.amazon.com/organizations/latest/userguide/orgs_manage_org_support-all-features.html)
in the *AWS Organizations User Guide*.
<important> This operation is required only for organizations that were
created explicitly with only the consolidated billing features enabled, or
that were migrated from a Consolidated Billing account family to
Organizations. Calling this operation sends a handshake to every invited
account in the organization. The feature set change can be finalized and
the additional features enabled only after all administrators in the
invited accounts approve the change by accepting the handshake.
</important> After all invited member accounts accept the handshake, you
finalize the feature set change by accepting the handshake that contains
`"Action": "ENABLE_ALL_FEATURES"`. This completes the change.
After you enable all features in your organization, the master account in
the organization can apply policies on all member accounts. These policies
can restrict what users and even administrators in those accounts can do.
The master account can apply policies that prevent accounts from leaving
the organization. Ensure that your account administrators are aware of
this.
This operation can be called only from the organization's master account.
"""
def enable_all_features(client, input, options \\ []) do
request(client, "EnableAllFeatures", input, options)
end
@doc """
Enables a policy type in a root. After you enable a policy type in a root,
you can attach policies of that type to the root, any OU, or account in
that root. You can undo this by using the `DisablePolicyType` operation.
This operation can be called only from the organization's master account.
"""
def enable_policy_type(client, input, options \\ []) do
request(client, "EnablePolicyType", input, options)
end
@doc """
Sends an invitation to another account to join your organization as a
member account. Organizations sends email on your behalf to the email
address that is associated with the other account's owner. The invitation
is implemented as a `Handshake` whose details are in the response.
This operation can be called only from the organization's master account.
"""
def invite_account_to_organization(client, input, options \\ []) do
request(client, "InviteAccountToOrganization", input, options)
end
@doc """
Removes a member account from its parent organization. This version of the
operation is performed by the account that wants to leave. To remove a
member account as a user in the master account, use
`RemoveAccountFromOrganization` instead.
This operation can be called only from a member account in the
organization.
<important> The master account in an organization with all features enabled
can set service control policies (SCPs) that can restrict what
administrators of member accounts can do, including preventing them from
successfully calling `LeaveOrganization` and leaving the organization.
</important>
"""
def leave_organization(client, input, options \\ []) do
request(client, "LeaveOrganization", input, options)
end
@doc """
Lists all the accounts in the organization. To request only the accounts in
a root or OU, use the `ListAccountsForParent` operation instead.
This operation can be called only from the organization's master account.
"""
def list_accounts(client, input, options \\ []) do
request(client, "ListAccounts", input, options)
end
@doc """
Lists the accounts in an organization that are contained by the specified
target root or organizational unit (OU). If you specify the root, you get a
list of all the accounts that are not in any OU. If you specify an OU, you
get a list of all the accounts in only that OU, and not in any child OUs.
To get a list of all accounts in the organization, use the `ListAccounts`
operation.
"""
def list_accounts_for_parent(client, input, options \\ []) do
request(client, "ListAccountsForParent", input, options)
end
@doc """
Lists all of the OUs or accounts that are contained in the specified parent
OU or root. This operation, along with `ListParents` enables you to
traverse the tree structure that makes up this root.
"""
def list_children(client, input, options \\ []) do
request(client, "ListChildren", input, options)
end
@doc """
Lists the account creation requests that match the specified status that is
currently being tracked for the organization.
This operation can be called only from the organization's master account.
"""
def list_create_account_status(client, input, options \\ []) do
request(client, "ListCreateAccountStatus", input, options)
end
@doc """
Lists the current handshakes that are associated with the account of the
requesting user.
This operation can be called from any account in the organization.
"""
def list_handshakes_for_account(client, input, options \\ []) do
request(client, "ListHandshakesForAccount", input, options)
end
@doc """
Lists the handshakes that are associated with the organization that the
requesting user is part of. The `ListHandshakesForOrganization` operation
returns a list of handshake structures. Each structure contains details and
status about a handshake.
This operation can be called only from the organization's master account.
"""
def list_handshakes_for_organization(client, input, options \\ []) do
request(client, "ListHandshakesForOrganization", input, options)
end
@doc """
Lists the organizational units (OUs) in a parent organizational unit or
root.
This operation can be called only from the organization's master account.
"""
def list_organizational_units_for_parent(client, input, options \\ []) do
request(client, "ListOrganizationalUnitsForParent", input, options)
end
@doc """
Lists the root or organizational units (OUs) that serve as the immediate
parent of the specified child OU or account. This operation, along with
`ListChildren` enables you to traverse the tree structure that makes up
this root.
This operation can be called only from the organization's master account.
<note> In the current release, a child can have only a single parent.
</note>
"""
def list_parents(client, input, options \\ []) do
request(client, "ListParents", input, options)
end
@doc """
Retrieves the list of all policies in an organization of a specified type.
This operation can be called only from the organization's master account.
"""
def list_policies(client, input, options \\ []) do
request(client, "ListPolicies", input, options)
end
@doc """
Lists the policies that are directly attached to the specified target root,
organizational unit (OU), or account. You must specify the policy type that
you want included in the returned list.
This operation can be called only from the organization's master account.
"""
def list_policies_for_target(client, input, options \\ []) do
request(client, "ListPoliciesForTarget", input, options)
end
@doc """
Lists the roots that are defined in the current organization.
This operation can be called only from the organization's master account.
"""
def list_roots(client, input, options \\ []) do
request(client, "ListRoots", input, options)
end
@doc """
Lists all the roots, OUs, and accounts to which the specified policy is
attached.
This operation can be called only from the organization's master account.
"""
def list_targets_for_policy(client, input, options \\ []) do
request(client, "ListTargetsForPolicy", input, options)
end
@doc """
Moves an account from its current source parent root or OU to the specified
destination parent root or OU.
This operation can be called only from the organization's master account.
"""
def move_account(client, input, options \\ []) do
request(client, "MoveAccount", input, options)
end
@doc """
Removes the specified account from the organization.
The removed account becomes a stand-alone account that is not a member of
any organization. It is no longer subject to any policies and is
responsible for its own bill payments. The organization's master account is
no longer charged for any expenses accrued by the member account after it
is removed from the organization.
This operation can be called only from the organization's master account.
Member accounts can remove themselves with `LeaveOrganization` instead.
<important> You can remove only existing accounts that were invited to join
the organization. You cannot remove accounts that were created by AWS
Organizations.
</important>
"""
def remove_account_from_organization(client, input, options \\ []) do
request(client, "RemoveAccountFromOrganization", input, options)
end
@doc """
Renames the specified organizational unit (OU). The ID and ARN do not
change. The child OUs and accounts remain in place, and any attached
policies of the OU remain attached.
This operation can be called only from the organization's master account.
"""
def update_organizational_unit(client, input, options \\ []) do
request(client, "UpdateOrganizationalUnit", input, options)
end
@doc """
Updates an existing policy with a new name, description, or content. If any
parameter is not supplied, that value remains unchanged. Note that you
cannot change a policy's type.
This operation can be called only from the organization's master account.
"""
def update_policy(client, input, options \\ []) do
request(client, "UpdatePolicy", input, options)
end
# Dispatches one AWS Organizations API action over HTTP.
#
# Signs the JSON payload with AWS Signature V4 and POSTs it to the service
# endpoint. Returns:
#   * `{:ok, parsed_body | nil, response}` on HTTP 200 (`nil` for empty body)
#   * `{:error, {exception_type, message}}` when AWS returns an error body
#   * `{:error, %HTTPoison.Error{}}` on transport failure
#
# Fixed: the previous @spec referenced the non-existent `Poison.Response.t`
# for the success tuple (the code returns an `HTTPoison.Response`) and
# claimed `{:error, Poison.Parser.t}` while the code actually returns
# `{:error, {exception, message}}`.
@spec request(map(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t | nil, HTTPoison.Response.t} |
        {:error, {binary | nil, binary | nil}} |
        {:error, HTTPoison.Error.t}
defp request(client, action, input, options) do
  client = %{client | service: "organizations"}
  host = get_host("organizations", client)
  url = get_url(host, client)
  headers = [{"Host", host},
             {"Content-Type", "application/x-amz-json-1.1"},
             {"X-Amz-Target", "AWSOrganizationsV20161128.#{action}"}]
  payload = Poison.Encoder.encode(input, [])
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
  case HTTPoison.post(url, payload, headers, options) do
    {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
      {:ok, nil, response}
    {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, _response=%HTTPoison.Response{body: body}} ->
      # Non-200: AWS encodes the failure as JSON with "__type" and "message".
      error = Poison.Parser.parse!(body)
      exception = error["__type"]
      message = error["message"]
      {:error, {exception, message}}
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Resolves the hostname for the service endpoint: "localhost" when the
# client is configured for the "local" region, otherwise the standard
# "<prefix>.<region>.<endpoint>" AWS form.
defp get_host(endpoint_prefix, client) do
  case client.region do
    "local" -> "localhost"
    region -> "#{endpoint_prefix}.#{region}.#{client.endpoint}"
  end
end
# Builds the full endpoint URL ("proto://host:port/") from the resolved
# host and the client's transport settings.
defp get_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/organizations.ex
| 0.839997
| 0.564279
|
organizations.ex
|
starcoder
|
defmodule Confex.Type do
  @moduledoc """
  This module is responsible for Confex type-casting.
  """

  @type value :: String.t() | nil
  @type t ::
          :string
          | :integer
          | :float
          | :boolean
          | :atom
          | :module
          | :list
          | {module :: module, function :: atom, additional_arguments :: list}

  # Accepted spellings for boolean values (compared case-insensitively).
  @boolean_true ["true", "1", "yes"]
  @boolean_false ["false", "0", "no"]
  @list_separator ","

  @doc """
  Parse string and cast it to Elixir type.
  """
  @spec cast(value :: value, type :: t()) :: {:ok, any()} | {:error, String.t()}
  def cast(nil, _type), do: {:ok, nil}

  def cast(value, :string), do: {:ok, value}

  def cast(value, :module), do: {:ok, Module.concat([value])}

  def cast(value, :integer) do
    case Integer.parse(value) do
      {parsed, ""} ->
        {:ok, parsed}

      {_parsed, leftover} ->
        {:error,
         "can not cast #{inspect(value)} to Integer, result contains binary remainder #{leftover}"}

      :error ->
        {:error, "can not cast #{inspect(value)} to Integer"}
    end
  end

  def cast(value, :float) do
    case Float.parse(value) do
      {parsed, ""} ->
        {:ok, parsed}

      {_parsed, leftover} ->
        {:error,
         "can not cast #{inspect(value)} to Float, result contains binary remainder #{leftover}"}

      :error ->
        {:error, "can not cast #{inspect(value)} to Float"}
    end
  end

  # NOTE(review): creates atoms from arbitrary input; atoms are not garbage
  # collected, so this should only be fed trusted configuration values.
  def cast(value, :atom) do
    {:ok, value |> String.to_charlist() |> List.to_atom()}
  end

  def cast(value, :boolean) do
    case String.downcase(value) do
      truthy when truthy in @boolean_true ->
        {:ok, true}

      falsy when falsy in @boolean_false ->
        {:ok, false}

      _other ->
        {:error,
         "can not cast #{inspect(value)} to boolean, expected values are 'true', 'false', '1', '0', 'yes' or 'no'"}
    end
  end

  def cast(value, :list) do
    {:ok, value |> String.split(@list_separator) |> Enum.map(&String.trim/1)}
  end

  # Custom cast: calls `module.function(value, additional_arguments...)`,
  # which must return an `{:ok, _}` or `{:error, _}` tuple.
  def cast(value, {module, function, additional_arguments}) do
    case apply(module, function, [value | additional_arguments]) do
      {:ok, _} = ok ->
        ok

      {:error, _} = error ->
        error

      unexpected ->
        arity = length(additional_arguments) + 1

        {:error,
         "expected `#{module}.#{function}/#{arity}` to return " <>
           "either `{:ok, value}` or `{:error, reason}` tuple, got: `#{inspect(unexpected)}`"}
    end
  end
end
|
lib/confex/type.ex
| 0.79049
| 0.450359
|
type.ex
|
starcoder
|
defmodule Nostrum.Voice do
@moduledoc """
Interface for playing and listening to audio through Discord's voice channels.
# Using Discord Voice Channels
To play sound in Discord with Nostrum, you'll need `ffmpeg` to be installed.
If you don't have the executable `ffmpeg` in the path, the absolute path may
be configured through config keys `:nostrum, :ffmpeg`. If you don't want to use
ffmpeg, read on to the next section.
A bot may be connected to at most one voice channel per guild. For this reason,
most of the functions in this module take a guild id, and the resulting action
will be performed in the given guild's voice channel that the bot is connected to.
The primary Discord gateway responsible for all text based communication relies on
one websocket connection per shard, where small bots typically only have one shard.
The Discord voice gateways work by establishing a websocket connection per guild/channel.
After some handshaking on this connection, audio data can be sent over UDP/RTP. Behind
the scenes the voice websocket connections are implemented nearly the same way the main
shard websocket connections are, and require no developer intervention.
In addition to playing audio, listening to incoming audio is supported through the
functions `listen/3` and `start_listen_async/1`.
## Voice Without FFmpeg
If you wish to BYOE (Bring Your Own Encoder), there are a few options.
- Use `:raw` as `type` for `play/4`
- Provide the complete list of opus frames as the input
- Use `:raw_s` as `type` for `play/4`
- Provide a stateful enumerable of opus frames as input (think GenServer wrapped in `Stream.unfold/2`)
- Use lower level functions to send opus frames at your leisure
- Send packets on your own time using `send_frames/2`
"""
alias Nostrum.Api
alias Nostrum.Struct.{Channel, Guild, VoiceState, VoiceWSState}
alias Nostrum.Voice.Audio
alias Nostrum.Voice.Opus
alias Nostrum.Voice.Ports
alias Nostrum.Voice.Session
alias Nostrum.Voice.Supervisor, as: VoiceSupervisor
require Logger
use GenServer
@typedoc """
RTP sequence
"""
@typedoc since: "0.6.0"
@type rtp_sequence :: non_neg_integer()
@typedoc """
RTP timestamp
"""
@typedoc since: "0.6.0"
@type rtp_timestamp :: non_neg_integer()
@typedoc """
RTP SSRC
"""
@typedoc since: "0.6.0"
@type rtp_ssrc :: non_neg_integer()
@typedoc """
Opus packet
"""
@typedoc since: "0.6.0"
@type opus_packet :: binary()
@typedoc """
Tuple with RTP header elements and opus packet
"""
@typedoc since: "0.6.0"
@type rtp_opus :: {{rtp_sequence(), rtp_timestamp(), rtp_ssrc()}, opus_packet()}
@typedoc """
The type of play input
The type given to `play/4` determines how the input parameter is interpreted.
See `play/4` for more information.
"""
@typedoc since: "0.6.0"
@type play_type :: :url | :pipe | :ytdl | :stream | :raw | :raw_s
@typedoc """
The play input
The input given to `play/4`, either a compatible URL or binary audio data.
See `play/4` for more information.
"""
@typedoc since: "0.6.0"
@type play_input :: String.t() | binary() | Enum.t()
@raw_types [:raw, :raw_s]
@ffm_types [:url, :pipe, :ytdl, :stream]
@url_types [:url, :ytdl, :stream]
@doc false
def start_link(_args), do: GenServer.start_link(__MODULE__, %{}, name: VoiceStateMap)
@doc false
def init(state), do: {:ok, state}
@doc false
def update_voice(guild_id, args \\ []), do: GenServer.call(VoiceStateMap, {:update, guild_id, args})
@doc false
def get_voice(guild_id), do: GenServer.call(VoiceStateMap, {:get, guild_id})
@doc false
def remove_voice(guild_id), do: GenServer.call(VoiceStateMap, {:remove, guild_id})
@doc """
Joins or moves the bot to a voice channel.
This function calls `Nostrum.Api.update_voice_state/4`.
The fifth argument `persist` defaults to `true`. When true, if calling `join_channel/5`
while already in a different channel in the same guild, the audio source will be persisted
in the new channel. If the audio is actively playing at the time of changing channels,
it will resume playing automatically upon joining. If there is an active audio source
that has been paused before changing channels, the audio will be able to be resumed manually if
`resume/1` is called.
If `persist` is set to false, the audio source will be destroyed before changing channels.
The same effect is achieved by calling `stop/1` or `leave_channel/1` before `join_channel/5`
"""
@spec join_channel(Guild.id(), Channel.id(), boolean, boolean, boolean) :: no_return | :ok
def join_channel(
      guild_id,
      channel_id,
      self_mute \\ false,
      self_deaf \\ false,
      persist \\ true
    ) do
  # If a voice state already exists for this guild (i.e. we are moving
  # channels), record whether the audio source — and active playback —
  # should survive the move. The `with` falls through harmlessly when no
  # VoiceState exists yet (first join).
  with %VoiceState{} = voice <- get_voice(guild_id) do
    update_voice(guild_id,
      persist_source: persist,
      persist_playback: persist and VoiceState.playing?(voice)
    )
  end

  # The actual join/move is driven by a gateway voice-state update.
  Api.update_voice_state(guild_id, channel_id, self_mute, self_deaf)
end
@doc """
Leaves the voice channel of the given guild id.
This function is equivalent to calling `Nostrum.Api.update_voice_state(guild_id, nil)`.
"""
@spec leave_channel(Guild.id()) :: no_return | :ok
# Disconnecting is expressed as a voice-state update with a nil channel id.
def leave_channel(guild_id), do: Api.update_voice_state(guild_id, nil)
@doc """
Plays sound in the voice channel the bot is in.
The bot must be connected to a voice channel in the guild specified.
## Parameters
- `guild_id` - ID of guild whose voice channel the sound will be played in.
- `input` - Audio to be played, `t:play_input/0`. Input type determined by `type` parameter.
- `type` - Type of input, `t:play_type/0` (defaults to `:url`).
- `:url` Input will be [any url that `ffmpeg` can read](https://www.ffmpeg.org/ffmpeg-protocols.html).
- `:pipe` Input will be data that is piped to stdin of `ffmpeg`.
- `:ytdl` Input will be url for `youtube-dl`, which gets automatically piped to `ffmpeg`.
- `:stream` Input will be livestream url for `streamlink`, which gets automatically piped to `ffmpeg`.
- `:raw` Input will be an enumerable of raw opus packets. This bypasses `ffmpeg` and all options.
- `:raw_s` Same as `:raw` but input must be stateful, i.e. calling `Enum.take/2` on `input` is not idempotent.
- `options` - See options section below.
Returns `{:error, reason}` if unable to play or a sound is playing, else `:ok`.
## Options
- `:start_pos` (string) - The start position of the audio to be played. Defaults to beginning.
- `:duration` (string) - The duration to of the audio to be played . Defaults to entire duration.
- `:realtime` (boolean) - Make ffmpeg process the input in realtime instead of as fast as possible. Defaults to true.
- `:volume` (number) - The output volume of the audio. Default volume is 1.0.
- `:filter` (string) - Filter(s) to be applied to the audio. No filters applied by default.
The values of `:start_pos` and `:duration` can be [any time duration that ffmpeg can read](https://ffmpeg.org/ffmpeg-utils.html#Time-duration).
The `:filter` can be used multiple times in a single call (see examples).
The values of `:filter` can be [any audio filters that ffmpeg can read](https://ffmpeg.org/ffmpeg-filters.html#Audio-Filters).
Filters will be applied in order and can be as complex as you want. The world is your oyster!
Note that using the `:volume` option is shortcut for the "volume" filter, and will be added to the end of the filter chain, acting as a master volume.
Volume values between `0.0` and `1.0` act as standard operating range where `0` is off and `1` is max.
Values greater than `1.0` will add saturation and distortion to the audio.
Negative values act the same as their position but reverse the polarity of the waveform.
Having all the ffmpeg audio filters available is *extremely powerful* so it may be worth learning some of them for your use cases.
If you use any filters to *increase* the playback speed of your audio, it's recommended to set the `:realtime` option to `false`
because realtime processing is relative to the original playback speed.
## Examples
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.play(123456789, "~/music/FavoriteSong.mp3", :url)
iex> Nostrum.Voice.play(123456789, "~/music/NotFavoriteButStillGoodSong.mp3", :url, volume: 0.5)
iex> Nostrum.Voice.play(123456789, "~/music/ThisWillBeHeavilyDistorted.mp3", :url, volume: 1000)
```
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> raw_data = File.read!("~/music/sound_effect.wav")
iex> Nostrum.Voice.play(123456789, raw_data, :pipe)
```
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.play(123456789, "https://www.youtube.com/watch?v=b4RJ-QGOtw4", :ytdl,
...> realtime: true, start_pos: "0:17", duration: "30")
iex> Nostrum.Voice.play(123456789, "https://www.youtube.com/watch?v=0ngcL_5ekXo", :ytdl,
...> filter: "lowpass=f=1200", filter: "highpass=f=300", filter: "asetrate=44100*0.5")
```
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.play(123456789, "https://www.twitch.tv/pestily", :stream)
iex> Nostrum.Voice.play(123456789, "https://youtu.be/LN4r-K8ZP5Q", :stream)
```
"""
@spec play(Guild.id(), play_input(), play_type(), keyword()) :: :ok | {:error, String.t()}
def play(guild_id, input, type \\ :url, options \\ []) do
  voice = get_voice(guild_id)
  cond do
    not VoiceState.ready_for_rtp?(voice) ->
      {:error, "Must be connected to voice channel to play audio."}
    VoiceState.playing?(voice) ->
      {:error, "Audio already playing in voice channel."}
    true ->
      # A previous (paused or finished) ffmpeg process may still be around;
      # close it before spawning a new source.
      if is_pid(voice.ffmpeg_proc), do: Ports.close(voice.ffmpeg_proc)
      # Record the new source. ffmpeg_proc is set for ffmpeg-piped types,
      # raw_audio for the raw opus types, current_url only for URL-like types.
      voice =
        update_voice(guild_id,
          current_url: if(type in @url_types, do: input),
          ffmpeg_proc: if(type in @ffm_types, do: Audio.spawn_ffmpeg(input, type, options)),
          raw_audio: if(type in @raw_types, do: input),
          raw_stateful: type === :raw_s
        )
      set_speaking(voice, true)
      # Playback runs in its own process, so this call returns immediately.
      update_voice(guild_id, player_pid: spawn(Audio, :start_player, [voice]))
      :ok
  end
end
@doc """
Stops the current sound being played in a voice channel.
The bot must be connected to a voice channel in the guild specified.
## Parameters
- `guild_id` - ID of guild whose voice channel the sound will be stopped in.
Returns `{:error, reason}` if unable to stop or no sound is playing, else `:ok`.
If a sound has finished playing, this function does not need to be called to start
playing another sound.
## Examples
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.play(123456789, "http://brandthill.com/files/weird_dubstep_noises.mp3")
iex> Nostrum.Voice.stop(123456789)
```
"""
@spec stop(Guild.id()) :: :ok | {:error, String.t()}
def stop(guild_id) do
  voice = get_voice(guild_id)
  cond do
    not VoiceState.ready_for_rtp?(voice) ->
      {:error, "Must be connected to voice channel to stop audio."}
    not VoiceState.playing?(voice) ->
      {:error, "Audio must be playing to stop."}
    true ->
      # Order matters: clear the speaking flag, kill the player process,
      # then reap the ffmpeg port if one backs the current source.
      set_speaking(voice, false)
      Process.exit(voice.player_pid, :stop)
      if is_pid(voice.ffmpeg_proc), do: Ports.close(voice.ffmpeg_proc)
      :ok
  end
end
@doc """
Pauses the current sound being played in a voice channel.
The bot must be connected to a voice channel in the guild specified.
## Parameters
- `guild_id` - ID of guild whose voice channel the sound will be paused in.
Returns `{:error, reason}` if unable to pause or no sound is playing, else `:ok`.
This function is similar to `stop/1`, except that the sound may be
resumed after being paused.
## Examples
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.play(123456789, "~/files/twelve_hour_loop_of_waterfall_sounds.mp3")
iex> Nostrum.Voice.pause(123456789)
```
"""
@spec pause(Guild.id()) :: :ok | {:error, String.t()}
def pause(guild_id) do
  voice = get_voice(guild_id)
  cond do
    not VoiceState.ready_for_rtp?(voice) ->
      {:error, "Must be connected to voice channel to pause audio."}
    not VoiceState.playing?(voice) ->
      {:error, "Audio must be playing to pause."}
    true ->
      # Unlike stop/1, the audio source (ffmpeg process / raw audio) is left
      # intact so resume/1 can continue where playback halted.
      set_speaking(voice, false)
      Process.exit(voice.player_pid, :pause)
      :ok
  end
end
@doc """
Resumes playing the current paused sound in a voice channel.
The bot must be connected to a voice channel in the guild specified.
## Parameters
- `guild_id` - ID of guild whose voice channel the sound will be resumed in.
Returns `{:error, reason}` if unable to resume or no sound has been paused, otherwise returns `:ok`.
This function is used to resume a sound that had previously been paused.
## Examples
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.play(123456789, "~/stuff/Toto - Africa (Bass Boosted)")
iex> Nostrum.Voice.pause(123456789)
iex> Nostrum.Voice.resume(123456789)
```
"""
@spec resume(Guild.id()) :: :ok | {:error, String.t()}
def resume(guild_id) do
  voice = get_voice(guild_id)
  cond do
    not VoiceState.ready_for_rtp?(voice) ->
      {:error, "Must be connected to voice channel to resume audio."}
    VoiceState.playing?(voice) ->
      {:error, "Audio already playing in voice channel."}
    # Nothing to resume: no paused ffmpeg source and no saved raw audio.
    is_nil(voice.ffmpeg_proc) and is_nil(voice.raw_audio) ->
      {:error, "Audio must be paused to resume."}
    true ->
      set_speaking(voice, true)
      # A fresh player process continues from the retained source.
      update_voice(guild_id, player_pid: spawn(Audio, :resume_player, [voice]))
      :ok
  end
end
@doc """
Checks if the bot is playing sound in a voice channel.
## Parameters
- `guild_id` - ID of guild to check if audio being played.
Returns `true` if the bot is currently being played in a voice channel, otherwise `false`.
## Examples
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.play(123456789, "https://a-real-site.biz/RickRoll.m4a")
iex> Nostrum.Voice.playing?(123456789)
true
iex> Nostrum.Voice.pause(123456789)
iex> Nostrum.Voice.playing?(123456789)
false
```
"""
@spec playing?(Guild.id()) :: boolean
# Delegates to the VoiceState predicate for this guild's connection.
def playing?(guild_id), do: VoiceState.playing?(get_voice(guild_id))
@doc """
Checks if the connection is up and ready to play audio.
## Parameters
- `guild_id` - ID of guild to check if voice connection is up.
Returns `true` if the bot is connected to a voice channel, otherwise `false`.
This function does not check if audio is already playing. For that, use `playing?/1`.
## Examples
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.ready?(123456789)
true
iex> Nostrum.Voice.leave_channel(123456789)
iex> Nostrum.Voice.ready?(123456789)
false
```
"""
@spec ready?(Guild.id()) :: boolean
# True once the guild's voice connection can carry RTP audio.
def ready?(guild_id), do: VoiceState.ready_for_rtp?(get_voice(guild_id))
@doc """
Gets the id of the voice channel that the bot is connected to.
## Parameters
- `guild_id` - ID of guild that the resultant channel belongs to.
Returns the `channel_id` for the channel the bot is connected to, otherwise `nil`.
## Examples
```Elixir
iex> Nostrum.Voice.join_channel(123456789, 420691337)
iex> Nostrum.Voice.get_channel(123456789)
420691337
iex> Nostrum.Voice.leave_channel(123456789)
iex> Nostrum.Voice.get_channel(123456789)
nil
```
"""
@spec get_channel_id(Guild.id()) :: Channel.id()
def get_channel_id(guild_id) do
  # Falls through to nil when no voice state exists for the guild.
  with %{channel_id: channel_id} <- get_voice(guild_id), do: channel_id
end
@doc """
Gets the current URL being played.
If `play/4` was invoked with type `:url`, `:ytdl`, or `:stream`, this function will return
the URL given as input last time it was called.
If `play/4` was invoked with type `:pipe`, `:raw`, or `:raw_s`, this will return `nil`
as the input is raw audio data, not be a readable URL string.
"""
@doc since: "0.6.0"
@spec get_current_url(Guild.id()) :: String.t() | nil
def get_current_url(guild_id) do
  # Falls through to nil when no voice state exists for the guild.
  with %{current_url: current_url} <- get_voice(guild_id), do: current_url
end
@doc """
Gets a map of RTP SSRC to user id.
Within a voice channel, an SSRC (synchronization source) will uniquely map to a
user id of a user who is speaking.
If listening to incoming voice packets asynchronously, this function will not be
needed as the `t:Nostrum.Struct.VoiceWSState.ssrc_map/0` will be available with every event.
If listening with `listen/3`, this function may be used. It is recommended to
cache the result of this function and only call it again when you encounter an
SSRC that is not present in the cached result. This is to reduce excess load on the
voice websocket and voice state processes.
"""
@doc since: "0.6.0"
@spec get_ssrc_map(Guild.id()) :: VoiceWSState.ssrc_map()
def get_ssrc_map(guild_id) do
  # Read the websocket state of this guild's voice session and expose its
  # SSRC -> user id mapping.
  voice_state = get_voice(guild_id)
  ws_state = Session.get_ws_state(voice_state.session_pid)
  ws_state.ssrc_map
end
@doc false
# Internal helper: forwards the speaking flag to the guild's voice session.
# `timed_out` marks updates triggered by a timeout rather than a caller.
def set_speaking(%VoiceState{} = voice, speaking, timed_out \\ false) do
  Session.set_speaking(voice.session_pid, speaking, timed_out)
end
@doc """
Low-level. Set speaking flag in voice channel.
This function does not need to be called unless you are sending audio frames
directly using `Nostrum.Voice.send_frames/2`.
"""
@doc since: "0.5.0"
@spec set_is_speaking(Guild.id(), boolean) :: :ok
def set_is_speaking(guild_id, speaking) do
  guild_id
  |> get_voice()
  |> set_speaking(speaking)
end
@doc """
Low-level. Send pre-encoded audio packets directly.
Speaking should be set to true via `Nostrum.Voice.set_is_speaking/2` before sending frames.
Opus frames will be encrypted and prefixed with the appropriate RTP header and sent immediately.
The length of `frames` depends on how often you wish to send a sequence of frames.
A single frame contains 20ms of audio. Sending more than 50 frames (1 second of audio)
in a single function call may result in inconsistent playback rates.
`Nostrum.Voice.playing?/1` will not return accurate values when using `send_frames/2`
instead of `Nostrum.Voice.play/4`
"""
@doc since: "0.5.0"
@spec send_frames(Guild.id(), [opus_packet()]) :: :ok | {:error, String.t()}
def send_frames(guild_id, frames) when is_list(frames) do
  voice = get_voice(guild_id)

  case VoiceState.ready_for_rtp?(voice) do
    true ->
      # Frames are encrypted, RTP-framed, and sent immediately.
      Audio.send_frames(frames, voice)
      :ok

    false ->
      {:error, "Must be connected to voice channel to send frames."}
  end
end
@doc """
Low-level. Manually connect to voice websockets gateway.
This function should only be called if config option `:voice_auto_connect` is set to `false`.
By default Nostrum will automatically create a voice gateway when joining a channel.
"""
@doc since: "0.5.0"
@spec connect_to_gateway(Guild.id()) :: :ok | {:error, String.t()}
def connect_to_gateway(guild_id) do
  voice = get_voice(guild_id)
  cond do
    # Handshake data is complete but no session exists yet: start one.
    VoiceState.ready_for_ws?(voice) ->
      VoiceSupervisor.create_session(voice)
      :ok
    is_nil(voice) ->
      {:error, "Must be in voice channel to connect to gateway."}
    # A voice state exists and is past the ws-ready stage: already connected.
    true ->
      {:error, "Voice gateway connection already open."}
  end
end
@doc """
Listen for incoming voice RTP packets.
## Parameters
- `guild_id` - ID of guild that the bot is listening to.
- `num_packets` - Number of packets to wait for.
- `raw_rtp` - Whether to return raw RTP packets. Defaults to `false`.
Returns a list of tuples of type `t:rtp_opus/0`.
The inner tuple contains fields from the RTP header and can be matched against
to retrieve information about the packet such as the SSRC, which identifies the source.
Note that RTP timestamps are completely unrelated to Unix timestamps.
If `raw_rtp` is set to `true`, a list of raw RTP packets is returned instead.
To extract an opus packet from an RTP packet, see `extract_opus_packet/1`.
This function will block until the specified number of packets is received.
"""
@doc since: "0.6.0"
@spec listen(Guild.id(), pos_integer, boolean) :: [rtp_opus()] | [binary] | {:error, String.t()}
def listen(guild_id, num_packets, raw_rtp \\ false) do
  voice = get_voice(guild_id)
  if VoiceState.ready_for_rtp?(voice) do
    # Blocks until `num_packets` unique RTP packets have been received.
    packets = Audio.get_unique_rtp_packets(voice, num_packets)
    if raw_rtp do
      # Caller wants untouched RTP: re-join header and payload.
      Enum.map(packets, fn {header, payload} -> header <> payload end)
    else
      Enum.map(packets, fn {header, payload} ->
        # Fixed RTP header: skip version/flags/etc. (16 bits), then pull
        # sequence, timestamp, and SSRC.
        <<_::16, seq::integer-16, time::integer-32, ssrc::integer-32>> = header
        opus = Opus.strip_rtp_ext(payload)
        {{seq, time, ssrc}, opus}
      end)
    end
  else
    {:error, "Must be connected to voice channel to listen for incoming data."}
  end
end
@doc """
Start asynchronously receiving events for incoming RTP packets for an active voice session.
This is an alternative to the blocking `listen/3`. Events will be generated asynchronously
when a user is speaking. See `t:Nostrum.Consumer.voice_incoming_packet/0` for more info.
"""
@doc since: "0.6.0"
@spec start_listen_async(Guild.id()) :: :ok | {:error, term()}
def start_listen_async(guild_id) do
  # Put the UDP socket in active mode so packets arrive as messages.
  set_udp_active(guild_id, true)
end
@doc """
Stop asynchronously receiving events for incoming RTP packets for an active voice session.
"""
@doc since: "0.6.0"
@spec stop_listen_async(Guild.id()) :: :ok | {:error, term()}
def stop_listen_async(guild_id) do
  # Return the UDP socket to passive mode.
  set_udp_active(guild_id, false)
end
# Toggles active/passive mode on the voice UDP socket for the given guild.
defp set_udp_active(guild_id, active?) do
  voice = get_voice(guild_id)

  if VoiceState.ready_for_rtp?(voice) do
    :inet.setopts(voice.udp_socket, [{:active, active?}])
  else
    {:error, "Must be connected to voice channel to alter socket options."}
  end
end
@doc """
Extract the opus packet from the RTP packet received from Discord.
Incoming voice RTP packets contain a fixed length RTP header and an optional
RTP header extension, which must be stripped to retrieve the underlying opus packet.
"""
@doc since: "0.6.0"
@spec extract_opus_packet(binary) :: opus_packet()
def extract_opus_packet(packet) do
  # Drop the fixed 12-byte RTP header, then strip any RTP header extension
  # to leave the bare opus frame.
  <<_rtp_header::binary-size(12), payload::binary>> = packet
  Opus.strip_rtp_ext(payload)
end
@doc """
Create a complete Ogg logical bitstream from a list of Opus packets.
This function takes a list of opus packets and returns a list of Ogg
encapsulated Opus pages for a single Ogg logical bitstream.
It is highly recommended to learn about the Ogg container format to
understand how to use the data.
To get started, assuming you have a list of evenly temporally spaced
and consecutive opus packets from a single source that you want written
to a file, you can run the following:
```elixir
bitstream =
opus_packets
|> create_ogg_bitstream()
|> :binary.list_to_bin()
File.write!("my_recording.ogg", bitstream)
```
When creating a logical bitstream, ensure that the packets are all from a single SSRC.
When listening in a channel with multiple speakers, you should be storing the received
packets in unique buckets for each SSRC so that the multiple audio sources don't become
jumbled. A single logical bitstream should represent audio data from a single speaker.
An Ogg physical bitstream (e.g. a file) may be composed of multiple interleaved Ogg
logical bitstreams as each logical bitstream and its constituent pages contain a unique
and randomly generated bitstream serial number, but this is a story for another time.
Assuming you have a list of `t:rtp_opus/0` packets that are not separated by ssrc, you
may do the following:
```elixir
jumbled_packets
|> Stream.filter(fn {{_seq, _time, ssrc}, _opus} -> ssrc == particular_ssrc end)
|> Enum.map(fn {{_seq, _time, _ssrc}, opus} -> opus end)
|> create_ogg_bitstream()
```
"""
@doc since: "0.5.1"
@spec create_ogg_bitstream([opus_packet()]) :: [binary]
# Implementation lives in Nostrum.Voice.Opus (aliased as Opus in this file).
defdelegate create_ogg_bitstream(opus_packets), to: Opus
@doc """
Pad discontinuous chunks of opus audio with silence.
This function takes a list of `t:rtp_opus/0`, which is a tuple containing RTP bits and
opus audio data. It returns a list of opus audio packets. The reason the input has to be in
the `t:rtp_opus/0` tuple format returned by `listen/3` and async listen events is that the
RTP packet header contains info on the relative timestamps of incoming packets; the opus
packets themselves don't contain information relating to timing.
The Discord client will continue to internally increment the `t:rtp_timestamp()` when the
user is not speaking such that the duration of pauses can be determined from the RTP packets.
Bots will typically not behave this way, so if you call this function on audio produced by
a bot it is very likely that no silence will be inserted.
The use case of this function is as follows:
Consider a user speaks for two seconds, pauses for ten seconds, then speaks for another two
seconds. During the pause, no RTP packets will be received, so if you create a bitstream from
it, the resulting audio will be both two-second speaking segments consecutively without the
long pause in the middle. If you wish to preserve the timing of the speaking and include the
pause, calling this function will interleave the appropriate amount of opus silence packets
to maintain temporal fidelity.
Note that the Discord client currently sends about 10 silence packets (200 ms) each time it
detects end of speech, so creating a bitstream without first padding your audio with this
function will maintain short silences between speech segments.
*This function should only be called on a collection of RTP packets from a single SSRC*
"""
@doc since: "0.6.0"
@spec pad_opus(nonempty_list(rtp_opus())) :: [opus_packet()]
# Implementation lives in Nostrum.Voice.Opus (aliased as Opus in this file).
defdelegate pad_opus(packets), to: Opus
@doc false
def handle_call({:update, guild_id, args}, _from, state) do
  # Merge the given fields into the guild's VoiceState, creating it if absent.
  voice =
    state
    |> Map.get(guild_id, VoiceState.new(guild_id: guild_id))
    |> Map.merge(Enum.into(args, %{}))
  state = Map.put(state, guild_id, voice)
  # With auto-connect enabled (the default), start the voice session as soon
  # as the handshake data is complete.
  if Application.get_env(:nostrum, :voice_auto_connect, true),
    do: start_if_ready(voice)
  {:reply, voice, state}
end
@doc false
def handle_call({:get, guild_id}, _from, state), do: {:reply, state[guild_id], state}
@doc false
def handle_call({:remove, guild_id}, _from, state) do
  # Release resources tied to the voice state, tear down the session
  # process, then drop the guild's entry from the state map.
  state[guild_id] |> VoiceState.cleanup()
  VoiceSupervisor.end_session(guild_id)
  {:reply, true, Map.delete(state, guild_id)}
end
@doc false
# Starts a voice session once all handshake data is present on the state.
def start_if_ready(%VoiceState{} = voice) do
  if VoiceState.ready_for_ws?(voice), do: VoiceSupervisor.create_session(voice)
end
@doc false
# First join in a guild: seed the voice state from the gateway payload.
def on_channel_join_new(payload) do
  update_voice(payload.guild_id,
    channel_id: payload.channel_id,
    session: payload.session_id,
    self_mute: payload.self_mute,
    self_deaf: payload.self_deaf
  )
end
@doc false
# Handles the bot being moved between voice channels in the same guild:
# tears down the old session, then seeds the new one, optionally carrying the
# audio source over (`persist_source`) so playback can resume automatically.
def on_channel_join_change(p, voice) do
  v_ws = Session.get_ws_state(voice.session_pid)

  # On the off-chance that we receive Voice Server Update first:
  {new_token, new_gateway} =
    if voice.token == v_ws.token do
      # Token unchanged: a fresh one is still coming, so reset and wait.
      {nil, nil}
    else
      # Already updated
      {voice.token, voice.gateway}
    end

  %{
    ffmpeg_proc: ffmpeg_proc,
    raw_audio: raw_audio,
    raw_stateful: raw_stateful,
    current_url: current_url,
    persist_source: persist_source,
    persist_playback: persist_playback
  } = voice

  # Nil-ify ffmpeg_proc so it doesn't get closed when cleanup is called
  if persist_source, do: update_voice(p.guild_id, ffmpeg_proc: nil)
  remove_voice(p.guild_id)

  fields =
    [
      channel_id: p.channel_id,
      session: p.session_id,
      self_mute: p.self_mute,
      self_deaf: p.self_deaf,
      # FIX: this line referenced an undefined name (the `new_token` binding
      # computed above was mangled/redacted); restore the computed token.
      token: new_token,
      gateway: new_gateway
    ] ++
      if persist_source,
        do: [
          ffmpeg_proc: ffmpeg_proc,
          raw_audio: raw_audio,
          raw_stateful: raw_stateful,
          current_url: current_url,
          persist_playback: persist_playback
        ],
        else: []

  update_voice(p.guild_id, fields)
end
end
|
lib/nostrum/voice.ex
| 0.897119
| 0.460471
|
voice.ex
|
starcoder
|
defmodule ScenicWidgets.Core.Structs.Frame do
  @moduledoc """
  A Frame struct defines the rectangular size of a component.
  """

  defmodule Coordinates do
    # An {x, y} point. `new/1` accepts keyword, map, or tuple input.
    defstruct [x: 0, y: 0]
    def new(x: x, y: y), do: %__MODULE__{x: x, y: y}
    def new(%{x: x, y: y}), do: %__MODULE__{x: x, y: y}
    def new({x, y}), do: %__MODULE__{x: x, y: y}
  end

  defmodule Dimensions do
    # A {width, height} extent. `new/1` accepts keyword, map, or tuple input.
    defstruct [width: 0, height: 0]
    def new(width: w, height: h), do: %__MODULE__{width: w, height: h}
    def new(%{width: w, height: h}), do: %__MODULE__{width: w, height: h}
    def new({w, h}), do: %__MODULE__{width: w, height: h}
  end

  defstruct [
    pin: {0, 0}, # The {x, y} of the top-left of this Frame
    top_left: nil, # A %Coordinates{} struct, essentially the same as the pin, but allows nice syntax e.g. frame.top_left.x
    orientation: :top_left, # In Scenic, the pin is always the top-left corner of the Graph. NOTE(review): the original comment said x grows downward and y grows rightward; in Scenic x grows rightward and y grows downward — confirm before relying on it.
    size: nil, # How large in {width, height} this Frame is
    dimensions: nil, # A %Dimensions{} struct mirroring `size`, for nicer syntax e.g. frame.dimensions.width rather than destructuring a {width, height} tuple
    margin: %{
      top: 0,
      right: 0,
      bottom: 0,
      left: 0 },
    label: nil, # an optional label, usually used to render a footer bar
    opts: %{} # A map to hold options, e.g. %{render_footer?: true}
  ]

  @doc """
  Builds a Frame. Given a `%Scenic.ViewPort{}`, the Frame covers the whole
  viewport; given `[pin: {x, y}, size: {w, h}]` (in that exact order), the
  Frame is pinned at that point with the given extent.
  """
  # Make a new frame the same size as the ViewPort
  def new(%Scenic.ViewPort{size: {w, h}}) do
    %__MODULE__{
      pin: {0, 0},
      top_left: Coordinates.new(x: 0, y: 0),
      size: {w, h},
      dimensions: Dimensions.new(width: w, height: h)
    }
  end

  # Make a new frame, with the top-left corner at point `pin`
  def new([pin: {x, y}, size: {w, h}]) do
    %__MODULE__{
      pin: {x, y},
      top_left: Coordinates.new(x: x, y: y),
      size: {w, h},
      dimensions: Dimensions.new(width: w, height: h)
    }
  end
end
|
lib/core/structs/frame.ex
| 0.885235
| 0.673037
|
frame.ex
|
starcoder
|
defmodule ExDgraph.Utils do
  @moduledoc "Common utilities"
  alias ExDgraph.Expr.Uid

  @doc """
  Renders a value into the string representation Dgraph expects.

  Lists are JSON-encoded; `Date`/`DateTime` become ISO 8601 with an explicit
  `+00:00` offset; everything else goes through `to_string/1`.
  """
  def as_rendered(value) do
    case value do
      x when is_list(x) -> x |> Poison.encode!()
      %Date{} = x -> x |> Date.to_iso8601() |> Kernel.<>("T00:00:00.0+00:00")
      %DateTime{} = x -> x |> DateTime.to_iso8601() |> String.replace("Z", "+00:00")
      x -> x |> to_string
    end
  end

  @doc """
  Infers the Dgraph scalar type tag for an Elixir term.

  Lists are assumed to hold GeoJSON coordinate data (`:geo`). Raises
  `CaseClauseError` for unsupported terms.
  """
  def infer_type(type) do
    case type do
      x when is_boolean(x) -> :bool
      x when is_binary(x) -> :string
      x when is_integer(x) -> :int
      x when is_float(x) -> :float
      x when is_list(x) -> :geo
      %DateTime{} -> :datetime
      %Date{} -> :date
      %Uid{} -> :uid
    end
  end

  @doc """
  Renders `value` as a literal of the given `type`.

  Returns `{:ok, rendered}`, or `{:error, {:invalidly_typed_value, value, type}}`
  when the value does not match the declared type.
  """
  def as_literal(value, type) do
    case {type, value} do
      {:int, v} when is_integer(v) -> {:ok, to_string(v)}
      {:float, v} when is_float(v) -> {:ok, as_rendered(v)}
      {:bool, v} when is_boolean(v) -> {:ok, as_rendered(v)}
      {:string, v} when is_binary(v) -> {:ok, v |> strip_quotes |> wrap_quotes}
      {:date, %Date{} = v} -> {:ok, as_rendered(v)}
      {:datetime, %DateTime{} = v} -> {:ok, as_rendered(v)}
      {:geo, v} when is_list(v) -> check_and_render_geo_numbers(v)
      {:uid, v} when is_binary(v) -> {:ok, "<" <> v <> ">"}
      _ -> {:error, {:invalidly_typed_value, value, type}}
    end
  end

  @doc """
  Renders any value and normalizes it into a double-quoted string literal.
  """
  def as_string(value) do
    value
    |> as_rendered
    |> strip_quotes
    |> wrap_quotes
  end

  # GeoJSON coordinate data must be floats all the way down.
  defp check_and_render_geo_numbers(nums) do
    if nums |> List.flatten() |> Enum.all?(&is_float/1) do
      {:ok, nums |> as_rendered}
    else
      {:error, :invalid_geo_json}
    end
  end

  defp wrap_quotes(value) when is_binary(value) do
    "\"" <> value <> "\""
  end

  # Removes a single leading and a single trailing double quote, if present.
  defp strip_quotes(value) when is_binary(value) do
    value
    |> String.replace(~r/^"/, "")
    # BUG FIX: was ~r/"&/ — that matches a literal `"&` sequence and never
    # stripped the trailing quote; the end-of-string anchor `$` mirrors the
    # `^` anchor used for the leading quote above.
    |> String.replace(~r/"$/, "")
  end

  @doc """
  Checks whether `module` exports `func` with the given `arity`.
  """
  def has_function?(module, func, arity) do
    :erlang.function_exported(module, func, arity)
  end

  @doc """
  Checks whether `module` defines a struct.
  """
  def has_struct?(module) when is_atom(module) do
    # ensure_loaded?/1 is called for its side effect: function_exported?/3
    # only sees functions of modules that are already loaded.
    Code.ensure_loaded?(module)
    has_function?(module, :__struct__, 0)
  end

  @doc """
  Returns `params[key]`, also accepting the string form of the atom `key`;
  falls back to `default` when neither key is present.
  """
  def get_value(params, key, default \\ nil) when is_atom(key) do
    str_key = to_string(key)

    cond do
      Map.has_key?(params, key) -> Map.get(params, key)
      Map.has_key?(params, str_key) -> Map.get(params, str_key)
      true -> default
    end
  end

  @doc """
  Fills in the given `opts` with default options.
  """
  @spec default_config(Keyword.t()) :: Keyword.t()
  def default_config(config \\ Application.get_env(:ex_dgraph, ExDgraph)) do
    # NOTE(review): DGRAPH_PORT from the environment stays a string while the
    # fallback is the integer 9080 — confirm downstream handles both forms.
    config
    |> Keyword.put_new(:hostname, System.get_env("DGRAPH_HOST") || 'localhost')
    |> Keyword.put_new(:port, System.get_env("DGRAPH_PORT") || 9080)
    |> Keyword.put_new(:pool_size, 5)
    |> Keyword.put_new(:max_overflow, 2)
    |> Keyword.put_new(:timeout, 15_000)
    |> Keyword.put_new(:pool, DBConnection.Poolboy)
    |> Keyword.put_new(:ssl, false)
    |> Keyword.put_new(:tls_client_auth, false)
    |> Keyword.put_new(:certfile, nil)
    |> Keyword.put_new(:keyfile, nil)
    |> Keyword.put_new(:cacertfile, nil)
    |> Keyword.put_new(:retry_linear_backoff, delay: 150, factor: 2, tries: 3)
    |> Keyword.put_new(:enforce_struct_schema, false)
    |> Keyword.put_new(:keepalive, :infinity)
    |> Enum.reject(fn {_k, v} -> is_nil(v) end)
  end
end
# Partly Copyright (c) 2017 <NAME>
# Source https://github.com/elbow-jason/dgraph_ex
|
lib/exdgraph/utils.ex
| 0.680135
| 0.462048
|
utils.ex
|
starcoder
|
defmodule Hangman.Player.FSM do
  @moduledoc """
  Module implements a non-process player fsm
  which handles managing the state of types implemented
  through Player and the Player Action protocol.
  FSM provides a state machine wrapper over the `Player`
  The FSM is not coupled at all to the
  specific player type but the generic Player
  which relies on the Action Protocol, which
  provides for succinct code along with the already succinct
  design of the Fsm module code.
  Works for all supported player types
  States are `initial`, `begin`, `setup`, `action`, `transit`, `exit`
  The event `proceed` transitions between states, when we are not issuing
  a `guess` or `initialize` event.
  Here are the state transition flows:
  A) initial -> begin
  B) begin -> setup | exit
  C) setup -> action
  D) action -> transit | setup
  E) transit -> begin | exit
  F) exit -> exit
  Basically upon leaving the initial state, we transition to begin.
  From there we make the determination of whether we should proceed on to setup the guess
  state or terminate early and exit.
  If the game was recently just aborted and we are done with playing any more games -> we exit.
  Once we are in the setup state it is obvious that our next step is to the action state.
  Here we can try out our new guess (either selected or auto-generated)
  From action state we either circle back to setup state to generate the new word set state and
  overall guess state and possibly to collect the next user guess. Else, we have either
  won or lost the game and can confidently move to the transit state.
  The transit state indicates that we are in transition having a single game over.
  Either we proceed to start a new game and head to begin or we've already finished
  all games and happily head to the exit state.
  Ultimately the specific Flow or CLI `Handler`, when in exit state terminates the FSM loop
  """
  # The `Fsm` package macros (defstate/defevent/next_state/respond) generate
  # the state machine; `data` carries the current Player value between events.
  use Fsm, initial_state: :initial, initial_data: nil
  alias Hangman.Player
  require Logger
  # initial: entered once; `initialize` builds the Player and moves to :begin.
  defstate initial do
    defevent initialize(args) do
      player = Player.new(args)
      next_state(:begin, player)
    end
  end
  # begin: ask the Player whether to start a game (:setup) or stop (:exit).
  defstate begin do
    defevent proceed, data: player do
      {player, code} = player |> Player.begin()
      case code do
        :start -> respond({:begin, "fsm begin"}, :setup, player)
        :finished -> respond({:begin, "going to fsm exit"}, :exit, player)
      end
    end
  end
  # setup: prepare the next guess round; an empty status means nothing to show.
  defstate setup do
    defevent proceed, data: player do
      {player, status} = player |> Player.setup()
      # Logger.debug "FSM setup: player is #{inspect player}"
      case status do
        [] -> respond({:setup, []}, :action, player)
        _ -> respond({:setup, [display: player.display, status: status]}, :action, player)
      end
    end
  end
  # action: apply a guess; game-ending statuses go to :transit, otherwise loop
  # back to :setup for the next guess.
  defstate action do
    defevent guess(data), data: player do
      {player, status} = player |> Player.guess(data)
      _ = Logger.debug("FSM action: player is #{inspect(player)}")
      # check if we get game won or game lost
      case status do
        {code, text} when code in [:won, :lost] ->
          respond({:action, text}, :transit, player)
        {:guessing, text} ->
          respond({:action, text}, :setup, player)
      end
    end
  end
  # transit: one game is over; either start another (:begin) or finish (:exit).
  defstate transit do
    defevent proceed, data: player do
      {player, status} = player |> Player.transition()
      # _ = Logger.debug "FSM transit: player is #{inspect player}"
      case status do
        {:start, text} ->
          respond({:transit, text}, :begin, player)
        {:finished, text} ->
          respond({:transit, text}, :exit, player)
      end
    end
  end
  # exit: terminal state; `proceed` keeps responding with the final status.
  defstate exit do
    defevent proceed, data: player do
      _ = Logger.debug("FSM exit: player is #{inspect(player)}")
      # Games Over
      respond({:exit, player.round.status_text}, :exit, player)
    end
  end
  # called for undefined state/event mapping when inside any state
  defevent(_, do: raise("Player FSM doesn't support requested state:event mapping."))
end
|
lib/hangman/player_fsm.ex
| 0.72526
| 0.873431
|
player_fsm.ex
|
starcoder
|
defmodule Phoenix.HTML do
  @moduledoc """
  Conveniences for working HTML strings and templates.
  When used, it imports this module and, in the future,
  many other modules under the `Phoenix.HTML` namespace.
  ## HTML Safe
  One of the main responsibilities of this module is to
  provide convenience functions for escaping and marking
  HTML code as safe.
  In order to mark some code as safe, developers should
  invoke the `safe/1` function. User data or data coming
  from the database should never be marked as safe, it
  should be kept as regular data or given to `html_escape/1`
  so its contents are escaped and the end result is considered
  to be safe.
  """
  @doc false
  defmacro __using__(_opts) do
    quote do
      import Phoenix.HTML
    end
  end
  @type safe :: {:safe, unsafe}
  @type unsafe :: iodata
  @doc """
  Gets the flash messages from the `%Plug.Conn{}`
  See `Phoenix.Controller.get_flash/2` for details.
  """
  @spec get_flash(Plug.Conn.t, atom) :: binary
  defdelegate get_flash(conn, key), to: Phoenix.Controller
  @doc """
  Marks the given value as safe, therefore its contents won't be escaped.
      iex> Phoenix.HTML.safe("<hello>")
      {:safe, "<hello>"}
      iex> Phoenix.HTML.safe({:safe, "<hello>"})
      {:safe, "<hello>"}
  """
  @spec safe(unsafe | safe) :: safe
  def safe({:safe, _} = already_safe), do: already_safe
  def safe(value) when is_binary(value) or is_list(value), do: {:safe, value}
  @doc """
  Concatenates data safely.
      iex> Phoenix.HTML.safe_concat("<hello>", "<world>")
      {:safe, ["<hello>"|"<world>"]}
      iex> Phoenix.HTML.safe_concat({:safe, "<hello>"}, "<world>")
      {:safe, ["<hello>"|"<world>"]}
      iex> Phoenix.HTML.safe_concat("<hello>", {:safe, "<world>"})
      {:safe, ["<hello>"|"<world>"]}
      iex> Phoenix.HTML.safe_concat({:safe, "<hello>"}, {:safe, "<world>"})
      {:safe, ["<hello>"|"<world>"]}
  """
  @spec safe_concat(unsafe | safe, unsafe | safe) :: safe
  def safe_concat(left, right) do
    # Already-safe operands are used as-is; anything else is escaped first.
    {:safe, [to_safe_iodata(left) | to_safe_iodata(right)]}
  end
  @doc """
  Escapes the HTML entities in the given string, marking it as safe.
      iex> Phoenix.HTML.html_escape("<hello>")
      {:safe, "<hello>"}
      iex> Phoenix.HTML.html_escape('<hello>')
      {:safe, ["&lt;", 104, 101, 108, 108, 111, "&gt;"]}
      iex> Phoenix.HTML.html_escape({:safe, "<hello>"})
      {:safe, "<hello>"}
  """
  @spec html_escape(safe | unsafe) :: safe
  def html_escape({:safe, _} = already_safe), do: already_safe
  def html_escape(data), do: {:safe, io_escape(data)}
  # Unwraps safe tuples; escapes raw iodata.
  defp to_safe_iodata({:safe, data}), do: data
  defp to_safe_iodata(data), do: io_escape(data)
  # Dispatches to the protocol implementation matching the iodata shape.
  defp io_escape(data) when is_binary(data), do: Phoenix.HTML.Safe.BitString.to_iodata(data)
  defp io_escape(data) when is_list(data), do: Phoenix.HTML.Safe.List.to_iodata(data)
end
|
lib/phoenix/html.ex
| 0.765374
| 0.571856
|
html.ex
|
starcoder
|
defmodule Raft.RPC do
  @moduledoc """
  Defines multiple rpc commands and functions for broadcasting messages to other
  peers.
  """
  alias Raft.Configuration.Server
  require Logger

  defmodule AppendEntriesReq do
    @enforce_keys [:leader_id, :entries, :prev_log_index, :prev_log_term, :leader_commit]
    defstruct [
      :to, # Who we're sending this to
      :term, # Leaders current term
      :from, # Who sent this
      :leader_id, # We need this so we can respond to the correct pid and so
                  # followers can redirect clients
      :entries, # Log entries to store. This is empty for heartbeats
      :prev_log_index, # index of log entry immediately preceding new ones
      :prev_log_term, # term of previous log index entry
      :leader_commit, # The leaders commit index
    ]
  end

  defmodule AppendEntriesResp do
    defstruct [
      :to,
      :from, # We need this so we can track who sent us the message
      :term, # The current term for the leader to update itself
      :index, # The index we're at. Used to prevent re-commits with duplicate
              # Rpcs.
      :success, # true if follower contained entry matching prev_log_index and
                # prev_log_term
    ]
  end

  defmodule RequestVoteReq do
    defstruct [
      :to, # Who we're going to send this to. A %Server{}
      :term, # candidates term
      :from, # Who sent this message
      :candidate_id, # candidate requesting vote
      :last_log_index, # index of candidates last log entry
      :last_log_term, # term of candidates last log entry
    ]
  end

  defmodule RequestVoteResp do
    defstruct [
      :to, # Who we're sending this to
      :from, # pid that the message came from
      :term, # current term for the candidate to update itself
      :vote_granted, # true means candidate received vote
    ]
  end

  @type server :: pid()
  @type msg :: %AppendEntriesReq{}
             | %AppendEntriesResp{}
             | %RequestVoteReq{}
             | %RequestVoteResp{}

  @doc """
  Sends each rpc in `rpcs` asynchronously; returns the spawned pids.
  """
  def broadcast(rpcs) do
    Enum.map(rpcs, &send_msg/1)
  end

  @doc """
  Sends a message to a server
  """
  @spec send_msg(msg()) :: pid()
  def send_msg(%{from: from, to: to} = rpc) do
    # Spawn so a slow or dead peer never blocks the caller; the reply is
    # cast back to `from` rather than returned.
    spawn(fn ->
      to
      |> Server.to_server()
      |> GenStateMachine.call(rpc)
      |> case do
        %AppendEntriesResp{} = resp ->
          GenStateMachine.cast(from, resp)

        %RequestVoteResp{} = resp ->
          GenStateMachine.cast(from, resp)

        error ->
          # BUG FIX: `to` is a %Server{} and `from` is a pid; interpolating
          # them bare ("#{to}") raises Protocol.UndefinedError (no
          # String.Chars impl), killing this process before the error is
          # ever logged. Use inspect/1 for both.
          Logger.error(fn ->
            "Error: #{inspect(error)} sending #{inspect(rpc)} to #{inspect(to)} from #{inspect(from)}"
          end)
      end
    end)
  end
end
|
lib/raft/rpc.ex
| 0.557966
| 0.442215
|
rpc.ex
|
starcoder
|
defmodule CSQuery.Range do
  @moduledoc """
  An AWS CloudSearch structured query syntax representation of ranges that may
  be inclusive or exclusive, open or closed, and may be constructed of
  integers, floats, `t:DateTime.t/0` values, or (in some cases) strings.
  > A brief note about notation: `{` and `}` denote *exclusive* range bounds;
  > `[` and `]` denote *inclusive* range bounds.
  ## Inclusive or Exclusive Ranges
  Ranges that are inclusive cover the entire range, including the boundaries of
  the range. These are typical of Elixir `t:Range.t/0` values. The Elixir range
  `1..10` is all integers from `1` through `10`. `CSQuery.Range` values may be
  lower-bound exclusive, upper-bound exclusive, or both-bound exclusive.
  * `[1,10]`: lower- and upper-bound inclusive. Integers `1` through `10`.
  * `{1,10}`: lower- and upper-bound exclusive; Integers `2` through `9`.
  * `{1,10]`: lower-bound exclusive, upper-bound inclusive. Integers `2`
    through `10`.
  * `[1,10}`: lower-bound inclusive, upper-bound exclusive. Integers `1`
    through `9`.
  ## Open or Closed Ranges
  An open range is one that omits either the upper or lower bound.
  Representationally, an omitted bound must be described as exclusive.
  * `{,10]`: Open range for integers up to `10`.
  * `[10,}`: Open range for integers `10` or larger.
  Logically, a fully open range (`{,}`) is possible, but is meaningless in the
  context of a search, so a `CSQuery.OpenRangeError` will be thrown.
      iex> new({nil, nil})
      ** (CSQuery.OpenRangeError) CSQuery.Range types may not be open on both upper and lower bounds.
      iex> new(%{})
      ** (CSQuery.OpenRangeError) CSQuery.Range types may not be open on both upper and lower bounds.
  ## Range Types
  Elixir range values are restricted to integers. CloudSearch ranges may be:
  * Integers:
    ```
    iex> new({1, 10}) |> to_value
    "[1,10]"
    ```
  * Floats:
    ```
    iex> new({:math.pi(), :math.pi() * 2}) |> to_value
    "[3.141592653589793,6.283185307179586]"
    ```
  * Mixed numbers:
    ```
    iex> new({1, :math.pi() * 2}) |> to_value
    "[1,6.283185307179586]"
    ```
  * Timestamps
    ```
    iex> start = %DateTime{
    ...>   year: 2018, month: 7, day: 21,
    ...>   hour: 17, minute: 55, second: 0,
    ...>   time_zone: "America/Toronto", zone_abbr: "EST",
    ...>   utc_offset: -14_400, std_offset: 0
    ...> }
    iex> finish = %DateTime{
    ...>   year: 2018, month: 7, day: 21,
    ...>   hour: 19, minute: 55, second: 0,
    ...>   time_zone: "America/Toronto", zone_abbr: "EST",
    ...>   utc_offset: -14_400, std_offset: 0
    ...> }
    iex> new({start, finish}) |> to_value
    "['2018-07-21T17:55:00-04:00','2018-07-21T19:55:00-04:00']"
    ```
  * Strings
    ```
    iex> new({"a", "z"}) |> to_value
    "['a','z']"
    ```
  integers, floats, timestamps, or strings.
  `CSQuery.Range` construction looks for compatible types (integers and floats
  may be mixed, but neither timestamps nor strings may be mixed with other
  types), and a `CSQuery.RangeValueTypeError` will be thrown if incompatible
  types are found.
      iex> new(%{first: 3, last: "z"})
      ** (CSQuery.RangeValueTypeError) CSQuery.Range types must be compatible (numbers, dates, and strings may not be mixed).
      iex> new(%{first: DateTime.utc_now(), last: "z"})
      ** (CSQuery.RangeValueTypeError) CSQuery.Range types must be compatible (numbers, dates, and strings may not be mixed).
  """
  @typedoc "Supported values for CSQuery.Range values."
  @type value :: nil | integer | float | DateTime.t() | String.t()
  @type t :: %__MODULE__{first: value, first?: value, last: value, last?: value}
  # `first`/`last` are inclusive bounds; `first?`/`last?` are exclusive bounds.
  defstruct [:first, :first?, :last, :last?]
  @doc """
  Create a new `CSQuery.Range` value.
  """
  @spec new(Range.t()) :: t
  def new(%Range{first: first, last: last}), do: %__MODULE__{first: first, last: last}
  @spec new({nil | number, nil | number}) :: t | no_return
  @spec new({nil | String.t(), nil | String.t()}) :: t | no_return
  @spec new({nil | DateTime.t(), nil | DateTime.t()}) :: t | no_return
  def new({_, _} = value), do: build(value)
  @spec new(map) :: t | no_return
  def new(%{} = range), do: build(range)
  # Renders the range in CloudSearch syntax, e.g. "[1,10]" or "{2,9}".
  def to_value(%{first: first, first?: first?, last: last, last?: last?}) do
    lower(value(first), value(first?)) <> "," <> upper(value(last), value(last?))
  end
  # True when `value` looks like an already-rendered range string:
  # exactly one comma, opening with `[`/`{` and closing with `]`/`}`.
  def is_range_string?(value) do
    value
    |> String.split(",")
    |> case do
      [low, high] ->
        (String.starts_with?(low, "[") || String.starts_with?(low, "{")) &&
          (String.ends_with?(high, "]") || String.ends_with?(high, "}"))
      _ ->
        false
    end
  end
  # Renders a single bound; strings and timestamps are single-quoted.
  defp value(nil), do: nil
  defp value(value) when is_number(value), do: value
  defp value(value) when is_binary(value), do: "'#{value}'"
  defp value(%DateTime{} = value), do: "'#{DateTime.to_iso8601(value)}'"
  @blank [nil, ""]
  # lower/2 and upper/2 pick the bracket: inclusive value wins over the
  # exclusive one; a missing bound renders as a bare open bracket.
  defp lower(f, f?) when f in @blank and f? in @blank, do: "{"
  defp lower(f, f?) when f in @blank, do: "{#{f?}"
  defp lower(f, _), do: "[#{f}"
  defp upper(l, l?) when l in @blank and l? in @blank, do: "}"
  defp upper(l, l?) when l in @blank, do: "#{l?}}"
  defp upper(l, _), do: "#{l}]"
  defp build({first, last}), do: valid?(%__MODULE__{first: first, last: last})
  defp build(%{} = range), do: valid?(struct(__MODULE__, range))
  # Returns the range when valid, otherwise raises the exception module
  # returned by the list clauses below.
  defp valid?(%__MODULE__{} = range) do
    case valid?(Map.values(range)) do
      true -> range
      exception when is_atom(exception) -> raise(exception)
    end
  end
  # The list clauses receive Map.values/1 of the struct. NOTE: this relies on
  # map key term ordering putting :__struct__ first (it sorts before :first,
  # :first?, :last, :last? for small maps), hence the leading `_` in each
  # pattern — presumably intentional; confirm if the struct grows fields.
  defp valid?([_, nil, nil, nil, nil]), do: CSQuery.OpenRangeError
  defp valid?([_, a, b, c, d])
       when (is_nil(a) or is_number(a)) and (is_nil(b) or is_number(b)) and
              (is_nil(c) or is_number(c)) and (is_nil(d) or is_number(d)),
       do: true
  defp valid?([_, a, b, c, d])
       when (is_nil(a) or is_binary(a)) and (is_nil(b) or is_binary(b)) and
              (is_nil(c) or is_binary(c)) and (is_nil(d) or is_binary(d)),
       do: true
  # Map-valued bounds are only valid when every present one is a DateTime.
  defp valid?([_, a, b, c, d])
       when (is_nil(a) or is_map(a)) and (is_nil(b) or is_map(b)) and (is_nil(c) or is_map(c)) and
              (is_nil(d) or is_map(d)) do
    [a, b, c, d]
    |> Enum.filter(& &1)
    |> Enum.map(&Map.get(&1, :__struct__))
    |> Enum.all?(&(&1 == DateTime))
    |> if(do: true, else: CSQuery.RangeValueTypeError)
  end
  defp valid?(_range), do: CSQuery.RangeValueTypeError
end
|
lib/csquery/range.ex
| 0.926992
| 0.919895
|
range.ex
|
starcoder
|
defmodule Payjp.Customers do
  @moduledoc """
  Main API for working with Customers at Payjp. Through this API you can:
  - create customers
  - get a customer
  - update customer
  - delete single customer
  - delete all customer
  - get customers list
  - get all customers
  - list subscriptions for the customer
  - get a subscription for the customer
  Supports Connect workflow by allowing to pass in any API key explicitely (vs using the one from env/config).
  (API ref: https://pay.jp/docs/api/#customer-顧客
  """
  @endpoint "customers"

  @doc """
  Creates a Customer with the given parameters - all of which are optional.
  ## Example
  ```
  new_customer = [
    email: "<EMAIL>",
    description: "An Test Account",
    metadata:[
      app_order_id: "ABC123"
      app_state_x: "xyz"
    ],
    card: [
      number: "4111111111111111",
      exp_month: 01,
      exp_year: 2018,
      cvc: 123,
      name: "<NAME>"
    ]
  ]
  {:ok, res} = Payjp.Customers.create new_customer
  ```
  """
  def create(params) do
    create params, Payjp.config_or_env_key
  end

  @doc """
  Creates a Customer with the given parameters - all of which are optional.
  Using a given payjp key to apply against the account associated.
  ## Example
  ```
  {:ok, res} = Payjp.Customers.create new_customer, key
  ```
  """
  def create(params, key) do
    Payjp.make_request_with_key(:post, @endpoint, key, params)
    |> Payjp.Util.handle_payjp_response
  end

  @doc """
  Retrieves a given Customer with the specified ID. Returns 404 if not found.
  ## Example
  ```
  {:ok, cust} = Payjp.Customers.get "customer_id"
  ```
  """
  def get(id) do
    get id, Payjp.config_or_env_key
  end

  @doc """
  Retrieves a given Customer with the specified ID. Returns 404 if not found.
  Using a given payjp key to apply against the account associated.
  ## Example
  ```
  {:ok, cust} = Payjp.Customers.get "customer_id", key
  ```
  """
  def get(id, key) do
    Payjp.make_request_with_key(:get, "#{@endpoint}/#{id}", key)
    |> Payjp.Util.handle_payjp_response
  end

  @doc """
  Updates a Customer with the given parameters - all of which are optional.
  ## Example
  ```
  new_fields = [
    email: "<EMAIL>",
    description: "New description",
  ]
  {:ok, res} = Payjp.Customers.update(customer_id, new_fields)
  ```
  """
  def update(customer_id, params) do
    update(customer_id, params, Payjp.config_or_env_key)
  end

  @doc """
  Updates a Customer with the given parameters - all of which are optional.
  Using a given payjp key to apply against the account associated.
  ## Example
  ```
  {:ok, res} = Payjp.Customers.update(customer_id, new_fields, key)
  ```
  """
  def update(customer_id, params, key) do
    Payjp.make_request_with_key(:post, "#{@endpoint}/#{customer_id}", key, params)
    |> Payjp.Util.handle_payjp_response
  end

  @doc """
  Returns a list of Customers with a default limit of 10 which you can override with `list/1`
  ## Example
  ```
  {:ok, customers} = Payjp.Customers.list
  {:ok, customers} = Payjp.Customers.list(since: 1487473464, limit: 20)
  ```
  """
  def list(opts \\ []) do
    list Payjp.config_or_env_key, opts
  end

  @doc """
  Returns a list of Customers with a default limit of 10 which you can override with `list/1`
  Using a given payjp key to apply against the account associated.
  ## Example
  ```
  {:ok, customers} = Payjp.Customers.list(key, since: 1487473464, limit: 20)
  ```
  """
  def list(key, opts) do
    Payjp.Util.list @endpoint, key, opts
  end

  @doc """
  Deletes a Customer with the specified ID
  ## Example
  ```
  {:ok, resp} = Payjp.Customers.delete "customer_id"
  ```
  """
  def delete(id) do
    delete id, Payjp.config_or_env_key
  end

  @doc """
  Deletes a Customer with the specified ID
  Using a given payjp key to apply against the account associated.
  ## Example
  ```
  {:ok, resp} = Payjp.Customers.delete "customer_id", key
  ```
  """
  def delete(id, key) do
    Payjp.make_request_with_key(:delete, "#{@endpoint}/#{id}", key)
    |> Payjp.Util.handle_payjp_response
  end

  @doc """
  Deletes all Customers
  ## Example
  ```
  Payjp.Customers.delete_all
  ```
  """
  def delete_all do
    case all() do
      {:ok, customers} ->
        Enum.each customers, fn c -> delete(c["id"]) end
      {:error, err} -> raise err
    end
  end

  @doc """
  Deletes all Customers
  Using a given payjp key to apply against the account associated.
  ## Example
  ```
  Payjp.Customers.delete_all key
  ```
  """
  def delete_all(key) do
    # BUG FIX: previously called all() (env/config key) while deleting with
    # the explicit `key`, so the listing and the deletes could target
    # different accounts. List with the same key we delete with.
    case all(key, [], limit: @max_fetch_size) do
      {:ok, customers} ->
        Enum.each customers, fn c -> delete(c["id"], key) end
      {:error, err} -> raise err
    end
  end

  @max_fetch_size 100

  @doc """
  List all customers.
  ##Example
  ```
  {:ok, customers} = Payjp.Customers.all
  ```
  """
  def all(accum \\ [], opts \\ [limit: @max_fetch_size]) do
    all Payjp.config_or_env_key, accum, opts
  end

  @doc """
  List all customers.
  Using a given payjp key to apply against the account associated.
  ##Example
  ```
  {:ok, customers} = Payjp.Customers.all key, accum, since
  ```
  """
  def all(key, accum, opts) do
    case Payjp.Util.list_raw("#{@endpoint}", key, opts) do
      {:ok, resp} ->
        case resp[:has_more] do
          true ->
            # NOTE(review): this pages with `until:` while subscriptions/4
            # pages with `since:` — confirm the intended direction.
            last_sub = List.last( resp[:data] )
            all( key, resp[:data] ++ accum, until: last_sub["created"], limit: @max_fetch_size)
          false ->
            result = resp[:data] ++ accum
            {:ok, result}
        end
      {:error, err} -> raise err
    end
  end

  @doc """
  Returns a subscription; customer_id and subscription_id are required.
  ## Example
  ```
  {:ok, sub} = Payjp.Subscriptions.subscription "customer_id", "subscription_id"
  ```
  """
  def subscription(customer_id, sub_id) do
    subscription customer_id, sub_id, Payjp.config_or_env_key
  end

  @doc """
  Returns a subscription using given api key; customer_id and subscription_id are required.
  ## Example
  ```
  {:ok, sub} = Payjp.Subscriptions.subscription "customer_id", "subscription_id", key
  ```
  """
  def subscription(customer_id, sub_id, key) do
    Payjp.make_request_with_key(:get, "#{@endpoint}/#{customer_id}/subscriptions/#{sub_id}", key)
    |> Payjp.Util.handle_payjp_response
  end

  @doc """
  Returns subscription list for the specified customer; customer_id is required.
  ## Example
  ```
  {:ok, sub} = Payjp.Subscriptions.subscriptions "customer_id"
  ```
  """
  def subscriptions(customer_id, accum \\ [], opts \\ []) do
    subscriptions customer_id, accum, Payjp.config_or_env_key, opts
  end

  @doc """
  Returns subscription list for the specified customer with given api key; customer_id is required.
  ## Example
  ```
  {:ok, sub} = Payjp.Subscriptions.subscriptions "customer_id", key, []
  ```
  """
  def subscriptions(customer_id, accum, key, opts) do
    case Payjp.Util.list_raw("#{@endpoint}/#{customer_id}/subscriptions", key, opts) do
      {:ok, resp} ->
        case resp[:has_more] do
          true ->
            last_sub = List.last( resp[:data] )
            subscriptions(customer_id, resp[:data] ++ accum, key, since: last_sub["created"])
          false ->
            result = resp[:data] ++ accum
            {:ok, result}
        end
      # Consistency fix: all/3 raises on {:error, _}; previously this case
      # crashed with CaseClauseError instead.
      {:error, err} -> raise err
    end
  end
end
|
lib/payjp/customers.ex
| 0.834272
| 0.851768
|
customers.ex
|
starcoder
|
defmodule MarsRoverKata.Planet do
  @moduledoc """
  Represent the planet in which the robot moves.
  The planet is represented by a grid of max_x * max_y shape on zero based
  system and is a sphere so connects vertical edges towards themselves are
  in inverted coordinates.
  """
  alias MarsRoverKata.Point
  alias MarsRoverKata.Position

  @type t :: %__MODULE__{
          max_x: integer(),
          max_y: integer(),
          obstacles: list(Point.t())
        }

  defstruct max_x: 0,
            max_y: 0,
            obstacles: []

  @doc ~S"""
  Transforms a relative position to an absolute based a planet shape
  ## Examples
      iex> MarsRoverKata.Planet.to_absolute_position(%MarsRoverKata.Position{point: MarsRoverKata.Point.new(-1, -1)}, %MarsRoverKata.Planet{max_x: 5, max_y: 5})
      %MarsRoverKata.Position{point: %MarsRoverKata.Point{x: 4, y: 4}}
      iex> MarsRoverKata.Planet.to_absolute_position(%MarsRoverKata.Position{point: MarsRoverKata.Point.new(12, 12)}, %MarsRoverKata.Planet{max_x: 5, max_y: 5})
      %MarsRoverKata.Position{point: %MarsRoverKata.Point{x: 2, y: 2}}
      iex> MarsRoverKata.Planet.to_absolute_position(%MarsRoverKata.Position{point: MarsRoverKata.Point.new(-1, 1)}, %MarsRoverKata.Planet{max_x: 5, max_y: 5})
      %MarsRoverKata.Position{point: %MarsRoverKata.Point{x: 4, y: 1}}
      iex> MarsRoverKata.Planet.to_absolute_position(%MarsRoverKata.Position{point: MarsRoverKata.Point.new(1, -1)}, %MarsRoverKata.Planet{max_x: 5, max_y: 5})
      %MarsRoverKata.Position{point: %MarsRoverKata.Point{x: 4, y: 1}}
  """
  # BUG FIX: the @spec argument order was reversed (planet first), while the
  # function (and all doctests) take the position first.
  @spec to_absolute_position(Position.t(), __MODULE__.t()) :: Position.t()
  def to_absolute_position(
        %Position{
          point: %Point{
            x: x,
            y: y
          },
          direction: direction
        },
        %__MODULE__{max_x: max_x, max_y: max_y}
      ) do
    point =
      if crossing_vertical_edges?(max_y, y) do
        # Crossing a vertical edge swaps the axes (x/y inverted) — this
        # matches the doctests above; presumably models the sphere wrap.
        Point.new(
          Integer.mod(y, max_y),
          Integer.mod(x, max_x)
        )
      else
        Point.new(
          Integer.mod(x, max_x),
          Integer.mod(y, max_y)
        )
      end

    %Position{
      point: point,
      direction: direction
    }
  end

  @doc "Checks whether `point` collides with one of the planet's obstacles."
  @spec has_obstacles?(__MODULE__.t(), Point.t()) :: boolean
  def has_obstacles?(%__MODULE__{obstacles: obstacles}, point) do
    Enum.any?(obstacles, &(&1 == point))
  end

  # True when `y` falls outside the [0, max_y] band, i.e. the move wraps
  # over the top or bottom edge of the grid.
  defp crossing_vertical_edges?(max_y, y) do
    0 > y || y > max_y
  end
end
defimpl String.Chars, for: MarsRoverKata.Planet do
  alias MarsRoverKata.Planet

  # Renders the planet's grid shape as "max_x:max_y".
  def to_string(%Planet{max_x: width, max_y: height}) do
    Enum.join([width, height], ":")
  end
end
|
lib/mars_rover_kata/planet.ex
| 0.899204
| 0.72543
|
planet.ex
|
starcoder
|
defmodule StripePost.Client do
  @moduledoc """
  Access service functionality through Elixir functions,
  wrapping the underlying HTTP API calls.
  This is where you will want to write your custom
  code to access your API.
  """
  alias StripePost.Api
  @doc """
  Charge an account with the following body configurations
      StripePost.charge(
        %{amount: 10000,
          currency: "cad",
          description: "3 wozzle",
          source: "pk_abc_123"}
      )
  Where the `source` is the payment token received from Stripe most likely
  in your client javascriopt.
  You also now also authorize (without charging) an account by setting the
  `capture` field to `false`. For more details [auth and capture](https://stripe.com/docs/charges#auth-and-capture)
      StripePost.charge(
        %{amount: 10000,
          currency: "cad",
          description: "3 wozzle",
          source: "pk_abc_123"
          capture: false}
      )
  The configurations are optional, and can be (preferrably) configured as elixir configs,
  like:
      config :stripe_post,
        secret_key: "sk_test_abc123",
        public_key: "pk_test_def456",
        content_type: "application/x-www-form-urlencoded"
  But, if you must, then you can specify it directly like
      opts = %{
        secret_key: "sk_test_abc123",
        content_type: "application/x-www-form-urlencoded"
      }
  """
  def charge(body, opts \\ nil) do
    Api.request(:post, [resource: "charges", body: body], opts)
  end
  @doc """
  Capture the payment of an existing, uncaptured, charge.
  This is the second half of the two-step payment flow, where first
  you created a charge with the capture option set to false.
  For example, if you charged the following, but did NOT capture
      StripePost.charge(
        %{amount: 10000,
          currency: "cad",
          description: "3 wozzle",
          source: "pk_abc_123"
          capture: false}
      )
  The results will contain a charge ID, and captured should be false, for example
      {"id": "ch_abc123",
       "paid": true,
       "status": "succeeded",
       "captured": false}
  When you are ready to capture the payment, use that charge "id", you can also
  provide additional fields, for example:
      StripePost.capture(
        "ch_abc123",
        %{amount: 10000,
          application_fee: 100,
          destination: 90210}
      )
  Please visit https://stripe.com/docs/api#capture_charge for more information
  The configurations are optional, and can be (preferrably) configured as elixir opts,
  like:
      config :stripe_post,
        secret_key: "sk_test_abc123",
        public_key: "pk_test_def456",
        content_type: "application/x-www-form-urlencoded"
  But, if you must, then you can specify it directly like
      opts = %{
        secret_key: "sk_test_abc123",
        content_type: "application/x-www-form-urlencoded"
      }
  """
  def capture(charge_id, body \\ %{}, opts \\ nil) do
    Api.request(:post, [resource: ["charges", charge_id, "capture"], body: body], opts)
  end
  @doc """
  Create a customer with the following body configurations
      body = %{description: "customer xxx", source: "pk_abc_123"}
  """
  def create_customer(body, opts \\ nil) do
    Api.request(:post, [resource: "customers", body: body], opts)
  end
  @doc """
  Retrieve a customer by his/her stripe ID
  """
  # NOTE(review): this issues a :post to fetch a customer — Stripe's retrieve
  # endpoint is a GET; confirm whether :post is intentional for Api.request.
  def get_customer(id, opts \\ nil) do
    Api.request(:post, [resource: ["customers", id]], opts)
  end
  @doc """
  List all customer, if you don't provide a limit we will fetch them all
      query_params = %{limit: 100, starting_after: "obj_pk_1234"}
  """
  def list_customers(query_params \\ %{}, opts \\ []) do
    # No :limit means "paginate until exhausted"; otherwise a single page.
    case query_params[:limit] do
      nil -> all_customers(:first, query_params, opts)
      _ -> do_list_customers(query_params, opts)
    end
    |> clean_customers
  end
  # Kicks off pagination with the first page, then accumulates.
  defp all_customers(:first, query_params, opts) do
    query_params
    |> do_list_customers(opts)
    |> all_customers([], query_params, opts)
  end
  # Terminal page: merge what we accumulated with the final batch.
  defp all_customers(
         {:ok, %{"data" => new_customers, "has_more" => false}},
         acc,
         _,
         _
       ) do
    {:ok, %{"data" => acc ++ new_customers, "has_more" => false}}
  end
  # More pages remain: request the next page starting after the last id.
  defp all_customers(
         {:ok, %{"data" => new_customers, "has_more" => true}},
         acc,
         query_params,
         opts
       ) do
    new_customers
    |> List.last()
    # NOTE(review): `Map.get(:id)` uses an atom key while the matched payload
    # uses string keys ("data", "has_more") — this may always return nil;
    # verify the shape of customer entries returned by Api.request.
    |> Map.get(:id)
    |> (fn starting_after -> query_params |> Map.put(:starting_after, starting_after) end).()
    |> do_list_customers(opts)
    |> all_customers(acc ++ new_customers, query_params, opts)
  end
  # Any non-{:ok, ...} response (e.g. an error) is passed through untouched.
  defp all_customers(resp, _acc, _query_params, _opts), do: resp
  defp do_list_customers(query_params, opts) do
    Api.request(
      :get,
      [resource: "/customers?" <> URI.encode_query(query_params |> reject_nil())],
      opts
    )
  end
  defp clean_customers({:error, _}), do: nil
  # NOTE(review): this clause matches `{200, %{data: ...}}` (atom keys, bare
  # status), while all_customers matches `{:ok, %{"data" => ...}}` (string
  # keys) — confirm Api.request's return contract; one of the shapes may be
  # dead code.
  defp clean_customers({200, %{data: customers}}) do
    customers
    |> Enum.map(fn c -> {c[:description], c} end)
    |> Enum.into(%{})
    |> (fn mapped -> {:ok, mapped} end).()
  end
  # Drops nil-valued entries before building the query string.
  defp reject_nil(map) do
    map
    |> Enum.reject(fn {_k, v} -> v == nil end)
    |> Enum.into(%{})
  end
end
|
lib/stripe_post/client.ex
| 0.85817
| 0.405125
|
client.ex
|
starcoder
|
defmodule ExPool.Pool do
  @moduledoc """
  Pool GenServer.
  It provides an interface to start a pool, check in and check out workers.
  ```elixir
  alias ExPool.Pool
  # Starts a new pool
  {:ok, pool} = Pool.start_link(config)
  # Blocks until there is a worker available
  worker = Pool.check_out(pool)
  # do some work with the worker
  # Returns the worker to the pool
  :ok = Pool.check_in(pool, worker)
  ```
  """
  use GenServer

  alias ExPool.Manager

  ## Client API

  @doc """
  Starts a new pool GenServer.
  ## Options:
  * :name - (Optional) The name of the pool
  * The rest of the options will be passed to the internal
  state as pool configuration (for more information about the
  available options check ExPool.Pool.State.new/1).
  """
  @spec start_link(opts :: [Keyword]) :: Supervisor.on_start
  def start_link(opts \\ []) do
    name_opts = Keyword.take(opts, [:name])
    GenServer.start_link(__MODULE__, opts, name_opts)
  end

  @doc """
  Get information about the current state of the pool.
  """
  @spec info(pool :: pid) :: map
  def info(pool) do
    GenServer.call(pool, :info)
  end

  @doc """
  Retrieve a worker from the pool.
  If there aren't any available workers it blocks until one is available.
  """
  @spec check_out(pool :: pid) :: worker :: pid
  def check_out(pool) do
    GenServer.call(pool, :check_out)
  end

  @doc """
  Returns a worker into the pool to be used by other processes.
  """
  @spec check_in(pool :: pid, worker :: pid) :: :ok
  def check_in(pool, worker) do
    GenServer.cast(pool, {:check_in, worker})
  end

  ## Server callbacks
  # Idiom fix: annotate every GenServer callback with @impl true so the
  # compiler verifies the callback names/arities.

  @doc false
  @impl true
  def init(config) do
    state = Manager.new(config)
    {:ok, state}
  end

  @doc false
  @impl true
  def handle_call(:check_out, from, state) do
    case Manager.check_out(state, from) do
      # A worker was available: reply immediately.
      {:ok, {worker, new_state}} -> {:reply, worker, new_state}
      # No worker: reply later via GenServer.reply/2 (see check-in handling).
      {:waiting, new_state} -> {:noreply, new_state}
    end
  end

  def handle_call(:info, _from, state) do
    info = Manager.info(state)
    {:reply, info, state}
  end

  @doc false
  @impl true
  def handle_cast({:check_in, worker}, state) do
    state = state
    |> Manager.check_in(worker)
    |> handle_possible_checkout

    {:noreply, state}
  end

  @doc false
  @impl true
  def handle_info({:DOWN, ref, :process, _obj, _reason}, state) do
    # A monitored client died; release whatever it had checked out.
    state = state
    |> Manager.process_down(ref)
    |> handle_possible_checkout

    {:noreply, state}
  end

  # A check-in freed a worker for a blocked caller: deliver the late reply.
  defp handle_possible_checkout({:check_out, {from, worker, state}}) do
    GenServer.reply(from, worker)
    state
  end

  defp handle_possible_checkout({:ok, state}) do
    state
  end
end
|
lib/ex_pool/pool.ex
| 0.74512
| 0.825379
|
pool.ex
|
starcoder
|
defmodule ExUssd do
alias __MODULE__
@typedoc """
ExUssd menu structure
"""
@type t :: %__MODULE__{
data: any(),
default_error: String.t(),
delimiter: String.t(),
error: String.t(),
is_zero_based: boolean(),
menu_list: list(ExUssd.t()),
name: String.t(),
nav: [ExUssd.Nav.t()],
navigate: fun(),
orientation: atom(),
parent: ExUssd.t(),
resolve: fun() | mfa(),
should_close: boolean(),
show_navigation: boolean(),
split: integer(),
title: String.t()
}
@typedoc """
ExUssd menu
"""
@type menu() :: ExUssd.t()
@typedoc """
The Gateway payload value
Typically you will register a callback URL with your gateway provider that they will call whenever they get a request from a client.
Example:
You would have a simple endpoint that receives POST requests from your gateway provider.
```elixir
# Africa talking callback URL
post "v1/ussd" do
payload = conn.params
# With the payload you can call `ExUssd.goto/1`
menu = ExUssd.new(name: "HOME", resolve: AppWeb.HomeResolver)
case ExUssd.goto(menu: menu, payload: payload) do
{:ok, %{menu_string: menu_string, should_close: false}} ->
"CON " <> menu_string
{:ok, %{menu_string: menu_string, should_close: true}} ->
# End Session
ExUssd.end_session(session_id: session_id)
"END " <> menu_string
end
end
```
"""
@type payload() :: map()
@typedoc """
It's a map of metadata about the session.
The map contains the following keys:
- attempt: The number of attempts the user has made to enter the menu
- invoked_at: The time the menu was invoked
- route: The route that was invoked
- text: This is the text that was entered by the user. We receive this from the gateway payload.
Example:
```elixir
%{attempt: %{count: 2, input: ["wrong2", "wrong1"]}, invoked_at: ~U[2024-09-25 09:10:15Z], route: "*555*1#", text: "1"}
```
"""
@type metadata() :: map()
@doc """
ExUssd provides different life cycle methods for your menu.
`ussd_init/2`
This callback must be implemented in your module as it is called when the menu is first invoked.
This callback is invoked once.
`ussd_init/2` is called with the following arguments:
- menu: The menu that was invoked
- payload: The payload that was received from the gateway provider
Example:
iex> defmodule AppWeb.HomeResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> ExUssd.set(menu, title: "Enter your PIN")
...> end
...> end
iex> # To simulate a user entering a PIN, you can use the `ExUssd.to_string/2` method.
iex> menu = ExUssd.new(name: "HOME", resolve: AppWeb.HomeResolver)
iex> ExUssd.to_string!(menu, :ussd_init, [])
"Enter your PIN"
"""
@callback ussd_init(
menu :: menu(),
payload :: payload()
) :: menu()
@doc """
`ussd_callback/3` is the second life cycle method.
This callback is invoked every time the user enters a value into the current menu.
You can think of `ussd_callback/3` as an optional validation callback.
`ussd_callback/3` is called with the following arguments:
- menu: The menu that was invoked
- payload: The payload that was received from the gateway provider
- metadata: The metadata about the session
Example:
iex> defmodule AppWeb.PinResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> ExUssd.set(menu, title: "Enter your PIN")
...> end
...> def ussd_callback(menu, payload, _) do
...> if payload.text == "5555" do
...> ExUssd.set(menu, resolve: &success_menu/2)
...> else
...> ExUssd.set(menu, error: "Wrong PIN\\n")
...> end
...> end
...> def success_menu(menu, _) do
...> menu
...> |> ExUssd.set(title: "You have Entered the Secret Number, 5555")
...> |> ExUssd.set(should_close: true)
...> end
...> end
iex> # To simulate a user entering correct PIN, you can use the `ExUssd.to_string/3` method.
iex> menu = ExUssd.new(name: "HOME", resolve: AppWeb.PinResolver)
iex> ExUssd.to_string!(menu, :ussd_callback, [payload: %{text: "5555"}])
"You have Entered the Secret Number, 5555"
iex> # To simulate a user entering wrong PIN, you can use the `ExUssd.to_string/3` method.
iex> menu = ExUssd.new(name: "HOME", resolve: AppWeb.PinResolver)
iex> ExUssd.to_string!(menu, :ussd_callback, payload: %{text: "5556"})
"Wrong PIN\\nEnter your PIN"
## Note:
#### Use `default_error`
`ussd_callback/3` will use the default error message if the callback returns `false`.
Example:
iex> defmodule AppWeb.PinResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> ExUssd.set(menu, title: "Enter your PIN")
...> end
...> def ussd_callback(menu, payload, _) do
...> if payload.text == "5555" do
...> ExUssd.set(menu, resolve: &success_menu/2)
...> end
...> end
...> def success_menu(menu, _) do
...> menu
...> |> ExUssd.set(title: "You have Entered the Secret Number, 5555")
...> |> ExUssd.set(should_close: true)
...> end
...> end
iex> # To simulate a user entering wrong PIN.
iex> menu = ExUssd.new(name: "PIN", resolve: AppWeb.PinResolver)
iex> ExUssd.to_string!(menu, :ussd_callback, payload: %{text: "5556"})
"Invalid Choice\\nEnter your PIN"
#### Life cycle
`ussd_callback/3` is called before the ussd menu list is resolved.
If the callback returns `false` or it's not implemented, ExUssd will proceed to resolve the user input section from the menu list.
Example:
iex> defmodule AppWeb.ProductResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> menu
...> |> ExUssd.set(title: "Product List, Enter 5555 for Offers")
...> |> ExUssd.add(ExUssd.new(name: "Product A", resolve: &product_a/2))
...> |> ExUssd.add(ExUssd.new(name: "Product B", resolve: &product_b/2))
...> |> ExUssd.add(ExUssd.new(name: "Product C", resolve: &product_c/2))
...> end
...> def ussd_callback(menu, payload, _) do
...> if payload.text == "5555" do
...> ExUssd.set(menu, resolve: &product_offer/2)
...> end
...> end
...> def product_a(menu, _payload), do: ExUssd.set(menu, title: "selected product a")
...> def product_b(menu, _payload), do: ExUssd.set(menu, title: "selected product b")
...> def product_c(menu, _payload), do: ExUssd.set(menu, title: "selected product c")
...> def product_offer(menu, _payload), do: menu |> ExUssd.set(title: "selected product offer")
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: AppWeb.ProductResolver)
iex> # To simulate a user entering "5555"
iex> ExUssd.to_string!(menu, simulate: true, payload: %{text: "5555"})
"selected product offer"
iex> # To simulate a user selecting option "1"
iex> ExUssd.to_string!(menu, simulate: true, payload: %{text: "1"})
"selected product a"
"""
@callback ussd_callback(
menu :: menu(),
payload :: payload(),
metadata :: metadata()
) :: menu()
@doc """
`ussd_after_callback/3` is the third life cycle method.
This callback is invoked every time before the ussd menu is rendered. It's invoked after the menu_list is resolved.
You can think of `ussd_after_callback/3` as an optional clean-up callback.
Example:
iex> defmodule AppWeb.ProductResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> menu
...> |> ExUssd.set(title: "Product List")
...> |> ExUssd.add(ExUssd.new(name: "Product A", resolve: &product_a/2))
...> |> ExUssd.add(ExUssd.new(name: "Product B", resolve: &product_b/2))
...> |> ExUssd.add(ExUssd.new(name: "Product C", resolve: &product_c/2))
...> end
...> def ussd_after_callback(%{error: true} = _menu, _payload, _metadata) do
...> # Use the gateway payload and metadata to capture user metrics on error
...> end
...> def ussd_after_callback(_menu, _payload, _metadata) do
...> # Use the gateway payload and metadata to capture user metrics before navigating to next menu
...> end
...> def product_a(menu, _payload), do: ExUssd.set(menu, title: "selected product a")
...> def product_b(menu, _payload), do: ExUssd.set(menu, title: "selected product b")
...> def product_c(menu, _payload), do: ExUssd.set(menu, title: "selected product c")
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: AppWeb.ProductResolver)
iex> # To simulate a user selecting option "1"
iex> ExUssd.to_string!(menu, simulate: true, payload: %{text: "1"})
"selected product a"
iex> # To simulate a user selecting invalid option "42"
iex> ExUssd.to_string!(menu, simulate: true, payload: %{text: "42"})
"Invalid Choice\\nProduct List\\n1:Product A\\n2:Product B\\n3:Product C"
# Note:
`ussd_after_callback/3` can be used to render a menu if set conditions are met.
For example, you can use `ussd_after_callback/3` to render a custom menu if the user has not entered the correct PIN.
Example:
iex> defmodule AppWeb.HomeResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> ExUssd.set(menu, title: "Enter your PIN")
...> end
...> def ussd_callback(menu, payload, _) do
...> if payload.text == "5555" do
...> ExUssd.set(menu, resolve: &success_menu/2)
...> else
...> ExUssd.set(menu, error: "Wrong PIN\\n")
...> end
...> end
...> def ussd_after_callback(%{error: true} = menu, _payload, %{attempt: %{count: 3}}) do
...> menu
...> |> ExUssd.set(title: "Account is locked, you have entered the wrong PIN 3 times")
...> |> ExUssd.set(should_close: true)
...> end
...> def success_menu(menu, _) do
...> menu
...> |> ExUssd.set(title: "You have Entered the Secret Number, 5555")
...> |> ExUssd.set(should_close: true)
...> end
...> end
iex> # To simulate a user entering wrong PIN 3 times.
iex> menu = ExUssd.new(name: "PIN", resolve: AppWeb.HomeResolver)
iex> ExUssd.to_string!(menu, :ussd_after_callback, payload: %{text: "5556", attempt: %{count: 3}})
"Account is locked, you have entered the wrong PIN 3 times"
"""
@callback ussd_after_callback(
menu :: menu(),
payload :: payload(),
metadata :: metadata()
) :: any()
@optional_callbacks ussd_callback: 3,
ussd_after_callback: 3
# Menu struct. Defaults below read the :ex_ussd application environment at
# compile time (defstruct defaults are evaluated when the module compiles),
# so config changes require a recompile.
# NOTE(review): Application.compile_env/3 would make this intent explicit.
defstruct [
:data,
:error,
:is_zero_based,
:name,
:navigate,
:parent,
:resolve,
:title,
delimiter: Application.get_env(:ex_ussd, :delimiter) || ":",
default_error: Application.get_env(:ex_ussd, :default_error) || "Invalid Choice\n",
menu_list: [],
nav:
Application.get_env(:ex_ussd, :nav) ||
[
ExUssd.Nav.new(
type: :home,
name: "HOME",
match: "00",
reverse: true,
orientation: :vertical
),
ExUssd.Nav.new(type: :back, name: "BACK", match: "0", right: 1),
ExUssd.Nav.new(type: :next, name: "MORE", match: "98")
],
orientation: :vertical,
split: Application.get_env(:ex_ussd, :split) || 7,
should_close: false,
show_navigation: true
]
# `use ExUssd` adopts the ExUssd behaviour in the calling module,
# enabling ussd_init/2 and the optional ussd_callback/3 and
# ussd_after_callback/3 callbacks.
defmacro __using__([]) do
quote do
@behaviour ExUssd
end
end
@doc """
Adds a menu to the ExUssd menu list.
Arguments:
- menu :: menu() :: The parent menu
- menu :: menu() :: The menu to add to the parent menu list.
## Example
iex> resolve = fn menu, _payload ->
...> menu
...> |> ExUssd.set(title: "Menu title")
...> |> ExUssd.add(ExUssd.new(name: "option 1", resolve: &(ExUssd.set(&1, title: "option 1"))))
...> |> ExUssd.add(ExUssd.new(name: "option 2", resolve: &(ExUssd.set(&1, title: "option 2"))))
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: resolve)
iex> ExUssd.to_string!(menu, [])
"Menu title\\n1:option 1\\n2:option 2"
"""
defdelegate add(menu, child), to: ExUssd.Op
@doc """
Adds menus to the ExUssd menu list.
Arguments:
- `menu` :: menu() :: The parent menu
- `menus` :: list(menu()) :: The menus to add to the parent menu list.
- `opts` :: keyword_args() :: Options to pass to `ExUssd.add/3`
Example:
iex> defmodule AppWeb.LocationResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> # Get locations from database
...> locations = Enum.map(1..5, &Integer.to_string/1)
...> # convert locations to menus
...> menus = Enum.map(locations, fn location ->
...> ExUssd.new(name: "Location " <> location, data: %{name: location})
...> end)
...> menu
...> |> ExUssd.set(title: "Select Location")
...> |> ExUssd.add(menus, resolve: &(ExUssd.set(&1, title: "Location " <> &1.data.name)))
...> end
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: AppWeb.LocationResolver)
iex> ExUssd.to_string!(menu, [])
"Select Location\\n1:Location 1\\n2:Location 2\\n3:Location 3\\n4:Location 4\\n5:Location 5"
"""
defdelegate add(menu, menus, opts), to: ExUssd.Op
@doc """
Terminates the session.
```elixir
ExUssd.end_session(session_id: "sn1")
```
"""
defdelegate end_session(opts), to: ExUssd.Op
@doc """
`ExUssd.goto/1` is called when the gateway provider calls the callback URL.
Keyword Arguments:
- `payload`: The payload from the gateway provider.
- `menu`: The menu to be rendered.
Example:
```elixir
case ExUssd.goto(menu: menu, payload: payload) do
{:ok, %{menu_string: menu_string, should_close: false}} ->
"CON " <> menu_string
{:ok, %{menu_string: menu_string, should_close: true}} ->
# End Session
ExUssd.end_session(session_id: session_id)
"END " <> menu_string
end
```
"""
defdelegate goto(opts), to: ExUssd.Op
@doc """
`ExUssd.new/1` - Creates a new ExUssd menu.
Keyword Arguments:
- `name` :: The name of the menu.
- `resolve` :: The resolve function to be called when the menu is selected.
- `orientation` :: The orientation of the menu.
- `is_zero_based` :: indicates whether the menu list is zero based.
Example:
iex> resolve = fn menu, _payload ->
...> menu
...> |> ExUssd.set(title: "Menu title")
...> |> ExUssd.add(ExUssd.new(name: "option 1", resolve: &(ExUssd.set(&1, title: "option 1"))))
...> |> ExUssd.add(ExUssd.new(name: "option 2", resolve: &(ExUssd.set(&1, title: "option 2"))))
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: resolve)
iex> ExUssd.to_string!(menu, [])
"Menu title\\n1:option 1\\n2:option 2"
iex> # Change menu orientation
iex> menu = ExUssd.new(name: "HOME", resolve: resolve, orientation: :horizontal)
iex> ExUssd.to_string!(menu, [])
"1:2\\noption 1\\nMORE:98"
## zero based
Used when the menu list is zero based.
Example:
iex> resolve = fn menu, _payload ->
...> menu
...> |> ExUssd.set(title: "Menu title")
...> |> ExUssd.add(ExUssd.new(name: "offers", resolve: fn menu, _ -> ExUssd.set(menu, title: "offers") end))
...> |> ExUssd.add(ExUssd.new(name: "option 1", resolve: &(ExUssd.set(&1, title: "option 1"))))
...> |> ExUssd.add(ExUssd.new(name: "option 2", resolve: &(ExUssd.set(&1, title: "option 2"))))
...> end
iex> menu = ExUssd.new(name: "HOME", is_zero_based: true, resolve: resolve)
iex> ExUssd.to_string!(menu, [])
"Menu title\\n0:offers\\n1:option 1\\n2:option 2"
iex> ExUssd.to_string!(menu, [simulate: true, payload: %{text: "0"}])
"offers"
NOTE:
`ExUssd.new/1` can be used to create a menu with a callback function.
Use the anonymous function syntax to create a menu if you want to perform some action before the menu is rendered.
Remember to use `ExUssd.set` to set the menu name and the resolve function/module.
Example:
iex> defmodule User do
...> def get_user(phone_number), do: %{name: "John", phone_number: phone_number, type: :personal}
...> end
iex> defmodule HomeResolver do
...> def home(%ExUssd{data: %{name: name}} = menu, _) do
...> menu
...> |> ExUssd.set(title: "Welcome " <> name)
...> |> ExUssd.add(ExUssd.new(name: "Product A", resolve: &product_a/2))
...> |> ExUssd.add(ExUssd.new(name: "Product B", resolve: &product_b/2))
...> |> ExUssd.add(ExUssd.new(name: "Product C", resolve: &product_c/2))
...> end
...> def product_a(menu, _payload), do: ExUssd.set(menu, title: "selected product a")
...> def product_b(menu, _payload), do: ExUssd.set(menu, title: "selected product b")
...> def product_c(menu, _payload), do: ExUssd.set(menu, title: "selected product c")
...> end
iex> menu = ExUssd.new(fn menu, %{phone: phone} = _payload ->
...> user = User.get_user(phone)
...> menu
...> |> ExUssd.set(name: "Home")
...> |> ExUssd.set(data: user)
...> |> ExUssd.set(resolve: &HomeResolver.home/2)
...> end)
iex> ExUssd.to_string!(menu, [payload: %{text: "*544#", phone: "072000000"}])
"Welcome John\\n1:Product A\\n2:Product B\\n3:Product C"
You can also use the anonymous function syntax to create a menu if you want to create a dynamic menu name.
Example:
iex> defmodule User do
...> def get_user(phone_number), do: %{name: "John", phone_number: phone_number, type: :personal}
...> end
iex> defmodule HomeResolver do
...> def home(menu, %{phone: phone} = _payload) do
...> user = User.get_user(phone)
...> menu
...> |> ExUssd.set(title: "Welcome "<> user.name)
...> |> ExUssd.set(data: user)
...> |> ExUssd.add(ExUssd.new(name: "Product A", resolve: &product_a/2))
...> |> ExUssd.add(ExUssd.new(name: "Product B", resolve: &product_b/2))
...> |> ExUssd.add(ExUssd.new(name: "Product C", resolve: &product_c/2))
...> |> ExUssd.add(ExUssd.new(&account/2))
...> end
...> def product_a(menu, _payload), do: ExUssd.set(menu, title: "selected product a")
...> def product_b(menu, _payload), do: ExUssd.set(menu, title: "selected product b")
...> def product_c(menu, _payload), do: ExUssd.set(menu, title: "selected product c")
...> def account(%{data: %{type: :personal, name: name}} = menu, _payload) do
...> # Should be stateless, don't put call functions with side effect (Insert to DB, fetch)
...> # Because it will be called every time the menu is rendered because the menu `:name` is dynamic
...> # See `ExUssd.new/2` for more details where `:name` is static.
...> menu
...> |> ExUssd.set(name: "Personal account")
...> |> ExUssd.set(resolve: &(ExUssd.set(&1, title: "Personal account")))
...> end
...> def account(%{data: %{type: :business, name: name}} = menu, _payload) do
...> # Should be stateless, don't put call functions with side effect (Insert to DB, fetch)
...> # Because it will be called every time the menu is rendered because the menu `:name` is dynamic
...> # See `ExUssd.new/2` for more details where `:name` is static.
...> menu
...> |> ExUssd.set(name: "Business account")
...> |> ExUssd.set(resolve: &(ExUssd.set(&1, title: "Business account")))
...> end
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: &HomeResolver.home/2)
iex> ExUssd.to_string!(menu, [payload: %{text: "*544#", phone: "072000000"}])
"Welcome John\\n1:Product A\\n2:Product B\\n3:Product C\\n4:Personal account"
"""
defdelegate new(opts), to: ExUssd.Op
@doc """
`ExUssd.new/2` - Creates a new ExUssd menu.
Arguments:
name: The name of the menu.
resolve: The resolve function/module.
It is similar to `ExUssd.new/1` with a callback function.
The only difference is that it takes a static name argument.
Example:
iex> defmodule User do
...> def get_user(phone_number), do: %{name: "John", phone_number: phone_number, type: :personal}
...> end
iex> defmodule HomeResolver do
...> def home(menu, %{phone: phone} = _payload) do
...> user = User.get_user(phone)
...> menu
...> |> ExUssd.set(title: "Welcome "<> user.name)
...> |> ExUssd.set(data: user)
...> |> ExUssd.add(ExUssd.new(name: "Product A", resolve: &product_a/2))
...> |> ExUssd.add(ExUssd.new(name: "Product B", resolve: &product_b/2))
...> |> ExUssd.add(ExUssd.new(name: "Product C", resolve: &product_c/2))
...> |> ExUssd.add(ExUssd.new("Account", &account/2))
...> end
...> def product_a(menu, _payload), do: ExUssd.set(menu, title: "selected product a")
...> def product_b(menu, _payload), do: ExUssd.set(menu, title: "selected product b")
...> def product_c(menu, _payload), do: ExUssd.set(menu, title: "selected product c")
...> def account(%{data: %{type: :personal}} = menu, _payload) do
...> # Get Personal account details, then set as data
...> ExUssd.set(menu, resolve: &(ExUssd.set(&1, title: "Personal account")))
...> end
...> def account(%{data: %{type: :business, name: name}} = menu, _payload) do
...> # Get Business account details, then set as data
...> ExUssd.set(menu, resolve: &(ExUssd.set(&1, title: "Business account")))
...> end
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: &HomeResolver.home/2)
iex> ExUssd.to_string!(menu, [payload: %{text: "*544#", phone: "072000000"}])
"Welcome John\\n1:Product A\\n2:Product B\\n3:Product C\\n4:Account"
"""
defdelegate new(name, function), to: ExUssd.Op
@doc """
`ExUssd.set/2` - Sets a menu field.
Arguments:
menu: The menu to set.
field: The field to set.
It sets the field of the menu.
## Settable Fields
- **`:data`** Set data to pass through to next menu. N/B - ExUssd menus are stateful unless using `ExUssd.new/2` with `:name` and `:resolve` as arguments;
```elixir
data = %{name: "<NAME>"}
# stateful
menu
|> ExUssd.set(data: data)
|> ExUssd.add(ExUssd.new(&check_balance/2))
menu
|> ExUssd.set(data: data)
|> ExUssd.add(ExUssd.new("Check Balance", &check_balance/2))
# stateless
menu
|> ExUssd.add(ExUssd.new(data: data, name: "Check Balance", resolve: &check_balance/2))
```
- **`:delimiter`** Sets the menu style delimiter. Default - `:`
- **`:default_error`** Default error shown on invalid input
- **`:error`** Set custom error message
- **`:name`** Sets the name of the menu
- **`:nav`** Its used to set a new ExUssd Nav menu, see `ExUssd.Nav.new/1`
- **`:orientation`** Sets the menu orientation. Available options;
- `:horizontal` - Left to right. Blog/articles style menu
- `:vertical` - Top to bottom(default)
- **`:resolve`** set the resolve function/module
- **`:should_close`** Indicates whether the USSD session should end or continue
- **`:show_navigation`** Set show navigation menu. Default - `true`
- **`:split`** Set menu batch size. Default - 7
- **`:title`** Set menu title
"""
defdelegate set(menu, opts), to: ExUssd.Op
@doc """
Use `ExUssd.to_string/2` to get the menu string representation and the should_close value, which indicates whether the session should end.
`ExUssd.to_string/2` takes a menu and opts.
Example:
iex> defmodule AppWeb.ProductResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> menu
...> |> ExUssd.set(title: "Product List")
...> |> ExUssd.add(ExUssd.new(name: "Product A", resolve: &product_a/2))
...> |> ExUssd.add(ExUssd.new(name: "Product B", resolve: &product_b/2))
...> |> ExUssd.add(ExUssd.new(name: "Product C", resolve: &product_c/2))
...> end
...> def product_a(menu, _payload), do: ExUssd.set(menu, title: "selected product a")
...> def product_b(menu, _payload), do: ExUssd.set(menu, title: "selected product b")
...> def product_c(menu, _payload), do: ExUssd.set(menu, title: "selected product c")
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: AppWeb.ProductResolver)
iex> # Simulate the first time user enters the menu
iex> ExUssd.to_string(menu, [])
{:ok, %{menu_string: "Product List\\n1:Product A\\n2:Product B\\n3:Product C", should_close: false}}
iex> # To simulate a user selecting option "1"
iex> ExUssd.to_string(menu, [simulate: true, payload: %{text: "1"}])
{:ok, %{menu_string: "selected product a", should_close: false}}
NOTE:
If your `ussd_init/2` callback expects the `data` field to have values, use `init_data`.
Example:
iex> defmodule AppWeb.ProductResolver do
...> use ExUssd
...> def ussd_init(%ExUssd{data: %{user_name: user_name}} = menu, _) do
...> menu
...> |> ExUssd.set(title: "Welcome " <> user_name <> ", Select Product")
...> |> ExUssd.add(ExUssd.new(name: "Product A", resolve: &product_a/2))
...> |> ExUssd.add(ExUssd.new(name: "Product B", resolve: &product_b/2))
...> |> ExUssd.add(ExUssd.new(name: "Product C", resolve: &product_c/2))
...> end
...> def product_a(menu, _payload), do: ExUssd.set(menu, title: "selected product a")
...> def product_b(menu, _payload), do: ExUssd.set(menu, title: "selected product b")
...> def product_c(menu, _payload), do: ExUssd.set(menu, title: "selected product c")
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: AppWeb.ProductResolver)
iex> # Simulate the first time user enters the menu
iex> ExUssd.to_string(menu, init_data: %{user_name: "John"})
{:ok, %{menu_string: "Welcome John, Select Product\\n1:Product A\\n2:Product B\\n3:Product C", should_close: false}}
"""
defdelegate to_string(menu, opts), to: ExUssd.Op
@doc """
`ExUssd.to_string/3` is similar to `ExUssd.to_string/2`
The only difference is that it takes a `menu`, `atom` and `opts`.
It's used to test the menu life cycle.
Example:
iex> defmodule AppWeb.PinResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> ExUssd.set(menu, title: "Enter your PIN")
...> end
...> def ussd_callback(menu, payload, _) do
...> if payload.text == "5555" do
...> ExUssd.set(menu, resolve: &success_menu/2)
...> end
...> end
...> def success_menu(menu, _) do
...> menu
...> |> ExUssd.set(title: "You have Entered the Secret Number, 5555")
...> |> ExUssd.set(should_close: true)
...> end
...> end
iex> menu = ExUssd.new(name: "PIN", resolve: AppWeb.PinResolver)
iex> # Get `ussd_init/2` menu string representation
iex> ExUssd.to_string(menu, :ussd_init, [])
{:ok, %{menu_string: "Enter your PIN", should_close: false}}
iex> # Get `ussd_callback/2` menu string representation
iex> ExUssd.to_string(menu, :ussd_callback, payload: %{text: "5555"})
{:ok, %{menu_string: "You have Entered the Secret Number, 5555", should_close: true}}
"""
defdelegate to_string(menu, atom, opts), to: ExUssd.Op
@doc """
`ExUssd.to_string!/2` gets the menu string text from `ExUssd.to_string/2`
See `ExUssd.to_string/2` for more details.
Example:
iex> defmodule AppWeb.ProductResolver do
...> def products(menu, _) do
...> menu
...> |> ExUssd.set(title: "Product List")
...> |> ExUssd.add(ExUssd.new(name: "Product A", resolve: &product_a/2))
...> |> ExUssd.add(ExUssd.new(name: "Product B", resolve: &product_b/2))
...> |> ExUssd.add(ExUssd.new(name: "Product C", resolve: &product_c/2))
...> end
...> def product_a(menu, _payload), do: ExUssd.set(menu, title: "selected product a")
...> def product_b(menu, _payload), do: ExUssd.set(menu, title: "selected product b")
...> def product_c(menu, _payload), do: ExUssd.set(menu, title: "selected product c")
...> end
iex> menu = ExUssd.new(name: "HOME", resolve: &AppWeb.ProductResolver.products/2)
iex> # Simulate the first time user enters the menu
iex> ExUssd.to_string!(menu, [])
"Product List\\n1:Product A\\n2:Product B\\n3:Product C"
"""
defdelegate to_string!(menu, opts), to: ExUssd.Op
@doc """
`ExUssd.to_string!/3` gets the menu string text from `ExUssd.to_string/3`
See `ExUssd.to_string/3` for more details.
Example:
iex> defmodule AppWeb.PinResolver do
...> use ExUssd
...> def ussd_init(menu, _) do
...> ExUssd.set(menu, title: "Enter your PIN")
...> end
...> def ussd_callback(menu, payload, _) do
...> if payload.text == "5555" do
...> ExUssd.set(menu, resolve: &success_menu/2)
...> end
...> end
...> def success_menu(menu, _) do
...> menu
...> |> ExUssd.set(title: "You have Entered the Secret Number, 5555")
...> |> ExUssd.set(should_close: true)
...> end
...> end
iex> menu = ExUssd.new(name: "PIN", resolve: AppWeb.PinResolver)
iex> # Get `ussd_init/2` menu string representation
iex> ExUssd.to_string!(menu, :ussd_init, [])
"Enter your PIN"
iex> # Get `ussd_callback/2` menu string representation
iex> ExUssd.to_string!(menu, :ussd_callback, payload: %{text: "5555"})
"You have Entered the Secret Number, 5555"
"""
defdelegate to_string!(menu, atom, opts), to: ExUssd.Op
end
|
lib/ex_ussd.ex
| 0.824709
| 0.70477
|
ex_ussd.ex
|
starcoder
|
defmodule Scenic.Primitive.Line do
@moduledoc """
Draw a line on the screen.
## Data
`{point_a, point_b}`
The data for a line is a tuple containing two points.
* `point_a` - position to start drawing from
* `point_b` - position to draw to
## Styles
This primitive recognizes the following styles
* [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
* [`cap`](Scenic.Primitive.Style.Cap.html) - says how to draw the ends of the line.
* [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive. In this case, only the curvy part.
## Usage
You should add/modify primitives via the helper functions in
[`Scenic.Primitives`](Scenic.Primitives.html#line/3)
"""
use Scenic.Primitive
# import IEx
# Styles this primitive accepts (see valid_styles/0).
@styles [:hidden, :stroke, :cap]
# ============================================================================
# data verification and serialization
# --------------------------------------------------------
# Builds the error message shown when invalid data is supplied for a line.
@doc false
def info(data),
do: """
#{IO.ANSI.red()}#{__MODULE__} data must be two points: {{x0,y0}, {x1,y1}}
#{IO.ANSI.yellow()}Received: #{inspect(data)}
#{IO.ANSI.default_color()}
"""
# --------------------------------------------------------
# Accepts only a tuple of two numeric {x, y} points.
@doc false
def verify({{x0, y0}, {x1, y1}} = data)
when is_number(x0) and is_number(y0) and is_number(x1) and is_number(y1),
do: {:ok, data}
def verify(_), do: :invalid_data
# ============================================================================
@doc """
Returns a list of styles recognized by this primitive.
"""
@spec valid_styles() :: [:cap | :hidden | :stroke, ...]
def valid_styles(), do: @styles
# ============================================================================
# --------------------------------------------------------
@doc """
Returns the default pin for a line: its midpoint (see `centroid/1`).
"""
def default_pin(data), do: centroid(data)
# --------------------------------------------------------
@doc """
Returns the midpoint of the line. This is used as the default pin when applying
rotate or scale transforms.
"""
def centroid(data)
def centroid({{x0, y0}, {x1, y1}}) do
{
(x0 + x1) / 2,
(y0 + y1) / 2
}
end
end
|
lib/scenic/primitive/line.ex
| 0.907242
| 0.628821
|
line.ex
|
starcoder
|
defmodule Regex do
@moduledoc %B"""
Regular expressions for Elixir built on top of the re module
in the Erlang Standard Library. More information can be found
on re documentation: http://www.erlang.org/doc/man/re.html
Regular expressions in Elixir can be created using Regex.compile!
or using the special form with `%r`:
# A simple regular expressions that matches foo anywhere in the string
%r/foo/
# A regular expression with case insensitive options and handle unicode chars
%r/foo/iu
The re module provides several options, the one available in Elixir, followed by
their shortcut in parenthesis, are:
* unicode (u) - used when you want to match against specific unicode characters
* caseless (i) - add case insensitivity
* dotall (s) - causes dot to match newlines and also set newline to anycrlf.
The new line setting can be overwritten by setting `(*CR)` or `(*LF)` or
`(*CRLF)` or `(*ANY)` according to re documentation
* multiline (m) - causes `^` and `$` to mark the beginning and end of each line.
You need to use `\A` and `\z` to match the end or beginning of the string
* extended (x) - whitespace characters are ignored except when escaped and
allow `#` to delimit comments
* firstline (f) - forces the unanchored pattern to match before or at the first
newline, though the matched text may continue over the newline
* ungreedy (r) - invert the "greediness" of the regexp
The options not available are:
* anchored - not available, use `^` or `\A` instead
* dollar_endonly - not available, use `\z` instead
* no_auto_capture - not available, use `?:` instead
* newline - not available, use `(*CR)` or `(*LF)` or `(*CRLF)` or `(*ANYCRLF)`
or `(*ANY)` at the beginning of the regexp according to the re documentation
Most of the functions in this module accept either a binary or a char list
as subject. The result is based on the argument (a binary will return
a binary, a char list will return a char list).
"""
# Raised by compile!/2 when the pattern cannot be compiled.
defexception CompileError, message: "regex could not be compiled"
@doc """
Compiles the regular expression according to the given options.
It returns `{ :ok, regex }` in case of success,
`{ :error, reason }` otherwise.
"""
# NOTE(review): `options // ""` is the legacy default-argument syntax and
# `Erlang.re` the legacy Erlang-module access form used by early Elixir.
def compile(source, options // "") do
source = to_binary(source)
options = to_binary(options)
re_opts = translate_options(options)
case Erlang.re.compile(source, re_opts) do
{ :ok, compiled } ->
# A regex value is the tuple { Regex, compiled_pattern, source, options }.
{ :ok, { Regex, compiled, source, options } }
error ->
error
end
end
@doc """
Compiles the regular expression according to the given options.
Fails with `Regex.CompileError` if the regex cannot be compiled.
"""
def compile!(source, options // "") do
case compile(source, options) do
{ :ok, regex } -> regex
# :re reports errors as { reason, byte_position } pairs.
{ :error, { reason, at } } -> raise Regex.CompileError, message: "#{reason} at position #{at}"
end
end
@doc """
Runs the regular expression against the given string
and returns the index (zero-indexed) where the first
match occurs, nil otherwise.
## Examples
Regex.index %r/c(d)/, "abcd" #=> 3
Regex.index %r/e/, "abcd" #=> nil
"""
def index({ Regex, compiled, _, _ }, string) do
# :capture :first/:index yields only the { index, length } of match 0.
case Erlang.re.run(string, compiled, [{ :capture, :first, :index }]) do
:nomatch -> nil
{ :match, [{index,_}] } -> index
end
end
@doc """
Returns a boolean if there was a match or not.
## Examples
Regex.match? %r/foo/, "foo" #=> true
Regex.match? %r/foo/, "bar" #=> false
"""
def match?({ Regex, compiled, _, _ }, string) do
:nomatch != Erlang.re.run(string, compiled)
end
@doc """
Runs the regular expression against the given string.
It returns a list with all matches or nil if no match occurred.
## Examples
    Regex.run %r/c(d)/, "abcd" #=> ["cd", "d"]
    Regex.run %r/e/, "abcd" #=> nil
"""
def run({ Regex, compiled, _, _ }, string) do
  # return_for/1 keeps the result type (binary vs char list) matching the subject.
  case Erlang.re.run(string, compiled, [{ :capture, :all, return_for(string) }]) do
    :nomatch ->
      nil
    { :match, results } ->
      results
  end
end
@doc """
Returns a list with the match indexes in the given string.
The matches are tuples where the first element is the index
(zero indexed) the match happened and the second is the length
of the match.
## Examples
    Regex.indexes %r/c(d)/, "abcd" #=> [{2,2},{3,1}]
    Regex.indexes %r/e/, "abcd" #=> nil
"""
def indexes({ Regex, compiled, _, _ }, string) do
  # Same as run/2 but captures { index, length } pairs instead of the text.
  case Erlang.re.run(string, compiled, [{ :capture, :all, :index }]) do
    :nomatch ->
      nil
    { :match, results } ->
      results
  end
end
@doc """
Returns the underlying re_pattern in the regular expression.
"""
# Exposes the compiled :re pattern (second element of the regex tuple).
def re_pattern({ Regex, compiled, _, _ }) do
  compiled
end
@doc """
Returns the regex source as binary.
## Examples
    Regex.source %r(foo) #=> "foo"
"""
# Third element of the regex tuple is the original pattern source.
def source({ Regex, _, source, _ }) do
  source
end
@doc """
Returns the regex options as a list.
## Examples
    Regex.opts %r(foo)m #=> 'm'
"""
# Fourth element of the regex tuple is the raw option string.
def opts({ Regex, _, _, opts }) do
  opts
end
@doc """
Same as run, but scans the target several times collecting all matches of
the regular expression. A list is returned with each match. If the item in
the list is a binary, it means there were no captures. If the item is another
list, each element in this secondary list is a capture.
## Examples
    Regex.scan %r/c(d|e)/, "abcd abce" #=> [["d"], ["e"]]
    Regex.scan %r/c(?:d|e)/, "abcd abce" #=> ["cd", "ce"]
    Regex.scan %r/e/, "abcd" #=> []
"""
def scan({ Regex, compiled, _, _ }, string) do
  options = [{ :capture, :all, return_for(string) }, :global, { :offset, 0 }]
  case Erlang.re.run(string, compiled, options) do
    :nomatch -> []
    { :match, results } ->
      # `lc ... inlist` is the archaic list-comprehension syntax (modern `for`).
      lc result inlist results do
        case result do
          # Single element: no capture groups — keep the whole match.
          [t] -> t
          # Whole match plus captures: drop the whole match, keep the captures.
          # NOTE(review): `h` is unused; `[_h|t]` would avoid a compiler warning.
          [h|t] -> t
        end
      end
  end
end
@doc """
Split the given target in the number of parts specified. If no amount
of parts is given, it defaults to :infinity.
"""
def split({ Regex, compiled, _, _ }, string, parts // :infinity) do
  # :trim drops trailing empty parts; result type mirrors the subject type.
  options = [{ :return, return_for(string) }, :trim, { :parts, parts }]
  Erlang.re.split(string, compiled, options)
end
@doc %B"""
Receives a string and a replacement and returns a string where the
first match of the regular expressions is replaced by replacement.
Inside the replacement, you can either give "&" to access the whole
regular expression or \N, where N is an integer to access a specific
matching parens.
## Examples
    Regex.replace(%r/d/, "abc", "d") #=> "abc"
    Regex.replace(%r/b/, "abc", "d") #=> "adc"
    Regex.replace(%r/b/, "abc", "[&]") #=> "a[b]c"
    Regex.replace(%r/b/, "abc", "[\\&]") #=> "a[&]c"
    Regex.replace(%r/(b)/, "abc", "[\\1]") #=> "a[b]c"
"""
# Delegates to :re.replace/4; replaces only the first match (no :global).
def replace({ Regex, compiled, _, _ }, string, replacement) do
  Erlang.re.replace(string, compiled, replacement, [{ :return, return_for(string) }])
end
@doc """
The same as replace, but replaces all parts where the regular
expressions matches in the string. Please read `replace/3` for
documentation and examples.
"""
# Identical to replace/3 except for the :global option.
def replace_all({ Regex, compiled, _, _ }, string, replacement) do
  Erlang.re.replace(string, compiled, replacement, [{ :return, return_for(string) }, :global])
end
# Helpers

@doc false
# Unescape map function used by Binary.unescape.
# Maps the escape letters f, n, r, t, v to their control characters;
# returns false for any character that is not a recognized escape.
def unescape_map(?f), do: ?\f
def unescape_map(?n), do: ?\n
def unescape_map(?r), do: ?\r
def unescape_map(?t), do: ?\t
def unescape_map(?v), do: ?\v
def unescape_map(_), do: false
# Private Helpers

# Result type mirrors the subject: binaries yield binaries, char lists yield lists.
defp return_for(element) when is_binary(element), do: :binary
defp return_for(element) when is_list(element), do: :list
# Translates each single-letter modifier into the matching :re option,
# consuming the options binary one byte at a time.
# `<<?u, t|:binary>>` is the archaic binary-pattern syntax (modern `t::binary`).
defp translate_options(<<?u, t|:binary>>), do: [:unicode|translate_options(t)]
defp translate_options(<<?i, t|:binary>>), do: [:caseless|translate_options(t)]
defp translate_options(<<?x, t|:binary>>), do: [:extended|translate_options(t)]
defp translate_options(<<?f, t|:binary>>), do: [:firstline|translate_options(t)]
defp translate_options(<<?r, t|:binary>>), do: [:ungreedy|translate_options(t)]
# "s" maps to dotall plus a newline convention, matching Ruby-like semantics.
defp translate_options(<<?s, t|:binary>>), do: [:dotall,{:newline,:anycrlf}|translate_options(t)]
defp translate_options(<<?m, t|:binary>>), do: [:multiline|translate_options(t)]
defp translate_options(<<>>), do: []
end
|
lib/elixir/lib/regex.ex
| 0.908769
| 0.635958
|
regex.ex
|
starcoder
|
defmodule Tint do
  @moduledoc """
  A library allowing calculations with colors and conversions between different
  colorspaces.
  """

  alias Tint.{CMYK, DIN99, HSV, Lab, RGB, XYZ}

  @typedoc """
  A type representing a color.
  """
  @type color ::
          CMYK.t()
          | DIN99.t()
          | HSV.t()
          | Lab.t()
          | RGB.t()
          | XYZ.t()

  # Shorthand atoms resolving to the module implementing each colorspace.
  @colorspace_aliases %{
    cmyk: Tint.CMYK,
    din99: Tint.DIN99,
    hsv: Tint.HSV,
    lab: Tint.Lab,
    rgb: Tint.RGB,
    xyz: Tint.XYZ
  }

  @typedoc """
  A type representing a colorspace.
  """
  @type colorspace :: atom | module

  @doc """
  Gets the converted module for the given colorspace atom or module.
  """
  @doc since: "1.0.0"
  @spec converter_for(colorspace) :: {:ok, module} | :error
  def converter_for(colorspace) do
    # Resolve shorthand atoms (e.g. :rgb) to their module, then look up the
    # colorspace's Convertible implementation module.
    convertible =
      @colorspace_aliases
      |> Map.get(colorspace, colorspace)
      |> Module.concat(Convertible)

    # Succeed only when that module exists and actually exposes convert/1.
    case Code.ensure_loaded?(convertible) and
           function_exported?(convertible, :convert, 1) do
      true -> {:ok, convertible}
      false -> :error
    end
  end

  @doc """
  Converts the given color to another colorspace.
  ## Examples
      iex> Tint.convert(Tint.RGB.new(40, 66, 67), :cmyk)
      {:ok, %Tint.CMYK{cyan: 0.403, magenta: 0.0149, yellow: 0.0, key: 0.7373}}
      iex> Tint.convert(Tint.RGB.new(255, 127, 30), Tint.HSV)
      {:ok, %Tint.HSV{hue: 25.9, saturation: 0.8824, value: 1.0}}
      iex> Tint.convert(Tint.RGB.new(255, 127, 30), :invalid)
      :error
  """
  @doc since: "1.0.0"
  @spec convert(color, colorspace) :: {:ok, color} | :error
  def convert(color, colorspace) do
    # Propagate :error untouched; otherwise delegate to the converter module.
    case converter_for(colorspace) do
      {:ok, convertible} -> {:ok, convertible.convert(color)}
      :error -> :error
    end
  end

  @doc """
  Converts the given color to another colorspace. Raises when the colorspace
  is invalid.
  ## Examples
      iex> Tint.convert!(Tint.RGB.new(40, 66, 67), :cmyk)
      %Tint.CMYK{cyan: 0.403, magenta: 0.0149, yellow: 0.0, key: 0.7373}
      iex> Tint.convert!(Tint.RGB.new(255, 127, 30), Tint.HSV)
      %Tint.HSV{hue: 25.9, saturation: 0.8824, value: 1.0}
      iex> Tint.convert!(Tint.RGB.new(255, 127, 30), :foo)
      ** (ArgumentError) Unknown colorspace: :foo
  """
  @doc since: "1.0.0"
  @spec convert!(color, colorspace) :: color
  def convert!(color, colorspace) do
    with {:ok, converted} <- convert(color, colorspace) do
      converted
    else
      :error -> raise ArgumentError, "Unknown colorspace: #{inspect(colorspace)}"
    end
  end

  @doc """
  Converts the given color to the CMYK colorspace.
  ## Example
      iex> Tint.to_cmyk(Tint.RGB.new(40, 66, 67))
      #Tint.CMYK<40.3%,1.49%,0.0%,73.73%>
  """
  @doc since: "0.3.0"
  @spec to_cmyk(color) :: CMYK.t()
  defdelegate to_cmyk(color), to: CMYK.Convertible, as: :convert

  @doc """
  Converts the given color to the DIN99 colorspace.
  """
  @doc since: "1.0.0"
  @spec to_din99(color) :: DIN99.t()
  defdelegate to_din99(color), to: DIN99.Convertible, as: :convert

  @doc """
  Converts the given color to the HSV colorspace.
  ## Example
      iex> Tint.to_hsv(Tint.RGB.new(255, 127, 30))
      #Tint.HSV<25.9°,88.24%,100.0%>
  """
  @spec to_hsv(color) :: HSV.t()
  defdelegate to_hsv(color), to: HSV.Convertible, as: :convert

  @doc """
  Converts the given color to the CIELAB colorspace.
  """
  @doc since: "1.0.0"
  @spec to_lab(color) :: Lab.t()
  defdelegate to_lab(color), to: Lab.Convertible, as: :convert

  @doc """
  Converts the given color to the RGB colorspace.
  ## Example
      iex> Tint.to_rgb(Tint.HSV.new(25.8, 0.882, 1))
      #Tint.RGB<255,127,30 (#FF7F1E)>
  """
  @spec to_rgb(color) :: RGB.t()
  defdelegate to_rgb(color), to: RGB.Convertible, as: :convert

  @doc """
  Converts the given color to the XYZ (CIE 1931) colorspace.
  """
  @doc since: "1.0.0"
  @spec to_xyz(color) :: XYZ.t()
  defdelegate to_xyz(color), to: XYZ.Convertible, as: :convert
end
|
lib/tint.ex
| 0.932253
| 0.441974
|
tint.ex
|
starcoder
|
defmodule XUtil.GenServer do
  @moduledoc "Simple utilities for avoiding boilerplate in a GenServer implementation."

  @doc """
  If your GenServer is a thin wrapper around a struct, you can make its handle_call()
  implementation be "just this."
  Supports operations that:
  - Update the state
  - May return an error (with an optional explanation)
  - Query the state
  ...but not operations that both modify the state *and* query something.
  """
  def apply_call(state, operation) when is_struct(state) and is_function(operation) do
    state
    |> operation.()
    |> case do
      # A bare struct or {:ok, struct} means the state was updated.
      new_state when is_struct(new_state) -> {:reply, :ok, new_state}
      {:ok, new_state} when is_struct(new_state) -> {:reply, :ok, new_state}
      # Errors leave the previous state untouched.
      :error -> {:reply, :error, state}
      {:error, why} = err when is_binary(why) -> {:reply, err, state}
      # Anything else is treated as a plain reply value (state unchanged).
      reply -> {:reply, reply, state}
    end
  end

  # Replies with the result of a pure query; the state is never modified.
  def fetch(state, query) when is_struct(state) and is_function(query) do
    {:reply, query.(state), state}
  end

  # Dispatches a tagged request tuple, currying up to four extra arguments
  # onto the supplied function before handing off to apply_call/2 or fetch/2.
  def fetch_or_apply(state, {:apply, op}), do: apply_call(state, op)
  def fetch_or_apply(state, {:apply, op, a}), do: apply_call(state, &op.(&1, a))
  def fetch_or_apply(state, {:apply, op, a, b}), do: apply_call(state, &op.(&1, a, b))
  def fetch_or_apply(state, {:apply, op, a, b, c}), do: apply_call(state, &op.(&1, a, b, c))
  def fetch_or_apply(state, {:apply, op, a, b, c, d}), do: apply_call(state, &op.(&1, a, b, c, d))
  def fetch_or_apply(state, {:fetch, op}), do: fetch(state, op)
  def fetch_or_apply(state, {:fetch, op, a}), do: fetch(state, &op.(&1, a))
  def fetch_or_apply(state, {:fetch, op, a, b}), do: fetch(state, &op.(&1, a, b))
  def fetch_or_apply(state, {:fetch, op, a, b, c}), do: fetch(state, &op.(&1, a, b, c))
  def fetch_or_apply(state, {:fetch, op, a, b, c, d}), do: fetch(state, &op.(&1, a, b, c, d))
end
|
lib/x_util/gen_server.ex
| 0.772745
| 0.496277
|
gen_server.ex
|
starcoder
|
defmodule CbLocomotion.StepperMotor do
  use GenServer
  # alias Saxophone.StepperMotor

  # GenServer state: the GPIO pin processes, travel direction, current index
  # into the half-step table below, milliseconds between steps, the pending
  # step timer, and the gear (:low advances 1 table row per step, :high 2).
  defstruct pins: [], direction: :neutral, position: 0, step_millis: 0, timer_ref: nil, gear: :low

  # Half-step excitation sequence for a 4-wire stepper driver; each row holds
  # the on/off values written to the four control pins at one of 8 positions.
  @position_pin_values [
    [0, 0, 0, 1],
    [0, 0, 1, 1],
    [0, 0, 1, 0],
    [0, 1, 1, 0],
    [0, 1, 0, 0],
    [1, 1, 0, 0],
    [1, 0, 0, 0],
    [1, 0, 0, 1],
  ]

  @moduledoc """
  Represents the interface, via GPIO pins, to a stepper motor driver. Can adjust speed and direction.
  """

  # Starts the server for exactly four GPIO pin ids (enforced by the pattern).
  def start_link(pin_ids = [_, _, _, _], opts \\ []) do
    GenServer.start_link(__MODULE__, pin_ids, opts)
  end

  ## API

  @doc """
  Set direction, either :forward, :back, or :neutral
  """
  def set_direction(pid, direction) do
    pid |> GenServer.call({:set_direction, direction})
  end

  @doc """
  Set the step rate in milliseconds
  """
  def set_step_rate(pid, new_step_rate) do
    pid |> GenServer.call({:set_step_rate, new_step_rate})
  end

  # Returns the full internal state struct (mainly for debugging/inspection).
  def state(pid) do
    pid |> GenServer.call(:get_state)
  end

  def set_low_gear(pid) do
    pid |> GenServer.call({:set_gear, :low})
  end

  def set_high_gear(pid) do
    pid |> GenServer.call({:set_gear, :high})
  end

  ## Callbacks

  # Opens each pin as a GPIO output, then fires an immediate :step so the
  # pins are driven to the initial position.
  # NOTE(review): `self` without parens is pre-1.4 style; modern code uses `self()`.
  def init(pin_ids) do
    gpio_pins = pin_ids |> Enum.map(fn pin ->
      {:ok, gpio_pid} = Gpio.start_link(pin, :output)
      gpio_pid
    end)
    send(self, :step)
    {:ok, %__MODULE__{pins: gpio_pins}}
  end

  # Changing direction or rate reschedules the next step, cancelling any
  # previously pending timer first (schedule_next_step/3 does the cancel).
  def handle_call({:set_direction, direction}, _from, status) do
    timer_ref = schedule_next_step(direction, status.step_millis, status.timer_ref)
    {:reply, :ok, %{status | direction: direction, timer_ref: timer_ref}}
  end

  def handle_call({:set_step_rate, step_rate}, _from, status) do
    timer_ref = schedule_next_step(status.direction, step_rate, status.timer_ref)
    {:reply, :ok, %{status | step_millis: step_rate, timer_ref: timer_ref}}
  end

  def handle_call(:get_state, _from, status) do
    {:reply, status, status}
  end

  def handle_call({:set_gear, gear}, _from, status) do
    {:reply, :ok, %{status | gear: gear}}
  end

  # One motor step: compute the next table position, write that row's values
  # to the four pins, then schedule the following step. The timer that fired
  # this message is already spent, so no cancellation is needed here.
  def handle_info(:step, status = %{pins: pins,
                                    direction: direction,
                                    step_millis: step_millis,
                                    position: position,
                                    gear: gear}) do
    next_position = new_position(position, direction, step(gear))
    @position_pin_values
    |> Enum.at(next_position)
    |> Enum.zip(pins)
    |> Enum.each(fn {value, pin} ->
      pin |> Gpio.write(value)
    end)
    timer_ref = schedule_next_step(direction, step_millis)
    {:noreply, %{status | position: next_position, timer_ref: timer_ref}}
  end

  # Cancels the pending timer (if any), then schedules afresh.
  defp schedule_next_step(direction, step_millis, timer_ref) do
    cancel_timer(timer_ref)
    schedule_next_step(direction, step_millis)
  end

  # No timer in :neutral — the motor idles until a direction is set.
  defp schedule_next_step(:neutral, _step_millis), do: nil
  defp schedule_next_step(_direction, step_millis), do: Process.send_after(self, :step, step_millis)

  # `:nil` is just the atom nil; no-op when there is no pending timer.
  defp cancel_timer(:nil), do: false
  defp cancel_timer(timer_ref), do: Process.cancel_timer(timer_ref)

  defp new_position(position, :neutral, _) do
    position
  end

  @lint {~r/Refactor/, false}
  # Forward/back wrap around the 8-row half-step table; the +8 before rem/2
  # keeps the :back result non-negative.
  defp new_position(position, :forward, current_step) do
    (position + current_step) |> rem(8)
  end

  defp new_position(position, :back, current_step) do
    (8 + position - current_step) |> rem(8)
  end

  # Gear determines how many table rows are skipped per step.
  defp step(:low), do: 1
  defp step(:high), do: 2
end
|
apps/cb_locomotion/lib/cb_locomotion/stepper_motor.ex
| 0.567577
| 0.679764
|
stepper_motor.ex
|
starcoder
|
defmodule DiscordEx.Voice.Buffer do
  @moduledoc """
  Buffer Module for holding and reading audio.

  Data is stored byte-reversed inside an Agent so that reads can peel FIFO
  chunks off the low end of the binary with a single bit-level match.
  """

  @doc "Create a new queue"
  @spec start :: pid
  def start do
    {:ok, agent} = Agent.start_link(fn -> <<>> end)
    agent
  end

  @doc "Write to the buffer/queue binary data"
  @spec write(pid, binary) :: atom
  def write(queue, new_data) do
    reversed = reverse_binary(new_data)
    # Prepending the reversed bytes keeps the oldest data at the tail.
    Agent.update(queue, &(reversed <> &1))
  end

  @doc "Read off of the buffer based on a set bit size"
  @spec read(pid, integer) :: binary
  def read(queue, size_in_bits) do
    buffer = Agent.get(queue, & &1)
    {rest, chunk} = take_bits(buffer, size_in_bits)
    Agent.update(queue, fn _ -> rest end)
    # Undo the storage reversal so callers see the original byte order.
    reverse_binary(chunk)
  end

  @doc "Read off of the buffer based on a set bit size and return the integer format"
  @spec read(pid, integer, atom) :: binary
  def read(queue, size_in_bits, :integer) do
    buffer = Agent.get(queue, & &1)

    if buffer != "" do
      {rest, value} = take_int(buffer, size_in_bits)
      Agent.update(queue, fn _ -> rest end)
      value
    else
      # Empty buffer reads as zero.
      0
    end
  end

  @doc "Drain the buffer based off the bit size and apply the result to the function - you don't actually have to use time to make use of this"
  @spec drain(pid, integer, function) :: binary
  def drain(queue, size_in_bits, function, time \\ 0) do
    case read(queue, size_in_bits) do
      <<>> ->
        nil

      data ->
        function.(data, time)
        drain(queue, size_in_bits, function, time)
    end
  end

  @doc "Drain the buffer which is assumed to contain just a DCA file with opus packets which have a header that dictate the size of a frame and the packets passed to the function"
  @spec drain_dca(pid, function, integer) :: binary
  def drain_dca(queue, function, time \\ 0) do
    # Each DCA frame begins with a 16-bit length header (in bytes).
    frame_bytes = read(queue, 16, :integer)

    if frame_bytes != "" && frame_bytes != 0 do
      case read(queue, frame_bytes * 8) do
        <<>> ->
          nil

        frame ->
          function.(frame, time)
          drain_dca(queue, function, time)
      end
    else
      # No valid header: flush everything that remains in one go.
      remainder = read(queue, 9_999_999_999_999)
      function.(remainder, time)
    end
  end

  @doc "Get the size of the buffer"
  @spec size(pid) :: integer
  def size(queue) do
    queue
    |> Agent.get(& &1)
    |> bit_size()
  end

  @doc "Dump everything out of the buffer"
  @spec dump(pid) :: atom
  def dump(queue) do
    # NOTE(review): the read value is discarded; dump/1 only clears the
    # buffer and returns :ok (the Agent.update result), as the spec states.
    Agent.get(queue, & &1)
    Agent.update(queue, fn _ -> <<>> end)
  end

  # Splits off the low `bits` bits of the stored (reversed) binary;
  # returns {remaining, captured}. Short buffers yield everything.
  defp take_bits(data, bits) when bit_size(data) >= bits do
    keep = bit_size(data) - bits
    <<rest::bitstring-size(keep), chunk::binary>> = data
    {rest, chunk}
  end

  defp take_bits(data, _bits), do: {<<>>, data}

  # As take_bits/2 but decodes the captured bits as a big unsigned integer.
  # (The DCA header arrives big-endian here, likely from the conversion step.)
  defp take_int(data, bits) do
    keep = bit_size(data) - bits
    <<rest::bitstring-size(keep), value::big-unsigned-integer-size(bits)>> = data
    {rest, value}
  end

  # Byte-order reversal used for both storage and retrieval.
  defp reverse_binary(data) do
    data
    |> :erlang.binary_to_list()
    |> Enum.reverse()
    |> :erlang.list_to_binary()
  end
end
|
lib/discord_ex/voice/buffer.ex
| 0.735167
| 0.419321
|
buffer.ex
|
starcoder
|
defmodule Scenic.Primitive.Ellipse do
  @moduledoc """
  Draw an ellipse on the screen.
  ## Data
  `{radius_1, radius_2}`
  The data for an ellipse is a tuple of two radii.
  * `radius_1` - the radius of the ellipse in one direction
  * `radius_2` - the radius of the ellipse in the other direction
  ## Styles
  This primitive recognizes the following styles
  * [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
  * [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the primitive
  * [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive. In this case, only the curvy part.
  Note: you can achieve the same effect with a Circle primitive
  by applying a `:scale` transform to it with unequal values on the axes
  ## Usage
  You should add/modify primitives via the helper functions in
  [`Scenic.Primitives`](Scenic.Primitives.html#ellipse/3)
  ```elixir
  graph
  |> ellipse( {75, 100}, stroke: {1, :yellow} )
  ```
  """

  use Scenic.Primitive
  alias Scenic.Script
  alias Scenic.Primitive
  alias Scenic.Primitive.Style

  @type t :: {radius_1 :: number, radius_2 :: number}
  @type styles_t :: [:hidden | :scissor | :fill | :stroke_width | :stroke_fill]

  @styles [:hidden, :scissor, :fill, :stroke_width, :stroke_fill]

  # --------------------------------------------------------
  @doc """
  Validates the data for this primitive: a `{radius_1, radius_2}` tuple of
  non-negative numbers. Returns `{:ok, data}` or `{:error, message}`.
  """
  @impl Primitive
  @spec validate({radius_1 :: number, radius_2 :: number}) ::
          {:ok, {radius_1 :: number, radius_2 :: number}} | {:error, String.t()}
  def validate({r1, r2})
      when is_number(r1) and is_number(r2) and
             r1 >= 0 and r2 >= 0 do
    {:ok, {r1, r2}}
  end

  def validate(data) do
    # Fixed copy-paste error: message previously said "Arc" for an Ellipse.
    {
      :error,
      """
      #{IO.ANSI.red()}Invalid Ellipse specification
      Received: #{inspect(data)}
      #{IO.ANSI.yellow()}
      The data for an Ellipse is {radius_1, radius_2}
      The radii must be >= 0#{IO.ANSI.default_color()}
      """
    }
  end

  # --------------------------------------------------------
  @doc """
  Returns a list of styles recognized by this primitive.
  """
  @impl Primitive
  @spec valid_styles() :: styles_t()
  def valid_styles(), do: @styles

  # --------------------------------------------------------
  @doc """
  Compile the data for this primitive into a mini script. This can be combined with others to
  generate a larger script and is called when a graph is compiled.
  """
  @impl Primitive
  @spec compile(primitive :: Primitive.t(), styles :: Style.t()) :: Script.t()
  def compile(%Primitive{module: __MODULE__, data: {radius_1, radius_2}}, styles) do
    Script.draw_ellipse([], radius_1, radius_2, Script.draw_flag(styles))
  end

  # --------------------------------------------------------
  # Standard ellipse containment test with the center at the origin:
  # (x/r1)^2 + (y/r2)^2 <= 1 means the point is inside or on the edge.
  # NOTE(review): a zero radius (allowed by validate/1) raises ArithmeticError
  # here — confirm callers never probe a degenerate ellipse.
  def contains_point?({r1, r2}, {xp, yp}) do
    dx = xp * xp / (r1 * r1)
    dy = yp * yp / (r2 * r2)
    # test if less or equal to 1
    dx + dy <= 1
  end
end
|
lib/scenic/primitive/ellipse.ex
| 0.945889
| 0.923316
|
ellipse.ex
|
starcoder
|
defmodule FibonacciHeap do
  @moduledoc """
  A mergeable min-heap ordered by a user-supplied comparator.

  Reference: https://www.cs.princeton.edu/~wayne/teaching/fibonacci-heap.pdf

  Only insert and delete-min are supported, so no node marking is done.
  A single root is kept, and the heap structure lives in its children.

  Internal representation: `data` is `nil` or a `{value, children}` tuple,
  where `children` is `nil` or a list of such tuples.

  * Insert: O(1) — merge a single-node tree into the root.
  * Delete-min: remove the root and pairwise merge its children
    (without parent pointers the rank bookkeeping can be inefficient).
  """

  defstruct data: nil, size: 0, comparator: nil

  def new(comparator), do: %__MODULE__{comparator: comparator}

  def empty?(%__MODULE__{data: nil, size: 0}), do: true
  def empty?(%__MODULE__{}), do: false

  def size(%__MODULE__{size: size}), do: size

  def top(%__MODULE__{data: nil}), do: nil
  def top(%__MODULE__{data: {value, _children}}), do: value

  @doc """
  ## Examples
      iex> alias FibonacciHeap, as: Heap
      ...> Heap.new(&(&1 < &2))
      ...> |> Heap.pop()
      %FibonacciHeap{
        data: nil,
        size: 0,
        comparator: &(&1 < &2)
      }
      iex> alias FibonacciHeap, as: Heap
      ...> Heap.new(&(&1 < &2))
      ...> |> Heap.push(1)
      ...> |> Heap.pop()
      %FibonacciHeap{
        data: nil,
        size: 0,
        comparator: &(&1 < &2)
      }
      iex> alias FibonacciHeap, as: Heap
      ...> Heap.new(&(&1 < &2))
      ...> |> Heap.push(1)
      ...> |> Heap.push(2)
      ...> |> Heap.pop()
      %FibonacciHeap{
        data: {2, nil},
        size: 1,
        comparator: &(&1 < &2)
      }
  """
  # Popping an empty heap is a no-op.
  def pop(%__MODULE__{data: nil, size: 0} = heap), do: heap

  def pop(%__MODULE__{data: {_top, children}, size: size, comparator: comp} = heap) do
    %{heap | data: merge_pairs(children, comp), size: size - 1}
  end

  # Returns {minimum, heap-without-minimum}.
  def pop!(%__MODULE__{} = heap), do: {top(heap), pop(heap)}

  @doc """
  ## Examples
      iex> alias FibonacciHeap, as: Heap
      ...> Heap.new(&(&1 < &2))
      ...> |> Heap.push(1)
      %FibonacciHeap{
        data: {1, nil},
        size: 1,
        comparator: &(&1 < &2)
      }
      iex> alias FibonacciHeap, as: Heap
      ...> Heap.new(&(&1 < &2))
      ...> |> Heap.push(1)
      ...> |> Heap.push(2)
      %FibonacciHeap{
        data: {1, [{2, nil}]},
        size: 2,
        comparator: &(&1 < &2)
      }
  """
  def push(%__MODULE__{data: tree, size: size, comparator: comp} = heap, value) do
    %{heap | data: merge(tree, {value, nil}, comp), size: size + 1}
  end

  # Merges two trees; the one whose value wins the comparator becomes the
  # new root and adopts the other as a child. nil is the identity.
  defp merge(nil, tree, _comp), do: tree
  defp merge(tree, nil, _comp), do: tree

  defp merge({a, a_children} = a_tree, {b, b_children} = b_tree, comp) do
    if comp.(a, b) do
      {a, add_child(a_children, b_tree)}
    else
      {b, add_child(b_children, a_tree)}
    end
  end

  defp add_child(nil, tree), do: [tree]
  defp add_child(children, tree), do: [tree | children]

  # Pairwise merge of a child list after delete-min.
  defp merge_pairs(nil, _comp), do: nil
  defp merge_pairs([], _comp), do: nil
  defp merge_pairs([tree], _comp), do: tree

  defp merge_pairs([a, b | rest], comp),
    do: merge(merge(a, b, comp), merge_pairs(rest, comp), comp)

  defimpl Collectable do
    def into(heap) do
      collector = fn
        acc, {:cont, value} -> FibonacciHeap.push(acc, value)
        acc, :done -> acc
        _acc, :halt -> :ok
      end

      {heap, collector}
    end
  end

  defimpl Enumerable do
    def count(heap), do: {:ok, FibonacciHeap.size(heap)}
    def member?(_heap, _value), do: {:error, __MODULE__}
    def slice(_heap), do: {:error, __MODULE__}

    def reduce(_heap, {:halt, acc}, _fun), do: {:halted, acc}

    def reduce(heap, {:suspend, acc}, fun),
      do: {:suspended, acc, &reduce(heap, &1, fun)}

    def reduce(%FibonacciHeap{data: nil, size: 0}, {:cont, acc}, _fun), do: {:done, acc}

    # Visit the current minimum, then recurse on the heap without it —
    # enumeration therefore yields values in comparator order.
    def reduce(heap, {:cont, acc}, fun) do
      reduce(FibonacciHeap.pop(heap), fun.(FibonacciHeap.top(heap), acc), fun)
    end
  end
end
|
lib/algs/heap/fibonacci_heap.ex
| 0.573678
| 0.579162
|
fibonacci_heap.ex
|
starcoder
|
defmodule FE.Maybe do
  @moduledoc """
  `FE.Maybe` is an explicit data type for representing values that might or might not exist.
  """

  alias FE.{Result, Review}

  @type t(a) :: {:just, a} | :nothing

  defmodule Error do
    defexception [:message]
  end

  @doc """
  Creates an `FE.Maybe` representing the absence of a value.
  """
  @spec nothing() :: t(any)
  def nothing, do: :nothing

  @doc """
  Creates an `FE.Maybe` representing the value passed as an argument.
  """
  @spec just(a) :: t(a) when a: var
  def just(value), do: {:just, value}

  @doc """
  Creates an `FE.Maybe` from any Elixir term.
  It creates a non-value from `nil` and a value from any other term.
  Please note that false, 0, the empty list, etc., are valid values.
  ## Examples
      iex> FE.Maybe.new(nil)
      FE.Maybe.nothing()
      iex> FE.Maybe.new(:x)
      FE.Maybe.just(:x)
      iex> FE.Maybe.new(false)
      FE.Maybe.just(false)
  """
  @spec new(a | nil) :: t(a) when a: var
  def new(term)
  def new(nil), do: nothing()
  def new(term), do: just(term)

  @doc """
  Transforms an `FE.Maybe` value using the provided function.
  Nothing is done if there is no value.
  ## Examples
      iex> FE.Maybe.map(FE.Maybe.nothing(), &String.length/1)
      FE.Maybe.nothing()
      iex> FE.Maybe.map(FE.Maybe.just("foo"), &String.length/1)
      FE.Maybe.just(3)
  """
  @spec map(t(a), (a -> a)) :: t(a) when a: var
  def map(maybe, fun)
  def map({:just, value}, fun), do: just(fun.(value))
  def map(:nothing, _fun), do: nothing()

  @doc """
  Returns the value stored in a `FE.Maybe` or the provided default if there is no value.
  ## Examples
      iex> FE.Maybe.unwrap_or(FE.Maybe.nothing(), 0)
      0
      iex> FE.Maybe.unwrap_or(FE.Maybe.just(5), 0)
      5
  """
  @spec unwrap_or(t(a), a) :: a when a: var
  def unwrap_or(maybe, fallback)
  def unwrap_or({:just, value}, _fallback), do: value
  def unwrap_or(:nothing, fallback), do: fallback

  @doc """
  Passes the value stored in `FE.Maybe` as input to the first function, or returns the provided default.
  ## Examples
      iex> FE.Maybe.unwrap_with(FE.Maybe.nothing(), fn(x) -> x+1 end, 0)
      0
      iex> FE.Maybe.unwrap_with(FE.Maybe.just(4), fn(x) -> x+1 end, 0)
      5
      iex> FE.Maybe.unwrap_with(FE.Maybe.just("a"), fn(x) -> x <> "bc" end, "xyz")
      "abc"
  """
  @spec unwrap_with(t(a), (a -> b), b) :: b when a: var, b: var
  def unwrap_with(maybe, on_just, fallback)
  def unwrap_with({:just, value}, on_just, _fallback), do: on_just.(value)
  def unwrap_with(:nothing, _on_just, fallback), do: fallback

  @doc """
  Returns the value stored in an `FE.Maybe`. Raises an `FE.Maybe.Error` if a non-value is passed.
  ## Examples
      iex> FE.Maybe.unwrap!(FE.Maybe.just(:value))
      :value
      iex> try do FE.Maybe.unwrap!(FE.Maybe.nothing()) ; rescue e -> e end
      %FE.Maybe.Error{message: "unwrapping Maybe that has no value"}
  """
  @spec unwrap!(t(a)) :: a | no_return() when a: var
  def unwrap!(maybe)
  def unwrap!({:just, value}), do: value
  def unwrap!(:nothing), do: raise(Error, "unwrapping Maybe that has no value")

  @doc """
  Passes the value of `FE.Maybe` to the provided function and returns its return value,
  that should be of the type `FE.Maybe`.
  Useful for chaining together a computation consisting of multiple steps, each of which
  takes a value as an argument and returns a `FE.Maybe`.
  ## Examples
      iex> FE.Maybe.and_then(FE.Maybe.nothing(), fn s -> FE.Maybe.just(String.length(s)) end)
      FE.Maybe.nothing()
      iex> FE.Maybe.and_then(FE.Maybe.just("foobar"), fn s -> FE.Maybe.just(String.length(s)) end)
      FE.Maybe.just(6)
      iex> FE.Maybe.and_then(FE.Maybe.just("foobar"), fn _ -> FE.Maybe.nothing() end)
      FE.Maybe.nothing()
  """
  @spec and_then(t(a), (a -> t(a))) :: t(a) when a: var
  def and_then(maybe, fun)
  def and_then({:just, value}, fun), do: fun.(value)
  def and_then(:nothing, _fun), do: nothing()

  @doc """
  Folds over the provided list of elements, where the accumulator and each element
  in the list are passed to the provided function.
  The provided function must returns a new accumulator of the `FE.Maybe` type.
  The provided `FE.Maybe` is the initial accumulator.
  Returns the last `FE.Maybe` returned by the function.
  Stops and returns `nothing()` if at any step the function returns `nothing`.
  ## Examples
      iex> FE.Maybe.fold(FE.Maybe.nothing(), [], &FE.Maybe.just(&1))
      FE.Maybe.nothing()
      iex> FE.Maybe.fold(FE.Maybe.just(5), [], &FE.Maybe.just(&1))
      FE.Maybe.just(5)
      iex> FE.Maybe.fold(FE.Maybe.nothing(), [1, 2], &FE.Maybe.just(&1 + &2))
      FE.Maybe.nothing()
      iex> FE.Maybe.fold(FE.Maybe.just(1), [1, 1], &FE.Maybe.just(&1 + &2))
      FE.Maybe.just(3)
      iex> FE.Maybe.fold(FE.Maybe.just(1), [1, 2, -2, 3], fn
      ...>   elem, _acc when elem < 0 -> FE.Maybe.nothing()
      ...>   elem, acc -> FE.Maybe.just(elem+acc)
      ...> end)
      FE.Maybe.nothing()
  """
  @spec fold(t(a), [b], (b, a -> t(a))) :: t(a) when a: var, b: var
  def fold(maybe, elems, fun) do
    Enum.reduce_while(elems, maybe, fn elem, acc ->
      # Short-circuit as soon as a step yields :nothing.
      case and_then(acc, &fun.(elem, &1)) do
        {:just, _} = next -> {:cont, next}
        :nothing -> {:halt, :nothing}
      end
    end)
  end

  @doc """
  Works like `fold/3`, except that the first element of the provided list is removed
  from it, wrapped in a `FE.Maybe` and treated as the initial accumulator.
  Then, fold is executed over the remainder of the provided list.
  ## Examples
      iex> FE.Maybe.fold([1,2,3], fn elem, acc -> FE.Maybe.just(elem+acc) end)
      FE.Maybe.just(6)
      iex> FE.Maybe.fold([1], fn elem, acc -> FE.Maybe.just(elem+acc) end)
      FE.Maybe.just(1)
      iex> FE.Maybe.fold([1], fn _, _ -> FE.Maybe.nothing() end)
      FE.Maybe.just(1)
      iex> FE.Maybe.fold([1, 2, 3], &(FE.Maybe.just(&1 + &2)))
      FE.Maybe.just(6)
      iex> FE.Maybe.fold([1, -22, 3], fn
      ...>   elem, _acc when elem < 0 -> FE.Maybe.nothing()
      ...>   elem, acc -> FE.Maybe.just(elem+acc)
      ...> end)
      FE.Maybe.nothing()
  """
  @spec fold([b], (b, a -> t(a))) :: t(a) when a: var, b: var
  def fold(elems, fun)
  def fold([], _fun), do: raise(Enum.EmptyError)
  def fold([first | rest], fun), do: fold(just(first), rest, fun)

  @doc """
  Extracts only the values from a list of `Maybe.t()`s
  ## Examples
      iex> FE.Maybe.justs([FE.Maybe.just(:good), FE.Maybe.nothing(), FE.Maybe.just(:better)])
      [:good, :better]
  """
  @spec justs([t(a)]) :: [a] when a: var
  def justs(maybes) do
    Enum.flat_map(maybes, fn
      {:just, value} -> [value]
      :nothing -> []
    end)
  end

  @doc """
  Transforms an `FE.Maybe` to an `FE.Result`.
  An `FE.Maybe` with a value becomes a successful value of a `FE.Result`.
  A `FE.Maybe` without a value wrapped becomes an erroneous `FE.Result`, where
  the second argument is used as the error's value.
  ## Examples
      iex> FE.Maybe.to_result(FE.Maybe.just(3), "No number found")
      FE.Result.ok(3)
      iex> FE.Maybe.to_result(FE.Maybe.nothing(), "No number found")
      FE.Result.error("No number found")
  """
  @spec to_result(t(a), b) :: Result.t(a, b) when a: var, b: var
  def to_result(maybe, error)
  def to_result({:just, value}, _error), do: Result.ok(value)
  def to_result(:nothing, error), do: Result.error(error)

  @doc """
  Transforms an `FE.Maybe` to an `FE.Review`.
  An `FE.Maybe` with a value becomes an accepted `FE.Review` with the same value.
  An `FE.Maybe` without a value wrapped becomes a rejected `FE.Review`, where
  the issues are takens from the second argument to the function.
  ## Examples
      iex> FE.Maybe.to_review(FE.Maybe.just(3), ["No number found"])
      FE.Review.accepted(3)
      iex> FE.Maybe.to_review(FE.Maybe.nothing(), ["No number found"])
      FE.Review.rejected(["No number found"])
  """
  @spec to_review(t(a), [b]) :: Review.t(a, b) when a: var, b: var
  def to_review(maybe, issues)
  def to_review({:just, value}, _issues), do: Review.accepted(value)
  def to_review(:nothing, issues), do: Review.rejected(issues)
end
|
lib/fe/maybe.ex
| 0.866627
| 0.658774
|
maybe.ex
|
starcoder
|
defmodule Cizen.Filter.Code do
alias Cizen.Filter
@moduledoc false
@additional_operators [:is_nil, :to_string, :to_charlist]
@type t :: term
# Prepends `prefix` to the key path of every `{:access, keys}` node in a
# compiled filter expression. Operator nodes `{op, args}` recurse into their
# arguments; anything else (literals, pinned values) passes through untouched.
def with_prefix({:access, keys}, prefix) do
  {:access, prefix ++ keys}
end

def with_prefix({op, args}, prefix) when is_atom(op) and is_list(args) do
  args = Enum.map(args, &with_prefix(&1, prefix))
  {op, args}
end

def with_prefix(node, _prefix), do: node
# Combines a list of filter codes with logical AND (right-associative nesting).
# An empty list is vacuously true.
def all([]), do: true
def all([filter]), do: filter
def all([filter | tail]), do: {:and, [filter, all(tail)]}
# Combines a list of filter codes with logical OR (right-associative nesting).
# An empty list is vacuously false.
def any([]), do: false
def any([filter]), do: filter
def any([filter | tail]), do: {:or, [filter, any(tail)]}
# Reads a function-head pattern, accumulating:
#   * keys       - map from bound variable name to its access path
#   * operations - filter codes implied by the head (struct checks,
#                  pin/literal comparisons, translated guards)
# `prefix` is the access path leading to the value currently being matched.
defp read_header(arg, env), do: read_header(arg, %{}, [], [], env)

# `pattern when guard`: read the pattern first, then translate the guard
# expression into filter code using the variables the pattern bound.
defp read_header({:when, _, [header, guard]}, keys, operations, prefix, env) do
  {keys, operations} = read_header(header, keys, operations, prefix, env)
  code = translate(guard, keys, env)
  operations = [code | operations]
  {keys, operations}
end

# `%Mod{...}`: emit an is_map check plus a __struct__ == Mod comparison,
# then recurse into each field pattern with the field's key appended.
defp read_header({:%, _, [module, {:%{}, _, pairs}]}, keys, operations, prefix, env) do
  module = Macro.expand(module, env)
  access = List.insert_at(prefix, -1, :__struct__)
  operations = [
    {:and, [{:is_map, [{:access, prefix}]}, {:==, [{:access, access}, module]}]}
    | operations
  ]
  pairs
  |> Enum.reduce({keys, operations}, fn {key, value}, {keys, operations} ->
    read_header(value, keys, operations, List.insert_at(prefix, -1, key), env)
  end)
end

# `struct = var`: read both sides against the same prefix.
defp read_header({:=, _, [struct, {var, meta, context}]}, keys, operations, prefix, env) do
  {keys, operations} = read_header(struct, keys, operations, prefix, env)
  read_header({var, meta, context}, keys, operations, prefix, env)
end

# `^var`: pin — compare the accessed value against the outer variable.
defp read_header({:^, _, [var]}, keys, operations, prefix, _env) do
  operations = [{:==, [{:access, prefix}, var]} | operations]
  {keys, operations}
end

# Bare variable: first occurrence records its access path; a repeated
# occurrence adds an equality check between the two paths.
defp read_header({var, _, _}, keys, operations, prefix, _env) do
  case Map.get(keys, var) do
    nil ->
      keys = Map.put(keys, var, prefix)
      {keys, operations}
    access ->
      operations = [{:==, [{:access, prefix}, {:access, access}]} | operations]
      {keys, operations}
  end
end

# Any other literal: compare the accessed value against it directly.
defp read_header(value, keys, operations, prefix, _env) do
  operations = [{:==, [{:access, prefix}, value]} | operations]
  {keys, operations}
end
# Generates filter code from a quoted anonymous function (`fn ... end`),
# combining the codes of all of its clauses (see `do_generate/3`).
def generate({:fn, _, cases}, env) do
  do_generate(cases, [], env)
end
# Generates code for each `fn` clause in order. `guards` accumulates the
# guards of earlier clauses so a later clause only applies when none of the
# previous ones matched (mirroring `fn` clause-selection semantics).
defp do_generate([fncase], guards, env) do
  {_, code} = with_guard(fncase, guards, env)
  code
end

defp do_generate([fncase | tail], guards, env) do
  {guard, code} = with_guard(fncase, guards, env)
  guards = List.insert_at(guards, -1, guard)
  tail_code = do_generate(tail, guards, env)
  # literal tuple
  {:or, [code, tail_code]}
end
# Generates `{guard, code}` for one clause and gates the code on the
# negation of all previously seen guards (each `{:==, [guard, false]}`),
# so this clause's code only fires when no earlier clause matched.
defp with_guard(fncase, guards, env) do
  {guard, code} = gen(fncase, env)

  code =
    guards
    |> Enum.map(fn guard -> {:==, [guard, false]} end)
    |> List.insert_at(-1, guard)
    |> all()
    |> gen_and(code)

  {guard, code}
end
# Unwraps a single-expression `__block__` body before generating.
defp gen({:->, _, [[arg], {:__block__, _, [expression]}]}, env) do
  gen({:->, [], [[arg], expression]}, env)
end

# Generates `{guard, code}` for one `arg -> expression` clause: the guard is
# the conjunction of the operations implied by the argument pattern (they
# were accumulated in reverse, hence the `Enum.reverse/1`), and the code is
# the translated body expression.
defp gen({:->, _, [[arg], expression]}, env) do
  {keys, operations} = read_header(arg, env)
  code = translate(expression, keys, env)

  guard =
    operations
    |> Enum.reverse()
    |> all()

  {guard, code}
end
# Translates an expression into filter code: embedded `Filter.new` calls are
# expanded first (top-down prewalk), then every node is rewritten bottom-up
# (postwalk) so inner accesses exist before outer nodes are examined.
defp translate(expression, keys, env) do
  expression
  |> Macro.prewalk(&expand_embedded(&1, env))
  |> Macro.postwalk(&walk(&1, keys, env))
end
# Evaluates embedded `Filter.new(...)` calls at compile time (in the macro
# expansion environment, not on runtime user input) so the resulting filter
# struct can be inlined into the surrounding code.
defp expand_embedded(node, env) do
  case node do
    {{:., _, [{:__aliases__, _, [:Filter]}, :new]}, _, _} ->
      {filter, _} =
        node
        |> Code.eval_quoted([], env)

      filter

    node ->
      node
  end
end
# Skip . operator — handled together with its enclosing call node.
defp walk({:., _, _} = node, _keys, _env), do: node

# Additional operators (from the `@additional_operators` module attribute,
# defined above this chunk): rewrite into filter code only when at least one
# argument touches an access; otherwise leave the AST node untouched.
defp walk({op, _, args} = node, _keys, _env) when op in @additional_operators do
  if Enum.any?(args, &has_access?(&1)) do
    # literal tuple
    {op, args}
  else
    node
  end
end

# Field access: `access.key` becomes an access with `key` appended.
defp walk({{:., _, [{:access, keys}, key]}, _, []}, _keys, _env) do
  # literal tuple
  {:access, List.insert_at(keys, -1, key)}
end

# Bracket access `access[key]` (Access.get) — same as above.
defp walk({{:., _, [Access, :get]}, _, [{:access, keys}, key]}, _keys, _env) do
  # literal tuple
  {:access, List.insert_at(keys, -1, key)}
end

# Remote call `module.function(args)`.
defp walk({{:., _, [module, function]}, _, args} = node, _keys, env) do
  expanded_module = Macro.expand(module, env)

  cond do
    expanded_module == Filter and function == :match? ->
      # Embedded filter: splice in the embedded filter's code with the access
      # path as prefix. The literal-struct case avoids a runtime `.code` read.
      case args do
        [%Filter{code: code}, {:access, keys}] ->
          quote do
            unquote(__MODULE__).with_prefix(unquote(code), unquote(keys))
          end

        [filter, {:access, keys}] ->
          quote do
            unquote(__MODULE__).with_prefix(unquote(filter).code, unquote(keys))
          end
      end

    Enum.any?(args, &has_access?(&1)) ->
      # Function call
      # literal tuple
      {:call, [{module, function} | args]}

    true ->
      node
  end
end

# Access to value: a bare variable bound in the pattern is replaced by its
# access path (the third element is the variable's context, not an arg list).
defp walk({first, _, third} = node, keys, _env) when is_atom(first) and not is_list(third) do
  if Map.has_key?(keys, first) do
    keys = Map.get(keys, first)
    # literal tuple
    {:access, keys}
  else
    node
  end
end

# Local call or operator with an argument list.
defp walk({first, _, third} = node, keys, env) when is_atom(first) do
  cond do
    Macro.operator?(first, length(third)) ->
      # Operator: rewrite only if some argument involves an access.
      if Enum.any?(third, &has_access?(&1)) do
        op = first
        args = third
        # literal tuple
        {op, args}
      else
        node
      end

    third != [] ->
      # Function calls
      gen_call(node, keys, env)

    true ->
      node
  end
end

# Everything else (literals, etc.) passes through unchanged.
defp walk(node, _keys, _env), do: node
# Returns true when the (already walked) AST fragment contains at least one
# `{:access, _}` node anywhere inside it.
defp has_access?(value) do
  value
  |> Macro.prewalk(false, fn
    {:access, _} = node, _found -> {node, true}
    node, found -> {node, found}
  end)
  |> elem(1)
end
# Rewrites a local function call whose arguments contain an access into a
# `{:call, [{module, fun} | args]}` filter code. The owning module is looked
# up among the imports visible in `env.functions`, falling back to the
# calling module itself when no import matches.
defp gen_call({first, _, third} = node, _keys, env) do
  if Enum.any?(third, &has_access?(&1)) do
    arity = length(third)

    {module, _} =
      env.functions
      |> Enum.find({env.module, []}, fn {_module, functions} ->
        Enum.find(functions, fn
          {^first, ^arity} ->
            true

          _ ->
            false
        end)
      end)

    fun = {module, first}
    # literal tuple
    {:call, [fun | third]}
  else
    node
  end
end
# Builds the conjunction of two guard codes; `true` is the neutral element
# for `and`, so it is dropped from either side.
defp gen_and(true, code), do: code
defp gen_and(code, true), do: code
defp gen_and(left, right), do: {:and, [left, right]}
end
|
lib/cizen/filter/code.ex
| 0.606498
| 0.443239
|
code.ex
|
starcoder
|
defmodule Membrane.Core.Element.DemandHandler do
  @moduledoc false
  # Module handling demands requested on source pads.

  alias Membrane.Core
  alias Membrane.Element.Pad
  alias Core.PullBuffer

  alias Core.Element.{
    BufferController,
    CapsController,
    DemandController,
    EventController,
    PadModel,
    State
  }

  require PadModel
  use Core.Element.Log
  use Bunch

  @doc """
  Gets given amount of data from given sink pad's PullBuffer, passes it to proper
  controller, and checks if source demand has been supplied. If not, then demand
  is assumed to be underestimated, and a zero-sized demand is sent to handle it
  again.
  """
  @spec handle_demand(
          Pad.name_t(),
          {:source, Pad.name_t()} | :self,
          :set | :increase,
          pos_integer,
          State.t()
        ) :: State.stateful_try_t()
  def handle_demand(pad_name, source, :set, size, state) do
    state = set_sink_demand(pad_name, source, size, state)
    supply_demand(pad_name, source, size, state)
  end

  def handle_demand(pad_name, :self, :increase, size, state) do
    # Add the new demand on top of the demand already stored for this pad and
    # try to supply the accumulated total.
    {total_size, state} =
      PadModel.get_and_update_data!(
        pad_name,
        :demand,
        fn demand -> (demand + size) ~> {&1, &1} end,
        state
      )

    supply_demand(pad_name, :self, total_size, state)
  end

  @doc """
  Handles demands requested on given sink pad, if there are any.
  """
  @spec check_and_handle_demands(Pad.name_t(), State.t()) :: State.stateful_try_t()
  def check_and_handle_demands(pad_name, state) do
    demand = PadModel.get_data!(pad_name, :demand, state)

    if demand > 0 do
      supply_demand(pad_name, :self, demand, state)
    else
      {:ok, state}
    end
    |> case do
      {:ok, %State{type: :filter} = state} ->
        # For filters, re-trigger demand handling on every source pad so data
        # keeps flowing downstream.
        PadModel.filter_names_by_data(%{direction: :source}, state)
        |> Bunch.Enum.try_reduce(state, fn name, st ->
          DemandController.handle_demand(name, 0, st)
        end)

      {:ok, %State{type: :sink} = state} ->
        {:ok, state}

      {{:error, reason}, state} ->
        {{:error, reason}, state}
    end
  end

  @spec supply_demand(Pad.name_t(), {:source, Pad.name_t()} | :self, pos_integer, State.t()) ::
          State.stateful_try_t()
  defp supply_demand(pad_name, source, size, state) do
    pb_output =
      PadModel.get_and_update_data(
        pad_name,
        :buffer,
        &(&1 |> PullBuffer.take(size)),
        state
      )

    with {{:ok, {pb_status, data}}, state} <- pb_output,
         {:ok, state} <- handle_pullbuffer_output(pad_name, source, data, state) do
      :ok = send_dumb_demand_if_needed(source, pb_status, state)
      {:ok, state}
    else
      {{:error, reason}, state} ->
        warn_error(
          """
          Error while supplying demand on pad #{inspect(pad_name)} requested by
          #{inspect(source)} of size #{inspect(size)}
          """,
          {:supply_demand, reason},
          state
        )
    end
  end

  @spec send_dumb_demand_if_needed({:source, Pad.name_t()} | :self, :empty | :value, State.t()) ::
          :ok
  defp send_dumb_demand_if_needed(:self, _pb_status, _state),
    do: :ok

  defp send_dumb_demand_if_needed(
         {:source, src_name},
         pb_status,
         state
       ) do
    # The PullBuffer still holds data while the source demand was not fully
    # satisfied: retry asynchronously with a zero-sized demand.
    if pb_status != :empty && PadModel.get_data!(src_name, :demand, state) > 0 do
      debug(
        """
        handle_process did not produce expected amount of buffers, despite
        PullBuffer being not empty. Trying executing handle_demand again.
        """,
        state
      )

      send(self(), {:membrane_demand, [0, src_name]})
    end

    :ok
  end

  @spec handle_pullbuffer_output(
          Pad.name_t(),
          {:source, Pad.name_t()} | :self,
          [{:event | :caps, any} | {:buffers, list, pos_integer}],
          State.t()
        ) :: State.stateful_try_t()
  defp handle_pullbuffer_output(pad_name, source, data, state) do
    data
    |> Bunch.Enum.try_reduce(state, fn v, state ->
      do_handle_pullbuffer_output(pad_name, source, v, state)
    end)
  end

  @spec do_handle_pullbuffer_output(
          Pad.name_t(),
          {:source, Pad.name_t()} | :self,
          {:event | :caps, any} | {:buffers, list, pos_integer},
          State.t()
        ) :: State.stateful_try_t()
  defp do_handle_pullbuffer_output(pad_name, _source, {:event, e}, state),
    do: EventController.exec_handle_event(pad_name, e, state)

  defp do_handle_pullbuffer_output(pad_name, _source, {:caps, c}, state),
    do: CapsController.exec_handle_caps(pad_name, c, state)

  defp do_handle_pullbuffer_output(
         pad_name,
         source,
         {:buffers, buffers, size},
         state
       ) do
    # Buffers consume demand: decrease the sink demand by the taken size
    # before handing the buffers to the controller.
    state = update_sink_demand(pad_name, source, &(&1 - size), state)
    BufferController.exec_buffer_handler(pad_name, source, buffers, state)
  end

  @spec set_sink_demand(Pad.name_t(), {:source, Pad.name_t()} | :self, non_neg_integer, State.t()) ::
          State.t()
  defp set_sink_demand(pad_name, :self, size, state),
    do: PadModel.set_data!(pad_name, :demand, size, state)

  defp set_sink_demand(_pad_name, _src, _size, state), do: state

  # Fixed: this spec previously repeated the `set_sink_demand` name although
  # it describes `update_sink_demand/4`.
  @spec update_sink_demand(
          Pad.name_t(),
          {:source, Pad.name_t()} | :self,
          (non_neg_integer -> non_neg_integer),
          State.t()
        ) :: State.t()
  defp update_sink_demand(pad_name, :self, f, state),
    do: PadModel.update_data!(pad_name, :demand, f, state)

  defp update_sink_demand(_pad_name, _src, _f, state), do: state
end
|
lib/membrane/core/element/demand_handler.ex
| 0.710628
| 0.402333
|
demand_handler.ex
|
starcoder
|
defmodule Ockam.Examples.Messaging.Ordering do
  @moduledoc """
  Examples of using ordering pipes

  Creates a shuffle worker to re-order messages.
  Sends messages through shuffle and through shuffle wrapped in an ordered pipe.
  """

  alias Ockam.Examples.Messaging.Shuffle

  @message_count 100

  @doc """
  Sends numbered messages through a bare shuffle ("unordered" route) and
  through `pipe_mod`'s pipe wrapped around the same shuffle ("ordered" route),
  then blocks until all messages are received on each route.

  Returns `{unordered, ordered}` payload lists; for a strictly ordering pipe
  `ordered` should equal its sorted form.

  NOTE(review): blocks forever if any message is lost — use
  `check_monotonic/1` for the timeout-based variant.
  """
  def check_strict(pipe_mod) do
    {shuffle, sender} = setup(pipe_mod)
    :ok = send_messages(sender, shuffle)

    ## receive 100 messages, selecting the unordered ones first
    unordered =
      Enum.map(1..@message_count, fn _n ->
        receive do
          %{payload: pl, return_route: ["unordered"]} -> String.to_integer(pl)
        end
      end)

    ordered =
      Enum.map(1..@message_count, fn _n ->
        receive do
          %{payload: pl} -> String.to_integer(pl)
        end
      end)

    {unordered, ordered}
  end

  @doc """
  Same experiment as `check_strict/1` but tolerant of message loss: each
  receive gives up after 100 ms, so the returned lists may be shorter than
  the number of messages sent. A monotonic pipe must deliver `ordered` in
  increasing order even when elements are missing.
  """
  def check_monotonic(pipe_mod) do
    {shuffle, sender} = setup(pipe_mod)
    :ok = send_messages(sender, shuffle)

    unordered =
      1..@message_count
      |> Enum.map(fn _n ->
        receive do
          %{payload: pl, return_route: ["unordered"]} -> String.to_integer(pl)
        after
          100 ->
            nil
        end
      end)
      |> Enum.reject(&is_nil/1)

    ordered =
      1..@message_count
      |> Enum.map(fn _n ->
        receive do
          %{payload: pl, return_route: rr} when rr != ["unordered"] -> String.to_integer(pl)
        after
          100 ->
            nil
        end
      end)
      |> Enum.reject(&is_nil/1)

    {unordered, ordered}
  end

  # Shared setup (previously duplicated in both checks): registers the current
  # process as "app" and builds the bare shuffle plus the
  # sender -> shuffle -> receiver pipe.
  defp setup(pipe_mod) do
    Ockam.Node.register_address("app")
    {:ok, shuffle} = Shuffle.create([])
    {:ok, receiver} = pipe_mod.receiver().create([])
    {:ok, sender} = pipe_mod.sender().create(receiver_route: [shuffle, receiver])
    {shuffle, sender}
  end

  # Sends the numbered payloads down both routes, tagging replies via the
  # "ordered"/"unordered" return routes.
  defp send_messages(sender, shuffle) do
    Enum.each(1..@message_count, fn n ->
      Ockam.Router.route(%{
        onward_route: [sender, "app"],
        return_route: ["ordered"],
        payload: "#{n}"
      })

      Ockam.Router.route(%{
        onward_route: [shuffle, "app"],
        return_route: ["unordered"],
        payload: "#{n}"
      })
    end)
  end
end
|
implementations/elixir/ockam/ockam/lib/ockam/examples/messaging/ordering.ex
| 0.826467
| 0.461502
|
ordering.ex
|
starcoder
|
defmodule Exred.Node.Rpiphoto do
  @moduledoc """
  Takes a photo using the Raspberry PI's camera module

  **Incoming message format**

  ```elixir
  msg = %{
    payload :: any,
    filename :: String.t,
    width :: String.t,
    height :: String.t,
    horizontal_flip :: String.t,
    vertical_flip :: String.t,
    metering :: String.t
  }
  ```

  All of the above are optional. Payload is ignored. The other keys override the corresponding node config values.

  **Outgoing message format**

  ```elixir
  msg = %{
    payload :: number
  }
  ```

  Payload is the exit status of the shell command.
  """

  @name "RPI Photo"
  @category "raspberry pi"
  @info @moduledoc

  # Node configuration exposed in the Exred UI; each value can be overridden
  # per-message (see moduledoc).
  @config %{
    name: %{
      info: "Node name",
      value: @name,
      type: "string",
      attrs: %{max: 25}
    },
    filename: %{
      info: "Output file name",
      value: "/tmp/image-%04d",
      type: "string",
      attrs: %{max: 50}
    },
    width: %{
      info: "image width",
      type: "number",
      value: 800,
      attrs: %{min: 0, max: 3280}
    },
    height: %{
      info: "image height",
      type: "number",
      value: 600,
      attrs: %{min: 0, max: 2464}
    },
    horizontal_flip: %{
      info: "Flip image horizontally",
      type: "string",
      value: "false",
      attrs: %{max: 5}
    },
    vertical_flip: %{
      info: "Flip image vertically",
      type: "string",
      value: "false",
      attrs: %{max: 5}
    },
    metering: %{
      info: "Set metering mode",
      type: "selector",
      value: "average",
      attrs: %{options: ["average", "spot", "backlit", "matrix"]}
    }
  }

  @ui_attributes %{
    left_icon: "photo_camera",
    config_order: [:name, :metering, :width, :height, :horizontal_flip, :vertical_flip, :filename]
  }

  use Exred.NodePrototype
  alias Porcelain.Result
  require Logger

  @impl true
  def handle_msg(%{} = msg, state) do
    # Per-message overrides fall back to the node's configured values.
    filename = Map.get(msg, :filename, state.config.filename.value)
    width = Map.get(msg, :width, state.config.width.value)
    height = Map.get(msg, :height, state.config.height.value)
    metering = Map.get(msg, :metering, state.config.metering.value)

    horizontal_flip =
      case Map.get(msg, :horizontal_flip, state.config.horizontal_flip.value) do
        "true" -> "-hf"
        _ -> ""
      end

    vertical_flip =
      case Map.get(msg, :vertical_flip, state.config.vertical_flip.value) do
        "true" -> "-vf"
        _ -> ""
      end

    cmd =
      [
        "/usr/bin/raspistill",
        "-dt",
        "-v",
        "-o",
        filename,
        "-w",
        width,
        "-h",
        height,
        "-mm",
        metering,
        "-ex",
        "sports",
        "-awb",
        "cloud",
        "--nopreview",
        horizontal_flip,
        vertical_flip,
        "--timeout",
        "100"
      ]
      |> Enum.join(" ")

    # Fixed: previously bound `res` and `out:` which were never used and
    # produced "unused variable" compiler warnings.
    %Result{status: status} = Porcelain.shell(cmd)
    Logger.info("#{__MODULE__} raspistill return status: #{inspect(status)}")

    # The shell command's exit status becomes the outgoing payload.
    out = Map.put(msg, :payload, status)
    {out, state}
  end

  def handle_msg(msg, state) do
    Logger.warn(
      "UNHANDLED MSG node: #{state.node_id} #{get_in(state.config, [:name, :value])} msg: #{
        inspect(msg)
      }"
    )

    {nil, state}
  end
end
|
lib/exred_node_rpiphoto.ex
| 0.797951
| 0.716157
|
exred_node_rpiphoto.ex
|
starcoder
|
defmodule ESpec.ExampleRunner do
  @moduledoc """
  Contains all the functions need to run a 'spec' example.
  """

  # Raised when an after-example (finally) action fails, so the wrapped
  # original error can be carried through the rescue clauses of run_example/2.
  defmodule(AfterExampleError, do: defexception(example_error: nil, message: nil))

  # Keys under which before/finally blocks may return data to be merged into
  # the shared assigns map (see fill_dict/2).
  @dict_keys [:ok, :shared]

  alias ESpec.Example
  alias ESpec.AssertionError
  alias ESpec.Output

  @doc """
  Runs one specific example and returns an `%ESpec.Example{}` struct.
  The sequence in the following:
  - evaluates 'befores' and 'lets'. 'befores' fill the map for `shared`, 'lets' can access `shared` ;
  - runs 'example block';
  - evaluate 'finally's'
  The struct has fields `[status: :success, result: result]` or `[status: failed, error: error]`
  The `result` is the value returned by example block.
  `error` is a `%ESpec.AssertionError{}` struct.
  """
  def run(example) do
    contexts = Example.extract_contexts(example)

    cond do
      example.opts[:skip] || Enum.any?(contexts, & &1.opts[:skip]) ->
        run_skipped(example)

      example.opts[:pending] ->
        run_pending(example)

      true ->
        spawn_example(example, :os.timestamp())
    end
  end

  # Runs the example in a supervised, unlinked task so a crash of the example
  # process does not take down the runner; waits without a timeout.
  defp spawn_example(example, start_time) do
    Task.Supervisor.async_nolink(ESpec.TaskSupervisor, fn -> run_example(example, start_time) end)
    |> Task.yield(:infinity)
    |> check_example_task(example, start_time)
  end

  defp check_example_task({:ok, example_result}, _, _), do: example_result

  # The example process exited abnormally: convert the exit reason into a
  # failure without running the after actions (there is no live state left).
  defp check_example_task({:exit, reason}, example, start_time) do
    error = %AssertionError{message: "Process exited with reason: #{inspect(reason)}"}
    do_rescue(example, %{}, start_time, error, false)
  end

  defp run_example(example, start_time) do
    {assigns, example} = before_example_actions(example)

    try do
      try_run(example, assigns, start_time)
    rescue
      error in [AssertionError] ->
        do_rescue(example, assigns, start_time, error)

      error in [AfterExampleError] ->
        # The after actions already ran and failed; report the wrapped error
        # but do not run them a second time.
        do_rescue(example, assigns, start_time, error.example_error, false)

      other_error ->
        error = %AssertionError{message: format_other_error(other_error, __STACKTRACE__)}
        do_rescue(example, assigns, start_time, error)
    catch
      what, value -> do_catch(example, assigns, start_time, what, value)
    after
      unload_mocks()
    end
  end

  defp initial_shared(example), do: Example.extract_options(example)

  # Builds the initial assigns and runs the configured global `before`
  # callback followed by the example's own befores and lets.
  defp before_example_actions(example) do
    {initial_shared(example), example}
    |> run_config_before
    |> run_befores_and_lets
  end

  defp try_run(example, assigns, start_time) do
    # A failure recorded during the before actions is raised here so it goes
    # through the normal rescue path above.
    if example.status == :failure, do: raise(example.error)

    result =
      case apply(example.module, example.function, [assigns]) do
        {ESpec.ExpectTo, res} -> res
        res -> res
      end

    {_assigns, example} = after_example_actions(assigns, example)

    if example.status == :failure, do: raise(%AfterExampleError{example_error: example.error})

    duration = duration_in_ms(start_time, :os.timestamp())
    example = %Example{example | status: :success, result: result, duration: duration}
    Output.example_finished(example)
    example
  end

  # Handles `throw`/`exit` from the example body: mark as failure, report,
  # then still run the after actions (their result is intentionally ignored).
  defp do_catch(example, assigns, start_time, what, value) do
    duration = duration_in_ms(start_time, :os.timestamp())
    error = %AssertionError{message: format_catch(what, value)}
    example = %Example{example | status: :failure, error: error, duration: duration}
    Output.example_finished(example)
    after_example_actions(assigns, example)
    example
  end

  # Marks the example as failed; optionally runs the after actions (skipped
  # when they already ran or when the example process died).
  defp do_rescue(example, assigns, start_time, error, perform_after_example \\ true) do
    duration = duration_in_ms(start_time, :os.timestamp())
    example = %Example{example | status: :failure, error: error, duration: duration}
    Output.example_finished(example)
    if perform_after_example, do: after_example_actions(assigns, example)
    example
  end

  def after_example_actions(assigns, example) do
    {assigns, example}
    |> run_finallies
    |> run_config_finally
  end

  defp run_skipped(example) do
    example = %Example{example | status: :pending, result: Example.skip_message(example)}
    Output.example_finished(example)
    example
  end

  defp run_pending(example) do
    example = %Example{example | status: :pending, result: Example.pending_message(example)}
    Output.example_finished(example)
    example
  end

  # Runs the globally configured `before` callback (arity 0 or 1), merging
  # anything it returns into the assigns.
  defp run_config_before({assigns, example}) do
    func = ESpec.Configuration.get(:before)

    if func do
      fun =
        if is_function(func, 1) do
          fn -> {fill_dict(assigns, func.(assigns)), example} end
        else
          fn -> {fill_dict(assigns, func.()), example} end
        end

      call_with_rescue(fun, {assigns, example})
    else
      {assigns, example}
    end
  end

  # Registers lets, then folds the example's before blocks over the assigns.
  # The shared lets state is refreshed before each before and once at the end.
  defp run_befores_and_lets({assigns, example}) do
    ESpec.Let.Impl.clear_lets(example.module)

    Example.extract_lets(example)
    |> Enum.each(&ESpec.Let.Impl.run_before/1)

    {assigns, example} =
      Example.extract_befores(example)
      |> Enum.reduce({assigns, example}, fn before, {assigns, example} ->
        ESpec.Let.Impl.update_shared(assigns)
        fun = fn -> {do_run_before(before, assigns), example} end
        call_with_rescue(fun, {assigns, example})
      end)

    ESpec.Let.Impl.update_shared(assigns)
    {assigns, example}
  end

  # Runs `finally` blocks in reverse declaration order (innermost first).
  defp run_finallies({assigns, example}) do
    Example.extract_finallies(example)
    |> Enum.reverse()
    |> Enum.reduce({assigns, example}, fn finally, {map, example} ->
      fun = fn ->
        assigns = apply(finally.module, finally.function, [map])
        {fill_dict(map, assigns), example}
      end

      call_with_rescue(fun, {assigns, example})
    end)
  end

  defp run_config_finally({assigns, example}) do
    func = ESpec.Configuration.get(:finally)

    if func do
      run_config_finally({assigns, example}, func)
    else
      {assigns, example}
    end
  end

  # Invokes the globally configured `finally` callback (arity 0 or 1); its
  # return value is discarded — the assigns pass through unchanged.
  defp run_config_finally({assigns, example}, func) do
    fun = fn ->
      if is_function(func, 1), do: func.(assigns), else: func.()
      {assigns, example}
    end

    call_with_rescue(fun, {assigns, example})
  end

  # Runs a before/finally action, converting raises and throws into example
  # failures instead of crashing the runner.
  defp call_with_rescue(fun, {assigns, example}) do
    try do
      fun.()
    rescue
      any_error -> do_before(any_error, {assigns, example}, __STACKTRACE__)
    catch
      what, value -> do_catch(what, value, {assigns, example})
    end
  end

  # Only the first error is kept when the example has already failed.
  defp do_catch(what, value, {map, example}) do
    example =
      if example.error do
        example
      else
        error = %AssertionError{message: format_catch(what, value)}
        %Example{example | status: :failure, error: error}
      end

    {map, example}
  end

  # Same first-error-wins policy for raised exceptions in before/finally.
  defp do_before(error, {map, example}, stacktrace) do
    example =
      if example.error do
        example
      else
        error = %AssertionError{message: format_other_error(error, stacktrace)}
        %Example{example | status: :failure, error: error}
      end

    {map, example}
  end

  defp do_run_before(%ESpec.Before{} = before, map) do
    returned = apply(before.module, before.function, [map])
    fill_dict(map, returned)
  end

  # Merges `{:ok, data}` / `{:shared, data}` returns (keyword list or map)
  # into the assigns map; any other shape leaves the assigns unchanged.
  defp fill_dict(map, res) do
    case res do
      {key, list} when key in @dict_keys and (is_list(list) or is_map(list)) ->
        if (Keyword.keyword?(list) || is_map(list)) && Enumerable.impl_for(list) do
          Enum.reduce(list, map, fn {k, v}, a -> Map.put(a, k, v) end)
        else
          map
        end

      _ ->
        map
    end
  end

  defp unload_mocks, do: ESpec.Mock.unload()

  # Converts an :os.timestamp/0 pair difference to whole milliseconds.
  defp duration_in_ms(start_time, end_time) do
    div(:timer.now_diff(end_time, start_time), 1000)
  end

  defp format_other_error(error, stacktrace) do
    Exception.format_banner(:error, error) <> "\n" <> Exception.format_stacktrace(stacktrace)
  end

  defp format_catch(what, value), do: "#{what} #{inspect(value)}"
end
|
lib/espec/example_runner.ex
| 0.740456
| 0.621282
|
example_runner.ex
|
starcoder
|
defmodule TimeZoneInfo.TimeZoneDatabase do
  @moduledoc """
  Implementation of the `Calendar.TimeZoneDatabase` behaviour.
  """

  @behaviour Calendar.TimeZoneDatabase

  alias TimeZoneInfo.{
    DataStore,
    GregorianSeconds,
    IanaDateTime,
    IsoDays,
    Transformer.RuleSet
  }

  @compile {:inline, gap: 2, convert: 1, to_wall: 1, to_wall: 2}

  @impl true
  # Etc/UTC never changes; short-circuit with a constant, unbounded period.
  def time_zone_periods_from_wall_datetime(_, "Etc/UTC"),
    do: {:ok, %{std_offset: 0, utc_offset: 0, zone_abbr: "UTC", wall_period: {:min, :max}}}

  def time_zone_periods_from_wall_datetime(
        %NaiveDateTime{calendar: Calendar.ISO} = naive_datetime,
        time_zone
      ) do
    naive_datetime
    |> GregorianSeconds.from_naive()
    |> periods_from_wall_gregorian_seconds(time_zone, naive_datetime)
  end

  # Non-ISO calendars are converted to ISO first, then re-dispatched.
  def time_zone_periods_from_wall_datetime(%NaiveDateTime{} = naive_datetime, time_zone) do
    naive_datetime
    |> NaiveDateTime.convert!(Calendar.ISO)
    |> time_zone_periods_from_wall_datetime(time_zone)
  end

  @impl true
  def time_zone_period_from_utc_iso_days(_, "Etc/UTC"),
    do: {:ok, %{std_offset: 0, utc_offset: 0, zone_abbr: "UTC", wall_period: {:min, :max}}}

  def time_zone_period_from_utc_iso_days(iso_days, time_zone) do
    iso_days
    |> IsoDays.to_gregorian_seconds()
    |> period_from_utc_gregorian_seconds(time_zone, iso_days)
  end

  # Looks up the transitions for `time_zone` and resolves the period(s) for a
  # wall-clock timestamp; the result may be `:ok`, `:ambiguous`, or `:gap`.
  defp periods_from_wall_gregorian_seconds(at_wall_seconds, time_zone, at_wall_date_time) do
    case DataStore.get_transitions(time_zone) do
      {:ok, transitions} ->
        transitions
        |> find_transitions(at_wall_seconds)
        |> to_periods(at_wall_seconds, at_wall_date_time)

      {:error, :transitions_not_found} ->
        {:error, :time_zone_not_found}
    end
  end

  # UTC timestamps are unambiguous: a single matching transition suffices.
  defp period_from_utc_gregorian_seconds(gregorian_seconds, time_zone, date_time) do
    case DataStore.get_transitions(time_zone) do
      {:ok, transitions} ->
        transitions
        |> find_transition(gregorian_seconds)
        |> to_period(date_time)

      {:error, :transitions_not_found} ->
        {:error, :time_zone_not_found}
    end
  end

  # Returns the period of the first transition whose start is at or before
  # `timestamp` (transitions are searched front to back).
  defp find_transition(transitions, timestamp) do
    Enum.find_value(transitions, fn {at, period} ->
      with true <- at <= timestamp, do: period
    end)
  end

  # Walks the transition list until a transition at or before `at_wall` is
  # found and returns it together with its neighbours as
  # `{older, matching, newer}` (`:none` when absent) — the neighbours are
  # needed to detect gaps and overlaps in wall time.
  defp find_transitions([{at_utc, _} = transition | transitions], at_wall, last \\ :none) do
    case at_utc > at_wall do
      false -> {head(transitions, :none), transition, last}
      true -> find_transitions(transitions, at_wall, transition)
    end
  end

  defp head([], default), do: default
  defp head(list, _), do: hd(list)

  # A period that still references named rules: expand the rules into
  # concrete transitions for the year in question and resolve again.
  defp to_period(
         {utc_offset, rule_name, {_, _} = format},
         {_, {_, _}} = iso_days
       ) do
    case DataStore.get_rules(rule_name) do
      {:ok, rules} ->
        rules
        |> transitions(utc_offset, format, IsoDays.to_year(iso_days))
        |> find_transition(IsoDays.to_gregorian_seconds(iso_days))
        |> to_period(nil)

      {:error, :rules_not_found} ->
        {:error, :time_zone_not_found}
    end
  end

  defp to_period({utc_offset, std_offset, zone_abbr, wall_period}, _) do
    {:ok,
     %{
       utc_offset: utc_offset,
       std_offset: std_offset,
       zone_abbr: zone_abbr,
       wall_period: wall_period
     }}
  end

  # The zone has only one transition at all: a single, unbounded period.
  defp to_periods({:none, {_at, {utc_offset, std_offset, zone_abbr, _wall_period}}, :none}, _, _) do
    {:ok,
     %{
       utc_offset: utc_offset,
       std_offset: std_offset,
       zone_abbr: zone_abbr,
       wall_period: {:min, :max}
     }}
  end

  # Missing neighbour on either side: fall back to the two-transition case.
  defp to_periods({:none, a, b}, at_wall, at_wall_datetime) do
    to_periods({a, b}, at_wall, at_wall_datetime)
  end

  defp to_periods({a, b, :none}, at_wall, at_wall_datetime) do
    to_periods({a, b}, at_wall, at_wall_datetime)
  end

  # The matching period still references named rules: calculate transitions.
  defp to_periods(
         {_transition, {_, {utc_offset, rule_name, format}}},
         at_wall,
         at_wall_datetime
       )
       when is_binary(rule_name) do
    calculate_periods(utc_offset, rule_name, format, at_wall, at_wall_datetime)
  end

  # Two concrete transitions: classify `at_wall` as unambiguous, ambiguous
  # (wall clock repeated across the switch) or in a gap (wall clock skipped).
  defp to_periods({transition_a, transition_b}, at_wall, _at_wall_datetime) do
    at_wall_b = to_wall(transition_b)
    at_wall_ba = to_wall(transition_b, transition_a)

    cond do
      at_wall_b <= at_wall && at_wall < at_wall_ba ->
        {:ambiguous, convert(transition_a), convert(transition_b)}

      at_wall_ba <= at_wall && at_wall < at_wall_b ->
        gap(transition_a, transition_b)

      at_wall < at_wall_b ->
        {:ok, convert(transition_a)}

      true ->
        {:ok, convert(transition_b)}
    end
  end

  defp to_periods(
         {_transition_a, _transition_b, {_, {utc_offset, rule_name, format}}},
         at_wall,
         at_wall_datetime
       )
       when is_binary(rule_name) and is_integer(utc_offset) do
    calculate_periods(utc_offset, rule_name, format, at_wall, at_wall_datetime)
  end

  # Three concrete transitions: test against the newest boundary first; the
  # clause order is what makes the ambiguity/gap classification correct.
  defp to_periods({transition_a, transition_b, transition_c}, at_wall, _at_wall_datetime) do
    at_wall_ba = to_wall(transition_b, transition_a)
    at_wall_b = to_wall(transition_b)
    at_wall_cb = to_wall(transition_c, transition_b)
    at_wall_c = to_wall(transition_c)

    cond do
      at_wall >= at_wall_c ->
        if at_wall < at_wall_cb,
          do: {:ambiguous, convert(transition_b), convert(transition_c)},
          else: {:ok, convert(transition_c)}

      at_wall >= at_wall_cb ->
        gap(transition_b, transition_c)

      at_wall >= at_wall_b ->
        if at_wall < at_wall_ba,
          do: {:ambiguous, convert(transition_a), convert(transition_b)},
          else: {:ok, convert(transition_b)}

      at_wall >= at_wall_ba ->
        gap(transition_a, transition_b)

      true ->
        {:ok, convert(transition_a)}
    end
  end

  # Expands named rules into concrete transitions around the requested year
  # and resolves the wall timestamp against them.
  defp calculate_periods(utc_offset, rule_name, format, at_wall_seconds, at_wall_datetime) do
    case DataStore.get_rules(rule_name) do
      {:ok, rules} ->
        rules
        |> transitions(utc_offset, format, at_wall_datetime.year)
        |> find_transitions(at_wall_seconds)
        |> to_periods(at_wall_seconds, at_wall_datetime)

      {:error, :rules_not_found} ->
        {:error, :time_zone_not_found}
    end
  end

  defp transitions(rules, utc_offset, format, year) do
    rules
    |> to_rule_set(year)
    |> RuleSet.transitions(utc_offset, format)
  end

  # NOTE(review): the declared return type looks off — the function builds
  # `{gregorian_seconds, {time_standard, std_offset, letters}}` tuples for
  # `RuleSet.transitions/3`, which may not match `TimeZoneInfo.transition()`;
  # confirm against the project's type definitions.
  @spec to_rule_set([TimeZoneInfo.rule()], Calendar.year()) :: [TimeZoneInfo.transition()]
  defp to_rule_set(rules, year) do
    # Expand each rule for the previous, current and next year, then sort by
    # the gregorian second at which it takes effect.
    Enum.flat_map(rules, fn {{month, day, time}, time_standard, std_offset, letters} ->
      Enum.into((year - 1)..(year + 1), [], fn year ->
        at = IanaDateTime.to_gregorian_seconds(year, month, day, time)
        {at, {time_standard, std_offset, letters}}
      end)
    end)
    |> Enum.sort_by(fn {at, _} -> at end)
  end

  # Builds the `:gap` result: both bounding periods together with the wall
  # instants that delimit the gap.
  defp gap(
         {_, {utc_offset_a, std_offset_a, zone_abbr_a, {_, limit_a} = wall_period_a}},
         {_, {utc_offset_b, std_offset_b, zone_abbr_b, {limit_b, _} = wall_period_b}}
       ) do
    {
      :gap,
      {
        %{
          utc_offset: utc_offset_a,
          std_offset: std_offset_a,
          zone_abbr: zone_abbr_a,
          wall_period: wall_period_a
        },
        limit_a
      },
      {
        %{
          utc_offset: utc_offset_b,
          std_offset: std_offset_b,
          zone_abbr: zone_abbr_b,
          wall_period: wall_period_b
        },
        limit_b
      }
    }
  end

  # Wall time of a transition using its own offsets.
  defp to_wall({at, {utc_offset, std_offset, _zone_abbr, _wall_period}}),
    do: at + utc_offset + std_offset

  # Wall time of a transition's instant using another transition's offsets
  # (i.e. how the instant reads on the other period's clock).
  defp to_wall(
         {at, {_, _, _, _}},
         {_, {utc_offset, std_offset, _zone_abbr, _wall_period}}
       ),
       do: at + utc_offset + std_offset

  defp convert({_, {utc_offset, std_offset, zone_abbr, wall_period}}),
    do: %{
      utc_offset: utc_offset,
      std_offset: std_offset,
      zone_abbr: zone_abbr,
      wall_period: wall_period
    }
end
|
lib/time_zone_info/time_zone_database.ex
| 0.893733
| 0.530115
|
time_zone_database.ex
|
starcoder
|
defmodule Mix.Tasks.Format do
use Mix.Task
@shortdoc "Formats the given files/patterns"
@moduledoc """
Formats the given files and patterns.
mix format mix.exs "lib/**/*.{ex,exs}" "test/**/*.{ex,exs}"
If any of the files is `-`, then the output is read from stdin
and written to stdout.
## Formatting options
Formatting is done with the `Code.format_string!/2` function.
For complete list of formatting options please refer to its
description.
A `.formatter.exs` file can also be defined for customizing input
files and the formatter itself.
## Task-specific options
* `--check-formatted` - check that the file is already formatted.
This is useful in pre-commit hooks and CI scripts if you want to
reject contributions with unformatted code. However, keep in mind,
that the formatting output may differ between Elixir versions as
improvements and fixes are applied to the formatter.
* `--check-equivalent` - check if the file after formatting has the
same AST. If the ASTs are not equivalent, it is a bug in the code
formatter. This option is recommended if you are automatically
formatting files.
* `--dry-run` - do not save files after formatting.
* `--dot-formatter` - the file with formatter configuration.
Defaults to `.formatter.exs` if one is available, see next section.
If any of the `--check-*` flags are given and a check fails, the formatted
contents won't be written to disk nor printed to stdout.
## `.formatter.exs`
The formatter will read a `.formatter.exs` in the current directory for
formatter configuration. It should return a keyword list with any of the
options supported by `Code.format_string!/2`.
The `.formatter.exs` also supports other options:
* `:inputs` (a list of paths and patterns) - specifies the default inputs
to be used by this task. For example, `["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]`.
* `:import_deps` (a list of dependencies as atoms) - specifies a list
of dependencies whose formatter configuration will be imported.
See the "Importing dependencies configuration" section below for more
information.
* `:export` (a keyword list) - specifies formatter configuration to be exported. See the
"Importing dependencies configuration" section below.
## When to format code
We recommend developers to format code directly in their editors. Either
automatically on save or via an explicit command/key binding. If such option
is not yet available in your editor of choice, adding the required integration
is relatively simple as it is a matter of invoking
cd $project && mix format $file
where `$file` refers to the current file and `$project` is the root of your
project.
It is also possible to format code across the whole project by passing a list
of patterns and files to `mix format`, as showed at the top of this task
documentation. This list can also be set in the `.formatter.exs` under the
`:inputs` key.
## Importing dependencies configuration
This task supports importing formatter configuration from dependencies.
A dependency that wants to export formatter configuration needs to have a `.formatter.exs` file
at the root of the project. In this file, the dependency can export a `:export` option with
configuration to export. For now, only one option is supported under `:export`:
`:locals_without_parens` (whose value has the same shape as the value of the
`:locals_without_parens` in `Code.format_string!/2`).
The functions listed under `:locals_without_parens` in the `:export` option of a dependency
can be imported in a project by listing that dependency in the `:import_deps`
option of the formatter configuration file of the project.
For example, consider I have a project `my_app` that depends on `my_dep`.
`my_dep` wants to export some configuration, so `my_dep/.formatter.exs`
would look like this:
# my_dep/.formatter.exs
[
# Regular formatter configuration for my_dep
# ...
export: [
locals_without_parens: [some_dsl_call: 2, some_dsl_call: 3]
]
]
In order to import configuration, `my_app`'s `.formatter.exs` would look like
this:
# my_app/.formatter.exs
[
import_deps: [:my_dep]
]
"""
@switches [
check_equivalent: :boolean,
check_formatted: :boolean,
dot_formatter: :string,
dry_run: :boolean
]
@deps_manifest "cached_formatter_deps"
# Entry point for the Mix task. Parses CLI switches, loads formatter
# configuration (the local .formatter.exs plus configuration exported by
# dependencies listed under :import_deps), then formats every requested
# file concurrently and reports the aggregated outcome via check!/1.
def run(args) do
  {opts, args} = OptionParser.parse!(args, strict: @switches)
  formatter_opts = eval_dot_formatter(opts)
  # Merge :locals_without_parens exported by deps listed in :import_deps.
  formatter_opts = fetch_deps_opts(formatter_opts)

  args
  |> expand_args(formatter_opts)
  # Files are independent, so format them in parallel; ordered: false is
  # safe because statuses are folded into a position-independent accumulator.
  |> Task.async_stream(&format_file(&1, opts, formatter_opts), ordered: false, timeout: 30000)
  |> Enum.reduce({[], [], []}, &collect_status/2)
  |> check!()
end
# Evaluates the dot-formatter file (if one can be located) and returns the
# keyword list it defines; falls back to an empty configuration otherwise.
defp eval_dot_formatter(opts) do
  with {:ok, path} <- dot_formatter(opts) do
    eval_file_with_keyword_list(path)
  else
    :error -> []
  end
end
# Resolves which formatter configuration file to use: an explicit
# --dot-formatter flag wins, then a .formatter.exs in the current
# directory; returns :error when neither is available.
defp dot_formatter(opts) do
  explicit = opts[:dot_formatter]

  if explicit do
    {:ok, explicit}
  else
    if File.regular?(".formatter.exs") do
      {:ok, ".formatter.exs"}
    else
      :error
    end
  end
end
# This function reads exported configuration from the imported dependencies
# and deals with caching the result of reading such configuration in a
# manifest file, so the dependency .formatter.exs files are only re-evaluated
# when they (or the project configuration) change.
defp fetch_deps_opts(formatter_opts) do
  deps = Keyword.get(formatter_opts, :import_deps, [])

  cond do
    # Nothing to import: return the configuration unchanged.
    deps == [] ->
      formatter_opts

    is_list(deps) ->
      # Since we have dependencies listed, we write the manifest even if those
      # dependencies don't export anything so that we avoid lookups every time.
      deps_manifest = Path.join(Mix.Project.manifest_path(), @deps_manifest)

      dep_parenless_calls =
        if deps_dot_formatters_stale?(deps_manifest) do
          # Cache miss: evaluate each dep's .formatter.exs and persist the result.
          dep_parenless_calls = eval_deps_opts(deps)
          write_deps_manifest(deps_manifest, dep_parenless_calls)
          dep_parenless_calls
        else
          read_deps_manifest(deps_manifest)
        end

      # Append the imported calls to any :locals_without_parens the project
      # already configured (or install them if the key was absent).
      Keyword.update(
        formatter_opts,
        :locals_without_parens,
        dep_parenless_calls,
        &(&1 ++ dep_parenless_calls)
      )

    true ->
      Mix.raise("Expected :import_deps to return a list of dependencies, got: #{inspect(deps)}")
  end
end
# True when the cached deps manifest is older than .formatter.exs or any of
# the project's configuration files, meaning the cache must be rebuilt.
defp deps_dot_formatters_stale?(deps_manifest) do
  Mix.Utils.stale?([".formatter.exs" | Mix.Project.config_files()], [deps_manifest])
end

# Reads the cached parenless-calls list back from the manifest.
# Note: binary_to_term/1 is only ever applied to a manifest this task wrote
# itself (see write_deps_manifest/2), so the input is trusted.
defp read_deps_manifest(deps_manifest) do
  deps_manifest |> File.read!() |> :erlang.binary_to_term()
end

# Serializes the parenless-calls list into the manifest, creating the
# enclosing directory if needed.
defp write_deps_manifest(deps_manifest, parenless_calls) do
  File.mkdir_p!(Path.dirname(deps_manifest))
  File.write!(deps_manifest, :erlang.term_to_binary(parenless_calls))
end
# Collects the :locals_without_parens exported by each imported dependency.
# Each dep must resolve to an on-disk path; deps without a .formatter.exs or
# without an :export section simply contribute nothing. Duplicates across
# deps are removed via uniq: true.
defp eval_deps_opts(deps) do
  deps_paths = Mix.Project.deps_paths()

  for dep <- deps,
      # Bare assignments in the comprehension thread intermediate values;
      # File.regular?/1 acts as a filter that skips deps without the file.
      dep_path = assert_valid_dep_and_fetch_path(dep, deps_paths),
      dep_dot_formatter = Path.join(dep_path, ".formatter.exs"),
      File.regular?(dep_dot_formatter),
      dep_opts = eval_file_with_keyword_list(dep_dot_formatter),
      parenless_call <- dep_opts[:export][:locals_without_parens] || [],
      uniq: true,
      do: parenless_call
end
# Returns the filesystem path for `dep`, raising a descriptive error when the
# dependency is unknown (not in mix.exs) or not yet fetched (no directory).
defp assert_valid_dep_and_fetch_path(dep, deps_paths) when is_atom(dep) do
  case Map.fetch(deps_paths, dep) do
    {:ok, path} ->
      if File.dir?(path) do
        path
      else
        Mix.raise(
          "Unavailable dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <>
            "The dependency cannot be found in the filesystem, please run mix deps.get and try again"
        )
      end

    :error ->
      Mix.raise(
        "Unknown dependency #{inspect(dep)} given to :import_deps in the formatter configuration. " <>
          "The dependency is not listed in your mix.exs file"
      )
  end
end

# Anything that is not an atom is a configuration mistake.
defp assert_valid_dep_and_fetch_path(dep, _deps_paths) do
  Mix.raise("Dependencies in :import_deps should be atoms, got: #{inspect(dep)}")
end
# Evaluates `path` as Elixir code and returns its result, raising when the
# file does not evaluate to a keyword list.
defp eval_file_with_keyword_list(path) do
  {opts, _bindings} = Code.eval_file(path)

  if Keyword.keyword?(opts) do
    opts
  else
    Mix.raise("Expected #{inspect(path)} to return a keyword list, got: #{inspect(opts)}")
  end
end
# With no CLI files/patterns, fall back to the :inputs key from the
# formatter configuration; raise when neither source provides anything.
defp expand_args([], formatter_opts) do
  if inputs = formatter_opts[:inputs] do
    expand_files_and_patterns(List.wrap(inputs), ".formatter.exs")
  else
    Mix.raise(
      "Expected one or more files/patterns to be given to mix format " <>
        "or for a .formatter.exs to exist with an :inputs key"
    )
  end
end

# CLI files/patterns take precedence over the configuration's :inputs.
defp expand_args(files_and_patterns, _formatter_opts) do
  expand_files_and_patterns(files_and_patterns, "command line")
end
# Expands each file or wildcard pattern into concrete files (deduplicated,
# first occurrence wins). `context` names where the patterns came from
# (".formatter.exs" or "command line") for the error message. Raises when
# nothing matched, since silently formatting zero files would hide mistakes.
defp expand_files_and_patterns(files_and_patterns, context) do
  expanded =
    files_and_patterns
    |> Enum.flat_map(&stdin_or_wildcard/1)
    |> Enum.uniq()

  case expanded do
    [] ->
      Mix.raise(
        "Could not find a file to format. The files/patterns from #{context} " <>
          "did not point to any existing file. Got: #{inspect(files_and_patterns)}"
      )

    files ->
      files
  end
end
# "-" is the conventional marker for standard input; anything else is
# treated as a glob and expanded to matching paths.
defp stdin_or_wildcard("-"), do: [:stdin]
defp stdin_or_wildcard(path), do: Path.wildcard(path)

# Reads the whole of stdin into one binary; the `file: "stdin"` metadata is
# passed to the formatter so error messages name the source.
defp read_file(:stdin) do
  {IO.stream(:stdio, :line) |> Enum.to_list() |> IO.iodata_to_binary(), file: "stdin"}
end

defp read_file(file) do
  {File.read!(file), file: file}
end
# Formats a single file (or stdin) according to the task options, returning
# a status atom/tuple that collect_status/2 folds into the final report:
# :ok | {:not_equivalent, file} | {:not_formatted, file}
# | {:exit, file, exception, stacktrace}.
defp format_file(file, task_opts, formatter_opts) do
  {input, extra_opts} = read_file(file)
  # The formatter returns iodata without a trailing newline, so append one.
  output = IO.iodata_to_binary([Code.format_string!(input, extra_opts ++ formatter_opts), ?\n])

  check_equivalent? = Keyword.get(task_opts, :check_equivalent, false)
  check_formatted? = Keyword.get(task_opts, :check_formatted, false)
  dry_run? = Keyword.get(task_opts, :dry_run, false)

  cond do
    check_equivalent? and not equivalent?(input, output) ->
      {:not_equivalent, file}

    check_formatted? ->
      if input == output, do: :ok, else: {:not_formatted, file}

    dry_run? ->
      :ok

    true ->
      write_or_print(file, input, output)
  end
rescue
  # Any exception (e.g. a syntax error from Code.format_string!/2) is
  # captured with its stacktrace so check!/1 can reraise it with context.
  # NOTE(review): System.stacktrace/0 is deprecated in later Elixir versions
  # in favor of __STACKTRACE__ — confirm the target Elixir version before
  # modernizing.
  exception ->
    stacktrace = System.stacktrace()
    {:exit, file, exception, stacktrace}
end
# Emits formatted output: stdin input is printed to stdout; on-disk files
# are rewritten only when formatting actually changed their contents.
# Always returns :ok.
defp write_or_print(:stdin, _input, output) do
  IO.write(output)
  :ok
end

defp write_or_print(file, input, output) do
  if input != output do
    File.write!(file, output)
  end

  :ok
end
# Folds each Task.async_stream result into a {exits, not_equivalent,
# not_formatted} accumulator of failures; successful files contribute nothing.
defp collect_status({:ok, :ok}, acc), do: acc

defp collect_status({:ok, {:exit, _, _, _} = exit}, {exits, not_equivalent, not_formatted}) do
  {[exit | exits], not_equivalent, not_formatted}
end

defp collect_status({:ok, {:not_equivalent, file}}, {exits, not_equivalent, not_formatted}) do
  {exits, [file | not_equivalent], not_formatted}
end

defp collect_status({:ok, {:not_formatted, file}}, {exits, not_equivalent, not_formatted}) do
  {exits, not_equivalent, [file | not_formatted]}
end
# Turns the accumulated statuses into the task's exit behavior: silence on
# full success, a reraise for crashed files, or a Mix.raise summarizing the
# offending files. Clause order gives crashes priority over check failures.
defp check!({[], [], []}) do
  :ok
end

defp check!({[{:exit, file, exception, stacktrace} | _], _not_equivalent, _not_formatted}) do
  # Reraising with the original stacktrace preserves the real failure site.
  Mix.shell().error("mix format failed for file: #{file}")
  reraise exception, stacktrace
end

defp check!({_exits, [_ | _] = not_equivalent, _not_formatted}) do
  Mix.raise("""
  mix format failed due to --check-equivalent.
  The following files were not equivalent:
  #{to_bullet_list(not_equivalent)}
  Please report this bug with the input files at github.com/elixir-lang/elixir/issues
  """)
end

defp check!({_exits, _not_equivalent, [_ | _] = not_formatted}) do
  Mix.raise("""
  mix format failed due to --check-formatted.
  The following files were not formatted:
  #{to_bullet_list(not_formatted)}
  """)
end
# Renders a list of file names as an indented, newline-separated bullet list
# for inclusion in error messages.
defp to_bullet_list(files) do
  Enum.map_join(files, "\n", &"  * #{&1}")
end

# True when the formatter judges input and output to be semantically
# equivalent programs (used by --check-equivalent).
defp equivalent?(input, output) do
  Code.Formatter.equivalent(input, output) == :ok
end
end
|
lib/mix/lib/mix/tasks/format.ex
| 0.882187
| 0.535949
|
format.ex
|
starcoder
|
defmodule Surface.Catalogue.Data do
  @moduledoc """
  Experimental module that provides conveniences for manipulating data
  in Examples and Playgrounds.
  Provide wrappers around built-in functions like `get_in/2` and `update_in/3`
  using a shorter syntax for accessors.
  ## Accessor Mapping
  * `[_]`: `Access.all/0`
  * `[fun]`: `Access.filter(fun)`
  * `[index]`: `Access.at(index)` (Shorthand for `[index: i]`)
  * `[index: i]`: `Access.at(i)`
  * `[key: k]`: `Access.key(k)`
  * `[first..last]`: `Surface.Catalogue.Data.slice(first..last)`
  ## Example
  Data.get(props.lists[_].cards[& &1.id == "Card_1"].tags[-1].name)
  The code above will be translated to:
  get_in(props, [:lists, Access.all, :cards, Access.filter(& &1.id == "Card_1"), :tags, Access.at(-1), :name])
  """

  @doc """
  Generates a short random id.
  """
  def random_id(size \\ 6) do
    # encode32 inflates the byte count, so truncate back to `size` chars.
    :crypto.strong_rand_bytes(size)
    |> Base.encode32(case: :lower)
    |> binary_part(0, size)
  end

  @doc """
  Gets an existing value from the given nested structure.
  Raises an error if none or more than one value is found.
  """
  defmacro get!(path) do
    {subject, selector} = split_path(path)

    quote do
      unquote(__MODULE__).__get__!(unquote(subject), unquote(selector))
    end
  end

  @doc """
  Gets a value from the given nested structure.
  A wrapper around `get_in/2`
  """
  defmacro get(path) do
    {subject, selector} = split_path(path)

    quote do
      get_in(unquote(subject), unquote(selector))
    end
  end

  @doc """
  Gets a value and updates a given nested structure.
  A wrapper around `get_and_update_in/3`
  """
  defmacro get_and_update(path, fun) do
    {subject, selector} = split_path(path)

    quote do
      get_and_update_in(unquote(subject), unquote(selector), unquote(fun))
    end
  end

  @doc """
  Pops an item from the given nested structure.
  A wrapper around `pop_in/2`
  """
  defmacro pop(path) do
    {subject, selector} = split_path(path)

    quote do
      pop_in(unquote(subject), unquote(selector))
    end
  end

  @doc """
  Updates an item in the given nested structure.
  A wrapper around `update_in/2`
  """
  defmacro update(path, fun) do
    {subject, selector} = split_path(path)

    quote do
      update_in(unquote(subject), unquote(selector), unquote(fun))
    end
  end

  @doc """
  Deletes an item from the given nested structure.
  """
  defmacro delete(path) do
    {subject, selector} = split_path(path)

    quote do
      unquote(__MODULE__).__delete__(unquote(subject), unquote(selector))
    end
  end

  @doc """
  Inserts an item into a list in the given nested structure.
  """
  defmacro insert_at(path, pos, value) do
    {subject, selector} = split_path(path)

    quote do
      unquote(__MODULE__).__insert_at__(
        unquote(subject),
        unquote(selector),
        unquote(pos),
        unquote(value)
      )
    end
  end

  @doc """
  Appends an item to a list in the given nested structure.
  """
  defmacro append(path, value) do
    {subject, selector} = split_path(path)

    quote do
      # Position -1 appends at the end (List.insert_at/3 semantics).
      unquote(__MODULE__).__insert_at__(unquote(subject), unquote(selector), -1, unquote(value))
    end
  end

  @doc """
  Prepends an item to a list in the given nested structure.
  """
  defmacro prepend(path, value) do
    {subject, selector} = split_path(path)

    quote do
      unquote(__MODULE__).__insert_at__(unquote(subject), unquote(selector), 0, unquote(value))
    end
  end

  # Runtime half of get!/1: flattens the selection result and asserts that
  # exactly one value matched.
  @doc false
  def __get__!(subject, selector) do
    case get_in(subject, selector) |> List.flatten() do
      [item] ->
        item

      [] ->
        raise "no value found"

      [_ | _] ->
        raise "more than one value found"
    end
  end

  # Runtime half of insert_at/append/prepend: the selector must point at a list.
  @doc false
  def __insert_at__(subject, selector, pos, value) do
    update_in(subject, selector, fn list ->
      List.insert_at(list, pos, value)
    end)
  end

  # Runtime half of delete/1: pops the selection and discards the popped value.
  @doc false
  def __delete__(subject, selector) do
    {_, list} = pop_in(subject, selector)
    list
  end

  # Maps a runtime accessor value to the corresponding Access function,
  # following the accessor table in the moduledoc.
  @doc false
  def access_fun(value) when is_function(value) do
    Access.filter(value)
  end

  def access_fun(value) when is_integer(value) do
    Access.at(value)
  end

  def access_fun(from..to = range) when is_integer(from) and is_integer(to) do
    slice(range)
  end

  def access_fun(value) do
    Access.key(value)
  end

  # Compile-time accessor translation for the cases that are decidable from
  # the AST alone; everything else defers to access_fun/1 at runtime.
  defp quoted_access_fun({:_, _, _}) do
    quote do
      Access.all()
    end
  end

  defp quoted_access_fun(key: value) do
    quote do
      Access.key(unquote(value))
    end
  end

  defp quoted_access_fun(index: value) do
    quote do
      Access.at(unquote(value))
    end
  end

  defp quoted_access_fun(value) do
    quote do
      unquote(__MODULE__).access_fun(unquote(value))
    end
  end

  # Builds an Access-compatible function selecting a sub-range of a list,
  # supporting both :get and :get_and_update traversals.
  def slice(range) do
    fn op, data, next -> slice(op, data, range, next) end
  end

  defp slice(:get, data, range, next) when is_list(data) do
    data |> Enum.slice(range) |> Enum.map(next)
  end

  defp slice(:get_and_update, data, range, next) when is_list(data) do
    # Index -1 is a sentinel meaning "range bounds not normalized yet".
    get_and_update_slice(data, range, next, [], [], -1)
  end

  defp slice(_op, data, _range, _next) do
    raise "slice expected a list, got: #{inspect(data)}"
  end

  # Translates a negative range bound into its positive equivalent relative
  # to the list length (e.g. -1 -> length - 1).
  defp normalize_range_bound(value, list_length) do
    if value < 0 do
      value + list_length
    else
      value
    end
  end

  # Walks the list once, accumulating updated elements and gotten values in
  # reverse, then reverses both at the end.
  defp get_and_update_slice([], _range, _next, updates, gets, _index) do
    {:lists.reverse(gets), :lists.reverse(updates)}
  end

  # First call (index -1): normalize negative bounds, then restart at index 0.
  defp get_and_update_slice(list, from..to, next, updates, gets, -1) do
    list_length = length(list)
    from = normalize_range_bound(from, list_length)
    to = normalize_range_bound(to, list_length)
    get_and_update_slice(list, from..to, next, updates, gets, 0)
  end

  defp get_and_update_slice([head | rest], from..to = range, next, updates, gets, index) do
    new_index = index + 1

    if index >= from and index <= to do
      case next.(head) do
        {get, update} ->
          get_and_update_slice(rest, range, next, [update | updates], [get | gets], new_index)

        # :pop drops the element from the updated list but still reports it.
        :pop ->
          get_and_update_slice(rest, range, next, updates, [head | gets], new_index)
      end
    else
      # Outside the range: keep the element untouched.
      get_and_update_slice(rest, range, next, [head | updates], gets, new_index)
    end
  end

  # Splits a quoted path like `props.lists[_]` into the root expression
  # (subject) and the list of quoted accessors.
  # NOTE(review): the "test" literal is the `kind` used in unnest error
  # messages — it looks like a leftover placeholder; confirm intended wording.
  defp split_path(path) do
    {[subject | rest], _} = unnest(path, [], true, "test")
    {subject, convert_selector(rest)}
  end

  # Converts the tagged accessor list produced by unnest into quoted Access
  # calls: dot access becomes Access.key!/1, bracket access is translated via
  # quoted_access_fun/1.
  defp convert_selector(list) do
    Enum.map(list, fn
      {:map, key} ->
        quote do
          Access.key!(unquote(key))
        end

      {:access, expr} ->
        quoted_access_fun(expr)
    end)
  end

  def unnest(path) do
    unnest(path, [], true, "test")
  end

  # Bracket access (`expr[key]`) compiles to Access.get/2 — record it and
  # keep unwrapping. all_map? becomes false since not everything is dot access.
  defp unnest({{:., _, [Access, :get]}, _, [expr, key]}, acc, _all_map?, kind) do
    unnest(expr, [{:access, key} | acc], false, kind)
  end

  # Dot access (`expr.key`) on anything that is not a module alias — record a
  # :map accessor and keep unwrapping.
  defp unnest({{:., _, [expr, key]}, _, []}, acc, all_map?, kind)
       when is_tuple(expr) and :erlang.element(1, expr) != :__aliases__ and
              :erlang.element(1, expr) != :__MODULE__ do
    unnest(expr, [{:map, key} | acc], all_map?, kind)
  end

  # Reached the root without having collected any accessor: the user passed a
  # bare expression, which is not a valid path.
  defp unnest(other, [], _all_map?, kind) do
    raise ArgumentError,
          "expected expression given to #{kind} to access at least one element, " <>
            "got: #{Macro.to_string(other)}"
  end

  # Reached the root with accessors collected: validate the root shape.
  defp unnest(other, acc, all_map?, kind) do
    case proper_start?(other) do
      true ->
        {[other | acc], all_map?}

      false ->
        raise ArgumentError,
              "expression given to #{kind} must start with a variable, local or remote call " <>
                "and be followed by an element access, got: #{Macro.to_string(other)}"
    end
  end

  # A valid path root is a remote call on a module, a variable/local call, or
  # any non-AST literal.
  defp proper_start?({{:., _, [expr, _]}, _, _args})
       when is_atom(expr)
       when :erlang.element(1, expr) == :__aliases__
       when :erlang.element(1, expr) == :__MODULE__,
       do: true

  defp proper_start?({atom, _, _args})
       when is_atom(atom),
       do: true

  defp proper_start?(other), do: not is_tuple(other)
end
|
lib/surface/catalogue/data.ex
| 0.880168
| 0.726353
|
data.ex
|
starcoder
|
defmodule DiscordBot.Model.Payload do
  @moduledoc """
  An object which wraps all gateway messages
  """

  use DiscordBot.Model.Serializable

  alias DiscordBot.Model.{Dispatch, Hello, Identify, Payload, StatusUpdate, VoiceState}

  defstruct [
    :opcode,
    :data,
    :sequence,
    :name
  ]

  @typedoc """
  The numeric opcode for the payload
  """
  @type opcode :: atom | number

  @typedoc """
  The body of the payload
  """
  @type data :: any | nil

  @typedoc """
  The sequence number, used for resumes/heartbeats
  """
  @type sequence :: number | nil

  @typedoc """
  The payload's event name, only for opcode 0
  """
  @type name :: String.t() | nil

  @type t :: %__MODULE__{
          opcode: opcode,
          data: data,
          sequence: sequence,
          name: name
        }

  # Encodes the struct using the wire-level Discord gateway field names
  # ("op"/"d"/"s"/"t"), converting the opcode atom back to its numeric form.
  defimpl Poison.Encoder, for: __MODULE__ do
    @spec encode(Payload.t(), Poison.Encoder.options()) :: iodata
    def encode(payload, options) do
      %{opcode: opcode, data: data, sequence: sequence, name: name} = payload

      Poison.Encoder.Map.encode(
        %{
          "op" => Payload.opcode_from_atom(opcode),
          "d" => data,
          "s" => sequence,
          "t" => name
        },
        options
      )
    end
  end

  @doc """
  Constructs a payload containing only an opcode, `opcode`
  """
  @spec payload(atom | number) :: __MODULE__.t()
  def payload(opcode) do
    payload(opcode, nil, nil, nil)
  end

  @doc """
  Constructs a payload containing an opcode, `opcode` and a datagram, `data`
  """
  @spec payload(atom | number, any) :: __MODULE__.t()
  def payload(opcode, data) do
    payload(opcode, data, nil, nil)
  end

  @doc """
  Constructs a payload object given the opcode `opcode`, the datagram `data`,
  the sequence number `sequence`, and the event name `event_name`
  """
  @spec payload(atom | number, any, number | nil, String.t() | nil) :: __MODULE__.t()
  def payload(opcode, data, sequence, event_name) when is_number(opcode) do
    # Normalize numeric opcodes to atoms before building the struct.
    opcode
    |> atom_from_opcode()
    |> payload(data, sequence, event_name)
  end

  def payload(opcode, data, sequence, event_name) when is_atom(opcode) do
    %__MODULE__{
      opcode: opcode,
      data: data,
      sequence: sequence,
      name: event_name
    }
  end

  @doc """
  Builds the heartbeat message
  """
  @spec heartbeat(number | nil) :: __MODULE__.t()
  def heartbeat(sequence_number) do
    payload(:heartbeat, sequence_number)
  end

  @doc """
  Converts a plain map-represented JSON object `map` into a payload
  """
  @spec from_map(map) :: __MODULE__.t()
  def from_map(map) do
    opcode = map |> Map.get("op") |> atom_from_opcode

    %__MODULE__{
      opcode: opcode,
      # The "d" body is deserialized into the model matching the opcode.
      data: map |> Map.get("d") |> to_model(opcode, Map.get(map, "t")),
      sequence: Map.get(map, "s"),
      name: Map.get(map, "t")
    }
  end

  @doc """
  Converts a data object to the correct model given its opcode and event name
  """
  @spec to_model(any, atom, String.t()) :: struct
  def to_model(data, opcode, name) do
    case opcode do
      :dispatch -> data |> Dispatch.from_map(name)
      :heartbeat -> data
      :identify -> data |> Identify.from_map()
      :voice_state_update -> data |> VoiceState.from_map()
      :hello -> data |> Hello.from_map()
      :status_update -> data |> StatusUpdate.from_map()
      :heartbeat_ack -> nil
      # Unknown opcodes pass the data through untouched.
      _ -> data
    end
  end

  @doc """
  Converts a numeric Discord opcode to a corresponding
  descriptive atom
  """
  @spec atom_from_opcode(number) :: atom
  def atom_from_opcode(opcode) do
    # Note: opcode 5 is unassigned in the Discord gateway protocol, hence the
    # gap. Unknown opcodes yield nil (map lookup miss).
    %{
      0 => :dispatch,
      1 => :heartbeat,
      2 => :identify,
      3 => :status_update,
      4 => :voice_state_update,
      6 => :resume,
      7 => :reconnect,
      8 => :request_guild_members,
      9 => :invalid_session,
      10 => :hello,
      11 => :heartbeat_ack
    }[opcode]
  end

  @doc """
  Converts an atom describing a discord opcode to
  its corresponding numeric value
  """
  @spec opcode_from_atom(atom) :: number
  def opcode_from_atom(atom) do
    %{
      dispatch: 0,
      heartbeat: 1,
      identify: 2,
      status_update: 3,
      voice_state_update: 4,
      resume: 6,
      reconnect: 7,
      request_guild_members: 8,
      invalid_session: 9,
      hello: 10,
      heartbeat_ack: 11
    }[atom]
  end
end
|
apps/discordbot/lib/discordbot/model/payload.ex
| 0.867036
| 0.446676
|
payload.ex
|
starcoder
|
defmodule Solana.RPC.Request do
  @moduledoc """
  Functions for creating Solana JSON-RPC API requests.
  This client only implements the most common methods (see the function
  documentation below). If you need a method that's on the [full
  list](https://docs.solana.com/developing/clients/jsonrpc-api#json-rpc-api-reference)
  but is not implemented here, please open an issue or contact the maintainers.
  """

  @typedoc "JSON-RPC API request (pre-encoding)"
  @type t :: {String.t(), [String.t() | map]}

  @typedoc "JSON-RPC API request (JSON encoding)"
  @type json :: %{
          jsonrpc: String.t(),
          id: term,
          method: String.t(),
          params: list
        }

  @doc """
  Encodes a `t:Solana.RPC.Request.t/0` (or a list of them) in the [required
  format](https://docs.solana.com/developing/clients/jsonrpc-api#request-formatting).
  """
  @spec encode(requests :: [t]) :: [json]
  def encode(requests) when is_list(requests) do
    # For batches, the list index doubles as the JSON-RPC request id.
    requests
    |> Enum.with_index()
    |> Enum.map(&to_json_rpc/1)
  end

  @spec encode(request :: t) :: json
  def encode(request), do: to_json_rpc({request, 0})

  # A request with no params omits the "params" key entirely.
  defp to_json_rpc({{method, []}, id}) do
    %{jsonrpc: "2.0", id: id, method: method}
  end

  defp to_json_rpc({{method, params}, id}) do
    %{jsonrpc: "2.0", id: id, method: method, params: check_params(params)}
  end

  # Strips empty-map placeholders (produced by encode_opts/2 when no options
  # were given) so they are not serialized as spurious parameters.
  defp check_params([]), do: []
  defp check_params([map = %{} | rest]) when map_size(map) == 0, do: check_params(rest)
  defp check_params([elem | rest]), do: [elem | check_params(rest)]

  @doc """
  Returns all information associated with the account of the provided Pubkey.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#getaccountinfo).
  """
  @spec get_account_info(account :: Solana.key(), opts :: keyword) :: t
  def get_account_info(account, opts \\ []) do
    {"getAccountInfo", [B58.encode58(account), encode_opts(opts, %{"encoding" => "base64"})]}
  end

  @doc """
  Returns the balance of the provided pubkey's account.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#getbalance).
  """
  @spec get_balance(account :: Solana.key(), opts :: keyword) :: t
  def get_balance(account, opts \\ []) do
    {"getBalance", [B58.encode58(account), encode_opts(opts)]}
  end

  @doc """
  Returns identity and transaction information about a confirmed block in the
  ledger.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#getblock).
  """
  @spec get_block(start_slot :: non_neg_integer, opts :: keyword) :: t
  def get_block(start_slot, opts \\ []) do
    {"getBlock", [start_slot, encode_opts(opts)]}
  end

  @doc """
  Returns a recent block hash from the ledger, and a fee schedule that can be
  used to compute the cost of submitting a transaction using it.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#getrecentblockhash).
  """
  @spec get_recent_blockhash(opts :: keyword) :: t
  def get_recent_blockhash(opts \\ []) do
    {"getRecentBlockhash", [encode_opts(opts)]}
  end

  @doc """
  Returns minimum balance required to make an account rent exempt.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#getminimumbalanceforrentexemption).
  """
  @spec get_minimum_balance_for_rent_exemption(length :: non_neg_integer, opts :: keyword) :: t
  def get_minimum_balance_for_rent_exemption(length, opts \\ []) do
    {"getMinimumBalanceForRentExemption", [length, encode_opts(opts)]}
  end

  @doc """
  Submits a signed transaction to the cluster for processing.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#sendtransaction).
  """
  @spec send_transaction(transaction :: Solana.Transaction.t(), opts :: keyword) :: t
  def send_transaction(tx = %Solana.Transaction{}, opts \\ []) do
    {:ok, tx_bin} = Solana.Transaction.to_binary(tx)
    opts = opts |> fix_tx_opts() |> encode_opts(%{"encoding" => "base64"})
    {"sendTransaction", [Base.encode64(tx_bin), opts]}
  end

  # sendTransaction calls the commitment option :preflight_commitment; rename
  # it so callers can use the same :commitment key as everywhere else.
  defp fix_tx_opts(opts) do
    opts
    |> Enum.map(fn
      {:commitment, commitment} -> {:preflight_commitment, commitment}
      other -> other
    end)
    |> Enum.into([])
  end

  @doc """
  Requests an airdrop of lamports to an account.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#requestairdrop).
  """
  @spec request_airdrop(account :: Solana.key(), sol :: pos_integer, opts :: keyword) :: t
  def request_airdrop(account, sol, opts \\ []) do
    # The API takes lamports; callers pass whole SOL for convenience.
    {"requestAirdrop",
     [B58.encode58(account), sol * Solana.lamports_per_sol(), encode_opts(opts)]}
  end

  @doc """
  Returns confirmed signatures for transactions involving an address backwards
  in time from the provided signature or most recent confirmed block.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#getsignaturesforaddress).
  """
  @spec get_signatures_for_address(account :: Solana.key(), opts :: keyword) :: t
  def get_signatures_for_address(account, opts \\ []) do
    {"getSignaturesForAddress", [B58.encode58(account), encode_opts(opts)]}
  end

  @doc """
  Returns the statuses of a list of signatures.
  Unless the `searchTransactionHistory` configuration parameter is included,
  this method only searches the recent status cache of signatures, which retains
  statuses for all active slots plus `MAX_RECENT_BLOCKHASHES` rooted slots.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#getsignaturestatuses).
  """
  @spec get_signature_statuses(signatures :: [Solana.key()], opts :: keyword) :: t
  def get_signature_statuses(signatures, opts \\ []) when is_list(signatures) do
    {"getSignatureStatuses", [Enum.map(signatures, &B58.encode58/1), encode_opts(opts)]}
  end

  @doc """
  Returns transaction details for a confirmed transaction.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#gettransaction).
  """
  @spec get_transaction(signature :: Solana.key(), opts :: keyword) :: t
  def get_transaction(signature, opts \\ []) do
    {"getTransaction", [B58.encode58(signature), encode_opts(opts)]}
  end

  @doc """
  Returns the total supply of an SPL Token.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#gettokensupply).
  """
  @spec get_token_supply(mint :: Solana.key(), opts :: keyword) :: t
  def get_token_supply(mint, opts \\ []) do
    {"getTokenSupply", [B58.encode58(mint), encode_opts(opts)]}
  end

  @doc """
  Returns the 20 largest accounts of a particular SPL Token type.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#gettokenlargestaccounts).
  """
  @spec get_token_largest_accounts(mint :: Solana.key(), opts :: keyword) :: t
  def get_token_largest_accounts(mint, opts \\ []) do
    {"getTokenLargestAccounts", [B58.encode58(mint), encode_opts(opts)]}
  end

  @doc """
  Returns the account information for a list of pubkeys.
  For more information, see [the Solana
  docs](https://docs.solana.com/developing/clients/jsonrpc-api#getmultipleaccounts).
  """
  @spec get_multiple_accounts(accounts :: [Solana.key()], opts :: keyword) :: t
  def get_multiple_accounts(accounts, opts \\ []) when is_list(accounts) do
    {"getMultipleAccounts",
     [Enum.map(accounts, &B58.encode58/1), encode_opts(opts, %{"encoding" => "base64"})]}
  end

  # Converts a keyword list of options into the JSON configuration map the
  # RPC API expects: snake_case keys become camelCase, values are encoded.
  defp encode_opts(opts, defaults \\ %{}) do
    Enum.into(opts, defaults, fn {k, v} -> {camelize(k), encode_value(v)} end)
  end

  # Converts a snake_case atom/string into a camelCase string.
  # NOTE(review): the single-clause `case` here always matches and is
  # equivalent to a plain pipeline; kept as-is to preserve the code exactly.
  defp camelize(word) do
    case Regex.split(~r/(?:^|[-_])|(?=[A-Z])/, to_string(word)) do
      words ->
        words
        |> Enum.filter(&(&1 != ""))
        |> camelize_list(:lower)
        |> Enum.join()
    end
  end

  # First word is lowercased, every following word is capitalized.
  defp camelize_list([], _), do: []

  defp camelize_list([h | tail], :lower) do
    [String.downcase(h)] ++ camelize_list(tail, :upper)
  end

  defp camelize_list([h | tail], :upper) do
    [String.capitalize(h)] ++ camelize_list(tail, :upper)
  end

  # Base58-encodes values that look like keys or transaction signatures;
  # all other values are passed through untouched.
  defp encode_value(v) do
    cond do
      :ok == elem(Solana.Key.check(v), 0) -> B58.encode58(v)
      :ok == elem(Solana.Transaction.check(v), 0) -> B58.encode58(v)
      true -> v
    end
  end
end
|
lib/solana/rpc/request.ex
| 0.836888
| 0.483892
|
request.ex
|
starcoder
|
defmodule Bamboo.RecipientReplacerAdapter do
  @moduledoc """
  Replaces to addresses with a provided recipients list.
  It provides a wrapper for any other mailer adapter, useful when working on releases
  machine with real email address. It simply replaces `to` addresses
  with the provided list of addresses and set original values for `to`, `cc` and `bcc`
  in headers.
  ### Doesn't support adapters without attachments_support
  ## Example config
  # Typically done in config/staging.exs
  config :my_app, MyAppMailer.
  adapter: Bamboo.RecipientReplacerAdapter,
  inner_adapter: Bamboo.SendGridAdapter,
  recipient_replacements: ["<EMAIL>", "<EMAIL>"],
  ...
  # Define a Mailer. Typically in lib/my_app/mailer.ex
  defmodule MyApp.Mailer do
  use Bamboo.Mailer, otp_app: :my_app
  end
  """

  import Bamboo.Email, only: [put_header: 3]

  defmodule(AdapterNotSupportedError, do: defexception([:message]))

  @behaviour Bamboo.Adapter

  # Delivers `email` through the configured :inner_adapter after replacing the
  # real recipients with :recipient_replacements. The original to/cc/bcc lists
  # are preserved in X-Real-To / X-Real-Cc / X-Real-Bcc headers.
  @doc false
  def deliver(email, config) do
    original_to = Map.get(email, :to, [])
    original_cc = Map.get(email, :cc, [])
    original_bcc = Map.get(email, :bcc, [])
    adapter = config.inner_adapter

    # Reject inner adapters without attachment support up front (see moduledoc).
    if not adapter.supports_attachments?() do
      raise AdapterNotSupportedError,
            "RecipientReplacerAdapter supports only adapters that support attachments"
    end

    # Replacement addresses carry no display name, hence the nil name part.
    recipients_list = Enum.map(config.recipient_replacements, &{nil, &1})

    email =
      email
      |> Map.put(:to, recipients_list)
      |> Map.put(:cc, [])
      |> Map.put(:bcc, [])
      |> put_header("X-Real-To", convert_recipients_list(original_to))
      |> put_header("X-Real-Cc", convert_recipients_list(original_cc))
      |> put_header("X-Real-Bcc", convert_recipients_list(original_bcc))

    adapter.deliver(email, config)
  end

  # Configuration is validated by the wrapped adapter.
  @doc false
  def handle_config(config) do
    config.inner_adapter.handle_config(config)
  end

  @doc false
  def supports_attachments?, do: true

  # Renders a list of {name, address} recipients as a comma-separated
  # header value; nameless recipients contribute the bare address.
  defp convert_recipients_list(recipients_list) do
    Enum.map_join(recipients_list, ",", fn
      {nil, address} ->
        address

      # NOTE(review): this renders as "<Name>address"; conventional RFC 5322
      # formatting is "Name <address>" — confirm intent before changing the
      # emitted header format.
      {name, address} ->
        "<#{name}>#{address}"
    end)
  end
end
|
lib/bamboo/adapters/recipient_replacer_adapter.ex
| 0.686895
| 0.421076
|
recipient_replacer_adapter.ex
|
starcoder
|
defmodule Snitch.Domain.ShippingCalculator do
  @moduledoc """
  Defines the calculator module for shipping.
  The core functionality of the module is to handle
  calculation of shipping rates.
  """

  alias Snitch.Core.Tools.MultiTenancy.Repo
  alias Snitch.Data.Model.GeneralConfiguration, as: GCModel
  alias Snitch.Data.Schema.{Package, ShippingRuleIdentifier}

  @doc """
  Returns the `shipping_cost` for a `package`.
  Calculates the cost for the supplied `package`.
  > The supplied `package` should have the `package_items` preloaded.
  The shipping cost is being calculated under the following assumptions:
  - The shipping cost would be calculated for the entire order which can
  consist of multiple packages. However, at present it is being
  assumed that the order will have only one package, so the cost is being
  calculated for that particular `package`. The supplied `package` may
  change to `order` in future.
  - The different shipping rules are kind of related to different entities.
  e.g. some shipping rules apply to `product`s while some apply to
  `order`s.
  The rules have priority related to them. The priority at present
  is being handled in the `calculate/1`.
  #TODO
  The logic at present is heavily dependent on rules for the shipping category.
  It directly refers to the `shipping_identifier` codes in the logic to do the
  calculation however, it should be modified to make the code more generic.
  At present the code restricts itself to the shipping_identifiers, it may be
  refactored or rewritten to make it generic.
  The `shipping_cost` is calculated based on some rules. The rules
  are defined for a `shipping_category` by `Snitch.Data.Schema.ShippingRule`.
  """
  @spec calculate(Package.t()) :: Money.t()
  def calculate(package) do
    package =
      Repo.preload(
        package,
        [:items, shipping_category: [shipping_rules: :shipping_rule_identifier]]
      )

    active_rules = get_category_active_rules(package.shipping_category)
    currency_code = GCModel.fetch_currency()
    # Fold starts from a zero Money in the store currency.
    cost = Money.new!(currency_code, 0)

    active_rules
    # Each rule module is expected to return a `reduce_while` tuple
    # ({:cont, acc} | {:halt, acc}) — presumably halting once a rule fully
    # determines the cost; confirm against the individual rule modules.
    |> Enum.reduce_while(cost, fn rule, acc ->
      code = rule.shipping_rule_identifier.code
      identifier = ShippingRuleIdentifier.identifier_with_module()
      module = identifier[code].module
      module.calculate(package, currency_code, rule, acc)
    end)
    |> Money.round()
  end

  # Returns the shipping_rules active for the shipping category.
  defp get_category_active_rules(shipping_category) do
    Enum.filter(shipping_category.shipping_rules, fn rule ->
      rule.active?
    end)
  end
end
|
apps/snitch_core/lib/core/domain/shipping_calculator.ex
| 0.719285
| 0.731514
|
shipping_calculator.ex
|
starcoder
|
defmodule Apq.DocumentProvider do
  @moduledoc """
  Apq document provider or Absinthe plug.
  ### Example
  Define a new module and `use Apq.DocumentProvider`:
  ```elixir
  defmodule ApqExample.Apq do
  use Apq.DocumentProvider,
  cache_provider: ApqExample.Cache,
  max_query_size: 16384 #default
  end
  ```
  #### Options
  - `:cache_provider` -- Module responsible for cache retrieval and placement. The cache provider needs to follow the `Apq.CacheProvider` behaviour.
  - `:max_query_size` -- (Optional) Maximum number of bytes of the graphql query document. Defaults to 16384 bytes (16kb)
  Example configuration for using Apq in `Absinthe.Plug`. Same goes for configuring
  Phoenix.
  match("/api",
  to: Absinthe.Plug,
  init_opts: [
  schema: ApqExample.Schema,
  json_codec: Jason,
  interface: :playground,
  document_providers: [ApqExample.Apq, Absinthe.Plug.DocumentProvider.Default]
  ]
  )
  When the Apq document provider does not match (i.e. the apq extensions are not set in the request),
  the request is passed to the next document provider. This will most likely by the default
  provider available (`Absinthe.Plug.DocumentProvider.Default`).
  """

  # Maximum query size
  @max_query_size 16384

  defmacro __using__(opts) do
    cache_provider = Keyword.fetch!(opts, :cache_provider)
    max_query_size = Keyword.get(opts, :max_query_size, @max_query_size)

    quote do
      require Logger

      @behaviour Absinthe.Plug.DocumentProvider

      Module.put_attribute(__MODULE__, :max_query_size, unquote(max_query_size))

      @doc """
      Handles any requests with the Apq extensions and forwards those without
      to the next document provider.
      """
      def process(%{params: params} = request, _) do
        case process_params(params) do
          # Hash only: persisted-query lookup.
          {hash, nil} -> cache_get(request, hash)
          # Hash + query: register the query under its hash.
          {hash, query} -> cache_put(request, hash, query)
          # No APQ extensions present: let the next provider handle it.
          _ -> {:cont, request}
        end
      end

      def process(request, _), do: {:cont, request}

      @doc """
      Determine the remaining pipeline for an request with an apq document.
      This prepends the Apq Phase before the first Absinthe.Parse phase and handles
      Apq errors, cache hits and misses.
      """
      def pipeline(%{pipeline: as_configured}) do
        as_configured
        |> Absinthe.Pipeline.insert_before(
          Absinthe.Phase.Parse,
          {
            Apq.Phase.ApqInput,
            []
          }
        )
      end

      # Oversized documents are rejected before hashing to bound work.
      # (`_hash` is intentionally unused — fixes an unused-variable warning
      # in every module that `use`s this provider.)
      defp cache_put(request, _hash, query) when byte_size(query) > @max_query_size do
        {:halt, %{request | document: {:apq_query_max_size_error, nil}}}
      end

      defp cache_put(request, hash, query) when is_binary(query) and is_binary(hash) do
        # The client-supplied hash must match the query's actual SHA-256;
        # otherwise a poisoned cache entry could be registered.
        calculated_hash = :crypto.hash(:sha256, query) |> Base.encode16(case: :lower)

        case calculated_hash == hash do
          true ->
            unquote(cache_provider).put(hash, query)
            {:halt, %{request | document: {:apq_stored, query}}}

          false ->
            {:halt, %{request | document: {:apq_hash_match_error, query}}}
        end
      end

      defp cache_put(request, hash, _) when is_binary(hash) do
        {:halt, %{request | document: {:apq_query_format_error, nil}}}
      end

      defp cache_put(request, _hash, query) when is_binary(query) do
        {:halt, %{request | document: {:apq_hash_format_error, nil}}}
      end

      defp cache_get(request, hash) when is_binary(hash) do
        case unquote(cache_provider).get(hash) do
          # Cache miss
          {:ok, nil} ->
            {:halt, %{request | document: {:apq_not_found_error, nil}}}

          # Cache hit
          {:ok, document} ->
            {:halt, %{request | document: {:apq_found, document}}}

          error ->
            # Include the failure reason so cache problems are diagnosable.
            Logger.warn("Error occurred getting cache entry for #{hash}: #{inspect(error)}")
            {:cont, request}
        end
      end

      defp cache_get(request, _) do
        {:halt, %{request | document: {:apq_hash_format_error, nil}}}
      end

      # Extracts {hash, query} from the APQ extension parameters; returns the
      # params unchanged when the extensions are absent.
      defp process_params(%{
             "query" => query,
             "extensions" => %{"persistedQuery" => %{"version" => 1, "sha256Hash" => hash}}
           }) do
        {hash, query}
      end

      defp process_params(%{
             "extensions" => %{"persistedQuery" => %{"version" => 1, "sha256Hash" => hash}}
           }) do
        {hash, nil}
      end

      defp process_params(params), do: params

      defoverridable pipeline: 1
    end
  end
end
|
lib/apq/document_provider.ex
| 0.824214
| 0.795738
|
document_provider.ex
|
starcoder
|
defmodule LocalLedger.Transaction.Validator do
  @moduledoc """
  Validation helpers for transactions: ensures that, per token, the total of
  debits minus the total of credits equals zero.
  """
  alias LocalLedger.Errors.{AmountNotPositiveError, InvalidAmountError, SameAddressError}
  alias LocalLedgerDB.Entry

  @doc """
  Validates that the incoming entries are of different addresses
  except if the address is being associated with a different token.
  If not, it raises a `SameAddressError` exception.
  """
  @spec validate_different_addresses(list()) :: list() | no_return()
  def validate_different_addresses(entries) do
    entries
    |> group_by_token()
    |> Enum.flat_map(&duplicated_addresses/1)
    |> case do
      [] ->
        entries

      duplicates ->
        raise SameAddressError, message: SameAddressError.error_message(duplicates)
    end
  end

  @doc """
  Validates that the incoming entries have debit - credit = 0.
  If not, it raises an `InvalidAmountError` exception.
  """
  @spec validate_zero_sum(list()) :: list() | no_return()
  def validate_zero_sum(entries) do
    balanced? =
      entries
      |> group_by_token()
      |> Enum.all?(&balanced?/1)

    if balanced?, do: entries, else: raise(InvalidAmountError)
  end

  @doc """
  Validates that all incoming entry amounts are greater than zero.
  If not, it raises an `AmountNotPositiveError` exception.
  """
  @spec validate_positive_amounts(list()) :: list() | no_return()
  def validate_positive_amounts(entries) do
    if Enum.all?(entries, &(&1["amount"] > 0)) do
      entries
    else
      raise AmountNotPositiveError
    end
  end

  # Addresses that appear on both the debit and credit side of one token group.
  defp duplicated_addresses(entries) do
    {debits, credits} = partition_by_type(entries)
    common(addresses_of(debits), addresses_of(credits))
  end

  # Groups entries into one list per token id.
  defp group_by_token(entries) do
    entries
    |> Enum.group_by(& &1["token"]["id"])
    |> Map.values()
  end

  # True when total debits equal total credits for the given entries.
  defp balanced?(entries) do
    {debits, credits} = partition_by_type(entries)
    sum_amounts(debits) == sum_amounts(credits)
  end

  # Splits entries into a {debits, credits} tuple based on their type.
  defp partition_by_type(entries) do
    debit_type = Entry.debit_type()
    credit_type = Entry.credit_type()

    Enum.reduce(entries, {[], []}, fn entry, {debits, credits} ->
      case entry["type"] do
        ^debit_type ->
          {[entry | debits], credits}

        ^credit_type ->
          {debits, [entry | credits]}
      end
    end)
  end

  defp sum_amounts(entries), do: Enum.reduce(entries, 0, &(&1["amount"] + &2))

  defp addresses_of(entries), do: Enum.map(entries, & &1["address"])

  # Set intersection: `--` is right-associative, so this is a -- (a -- b).
  defp common(a, b), do: a -- a -- b
end
|
apps/local_ledger/lib/local_ledger/transaction/validator.ex
| 0.894884
| 0.414988
|
validator.ex
|
starcoder
|
defmodule Gherkin.TokenMatcher do
  # Matches raw feature-file lines against Gherkin's token grammar.
  # Each `match_<TokenType>/2` function returns {:ok, token, matcher} with the
  # token's matched_* fields filled in, or :error when the line is not of that
  # token type. The matcher struct carries the active dialect plus the state
  # needed for doc-string parsing (active separator, indent to strip).
  alias Gherkin.{Dialect, GherkinLine, Location, NoSuchLanguageError, Token}

  @type on_match :: {:ok, Token.t(), t} | :error

  @type t :: %__MODULE__{
          active_doc_string_separator: <<_::24>> | nil,
          default_language: String.t(),
          dialect: Dialect.t(),
          indent_to_remove: non_neg_integer,
          language: String.t()
        }

  # Recognizes language directives such as `# language: fr`.
  @language_pattern ~r/^\s*#\s*language\s*:\s*([a-zA-Z\-_]+)\s*$/

  @enforce_keys [:default_language, :dialect, :language]
  defstruct @enforce_keys ++ [active_doc_string_separator: nil, indent_to_remove: 0]

  @spec match_BackgroundLine(t, Token.t()) :: on_match
  def match_BackgroundLine(%__MODULE__{} = matcher, %Token{} = token),
    do: match_title_line(matcher, token, :BackgroundLine, matcher.dialect.background_keywords)

  # NOTE(review): `token.line` is pattern-matched as a binary here ("#" <> _)
  # but accessed as a struct elsewhere (token.line.indent, token.line.tags) —
  # presumably GherkinLine supports both; confirm its representation.
  @spec match_Comment(t, Token.t()) :: on_match
  def match_Comment(%__MODULE__{} = matcher, %Token{line: "#" <> _} = token) do
    text = GherkinLine.get_line_text(token.line, 0)
    {:ok, set_token_matched(token, :Comment, matcher.language, text, nil, 0), matcher}
  end

  def match_Comment(%__MODULE__{}, %Token{}), do: :error

  # No doc string is open: try to open one with either `"""` or ``` as the
  # separator (both are 3-byte binaries, matching the <<_::24>> type).
  @spec match_DocStringSeparator(t, Token.t()) :: on_match
  def match_DocStringSeparator(
        %__MODULE__{active_doc_string_separator: nil} = matcher,
        %Token{} = token
      ) do
    with :error <- match_DocStringSeparator(matcher, token, ~s(""")),
         do: match_DocStringSeparator(matcher, token, ~s(```))
  end

  # A doc string is open: only its own separator closes it, resetting state.
  def match_DocStringSeparator(%__MODULE__{} = matcher, %Token{} = token) do
    if String.starts_with?(token.line, matcher.active_doc_string_separator),
      do:
        {:ok, set_token_matched(token, :DocStringSeparator, matcher.language),
         %{matcher | active_doc_string_separator: nil, indent_to_remove: 0}},
      else: :error
  end

  # Opens a doc string: remembers the separator and the indent to strip from
  # the content lines that follow.
  @spec match_DocStringSeparator(t, Token.t(), String.t()) :: on_match
  defp match_DocStringSeparator(matcher, token, <<_::24>> = separator) do
    if String.starts_with?(token.line, separator),
      do:
        {:ok,
         set_token_matched(
           token,
           :DocStringSeparator,
           matcher.language,
           GherkinLine.get_rest_trimmed(token.line, 3)
         ),
         %{matcher | active_doc_string_separator: separator, indent_to_remove: token.line.indent}},
      else: :error
  end

  @spec match_Empty(t, Token.t()) :: on_match
  def match_Empty(%__MODULE__{} = matcher, %Token{line: ""} = token),
    do: {:ok, set_token_matched(token, :Empty, matcher.language, nil, nil, 0), matcher}

  def match_Empty(%__MODULE__{}, %Token{}), do: :error

  @spec match_EOF(t, Token.t()) :: on_match
  def match_EOF(%__MODULE__{} = matcher, %Token{} = token) do
    if Token.eof?(token),
      do: {:ok, set_token_matched(token, :EOF, matcher.language), matcher},
      else: :error
  end

  @spec match_ExamplesLine(t, Token.t()) :: on_match
  def match_ExamplesLine(%__MODULE__{} = matcher, %Token{} = token),
    do: match_title_line(matcher, token, :ExamplesLine, matcher.dialect.examples_keywords)

  @spec match_FeatureLine(t, Token.t()) :: on_match
  def match_FeatureLine(%__MODULE__{} = matcher, %Token{} = token),
    do: match_title_line(matcher, token, :FeatureLine, matcher.dialect.feature_keywords)

  # A `# language: xx` directive switches the matcher's dialect immediately
  # (raises NoSuchLanguageError via change_dialect!/3 if unknown).
  @spec match_Language(t, Token.t()) :: on_match
  def match_Language(%__MODULE__{} = matcher, %Token{} = token) do
    case Regex.run(@language_pattern, token.line.trimmed_line_text) do
      [_, language] ->
        {
          :ok,
          set_token_matched(token, :Language, matcher.language, language),
          change_dialect!(matcher, language, token.location)
        }

      nil ->
        :error
    end
  end

  # Fallback token type: any line (e.g. doc string content, descriptions).
  @spec match_Other(t, Token.t()) :: on_match
  def match_Other(%__MODULE__{} = matcher, %Token{} = token),
    do: {
      :ok,
      set_token_matched(
        token,
        :Other,
        matcher.language,
        other_text(matcher, token.line),
        nil,
        0
      ),
      matcher
    }

  # Inside a doc string, un-escape \"\"\" back to """ after stripping the
  # doc string's base indentation.
  @spec other_text(t, GherkinLine.t()) :: String.t()
  defp other_text(matcher, line) do
    text = GherkinLine.get_line_text(line, matcher.indent_to_remove)

    if matcher.active_doc_string_separator,
      do: String.replace(text, ~S(\"\"\"), ~s(""")),
      else: text
  end

  @spec match_ScenarioLine(t, Token.t()) :: on_match
  def match_ScenarioLine(%__MODULE__{} = matcher, %Token{} = token),
    do: match_title_line(matcher, token, :ScenarioLine, matcher.dialect.scenario_keywords)

  @spec match_ScenarioOutlineLine(t, Token.t()) :: on_match
  def match_ScenarioOutlineLine(%__MODULE__{} = matcher, %Token{} = token),
    do:
      match_title_line(
        matcher,
        token,
        :ScenarioOutlineLine,
        matcher.dialect.scenario_outline_keywords
      )

  # Shared matcher for "Keyword: title" lines. The +1 skips the ":" after the
  # keyword before trimming the title.
  @spec match_title_line(
          t,
          Token.t(),
          :BackgroundLine | :ExamplesLine | :FeatureLine | :ScenarioLine | :ScenarioOutlineLine,
          [String.t(), ...]
        ) :: on_match
  defp match_title_line(matcher, token, token_type, keywords) do
    if keyword = Enum.find(keywords, &GherkinLine.start_with_title_keyword?(token.line, &1)) do
      title = GherkinLine.get_rest_trimmed(token.line, byte_size(keyword) + 1)
      {:ok, set_token_matched(token, token_type, matcher.language, title, keyword), matcher}
    else
      :error
    end
  end

  # Step lines (Given/When/Then/And/But in the active dialect); the keyword
  # itself (including its trailing space) is removed from the text.
  @spec match_StepLine(t, Token.t()) :: on_match
  def match_StepLine(%__MODULE__{} = matcher, %Token{} = token) do
    if keyword = dialect_keyword(token.line, matcher.dialect),
      do: {
        :ok,
        set_token_matched(
          token,
          :StepLine,
          matcher.language,
          GherkinLine.get_rest_trimmed(token.line, byte_size(keyword)),
          keyword
        ),
        matcher
      },
      else: :error
  end

  # Tries each step-keyword family in order; `with` falls through on nil and
  # returns the first non-nil keyword found.
  @spec dialect_keyword(GherkinLine.t(), Dialect.t()) :: String.t() | nil
  defp dialect_keyword(line, dialect) do
    with nil <- keyword(line, dialect.given_keywords),
         nil <- keyword(line, dialect.when_keywords),
         nil <- keyword(line, dialect.then_keywords),
         nil <- keyword(line, dialect.and_keywords),
         nil <- keyword(line, dialect.but_keywords),
         do: nil
  end

  @spec keyword(GherkinLine.t(), Dialect.keywords()) :: String.t() | nil
  defp keyword(line, keywords), do: Enum.find(keywords, &String.starts_with?(line, &1))

  # Table rows carry their parsed cells as matched items instead of text.
  @spec match_TableRow(t, Token.t()) :: on_match
  def match_TableRow(%__MODULE__{} = matcher, %Token{line: "|" <> _} = token),
    do: {
      :ok,
      set_token_matched(
        token,
        :TableRow,
        matcher.language,
        nil,
        nil,
        nil,
        token.line.table_cells
      ),
      matcher
    }

  def match_TableRow(%__MODULE__{}, %Token{}), do: :error

  # Tag lines carry their parsed tags as matched items.
  @spec match_TagLine(t, Token.t()) :: on_match
  def match_TagLine(%__MODULE__{} = matcher, %Token{line: "@" <> _} = token),
    do: {
      :ok,
      set_token_matched(token, :TagLine, matcher.language, nil, nil, nil, token.line.tags),
      matcher
    }

  def match_TagLine(%__MODULE__{}, %Token{}), do: :error

  @spec set_token_matched(
          Token.t(),
          :BackgroundLine
          | :Comment
          | :DocStringSeparator
          | :Empty
          | :EOF
          | :ExamplesLine
          | :FeatureLine
          | :Language
          | :Other
          | :ScenarioLine
          | :ScenarioOutlineLine
          | :StepLine
          | :TableRow
          | :TagLine,
          String.t(),
          String.t() | nil,
          String.t() | nil,
          non_neg_integer | nil,
          [TableCell.t()] | [Tag.t()]
        ) :: Token.t()
  # Fills in the matched_* fields on the token. Text has its trailing newline
  # stripped; the column is derived from the matched indent (1-based).
  defp set_token_matched(
         token,
         matched_type,
         language,
         text \\ nil,
         keyword \\ nil,
         indent \\ nil,
         items \\ []
       ) do
    token
    |> Map.put(:matched_gherkin_dialect, language)
    |> Map.put(:matched_indent, indent || (token.line && token.line.indent) || 0)
    |> Map.put(:matched_items, items)
    |> Map.put(:matched_keyword, keyword)
    |> Map.put(:matched_text, text && String.replace(text, ~r/(\r\n|\r|\n)$/, ""))
    |> Map.put(:matched_type, matched_type)
    |> put_in([:location, :column], token.matched_indent + 1)
  end

  # Builds a matcher for the given language (defaults to English).
  @spec new(String.t()) :: t
  def new(language \\ "en") when is_binary(language),
    do: change_dialect!(%__MODULE__{default_language: language}, language)

  # Swaps the active dialect, raising NoSuchLanguageError (with the directive's
  # location, when available) for unknown language codes.
  @spec change_dialect!(t, String.t(), Location.t() | nil) :: t | no_return
  defp change_dialect!(matcher, language, location \\ nil) do
    if dialect = Dialect.get(language) do
      %{matcher | dialect: dialect, language: language}
    else
      raise NoSuchLanguageError, language: language, location: location
    end
  end
end
|
gherkin/elixir/lib/gherkin/token_matcher.ex
| 0.821903
| 0.689685
|
token_matcher.ex
|
starcoder
|
# Auto-generated protobuf message definitions for the Google Bigtable v2 data
# API (data.proto). Do not edit by hand — regenerate from the .proto source.
defmodule Google.Bigtable.V2.Row do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          key: binary,
          families: [Google.Bigtable.V2.Family.t()]
        }
  defstruct [:key, :families]

  field :key, 1, type: :bytes
  field :families, 2, repeated: true, type: Google.Bigtable.V2.Family
end

defmodule Google.Bigtable.V2.Family do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t(),
          columns: [Google.Bigtable.V2.Column.t()]
        }
  defstruct [:name, :columns]

  field :name, 1, type: :string
  field :columns, 2, repeated: true, type: Google.Bigtable.V2.Column
end

defmodule Google.Bigtable.V2.Column do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          qualifier: binary,
          cells: [Google.Bigtable.V2.Cell.t()]
        }
  defstruct [:qualifier, :cells]

  field :qualifier, 1, type: :bytes
  field :cells, 2, repeated: true, type: Google.Bigtable.V2.Cell
end

defmodule Google.Bigtable.V2.Cell do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          timestamp_micros: integer,
          value: binary,
          labels: [String.t()]
        }
  defstruct [:timestamp_micros, :value, :labels]

  field :timestamp_micros, 1, type: :int64
  field :value, 2, type: :bytes
  field :labels, 3, repeated: true, type: :string
end

# Half-open/closed key ranges; each end is a protobuf oneof ({atom, value}).
defmodule Google.Bigtable.V2.RowRange do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          start_key: {atom, any},
          end_key: {atom, any}
        }
  defstruct [:start_key, :end_key]

  oneof :start_key, 0
  oneof :end_key, 1

  field :start_key_closed, 1, type: :bytes, oneof: 0
  field :start_key_open, 2, type: :bytes, oneof: 0
  field :end_key_open, 3, type: :bytes, oneof: 1
  field :end_key_closed, 4, type: :bytes, oneof: 1
end

defmodule Google.Bigtable.V2.RowSet do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          row_keys: [binary],
          row_ranges: [Google.Bigtable.V2.RowRange.t()]
        }
  defstruct [:row_keys, :row_ranges]

  field :row_keys, 1, repeated: true, type: :bytes
  field :row_ranges, 2, repeated: true, type: Google.Bigtable.V2.RowRange
end

defmodule Google.Bigtable.V2.ColumnRange do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          start_qualifier: {atom, any},
          end_qualifier: {atom, any},
          family_name: String.t()
        }
  defstruct [:start_qualifier, :end_qualifier, :family_name]

  oneof :start_qualifier, 0
  oneof :end_qualifier, 1

  field :family_name, 1, type: :string
  field :start_qualifier_closed, 2, type: :bytes, oneof: 0
  field :start_qualifier_open, 3, type: :bytes, oneof: 0
  field :end_qualifier_closed, 4, type: :bytes, oneof: 1
  field :end_qualifier_open, 5, type: :bytes, oneof: 1
end

defmodule Google.Bigtable.V2.TimestampRange do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          start_timestamp_micros: integer,
          end_timestamp_micros: integer
        }
  defstruct [:start_timestamp_micros, :end_timestamp_micros]

  field :start_timestamp_micros, 1, type: :int64
  field :end_timestamp_micros, 2, type: :int64
end

defmodule Google.Bigtable.V2.ValueRange do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          start_value: {atom, any},
          end_value: {atom, any}
        }
  defstruct [:start_value, :end_value]

  oneof :start_value, 0
  oneof :end_value, 1

  field :start_value_closed, 1, type: :bytes, oneof: 0
  field :start_value_open, 2, type: :bytes, oneof: 0
  field :end_value_closed, 3, type: :bytes, oneof: 1
  field :end_value_open, 4, type: :bytes, oneof: 1
end

defmodule Google.Bigtable.V2.RowFilter.Chain do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          filters: [Google.Bigtable.V2.RowFilter.t()]
        }
  defstruct [:filters]

  field :filters, 1, repeated: true, type: Google.Bigtable.V2.RowFilter
end

defmodule Google.Bigtable.V2.RowFilter.Interleave do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          filters: [Google.Bigtable.V2.RowFilter.t()]
        }
  defstruct [:filters]

  field :filters, 1, repeated: true, type: Google.Bigtable.V2.RowFilter
end

defmodule Google.Bigtable.V2.RowFilter.Condition do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          predicate_filter: Google.Bigtable.V2.RowFilter.t() | nil,
          true_filter: Google.Bigtable.V2.RowFilter.t() | nil,
          false_filter: Google.Bigtable.V2.RowFilter.t() | nil
        }
  defstruct [:predicate_filter, :true_filter, :false_filter]

  field :predicate_filter, 1, type: Google.Bigtable.V2.RowFilter
  field :true_filter, 2, type: Google.Bigtable.V2.RowFilter
  field :false_filter, 3, type: Google.Bigtable.V2.RowFilter
end

# The filter itself is a single oneof over all filter variants; field numbers
# are non-sequential because they mirror the upstream .proto definition.
defmodule Google.Bigtable.V2.RowFilter do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          filter: {atom, any}
        }
  defstruct [:filter]

  oneof :filter, 0

  field :chain, 1, type: Google.Bigtable.V2.RowFilter.Chain, oneof: 0
  field :interleave, 2, type: Google.Bigtable.V2.RowFilter.Interleave, oneof: 0
  field :condition, 3, type: Google.Bigtable.V2.RowFilter.Condition, oneof: 0
  field :sink, 16, type: :bool, oneof: 0
  field :pass_all_filter, 17, type: :bool, oneof: 0
  field :block_all_filter, 18, type: :bool, oneof: 0
  field :row_key_regex_filter, 4, type: :bytes, oneof: 0
  field :row_sample_filter, 14, type: :double, oneof: 0
  field :family_name_regex_filter, 5, type: :string, oneof: 0
  field :column_qualifier_regex_filter, 6, type: :bytes, oneof: 0
  field :column_range_filter, 7, type: Google.Bigtable.V2.ColumnRange, oneof: 0
  field :timestamp_range_filter, 8, type: Google.Bigtable.V2.TimestampRange, oneof: 0
  field :value_regex_filter, 9, type: :bytes, oneof: 0
  field :value_range_filter, 15, type: Google.Bigtable.V2.ValueRange, oneof: 0
  field :cells_per_row_offset_filter, 10, type: :int32, oneof: 0
  field :cells_per_row_limit_filter, 11, type: :int32, oneof: 0
  field :cells_per_column_limit_filter, 12, type: :int32, oneof: 0
  field :strip_value_transformer, 13, type: :bool, oneof: 0
  field :apply_label_transformer, 19, type: :string, oneof: 0
end

defmodule Google.Bigtable.V2.Mutation.SetCell do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          family_name: String.t(),
          column_qualifier: binary,
          timestamp_micros: integer,
          value: binary
        }
  defstruct [:family_name, :column_qualifier, :timestamp_micros, :value]

  field :family_name, 1, type: :string
  field :column_qualifier, 2, type: :bytes
  field :timestamp_micros, 3, type: :int64
  field :value, 4, type: :bytes
end

defmodule Google.Bigtable.V2.Mutation.DeleteFromColumn do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          family_name: String.t(),
          column_qualifier: binary,
          time_range: Google.Bigtable.V2.TimestampRange.t() | nil
        }
  defstruct [:family_name, :column_qualifier, :time_range]

  field :family_name, 1, type: :string
  field :column_qualifier, 2, type: :bytes
  field :time_range, 3, type: Google.Bigtable.V2.TimestampRange
end

defmodule Google.Bigtable.V2.Mutation.DeleteFromFamily do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          family_name: String.t()
        }
  defstruct [:family_name]

  field :family_name, 1, type: :string
end

defmodule Google.Bigtable.V2.Mutation.DeleteFromRow do
  @moduledoc false
  use Protobuf, syntax: :proto3
  # Empty message: deleting a row carries no parameters.
  @type t :: %__MODULE__{}
  defstruct []
end

defmodule Google.Bigtable.V2.Mutation do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          mutation: {atom, any}
        }
  defstruct [:mutation]

  oneof :mutation, 0

  field :set_cell, 1, type: Google.Bigtable.V2.Mutation.SetCell, oneof: 0
  field :delete_from_column, 2, type: Google.Bigtable.V2.Mutation.DeleteFromColumn, oneof: 0
  field :delete_from_family, 3, type: Google.Bigtable.V2.Mutation.DeleteFromFamily, oneof: 0
  field :delete_from_row, 4, type: Google.Bigtable.V2.Mutation.DeleteFromRow, oneof: 0
end

defmodule Google.Bigtable.V2.ReadModifyWriteRule do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          rule: {atom, any},
          family_name: String.t(),
          column_qualifier: binary
        }
  defstruct [:rule, :family_name, :column_qualifier]

  oneof :rule, 0

  field :family_name, 1, type: :string
  field :column_qualifier, 2, type: :bytes
  field :append_value, 3, type: :bytes, oneof: 0
  field :increment_amount, 4, type: :int64, oneof: 0
end
|
lib/google/bigtable/v2/data.pb.ex
| 0.778607
| 0.689959
|
data.pb.ex
|
starcoder
|
defmodule Plotex do
  alias Plotex.ViewRange
  alias Plotex.Axis
  alias Plotex.Output.Formatter
  require Logger

  @moduledoc """
  Documentation for Plotex.
  TODO
  """
  # config: overall plot configuration (axes); xticks/yticks: tick data for
  # each axis; datasets: the scaled data points ready for output rendering.
  defstruct [:config, :xticks, :yticks, :datasets]

  @type data_types :: number | DateTime.t() | NaiveDateTime.t()
  @type data_item :: Stream.t(data_types) | Enum.t(data_types)
  @type data_pair :: {data_item, data_item}
  @type data :: Stream.t(data_pair) | Enum.t(data_pair)

  @type t :: %Plotex{
          config: Plotex.Config.t(),
          xticks: Enumerable.t(),
          yticks: Enumerable.t(),
          datasets: Enumerable.t()
        }

  @doc """
  Generates a stream of the data points (ticks) for a given axis.

  Returns `[data: ticks, basis: basis]` where each tick is a
  `{value, scaled_position}` pair kept only if it falls inside the axis view.
  Returns empty data when either axis limit is missing.
  """
  def generate_axis(%Axis{units: units} = axis) do
    unless axis.limits.start == nil || axis.limits.stop == nil do
      %{data: data, basis: basis} = Plotex.Axis.Units.scale(units, axis.limits)
      # Logger.warn("TIME generate_axis: #{inspect(data |> Enum.to_list)}")
      # Logger.warn("TIME generate_axis: limits: #{inspect(axis.limits)}")
      # Logger.warn("TIME generate_axis: view: #{inspect(axis.view)}")
      trng = scale_data(data, axis)
      # Logger.warn("TIME generate_axis: trng: #{inspect(trng |> Enum.to_list)}")
      # Logger.warn("TIME generate_axis: range: #{inspect(axis.view)}")
      # Keep only ticks whose scaled position lies within the visible view.
      ticks =
        Stream.zip(data, trng)
        # |> Stream.each(& Logger.warn("dt gen view: #{inspect &1}"))
        |> Stream.filter(&(elem(&1, 1) >= axis.view.start))
        |> Stream.filter(&(elem(&1, 1) <= axis.view.stop))
        |> Enum.to_list()

      [data: ticks, basis: basis]
    else
      [data: [], basis: nil]
    end
  end

  @doc """
  Returns a stream of scaled data points zipped with the original points.
  """
  # Guard clause: with missing limits there is nothing to scale.
  def scale_data(_data, %Axis{limits: %{start: start, stop: stop}} = _axis)
      when is_nil(start) or is_nil(stop),
      do: []

  def scale_data(data, %Axis{} = axis) do
    # Logger.warn("SCALE_DATA: #{inspect axis}")
    # Linear map from data limits onto the view range: y = m * (x - x0) + b.
    m =
      ViewRange.diff(axis.view.stop, axis.view.start) /
        ViewRange.diff(axis.limits.stop, axis.limits.start)

    b = axis.view.start |> ViewRange.to_val()
    x! = axis.limits.start |> ViewRange.to_val()

    data
    |> Stream.map(fn x -> m * (ViewRange.to_val(x) - x!) + b end)
  end

  @doc """
  Returns of scaled data for both X & Y coordinates for a given {X,Y} dataset.
  """
  def plot_data({xdata, ydata}, %Axis{} = xaxis, %Axis{} = yaxis) do
    xrng = scale_data(xdata, xaxis)
    yrng = scale_data(ydata, yaxis)

    {Enum.zip(xdata, xrng), Enum.zip(ydata, yrng)}
  end

  @doc """
  Find the appropriate limits given an enumerable of datasets.

  For example, given {[1,2,3,4], [0.4,0.3,0.2,0.1]} will find the X limits 1..4
  and the Y limits of 0.1..0.4.
  """
  def limits(datasets, opts \\ []) do
    # Logger.warn("LIMITS: #{inspect opts} ")
    proj = Keyword.get(opts, :projection, :cartesian)
    # Optional minimum view ranges act as the reduction seed so the final
    # limits never shrink below them.
    min_xrange = get_in(opts, [:xaxis, :view_min]) || ViewRange.empty()
    min_yrange = get_in(opts, [:yaxis, :view_min]) || ViewRange.empty()

    # Fold every dataset's range into a single min/max pair per axis.
    {xl, yl} =
      for {xdata, ydata} <- datasets, reduce: {min_xrange, min_yrange} do
        {xlims, ylims} ->
          xlims! = xdata |> ViewRange.from(proj)
          ylims! = ydata |> ViewRange.from(proj)
          xlims! = ViewRange.min_max(xlims, xlims!)
          ylims! = ViewRange.min_max(ylims, ylims!)
          {xlims!, ylims!}
      end

    xl = ViewRange.pad(xl, opts[:xaxis] || [])
    yl = ViewRange.pad(yl, opts[:yaxis] || [])
    # Logger.warn("lims reduced: limits!: post!: #{inspect {xl, yl}}")
    {xl, yl}
  end

  # Maps a :kind option to its default axis units (nil when no kind given).
  def std_units(opts) do
    case opts[:kind] do
      nil -> nil
      :numeric -> %Axis.Units.Numeric{}
      :datetime -> %Axis.Units.Time{}
    end
  end

  # Maps a :kind option to its default tick formatter.
  def std_fmt(opts) do
    case opts[:kind] do
      nil -> %Plotex.Output.Formatter.NumericDefault{}
      :numeric -> %Plotex.Output.Formatter.NumericDefault{}
      :datetime -> %Plotex.Output.Formatter.Calendar{}
    end
  end

  @doc """
  Create a Plotex struct for given datasets and configuration. Will load and scan data
  for all input datasets.
  """
  @spec plot(Plotex.data(), Keyword.t()) :: Plotex.t()
  def plot(datasets, opts \\ []) do
    {xlim, ylim} = limits(datasets, opts)
    # ticks = opts[:xaxis][:ticks]
    # And this part is kludgy...
    # Views default to a 10-unit margin on each side of a width-100 canvas.
    xaxis = %Axis{
      limits: xlim,
      units: struct(opts[:xaxis][:units] || std_units(opts[:xaxis]) || %Axis.Units.Numeric{}),
      formatter:
        struct(opts[:xaxis][:formatter] || std_fmt(opts[:xaxis]) || %Formatter.NumericDefault{}),
      view: %ViewRange{start: 10, stop: (opts[:xaxis][:width] || 100) - 10}
    }

    # NOTE(review): the yaxis formatter falls back to %Formatter.Calendar{}
    # while xaxis falls back to NumericDefault; std_fmt/1 never returns nil so
    # this branch looks unreachable — confirm intent.
    yaxis = %Axis{
      limits: ylim,
      units: struct(opts[:yaxis][:units] || std_units(opts[:yaxis]) || %Axis.Units.Numeric{}),
      formatter:
        struct(
          opts[:yaxis][:formatter] || std_fmt(opts[:yaxis]) || %Formatter.Calendar{}
        ),
      view: %ViewRange{start: 10, stop: (opts[:yaxis][:width] || 100) - 10}
    }

    [data: xticks, basis: xbasis] = generate_axis(xaxis)
    [data: yticks, basis: ybasis] = generate_axis(yaxis)

    xaxis = xaxis |> Map.put(:basis, xbasis)
    yaxis = yaxis |> Map.put(:basis, ybasis)
    # Logger.warn("plot xaxis: #{inspect xaxis}")
    # Logger.warn("plot yaxis: #{inspect yaxis}")

    config = %Plotex.Config{
      xaxis: xaxis,
      yaxis: yaxis
    }

    # Logger.warn("xticks: #{inspect xticks |> Enum.to_list()}")
    # Logger.warn("yticks: #{inspect yticks |> Enum.to_list()}")
    # Scale every dataset into view coordinates, tagging each with its index.
    datasets! =
      for {data, idx} <- datasets |> Stream.with_index(), into: [] do
        {xd, yd} = Plotex.plot_data(data, config.xaxis, config.yaxis)
        {Stream.zip(xd, yd), idx}
      end

    # Logger.warn "datasets! => #{inspect datasets! |> Enum.at(0) |> elem(0) |> Enum.to_list()}"
    %Plotex{config: config, xticks: xticks, yticks: yticks, datasets: datasets!}
  end
end
|
lib/plotex.ex
| 0.7773
| 0.586582
|
plotex.ex
|
starcoder
|
defmodule Stargate do
  @moduledoc """
  Stargate is an Elixir client for the Apache Pulsar distributed message
  log service, built on the Pulsar project's websocket API.
  ### Producer
  Create a producer process under your application's supervision tree with the following:
      options = [
        name: :pulsar_app,
        host: [{:"broker-url.com", 8080}],
        producer: [
          persistence: "non-persistent",
          tenant: "marketing",
          namespace: "public",
          topic: "new-stuff"
        ]
      ]
      Stargate.Supervisor.start_link(options)
  Once the producer is running, pass messages to the client by pid or by the named
  registry entry:
      Stargate.produce(producer, [{"key", "value"}])
  If you won't be producing frequently you can choose to run ad hoc produce commands against
  the url of the Pulsar cluster/topic as follows:
      url = "ws://broker-url.com:8080/ws/v2/producer/non-persistent/marketing/public/new-stuff"
      Stargate.produce(url, [{"key, "value"}])
  ### Consumer and Reader
  Both consumers and readers connected to Pulsar via Stargate process received messages the
  same way. Stargate takes care of receiving the messages and sending acknowledgements back
  to the cluster so all you need to do is start a process and define a module in your application
  that invokes `use Stargate.Receiver.MessageHandler` and has a `handle_message/1` or `handle_message/2`
  function as follows:
      defmodule Publicize.MessageHandler do
        use Stargate.Receiver.MessageHandler
        def handle_message(%{context: context, payload: payload}) do
          publish_to_channel(payload, context)
          :ack
        end
        defp publish_to_channel(payload, context) do
          ...do stuff...
        end
      end
  The `handle_message/1` must return either `:ack` or `:continue` in order to ack successful
  processing of the message back to the cluster or continue processing without ack (in the event
  you want to do a bulk/cumulative ack at a later time). If using the `handle_message/2` callback
  for handlers that keep state across messages handled, it must return `{:ack, state}` or
  `{:continue, state}`.
  Then, create a consumer or reader process under your application's supevision tree with the following:
      options = [
        name: :pulsar_app,
        host: [{:"broker-url.com", 8080}]
        consumer: [ <====== replace with `:reader` for a reader client
          tenant: "internal",
          namespace: "research",
          topic: "ready-to-release",
          subscription: "rss-feed", <====== required for a `:consumer`
          handler: Publicizer.MessageHandler
        ]
      ]
      Stargate.Supervisor.start_link(options)
  Readers and Consumers share the same configuration API with the two key differences that the
  `:consumer` key in the options differentiates from the `:reader` key, as well as the requirement
  to provide a `"subscription"` to a consumer for the cluster to manage messages.
  """

  defdelegate produce(url_or_connection, message), to: Stargate.Producer
  defdelegate produce(connection, message, mfa), to: Stargate.Producer

  defmodule Message do
    @moduledoc """
    The Elixir struct representing a message received from a Pulsar topic.

    It combines the "location" data of the received message (persistent vs.
    non-persistent, tenant, namespace, topic) with the payload, any key and/or
    properties provided with the message, the publication timestamp as a
    DateTime struct, and the messageId assigned by the cluster.

    ### Example

        message = %Stargate.Message{
          topic: "ready-for-release",
          namespace: "research",
          tenant: "internal",
          persistence: "persistent",
          message_id: "CAAQAw==",
          payload: "<NAME>",
          key: "1234",
          properties: nil,
          publish_time: ~U[2020-01-10 18:13:34.443264Z]
        }
    """

    @type t :: %__MODULE__{
            topic: String.t(),
            namespace: String.t(),
            tenant: String.t(),
            persistence: String.t(),
            message_id: String.t(),
            payload: String.t(),
            key: String.t(),
            properties: map(),
            publish_time: DateTime.t()
          }

    defstruct [
      :topic,
      :namespace,
      :tenant,
      :persistence,
      :message_id,
      :payload,
      :key,
      :properties,
      :publish_time
    ]

    @doc """
    Builds a `%Stargate.Message{}` from the map decoded from the json message
    payload received from Pulsar, tagging it with the persistence, tenant,
    namespace and topic of the source topic so the message stays
    "location aware".

    The Base64-encoded payload is decoded and the ISO8601-formatted publish
    timestamp is converted to a `DateTime` struct automatically.
    """
    @spec new(map(), String.t(), String.t(), String.t(), String.t()) :: Stargate.Message.t()
    def new(message, persistence, tenant, namespace, topic) do
      %Message{
        topic: topic,
        namespace: namespace,
        tenant: tenant,
        persistence: persistence,
        message_id: message["messageId"],
        payload: decoded_payload(message),
        key: Map.get(message, "key", ""),
        properties: Map.get(message, "properties", %{}),
        publish_time: publish_timestamp(message)
      }
    end

    # Decodes the Base64-encoded payload sent by the cluster.
    defp decoded_payload(message) do
      message
      |> Map.get("payload")
      |> Base.decode64!()
    end

    # Parses the ISO8601 publish timestamp into a DateTime struct.
    defp publish_timestamp(message) do
      {:ok, timestamp, _offset} =
        message
        |> Map.get("publishTime")
        |> DateTime.from_iso8601()

      timestamp
    end
  end
end
|
lib/stargate.ex
| 0.854869
| 0.410874
|
stargate.ex
|
starcoder
|
defmodule WordsWithEnemies.Game.Server do
@moduledoc """
A process for controlling an individual game.
"""
use GenServer
require Logger
alias WordsWithEnemies.Game.Lobby
alias WordsWithEnemies.{Game, Player}
  @doc """
  Starts a new game process, and returns `{:ok, pid}`.

  Raises if `players` contains anything other than valid player structs.
  """
  def start_link(id, players) when is_list(players) do
    if Enum.all?(players, &Player.player?/1) do
      game = %Game{id: id, players: players}
      # Register the process under the game id so it can be found later.
      name = {:via, Registry, {WordsWithEnemies.Game.Registry, id}}
      GenServer.start_link(__MODULE__, game, name: name)
    else
      raise("invalid players passed")
    end
  end

  @doc """
  Increments the game's round number by 1.
  """
  def increment_round(pid) do
    GenServer.cast(pid, :increment_round)
  end

  @doc """
  Asynchronously adds `player` to the list of players.
  """
  def add_player(pid, %Player{} = player) do
    GenServer.cast(pid, {:add_player, player})
  end

  @doc """
  Asynchronously removes the player named `name` from the game.
  """
  def remove_player(pid, name) do
    GenServer.cast(pid, {:remove_player, name})
  end

  @doc """
  Sets the `status` key to `:in_progress` and removes the game from the lobby.
  """
  def begin_game(pid) do
    GenServer.cast(pid, :begin_game)
  end

  @doc """
  Returns the player with `name` if they exist, otherwise `nil`.
  """
  def find_player(pid, name) do
    GenServer.call(pid, {:find_player, name})
  end

  @doc """
  Calls `func` on each player struct in the game, and
  sets the result to the `players` key.
  """
  def update_players(pid, func) when is_function(func) do
    GenServer.cast(pid, {:update_players, func})
  end

  @doc """
  Calls `func` on the player whose id is `id`, and replaces
  that player's struct with the result.
  """
  def update_player(pid, id, func) when is_function(func) do
    GenServer.cast(pid, {:update_player, id, func})
  end

  @doc """
  Replaces the game's players with `players`, which must be
  a list of `%Player{}` structs. Raises if the list contains
  anything else.
  """
  def replace_players(pid, players) do
    if Enum.all?(players, &Player.player?/1) do
      GenServer.cast(pid, {:replace_players, players})
    else
      raise("invalid players passed")
    end
  end

  @doc """
  Gives every player a new set of letters.
  """
  def distribute_letters(pid) do
    update_players(pid, &Player.set_random_letters/1)
  end

  @doc """
  Returns the inner struct of the game.
  """
  def lookup(pid) do
    GenServer.call(pid, :lookup)
  end

  @doc """
  Terminates the game.
  """
  def stop(pid) do
    GenServer.stop(pid)
  end
# Server
def init(%Game{id: id} = game) do
Logger.info("Game:#{id} has initialised")
Lobby.add_game(game)
{:ok, game}
end
def terminate(:normal, %Game{id: id} = game) do
Logger.info("Game:#{id} has terminated")
Lobby.remove_game(id)
:normal
end
def handle_call(:lookup, _from, game) do
{:reply, game, game}
end
def handle_call({:find_player, name}, _from, game) do
player = Enum.find(game.players, &(&1.name === name))
{:reply, player, game}
end
def handle_cast(:begin_game, game) do
Logger.info("Game:#{game.id} has started")
Lobby.remove_game(game.id)
{:noreply, %{game | status: :in_progress}}
end
def handle_cast(:increment_round, game) do
{:noreply, %{game | round: game.round+1}}
end
def handle_cast({:add_player, player}, game) do
Logger.info("#{player.name} was added to game:#{game.id}")
{:noreply, %{game | players: [player | game.players]}}
end
def handle_cast({:remove_player, name}, game) do
filtered = Enum.filter(game.players, &(&1.name !== name))
{:noreply, %{game | players: filtered}}
end
def handle_cast({:update_players, func}, game) do
new_players = Enum.map(game.players, &func.(&1))
{:noreply, %{game | players: new_players}}
end
def handle_cast({:update_player, id, func}, %Game{players: players} = game) do
index = Enum.find_index(players, &(&1.id == id)) || length(players)
new_players = List.update_at(players, index, &func.(&1))
{:noreply, %{game | players: new_players}}
end
def handle_cast({:replace_players, players}, game) do
{:noreply, %{game | players: players}}
end
end
|
lib/words_with_enemies/game/server.ex
| 0.717408
| 0.405449
|
server.ex
|
starcoder
|
defmodule Type.Bitstring do
  @moduledoc """
  Handles bitstrings and binaries in the erlang/elixir type system.

  This type has two required parameters that define a semilattice over
  the number line which are the allowed number of bits in the bitstrings
  which are members of this type.

  - `size` minimum size of the bitstring.
  - `unit` distance between points in the lattice.

  Roughly speaking, this corresponds to the process of matching the header
  of a binary (size), plus a variable-length payload with recurring features
  of size (unit).

  ### Examples:

  - `t:bitstring/0` is equivalent to `%Type.Bitstring{size: 0, unit: 1}`.
    Note that any bitstring type is a subtype of this type.
  - `t:binary/0` is equivalent to `%Type.Bitstring{size: 0, unit: 8}`.
  - A fixed-size binary is `%Type.Bitstring{unit: 0, size: <size>}`.
  - The empty binary (`""`) is `%Type.Bitstring{unit: 0, size: 0}`.

  ### Key functions:

  #### comparison

  binaries are sorted by unit first (with smaller units coming after bigger units,
  as they are less restrictive), except zero, which is the most restrictive.

  ```elixir
  iex> Type.compare(%Type.Bitstring{size: 0, unit: 8}, %Type.Bitstring{size: 0, unit: 1})
  :lt
  iex> Type.compare(%Type.Bitstring{size: 0, unit: 1}, %Type.Bitstring{size: 16, unit: 0})
  :gt
  ```

  for two binaries with the same unit, a binary with a smaller fixed size comes
  after those with bigger fixed sizes, since bigger fixed sizes are more restrictive.

  ```elixir
  iex> Type.compare(%Type.Bitstring{size: 0, unit: 8}, %Type.Bitstring{size: 16, unit: 8})
  :gt
  iex> Type.compare(%Type.Bitstring{size: 16, unit: 8}, %Type.Bitstring{size: 8, unit: 8})
  :lt
  ```

  #### intersection

  Some binaries have no intersection, but Mavis will find obscure intersections.

  ```elixir
  iex> Type.intersection(%Type.Bitstring{size: 15, unit: 0}, %Type.Bitstring{size: 3, unit: 0})
  %Type{name: :none}
  iex> Type.intersection(%Type.Bitstring{size: 0, unit: 8}, %Type.Bitstring{size: 16, unit: 4})
  %Type.Bitstring{size: 16, unit: 8}
  iex> Type.intersection(%Type.Bitstring{size: 15, unit: 1}, %Type.Bitstring{size: 0, unit: 8})
  %Type.Bitstring{size: 16, unit: 8}
  iex> Type.intersection(%Type.Bitstring{size: 14, unit: 6}, %Type.Bitstring{size: 16, unit: 8})
  %Type.Bitstring{size: 32, unit: 24}
  ```

  #### union

  Most binary pairs won't have nontrivial unions, but Mavis will find some obscure ones.

  ```elixir
  iex> Type.union(%Type.Bitstring{size: 0, unit: 1}, %Type.Bitstring{size: 15, unit: 8})
  %Type.Bitstring{size: 0, unit: 1}
  iex> Type.union(%Type.Bitstring{size: 25, unit: 8}, %Type.Bitstring{size: 1, unit: 4})
  %Type.Bitstring{size: 1, unit: 4}
  ```

  #### subtype?

  A bitstring type is a subtype of another if its lattice is covered by the other's.

  ```elixir
  iex> Type.subtype?(%Type.Bitstring{size: 16, unit: 8}, %Type.Bitstring{size: 4, unit: 4})
  true
  iex> Type.subtype?(%Type.Bitstring{size: 3, unit: 7}, %Type.Bitstring{size: 12, unit: 6})
  false
  ```

  #### usable_as

  Most bitstrings are at least maybe usable as others, but there are cases where it's impossible.

  ```elixir
  iex> Type.usable_as(%Type.Bitstring{size: 8, unit: 16}, %Type.Bitstring{size: 4, unit: 4})
  :ok
  iex> Type.usable_as(%Type.Bitstring{size: 3, unit: 7}, %Type.Bitstring{size: 12, unit: 6})
  {:maybe, [%Type.Message{meta: [],
                          type: %Type.Bitstring{size: 3, unit: 7},
                          target: %Type.Bitstring{size: 12, unit: 6}}]}
  iex> Type.usable_as(%Type.Bitstring{size: 3, unit: 8}, %Type.Bitstring{size: 16, unit: 8})
  {:error, %Type.Message{meta: [],
                         type: %Type.Bitstring{size: 3, unit: 8},
                         target: %Type.Bitstring{size: 16, unit: 8}}}
  ```
  """

  defstruct [size: 0, unit: 0]

  @type t :: %__MODULE__{
    size: non_neg_integer,
    unit: 0..256
  }

  defimpl Type.Properties do
    import Type, only: :macros

    alias Type.{Bitstring, Message}

    use Type.Helpers

    # NOTE: `group_compare`, `usable_as`, `intersection` and `subtype` below
    # are DSL macros brought in by `use Type.Helpers`; the clauses inside
    # each block are order-sensitive, so do not reorder them.
    group_compare do
      # literal bitstrings always sort before bitstring types
      def group_compare(_, bitstring) when is_bitstring(bitstring), do: :gt
      # unit 0 (fixed size) is the most restrictive and sorts last;
      # otherwise smaller units (less restrictive) sort earlier.
      def group_compare(%Bitstring{unit: 0}, %Bitstring{unit: b}) when b != 0, do: :lt
      def group_compare(%Bitstring{unit: a}, %Bitstring{unit: 0}) when a != 0, do: :gt
      def group_compare(%Bitstring{unit: a}, %Bitstring{unit: b}) when a < b, do: :gt
      def group_compare(%Bitstring{unit: a}, %Bitstring{unit: b}) when a > b, do: :lt
      # same unit: bigger fixed size is more restrictive, sorts later
      def group_compare(%Bitstring{size: a}, %Bitstring{size: b}) when a < b, do: :gt
      def group_compare(%Bitstring{size: a}, %Bitstring{size: b}) when a > b, do: :lt
      # String.t/0 is treated as binary() but sorts just below it on a tie
      def group_compare(left, %Type{module: String, name: :t, params: []}) do
        left
        |> group_compare(%Type.Bitstring{size: 0, unit: 8})
        |> case do
          :eq -> :gt
          order -> order
        end
      end
      # String.t/1: compare against the smallest admissible byte size * 8
      def group_compare(left, %Type{module: String, name: :t, params: [p]}) do
        lowest_idx = case p do
          i when is_integer(i) -> [i]
          range = _.._ -> range
          %Type.Union{of: ints} -> ints
        end
        |> Enum.min

        left
        |> Type.compare(%Type.Bitstring{size: lowest_idx * 8})
        |> case do
          :eq -> :gt
          order -> order
        end
      end
    end

    usable_as do
      # empty strings
      def usable_as(%{size: 0, unit: 0}, %Bitstring{size: 0}, _meta), do: :ok
      def usable_as(type = %{size: 0, unit: 0}, target = %Bitstring{}, meta) do
        {:error, Message.make(type, target, meta)}
      end
      # same unit
      def usable_as(challenge = %{size: size_a, unit: unit},
                    target = %Bitstring{size: size_b, unit: unit},
                    meta) do
        cond do
          unit == 0 and size_a == size_b ->
            :ok
          unit == 0 ->
            {:error, Message.make(challenge, target, meta)}
          # lattices with the same unit never meet unless sizes are congruent
          rem(size_a - size_b, unit) != 0 ->
            {:error, Message.make(challenge, target, meta)}
          # challenge admits some sizes below the target's minimum
          size_b > size_a ->
            {:maybe, [Message.make(challenge, target, meta)]}
          true ->
            :ok
        end
      end
      # same size
      def usable_as(challenge = %{size: size, unit: unit_a},
                    target = %Bitstring{size: size, unit: unit_b},
                    meta) do
        cond do
          unit_b == 0 ->
            {:maybe, [Message.make(challenge, target, meta)]}
          unit_a < unit_b ->
            {:maybe, [Message.make(challenge, target, meta)]}
          true ->
            :ok
        end
      end
      # different sizes and units
      def usable_as(challenge = %{size: size_a, unit: unit_a},
                    target = %Bitstring{size: size_b, unit: unit_b}, meta) do
        unit_gcd = Integer.gcd(unit_a, unit_b)
        cond do
          unit_b == 0 and rem(size_a - size_b, unit_a) == 0 ->
            {:maybe, [Message.make(challenge, target, meta)]}
          # the lattice formed by the units will never meet.
          rem(size_a - size_b, unit_gcd) != 0 ->
            {:error, Message.make(challenge, target, meta)}
          # the challenge lattice strictly overlays the target lattice
          unit_gcd == unit_b and size_b < size_a ->
            :ok
          true ->
            {:maybe, [Message.make(challenge, target, meta)]}
        end
      end
      # the empty bitstring vs. String.t types
      def usable_as(%{size: 0, unit: 0},
                    %Type{module: String, name: :t, params: []}, _meta), do: :ok
      # NOTE(review): the variable named `challenge` below is actually the
      # *target* String.t/1 type; only the naming is off, behavior is correct.
      def usable_as(%{size: 0, unit: 0},
                    challenge = %Type{module: String, name: :t, params: [p]}, meta) do
        if Type.subtype?(0, p) do
          :ok
        else
          {:error, Message.make(%Type.Bitstring{}, challenge, meta)}
        end
      end
      # special String.t type
      def usable_as(challenge, target = %Type{module: String, name: :t, params: []}, meta) do
        case Type.usable_as(challenge, binary()) do
          :ok ->
            # downgrade :ok to :maybe — String.t additionally implies valid UTF-8
            msg = encapsulation_msg(challenge, target)
            {:maybe, [Message.make(challenge, remote(String.t()), meta ++ [message: msg])]}
          error ->
            error
        end
      end
      def usable_as(challenge, target = %Type{module: String, name: :t, params: [p]}, meta) do
        # check usability against every admissible byte size and AND the results
        case p do
          i when is_integer(i) -> [i]
          range = _.._ -> range
          %Type.Union{of: ints} -> ints
        end
        |> Enum.map(&Type.usable_as(challenge, %Type.Bitstring{size: &1 * 8}, meta))
        |> Enum.reduce(&Type.ternary_and/2)
        |> case do
          :ok ->
            msg = encapsulation_msg(challenge, target)
            {:maybe, [Message.make(challenge, remote(String.t()), meta ++ [message: msg])]}
          other -> other
        end
      end
      # literal bitstrings
      def usable_as(challenge, bitstring, meta) when is_bitstring(bitstring) do
        # a type can at best *maybe* be usable as a single literal value
        challenge
        |> Type.usable_as(%Type.Bitstring{size: :erlang.bit_size(bitstring)}, meta)
        |> case do
          {:error, _} -> {:error, Message.make(challenge, bitstring, meta)}
          {:maybe, _} -> {:maybe, [Message.make(challenge, bitstring, meta)]}
          :ok -> {:maybe, [Message.make(challenge, bitstring, meta)]}
        end
      end
    end

    # TODO: DRY THIS UP into the Type module.
    defp encapsulation_msg(challenge, target) do
      """
      #{inspect challenge} is an equivalent type to #{inspect target} but it may fail because it is
      a remote encapsulation which may require qualifications outside the type system.
      """
    end

    intersection do
      # two distinct fixed sizes never intersect (equal ones are handled by
      # the Type.Helpers-generated identity clause)
      def intersection(%{unit: 0}, %Bitstring{unit: 0}), do: none()
      # fixed size vs. lattice: the fixed size must lie on the lattice
      def intersection(%{size: asz, unit: 0}, %Bitstring{size: bsz, unit: unit})
          when asz >= bsz and rem(asz - bsz, unit) == 0 do
        %Bitstring{size: asz, unit: 0}
      end
      def intersection(%{size: asz, unit: unit}, %Bitstring{size: bsz, unit: 0})
          when bsz >= asz and rem(asz - bsz, unit) == 0 do
        %Bitstring{size: bsz, unit: 0}
      end
      def intersection(%{unit: aun}, %Bitstring{unit: bun})
          when aun == 0 or bun == 0 do
        none()
      end
      # two lattices meet iff the size difference is divisible by gcd(units);
      # the meet is the lattice with unit lcm and the smallest common size
      def intersection(%{size: asz, unit: aun}, %Bitstring{size: bsz, unit: bun}) do
        if rem(asz - bsz, Integer.gcd(aun, bun)) == 0 do
          size = if asz > bsz do
            sizeup(asz, bsz, aun, bun)
          else
            sizeup(bsz, asz, bun, aun)
          end
          %Bitstring{
            size: size,
            unit: lcm(aun, bun)}
        else
          none()
        end
      end
      def intersection(bs, st = %Type{module: String, name: :t}) do
        Type.intersection(st, bs)
      end
      def intersection(bs, bitstring) when is_bitstring(bitstring) do
        Type.intersection(bitstring, bs)
      end
    end

    # Finds the smallest point >= asz that lies on both lattices, where
    # asz >= bsz. Walks up to bun candidate steps; throw/catch is used as an
    # early exit from Enum.reduce. The gcd check in the caller guarantees a
    # common point exists, so the throw always fires and the raise is
    # unreachable by construction.
    defp sizeup(asz, bsz, aun, bun) do
      a_mod_b = rem(aun, bun)
      Enum.reduce(0..bun-1, asz - bsz, fn idx, acc ->
        if rem(acc, bun) == 0 do
          throw idx
        else
          acc + a_mod_b
        end
      end)
      raise "unreachable"
    catch
      idx ->
        asz + aun * idx
    end

    # least common multiple; both arguments are nonzero when this is called
    # from the two-lattice intersection clause.
    defp lcm(a, b), do: div(a * b, Integer.gcd(a, b))

    subtype :usable_as
  end

  defimpl Inspect do
    # Render in Elixir typespec syntax, with special cases for the
    # well-known bitstring()/binary()/empty-binary shapes.
    def inspect(%{size: 0, unit: 0}, _opts), do: "::<<>>"
    def inspect(%{size: 0, unit: 1}, _opts) do
      "bitstring()"
    end
    def inspect(%{size: 0, unit: 8}, _opts) do
      "binary()"
    end
    def inspect(%{size: 0, unit: unit}, _opts) do
      "::<<_::_*#{unit}>>"
    end
    def inspect(%{size: size, unit: 0}, _opts) do
      "::<<_::#{size}>>"
    end
    def inspect(%{size: size, unit: unit}, _opts) do
      "::<<_::#{size}, _::_*#{unit}>>"
    end
  end
end
|
lib/type/bitstring.ex
| 0.935927
| 0.983691
|
bitstring.ex
|
starcoder
|
defmodule Sippet.Transactions.Server.NonInvite do
  @moduledoc false

  # SIP non-INVITE server transaction state machine (RFC 3261 §17.2.2).
  # States progress :trying -> :proceeding -> :completed; each state is a
  # state function in :gen_statem's state-function callback mode, which
  # `use Sippet.Transactions.Server` sets up (initial state :trying).
  # Clause order within each state function matters.

  use Sippet.Transactions.Server, initial_state: :trying

  alias Sippet.Message.StatusLine, as: StatusLine
  alias Sippet.Transactions.Server.State, as: State

  # Timer J (RFC 3261): how long to linger in :completed absorbing request
  # retransmissions on unreliable transports (32 seconds).
  @timer_j 32_000

  # On entry, hand the incoming request up to the transaction user.
  def trying(:enter, _old_state, %State{request: request} = data) do
    receive_request(request, data)
    :keep_state_and_data
  end

  # Retransmitted request before any response exists: nothing to resend yet.
  def trying(:cast, {:incoming_request, _request}, _data),
    do: :keep_state_and_data

  # First response from the TU: 1xx keeps the transaction alive in
  # :proceeding, any final response moves it to :completed.
  def trying(:cast, {:outgoing_response, response}, data) do
    data = send_response(response, data)

    case StatusLine.status_code_class(response.start_line) do
      1 -> {:next_state, :proceeding, data}
      _ -> {:next_state, :completed, data}
    end
  end

  def trying(:cast, {:error, reason}, data),
    do: shutdown(reason, data)

  def trying(event_type, event_content, data),
    do: unhandled_event(event_type, event_content, data)

  def proceeding(:enter, _old_state, _data),
    do: :keep_state_and_data

  # Retransmitted request: replay the last provisional response.
  def proceeding(:cast, {:incoming_request, _request},
      %State{extras: %{last_response: last_response}} = data) do
    send_response(last_response, data)
    :keep_state_and_data
  end

  # Further responses: 1xx stays in :proceeding, final goes to :completed.
  def proceeding(:cast, {:outgoing_response, response}, data) do
    data = send_response(response, data)

    case StatusLine.status_code_class(response.start_line) do
      1 -> {:keep_state, data}
      _ -> {:next_state, :completed, data}
    end
  end

  def proceeding(:cast, {:error, reason}, data),
    do: shutdown(reason, data)

  def proceeding(event_type, event_content, data),
    do: unhandled_event(event_type, event_content, data)

  # Reliable transports cannot retransmit, so the transaction ends at once;
  # unreliable transports wait out Timer J to absorb retransmissions.
  def completed(:enter, _old_state, %State{request: request} = data) do
    if reliable?(request) do
      {:stop, :normal, data}
    else
      {:keep_state_and_data, [{:state_timeout, @timer_j, nil}]}
    end
  end

  # Timer J fired: the transaction is done.
  def completed(:state_timeout, _nil, data),
    do: {:stop, :normal, data}

  # Retransmitted request after the final response: replay it.
  def completed(:cast, {:incoming_request, _request},
      %State{extras: %{last_response: last_response}} = data) do
    send_response(last_response, data)
    :keep_state_and_data
  end

  # Errors after completion are ignored; the transaction is winding down.
  def completed(:cast, {:error, _reason}, _data),
    do: :keep_state_and_data

  def completed(event_type, event_content, data),
    do: unhandled_event(event_type, event_content, data)
end
|
lib/sippet/transactions/server/non_invite.ex
| 0.616243
| 0.424979
|
non_invite.ex
|
starcoder
|
defmodule KafkaEx.Config do
  # The module doc embeds the contents of config/config.exs at compile time
  # so the generated docs show a full example configuration.
  @moduledoc """
  Configuring KafkaEx
  ```
  """ <>
    File.read!(Path.expand("../../config/config.exs", __DIR__)) <>
    """
    ```
    """

  alias KafkaEx.Utils.Logger

  @doc false
  def disable_default_worker do
    Application.get_env(:kafka_ex, :disable_default_worker, false)
  end

  @doc false
  def client_id do
    Application.get_env(:kafka_ex, :client_id, "kafka_ex")
  end

  @doc false
  def consumer_group do
    Application.get_env(:kafka_ex, :consumer_group, "kafka_ex")
  end

  @doc false
  def use_ssl, do: Application.get_env(:kafka_ex, :use_ssl, false)

  # use this function to get the ssl options - it verifies the options and
  # either emits warnings or raises errors as appropriate on misconfiguration
  @doc false
  def ssl_options do
    ssl_options(use_ssl(), Application.get_env(:kafka_ex, :ssl_options, []))
  end

  @doc false
  def default_worker do
    :kafka_ex
  end

  # Picks the server implementation module from the configured
  # :kafka_version (see server/1 below for the mapping).
  @doc false
  def server_impl do
    :kafka_ex
    |> Application.get_env(:kafka_version, :default)
    |> server
  end

  # Resolves the configured broker list. Accepted config shapes are handled
  # by the brokers/1 clauses below: nil, a list, a "host:port,host:port"
  # string, an {m, f, a} tuple, or a zero-arity function.
  @doc false
  def brokers do
    :kafka_ex
    |> Application.get_env(:brokers)
    |> brokers()
  end

  defp brokers(nil),
    do: nil

  defp brokers(list) when is_list(list),
    do: list

  # CSV form: each entry must be "host:port"; a missing port is a fatal
  # misconfiguration (logged, then raised).
  defp brokers(csv) when is_binary(csv) do
    for line <- String.split(csv, ","), into: [] do
      case line |> trim() |> String.split(":") do
        [host] ->
          msg = "Port not set for Kafka broker #{host}"
          Logger.warn(msg)
          raise msg

        [host, port] ->
          {port, _} = Integer.parse(port)
          {host, port}
      end
    end
  end

  # {module, function, args} — resolved lazily at lookup time.
  defp brokers({mod, fun, args}) when is_atom(mod) and is_atom(fun) do
    apply(mod, fun, args)
  end

  # zero-arity function — also resolved lazily.
  defp brokers(fun) when is_function(fun, 0) do
    fun.()
  end

  # Compile-time compatibility shim: String.strip/1 was replaced by
  # String.trim/1 in Elixir 1.3.
  if Version.match?(System.version(), "<1.3.0") do
    defp trim(string), do: String.strip(string)
  else
    defp trim(string), do: String.trim(string)
  end

  # kafka_version config value -> server implementation module; anything
  # unrecognized (including :default) falls through to 0.10+.
  defp server("0.8.0"), do: KafkaEx.Server0P8P0
  defp server("0.8.2"), do: KafkaEx.Server0P8P2
  defp server("0.9.0"), do: KafkaEx.Server0P9P0
  defp server("kayrock"), do: KafkaEx.New.Client
  defp server(_), do: KafkaEx.Server0P10AndLater

  # ssl_options should be an empty list by default if use_ssl is false
  defp ssl_options(false, []), do: []

  # emit a warning if use_ssl is false but options are present
  # (this is not a fatal error and can occur if one disables ssl in the
  # default option set)
  defp ssl_options(false, options) do
    Logger.warn(
      "Ignoring ssl_options #{inspect(options)} because " <>
        "use_ssl is false. If you do not intend to use ssl and want to " <>
        "remove this warning, set `ssl_options: []` in the KafkaEx config."
    )

    []
  end

  # verify that options is at least a keyword list
  defp ssl_options(true, options) do
    if Keyword.keyword?(options) do
      options
    else
      raise(
        ArgumentError,
        "SSL is enabled and invalid ssl_options were provided: " <>
          inspect(options)
      )
    end
  end
end
|
lib/kafka_ex/config.ex
| 0.631708
| 0.496338
|
config.ex
|
starcoder
|
defmodule AWS.ComputeOptimizer do
  @moduledoc """
  AWS Compute Optimizer is a service that analyzes the configuration and
  utilization metrics of your AWS compute resources, such as EC2 instances, Auto
  Scaling groups, AWS Lambda functions, and Amazon EBS volumes.

  It reports whether your resources are optimal, and generates optimization
  recommendations to reduce the cost and improve the performance of your
  workloads. Compute Optimizer also provides recent utilization metric data, as
  well as projected utilization metric data for the recommendations, which you can
  use to evaluate which recommendation provides the best price-performance
  trade-off. The analysis of your usage patterns can help you decide when to move
  or resize your running resources, and still meet your performance and capacity
  requirements. For more information about Compute Optimizer, including the
  required permissions to use the service, see the [AWS Compute Optimizer User Guide](https://docs.aws.amazon.com/compute-optimizer/latest/ug/).
  """

  # NOTE(review): this looks like generated client code — every public
  # function is a thin wrapper that posts a JSON request for the API action
  # of the same name via AWS.Request.request_post/5.

  alias AWS.Client
  alias AWS.Request

  # Static service metadata consumed by AWS.Request when signing and
  # routing the HTTP request.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2019-11-01",
      content_type: "application/x-amz-json-1.0",
      credential_scope: nil,
      endpoint_prefix: "compute-optimizer",
      global?: false,
      protocol: "json",
      service_id: "Compute Optimizer",
      signature_version: "v4",
      signing_name: "compute-optimizer",
      target_prefix: "ComputeOptimizerService"
    }
  end

  @doc """
  Describes recommendation export jobs created in the last seven days.

  Use the `ExportAutoScalingGroupRecommendations` or
  `ExportEC2InstanceRecommendations` actions to request an export of your
  recommendations. Then use the `DescribeRecommendationExportJobs` action to view
  your export jobs.
  """
  def describe_recommendation_export_jobs(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "DescribeRecommendationExportJobs", input, options)
  end

  @doc """
  Exports optimization recommendations for Auto Scaling groups.

  Recommendations are exported in a comma-separated values (.csv) file, and its
  metadata in a JavaScript Object Notation (.json) file, to an existing Amazon
  Simple Storage Service (Amazon S3) bucket that you specify. For more
  information, see [Exporting Recommendations](https://docs.aws.amazon.com/compute-optimizer/latest/ug/exporting-recommendations.html)
  in the *Compute Optimizer User Guide*.

  You can have only one Auto Scaling group export job in progress per AWS Region.
  """
  def export_auto_scaling_group_recommendations(%Client{} = client, input, options \\ []) do
    Request.request_post(
      client,
      metadata(),
      "ExportAutoScalingGroupRecommendations",
      input,
      options
    )
  end

  @doc """
  Exports optimization recommendations for Amazon EBS volumes.

  Recommendations are exported in a comma-separated values (.csv) file, and its
  metadata in a JavaScript Object Notation (.json) file, to an existing Amazon
  Simple Storage Service (Amazon S3) bucket that you specify. For more
  information, see [Exporting Recommendations](https://docs.aws.amazon.com/compute-optimizer/latest/ug/exporting-recommendations.html)
  in the *Compute Optimizer User Guide*.

  You can have only one Amazon EBS volume export job in progress per AWS Region.
  """
  def export_ebs_volume_recommendations(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ExportEBSVolumeRecommendations", input, options)
  end

  @doc """
  Exports optimization recommendations for Amazon EC2 instances.

  Recommendations are exported in a comma-separated values (.csv) file, and its
  metadata in a JavaScript Object Notation (.json) file, to an existing Amazon
  Simple Storage Service (Amazon S3) bucket that you specify. For more
  information, see [Exporting Recommendations](https://docs.aws.amazon.com/compute-optimizer/latest/ug/exporting-recommendations.html)
  in the *Compute Optimizer User Guide*.

  You can have only one Amazon EC2 instance export job in progress per AWS Region.
  """
  def export_ec2_instance_recommendations(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "ExportEC2InstanceRecommendations", input, options)
  end

  @doc """
  Exports optimization recommendations for AWS Lambda functions.

  Recommendations are exported in a comma-separated values (.csv) file, and its
  metadata in a JavaScript Object Notation (.json) file, to an existing Amazon
  Simple Storage Service (Amazon S3) bucket that you specify. For more
  information, see [Exporting Recommendations](https://docs.aws.amazon.com/compute-optimizer/latest/ug/exporting-recommendations.html)
  in the *Compute Optimizer User Guide*.

  You can have only one Lambda function export job in progress per AWS Region.
  """
  def export_lambda_function_recommendations(%Client{} = client, input, options \\ []) do
    Request.request_post(
      client,
      metadata(),
      "ExportLambdaFunctionRecommendations",
      input,
      options
    )
  end

  @doc """
  Returns Auto Scaling group recommendations.

  AWS Compute Optimizer generates recommendations for Amazon EC2 Auto Scaling
  groups that meet a specific set of requirements. For more information, see the
  [Supported resources and requirements](https://docs.aws.amazon.com/compute-optimizer/latest/ug/requirements.html)
  in the *AWS Compute Optimizer User Guide*.
  """
  def get_auto_scaling_group_recommendations(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetAutoScalingGroupRecommendations", input, options)
  end

  @doc """
  Returns Amazon Elastic Block Store (Amazon EBS) volume recommendations.

  AWS Compute Optimizer generates recommendations for Amazon EBS volumes that meet
  a specific set of requirements. For more information, see the [Supported resources and
  requirements](https://docs.aws.amazon.com/compute-optimizer/latest/ug/requirements.html)
  in the *AWS Compute Optimizer User Guide*.
  """
  def get_ebs_volume_recommendations(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetEBSVolumeRecommendations", input, options)
  end

  @doc """
  Returns Amazon EC2 instance recommendations.

  AWS Compute Optimizer generates recommendations for Amazon Elastic Compute Cloud
  (Amazon EC2) instances that meet a specific set of requirements. For more
  information, see the [Supported resources and requirements](https://docs.aws.amazon.com/compute-optimizer/latest/ug/requirements.html)
  in the *AWS Compute Optimizer User Guide*.
  """
  def get_ec2_instance_recommendations(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetEC2InstanceRecommendations", input, options)
  end

  @doc """
  Returns the projected utilization metrics of Amazon EC2 instance
  recommendations.

  The `Cpu` and `Memory` metrics are the only projected utilization metrics
  returned when you run this action. Additionally, the `Memory` metric is returned
  only for resources that have the unified CloudWatch agent installed on them. For
  more information, see [Enabling Memory Utilization with the CloudWatch Agent](https://docs.aws.amazon.com/compute-optimizer/latest/ug/metrics.html#cw-agent).
  """
  def get_ec2_recommendation_projected_metrics(%Client{} = client, input, options \\ []) do
    Request.request_post(
      client,
      metadata(),
      "GetEC2RecommendationProjectedMetrics",
      input,
      options
    )
  end

  @doc """
  Returns the enrollment (opt in) status of an account to the AWS Compute
  Optimizer service.

  If the account is the management account of an organization, this action also
  confirms the enrollment status of member accounts within the organization.
  """
  def get_enrollment_status(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetEnrollmentStatus", input, options)
  end

  @doc """
  Returns AWS Lambda function recommendations.

  AWS Compute Optimizer generates recommendations for functions that meet a
  specific set of requirements. For more information, see the [Supported resources and
  requirements](https://docs.aws.amazon.com/compute-optimizer/latest/ug/requirements.html)
  in the *AWS Compute Optimizer User Guide*.
  """
  def get_lambda_function_recommendations(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetLambdaFunctionRecommendations", input, options)
  end

  @doc """
  Returns the optimization findings for an account.

  It returns the number of:

  * Amazon EC2 instances in an account that are `Underprovisioned`,
  `Overprovisioned`, or `Optimized`.

  * Auto Scaling groups in an account that are `NotOptimized`, or
  `Optimized`.

  * Amazon EBS volumes in an account that are `NotOptimized`, or
  `Optimized`.

  * Lambda functions in an account that are `NotOptimized`, or
  `Optimized`.
  """
  def get_recommendation_summaries(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "GetRecommendationSummaries", input, options)
  end

  @doc """
  Updates the enrollment (opt in and opt out) status of an account to the AWS
  Compute Optimizer service.

  If the account is a management account of an organization, this action can also
  be used to enroll member accounts within the organization.

  You must have the appropriate permissions to opt in to Compute Optimizer, to
  view its recommendations, and to opt out. For more information, see [Controlling access with AWS Identity and Access
  Management](https://docs.aws.amazon.com/compute-optimizer/latest/ug/security-iam.html)
  in the *AWS Compute Optimizer User Guide*.

  When you opt in, Compute Optimizer automatically creates a Service-Linked Role
  in your account to access its data. For more information, see [Using Service-Linked Roles for AWS Compute
  Optimizer](https://docs.aws.amazon.com/compute-optimizer/latest/ug/using-service-linked-roles.html)
  in the *AWS Compute Optimizer User Guide*.
  """
  def update_enrollment_status(%Client{} = client, input, options \\ []) do
    Request.request_post(client, metadata(), "UpdateEnrollmentStatus", input, options)
  end
end
|
lib/aws/generated/compute_optimizer.ex
| 0.941506
| 0.512815
|
compute_optimizer.ex
|
starcoder
|
defmodule Primer.Navigation do
  @moduledoc """
  Functions for generating
  [Primer Navigation](https://github.com/primer/primer/tree/master/modules/primer-navigation) elements.
  """

  use Phoenix.HTML

  alias Primer.Labels
  alias Primer.Utility

  @doc """
  Renders a menu element.

  **See:** [Menu element documentation](https://github.com/primer/primer/tree/master/modules/primer-navigation#menu)

  ## Example

  EEx template:

  ```
  <%= menu do %>
    <%= menu_item("Foo", "/path/to/foo", selected: true) %>
    <%= menu_item("Bar", "/path/to/bar") %>
  <% end %>
  ```

  generates:

  ```html
  <nav class="menu">
    <a class="menu-item selected" href="/path/to/foo">Foo</a>
    <a class="menu-item" href="/path/to/bar">Bar</a>
  </nav>
  ```
  """
  @spec menu(Keyword.t()) :: Phoenix.HTML.safe()
  def menu(block)

  def menu(do: block) do
    content_tag(:nav, block, class: "menu")
  end

  @doc """
  Renders a menu item element.

  ## Options

  * `:octicon` - Renders an [Octicon](https://octicons.github.com) with the menu item
  * `:selected` - If `true`, renders the menu item as selected

  All other options are passed through to the underlying HTML `a` element.
  """
  @spec menu_item(String.t(), String.t(), Keyword.t()) :: Phoenix.HTML.safe()
  def menu_item(text, link, options \\ []) do
    selected = options[:selected]

    class =
      "menu-item"
      |> Utility.append_class(options[:class])
      |> Utility.append_class(if selected, do: "selected", else: nil)

    tag_options =
      options
      |> Keyword.drop([:octicon, :selected])
      |> Keyword.put(:href, link)
      |> Keyword.put(:class, class)

    content =
      if options[:octicon] do
        [
          PhoenixOcticons.octicon(options[:octicon], width: 16),
          text
        ]
      else
        text
      end

    content_tag(:a, content, tag_options)
  end

  @doc """
  Renders an `UnderlineNav` element.

  The `underline_nav_item/3` function is used to generate the nav items within the nav element.

  **See:** [UnderlineNav element documentation](https://github.com/primer/primer/tree/master/modules/primer-navigation#underline-nav)

  ## Options

  All options are passed through to the underlying HTML `nav` element.

  ## Example

  EEx template:

  ```
  <%= underline_nav do %>
    <%= underline_nav_item "Foo", "/path/to/foo", selected: true %>
    <%= underline_nav_item "Bar", "/path/to/bar" %>
  <% end %>
  ```

  generates:

  ```html
  <nav class="UnderlineNav">
    <div class="UnderlineNav-body">
      <a class="UnderlineNav-item selected" href="/path/to/foo">Foo</a>
      <a class="UnderlineNav-item" href="/path/to/bar">Bar</a>
    </div>
  </nav>
  ```
  """
  @spec underline_nav(Keyword.t(), Keyword.t()) :: Phoenix.HTML.safe()
  def underline_nav(options \\ [], block)

  def underline_nav(options, do: block) do
    class = Utility.append_class("UnderlineNav", options[:class])

    content_tag(:nav, class: class) do
      content_tag(:div, block, class: "UnderlineNav-body")
    end
  end

  @doc """
  Renders an `UnderlineNav-item` element.

  ## Options

  * `:counter` - When supplied with an integer value, renders a `Counter` element
  * `:selected` - When `true`, renders this item as selected

  All other options are passed through to the underlying HTML `a` element.
  """
  @spec underline_nav_item(String.t(), String.t(), Keyword.t()) :: Phoenix.HTML.safe()
  def underline_nav_item(text, link, options \\ []) do
    count = options[:counter]
    selected = options[:selected]

    class =
      "UnderlineNav-item"
      |> Utility.append_class(options[:class])
      |> Utility.append_class(if selected, do: "selected", else: nil)

    # The `:href` attribute is always emitted, selected or not: the
    # documented example above renders the selected item as a normal link,
    # and `menu_item/3` behaves the same way. (Previously `:href` was
    # dropped when `selected: true`, producing a non-navigable anchor.)
    tag_options =
      options
      |> Keyword.drop([:counter, :selected])
      |> Keyword.put(:href, link)
      |> Keyword.put(:class, class)

    content = if count, do: [text, Labels.counter(count)], else: text

    content_tag(:a, content, tag_options)
  end
end
|
lib/primer/navigation.ex
| 0.818084
| 0.765769
|
navigation.ex
|
starcoder
|
defmodule Membrane.MP4.MovieFragmentBox do
  @moduledoc """
  A module containing a function for assembling an MPEG-4 movie fragment box.

  The movie fragment box (`moof` atom) is a top-level box and consists of:

  * exactly one movie fragment header (`mfhd` atom)

    The movie fragment header contains a sequence number that is
    increased for every subsequent movie fragment in order in which
    they occur.

  * zero or more track fragment box (`traf` atom)

    The track fragment box provides information related to a track
    fragment's presentation time, duration and physical location of
    its samples in the media data box.

    This box is required by Common Media Application Format.

  For more information about movie fragment box and its contents refer to
  [ISO/IEC 14496-12](https://www.iso.org/standard/74428.html) or to
  [ISO/IEC 23000-19](https://www.iso.org/standard/79106.html).
  """
  alias Membrane.MP4.Container

  # Bit flags of the trun box's `flags` field, selecting which optional
  # fields are present in the run.
  @trun_flags %{data_offset: 1, sample_duration: 0x100, sample_size: 0x200, sample_flags: 0x400}

  # Size of the mdat box header (4-byte size + 4-byte "mdat" type); sample
  # data begins this many bytes into the mdat box that follows the moof.
  @mdat_data_offset 8

  @doc """
  Assembles a `moof` box from `config`.

  `:id` is the track ID written into the `tfhd` box. The trun
  `data_offset` pointing at the first sample in the following `mdat` box is
  computed automatically.
  """
  @spec assemble(%{
          id: integer,
          sequence_number: integer,
          elapsed_time: integer,
          timescale: integer,
          duration: integer,
          samples_table: [%{sample_size: integer, sample_flags: integer}]
        }) :: Container.t()
  def assemble(config) do
    config =
      config
      |> Map.merge(%{
        sample_count: length(config.samples_table),
        data_offset: 0
      })

    # Two-pass offset computation: serialize the moof once with a zero
    # data_offset to learn its byte size, then rebuild it with the real
    # offset (moof size plus the 8-byte mdat header).
    moof_size = moof(config) |> Container.serialize!() |> byte_size()
    config = %{config | data_offset: moof_size + @mdat_data_offset}

    moof(config)
  end

  # Builds the moof box structure: one mfhd plus one traf containing
  # tfhd (track defaults), tfdt (base decode time) and trun (sample run).
  defp moof(config) do
    [
      moof: %{
        children: [
          mfhd: %{
            children: [],
            fields: %{flags: 0, sequence_number: config.sequence_number, version: 0}
          },
          traf: %{
            children: [
              tfhd: %{
                children: [],
                fields: %{
                  default_sample_duration: 0,
                  default_sample_flags: 0,
                  default_sample_size: 0,
                  flags: 0b100000000000111000,
                  track_id: config.id,
                  version: 0
                }
              },
              tfdt: %{
                children: [],
                fields: %{
                  base_media_decode_time: config.elapsed_time,
                  flags: 0,
                  version: 1
                }
              },
              trun: %{
                children: [],
                fields: %{
                  data_offset: config.data_offset,
                  flags:
                    @trun_flags.data_offset + @trun_flags.sample_duration +
                      @trun_flags.sample_size + @trun_flags.sample_flags,
                  sample_count: config.sample_count,
                  samples: config.samples_table,
                  version: 0
                }
              }
            ],
            fields: %{}
          }
        ],
        fields: %{}
      }
    ]
  end
end
|
lib/membrane_mp4/movie_fragment_box.ex
| 0.87701
| 0.572424
|
movie_fragment_box.ex
|
starcoder
|
defmodule ExJenga.SendMoney.PesalinkToBank do
  @moduledoc """
  This enables your application to send money to a PesaLink participating bank.
  It is restricted to Kenya.
  """
  import ExJenga.JengaBase
  alias ExJenga.Signature

  @uri_affix "/transaction/v2/remittance#pesalinkacc"

  @doc """
  Send Money to other banks via Pesalink.

  - Restricted to Kenya.
  - Receiving bank has to be a participant of Pesalink.

  Read more about Pesalink participating banks here: https://ipsl.co.ke/participating-banks/

  ## Parameters
  attrs: - a map containing:
  - `source` - a map containing; `countryCode`, `name` and `accountNumber`
  - `destination` - a map containing; `type`, `countryCode`, `name`, `mobileNumber`, `bankCode` and `accountNumber`
  - `transfer` - a map containing; `type`, `amount`, `currencyCode`, `reference`, `date` and `description`

  Read More about the parameters' descriptions here: https://developer.jengaapi.io/reference#pesalink2bank

  ## Example
      iex> ExJenga.SendMoney.PesalinkToBank.request(%{ source: %{ countryCode: "KE", name: "<NAME>", accountNumber: "1460163242696" }, destination: %{ type: "bank", countryCode: "KE", name: "<NAME>", mobileNumber: "0722000000", bankCode: "01", accountNumber: "01100762802910" }, transfer: %{ type: "PesaLink", amount: "1000", currencyCode: "KES", reference: "639434645738", date: "2020-11-25", description: "some remarks here" } })
      {:ok,
      %{
      "transactionId" => "1452854",
      "status" => "SUCCESS",
      "description" => "Confirmed. Ksh 1000 Sent to 01100762802910 -<NAME> from your account 1460163242696 on 20-05-2019 at 141313 Ref. 707700078800 Thank you"
      }}
  """
  @spec request(map()) :: {:error, any()} | {:ok, any()}
  def request(
        %{
          source: %{countryCode: _, name: _, accountNumber: account_number},
          destination: %{
            type: _,
            countryCode: _,
            name: name,
            bankCode: _,
            accountNumber: _
          },
          transfer: %{
            type: _,
            amount: amount,
            currencyCode: currency_code,
            reference: reference,
            date: _,
            description: _
          }
        } = request_body
      ) do
    # The Jenga API expects a signature over these exact fields concatenated
    # in this exact order, sent in the "signature" header.
    message = "#{amount}#{currency_code}#{reference}#{name}#{account_number}"
    headers = [{"signature", Signature.sign(message)}]

    make_request(@uri_affix, request_body, headers)
  end

  # Fallback for payloads missing any of the required keys above.
  def request(_) do
    {:error, "Required Parameters missing, check your request body"}
  end
end
|
lib/ex_jenga/send_money/pesalink_to_bank.ex
| 0.749912
| 0.481149
|
pesalink_to_bank.ex
|
starcoder
|
defmodule Pavlov.Mocks.Matchers do
  @moduledoc """
  Provides matchers for Mocked modules.
  """
  import ExUnit.Assertions
  import Pavlov.Mocks.Matchers.Messages

  @doc """
  Asserts whether a method was called with on a mocked module.

  Use in conjunction with `with` to perform assertions on the arguments
  passed in to the method.

  A negative version `not_to_have_received` is also provided. The same usage
  instructions apply.

  ## Example
      expect HTTPotion |> to_have_received :get
  """
  def to_have_received(module, tuple) when is_tuple(tuple) do
    {method, args} = tuple
    args = List.flatten([args])
    check_call(:assertion, module, method, args, [module, method, args])
  end

  def to_have_received(module, method) do
    check_call(:assertion, module, method, [], [module, method])
  end

  @doc false
  def not_to_have_received(module, tuple) when is_tuple(tuple) do
    {method, args} = tuple
    args = List.flatten([args])
    check_call(:refutation, module, method, args, [module, method, args])
  end

  def not_to_have_received(module, method) do
    check_call(:refutation, module, method, [], [module, method])
  end

  @doc """
  Use in conjunction with `to_have_received` to perform assertions on the
  arguments passed in to the given method.

  ## Example
      expect HTTPotion |> to_have_received :get |> with_args "http://example.com"
  """
  def with_args(method, args) do
    {method, args}
  end

  @doc """
  Asserts whether a method was called when using "Asserts" syntax:

  ## Example
      assert called HTTPotion.get("http://example.com")
  """
  defmacro called({{:., _, [module, f]}, _, args}) do
    quote do
      :meck.called(unquote(module), unquote(f), unquote(args))
    end
  end

  # Shared core for the positive and negative matchers: queries :meck for the
  # call and asserts/refutes, flunking with a descriptive message when the
  # expectation is not met.
  defp check_call(:assertion, module, method, args, message_args) do
    case called?(module, method, args) do
      false -> flunk(message_for_matcher(:have_received, message_args, :assertion))
      result -> assert result
    end
  end

  defp check_call(:refutation, module, method, args, message_args) do
    case called?(module, method, args) do
      true -> flunk(message_for_matcher(:have_received, message_args, :refutation))
      result -> refute result
    end
  end

  defp called?(module, f, args) do
    :meck.called(module, f, args)
  end
end
|
lib/mocks/matchers.ex
| 0.816809
| 0.639265
|
matchers.ex
|
starcoder
|
defmodule AWS.Transfer do
  @moduledoc """
  AWS Transfer Family is a fully managed service that enables the transfer of
  files over the File Transfer Protocol (FTP), File Transfer Protocol over SSL
  (FTPS), or Secure Shell (SSH) File Transfer Protocol (SFTP) directly into and
  out of Amazon Simple Storage Service (Amazon S3).
  AWS helps you seamlessly migrate your file transfer workflows to AWS Transfer
  Family by integrating with existing authentication systems, and providing DNS
  routing with Amazon Route 53 so nothing changes for your customers and partners,
  or their applications. With your data in Amazon S3, you can use it with AWS
  services for processing, analytics, machine learning, and archiving. Getting
  started with AWS Transfer Family is easy since there is no infrastructure to buy
  and set up.
  """
  @doc """
  Instantiates an autoscaling virtual server based on the selected file transfer
  protocol in AWS.
  When you make updates to your file transfer protocol-enabled server or when you
  work with users, use the service-generated `ServerId` property that is assigned
  to the newly created server.
  """
  def create_server(client, input, options \\ []) do
    request(client, "CreateServer", input, options)
  end
  @doc """
  Creates a user and associates them with an existing file transfer
  protocol-enabled server.
  You can only create and associate users with servers that have the
  `IdentityProviderType` set to `SERVICE_MANAGED`. Using parameters for
  `CreateUser`, you can specify the user name, set the home directory, store the
  user's public key, and assign the user's AWS Identity and Access Management
  (IAM) role. You can also optionally add a scope-down policy, and assign metadata
  with tags that can be used to group and search for users.
  """
  def create_user(client, input, options \\ []) do
    request(client, "CreateUser", input, options)
  end
  @doc """
  Deletes the file transfer protocol-enabled server that you specify.
  No response returns from this operation.
  """
  def delete_server(client, input, options \\ []) do
    request(client, "DeleteServer", input, options)
  end
  @doc """
  Deletes a user's Secure Shell (SSH) public key.
  No response is returned from this operation.
  """
  def delete_ssh_public_key(client, input, options \\ []) do
    request(client, "DeleteSshPublicKey", input, options)
  end
  @doc """
  Deletes the user belonging to a file transfer protocol-enabled server you
  specify.
  No response returns from this operation.
  When you delete a user from a server, the user's information is lost.
  """
  def delete_user(client, input, options \\ []) do
    request(client, "DeleteUser", input, options)
  end
  @doc """
  Describes the security policy that is attached to your file transfer
  protocol-enabled server.
  The response contains a description of the security policy's properties. For
  more information about security policies, see [Working with security policies](https://docs.aws.amazon.com/transfer/latest/userguide/security-policies.html).
  """
  def describe_security_policy(client, input, options \\ []) do
    request(client, "DescribeSecurityPolicy", input, options)
  end
  @doc """
  Describes a file transfer protocol-enabled server that you specify by passing
  the `ServerId` parameter.
  The response contains a description of a server's properties. When you set
  `EndpointType` to VPC, the response will contain the `EndpointDetails`.
  """
  def describe_server(client, input, options \\ []) do
    request(client, "DescribeServer", input, options)
  end
  @doc """
  Describes the user assigned to the specific file transfer protocol-enabled
  server, as identified by its `ServerId` property.
  The response from this call returns the properties of the user associated with
  the `ServerId` value that was specified.
  """
  def describe_user(client, input, options \\ []) do
    request(client, "DescribeUser", input, options)
  end
  @doc """
  Adds a Secure Shell (SSH) public key to a user account identified by a
  `UserName` value assigned to the specific file transfer protocol-enabled server,
  identified by `ServerId`.
  The response returns the `UserName` value, the `ServerId` value, and the name of
  the `SshPublicKeyId`.
  """
  def import_ssh_public_key(client, input, options \\ []) do
    request(client, "ImportSshPublicKey", input, options)
  end
  @doc """
  Lists the security policies that are attached to your file transfer
  protocol-enabled servers.
  """
  def list_security_policies(client, input, options \\ []) do
    request(client, "ListSecurityPolicies", input, options)
  end
  @doc """
  Lists the file transfer protocol-enabled servers that are associated with your
  AWS account.
  """
  def list_servers(client, input, options \\ []) do
    request(client, "ListServers", input, options)
  end
  @doc """
  Lists all of the tags associated with the Amazon Resource Number (ARN) you
  specify.
  The resource can be a user, server, or role.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end
  @doc """
  Lists the users for a file transfer protocol-enabled server that you specify by
  passing the `ServerId` parameter.
  """
  def list_users(client, input, options \\ []) do
    request(client, "ListUsers", input, options)
  end
  @doc """
  Changes the state of a file transfer protocol-enabled server from `OFFLINE` to
  `ONLINE`.
  It has no impact on a server that is already `ONLINE`. An `ONLINE` server can
  accept and process file transfer jobs.
  The state of `STARTING` indicates that the server is in an intermediate state,
  either not fully able to respond, or not fully online. The values of
  `START_FAILED` can indicate an error condition.
  No response is returned from this call.
  """
  def start_server(client, input, options \\ []) do
    request(client, "StartServer", input, options)
  end
  @doc """
  Changes the state of a file transfer protocol-enabled server from `ONLINE` to
  `OFFLINE`.
  An `OFFLINE` server cannot accept and process file transfer jobs. Information
  tied to your server, such as server and user properties, are not affected by
  stopping your server. Stopping the server will not reduce or impact your file
  transfer protocol endpoint billing.
  The state of `STOPPING` indicates that the server is in an intermediate state,
  either not fully able to respond, or not fully offline. The values of
  `STOP_FAILED` can indicate an error condition.
  No response is returned from this call.
  """
  def stop_server(client, input, options \\ []) do
    request(client, "StopServer", input, options)
  end
  @doc """
  Attaches a key-value pair to a resource, as identified by its Amazon Resource
  Name (ARN).
  Resources are users, servers, roles, and other entities.
  There is no response returned from this call.
  """
  def tag_resource(client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end
  @doc """
  If the `IdentityProviderType` of a file transfer protocol-enabled server is
  `API_Gateway`, tests whether your API Gateway is set up successfully.
  We highly recommend that you call this operation to test your authentication
  method as soon as you create your server. By doing so, you can troubleshoot
  issues with the API Gateway integration to ensure that your users can
  successfully use the service.
  """
  def test_identity_provider(client, input, options \\ []) do
    request(client, "TestIdentityProvider", input, options)
  end
  @doc """
  Detaches a key-value pair from a resource, as identified by its Amazon Resource
  Name (ARN).
  Resources are users, servers, roles, and other entities.
  No response is returned from this call.
  """
  def untag_resource(client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end
  @doc """
  Updates the file transfer protocol-enabled server's properties after that server
  has been created.
  The `UpdateServer` call returns the `ServerId` of the server you updated.
  """
  def update_server(client, input, options \\ []) do
    request(client, "UpdateServer", input, options)
  end
  @doc """
  Assigns new properties to a user.
  Parameters you pass modify any or all of the following: the home directory,
  role, and policy for the `UserName` and `ServerId` you specify.
  The response returns the `ServerId` and the `UserName` for the updated user.
  """
  def update_user(client, input, options \\ []) do
    request(client, "UpdateUser", input, options)
  end
  # Builds and signs (SigV4) a JSON request for the Transfer service; the
  # operation is dispatched server-side via the "X-Amz-Target" header.
  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, action, input, options) do
    client = %{client | service: "transfer"}
    host = build_host("transfer", client)
    url = build_url(host, client)
    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "TransferService.#{action}"}
    ]
    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    post(client, url, payload, headers, options)
  end
  # POSTs the signed request; any status other than 200 is an error.
  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: body} = response} ->
        # An empty body decodes to nil, matching {:ok, map() | nil, map()}.
        body = if body != "", do: decode!(client, body)
        {:ok, body, response}
      {:ok, response} ->
        {:error, {:unexpected_response, response}}
      error = {:error, _reason} -> error
    end
  end
  # The "local" region bypasses the regional AWS hostname scheme (for testing):
  # it uses the configured endpoint as-is, or "localhost" when none is set.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end
  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
  defp encode!(client, payload) do
    AWS.Client.encode!(client, payload, :json)
  end
  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
|
lib/aws/generated/transfer.ex
| 0.858867
| 0.510802
|
transfer.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.