defmodule Saltpack.Armor do
@moduledoc """
Base62 armoring and dearmoring of saltpack messages.
"""
@armorer BaseX.prepare_module(
"Base62Armor",
"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
32
)
@typedoc """
A keyword list with formatting options
- `app`: the application name (default: "")
- `chars_per_word`: grouping word-size (default: 18)
- `words_per_line`: grouping line-size (default: 4)
The start and end framing are always presented on separate lines.
```
72 (4 words of 18 characters)
+3 (inter-word spaces)
+1 (end-of-message indicator: `.`)
==
76 (default maximum line-length)
```
"""
@type formatting_options :: [
app: String.t(),
chars_per_word: pos_integer,
words_per_line: pos_integer
]
@spec parse_options(formatting_options) :: {String.t(), pos_integer, pos_integer}
defp parse_options(options) do
{Keyword.get(options, :app, ""), Keyword.get(options, :chars_per_word, 18),
Keyword.get(options, :words_per_line, 4)}
end
@doc false
def armor_message(m, t, o \\ []) do
{a, cpw, wpl} = parse_options(o)
{h, f} = head_foot(t, a)
Enum.join([h, m |> armor_raw |> format_payload({cpw, wpl}), f], ".\n") <> "."
end
defp head_foot(t, a) do
app_part = if a == "", do: "", else: a <> " "
shared = app_part <> "SALTPACK " <> t
{"BEGIN " <> shared, "END " <> shared}
end
@doc false
def armor_raw(s), do: s |> @armorer.encode
@doc false
def dearmor_message(m) do
case m |> normalize_message |> String.split(".") do
[h, p, f, _] -> {framed_message_type(h, f), dearmor_raw(p)}
_ -> dearmor_error()
end
end
defp dearmor_error, do: raise("Invalid or incomplete message.")
defp normalize_message(m), do: Regex.replace(~r/[>\s]+/, m, "")
defp framed_message_type(h, f) do
{t, pf} =
case Regex.named_captures(
~r/^BEGIN(?<app>[a-zA-Z0-9]+)?SALTPACK(?<type>ENCRYPTEDMESSAGE|SIGNEDMESSAGE|DETACHEDSIGNATURE|MESSAGE)$/,
h
) do
%{"app" => app, "type" => type} -> {type, "END" <> app <> "SALTPACK" <> type}
%{"type" => type} -> {type, "ENDSALTPACK" <> type}
_ -> {nil, nil}
end
if f == pf, do: t, else: dearmor_error()
end
@doc false
def dearmor_raw(s), do: s |> @armorer.decode
defp format_payload(s, {cpw, wpl}), do: s |> set_words(cpw, []) |> set_lines(wpl, [])
defp set_words(chars, cpw, acc) when byte_size(chars) <= cpw,
do: [chars | acc] |> Enum.reverse()
defp set_words(chars, cpw, acc) when byte_size(chars) > cpw do
{this, rest} = {:binary.part(chars, 0, cpw), :binary.part(chars, cpw, byte_size(chars) - cpw)}
set_words(rest, cpw, [this | acc])
end
defp set_lines([], _wpl, acc), do: acc |> Enum.reverse() |> Enum.join("\n")
defp set_lines(words, wpl, acc) do
{words, rest} = grab_words({}, wpl, words)
set_lines(rest, wpl, [words |> Tuple.to_list() |> Enum.join(" ") | acc])
end
defp grab_words(words, _count, []), do: {words, []}
defp grab_words(words, count, [h | t]) when tuple_size(words) < count,
do: grab_words(Tuple.append(words, h), count, t)
defp grab_words(words, count, left) when tuple_size(words) == count, do: {words, left}
end
# Source: lib/saltpack/armor.ex
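# A minimal usage sketch for Saltpack.Armor above (the message body, "MESSAGE"
# type string, and "myapp" application name are illustrative values; `app:` is
# optional and defaults to ""). With the default options, payload lines are at
# most 76 characters, as derived in the typedoc above.
armored = Saltpack.Armor.armor_message("hello world", "MESSAGE", app: "myapp")
# The armored string is framed as
# "BEGIN myapp SALTPACK MESSAGE.\n<payload>.\nEND myapp SALTPACK MESSAGE."
{"MESSAGE", "hello world"} = Saltpack.Armor.dearmor_message(armored)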
defmodule SurfaceBootstrap.Modal do
use Surface.LiveComponent
alias SurfaceBootstrap.Button
alias Surface.Components.Raw
@moduledoc """
The Bootstrap **modal**, with various configuration options.
This component relies on Bootstrap Native; follow the instructions in the README to use it.
Use the public API to show/hide the modal programmatically.
The hook takes care of interop with the classes that the JavaScript code modifies to show/hide the modal.
Using `Modal.show("component_id")` or `Modal.hide("component_id")` essentially delegates down
to the JavaScript implementation to allow for animation, scroll overflow handling on the `<body>` element, etc.
"""
@doc "Should modal fade in/out? Defaults true"
prop fade, :boolean, default: true
@doc "Have backdrop / darkened background? Defaults true"
prop backdrop, :boolean, default: true
@doc "Should have static backdrop? As in clicking background does not close modal."
prop static_backdrop, :boolean
@doc "Scrollable content?"
prop scrollable, :boolean
@doc "Use grid? If true will replace modal-content class with container-fluid and let you use row and col classes to align content"
prop use_grid, :boolean
@doc "Center vertically?"
prop vertically_centered, :boolean
@doc "Show close button? Defaults to true"
prop show_close_button, :boolean, default: true
@doc "Header text"
prop header, :string
@doc "Footer content, use via Modal.Footer"
slot footer
@doc "Default slot"
slot default, required: true
data action, :atom
def update(assigns, socket) do
socket = assign(socket, assigns)
socket =
case assigns[:action] do
nil ->
socket
:show ->
push_event(socket, "bsn-show-modal-#{assigns.id}", %{})
:hide ->
push_event(socket, "bsn-hide-modal-#{assigns.id}", %{})
end
|> assign(:action, nil)
{:ok, socket}
end
def render(assigns) do
~H"""
<div
:hook="Modal"
id={{ @id }}
class={{
"modal",
fade: @fade
}}
:attrs={{
"data-bsnstyle": true,
"data-bsnclass": "show",
"data-backdrop": backdrop_attribute(@backdrop, @static_backdrop)
}}
tabindex="-1"
>
<div class={{
"modal-dialog",
"modal-dialog-scrollable": @scrollable,
"modal-dialog-centered": @vertically_centered
}}>
<div class="modal-content">
<div :if={{ @header || @show_close_button }} class="modal-header">
<h5 :if={{ @header }} class="modal-title">{{ @header }}</h5>
<Button click="close_modal" class="btn-close" aria_label="Close" />
</div>
<div class="modal-body">
<#Raw :if={{ @use_grid }}>
<div class="container-fluid">
</#Raw>
<slot />
<#Raw :if={{ @use_grid }}>
</div>
</#Raw>
</div>
<div :if={{ slot_assigned?(:footer) }} class="modal-footer">
<slot name="footer" />
</div>
</div>
</div>
</div>
"""
end
defp backdrop_attribute(backdrop, static) do
cond do
!backdrop ->
nil
backdrop && static ->
"static"
backdrop ->
true
end
end
# External API
def show(id) do
send_update(__MODULE__, id: id, action: :show)
end
def hide(id) do
send_update(__MODULE__, id: id, action: :hide)
end
def handle_event("close_modal", _params, socket) do
hide(socket.assigns.id)
{:noreply, socket}
end
end
# Source: lib/surface_bootstrap/modal.ex
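# Hedged sketch of the public API above: show or hide a modal from server-side
# code by its component id ("help-modal" is an invented id). Both calls
# delegate through send_update/2, which pushes the matching browser event.
SurfaceBootstrap.Modal.show("help-modal")
SurfaceBootstrap.Modal.hide("help-modal")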
defmodule Meeseeks.Selector.XPath.Expr.Helpers do
@moduledoc false
alias Meeseeks.{Context, Document}
@nodes Context.nodes_key()
@other_nums [:NaN, :Infinity, :"-Infinity"]
# eq_fmt
def eq_fmt(x, y, document) when is_list(x) or is_list(y) do
nodes_fmt(x, y, document)
end
def eq_fmt(x, y, document) when is_boolean(x) or is_boolean(y) do
boolean(x, document)
end
def eq_fmt(x, y, document) when is_number(x) or is_number(y) do
number(x, document)
end
def eq_fmt(x, y, document) when x in @other_nums or y in @other_nums do
number(x, document)
end
def eq_fmt(x, _y, document) do
string(x, document)
end
# cmp_fmt
def cmp_fmt(x, y, document) when is_list(x) or is_list(y) do
nodes_fmt(x, y, document)
end
def cmp_fmt(x, _y, document) do
number(x, document)
end
# nodes_fmt
defp nodes_fmt(x, _y, _document) when is_number(x) do
x
end
defp nodes_fmt(x, _y, _document) when x in @other_nums do
x
end
defp nodes_fmt(x, _y, _document) when is_boolean(x) do
x
end
defp nodes_fmt(x, _y, _document) when is_binary(x) do
x
end
defp nodes_fmt(x, y, document) when is_list(x) and is_number(y) do
Enum.map(x, &(string(&1, document) |> number(document)))
end
defp nodes_fmt(x, y, document) when is_list(x) and y in @other_nums do
Enum.map(x, &(string(&1, document) |> number(document)))
end
defp nodes_fmt(x, y, document) when is_list(x) and is_boolean(y) do
boolean(x, document)
end
defp nodes_fmt(x, y, document) when is_list(x) and is_binary(y) do
Enum.map(x, &string(&1, document))
end
defp nodes_fmt(x, y, document) when is_list(x) and is_list(y) do
Enum.map(x, &string(&1, document))
end
# boolean
def boolean(false, _document), do: false
def boolean(true, _document), do: true
def boolean("", _document), do: false
def boolean([], _document), do: false
def boolean(:NaN, _document), do: false
def boolean(:Infinity, _document), do: true
def boolean(:"-Infinity", _document), do: true
def boolean(x, _document) when is_binary(x), do: true
def boolean(x, _document) when is_integer(x), do: x != 0
def boolean(x, _document) when is_float(x), do: x != 0.0
def boolean(x, _document) when is_list(x), do: nodes?(x)
# number
def number(false, _document), do: 0
def number(true, _document), do: 1
def number("", _document), do: :NaN
def number([], _document), do: :NaN
def number(:NaN, _document), do: :NaN
def number(:Infinity, _document), do: :Infinity
def number(:"-Infinity", _document), do: :"-Infinity"
def number(x, _document) when is_number(x), do: x
def number(x, document) when is_list(x), do: string(x, document) |> number(document)
def number(x, _document) when is_binary(x) do
case Regex.run(~r/^\s*(\-?\d+(\.\d+)?)\s*$/, x) do
[_, s] -> String.to_integer(s)
[_, s, _] -> String.to_float(s)
_ -> :NaN
end
end
# string
def string(false, _document), do: "false"
def string(true, _document), do: "true"
def string([], _document), do: ""
def string(0, _document), do: "0"
def string(0.0, _document), do: "0"
def string(:NaN, _document), do: "NaN"
def string(:Infinity, _document), do: "Infinity"
def string(:"-Infinity", _document), do: "-Infinity"
def string({_attr, value}, _document), do: value
def string(%Document.Doctype{}, _document), do: ""
def string(%Document.Comment{} = x, _document), do: x.content
def string(%Document.Data{} = x, _document), do: x.content
def string(%Document.Text{} = x, _document), do: x.content
def string(%Document.Element{} = x, document) do
children = Document.children(document, x.id)
child_nodes = Document.get_nodes(document, children)
child_nodes
|> Enum.map(&string(&1, document))
|> Enum.join("")
end
def string(x, _document) when is_binary(x), do: x
def string(x, _document) when is_integer(x), do: Integer.to_string(x)
def string(x, _document) when is_float(x), do: Float.to_string(x)
def string(x, document) when is_list(x) do
if nodes?(x) do
[node | _] = x
string(node, document)
else
raise ArgumentError, "invalid input to helper `string/2`: #{inspect(x)}"
end
end
# nodes?
def nodes?(xs) when is_list(xs), do: Enum.all?(xs, &node?/1)
def nodes?(_), do: false
# node?
def node?(%Document.Comment{}), do: true
def node?(%Document.Data{}), do: true
def node?(%Document.Doctype{}), do: true
def node?(%Document.Element{}), do: true
def node?(%Document.Text{}), do: true
# attribute
def node?({attr, val}) when is_binary(attr) and is_binary(val), do: true
# namespace
def node?(ns) when is_binary(ns), do: true
def node?(_), do: false
# position
def position(node, context) do
context
|> Map.fetch!(@nodes)
|> Enum.find_index(fn n -> node == n end)
|> plus_one()
end
# substring/2
def substring(_s, :NaN), do: ""
def substring(_s, :Infinity), do: ""
def substring(s, :"-Infinity"), do: s
def substring(s, n) when n <= 0, do: s
def substring(s, n) do
{_, sub} = String.split_at(s, n)
sub
end
# substring/3
def substring(_s, :NaN, _n2), do: ""
def substring(_s, _n1, :NaN), do: ""
def substring(_s, :Infinity, _n2), do: ""
def substring(s, :"-Infinity", :Infinity), do: s
def substring(s, :"-Infinity", n2), do: String.slice(s, 0, n2)
def substring(s, n1, n2) when n1 <= 0 do
case n2 do
:"-Infinity" ->
""
:Infinity ->
String.slice(s, 0, String.length(s))
_ ->
len = n1 + n2
if len <= 0 do
""
else
String.slice(s, 0, len)
end
end
end
def substring(s, n1, n2) do
case n2 do
:"-Infinity" ->
""
:Infinity ->
String.slice(s, n1, String.length(s))
_ ->
if n2 <= 0 do
""
else
String.slice(s, n1, n2)
end
end
end
# arithmetic
def add(:NaN, _), do: :NaN
def add(_, :NaN), do: :NaN
def add(:Infinity, _), do: :Infinity
def add(_, :Infinity), do: :Infinity
def add(:"-Infinity", _), do: :"-Infinity"
def add(_, :"-Infinity"), do: :"-Infinity"
def add(n1, n2), do: n1 + n2
def sub(:NaN, _), do: :NaN
def sub(_, :NaN), do: :NaN
def sub(:Infinity, _), do: :Infinity
def sub(_, :Infinity), do: :Infinity
def sub(:"-Infinity", _), do: :"-Infinity"
def sub(_, :"-Infinity"), do: :"-Infinity"
def sub(n1, n2), do: n1 - n2
def mult(:NaN, _), do: :NaN
def mult(_, :NaN), do: :NaN
def mult(:Infinity, _), do: :Infinity
def mult(_, :Infinity), do: :Infinity
def mult(:"-Infinity", _), do: :"-Infinity"
def mult(_, :"-Infinity"), do: :"-Infinity"
def mult(n1, n2), do: n1 * n2
def divd(:NaN, _), do: :NaN
def divd(_, :NaN), do: :NaN
def divd(:Infinity, _), do: :Infinity
def divd(_, :Infinity), do: :Infinity
def divd(:"-Infinity", _), do: :"-Infinity"
def divd(_, :"-Infinity"), do: :"-Infinity"
def divd(_, 0), do: :Infinity
def divd(n1, n2), do: div(n1, n2)
def mod(:NaN, _), do: :NaN
def mod(_, :NaN), do: :NaN
def mod(:Infinity, _), do: :Infinity
def mod(_, :Infinity), do: :Infinity
def mod(:"-Infinity", _), do: :"-Infinity"
def mod(_, :"-Infinity"), do: :"-Infinity"
def mod(_, 0), do: :Infinity
def mod(n1, n2), do: rem(n1, n2)
# numbers
def round_(:NaN), do: :NaN
def round_(:Infinity), do: :Infinity
def round_(:"-Infinity"), do: :"-Infinity"
def round_(n) when is_float(n), do: round(n)
def round_(n) when is_integer(n), do: n
def floor(:NaN), do: :NaN
def floor(:Infinity), do: :Infinity
def floor(:"-Infinity"), do: :"-Infinity"
def floor(n) when is_float(n), do: Float.floor(n)
def floor(n) when is_integer(n), do: n
def ceiling(:NaN), do: :NaN
def ceiling(:Infinity), do: :Infinity
def ceiling(:"-Infinity"), do: :"-Infinity"
def ceiling(n) when is_float(n), do: Float.ceil(n)
def ceiling(n) when is_integer(n), do: n
# compare
# =
def compare(:=, x, y), do: x == y
# !=
def compare(:!=, x, y), do: x != y
# <=
def compare(:<=, :NaN, :NaN), do: true
def compare(:<=, :NaN, _), do: true
def compare(:<=, _, :NaN), do: false
def compare(:<=, :Infinity, :Infinity), do: true
def compare(:<=, :Infinity, _), do: false
def compare(:<=, _, :Infinity), do: true
def compare(:<=, :"-Infinity", :"-Infinity"), do: true
def compare(:<=, :"-Infinity", _), do: true
def compare(:<=, _, :"-Infinity"), do: false
def compare(:<=, x, y), do: x <= y
# <
def compare(:<, :NaN, :NaN), do: false
def compare(:<, :NaN, _), do: true
def compare(:<, _, :NaN), do: false
def compare(:<, :Infinity, :Infinity), do: false
def compare(:<, :Infinity, _), do: false
def compare(:<, _, :Infinity), do: true
def compare(:<, :"-Infinity", :"-Infinity"), do: false
def compare(:<, :"-Infinity", _), do: true
def compare(:<, _, :"-Infinity"), do: false
def compare(:<, x, y), do: x < y
# >=
def compare(:>=, :NaN, :NaN), do: true
def compare(:>=, :NaN, _), do: false
def compare(:>=, _, :NaN), do: true
def compare(:>=, :Infinity, :Infinity), do: true
def compare(:>=, :Infinity, _), do: true
def compare(:>=, _, :Infinity), do: false
def compare(:>=, :"-Infinity", :"-Infinity"), do: true
def compare(:>=, :"-Infinity", _), do: false
def compare(:>=, _, :"-Infinity"), do: true
def compare(:>=, x, y), do: x >= y
# >
def compare(:>, :NaN, :NaN), do: false
def compare(:>, :NaN, _), do: false
def compare(:>, _, :NaN), do: true
def compare(:>, :Infinity, :Infinity), do: false
def compare(:>, :Infinity, _), do: true
def compare(:>, _, :Infinity), do: false
def compare(:>, :"-Infinity", :"-Infinity"), do: false
def compare(:>, :"-Infinity", _), do: false
def compare(:>, _, :"-Infinity"), do: true
def compare(:>, x, y), do: x > y
# negate
def negate(:NaN), do: :NaN
def negate(:Infinity), do: :"-Infinity"
def negate(:"-Infinity"), do: :Infinity
def negate(n), do: -n
# misc
defp plus_one(n), do: n + 1
end
# Source: lib/meeseeks/selector/xpath/expr/helpers.ex
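# Illustrative scalar coercions performed by the helpers above; the document
# argument is unused in these cases, so nil is passed as a stand-in.
alias Meeseeks.Selector.XPath.Expr.Helpers

Helpers.number("  42 ", nil)  # => 42
Helpers.number("3.14", nil)   # => 3.14
Helpers.number("abc", nil)    # => :NaN
Helpers.boolean(0.0, nil)     # => false
Helpers.add(:Infinity, 1)     # => :Infinity
Helpers.compare(:<, :NaN, 1)  # => true (this implementation's NaN ordering)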
defmodule Cashtrail.Contacts.Address do
@moduledoc """
This is an `Ecto.Schema` struct that represents an address of the contact.
## Fields
* `:id` - The unique id of the address.
* `:street` - street part of the address.
* `:number` - number part of the address.
* `:complement` - complement part of the address, like apartment number for instance.
* `:district` - district part of the address.
* `:city` - city part of the address.
* `:state` - state or province part of the address. This depends on the country.
* `:country` - The country of the address.
* `:zip` - The zip code of the address. This field is not validated, so you can
insert the zip code of any country.
* `:line_1` - Line 1 can have the street and number in some countries (like in the US).
* `:line_2` - Line 2 can have the city, state, and zip code in some countries (like in the US).
See `Cashtrail.Contacts.create_contact/2` to learn how to create a contact with an address, and
`Cashtrail.Contacts.update_contact/2` to learn how to update an address of a contact or
insert a new one.
"""
use Ecto.Schema
import Ecto.Changeset
@type t :: %Cashtrail.Contacts.Address{
id: Ecto.UUID.t() | nil,
street: String.t() | nil,
number: String.t() | nil,
complement: String.t() | nil,
district: String.t() | nil,
city: String.t() | nil,
state: String.t() | nil,
country: String.t() | nil,
zip: String.t() | nil,
line_1: String.t() | nil,
line_2: String.t() | nil
}
embedded_schema do
field :street, :string
field :number, :string
field :complement, :string
field :district, :string
field :city, :string
field :state, :string
field :country, :string
field :zip, :string
field :line_1, :string
field :line_2, :string
end
@doc false
@spec changeset(t | Ecto.Changeset.t(), map()) :: Ecto.Changeset.t()
def changeset(contact, attrs) do
contact
|> cast(attrs, [
:street,
:number,
:complement,
:district,
:city,
:state,
:country,
:zip,
:line_1,
:line_2
])
end
end
# Source: apps/cashtrail/lib/cashtrail/contacts/address.ex
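# A hedged sketch of casting address params (all field values are invented).
# Since changeset/2 only casts and applies no validations, the result is valid.
changeset =
  Cashtrail.Contacts.Address.changeset(%Cashtrail.Contacts.Address{}, %{
    "street" => "Main Street",
    "number" => "100",
    "city" => "Springfield",
    "state" => "IL",
    "country" => "US",
    "zip" => "62701"
  })

changeset.valid?
# => true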
defmodule APIacAuthBearer.Validator.JWT do
@moduledoc """
An implementation of [RFC9068 - JSON Web Token (JWT) Profile for OAuth 2.0 Access Tokens](https://www.rfc-editor.org/rfc/rfc9068).
This validator accepts the following options:
- `:issuer` **[mandatory]**: an OAuth2 issuer whose metadata and keys will be resolved
automatically
- `:client_config`: a `(APIacAuthBearer.Validator.opts() -> %{required(String.t()) => any()})`
function that returns the RS (*resource server*) configuration in case encryption is used. The
following fields are to be set:
- `"at_encrypted_response_alg"` [**mandatory**]: the algorithm used to decrypt
bearer token
- `"jwks"` [**mandatory**]: RS' symmetric or asymmetric keys used to decrypt the token
      %{
        "at_encrypted_response_alg" => "ECDH-ES",
        "jwks" => %{
          "keys" => [
            %{
              "crv" => "P-256",
              "d" => "<KEY>",
              "kty" => "EC",
              "x" => "<KEY>",
              "y" => "<KEY>"
            }
          ]
        }
      }
- `:oauth2_metadata_updater_opts`: options that will be passed to
`Oauth2MetadataUpdater`
- `:server_metadata`: server metadata that takes precedence over those automatically
retrieve from the server (requested from the issuer). Useful when the OP does
not support OAuth2 metadata or OpenID Connect discovery, or to override one or more
parameters
Note that the `"at_encrypted_response_alg"` parameter is **not** registered at the IANA. This
is because an OAuth2 RS is not specified as an OAuth2 client. An RS can be treated as a
special case of an OAuth2 client, and certain AS implementations do treat it that way, but
it is not specified as such. This library uses the terms `:client_config` and
`"at_encrypted_response_alg"` to make it easier
to use with backends that do indeed treat RSes as a special type of OAuth2 client.
The `APIacAuthBearer` `:resource_indicator` is also **mandatory** for this validator per the
specification.
"""
@behaviour APIacAuthBearer.Validator
@all_enc_enc [
"A128CBC-HS256",
"A192CBC-HS384",
"A256CBC-HS512",
"A128GCM",
"A192GCM",
"A256GCM"
]
@impl true
def validate_opts(opts) do
cond do
not is_binary(opts[:issuer]) ->
{:error, "missing or invalid mandatory option `:issuer` for #{__MODULE__}"}
not is_binary(opts[:resource_indicator]) ->
{:error, "missing or invalid mandatory option `:resource_indicator` for #{__MODULE__}"}
true ->
:ok
end
end
@impl true
def validate_bearer(bearer, opts) do
with :ok <- verify_typ(bearer),
{:ok, jws} <- maybe_decrypt(bearer, opts),
{:ok, payload_str} <- verify_signature(jws, opts),
{:ok, payload} <- Jason.decode(payload_str),
:ok <- verify_issuer(payload, opts),
# audience is verified in the main module
:ok <- verify_expiration(payload) do
{:ok, payload}
else
{:error, %Jason.DecodeError{}} ->
{:error, :invalid_jwt_inner_json_content}
{:error, _} = error ->
error
_ ->
{:error, :invalid_client_configuration}
end
end
defp verify_typ(bearer) do
cond do
JOSEUtils.is_jws?(bearer) ->
case JOSEUtils.JWS.peek_header(bearer) do
{:ok, %{"typ" => type}} when type in ["at+jwt", "application/at+jwt"] ->
:ok
_ ->
{:error, :invalid_jws_typ_header_parameter}
end
JOSEUtils.is_jwe?(bearer) ->
case JOSEUtils.JWE.peek_header(bearer) do
{:ok, %{"typ" => type}} when type in ["at+jwt", "application/at+jwt"] ->
:ok
_ ->
{:error, :invalid_jwe_typ_header_parameter}
end
true ->
{:error, :invalid_jwt_bearer}
end
end
defp maybe_decrypt(bearer, opts) do
cond do
opts[:client_config] && JOSEUtils.is_jwe?(bearer) ->
do_decrypt(bearer, opts)
JOSEUtils.is_jwe?(bearer) ->
{:error, :no_client_config_for_encrypted_token}
true ->
{:ok, bearer}
end
end
defp do_decrypt(jwe, opts) do
case opts[:client_config].(opts) do
%{"at_encrypted_response_alg" => enc_alg, "jwks" => %{"keys" => jwks}} ->
case JOSEUtils.JWE.decrypt(jwe, jwks, [enc_alg], @all_enc_enc) do
{:ok, {jws, _}} ->
{:ok, jws}
:error ->
{:error, :jwe_decryption_failure}
end
_ ->
{:error, :missing_client_config_param}
end
end
defp verify_signature(jws, opts) do
with {:ok, jwks} <- server_jwks(opts),
{:ok, %{"alg" => sig_alg}} <- JOSEUtils.JWS.peek_header(jws),
true <- sig_alg != "none",
{:ok, {payload_str, _}} <- JOSEUtils.JWS.verify(jws, jwks, [sig_alg]) do
{:ok, payload_str}
else
false ->
{:error, :illegal_use_of_sig_alg_none}
:error ->
{:error, :jws_signature_verification_failure}
{:error, %_{}} ->
{:error, :invalid_jws_header}
{:error, _} = error ->
error
end
end
defp verify_issuer(%{"iss" => iss}, opts),
do: if(iss == opts[:issuer], do: :ok, else: {:error, :invalid_issuer})
defp verify_expiration(%{"exp" => exp}),
do: if(exp >= System.system_time(:second), do: :ok, else: {:error, :expired_jwt_bearer})
defp server_jwks(opts) do
server_metadata = opts[:server_metadata] || %{}
issuer = opts[:issuer]
case server_metadata do
%{"jwks" => %{"keys" => jwks}} when is_list(jwks) ->
{:ok, jwks}
%{"jwks_uri" => jwks_uri} ->
JWKSURIUpdater.get_keys(jwks_uri)
_ ->
Oauth2MetadataUpdater.get_metadata(
issuer,
opts[:oauth2_metadata_updater_opts] || []
)
|> case do
{:ok, %{"jwks" => %{"keys" => jwks}}} when is_list(jwks) ->
{:ok, jwks}
{:ok, %{"jwks_uri" => jwks_uri}} ->
JWKSURIUpdater.get_keys(jwks_uri)
{:ok, _} ->
{:error, :server_has_no_jwks_configured}
{:error, _} = error ->
error
end
end
end
end
# Source: lib/validator/jwt.ex
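# Hedged sketch of option validation for the validator above; the issuer and
# resource indicator URLs are invented examples.
opts = [
  issuer: "https://auth.example.com",
  resource_indicator: "https://api.example.com"
]

:ok = APIacAuthBearer.Validator.JWT.validate_opts(opts)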
defmodule HL7.V2_3_1.Segments.OM1 do
@moduledoc false
require Logger
alias HL7.V2_3_1.{DataTypes}
use HL7.Segment,
fields: [
segment: nil,
sequence_number_test_observation_master_file: nil,
producers_test_observation_id: DataTypes.Ce,
permitted_data_types: nil,
specimen_required: nil,
producer_id: DataTypes.Ce,
observation_description: nil,
other_test_observation_ids_for_the_observation: DataTypes.Ce,
other_names: nil,
preferred_report_name_for_the_observation: nil,
preferred_short_name_or_mnemonic_for_observation: nil,
preferred_long_name_for_the_observation: nil,
orderability: nil,
identity_of_instrument_used_to_perform_this_study: DataTypes.Ce,
coded_representation_of_method: DataTypes.Ce,
portable: nil,
observation_producing_department_section: DataTypes.Ce,
telephone_number_of_section: DataTypes.Xtn,
nature_of_test_observation: nil,
report_subheader: DataTypes.Ce,
report_display_order: nil,
date_time_stamp_for_any_change_in_definition_for_the_observation: DataTypes.Ts,
effective_date_time_of_change: DataTypes.Ts,
typical_turn_around_time: nil,
processing_time: nil,
processing_priority: nil,
reporting_priority: nil,
outside_sites_where_observation_may_be_performed: DataTypes.Ce,
address_of_outside_sites: DataTypes.Xad,
phone_number_of_outside_site: DataTypes.Xtn,
confidentiality_code: nil,
observations_required_to_interpret_the_obs: DataTypes.Ce,
interpretation_of_observations: nil,
contraindications_to_observations: DataTypes.Ce,
reflex_tests_observations: DataTypes.Ce,
rules_that_trigger_reflex_testing: nil,
fixed_canned_message: DataTypes.Ce,
patient_preparation: nil,
procedure_medication: DataTypes.Ce,
factors_that_may_effect_the_observation: nil,
test_observation_performance_schedule: nil,
description_of_test_methods: nil,
kind_of_quantity_observed: DataTypes.Ce,
point_versus_interval: DataTypes.Ce,
challenge_information: nil,
relationship_modifier: DataTypes.Ce,
target_anatomic_site_of_test: DataTypes.Ce,
modality_of_imaging_measurement: DataTypes.Ce
]
end
# Source: lib/hl7/2.3.1/segments/om1.ex
defmodule MhZ19 do
@moduledoc File.read!("./README.md") |> String.replace(~r/^# .+\n\n/, "")
use GenServer
alias Circuits.UART
@commands [
co2_concentration: <<0xFF, 0x01, 0x86, 0x00, 0x00, 0x00, 0x00, 0x00, 0x79>>
]
@typedoc """
MhZ19 GenServer start_link options
* `:name` - a name for the GenServer (defaults to #{__MODULE__})
* `:tty` - name of the serial device (defaults to "ttyAMA0")
"""
@type options() :: [
name: GenServer.name(),
tty: binary
]
@doc """
Starts a new GenServer process with given `opts`.
## Examples
```elixir
iex> {:ok, pid} = MhZ19.start_link
```
"""
@spec start_link(options()) :: GenServer.on_start()
def start_link(opts \\ []) do
name = opts[:name] || __MODULE__
GenServer.start_link(__MODULE__, opts, name: name)
end
@doc """
Measures the current CO2 concentration value.
## Examples
```elixir
iex> {:ok, result} = MhZ19.measure(pid)
{:ok,
%MhZ19.Measurement{
co2_concentration: 650
}}
```
"""
@spec measure(GenServer.server()) :: {:ok, struct} | {:error, any()}
def measure(pid) do
GenServer.call(pid, :measure)
end
@impl GenServer
def init(opts \\ []) do
{:ok, uart} = UART.start_link()
:ok =
UART.open(uart, opts[:tty] || "ttyAMA0",
speed: 9_600,
data_bits: 8,
stop_bits: 1,
parity: :none,
active: false
)
{:ok, %{uart: uart}}
end
@impl GenServer
def handle_call(:measure, _from, state) do
{:reply, retrieve_co2_concentration(state), state}
end
defp retrieve_co2_concentration(state) do
:ok = UART.write(state.uart, @commands[:co2_concentration])
UART.read(state.uart)
|> handle_data(state)
end
defp handle_data({:ok, <<0xFF, 0x86, high, low, _, _, _, _>>}, _state) do
data = high * 256 + low
{:ok,
%MhZ19.Measurement{
co2_concentration: data
}}
end
defp handle_data({:error, reason}, _state) do
{:error, reason}
end
defp handle_data({:ok, <<>>}, _state) do
{:error, :timeout}
end
defp handle_data(_, state) do
retrieve_co2_concentration(state)
end
end
# Source: lib/mh_z19.ex
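# Sketch of the frame arithmetic used by handle_data/2 above: a sensor response
# <<0xFF, 0x86, high, low, _, _, _, _>> encodes the CO2 concentration (in ppm)
# as high * 256 + low. For example, with high = 2 and low = 138:
<<0xFF, 0x86, high, low, _, _, _, _>> = <<0xFF, 0x86, 2, 138, 0, 0, 0, 0>>
high * 256 + low
# => 650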
defmodule Rtmp.Handshake.DigestHandshakeFormat do
@moduledoc """
Functions to parse and validate RTMP handshakes based on flash client versions
and SHA digests. This handshake is required for supporting H.264 video.
Since no documentation of this handshake publicly exists from Adobe, this
was created by referencing https://www.cs.cmu.edu/~dst/Adobe/Gallery/RTMPE.txt
"""
require Logger
@random_crud <<0xF0, 0xEE, 0xC2, 0x4A, 0x80, 0x68, 0xBE, 0xE8, 0x2E, 0x00, 0xD0, 0xD1, 0x02,
0x9E, 0x7E, 0x57, 0x6E, 0xEC, 0x5D, 0x2D, 0x29, 0x80, 0x6F, 0xAB, 0x93, 0xB8,
0xE6, 0x36, 0xCF, 0xEB, 0x31, 0xAE>>
@genuine_fms_name "Genuine Adobe Flash Media Server 001"
@genuine_player_name "Genuine Adobe Flash Player 001"
@genuine_fms_with_crud @genuine_fms_name <> @random_crud
@genuine_player_with_crud @genuine_player_name <> @random_crud
@sha_256_digest_length 32
# copied from jwplayer handshake
@adobe_version <<128, 0, 7, 2>>
@type state :: %__MODULE__.State{}
defmodule State do
@moduledoc false
defstruct current_stage: :p0,
unparsed_binary: <<>>,
bytes_to_send: <<>>,
received_start_time: 0,
is_server: nil
end
@spec new() :: state
@doc "Creates a new digest handshake format instance"
def new() do
%State{}
end
@spec is_valid_format(binary) :: :unknown | :yes | :no
@doc "Validates if the passed in binary can be parsed using the digest handshake."
def is_valid_format(binary) do
cond do
byte_size(binary) < 1537 ->
:unknown
<<type::8, c1::bytes-size(1536), _::binary>> = binary ->
fms_version = get_message_format(c1, @genuine_fms_name)
player_version = get_message_format(c1, @genuine_player_name)
cond do
type != 3 -> :no
fms_version == :version1 || fms_version == :version2 -> :yes
player_version == :version1 || player_version == :version2 -> :yes
true -> :no
end
end
end
@spec process_bytes(state, binary) :: {state, Rtmp.Handshake.process_result()}
@doc "Attempts to proceed with the handshake process with the passed in bytes"
def process_bytes(state = %State{}, binary) do
state = %{state | unparsed_binary: state.unparsed_binary <> binary}
do_process_bytes(state)
end
@spec create_p0_and_p1_to_send(state) :: {state, binary}
@doc "Returns packets 0 and 1 to send to the peer"
def create_p0_and_p1_to_send(state = %State{}) do
random_binary = :crypto.strong_rand_bytes(1528)
handshake = <<0::4*8>> <> @adobe_version <> random_binary
{state, digest_offset, constant_key} =
case state.is_server do
nil ->
# Since this is called prior to us knowing if we are a server or not
# (i.e. we haven't received peer's packet 1 yet) we assume we are
# the first to send a packet off and thus we are the client
state = %{state | is_server: false}
digest_offset = get_client_digest_offset(handshake)
{state, digest_offset, @genuine_player_name}
true ->
digest_offset = get_server_digest_offset(handshake)
{state, digest_offset, @genuine_fms_name}
end
{part1, _, part2} = get_message_parts(handshake, digest_offset)
hmac = calc_hmac(part1, part2, constant_key)
p0 = <<3::8>>
p1 = part1 <> hmac <> part2
{state, p0 <> p1}
end
defp do_process_bytes(state = %State{current_stage: :p0}) do
if byte_size(state.unparsed_binary) < 1 do
{state, {:incomplete, <<>>}}
else
<<type::8, rest::binary>> = state.unparsed_binary
case type do
3 ->
state = %{state | unparsed_binary: rest, current_stage: :p1}
do_process_bytes(state)
_ ->
{state, :failure}
end
end
end
defp do_process_bytes(state = %State{current_stage: :p1, is_server: nil}) do
# Since is_server is nil, that means we got packet 1 from the peer before we sent
# our packet 1. This means we are a server reacting to a client
{state, p0_and_p1} = create_p0_and_p1_to_send(%{state | is_server: true})
state = %{state | bytes_to_send: state.bytes_to_send <> p0_and_p1}
do_process_bytes(state)
end
defp do_process_bytes(state = %State{current_stage: :p1}) do
if byte_size(state.unparsed_binary) < 1536 do
send_incomplete_response(state)
else
<<handshake::bytes-size(1536), rest::binary>> = state.unparsed_binary
const_to_use =
case state.is_server do
true -> @genuine_player_name
false -> @genuine_fms_name
end
{challenge_key_offset, key_offset} =
case get_message_format(handshake, const_to_use) do
:version1 -> {get_client_digest_offset(handshake), get_client_dh_offset(handshake)}
:version2 -> {get_server_digest_offset(handshake), get_server_dh_offset(handshake)}
end
<<_::bytes-size(challenge_key_offset), challenge_key::bytes-size(32), _::binary>> =
handshake
key_offset_without_time = key_offset - 4
<<
time::4*8,
_::bytes-size(key_offset_without_time),
_key::bytes-size(128),
_::binary
>> = handshake
state = %{
state
| received_start_time: time,
current_stage: :p2,
bytes_to_send: state.bytes_to_send <> generate_p2(state.is_server, challenge_key),
unparsed_binary: rest
}
do_process_bytes(state)
end
end
defp do_process_bytes(state = %State{current_stage: :p2}) do
if byte_size(state.unparsed_binary) < 1536 do
send_incomplete_response(state)
else
# TODO: Add confirmation of the p1 public key we sent. For now
# we are just assuming that if the peer didn't disconnect us we
# are good
<<_::1536*8, rest::binary>> = state.unparsed_binary
state = %{state | unparsed_binary: rest}
{state, {:success, state.received_start_time, state.bytes_to_send, state.unparsed_binary}}
end
end
defp generate_p2(is_server, challenge_key) do
random_binary = :crypto.strong_rand_bytes(1536 - @sha_256_digest_length)
string =
case is_server do
true -> @genuine_fms_with_crud
false -> @genuine_player_with_crud
end
digest = :crypto.hmac(:sha256, string, challenge_key)
signature = :crypto.hmac(:sha256, digest, random_binary)
random_binary <> signature
end
defp get_server_dh_offset(<<_::bytes-size(766), byte1, byte2, byte3, byte4, _::binary>>) do
# Calculates the offset of the server's Diffie-Hellman key
offset = byte1 + byte2 + byte3 + byte4
rem(offset, 632) + 8
end
defp get_server_digest_offset(<<_::bytes-size(772), byte1, byte2, byte3, byte4, _::binary>>) do
# Calculates the offset of the server's digest
offset = byte1 + byte2 + byte3 + byte4
rem(offset, 728) + 776
end
defp get_client_dh_offset(<<_::bytes-size(1532), byte1, byte2, byte3, byte4, _::binary>>) do
# Calculates the offset of the client's Diffie-Hellman key
offset = byte1 + byte2 + byte3 + byte4
rem(offset, 632) + 772
end
defp get_client_digest_offset(<<_::bytes-size(8), byte1, byte2, byte3, byte4, _::binary>>) do
# Calculates the offset of the client's digest
offset = byte1 + byte2 + byte3 + byte4
rem(offset, 728) + 12
end
defp get_message_format(handshake, key) do
version_1_offset = get_client_digest_offset(handshake)
{v1_part1, v1_digest, v1_part2} = get_message_parts(handshake, version_1_offset)
v1_hmac = calc_hmac(v1_part1, v1_part2, key)
version_2_offset = get_server_digest_offset(handshake)
{v2_part1, v2_digest, v2_part2} = get_message_parts(handshake, version_2_offset)
v2_hmac = calc_hmac(v2_part1, v2_part2, key)
cond do
v1_hmac == v1_digest -> :version1
v2_hmac == v2_digest -> :version2
true -> :unknown
end
end
defp get_message_parts(handshake, digest_offset) do
after_digest = 1536 - (digest_offset + @sha_256_digest_length)
<<
part1::bytes-size(digest_offset),
digest::bytes-size(@sha_256_digest_length),
part2::bytes-size(after_digest),
_::binary
>> = handshake
{part1, digest, part2}
end
defp calc_hmac(part1, part2, key) do
data = part1 <> part2
:crypto.hmac(:sha256, key, data)
end
defp send_incomplete_response(state) do
bytes_to_send = state.bytes_to_send
state = %{state | bytes_to_send: <<>>}
{state, {:incomplete, bytes_to_send}}
end
end
# Source: apps/rtmp/lib/rtmp/handshake/digest_handshake_format.ex
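# Hedged usage sketch for the digest handshake above: a hypothetical helper
# that feeds bytes read from a socket into the handshake state machine and
# reports what, if anything, should be written back to the peer.
defmodule DigestHandshakeSketch do
  alias Rtmp.Handshake.DigestHandshakeFormat, as: Digest

  def step(state \\ Digest.new(), bytes) do
    case Digest.process_bytes(state, bytes) do
      {state, {:incomplete, to_send}} ->
        # Write to_send (possibly empty) to the peer and wait for more bytes.
        {:cont, state, to_send}

      {state, {:success, peer_start_time, to_send, leftover}} ->
        # Handshake complete; leftover holds any post-handshake RTMP bytes.
        {:done, state, peer_start_time, to_send, leftover}

      {_state, :failure} ->
        :error
    end
  end
end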
defmodule Militerm.Systems.SimpleResponse do
@moduledoc ~S"""
The response system allows NPCs to map text string patterns to
events. This is a fairly generic system, so the scripting needs to
supply the string being matched as well as the set of matches. The
returned event is then triggered by the script as well.
    response:
      set-name:
        - pattern: pattern
          events:
            - event1
            - event2
The pattern is a regex with named captures available.
This should be sufficient to build a bot based on the old Eliza game.
"""
use Militerm.ECS.System
require Logger
defscript simple_response_trigger_event(set, text), for: objects do
do_sr_trigger_event(objects, set, text)
end
defscript simple_response_trigger_event(set, text, default_event), for: objects do
do_sr_trigger_event(objects, set, text, default_event)
end
defscript random_selection(list) do
if is_list(list) do
count = Enum.count(list)
Enum.at(list, :rand.uniform(count) - 1)
else
list
end
end
def do_sr_trigger_event(objects, set, text, default_event \\ nil)
def do_sr_trigger_event(objects, set, [text], default_event) do
do_sr_trigger_event(objects, set, text, default_event)
end
def do_sr_trigger_event(%{"this" => this} = objects, set, text, default_event) do
this
|> get_pattern_set(set)
|> log_pattern_set(this, set)
|> find_match(text)
|> log_match(this, set)
|> trigger_event(objects, default_event)
end
def do_sr_trigger_event(_, _, _, _), do: false
def get_pattern_set({:thing, thing_id}, set) do
Militerm.Components.SimpleResponses.get_set(thing_id, set)
end
def get_pattern_set({:thing, thing_id, _}, set) do
Militerm.Components.SimpleResponses.get_set(thing_id, set)
end
def log_pattern_set(patterns, {:thing, thing_id}, set) do
Logger.debug(fn ->
[thing_id, " SimpleResponseTriggerEvent ", set, " patterns: ", inspect(patterns)]
end)
patterns
end
def log_pattern_set(patterns, {:thing, thing_id, _}, set) do
Logger.debug(fn ->
[thing_id, " SimpleResponseTriggerEvent ", set, " patterns: ", inspect(patterns)]
end)
patterns
end
def find_match(patterns, text) do
patterns
|> Enum.find_value(fn %{"regex" => regex, "event" => event} ->
case regex_matches(regex, text) do
%{} = captures -> {event, captures}
_ -> false
end
end)
end
def log_match(match, {:thing, thing_id, _}, set) do
log_match(match, {:thing, thing_id}, set)
end
def log_match(match, {:thing, thing_id}, set) do
Logger.debug(fn ->
[thing_id, " SimpleResponseTriggerEvent ", set, " match: ", inspect(match)]
end)
match
end
def regex_matches([], _), do: false
def regex_matches([regex | rest], text) do
case Regex.named_captures(regex, text) do
%{} = captures -> captures
_ -> regex_matches(rest, text)
end
end
def regex_matches(regex, text), do: Regex.named_captures(regex, text)
def trigger_event(nil, _, nil), do: false
def trigger_event(nil, %{"this" => this} = objects, event) do
do_trigger_event(this, event, objects)
false
end
def trigger_event({event, captures}, %{"this" => this} = objects, _) do
do_trigger_event(this, event, Map.merge(captures, objects))
end
def trigger_event(event, %{"this" => this} = objects, _) do
do_trigger_event(this, event, objects)
true
end
def do_trigger_event({:thing, thing_id}, event, args) do
do_trigger_event(thing_id, event, args)
end
def do_trigger_event({:thing, thing_id, _}, event, args) do
do_trigger_event(thing_id, event, args)
end
def do_trigger_event(thing_id, event, args) do
Militerm.Systems.Events.async_trigger(thing_id, event, "responder", args)
true
end
end
# Source: lib/militerm/systems/simple_response.ex
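# Illustrative named-capture matching as performed by find_match/2 above; the
# pattern and input text are invented.
Militerm.Systems.SimpleResponse.regex_matches(~r/hello (?<name>\w+)/, "hello world")
# => %{"name" => "world"}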
defmodule Jerboa.Format.HeaderLengthError do
@moduledoc """
Error indicating STUN message with header of invalid length
STUN messages have a fixed header of 20 bytes, so any message
shorter than that will produce this error when passed to
`Jerboa.Format.decode/1`.
Exception struct fields:
* `:binary` - whole STUN message which produced this error
"""
defexception [:message, :binary]
def exception(opts) do
b = opts[:binary]
%__MODULE__{binary: b,
message: "the STUN wire format requires a header of at least" <>
" 20 bytes but got #{byte_size b} bytes"}
end
end
defmodule Jerboa.Format.BodyLengthError do
@moduledoc """
Error indicating STUN message with body shorter than that declared
in header
Each STUN message contains the length of its body encoded in the
header. If a message body is shorter than that declared in the
header it cannot be decoded correctly and will produce this error
when passed to `Jerboa.Format.decode/1`.
Exception struct fields:
* `:length` - actual length of message body
"""
defexception [:message, :length]
def exception(opts) do
%__MODULE__{length: opts[:length],
message: "message body is shorter than specified length"}
end
end
defmodule Jerboa.Format.First2BitsError do
@moduledoc """
Error indicating wrong value encoded in first two bits of STUN
or ChannelData message
A STUN message header must start with two zero (clear) bits. A ChannelData
message must start with its first bit set to zero and its second set to one. If neither
of these holds true, this error is produced. This error also indicates a
bitstring shorter than 2 bits.
Exception struct fields:
* `:bits` - a 2 bit long bitstring with the value of first two bits
of a message
"""
defexception [:message, :bits]
def exception(opts) do
%__MODULE__{bits: opts[:bits],
message: "two most significant bits of message have wrong " <>
"value. Found #{inspect opts[:bits]}, expected 0b00 or 0b01"}
end
end
defmodule Jerboa.Format.MagicCookieError do
@moduledoc """
Error indicating that the message does not encode the magic cookie
value
The second 4 bytes of each STUN message header have a fixed value of
`0x2112A442` otherwise the message can't be identified as a STUN
message and this error is produced.
Exception struct fields:
* `:header` - whole 20 byte header of invalid message
"""
defexception [:message, :header]
def exception(opts) do
%__MODULE__{header: opts[:header],
message: "STUN message doesn't have magic cookie"}
end
end
defmodule Jerboa.Format.UnknownMethodError do
@moduledoc """
Error indicating that the STUN message method is unknown to Jerboa
STUN methods are (along with classes) the primary indicator of how
messages should be processed. If the method is unknown the STUN
agent won't know how to service to the message.
Exception struct fields:
* `:method` - integer with a value of the unknown method
"""
defexception [:message, :method]
def exception(opts) do
m = opts[:method]
%__MODULE__{method: m,
message: "unknown STUN method 0x#{Integer.to_string(m, 16)}"}
end
end
defmodule Jerboa.Format.Last2BitsError do
@moduledoc """
Error indicating that the last two bits of the STUN message length
field are not zeroes (clear)
STUN messages must be padded to a multiple of 4 bytes, so the length
value encoded in the message header must be a multiple of 4. The
binary representation of numbers divisible by 4 always have the last
two bits set to 0. This serves as another distinguishing feature, at
least, of a correctly formed STUN message.
Exception struct fields:
* `:length` - value of the length field in the message header
"""
defexception [:message, :length]
def exception(opts) do
%__MODULE__{length: opts[:length],
message: "all STUN attributes are padded to a multiple of 4 bytes" <>
" so the last 2 bits of this field should be zero"}
end
end
defmodule Jerboa.Format.AttributeFormatError do
@moduledoc """
Error indicating that binary representation of attribute isn't
compliant with [STUN RFC](https://tools.ietf.org/html/rfc5389#section-15).
"""
defexception [:message]
def exception(_opts \\ []) do
%__MODULE__{message: "length of attribute's value does not match length " <>
"declared in attribute's header"}
end
end
defmodule Jerboa.Format.ComprehensionError do
@moduledoc """
Error indicating that the STUN message contained a
comprehension-required attribute unknown to Jerboa
If the STUN message contains a comprehension-required attribute
which is unknown to the STUN agent then it cannot be successfully
processed.
Exception struct fields:
* `:attribute` - integer value of the unknown attribute
"""
defexception [:message, :attribute]
def exception(opts) do
a = opts[:attribute]
%__MODULE__{attribute: a,
message: "can not encode/decode comprehension required attribute #{a}"}
end
end
defmodule Jerboa.Format.XORAddress do
@moduledoc false
defmodule IPFamilyError do
@moduledoc """
Error indicating that the IP address family encoded in the
XOR-MAPPED-ADDRESS, XOR-PEER-ADDRESS or XOR-RELAYED-ADDRESS
attribute's value is invalid
Valid address families are 0x01 for IPv4 and 0x02 for IPv6.
Exception struct fields:
* `:number` - address family number encoded in the attribute's
value
"""
defexception [:message, :number]
def exception(opts) do
n = opts[:number]
%__MODULE__{number: n,
message: "IP family should be for 0x01 IPv4 or 0x02 for IPv6" <>
" but got 0x#{Integer.to_string(n, 16)}"}
end
end
defmodule LengthError do
@moduledoc """
Error indicating that the XOR-MAPPED-ADDRESS, XOR-PEER-ADDRESS or
XOR-RELAYED-ADDRESS attribute has invalid length
These attributes are encoded into 8 bytes for IPv4 and 20 bytes for IPv6.
Exception struct fields:
* `:length` - length of attribute's value found in the message
"""
defexception [:message, :length]
def exception(opts) do
%__MODULE__{length: opts[:length],
message: "Invalid value length. XOR Mapped Address attribute value" <>
"must be 8 bytes or 20 bytes long for IPv4 and IPv6 respectively"}
end
end
defmodule IPArityError do
@moduledoc """
Error indicating that the IP address family and IP address length
of the XOR-MAPPED-ADDRESS, XOR-PEER-ADDRESS or XOR-RELAYED-ADDRESS
attributes don't make sense
For example: the IP address family value may be 0x01 (IPv4) while
the length of an address is 16 bytes, as in IPv6.
Exception struct fields:
* `:family` - the IP address family given in the
XOR-*-ADDRESS attribute (either `<<0x01>>` or `<<0x02>>`)
"""
defexception [:message, :family]
def exception(opts) do
message =
case opts[:family] do
<<0x01::8>> ->
"IPv4 addresses are 4 bytes long but got 16 bytes"
<<0x02::8>> ->
"IPv6 addresses are 16 bytes long but got 4 bytes"
end
%__MODULE__{family: opts[:family],
message: message}
end
end
end
defmodule Jerboa.Format.Lifetime do
@moduledoc false
defmodule LengthError do
@moduledoc """
Error indicating that LIFETIME attribute's value has invalid
length
TURN RFC requires that LIFETIME attribute's value must be 4 bytes long
so any other length results in this error.
Exception struct fields:
* `:length` - length of attribute's value found in STUN message (in bytes)
"""
defexception [:message, :length]
def exception(opts) do
length = opts[:length]
%__MODULE__{length: length,
message: "Invalid value length. LIFETIME attribute's value must be " <>
"4 bytes long (found #{length})"}
end
end
end
defmodule Jerboa.Format.Nonce.LengthError do
@moduledoc """
Error indicating that NONCE attribute's value has invalid
length
STUN RFC requires that NONCE attribute's value must be maximum 128 UTF-8 encoded
characters long, so any longer value results in this error.
Exception struct fields:
* `:length` - length of attribute's value found in STUN message (number of UTF-8 encoded characters)
"""
defexception [:message, :length]
def exception(opts) do
length = opts[:length]
%__MODULE__{length: length,
message: "Invalid value length. NONCE attribute's value must be " <>
"maximum 128 characters long (found #{length})"}
end
end
defmodule Jerboa.Format.Username.LengthError do
@moduledoc """
Error indicating that USERNAME attribute's value has invalid
length
STUN RFC requires that USERNAME attribute's value must be a valid UTF-8 encoded
sequence of 512 or less bytes.
Exception struct fields:
* `:length` - length of attribute's value found in STUN message (in bytes)
"""
defexception [:message, :length]
def exception(opts) do
length = opts[:length]
%__MODULE__{length: length,
message: "Invalid value length. USERNAME attribute's value must be " <>
"maximum 512 bytes long (found #{length})"}
end
end
defmodule Jerboa.Format.Realm.LengthError do
@moduledoc """
Error indicating that REALM attribute's value has invalid
length
STUN RFC requires that REALM attribute's value must be maximum 128 UTF-8 encoded
characters long, so any longer value results in this error.
Exception struct fields:
* `:length` - length of attribute's value found in STUN message (number of UTF-8 encoded characters)
"""
defexception [:message, :length]
def exception(opts) do
length = opts[:length]
%__MODULE__{length: length,
message: "Invalid value length. REALM attribute's value must be " <>
"maximum 128 characters long (found #{length})"}
end
end
defmodule Jerboa.Format.ErrorCode.FormatError do
@moduledoc """
Error indicating invalid format of ERROR-CODE attribute
STUN RFC and its extensions define several error code values which must be known
by the STUN agent. In addition, the reason field of ERROR-CODE must be a valid UTF-8
encoded string, no longer than 128 characters. This error indicates that one of
these constraints was violated.
You may also encounter this error when ERROR-CODE attribute's binary format isn't
compliant with the one specified in STUN RFC.
"""
defexception [:message, :code, :reason]
def exception(opts) do
code = opts[:code]
reason = opts[:reason]
%__MODULE__{code: code,
reason: reason,
message: "Invalid format ERROR-CODE attribute. Please refer to " <>
"documentation of `Jerboa.Format.ErrorCode.FormatError` module for " <>
"more information"}
end
end
defmodule Jerboa.Format.ErrorCode.LengthError do
@moduledoc """
Error indicating that ERROR-CODE attribute's value is too short
ERROR-CODE binary format requires that its value is at least 4 bytes long. Any other
length results in this error.
Exception struct fields:
* `:length` - length of attribute's value found in STUN message (in bytes)
"""
defexception [:message, :length]
def exception(opts) do
length = opts[:length]
%__MODULE__{length: length,
message: "Invalid value length. ERROR-CODE attribute's value must be " <>
"at least 4 bytes long (found #{length})"}
end
end
defmodule Jerboa.Format.RequestedTransport.LengthError do
@moduledoc """
Error indicating that REQUESTED-TRANSPORT attribute's value
has length different than 4 bytes
"""
defexception [:message, :length]
def exception(opts) do
length = opts[:length]
%__MODULE__{length: length,
message: "Invalid length of REQUESTED-TRANSPORT attribute. " <>
"Expected 4 bytes, found: #{length}"}
end
end
defmodule Jerboa.Format.MessageIntegrity.FormatError do
@moduledoc """
Error indicating badly encoded MESSAGE-INTEGRITY attribute found
in STUN message
MESSAGE-INTEGRITY value must be 20 bytes long, any other value results
in this error.
"""
defexception [:message]
def exception(_opts \\ []) do
%__MODULE__{message: "Invalid value length or declared length. MESSAGE-INTEGRITY value " <>
"must be 20 bytes long"}
end
end
defmodule Jerboa.Format.DontFragment.ValuePresentError do
@moduledoc """
Error indicating that DONT-FRAGMENT found in STUN message
has some value associated with it
"""
defexception [:message]
def exception(_opts \\ []) do
%__MODULE__{message: "DONT-FRAGMENT attribute has value associated with it"}
end
end
defmodule Jerboa.Format.EvenPort.FormatError do
@moduledoc """
Error indicating that EVEN-PORT attributes found in STUN message
has invalid format
TURN RFC requires that EVEN-PORT attribute is 1 byte long,
where 7 last bits are always set to 0.
"""
defexception [:message]
def exception(_opts \\ []) do
%__MODULE__{message: "EVEN-PORT attribute has invalid format"}
end
end
defmodule Jerboa.Format.ReservationToken.LengthError do
@moduledoc """
Error indicating that RESERVATION-TOKEN attribute's value
has invalid (different than 8 bytes) length
Exception struct fields:
* `:length` - length of attribute's value found in STUN message (in bytes)
"""
defexception [:message, :length]
def exception(opts) do
length = opts[:length]
%__MODULE__{length: length,
message: "RESERVATION-TOKEN attribute has value of invalid " <>
"length. Expected 8 bytes, found #{length}"}
end
end
defmodule Jerboa.Format.ChannelNumber.First2BitsError do
@moduledoc """
Error indicating that the first two bits of a CHANNEL-NUMBER value
do not match the required 0b01 value
Exception struct fields:
* `:bits` - a 2 bit long bitstring with the value of first two bits
of a message
"""
defexception [:message, :bits]
def exception(opts) do
%__MODULE__{bits: opts[:bits],
message: "the two most significant bits of a TURN " <>
"CHANNEL-NUMBER must be 0b01"}
end
end
defmodule Jerboa.Format.ChannelDataLengthError do
@moduledoc """
Error indicating ChannelData message of invalid length
Each ChannelData message contains the length of its data field encoded in the
header. If data field is shorter than that declared in the header, it cannot
be decoded correctly and will produce this error.
Exception struct fields:
* `:length` - actual length of message data field
"""
defexception [:message, :length]
def exception(opts) do
%__MODULE__{length: opts[:length],
message: "channel data is shorter than specified length"}
end
end
# Source: lib/jerboa/format/exceptions.ex
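# Sketch of constructing one of the exceptions above (the method number is an
# arbitrary illustrative value).
error = Jerboa.Format.UnknownMethodError.exception(method: 0xFFF)
error.message
# => "unknown STUN method 0xFFF"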
defmodule AadhaarValidator do
@dihedral_group [
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
[1, 2, 3, 4, 0, 6, 7, 8, 9, 5],
[2, 3, 4, 0, 1, 7, 8, 9, 5, 6],
[3, 4, 0, 1, 2, 8, 9, 5, 6, 7],
[4, 0, 1, 2, 3, 9, 5, 6, 7, 8],
[5, 9, 8, 7, 6, 0, 4, 3, 2, 1],
[6, 5, 9, 8, 7, 1, 0, 4, 3, 2],
[7, 6, 5, 9, 8, 2, 1, 0, 4, 3],
[8, 7, 6, 5, 9, 3, 2, 1, 0, 4],
[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
]
@permutation [
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
[1, 5, 7, 6, 2, 8, 3, 0, 9, 4],
[5, 8, 0, 3, 7, 9, 6, 1, 4, 2],
[8, 9, 1, 6, 0, 4, 3, 5, 2, 7],
[9, 4, 5, 3, 1, 2, 6, 8, 7, 0],
[4, 2, 8, 6, 5, 7, 3, 9, 0, 1],
[2, 7, 9, 3, 8, 0, 6, 4, 1, 5],
[7, 0, 4, 6, 9, 1, 3, 2, 5, 8]
]
@inverse ["0", "4", "3", "2", "1", "5", "6", "7", "8", "9"]
def checksum_of(card_num) when is_integer(card_num) do
card_num
|> Integer.to_string()
|> checksum_of()
|> String.to_integer()
end
def checksum_of(card_num) when is_binary(card_num) do
_ = String.to_integer(card_num) # raises ArgumentError if any character is not a digit
case String.length(card_num) do
11 ->
index = card_num
|> String.codepoints()
|> Enum.reverse()
|> Enum.with_index()
|> Enum.reduce(0, fn({x, i}, check) ->
second_arg =
@permutation
|> Enum.at(rem(i + 1, 8))
|> Enum.at(String.to_integer(x))
@dihedral_group
|> Enum.at(check)
|> Enum.at(second_arg)
end)
Enum.at(@inverse, index)
_ ->
raise ArgumentError.exception("Not a valid number to calculate checksum")
end
end
def valid?(number) when is_integer(number) do
number
|> Integer.to_string()
|> valid?()
end
def valid?(number) when is_binary(number) do
_ = String.to_integer(number) # raises ArgumentError if any character is not a digit
case String.length(number) do
12 ->
{card_num, checksum} = String.split_at(number, -1)
valid?(card_num, checksum)
_ ->
false
end
end
defp valid?(card_num, checksum) do
checksum_of(card_num) == checksum
end
end
# Source: lib/aadhaar_validator.ex
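# Hedged sketch of the checksum roundtrip above; the 11-digit base number is
# invented, not a real Aadhaar number.
base = "12345678901"
check = AadhaarValidator.checksum_of(base)
AadhaarValidator.valid?(base <> check)
# => true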
defmodule Microdata.Item do
@moduledoc """
`Microdata.Item` structs are read from a `Microdata.Document`'s source.
"""
defstruct types: nil, id: nil, properties: nil
@type t :: %Microdata.Item{
types: MapSet.t(String.t()),
id: URI.t(),
properties: [Microdata.Property.t()]
}
@doc """
Resolve the vocabulary of a typed item or list of types.
## Examples
```
iex> Microdata.Item.vocabulary(%Microdata.Item{})
nil
iex> Microdata.Item.vocabulary(%Microdata.Item{types: MapSet.new(["foo"])})
"foo/"
iex> Microdata.Item.vocabulary(["foo", "bar"])
"foo/"
iex> Microdata.Item.vocabulary(["foo#bar"])
"foo"
iex> Microdata.Item.vocabulary(["foo/bar"])
"foo/"
iex> Microdata.Item.vocabulary(["foo/bar/baz"])
"foo/bar/"
```
"""
@spec vocabulary(Microdata.Item.t()) :: String.t() | nil
@spec vocabulary(MapSet.t()) :: String.t() | nil
def vocabulary(%Microdata.Item{types: types}), do: vocabulary(types)
def vocabulary(types) when is_map(types) or is_list(types) do
types
|> Enum.map(&parse_vocabulary_from_type/1)
|> List.first()
end
def vocabulary(_), do: nil
@doc """
Lookup item properties with matching names.
## Examples (not a doctest)
```
iex> Microdata.Item.lookup(item, "foo")
[%Microdata.Property{names: ["foo"], ...}, ...]
iex> Microdata.Item.lookup(["foo", "bar"])
[
%Microdata.Property{names: ["foo"], ...},
%Microdata.Property{names: ["bar"], ...}, ...
]
```
"""
@spec lookup(Microdata.Item.t(), String.t()) :: [Microdata.Property.t()]
@spec lookup(Microdata.Item.t(), [String.t()]) :: [Microdata.Property.t()]
def lookup(_, nil), do: []
def lookup(item, prop_names) when is_list(prop_names) do
names = MapSet.new(prop_names)
item.properties
|> Enum.filter(fn property ->
is_map(property.names) &&
property.names
|> MapSet.intersection(names)
|> MapSet.size() > 0
end)
end
def lookup(item, prop_names) do
item.properties
|> Enum.filter(fn property ->
is_map(property.names) && MapSet.member?(property.names, prop_names)
end)
end
# Utility functions
defp parse_vocabulary_from_type(type) do
cond do
String.contains?(type, "#") ->
type
|> String.split("#", parts: 2)
|> List.first()
String.contains?(type, "/") ->
[_ | tail] = type |> String.split("/") |> Enum.reverse()
"#{tail |> Enum.reverse() |> Enum.join("/")}/"
true ->
"#{type}/"
end
end
end
# Source: lib/microdata/item.ex
defmodule SqlParser do
@moduledoc "Turns strings of SQL into Elixir data."
@doc """
Break the given SQL string `s` into tokens.
Return the list of tokens. SQL keywords, operators, etc. are represented
as Elixir keywords. Identifiers and literals are represented as pairs,
`{:token_type, value}`. The token types are `:identifier`, `:number`, and
`:string`.
## Examples
iex> SqlParser.tokenize("SELECT * FROM Student")
[:select, :*, :from, {:identifier, "Student"}]
iex> SqlParser.tokenize("WHERE name = '")
[:where, {:identifier, "name"}, :=, {:error, "unrecognized character: '"}]
iex> SqlParser.tokenize("1 <> 0.99")
[{:number, 1}, :<>, {:number, Decimal.new("0.99")}]
"""
def tokenize(s) do
token_re = ~r/(?:\s*)([0-9](?:\w|\.)*|\w+|'(?:[^']|'')*'|>=|<=|<>|.)(?:\s*)/
Regex.scan(token_re, s, capture: :all_but_first)
|> Enum.map(&match_to_token/1)
end
# Convert a single token regex match into a token.
defp match_to_token([token_str]) do
case String.downcase(token_str) do
"all" -> :all
"and" -> :and
"as" -> :as
"asc" -> :asc
"between" -> :between
"by" -> :by
"desc" -> :desc
"distinct" -> :distinct
"exists" -> :exists
"from" -> :from
"group" -> :group
"having" -> :having
"insert" -> :insert
"is" -> :is
"limit" -> :limit
"not" -> :not
"null" -> :null
"or" -> :or
"order" -> :order
"select" -> :select
"set" -> :set
"union" -> :union
"update" -> :update
"values" -> :values
"where" -> :where
"*" -> :*
"." -> :.
"," -> :','
"(" -> :'('
")" -> :')'
"=" -> :=
">=" -> :'>='
"<=" -> :'<='
">" -> :'>'
"<" -> :'<'
"<>" -> :'<>'
_ ->
cond do
String.match?(token_str, ~r/^[0-9]/) ->
if String.contains?(token_str, ".") do
{:number, Decimal.new(token_str)}
else
{n, ""} = Integer.parse(token_str)
{:number, n}
end
String.match?(token_str, ~r/^[a-z]/i) ->
{:identifier, token_str}
String.match?(token_str, ~r/^'.*'$/) ->
{:string, String.slice(token_str, 1..-2)} # TODO: handle doubled quotes
true -> {:error, "unrecognized character: #{token_str}"}
end
end
end
@doc """
Parse a SQL `SELECT` statement.
The result is a Map with keys representing all the clauses of a `SELECT`:
`:select`, `:from`, `:where`, `:group`, `:having`, `:order`.
Most clauses are optional, so some of these keys may map to `nil`.
For example, below, there's no `GROUP BY` clause in the input,
so we have `group: nil` in the output.
iex> SqlParser.parse_select_stmt!(\"""
...> SELECT Title
...> FROM Album
...> WHERE ArtistId = 252
...> ORDER BY Title
...> \""")
%{
select: [{:identifier, "Title"}],
from: ["Album"],
where: {:=, {:identifier, "ArtistId"}, {:number, 252}},
group: nil,
having: nil,
order: [identifier: "Title"],
limit: nil
}
Raises `ArgumentError` if the input string isn't a syntactically correct
`SELECT` statement.
iex> SqlParser.parse_select_stmt!("SELECT SELECT FROM SELECT WHERE SELECT")
** (ArgumentError) identifier or literal expected
"""
def parse_select_stmt!(sql) do
{%{}, tokenize(sql)}
|> parse_clause!(:select, &parse_exprs!/1, required: true)
|> parse_clause!(:from, &parse_tables!/1)
|> parse_clause!(:where, &parse_expr!/1)
|> parse_clause_2!(:group, :by, &parse_exprs!/1)
|> parse_clause!(:having, &parse_expr!/1)
|> check_having_without_group!()
|> parse_clause_2!(:order, :by, &parse_exprs!/1)
|> parse_clause!(:limit, &parse_expr!/1)
|> check_done!()
end
defp parse_exprs!(sql) do
{expr, tail} = parse_expr!(sql)
case tail do
[:',' | more] ->
{exprs, rest} = parse_exprs!(more)
{[expr | exprs], rest}
_ -> {[expr], tail}
end
end
defp parse_prim!(sql) do
case sql do
[{:identifier, fnname}, :'(' | rest] ->
fnname_up = String.upcase(fnname)
{args, rest} = parse_exprs!(rest)
expected_argc =
cond do
Enum.member?(["AVG", "COUNT", "MAX", "MIN", "SUM"], fnname_up) ->
[1]
fnname_up == "ROUND" ->
1..2
true ->
raise ArgumentError, message: "unrecognized function #{inspect(fnname)}"
end
if !Enum.member?(expected_argc, length(args)) do
raise ArgumentError, message: "#{fnname_up}() function expects #{expected_argc} argument(s), got #{length(args)}"
end
case rest do
[:')' | rest] -> {{:apply, fnname_up, args}, rest}
_ -> raise ArgumentError, message: "')' expected after function arguments"
end
[{:identifier, table_name}, :., {:identifier, column_name} | rest] ->
{{:., table_name, column_name}, rest}
[{:identifier, _} | rest] -> {hd(sql), rest}
[{:number, _} | rest] -> {hd(sql), rest}
[{:string, _} | rest] -> {hd(sql), rest}
[:* | rest] -> {hd(sql), rest}
_ -> raise ArgumentError, message: "identifier or literal expected"
end
end
defp parse_expr!(sql) do
{lhs, rest} = parse_and_expr!(sql)
case rest do
[:or | rest] ->
{rhs, rest} = parse_expr!(rest)
{{:or, lhs, rhs}, rest}
_ -> {lhs, rest}
end
end
defp parse_and_expr!(sql) do
{lhs, rest} = parse_compare_expr!(sql)
case rest do
[:and |rest] ->
{rhs, rest} = parse_and_expr!(rest)
{{:and, lhs, rhs}, rest}
_ -> {lhs, rest}
end
end
defp parse_compare_expr!(sql) do
{prim, rest} = parse_prim!(sql)
case rest do
[:= | rest] ->
{rhs, rest} = parse_prim!(rest)
{{:=, prim, rhs}, rest}
[:'<>' | rest] ->
{rhs, rest} = parse_prim!(rest)
{{:'<>', prim, rhs}, rest}
[:is, :null | rest] ->
{{:is_null, prim}, rest}
_ -> {prim, rest}
end
end
defp parse_table!(sql) do
case sql do
[{:identifier, table_name}, {:identifier, alias_name} | rest] ->
{{:alias, table_name, alias_name}, rest}
[{:identifier, table_name} | rest] ->
{table_name, rest}
_ -> raise ArgumentError, message: "table name expected"
end
end
defp parse_tables!(sql) do
{table, rest} = parse_table!(sql)
case rest do
[:',' | rest] ->
{tables, rest} = parse_tables!(rest)
{[table | tables], rest}
_ -> {[table], rest}
end
end
defp parse_clause!({ast, sql}, keyword, parser, keywords \\ []) do
case sql do
[^keyword | rest] ->
{clause_ast, rest} = parser.(rest)
{Map.put(ast, keyword, clause_ast), rest}
_ ->
if Keyword.get(keywords, :required, false) do
raise ArgumentError, message: "#{keyword} expected"
else
{Map.put(ast, keyword, nil), sql}
end
end
end
defp parse_clause_2!({ast, sql}, kw1, kw2, parser) do
case sql do
[^kw1, ^kw2 | rest] ->
{clause_ast, rest} = parser.(rest)
{Map.put(ast, kw1, clause_ast), rest}
_ ->
{Map.put(ast, kw1, nil), sql}
end
end
defp check_having_without_group!({ast, sql}) do
if ast.group == nil && ast.having != nil do
raise ArgumentError, message: "a GROUP BY clause is required before HAVING"
end
{ast, sql}
end
defp check_done!({ast, sql}) do
case sql do
[] -> ast
_ ->
ast |> inspect() |> IO.puts()
sql |> inspect() |> IO.puts()
raise ArgumentError, message: "extra stuff at end of SQL"
end
end
end
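# Usage sketch (not part of the original module): tokenize/1 and
# parse_select_stmt!/1 applied to a small query, with results derived from
# the clauses above.
SqlParser.tokenize("SELECT Name FROM Artist LIMIT 5")
# => [:select, {:identifier, "Name"}, :from, {:identifier, "Artist"},
#    :limit, {:number, 5}]
SqlParser.parse_select_stmt!("SELECT Name FROM Artist LIMIT 5")
# => %{select: [{:identifier, "Name"}], from: ["Artist"], where: nil,
#    group: nil, having: nil, order: nil, limit: {:number, 5}}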
|
jorendorff+elixir/lib/SqlParser.ex
| 0.849504
| 0.559802
|
SqlParser.ex
|
starcoder
|
defmodule Microformats2 do
@moduledoc """
A [microformats2](http://microformats.org/wiki/microformats2) parser for elixir.
"""
alias Microformats2.Helpers
@doc """
Parse a document either by URL or by content. Returns a microformats2 parsing structure or `:error`.
## Parameters
* `content_or_url` is either the HTML document or a URL
* `base_url_or_opts` is either the base URL of the document (if the first argument is a HTML string) or a
keyword list of options
* `opts` is an option list when the first argument is an HTML string
## Options
Valid options are:
* `:atomize_keys`: `true` or `false`, defaults to `false`. Convert keys to atoms when true, e.g. `"rels"` to `:rels`
* `:underscore_keys`: `true` or `false`, `false` by default. Convert dashed keys to underscored keys when true,
e.g. `"rel-urls"` to `"rel_urls"` or `:"rel-urls"` to `:rel_urls`
## Examples
iex> Microformats2.parse("http://example.org/")
%{"rels" => [], "rel-urls" => [], "items" => []}
iex> Microformats2.parse("http://example.org/", atomize_keys: true, underscore_keys: true)
%{rels: [], rel_urls: [], items: []}
iex> Microformats2.parse(\"\"\"
<div class="h-card">
<img class="u-photo" alt="photo of Mitchell"
src="https://webfwd.org/content/about-experts/300.mitchellbaker/mentor_mbaker.jpg"/>
<a class="p-name u-url" href="http://blog.lizardwrangler.com/"><NAME></a>
(<a class="u-url" href="https://twitter.com/MitchellBaker">@MitchellBaker</a>)
<span class="p-org">Mozilla Foundation</span>
<p class="p-note">
Mitchell is responsible for setting the direction and scope of the Mozilla Foundation and its activities.
</p>
<span class="p-category">Strategy</span>
<span class="p-category">Leadership</span>
</div>
\"\"\", "http://example.org")
%{
"items" => [
%{
"properties" => %{
"category" => ["Strategy", "Leadership"],
"name" => ["<NAME>"],
"note" => ["Mitchell is responsible for setting the direction and scope of the Mozilla Foundation and its activities."],
"org" => ["Mozilla Foundation"],
"photo" => [
%{
"alt" => "photo of Mitchell",
"value" => "https://webfwd.org/content/about-experts/300.mitchellbaker/mentor_mbaker.jpg"
}
],
"url" => ["http://blog.lizardwrangler.com/",
"https://twitter.com/MitchellBaker"]
},
"type" => ["h-card"]
}
],
"rel-urls" => %{},
"rels" => %{}
}
"""
@spec parse(String.t() | Floki.html_tree(), String.t() | keyword(), keyword()) :: :error | map()
def parse(content_or_url, base_url_or_opts \\ [], opts \\ [])
if Code.ensure_loaded?(Tesla) do
def parse(url, opts, _) when is_list(opts) do
client = Tesla.client([{Tesla.Middleware.FollowRedirects, max_redirects: 3}])
case Tesla.get(client, url) do
{:ok, response} -> parse(response.body, url, opts)
_ -> :error
end
end
end
def parse(content, url, opts) when is_binary(content) do
case parsed_document(content) do
{:ok, doc} -> parse(doc, url, opts)
_ -> :error
end
end
def parse(content, url, opts) do
doc =
content
|> Floki.filter_out("template")
|> Floki.filter_out(:comment)
rels = Microformats2.Rels.parse(doc, url, opts)
items = Microformats2.Items.parse(doc, doc, url, opts)
%{
Helpers.normalized_key("items", opts) => items,
Helpers.normalized_key("rels", opts) => rels[Helpers.normalized_key("rels", opts)],
Helpers.normalized_key("rel-urls", opts) => rels[Helpers.normalized_key("rel-urls", opts)]
}
end
defp replace_whitespaces(text, last_text \\ "")
defp replace_whitespaces(text, last_text) when last_text == text, do: text
defp replace_whitespaces(text, _) do
text
|> String.replace(~r/>((&#32;)*) ( *)</, ">\\g{1}&#32;\\g{3}<")
|> replace_whitespaces(text)
end
# this is a really ugly hack, but html5ever doesn't support template tags (it fails with a nif_panic),
# mochiweb has bugs with whitespaces and I can't really get fast_html to work
defp parsed_document(content) do
content
|> replace_whitespaces()
|> String.replace(~r/\015/, "
")
|> String.replace(~r/\012/, "
")
|> String.replace(~r/\013/, "")
|> Floki.parse_document()
end
end
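# Usage sketch (assumes Floki plus the Microformats2.Rels/Items parsers
# referenced above are available): parsing an inline h-card fragment with
# atomized keys.
html = ~s(<span class="h-card"><a class="p-name u-url" href="https://example.org">Jane</a></span>)
Microformats2.parse(html, "https://example.org", atomize_keys: true)
# => a map with :items, :rels and :"rel-urls" keys; the exact item shape
#    comes from Microformats2.Items.parse/4, which lives in another file.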
|
lib/microformats2.ex
| 0.785185
| 0.449453
|
microformats2.ex
|
starcoder
|
defmodule FalconPlusApi.Api.Dashboard do
alias Maxwell.Conn
alias FalconPlusApi.{Util, Sig, Api}
@doc """
* [Session](#/authentication) Required
### Request
```
{
"screen_id": 953,
"title": "laiwei-test-graph1",
"endpoints": ["laiweiofficemac"],
"counters": ["value/name=pfc.push.ms","value/name=pfc.push.size"],
"timespan": 1800,
"graph_type": "h",
"method": "AVG",
"position": 0
}
```
### Response
```Status: 200```
```{"message":"ok"}```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def graph_create(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/graph>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.post
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Request
``` {"endpoints":["e1", "e2"], "counters":["c1", "c2"]} ```
### Response
```Status: 200```
```
{
"ck": "68c07419dbd7ac65977c97d05d99440d",
"counters": "c1|c2",
"endpoints": "e1|e2",
"id": 365195
}
```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def graph_create_tmpgraph(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/tmpgraph>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.post
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```{"message":"ok"}```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def graph_delete(id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/graph/#{id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.delete
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```
{
"counters":["value/name=pfc.push.ms", "value/name=pfc.push.size"],
"endpoints":["laiweiofficemac"],
"falcon_tags":"",
"graph_id":4626,
"graph_type":"h",
"method":"",
"position":4626,
"screen_id":953,
"timespan":3600,
"title":"test"
}```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def graph_get(id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/graph/#{id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```{"counters":["agent.alive"],"endpoints":["laiweiofficemac"]}```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def graph_get_tmpgraph_by_id(id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/tmpgraph/#{id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Request
```
{
"counters": ["value/name=pfc.push.ms","value/name=pfc.push.size", "agent.alive"],
"falcon_tags": "srv=falcon"
}
```
### Response
```Status: 200```
```{"message":"ok"}```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def graph_update(id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/graph/#{id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.put
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```
[
{
"counters": [
"value/name=pfc.push.ms"
],
"endpoints": [
"laiweiofficemac"
],
"falcon_tags": "",
"graph_id": 4640,
"graph_type": "h",
"method": "",
"position": 0,
"screen_id": 991,
"timespan": 3600,
"title": "dddd"
},
{
"counters": [
"aaa"
],
"endpoints": [
"xxx"
],
"falcon_tags": "",
"graph_id": 4641,
"graph_type": "h",
"method": "SUM",
"position": 0,
"screen_id": 991,
"timespan": 3600,
"title": "dddd"
}
]
```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def graphs_gets_by_screenid(screen_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/graphs/screen/#{screen_id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Request
Content-type: application/x-www-form-urlencoded
```name=laiwei-sceen1&pid=0```
### Response
```Status: 200```
```{"name":"laiwei-sceen1","pid":0,"screen_id":961} ```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def screen_create(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/screen>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.post
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```{"message":"ok"} ```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def screen_delete(screen_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/screen/#{screen_id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.delete
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```{"id":961,"pid":0,"name":"laiwei-sceen1"} ```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def screen_get_by_id(screen_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/screen/#{screen_id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Request
Content-type: application/x-www-form-urlencoded
```limit=100```
### Response
```Status: 200```
```
[
{
"id": 952,
"name": "a1",
"pid": 0
},
{
"id": 953,
"name": "aa1",
"pid": 952
},
{
"id": 968,
"name": "laiwei-screen2",
"pid": 1
},
{
"id": 972,
"name": "laiwei-sceen1",
"pid": 0
},
{
"id": 991,
"name": "xnew",
"pid": 972
},
{
"id": 993,
"name": "clone3",
"pid": 972
},
{
"id": 995,
"name": "op",
"pid": 0
}
]
```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def screen_gets_all(sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/screens>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Response
```Status: 200```
```
[
{
"id": 952,
"name": "a1",
"pid": 0
},
{
"id": 961,
"name": "laiwei-sceen1",
"pid": 0
}
]
```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def screen_gets_by_pid(screen_pid, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/screens/pid/#{screen_pid}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.get
|> Api.get_result
end
@doc """
* [Session](#/authentication) Required
### Request
Content-type: application/x-www-form-urlencoded
```name=laiwei-sceen1&pid=0```
### Response
```Status: 200```
```{"message":"ok"} ```
For error responses, see the [response status codes documentation](#/response-status-codes).
"""
def screen_update(screen_id, sig, addr, opts \\ []) do
sig = Sig.get_sig(sig)
~s</api/v1/dashboard/screen/#{screen_id}>
|> Util.url(addr)
|> Conn.new()
|> Api.set_opts(opts)
|> Conn.put_req_header("Apitoken", sig)
|> Api.put
|> Api.get_result
end
end
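# Usage sketch (hypothetical host and session token; the accepted `opts`
# keys depend on FalconPlusApi.Api.set_opts/2, which is defined elsewhere):
sig = "my-session-sig"
addr = "http://falcon.example.com:8080"
FalconPlusApi.Api.Dashboard.screen_gets_all(sig, addr)
FalconPlusApi.Api.Dashboard.graphs_gets_by_screenid(953, sig, addr)
# return shapes come from Api.get_result/1, also defined elsewhere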
|
lib/falcon_plus_api/api/dashboard.ex
| 0.698227
| 0.750758
|
dashboard.ex
|
starcoder
|
defmodule Vex.Validators.Exclusion do
@moduledoc """
Ensure a value is not a member of a list of values.
## Options
* `:in`: The list.
* `:message`: Optional. A custom error message. May be in EEx format
and use the fields described in "Custom Error Messages," below.
The list can be provided in place of the keyword list if no other options are needed.
## Examples
iex> Vex.Validators.Exclusion.validate(1, [1, 2, 3])
{:error, "must not be one of [1, 2, 3]"}
iex> Vex.Validators.Exclusion.validate(1, [in: [1, 2, 3]])
{:error, "must not be one of [1, 2, 3]"}
iex> Vex.Validators.Exclusion.validate(1, [in: [1, 2, 3], message: "<%= value %> shouldn't be in <%= inspect list %>"])
{:error, "1 shouldn't be in [1, 2, 3]"}
iex> Vex.Validators.Exclusion.validate(4, [1, 2, 3])
:ok
iex> Vex.Validators.Exclusion.validate("a", ~w(a b c))
{:error, ~S(must not be one of ["a", "b", "c"])}
iex> Vex.Validators.Exclusion.validate("a", in: ~w(a b c), message: "must not be abc, talkin' 'bout 123")
{:error, "must not be abc, talkin' 'bout 123"}
## Custom Error Messages
Custom error messages (in EEx format), provided as :message, can use the following values:
iex> Vex.Validators.Exclusion.__validator__(:message_fields)
[value: "The bad value", list: "List"]
An example:
iex> Vex.Validators.Exclusion.validate("a", in: ~w(a b c), message: "<%= inspect value %> is a disallowed value")
{:error, ~S("a" is a disallowed value)}
"""
use Vex.Validator
@message_fields [value: "The bad value", list: "List"]
def validate(value, options) when is_list(options) do
if Keyword.keyword?(options) do
unless_skipping(value, options) do
list = Keyword.get(options, :in)
result(
!Enum.member?(list, value),
message(options, "must not be one of #{inspect(list)}", value: value, list: list)
)
end
else
validate(value, [in: options])
end
end
defp result(true, _), do: :ok
defp result(false, message), do: {:error, message}
end
|
lib/vex/validators/exclusion.ex
| 0.799638
| 0.52007
|
exclusion.ex
|
starcoder
|
defmodule Andy.Profiles.Rover.GMDefs.Eating do
@moduledoc "The GM definition for :eating"
alias Andy.GM.{GenerativeModelDef, Intention, Conjecture}
import Andy.GM.Utils
def gm_def() do
%GenerativeModelDef{
name: :eating,
conjectures: [
conjecture(:chewing),
conjecture(:found_food)
],
contradictions: [],
priors: %{
chewing: %{about: :self, values: %{is: false}},
found_food: %{about: :self, values: %{is: false}}
},
intentions: %{
chew: [
%Intention{
intent_name: :eat,
valuator: chewing_valuator(),
repeatable: true
},
%Intention{
intent_name: :say,
valuator: chewing_noise(),
repeatable: true
}
]
}
}
end
# Conjectures
# opinion
defp conjecture(:chewing) do
%Conjecture{
name: :chewing,
activator: opinion_activator(:self),
predictors: [
no_change_predictor(:over_food, default: %{is: false})
],
# always true if activated
valuator: chewing_belief_valuator(),
intention_domain: [:chew]
}
end
# goal
defp conjecture(:found_food) do
%Conjecture{
name: :found_food,
activator: goal_activator(fn %{is: found_food?} -> found_food? end),
predictors: [
no_change_predictor(:over_food, default: %{is: false}),
no_change_predictor(:approaching_food, default: %{is: false})
],
valuator: found_food_belief_valuator(),
intention_domain: []
}
end
# Conjecture belief valuators
defp found_food_belief_valuator() do
fn conjecture_activation, [round | _previous_rounds] ->
about = conjecture_activation.about
over_food? = current_perceived_value(round, about, :over_food, :is, default: false)
%{is: over_food?}
end
end
defp chewing_belief_valuator() do
fn conjecture_activation, [round | _previous_rounds] ->
about = conjecture_activation.about
over_food? = current_perceived_value(round, about, :over_food, :is, default: false)
%{is: over_food?}
end
end
# Intention valuators
defp chewing_valuator() do
fn %{is: chewing?} = belief_values ->
if chewing?, do: empty_valuator().(belief_values), else: nil
end
end
defp chewing_noise() do
fn %{is: chewing?} ->
if chewing?, do: saying("Nom de nom de nom"), else: nil
end
end
end
|
lib/andy/profiles/rover/gm_defs/eating.ex
| 0.616936
| 0.415166
|
eating.ex
|
starcoder
|
defmodule Sanbase.Utils.Transform do
@doc ~s"""
Transform the maps from the :ok tuple data so the `key` is renamed to `new_key`
## Examples:
iex> Sanbase.Utils.Transform.rename_map_keys({:ok, [%{a: 1}, %{a: 2}]}, old_key: :a, new_key: :b)
{:ok, [%{b: 1}, %{b: 2}]}
iex> Sanbase.Utils.Transform.rename_map_keys({:ok, [%{a: 1}, %{d: 2}]}, old_key: :a, new_key: :b)
{:ok, [%{b: 1}, %{d: 2}]}
iex> Sanbase.Utils.Transform.rename_map_keys({:error, "bad"}, old_key: :a, new_key: :b)
{:error, "bad"}
"""
@spec duplicate_map_keys({:ok, list(map)}, any(), any()) :: {:ok, list(map)}
@spec duplicate_map_keys({:error, any()}, any(), any()) :: {:error, any()}
def duplicate_map_keys({:ok, data}, key, new_key) do
result =
data
|> Enum.map(fn
%{^key => value} = elem -> elem |> Map.put(new_key, value)
elem -> elem
end)
{:ok, result}
end
def duplicate_map_keys({:error, error}, _, _) do
{:error, error}
end
@doc ~s"""
Transform the maps from the :ok tuple data so the `key` duplicated with a key
named `new_key`
## Examples:
iex> Sanbase.Utils.Transform.rename_map_keys({:ok, [%{a: 1}, %{a: 2}]}, old_key: :a, new_key: :b)
{:ok, [%{b: 1}, %{b: 2}]}
iex> Sanbase.Utils.Transform.rename_map_keys({:ok, [%{a: 1}, %{d: 2}]}, old_key: :a, new_key: :b)
{:ok, [%{b: 1}, %{d: 2}]}
iex> Sanbase.Utils.Transform.rename_map_keys({:error, "bad"}, old_key: :a, new_key: :b)
{:error, "bad"}
"""
@spec rename_map_keys({:ok, list(map)}, keyword(atom())) :: {:ok, list(map)}
@spec rename_map_keys({:error, any()}, keyword(atom())) :: {:error, any()}
def rename_map_keys({:ok, data}, opts) do
old_key = Keyword.get(opts, :old_key)
new_key = Keyword.get(opts, :new_key)
result =
data
|> Enum.map(fn
%{^old_key => value} = elem ->
elem |> Map.delete(old_key) |> Map.put(new_key, value)
elem ->
elem
end)
{:ok, result}
end
def rename_map_keys({:error, error}, _) do
{:error, error}
end
def rename_map_keys!(map, old_keys: old_keys, new_keys: new_keys) do
old_new_keys_map = Enum.zip(old_keys, new_keys) |> Enum.into(%{})
map
|> Enum.map(fn {k, v} -> {old_new_keys_map[k] || k, v} end)
|> Enum.into(%{})
end
def maybe_unwrap_ok_value({:ok, [value]}), do: {:ok, value}
def maybe_unwrap_ok_value({:ok, []}), do: {:ok, nil}
def maybe_unwrap_ok_value({:error, error}), do: {:error, error}
@doc ~s"""
Sums the values of all keys with the same datetime
## Examples:
iex> Sanbase.Utils.Transform.sum_by_datetime([%{datetime: ~U[2019-01-01 00:00:00Z], val: 2}, %{datetime: ~U[2019-01-01 00:00:00Z], val: 3}, %{datetime: ~U[2019-01-02 00:00:00Z], val: 2}], :val)
[%{datetime: ~U[2019-01-01 00:00:00Z], val: 5}, %{datetime: ~U[2019-01-02 00:00:00Z], val: 2}]
iex> Sanbase.Utils.Transform.sum_by_datetime([], :key)
[]
"""
@spec sum_by_datetime(list(map), atom()) :: list(map)
def sum_by_datetime(data, key) do
data
|> Enum.group_by(& &1[:datetime], & &1[key])
|> Enum.map(fn {datetime, list} ->
value =
case list do
[] -> 0
[_ | _] = list -> Enum.sum(list)
end
%{:datetime => datetime, key => value}
end)
|> Enum.sort_by(&DateTime.to_unix(&1[:datetime]))
end
end
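# Usage sketch for rename_map_keys!/2, which has no doctest above:
Sanbase.Utils.Transform.rename_map_keys!(%{a: 1, b: 2}, old_keys: [:a], new_keys: [:x])
# => %{x: 1, b: 2}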
|
lib/sanbase/utils/transform.ex
| 0.805441
| 0.686547
|
transform.ex
|
starcoder
|
defmodule CacheMoney do
@moduledoc """
Handles caching values under different cache names, can expire keys
"""
use GenServer
@default_timeout 5000
@typedoc """
The name of the cache, used for namespacing multiple caches on the same adapter.
Can be either a binary or an atom, but will always be converted to a binary.
"""
@type cache_name :: binary | atom
@typedoc """
The key a value will be set under. Can be either a binary or an atom, but will
always be converted to a binary.
"""
@type key :: binary | atom
@typedoc """
The value to be saved in the cache. Can be any value going _in_ to the cache,
but depending on the adapter used, may not be the same value going out. For
example, `CacheMoney.Adapters.ETS` can save any elixir term, including `pid`s.
`CacheMoney.Adapters.Redis`, however, can only save items as strings.
"""
@type value :: term
@typedoc """
Currently the only option available is an optional `timeout` that gets passed
along with `GenServer.call`
"""
@type options :: [timeout: integer]
@type lazy_function :: (() -> {:ok, value} | {:ok, value, integer} | {:error, value} | value)
@type server :: GenServer.server()
@doc """
Starts a `CacheMoney` process linked to the current process.
## Arguments
* cache - the name of the cache. Multiple caches using the same adapter will
all be in the same spot, but will be namespaced by the given cache name.
* config - contains various configuration options for the cache, depending on
the adapter. `:adapter` is required to be set, and must be set to a module that
implements `CacheMoney.Adapter`, such as `CacheMoney.Adapters.Redis` or
`CacheMoney.Adapters.ETS`. Different adapters will also specify other required
options to be passed to them through the `config` argument
* opts - see `GenServer.start_link/3`. Options are passed straight through to the
underlying `GenServer`
"""
@spec start_link(cache_name, map(), Keyword.t()) :: GenServer.on_start()
def start_link(cache, config = %{}, opts \\ []) do
config =
config
|> Map.put_new(:cache, cache)
|> config.adapter.start_link()
opts = Keyword.put_new(opts, :name, cache)
GenServer.start_link(__MODULE__, config, opts)
end
@doc """
Gets the value out of the cache using the `key`.
If the value does not exist in the cache `nil` will be returned.
"""
@spec get(server, key, options()) :: {:ok, value} | {:error, term}
def get(server, key, opts \\ []),
do: GenServer.call(server, {:get, key}, opts[:timeout] || @default_timeout)
@doc """
Gets the value out of the cache using the `key`. Lazily fetches the data, inserts
it into the cache, and returns it if it does not exist. Optional `expiry` is in
seconds.
"""
@spec get_lazy(server, key, lazy_function(), integer | nil, options()) ::
{:ok, value} | {:error, any}
def get_lazy(server, key, fun, expiry \\ nil, opts \\ []),
do: GenServer.call(server, {:get_lazy, key, fun, expiry}, opts[:timeout] || @default_timeout)
@doc """
Sets `key` in the cache to `value`
"""
@spec set(server, key, value) :: {:ok, value} | {:error, any}
def set(server, key, value), do: GenServer.call(server, {:set, key, value})
@doc """
Sets `key` in the cache to `value`
"""
@spec set(server, key, value, options()) :: {:ok, value} | {:error, any}
def set(server, key, value, opts) when is_list(opts),
do: GenServer.call(server, {:set, key, value}, opts[:timeout] || @default_timeout)
@doc """
Sets `key` in the cache to `value`, which expires after `expiry` seconds
"""
@spec set(server, key, value, integer, options()) :: {:ok, value} | {:error, any}
def set(server, key, value, expiry, opts \\ []),
do: GenServer.call(server, {:set, key, value, expiry}, opts[:timeout] || @default_timeout)
@doc """
Deletes the `key` from the cache
"""
@spec delete(server, key, options()) :: {:ok, value} | {:error, term}
def delete(server, key, opts \\ []),
do: GenServer.call(server, {:delete, key}, opts[:timeout] || @default_timeout)
@impl true
def init(args) do
{:ok, args}
end
@impl true
def handle_call({:get, key}, from, config) do
key = get_key(config.cache, key)
{:reply, config.adapter.get(with_caller(config, from), key), config}
end
def handle_call({:get_lazy, key, fun, expiry}, from, config) do
key = get_key(config.cache, key)
case config.adapter.get(with_caller(config, from), key) do
{:ok, nil} ->
value = get_and_save_lazy_value(key, fun.(), expiry, with_caller(config, from))
{:reply, value, config}
value ->
{:reply, value, config}
end
end
def handle_call({:set, key, value}, from, config) do
{:reply, config.adapter.set(with_caller(config, from), get_key(config.cache, key), value),
config}
end
def handle_call({:set, key, value, expiry}, from, config) do
{:reply,
config.adapter.set(
with_caller(config, from),
get_key(config.cache, key),
value,
expiry
), config}
end
def handle_call({:delete, key}, from, config) do
{:reply, config.adapter.delete(with_caller(config, from), get_key(config.cache, key)), config}
end
defp get_and_save_lazy_value(key, {:ok, value}, nil, config) do
config.adapter.set(config, key, value)
{:ok, value}
end
defp get_and_save_lazy_value(key, {:ok, value, expiry}, nil, config) do
config.adapter.set(config, key, value, expiry)
{:ok, value}
end
defp get_and_save_lazy_value(key, {:ok, value}, expiry, config) do
config.adapter.set(config, key, value, expiry)
{:ok, value}
end
defp get_and_save_lazy_value(_key, {:error, error}, _, _config), do: {:error, error}
defp get_and_save_lazy_value(key, value, nil, config) do
config.adapter.set(config, key, value)
{:ok, value}
end
defp get_and_save_lazy_value(key, value, expiry, config) do
config.adapter.set(config, key, value, expiry)
{:ok, value}
end
defp get_key(cache, key) do
"#{cache}-#{key}"
end
defp with_caller(config, {caller, _}), do: Map.put(config, :caller, caller)
end
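# Usage sketch (assumes the ETS adapter mentioned in the moduledoc; any
# adapter-specific config keys beyond :adapter are not shown in this file):
{:ok, _pid} = CacheMoney.start_link(:sessions, %{adapter: CacheMoney.Adapters.ETS})
CacheMoney.set(:sessions, :greeting, "hello")
CacheMoney.get(:sessions, :greeting)
# => {:ok, "hello"}
CacheMoney.get_lazy(:sessions, :answer, fn -> {:ok, 42} end, 60)
# => {:ok, 42}, computed once and then cached for 60 seconds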
|
lib/cache_money.ex
| 0.890564
| 0.562597
|
cache_money.ex
|
starcoder
|
defmodule Claritas do
use Bitwise
@moduledoc """
Claritas lighten or darken color (hexadecimal) by integer value.
"""
@doc """
Shift brightness (up or down) of the given hexadecimal `color` by integer value `brightness_change`.
Parameters:
`color`: color in hexadecimal format (#RRGGBB)
`brightness_change`: integer value
## Examples
```
# positive values are lightening the color
iex> Claritas.shift("#f06d06", 30)
{:ok, "#FF8B24"}
# negative values are darkening the color
iex> Claritas.shift("#f06d06", -30)
{:ok, "#D24F00"}
```
Dynamically increase/decrease brightness of your colors.
Controller:
```
def index(conn, _params) do
shades =
for i <- 1..20, into: [] do
Claritas.shift("#141C5B", 10 * i)
end
render(conn, "index.html", shades: shades)
end
```
Template:
```
<ul>
<%= for {_, shade} <- @shades do %>
<li><span style='background-color: <%= shade %>'><%= shade %></span></li>
<% end %>
</ul>
```
"""
def shift(color, _) when not is_binary(color), do: {:error, "color must be a string"}
def shift(color, brightness_change) when is_integer(brightness_change) do
hexc =
color
|> String.split("#")
# handle both cases: hex number or hex number started with hash symbol
hex_color =
case hexc do
[_, hex] -> hex
[hex] -> hex
end
int_color = hex_color |> to_charlist() |> List.to_integer(16)
red = (int_color >>> 16) + brightness_change
green = (int_color >>> 8 &&& 0x00FF) + brightness_change
blue = (int_color &&& 0x0000FF) + brightness_change
red = adjust(red)
green = adjust(green)
blue = adjust(blue)
# reassemble as RRGGBB; pad so very dark results keep six hex digits
new_color =
(blue ||| green <<< 8 ||| red <<< 16)
|> Integer.to_string(16)
|> String.pad_leading(6, "0")
{:ok, "#" <> new_color}
end
def shift(_, brightness_change) when not is_integer(brightness_change),
do: {:error, "second parameter must be integer"}
# let's ensure values belong to RGB range
defp adjust(intensity) when intensity > 255, do: 255
defp adjust(intensity) when intensity < 0, do: 0
defp adjust(intensity), do: intensity
end
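# Usage sketch: building a short ramp of shades, as in the moduledoc's
# controller example.
for i <- 1..3 do
{:ok, shade} = Claritas.shift("#141C5B", 10 * i)
shade
end
# => ["#1E2665", "#28306F", "#323A79"]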
|
lib/claritas.ex
| 0.919498
| 0.856872
|
claritas.ex
|
starcoder
|
defmodule Chunky.Sequence.OEIS.Multiples do
@moduledoc """
Sequences from the [Online Encyclopedia of Integer Sequences](https://oeis.org) dealing with multiples
and additions.
## Available Sequences
### Multiples of an integer
- `create_sequence_a008585/1` - A008585 - a(n) = 3*n.
- `create_sequence_a008586/1` - A008586 - Multiples of 4.
- `create_sequence_a008587/1` - A008587 - Multiples of 5.
- `create_sequence_a008588/1` - A008588 - Nonnegative multiples of 6.
- `create_sequence_a008589/1` - A008589 - Multiples of 7.
- `create_sequence_a008590/1` - A008590 - Multiples of 8.
- `create_sequence_a008591/1` - A008591 - Multiples of 9.
- `create_sequence_a008592/1` - A008592 - Multiples of 10: a(n) = 10 * n.
- `create_sequence_a008593/1` - A008593 - Multiples of 11.
- `create_sequence_a008594/1` - A008594 - Multiples of 12.
- `create_sequence_a008595/1` - A008595 - Multiples of 13.
- `create_sequence_a008596/1` - A008596 - Multiples of 14.
- `create_sequence_a008597/1` - A008597 - Multiples of 15.
- `create_sequence_a008598/1` - A008598 - Multiples of 16.
- `create_sequence_a008599/1` - A008599 - Multiples of 17.
- `create_sequence_a008600/1` - A008600 - Multiples of 18.
- `create_sequence_a008601/1` - A008601 - Multiples of 19.
- `create_sequence_a008602/1` - A008602 - Multiples of 20.
- `create_sequence_a008603/1` - A008603 - Multiples of 21.
- `create_sequence_a008604/1` - A008604 - Multiples of 22.
- `create_sequence_a008605/1` - A008605 - Multiples of 23.
- `create_sequence_a008606/1` - A008606 - Multiples of 24.
- `create_sequence_a008607/1` - A008607 - Multiples of 25.
- `create_sequence_a252994/1` - A252994 - Multiples of 26.
- `create_sequence_a305548/1` - A305548 - a(n) = 27*n.
- `create_sequence_a135628/1` - A135628 - Multiples of 28.
- `create_sequence_a195819/1` - A195819 - Multiples of 29.
- `create_sequence_a249674/1` - A249674 - a(n) = 30*n.
- `create_sequence_a135631/1` - A135631 - Multiples of 31.
- `create_sequence_a174312/1` - A174312 - 32*n.
- `create_sequence_a044102/1` - A044102 - Multiples of 36.
- `create_sequence_a085959/1` - A085959 - Multiples of 37.
- `create_sequence_a169823/1` - A169823 - Multiples of 60.
- `create_sequence_a152691/1` - A152691 - Multiples of 64.
- `create_sequence_a169825/1` - A169825 - Multiples of 420.
- `create_sequence_a169827/1` - A169827 - Multiples of 840.
### Multiples with specific Digit Sums
- `create_sequence_a069537/1` - A069537 - Multiples of 2 with digit sum = 2.
- `create_sequence_a063997/1` - A063997 - Multiples of 4 whose digits add to 4.
- `create_sequence_a069540/1` - A069540 - Multiples of 5 with digit sum 5.
- `create_sequence_a062768/1` - A062768 - Multiples of 6 such that the sum of the digits is equal to 6.
- `create_sequence_a063416/1` - A063416 - Multiples of 7 whose sum of digits is equal to 7.
- `create_sequence_a069543/1` - A069543 - Multiples of 8 with digit sum 8.
### Multiples containing specific Digits
- `create_sequence_a121023/1` - A121023 - Multiples of 3 containing a 3 in their decimal representation.
- `create_sequence_a121024/1` - A121024 - Multiples of 4 containing a 4 in their decimal representation.
- `create_sequence_a121025/1` - A121025 - Multiples of 5 containing a 5 in their decimal representation.
- `create_sequence_a121026/1` - A121026 - Multiples of 6 containing a 6 in their decimal representation.
- `create_sequence_a121027/1` - A121027 - Multiples of 7 containing a 7 in their decimal representation.
- `create_sequence_a121028/1` - A121028 - Multiples of 8 containing an 8 in their decimal representation.
- `create_sequence_a121029/1` - A121029 - Multiples of 9 containing a 9 in their decimal representation.
- `create_sequence_a121030/1` - A121030 - Multiples of 10 containing a 10 in their decimal representation.
- `create_sequence_a121031/1` - A121031 - Multiples of 11 containing an 11 in their decimal representation.
- `create_sequence_a121032/1` - A121032 - Multiples of 12 containing a 12 in their decimal representation.
- `create_sequence_a121033/1` - A121033 - Multiples of 13 containing a 13 in their decimal representation.
- `create_sequence_a121034/1` - A121034 - Multiples of 14 containing a 14 in their decimal representation.
- `create_sequence_a121035/1` - A121035 - Multiples of 15 containing a 15 in their decimal representation.
- `create_sequence_a121036/1` - A121036 - Multiples of 16 containing a 16 in their decimal representation.
- `create_sequence_a121037/1` - A121037 - Multiples of 17 containing a 17 in their decimal representation.
- `create_sequence_a121038/1` - A121038 - Multiples of 18 containing a 18 in their decimal representation.
- `create_sequence_a121039/1` - A121039 - Multiples of 19 containing a 19 in their decimal representation.
- `create_sequence_a121040/1` - A121040 - Multiples of 20 containing a 20 in their decimal representation.
### Perfect Numbers and Variations
- `create_sequence_a006037/1` - A006037 - Weird numbers: abundant (A005101) but not pseudoperfect (A005835).
- `create_sequence_a002975/1` - A002975 - Primitive weird numbers: weird numbers with no proper weird divisors.
- `create_sequence_a005835/1` - A005835 - Pseudoperfect (or semiperfect) numbers n: some subset of the proper divisors of n sums to n.
- `create_sequence_a006036/1` - A006036 - Primitive pseudoperfect numbers.
- `create_sequence_a054377/1` - A054377 - Primary pseudoperfect numbers: numbers n > 1 such that 1/n + sum 1/p = 1, where the sum is over the primes p | n.
- `create_sequence_a194472/1` - A194472 - Erdős-Nicolas numbers.
### Digit sums or sequential digits
- `create_sequence_a009003/1` - A009003 - Hypotenuse numbers (squares are sums of 2 nonzero squares).
- `create_sequence_a004144/1` - A004144 - Nonhypotenuse numbers (indices of positive squares that are not the sums of 2 distinct nonzero squares).
- `create_sequence_a005153/1` - A005153 - Practical numbers: positive integers m such that every k <= sigma(m) is a sum of distinct divisors of m. Also called panarithmic numbers.
- `create_sequence_a006753/1` - A006753 - Smith (or joke) numbers: composite numbers n such that sum of digits of n = sum of digits of prime factors of n (counted with multiplicity).
- `create_sequence_a019506/1` - A019506 - Hoax numbers: composite numbers whose digit-sum equals the sum of the digit-sums of its distinct prime factors.
- `create_sequence_a016825/1` - A016825 - Positive integers congruent to 2 mod 4: a(n) = 4*n+2, for n >= 0.
- `create_sequence_a138591/1` - A138591 - Sums of two or more consecutive nonnegative integers.
### Convergences
- `create_sequence_a007770/1` - A007770 - Happy numbers: numbers whose trajectory under iteration of sum of squares of digits map (see A003132) includes 1.
- `create_sequence_a031177/1` - A031177 - Unhappy numbers: numbers having period-8 2-digitized sequences.
"""
import Chunky.Sequence, only: [sequence_for_function: 1, sequence_for_list: 1]
alias Chunky.Math
alias Chunky.Math.Predicates
# raw data for A054377 - Primary pseudoperfect numbers: numbers n > 1 such that 1/n + sum 1/p = 1, where the sum is over the primes p | n.
@data_a054377 [
2,
6,
42,
1806,
47058,
2_214_502_422,
52_495_396_602,
8_490_421_583_559_688_410_706_771_261_086
]
@doc """
OEIS Sequence `A008585` - a(n) = 3*n.
From [OEIS A008585](https://oeis.org/A008585):
> a(n) = 3*n.
> (Formerly )
**Sequence IDs**: `:a008585`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008585) |> Sequence.take!(60)
[0,3,6,9,12,15,18,21,24,27,30,33,36,39,42,45,48,51,54,57,60,63,66,69,72,75,78,81,84,87,90,93,96,99,102,105,108,111,114,117,120,123,126,129,132,135,138,141,144,147,150,153,156,159,162,165,168,171,174,177]
"""
@doc offset: 0,
sequence: "a(n) = 3*n.",
references: [{:oeis, :a008585, "https://oeis.org/A008585"}]
def create_sequence_a008585(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008585/1)
end
@doc false
@doc offset: 0
def seq_a008585(idx) do
idx * 3
end
@doc """
OEIS Sequence `A008586` - Multiples of 4.
From [OEIS A008586](https://oeis.org/A008586):
> Multiples of 4.
> (Formerly )
**Sequence IDs**: `:a008586`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008586) |> Sequence.take!(58)
[0,4,8,12,16,20,24,28,32,36,40,44,48,52,56,60,64,68,72,76,80,84,88,92,96,100,104,108,112,116,120,124,128,132,136,140,144,148,152,156,160,164,168,172,176,180,184,188,192,196,200,204,208,212,216,220,224,228]
"""
@doc offset: 0,
sequence: "Multiples of 4.",
references: [{:oeis, :a008586, "https://oeis.org/A008586"}]
def create_sequence_a008586(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008586/1)
end
@doc false
@doc offset: 0
def seq_a008586(idx) do
idx * 4
end
@doc """
OEIS Sequence `A008587` - Multiples of 5.
From [OEIS A008587](https://oeis.org/A008587):
> Multiples of 5.
> (Formerly )
**Sequence IDs**: `:a008587`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008587) |> Sequence.take!(56)
[0,5,10,15,20,25,30,35,40,45,50,55,60,65,70,75,80,85,90,95,100,105,110,115,120,125,130,135,140,145,150,155,160,165,170,175,180,185,190,195,200,205,210,215,220,225,230,235,240,245,250,255,260,265,270,275]
"""
@doc offset: 0,
sequence: "Multiples of 5.",
references: [{:oeis, :a008587, "https://oeis.org/A008587"}]
def create_sequence_a008587(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008587/1)
end
@doc false
@doc offset: 0
def seq_a008587(idx) do
idx * 5
end
@doc """
OEIS Sequence `A008588` - Nonnegative multiples of 6.
From [OEIS A008588](https://oeis.org/A008588):
> Nonnegative multiples of 6.
> (Formerly )
**Sequence IDs**: `:a008588`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008588) |> Sequence.take!(59)
[0,6,12,18,24,30,36,42,48,54,60,66,72,78,84,90,96,102,108,114,120,126,132,138,144,150,156,162,168,174,180,186,192,198,204,210,216,222,228,234,240,246,252,258,264,270,276,282,288,294,300,306,312,318,324,330,336,342,348]
"""
@doc offset: 0,
sequence: "Nonnegative multiples of 6.",
references: [{:oeis, :a008588, "https://oeis.org/A008588"}]
def create_sequence_a008588(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008588/1)
end
@doc false
@doc offset: 0
def seq_a008588(idx) do
idx * 6
end
@doc """
OEIS Sequence `A008589` - Multiples of 7.
From [OEIS A008589](https://oeis.org/A008589):
> Multiples of 7.
> (Formerly )
**Sequence IDs**: `:a008589`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008589) |> Sequence.take!(55)
[0,7,14,21,28,35,42,49,56,63,70,77,84,91,98,105,112,119,126,133,140,147,154,161,168,175,182,189,196,203,210,217,224,231,238,245,252,259,266,273,280,287,294,301,308,315,322,329,336,343,350,357,364,371,378]
"""
@doc offset: 0,
sequence: "Multiples of 7.",
references: [{:oeis, :a008589, "https://oeis.org/A008589"}]
def create_sequence_a008589(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008589/1)
end
@doc false
@doc offset: 0
def seq_a008589(idx) do
idx * 7
end
@doc """
OEIS Sequence `A008590` - Multiples of 8.
From [OEIS A008590](https://oeis.org/A008590):
> Multiples of 8.
> (Formerly )
**Sequence IDs**: `:a008590`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008590) |> Sequence.take!(55)
[0,8,16,24,32,40,48,56,64,72,80,88,96,104,112,120,128,136,144,152,160,168,176,184,192,200,208,216,224,232,240,248,256,264,272,280,288,296,304,312,320,328,336,344,352,360,368,376,384,392,400,408,416,424,432]
"""
@doc offset: 0,
sequence: "Multiples of 8.",
references: [{:oeis, :a008590, "https://oeis.org/A008590"}]
def create_sequence_a008590(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008590/1)
end
@doc false
@doc offset: 0
def seq_a008590(idx) do
idx * 8
end
@doc """
OEIS Sequence `A008591` - Multiples of 9.
From [OEIS A008591](https://oeis.org/A008591):
> Multiples of 9.
> (Formerly )
**Sequence IDs**: `:a008591`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008591) |> Sequence.take!(54)
[0,9,18,27,36,45,54,63,72,81,90,99,108,117,126,135,144,153,162,171,180,189,198,207,216,225,234,243,252,261,270,279,288,297,306,315,324,333,342,351,360,369,378,387,396,405,414,423,432,441,450,459,468,477]
"""
@doc offset: 0,
sequence: "Multiples of 9.",
references: [{:oeis, :a008591, "https://oeis.org/A008591"}]
def create_sequence_a008591(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008591/1)
end
@doc false
@doc offset: 0
def seq_a008591(idx) do
idx * 9
end
@doc """
OEIS Sequence `A008592` - Multiples of 10: a(n) = 10 * n.
From [OEIS A008592](https://oeis.org/A008592):
> Multiples of 10: a(n) = 10 * n.
> (Formerly )
**Sequence IDs**: `:a008592`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008592) |> Sequence.take!(54)
[0,10,20,30,40,50,60,70,80,90,100,110,120,130,140,150,160,170,180,190,200,210,220,230,240,250,260,270,280,290,300,310,320,330,340,350,360,370,380,390,400,410,420,430,440,450,460,470,480,490,500,510,520,530]
"""
@doc offset: 0,
sequence: "Multiples of 10: a(n) = 10 * n.",
references: [{:oeis, :a008592, "https://oeis.org/A008592"}]
def create_sequence_a008592(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008592/1)
end
@doc false
@doc offset: 0
def seq_a008592(idx) do
idx * 10
end
@doc """
OEIS Sequence `A008593` - Multiples of 11.
From [OEIS A008593](https://oeis.org/A008593):
> Multiples of 11.
> (Formerly )
**Sequence IDs**: `:a008593`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008593) |> Sequence.take!(54)
[0,11,22,33,44,55,66,77,88,99,110,121,132,143,154,165,176,187,198,209,220,231,242,253,264,275,286,297,308,319,330,341,352,363,374,385,396,407,418,429,440,451,462,473,484,495,506,517,528,539,550,561,572,583]
"""
@doc offset: 0,
sequence: "Multiples of 11.",
references: [{:oeis, :a008593, "https://oeis.org/A008593"}]
def create_sequence_a008593(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008593/1)
end
@doc false
@doc offset: 0
def seq_a008593(idx) do
idx * 11
end
@doc """
OEIS Sequence `A008594` - Multiples of 12.
From [OEIS A008594](https://oeis.org/A008594):
> Multiples of 12.
> (Formerly )
**Sequence IDs**: `:a008594`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008594) |> Sequence.take!(54)
[0,12,24,36,48,60,72,84,96,108,120,132,144,156,168,180,192,204,216,228,240,252,264,276,288,300,312,324,336,348,360,372,384,396,408,420,432,444,456,468,480,492,504,516,528,540,552,564,576,588,600,612,624,636]
"""
@doc offset: 0,
sequence: "Multiples of 12.",
references: [{:oeis, :a008594, "https://oeis.org/A008594"}]
def create_sequence_a008594(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008594/1)
end
@doc false
@doc offset: 0
def seq_a008594(idx) do
idx * 12
end
@doc """
OEIS Sequence `A008595` - Multiples of 13.
From [OEIS A008595](https://oeis.org/A008595):
> Multiples of 13.
> (Formerly )
**Sequence IDs**: `:a008595`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008595) |> Sequence.take!(53)
[0,13,26,39,52,65,78,91,104,117,130,143,156,169,182,195,208,221,234,247,260,273,286,299,312,325,338,351,364,377,390,403,416,429,442,455,468,481,494,507,520,533,546,559,572,585,598,611,624,637,650,663,676]
"""
@doc offset: 0,
sequence: "Multiples of 13.",
references: [{:oeis, :a008595, "https://oeis.org/A008595"}]
def create_sequence_a008595(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008595/1)
end
@doc false
@doc offset: 0
def seq_a008595(idx) do
idx * 13
end
@doc """
OEIS Sequence `A008596` - Multiples of 14.
From [OEIS A008596](https://oeis.org/A008596):
> Multiples of 14.
> (Formerly )
**Sequence IDs**: `:a008596`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008596) |> Sequence.take!(53)
[0,14,28,42,56,70,84,98,112,126,140,154,168,182,196,210,224,238,252,266,280,294,308,322,336,350,364,378,392,406,420,434,448,462,476,490,504,518,532,546,560,574,588,602,616,630,644,658,672,686,700,714,728]
"""
@doc offset: 0,
sequence: "Multiples of 14.",
references: [{:oeis, :a008596, "https://oeis.org/A008596"}]
def create_sequence_a008596(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008596/1)
end
@doc false
@doc offset: 0
def seq_a008596(idx) do
idx * 14
end
@doc """
OEIS Sequence `A008597` - Multiples of 15.
From [OEIS A008597](https://oeis.org/A008597):
> Multiples of 15.
> (Formerly )
**Sequence IDs**: `:a008597`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008597) |> Sequence.take!(53)
[0,15,30,45,60,75,90,105,120,135,150,165,180,195,210,225,240,255,270,285,300,315,330,345,360,375,390,405,420,435,450,465,480,495,510,525,540,555,570,585,600,615,630,645,660,675,690,705,720,735,750,765,780]
"""
@doc offset: 0,
sequence: "Multiples of 15.",
references: [{:oeis, :a008597, "https://oeis.org/A008597"}]
def create_sequence_a008597(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008597/1)
end
@doc false
@doc offset: 0
def seq_a008597(idx) do
idx * 15
end
@doc """
OEIS Sequence `A008598` - Multiples of 16.
From [OEIS A008598](https://oeis.org/A008598):
> Multiples of 16.
> (Formerly )
**Sequence IDs**: `:a008598`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008598) |> Sequence.take!(53)
[0,16,32,48,64,80,96,112,128,144,160,176,192,208,224,240,256,272,288,304,320,336,352,368,384,400,416,432,448,464,480,496,512,528,544,560,576,592,608,624,640,656,672,688,704,720,736,752,768,784,800,816,832]
"""
@doc offset: 0,
sequence: "Multiples of 16.",
references: [{:oeis, :a008598, "https://oeis.org/A008598"}]
def create_sequence_a008598(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008598/1)
end
@doc false
@doc offset: 0
def seq_a008598(idx) do
idx * 16
end
@doc """
OEIS Sequence `A008599` - Multiples of 17.
From [OEIS A008599](https://oeis.org/A008599):
> Multiples of 17.
> (Formerly )
**Sequence IDs**: `:a008599`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008599) |> Sequence.take!(53)
[0,17,34,51,68,85,102,119,136,153,170,187,204,221,238,255,272,289,306,323,340,357,374,391,408,425,442,459,476,493,510,527,544,561,578,595,612,629,646,663,680,697,714,731,748,765,782,799,816,833,850,867,884]
"""
@doc offset: 0,
sequence: "Multiples of 17.",
references: [{:oeis, :a008599, "https://oeis.org/A008599"}]
def create_sequence_a008599(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008599/1)
end
@doc false
@doc offset: 0
def seq_a008599(idx) do
idx * 17
end
@doc """
OEIS Sequence `A008600` - Multiples of 18.
From [OEIS A008600](https://oeis.org/A008600):
> Multiples of 18.
> (Formerly )
**Sequence IDs**: `:a008600`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008600) |> Sequence.take!(53)
[0,18,36,54,72,90,108,126,144,162,180,198,216,234,252,270,288,306,324,342,360,378,396,414,432,450,468,486,504,522,540,558,576,594,612,630,648,666,684,702,720,738,756,774,792,810,828,846,864,882,900,918,936]
"""
@doc offset: 0,
sequence: "Multiples of 18.",
references: [{:oeis, :a008600, "https://oeis.org/A008600"}]
def create_sequence_a008600(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008600/1)
end
@doc false
@doc offset: 0
def seq_a008600(idx) do
idx * 18
end
@doc """
OEIS Sequence `A008601` - Multiples of 19.
From [OEIS A008601](https://oeis.org/A008601):
> Multiples of 19.
> (Formerly )
**Sequence IDs**: `:a008601`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008601) |> Sequence.take!(53)
[0,19,38,57,76,95,114,133,152,171,190,209,228,247,266,285,304,323,342,361,380,399,418,437,456,475,494,513,532,551,570,589,608,627,646,665,684,703,722,741,760,779,798,817,836,855,874,893,912,931,950,969,988]
"""
@doc offset: 0,
sequence: "Multiples of 19.",
references: [{:oeis, :a008601, "https://oeis.org/A008601"}]
def create_sequence_a008601(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008601/1)
end
@doc false
@doc offset: 0
def seq_a008601(idx) do
idx * 19
end
@doc """
OEIS Sequence `A008602` - Multiples of 20.
From [OEIS A008602](https://oeis.org/A008602):
> Multiples of 20.
> (Formerly )
**Sequence IDs**: `:a008602`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008602) |> Sequence.take!(51)
[0,20,40,60,80,100,120,140,160,180,200,220,240,260,280,300,320,340,360,380,400,420,440,460,480,500,520,540,560,580,600,620,640,660,680,700,720,740,760,780,800,820,840,860,880,900,920,940,960,980,1000]
"""
@doc offset: 0,
sequence: "Multiples of 20.",
references: [{:oeis, :a008602, "https://oeis.org/A008602"}]
def create_sequence_a008602(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008602/1)
end
@doc false
@doc offset: 0
def seq_a008602(idx) do
idx * 20
end
@doc """
OEIS Sequence `A008603` - Multiples of 21.
From [OEIS A008603](https://oeis.org/A008603):
> Multiples of 21.
> (Formerly )
**Sequence IDs**: `:a008603`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008603) |> Sequence.take!(48)
[0,21,42,63,84,105,126,147,168,189,210,231,252,273,294,315,336,357,378,399,420,441,462,483,504,525,546,567,588,609,630,651,672,693,714,735,756,777,798,819,840,861,882,903,924,945,966,987]
"""
@doc offset: 0,
sequence: "Multiples of 21.",
references: [{:oeis, :a008603, "https://oeis.org/A008603"}]
def create_sequence_a008603(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008603/1)
end
@doc false
@doc offset: 0
def seq_a008603(idx) do
idx * 21
end
@doc """
OEIS Sequence `A008604` - Multiples of 22.
From [OEIS A008604](https://oeis.org/A008604):
> Multiples of 22.
> (Formerly )
**Sequence IDs**: `:a008604`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008604) |> Sequence.take!(46)
[0,22,44,66,88,110,132,154,176,198,220,242,264,286,308,330,352,374,396,418,440,462,484,506,528,550,572,594,616,638,660,682,704,726,748,770,792,814,836,858,880,902,924,946,968,990]
"""
@doc offset: 0,
sequence: "Multiples of 22.",
references: [{:oeis, :a008604, "https://oeis.org/A008604"}]
def create_sequence_a008604(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008604/1)
end
@doc false
@doc offset: 0
def seq_a008604(idx) do
idx * 22
end
@doc """
OEIS Sequence `A008605` - Multiples of 23.
From [OEIS A008605](https://oeis.org/A008605):
> Multiples of 23.
> (Formerly )
**Sequence IDs**: `:a008605`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008605) |> Sequence.take!(51)
[0,23,46,69,92,115,138,161,184,207,230,253,276,299,322,345,368,391,414,437,460,483,506,529,552,575,598,621,644,667,690,713,736,759,782,805,828,851,874,897,920,943,966,989,1012,1035,1058,1081,1104,1127,1150]
"""
@doc offset: 0,
sequence: "Multiples of 23.",
references: [{:oeis, :a008605, "https://oeis.org/A008605"}]
def create_sequence_a008605(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008605/1)
end
@doc false
@doc offset: 0
def seq_a008605(idx) do
idx * 23
end
@doc """
OEIS Sequence `A008606` - Multiples of 24.
From [OEIS A008606](https://oeis.org/A008606):
> Multiples of 24.
> (Formerly )
**Sequence IDs**: `:a008606`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008606) |> Sequence.take!(51)
[0,24,48,72,96,120,144,168,192,216,240,264,288,312,336,360,384,408,432,456,480,504,528,552,576,600,624,648,672,696,720,744,768,792,816,840,864,888,912,936,960,984,1008,1032,1056,1080,1104,1128,1152,1176,1200]
"""
@doc offset: 0,
sequence: "Multiples of 24.",
references: [{:oeis, :a008606, "https://oeis.org/A008606"}]
def create_sequence_a008606(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008606/1)
end
@doc false
@doc offset: 0
def seq_a008606(idx) do
idx * 24
end
@doc """
OEIS Sequence `A008607` - Multiples of 25.
From [OEIS A008607](https://oeis.org/A008607):
> Multiples of 25.
> (Formerly )
**Sequence IDs**: `:a008607`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a008607) |> Sequence.take!(50)
[0,25,50,75,100,125,150,175,200,225,250,275,300,325,350,375,400,425,450,475,500,525,550,575,600,625,650,675,700,725,750,775,800,825,850,875,900,925,950,975,1000,1025,1050,1075,1100,1125,1150,1175,1200,1225]
"""
@doc offset: 0,
sequence: "Multiples of 25.",
references: [{:oeis, :a008607, "https://oeis.org/A008607"}]
def create_sequence_a008607(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a008607/1)
end
@doc false
@doc offset: 0
def seq_a008607(idx) do
idx * 25
end
@doc """
OEIS Sequence `A169823` - Multiples of 60.
From [OEIS A169823](https://oeis.org/A169823):
> Multiples of 60.
> (Formerly )
**Sequence IDs**: `:a169823`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a169823) |> Sequence.take!(46)
[0,60,120,180,240,300,360,420,480,540,600,660,720,780,840,900,960,1020,1080,1140,1200,1260,1320,1380,1440,1500,1560,1620,1680,1740,1800,1860,1920,1980,2040,2100,2160,2220,2280,2340,2400,2460,2520,2580,2640,2700]
"""
@doc offset: 0,
sequence: "Multiples of 60.",
references: [{:oeis, :a169823, "https://oeis.org/A169823"}]
def create_sequence_a169823(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a169823/1)
end
@doc false
@doc offset: 0
def seq_a169823(idx) do
idx * 60
end
@doc """
OEIS Sequence `A169825` - Multiples of 420.
From [OEIS A169825](https://oeis.org/A169825):
> Multiples of 420.
> (Formerly )
**Sequence IDs**: `:a169825`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a169825) |> Sequence.take!(41)
[0,420,840,1260,1680,2100,2520,2940,3360,3780,4200,4620,5040,5460,5880,6300,6720,7140,7560,7980,8400,8820,9240,9660,10080,10500,10920,11340,11760,12180,12600,13020,13440,13860,14280,14700,15120,15540,15960,16380,16800]
"""
@doc offset: 0,
sequence: "Multiples of 420.",
references: [{:oeis, :a169825, "https://oeis.org/A169825"}]
def create_sequence_a169825(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a169825/1)
end
@doc false
@doc offset: 0
def seq_a169825(idx) do
idx * 420
end
@doc """
OEIS Sequence `A169827` - Multiples of 840.
From [OEIS A169827](https://oeis.org/A169827):
> Multiples of 840.
> (Formerly )
**Sequence IDs**: `:a169827`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a169827) |> Sequence.take!(39)
[0,840,1680,2520,3360,4200,5040,5880,6720,7560,8400,9240,10080,10920,11760,12600,13440,14280,15120,15960,16800,17640,18480,19320,20160,21000,21840,22680,23520,24360,25200,26040,26880,27720,28560,29400,30240,31080,31920]
"""
@doc offset: 0,
sequence: "Multiples of 840.",
references: [{:oeis, :a169827, "https://oeis.org/A169827"}]
def create_sequence_a169827(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a169827/1)
end
@doc false
@doc offset: 0
def seq_a169827(idx) do
idx * 840
end
@doc """
OEIS Sequence `A252994` - Multiples of 26.
From [OEIS A252994](https://oeis.org/A252994):
> Multiples of 26.
> (Formerly )
**Sequence IDs**: `:a252994`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a252994) |> Sequence.take!(54)
[0,26,52,78,104,130,156,182,208,234,260,286,312,338,364,390,416,442,468,494,520,546,572,598,624,650,676,702,728,754,780,806,832,858,884,910,936,962,988,1014,1040,1066,1092,1118,1144,1170,1196,1222,1248,1274,1300,1326,1352,1378]
"""
@doc offset: 0,
sequence: "Multiples of 26.",
references: [{:oeis, :a252994, "https://oeis.org/A252994"}]
def create_sequence_a252994(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a252994/1)
end
@doc false
@doc offset: 0
def seq_a252994(idx) do
idx * 26
end
@doc """
OEIS Sequence `A305548` - a(n) = 27*n.
From [OEIS A305548](https://oeis.org/A305548):
> a(n) = 27*n.
> (Formerly )
**Sequence IDs**: `:a305548`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a305548) |> Sequence.take!(57)
[0,27,54,81,108,135,162,189,216,243,270,297,324,351,378,405,432,459,486,513,540,567,594,621,648,675,702,729,756,783,810,837,864,891,918,945,972,999,1026,1053,1080,1107,1134,1161,1188,1215,1242,1269,1296,1323,1350,1377,1404,1431,1458,1485,1512]
"""
@doc offset: 0,
sequence: "a(n) = 27*n.",
references: [{:oeis, :a305548, "https://oeis.org/A305548"}]
def create_sequence_a305548(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a305548/1)
end
@doc false
@doc offset: 0
def seq_a305548(idx) do
idx * 27
end
@doc """
OEIS Sequence `A121023` - Multiples of 3 containing a 3 in their decimal representation.
From [OEIS A121023](https://oeis.org/A121023):
> Multiples of 3 containing a 3 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121023`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121023) |> Sequence.take!(53)
[3,30,33,36,39,63,93,123,132,135,138,153,183,213,231,234,237,243,273,300,303,306,309,312,315,318,321,324,327,330,333,336,339,342,345,348,351,354,357,360,363,366,369,372,375,378,381,384,387,390,393,396,399]
"""
@doc offset: 1,
sequence: "Multiples of 3 containing a 3 in their decimal representation.",
references: [{:oeis, :a121023, "https://oeis.org/A121023"}]
def create_sequence_a121023(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121023/2)
end
@doc false
@doc offset: 1
def seq_a121023(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 3) end, last, 3)
end
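  # The "contains a digit" sequences in this block are generated
  # incrementally rather than by a closed form: each seq_* function receives
  # the previous term as `last` and asks `Math.next_number/3` for the next
  # candidate satisfying the predicate. Assuming `next_number(pred, last, step)`
  # scans last + step, last + 2*step, ... until `pred` holds (an inference
  # from its use here), the jump from 3 to 30 in A121023 skips 6, 9, ..., 27,
  # none of which contain the digit 3.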
@doc """
OEIS Sequence `A121024` - Multiples of 4 containing a 4 in their decimal representation.
From [OEIS A121024](https://oeis.org/A121024):
> Multiples of 4 containing a 4 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121024`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121024) |> Sequence.take!(53)
[4,24,40,44,48,64,84,104,124,140,144,148,164,184,204,224,240,244,248,264,284,304,324,340,344,348,364,384,400,404,408,412,416,420,424,428,432,436,440,444,448,452,456,460,464,468,472,476,480,484,488,492,496]
"""
@doc offset: 1,
sequence: "Multiples of 4 containing a 4 in their decimal representation.",
references: [{:oeis, :a121024, "https://oeis.org/A121024"}]
def create_sequence_a121024(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121024/2)
end
@doc false
@doc offset: 1
def seq_a121024(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 4) end, last, 4)
end
@doc """
OEIS Sequence `A121025` - Multiples of 5 containing a 5 in their decimal representation.
From [OEIS A121025](https://oeis.org/A121025):
> Multiples of 5 containing a 5 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121025`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121025) |> Sequence.take!(54)
[5,15,25,35,45,50,55,65,75,85,95,105,115,125,135,145,150,155,165,175,185,195,205,215,225,235,245,250,255,265,275,285,295,305,315,325,335,345,350,355,365,375,385,395,405,415,425,435,445,450,455,465,475,485]
"""
@doc offset: 1,
sequence: "Multiples of 5 containing a 5 in their decimal representation.",
references: [{:oeis, :a121025, "https://oeis.org/A121025"}]
def create_sequence_a121025(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121025/2)
end
@doc false
@doc offset: 1
def seq_a121025(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 5) end, last, 5)
end
@doc """
OEIS Sequence `A121026` - Multiples of 6 containing a 6 in their decimal representation.
From [OEIS A121026](https://oeis.org/A121026):
> Multiples of 6 containing a 6 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121026`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121026) |> Sequence.take!(53)
[6,36,60,66,96,126,156,162,168,186,216,246,264,276,306,336,360,366,396,426,456,462,468,486,516,546,564,576,600,606,612,618,624,630,636,642,648,654,660,666,672,678,684,690,696,726,756,762,768,786,816,846,864]
"""
@doc offset: 1,
sequence: "Multiples of 6 containing a 6 in their decimal representation.",
references: [{:oeis, :a121026, "https://oeis.org/A121026"}]
def create_sequence_a121026(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121026/2)
end
@doc false
@doc offset: 1
def seq_a121026(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 6) end, last, 6)
end
@doc """
OEIS Sequence `A121027` - Multiples of 7 containing a 7 in their decimal representation.
From [OEIS A121027](https://oeis.org/A121027):
> Multiples of 7 containing a 7 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121027`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121027) |> Sequence.take!(50)
[7,70,77,147,175,217,273,287,357,371,378,427,476,497,567,574,637,672,679,700,707,714,721,728,735,742,749,756,763,770,777,784,791,798,847,875,917,973,987,1057,1071,1078,1127,1176,1197,1267,1274,1337,1372,1379]
"""
@doc offset: 1,
sequence: "Multiples of 7 containing a 7 in their decimal representation.",
references: [{:oeis, :a121027, "https://oeis.org/A121027"}]
def create_sequence_a121027(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121027/2)
end
@doc false
@doc offset: 1
def seq_a121027(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 7) end, last, 7)
end
@doc """
OEIS Sequence `A121028` - Multiples of 8 containing an 8 in their decimal representation.
From [OEIS A121028](https://oeis.org/A121028):
> Multiples of 8 containing an 8 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121028`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121028) |> Sequence.take!(51)
[8,48,80,88,128,168,184,208,248,280,288,328,368,384,408,448,480,488,528,568,584,608,648,680,688,728,768,784,800,808,816,824,832,840,848,856,864,872,880,888,896,928,968,984,1008,1048,1080,1088,1128,1168,1184]
"""
@doc offset: 1,
sequence: "Multiples of 8 containing an 8 in their decimal representation.",
references: [{:oeis, :a121028, "https://oeis.org/A121028"}]
def create_sequence_a121028(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121028/2)
end
@doc false
@doc offset: 1
def seq_a121028(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 8) end, last, 8)
end
@doc """
OEIS Sequence `A121029` - Multiples of 9 containing a 9 in their decimal representation.
From [OEIS A121029](https://oeis.org/A121029):
> Multiples of 9 containing a 9 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121029`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121029) |> Sequence.take!(48)
[9,90,99,189,198,279,297,369,396,459,495,549,594,639,693,729,792,819,891,900,909,918,927,936,945,954,963,972,981,990,999,1089,1098,1179,1197,1269,1296,1359,1395,1449,1494,1539,1593,1629,1692,1719,1791,1809]
"""
@doc offset: 1,
sequence: "Multiples of 9 containing a 9 in their decimal representation.",
references: [{:oeis, :a121029, "https://oeis.org/A121029"}]
def create_sequence_a121029(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121029/2)
end
@doc false
@doc offset: 1
def seq_a121029(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 9) end, last, 9)
end
@doc """
OEIS Sequence `A121030` - Multiples of 10 containing a 10 in their decimal representation.
From [OEIS A121030](https://oeis.org/A121030):
> Multiples of 10 containing a 10 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121030`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121030) |> Sequence.take!(44)
[10,100,110,210,310,410,510,610,710,810,910,1000,1010,1020,1030,1040,1050,1060,1070,1080,1090,1100,1110,1210,1310,1410,1510,1610,1710,1810,1910,2010,2100,2110,2210,2310,2410,2510,2610,2710,2810,2910,3010,3100]
"""
@doc offset: 1,
sequence: "Multiples of 10 containing a 10 in their decimal representation.",
references: [{:oeis, :a121030, "https://oeis.org/A121030"}]
def create_sequence_a121030(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121030/2)
end
@doc false
@doc offset: 1
def seq_a121030(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 10) end, last, 10)
end
@doc """
OEIS Sequence `A121031` - Multiples of 11 containing an 11 in their decimal representation.
From [OEIS A121031](https://oeis.org/A121031):
> Multiples of 11 containing an 11 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121031`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121031) |> Sequence.take!(39)
[11,110,1100,1111,1122,1133,1144,1155,1166,1177,1188,1199,2112,2211,3113,3311,4114,4411,5115,5511,6116,6611,7117,7711,8118,8811,9119,9911,11000,11011,11022,11033,11044,11055,11066,11077,11088,11099,11110]
"""
@doc offset: 1,
sequence: "Multiples of 11 containing an 11 in their decimal representation.",
references: [{:oeis, :a121031, "https://oeis.org/A121031"}]
def create_sequence_a121031(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121031/2)
end
@doc false
@doc offset: 1
def seq_a121031(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 11) end, last, 11)
end
@doc """
OEIS Sequence `A121032` - Multiples of 12 containing a 12 in their decimal representation.
From [OEIS A121032](https://oeis.org/A121032):
> Multiples of 12 containing a 12 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121032`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121032) |> Sequence.take!(43)
[12,120,312,612,912,1128,1200,1212,1224,1236,1248,1260,1272,1284,1296,1512,1812,2112,2124,2412,2712,3012,3120,3312,3612,3912,4128,4212,4512,4812,5112,5124,5412,5712,6012,6120,6312,6612,6912,7128,7212,7512,7812]
"""
@doc offset: 1,
sequence: "Multiples of 12 containing a 12 in their decimal representation.",
references: [{:oeis, :a121032, "https://oeis.org/A121032"}]
def create_sequence_a121032(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121032/2)
end
@doc false
@doc offset: 1
def seq_a121032(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 12) end, last, 12)
end
@doc """
OEIS Sequence `A121033` - Multiples of 13 containing a 13 in their decimal representation.
From [OEIS A121033](https://oeis.org/A121033):
> Multiples of 13 containing a 13 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121033`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121033) |> Sequence.take!(38)
[13,130,1131,1300,1313,1326,1339,1352,1365,1378,1391,2132,2613,3133,3913,4134,5135,5213,6136,6513,7137,7813,8138,9113,9139,10413,11310,11323,11336,11349,11362,11375,11388,11713,13000,13013,13026,13039]
"""
@doc offset: 1,
sequence: "Multiples of 13 containing a 13 in their decimal representation.",
references: [{:oeis, :a121033, "https://oeis.org/A121033"}]
def create_sequence_a121033(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121033/2)
end
@doc false
@doc offset: 1
def seq_a121033(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 13) end, last, 13)
end
@doc """
OEIS Sequence `A121034` - Multiples of 14 containing a 14 in their decimal representation.
From [OEIS A121034](https://oeis.org/A121034):
> Multiples of 14 containing a 14 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121034`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121034) |> Sequence.take!(40)
[14,140,714,1148,1400,1414,1428,1442,1456,1470,1484,1498,2114,2142,2814,3514,4144,4214,4914,5614,6146,6314,7014,7140,7714,8148,8414,9114,9142,9814,10514,11144,11214,11410,11424,11438,11452,11466,11480,11494]
"""
@doc offset: 1,
sequence: "Multiples of 14 containing a 14 in their decimal representation.",
references: [{:oeis, :a121034, "https://oeis.org/A121034"}]
def create_sequence_a121034(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121034/2)
end
@doc false
@doc offset: 1
def seq_a121034(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 14) end, last, 14)
end
@doc """
OEIS Sequence `A121035` - Multiples of 15 containing a 15 in their decimal representation.
From [OEIS A121035](https://oeis.org/A121035):
> Multiples of 15 containing a 15 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121035`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121035) |> Sequence.take!(43)
[15,150,315,615,915,1155,1215,1500,1515,1530,1545,1560,1575,1590,1815,2115,2415,2715,3015,3150,3315,3615,3915,4155,4215,4515,4815,5115,5415,5715,6015,6150,6315,6615,6915,7155,7215,7515,7815,8115,8415,8715,9015]
"""
@doc offset: 1,
sequence: "Multiples of 15 containing a 15 in their decimal representation.",
references: [{:oeis, :a121035, "https://oeis.org/A121035"}]
def create_sequence_a121035(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121035/2)
end
@doc false
@doc offset: 1
def seq_a121035(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 15) end, last, 15)
end
@doc """
OEIS Sequence `A121036` - Multiples of 16 containing a 16 in their decimal representation.
From [OEIS A121036](https://oeis.org/A121036):
> Multiples of 16 containing a 16 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121036`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121036) |> Sequence.take!(42)
[16,160,416,816,1168,1216,1600,1616,1632,1648,1664,1680,1696,2016,2160,2416,2816,3168,3216,3616,4016,4160,4416,4816,5168,5216,5616,6016,6160,6416,6816,7168,7216,7616,8016,8160,8416,8816,9168,9216,9616,10016]
"""
@doc offset: 1,
sequence: "Multiples of 16 containing a 16 in their decimal representation.",
references: [{:oeis, :a121036, "https://oeis.org/A121036"}]
def create_sequence_a121036(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121036/2)
end
@doc false
@doc offset: 1
def seq_a121036(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 16) end, last, 16)
end
@doc """
OEIS Sequence `A121037` - Multiples of 17 containing a 17 in their decimal representation.
From [OEIS A121037](https://oeis.org/A121037):
> Multiples of 17 containing a 17 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121037`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121037) |> Sequence.take!(37)
[17,170,1173,1700,1717,1734,1751,1768,1785,2176,3179,3417,5117,6171,6817,7174,8177,8517,10217,11713,11730,11747,11764,11781,11798,11917,12172,13175,13617,14178,15317,17000,17017,17034,17051,17068,17085]
"""
@doc offset: 1,
sequence: "Multiples of 17 containing a 17 in their decimal representation.",
references: [{:oeis, :a121037, "https://oeis.org/A121037"}]
def create_sequence_a121037(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121037/2)
end
@doc false
@doc offset: 1
def seq_a121037(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 17) end, last, 17)
end
@doc """
OEIS Sequence `A121038` - Multiples of 18 containing a 18 in their decimal representation.
From [OEIS A121038](https://oeis.org/A121038):
> Multiples of 18 containing a 18 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121038`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121038) |> Sequence.take!(39)
[18,180,918,1188,1800,1818,1836,1854,1872,1890,2718,3186,3618,4518,5184,5418,6318,7182,7218,8118,9018,9180,9918,10188,10818,11718,11808,11826,11844,11862,11880,11898,12186,12618,13518,14184,14418,15318,16182]
"""
@doc offset: 1,
sequence: "Multiples of 18 containing a 18 in their decimal representation.",
references: [{:oeis, :a121038, "https://oeis.org/A121038"}]
def create_sequence_a121038(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121038/2)
end
@doc false
@doc offset: 1
def seq_a121038(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 18) end, last, 18)
end
@doc """
OEIS Sequence `A121039` - Multiples of 19 containing a 19 in their decimal representation.
From [OEIS A121039](https://oeis.org/A121039):
> Multiples of 19 containing a 19 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121039`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121039) |> Sequence.take!(37)
[19,190,1197,1900,1919,1938,1957,1976,1995,3192,3819,4199,5719,6194,7619,9196,9519,11191,11419,11913,11932,11951,11970,11989,12198,13319,14193,15219,17119,17195,19000,19019,19038,19057,19076,19095,19114]
"""
@doc offset: 1,
sequence: "Multiples of 19 containing a 19 in their decimal representation.",
references: [{:oeis, :a121039, "https://oeis.org/A121039"}]
def create_sequence_a121039(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121039/2)
end
@doc false
@doc offset: 1
def seq_a121039(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 19) end, last, 19)
end
@doc """
OEIS Sequence `A121040` - Multiples of 20 containing a 20 in their decimal representation.
From [OEIS A121040](https://oeis.org/A121040):
> Multiples of 20 containing a 20 in their decimal representation.
> (Formerly )
**Sequence IDs**: `:a121040`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a121040) |> Sequence.take!(44)
[20,120,200,220,320,420,520,620,720,820,920,1020,1120,1200,1220,1320,1420,1520,1620,1720,1820,1920,2000,2020,2040,2060,2080,2120,2200,2220,2320,2420,2520,2620,2720,2820,2920,3020,3120,3200,3220,3320,3420,3520]
"""
@doc offset: 1,
sequence: "Multiples of 20 containing a 20 in their decimal representation.",
references: [{:oeis, :a121040, "https://oeis.org/A121040"}]
def create_sequence_a121040(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a121040/2)
end
@doc false
@doc offset: 1
def seq_a121040(_idx, last) do
Math.next_number(fn v -> Math.contains_number?(v, 20) end, last, 20)
end
@doc """
OEIS Sequence `A135628` - Multiples of 28.
From [OEIS A135628](https://oeis.org/A135628):
> Multiples of 28.
> (Formerly )
**Sequence IDs**: `:a135628`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a135628) |> Sequence.take!(50)
[0,28,56,84,112,140,168,196,224,252,280,308,336,364,392,420,448,476,504,532,560,588,616,644,672,700,728,756,784,812,840,868,896,924,952,980,1008,1036,1064,1092,1120,1148,1176,1204,1232,1260,1288,1316,1344,1372]
"""
@doc offset: 0,
sequence: "Multiples of 28.",
references: [{:oeis, :a135628, "https://oeis.org/A135628"}]
def create_sequence_a135628(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a135628/1)
end
@doc false
@doc offset: 0
def seq_a135628(idx) do
idx * 28
end
@doc """
OEIS Sequence `A195819` - Multiples of 29.
From [OEIS A195819](https://oeis.org/A195819):
> Multiples of 29.
> (Formerly )
**Sequence IDs**: `:a195819`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a195819) |> Sequence.take!(47)
[0,29,58,87,116,145,174,203,232,261,290,319,348,377,406,435,464,493,522,551,580,609,638,667,696,725,754,783,812,841,870,899,928,957,986,1015,1044,1073,1102,1131,1160,1189,1218,1247,1276,1305,1334]
"""
@doc offset: 0,
sequence: "Multiples of 29.",
references: [{:oeis, :a195819, "https://oeis.org/A195819"}]
def create_sequence_a195819(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a195819/1)
end
@doc false
@doc offset: 0
def seq_a195819(idx) do
idx * 29
end
@doc """
OEIS Sequence `A249674` - a(n) = 30*n.
From [OEIS A249674](https://oeis.org/A249674):
> a(n) = 30*n.
> (Formerly )
**Sequence IDs**: `:a249674`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a249674) |> Sequence.take!(49)
[0,30,60,90,120,150,180,210,240,270,300,330,360,390,420,450,480,510,540,570,600,630,660,690,720,750,780,810,840,870,900,930,960,990,1020,1050,1080,1110,1140,1170,1200,1230,1260,1290,1320,1350,1380,1410,1440]
"""
@doc offset: 0,
sequence: "a(n) = 30*n.",
references: [{:oeis, :a249674, "https://oeis.org/A249674"}]
def create_sequence_a249674(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a249674/1)
end
@doc false
@doc offset: 0
def seq_a249674(idx) do
idx * 30
end
@doc """
OEIS Sequence `A135631` - Multiples of 31.
From [OEIS A135631](https://oeis.org/A135631):
> Multiples of 31.
> (Formerly )
**Sequence IDs**: `:a135631`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a135631) |> Sequence.take!(49)
[0,31,62,93,124,155,186,217,248,279,310,341,372,403,434,465,496,527,558,589,620,651,682,713,744,775,806,837,868,899,930,961,992,1023,1054,1085,1116,1147,1178,1209,1240,1271,1302,1333,1364,1395,1426,1457,1488]
"""
@doc offset: 0,
sequence: "Multiples of 31.",
references: [{:oeis, :a135631, "https://oeis.org/A135631"}]
def create_sequence_a135631(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a135631/1)
end
@doc false
@doc offset: 0
def seq_a135631(idx) do
idx * 31
end
@doc """
OEIS Sequence `A174312` - 32*n.
From [OEIS A174312](https://oeis.org/A174312):
> 32*n.
> (Formerly )
**Sequence IDs**: `:a174312`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a174312) |> Sequence.take!(51)
[0,32,64,96,128,160,192,224,256,288,320,352,384,416,448,480,512,544,576,608,640,672,704,736,768,800,832,864,896,928,960,992,1024,1056,1088,1120,1152,1184,1216,1248,1280,1312,1344,1376,1408,1440,1472,1504,1536,1568,1600]
"""
@doc offset: 0,
sequence: "32*n.",
references: [{:oeis, :a174312, "https://oeis.org/A174312"}]
def create_sequence_a174312(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a174312/1)
end
@doc false
@doc offset: 0
def seq_a174312(idx) do
idx * 32
end
@doc """
OEIS Sequence `A044102` - Multiples of 36.
From [OEIS A044102](https://oeis.org/A044102):
> Multiples of 36.
> (Formerly )
**Sequence IDs**: `:a044102`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a044102) |> Sequence.take!(48)
[0,36,72,108,144,180,216,252,288,324,360,396,432,468,504,540,576,612,648,684,720,756,792,828,864,900,936,972,1008,1044,1080,1116,1152,1188,1224,1260,1296,1332,1368,1404,1440,1476,1512,1548,1584,1620,1656,1692]
"""
@doc offset: 0,
sequence: "Multiples of 36.",
references: [{:oeis, :a044102, "https://oeis.org/A044102"}]
def create_sequence_a044102(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a044102/1)
end
@doc false
@doc offset: 0
def seq_a044102(idx) do
idx * 36
end
@doc """
OEIS Sequence `A062768` - Multiples of 6 such that the sum of the digits is equal to 6.
From [OEIS A062768](https://oeis.org/A062768):
> Multiples of 6 such that the sum of the digits is equal to 6.
> (Formerly )
**Sequence IDs**: `:a062768`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a062768) |> Sequence.take!(46)
[6,24,42,60,114,132,150,204,222,240,312,330,402,420,510,600,1014,1032,1050,1104,1122,1140,1212,1230,1302,1320,1410,1500,2004,2022,2040,2112,2130,2202,2220,2310,2400,3012,3030,3102,3120,3210,3300,4002,4020,4110]
"""
@doc offset: 1,
sequence: "Multiples of 6 such that the sum of the digits is equal to 6.",
references: [{:oeis, :a062768, "https://oeis.org/A062768"}]
def create_sequence_a062768(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a062768/2)
end
@doc false
@doc offset: 1
def seq_a062768(_idx, last) do
Math.next_number(fn v -> Math.digit_sum(v) == 6 end, last, 6)
end
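  # The digit-sum sequences (A062768 above through A069543 below) reuse the
  # incremental pattern with a `Math.digit_sum/1` predicate. Walking A062768
  # by hand: after 6, the candidates 12 and 18 have digit sums 3 and 9, so
  # the next multiple of 6 whose digits sum to 6 is 24.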
@doc """
OEIS Sequence `A063416` - Multiples of 7 whose sum of digits is equal to 7.
From [OEIS A063416](https://oeis.org/A063416):
> Multiples of 7 whose sum of digits is equal to 7.
> (Formerly )
**Sequence IDs**: `:a063416`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a063416) |> Sequence.take!(38)
[7,70,133,322,511,700,1015,1141,1204,1330,2023,2212,2401,3031,3220,4102,5110,7000,10024,10150,10213,10402,11032,11221,11410,12040,12103,13111,13300,15001,20041,20104,20230,21112,21301,22120,23002,24010]
"""
@doc offset: 1,
sequence: "Multiples of 7 whose sum of digits is equal to 7.",
references: [{:oeis, :a063416, "https://oeis.org/A063416"}]
def create_sequence_a063416(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a063416/2)
end
@doc false
@doc offset: 1
def seq_a063416(_idx, last) do
Math.next_number(fn v -> Math.digit_sum(v) == 7 end, last, 7)
end
@doc """
OEIS Sequence `A063997` - Multiples of 4 whose digits add to 4.
From [OEIS A063997](https://oeis.org/A063997):
> Multiples of 4 whose digits add to 4.
> (Formerly )
**Sequence IDs**: `:a063997`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a063997) |> Sequence.take!(36)
[4,40,112,220,400,1012,1120,1300,2020,2200,3100,4000,10012,10120,10300,11020,11200,12100,13000,20020,20200,21100,22000,30100,31000,40000,100012,100120,100300,101020,101200,102100,103000,110020,110200,111100]
"""
@doc offset: 1,
sequence: "Multiples of 4 whose digits add to 4.",
references: [{:oeis, :a063997, "https://oeis.org/A063997"}]
def create_sequence_a063997(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a063997/2)
end
@doc false
@doc offset: 1
def seq_a063997(_idx, last) do
Math.next_number(fn v -> Math.digit_sum(v) == 4 end, last, 4)
end
@doc """
OEIS Sequence `A069537` - Multiples of 2 with digit sum = 2.
From [OEIS A069537](https://oeis.org/A069537):
> Multiples of 2 with digit sum = 2.
> (Formerly )
**Sequence IDs**: `:a069537`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a069537) |> Sequence.take!(29)
[2,20,110,200,1010,1100,2000,10010,10100,11000,20000,100010,100100,101000,110000,200000,1000010,1000100,1001000,1010000,1100000,2000000,10000010,10000100,10001000,10010000,10100000,11000000,20000000]
"""
@doc offset: 1,
sequence: "Multiples of 2 with digit sum = 2.",
references: [{:oeis, :a069537, "https://oeis.org/A069537"}]
def create_sequence_a069537(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a069537/2)
end
@doc false
@doc offset: 1
def seq_a069537(_idx, last) do
Math.next_number(fn v -> Math.digit_sum(v) == 2 end, last, 2)
end
@doc """
OEIS Sequence `A069540` - Multiples of 5 with digit sum 5.
From [OEIS A069540](https://oeis.org/A069540):
> Multiples of 5 with digit sum 5.
> (Formerly )
**Sequence IDs**: `:a069540`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a069540) |> Sequence.take!(39)
[5,50,140,230,320,410,500,1040,1130,1220,1310,1400,2030,2120,2210,2300,3020,3110,3200,4010,4100,5000,10040,10130,10220,10310,10400,11030,11120,11210,11300,12020,12110,12200,13010,13100,14000,20030,20120]
"""
@doc offset: 1,
sequence: "Multiples of 5 with digit sum 5.",
references: [{:oeis, :a069540, "https://oeis.org/A069540"}]
def create_sequence_a069540(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a069540/2)
end
@doc false
@doc offset: 1
def seq_a069540(_idx, last) do
Math.next_number(fn v -> Math.digit_sum(v) == 5 end, last, 5)
end
@doc """
OEIS Sequence `A069543` - Multiples of 8 with digit sum 8.
From [OEIS A069543](https://oeis.org/A069543):
> Multiples of 8 with digit sum 8.
> (Formerly )
**Sequence IDs**: `:a069543`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a069543) |> Sequence.take!(40)
[8,80,152,224,440,512,800,1016,1160,1232,1304,1520,2024,2240,2312,2600,3032,3104,3320,4040,4112,4400,5120,6200,8000,10016,10160,10232,10304,10520,11024,11240,11312,11600,12032,12104,12320,13040,13112,13400]
"""
@doc offset: 1,
sequence: "Multiples of 8 with digit sum 8.",
references: [{:oeis, :a069543, "https://oeis.org/A069543"}]
def create_sequence_a069543(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a069543/2)
end
@doc false
@doc offset: 1
def seq_a069543(_idx, last) do
Math.next_number(fn v -> Math.digit_sum(v) == 8 end, last, 8)
end
@doc """
OEIS Sequence `A085959` - Multiples of 37.
From [OEIS A085959](https://oeis.org/A085959):
> Multiples of 37.
> (Formerly )
**Sequence IDs**: `:a085959`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a085959) |> Sequence.take!(48)
[0,37,74,111,148,185,222,259,296,333,370,407,444,481,518,555,592,629,666,703,740,777,814,851,888,925,962,999,1036,1073,1110,1147,1184,1221,1258,1295,1332,1369,1406,1443,1480,1517,1554,1591,1628,1665,1702,1739]
"""
@doc offset: 0,
sequence: "Multiples of 37.",
references: [{:oeis, :a085959, "https://oeis.org/A085959"}]
def create_sequence_a085959(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a085959/1)
end
@doc false
@doc offset: 0
def seq_a085959(idx) do
idx * 37
end
@doc """
OEIS Sequence `A152691` - Multiples of 64.
From [OEIS A152691](https://oeis.org/A152691):
> Multiples of 64.
> (Formerly )
**Sequence IDs**: `:a152691`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a152691) |> Sequence.take!(46)
[0,64,128,192,256,320,384,448,512,576,640,704,768,832,896,960,1024,1088,1152,1216,1280,1344,1408,1472,1536,1600,1664,1728,1792,1856,1920,1984,2048,2112,2176,2240,2304,2368,2432,2496,2560,2624,2688,2752,2816,2880]
"""
@doc offset: 0,
sequence: "Multiples of 64.",
references: [{:oeis, :a152691, "https://oeis.org/A152691"}]
def create_sequence_a152691(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a152691/1)
end
@doc false
@doc offset: 0
def seq_a152691(idx) do
idx * 64
end
@doc """
OEIS Sequence `A016825` - Positive integers congruent to 2 mod 4: a(n) = 4*n+2, for n >= 0.
From [OEIS A016825](https://oeis.org/A016825):
> Positive integers congruent to 2 mod 4: a(n) = 4*n+2, for n >= 0.
> (Formerly )
**Sequence IDs**: `:a016825`
**Finite**: False
**Offset**: 0
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a016825) |> Sequence.take!(59)
[2,6,10,14,18,22,26,30,34,38,42,46,50,54,58,62,66,70,74,78,82,86,90,94,98,102,106,110,114,118,122,126,130,134,138,142,146,150,154,158,162,166,170,174,178,182,186,190,194,198,202,206,210,214,218,222,226,230,234]
"""
@doc offset: 0,
sequence: "Positive integers congruent to 2 mod 4: a(n) = 4*n+2, for n >= 0.",
references: [{:oeis, :a016825, "https://oeis.org/A016825"}]
def create_sequence_a016825(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a016825/2)
end
@doc false
@doc offset: 0
def seq_a016825(_idx, last) do
Math.next_number(&Predicates.is_singly_even_number?/1, last + 1)
end
@doc """
OEIS Sequence `A007770` - Happy numbers: numbers whose trajectory under iteration of sum of squares of digits map (see A003132) includes 1.
From [OEIS A007770](https://oeis.org/A007770):
> Happy numbers: numbers whose trajectory under iteration of sum of squares of digits map (see A003132) includes 1.
> (Formerly )
**Sequence IDs**: `:a007770`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a007770) |> Sequence.take!(54)
[1,7,10,13,19,23,28,31,32,44,49,68,70,79,82,86,91,94,97,100,103,109,129,130,133,139,167,176,188,190,192,193,203,208,219,226,230,236,239,262,263,280,291,293,301,302,310,313,319,320,326,329,331,338]
"""
@doc offset: 1,
sequence:
"Happy numbers: numbers whose trajectory under iteration of sum of squares of digits map (see A003132) includes 1.",
references: [{:oeis, :a007770, "https://oeis.org/A007770"}]
def create_sequence_a007770(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a007770/2)
end
@doc false
@doc offset: 1
def seq_a007770(_idx, last) do
Math.next_number(&Predicates.is_happy_number?/1, last)
end
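  # A number is happy when iterating "sum of the squares of the digits"
  # reaches 1. For example, 7 -> 49 -> 97 -> 130 -> 10 -> 1, so 7 is the
  # second term above. Note the two-argument `next_number/2` form: with no
  # step given, the scan presumably advances by 1 instead of by a multiple.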
@doc """
OEIS Sequence `A031177` - Unhappy numbers: numbers having period-8 2-digitized sequences.
From [OEIS A031177](https://oeis.org/A031177):
> Unhappy numbers: numbers having period-8 2-digitized sequences.
> (Formerly )
**Sequence IDs**: `:a031177`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a031177) |> Sequence.take!(68)
[2,3,4,5,6,8,9,11,12,14,15,16,17,18,20,21,22,24,25,26,27,29,30,33,34,35,36,37,38,39,40,41,42,43,45,46,47,48,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,69,71,72,73,74,75,76,77,78,80,81,83]
"""
@doc offset: 1,
sequence: "Unhappy numbers: numbers having period-8 2-digitized sequences.",
references: [{:oeis, :a031177, "https://oeis.org/A031177"}]
def create_sequence_a031177(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a031177/2)
end
@doc false
@doc offset: 1
def seq_a031177(_idx, last) do
Math.next_number(&Predicates.is_unhappy_number?/1, last)
end
@doc """
OEIS Sequence `A138591` - Sums of two or more consecutive nonnegative integers.
From [OEIS A138591](https://oeis.org/A138591):
> Sums of two or more consecutive nonnegative integers.
> (Formerly )
**Sequence IDs**: `:a138591`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a138591) |> Sequence.take!(71)
[1,3,5,6,7,9,10,11,12,13,14,15,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,65,66,67,68,69,70,71,72,73,74,75,76,77]
"""
@doc offset: 1,
sequence: "Sums of two or more consecutive nonnegative integers.",
references: [{:oeis, :a138591, "https://oeis.org/A138591"}]
def create_sequence_a138591(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a138591/2)
end
@doc false
@doc offset: 1, fill_value: 0
def seq_a138591(idx, last) do
case idx do
1 -> 1
_ -> Math.next_number(&Predicates.is_polite_number?/1, last)
end
end
@doc """
OEIS Sequence `A006753` - Smith (or joke) numbers: composite numbers n such that sum of digits of n = sum of digits of prime factors of n (counted with multiplicity).
From [OEIS A006753](https://oeis.org/A006753):
> Smith (or joke) numbers: composite numbers n such that sum of digits of n = sum of digits of prime factors of n (counted with multiplicity).
> (Formerly M3582)
**Sequence IDs**: `:a006753`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a006753) |> Sequence.take!(52)
[4,22,27,58,85,94,121,166,202,265,274,319,346,355,378,382,391,438,454,483,517,526,535,562,576,588,627,634,636,645,648,654,663,666,690,706,728,729,762,778,825,852,861,895,913,915,922,958,985,1086,1111,1165]
"""
@doc offset: 1,
sequence:
"Smith (or joke) numbers: composite numbers n such that sum of digits of n = sum of digits of prime factors of n (counted with multiplicity).",
references: [{:oeis, :a006753, "https://oeis.org/A006753"}]
def create_sequence_a006753(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a006753/2)
end
@doc false
@doc offset: 1
def seq_a006753(_idx, last) do
Math.next_number(&Predicates.is_smith_number?/1, last)
end
@doc """
OEIS Sequence `A019506` - Hoax numbers: composite numbers whose digit-sum equals the sum of the digit-sums of its distinct prime factors.
From [OEIS A019506](https://oeis.org/A019506):
> Hoax numbers: composite numbers whose digit-sum equals the sum of the digit-sums of its distinct prime factors.
> (Formerly )
**Sequence IDs**: `:a019506`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a019506) |> Sequence.take!(50)
[22,58,84,85,94,136,160,166,202,234,250,265,274,308,319,336,346,355,361,364,382,391,424,438,454,456,476,483,516,517,526,535,562,627,634,644,645,650,654,660,663,690,702,706,732,735,762,778,855,860]
"""
@doc offset: 1,
sequence:
"Hoax numbers: composite numbers whose digit-sum equals the sum of the digit-sums of its distinct prime factors.",
references: [{:oeis, :a019506, "https://oeis.org/A019506"}]
def create_sequence_a019506(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a019506/2)
end
@doc false
@doc offset: 1
def seq_a019506(_idx, last) do
Math.next_number(&Predicates.is_hoax_number?/1, last)
end
@doc """
OEIS Sequence `A004144` - Nonhypotenuse numbers (indices of positive squares that are not the sums of 2 distinct nonzero squares).
From [OEIS A004144](https://oeis.org/A004144):
> Nonhypotenuse numbers (indices of positive squares that are not the sums of 2 distinct nonzero squares).
> (Formerly M0542)
**Sequence IDs**: `:a004144`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a004144) |> Sequence.take!(68)
[1,2,3,4,6,7,8,9,11,12,14,16,18,19,21,22,23,24,27,28,31,32,33,36,38,42,43,44,46,47,48,49,54,56,57,59,62,63,64,66,67,69,71,72,76,77,79,81,83,84,86,88,92,93,94,96,98,99,103,107,108,112,114,118,121,124,126,127]
"""
@doc offset: 1,
sequence:
"Nonhypotenuse numbers (indices of positive squares that are not the sums of 2 distinct nonzero squares).",
references: [{:oeis, :a004144, "https://oeis.org/A004144"}]
def create_sequence_a004144(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a004144/2)
end
@doc false
@doc offset: 1, fill_value: 0
def seq_a004144(_idx, last) do
Math.next_number(&Predicates.is_nonhypotenuse_number?/1, last)
end
@doc """
OEIS Sequence `A009003` - Hypotenuse numbers (squares are sums of 2 nonzero squares).
From [OEIS A009003](https://oeis.org/A009003):
> Hypotenuse numbers (squares are sums of 2 nonzero squares).
> (Formerly )
**Sequence IDs**: `:a009003`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a009003) |> Sequence.take!(64)
[5,10,13,15,17,20,25,26,29,30,34,35,37,39,40,41,45,50,51,52,53,55,58,60,61,65,68,70,73,74,75,78,80,82,85,87,89,90,91,95,97,100,101,102,104,105,106,109,110,111,113,115,116,117,119,120,122,123,125,130,135,136,137,140]
"""
@doc offset: 1,
sequence: "Hypotenuse numbers (squares are sums of 2 nonzero squares).",
references: [{:oeis, :a009003, "https://oeis.org/A009003"}]
def create_sequence_a009003(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a009003/2)
end
@doc false
@doc offset: 1
def seq_a009003(_idx, last) do
Math.next_number(&Predicates.is_hypotenuse_number?/1, last)
end
@doc """
OEIS Sequence `A005153` - Practical numbers: positive integers m such that every k <= sigma(m) is a sum of distinct divisors of m. Also called panarithmic numbers.
From [OEIS A005153](https://oeis.org/A005153):
> Practical numbers: positive integers m such that every k <= sigma(m) is a sum of distinct divisors of m. Also called panarithmic numbers.
> (Formerly M0991)
**Sequence IDs**: `:a005153`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a005153) |> Sequence.take!(60)
[1,2,4,6,8,12,16,18,20,24,28,30,32,36,40,42,48,54,56,60,64,66,72,78,80,84,88,90,96,100,104,108,112,120,126,128,132,140,144,150,156,160,162,168,176,180,192,196,198,200,204,208,210,216,220,224,228,234,240,252]
"""
@doc offset: 1,
sequence:
"Practical numbers: positive integers m such that every k <= sigma(m) is a sum of distinct divisors of m. Also called panarithmic numbers.",
references: [{:oeis, :a005153, "https://oeis.org/A005153"}]
def create_sequence_a005153(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a005153/2)
end
@doc false
@doc offset: 1
def seq_a005153(_idx, last) do
Math.next_number(&Predicates.is_practical_number?/1, last)
end
@doc """
OEIS Sequence `A054377` - Primary pseudoperfect numbers: numbers n > 1 such that 1/n + sum 1/p = 1, where the sum is over the primes p | n.
From [OEIS A054377](https://oeis.org/A054377):
> Primary pseudoperfect numbers: numbers n > 1 such that 1/n + sum 1/p = 1, where the sum is over the primes p | n.
> (Formerly )
**Sequence IDs**: `:a054377`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a054377) |> Sequence.take!(8)
[2,6,42,1806,47058,2214502422,52495396602,8490421583559688410706771261086]
"""
@doc offset: 1,
sequence:
"Primary pseudoperfect numbers: numbers n > 1 such that 1/n + sum 1/p = 1, where the sum is over the primes p | n.",
references: [{:oeis, :a054377, "https://oeis.org/A054377"}]
def create_sequence_a054377(_opts) do
sequence_for_list(@data_a054377)
end
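  # Unlike its neighbors, A054377 is backed by a precomputed list
  # (`@data_a054377`, defined elsewhere in this module) instead of a
  # generator: only a handful of primary pseudoperfect numbers are known,
  # and the terms grow far too quickly (the 8th shown above has 31 digits)
  # to find by incremental search here.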
@doc """
OEIS Sequence `A005835` - Pseudoperfect (or semiperfect) numbers n: some subset of the proper divisors of n sums to n.
From [OEIS A005835](https://oeis.org/A005835):
> Pseudoperfect (or semiperfect) numbers n: some subset of the proper divisors of n sums to n.
> (Formerly M4094)
**Sequence IDs**: `:a005835`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a005835) |> Sequence.take!(62)
[6,12,18,20,24,28,30,36,40,42,48,54,56,60,66,72,78,80,84,88,90,96,100,102,104,108,112,114,120,126,132,138,140,144,150,156,160,162,168,174,176,180,186,192,196,198,200,204,208,210,216,220,222,224,228,234,240,246,252,258,260,264]
"""
@doc offset: 1,
sequence:
"Pseudoperfect (or semiperfect) numbers n: some subset of the proper divisors of n sums to n.",
references: [{:oeis, :a005835, "https://oeis.org/A005835"}]
def create_sequence_a005835(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a005835/2)
end
@doc false
@doc offset: 1
def seq_a005835(_idx, last) do
Math.next_number(&Predicates.is_pseudoperfect_number?/1, last)
end
@doc """
OEIS Sequence `A194472` - Erdős-Nicolas numbers.
From [OEIS A194472](https://oeis.org/A194472):
> Erdős-Nicolas numbers.
> (Formerly )
**Sequence IDs**: `:a194472`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a194472) |> Sequence.take!(5)
[24,2016,8190,42336,45864]
"""
@doc offset: 1,
sequence: "Erdős-Nicolas numbers.",
references: [{:oeis, :a194472, "https://oeis.org/A194472"}]
def create_sequence_a194472(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a194472/2)
end
@doc false
@doc offset: 1
def seq_a194472(_idx, last) do
Math.next_number(&Predicates.is_erdos_nicolas_number?/1, last)
end
@doc """
OEIS Sequence `A006037` - Weird numbers: abundant (A005101) but not pseudoperfect (A005835).
From [OEIS A006037](https://oeis.org/A006037):
> Weird numbers: abundant (A005101) but not pseudoperfect (A005835).
> (Formerly M5339)
**Sequence IDs**: `:a006037`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a006037) |> Sequence.take!(5)
[70,836,4030,5830,7192]
"""
@doc offset: 1,
sequence: "Weird numbers: abundant (A005101) but not pseudoperfect (A005835).",
references: [{:oeis, :a006037, "https://oeis.org/A006037"}]
def create_sequence_a006037(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a006037/2)
end
@doc false
@doc offset: 1
def seq_a006037(_idx, last) do
Math.next_number(&Predicates.is_weird_number?/1, last)
end
@doc """
OEIS Sequence `A006036` - Primitive pseudoperfect numbers.
From [OEIS A006036](https://oeis.org/A006036):
> Primitive pseudoperfect numbers.
> (Formerly M4133)
**Sequence IDs**: `:a006036`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a006036) |> Sequence.take!(69)
[6,20,28,88,104,272,304,350,368,464,490,496,550,572,650,748,770,910,945,1184,1190,1312,1330,1376,1430,1504,1575,1610,1696,1870,1888,1952,2002,2030,2090,2170,2205,2210,2470,2530,2584,2590,2870,2990,3010,3128,3190,3230,3290,3410,3465,3496,3710,3770,3944,4070,4095,4130,4216,4270,4288,4408,4510,4544,4672,4690,4712,4730,4970]
"""
@doc offset: 1,
sequence: "Primitive pseudoperfect numbers.",
references: [{:oeis, :a006036, "https://oeis.org/A006036"}]
def create_sequence_a006036(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a006036/2)
end
@doc false
@doc offset: 1
def seq_a006036(_idx, last) do
Math.next_number(&Predicates.is_primitive_pseudoperfect_number?/1, last)
end
@doc """
OEIS Sequence `A002975` - Primitive weird numbers: weird numbers with no proper weird divisors.
From [OEIS A002975](https://oeis.org/A002975):
> Primitive weird numbers: weird numbers with no proper weird divisors.
> (Formerly M5340)
**Sequence IDs**: `:a002975`
**Finite**: False
**Offset**: 1
## Example
iex> Sequence.create(Elixir.Chunky.Sequence.OEIS.Multiples, :a002975) |> Sequence.take!(3)
[70,836,4030]
"""
@doc offset: 1,
sequence: "Primitive weird numbers: weird numbers with no proper weird divisors.",
references: [{:oeis, :a002975, "https://oeis.org/A002975"}]
def create_sequence_a002975(_opts) do
sequence_for_function(&Elixir.Chunky.Sequence.OEIS.Multiples.seq_a002975/2)
end
@doc false
@doc offset: 1
def seq_a002975(_idx, last) do
Math.next_number(&Predicates.is_primitive_weird_number?/1, last)
end
end
|
lib/sequence/oeis/multiples.ex
| 0.859678
| 0.82485
|
multiples.ex
|
starcoder
|
defmodule Serum.Fragment do
@moduledoc """
Defines a struct representing a page fragment.
## Fields
* `file`: Source path. This can be `nil` if created internally.
* `output`: Destination path
* `metadata`: A map holding extra information about the fragment
* `data`: Contents of the page fragment
"""
  alias Serum.HtmlTreeHelper, as: Html
  alias Serum.Plugin
  alias Serum.Result
@type t :: %__MODULE__{
file: binary() | nil,
output: binary(),
metadata: map(),
data: binary()
}
defstruct [:file, :output, :metadata, :data]
@doc "Creates a new `Fragment` struct."
@spec new(binary() | nil, binary(), map(), binary()) :: Result.t(t())
def new(file, output, metadata, data) do
data
|> Floki.parse()
|> Html.traverse(%{}, &set_header_ids/2)
|> elem(0)
|> Plugin.rendering_fragment(metadata)
|> case do
{:ok, html_tree} ->
fragment = %__MODULE__{
file: file,
output: output,
metadata: Map.put(metadata, :images, extract_images(html_tree)),
data: Floki.raw_html(html_tree)
}
{:ok, fragment}
{:error, _} = error ->
error
end
end
@spec set_header_ids(Html.tree(), map()) :: {Html.tree(), map()}
defp set_header_ids(tree, state)
defp set_header_ids({<<?h, ch::8>>, _, _} = tree, state) when ch in ?1..?6 do
{tag_name, attrs, children} = tree
case Enum.find(attrs, fn {k, _} -> k === "id" end) do
{"id", id} ->
{tree, increase_count(state, id)}
nil ->
id = generate_id(tree)
new_tree =
case state[id] do
nil -> {tag_name, [{"id", id} | attrs], children}
x -> {tag_name, [{"id", "#{id}-#{x + 1}"} | attrs], children}
end
{new_tree, increase_count(state, id)}
end
end
defp set_header_ids(tree, state), do: {tree, state}
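  # Duplicate headings stay unique: the state map counts how many times each
  # generated id has been seen, so a second "Introduction" heading gets the
  # id "introduction-2" while the first keeps the bare "introduction".
  # Headings that already carry an id attribute are kept as-is but still
  # counted toward the totals.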
@spec generate_id(Html.tree()) :: binary()
defp generate_id(tree) do
tree
|> Floki.text()
|> String.downcase()
|> String.replace(~r/\s/, "-")
end
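  # A sketch of the slug rule above: generate_id({"h2", [], ["Getting Started"]})
  # returns "getting-started". Floki.text/1 flattens the children to text,
  # which is then downcased, and each whitespace character is replaced with
  # a hyphen.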
@spec increase_count(map(), binary()) :: map()
defp increase_count(map, id), do: Map.update(map, id, 1, &(&1 + 1))
@spec extract_images(Html.tree()) :: [binary()]
defp extract_images(tree) do
tree
|> Floki.find("img")
|> Enum.map(fn {"img", attrs, _} ->
attrs |> Enum.find(fn {k, _} -> k === "src" end) |> elem(1)
end)
|> List.flatten()
end
defprotocol Source do
@moduledoc false
alias Serum.Result
@spec to_fragment(term(), map()) :: Result.t(Fragment.t())
def to_fragment(x, templates)
end
end
|
lib/serum/fragment.ex
| 0.806396
| 0.486332
|
fragment.ex
|
starcoder
|
defmodule OT.Text.Application do
@moduledoc """
The application of a text operation to a piece of text.
CodeSandbox custom version to ignore error_mismatch, as JS doesn't provide the deletion material
"""
alias OT.Text, as: Text
alias Text.Operation
alias Text.JSString
@typedoc """
The result of an `apply/2` function call, representing either success or error
in application of an operation
"""
  @type apply_result ::
          {:ok, OT.Text.datum()}
          | {:error,
             :delete_mismatch | :retain_too_long | {:length_mismatch, number, number}}
@doc """
Apply an operation to a piece of text.
Given a piece of text and an operation, iterate over each component in the
operation and apply it to the given text. If the operation is valid, the
function will return `{:ok, new_state}` where `new_state` is the text with
the operation applied to it. If the operation is invalid, an
`{:error, atom}` tuple will be returned.
## Examples
iex> OT.Text.Application.apply("Foo", [3, %{i: " Bar"}])
{:ok, "Foo Bar"}
iex> OT.Text.Application.apply("Foo", [%{d: "Foos"}])
{:error, :delete_mismatch}
## Errors
- `:delete_mismatch` A delete component did not match the text it would have
deleted in the text
  - `:retain_too_long` A retain component skipped past the end of the text
  - `{:length_mismatch, actual, expected}` The applied result did not have the
    length implied by the operation's components
  """
@spec apply(Text.datum(), Operation.t()) :: apply_result
def apply(text, op) do
target_length = count_length(op)
with {:ok, result} <- do_apply(text, op) do
result_length = JSString.length(result)
if result_length == target_length do
{:ok, result}
else
{:error, {:length_mismatch, result_length, target_length}}
end
end
end
@spec count_length(Operation.t(), number) :: number
def count_length(op, result \\ 0)
def count_length([], result) do
result
end
def count_length([%{d: _del} | op], result) do
count_length(op, result)
end
def count_length([%{i: ins} | op], result) do
count_length(op, result + JSString.length(ins))
end
def count_length([ret | op], result) do
count_length(op, result + ret)
end
@spec count_base_length(Operation.t(), number) :: number
def count_base_length(op, result \\ 0)
def count_base_length([], result) do
result
end
def count_base_length([%{d: del} | op], result) do
count_base_length(op, result + JSString.length(del))
end
def count_base_length([%{i: _ins} | op], result) do
count_base_length(op, result)
end
def count_base_length([ret | op], result) do
count_base_length(op, result + ret)
end
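  # Worked example for the two counters, using the op from the moduledoc:
  # for [3, %{i: " Bar"}], count_base_length/1 is 3 (only retains and
  # deletes consume input text), while count_length/1 is 3 + 4 = 7, the
  # length "Foo Bar" must have after a successful application.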
@doc """
Same as `apply/2`, but raises if the application fails.
"""
@spec apply!(Text.datum(), Operation.t()) :: Text.datum() | no_return
def apply!(text, op) do
with {:ok, result} <- __MODULE__.apply(text, op) do
result
else
      # `inspect/1` rather than `to_string/1`: tuple errors such as
      # `{:length_mismatch, actual, expected}` have no String.Chars impl.
      {:error, error} -> raise inspect(error)
end
end
@spec do_apply(Text.datum(), Operation.t(), Text.datum()) :: apply_result
defp do_apply(text, op, result \\ "")
defp do_apply(text, [], result) do
{:ok, result <> text}
end
defp do_apply(text, [%{d: del} | op], result) when is_integer(del) do
if del <= JSString.length(text) do
{_deleted, text} = JSString.split_at(text, del)
text
|> do_apply(op, result)
else
{:error, :delete_mismatch}
end
end
# Add support for d: "aaaa" usecases, is deprecated in CSB now
defp do_apply(text, [%{d: del} | op], result) do
do_apply(text, [%{d: JSString.length(del)} | op], result)
end
defp do_apply(text, [%{i: ins} | op], result) do
text
|> do_apply(op, result <> ins)
end
defp do_apply(text, [ret | op], result) when is_integer(ret) do
if ret <= JSString.length(text) do
{retained, text} = JSString.split_at(text, ret)
text
|> do_apply(op, result <> retained)
else
{:error, :retain_too_long}
end
end
end
|
lib/ot/text/application.ex
| 0.833155
| 0.573589
|
application.ex
|
starcoder
|
defmodule MobileNumberFormat.SaxParser do
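  @moduledoc """
  `Saxy.Handler` that pulls per-territory mobile-number metadata out of an
  XML document. The element names handled below (`territory`, `mobile`,
  `possibleLengths`, `nationalNumberPattern`, `exampleNumber`) match the
  shape of libphonenumber's metadata format; that is an inference from the
  code, not a documented guarantee.

  The handler state is a `{tag_stack, territories}` tuple: `tag_stack`
  records which elements are currently open, and `territories` accumulates
  one map per `<territory>` element, newest first.
  """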
@behaviour Saxy.Handler
def handle_event(:start_document, _prolog, state) do
{:ok, state}
end
def handle_event(:end_document, _data, state) do
{:ok, state}
end
def handle_event(:start_element, {"territory", attributes}, {[], territories}) do
    territory = %{
      iso_country_code: Enum.find_value(attributes, fn {k, v} -> k == "id" && v end),
      country_calling_code:
        Enum.find_value(attributes, fn {k, v} -> k == "countryCode" && v end)
        |> String.to_integer()
        |> to_string(),
      national_prefix:
        Enum.find_value(attributes, fn {k, v} ->
          k == "nationalPrefix" && v |> String.to_integer() |> to_string()
        end),
      possible_lengths: nil,
      example: nil,
      regex: nil
    }
{:ok, {["territory"], [territory | territories]}}
end
def handle_event(:start_element, {"mobile", _attributes}, {["territory"], territories}) do
{:ok, {["mobile", "territory"], territories}}
end
def handle_event(:start_element, {"possibleLengths", attributes}, {["mobile", "territory" | _] = tags, territories}) do
[current_territory | territories] = territories
current_territory =
Map.put(
current_territory,
:possible_lengths,
Enum.find_value(attributes, fn {k, v} ->
if k == "national", do: possible_lengths_to_integer_list(v)
end))
{:ok, {["possibleLengths" | tags], [current_territory | territories]}}
end
def handle_event(:start_element, {"nationalNumberPattern", _attributes}, {["mobile", "territory" | _] = tags, territories}) do
{:ok, {["nationalNumberPattern" | tags], territories}}
end
def handle_event(:start_element, {"exampleNumber", _attributes}, {["mobile", "territory" | _] = tags, territories}) do
{:ok, {["exampleNumber" | tags], territories}}
end
def handle_event(:start_element, {_name, _attributes}, state) do
{:ok, state}
end
def handle_event(:end_element, _name, {[], territories}) do
{:ok, {[], territories}}
end
def handle_event(:end_element, name, {[name, "mobile", "territory" | _] = tags, territories}) do
{:ok, {tl(tags), territories}}
end
def handle_event(:end_element, "mobile", {["mobile", "territory" | _] = tags, territories}) do
{:ok, {tl(tags), territories}}
end
def handle_event(:end_element, "territory", {["territory" | _] = tags, territories}) do
{:ok, {tl(tags), territories}}
end
def handle_event(:end_element, _name, state) do
{:ok, state}
end
def handle_event(:characters, chars, {["nationalNumberPattern", "mobile" | _] = tags, territories}) do
[current_territory | territories] = territories
chars = chars |> String.replace(~r/\s/, "") |> Regex.compile!()
current_territory = Map.put(current_territory, :regex, chars)
{:ok, {tags, [current_territory | territories]}}
end
def handle_event(:characters, chars, {["exampleNumber", "mobile" | _] = tags, territories}) do
[current_territory | territories] = territories
chars = chars |> String.replace(~r/\s/, "")
current_territory = Map.put(current_territory, :example, chars)
{:ok, {tags, [current_territory | territories]}}
end
def handle_event(:characters, _chars, state) do
{:ok, state}
end
def handle_event(:cdata, _cdata, state) do
{:ok, state}
end
defp possible_lengths_to_integer_list(possible_lengths) do
possible_lengths
|> String.split(",")
|> Enum.map(fn number_or_range ->
if String.contains?(number_or_range, "[") do
number_or_range
|> String.replace_prefix("[", "")
|> String.replace_suffix("]", "")
|> String.split("-")
|> Enum.reduce(nil, fn number, prev_number ->
if prev_number,
do: Range.new(String.to_integer(prev_number), String.to_integer(number)),
else: number
end)
|> Enum.to_list()
else
String.to_integer(number_or_range)
end
end)
|> List.flatten()
end
end
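# --- Example (editor's addition, not part of the original file) ---
# Running the handler with Saxy. The initial state {[], []} (empty tag
# stack, no territories) matches the patterns above; the XML shape is an
# assumption mirroring libphonenumber's PhoneNumberMetadata.xml.
xml = """
<territories>
<territory id="SE" countryCode="46" nationalPrefix="0">
<mobile>
<possibleLengths national="9"/>
<nationalNumberPattern>7[02369][0-9]{7}</nationalNumberPattern>
<exampleNumber>701234567</exampleNumber>
</mobile>
</territory>
</territories>
"""
{:ok, {[], [territory]}} = Saxy.parse_string(xml, MobileNumberFormat.SaxParser, {[], []})
# territory.iso_country_code #=> "SE"
# territory.possible_lengths #=> [9]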
|
lib/sax_parser.ex
| 0.541894
| 0.487063
|
sax_parser.ex
|
starcoder
|
defmodule Exception do
@moduledoc """
Several convenience functions for working with and pretty-printing
exceptions and backtraces.
"""
# Normalize an exception converting Erlang exceptions
# to Elixir style exceptions. This is meant to be used
# internally.
@doc false
def normalize(exception) when is_exception(exception) do
exception
end
def normalize(:badarg) do
ArgumentError.new
end
def normalize(:badarith) do
ArithmeticError.new
end
def normalize(:system_limit) do
SystemLimitError.new
end
def normalize({ :badarity, { fun, args } }) do
BadArityError.new(function: fun, args: args)
end
def normalize({ :badfun, actual }) do
BadFunctionError.new(actual: actual)
end
def normalize({ :badmatch, actual }) do
MatchError.new(actual: actual)
end
def normalize({ :case_clause, actual }) do
CaseClauseError.new(actual: actual)
end
def normalize(:undef) do
UndefinedFunctionError.new from_stacktrace(System.stacktrace)
end
def normalize(:function_clause) do
FunctionClauseError.new from_stacktrace(System.stacktrace)
end
def normalize({ :badarg, payload }) do
ArgumentError.new message: "argument error: #{inspect(payload)}"
end
def normalize(other) do
ErlangError.new original: other
end
# Check that the given module is a valid exception (implements message/1).
@doc false
def check!(module) do
unless :erlang.function_exported(module, :message, 1) do
raise "Expected #{inspect module} to implement message/1"
end
end
@doc """
Receives a module, fun and arity and returns a string
representing such invocation. Arity may also be a list
of arguments. It follows the same syntax as in stacktraces.
"""
def format_module_fun_arity(module, fun, arity) do
<< ?:, fun | :binary >> = inspect(fun)
if is_list(arity) do
inspected = lc x inlist arity, do: inspect(x)
"#{inspect module}.#{fun}(#{Enum.join(inspected, ", ")})"
else
"#{inspect module}.#{fun}/#{arity}"
end
end
@doc """
Formats each line in the stacktrace.
"""
def format_stacktrace({module, fun, arity, file_line}) do
"#{format_file_line(file_line)}#{format_module_fun_arity(module, fun, arity)}"
end
@doc """
Formats file and line information present in stacktraces.
Expects them to be given in a keyword list.
"""
def format_file_line(file_line) do
format_file_line(Keyword.get(file_line, :file), Keyword.get(file_line, :line))
end
@doc """
Formats the given file and line.
"""
def format_file_line(file, line) do
if file do
file = to_binary(file)
if line && line != 0 do
"#{file}:#{line}: "
else
"#{file}: "
end
else
""
end
end
## Helpers
defp from_stacktrace([{ module, function, arity, _ }|_]) do
[module: module, function: function, arity: arity]
end
defp from_stacktrace(_), do: []
end
defexception RuntimeError, message: "runtime error"
defexception ArgumentError, message: "argument error"
defexception ArithmeticError, message: "bad argument in arithmetic expression"
defexception SystemLimitError, message: "a system limit has been reached"
defexception SyntaxError, [file: nil, line: nil, description: "syntax error"] do
def message(exception) do
"#{Exception.format_file_line(exception.file, exception.line)}#{exception.description}"
end
end
defexception TokenMissingError, [file: nil, line: nil, description: "expression is incomplete"] do
def message(exception) do
"#{Exception.format_file_line(exception.file, exception.line)}#{exception.description}"
end
end
defexception CompileError, [file: nil, line: nil, description: "compile error"] do
def message(exception) do
"#{Exception.format_file_line(exception.file, exception.line)}#{exception.description}"
end
end
defexception BadFunctionError, [actual: nil] do
def message(exception) do
"bad function: #{inspect(exception.actual)}"
end
end
defexception MatchError, [actual: nil] do
def message(exception) do
"no match of right hand side value: #{inspect(exception.actual)}"
end
end
defexception CaseClauseError, [actual: nil] do
def message(exception) do
"no case clause matching: #{inspect(exception.actual)}"
end
end
defexception BadArityError, [function: nil, args: nil] do
def message(exception) do
"bad arity error: #{inspect(exception.function)} called with #{inspect(exception.args)}"
end
end
defexception UndefinedFunctionError, [module: nil, function: nil, arity: nil] do
def message(exception) do
if exception.function do
formatted = Exception.format_module_fun_arity exception.module, exception.function, to_arity(exception.arity)
"undefined function: #{formatted}"
else
"undefined function"
end
end
defp to_arity(arity) when is_integer(arity), do: arity
defp to_arity(list) when is_list(list), do: length(list)
end
defexception FunctionClauseError, [module: nil, function: nil, arity: nil] do
def message(exception) do
if exception.function do
formatted = Exception.format_module_fun_arity exception.module, exception.function, exception.arity
"no function clause matching: #{formatted}"
else
"no function clause matches"
end
end
end
defexception Protocol.UndefinedError, [protocol: nil, structure: nil] do
def message(exception) do
"protocol #{inspect exception.protocol} not implemented for #{inspect exception.structure}"
end
end
defexception ErlangError, [original: nil] do
def message(exception) do
"erlang error: #{inspect(exception.original)}"
end
end
defexception Keyword.KeyError, key: nil do
def message(exception) do
"key not found: #{inspect exception.key}"
end
end
defexception Enum.OutOfBoundsError, message: "out of bounds error"
|
lib/elixir/lib/exception.ex
| 0.684686
| 0.457379
|
exception.ex
|
starcoder
|
defmodule Snitch.Data.Model.Promotion.OrderEligibility do
@moduledoc """
Defines order-level checks used to determine whether
a promotion can be applied to an order.
"""
use Snitch.Data.Model
alias Snitch.Data.Model.PromotionAdjustment
@valid_order_states ~w(delivery address)a
@success_message "promotion applicable"
@error_message "coupon not applicable"
@coupon_applied "coupon already applied"
@message %{
applicable: "promotion applicable",
valid_state: "valid order state",
promotionable: "order promotionable"
}
@doc """
Checks whether the `promotion` has already been applied to the order.
Returns `{false, message}` if the promotion is already applied,
otherwise returns `{true, message}`.
Detects prior application by checking whether adjustments already
exist for the order under the supplied `promotion`.
"""
def promotion_applied(order, promotion) do
case PromotionAdjustment.order_adjustments_for_promotion(order, promotion) do
[] ->
{true, @message.applicable}
_list ->
{false, @coupon_applied}
end
end
@doc """
Checks the state of the order before applying the promotion.
At present the supported states are `[:address, :delivery]`.
Returns `{false, message}` if the order is not in a valid state,
otherwise returns `{true, message}`.
"""
def valid_order_state(order) do
if order.state in @valid_order_states do
{true, @message.valid_state}
else
{false, "promotion not applicable to order"}
end
end
@doc """
Checks if an order is `promotionable`.
An order is considered `promotionable` if it contains at least one
`promotionable` product as a line item. If no `promotionable` products
are found, the order is ineligible for the promotion.
Returns `{true, message}` if promotionable, otherwise `{false, message}`.
"""
def order_promotionable(order) do
order = Repo.preload(order, line_items: [:product])
if Enum.any?(order.line_items, fn line_item ->
line_item.product.promotionable == true
end) do
{true, @message.promotionable}
else
{false, "no promotionable products found"}
end
end
@doc """
Checks the order against the rules defined for the promotion.
The rules are checked according to the match policy of the supplied
promotion. A match policy of `"all"` requires every rule to be
satisfied for the promotion to apply; a match policy of `"any"`
requires only one rule to be satisfied.
Returns `{true, message}` if the rules check passes, otherwise
`{false, message}`, where `message` comes from the first
non-applicable rule.
"""
def rules_check(order, promotion) do
promotion = Repo.preload(promotion, :rules)
check_eligibility(promotion.match_policy, order, promotion.rules)
end
defp check_eligibility(_, _order, _no_rules_set = []) do
{true, @success_message}
end
defp check_eligibility("all", order, rules) do
Enum.reduce_while(rules, {true, @success_message}, fn rule, acc ->
case rule_eligibility(order, rule) do
{true, _} ->
{:cont, acc}
{false, _reason} = error ->
acc = error
{:halt, acc}
end
end)
end
defp check_eligibility("any", order, rules) do
Enum.reduce_while(rules, {false, @error_message}, fn rule, _ ->
case rule_eligibility(order, rule) do
{false, _reason} = reason ->
acc = reason
{:cont, acc}
{true, _reason} = reason ->
acc = reason
{:halt, acc}
end
end)
end
defp rule_eligibility(order, rule) do
rule.module.eligible(order, rule.preferences)
end
end
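# --- Example (editor's addition, not part of the original file) ---
# A hedged sketch of chaining the checks above with `with`. The order and
# promotion structs are assumed to be loaded Snitch schemas; the example
# module name is hypothetical.
defmodule Snitch.Data.Model.Promotion.OrderEligibilityExample do
alias Snitch.Data.Model.Promotion.OrderEligibility
def eligible?(order, promotion) do
with {true, _} <- OrderEligibility.promotion_applied(order, promotion),
{true, _} <- OrderEligibility.valid_order_state(order),
{true, _} <- OrderEligibility.order_promotionable(order),
{true, _} <- OrderEligibility.rules_check(order, promotion) do
true
else
{false, reason} -> {false, reason}
end
end
end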
|
apps/snitch_core/lib/core/data/model/promotion/order_eligibility.ex
| 0.846356
| 0.524882
|
order_eligibility.ex
|
starcoder
|
defmodule Mix.Tasks.Nlp.PosTagger.Train do
@moduledoc """
This task trains and optionally tests a part-of-speech tagger
using files containing tokenized text and POS tags.
The trained model will be JSON-encoded and saved to
the specified target file; it can be loaded by reading
the target file, JSON decoding it, and feeding the result to
`POSTagger.compile/1`.
If supplied, the test file must be formatted identically to the training file.
"""
@shortdoc @moduledoc
use Mix.Task
alias Mix.Tasks.Nlp.PosTagger.TaggerUtils
alias Penelope.NLP.POSTagger
require Logger
@switches [
section_sep: :string,
token_sep: :string,
test_file: :string
]
def run(argv) do
{options, args, other} = OptionParser.parse(argv, switches: @switches)
case {args, other} do
{[source, target], []} -> execute(source, target, options)
_ -> usage()
end
end
defp execute(source, target, options) do
Application.ensure_all_started(:penelope)
options = combine_with_defaults(options)
{tokens, tags} = TaggerUtils.ingest_file(source, options)
Logger.info("Training file processed; fitting model...")
model = POSTagger.fit(%{}, tokens, tags)
exported =
model
|> POSTagger.export()
|> Poison.encode!()
File.write!(target, exported)
Logger.info("Model saved to #{target}.")
if options.test_file do
test(model, options)
end
end
defp usage do
IO.puts("""
Part-of-speech tagger trainer
usage: mix nlp.pos_tagger.train [options] <train-file> <output-file>
train-file: path to a training file, each line of which contains a
tokenized phrase and the tokens' part-of-speech tags
Example line: Bill|saw|her,NNP|VBD|PRP
output-file: path to the file where the trained model should be saved
options:
--section-sep: the string separating tokenized text from POS tags in
each line of the training file, default: "\\t"
--token-sep: the string separating individual tokens and tags in each
line of the training file, default: " "
--test-file: path to the file to use for testing the trained tagger.
must be formatted identically to the training file,
default: nil
""")
end
defp test(model, options) do
{tokens, tags} = TaggerUtils.ingest_file(options.test_file, options)
tags = List.flatten(tags)
Logger.info("Test file loaded. Testing tagger...")
stats =
tokens
|> Enum.map(&POSTagger.tag(model, %{}, &1))
|> Enum.flat_map(fn results -> Enum.map(results, &elem(&1, 1)) end)
|> Enum.zip(tags)
|> Enum.reduce({0, 0}, fn {predicted, gold}, {correct, total} ->
{(predicted == gold && correct + 1) || correct, total + 1}
end)
stats = %{
total_tokens: elem(stats, 1),
accuracy: elem(stats, 0) / elem(stats, 1)
}
Logger.info(
~s(Test complete.) <>
~s(\n\tTotal tokens: #{stats.total_tokens}) <>
~s(\n\tAccuracy: #{stats.accuracy})
)
end
defp combine_with_defaults(options) do
%{
token_sep: Keyword.get(options, :token_sep, " "),
section_sep: Keyword.get(options, :section_sep, "\t"),
test_file: Keyword.get(options, :test_file)
}
end
end
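# --- Example (editor's addition, not part of the original file) ---
# Loading a model saved by this task, per the moduledoc: read the target
# file, JSON-decode it, and feed the result to POSTagger.compile/1. The
# wrapper module name is hypothetical.
defmodule Mix.Tasks.Nlp.PosTagger.LoadExample do
alias Penelope.NLP.POSTagger
def load_model(path) do
path
|> File.read!()
|> Poison.decode!()
|> POSTagger.compile()
end
end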
|
lib/mix/tasks/nlp/pos_tagger/train.ex
| 0.818483
| 0.52756
|
train.ex
|
starcoder
|
defmodule Slackerton.Trivia.Store do
use GenServer
require Logger
defstruct [ active: MapSet.new(), recently_asked: MapSet.new(), quiz: Map.new(), winners: Map.new() ]
def start_link(_) do
GenServer.start_link(__MODULE__, %__MODULE__{}, name: __MODULE__)
end
def init(state) do
{:ok, state }
end
def start_quiz(room) do
update(fn state ->
state
|> Map.update!(:active, fn active -> MapSet.put(active, room) end)
|> Map.update!(:winners, fn winners -> Map.put(winners, room, MapSet.new()) end)
end)
end
def add_questions_to_quiz(room, quiz) do
update(fn state ->
state
|> Map.update!(:quiz, &Map.put(&1, room, quiz))
|> Map.update!(:recently_asked, &MapSet.put(&1, quiz.question))
end)
end
def answer_quiz(room, user, answer) do
formatted_answer = String.upcase(answer)
case get(:quiz)[room] do
%{correct: ^formatted_answer} ->
update_key(:winners, &Map.update!(&1, room, fn set -> MapSet.put(set, user) end))
_ ->
:ok
end
end
def get_quiz(room), do: Map.get(get(:quiz), room)
def get_winners(room), do: get(:winners)[room]
def active?(room), do: get(:active) |> MapSet.member?(room)
def recently_asked?(question), do: get(:recently_asked) |> MapSet.member?(question)
defp update_key(key, callback) do
GenServer.call(__MODULE__, {:update, key, callback})
end
defp update(callback) do
GenServer.call(__MODULE__, {:update, callback})
end
defp get(key) do
GenServer.call(__MODULE__, {:get, key})
end
def schedule_completion(room, callback, timeout \\ 15_000) do
Logger.info(fn -> {"Scheduling End of Game", [room: room] } end)
Process.send_after(__MODULE__, {:times_up, room, callback}, timeout)
end
def handle_info({:times_up, room, callback}, state) do
state = Map.update!(state, :active, &MapSet.delete(&1, room))
quiz = Map.get(state, :quiz)[room]
winners = Map.get(state, :winners)[room]
callback.({quiz, winners})
{:noreply, state}
end
def handle_info(message, state) do
Logger.warn(fn -> { "unhandled message", [message: message] } end)
{:noreply, state}
end
def handle_call({:update, key, callback}, _, state) do
state = Map.update!(state, key, callback)
{:reply, state, state}
end
def handle_call({:update, callback}, _, state) do
state = callback.(state)
{:reply, state, state}
end
def handle_call({:get, key }, _ , state) do
value = Map.get(state, key)
{:reply, value, state}
end
end
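# --- Example (editor's addition, not part of the original file) ---
# A walkthrough of the public API above for one quiz round; the quiz map
# shape (:question/:correct keys) is inferred from add_questions_to_quiz/2
# and answer_quiz/3.
{:ok, _pid} = Slackerton.Trivia.Store.start_link([])
room = "general"
Slackerton.Trivia.Store.start_quiz(room)
Slackerton.Trivia.Store.add_questions_to_quiz(room, %{
question: "What is the capital of France? A) Paris B) Lyon",
correct: "A"
})
# Answers are upcased before comparison, so "a" also counts:
Slackerton.Trivia.Store.answer_quiz(room, "alice", "a")
# When the timer fires, the callback receives {quiz, winners}:
Slackerton.Trivia.Store.schedule_completion(room, fn {quiz, winners} ->
IO.inspect({quiz.question, MapSet.to_list(winners)})
end)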
|
lib/slackerton/trivia/store.ex
| 0.536556
| 0.42662
|
store.ex
|
starcoder
|
defmodule TripUpdates do
@moduledoc """
Responsible for converting lists of BoardingStatus structs into an enhanced TripUpdates JSON feed
The basic TripUpdates feed is a Protobuf, documented here: https://developers.google.com/transit/gtfs-realtime/guides/trip-updates
The enhanced JSON feed takes the Protobuf, expands it into JSON, and adds some additional fields.
"""
def to_map(boarding_statuses) do
current_time = System.system_time(:second)
%{
header: header(current_time),
entity: entity(current_time, boarding_statuses)
}
end
def header(current_time) do
%{
gtfs_realtime_version: "1.0",
timestamp: current_time
}
end
def entity(current_time, boarding_statuses) do
for {_trip_id, trip_statuses} <-
Enum.group_by(boarding_statuses, & &1.trip_id),
update <- trip_update(current_time, trip_statuses) do
update
end
end
def trip_update(_current_time, []) do
[]
end
def trip_update(current_time, [%BoardingStatus{} = bs | _] = statuses) do
[
%{
id: "#{current_time}_#{bs.trip_id}",
trip_update: %{
trip: trip(bs),
stop_time_update: Enum.map(statuses, &stop_time_update/1)
}
}
]
end
def trip(%BoardingStatus{} = bs) do
start_date =
case bs.scheduled_time do
:unknown -> DateHelpers.service_date()
dt -> DateHelpers.service_date(dt)
end
Map.merge(
%{
trip_id: bs.trip_id,
route_id: bs.route_id,
start_date: start_date,
schedule_relationship: schedule_relationship(bs)
},
direction_id_map(bs.direction_id)
)
end
def stop_time_update(%BoardingStatus{} = bs) do
Enum.reduce(
[
stop_sequence_map(bs.stop_sequence),
boarding_status_map(bs.status),
platform_id_map(bs.stop_id, bs.track),
%{stop_id: bs.stop_id}
],
&Map.merge/2
)
end
def schedule_relationship(%BoardingStatus{added?: true}) do
"ADDED"
end
def schedule_relationship(%BoardingStatus{}) do
"SCHEDULED"
end
def direction_id_map(:unknown) do
%{}
end
def direction_id_map(direction_id) do
%{direction_id: direction_id}
end
def stop_sequence_map(:unknown) do
%{}
end
def stop_sequence_map(stop_sequence) do
%{stop_sequence: stop_sequence}
end
def boarding_status_map(:unknown) do
%{}
end
def boarding_status_map(status) do
%{
boarding_status: status
}
end
def platform_id_map(_, "") do
%{}
end
def platform_id_map(stop_id, track) do
platform_id = "#{stop_id}-#{String.pad_leading(track, 2, ["0"])}"
%{
stop_id: platform_id
}
end
end
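# --- Example (editor's addition, not part of the original file) ---
# Building the feed from a single BoardingStatus. Only the fields accessed
# above are shown; the struct definition and the concrete values are
# assumptions for illustration.
status = %BoardingStatus{
trip_id: "CR-Weekday-123",
route_id: "CR-Providence",
scheduled_time: :unknown,
direction_id: 0,
stop_sequence: 1,
status: "On time",
stop_id: "South Station",
track: "3",
added?: false
}
TripUpdates.to_map([status])
# => %{header: %{gtfs_realtime_version: "1.0", timestamp: ...},
#      entity: [%{id: "..._CR-Weekday-123", trip_update: %{...}}]}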
|
apps/commuter_rail_boarding/lib/trip_updates.ex
| 0.635675
| 0.448185
|
trip_updates.ex
|
starcoder
|
defmodule Digraph do
@moduledoc """
`Digraph` is a struct-based implementation of `:digraph`.
## Notes
This will not enforce the `:acyclic` option.
"""
use Boundary, deps: [], exports: []
alias Digraph.{Edge, Vertex}
@behaviour Access
defstruct vertices: %{},
edges: %{},
options: [],
vertex_id: 0,
edge_id: 0
@type t :: %__MODULE__{
vertices: %{optional(Vertex.id()) => Vertex.t()},
edges: %{optional(Edge.id()) => Edge.t()},
options: [opt],
vertex_id: non_neg_integer,
edge_id: non_neg_integer
}
@type opt :: :digraph.d_type()
@type label :: :digraph.label()
@impl Access
def fetch(graph, key) do
Map.fetch(graph, key)
end
@impl Access
def get_and_update(graph, key, fun) do
Map.get_and_update(graph, key, fun)
end
@impl Access
def pop(graph, key) do
Map.pop(graph, key)
end
@spec new([opt]) :: t
def new(opts \\ []) do
%__MODULE__{options: opts}
end
@spec from_digraph(:digraph.graph()) :: t
def from_digraph(dg) do
{options, _} = Keyword.split(:digraph.info(dg), [:cyclicity, :protection])
%__MODULE__{
vertices:
dg
|> :digraph.vertices()
|> Enum.map(&{elem(&1, 0), Vertex.from_digraph(dg, &1)})
|> Enum.into(%{}),
edges:
dg
|> :digraph.edges()
|> Enum.map(&{elem(&1, 0), Edge.from_digraph(dg, &1)})
|> Enum.into(%{}),
options: Keyword.values(options)
}
end
@spec to_digraph(t) :: :digraph.graph()
def to_digraph(graph) do
dg = :digraph.new(graph.options)
_ = Enum.each(graph.vertices, fn {_, v} -> Vertex.to_digraph(dg, v) end)
_ = Enum.each(graph.edges, fn {_, e} -> Edge.to_digraph(dg, e) end)
dg
end
@spec add_vertex(t) :: {Vertex.id(), t}
def add_vertex(graph) do
{id, graph} = next_id(graph, :vertex)
add_vertex(graph, id)
end
@spec add_vertex(t, Vertex.id()) :: {Vertex.id(), t}
def add_vertex(graph, id), do: add_vertex(graph, id, [])
@spec add_vertex(t, Vertex.id(), label) :: {Vertex.id(), t}
def add_vertex(graph, id, label) do
{id, put_in(graph, [:vertices, id], Vertex.new(id, label))}
end
@spec add_next_vertex(t, label) :: {Vertex.id(), t}
def add_next_vertex(graph, label) do
{id, graph} = next_id(graph, :vertex)
add_vertex(graph, id, label)
end
@spec add_edge(t, Vertex.id(), Vertex.id()) :: {:ok, {Edge.id(), t}} | {:error, reason :: term}
def add_edge(graph, v1, v2), do: add_edge(graph, v1, v2, [])
@spec add_edge(t, Vertex.id(), Vertex.id(), label) ::
{:ok, {Edge.id(), t}} | {:error, reason :: term}
def add_edge(graph, v1, v2, label) do
{id, graph} = next_id(graph, :edge)
add_edge(graph, id, v1, v2, label)
end
@spec add_edge(t, Edge.id(), Vertex.id(), Vertex.id(), label) ::
{:ok, {Edge.id(), t}} | {:error, reason :: term}
def add_edge(graph, id, v1, v2, label) do
with {_, true} <- {:v1, v1 in Map.keys(graph.vertices)},
{_, true} <- {:v2, v2 in Map.keys(graph.vertices)} do
{:ok, {id, put_in(graph, [:edges, id], Edge.new(id, v1, v2, label))}}
else
{:v1, false} -> {:error, {:bad_vertex, v1}}
{:v2, false} -> {:error, {:bad_vertex, v2}}
end
end
@spec del_edge(t, Edge.id()) :: t
def del_edge(graph, edge) do
Map.update!(graph, :edges, &Map.delete(&1, edge))
end
@spec del_edges(t, [Edge.id()]) :: t
def del_edges(graph, edges) do
Enum.reduce(edges, graph, &del_edge(&2, &1))
end
@spec del_vertex(t, Vertex.id()) :: t
def del_vertex(graph, vertex) do
graph
|> del_edges(Enum.map(edges(graph, vertex), & &1.id))
|> Map.update!(:vertices, &Map.delete(&1, vertex))
end
@spec del_vertices(t, [Vertex.id()]) :: t
def del_vertices(graph, vertices) do
Enum.reduce(vertices, graph, &del_vertex(&2, &1))
end
@spec vertex(t, Vertex.id()) :: Vertex.t() | nil
def vertex(graph, vertex) do
Map.get(graph.vertices, vertex)
end
@spec edge(t, Edge.id()) :: Edge.t() | nil
def edge(graph, edge) do
Map.get(graph.edges, edge)
end
@spec vertices(t) :: [Vertex.t()]
def vertices(graph) do
Map.values(graph.vertices)
end
@spec edges(t) :: [Edge.t()]
def edges(graph) do
Map.values(graph.edges)
end
@spec edges(t, Vertex.id()) :: [Edge.t()]
def edges(graph, vertex) do
graph
|> edges()
|> Enum.filter(fn
%{v1: ^vertex} -> true
%{v2: ^vertex} -> true
_ -> false
end)
end
@spec in_edges(t, Vertex.id()) :: [Edge.t()]
def in_edges(graph, vertex) do
graph
|> edges()
|> Enum.filter(fn
%{v2: ^vertex} -> true
_ -> false
end)
end
@spec out_edges(t, Vertex.id()) :: [Edge.t()]
def out_edges(graph, vertex) do
graph
|> edges()
|> Enum.filter(fn
%{v1: ^vertex} -> true
_ -> false
end)
end
@spec in_neighbours(t, Vertex.id()) :: [Vertex.t()]
def in_neighbours(graph, vertex) do
graph
|> in_edges(vertex)
|> Enum.map(&vertex(graph, Map.get(&1, :v1)))
|> Enum.reject(&is_nil/1)
end
@spec out_neighbours(t, Vertex.id()) :: [Vertex.t()]
def out_neighbours(graph, vertex) do
graph
|> out_edges(vertex)
|> Enum.map(&vertex(graph, Map.get(&1, :v2)))
|> Enum.reject(&is_nil/1)
end
@spec no_vertices(t) :: non_neg_integer()
def no_vertices(graph) do
Enum.count(graph.vertices)
end
@spec no_edges(t) :: non_neg_integer()
def no_edges(graph) do
Enum.count(graph.edges)
end
@spec in_degree(t, Vertex.id()) :: non_neg_integer()
def in_degree(graph, vertex) do
graph
|> in_edges(vertex)
|> Enum.count()
end
@spec out_degree(t, Vertex.id()) :: non_neg_integer()
def out_degree(graph, vertex) do
graph
|> out_edges(vertex)
|> Enum.count()
end
@spec next_id(t, :vertex | :edge) :: {term, t}
defp next_id(graph, :vertex) do
{[:"$v" | graph.vertex_id], Map.update!(graph, :vertex_id, &(&1 + 1))}
end
defp next_id(graph, :edge) do
{[:"$e" | graph.edge_id], Map.update!(graph, :edge_id, &(&1 + 1))}
end
defmodule Vertex do
@moduledoc false
@behaviour Access
defstruct id: nil,
label: nil
@type t :: %__MODULE__{
id: id,
label: Digraph.label()
}
@type id :: :digraph.vertex()
@impl Access
def fetch(vertex, key) do
Map.fetch(vertex, key)
end
@impl Access
def get_and_update(vertex, key, fun) do
Map.get_and_update(vertex, key, fun)
end
@impl Access
def pop(vertex, key) do
Map.pop(vertex, key)
end
@spec new(id, Digraph.label()) :: t
def new(id, label \\ nil) do
%__MODULE__{id: id, label: label}
end
@spec from_digraph(:digraph.graph(), id) :: t | nil
def from_digraph(dg, vertex) do
case :digraph.vertex(dg, vertex) do
{id, label} -> new(id, label)
_ -> nil
end
end
@spec to_digraph(:digraph.graph(), t) :: :digraph.graph()
def to_digraph(dg, vertex) do
_ = :digraph.add_vertex(dg, vertex.id, vertex.label)
dg
end
end
defmodule Edge do
@moduledoc false
alias Digraph.Vertex
@behaviour Access
defstruct id: nil,
v1: nil,
v2: nil,
label: nil
@type t :: %__MODULE__{
id: id,
v1: Vertex.id(),
v2: Vertex.id(),
label: Digraph.label()
}
@type id :: :digraph.edge()
@impl Access
def fetch(edge, key) do
Map.fetch(edge, key)
end
@impl Access
def get_and_update(edge, key, fun) do
Map.get_and_update(edge, key, fun)
end
@impl Access
def pop(edge, key) do
Map.pop(edge, key)
end
@spec new(id, Vertex.id(), Vertex.id(), Digraph.label()) :: t
def new(id, v1, v2, label \\ nil) do
%__MODULE__{id: id, v1: v1, v2: v2, label: label}
end
@spec from_digraph(:digraph.graph(), id) :: t | nil
def from_digraph(dg, edge) do
case :digraph.edge(dg, edge) do
{id, v1, v2, label} -> new(id, v1, v2, label)
_ -> nil
end
end
@spec to_digraph(:digraph.graph(), t) :: :digraph.graph()
def to_digraph(dg, edge) do
_ = :digraph.add_edge(dg, edge.id, edge.v1, edge.v2, edge.label)
dg
end
end
end
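# --- Example (editor's addition, not part of the original file) ---
# Building a two-vertex graph with the API above.
graph = Digraph.new()
{a, graph} = Digraph.add_next_vertex(graph, :a)
{b, graph} = Digraph.add_next_vertex(graph, :b)
{:ok, {_edge_id, graph}} = Digraph.add_edge(graph, a, b, :a_to_b)
[%Digraph.Vertex{label: :b}] = Digraph.out_neighbours(graph, a)
1 = Digraph.out_degree(graph, a)
0 = Digraph.in_degree(graph, a)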
|
lib/digraph.ex
| 0.908145
| 0.606906
|
digraph.ex
|
starcoder
|
defmodule SpeechMarkdown.Grammar do
@moduledoc """
This is the NimbleParsec grammar for the subset of the Speech Markdown
language supported by this library. The parser is tolerant of any string
inputs, but poorly-specified constructs will simply be output as string
values. Results are returned as an ast containing a list of tagged tokens,
like so:
iex> parse!("You say pecan [200ms], I say (pecan)[/pɪˈkɑːn/]")
[
text: "You say pecan ",
break: [200, :ms],
text: ", I say ",
modifier: ["pecan", {:ipa, "pɪˈkɑːn"}]
]
"""
import NimbleParsec
@doc "parse a speech markdown string into an ast"
@spec parse!(text :: String.t()) :: keyword()
def parse!(text) do
{:ok, ast} = parse(text)
ast
end
@doc "parse a speech markdown string into an ast"
@spec parse(text :: String.t()) :: {:ok, keyword()}
def parse(text) do
with {:ok, [ast], _, _, _, _} <- document(text) do
{:ok, ast}
end
end
# coalesce adjacent text tokens
defp merge([{:text, x}, {:text, y} | z]) do
merge([{:text, x <> y} | z])
end
defp merge([x | y]) do
[x | merge(y)]
end
defp merge([]) do
[]
end
# --------------------------------------------------------------------------
# helpers
# --------------------------------------------------------------------------
atomize = &map(string(empty(), &1), {String, :to_atom, []})
space = repeat(ascii_char('\r\n\s\t'))
# --------------------------------------------------------------------------
# breaks
# --------------------------------------------------------------------------
break =
ignore(string("["))
|> ignore(space)
|> ignore(optional(string("break") |> optional(space) |> string(":")))
|> ignore(space)
|> integer(min: 1)
|> choice([atomize.("ms"), atomize.("s")])
|> ignore(space)
|> ignore(string("]"))
|> tag(:break)
# --------------------------------------------------------------------------
# ipa
# --------------------------------------------------------------------------
ipa_long =
ignore(optional(string("ipa") |> optional(space) |> string(":")))
|> ignore(space)
|> ignore(string("\""))
|> reduce(repeat(utf8_char([{:not, ?"}])), :to_string)
|> ignore(string("\""))
ipa_short =
ignore(string("/"))
|> reduce(repeat(utf8_char([{:not, ?/}])), :to_string)
|> ignore(string("/"))
ipa =
choice([ipa_long, ipa_short])
|> unwrap_and_tag(:ipa)
# --------------------------------------------------------------------------
# say-as
# --------------------------------------------------------------------------
say =
choice([atomize.("characters"), atomize.("number")])
|> unwrap_and_tag(:say)
# --------------------------------------------------------------------------
# all modifiers
# --------------------------------------------------------------------------
modifier =
ignore(string("("))
|> reduce(repeat(utf8_char([{:not, ?)}])), :to_string)
|> ignore(string(")["))
|> ignore(space)
|> choice([ipa, say])
|> ignore(space)
|> ignore(string("]"))
|> tag(:modifier)
text =
utf8_char([])
|> reduce(:to_string)
|> unwrap_and_tag(:text)
defparsec(
:document,
choice([break, modifier, text])
|> repeat()
|> reduce(:merge)
)
end
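# Additional example (editor's addition): the long-form break and the
# say-as modifier accepted by the grammar above, in doctest style:
#
#   iex> parse!("Wait [break:2s] then dial (911)[characters]")
#   [
#     text: "Wait ",
#     break: [2, :s],
#     text: " then dial ",
#     modifier: ["911", {:say, :characters}]
#   ]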
|
lib/speech_markdown/grammar.ex
| 0.807802
| 0.407864
|
grammar.ex
|
starcoder
|
defmodule Mongo.Collection do
@moduledoc """
This module provides some boilerplate code for a better support of structs while using the
MongoDB driver:
* automatic load and dump function
* reflection functions
* type specification
* support for embedding one and many structs
* support for `after load` function
* support for `before dump` function
* support for id generation
* support for default values
* support for derived values
When using the MongoDB driver only maps and keyword lists are used to
represent documents.
If you would prefer to use structs instead of maps, to give the document a stronger meaning or to emphasize
its importance, you have to create a `defstruct` and fill it from the map manually:
defmodule Label do
defstruct name: "warning", color: "red"
end
iex> label_map = Mongo.find_one(:mongo, "labels", %{})
%{"name" => "warning", "color" => "red"}
iex> label = %Label{name: label_map["name"], color: label_map["color"]}
We have defined a module `Label` as a `defstruct`, then we fetch the first label document
from the collection `labels`. The function `find_one` returns a map. We convert the map manually and
get the desired struct.
If we want to save a new structure, we have to do the reverse. We convert the struct into a map:
iex> label = %Label{}
iex> label_map = %{"name" => label.name, "color" => label.color}
iex> {:ok, _} = Mongo.insert_one(:mongo, "labels", label_map)
Alternatively, you can also remove the `__struct__` key from `label`. The MongoDB driver automatically
converts the atom keys into strings.
iex> Map.drop(label, [:__struct__])
%{color: :red, name: "warning"}
If you use nested structures, the work becomes a bit more complex. In this case, you have to
convert the inner structures manually, too.
If you take a closer look at the necessary work, two basic functions can be derived:
* `load` Conversion of the map into a struct.
* `dump` Conversion of the struct into a map.
This module provides the necessary macros to automate this boilerplate code.
The above example can be rewritten as follows:
defmodule Label do
use Collection
document do
attribute :name, String.t(), default: "warning"
attribute :color, String.t(), default: :red
end
end
This results in the following module:
defmodule Label do
defstruct [name: "warning", color: "red"]
@type t() :: %Label{name: String.t(), color: String.t()}
def new()...
def load(map)...
def dump(%Label{})...
def __collection__(:attributes)...
def __collection__(:types)...
def __collection__(:collection)...
def __collection__(:id)...
end
You can now create new structs with the default values and use the conversion functions between maps and
structs:
iex(1)> x = Label.new()
%Label{color: :red, name: "warning"}
iex(2)> m = Label.dump(x)
%{color: :red, name: "warning"}
iex(3)> Label.load(m, true)
%Label{color: :red, name: "warning"}
The `load/2` function distinguishes between binary keys (`load(map, false)`) and atom keys (`load(map, true)`).
The default is `load(map, false)`:
iex(1)> m = %{"color" => :red, "name" => "warning"}
iex(2)> Label.load(m)
%Label{color: :red, name: "warning"}
If you would now expect atoms as keys, the result of the conversion is not correct in this case:
iex(3)> Label.load(m, true)
%Label{color: nil, name: nil}
The background is that MongoDB always returns binaries as keys, while structs use atoms as keys.
## Default and derived values
Attributes have two options:
* `default:` a value, or a function that is called when a new struct is created
* `derived:` `true` to indicate that this attribute should not be saved to the database
If you call `new/0` a new struct is returned filled with the default values. In case of a function the
function is called to use the return value as default.
attribute :created, DateTime.t(), default: &DateTime.utc_now/0
If you mark an attribute as a derived attribute (`derived: true`), then the dump function will
automatically remove the attribute from the struct for you, so these kinds of attributes won't
be saved in the database.
attribute :id, String.t(), derived: true
## Collections
In MongoDB, documents are written in collections. We can use the `collection/2` macro to create
a collection:
defmodule Card do
use Collection
@collection nil
collection "cards" do
attribute :title, String.t(), default: "new title"
end
end
The `collection/2` macro creates a collection that is basically similar to a document, where
an attribute for the ID is added automatically. Additionally the attribute `@collection` is assigned and
can be used as a constant in other functions.
In the example above, `@collection nil` merely suppresses an editor warning. The macro creates the following
expression: `@collection "cards"`. By default, the following attribute is created for the ID:
{:_id, BSON.ObjectId.t(), &Mongo.object_id/0}
where the default value is created via the function `&Mongo.object_id/0` when calling `new/0`:
iex> Card.new()
%Card{_id: #BSON.ObjectId<5ec3d04a306a5f296448a695>, title: "new title"}
Two additional reflection features are also provided:
iex> Card.__collection__(:id)
:_id
iex(3)> Card.__collection__(:collection)
"cards"
## MongoDB example
We define the following collection:
defmodule Card do
use Collection
@collection nil ## keeps the editor happy
@id nil
collection "cards" do
attribute :title, String.t(), default: "new title"
end
def insert_one(%Card{} = card) do
with map <- dump(card),
{:ok, _} <- Mongo.insert_one(:mongo, @collection, map) do
:ok
end
end
def find_one(id) do
:mongo
|> Mongo.find_one(@collection, %{@id => id})
|> load()
end
end
Then we can call the functions `insert_one` and `find_one`. Thereby
we always use the defined structs as parameters or get the
struct as result:
iex(1)> card = Card.new()
%Card{_id: #BSON.ObjectId<5ec3ed0d306a5f377943c23c>, title: "new title"}
iex(6)> Card.insert_one(card)
:ok
iex(2)> Card.find_one(card._id)
%Card{_id: #BSON.ObjectId<5ec3ecbf306a5f3779a5edaa>, title: "new title"}
## Id generator
In MongoDB it is common to use the attribute `_id` as the id. Its value is
an ObjectId generated by the MongoDB driver. This behavior can be specified by
the module attribute `@id_generator` when using `collection`.
The default setting is
{:_id, BSON.ObjectId.t(), &Mongo.object_id/0}
Now you can overwrite this tuple `{name, type, function}` as you like:
@id_generator false # no ID creation
@id_generator {:id, String.t(), &IDGenerator.next/0} # customized name and generator
@id_generator nil # use default: {:_id, BSON.ObjectId.t(), &Mongo.object_id/0}
### Embedded documents
So far we have only shown simple attributes. It gets more interesting when we
embed other structs. With the macros `embeds_one/3` and `embeds_many/3`, structs can be
added to the attributes:
## Example `embeds_one`
defmodule Label do
use Collection
document do
attribute :name, String.t(), default: "warning"
attribute :color, String.t(), default: :red
end
end
defmodule Card do
use Collection
collection "cards" do
attribute :title, String.t()
attribute :list, BSON.ObjectId.t()
attribute :created, DateString.t(), default: &DateTime.utc_now/0
attribute :modified, DateString.t(), default: &DateTime.utc_now/0
embeds_one :label, Label, default: &Label.new/0
end
end
If we now call `new/0`, we get the following structure:
iex(1)> Card.new()
%Card{
_id: #BSON.ObjectId<5ec3f0f0306a5f3aa5418a24>,
created: ~U[2020-05-19 14:45:04.141044Z],
label: %Label{color: :red, name: "warning"},
list: nil,
modified: ~U[2020-05-19 14:45:04.141033Z],
title: nil
}
## `after_load/1` and `before_dump/1` macros
Sometimes you may want to perform post-processing after loading a document, for example
to compute derived attributes. Conversely, before saving, you may want to
drop the derived attributes so that they are not saved to the database.
For this reason there are two macros, `after_load/1` and `before_dump/1`. You can
specify functions that are called after `load` or before `dump`:
## Example `embeds_many`
defmodule Board do
use Collection
collection "boards" do
attribute :id, String.t() ## derived attribute
attribute :title, String.t()
attribute :created, DateString.t(), default: &DateTime.utc_now/0
attribute :modified, DateString.t(), default: &DateTime.utc_now/0
embeds_many :lists, BoardList
after_load &Board.after_load/1
before_dump &Board.before_dump/1
end
def after_load(%Board{_id: id} = board) do
%Board{board | id: BSON.ObjectId.encode!(id)}
end
def before_dump(board) do
%Board{board | id: nil}
end
def new(title) do
new()
|> Map.put(:title, title)
|> Map.put(:lists, [])
|> after_load()
end
def store(board) do
with map <- dump(board),
{:ok, _} <- Mongo.insert_one(:mongo, @collection, map) do
:ok
end
end
def fetch(id) do
:mongo
|> Mongo.find_one(@collection, %{@id => id})
|> load()
end
end
In this example the attribute `id` is derived from the actual ID and stored as a binary.
This attribute is used often, so we store the converted ID to avoid repeating the conversion.
To avoid storing the derived attribute `id`, the `before_dump/1` function is called, which
removes the `id` from the struct:
iex(1)> board = Board.new("Vega")
%Board{
_id: #BSON.ObjectId<5ec3f802306a5f3ee3b71cf2>,
created: ~U[2020-05-19 15:15:14.374556Z],
id: "5ec3f802306a5f3ee3b71cf2",
lists: [],
modified: ~U[2020-05-19 15:15:14.374549Z],
title: "Vega"
}
iex(2)> Board.store(board)
:ok
iex(3)> Board.fetch(board._id)
%Board{
_id: #BSON.ObjectId<5ec3f802306a5f3ee3b71cf2>,
created: ~U[2020-05-19 15:15:14.374Z],
id: "5ec3f802306a5f3ee3b71cf2",
lists: [],
modified: ~U[2020-05-19 15:15:14.374Z],
title: "Vega"
}
If we call the document in the Mongo shell, we see that the attribute `id` was not stored there:
> db.boards.findOne({"_id" : ObjectId("5ec3f802306a5f3ee3b71cf2")})
{
"_id" : ObjectId("5ec3f802306a5f3ee3b71cf2"),
"created" : ISODate("2020-05-19T15:15:14.374Z"),
"lists" : [ ],
"modified" : ISODate("2020-05-19T15:15:14.374Z"),
"title" : "Vega"
}
"""
alias Mongo.Collection
@doc false
defmacro __using__(_) do
quote do
@before_dump_fun &Function.identity/1
@after_load_fun &Function.identity/1
@id_generator nil
import Collection, only: [document: 1, collection: 2]
Module.register_attribute(__MODULE__, :attributes, accumulate: true)
Module.register_attribute(__MODULE__, :derived, accumulate: true)
Module.register_attribute(__MODULE__, :types, accumulate: true)
Module.register_attribute(__MODULE__, :embed_ones, accumulate: true)
Module.register_attribute(__MODULE__, :embed_manys, accumulate: true)
Module.register_attribute(__MODULE__, :after_load_fun, [])
Module.register_attribute(__MODULE__, :before_dump_fun, [])
end
end
@doc """
Defines a struct as a collection with an id generator and a collection name.
Inside a collection block, each attribute is defined through the `attribute/3` macro.
"""
defmacro collection(name, [do: block]) do
make_collection(name, block)
end
@doc """
Defines a struct as a document without an id generator or a collection name. These documents
are meant to be embedded within collection structs.
Inside a document block, each attribute is defined through the `attribute/3` macro.
"""
defmacro document([do: block]) do
make_collection(nil, block)
end
defp make_collection(name, block) do
prelude =
quote do
@collection unquote(name)
@id_generator (case @id_generator do
nil -> {:_id, quote(do: BSON.ObjectId.t()), &Mongo.object_id/0}
false -> {nil, nil, nil}
other -> other
end)
@id elem(@id_generator, 0)
Collection.__id__(@id_generator, @collection)
try do
import Collection
unquote(block)
after
:ok
end
end
postlude =
quote unquote: false do
attribute_names = @attributes |> Enum.reverse |> Enum.map(&elem(&1, 0))
struct_attrs = (@attributes |> Enum.reverse |> Enum.map(fn {name, opts} -> {name, opts[:default]} end)) ++
(@embed_ones |> Enum.map(fn {name, _mod, opts} -> {name, opts[:default]} end)) ++
(@embed_manys |> Enum.map(fn {name, _mod, opts} -> {name, opts[:default]} end))
defstruct struct_attrs
Collection.__type__(@types)
def __collection__(:attributes), do: unquote(attribute_names)
def __collection__(:types), do: @types
def __collection__(:collection), do: unquote(@collection)
def __collection__(:id), do: unquote(elem(@id_generator, 0))
end
new_function =
quote unquote: false do
embed_ones = (@embed_ones |> Enum.map(fn {name, _mod, opts} -> {name, opts} end))
embed_manys = (@embed_manys |> Enum.map(fn {name, _mod, opts} -> {name, opts} end))
args = (@attributes ++ embed_ones ++ embed_manys)
|> Enum.map(fn {name, opts} -> {name, opts[:default]} end)
|> Enum.filter(fn {_name, fun} -> is_function(fun) end)
def new() do
%__MODULE__{unquote_splicing(Collection.struct_args(args))}
end
end
load_function =
quote unquote: false do
attribute_names = @attributes |> Enum.map(&elem(&1, 0))
embed_ones = @embed_ones
|> Enum.filter(fn {_name, mod, _opts} -> Collection.has_load_function?(mod) end)
|> Enum.map(fn {name, mod, _opts} -> {name, mod} end)
embed_manys = @embed_manys
|> Enum.filter(fn {_name, mod, _opts} -> Collection.has_load_function?(mod) end)
|> Enum.map(fn {name, mod, _opts} -> {name, mod} end)
def load(map, use_atoms \\ false)
def load(nil, _use_atoms) do
nil
end
def load(xs, use_atoms) when is_list(xs) do
Enum.map(xs, fn map -> load(map, use_atoms) end)
end
def load(map, false) when is_map(map) do
struct = Enum.reduce(unquote(attribute_names),
%__MODULE__{},
fn name, result ->
Map.put(result, name, map[Atom.to_string(name)])
end)
struct = unquote(embed_ones)
|> Enum.map(fn {name, mod} -> {name, mod.load(map[Atom.to_string(name)])} end)
|> Enum.reduce(struct, fn {name, doc}, acc -> Map.put(acc, name, doc) end)
unquote(embed_manys)
|> Enum.map(fn {name, mod} -> {name, mod.load(map[Atom.to_string(name)])} end)
|> Enum.reduce(struct, fn {name, doc}, acc -> Map.put(acc, name, doc) end)
|> @after_load_fun.()
end
def load(map, true) when is_map(map) do
struct = Enum.reduce(unquote(attribute_names),
%__MODULE__{},
fn name, result ->
Map.put(result, name, map[name])
end)
struct = unquote(embed_ones)
|> Enum.map(fn {name, mod} -> {name, mod.load(map[name], true)} end)
|> Enum.reduce(struct, fn {name, doc}, acc -> Map.put(acc, name, doc) end)
unquote(embed_manys)
|> Enum.map(fn {name, mod} -> {name, mod.load(map[name], true)} end)
|> Enum.reduce(struct, fn {name, doc}, acc -> Map.put(acc, name, doc) end)
|> @after_load_fun.()
end
end
dump_function =
quote unquote: false do
embed_ones = @embed_ones
|> Enum.filter(fn {_name, mod, _opts} -> Collection.has_dump_function?(mod) end)
|> Enum.map(fn {name, mod, _opts} -> {name, mod} end)
embed_manys = @embed_manys
|> Enum.filter(fn {_name, mod, _opts} -> Collection.has_dump_function?(mod) end)
|> Enum.map(fn {name, mod, _opts} -> {name, mod} end)
def dump(nil) do
nil
end
def dump(xs) when is_list(xs) do
Enum.map(xs, fn struct -> dump(struct) end)
end
def dump(%__MODULE__{} = struct) do
struct = unquote(embed_ones)
|> Enum.map(fn {name, mod} -> {name, mod.dump(Map.get(struct, name))} end)
|> Enum.reduce(struct, fn {name, doc}, acc -> Map.put(acc, name, doc) end)
struct = unquote(embed_manys)
|> Enum.map(fn {name, mod} -> {name, mod.dump(Map.get(struct, name))} end)
|> Enum.reduce(struct, fn {name, doc}, acc -> Map.put(acc, name, doc) end)
struct
|> Map.drop(unquote(@derived))
|> @before_dump_fun.()
|> Collection.dump()
end
end
quote do
unquote(prelude)
unquote(postlude)
unquote(new_function)
unquote(load_function)
unquote(dump_function)
end
end
@doc """
Inserts the specified `@id_generator` into the list of attributes. Calls `add_id/3`.
"""
defmacro __id__(id_generator, name) do
quote do
Collection.add_id(__MODULE__, unquote(id_generator), unquote(name))
end
end
@doc """
Inserts the specified `@id_generator` into the list of attributes.
"""
def add_id(_mod, _id_generator, nil) do
end
def add_id(_mod, {nil, _type, _fun}, _name) do
end
def add_id(mod, {id, type, fun}, _name) do
Module.put_attribute(mod, :types, {id, type})
Module.put_attribute(mod, :attributes, {id, default: fun})
end
@doc """
Inserts boilerplate code for the `@type` definition.
"""
defmacro __type__(types) do
quote bind_quoted: [types: types] do
@type t() :: %__MODULE__{unquote_splicing(types)}
end
end
@doc """
Returns true, if the Module has the `dump/1` function.
"""
def has_dump_function?(mod) do
Keyword.has_key?(mod.__info__(:functions), :dump)
end
@doc """
Returns true, if the Module has the `load/1` function.
"""
def has_load_function?(mod) do
Keyword.has_key?(mod.__info__(:functions), :load)
end
@doc """
Returns the default arguments for the struct. They are used to provide the
default values in the `new/0` call.
"""
def struct_args(args) when is_list(args) do
Enum.map(args, fn {arg, func} -> struct_args(arg, func) end)
end
def struct_args(arg, func) do
quote do
{unquote(arg), unquote(func).()}
end
end
@doc """
Defines the `before_dump/1` function.
"""
defmacro before_dump(fun) do
quote do
Module.put_attribute(__MODULE__, :before_dump_fun, unquote(fun))
end
end
@doc """
Defines the `after_load/1` function.
"""
defmacro after_load(fun) do
quote do
Module.put_attribute(__MODULE__, :after_load_fun, unquote(fun))
end
end
@doc """
Adds the struct to the `embeds_one` list. Calls `__embeds_one__`
"""
defmacro embeds_one(name, mod, opts \\ []) do
quote do
Collection.__embeds_one__(__MODULE__, unquote(name), unquote(mod), unquote(opts))
end
end
@doc """
Adds the struct to the `embeds_one` list.
"""
def __embeds_one__(mod, name, target, opts) do
Module.put_attribute(mod, :embed_ones, {name, target, opts})
end
@doc """
Adds the struct to the `embeds_many` list. Calls `__embeds_many__`
"""
defmacro embeds_many(name, mod, opts \\ []) do
quote do
type = unquote(Macro.escape({{:., [], [mod, :t]}, [], []}))
Collection.__embeds_many__(__MODULE__, unquote(name), unquote(mod), type, unquote(opts))
end
end
@doc """
Adds the struct to the `embeds_many` list.
"""
def __embeds_many__(mod, name, target, type, opts) do
Module.put_attribute(mod, :types, {name, type})
Module.put_attribute(mod, :embed_manys, {name, target, opts})
end
@doc """
Adds the attribute to the attributes list. It calls the `__attribute__/4` function.
"""
defmacro attribute(name, type, opts \\ []) do
quote do
Collection.__attribute__(__MODULE__, unquote(name), unquote(Macro.escape(type)), unquote(opts))
end
end
@doc """
Adds the attribute to the attributes list.
"""
def __attribute__(mod, name, type, opts) do
case opts[:derived] do
true -> Module.put_attribute(mod, :derived, name)
_ -> []
end
Module.put_attribute(mod, :types, {name, type})
Module.put_attribute(mod, :attributes, {name, opts})
end
def dump(%{__struct__: _} = struct) do
map = Map.from_struct(struct)
:maps.map(&dump/2, map) |> filter_nils()
end
def dump(map), do: :maps.map(&dump/2, map)
def dump(_key, value), do: ensure_nested_map(value)
defp ensure_nested_map(%{__struct__: Date} = data), do: data
defp ensure_nested_map(%{__struct__: DateTime} = data), do: data
defp ensure_nested_map(%{__struct__: NaiveDateTime} = data) , do: data
defp ensure_nested_map(%{__struct__: Time} = data), do: data
defp ensure_nested_map(%{__struct__: BSON.ObjectId} = data), do: data
defp ensure_nested_map(%{__struct__: _} = struct) do
map = Map.from_struct(struct)
:maps.map(&dump/2, map) |> filter_nils()
end
defp ensure_nested_map(list) when is_list(list), do: Enum.map(list, &ensure_nested_map/1)
defp ensure_nested_map(data), do: data
def filter_nils(map) when is_map(map) do
Enum.reject(map, fn {_key, value} -> is_nil(value) end)
|> Enum.into(%{})
end
def filter_nils(keyword) when is_list(keyword) do
Enum.reject(keyword, fn {_key, value} -> is_nil(value) end)
end
end
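# --- Example (editor's addition, not part of the original file) ---
# filter_nils/1 drops nil values from maps and keyword lists; dump/1 applies
# it recursively to structs, so nil attributes are not stored in the database.
%{a: 1} = Mongo.Collection.filter_nils(%{a: 1, b: nil})
[a: 1] = Mongo.Collection.filter_nils(a: 1, b: nil)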
|
lib/mongo/collection.ex
| 0.8812
| 0.603289
|
collection.ex
|
starcoder
|
defmodule RateLimitETS do
@moduledoc """
Abstract module for interacting with ETS tables that track state for all the scopes.
ETS (Erlang Term Storage) is an in-memory database for Erlang, it provides a robust in-memory
store for Elixir and Erlang objects. This in-memory property and the ownership model where
the table belongs to an individual process matches the behaviour we want from our rate
limiting library.
See https://elixirschool.com/en/lessons/specifics/ets/ for more details.
"""
@doc """
Global ETS state table name
"""
def state_table() do
:rate_limit_state_table
end
@doc """
Initialise the ETS state table.
"""
def init() do
:ets.new(state_table(), [:set, :named_table, :public])
:ok
end
@doc """
Cleanup state for the ETS state table.
"""
def cleanup() do
:ets.delete(state_table())
end
def init_counters(scope, limit, period) do
table_id = :ets.new(:scope_counters, [:set, :public])
:ets.insert(state_table(), {scope, table_id, limit + 1, period, TimeSource.now()})
end
def reset_counters(scope) do
[{^scope, table_id, limit, period, _}] = :ets.lookup(state_table(), scope)
true = :ets.delete_all_objects(table_id)
true = :ets.insert(state_table(), {scope, table_id, limit, period, TimeSource.now()})
:ok
end
def limit(scope) do
[{_, _, limit, _period, _}] = :ets.lookup(state_table(), scope)
limit
end
def update_counter(scope, key) do
case :ets.lookup(state_table(), scope) do
[{_scope, table_id, limit, period, previous_reset}] ->
next_reset = TimeSource.next_time(period, previous_reset)
count = :ets.update_counter(table_id, key, {2, 1, limit, limit}, {key, 0})
{count, limit, next_reset}
[] ->
:rate_not_set
end
end
def lookup_counter(scope, key) do
case :ets.lookup(state_table(), scope) do
[{_scope, table_id, limit, period, previous_reset}] ->
next_reset = TimeSource.next_time(period, previous_reset)
case :ets.lookup(table_id, key) do
[{_key, count}] ->
{count, limit, next_reset}
[] ->
{0, limit, next_reset}
end
[] ->
:rate_not_set
end
end
end
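# --- Example (editor's addition, not part of the original file) ---
# Allowing 5 calls per period for scope :api, keyed by user id. The period
# value is passed through to TimeSource.next_time/2, so its unit is whatever
# TimeSource expects (an assumption here).
:ok = RateLimitETS.init()
RateLimitETS.init_counters(:api, 5, 60)
case RateLimitETS.update_counter(:api, "user-42") do
# The stored limit is the configured limit + 1, so counts below it are within the allowance.
{count, limit, _next_reset} when count < limit -> :allow
{_count, _limit, next_reset} -> {:deny, next_reset}
:rate_not_set -> :allow
end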
|
lib/rate_limit_ets.ex
| 0.772101
| 0.561936
|
rate_limit_ets.ex
|
starcoder
|
defmodule ExState do
defstruct pos: 0, regs: nil, proc: nil, p_id: nil, messages_sent: 0, final_pid: nil
end
defmodule Day18 do
@moduledoc """
You discover a tablet containing some strange assembly code labeled simply "Duet". Rather than bother the sound card
with it, you decide to run the code yourself. Unfortunately, you don't see any documentation, so you're left to figure
out what the instructions mean on your own.
It seems like the assembly is meant to operate on a set of registers that are each named with a single letter and that
can each hold a single integer. You suppose each register should start with a value of 0.
There aren't that many instructions, so it shouldn't be hard to figure out what they do. Here's what you determine:
snd X plays a sound with a frequency equal to the value of X.
set X Y sets register X to the value of Y.
add X Y increases register X by the value of Y.
mul X Y sets register X to the result of multiplying the value contained in register X by the value of Y.
mod X Y sets register X to the remainder of dividing the value contained in register X by the value of Y (that is, it
sets X to the result of X modulo Y).
rcv X recovers the frequency of the last sound played, but only when the value of X is not zero. (If it is zero, the
command does nothing.)
jgz X Y jumps with an offset of the value of Y, but only if the value of X is greater than zero. (An offset of 2 skips
the next instruction, an offset of -1 jumps to the previous instruction, and so on.)
Many of the instructions can take either a register (a single letter) or a number. The value of a register is the
integer it contains; the value of a number is that number.
After each jump instruction, the program continues with the instruction to which the jump jumped. After any other
instruction, the program continues with the next instruction. Continuing (or jumping) off either end of the program
terminates it.
For example:
set a 1
add a 2
mul a a
mod a 5
snd a
set a 0
rcv a
jgz a -1
set a 1
jgz a -2
The first four instructions set a to 1, add 2 to it, square it, and then set it to itself modulo 5, resulting in a
value of 4.
Then, a sound with frequency 4 (the value of a) is played.
After that, a is set to 0, causing the subsequent rcv and jgz instructions to both be skipped (rcv because a is 0,
and jgz because a is not greater than 0).
Finally, a is set to 1, causing the next jgz instruction to activate, jumping back two instructions to another jump,
which jumps again to the rcv, which ultimately triggers the recover operation.
At the time the recover operation is executed, the frequency of the last sound played is 4.
What is the value of the recovered frequency (the value of the most recently played sound) the first time a rcv
instruction is executed with a non-zero value?
--- Part Two ---
As you congratulate yourself for a job well done, you notice that the documentation has been on the back of the
tablet this entire time. While you actually got most of the instructions correct, there are a few key differences.
This assembly code isn't about sound at all - it's meant to be run twice at the same time.
Each running copy of the program has its own set of registers and follows the code independently - in fact, the
programs don't even necessarily run at the same speed. To coordinate, they use the send (snd) and receive (rcv)
instructions:
snd X sends the value of X to the other program. These values wait in a queue until that program is ready to receive
them. Each program has its own message queue, so a program can never receive a message it sent.
rcv X receives the next value and stores it in register X. If no values are in the queue, the program waits for a
value to be sent to it. Programs do not continue to the next instruction until they have received a value. Values are
received in the order they are sent.
Each program also has its own program ID (one 0 and the other 1); the register p should begin with this value.
For example:
snd 1
snd 2
snd p
rcv a
rcv b
rcv c
rcv d
Both programs begin by sending three values to the other. Program 0 sends 1, 2, 0; program 1 sends 1, 2, 1. Then,
each program receives a value (both 1) and stores it in a, receives another value (both 2) and stores it in b, and
then each receives the program ID of the other program (program 0 receives 1; program 1 receives 0) and stores it in
c. Each program now sees a different value in its own copy of register c.
Finally, both programs try to rcv a fourth time, but no data is waiting for either of them, and they reach a deadlock.
When this happens, both programs terminate.
It should be noted that it would be equally valid for the programs to run at different speeds; for example, program
0 might have sent all three values and then stopped at the first rcv before program 1 executed even its first
instruction.
Once both of your programs have terminated (regardless of what caused them to do so), how many times did program 1
send a value?
7239
"""
def part_a() do
{:done, state} = run_cmds("res/day18.input", "0", {:a, self()})
String.to_integer(Map.get(state.regs, "a"))
end
def test_a() do
{:done, state} = run_cmds("res/day18_test.input", "0", {:a, self()})
Map.get(state.regs, "a")
end
def test_b() do
pid0=spawn(Day18, :run_cmds, ["res/day18_b_test.input", "0", {:b, self()}])
pid1=spawn(Day18, :run_cmds, ["res/day18_b_test.input", "1", {:b, self()}])
send pid0, pid1
send pid1, pid0
#IO.puts "WTF"
receive do
result ->
result
end
end
def part_b() do
pid0=spawn(Day18, :run_cmds, ["res/day18.input", "0", {:b, self()}])
pid1=spawn(Day18, :run_cmds, ["res/day18.input", "1", {:b, self()}])
send pid0, pid1
send pid1, pid0
receive do
result ->
result
end
end
def run_cmds(file, program_id, {:b, final_pid}) do
proc = receive do
pid ->
pid
end
File.read!(file)
|> String.split("\n")
|> Enum.map(fn (x) -> String.split(x, " ") end)
|> execute(%ExState{proc: proc, regs: %{"p" => program_id}, p_id: program_id, final_pid: final_pid}, :b)
end
def run_cmds(file, _, {:a, final_pid}) do
File.read!(file)
|> String.split("\n")
|> Enum.map(fn (x) -> String.split(x, " ") end)
|> execute(%ExState{final_pid: final_pid}, :a)
end
# Fetch the instruction at the current position and run it, looping until
# an instruction signals termination.
defp execute(instructions, state, problem) do
{stopflag, newstate} = Enum.at(instructions, state.pos)
|> do_instruction(state, problem)
case stopflag do
true ->
{:done, newstate}
false ->
execute(instructions, %{newstate | pos: newstate.pos + 1}, problem)
end
end
# Running past the last instruction terminates the program; program 1
# reports how many messages it sent before exiting.
defp do_instruction(nil, state, _) do
case state.p_id === "1" do
true -> send(state.final_pid, state.messages_sent)
false -> :ok
end
{true, state}
end
defp do_instruction(["set", reg1, reg2], state, _) do
{_, val2} = get_vals(nil, reg2, state)
{false, %{state | regs: Map.put(state.regs, reg1, val2)}}
end
defp do_instruction(["snd", reg1], state, :b) do
{val1, _} = get_vals(reg1, nil, state)
send(state.proc, val1)
{false, %{state | messages_sent: state.messages_sent + 1}}
end
defp do_instruction(["snd", reg1], state, :a) do
{val1, _} = get_vals(reg1, nil, state)
{false, %{state | messages_sent: val1}}
end
defp do_instruction(["rcv", reg1], state, :b) do
receive do
msg ->
{false, %{state | regs: Map.put(state.regs, reg1, msg)}}
after
1000 ->
# No message for one second: assume deadlock and terminate; program 1
# reports how many messages it sent.
case state.p_id === "1" do
true -> send(state.final_pid, state.messages_sent)
false -> :ok
end
{true, state}
end
end
defp do_instruction(["rcv", reg1], state, :a) do
{val1, _} = get_vals(reg1, nil, state)
case val1 do
"0" ->
{false, state}
_ ->
{true, %{state | regs: Map.put(state.regs, reg1, state.messages_sent)}}
end
end
defp do_instruction(["jgz", reg1, reg2], state, _) do
{val1, val2} = get_vals(reg1, reg2, state)
case val1 <= "0" or state.pos + String.to_integer(val2) < 0 do
true ->
{false, state}
false ->
{false, %{state | pos: state.pos + String.to_integer(val2) - 1}}
end
end
defp do_instruction([op, reg1, reg2], state, _) do
{val1, val2} = get_vals(reg1, reg2, state)
{false, %{state | regs: Map.put(state.regs, reg1, string_math(op, val1, val2))}}
end
defp string_math("add", string1, string2) do
Integer.to_string(String.to_integer(string1) + String.to_integer(string2))
end
defp string_math("mul", string1, string2) do
Integer.to_string(String.to_integer(string1) * String.to_integer(string2))
end
defp string_math("mod", string1, string2) do
Integer.to_string(rem(String.to_integer(string1), String.to_integer(string2)))
end
# A token comparing lexicographically below "a" is a numeric literal (digits
# and '-' sort before lowercase letters); otherwise it names a register.
defp get_vals(reg1, reg2, _state) when reg1 < "a" and reg2 < "a" do
{reg1, reg2}
end
defp get_vals(reg1, reg2, state) when reg1 < "a" do
{reg1, Map.get(state.regs, reg2, "0")}
end
defp get_vals(reg1, reg2, state) when reg2 < "a" do
{Map.get(state.regs, reg1, "0"), reg2}
end
defp get_vals(reg1, reg2, state) do
{Map.get(state.regs, reg1, "0"), Map.get(state.regs, reg2, "0")}
end
end
|
lib/day18.ex
| 0.70619
| 0.839175
|
day18.ex
|
starcoder
|
defmodule EWallet.TransactionConsumptionFetcher do
@moduledoc """
Handles any kind of retrieval/fetching for the TransactionConsumptionGate.
All functions here are only meant to load and format data related to
transaction consumptions.
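A minimal usage sketch (the `id` value here is illustrative):

    case TransactionConsumptionFetcher.get(id) do
      {:ok, consumption} -> consumption
      {:error, :transaction_consumption_not_found} -> nil
    end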
"""
alias EWalletDB.{Transaction, TransactionConsumption}
@spec get(String.t()) ::
{:ok, %TransactionConsumption{}}
| {:error, :transaction_consumption_not_found}
def get(nil), do: {:error, :transaction_consumption_not_found}
def get(id) do
%{id: id}
|> get_by()
|> return_consumption()
end
defp return_consumption(nil), do: {:error, :transaction_consumption_not_found}
defp return_consumption(consumption), do: {:ok, consumption}
@spec idempotent_fetch(String.t()) ::
{:ok, nil}
| {:idempotent_call, %TransactionConsumption{}}
| {:error, %TransactionConsumption{}, atom(), String.t()}
| {:error, %TransactionConsumption{}, String.t(), String.t()}
def idempotent_fetch(idempotency_token) do
%{idempotency_token: idempotency_token}
|> get_by()
|> return_idempotent()
end
defp get_by(attrs) do
TransactionConsumption.get_by(
attrs,
preload: [
:account,
:user,
:wallet,
:token,
:transaction_request,
:transaction,
:exchange_account,
:exchange_wallet
]
)
end
defp return_idempotent(nil), do: {:ok, nil}
defp return_idempotent(%TransactionConsumption{transaction: nil} = consumption) do
{:idempotent_call, consumption}
end
defp return_idempotent(%TransactionConsumption{transaction: transaction} = consumption) do
return_transaction_result(consumption, failed_transaction: Transaction.failed?(transaction))
end
defp return_transaction_result(consumption, failed_transaction: true) do
{code, description} = Transaction.get_error(consumption.transaction)
{:error, consumption, code, description}
end
defp return_transaction_result(consumption, failed_transaction: false) do
{:idempotent_call, consumption}
end
end
|
apps/ewallet/lib/ewallet/fetchers/transaction_consumption_fetcher.ex
| 0.80954
| 0.445891
|
transaction_consumption_fetcher.ex
|
starcoder
|
defmodule ExRabbitMQ.Config.Connection do
@moduledoc """
A structure holding the necessary information about a connection to a RabbitMQ node.
#### Connection configuration example:
```elixir
# :connection is this connection's configuration name
config :exrabbitmq, :my_connection_config,
# username for connecting to rabbitmq (distinct per connection configuration block)
username: "username",
# password for connecting to rabbitmq(distinct per connection configuration block)
password: "password",
# host where RabbitMQ is running
host: "rabbitmq.host.my",
# port where RabbitMQ is listening (optional, default: 5672)
port: 5672,
# the virtual host to connect to (optional, default: /)
vhost: "/",
# the connection's heartbeat RabbitMQ in seconds (optional, default: 20)
heartbeat: 20,
# the delay after which a connection will be re-attempted after having been
# dropped (optional, default: 2000)
reconnect_after: 2000,
# the maximum channels per connection (optional, default: 65535)
max_channels: 65535,
```
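Reading that configuration into a struct (a sketch; `:my_connection_config`
is the configuration name used above):

```elixir
connection_config = ExRabbitMQ.Config.Connection.get(:my_connection_config)
# => %ExRabbitMQ.Config.Connection{host: "rabbitmq.host.my", port: 5672, ...}
```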
"""
alias ExRabbitMQ.Config.Pool, as: PoolConfig
require Logger
defstruct [
:username,
:password,
:host,
:port,
:vhost,
:heartbeat,
:reconnect_after,
:max_channels,
:pool,
:cleanup_after
]
@type t :: %__MODULE__{
username: String.t(),
password: String.t(),
host: String.t(),
port: pos_integer,
vhost: String.t(),
heartbeat: pos_integer,
reconnect_after: pos_integer,
max_channels: pos_integer,
pool: PoolConfig.t(),
cleanup_after: pos_integer
}
@spec get(atom, atom | t()) :: t()
def get(app \\ :exrabbitmq, connection_config) do
connection_config
|> case do
connection_config when is_atom(connection_config) -> from_env(app, connection_config)
_ -> connection_config
end
|> merge_defaults()
|> validate_connection_config
end
@spec to_hash_key(t()) :: {binary, t()}
def to_hash_key(connection_config) do
key =
connection_config
|> :erlang.term_to_binary()
|> :erlang.md5()
{key, connection_config}
end
@spec get_weight_timeout() :: pos_integer | :infinity
def get_weight_timeout() do
Application.get_env(:exrabbitmq, :get_weight_timeout, :infinity)
end
# Returns a part of the `app` configuration section, specified with the
# `key` argument as a `ExRabbitMQ.Config.Connection` struct.
# If the `app` argument is omitted, it defaults to `:exrabbitmq`.
@spec from_env(atom, atom | module) :: t()
defp from_env(app, key) do
config = Application.get_env(app, key, [])
%__MODULE__{
username: config[:username],
password: config[:password],
host: config[:host],
port: config[:port],
vhost: config[:vhost],
heartbeat: config[:heartbeat],
reconnect_after: config[:reconnect_after],
max_channels: config[:max_channels],
pool: PoolConfig.get(config[:pool] || []),
cleanup_after: config[:cleanup_after]
}
end
# Merges an existing `ExRabbitMQ.Config.Connection` struct with the default values when its fields are `nil`.
defp merge_defaults(%__MODULE__{} = config) do
%__MODULE__{
username: config.username,
password: config.password,
host: config.host,
port: config.port || 5672,
vhost: config.vhost || "/",
heartbeat: config.heartbeat || 20,
reconnect_after: config.reconnect_after || 2_000,
max_channels: config.max_channels || 65_535,
pool: PoolConfig.get(config.pool || []),
cleanup_after: config.cleanup_after || 5_000
}
end
defp validate_connection_config(%__MODULE__{max_channels: max_channels} = config) do
if max_channels >= 1 and max_channels <= 65_535 do
config
else
Logger.warn("The maximum number of connections per channel is out of range 1 to 65535.")
%__MODULE__{config | max_channels: 65_535}
end
end
end
|
lib/ex_rabbit_m_q/config/connection.ex
| 0.876957
| 0.612049
|
connection.ex
|
starcoder
|
defmodule ProcessFun do
@moduledoc """
Ok, let's have a bit of play from `iex` to get a feel for processes. Let's start from `iex`.
```
iex -S mix
> pid = spawn(fn -> IO.puts("Hello matey") end)
```
You'll say "Hello matey" printed out. `pid` is the Process ID.
```
> Process.alive?(pid)
false
```
The process is not alive, as the function exited so the process finished.
```
> pid = spawn(fn ->
receive do
:go_go -> IO.puts("Goodbye, sweet world!")
end
end)
> Process.alive?(pid)
true
```
This process is now blocked, waiting for the message `:go_go` - it's still ALIVE! Nothing is printed out.
```
send(pid, :go_go)
```
The message "Goodbye, sweet world!" is now printed out.
```
Process.alive?(pid)
false
```
Now we have seen processes that do a thing, then die. Useful, but maybe not that useful. Let's use the functions in this
module to implement a counter.
```
> pid = spawn(ProcessFun, :simplest_counter, [0])
Process.alive?(pid)
true
```
"The count is 0" is output. This is a slightly different form of `spawn`, which takes 3 argements: the module, function, and arguments; it spawns
with `simplest_counter` in this module, passing in `0`. It is blocking, waiting for an `:increment` message, and alive.
```
send(pid, :increment)
Process.alive?(pid)
true
```
Now the count is 1, and the process is still alive. The function did not exit - it called itself with the new count. WE ARE CHANGING STATE IN A
FUNCTIONAL ENVIRONMENT!
If a process changes state in a forest but no-one is there, has it changed state? In this case we know because we are cheating and outputting the
count to `standard out`. Let's implement querying the output.
```
pid = spawn(ProcessFun, :queryable_counter, [0])
```
The `queryable_counter` is silent on standard out, so how do we know what the count is?
```
send(pid, {:query, self()})
```
`self()` returns the process id (pid) of the current process. In this case it is the pid of your iex session. We send it along as part
of the message to the counter process, so it knows where to send the results of the query. You can see in the function body the line
`send(pid, {:counter_value, value}))`. Typing `flush` displays and empties out your iex mailbox.
```
> flush()
{:counter_value, 0}
> send(pid, :increment)
> send(pid, {:query, self()})
> flush()
{:counter_value, 1}
```
The functions `start_queryable_counter/1`, `increment_queryable_counter/1`, and
`queryable_counter_value/1` wrap the spawning and message passing in a
cleaner API. The API is tested by ProcessFunTest (see test directory), which you can
run from the terminal by `mix test`. (Remember to exit from iex with `ctrl-c` first)
## EXERCISE
Implement `decrement_queryable_counter/1`. A test has been written for you; remove
the skip tag.
"""
@spec simplest_counter(integer()) :: no_return()
def simplest_counter(value) do
IO.puts("The count is #{value}")
receive do
:increment ->
simplest_counter(value + 1)
end
end
@spec queryable_counter(integer()) :: no_return()
def queryable_counter(value) do
receive do
:increment ->
queryable_counter(value + 1)
{:query, pid} ->
send(pid, {:counter_value, value})
queryable_counter(value)
end
end
@spec start_queryable_counter(integer()) :: pid()
def start_queryable_counter(value) do
spawn(__MODULE__, :queryable_counter, [value])
end
@spec queryable_counter_value(pid()) :: integer() | {:error, :timeout}
def queryable_counter_value(pid) do
send(pid, {:query, self()})
receive do
{:counter_value, value} ->
value
after
3_000 -> {:error, :timeout}
end
end
@spec increment_queryable_counter(pid()) :: :ok
def increment_queryable_counter(pid) do
send(pid, :increment)
:ok
end
@spec decrement_queryable_counter(pid()) :: :ok
def decrement_queryable_counter(_pid) do
## IMPLEMENT ME
:ok
end
end
|
lib/process_fun.ex
| 0.802013
| 0.948298
|
process_fun.ex
|
starcoder
|
defmodule AMQP.Exchange do
@moduledoc """
Functions to operate on Exchanges.
"""
import AMQP.Core
alias AMQP.Channel
@doc """
Declares an Exchange. The default Exchange type is `direct`.
AMQP 0-9-1 brokers provide four pre-declared exchanges:
* Direct exchange: (empty string) or `amq.direct`
* Fanout exchange: `amq.fanout`
* Topic exchange: `amq.topic`
* Headers exchange: `amq.match` (and `amq.headers` in RabbitMQ)
Besides the exchange name and type, the following options can be used:
* `durable`: If set, keeps the Exchange between restarts of the broker
* `auto_delete`: If set, deletes the Exchange once all queues unbind from it
* `passive`: If set, returns an error if the Exchange does not already exist
* `internal`: If set, the exchange may not be used directly by publishers, but
only when bound to other exchanges. Internal exchanges are used to
construct wiring that is not visible to applications.
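For example, assuming `chan` is an open `AMQP.Channel`:

    :ok = AMQP.Exchange.declare(chan, "my_events", :topic, durable: true)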
"""
def declare(%Channel{pid: pid}, exchange, type \\ :direct, options \\ [])
when type in [:direct, :topic, :fanout, :headers] do
exchange_declare =
exchange_declare(exchange: exchange,
type: Atom.to_string(type),
passive: Keyword.get(options, :passive, false),
durable: Keyword.get(options, :durable, false),
auto_delete: Keyword.get(options, :auto_delete, false),
internal: Keyword.get(options, :internal, false),
nowait: Keyword.get(options, :no_wait, false),
arguments: Keyword.get(options, :arguments, []))
exchange_declare_ok() = :amqp_channel.call pid, exchange_declare
:ok
end
@doc """
Deletes an Exchange by name. When an Exchange is deleted all bindings to it are
also deleted
"""
def delete(%Channel{pid: pid}, exchange, options \\ []) when is_binary(exchange) do
exchange_delete =
exchange_delete(exchange: exchange,
if_unused: Keyword.get(options, :if_unused, false),
nowait: Keyword.get(options, :no_wait, false))
exchange_delete_ok() = :amqp_channel.call pid, exchange_delete
:ok
end
@doc """
Binds an Exchange to another Exchange or a Queue using the
exchange.bind AMQP method (a RabbitMQ-specific extension)
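For example, assuming `chan` is an open channel and both exchanges exist:

    :ok = AMQP.Exchange.bind(chan, "destination_exchange", "source_exchange", routing_key: "my_key")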
"""
def bind(%Channel{pid: pid}, destination, source, options \\ [])
when is_binary(destination) and is_binary(source) do
exchange_bind =
exchange_bind(destination: destination,
source: source,
routing_key: Keyword.get(options, :routing_key, ""),
nowait: Keyword.get(options, :no_wait, false),
arguments: Keyword.get(options, :arguments, []))
exchange_bind_ok() = :amqp_channel.call pid, exchange_bind
:ok
end
@doc """
Unbinds an Exchange from another Exchange or a Queue using the
exchange.unbind AMQP method (a RabbitMQ-specific extension)
"""
def unbind(%Channel{pid: pid}, destination, source, options \\ []) do
exchange_unbind =
exchange_unbind(destination: destination,
source: source,
routing_key: Keyword.get(options, :routing_key, ""),
nowait: Keyword.get(options, :no_wait, false),
arguments: Keyword.get(options, :arguments, []))
exchange_unbind_ok() = :amqp_channel.call pid, exchange_unbind
:ok
end
@doc """
Convenience function to declare an Exchange of type `direct`.
"""
def direct(%Channel{} = channel, exchange, options \\ []) do
declare(channel, exchange, :direct, options)
end
@doc """
Convenience function to declare an Exchange of type `fanout`.
"""
def fanout(%Channel{} = channel, exchange, options \\ []) do
declare(channel, exchange, :fanout, options)
end
@doc """
Convenience function to declare an Exchange of type `topic`.
"""
def topic(%Channel{} = channel, exchange, options \\ []) do
declare(channel, exchange, :topic, options)
end
end
|
lib/amqp/exchange.ex
| 0.787441
| 0.552902
|
exchange.ex
|
starcoder
|
defmodule Turbo.Ecto do
@moduledoc """
A rich ecto component, including search sort and paginate. https://hexdocs.pm/turbo_ecto
## Example
### Category Table Structure
| Field | Type | Comment |
| ------------- | ------------- | --------- |
| `name` | string | |
### Product Table Structure
| Field | Type | Comment |
| ------------- | ------------- | --------- |
| `name` | string | |
| `body` | text | |
| `price` | float | |
| `category_id` | integer | |
| `available` | boolean | |
### Variant Table Structure
| Field | Type | Comment |
| ------------- | ------------- | --------- |
| `name` | string | |
| `price` | float | |
| `product_id` | integer | |
* Input Search
```elixir
url_query = http://localhost:4000/variants?q[product_name_or_name_like]=elixir
```
* Expect output:
```elixir
iex> params = %{"q" => %{"product_name_or_name_like" => "elixir"}}
iex> Turbo.Ecto.turboq(Turbo.Ecto.Variant, params)
#Ecto.Query<from v0 in Turbo.Ecto.Variant, left_join: p1 in assoc(v0, :product), where: like(p1.name, \"%elixir%\") or like(v0.name, \"%elixir%\"), limit: 10, offset: 0>
```
"""
alias Turbo.Ecto.Config, as: TConfig
alias Turbo.Ecto.{Builder, Utils}
alias Turbo.Ecto.Hooks.Paginate
@doc """
Returns a result and paginate info.
## Example
iex> params = %{"q" => %{"name_or_product_name_like" => "elixir", "price_eq" => "1"}, "s" => "updated_at+asc", "per_page" => 5, "page" => 1}
iex> Turbo.Ecto.turbo(Turbo.Ecto.Variant, params)
%{
paginate: %{current_page: 1, per_page: 5, next_page: nil, prev_page: nil, total_count: 0, total_pages: 0},
datas: []
}
"""
@spec turbo(Ecto.Query.t(), map(), keyword()) :: map()
def turbo(queryable, params, opts \\ []) do
build_opts = uniq_merge(opts, TConfig.defaults())
entry_name = Keyword.get(build_opts, :entry_name)
paginate_name = Keyword.get(build_opts, :paginate_name)
queryable = turboq(queryable, params)
%{
entry_name => handle_query(queryable, build_opts),
paginate_name => get_paginate(queryable, params, build_opts)
}
|> Utils.symbolize_keys()
end
defp uniq_merge(keyword1, keyword2) do
keyword2
|> Keyword.merge(keyword1)
|> Keyword.new()
end
@doc """
Returns processed queryable.
## Example
iex> params = %{"q" => %{"name_or_body_like" => "elixir"}, "s" => "updated_at+asc", "per_page" => 5, "page" => 1}
iex> Turbo.Ecto.turboq(Turbo.Ecto.Product, params)
#Ecto.Query<from p0 in Turbo.Ecto.Product, where: like(p0.name, \"%elixir%\") or like(p0.body, \"%elixir%\"), order_by: [asc: p0.updated_at], limit: 5, offset: 0>
"""
@spec turboq(Ecto.Query.t(), map()) :: Ecto.Query.t()
def turboq(queryable, params), do: Builder.run(queryable, params)
defp get_paginate(queryable, params, opts), do: Paginate.get_paginate(queryable, params, opts)
defp handle_query(queryable, opts) do
case Keyword.get(opts, :repo) do
nil -> raise "Expected key `repo` in `opts`, got #{inspect(opts)}"
repo -> apply(repo, :all, [queryable])
end
end
end
|
lib/turbo_ecto.ex
| 0.83772
| 0.85318
|
turbo_ecto.ex
|
starcoder
|
defmodule Cizen.Effects.Race do
@moduledoc """
An effect to run a race for the given effects.
## Anonymous race
perform %Race{
effects: [
effect1,
effect2
]
}
# If effect2 resolves faster than effect1 with :somevalue,
# the race returns :somevalue
## Named Race
perform %Race{
effects: [
effect1: effect1,
effect2: effect2
]
}
# If effect2 resolves faster than effect1 with :somevalue,
# the race returns {:effect2, :somevalue}
"""
@keys [:effects]
@enforce_keys @keys
defstruct @keys
alias Cizen.Effect
alias Cizen.Effects.Map
use Effect
@impl true
def init(id, %__MODULE__{effects: effects}) do
effects =
Enum.map(effects, fn
{name, effect} ->
%Map{
effect: effect,
transform: fn value -> {name, value} end
}
effect ->
effect
end)
do_init(id, effects)
end
# Initialize each effect in order. If any effect resolves immediately,
# short-circuit with its value; otherwise collect every effect's state.
defp do_init(_id, []), do: []
defp do_init(id, [effect | tail]) do
case Effect.init(id, effect) do
{:resolve, value} ->
{:resolve, value}
state ->
case do_init(id, tail) do
{:resolve, value} ->
{:resolve, value}
states ->
[state | states]
end
end
end
@impl true
def handle_event(id, event, _, state) do
do_handle_event(id, event, state)
end
defp do_handle_event(_id, _event, []), do: []
defp do_handle_event(id, event, [{effect, state} | tail]) do
case Effect.handle_event(id, event, effect, state) do
{:resolve, value} ->
{:resolve, value}
{:consume, state} ->
do_handle_event_tail(effect, state, id, event, tail, true)
state ->
do_handle_event_tail(effect, state, id, event, tail, false)
end
end
defp do_handle_event_tail(effect, state, id, event, tail, consumed) do
case do_handle_event(id, event, tail) do
{:resolve, value} ->
{:resolve, value}
{:consume, states} ->
{:consume, [{effect, state} | states]}
states ->
if consumed do
{:consume, [{effect, state} | states]}
else
[{effect, state} | states]
end
end
end
end
|
lib/cizen/effects/race.ex
| 0.796055
| 0.495361
|
race.ex
|
starcoder
|
defmodule Commanded.Middleware.Uniqueness do
@behaviour Commanded.Middleware
@moduledoc """
Commanded middleware that enforces uniqueness of selected command fields before
dispatch. Uniqueness checks are delegated to the cache adapter configured under
the `:commanded_uniqueness_middleware` application environment.
"""
@default_partition __MODULE__
defprotocol UniqueFields do
@fallback_to_any true
@doc """
Returns unique fields for a command as a list of tuples as:
`{field_name :: atom() | list(atom), error_message :: String.t(), owner :: term, opts :: keyword()}`,
where `opts` might contain none, one or multiple options:
`ignore_case: true` or `ignore_case: [:email, :username]` for multi-field entities - binary-based
fields are downcased before comparison
`:label` - use this atom as error label
`:is_unique` - `(term, String.t(), term, keyword() -> boolean())`
`:partition` - use to set custom partition name
`:no_owner` - if true then ignore owner and check `field_name` - `field_value` pair uniqueness
in a `partition` scope. `release_by_value/3` must be used to release key-value pair in such case.
The `:no_owner` option makes sense when it is necessary to ensure uniqueness in embedded structs.
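A hypothetical implementation for a `RegisterUser` command could look like:

    defimpl Commanded.Middleware.Uniqueness.UniqueFields, for: RegisterUser do
      def unique(%RegisterUser{user_id: user_id}),
        do: [{:email, "has already been taken", user_id, ignore_case: true}]
    end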
"""
def unique(command)
end
defimpl UniqueFields, for: Any do
def unique(_command), do: []
end
@doc """
Returns the default partition, which is `Commanded.Middleware.Uniqueness` (this module).
"""
def default_partition do
@default_partition
end
@doc """
Claims a `key`, `value`, `owner`, `partition` set
or reports that this combination has already been claimed.
If a `key`, `value`, `owner`, `partition` set has to be claimed
and an old value for the given owner exists, the old value is released first.
If `partition` is omitted, the default partition is used.
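For illustration (all values here are hypothetical):

    :ok = Commanded.Middleware.Uniqueness.claim(:email, "user@example.com", "user-123")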
"""
@spec claim(key :: term, value :: term, owner :: term, partition :: term) ::
:ok
| {:error, :already_exists}
| {:error, :unknown_error}
| {:error, :no_adapter}
def claim(key, value, owner, partition \\ @default_partition) do
case get_adapter() do
nil -> {:error, :no_adapter}
adapter -> adapter.claim(key, value, owner, partition)
end
end
@doc """
Claims a `key`, `value`, `partition` set
or reports that this combination has already been claimed.
If `partition` is omitted, the default partition is used.
"""
@spec claim_without_owner(key :: term, value :: term, partition :: term) ::
:ok
| {:error, :already_exists}
| {:error, :unknown_error}
| {:error, :no_adapter}
def claim_without_owner(key, value, partition \\ @default_partition) do
case get_adapter() do
nil -> {:error, :no_adapter}
adapter -> adapter.claim(key, value, partition)
end
end
@doc """
Releases a value record via `key`, `value`, `owner`, `partition` set
"""
@spec release(key :: term, value :: term, owner :: term, partition :: term) ::
:ok
| {:error, :claimed_by_another_owner}
| {:error, :unknown_error}
| {:error, :no_adapter}
def release(key, value, owner, partition \\ @default_partition) do
case get_adapter() do
nil -> {:error, :no_adapter}
adapter -> adapter.release(key, value, owner, partition)
end
end
@doc """
Releases a value record via `key`, `owner`, `partition` set
"""
@spec release_by_owner(key :: term, owner :: term, partition :: term) ::
:ok
| {:error, :unknown_error}
| {:error, :no_adapter}
def release_by_owner(key, owner, partition \\ @default_partition) do
case get_adapter() do
nil -> {:error, :no_adapter}
adapter -> adapter.release_by_owner(key, owner, partition)
end
end
@doc """
Releases a value record via `key`, `value`, `partition` set
"""
@spec release_by_value(key :: term, value :: term, partition :: term) ::
:ok
| {:error, :unknown_error}
| {:error, :no_adapter}
def release_by_value(key, value, partition \\ @default_partition) do
case get_adapter() do
nil -> {:error, :no_adapter}
adapter -> adapter.release_by_value(key, value, partition)
end
end
### Pipeline itself
alias Commanded.Middleware.Pipeline
import Pipeline
@doc false
def before_dispatch(%Pipeline{command: command} = pipeline) do
case ensure_uniqueness(command) do
:ok ->
pipeline
{:error, errors} ->
pipeline
|> respond({:error, :validation_failure, errors})
|> halt()
end
end
@doc false
def after_dispatch(pipeline), do: pipeline
@doc false
def after_failure(pipeline), do: pipeline
defp ensure_uniqueness(command) do
ensure_uniqueness(command, get_adapter())
end
defp ensure_uniqueness(_command, nil) do
require Logger
Logger.debug("No unique cache adapter defined in config! Assume the value is unique.",
label: "#{__MODULE__}"
)
:ok
end
defp ensure_uniqueness(command, adapter) do
command
|> UniqueFields.unique()
|> ensure_uniqueness(command, adapter, [], [])
end
defp ensure_uniqueness([record | rest], command, adapter, errors, to_release) do
{_, error_message, _, _} = record = expand_record(record)
label = get_label(record)
{errors, to_release} =
case claim_value(record, command, adapter) do
{key, value, owner, partition} ->
to_release = [{key, value, owner, partition} | to_release]
{errors, to_release}
_ ->
errors = [{label, error_message} | errors]
{errors, to_release}
end
ensure_uniqueness(rest, command, adapter, errors, to_release)
end
defp ensure_uniqueness([], _command, _adapter, [], _to_release), do: :ok
defp ensure_uniqueness([], _command, _adapter, errors, []), do: {:error, errors}
defp ensure_uniqueness([], command, adapter, errors, to_release) do
Enum.each(to_release, &release(&1, adapter))
ensure_uniqueness([], command, adapter, errors, [])
end
defp claim_value({fields, _, owner, opts}, command, adapter)
when is_list(fields) do
value =
fields
|> Enum.reduce([], fn field_name, acc ->
ignore_case = Keyword.get(opts, :ignore_case)
[get_field_value(command, field_name, ignore_case) | acc]
end)
key = Module.concat(fields)
command = %{key => value}
entity = {key, "", owner, opts}
claim_value(entity, command, adapter)
end
defp claim_value({field_name, _, owner, opts}, command, adapter)
when is_atom(field_name) do
ignore_case = Keyword.get(opts, :ignore_case)
value = get_field_value(command, field_name, ignore_case)
partition = get_partition(opts, command)
require Logger
claim_result =
case Keyword.get(opts, :no_owner, false) do
false ->
adapter.claim(field_name, value, owner, partition)
true ->
adapter.claim(field_name, value, partition)
_ ->
raise ArgumentError,
"Commanded.Middleware.Uniqueness.UniqueFields :no_owner option can only be either true or false"
end
case claim_result do
:ok ->
case external_check(field_name, value, owner, command, opts) do
true ->
{field_name, value, owner, partition}
_ ->
release({field_name, value, owner, partition}, adapter)
{:error, :external_check_failed}
end
error ->
error
end
end
defp release({key, value, owner, partition}, adapter),
do: adapter.release(key, value, owner, partition)
defp external_check(field_name, value, owner, command, opts) when is_list(opts),
do: external_check(field_name, value, owner, command, get_external_checker(opts))
defp external_check(field_name, value, owner, _command, {checker, opts})
when is_function(checker, 4),
do: checker.(field_name, value, owner, opts)
defp external_check(_field_name, _value, _owner, _command, {nil, _}), do: true
defp external_check(_field_name, _value, _owner, %{__struct__: module}, _opts),
do:
raise(
"#{__MODULE__}: The ':is_unique' option for the #{module} command has incorrect value. It should be only a function with 4 arguments"
)
defp expand_record({one, two, three}), do: {one, two, three, []}
defp expand_record(entity), do: entity
defp get_field_value(command, field_name, ignore_case)
defp get_field_value(command, field_name, ignore_case) when is_list(ignore_case),
do: get_field_value(command, field_name, Enum.any?(ignore_case, &(&1 == field_name)))
defp get_field_value(command, field_name, field_name),
do: get_field_value(command, field_name, true)
defp get_field_value(command, field_name, true),
do: command |> get_field_value(field_name, false) |> downcase()
defp get_field_value(command, field_name, _), do: Map.get(command, field_name)
defp downcase(value) when is_binary(value), do: String.downcase(value)
defp downcase(value), do: value
defp get_label({entity, _, _, opts}), do: Keyword.get(opts, :label, entity)
defp get_external_checker(opts), do: {Keyword.get(opts, :is_unique), opts}
defp get_partition(opts, command), do: get_partition(opts, command, use_command_as_partition?())
defp get_partition(opts, %command{}, true), do: Keyword.get(opts, :partition, command)
defp get_partition(opts, _, _), do: Keyword.get(opts, :partition, default_partition())
defp get_adapter, do: Application.get_env(:commanded_uniqueness_middleware, :adapter)
defp use_command_as_partition?,
do: Application.get_env(:commanded_uniqueness_middleware, :use_command_as_partition)
end
|
lib/commanded/middleware/uniqueness.ex
| 0.847842
| 0.502991
|
uniqueness.ex
|
starcoder
|
# import Scenic.Primitives, only: [group: 3, rect: 3]
# import FloUI.Scrollable.Components, only: [scroll_bars: 3]
# alias Scenic.Graph
# alias Scenic.Primitive
# alias Scenic.Math.Vector2
# alias FloUI.Scrollable.Hotkeys
# alias FloUI.Scrollable.Drag
# alias FloUI.Scrollable.Wheel
# alias FloUI.Scrollable.ScrollBars
# alias FloUI.Scrollable.Acceleration
# alias FloUI.Scrollable.PositionCap
# @moduledoc """
# ## NOTICE
# This component was originally created by zwetsloot. I have updated it to work with scenic 0.11.0, and added :cursor_scroll event support.
# Work has been started to write this as a snap component, but this will work as is in snap templates.
# https://hex.pm/packages/scenic_scrollable
# The scrollable component offers a way to show part of a content group bounded by a fixed rectangle or frame, and change the visible part of the content without displacing the bounded rectangle by scrolling.
# The scrollable component offers three ways to scroll, which can be used in conjunction:
# - The content can be clicked and dragged directly using a mouse.
# - Hotkeys can be set for up, down, left and right scroll directions.
# - A horizontal and a vertical scroll bar can be set up.
# Note that for the hotkeys to work, the scrollable component has to catch focus first by clicking it once with the left mouse button.
# ## Data
# `t:Scenic.Scrollable.settings/0`
# To initialize a scrollable component, a map containing `frame` and `content` elements, and a builder function are required. Further customization can be provided with optional styles.
# ### Frame
# The frame contains information about the size of the fixed rectangle shaped bounding box. It is a tuple containing the width as first element, and height as second element.
# ### Content
# The content contains information about the size and offset of the content. The offset can be used to adjust the limits of where the content can be scrolled to, and can for example be of use when the content position looks off in its {0, 0} starting position. If no offset is required, the content can be passed as a tuple containing the width as first element, and height as second element. If an offset is used, the content can be passed as a `t:Scenic.Scrollable.rect/0`, which is a map containing `x`, `y`, `width` and `height` elements.
# ## Builder
# `t:Scenic.Scrollable.builder/0`
# In addition to the required data, a scrollable component requires a builder, similar to the `Scenic.Primitive.Group` primitive. The builder is a function that takes a graph, and should return a graph with the necessary components attached to it that form the content of the scrollable component.
# ## Styles
# `t:Scenic.Scrollable.styles/0`
# Similar to the `Scenic.Primitive.Group` primitive, any style can be passed to the scrollable component, which will be passed on to the underlying components. In addition, the following styles specific to the scrollable component can be provided.
# ### scroll_position
# `t:Scenic.Scrollable.v2/0`
# The starting position of the scrollable content. This does not influence the limits to where the content can be scrolled to.
# ### scroll_acceleration
# `t:Scenic.Scrollable.Acceleration.settings/0`
# Settings regarding sensitivity of the scroll functionality. The settings are passed in a map with the following elements:
# - acceleration: number
# - mass: number
# - counter_pressure: number
# The higher number given for the acceleration, the faster the scroll movement gains speed. The default value is 20.
# The higher number given for the mass, the slower the scroll movement gains speed, and the faster it loses speed. The default value is 1.
# The higher number given for counter_pressure, the lower the maximum scroll speed, and the faster the scroll movement loses speed after the user input has stopped. The default value is 0.1.
# ### scroll_hotkeys
# `t:Scenic.Scrollable.Hotkeys.settings/0`
# A hotkey can be provided for every scroll direction to enable scrolling using the keyboard. The hotkey settings can be passed in a map with the following elements.
# - up: `t:String.t/0`
# - down: `t:String.t/0`
# - left: `t:String.t/0`
# - right: `t:String.t/0`
# The passed string can be the letter of the intended key, such as "w" or "s", or the description of a special key, such as the arrow keys "up", "down", "left" or "right".
# ### scroll_fps
# number
# Specifies the times per second the scroll content position is recalculated when it is scrolling. For environments with limited resources, it might be prudent to set a lower value than the default 30.
# ### scroll_drag
# `t:Scenic.Scrollable.Drag.settings/0`
# Options for enabling scrolling by directly dragging the content using a mouse. Buttons events on the scrollable content will take precedence over the drag functionality. Drag settings are passed in a map with the following elements:
# - mouse_buttons: [`t:Scenic.Scrollable.Drag.mouse_button/0`]
# The list of mouse buttons specifies with which mouse button the content can be dragged. Available mouse buttons are `:left`, `:right` and `:middle`. By default, the drag functionality is disabled.
# ### scroll_bar_thickness
# number
# Specify the thickness of both scroll bars.
# ### scroll_bar
# `t:Scenic.Scrollable.ScrollBar.styles/0`
# Specify the styles for both horizontal and vertical scroll bars. If different styles for each scroll bar are desired, use the `vertical_scroll_bar` and `horizontal_scroll_bar` options instead. The following styles are supported:
# - scroll_buttons: boolean
# - scroll_bar_theme: map
# - scroll_bar_radius: number
# - scroll_bar_border: number
# - scroll_drag: `t:Scenic.Scrollable.Drag.settings/0`
# The scroll_buttons boolean can be used to specify whether the scroll bar should contain buttons for scrolling, in addition to the scroll bar slider. The scroll buttons are not shown by default.
# A theme can be passed using the scroll_bar_theme element to provide a set of colors for the scroll bar. For more information on themes, see the `Scenic.Primitive.Style.Theme` module. The default theme is `:light`.
# The scroll bars rounding and border can be adjusted using the scroll_bar_radius and scroll_bar_border elements respectively. The default values are 3 and 1.
# The scroll_drag settings can be provided in the same form the scrollable components scroll_drag style is provided, and can be used to specify by which mouse button the scroll bar slider can be dragged. By default, the `:left`, `:right` and `:middle` buttons are all enabled.
# ### horizontal_scroll_bar
# `t:Scenic.Scrollable.ScrollBar.styles/0`
# Specify styles for the horizontal scroll bar only. The available styles are exactly the same as explained in the above scroll_bar style section.
# ### vertical_scroll_bar
# `t:Scenic.Scrollable.ScrollBar.styles/0`
# Specify styles for the vertical scroll bar only. The available styles are exactly the same as explained in the above scroll_bar style section.
# ## Examples
# iex> graph = Scenic.Scrollable.Components.scrollable(
# ...> Scenic.Graph.build(),
# ...> %{
# ...> frame: {200, 400},
# ...> content: %{x: 0, y: 10, width: 400, height: 800}
# ...> },
# ...> fn graph ->
# ...> Scenic.Primitives.text(graph, "scrollable text")
# ...> end,
# ...> [id: :scrollable_component_1]
# ...> )
# ...> graph.primitives[1].id
# :scrollable_component_1
# iex> graph = Scenic.Scrollable.Components.scrollable(
# ...> Scenic.Graph.build(),
# ...> %{
# ...> frame: {200, 400},
# ...> content: %{x: 0, y: 10, width: 400, height: 800}
# ...> },
# ...> fn graph ->
# ...> Scenic.Primitives.text(graph, "scrollable text")
# ...> end,
# ...> [
# ...> scroll_position: {-10, -50},
# ...> scroll_acceleration: %{
# ...> acceleration: 15,
# ...> mass: 1.2,
# ...> counter_pressure: 0.2
# ...> },
# ...> scroll_hotkeys: %{
# ...> up: "w",
# ...> down: "s",
# ...> left: "a",
# ...> right: "d"
# ...> },
# ...> scroll_fps: 15,
# ...> scroll_drag: %{
# ...> mouse_buttons: [:left]
# ...> },
# ...> scroll_bar_thickness: 15,
# ...> scroll_bar: [
# ...> scroll_buttons: true,
# ...> scroll_bar_theme: Scenic.Primitive.Style.Theme.preset(:dark)
# ...> ],
# ...> translate: {50, 50},
# ...> id: :scrollable_component_2
# ...> ]
# ...> )
# ...> graph.primitives[1].id
# :scrollable_component_2
# """
# @typedoc """
# Data structure representing a vector 2, in the form of an {x, y} tuple.
# """
# @type v2 :: Scenic.Math.vector_2()
# @typedoc """
# Data structure representing a rectangle.
# """
# @type rect :: %{
# x: number,
# y: number,
# width: number,
# height: number
# }
# @typedoc """
# A map with settings with which to initialize a `Scenic.Scrollable` component.
# - frame: The size as {width, height} of the frame or viewport through which the content is visible.
# - content: The size as {width, height}, or size and offset as `t:Scenic.Scrollable.rect/0` of the scrollable content.
# The offset affects the limits of the contents position. To set the contents current position only, pass in the :scroll_position option, as defined in the `t:Scenic.Scrollable.style/0` type.
# """
# @type settings :: %{
# frame: v2,
# content: v2 | rect
# }
# @typedoc """
# The optional styles with which the scrollable component can be customized. See this modules top section for a more detailed explanation of every style.
# """
# @type style ::
# {:scroll_position, v2}
# | {:scroll_acceleration, Acceleration.settings()}
# | {:scroll_hotkeys, Hotkeys.settings()}
# | {:scroll_fps, number}
# | {:scroll_drag, Drag.settings()}
# | {:scroll_bar_thickness, number}
# | {:scroll_bar, Scenic.Scrollable.ScrollBar.styles()}
# | {:horizontal_scroll_bar, Scenic.Scrollable.ScrollBar.styles()}
# | {:vertical_scroll_bar, Scenic.Scrollable.ScrollBar.styles()}
# | {:translate, v2}
# | {:id, term}
# # enable any input to be passed to the content
# | {atom, term}
# # TODO bounce
# @typedoc """
# A collection of optional styles with which the scrollable component can be customized. See `t:Scenic.Scrollable.style/0` and this modules top section for more information.
# """
# @type styles :: [style]
# @typedoc """
# The states a scrollable component can be in.
# - scrolling: the scrollable component is currently being scrolled using a scroll button or hotkey
# - dragging: the scrollable component is currently being dragged, by using a scroll bar slider, or by dragging the content directly using a mouse button
# - cooling_down: the scrollable component is still moving due to previous user input, but the user is not giving any scroll related input at the moment.
# - idle: the scrollable component is not moving
# """
# @type scroll_state ::
# :scrolling
# | :wheel
# | :dragging
# | :cooling_down
# | :idle
# @typedoc """
# The builder function used to setup the content of the scrollable component. The builder function works the same as the builder function used for setting up `Scenic.Primitive.Group` primitives.
# """
# @type builder :: (Graph.t() -> Graph.t())
# @typedoc """
# The state with which the scrollable components GenServer is running.
# """
# @type t :: %__MODULE__{
# id: any,
# graph: Graph.t(),
# frame: rect,
# content: rect,
# scroll_position: v2,
# fps: number,
# scrolling: scroll_state,
# drag_state: Drag.t(),
# wheel_state: Wheel.t(),
# scroll_bars: {:some, ScrollBars.t()} | :none,
# acceleration: Acceleration.t(),
# hotkeys: Hotkeys.t(),
# position_caps: PositionCap.t(),
# focused: boolean,
# animating: boolean
# }
# defstruct id: :scrollable,
# graph: Graph.build(),
# frame: %{x: 0, y: 0, width: 0, height: 0},
# content: %{x: 0, y: 0, width: 0, height: 0},
# scroll_position: {0, 0},
# fps: 30,
# scrolling: :idle,
# drag_state: %Drag{},
# wheel_state: %Wheel{},
# scroll_bars: :none,
# acceleration: %Acceleration{},
# hotkeys: %Hotkeys{},
# position_caps: %PositionCap{},
# focused: false,
# animating: false
# @default_scroll_position {0, 0}
# @default_fps 30
# # CALLBACKS
# @impl Scenic.Component
# def validate(%{content: %{width: content_width, height: content_height, x: x, y: y}} = input)
# when is_number(x) and is_number(y) do
# validate(%{input | content: {content_width, content_height}})
# |> ResultEx.map(fn _ -> input end)
# end
# def validate(
# %{
# frame: {frame_width, frame_height},
# content: {content_width, content_height},
# builder: builder
# } = input
# )
# when is_number(frame_width) and is_number(frame_height) and is_number(content_width) and
# is_number(content_height) and is_function(builder) do
# {:ok, input}
# end
# def validate(_), do: :invalid_input
# @impl Scenic.Scene
# def init(scene, %{content: {content_width, content_height}} = input, opts) do
# init(
# scene,
# %{input | content: %{x: 0, y: 0, width: content_width, height: content_height}},
# opts
# )
# end
# def init(scene, %{frame: {frame_width, frame_height}, content: content, builder: builder}, opts) do
# styles = opts || %{}
# {frame_x, frame_y} = styles[:translate] || {0, 0}
# scroll_position = styles[:scroll_position] || @default_scroll_position
# %__MODULE__{
# id: opts[:id] || :scrollable,
# frame: %{x: frame_x, y: frame_y, width: frame_width, height: frame_height},
# content: content,
# scroll_position: Vector2.add(scroll_position, {content.x, content.y}),
# fps: styles[:scroll_fps] || @default_fps,
# acceleration: Acceleration.init(styles[:scroll_acceleration]),
# hotkeys: Hotkeys.init(styles[:scroll_hotkeys]),
# drag_state: Drag.init(styles[:scroll_drag])
# }
# |> init_position_caps
# |> init_graph(scene, builder, styles)
# end
# def handle_update(
# %{content: content, builder: builder},
# opts,
# %{assigns: %{state: state}} = scene
# ) do
# graph =
# state.graph
# |> Graph.delete(:content)
# |> Graph.delete(:frame)
# state =
# %{state | content: content, graph: graph}
# |> init_content(builder, opts)
# |> update_content_size
# |> init_position_caps()
# scene =
# scene
# |> assign(state: state)
# scene = update(state, scene)
# {:noreply, scene}
# end
# @impl Scenic.Scene
# def handle_input(
# {:cursor_scroll, {{offset_x, offset_y}, _} = scroll_pos},
# :input_capture,
# %{assigns: %{state: state}} = scene
# ) do
# OptionEx.map(state.scroll_bars, & &1.pid)
# |> OptionEx.map(&GenServer.cast(&1, {:update_cursor_scroll, scroll_pos}))
# {:noreply, assign(scene, state: state)}
# end
# def handle_input({:key, {"escape", :release, _}}, _, %{assigns: %{state: state}} = scene) do
# state = release_focus(state, scene)
# {:noreply, assign(scene, state: state)}
# end
# def handle_input(
# {:key, {key, :press, _}},
# _,
# %{assigns: %{state: state}} = scene
# ) do
# state = Map.update!(state, :hotkeys, &Hotkeys.handle_key_press(&1, key))
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_input(
# {:key, {key, :release, _}},
# _,
# %{assigns: %{state: state}} = scene
# ) do
# state = Map.update!(state, :hotkeys, &Hotkeys.handle_key_release(&1, key))
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_input(_input, _, scene) do
# {:noreply, scene}
# end
# @impl Scenic.Scene
# def handle_event(
# {:scroll_bars_initialized, _id, scroll_bars_state},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# state = %{state | scroll_bars: OptionEx.return(scroll_bars_state)}
# {:noreply, assign(scene, state: state)}
# end
# def handle_event(
# {:scroll_bars_position_change, _id, %{scroll_state: :idle} = scroll_bars_state},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# # TODO move this position update to apply force?
# send_parent_event(scene, {:scroll_bars_position, ScrollBars.new_position(scroll_bars_state)})
# state =
# ScrollBars.new_position(scroll_bars_state)
# |> OptionEx.map(&Vector2.add(&1, {state.content.x, state.content.y}))
# |> OptionEx.map(&%{state | scroll_position: &1})
# |> OptionEx.or_else(state)
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_event(
# {:scroll_bars_position_change, _id, scroll_bars_state},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# state = %{state | scroll_bars: OptionEx.return(scroll_bars_state)}
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_event(
# {:scroll_bars_scroll_end, _id, scroll_bars_state},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# state = %{state | scroll_bars: OptionEx.return(scroll_bars_state)}
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_event(
# {:scroll_bars_button_pressed, _id, scroll_bars_state},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# state = %{state | scroll_bars: OptionEx.return(scroll_bars_state)}
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_event(
# {:scroll_bars_button_released, _id, scroll_bars_state},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# state = %{state | scroll_bars: OptionEx.return(scroll_bars_state)}
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_event(
# {:cursor_scroll_started, _id, scroll_bars_state, wheel_state},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# state = %{state | scroll_bars: OptionEx.return(scroll_bars_state), wheel_state: wheel_state}
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_event(
# {:cursor_scroll_stopped, _id, scroll_bars_state, wheel_state},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# state = %{state | scroll_bars: OptionEx.return(scroll_bars_state), wheel_state: wheel_state}
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_event(
# {:update_scroll_pos, {x, y}, {check_offset_x, check_offset_y} = check_offset},
# _from,
# %{assigns: %{state: state}} = scene
# ) do
# new_pos = Vector2.sub({-1 * x, -1 * y}, {state.content.x, state.content.y})
# pos_offset = Vector2.sub(state.scroll_position, new_pos)
# check_vector = Vector2.add(pos_offset, check_offset)
# if not Vector2.in_bounds?(check_vector, {0, 0}, {state.frame.width, state.frame.height}) do
# state =
# %{state | scroll_position: new_pos}
# |> init_position_caps
# scene = update(state, scene)
# {:noreply, scene}
# else
# {:noreply, scene}
# end
# end
# def handle_event(event, _, scene) do
# {:cont, event, scene}
# end
# # no callback on the `Scenic.Scene` and no GenServer @behaviour, so impl will not work
# @spec handle_info(request :: term(), state :: term()) :: {:noreply, state :: term()}
# def handle_info(:tick, %{assigns: %{state: state}} = scene) do
# state = %{state | animating: false}
# scene = update(state, scene)
# {:noreply, scene}
# end
# def handle_info({:update_content, content}, %{assigns: %{state: state}} = scene) do
# state =
# %{state | content: content}
# |> update_content_size
# |> init_position_caps()
# scene = update(state, scene)
# {:noreply, scene}
# end
# # no callback on the `Scenic.Scene` and no GenServer @behaviour, so impl will not work
# @spec handle_call(request :: term(), GenServer.from(), state :: term()) ::
# {:reply, reply :: term, new_state :: term}
# def handle_call(msg, _, scene) do
# {:reply, {:error, {:unexpected_message, msg}}, scene}
# end
# # INITIALIZERS
# defp init_graph(state, scene, builder, styles) do
# state =
# state
# |> init_input_capture
# |> init_content(builder, styles)
# |> init_scroll_bars(styles)
# scene =
# scene
# |> assign(state: state)
# |> push_graph(state.graph)
# send_parent_event(scene, {:register_scrollbar, self()})
# {:ok, scene}
# end
# @spec init_input_capture(t) :: t
# defp init_input_capture(%{graph: graph, frame: frame} = state) do
# graph
# |> rect({frame.width, frame.height},
# translate: {frame.x, frame.y},
# id: :input_capture,
# input: :cursor_scroll
# )
# |> (&%{state | graph: &1}).()
# end
# @spec init_content(t, (Graph.t() -> Graph.t()), styles) :: t
# defp init_content(%{graph: graph, frame: frame, content: content} = state, builder, styles) do
# MEMO: stacking up groups and scenes will result in reaching the cap pretty fast when nesting scrollable elements
# group(
# graph,
# &group(
# &1,
# builder,
# Keyword.merge(styles,
# id: :content,
# translate: Vector2.add(state.scroll_position, {content.x, content.y})
# )
# ),
# id: :frame,
# scissor: {frame.width, frame.height},
# translate: {frame.x, frame.y}
# )
# |> (&%{state | graph: &1}).()
# end
# @spec init_scroll_bars(t, styles) :: t
# defp init_scroll_bars(%{graph: graph} = state, styles) do
# update_scroll_bars(graph, state, styles)
# end
# @spec init_position_caps(t) :: t
# defp init_position_caps(
# %{
# frame: %{width: frame_width, height: frame_height},
# content: %{x: x, y: y, width: content_width, height: content_height}
# } = state
# ) do
# min = {x + frame_width - content_width, y + frame_height - content_height}
# max = {x, y}
# position_cap = PositionCap.init(%{min: min, max: max})
# Map.put(state, :position_caps, position_cap)
# |> Map.update(:scroll_position, {0, 0}, &PositionCap.cap(position_cap, &1))
# end
# # UPDATERS
# @spec update(t, scene :: any) :: t
# defp update(state, scene) do
# state =
# state
# |> update_scroll_state
# |> update_input_capture_range
# |> apply_force
# |> translate
# |> update_scroll_bars
# |> tick
# assign(scene, state: state)
# |> push_graph(state.graph)
# end
# @spec update_scroll_bars(t) :: t
# defp update_scroll_bars(state) do
# # TODO refactor?
# # MEMO due to performance issues, I am directly calling to the scroll bars, rather than modifying the graph. There might be a cleaner way to do this.
# pos = Vector2.sub(state.scroll_position, {state.content.x, state.content.y})
# OptionEx.map(state.scroll_bars, & &1.pid)
# |> OptionEx.map(&GenServer.call(&1, {:update_scroll_position, pos}))
# state
# end
# @spec update_scroll_bars(Graph.t() | Primitive.t(), t, styles) :: t
# defp update_scroll_bars(graph_or_primitive, %{frame: frame} = state, styles) do
# styles =
# Enum.into(styles, [])
# |> Keyword.take([:scroll_bar, :horizontal_scroll_bar, :vertical_scroll_bar, :scroll_drag])
# |> Keyword.put(:id, :scroll_bars)
# OptionEx.return(styles[:scroll_bar])
# |> OptionEx.or_try(fn -> OptionEx.return(styles[:horizontal_scroll_bar]) end)
# |> OptionEx.or_try(fn -> OptionEx.return(styles[:vertical_scroll_bar]) end)
# |> OptionEx.map(fn _ ->
# FloUI.Scrollable.ScrollBars.add_to_graph(
# graph_or_primitive,
# %{
# width: frame.width,
# height: frame.height,
# content_size: {state.content.width, state.content.height},
# scroll_position: Vector2.sub(state.scroll_position, {state.content.x, state.content.y})
# },
# styles
# )
# end)
# |> OptionEx.or_else(graph_or_primitive)
# |> (&%{state | graph: &1}).()
# end
# defp update_content_size(state) do
# content_size = {state.content.width, state.content.height}
# OptionEx.map(state.scroll_bars, & &1.pid)
# |> OptionEx.map(&GenServer.call(&1, {:update_content_size, content_size}))
# state
# end
# @spec update_scroll_state(t) :: t
# defp update_scroll_state(state) do
# verify_idle_state(state)
# |> OptionEx.or_try(fn -> verify_dragging_state(state) end)
# |> OptionEx.or_try(fn -> verify_scrolling_state(state) end)
# |> OptionEx.or_try(fn -> verify_wheel_state(state) end)
# |> OptionEx.or_try(fn -> verify_cooling_down_state(state) end)
# |> OptionEx.map(&%{state | scrolling: &1})
# |> OptionEx.or_else(state)
# end
# @spec update_input_capture_range(t) :: t
# defp update_input_capture_range(%{graph: _, scrolling: :dragging} = state) do
# Map.update!(state, :graph, fn graph ->
# graph
# # TODO get screen res (for all monitors added up) somehow ?
# |> Graph.modify(:input_capture, fn primitive ->
# rect(primitive, {4000, 3000},
# translate: {-2000, -1500},
# id: :input_capture,
# input: :cursor_scroll
# )
# end)
# end)
# end
# defp update_input_capture_range(%{graph: _, frame: frame} = state) do
# Map.update!(state, :graph, fn graph ->
# graph
# |> Graph.modify(:input_capture, fn primitive ->
# rect(primitive, {frame.width, frame.height},
# translate: {frame.x, frame.y},
# id: :input_capture,
# input: :cursor_scroll
# )
# end)
# end)
# end
# @spec apply_force(t) :: t
# defp apply_force(%{scrolling: :idle} = state), do: state
# defp apply_force(%{scrolling: :dragging} = state) do
# state.scroll_bars
# |> OptionEx.bind(&OptionEx.from_bool(ScrollBars.dragging?(&1), &1))
# |> OptionEx.bind(&ScrollBars.new_position/1)
# |> OptionEx.map(fn new_position ->
# Vector2.add(new_position, {state.content.x, state.content.y})
# end)
# |> OptionEx.or_try(fn ->
# OptionEx.from_bool(Drag.dragging?(state.drag_state), state.drag_state)
# |> OptionEx.bind(&Drag.new_position/1)
# end)
# |> OptionEx.map(&%{state | scroll_position: PositionCap.cap(state.position_caps, &1)})
# |> OptionEx.or_else(state)
# end
# defp apply_force(%{scrolling: :wheel, wheel_state: %{offset: {:vertical, offset_y}}} = state) do
# {x, y} = state.scroll_position
# scroll_position = {x, y + offset_y * 10}
# %{state | scroll_position: PositionCap.cap(state.position_caps, scroll_position)}
# # Acceleration.apply_force(state.acceleration, {0, offset_y * 3})
# # |> Acceleration.apply_counter_pressure()
# # |> (&%{state | acceleration: &1}).()
# # |> (fn state ->
# # Map.update(state, :scroll_position, {0, 0}, fn scroll_pos ->
# # scroll_pos = Acceleration.translate(state.acceleration, scroll_pos)
# # PositionCap.cap(state.position_caps, scroll_pos)
# # end)
# # end).()
# end
# defp apply_force(%{scrolling: :wheel, wheel_state: %{offset: {:horizontal, offset_x}}} = state) do
# {x, y} = state.scroll_position
# scroll_position = {x + offset_x * 10, y}
# %{state | scroll_position: PositionCap.cap(state.position_caps, scroll_position)}
# end
# defp apply_force(state) do
# force =
# Hotkeys.direction(state.hotkeys)
# |> Vector2.add(get_scroll_bars_direction(state))
# Acceleration.apply_force(state.acceleration, force)
# |> Acceleration.apply_counter_pressure()
# |> (&%{state | acceleration: &1}).()
# |> (fn state ->
# Map.update(state, :scroll_position, {0, 0}, fn scroll_pos ->
# scroll_pos = Acceleration.translate(state.acceleration, scroll_pos)
# PositionCap.cap(state.position_caps, scroll_pos)
# end)
# end).()
# end
# @spec translate(t) :: t
# defp translate(%{content: %{x: x, y: y}} = state) do
# Map.update!(state, :graph, fn graph ->
# graph
# |> Graph.modify(:content, fn primitive ->
# Map.update(primitive, :transforms, %{}, fn styles ->
# Map.put(styles, :translate, Vector2.add(state.scroll_position, {x, y}))
# end)
# end)
# end)
# end
# @spec verify_idle_state(t) :: {:some, :idle} | :none
# defp verify_idle_state(state) do
# result =
# Hotkeys.direction(state.hotkeys) == {0, 0} and not Drag.dragging?(state.drag_state) and
# state.wheel_state.wheel_state != :scrolling and
# get_scroll_bars_direction(state) == {0, 0} and not scroll_bars_dragging?(state) and
# Acceleration.is_stationary?(state.acceleration)
# OptionEx.from_bool(result, :idle)
# end
# @spec verify_dragging_state(t) :: {:some, :dragging} | :none
# defp verify_dragging_state(state) do
# result = Drag.dragging?(state.drag_state) or scroll_bars_dragging?(state)
# OptionEx.from_bool(result, :dragging)
# end
# @spec verify_scrolling_state(t) :: {:some, :scrolling} | :none
# defp verify_scrolling_state(state) do
# result =
# Hotkeys.direction(state.hotkeys) != {0, 0} or
# (get_scroll_bars_direction(state) != {0, 0} and not (state.scrolling == :dragging))
# OptionEx.from_bool(result, :scrolling)
# end
# @spec verify_wheel_state(t) :: {:some, :scrolling} | :none
# defp verify_wheel_state(state) do
# {_, offset} = state.wheel_state.offset
# result =
# (not Hotkeys.is_any_key_pressed?(state.hotkeys) and
# not Drag.dragging?(state.drag_state) and
# offset > 0) or
# (offset < 0 and
# get_scroll_bars_direction(state) == {0, 0} and
# not scroll_bars_dragging?(state))
# OptionEx.from_bool(result, :wheel)
# end
# @spec verify_cooling_down_state(t) :: {:some, :cooling_down} | :none
# defp verify_cooling_down_state(state) do
# {_, offset} = state.wheel_state.offset
# result =
# not Hotkeys.is_any_key_pressed?(state.hotkeys) and
# not Drag.dragging?(state.drag_state) and
# offset == 0 and
# get_scroll_bars_direction(state) == {0, 0} and
# not scroll_bars_dragging?(state) and
# not Acceleration.is_stationary?(state.acceleration)
# OptionEx.from_bool(result, :cooling_down)
# end
# @spec start_cooling_down(t, v2) :: t
# defp start_cooling_down(state, cursor_pos) do
# speed =
# Drag.last_position(state.drag_state)
# |> OptionEx.or_else(cursor_pos)
# |> (&Vector2.sub(cursor_pos, &1)).()
# |> (&Drag.amplify_speed(state.drag_state, &1)).()
# Map.update!(state, :acceleration, &Acceleration.set_speed(&1, speed))
# end
# @spec capture_focus(t, Context.t()) :: t
# defp capture_focus(%{focused: false} = state, scene) do
# capture_input(scene, :key)
# %{state | focused: true}
# end
# defp capture_focus(state, _), do: state
# @spec release_focus(t, Context.t()) :: t
# defp release_focus(%{focused: true} = state, scene) do
# release_input(scene)
# %{state | focused: false}
# end
# defp release_focus(state, _), do: state
# @spec tick(t) :: t
# defp tick(%{scrolling: :idle} = state), do: %{state | animating: false}
# defp tick(%{scrolling: :dragging} = state), do: %{state | animating: false}
# defp tick(%{scrolling: :wheel} = state), do: %{state | animating: false}
# defp tick(%{animating: true} = state), do: state
# defp tick(state) do
# Process.send_after(self(), :tick, tick_time(state))
# %{state | animating: true}
# end
# @spec tick_time(t) :: number
# defp tick_time(%{fps: fps}) do
# trunc(1000 / fps)
# end
# # UTILITY
# @spec get_scroll_bars_direction(t) :: v2
# defp get_scroll_bars_direction(%{scroll_bars: :none}), do: {0, 0}
# defp get_scroll_bars_direction(%{scroll_bars: {:some, scroll_bars}}),
# do: ScrollBars.direction(scroll_bars)
# @spec scroll_bars_dragging?(t) :: boolean
# defp scroll_bars_dragging?(%{scroll_bars: :none}), do: false
# defp scroll_bars_dragging?(%{scroll_bars: {:some, scroll_bars}}),
# do: ScrollBars.dragging?(scroll_bars)
# end
|
lib/scrollbar/scrollable.ex
| 0.826292
| 0.57344
|
scrollable.ex
|
starcoder
|
defmodule SvgBuilder.Painting do
alias SvgBuilder.Element
alias SvgBuilder.Units
@moduledoc """
Handles filling, stroking and marker symbols.
https://www.w3.org/TR/SVG11/painting.html
Fill and stroke can be assigned `t:paint_t/0` type values.
The meanings of these values are as follows:
* `:current` : Use the currentColor set in the document.
* `:inherit` : Use the parent element's value for paint.
* `:none` : No paint.
* atom : Use one of the standard color names. `t:color_t/0`
* `"url(...)"` : Set the paint from a URL resource.
* `"icc-color(...)"` : Set to an ICC color profile.
* `{r, g, b}` : Set the red, green and blue values either as integers from 0 to 255 or floats from 0.0 to 1.0.
* `t:SvgBuilder.Element.t/0` : Set the paint from another element, must be a linearGradient, radialGradient or pattern element.
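For example (illustrative; see `fill/2` and `stroke/2` below):

    SvgBuilder.Shape.rect(0, 0, 10, 10)
    |> SvgBuilder.Painting.fill({255, 0, 0})
    |> SvgBuilder.Painting.stroke(:black)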
"""
@colornames ~w(aliceblue antiquewhite aqua aquamarine azure beige bisque black
blanchedalmond blue blueviolet brown burlywood cadetblue chartreuse chocolate
coral cornflowerblue cornsilk crimson cyan darkblue darkcyan darkgoldenrod
darkgray darkgreen darkgrey darkkhaki darkmagenta darkolivegreen darkorange
darkorchid darkred darksalmon darkseagreen darkslateblue darkslategray
darkslategrey darkturquoise darkviolet deeppink deepskyblue dimgray dimgrey
dodgerblue firebrick floralwhite forestgreen fuchsia gainsboro ghostwhite gold
goldenrod gray grey green greenyellow honeydew hotpink indianred indigo ivory
khaki lavender lavenderblush lawngreen lemonchiffon lightblue lightcoral
lightcyan lightgoldenrodyellow lightgray lightgreen lightgrey lightpink
lightsalmon lightseagreen lightskyblue lightslategray lightslategrey
lightsteelblue lightyellow lime limegreen linen magenta maroon
mediumaquamarine mediumblue mediumorchid mediumpurple mediumseagreen
mediumslateblue mediumspringgreen mediumturquoise mediumvioletred midnightblue
mintcream mistyrose moccasin navajowhite navy oldlace olive olivedrab orange
orangered orchid palegoldenrod palegreen paleturquoise palevioletred
papayawhip peachpuff peru pink plum powderblue purple red rosybrown royalblue
saddlebrown salmon sandybrown seagreen seashell sienna silver skyblue
slateblue slategray slategrey snow springgreen steelblue tan teal thistle
tomato turquoise violet wheat white whitesmoke yellow yellowgreen)a
@type color_t() ::
:aliceblue
| :antiquewhite
| :aqua
| :aquamarine
| :azure
| :beige
| :bisque
| :black
| :blanchedalmond
| :blue
| :blueviolet
| :brown
| :burlywood
| :cadetblue
| :chartreuse
| :chocolate
| :coral
| :cornflowerblue
| :cornsilk
| :crimson
| :cyan
| :darkblue
| :darkcyan
| :darkgoldenrod
| :darkgray
| :darkgreen
| :darkgrey
| :darkkhaki
| :darkmagenta
| :darkolivegreen
| :darkorange
| :darkorchid
| :darkred
| :darksalmon
| :darkseagreen
| :darkslateblue
| :darkslategray
| :darkslategrey
| :darkturquoise
| :darkviolet
| :deeppink
| :deepskyblue
| :dimgray
| :dimgrey
| :dodgerblue
| :firebrick
| :floralwhite
| :forestgreen
| :fuchsia
| :gainsboro
| :ghostwhite
| :gold
| :goldenrod
| :gray
| :grey
| :green
| :greenyellow
| :honeydew
| :hotpink
| :indianred
| :indigo
| :ivory
| :khaki
| :lavender
| :lavenderblush
| :lawngreen
| :lemonchiffon
| :lightblue
| :lightcoral
| :lightcyan
| :lightgoldenrodyellow
| :lightgray
| :lightgreen
| :lightgrey
| :lightpink
| :lightsalmon
| :lightseagreen
| :lightskyblue
| :lightslategray
| :lightslategrey
| :lightsteelblue
| :lightyellow
| :lime
| :limegreen
| :linen
| :magenta
| :maroon
| :mediumaquamarine
| :mediumblue
| :mediumorchid
| :mediumpurple
| :mediumseagreen
| :mediumslateblue
| :mediumspringgreen
| :mediumturquoise
| :mediumvioletred
| :midnightblue
| :mintcream
| :mistyrose
| :moccasin
| :navajowhite
| :navy
| :oldlace
| :olive
| :olivedrab
| :orange
| :orangered
| :orchid
| :palegoldenrod
| :palegreen
| :paleturquoise
| :palevioletred
| :papayawhip
| :peachpuff
| :peru
| :pink
| :plum
| :powderblue
| :purple
| :red
| :rosybrown
| :royalblue
| :saddlebrown
| :salmon
| :sandybrown
| :seagreen
| :seashell
| :sienna
| :silver
| :skyblue
| :slateblue
| :slategray
| :slategrey
| :snow
| :springgreen
| :steelblue
| :tan
| :teal
| :thistle
| :tomato
| :turquoise
| :violet
| :wheat
| :white
| :whitesmoke
| :yellow
| :yellowgreen
@type paint_t() ::
:inherit
| :none
| :current
| color_t()
| {non_neg_integer(), non_neg_integer(), non_neg_integer()}
| {float(), float(), float()}
| binary()
| Element.t()
@doc """
Set the "fill" attributes of an element.
When called with a map, the attributes that can be set are `:color`, `:rule` and `:opacity`.
When called with a `t:paint_t/0` value, then just the "fill" attribute is set.
See above for paint options.
## Examples
iex> rect = SvgBuilder.Shape.rect(0,0,10,10)
{:rect, %{height: "10", width: "10", x: "0", y: "0"}, []}
iex> SvgBuilder.Painting.fill(rect, :red)
{:rect, %{fill: :red, height: "10", width: "10", x: "0", y: "0"}, []}
iex> SvgBuilder.Painting.fill(rect, {123,45,3})
{:rect, %{fill: "rgb(123, 45, 3)", height: "10", width: "10", x: "0", y: "0"},[]}
iex> SvgBuilder.Painting.fill(rect, %{color: :inherit, opacity: 0.5, rule: :evenodd})
{:rect,
%{
fill: :inherit,
"fill-opacity": "0.5",
"fill-rule": "evenodd",
height: "10",
width: "10",
x: "0",
y: "0"
}, []}
"""
@spec fill(Element.t(), map | paint_t) :: Element.t()
def fill(element, fill_attrs) when is_map(fill_attrs) do
element
|> apply_unless_nil(Map.get(fill_attrs, :color), &fill/2)
|> apply_unless_nil(Map.get(fill_attrs, :rule), &fill_rule/2)
|> apply_unless_nil(Map.get(fill_attrs, :opacity), &fill_opacity/2)
end
def fill(element, fill) do
Element.add_attribute(element, :fill, paint(fill))
end
@doc """
Set the "fill-rule" attribute for an element. This sets the algorithm for determining which
parts of the canvas are included inside the shape.
See: https://www.w3.org/TR/SVG11/painting.html#FillProperties
Allowed values are:
* `:nonzero`
* `:evenodd`
* `:inherit`
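
For example (illustrative; `rect` is any element):

    SvgBuilder.Painting.fill_rule(rect, :evenodd)
    #=> {:rect, %{"fill-rule": "evenodd", ...}, []}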
"""
@spec fill_rule(Element.t(), :nonzero | :evenodd | :inherit) :: Element.t()
def fill_rule(element, rule) do
Element.add_attribute(element, :"fill-rule", fill_rule(rule))
end
@doc """
Sets the "fill-opacity" attribute on an element.
Opacity may be one of:
* nil
* `:inherit`
* a float from 0.0 to 1.0
"""
@spec fill_opacity(Element.t(), nil | :inherit | float) :: Element.t()
def fill_opacity(element, opacity) do
Element.add_attribute(element, :"fill-opacity", opacity(opacity))
end
@doc """
Set the stroke attributes on an element.
When called with a map of attributes, use the following keys: `[:color, :width,
:linecap, :linejoin, :miterlimit, :dasharray, :dashoffset, :opacity]` to
set various stroke attributes.
When called with a `t:paint_t/0` type it just sets the "stroke" attribute on
the element.
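
For example (illustrative; `rect` is any element):

    SvgBuilder.Painting.stroke(rect, %{color: :black, width: 2, linecap: :round})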
"""
@spec stroke(Element.t(), map | paint_t) :: Element.t()
def stroke(element, stroke_attrs) when is_map(stroke_attrs) do
element
|> apply_unless_nil(Map.get(stroke_attrs, :color), &stroke/2)
|> apply_unless_nil(Map.get(stroke_attrs, :width), &stroke_width/2)
|> apply_unless_nil(Map.get(stroke_attrs, :linecap), &stroke_linecap/2)
|> apply_unless_nil(Map.get(stroke_attrs, :linejoin), &stroke_linejoin/2)
|> apply_unless_nil(Map.get(stroke_attrs, :miterlimit), &stroke_miterlimit/2)
|> apply_unless_nil(Map.get(stroke_attrs, :dasharray), &stroke_dasharray/2)
|> apply_unless_nil(Map.get(stroke_attrs, :dashoffset), &stroke_dashoffset/2)
|> apply_unless_nil(Map.get(stroke_attrs, :opacity), &stroke_opacity/2)
end
def stroke(element, stroke) do
Element.add_attribute(element, :stroke, paint(stroke))
end
@doc """
Sets the "stroke-width" attribute on an element.
Can be set to a number or `:inherit`.
"""
@spec stroke_width(Element.t(), number | :inherit) :: Element.t()
def stroke_width(element, width) do
Element.add_attribute(element, :"stroke-width", inherit_length(width))
end
@doc """
Sets the "stroke-linecap" attribute on an element.
Allowable values are `:butt`, `:round`, `:square` and `:inherit`
"""
@spec stroke_linecap(Element.t(), :butt | :round | :square | :inherit) :: Element.t()
def stroke_linecap(element, linecap) when linecap in [:butt, :round, :square, :inherit] do
Element.add_attribute(element, :"stroke-linecap", linecap)
end
@doc """
Sets the "stroke-linejoin" attribute on an element.
Allowable values are `:miter`, `:round`, `:bevel` and `:inherit`
"""
@spec stroke_linejoin(Element.t(), :miter | :round | :bevel | :inherit) :: Element.t()
def stroke_linejoin(element, linejoin) when linejoin in [:miter, :round, :bevel, :inherit] do
Element.add_attribute(element, :"stroke-linejoin", linejoin)
end
@doc """
Sets the "stroke-miterlimit" attribute on an element.
Allowable values are a number or `:inherit`
"""
@spec stroke_miterlimit(Element.t(), number | :inherit) :: Element.t()
def stroke_miterlimit(element, miterlimit) do
Element.add_attribute(element, :"stroke-miterlimit", inherit_length(miterlimit))
end
@doc """
Sets the "stroke-dasharray" attribute on an element.
The `dasharray` argument can be `:none`, `:inherit` or a list of
dash lengths. Note that SVG will duplicate the dash array if an odd number
of elements are in the list.
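
For example (illustrative; `line` is any element):

    SvgBuilder.Painting.stroke_dasharray(line, [5, 2])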
"""
@spec stroke_dasharray(Element.t(), [number] | :none | :inherit) :: Element.t()
def stroke_dasharray(element, dasharray) do
Element.add_attribute(element, :"stroke-dasharray", dash_array(dasharray))
end
@doc """
Sets the "stroke-dashoffset" attribute on an element.
The offset may be either `:inherit` or a number.
"""
@spec stroke_dashoffset(Element.t(), number | :inherit) :: Element.t()
def stroke_dashoffset(element, offset) do
Element.add_attribute(element, :"stroke-dashoffset", inherit_length(offset))
end
@doc """
Sets the "stroke-opacity" attribute on an element.
The opacity may be either `nil`, `:inherit` or a float from 0.0 to 1.0.
"""
@spec stroke_opacity(Element.t(), float | :inherit | nil) :: Element.t()
def stroke_opacity(element, opacity) do
Element.add_attribute(element, :"stroke-opacity", opacity(opacity))
end
defp paint(:current) do
:currentColor
end
defp paint(paint) when paint in [:none, :inherit] do
paint
end
defp paint(paint) when paint in @colornames do
paint
end
defp paint(<<"url(", _::binary>> = paint) do
paint
end
defp paint(<<"icc-color(", _::binary>> = paint) do
paint
end
defp paint({type, %{id: id}, []})
when type in [:linearGradient, :radialGradient, :pattern] do
"url(##{id})"
end
defp paint({r, g, b}) when is_number(r) and is_number(g) and is_number(b) do
"rgb(#{r}, #{g}, #{b})"
end
defp paint(paint) do
raise ArgumentError, "Invalid paint: #{inspect(paint)}"
end
defp fill_rule(rule) when rule in [:nonzero, :evenodd, :inherit] do
"#{rule}"
end
defp opacity(nil) do
nil
end
defp opacity(:inherit) do
:inherit
end
defp opacity(n) when is_number(n) do
Units.number(n)
end
defp inherit_length(:inherit) do
:inherit
end
defp inherit_length(l) do
Units.len(l)
end
defp dash_array(:none) do
:none
end
defp dash_array(:inherit) do
:inherit
end
defp dash_array(dasharray) do
Units.length_list(dasharray)
end
defp apply_unless_nil(element, nil, _function) do
element
end
defp apply_unless_nil(element, value, function) do
function.(element, value)
end
end
|
lib/painting.ex
| 0.895013
| 0.524212
|
painting.ex
|
starcoder
|
defmodule Furlex.Parser do
@doc """
Parses the given HTML, returning a map structure of structured
data keys mapping to their respective values, or an error.
"""
@callback parse(html :: String.t) :: {:ok, Map.t} | {:error, Atom.t}
@doc """
Extracts the given tags from the given raw html according to
the given match function.
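
For example (illustrative; the match function builds a Floki selector
for each tag, and `html` is raw HTML):

    Furlex.Parser.extract(["og:title", "og:type"], html, fn tag ->
      "meta[property='" <> tag <> "']"
    end)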
"""
@spec extract(List.t | String.t, String.t, Function.t) :: Map.t
def extract(tags, html, match) when is_list(tags) do
tags
|> Stream.map(&extract(&1, html, match))
|> Enum.reject(fn {_, v} -> is_nil(v) end)
|> Map.new()
|> group_keys()
end
def extract(tag, html, match) do
case Floki.find(html, match.(tag)) do
nil -> nil
elements ->
content =
case do_extract_content(elements) do
[] -> nil
[ element ] -> element
content -> content
end
{tag, content}
end
end
@doc "Extracts a canonical url from the given raw HTML"
@spec extract_canonical(String.t) :: nil | String.t
def extract_canonical(html) do
case Floki.find(html, "link[rel=\"canonical\"]") do
[] -> nil
elements ->
elements
|> Floki.attribute("href")
|> Enum.at(0)
end
end
@doc """
Groups colon-separated keys into dynamic map structures
## Examples
iex> Application.put_env(:furlex, :group_keys?, false)
iex> Furlex.Parser.group_keys %{"twitter:app:id" => 123, "twitter:app:name" => "YouTube"}
%{"twitter:app:id" => 123, "twitter:app:name" => "YouTube"}
iex> Application.put_env(:furlex, :group_keys?, true)
iex> Furlex.Parser.group_keys %{"twitter:app:id" => 123, "twitter:app:name" => "YouTube"}
%{
"twitter" => %{
"app" => %{
"id" => 123,
"name" => "YouTube"
}
}
}
"""
@spec group_keys(Map.t) :: Map.t
def group_keys(map)
def group_keys(map) do
if Application.get_env(:furlex, :group_keys?) do
Enum.reduce map, %{}, fn
{_, v}, _acc when is_map(v) -> group_keys(v)
{k, v}, acc -> do_group_keys(k, v, acc)
end
else
map
end
end
defp do_group_keys(key, value, acc) do
[ h | t ] = key |> String.split(":") |> Enum.reverse()
base = Map.new [{h, value}]
result = Enum.reduce t, base, fn key, sub_acc ->
Map.new([{key, sub_acc}])
end
deep_merge(acc, result)
end
defp deep_merge(left, right) do
Map.merge(left, right, &deep_resolve/3)
end
defp deep_resolve(_key, left = %{}, right = %{}) do
deep_merge(left, right)
end
defp deep_resolve(_key, _left, right) do
right
end
defp do_extract_content(elements) do
Enum.map elements, fn element ->
element
|> Floki.attribute("content")
|> Enum.at(0)
end
end
end
|
lib/furlex/parser.ex
| 0.850515
| 0.433202
|
parser.ex
|
starcoder
|
defmodule ElixirScript.FindUsedFunctions do
@moduledoc false
alias ElixirScript.State, as: ModuleState
@doc """
Takes a list of entry modules, finds the modules they use, and records
which functions are called. The collected data is then used to restrict
compilation to only the functions that are actually used.
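
For example (illustrative; `MyApp.Main` is a hypothetical entry module and
`pid` an `ElixirScript.State` process):

    ElixirScript.FindUsedFunctions.execute([MyApp.Main], pid)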
"""
@spec execute([atom], pid) :: :ok
def execute(entry_modules, pid) do
entry_modules
|> List.wrap()
|> Enum.each(fn module ->
walk_module(module, pid)
end)
pid
|> ElixirScript.State.list_modules()
|> Enum.each(fn {module, info} ->
if get_in(info, [:attributes, :protocol_impl]) do
walk_module(module, pid)
end
end)
end
defp walk_module(module, pid) do
%{
attributes: _attrs,
compile_opts: _compile_opts,
definitions: defs,
file: _file,
line: _line,
module: ^module,
unreachable: unreachable
} = ModuleState.get_module(pid, module)
reachable_defs =
Enum.filter(defs, fn
{_, type, _, _} when type in [:defmacro, :defmacrop] ->
false
{name, _, _, _} ->
name not in unreachable
_ ->
true
end)
state = %{
pid: pid,
module: module
}
Enum.each(reachable_defs, fn {name, _type, _, _clauses} ->
ModuleState.put_used(state.pid, module, name)
end)
Enum.each(reachable_defs, &walk(&1, state))
end
defp walk_module(module, function, arity, pid) do
function = {function, arity}
unless ModuleState.has_used?(pid, module, function) do
info = ModuleState.get_module(pid, module)
state = %{
pid: pid,
module: module
}
reachable_def =
Enum.find(Map.get(info, :definitions, []), fn {name, _, _, _} -> name == function end)
case reachable_def do
nil ->
nil
{name, _type, _, _clauses} = func ->
ModuleState.put_used(state.pid, module, name)
walk(func, state)
end
end
end
defp walk({{_name, _arity}, _type, _, clauses}, state) do
Enum.each(clauses, &walk(&1, state))
end
defp walk({_, _args, _guards, body}, state) do
walk_block(body, state)
end
defp walk({:->, _, [[{:when, _, params}], body]}, state) do
guards = List.last(params)
params = params |> Enum.reverse() |> tl |> Enum.reverse()
walk({[], params, guards, body}, state)
end
defp walk({:->, _, [params, body]}, state) do
walk({[], params, [], body}, state)
end
defp walk({:|, _, [head, tail]}, state) do
walk(head, state)
walk(tail, state)
end
defp walk({:"::", _, [target, _type]}, state) do
walk(target, state)
end
defp walk(form, state) when is_list(form) do
Enum.each(form, &walk(&1, state))
end
defp walk({a, b}, state) do
walk({:{}, [], [a, b]}, state)
end
defp walk({:{}, _, elements}, state) do
walk(elements, state)
end
defp walk({:%{}, _, properties}, state) do
Enum.each(properties, fn val -> walk(val, state) end)
end
defp walk({:<<>>, _, elements}, state) do
Enum.each(elements, fn val -> walk(val, state) end)
end
defp walk({:=, _, [left, right]}, state) do
walk(left, state)
walk(right, state)
end
defp walk({:%, _, [module, params]}, state) do
walk_module(module, :__struct__, 0, state.pid)
walk_module(module, :__struct__, 1, state.pid)
walk(params, state)
end
defp walk({:for, _, generators}, state) when is_list(generators) do
Enum.each(generators, fn
{:<<>>, _, body} ->
walk(body, state)
{:<-, _, [identifier, enum]} ->
walk(identifier, state)
walk(enum, state)
[into: expression] ->
walk(expression, state)
[into: expression, do: expression2] ->
walk(expression, state)
walk_block(expression2, state)
[do: expression] ->
walk_block(expression, state)
filter ->
walk(filter, state)
end)
end
defp walk({:case, _, [condition, [do: clauses]]}, state) do
Enum.each(clauses, &walk(&1, state))
walk(condition, state)
end
defp walk({:cond, _, [[do: clauses]]}, state) do
Enum.each(clauses, fn {:->, _, [clause, clause_body]} ->
Enum.each(List.wrap(clause_body), &walk(&1, state))
walk(hd(clause), state)
end)
end
defp walk({:receive, _context, blocks}, state) do
do_block = Keyword.get(blocks, :do)
after_block = Keyword.get(blocks, :after, nil)
walk_block(do_block, state)
if after_block do
Enum.each(List.wrap(after_block), &walk(&1, state))
end
end
defp walk({:try, _, [blocks]}, state) do
try_block = Keyword.get(blocks, :do)
rescue_block = Keyword.get(blocks, :rescue, nil)
catch_block = Keyword.get(blocks, :catch, nil)
after_block = Keyword.get(blocks, :after, nil)
else_block = Keyword.get(blocks, :else, nil)
walk_block(try_block, state)
if rescue_block do
Enum.each(rescue_block, fn
{:->, _, [[{:in, _, [param, names]}], body]} ->
# TODO: investigate walk
walk({[], [param], [{{:., [], [Enum, :member?]}, [], [param, names]}, body]}, state)
{:->, _, [[param], body]} ->
walk({[], [param], [], body}, state)
end)
end
if catch_block do
walk({:fn, [], catch_block}, state)
end
if after_block do
Enum.each(List.wrap(after_block), &walk(&1, state))
end
if else_block do
walk({:fn, [], else_block}, state)
end
end
defp walk({:fn, _, clauses}, state) do
Enum.each(clauses, &walk(&1, state))
end
defp walk({:with, _, args}, state) do
Enum.each(args, fn
{:<-, _, [left, right]} ->
walk(left, state)
walk(right, state)
{:=, _, [left, right]} ->
walk(left, state)
walk(right, state)
[do: expression] ->
walk_block(expression, state)
[do: expression, else: elses] ->
walk_block(expression, state)
Enum.each(elses, fn {:->, _, [left, right]} ->
walk(left, state)
walk(right, state)
end)
end)
end
defp walk({{:., _, [:erlang, :apply]}, _, [module, function, params]}, state) do
walk({{:., [], [module, function]}, [], params}, state)
end
defp walk({{:., _, [:erlang, :apply]}, _, [function, params]}, state) do
walk({function, [], params}, state)
end
defp walk({{:., _, [ElixirScript.JS, _]}, _, params}, state) do
walk(params, state)
end
defp walk({{:., _, [module, function]}, _, params}, state) do
cond do
ElixirScript.Translate.Module.is_js_module(module, state) ->
nil
ElixirScript.Translate.Module.is_elixir_module(module) ->
walk_module(module, function, length(params), state.pid)
is_tuple(module) ->
walk(module, state)
true ->
nil
end
walk(params, state)
end
defp walk({:super, _, [{_, function} | params]}, state) do
walk_module(state.module, function, length(params), state.pid)
walk(params, state)
end
defp walk({function, _, params}, state) when is_atom(function) and is_list(params) do
walk_module(state.module, function, length(params), state.pid)
walk(params, state)
end
defp walk({value, _, params}, state) when is_list(params) do
walk(value, state)
walk(params, state)
end
defp walk(_unused1, _unused2) do
nil
end
defp walk_block(block, state) do
case block do
nil ->
nil
{:__block__, _, block_body} ->
Enum.each(block_body, &walk(&1, state))
b when is_list(b) ->
Enum.each(b, &walk(&1, state))
_ ->
walk(block, state)
end
end
end
|
lib/elixir_script/passes/find_used_functions.ex
| 0.514156
| 0.48377
|
find_used_functions.ex
|
starcoder
|
defmodule Filterable.Ecto.Helpers do
@moduledoc ~S"""
Contains macros that allow defining widely used filters.
use Filterable.Ecto.Helpers
Example:
defmodule PostFilters do
use Filterable.DSL
use Filterable.Ecto.Helpers
field :author
field :title
paginateable per_page: 10
orderable [:title, :inserted_at]
end
"""
defmacro __using__(_) do
quote do
import unquote(__MODULE__)
end
end
defmacro field(name, opts \\ []) do
quote do
@options unquote(opts) |> Keyword.merge(share: false)
filter unquote(name)(query, value) do
query |> Ecto.Query.where([{unquote(name), ^value}])
end
end
end
defmacro paginateable(opts \\ []) do
per_page = Keyword.get(opts, :per_page, 20)
quote do
@options param: [:page, :per_page],
default: [page: 1, per_page: unquote(per_page)],
cast: :integer,
share: false
filter paginate(_, %{page: page, per_page: _}) when page < 0 do
{:error, "page can't be negative"}
end
filter paginate(_, %{page: _page, per_page: per_page}) when per_page < 0 do
{:error, "per_page can't be negative"}
end
filter paginate(_, %{page: _page, per_page: per_page}) when per_page > unquote(per_page) do
{:error, "per_page can't be more than #{unquote(per_page)}"}
end
filter paginate(query, %{page: page, per_page: per_page}) do
Ecto.Query.from(q in query, limit: ^per_page, offset: ^((page - 1) * per_page))
end
end
end
defmacro orderable(fields) when is_list(fields) do
fields = Enum.map(fields, &to_string/1)
quote do
@options param: [:sort, :order],
default: [order: "desc"],
cast: :string,
share: false,
allowed: unquote(fields)
filter sort(query, %{sort: nil, order: _}) do
query
end
filter sort(_, %{sort: field, order: _}) when not (field in unquote(fields)) do
{:error,
"Unable to sort on #{inspect(field)}, only #{inspect(unquote(fields))} are allowed"}
end
filter sort(_, %{sort: _, order: order}) when not (order in ~w(asc desc)) do
{:error, "Unable to sort using #{inspect(order)}, only 'asc' and 'desc' are allowed"}
end
filter sort(query, %{sort: field, order: order}) do
field = String.to_atom(field)
order = String.to_atom(order)
query |> Ecto.Query.order_by([{^order, ^field}])
end
end
end
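# Illustrative usage of `limitable/1` below (not covered by the moduledoc example):
#
#     limitable limit: 100
#
# defines `limit` and `offset` filters, with `limit` capped at the given maximum.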
defmacro limitable(opts \\ []) do
limit = Keyword.get(opts, :limit)
quote do
@options default: unquote(limit), cast: :integer, share: false
filter limit(_, value) when value < 0 do
{:error, "limit can't be negative"}
end
filter limit(_, value) when value > unquote(limit) do
{:error, "limit can't be more than #{unquote(limit)}"}
end
filter limit(query, value) do
query |> Ecto.Query.limit(^value)
end
@options default: 0, cast: :integer, share: false
filter offset(_, value) when value < 0 do
{:error, "offset can't be negative"}
end
filter offset(query, value) do
query |> Ecto.Query.offset(^value)
end
end
end
end
|
lib/filterable/ecto/helpers.ex
| 0.739422
| 0.431764
|
helpers.ex
|
starcoder
|
defmodule Ecto.Schema.Metadata do
@moduledoc """
Stores metadata of a struct.
The fields are:
* `state` - the state in a struct's lifetime, e.g. :built, :loaded, :deleted
* `source` - the database source of a model, which is the source specified
in the schema by default, or a custom source when building an assoc with a custom source.
"""
defstruct [:state, :source]
end
defmodule Ecto.Schema do
@moduledoc ~S"""
Defines a schema for a model.
A schema is a struct with associated metadata that is persisted to a
repository. Every schema model is also a struct, which means that you work
with models just like you would work with structs.
## Example
defmodule User do
use Ecto.Schema
schema "users" do
field :name, :string
field :age, :integer, default: 0
has_many :posts, Post
end
end
By default, a schema will generate both a primary key named `id`
of type `:integer` and `belongs_to` associations will generate
foreign keys of type `:integer` too. Those settings can be configured
below.
## Schema attributes
The schema supports some attributes to be set before hand,
configuring the defined schema.
Those attributes are:
* `@primary_key` - configures the schema primary key. It expects
a tuple with the primary key name, type and options. Defaults
to `{:id, :integer, read_after_writes: true}`. When set to
false, does not define a primary key in the model;
* `@foreign_key_type` - configures the default foreign key type
used by `belongs_to` associations. Defaults to `:integer`;
* `@timestamps_opts` - configures the default timestamps type
used by `timestamps`. Defaults to `[type: Ecto.DateTime, usec: false]`;
* `@derive` - the same as `@derive` available in `Kernel.defstruct/1`
as the schema defines a struct behind the scenes;
The advantage of configuring the schema via those attributes
is that they can be set with a macro to configure application wide
defaults. For example, if you would like to use `uuid`'s in all of
your application models, you can do:
# Define a module to be used as base
defmodule MyApp.Model do
defmacro __using__(_) do
quote do
use Ecto.Model
@primary_key {:id, :uuid, []}
@foreign_key_type :uuid
end
end
end
# Now use MyApp.Model to define new models
defmodule MyApp.Comment do
use MyApp.Model
schema "comments" do
belongs_to :post, MyApp.Post
end
end
Any models using `MyApp.Model` will get the `:id` field with type
`:uuid` as primary key.
The `belongs_to` association on `MyApp.Comment` will also define
a `:post_id` field with `:uuid` type that references the `:id` of
the `MyApp.Post` model.
## Types and casting
When defining the schema, types need to be given. Types are split
in two categories, primitive types and custom types.
### Primitive types
The primitive types are:
Ecto type | Elixir type | Literal syntax in query
:---------------------- | :---------------------- | :---------------------
`:integer` | `integer` | 1, 2, 3
`:float` | `float` | 1.0, 2.0, 3.0
`:boolean` | `boolean` | true, false
`:string` | UTF-8 encoded `string` | "hello"
`:binary` | `binary` | `<<int, int, int, ...>>`
`:uuid` | 16 byte `binary` | `uuid(binary_or_string)`
`{:array, inner_type}` | `list` | `[value, value, value, ...]`
`:decimal` | [`Decimal`](https://github.com/ericmj/decimal)
`:datetime` | `{{year, month, day}, {hour, min, sec}}`
`:date` | `{year, month, day}`
`:time` | `{hour, min, sec}`
**Note:** Although Ecto provides `:date`, `:time` and `:datetime`, you
likely want to use `Ecto.Date`, `Ecto.Time` and `Ecto.DateTime` respectively.
See the Custom types sections below about types that enhance the primitive
ones.
### Custom types
Sometimes the primitive types in Ecto are too primitive. For example,
`:uuid` relies on the underlying binary representation instead of
representing itself as a readable string. That's where `Ecto.UUID`
comes in.
`Ecto.UUID` is a custom type. A custom type is a module that
implements the `Ecto.Type` behaviour. By default, Ecto provides the
following custom types:
Custom type | Ecto type | Elixir type
:---------------------- | :---------------------- | :---------------------
`Ecto.DateTime` | `:datetime` | `%Ecto.DateTime{}`
`Ecto.Date` | `:date` | `%Ecto.Date{}`
`Ecto.Time` | `:time` | `%Ecto.Time{}`
`Ecto.UUID` | `:uuid` | "uuid-string"
Ecto allows developers to provide their own types too. Read the
`Ecto.Type` documentation for more information.
### Casting
When directly manipulating the struct, it is the responsibility of
the developer to ensure the field values have the proper type. For
example, you can create a weather struct with an invalid value
for `temp_lo`:
iex> weather = %Weather{temp_lo: "0"}
iex> weather.temp_lo
"0"
However, if you attempt to persist the struct above, an error will
be raised since Ecto validates the types when building the query.
Therefore, when working with and manipulating external data, the use
of `Ecto.Changeset`s is recommended, as they are able to filter
and properly cast external data. In fact, `Ecto.Changeset` and custom
types provide a powerful combination to extend Ecto types and queries.
Finally, models can also have virtual fields by passing the
`virtual: true` option. These fields are not persisted to the database
and can optionally not be type checked by declaring type `:any`.
## Reflection
Any schema module will generate the `__schema__` function that can be
used for runtime introspection of the schema:
* `__schema__(:source)` - Returns the source as given to `schema/2`;
* `__schema__(:primary_key)` - Returns a list of the field that is the primary
key or [] if there is none;
* `__schema__(:fields)` - Returns a list of all non-virtual field names;
* `__schema__(:field, field)` - Returns the type of the given non-virtual field;
* `__schema__(:associations)` - Returns a list of all association field names;
* `__schema__(:association, assoc)` - Returns the association reflection of the given assoc;
* `__schema__(:read_after_writes)` - Non-virtual fields that must be read back
from the database after every write (insert or update);
* `__schema__(:load, idx, values)` - Loads a new model from a tuple of non-virtual
field values starting at the given index. Typically used by adapter interfaces;
Furthermore, both `__struct__` and `__changeset__` functions are
defined so structs and changeset functionalities are available.
"""
alias Ecto.Schema.Metadata
@doc false
defmacro __using__(_) do
quote do
import Ecto.Schema, only: [schema: 2]
@primary_key {:id, :integer, read_after_writes: true}
@timestamps_opts []
@foreign_key_type :integer
end
end
@doc """
Defines a schema with a source name and field definitions.
"""
defmacro schema(source, [do: block]) do
quote do
source = unquote(source)
unless is_binary(source) do
raise ArgumentError, "schema source must be a string, got: #{inspect source}"
end
Module.register_attribute(__MODULE__, :changeset_fields, accumulate: true)
Module.register_attribute(__MODULE__, :struct_fields, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_fields, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_assocs, accumulate: true)
Module.register_attribute(__MODULE__, :ecto_raw, accumulate: true)
Module.put_attribute(__MODULE__, :struct_fields,
{:__meta__, %Metadata{state: :built, source: source}})
primary_key_field =
case @primary_key do
false ->
[]
{name, type, opts} ->
Ecto.Schema.field(name, type, opts)
[name]
other ->
raise ArgumentError, "@primary_key must be false or {name, type, opts}, got: #{inspect other}"
end
try do
import Ecto.Schema
unquote(block)
after
:ok
end
fields = @ecto_fields |> Enum.reverse
assocs = @ecto_assocs |> Enum.reverse
Module.eval_quoted __MODULE__, [
Ecto.Schema.__struct__(@struct_fields),
Ecto.Schema.__changeset__(@changeset_fields),
Ecto.Schema.__source__(source),
Ecto.Schema.__fields__(fields),
Ecto.Schema.__assocs__(assocs),
Ecto.Schema.__primary_key__(primary_key_field),
Ecto.Schema.__load__(fields),
Ecto.Schema.__read_after_writes__(@ecto_raw)]
end
end
## API
@doc """
Defines a field on the model schema with given name and type.
## Options
* `:default` - Sets the default value on the schema and the struct.
The default value is calculated at compilation time, so don't use
expressions like `Ecto.DateTime.local` or `Ecto.UUID.generate` as
they would then be the same for all records
* `:virtual` - When true, the field is not persisted to the database
* `:read_after_writes` - When true, the field is always read back
from the repository during inserts and updates. For relational
databases, this means the RETURNING option of those statements
are used. For this reason, MySQL does not support this option for
any field besides the primary key (which must be of type serial).
Setting this option to true for MySQL will cause the values to be
ignored or, even worse, load invalid values from the database.
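
## Examples

Some illustrative definitions using the options above:

    field :name, :string
    field :age, :integer, default: 0
    field :token, :string, virtual: true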
"""
defmacro field(name, type \\ :string, opts \\ []) do
quote do
Ecto.Schema.__field__(__MODULE__, unquote(name), unquote(type), unquote(opts))
end
end
@doc """
Generates `:inserted_at` and `:updated_at` timestamp fields.
When using `Ecto.Model`, the fields generated by this macro
will automatically be set to the current time when inserting
and updating values in a repository.
## Options
* `:type` - the timestamps type, defaults to `Ecto.DateTime`.
* `:usec` - boolean, sets whether microseconds are used in timestamps.
Microseconds will be 0 if false. Defaults to false.
* `:inserted_at` - the name of the column for insertion times or `false`
* `:updated_at` - the name of the column for update times or `false`
All options can be pre-configured by setting `@timestamps_opts`.
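
## Examples

Some illustrative uses of the options above:

    timestamps()
    timestamps type: Ecto.DateTime, usec: true
    timestamps inserted_at: :created_at, updated_at: false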
"""
defmacro timestamps(opts \\ []) do
quote bind_quoted: binding() do
timestamps =
[inserted_at: :inserted_at, updated_at: :updated_at,
type: Ecto.DateTime, usec: false]
|> Keyword.merge(@timestamps_opts)
|> Keyword.merge(opts)
if inserted_at = Keyword.fetch!(timestamps, :inserted_at) do
Ecto.Schema.field(inserted_at, Keyword.fetch!(timestamps, :type), [])
end
if updated_at = Keyword.fetch!(timestamps, :updated_at) do
Ecto.Schema.field(updated_at, Keyword.fetch!(timestamps, :type), [])
end
@ecto_timestamps timestamps
end
end
@doc """
Defines an association.
This macro is used by `belongs_to/3`, `has_one/3` and `has_many/3` to
define associations. However, custom association mechanisms can be provided
by developers and hooked in via this macro.
Read more about custom associations in `Ecto.Association`.
"""
defmacro association(cardinality, name, association, opts \\ []) do
quote do
Ecto.Schema.__association__(__MODULE__, unquote(cardinality), unquote(name),
unquote(association), unquote(opts))
end
end
@doc ~S"""
Indicates a one-to-many association with another model.
The current model has zero or more records of the other model. The other
model often has a `belongs_to` field with the reverse association.
## Options
* `:foreign_key` - Sets the foreign key, this should map to a field on the
other model, defaults to the underscored name of the current model
suffixed by `_id`
* `:references` - Sets the key on the current model to be used for the
association, defaults to the primary key on the model
* `:through` - If this association must be defined in terms of existing
associations. Read below for more information
## Examples
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, Comment
end
end
# Get all comments for a given post
post = Repo.get(Post, 42)
comments = Repo.all assoc(post, :comments)
# The comments can come preloaded on the post struct
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments))
post.comments #=> [%Comment{...}, ...]
## has_many/has_one :through
Ecto also supports defining associations in terms of other associations
via the `:through` option. Let's see an example:
defmodule Post do
use Ecto.Model
schema "posts" do
has_many :comments, Comment
has_one :permalink, Permalink
has_many :comments_authors, through: [:comments, :author]
# Specify the association with custom source
has_many :tags, {"posts_tags", Tag}
end
end
defmodule Comment do
use Ecto.Model
schema "comments" do
belongs_to :author, Author
belongs_to :post, Post
has_one :post_permalink, through: [:post, :permalink]
end
end
In the example above, we have defined a `has_many :through` association
named `:comments_authors`. A `:through` association always expects a list
and the first element of the list must be a previously defined association
in the current module. For example, `:comments_authors` first points to
`:comments` in the same module (Post), which then points to `:author` in
the next model `Comment`.
This `:through` association will return all authors for all comments
that belong to that post:
# Get all comments for a given post
post = Repo.get(Post, 42)
authors = Repo.all assoc(post, :comments_authors)
`:through` associations are read-only, as they are useful to avoid repetition,
allowing the developer to easily retrieve data that is often seen together
but stored across different tables.
`:through` associations can also be preloaded. In such cases, not only
the `:through` association is preloaded but all intermediate steps are
preloaded too:
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :comments_authors))
post.comments_authors #=> [%Author{...}, ...]
# The comments for each post will be preloaded too
post.comments #=> [%Comment{...}, ...]
# And the author for each comment too
hd(post.comments).author #=> %Author{...}
Finally, `:through` can be used with multiple associations (not only 2)
and with associations of any kind, including `belongs_to` and other
`:through` associations. When the `:through` association is expected to
return one or no item, `has_one :through` should be used instead, as in
the example at the beginning of this section:
# How we defined the association above
has_one :post_permalink, through: [:post, :permalink]
# Get a preloaded comment
[comment] = Repo.all(Comment) |> Repo.preload(:post_permalink)
comment.post_permalink #=> %Permalink{...}
"""
defmacro has_many(name, queryable, opts \\ []) do
quote bind_quoted: binding() do
if is_list(queryable) and Keyword.has_key?(queryable, :through) do
association(:many, name, Ecto.Association.HasThrough, queryable)
else
association(:many, name, Ecto.Association.Has, [queryable: queryable] ++ opts)
end
end
end
@doc ~S"""
Indicates a one-to-one association with another model.
The current model has zero or one records of the other model. The other
model often has a `belongs_to` field with the reverse association.
## Options
* `:foreign_key` - Sets the foreign key, this should map to a field on the
other model, defaults to the underscored name of the current model
suffixed by `_id`
* `:references` - Sets the key on the current model to be used for the
association, defaults to the primary key on the model
* `:through` - If this association must be defined in terms of existing
associations. Read the section in `has_many/3` for more information
## Examples
defmodule Post do
use Ecto.Model
schema "posts" do
has_one :permalink, Permalink
# Specify the association with custom source
has_one :category, {"posts_categories", Category}
end
end
# The permalink can come preloaded on the post struct
[post] = Repo.all(from(p in Post, where: p.id == 42, preload: :permalink))
post.permalink #=> %Permalink{...}
"""
defmacro has_one(name, queryable, opts \\ []) do
quote bind_quoted: binding() do
if is_list(queryable) and Keyword.has_key?(queryable, :through) do
association(:one, name, Ecto.Association.HasThrough, queryable)
else
association(:one, name, Ecto.Association.Has, [queryable: queryable] ++ opts)
end
end
end
@doc ~S"""
Indicates a one-to-one association with another model.
The current model belongs to zero or one records of the other model. The other
model often has a `has_one` or a `has_many` field with the reverse association.
You should use `belongs_to` in the table that contains the foreign key. Imagine
a company <-> manager relationship. If the company contains the `manager_id` in
the underlying database table, we say the company belongs to manager.
In fact, when you invoke this macro, a field with the name of foreign key is
automatically defined in the schema for you.
## Options
* `:foreign_key` - Sets the foreign key field name, defaults to the name
of the association suffixed by `_id`. For example, `belongs_to :company`
will define foreign key of `:company_id`
* `:references` - Sets the key on the other model to be used for the
association, defaults to: `:id`
* `:auto_field` - When false, does not automatically define a `:foreign_key`
field, implying the user is defining the field manually elsewhere
* `:type` - Sets the type of the automatically defined `:foreign_key`.
Defaults to `:integer` and can be set per schema via `@foreign_key_type`
All other options are forwarded to the underlying foreign key definition
and therefore accept the same options as `field/3`.
## Examples
defmodule Comment do
use Ecto.Model
schema "comments" do
# This automatically defines a post_id field too
belongs_to :post, Post
# Specify the association with custom source
belongs_to :author, {"posts_authors", Author}
end
end
# The post can come preloaded on the comment record
[comment] = Repo.all(from(c in Comment, where: c.id == 42, preload: :post))
comment.post #=> %Post{...}
"""
defmacro belongs_to(name, queryable, opts \\ []) do
quote bind_quoted: binding() do
opts = Keyword.put_new(opts, :foreign_key, :"#{name}_id")
foreign_key_type = opts[:type] || @foreign_key_type
if Keyword.get(opts, :auto_field, true) do
field(opts[:foreign_key], foreign_key_type, opts)
end
association(:one, name, Ecto.Association.BelongsTo, [queryable: queryable] ++ opts)
end
end
## Callbacks
@doc false
def __field__(mod, name, type, opts) do
check_type!(type, opts[:virtual])
check_default!(type, opts[:default])
Module.put_attribute(mod, :changeset_fields, {name, type})
put_struct_field(mod, name, opts[:default])
unless opts[:virtual] do
if opts[:read_after_writes] do
Module.put_attribute(mod, :ecto_raw, name)
end
Module.put_attribute(mod, :ecto_fields, {name, type})
end
end
@doc false
def __association__(mod, cardinality, name, association, opts) do
not_loaded = %Ecto.Association.NotLoaded{owner: mod, field: name, cardinality: cardinality}
put_struct_field(mod, name, not_loaded)
opts = [cardinality: cardinality] ++ opts
Module.put_attribute(mod, :ecto_assocs, {name, association.struct(mod, name, opts)})
end
@doc false
def __load__(struct, fields, idx, values) do
loaded = do_load(struct, fields, idx, values)
loaded = put_in loaded.__meta__.state, :loaded
Ecto.Model.Callbacks.__apply__(struct.__struct__, :after_load, loaded)
end
defp do_load(struct, fields, idx, values) when is_integer(idx) and is_tuple(values) do
Enum.reduce(fields, {struct, idx}, fn
{field, type}, {acc, idx} ->
value = Ecto.Type.load!(type, elem(values, idx))
{Map.put(acc, field, value), idx + 1}
end) |> elem(0)
end
## Quoted callbacks
@doc false
def __changeset__(changeset_fields) do
map = changeset_fields |> Enum.into(%{}) |> Macro.escape()
quote do
def __changeset__, do: unquote(map)
end
end
@doc false
def __struct__(struct_fields) do
quote do
defstruct unquote(Macro.escape(struct_fields))
end
end
@doc false
def __source__(source) do
quote do
def __schema__(:source), do: unquote(Macro.escape(source))
end
end
@doc false
def __fields__(fields) do
quoted = Enum.map(fields, fn {name, type} ->
quote do
def __schema__(:field, unquote(name)), do: unquote(type)
end
end)
field_names = Enum.map(fields, &elem(&1, 0))
quoted ++ [quote do
def __schema__(:field, _), do: nil
def __schema__(:fields), do: unquote(field_names)
end]
end
@doc false
def __assocs__(assocs) do
quoted =
Enum.map(assocs, fn {name, refl} ->
quote do
def __schema__(:association, unquote(name)) do
unquote(Macro.escape(refl))
end
end
end)
assoc_names = Enum.map(assocs, &elem(&1, 0))
quote do
def __schema__(:associations), do: unquote(assoc_names)
unquote(quoted)
def __schema__(:association, _), do: nil
end
end
@doc false
def __primary_key__(primary_key) do
quote do
def __schema__(:primary_key), do: unquote(primary_key)
end
end
@doc false
def __load__(fields) do
quote do
def __schema__(:load, idx, values) do
Ecto.Schema.__load__(__struct__(), unquote(fields), idx, values)
end
end
end
@doc false
def __read_after_writes__(fields) do
quote do
def __schema__(:read_after_writes), do: unquote(Enum.reverse(fields))
end
end
## Private
defp put_struct_field(mod, name, assoc) do
fields = Module.get_attribute(mod, :struct_fields)
if List.keyfind(fields, name, 0) do
raise ArgumentError, "field/association `#{name}` is already set on schema"
end
Module.put_attribute(mod, :struct_fields, {name, assoc})
end
defp check_type!(type, virtual?) do
cond do
# `virtual?` may be nil here, so avoid `not nil` (which raises)
type == :any and virtual? != true ->
raise ArgumentError, "only virtual fields can have type :any"
Ecto.Type.primitive?(type) ->
true
is_atom(type) ->
if Code.ensure_compiled?(type) and function_exported?(type, :type, 0) do
type
else
raise ArgumentError, "invalid or unknown field type `#{inspect type}`"
end
true ->
raise ArgumentError, "invalid field type `#{inspect type}`"
end
end
defp check_default!(type, default) do
case Ecto.Type.dump(type, default) do
{:ok, _} ->
:ok
:error ->
raise ArgumentError, "invalid default argument `#{inspect default}` for `#{inspect type}`"
end
end
end
|
lib/ecto/schema.ex
| 0.92731
| 0.630799
|
schema.ex
|
starcoder
|
defmodule ExTermbox.EventManager do
@moduledoc """
This module implements an event manager that notifies subscribers of
keyboard, mouse and resize events received from the termbox API.
Internally, the event manager is managing a NIF-based polling routine and
fanning out polled events to subscribers. It works like this:
1. The `ExTermbox.Bindings.start_polling/1` NIF is called with the event
manager's pid.
2. The NIF creates a background thread for the blocking polling routine and
immediately returns with a resource representing a handle for the thread.
3. When the polling routine receives an event (e.g., a keypress), it sends
a message to the event manager with the event data, and then continues
polling for the next event.
4. The event manager receives event data from the background thread and
notifies all of its subscribers of the event. Steps 3 and 4 are repeated
for each event.
5. When the event manager is terminated, `ExTermbox.Bindings.stop_polling/0`
is called to stop polling and terminate the background thread.
Example Usage:
def event_loop do
receive do
{:event, %Event{ch: ?q} = event} ->
Bindings.shutdown()
{:event, %Event{} = event} ->
# handle the event and wait for another...
event_loop()
end
end
{:ok, pid} = EventManager.start_link()
:ok = EventManager.subscribe(self())
event_loop()
"""
use GenServer
alias ExTermbox.Event
@default_bindings ExTermbox.Bindings
@default_server_opts [name: __MODULE__]
# Client API
@doc """
Starts an event manager process linked to the current process.
Running multiple instances of the event manager process simultaneously is
discouraged, as it could crash the NIF or cause unexpected behavior. By
default, the process is registered with a fixed name to prevent this.
"""
def start_link(opts \\ []) do
{bindings, server_opts} = Keyword.pop(opts, :bindings, @default_bindings)
server_opts_with_defaults = Keyword.merge(@default_server_opts, server_opts)
GenServer.start_link(__MODULE__, bindings, server_opts_with_defaults)
end
@doc """
Subscribes the given subscriber pid to future event notifications.
"""
def subscribe(event_manager_server \\ __MODULE__, subscriber_pid) do
GenServer.call(event_manager_server, {:subscribe, subscriber_pid})
end
def stop(event_manager_server \\ __MODULE__) do
GenServer.stop(event_manager_server)
end
# Server Callbacks
@impl true
def init(bindings) do
_ = Process.flag(:trap_exit, true)
{:ok,
%{
bindings: bindings,
status: :ready,
recipients: MapSet.new()
}}
end
@impl true
def handle_call({:subscribe, pid}, _from, state) do
if state.status == :ready do
:ok = start_polling(state.bindings)
end
{:reply, :ok,
%{
state
| status: :polling,
recipients: MapSet.put(state.recipients, pid)
}}
end
@impl true
def handle_info({:event, packed_event}, state) when is_tuple(packed_event) do
handle_info({:event, unpack_event(packed_event)}, state)
end
def handle_info({:event, %Event{} = event}, state) do
# Notify subscribers of the event
:ok = notify(state.recipients, event)
{:noreply, state}
end
def handle_info(_message, state) do
{:noreply, state}
end
@impl true
def terminate(_reason, state) do
# Try to stop polling for events to leave the system in a clean state. If
# this fails or `terminate/2` isn't called, it will have to be done later.
_ = state.bindings.stop_polling()
:ok
end
defp start_polling(bindings) do
case bindings.start_polling(self()) do
{:ok, _resource} ->
:ok
{:error, :already_polling} ->
with :ok <- bindings.stop_polling(),
{:ok, _resource} <- bindings.start_polling(self()),
do: :ok
{:error, unhandled_error} ->
{:error, unhandled_error}
end
end
defp notify(recipients, event) do
for pid <- recipients do
send(pid, {:event, event})
end
:ok
end
defp unpack_event({type, mod, key, ch, w, h, x, y}) do
%Event{type: type, mod: mod, key: key, ch: ch, w: w, h: h, x: x, y: y}
end
end
|
lib/ex_termbox/event_manager.ex
| 0.756447
| 0.509947
|
event_manager.ex
|
starcoder
|
defmodule AntikytheraCore.TemplateEngine do
@moduledoc """
This is an implementation of `EEx.Engine` that auto-escapes dynamic parts within HAML templates.
"""
use EEx.Engine
alias Antikythera.TemplateSanitizer
def init(_opts), do: {:safe, ""}
def handle_body({:safe, iodata}) do
q =
quote do
IO.iodata_to_binary(unquote(iodata))
end
{:safe, q}
end
def handle_text("", text) do
handle_text({:safe, ""}, text)
end
def handle_text({:safe, buffer}, text) do
q = quote do: [unquote(buffer) | unquote(text)]
{:safe, q}
end
def handle_expr("", marker, expr) do
handle_expr({:safe, ""}, marker, expr)
end
def handle_expr({:safe, buffer}, "=", expr) do
q =
quote do
tmp = unquote(buffer)
[tmp | unquote(to_safe_expr(expr))]
end
{:safe, q}
end
def handle_expr({:safe, buffer}, "", expr) do
q =
quote do
tmp = unquote(buffer)
unquote(expr)
tmp
end
{:safe, q}
end
# For literals we can do the work at compile time
defp to_safe_expr(s) when is_binary(s) , do: TemplateSanitizer.html_escape(s)
defp to_safe_expr(nil) , do: ""
defp to_safe_expr(a) when is_atom(a) , do: TemplateSanitizer.html_escape(Atom.to_string(a))
defp to_safe_expr(i) when is_integer(i), do: Integer.to_string(i)
defp to_safe_expr(f) when is_float(f) , do: Float.to_string(f)
# Otherwise we do the work at runtime
defp to_safe_expr(expr) do
quote do
AntikytheraCore.TemplateEngine.to_safe_iodata(unquote(expr))
end
end
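# Illustrative behavior (assuming `TemplateSanitizer.html_escape/1` performs
# standard HTML escaping):
#
#     to_safe_iodata("<script>")       #=> "&lt;script&gt;"   (escaped)
#     to_safe_iodata({:safe, "<b>"})   #=> "<b>"              (passed through as-is)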
def to_safe_iodata({:safe, data}) , do: data
def to_safe_iodata(s) when is_binary(s) , do: TemplateSanitizer.html_escape(s)
def to_safe_iodata(nil) , do: ""
def to_safe_iodata(a) when is_atom(a) , do: TemplateSanitizer.html_escape(Atom.to_string(a))
def to_safe_iodata(i) when is_integer(i) , do: Integer.to_string(i)
def to_safe_iodata(f) when is_float(f) , do: Float.to_string(f)
def to_safe_iodata([]) , do: ""
def to_safe_iodata([h | _] = l) when is_integer(h), do: List.to_string(l) # convert charlist to String.t
def to_safe_iodata([h | t]) , do: [to_safe_iodata(h) | to_safe_iodata(t)]
end
|
core/web/template_engine.ex
| 0.627837
| 0.406155
|
template_engine.ex
|
starcoder
|
defmodule NewRelic.Telemetry.Plug do
use GenServer
@moduledoc """
Provides `Plug` instrumentation via `telemetry`.
Plug pipelines are auto-discovered and instrumented.
We automatically gather:
* Transaction metrics and events
* Transaction Traces
* Distributed Traces
You can opt-out of this instrumentation via configuration. See `NewRelic.Config` for details.
----
To prevent reporting an individual transaction:
```elixir
NewRelic.ignore_transaction()
```
----
Inside a Transaction, the agent will track work across processes that are spawned and linked.
You can signal to the agent not to track work done inside a spawned process, which will
exclude it from the current Transaction.
To exclude a process from the Transaction:
```elixir
Task.async(fn ->
NewRelic.exclude_from_transaction()
Work.wont_be_tracked()
end)
```
"""
alias NewRelic.{Transaction, DistributedTrace, Util}
@doc false
def start_link(_) do
config = %{
enabled?: NewRelic.Config.feature?(:plug_instrumentation),
handler_id: {:new_relic, :plug}
}
GenServer.start_link(__MODULE__, config, name: __MODULE__)
end
@cowboy_start [:cowboy, :request, :start]
@cowboy_stop [:cowboy, :request, :stop]
@cowboy_exception [:cowboy, :request, :exception]
@plug_router_start [:plug, :router_dispatch, :start]
@plug_events [
@cowboy_start,
@cowboy_stop,
@cowboy_exception,
@plug_router_start
]
@doc false
def init(%{enabled?: false}), do: :ignore
def init(%{enabled?: true} = config) do
:telemetry.attach_many(
config.handler_id,
@plug_events,
&__MODULE__.handle_event/4,
config
)
Process.flag(:trap_exit, true)
{:ok, config}
end
@doc false
def terminate(_reason, %{handler_id: handler_id}) do
:telemetry.detach(handler_id)
end
@doc false
def handle_event(
@cowboy_start,
%{system_time: system_time},
meta,
_config
) do
Transaction.Reporter.start_transaction(:web)
if NewRelic.Config.enabled?(),
do: DistributedTrace.start(:http, meta.req.headers)
add_start_attrs(meta, system_time)
maybe_report_queueing(meta)
end
def handle_event(
@plug_router_start,
_measurements,
%{conn: conn, route: route},
_config
) do
NewRelic.add_attributes(plug_name: plug_name(conn, route))
end
def handle_event(
@cowboy_stop,
%{duration: duration} = meas,
meta,
_config
) do
add_stop_attrs(meas, meta, duration)
add_stop_error_attrs(meta)
Transaction.Reporter.stop_transaction(:web)
end
# Don't treat 404 as an exception
def handle_event(
@cowboy_exception,
%{duration: duration} = meas,
%{resp_status: "404" <> _} = meta,
_config
) do
add_stop_attrs(meas, meta, duration)
Transaction.Reporter.stop_transaction(:web)
end
def handle_event(
@cowboy_exception,
%{duration: duration} = meas,
%{kind: kind} = meta,
_config
) do
add_stop_attrs(meas, meta, duration)
{reason, stack} = reason_and_stack(meta)
Transaction.Reporter.fail(%{kind: kind, reason: reason, stack: stack})
Transaction.Reporter.stop_transaction(:web)
end
def handle_event(_event, _measurements, _meta, _config) do
:ignore
end
defp add_start_attrs(meta, system_time) do
[
pid: inspect(self()),
system_time: system_time,
host: meta.req.host,
path: meta.req.path,
remote_ip: meta.req.peer |> elem(0) |> :inet_parse.ntoa() |> to_string(),
referer: meta.req.headers["referer"],
user_agent: meta.req.headers["user-agent"],
content_type: meta.req.headers["content-type"],
request_method: meta.req.method
]
|> NewRelic.add_attributes()
end
@kb 1024
defp add_stop_attrs(meas, meta, duration) do
info = Process.info(self(), [:memory, :reductions])
[
duration: duration,
status: status_code(meta),
memory_kb: info[:memory] / @kb,
reductions: info[:reductions],
"cowboy.req_body_duration_ms": meas[:req_body_duration] |> to_ms,
"cowboy.resp_duration_ms": meas[:resp_duration] |> to_ms,
"cowboy.req_body_length": meas[:req_body_length],
"cowboy.resp_body_length": meas[:resp_body_length]
]
|> NewRelic.add_attributes()
end
defp add_stop_error_attrs(%{resp_status: "5" <> _, error: {:socket_error, error, message}}) do
[
error: true,
"cowboy.socket_error": error,
"cowboy.socket_error.message": message
]
|> NewRelic.add_attributes()
end
# client timeout:
defp add_stop_error_attrs(%{error: {:socket_error, error, message}}) do
[
"cowboy.socket_error": error,
"cowboy.socket_error.message": message
]
|> NewRelic.add_attributes()
end
# server timeout:
defp add_stop_error_attrs(%{error: {:connection_error, error, message}}) do
[
"cowboy.connection_error": error,
"cowboy.connection_error.message": message
]
|> NewRelic.add_attributes()
end
defp add_stop_error_attrs(_meta) do
:ok
end
defp to_ms(duration),
do: System.convert_time_unit(duration, :native, :millisecond)
@request_start_header "x-request-start"
defp maybe_report_queueing(meta) do
with true <- NewRelic.Config.feature?(:request_queuing_metrics),
request_start when is_binary(request_start) <- meta.req.headers[@request_start_header],
{:ok, request_start_s} <- Util.RequestStart.parse(request_start) do
NewRelic.add_attributes(request_start_s: request_start_s)
end
end
defp status_code(%{resp_status: :undefined}) do
nil
end
defp status_code(%{resp_status: status})
when is_integer(status) do
status
end
# cowboy may report the status as a full status line (e.g. "404 Not Found");
# take just the numeric code.
defp status_code(%{resp_status: status})
when is_binary(status) do
String.split(status) |> List.first() |> String.to_integer()
end
defp reason_and_stack(%{reason: %{__exception__: true} = reason, stacktrace: stack}) do
{reason, stack}
end
defp reason_and_stack(%{reason: {{reason, stack}, _init_call}}) do
{reason, stack}
end
defp reason_and_stack(%{reason: {reason, _init_call}}) do
{reason, []}
end
defp reason_and_stack(unexpected_cowboy_exception) do
NewRelic.log(:debug, "unexpected_cowboy_exception: #{inspect(unexpected_cowboy_exception)}")
{:unexpected_cowboy_exception, []}
end
defp plug_name(conn, match_path) do
"/Plug/#{conn.method}/#{match_path}"
|> String.replace("/*glob", "")
|> String.replace("/*_path", "")
end
end
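# A minimal sketch (handler id and module name are hypothetical) of attaching
# an extra handler to the same cowboy telemetry events this module consumes,
# e.g. for request logging alongside the New Relic instrumentation:
defmodule MyApp.RequestLogger do
  require Logger

  def attach do
    :telemetry.attach(
      "my-app-request-logger",
      [:cowboy, :request, :stop],
      &__MODULE__.handle_event/4,
      nil
    )
  end

  def handle_event([:cowboy, :request, :stop], %{duration: duration}, _meta, _config) do
    ms = System.convert_time_unit(duration, :native, :millisecond)
    Logger.debug("request finished in #{ms}ms")
  end
end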
|
lib/new_relic/telemetry/plug.ex
| 0.757301
| 0.716051
|
plug.ex
|
starcoder
|
defmodule Phoenix.Endpoint do
@moduledoc """
Defines a Phoenix endpoint.
The endpoint is the boundary where all requests to your
web application start. It is also the interface your
application provides to the underlying web servers.
Overall, an endpoint has three responsibilities:
* It provides a wrapper for starting and stopping the
endpoint as part of a supervision tree.
* It defines the initial plug pipeline to which
requests are sent.
* It hosts web-specific configuration for your
application.
## Endpoints
An endpoint is simply a module defined with the help
of Phoenix.Endpoint. If you have used the phoenix.new
generator, an endpoint was automatically generated as
part of your application:
defmodule YourApp.Endpoint do
use Phoenix.Endpoint, otp_app: :your_app
# plug ...
# plug ...
plug :router, YourApp.Router
end
Before being used, an endpoint must be explicitly started as part
of your application supervision tree (which is done by
default in generated applications):
worker(YourApp.Endpoint, [])
## Endpoint configuration
All endpoints are configured in your application environment.
For example:
config :your_app, YourApp.Endpoint,
secret_key_base: "<KEY>"
Endpoint configuration is split in two categories. Compile-time
configuration means the configuration is read during compilation
and changing it at runtime has no effect. The compile-time
configuration is mostly related to error handling.
On the other hand, runtime configuration is accessed during or
after your application is started and can be read through the
`config/2` function:
YourApp.Endpoint.config(:port)
YourApp.Endpoint.config(:some_config, :default_value)
### Compile-time
* `:debug_errors` - when true, uses `Plug.Debugger` functionality for
debugging failures in the application. Recommended to be set to true
only in development, as it allows listing the application source
code during debugging. Defaults to false.
* `:render_errors` - a module representing a view to render templates
whenever there is a failure in the application. For example, if the
application crashes with a 500 error during a HTML request,
`render("500.html", assigns)` will be called in the view given to
`:render_errors`. The default view is `MyApp.ErrorView`.
### Runtime
* `:cache_static_lookup` - when true, static assets lookup in the
filesystem via the `static_path` function are cached. Defaults to true.
* `:http` - the configuration for the http server. Currently uses
cowboy and accepts all options as defined by `Plug.Adapters.Cowboy`.
Defaults to false.
* `:https` - the configuration for the https server. Currently uses
cowboy and accepts all options as defined by `Plug.Adapters.Cowboy`.
Defaults to false.
* `:secret_key_base` - a secret key used as base to generate secrets
to encode cookies, session and friends. Defaults to nil as it must
be set per application.
* `:server` - when true, starts the web server when the endpoint
supervision tree starts. Defaults to false. The `mix phoenix.server`
task automatically sets this to true.
* `:url` - configuration for generating URLs throughout the app.
Accepts the host, scheme and port. Defaults to:
[host: "localhost"]
## Endpoint API
In the previous section, we have used the `config/2` function which is
automatically generated in your Endpoint. Here is a summary of all functions
defined in your endpoint:
* `start_link()` - starts the Endpoint supervision tree, including its
configuration cache and possibly the servers for handling requests
* `config(key, default)` - access the endpoint configuration given by key
* `config_change(changed, removed)` - reload the endpoint configuration on application upgrades
* `url(path)` - returns the URL for this endpoint with the given path
* `static_path(path)` - returns the static path for a given asset
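For example, given the `YourApp.Endpoint` above:

    YourApp.Endpoint.url("/users")
    YourApp.Endpoint.static_path("/css/app.css")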
Besides the functions above, it also defines the API expected by Plug
for serving requests:
* `init(opts)` - invoked when starting the endpoint server
* `call(conn, opts)` - invoked on every request and it simply dispatches to
the defined Plug pipeline
"""
alias Phoenix.Endpoint.Adapter
@doc false
defmacro __using__(opts) do
quote do
unquote(config(opts))
unquote(plug())
unquote(server())
end
end
defp config(opts) do
quote do
otp_app = unquote(opts)[:otp_app] || raise "endpoint expects :otp_app to be given"
config = Adapter.config(otp_app, __MODULE__)
@config config
end
end
defp plug() do
quote location: :keep do
@behaviour Plug
import Phoenix.Endpoint
Module.register_attribute(__MODULE__, :plugs, accumulate: true)
@before_compile Phoenix.Endpoint
def init(opts) do
opts
end
def call(conn, opts) do
conn = put_in conn.secret_key_base, config(:secret_key_base)
conn = update_in conn.private, &Map.put(&1, :phoenix_endpoint, __MODULE__)
phoenix_endpoint_pipeline(conn, opts)
end
defoverridable [init: 1, call: 2]
if config[:debug_errors] do
use Plug.Debugger, otp_app: otp_app
end
use Phoenix.Endpoint.ErrorHandler, view: config[:render_errors]
end
end
defp server() do
quote location: :keep, unquote: false do
@doc """
Starts the endpoint supervision tree.
"""
def start_link do
Adapter.start_link(unquote(otp_app), __MODULE__)
end
@doc """
Returns the endpoint configuration for `key`
Returns `default` if the key does not exist.
"""
def config(key, default \\ nil) do
case :ets.lookup(__MODULE__, key) do
[{^key, val}] -> val
[] -> default
end
end
@doc """
Reloads the configuration given the application environment changes.
"""
def config_change(changed, removed) do
Phoenix.Config.config_change(__MODULE__, changed, removed)
end
@doc """
Generates a URL for the given path based on the
`:url` configuration for the endpoint.
"""
def url(path) do
Phoenix.Config.cache(__MODULE__,
:__phoenix_url__,
&Phoenix.Endpoint.Adapter.url/1) <> path
end
@doc """
Generates a route to a static file based on the contents inside
`priv/static` for the endpoint otp application.
"""
def static_path(path) do
Phoenix.Config.cache(__MODULE__,
{:__phoenix_static__, path},
&Phoenix.Endpoint.Adapter.static_path(&1, path))
end
end
end
@doc false
defmacro __before_compile__(env) do
plugs = Module.get_attribute(env.module, :plugs)
plugs = for plug <- plugs, allow_plug?(plug), do: plug
{conn, body} = Plug.Builder.compile(plugs)
quote do
defp phoenix_endpoint_pipeline(unquote(conn), _), do: unquote(body)
end
end
defp allow_plug?({Phoenix.CodeReloader, _, _}), do:
Application.get_env(:phoenix, :code_reloader, false)
defp allow_plug?(_), do:
true
## API
@doc """
Stores a plug to be executed as part of the pipeline.
"""
defmacro plug(plug, opts \\ []) do
quote do
@plugs {unquote(plug), unquote(opts), true}
end
end
@doc """
A macro that can be plugged in order to handle routing errors.
By default, a Phoenix router raises a `Phoenix.Router.NoRouteError`
exception in case no route is found. This macro wraps the router call so
the route error does not pass through.
It also wraps the router call to provide better debugger and error
rendering behaviour.
## Examples
plug :router, MyApp.Router
"""
defmacro router(conn, plug) do
conf = Module.get_attribute(__CALLER__.module, :config)
code =
if conf[:debug_errors] do
quote do
Plug.Debugger.wrap(conn, @plug_debugger, fn ->
plug.call(conn, plug.init([]))
end)
end
else
quote do
plug.call(conn, plug.init([]))
end
end
quote do
conn = unquote(conn)
plug = unquote(plug)
Phoenix.Endpoint.ErrorHandler.wrap(conn, @phoenix_handle_errors, fn ->
unquote(code)
end)
end
end
end
|
lib/phoenix/endpoint.ex
| 0.902204
| 0.529142
|
endpoint.ex
|
starcoder
|
defmodule ArrowWeb.DisruptionController.Index do
@moduledoc """
Builds and executes the database queries for the disruptions index.
"""
alias Arrow.{Adjustment, Disruption, Repo}
alias ArrowWeb.DisruptionController.Filters
import Ecto.Query
@spec all(Filters.t() | nil) :: [Disruption.t()]
def all(filters \\ nil), do: base_query() |> apply_filters(filters) |> Repo.all()
defp apply_filter({:include_past?, false}, query) do
cutoff = Date.utc_today() |> Date.add(-7)
from [revisions: r] in query, where: is_nil(r.end_date) or r.end_date > ^cutoff
end
defp apply_filter({:include_past?, true}, query), do: query
@empty_set MapSet.new()
defp apply_filter({:kinds, kinds}, query) when kinds != @empty_set do
kinds_list = kinds |> expand_kinds_filter() |> MapSet.to_list()
condition =
kinds_list
|> Enum.map(&Adjustment.kind_is/1)
|> Enum.reduce(dynamic(false), &dynamic([adjustments: a], ^&2 or ^&1))
|> then(&dynamic([revisions: r], ^&1 or r.adjustment_kind in ^kinds_list))
from query, where: ^condition
end
defp apply_filter({:kinds, kinds}, query) when kinds == @empty_set, do: query
defp apply_filter({:only_approved?, true}, query) do
from [revisions: r] in query, where: r.row_approved
end
defp apply_filter({:only_approved?, false}, query), do: query
defp apply_filter({:search, search}, query) when is_binary(search) do
from [revisions: r, adjustments: a] in query,
where: ilike(r.description, ^"%#{search}%") or ilike(a.source_label, ^"%#{search}%")
end
defp apply_filter({:search, nil}, query), do: query
defp apply_filter({:sort, {direction, :id}}, query) do
from [disruptions: d] in query, order_by: {^direction, d.id}
end
defp apply_filter({:sort, {direction, :start_date}}, query) do
from [revisions: r] in query, order_by: {^direction, r.start_date}
end
defp apply_filters(query, nil), do: query
defp apply_filters(query, filters) do
filters |> Filters.flatten() |> Enum.reduce(query, &apply_filter/2)
end
defp base_query do
from [disruptions: d, revisions: r] in Disruption.with_latest_revisions(),
where: r.is_active == true,
left_join: a in assoc(r, :adjustments),
as: :adjustments,
preload: [
revisions:
{r,
[
{:adjustments, ^from(a in Adjustment, order_by: :source_label)},
:days_of_week,
:exceptions
]}
]
end
@green_line_branch_kinds ~w(green_line_b green_line_c green_line_d green_line_e)a
# When `green_line` is used as a kind filter, in addition to taking it literally, include all
# kinds that refer to specific Green Line branches
defp expand_kinds_filter(kinds) do
if :green_line in kinds,
do: MapSet.union(kinds, MapSet.new(@green_line_branch_kinds)),
else: kinds
end
end
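# Minimal usage sketch: with no filters, `all/0` returns every disruption
# whose latest revision is active, with adjustments, days of week, and
# exceptions preloaded:
#
#     disruptions = ArrowWeb.DisruptionController.Index.all()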
|
lib/arrow_web/controllers/disruption_controller/index.ex
| 0.756447
| 0.401482
|
index.ex
|
starcoder
|
defmodule Game.TileGenerator do
alias Game.Repo
alias Game.Tile
alias Game.Region
defp calculate_height(
{coordinates, tile},
calculated_tiles,
neighbors,
remaining_tiles,
boundary_tiles
)
when map_size(neighbors) > 0 do
calculated_and_boundary_tiles = Map.merge(boundary_tiles, calculated_tiles)
calculated_neighbors = Game.TileMap.neighbors(calculated_and_boundary_tiles, coordinates)
calculated_neighbors_height =
Enum.reduce(calculated_neighbors, 0, fn {_, neighbor}, sum ->
sum + (Map.get(neighbor, :height, 0) || 0)
end)
height =
if map_size(calculated_neighbors) > 0 do
round(
calculated_neighbors_height / map_size(calculated_neighbors) +
:math.sqrt(:rand.uniform() * 16) * (:rand.uniform() - 0.5)
)
else
:rand.uniform(10) - 5
end
tile = Map.put(tile, :height, height)
calculated_tiles = Map.put(calculated_tiles, coordinates, tile)
neighbors = Map.merge(neighbors, Game.TileMap.neighbors(remaining_tiles, coordinates))
remaining_tiles = Map.drop(remaining_tiles, Map.keys(neighbors))
{next_coordinates, next_tile} = Enum.random(neighbors)
neighbors = Map.delete(neighbors, next_coordinates)
Map.put(
calculate_height(
{next_coordinates, next_tile},
calculated_tiles,
neighbors,
remaining_tiles,
boundary_tiles
),
coordinates,
tile
)
end
# Note: a bare `%{}` pattern matches any map, so this clause runs once the
# guarded clause above no longer applies, i.e. when no unvisited neighbors
# remain.
defp calculate_height({coordinates, tile}, calculated_tiles, %{}, %{}, %{}) do
calculated_neighbors = Game.TileMap.neighbors(calculated_tiles, coordinates)
calculated_neighbors_height =
Enum.reduce(calculated_neighbors, 0, fn {_, neighbor}, sum ->
sum + (Map.get(neighbor, :height, 0) || 0)
end)
tile =
Map.put(
tile,
:height,
round(
calculated_neighbors_height / map_size(calculated_neighbors) +
:math.sqrt(:rand.uniform() * 16) * (:rand.uniform() - 0.5)
)
)
Map.put(calculated_tiles, coordinates, tile)
end
defp calculate_height(tiles, boundary_tiles) do
neighbors =
Enum.reduce(tiles, %{}, fn {coords, tile}, acc ->
if map_size(Game.TileMap.neighbors(boundary_tiles, coords)) > 0 do
Map.put(acc, coords, tile)
else
acc
end
end)
remaining_tiles = Map.drop(tiles, Map.keys(neighbors))
{next_coordinates, next_tile} = Enum.random(neighbors)
neighbors = Map.delete(neighbors, next_coordinates)
calculate_height(
{next_coordinates, next_tile},
%{},
neighbors,
remaining_tiles,
boundary_tiles
)
end
def calculate_height(tiles) do
coordinates = {0, 0, 0}
{tile, remaining_tiles} = Map.pop(tiles, coordinates)
tile = Map.put(tile, :height, :rand.uniform(10) - 5)
neighbors = Game.TileMap.neighbors(remaining_tiles, coordinates)
remaining_tiles = Map.drop(remaining_tiles, Map.keys(neighbors))
Map.put(
calculate_height({coordinates, tile}, %{}, neighbors, remaining_tiles, %{}),
coordinates,
tile
)
end
defp save(tiles) do
Repo.insert_all(
Tile,
tiles
)
end
def call(region) do
center = {region.x, region.y, region.z}
tiles = Game.TileMap.generate(center)
tiles =
if center == {0, 0, 0} do
calculate_height(tiles)
else
boundary_tiles = Tile.Queries.within_range(region.world_id, center, Region.size() + 1)
boundary_tiles =
Enum.reduce(boundary_tiles, %{}, fn tile, acc ->
Map.put(acc, {tile.x, tile.y, tile.z}, %{
x: tile.x,
y: tile.y,
z: tile.z,
height: tile.height
})
end)
calculate_height(tiles, boundary_tiles)
end
Map.values(tiles)
|> Enum.map(fn tile ->
tile
|> Map.put(:terrain, %{type: "dirt"})
|> Map.put(:region_id, region.id)
|> Map.put(:world_id, region.world_id)
end)
|> Enum.chunk_every(5000)
|> Enum.each(fn chunk -> save(chunk) end)
region
end
end
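# A rough usage sketch (assumes Game.TileMap.generate/1 returns a map of cube
# coordinates to tile maps, as used in call/1 above):
#
#     tiles = Game.TileMap.generate({0, 0, 0})
#     tiles_with_heights = Game.TileGenerator.calculate_height(tiles)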
|
lib/game/generators/tile_generator.ex
| 0.559651
| 0.656803
|
tile_generator.ex
|
starcoder
|
defmodule ScrapyCloudEx.Endpoints.Helpers do
@moduledoc false
require Logger
alias ScrapyCloudEx.HttpAdapter.{RequestConfig, Response}
alias ScrapyCloudEx.HttpAdapters.Default, as: DefaultAdapter
@typep param :: {atom, any}
# parameter naming in the API is a bit inconsistent where multi-word variables are concerned
# (e.g. include_headers vs lineend) and often doesn't conform to the Elixir convention of
# snake_casing variables composed of multiple words, so this will allow us to accept both (e.g.)
# `line_end` and `lineend` and convert them to the name the API expects
@spec canonicalize_params(Keyword.t(), Keyword.t()) :: Keyword.t()
def canonicalize_params(params, aliases) do
params |> Enum.map(&canonicalize_param(&1, aliases))
end
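# Illustrative example (a warning is logged when an alias gets replaced):
#
#     canonicalize_params([line_end: "\n"], line_end: :lineend)
#     #=> [lineend: "\n"]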
@spec validate_params(Keyword.t(), [atom, ...]) :: :ok | ScrapyCloudEx.invalid_param_error()
def validate_params(params, expected) when is_list(params) and is_list(expected) do
params
|> Enum.reject(¶m_valid?(expected, &1))
|> case do
[] ->
:ok
[{invalid_param, _} | _] ->
"valid params: #{inspect(expected |> Enum.sort())}"
|> invalid_param_error(invalid_param)
end
end
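# Illustrative examples:
#
#     validate_params([count: 3], [:count, :offset])
#     #=> :ok
#
#     validate_params([bogus: 1], [:count])
#     #=> {:invalid_param, {:bogus, "valid params: [:count]"}}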
@spec invalid_param_error(String.t() | ScrapyCloudEx.tagged_error_info(), atom) ::
ScrapyCloudEx.invalid_param_error()
def invalid_param_error(error, tag) when is_atom(tag) or is_list(tag),
do: {:invalid_param, {tag, error}}
def make_request(%RequestConfig{} = config) do
config = RequestConfig.ensure_defaults(config)
%RequestConfig{opts: opts} = config
Logger.debug("making request: #{inspect(config, pretty: true)}")
http_client = get_http_client(opts)
case http_client.request(config) do
{:error, _} = error ->
error
{:ok, response} ->
response = process_response(response)
Logger.debug("received response: #{inspect(response, pretty: true)}")
http_client.handle_response(response, opts)
end
end
@spec canonicalize_param(param, Keyword.t()) :: param
defp canonicalize_param({k, v} = pair, param_aliases) do
case Keyword.get(param_aliases, k) do
nil ->
pair
canonical_name ->
Logger.warn("replacing '#{inspect(k)}' parameter with '#{inspect(canonical_name)}'")
{canonical_name, v}
end
end
@spec param_valid?([atom], {atom, any} | atom) :: boolean
defp param_valid?(valid, {k, _}), do: valid |> param_valid?(k)
defp param_valid?(valid, param), do: valid |> Enum.member?(param)
@spec get_http_client(Keyword.t()) :: atom
defp get_http_client(opts) do
opts |> Keyword.get(:http_adapter, DefaultAdapter)
end
@spec process_response(Response.t()) :: Response.t()
defp process_response(%Response{} = response), do: maybe_unzip_body(response)
@spec maybe_unzip_body(Response.t()) :: Response.t()
defp maybe_unzip_body(%Response{body: body} = response) do
body =
if Response.gzipped?(response) do
Logger.debug("gunzipping compressed body")
:zlib.gunzip(body)
else
body
end
%{response | body: body}
end
end
|
lib/endpoints/helpers.ex
| 0.819424
| 0.40589
|
helpers.ex
|
starcoder
|
defmodule Harald.Transport do
@moduledoc """
A server to manage lower-level transports and parse Bluetooth events.
"""
use GenServer
alias Harald.{HCI, LE}
@type adapter_state :: map
@type command :: binary
@type namespace :: atom
defmodule State do
@moduledoc false
@enforce_keys [:adapter, :adapter_state, :handlers]
defstruct @enforce_keys
end
@doc """
Start the transport.
## Options
`:handlers` - additional processes to send Bluetooth events to
`:namespace` - a prefix to what the transport will register its name as
Note: `opts` is passed through to the `init/1` call.
"""
@spec start_link(keyword) :: GenServer.on_start()
def start_link(opts) do
GenServer.start_link(__MODULE__, opts, name: name(opts[:namespace]))
end
@impl GenServer
def init(opts) do
{adapter, adapter_opts} = opts[:adapter]
{:ok, adapter_state} = apply(adapter, :setup, [self(), adapter_opts])
handlers = [LE | Keyword.get(opts, :handlers, [])]
handler_pids =
for h <- handlers do
{:ok, pid} = apply(h, :setup, [Keyword.take(opts, [:namespace])])
pid
end
{:ok, %State{adapter: adapter, adapter_state: adapter_state, handlers: handler_pids}}
end
@doc """
Send an HCI command to the Bluetooth HCI.
"""
@spec send_command(namespace, command) :: any
def send_command(namespace, command) when is_atom(namespace) and is_binary(command) do
namespace
|> name()
|> GenServer.call({:send_command, command})
end
@impl GenServer
def handle_info({:transport_adapter, msg}, %{handlers: handlers} = state) do
{_, data} = HCI.deserialize(msg)
send_to_handlers(data, handlers)
{:noreply, state}
end
@impl GenServer
def handle_call(
{:send_command, command},
_from,
%State{adapter: adapter, adapter_state: adapter_state} = state
) do
{:ok, adapter_state} = adapter.send_command(command, adapter_state)
{:reply, :ok, %State{state | adapter_state: adapter_state}}
end
defp name(namespace), do: String.to_atom("#{namespace}.#{__MODULE__}")
defp send_to_handlers(data, handlers) do
for h <- handlers do
send(h, {:bluetooth_event, data})
end
end
end
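# A minimal usage sketch. `MyAdapter` is hypothetical (it must implement the
# setup/2 and send_command/2 functions used above) and the binary is an
# example HCI command packet:
#
#     {:ok, _pid} =
#       Harald.Transport.start_link(
#         namespace: :bt,
#         adapter: {MyAdapter, []},
#         handlers: []
#       )
#
#     Harald.Transport.send_command(:bt, <<0x01, 0x03, 0x0C, 0x00>>)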
|
lib/harald/transport.ex
| 0.782205
| 0.438725
|
transport.ex
|
starcoder
|
defmodule Tesla.Middleware.Compression do
@moduledoc """
Compress requests and decompress responses.
Supports "gzip" and "deflate" encodings using erlang's built-in `:zlib` module.
## Example usage
```
defmodule MyClient do
use Tesla
plug Tesla.Middleware.Compression, format: "gzip"
end
```
## Options
- `:format` - request compression format, `"gzip"` (default) or `"deflate"`
"""
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, opts) do
env
|> compress(opts)
|> Tesla.run(next)
|> decompress()
end
defp compressable?(body), do: is_binary(body)
@doc """
Compress request.
It is used by `Tesla.Middleware.CompressRequest`.
"""
def compress(env, opts) do
if compressable?(env.body) do
format = Keyword.get(opts || [], :format, "gzip")
env
|> Tesla.put_body(compress_body(env.body, format))
|> Tesla.put_headers([{"content-encoding", format}])
else
env
end
end
defp compress_body(body, "gzip"), do: :zlib.gzip(body)
defp compress_body(body, "deflate"), do: :zlib.zip(body)
@doc """
Decompress response.
It is used by `Tesla.Middleware.DecompressResponse`.
"""
def decompress({:ok, env}), do: {:ok, decompress(env)}
def decompress({:error, reason}), do: {:error, reason}
def decompress(env) do
env
|> Tesla.put_body(decompress_body(env.body, Tesla.get_header(env, "content-encoding")))
end
# <<31, 139, 8>> is the gzip magic number plus the deflate method byte; only
# gunzip bodies that actually carry a gzip header.
defp decompress_body(<<31, 139, 8, _::binary>> = body, "gzip"), do: :zlib.gunzip(body)
defp decompress_body(body, "deflate"), do: :zlib.unzip(body)
defp decompress_body(body, _content_encoding), do: body
end
defmodule Tesla.Middleware.CompressRequest do
@moduledoc """
Only compress request.
See `Tesla.Middleware.Compression` for options.
"""
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, opts) do
env
|> Tesla.Middleware.Compression.compress(opts)
|> Tesla.run(next)
end
end
defmodule Tesla.Middleware.DecompressResponse do
@moduledoc """
Only decompress response.
See `Tesla.Middleware.Compression` for options.
"""
@behaviour Tesla.Middleware
@impl Tesla.Middleware
def call(env, next, _opts) do
env
|> Tesla.run(next)
|> Tesla.Middleware.Compression.decompress()
end
end
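# Sketch (client module name hypothetical): use the split middlewares when
# only one direction should be touched, e.g. compress uploads with "deflate"
# while decompressing whatever the server returns:
defmodule MyClient do
  use Tesla

  plug Tesla.Middleware.CompressRequest, format: "deflate"
  plug Tesla.Middleware.DecompressResponse
end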
|
lib/tesla/middleware/compression.ex
| 0.913266
| 0.706924
|
compression.ex
|
starcoder
|
defmodule AlertProcessor.TimeFrameComparison do
@moduledoc """
Module used to compare subscription timeframe maps with alert timeframe
maps. The maps contain keys identifying the day type being checked, along
with a start and end second of day, used to create a range to check for an
intersection.
"""
@type second_of_day :: 0..86_399
@type timeframe_map :: %{
optional(:sunday) => %{start: second_of_day, end: second_of_day},
optional(:monday) => %{start: second_of_day, end: second_of_day},
optional(:tuesday) => %{start: second_of_day, end: second_of_day},
optional(:wednesday) => %{start: second_of_day, end: second_of_day},
optional(:thursday) => %{start: second_of_day, end: second_of_day},
optional(:friday) => %{start: second_of_day, end: second_of_day},
optional(:saturday) => %{start: second_of_day, end: second_of_day}
}
@type time_period :: %{start: second_of_day, end: second_of_day}
@spec match?(boolean | timeframe_map, timeframe_map) :: boolean
def match?(active_period_timeframe_map, _) when is_boolean(active_period_timeframe_map),
do: active_period_timeframe_map
def match?(active_period_timeframe_map, subscription_timeframe_map) do
relevant_active_period_timeframe_map =
Map.take(active_period_timeframe_map, Map.keys(subscription_timeframe_map))
Enum.any?(relevant_active_period_timeframe_map, fn {day_of_week_atom, time_range} ->
timeframes_match?(time_range, subscription_timeframe_map[day_of_week_atom])
end)
end
@spec timeframes_match?(time_period, time_period) :: boolean
defp timeframes_match?(active_period, subscription_time_period) do
any_overlap?(range_map(active_period), range_map(subscription_time_period))
end
defp range_map(%{start: start_seconds, end: end_seconds}) when end_seconds > start_seconds do
start_minute = Integer.floor_div(start_seconds, 60) * 60
end_minute = Integer.floor_div(end_seconds, 60) * 60
[{start_minute, end_minute}]
end
# An end before the start means the period wraps past midnight; split it
# into two ranges covering the evening and morning portions.
defp range_map(%{start: start_seconds, end: end_seconds}) do
start_minute = Integer.floor_div(start_seconds, 60) * 60
end_minute = Integer.floor_div(end_seconds, 60) * 60
[{0, end_minute}, {start_minute, 86_399}]
end
defp any_overlap?(active_periods, subscription_periods) do
Enum.any?(active_periods, fn {aps, ape} ->
Enum.any?(subscription_periods, fn {sps, spe} ->
aps < spe && sps < ape
end)
end)
end
end
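# Worked example: a Monday subscription window of 08:00-09:00 (seconds
# 28_800..32_400) matches an alert active 08:30-10:00 (seconds
# 30_600..36_000), because the minute-aligned ranges overlap:
#
#     AlertProcessor.TimeFrameComparison.match?(
#       %{monday: %{start: 30_600, end: 36_000}},
#       %{monday: %{start: 28_800, end: 32_400}}
#     )
#     #=> true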
|
apps/alert_processor/lib/rules_engine/time_frame_comparison.ex
| 0.841403
| 0.585605
|
time_frame_comparison.ex
|
starcoder
|
defmodule Absinthe.GraphqlWS.Socket do
@moduledoc """
This module is used by a custom websocket, which can then handle connections from a client
implementing the [GraphQL over WebSocket protocol](https://github.com/enisdenjo/graphql-ws/blob/master/PROTOCOL.md)
## Options
* `schema` - required - The Absinthe schema for the current application (example: `MyAppWeb.Schema`)
* `keepalive` - optional - Interval in milliseconds to send `:ping` control frames over the websocket.
Defaults to `30_000` (30 seconds).
* `pipeline` - optional - A `{module, function}` tuple defining how to generate an Absinthe pipeline
for each incoming message. Defaults to `{Absinthe.GraphqlWS.Socket, :absinthe_pipeline}`.
## Pipeline modification
The `:pipeline` option to socket definition defaults to `{Absinthe.GraphqlWS.Socket, :absinthe_pipeline}`.
This function returns the default pipeline provided by `&Absinthe.Pipeline.for_document/2`. Absinthe query execution
can be modified by altering the list of phases in this pipeline. See `Absinthe.Pipeline` for more info.
If an alternate pipeline function is provided, it must accept the arguments `schema` and `options`. These
options include the current context and any variables that are included with the requested query.
## Example
defmodule MyAppWeb.GraphqlSocket do
use Absinthe.GraphqlWS.Socket, schema: MyAppWeb.Schema
def handle_message(_msg, socket) do
{:ok, socket}
end
end
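The socket can then be mounted in a Phoenix endpoint like any other
`Phoenix.Socket.Transport` implementation (the path here is illustrative):

    socket "/graphql", MyAppWeb.GraphqlSocket,
      websocket: true,
      longpoll: false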
"""
alias Absinthe.GraphqlWS.Socket
require Logger
@default_keepalive 30_000
@enforce_keys ~w[absinthe connect_info endpoint handler keepalive pubsub]a
defstruct [
:absinthe,
:connect_info,
:endpoint,
:handler,
:keepalive,
:pubsub,
assigns: %{},
initialized?: false,
subscriptions: %{}
]
@typedoc """
A socket that holds information necessary for parsing incoming messages as well as outgoing subscription data.
"""
@type t() :: %Socket{
absinthe: map(),
assigns: map(),
connect_info: map(),
endpoint: module(),
initialized?: boolean(),
keepalive: integer(),
subscriptions: map()
}
@type socket() :: t()
@typedoc """
Opcode atoms for messages handled by `handle_control/2`. Used by server-side keepalive messages.
"""
@type control() ::
:ping
| :pong
@typedoc """
Opcode atoms for messages pushed to the client.
"""
@type opcode() ::
:text
| :binary
| control()
@typedoc """
JSON that conforms to the `graphql-ws` protocol.
"""
@type message() :: binary()
@typedoc """
A websocket frame to send to the client.
"""
@type frame() :: {opcode(), message()}
@typedoc """
Used internally by `Absinthe.GraphqlWS.Transport.handle_in/2`.
These are return values to incoming messages from a websocket.
## Values
* `{:ok, socket}` - save new socket state, without sending any data to the client.
* `{:reply, :ok, {:text, "{}"}, socket}` - send JSON content to the client.
* `{:reply, :error, {:text, "{}"}, socket}` - send an error with JSON payload to the client.
* `{:stop, :normal, socket}` - shut down the socket process.
"""
@type reply_inbound() ::
{:ok, socket()}
| {:reply, :ok, frame(), socket()}
| {:reply, :error, frame(), socket()}
| {:stop, term(), socket()}
@typedoc """
Valid return values from `c:handle_message/2`.
These are return values to messages that have been received from within Elixir
## Values
* `{:ok, socket}` - save new socket state, without sending any data to the client.
* `{:push, {:text, Message.Next.new(id, %{})}, socket}` - save new socket state, and send data to the client.
* `{:stop, :reason, socket}` - stop the socket.
"""
@type reply_message() ::
{:ok, socket()}
| {:push, frame(), socket()}
| {:stop, term(), socket()}
@typedoc """
Return values from `c:handle_init/2`.
"""
@type init() ::
{:ok, map(), socket()}
| {:error, map(), socket()}
| {:stop, term(), socket()}
@doc """
Handles messages that are sent to this process through `send/2`, which have not been caught
by the default implementation. It must return a `t:reply_message/0`.
If pushing content to the websocket, it must return a tuple in the form
`{:push, {:text, message}, socket}`, where `message` is JSON that represents a valid `graphql-ws`
message.
## Example
alias Absinthe.GraphqlWS.Message
def handle_message({:thing, thing}, socket) do
{:ok, assign(socket, :thing, thing)}
end
def handle_message({:send, id, payload}, socket) do
{:push, {:text, Message.Next.new(id, payload)}, socket}
end
def handle_message(_msg, socket) do
{:ok, socket}
end
"""
@callback handle_message(params :: term(), socket()) :: Socket.reply_message()
@doc """
Handle the `connection_init` message sent by the socket implementation. This will receive
the `payload` from the message, defaulting to an empty map if received from the client.
This can be used for custom authentication/authorization, using
`Absinthe.GraphqlWS.Util.assign_context/2` to modify the Absinthe context.
In case the user is authenticated through session cookies, the session data may be accessed in
the socket's `:connect_info` field. Note that you need to send a `_csrf_token` param in the URL to effectively receive
the session info (or else the session will be `nil`). For more information, visit the Phoenix Endpoint docs:
https://hexdocs.pm/phoenix/Phoenix.Endpoint.html#socket/3-common-configuration
## Example
defmodule MySocket do
use Absinthe.GraphqlWS.Socket, schema: MySchema
def handle_init(%{"user_id" => user_id}, socket) do
case find_user(user_id) do
nil ->
{:error, %{}, socket}
user ->
socket = assign_context(socket, current_user: user)
{:ok, %{name: user.name}, socket}
end
end
end
"""
@callback handle_init(payload :: map(), socket()) :: Socket.init()
@optional_callbacks handle_message: 2, handle_init: 2
@spec __after_compile__(any(), any()) :: :ok
def __after_compile__(env, _bytecode) do
opts = Module.get_attribute(env.module, :graphql_ws_socket_opts)
unless Keyword.has_key?(opts, :schema) do
:elixir_errors.erl_warn(env.line, env.file, "#{env.module} must specify `:schema` when using Absinthe.GraphqlWS.Socket")
end
:ok
end
defmacro __using__(opts) do
quote do
@graphql_ws_socket_opts unquote(opts)
@after_compile Absinthe.GraphqlWS.Socket
import Absinthe.GraphqlWS.Util
alias Absinthe.GraphqlWS.Socket
@behaviour Phoenix.Socket.Transport
@behaviour Absinthe.GraphqlWS.Socket
@doc false
@impl Phoenix.Socket.Transport
def child_spec(opts) do
Socket.__child_spec__(__MODULE__, opts, @graphql_ws_socket_opts)
end
@doc false
@impl Phoenix.Socket.Transport
def connect(transport) do
Socket.__connect__(__MODULE__, transport, @graphql_ws_socket_opts)
end
@doc false
@impl Phoenix.Socket.Transport
def init(socket) do
if socket.keepalive > 0,
do: Process.send_after(self(), :keepalive, socket.keepalive)
{:ok, socket}
end
@doc false
@impl Phoenix.Socket.Transport
def handle_control(message, socket),
do: Absinthe.GraphqlWS.Transport.handle_control(message, socket)
@doc false
@impl Phoenix.Socket.Transport
def handle_in(message, socket),
do: Absinthe.GraphqlWS.Transport.handle_in(message, socket)
@doc false
@impl Phoenix.Socket.Transport
def handle_info(message, socket),
do: Absinthe.GraphqlWS.Transport.handle_info(message, socket)
@doc false
@impl Phoenix.Socket.Transport
def terminate(message, socket),
do: Absinthe.GraphqlWS.Transport.terminate(message, socket)
defoverridable terminate: 2
end
end
defmacrop debug(msg), do: quote(do: Logger.debug("[graph-socket@#{inspect(self())}] #{unquote(msg)}"))
@doc false
def new(attrs \\ []), do: __struct__(attrs)
@doc """
Provides a stub implementation that allows the socket to start. Phoenix.Socket.Transport
expects a child spec that starts a process; we do so with a noop Task.
"""
def __child_spec__(module, _opts, _socket_opts) do
%{id: {__MODULE__, module}, start: {Task, :start_link, [fn -> :ok end]}, restart: :transient}
end
@doc """
When a client connects to this websocket, this function is called to initialize the socket.
"""
@spec __connect__(module(), map(), Keyword.t()) :: {:ok, socket()}
def __connect__(module, socket, options) do
absinthe_pipeline = Keyword.get(options, :pipeline, {__MODULE__, :absinthe_pipeline})
pubsub = socket.endpoint.config(:pubsub_server)
schema = Keyword.fetch!(options, :schema)
keepalive = Keyword.get(options, :keepalive, @default_keepalive)
absinthe_config = %{
opts: [
context: %{
pubsub: socket.endpoint
}
],
pipeline: absinthe_pipeline,
schema: schema
}
socket =
Socket.new(
absinthe: absinthe_config,
connect_info: socket.connect_info,
endpoint: socket.endpoint,
handler: module,
keepalive: keepalive,
pubsub: pubsub
)
debug("connect: #{socket}")
{:ok, socket}
end
@doc """
Provides the default absinthe pipeline.
## Params
* `schema` - An `Absinthe.Schema.t()`
* `options` - A keyword list with the current context, variables, etc for the
current query.
"""
@spec absinthe_pipeline(Absinthe.Schema.t(), Keyword.t()) :: Absinthe.Pipeline.t()
def absinthe_pipeline(schema, options) do
schema
|> Absinthe.Pipeline.for_document(options)
end
defimpl String.Chars do
def to_string(socket) do
handler = Module.split(socket.handler) |> Enum.join(".")
connect_info = Map.keys(socket.connect_info) |> inspect()
"#Socket<handler=#{handler}, connect_info=#{connect_info}, keepalive=#{keepalive(socket.keepalive)}>"
end
defp keepalive(0), do: "disabled"
defp keepalive(value) when value > 10_000, do: "#{value / 1000}s"
defp keepalive(value), do: "#{value}ms"
end
end
|
lib/absinthe/graphql_ws/socket.ex
| 0.906166
| 0.511717
|
socket.ex
|
starcoder
|
defmodule Membrane.Libnice.Bin do
@moduledoc """
Bin used for establishing ICE connection, sending and receiving messages.
### Architecture and pad semantic
Both input and output pads are dynamic ones.
One instance of Libnice Bin is responsible for handling only one ICE stream which can have
multiple components.
Each pad is responsible for carrying data from/to one component.
### Linking using output pad
To receive messages after establishing ICE connection you have to link Libnice Bin to your element
via `Pad.ref(:output, component_id)`. `component_id` is an id of component from which your
element will receive messages. E.g. if you passed as `n_components` 2 it means that there will be
two components and you can link Libnice Bin to your element via `Pad.ref(:output, 1)`
and `Pad.ref(:output, 2)`.
**Important**: you can link to Libnice Bin using its output pad at any moment, but if you don't
want to miss any messages, do it before playing your pipeline.
**Important**: you can't link multiple elements using the same `component_id`. Messages from
one component can be conveyed only to one element.
### Linking using input pad
To send messages after establishing ICE connection you have to link to Libnice Bin via
`Pad.ref(:input, component_id)`. `component_id` is an id of component which will be used to send
messages via net. To send data from multiple elements via the same `component_id` you have to
use [membrane_funnel_plugin](https://github.com/membraneframework/membrane_funnel_plugin).
### Messages API
You can send following messages to Libnice Bin:
- `:gather_candidates`
- `{:set_remote_credentials, credentials}` - credentials are string in form of "ufrag passwd"
- `{:set_remote_candidate, candidate, component_id}` - candidate is a string in the form of
an SDP attribute, i.e. it has the "a=" prefix, e.g. "a=candidate 1 "
- `{:parse_remote_sdp, sdp}`
- `:peer_candidate_gathering_done`
### Notifications API
- `{:new_candidate_full, candidate}`
Triggered by: `:gather_candidates`
- `:candidate_gathering_done`
Triggered by: `:gather_candidates`
- `{:new_remote_candidate_full, candidate}`
Triggered by: `{:set_remote_candidate, candidate, component_id}` or `{:parse_remote_sdp, sdp}`
### Sending and receiving messages
To send or receive messages just link to Libnice Bin using relevant pads.
As soon as connection is established your element will receive demands from Libnice Sink or
messages from Libnice Source.
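### Example
A sketch of declaring the bin as a child of a pipeline. The STUN server entry
must match `ExLibnice.stun_server()`; the exact field names shown here are an
assumption and depend on the `ex_libnice` version in use:

    children = [
      ice: %Membrane.Libnice.Bin{
        stun_servers: [%{server_addr: "stun.l.google.com", server_port: 19_302}],
        controlling_mode: true
      }
    ]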
"""
use Membrane.Bin
alias Membrane.Libnice.Connector
require Membrane.Logger
def_options n_components: [
spec: integer(),
default: 1,
description: "Number of components that will be created in the stream"
],
stream_name: [
spec: String.t(),
default: "",
description: "Name of the stream"
],
stun_servers: [
spec: [ExLibnice.stun_server()],
default: [],
description: "List of stun servers"
],
turn_servers: [
spec: [ExLibnice.relay_info()],
default: [],
description: "List of turn servers"
],
controlling_mode: [
spec: boolean(),
default: false,
description: "Refer to RFC 8445 section 4 - Controlling and Controlled Agent"
],
port_range: [
spec: Range.t(),
default: 0..0,
description: "The port range to use"
],
handshake_module: [
spec: module(),
default: Handshake.Default,
description: "Module implementing Handshake behaviour"
],
handshake_opts: [
spec: keyword(),
default: [],
description:
"Options for handshake module. They will be passed to init function of hsk_module"
]
def_input_pad :input,
availability: :on_request,
caps: :any,
mode: :pull,
demand_unit: :buffers
def_output_pad :output,
availability: :on_request,
caps: :any,
mode: :push
@impl true
def handle_init(options) do
%__MODULE__{
n_components: n_components,
stream_name: stream_name,
stun_servers: stun_servers,
turn_servers: turn_servers,
controlling_mode: controlling_mode,
port_range: port_range,
handshake_module: hsk_module,
handshake_opts: hsk_opts
} = options
{:ok, connector} =
Connector.start_link(
parent: self(),
n_components: n_components,
stream_name: stream_name,
stun_servers: stun_servers,
turn_servers: turn_servers,
controlling_mode: controlling_mode,
port_range: port_range,
hsk_module: hsk_module,
hsk_opts: hsk_opts
)
{:ok, libnice} = Connector.get_libnice_pid(connector)
children = [
libnice_source: Membrane.Libnice.Source,
libnice_sink: %Membrane.Libnice.Sink{libnice: libnice}
]
spec = %ParentSpec{
children: children
}
{{:ok, spec: spec}, %{:connector => connector}}
end
@impl true
def handle_pad_added(Pad.ref(:output, _component_id) = pad, _ctx, state) do
links = [link(:libnice_source) |> via_out(pad) |> to_bin_output(pad)]
{{:ok, spec: %ParentSpec{links: links}}, state}
end
@impl true
def handle_pad_added(Pad.ref(:input, _component_id) = pad, _ctx, state) do
links = [link_bin_input(pad) |> via_in(pad) |> to(:libnice_sink)]
{{:ok, spec: %ParentSpec{links: links}}, state}
end
@impl true
def handle_prepared_to_playing(_ctx, %{connector: connector} = state) do
{:ok, hsk_init_data, credentials} = Connector.run(connector)
actions =
hsk_init_data
|> Enum.map(fn {component_id, init_data} ->
{:notify, {:handshake_init_data, component_id, init_data}}
end)
actions = actions ++ [{:notify, {:local_credentials, credentials}}]
{{:ok, actions}, state}
end
@impl true
def handle_prepared_to_stopped(_ctx, %{connector: connector} = state) do
Connector.reset(connector)
{:ok, state}
end
@impl true
def handle_other(:gather_candidates, _ctx, %{connector: connector} = state) do
Connector.gather_candidates(connector)
{:ok, state}
end
@impl true
def handle_other(
{:set_remote_credentials, credentials},
_ctx,
%{connector: connector} = state
) do
Connector.set_remote_credentials(connector, credentials)
{:ok, state}
end
@impl true
def handle_other({:parse_remote_sdp, sdp}, _ctx, %{connector: connector} = state) do
Connector.parse_remote_sdp(connector, sdp)
{:ok, state}
end
@impl true
def handle_other(
{:set_remote_candidate, cand, component_id},
_ctx,
%{connector: connector} = state
) do
Connector.set_remote_candidate(connector, cand, component_id)
{:ok, state}
end
@impl true
def handle_other(:restart_stream, _ctx, %{connector: connector} = state) do
case Connector.restart_stream(connector) do
{:ok, credentials} ->
{{:ok, notify: {:local_credentials, credentials}}, state}
{:error, cause} ->
Membrane.Logger.debug("Stream restart failed, because: #{cause}")
{:ok, state}
end
end
@impl true
def handle_other(:peer_candidate_gathering_done, _ctx, %{connector: connector} = state) do
Connector.peer_candidate_gathering_done(connector)
{:ok, state}
end
@impl true
def handle_other({:component_state_ready, _stream_id, _component_id} = msg, _ctx, state),
do: {{:ok, forward: {:libnice_sink, msg}}, state}
@impl true
def handle_other({:component_state_failed, stream_id, component_id}, _ctx, state),
do: {{:ok, notify: {:connection_failed, stream_id, component_id}}, state}
@impl true
def handle_other({:hsk_finished, _component_id, _hsk_data} = msg, _ctx, state),
do: {{:ok, [forward: {:libnice_source, msg}, forward: {:libnice_sink, msg}]}, state}
@impl true
def handle_other({:ice_payload, component_id, _payload} = msg, ctx, state) do
if Map.has_key?(ctx.pads, Pad.ref(:output, component_id)) do
{{:ok, forward: {:libnice_source, msg}}, state}
else
Membrane.Logger.warn("No links for component: #{component_id}. Ignoring incoming message.")
{:ok, state}
end
end
@impl true
def handle_other(msg, _ctx, state), do: {{:ok, notify: msg}, state}
@impl true
def handle_notification(
{:connection_ready, _stream_id, _component_id} = msg,
_from,
_ctx,
state
),
do: {{:ok, notify: msg}, state}
@impl true
def handle_notification(
{:component_state_failed, stream_id, component_id},
_from,
_ctx,
state
),
do: {{:ok, notify: {:connection_failed, stream_id, component_id}}, state}
@impl true
def handle_notification(msg, _from, _ctx, state), do: {{:ok, notify: msg}, state}
@impl true
def handle_shutdown(_reason, state) do
GenServer.stop(state.connector)
end
end
|
lib/membrane_libnice_plugin/ice_bin.ex
| 0.869507
| 0.508788
|
ice_bin.ex
|
starcoder
|
defmodule Exq.Middleware.Telemetry do
@moduledoc """
This middleware allows you to subscribe to the telemetry events and
collect metrics about your jobs.
### Exq telemetry events
The middleware emits three events, the same as what `:telemetry.span/3` emits.
* `[:exq, :job, :start]` - Is invoked whenever a job starts.
**Measurements**
- `system_time` (integer) - System time when the job started
* `[:exq, :job, :stop]` - Is invoked whenever a job completes successfully.
**Measurements**
- `duration` (integer) - Duration of the job execution in native unit
* `[:exq, :job, :exception]` - Is invoked whenever a job fails.
**Measurements**
- `duration` (integer) - Duration of the job execution in native unit
**Metadata**
In addition to the common metadata, the exception event has the following fields:
- `kind` (exit | error) - either `exit` or `error`
- `reason` (term) - could be an `Exception.t/0` or term
- `stacktrace` (list) - Stacktrace of the error. Will be empty if the kind is `exit`.
**Metadata**
Each event has the following common metadata:
* `enqueued_at` (`DateTime.t/0`) - datetime the job was enqueued
* `queue` (`String.t/0`) - the name of the queue the job was executed in
* `class` (`String.t/0`) - the job's class
* `jid` (`String.t/0`) - the job's jid
* `retry_count` (integer) - number of times this job has failed so far
### Examples
defmodule MyApp.Application do
def start(_type, _args) do
children = [
# .....
{Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
opts = [strategy: :one_for_one, name: MyApp.Supervisor]
Supervisor.start_link(children, opts)
end
defp metrics do
[
counter("exq.job.stop.duration"),
counter("exq.job.exception.duration"),
distribution("exq.job.stop.duration",
buckets: [0.1, 0.2, 0.3, 0.5, 0.75, 1, 2, 3, 5, 10],
unit: {:native, :millisecond}
),
distribution("exq.job.exception.duration",
buckets: [0.1, 0.2, 0.3, 0.5, 0.75, 1, 2, 3, 5, 10],
unit: {:native, :millisecond}
),
summary("exq.job.stop.duration", unit: {:native, :millisecond}),
summary("exq.job.exception.duration", unit: {:native, :millisecond})
]
end
end
"""
@behaviour Exq.Middleware.Behaviour
alias Exq.Middleware.Pipeline
import Pipeline
defguardp is_stacktrace(stacktrace)
when is_list(stacktrace) and length(stacktrace) > 0 and is_tuple(hd(stacktrace)) and
(tuple_size(hd(stacktrace)) == 3 or tuple_size(hd(stacktrace)) == 4)
@impl true
def after_failed_work(pipeline) do
duration = System.monotonic_time() - pipeline.assigns.telemetry_start_time
error_map =
case pipeline.assigns.error do
{reason, stacktrace} when is_stacktrace(stacktrace) ->
%{kind: :error, reason: reason, stacktrace: stacktrace}
reason ->
%{kind: :exit, reason: reason, stacktrace: []}
end
:telemetry.execute(
[:exq, :job, :exception],
%{duration: duration},
Map.merge(metadata(pipeline.assigns.job), error_map)
)
pipeline
end
@impl true
def after_processed_work(pipeline) do
duration = System.monotonic_time() - pipeline.assigns.telemetry_start_time
:telemetry.execute(
[:exq, :job, :stop],
%{duration: duration},
metadata(pipeline.assigns.job)
)
pipeline
end
@impl true
def before_work(pipeline) do
:telemetry.execute(
[:exq, :job, :start],
%{system_time: System.system_time()},
metadata(pipeline.assigns.job)
)
assign(pipeline, :telemetry_start_time, System.monotonic_time())
end
defp metadata(job),
do: %{
enqueued_at: DateTime.from_unix!(round(job.enqueued_at * 1000), :millisecond),
queue: job.queue,
class: job.class,
jid: job.jid,
retry_count: job.retry_count || 0
}
end
|
lib/exq/middleware/telemetry.ex
| 0.803135
| 0.613815
|
telemetry.ex
|
starcoder
|
defmodule Pushest do
@moduledoc ~S"""
Pushest is a Pusher library leveraging Elixir/OTP to combine server and client-side Pusher features.
Abstracts un/subscription, client-side triggers, private/presence channel authorizations.
Keeps track of subscribed channels and users presence when subscribed to presence channel.
Pushest is meant to be used in your module where you can define callbacks for
events you're interested in.
A simple implementation in an OTP application would be:
```
# Add necessary pusher configuration to your application config:
# simple_client/config/config.exs
config :simple_client, SimpleClient,
pusher_app_id: System.get_env("PUSHER_APP_ID"),
pusher_key: System.get_env("PUSHER_APP_KEY"),
pusher_secret: System.get_env("PUSHER_SECRET"),
pusher_cluster: System.get_env("PUSHER_CLUSTER"),
pusher_encrypted: true
# simple_client/simple_client.ex
defmodule SimpleClient do
use Pushest, otp_app: :simple_client
def handle_event({:ok, "public-channel", "some-event"}, frame) do
# do something with public frame
end
def handle_event({:ok, "private-channel", "some-other-event"}, frame) do
# do something with private frame
end
end
# Now you can start your application with Pushest as a part of your supervision tree:
# simple_client/lib/simple_client/application.ex
def start(_type, _args) do
children = [
{SimpleClient, []}
]
opts = [strategy: :one_for_one, name: Sup.Supervisor]
Supervisor.start_link(children, opts)
end
```
You can also provide Pusher options directly via start_link/1 (without using OTP app configuration):
```
config = %{
app_id: System.get_env("PUSHER_APP_ID"),
key: System.get_env("PUSHER_APP_KEY"),
secret: System.get_env("PUSHER_SECRET"),
cluster: System.get_env("PUSHER_CLUSTER"),
encrypted: true
}
{:ok, pid} = SimpleClient.start_link(config)
```
Now you can interact with Pusher:
```
SimpleClient.trigger("private-channel", "event", %{message: "via api"})
SimpleClient.channels()
# => %{"channels" => %{"public-channel" => %{}}}
SimpleClient.subscribe("private-channel")
SimpleClient.trigger("private-channel", "event", %{message: "via ws"})
SimpleClient.trigger("private-channel", "event", %{message: "via api"}, force_api: true)
# ...
```
"""
alias Pushest.Router
@doc ~S"""
Invoked when a Pusher event occurs (e.g. another client sends a message).
"""
@callback handle_event({atom, String.t(), String.t()}, term) :: term
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
@typedoc ~S"""
Options for Pushest to properly communicate with Pusher server.
- `:app_id` - Pusher Application ID.
- `:key` - Pusher Application key.
- `:secret` - Necessary to subscribe to private/presence channels and trigger events.
- `:cluster` - Cluster where your Pusher app is configured.
- `:encrypted` - When set to true communication with Pusher is fully encrypted.
"""
@type pusher_opts :: %{
app_id: String.t(),
secret: String.t(),
key: String.t(),
cluster: String.t(),
encrypted: boolean
}
@typedoc ~S"""
Optional options for trigger function.
- `:force_api` - Always triggers via Pusher REST API endpoint when set to `true`
"""
@type trigger_opts :: [force_api: boolean]
@behaviour Pushest
@config Pushest.Supervisor.config(__MODULE__, opts)
@doc ~S"""
Starts a Pushest Supervisor process linked to current process.
Can be started as a part of host application supervision tree.
Pusher options can be passed as an argument or can be provided in an OTP
application config.
For available pusher_opts values see `t:pusher_opts/0`.
"""
@spec start_link(pusher_opts) :: {:ok, pid} | {:error, term}
def start_link(pusher_opts) when is_map(pusher_opts) do
Pushest.Supervisor.start_link(pusher_opts, __MODULE__)
end
def start_link(_) do
Pushest.Supervisor.start_link(@config, __MODULE__)
end
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
type: :supervisor
}
end
@doc ~S"""
Subscribe to a channel with `user_data` as a map. When subscribing to a
presence channel, a `user_id` key with a unique identifier as its value has
to be provided in the `user_data` map. A `user_info` key can contain a map
with optional information about the user.
E.g.: %{user_id: "1", user_info: %{name: "<NAME>"}}
"""
@spec subscribe(String.t(), map) :: term
def subscribe(channel, user_data) do
Router.cast({:subscribe, channel, user_data})
end
@doc ~S"""
Subscribe to a channel without any user data, like any public channel.
"""
@spec subscribe(String.t()) :: term
def subscribe(channel) do
Router.cast({:subscribe, channel, %{}})
end
@doc ~S"""
Trigger on given channel/event combination - sends given data to Pusher.
data has to be a map.
"""
@spec trigger(String.t(), String.t(), map) :: term
def trigger(channel, event, data) do
Router.cast({:trigger, channel, event, data})
end
@doc ~S"""
Same as trigger/3 but adds a possibility to enforce triggering via the API endpoint.
For enforced API trigger provide `force_api: true` as an `opts`.
E.g.: `Mod.trigger("channel", "event", %{message: "m"}, force_api: true)`
For trigger_opts values see `t:trigger_opts/0`.
"""
@spec trigger(String.t(), String.t(), map, trigger_opts) :: term
def trigger(channel, event, data, opts) do
Router.cast({:trigger, channel, event, data}, opts)
end
@doc ~S"""
Returns all the channels anyone is using, calls Pusher via REST API.
"""
def channels do
Router.call(:channels)
end
@doc ~S"""
Returns only the channels this client is subscribed to.
"""
def subscribed_channels do
Router.call(:subscribed_channels)
end
@doc ~S"""
Returns information about all the users subscribed to a presence channels
this client is subscribed to.
"""
def presence do
Router.call(:presence)
end
@doc ~S"""
Unsubscribes from a channel
"""
def unsubscribe(channel) do
Router.cast({:unsubscribe, channel})
end
@doc ~S"""
Function meant to be overridden in the user module, e.g.:
```
defmodule MyMod do
use Pushest, otp_app: :my_mod
handle_event({:ok, "my-channel, "my-event"}, frame) do
# Do something with a frame here.
end
end
```
"""
def handle_event({status, _channel, event}, _frame) do
require Logger
Logger.error(
"No #{inspect(status)} handle_event/2 clause in #{__MODULE__} provided for #{
inspect(event)
}"
)
end
defoverridable handle_event: 2
end
end
end
|
lib/pushest.ex
| 0.866994
| 0.66594
|
pushest.ex
|
starcoder
|
defmodule RayTracer.Cube do
@moduledoc """
This module defines cube operations
"""
alias RayTracer.Shape
use Shape
alias RayTracer.RTuple
alias RayTracer.Matrix
alias RayTracer.Material
alias RayTracer.Intersection
@type t :: %__MODULE__{
transform: Matrix.matrix,
inv_transform: Matrix.matrix,
trans_inv_transform: Matrix.matrix,
material: Material.t
}
import RayTracer.Constants
import RTuple, only: [vector: 3]
@doc """
Builds a cube with given transformation matrix and material
"""
@spec new(Matrix.matrix, Material.t) :: t
def new(transform \\ Matrix.ident, material \\ Material.new) do
%__MODULE__{material: material} |> Shape.set_transform(transform)
end
defimpl Shape.Shadeable do
alias RayTracer.{Cube, RTuple, Intersection, Ray}
@spec local_normal_at(Cube.t, RTuple.point) :: RTuple.vector
def local_normal_at(_cube, point) do
ox = point |> RTuple.x
oy = point |> RTuple.y
oz = point |> RTuple.z
absv = [ox, oy, oz] |> Enum.map(fn v -> abs(v) end)
maxc = absv |> Enum.max
x = Enum.at(absv, 0)
y = Enum.at(absv, 1)
cond do
maxc == x -> vector(ox, 0, 0)
maxc == y -> vector(0, oy, 0)
true -> vector(0, 0, oz)
end
end
@spec local_intersect(Cube.t, Ray.t) :: list(Intersection.t)
def local_intersect(cube, ray) do
{xtmin, xtmax} = check_axis(ray.origin |> RTuple.x, ray.direction |> RTuple.x)
{ytmin, ytmax} = check_axis(ray.origin |> RTuple.y, ray.direction |> RTuple.y)
{ztmin, ztmax} = check_axis(ray.origin |> RTuple.z, ray.direction |> RTuple.z)
tmin = Enum.max([xtmin, ytmin, ztmin])
tmax = Enum.min([xtmax, ytmax, ztmax])
if tmin > tmax do
[]
else
[Intersection.new(tmin, cube), Intersection.new(tmax, cube)]
end
end
@spec check_axis(number, number) :: {number, number}
# The cube is axis-aligned and spans -1..1 on each axis; this computes where
# the ray crosses the pair of parallel planes (the "slab") for one axis.
defp check_axis(origin, direction) do
tmin_numerator = (-1 - origin)
tmax_numerator = (1 - origin)
if abs(direction) >= epsilon() do
tmin = tmin_numerator / direction
tmax = tmax_numerator / direction
swap_if_greater(tmin, tmax)
else
tmin = tmin_numerator * infinity()
tmax = tmax_numerator * infinity()
swap_if_greater(tmin, tmax)
end
end
defp swap_if_greater(tmin, tmax) do
if tmin > tmax do
{tmax, tmin}
else
{tmin, tmax}
end
end
end
end
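# Worked example: a ray with origin (5, 0.5, 0) and direction (-1, 0, 0)
# gives check_axis(5, -1) = {4, 6} on the x axis (tmin = (-1 - 5) / -1 = 6,
# tmax = (1 - 5) / -1 = 4, then swapped), while the y and z slabs are
# unbounded, so local_intersect/2 reports hits at t = 4 and t = 6.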
|
lib/cube.ex
| 0.876423
| 0.541954
|
cube.ex
|
starcoder
|
defmodule Strukt do
@moduledoc """
"""
import Kernel, except: [defstruct: 1, defstruct: 2]
import Strukt.Field, only: [is_supported: 1]
@doc """
See `c:new/1`
"""
@callback new() :: {:ok, struct()} | {:error, Ecto.Changeset.t()}
@doc """
This callback can be overridden to provide custom initialization behavior.
The default implementation provided for you performs all of the necessary
validation and autogeneration of fields with those options set.
NOTE: It is critical that, if you do override this callback, you call
`super/1` at some point in your implementation to run the default one.
"""
@callback new(Keyword.t() | map()) :: {:ok, struct()} | {:error, Ecto.Changeset.t()}
@doc """
See `c:change/2`
"""
@callback change(Ecto.Changeset.t() | term()) :: Ecto.Changeset.t()
@doc """
This callback can be overridden to provide custom change behavior.
The default implementation provided for you creates a changeset and applies
all of the inline validations defined on the schema.
NOTE: It is recommended that if you need to perform custom validations, that
you override `c:validate/1` instead. If you need to override this callback
specifically for some reason, make sure you call `super/2` at some point during
your implementation to ensure that validations are run.
"""
@callback change(Ecto.Changeset.t() | term(), Keyword.t() | map()) :: Ecto.Changeset.t()
@doc """
This callback can be overridden to provide custom validation logic.
The default implementation simply returns the changeset it is given. Validations
defined inline with fields are handled by a specially generated `__validate__/1`
function which is called directly by `new/1` and `change/2`.
NOTE: If you override this function, there is no need to invoke `super/1`
"""
@callback validate(Ecto.Changeset.t()) :: Ecto.Changeset.t()
@field_types [
:field,
:embeds_one,
:embeds_many,
:belongs_to,
:has_many,
:has_one,
:many_to_many,
:timestamps
]
@schema_attrs [
:primary_key,
:schema_prefix,
:foreign_key_type,
:timestamps_opts,
:derive,
:field_source_mapper
]
@special_attrs @schema_attrs ++ [:moduledoc, :derives]
defmacro __using__(_) do
quote do
import Kernel, except: [defstruct: 1, defstruct: 2]
import unquote(__MODULE__), only: :macros
end
end
@doc ~S"""
This variant of `defstruct` can accept a list of fields, just like `Kernel.defstruct/1`, in which
case it simply defers to `Kernel.defstruct/1` and does nothing; or it can be passed a block
containing an `Ecto.Schema` definition. The resulting struct/schema is defined in the current
module scope, and will inherit attributes like `@derive`, `@primary_key`, etc., which are already
defined in the current scope.
## Example
defmodule Passthrough do
use Strukt
defstruct [:name]
end
defmodule Person do
use Strukt
@derive [Jason.Encoder]
defstruct do
field :name, :string
end
def say_hello(%__MODULE__{name: name}), do: "Hello #{name}!"
end
Above, even though `Strukt.defstruct/1` is in scope, it simply passes through the list of fields
to `Kernel.defstruct/1`, as without a proper schema, there isn't much useful we can do. This allows
intermixing uses of `defstruct/1` in the same scope without conflict.
"""
defmacro defstruct(arg)
defmacro defstruct(do: block) do
define_struct(__CALLER__, nil, block)
end
defmacro defstruct(fields) do
quote bind_quoted: [fields: fields] do
Kernel.defstruct(fields)
end
end
@doc ~S"""
This variant of `defstruct` takes a module name and block containing a struct schema and
any other module contents desired, and defines a new module with that name, generating
a struct just like `Strukt.defstruct/1`.
## Example
use Strukt
defstruct Person do
@derive [Jason.Encoder]
field :name, :string
def say_hello(%__MODULE__{name: name}), do: "Hello #{name}!"
end
NOTE: Unlike `Strukt.defstruct/1`, which inherits attributes like `@derive` or `@primary_key` from
the surrounding scope; this macro requires them to be defined in the body, as shown above.
"""
defmacro defstruct(name, do: body) do
define_struct(__CALLER__, name, body)
end
defp define_struct(env, name, {:__block__, meta, body}) do
{special_attrs, body} =
Enum.split_with(body, fn
{:@, _, [{attr, _, _}]} -> attr in @special_attrs
_ -> false
end)
{fields, body} =
Enum.split_with(body, fn
{field_type, _, _} -> field_type in @field_types
_ -> false
end)
{schema_attrs, special_attrs} =
Enum.split_with(special_attrs, fn {:@, _, [{attr, _, _}]} -> attr in @schema_attrs end)
moduledoc = Enum.find(special_attrs, fn {:@, _, [{attr, _, _}]} -> attr == :moduledoc end)
derives =
case Enum.find(special_attrs, fn {:@, _, [{attr, _, _}]} -> attr == :derives end) do
{_, _, [{_, _, [derives]}]} ->
derives
nil ->
[]
end
fields = Strukt.Field.parse(fields)
define_struct(env, name, meta, moduledoc, derives, schema_attrs, fields, body)
end
# This clause handles the edge case where the definition only contains
# a single field and nothing else
defp define_struct(env, name, {type, _, _} = field) when is_supported(type) do
fields = Strukt.Field.parse([field])
define_struct(env, name, [], nil, [], [], fields, [])
end
defp define_struct(_env, name, meta, moduledoc, derives, schema_attrs, fields, body) do
# Extract macros which should be defined at the top of the module
{macros, body} =
Enum.split_with(body, fn
{node, _meta, _body} -> node in [:use, :import, :alias]
_ -> false
end)
# Extract child struct definitions
children =
fields
|> Enum.filter(fn %{type: t, block: block} ->
t in [:embeds_one, :embeds_many] and block != nil
end)
|> Enum.map(fn %{value_type: value_type, block: block} ->
quote do
Strukt.defstruct unquote(value_type) do
unquote(block)
end
end
end)
# Generate validation metadata for the generated module
validated_fields =
for %{name: name, type: t} = f <- fields, t != :timestamps, reduce: {:%{}, [], []} do
{node, meta, elements} ->
kvs =
Keyword.merge(
[type: t, value_type: f.value_type, default: f.options[:default]],
f.validations
)
element = {name, {:%{}, [], kvs}}
{node, meta, [element | elements]}
end
# Get a list of fields valid for `cast/3`
cast_fields = for %{type: :field} = f <- fields, do: f.name
# Get a list of embeds valid for `cast_embed/3`
cast_embed_fields = for %{type: t} = f <- fields, t in [:embeds_one, :embeds_many], do: f.name
# Expand fields back to their final AST form
fields_ast =
fields
|> Stream.map(&Strukt.Field.to_ast/1)
# Drop any extraneous args (such as inline schema definitions, which have been extracted)
|> Enum.map(fn {type, meta, args} -> {type, meta, Enum.take(args, 3)} end)
# Make sure the default primary key is defined and castable
defines_primary_key? =
Enum.any?(fields, &(&1.type == :field and Keyword.has_key?(&1.options, :primary_key)))
quoted =
quote location: :keep do
unquote(moduledoc)
unquote_splicing(macros)
# Capture schema attributes from outer scope, since `use Ecto.Schema` will reset them
schema_attrs =
unquote(@schema_attrs)
|> Enum.map(&{&1, Module.get_attribute(__MODULE__, &1)})
|> Enum.reject(fn {_, value} -> is_nil(value) end)
use Ecto.Schema
import Ecto.Changeset, except: [change: 2]
@behaviour unquote(__MODULE__)
@before_compile unquote(__MODULE__)
# Generate child structs before generating the parent
unquote_splicing(children)
# Ensure any schema attributes are set, starting with outer scope, then inner
for {schema_attr, value} <- schema_attrs do
Module.put_attribute(__MODULE__, schema_attr, value)
end
# Schema attributes defined in module body
unquote_splicing(schema_attrs)
# Ensure a primary key is defined, if one hasn't been by this point
defines_primary_key? = unquote(defines_primary_key?)
case Module.get_attribute(__MODULE__, :primary_key) do
nil when not defines_primary_key? ->
# Provide the default primary key
Module.put_attribute(__MODULE__, :primary_key, {:uuid, Ecto.UUID, autogenerate: true})
pk when defines_primary_key? ->
# Primary key is being overridden
Module.put_attribute(__MODULE__, :primary_key, false)
_pk ->
# Primary key is set and not overridden
nil
end
@schema_name Macro.underscore(__MODULE__)
@validated_fields unquote(validated_fields)
@cast_embed_fields unquote(Macro.escape(cast_embed_fields))
# Ensure primary key can be cast, if applicable
case Module.get_attribute(__MODULE__, :primary_key) do
false ->
# Primary key was explicitly disabled
Module.put_attribute(__MODULE__, :cast_fields, unquote(Macro.escape(cast_fields)))
{pk, _type, _opts} ->
# Primary key was defaulted, or set manually via attribute
Module.put_attribute(__MODULE__, :cast_fields, [
pk | unquote(Macro.escape(cast_fields))
])
end
# Inject or override @derives, without Jason.Encoder if present
case Module.get_attribute(__MODULE__, :derives) do
derives when derives in [false, nil] or derives == [] ->
case unquote(derives) do
nil ->
nil
ds ->
if Enum.member?(ds, Jason.Encoder) do
Module.put_attribute(__MODULE__, :derives_jason, true)
Module.put_attribute(
__MODULE__,
:derives,
Enum.reject(ds, &(&1 == Jason.Encoder))
)
end
end
derives ->
if Enum.member?(derives, Jason.Encoder) do
Module.put_attribute(__MODULE__, :derives_jason, true)
Module.put_attribute(
__MODULE__,
:derives,
Enum.reject(derives, &(&1 == Jason.Encoder))
)
end
end
embedded_schema do
unquote({:__block__, meta, fields_ast})
end
@doc """
Creates a `#{__MODULE__}`, using the provided params.
This operation is fallible, so it returns `{:ok, t}` or `{:error, Ecto.Changeset.t}`.
If this struct has an autogenerated primary key, it will be generated, assuming it
was not provided in the set of params. By default, all structs generated by `defstruct`
are given a primary key field of `:uuid`, which is autogenerated as a version 4 UUID via `Ecto.UUID`.
See the docs for `defstruct` if you wish to change this.
"""
@impl Strukt
def new(params \\ %{})
def new(params) do
struct(__MODULE__)
|> Strukt.Autogenerate.generate()
|> changeset(params, :insert)
|> from_changeset()
end
@doc """
Prepares an `Ecto.Changeset` from a struct, or an existing `Ecto.Changeset`, by applying
the provided params as changes. The resulting changeset is validated.
See `from_changeset/1`, for converting the changeset back to a struct.
"""
@impl Strukt
def change(entity_or_changeset, params \\ %{})
def change(entity_or_changeset, params) do
case entity_or_changeset do
%Ecto.Changeset{} = cs ->
cs
|> Ecto.Changeset.change(params)
|> __validate__()
|> validate()
%__MODULE__{} = entity ->
changeset(entity, params, :update)
end
end
@doc """
Validates a changeset for this type. Automatically called by `new/1`, `change/2`, and `changeset/{1,2}`.
NOTE: This function can be overridden manually to provide additional validations above
and beyond those defined by the schema itself, for cases where the validation options
available are not rich enough to express the necessary business rules. By default this
function just returns the input changeset, as `changeset` automatically applies the
schema validations for you.
"""
@impl Strukt
def validate(cs), do: cs
defoverridable unquote(__MODULE__)
unquote(body)
end
if is_nil(name) do
quoted
else
quote do
defmodule unquote(name) do
unquote(quoted)
end
end
end
end
@doc false
defmacro __before_compile__(env) do
quote location: :keep, bind_quoted: [schema_module: env.module] do
# Injects the type spec for this module based on the schema
typespec_ast =
Strukt.Typespec.generate(%Strukt.Typespec{
caller: __MODULE__,
info: @validated_fields,
fields: @cast_fields,
embeds: @cast_embed_fields
})
Module.eval_quoted(__ENV__, typespec_ast)
@doc """
Generates an `Ecto.Changeset` for this type, using the provided params.
This function automatically performs validations based on the schema, and additionally,
it invokes `validate/1` in order to apply custom validations, if present.
Use `from_changeset/1` to apply the changes in the changeset,
and get back a valid instance of this type
"""
@spec changeset(t) :: Ecto.Changeset.t()
@spec changeset(t, Keyword.t() | map()) :: Ecto.Changeset.t()
def changeset(%__MODULE__{} = entity, params \\ %{}) do
changeset(entity, params, nil)
end
# This function is used to build and validate a changeset for the corresponding action.
@doc false
def changeset(%__MODULE__{} = entity, params, action)
when action in [:insert, :update, :delete, nil] do
params =
case params do
%__MODULE__{} ->
Map.from_struct(params)
m when is_map(m) ->
m
other ->
Enum.into(other, %{})
end
cast(entity, params, @cast_fields)
|> Map.put(:action, action)
|> __cast_embeds__(@cast_embed_fields)
|> __validate__()
|> validate()
end
defp __cast_embeds__(changeset, []), do: changeset
defp __cast_embeds__(%Ecto.Changeset{params: params} = changeset, [field | fields]) do
# If we get a struct(s) in the params for an embed, there is no need to cast, presume validity and apply the change directly
f = to_string(field)
prev = Ecto.Changeset.fetch_field!(changeset, field)
# Ensure a change can always be applied, whether inserting or updated
changeset =
case Map.get(params, f) do
nil ->
changeset
%_{} = entity when is_nil(prev) ->
# In this case, we don't have a previous instance, and we don't need to cast
Ecto.Changeset.put_embed(changeset, field, Map.from_struct(entity))
%_{} = entity ->
# In this case, we have a previous instance, so we need to change appropriately, but we don't need to cast
cs = Ecto.Changeset.change(prev, Map.from_struct(entity))
Ecto.Changeset.put_embed(changeset, field, cs)
[%_{} | _] = entities ->
# When we have a list of entities, we are overwriting the embeds with a new set
Ecto.Changeset.put_embed(changeset, field, Enum.map(entities, &Map.from_struct/1))
other when is_map(other) or is_list(other) ->
# For all other parameters, we need to cast. Depending on how the embedded entity is configured, this may raise an error
cast_embed(changeset, field)
end
__cast_embeds__(changeset, fields)
end
@doc """
Applies the changes in the changeset if the changeset is valid, returning the
updated data. The changeset's action (one of `:insert`, `:update`, or `:delete`)
is set by `changeset/3` before this function is called.
Returns `{:ok, t}` or `{:error, Ecto.Changeset.t}`, depending on validity of the changeset
"""
@spec from_changeset(Ecto.Changeset.t()) :: {:ok, t} | {:error, Ecto.Changeset.t()}
def from_changeset(changeset)
def from_changeset(%Ecto.Changeset{valid?: true} = cs),
do: {:ok, Ecto.Changeset.apply_changes(cs)}
def from_changeset(%Ecto.Changeset{} = cs), do: {:error, cs}
@doc "Deserialize this type from a JSON string or iodata"
@spec from_json(binary | iodata) :: {:ok, t} | {:error, reason :: term}
def from_json(input) do
with {:ok, map} <- Jason.decode(input, keys: :atoms!, strings: :copy) do
{:ok, Ecto.embedded_load(__MODULE__, map, :json)}
end
end
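# Illustrative round trip (module and field names are hypothetical; assumes
# the struct was defined with @derives [Jason.Encoder]):
#
#     {:ok, person} = Person.new(name: "Ada")
#     json = Jason.encode!(person)
#     {:ok, decoded} = Person.from_json(json)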
# Generate the __validate__ function
validate_ast = Strukt.Validation.generate(__MODULE__, @validated_fields)
Module.eval_quoted(__ENV__, validate_ast)
# Handle conditional implementation of Jason.Encoder
if Module.get_attribute(__MODULE__, :derives_jason) do
defimpl Jason.Encoder, for: schema_module do
def encode(value, opts) do
value
|> Ecto.embedded_dump(:json)
|> Jason.Encode.map(opts)
end
end
end
end
end
end
|
lib/strukt.ex
| 0.869396
| 0.44746
|
strukt.ex
|
starcoder
|
defmodule Xgit.Util.TrailingHashDevice do
@moduledoc false
# Creates an `iodevice` process that supports git file formats with a trailing
# SHA-1 hash.
# When reading, the trailing 20 bytes are interpreted as a SHA-1 hash of the
# remaining file contents and can be verified using the `valid_hash?/1` function.
# This is an admittedly minimal implementation; just enough is implemented to
# allow Xgit's index file parser to do its work.
use GenServer
import Xgit.Util.ForceCoverage
require Logger
@doc ~S"""
Creates an IO device that reads a file with trailing hash.
Unlike `File.open/2` and `File.open/3`, no options or function are
accepted.
This device can be passed to `IO.binread/2`.
## Return Value
`{:ok, pid}` where `pid` points to an IO device process.
`{:error, reason}` if the file could not be opened. See `File.open/2` for
possible values for `reason`.
"""
@spec open_file(path :: Path.t()) :: {:ok, pid} | {:error, File.posix()}
def open_file(path) when is_binary(path),
do: GenServer.start_link(__MODULE__, {:file, path})
@doc ~S"""
Creates an IO device that writes to a file with trailing hash.
Unlike `File.open/2` and `File.open/3`, no options or function are
accepted.
This device can be passed to `IO.binwrite/2`.
## Options
`:max_file_size` (non-negative integer) may be passed, which will cause a
failure after the _n_th byte is written. This is intended for internal
testing purposes.
## Return Value
`{:ok, pid}` where `pid` points to an IO device process.
`{:error, reason}` if the file could not be opened. See `File.open/2` for
possible values for `reason`.
"""
@spec open_file_for_write(path :: Path.t(), opts :: Keyword.t()) ::
{:ok, pid} | {:error, File.posix()}
def open_file_for_write(path, opts \\ []) when is_binary(path) and is_list(opts),
do: GenServer.start_link(__MODULE__, {:file_write, path, opts})
@doc ~S"""
Creates an IO device that reads a string with trailing hash.
This is intended mostly for internal testing purposes.
Unlike `StringIO.open/2` and `StringIO.open/3`, no options or function are
accepted.
This device can be passed to `IO.binread/2`.
## Return Value
`{:ok, pid}` where `pid` points to an IO device process.
"""
@spec open_string(s :: binary) :: {:ok, pid}
def open_string(s) when is_binary(s) and byte_size(s) >= 20,
do: GenServer.start_link(__MODULE__, {:string, s})
@doc ~S"""
Returns `true` if this is process is an `TrailingHashDevice` instance.
Note the difference between this function and `valid_hash?/1`.
"""
@spec valid?(v :: any) :: boolean
def valid?(v) when is_pid(v) do
GenServer.call(v, :valid_trailing_hash_read_device?) == :valid_trailing_hash_read_device
catch
:exit, {:timeout, _} -> false
end
def valid?(_), do: cover(false)
@doc ~S"""
Returns `true` if the hash at the end of the file matches the hash
generated while reading the file.
Should only be called once and only once when the entire file (sans SHA-1 hash)
has been read.
## Return Values
`true` or `false` if the SHA-1 hash was found and was valid (or not).
`:too_soon` if called before the SHA-1 hash is expected.
`:already_called` if called a second (or successive) time.
`:opened_for_write` if called on a device that was opened for write.
"""
@spec valid_hash?(io_device :: pid) :: boolean
def valid_hash?(io_device) when is_pid(io_device),
do: GenServer.call(io_device, :valid_hash?)
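# Sketch of the intended read flow (the path is hypothetical):
#
#     {:ok, device} = TrailingHashDevice.open_file(".git/index")
#     _data = IO.binread(device, 4096)
#     # ...continue until all bytes before the trailing 20 are read...
#     true = TrailingHashDevice.valid_hash?(device)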
@impl true
def init({:file, path}) do
with {:ok, %{size: size}} <- File.stat(path, time: :posix),
{:ok, pid} when is_pid(pid) <- File.open(path) do
cover {:ok,
%{
iodevice: pid,
mode: :read,
remaining_bytes: size - 20,
crypto: :crypto.hash_init(:sha)
}}
else
{:error, reason} -> cover {:stop, reason}
end
end
def init({:file_write, path, opts}) do
case File.open(path, [:write]) do
{:ok, pid} when is_pid(pid) ->
cover {:ok,
%{
iodevice: pid,
mode: :write,
remaining_bytes: Keyword.get(opts, :max_file_size, :unlimited),
crypto: :crypto.hash_init(:sha)
}}
{:error, reason} ->
cover {:stop, reason}
end
end
def init({:string, s}) do
{:ok, pid} = StringIO.open(s)
cover {:ok,
%{
iodevice: pid,
mode: :read,
remaining_bytes: byte_size(s) - 20,
crypto: :crypto.hash_init(:sha)
}}
end
@impl true
def handle_info({:io_request, from, reply_as, req}, state) do
state = io_request(from, reply_as, req, state)
cover {:noreply, state}
end
def handle_info({:file_request, from, reply_as, req}, state) do
state = file_request(from, reply_as, req, state)
cover {:noreply, state}
end
def handle_info(message, state) do
Logger.warn("TrailingHashDevice received unexpected message #{inspect(message)}")
cover {:noreply, state}
end
@impl true
def handle_call(:valid_trailing_hash_read_device?, _from_, state),
do: {:reply, :valid_trailing_hash_read_device, state}
def handle_call(:valid_hash?, _from, %{mode: :write} = state),
do: {:reply, :opened_for_write, state}
def handle_call(:valid_hash?, _from, %{crypto: :done} = state),
do: {:reply, :already_called, state}
def handle_call(
:valid_hash?,
_from,
%{iodevice: iodevice, mode: :read, remaining_bytes: remaining_bytes, crypto: crypto} =
state
)
when remaining_bytes <= 0 do
actual_hash = :crypto.hash_final(crypto)
hash_from_file = IO.binread(iodevice, 20)
{:reply, actual_hash == hash_from_file, %{state | crypto: :done}}
end
def handle_call(:valid_hash?, _from, state), do: {:reply, :too_soon, state}
def handle_call(request, _from, state) do
Logger.warn("TrailingHashDevice received unexpected call #{inspect(request)}")
{:reply, :unknown_message, state}
end
defp io_request(from, reply_as, req, state) do
{reply, state} = io_request(req, state)
send(from, {:io_reply, reply_as, reply})
state
end
defp io_request(
{:get_chars, :"", count},
%{mode: :read, remaining_bytes: remaining_bytes} = state
)
when remaining_bytes <= 0 and is_integer(count) and count >= 0 do
cover {:eof, state}
end
defp io_request({:get_chars, :"", 0}, %{mode: :read} = state), do: cover({"", state})
defp io_request(
{:get_chars, :"", count},
%{iodevice: iodevice, mode: :read, remaining_bytes: remaining_bytes, crypto: crypto} =
state
)
when is_integer(count) and count > 0 do
data = IO.binread(iodevice, min(remaining_bytes, count))
if is_binary(data) do
crypto = :crypto.hash_update(crypto, data)
cover {data, %{state | remaining_bytes: remaining_bytes - byte_size(data), crypto: crypto}}
else
# coveralls-ignore-start
# This will only occur if an I/O error occurs *mid*-file.
# Difficult to simulate and fairly simple code, so not generating coverage for this line.
cover {data, state}
# coveralls-ignore-stop
end
end
defp io_request(
{:put_chars, _encoding, data},
%{
iodevice: iodevice,
mode: :write,
remaining_bytes: remaining_bytes,
crypto: crypto
} = state
)
when is_integer(remaining_bytes) do
if byte_size(data) <= remaining_bytes do
crypto = :crypto.hash_update(crypto, data)
IO.binwrite(iodevice, data)
cover {:ok, %{state | remaining_bytes: remaining_bytes - byte_size(data), crypto: crypto}}
else
cover {{:error, :eio}, %{state | remaining_bytes: 0}}
end
end
defp io_request(
{:put_chars, _encoding, data},
%{
iodevice: iodevice,
mode: :write,
remaining_bytes: :unlimited,
crypto: crypto
} = state
) do
crypto = :crypto.hash_update(crypto, data)
IO.binwrite(iodevice, data)
cover {:ok, %{state | crypto: crypto}}
end
defp io_request(request, state) do
Logger.warn("TrailingHashDevice received unexpected iorequest #{inspect(request)}")
cover {{:error, :request}, state}
end
defp file_request(from, reply_as, req, state) do
{reply, state} = file_request(req, state)
send(from, {:file_reply, reply_as, reply})
state
end
defp file_request(
:close,
%{iodevice: iodevice, mode: :write, crypto: crypto} = state
) do
hash = :crypto.hash_final(crypto)
IO.binwrite(iodevice, hash)
cover {File.close(iodevice), %{state | iodevice: nil}}
end
defp file_request(:close, %{iodevice: iodevice} = state),
do: cover({File.close(iodevice), %{state | iodevice: nil}})
defp file_request(request, state) do
Logger.warn("TrailingHashDevice received unexpected file_request #{inspect(request)}")
cover {{:error, :request}, state}
end
end
|
lib/xgit/util/trailing_hash_device.ex
| 0.969018
| 0.632446
|
trailing_hash_device.ex
|
starcoder
|
defmodule Holobot.Holofans.Videos do
@moduledoc """
Holofans videos caching server and client API module.
"""
use GenServer, shutdown: 10_000
require Logger
require Memento
alias Holobot.Holofans.{Client, Video}
@type video_status() :: :new | :live | :upcoming | :past | :missing
@cache_limit 1000
@cache_update_interval 300_000
@spec get_airing :: [Holobot.Holofans.Video.t()]
defdelegate get_airing, to: __MODULE__, as: :get_lives
def start_link(init_args \\ []) do
Logger.info("Starting Videos cache server")
GenServer.start_link(__MODULE__, [init_args], name: __MODULE__)
end
@impl true
def init(_args) do
# Setup Mnesia table
setup_table()
{:ok, %{}, {:continue, :update}}
end
@impl true
def handle_continue(:update, state) do
Logger.info("Performing initial Videos cache")
send(self(), :update)
:timer.send_interval(@cache_update_interval, :update)
{:noreply, state}
end
@impl true
def handle_info(:update, _state) do
Logger.info("Updating Videos cache")
# Clear records
:ok = Memento.Table.clear(Video)
# Do fetching from API and writing to cache
cache_videos!(:live)
cache_videos!(:upcoming)
{:noreply, %{}}
end
# Client
@doc """
Get a video by its Youtube video ID
"""
@spec get_video(binary()) :: Video.t() | nil
def get_video(yt_vid_key) do
Memento.transaction!(fn ->
Memento.Query.read(Video, yt_vid_key)
end)
end
@doc """
Get list of all videos.
"""
@spec get_all_videos :: list(Video.t())
def get_all_videos() do
Memento.transaction!(fn ->
Memento.Query.all(Video)
end)
end
@doc """
Get list of currently airing live streams.
"""
@spec get_lives :: list(Video.t())
def get_lives() do
guards = [
{:==, :live_end, nil},
{:!=, :live_start, nil},
{:==, :status, "live"}
]
Memento.transaction!(fn ->
Memento.Query.select(Video, guards)
end)
end
@doc """
Get list of upcoming streams.
"""
@spec get_upcoming() :: list(Video.t())
def get_upcoming() do
guards = [
{:==, :live_start, nil},
{:==, :status, "upcoming"},
{:==, :duration_secs, nil}
]
Memento.transaction!(fn ->
Memento.Query.select(Video, guards)
end)
|> Enum.filter(&is_not_free_chat?/1)
end
@doc """
Get list of only free chat streams.
"""
@spec get_free_chats :: [Video.t()]
def get_free_chats() do
guards = [
{:==, :live_start, nil},
{:==, :status, "upcoming"},
{:==, :duration_secs, nil}
]
Memento.transaction!(fn ->
Memento.Query.select(Video, guards)
end)
|> Enum.filter(&is_free_chat?/1)
end
@doc """
Search for a video by title. Returns a list of up to 10 results.
"""
@spec search_query(binary()) :: list(Video.t())
def search_query(query) do
with {:ok, results} <- fetch_videos(%{limit: 10, title: query}) do
results[:videos]
end
end
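# Client usage sketch (the query string is illustrative):
#
#     Videos.get_lives()
#     Videos.get_upcoming()
#     Videos.search_query("karaoke")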
# Helpers
defp setup_table() do
if !Holobot.Helpers.table_exists?(Video) do
# Create the ETS/Mnesia table
Logger.info("Setting up Mnesia table Video")
Memento.Table.create!(Video)
end
end
@spec cache_videos!(video_status()) :: any()
defp cache_videos!(status) do
# video request chunk size, <= 50
step = 50
filters = %{
limit: step,
status: Atom.to_string(status),
order: "asc",
sort: "live_schedule"
}
try do
{:ok, results} = fetch_videos(filters)
total = results[:total]
# Set number of total results to fetch
items_to_fetch = min(total, @cache_limit)
if items_to_fetch > 0 do
0..items_to_fetch
|> Stream.filter(&(rem(&1, step) == 0))
|> Enum.each(fn offset ->
Logger.debug("Current offset: #{offset}")
with {:ok, results} <- fetch_videos(Map.merge(filters, %{offset: offset})),
{:ok, videos} <- Access.fetch(results, :videos),
videos_chunk <- Stream.map(videos, &Video.build_record/1) do
Memento.transaction!(fn ->
Enum.each(videos_chunk, &Memento.Query.write/1)
end)
end
end)
Logger.info("Cached total of #{items_to_fetch} videos of status: #{status}")
else
Logger.info("Nothing to cache, skipping.")
end
rescue
RuntimeError -> "Error when caching videos of status: #{status}!"
end
end
defp fetch_videos(params \\ %{}) do
query = URI.encode_query(params)
url = URI.parse("/videos") |> Map.put(:query, query) |> URI.to_string()
case Client.get(url) do
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, body}
{:ok, %HTTPoison.Response{status_code: 404}} ->
Logger.warning("Resource not found")
{:error, "Not found"}
{:error, %HTTPoison.Error{reason: reason}} ->
Logger.error(reason)
{:error, reason}
end
end
defp is_not_free_chat?(vid), do: !is_free_chat?(vid)
defp is_free_chat?(vid) do
# String.contains?/2 with a list matches on ANY member, so require both
# words rather than flagging every title that merely mentions "chat".
title = String.downcase(vid.title)
String.contains?(title, "free") and String.contains?(title, "chat")
end
end
|
lib/holobot/holofans/videos.ex
| 0.694303
| 0.415254
|
videos.ex
|
starcoder
|
defmodule Irc.Message.Params do
@moduledoc """
The parameters part of an IRC message.
While there's no inherent reason to treat trailing and middle parameters
separately, they are stored separately to provide the guarantee that
message = message |> Params.decode |> Params.encode. If all you want is a
simple list of all the parameters, then call Params.flatten/1
"""
alias __MODULE__
defstruct [:trailing, middles: []]
@type t :: %Params{middles: [String.t], trailing: String.t}
@space <<32>>
@colon <<58>>
@doc """
Parses a string representation of the parameters section of an IRC message
"""
@spec decode(String.t() | nil) :: {:ok, Params.t()} | nil
def decode(nil), do: nil
def decode(":" <> trailing), do: {:ok, %Params{middles: [], trailing: trailing}}
def decode(str) do
# Split the trailing segment off first so that a trailing parameter
# containing spaces (e.g. ":hello there") survives decoding intact.
case String.split(str, " :", parts: 2) do
[middles, trailing] ->
{:ok, %Params{middles: split_middles(middles), trailing: trailing}}
[middles] ->
{:ok, %Params{middles: split_middles(middles), trailing: nil}}
end
end
# Middles are stored in reverse order so that encode/1 rebuilds the
# original ordering; see flatten/1.
defp split_middles(str), do: str |> String.split(" ", trim: true) |> Enum.reverse()
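# Round-trip sketch (the leading space produced by encode/1 is expected,
# since params follow a command in a full message):
#
#     {:ok, params} = Params.decode("#elixir :hello there")
#     params.middles     #=> ["#elixir"]
#     params.trailing    #=> "hello there"
#     to_string(params)  #=> " #elixir :hello there"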
@doc """
Generates a valid IRC params string from the given Params
"""
@spec encode(Params.t) :: iolist()
def encode(%Params{middles: middles, trailing: nil}) do
middles |> append_spaces([])
end
def encode(%Params{middles: middles, trailing: trailing}) do
middles |> append_spaces([@space, @colon, trailing])
end
defp append_spaces([h | t], acc) do
append_spaces(t, [@space | [h | acc]])
end
defp append_spaces([], acc) do
acc
end
@doc """
Prepends trailing to middles to create a consolidated list of params that is
easier to work with. Note: the resulting list will be in reverse order from
how it would appear in the string representation; if this does not work for
your usecase for some reason, simply pass it Enum.reverse/1 first.
"""
@spec flatten(Params.t) :: [String.t]
def flatten(%Params{middles: m, trailing: nil}), do: m
def flatten(%Params{middles: m, trailing: t}), do: [t | m]
end
defimpl String.Chars, for: Irc.Message.Params do
def to_string(params) do
params |> Irc.Message.Params.encode |> Kernel.to_string
end
end
|
lib/irc/message/params.ex
| 0.736306
| 0.511046
|
params.ex
|
starcoder
|
defmodule Crux.Rest.Endpoints.Generator do
@moduledoc false
@moduledoc since: "0.3.0"
# Module used to generate endpoint functions via (nested) macros.
# Example
# defmodule Test do
# use Crux.Rest.Endpoints.Generator
# route "/foo/:foo_id"
# route "/bar" do
# route "foo/:foo_id"
# end
# end
# Usage then
# Test.foo() # "/foo"
# Test.foo(123) # "/foo/123"
# Test.bar() # "/bar"
# Test.bar_foo() # "/bar/foo"
# Test.bar_foo(123) # "/bar/foo/123"
# Import this module, add relevant attributes, and define a before compile callback.
defmacro __using__([]) do
quote do
import unquote(__MODULE__), only: [route: 1, route: 2]
@current []
@routes %{}
@before_compile {unquote(__MODULE__), :define_functions}
end
end
@doc ~S"""
Registers a route.
This will generate functions to access it or part of it.
Additionally, if there are variable segments, those can be passed as arguments to it.
Passing `nil` as an argument causes that variable segment to be excluded from the result.
For example:
`route "/users/:user_id"`
Will generate something equivalent to:
```elixir
def users() do
"/users"
end
def users(user_id) do
"/users/#{user_id}"
end
```
"""
defmacro route(name) do
put_route(name, do: nil)
end
@doc """
Registers a route and allow nested routes to be created by using do blocks.
This works the same as `route/1`.
For example:
```elixir
route "/users" do
route "/:user_id"
route "/@me"
end
```
Is equivalent to:
```elixir
route "/users"
route "/users/:user_id"
route "/users/@me"
```
"""
defmacro route(name, do: nested_routes) do
put_route(name, do: nested_routes)
end
# Actually registers routes.
defp put_route("/" <> name, do: nested_routes) do
quote do
# Save the current path
current = @current
# Add the new path to the current one
@current current ++ unquote(String.split(name, "/"))
# Ensure that nested resources get created too
keys = Enum.map(@current, &Access.key(&1, %{}))
# Create new resources
@routes update_in(@routes, keys, fn %{} -> %{} end)
# Recurse do blocks
unquote(nested_routes)
# Restore the saved path
@current current
end
end
defp put_route(name, do: _nested_routes) do
raise "Routes must start with a /, got: \"#{name}\""
end
# Flattens the nested map structure to a list of routes
# %{"foo" => %{"bar" => %{}, "baz" => %{}}}
# to:
# ["/foo", "/foo/bar", "/foo/baz"]
defp flatten_routes(prefix \\ "", routes)
defp flatten_routes(prefix, routes)
when map_size(routes) > 0 do
routes
|> Enum.map(fn {route, children} ->
route = "#{prefix}/#{route}"
[route | [flatten_routes(route, children)]]
end)
|> List.flatten()
end
defp flatten_routes(_prefix, %{}) do
[]
end
# Before compile callback function to generate functions for the routes
defmacro define_functions(%{module: module}) do
routes =
module
|> Module.get_attribute(:routes)
|> flatten_routes()
Module.delete_attribute(module, :current)
Module.delete_attribute(module, :routes)
Enum.map(routes, &define_function/1)
end
defp define_function(route) do
{function_name, function_arguments, function_return} = transform_route(route)
quote do
@doc """
This function handles the route:
`#{unquote(route)}`.
"""
# credo:disable-for-next-line Credo.Check.Readability.Specs
def unquote(function_name)(unquote_splicing(function_arguments)) do
unquote(function_return)
end
end
end
# Transforms a given route into:
# - A function name out of the fix segments
# - A list of variable ASTs out of the variable segments
# - A return AST to return a formatted binary
defp transform_route(route) do
{segments, fix, variable} = split_route(route)
function_name = to_name(fix)
function_arguments = to_arguments(variable)
function_return = to_return(segments)
{function_name, function_arguments, function_return}
end
# Splits the route into its variable and fix segments.
@spec split_route(route :: String.t()) ::
{segments :: [String.t()], fix :: [String.t()], variable :: [String.t()]}
defp split_route("/" <> route) do
segments = String.split(route, "/")
{fix, variable} =
segments
|> Enum.split_with(fn
":" <> _segment -> false
_segment -> true
end)
{segments, fix, variable}
end
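# For example, split_route("/users/:user_id/roles") returns
# {["users", ":user_id", "roles"], ["users", "roles"], [":user_id"]},
# which ultimately yields a users_roles/1 function.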
# Joins the fix segments to a function name atom.
@spec to_name([String.t()]) :: atom()
defp to_name(segments) do
segments
|> Enum.map_join("_", fn
"@" <> rest -> String.replace(rest, ["-", "."], "_")
segment -> String.replace(segment, ["-", "."], "_")
end)
|> String.to_atom()
end
# Maps the variable segments to a list of variable ASTs to be used as function arguments.
defp to_arguments(segments) do
Enum.map(segments, &to_variable/1)
end
# Converts the given fix and evaluated variable segments into a binary joining them by "/".
defp to_return(segments) do
quote do
IO.iodata_to_binary(unquote(_to_return(segments)))
end
end
defp _to_return(segments) do
Enum.map(segments, fn
":" <> _rest = variable -> _maybe_to_string(variable)
fix -> ["/", fix]
end)
end
# If name belongs to a nil variable, empty data will be returned, otherwise its string value prefixed with a /.
defp _maybe_to_string(name) do
quote bind_quoted: [name: to_variable(name)] do
if name do
["/", to_string(name)]
else
[]
end
end
end
### Helpers
# Converts a single name to the AST of a variable
defp to_variable(":" <> name), do: to_variable(name)
defp to_variable(name) do
name
|> String.to_atom()
|> Macro.var(Elixir)
end
# Returns AST to `to_string/1` the given name as a variable in the current context.
defp to_string_variable(name) do
quote do
to_string(unquote(to_variable(name)))
end
end
end
|
lib/rest/endpoints/generator.ex
| 0.782122
| 0.547646
|
generator.ex
|
starcoder
|
defmodule Advent.Y2021.D12 do
@moduledoc """
https://adventofcode.com/2021/day/12
"""
@typep cave :: String.t()
@typep cave_system :: %{cave() => MapSet.t()}
@doc """
How many paths through this cave system are there that visit small caves at
most once?
"""
@spec part_one(Enumerable.t()) :: non_neg_integer()
def part_one(input) do
input
|> parse_input()
|> count_exits(1)
end
@doc """
Given these new rules, how many paths through this cave system are there?
"""
@spec part_two(Enumerable.t()) :: non_neg_integer()
def part_two(input) do
input
|> parse_input()
|> count_exits(2)
end
@spec parse_input(Enumerable.t()) :: cave_system()
defp parse_input(input) do
input
|> Stream.map(&Regex.run(~r/(\w+)\-(\w+)/, &1, capture: :all_but_first))
|> Stream.flat_map(fn [a, b] -> [{a, b}, {b, a}] end)
|> Enum.reduce(%{}, fn
{_a, "start"}, cave_system -> cave_system
{"end", _b}, cave_system -> cave_system
{a, b}, cave_system -> Map.update(cave_system, a, MapSet.new([b]), &MapSet.put(&1, b))
end)
end
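# For example, parse_input(["start-A", "A-end"]) produces
# %{"start" => MapSet.new(["A"]), "A" => MapSet.new(["end"])};
# edges into "start" and out of "end" are dropped so traversal only
# moves away from the entrance and stops at the exit.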
@spec count_exits(cave_system(), pos_integer()) :: non_neg_integer()
defp count_exits(cave_system, max_v) do
do_count_exits(cave_system, "start", Map.new(), max_v, 0)
end
@spec do_count_exits(cave_system(), cave(), map(), pos_integer(), non_neg_integer()) ::
non_neg_integer()
defp do_count_exits(_cave_system, "end", _seen, _max_v, count), do: count + 1
defp do_count_exits(cave_system, cave, seen, max_v, count) do
seen =
if is_small_cave(cave),
do: Map.update(seen, cave, 1, &(&1 + 1)),
else: seen
sub =
if Enum.any?(Map.values(seen), &(&1 == max_v)),
do: seen |> Map.keys() |> MapSet.new(),
else: MapSet.new()
todo = MapSet.difference(cave_system[cave], sub)
Enum.map(todo, fn c ->
do_count_exits(cave_system, c, seen, max_v, count)
end)
|> Enum.sum()
end
@spec is_small_cave(cave()) :: boolean()
defp is_small_cave(cave) do
String.downcase(cave) == cave
end
end
|
lib/advent/y2021/d12.ex
| 0.74055
| 0.424591
|
d12.ex
|
starcoder
|
defmodule GenRetry.Task do
@moduledoc ~S"""
Provides `async/2`, which operates like `Task.async/1` with retry
capability.
"""
@doc ~S"""
Works like `Task.async`, but with retry. Returns a regular `%Task{}` usable
with the rest of the functions in `Task`.
`opts` are GenRetry options.
The `:respond_to` option is tolerated, but ignored.
"""
@spec async(GenRetry.retryable_fun(), GenRetry.options()) :: %Task{}
def async(fun, opts \\ []) do
Task.async(task_function(fun, opts))
end
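# Usage sketch (option values are illustrative; any GenRetry option
# except :respond_to applies):
#
#     task = GenRetry.Task.async(fn -> flaky_request!() end, retries: 3)
#     result = Task.await(task)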
defmodule Supervisor do
@moduledoc ~S"""
Provides `async/3`, which operates like `Task.Supervisor.async/2`
with retry capability.
"""
@doc ~S"""
Works like `Task.Supervisor.async/2`, but with retry. Returns a regular
`%Task{}` usable with the rest of the functions in `Task`.
`opts` are GenRetry options.
The `:respond_to` option is tolerated, but ignored.
"""
@spec async(pid, GenRetry.retryable_fun(), GenRetry.options()) :: %Task{}
def async(pid, fun, opts \\ []) do
Task.Supervisor.async(pid, GenRetry.Task.task_function(fun, opts))
end
@doc ~S"""
Works like `Task.Supervisor.async_nolink/2`, but with retry. Returns a regular
`%Task{}` usable with the rest of the functions in `Task`.
`opts` are GenRetry options.
The `:respond_to` option is tolerated, but ignored.
"""
@spec async_nolink(pid, GenRetry.retryable_fun(), GenRetry.options()) ::
%Task{}
def async_nolink(pid, fun, opts \\ []) do
Task.Supervisor.async_nolink(pid, GenRetry.Task.task_function(fun, opts))
end
end
@doc false
@spec task_function(GenRetry.retryable_fun(), GenRetry.options()) :: fun
def task_function(fun, opts) do
fn ->
GenRetry.retry_link(fun, Keyword.put(opts, :respond_to, self()))
receive do
{:success, return_value, _worker_state} -> return_value
{:failure, error, trace, _worker_state} -> reraise(error, trace)
end
end
end
end
|
lib/gen_retry/task.ex
| 0.821044
| 0.489564
|
task.ex
|
starcoder
|
defmodule MuonTrap.Options do
@moduledoc """
Validate and normalize the options passed to MuonTrap.cmd/3 and MuonTrap.Daemon.start_link/3
This module is generally not called directly, but it's likely
the source of exceptions if any options aren't quite right. Call `validate/4` directly to
debug or check options without invoking a command.
"""
@typedoc """
The following fields are always present:
* `:cmd` - the command to run
* `:args` - a list of arguments to the command
The next fields are optional:
* `:into` - `MuonTrap.cmd/3` only
* `:cd`
* `:arg0`
* `:stderr_to_stdout`
* `:parallelism`
* `:env`
* `:name` - `MuonTrap.Daemon`-only
* `:msg_callback` - `MuonTrap.Daemon`-only
* `:log_output` - `MuonTrap.Daemon`-only
* `:log_prefix` - `MuonTrap.Daemon`-only
* `:cgroup_controllers`
* `:cgroup_path`
* `:cgroup_base`
* `:delay_to_sigkill`
* `:cgroup_sets`
* `:uid`
* `:gid`
"""
@type t() :: map()
@doc """
Validate options and normalize them for invoking commands
Pass in `:cmd` or `:daemon` for the first parameter to allow function-specific
options.
"""
@spec validate(:cmd | :daemon, binary(), [binary()], keyword()) :: t()
def validate(context, cmd, args, opts) when context in [:cmd, :daemon] do
assert_no_null_byte!(cmd, context)
unless Enum.all?(args, &is_binary/1) do
raise ArgumentError, "all arguments for #{operation(context)} must be binaries"
end
abs_command = System.find_executable(cmd) || :erlang.error(:enoent, [cmd, args, opts])
validate_options(context, abs_command, args, opts)
|> resolve_cgroup_path()
end
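# Illustrative call (the resolved executable path depends on the system):
#
#     MuonTrap.Options.validate(:cmd, "echo", ["hello"], cd: "/tmp")
#     #=> %{cmd: "/bin/echo", args: ["hello"], into: "", cd: "/tmp"}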
defp resolve_cgroup_path(%{cgroup_path: _path, cgroup_base: _base}) do
raise ArgumentError, "cannot specify both a cgroup_path and a cgroup_base"
end
defp resolve_cgroup_path(%{cgroup_base: base} = options) do
# Create a random subfolder for this invocation
Map.put(options, :cgroup_path, Path.join(base, random_string()))
end
defp resolve_cgroup_path(other), do: other
# Thanks https://github.com/danhper/elixir-temp/blob/master/lib/temp.ex
defp random_string() do
Integer.to_string(:rand.uniform(0x100000000), 36) |> String.downcase()
end
defp validate_options(context, cmd, args, opts) do
Enum.reduce(
opts,
%{cmd: cmd, args: args, into: ""},
&validate_option(context, &1, &2)
)
end
# System.cmd/3 options
defp validate_option(:cmd, {:into, what}, opts), do: Map.put(opts, :into, what)
defp validate_option(_any, {:cd, bin}, opts) when is_binary(bin), do: Map.put(opts, :cd, bin)
defp validate_option(_any, {:arg0, bin}, opts) when is_binary(bin),
do: Map.put(opts, :arg0, bin)
defp validate_option(_any, {:stderr_to_stdout, bool}, opts) when is_boolean(bool),
do: Map.put(opts, :stderr_to_stdout, bool)
defp validate_option(_any, {:parallelism, bool}, opts) when is_boolean(bool),
do: Map.put(opts, :parallelism, bool)
defp validate_option(_any, {:env, enum}, opts),
do: Map.put(opts, :env, validate_env(enum))
# MuonTrap.Daemon options
defp validate_option(:daemon, {:name, name}, opts),
do: Map.put(opts, :name, name)
defp validate_option(:daemon, {:log_output, level}, opts)
when level in [:error, :warn, :info, :debug],
do: Map.put(opts, :log_output, level)
defp validate_option(:daemon, {:log_prefix, prefix}, opts) when is_binary(prefix),
do: Map.put(opts, :log_prefix, prefix)
defp validate_option(:daemon, {:msg_callback, nil}, opts), do: opts
defp validate_option(:daemon, {:msg_callback, function}, opts) when is_function(function) do
with function_info <- Function.info(function),
true <- function_info[:arity] == 1 do
Map.put(opts, :msg_callback, function)
else
_arity_match_error ->
raise(ArgumentError, "Invalid :msg_callback, only functions with /1 arity are allowed")
end
end
# MuonTrap common options
defp validate_option(_any, {:cgroup_controllers, controllers}, opts) when is_list(controllers),
do: Map.put(opts, :cgroup_controllers, controllers)
defp validate_option(_any, {:cgroup_path, path}, opts) when is_binary(path) do
Map.put(opts, :cgroup_path, path)
end
defp validate_option(_any, {:cgroup_base, path}, opts) when is_binary(path) do
Map.put(opts, :cgroup_base, path)
end
defp validate_option(_any, {:delay_to_sigkill, delay}, opts) when is_integer(delay),
do: Map.put(opts, :delay_to_sigkill, delay)
defp validate_option(_any, {:cgroup_sets, sets}, opts) when is_list(sets),
do: Map.put(opts, :cgroup_sets, sets)
defp validate_option(_any, {:uid, id}, opts) when is_integer(id) or is_binary(id),
do: Map.put(opts, :uid, id)
defp validate_option(_any, {:gid, id}, opts) when is_integer(id) or is_binary(id),
do: Map.put(opts, :gid, id)
defp validate_option(_any, {key, val}, _opts),
do: raise(ArgumentError, "invalid option #{inspect(key)} with value #{inspect(val)}")
defp validate_env(enum) do
Enum.map(enum, fn
{k, nil} ->
{String.to_charlist(k), false}
{k, v} ->
{String.to_charlist(k), String.to_charlist(v)}
other ->
raise ArgumentError, "invalid environment key-value #{inspect(other)}"
end)
end
# Copied from Elixir's system.ex to make MuonTrap.cmd pass System.cmd's tests
defp assert_no_null_byte!(binary, context) do
case :binary.match(binary, "\0") do
{_, _} ->
raise ArgumentError,
"cannot execute #{operation(context)} for program with null byte, got: #{
inspect(binary)
}"
:nomatch ->
:ok
end
end
defp operation(:cmd), do: "MuonTrap.cmd/3"
defp operation(:daemon), do: "MuonTrap.Daemon.start_link/3"
end
|
lib/muontrap/options.ex
| 0.790934
| 0.516047
|
options.ex
|
starcoder
|
defmodule Plenario.DataSetQueries do
import Ecto.Query
import Geo.PostGIS, only: [
st_contains: 2,
st_intersects: 2
]
import Plenario.QueryUtils
alias Plenario.{
DataSet,
DataSetQueries,
User
}
def list, do: from d in DataSet
def get(id) do
case Regex.match?(~r/^\d+$/, "#{id}") do
true -> from d in DataSet, where: d.id == ^id
false -> from d in DataSet, where: d.slug == ^id
end
end
def with_user(query), do: from d in query, preload: [:user]
def with_fields(query), do: from d in query, preload: [:fields]
def with_virtual_dates(query), do: from d in query, preload: [:virtual_dates]
def with_virtual_points(query), do: from d in query, preload: [:virtual_points]
def state(query, state), do: from d in query, where: d.state == ^state
def for_user(query, %User{id: id}), do: for_user(query, id)
def for_user(query, id), do: from d in query, where: d.user_id == ^id
def bbox_contains(query, geom), do: from d in query, where: st_contains(d.bbox, ^geom)
def bbox_intersects(query, geom), do: from d in query, where: st_intersects(d.bbox, ^geom)
def time_range_contains(query, timestamp), do: from d in query, where: fragment("?::tsrange @> ?::timestamp", d.time_range, ^timestamp)
def time_range_intersects(query, tsrange), do: from d in query, where: fragment("?::tsrange && ?::tsrange", d.time_range, ^Plenario.TsRange.to_postgrex(tsrange))
defdelegate order(query, args), to: Plenario.QueryUtils
defdelegate paginate(query, args), to: Plenario.QueryUtils
def handle_opts(query, opts \\ []) do
opts = [
with_user: false,
with_fields: false,
with_virtual_dates: false,
with_virtual_points: false,
state: :empty,
for_user: :empty,
bbox_contains: :empty,
bbox_intersects: :empty,
time_range_contains: :empty,
time_range_intersects: :empty,
order: :empty,
paginate: :empty
]
|> Keyword.merge(opts)
query
|> boolean_compose(opts[:with_user], DataSetQueries, :with_user)
|> boolean_compose(opts[:with_fields], DataSetQueries, :with_fields)
|> boolean_compose(opts[:with_virtual_dates], DataSetQueries, :with_virtual_dates)
|> boolean_compose(opts[:with_virtual_points], DataSetQueries, :with_virtual_points)
|> filter_compose(opts[:state], DataSetQueries, :state)
|> filter_compose(opts[:for_user], DataSetQueries, :for_user)
|> filter_compose(opts[:bbox_contains], DataSetQueries, :bbox_contains)
|> filter_compose(opts[:bbox_intersects], DataSetQueries, :bbox_intersects)
|> filter_compose(opts[:time_range_contains], DataSetQueries, :time_range_contains)
|> filter_compose(opts[:time_range_intersects], DataSetQueries, :time_range_intersects)
|> filter_compose(opts[:order], DataSetQueries, :order)
|> filter_compose(opts[:paginate], DataSetQueries, :paginate)
end
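# Composition sketch (filter values are illustrative):
#
#     DataSetQueries.list()
#     |> DataSetQueries.handle_opts(with_user: true, state: "ready")
#     |> Plenario.Repo.all()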
end
|
lib/plenario/queries/data_set_queries.ex
| 0.531209
| 0.664378
|
data_set_queries.ex
|
starcoder
|
defmodule Re.Listings.Highlights.Scores do
@moduledoc """
Define listing profile pattern for highlight eligibility and
set score to allow highlight ordering.
"""
alias Re.{
Filtering,
Listings.Queries,
Repo
}
def order_highlights_by_scores(listings) do
options = %{
max_id: get_max_id(),
average_price_per_area_by_neighborhood: get_average_price_per_area_by_neighborhood()
}
listings
|> Enum.map(&%{listing: &1, score: calculate_highlight_score(&1, options)})
|> Enum.sort(&(&1.score >= &2.score))
|> Enum.map(& &1.listing)
end
def calculate_highlight_score(listing, options) do
max_id = Map.get(options, :max_id)
average_price_per_area_by_neighborhood =
Map.get(options, :average_price_per_area_by_neighborhood)
calculate_recency_score(listing, max_id) +
calculate_price_per_area_score(listing, average_price_per_area_by_neighborhood)
end
defp get_max_id() do
Queries.active()
|> Queries.max_id()
|> Repo.one()
end
@neighborhoods_slugs ~w(botafogo copacabana flamengo humaita ipanema lagoa laranjeiras leblon perdizes vila-pompeia)
@profile_score_filters %{
max_price: 2_000_000,
max_rooms: 3,
min_garage_spots: 1,
neighborhoods_slugs: @neighborhoods_slugs
}
defp get_average_price_per_area_by_neighborhood() do
Queries.average_price_per_area_by_neighborhood()
|> Filtering.apply(@profile_score_filters)
|> Repo.all()
|> Enum.reduce(%{}, fn item, acc ->
Map.merge(acc, %{item.neighborhood_slug => item.average_price_per_area})
end)
end
def calculate_recency_score(_, 0), do: 0
def calculate_recency_score(%{id: listing_id}, max_id) when listing_id > max_id, do: 1
def calculate_recency_score(%{id: listing_id}, max_id) do
listing_id / max_id
end
def calculate_price_per_area_score(%{price: 0}, _), do: 0
def calculate_price_per_area_score(%{area: 0}, _), do: 0
def calculate_price_per_area_score(
%{price: price, area: area, address: address},
average_price_by_neighborhood
) do
price_per_area = price / area
price_in_neighborhood = Map.get(average_price_by_neighborhood, address.neighborhood_slug, 0.0)
price_in_neighborhood / price_per_area
end
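# Worked example (numbers are illustrative): a listing priced at 1_000_000
# with area 100 has price_per_area 10_000; if the neighborhood average is
# 12_000, the score is 12_000 / 10_000 = 1.2, so listings priced below the
# neighborhood average score higher.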
def filter_with_profile_score(query, filters \\ %{}) do
updated_filters = mount_filter(filters)
Filtering.apply(query, updated_filters)
end
defp mount_filter(filters) do
Map.merge(filters, @profile_score_filters)
end
end
|
apps/re/lib/listings/highlights/scores.ex
| 0.710729
| 0.48377
|
scores.ex
|
starcoder
|
defmodule BitPal.BCH.KeyTree do
@moduledoc """
This module implements BIP-0032: derivation of private and public keys from a
public or private master key (see: https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki)
The idea is quite simple. This module implements two functions:
- to_public(private) - generates a public key from a private key (can't spend coins using the public key).
- child_key(key, id) - generate a child key derived from key, which may be either a public or a private key.
Based on these function (mainly, child_key), we can imagine a tree with the root in the master key
(m for the private key and M for the public key). Each key has 0x7FFFFFFF*2 children. 0..0x7FFFFFF
are regular child keys, and 0x80000000.. are "hardened keys" (can't derive them from public keys,
but can be used to derive public keys). In this implementation, we use negative numbers to denote
hardened keys (the spec uses 1' for a hardened key). Note that 0' is represented as -1, 1' as -2
and so on.
In this scheme, it is meaningful to assign each key with a "derivation path" as a sequence of
numbers, like so: m / 1 / 5 / 8
These works much like directories in your regular file system, except that we can only use
numbers. Some of these have a specific meaning. See BIP-0044 or BIP-0043 for more details. This
module is only concerned with key derivation, so it does not impose any restrictions.
This module represents a derivation path as a list of numbers. It also contains the ability to
parse a string in a "standard" representation into a list of numbers that can later be manipulated
as desired. It is worth noting that this representation does not concern itself with whether or
not we are talking about public or private keys (i.e. the leading m or M). This is because we want
it to be easy to take substrings of this representation and divide the derivation into multiple
steps: we typically don't start with the master key, but with a derived key at some
level. The symbol :public can be used to enforce a public key at some point.
"""
alias BitPal.Crypto.Base58
alias BitPal.Crypto.EC
defmodule Public do
@moduledoc """
This is our representation of a public key.
Note: parent_fingerprint is RIPEMD160 after SHA256 of the parent public key.
"""
defstruct key: nil,
chaincode: nil,
depth: 0,
child_id: 0,
parent_fingerprint: <<0x00, 0x00, 0x00, 0x00>>
end
defmodule Private do
@moduledoc """
This is our representation of a private key.
Note: parent_fingerprint is RIPEMD160 after SHA256 of the parent public key.
"""
defstruct key: nil,
chaincode: nil,
depth: 0,
child_id: 0,
parent_fingerprint: <<0x00, 0x00, 0x00, 0x00>>
end
@doc """
Convert a private key to a public key. A no-op if the key is already a public key.
"""
def to_public(key = %Public{}) do
key
end
def to_public(key = %Private{}) do
%Public{
key: EC.to_public!(key.key),
chaincode: key.chaincode,
depth: key.depth,
child_id: key.child_id,
parent_fingerprint: key.parent_fingerprint
}
end
@doc """
Derive a child key based on "id". If "id" is negative, we derive a hardened child key (i.e. a key
which can not be derived from a public key, not even the public part).
Note: It is possible that some keys are invalid. In that case we return :error and the next key should
be used instead. The probability of this happening is 1 in 2^127 according to BIP-0032.
"""
def child_key(key = %Private{}, id) do
id =
if id < 0 do
-id - 1 + 0x80000000
else
id
end
seed =
if id >= 0x80000000 do
# Hardened key
<<0x00>> <> key.key <> <<id::32>>
else
# Normal key
EC.to_public!(key.key) <> <<id::32>>
end
<<nkey::binary-size(32), ncode::binary-size(32)>> = hmac_sha512(key.chaincode, seed)
case EC.privkey_add(key.key, nkey) do
{:ok, nkey} ->
%Private{
key: nkey,
chaincode: ncode,
depth: key.depth + 1,
child_id: id,
parent_fingerprint: key_fingerprint(key)
}
{:error, _} ->
:error
end
end
def child_key(key = %Public{}, id) do
if id < 0 do
raise("Can not derive hardened keys from a public key.")
end
<<nkey::binary-size(32), ncode::binary-size(32)>> =
hmac_sha512(key.chaincode, key.key <> <<id::32>>)
case EC.pubkey_add(key.key, nkey) do
{:ok, nkey} ->
%Public{
key: nkey,
chaincode: ncode,
depth: key.depth + 1,
child_id: id,
parent_fingerprint: key_fingerprint(key)
}
{:error, _} ->
:error
end
end
@doc """
Derive the key specified in the path.
"""
def derive(key, []), do: key
def derive(key, [first | rest]), do: derive(apply_part(key, first), rest)
def derive(key, <<data::binary>>), do: derive(key, parse_path(data))
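# For example, derive(master, "m/0'/1") is equivalent to
# master |> child_key(-1) |> child_key(1).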
@doc """
Apply a single part of a chain.
"""
def apply_part(key, :public), do: to_public(key)
def apply_part(key, id), do: child_key(key, id)
@doc """
Compute a key's fingerprint (RIPEMD160 of the SHA256).
"""
def key_fingerprint(key) do
{_, hash} = key_hash(key)
binary_part(hash, 0, 4)
end
@doc """
Compute a key's hash (RIPEMD160 of the SHA256). This is what is typically used as the target address
for P2PKH, for example. The returned address is in the format expected by the cashaddress module,
so it is easy to use that to generate BCH URLs later on.
Note: This always computes the public key's hash, even if a private key is passed.
"""
def key_hash(key) do
hash = :crypto.hash(:ripemd160, :crypto.hash(:sha256, to_public(key).key))
{:p2pkh, hash}
end
@doc """
Create a key from a seed (a binary of some length).
"""
def from_seed(seed) do
<<key::binary-size(32), chaincode::binary-size(32)>> = hmac_sha512("Bitcoin seed", seed)
# Note: no parent fingerprint, we're a root key.
%Private{
key: key,
chaincode: chaincode,
depth: 0,
child_id: 0
}
end
# Signatures for various subnets.
@mainnet_pub <<0x04, 0x88, 0xB2, 0x1E>>
@mainnet_pri <<0x04, 0x88, 0xAD, 0xE4>>
@doc """
Load a Base58-encoded key into a suitable representation.
"""
def import_key(string) do
<<version::binary-size(4), rest::binary>> = Base58.decode(string, :doublesha)
case version do
@mainnet_pub ->
parse_public(rest)
@mainnet_pri ->
parse_private(rest)
end
end
defp parse_public(data) do
<<
depth::binary-size(1),
fingerprint::binary-size(4),
child_id::binary-size(4),
chaincode::binary-size(32),
key::binary-size(33)
>> = data
%Public{
key: key,
chaincode: chaincode,
depth: :binary.decode_unsigned(depth),
child_id: :binary.decode_unsigned(child_id),
parent_fingerprint: fingerprint
}
end
defp parse_private(data) do
<<
depth::binary-size(1),
fingerprint::binary-size(4),
child_id::binary-size(4),
chaincode::binary-size(32),
0x00,
key::binary-size(32)
>> = data
%Private{
key: key,
chaincode: chaincode,
depth: :binary.decode_unsigned(depth),
child_id: :binary.decode_unsigned(child_id),
parent_fingerprint: fingerprint
}
end
@doc """
Export a Base58 key in the standard format. Note: This is more information than what is needed to
make payments to a node. Thus, this format is only suitable if we expect to further derive keys
from this key.
"""
def export_key(key) do
{signature, bytes} =
case key do
%Public{key: k} ->
{@mainnet_pub, k}
%Private{key: k} ->
{@mainnet_pri, <<0x00>> <> k}
end
data =
signature <>
<<key.depth::8>> <>
key.parent_fingerprint <>
<<
key.child_id::4*8
>> <>
key.chaincode <>
bytes
Base58.encode(data, :doublesha)
end
@doc """
Parses a derivation string into a list that we use in the rest of this library.
"""
def parse_path(string) do
[first | rest] = String.split(string, "/")
case String.trim(first) do
"m" ->
parse_path_i(rest)
"M" ->
Enum.concat(parse_path_i(rest), [:public])
_ ->
# Consider this as a fragment, and just parse it as if we had a "m/" in the beginning.
parse_path_i([first | rest])
end
end
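# For example, parse_path("m/44'/145'/0'") returns [-45, -146, -1],
# and parse_path("M/0/1") returns [0, 1, :public].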
defp parse_path_i(parts), do: Enum.map(parts, &convert_part/1)
defp convert_part(part) do
part = String.trim(part)
cond do
part == "p" or part == "P" ->
:public
String.ends_with?(part, "'") ->
-(String.to_integer(String.slice(part, 0, String.length(part) - 1)) + 1)
true ->
String.to_integer(part)
end
end
@doc """
Convert a path-list into a string.
Note that paths containing ":public" do not have an exact representation in the standard
format. We will simply output M if there is a ":public" somewhere, regardless of where it is.
"""
def inspect_path(path) do
r = inspect_path_i(Enum.filter(path, &(&1 != :public)))
if Enum.find(path, &(&1 == :public)) do
"M" <> r
else
"m" <> r
end
end
defp inspect_path_i([first | rest]) do
p =
if first >= 0 do
inspect(first)
else
inspect(-first - 1) <> "'"
end
"/" <> p <> inspect_path_i(rest)
end
defp inspect_path_i([]) do
""
end
# Perform a HMAC-SHA512.
defp hmac_sha512(key, data) do
:crypto.mac(:hmac, :sha512, key, data)
end
end
|
lib/bitpal/bch/keytree.ex
| 0.897139
| 0.678247
|
keytree.ex
|
starcoder
|
defmodule SoftBank.Account do
@moduledoc """
An Account represents accounts in the system which are of _asset_,
_liability_, or _equity_ types, in accordance with the "accounting equation".
Each account must be set to one of the following types:
| TYPE | NORMAL BALANCE | DESCRIPTION |
| :-------- | :-------------:| :--------------------------------------|
| asset | Debit | Resources owned by the Business Entity |
| liability | Credit | Debts owed to outsiders |
| equity | Credit | Owners rights to the Assets |
Each account can also be marked as a _Contra Account_. A contra account will have it's
normal balance swapped. For example, to remove equity, a "Drawing" account may be created
as a contra equity account as follows:
`account = %Fuentes.Account{name: "Drawing", type: "asset", contra: true}`
At all times the balance of all accounts should conform to the "accounting equation"
*Assets = Liabilities + Owner's Equity*
Each account type acts as it's own ledger.
For more details see:
[Wikipedia - Accounting Equation](http://en.wikipedia.org/wiki/Accounting_equation)
[Wikipedia - Debits, Credits, and Contra Accounts](http://en.wikipedia.org/wiki/Debits_and_credits)
"""
import Kernel, except: [abs: 1]
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
alias SoftBank.Repo
alias SoftBank.Amount
alias SoftBank.Account
alias SoftBank.Entry
alias SoftBank.Config
@typedoc "An Account type."
@type t :: %__MODULE__{
name: String.t(),
account_number: String.t(),
type: String.t(),
          contra: boolean(),
hash: String.t(),
default_currency: String.t(),
          amounts: [SoftBank.Amount.t()]
}
schema "softbank_accounts" do
field(:name, :string)
field(:account_number, :string)
field(:hash, :string)
field(:type, :string)
field(:contra, :boolean)
field(:default_currency, :string)
field(:balance, Money.Ecto.Composite.Type, virtual: true)
has_many(:amounts, Amount, on_delete: :delete_all)
has_many(:entry, through: [:amounts, :entry], on_delete: :delete_all)
    timestamps()
end
@params ~w(account_number type contra name hash id default_currency)a
@required_fields ~w(account_number)a
@credit_types ["asset"]
@debit_types ["liability", "equity"]
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, @params)
|> validate_required(@required_fields)
end
@doc false
def to_changeset(struct, params \\ %{}) do
struct
|> cast(params, @params)
end
@doc """
Create new account with default ledgers
"""
def new(name) do
default_currency = Config.get(:default_currency, :USD)
new(name, default_currency)
end
  def new(name, currency, hash \\ hash_id()) do
    # Check whether the given currency is valid.
known? = Cldr.Currency.known_currency?(currency)
case known? do
true ->
currency = to_string(currency)
asset_struct = %{name: name <> " Assets", type: "asset", default_currency: currency}
account_number = bank_account_number()
{_, debit_account} =
%Account{}
|> Account.to_changeset(asset_struct)
|> put_change(:account_number, account_number)
|> put_change(:hash, hash)
|> validate_required(@required_fields)
|> Repo.insert()
        liability_struct = %{
name: name <> " Liabilities",
type: "liability",
default_currency: currency
}
account_number = bank_account_number()
{_, credit_account} =
%Account{}
          |> Account.to_changeset(liability_struct)
|> put_change(:account_number, account_number)
|> put_change(:hash, hash)
|> validate_required(@required_fields)
|> Repo.insert()
equity_struct = %{name: name <> " Equity", type: "equity", default_currency: currency}
account_number = bank_account_number()
{_, equity_account} =
%Account{}
|> Account.to_changeset(equity_struct)
|> put_change(:account_number, account_number)
|> put_change(:hash, hash)
|> validate_required(@required_fields)
|> Repo.insert()
%{
hash: hash,
debit_account: debit_account,
credit_account: credit_account,
equity_account: equity_account
}
false ->
{:error, "unknown currency"}
end
end
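  # Sketch of the result shape (hash and account numbers are random):
  #
  #   SoftBank.Account.new("Alice", :USD)
  #   #=> %{hash: "...",
  #   #     debit_account: %Account{type: "asset", ...},
  #   #     credit_account: %Account{type: "liability", ...},
  #   #     equity_account: %Account{type: "equity", ...}}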
  defp with_amounts(query) do
from(q in query, preload: [:amounts])
end
@doc false
  @spec amount_sum(Ecto.Repo.t(), SoftBank.Account.t(), String.t()) :: Money.t()
def amount_sum(repo, account, type) do
records =
Amount
|> Amount.for_account(account)
|> Amount.select_type(type)
|> repo.all()
default_currency = account.default_currency
default_currency = String.to_atom(default_currency)
latest_rates = Money.ExchangeRates.latest_rates()
    rates =
      case latest_rates do
        {:error, _reason} -> []
        {:ok, rates} -> rates
      end
default_records =
Enum.map(records, fn x ->
Money.to_currency!(x, default_currency, rates)
end)
    zero = Money.new(default_currency, 0)

    Enum.reduce(default_records, zero, fn r, acc ->
      Money.add!(r, acc)
    end)
  end
@doc false
  @spec amount_sum(Ecto.Repo.t(), SoftBank.Account.t(), String.t(), map) :: Money.t()
def amount_sum(repo, account, type, dates) do
records =
Amount
|> Amount.for_account(account)
|> Amount.dated(dates)
|> Amount.select_type(type)
|> repo.all()
default_currency = account.default_currency
default_currency = String.to_atom(default_currency)
latest_rates = Money.ExchangeRates.latest_rates()
    rates =
      case latest_rates do
        {:error, _reason} -> []
        {:ok, rates} -> rates
      end
default_records =
Enum.map(records, fn x ->
Money.to_currency!(x, default_currency, rates)
end)
    zero = Money.new(default_currency, 0)

    Enum.reduce(default_records, zero, fn r, acc ->
      Money.add!(r, acc)
    end)
  end
@doc """
Computes the account balance for a given `SoftBank.Account` in a given
Ecto.Repo when provided with a map of dates in the format
`%{from_date: from_date, to_date: to_date}`.
  Returns a `Money` struct.
  """
  @spec account_balance(Ecto.Repo.t(), SoftBank.Account.t() | [SoftBank.Account.t()], map | nil) ::
          Money.t()
def account_balance(repo \\ Config.repo(), account_or_account_list, dates \\ nil) do
balance(repo, account_or_account_list, dates)
end
@doc """
  Computes the account balance for a `SoftBank.Account`, or a list of accounts,
  in a given Ecto.Repo. When no dates are given, the balance is inclusive of all
  entries. Lists are intended to contain `SoftBank.Account`s of the same type.

  Returns a `Money` struct.
"""
  # Balance for an individual account, inclusive of all entries (no date range)
  def balance(
        repo,
        account = %Account{
          type: type,
          contra: contra
        },
        dates
      )
      when is_nil(dates) do
    credits = Account.amount_sum(repo, account, "credit") || Money.new(:USD, 0)
    debits = Account.amount_sum(repo, account, "debit") || Money.new(:USD, 0)

    if type in @credit_types && !contra do
      Money.sub!(debits, credits)
    else
      Money.sub!(credits, debits)
    end
  end
  # Balance for an individual account within a date range
  def balance(
        repo,
        account = %Account{
          type: type,
          contra: contra
        },
        dates
      ) do
    credits = Account.amount_sum(repo, account, "credit", dates) || Money.new(:USD, 0)
    debits = Account.amount_sum(repo, account, "debit", dates) || Money.new(:USD, 0)

    if type in @credit_types && !contra do
      Money.sub!(debits, credits)
    else
      Money.sub!(credits, debits)
    end
  end
  # Balance for a list of accounts (intended for accounts of the same type)
  def balance(repo, accounts, dates) when is_list(accounts) do
    zero = Money.new(:USD, 0)

    Enum.reduce(accounts, zero, fn account, acc ->
      Money.add!(Account.balance(repo, account, dates), acc)
    end)
  end
defp hash_id(number \\ 20) do
Nanoid.generate(number, "0123456789")
end
  defp bank_account_number(number \\ 12) do
    Nanoid.generate(number, "0123456789")
  end
@doc """
Fetch the Account from the Repo.
"""
def fetch(account, repo \\ Repo)
def fetch(%{account_number: account_number}, repo) do
    Account
|> where([a], a.account_number == ^account_number)
|> select([a], %{
account_number: a.account_number,
hash: a.hash,
type: a.type,
contra: a.contra,
id: a.id,
default_currency: a.default_currency
})
|> repo.one()
end
def fetch(%{hash: hash}, repo) do
    Account
|> where([a], a.hash == ^hash)
|> select([a], %{
account_number: a.account_number,
hash: a.hash,
type: a.type,
contra: a.contra,
id: a.id,
default_currency: a.default_currency
})
|> repo.all()
end
@doc """
Computes a test balance for all accounts in the provided Ecto.Repo.
Returns Money type.
"""
def test_balance(repo \\ Config.repo()) do
accounts = repo.all(Account)
    default_currency = Config.get(:default_currency, :USD)
case Enum.count(accounts) > 0 do
true ->
accounts_by_type = Enum.group_by(accounts, fn i -> String.to_atom(i.type) end)
accounts_by_type =
Enum.map(accounts_by_type, fn {account_type, accounts} ->
{account_type, Account.account_balance(repo, accounts)}
end)
        accounts_by_type[:asset]
        |> Money.sub!(accounts_by_type[:liability])
        |> Money.sub!(accounts_by_type[:equity])
false ->
Money.new(default_currency, 0)
end
end
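  # Per the accounting equation, a consistent ledger satisfies
  # assets - liabilities - equity == 0, so a zero Money amount from
  # test_balance/1 indicates the books are balanced.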
end
|
lib/repos/Bank/Account.ex
| 0.868102
| 0.648327
|
Account.ex
|
starcoder
|
defimpl Timex.Protocol, for: Tuple do
alias Timex.Types
import Timex.Macros
@epoch :calendar.datetime_to_gregorian_seconds({{1970,1,1},{0,0,0}})
@spec to_julian(Types.date | Types.datetime) :: integer
def to_julian({y,m,d}) when is_date(y,m,d) do
Timex.Calendar.Julian.julian_date(y, m, d)
end
def to_julian({{y,m,d},_}) when is_date(y,m,d) do
Timex.Calendar.Julian.julian_date(y, m, d)
end
def to_julian(_), do: {:error, :invalid_date}
@spec to_gregorian_seconds(Types.date | Types.datetime) :: integer
def to_gregorian_seconds({y,m,d} = date) when is_date(y,m,d),
do: :calendar.datetime_to_gregorian_seconds({date,{0,0,0}})
def to_gregorian_seconds({{y,m,d},{h,mm,s}} = dt) when is_datetime(y,m,d,h,mm,s),
do: :calendar.datetime_to_gregorian_seconds(dt)
def to_gregorian_seconds(_), do: {:error, :invalid_date}
@spec to_gregorian_microseconds(Types.date | Types.datetime) :: integer
def to_gregorian_microseconds({y,m,d} = date) when is_date(y,m,d),
do: (to_gregorian_seconds(date)*(1_000*1_000))
def to_gregorian_microseconds({{y,m,d},{h,mm,s}} = date) when is_datetime(y,m,d,h,mm,s),
do: (to_gregorian_seconds(date)*(1_000*1_000))
def to_gregorian_microseconds(_), do: {:error, :invalid_date}
@spec to_unix(Types.date | Types.datetime) :: integer
def to_unix({y,m,d} = date) when is_date(y,m,d),
do: (:calendar.datetime_to_gregorian_seconds({date,{0,0,0}}) - @epoch)
def to_unix({{y,m,d},{h,mm,s}} = dt) when is_datetime(y,m,d,h,mm,s),
do: (:calendar.datetime_to_gregorian_seconds(dt) - @epoch)
def to_unix(_), do: {:error, :invalid_date}
@spec to_date(Types.date | Types.datetime) :: Date.t
def to_date({y,m,d}) when is_date(y,m,d),
do: %Date{year: y, month: m, day: d}
def to_date({{y,m,d},_}) when is_date(y,m,d),
do: %Date{year: y, month: m, day: d}
def to_date(_), do: {:error, :invalid_date}
@spec to_datetime(Types.date | Types.datetime, Types.valid_timezone) ::
DateTime.t | {:error, term}
def to_datetime({y,m,d} = date, timezone) when is_date(y,m,d) do
Timex.DateTime.Helpers.construct({date, {0,0,0}}, timezone)
end
def to_datetime({{y,m,d},{h,mm,s}} = dt, timezone) when is_datetime(y,m,d,h,mm,s) do
Timex.DateTime.Helpers.construct(dt, timezone)
end
  def to_datetime(_, _), do: {:error, :invalid_date}
@spec to_naive_datetime(Types.date | Types.datetime) :: NaiveDateTime.t
def to_naive_datetime({y,m,d}) when is_date(y,m,d) do
%NaiveDateTime{year: y, month: m, day: d, hour: 0, minute: 0, second: 0, microsecond: {0,0}}
end
def to_naive_datetime({{y,m,d},{h,mm,s}}) when is_datetime(y,m,d,h,mm,s) do
%NaiveDateTime{year: y, month: m, day: d, hour: h, minute: mm, second: s, microsecond: {0,0}}
end
def to_naive_datetime(_), do: {:error, :invalid_date}
@spec to_erl(Types.date | Types.datetime) :: Types.date | Types.datetime
def to_erl({y,m,d} = date) when is_date(y,m,d), do: date
def to_erl({{y,m,d},{h,mm,s}} = dt) when is_datetime(y,m,d,h,mm,s), do: dt
def to_erl(_), do: {:error, :invalid_date}
@spec century(Types.date | Types.datetime) :: non_neg_integer
def century({y,m,d}) when is_date(y,m,d), do: Timex.century(y)
def century({{y,m,d},_}) when is_date(y,m,d), do: Timex.century(y)
def century(_), do: {:error, :invalid_date}
@spec is_leap?(Types.date | Types.datetime) :: boolean
def is_leap?({y,m,d}) when is_date(y,m,d), do: :calendar.is_leap_year(y)
def is_leap?({{y,m,d},{h,mm,s}}) when is_datetime(y,m,d,h,mm,s), do: :calendar.is_leap_year(y)
def is_leap?(_), do: {:error, :invalid_date}
@spec beginning_of_day(Types.date | Types.datetime) :: Types.date | Types.datetime
def beginning_of_day({y,m,d} = date) when is_date(y,m,d), do: date
def beginning_of_day({{y,m,d}=date,{h,mm,s}}) when is_datetime(y,m,d,h,mm,s),
do: {date, {0,0,0}}
def beginning_of_day(_), do: {:error, :invalid_date}
@spec end_of_day(Types.date | Types.datetime) :: Types.date | Types.datetime
def end_of_day({y,m,d} = date) when is_date(y,m,d), do: date
def end_of_day({{y,m,d}=date,{h,mm,s}}) when is_datetime(y,m,d,h,mm,s),
do: {date, {23,59,59}}
def end_of_day(_), do: {:error, :invalid_date}
@spec beginning_of_week(Types.date | Types.datetime, Types.weekday) :: Types.date | Types.datetime
def beginning_of_week({y,m,d} = date, weekstart) when is_date(y,m,d) do
case Timex.days_to_beginning_of_week(date, weekstart) do
{:error, _} = err -> err
days -> shift(date, [days: -days])
end
end
def beginning_of_week({{y,m,d},{h,mm,s}} = date, weekstart) when is_datetime(y,m,d,h,mm,s) do
case Timex.days_to_beginning_of_week(date, weekstart) do
{:error, _} = err -> err
days -> beginning_of_day(shift(date, [days: -days]))
end
end
def beginning_of_week(_,_), do: {:error, :invalid_date}
@spec end_of_week(Types.date | Types.datetime, Types.weekday) :: Types.date | Types.datetime
def end_of_week({y,m,d} = date, weekstart) when is_date(y,m,d) do
case Timex.days_to_end_of_week(date, weekstart) do
{:error, _} = err -> err
days_to_end ->
shift(date, [days: days_to_end])
end
end
def end_of_week({{y,m,d},{h,mm,s}} = date, weekstart) when is_datetime(y,m,d,h,mm,s) do
case Timex.days_to_end_of_week(date, weekstart) do
{:error, _} = err -> err
days_to_end ->
end_of_day(shift(date, [days: days_to_end]))
end
end
def end_of_week(_,_), do: {:error, :invalid_date}
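  # Example (2019-10-24 is a Thursday; results assume Timex's usual
  # weekstart handling):
  #
  #   beginning_of_week({2019, 10, 24}, :mon)  #=> {2019, 10, 21}
  #   end_of_week({2019, 10, 24}, :mon)        #=> {2019, 10, 27}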
@spec beginning_of_year(Types.date | Types.datetime) :: Types.date | Types.datetime
def beginning_of_year({y,m,d}) when is_date(y,m,d),
do: {y,1,1}
def beginning_of_year({{y,m,d},{h,mm,s}}) when is_datetime(y,m,d,h,mm,s),
do: {{y,1,1},{0,0,0}}
def beginning_of_year(_), do: {:error, :invalid_date}
@spec end_of_year(Types.date | Types.datetime) :: Types.date | Types.datetime
def end_of_year({y,m,d}) when is_date(y,m,d),
do: {y,12,31}
def end_of_year({{y,m,d},{h,mm,s}}) when is_datetime(y,m,d,h,mm,s),
do: {{y,12,31},{23,59,59}}
def end_of_year(_), do: {:error, :invalid_date}
@spec beginning_of_quarter(Types.date | Types.datetime) :: Types.date | Types.datetime
def beginning_of_quarter({y,m,d}) when is_date(y,m,d) do
month = 1 + (3 * (Timex.quarter(m) - 1))
{y,month,1}
end
def beginning_of_quarter({{y,m,d},{h,mm,s} = time}) when is_datetime(y,m,d,h,mm,s) do
month = 1 + (3 * (Timex.quarter(m) - 1))
{{y,month,1},time}
end
def beginning_of_quarter(_), do: {:error, :invalid_date}
@spec end_of_quarter(Types.date | Types.datetime) :: Types.date | Types.datetime
def end_of_quarter({y,m,d}) when is_date(y,m,d) do
month = 3 * Timex.quarter(m)
end_of_month({y,month,d})
end
def end_of_quarter({{y,m,d},{h,mm,s} = time}) when is_datetime(y,m,d,h,mm,s) do
month = 3 * Timex.quarter(m)
end_of_month({{y,month,d}, time})
end
def end_of_quarter(_), do: {:error, :invalid_date}
@spec beginning_of_month(Types.date | Types.datetime) :: Types.date | Types.datetime
def beginning_of_month({y,m,d}) when is_date(y,m,d),
do: {y,m,1}
def beginning_of_month({{y,m,d},{h,mm,s}}) when is_datetime(y,m,d,h,mm,s),
do: {{y,m,1},{0,0,0}}
def beginning_of_month(_), do: {:error, :invalid_date}
@spec end_of_month(Types.date | Types.datetime) :: Types.date | Types.datetime
def end_of_month({y,m,d} = date) when is_date(y,m,d),
do: {y,m,days_in_month(date)}
def end_of_month({{y,m,d},{h,mm,s}} = date) when is_datetime(y,m,d,h,mm,s),
do: {{y,m,days_in_month(date)},{23,59,59}}
def end_of_month(_), do: {:error, :invalid_date}
@spec quarter(Types.date | Types.datetime) :: integer
def quarter({y,m,d}) when is_date(y,m,d), do: Timex.quarter(m)
def quarter({{y,m,d},{h,mm,s}}) when is_datetime(y,m,d,h,mm,s), do: Timex.quarter(m)
def quarter(_), do: {:error, :invalid_date}
def days_in_month({y,m,d}) when is_date(y,m,d), do: Timex.days_in_month(y, m)
def days_in_month({{y,m,d},_}) when is_date(y,m,d), do: Timex.days_in_month(y, m)
def days_in_month(_), do: {:error, :invalid_date}
def week_of_month({y,m,d}) when is_date(y,m,d), do: Timex.week_of_month(y,m,d)
def week_of_month({{y,m,d},_}) when is_date(y,m,d), do: Timex.week_of_month(y,m,d)
def week_of_month(_), do: {:error, :invalid_date}
def weekday({y,m,d} = date) when is_date(y,m,d), do: :calendar.day_of_the_week(date)
def weekday({{y,m,d} = date,_}) when is_date(y,m,d), do: :calendar.day_of_the_week(date)
def weekday(_), do: {:error, :invalid_date}
def day({y,m,d} = date) when is_date(y,m,d),
do: 1 + Timex.diff(date, {y,1,1}, :days)
def day({{y,m,d} = date,_}) when is_date(y,m,d),
do: 1 + Timex.diff(date, {y,1,1}, :days)
def day(_), do: {:error, :invalid_date}
def is_valid?({y,m,d}) when is_date(y,m,d), do: true
def is_valid?({{y,m,d},{h,mm,s}}) when is_datetime(y,m,d,h,mm,s), do: true
def is_valid?(_), do: false
def iso_week({y,m,d}) when is_date(y,m,d),
do: Timex.iso_week(y, m, d)
def iso_week({{y,m,d}, _}) when is_date(y,m,d),
do: Timex.iso_week(y, m, d)
def iso_week(_), do: {:error, :invalid_date}
def from_iso_day({y,m,d}, day) when is_day_of_year(day) and is_date(y,m,d) do
{year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(y, day)
{year, month, day_of_month}
end
def from_iso_day({{y,m,d},{_,_,_}=time}, day) when is_day_of_year(day) and is_date(y,m,d) do
{year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(y, day)
{{year, month, day_of_month}, time}
end
def from_iso_day(_,_), do: {:error, :invalid_date}
@spec set(Types.date | Types.datetime, list({atom(), term})) :: Types.date | Types.datetime | {:error, term}
  def set({y,m,d} = date, options) when is_date(y,m,d),
    do: do_set({date,{0,0,0}}, options, :date)
  def set({{y,m,d},{h,mm,s}} = datetime, options) when is_datetime(y,m,d,h,mm,s),
    do: do_set(datetime, options, :datetime)
def set(_,_), do: {:error, :invalid_date}
defp do_set(date, options, datetime_type) do
validate? = Keyword.get(options, :validate, true)
Enum.reduce(options, date, fn
_option, {:error, _} = err ->
err
option, result ->
case option do
{:validate, _} -> result
{:datetime, {{_,_,_} = date, {_,_,_} = time} = dt} ->
if validate? do
case datetime_type do
:date -> Timex.normalize(:date, date)
:datetime ->
{Timex.normalize(:date, date), Timex.normalize(:time, time)}
end
else
case datetime_type do
:date -> date
:datetime -> dt
end
end
{:date, {_, _, _} = d} ->
if validate? do
case result do
{_,_,_} -> Timex.normalize(:date, d)
{{_,_,_}, {_,_,_} = t} -> {Timex.normalize(:date, d), t}
end
else
case result do
{_,_,_} -> d
{{_,_,_}, {_,_,_} = t} -> {d,t}
end
end
{:time, {_,_,_} = t} ->
if validate? do
case result do
{_,_,_} -> date
{{_,_,_}=d,{_,_,_}} -> {d, Timex.normalize(:time, t)}
end
else
case result do
{_,_,_} -> date
{{_,_,_}=d,{_,_,_}} -> {d,t}
end
end
          {:day, d} ->
if validate? do
case result do
{y,m,_} -> {y,m, Timex.normalize(:day, {y,m,d})}
{{y,m,_},{_,_,_}=t} -> {{y,m, Timex.normalize(:day, {y,m,d})}, t}
end
else
case result do
{y,m,_} -> {y,m,d}
{{y,m,_}, {_,_,_} = t} -> {{y,m,d}, t}
end
end
{:year, year} ->
if validate? do
case result do
{_,m,d} -> {Timex.normalize(:year, year), m, d}
{{_,m,d},{_,_,_} = t} -> {{Timex.normalize(:year, year),m,d}, t}
end
else
case result do
{_,m,d} -> {year,m,d}
{{_,m,d},{_,_,_} = t} -> {{year,m,d}, t}
end
end
{:month, month} ->
if validate? do
case result do
{y,_,d} ->
{y, Timex.normalize(:month, month), Timex.normalize(:day, {y, month, d})}
{{y,_,d},{_,_,_} = t} ->
{{y, Timex.normalize(:month, month),Timex.normalize(:day, {y, month, d})}, t}
end
else
case result do
{y,_,d} -> {y,month,d}
{{y,_,d},{_,_,_} = t} -> {{y,month,d}, t}
end
end
{:hour, hour} ->
if validate? do
case result do
{_,_,_} -> result
{{_,_,_} = d,{_,m,s}} -> {d, {Timex.normalize(:hour, hour),m,s}}
end
else
case result do
{_,_,_} -> result
{{_,_,_} = d,{_,m,s}} -> {d, {hour,m,s}}
end
end
{:minute, min} ->
if validate? do
case result do
{_,_,_} -> result
{{_,_,_} = d,{h,_,s}} -> {d, {h, Timex.normalize(:minute, min),s}}
end
else
case result do
{_,_,_} -> result
{{_,_,_} = d,{h,_,s}} -> {d, {h,min,s}}
end
end
{:second, sec} ->
if validate? do
case result do
{_,_,_} -> result
{{_,_,_} = d,{h,m,_}} -> {d, {h, m, Timex.normalize(:second, sec)}}
end
else
case result do
{_,_,_} -> result
{{_,_,_} = d,{h,m,_}} -> {d, {h,m,sec}}
end
end
{name, _} when name in [:timezone, :microsecond] ->
result
{option_name, _} ->
{:error, {:bad_option, option_name}}
end
end)
end
@spec shift(Types.date | Types.datetime, list({atom(), term})) ::
Types.date | Types.datetime | {:error, term}
def shift(date, [{_, 0}]),
do: date
def shift({y,m,d}=date, options) when is_date(y,m,d),
do: do_shift(date, options, :date)
def shift({{y,m,d},{h,mm,s}}=datetime, options) when is_datetime(y,m,d,h,mm,s),
do: do_shift(datetime, options, :datetime)
def shift(_, _), do: {:error, :invalid_date}
defp do_shift(date, options, type) do
allowed_options = Enum.filter(options, fn
{:hours, value} when value >= 24 or value <= -24 -> true
{:hours, _} -> false
{:minutes, value} when value >= 24*60 or value <= -24*60 -> true
{:minutes, _} -> false
{:seconds, value} when value >= 24*60*60 or value <= -24*60*60 -> true
{:seconds, _} -> false
{:milliseconds, value} when value >= 24*60*60*1000 or value <= -24*60*60*1000 -> true
{:milliseconds, _} -> false
{:microseconds, {value, _}} when value >= 24*60*60*1000*1000 or value <= -24*60*60*1000*1000 -> true
{:microseconds, value} when value >= 24*60*60*1000*1000 or value <= -24*60*60*1000*1000 -> true
{:microseconds, _} -> false
{_type, _value} -> true
end)
case Timex.shift(to_naive_datetime(date), allowed_options) do
{:error, _} = err -> err
%NaiveDateTime{} = nd when type == :date ->
{nd.year,nd.month,nd.day}
%NaiveDateTime{} = nd when type == :datetime ->
{{nd.year,nd.month,nd.day}, {nd.hour,nd.minute,nd.second}}
end
end
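  # Sketch of the filtering above: sub-day shift options are dropped, while
  # shifts of a day or more are applied via NaiveDateTime. For a date tuple:
  #
  #   shift({2019, 10, 24}, hours: 12)  #=> {2019, 10, 24}
  #   shift({2019, 10, 24}, hours: 48)  #=> {2019, 10, 26}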
end
|
elixir/codes-from-books/little-elixir/cap8/blitzy/deps/timex/lib/datetime/erlang.ex
| 0.843219
| 0.418994
|
erlang.ex
|
starcoder
|
defmodule Nerves.Firmware do
@moduledoc """
API for upgrading and managing firmware on a Nerves device.
Handles firmware for a single block device (like /dev/mmcblk0). Delegates a
  lot to Frank Hunleth's excellent [fwup](https://github.com/fhunleth/fwup).
Provides:
- Firmware upgrades
- Firmware status
- Firmware-related activities (shutdown, reboot, halt)
**Looking for over-the-network firmware updates?** see
[nerves_firmware_http](https://github.com/nerves-project/nerves_firmware_http),
which provides an HTTP micro-service providing over-network firmware management.
## Installation
1. Add nerves_firmware to your list of dependencies in `mix.exs`:
def deps do
[{:nerves_firmware, "~> 0.4.0"}]
end
2. Ensure nerves_firmware is started before your application:
def application do
[applications: [:nerves_firmware]]
end
## Configuration
In your app's config.exs, you can configure the block device for your
target that is managed by setting the device key as follows:
      config :nerves_firmware, device: "/dev/mmcblk0"
"""
use Application
require Logger
@type reason :: atom
@typedoc """
Arguments to be passed to FWUP.
"""
@type args :: [binary]
@server Nerves.Firmware.Server
@doc """
Application start callback.
"""
@spec start(atom, term) :: {:ok, pid} | {:error, String.t}
def start(_type, _args) do
Logger.debug "#{__MODULE__}.start(...)"
opts = [strategy: :one_for_one, name: Nerves.Firmware.Supervisor]
children = [@server]
Supervisor.start_link(children, opts)
end
@doc """
Return a map of information about the current firmware.
This currently contains values showing the state of the firmware installation,
as well as the key/value pairs encoded in the firmware itself.
__status:__
`:active` - Currently running the latest firmware received. Firmware
must be in this state to be updated.
`:await_restart` - Firmware has been updated since restart, and a restart is
needed to start running from the new firmware.
__device:__
The device file that holds the firmware, e.g. /dev/mmcblk0
"""
@spec state() :: Map.t
def state(), do: GenServer.call @server, {:state}
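  # Illustrative result (exact keys come from the firmware metadata):
  #
  #   Nerves.Firmware.state()
  #   #=> %{status: :active, device: "/dev/mmcblk0", ...}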
@doc """
Applies a firmware file to the device media.
This mostly just passes information through to Nerves.Firmware.Fwup.apply(..)
which is a very thin wrapper around [fwup](https://github.com/fhunleth/fwup), but it
also sets the firwmare state based on the action to reflect the update, and
prevent multiple updates from overwriting known good firmware.
* `action` can be one of `:upgrade` or `:complete`
* `args` is a list of extra arguments to be passed to fwup.
Returns {:error, :await_restart} if the upgrade is requested after
already updating an image without a reboot in-between.
"""
@spec apply(String.t, atom, args) :: :ok | {:error, reason}
def apply(firmware, action, args \\ []) do
args = maybe_pub_key_args(args)
GenServer.call @server, {:apply, firmware, action, args}
end
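  # Illustrative call (firmware path is hypothetical):
  #
  #   Nerves.Firmware.apply("/root/new.fw", :upgrade)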
@doc """
Returns `true` if new firmware can currently be installed.
The firmware module usually allows new firmware to be installed, but there
are situations where installing new firmware is dangerous. Currently
if the device has had an update applied without being restarted,
this returns false, and update apis will return errors, to prevent bricking.
"""
@spec allow_upgrade?() :: true | false
def allow_upgrade?() do
GenServer.call @server, {:allow_upgrade?}
end
@doc """
Apply a 1 or 2-phase nerves update
  Applies firmware using the `upgrade` task, then, if /tmp/finalize.fw exists,
  applies that file with the `on-reboot` task. Supports fhunleth's 2-phase format.
* `args` is a list of extra arguments to be passed to fwup.
Returns {:error, :await_restart} if the upgrade is requested after
already updating an image without a reboot in-between.
"""
@spec upgrade_and_finalize(String.t, args) :: :ok | {:error, reason}
def upgrade_and_finalize(firmware, args \\ []) do
args = maybe_pub_key_args(args)
GenServer.call @server, {:upgrade_and_finalize, firmware, args}, :infinity
end
@doc """
  Applies /tmp/finalize.fw with the `on-reboot` task, if the file exists.
* `args` is a list of extra arguments to be passed to fwup, but is currently
ignored for this function.
Returns {:error, :await_restart} if the finalize is requested after
already updating an image without a reboot in-between.
"""
@spec finalize(args) :: :ok | {:error, reason}
def finalize(args \\ []) do
args = maybe_pub_key_args(args)
# REVIEW args is ignored by the server for this call. What should they do?
GenServer.call @server, {:finalize, args}, :infinity
end
@doc """
Reboot the device.
Issues the os-level `reboot` command, which reboots the device, even
if erlinit.conf specifies not to reboot on exit of the Erlang VM.
"""
@spec reboot() :: :ok
def reboot(), do: logged_shutdown "reboot"
@doc """
Reboot the device gracefully
  Issues the :init.stop command to gracefully shut down all applications in the Erlang VM.
All code is unloaded and ports closed before the system terminates by calling halt(Status).
  erlinit.config must be set to reboot on exit (the default) for a graceful reboot to work.
"""
@spec reboot(atom) :: :ok
def reboot(:graceful), do: :init.stop
@doc """
Forces device to power off (without reboot).
"""
@spec poweroff() :: :ok
def poweroff(), do: logged_shutdown "poweroff"
@doc """
Forces device to halt (meaning hang, not power off, nor reboot).
  Note: this is different from :erlang.halt(), which exits the BEAM, and
  may end up rebooting the device if erlinit.conf settings allow reboot on exit.
"""
@spec halt() :: :ok
def halt(), do: logged_shutdown "halt"
# private helpers
defp logged_shutdown(cmd, args \\ []) do
Logger.info "#{__MODULE__} : device told to #{cmd}"
# Invoke the appropriate command to tell erlinit that a shutdown
# of the Erlang VM is imminent. erlinit 1.0+ gives some time
# before the shutdown (10 seconds by default). Pre-erlinit 1.0
# shuts down close to immediately.
System.cmd(cmd, args)
# Gracefully shut down
:init.stop
# If still shutting down and erlinit hasn't already killed
# the Erlang VM, do so ourselves. This is set to a minute
# since `:init.stop` and `erlinit` are expected to kill
# the VM first.
Process.sleep(60_000)
System.halt
end
@spec maybe_pub_key_args(args) :: args
defp maybe_pub_key_args(args) do
pub_key_path = Application.get_env(:nerves_firmware, :pub_key_path)
if pub_key_path do
Logger.info "#{__MODULE__} using signature"
["-p", "#{pub_key_path}" | args]
else
args
end
end
end
|
lib/firmware.ex
| 0.781747
| 0.456531
|
firmware.ex
|
starcoder
|
defmodule Kernel.ParallelCompiler do
@moduledoc """
A module responsible for compiling files in parallel.
"""
@doc """
Compiles the given files.
Those files are compiled in parallel and can automatically
detect dependencies between them. Once a dependency is found,
the current file stops being compiled until the dependency is
resolved.
If there is an error during compilation or if `warnings_as_errors`
is set to `true` and there is a warning, this function will fail
with an exception.
This function receives a set of callbacks as options:
* `:each_file` - for each file compiled, invokes the callback passing the file
* `:each_module` - for each module compiled, invokes the callback
passing the file, module and the module bytecode
    * `:each_waiting` - every time a module waits for another module to be compiled,
      this callback is invoked with the module we are waiting for
  The compiler doesn't care about the return values of the callbacks except
  for `each_waiting`, which must return nil or a file as a hint of where the
  source could be found.
Returns the modules generated by each compiled file.
"""
def files(files, callbacks // [])
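  # Illustrative call (callbacks are a keyword list, per the docs above):
  #
  #   Kernel.ParallelCompiler.files(["a.ex", "b.ex"],
  #     each_file: fn file -> IO.puts "compiled #{file}" end)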
def files(files, callbacks) when is_list(callbacks) do
spawn_compilers(files, nil, callbacks)
end
@doc """
Compiles the given files to the given path.
Read `files/2` for more information.
"""
def files_to_path(files, path, callbacks // [])
def files_to_path(files, path, callbacks) when is_binary(path) and is_list(callbacks) do
spawn_compilers(files, path, callbacks)
end
defp spawn_compilers(files, path, callbacks) do
Code.ensure_loaded(Kernel.ErrorHandler)
:elixir_code_server.cast(:reset_warnings)
schedulers = max(:erlang.system_info(:schedulers_online), 2)
result = spawn_compilers(files, files, path, callbacks, [], [], schedulers, [])
# In case --warning-as-errors is enabled and there was a warning,
# compilation status will be set to error and we fail with Kernel.CompilationError
case :elixir_code_server.call(:compilation_status) do
:ok -> result
:error -> raise CompileError, [], []
end
end
  # We already have as many active (non-waiting) compilers as schedulers, don't spawn new ones
defp spawn_compilers(files, original, output, callbacks, waiting, queued, schedulers, result) when
length(queued) - length(waiting) >= schedulers do
wait_for_messages(files, original, output, callbacks, waiting, queued, schedulers, result)
end
# Spawn a compiler for each file in the list until we reach the limit
defp spawn_compilers([h|t], original, output, callbacks, waiting, queued, schedulers, result) do
parent = self()
child = spawn_link fn ->
:erlang.put(:elixir_compiler_pid, parent)
:erlang.put(:elixir_ensure_compiled, true)
:erlang.process_flag(:error_handler, Kernel.ErrorHandler)
try do
if output do
:elixir_compiler.file_to_path(h, output)
else
:elixir_compiler.file(h)
end
parent <- { :compiled, self(), h }
catch
kind, reason ->
parent <- { :failure, self(), kind, reason, System.stacktrace }
end
end
spawn_compilers(t, original, output, callbacks, waiting, [{child, h}|queued], schedulers, result)
end
# No more files, nothing waiting, queue is empty, we are done
defp spawn_compilers([], _original, _output, _callbacks, [], [], _schedulers, result), do: result
# Queued x, waiting for x: POSSIBLE ERROR! Release processes so we get the failures
defp spawn_compilers([], original, output, callbacks, waiting, queued, schedulers, result) when length(waiting) == length(queued) do
Enum.each queued, fn { child, _ } -> child <- { :release, self() } end
wait_for_messages([], original, output, callbacks, waiting, queued, schedulers, result)
end
# No more files, but queue and waiting are not full or do not match
defp spawn_compilers([], original, output, callbacks, waiting, queued, schedulers, result) do
wait_for_messages([], original, output, callbacks, waiting, queued, schedulers, result)
end
# Wait for messages from child processes
defp wait_for_messages(files, original, output, callbacks, waiting, queued, schedulers, result) do
receive do
{ :compiled, child, file } ->
if callback = Keyword.get(callbacks, :each_file) do
callback.(file)
end
new_queued = List.keydelete(queued, child, 0)
# Sometimes we may have spurious entries in the waiting
# list because someone invoked try/rescue UndefinedFunctionError
new_waiting = List.keydelete(waiting, child, 0)
spawn_compilers(files, original, output, callbacks, new_waiting, new_queued, schedulers, result)
{ :module_available, child, file, module, binary } ->
if callback = Keyword.get(callbacks, :each_module) do
callback.(file, module, binary)
end
# Release the module loader which is waiting for an ack
child <- { self, :ack }
new_waiting = release_waiting_processes(module, waiting)
new_result = [module|result]
wait_for_messages(files, original, output, callbacks, new_waiting, queued, schedulers, new_result)
{ :waiting, child, on } ->
# If one of the callbacks is each_waiting and we haven't seen
# the hinted file before, add it to the list to be processed.
if (callback = Keyword.get(callbacks, :each_waiting)) &&
(hint = callback.(on)) &&
not(hint in original) do
files = [hint|files]
original = [hint|original]
end
new_waiting = :orddict.store(child, on, waiting)
spawn_compilers(files, original, output, callbacks, new_waiting, queued, schedulers, result)
{ :failure, child, kind, reason, stacktrace } ->
if many_missing?(child, files, waiting, queued) do
IO.puts "== Compilation failed =="
IO.puts "Compilation failed on the following files:\n"
Enum.each Enum.reverse(queued), fn { pid, file } ->
case List.keyfind(waiting, pid, 0) do
{ _, mod } -> IO.puts "* #{file} is missing module #{inspect mod}"
_ -> :ok
end
end
IO.puts "\nThe first failure is shown below..."
end
{^child, file} = List.keyfind(queued, child, 0)
IO.puts "== Compilation error on file #{file} =="
:erlang.raise(kind, reason, stacktrace)
end
end
defp many_missing?(child, files, waiting, queued) do
waiting_length = length(waiting)
match?({ ^child, _ }, List.keyfind(waiting, child, 0)) and
waiting_length > 1 and files == [] and
waiting_length == length(queued)
end
# Release waiting processes that are waiting for the given module
defp release_waiting_processes(module, waiting) do
Enum.filter waiting, fn { child, waiting_module } ->
if waiting_module == module do
child <- { :release, self() }
false
else
true
end
end
end
end
|
lib/elixir/lib/kernel/parallel_compiler.ex
| 0.777975
| 0.475301
|
parallel_compiler.ex
|
starcoder
|
defmodule AdventOfCode2019 do
@moduledoc """
  AdventOfCode2019 is a set of solutions to Advent of Code 2019 in Elixir!
"""
@doc """
Run the specified day with supplied input.
Example:
iex> ["12", "14", "1969", "100756"] |> AdventOfCode2019.stream_lines("1.1")
34_241
"""
@spec stream_lines(Enumerable.t(), String.t()) :: integer()
def stream_lines(in_stream, day_part), do: run_day_part(in_stream, day_part)
@spec run_day_part(Enumerable.t(), String.t()) :: integer()
defp run_day_part(in_stream, "1.1"), do: RocketEquation.part1(in_stream)
defp run_day_part(in_stream, "1.2"), do: RocketEquation.part2(in_stream)
defp run_day_part(in_stream, "2.1"), do: TwelveOhTwoProgramAlarm.part1(in_stream)
defp run_day_part(in_stream, "2.2"), do: TwelveOhTwoProgramAlarm.part2(in_stream)
defp run_day_part(in_stream, "3.1"), do: CrossedWires.part1(in_stream)
defp run_day_part(in_stream, "3.2"), do: CrossedWires.part2(in_stream)
defp run_day_part(in_stream, "4.1"), do: SecureContainer.part1(in_stream)
defp run_day_part(in_stream, "4.2"), do: SecureContainer.part2(in_stream)
defp run_day_part(in_stream, "5.1"), do: SunnyWithAsteroids.part1(in_stream)
defp run_day_part(in_stream, "5.2"), do: SunnyWithAsteroids.part2(in_stream)
defp run_day_part(in_stream, "6.1"), do: UniversalOrbitMap.part1(in_stream)
defp run_day_part(in_stream, "6.2"), do: UniversalOrbitMap.part2(in_stream)
defp run_day_part(in_stream, "7.1"), do: AdventOfCode2019.AmplificationCircuit.part1(in_stream)
defp run_day_part(in_stream, "7.2"), do: AdventOfCode2019.AmplificationCircuit.part2(in_stream)
defp run_day_part(in_stream, "8.1"), do: AdventOfCode2019.SpaceImageFormat.part1(in_stream)
defp run_day_part(in_stream, "8.2"), do: AdventOfCode2019.SpaceImageFormat.part2(in_stream)
defp run_day_part(in_stream, "9.1"), do: AdventOfCode2019.SensorBoost.part1(in_stream)
defp run_day_part(in_stream, "9.2"), do: AdventOfCode2019.SensorBoost.part2(in_stream)
defp run_day_part(in_stream, "10.1"), do: AdventOfCode2019.MonitoringStation.part1(in_stream)
defp run_day_part(in_stream, "10.2"), do: AdventOfCode2019.MonitoringStation.part2(in_stream)
defp run_day_part(in_stream, "11.1"), do: AdventOfCode2019.SpacePolice.part1(in_stream)
defp run_day_part(in_stream, "11.2"), do: AdventOfCode2019.SpacePolice.part2(in_stream)
defp run_day_part(in_stream, "12.1"), do: AdventOfCode2019.TheNBodyProblem.part1(in_stream)
defp run_day_part(in_stream, "12.2"), do: AdventOfCode2019.TheNBodyProblem.part2(in_stream)
defp run_day_part(in_stream, "13.1"), do: AdventOfCode2019.CarePackage.part1(in_stream)
defp run_day_part(in_stream, "13.2"), do: AdventOfCode2019.CarePackage.part2(in_stream)
defp run_day_part(in_stream, "14.1"), do: AdventOfCode2019.SpaceStoichiometry.part1(in_stream)
defp run_day_part(in_stream, "14.2"), do: AdventOfCode2019.SpaceStoichiometry.part2(in_stream)
defp run_day_part(in_stream, "15.1"), do: AdventOfCode2019.OxygenSystem.part1(in_stream)
defp run_day_part(in_stream, "15.2"), do: AdventOfCode2019.OxygenSystem.part2(in_stream)
defp run_day_part(in_stream, "16.1"),
do: AdventOfCode2019.FlawedFrequencyTransmission.part1(in_stream)
defp run_day_part(in_stream, "16.2"),
do: AdventOfCode2019.FlawedFrequencyTransmission.part2(in_stream)
defp run_day_part(in_stream, "17.1"), do: AdventOfCode2019.SetAndForget.part1(in_stream)
defp run_day_part(in_stream, "17.2"), do: AdventOfCode2019.SetAndForget.part2(in_stream)
end
|
lib/advent_of_code_2019.ex
| 0.721743
| 0.447279
|
advent_of_code_2019.ex
|
starcoder
|
defmodule Riptide do
@moduledoc """
  Riptide is a data-first framework for building realtime applications. It makes building snappy, realtime applications a breeze by letting you think purely in terms of your data, and functionally about what should happen when it changes.
"""
@internal %{internal: true}
use Supervisor
@doc """
Starts a Riptide process.
  You probably should not call this directly; instead, it should be placed
inside your application's root supervisor.
## Options
* `:port` - Optional, will override default port of `12_000`
"""
def start_link(opts \\ []) do
Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
end
@doc false
def init(opts) do
Riptide.Store.init()
Riptide.Migration.run()
Supervisor.init(
[
if Riptide.Config.riptide_scheduler() do
{Riptide.Scheduler, []}
end,
{Riptide.Websocket.Server,
Keyword.merge(
[handlers: Riptide.Config.riptide_handlers()],
opts
)}
]
|> Enum.filter(& &1),
strategy: :one_for_one
)
end
@doc """
Pass in a query and get the results.
  Read more about query structure [here](https://riptide.ironbay.co/docs/queries). The state parameter is optional and is passed to interceptors.
## Options
* `:min` - Starting range of query, optional
* `:max` - End range of query, optional
* `:limit` - Max number of results, optional
## Examples
iex> Riptide.query(%{ "todo:info" => %{} })
%{
"todo:info" => %{
"todo1" => %{
"text" => "Document riptide"
}
}
}
"""
def query(query, state \\ @internal) do
with :ok <- Riptide.Interceptor.query_before(query, state) do
resolved = Riptide.Interceptor.query_resolve(query, state)
store =
resolved
|> Enum.reduce(query, fn {path, _value}, collect ->
Dynamic.delete(collect, path)
end)
|> case do
result when result === %{} ->
%{}
remaining ->
Riptide.Store.query(remaining)
end
{:ok,
Enum.reduce(resolved, store, fn {path, value}, collect ->
Dynamic.put(collect, path, value)
end)}
end
end
@doc """
Return a stream of values underneath a path
## Options
* `:min` - Starting range of query, optional
* `:max` - End range of query, optional
* `:limit` - Max number of results, optional
## Examples
iex> Riptide.stream(["todo:info"]) |> Enum.take(1)
[
        {"todo1", %{"text" => "Document riptide"}}
]
"""
def stream(path, opts \\ %{}, state \\ @internal) do
query = Dynamic.put(%{}, path, opts)
with :ok <- Riptide.Interceptor.query_before(query, state) do
Riptide.Store.stream(path, opts)
end
end
@doc """
The same as `query_path/3` but raises an exception if it fails
"""
def query_path!(path, opts \\ %{}, state \\ @internal) do
{:ok, result} = query_path(path, opts, state)
result
end
@doc """
Return data under a specific path
## Options
* `:min` - Starting range of query, optional
* `:max` - End range of query, optional
* `:limit` - Max number of results, optional
"""
def query_path(path, opts \\ %{}, state \\ @internal) do
case query(Dynamic.put(%{}, path, opts), state) do
{:ok, result} -> {:ok, Dynamic.get(result, path)}
result -> result
end
end
@doc """
The same as `mutation/2` but raises an exception if it fails
"""
def mutation!(mut, state \\ @internal) do
case mutation(mut, state) do
{:ok, result} -> result
end
end
@doc """
Apply a mutation.
This will do following steps in order
1. Trigger `c:Riptide.Interceptor.mutation_before/4`
2. Trigger `c:Riptide.Interceptor.mutation_effect/4`
3. Broadcast mutation to interested processes
4. Write mutation to stores
5. Trigger `c:Riptide.Interceptor.mutation_after/4`
## Examples
iex> mut = Riptide.Mutation.put_merge(["foo", "bar"], "hello")
iex> Riptide.mutation(mut)
{:ok, %{
        merge: %{
          "foo" => %{
            "bar" => "hello"
          }
        },
delete: %{}
}}
"""
@spec mutation(Riptide.Mutation.t(), any()) :: {:ok, Riptide.Mutation.t()} | {:error, any()}
def mutation(mut, state \\ @internal) do
with {:ok, prepared} <- Riptide.Interceptor.mutation_before(mut, state),
prepared <- Riptide.Interceptor.mutation_effect(prepared, state),
:ok <- Riptide.Subscribe.broadcast_mutation(prepared),
:ok <- Riptide.Store.mutation(prepared),
:ok <- Riptide.Interceptor.mutation_after(prepared, state) do
{:ok, prepared}
end
end
@doc """
  Convenience method to apply a mutation that merges a single value
## Examples
iex> Riptide.merge(["foo", "bar"], "hello")
{:ok, %{
        merge: %{
          "foo" => %{
            "bar" => "hello"
          }
        },
delete: %{}
}}
"""
def merge(path, value, state \\ @internal),
do:
path
|> Riptide.Mutation.put_merge(value)
|> mutation(state)
@doc """
The same as `merge/3` but raises an exception if it fails
"""
def merge!(path, value, state \\ @internal) do
{:ok, result} = merge(path, value, state)
result
end
@doc """
  Convenience method to apply a mutation that deletes a single path
## Examples
iex> Riptide.delete(["foo", "bar"])
      {:ok, %{
        merge: %{},
        delete: %{
          "foo" => %{
            "bar" => 1
          }
        }
      }}
"""
def delete(path, state \\ @internal),
do:
path
|> Riptide.Mutation.put_delete()
|> mutation(state)
@doc """
The same as `delete/2` but raises an exception if it fails
"""
def delete!(path, state \\ @internal) do
{:ok, result} = delete(path, state)
result
end
end
|
packages/elixir/lib/riptide.ex
| 0.874144
| 0.48499
|
riptide.ex
|
starcoder
|
defmodule Cloudinary.Transformation.Expression do
@moduledoc """
  The expression representation for conditional transformations, using user-defined variables
  and/or values given as arithmetic expressions.

  See `Cloudinary.Transformation.expression/1` for usage.
"""
@type t :: %__MODULE__{
source: String.t(),
booleanable: boolean,
numerable: boolean,
stringable: boolean,
mappable: boolean,
settable: boolean
}
@type as_boolean :: %__MODULE__{
source: String.t(),
booleanable: true,
numerable: boolean,
stringable: boolean,
mappable: boolean,
settable: boolean
}
defstruct source: nil,
booleanable: false,
numerable: false,
stringable: false,
mappable: false,
settable: false
@predefined_numeric_variable_mappings %{
aspect_ratio: "ar",
current_page: "cp",
duration: "du",
face_count: "fc",
height: "h",
illustrative_likelihood: "ils",
initial_aspect_ratio: "iar",
initial_duration: "idu",
initial_height: "ih",
initial_width: "iw",
page_count: "pc",
page_x: "px",
page_y: "py",
trimmed_aspect_ratio: "tar",
width: "w"
}
@predefined_numeric_variables Map.keys(@predefined_numeric_variable_mappings)
@predefined_set_variable_mappings %{
page_names: "pgnames",
tags: "tags"
}
@predefined_set_variables Map.keys(@predefined_set_variable_mappings)
@predefined_map_variable_mappings %{
context: "ctx"
}
@predefined_map_variables Map.keys(@predefined_map_variable_mappings)
@comparison_operator_mappings %{
==: "eq",
!=: "ne",
<: "lt",
>: "gt",
<=: "lte",
>=: "gte",
&&: "and",
||: "or"
}
@comparison_operators Map.keys(@comparison_operator_mappings)
@set_operator_mappings %{
in: "in",
notin: "nin"
}
@set_operators Map.keys(@set_operator_mappings)
@map_operator_mappings %{
"Access.get": ":"
}
@map_operators Map.keys(@map_operator_mappings)
@arithmetic_operator_mappings %{
*: "mul",
/: "div",
+: "add",
-: "sub",
pow: "pow",
mod: "mod"
}
@arithmetic_operators Map.keys(@arithmetic_operator_mappings)
@doc """
  Convert an AST (abstract syntax tree) into another AST that produces a URL string.
"""
@spec traverse(Macro.t()) :: Macro.t()
def traverse(ast) do
ast
|> Macro.traverse(
0,
fn
{:^, _meta, _args} = ast, acc -> {ast, acc + 1}
list, acc when is_list(list) -> {list, acc + 1}
{:not, _meta1, [{:in, _meta2, args}]}, 0 -> {{:notin, [], args}, 0}
{{:., _meta, [Access, :get]}, meta, args}, 0 -> {{:"Access.get", meta, args}, 0}
ast, acc -> {ast, acc}
end,
fn
{:^, _meta, [arg]}, 1 -> {build_quoted(arg), 0}
{:^, _meta, _args} = ast, acc -> {ast, acc - 1}
list, 1 when is_list(list) -> {build_quoted(list), 0}
list, acc when is_list(list) -> {list, acc - 1}
{identifier, _meta, args}, 0 -> {build_quoted(identifier, args), 0}
identifier, 0 -> {build_quoted(identifier), 0}
ast, acc -> {ast, acc}
end
)
|> elem(0)
end
defp build_quoted(identifier) do
quote do: unquote(__MODULE__).build(unquote(identifier))
end
defp build_quoted(identifier, args) do
quote do: unquote(__MODULE__).build(unquote(identifier), unquote(args))
end
@doc """
Generate an expression with an atom representing operator and expressions as arguments.
"""
@spec build(atom, [__MODULE__.t()] | nil) :: __MODULE__.t()
def build(operator, [%__MODULE__{numerable: true}, %__MODULE__{numerable: true}] = args)
when operator in @arithmetic_operators do
%__MODULE__{
source: Enum.join(args, "_#{@arithmetic_operator_mappings[operator]}_"),
numerable: true
}
end
def build(operator, [%__MODULE__{numerable: true}, %__MODULE__{numerable: true}] = args)
when operator in @comparison_operators do
%__MODULE__{
source: Enum.join(args, "_#{@comparison_operator_mappings[operator]}_"),
booleanable: true
}
end
def build(operator, [%__MODULE__{stringable: true}, %__MODULE__{stringable: true}] = args)
when operator in @comparison_operators do
%__MODULE__{
source: Enum.join(args, "_#{@comparison_operator_mappings[operator]}_"),
booleanable: true
}
end
def build(operator, [%__MODULE__{stringable: true}, %__MODULE__{settable: true}] = args)
when operator in @set_operators do
%__MODULE__{
source: Enum.join(args, "_#{@set_operator_mappings[operator]}_"),
booleanable: true
}
end
def build(operator, [%__MODULE__{mappable: true}, %__MODULE__{stringable: true}] = args)
when operator in @map_operators do
%__MODULE__{
source: Enum.join(args, @map_operator_mappings[operator]),
stringable: true
}
end
def build(variable, nil) do
%__MODULE__{source: "$#{variable}", stringable: true, numerable: true}
end
@doc """
Generate an expression with an atom representing predefined variable, or with a basic type of
variable.
"""
@spec build(atom) :: __MODULE__.t()
@spec build(number | String.t() | [String.t()]) :: __MODULE__.t()
def build(variable) when variable in @predefined_numeric_variables do
%__MODULE__{source: @predefined_numeric_variable_mappings[variable], numerable: true}
end
def build(variable) when variable in @predefined_set_variables do
%__MODULE__{source: @predefined_set_variable_mappings[variable], settable: true}
end
def build(variable) when variable in @predefined_map_variables do
%__MODULE__{source: @predefined_map_variable_mappings[variable], mappable: true}
end
def build(variable) when is_float(variable) or is_integer(variable) do
%__MODULE__{source: "#{variable}", numerable: true}
end
def build(variable) when is_binary(variable) do
%__MODULE__{source: "!#{variable}!", stringable: true}
end
def build(variable) when is_list(variable) do
%__MODULE__{source: "!#{Enum.join(variable, ":")}!", stringable: true}
end
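  # Illustrative lowering, following the operator and variable mappings above:
  #
  #   build(:>, [build(:width), build(300)])
  #   #=> %__MODULE__{source: "w_gt_300", booleanable: true, ...}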
defimpl String.Chars do
def to_string(%{source: source}) when is_binary(source), do: source
end
end
|
lib/cloudinary/transformation/expression.ex
| 0.887021
| 0.517815
|
expression.ex
|
starcoder
|
defmodule Estated.Property.Valuation do
@moduledoc "Valuation details as provided by a proprietary valuation algorithm."
@moduledoc since: "0.2.0"
import Estated.CastHelpers, only: [cast_date: 1]
defstruct [
:value,
:high,
:low,
:forecast_standard_deviation,
:date
]
@typedoc "Valuation details as provided by a proprietary valuation algorithm."
@typedoc since: "0.2.0"
@type t :: %__MODULE__{
value: value() | nil,
high: high() | nil,
low: low() | nil,
forecast_standard_deviation: forecast_standard_deviation() | nil,
date: date() | nil
}
@typedoc """
The current property value.
Eg. **16430**
"""
@typedoc since: "0.2.0"
@type value :: integer()
@typedoc """
The highest probable value.
Eg. **17220**
"""
@typedoc since: "0.2.0"
@type high :: integer()
@typedoc """
The lowest probable value.
Eg. **15780**
"""
@typedoc since: "0.2.0"
@type low :: integer()
@typedoc """
Forecast standard deviation (a percentage, 0-100).
Eg. **55**
"""
@typedoc since: "0.2.0"
@type forecast_standard_deviation :: non_neg_integer()
@typedoc """
The date the valuation was performed.
Eg. **2019-10-24**
"""
@typedoc since: "0.2.0"
@type date :: Date.t()
@doc false
@doc since: "0.2.0"
@spec cast(map()) :: t()
def cast(%{} = valuation) do
Enum.reduce(valuation, %__MODULE__{}, &cast_field/2)
end
@spec cast(nil) :: nil
def cast(nil) do
nil
end
defp cast_field({"value", value}, acc) do
%__MODULE__{acc | value: value}
end
defp cast_field({"high", high}, acc) do
%__MODULE__{acc | high: high}
end
defp cast_field({"low", low}, acc) do
%__MODULE__{acc | low: low}
end
defp cast_field({"forecast_standard_deviation", forecast_standard_deviation}, acc) do
%__MODULE__{acc | forecast_standard_deviation: forecast_standard_deviation}
end
defp cast_field({"date", date}, acc) do
%__MODULE__{acc | date: cast_date(date)}
end
defp cast_field(_map_entry, acc) do
acc
end
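  # Example with a hypothetical API payload (assumes cast_date/1 parses
  # ISO 8601 strings):
  #
  #   Valuation.cast(%{"value" => 16430, "date" => "2019-10-24"})
  #   #=> %Valuation{value: 16430, date: ~D[2019-10-24], high: nil, ...}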
end
|
lib/estated/property/valuation.ex
| 0.86995
| 0.450722
|
valuation.ex
|
starcoder
|
defmodule Hui.U do
@moduledoc """
Struct and functions related to Solr updating.
"""
defstruct [:doc, :commitWithin, :overwrite, :optimize, :commit,
:waitSearcher, :expungeDeletes, :maxSegments, :rollback,
:delete_id, :delete_query]
@typedoc """
  Struct representing Solr [update](http://lucene.apache.org/solr/guide/uploading-data-with-index-handlers.html) commands.
"""
@type t :: %__MODULE__{doc: map | list(map), commitWithin: integer, overwrite: boolean,
optimize: boolean, commit: boolean, rollback: boolean,
waitSearcher: boolean, expungeDeletes: boolean, maxSegments: integer,
delete_id: binary | list(binary), delete_query: binary | list(binary)}
@doc """
  Encodes the `Hui.U.t` struct into binary Solr commands for a JSON-formatted update.
## Example
```
# Update / index 2 documents, commit them within 1s
      iex> doc1 = %{"name" => "The Turin Horse", "directed_by" => ["Béla Tarr"], "genre" => ["Drama"], "id" => "tt1316540"}
      %{
        "directed_by" => ["Béla Tarr"],
        "genre" => ["Drama"],
        "id" => "tt1316540",
        "name" => "The Turin Horse"
      }
      iex> doc2 = %{"name" => "I Wish", "directed_by" => ["Hirokazu Koreeda"], "genre" => ["Drama"], "id" => "tt1650453"}
      %{
        "directed_by" => ["Hirokazu Koreeda"],
        "genre" => ["Drama"],
        "id" => "tt1650453",
        "name" => "I Wish"
      }
      iex> x = %Hui.U{doc: [doc1, doc2], commit: true, commitWithin: 1000}
      %Hui.U{
        commit: true,
        commitWithin: 1000,
        delete_id: nil,
        delete_query: nil,
        doc: [
          %{
            "directed_by" => ["Béla Tarr"],
            "genre" => ["Drama"],
            "id" => "tt1316540",
            "name" => "The Turin Horse"
          },
          %{
            "directed_by" => ["Hirokazu Koreeda"],
            "genre" => ["Drama"],
            "id" => "tt1650453",
            "name" => "I Wish"
          }
        ],
        expungeDeletes: nil,
        maxSegments: nil,
        optimize: nil,
        overwrite: nil,
        rollback: nil,
        waitSearcher: nil
      }
iex> x |> Hui.U.encode
"{\\\"add\\\":{\\\"commitWithin\\\":1000,\\\"doc\\\":{\\\"name\\\":\\\"The Turin Horse\\\",\\\"id\\\":\\\"tt1316540\\\",\\\"genre\\\":[\\\"Drama\\\"],\\\"directed_by\\\":[\\\"Béla Tarr\\\"]}},\\\"add\\\":{\\\"commitWithin\\\":1000,\\\"doc\\\":{\\\"name\\\":\\\"I Wish\\\",\\\"id\\\":\\\"tt1650453\\\",\\\"genre\\\":[\\\"Drama\\\"],\\\"directed_by\\\":[\\\"Hirokazu Koreeda\\\"]}},\\\"commit\\\":{}}"
# Delete the documents by ID
iex> %Hui.U{delete_id: ["tt1316540", "tt1650453"]} |> Hui.U.encode
"{\\\"delete\\\":{\\\"id\\\":\\\"tt1316540\\\"},\\\"delete\\\":{\\\"id\\\":\\\"tt1650453\\\"}}"
# Delete the documents by filter query
iex> %Hui.U{delete_query: "id:tt*"} |> Hui.U.encode
"{\\\"delete\\\":{\\\"query\\\":\\\"id:tt*\\\"}}"
# Commits the docs, make them visible and remove previously deleted docs from the index
iex> %Hui.U{commit: true, waitSearcher: true, expungeDeletes: true} |> Hui.U.encode
"{\\\"commit\\\":{\\\"waitSearcher\\\":true,\\\"expungeDeletes\\\":true}}"
# Optimise the index, and keep the number of index segments 10 max
iex> %Hui.U{optimize: true, maxSegments: 10} |> Hui.U.encode
"{\\\"optimize\\\":{\\\"maxSegments\\\":10}}"
```
"""
@spec encode(Hui.U.t) :: binary
def encode(%__MODULE__{} = s) do
a = "#{_encode(doc: s.doc, within: s.commitWithin, overwrite: s.overwrite)}"
b = "#{_encode(delete_id: s.delete_id)}"
c = "#{_encode(delete_query: s.delete_query)}"
d = "#{_encode(commit: s.commit, wait: s.waitSearcher, expunge: s.expungeDeletes)}"
e = "#{_encode(optimize: s.optimize, wait: s.waitSearcher, max: s.maxSegments)}"
f = "#{_encode(rollback: s.rollback)}"
x = [a, b, c, d, e, f] |> Enum.filter(fn x -> x != "" end)
"{#{Enum.join(x, ",")}}"
end
defp _encode(doc) when is_map(doc), do: Poison.encode!(doc)
defp _encode(doc: doc, within: w, overwrite: o) when is_map(doc), do: "\"add\":{#{_encode(within: w)}#{_encode(overwrite: o)}\"doc\":#{_encode(doc)}}"
defp _encode(doc: [h|t], within: w, overwrite: o) when is_map(h), do: Enum.map_join([h]++t, "," , &_encode(doc: &1, within: w, overwrite: o))
defp _encode(doc: _, within: _, overwrite: _), do: ""
defp _encode(within: w) when is_integer(w), do: "\"commitWithin\":#{w},"
defp _encode(within: _), do: ""
defp _encode(overwrite: o) when is_boolean(o), do: "\"overwrite\":#{o},"
defp _encode(overwrite: _), do: ""
defp _encode(commit: true, wait: w, expunge: e) when is_boolean(w) and is_boolean(e), do: "\"commit\":{\"waitSearcher\":#{w},\"expungeDeletes\":#{e}}"
defp _encode(commit: true, wait: w, expunge: nil) when is_boolean(w), do: "\"commit\":{\"waitSearcher\":#{w}}"
defp _encode(commit: true, wait: nil, expunge: e) when is_boolean(e), do: "\"commit\":{\"expungeDeletes\":#{e}}"
defp _encode(commit: true, wait: nil, expunge: nil), do: "\"commit\":{}"
defp _encode(commit: _, wait: _, expunge: _), do: ""
defp _encode(optimize: true, wait: w, max: m) when is_boolean(w) and is_integer(m), do: "\"optimize\":{\"waitSearcher\":#{w},\"maxSegments\":#{m}}"
defp _encode(optimize: true, wait: w, max: nil) when is_boolean(w), do: "\"optimize\":{\"waitSearcher\":#{w}}"
defp _encode(optimize: true, wait: nil, max: m) when is_integer(m), do: "\"optimize\":{\"maxSegments\":#{m}}"
defp _encode(optimize: true, wait: nil, max: nil), do: "\"optimize\":{}"
defp _encode(optimize: _, wait: _, max: _), do: ""
defp _encode(delete_id: id) when is_binary(id), do: "\"delete\":{\"id\":\"#{id}\"}"
defp _encode(delete_id: id) when is_list(id), do: Enum.map_join(id, ",", &_encode(delete_id: &1))
defp _encode(delete_id: _), do: ""
defp _encode(delete_query: q) when is_binary(q), do: "\"delete\":{\"query\":\"#{q}\"}"
defp _encode(delete_query: q) when is_list(q), do: Enum.map_join(q, ",", &_encode(delete_query: &1))
defp _encode(delete_query: _), do: ""
defp _encode(rollback: true), do: "\"rollback\":{}"
defp _encode(rollback: _), do: ""
end
|
lib/hui/u.ex
| 0.808294
| 0.650592
|
u.ex
|
starcoder
|
defmodule Jackalope.Handler do
@moduledoc """
Behaviour defining callbacks triggered during the MQTT life-cycle
The jackalope handler is stateless, so if state is needed one could
route the messages to stateful processes, and inform the system
about connection and subscription state.
Most of the callbacks are optional.
"""
@type topic :: Tortoise311.topic()
@type topic_filter :: Tortoise311.topic_filter()
@type topic_levels :: [String.t()]
@type payload :: term()
@type last_will :: [topic: topic, payload: payload, qos: non_neg_integer()]
@doc """
Called when the MQTT connection changes status
This can be used to inform other parts of the system about the state
of the connection; possible values are `:up` and `:down`, where up
means that the MQTT client has a connection to the broker; down
means that the connection has been dropped.
"""
@callback connection(status :: :up | :down) :: any()
@doc """
Produces the last will message for the current connection, or nil if the last will in the connection options is to be used
Example: [topic: "hub_serial_number/message", payload: %{code: "going_down", msg: "Last will message"}, qos: 1]
"""
@callback last_will() :: last_will | nil
@doc """
Called when a topic filter subscription state changes
This can be used to inform other parts of the system that we should
(or shouldn't) expect messages received on the given `topic_filter`.
The status values are `:up` and `:down`, where up means that the
broker has accepted a subscription request to the specific
`topic_filter`, and down means that the broker has accepted an
unsubscribe request.
"""
@callback subscription(status :: :up | :down, topic_filter) :: any()
@doc """
Called when receiving a message matching one of the subscriptions
The callback will receive two arguments; the MQTT topic in list
form, where each of the topic levels are an item. This allows us to
pattern match on topic filters with wildcards.
The payload should be a term; at this point the message will have
been run through a JSON decoder. If the JSON decode should fail the
optional `handle_error/1` callback would have been triggered
instead.
"""
@callback handle_message(topic_levels, payload) :: any()
@doc """
Handle errors produced by Jackalope that should be reacted to
During the connection life-cycle various errors can occur, and while
Jackalope and Tortoise311 will try to correct the situation, some
errors require user intervention. The optional `handle_error/1`
callback can help inform the surrounding system of errors.
@impl Jackalope.Handler
def handle_error({:publish_error, work_order, :ttl_expired}) do
Logger.error("Work order expired: \#{inspect(work_order)}")
end
def handle_error(_otherwise) do
_ignore = nil
end
If this callback is implemented one should make sure to make a
catch-all to prevent unhandled errors from crashing the handler.
"""
@callback handle_error(reason) :: any()
when reason:
{:payload_decode_error, Jason.DecodeError.t(),
{topic_levels, payload_string :: String.t()}}
| {:publish_error, {topic, payload, opts}, error_reason :: term}
| {:publish_error, jackalope_work_order :: term, :ttl_expired},
opts: Keyword.t()
@optional_callbacks connection: 1,
subscription: 2,
handle_error: 1
end
|
lib/jackalope/handler.ex
| 0.844473
| 0.446736
|
handler.ex
|
starcoder
|
defmodule Runlet.Cmd.Valve do
@moduledoc "Asynchronously start/stop the event stream"
defstruct tref: nil,
open: true,
dropped: 0
@doc """
Allows dynamically starting/stopping the event stream using the
start/stop commands. While stopped, events are discarded.
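
A minimal sketch of driving the valve from another process (`pid` is the
process running the stream; names are illustrative):

    send(pid, {:runlet_close, 60}) # drop events, reopen after 60 seconds
    send(pid, :runlet_open)        # reopen immediately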
"""
@spec exec(Enumerable.t()) :: Enumerable.t()
def exec(stream) do
Stream.transform(
stream,
fn ->
struct(Runlet.Cmd.Valve, [])
end,
fn
%Runlet.Event{event: %Runlet.Event.Signal{}} = t, state ->
{[t], state}
t,
%Runlet.Cmd.Valve{
tref: tref,
open: open,
dropped: dropped
} = state ->
receive do
{:runlet_close, seconds} when tref == nil and is_integer(seconds) ->
{:ok, tref} = :timer.send_after(seconds * 1_000, :runlet_open)
{[], %{state | tref: tref, open: false, dropped: dropped}}
{:runlet_close, seconds} when is_integer(seconds) ->
_ = :timer.cancel(tref)
{:ok, tref} = :timer.send_after(seconds * 1_000, :runlet_open)
{[], %{state | tref: tref, open: false, dropped: dropped}}
{:runlet_close, _} when open == true ->
{[
%{
t
| attr:
Map.merge(t.attr, %{
valve: %Runlet.Event.Valve{dropped: dropped}
})
}
], state}
{:runlet_close, _} ->
{[], %{state | dropped: dropped}}
:runlet_open when tref == nil ->
{[
%{
t
| attr:
Map.merge(t.attr, %{
valve: %Runlet.Event.Valve{dropped: dropped}
})
}
], state}
:runlet_open ->
_ = :timer.cancel(tref)
{[
%{
t
| attr:
Map.merge(t.attr, %{
valve: %Runlet.Event.Valve{dropped: dropped}
})
}
], %{state | tref: nil, open: true}}
after
0 ->
case open do
true ->
{[
%{
t
| attr:
Map.merge(t.attr, %{
valve: %Runlet.Event.Valve{dropped: dropped}
})
}
], state}
false ->
{[], %{state | dropped: dropped + 1}}
end
end
end,
fn
%Runlet.Cmd.Valve{tref: nil} ->
:ok
%Runlet.Cmd.Valve{tref: tref} ->
:timer.cancel(tref)
end
)
end
end
|
lib/runlet/cmd/valve.ex
| 0.673943
| 0.412087
|
valve.ex
|
starcoder
|
defmodule RegexToStrings do
@moduledoc ~S"""
Get the strings a regex will match.
"""
@unsupported_metacharacters [".", "*", "+", ",}"]
@doc ~S"""
Get the strings a regex will match.
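
## Examples

A couple of illustrative calls, traced against the implementation below:

    iex> RegexToStrings.regex_to_strings("ab|cd")
    {:ok, ["ab", "cd"]}

    iex> RegexToStrings.regex_to_strings("a[bc]")
    {:ok, ["ab", "ac"]}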
"""
def regex_to_strings(regex_string) do
maybe_regex_to_strings(regex_string, raise: false)
end
def regex_to_strings!(regex_string) do
maybe_regex_to_strings(regex_string, raise: true)
end
defp maybe_regex_to_strings(regex_string, raise: raise?) do
regex_string
# filter out non-capturing group syntax
|> String.replace("?:", "")
|> check_unsupported_metacharacter(raise: raise?)
|> case do
:unsupported_regex ->
:unsupported_regex
regex_string ->
values =
regex_string
|> String.graphemes()
# replace ranges such as "c-f" by "cdef"
|> fill_ranges()
|> do_regex_to_strings([], [])
if raise? do
values
else
{:ok, values}
end
end
end
defp do_regex_to_strings([], current_values, result) do
result ++ current_values
end
defp do_regex_to_strings(["|" | rest_chars], current_values, result) do
do_regex_to_strings(rest_chars, [], result ++ current_values)
end
defp do_regex_to_strings(["?" | rest_chars], current_values, result) do
current_values = Enum.map(current_values, &String.slice(&1, 0..-2)) ++ current_values
do_regex_to_strings(rest_chars, current_values, result)
end
defp do_regex_to_strings([char, "{", min, ",", max, "}" | rest_chars], current_values, result) do
strings =
String.to_integer(min)..String.to_integer(max)
|> Enum.to_list()
|> Enum.map(&String.duplicate(char, &1))
current_values =
if current_values == [], do: [""], else: current_values
current_values =
for i <- current_values, j <- strings, do: i <> j
do_regex_to_strings(rest_chars, current_values, result)
end
defp do_regex_to_strings([char, "{", repeat, "}" | rest_chars], current_values, result) do
repeat = String.to_integer(repeat)
string = String.duplicate(char, repeat)
current_values = Enum.map(current_values, &(&1 <> string))
do_regex_to_strings(rest_chars, current_values, result)
end
defp do_regex_to_strings(["[" | _] = chars, current_values, result) do
string = Enum.join(chars)
[char_class_string] = Regex.run(~r/^\[.+?\]\??/, string)
string_after_char_class = String.replace(string, char_class_string, "")
optional? = String.ends_with?(char_class_string, "?")
char_class_chars =
char_class_string
|> String.trim_trailing("?")
|> String.trim_trailing("]")
|> String.trim_leading("[")
|> String.graphemes()
char_class_chars =
if optional?, do: ["" | char_class_chars], else: char_class_chars
current_values =
if current_values == [], do: [""], else: current_values
current_values =
for i <- current_values, j <- char_class_chars, do: i <> j
string_after_char_class
|> String.graphemes()
|> do_regex_to_strings(current_values, result)
end
defp do_regex_to_strings(["(" | _] = chars, current_values, result) do
# find the string until the corresponding closing parenthesis
{chars_in_group, 0, :found_closing} =
Enum.reduce_while(chars, {[], -1, :not_found_closing}, fn
"(", {chars_in_group, parentheses_nesting, :not_found_closing} ->
{:cont, {["(" | chars_in_group], parentheses_nesting + 1, :not_found_closing}}
")", {chars_in_group, 0, :not_found_closing} ->
{:cont, {[")" | chars_in_group], 0, :found_closing}}
")", {chars_in_group, parentheses_nesting, :not_found_closing} ->
{:cont, {[")" | chars_in_group], parentheses_nesting - 1, :not_found_closing}}
char, {chars_in_group, parentheses_nesting, :not_found_closing} ->
{:cont, {[char | chars_in_group], parentheses_nesting, :not_found_closing}}
"?", {chars_in_group, 0, :found_closing} ->
{:halt, {["?" | chars_in_group], 0, :found_closing}}
_, {chars_in_group, 0, :found_closing} ->
{:halt, {chars_in_group, 0, :found_closing}}
end)
group_string =
chars_in_group
|> Enum.reverse()
|> Enum.join()
string_after_group = String.replace(Enum.join(chars), group_string, "")
optional? = String.ends_with?(group_string, "?")
strings_found_in_group =
group_string
|> String.trim_trailing("?")
|> String.trim_trailing(")")
|> String.trim_leading("(")
|> String.graphemes()
|> do_regex_to_strings([], [])
strings_found_in_group =
if optional?, do: ["" | strings_found_in_group], else: strings_found_in_group
current_values =
if current_values == [], do: [""], else: current_values
current_values =
for i <- current_values, j <- strings_found_in_group, do: i <> j
string_after_group
|> String.graphemes()
|> do_regex_to_strings(current_values, result)
end
defp do_regex_to_strings([char | rest_chars], current_values, result) do
current_values = if current_values == [], do: [""], else: current_values
do_regex_to_strings(rest_chars, Enum.map(current_values, &(&1 <> char)), result)
end
defp fill_ranges(list_chars) do
index = Enum.find_index(list_chars, &(&1 == "-"))
if index do
<<range_start::utf8>> = Enum.at(list_chars, index - 1)
<<range_end::utf8>> = Enum.at(list_chars, index + 1)
values =
Range.new(range_start, range_end)
|> Enum.to_list()
|> List.to_string()
|> String.graphemes()
|> Enum.slice(1..-2)
|> Enum.map(&to_string(&1))
list_chars
|> List.replace_at(index, values)
|> List.flatten()
|> fill_ranges()
else
list_chars
end
end
defp check_unsupported_metacharacter(regex_string, raise: raise?) do
@unsupported_metacharacters
|> Enum.any?(fn metacharacter ->
if String.contains?(regex_string, metacharacter) do
if raise? do
raise "unsupported metacharacter \"#{metacharacter}\""
end
true
end
end)
|> case do
true ->
:unsupported_regex
false ->
regex_string
end
end
end
|
lib/regex_to_strings.ex
| 0.555676
| 0.574156
|
regex_to_strings.ex
|
starcoder
|
defmodule Dissolver do
@moduledoc """
# Dissolver
### NOTE: This is a WIP repo, so be warned: there may be bugs.
This project is a fork of https://github.com/elixirdrops/kerosene.
I decided to take it over because it does not look as if it's being actively developed
and I wanted more features.
TODO:
- [x] Lazy query - Instead of calling Repo.all, pagination will return an Ecto query. This is useful for subqueries where you will be passing the query to something like a preload.
- [x] Custom themes - Now you can pass a module as the source of your theme.
- [ ] Refactor all the tests so that the modules only expose required interfaces.
- [ ] The way this lib queries for total counts is a bit odd since it's trying to account for group_by and multi-sourced froms. I'm going to see if we can't make this cleaner.
- [ ] Refactor namespace of functions. General clean up of internal methods.
---
 [](https://coveralls.io/github/MorphicPro/dissolver?branch=master)
Pagination for Ecto and Phoenix.
## Installation
add Dissolver to your mix.exs dependencies:
```elixir
def deps do
[
{:dissolver, "~> 0.9.4"}
]
end
```
Next provide Dissolver your Repo module via the config.
Add the following to your config:
```elixir
...
config :dissolver,
repo: MyApp.Repo,
per_page: 2
import_config "\#{Mix.env()}.exs"
```
For more information about the configuration options look at the [Configurations](#module-configuration) section
Now you are ready to start using Dissolver.
## Usage
Start paginating your queries
```elixir
def index(conn, params) do
{products, paginator} =
Product
|> Product.with_lowest_price
|> Dissolver.paginate(params)
render(conn, "index.html", products: products, paginator: paginator)
end
```
Add the view helper to your view
```elixir
defmodule MyApp.ProductView do
use MyApp.Web, :view
import Dissolver.HTML
end
```
Generate the links using the view helper in your template
```elixir
<%= paginate @conn, @paginator %>
```
Importing `Dissolver.HTML` provides your template access
to `Dissolver.HTML.paginate/3` as the prior example shows.
`Dissolver.HTML.paginate/3` can take a host of options to control
the theme, from how many links to show (`window: integer`) to what the
link labels should read.
By default the theme used to generate the html is the `Dissolver.HTML.Simple` theme.
It will only provide the very basic prev|next buttons. For more theme options, including providing
your own, read the following [Configurations](#module-configuration)
## Configuration
This module uses the following that can be set as globals in your `config/config.ex` configurations
* `:repo` - _*Required*_ Your app's Ecto Repo
* `:theme` (default: `Dissolver.HTML.Simple`) - A module that implements the `Dissolver.HTML.Theme` behavior
There are a few predefined theme modules found in `dissolver/html/`
* `Dissolver.HTML.Simple` - This is the _default_ with only previous | next links
* `Dissolver.HTML.Bootstrap` - [A Bootstrap 4 theme ](https://getbootstrap.com/)
* `Dissolver.HTML.Foundation` - [A Foundation theme](https://get.foundation/)
* `Dissolver.HTML.Materialize` - [A Materialize theme](https://materializecss.com/)
* `Dissolver.HTML.Semantic` - [A Semantic UI theme](https://semantic-ui.com/)
* `Dissolver.HTML.Tailwind` - [A Tailwind CSS theme](https://tailwindcss.com/)
* `:per_page` (default: 20) - The global per page setting
* `:max_page` - The limit of pages allowed to navigate regardless of total pages found
This option is ignored if not provided and defaults to total pages found in the query.
* `:lazy` (default: false) - This option, if enabled, will result in all `Dissolver.paginate/3` calls
returning an `Ecto.Query` rather than calling Repo.all. This is useful for when you need to paginate
on an association via a preload. TODO: provide example.
## JSON API Support.
```elixir
defmodule MyApp.ProductView do
use MyApp.Web, :view
import Dissolver.JSON
def render("index.json", %{products: products, dissolver: dissolver, conn: conn}) do
%{data: render_many(products, MyApp.ProductView, "product.json"),
pagination: paginate(conn, dissolver)}
end
def render("product.json", %{product: product}) do
%{id: product.id,
name: product.name,
description: product.description,
price: product.price}
end
end
```
## Lazy Example.
Say you have a tag that has many posts and you want to paginate the post as they relate to a given tag.
Heres an example of using the lazy option so that Dissolver.paginate returns a query rather than a result.
```elixir
def get_post_for_tag!(tag_name, params \\ %{}) do
total_count =
from(t in "tags",
join: pt in "post_tags",
on: pt.tag_id == t.id,
where: t.name == ^tag_name,
select: count()
)
|> Repo.one()
{posts_query, paginator} =
from(p in Post, order_by: [desc: :inserted_at], preload: [:tags])
|> Dissolver.paginate(params, total_count: total_count, lazy: true)
tag =
from(t in Tag, where: t.name == ^tag_name, preload: [posts: ^posts_query])
|> Repo.one!()
{tag, paginator}
end
```
You will notice that in this case I also have to supply total_count, since Dissolver.paginate does not currently have a way to scope total_count to a given tag: the query is applied after Dissolver.paginate was called. So for this use case we just supply the total count up front.
For my controller it looks just like most.
```elixir
def show_post(conn, %{"tag" => tag} = params) do
{tag, paginator} = Blog.get_post_for_tag!(tag, params)
render(conn, "show_posts.html", tag: tag, paginator: paginator)
end
```
You can also send in options to paginate helper look at the docs for more details.
## Contributing
If you can, start by writing an issue ticket.
Then, if you like, feel free to fork and submit a PR for review.
## Acknowledgement
I would like to thank
* Matt (@mgwidmann)
* <NAME> (@drewolson)
* <NAME> (@amatsuda)
## License
Please take a look at LICENSE.md
"""
import Ecto.Query
alias Dissolver.Paginator
@spec paginate(Ecto.Query.t(), map(), nil | keyword()) :: {list(), Paginator.t()}
def paginate(query, params, opts \\ []) do
repo = Application.fetch_env!(:dissolver, :repo)
process_options(opts)
|> process_params(params)
|> put_total_count(repo, query)
|> put_total_pages()
|> max_per_page_constraint()
|> max_page_constraint()
|> max_count_constraint()
|> page_constraint()
|> process_query(query)
|> return_query_results(repo)
end
defp process_options(opts) do
app_config =
%{
per_page: Application.get_env(:dissolver, :per_page),
max_per_page: Application.get_env(:dissolver, :max_per_page),
max_page: Application.get_env(:dissolver, :max_page),
max_count: Application.get_env(:dissolver, :max_count),
lazy: Application.get_env(:dissolver, :lazy)
}
|> drop_nil()
opts_map =
%{
per_page: Keyword.get(opts, :per_page),
max_per_page: Keyword.get(opts, :max_per_page),
max_page: Keyword.get(opts, :max_page),
max_count: Keyword.get(opts, :max_count),
total_count: Keyword.get(opts, :total_count),
lazy: Keyword.get(opts, :lazy)
}
|> drop_nil()
parsed_opts = Map.merge(app_config, opts_map)
struct(Dissolver.Paginator, parsed_opts)
end
# TODO: Add total_count as option
defp process_params(paginator, params) do
paginator
|> Map.merge(process_page_param(params))
|> Map.merge(process_per_page_param(params))
end
defp process_page_param(%{"page" => page}) do
%{page: String.to_integer(page)}
end
defp process_page_param(_params), do: %{}
defp process_per_page_param(%{"per_page" => per_page}) do
%{per_page: String.to_integer(per_page)}
end
defp process_per_page_param(_params), do: %{}
# TODO: refactor
defp put_total_count(%{total_count: nil} = paginator, repo, query) do
%{paginator | total_count: get_total_count(repo, query)}
end
defp put_total_count(%{total_count: total_count} = paginator, _repo, _query) do
%{paginator | total_count: total_count}
end
defp get_total_count(repo, query) do
query
|> exclude(:preload)
|> exclude(:order_by)
|> total_count()
|> repo.one()
end
defp put_total_pages(%{total_count: total_count, per_page: per_page} = paginator) do
%{paginator | total_pages: get_total_pages(total_count, per_page)}
end
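# e.g. get_total_pages(45, 20) => 3 (45 records at 20 per page)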
defp get_total_pages(0, _), do: 0
defp get_total_pages(count, per_page) do
Float.ceil(count / per_page) |> trunc()
end
defp max_per_page_constraint(%{per_page: per_page, max_per_page: nil} = paginator) do
%{paginator | max_per_page: per_page}
end
defp max_per_page_constraint(
%{total_count: total_count, per_page: per_page, max_per_page: max_per_page} = paginator
)
when per_page > max_per_page do
%{
paginator
| per_page: max_per_page,
total_pages: (total_count / max_per_page) |> trunc |> abs
}
end
defp max_per_page_constraint(paginator), do: paginator
defp max_page_constraint(%{max_page: nil, total_pages: total_pages} = paginator) do
%{paginator | max_page: total_pages}
end
defp max_page_constraint(%{max_page: max_page, total_pages: total_pages} = paginator)
when max_page >
total_pages do
%{paginator | max_page: total_pages}
end
defp max_page_constraint(
%{
per_page: per_page,
max_page: max_page
} = paginator
) do
%{
paginator
| total_pages: max_page,
total_count: max_page * per_page
}
end
defp max_count_constraint(%{max_count: nil} = paginator), do: paginator
defp max_count_constraint(%{total_count: total_count, max_count: max_count} = paginator)
when total_count > max_count do
%{paginator | total_count: max_count}
end
defp max_count_constraint(paginator), do: paginator
# TODO: refactor
# Not of fan of how this is checking if group_by or multi source from.
# Also the repeated use of total_count as a name bothers me.
defp total_count(%{group_bys: [_ | _]} = query), do: total_row_count(query)
defp total_count(%{from: %{source: {_, nil}}} = query), do: total_row_count(query)
defp total_count(query) do
primary_key = get_primary_key(query)
query
|> exclude(:select)
|> select([i], count(field(i, ^primary_key), :distinct))
end
defp total_row_count(query) do
query
|> subquery()
|> select(count("*"))
end
defp get_primary_key(query) do
new_query =
case is_map(query) do
true -> query.from.source |> elem(1)
_ -> query
end
new_query
|> apply(:__schema__, [:primary_key])
|> hd
end
defp page_constraint(%{page: page, max_page: max_page} = paginator) when page > max_page do
%{paginator | page: max_page}
end
defp page_constraint(paginator), do: paginator
# TODO: refactor
defp process_query(%{total_count: 0} = paginator, _query) do
{
paginator,
nil
}
end
defp process_query(%{page: page, per_page: per_page} = paginator, query) do
offset = (page - 1) * per_page
{
paginator,
query
|> limit(^per_page)
|> offset(^offset)
}
end
defp return_query_results({paginator, nil}, _repo) do
{[], paginator}
end
defp return_query_results({%{lazy: false} = paginator, query}, repo) do
{repo.all(query), paginator}
end
defp return_query_results({%{lazy: true} = paginator, query}, _repo) do
{query, paginator}
end
# Utils ---
defp drop_nil(%{} = map) do
map
|> Enum.filter(fn {_, v} -> v end)
|> Enum.into(%{})
end
end
|
lib/dissolver.ex
| 0.653016
| 0.807499
|
dissolver.ex
|
starcoder
|
defmodule Day25 do
def part1(input) do
grid = parse(input)
Stream.iterate(grid, &move/1)
|> Stream.with_index
|> Stream.drop_while(&elem(&1, 0))
|> Enum.take(1)
|> hd
|> elem(1)
end
defp move(grid) do
case move_one(grid, :east) do
nil ->
move_one(grid, :south)
grid ->
move_one(grid, :south) || grid
end
end
defp move_one(grid, kind) do
case moves(grid, kind) do
[] ->
nil
moves ->
Enum.reduce(moves, grid, fn {from, to}, grid ->
%{grid | from => :empty, to => kind}
end)
end
end
defp moves(grid, kind) do
Enum.reduce(Enum.sort(grid), [], fn {from, what}, acc ->
if kind === what do
case get_adjacent(grid, kind, from) do
{to, :empty} ->
[{from, to} | acc]
{_, _} ->
acc
end
else
acc
end
end)
end
defp get_adjacent(grid, :east, {row, col}) do
pos = {row, col + 1}
case Map.get(grid, pos) do
nil ->
pos = {row, 0}
{pos, Map.fetch!(grid, pos)}
what ->
{pos, what}
end
end
defp get_adjacent(grid, :south, {row, col}) do
pos = {row + 1, col}
case Map.get(grid, pos) do
nil ->
pos = {0, col}
{pos, Map.fetch!(grid, pos)}
what ->
{pos, what}
end
end
def print_grid(grid) do
{{max_row, max_col}, _} = Enum.max(grid)
Enum.map(0..max_row, fn row ->
Enum.map(0..max_col, fn col ->
case Map.fetch!(grid, {row, col}) do
:east -> ">"
:south -> "v"
:empty -> "."
end
end)
|> Enum.concat(["\n"])
end)
|> IO.puts
end
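# Parses the puzzle input lines into a map of {row, col} => :east | :south | :empty,
# e.g. parse([">v."]) yields %{{0, 0} => :east, {0, 1} => :south, {0, 2} => :empty}.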
defp parse(input) do
input
|> Enum.with_index
|> Enum.flat_map(fn {line, row} ->
String.codepoints(line)
|> Enum.with_index
|> Enum.map(fn {char, col} ->
{{row, col},
case char do
"v" -> :south
">" -> :east
"." -> :empty
end}
end)
end)
|> Map.new
end
end
|
day25/lib/day25.ex
| 0.512449
| 0.512998
|
day25.ex
|
starcoder
|
defmodule Nerves.Runtime.Log.SyslogParser do
@moduledoc """
Functions for parsing syslog strings
"""
@type severity ::
:alert | :critical | :debug | :emergency | :error | :informational | :notice | :warning
@type facility ::
:kernel
| :user_level
| :mail
| :system
| :security_authorization
| :syslogd
| :line_printer
| :network_news
| :UUCP
| :clock
| :security_authorization
| :FTP
| :NTP
| :log_audit
| :log_alert
| :clock
| :local0
| :local1
| :local2
| :local3
| :local4
| :local5
| :local6
| :local7
@doc """
Parse out the syslog facility, severity, and message (including the timestamp
and host) from a syslog-formatted string.
The message is of the form:
```text
<pri>message
```
`pri` is an integer that when broken apart gives you a facility and severity.
`message` is everything else.
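
For example (illustrative; priority 30 decodes to facility `:system` and
severity `:informational`):

    parse("<30>hello")
    #=> {:ok, %{facility: :system, severity: :informational, message: "hello"}}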
"""
@spec parse(String.t()) ::
{:ok, %{facility: facility(), severity: severity(), message: binary()}}
| {:error, :parse_error}
def parse(<<"<", pri, ">", message::binary>>) when pri >= ?0 and pri <= ?9 do
do_parse(pri - ?0, message)
end
def parse(<<"<", pri0, pri1, ">", message::binary>>)
when pri0 >= ?1 and pri0 <= ?9 and pri1 >= ?0 and pri1 <= ?9 do
do_parse((pri0 - ?0) * 10 + (pri1 - ?0), message)
end
def parse(<<"<", ?1, pri0, pri1, ">", message::binary>>)
when pri0 >= ?0 and pri0 <= ?9 and pri1 >= ?0 and pri1 <= ?9 do
do_parse(100 + (pri0 - ?0) * 10 + (pri1 - ?0), message)
end
def parse(_) do
{:error, :parse_error}
end
defp do_parse(pri, message) do
with {:ok, facility, severity} <- decode_priority(pri) do
{:ok, %{facility: facility, severity: severity, message: message}}
end
end
@doc """
Decode a syslog priority to facility and severity
"""
@spec decode_priority(0..191) :: {:ok, facility(), severity()} | {:error, :parse_error}
def decode_priority(priority) when priority >= 0 and priority <= 191 do
facility = div(priority, 8)
severity = Integer.mod(priority, 8)
{:ok, facility_name(facility), severity_name(severity)}
end
def decode_priority(_priority) do
{:error, :parse_error}
end
defp facility_name(0), do: :kernel
defp facility_name(1), do: :user_level
defp facility_name(2), do: :mail
defp facility_name(3), do: :system
defp facility_name(4), do: :security_authorization
defp facility_name(5), do: :syslogd
defp facility_name(6), do: :line_printer
defp facility_name(7), do: :network_news
defp facility_name(8), do: :UUCP
defp facility_name(9), do: :clock
defp facility_name(10), do: :security_authorization
defp facility_name(11), do: :FTP
defp facility_name(12), do: :NTP
defp facility_name(13), do: :log_audit
defp facility_name(14), do: :log_alert
defp facility_name(15), do: :clock
defp facility_name(16), do: :local0
defp facility_name(17), do: :local1
defp facility_name(18), do: :local2
defp facility_name(19), do: :local3
defp facility_name(20), do: :local4
defp facility_name(21), do: :local5
defp facility_name(22), do: :local6
defp facility_name(23), do: :local7
defp severity_name(0), do: :emergency
defp severity_name(1), do: :alert
defp severity_name(2), do: :critical
defp severity_name(3), do: :error
defp severity_name(4), do: :warning
defp severity_name(5), do: :notice
defp severity_name(6), do: :informational
defp severity_name(7), do: :debug
@doc """
Convert severity to an Elixir logger level
"""
@spec severity_to_logger(severity()) :: Logger.level()
def severity_to_logger(severity) when severity in [:emergency, :alert, :critical, :error],
do: :error
def severity_to_logger(severity) when severity == :warning, do: :warn
def severity_to_logger(severity) when severity in [:notice, :informational], do: :info
def severity_to_logger(severity) when severity == :debug, do: :debug
end
|
lib/nerves_runtime/log/syslog_parser.ex
| 0.531696
| 0.85446
|
syslog_parser.ex
|
starcoder
|
defmodule Kaffe.Subscriber do
@moduledoc """
Consume messages from a single partition of a single Kafka topic.
Assignments are received from a group consumer member, `Kaffe.GroupMember`.
Messages are delegated to `Kaffe.Worker`. The worker is expected to cast back
a response, at which time the stored offset will be acked back to Kafka.
The options (`ops`) to `subscribe/7` may include the beginning offset
using `:begin_offset`.
The subscriber reads the following options out of the configuration:
- `max_bytes` - The maximum number of message bytes to receive in a batch
- `offset_reset_policy` - The native `auto.offset.reset` option,
either `:reset_to_earliest` or `:reset_to_latest`.
See: https://github.com/klarna/brucke/blob/master/src/brucke_member.erl
Also: https://github.com/klarna/brod/blob/master/src/brod_consumer.erl
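
A hypothetical configuration sketch (the exact shape is defined by
`Kaffe.Config.Consumer`; the key names below mirror the options listed above):

    config :kaffe,
      consumer: [
        max_bytes: 500_000,
        offset_reset_policy: :reset_to_latest
      ]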
"""
use GenServer
require Logger
require Record
import Record, only: [defrecord: 2, extract: 2]
defrecord :kafka_message_set, extract(:kafka_message_set, from_lib: "brod/include/brod.hrl")
defrecord :kafka_message, extract(:kafka_message, from_lib: "brod/include/brod.hrl")
defmodule State do
defstruct subscriber_pid: nil, group_coordinator_pid: nil, gen_id: nil, worker_pid: nil,
subscriber_name: nil, topic: nil, partition: nil, subscribe_ops: nil,
retries_remaining: nil
end
def subscribe(subscriber_name, group_coordinator_pid, worker_pid,
gen_id, topic, partition, ops) do
GenServer.start_link(__MODULE__, [subscriber_name, group_coordinator_pid, worker_pid,
gen_id, topic, partition, ops], name: name(subscriber_name, topic, partition))
end
def stop(subscriber_pid) do
Logger.info "event#stopping=#{inspect self()}"
GenServer.stop(subscriber_pid)
end
def ack_messages(subscriber_pid, topic, partition, generation_id, offset) do
GenServer.cast(subscriber_pid, {:ack_messages, topic, partition, generation_id, offset})
end
def consume_more_messages(subscriber_pid, offset) do
GenServer.cast(subscriber_pid, {:consume_more_messages, offset})
end
def init([subscriber_name, group_coordinator_pid, worker_pid,
gen_id, topic, partition, ops]) do
send(self(), {:subscribe_to_topic_partition})
{:ok, %State{group_coordinator_pid: group_coordinator_pid,
worker_pid: worker_pid, gen_id: gen_id,
subscriber_name: subscriber_name, topic: topic, partition: partition, subscribe_ops: ops ++ subscriber_ops(),
retries_remaining: max_retries()}}
end
def handle_info({_pid, {:kafka_message_set, topic, partition, _high_wm_offset, _messages} = message_set}, state) do
^topic = state.topic
^partition = state.partition
messages = message_set
|> kafka_message_set
|> Enum.into(%{})
|> Map.get(:messages)
|> Enum.map(fn (message) ->
compile_message(message, state.topic, state.partition)
end)
Logger.debug "Sending #{Enum.count(messages)} messages to worker: #{inspect state.worker_pid}"
worker().process_messages(state.worker_pid, self(), topic, partition, state.gen_id, messages)
{:noreply, state}
end
def handle_info({:subscribe_to_topic_partition},
%{subscriber_name: subscriber_name,
topic: topic,
partition: partition,
subscribe_ops: ops} = state) do
kafka().subscribe(subscriber_name, self(), topic, partition, ops)
|> handle_subscribe(state)
end
def handle_info({_pid, {:kafka_fetch_error, topic, partition, code, reason} = error}, state) do
Logger.info "event#kafka_fetch_error=#{inspect self()} topic=#{topic} partition=#{partition} code=#{inspect code} reason=#{inspect reason}"
{:stop, {:shutdown, error}, state}
end
def handle_info({:DOWN, _ref, _process, pid, reason}, %{subscriber_pid: subscriber_pid} = state)
when pid == subscriber_pid do
Logger.warn "event#consumer_down=#{inspect self()} reason=#{inspect reason}"
{:stop, {:shutdown, {:consumer_down, reason}}, state}
end
def handle_info(unknown, state) do
# catch all
Logger.warn "event#unknown_message=#{inspect self()} reason=#{inspect unknown}"
{:noreply, state}
end
def handle_cast({:ack_messages, topic, partition, generation_id, offset}, state) do
Logger.debug "Ready to ack messages of #{state.topic} / #{state.partition} / #{generation_id} at offset: #{offset}"
# Is this the ack we're looking for?
^topic = state.topic
^partition = state.partition
^generation_id = state.gen_id
# Update the offsets in the group
:ok = group_coordinator().ack(state.group_coordinator_pid, state.gen_id,
state.topic, state.partition, offset)
# Request more messages from the consumer
:ok = kafka().consume_ack(state.subscriber_pid, offset)
{:noreply, state}
end
def handle_cast({:consume_more_messages, offset}, state) do
Logger.debug "Ready to consume more messages of #{state.topic} / #{state.partition} at offset: #{offset}. Offset has not been commited back"
# Request more messages from the consumer
:ok = kafka().consume_ack(state.subscriber_pid, offset)
{:noreply, state}
end
defp handle_subscribe({:ok, subscriber_pid}, state) do
Logger.debug "Subscribe success: #{inspect subscriber_pid}"
Process.monitor(subscriber_pid)
{:noreply, %{state | subscriber_pid: subscriber_pid}}
end
defp handle_subscribe({:error, reason}, %{retries_remaining: retries_remaining} = state)
when retries_remaining > 0 do
Logger.debug "Failed to subscribe with reason: #{inspect reason}, #{retries_remaining} retries remaining"
Process.send_after(self(), {:subscribe_to_topic_partition}, retry_delay())
{:noreply, %{state | retries_remaining: retries_remaining - 1}}
end
defp handle_subscribe({:error, reason}, state) do
Logger.warn "event#subscribe_failed=#{inspect self()} reason=#{inspect reason}"
{:stop, {:subscribe_failed, :retries_exceeded, reason}, state}
end
defp compile_message(msg, topic, partition) do
Map.merge(%{topic: topic, partition: partition}, kafka_message_to_map(msg))
end
defp kafka_message_to_map(msg) do
Enum.into(kafka_message(msg), %{})
end
defp kafka do
Application.get_env(:kaffe, :kafka_mod, :brod)
end
defp group_coordinator do
Application.get_env(:kaffe, :group_coordinator_mod, :brod_group_coordinator)
end
defp worker do
Application.get_env(:kaffe, :worker_mod, Kaffe.Worker)
end
defp subscriber_ops do
[max_bytes: Kaffe.Config.Consumer.configuration.max_bytes,
offset_reset_policy: Kaffe.Config.Consumer.configuration.offset_reset_policy]
end
defp max_retries do
Kaffe.Config.Consumer.configuration.subscriber_retries
end
defp retry_delay do
Kaffe.Config.Consumer.configuration.subscriber_retry_delay_ms
end
defp name(subscriber_name, topic, partition) do
:"kaffe_subscriber_#{subscriber_name}_#{topic}_#{partition}"
end
end
|
lib/kaffe/group_member/subscriber/subscriber.ex
| 0.858674
| 0.569613
|
subscriber.ex
|
starcoder
|
defmodule Beeline.Config do
@moduledoc false
@producer_schema Beeline.Producer.schema()
@schema [
name: [
doc: """
The GenServer name for the topology. The topology will build on this
name, using it as a prefix.
""",
type: :atom
],
producers: [
doc: """
A list of producers to which the consumer should subscribe. See the
"producer options" section below for the schema.
""",
type: :keyword_list,
keys: [
*: [
type: :keyword_list,
keys:
Enum.map(@producer_schema, fn {k, v} ->
{k, put_in(v[:doc], false)}
end)
]
]
],
get_stream_position: [
doc: """
A function to invoke in order to get the stream position for a producer.
This function should be a 1-arity function (anonymous or capture) where
the name of the producer is passed as the argument. This option may also
be passed as an MFA tuple where the producer name will be prepended to
the argument list. If this option is not provided, a default will be
fetched with `Application.fetch_env!(:beeline, :get_stream_position)`.
This configuration can be used to set a blanket function for all
beelines to use.
""",
type: {:or, [:mfa, {:fun, 1}]}
],
auto_subscribe?: [
doc: """
A function to invoke to determine whether each producer should
subscribe to events as it starts up. The argument passed is the
GenServer name of the producer. If this option is not provided,
a default will be fetched with
`Application.fetch_env!(:beeline, :auto_subscribe?)`.
""",
type: {:or, [:mfa, {:fun, 1}]}
],
subscribe_after: [
doc: """
A period in msec after initialization when each producer should
query the `:auto_subscribe?` function.
""",
type: {:or, [:mfa, {:fun, 0}, :non_neg_integer]},
default: {Enum, :random, [3_000..5_000]}
],
spawn_health_checkers?: [
doc: """
Controls whether the topology should spawn the HealthChecker children.
It can be useful to disable this in `Mix.env() in [:dev, :test]` as the
health checker provides little or no value in those environments and
can produce many log lines. If this option is left blank, it will be
gotten from the application environment defaulting to `true` with
`Application.get_env(:beeline, :spawn_health_checkers?, true)`.
""",
type: {:or, [:boolean, {:in, [nil]}]},
default: nil
],
health_check_interval: [
doc: """
How long the health checker processes should wait between polling
the stream positions. Can either be a function (MFA or 0-arity function)
or a non-negative integer. The value is treated as milliseconds.
""",
type: {:or, [:mfa, {:fun, 0}, :non_neg_integer]},
default: 51_000
],
health_check_drift: [
doc: """
Random noise added to the interval specified with `:health_check_interval`.
This can be useful to ensure that not all producers poll their positions
at the same time, which can reduce strain on the stream position store
and the EventStoreDB. Can either be a function (MFA or 0-arity function)
or a non-negative integer. The value is treated as milliseconds. If a
function is provided, it is invoked every time the health checker
process attempts to schedule the next poll.
""",
type: {:or, [:mfa, {:fun, 0}, :non_neg_integer]},
default: {Enum, :random, [0..10_000]}
],
test_mode?: [
doc: """
Controls whether the topology should start up in test mode. In test
mode, any adapters set in producer specs are switched out with
the `:dummy` adapter. If this option is left blank, it will be
gotten from the application environment defaulting to `false` with
`Application.get_env(:beeline, :test_mode?, false)`.
""",
type: {:or, [:boolean, {:in, [nil]}]}
],
context: [
doc: """
A user-defined data structure which is used as the initial state of
the GenStage consumer process.
""",
type: :any,
default: nil
]
]
# coveralls-ignore-start
def schema, do: @schema
# coveralls-ignore-stop
defstruct [:module | Keyword.keys(@schema)]
def source(opts) do
opts =
Keyword.keys(@schema)
|> Enum.map(fn key -> {key, nil} end)
|> Keyword.merge(opts)
|> update_in([:producers, Access.all()], fn {key, producer} ->
producer =
Keyword.keys(@producer_schema)
|> Enum.map(fn key -> {key, nil} end)
|> Keyword.merge(producer)
{key, producer}
end)
|> add_default_opts()
|> update_in([:producers, Access.all()], fn {key, producer} ->
{key, struct(Beeline.Producer, producer)}
end)
struct(__MODULE__, opts)
end
@doc false
def add_default_opts(opts) do
Enum.reduce(opts, [], &add_default_opt(&1, &2, opts))
end
@doc false
# coveralls-ignore-start
def add_default_opt({:get_stream_position, nil}, acc, _all_opts) do
get_stream_position = Application.fetch_env!(:beeline, :get_stream_position)
[{:get_stream_position, get_stream_position} | acc]
end
# coveralls-ignore-stop
def add_default_opt({:auto_subscribe?, nil}, acc, _all_opts) do
auto_subscribe? = Application.fetch_env!(:beeline, :auto_subscribe?)
[{:auto_subscribe?, auto_subscribe?} | acc]
end
def add_default_opt({:spawn_health_checkers?, nil}, acc, _all_opts) do
spawn_health_checkers? =
Application.get_env(:beeline, :spawn_health_checkers?, true)
[{:spawn_health_checkers?, spawn_health_checkers?} | acc]
end
def add_default_opt({:test_mode?, nil}, acc, _all_opts) do
test_mode? = Application.get_env(:beeline, :test_mode?, false)
[{:test_mode?, test_mode?} | acc]
end
def add_default_opt({:producers, producers}, acc, all_opts) do
producers =
producers
|> Enum.map(fn {key, producer} ->
producer =
Enum.reduce(
producer,
[],
&add_default_producer_opt(&1, &2, key, all_opts)
)
{key, producer}
end)
[{:producers, producers} | acc]
end
def add_default_opt({k, v}, acc, _all_opts), do: [{k, v} | acc]
@doc false
def add_default_producer_opt({:name, nil}, acc, key, all_opts) do
name = Module.concat(all_opts[:name], "Producer_#{key}")
[{:name, name} | acc]
end
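# e.g. with all_opts[:name] == MyApp.Beeline and key == :default, the
# producer gets the name MyApp.Beeline.Producer_default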
def add_default_producer_opt({k, v}, acc, _key, _all_opts), do: [{k, v} | acc]
end
|
lib/beeline/config.ex
| 0.807499
| 0.476762
|
config.ex
|
starcoder
|
defmodule KinesisClient.Stream.AppState do
@moduledoc """
The AppState is where the information about Stream shards are stored. ShardConsumers will
checkpoint the records, and the `KinesisClient.Stream.Coordinator` will check here to determine
what shards to consume.
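
A minimal usage sketch (app and shard names are illustrative):

    AppState.initialize("my_app")

    case AppState.get_lease("my_app", "shardId-000000000000") do
      :not_found -> AppState.create_lease("my_app", "shardId-000000000000", "worker-1")
      shard_lease -> shard_lease
    end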
"""
def initialize(app_name, opts \\ []),
do: adapter(opts).initialize(app_name, opts)
@doc """
Get a `KinesisClient.Stream.AppState.ShardInfo` struct by shard_id. If there is not an existing
record, returns `:not_found`.
"""
def get_lease(app_name, shard_id, opts \\ []),
do: adapter(opts).get_lease(app_name, shard_id, opts)
@doc """
Persists a new ShardInfo record. Returns an error if there is already a record for that `shard_id`
"""
def create_lease(app_name, shard_id, lease_owner, opts \\ []),
do: adapter(opts).create_lease(app_name, shard_id, lease_owner, opts)
@doc """
Update the checkpoint of the shard with the last sequence number that was processed by a
ShardConsumer. Will return {:error, :lease_invalid} if the `lease` does not match what is in
`ShardInfo` and the checkpoint will not be updated.
"""
def update_checkpoint(app_name, shard_id, lease, checkpoint, opts \\ []),
do: adapter(opts).update_checkpoint(app_name, shard_id, lease, checkpoint, opts)
@doc """
Renew lease. Increments :lease_count.
"""
def renew_lease(app_name, shard_lease, opts \\ []),
do: adapter(opts).renew_lease(app_name, shard_lease, opts)
def take_lease(app_name, shard_id, new_owner, lease_count, opts \\ []),
do: adapter(opts).take_lease(app_name, shard_id, new_owner, lease_count, opts)
@doc """
Marks a ShardLease as completed.
This indicates that all records for the shard have been processed by the app. `KinesisClient.Stream.Shard`
processes will not be started for ShardLeases that are completed.
"""
def close_shard(app_name, shard_id, opts \\ []),
do: adapter(opts).close_shard(app_name, shard_id, opts)
defp adapter(opts) do
Keyword.get(opts, :adapter, KinesisClient.Stream.AppState.Dynamo)
end
end
|
lib/kinesis_client/stream/app_state.ex
| 0.753013
| 0.464537
|
app_state.ex
|
starcoder
|
defmodule Locations.GeoEncode.Lookup do
@moduledoc """
Entry point to perform a lookup using the OpenStreetMap Nominatim service
"""
require Logger
alias Locations.External.HttpClient
@url "https://nominatim.openstreetmap.org/search?"
@format "&format=geojson&addressdetails=0"
@doc """
Geo encode the passed address.
Returns the result of the address lookup. This will be a JSON decoded
structure as returned from Nominatim or an error.
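
For example (illustrative; the result shape depends on the live Nominatim
service returning a GeoJSON FeatureCollection):

    {:ok, 200, %{"features" => _features}} = geoencode("Berlin")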
"""
@spec geoencode(binary) :: {:error, any} | {:ok, number, any}
def geoencode(address) do
search_url = @url <> "q=" <> address <> @format
referer = get_nominatim_referer()
case HttpClient.get(search_url, [{"Referer", referer}], []) do
{:error, reason} ->
Logger.error("GeoEncode transport error #{inspect(reason)}")
{:error, merge_errors(reason)}
{:ok, response} ->
process_response(response)
end
end
defp process_response(%{status_code: status} = response) when status == 200 do
case Jason.decode(response.body) do
{:ok, body} ->
{:ok, status, body}
{:error, reason} ->
Logger.error("GeoEncode json error #{inspect(reason)}")
{:error, reason}
end
end
defp process_response(%{status_code: status}) do
{:error, status}
end
defp merge_errors(%{message: nil, reason: reason}) when is_atom(reason) do
reason
end
defp merge_errors(%{message: nil, reason: %{reason: reason}}) when is_atom(reason) do
reason
end
defp merge_errors(%{message: msg, reason: nil}) do
msg
end
defp merge_errors(%{message: msg, reason: %{reason: reason}}) when is_atom(reason) do
"message: #{inspect(msg)} - reason: #{inspect(reason)}"
end
def get_nominatim_referer() do
case Application.get_env(:locations, :nominatim_referer) do
nil ->
raise """
You need to set the referer as required by the Nominatim service in your config file
config :location, nominatim_referer: "<<My application>>"
"""
other ->
other
end
end
end
|
lib/locations/geo_encode/lookup.ex
| 0.727782
| 0.400075
|
lookup.ex
|
starcoder
|
defmodule Sanbase.Billing.GraphqlSchema do
@moduledoc ~s"""
Contains functions that help examining the GraphQL schema.
It allows you to work easily with access logic of queries.
"""
alias Sanbase.Billing.Product
alias Sanbase.Metric
require SanbaseWeb.Graphql.Schema
# NOTE: In case of a compile-time error for reasons like a wrong import_types and
# similar, the error will not include the right place where it occurred. In this
# case replace the @query_type with the commented one - it has a high chance of
# revealing the proper error location
# @query_type %{fields: %{}}
@query_type Absinthe.Schema.lookup_type(SanbaseWeb.Graphql.Schema, :query)
@fields @query_type.fields |> Map.keys()
@doc ~s"""
Return a map of {query, product_id} key-value pairs. The key is a query that
needs an extension plan to be accessed and the value is the product_id that
is needed for that access. If a user has a subscription plan with that product_id
he/she will have access to that query
"""
@spec extension_metric_product_map :: %{required(atom()) => Product.product_id()}
def extension_metric_product_map() do
@fields
|> Enum.filter(fn field ->
Map.get(@query_type.fields, field) |> Absinthe.Type.meta(:access) == :extension
end)
|> Enum.map(fn field ->
# The `product` key value is something like `Product.exchange_wallets_product`
# so the value is its AST instead of the actual value because of how
# the graphql schema is being built compile time. It is preferable to have
# more complicated code here instead of having to make the call at compile
# time, save it into module attribute and use that instead
product_ast = Map.get(@query_type.fields, field) |> Absinthe.Type.meta(:product)
{{_, _, [module, func]}, _, _} = product_ast
product_id = apply(module, func, [])
{{:query, field}, product_id}
end)
|> Map.new()
end
def min_plan_map() do
# Metadata looks like this:
# meta(access: :restricted, min_plan: [sanapi: :pro, sanbase: :free])
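# and produces map entries shaped like (query name illustrative):
# {{:query, :some_query}, %{"SANAPI" => :pro, "SANBASE" => :free}}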
query_min_plan_map =
get_query_meta_field_list(:min_plan)
|> Enum.into(%{}, fn
{query, kw_list} when is_list(kw_list) ->
{{:query, query},
%{
"SANAPI" => Keyword.get(kw_list, :sanapi, :free),
"SANBASE" => Keyword.get(kw_list, :sanbase, :free)
}}
{query, _} ->
{{:query, query}, %{"SANAPI" => :free, "SANBASE" => :free}}
end)
Metric.min_plan_map()
|> Enum.into(query_min_plan_map, fn
{metric, product_plan_map} when is_map(product_plan_map) ->
{{:metric, metric}, product_plan_map}
{metric, _} ->
{{:metric, metric}, %{"SANAPI" => :free, "SANBASE" => :free}}
end)
end
@doc ~s"""
Return all query names that have all `fields` with the values specified in
the corresponding position of the `values` list
"""
@spec get_field_value_matches(list(atom()), list(any)) :: list(atom())
def get_field_value_matches(fields, values)
when is_list(fields) and is_list(values) and length(fields) == length(values) do
field_value_pairs = Enum.zip(fields, values)
Enum.filter(@fields, fn f ->
Enum.all?(field_value_pairs, fn {field, value} ->
Map.get(@query_type.fields, f) |> Absinthe.Type.meta(field) == value
end)
end)
end
def get_query_meta_field_list(field) do
Enum.map(@fields, fn f ->
{f, Map.get(@query_type.fields, f) |> Absinthe.Type.meta(field)}
end)
end
def get_all_with_access_level(level) do
Enum.map(get_queries_with_access_level(level), &{:query, &1}) ++
Enum.map(get_metrics_with_access_level(level), &{:metric, &1})
end
def get_metrics_with_access_level(level) do
Enum.filter(Metric.access_map(), fn {_metric, metric_level} ->
level == metric_level
end)
|> Enum.map(fn {metric, _access} -> metric end)
end
def get_queries_with_access_level(level) do
get_field_value_matches([:access], [level])
end
def get_all_without_access_level() do
get_metrics_with_access_level(nil) -- [:__typename, :__type, :__schema]
end
end
|
lib/sanbase/billing/graphql_schema.ex
| 0.823328
| 0.42937
|
graphql_schema.ex
|
starcoder
|
defmodule RlStudy.MDP.State do
@typedoc """
Custom type of State.
"""
@type t :: %RlStudy.MDP.State{row: integer, column: integer}
defstruct row: -1, column: -1
@doc """
# Examples
iex> RlStudy.MDP.State.new()
%RlStudy.MDP.State{}
"""
@spec new :: RlStudy.MDP.State.t()
def new() do
%RlStudy.MDP.State{}
end
@doc """
# Examples
iex> RlStudy.MDP.State.new(1, 2)
%RlStudy.MDP.State{row: 1, column: 2}
"""
@spec new(non_neg_integer(), non_neg_integer()) :: RlStudy.MDP.State.t()
def new(row, column) when is_integer(row) and is_integer(column) do
%RlStudy.MDP.State{row: row, column: column}
end
@doc """
# Examples
iex> RlStudy.MDP.State.new(%{row: 7, column: 5})
%RlStudy.MDP.State{row: 7, column: 5}
"""
@spec new(%{row: non_neg_integer(), column: non_neg_integer()}) :: RlStudy.MDP.State.t()
def new(value) when is_map(value) do
new(value.row, value.column)
end
@doc """
# Examples
iex> s = RlStudy.MDP.State.new(7, 8)
iex> RlStudy.MDP.State.repr(s)
"<State: [7, 8]>"
"""
@spec repr(RlStudy.MDP.State.t()) :: String.t()
def repr(state) do
"<State: [#{state.row}, #{state.column}]>"
end
@doc """
# Examples
iex> s = RlStudy.MDP.State.new(1,2)
iex> RlStudy.MDP.State.clone(s)
%RlStudy.MDP.State{row: 1, column: 2}
"""
@spec clone(RlStudy.MDP.State.t()) :: RlStudy.MDP.State.t()
def clone(state) do
new(%{row: state.row, column: state.column})
end
@doc """
# Examples
iex> s = RlStudy.MDP.State.new(5, 6)
iex> RlStudy.MDP.State.hash(s)
"Kpo7UphjTrzxlXn1bAzT5770hacD/s/lANd61UWr87A="
"""
@spec hash(RlStudy.MDP.State.t()) :: String.t()
def hash(state) do
:crypto.hash(:sha256, repr(state)) |> Base.encode64()
end
@doc """
# Examples
iex> s1 = RlStudy.MDP.State.new(5, 6)
iex> s2 = RlStudy.MDP.State.new(5, 6)
iex> s3 = RlStudy.MDP.State.new(5, 7)
iex> RlStudy.MDP.State.eq(s1, s2)
true
iex> RlStudy.MDP.State.eq(s1, s3)
false
"""
@spec eq(RlStudy.MDP.State.t(), RlStudy.MDP.State.t()) :: boolean
def eq(state1, state2) do
state1 == state2
end
end
|
lib/mdp/state.ex
| 0.711531
| 0.64692
|
state.ex
|
starcoder
|
defmodule Braintree.Plan do
@moduledoc """
Plans represent recurring billing plans in a Braintree merchant account.
The API for plans is read only.
For additional reference see:
https://developers.braintreepayments.com/reference/request/plan/all/ruby
"""
use Braintree.Construction
alias Braintree.HTTP
alias Braintree.ErrorResponse, as: Error
@type t :: %__MODULE__{
id: String.t,
add_ons: [any],
balance: String.t,
billing_day_of_month: String.t,
billing_frequency: String.t,
created_at: String.t,
currency_iso_code: String.t,
description: String.t,
discounts: [any],
name: String.t,
number_of_billing_cycles: String.t,
price: String.t,
trial_duration: String.t,
trial_duration_unit: String.t,
trial_period: String.t,
updated_at: String.t
}
defstruct id: nil,
add_ons: [],
balance: nil,
billing_day_of_month: nil,
billing_frequency: nil,
created_at: nil,
currency_iso_code: nil,
description: nil,
discounts: [],
name: nil,
number_of_billing_cycles: nil,
price: nil,
trial_duration: nil,
trial_duration_unit: nil,
trial_period: nil,
updated_at: nil
@doc """
Get a list of all the plans defined in the merchant account. If there are
no plans an empty list is returned.
## Example
{:ok, plans} = Braintree.Plan.all()
"""
@spec all(Keyword.t) :: {:ok, [t]} | {:error, Error.t}
def all(opts \\ []) do
with {:ok, %{"plans" => plans}} <- HTTP.get("plans", opts) do
{:ok, new(plans)}
end
end
end
|
lib/plan.ex
| 0.864253
| 0.452234
|
plan.ex
|
starcoder
|
defmodule Axon.Training do
@moduledoc """
Abstractions for training machine learning models.
"""
require Axon
require Axon.Updates
@doc false
def step({_, _} = model, {_, _} = update), do: step(model, update, [])
@doc """
Represents a single training step.
The first two arguments are tuples:
* The first tuple contains the model initialization function
and the objective function. For a Neural Network, the objective
function is the loss function applied to the model's predictions
* The second pair contains the updater initialization function
and the update function itself
## Options
* `:metrics` - metrics to track during each training step. Can be an
atom representing a function in `Axon.Metrics`, or a 2-arity function
taking `y_true` and `y_pred` as args.
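
For example (a minimal sketch; the model, loss, and optimizer here are
illustrative):

    model = Axon.input({nil, 784}) |> Axon.dense(10, activation: :softmax)

    {init_fn, step_fn} =
      Axon.Training.step(model, :categorical_cross_entropy,
        Axon.Optimizers.sgd(0.01), metrics: [:accuracy])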
"""
def step({init_model_fn, objective_fn}, {init_update_fn, update_fn}, opts)
when is_function(init_model_fn, 0) and is_function(objective_fn, 3) and
is_function(init_update_fn, 1) and is_function(update_fn, 3) and is_list(opts) do
metrics = opts[:metrics] || []
update_metrics_fn = fn old_metrics, step, y_true, y_pred ->
Map.new(metrics, fn
{key, fun} ->
batch_metric = fun.(y_true, y_pred)
avg_metric =
old_metrics[key]
|> Nx.multiply(step)
|> Nx.add(batch_metric)
|> Nx.divide(Nx.add(step, 1))
{key, avg_metric}
key ->
batch_metric = apply(Axon.Metrics, key, [y_true, y_pred])
avg_metric =
old_metrics[key]
|> Nx.multiply(step)
|> Nx.add(batch_metric)
|> Nx.divide(Nx.add(step, 1))
{key, avg_metric}
end)
end
init_fn = fn ->
params = init_model_fn.()
optim_params = init_update_fn.(params)
# metrics may be atoms or {key, fun} pairs; key the map on the atom in both
# cases so lookups in update_metrics_fn find the initial value
init_metrics =
  Map.new(metrics, fn
    {k, _fun} -> {k, Nx.tensor(0.0, backend: Nx.Defn.Expr)}
    k -> {k, Nx.tensor(0.0, backend: Nx.Defn.Expr)}
  end)
%{
epoch: Nx.tensor(0, backend: Nx.Defn.Expr),
epoch_step: Nx.tensor(0, backend: Nx.Defn.Expr),
epoch_loss: Nx.tensor(0.0, backend: Nx.Defn.Expr),
params: params,
optimizer_state: optim_params,
metrics: init_metrics
}
end
step_fn = fn train_state, input, target ->
{{preds, batch_loss}, gradients} =
Nx.Defn.Kernel.value_and_grad(
train_state[:params],
&objective_fn.(&1, input, target),
fn x -> elem(x, 1) end
)
new_metrics =
case metrics do
[] ->
%{}
_ ->
update_metrics_fn.(train_state[:metrics], train_state[:epoch_step], target, preds)
end
epoch_avg_loss =
train_state[:epoch_loss]
|> Nx.multiply(train_state[:epoch_step])
|> Nx.add(batch_loss)
|> Nx.divide(Nx.add(train_state[:epoch_step], 1))
{updates, new_update_state} =
update_fn.(gradients, train_state[:optimizer_state], train_state[:params])
%{
epoch: train_state[:epoch],
epoch_step: Nx.add(train_state[:epoch_step], 1),
epoch_loss: epoch_avg_loss,
params: Axon.Updates.apply_updates(train_state[:params], updates),
optimizer_state: new_update_state,
metrics: new_metrics
}
end
{init_fn, step_fn}
end
@doc false
def step(%Axon{} = model, loss, {_, _} = optimizer) when is_function(loss, 2) or is_atom(loss),
do: step(model, loss, optimizer, [])
@doc """
Represents a single training step using an Axon `model`,
`loss` function, and `optimizer`.
The `loss` function is either an atom or a two arity
anonymous function.
"""
def step(%Axon{} = model, loss, optimizer, opts)
when is_function(loss, 2) and is_list(opts) do
{init_fn, predict_fn} = Axon.compile(model)
objective_fn = fn params, input, target ->
preds = predict_fn.(params, input)
loss = Nx.add(loss.(target, preds), Axon.penalty(model, params))
{preds, loss}
end
step({init_fn, objective_fn}, optimizer, opts)
end
def step(%Axon{} = model, loss, optimizer, opts) when is_atom(loss) and is_list(opts) do
loss_fn = &apply(Axon.Losses, loss, [&1, &2, [reduction: :mean]])
step(model, loss_fn, optimizer, opts)
end
@doc false
def step(%Axon{} = model, train_state, loss, {_, _} = optimizer)
when is_function(loss, 2) or is_atom(loss),
do: step(model, train_state, loss, optimizer, [])
@doc """
Represents a single training step using an Axon `model`,
initial state `train_state`, `loss` function and `optimizer`.
The `loss` function is either an atom or a two arity anonymous
function.
"""
def step(%Axon{} = model, train_state, loss, optimizer, opts)
when is_function(loss, 2) and is_list(opts) do
init_fn = fn ->
train_state
|> Tuple.to_list()
|> Enum.map(&Nx.tensor(&1, backend: Nx.Defn.Expr))
|> List.to_tuple()
end
objective_fn = fn params, input, target ->
preds = Axon.predict(model, params, input)
Nx.add(loss.(target, preds), Axon.penalty(model, params))
end
step({init_fn, objective_fn}, optimizer, opts)
end
def step(%Axon{} = model, train_state, loss, optimizer, opts)
when is_atom(loss) and is_list(opts) do
loss_fn = &apply(Axon.Losses, loss, [&1, &2, [reduction: :mean]])
step(model, train_state, loss_fn, optimizer, opts)
end
@doc """
Implements a common training loop.
Its arguments are:
* A tuple with the initialization function and the step function.
Often retrieved from `step/3` but it could also be manually provided.
* The inputs tensors
* The targets tensors
* A list of options
## Options
* `:epochs` - number of epochs to train for. Defaults to `5`.
* `:compiler` - `defn` compiler to use to run training loop.
Defaults to `Nx.Defn.Evaluator`.
* `:log_every` - frequency with which to log training loss.
Accepts an integer referring to number of batches, `:epoch`,
or `:none`. Defaults to `50`.
All other options are given to the underlying compiler.
## A note on Nx and anonymous functions
When training, both `init_fn` and `step_fn` are executed within
the given Nx `:compiler`. Therefore, it is required that `init_fn`
and `step_fn` work on tensor expressions instead of tensor values.
For example, let's suppose you want to initialize the values with:
Nx.random_uniform({40, 28}, 0, 1)
The following won't work:
params = Nx.random_uniform({40, 28}, 0, 1)
init_fn = fn -> params end
Instead, we want to build the values inside the given compiler.
The correct way to build those values is by computing them inside
a defn:
defn init_values, do: Nx.random_uniform({40, 28}, 0, 1)
And then:
init_fn = &init_values/0
"""
def train({init_fn, step_fn}, inputs, targets, opts \\ []) do
epochs = opts[:epochs] || 5
compiler = opts[:compiler] || Nx.Defn.Evaluator
log_every = opts[:log_every] || 50
jit_opts = [compiler: compiler, log_every: log_every] ++ opts
train_state = Nx.Defn.jit(init_fn, [], jit_opts)
for epoch <- 1..epochs, reduce: train_state do
train_state ->
{time, train_state} =
:timer.tc(
&train_epoch/6,
[step_fn, train_state, inputs, targets, epoch, jit_opts]
)
epoch_avg_loss =
train_state[:epoch_loss]
|> Nx.to_scalar()
zero_metrics = Map.new(train_state[:metrics], fn {k, _} -> {k, 0.0} end)
IO.puts("\n")
IO.puts("Epoch #{epoch} time: #{time / 1_000_000}s")
IO.puts("Epoch #{epoch} loss: #{:io_lib.format("~.5f", [epoch_avg_loss])}")
train_state[:metrics]
|> Enum.each(fn {k, v} ->
IO.puts(
"Epoch #{epoch} #{Atom.to_string(k)}: #{:io_lib.format("~.5f", [Nx.to_scalar(v)])}"
)
end)
IO.puts("\n")
%{train_state | metrics: zero_metrics, epoch: epoch + 1, epoch_step: 0, epoch_loss: 0.0}
end
end
## Helpers
defp train_epoch(step_fn, train_state, inputs, targets, epoch, opts) do
{log_every, jit_opts} = Keyword.pop(opts, :log_every)
dataset =
inputs
|> Stream.zip(targets)
train_state =
for {inp, tar} <- dataset, reduce: train_state do
train_state ->
train_state = Nx.Defn.jit(step_fn, [train_state, inp, tar], jit_opts)
if is_integer(log_every) and
Nx.remainder(train_state[:epoch_step], log_every) == Nx.tensor(0) do
log_batch(epoch, train_state)
end
train_state
end
train_state
end
defp log_batch(epoch, train_state) do
batch_num = train_state[:epoch_step]
avg_loss = train_state[:epoch_loss]
metrics =
train_state[:metrics]
|> Enum.map(fn {k, v} ->
"Average #{Atom.to_string(k)}: #{:io_lib.format("~.5f", [Nx.to_scalar(v)])}"
end)
metrics =
Enum.join(
["Average Loss: #{:io_lib.format("~.5f", [Nx.to_scalar(avg_loss)])}" | metrics],
" - "
)
IO.write(
"\rEpoch #{epoch}, batch #{Nx.to_scalar(batch_num)} - " <>
"#{metrics}"
)
end
end
|
lib/axon/training.ex
| 0.929047
| 0.766403
|
training.ex
|
starcoder
|
defmodule Timex.TimezoneInfo do
@moduledoc """
All relevant timezone information for a given period, i.e. Europe/Moscow on March 3rd, 2013
Notes:
- `full_name` is the name of the zone, but does not indicate anything about the current period (e.g. CST vs CDT)
- `abbreviation` is the abbreviated name for the zone in the current period, e.g. "America/Chicago" on 3/30/15 is "CDT"
- `offset_std` is the offset in seconds from standard time for this period
- `offset_utc` is the offset in seconds from UTC for this period
Spec:
- `day_of_week`: :sunday, :monday, :tuesday, etc
- `datetime`: {{year, month, day}, {hour, minute, second}}
- `from`: :min | {day_of_week, datetime}, when this zone starts
- `until`: :max | {day_of_week, datetime}, when this zone ends
"""
defstruct full_name: "Etc/UTC",
abbreviation: "UTC",
offset_std: 0,
offset_utc: 0,
from: :min,
until: :max
@valid_day_names [:sunday, :monday, :tuesday, :wednesday, :thursday, :friday, :saturday]
@max_seconds_in_day 60 * 60 * 24
@type day_of_week :: :sunday | :monday | :tuesday | :wednesday | :thursday | :friday | :saturday
@type datetime :: {{non_neg_integer, 1..12, 1..31}, {0..24, 0..59, 0..60}}
@type offset :: -85399..85399
@type from_constraint :: :min | {day_of_week, datetime}
@type until_constraint :: :max | {day_of_week, datetime}
@type t :: %__MODULE__{
full_name: String.t(),
abbreviation: String.t(),
offset_std: offset,
offset_utc: offset,
from: from_constraint,
until: until_constraint
}
@doc """
Create a custom timezone if a built-in one does not meet your needs.
You must provide the name, abbreviation, offset from UTC, daylight savings time offset,
and the from/until reference points for when the zone takes effect and ends.
To clarify the two offsets, `offset_utc` is the absolute offset relative to UTC,
`offset_std` is the offset to apply to `offset_utc` which gives us the offset from UTC
during daylight savings time for this timezone. If DST does not apply for this zone, simply
set it to 0.
The from/until reference points must meet the following criteria:
- Be set to `:min` for from, or `:max` for until, which represent
"infinity" for the start/end of the zone period.
- OR, be a tuple of {day_of_week, datetime}, where:
- `day_of_week` is an atom like `:sunday`
- `datetime` is an Erlang datetime tuple, e.g. `{{2016,10,8},{2,0,0}}`
*IMPORTANT*: Offsets are in seconds, not minutes. If you do not ensure they
are in the correct unit, runtime errors or incorrect results are probable.
## Examples
iex> #{__MODULE__}.create("Etc/Test", "TST", 120*60, 0, :min, :max)
%TimezoneInfo{full_name: "Etc/Test", abbreviation: "TST", offset_std: 7200, offset_utc: 0, from: :min, until: :max}
...> #{__MODULE__}.create("Etc/Test", "TST", 24*60*60, 0, :min, :max)
{:error, "invalid timezone offset '86400'"}
"""
@spec create(String.t(), String.t(), offset, offset, from_constraint, until_constraint) ::
__MODULE__.t() | {:error, String.t()}
def create(name, abbr, offset_utc, offset_std, from, until) do
%__MODULE__{
full_name: name,
abbreviation: abbr,
offset_std: offset_std,
offset_utc: offset_utc,
from: from || :min,
until: until || :max
}
|> validate_and_return()
end
def from_datetime(%DateTime{
time_zone: name,
zone_abbr: abbr,
std_offset: std_offset,
utc_offset: utc_offset
}) do
%__MODULE__{
full_name: name,
abbreviation: abbr,
offset_std: std_offset,
offset_utc: utc_offset,
from: :min,
until: :max
}
end
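# Hedged example: a UTC DateTime maps onto the default struct values.
#
#     iex> Timex.TimezoneInfo.from_datetime(~U[2015-03-30 12:00:00Z])
#     %Timex.TimezoneInfo{full_name: "Etc/UTC", abbreviation: "UTC",
#       offset_std: 0, offset_utc: 0, from: :min, until: :max}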
@doc false
def to_period(%__MODULE__{offset_utc: utc, offset_std: std, abbreviation: abbr}) do
%{std_offset: std, utc_offset: utc, zone_abbr: abbr}
end
defp validate_and_return(%__MODULE__{} = tz) do
with true <- is_valid_name(tz.full_name),
true <- is_valid_name(tz.abbreviation),
true <- is_valid_offset(tz.offset_std),
true <- is_valid_offset(tz.offset_utc),
true <- is_valid_from_constraint(tz.from),
true <- is_valid_until_constraint(tz.until),
do: tz
end
defp is_valid_name(name) when is_binary(name), do: true
defp is_valid_name(name), do: {:error, "invalid timezone name '#{inspect(name)}'!"}
defp is_valid_offset(offset)
when is_integer(offset) and
(offset < @max_seconds_in_day and offset > -@max_seconds_in_day),
do: true
defp is_valid_offset(offset), do: {:error, "invalid timezone offset '#{inspect(offset)}'"}
defp is_valid_from_constraint(:min), do: true
defp is_valid_from_constraint(:max),
do: {:error, ":max is not a valid from constraint for timezones"}
defp is_valid_from_constraint(c), do: is_valid_constraint(c)
defp is_valid_until_constraint(:min),
do: {:error, ":min is not a valid until constraint for timezones"}
defp is_valid_until_constraint(:max), do: true
defp is_valid_until_constraint(c), do: is_valid_constraint(c)
defp is_valid_constraint({day_of_week, {{y, m, d}, {h, mm, s}}} = datetime)
when day_of_week in @valid_day_names do
cond do
:calendar.valid_date({y, m, d}) ->
valid_hour = h >= 1 and h <= 24
valid_min = mm >= 0 and mm <= 59
valid_sec = s >= 0 and s <= 59
cond do
valid_hour && valid_min && valid_sec ->
true
:else ->
{:error,
"invalid datetime constraint for timezone: #{inspect(datetime)} (invalid time)"}
end
:else ->
{:error, "invalid datetime constraint for timezone: #{inspect(datetime)} (invalid date)"}
end
end
defp is_valid_constraint(c),
do: {:error, "'#{inspect(c)}' is not a valid constraint for timezones"}
end
|
lib/timezone/timezone_info.ex
| 0.919285
| 0.604195
|
timezone_info.ex
|
starcoder
|
defmodule DomainEvent do
@moduledoc """
Events represent facts inside the domain: the record of a decision or a state change that a system wants
to notify to its subscribers. Events are facts that nobody can change, so they are not intentions or requests
of anything. Examples are `UserRegistered` or `NotificationSent`.
Events are the most important concept in a publish-subscribe system, because they let you notify many
stakeholders of a specific occurrence. Another benefit is that the system stays decoupled: you can add more
subscribers without modifying any existing component.
"""
defstruct [
:name,
:eventId,
:data
]
@doc """
Creates a new DomainEvent structure with a generated event_id
## Examples
iex> DomainEvent.new("UserRegistered", %{name: "username", email: "<EMAIL>", createdAt: "2021-05-11T15:00:47.380Z"})
%DomainEvent{
data: %{
createdAt: "2021-05-11T15:00:47.380Z",
email: "<EMAIL>",
name: "username"
},
eventId: "852e6f59-f920-45e0-bce8-75f1e74647ff",
name: "UserRegistered"
}
"""
def new(name, data) do
new_p(name, data, NameGenerator.message_id())
end
@doc """
Creates a new DomainEvent structure
## Examples
iex> DomainEvent.new("UserRegistered", %{name: "username", email: "<EMAIL>", createdAt: "2021-05-11T15:00:47.380Z"}, "852e6f59-f920-45e0-bce8-75f1e74647aa")
%DomainEvent{
data: %{
createdAt: "2021-05-11T15:00:47.380Z",
email: "<EMAIL>",
name: "username"
},
eventId: "852e6f59-f920-45e0-bce8-75f1e74647aa",
name: "UserRegistered"
}
"""
def new(name, data, event_id), do: new_p(name, data, event_id)
defp new_p(nil, _, _), do: raise "Invalid nil name in DomainEvent constructor!"
defp new_p(_, nil, _), do: raise "Invalid nil data in DomainEvent constructor!"
defp new_p(_, _, nil), do: raise "Invalid nil event_id in DomainEvent constructor!"
defp new_p(name, data, event_id) do
%__MODULE__{
name: name,
data: data,
eventId: event_id
}
end
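# Hedged illustration of the nil guards above: constructing with a nil field
# raises immediately rather than producing a partially built event.
#
#     iex> DomainEvent.new(nil, %{})
#     ** (RuntimeError) Invalid nil name in DomainEvent constructor!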
end
|
lib/reactive_commons/api/domain_event.ex
| 0.743354
| 0.486027
|
domain_event.ex
|
starcoder
|
defmodule AWS.CloudWatchEvents do
@moduledoc """
Amazon EventBridge helps you to respond to state changes in your AWS resources.
When your resources change state, they automatically send events into an event
stream. You can create rules that match selected events in the stream and route
them to targets to take action. You can also use rules to take action on a
predetermined schedule. For example, you can configure rules to:
* Automatically invoke an AWS Lambda function to update DNS entries
when an event notifies you that an Amazon EC2 instance enters the running state.
* Direct specific API records from AWS CloudTrail to an Amazon
Kinesis data stream for detailed analysis of potential security or availability
risks.
* Periodically invoke a built-in target to create a snapshot of an
Amazon EBS volume.
For more information about the features of Amazon EventBridge, see the [Amazon EventBridge User
Guide](https://docs.aws.amazon.com/eventbridge/latest/userguide).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2015-10-07",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "events",
global?: false,
protocol: "json",
service_id: "CloudWatch Events",
signature_version: "v4",
signing_name: "events",
target_prefix: "AWSEvents"
}
end
@doc """
Activates a partner event source that has been deactivated.
Once activated, your matching event bus will start receiving events from the
event source.
"""
def activate_event_source(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ActivateEventSource", input, options)
end
@doc """
Cancels the specified replay.
"""
def cancel_replay(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CancelReplay", input, options)
end
@doc """
Creates an archive of events with the specified settings.
When you create an archive, incoming events might not immediately start being
sent to the archive. Allow a short period of time for changes to take effect. If
you do not specify a pattern to filter events sent to the archive, all events
are sent to the archive except replayed events. Replayed events are not sent to
an archive.
"""
def create_archive(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateArchive", input, options)
end
@doc """
Creates a new event bus within your account.
This can be a custom event bus which you can use to receive events from your
custom applications and services, or it can be a partner event bus which can be
matched to a partner event source.
"""
def create_event_bus(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateEventBus", input, options)
end
@doc """
Called by a SaaS partner to create a partner event source.
This operation is not used by AWS customers.
Each partner event source can be used by one AWS account to create a matching
partner event bus in that AWS account. A SaaS partner must create one partner
event source for each AWS account that wants to receive those event types.
A partner event source creates events based on resources within the SaaS
partner's service or application.
An AWS account that creates a partner event bus that matches the partner event
source can use that event bus to receive events from the partner, and then
process them using AWS Events rules and targets.
Partner event source names follow this format:
` *partner_name*/*event_namespace*/*event_name* `
*partner_name* is determined during partner registration and identifies the
partner to AWS customers. *event_namespace* is determined by the partner and is
a way for the partner to categorize their events. *event_name* is determined by
the partner, and should uniquely identify an event-generating resource within
the partner system. The combination of *event_namespace* and *event_name* should
help AWS customers decide whether to create an event bus to receive these
events.
"""
def create_partner_event_source(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreatePartnerEventSource", input, options)
end
@doc """
You can use this operation to temporarily stop receiving events from the
specified partner event source.
The matching event bus is not deleted.
When you deactivate a partner event source, the source goes into PENDING state.
If it remains in PENDING state for more than two weeks, it is deleted.
To activate a deactivated partner event source, use `ActivateEventSource`.
"""
def deactivate_event_source(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeactivateEventSource", input, options)
end
@doc """
Deletes the specified archive.
"""
def delete_archive(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteArchive", input, options)
end
@doc """
Deletes the specified custom event bus or partner event bus.
All rules associated with this event bus need to be deleted. You can't delete
your account's default event bus.
"""
def delete_event_bus(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteEventBus", input, options)
end
@doc """
This operation is used by SaaS partners to delete a partner event source.
This operation is not used by AWS customers.
When you delete an event source, the status of the corresponding partner event
bus in the AWS customer account becomes DELETED.
"""
def delete_partner_event_source(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeletePartnerEventSource", input, options)
end
@doc """
Deletes the specified rule.
Before you can delete the rule, you must remove all targets, using
`RemoveTargets`.
When you delete a rule, incoming events might continue to match to the deleted
rule. Allow a short period of time for changes to take effect.
Managed rules are rules created and managed by another AWS service on your
behalf. These rules are created by those other AWS services to support
functionality in those services. You can delete these rules using the `Force`
option, but you should do so only if you are sure the other service is not still
using that rule.
"""
def delete_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteRule", input, options)
end
@doc """
Retrieves details about an archive.
"""
def describe_archive(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeArchive", input, options)
end
@doc """
Displays details about an event bus in your account.
This can include the external AWS accounts that are permitted to write events to
your default event bus, and the associated policy. For custom event buses and
partner event buses, it displays the name, ARN, policy, state, and creation
time.
To enable your account to receive events from other accounts on its default
event bus, use `PutPermission`.
For more information about partner event buses, see `CreateEventBus`.
"""
def describe_event_bus(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeEventBus", input, options)
end
@doc """
This operation lists details about a partner event source that is shared with
your account.
"""
def describe_event_source(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeEventSource", input, options)
end
@doc """
A SaaS partner can use this operation to list details about a partner event
source that they have created.
AWS customers do not use this operation. Instead, AWS customers can use
`DescribeEventSource` to see details about a partner event source that is shared
with them.
"""
def describe_partner_event_source(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribePartnerEventSource", input, options)
end
@doc """
Retrieves details about a replay.
Use `DescribeReplay` to determine the progress of a running replay. A replay
processes events to replay based on the time in the event, and replays them
using 1 minute intervals. If you use `StartReplay` and specify an
`EventStartTime` and an `EventEndTime` that covers a 20 minute time range, the
events are replayed from the first minute of that 20 minute range first. Then
the events from the second minute are replayed. You can use `DescribeReplay` to
determine the progress of a replay. The value returned for
`EventLastReplayedTime` indicates the time within the specified time range
associated with the last event replayed.
"""
def describe_replay(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeReplay", input, options)
end
@doc """
Describes the specified rule.
DescribeRule does not list the targets of a rule. To see the targets associated
with a rule, use `ListTargetsByRule`.
"""
def describe_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeRule", input, options)
end
@doc """
Disables the specified rule.
A disabled rule won't match any events, and won't self-trigger if it has a
schedule expression.
When you disable a rule, incoming events might continue to match to the disabled
rule. Allow a short period of time for changes to take effect.
"""
def disable_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisableRule", input, options)
end
@doc """
Enables the specified rule.
If the rule does not exist, the operation fails.
When you enable a rule, incoming events might not immediately start matching to
a newly enabled rule. Allow a short period of time for changes to take effect.
"""
def enable_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "EnableRule", input, options)
end
@doc """
Lists your archives.
You can either list all the archives or you can provide a prefix to match to the
archive names. Filter parameters are exclusive.
"""
def list_archives(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListArchives", input, options)
end
@doc """
Lists all the event buses in your account, including the default event bus,
custom event buses, and partner event buses.
"""
def list_event_buses(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListEventBuses", input, options)
end
@doc """
You can use this to see all the partner event sources that have been shared with
your AWS account.
For more information about partner event sources, see `CreateEventBus`.
"""
def list_event_sources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListEventSources", input, options)
end
@doc """
A SaaS partner can use this operation to display the AWS account ID that a
particular partner event source name is associated with.
This operation is not used by AWS customers.
"""
def list_partner_event_source_accounts(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPartnerEventSourceAccounts", input, options)
end
@doc """
A SaaS partner can use this operation to list all the partner event source
names that they have created.
This operation is not used by AWS customers.
"""
def list_partner_event_sources(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListPartnerEventSources", input, options)
end
@doc """
Lists your replays.
You can either list all the replays or you can provide a prefix to match to the
replay names. Filter parameters are exclusive.
"""
def list_replays(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListReplays", input, options)
end
@doc """
Lists the rules for the specified target.
You can see which of the rules in Amazon EventBridge can invoke a specific
target in your account.
"""
def list_rule_names_by_target(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListRuleNamesByTarget", input, options)
end
@doc """
Lists your Amazon EventBridge rules.
You can either list all the rules or you can provide a prefix to match to the
rule names.
ListRules does not list the targets of a rule. To see the targets associated
with a rule, use `ListTargetsByRule`.
"""
def list_rules(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListRules", input, options)
end
@doc """
Displays the tags associated with an EventBridge resource.
In EventBridge, rules and event buses can be tagged.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Lists the targets assigned to the specified rule.
"""
def list_targets_by_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTargetsByRule", input, options)
end
@doc """
Sends custom events to Amazon EventBridge so that they can be matched to rules.
"""
def put_events(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutEvents", input, options)
end
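# Hedged usage sketch — client construction and entry fields follow aws-elixir
# conventions and the PutEvents API shape; all values are illustrative:
#
#     client = AWS.Client.create("ACCESS_KEY_ID", "SECRET_ACCESS_KEY", "us-east-1")
#     input = %{
#       "Entries" => [
#         %{"Source" => "my.app", "DetailType" => "order.created",
#           "Detail" => ~s({"order_id": 123}), "EventBusName" => "default"}
#       ]
#     }
#     AWS.CloudWatchEvents.put_events(client, input)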
@doc """
This is used by SaaS partners to write events to a customer's partner event bus.
AWS customers do not use this operation.
"""
def put_partner_events(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutPartnerEvents", input, options)
end
@doc """
Running `PutPermission` permits the specified AWS account or AWS organization to
put events to the specified *event bus*.
Amazon EventBridge (CloudWatch Events) rules in your account are triggered by
these events arriving to an event bus in your account.
For another account to send events to your account, that external account must
have an EventBridge rule with your account's event bus as a target.
To enable multiple AWS accounts to put events to your event bus, run
`PutPermission` once for each of these accounts. Or, if all the accounts are
members of the same AWS organization, you can run `PutPermission` once
specifying `Principal` as "*" and specifying the AWS organization ID in
`Condition`, to grant permissions to all accounts in that organization.
If you grant permissions using an organization, then accounts in that
organization must specify a `RoleArn` with proper permissions when they use
`PutTarget` to add your account's event bus as a target. For more information,
see [Sending and Receiving Events Between AWS Accounts](https://docs.aws.amazon.com/eventbridge/latest/userguide/eventbridge-cross-account-event-delivery.html)
in the *Amazon EventBridge User Guide*.
The permission policy on the default event bus cannot exceed 10 KB in size.
"""
def put_permission(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutPermission", input, options)
end
@doc """
Creates or updates the specified rule.
Rules are enabled by default, or based on the value of the state. You can disable a
rule using `DisableRule`.
A single rule watches for events from a single event bus. Events generated by
AWS services go to your account's default event bus. Events generated by SaaS
partner services or applications go to the matching partner event bus. If you
have custom applications or services, you can specify whether their events go to
your default event bus or a custom event bus that you have created. For more
information, see `CreateEventBus`.
If you are updating an existing rule, the rule is replaced with what you specify
in this `PutRule` command. If you omit arguments in `PutRule`, the old values
for those arguments are not kept. Instead, they are replaced with null values.
When you create or update a rule, incoming events might not immediately start
matching to new or updated rules. Allow a short period of time for changes to
take effect.
A rule must contain at least an EventPattern or ScheduleExpression. Rules with
EventPatterns are triggered when a matching event is observed. Rules with
ScheduleExpressions self-trigger based on the given schedule. A rule can have
both an EventPattern and a ScheduleExpression, in which case the rule triggers
on matching events as well as on a schedule.
When you initially create a rule, you can optionally assign one or more tags to
the rule. Tags can help you organize and categorize your resources. You can also
use them to scope user permissions, by granting a user permission to access or
change only rules with certain tag values. To use the `PutRule` operation and
assign tags, you must have both the `events:PutRule` and `events:TagResource`
permissions.
If you are updating an existing rule, any tags you specify in the `PutRule`
operation are ignored. To update the tags of an existing rule, use `TagResource`
and `UntagResource`.
Most services in AWS treat : or / as the same character in Amazon Resource Names
(ARNs). However, EventBridge uses an exact match in event patterns and rules. Be
sure to use the correct ARN characters when creating event patterns so that they
match the ARN syntax in the event you want to match.
In EventBridge, it is possible to create rules that lead to infinite loops,
where a rule is fired repeatedly. For example, a rule might detect that ACLs
have changed on an S3 bucket, and trigger software to change them to the desired
state. If the rule is not written carefully, the subsequent change to the ACLs
fires the rule again, creating an infinite loop.
To prevent this, write the rules so that the triggered actions do not re-fire
the same rule. For example, your rule could fire only if ACLs are found to be in
a bad state, instead of after any change.
An infinite loop can quickly cause higher than expected charges. We recommend
that you use budgeting, which alerts you when charges exceed your specified
limit. For more information, see [Managing Your Costs with Budgets](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/budgets-managing-costs.html).
"""
def put_rule(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutRule", input, options)
end
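# Hedged input sketch for a scheduled rule (field names follow the PutRule API;
# values are illustrative):
#
#     input = %{
#       "Name" => "nightly-backup",
#       "ScheduleExpression" => "cron(0 3 * * ? *)",
#       "State" => "ENABLED"
#     }
#     AWS.CloudWatchEvents.put_rule(client, input)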
@doc """
Adds the specified targets to the specified rule, or updates the targets if they
are already associated with the rule.
Targets are the resources that are invoked when a rule is triggered.
You can configure the following as targets for Events:
* EC2 instances
* SSM Run Command
* SSM Automation
* AWS Lambda functions
* Data streams in Amazon Kinesis Data Streams
* Data delivery streams in Amazon Kinesis Data Firehose
* Amazon ECS tasks
* AWS Step Functions state machines
* AWS Batch jobs
* AWS CodeBuild projects
* Pipelines in AWS CodePipeline
* Amazon Inspector assessment templates
* Amazon SNS topics
* Amazon SQS queues, including FIFO queues
* The default event bus of another AWS account
* Amazon API Gateway REST APIs
* Redshift Clusters to invoke Data API ExecuteStatement on
Creating rules with built-in targets is supported only in the AWS Management
Console. The built-in targets are `EC2 CreateSnapshot API call`, `EC2
RebootInstances API call`, `EC2 StopInstances API call`, and `EC2
TerminateInstances API call`.
For some target types, `PutTargets` provides target-specific parameters. If the
target is a Kinesis data stream, you can optionally specify which shard the
event goes to by using the `KinesisParameters` argument. To invoke a command on
multiple EC2 instances with one rule, you can use the `RunCommandParameters`
field.
To be able to make API calls against the resources that you own, Amazon
EventBridge (CloudWatch Events) needs the appropriate permissions. For AWS
Lambda and Amazon SNS resources, EventBridge relies on resource-based policies.
For EC2 instances, Kinesis data streams, AWS Step Functions state machines and
API Gateway REST APIs, EventBridge relies on IAM roles that you specify in the
`RoleARN` argument in `PutTargets`. For more information, see [Authentication and Access
Control](https://docs.aws.amazon.com/eventbridge/latest/userguide/auth-and-access-control-eventbridge.html)
in the *Amazon EventBridge User Guide*.
If another AWS account is in the same region and has granted you permission
(using `PutPermission`), you can send events to that account. Set that account's
event bus as a target of the rules in your account. To send the matched events
to the other account, specify that account's event bus as the `Arn` value when
you run `PutTargets`. If your account sends events to another account, your
account is charged for each sent event. Each event sent to another account is
charged as a custom event. The account receiving the event is not charged. For
more information, see [Amazon EventBridge (CloudWatch Events) Pricing](https://aws.amazon.com/eventbridge/pricing/).
`Input`, `InputPath`, and `InputTransformer` are not available with `PutTarget`
if the target is an event bus of a different AWS account.
If you are setting the event bus of another account as the target, and that
account granted permission to your account through an organization instead of
directly by the account ID, then you must specify a `RoleArn` with proper
permissions in the `Target` structure. For more information, see [Sending and Receiving Events Between AWS
Accounts](https://docs.aws.amazon.com/eventbridge/latest/userguide/eventbridge-cross-account-event-delivery.html)
in the *Amazon EventBridge User Guide*.
For more information about enabling cross-account events, see `PutPermission`.
**Input**, **InputPath**, and **InputTransformer** are mutually exclusive and
optional parameters of a target. When a rule is triggered due to a matched
event:
* If none of the following arguments are specified for a target,
then the entire event is passed to the target in JSON format (unless the target
is Amazon EC2 Run Command or Amazon ECS task, in which case nothing from the
event is passed to the target).
* If **Input** is specified in the form of valid JSON, then the
matched event is overridden with this constant.
* If **InputPath** is specified in the form of JSONPath (for
example, `$.detail`), then only the part of the event specified in the path is
passed to the target (for example, only the detail part of the event is passed).
* If **InputTransformer** is specified, then one or more specified
JSONPaths are extracted from the event and used as values in a template that you
specify as the input to the target.
When you specify `InputPath` or `InputTransformer`, you must use JSON dot
notation, not bracket notation.
When you add targets to a rule and the associated rule triggers soon after, new
or updated targets might not be immediately invoked. Allow a short period of
time for changes to take effect.
This action can partially fail if too many requests are made at the same time.
If that happens, `FailedEntryCount` is non-zero in the response and each entry
in `FailedEntries` provides the ID of the failed target and the error code.
"""
def put_targets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutTargets", input, options)
end
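# Hedged input sketch for put_targets (field names follow the PutTargets API;
# the ARN is illustrative):
#
#     input = %{
#       "Rule" => "nightly-backup",
#       "Targets" => [
#         %{"Id" => "backup-fn",
#           "Arn" => "arn:aws:lambda:us-east-1:123456789012:function:backup"}
#       ]
#     }
#     AWS.CloudWatchEvents.put_targets(client, input)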
@doc """
Revokes the permission of another AWS account to be able to put events to the
specified event bus.
Specify the account to revoke by the `StatementId` value that you associated
with the account when you granted it permission with `PutPermission`. You can
find the `StatementId` by using `DescribeEventBus`.
"""
def remove_permission(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemovePermission", input, options)
end
@doc """
Removes the specified targets from the specified rule.
When the rule is triggered, those targets are no longer invoked.
After you remove a target, it might continue to be invoked for a short while
when the associated rule triggers. Allow a short period of time for changes to
take effect.
This action can partially fail if too many requests are made at the same time.
If that happens, `FailedEntryCount` is non-zero in the response and each entry
in `FailedEntries` provides the ID of the failed target and the error code.
"""
def remove_targets(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "RemoveTargets", input, options)
end
@doc """
Starts the specified replay.
Events are not necessarily replayed in the exact same order that they were added
to the archive. A replay processes events to replay based on the time in the
event, and replays them using 1 minute intervals. If you specify an
`EventStartTime` and an `EventEndTime` that covers a 20 minute time range, the
events are replayed from the first minute of that 20 minute range first. Then
the events from the second minute are replayed. You can use `DescribeReplay` to
determine the progress of a replay. The value returned for
`EventLastReplayedTime` indicates the time within the specified time range
associated with the last event replayed.
"""
def start_replay(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartReplay", input, options)
end
@doc """
Assigns one or more tags (key-value pairs) to the specified EventBridge
resource.
Tags can help you organize and categorize your resources. You can also use them
to scope user permissions by granting a user permission to access or change only
resources with certain tag values. In EventBridge, rules and event buses can be
tagged.
Tags don't have any semantic meaning to AWS and are interpreted strictly as
strings of characters.
You can use the `TagResource` action with a resource that already has tags. If
you specify a new tag key, this tag is appended to the list of tags associated
with the resource. If you specify a tag key that is already associated with the
resource, the new tag value that you specify replaces the previous value for
that tag.
You can associate as many as 50 tags with a resource.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Tests whether the specified event pattern matches the provided event.
Most services in AWS treat : or / as the same character in Amazon Resource Names
(ARNs). However, EventBridge uses an exact match in event patterns and rules. Be
sure to use the correct ARN characters when creating event patterns so that they
match the ARN syntax in the event you want to match.
"""
def test_event_pattern(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TestEventPattern", input, options)
end
@doc """
Removes one or more tags from the specified EventBridge resource.
In Amazon EventBridge (CloudWatch Events), rules and event buses can be tagged.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates the specified archive.
"""
def update_archive(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateArchive", input, options)
end
end
|
lib/aws/generated/cloud_watch_events.ex
| 0.88258
| 0.434221
|
cloud_watch_events.ex
|
starcoder
|
defmodule Surface.Compiler.ParseTreeTranslator do
@behaviour Surface.Compiler.NodeTranslator
alias Surface.IOHelper
alias Surface.Compiler.Helpers
def handle_init(state), do: state
def handle_expression(expression, meta, state) do
{{:expr, expression, to_meta(meta)}, state}
end
def handle_comment(comment, meta, state) do
{{:comment, comment, meta}, state}
end
def handle_node("template", attributes, body, meta, state, context) do
message = """
using <template> to fill slots has been deprecated and will be removed in \
future versions.
Hint: replace `<template>` with `<#template>`
"""
IOHelper.warn(message, state.caller, fn _ -> meta.line end)
handle_node("#template", attributes, body, meta, state, context)
end
def handle_node("slot", attributes, body, meta, state, context) do
message = """
using <slot> to define component slots has been deprecated and will be removed in \
future versions.
Hint: replace `<slot>` with `<#slot>`
"""
IOHelper.warn(message, state.caller, fn _ -> meta.line end)
handle_node("#slot", attributes, body, meta, state, context)
end
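# E.g. (hedged): `<template slot="header">...</template>` is handled as if the
# user had written `<#template slot="header">...</#template>`, after the
# deprecation warning above is emitted; the same applies to `<slot>`.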
def handle_node(name, attributes, body, meta, state, _context) do
{{name, attributes, body, to_meta(meta)}, state}
end
def handle_block(name, expr, body, meta, state, _context) do
{{:block, name, expr, body, to_meta(meta)}, state}
end
def handle_subblock(:default, expr, children, meta, state, %{parent_block: "case"}) do
if !Helpers.blank?(children) do
message = "cannot have content between {#case ...} and {#match ...}"
IOHelper.compile_error(message, meta.file, meta.line)
end
{{:block, :default, expr, [], to_meta(meta)}, state}
end
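# E.g. (hedged): this clause rejects templates like
#
#     {#case @value}
#       stray text
#       {#match 1}...{/case}
#
# because the text between {#case ...} and the first {#match ...} is not blank.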
def handle_subblock(:default, expr, children, meta, state, _context) do
{{:block, :default, expr, children, to_meta(meta)}, state}
end
def handle_subblock(name, expr, children, meta, state, _context) do
{{:block, name, expr, children, to_meta(meta)}, state}
end
def handle_text(text, state) do
{text, state}
end
# TODO: Update these after accepting the expression directly instead of the :root attribute
def handle_block_expression(_block_name, nil, _state, _context) do
[]
end
def handle_block_expression(_block_name, {:expr, expr, expr_meta}, _state, _context) do
meta = to_meta(expr_meta)
[{:root, {:attribute_expr, expr, meta}, meta}]
end
def handle_attribute(
:root,
{:tagged_expr, "...", expr, _marker_meta},
attr_meta,
_state,
context
) do
{:expr, value, expr_meta} = expr
%{tag_name: tag_name} = context
directive =
case tag_name do
<<first, _::binary>> when first in ?A..?Z ->
":props"
_ ->
":attrs"
end
{directive, {:attribute_expr, value, to_meta(expr_meta)}, to_meta(attr_meta)}
end
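# For example (hedged): on a component node like `<Card {...@props}>` the clause
# above rewrites the root spread expression into the ":props" directive, while
# on an HTML node like `<div {...@attrs}>` it becomes ":attrs".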
def handle_attribute(
name,
{:tagged_expr, "...", expr, marker_meta},
_attr_meta,
_state,
_context
) do
{:expr, value, _expr_meta} = expr
message = """
cannot assign `{...#{value}}` to attribute `#{name}`. \
The tagged expression `{... }` can only be used on a root attribute/property.
Example: <div {...@attrs}>
"""
IOHelper.compile_error(message, marker_meta.file, marker_meta.line)
end
def handle_attribute(
:root,
{:tagged_expr, "=", expr, _marker_meta},
attr_meta,
_state,
context
) do
{:expr, value, expr_meta} = expr
%{tag_name: tag_name} = context
original_name = strip_name_from_tagged_expr_equals!(value, expr_meta)
name =
case tag_name do
<<first, _::binary>> when first in ?A..?Z ->
original_name
_ ->
String.replace(original_name, "_", "-")
end
{name, {:attribute_expr, value, to_meta(expr_meta)}, to_meta(attr_meta)}
end
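# For example (hedged): `<Card {=@title}>` yields the "title" property name
# unchanged, while on an HTML node `<div {=@aria_label}>` underscores are
# replaced with dashes, producing the "aria-label" attribute.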
def handle_attribute(
name,
{:tagged_expr, "=", expr, marker_meta},
_attr_meta,
_state,
_context
) do
{:expr, value, _expr_meta} = expr
message = """
cannot assign `{=#{value}}` to attribute `#{name}`. \
The tagged expression `{= }` can only be used on a root attribute/property.
Example: <div {=@class}>
"""
IOHelper.compile_error(message, marker_meta.file, marker_meta.line)
end
def handle_attribute(name, {:expr, expr, expr_meta}, attr_meta, _state, _context) do
{name, {:attribute_expr, expr, to_meta(expr_meta)}, to_meta(attr_meta)}
end
def handle_attribute(name, value, attr_meta, _state, _context) do
{name, value, to_meta(attr_meta)}
end
def context_for_node(name, _meta, _state) do
%{tag_name: name}
end
def context_for_subblock(name, _meta, _state, parent_context) do
%{sub_block: name, parent_block: Map.get(parent_context, :block_name)}
end
def context_for_block(name, _meta, _state) do
%{block_name: name}
end
def to_meta(%{void_tag?: true} = meta) do
drop_common_keys(meta)
end
def to_meta(meta) do
meta
|> Map.drop([:void_tag?])
|> drop_common_keys()
end
defp drop_common_keys(meta) do
Map.drop(meta, [
:self_close,
:line_end,
:column_end,
:node_line_end,
:node_column_end,
:macro?,
:ignored_body?
])
end
defp strip_name_from_tagged_expr_equals!(value, expr_meta) do
case Code.string_to_quoted(value) do
{:ok, {:@, _, [{name, _, _}]}} when is_atom(name) ->
to_string(name)
{:ok, {name, _, _}} when is_atom(name) ->
to_string(name)
_ ->
message = """
invalid value for tagged expression `{=#{value}}`. \
The expression must be either an assign or a variable.
Examples: `<div {=@class}>` or `<div {=class}>`
"""
IOHelper.compile_error(message, expr_meta.file, expr_meta.line)
end
end
end
|
lib/surface/compiler/parse_tree_translator.ex
| 0.592431
| 0.406803
|
parse_tree_translator.ex
|
starcoder
|