code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Belp do
  @moduledoc """
  Belp is a simple Boolean Expression Lexer and Parser.
  """

  alias Belp.{AST, InvalidCharError, SyntaxError, UndefinedVariableError}

  @typedoc """
  A type that describes which primitives can be used as expression.
  """
  @type expr :: String.t() | charlist

  @doc """
  Evaluates the given expression.

  ## Examples

      iex> Belp.eval("foo and bar", foo: true, bar: false)
      {:ok, false}
      iex> Belp.eval("foo or bar", %{"foo" => true, "bar" => false})
      {:ok, true}
      iex> Belp.eval("bar", %{foo: true})
      {:error, %Belp.UndefinedVariableError{var: "bar"}}
      iex> Belp.eval("invalid expression")
      {:error, %Belp.SyntaxError{line: 1, token: "expression"}}
      iex> Belp.eval("foo || bar")
      {:error, %Belp.InvalidCharError{char: "|", line: 1}}
  """
  @spec eval(
          expr,
          Keyword.t(as_boolean(any))
          | %{optional(atom | String.t()) => as_boolean(any)}
        ) ::
          {:ok, boolean}
          | {:error,
             InvalidCharError.t() | SyntaxError.t() | UndefinedVariableError.t()}
  def eval(expr, vars \\ %{}) do
    # Lex/parse errors short-circuit via `with`; on success the AST is
    # evaluated against the normalized variable bindings.
    with {:ok, _tokens, ast} <- parse(expr) do
      AST.eval(ast, sanitize_vars(vars))
    end
  end

  @doc """
  Evaluates the given expression. Raises when the expression is invalid or
  variables are undefined.

  ## Examples

      iex> Belp.eval!("foo and bar", foo: true, bar: false)
      false
      iex> Belp.eval!("foo or bar", %{"foo" => true, "bar" => false})
      true
      iex> Belp.eval!("bar", %{foo: true})
      ** (Belp.UndefinedVariableError) Undefined variable: bar
      iex> Belp.eval!("invalid expression")
      ** (Belp.SyntaxError) Syntax error near token "expression" on line 1
      iex> Belp.eval!("foo || bar")
      ** (Belp.InvalidCharError) Invalid character "|" on line 1
  """
  @spec eval!(
          expr,
          Keyword.t(as_boolean(any))
          | %{optional(atom | String.t()) => as_boolean(any)}
        ) :: boolean | no_return
  def eval!(expr, vars \\ %{}) do
    expr |> eval(vars) |> may_bang!()
  end

  @doc """
  Checks whether the given expression is valid.

  ## Examples

      iex> Belp.valid_expression?("foo and bar")
      true
      iex> Belp.valid_expression?("invalid expression")
      false
      iex> Belp.valid_expression?("foo || bar")
      false
  """
  @spec valid_expression?(expr) :: boolean
  def valid_expression?(expr) do
    validate(expr) == :ok
  end

  @doc """
  Validates the given expression, returning an error tuple in case the
  expression is invalid.

  ## Examples

      iex> Belp.validate("foo and bar")
      :ok
      iex> Belp.validate("invalid expression")
      {:error, %Belp.SyntaxError{line: 1, token: "expression"}}
      iex> Belp.validate("foo || bar")
      {:error, %Belp.InvalidCharError{char: "|", line: 1}}
  """
  @doc since: "0.2.0"
  @spec validate(expr) :: :ok | {:error, InvalidCharError.t() | SyntaxError.t()}
  def validate(expr) do
    with {:ok, _tokens, _ast} <- parse(expr), do: :ok
  end

  @doc """
  Gets a list of variable names that are present in the given expression.

  ## Examples

      iex> Belp.variables("(foo and bar) or !foo")
      {:ok, ["foo", "bar"]}
      iex> Belp.variables("invalid expression")
      {:error, %Belp.SyntaxError{line: 1, token: "expression"}}
      iex> Belp.variables("foo || bar")
      {:error, %Belp.InvalidCharError{char: "|", line: 1}}
  """
  @spec variables(expr) ::
          {:ok, [String.t()]}
          | {:error, InvalidCharError.t() | SyntaxError.t()}
  def variables(expr) do
    with {:ok, tokens, _ast} <- parse(expr) do
      # Collect the payload of every {:var, _, name} token emitted by the
      # lexer; `uniq: true` preserves first-seen order while deduplicating.
      vars =
        for token <- tokens,
            match?({:var, _, _}, token),
            {:var, _, var} = token,
            uniq: true,
            do: var

      {:ok, vars}
    end
  end

  @doc """
  Gets a list of variable names that are present in the given expression. Raises
  when the expression is invalid or variables are undefined.

  ## Examples

      iex> Belp.variables!("(foo and bar) or !foo")
      ["foo", "bar"]
      iex> Belp.variables!("invalid expression")
      ** (Belp.SyntaxError) Syntax error near token "expression" on line 1
      iex> Belp.variables!("foo || bar")
      ** (Belp.InvalidCharError) Invalid character "|" on line 1
  """
  @spec variables!(expr) :: [String.t()] | no_return
  def variables!(expr) do
    expr |> variables() |> may_bang!()
  end

  # Tokenizes the expression using the generated leex lexer, translating
  # illegal-character errors into an InvalidCharError struct.
  defp lex(expr) do
    case :belp_lexer.string(to_charlist(expr)) do
      {:ok, tokens, _lines} ->
        {:ok, tokens}

      {:error, {line, _, {:illegal, char}}, _lines} ->
        {:error, %InvalidCharError{char: to_string(char), line: line}}
    end
  end

  # Unwraps {:ok, value} or raises the error struct from {:error, error};
  # backs the bang variants (eval!/2, variables!/1).
  defp may_bang!({:ok, value}), do: value
  defp may_bang!({:error, error}), do: raise(error)

  # Runs lexer then parser; returns tokens alongside the AST so callers like
  # variables/1 can inspect the token stream without re-lexing.
  defp parse(expr) do
    with {:ok, tokens} <- lex(expr),
         {:ok, ast} <- do_parse(tokens) do
      {:ok, tokens, ast}
    end
  end

  # Invokes the generated yecc parser, translating its error tuple into a
  # SyntaxError struct.
  defp do_parse(tokens) do
    case :belp_parser.parse(tokens) do
      {:ok, ast} ->
        {:ok, ast}

      {:error, {line, _, [_, token]}} ->
        {:error, %SyntaxError{line: line, token: to_string(token)}}
    end
  end

  # Normalizes variable bindings (keyword list or map, atom or string keys)
  # into a map with string keys and strictly boolean values.
  defp sanitize_vars(vars) do
    Map.new(vars, fn {key, value} -> {to_string(key), !!value} end)
  end
end
|
lib/belp.ex
| 0.884962
| 0.483466
|
belp.ex
|
starcoder
|
defmodule RDF.Literal.Datatype do
  @moduledoc """
  A behaviour for datatypes for `RDF.Literal`s.

  An implementation of this behaviour defines a struct for a datatype IRI and the semantics of its
  values via the functions defined by this behaviour.
  There are three important groups of `RDF.Literal.Datatype` implementations:
  - `RDF.XSD.Datatype`: This is another, more specific behaviour for XSD datatypes. RDF.ex comes with
    builtin implementations of this behaviour for the most important XSD datatypes, but you define
    your own custom datatypes by deriving from these builtin datatypes and constraining them via
    `RDF.XSD.Facet`s.
  - Non-XSD datatypes which implement the `RDF.Literal.Datatype` directly: There's currently only one
    builtin datatype of this category - `RDF.LangString` for language tagged RDF literals.
  - `RDF.Literal.Generic`: This is a generic implementation which is used for `RDF.Literal`s with a
    datatype that has no own `RDF.Literal.Datatype` implementation defining its semantics.
  """

  alias RDF.{Literal, IRI}

  # A datatype is represented by the module implementing this behaviour.
  @type t :: module

  # Any struct whose __struct__ is a datatype module (the inner literal struct).
  @type literal :: %{:__struct__ => t(), optional(atom()) => any()}

  @type comparison_result :: :lt | :gt | :eq

  @doc """
  The name of the datatype.
  """
  @callback name :: String.t()

  @doc """
  The IRI of the datatype.
  """
  @callback id :: IRI.t() | nil

  @callback new(any) :: Literal.t()
  @callback new(any, Keyword.t()) :: Literal.t()
  @callback new!(any) :: Literal.t()
  @callback new!(any, Keyword.t()) :: Literal.t()

  @doc """
  Callback for datatype specific castings.

  This callback is called by the auto-generated `cast/1` function on the implementations, which already deals with the basic cases.
  So, implementations can assume the passed argument is a valid `RDF.Literal.Datatype` struct,
  a `RDF.IRI` or a `RDF.BlankNode`.
  If the given literal can not be converted into this datatype an implementation should return `nil`.
  A final catch-all clause should delegate to `super`. For example `RDF.XSD.Datatype`s will handle casting from derived
  datatypes in the default implementation.
  """
  @callback do_cast(literal | RDF.IRI.t | RDF.BlankNode.t) :: Literal.t() | nil

  @doc """
  Checks if the given `RDF.Literal` has the datatype for which the `RDF.Literal.Datatype` is implemented or is derived from it.

  ## Example

      iex> RDF.XSD.byte(42) |> RDF.XSD.Integer.datatype?()
      true
  """
  @callback datatype?(Literal.t | t | literal) :: boolean

  @doc """
  The datatype IRI of the given `RDF.Literal`.
  """
  @callback datatype_id(Literal.t | literal) :: IRI.t()

  @doc """
  The language of the given `RDF.Literal` if present.
  """
  @callback language(Literal.t | literal) :: String.t() | nil

  @doc """
  Returns the value of a `RDF.Literal`.

  This function also accepts literals of derived datatypes.
  """
  @callback value(Literal.t | literal) :: any

  @doc """
  Returns the lexical form of a `RDF.Literal`.
  """
  @callback lexical(Literal.t() | literal) :: String.t()

  @doc """
  Produces the canonical representation of a `RDF.Literal`.
  """
  @callback canonical(Literal.t() | literal) :: Literal.t()

  @doc """
  Returns the canonical lexical form of a `RDF.Literal`.

  If the given literal is invalid, `nil` is returned.
  """
  @callback canonical_lexical(Literal.t() | literal) :: String.t() | nil

  @doc """
  Determines if the lexical form of a `RDF.Literal` is the canonical form.

  Note: For `RDF.Literal.Generic` literals with the canonical form not defined,
  this always returns `true`.
  """
  @callback canonical?(Literal.t() | literal | any) :: boolean

  @doc """
  Determines if a `RDF.Literal` has a proper value of the value space of its datatype.

  This function also accepts literals of derived datatypes.
  """
  @callback valid?(Literal.t() | literal | any) :: boolean

  @doc """
  Callback for datatype specific `equal_value?/2` comparisons when the given literals have the same or derived datatypes.

  This callback is called by auto-generated `equal_value?/2` function when the given literals have
  the same datatype or one is derived from the other.
  Should return `nil` when the given arguments are not comparable as literals of this
  datatype. This behaviour is particularly important for SPARQL.ex where this
  function is used for the `=` operator, where comparisons between incomparable
  terms are treated as errors and immediately leads to a rejection of a possible
  match.

  See also `c:do_equal_value_different_datatypes?/2`.
  """
  @callback do_equal_value_same_or_derived_datatypes?(literal, literal) :: boolean | nil

  @doc """
  Callback for datatype specific `equal_value?/2` comparisons when the given literals have different datatypes.

  This callback is called by auto-generated `equal_value?/2` function when the given literals have
  different datatypes and are not derived from each other.
  Should return `nil` when the given arguments are not comparable as literals of this
  datatype. This behaviour is particularly important for SPARQL.ex where this
  function is used for the `=` operator, where comparisons between incomparable
  terms are treated as errors and immediately leads to a rejection of a possible
  match.

  See also `c:do_equal_value_same_or_derived_datatypes?/2`.
  """
  @callback do_equal_value_different_datatypes?(literal, literal) :: boolean | nil

  @doc """
  Callback for datatype specific `compare/2` comparisons between two `RDF.Literal`s.

  This callback is called by auto-generated `compare/2` function on the implementations, which already deals with the basic cases.
  So, implementations can assume the passed arguments are valid `RDF.Literal.Datatype` structs and
  have the same datatypes or are derived from each other.

  Should return `:gt` if value of the first literal is greater than the value of the second in
  terms of their datatype and `:lt` for vice versa. If the two literals can be considered equal `:eq` should be returned.
  For datatypes with only partial ordering `:indeterminate` should be returned when the
  order of the given literals is not defined.

  `nil` should be returned when the given arguments are not comparable datatypes or if one them is invalid.

  The default implementation of the `__using__` macro of `RDF.Literal.Datatype`s
  just compares the values of the given literals.
  """
  @callback do_compare(literal | any, literal | any) :: comparison_result | :indeterminate | nil

  @doc """
  Updates the value of a `RDF.Literal` without changing everything else.

  ## Example

      iex> RDF.XSD.integer(42) |> RDF.XSD.Integer.update(fn value -> value + 1 end)
      RDF.XSD.integer(43)
      iex> ~L"foo"de |> RDF.LangString.update(fn _ -> "bar" end)
      ~L"bar"de
      iex> RDF.literal("foo", datatype: "http://example.com/dt") |> RDF.Literal.Generic.update(fn _ -> "bar" end)
      RDF.literal("bar", datatype: "http://example.com/dt")
  """
  @callback update(Literal.t() | literal, fun()) :: Literal.t

  @doc """
  Updates the value of a `RDF.Literal` without changing anything else.

  This variant of `c:update/2` allows with the `:as` option to specify what will
  be passed to `fun`, eg. with `as: :lexical` the lexical is passed to the function.

  ## Example

      iex> RDF.XSD.integer(42) |> RDF.XSD.Integer.update(
      ...>   fn value -> value <> "1" end, as: :lexical)
      RDF.XSD.integer(421)
  """
  @callback update(Literal.t() | literal, fun(), keyword) :: Literal.t

  @doc """
  Returns the `RDF.Literal.Datatype` for a datatype IRI.
  """
  defdelegate get(id), to: Literal.Datatype.Registry, as: :datatype

  @doc !"""
  As opposed to RDF.Literal.valid?/1 this function operates on the datatype structs ...

  It's meant for internal use only and doesn't perform checks if the struct
  passed is actually a `RDF.Literal.Datatype` struct.
  """
  # Dispatches to the datatype module extracted from the struct name.
  def valid?(%datatype{} = datatype_literal), do: datatype.valid?(datatype_literal)

  defmacro __using__(opts) do
    name = Keyword.fetch!(opts, :name)
    id = Keyword.fetch!(opts, :id)
    # Registration defaults to on whenever an id is given.
    do_register = Keyword.get(opts, :register, not is_nil(id))
    datatype = __CALLER__.module

    # TODO: find an alternative to Code.eval_quoted - We want to support that id can be passed via a function call
    unquoted_id =
      if do_register do
        id
        |> Code.eval_quoted([], __ENV__)
        |> elem(0)
        |> to_string()
      end

    quote do
      @behaviour unquote(__MODULE__)

      @doc !"""
      This function is just used to check if a module is a RDF.Literal.Datatype.

      See `RDF.Literal.Datatype.Registry.is_rdf_literal_datatype?/1`.
      """
      def __rdf_literal_datatype_indicator__, do: true

      @name unquote(name)
      @impl unquote(__MODULE__)
      def name, do: @name

      @id if unquote(id), do: RDF.IRI.new(unquote(id))
      @impl unquote(__MODULE__)
      def id, do: @id

      # RDF.XSD.Datatype offers another default implementation, but since it is
      # still in a macro implementation defoverridable doesn't work
      unless RDF.XSD.Datatype in @behaviour do
        @doc """
        Checks if the given literal has this datatype.
        """
        @impl unquote(__MODULE__)
        def datatype?(%Literal{literal: literal}), do: datatype?(literal)
        def datatype?(%datatype{}), do: datatype?(datatype)
        def datatype?(__MODULE__), do: true
        def datatype?(_), do: false
      end

      @impl unquote(__MODULE__)
      def datatype_id(%Literal{literal: literal}), do: datatype_id(literal)
      def datatype_id(%__MODULE__{}), do: @id

      @impl unquote(__MODULE__)
      def language(%Literal{literal: literal}), do: language(literal)
      def language(%__MODULE__{}), do: nil

      @doc """
      Returns the canonical lexical form of a `RDF.Literal` of this datatype.
      """
      @impl unquote(__MODULE__)
      def canonical_lexical(literal)
      def canonical_lexical(%Literal{literal: literal}), do: canonical_lexical(literal)

      def canonical_lexical(%__MODULE__{} = literal) do
        # Invalid literals have no canonical form, so nil is returned.
        if valid?(literal) do
          literal |> canonical() |> lexical()
        end
      end

      def canonical_lexical(_), do: nil

      @doc """
      Casts a datatype literal of one type into a datatype literal of another type.

      Returns `nil` when the given arguments are not castable into this datatype or when the given argument is an
      invalid literal.

      Implementations define the casting for a given value with the `c:RDF.Literal.Datatype.do_cast/1` callback.
      """
      @spec cast(Literal.Datatype.literal | RDF.Term.t) :: Literal.t() | nil
      @dialyzer {:nowarn_function, cast: 1}
      def cast(literal_or_value)
      def cast(%Literal{literal: literal}), do: cast(literal)

      # Already this datatype: just wrap (no conversion needed) if valid.
      def cast(%__MODULE__{} = datatype_literal),
        do: if(valid?(datatype_literal), do: literal(datatype_literal))

      def cast(%struct{} = datatype_literal) do
        # Only valid literals of other datatypes, IRIs and blank nodes reach
        # the datatype-specific do_cast/1 callback.
        if (Literal.datatype?(struct) and Literal.Datatype.valid?(datatype_literal)) or
             struct in [RDF.IRI, RDF.BlankNode] do
          case do_cast(datatype_literal) do
            %__MODULE__{} = literal -> if valid?(literal), do: literal(literal)
            %Literal{literal: %__MODULE__{}} = literal -> if valid?(literal), do: literal
            _ -> nil
          end
        end
      end

      def cast(_), do: nil

      @impl unquote(__MODULE__)
      def do_cast(value), do: nil

      @doc """
      Checks if two datatype literals are equal in terms of the values of their value space.

      Non-`RDF.Literal`s are tried to be coerced via `RDF.Literal.coerce/1` before comparison.

      Returns `nil` when the given arguments are not comparable as literals of this
      datatype.

      Invalid literals are only considered equal in this relation when both have the exact same
      datatype and the same attributes (lexical form, language etc.).

      Implementations can customize this equivalence relation via the `c:RDF.Literal.Datatype.do_equal_value_same_or_derived_datatypes?/2`
      and `c:RDF.Literal.Datatype.do_equal_value_different_datatypes?/2` callbacks.
      """
      def equal_value?(left, right)
      def equal_value?(left, %Literal{literal: right}), do: equal_value?(left, right)
      def equal_value?(%Literal{literal: left}, right), do: equal_value?(left, right)
      def equal_value?(nil, _), do: nil
      def equal_value?(_, nil), do: nil

      def equal_value?(left, right) do
        cond do
          # Coerce plain Elixir values (non-literals, non-resources) first.
          not Literal.datatype?(right) and not resource?(right) -> equal_value?(left, Literal.coerce(right))
          not Literal.datatype?(left) and not resource?(left) -> equal_value?(Literal.coerce(left), right)
          true ->
            left_datatype = left.__struct__
            right_datatype = right.__struct__
            left_valid = resource?(left) or left_datatype.valid?(left)
            right_valid = resource?(right) or right_datatype.valid?(right)

            cond do
              not left_valid and not right_valid ->
                left == right

              left_valid and right_valid ->
                case equality_path(left_datatype, right_datatype) do
                  {:same_or_derived, datatype} ->
                    datatype.do_equal_value_same_or_derived_datatypes?(left, right)

                  {:different, datatype} ->
                    datatype.do_equal_value_different_datatypes?(left, right)
                end

              # one of the given literals is invalid
              true ->
                if left_datatype == right_datatype do
                  false
                end
            end
        end
      end

      # RDF.XSD.Datatype offers another default implementation, but since it is
      # still in a macro implementation defoverridable doesn't work
      unless RDF.XSD.Datatype in @behaviour do
        @impl unquote(__MODULE__)
        def do_equal_value_same_or_derived_datatypes?(left, right), do: left == right

        @impl unquote(__MODULE__)
        def do_equal_value_different_datatypes?(left, right), do: nil

        defoverridable do_equal_value_same_or_derived_datatypes?: 2,
                       do_equal_value_different_datatypes?: 2
      end

      # Decides which equality callback applies: same module (or derived,
      # via override in RDF.XSD.Datatype) vs. different datatypes.
      defp equality_path(left_datatype, right_datatype)
      defp equality_path(datatype, datatype), do: {:same_or_derived, datatype}
      defp equality_path(datatype, _), do: {:different, datatype}

      # as opposed to RDF.resource? this does not try to resolve atoms
      defp resource?(%RDF.IRI{}), do: true
      defp resource?(%RDF.BlankNode{}), do: true
      defp resource?(_), do: false

      # RDF.XSD.Datatype offers another default implementation, but since it is
      # still in a macro implementation defoverridable doesn't work
      unless RDF.XSD.Datatype in @behaviour do
        @spec compare(RDF.Literal.t() | any, RDF.Literal.t() | any) :: RDF.Literal.Datatype.comparison_result | :indeterminate | nil
        def compare(left, right)
        def compare(left, %RDF.Literal{literal: right}), do: compare(left, right)
        def compare(%RDF.Literal{literal: left}, right), do: compare(left, right)

        def compare(left, right) do
          # Only valid datatype literals are comparable; otherwise nil.
          if RDF.Literal.datatype?(left) and RDF.Literal.datatype?(right) and
               RDF.Literal.Datatype.valid?(left) and RDF.Literal.Datatype.valid?(right) do
            do_compare(left, right)
          end
        end

        @impl RDF.Literal.Datatype
        def do_compare(%datatype{} = left, %datatype{} = right) do
          # Default ordering: Elixir term comparison of the literal values,
          # falling back to the datatype's equality for :eq.
          case {datatype.value(left), datatype.value(right)} do
            {left_value, right_value} when left_value < right_value -> :lt
            {left_value, right_value} when left_value > right_value -> :gt
            _ ->
              if datatype.equal_value?(left, right), do: :eq
          end
        end

        def do_compare(_, _), do: nil

        defoverridable compare: 2,
                       do_compare: 2
      end

      @doc """
      Updates the value of a `RDF.Literal` without changing everything else.
      """
      @impl unquote(__MODULE__)
      def update(literal, fun, opts \\ [])
      def update(%Literal{literal: literal}, fun, opts), do: update(literal, fun, opts)

      def update(%__MODULE__{} = literal, fun, opts) do
        # :as option selects what gets passed to fun (value by default,
        # or the lexical form with as: :lexical).
        case Keyword.get(opts, :as) do
          :lexical -> lexical(literal)
          nil -> value(literal)
        end
        |> fun.()
        |> new()
      end

      # This is a private RDF.Literal constructor, which should be used to build
      # the RDF.Literals from the datatype literal structs instead of the
      # RDF.Literal/new/1, to bypass the unnecessary datatype checks.
      defp literal(datatype_literal), do: %Literal{literal: datatype_literal}

      defoverridable datatype_id: 1,
                     language: 1,
                     canonical_lexical: 1,
                     cast: 1,
                     do_cast: 1,
                     equal_value?: 2,
                     equality_path: 2,
                     update: 2,
                     update: 3

      defimpl String.Chars do
        def to_string(literal) do
          literal.__struct__.lexical(literal)
        end
      end

      if unquote(do_register) do
        import ProtocolEx

        defimpl_ex Registration, unquote(unquoted_id),
          for: RDF.Literal.Datatype.Registry.Registration do
          @moduledoc false
          def datatype(id), do: unquote(datatype)
        end
      end
    end
  end
end
|
lib/rdf/literal/datatype.ex
| 0.95121
| 0.81231
|
datatype.ex
|
starcoder
|
defmodule Scenic.Primitive.Path do
  @moduledoc """
  Draw a complex path on the screen described by a list of actions.

  ## Data

  `list_of_commands`

  The data for a path is a list of commands. They are interpreted in order
  when the path is drawn. See below for the commands it will accept.

  ## Styles

  This primitive recognizes the following styles
  * [`hidden`](Scenic.Primitive.Style.Hidden.html) - show or hide the primitive
  * [`fill`](Scenic.Primitive.Style.Fill.html) - fill in the area of the primitive
  * [`stroke`](Scenic.Primitive.Style.Stroke.html) - stroke the outline of the primitive. In this case, only the curvy part.
  * [`cap`](Scenic.Primitive.Style.Cap.html) - says how to draw the ends of the line.
  * [`join`](Scenic.Primitive.Style.Join.html) - control how segments are joined.
  * [`miter_limit`](Scenic.Primitive.Style.MiterLimit.html) - control how segments are joined.

  ## Commands

  * `:begin` - start a new path segment
  * `:close_path` - draw a line back to the start of the current segment
  * `{:move_to, x, y}` - move the current draw position
  * `{:line_to, x, y}` - draw a line from the current position to a new location.
  * `{:bezier_to, c1x, c1y, c2x, c2y, x, y}` - draw a bezier curve from the current position to a new location.
  * `{:quadratic_to, cx, cy, x, y}` - draw a quadratic curve from the current position to a new location.
  * `{:arc_to, x1, y1, x2, y2, radius}` - draw an arc from the current position to a new location.

  ## `Path` vs. `Script`

  Both the `Path` and the `Script` primitives use the `Scenic.Script` to create scripts
  are sent to the drivers for drawing. The difference is that a Path is far more limited
  in what it can do, and is inserted inline with the compiled graph that created it.
  The script primitive, on the other hand, has full access to the API set of
  `Scenic.Script` and accesses scripts by reference.

  The inline vs. reference difference is important. A simple path will consume
  fewer resources. BUT it will cause the entire graph to be recompiled and resent
  to the drivers if you change it.

  A script primitive references a script that you create separately from the
  the graph. This means that any changes to the graph (such as an animation) will
  NOT need to recompile or resend the script.

  ## Usage

  You should add/modify primitives via the helper functions in
  [`Scenic.Primitives`](Scenic.Primitives.html#path/3)

  ```elixir
  graph
  |> path( [
       :begin,
       {:move_to, 0, 0},
       {:bezier_to, 0, 20, 0, 50, 40, 50},
       {:line_to, 30, 60},
       :close_path
     ],
     fill: :blue
  )
  ```
  """

  use Scenic.Primitive
  alias Scenic.Script
  alias Scenic.Primitive
  alias Scenic.Primitive.Style

  # import IEx

  @type cmd ::
          :begin
          | :close_path
          | {:move_to, x :: number, y :: number}
          | {:line_to, x :: number, y :: number}
          | {:bezier_to, c1x :: number, c1y :: number, c2x :: number, c2y :: number, x :: number,
             y :: number}
          | {:quadratic_to, cx :: number, cy :: number, x :: number, y :: number}
          | {:arc_to, x1 :: number, y1 :: number, x2 :: number, y2 :: number, radius :: number}

  @type t :: [cmd]

  @type styles_t :: [
          :hidden | :scissor | :fill | :stroke_width | :stroke_fill | :cap | :join | :miter_limit
        ]

  @styles [:hidden, :scissor, :fill, :stroke_width, :stroke_fill, :cap, :join, :miter_limit]

  # Validates the command list: every element must be a recognized path
  # command with numeric arguments. The reduce threads either {:ok, commands}
  # or the first {:error, _} through; errors short-circuit via the first clause.
  @impl Primitive
  @spec validate(commands :: t()) :: {:ok, commands :: t()} | {:error, String.t()}
  def validate(commands) when is_list(commands) do
    Enum.reduce(commands, {:ok, commands}, fn
      _, {:error, error} ->
        {:error, error}

      :begin, ok ->
        ok

      :close_path, ok ->
        ok

      {:move_to, x, y}, ok when is_number(x) and is_number(y) ->
        ok

      {:line_to, x, y}, ok when is_number(x) and is_number(y) ->
        ok

      {:bezier_to, c1x, c1y, c2x, c2y, x, y}, ok
      when is_number(c1x) and is_number(c1y) and
             is_number(c2x) and is_number(c2y) and
             is_number(x) and is_number(y) ->
        ok

      {:quadratic_to, cx, cy, x, y}, ok
      when is_number(cx) and is_number(cy) and
             is_number(x) and is_number(y) ->
        ok

      {:arc_to, x1, y1, x2, y2, radius}, ok
      when is_number(x1) and is_number(y1) and
             is_number(x2) and is_number(y2) and is_number(radius) ->
        ok

      cmd, _ ->
        err_cmd(cmd, commands)
    end)
    # make sure it always starts with a :begin
    |> case do
      {:ok, [:begin | _] = cmds} -> {:ok, cmds}
      {:ok, cmds} -> {:ok, [:begin | cmds]}
      err -> err
    end
  end

  # Fallback for non-list data: explain the expected format.
  def validate(data) do
    {
      :error,
      """
      #{IO.ANSI.red()}Invalid Path specification
      Received: #{inspect(data)}
      #{IO.ANSI.yellow()}
      Path should be a list of operations from the following set:
        :begin
        :close_path
        {:move_to, x, y}
        {:line_to, x, y}
        {:bezier_to, c1x, c1y, c2x, c2y, x, y}
        {:quadratic_to, cx, cy, x, y}
        {:arc_to, x1, y1, x2, y2, radius}#{IO.ANSI.default_color()}
      """
    }
  end

  # :solid was a command in older Scenic versions; give a dedicated message.
  defp err_cmd(:solid, commands) do
    {
      :error,
      """
      #{IO.ANSI.red()}Invalid Path specification
      Received: #{inspect(commands)}
      The :solid command is deprecated
      #{IO.ANSI.yellow()}
      Path should be a list of operations from the following set:
        :begin
        :close_path
        {:move_to, x, y}
        {:line_to, x, y}
        {:bezier_to, c1x, c1y, c2x, c2y, x, y}
        {:quadratic_to, cx, cy, x, y}
        {:arc_to, x1, y1, x2, y2, radius}#{IO.ANSI.default_color()}
      """
    }
  end

  # :hole was a command in older Scenic versions; give a dedicated message.
  defp err_cmd(:hole, commands) do
    {
      :error,
      """
      #{IO.ANSI.red()}Invalid Path specification
      Received: #{inspect(commands)}
      The :hole command is deprecated
      #{IO.ANSI.yellow()}
      Path should be a list of operations from the following set:
        :begin
        :close_path
        {:move_to, x, y}
        {:line_to, x, y}
        {:bezier_to, c1x, c1y, c2x, c2y, x, y}
        {:quadratic_to, cx, cy, x, y}
        {:arc_to, x1, y1, x2, y2, radius}#{IO.ANSI.default_color()}
      """
    }
  end

  # Generic unknown-command error.
  defp err_cmd(cmd, commands) do
    {
      :error,
      """
      #{IO.ANSI.red()}Invalid Path specification
      Received: #{inspect(commands)}
      The #{inspect(cmd)} operation is invalid
      #{IO.ANSI.yellow()}
      Path should be a list of operations from the following set:
        :begin
        :close_path
        {:move_to, x, y}
        {:line_to, x, y}
        {:bezier_to, c1x, c1y, c2x, c2y, x, y}
        {:quadratic_to, cx, cy, x, y}
        {:arc_to, x1, y1, x2, y2, radius}#{IO.ANSI.default_color()}
      """
    }
  end

  # --------------------------------------------------------
  @doc """
  Returns a list of styles recognized by this primitive.
  """
  @impl Primitive
  @spec valid_styles() :: styles_t()
  def valid_styles(), do: @styles

  # --------------------------------------------------------
  @doc """
  Compile the data for this primitive into a mini script. This can be combined with others to
  generate a larger script and is called when a graph is compiled.

  Note: Path is a "Meta" primitive. It isn't really a primitive that is represented in a
  draw script. Instead, it generates its own mini-script, which is included inline to the
  graph it is contained in.

  Note: The compiled script is backwards. This is an inline script, which means
  it is inserted into a larger script as part of the graph compile process and
  Script.finish() will be called on that later.
  """
  @impl Primitive
  @spec compile(primitive :: Primitive.t(), styles :: Style.t()) :: Script.t()
  def compile(%Primitive{module: __MODULE__, data: commands}, styles) do
    # Translate each validated path command 1:1 into a Script op;
    # unknown commands are silently skipped (validate/1 rejects them earlier).
    ops =
      Enum.reduce(commands, [], fn
        :begin, acc ->
          Script.begin_path(acc)

        :close_path, acc ->
          Script.close_path(acc)

        {:move_to, x, y}, acc ->
          Script.move_to(acc, x, y)

        {:line_to, x, y}, acc ->
          Script.line_to(acc, x, y)

        {:bezier_to, c1x, c1y, c2x, c2y, x, y}, acc ->
          Script.bezier_to(acc, c1x, c1y, c2x, c2y, x, y)

        {:quadratic_to, cx, cy, x, y}, acc ->
          Script.quadratic_to(acc, cx, cy, x, y)

        {:arc_to, x1, y1, x2, y2, radius}, acc ->
          Script.arc_to(acc, x1, y1, x2, y2, radius)

        _, acc ->
          acc
      end)

    # finish by appending a fill/stroke command
    case Script.draw_flag(styles) do
      nil ->
        ops

      :fill ->
        Script.fill_path(ops)

      :stroke ->
        Script.stroke_path(ops)

      :fill_stroke ->
        ops
        |> Script.fill_path()
        |> Script.stroke_path()
    end
  end
end
|
lib/scenic/primitive/path.ex
| 0.90827
| 0.884039
|
path.ex
|
starcoder
|
defmodule Membrane.MP4.Container.ParseHelper do
  @moduledoc false
  use Bunch
  alias Membrane.MP4.Container
  alias Membrane.MP4.Container.Schema

  # MP4 box header layout: a 4-byte big-endian size (covering the whole box,
  # header included) followed by a 4-byte box name.
  @box_name_size 4
  @box_size_size 4
  @box_header_size @box_name_size + @box_size_size

  # Recursively parses consecutive boxes from `data` according to `schema`,
  # accumulating {name, box} pairs in `acc` (in reverse order).
  @spec parse_boxes(binary, Schema.t(), Container.t()) ::
          {:ok, Container.t()} | {:error, Container.parse_error_context_t()}
  def parse_boxes(<<>>, _schema, acc) do
    # All input consumed: restore original box order.
    {:ok, Enum.reverse(acc)}
  end

  def parse_boxes(data, schema, acc) do
    # Bunch's `withl` labels each clause so the `else` can branch per label:
    # - header: malformed header -> propagate the error
    # - known?: unknown or black-box box -> keep raw content unparsed
    # - try: field/children parse error -> prepend the box name to the context
    withl header: {:ok, {name, content, data}} <- parse_box_header(data),
          do: box_schema = schema[name],
          known?: true <- box_schema && not box_schema.black_box?,
          try: {:ok, {fields, rest}} <- parse_fields(content, box_schema.fields),
          try: {:ok, children} <- parse_boxes(rest, box_schema.children, []) do
      box = %{fields: fields, children: children}
      parse_boxes(data, schema, [{name, box} | acc])
    else
      header: error ->
        error

      known?: _ ->
        box = %{content: content}
        parse_boxes(data, schema, [{name, box} | acc])

      try: {:error, context} ->
        {:error, [box: name] ++ context}
    end
  end

  # Splits off one box: returns {:ok, {name, content, remaining_data}} or an
  # error when the header is truncated or the declared size exceeds the input.
  defp parse_box_header(data) do
    withl header:
            <<size::integer-size(@box_size_size)-unit(8), name::binary-size(@box_name_size),
              rest::binary>> <- data,
          do: content_size = size - @box_header_size,
          size: <<content::binary-size(content_size), rest::binary>> <- rest do
      {:ok, {parse_box_name(name), content, rest}}
    else
      header: _ -> {:error, reason: :box_header, data: data}
      size: _ -> {:error, reason: {:box_size, header: size, actual: byte_size(rest)}, data: data}
    end
  end

  # Box names are space-padded 4-byte strings; trim and atomize
  # (box names come from the schema-known set).
  defp parse_box_name(name) do
    name |> String.trim_trailing(" ") |> String.to_atom()
  end

  # Parses the schema-declared fields in order, returning a field map and
  # whatever input remains.
  defp parse_fields(data, []) do
    {:ok, {%{}, data}}
  end

  defp parse_fields(data, [{name, type} | fields]) do
    with {:ok, {term, rest}} <- parse_field(data, {name, type}),
         {:ok, {terms, rest}} <- parse_fields(rest, fields) do
      {:ok, {Map.put(terms, name, term), rest}}
    end
  end

  # Reserved bits must match the schema-provided pattern exactly.
  defp parse_field(data, {:reserved, reserved}) do
    size = bit_size(reserved)

    case data do
      <<^reserved::bitstring-size(size), rest::bitstring>> -> {:ok, {[], rest}}
      data -> parse_field_error(data, :reserved, expected: reserved)
    end
  end

  # Nested field group: parse the subfields, tagging errors with the group name.
  defp parse_field(data, {name, subfields}) when is_list(subfields) do
    case parse_fields(data, subfields) do
      {:ok, result} -> {:ok, result}
      {:error, context} -> parse_field_error(data, name, context)
    end
  end

  defp parse_field(data, {name, {:int, size}}) do
    case data do
      <<int::signed-integer-size(size), rest::bitstring>> -> {:ok, {int, rest}}
      _unknown_format -> parse_field_error(data, name)
    end
  end

  defp parse_field(data, {name, {:uint, size}}) do
    case data do
      <<uint::integer-size(size), rest::bitstring>> -> {:ok, {uint, rest}}
      _unknown_format -> parse_field_error(data, name)
    end
  end

  # Fixed-point number: stored as {integer part, fractional part}.
  defp parse_field(data, {name, {:fp, int_size, frac_size}}) do
    case data do
      <<int::integer-size(int_size), frac::integer-size(frac_size), rest::bitstring>> ->
        {:ok, {{int, frac}, rest}}

      _unknown_format ->
        parse_field_error(data, name)
    end
  end

  # Unsized binary consumes all remaining input.
  defp parse_field(data, {_name, :bin}) do
    {:ok, {data, <<>>}}
  end

  defp parse_field(data, {name, {type, size}}) when type in [:bin, :str] do
    case data do
      <<bin::bitstring-size(size), rest::bitstring>> -> {:ok, {bin, rest}}
      _unknown_format -> parse_field_error(data, name)
    end
  end

  # Unsized string: NUL-terminated.
  defp parse_field(data, {name, :str}) do
    case String.split(data, "\0", parts: 2) do
      [str, rest] -> {:ok, {str, rest}}
      _unknown_format -> parse_field_error(data, name)
    end
  end

  # List field: parse elements of `type` until the input is exhausted.
  defp parse_field(<<>>, {_name, {:list, _type}}) do
    {:ok, {[], <<>>}}
  end

  defp parse_field(data, {name, {:list, type}} = field) do
    with {:ok, {term, rest}} <- parse_field(data, {name, type}),
         {:ok, {terms, rest}} <- parse_field(rest, field) do
      {:ok, {[term | terms], rest}}
    end
  end

  # Unknown field type in the schema.
  defp parse_field(data, {name, _type}), do: parse_field_error(data, name)

  defp parse_field_error(data, name, context \\ [])

  defp parse_field_error(data, name, []) do
    {:error, field: name, data: data}
  end

  # When a context is given (nested error), only propagate field names.
  defp parse_field_error(_data, name, context) do
    {:error, [field: name] ++ context}
  end
end
|
lib/membrane_mp4/container/parse_helper.ex
| 0.519521
| 0.508666
|
parse_helper.ex
|
starcoder
|
defmodule AshCsv.DataLayer do
  @moduledoc "The data layer implementation for AshCsv"
  @behaviour Ash.DataLayer

  alias Ash.Actions.Sort
  alias Ash.Dsl.Extension
  alias Ash.Filter.{Expression, Not, Predicate}
  alias Ash.Filter.Predicate.{Eq, GreaterThan, In, LessThan}

  # Capability flags: Ash queries these to decide which operations and
  # filter predicates may be delegated to this data layer. Everything not
  # listed falls through to the final `false` clause.
  @impl true
  def can?(_, :read), do: true
  def can?(_, :create), do: true
  def can?(_, :update), do: true
  def can?(_, :destroy), do: true
  def can?(_, :sort), do: true
  def can?(_, :filter), do: true
  def can?(_, :limit), do: true
  def can?(_, :offset), do: true
  def can?(_, :boolean_filter), do: true
  def can?(_, :transact), do: true
  def can?(_, :delete_with_query), do: false
  def can?(_, {:filter_predicate, _, %In{}}), do: true
  def can?(_, {:filter_predicate, _, %Eq{}}), do: true
  def can?(_, {:filter_predicate, _, %LessThan{}}), do: true
  def can?(_, {:filter_predicate, _, %GreaterThan{}}), do: true
  def can?(_, {:sort, _}), do: true
  def can?(_, _), do: false

  # DSL section exposed to resources through `use Extension` below.
  # NOTE(review): the :separator option names __MODULE__.separator_opt/1 as
  # its custom validator, but no separator_opt/1 is defined in this module —
  # confirm it exists, otherwise validating that option will fail.
  @csv %Ash.Dsl.Section{
    name: :csv,
    schema: [
      file: [
        type: :string,
        doc: "The file to read the data from",
        required: true
      ],
      create?: [
        type: :boolean,
        doc:
          "Whether or not the file should be created if it does not exist (this will only happen on writes)",
        default: false
      ],
      header?: [
        type: :boolean,
        default: false,
        doc: "If the csv file has a header that should be skipped"
      ],
      separator: [
        type: {:custom, __MODULE__, :separator_opt, []},
        default: ?,,
        doc: "The separator to use, defaults to a comma. Pass in a character (not a string)."
      ],
      columns: [
        type: {:custom, __MODULE__, :columns_opt, []},
        default: [],
        doc: "The order that the attributes appear in the columns of the CSV"
      ]
    ]
  }

  # Absolute path of the CSV file backing `resource` (expanded against cwd).
  def file(resource) do
    resource
    |> Extension.get_opt([:csv], :file, "", true)
    |> Path.expand(File.cwd!())
  end

  # DSL option accessors for the section above.
  def columns(resource) do
    Extension.get_opt(resource, [:csv], :columns, [], true)
  end

  def separator(resource) do
    Extension.get_opt(resource, [:csv], :separator, nil, true)
  end

  def header?(resource) do
    Extension.get_opt(resource, [:csv], :header?, nil, true)
  end

  def create?(resource) do
    Extension.get_opt(resource, [:csv], :create?, nil, true)
  end

  # NOTE(review): the second parameter here is the limit, despite being
  # named `offset` — consider renaming for clarity.
  @impl true
  def limit(query, offset, _), do: {:ok, %{query | limit: offset}}

  @impl true
  def offset(query, offset, _), do: {:ok, %{query | offset: offset}}

  # Filter/sort are stored on the query struct and applied in memory
  # by run_query/2.
  @impl true
  def filter(query, filter, _resource) do
    {:ok, %{query | filter: filter}}
  end

  @impl true
  def sort(query, sort, _resource) do
    {:ok, %{query | sort: sort}}
  end

  @doc false
  # Validator for the :columns DSL option: every entry must be an atom.
  def columns_opt(columns) do
    if Enum.all?(columns, &is_atom/1) do
      {:ok, columns}
    else
      {:error, "Expected all columns to be atoms"}
    end
  end

  use Extension, sections: [@csv]

  defmodule Query do
    @moduledoc false
    # Accumulates query state (filter/sort/limit/offset) until run_query/2.
    defstruct [:resource, :sort, :filter, :limit, :offset]
  end

  @impl true
  # Reads the whole file, then applies filter, sort, offset and limit in
  # memory. A missing file is treated as an empty result set when the
  # resource is configured with `create?: true`; otherwise the File.Error
  # is surfaced as an error tuple.
  def run_query(query, resource) do
    case read_file(resource) do
      {:ok, results} ->
        offset_records =
          results
          |> filter_matches(query.filter)
          |> Sort.runtime_sort(query.sort)
          |> Enum.drop(query.offset || 0)

        if query.limit do
          {:ok, Enum.take(offset_records, query.limit)}
        else
          {:ok, offset_records}
        end

      {:error, error} ->
        {:error, error}
    end
  rescue
    e in File.Error ->
      if create?(resource) do
        {:ok, []}
      else
        {:error, e}
      end
  end

  @impl true
  # Reads all existing records first so primary-key uniqueness can be
  # enforced before appending the new row.
  def create(resource, changeset) do
    case run_query(%Query{resource: resource}, resource) do
      {:ok, records} ->
        create_from_records(records, resource, changeset)

      {:error, error} ->
        {:error, error}
    end
  end

  @impl true
  def update(resource, changeset) do
    resource
    |> do_read_file()
    |> do_update(resource, changeset)
  end

  @impl true
  # The resource module is extracted from the record struct's name.
  def destroy(%resource{} = record) do
    resource
    |> do_read_file()
    |> do_destroy(resource, record)
  end

  # Casts raw string key/value pairs into a resource struct, halting on the
  # first unknown attribute or failed type cast. `resource.__struct__`
  # yields an empty struct of the resource to accumulate into.
  defp cast_stored(resource, keys) do
    Enum.reduce_while(keys, {:ok, resource.__struct__}, fn {key, value}, {:ok, record} ->
      with attribute when not is_nil(attribute) <- Ash.Resource.attribute(resource, key),
           {:ok, loaded} <- Ash.Type.cast_stored(attribute.type, value) do
        {:cont, {:ok, struct(record, [{key, loaded}])}}
      else
        nil ->
          {:halt, {:error, "#{key} is not an attribute"}}

        :error ->
          {:halt, {:error, "#{key} could not be loaded"}}
      end
    end)
  end

  @impl true
  def resource_to_query(resource) do
    %Query{resource: resource}
  end

  @impl true
  # Serializes access to the CSV file (cluster-wide) with a :global lock
  # keyed on the file path. rollback/2 signals failure by throwing a tagged
  # tuple which is caught here and converted into an error return.
  def transaction(resource, fun) do
    file = file(resource)

    :global.trans({{:csv, file}, System.unique_integer()}, fn ->
      try do
        Process.put({:csv_in_transaction, file(resource)}, true)
        {:res, fun.()}
      catch
        {{:csv_rollback, ^file}, value} ->
          {:error, value}
      end
    end)
    |> case do
      {:res, result} -> {:ok, result}
      {:error, error} -> {:error, error}
      # :global.trans/2 returns :aborted when the lock cannot be acquired.
      :aborted -> {:error, "transaction failed"}
    end
  end

  @impl true
  def rollback(resource, error) do
    throw({{:csv_rollback, file(resource)}, error})
  end

  @impl true
  def in_transaction?(resource) do
    Process.get({:csv_in_transaction, file(resource)}, false) == true
  end

  # In-memory filter evaluation over already-cast records.
  def filter_matches(records, nil), do: records

  def filter_matches(records, filter) do
    Enum.filter(records, &matches_filter?(&1, filter.expression))
  end

  defp matches_filter?(_record, nil), do: true
  defp matches_filter?(_record, boolean) when is_boolean(boolean), do: boolean

  # Only predicates directly on the resource (empty relationship path) are
  # supported, mirroring the can?/2 declarations above.
  defp matches_filter?(
         record,
         %Predicate{
           predicate: predicate,
           attribute: %{name: name},
           relationship_path: []
         }
       ) do
    matches_predicate?(record, name, predicate)
  end

  defp matches_filter?(record, %Expression{op: :and, left: left, right: right}) do
    matches_filter?(record, left) && matches_filter?(record, right)
  end

  defp matches_filter?(record, %Expression{op: :or, left: left, right: right}) do
    matches_filter?(record, left) || matches_filter?(record, right)
  end

  defp matches_filter?(record, %Not{expression: expression}) do
    not matches_filter?(record, expression)
  end

  defp matches_predicate?(record, field, %Eq{value: predicate_value}) do
    Map.fetch(record, field) == {:ok, predicate_value}
  end

  # Missing fields never match for the comparison predicates below.
  defp matches_predicate?(record, field, %LessThan{value: predicate_value}) do
    case Map.fetch(record, field) do
      {:ok, value} -> value < predicate_value
      :error -> false
    end
  end

  defp matches_predicate?(record, field, %GreaterThan{value: predicate_value}) do
    case Map.fetch(record, field) do
      {:ok, value} -> value > predicate_value
      :error -> false
    end
  end

  defp matches_predicate?(record, field, %In{values: predicate_values}) do
    case Map.fetch(record, field) do
      {:ok, value} -> value in predicate_values
      :error -> false
    end
  end

  # Rewrites the whole file, keeping every row except the one whose primary
  # key matches `record`.
  # NOTE(review): the trailing `case` only matches {:ok, rows}; an
  # {:error, _} halt from the reduce (failed cast) would raise a
  # CaseClauseError — verify this is intended.
  # sobelow_skip ["Traversal.FileModule"]
  defp do_destroy({:ok, results}, resource, record) do
    columns = columns(resource)
    pkey = Ash.Resource.primary_key(resource)
    changeset_pkey = Map.take(record, pkey)

    results
    |> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
      key_vals =
        columns
        |> Enum.zip(result)
        # :_ marks CSV columns that map to no attribute; drop them.
        |> Enum.reject(fn {key, _value} ->
          key == :_
        end)

      cast(resource, key_vals, pkey, changeset_pkey, result, results)
    end)
    |> case do
      {:ok, rows} ->
        lines =
          rows
          |> CSV.encode(separator: separator(resource))
          |> Enum.to_list()

        resource
        |> file()
        # [:write] truncates: the surviving rows replace the file contents.
        |> File.write(lines, [:write])
        |> case do
          :ok ->
            :ok

          {:error, error} ->
            {:error, "Error while writing to CSV: #{inspect(error)}"}
        end
    end
  end

  defp do_destroy({:error, error}, _, _), do: {:error, error}

  # Reduce step for do_destroy/3: drop the row matching the record's primary
  # key, keep every other raw row unchanged.
  defp cast(resource, key_vals, pkey, changeset_pkey, result, results) do
    case cast_stored(resource, key_vals) do
      {:ok, casted} ->
        if Map.take(casted, pkey) == changeset_pkey do
          {:cont, {:ok, results}}
        else
          {:cont, {:ok, [result | results]}}
        end

      {:error, error} ->
        {:halt, {:error, error}}
    end
  end

  defp do_update({:error, error}, _, _) do
    {:error, error}
  end

  # Rewrites the whole file, replacing the row whose primary key matches the
  # changeset with freshly dumped attribute values.
  # NOTE(review): same unhandled {:error, _} case as do_destroy/3 above.
  # sobelow_skip ["Traversal.FileModule"]
  defp do_update({:ok, results}, resource, changeset) do
    columns = columns(resource)
    pkey = Ash.Resource.primary_key(resource)

    changeset_pkey =
      Enum.into(pkey, %{}, fn key ->
        {key, Ash.Changeset.get_attribute(changeset, key)}
      end)

    results
    |> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
      key_vals =
        columns
        |> Enum.zip(result)
        |> Enum.reject(fn {key, _value} ->
          key == :_
        end)

      dump(resource, changeset, results, result, key_vals, pkey, changeset_pkey)
    end)
    |> case do
      {:ok, rows} ->
        lines =
          rows
          |> CSV.encode(separator: separator(resource))
          |> Enum.to_list()

        resource
        |> file()
        |> File.write(lines, [:write])
        |> case do
          :ok ->
            {:ok, struct(changeset.data, changeset.attributes)}

          {:error, error} ->
            {:error, "Error while writing to CSV: #{inspect(error)}"}
        end
    end
  end

  # Reduce step for do_update/3: rows matching the changeset's primary key
  # are re-dumped from the changeset; all others pass through untouched.
  defp dump(resource, changeset, results, result, key_vals, pkey, changeset_pkey) do
    case cast_stored(resource, key_vals) do
      {:ok, casted} ->
        if Map.take(casted, pkey) == changeset_pkey do
          dump_row(resource, changeset, results)
        else
          {:cont, {:ok, [result | results]}}
        end

      {:error, error} ->
        {:halt, {:error, error}}
    end
  end

  # Dumps the changeset's attributes into a CSV row (column order from the
  # DSL) and prepends it to the accumulated rows.
  # NOTE(review): `row` is built by prepending and never reversed here,
  # unlike create_from_records/3 which calls Enum.reverse/1 — updated rows
  # may be written with their columns in reverse order; verify.
  defp dump_row(resource, changeset, results) do
    Enum.reduce_while(columns(resource), {:ok, []}, fn key, {:ok, row} ->
      type = Ash.Resource.attribute(resource, key).type
      value = Ash.Changeset.get_attribute(changeset, key)

      case Ash.Type.dump_to_native(type, value) do
        {:ok, value} ->
          {:cont, {:ok, [to_string(value) | row]}}

        :error ->
          {:halt, {:error, "Could not dump #{key} to native type"}}
      end
    end)
    |> case do
      {:ok, new_row} ->
        {:cont, {:ok, [new_row | results]}}

      {:error, error} ->
        {:halt, {:error, error}}
    end
  end

  # Reads the file and casts every raw row into a resource struct.
  defp read_file(resource) do
    columns = columns(resource)

    resource
    |> do_read_file()
    |> case do
      {:ok, results} ->
        do_cast_stored(results, columns, resource)

      {:error, error} ->
        {:error, error}
    end
  end

  # Casts each raw CSV row (list of strings) into a struct; prepending here
  # re-reverses the already-reversed do_read_file/1 output, restoring file
  # order.
  defp do_cast_stored(results, columns, resource) do
    results
    |> Enum.reduce_while({:ok, []}, fn result, {:ok, results} ->
      key_vals =
        columns
        |> Enum.zip(result)
        |> Enum.reject(fn {key, _value} ->
          key == :_
        end)

      case cast_stored(resource, key_vals) do
        {:ok, casted} -> {:cont, {:ok, [casted | results]}}
        {:error, error} -> {:halt, {:error, error}}
      end
    end)
  end

  # Streams the file line by line, skipping the header row when configured,
  # and decodes it with the configured separator. Rows are accumulated by
  # prepending, so the result is in reverse file order.
  defp do_read_file(resource) do
    amount_to_drop =
      if header?(resource) do
        1
      else
        0
      end

    resource
    |> file()
    |> File.stream!()
    |> Stream.drop(amount_to_drop)
    |> CSV.decode(separator: separator(resource))
    |> Enum.reduce_while({:ok, []}, fn
      {:ok, result}, {:ok, results} ->
        {:cont, {:ok, [result | results]}}

      {:error, error}, _ ->
        {:halt, {:error, error}}
    end)
  end

  # Appends one new row after checking primary-key uniqueness against the
  # already-loaded records.
  # sobelow_skip ["Traversal.FileModule"]
  defp create_from_records(records, resource, changeset) do
    pkey = Ash.Resource.primary_key(resource)
    pkey_value = Map.take(changeset.attributes, pkey)

    if Enum.any?(records, fn record -> Map.take(record, pkey) == pkey_value end) do
      {:error, "Record is not unique"}
    else
      row =
        Enum.reduce_while(columns(resource), {:ok, []}, fn key, {:ok, row} ->
          type = Ash.Resource.attribute(resource, key).type
          value = Map.get(changeset.attributes, key)

          case Ash.Type.dump_to_native(type, value) do
            {:ok, value} ->
              {:cont, {:ok, [to_string(value) | row]}}

            :error ->
              {:halt, {:error, "Could not dump #{key} to native type"}}
          end
        end)

      case row do
        {:ok, row} ->
          lines =
            # Reverse to undo the prepend-accumulation above.
            [Enum.reverse(row)]
            |> CSV.encode(separator: separator(resource))
            |> Enum.to_list()

          resource
          |> file()
          |> File.write(lines, [:append])
          |> case do
            :ok ->
              {:ok, struct(resource, changeset.attributes)}

            {:error, error} ->
              {:error, "Error while writing to CSV: #{inspect(error)}"}
          end

        {:error, error} ->
          {:error, error}
      end
    end
  end
end
|
lib/ash_csv/data_layer.ex
| 0.785638
| 0.406685
|
data_layer.ex
|
starcoder
|
defmodule PhpAssocMap do
  @doc """
  Parses an associative array string (or charlist) to a key-valued map. Both single and double quotes are supported.

  *Note*: If the string starts with `<?php return`, it'll be ignored

  ## Examples

      iex> PhpAssocMap.to_map("['key' => ['another_key' => 'value']]")
      %{"key" => %{"another_key" => "value"}}

      iex> PhpAssocMap.to_map("<?php return ['key' => ['another_key' => 'value']];")
      %{"key" => %{"another_key" => "value"}}
  """
  @spec to_map(binary() | charlist()) :: any()
  def to_map(string), do: string |> to_tuple() |> PhpAssocMap.Map.Parser.parse()

  @doc """
  Converts a map structure to an associative array string. The string key and value are single quoted

  The returned document will not be formatted yet. Use PhpAssocMap.Exploder.explode/1 or PhpAssocMap.Exploder.explode/2 to have it formatted.

  ## Examples

      iex> PhpAssocMap.from_map(%{"key" => %{"another_key" => "value"}})
      "['key'=>['another_key'=>'value']]"
  """
  @spec from_map(map()) :: binary()
  def from_map(map), do: PhpAssocMap.Map.Serializer.from_map(map)

  @doc """
  Parses an associative array string (or charlist) to a key-valued keyword list. Both single and double quotes are supported.

  *Note*: If the string starts with `<?php return`, it'll be ignored

  ## Examples

      iex> PhpAssocMap.to_tuple("['key' => ['another_key' => 'value']]")
      [{"key", [{"another_key", "value"}]}]

      iex> PhpAssocMap.to_tuple("<?php return ['key' => ['another_key' => 'value']];")
      [{"key", [{"another_key", "value"}]}]
  """
  @spec to_tuple(binary() | charlist()) :: [tuple()]
  def to_tuple(string), do: ast(string)

  @doc """
  Converts a keyword list structure to an associative array string. The string key and value are single quoted

  The returned document will not be formatted yet. Use PhpAssocMap.Exploder.explode/1 or PhpAssocMap.Exploder.explode/2 to have it formatted.

  ## Examples

      iex> PhpAssocMap.from_tuple([{"key", [{"another_key", "value"}]}])
      "['key'=>['another_key'=>'value']]"
  """
  @spec from_tuple([tuple()]) :: binary()
  def from_tuple(tuple), do: PhpAssocMap.Tuple.Serializer.from_tuple(tuple)

  @doc """
  Gets the document AST from the leex and yecc parser. The AST is automatically obtained by to_tuple/1 and to_map/1.

  ## Examples

      iex> PhpAssocMap.ast("['key'=>['another_key'=>'value']]")
      [{"key", [{"another_key", "value"}]}]
  """
  @spec ast(charlist() | binary()) :: [tuple()]
  def ast(chars) when is_bitstring(chars), do: chars |> String.to_charlist() |> ast()

  def ast(string) do
    # Both steps use `<-` so that a lexer OR parser failure falls through and
    # is returned as that step's own error tuple. (Previously the parser step
    # used `=`, which raised a MatchError on invalid input instead of
    # returning the parser's error.)
    with {:ok, tokens, _} <- :php_lang.string(string),
         {:ok, ast} <- :php_parse.parse(tokens) do
      ast
    end
  end
end
|
lib/php_assoc_map.ex
| 0.612078
| 0.48871
|
php_assoc_map.ex
|
starcoder
|
defmodule LiqenCore.Accounts do
  import Ecto.Query, only: [from: 2]

  alias LiqenCore.Accounts.{User,
                            PasswordCredential,
                            MediumCredential}
  alias LiqenCore.Repo

  @moduledoc """
  Manages everything related to user accounts and its authentication.
  ## Notes
  - This module doesn't manage permissions or roles. It only gives an interface
    to handle with identities.
  - This module doesn't have any session mechanism like tokens.
  - This module defines a type `t:user/0` which is a "public" representation of
    a user. Functions that returns a user will return this `t:user/0`. Do not
    confuse with `LiqenCore.Accounts.User` which is an internal module.
  """

  @typedoc """
  Represents a user.
  ```
  %{
    id: "42",
    username: "tai",
    name: "<NAME>"
  }
  ```
  """
  @type user :: %{
    id: number,
    username: String.t,
    name: String.t
  }

  @doc """
  Creates a user.
  """
  @spec create_user(map) :: {:ok, user} | {:error, Ecto.Changeset.t}
  def create_user(params) do
    # cast_assoc allows a nested :password_credential map in `params` to be
    # validated and inserted together with the user.
    %User{}
    |> User.changeset(params)
    |> Ecto.Changeset.cast_assoc(
      :password_credential, with: &PasswordCredential.changeset/2)
    |> Repo.insert()
    |> take()
  end

  @doc """
  Get a user
  """
  def get_user(id) do
    User
    |> get(id)
    |> take()
  end

  @doc """
  List all users
  """
  def list_users do
    User
    |> get_all()
    |> take()
  end

  @doc """
  Authenticate a user giving a pair of email-password
  """
  def login_with_password(email, password) do
    # Railway-style pipeline: each step passes any {:error, _} from the
    # previous step through untouched (see the catch-all clauses below).
    email
    |> get_password_credential()
    |> check_password(password)
    |> get_user_by_credential()
    |> create_token()
  end

  @doc """
  Get a password credential given an e-mail address
  """
  @spec get_password_credential(String.t) :: {:ok, map} | {:error, :unauthorized}
  def get_password_credential(email) do
    query =
      from pc in PasswordCredential,
        where: pc.email == ^email

    case Repo.one(query) do
      %PasswordCredential{} = pc ->
        {:ok, pc}

      _ ->
        {:error, :unauthorized}
    end
  end

  @doc """
  Check if a given password matches with the valid one stored in a
  PasswordCredential object
  """
  @spec check_password(any, String.t) :: {:ok, map} | {:error, :unauthorized}
  def check_password({:ok, credential}, password) do
    # Inverted `with`: when check_pass/2 fails ({:error, _} matches) we
    # normalize to {:error, :unauthorized}; when it succeeds, its non-matching
    # {:ok, user} result falls through and is returned as-is.
    with {:error, _} <- Comeonin.Bcrypt.check_pass(credential, password) do
      {:error, :unauthorized}
    end
  end

  def check_password(any, _), do: any

  @doc """
  Get a user given its credential
  """
  def get_user_by_credential({:ok, credential}) do
    credential = Repo.preload(credential, :user)
    {:ok, credential.user}
  end

  def get_user_by_credential(any), do: any

  @doc """
  Create a token from a User object
  """
  def create_token({:ok, %User{} = user}) do
    Guardian.encode_and_sign(user, :access)
  end

  def create_token(any), do: any

  @doc """
  Authenticate a user via medium giving a `state` and a `code`
  To get both `state` and `code`, use `get_medium_login_data/0` or
  `get_medium_login_data/1`
  """
  def login_with_medium(state, code) do
    state
    |> get_medium_credential_from_state()
    |> get_long_lived_token(code)
    |> get_medium_user_data()
    |> update_medium_data()
    |> ensure_user_exists()
    |> create_token()
  end

  @doc """
  Get data needed to create a user from a medium identity: a `state` (returned directly
  by this function) and a `code`.
  To get the `code`, follow the steps in
  [Medium API documentation](https://github.com/Medium/medium-api-docs#2-authentication)
  to redirect the user to a page in your domain with a short-term authorization
  code.
  From that redirect_uri, collect the `state` and `code` and call
  `login_with_medium/2` to finish the process.
  """
  def get_medium_login_data do
    # Create a MediumCredential with a random generated "state"
    # NOTE(review): the Repo.insert/1 result is discarded — an insert failure
    # would go unnoticed and login_with_medium/2 would later fail with
    # :not_found; confirm this is acceptable.
    state = Base.encode16(:crypto.strong_rand_bytes(8))

    %MediumCredential{state: state}
    |> Ecto.Changeset.change()
    |> Repo.insert()
    # Leave all the fields empty

    state
  end

  @doc """
  Get data needed to log in a `user` via medium: a `state` (returned directly by
  this function) and a `code`.
  To get the `code`, follow the steps in
  [Medium API documentation](https://github.com/Medium/medium-api-docs#2-authentication)
  to redirect the user to a page in your domain with a short-term authorization
  code.
  From that redirect_uri, collect the `state` and `code` and call
  `login_with_medium/2`.
  """
  def get_medium_login_data(user_id) do
    # If the user has MediumCredential, refresh the `state` with a random
    # generated one.
    # Otherwise, create a MediumCredential with a random generated "state"
    # linked with `user`
    state = Base.encode16(:crypto.strong_rand_bytes(8))

    case Repo.get_by(MediumCredential, user_id: user_id) do
      nil -> %MediumCredential{user_id: user_id}
      credential -> credential
    end
    |> Ecto.Changeset.change()
    |> Ecto.Changeset.put_change(:state, state)
    |> Repo.insert_or_update!()

    state
  end

  # Projects internal structs into the public `t:user/0` map shape.
  # The list clause assumes every element converts successfully.
  defp take(list) when is_list(list) do
    list =
      list
      |> Enum.map(&take(&1))
      |> Enum.map(fn {:ok, obj} -> obj end)

    {:ok, list}
  end

  defp take({:ok, %User{} = object}) do
    {:ok, Map.take(object, [:id, :name, :username])}
  end

  # Pass errors (and anything else) through unchanged.
  defp take(any), do: any

  # Repo.get/2 wrapped into an {:ok, _} | {:error, :not_found} contract.
  defp get(struct, id) do
    case Repo.get(struct, id) do
      %{} = object ->
        {:ok, object}

      _ ->
        {:error, :not_found}
    end
  end

  defp get_all(struct) do
    struct
    |> Repo.all()
    |> Enum.map(fn obj -> {:ok, obj} end)
  end

  # NOTE(review): @doc on a defp emits a compiler warning and the doc is
  # discarded — consider a plain comment instead.
  @doc """
  Retrieve a MediumCredential
  """
  defp get_medium_credential_from_state(state) do
    case Repo.get_by(MediumCredential, state: state) do
      nil -> {:error, :not_found}
      credential -> {:ok, credential}
    end
  end

  @doc """
  Request a Medium Long-lived token from its API
  """
  def get_long_lived_token({:ok, credential}, code) do
    # Exchanges the short-lived authorization `code` for a long-lived access
    # token. Client credentials are read from the environment at runtime.
    uri = "https://api.medium.com/v1/tokens"

    body = [
      code: code,
      client_id: System.get_env("MEDIUM_CLIENT_ID"),
      client_secret: System.get_env("MEDIUM_CLIENT_SECRET"),
      grant_type: "authorization_code",
      redirect_uri: System.get_env("MEDIUM_REDIRECT_URI")
    ]

    headers = %{
      "Content-Type" => "application/x-www-form-urlencoded",
      "Accept" => "application/json"
    }

    with {:ok, %{"access_token" => access_token}} <-
           uri
           |> HTTPoison.post({:form, body}, headers)
           |> handle_json_response(201)
    do
      {:ok, credential, access_token}
    end
  end

  def get_long_lived_token(any, _), do: any

  @doc """
  Get current user data from a Medium Token
  """
  def get_medium_user_data({:ok, credential, access_token}) do
    uri = "https://api.medium.com/v1/me"

    headers = %{
      "Content-Type" => "application/json",
      "Accept" => "application/json",
      "Authorization" => "Bearer #{access_token}"
    }

    with {:ok, %{"data" => data}} <-
           uri
           |> HTTPoison.get(headers)
           |> handle_json_response(200)
    do
      {:ok, credential, data}
    end
  end

  def get_medium_user_data(any), do: any

  # Decodes a JSON HTTP response body only when the status code matches the
  # expected one; otherwise the raw body is returned as the error.
  defp handle_json_response({:ok, response}, status_code) do
    %{body: json_body,
      status_code: code} = response

    case code do
      ^status_code ->
        Poison.decode(json_body)

      _ ->
        {:error, json_body}
    end
  end

  defp handle_json_response(any, _), do: any

  @doc """
  Update a MediumCredential
  """
  def update_medium_data({:ok, new_credential, data}) do
    %{
      "id" => medium_id,
      "imageUrl" => image_url,
      "name" => name,
      "url" => url,
      "username" => username
    } = data

    attrs = %{
      medium_id: medium_id,
      username: username,
      name: name,
      url: url,
      image_url: image_url
    }

    # Prefer an existing credential for this medium_id over the one created
    # for this login attempt.
    # NOTE(review): when an old credential is found, `new_credential` (the
    # state row created by get_medium_login_data) appears to be left behind —
    # confirm it is cleaned up elsewhere.
    case Repo.get_by(MediumCredential, medium_id: medium_id) do
      nil ->
        new_credential
        |> MediumCredential.changeset(attrs)
        |> Repo.update()

      old_credential ->
        old_credential
        |> MediumCredential.changeset(attrs)
        |> Repo.update()
    end
  end

  def update_medium_data(any), do: any

  @doc """
  Given a MediumCredential, creates a User if necessary
  """
  def ensure_user_exists({:ok, %MediumCredential{} = credential}) do
    %{user: user,
      username: username} = Repo.preload(credential, :user)

    # A random hex suffix is appended to reduce username collisions when
    # creating a local account for the Medium identity.
    params = %{
      username: username <> Base.encode16(:crypto.strong_rand_bytes(3))
    }

    case user do
      nil ->
        {:ok, user} =
          %User{}
          |> User.changeset(params)
          |> Repo.insert()

        # Link the freshly created user back to the credential.
        credential
        |> Ecto.Changeset.change(user_id: user.id)
        |> Repo.update()

        {:ok, user}

      _ ->
        {:ok, user}
    end
  end

  def ensure_user_exists(any), do: any
end
|
lib/liqen_core/accounts/accounts.ex
| 0.814717
| 0.601418
|
accounts.ex
|
starcoder
|
defmodule Bolt.Cogs.Tempban do
  @moduledoc false

  @behaviour Nosedrum.Command

  alias Nosedrum.Predicates

  alias Bolt.{
    ErrorFormatters,
    Events.Handler,
    Helpers,
    Humanizer,
    ModLog,
    Parsers,
    Repo,
    Schema.Infraction
  }

  alias Nostrum.Api
  import Ecto.Query, only: [from: 2]
  require Logger

  @impl true
  def usage, do: ["tempban <user:snowflake|member> <duration:duration> [reason:str...]"]

  @impl true
  def description,
    do: """
    Temporarily ban the given user for the given duration with an optional reason.
    An infraction is stored in the infraction database, and can be retrieved later.
    Requires the `BAN_MEMBERS` permission.
    **Examples**:
    ```rs
    // tempban Dude for 2 days without a reason
    tempban @Dude#0001 2d
    // the same thing, but with a specified reason
    tempban @Dude#0001 2d posting cats instead of ducks
    ```
    """

  @impl true
  # Only usable in a guild and only by members with BAN_MEMBERS.
  def predicates,
    do: [&Predicates.guild_only/1, Predicates.has_permission(:ban_members)]

  @impl true
  # Happy path: resolve the target, verify role hierarchy, parse the expiry,
  # reject duplicates, ban via the API, then persist the infraction. Each
  # failure mode surfaces as a distinct value handled in `else`.
  def command(msg, [user, duration | reason_list]) do
    response =
      with reason <- Enum.join(reason_list, " "),
           {:ok, user_id, converted_user} <- Helpers.into_id(msg.guild_id, user),
           # The invoker must be above the target in the role hierarchy.
           {:ok, true} <- Helpers.is_above(msg.guild_id, msg.author.id, user_id),
           {:ok, expiry} <- Parsers.human_future_date(duration),
           # Guard against a second active tempban for the same member.
           query <-
             from(
               infr in Infraction,
               where:
                 infr.active and infr.user_id == ^user_id and infr.guild_id == ^msg.guild_id and
                   infr.type == "tempban",
               limit: 1,
               select: {infr.id, infr.expires_at}
             ),
           [] <- Repo.all(query),
           # NOTE(review): the trailing `7` is presumably the days of message
           # history to delete — confirm against Nostrum.Api.create_guild_ban.
           {:ok} <- Api.create_guild_ban(msg.guild_id, user_id, 7),
           infraction_map <- %{
             type: "tempban",
             guild_id: msg.guild_id,
             user_id: user_id,
             actor_id: msg.author.id,
             reason: if(reason != "", do: reason, else: nil),
             expires_at: expiry
           },
           {:ok, _created_infraction} <- Handler.create(infraction_map) do
        user_string = Humanizer.human_user(converted_user || user_id)

        ModLog.emit(
          msg.guild_id,
          "INFRACTION_CREATE",
          "#{Humanizer.human_user(msg.author)} temporarily banned" <>
            " #{user_string} until #{Helpers.datetime_to_human(expiry)}" <>
            if(reason != "", do: " with reason `#{Helpers.clean_content(reason)}`", else: "")
        )

        response =
          "👌 temporarily banned #{user_string} until #{Helpers.datetime_to_human(expiry)}"

        if reason != "" do
          response <> " with reason `#{Helpers.clean_content(reason)}`"
        else
          response
        end
      else
        # Hierarchy check failed.
        {:ok, false} ->
          "🚫 you need to be above the target user in the role hierarchy"

        # The duplicate-infraction query returned an existing row.
        [{existing_id, existing_expiry}] ->
          "❌ there already is a tempban for that member under ID" <>
            " ##{existing_id} which will expire on " <> Helpers.datetime_to_human(existing_expiry)

        # Anything else (API errors, parse errors, ...) gets formatted.
        error ->
          ErrorFormatters.fmt(msg, error)
      end

    {:ok, _msg} = Api.create_message(msg.channel_id, response)
  end

  # Wrong arity / missing arguments: show usage.
  def command(msg, _args) do
    response = "ℹ️ usage: `tempban <user:snowflake|member> <duration:duration> [reason:str...]`"
    {:ok, _msg} = Api.create_message(msg.channel_id, response)
  end
end
|
lib/bolt/cogs/tempban.ex
| 0.741206
| 0.595581
|
tempban.ex
|
starcoder
|
defmodule Yum.Data do
@moduledoc """
Import food data.
The location of the data can either be set globally in the config:
config :yum, path: "path/to/data"
Or it can be explicitly passed to a function.
"""
@type translation :: %{ optional(String.t) => translation | String.t }
@type translation_tree :: %{ optional(String.t) => translation }
@type diet_list :: [String.t]
@type diet_info :: %{ optional(String.t) => translation_tree }
@type diet_tree :: %{ optional(String.t) => diet_info }
@type allergen_list :: [String.t]
@type allergen_info :: %{ optional(String.t) => translation_tree }
@type allergen_tree :: %{ optional(String.t) => allergen_info }
@type nutrition :: %{ optional(String.t) => any }
@type ingredient_info :: %{ optional(String.t) => translation_tree | diet_list | allergen_list | nutrition }
@type cuisine_info :: %{ optional(String.t) => translation_tree | String.t }
@type ingredient_tree :: %{ optional(String.t) => ingredient_tree, required(:__info__) => ingredient_info }
@type cuisine_tree :: %{ optional(String.t) => cuisine_tree, required(:__info__) => cuisine_info }
@type migration :: %{ optional(String.t) => String.t | { String.t, String.t } }
@type file_filter :: ((String.t) -> boolean)
@type list_reducer(type) :: ({ String.t, type }, any -> any)
@type tree_reducer(type) :: ({ String.t, type }, [{ String.t, type }], any -> any)
@type diet_reducer :: list_reducer(diet_info)
@type allergen_reducer :: list_reducer(allergen_info)
@type ingredient_reducer :: tree_reducer(ingredient_info)
@type cuisine_reducer :: tree_reducer(cuisine_info)
defp load(path), do: TomlElixir.parse_file!(path)
defp path(), do: Application.fetch_env!(:yum, :path)
defp load_all(_), do: true
@doc """
Get a filter that can be used for the given refs.
"""
@spec ref_filter(String.t | [String.t]) :: file_filter
def ref_filter(ref) do
refs = Yum.Util.match_ref(ref)
&Enum.member?(refs, &1)
end
@doc """
Load the diet names and translations.
Uses the path set in the config under `:path`.
See `diets/1`.
"""
@spec diets() :: diet_tree
def diets(), do: diets(path())
@doc """
Load the diet names and translations.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec diets(String.t | file_filter) :: diet_tree
def diets(filter) when is_function(filter), do: diets(path(), filter)
def diets(data), do: diets(data, &load_all/1)
@doc """
Load the diet names and translations.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec diets(String.t, file_filter) :: diet_tree
def diets(data, filter), do: load_list(Path.join(data, "diets"), filter)
@doc """
Reduce the diet data.
Uses the path set in the config under `:path`.
See `reduce_diets/3`.
"""
@spec reduce_diets(any, diet_reducer) :: any
def reduce_diets(acc, fun), do: reduce_diets(acc, fun, path())
@doc """
Reduce the diet data.
Each diet is passed to `fun` and an updated accumulator is returned.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec reduce_diets(any, diet_reducer, String.t | file_filter) :: any
def reduce_diets(acc, fun, filter) when is_function(filter), do: reduce_diets(acc, fun, path(), filter)
def reduce_diets(acc, fun, data), do: reduce_diets(acc, fun, data, &load_all/1)
@doc """
Reduce the diet data.
Each diet is passed to `fun` and an updated accumulator is returned.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec reduce_diets(any, diet_reducer, String.t, file_filter) :: any
def reduce_diets(acc, fun, data, filter), do: reduce_list(Path.join(data, "diets"), acc, fun, filter)
@doc """
Load the allergen names and translations.
Uses the path set in the config under `:path`.
See `allergens/1`.
"""
@spec allergens() :: allergen_tree
def allergens(), do: allergens(path())
@doc """
Load the allergen names and translations.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec allergens(String.t | file_filter) :: allergen_tree
def allergens(filter) when is_function(filter), do: allergens(path(), filter)
def allergens(data), do: allergens(data, &load_all/1)
@doc """
Load the allergen names and translations.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec allergens(String.t, file_filter) :: allergen_tree
def allergens(data, filter), do: load_list(Path.join(data, "allergens"), filter)
@doc """
Reduce the allergen data.
Uses the path set in the config under `:path`.
See `reduce_allergens/3`.
"""
@spec reduce_allergens(any, allergen_reducer) :: any
def reduce_allergens(acc, fun), do: reduce_allergens(acc, fun, path())
@doc """
Reduce the allergen data.
Each allergen is passed to `fun` and an updated accumulator is returned.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec reduce_allergens(any, allergen_reducer, String.t | file_filter) :: any
def reduce_allergens(acc, fun, filter) when is_function(filter), do: reduce_allergens(acc, fun, path(), filter)
def reduce_allergens(acc, fun, data), do: reduce_allergens(acc, fun, data, &load_all/1)
@doc """
Reduce the allergen data.
Each allergen is passed to `fun` and an updated accumulator is returned.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec reduce_allergens(any, allergen_reducer, String.t, file_filter) :: any
def reduce_allergens(acc, fun, data, filter), do: reduce_list(Path.join(data, "allergens"), acc, fun, filter)
@doc """
Load the ingredient data.
Uses the path set in the config under `:path`.
See `ingredients/2`.
"""
@spec ingredients(String.t | file_filter) :: ingredient_tree
def ingredients(group \\ "")
def ingredients(filter) when is_function(filter), do: ingredients("", filter)
def ingredients(group), do: ingredients(group, path())
@doc """
Load the ingredient data.
If only a particular group of ingredients is required, the path to
find these can be provided to `group`. This will however not include
any parent information that should be applied to these child ingredients.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec ingredients(String.t, String.t | file_filter) :: ingredient_tree
def ingredients(group, filter) when is_function(filter), do: ingredients(group, path(), filter)
def ingredients(group, data), do: ingredients(group, data, &load_all/1)
@doc """
Load the ingredient data.
If only a particular group of ingredients is required, the path to
find these can be provided to `group`. This will however not include
any parent information that should be applied to these child ingredients.
Uses the path referenced by `data`.
The files to be loaded can be filtered by providing a filter.
"""
@spec ingredients(String.t, String.t, file_filter) :: ingredient_tree
def ingredients(group, data, filter), do: load_tree(Path.join([data, "ingredients", group]), filter)
@doc """
Reduce the ingredient data, using the path set in the config under
`:path`.

See `reduce_ingredients/4`.
"""
@spec reduce_ingredients(any, ingredient_reducer, String.t | file_filter) :: any
def reduce_ingredients(acc, fun, group \\ "")

def reduce_ingredients(acc, fun, filter) when is_function(filter) do
  reduce_ingredients(acc, fun, "", filter)
end

def reduce_ingredients(acc, fun, group) do
  reduce_ingredients(acc, fun, group, path())
end

@doc """
Reduce the ingredient data, passing every ingredient to `fun` together
with the running accumulator.

When the last argument is a function it is used as the file filter and
the data path comes from the configuration; otherwise it is the data
path and every file is loaded. Restricting to a `group` does not include
parent information that should be applied to those child ingredients.
"""
@spec reduce_ingredients(any, ingredient_reducer, String.t, String.t | file_filter) :: any
def reduce_ingredients(acc, fun, group, filter) when is_function(filter) do
  reduce_ingredients(acc, fun, group, path(), filter)
end

def reduce_ingredients(acc, fun, group, data) do
  reduce_ingredients(acc, fun, group, data, &load_all/1)
end

@doc """
Reduce the ingredient data rooted at `data`, restricted to `group` and
to the files accepted by `filter`. Every ingredient is passed to `fun`
and an updated accumulator is returned.
"""
@spec reduce_ingredients(any, ingredient_reducer, String.t, String.t, file_filter) :: any
def reduce_ingredients(acc, fun, group, data, filter) do
  [data, "ingredients", group]
  |> Path.join()
  |> reduce_tree(acc, fun, filter)
end
@doc """
Load the cuisine data, using the path set in the config under `:path`.

See `cuisines/2`.
"""
@spec cuisines(String.t | file_filter) :: cuisine_tree
def cuisines(group \\ "")

def cuisines(filter) when is_function(filter) do
  cuisines("", filter)
end

def cuisines(group) do
  cuisines(group, path())
end

@doc """
Load the cuisine data.

When the last argument is a function it is used as the file filter and
the data path comes from the configuration; otherwise it is the data
path and every file is loaded. Restricting to a `group` does not include
parent information that should be applied to those child cuisines.
"""
@spec cuisines(String.t, String.t | file_filter) :: cuisine_tree
def cuisines(group, filter) when is_function(filter) do
  cuisines(group, path(), filter)
end

def cuisines(group, data) do
  cuisines(group, data, &load_all/1)
end

@doc """
Load the cuisine data rooted at `data`, restricted to `group` and to the
files accepted by `filter`. Parent information that should be applied to
child cuisines is not included when a group is given.
"""
@spec cuisines(String.t, String.t, file_filter) :: cuisine_tree
def cuisines(group, data, filter) do
  [data, "cuisines", group]
  |> Path.join()
  |> load_tree(filter)
end
@doc """
Reduce the cuisine data, using the path set in the config under `:path`.

See `reduce_cuisines/4`.
"""
@spec reduce_cuisines(any, cuisine_reducer, String.t | file_filter) :: any
def reduce_cuisines(acc, fun, group \\ "")

def reduce_cuisines(acc, fun, filter) when is_function(filter) do
  reduce_cuisines(acc, fun, "", filter)
end

def reduce_cuisines(acc, fun, group) do
  reduce_cuisines(acc, fun, group, path())
end

@doc """
Reduce the cuisine data, passing every cuisine to `fun` together with
the running accumulator.

When the last argument is a function it is used as the file filter and
the data path comes from the configuration; otherwise it is the data
path and every file is loaded. Restricting to a `group` does not include
parent information that should be applied to those child cuisines.
"""
@spec reduce_cuisines(any, cuisine_reducer, String.t, String.t | file_filter) :: any
def reduce_cuisines(acc, fun, group, filter) when is_function(filter) do
  reduce_cuisines(acc, fun, group, path(), filter)
end

def reduce_cuisines(acc, fun, group, data) do
  reduce_cuisines(acc, fun, group, data, &load_all/1)
end

@doc """
Reduce the cuisine data rooted at `data`, restricted to `group` and to
the files accepted by `filter`. Every cuisine is passed to `fun` and an
updated accumulator is returned.
"""
@spec reduce_cuisines(any, cuisine_reducer, String.t, String.t, file_filter) :: any
def reduce_cuisines(acc, fun, group, data, filter) do
  [data, "cuisines", group]
  |> Path.join()
  |> reduce_tree(acc, fun, filter)
end
@doc """
Load the migration data, using the path set in the config under `:path`.

See `migrations/3`.
"""
@spec migrations(String.t, integer) :: [migration]
def migrations(type, timestamp \\ -1), do: migrations(type, timestamp, path())

@doc """
Load the migration data.

The path to the set of migration data for a certain type should be passed
to `type`. Any migration files after `timestamp` will be loaded; any
earlier or equal to it will be ignored. Uses the path referenced by
`data`.
"""
@spec migrations(String.t, integer, String.t) :: [migration]
def migrations(type, timestamp, data) do
  [data, type, "__migrations__", "*.yml"]
  |> Path.join()
  |> Path.wildcard()
  |> Enum.filter(&(to_timestamp(&1) > timestamp))
  # sort_by/2 derives each file's timestamp once; the previous sort/2
  # comparator recomputed to_timestamp/1 on every comparison.
  |> Enum.sort_by(&to_timestamp/1)
  |> Enum.map(&load_migration/1)
end
@doc """
Reduce the migration data, using the path set in the config under
`:path`.

See `reduce_migrations/5`.
"""
@spec reduce_migrations(any, String.t, (migration, any -> any), integer) :: any
def reduce_migrations(acc, type, fun, timestamp \\ -1), do: reduce_migrations(acc, type, fun, timestamp, path())

@doc """
Reduce the migration data. Each migration is passed to `fun` and an
updated accumulator is returned.

The path to the set of migration data for a certain type should be passed
to `type`. Any migration files after `timestamp` will be loaded; any
earlier or equal to it will be ignored. Uses the path referenced by
`data`.
"""
@spec reduce_migrations(any, String.t, (migration, any -> any), integer, String.t) :: any
def reduce_migrations(acc, type, fun, timestamp, data) do
  [data, type, "__migrations__", "*.yml"]
  |> Path.join()
  |> Path.wildcard()
  |> Enum.filter(&(to_timestamp(&1) > timestamp))
  # sort_by/2 derives each file's timestamp once; the previous sort/2
  # comparator recomputed to_timestamp/1 on every comparison.
  |> Enum.sort_by(&to_timestamp/1)
  |> Enum.reduce(acc, &fun.(load_migration(&1), &2))
end
# Loads every `*.toml` file directly under `path` that passes `filter`,
# building a map that nests each file's parsed contents under one key per
# component of its path relative to `path` (innermost key: the file name
# without extension).
defp load_list(path, filter) do
  path
  |> Path.join("*.toml")
  |> Path.wildcard()
  |> Enum.filter(fn file -> file |> trim_path_ref(path) |> filter.() end)
  |> Enum.reduce(%{}, fn file, acc ->
    # Drop the file name itself; keep the (reversed) directory components.
    [_ | ancestors] = file |> Path.relative_to(path) |> Path.split() |> Enum.reverse()
    keys = [Path.basename(file, ".toml") | ancestors]
    nested = Enum.reduce(keys, load(file), &%{&1 => &2})
    Map.merge(acc, nested)
  end)
end
# Loads every `*.toml` file under `path` (recursively) that passes `filter`
# into a nested map mirroring the directory layout. Each node stores the
# file's parsed contents under the `:__info__` key, so a node can hold both
# its own data and its children.
defp load_tree(path, filter) do
  Path.wildcard(Path.join(path, "**/*.toml"))
  |> Enum.filter(&(trim_path_ref(&1, path) |> filter.()))
  |> Enum.reduce(%{}, fn file, acc ->
    # Drop the file name; keep the (reversed) directory components.
    [_|paths] = Enum.reverse(Path.split(Path.relative_to(file, path)))
    # Wrap the parsed contents in one map layer per path component,
    # starting from the file's own name outwards.
    contents = Enum.reduce([Path.basename(file, ".toml")|paths], %{ __info__: load(file) }, fn name, contents ->
      %{ name => contents }
    end)
    # Deep-merge this file's nested map into the accumulated tree.
    Map.merge(acc, contents, &merge_nested_contents/3)
  end)
end
# Parses one YAML migration file into a map holding a "timestamp" key
# (taken from the file name) plus "add"/"update"/"delete"/"move" lists.
defp load_migration(path) do
  # The file is expected to contain exactly one YAML document.
  [content] = YamlElixir.read_all_from_file(path)
  Enum.reduce(content, %{ "timestamp" => filename(path) }, fn
    # Entries are tagged A (add), U (update), D (delete) or M (move);
    # each is prepended, so the lists accumulate in reverse order here.
    %{ "A" => ref }, acc -> Map.put(acc, "add", [ref|(acc["add"] || [])])
    %{ "U" => ref }, acc -> Map.put(acc, "update", [ref|(acc["update"] || [])])
    %{ "D" => ref }, acc -> Map.put(acc, "delete", [ref|(acc["delete"] || [])])
    %{ "M" => ref }, acc ->
      # A move entry is written as "<from> <to>" and stored as a tuple.
      [ref_a, ref_b] = String.split(ref, " ")
      Map.put(acc, "move", [{ ref_a, ref_b }|(acc["move"] || [])])
  end)
  # Restore chronological order inside each accumulated list; the
  # "timestamp" string value is left untouched.
  |> Enum.map(fn
    { key, list } when is_list(list) -> { key, Enum.reverse(list) }
    other -> other
  end)
  |> Map.new
end
# Resolves Map.merge/3 conflicts by deep-merging the two subtree maps.
defp merge_nested_contents(_key, a, b), do: Map.merge(a, b, &merge_nested_contents/3)
# Folds `fun` over every `*.toml` file directly under `path` that passes
# `filter`. `fun` receives a `{name, contents}` tuple and the accumulator.
defp reduce_list(path, acc, fun, filter) do
  path
  |> Path.join("*.toml")
  |> Path.wildcard()
  |> Enum.filter(fn file -> file |> trim_path_ref(path) |> filter.() end)
  |> Enum.reduce(acc, fn file, current ->
    fun.({Path.basename(file, ".toml"), load(file)}, current)
  end)
end
# Folds `fun` over every `*.toml` file under `path` (recursively) that
# passes `filter`. `fun` receives the `{name, data}` tuple for the current
# file, the stack of previously visited `{name, data}` nodes still on the
# current branch (most recent first), and the accumulator.
defp reduce_tree(path, acc, fun, filter) do
  Path.wildcard(Path.join(path, "**/*.toml"))
  |> Enum.filter(&(trim_path_ref(&1, path) |> filter.()))
  |> Enum.reduce({ [], acc }, fn file, { parent, acc } ->
    [name|paths] = Enum.reverse(Path.split(Path.relative_to(file, path)))
    # Drop visited nodes that are no longer on this file's branch.
    # NOTE(review): relies on Path.wildcard/1 returning files in sorted
    # order so that siblings arrive consecutively — confirm if reused.
    parent = remove_stale_nodes(parent, paths)
    data = load(file)
    tagged_data = { Path.basename(name, ".toml"), data }
    acc = fun.(tagged_data, parent, acc)
    { [tagged_data|parent], acc }
  end)
  |> elem(1)
end
# Trims the visited-node stack to the entries whose names still match the
# (reversed) directory components of the file currently being visited;
# anything that does not match has been left behind by the traversal.
defp remove_stale_nodes([dep = { name, _ }], [name]), do: [dep]
defp remove_stale_nodes([dep = { name, _ }|deps], [name|new_deps]), do: [dep|remove_stale_nodes(deps, new_deps)]
defp remove_stale_nodes([_|deps], new_deps), do: remove_stale_nodes(deps, new_deps)
defp remove_stale_nodes([], _), do: []
# File name without directory or extension, e.g. "a/b/123.yml" -> "123".
defp filename(file), do: Path.basename(file) |> Path.rootname
# Migration file names are integer timestamps; parse one for comparison.
defp to_timestamp(file), do: filename(file) |> String.to_integer
# Strips the leading search path and the ".toml" suffix from a wildcard
# hit, producing the reference handed to user-supplied file filters.
defp trim_path_ref(ref, path), do: String.replace_prefix(ref, path, "") |> String.replace_suffix(".toml", "")
end
|
lib/yum/data.ex
| 0.867401
| 0.54825
|
data.ex
|
starcoder
|
defmodule ArtemisLog.ListSessions do
  use ArtemisLog.Context
  import ArtemisLog.Helpers.Filter
  import ArtemisLog.Helpers.Search
  import Ecto.Query
  alias ArtemisLog.EventLog
  alias ArtemisLog.Repo
  @moduledoc """
  When a user logs in, they are given a `session_id` value. All actions the
  user takes during that log is considered a "session".
  Sessions are a virtual resource. The session data is not stored as a separate
  table in the database. Instead, session information is included in two
  existing resources:
  - `EventLog`. Write actions that change data like create, update and delete.
  - `HttpRequestLog`. Read actions that do not change data like index and show.
  Although session data is stored in both resources, when listing sessions it
  is sufficient to only query the `EventLog` records. At least one EventLog record
  is created each session.
  ## Implementation
  Although it is possible to return a list of unique session_id values using a
  `DISTINCT` SQL clause, it does not support ordering.
  Since the primary use case for this data is displaying paginated data in
  historical order, the context uses the more complex and robust `SELECT` and
  `GROUP_BY` method of querying data.
  For more information see: https://stackoverflow.com/q/5391564
  """

  # Pagination defaults applied when the caller does not specify them.
  @default_page_size 10
  @default_paginate true

  # Returns the sessions visible to `user`, paginated unless "paginate"
  # is explicitly set to false in `params`.
  def call(params \\ %{}, user) do
    params = default_params(params)
    EventLog
    |> filter_query(params, user)
    |> search_filter(params)
    |> group_query(params)
    |> restrict_access(user)
    |> get_records(params)
  end

  # Normalizes params to string keys and fills in pagination defaults.
  defp default_params(params) do
    params
    |> ArtemisLog.Helpers.keys_to_strings()
    # NOTE(review): "page_number" is read from the ORIGINAL `params`, before
    # keys_to_strings/1 ran — an atom :page_number key would be missed here.
    # Confirm callers always pass string keys.
    |> Map.put_new("page", Map.get(params, "page_number", 1))
    |> Map.put_new("page_size", @default_page_size)
    |> Map.put_new("paginate", @default_paginate)
  end

  # Applies each entry of the "filters" map to the query; no-op otherwise.
  defp filter_query(query, %{"filters" => filters}, _user) when is_map(filters) do
    Enum.reduce(filters, query, fn {key, value}, acc ->
      filter(acc, key, value)
    end)
  end

  defp filter_query(query, _params, _user), do: query

  # Blank values and unknown keys are ignored; known keys accept a
  # delimiter-separated list of values (split by the imported split/1).
  defp filter(query, _key, nil), do: query
  defp filter(query, _key, ""), do: query
  defp filter(query, "session_id", value), do: where(query, [el], el.session_id in ^split(value))
  defp filter(query, "user_id", value), do: where(query, [el], el.user_id in ^split(value))
  defp filter(query, "user_name", value), do: where(query, [el], el.user_name in ^split(value))
  defp filter(query, _key, _value), do: query

  # Collapses event logs into one row per session_id, newest session first.
  defp group_query(query, _params) do
    query
    |> select_fields()
    |> group_by([:session_id])
    |> order_by([q], desc: max(q.inserted_at))
    |> where([q], not is_nil(q.session_id))
  end

  # Aggregated projection: max() is required for non-grouped columns.
  defp select_fields(query) do
    select(
      query,
      [q],
      %{
        inserted_at: max(q.inserted_at),
        session_id: q.session_id,
        user_name: max(q.user_name)
      }
    )
  end

  # Permission gate: all sessions, only the user's own sessions, or — via
  # the always-false is_nil(id) clause — none at all.
  defp restrict_access(query, user) do
    cond do
      has?(user, "sessions:access:all") -> query
      has?(user, "sessions:access:self") -> where(query, [el], el.user_id == ^user.id)
      true -> where(query, [el], is_nil(el.id))
    end
  end

  defp get_records(query, %{"paginate" => true} = params), do: Repo.paginate(query, params)
  defp get_records(query, _params), do: Repo.all(query)
end
|
apps/artemis_log/lib/artemis_log/contexts/session/list_sessions.ex
| 0.734024
| 0.513607
|
list_sessions.ex
|
starcoder
|
defmodule Day17 do
  @moduledoc """
  Advent of Code 2019, day 17.

  Part 1 reads the scaffold image from the Intcode camera and sums the
  alignment parameters of its intersections. Part 2 derives a movement
  routine that covers the whole scaffold and feeds it to the vacuum robot,
  returning the amount of dust collected.
  """

  def part1(input) do
    grid = read_grid(input)
    IO.write(grid)
    set = make_map_set(String.split(grid, "\n"))
    find_intersections(set)
  end

  def part2(input) do
    path = find_path(input)
    operate_robot(input, path)
  end

  # Part 1 helpers

  # Sum of x * y over every scaffold cell whose four neighbours are scaffold.
  defp find_intersections(set) do
    set
    |> Enum.filter(fn pos -> is_intersection(pos, set) end)
    |> Enum.map(fn {x, y} -> x * y end)
    |> Enum.sum()
  end

  defp is_intersection(pos, set) do
    # Fix: the original reversed directions() first, which is a no-op for
    # Enum.all?/2 — order cannot affect the result.
    Enum.all?(directions(), fn dir -> vec_add(pos, dir) in set end)
  end

  defp directions() do
    [{0, 1}, {0, -1}, {-1, 0}, {1, 0}]
  end

  defp vec_add({x1, y1}, {x2, y2}), do: {x1 + x2, y1 + y2}

  # Part 2 helpers

  # Find the movement path for the robot across the whole scaffold.
  defp find_path(input) do
    grid = read_grid(input)
    map = make_map(String.split(grid, "\n"))
    {pos, dir} = Map.fetch!(map, :robot)
    path_finder = PathFinder.new(pos, dir, map)
    path = PathFinder.find_paths(path_finder)
    PathFinder.make_path(path, path_finder)
  end

  # Wake the robot up (memory cell 0 := 2) and drive it along `path`.
  defp operate_robot(program, path) do
    memory = Intcode.new(program)
    memory = Map.put(memory, 0, 2)
    robot_program = robot_program(path)
    memory = Intcode.execute(memory)
    send_commands(robot_program, memory)
  end

  # Feed each command line to the machine, echoing prompts and input.
  defp send_commands([cmd | cmds], memory) do
    {output, memory} = Intcode.get_output(memory)
    IO.write(output)
    IO.write(cmd)
    memory = Intcode.set_input(memory, cmd)
    memory = Intcode.resume(memory)
    send_commands(cmds, memory)
  end

  defp send_commands([], memory) do
    {output, _memory} = Intcode.get_output(memory)
    # Print the final camera frame; values > 255 are not ASCII.
    IO.write(Enum.filter(output, &(&1 < 255)))
    # The last output value is the amount of dust collected.
    List.last(output)
  end

  # Main routine and sub-routines, then "n\n" to decline the video feed.
  defp robot_program(path) do
    Splitter.split(path) ++ ['n\n']
  end

  # Run the camera program once and return its ASCII output as a string.
  defp read_grid(program) do
    memory = Intcode.new(program)
    memory = Intcode.execute(memory)
    {output, _memory} = Intcode.get_output(memory)
    to_string(output)
  end

  # Set of {x, y} coordinates of scaffold (?#) cells.
  defp make_map_set(input) do
    input
    |> Stream.with_index()
    |> Enum.reduce(MapSet.new(), fn {line, y}, set ->
      line
      |> String.to_charlist()
      |> Stream.with_index()
      |> Enum.reduce(set, fn
        # Only ?# cells matter; the original listed ?. and a redundant
        # catch-all clause separately.
        {?#, x}, set -> MapSet.put(set, {x, y})
        {_char, _x}, set -> set
      end)
    end)
  end

  # Map of {x, y} => :path | :wall, plus :robot => {pos, dir}.
  # Any character other than #, . or ^ raises, as before.
  defp make_map(input) do
    input
    |> Stream.with_index()
    |> Enum.reduce(Map.new(), fn {line, y}, map ->
      line
      |> String.to_charlist()
      |> Stream.with_index()
      |> Enum.reduce(map, fn {char, x}, map ->
        pos = {x, y}
        case char do
          ?# -> Map.put(map, pos, :path)
          # Consistency: use `pos` like the other branches did.
          ?. -> Map.put(map, pos, :wall)
          ?^ ->
            # The robot starts facing up (negative y).
            map = Map.put(map, :robot, {pos, {0, -1}})
            Map.put(map, pos, :path)
        end
      end)
    end)
  end
end
defmodule Splitter do
  @moduledoc false

  # Splits a comma-separated movement path into a main routine plus up to
  # three sub-routines (A, B, C), each rendered as a charlist command line
  # terminated by a newline. Uses backtracking: sub-routine lengths are
  # grown until the remaining main routine fits the 20-character limit.
  def split(prog) do
    main = pair(String.split(prog, ","))
    {:done, main, sub_progs} = split_program(main, ?A)
    [Enum.intersperse(main, ?\,) |
     Enum.map(sub_progs, fn sub_prog ->
       Enum.flat_map(sub_prog, fn {dir, amount} ->
         [to_charlist(dir), to_charlist(amount)]
       end)
       |> Enum.intersperse(?\,)
       |> List.flatten
     end)]
    |> Enum.map(& &1 ++ '\n')
  end

  # Groups the flat token list into {direction, amount} pairs.
  defp pair([dir, amount | tail]) do
    [{dir, amount} | pair(tail)]
  end
  defp pair([]), do: []

  defp split_program(main, sub_prog_id) do
    find_start(main, sub_prog_id, [])
  end

  # Scans past already-substituted routine ids; the first remaining
  # {dir, amount} pair starts the next candidate sub-routine.
  defp find_start([{_,_} | _] = main, sub_prog_id, acc) do
    build_sub_prog(main, 1, sub_prog_id, acc)
  end
  defp find_start([prog | tail], sub_prog_id, acc) do
    find_start(tail, sub_prog_id, [prog | acc])
  end
  defp find_start([], _sub_prog_id, acc) do
    # Nothing left to substitute: accept if the main routine is short
    # enough (10 ids -> 19 characters once commas are interspersed).
    if length(acc) <= 10 do
      {:done, Enum.reverse(acc), []}
    else
      {:error, :too_long_main_prog}
    end
  end

  # Tries a sub-routine of `len` pairs; on failure of the recursive split,
  # grows it by one pair and retries (backtracking).
  defp build_sub_prog(main, len, sub_prog_id, acc) when sub_prog_id <= ?C do
    case take_sub_prog(main, len) do
      {:error, _} = error ->
        error
      sub_prog ->
        subst_main = subst_sub_prog(main, sub_prog, sub_prog_id)
        case split_program(subst_main, sub_prog_id + 1) do
          {:error, _} ->
            # Try to make this sub program longer.
            build_sub_prog(main, len + 1, sub_prog_id, acc)
          {:done, main, sub_progs} ->
            {:done, Enum.reverse(acc, main), [sub_prog | sub_progs]}
        end
    end
  end
  defp build_sub_prog(_, _, _, _) do
    # There are already three sub programs. Can't start another.
    {:error, :too_many_sub_programs}
  end

  # Takes the first `n` {dir, amount} pairs, failing when a previously
  # substituted id is hit or the rendered line would exceed 20 characters.
  defp take_sub_prog(main, n, acc \\ [])
  defp take_sub_prog(_main, 0, acc) do
    case prog_len(acc) > 20 do
      true -> {:error, :too_long_sub_program}
      false -> Enum.reverse(acc)
    end
  end
  defp take_sub_prog([{_, _} = head | tail], n, acc) do
    take_sub_prog(tail, n - 1, [head | acc])
  end
  defp take_sub_prog(_, _, _), do: {:error, :sub_prog_cannot_grow}

  # Rendered length of a sub-routine: per pair, 1 char direction + comma +
  # the amount's digits + comma; the initial -1 cancels the trailing comma.
  defp prog_len(sub_prog, len \\ -1)
  defp prog_len([{_,num} | tail], len) do
    prog_len(tail, len + byte_size(num) + 1 + 1)
  end
  defp prog_len([], len), do: len

  # Replaces every occurrence of `sub_prog` in `main` with its id.
  defp subst_sub_prog([head | tail] = main, sub_prog, sub_prog_id) do
    case List.starts_with?(main, sub_prog) do
      true ->
        main = Enum.drop(main, length(sub_prog))
        [sub_prog_id | subst_sub_prog(main, sub_prog, sub_prog_id)]
      false ->
        [head | subst_sub_prog(tail, sub_prog, sub_prog_id)]
    end
  end
  defp subst_sub_prog([], _, _), do: []
end
defmodule PathFinder do
  @moduledoc false

  # Breadth-style search for a robot path that visits every scaffold cell.
  # State: the robot's start position/direction and the grid map
  # (pos => :path | :wall, plus :robot).
  defstruct pos: nil, dir: nil, map: nil

  def new(pos, dir, map) do
    %PathFinder{pos: pos, dir: dir, map: map}
  end

  # Converts a list of visited positions into a command string such as
  # "R,8,L,4,...". The first position must be the robot's start.
  def make_path([pos | path], path_finder) do
    ^pos = path_finder.pos # Assertion.
    path = do_make_path(path, pos, path_finder.dir, [])
    robotize(path)
  end

  # Renders the symbolic command list (:left/:right/integers) as text.
  defp robotize(path) do
    path
    |> Enum.map(fn cmd ->
      case cmd do
        :left -> "L"
        :right -> "R"
        int when is_integer(int) -> to_string(int)
      end
    end)
    |> Enum.intersperse(",")
    |> to_string
  end

  # Walks the position list, emitting turns and accumulating run lengths
  # (commands are built in reverse and reversed at the end).
  def do_make_path([next | path], pos, dir, acc) do
    {acc, dir} = maybe_turn(next, pos, dir, acc)
    ^next = vec_add(pos, dir) # Assertion
    acc = move_one(acc)
    do_make_path(path, next, dir, acc)
  end
  def do_make_path([], _, _, acc), do: Enum.reverse(acc)

  # Emits the turn (none, right, left, or two rights for a U-turn) that
  # points `dir` towards `next`.
  defp maybe_turn(next, pos, dir, acc) do
    case vec_add(pos, dir) do
      ^next -> {acc, dir}
      _ ->
        case vec_add(pos, turn_right(dir)) do
          ^next ->
            ^next = vec_add(pos, turn_right(dir)) # Assertion.
            {[:right | acc], turn_right(dir)}
          _ ->
            case vec_add(pos, turn_left(dir)) do
              ^next ->
                ^next = vec_add(pos, turn_left(dir)) # Assertion.
                {[:left | acc], turn_left(dir)}
              _ ->
                ^next = vec_add(pos, turn_around(dir)) # Assertion.
                {[:right, :right | acc], turn_around(dir)}
            end
        end
    end
  end

  # Extends the current forward run by one cell, starting a new run after
  # a turn command.
  defp move_one([cmd | acc]) do
    case is_integer(cmd) do
      true ->
        [cmd + 1 | acc]
      false ->
        [1, cmd | acc]
    end
  end
  defp move_one([]), do: [1]

  # Searches for a position sequence that covers all scaffold cells.
  def find_paths(path_finder) do
    paths = [init_path(path_finder)]
    find_paths(paths, path_finder)
  end

  defp find_paths(paths, path_finder) do
    paths = extend_paths(paths, path_finder)
    case Enum.find(paths, &all_visited?/1) do
      nil -> find_paths(paths, path_finder)
      path -> get_path(path)
    end
  end

  # Extends every candidate path by one step in each viable direction.
  defp extend_paths([path | paths], path_finder) do
    paths1 = directions(path)
    |> Enum.flat_map(fn dir ->
      extend_path(path, dir, path_finder)
    end)
    paths2 = extend_paths(paths, path_finder)
    paths1 ++ paths2
  end
  defp extend_paths([], _path_finder), do: []

  # A step is viable if it stays on scaffold and the (cell, direction)
  # edge has not been traversed before.
  defp extend_path(path, dir, path_finder) do
    pos = get_pos(path)
    new_pos = vec_add(pos, dir)
    not_wall = Map.get(path_finder.map, new_pos, :wall) == :path
    unvisited = unvisited?(new_pos, dir, path)
    if not_wall and unvisited do
      [visit(pos, dir, path)]
    else
      []
    end
  end

  # A candidate path is {positions (reversed), visited edges, unvisited cells}.
  defp init_path(path_finder) do
    pos = path_finder.pos
    unvisited = path_finder.map
    |> Stream.flat_map(fn
      {pos, :path} -> [pos]
      {_, _} -> []
    end)
    |> MapSet.new
    unvisited = MapSet.delete(unvisited, pos)
    visited = Enum.map(directions(), & {pos, &1})
    |> MapSet.new
    {[pos], visited, unvisited}
  end

  defp get_pos({[pos | _], _, _}), do: pos
  defp get_path({path, _, _}), do: Enum.reverse(path)

  defp all_visited?({_path, _visited, unvisited}) do
    MapSet.size(unvisited) === 0
  end

  defp unvisited?(new_pos, from_dir, {_path, visited, _unvisited}) do
    not MapSet.member?(visited, {new_pos, from_dir})
  end

  # Records the traversed edge in both directions and marks the cell seen.
  defp visit(pos, from_dir, {path, visited, unvisited}) do
    new_pos = vec_add(pos, from_dir)
    visited = MapSet.put(visited, {pos, turn_around(from_dir)})
    visited = MapSet.put(visited, {new_pos, from_dir})
    unvisited = MapSet.delete(unvisited, new_pos)
    {[new_pos | path], visited, unvisited}
  end

  defp directions() do
    [{0, 1}, {0, -1}, {-1, 0}, {1, 0}]
  end

  defp directions({path, _, _}) do
    case path do
      [pos1, pos2 | _ ] ->
        # Prefer to continue moving in the same direction
        # to make the path as short as possible.
        dir = vec_sub(pos1, pos2)
        [dir, turn_left(dir), turn_right(dir), turn_around(dir) ]
      _ ->
        directions()
    end
  end

  # Note: Y axis is reversed; left is right and right is left.
  defp turn_left({dx, dy}), do: {dy, -dx}
  defp turn_right({dx, dy}), do: {-dy, dx}
  defp turn_around({dx, dy}), do: {-dx, -dy}
  defp vec_add({x1, y1}, {x2, y2}), do: {x1 + x2, y1 + y2}
  defp vec_sub({x1, y1}, {x2, y2}), do: {x1 - x2, y1 - y2}
end
defmodule Intcode do
  @moduledoc """
  A small Intcode interpreter (Advent of Code 2019).

  The machine is a plain map of memory cells keyed by integer address,
  plus bookkeeping keys: `:ip` (saved instruction pointer), `:input`
  (pending input list), `:output` (a `:queue` of produced values) and
  `:rel_base` (relative addressing base).
  """

  # Create a machine from the comma-separated program text.
  def new(program) do
    machine(program)
  end

  defp machine(input) do
    memory = read_program(input)
    memory = Map.put(memory, :ip, 0)
    Map.put(memory, :output, :queue.new())
  end

  # Replace the pending input with `input` (a list of integers).
  def set_input(memory, input) do
    Map.put(memory, :input, input)
  end

  # Drain the output queue. Returns {values, memory}; the returned memory
  # has an empty output queue.
  def get_output(memory) do
    q = Map.fetch!(memory, :output)
    # Bug fix: the original discarded the result of this Map.put/3, so
    # the queue was never actually cleared and previously drained output
    # was returned again by every later call.
    memory = Map.put(memory, :output, :queue.new())
    {:queue.to_list(q), memory}
  end

  # Continue a machine that suspended while waiting for input.
  def resume(memory) do
    execute(memory, Map.fetch!(memory, :ip))
  end

  # Run until halt (opcode 99), or until input runs dry (suspends and
  # returns the memory with :ip saved for resume/1).
  def execute(memory, ip \\ 0) do
    {opcode, modes} = fetch_opcode(memory, ip)
    case opcode do
      1 ->
        memory = exec_arith_op(&+/2, modes, memory, ip)
        execute(memory, ip + 4)
      2 ->
        memory = exec_arith_op(&*/2, modes, memory, ip)
        execute(memory, ip + 4)
      3 ->
        case exec_input(modes, memory, ip) do
          {:suspended, memory} ->
            # Out of input: hand control back to the caller.
            memory
          memory ->
            execute(memory, ip + 2)
        end
      4 ->
        memory = exec_output(modes, memory, ip)
        execute(memory, ip + 2)
      5 ->
        ip = exec_if(&(&1 !== 0), modes, memory, ip)
        execute(memory, ip)
      6 ->
        ip = exec_if(&(&1 === 0), modes, memory, ip)
        execute(memory, ip)
      7 ->
        memory = exec_cond(&(&1 < &2), modes, memory, ip)
        execute(memory, ip + 4)
      8 ->
        memory = exec_cond(&(&1 === &2), modes, memory, ip)
        execute(memory, ip + 4)
      9 ->
        memory = exec_inc_rel_base(modes, memory, ip)
        execute(memory, ip + 2)
      99 ->
        memory
    end
  end

  # add / mul: two operands in, result written to the third parameter.
  defp exec_arith_op(op, modes, memory, ip) do
    [in1, in2] = read_operand_values(memory, ip + 1, modes, 2)
    out_addr = read_out_address(memory, div(modes, 100), ip + 3)
    result = op.(in1, in2)
    write(memory, out_addr, result)
  end

  # Consume one input value, or suspend when the input list is empty.
  defp exec_input(modes, memory, ip) do
    out_addr = read_out_address(memory, modes, ip + 1)
    case Map.get(memory, :input, []) do
      [] ->
        {:suspended, Map.put(memory, :ip, ip)}
      [value | input] ->
        memory = write(memory, out_addr, value)
        Map.put(memory, :input, input)
    end
  end

  # Append one value to the output queue.
  defp exec_output(modes, memory, ip) do
    [value] = read_operand_values(memory, ip + 1, modes, 1)
    q = Map.fetch!(memory, :output)
    q = :queue.in(value, q)
    Map.put(memory, :output, q)
  end

  # jump-if: returns the next instruction pointer.
  defp exec_if(op, modes, memory, ip) do
    [value, new_ip] = read_operand_values(memory, ip + 1, modes, 2)
    case op.(value) do
      true -> new_ip
      false -> ip + 3
    end
  end

  # less-than / equals: writes 1 or 0 to the third parameter.
  defp exec_cond(op, modes, memory, ip) do
    [operand1, operand2] = read_operand_values(memory, ip + 1, modes, 2)
    out_addr = read_out_address(memory, div(modes, 100), ip + 3)
    result = case op.(operand1, operand2) do
      true -> 1
      false -> 0
    end
    write(memory, out_addr, result)
  end

  # Adjust the relative addressing base (opcode 9).
  defp exec_inc_rel_base(modes, memory, ip) do
    [offset] = read_operand_values(memory, ip + 1, modes, 1)
    base = get_rel_base(memory) + offset
    Map.put(memory, :rel_base, base)
  end

  # Resolve `n` operands starting at `addr`, honouring position (0),
  # immediate (1) and relative (2) modes.
  defp read_operand_values(_memory, _addr, _modes, 0), do: []
  defp read_operand_values(memory, addr, modes, n) do
    operand = read(memory, addr)
    operand = case rem(modes, 10) do
      0 -> read(memory, operand)
      1 -> operand
      2 -> read(memory, operand + get_rel_base(memory))
    end
    [operand | read_operand_values(memory, addr + 1, div(modes, 10), n - 1)]
  end

  # Write targets are never immediate, so only modes 0 and 2 are legal.
  defp read_out_address(memory, modes, addr) do
    out_addr = read(memory, addr)
    case modes do
      0 -> out_addr
      2 -> get_rel_base(memory) + out_addr
    end
  end

  # Split an instruction into its opcode (last two digits) and mode digits.
  defp fetch_opcode(memory, ip) do
    opcode = read(memory, ip)
    modes = div(opcode, 100)
    opcode = rem(opcode, 100)
    {opcode, modes}
  end

  defp get_rel_base(memory) do
    Map.get(memory, :rel_base, 0)
  end

  # Unwritten cells read as 0, per the Intcode spec.
  defp read(memory, addr) do
    Map.get(memory, addr, 0)
  end

  defp write(memory, addr, value) do
    Map.put(memory, addr, value)
  end

  defp read_program(input) do
    String.split(input, ",")
    |> Stream.map(&String.to_integer/1)
    |> Stream.with_index
    |> Stream.map(fn {code, index} -> {index, code} end)
    |> Map.new
  end
end
|
day17/lib/day17.ex
| 0.5083
| 0.485417
|
day17.ex
|
starcoder
|
defmodule RecursiveMatch do
  @moduledoc """
  Recursive matching
  """
  @doc """
  Matches given value with pattern
  Returns `true` or `false`
  ## Parameters
  - pattern: Expected pattern (use `:_` instead of `_`)
  - tested: Tested value
  - options: Options
    * `strict`, when `true` compare using `===`, when `false` compare using `==`, default `true`
    * `ignore_order`, when `true` - ignore order of items in lists, default `false`
  ## Example
      iex> import RecursiveMatch
      RecursiveMatch
      iex> match_r %{a: 1}, %{a: 1, b: 2}
      true
      iex> match_r %{a: 1, b: 2}, %{a: 1}
      false
  """
  @spec match_r(term, term, list | nil) :: boolean
  def match_r(pattern, tested, options \\ [strict: true])
  # Structs are compared as their underlying maps.
  def match_r(pattern, %{__struct__: _} = tested, options) do
    match_r(pattern, Map.from_struct(tested), options)
  end
  # The :_ wildcard matches anything.
  def match_r(:_, _, _), do: true
  def match_r(%{__struct__: _} = pattern, tested, options) do
    match_r(Map.from_struct(pattern), tested, options)
  end
  # Tuples match element-wise when the sizes agree.
  def match_r(pattern, tested, options) when is_tuple(tested) and is_tuple(pattern) do
    list_pattern = Tuple.to_list(pattern)
    list_tested = Tuple.to_list(tested)
    if Enum.count(list_pattern) == Enum.count(list_tested) do
      list_pattern
      |> Enum.zip(list_tested)
      |> Enum.all?(fn {pattern_item, tested_item} ->
        match_r(pattern_item, tested_item, options)
      end)
    else
      false
    end
  end
  # Lists must have equal length; order is ignored when requested.
  def match_r(pattern, tested, options) when is_list(tested) and is_list(pattern) do
    if Enum.count(pattern) == Enum.count(tested) do
      if options[:ignore_order] == true do
        match_lists_ignore_order(pattern, tested, options)
      else
        pattern
        |> Enum.zip(tested)
        |> Enum.all?(fn {pattern_item, tested_item} ->
          match_r(pattern_item, tested_item, options)
        end)
      end
    else
      false
    end
  end
  # Maps: every pattern key must exist in `tested` and match; extra keys
  # in `tested` are allowed.
  def match_r(pattern, tested, options) when is_map(tested) and is_map(pattern) do
    strict = options[:strict]
    Enum.all?(pattern, fn
      {_key, :_} -> true
      {key, value} when is_map(value) or is_list(value) ->
        match_r(value, tested[key], options)
      {key, value} when strict === true ->
        Map.has_key?(tested, key) and value === Map.get(tested, key)
      {key, value} ->
        Map.has_key?(tested, key) and value == Map.get(tested, key)
    end)
  end
  # Identical terms always match.
  def match_r(a, a, _), do: true
  # Fallback scalar comparison; strict (===) unless strict: false.
  def match_r(a, b, options) do
    case options[:strict] do
      true -> a === b
      nil -> a === b
      false -> a == b
    end
  end
  # Order-insensitive list matching: each pattern item consumes the first
  # tested item it matches. (Both lists are known to be the same length.)
  defp match_lists_ignore_order([], [], _), do: true
  defp match_lists_ignore_order([pattern | pattern_tail], tested, options) do
    case Enum.find_index(tested, fn t -> match_r(pattern, t, options) end) do
      nil ->
        false
      index ->
        tested_rest = List.delete_at(tested, index)
        match_lists_ignore_order(pattern_tail, tested_rest, options)
    end
  end
  # Prunes `tested` down to the parts addressed by `pattern` so that the
  # ExUnit diff (the "right" side) only shows relevant differences.
  def prepare_right_for_diff(pattern, tested, options)
      when is_struct(tested) and is_map(pattern) and not is_struct(pattern),
      do: prepare_right_for_diff(pattern, Map.from_struct(tested), options)
  def prepare_right_for_diff(%{__struct__: struct} = pattern, tested, options)
      when is_struct(tested) and is_struct(pattern) do
    pattern
    |> Map.from_struct()
    |> Enum.map(fn
      {_key, :_} ->
        tested
      {key, value} ->
        if Map.has_key?(pattern, key) do
          {key, prepare_right_for_diff(Map.get(pattern, key), value, options)}
        else
          nil
        end
    end)
    |> Enum.filter(& &1 && elem(&1, 1))
    |> (& struct(struct, &1)).()
  end
  def prepare_right_for_diff(pattern, tested, options)
      when is_list(tested) and is_list(pattern) do
    if options[:ignore_order] === true do
      # Reorder tested items to line up with the pattern before pairing.
      tested
      |> Enum.sort_by(&Enum.find_index(pattern, fn v -> v == &1 end), &<=/2)
      |> zip_with_rest(pattern)
      |> Enum.map(fn {tested, pattern} ->
        prepare_right_for_diff(pattern, tested, options)
      end)
      |> Enum.filter(& &1 != :zip_nil)
    else
      tested
      |> zip_with_rest(pattern)
      |> Enum.map(fn {tested, pattern} ->
        prepare_right_for_diff(pattern, tested, options)
      end)
      |> Enum.filter(& &1 != :zip_nil)
    end
  end
  def prepare_right_for_diff(pattern, tested, options)
      when is_map(tested) and is_map(pattern) do
    tested
    |> filter_tested(pattern)
    |> Enum.map(fn
      {_key, :_} ->
        :_
      {key, value} ->
        {key, prepare_right_for_diff(Map.get(pattern, key), value, options)}
    end)
    |> Map.new()
  end
  def prepare_right_for_diff(pattern, tested, options)
      when is_tuple(pattern) and is_tuple(tested) do
    list_pattern = Tuple.to_list(pattern)
    list_tested = Tuple.to_list(tested)
    list_tested
    |> zip_with_rest(list_pattern)
    |> Enum.map(fn {tested, pattern} ->
      prepare_right_for_diff(pattern, tested, options)
    end)
    |> Enum.filter(& &1 != :zip_nil)
    |> List.to_tuple()
  end
  def prepare_right_for_diff(_pattern, tested, _options), do: tested
  # Keeps only the pattern's keys — unless none overlap, in which case the
  # whole tested map is shown.
  defp filter_tested(tested, pattern) do
    if list_intersection(Map.keys(tested), Map.keys(pattern)) == [] do
      tested
    else
      Map.take(tested, Map.keys(pattern))
    end
  end
  defp list_intersection(a, b), do: a -- (a -- b)
  # Expands wildcards in `pattern` with the tested values so that the
  # ExUnit diff (the "left" side) does not flag :_ placeholders.
  def prepare_left_for_diff(pattern, tested, options)
      when is_struct(pattern) and is_map(tested) and not is_struct(tested),
      do: prepare_left_for_diff(Map.from_struct(pattern), tested, options)
  def prepare_left_for_diff(%{__struct__: struct} = pattern, tested, options)
      when is_struct(tested) and is_struct(pattern) do
    pattern
    |> Map.from_struct
    |> Enum.map(fn
      {key, :_} ->
        {key, Map.get(tested, key)}
      {key, value} ->
        {key, prepare_left_for_diff(value, Map.get(tested, key), options)}
    end)
    |> Map.new()
    |> (& struct(struct, &1)).()
  end
  def prepare_left_for_diff(pattern, tested, options)
      when is_list(tested) and is_list(pattern) do
    pattern
    |> zip_with_rest(tested)
    |> Enum.map(fn {pattern, tested} ->
      prepare_left_for_diff(pattern, tested, options)
    end)
    |> Enum.filter(& &1 != :zip_nil)
  end
  def prepare_left_for_diff(pattern, tested, options)
      when is_map(tested) and is_map(pattern) do
    pattern
    |> Enum.map(fn
      {key, :_} ->
        {key, Map.get(tested, key)}
      {key, value} ->
        {key, prepare_left_for_diff(value, Map.get(tested, key), options)}
    end)
    |> Map.new()
  end
  def prepare_left_for_diff(:_, tested, _options), do: tested
  def prepare_left_for_diff(pattern, _tested, _options), do: pattern
  # Zips two lists of possibly different lengths, padding the shorter side
  # with :zip_nil markers (filtered out by the callers above).
  defp zip_with_rest(a, b) do
    if length(a) > length(b) do
      Enum.reduce(a, {[], b}, fn
        a_i, {acc, [b_i | b_rest]} ->
          {[{a_i, b_i} | acc], b_rest}
        a_i, {acc, []} ->
          {[{a_i, :zip_nil} | acc], []}
      end)
    else
      Enum.reduce(b, {[], a}, fn
        b_i, {acc, [a_i | a_rest]} ->
          {[{a_i, b_i} | acc], a_rest}
        b_i, {acc, []} ->
          {[{:zip_nil, b_i} | acc], []}
      end)
    end
    |> elem(0)
    |> Enum.reverse()
  end
  @doc """
  Matches given value with pattern
  Returns `true` or raises `ExUnit.AssertionError`
  ## Parameters
  - pattern: Expected pattern (use `:_` instead of `_`)
  - tested: Tested value
  - options: Options
    * strict: when `true` compare using `===`, when `false` compare using `==`, default `true`
    * `ignore_order`, when `true` - ignore order of items in lists, default `false`
    * message: Custom message on fail
  ## Example
  The assertion
      assert_match %{a: 1}, %{a: 1, b: 2}
  will match,
      assert_match %{a: 1, b: 2}, %{a: 1}
  will fail with the message:
      match (assert_match) failed
      left: %{a: 1, b: 2},
      right: %{a: 1}
  """
  @spec assert_match(term, term, list | nil) :: boolean
  defmacro assert_match(left, right, options \\ [strict: true]) do
    # Evaluated at expansion time; assumes `options` is a literal keyword
    # list at the call site.
    message = options[:message] || "match (assert_match) failed"
    quote do
      right = unquote(right)
      left = unquote(left)
      message = unquote(message)
      options = unquote(options)
      # Pre-process both sides so the failure diff hides irrelevant keys
      # and wildcard placeholders.
      prepared_right = prepare_right_for_diff(left, right, options)
      prepared_left = prepare_left_for_diff(left, right, options)
      ExUnit.Assertions.assert match_r(left, right, options),
        right: prepared_right,
        left: prepared_left,
        message: message
    end
  end
  @doc """
  Matches given value with pattern
  Returns `true` or raises `ExUnit.AssertionError`
  ## Parameters
  - pattern: Expected pattern (use `:_` instead of `_`)
  - tested: Tested value
  - options: Options
    * strict: when `true` compare using `===`, when `false` compare using `==`, default `true`
    * `ignore_order`, when `true` - ignore order of items in lists, default `false`
    * message: Custom message on fail
  ## Example
  The assertion
      assert_match %{a: 1}, %{a: 1, b: 2}
  will match,
      assert_match %{a: 1, b: 2}, %{a: 1}
  will fail with the message:
      match (refute_match) succeeded, but should have failed
  """
  @spec refute_match(term, term, list | nil) :: boolean
  defmacro refute_match(left, right, options \\ [strict: true]) do
    message = options[:message] || "match (refute_match) succeeded, but should have failed"
    quote do
      right = unquote(right)
      left = unquote(left)
      message = unquote(message)
      options = unquote(options)
      ExUnit.Assertions.refute match_r(left, right, options), message: message
    end
  end
  defmacro __using__([]) do
    quote do
      import unquote(__MODULE__)
    end
  end
end
|
lib/recursive_match.ex
| 0.884058
| 0.561335
|
recursive_match.ex
|
starcoder
|
defmodule Backpack.Moment do
  @moduledoc """
  Convenience facade for date/time helpers.

  Every function delegates to `Backpack.Moment.Calculator`,
  `Backpack.Moment.Numeric`, `Backpack.Moment.Presenter`,
  `Backpack.Moment.Converter`, `Calendar.Strftime`, `DateTime` or `System`;
  see those modules for the actual implementations and accepted terms.
  """

  # Shifting and relative moments (Calculator). Singular aliases
  # (minute_ago/2, hour_ago/2, ...) map onto the plural implementations.
  defdelegate shift(term, opts), to: Backpack.Moment.Calculator
  defdelegate ago(term, seconds_or_unit \\ :seconds), to: Backpack.Moment.Calculator
  defdelegate from_now(term, seconds_or_unit \\ :seconds), to: Backpack.Moment.Calculator
  defdelegate minutes_ago(term, minutes), to: Backpack.Moment.Calculator
  defdelegate minute_ago(term, minutes), to: Backpack.Moment.Calculator, as: :minutes_ago
  defdelegate minutes_from_now(term, minutes), to: Backpack.Moment.Calculator
  defdelegate minute_from_now(term, minutes), to: Backpack.Moment.Calculator, as: :minutes_from_now
  defdelegate hours_ago(term, hours), to: Backpack.Moment.Calculator
  defdelegate hour_ago(term, hours), to: Backpack.Moment.Calculator, as: :hours_ago
  defdelegate hours_from_now(term, hours), to: Backpack.Moment.Calculator
  defdelegate hour_from_now(term, hours), to: Backpack.Moment.Calculator, as: :hours_from_now
  defdelegate days_ago(term, days), to: Backpack.Moment.Calculator
  defdelegate day_ago(term, days), to: Backpack.Moment.Calculator, as: :days_ago
  defdelegate days_from_now(term, days), to: Backpack.Moment.Calculator
  defdelegate day_from_now(term, days), to: Backpack.Moment.Calculator, as: :days_from_now
  defdelegate weeks_ago(term, weeks), to: Backpack.Moment.Calculator
  defdelegate week_ago(term, weeks), to: Backpack.Moment.Calculator, as: :weeks_ago
  defdelegate weeks_from_now(term, weeks), to: Backpack.Moment.Calculator
  defdelegate week_from_now(term, weeks), to: Backpack.Moment.Calculator, as: :weeks_from_now
  defdelegate months_ago(term, months), to: Backpack.Moment.Calculator
  defdelegate month_ago(term, months), to: Backpack.Moment.Calculator, as: :months_ago
  defdelegate months_from_now(term, months), to: Backpack.Moment.Calculator
  defdelegate month_from_now(term, months), to: Backpack.Moment.Calculator, as: :months_from_now
  defdelegate years_ago(term, years), to: Backpack.Moment.Calculator
  defdelegate year_ago(term, years), to: Backpack.Moment.Calculator, as: :years_ago
  defdelegate years_from_now(term, years), to: Backpack.Moment.Calculator
  defdelegate year_from_now(term, years), to: Backpack.Moment.Calculator, as: :years_from_now

  # Boundaries of calendar periods (Calculator).
  defdelegate beginning_of_day(term), to: Backpack.Moment.Calculator
  defdelegate end_of_day(term), to: Backpack.Moment.Calculator
  defdelegate beginning_of_week(term), to: Backpack.Moment.Calculator
  defdelegate end_of_week(term), to: Backpack.Moment.Calculator
  defdelegate beginning_of_month(term), to: Backpack.Moment.Calculator
  defdelegate end_of_month(term), to: Backpack.Moment.Calculator
  defdelegate beginning_of_quarter(term), to: Backpack.Moment.Calculator
  defdelegate end_of_quarter(term), to: Backpack.Moment.Calculator
  defdelegate beginning_of_year(term), to: Backpack.Moment.Calculator
  defdelegate end_of_year(term), to: Backpack.Moment.Calculator

  # Neighboring periods and simple predicates (Calculator).
  defdelegate yesterday(term), to: Backpack.Moment.Calculator
  defdelegate tomorrow(term), to: Backpack.Moment.Calculator
  defdelegate last_week(term), to: Backpack.Moment.Calculator
  defdelegate next_week(term), to: Backpack.Moment.Calculator
  defdelegate last_month(term), to: Backpack.Moment.Calculator
  defdelegate next_month(term), to: Backpack.Moment.Calculator
  defdelegate last_year(term), to: Backpack.Moment.Calculator
  defdelegate next_year(term), to: Backpack.Moment.Calculator
  defdelegate quarter(term), to: Backpack.Moment.Calculator
  defdelegate day_of_week(term), to: Backpack.Moment.Calculator
  defdelegate today?(term), to: Backpack.Moment.Calculator
  defdelegate future?(term), to: Backpack.Moment.Calculator
  defdelegate past?(term), to: Backpack.Moment.Calculator

  # Numeric durations (Numeric), e.g. `2 |> Moment.hours()`.
  defdelegate years(term, unit \\ :seconds), to: Backpack.Moment.Numeric
  defdelegate year(term, unit \\ :seconds), to: Backpack.Moment.Numeric, as: :years
  defdelegate months(term, unit \\ :seconds), to: Backpack.Moment.Numeric
  defdelegate month(term, unit \\ :seconds), to: Backpack.Moment.Numeric, as: :months
  defdelegate weeks(term, unit \\ :seconds), to: Backpack.Moment.Numeric
  defdelegate week(term, unit \\ :seconds), to: Backpack.Moment.Numeric, as: :weeks
  defdelegate days(term, unit \\ :seconds), to: Backpack.Moment.Numeric
  defdelegate day(term, unit \\ :seconds), to: Backpack.Moment.Numeric, as: :days
  defdelegate hours(term, unit \\ :seconds), to: Backpack.Moment.Numeric
  defdelegate hour(term, unit \\ :seconds), to: Backpack.Moment.Numeric, as: :hours
  defdelegate minutes(term, unit \\ :seconds), to: Backpack.Moment.Numeric
  defdelegate minute(term, unit \\ :seconds), to: Backpack.Moment.Numeric, as: :minutes
  defdelegate seconds(term, unit \\ :seconds), to: Backpack.Moment.Numeric
  defdelegate second(term, unit \\ :seconds), to: Backpack.Moment.Numeric, as: :seconds

  # Formatting, conversion and presentation helpers.
  defdelegate format(term, format, lang \\ :en), to: Calendar.Strftime, as: :strftime
  defdelegate format!(term, format, lang \\ :en), to: Calendar.Strftime, as: :strftime!
  defdelegate timestamp(unit \\ :seconds), to: System, as: :system_time
  defdelegate from_unix(term, unit \\ :seconds), to: DateTime
  defdelegate from_unix!(term, unit \\ :seconds), to: DateTime
  defdelegate to_unix(term, unit \\ :seconds), to: Backpack.Moment.Converter
  defdelegate distance_of_time_in_words(from, to \\ 0, opts \\ []), to: Backpack.Moment.Presenter
  defdelegate time_ago_in_words(term, opts \\ []), to: Backpack.Moment.Presenter
end
|
lib/backpack/moment.ex
| 0.631253
| 0.531513
|
moment.ex
|
starcoder
|
defmodule Slugy do
  @moduledoc ~S"""
  A Phoenix library to generate slug for your schema fields

  ## Examples

  Let's suppose we have a `Post` schema and we want to generate a slug from `title` field and save it to the `slug` field. To achieve that we need to call `slugify/2` following the changeset pipeline passing the desireable field. `slugify/2` generates the slug and put it to the changeset.

      defmodule Post do
        use Ecto.Schema
        import Ecto.Changeset
        import Slugy, only: [slugify: 2]

        embedded_schema do
          field(:title, :string)
          field(:slug, :string)
        end

        def changeset(post, attrs) do
          post
          |> cast(attrs, [:title])
          |> slugify(:title)
        end
      end

  Running this code on iex console you can see the slug generated as a new change to be persisted.

      iex> Post.changeset(%Post{}, %{title: "A new Post"}).changes
      %{title: "A new Post", slug: "a-new-post"}

  Slugy just generates a slug if the field's value passed to `slugify/2` comes with a new value to persist in `attrs` (in update cases) or if the struct is a new record to save.
  """
  import Ecto.Changeset

  alias Slugy.Slug

  @doc ~S"""
  ### Usage

  The `slugify/2` expects a changeset as a first parameter and an atom on the second one.

  The function will check if there is a change on the `title` field and if affirmative generates the slug and assigns to the `slug` field, otherwise do nothing and just returns the changeset.

      iex> Post.changeset(%Post{}, %{title: "A new Post"}).changes
      %{slug: "a-new-post", title: "A new Post"}

  ### Slugify from an embedded struct field

  In rare cases you need to generate slugs from a field inside a embeded structure that represents a jsonb column on your database.

  For example by having a struct like below and we want a slug from `data -> title`:

      defmodule PostWithEmbeddedStruct do
        use Ecto.Schema
        import Ecto.Changeset
        import Slugy, only: [slugify: 2]

        embedded_schema do
          field(:data, :map)
          field(:slug, :string)
        end

        def changeset(post, attrs) do
          post
          |> cast(attrs, [:data])
          |> slugify([:data, :title])
        end
      end

      %PostWithEmbeddedStruct{
        data: %{title: "This is my AWESOME title", external_id: 1}
      }

  Just pass a list with the keys following the path down to the desirable field.

      iex> PostWithEmbeddedStruct.changeset(%PostWithEmbeddedStruct{}, %{data: %{title: "This is my AWESOME title"}}).changes
      %{data: %{title: "This is my AWESOME title"}, slug: "this-is-my-awesome-title"}

  ### Custom slug

  If you want a custom slug composed for more than one fields **e.g.** a post `title` and the `type` like so `"how-to-use-slugy-video"` you need to implement the `Slug protocol` that extracts the desirable fields to generate the slug.

      defmodule Post do
        # ...
      end

      defimpl Slugy.Slug, for: Post do
        def to_slug(%{title: title, type: type}) do
          "#{title} #{type}"
        end
      end

  So, `%Post{title: "A new Post", type: "video"}` with the above `Slug` protocol implementation will have a slug like so `a-new-post-video`

  ## Routes

  And lastly for having our routes with the slug we just need to implement the `Phoenix.Param` protocol to our slugified schemas. `Phoenix.Param` will extract the slug in place of the `:id`.

      defmodule Post do
        @derive {Phoenix.Param, key: :slug}
        schema "posts" do
          # ...
        end

        def changeset(post, attrs) do
          # ...
        end
      end

  For more information about `Phoenix.Param` protocol see in [https://hexdocs.pm/phoenix/Phoenix.Param.html](https://hexdocs.pm/phoenix/Phoenix.Param.html)

  ## Installation

  Add to your `mix.exs` file.

      def deps do
        [
          {:slugy, "~> 2.0.0"}
        ]
      end

  Don’t forget to update your dependencies.

      $ mix deps.get
  """
  def slugify(changeset, key) when is_atom(key) do
    if str = get_change(changeset, key) do
      do_slugify(changeset, str)
    else
      changeset
    end
  end

  def slugify(changeset, nested_field) when is_list(nested_field) do
    with str when not is_nil(str) <- get_in(changeset.changes, nested_field) do
      do_slugify(changeset, str)
    else
      _ -> changeset
    end
  end

  @doc """
  Returns a downcased dashed string.

  ## Examples

      iex> Slugy.slugify("Vamo que vamo")
      "vamo-que-vamo"
  """
  def slugify(str) when is_binary(str) do
    str
    |> String.trim()
    # NFD decomposition splits accented letters into base letter + combining
    # mark; the character-class filter below then drops the marks.
    |> String.normalize(:nfd)
    |> String.replace(~r/\s\s+/, " ")
    # Keep only ASCII letters, digits, whitespace and dashes. The previous
    # class `[^A-z\s\d-]` was buggy: the `A-z` range also spans the
    # punctuation codepoints between `Z` and `a` (`[ \ ] ^ _` and backtick),
    # letting them leak into slugs.
    |> String.replace(~r/[^a-zA-Z\s\d-]/u, "")
    |> String.replace(~r/\s/, "-")
    |> String.replace(~r/--+/, "-")
    |> String.downcase()
  end

  # Builds the slug for the changeset: uses the Slugy.Slug protocol
  # implementation for the underlying struct when one exists, otherwise
  # slugifies the raw field value.
  defp do_slugify(changeset, str) do
    struct = Map.merge(changeset.data, changeset.changes)

    if Slug.impl_for(struct) do
      slug = struct |> Slug.to_slug() |> slugify()
      put_change(changeset, :slug, slug)
    else
      put_change(changeset, :slug, slugify(str))
    end
  end
end
defprotocol Slugy.Slug do
  @moduledoc ~S"""
  A protocol that builds a string to convert into a slug

  This protocol is used by Slugy.slugify/2. For example, when you
  want a custom slug for a Post, composed by the `title` and the
  `published_at` fields:

      "how-to-use-slugy-2018-10-10"

  Suppose you have a Post module with the following fields:

      defmodule Post do
        schema "posts" do
          field :title, :string
          field :body, :text
          field :published_at, :datetime
        end
      end

  You need to implement the Slugy.Slug protocol to achieve that:

      defimpl Slugy.Slug, for: Post do
        def to_slug(%{title: title, published_at: published_at}) do
          "#{title} #{published_at}"
        end
      end

  Slugy internally uses this string to build your custom slug.
  """

  # Returns the raw string that Slugy.slugify/1 will turn into the slug.
  def to_slug(struct)
end
|
lib/slugy.ex
| 0.799912
| 0.568056
|
slugy.ex
|
starcoder
|
defmodule Scenic.Primitive.Style.Paint.Color do
  @moduledoc """
  Fill a primitive with a single color

  The color paint is used as the data for the [`:fill`](Scenic.Primitive.Style.Fill.html) style.

  ### Data Format

  `{:color, valid_color}`

  The full format is a tuple with two parameters. The first is the :color atom indicating
  that this is color paint data. The second is any valid color (see below).

  ### Valid Colors

  You can pass in any color format that is supported by the `Scenic.Color.to_rgba/1` function.
  This includes any named color. See the documentation for `Scenic.Color` for more information.

  Example:

  ```elixir
  graph
  |> rect( {100,200}, fill: {:color, :blue} )
  |> rect( {100,200}, stroke: {1, {:color, :green}} )
  ```

  ### Shortcut Format

  `valid_color`

  Because the color paint type is used so frequently, you can simply pass in any valid
  color and the `:fill` style will infer that it is to be used as paint.

  Example:

  ```elixir
  graph
  |> rect( {100,200}, fill: :blue )
  |> rect( {100,200}, stroke: {1, :green} )
  ```
  """

  # --------------------------------------------------------
  # Validates an explicit {:color, color} paint spec. The rgba-converted
  # color is wrapped back into the {:color, _} tuple on success.
  @doc false
  def validate({:color, color} = spec) do
    case safe_to_rgba(color) do
      {:ok, rgba} -> {:ok, {:color, rgba}}
      :error -> {:error, error_msg(spec)}
    end
  end

  # Validates the shortcut format: a bare color value.
  def validate(color) do
    case safe_to_rgba(color) do
      {:ok, rgba} -> {:ok, rgba}
      :error -> {:error, error_msg(color)}
    end
  end

  # Wraps Scenic.Color.to_rgba/1, normalizing any raised error into :error
  # so the validate clauses can branch with a plain case.
  defp safe_to_rgba(color) do
    {:ok, Scenic.Color.to_rgba(color)}
  rescue
    _ -> :error
  end

  defp error_msg({:color, color}) do
    """
    Invalid Color specification: #{inspect(color)}
    #{IO.ANSI.yellow()}
    Valid color fills can be either just a color (the default fill) or an explicit {:color, color_data} tuple.
    Valid examples:
    fill: :green # named color
    fill: {:green, 0xef} # {named color, alpha}
    fill: { 10, 20, 30} # {r, g, b} color
    fill: { 10, 20, 30, 0xff} # {r, g, b, a} color
    Or any of the above can be a fully explicit color paint
    fill: {:color, :green}
    fill: {:color, { 10, 20, 30, 0xff}}
    etc...
    See the documentation for a list of named colors.
    https://hexdocs.pm/scenic/Scenic.Color.html#module-named-colors#{IO.ANSI.default_color()}
    """
  end

  defp error_msg(color) do
    """
    Invalid Color specification: #{inspect(color)}
    #{IO.ANSI.yellow()}
    Example colors:
    :green # named color
    {:green, 0xef} # {named color, alpha}
    { 10, 20, 30} # {r, g, b} color
    { 10, 20, 30, 0xff} # {r, g, b, a} color
    See the documentation for a list of named colors.
    https://hexdocs.pm/scenic/Scenic.Color.html#module-named-colors#{IO.ANSI.default_color()}
    """
  end
end
|
lib/scenic/primitive/style/paint/color.ex
| 0.89906
| 0.921074
|
color.ex
|
starcoder
|
defmodule Bitcoinex.Block do
  @moduledoc """
  Bitcoin on-chain block structure.

  Supports deserialization of blocks from raw bytes or hex strings.
  """

  alias Bitcoinex.Block
  alias Bitcoinex.Transaction
  alias Bitcoinex.Utils
  alias Bitcoinex.Transaction.Utils, as: ProtocolUtils

  defstruct [
    :version,
    :prev_block,
    :merkle_root,
    :timestamp,
    :bits,
    :nonce,
    :txn_count,
    :txns
  ]

  @doc """
  Returns the BlockID of the given block.

  defined as the bitcoin hash of the block header (first 80 bytes):
  BlockID is sha256(sha256(nVersion | prev_block | merkle_root | timestamp | bits | nonce))

  The digest is byte-reversed (re-encoded little-endian) before hex
  encoding, matching the conventional display order of block hashes.
  """
  def block_id(raw_block) do
    <<header::binary-size(80), _rest::binary>> = raw_block

    Base.encode16(
      <<:binary.decode_unsigned(
          Utils.double_sha256(header),
          :big
        )::little-size(256)>>,
      case: :lower
    )
  end

  @doc """
  Decodes a block from a hex-encoded string.

  Accepts lower-, upper-, or mixed-case hex (generalized from the original,
  which only accepted lowercase). Returns `{:ok, block}`,
  `{:error, :decode_error}` when the string is not valid hex, or
  `{:error, :parse_error}` when the decoded bytes do not form a valid block.
  """
  def decode(block_hex) when is_binary(block_hex) do
    case Base.decode16(block_hex, case: :mixed) do
      {:ok, block_bytes} ->
        case parse(block_bytes) do
          {:ok, block} ->
            {:ok, block}

          :error ->
            {:error, :parse_error}
        end

      :error ->
        {:error, :decode_error}
    end
  end

  # Parses the 80-byte header followed by the transaction list.
  # Returns {:ok, %Block{}} or :error when trailing bytes remain.
  # Malformed/short input raises a MatchError (let-it-crash, as before).
  defp parse(block_bytes) do
    <<version::little-size(32), remaining::binary>> = block_bytes

    # Previous block hash (32 bytes, internal byte order)
    <<prev_block::binary-size(32), remaining::binary>> = remaining

    # Merkle root (32 bytes, internal byte order)
    <<merkle_root::binary-size(32), remaining::binary>> = remaining

    # Timestamp, difficulty target bits, nonce
    <<timestamp::little-size(32), bits::little-size(32), nonce::little-size(32),
      remaining::binary>> = remaining

    # Transactions: varint count followed by serialized transactions
    {txn_count, remaining} = ProtocolUtils.get_counter(remaining)
    {txns, remaining} = Transaction.parse_list(txn_count, remaining)

    if byte_size(remaining) != 0 do
      :error
    else
      {:ok,
       %Block{
         version: version,
         # Hashes are byte-reversed into display order, lowercase hex.
         prev_block:
           Base.encode16(<<:binary.decode_unsigned(prev_block, :big)::little-size(256)>>,
             case: :lower
           ),
         merkle_root:
           Base.encode16(<<:binary.decode_unsigned(merkle_root, :big)::little-size(256)>>,
             case: :lower
           ),
         timestamp: timestamp,
         bits: bits,
         nonce: nonce,
         txn_count: length(txns),
         txns: txns
       }}
    end
  end
end
|
server/bitcoinex/lib/block.ex
| 0.79653
| 0.422117
|
block.ex
|
starcoder
|
defrecord Mix.Dep, [scm: nil, app: nil, requirement: nil, status: nil, opts: nil], moduledoc: """
This is a record that keeps information about your project
dependencies. It keeps:

* scm - a module representing the source code management tool (SCM) operations;
* app - the app name as an atom;
* requirement - a binary or regexp with the deps requirement;
* status - the current status of dependency, check `Mix.Deps.format_status/1` for more info;
* opts - the options given by the developer
"""
defmodule Mix.Deps do
  @moduledoc """
  A module with common functions to work with dependencies.
  """

  @doc """
  Returns all dependencies in as `Mix.Dep` record.

  ## Exceptions

  This function raises an exception in case the developer
  provides a dependency in the wrong format.

  ## Statuses

  The `status` element in the tuple returns the current
  situation of the repository. Check `format_status/1`
  for more information.
  """
  def all do
    deps = Mix.project[:deps] || []
    scms = Mix.SCM.available
    # Pre-1.0 partial application syntax: &1 builds the anonymous function.
    Enum.map deps, with_scm_and_status(&1, scms)
  end

  @doc """
  Get all dependencies that match the specific `status`.
  """
  def all(status) do
    Enum.filter all, match?(Mix.Dep[status: { ^status, _ }], &1)
  end

  @doc """
  Receives a list of deps names and returns deps records.
  Raises an error if the dependency does not exist.
  """
  def by_name(given) do
    candidates = all
    Enum.map given, fn(app) ->
      if is_binary(app), do: app = binary_to_atom(app)
      # NOTE(review): position 2 is presumably the :app field of the record
      # tuple — confirm against this Elixir version's List.keyfind indexing.
      case List.keyfind(candidates, app, 2) do
        nil -> raise Mix.Error, message: "unknown dependency #{app}"
        dep -> dep
      end
    end
  end

  @doc """
  Formats the status of a dependency.
  """
  def format_status({ :ok, _vsn }), do: "ok"
  def format_status({ :noappfile, path }), do: "could not find app file at #{path}"
  def format_status({ :invalidapp, path }), do: "the app file at #{path} is invalid"
  def format_status({ :invalidvsn, vsn }), do: "the dependency does not match the specified version, got #{vsn}"
  def format_status({ :lockmismatch, _ }), do: "lock mismatch: the dependency is out of date"
  def format_status(:nolock), do: "the dependency is not locked"
  def format_status({ :unavailable, _ }), do: "the dependency is not available, run `mix deps.get`"

  @doc """
  Receives a dependency and update its status
  """
  def update_status(Mix.Dep[scm: scm, app: app, requirement: req, opts: opts]) do
    with_scm_and_status({ app, req, opts }, [scm])
  end

  @doc """
  Checks the lock for the given dependency and update its status accordingly.
  """
  # Unavailable deps cannot be lock-checked; pass them through unchanged.
  def check_lock(Mix.Dep[status: { :unavailable, _}] = dep, _lock) do
    dep
  end

  def check_lock(Mix.Dep[scm: scm, app: app, opts: opts] = dep, lock) do
    rev = lock[app]
    opts = Keyword.put(opts, :lock, rev)
    if scm.check?(deps_path(dep), opts) do
      dep
    else
      # No lock entry means :nolock; a stale entry means :lockmismatch.
      status = if rev, do: { :lockmismatch, rev }, else: :nolock
      dep.status(status)
    end
  end

  @doc """
  Check if a dependency is out of date or not, considering its
  lock status. Therefore, be sure to call `check_lock` before
  invoking this function.
  """
  def out_of_date?(Mix.Dep[status: { :unavailable, _ }]), do: true
  def out_of_date?(Mix.Dep[status: { :lockmismatch, _ }]), do: true
  def out_of_date?(Mix.Dep[status: :nolock]), do: true
  def out_of_date?(_), do: false

  @doc """
  Format the dependency for printing.
  """
  def format_dep(Mix.Dep[scm: scm, app: app, status: status, opts: opts]) do
    version =
      case status do
        { :ok, vsn } when vsn != nil -> "(#{vsn}) "
        _ -> ""
      end
    "#{app} #{version}[#{scm.key}: #{inspect opts[scm.key]}]"
  end

  @doc """
  Returns the path for the given dependency.
  """
  def deps_path(Mix.Dep[app: app, opts: opts]) do
    deps_path(app, opts)
  end

  @doc """
  The default path for dependencies.
  """
  def deps_path do
    Mix.project[:deps_path] || "deps"
  end

  ## Helpers

  # Two-tuple form { app, opts } means "no version requirement".
  defp with_scm_and_status({ app, opts }, scms) when is_atom(app) and is_list(opts) do
    with_scm_and_status({ app, nil, opts }, scms)
  end

  defp with_scm_and_status({ app, req, opts }, scms) when is_atom(app) and
      (is_binary(req) or is_regex(req) or req == nil) and is_list(opts) do
    # First SCM whose consumes?/1 accepts the opts wins.
    { scm, opts } = Enum.find_value scms, fn(scm) ->
      (new = scm.consumes?(opts)) && { scm, new }
    end

    if scm do
      Mix.Dep[
        scm: scm,
        app: app,
        requirement: req,
        status: status(scm, app, req, opts),
        opts: opts
      ]
    else
      supported = Enum.join scms, ", "
      raise Mix.Error, message: "did not specify a supported scm, expected one of: " <> supported
    end
  end

  # Anything else is a malformed dependency specification.
  defp with_scm_and_status(other, _scms) do
    raise Mix.Error, message: %b(dependency specified in the wrong format: #{inspect other}, ) <>
      %b(expected { "app", "requirement", scm: "location" })
  end

  defp status(scm, app, req, opts) do
    deps_path = deps_path(app, opts)
    if scm.available? deps_path, opts do
      if req do
        app_path = File.join deps_path, "ebin/#{app}.app"
        validate_app_file(app_path, app, req)
      else
        # No requirement to check against: available is good enough.
        { :ok, nil }
      end
    else
      { :unavailable, deps_path }
    end
  end

  # Reads the compiled .app file and checks its :vsn against the requirement.
  defp validate_app_file(app_path, app, req) do
    case :file.consult(app_path) do
      { :ok, [{ :application, ^app, config }] } ->
        # NOTE(review): position 1 here vs position 2 in by_name/1 — confirm
        # List.keyfind indexing for this Elixir version.
        case List.keyfind(config, :vsn, 1) do
          { :vsn, actual } ->
            actual = list_to_binary(actual)
            if vsn_match?(req, actual) do
              { :ok, actual }
            else
              { :invalidvsn, actual }
            end
          nil -> { :invalidvsn, nil }
        end
      { :ok, _ } -> { :invalidapp, app_path }
      { :error, _ } -> { :noappfile, app_path }
    end
  end

  # Binary requirements demand exact equality; regex requirements match.
  defp vsn_match?(expected, actual) when is_binary(expected), do: actual == expected
  defp vsn_match?(expected, actual) when is_regex(expected), do: actual =~ expected

  defp deps_path(app, opts) do
    opts[:path] || File.join(deps_path, app)
  end
end
|
lib/mix/lib/mix/deps.ex
| 0.853532
| 0.508971
|
deps.ex
|
starcoder
|
defmodule Nosedrum.Interactor do
  @moduledoc """
  Interactors take the role of both `Nosedrum.Invoker` and `Nosedrum.Storage` when
  it comes to Discord's Application Commands. An Interactor handles incoming
  `t:Nostrum.Struct.Interaction.t/0`s, invoking `c:Nosedrum.ApplicationCommand.command/1` callbacks
  and responding to the Interaction.

  In addition to tracking commands locally for the bot, an Interactor is
  responsible for registering an Application Command with Discord when `c:add_command/4`
  or `c:remove_command/4` is called.
  """
  @moduledoc since: "0.4.0"

  alias Nostrum.Struct.{Guild, Interaction}

  # Discord interaction response callback types, keyed by friendly atom.
  @callback_type_map %{
    pong: 1,
    channel_message_with_source: 4,
    deferred_channel_message_with_source: 5,
    deferred_update_message: 6,
    update_message: 7
  }

  # Discord message flag bit values, keyed by the response option that sets
  # them. Each value must be a distinct power of two.
  @flag_map %{
    ephemeral?: 64
  }

  @type command_scope :: :global | Guild.id() | [Guild.id()]

  @typedoc """
  Defines a structure of commands, subcommands, subcommand groups, as
  outlined in the [official documentation](https://discord.com/developers/docs/interactions/application-commands#subcommands-and-subcommand-groups).

  **Note** that Discord only supports nesting 3 levels deep, like `command -> subcommand group -> subcommand`.

  ## Example path:
  ```elixir
  %{
    {"castle", MyApp.CastleCommand.description()} =>
      %{
        {"prepare", "Prepare the castle for an attack."} => [],
        {"open", "Open up the castle for traders and visitors."} => [],
        # ...
      }
  }
  ```
  """
  @type application_command_path ::
          %{
            {group_name :: String.t(), group_desc :: String.t()} => [
              application_command_path | [Nosedrum.ApplicationCommand.option()]
            ]
          }

  @typedoc """
  The name or pid of the Interactor process.
  """
  @type name_or_pid :: atom() | pid()

  @doc """
  Handle an Application Command invocation.

  This callback should be invoked upon receiving an interaction via the `:INTERACTION_CREATE` event.

  ## Example using `Nosedrum.Interactor.Dispatcher`:
  ```elixir
  # In your `Nostrum.Consumer` file:
  def handle_event({:INTERACTION_CREATE, interaction, _ws_state}) do
    IO.puts "Got interaction"
    Nosedrum.Interactor.Dispatcher.handle_interaction(interaction)
  end
  ```
  """
  @callback handle_interaction(interaction :: Interaction.t(), name_or_pid) :: :ok

  @doc """
  Add a new command under the given name or application command path. Returns `:ok` if successful, and
  `{:error, reason}` otherwise.

  If the command already exists, it will be overwritten.
  """
  @callback add_command(
              name_or_path :: String.t() | application_command_path,
              command_module :: module,
              scope :: command_scope,
              name_or_pid
            ) ::
              :ok | {:error, reason :: String.t()}

  @doc """
  Remove the command under the given name or application command path. Returns `:ok` if successful, and
  `{:error, reason}` otherwise.

  If the command does not exist, no error should be returned.
  """
  @callback remove_command(
              name_or_path :: String.t() | application_command_path,
              command_id :: Nostrum.Snowflake.t(),
              scope :: command_scope,
              name_or_pid
            ) ::
              :ok | {:error, reason :: String.t()}

  @doc """
  Responds to an Interaction with the values in the given `t:Nosedrum.ApplicationCommand.response/0`. Returns `{:ok}` if
  successful, and a `t:Nostrum.Api.error/0` otherwise.
  """
  @spec respond(Interaction.t(), Nosedrum.ApplicationCommand.response()) ::
          {:ok} | Nostrum.Api.error()
  def respond(interaction, command_response) do
    # Default to a plain message response when no :type is given.
    type =
      command_response
      |> Keyword.get(:type, :channel_message_with_source)
      |> convert_callback_type()

    data =
      command_response
      |> Keyword.take([:content, :embeds, :components, :tts?, :allowed_mentions])
      |> Map.new()
      |> put_flags(command_response)

    res = %{
      type: type,
      data: data
    }

    Nostrum.Api.create_interaction_response(interaction, res)
  end

  # Maps the friendly callback-type atom to Discord's numeric type.
  defp convert_callback_type(type) do
    Map.get(@callback_type_map, type)
  end

  # Sets the :flags field on the response data for every truthy flag option.
  defp put_flags(data_map, command_response) do
    Enum.reduce(@flag_map, data_map, fn {flag, value}, data_map_acc ->
      if command_response[flag] do
        # Accumulate flag bits instead of overwriting :flags wholesale (the
        # previous Map.put/3 would clobber earlier flags if @flag_map ever
        # grows). Each flag is a distinct power of two and is visited at
        # most once, so addition is equivalent to a bitwise OR here.
        Map.update(data_map_acc, :flags, value, &(&1 + value))
      else
        data_map_acc
      end
    end)
  end
end
|
lib/nosedrum/interactor.ex
| 0.902233
| 0.73197
|
interactor.ex
|
starcoder
|
defmodule ExDiceRoller.Sigil do
  @moduledoc """
  Handles the sigil `~a` for dice rolling. If no options are specified, the
  sigil will return the compiled function based on the provided roll. Note that
  variables cannot be present in the expression when invoking a roll directly
  from the sigil.

  Also note that if you wish to use the `/` operator with the sigil, you will
  need to use a different delimeter. Example: `~a|1d4+4d6/2d4|`.

  The following options are available, with each invoking a roll:

  * `r`: Compiles and invokes the roll. Variables are not supported with this.
  * `e`: Turns on the exploding dice mechanic.
  * `h`: Select the highest of the calculated values when using the `,`
    separator.
  * `l`: Select the lowest of the calculated values when using the `,`
    separator.
  * `k`: Keeps the value for each dice roll and returns it as a list.

  ## Examples

      iex> import ExDiceRoller.Sigil
      ExDiceRoller.Sigil

      iex> fun = ~a/1+1/
      iex> fun.([])
      2

      iex> import ExDiceRoller.Sigil
      iex> fun = ~a/1d4/
      iex> fun.([])
      1
      iex> fun.([])
      4

      iex> import ExDiceRoller.Sigil
      iex> ~a/1d6+1/r
      4
      iex> ~a/1d2/re
      7
  """

  @doc """
  Implements the `~a` sigil: returns the compiled roll function when no
  options are given, otherwise invokes it with the translated options.
  """
  @spec sigil_a(String.t(), charlist) :: Compiler.compiled_val()
  def sigil_a(roll_string, opts) do
    binary_opts = :binary.list_to_bin(opts)

    # NOTE: the compile step deliberately uses `=` (not `<-`), so a
    # compilation failure raises a MatchError rather than falling through
    # to `else` — preserved from the original behavior.
    with {:ok, translated_opts} <- translate_opts(binary_opts, []),
         {:ok, fun} = ExDiceRoller.compile(roll_string) do
      case translated_opts do
        # No options: hand back the compiled function itself. (The original
        # redundantly re-compiled the roll here.)
        [] -> fun
        # Any option invokes the roll; :execute is implied and stripped.
        _ -> fun.(opts: translated_opts -- [:execute])
      end
    else
      {:error, reason} -> {:error, {:invalid_option, reason}}
    end
  end

  # Translates the sigil's modifier characters into option atoms; an
  # unrecognized character yields {:error, rest_of_modifiers}.
  @spec translate_opts(binary, list(atom)) :: {:ok, list(atom)} | {:error, any}
  defp translate_opts(<<?r, t::binary>>, acc), do: translate_opts(t, [:execute | acc])
  defp translate_opts(<<?e, t::binary>>, acc), do: translate_opts(t, [:explode | acc])
  defp translate_opts(<<?h, t::binary>>, acc), do: translate_opts(t, [:highest | acc])
  defp translate_opts(<<?l, t::binary>>, acc), do: translate_opts(t, [:lowest | acc])
  defp translate_opts(<<?k, t::binary>>, acc), do: translate_opts(t, [:keep | acc])
  defp translate_opts(<<>>, acc), do: {:ok, acc}
  defp translate_opts(rest, _acc), do: {:error, rest}
end
|
lib/sigil.ex
| 0.888299
| 0.643651
|
sigil.ex
|
starcoder
|
defmodule Day11.Seats do
  @moduledoc """
  Seat-layout cellular automaton (Advent of Code 2020, day 11).

  The grid is a map of `{row, col} => "." | "L" | "#"` built from the
  puzzle's input lines ("." floor, "L" empty seat, "#" occupied seat).
  """

  defstruct [:grid]

  @doc "Builds a seats struct from a list of row strings."
  def new(input), do: %__MODULE__{grid: gridify(input, 0, %{})}

  @doc """
  Repeatedly advances the automaton until a step produces no change, then
  returns the stable seats struct.

  Options (keyword list, any order):
    * `:ignore_floor` - when `true`, look past "." cells along each of the
      8 directions (part 2 rules); when `false` only the 8 direct
      neighbors count (part 1 rules).
    * `:threshold` - number of occupied neighbors at which an occupied
      seat empties.
  """
  def stabilize(%__MODULE__{grid: grid}, opts) do
    case advance(grid, opts) do
      {:stable, grid} -> %__MODULE__{grid: grid}
      {:change, grid} -> stabilize(%__MODULE__{grid: grid}, opts)
    end
  end

  @doc """
  Counts occupied seats ("#").

  NOTE: the name is misleading — it counts OCCUPIED seats, not empty
  ones. Kept as-is for interface compatibility with existing callers.
  """
  def count_empty_seats(%__MODULE__{grid: grid}) do
    grid
    |> Map.values()
    |> Enum.count(fn a -> a == "#" end)
  end

  # Folds the row strings into a {row, col} => char map.
  defp gridify([], _, grid), do: grid

  defp gridify([line | rest], row, grid) do
    grid =
      line
      |> String.split("", trim: true)
      |> Enum.with_index()
      |> Enum.into(grid, fn {char, col} -> {{row, col}, char} end)

    gridify(rest, row + 1, grid)
  end

  # One automaton step; returns {:stable | :change, new_grid}.
  defp advance(grid, opts) do
    grid
    |> Enum.map(fn entry -> new_value(entry, grid, opts) end)
    |> Enum.reduce({:stable, %{}}, fn
      {pos, val, val}, {outcome, grid} -> {outcome, Map.put(grid, pos, val)}
      {pos, _old_val, new_val}, {_, grid} -> {:change, Map.put(grid, pos, new_val)}
    end)
  end

  @doc """
  Computes a cell's next value, returned as `{pos, old, new}`.

  Generalized from the original to accept `:ignore_floor` and `:threshold`
  in any order in the options keyword list (the original pattern-matched
  the exact order `[ignore_floor: _, threshold: _]`).
  """
  def new_value({pos, "."}, _grid, _opts), do: {pos, ".", "."}

  def new_value({pos, "L"}, grid, opts) do
    ignore_floor = Keyword.fetch!(opts, :ignore_floor)

    pos
    |> adjacent(grid, ignore_floor)
    |> Enum.count(fn seat -> seat == "#" end)
    |> case do
      # An empty seat with no occupied neighbors becomes occupied.
      0 -> {pos, "L", "#"}
      _ -> {pos, "L", "L"}
    end
  end

  def new_value({pos, "#"}, grid, opts) do
    ignore_floor = Keyword.fetch!(opts, :ignore_floor)
    threshold = Keyword.fetch!(opts, :threshold)

    pos
    |> adjacent(grid, ignore_floor)
    |> Enum.count(fn seat -> seat == "#" end)
    |> case do
      # An occupied seat with >= threshold occupied neighbors empties.
      x when x >= threshold -> {pos, "#", "L"}
      _ -> {pos, "#", "#"}
    end
  end

  @doc "Returns the 8 visible neighbor values for a position."
  def adjacent({x, y}, grid, ignore_floor) do
    for i <- -1..1 do
      for j <- -1..1, do: {i, j}
    end
    |> Enum.concat()
    |> Enum.reject(fn
      {0, 0} -> true
      _ -> false
    end)
    |> Enum.map(&seat_in_dir(&1, {x, y}, 1, grid, ignore_floor))
  end

  # Off-grid positions default to "L" (empty), which never affects counts.
  def seat_in_dir({dx, dy}, {x, y}, i, grid, false) do
    pos = {x + dx * i, y + dy * i}
    Map.get(grid, pos, "L")
  end

  def seat_in_dir({dx, dy}, {x, y}, i, grid, true) do
    pos = {x + dx * i, y + dy * i}

    case Map.get(grid, pos, "L") do
      # Floor is transparent in part 2: keep scanning along the direction.
      "." -> seat_in_dir({dx, dy}, {x, y}, i + 1, grid, true)
      seat -> seat
    end
  end
end
defimpl String.Chars, for: Day11.Seats do
  # Renders the grid row by row, one character per cell.
  # The original hard-coded 0..9 bounds and therefore only rendered a
  # 10x10 window; bounds are now derived from the grid keys so grids of
  # any size print fully (identical output for 10x10 grids).
  def to_string(%Day11.Seats{grid: grid}) when map_size(grid) == 0, do: ""

  def to_string(%Day11.Seats{grid: grid}) do
    {max_row, max_col} =
      Enum.reduce(Map.keys(grid), {0, 0}, fn {i, j}, {mi, mj} ->
        {max(i, mi), max(j, mj)}
      end)

    for i <- 0..max_row do
      Enum.map_join(0..max_col, fn j -> Map.get(grid, {i, j}) end)
    end
    |> Enum.join("\n")
  end
end
defimpl Inspect, for: Day11.Seats do
  # Delegate to String.Chars so IO.inspect shows the grid picture.
  def inspect(seats, _), do: to_string(seats)
end
|
year_2020/lib/day_11/seats.ex
| 0.776877
| 0.724407
|
seats.ex
|
starcoder
|
defmodule Crux.Structs.Emoji do
@moduledoc """
Represents a Discord [Emoji Object](https://discord.com/developers/docs/resources/emoji#emoji-object).
Differences opposed to the Discord API Object:
- `:user` is just the user id
"""
@moduledoc since: "0.1.0"
@behaviour Crux.Structs
alias Crux.Structs
alias Crux.Structs.{Emoji, Reaction, Snowflake, Util}
defstruct [
  # Snowflake id; nil for built-in unicode emojis (see @type t below).
  :id,
  :name,
  # MapSet of role snowflakes allowed to use this emoji.
  :roles,
  # Id of the uploading user only — not the full user object (see moduledoc).
  :user,
  :require_colons,
  :managed,
  :animated,
  :available
]
@typedoc since: "0.1.0"
@type t :: %__MODULE__{
id: Snowflake.t() | nil,
name: String.t(),
roles: MapSet.t(Snowflake.t()),
user: Snowflake.t() | nil,
require_colons: boolean() | nil,
managed: boolean() | nil,
animated: boolean() | nil,
available: boolean() | nil
}
@typedoc """
All available types that can be resolved into an emoji id.
"""
@typedoc since: "0.2.1"
@type id_resolvable() :: Reaction.t() | Emoji.t() | Snowflake.t() | String.t()
@doc """
Resolves the id of a `t:Crux.Structs.Emoji.t/0`.
> Automatically invoked by `Crux.Structs.resolve_id/2`.
```elixir
iex> %Crux.Structs.Emoji{id: 618731477143912448}
...> |> Crux.Structs.Emoji.resolve_id()
618731477143912448
iex> %Crux.Structs.Reaction{emoji: %Crux.Structs.Emoji{id: 618731477143912448}}
...> |> Crux.Structs.Emoji.resolve_id()
618731477143912448
iex> 618731477143912448
...> |> Crux.Structs.Emoji.resolve_id()
618731477143912448
iex> "618731477143912448"
...> |> Crux.Structs.Emoji.resolve_id()
618731477143912448
```
"""
@doc since: "0.2.1"
@spec resolve_id(id_resolvable()) :: Snowflake.t() | nil
# Reactions carry their emoji; unwrap and resolve that emoji's id.
def resolve_id(%Reaction{emoji: emoji}), do: resolve_id(emoji)

# An emoji resolves to its own id (which may be nil for unicode emojis).
def resolve_id(%Emoji{id: id}), do: Structs.resolve_id(id)

# Anything else (snowflake integer or string) goes straight through.
def resolve_id(resolvable), do: Structs.resolve_id(resolvable)
@doc """
Creates a `t:Crux.Structs.Emoji.t/0` struct from raw data.

> Automatically invoked by `Crux.Structs.create/2`.
"""
@doc since: "0.1.0"
@spec create(data :: map()) :: t()
def create(data) do
  emoji =
    data
    # Convert string keys to atoms so the map can populate the struct.
    |> Util.atomify()
    # Normalize the id into a snowflake; stays nil for unicode emojis.
    |> Map.update(:id, nil, &Snowflake.to_snowflake/1)
    # Role whitelist becomes a MapSet of snowflakes.
    |> Map.update(
      :roles,
      MapSet.new(),
      &MapSet.new(&1, fn role -> Snowflake.to_snowflake(role) end)
    )
    # Keep only the user's id, per the module's documented difference
    # from the raw API object.
    |> Map.update(:user, nil, Util.map_to_id())

  struct(__MODULE__, emoji)
end
@typedoc """
All available types that can be resolved into a discord emoji identifier.
> String.t() stands for an already encoded unicode emoji.
"""
@typedoc since: "0.2.1"
@type identifier_resolvable() :: Emoji.t() | Reaction.t() | String.t()
@doc ~S"""
Converts an `t:Crux.Structs.Emoji.t/0`, a `t:Crux.Structs.Reaction.t/0`, or a `t:String.t/0` to its discord identifier format.
> This is automatically done if using a appropriate rest function.
## Examples
```elixir
# A custom emoji
iex> %Crux.Structs.Emoji{animated: false, id: 396521773216301056, name: "blobwavereverse"}
...> |> Crux.Structs.Emoji.to_identifier()
"blobwavereverse:396521773216301056"
# A custom animated emoji
iex> %Crux.Structs.Emoji{animated: true, id: 396521774466203659, name: "ablobwavereverse"}
...> |> Crux.Structs.Emoji.to_identifier()
"a:ablobwavereverse:396521774466203659"
# A regular emoji
iex> %Crux.Structs.Emoji{animated: false, id: nil, name: "👋"}
...> |> Crux.Structs.Emoji.to_identifier()
"%F0%9F%91%8B"
# A reaction struct
iex> %Crux.Structs.Reaction{
...> emoji: %Crux.Structs.Emoji{animated: false, id: 356830260626456586, name: "blobReach"}
...> }
...> |> Crux.Structs.Emoji.to_identifier()
"blobReach:356830260626456586"
# An already encoded identifier
iex> "👀"
...> |> URI.encode_www_form()
...> |> Crux.Structs.Emoji.to_identifier()
"%F0%9F%91%80"
# A custom emoji's identifier
iex> "eyesRight:271412698267254784"
...> |> Crux.Structs.Emoji.to_identifier()
"eyesRight:271412698267254784"
```
"""
@doc since: "0.1.1"
@spec to_identifier(emoji :: identifier_resolvable()) :: String.t()
def to_identifier(%Crux.Structs.Reaction{emoji: emoji}), do: to_identifier(emoji)
def to_identifier(%__MODULE__{id: nil, name: name}), do: URI.encode_www_form(name)
def to_identifier(%__MODULE__{id: id, name: name, animated: true}), do: "a:#{name}:#{id}"
def to_identifier(%__MODULE__{id: id, name: name}), do: "#{name}:#{id}"
def to_identifier(identifier) when is_bitstring(identifier), do: identifier
defimpl String.Chars, for: Crux.Structs.Emoji do
@spec to_string(Emoji.t()) :: String.t()
def to_string(%Emoji{id: nil, name: name}), do: name
def to_string(%Emoji{id: id, name: name, animated: true}),
do: "<a:#{name}:#{id}>"
def to_string(%Emoji{id: id, name: name}), do: "<:#{name}:#{id}>"
end
end
|
lib/structs/emoji.ex
| 0.860969
| 0.543469
|
emoji.ex
|
starcoder
|
defmodule ExUssd.Display do
  @moduledoc false

  @doc """
  Transforms an ExUssd menu struct into the string rendered to the user.

  ## Parameters
    - `menu` - menu to transform to string
    - `route` - route
    - `opts` - optional session args

  ## Examples

      iex> menu = ExUssd.new(name: "home", resolve: fn menu, _payload, _metadata -> menu |> ExUssd.set(title: "Welcome") end)
      iex> ExUssd.Display.to_string(menu, ExUssd.Route.get_route(%{text: "*544#", service_code: "*544#"}))
      {:ok, %{menu_string: "Welcome", should_close: false}}
  """
  # Bodiless head: declares the default for `opts`, defining both
  # to_string/2 and to_string/3.
  def to_string(_, _, opts \\ [])

  # NOTE(review): this @spec is 2-arity while the clause below is 3-arity
  # (to_string/3); the vertical clause's spec below is 3-arity — confirm the
  # intended arity for this spec.
  @spec to_string(%ExUssd{orientation: :horizontal}, map()) ::
          {:ok, %{menu_string: String.t(), should_close: boolean()}}
  # Horizontal orientation: one menu-list entry is shown per "page", and the
  # route depth selects which entry.
  def to_string(
        %ExUssd{
          orientation: :horizontal,
          error: error,
          delimiter: delimiter,
          menu_list: menu_list,
          nav: nav,
          should_close: should_close,
          split: split,
          default_error: default_error
        },
        %{route: route},
        opts
      ) do
    session = Keyword.get(opts, :session_id)

    # Current navigation depth (1-based page index into menu_list).
    %{depth: depth} = List.first(route)

    total_length = Enum.count(menu_list)

    menu_list = get_menu_list(menu_list, opts)

    max = depth * split - 1

    navigation = ExUssd.Nav.to_string(nav, depth, menu_list, max, length(route), :horizontal)

    # Only close the session when the user has paged to the last entry.
    should_close =
      if depth == total_length do
        should_close
      else
        false
      end

    menu_string =
      case Enum.at(menu_list, depth - 1) do
        %ExUssd{name: name} ->
          # e.g. "2:5\nitem name" with navigation appended unless closing.
          if should_close do
            IO.iodata_to_binary(["#{depth}", delimiter, "#{total_length}", "\n", name])
          else
            IO.iodata_to_binary(["#{depth}", delimiter, "#{total_length}", "\n", name, navigation])
          end

        _ ->
          # Depth ran past the list: reset the session depth and show the
          # default error instead of an entry.
          ExUssd.Registry.set_depth(session, total_length + 1)
          IO.iodata_to_binary([default_error, navigation])
      end

    # `error == true` is a sentinel meaning "no error text"; drop it so it
    # doesn't render as the literal string "true".
    error = if error != true, do: error

    {:ok,
     %{menu_string: IO.iodata_to_binary(["#{error}", menu_string]), should_close: should_close}}
  end

  @spec to_string(ExUssd.t(), map(), keyword()) ::
          {:ok, %{menu_string: String.t(), should_close: boolean()}}
  # Vertical orientation: `split` entries are listed per page under a title,
  # each numbered with the delimiter.
  def to_string(
        %ExUssd{
          orientation: :vertical,
          delimiter: delimiter,
          error: error,
          menu_list: menu_list,
          nav: nav,
          should_close: should_close,
          show_navigation: show_navigation,
          split: split,
          title: title,
          is_zero_based: is_zero_based
        },
        %{route: route},
        opts
      ) do
    %{depth: depth} = List.first(route)

    # Window of menu_list positions for this page, e.g. {0, 6} for depth 1,
    # split 7.
    # {0, 6}
    {min, max} = {split * (depth - 1), depth * split - 1}

    # [0, 1, 2, 3, 4, 5, 6]
    selection = Enum.into(min..max, [])

    menu_list = get_menu_list(menu_list, opts)

    menus =
      selection
      |> Enum.with_index()
      |> Enum.map(&transform(menu_list, min, delimiter, &1, is_zero_based))
      |> Enum.reject(&is_nil(&1))

    navigation = ExUssd.Nav.to_string(nav, depth, menu_list, max, length(route), :vertical)

    # Same `true` sentinel handling as the horizontal clause.
    error = if error != true, do: error

    title_error = IO.iodata_to_binary(["#{error}", "#{title}"])

    # Navigation hints are suppressed when the session is about to close.
    show_navigation =
      if should_close do
        false
      else
        show_navigation
      end

    menu_string =
      cond do
        Enum.empty?(menus) and show_navigation == false ->
          title_error

        Enum.empty?(menus) and show_navigation == true ->
          IO.iodata_to_binary([title_error, navigation])

        show_navigation == false ->
          IO.iodata_to_binary([title_error, "\n", Enum.join(menus, "\n")])

        show_navigation == true ->
          IO.iodata_to_binary([title_error, "\n", Enum.join(menus, "\n"), navigation])
      end

    {:ok, %{menu_string: menu_string, should_close: should_close}}
  end

  # Renders one numbered menu row ("3:name") for the entry at `position`,
  # or nil when the page window extends past the end of the list.
  @spec transform([ExUssd.t()], integer(), String.t(), {integer(), integer()}, boolean()) ::
          nil | binary()
  defp transform(menu_list, min, delimiter, {position, index}, is_zero_based) do
    case Enum.at(menu_list, position) do
      %ExUssd{name: name} ->
        # Zero-based menus label the first entry "0", otherwise "1".
        start = if(is_zero_based, do: 0, else: 1)
        "#{index + start + min}#{delimiter}#{name}"

      nil ->
        nil
    end
  end

  # Resolves lazily-named entries (empty name) via the navigate executor and
  # restores the original insertion order (menu_list is built reversed).
  defp get_menu_list(menu_list, opts) do
    menu_list
    |> Enum.map(fn %{name: name} = menu ->
      if String.equivalent?(name, "") do
        ExUssd.Executer.execute_navigate(menu, Map.new(opts))
      else
        menu
      end
    end)
    |> Enum.reverse()
  end
end
|
lib/ex_ussd/display.ex
| 0.683208
| 0.472562
|
display.ex
|
starcoder
|
defmodule Eon do
  @moduledoc """
  Eon is a small library for using .exs files as a
  document store. Files read with Eon are expected
  to contain only an Elixir map as well as execute
  no arbitrary code unless specified to do so. Eon
  can also write maps and structs to file.

  Eon is useful for when you would normally store
  data as JSON but you need to preserve Elixir's
  datatypes. Functions, ports and other datatypes
  that cannot be represented as pure data are not
  supported by Eon.
  """

  defmodule Error do
    defexception message: "Unexpected Eon error."
  end

  # Human-readable messages for the error atoms returned by File and by
  # Eon's own safety checks.
  # NOTE(review): the :unsafe message only mentions read_unsafe! even though
  # it is also raised from from_string!/1 — consider mentioning
  # from_string_unsafe! too.
  @errors %{
    eacces: "Permission denied.",
    eexist: "File already exists.",
    eisdir: "The named file is a directory.",
    enoent: "No such file or directory.",
    enospc: "There is no space left on the device.",
    enotdir: "Path is invalid.",
    eval: "File invalid. This usually means there is a syntax error.",
    unsafe: "File results in code execution. Use Eon.read_unsafe! to bypass."
  }

  @doc """
  Takes a string, expecting the contents to be a
  single map that executes no arbitrary code. Returns a
  tuple which contains `:ok` or `:error` as the first
  element and the result or error message respectively.

  ## Examples

      iex> Eon.from_string("%{hello: \"world\"}")
      {:ok, %{hello: "world"}}

      iex> Eon.from_string("%{num: Enum.random(0..100)}")
      {:error, :unsafe}
  """
  def from_string(string) when is_bitstring(string) do
    process_body({string, false, %{}})
  end

  @doc """
  Same as from_string/1 except it returns the result
  rather than an `:ok` tuple containing it. In the case
  of an error, an exception is raised.

  ## Examples

      iex> Eon.from_string!("%{hello: \"world\"}")
      %{hello: "world"}

      iex> Eon.from_string!("%{num: Enum.random(0..100)}")
      ** (Eon.Error) File results in code execution. Use Eon.read_unsafe! to bypass.
  """
  def from_string!(string) when is_bitstring(string) do
    case from_string(string) do
      {:ok, result} -> result
      {:error, error} -> raise_error(error)
    end
  end

  @doc """
  Same as `from_string/1` except allows arbitrary code
  execution. This effectively bypasses the step which
  prohibits the execution of Elixir code containing
  potentially unsafe data structures.
  """
  def from_string_unsafe(string) when is_bitstring(string) do
    from_string_unsafe(%{}, string)
  end

  @doc """
  Same as `from_string_unsafe/1` except it takes a map as
  the first argument and pushes the string to the second.
  Unbound variables within the evaluated string that match a
  key within the passed map will be replaced with the
  corresponding value.
  """
  def from_string_unsafe(bindings, string) when is_map(bindings) and is_bitstring(string) do
    process_body({string, true, bindings})
  end

  @doc """
  Same as `from_string_unsafe/1` except it returns the result
  rather than an `:ok` tuple containing it. In the case of an
  error, an exception is raised.
  """
  def from_string_unsafe!(string) when is_bitstring(string) do
    from_string_unsafe!(%{}, string)
  end

  @doc """
  Same as `from_string_unsafe/2` except it returns the result
  rather than an `:ok` tuple containing it. In the case of an
  error, an exception is raised.
  """
  def from_string_unsafe!(bindings, string) when is_map(bindings) and is_bitstring(string) do
    case from_string_unsafe(bindings, string) do
      {:ok, result} -> result
      {:error, error} -> raise_error(error)
    end
  end

  @doc """
  Loads a file, expecting a file with a single map
  that executes no arbitrary code. Returns a tuple
  which contains `:ok` or `:error` as the first element
  and the result or error message respectively.

  ## Examples

      iex> Eon.read("test/fixtures/basic.exs")
      {:ok, %{hello: "world"}}

      iex> Eon.read("test/fixtures/unsafe.exs")
      {:error, :unsafe}

      iex> Eon.read("non_existent_file.exs")
      {:error, :enoent}
  """
  def read(filename) when is_bitstring(filename) do
    read_file(filename, false, nil)
  end

  @doc """
  Same as read/1 except it returns the result rather
  than an `:ok` tuple containing it. In the case of an
  error, an exception is raised.

  ## Examples

      iex> Eon.read!("test/fixtures/basic.exs")
      %{hello: "world"}

      iex> Eon.read!("test/fixtures/unsafe.exs")
      ** (Eon.Error) File results in code execution. Use Eon.read_unsafe! to bypass.

      iex> Eon.read!("non_existent_file.exs")
      ** (Eon.Error) No such file or directory.
  """
  def read!(filename) when is_bitstring(filename) do
    case read(filename) do
      {:ok, result} -> result
      {:error, error} -> raise_error(error)
    end
  end

  @doc """
  Same as `read/1` except allows arbitrary code execution.
  This effectively bypasses the step which prohibits the
  execution of Elixir code containing potentially unsafe
  data structures.
  """
  def read_unsafe(filename) when is_bitstring(filename) do
    read_unsafe(%{}, filename)
  end

  @doc """
  Same as `read_unsafe/1` except it takes a map as the
  first argument and pushes the filename to the second.
  Unbound variables within the loaded file that match a
  key within the passed map will be replaced with the
  corresponding value.
  """
  def read_unsafe(bindings, filename) when is_map(bindings) and is_bitstring(filename) do
    read_file(filename, true, bindings)
  end

  @doc """
  Same as `read_unsafe/1` except it returns the result rather
  than an `:ok` tuple containing it. In the case of an error,
  an exception is raised.
  """
  def read_unsafe!(filename) when is_bitstring(filename) do
    read_unsafe!(%{}, filename)
  end

  @doc """
  Same as `read_unsafe/2` except it returns the result rather
  than an `:ok` tuple containing it. In the case of an error,
  an exception is raised.
  """
  def read_unsafe!(bindings, filename) when is_map(bindings) and is_bitstring(filename) do
    case read_unsafe(bindings, filename) do
      {:ok, result} -> result
      {:error, error} -> raise_error(error)
    end
  end

  @doc """
  Takes a map and writes it to file, returning the io
  device (a pid) of the opened file.

  ## Examples

      iex> Eon.write(%{hello: "world"}, "hello.exs")
      {:ok, #PID<0.159.0>}

      iex> Eon.write(%{hello: "world"}, "/hello.exs")
      {:error, :eacces}
  """
  def write(map, filename) when is_map(map) and is_bitstring(filename) do
    # Merging into %{} strips any __struct__ key so structs are written as
    # plain maps.
    map = Map.merge(%{}, map)
    contents = Macro.to_string(quote do: unquote(map))

    case File.open(filename, [:write]) do
      {:ok, file} ->
        IO.binwrite(file, contents)
        {:ok, file}

      {:error, error} ->
        {:error, error}

      _ ->
        {:error, :unknown}
    end
  end

  @doc """
  Same as `write/2` except it returns the result rather
  than an `:ok` tuple containing it. In the case of an
  error, an exception is raised.

  ## Examples

      iex> Eon.write!(%{hello: "world"}, "hello.exs")
      #PID<0.159.0>

      iex> Eon.write!(%{hello: "world"}, "/hello.exs")
      ** (Eon.Error) Permission denied.
  """
  def write!(map, filename) when is_map(map) and is_bitstring(filename) do
    case write(map, filename) do
      {:ok, result} -> result
      {:error, error} -> raise_error(error)
    end
  end

  # Maps an error atom to its message from @errors and raises Eon.Error;
  # unknown atoms raise the default message.
  defp raise_error(error) do
    case get_in(@errors, [error]) do
      message when is_bitstring(message) -> raise Eon.Error, message: message
      _ -> raise Eon.Error
    end
  end

  # Reads the file from disk and hands its contents to process_body/1,
  # passing through File.read's error atoms unchanged.
  defp read_file(filename, allow_unsafe, bindings) do
    case File.read(filename) do
      {:ok, file} -> process_body({file, allow_unsafe, bindings})
      {:error, error} -> {:error, error}
      _ -> {:error, :unknown}
    end
  end

  # Unsafe path: evaluates the body directly with the given bindings.
  # Map.merge(%{}, contents) strips a struct's __struct__ key.
  defp process_body({body, _allow_unsafe = true, bindings}) do
    case Code.eval_string(body, Enum.map(bindings, &(&1))) do
      {contents, _results} -> {:ok, Map.merge(%{}, contents)}
      _ -> {:error, :eval}
    end
  end

  # Safe path: only evaluates after the AST passes check_if_safe/1.
  defp process_body({file, _, _bindings}) do
    case check_if_safe(file) do
      true ->
        {contents, _results} = Code.eval_string(file, [])
        {:ok, Map.merge(%{}, contents)}

      false ->
        {:error, :unsafe}
    end
  end

  # Parses the body to quoted form (without evaluating) and checks every
  # element for constructs that would execute code.
  defp check_if_safe(file) do
    case Code.string_to_quoted(file) do
      {:ok, {_type, _line, contents}} when is_list(contents) ->
        contents
        |> Enum.map(&is_safe?/1)
        |> List.flatten
        |> Enum.all?(&(&1))

      _ ->
        false
    end
  end

  # Walks a quoted AST fragment; only literal tuples (:{}) and maps (:%{})
  # are allowed as non-literal nodes — any other call form is unsafe.
  # The list results are flattened and and-ed together in check_if_safe/1.
  defp is_safe?(value) do
    case value do
      {_key, {expression, _line, value}} ->
        if expression != :{} and expression != :%{} do
          false
        else
          value
          |> Enum.filter(&(is_tuple(&1)))
          |> Enum.map(&is_safe?/1)
        end

      {_key, value} when is_list(value) ->
        value
        |> Enum.filter(&(is_tuple(&1)))
        |> Enum.map(&is_safe?/1)
        |> Enum.all?(&(&1))

      {expression, _line, _value} ->
        expression == :{} or expression == :%{}

      _ ->
        true
    end
  end
end
|
lib/eon.ex
| 0.878725
| 0.595316
|
eon.ex
|
starcoder
|
defmodule PolylineHelpers do
  @moduledoc """
  Helpers for working with polylines.
  """

  @doc """
  Polylines may be too long or verbose. This applies the following
  optimizations:

  - if an encoded line is longer than ~500 bytes (~400 points), the number
    of points is reduced by sampling
  - points that already appeared in a previously condensed polyline are
    removed
  - the precision of individual points is truncated to 4 decimal places
  """
  @spec condense([String.t()]) :: [String.t()]
  def condense(polylines) do
    polylines
    |> Enum.reduce({[], MapSet.new()}, &do_condense_reduce/2)
    |> elem(0)
  end

  # Decodes, smooths, truncates, and de-duplicates a single polyline,
  # threading the accumulated set of already-seen points.
  @spec do_condense_reduce(String.t(), {[String.t()], MapSet.t()}) :: {[String.t()], MapSet.t()}
  defp do_condense_reduce(polyline, {polylines, point_set}) do
    # Truncate coordinates to 4 decimal places (~11 m precision).
    floor = fn x -> Float.floor(x, 4) end

    {new_decoded_polyline, new_point_set} =
      polyline
      |> Polyline.decode()
      |> do_pointlist_smoothing(polyline)
      |> Enum.map(fn {lat, lng} -> {floor.(lat), floor.(lng)} end)
      |> filter_distinct_points(point_set)

    if new_decoded_polyline == [] do
      # Every point was a duplicate; drop the line but keep the updated set.
      {polylines, new_point_set}
    else
      polyline = Polyline.encode(new_decoded_polyline)
      {[polyline | polylines], new_point_set}
    end
  end

  # Downsamples the point list when the encoded polyline is large.
  @spec do_pointlist_smoothing([{float, float}], String.t()) :: [{float, float}]
  defp do_pointlist_smoothing(points, polyline) do
    # One "score" unit per 500 encoded bytes; small lines are left untouched.
    score = div(byte_size(polyline), 500)

    if score == 0 do
      points
    else
      # This grows proportionally to reasonable levels of point extraction:
      # score of 1: take every other, 2: take every fourth, 3 or above: take every sixth.
      shortening_factor = 2 * min(score, 3)
      # We always include the last point because otherwise the line may not
      # end at the correct coordinate.
      Enum.take_every(points, shortening_factor) ++ [List.last(points)]
    end
  end

  # Removes points already present in `all_points` and returns the union.
  @spec filter_distinct_points([{float, float}], MapSet.t()) :: {[{float, float}], MapSet.t()}
  defp filter_distinct_points(points, all_points) do
    # `points` stays a List, which makes it easier to encode into polylines
    # without conversion; `all_points` is a MapSet because distinctness
    # checks are more efficient that way.
    {points -- MapSet.to_list(all_points), MapSet.union(MapSet.new(points), all_points)}
  end
end
|
apps/site/lib/polyline_helpers.ex
| 0.764804
| 0.542076
|
polyline_helpers.ex
|
starcoder
|
defmodule Huffman.Tree do
  @moduledoc """
  Builds, serializes and deserializes Huffman coding trees.
  """

  alias Huffman.{Leaf, Node, Queue}

  @type tree :: %Node{left: Node.child(), right: Node.child()}
  @type serialized_tree() :: bitstring()

  @doc """
  build/1 takes the priority queue and transforms it into a binary tree
  with the lowest priorities as leafs furthest from the root and higher
  priorities closer to the root.
  """
  @spec build(Queue.queue()) :: tree()
  def build(queue) do
    # Wrap every queued item in a leaf, then merge upwards into a tree.
    queue
    |> Queue.map(&to_leaf/1)
    |> build_tree()
  end

  # Pops the two lowest-frequency entries, joins them under a parent node
  # whose frequency is their sum, and repeats until one tree remains.
  defp build_tree(queue) do
    {{left_freq, left_value}, queue} = Queue.pop(queue)
    {{right_freq, right_value}, queue} = Queue.pop(queue)

    parent = %Node{left: left_value, right: right_value}
    queue = Queue.push(queue, left_freq + right_freq, parent)

    case Queue.length(queue) do
      len when len > 1 ->
        build_tree(queue)

      _single ->
        {{_freq, tree}, _queue} = Queue.pop(queue)
        tree
    end
  end

  # Wraps a raw queue item in a leaf struct.
  defp to_leaf(item), do: %Leaf{val: item}

  @doc """
  serialize/1 takes the tree and outputs its serialized representation.
  """
  @spec serialize(Node.t()) :: serialized_tree()
  def serialize(node)

  # A leaf is a 1 bit followed by its value.
  def serialize(%Leaf{val: val}), do: <<1::1, val::bitstring>>

  # An inner node is a 0 bit followed by its left then right subtree.
  def serialize(%Node{left: left, right: right}) do
    <<0::1, serialize(left)::bitstring, serialize(right)::bitstring>>
  end

  @doc """
  deserialize/1 takes the serialized tree and returns a populated binary tree.
  """
  @spec deserialize(serialized_tree()) :: {:ok, Node.child()}
  def deserialize(serialized_tree) do
    with {node, _rest} <- read(serialized_tree) do
      {:ok, node}
    end
  end

  # Leaf marker: a 1 bit, then exactly one byte of value.
  defp read(<<1::1, value::binary-size(1), rest::bitstring>>) do
    {%Leaf{val: value}, rest}
  end

  # Node marker: a 0 bit, then the left subtree followed by the right.
  defp read(<<0::1, rest::bitstring>>) do
    {left, rest} = read(rest)
    {right, rest} = read(rest)

    {%Node{left: left, right: right}, rest}
  end
end
|
lib/huffman/tree.ex
| 0.78374
| 0.481698
|
tree.ex
|
starcoder
|
defmodule Bitcraft.BitBlock do
@moduledoc ~S"""
Defines a bit-block.
A bit-block is used to map a bitstring into an Elixir struct.
The definition of the bit-block is possible through `defblock/3`.
`defblock/3` is typically used to decode bitstring from a bit stream,
usually a binary protocol (e.g.: TCP/IP), into Elixir structs and
vice-versa (encoding Elixir structs into a bitstring).
## Example
defmodule MyBlock do
import Bitcraft.BitBlock
defblock "my-static-block" do
segment :h, 5, type: :binary
segment :s1, 4, default: 1
segment :s2, 8, default: 1, sign: :signed
segment :t, 3, type: :binary
end
end
The `segment` macro defines a segment in the bit-block with given
name and size. Bit-blocks are regular structs and can be created
and manipulated directly using Elixir's struct API:
iex> block = %MyBlock{h: "begin", s1: 3, s2: -3, t: "end"}
iex> %{block | h: "hello"}
By default, a bit-block will automatically generate the implementation
for the callbacks `c:encode/1` and `c:decode/3`. You can then encode
the struct into a bitstring and then decode a bitstring into a Elixir
struct, like so:
iex> bits = MyBlock.encode(block)
iex> data = MyBlock.decode(bits)
What we defined previously it is a static block, means a fixed size always.
This is the easiest scenario because we don't need to provide any additional
logic to the the `encode/1` and `decode/3` functions. For that reason,
in the example above, we call `decode` function only with the input
bitstring, the other arguments are not needed since they are ment to
resolve the size for dynamic segments.
## Dynamic Segments
There are other scenarios where the block of bits is dynamic, means
the size of the block is variable and depends on other segment values
to calculate the size, this makes it more complicated to decode it.
For those variable blocks, we can define dynamic segments using the
`segment/3` API:
segment :var, :dynamic, type: :bits
As you can see, for the size argument we are passing `:dynamic` atom.
In this way, the segment is marked as dynamic and its size is resolved
later during the decoding process.
The following is a more elaborate example of block. We define an IPv4
datagram which has a static and a dynamic part. The dynamic part is
basically the options and the data. The block can be defined as:
defmodule IpDatagram do
@moduledoc false
import Bitcraft.BitBlock
defblock "IP-datagram" do
segment :vsn, 4
segment :hlen, 4
segment :srvc_type, 8
segment :tot_len, 16
segment :id, 16
segment :flags, 3
segment :frag_off, 13
segment :ttl, 8
segment :proto, 8
segment :hdr_chksum, 16, type: :bits
segment :src_ip, 32, type: :bits
segment :dst_ip, 32, type: :bits
segment :opts, :dynamic, type: :bits
segment :data, :dynamic, type: :bits
end
# Size resolver for dynamic segments invoked during the decoding
def calc_size(%__MODULE__{hlen: hlen}, :opts, dgram_s)
when hlen >= 5 and 4 * hlen <= dgram_s do
opts_s = 4 * (hlen - 5)
{opts_s * 8, dgram_s}
end
def calc_size(%__MODULE__{leftover: leftover}, :data, dgram_s) do
data_s = :erlang.bit_size(leftover)
{data_s, dgram_s}
end
end
Here, the segment corresponding to the `:opts` segment has a type modifier,
specifying that `:opts` is to bind to a bitstring (or binary). All other
segments have the default type equal to unsigned integer.
An IP datagram header is of variable length. This length is measured in the
number of 32-bit words and is given in the segment corresponding to `:hlen`.
The minimum value of `:hlen` is 5. It is the segment corresponding to
`:opts` that is variable, so if `:hlen` is equal to 5, `:opts` becomes
an empty binary. Finally, the tail segment `:data` bind to bitstring.
The decoding of the datagram fails if one of the following occurs:
* The first 4-bits segment of datagram is not equal to 4.
* `:hlen` is less than 5.
* The size of the datagram is less than `4*hlen`.
Since this block has dynamic segments, we can now use the other decode
arguments to resolve the size for them during the decoding process:
IpDatagram.decode(bits, :erlang.bit_size(bits), &IpDatagram.calc_size/3)
Where:
* The first argument is the input IPv4 datagram (bitstring).
* The second argument is is the accumulator to the callback function
(third argument), in this case is the total number of bits in the
datagram.
* And the third argument is the function callback or dynamic size resolver
that will be invoked by the decoder for each dynamic segment. The callback
functions receives the data struct with the current decoded segments, the
segment name (to be pattern-matched and resolve its size), and the
accumulator that can be used to pass metadata during the dynamic
segments evaluation.
## Reflection
Any bit-block module will generate the `__bit_block__` function that can be
used for runtime introspection of the bit-block:
* `__bit_block__(:name)` - Returns the name or alias as given to
`defblock/3`.
* `__bit_block__(:segments)` - Returns a list of all segments names.
* `__schema__(:segment_info, segment)` - Returns a map with the segment
info.
## Working with typespecs
By default, the typespec `t/0` is generated but in the simplest form:
@type t :: %__MODULE__{}
If you want to provide a more accurate typespec for you block adding the
typespecs for each of the segments on it, you can set the option `:typespec`
to `false` when defining the block, like so:
defblock "my-block", typespec: false do
...
end
"""
import Record
# Block segment record
defrecord(:block_segment,
name: nil,
size: nil,
type: nil,
sign: nil,
endian: nil,
default: nil
)
@typedoc "Basic Segment types"
@type base_seg_type ::
:integer
| :float
| :bitstring
| :bits
| :binary
| :bytes
| :utf8
| :utf16
| :utf32
@typedoc "Segment type"
@type segment_type :: base_seg_type | Bitcraft.BitBlock.Array.t()
@typedoc "Block's segment type definition"
@type block_segment ::
record(:block_segment,
name: atom,
size: integer | :dynamic | nil,
type: segment_type,
sign: atom,
endian: atom,
default: term
)
@typedoc "Bitblock definition"
@type t :: %{optional(atom) => any, __struct__: atom}
@typedoc "Resolver function for the size of dynamic segments."
@type dynamic_size_resolver ::
(t, seg_name :: atom, acc :: term ->
{size :: non_neg_integer, acc :: term})
## Callbacks
@doc """
Encodes the given data type into a bitstring.
## Example
iex> block = %MyBlock{seg1: 1, seg: 2}
iex> MyBlock.encode(block)
"""
@callback encode(t) :: bitstring
@doc """
Decodes the given bitstring into the corresponding data type.
## Example
iex> block = %MyBlock{seg1: 1, seg: 2}
iex> bits = MyBlock.encode(block)
iex> MyBlock.decode(bits)
"""
@callback decode(input :: bitstring, acc :: term, dynamic_size_resolver) :: t
## API
alias __MODULE__
alias __MODULE__.{Array, DynamicSegment}
@doc """
Defines a bit-block struct with a name and segment definitions.
"""
defmacro defblock(name, opts \\ [], do: block) do
prelude =
quote do
:ok = Module.put_attribute(__MODULE__, :block_segments, [])
:ok = Module.put_attribute(__MODULE__, :dynamic_segments, [])
name = unquote(name)
unquote(block)
end
postlude =
quote unquote: false, bind_quoted: [opts: opts] do
@behaviour Bitcraft.BitBlock
segments = Module.get_attribute(__MODULE__, :block_segments, [])
struct_segments =
Enum.reduce(
segments ++ [block_segment(name: :leftover, default: <<>>)],
[],
fn block_segment(name: name, type: type, default: default), acc ->
[{name, default} | acc]
end
)
# define struct
@enforce_keys Keyword.get(opts, :enforce_keys, [])
defstruct struct_segments
# maybe define default data type
if Keyword.get(opts, :typespec, true) == true do
@typedoc "#{__MODULE__} type"
@type t :: %__MODULE__{}
end
# build encoding expressions for encode/decode functions
{bit_expr, map_expr} = BitBlock.build_encoding_exprs(segments, "", "")
## Encoding Functions
@doc false
def decode(unquote(bit_expr)) do
struct(__MODULE__, unquote(map_expr))
end
def decode(unquote(bit_expr), acc_in, fun) when is_function(fun, 3) do
struct = struct(__MODULE__, unquote(map_expr))
BitBlock.decode_segments(@dynamic_segments, struct, acc_in, fun)
end
if length(@dynamic_segments) > 0 do
@doc false
def encode(data) do
BitBlock.encode_segments(@block_segments, data)
end
else
@doc false
def encode(unquote(map_expr)) do
unquote(bit_expr)
end
end
## Reflection Functions
@doc false
def __bit_block__(:name), do: unquote(name)
def __bit_block__(:segments) do
for block_segment(name: name) <- unquote(Macro.escape(@block_segments)), do: name
end
@doc false
def __bit_block__(:segment_info, segment) do
case :lists.keyfind(segment, 2, @block_segments) do
false -> nil
rec -> segment_to_map(rec)
end
end
## Private
defp segment_to_map(rec) do
%{
name: block_segment(rec, :name),
size: block_segment(rec, :size),
type: block_segment(rec, :type),
sign: block_segment(rec, :sign),
endian: block_segment(rec, :endian),
default: block_segment(rec, :default)
}
end
end
quote do
unquote(prelude)
unquote(postlude)
end
end
@doc """
Internal helper for decoding the block segments.
"""
@spec decode_segments([block_segment], map, term, dynamic_size_resolver) :: map
def decode_segments(block_segments, struct, acc_in, fun) do
block_segments
|> Enum.reduce(
{struct, acc_in},
fn block_segment(name: name, type: type, sign: sign, endian: endian), {data_acc, cb_acc} ->
# exec callback
{size, cb_acc} = fun.(data_acc, name, cb_acc)
# parse segment bits
{value, bits} = Bitcraft.decode_segment(data_acc.leftover, size, type, sign, endian)
# update decoded data
data_acc = %{
data_acc
| name => %DynamicSegment{
value: value,
size: size
},
leftover: bits
}
{data_acc, cb_acc}
end
)
|> elem(0)
end
@doc """
Internal helper for encoding the block segments.
"""
@spec encode_segments([block_segment], map) :: bitstring
def encode_segments(block_segments, data) do
Enum.reduce(block_segments, <<>>, fn
block_segment(size: nil), acc ->
acc
block_segment(name: name, size: :dynamic, type: type, sign: sign, endian: endian), acc ->
case Map.fetch!(data, name) do
%DynamicSegment{value: value, size: size} ->
value = Bitcraft.encode_segment(value, size, type, sign, endian)
<<acc::bitstring, value::bitstring>>
nil ->
acc
value ->
raise ArgumentError,
"dynamic segment #{name} is expected to be of type " <>
"#{DynamicSegment}, but got: #{inspect(value)}"
end
block_segment(name: name, size: size, type: type, sign: sign, endian: endian), acc ->
value =
data
|> Map.fetch!(name)
|> Bitcraft.encode_segment(size, type, sign, endian)
<<acc::bitstring, value::bitstring>>
end)
end
@doc """
Defines a segment on the block with a given `name` and `size`.
See `Kernel.SpecialForms.<<>>/1` for more information about the
segment types, size, unit, and so on.
## Options
* `:type` - Defines the segment data type the set of bits will be
mapped to. See `Kernel.SpecialForms.<<>>/1` for more information
about the segment data types. Defaults to `:integer`.
* `:sign` - Applies only to integers and defines whether the integer
is `:signed` or `:unsigned`. Defaults to `:unsigned`.
* `:endian` - Applies to `utf32`, `utf16`, `float`, `integer`.
Defines the endianness, `:big` or `:little`. Defaults to `:big`.
* `:default` - Sets the default value on the block and the struct.
The default value is calculated at compilation time, so don't use
expressions for generating values dynamically as they would then
be the same for all records. Defaults to `nil`.
"""
defmacro segment(name, size \\ nil, opts \\ []) do
quote do
BitBlock.__segment__(__MODULE__, unquote(name), unquote(size), unquote(opts))
end
end
@doc """
Same as `segment/3`, but automatically generates a **dynamic**
segment with the type `Bitcraft.BitBlock.Array.t()`.
The size of the array-type segment in bits has to be calculated
dynamically during the decoding, and the length of the array will
be `segment_size/element_size`. This process is performs automatically
during the decoding. hence, it is important to set the right
`element_size` and also implement properly the callback to calculate
the segment size. See `Bitcraft.BitBlock.dynamic_size_resolver()`.
## Options
Options are the same as `segment/3`, and additionally:
* `:element_size` - The size in bits of each array element.
Defaults to `8`.
**NOTE:** The `:type` is the same as `segment/3` BUT it applies to the
array element.
"""
defmacro array(name, opts \\ []) do
{type, opts} = Keyword.pop(opts, :type, :integer)
{size, opts} = Keyword.pop(opts, :element_size, 8)
opts = [type: %Array{type: type, element_size: size}] ++ opts
quote do
BitBlock.__segment__(
__MODULE__,
unquote(name),
:dynamic,
unquote(Macro.escape(opts))
)
end
end
@doc """
This is a helper function used internally for building a block segment.
"""
@spec __segment__(module, atom, non_neg_integer, Keyword.t()) :: :ok
def __segment__(mod, name, size, opts) do
segment =
block_segment(
name: name,
size: size,
type: Keyword.get(opts, :type, :integer),
sign: Keyword.get(opts, :sign, :unsigned),
endian: Keyword.get(opts, :endian, :big),
default: Keyword.get(opts, :default, nil)
)
if size == :dynamic do
dynamic_segments = Module.get_attribute(mod, :dynamic_segments, [])
Module.put_attribute(mod, :dynamic_segments, dynamic_segments ++ [segment])
end
block_segments = Module.get_attribute(mod, :block_segments, [])
Module.put_attribute(mod, :block_segments, block_segments ++ [segment])
end
## Helpers

@doc """
Builds the quoted bitstring-match expression and the quoted map expression
used for encoding/decoding a block.

Walks the segment list, accumulating two source strings: one for the
binary pattern and one for the result map. Fixed-size segments contribute
to both; dynamically sized segments (atom size) only contribute their
default value to the map. The remaining bits are always captured as
`leftover`.
"""
@spec build_encoding_exprs([block_segment], String.t(), String.t()) ::
        {bin_expr_ast :: term, map_expr_ast :: term}
def build_encoding_exprs([], bin_acc, map_acc) do
  {
    Code.string_to_quoted!("<<#{bin_acc}leftover::bitstring>>"),
    Code.string_to_quoted!("%{#{map_acc}leftover: leftover}")
  }
end

# Fixed-size segment: appears in the binary pattern and in the map.
def build_encoding_exprs([block_segment(name: name, size: size) = seg | rest], bin_acc, map_acc)
    when is_integer(size) do
  build_encoding_exprs(
    rest,
    bin_acc <> "#{name}::#{build_modifier(seg)}, ",
    map_acc <> "#{name}: #{name}, "
  )
end

# Dynamically sized segment: no static binary pattern; the map carries
# the segment's default value instead.
def build_encoding_exprs(
      [block_segment(name: name, size: size, default: default) | rest],
      bin_acc,
      map_acc
    )
    when is_atom(size) do
  build_encoding_exprs(rest, bin_acc, map_acc <> "#{name}: #{inspect(default)}, ")
end
## Private

# Builds the bitstring type-modifier string for a segment,
# e.g. "integer-unsigned-big-size(8)".

# Numeric types carry sign, endianness and an explicit size.
defp build_modifier(block_segment(type: type, sign: sign, endian: endian, size: size))
     when type in [:integer, :float] do
  Enum.join([type, sign, endian, "size(#{size})"], "-")
end

# Binary-like types only need an explicit size.
defp build_modifier(block_segment(type: type, size: size))
     when type in [:bitstring, :bits, :binary, :bytes] do
  Enum.join([type, "size(#{size})"], "-")
end

# UTF types have an intrinsic size, so only endianness is encoded.
defp build_modifier(block_segment(type: type, endian: endian))
     when type in [:utf8, :utf16, :utf32] do
  Enum.join([type, endian], "-")
end
end
|
lib/bitcraft/bit_block.ex
| 0.917263
| 0.785185
|
bit_block.ex
|
starcoder
|
defmodule Range do
  @moduledoc """
  Defines a range.

  A range represents a sequence of one or many, ascending or descending,
  consecutive integers. A range is increasing when `first <= last` and
  decreasing when `first > last`; both endpoints are always included.

  Internally a range is a struct, but ranges are most commonly created
  and matched via the `../2` macro, auto-imported from `Kernel`:

      iex> range = 1..3
      1..3
      iex> first..last = range
      iex> first
      1
      iex> last
      3

  Ranges implement the `Enumerable` protocol, so the functions in the
  `Enum` module work on them:

      iex> range = 1..10
      1..10
      iex> Enum.reduce(range, 0, fn i, acc -> i * i + acc end)
      385
      iex> Enum.count(range)
      10
      iex> Enum.member?(range, 11)
      false
      iex> Enum.member?(range, 8)
      true

  These calls are memory-efficient no matter how large the range is:
  the `Enumerable` implementation works purely from the two endpoints
  and never materializes the list of integers in between.
  """

  defstruct first: nil, last: nil

  @type t :: %__MODULE__{first: integer, last: integer}
  @type t(first, last) :: %__MODULE__{first: first, last: last}

  @doc """
  Creates a new range.
  """
  @spec new(integer, integer) :: t
  def new(first, last) when is_integer(first) and is_integer(last) do
    %Range{first: first, last: last}
  end

  def new(first, last) do
    raise ArgumentError,
          "ranges (first..last) expect both sides to be integers, " <>
            "got: #{inspect(first)}..#{inspect(last)}"
  end

  @doc """
  Checks if two ranges are disjoint.

  ## Examples

      iex> Range.disjoint?(1..5, 6..9)
      true
      iex> Range.disjoint?(5..1, 6..9)
      true
      iex> Range.disjoint?(1..5, 5..9)
      false
      iex> Range.disjoint?(1..5, 2..7)
      false

  """
  @doc since: "1.8.0"
  @spec disjoint?(t, t) :: boolean
  def disjoint?(first1..last1, first2..last2) do
    # Orient both ranges as {low, high}; two intervals are disjoint
    # exactly when one ends before the other begins.
    {lo1, hi1} = normalize(first1, last1)
    {lo2, hi2} = normalize(first2, last2)
    hi2 < lo1 or hi1 < lo2
  end

  # Orders a pair of endpoints as {low, high}.
  @compile inline: [normalize: 2]
  defp normalize(first, last) when first > last, do: {last, first}
  defp normalize(first, last), do: {first, last}

  # TODO: Remove by 2.0
  @doc false
  @deprecated "Pattern match on first..last instead"
  def range?(term)
  def range?(first..last) when is_integer(first) and is_integer(last), do: true
  def range?(_), do: false
end
defimpl Enumerable, for: Range do
  # Reduction walks the range one integer at a time; the direction is
  # decided once up front from the endpoints.
  def reduce(first..last, acc, fun) do
    do_reduce(first, last, acc, fun, _ascending? = last >= first)
  end

  defp do_reduce(_first, _last, {:halt, acc}, _fun, _ascending?) do
    {:halted, acc}
  end

  defp do_reduce(first, last, {:suspend, acc}, fun, ascending?) do
    {:suspended, acc, &do_reduce(first, last, &1, fun, ascending?)}
  end

  defp do_reduce(first, last, {:cont, acc}, fun, true) when first <= last do
    do_reduce(first + 1, last, fun.(first, acc), fun, true)
  end

  defp do_reduce(first, last, {:cont, acc}, fun, false) when first >= last do
    do_reduce(first - 1, last, fun.(first, acc), fun, false)
  end

  # Walked past the far endpoint: done.
  defp do_reduce(_first, _last, {:cont, acc}, _fun, _ascending?) do
    {:done, acc}
  end

  # Only integers can be members; a value is inside when it sits
  # between the (direction-independent) low and high endpoints.
  def member?(first..last, value) when is_integer(value) do
    {:ok, min(first, last) <= value and value <= max(first, last)}
  end

  def member?(_.._, _value) do
    {:ok, false}
  end

  # Inclusive endpoints: the count is the endpoint distance plus one.
  def count(first..last) do
    {:ok, abs(last - first) + 1}
  end

  # Constant-time slicing: the slicing fun receives an offset and a
  # length and materializes just that window.
  def slice(first..last) do
    if last >= first do
      {:ok, last - first + 1, &walk_up(first + &1, &2)}
    else
      {:ok, first - last + 1, &walk_down(first - &1, &2)}
    end
  end

  defp walk_up(current, 1), do: [current]
  defp walk_up(current, remaining), do: [current | walk_up(current + 1, remaining - 1)]

  defp walk_down(current, 1), do: [current]
  defp walk_down(current, remaining), do: [current | walk_down(current - 1, remaining - 1)]
end
defimpl Inspect, for: Range do
  import Inspect.Algebra

  # Renders a range as "first..last", delegating endpoint rendering to
  # the endpoints' own Inspect implementations.
  def inspect(first..last, opts) do
    first
    |> to_doc(opts)
    |> concat("..")
    |> concat(to_doc(last, opts))
  end
end
|
lib/elixir/lib/range.ex
| 0.838531
| 0.637172
|
range.ex
|
starcoder
|
defmodule Autox.RelationUtils do
  import Ecto
  alias Ecto.Association.BelongsTo
  alias Ecto.Association.Has
  alias Ecto.Association.HasThrough
  alias Autox.ChangesetUtils
  @moduledoc """
  For reference:
  %Ecto.Association.BelongsTo{cardinality: :one, defaults: [], field: :flavor,
  owner: Apiv3.RelationshipUtilsTest.Ingredient, owner_key: :flavor_id,
  queryable: Flavor, related: Flavor, related_key: :id}
  %Ecto.Association.Has{cardinality: :many, defaults: [], field: :shops_flavors,
  on_cast: :changeset, on_delete: :nothing, on_replace: :raise,
  owner: Apiv3.RelationshipUtilsTest.Flavor, owner_key: :id,
  queryable: ShopFlavor, related: ShopFlavor, related_key: :flavor_id}
  %Ecto.Association.HasThrough{cardinality: :many, field: :shops,
  owner: Apiv3.RelationshipUtilsTest.Flavor, owner_key: :id,
  through: [:shops_flavors, :shop]}
  %Ecto.Association.Has{cardinality: :many, defaults: [], field: :pictures,
  on_cast: :changeset, on_delete: :nothing, on_replace: :raise,
  owner: Apiv3.RelationshipUtilsTest.Shop, owner_key: :id,
  queryable: {"shops_pictures", Apiv3.Picture}, related: Apiv3.Picture,
  related_key: :shop_id}
  """
  # Short alias for creative_action_and_changeset/4.
  def caac(r, p, k, d), do: creative_action_and_changeset(r, p, k, d)
  # Resolves the `data` payload (a JSON-API style map with "type"/"id"/
  # "attributes") into either a fetched model or raw params, then figures
  # out which repo action + changeset realizes the `key` relation on
  # `parent`. Returns `{:insert | :update, changeset}`, `{:ok, model}`
  # when the relation already holds, or `{:error, reason}`.
  def creative_action_and_changeset(repo, parent, key, data) do
    data
    |> find_class
    |> find_or_create_model(repo, data)
    |> case do
      {:ok, data} -> create_core(repo, parent, key, data)
      other -> other
    end
  end
  # Maps the payload's "type" string onto a known model module (or nil).
  def find_class(data), do: data |> Map.get("type") |> maybe_to_existing_model
  defp find_or_create_model(nil, _, _), do: {:error, "no such type"}
  # When an "id" is given the referenced model must already exist; it is
  # loaded and stashed under the :model key (the "id" key is dropped).
  defp find_or_create_model(class, repo, %{"id" => id} = data) do
    case repo.get(class, id) do
      nil -> {:error, "no such model #{id}"}
      model ->
        data = data
        |> Map.drop(["id"])
        |> Map.put(:model, model)
        {:ok, data}
    end
  end
  defp find_or_create_model(_, _, data), do: {:ok, data}
  # Dispatches on the relation's cardinality: :one guards against an
  # already-occupied slot; :many guards against duplicate membership.
  defp create_core(repo, parent, key, data) do
    parent
    |> reflect_association(key)
    |> case do
      %{cardinality: :one} = relation ->
        parent
        |> repo.preload([key])
        |> singular_cardinality_check(key, data, &create1(&1, relation, &2))
      %{cardinality: :many} = relation ->
        checker = &repo.get(assoc(parent, key), &1)
        worker = &createx(parent, relation, &1)
        plural_cardinality_check(checker, data, worker)
    end
  end
  def createx(parent, %Has{} = r, data), do: create1(parent, r, data)
  # A has-through relation is realized by reflecting both hops and then
  # dispatching on the {near, far} cardinality pair (see through/1).
  def createx(parent, %HasThrough{through: [near_field, far_field]}, data) do
    parent
    |> reflect_association(near_field)
    |> double_reflect_association(far_field)
    |> through
    |> apply([parent, data])
  end
  defp many_many_through(_, _), do: {:error, "refuse to create many-many through relationship due"}
  # Builds the intermediate (join) record, linking it to `parent` via the
  # near relation keys and nesting the payload under the far field.
  defp one_many_through(near_relation, far_relation, parent, data) do
    %{field: near_field, related: class, owner_key: pkey, related_key: fkey} = near_relation
    %{field: far_field} = far_relation
    attributes = data
    |> Map.get("attributes", %{})
    |> Map.put(to_string(fkey), Map.get(parent, pkey))
    relationships = %{} |> Map.put(to_string(far_field), data)
    params = %{"type" => class, "attributes" => attributes, "relationships" => relationships}
    case parent |> Map.get(near_field) do
      nil -> create1(parent, near_relation, params)
      parent -> create1(parent, far_relation, params)
    end
  end
  # Same construction as one_many_through/4, but the far side must
  # reference an existing model (payload carries :model).
  defp many_one_through(near_relation, far_relation, parent, data) do
    %{field: near_field, related: class, owner_key: pkey, related_key: fkey} = near_relation
    %{field: far_field} = far_relation
    attributes = data
    |> Map.get("attributes", %{})
    |> Map.put(to_string(fkey), Map.get(parent, pkey))
    relationships = %{} |> Map.put(to_string(far_field), data)
    params = %{"type" => class, "attributes" => attributes, "relationships" => relationships}
    case data |> Map.get(:model) do
      nil -> {:error, "many through one with a nonexistent one isn't allowed"}
      _ -> create1(parent, near_relation, params)
    end
  end
  def one_one_through(a, b, c, d), do: one_many_through(a, b, c, d)
  # Cardinality-pair dispatch table for has-through creation.
  defp through({%{cardinality: :many}, %{cardinality: :many}}), do: &many_many_through/2
  defp through({%{cardinality: :many} = rn, %{cardinality: :one} = rf}), do: &many_one_through(rn, rf, &1, &2)
  defp through({%{cardinality: :one} = rn, %{cardinality: :many} = rf}), do: &one_many_through(rn, rf, &1, &2)
  defp through({%{cardinality: :one} = rn, %{cardinality: :one} = rf}), do: &one_one_through(rn, rf, &1, &2)
  defp create1(parent, %HasThrough{} = r, data), do: createx(parent, r, data)
  # belongs_to + existing child: point the parent's FK at the child.
  defp create1(parent, %BelongsTo{owner_key: key, owner: class}, %{model: child}) do
    params = %{} |> Map.put(key, child.id)
    cs = parent |> class.update_changeset(params)
    {:update, cs}
  end
  # FIX: the unused `data` binding produced a compiler warning; it is
  # intentionally ignored, so it is now underscored.
  defp create1(_, %BelongsTo{}, _data) do
    {:error, "refuse to create belongs_to relationships in reverse"}
  end
  # has + existing child: point the child's FK at the parent.
  defp create1(parent, %Has{related_key: key, related: class}, %{model: child}) do
    params = %{} |> Map.put(key, parent.id)
    cs = child |> class.update_changeset(params)
    {:update, cs}
  end
  # has + raw params: build a brand-new associated record.
  defp create1(parent, %Has{related: class, field: field}, data) do
    params = ChangesetUtils.activemodel_paramify(data)
    cs = parent
    |> build_assoc(field)
    |> class.create_changeset(params)
    {:insert, cs}
  end
  # :one relation: succeed idempotently when the slot already holds the
  # same model, error when occupied by another, build otherwise.
  defp singular_cardinality_check(parent, field, model, f) do
    id = model |> Map.get(:model, %{}) |> Map.get(:id)
    parent
    |> Map.get(field)
    |> case do
      %{id: ^id} when not is_nil(id) -> {:ok, model}
      %{id: id} -> {:error, "already occupied by '#{id}'"}
      nil -> f.(parent, model)
    end
  end
  # :many relation: skip the build when the model is already a member.
  defp plural_cardinality_check(checker, %{model: %{id: id}} = d, f) do
    case checker.(id) do
      nil -> f.(d)
      model -> {:ok, model}
    end
  end
  defp plural_cardinality_check(_, data, f), do: f.(data)
  def reflect_association(%{__struct__: module}, field), do: reflect_association(module, field)
  # Looks up the Ecto association reflection; tolerates string field
  # names but never creates new atoms from them.
  def reflect_association(module, field) do
    case field |> maybe_to_existing_atom do
      nil -> nil
      atom ->
        module.__schema__(:association, atom)
    end
  end
  def double_reflect_association(%{related: module} = r, field), do: {r, reflect_association(module, field)}
  def double_reflect_association(r, _), do: {r, nil}
  # Accepts either a module atom or a collection-name string; returns the
  # model module or nil when it cannot be resolved.
  def maybe_to_existing_model(atom) when is_atom(atom) do
    atom
    |> Atom.to_string
    |> case do
      "Elixir." <> _ -> atom
      symbol -> symbol |> maybe_to_existing_model
    end
  end
  def maybe_to_existing_model(str) when is_binary(str) do
    try do
      ChangesetUtils.model_module_from_collection_name(str)
    rescue
      ArgumentError -> nil
    end
  end
  defp maybe_to_existing_atom(atom) when is_atom(atom), do: atom
  # Safe string->atom conversion: only reuses atoms that already exist.
  defp maybe_to_existing_atom(str) when is_binary(str) do
    try do
      str |> String.to_existing_atom
    rescue
      ArgumentError -> nil
    end
  end
end
|
lib/autox/utils/relation_utils.ex
| 0.596903
| 0.424442
|
relation_utils.ex
|
starcoder
|
defmodule Ecto.Adapters.Tds do
@moduledoc """
Adapter module for MSSQL Server using the TDS protocol.
## Options
Tds options split in different categories described
below. All options can be given via the repository
configuration.
### Connection options
* `:hostname` - Server hostname
* `:port` - Server port (default: 1433)
* `:username` - Username
* `:password` - Password
* `:database` - the database to connect to
* `:pool` - The connection pool module, may be set to `Ecto.Adapters.SQL.Sandbox`
* `:ssl` - Set to true if ssl should be used (default: false)
* `:ssl_opts` - A list of ssl options, see Erlang's `ssl` docs
* `:show_sensitive_data_on_connection_error` - show connection data and
configuration whenever there is an error attempting to connect to the
database
We also recommend developers to consult the `Tds.start_link/1` documentation
for a complete list of all supported options for driver.
### Storage options
* `:collation` - the database collation. Used during database creation but
it is ignored later
If you need collation other than Latin1, add `tds_encoding` as dependency to
your project `mix.exs` file then amend `config/config.ex` by adding:
config :tds, :text_encoder, Tds.Encoding
This should give you extended set of most encoding. For complete list check
`Tds.Encoding` [documentation](https://hexdocs.pm/tds_encoding).
### After connect flags
After connecting to MSSQL server, TDS will check if there are any flags set in
connection options that should affect connection session behaviour. All flags are
MSSQL standard *SET* options. The following flags are currently supported:
* `:set_language` - sets session language (consult stored procedure output
`exec sp_helplanguage` for valid values)
* `:set_datefirst` - number in range 1..7
* `:set_dateformat` - atom, one of `:mdy | :dmy | :ymd | :ydm | :myd | :dym`
* `:set_deadlock_priority` - atom, one of `:low | :high | :normal | -10..10`
* `:set_lock_timeout` - number in milliseconds > 0
* `:set_remote_proc_transactions` - atom, one of `:on | :off`
* `:set_implicit_transactions` - atom, one of `:on | :off`
* `:set_allow_snapshot_isolation` - atom, one of `:on | :off`
(required if `Repo.transaction(fn -> ... end, isolation_level: :snapshot)` is used)
* `:set_read_committed_snapshot` - atom, one of `:on | :off`
## Limitations
### UUIDs
MSSQL server has slightly different binary storage format for UUIDs (`uniqueidentifier`).
If you use `:binary_id`, the proper choice is made. Otherwise you must use the `Tds.Ecto.UUID`
type. Avoid using `Ecto.UUID` since it may cause unpredictable application behaviour.
### SQL `Char`, `VarChar` and `Text` types
When working with binaries and strings,there are some limitations you should be aware of:
- Strings that should be stored in mentioned sql types must be encoded to column
codepage (defined in collation). If collation is different than database collation,
it is not possible to store correct value into database since the connection
respects the database collation. Ecto does not provide way to override parameter
codepage.
- If you need other than Latin1 or other than your database default collation, as
mentioned in "Storage Options" section, then manually encode strings using
`Tds.Encoding.encode/2` into desired codepage and then tag parameter as `:binary`.
Please be aware that queries that use this approach in where clauses can be 10x slower
due increased logical reads in database.
- You can't store VarChar codepoints encoded in one collation/codepage to column that
is encoded in different collation/codepage. You will always get wrong result. This is
not adapter or driver limitation but rather how string encoding works for single byte
encoded strings in MSSQL server. Don't be confused if you are always seeing latin1 chars,
they are simply in each codepoint table.
In particular, if a field has the type `:text`, only raw binaries will be allowed.
To avoid above limitations always use `:string` (NVarChar) type for text if possible.
If you really need to use VarChar's column type, you can use the `Tds.Ecto.VarChar`
Ecto type.
### JSON support
Even though the adapter will convert `:map` fields into JSON back and forth,
actual value is stored in NVarChar column.
### Query hints and table hints
MSSQL supports both query hints and table hints: https://docs.microsoft.com/en-us/sql/t-sql/queries/hints-transact-sql-query
For Ecto compatibility, the query hints must be given via the `lock` option, and they
will be translated to MSSQL's "OPTION". If you need to pass multiple options, you
can separate them by comma:
from query, lock: "HASH GROUP, FAST 10"
Table hints are specified as a list alongside a `from` or `join`:
from query, hints: ["INDEX (IX_Employee_ManagerID)"]
The `:migration_lock` will be treated as a table hint and defaults to "UPDLOCK".
### Multi Repo calls in transactions
To avoid deadlocks in your app, we exposed `:isolation_level` repo transaction option.
This will tell to SQL Server Transaction Manager how to begin transaction.
By default, if this option is omitted, isolation level is set to `:read_committed`.
Any attempt to manually set the transaction isolation via queries, such as
Ecto.Adapter.SQL.query("SET TRANSACTION ISOLATION LEVEL XYZ")
will fail once explicit transaction is started using `c:Ecto.Repo.transaction/2`
and reset back to :read_committed.
There is `Ecto.Query.lock/3` function can help by setting it to `WITH(NOLOCK)`.
This should allow you to do eventually consistent reads and avoid locks on given
table if you don't need to write to database.
NOTE: after explicit transaction ends (commit or rollback) implicit transactions
will run as READ_COMMITTED.
"""
# Inherits the generic SQL adapter behaviour; `:tds` names the
# DBConnection driver module used underneath.
use Ecto.Adapters.SQL,
driver: :tds
require Logger
require Ecto.Query
@behaviour Ecto.Adapter.Storage
# MSSQL's uniqueidentifier uses a different binary layout, so ids are
# generated via Tds.Ecto.UUID instead of Ecto's default (see moduledoc).
@doc false
def autogenerate(:binary_id), do: Tds.Ecto.UUID.bingenerate()
def autogenerate(:embed_id), do: Tds.Ecto.UUID.generate()
def autogenerate(type), do: super(type)
# Loaders convert database values into Ecto runtime values; each entry
# is applied left-to-right.
@doc false
@impl true
def loaders({:map, _}, type), do: [&json_decode/1, &Ecto.Type.embedded_load(type, &1, :json)]
def loaders(:map, type), do: [&json_decode/1, type]
def loaders(:boolean, type), do: [&bool_decode/1, type]
def loaders(:binary_id, type), do: [Tds.Ecto.UUID, type]
def loaders(_, type), do: [type]
# Dumpers convert Ecto runtime values into database parameters.
@impl true
def dumpers({:map, _}, type), do: [&Ecto.Type.embedded_dump(type, &1, :json)]
def dumpers(:binary_id, type), do: [type, Tds.Ecto.UUID]
def dumpers(_, type), do: [type]
# BIT column values may arrive either as one-byte binaries or as plain
# 0/1 integers depending on the driver path; booleans pass through.
defp bool_decode(<<0>>), do: {:ok, false}
defp bool_decode(<<1>>), do: {:ok, true}
defp bool_decode(0), do: {:ok, false}
defp bool_decode(1), do: {:ok, true}
defp bool_decode(x) when is_boolean(x), do: {:ok, x}
defp json_decode(x) when is_binary(x), do: {:ok, Tds.json_library().decode!(x)}
defp json_decode(x), do: {:ok, x}
# Storage API
# Creates the configured database (connecting via "master").
# MSSQL error 1801 means the database already exists.
@doc false
@impl true
def storage_up(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
command =
~s(CREATE DATABASE [#{database}])
|> concat_if(opts[:collation], &"COLLATE=#{&1}")
case run_query(Keyword.put(opts, :database, "master"), command) do
{:ok, _} ->
:ok
{:error, %{mssql: %{number: 1801}}} ->
{:error, :already_up}
{:error, error} ->
{:error, Exception.message(error)}
end
end
# Appends `fun.(value)` to `content` unless value is nil.
defp concat_if(content, nil, _fun), do: content
defp concat_if(content, value, fun), do: content <> " " <> fun.(value)
# Drops the configured database. MSSQL error 3701 means it is already gone.
@doc false
@impl true
def storage_down(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
case run_query(Keyword.put(opts, :database, "master"), "DROP DATABASE [#{database}]") do
{:ok, _} ->
:ok
{:error, %{mssql: %{number: 3701}}} ->
{:error, :already_down}
{:error, error} ->
{:error, Exception.message(error)}
end
end
# Reports :up/:down by probing sys.databases on the "master" database.
@impl Ecto.Adapter.Storage
def storage_status(opts) do
database =
Keyword.fetch!(opts, :database) || raise ":database is nil in repository configuration"
opts = Keyword.put(opts, :database, "master")
check_database_query =
"SELECT [name] FROM [master].[sys].[databases] WHERE [name] = '#{database}'"
case run_query(opts, check_database_query) do
{:ok, %{num_rows: 0}} -> :down
{:ok, %{num_rows: _}} -> :up
other -> {:error, other}
end
end
# Runs a one-off SQL command on a throwaway connection, supervised in a
# task so a connection crash cannot take down the caller; enforces a
# timeout and maps exits/timeouts to error tuples.
defp run_query(opts, sql_command) do
{:ok, _} = Application.ensure_all_started(:ecto_sql)
{:ok, _} = Application.ensure_all_started(:tds)
timeout = Keyword.get(opts, :timeout, 15_000)
opts =
opts
|> Keyword.drop([:name, :log, :pool, :pool_size])
|> Keyword.put(:backoff_type, :stop)
|> Keyword.put(:max_restarts, 0)
{:ok, pid} = Task.Supervisor.start_link()
task =
Task.Supervisor.async_nolink(pid, fn ->
{:ok, conn} = Tds.start_link(opts)
value = Ecto.Adapters.Tds.Connection.execute(conn, sql_command, [], opts)
GenServer.stop(conn)
value
end)
case Task.yield(task, timeout) || Task.shutdown(task) do
{:ok, {:ok, result}} ->
{:ok, result}
{:ok, {:error, error}} ->
{:error, error}
{:exit, {%{__struct__: struct} = error, _}}
when struct in [Tds.Error, DBConnection.Error] ->
{:error, error}
{:exit, reason} ->
{:error, RuntimeError.exception(Exception.format_exit(reason))}
nil ->
{:error, RuntimeError.exception("command timed out")}
end
end
@impl true
def supports_ddl_transaction? do
true
end
# Serializes migrations across nodes via sp_getapplock, held for the
# duration of a transaction (lock timeout -1 waits indefinitely).
@impl true
def lock_for_migrations(meta, opts, fun) do
%{opts: adapter_opts, repo: repo} = meta
if Keyword.fetch(adapter_opts, :pool_size) == {:ok, 1} do
Ecto.Adapters.SQL.raise_migration_pool_size_error()
end
opts = opts ++ [log: false, timeout: :infinity]
{:ok, result} =
transaction(meta, opts, fn ->
lock_name = "'ecto_#{inspect(repo)}'"
Ecto.Adapters.SQL.query!(meta, "sp_getapplock @Resource = #{lock_name}, @LockMode = 'Exclusive', @LockOwner = 'Transaction', @LockTimeout = -1", [], opts)
fun.()
end)
result
end
end
|
lib/ecto/adapters/tds.ex
| 0.894358
| 0.67252
|
tds.ex
|
starcoder
|
defmodule Timex.Duration do
@moduledoc """
This module provides a friendly API for working with Erlang
timestamps, i.e. `{megasecs, secs, microsecs}`. In addition,
it provides an easy way to wrap the measurement of function
execution time (via `measure`).
"""
alias __MODULE__
alias Timex.Types
use Timex.Constants
@enforce_keys [:megaseconds, :seconds, :microseconds]
defstruct megaseconds: 0, seconds: 0, microseconds: 0
@type t :: %__MODULE__{
megaseconds: integer,
seconds: integer,
microseconds: integer
}
@type units :: :microseconds | :milliseconds |
:seconds | :minutes | :hours |
:days | :weeks
@type measurement_units :: :microseconds | :milliseconds |
:seconds | :minutes | :hours
@type to_options :: [truncate: boolean]
@doc """
Turns a `Duration` into an Erlang-style timestamp tuple.

## Example

    iex> d = %Timex.Duration{megaseconds: 1, seconds: 2, microseconds: 3}
    ...> Timex.Duration.to_erl(d)
    {1, 2, 3}
"""
@spec to_erl(__MODULE__.t) :: Types.timestamp
def to_erl(%__MODULE__{megaseconds: mega, seconds: sec, microseconds: micro}),
  do: {mega, sec, micro}

@doc """
Turns an Erlang-style timestamp tuple into a `Duration`.

## Example

    iex> Timex.Duration.from_erl({1, 2, 3})
    %Timex.Duration{megaseconds: 1, seconds: 2, microseconds: 3}
"""
@spec from_erl(Types.timestamp) :: __MODULE__.t
def from_erl({mega, sec, micro}),
  do: %__MODULE__{megaseconds: mega, seconds: sec, microseconds: micro}

@doc """
Converts a Duration into a `Time`, provided the duration fits within a
24-hour clock; otherwise an error tuple is returned.

## Examples

    iex> d = %Timex.Duration{megaseconds: 0, seconds: 4000, microseconds: 0}
    ...> Timex.Duration.to_time(d)
    {:ok, ~T[01:06:40]}

    iex> d = %Timex.Duration{megaseconds: 1, seconds: 0, microseconds: 0}
    ...> Timex.Duration.to_time(d)
    {:error, :invalid_time}
"""
@spec to_time(__MODULE__.t) :: {:ok, Time.t} | {:error, atom}
def to_time(%__MODULE__{} = d) do
  {hour, minute, second, usec} = to_clock(d)
  Time.from_erl({hour, minute, second}, Timex.DateTime.Helpers.construct_microseconds(usec))
end

@doc """
Same as to_time/1, but returns the Time directly. Raises an error if the
duration does not fit within a 24-hour clock.

## Examples

    iex> d = %Timex.Duration{megaseconds: 0, seconds: 4000, microseconds: 0}
    ...> Timex.Duration.to_time!(d)
    ~T[01:06:40]

    iex> d = %Timex.Duration{megaseconds: 1, seconds: 0, microseconds: 0}
    ...> Timex.Duration.to_time!(d)
    ** (ArgumentError) cannot convert {277, 46, 40} to time, reason: :invalid_time
"""
@spec to_time!(__MODULE__.t) :: Time.t | no_return
def to_time!(%__MODULE__{} = d) do
  {hour, minute, second, usec} = to_clock(d)
  Time.from_erl!({hour, minute, second}, Timex.DateTime.Helpers.construct_microseconds(usec))
end

@doc """
Builds a `Duration` out of a `Time`.

## Example

    iex> Timex.Duration.from_time(~T[01:01:30])
    %Timex.Duration{megaseconds: 0, seconds: 3690, microseconds: 0}
"""
@spec from_time(Time.t) :: __MODULE__.t
def from_time(%Time{hour: h, minute: m, second: s, microsecond: {us, _precision}}) do
  from_clock({h, m, s, us})
end
@doc """
Converts a Duration to a string, using the ISO standard for formatting durations.
## Examples
iex> d = %Timex.Duration{megaseconds: 0, seconds: 3661, microseconds: 0}
...> Timex.Duration.to_string(d)
"PT1H1M1S"
iex> d = %Timex.Duration{megaseconds: 102, seconds: 656013, microseconds: 33}
...> Timex.Duration.to_string(d)
"P3Y3M3DT3H33M33.000033S"
"""
@spec to_string(__MODULE__.t) :: String.t
# Formatting is delegated to the default duration formatter.
def to_string(%__MODULE__{} = duration) do
Timex.Format.Duration.Formatter.format(duration)
end
# Parsing delegates to the pluggable duration parser; the /2 variants
# accept a custom parser module instead of the default.
@doc """
Parses a duration string (in ISO-8601 format) into a Duration struct.
"""
@spec parse(String.t) :: {:ok, __MODULE__.t} | {:error, term}
defdelegate parse(str), to: Timex.Parse.Duration.Parser
@doc """
Parses a duration string into a Duration struct, using the provided parser module.
"""
@spec parse(String.t, module()) :: {:ok, __MODULE__.t} | {:error, term}
defdelegate parse(str, module), to: Timex.Parse.Duration.Parser
@doc """
Same as parse/1, but returns the Duration unwrapped, and raises on error
"""
@spec parse!(String.t) :: __MODULE__.t | no_return
defdelegate parse!(str), to: Timex.Parse.Duration.Parser
@doc """
Same as parse/2, but returns the Duration unwrapped, and raises on error
"""
@spec parse!(String.t, module()) :: __MODULE__.t | no_return
defdelegate parse!(str, module), to: Timex.Parse.Duration.Parser
@doc """
Converts a Duration to a clock tuple, i.e. `{hour,minute,second,microsecond}`.

## Example

    iex> d = %Timex.Duration{megaseconds: 1, seconds: 1, microseconds: 50}
    ...> Timex.Duration.to_clock(d)
    {277, 46, 41, 50}
"""
def to_clock(%__MODULE__{megaseconds: mega, seconds: sec, microseconds: micro}) do
  # Total whole seconds, including any full seconds carried in the
  # microsecond field.
  # FIX: the original compared with `micro > 1_000_000`, which silently
  # dropped exactly one second when micro == 1_000_000 (the carry was
  # skipped but rem/2 still zeroed the microseconds); `>=` carries it.
  ss = mega * 1_000_000 + sec

  ss =
    if micro >= 1_000_000 do
      ss + div(micro, 1_000_000)
    else
      ss
    end

  hour = div(ss, 60 * 60)
  min = div(rem(ss, 60 * 60), 60)
  secs = rem(rem(ss, 60 * 60), 60)
  {hour, min, secs, rem(micro, 1_000_000)}
end
@doc """
Builds a `Duration` from a clock tuple, i.e. `{hour, minute, second, microsecond}`.

## Example

    iex> Timex.Duration.from_clock({1, 2, 3, 4})
    %Timex.Duration{megaseconds: 0, seconds: 3723, microseconds: 4}
"""
def from_clock({hour, minute, second, usec}) do
  # Collapse the clock fields into whole seconds, then split them into
  # the megasecond/second representation used by Erlang timestamps.
  seconds = hour * 3600 + minute * 60 + second
  from_erl({div(seconds, 1_000_000), rem(seconds, 1_000_000), usec})
end
@doc """
Returns the total number of microseconds a `Duration` spans.

## Example

    iex> Duration.to_microseconds(Duration.from_milliseconds(10.5))
    10_500
"""
@spec to_microseconds(__MODULE__.t) :: integer
@spec to_microseconds(__MODULE__.t, to_options) :: integer
def to_microseconds(%Duration{megaseconds: mega, seconds: sec, microseconds: micro}) do
  (mega * @million + sec) * 1_000_000 + micro
end

# The result is already an integer, so options have no effect here.
def to_microseconds(%Duration{} = duration, _opts), do: to_microseconds(duration)
@doc """
Converts a Duration to its value in milliseconds
## Example
iex> Duration.to_milliseconds(Duration.from_seconds(1))
1000.0
iex> Duration.to_milliseconds(Duration.from_seconds(1.543))
1543.0
iex> Duration.to_milliseconds(Duration.from_seconds(1.543), truncate: true)
1543
"""
@spec to_milliseconds(__MODULE__.t) :: float
@spec to_milliseconds(__MODULE__.t, to_options) :: float | integer
def to_milliseconds(%__MODULE__{} = d), do: to_microseconds(d) / 1_000
# NOTE: this clause matches the exact option list [truncate: true]; any
# other option list (including one with extra keys) falls through to the
# catch-all clause below and returns a float.
def to_milliseconds(%__MODULE__{} = d, [truncate: true]), do: trunc(to_milliseconds(d))
def to_milliseconds(%__MODULE__{} = d, _opts), do: to_milliseconds(d)
@doc """
Converts a Duration to its value in seconds
## Example
iex> Duration.to_seconds(Duration.from_milliseconds(1500))
1.5
iex> Duration.to_seconds(Duration.from_milliseconds(1500), truncate: true)
1
"""
@spec to_seconds(__MODULE__.t) :: float
@spec to_seconds(__MODULE__.t, to_options) :: float | integer
def to_seconds(%__MODULE__{} = d), do: to_microseconds(d) / (1_000*1_000)
# Same exact-match [truncate: true] behavior as to_milliseconds/2.
def to_seconds(%__MODULE__{} = d, [truncate: true]), do: trunc(to_seconds(d))
def to_seconds(%__MODULE__{} = d, _opts), do: to_seconds(d)
@doc """
Converts a Duration to its value in minutes
## Example
iex> Duration.to_minutes(Duration.from_seconds(90))
1.5
iex> Duration.to_minutes(Duration.from_seconds(65), truncate: true)
1
"""
@spec to_minutes(__MODULE__.t) :: float
@spec to_minutes(__MODULE__.t, to_options) :: float | integer
def to_minutes(%__MODULE__{} = d), do: to_microseconds(d) / (1_000*1_000*60)
# Same exact-match [truncate: true] behavior as to_milliseconds/2.
def to_minutes(%__MODULE__{} = d, [truncate: true]), do: trunc(to_minutes(d))
def to_minutes(%__MODULE__{} = d, _opts), do: to_minutes(d)
@doc """
Converts a Duration to its value in hours
## Example
iex> Duration.to_hours(Duration.from_minutes(105))
1.75
iex> Duration.to_hours(Duration.from_minutes(105), truncate: true)
1
"""
@spec to_hours(__MODULE__.t) :: float
@spec to_hours(__MODULE__.t, to_options) :: float | integer
def to_hours(%__MODULE__{} = d), do: to_microseconds(d) / (1_000*1_000*60*60)
# Same exact-match [truncate: true] behavior as to_milliseconds/2.
def to_hours(%__MODULE__{} = d, [truncate: true]), do: trunc(to_hours(d))
def to_hours(%__MODULE__{} = d, _opts), do: to_hours(d)
@doc """
Converts a Duration to its value in days
## Example
iex> Duration.to_days(Duration.from_hours(6))
0.25
iex> Duration.to_days(Duration.from_hours(25), truncate: true)
1
"""
@spec to_days(__MODULE__.t) :: float
@spec to_days(__MODULE__.t, to_options) :: float | integer
def to_days(%__MODULE__{} = d), do: to_microseconds(d) / (1_000*1_000*60*60*24)
# Same exact-match [truncate: true] behavior as to_milliseconds/2.
def to_days(%__MODULE__{} = d, [truncate: true]), do: trunc(to_days(d))
def to_days(%__MODULE__{} = d, _opts), do: to_days(d)
@doc """
Converts a Duration to its value in weeks
## Example
iex> Duration.to_weeks(Duration.from_days(14))
2.0
iex> Duration.to_weeks(Duration.from_days(13), truncate: true)
1
"""
@spec to_weeks(__MODULE__.t) :: float
@spec to_weeks(__MODULE__.t, to_options) :: float | integer
def to_weeks(%__MODULE__{} = d), do: to_microseconds(d) / (1_000*1_000*60*60*24*7)
# Same exact-match [truncate: true] behavior as to_milliseconds/2.
def to_weeks(%__MODULE__{} = d, [truncate: true]), do: trunc(to_weeks(d))
def to_weeks(%__MODULE__{} = d, _opts), do: to_weeks(d)
# Compile-time code generation: for each {unit, coef} pair below (where
# `coef` is the number of seconds in one such unit), a family of
# to_<target>/2 converters is defined that takes a plain number tagged
# with its unit, e.g. `to_seconds(90, :minutes)`. Each converter scales
# the value into seconds via `coef`, rescales to the target unit, and
# passes the result through do_round/1 (a helper defined elsewhere in
# this module).
Enum.each [{:microseconds, 1 / @usecs_in_sec},
{:milliseconds, 1 / @msecs_in_sec},
{:seconds, 1},
{:minutes, @secs_in_min},
{:hours, @secs_in_hour},
{:days, @secs_in_day},
{:weeks, @secs_in_week}], fn {type, coef} ->
@spec to_microseconds(integer | float, unquote(type)) :: float
def to_microseconds(value, unquote(type)),
do: do_round(value * unquote(coef) * @usecs_in_sec)
@spec to_milliseconds(integer | float, unquote(type)) :: float
def to_milliseconds(value, unquote(type)),
do: do_round(value * unquote(coef) * @msecs_in_sec)
@spec to_seconds(integer | float, unquote(type)) :: float
def to_seconds(value, unquote(type)),
do: do_round(value * unquote(coef))
@spec to_minutes(integer | float, unquote(type)) :: float
def to_minutes(value, unquote(type)),
do: do_round(value * unquote(coef) / @secs_in_min)
@spec to_hours(integer | float, unquote(type)) :: float
def to_hours(value, unquote(type)),
do: do_round(value * unquote(coef) / @secs_in_hour)
@spec to_days(integer | float, unquote(type)) :: float
def to_days(value, unquote(type)),
do: do_round(value * unquote(coef) / @secs_in_day)
@spec to_weeks(integer | float, unquote(type)) :: float
def to_weeks(value, unquote(type)),
do: do_round(value * unquote(coef) / @secs_in_week)
end
@doc """
Builds a `Duration` from a number of microseconds.
"""
@spec from_microseconds(integer) :: __MODULE__.t
def from_microseconds(us) do
  # Round first (tolerates float input), then split into
  # {seconds, microseconds} and {megaseconds, seconds}.
  {secs, micro} = us |> round() |> mdivmod()
  {mega, secs} = mdivmod(secs)
  %Duration{megaseconds: mega, seconds: secs, microseconds: micro}
end

@doc """
Builds a `Duration` from a number of milliseconds.
"""
@spec from_milliseconds(integer) :: __MODULE__.t
def from_milliseconds(ms), do: from_microseconds(ms * @usecs_in_msec)

@doc """
Builds a `Duration` from a number of seconds.
"""
@spec from_seconds(integer) :: __MODULE__.t
def from_seconds(s), do: from_microseconds(s * @usecs_in_sec)

@doc """
Builds a `Duration` from a number of minutes.
"""
@spec from_minutes(integer) :: __MODULE__.t
def from_minutes(m), do: from_seconds(m * @secs_in_min)

@doc """
Builds a `Duration` from a number of hours.
"""
@spec from_hours(integer) :: __MODULE__.t
def from_hours(h), do: from_seconds(h * @secs_in_hour)

@doc """
Builds a `Duration` from a number of days.
"""
@spec from_days(integer) :: __MODULE__.t
def from_days(d), do: from_seconds(d * @secs_in_day)

@doc """
Builds a `Duration` from a number of weeks.
"""
@spec from_weeks(integer) :: __MODULE__.t
def from_weeks(w), do: from_seconds(w * @secs_in_week)
@doc """
Returns the normalized sum of two `Duration`s.
## Examples
iex> d = %Timex.Duration{megaseconds: 1, seconds: 1, microseconds: 1}
...> Timex.Duration.add(d, d)
%Timex.Duration{megaseconds: 2, seconds: 2, microseconds: 2}
iex> d = %Timex.Duration{megaseconds: 1, seconds: 750000, microseconds: 750000}
...> Timex.Duration.add(d, d)
%Timex.Duration{megaseconds: 3, seconds: 500001, microseconds: 500000}
"""
@spec add(__MODULE__.t, __MODULE__.t) :: __MODULE__.t
def add(%Duration{} = a, %Duration{} = b) do
  # Field-wise sum; normalize/1 carries any overflow upward.
  %Duration{
    megaseconds: a.megaseconds + b.megaseconds,
    seconds: a.seconds + b.seconds,
    microseconds: a.microseconds + b.microseconds
  }
  |> normalize()
end

@doc """
Returns the normalized difference of two `Duration`s (first minus second).
## Example
iex> d1 = %Timex.Duration{megaseconds: 3, seconds: 3, microseconds: 3}
...> d2 = %Timex.Duration{megaseconds: 2, seconds: 2, microseconds: 2}
...> Timex.Duration.sub(d1, d2)
%Timex.Duration{megaseconds: 1, seconds: 1, microseconds: 1}
"""
@spec sub(__MODULE__.t, __MODULE__.t) :: __MODULE__.t
def sub(%Duration{} = a, %Duration{} = b) do
  # Field-wise difference; normalize/1 fixes up any borrow/overflow.
  %Duration{
    megaseconds: a.megaseconds - b.megaseconds,
    seconds: a.seconds - b.seconds,
    microseconds: a.microseconds - b.microseconds
  }
  |> normalize()
end
@doc """
Scale a Duration by some coefficient value, i.e. a scale of 2 is twice as long.
## Example
iex> d = %Timex.Duration{megaseconds: 1, seconds: 1, microseconds: 1}
...> Timex.Duration.scale(d, 2)
%Timex.Duration{megaseconds: 2, seconds: 2, microseconds: 2}
"""
@spec scale(__MODULE__.t, coefficient :: integer | float) :: __MODULE__.t
# Each field is scaled independently; the fractional part truncated from a
# larger unit is carried down into the next smaller unit (mega -> sec ->
# micro) so no precision is lost until the final truncation to integer
# microseconds, and whole-unit overflow is then carried back upward.
def scale(%Duration{megaseconds: mega, seconds: secs, microseconds: micro}, coef) do
mega_s = mega*coef
# fractional megaseconds, re-expressed in seconds
s_diff = (mega_s*1_000_000)-(trunc(mega_s)*1_000_000)
secs_s = s_diff+(secs*coef)
# fractional seconds, re-expressed in microseconds
us_diff = (secs_s*1_000_000)-(trunc(secs_s)*1_000_000)
us_s = us_diff+(micro*coef)
# carry whole seconds into megaseconds, whole microseconds into seconds
extra_mega = div(trunc(secs_s), 1_000_000)
mega_final = trunc(mega_s)+extra_mega
extra_secs = div(trunc(us_s), 1_000_000)
secs_final = trunc(secs_s)-(extra_mega*1_000_000)+extra_secs
us_final = trunc(us_s)-(extra_secs*1_000_000)
normalize(%Duration{megaseconds: mega_final,
seconds: secs_final,
microseconds: us_final })
end
@doc """
Inverts a `Duration`: a positive duration becomes negative, and vice versa.
## Example
iex> d = %Timex.Duration{megaseconds: -1, seconds: -2, microseconds: -3}
...> Timex.Duration.invert(d)
%Timex.Duration{megaseconds: 1, seconds: 2, microseconds: 3}
"""
@spec invert(__MODULE__.t) :: __MODULE__.t
def invert(%Duration{} = d) do
  # Negate each field independently; no normalization is needed.
  %Duration{
    megaseconds: -d.megaseconds,
    seconds: -d.seconds,
    microseconds: -d.microseconds
  }
end

@doc """
Returns the absolute value of the provided `Duration`.
## Example
iex> d = %Timex.Duration{megaseconds: -1, seconds: -2, microseconds: -3}
...> Timex.Duration.abs(d)
%Timex.Duration{megaseconds: 1, seconds: 2, microseconds: 3}
"""
@spec abs(__MODULE__.t) :: __MODULE__.t
def abs(%Duration{} = duration) do
  # Collapse to a single microsecond value to decide the sign; only
  # negative durations need to be rebuilt.
  case to_microseconds(duration) do
    us when us < 0 -> from_microseconds(-us)
    _non_negative -> duration
  end
end

@doc """
Returns a timestamp representing a time lapse of length 0.
iex> Timex.Duration.zero |> Timex.Duration.to_seconds
0.0
Useful for reductions over collections of durations, and as the timestamp
of the start of the UNIX epoch (all Erlang timestamps are relative to it).
"""
@spec zero() :: __MODULE__.t
def zero do
  %Duration{megaseconds: 0, seconds: 0, microseconds: 0}
end
@doc """
Returns the duration since the first day of year 0 to Epoch.
## Example
iex> Timex.Duration.epoch()
%Timex.Duration{megaseconds: 62_167, seconds: 219_200, microseconds: 0}
"""
@spec epoch() :: __MODULE__.t
def epoch(), do: epoch(nil)

@doc """
Returns the amount of time since the first day of year 0 to Epoch.
The argument is an atom indicating the type of time units to return.
The allowed unit type atoms are:
- :microseconds
- :milliseconds
- :seconds
- :minutes
- :hours
- :days
- :weeks
## Examples
iex> Timex.Duration.epoch(:seconds)
62_167_219_200
If the specified type is nil, a duration since the first day of year 0 to Epoch
is returned.
iex> Timex.Duration.epoch(nil)
%Timex.Duration{megaseconds: 62_167, seconds: 219_200, microseconds: 0}
"""
@spec epoch(nil) :: __MODULE__.t
@spec epoch(units) :: non_neg_integer
def epoch(type) do
  # Gregorian seconds from year 0 to 1970-01-01T00:00:00.
  seconds = :calendar.datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})

  # Converts the epoch offset into the requested unit via a Duration.
  convert = fn to_unit -> seconds |> from_seconds() |> to_unit.() end

  case type do
    nil -> from_seconds(seconds)
    :seconds -> seconds
    :microseconds -> convert.(&to_microseconds/1)
    :milliseconds -> convert.(&to_milliseconds/1)
    :minutes -> convert.(&to_minutes/1)
    :hours -> convert.(&to_hours/1)
    :days -> convert.(&to_days/1)
    :weeks -> convert.(&to_weeks/1)
  end
end
@doc """
Returns the amount of time since Epoch.
The argument is an atom indicating the type of time units to return.
The allowed unit type atoms are:
- :microseconds
- :milliseconds
- :seconds
- :minutes
- :hours
- :days
- :weeks
## Examples
iex> Timex.Duration.now(:seconds)
1483141644
When the argument is omitted or nil, a Duration is returned.
iex> Timex.Duration.now
%Timex.Duration{megaseconds: 1483, seconds: 141562, microseconds: 536938}
"""
@spec now() :: __MODULE__.t
@spec now(nil) :: __MODULE__.t
@spec now(units) :: non_neg_integer
# NOTE(review): :micro_seconds / :milli_seconds are legacy aliases of the
# canonical :microsecond / :millisecond unit atoms on modern OTP; both work,
# but the singular forms are preferred — confirm minimum OTP before changing.
def now(type \\ nil)
def now(nil), do: :os.system_time(:micro_seconds) |> from_microseconds
def now(:microseconds), do: :os.system_time(:micro_seconds)
def now(:milliseconds), do: :os.system_time(:milli_seconds)
def now(:seconds), do: :os.system_time(:seconds)
# The coarser units round-trip through a Duration and so return floats
# (to_minutes/1 and friends apply do_round/1).
def now(:minutes), do: :os.system_time(:seconds) |> from_seconds |> to_minutes
def now(:hours), do: :os.system_time(:seconds) |> from_seconds |> to_hours
def now(:days), do: :os.system_time(:seconds) |> from_seconds |> to_days
def now(:weeks), do: :os.system_time(:seconds) |> from_seconds |> to_weeks
@doc """
An alias for `Duration.diff/3`
"""
defdelegate elapsed(duration, ref \\ nil, type \\ nil), to: __MODULE__, as: :diff
@doc """
This function determines the difference in time between two timestamps
(represented by Duration structs). If the second timestamp is omitted,
`Duration.now` will be used as the reference timestamp. If the first
timestamp argument occurs before the second, the resulting measurement will
be a negative value.
The type argument is an atom indicating the units the measurement should be
returned in. If no type argument is provided, a Duration will be returned.
Valid measurement units for this function are:
:microseconds, :milliseconds, :seconds, :minutes, :hours, :days, or :weeks
## Examples
iex> alias Timex.Duration
...> d = Duration.from_erl({1457, 136000, 785000})
...> Duration.diff(d, Duration.zero, :days)
16865
"""
def diff(t1, t2, type \\ nil)
# A nil reference compares against the current time.
def diff(%Duration{} = t1, nil, type), do: diff(t1, now(), type)
def diff(%Duration{} = t1, %Duration{} = t2, type) do
delta = do_diff(t1, t2)
# NOTE(review): to_microseconds/2 and friends are invoked here with the
# keyword list [truncate: true] in the unit-atom position; the only clauses
# visible in this file match a unit atom there, so matching `truncate: true`
# clauses are presumably defined elsewhere in the module — confirm.
case type do
nil -> delta
:microseconds -> to_microseconds(delta, truncate: true)
:milliseconds -> to_milliseconds(delta, truncate: true)
:seconds -> to_seconds(delta, truncate: true)
:minutes -> to_minutes(delta, truncate: true)
:hours -> to_hours(delta, truncate: true)
:days -> to_days(delta, truncate: true)
:weeks -> to_weeks(delta, truncate: true)
end
end
# Computes t1 - t2 as signed microseconds using :timer.now_diff/2 (which
# operates on Erlang {mega, sec, micro} timestamps, hence the to_erl/1
# conversions), then splits the result back into Duration fields. The
# subtraction before the seconds division keeps the fields consistent for
# negative differences as produced by div/rem truncation.
defp do_diff(%Duration{} = t1, %Duration{} = t2) do
microsecs = :timer.now_diff(to_erl(t1), to_erl(t2))
mega = div(microsecs, 1_000_000_000_000)
secs = div(microsecs - mega*1_000_000_000_000, 1_000_000)
micro = rem(microsecs, 1_000_000)
%Duration{megaseconds: mega, seconds: secs, microseconds: micro}
end
@doc """
Evaluates fun() and measures the elapsed time.
Returns `{Duration.t, result}`.
## Example
iex> {_timestamp, result} = Duration.measure(fn -> 2 * 2 end)
...> result == 4
true
"""
@spec measure((() -> any)) :: {__MODULE__.t, any}
def measure(fun) when is_function(fun) do
  wrap_measurement(:timer.tc(fun, []))
end

@doc """
Evaluates `apply(fun, args)`, and measures execution time.
Returns `{Duration.t, result}`.
## Example
iex> {_timestamp, result} = Duration.measure(fn x, y -> x * y end, [2, 4])
...> result == 8
true
"""
@spec measure(fun, [any]) :: {__MODULE__.t, any}
def measure(fun, args) when is_function(fun) and is_list(args) do
  wrap_measurement(:timer.tc(fun, args))
end

@doc """
Evaluates `apply(module, fun, args)`, and measures execution time.
Returns `{Duration.t, result}`.
## Example
iex> {_timestamp, result} = Duration.measure(Enum, :map, [[1,2], &(&1*2)])
...> result == [2, 4]
true
"""
@spec measure(module, atom, [any]) :: {__MODULE__.t, any}
def measure(module, fun, args)
    when is_atom(module) and is_atom(fun) and is_list(args) do
  wrap_measurement(:timer.tc(module, fun, args))
end

# Converts the {elapsed_microseconds, result} tuple produced by :timer.tc
# into the {Duration, result} shape this module's API exposes.
defp wrap_measurement({elapsed_us, result}) do
  {Duration.from_microseconds(elapsed_us), result}
end
# Normalizes a Duration by carrying overflow upward: microseconds into
# seconds first, then seconds into megaseconds. The carry order matters —
# micro -> sec must run before sec -> mega so a large microsecond field can
# ripple all the way up.
def normalize(%Duration{megaseconds: mega, seconds: sec, microseconds: micro}) do
# TODO: check for negative values
{ sec, micro } = mdivmod(sec, micro)
{ mega, sec } = mdivmod(mega, sec)
%Duration{megaseconds: mega, seconds: sec, microseconds: micro}
end
# Integer division plus remainder as a pair; the 3-arity form adds the
# quotient onto an existing accumulator (used for carry propagation).
defp divmod(a, b), do: {div(a, b), rem(a, b)}
defp divmod(initial, a, b), do: {initial + div(a, b), rem(a, b)}
# "m" = mega: divmod specialized to the 1_000_000 radix used by all fields.
defp mdivmod(a), do: divmod(a, 1_000_000)
defp mdivmod(initial, a), do: divmod(initial, a, 1_000_000)
# Integers pass through untouched; floats are rounded to 6 decimal places
# (i.e. microsecond precision).
defp do_round(value) when is_integer(value), do: value
defp do_round(value) when is_float(value), do: Float.round(value, 6)
end
|
lib/time/duration.ex
| 0.926379
| 0.665737
|
duration.ex
|
starcoder
|
defmodule MLLP.Receiver do
@moduledoc """
A simple MLLP server.
Minimal Lower Layer Protocol (MLLP) is an application level protocol which merely defines header and
trailer delimiters for HL7 messages utilized in the healthcare industry for data interchange.
## Options
The following options are required for starting an MLLP receiver either via `start/1` or indirectly via
`child_spec/1` :
- `:port` - The tcp port the receiver will listen on.
- `:dispatcher` - Callback module messages ingested by the receiver will be passed to. This library ships with an
echo only example dispatch module, `MLLP.EchoDispatcher` for example purposes, which can be provided as a value
for this parameter.
Optional parameters:
- `:packet_framer` - Callback module for received packets. Defaults to `MLLP.DefaultPacketFramer`
- `:transport_opts` - A map of parameters given to ranch as transport options. See
[Ranch Documentation](https://ninenines.eu/docs/en/ranch/1.7/manual/) for all transport options that can be
provided. The default `transport_opts` are `%{num_acceptors: 100, max_connections: 20_000}` if none are provided.
"""
use GenServer
require Logger
alias MLLP.FramingContext
alias MLLP.Peer
@type dispatcher :: any()
@type t() :: %MLLP.Receiver{
socket: any(),
transport: any(),
buffer: String.t(),
dispatcher_module: dispatcher()
}
@type options() :: [
port: pos_integer(),
dispatcher: module(),
packet_framer: module(),
transport_opts: :ranch.opts()
]
@behaviour :ranch_protocol
defstruct socket: nil,
transport: nil,
buffer: "",
dispatcher_module: nil
@doc """
Starts an MLLP.Receiver.
{:ok, info_map} = MLLP.Receiver.start(port: 4090, dispatcher: MLLP.EchoDispatcher)
If successful it will return a map containing the pid of the listener, the port it's listening on, and the
receiver_id (ref) created, otherwise an error tuple.
Note that this function is in contrast with `child_spec/1` which can be used to embed MLLP.Receiver in your
application or within a supervision tree as part of your application.
This function is useful for starting an MLLP.Receiver from within a GenServer or for development and testing
purposes.
See [Options](#module-options) for details on required and optional parameters.
"""
@spec start(options()) :: {:ok, map()} | {:error, any()}
def start(opts) do
  args = to_args(opts)

  case :ranch.start_listener(
         args.receiver_id,
         args.transport_mod,
         args.transport_opts,
         args.proto_mod,
         args.proto_opts
       ) do
    {:ok, pid} ->
      {:ok, %{receiver_id: args.receiver_id, pid: pid, port: args.port}}

    # Previously only {:error, :eaddrinuse} was matched, so any other ranch
    # failure (e.g. a duplicate listener ref) raised a CaseClauseError.
    # Pass every error through as a tagged tuple, per the @spec.
    {:error, _reason} = error ->
      error
  end
end
@spec stop(any) :: :ok | {:error, :not_found}
# Stops the listener bound to `port`.
# get_receiver_id_by_port/1 returns nil when no listener owns the port;
# previously that nil was fed to :ranch.stop_listener/1 and the `:ok =`
# match crashed with a MatchError instead of returning {:error, :not_found}
# as the @spec promises.
def stop(port) do
  case get_receiver_id_by_port(port) do
    nil -> {:error, :not_found}
    receiver_id -> :ranch.stop_listener(receiver_id)
  end
end
@doc """
A function which can be used to embed an MLLP.Receiver under Elixir v1.5+ supervisors.
Unlike `start/1`, `start/2`, or `start/3` this function takes two additional options : `ref` and `transport_opts`.
Note that if a `ref` option is not supplied a reference will be created for you using `make_ref/0`.
children = [{MLLP.Receiver, [
ref: MyRef,
port: 4090,
dispatcher: MLLP.EchoDispatcher,
packet_framer: MLLP.DefaultPacketFramer,
transport_opts: %{num_acceptors: 25, max_connections: 20_000}
]}
]
Supervisor.init(children, strategy: :one_for_one)
See [Options](#module-options) for details on required and optional parameters.
## Examples
iex(1)> opts = [ref: MyRef, port: 4090, dispatcher: MLLP.EchoDispatcher, packet_framer: MLLP.DefaultPacketFramer]
[
ref: MyRef,
port: 4090,
dispatcher: MLLP.EchoDispatcher,
packet_framer: MLLP.DefaultPacketFramer
]
iex(2)> MLLP.Receiver.child_spec(opts)
%{
id: {:ranch_listener_sup, MyRef},
start: {:ranch_listener_sup, :start_link,
[
MyRef,
:ranch_tcp,
%{socket_opts: [port: 4090], num_acceptors: 100, max_connections: 20_000},
MLLP.Receiver,
%{packet_framer_module: MLLP.DefaultPacketFramer, dispatcher_module: MLLP.EchoDispatcher, allowed_clients: %{}, verify: nil}
]},
type: :supervisor,
modules: [:ranch_listener_sup],
restart: :permanent,
shutdown: :infinity
}
"""
@spec child_spec(options()) :: Supervisor.child_spec()
def child_spec(opts) do
  # Normalize options once, then let ranch build its legacy 6-tuple spec.
  %{
    receiver_id: ref,
    transport_mod: transport,
    transport_opts: transport_opts,
    proto_mod: protocol,
    proto_opts: protocol_opts
  } = to_args(opts)

  {id, start, restart, shutdown, type, modules} =
    :ranch.child_spec(ref, transport, transport_opts, protocol, protocol_opts)

  # Re-shape ranch's tuple into the map form modern Supervisors expect.
  %{
    id: id,
    start: start,
    restart: restart,
    shutdown: shutdown,
    type: type,
    modules: modules
  }
end
@doc false
def start_link(receiver_id, _ranch_opts, transport, options) do
  # :proc_lib.spawn_link (rather than GenServer.start_link) is required
  # because init/1 finishes by calling :gen_server.enter_loop/3.
  pid = :proc_lib.spawn_link(__MODULE__, :init, [[receiver_id, transport, options]])
  {:ok, pid}
end
# Normalizes user-supplied options into the argument map consumed by both
# start/1 and child_spec/1. Raises ArgumentError when a required option is
# missing, and RuntimeError when a callback module cannot be loaded or does
# not implement the expected behaviour.
defp to_args(opts) do
  port =
    Keyword.get(opts, :port, nil) ||
      raise(ArgumentError, "No tcp port provided")

  dispatcher_mod =
    Keyword.get(opts, :dispatcher, nil) ||
      raise(ArgumentError, "No dispatcher module provided")

  Code.ensure_loaded?(dispatcher_mod) ||
    raise "The dispatcher module #{dispatcher_mod} could not be found."

  implements_behaviour?(dispatcher_mod, MLLP.Dispatcher) ||
    raise "The dispatcher module #{dispatcher_mod} does not implement the MLLP.Dispatcher behaviour"

  packet_framer_mod = Keyword.get(opts, :packet_framer, MLLP.DefaultPacketFramer)

  Code.ensure_loaded?(packet_framer_mod) ||
    raise "The packet framer module #{packet_framer_mod} could not be found."

  # Fixed copy/paste error: this message previously claimed the framer was
  # missing the MLLP.Dispatcher behaviour instead of MLLP.PacketFramer.
  implements_behaviour?(packet_framer_mod, MLLP.PacketFramer) ||
    raise "The packet framer module #{packet_framer_mod} does not implement the MLLP.PacketFramer behaviour"

  receiver_id = Keyword.get(opts, :ref, make_ref())

  {transport_mod, transport_opts} =
    default_transport_opts()
    |> Map.merge(Keyword.get(opts, :transport_opts, %{}))
    |> update_transport_options(port)

  # :verify ends up in socket_opts when TLS is configured; it selects how
  # allowed clients are interpreted (cert identities vs. IPs/hostnames).
  verify = get_in(transport_opts, [:socket_opts, :verify])

  proto_opts = %{
    packet_framer_module: packet_framer_mod,
    dispatcher_module: dispatcher_mod,
    allowed_clients: get_allowed_clients(verify, opts),
    verify: verify
  }

  %{
    receiver_id: receiver_id,
    port: port,
    transport_mod: transport_mod,
    transport_opts: transport_opts,
    proto_mod: __MODULE__,
    proto_opts: proto_opts
  }
end
# Baseline ranch transport options used when the caller supplies none.
defp default_transport_opts() do
%{num_acceptors: 100, max_connections: 20_000}
end
# Splits the merged transport options into {transport_module, ranch_opts}.
# A :tls key selects :ranch_ssl (with peer-verification defaults merged in);
# otherwise plaintext :ranch_tcp is used and a warning is logged.
defp update_transport_options(transport_opts, port) do
{transport_module, tls_options1, transport_opts1} =
case Map.pop(transport_opts, :tls) do
{nil, options1} ->
Logger.warn(
"Starting listener on a non secured socket, data will be passed over unencrypted connection!"
)
{:ranch_tcp, [], options1}
{tls_options, options1} ->
verify_peer = Keyword.get(tls_options, :verify)
# User-supplied tls options win over the get_peer_options/1 defaults.
{:ranch_ssl, Keyword.merge(get_peer_options(verify_peer), tls_options), options1}
end
# Note: socket opts are read from the original map (before :tls was popped),
# then the TLS options are appended and stored under :socket_opts.
socket_opts = get_socket_options(transport_opts, port) ++ tls_options1
transport_opts2 = Map.put(transport_opts1, :socket_opts, socket_opts)
{transport_module, transport_opts2}
end
# Merges the caller's :socket_opts (if any) with the listener port.
defp get_socket_options(transport_opts, port) do
  socket_opts = Map.get(transport_opts, :socket_opts, [])
  Keyword.put(socket_opts, :port, port)
end

# Default ssl options for each peer-verification mode. Any other value is
# rejected outright.
defp get_peer_options(:verify_peer = verify) do
  [
    verify: verify,
    fail_if_no_peer_cert: true,
    crl_check: :best_effort,
    crl_cache: {:ssl_crl_cache, {:internal, [http: 5_000]}}
  ]
end

defp get_peer_options(:verify_none = verify) do
  [verify: verify, fail_if_no_peer_cert: false]
end

defp get_peer_options(_invalid) do
  raise ArgumentError, "Invalid verify_peer option provided"
end

# Looks up the ranch listener ref bound to `port`; nil when none matches.
defp get_receiver_id_by_port(port) do
  Enum.find_value(:ranch.info(), fn {ref, info} ->
    if info[:port] == port, do: ref
  end)
end
# ===================
# GenServer callbacks
# ===================
@doc false
@spec init(Keyword.t()) ::
{:ok, state :: any()}
| {:ok, state :: any(), timeout() | :hibernate | {:continue, term()}}
| :ignore
| {:stop, reason :: any()}
# Entry point for each accepted connection (spawned via start_link/4).
# Completes the ranch handshake, validates the peer against the configured
# allowed clients, then hands control to the gen_server loop.
def init([receiver_id, transport, options]) do
{:ok, socket} = :ranch.handshake(receiver_id, [])
{:ok, server_info} = transport.sockname(socket)
{:ok, client_info} = transport.peername(socket)
case Peer.validate(%{transport: transport, socket: socket, client_info: client_info}, options) do
{:ok, :success} ->
# active: :once delivers exactly one packet as a message; it is re-armed
# after each packet in handle_received_data/4.
:ok = transport.setopts(socket, active: :once)
state = %{
socket: socket,
server_info: server_info,
client_info: client_info,
transport: transport,
framing_context: %FramingContext{
packet_framer_module: Map.get(options, :packet_framer_module),
dispatcher_module: Map.get(options, :dispatcher_module)
}
}
# http://erlang.org/doc/man/gen_server.html#enter_loop-3
:gen_server.enter_loop(__MODULE__, [], state)
{:error, error} ->
Logger.warn("Failed to verify client #{inspect(client_info)}, error: #{inspect(error)}")
{:stop,
%{message: "Failed to verify client #{inspect(client_info)}, error: #{inspect(error)}"}}
end
end
# Incoming data from either a plain tcp or a tls socket: feed it through the
# framing context (which may dispatch complete messages and buffer a reply).
def handle_info({message, socket, data}, state) when message in [:tcp, :ssl] do
Logger.debug(fn -> "Receiver received data: [#{inspect(data)}]." end)
framing_context = handle_received_data(socket, data, state.framing_context, state.transport)
{:noreply, %{state | framing_context: framing_context}}
end
# Peer closed the connection: stop this per-connection process normally.
def handle_info({message, _socket}, state) when message in [:tcp_closed, :ssl_closed] do
Logger.debug("MLLP.Receiver tcp_closed.")
{:stop, :normal, state}
end
# Transport-level error: stop with the underlying reason.
def handle_info({message, _, reason}, state) when message in [:tcp_error, :tls_error] do
Logger.error(fn -> "MLLP.Receiver encountered a tcp_error: [#{inspect(reason)}]" end)
{:stop, reason, state}
end
def handle_info(:timeout, state) do
Logger.debug("Receiver timed out.")
{:stop, :normal, state}
end
# Catch-all so stray messages never crash the connection process.
def handle_info(msg, state) do
Logger.warn("Unexpected handle_info for msg [#{inspect(msg)}].")
{:noreply, state}
end
# Returns true if `mod` declared `@behaviour behaviour`.
# Each @behaviour declaration produces its own {:behaviour, [Mod]} entry in
# __info__(:attributes); the previous Keyword.get/3 read only the FIRST such
# entry, so a module using several behaviours (e.g. `use GenServer` plus a
# custom one) could be wrongly rejected. Collect all entries instead.
defp implements_behaviour?(mod, behaviour) do
  mod.__info__(:attributes)
  |> Keyword.get_values(:behaviour)
  |> List.flatten()
  |> Enum.member?(behaviour)
end
# Re-arms the socket for the next packet, runs the configured framer over
# `data`, and flushes any reply the framer buffered. Returns the updated
# framing context. (Also removes a no-op `framing_context = framing_context`
# self-assignment from the original.)
defp handle_received_data(socket, data, framing_context, transport) do
  transport.setopts(socket, active: :once)

  framer = framing_context.packet_framer_module
  {:ok, framed_context} = framer.handle_packet(data, framing_context)

  case framed_context.reply_buffer do
    "" ->
      framed_context

    reply ->
      transport.send(socket, reply)
      %{framed_context | reply_buffer: ""}
  end
end
# Builds the allowed-clients lookup map used during peer validation.
# With :verify_peer, entries are kept as charlists — presumably matched
# against TLS certificate identities by MLLP.Peer; confirm against that
# module.
defp get_allowed_clients(:verify_peer, opts) do
Keyword.get(opts, :allowed_clients, [])
|> Enum.map(&to_charlist/1)
|> Enum.into(%{}, fn client -> {client, true} end)
end
# Without peer verification, entries are resolved to IP tuples (hostnames
# via DNS); unresolvable entries are dropped.
defp get_allowed_clients(_, opts) do
Keyword.get(opts, :allowed_clients, [])
|> Enum.map(&normalize_ip/1)
|> Enum.reject(&is_nil(&1))
|> Enum.into(%{}, fn client -> {client, true} end)
end
# Already an IPv4 / IPv6 tuple: pass through unchanged.
def normalize_ip({_, _, _, _} = ip), do: ip
def normalize_ip({_, _, _, _, _, _, _, _} = ip), do: ip
def normalize_ip(ip) when is_atom(ip), do: normalize_ip(to_string(ip))
# Strings are parsed as IP literals first; anything that fails to parse is
# treated as a hostname and resolved via DNS.
def normalize_ip(ip) when is_binary(ip) do
ip
|> String.to_charlist()
|> :inet.parse_address()
|> case do
{:ok, address} ->
address
_ ->
normalize_hostname(ip)
end
end
# Resolves a hostname to its first address via DNS (:inet.gethostbyname/1 —
# note this performs blocking network I/O). Returns nil (after logging) for
# names that cannot be resolved, so callers can filter them out.
defp normalize_hostname(name) do
name
|> String.to_charlist()
|> :inet.gethostbyname()
|> case do
{:ok, {:hostent, _, _, _, _, [address | _]}} ->
address
error ->
Logger.warn(
"IP/hostname #{inspect(name)} provided is not a valid IP/hostname #{inspect(error)}. It will be filtered from allowed_clients list"
)
nil
end
end
end
|
lib/mllp/receiver.ex
| 0.849738
| 0.435361
|
receiver.ex
|
starcoder
|
defmodule Axon.MixedPrecision do
@moduledoc """
Utilities for creating mixed precision policies.
Mixed precision is useful for increasing model throughput at the possible
price of a small dip in accuracy. When creating a mixed precision policy,
you define the policy for `params`, `compute`, and `output`.
The `params` policy dictates what type parameters should be stored as
during training. The `compute` policy dictates what type should be used
during intermediate computations in the model's forward pass. The `output`
policy dictates what type the model should output.
Here's an example of creating a mixed precision policy and applying it
to a model:
model =
Axon.input({nil, 784})
|> Axon.dense(128, activation: :relu)
|> Axon.batch_norm()
|> Axon.dropout(rate: 0.5)
|> Axon.dense(64, activation: :relu)
|> Axon.batch_norm()
|> Axon.dropout(rate: 0.5)
|> Axon.dense(10, activation: :softmax)
policy = Axon.MixedPrecision.create_policy(
params: {:f, 32},
compute: {:f, 16},
output: {:f, 32}
)
mp_model =
model
|> Axon.MixedPrecision.apply_policy(policy, except: [:batch_norm])
The example above applies the mixed precision policy to every layer in
the model except Batch Normalization layers. The policy will cast parameters
and inputs to `{:f, 16}` for intermediate computations in the model's forward
pass before casting the output back to `{:f, 32}`.
"""
alias Axon.MixedPrecision.Policy
@doc """
Creates a mixed precision policy with the given options.
## Options
* `params` - parameter precision policy. Defaults to `{:f, 32}`
* `compute` - compute precision policy. Defaults to `{:f, 32}`
* `output` - output precision policy. Defaults to `{:f, 32}`
## Examples
iex> Axon.MixedPrecision.create_policy(params: {:f, 16}, output: {:f, 16})
%Policy{params: {:f, 16}, compute: {:f, 32}, output: {:f, 16}}
iex> Axon.MixedPrecision.create_policy(compute: {:bf, 16})
%Policy{params: {:f, 32}, compute: {:bf, 16}, output: {:f, 32}}
"""
def create_policy(opts \\ []) do
  # `|| {:f, 32}` (rather than Keyword.get/3) deliberately falls back to the
  # default for explicit nil values as well as missing keys.
  %Policy{
    params: opts[:params] || {:f, 32},
    compute: opts[:compute] || {:f, 32},
    output: opts[:output] || {:f, 32}
  }
end
@doc """
Applies mixed precision policy `policy` to every layer in the
given model which returns true for `filter`.
`filter` may be a function or one of `:only` or `:except` - which define
filters for specific operations in the model. You may only use one of
`:only`, `:except`, or a function:
# Only applies to dense layers
Axon.MixedPrecision.apply_policy(model, policy, only: [:dense])
# Applies to every layer but batch norm
Axon.MixedPrecision.apply_policy(model, policy, except: [:batch_norm])
# A more complex application using filters
Axon.MixedPrecision.apply_policy(model, policy, fn
%Axon{op: :dense} -> true
%Axon{op: :batch_norm} -> false
%Axon{op: :conv} -> false
%Axon{op: _} -> true
end)
"""
# Walks the model graph and attaches `policy` to every layer that passes
# `filter`; other layers are left untouched.
def apply_policy(%Axon{} = axon, %Policy{} = policy, filter) when is_function(filter) do
  Axon.tree_map(axon, fn layer ->
    if filter.(layer) do
      %{layer | policy: policy}
    else
      layer
    end
  end)
end

@doc false
# :only — apply the policy exclusively to the listed op types.
def apply_policy(axon, policy, only: only) do
  filter = fn %Axon{op: op} ->
    Enum.member?(only, op)
  end

  apply_policy(axon, policy, filter)
end

@doc false
# :except — apply the policy to every op type not listed.
def apply_policy(axon, policy, except: exceptions) do
  filter = fn %Axon{op: op} ->
    not Enum.member?(exceptions, op)
  end

  apply_policy(axon, policy, filter)
end

@doc false
# Default: apply the policy to every layer. (Previously written as
# `apply_policy(%Axon{} = axon, %Policy{} = policy, & &1)`, which re-matched
# the already-validated arguments inside the call expression — redundant
# no-op matches. The `& &1` filter passed every layer since structs are
# truthy; `fn _ -> true end` states that intent directly.)
def apply_policy(%Axon{} = axon, %Policy{} = policy) do
  apply_policy(axon, policy, fn _layer -> true end)
end
end
|
lib/axon/mixed_precision.ex
| 0.936663
| 0.77343
|
mixed_precision.ex
|
starcoder
|
defmodule SafeExecEnv do
@moduledoc """
It is often desirable to run natively compiled code (C, C++, Rust, ..) from a BEAM
application via a binding. The motivation may be better performance or simply to access
an external library that already exists. The problem is that if that native code crashes
then, unlike with native BEAM processest, he entire VM will crash, taking your
application along with it,
SafeExecEnv provides a safe(r) way to run natively compiled code from a BEAM application.
If the native code in question can be reasonably expected to never crash, then this
precaution is unnecessary, but if native code that might crash is being run then this
library provides a layer of insulation between the code being executed and the virtual
machine the main application is being run on.
Using SafeExecEnv is easy; simply add it to a Supervisor:
defmodule MyApplication do
def start(_type, _args) do
children = [SafeExecEnv]
opts = [strategy: :one_for_one, name: MyApplication.Supervisor]
Supervisor.start_link(children, opts)
end
end
Then you may run functions safely by calling SafeExecEnv::exec with the function. Captured,
anonymous, and Module / Function / Arguments (MFA) style function passing is supported.
SafeExecEnv works by spawning a second BEAM VM to run the functions in. If that VM crashes
then the SafeExecEnv server will also crash. When supervised, this will cause the SafeExecEnv
to be restarted, and the external VM will be started again. Calls to SafeExecEnv may
fail during that time, and will need to be tried again once available.
"""
require Logger
use GenServer
@ets_table_name :safe_exec_env
@safe_exec_node_name "SafeExecEnv_"
@doc """
Executes a function in the safe executable environment and returns the result.
The SafeExecEnv server is presumed to be started.
"""
@spec exec(fun :: function) :: any | {:error, reason :: String.t()}
def exec(fun) when is_function(fun), do: run_remotely(fun)

@doc """
Executes a function with the provided argument (in usual "MFA" form) in the safe
executable environment and returns the result. The SafeExecEnv server is presumed to be
started.
"""
@spec exec(module :: atom, fun :: atom, args :: list) :: any | {:error, reason :: String.t()}
def exec(module, fun, args), do: run_remotely(fn -> apply(module, fun, args) end)

# Shared implementation for exec/1 and exec/3 (previously duplicated in
# both clauses). Runs in the caller's process, so `self()` here is the pid
# the remote node replies to. The inner try/rescue converts exceptions
# raised by the user's code on the remote node into {:error, e}; the outer
# one guards the spawn itself (e.g. when the node is down).
defp run_remotely(work) do
  me = self()

  wrapped = fn ->
    rv =
      try do
        work.()
      rescue
        e -> {:error, e}
      end

    Process.send(me, rv, [])
  end

  try do
    Node.spawn(node_name(), wrapped)
    return_value()
  rescue
    e -> {:error, e}
  end
end
@doc "Returns the name of the BEAM node being used as the safe exec environment"
@spec get() :: String.t()
def get() do
  # Starts the external node lazily on first use.
  case node_name() do
    nil -> GenServer.call(__MODULE__, :start_node)
    node -> node
  end
end

@doc "Returns true if the node is running and reachable, otherwise false"
@spec is_alive?() :: boolean
def is_alive?() do
  # node_name/0 returns nil before the external node has been started;
  # short-circuit to false instead of attempting to ping a bogus node name.
  case node_name() do
    nil -> false
    node -> Node.ping(node) == :pong
  end
end
@doc false
@spec start_link(args :: any) :: {:ok, pid}
def start_link(_) do
GenServer.start_link(__MODULE__, [], name: __MODULE__)
end
@impl GenServer
# Starts the external node eagerly at boot; the server's state is unused.
def init(_) do
start_node()
{:ok, :ok}
end
@impl GenServer
# Serializes on-demand node (re)starts requested via get/0.
def handle_call(:start_node, _, state) do
node = start_node()
{:reply, node, state}
end
# Blocks until ANY message arrives and returns it verbatim.
# NOTE(review): there is no timeout and no reply tagging, so an unrelated
# message in the caller's mailbox could be mistaken for the exec result,
# and a crashed remote spawn would block forever — confirm acceptable.
defp return_value() do
receive do
rv -> rv
end
end
# Creates the named ETS table on first use (:ets.new raises if the name is
# already taken, hence the :ets.info guard).
defp start_ets() do
if :ets.info(@ets_table_name) == :undefined do
# Left-hand @ets_table_name here is a compile-time constant PATTERN: the
# match asserts :ets.new returned the expected table name (it does for
# :named_table tables) — it does not rebind the attribute.
@ets_table_name =
:ets.new(@ets_table_name, [:named_table, :public, {:read_concurrency, true}])
end
end
# Boots the external safe-exec node and records its name in ETS so that
# node_name/0 can find it from any process. Requires the VM to be running
# in distributed mode (checked via the cookie).
defp start_node() do
check_distribution()
start_ets()
{:ok, node} = SafeExecEnv.ExternNode.start(generate_node_name(), self())
:ets.insert(@ets_table_name, {:safe_exec_node, node})
node
end
# Distribution sanity check: a VM started without --name/--sname has the
# :nocookie cookie.
defp check_distribution(), do: :erlang.get_cookie() |> has_cookie?()
defp has_cookie?(:nocookie) do
Logger.error(
"SafeExecEnv can not start as the BEAM was not started in distributed mode. Start the vm with the name or sname command line option."
)
# :kill to self is untrappable, so the `false` below is effectively
# unreachable; it exists only to satisfy the boolean return shape.
Process.exit(self(), :kill)
false
end
defp has_cookie?(_), do: true
# Reads the recorded safe-exec node from ETS; nil when not started yet.
# NOTE(review): raises ArgumentError if called before start_ets/0 has run —
# confirm callers only reach this after the server has booted.
defp node_name() do
case :ets.lookup(@ets_table_name, :safe_exec_node) do
[] -> nil
[{_key, value}] -> value
end
end
# Derives the external node's short name from this node's name:
# "SafeExecEnv_<local-part>" (charlist, as required by slave/peer startup).
defp generate_node_name() do
node()
|> Atom.to_string()
|> String.split("@")
|> (fn [x | _] -> @safe_exec_node_name <> x end).()
|> String.to_charlist()
end
end
|
lib/safe_exec_env.ex
| 0.657978
| 0.546496
|
safe_exec_env.ex
|
starcoder
|
defmodule Ritcoinex.Chain do
@moduledoc """
This module is a distributed database, soft realtime, and works as a
Chain of orders like be a Blockchain.
The real purpose is to build a pool of "blockchains": each table is a
blockchain of its own, and all of them can communicate with each other...
"""
# FUN'S OF MNESIA ------------------------------------------------
@doc """
info_db/0, stop_db/0, start_db/0 and exit_db/0 are thin wrappers around the
most commonly used :mnesia / :init entry points.
"""
def info_db() do
  :mnesia.system_info()
end

def stop_db() do
  :mnesia.stop()
end

def start_db() do
  :mnesia.start()
end

def exit_db() do
  :init.stop()
end

# --------------------------------------------------------------
# RPC START/STOP -----------------------------------------------
@doc """
rpc_start/0 and rpc_stop/0 start/stop the :mnesia application on this node
and every connected node at the same time.
"""
def rpc_start(), do: multicall_mnesia(:start)

def rpc_stop(), do: multicall_mnesia(:stop)

# Shared :rpc.multicall plumbing for the start/stop pair.
defp multicall_mnesia(action) do
  :rpc.multicall([node() | Node.list()], Application, action, [:mnesia])
end
# --------------------------------------------------------------
# INSTALL DEFAULT SCHEME / BURNS TABLES -----------------------
@doc """
install_db/0 is an init starter of db, and create the initial chain table
with the defaults settings
"""
# One-shot installer: creates the schema, boots mnesia everywhere, creates
# the genesis/order tables, seeds the genesis record, then stops mnesia.
# NOTE(review): the :timer.sleep calls appear to paper over mnesia startup /
# table-creation latency; there is no confirmation the operations finished —
# confirm whether waiting on :mnesia.wait_for_tables/2 would be safer.
def install_db do
app_schema()
:timer.sleep(500)
rpc_start()
:timer.sleep(500)
Ritcoinex.Initial.Records.create_table_genesis()
:timer.sleep(1000)
Ritcoinex.Initial.Records.create_table_order()
:timer.sleep(1000)
Ritcoinex.Initial.Register.register_genesis_initial()
:timer.sleep(500)
rpc_stop()
end
# INSTALL DEFAULT SCHEME / BURNS TABLES -----------------------
# JOIN OTHER NODE TO THE CLUSTER -------------------------------
@doc """
When another node want join to the cluster of nodes, use the function
connect/1 that expect one argument which is the node e.g :"nobody@know"
anyone previous participant need to run this function to
enable unknown node to the party..
"""
# Joins `set_node` to the mnesia cluster: registers it as an extra db node,
# makes its schema disc-resident, and replicates both tables to it.
def connect(set_node) do
:mnesia.start
:mnesia.change_config(:extra_db_nodes, [set_node])
:mnesia.change_table_copy_type(:schema, set_node, :disc_copies)
:mnesia.add_table_copy(:genesis, set_node, :disc_copies)
:mnesia.add_table_copy(:order, set_node, :disc_copies)
end
# JOIN OTHER NODE TO THE CLUSTER -------------------------------
# DEFAULT'S ----------------------------------------------------
# Creates the on-disk mnesia schema across this node and all connected nodes.
def app_schema() do
  :mnesia.create_schema([Node.self() | Node.list()])
end

# READ / DELETE / LOAD_TABLE ----------------------------------
def delete_table(name_table), do: :mnesia.delete_table(name_table)

# Reads `wallet` from `name_table` inside a transaction; returns the usual
# {:atomic, records} / {:aborted, reason} transaction result.
def read(name_table, wallet) do
  :mnesia.transaction(fn ->
    :mnesia.read({name_table, wallet})
  end)
end

# Forces the table to load from disc (inside a transaction).
def load_from_disc(name_table) do
  :mnesia.transaction(fn ->
    :mnesia.force_load_table(name_table)
  end)
end
# --------------------------------------------------------------
end
|
lib/mnesia_chain/ritcoinex.ex
| 0.598547
| 0.420451
|
ritcoinex.ex
|
starcoder
|
defmodule Crux.Rest.Opts do
  @moduledoc false
  @moduledoc since: "0.3.0"

  alias Crux.Rest.{Opts, Request}

  defstruct(
    token: nil,
    token_type: "Bot",
    raw: false,
    name: nil,
    version: 8
  )

  @typedoc since: "0.3.0"
  # Declared as a struct type. Previously this was a plain map type
  # (%{token: ..., ...}), which no %Opts{} value can ever satisfy because
  # structs carry the extra :__struct__ key - the likely cause of the
  # dialyzer complaints about using Opts.t() as transform/1's return type.
  @type t :: %__MODULE__{
          token: String.t(),
          token_type: String.t(),
          raw: boolean(),
          name: module(),
          version: integer() | nil
        }

  @doc """
  Builds an `Opts` struct from a plain map and validates it.

  Raises an `ArgumentError` if any option is invalid.
  """
  @spec transform(map()) :: t()
  def transform(%{} = data) do
    opts = struct(__MODULE__, data)
    :ok = validate(opts)
    opts
  end

  # Validates the given options, raises an ArgumentError if invalid.
  # Every clause matches any %Opts{}; the guards single out one invalid
  # field per clause and the final clause accepts whatever remains.
  # Stacked `when`s act as a logical OR.
  defp validate(%Opts{token: token})
       when token == ""
       when not is_binary(token) do
    raise ArgumentError, """
    Expected :token to be a binary.
    Received #{inspect(token)}
    """
  end

  defp validate(%Opts{token_type: token_type})
       when token_type == ""
       when not is_binary(token_type) do
    raise ArgumentError, """
    Expected :token_type to be a string.
    Received #{inspect(token_type)}
    """
  end

  defp validate(%Opts{raw: raw})
       when not is_boolean(raw) do
    raise ArgumentError, """
    Expected :raw to be a boolean.
    Received #{inspect(raw)}
    """
  end

  defp validate(%Opts{version: version})
       when not is_nil(version) and not is_integer(version) do
    raise ArgumentError, """
    Expected :version to be nil or an integer.
    Received #{inspect(version)}
    """
  end

  defp validate(%Opts{name: name})
       when not is_atom(name) do
    raise ArgumentError, """
    Expected :name to be a module name.
    Received #{inspect(name)}
    """
  end

  defp validate(%Opts{}) do
    :ok
  end

  @doc """
  Applies options to the request
  """
  @spec apply_options(request :: Request.t(), opts :: Opts.t()) :: Request.t()
  def apply_options(request, %{version: version} = opts) do
    request
    |> apply_raw(opts)
    |> apply_auth(opts)
    |> Request.put_version(version)
  end

  # When :raw is set, drop the response transform so callers get raw data.
  defp apply_raw(request, %{raw: true}) do
    Request.put_transform(request, nil)
  end

  defp apply_raw(request, _opts) do
    request
  end

  # Only requests flagged with auth: true get the token attached.
  defp apply_auth(%{auth: true} = request, %{token: token, token_type: token_type}) do
    Request.put_token(request, token, token_type)
  end

  defp apply_auth(request, %{token: _token, token_type: _token_type} = _opts) do
    request
  end

  # Derive per-instance process names from the consumer-supplied name.
  @spec global(atom) :: atom
  def global(name) do
    Module.concat([name, RateLimiter.Global])
  end

  @spec registry(atom) :: atom
  def registry(name) do
    Module.concat([name, RateLimiter.Registry])
  end

  @spec supervisor(atom) :: atom
  def supervisor(name) do
    Module.concat([name, RateLimiter.Supervisor])
  end

  @spec handler_supervisor(atom) :: atom
  def handler_supervisor(name) do
    Module.concat([name, RateLimiter.Handler.Supervisor])
  end
end
|
lib/rest/opts.ex
| 0.844216
| 0.404831
|
opts.ex
|
starcoder
|
defmodule Bloodhound.Client do
  alias Poison.Parser
  alias Bloodhound.Utility

  @doc """
  Indexes a document, inferring its ID from the `:id` key of its data map.
  """
  def index(type, data), do: index(type, data.id, data)

  @doc """
  Adds a document to the index given its type, ID, and a map of its data.
  """
  def index(type, id, data) do
    encoded = Poison.encode!(data)

    type
    |> build_url(id)
    |> HTTPoison.post(encoded)
    |> parse_response()
    |> Utility.debug_piped("Index response: ")
  end

  @doc """
  Fetches a document given its type and ID.
  """
  def get(type, id) do
    type
    |> build_url(id)
    |> HTTPoison.get()
    |> parse_response()
  end

  @doc """
  Deletes a document given a document type and ID.
  """
  def delete(type \\ nil, id \\ nil) do
    type
    |> build_url(id)
    |> HTTPoison.delete()
    |> parse_response()
  end

  @doc """
  Searches an index and, optionally, specific index types.
  """
  def search(types \\ nil, data \\ %{}) do
    body = Poison.encode!(data)

    types
    |> List.wrap()
    |> build_url("_search")
    |> HTTPoison.post(body)
    |> parse_response()
  end

  @doc """
  Constructs an ElasticSearch API URL from the configured host/index plus
  the given type (or list of types) and optional document ID.
  TODO add params
  """
  def build_url(type, id \\ nil) do
    base = [
      Application.get_env(:bloodhound, :elasticsearch_url),
      Application.get_env(:bloodhound, :index)
    ]

    (base ++ [type, id])
    |> List.flatten()
    |> Enum.filter(& &1)
    |> Enum.join("/")
    |> Utility.debug_piped("Built URL:")
  end

  @doc """
  Parses a response from the ElasticSearch API into a happy map %{:)}.
  """
  def parse_response({:ok, response}) do
    # Only JSON object bodies are parsed; anything else is treated as an error.
    if String.first(response.body) === "{" do
      body = Parser.parse!(response.body, keys: :atoms)

      case body do
        %{hits: hits} -> {:ok, format_hits(hits)}
        %{_source: _} -> {:ok, format_document(body)}
        %{error: _} -> {:error, body}
        %{found: false} -> {:error, body}
        _ -> {:ok, body}
      end
    else
      {:error, response}
    end
  end

  def parse_response({_status, response}), do: {:error, response}

  @doc """
  Formats documents in search results to look like the models they represent.
  """
  def format_hits(hits) do
    formatted = Enum.map(hits.hits, &format_document/1)
    %{hits | hits: formatted}
  end

  def format_document(%{_source: source} = document) do
    if Map.has_key?(document, :_score) do
      Map.merge(source, %{score: document._score, type: document._type})
    else
      source
    end
  end

  @doc """
  Calls ElasticSearch's Refresh API which: "...allows to explicitly refresh one
  or more indices, making all operations performed since the last refresh
  available for search".
  """
  def refresh(types \\ nil) do
    types
    |> List.wrap()
    |> build_url("_refresh")
    |> HTTPoison.post("")
    |> parse_response()
  end
end
|
lib/bloodhound/client.ex
| 0.58818
| 0.447581
|
client.ex
|
starcoder
|
defmodule GraphQL do
  @moduledoc ~S"""
  The main GraphQL module.

  The `GraphQL` module provides a
  [GraphQL](http://facebook.github.io/graphql/) implementation for Elixir.

  ## Parse a query

  Parse a GraphQL query

      iex> GraphQL.parse "{ hello }"
      %{definitions: [
        %{kind: :OperationDefinition, loc: %{start: 0},
          operation: :query,
          selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
            selections: [
              %{kind: :Field, loc: %{start: 0}, name: "hello"}
            ]
          }}
        ],
        kind: :Document, loc: %{start: 0}
      }

  ## Execute a query

  Execute a GraphQL query against a given schema / datastore.

      # iex> GraphQL.execute schema, "{ hello }"
      # [data: [hello: world]]
  """

  alias GraphQL.Schema
  alias GraphQL.SyntaxError

  defmodule ObjectType do
    defstruct name: "RootQueryType", description: "", fields: []
  end

  defmodule FieldDefinition do
    defstruct name: nil, type: "String", resolve: nil
  end

  @doc """
  Tokenize the input string into a stream of tokens.

      iex> GraphQL.tokenize("{ hello }")
      [{ :"{", 1 }, { :name, 1, 'hello' }, { :"}", 1 }]
  """
  def tokenize(input_string) when is_binary(input_string) do
    # to_char_list/1 is deprecated (and removed in current Elixir);
    # to_charlist/1 is the supported spelling with identical behavior.
    input_string |> to_charlist |> tokenize
  end

  def tokenize(input_string) do
    # Assertive match: a lexer error crashes with a MatchError instead of
    # silently returning a partial token stream.
    {:ok, tokens, _} = :graphql_lexer.string(input_string)
    tokens
  end

  @doc """
  Parse the input string into a Document AST.

      iex> GraphQL.parse("{ hello }")
      %{definitions: [
        %{kind: :OperationDefinition, loc: %{start: 0},
          operation: :query,
          selectionSet: %{kind: :SelectionSet, loc: %{start: 0},
            selections: [
              %{kind: :Field, loc: %{start: 0}, name: "hello"}
            ]
          }}
        ],
        kind: :Document, loc: %{start: 0}
      }
  """
  def parse(input_string) when is_binary(input_string) do
    input_string |> to_charlist |> parse
  end

  def parse(input_string) do
    case input_string |> tokenize |> :graphql_parser.parse do
      {:ok, parse_result} ->
        parse_result

      {:error, {line_number, _, errors}} ->
        raise SyntaxError, line: line_number, errors: errors
    end
  end

  @doc """
  Execute a query against a schema.

      # iex> GraphQL.execute(schema, "{ hello }")
      # [data: [hello: world]]
  """
  def execute(schema, query) do
    document = parse(query)
    query_fields = hd(document[:definitions])[:selectionSet][:selections]

    %Schema{
      query: _query_root = %ObjectType{
        name: "RootQueryType",
        fields: fields
      }
    } = schema

    # Pair every schema field with each requested field of the same name
    # and resolve it with the parsed arguments.
    # NOTE: String.to_atom/1 below creates atoms from field names in the
    # query; this is only safe while queries come from trusted sources.
    result =
      for fd <- fields, qf <- query_fields, qf[:name] == fd.name do
        arguments =
          qf
          |> Map.get(:arguments, [])
          |> Enum.map(&parse_argument/1)

        {String.to_atom(fd.name), fd.resolve.(arguments)}
      end

    [data: result]
  end

  # Converts an Argument AST node into a {name, value} tuple.
  defp parse_argument(%{kind: :Argument, loc: _, name: name, value: %{kind: _, loc: _, value: value}}) do
    {String.to_atom(name), value}
  end
end
|
lib/graphql.ex
| 0.84137
| 0.577317
|
graphql.ex
|
starcoder
|
defmodule Exqlite.Sqlite3 do
  @moduledoc """
  The interface to the NIF implementation.
  """

  alias Exqlite.Sqlite3NIF

  @type db() :: reference()
  @type statement() :: reference()
  @type reason() :: atom() | String.t()

  @doc """
  Opens a new sqlite database at the path provided.

  `path` can be `":memory:"` to keep the sqlite database in memory.
  """
  @spec open(String.t()) :: {:ok, db()} | {:error, reason()}
  def open(path) do
    path
    |> String.to_charlist()
    |> Sqlite3NIF.open()
  end

  # Closing a nil connection is a no-op.
  @spec close(nil) :: :ok
  def close(nil), do: :ok

  @doc """
  Closes the database and releases any underlying resources.
  """
  @spec close(db()) :: :ok | {:error, reason()}
  def close(conn), do: Sqlite3NIF.close(conn)

  @doc """
  Executes an sql script. Multiple stanzas can be passed at once.
  """
  @spec execute(db(), String.t()) :: :ok | {:error, {atom(), reason()}}
  def execute(conn, sql) do
    charlist_sql = String.to_charlist(sql)

    case Sqlite3NIF.execute(conn, charlist_sql) do
      :ok ->
        :ok

      {:error, {code, reason}} ->
        # Normalize the NIF's charlist reason into a binary.
        {:error, {code, String.Chars.to_string(reason)}}

      _ ->
        # This should never happen, but just to be safe
        {:error, {:unknown, "unhandled error"}}
    end
  end

  @doc """
  Get the number of changes recently.

  **Note**: If triggers are used, the count may be larger than expected.

  See: https://sqlite.org/c3ref/changes.html
  """
  @spec changes(db()) :: {:ok, integer()}
  def changes(conn), do: Sqlite3NIF.changes(conn)

  @spec prepare(db(), String.t()) :: {:ok, statement()} | {:error, reason()}
  def prepare(conn, sql), do: Sqlite3NIF.prepare(conn, String.to_charlist(sql))

  # Binding nil is treated the same as binding an empty argument list.
  @spec bind(db(), statement(), nil) ::
          :ok | {:error, reason()} | {:error, {atom(), any()}}
  def bind(conn, statement, nil), do: bind(conn, statement, [])

  @spec bind(db(), statement(), []) ::
          :ok | {:error, reason()} | {:error, {atom(), any()}}
  def bind(conn, statement, args) do
    converted = Enum.map(args, &convert/1)
    Sqlite3NIF.bind(conn, statement, converted)
  end

  @spec columns(db(), statement()) :: {:ok, []} | {:error, reason()}
  def columns(conn, statement), do: Sqlite3NIF.columns(conn, statement)

  @spec step(db(), statement()) :: :done | :busy | {:row, []}
  def step(conn, statement), do: Sqlite3NIF.step(conn, statement)

  @spec last_insert_rowid(db()) :: {:ok, integer()}
  def last_insert_rowid(conn), do: Sqlite3NIF.last_insert_rowid(conn)

  # Calendar structs are stored as ISO-8601 strings; everything else is
  # passed through unchanged. The struct patterns are disjoint, so clause
  # order does not matter.
  defp convert(%NaiveDateTime{} = value), do: NaiveDateTime.to_iso8601(value)
  defp convert(%DateTime{} = value), do: DateTime.to_iso8601(value)
  defp convert(%Date{} = value), do: Date.to_iso8601(value)
  defp convert(%Time{} = value), do: Time.to_iso8601(value)
  defp convert(value), do: value
end
|
lib/exqlite/sqlite3.ex
| 0.828384
| 0.417034
|
sqlite3.ex
|
starcoder
|
defmodule Aecore.Channel.Updates.ChannelWithdrawUpdate do
  @moduledoc """
  State channel update implementing withdraws in the state channel. This update can be included in ChannelOffChainTx.
  This update is used by ChannelWithdrawTx for unlocking some of the state channel's tokens.
  """

  alias Aecore.Account.{Account, AccountStateTree}
  alias Aecore.Chain.Chainstate
  alias Aecore.Channel.ChannelOffChainUpdate
  alias Aecore.Channel.Tx.ChannelWithdrawTx
  alias Aecore.Channel.Updates.ChannelWithdrawUpdate

  @behaviour ChannelOffChainUpdate

  @typedoc """
  Structure of the ChannelWithdrawUpdate type
  """
  @type t :: %ChannelWithdrawUpdate{
          to: binary(),
          amount: non_neg_integer()
        }

  @typedoc """
  The type of errors returned by this module
  """
  @type error :: {:error, String.t()}

  @doc """
  Definition of ChannelWithdrawUpdate structure

  ## Parameters
  - to: the onchain account where the tokens will be returned
  - amount: number of the tokens withdrawn from the state channel
  """
  defstruct [:to, :amount]

  @doc """
  Creates a ChannelWithdrawUpdate from a ChannelWithdrawTx
  """
  @spec new(ChannelWithdrawTx.t()) :: ChannelWithdrawUpdate.t()
  def new(%ChannelWithdrawTx{
        amount: amount,
        withdrawing_account: withdrawing_account
      })
      when is_binary(withdrawing_account) do
    %ChannelWithdrawUpdate{
      to: withdrawing_account,
      amount: amount
    }
  end

  @doc """
  Deserializes ChannelWithdrawUpdate. The serialization was changed in later versions of epoch.
  """
  # The duplicated `to` in the pattern is intentional: the epoch
  # serialization stores the account twice (see encode_to_list/1), and
  # both copies must be equal for the update to be valid.
  @spec decode_from_list(list(binary())) :: ChannelWithdrawUpdate.t()
  def decode_from_list([to, to, amount]) do
    %ChannelWithdrawUpdate{
      to: to,
      amount: :binary.decode_unsigned(amount)
    }
  end

  @doc """
  Serializes ChannelWithdrawUpdate. The serialization was changed in later versions of epoch.
  """
  @spec encode_to_list(ChannelWithdrawUpdate.t()) :: list(binary())
  def encode_to_list(%ChannelWithdrawUpdate{
        to: to,
        amount: amount
      }) do
    [to, to, :binary.encode_unsigned(amount)]
  end

  @doc """
  Performs the withdraw on the offchain chainstate. Returns an error if the operation failed.
  """
  @spec update_offchain_chainstate!(Chainstate.t(), ChannelWithdrawUpdate.t()) ::
          Chainstate.t() | no_return()
  def update_offchain_chainstate!(
        %Chainstate{
          accounts: accounts
        } = chainstate,
        %ChannelWithdrawUpdate{
          to: to,
          amount: amount
        }
      ) do
    updated_accounts =
      AccountStateTree.update(
        accounts,
        to,
        &withdraw_from_account!(&1, amount)
      )

    %Chainstate{chainstate | accounts: updated_accounts}
  end

  # Subtracts `amount` from the account by applying a negative transfer.
  @spec withdraw_from_account!(Account.t(), non_neg_integer()) :: Account.t() | no_return()
  defp withdraw_from_account!(account, amount) do
    Account.apply_transfer!(account, nil, -amount)
  end

  @doc """
  Preprocess checks for a half-signed update: the amount must be
  non-negative and the funds may only be withdrawn to the update
  initiator's account (`:foreign_pubkey` in the opts).
  """
  @spec half_signed_preprocess_check(ChannelWithdrawUpdate.t(), map()) :: :ok | error()
  def half_signed_preprocess_check(
        %ChannelWithdrawUpdate{
          to: to,
          amount: amount
        },
        %{
          foreign_pubkey: correct_to
        }
      ) do
    cond do
      amount < 0 ->
        {:error, "#{__MODULE__}: Can't withdraw negative amount of tokens"}

      to != correct_to ->
        {:error,
         "#{__MODULE__}: Funds can be only withdrawn from the update initiator's account (#{
           inspect(correct_to)
         }), got #{inspect(to)}"}

      true ->
        :ok
    end
  end

  def half_signed_preprocess_check(%ChannelWithdrawUpdate{}, _) do
    {:error,
     "#{__MODULE__}: Missing keys in the opts dictionary. This probably means that the update was unexpected."}
  end

  @doc """
  Validates an update considering state before applying it to the provided chainstate.
  """
  @spec fully_signed_preprocess_check(
          Chainstate.t() | nil,
          ChannelWithdrawUpdate.t(),
          non_neg_integer()
        ) :: :ok | error()
  def fully_signed_preprocess_check(
        %Chainstate{accounts: accounts},
        %ChannelWithdrawUpdate{to: to, amount: amount},
        channel_reserve
      ) do
    # BUG FIX: check membership BEFORE destructuring the account. The
    # previous code ran `%Account{} = AccountStateTree.get(accounts, to)`
    # first, so a missing account crashed with a MatchError and the
    # "not a party" error branch below was unreachable.
    if AccountStateTree.has_key?(accounts, to) do
      %Account{balance: to_balance} = AccountStateTree.get(accounts, to)

      if to_balance - amount < channel_reserve do
        {:error,
         "#{__MODULE__}: Withdrawing party tried to withdraw #{amount} tokens but can withdraw at most #{
           to_balance - channel_reserve
         } tokens"}
      else
        :ok
      end
    else
      {:error, "#{__MODULE__}: Withdrawing account is not a party of this channel"}
    end
  end

  def fully_signed_preprocess_check(nil, %ChannelWithdrawUpdate{}, _channel_reserve) do
    {:error, "#{__MODULE__}: OffChain Chainstate must exist"}
  end
end
|
apps/aecore/lib/aecore/channel/updates/channel_withdraw_update.ex
| 0.860501
| 0.42057
|
channel_withdraw_update.ex
|
starcoder
|
defmodule McProtocol.DataTypes do
  @moduledoc false

  # For <<< (left shift) operator
  use Bitwise

  defmodule ChatMessage do
    @moduledoc false
    # NOTE: :underligned is kept as-is (likely a typo for :underlined) -
    # renaming it would break the wire/JSON mapping and any callers.
    defstruct [:text, :translate, :with, :score, :selector, :extra,
               :bold, :italic, :underligned, :strikethrough, :obfuscated, :color,
               :clickEvent, :hoverEvent, :insertion]

    defmodule Score do
      @moduledoc false
      defstruct [:name, :objective]
    end

    defmodule ClickEvent do
      @moduledoc false
      defstruct [:action, :value]
    end

    defmodule HoverEvent do
      @moduledoc false
      defstruct [:action, :value]
    end
  end

  defmodule Slot do
    @moduledoc false
    defstruct id: nil, count: 0, damage: 0, nbt: nil
  end

  defmodule Decode do
    # Every decoder takes a binary and returns {decoded_value, rest}.

    @spec varint(binary) :: {integer, binary}
    def varint(data) do
      # Assertive match: crashes on :too_big / :incomplete / :error.
      {:ok, resp} = varint?(data)
      resp
    end

    def varint?(data) do
      decode_varint(data, 0, 0)
    end

    # LEB128-style varint: high bit set means more bytes follow.
    defp decode_varint(<<1::1, curr::7, rest::binary>>, num, acc) when num < (64 - 7) do
      decode_varint(rest, num + 7, (curr <<< num) + acc)
    end

    defp decode_varint(<<0::1, curr::7, rest::binary>>, num, acc) do
      {:ok, {(curr <<< num) + acc, rest}}
    end

    defp decode_varint(_, num, _) when num >= (64 - 7), do: :too_big
    defp decode_varint("", _, _), do: :incomplete
    defp decode_varint(_, _, _), do: :error

    @spec bool(binary) :: {boolean, binary}
    def bool(<<value::size(8), rest::binary>>) do
      case value do
        1 -> {true, rest}
        _ -> {false, rest}
      end
    end

    # Length-prefixed (varint) UTF-8 string.
    def string(data) do
      {length, data} = varint(data)
      <<result::binary-size(length), rest::binary>> = data
      {to_string(result), rest}
    end

    def chat(data) do
      # BUG FIX: string/1 returns {string, rest}; the JSON string must be
      # unpacked before being handed to Poison. Previously the whole tuple
      # was passed to Poison.decode!/2, which always crashed.
      {json, _rest} = string(data)
      Poison.decode!(json, as: McProtocol.DataTypes.ChatMessage)
    end

    def slot(data) do
      <<id::signed-integer-2*8, data::binary>> = data
      slot_with_id(data, id)
    end

    # Item id -1 means an empty slot.
    defp slot_with_id(data, -1), do: {%McProtocol.DataTypes.Slot{}, data}

    defp slot_with_id(data, id) do
      <<count::unsigned-integer-1*8, damage::unsigned-integer-2*8, data::binary>> = data
      {nbt, data} = slot_nbt(data)
      struct = %McProtocol.DataTypes.Slot{id: id, count: count, damage: damage, nbt: nbt}
      {struct, data}
    end

    # A leading 0 byte means the slot carries no NBT payload.
    defp slot_nbt(<<0, data::binary>>), do: {nil, data}
    defp slot_nbt(data), do: McProtocol.NBT.read(data)

    def varint_length_binary(data) do
      {length, data} = varint(data)
      result = :binary.part(data, {0, length})
      {result, :binary.part(data, {length, byte_size(data) - length})}
    end

    def byte(data) do
      <<num::signed-integer-size(8), data::binary>> = data
      {num, data}
    end

    # Fixed-point values are stored as integers scaled by 32.
    def fixed_point_byte(data) do
      {num, data} = byte(data)
      {num / 32, data}
    end

    def u_byte(data) do
      <<num::unsigned-integer-size(8), data::binary>> = data
      {num, data}
    end

    def short(data) do
      <<num::signed-integer-size(16), data::binary>> = data
      {num, data}
    end

    def u_short(data) do
      <<num::unsigned-integer-size(16), data::binary>> = data
      {num, data}
    end

    def int(data) do
      <<num::signed-integer-size(32), data::binary>> = data
      {num, data}
    end

    def fixed_point_int(data) do
      {num, data} = int(data)
      {num / 32, data}
    end

    def long(data) do
      <<num::signed-integer-size(64), data::binary>> = data
      {num, data}
    end

    def float(data) do
      <<num::signed-float-4*8, data::binary>> = data
      {num, data}
    end

    def double(data) do
      <<num::signed-float-8*8, data::binary>> = data
      {num, data}
    end

    def rotation(data) do
      <<x::signed-float-4*8, y::signed-float-4*8, z::signed-float-4*8,
        rest::binary>> = data
      # BUG FIX: return the unconsumed remainder (`rest`), not the original
      # input (`data`), so callers can keep decoding after the 12 bytes.
      {{x, y, z}, rest}
    end

    # Packed 26/12/26-bit block position.
    def position(data) do
      <<x::signed-integer-26, y::signed-integer-12, z::signed-integer-26, data::binary>> = data
      {{x, y, z}, data}
    end

    # Consumes the entire remaining binary.
    def byte_array_rest(data) do
      {data, <<>>}
    end

    def byte_flags(data) do
      <<flags::binary-1*8, data::binary>> = data
      {flags, data}
    end
  end
end
|
lib/datatypes/datatypes.ex
| 0.597608
| 0.475605
|
datatypes.ex
|
starcoder
|
defmodule Ecto.ParameterizedType do
  @moduledoc """
  Parameterized types are Ecto types that can be customized per field.

  Parameterized types allow a set of options to be specified in the schema
  which are initialized on compilation and passed to the callback functions
  as the last argument.

  For example, `field :foo, :string` behaves the same for every field.
  On the other hand, `field :foo, Ecto.Enum, values: [:foo, :bar, :baz]`
  will likely have a different set of values per field.

  Note that options are specified as a keyword, but it is idiomatic to
  convert them to maps inside `c:init/1` for easier pattern matching in
  other callbacks.

  Parameterized types are a superset of regular types. In other words,
  with parameterized types you can do everything a regular type does,
  and more. For example, parameterized types can handle `nil` values
  in both `load` and `dump` callbacks, they can customize `cast` behavior
  per query and per changeset, and also control how values are embedded.

  However, parameterized types are also more complex. Therefore, if
  everything you need to achieve can be done with basic types, they
  should be preferred to parameterized ones.

  ## Examples

  To create a parameterized type, create a module as shown below:

      defmodule MyApp.MyType do
        use Ecto.ParameterizedType

        def type(_params), do: :string

        def init(opts) do
          validate_opts(opts)
          Enum.into(opts, %{})
        end

        def cast(data, params) do
          ...
          cast_data
        end

        def load(data, _loader, params) do
          ...
          {:ok, loaded_data}
        end

        def dump(data, dumper, params) do
          ...
          {:ok, dumped_data}
        end

        def equal?(a, b, _params) do
          a == b
        end
      end

  To use this type in a schema field, specify the type and parameters like this:

      schema "foo" do
        field :bar, MyApp.MyType, opt1: :baz, opt2: :boo
      end
  """

  @typedoc """
  The keyword options passed from the Schema's field macro into `c:init/1`
  """
  @type opts :: keyword()

  @typedoc """
  The parameters for the ParameterizedType

  This is the value passed back from `c:init/1` and subsequently passed
  as the last argument to all callbacks. Idiomatically it is a map.
  """
  @type params :: term()

  @doc """
  Callback to convert the options specified in the field macro into parameters
  to be used in other callbacks.

  This function is called at compile time, and should raise if invalid values are
  specified. It is idiomatic that the parameters returned from this are a map.
  `field` and `schema` will be injected into the options automatically.

  For example, this schema specification

      schema "my_table" do
        field :my_field, MyParameterizedType, opt1: :foo, opt2: nil
      end

  will result in the call:

      MyParameterizedType.init([schema: "my_table", field: :my_field, opt1: :foo, opt2: nil])
  """
  @callback init(opts :: opts()) :: params()

  @doc """
  Casts the given input to the ParameterizedType with the given parameters.

  For more information on casting, see `c:Ecto.Type.cast/1`
  """
  @callback cast(data :: term, params()) ::
              {:ok, term} | :error | {:error, keyword()}

  @doc """
  Loads the given term into a ParameterizedType.

  For more information on loading, see `c:Ecto.Type.load/1`.

  Note that this callback *will* be called when loading a `nil` value,
  unlike `c:Ecto.Type.load/1`.
  """
  @callback load(value :: any(), loader :: function(), params()) :: {:ok, value :: any()} | :error

  @doc """
  Dumps the given term into an Ecto native type.

  For more information on dumping, see `c:Ecto.Type.dump/1`.

  Note that this callback *will* be called when dumping a `nil` value,
  unlike `c:Ecto.Type.dump/1`.
  """
  @callback dump(value :: any(), dumper :: function(), params()) :: {:ok, value :: any()} | :error

  @doc """
  Returns the underlying schema type for the ParameterizedType.

  For more information on schema types, see `c:Ecto.Type.type/0`
  """
  @callback type(params()) :: Ecto.Type.t()

  @doc """
  Checks if two terms are semantically equal.
  """
  @callback equal?(value1 :: any(), value2 :: any(), params()) :: boolean()

  @doc """
  Dictates how the type should be treated inside embeds.

  For more information on embedding, see `c:Ecto.Type.embed_as/1`
  """
  @callback embed_as(format :: atom(), params()) :: :self | :dump

  @doc """
  Generates a loaded version of the data.

  This callback is invoked when a parameterized type is given
  to `field` with the `:autogenerate` flag.
  """
  @callback autogenerate(params()) :: term()

  @optional_callbacks autogenerate: 1

  @doc false
  defmacro __using__(_) do
    quote location: :keep do
      @behaviour Ecto.ParameterizedType

      # Sensible defaults: embed the value as-is and compare with ==.
      # Both are overridable by the implementing type.
      def embed_as(_, _), do: :self
      def equal?(term1, term2, _params), do: term1 == term2

      defoverridable embed_as: 2, equal?: 3
    end
  end
end
|
lib/ecto/parameterized_type.ex
| 0.924696
| 0.663418
|
parameterized_type.ex
|
starcoder
|
defmodule JSONRPC2 do
  @moduledoc ~S"""
  `JSONRPC2` is an Elixir library for JSON-RPC 2.0.

  It includes request and response utility modules, a transport-agnostic server handler, a
  line-based TCP server and client, which are based on [Ranch](https://github.com/ninenines/ranch)
  and [shackle](https://github.com/lpgauth/shackle), respectively, and a JSON-in-the-body HTTP(S)
  server and client, based on [Plug](https://github.com/elixir-lang/plug) and
  [hackney](https://github.com/benoitc/hackney), respectively.

  ## TCP Example

      # Define a handler
      defmodule Handler do
        use JSONRPC2.Server.Handler

        def handle_request("hello", [name]) do
          "Hello, #{name}!"
        end

        def handle_request("hello2", %{"name" => name}) do
          "Hello again, #{name}!"
        end

        def handle_request("subtract", [minuend, subtrahend]) do
          minuend - subtrahend
        end

        def handle_request("notify", [name]) do
          IO.puts "You have been notified, #{name}!"
        end
      end

      # Start the server (this will usually go in your OTP application's start/2)
      JSONRPC2.Servers.TCP.start_listener(Handler, 8000)

      # Define the client
      defmodule Client do
        alias JSONRPC2.Clients.TCP

        def start(host, port) do
          TCP.start(host, port, __MODULE__)
        end

        def hello(name) do
          TCP.call(__MODULE__, "hello", [name])
        end

        def hello2(args) do
          TCP.call(__MODULE__, "hello2", Map.new(args))
        end

        def subtract(minuend, subtrahend) do
          TCP.cast(__MODULE__, "subtract", [minuend, subtrahend])
        end

        def notify(name) do
          TCP.notify(__MODULE__, "notify", [name])
        end
      end

      # Start the client pool (this will also usually go in your OTP application's start/2)
      Client.start("localhost", 8000)

      # Make a call with the client to the server
      IO.inspect Client.hello("Elixir")
      #=> {:ok, "Hello, Elixir!"}

      # Make a call with the client to the server, using named args
      IO.inspect Client.hello2(name: "Elixir")
      #=> {:ok, "Hello again, Elixir!"}

      # Make a call with the client to the server asynchronously
      {:ok, request_id} = Client.subtract(2, 1)
      IO.puts "non-blocking!"
      #=> non-blocking!
      IO.inspect JSONRPC2.Clients.TCP.receive_response(request_id)
      #=> {:ok, 1}

      # Notifications
      Client.notify("Elixir")
      #=> You have been notified, Elixir!

  ## HTTP Example

      # Define a handler
      defmodule Handler do
        use JSONRPC2.Server.Handler

        def handle_request("hello", [name]) do
          "Hello, #{name}!"
        end

        def handle_request("hello2", %{"name" => name}) do
          "Hello again, #{name}!"
        end

        def handle_request("notify", [name]) do
          IO.puts "You have been notified, #{name}!"
        end
      end

      # Start the server (this will usually go in your OTP application's start/2)
      JSONRPC2.Servers.HTTP.http(Handler)

      # Define the client
      defmodule Client do
        alias JSONRPC2.Clients.HTTP

        @url "http://localhost:4000/"

        def hello(name) do
          HTTP.call(@url, "hello", [name])
        end

        def hello2(args) do
          HTTP.call(@url, "hello2", Map.new(args))
        end

        def notify(name) do
          HTTP.notify(@url, "notify", [name])
        end
      end

      # Make a call with the client to the server
      IO.inspect Client.hello("Elixir")
      #=> {:ok, "Hello, Elixir!"}

      # Make a call with the client to the server, using named args
      IO.inspect Client.hello2(name: "Elixir")
      #=> {:ok, "Hello again, Elixir!"}

      # Notifications
      Client.notify("Elixir")
      #=> You have been notified, Elixir!

  ## Serializers

  Any module which conforms to the same API as Jason's `Jason.encode/1` and `Jason.decode/1` can
  be provided as a serializer to the functions which accept them.
  """

  @typedoc "A JSON-RPC 2.0 method name."
  @type method :: String.t()

  @typedoc "A decoded JSON value (object, array, or scalar)."
  @type json ::
          nil
          | true
          | false
          | float
          | integer
          | String.t()
          | [json]
          | %{optional(String.t()) => json}

  @typedoc "A JSON-RPC 2.0 params value: positional (array) or named (object)."
  @type params :: [json] | %{optional(String.t()) => json}

  @typedoc "A JSON-RPC 2.0 request ID — a string or a number, per the spec."
  @type id :: String.t() | number
end
|
lib/jsonrpc2.ex
| 0.78789
| 0.477554
|
jsonrpc2.ex
|
starcoder
|
defmodule Membrane.Endpoint do
  @moduledoc """
  Module defining behaviour for endpoints - elements consuming and producing data.

  Behaviours for endpoints are specified, besides this place, in modules
  `Membrane.Element.Base`,
  `Membrane.Element.WithOutputPads`,
  and `Membrane.Element.WithInputPads`.

  An endpoint can have both input and output pads. The job of a usual endpoint
  is twofold: to receive data on an input pad and consume it (write to a
  soundcard, send through TCP, etc.), and to produce data (read from a
  soundcard, download through HTTP, etc.) and send it through an output pad.
  If these pads work in pull mode, which is the most common case, then the
  endpoint is also responsible for receiving demands on the output pad and
  requesting them on the input pad (for more details, see
  `c:Membrane.Element.WithOutputPads.handle_demand/5` callback).

  Endpoints, like all elements, can of course have multiple pads if needed to
  provide more complex solutions.
  """

  alias Membrane.{Buffer, Element, Pad}
  alias Membrane.Element.CallbackContext

  @doc """
  Callback that is called when buffer should be written by the endpoint.
  By default calls `c:handle_write/4` for each buffer.

  For pads in pull mode it is called when buffers have been demanded (by returning
  `:demand` action from any callback).

  For pads in push mode it is invoked when buffers arrive.
  """
  @callback handle_write_list(
              pad :: Pad.ref_t(),
              buffers :: list(Buffer.t()),
              context :: CallbackContext.Process.t(),
              state :: Element.state_t()
            ) :: Membrane.Element.Base.callback_return_t()

  @doc """
  Callback that is called when buffer should be written by the endpoint. In contrast
  to `c:handle_write_list/4`, it is passed only a single buffer.

  Called by default implementation of `c:handle_write_list/4`.
  """
  @callback handle_write(
              pad :: Pad.ref_t(),
              buffer :: Buffer.t(),
              context :: CallbackContext.Process.t(),
              state :: Element.state_t()
            ) :: Membrane.Element.Base.callback_return_t()

  @doc """
  Brings all the stuff necessary to implement an endpoint element.

  Options:
  - `:bring_pad?` - if true (default) requires and aliases `Membrane.Pad`
  """
  defmacro __using__(options) do
    quote location: :keep do
      use Membrane.Element.Base, unquote(options)
      use Membrane.Element.WithOutputPads
      use Membrane.Element.WithInputPads

      @behaviour unquote(__MODULE__)

      @impl true
      def membrane_element_type, do: :endpoint

      # Default: error out unless the implementer overrides handle_write/4.
      @impl true
      def handle_write(_pad, _buffer, _context, state),
        do: {{:error, :handle_write_not_implemented}, state}

      # Default: split the buffer list into individual handle_write/4 calls.
      @impl true
      def handle_write_list(pad, buffers, _context, state) do
        args_list = buffers |> Enum.map(&[pad, &1])
        {{:ok, split: {:handle_write, args_list}}, state}
      end

      defoverridable handle_write_list: 4,
                     handle_write: 4
    end
  end
end
|
lib/membrane/endpoint.ex
| 0.831622
| 0.452173
|
endpoint.ex
|
starcoder
|
defmodule SymbolicExpression.Canonical.Parser do
alias SymbolicExpression.Canonical.Parser.State
require Logger
@doc """
Parses a canonical s-expression held in a string. Returns `{:ok, result}` on
success, `{:error, :bad_arg}` when the string does not contain a valid canonical
s-expression. See [Wikipedia](https://en.wikipedia.org/wiki/Canonical_S-expressions)
for more details about canonical s-expressions.

## Example

    iex> alias SymbolicExpression.Canonical.Parser
    iex> Parser.parse "(1:11:21:3)"
    {:ok, [1, 2, 3]}
    iex> Parser.parse "invalid"
    {:error, :bad_arg}
"""
def parse(exp) when is_binary(exp) do
  try do
    {:ok, parse!(exp)}
  rescue
    # parse!/1 signals every malformed expression via ArgumentError;
    # translate that into a tagged error tuple for callers.
    error in [ArgumentError] ->
      Logger.error "Failed to parse expression: '#{exp}' with error: '#{inspect error}'"
      {:error, :bad_arg}
  end
end
@doc """
Like `parse/1`, except raises an `ArgumentError` when the string does not
contain a valid s-expression.

## Example

    iex> alias SymbolicExpression.Canonical.Parser
    iex> Parser.parse! "(1:11:21:3)"
    [1, 2, 3]
    iex> Parser.parse! "((1:11:21:3)[24:text/plain;charset=utf-8]14:This is a test4:atom())"
    [[1, 2, 3], "This is a test", :atom, []]
"""
# An empty expression parses to the empty list.
def parse!(""), do: []

def parse!(exp) when is_binary(exp) do
  exp
  |> State.new()
  |> _parse!()
end
@doc """
Like `parse/1` except the input is a file path instead of a binary.

Returns `{:error, :bad_arg}` for an invalid expression and
`{:error, :bad_file}` when the file cannot be read.
"""
def parse_file(file) when is_binary(file) do
  try do
    {:ok, parse_file!(file)}
  rescue
    # Invalid s-expression content.
    error in [ArgumentError] ->
      Logger.error "Failed to parse expression in file: '#{file}' with error: '#{inspect error}'"
      {:error, :bad_arg}
    # Unreadable / missing file.
    error in [File.Error] ->
      Logger.error "Failed to parse expression in file: '#{file}' with error: '#{inspect error}'"
      {:error, :bad_file}
  end
end
@doc """
Like `parse_file/1` except raises `ArgumentError` when the string does not
contain a valid s-expression or `File.Error` if the file can't be read.
"""
def parse_file!(file) when is_binary(file) do
  contents = file |> Path.expand() |> File.read!()
  parse!(contents)
end
# Recursive state machine over %State{}. Fields (as used here):
#   expression  - unconsumed input
#   token       - characters accumulated for the current length/type/term
#   in_term     - currently consuming a fixed-length term body
#   in_type     - currently consuming a bracketed [len:type] annotation
#   term_length - remaining bytes of the current term
#   paren_count - depth of open parens
#   result      - stack of partial lists; the head is the innermost scope
# Each clause consumes a prefix of `expression` and recurses.

# New scope: push an empty list onto the result stack.
defp _parse!(s = %State{expression: "(" <> rest, in_term: false, paren_count: count, result: result}) when count > 0 or result == [[]] do
  _parse! %State{s | expression: rest, paren_count: count + 1, result: [[] | s.result]}
end
# End scope with no current term: pop the finished list and append it to
# its parent scope.
defp _parse!(s = %State{expression: ")" <> rest, token: "", in_term: false, paren_count: count, result: [first, second | tail]}) when count > 0 do
  _parse! %State{s | expression: rest, paren_count: count - 1, result: [second ++ [first] | tail]}
end
# Start type annotation ("[" introduces a [len:mime] display hint).
defp _parse!(s = %State{expression: "[" <> rest, in_term: false, in_type: false}) do
  _parse! %State{s | expression: rest, in_type: true}
end
# # End scope with no current term.
# defp _parse!(s = %State{expression: "]" <> rest, in_term: false, in_type: true}) do
#   _parse! %State{s | expression: rest, in_type: false}
# end
# End length prefix: `token` holds the decimal length, ":" separates it
# from the term body that follows.
defp _parse!(s = %State{expression: ":" <> rest, in_term: false}) do
  length = parse_length s.token
  _parse! %State{s | expression: rest, token: "", in_term: true, term_length: length}
end
# Done parsing type: last character of the type plus the closing "]".
defp _parse!(s = %State{expression: << c :: utf8, ?] >> <> rest, in_term: true, in_type: true, term_length: 1}) do
  type = s.token <> <<c>>
  _parse! %State{s | expression: rest, token: "", in_term: false, in_type: false, term_length: 0, type: type}
end
# Done parsing term: consume the final character, convert the token and
# append it to the innermost scope.
defp _parse!(s = %State{expression: << c :: utf8 >> <> rest, in_term: true, term_length: 1, result: [head | tail]}) do
  processed = process(s.token <> <<c>>, s.type)
  _parse! %State{s | expression: rest, token: "", type: "", in_term: false, term_length: 0, result: [head ++ [processed] | tail]}
end
# Grab next character of an in-progress term, counting down its length.
defp _parse!(s = %State{expression: << c :: utf8 >> <> rest, in_term: true, term_length: length}) when length > 1 do
  _parse! %State{s | expression: rest, token: s.token <> <<c>>, term_length: length - 1}
end
# Append character to the current token (length prefix or type name).
defp _parse!(s = %State{expression: << c :: utf8 >> <> rest}) do
  _parse! %State{s | expression: rest, token: s.token <> <<c>>}
end
# Base case: input exhausted, all scopes closed - unwrap the outer list.
defp _parse!(%State{expression: "", token: "", in_term: false, paren_count: 0, result: [[head | _]| _]}), do: head
# Catch all for errors: any state not matched above is malformed input.
defp _parse!(s) do
  raise ArgumentError, message: """
  Invalid s-expression with

  remaining exp: #{inspect s.expression}
  token: #{inspect s.token}
  in term: #{inspect s.in_term}
  result #{inspect s.result}
  """
end
# Parses a decimal term-length prefix.
#
# Returns the positive integer length, or raises `ArgumentError` when the
# string is zero, negative, or not a bare integer. (Previously a negative
# prefix such as "-3" was accepted and only failed much later in the
# catch-all `_parse!/1` clause with an unhelpful message.)
defp parse_length(string) when is_binary(string) do
  string |> Integer.parse |> _parse_length(string)
end

defp _parse_length({0, _}, _) do
  raise ArgumentError, message: "Term length of 0 is not allowed."
end

# Only a fully-consumed, strictly positive integer is a valid length.
defp _parse_length({length, ""}, _) when length > 0, do: length

defp _parse_length(_, string) do
  raise ArgumentError, message: "Expected a term length, got '#{string}'"
end
# Interprets a raw term string: an explicit type wins; otherwise try integer,
# then float, and finally fall back to an atom. Each untyped parser returns
# `nil` on "not mine", so the first non-nil result is kept.
defp process(term, type) do
  with nil <- process_from_type(term, type),
       nil <- process_int(term, type),
       nil <- process_float(term, type) do
    process_atom(term, type)
  end
end
# Returns the term unchanged for plain UTF-8 text, `nil` when no type was
# declared (letting the untyped parsers run), and raises for any other
# media type. (Fixed "Unabled" typo in the error message.)
defp process_from_type(_term, ""), do: nil
defp process_from_type(term, "text/plain;charset=utf-8"), do: term
defp process_from_type(term, type) do
  raise ArgumentError, message: "Unable to process type: '#{inspect type}' for term: '#{inspect term}'"
end
# Returns the integer value of an untyped term, or `nil` when the term is
# not a bare integer (partial parses like "12ab" also yield `nil`).
defp process_int(term, "") do
  with {int, ""} <- Integer.parse(term) do
    int
  else
    _ -> nil
  end
end
# Returns the float value of an untyped term, or `nil` when the term is
# not a bare float (partial parses also yield `nil`).
defp process_float(term, "") do
  with {float, ""} <- Float.parse(term) do
    float
  else
    _ -> nil
  end
end
# Last-resort interpretation: any untyped term becomes an atom.
# NOTE(review): `String.to_atom/1` on unbounded input can exhaust the atom
# table (atoms are never GC'd); consider `String.to_existing_atom/1` if
# expressions may come from untrusted sources — TODO confirm callers.
defp process_atom(term, ""), do: String.to_atom(term)
end
|
lib/symbolic_expression/canonical/parser.ex
| 0.848533
| 0.648327
|
parser.ex
|
starcoder
|
## matched at unqualified no parentheses call
@one @two 3 do
end
## matched dot call
@one two.() do
end
## matched qualified no arguments call
@one Two.three do
end
## matched qualified no parentheses call
@one Two.three 4 do
end
## matched qualified parentheses call
@one Two.three() do
end
## matched unqualified no arguments call
@one two do
end
## matched unqualified no parentheses call
@one two 3 do
end
## matched unqualified parentheses call
@one two() do
end
# Unmatched dot calls
## matched at unqualified no parentheses call
one.(@two 3) do
end
## matched dot call
one.(two.()) do
end
## matched qualified no arguments call
one.(Two.three) do
end
## matched qualified no parentheses call
one.(Two.three 4) do
end
## matched qualified parentheses call
one.(Two.three()) do
end
## matched unqualified no arguments call
one.(two) do
end
## matched unqualified no parentheses call
one.(two 3) do
end
## matched unqualified parentheses call
one.(two()) do
end
# Unmatched At Unqualified No Arguments Call
One.two do
end
# Unmatched Qualified No Parentheses Calls
## matched at unqualified no parentheses call
One.two @three 4 do
end
## matched dot call
One.two three.() do
end
## matched qualified no arguments call
One.two Three.four do
end
## matched qualified no parentheses call
One.two Three.four 5 do
end
## matched qualified parentheses call
One.two Three.four() do
end
## matched unqualified no arguments call
One.two three do
end
## matched unqualified no parentheses call
One.two three 4 do
end
## matched unqualified parentheses call
One.two three() do
end
# Unmatched Qualified Parentheses Calls
## matched at unqualified no parentheses call
One.two(@three 4) do
end
## matched dot call
One.two(three.()) do
end
## matched qualified no arguments call
One.two(Three.four) do
end
## matched qualified no parentheses call
One.two(Three.four 5) do
end
## matched qualified parentheses call
One.two(Three.four()) do
end
## matched unqualified no arguments call
One.two(three) do
end
## matched unqualified no parentheses call
One.two(three 4) do
end
## matched unqualified parentheses call
One.two(three()) do
end
|
testData/org/elixir_lang/formatting/no_spaces_around_dot_operator.ex
| 0.658527
| 0.604487
|
no_spaces_around_dot_operator.ex
|
starcoder
|
defmodule Stripe.Refund do
  @moduledoc """
  Work with [Stripe `refund` objects](https://stripe.com/docs/api#refund_object).

  You can:

  - [Create a refund](https://stripe.com/docs/api#create_refund)
  - [Retrieve a refund](https://stripe.com/docs/api#retrieve_refund)
  - [Update a refund](https://stripe.com/docs/api#update_refund)
  - [List all refunds](https://stripe.com/docs/api#list_refunds)
  """

  use Stripe.Entity
  import Stripe.Request

  @type t :: %__MODULE__{
          id: Stripe.id(),
          object: String.t(),
          amount: non_neg_integer,
          balance_transaction: Stripe.id() | Stripe.BalanceTransaction.t() | nil,
          charge: Stripe.id() | Stripe.Charge.t() | nil,
          created: Stripe.timestamp(),
          currency: String.t(),
          failure_balance_transaction: Stripe.id() | Stripe.BalanceTransaction.t() | nil,
          failure_reason: String.t() | nil,
          metadata: Stripe.Types.metadata(),
          payment: Stripe.id() | Stripe.Charge.t() | nil,
          reason: String.t() | nil,
          receipt_number: String.t() | nil,
          status: String.t() | nil
        }

  defstruct [
    :id,
    :object,
    :amount,
    :balance_transaction,
    :charge,
    :created,
    :currency,
    :failure_balance_transaction,
    :failure_reason,
    :metadata,
    :payment,
    :reason,
    :receipt_number,
    :status
  ]

  @plural_endpoint "refunds"

  @doc """
  Create a refund.

  When you create a new refund, you must specify a charge to create it on.
  Creating a new refund will refund a charge that has previously been created
  but not yet refunded. Funds will be refunded to the credit or debit card
  that was originally charged.

  You can optionally refund only part of a charge. You can do so as many times
  as you wish until the entire charge has been refunded.

  Once entirely refunded, a charge can't be refunded again. This method will
  return an error when called on an already-refunded charge, or when trying to
  refund more money than is left on a charge.

  See the [Stripe docs](https://stripe.com/docs/api#create_refund).
  """
  @spec create(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params: %{
               :charge => Stripe.Charge.t() | Stripe.id(),
               optional(:amount) => pos_integer,
               optional(:metadata) => Stripe.Types.metadata(),
               optional(:reason) => String.t(),
               optional(:refund_application_fee) => boolean,
               optional(:reverse_transfer) => boolean
             } | %{}
  def create(params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint)
    |> put_method(:post)
    |> put_params(params)
    |> cast_to_id([:charge])
    |> make_request()
  end

  @doc """
  Retrieve a refund.

  Retrieves the details of an existing refund.

  See the [Stripe docs](https://stripe.com/docs/api#retrieve_refund).
  """
  @spec retrieve(Stripe.id() | t, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
  def retrieve(id, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_method(:get)
    |> make_request()
  end

  @doc """
  Update a refund.

  Updates the specified refund by setting the values of the parameters passed.
  Any parameters not provided will be left unchanged.

  This request only accepts `:metadata` as an argument.

  See the [Stripe docs](https://stripe.com/docs/api#update_refund).
  """
  @spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params: %{
               optional(:metadata) => Stripe.Types.metadata()
             } | %{}
  def update(id, params, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_method(:post)
    |> put_params(params)
    |> make_request()
  end

  @doc """
  List all refunds.

  Returns a list of all refunds you’ve previously created. The refunds are
  returned in sorted order, with the most recent refunds appearing first. For
  convenience, the 10 most recent refunds are always available by default on
  the charge object.

  See the [Stripe docs](https://stripe.com/docs/api#list_refunds).
  """
  # Fixed spec typo: the filter key is `:charge` (as cast below and per the
  # Stripe list-refunds API), not `:charget`.
  @spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
        when params: %{
               optional(:charge) => Stripe.id() | Stripe.Charge.t(),
               optional(:ending_before) => t | Stripe.id(),
               optional(:limit) => 1..100,
               optional(:starting_after) => t | Stripe.id()
             } | %{}
  def list(params \\ %{}, opts \\ []) do
    new_request(opts)
    |> prefix_expansions()
    |> put_endpoint(@plural_endpoint)
    |> put_method(:get)
    |> put_params(params)
    |> cast_to_id([:charge, :ending_before, :starting_after])
    |> make_request()
  end
end
|
lib/stripe/core_resources/refund.ex
| 0.778902
| 0.475301
|
refund.ex
|
starcoder
|
defmodule Bypass do
  @external_resource "README.md"
  @moduledoc "README.md"
             |> File.read!()
             |> String.split("<!-- MDOC !-->")
             |> Enum.fetch!(1)

  defstruct pid: nil, port: nil

  @typedoc """
  Represents a Bypass server process.
  """
  @type t :: %__MODULE__{pid: pid, port: non_neg_integer}

  import Bypass.Utils
  require Logger

  @doc """
  Starts an Elixir process running a minimal Plug app. The process is a HTTP
  handler and listens to requests on a TCP port on localhost.

  Use the other functions in this module to declare which requests are handled
  and set expectations on the calls.

  ## Options

  - `port` - Optional TCP port to listen to requests.

  ## Examples

  ```elixir
  bypass = Bypass.open()
  ```

  Assign a specific port to a Bypass instance to listen on:

  ```elixir
  bypass = Bypass.open(port: 1234)
  ```
  """
  @spec open(Keyword.t()) :: Bypass.t() | DynamicSupervisor.on_start_child()
  def open(opts \\ []) do
    case DynamicSupervisor.start_child(Bypass.Supervisor, Bypass.Instance.child_spec(opts)) do
      {:ok, pid} ->
        port = Bypass.Instance.call(pid, :port)
        debug_log("Did open connection #{inspect(pid)} on port #{inspect(port)}")
        bypass = %Bypass{pid: pid, port: port}
        # Hook expectation verification into the detected test framework so it
        # runs automatically when the test exits.
        setup_framework_integration(test_framework(), bypass)
        bypass

      other ->
        other
    end
  end

  # ExUnit: verify all declared expectations when the test process exits.
  defp setup_framework_integration(:ex_unit, bypass = %{pid: pid}) do
    ExUnit.Callbacks.on_exit({Bypass, pid}, fn ->
      do_verify_expectations(bypass.pid, ExUnit.AssertionError)
    end)
  end

  # ESpec: no automatic hook; callers use `verify_expectations!/1` instead.
  defp setup_framework_integration(:espec, _bypass) do
  end

  @doc """
  Can be called to immediately verify if the declared request expectations have
  been met.

  Returns `:ok` on success and raises an error on failure.
  """
  @spec verify_expectations!(Bypass.t()) :: :ok | no_return()
  def verify_expectations!(bypass) do
    verify_expectations!(test_framework(), bypass)
  end

  defp verify_expectations!(:ex_unit, _bypass) do
    raise "Not available in ExUnit, as it's configured automatically."
  end

  # Only compiled when ESpec is available as a dependency.
  if Code.ensure_loaded?(ESpec) do
    defp verify_expectations!(:espec, bypass) do
      do_verify_expectations(bypass.pid, ESpec.AssertionError)
    end
  end

  # Asks the Bypass instance whether every expectation was satisfied and
  # raises the framework-specific assertion error when one was not.
  defp do_verify_expectations(bypass_pid, error_module) do
    case Bypass.Instance.call(bypass_pid, :on_exit) do
      :ok ->
        :ok

      :ok_call ->
        :ok

      {:error, :too_many_requests, {:any, :any}} ->
        raise error_module, "Expected only one HTTP request for Bypass"

      {:error, :too_many_requests, {method, path}} ->
        raise error_module, "Expected only one HTTP request for Bypass at #{method} #{path}"

      {:error, :unexpected_request, {:any, :any}} ->
        raise error_module, "Bypass got an HTTP request but wasn't expecting one"

      {:error, :unexpected_request, {method, path}} ->
        raise error_module,
              "Bypass got an HTTP request but wasn't expecting one at #{method} #{path}"

      {:error, :not_called, {:any, :any}} ->
        raise error_module, "No HTTP request arrived at Bypass"

      {:error, :not_called, {method, path}} ->
        raise error_module,
              "No HTTP request arrived at Bypass at #{method} #{path}"

      {:exit, {class, reason, stacktrace}} ->
        # Re-raise an exit captured inside the instance with its original
        # class and stacktrace.
        :erlang.raise(class, reason, stacktrace)
    end
  end

  @doc """
  Re-opens the TCP socket on the same port. Blocks until the operation is
  complete.

  ```elixir
  Bypass.up(bypass)
  ```
  """
  @spec up(Bypass.t()) :: :ok | {:error, :already_up}
  def up(%Bypass{pid: pid}),
    do: Bypass.Instance.call(pid, :up)

  @doc """
  Closes the TCP socket. Blocks until the operation is complete.

  ```elixir
  Bypass.down(bypass)
  ```
  """
  @spec down(Bypass.t()) :: :ok | {:error, :already_down}
  def down(%Bypass{pid: pid}),
    do: Bypass.Instance.call(pid, :down)

  @doc """
  Expects the passed function to be called at least once regardless of the route.

  ```elixir
  Bypass.expect(bypass, fn conn ->
    assert "/1.1/statuses/update.json" == conn.request_path
    assert "POST" == conn.method
    Plug.Conn.resp(conn, 429, ~s<{"errors": [{"code": 88, "message": "Rate limit exceeded"}]}>)
  end)
  ```
  """
  @spec expect(Bypass.t(), (Plug.Conn.t() -> Plug.Conn.t())) :: :ok
  def expect(%Bypass{pid: pid}, fun),
    do: Bypass.Instance.call(pid, {:expect, fun})

  @doc """
  Expects the passed function to be called at least once for the specified route (method and path).

  - `method` is one of `["GET", "POST", "HEAD", "PUT", "PATCH", "DELETE", "OPTIONS", "CONNECT"]`

  - `path` is the endpoint.

  ```elixir
  Bypass.expect(bypass, "POST", "/1.1/statuses/update.json", fn conn ->
    Agent.get_and_update(AgentModule, fn step_no -> {step_no, step_no + 1} end)
    Plug.Conn.resp(conn, 429, ~s<{"errors": [{"code": 88, "message": "Rate limit exceeded"}]}>)
  end)
  ```
  """
  @spec expect(Bypass.t(), String.t(), String.t(), (Plug.Conn.t() -> Plug.Conn.t())) :: :ok
  def expect(%Bypass{pid: pid}, method, path, fun),
    do: Bypass.Instance.call(pid, {:expect, method, path, fun})

  @doc """
  Expects the passed function to be called exactly once regardless of the route.

  ```elixir
  Bypass.expect_once(bypass, fn conn ->
    assert "/1.1/statuses/update.json" == conn.request_path
    assert "POST" == conn.method
    Plug.Conn.resp(conn, 429, ~s<{"errors": [{"code": 88, "message": "Rate limit exceeded"}]}>)
  end)
  ```
  """
  @spec expect_once(Bypass.t(), (Plug.Conn.t() -> Plug.Conn.t())) :: :ok
  def expect_once(%Bypass{pid: pid}, fun),
    do: Bypass.Instance.call(pid, {:expect_once, fun})

  @doc """
  Expects the passed function to be called exactly once for the specified route (method and path).

  - `method` is one of `["GET", "POST", "HEAD", "PUT", "PATCH", "DELETE", "OPTIONS", "CONNECT"]`

  - `path` is the endpoint.

  ```elixir
  Bypass.expect_once(bypass, "POST", "/1.1/statuses/update.json", fn conn ->
    Agent.get_and_update(AgentModule, fn step_no -> {step_no, step_no + 1} end)
    Plug.Conn.resp(conn, 429, ~s<{"errors": [{"code": 88, "message": "Rate limit exceeded"}]}>)
  end)
  ```
  """
  @spec expect_once(Bypass.t(), String.t(), String.t(), (Plug.Conn.t() -> Plug.Conn.t())) :: :ok
  def expect_once(%Bypass{pid: pid}, method, path, fun),
    do: Bypass.Instance.call(pid, {:expect_once, method, path, fun})

  @doc """
  Allows the function to be invoked zero or many times for the specified route (method and path).

  - `method` is one of `["GET", "POST", "HEAD", "PUT", "PATCH", "DELETE", "OPTIONS", "CONNECT"]`

  - `path` is the endpoint.

  ```elixir
  Bypass.stub(bypass, "POST", "/1.1/statuses/update.json", fn conn ->
    Agent.get_and_update(AgentModule, fn step_no -> {step_no, step_no + 1} end)
    Plug.Conn.resp(conn, 429, ~s<{"errors": [{"code": 88, "message": "Rate limit exceeded"}]}>)
  end)
  ```
  """
  @spec stub(Bypass.t(), String.t(), String.t(), (Plug.Conn.t() -> Plug.Conn.t())) :: :ok
  def stub(%Bypass{pid: pid}, method, path, fun),
    do: Bypass.Instance.call(pid, {:stub, method, path, fun})

  @doc """
  Makes an expectation pass.

  ```elixir
  Bypass.expect(bypass, fn _conn ->
    Bypass.pass(bypass)
    assert false
  end)
  ```
  """
  @spec pass(Bypass.t()) :: :ok
  def pass(%Bypass{pid: pid}),
    do: Bypass.Instance.call(pid, :pass)

  # The test framework integration to use, configurable via the
  # `:test_framework` key of the `:bypass` application environment.
  defp test_framework do
    Application.get_env(:bypass, :test_framework, :ex_unit)
  end
end
|
lib/bypass.ex
| 0.843541
| 0.643693
|
bypass.ex
|
starcoder
|
defmodule Tube.Frame do
  use Bitwise

  @moduledoc """
  Represents a full frame of the WebSocket protocol

  ## Struct

  ### `fin`

  Indicates that this is the final fragment in a message. The first
  fragment MAY also be the final fragment.

  ### `opcode`

  Defines the interpretation of the "Payload data". If an unknown
  opcode is received, the receiving endpoint MUST _Fail the
  WebSocket Connection_. The following values are defined.

  * 0x0 denotes a continuation frame
  * 0x1 denotes a text frame
  * 0x2 denotes a binary frame
  * 0x3-7 are reserved for further non-control frames
  * 0x8 denotes a connection close
  * 0x9 denotes a ping
  * 0xA denotes a pong
  * 0xB-F are reserved for further control frames

  ### `mask` and `mask_key`

  If mask is true, the `mask_key` will be a 4 byte long key. This will be used
  to unmask the payload data from client to server.

  ### `len`

  Length of the payload

  ### `payload`

  Binary of the frame's application data

  ### `control_frame?`

  If true, this frame's opcode means that this is a control frame.
  Control frames can be interleaved into fragmented messages.
  """
  defstruct [fin: true, opcode: 0, mask: false, len: 0, mask_key: "", payload: <<>>, control_frame?: false]

  @doc """
  Parses the given `binary` into a `%Tube.Frame{}` struct.

  ## Example

  ```
  iex(1)> Tube.Frame.parse(<<129, 139, 71, 28, 66, 60, 15, 121, 46, 80, 40, 60, 21, 83, 53, 112, 38>>)
  {:ok,
   %Tube.Frame{control_frame?: false, fin: true, len: 11, mask: 1,
    mask_key: <<71, 28, 66, 60>>, opcode: 1, payload: "Hello World"}, ""}
  ```

  ## Returns

  When parsed with no issues, it will return

  `{:ok, %Tube.Frame{}, rest}`

  `rest` will contain superfluous bytes that are not part of the frame and should
  be kept until more TCP chops arrive.

  If there was an error,

  `{:error, reason}`

  will be returned
  """
  @spec parse(binary) :: {:ok, struct(), binary} | {:error, term}
  # BUGFIX: the guards were previously `when is_binary(binary) when
  # byte_size(binary) >= 2`, which Erlang/Elixir treats as a boolean OR of
  # guard sequences — every binary (even 0 or 1 bytes) matched this clause and
  # produced `{:error, :invalid_header}` instead of `{:error,
  # :incomplete_header}`. `and` gives the intended conjunction.
  def parse(binary) when is_binary(binary) and byte_size(binary) >= 2 do
    case binary do
      <<fin::size(1),
        0::integer-size(3), # RFU
        opcode::integer-size(4),
        mask::size(1),
        len::integer-size(7), rest::binary>> ->
        # Bit 3 of the opcode distinguishes control frames (0x8..0xF).
        control_frame? = (opcode &&& 0b1000) > 0

        # A 7-bit length of 126/127 signals an extended 16/64-bit length field.
        {len, rest} = case len do
          126 ->
            << _::size(16),
               len::integer-size(16),
               rest::binary>> = binary
            {len, rest}
          127 ->
            << _::size(16),
               len::integer-size(64),
               rest::binary>> = binary
            {len, rest}
          len -> {len, rest}
        end

        # Split off the (optional) mask key and the payload, signalling
        # :error when not enough bytes have arrived yet.
        case (case mask do
          0 ->
            if byte_size(rest) >= len do
              <<payload::binary-size(len),
                rest::binary>> = rest
              {:ok, "", payload, rest}
            else
              :error
            end
          1 ->
            if byte_size(rest) >= len + 4 do
              <<mask_key::binary-size(4),
                payload::binary-size(len),
                rest::binary>> = rest
              {:ok, mask_key, payload, rest}
            else
              :error
            end
        end) do
          {:ok, mask_key, payload, rest} ->
            payload = if mask == 1 do
              mask_payload(payload, mask_key)
            else
              payload
            end

            fin = fin == 1

            # RFC 6455: control frames must not be fragmented and must carry
            # a payload of at most 125 bytes.
            case {fin, control_frame?, len} do
              {false, true, _} -> {:error, :invalid_header}
              {_, true, len} when len >= 126 -> {:error, :invalid_header}
              {_, _, _} ->
                {:ok, %__MODULE__{
                  fin: fin,
                  opcode: opcode,
                  control_frame?: control_frame?,
                  mask: mask,
                  len: len,
                  mask_key: mask_key,
                  payload: payload
                }, rest}
            end
          :error ->
            {:error, :not_enough_payload} # this means that the next tcp frame will have more payload
        end
      _ ->
        # Removed a leftover `IO.inspect(binary)` debug call here.
        {:error, :invalid_header} # this means that the payload is actually part of an older message
    end
  end
  def parse(_), do: {:error, :incomplete_header}

  @doc """
  Applies the mask to the given payload.

  This can be done to either mask or unmask the payload.
  """
  @spec mask_payload(binary, binary) :: binary
  def mask_payload(payload, mask_key) do
    <<a::integer-size(8), b::integer-size(8), c::integer-size(8), d::integer-size(8)>> = mask_key
    __mask_payload(payload, [a, b, c, d], 0, "")
  end

  # XORs each payload byte with the mask byte at position i mod 4.
  defp __mask_payload("", _, _, masked), do: masked
  defp __mask_payload(<<first::integer-size(8), rest::binary>>, mask_key, i, masked) do
    mask = Enum.at(mask_key, rem(i, 4))
    first = bxor(first, mask)
    __mask_payload(rest, mask_key, i + 1, masked <> <<first::integer-size(8)>>)
  end

  @doc """
  Converts the #{__MODULE__} struct to a binary.

  If the given frame has a `mask_key`, it will apply this key.
  """
  @spec to_binary(struct()) :: binary
  def to_binary(%__MODULE__{} = struct) do
    struct = %{struct | mask: struct.mask_key != ""}

    # Encode the payload length as 7, 7+16 or 7+64 bits per RFC 6455.
    len = case byte_size(struct.payload) do
      len when len >= 1 <<< 16 ->
        << 127::integer-size(7), len::integer-size(64) >>
      len when len >= 126 ->
        << 126::integer-size(7), len::integer-size(16) >>
      len ->
        << len::integer-size(7) >>
    end

    fin = if struct.fin, do: 1, else: 0
    mask = if struct.mask, do: 1, else: 0
    opcode = struct.opcode
    mask_key = struct.mask_key
    payload = struct.payload |> mask_payload(mask_key)

    len_size = bit_size(len)
    payload_size = byte_size(payload)
    mask_size = byte_size(mask_key)

    << fin::size(1),
       0::integer-size(3), # RFU
       opcode::integer-size(4),
       mask::size(1),
       len::binary-size(len_size)-unit(1),
       mask_key::binary-size(mask_size),
       payload::binary-size(payload_size) >>
  end

  @doc """
  Generates a random mask using `:crypto.strong_rand_bytes/1` and adds it to the
  given frame
  """
  @spec put_mask(struct()) :: struct()
  def put_mask(%__MODULE__{} = struct) do
    mask = :crypto.strong_rand_bytes(4)
    %{struct |
      mask_key: mask,
      mask: 1
    }
  end
end
|
lib/frame/frame.ex
| 0.852322
| 0.821725
|
frame.ex
|
starcoder
|
defmodule AtomTweaksWeb.PageMetadata do
  @moduledoc """
  A system for easily setting metadata for the page before it is rendered.

  If you assign some metadata to the page before it is rendered:

  ```
  iex> PageMetadata.add(conn, foo: "bar")
  iex> hd(conn.assigns.page_metadata)
  [foo: "bar"]
  ```

  And add the `PageMetadata.render/1` call to the `head` section of the layout template:

  ```elixir
  <%= PageMetadata.render(@conn) %>
  ```

  It will show up in the rendered page:

  ```html
  <html>
  <head>
  <meta foo="bar">
  </head>
  <body>
  </body>
  </html>
  ```
  """
  use Phoenix.HTML

  alias AtomTweaksWeb.PageMetadata.Metadata
  alias Plug.Conn

  @doc """
  Adds metadata to the page.

  The metadata for any type that implements the
  `AtomTweaksWeb.PageMetadata.Metadata` protocol can be added to a page.
  """
  @spec add(Plug.Conn.t(), Metadata.t() | nil) :: Plug.Conn.t()
  def add(conn, metadata)

  def add(conn, nil), do: conn
  def add(conn, metadata), do: do_add(conn, Metadata.to_metadata(metadata))

  @doc """
  Renders the metadata for the page, if it was set.

  Each item of metadata becomes an individual `meta` tag. Call this in the
  `head` section of the page layout template, typically
  `app_name_web/templates/layout/app.html.eex`. Returns `nil` when no
  metadata was assigned.
  """
  @spec render(Plug.Conn.t()) :: Phoenix.HTML.safe() | nil
  def render(conn) do
    with metadata when not is_nil(metadata) <- conn.assigns[:page_metadata] do
      Enum.map(metadata, &tag(:meta, &1))
    end
  end

  # Metadata every page receives when none has been assigned yet.
  defp default_metadata(conn) do
    [
      [property: "og:url", content: "#{conn.scheme}://#{conn.host}#{conn.request_path}"],
      [property: "og:site_name", content: Application.get_env(:atom_tweaks, :site_name)]
    ]
  end

  # An empty item just seeds the assign with the current (or default) list;
  # a keyword list is prepended as one entry; any other list is treated as a
  # collection of metadata items and added one by one.
  defp do_add(conn, []), do: Conn.assign(conn, :page_metadata, get(conn))

  defp do_add(conn, list) do
    case Keyword.keyword?(list) do
      true -> Conn.assign(conn, :page_metadata, [list | get(conn)])
      false -> Enum.reduce(list, conn, fn item, acc -> add(acc, item) end)
    end
  end

  # Currently assigned metadata, falling back to the defaults.
  defp get(conn), do: conn.assigns[:page_metadata] || default_metadata(conn)
end
|
lib/atom_tweaks_web/page_metadata.ex
| 0.777427
| 0.635208
|
page_metadata.ex
|
starcoder
|
defmodule Cldr.Number.Transliterate do
  @moduledoc """
  Transliteration for digits and separators.

  Transliterating a string is an expensive business. First the string has to
  be exploded into its component graphemes. Then for each grapheme we have
  to map to the equivalent in the other `{locale, number_system}`. Then we
  have to reassemble the string.

  Effort is made to short circuit where possible. Transliteration is not
  required for any `{locale, number_system}` that is the same as `{"en",
  "latn"}` since the implementation uses this combination for the placeholders during
  formatting already. When short circuiting is possible (typically the en-*
  locales with "latn" number_system - the total number of short circuited
  locales is 211 of the 537 in CLDR) the overall number formatting is twice as
  fast than when formal transliteration is required.

  ### Configuring precompilation of digit transliterations

  This module includes `Cldr.Number.Transliterate.transliterate_digits/3` which transliterates
  digits between number systems. For example from :arabic to :latn. Since generating a
  transliteration map is slow, pairs of transliterations can be configured so that the
  transliteration map is created at compile time and therefore speeding up transliteration at
  run time.

  To configure these transliteration pairs, add the following to your backend configuration:

      defmodule MyApp.Cldr do
        use Cldr,
          locale: ["en", "fr", "th"],
          default_locale: "en",
          precompile_transliterations: [{:latn, :thai}, {:arab, :thai}]
      end

  Where each tuple in the list configures one transliteration map. In this example, two maps are
  configured: from :latn to :thai and from :arab to :thai.

  A list of configurable number systems is returned by `Cldr.Number.System.systems_with_digits/0`.

  If a transliteration is requested between two number pairs that have not been configured for
  precompilation, a warning is logged.
  """
  require Logger
  alias Cldr.Number.System

  @doc """
  Transliterates from latin digits to another number system's digits.

  Transliterates the latin digits 0..9 to their equivalents in
  another number system. Also transliterates the decimal and grouping
  separators as well as the plus, minus and exponent symbols. Any other character
  in the string will be returned "as is".

  * `sequence` is the string to be transliterated.

  * `locale` is any known locale, defaulting to `Cldr.get_locale/0`.

  * `number_system` is any known number system. If expressed as a `string` it
    is the actual name of a known number system. If epressed as an `atom` it is
    used as a key to look up a number system for the locale (the usual keys are
    `:default` and `:native` but :traditional and :finance are also part of the
    standard). See `Cldr.Number.System.number_systems_for/2` for a locale to
    see what number system types are defined. The default is `:default`.

  For available number systems see `Cldr.Number.System.number_systems/0`
  and `Cldr.Number.System.number_systems_for/2`. Also see
  `Cldr.Number.Symbol.number_symbols_for/2`.

  ## Examples

      iex> Cldr.Number.Transliterate.transliterate("123556", "en", :default, TestBackend.Cldr)
      "123556"

      iex> Cldr.Number.Transliterate.transliterate("123,556.000", "fr", :default, TestBackend.Cldr)
      "123 556,000"

      iex> Cldr.Number.Transliterate.transliterate("123556", "th", :default, TestBackend.Cldr)
      "123556"

      iex> Cldr.Number.Transliterate.transliterate("123556", "th", "thai", TestBackend.Cldr)
      "๑๒๓๕๕๖"

      iex> Cldr.Number.Transliterate.transliterate("123556", "th", :native, TestBackend.Cldr)
      "๑๒๓๕๕๖"

      iex> Cldr.Number.Transliterate.transliterate("Some number is: 123556", "th", "thai", TestBackend.Cldr)
      "Some number is: ๑๒๓๕๕๖"

  """
  def transliterate(sequence, locale, number_system, backend) do
    Module.concat(backend, Number.Transliterate).transliterate(sequence, locale, number_system)
  end

  # Identical source and target systems: nothing to transliterate.
  def transliterate_digits(digits, from_system, from_system) do
    digits
  end

  def transliterate_digits(digits, from_system, to_system) when is_binary(digits) do
    # `with` without `else` returns a non-matching `{:error, reason}` as-is,
    # so the previous pass-through `else` clause was redundant.
    with {:ok, from} <- System.number_system_digits(from_system),
         {:ok, to} <- System.number_system_digits(to_system) do
      # Fixed the module name in this message: the module is
      # `Cldr.Number.Transliterate`, not `Cldr.Number.Transliteration`.
      log_warning(
        "Transliteration from number system #{inspect(from_system)} to " <>
          "#{inspect(to_system)} requires dynamic generation of a transliteration map for " <>
          "each function call which is slow. Please consider configuring this transliteration pair. " <>
          "See `Cldr.Number.Transliterate` for further information."
      )

      map = System.generate_transliteration_map(from, to)
      do_transliterate_digits(digits, map)
    end
  end

  # Use Logger.warning/2 where available (Logger.warn/1 is deprecated on
  # newer Elixir versions).
  if macro_exported?(Logger, :warning, 2) do
    defp log_warning(message) do
      Logger.warning(fn -> message end)
    end
  else
    defp log_warning(message) do
      Logger.warn(message)
    end
  end

  # Maps each grapheme through the transliteration map, leaving unknown
  # graphemes untouched.
  defp do_transliterate_digits(digits, map) do
    digits
    |> String.graphemes()
    |> Enum.map_join(&Map.get(map, &1, &1))
  end
end
|
lib/cldr/number/transliterate.ex
| 0.886282
| 0.787155
|
transliterate.ex
|
starcoder
|
defmodule Blockchain.Block do
@moduledoc """
This module effectively encodes a block, the heart of the blockchain.
A chain is formed when blocks point to previous blocks,
either as a parent or an ommer (uncle).
For more information, see Section 4.3 of the Yellow Paper.
"""
alias Block.Header
alias Blockchain.{Account, Chain, Transaction}
alias Blockchain.Account.Repo
alias Blockchain.Block.HolisticValidity
alias Blockchain.Transaction.Receipt
alias Blockchain.Transaction.Receipt.Bloom
alias ExthCrypto.Hash.Keccak
alias MerklePatriciaTree.{DB, Trie}
# Defined in Eq.(19)
# block_hash: Hash for this block, acts simply as a cache,
# header: B_H,
# transactions: B_T,
# ommers: B_U
defstruct block_hash: nil,
header: %Header{},
transactions: [],
receipts: [],
ommers: []
@type t :: %__MODULE__{
block_hash: EVM.hash() | nil,
header: Header.t(),
transactions: [Transaction.t()],
receipts: [Receipt.t()],
ommers: [Header.t()]
}
@block_reward_ommer_divisor 32
@block_reward_ommer_offset 8
@doc """
Encodes a block such that it can be represented in RLP encoding.
This is defined as `L_B` Eq.(35) in the Yellow Paper.
## Examples
iex> Blockchain.Block.serialize(%Blockchain.Block{
...> header: %Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>},
...> transactions: [%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}],
...> ommers: [%Block.Header{parent_hash: <<11::256>>, ommers_hash: <<12::256>>, beneficiary: <<13::160>>, state_root: <<14::256>>, transactions_root: <<15::256>>, receipts_root: <<16::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<17::256>>, nonce: <<18::64>>}]
...> })
[
[<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, 5, 1, 5, 3, 6, "Hi mom", <<7::256>>, <<8::64>>],
[[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
[[<<11::256>>, <<12::256>>, <<13::160>>, <<14::256>>, <<15::256>>, <<16::256>>, <<>>, 5, 1, 5, 3, 6, "Hi mom", <<17::256>>, <<18::64>>]]
]
iex> Blockchain.Block.serialize(%Blockchain.Block{})
[
[
nil,
<<29, 204, 77, 232, 222, 199, 93, 122, 171, 133, 181, 103, 182, 204, 212, 26, 211, 18, 69, 27, 148, 138, 116, 19, 240, 161, 66, 253, 64, 212, 147, 71>>,
nil,
<<86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33>>,
<<86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33>>,
<<86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33>>,
<<0::2048>>,
nil,
nil,
0,
0,
nil,
"",
<<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>,
<<0, 0, 0, 0, 0, 0, 0, 0>>
],
[],
[]
]
"""
@spec serialize(t) :: ExRLP.t()
def serialize(block) do
[
# L_H(B_H)
Header.serialize(block.header),
# L_T(B_T)*
Enum.map(block.transactions, &Transaction.serialize/1),
# L_H(B_U)*
Enum.map(block.ommers, &Header.serialize/1)
]
end
@doc """
Decodes a block from an RLP encoding. Effectively inverts
L_B defined in Eq.(35).
## Examples
iex> Blockchain.Block.deserialize([
...> [<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>],
...> [[<<5>>, <<6>>, <<7>>, <<1::160>>, <<8>>, "hi", <<27>>, <<9>>, <<10>>]],
...> [[<<11::256>>, <<12::256>>, <<13::160>>, <<14::256>>, <<15::256>>, <<16::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<17::256>>, <<18::64>>]]
...> ])
%Blockchain.Block{
  header: %Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>},
  transactions: [%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}],
  ommers: [%Block.Header{parent_hash: <<11::256>>, ommers_hash: <<12::256>>, beneficiary: <<13::160>>, state_root: <<14::256>>, transactions_root: <<15::256>>, receipts_root: <<16::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<17::256>>, nonce: <<18::64>>}]
}
"""
@spec deserialize(ExRLP.t()) :: t
def deserialize(rlp) do
  # The RLP structure mirrors serialize/1: [header, transactions, ommers].
  [header_rlp, transactions_rlp, ommers_rlp] = rlp

  %__MODULE__{
    header: Header.deserialize(header_rlp),
    transactions: for(trx_rlp <- transactions_rlp, do: Transaction.deserialize(trx_rlp)),
    ommers: for(ommer_rlp <- ommers_rlp, do: Header.deserialize(ommer_rlp))
  }
end
@doc """
Decodes a block from RLP-encoded data, returning `{:ok, block}` or
`{:error, reason}` when decoding fails at any stage. Accepts either a
`"0x"`-prefixed hex string, a raw RLP binary, or an already-decoded RLP
result list.
"""
@spec decode_rlp(binary()) :: {:ok, [ExRLP.t()]} | {:error, any()}
# Hex-encoded input: strip the "0x" prefix, decode the hex (any letter
# case), then fall through to the raw-binary clause. The implicit
# `rescue` converts decoding exceptions into `{:error, e}`.
def decode_rlp("0x" <> hex_data) do
  hex_binary = Base.decode16!(hex_data, case: :mixed)
  decode_rlp(hex_binary)
rescue
  e ->
    {:error, e}
end
# Raw RLP binary: RLP-decode it, then fall through to the list clause.
# NOTE: clause order matters — this must come after the "0x" clause.
def decode_rlp(rlp) when is_binary(rlp) do
  rlp |> ExRLP.decode() |> decode_rlp()
rescue
  e ->
    {:error, e}
end
# Already-decoded RLP list: build the block struct.
def decode_rlp(rlp_result_list) do
  {:ok, deserialize(rlp_result_list)}
rescue
  e ->
    {:error, e}
end
@doc """
Computes hash of a block, which is simply the hash of the serialized
block after applying RLP encoding.
This is defined in Eq.(37) of the Yellow Paper.
## Examples
iex> %Blockchain.Block{header: %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}}
...> |> Blockchain.Block.hash()
<<78, 28, 127, 10, 192, 253, 127, 239, 254, 179, 39, 34, 245, 44, 152, 98, 128, 71, 238, 155, 100, 161, 199, 71, 243, 223, 172, 191, 74, 99, 128, 63>>
iex> %Blockchain.Block{header: %Block.Header{number: 0, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}}
...> |> Blockchain.Block.hash()
<<218, 225, 46, 241, 196, 160, 136, 96, 109, 216, 73, 167, 92, 174, 91, 228, 85, 112, 234, 129, 99, 200, 158, 61, 223, 166, 165, 132, 187, 24, 142, 193>>
"""
@spec hash(t) :: EVM.hash()
def hash(block) do
  # A block is identified entirely by the hash of its header.
  Header.hash(block.header)
end
@doc """
Stores a given block in the database and returns the block hash.
This should be used if we ever want to retrieve that block in
the future.
Note: Blocks are identified by a hash of the block header,
thus we will only get the same block back if the header
matches what we stored.
## Examples
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> block = %Blockchain.Block{header: %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}}
iex> Blockchain.Block.put_block(block, db)
{:ok, <<78, 28, 127, 10, 192, 253, 127, 239, 254, 179, 39, 34, 245, 44, 152, 98, 128, 71, 238, 155, 100, 161, 199, 71, 243, 223, 172, 191, 74, 99, 128, 63>>}
iex> {:ok, serialized_block} = MerklePatriciaTree.DB.get(db, block |> Blockchain.Block.hash)
iex> serialized_block |> ExRLP.decode |> Blockchain.Block.deserialize()
%Blockchain.Block{header: %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}}
"""
@spec put_block(t, DB.db(), binary() | nil) :: {:ok, EVM.hash()}
def put_block(block, db, predefined_key \\ nil) do
  # Key by the caller-supplied hash when given, otherwise the header hash.
  block_hash = predefined_key || hash(block)
  encoded_block = block |> serialize() |> ExRLP.encode()

  :ok = MerklePatriciaTree.DB.put!(db, block_hash, encoded_block)
  {:ok, block_hash}
end
@doc """
Returns a given block from the database, if the hash
exists in the database.
See `Blockchain.Block.put_block/2` for details.
## Examples
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> Blockchain.Block.get_block(<<1, 2, 3>>, db)
:not_found
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> block = %Blockchain.Block{
...> transactions: [%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}],
...> header: %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}
...> }
iex> Blockchain.Block.put_block(block, db)
iex> Blockchain.Block.get_block(block |> Blockchain.Block.hash, db)
{:ok, %Blockchain.Block{
  transactions: [%Blockchain.Transaction{nonce: 5, gas_price: 6, gas_limit: 7, to: <<1::160>>, value: 8, v: 27, r: 9, s: 10, data: "hi"}],
  header: %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}
}}
"""
@spec get_block(EVM.hash(), DB.db()) :: {:ok, t} | :not_found
def get_block(block_hash, db) do
  case MerklePatriciaTree.DB.get(db, block_hash) do
    {:ok, encoded_block} ->
      {:ok, encoded_block |> ExRLP.decode() |> deserialize()}

    # Pass non-ok results (e.g. :not_found) straight through.
    other ->
      other
  end
end
@doc """
Returns the parent node for a given block, if it exists.
We assume a block is a genesis block if it does not have
a valid `parent_hash` set.
## Examples
iex> Blockchain.Block.get_parent_block(%Blockchain.Block{header: %Block.Header{number: 0}}, nil)
:genesis
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> block = %Blockchain.Block{header: %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}}
iex> Blockchain.Block.put_block(block, db)
iex> Blockchain.Block.get_parent_block(%Blockchain.Block{header: %Block.Header{parent_hash: block |> Blockchain.Block.hash}}, db)
{:ok, %Blockchain.Block{header: %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}}}
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> block = %Blockchain.Block{header: %Block.Header{number: 5, parent_hash: <<1, 2, 3>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}}
iex> Blockchain.Block.get_parent_block(%Blockchain.Block{header: %Block.Header{parent_hash: block |> Blockchain.Block.hash}}, db)
:not_found
"""
@spec get_parent_block(t, DB.db()) :: {:ok, t} | :genesis | :not_found
def get_parent_block(block, db) do
  # Block number 0 is the genesis block; it has no parent to look up.
  if block.header.number == 0 do
    :genesis
  else
    get_block(block.header.parent_hash, db)
  end
end
@doc """
Returns the total number of transactions
included in a block. This is based on the
transaction list for a given block.
## Examples
iex> Blockchain.Block.get_transaction_count(%Blockchain.Block{transactions: [%Blockchain.Transaction{}, %Blockchain.Transaction{}]})
2
"""
@spec get_transaction_count(t) :: integer()
def get_transaction_count(block) do
  length(block.transactions)
end
@doc """
Returns a given receipt from a block. This is
based on the receipts root where all receipts
are stored for the given block.
## Examples
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{}
...> |> Blockchain.Block.put_receipt(6, %Blockchain.Transaction.Receipt{state: <<1, 2, 3>>, cumulative_gas: 10, bloom_filter: <<2, 3, 4>>, logs: []}, trie.db)
...> |> Blockchain.Block.put_receipt(7, %Blockchain.Transaction.Receipt{state: <<4, 5, 6>>, cumulative_gas: 11, bloom_filter: <<5, 6, 7>>, logs: []}, trie.db)
...> |> Blockchain.Block.get_receipt(6, trie.db)
%Blockchain.Transaction.Receipt{state: <<1, 2, 3>>, cumulative_gas: 10, bloom_filter: <<2, 3, 4>>, logs: []}
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{}
...> |> Blockchain.Block.put_receipt(6, %Blockchain.Transaction.Receipt{state: <<1, 2, 3>>, cumulative_gas: 10, bloom_filter: <<2, 3, 4>>, logs: []}, trie.db)
...> |> Blockchain.Block.get_receipt(7, trie.db)
nil
"""
@spec get_receipt(t, integer(), DB.db()) :: Receipt.t() | nil
def get_receipt(block, i, db) do
  # Receipts are keyed in the trie by the RLP encoding of their index.
  receipts_trie = Trie.new(db, block.header.receipts_root)

  case Trie.get(receipts_trie, ExRLP.encode(i)) do
    nil ->
      nil

    serialized_receipt ->
      serialized_receipt
      |> ExRLP.decode()
      |> Receipt.deserialize()
  end
end
@doc """
Returns a given transaction from a block. This is
based on the transactions root where all transactions
are stored for the given block.
## Examples
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{}
...> |> Blockchain.Block.put_transaction(6, %Blockchain.Transaction{nonce: 1, v: 1, r: 2, s: 3}, trie.db)
...> |> Blockchain.Block.put_transaction(7, %Blockchain.Transaction{nonce: 2, v: 1, r: 2, s: 3}, trie.db)
...> |> Blockchain.Block.get_transaction(6, trie.db)
%Blockchain.Transaction{nonce: 1, v: 1, r: 2, s: 3}
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{}
...> |> Blockchain.Block.put_transaction(6, %Blockchain.Transaction{data: "", gas_limit: 100000, gas_price: 3, init: <<96, 3, 96, 5, 1, 96, 0, 82, 96, 0, 96, 32, 243>>, nonce: 5, r: 110274197540583527170567040609004947678532096020311055824363076718114581104395, s: 15165203061950746568488278734700551064641299899120962819352765267479743108366, to: "", v: 27, value: 5}, trie.db)
...> |> Blockchain.Block.get_transaction(6, trie.db)
%Blockchain.Transaction{data: "", gas_limit: 100000, gas_price: 3, init: <<96, 3, 96, 5, 1, 96, 0, 82, 96, 0, 96, 32, 243>>, nonce: 5, r: 110274197540583527170567040609004947678532096020311055824363076718114581104395, s: 15165203061950746568488278734700551064641299899120962819352765267479743108366, to: "", v: 27, value: 5}
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{}
...> |> Blockchain.Block.put_transaction(6, %Blockchain.Transaction{nonce: 1, v: 1, r: 2, s: 3}, trie.db)
...> |> Blockchain.Block.get_transaction(7, trie.db)
nil
"""
@spec get_transaction(t, integer(), DB.db()) :: Transaction.t() | nil
def get_transaction(block, i, db) do
  # Transactions are keyed in the trie by the RLP encoding of their index.
  transactions_trie = Trie.new(db, block.header.transactions_root)

  case Trie.get(transactions_trie, ExRLP.encode(i)) do
    nil ->
      nil

    serialized_transaction ->
      serialized_transaction
      |> ExRLP.decode()
      |> Transaction.deserialize()
  end
end
@doc """
Returns the cumulative gas used by a block based on the
listed transactions. This is defined largely in the
note after Eq.(66) referenced as l(B_R)_u, or the last
receipt's cumulative gas.
The receipts aren't directly included in the block, so
we'll need to pull it from the receipts root.
Note: this will raise if we do not have a receipt for
the most recent transaction.
## Examples
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{transactions: [1,2,3,4,5,6,7]}
...> |> Blockchain.Block.put_receipt(6, %Blockchain.Transaction.Receipt{state: <<1, 2, 3>>, cumulative_gas: 10, bloom_filter: <<2, 3, 4>>, logs: []}, trie.db)
...> |> Blockchain.Block.put_receipt(7, %Blockchain.Transaction.Receipt{state: <<4, 5, 6>>, cumulative_gas: 11, bloom_filter: <<5, 6, 7>>, logs: []}, trie.db)
...> |> Blockchain.Block.get_cumulative_gas(trie.db)
11
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{transactions: [1,2,3,4,5,6]}
...> |> Blockchain.Block.put_receipt(6, %Blockchain.Transaction.Receipt{state: <<1, 2, 3>>, cumulative_gas: 10, bloom_filter: <<2, 3, 4>>, logs: []}, trie.db)
...> |> Blockchain.Block.put_receipt(7, %Blockchain.Transaction.Receipt{state: <<4, 5, 6>>, cumulative_gas: 11, bloom_filter: <<5, 6, 7>>, logs: []}, trie.db)
...> |> Blockchain.Block.get_cumulative_gas(trie.db)
10
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{}
...> |> Blockchain.Block.get_cumulative_gas(trie.db)
0
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> %Blockchain.Block{transactions: [1,2,3,4,5,6,7,8]}
...> |> Blockchain.Block.put_receipt(6, %Blockchain.Transaction.Receipt{state: <<1, 2, 3>>, cumulative_gas: 10, bloom_filter: <<2, 3, 4>>, logs: []}, trie.db)
...> |> Blockchain.Block.put_receipt(7, %Blockchain.Transaction.Receipt{state: <<4, 5, 6>>, cumulative_gas: 11, bloom_filter: <<5, 6, 7>>, logs: []}, trie.db)
...> |> Blockchain.Block.get_cumulative_gas(trie.db)
** (RuntimeError) cannot find receipt
"""
@spec get_cumulative_gas(t, DB.db()) :: EVM.Gas.t()
def get_cumulative_gas(block = %__MODULE__{}, db) do
  case get_transaction_count(block) do
    # No transactions means no gas was consumed.
    0 ->
      0

    i ->
      # Look up the receipt for the last transaction; its cumulative_gas
      # is the block total.
      case get_receipt(block, i, db) do
        nil -> raise "cannot find receipt"
        receipt -> receipt.cumulative_gas
      end
  end
end
@doc """
Creates a new block from a parent block. This will handle setting
the block number, the difficulty and will keep the `gas_limit` the
same as the parent's block unless specified in `opts`.
A timestamp is required for difficulty calculation.
If it's not specified, it will default to the current system time.
This function is not directly addressed in the Yellow Paper.
## Examples
iex> %Blockchain.Block{header: %Block.Header{parent_hash: <<0::256>>, beneficiary: <<5::160>>, state_root: <<1::256>>, number: 100_000, difficulty: 15_500_0000, timestamp: 5_000_000, gas_limit: 500_000}}
...> |> Blockchain.Block.gen_child_block(Blockchain.Test.ropsten_chain(), timestamp: 5010000, extra_data: "hi", beneficiary: <<5::160>>)
%Blockchain.Block{
  header: %Block.Header{
    state_root: <<1::256>>,
    beneficiary: <<5::160>>,
    number: 100_001,
    difficulty: 147_507_383,
    timestamp: 5_010_000,
    gas_limit: 500_000,
    extra_data: "hi",
    parent_hash: <<141, 203, 173, 190, 43, 64, 71, 106, 211, 77, 254, 89, 58, 72, 3, 108, 6, 101, 232, 254, 10, 149, 244, 245, 102, 5, 55, 235, 198, 39, 66, 227>>
  }
}
iex> %Blockchain.Block{header: %Block.Header{parent_hash: <<0::256>>, beneficiary: <<5::160>>, state_root: <<1::256>>, number: 100_000, difficulty: 1_500_0000, timestamp: 5000, gas_limit: 500_000}}
...> |> Blockchain.Block.gen_child_block(Blockchain.Test.ropsten_chain(), state_root: <<2::256>>, timestamp: 6010, extra_data: "hi", beneficiary: <<5::160>>)
%Blockchain.Block{
  header: %Block.Header{
    state_root: <<2::256>>,
    beneficiary: <<5::160>>,
    number: 100_001,
    difficulty: 142_74_924,
    timestamp: 6010,
    gas_limit: 500_000,
    extra_data: "hi",
    parent_hash: <<233, 151, 241, 216, 121, 36, 187, 39, 42, 93, 8, 68, 162, 118, 84, 219, 140, 35, 220, 90, 118, 129, 76, 45, 249, 55, 241, 82, 181, 30, 22, 128>>
  }
}
"""
@spec gen_child_block(t, Chain.t(), keyword()) :: t
def gen_child_block(parent_block, chain, opts \\ []) do
  # Inherit the parent's gas limit unless the caller overrides it.
  child_gas_limit = opts[:gas_limit] || parent_block.header.gas_limit
  child = %__MODULE__{header: gen_child_header(parent_block, opts)}

  child
  |> set_block_number(parent_block)
  |> set_block_difficulty(chain, parent_block)
  |> set_block_gas_limit(chain, parent_block, child_gas_limit)
  |> set_block_parent_hash(parent_block)
end
# Builds a child header from the parent block, taking overrides from
# `opts` and falling back to parent values (or sensible defaults).
@spec gen_child_header(t, keyword()) :: Header.t()
defp gen_child_header(parent_block, opts) do
  %Header{
    state_root: opts[:state_root] || parent_block.header.state_root,
    timestamp: opts[:timestamp] || System.system_time(:second),
    extra_data: opts[:extra_data] || <<>>,
    # `opts[:beneficiary]` is already nil when absent; the previous
    # `|| nil` fallback was redundant.
    beneficiary: opts[:beneficiary],
    mix_hash: opts[:mix_hash] || parent_block.header.mix_hash
  }
end
@doc """
Sets block's parent's hash
"""
@spec set_block_parent_hash(t, t) :: t
def set_block_parent_hash(block, parent_block) do
  # Reuse the parent's cached block_hash when set; otherwise hash the header.
  computed_parent_hash = parent_block.block_hash || hash(parent_block)
  %{block | header: %{block.header | parent_hash: computed_parent_hash}}
end
@doc """
Calculates the `number` for a new block. This implements Eq.(38) from
the Yellow Paper.
## Examples
iex> Blockchain.Block.set_block_number(%Blockchain.Block{header: %Block.Header{extra_data: "hello"}}, %Blockchain.Block{header: %Block.Header{number: 32}})
%Blockchain.Block{header: %Block.Header{number: 33, extra_data: "hello"}}
"""
@spec set_block_number(t, t) :: t
def set_block_number(block, parent_block) do
  child_number = parent_block.header.number + 1
  %{block | header: %{block.header | number: child_number}}
end
@doc """
Set the difficulty of a new block based on Eq.(39), better defined
in `Block.Header`.
# TODO: Validate these results
## Examples
iex> Blockchain.Block.set_block_difficulty(
...> %Blockchain.Block{header: %Block.Header{number: 0, timestamp: 0}},
...> Blockchain.Test.ropsten_chain(),
...> nil
...> )
%Blockchain.Block{header: %Block.Header{number: 0, timestamp: 0, difficulty: 1_048_576}}
iex> Blockchain.Block.set_block_difficulty(
...> %Blockchain.Block{header: %Block.Header{number: 1, timestamp: 1_479_642_530}},
...> Blockchain.Test.ropsten_chain(),
...> %Blockchain.Block{header: %Block.Header{number: 0, timestamp: 0, difficulty: 1_048_576}}
...> )
%Blockchain.Block{header: %Block.Header{number: 1, timestamp: 1_479_642_530, difficulty: 997_888}}
"""
@spec set_block_difficulty(t, Chain.t(), t) :: t
def set_block_difficulty(block, chain, parent_block) do
  updated_header = %{block.header | difficulty: get_difficulty(block, parent_block, chain)}
  %{block | header: updated_header}
end
# Selects the difficulty formula for the block's hardfork era
# (Byzantium bomb delays -> Homestead -> Frontier). The repeated
# `if(parent_block, ...)` and chain-config lookups are hoisted so each
# value is computed once.
defp get_difficulty(block, parent_block, chain) do
  parent_header = if parent_block, do: parent_block.header, else: nil
  genesis_difficulty = chain.genesis[:difficulty]
  minimum_difficulty = chain.engine["Ethash"][:minimum_difficulty]
  difficulty_bound_divisor = chain.engine["Ethash"][:difficulty_bound_divisor]

  cond do
    Chain.after_bomb_delays?(chain, block.header.number) ->
      delay_factor = Chain.bomb_delay_factor_for_block(chain, block.header.number)

      Header.get_byzantium_difficulty(
        block.header,
        parent_header,
        delay_factor,
        genesis_difficulty,
        minimum_difficulty,
        difficulty_bound_divisor
      )

    Chain.after_homestead?(chain, block.header.number) ->
      Header.get_homestead_difficulty(
        block.header,
        parent_header,
        genesis_difficulty,
        minimum_difficulty,
        difficulty_bound_divisor
      )

    true ->
      Header.get_frontier_difficulty(
        block.header,
        parent_header,
        genesis_difficulty,
        minimum_difficulty,
        difficulty_bound_divisor
      )
  end
end
@doc """
Sets the gas limit of a given block, or raises
if the block limit is not acceptable. The validity
check is defined in Eq.(45), Eq.(46) and Eq.(47) of
the Yellow Paper.
## Examples
iex> Blockchain.Block.set_block_gas_limit(
...> %Blockchain.Block{header: %Block.Header{}},
...> Blockchain.Test.ropsten_chain(),
...> %Blockchain.Block{header: %Block.Header{gas_limit: 1_000_000}},
...> 1_000_500
...> )
%Blockchain.Block{header: %Block.Header{gas_limit: 1_000_500}}
iex> Blockchain.Block.set_block_gas_limit(
...> %Blockchain.Block{header: %Block.Header{}},
...> Blockchain.Test.ropsten_chain(),
...> %Blockchain.Block{header: %Block.Header{gas_limit: 1_000_000}},
...> 2_000_000
...> )
** (RuntimeError) Block gas limit not valid
"""
@spec set_block_gas_limit(t, Chain.t(), t, EVM.Gas.t()) :: t
def set_block_gas_limit(block, chain, parent_block, gas_limit) do
  gas_limit_valid? =
    Header.is_gas_limit_valid?(
      gas_limit,
      parent_block.header.gas_limit,
      chain.params[:gas_limit_bound_divisor],
      chain.params[:min_gas_limit]
    )

  if gas_limit_valid? do
    %{block | header: %{block.header | gas_limit: gas_limit}}
  else
    raise "Block gas limit not valid"
  end
end
@doc """
Attaches an ommer to a block. We do no validation at this stage.
## Examples
iex> Blockchain.Block.add_ommers(%Blockchain.Block{}, [%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}])
%Blockchain.Block{
  ommers: [
    %Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}
  ],
  header: %Block.Header{
    ommers_hash: <<59, 196, 156, 242, 196, 38, 21, 97, 112, 6, 73, 111, 12, 88, 35, 155, 72, 175, 82, 0, 163, 128, 115, 236, 45, 99, 88, 62, 88, 80, 122, 96>>
  }
}
"""
@spec add_ommers(t, [Header.t()]) :: t
def add_ommers(block, ommers) do
  total_ommers = block.ommers ++ ommers
  # Use the `Header` alias for consistency with serialize/1 above
  # (previously referenced as `Block.Header.serialize/1`).
  serialized_ommers_list = Enum.map(total_ommers, &Header.serialize/1)
  # The header commits to the Keccak hash of the RLP-encoded ommer list.
  new_ommers_hash = serialized_ommers_list |> ExRLP.encode() |> Keccak.kec()

  %{block | ommers: total_ommers, header: %{block.header | ommers_hash: new_ommers_hash}}
end
@doc """
Gets an ommer for a given block, based on the ommers_hash.
## Examples
iex> %Blockchain.Block{}
...> |> Blockchain.Block.add_ommers([%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}])
...> |> Blockchain.Block.get_ommer(0)
%Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>}
"""
@spec get_ommer(t, integer()) :: Header.t()
def get_ommer(block, i) do
  # Returns nil when the index is out of bounds, like Enum.at/2.
  case Enum.fetch(block.ommers, i) do
    {:ok, ommer} -> ommer
    :error -> nil
  end
end
@doc """
Checks the validity of a block, including the validity of the
header and the transactions. This should verify that we should
accept the authenticity of a block.
## Examples
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> chain = Blockchain.Test.ropsten_chain()
iex> Blockchain.Genesis.create_block(chain, db)
...> |> Blockchain.Block.add_rewards(db, chain)
...> |> Blockchain.Block.validate(chain, nil, db)
:valid
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> chain = Blockchain.Test.ropsten_chain()
iex> parent = Blockchain.Genesis.create_block(chain, db)
...> child = Blockchain.Block.gen_child_block(parent, chain)
...> Blockchain.Block.validate(child, chain, :parent_not_found, db)
{:errors, [:non_genesis_block_requires_parent]}
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> chain = Blockchain.Test.ropsten_chain()
iex> parent = Blockchain.Genesis.create_block(chain, db)
iex> beneficiary = <<0x05::160>>
iex> child = Blockchain.Block.gen_child_block(parent, chain, beneficiary: beneficiary)
...> |> Blockchain.Block.add_rewards(db, chain)
iex> Blockchain.Block.validate(child, chain, parent, db)
:valid
"""
# NOTE: spec corrected — failures are reported as `{:errors, [atom()]}`
# (see validate_parent_block/2 and the doctest above), not `{:invalid, ...}`.
@spec validate(t, Chain.t(), t, DB.db()) :: :valid | {:errors, [atom()]}
def validate(block, chain, parent_block, db) do
  # Each step either returns :valid or short-circuits with the error tuple.
  with :valid <- validate_parent_block(block, parent_block),
       :valid <- validate_header(block, parent_block, chain) do
    HolisticValidity.validate(block, chain, parent_block, db)
  end
end
# Validates the block header against its parent and chain configuration,
# including the expected difficulty and (when applicable) the DAO-fork
# extra-data requirement.
defp validate_header(block, parent_block, chain) do
  parent_header = if parent_block, do: parent_block.header, else: nil

  dao_extra_data? =
    Chain.support_dao_fork?(chain) &&
      Chain.within_dao_fork_extra_range?(chain, block.header.number)

  Header.validate(
    block.header,
    parent_header,
    get_difficulty(block, parent_block, chain),
    chain.params[:gas_limit_bound_divisor],
    chain.params[:min_gas_limit],
    dao_extra_data?
  )
end
# A non-genesis block (number > 0) must have a resolvable parent.
defp validate_parent_block(block, parent_block) do
  missing_parent? = block.header.number > 0 and parent_block == :parent_not_found

  if missing_parent? do
    {:errors, [:non_genesis_block_requires_parent]}
  else
    :valid
  end
end
@doc """
For a given block, this will add the given transactions to its
list of transaction and update the header state accordingly. That
is, we will execute each transaction and update the state root,
transaction receipts, etc. We effectively implement Eq.(2), Eq.(3)
and Eq.(4) of the Yellow Paper, referred to as Π.
The trie db refers to where we expect our trie to exist, e.g.
in `:ets` or `:rocksdb`. See `MerklePatriciaTree.DB`.
"""
@spec add_transactions(t, [Transaction.t()], DB.db(), Chain.t()) :: t
def add_transactions(block, transactions, db, chain) do
  # Hardfork-specific state changes run first, then transactions execute
  # one by one, and finally the logs bloom is recomputed from receipts.
  prepared_block = process_hardfork_specifics(block, chain, db)
  executed_block = do_add_transactions(prepared_block, transactions, db, chain)

  calculate_logs_bloom(executed_block)
end
# Applies irregular state changes mandated by hardforks (currently the
# DAO fork) before any transactions are executed; otherwise returns the
# block unchanged.
defp process_hardfork_specifics(block, chain, db) do
  dao_fork? = Chain.support_dao_fork?(chain) && Chain.dao_fork?(chain, block.header.number)

  if dao_fork? do
    repo =
      db
      |> Trie.new(block.header.state_root)
      |> Account.Repo.new()
      |> Blockchain.Hardfork.Dao.execute(chain)

    put_state(block, repo.state)
  else
    block
  end
end
# Executes each transaction in order against the block's evolving state,
# recording per-transaction receipts and running gas totals. `trx_count`
# is the index of the next transaction within the block.
@spec do_add_transactions(t, [Transaction.t()], DB.db(), Chain.t(), integer()) :: t
defp do_add_transactions(block, transactions, db, chain, trx_count \\ 0)
# Base case: no transactions left, the block is complete.
defp do_add_transactions(block, [], _, _, _), do: block
defp do_add_transactions(
block = %__MODULE__{header: header},
[trx | transactions],
db,
chain,
trx_count
) do
# Execute the transaction against the state rooted at the current header.
state = Trie.new(db, header.state_root)
{new_account_repo, gas_used, receipt} =
Transaction.execute_with_validation(state, trx, header, chain)
new_state = Repo.commit(new_account_repo).state
# Receipts store cumulative (not per-transaction) gas.
total_gas_used = block.header.gas_used + gas_used
receipt = %{receipt | cumulative_gas: total_gas_used}
# Fold the results back into the block: new state root, gas total,
# receipt and transaction at index `trx_count`.
updated_block =
block
|> put_state(new_state)
|> put_gas_used(total_gas_used)
|> put_receipt(trx_count, receipt, db)
|> put_transaction(trx_count, trx, db)
# Recurse on the remaining transactions with the next index.
do_add_transactions(updated_block, transactions, db, chain, trx_count + 1)
end
# Recomputes the header's logs bloom filter from the block's accumulated
# receipts.
@spec calculate_logs_bloom(t()) :: t()
defp calculate_logs_bloom(block) do
  new_bloom = Bloom.from_receipts(block.receipts)
  %{block | header: %{block.header | logs_bloom: new_bloom}}
end
# Updates a block to have a new state root given a state object (trie).
@spec put_state(t, Trie.t()) :: t
def put_state(block = %__MODULE__{header: header = %Header{}}, new_state) do
  updated_header = %{header | state_root: new_state.root_hash}
  %{block | header: updated_header}
end
# Updates a block to have total gas used set in the header.
@spec put_gas_used(t, EVM.Gas.t()) :: t
def put_gas_used(block = %__MODULE__{header: header}, gas_used) do
  updated_header = %{header | gas_used: gas_used}
  %{block | header: updated_header}
end
@doc """
Updates a block by adding a receipt to the list of receipts
at position `i`.
## Examples
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> block = Blockchain.Block.put_receipt(%Blockchain.Block{}, 5, %Blockchain.Transaction.Receipt{state: <<1, 2, 3>>, cumulative_gas: 10, bloom_filter: <<2, 3, 4>>, logs: "hi mom"}, trie.db)
iex> MerklePatriciaTree.Trie.into(block.header.receipts_root, trie)
...> |> MerklePatriciaTree.Trie.Inspector.all_values()
[{<<5>>, <<208, 131, 1, 2, 3, 10, 131, 2, 3, 4, 134, 104, 105, 32, 109, 111, 109>>}]
"""
@spec put_receipt(t, integer(), Receipt.t(), DB.db()) :: t
def put_receipt(block, i, receipt, db) do
  # Store the RLP-encoded receipt in the receipts trie, keyed by index.
  encoded_receipt = receipt |> Receipt.serialize() |> ExRLP.encode()

  receipts_trie =
    db
    |> Trie.new(block.header.receipts_root)
    |> Trie.update(ExRLP.encode(i), encoded_receipt)

  %{
    block
    | header: %{block.header | receipts_root: receipts_trie.root_hash},
      receipts: block.receipts ++ [receipt]
  }
end
@doc """
Updates a block by adding a transaction to the list of transactions
and updating the transactions_root in the header at position `i`, which
should be equilvant to the current number of transactions.
## Examples
iex> trie = MerklePatriciaTree.Trie.new(MerklePatriciaTree.Test.random_ets_db())
iex> block = Blockchain.Block.put_transaction(%Blockchain.Block{}, 0, %Blockchain.Transaction{nonce: 1, v: 2, r: 3, s: 4}, trie.db)
iex> block.transactions
[%Blockchain.Transaction{nonce: 1, v: 2, r: 3, s: 4}]
iex> MerklePatriciaTree.Trie.into(block.header.transactions_root, trie)
...> |> MerklePatriciaTree.Trie.Inspector.all_values()
[{<<0x80>>, <<201, 1, 128, 128, 128, 128, 128, 2, 3, 4>>}]
"""
@spec put_transaction(t, integer(), Transaction.t(), DB.db()) :: t
def put_transaction(block, i, trx, db) do
  # Store the RLP-encoded transaction in the transactions trie, keyed by index.
  encoded_transaction = trx |> Transaction.serialize() |> ExRLP.encode()

  transactions_trie =
    db
    |> Trie.new(block.header.transactions_root)
    |> Trie.update(ExRLP.encode(i), encoded_transaction)

  updated_header = %{block.header | transactions_root: transactions_trie.root_hash}

  %{block | transactions: block.transactions ++ [trx], header: updated_header}
end
@doc """
Adds the rewards to miners (including for ommers) to a block.
This is defined in Section 11.3, Eq.(159-163) of the Yellow Paper.
## Examples
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> miner = <<0x05::160>>
iex> chain = Blockchain.Test.ropsten_chain()
iex> state = MerklePatriciaTree.Trie.new(db)
...> |> Blockchain.Account.put_account(miner, %Blockchain.Account{balance: 400_000})
iex> block = %Blockchain.Block{header: %Block.Header{number: 0, state_root: state.root_hash, beneficiary: miner}}
iex> block
...> |> Blockchain.Block.add_rewards(db, chain)
...> |> Blockchain.Block.get_state(db)
...> |> Blockchain.Account.get_accounts([miner])
[%Blockchain.Account{balance: 400_000}]
iex> db = MerklePatriciaTree.Test.random_ets_db()
iex> miner = <<0x05::160>>
iex> chain = Blockchain.Test.ropsten_chain()
iex> state = MerklePatriciaTree.Trie.new(db)
...> |> Blockchain.Account.put_account(miner, %Blockchain.Account{balance: 400_000})
iex> block = %Blockchain.Block{header: %Block.Header{state_root: state.root_hash, beneficiary: miner}}
iex> block
...> |> Blockchain.Block.add_rewards(db, chain)
...> |> Blockchain.Block.get_state(db)
...> |> Blockchain.Account.get_accounts([miner])
[%Blockchain.Account{balance: 3000000000000400000}]
"""
@spec add_rewards(t, DB.db(), Chain.t()) :: t
def add_rewards(block, db, chain)

# Rewards cannot be paid without a beneficiary to credit.
def add_rewards(%{header: %{beneficiary: nil}}, _db, _chain),
  do: raise("Unable to add block rewards, beneficiary is nil")

# The genesis block (number 0) receives no reward.
def add_rewards(block = %{header: %{number: 0}}, _db, _chain), do: block

def add_rewards(block, db, chain) do
  base_reward = Chain.block_reward_for_block(chain, block.header.number)

  rewarded_state =
    block
    |> get_state(db)
    |> add_miner_reward(block, base_reward)
    |> add_ommer_rewards(block, base_reward)

  set_state(block, rewarded_state)
end
# Credits the block's beneficiary with the base reward plus an extra
# base_reward/@block_reward_ommer_divisor per included ommer.
defp add_miner_reward(state, block, base_reward) do
  ommer_bonus = round(base_reward * length(block.ommers) / @block_reward_ommer_divisor)
  Account.add_wei(state, block.header.beneficiary, ommer_bonus + base_reward)
end
# Credits each ommer's beneficiary with a reward scaled down by how far
# the ommer's number lags behind this block's number.
# Fix: the reducer's accumulator previously shadowed the outer `state`
# binding; it is renamed to `acc_state` for clarity.
defp add_ommer_rewards(state, block, base_reward) do
  Enum.reduce(block.ommers, state, fn ommer, acc_state ->
    height_difference = block.header.number - ommer.number

    reward =
      round(
        (@block_reward_ommer_offset - height_difference) *
          (base_reward / @block_reward_ommer_offset)
      )

    Account.add_wei(acc_state, ommer.beneficiary, reward)
  end)
end
@doc """
Sets a given block header field as a shortcut when
we want to change a single field.
## Examples
iex> %Blockchain.Block{}
...> |> Blockchain.Block.put_header(:number, 5)
%Blockchain.Block{
  header: %Block.Header{
    number: 5
  }
}
"""
@spec put_header(t, any(), any()) :: t
def put_header(block, key, value) do
  %{block | header: Map.put(block.header, key, value)}
end
@doc """
Returns a trie rooted at the state_root of a given block.
## Examples
iex> db = MerklePatriciaTree.Test.random_ets_db(:get_state)
iex> Blockchain.Block.get_state(%Blockchain.Block{header: %Block.Header{state_root: <<5::256>>}}, db)
%MerklePatriciaTree.Trie{root_hash: <<5::256>>, db: {MerklePatriciaTree.DB.ETS, :get_state}}
"""
@spec get_state(t, DB.db()) :: Trie.t()
def get_state(block, db) do
  state_root = block.header.state_root
  Trie.new(db, state_root)
end
@doc """
Sets the state_root of a given block from a trie.
## Examples
iex> trie = %MerklePatriciaTree.Trie{root_hash: <<5::256>>, db: {MerklePatriciaTree.DB.ETS, :get_state}}
iex> Blockchain.Block.set_state(%Blockchain.Block{}, trie)
%Blockchain.Block{header: %Block.Header{state_root: <<5::256>>}}
"""
@spec set_state(t, Trie.t()) :: t
def set_state(block, trie) do
  %{root_hash: new_state_root} = trie
  put_header(block, :state_root, new_state_root)
end
end
|
apps/blockchain/lib/blockchain/block.ex
| 0.858006
| 0.470676
|
block.ex
|
starcoder
|
defmodule ExploringElixir do
require Logger
def episode1 do
  # Emulates a hypothetical service (web service, over a TCP socket,
  # another OTP process, etc.) that transforms some JSON for us ...
  # but which suffers from some bugs?
  json_payload = File.read!("data/client.json")
  ExploringElixir.JSONFilter.extract(self(), json_payload, "data")
  Toolbelt.flush()
end
def episode2 do
  # Features
  ExploringElixir.OneFive.ping()
  ExploringElixir.OneFive.unicode_atoms()
  ExploringElixir.OneFive.rand_jump()

  # Benchmarks
  ExploringElixir.Benchmark.Map.match()
  ExploringElixir.Benchmark.Ets.creation()
  ExploringElixir.Benchmark.Ets.population()
end
def episode3 do
IO.puts "Using child_spec/1, we launched various processes in ExploringElixir.ChildSpec"
IO.puts "Look in lib/exploring_elixir/application.ex to see how clean it is!"
IO.puts "Now lets call into them to show they are indeed running:"
IO.inspect ExploringElixir.ChildSpec.ping ExploringElixir.ChildSpec.Permanent
IO.inspect ExploringElixir.ChildSpec.ping ExploringElixir.ChildSpec.Temporary
ExploringElixir.ChildSpec.RandomJump.rand 100
end
def episode4 do
IO.puts "Run the property tests with `mix test --only collatz`"
IO.puts "NOTE: this will recompile the project in test mode!"
count = 10
IO.puts "Run with the first #{count} positive integers:"
ExploringElixir.Collatz.step_count_for Enum.to_list 1..count
end
def episode5 do
end
def episode6 do
Application.ensure_all_started :postgrex
Supervisor.start_child ExploringElixir.Supervisor, ExploringElixir.Repo.Tenants.child_spec([])
ExploringElixir.Tenants.list
end
def episode7 do
ExploringElixir.AutoCluster.start()
end
def episode8 do
import OK, only: ["~>>": 2]
alias ExploringElixir.Time, as: T
Application.ensure_all_started :timex
Application.ensure_all_started :postgrex
Supervisor.start_child ExploringElixir.Supervisor, ExploringElixir.Repo.Tenants.child_spec([])
IO.puts "== Timestamps .. so many =="
IO.puts "This computer believes the timestamp to be #{T.Local.os_timestamp}, but this may drift around on us"
IO.puts "This BEAM vm believes the timestamp to be #{T.Local.os_timestamp}, but this may also drift around on us as well as in relation to the OS time"
IO.puts "Here is a monotonic (always increasing) time: #{T.Local.monotonic_time}"
IO.puts "The monotonic time is offset from the \"real\" time by #{T.Local.monotonic_time_offset}"
IO.puts "So the actual time is something like: #{T.Local.adjusted_monotonic_time}"
IO.puts ""
IO.puts "== Zoneless Times and Dates, aka Naive =="
IO.puts "A point in time: #{T.Local.current_time}"
IO.puts "A point in the calendar: #{T.Local.current_date}"
IO.puts "Moving a point in the calendar into the past by one day: #{T.Local.yesterday}"
IO.puts "If we are sceptical, here's the difference: #{T.Local.just_a_day_away} day"
IO.puts ""
IO.puts "== Calendars =="
IO.puts "In the standard ISO (Gregorian) calendar, today is: #{T.Calendars.today_iso}"
IO.puts "In the Jalaali calendar, today is: #{T.Calendars.today_jalaali}"
IO.puts "Converting from Gregorian to Jalaali is easy: #{T.Calendars.convert_to_jalaali ~D[1975-06-18]}"
IO.puts "The next week of Gregorian days are: "
T.for_next_week fn date -> date
|> Timex.format("%A", :strftime)
~>> (fn x -> " " <> x end).()
|> IO.puts
end
IO.puts "The next week of Jalaali days are: "
T.for_next_week fn date -> date
|> T.Calendars.convert_to_jalaali
|> Timex.format("%A", :strftime)
~>> (fn x -> " " <> x end).()
|> IO.puts
end
dates = [
{Timex.add(DateTime.utc_now, Timex.Duration.from_days(-1)), "yesterday"},
{DateTime.utc_now, "today"},
{Timex.now("America/Vancouver"), "Vancouver"},
{Timex.add(DateTime.utc_now, Timex.Duration.from_days(1)), "tomorrow"}
]
IO.puts ""
IO.puts "== With Ecto =="
IO.puts "Filing the database with some data..."
Enum.each dates, fn {date, data} -> IO.puts "Inserting -> #{data}"; T.Stored.put date, data end
DateTime.utc_now
|> ExploringElixir.Time.Stored.get
|> (fn x -> IO.puts "Today's data: #{inspect x}" end).()
end
def ecto_perf do
Application.ensure_all_started :postgrex
Supervisor.start_child ExploringElixir.Supervisor, EctoBench.Repo.child_spec([])
Enum.each [10, 100, 1000, 100000], fn x -> EctoBench.simpleWrites x end
end
end
|
lib/exploring_elixir.ex
| 0.641535
| 0.558598
|
exploring_elixir.ex
|
starcoder
|
defmodule GrapevineData.Games.Images do
  @moduledoc """
  Handle uploading images to remote storage for games
  """

  alias GrapevineData.Games.Game
  alias GrapevineData.Images
  alias GrapevineData.Repo
  alias Stein.Storage

  @doc """
  If present, upload a cover and/or hero image for a game

  Only uploads a cover if the key "cover" is present, and only uploads
  a hero image if the key "hero" is present. Deletes previous images on
  new uploads.
  """
  def maybe_upload_images(game, params) do
    # Normalize params keys to strings so atom- and string-keyed maps behave
    # the same when matching on "cover"/"hero" below.
    params = for {key, val} <- params, into: %{}, do: {to_string(key), val}
    with {:ok, game} <- maybe_upload_cover_image(game, params),
         {:ok, game} <- maybe_upload_hero_image(game, params) do
      {:ok, game}
    end
  end

  @doc """
  Get a storage path for uploading and viewing the cover image
  """
  def cover_path(game, size) do
    cover_path(game.id, size, game.cover_key, game.cover_extension)
  end

  # Thumbnails are generated as PNG (see generate_cover_versions/2), so force
  # the ".png" extension regardless of the original upload's extension.
  defp cover_path(game_id, "thumbnail", key, extension) when extension != ".png" do
    cover_path(game_id, "thumbnail", key, ".png")
  end

  defp cover_path(game_id, size, key, extension) do
    "/" <> Path.join(["games", to_string(game_id), "cover", "#{size}-#{key}#{extension}"])
  end

  @doc """
  Get a storage path for uploading and viewing the hero image
  """
  def hero_path(game, size) do
    hero_path(game.id, size, game.hero_key, game.hero_extension)
  end

  # Same PNG-forcing rule as cover thumbnails (see generate_hero_versions/2).
  defp hero_path(game_id, "thumbnail", key, extension) when extension != ".png" do
    hero_path(game_id, "thumbnail", key, ".png")
  end

  defp hero_path(game_id, size, key, extension) do
    "/" <> Path.join(["games", to_string(game_id), "hero", "#{size}-#{key}#{extension}"])
  end

  @doc """
  Delete the old images for the cover or hero

  Deletes original and thumbnail sizes if present.
  """
  # Note: returns the bare game (not an {:ok, game} tuple) — callers use it
  # inline before re-uploading.
  def maybe_delete_old_images(game, key, path_fun) do
    case is_nil(Map.get(game, key)) do
      true ->
        # No previous image recorded for this slot; nothing to delete.
        game

      false ->
        Storage.delete(path_fun.(game, "original"))
        Storage.delete(path_fun.(game, "thumbnail"))
        game
    end
  end

  @doc """
  Generate an upload key
  """
  def generate_key(), do: UUID.uuid4()

  @doc """
  Upload the file to the path in storage
  """
  def upload(file, path) do
    Storage.upload(file, path, extensions: [".jpg", ".png"], public: true)
  end

  # Deletes any previous cover, uploads the new original, records the new
  # key/extension on the game, then generates the thumbnail version.
  def maybe_upload_cover_image(game, %{"cover" => file}) do
    game = maybe_delete_old_images(game, :cover_key, &cover_path/2)
    file = Storage.prep_file(file)
    key = generate_key()
    path = cover_path(game.id, "original", key, file.extension)
    changeset = Game.cover_changeset(game, key, file.extension)
    # NOTE(review): the `else` only matches `:error` (from upload/2). If
    # Repo.update/1 returned {:error, changeset} this `with` would raise a
    # WithClauseError — confirm Repo.update cannot fail for this changeset.
    with :ok <- upload(file, path),
         {:ok, game} <- Repo.update(changeset) do
      generate_cover_versions(game, file)
    else
      :error ->
        game
        |> Ecto.Changeset.change()
        |> Ecto.Changeset.add_error(:cover, "could not upload, please try again")
        |> Ecto.Changeset.apply_action(:update)
    end
  end

  # No "cover" key in params: nothing to do.
  def maybe_upload_cover_image(game, _), do: {:ok, game}

  @doc """
  Generate a thumbnail for the cover image
  """
  def generate_cover_versions(game, file) do
    path = cover_path(game, "thumbnail")
    case Images.convert(file, [extname: ".png", thumbnail: "600x400"]) do
      {:ok, temp_path} ->
        # NOTE(review): the result of upload/2 is ignored here — a failed
        # thumbnail upload still returns {:ok, game}. Confirm best-effort is
        # intended.
        upload(%{path: temp_path}, path)
        {:ok, game}

      {:error, :convert} ->
        # Conversion failures are tolerated; the original remains usable.
        {:ok, game}
    end
  end

  @doc """
  If the `hero` param is available upload to storage
  """
  # Mirrors maybe_upload_cover_image/2 for the hero image slot.
  def maybe_upload_hero_image(game, %{"hero" => file}) do
    game = maybe_delete_old_images(game, :hero_key, &hero_path/2)
    file = Storage.prep_file(file)
    key = generate_key()
    path = hero_path(game.id, "original", key, file.extension)
    changeset = Game.hero_changeset(game, key, file.extension)
    with :ok <- upload(file, path),
         {:ok, game} <- Repo.update(changeset) do
      generate_hero_versions(game, file)
    else
      :error ->
        game
        |> Ecto.Changeset.change()
        |> Ecto.Changeset.add_error(:hero, "could not upload, please try again")
        |> Ecto.Changeset.apply_action(:update)
    end
  end

  def maybe_upload_hero_image(game, _), do: {:ok, game}

  @doc """
  Generate a thumbnail for the hero image
  """
  def generate_hero_versions(game, file) do
    path = hero_path(game, "thumbnail")
    case Images.convert(file, [extname: ".png", thumbnail: "600x400"]) do
      {:ok, temp_path} ->
        upload(%{path: temp_path}, path)
        {:ok, game}

      {:error, :convert} ->
        {:ok, game}
    end
  end

  @doc """
  Regenerate the cover image for a game
  """
  def regenerate_cover(game) do
    # NOTE(review): no clause for a failed download — if Storage.download/1
    # returns anything but {:ok, _} this raises a CaseClauseError. Confirm
    # that crashing is the intended behavior here.
    case Storage.download(cover_path(game, "original")) do
      {:ok, temp_path} ->
        generate_cover_versions(game, %{path: temp_path})
    end
  end
end
|
apps/data/lib/grapevine_data/games/images.ex
| 0.700178
| 0.462959
|
images.ex
|
starcoder
|
defmodule Clox do
  @moduledoc """
  Time-bucketed key generation for metric counters.

  A key is a one-letter granularity prefix followed by a Base64url-encoded
  (padding stripped) unsigned integer of minutes since the Unix epoch,
  truncated to that granularity's resolution.
  """

  alias Timex.Date
  alias Timex.DateFormat

  # One-letter key prefixes, one per supported granularity.
  @minute_prefix "m"
  @ten_minute_prefix "T"
  @hour_prefix "H"
  @day_prefix "D"
  @week_prefix "W"
  @month_prefix "M"

  # Seconds per minute — used when converting between seconds and the minute
  # buckets keys are based on.
  @minute_conversion 60

  # Resolution of each granularity, in minutes.
  @minute_res 1
  @ten_minute_res 10
  @hour_res 60
  @day_res @hour_res * 24
  @week_res @day_res * 7
  # NOTE(review): a "month" is fixed at 4 weeks (28 days) here, not a calendar
  # month — confirm this approximation is acceptable for range stepping.
  @month_res @week_res * 4

  # Ordered from finest to coarsest; order matters for diff_to_granularity/2.
  @granularities [
    @minute_prefix,
    @ten_minute_prefix,
    @hour_prefix,
    @day_prefix,
    @week_prefix,
    @month_prefix
  ]
  @resolutions [
    {@minute_prefix, @minute_res},
    {@ten_minute_prefix, @ten_minute_res},
    {@hour_prefix, @hour_res},
    {@day_prefix, @day_res},
    {@week_prefix, @week_res},
    {@month_prefix, @month_res}
  ]

  # Unix epoch expressed in Timex seconds, subtracted so buckets count minutes
  # since 1970-01-01.
  @epoch {1970,1,1} |> Date.from() |> Date.to_secs()
  @date_format "{ISOz}"

  @doc """
  Get a list of keys for a time. The returned keys are split into buckets
  based on the granularities (minute, ten minute, hour, day, week, month).
  These keys may be used to save a counter for a metric
  """
  def keys_for_time(time \\ Date.now) do
    # Zero out sub-minute components — keys have minute precision at best.
    time = time
    |> parse()
    |> Date.set([second: 0, ms: 0])
    # Unquote fragment: expands at compile time into a literal list with one
    # `prefix <> pack(truncate(time, prefix))` expression per granularity.
    keys = unquote(for granularity <- @granularities do
      quote do
        unquote(granularity) <> pack(truncate(unquote(Macro.var(:time, nil)), unquote(granularity)))
      end
    end)
    {:ok, keys}
  end

  @doc """
  Format a key into a format, defaulting to ISOz
  """
  def format(time, format \\ @date_format) do
    {:ok, time
          |> parse()
          |> DateFormat.format!(format)
    }
  end

  @doc """
  Get a list of granularities supported.
  """
  def granularities do
    {:ok, @granularities}
  end

  @doc """
  Get a range of keys between two dates. The granularity will be derived based on steps.
  """
  def smart_range(begining, ending, steps \\ 20)
  # Binary endpoints are parsed from the ISOz format before recursing.
  def smart_range(begining, ending, steps) when is_binary(begining) do
    smart_range(DateFormat.parse!(begining, @date_format), ending, steps)
  end
  def smart_range(begining, ending, steps) when is_binary(ending) do
    smart_range(begining, DateFormat.parse!(ending, @date_format), steps)
  end
  def smart_range(begining, ending, steps) do
    # Pick the finest granularity that still yields at most `steps` buckets.
    diff = Date.diff(begining, ending, :mins)
    granularity = diff_to_granularity(diff, steps)
    range(begining, ending, granularity)
  end

  # Compile-time generated clauses: first (finest) resolution whose bucket
  # count `div(diff, resolution)` stays under `steps` wins.
  for {granularity, resolution} <- @resolutions do
    defp diff_to_granularity(diff, steps) when div(diff, unquote(resolution)) < steps do
      unquote(granularity)
    end
  end
  # Fallback: coarsest granularity (months).
  defp diff_to_granularity(_, _) do
    unquote(List.last(@granularities))
  end

  @doc """
  Get a range of keys given a granularity.
  """
  def range(begining, ending, granularity)
  def range(begining, ending, granularity) when is_binary(begining) do
    range(DateFormat.parse!(begining, @date_format), ending, granularity)
  end
  def range(begining, ending, granularity) when is_binary(ending) do
    range(begining, DateFormat.parse!(ending, @date_format), granularity)
  end
  # One clause per granularity, generated at compile time. The end bucket is
  # extended by one resolution so the range is inclusive of `ending`.
  for {granularity, resolution} <- @resolutions do
    def range(begining, ending, unquote(granularity)) do
      ending = ending
      |> truncate(unquote(granularity))
      |> Kernel.+(unquote(resolution))
      out = begining
      |> truncate(unquote(granularity))
      |> iter(ending, unquote(granularity), unquote(resolution), [])
      {:ok, out}
    end
  end

  # Accumulates keys stepping `resolution` minutes at a time (prepend + reverse
  # at the end); Enum.uniq guards against duplicate buckets after truncation.
  defp iter(begining, ending, granularity, resolution, acc) when begining <= ending do
    key = granularity <> pack(truncate(begining, granularity))
    iter(begining + resolution, ending, granularity, resolution, [key | acc])
  end
  defp iter(_, _, _, _, acc) do
    acc
    |> Enum.uniq()
    |> :lists.reverse()
  end

  @doc """
  Determine if the key is frozen.
  """
  # A key is "frozen" when its bucket lies strictly before the current bucket
  # at the same granularity, i.e. it can no longer receive new data.
  def is_frozen?(time, now \\ Date.now)
  for prefix <- @granularities do
    def is_frozen?(<<unquote(prefix), time :: binary>>, now) do
      {:ok, truncate(now, unquote(prefix)) > unpack(time)}
    end
  end

  # parse/1 normalizes the many accepted inputs (nil, "", unix seconds,
  # Timex.DateTime, a packed key, or an ISOz string) into a Timex.DateTime.
  defp parse(nil), do: Date.now
  defp parse(""), do: Date.now
  defp parse(time) when is_integer(time), do: Date.from(time, :secs)
  defp parse(time = %Timex.DateTime{}), do: time
  # A prefixed key: strip the prefix, decode the packed minutes, convert back
  # to seconds-based Timex date.
  for prefix <- @granularities do
    defp parse(<<unquote(prefix), time :: binary>>) do
      time
      |> unpack()
      |> from_minutes()
      |> Date.from(:secs)
    end
  end
  defp parse(time) when is_binary(time) do
    DateFormat.parse!(time, @date_format)
  end

  # truncate/2 returns the bucket start for `time` at the given granularity,
  # expressed as minutes since the epoch.
  defp truncate(minutes, granularity) when is_integer(minutes) do
    minutes
    |> from_minutes()
    |> Date.from(:secs)
    |> truncate(granularity)
  end
  defp truncate(time, @minute_prefix) do
    time
    |> to_minutes()
  end
  defp truncate(time, @ten_minute_prefix) do
    time
    |> to_minutes()
    |> div(@ten_minute_res)
    |> Kernel.*(@ten_minute_res)
  end
  defp truncate(time, @hour_prefix) do
    time
    |> Date.set(minute: 0)
    |> to_minutes()
  end
  defp truncate(time, @day_prefix) do
    time
    |> Date.set(minute: 0, hour: 0)
    |> to_minutes()
  end
  # Weeks are epoch-aligned (multiples of @week_res since 1970-01-01), not
  # calendar-week aligned.
  defp truncate(time, @week_prefix) do
    time
    |> Date.set(minute: 0, hour: 0)
    |> to_minutes()
    |> div(@week_res)
    |> Kernel.*(@week_res)
  end
  # Months snap to the first day of the calendar month (contrast with
  # @month_res, which is a fixed 28 days and only used for range stepping).
  defp truncate(time, @month_prefix) do
    time
    |> Date.set(minute: 0, hour: 0, day: 1)
    |> to_minutes()
  end

  # Seconds-based Timex date -> whole minutes since the Unix epoch.
  defp to_minutes(date) do
    date
    |> Date.to_secs()
    |> Kernel.-(@epoch)
    |> div(@minute_conversion)
  end

  # Minutes since the Unix epoch -> Timex seconds.
  defp from_minutes(minutes) do
    minutes
    |> Kernel.*(@minute_conversion)
    |> Kernel.+(@epoch)
  end

  # Encode the minute bucket as url-safe Base64 with padding stripped, keeping
  # keys short and safe for storage backends.
  defp pack(minutes) do
    minutes
    |> :binary.encode_unsigned()
    |> Base.url_encode64()
    |> String.replace("=", "")
  end

  # Reverse of pack/1: restore padding, decode, and read back the integer.
  defp unpack(time) do
    time
    |> pad()
    |> Base.url_decode64!()
    |> :binary.decode_unsigned()
  end

  # Compile-time generated clauses that restore the "=" padding stripped by
  # pack/1 so Base.url_decode64!/1 accepts the input.
  for size <- [0,1,2,3] do
    padding = if size != 0 do
      Stream.cycle(["="]) |> Enum.take(4 - size) |> to_string()
    else
      ""
    end
    defp pad(buf) when rem(byte_size(buf), 4) == unquote(size) do
      buf <> unquote(padding)
    end
  end
end
|
lib/clox.ex
| 0.820649
| 0.441131
|
clox.ex
|
starcoder
|
defmodule K8s.Client.Runner.Watch do
  @moduledoc """
  `K8s.Client` runner that will watch a resource or resources and stream results back to a process.
  """

  @resource_version_json_path ~w(metadata resourceVersion)

  alias K8s.Client.Runner.Base
  alias K8s.Operation

  @doc """
  Watch a resource or list of resources. Provide the `stream_to` option or results will be stream to `self()`.

  Note: Current resource version will be looked up automatically.

  ## Examples
  ```elixir
  conn = K8s.Conn.lookup(:test)
  operation = K8s.Client.list("v1", "Namespace")
  {:ok, reference} = Watch.run(operation, conn, stream_to: self())
  ```
  ```elixir
  conn = K8s.Conn.lookup(:test)
  operation = K8s.Client.get("v1", "Namespace", [name: "test"])
  {:ok, reference} = Watch.run(operation, conn, stream_to: self())
  ```
  """
  @spec run(Operation.t(), K8s.Conn.t(), keyword(atom)) :: Base.result_t()
  def run(%Operation{method: :get} = operation, conn, opts) do
    # Look up the current resourceVersion first, then watch from that point.
    # A failed lookup falls straight through as the error tuple.
    with {:ok, rv} <- get_resource_version(operation, conn) do
      run(operation, conn, rv, opts)
    end
  end

  def run(op, _, _),
    do: {:error, "Only HTTP GET operations (list, get) are supported. #{inspect(op)}"}

  @doc """
  Watch a resource or list of resources from a specific resource version. Provide the `stream_to` option or results will be stream to `self()`.

  ## Examples
  ```elixir
  conn = K8s.Conn.lookup(:test)
  operation = K8s.Client.list("v1", "Namespace")
  resource_version = 3003
  {:ok, reference} = Watch.run(operation, conn, resource_version, stream_to: self())
  ```
  ```elixir
  conn = K8s.Conn.lookup(:test)
  operation = K8s.Client.get("v1", "Namespace", [name: "test"])
  resource_version = 3003
  {:ok, reference} = Watch.run(operation, conn, resource_version, stream_to: self())
  ```
  """
  @spec run(Operation.t(), K8s.Conn.t(), binary, keyword(atom)) :: Base.result_t()
  def run(%Operation{method: :get, verb: verb} = operation, conn, rv, opts)
      when verb in [:list, :list_all_namespaces] do
    Base.run(operation, conn, add_watch_params_to_opts(opts, rv))
  end

  def run(%Operation{method: :get, verb: :get} = operation, conn, rv, opts) do
    # Watching a single resource is implemented as a list operation narrowed
    # down with a fieldSelector on the resource's name.
    {list_op, field_selector_param} = get_to_list(operation)

    merged_params = Map.merge(opts[:params] || %{}, field_selector_param)
    run(list_op, conn, rv, Keyword.put(opts, :params, merged_params))
  end

  def run(op, _, _, _),
    do: {:error, "Only HTTP GET operations (list, get) are supported. #{inspect(op)}"}

  # Runs the operation once (no watch) just to read back the current
  # resourceVersion; errors pass through untouched.
  @spec get_resource_version(Operation.t(), K8s.Conn.t()) :: {:ok, binary} | {:error, binary}
  defp get_resource_version(%Operation{} = operation, conn) do
    with {:ok, payload} <- Base.run(operation, conn) do
      {:ok, parse_resource_version(payload)}
    end
  end

  @spec add_watch_params_to_opts(keyword, binary) :: keyword
  defp add_watch_params_to_opts(opts, rv) do
    watch_params = %{"resourceVersion" => rv, "watch" => true}
    Keyword.put(opts, :params, Map.merge(opts[:params] || %{}, watch_params))
  end

  # Missing or non-map payloads default to resource version "0".
  @spec parse_resource_version(any) :: binary
  defp parse_resource_version(%{} = payload) do
    get_in(payload, @resource_version_json_path) || "0"
  end

  defp parse_resource_version(_), do: "0"

  # Rewrites a single-resource GET into the equivalent list operation plus the
  # fieldSelector param pinning metadata.name ("%3D" is a pre-encoded "=").
  defp get_to_list(get_op) do
    name = get_op.path_params[:name]
    field_selector = %{"fieldSelector" => "metadata.name%3D#{name}"}
    {%{get_op | verb: :list, path_params: []}, field_selector}
  end
end
|
lib/k8s/client/runner/watch.ex
| 0.888647
| 0.632531
|
watch.ex
|
starcoder
|
defmodule Exq.Redis.Connection do
  @moduledoc """
  The Connection module encapsulates interaction with a live Redis connection or pool.
  """

  require Logger

  alias Exq.Support.Config
  alias Exq.Support.Redis

  # Convention in this module: a trailing `!` unwraps the {:ok, result} tuple
  # from q/3 (crashing via MatchError on failure) and returns the bare Redis
  # reply; the non-bang variants return the {:ok, _} / {:error, _} tuple.

  ## Generic / key commands

  def flushdb!(redis) do
    {:ok, res} = q(redis, ["flushdb"])
    res
  end

  def decr!(redis, key) do
    {:ok, count} = q(redis, ["DECR", key])
    count
  end

  def incr!(redis, key) do
    {:ok, count} = q(redis, ["INCR", key])
    count
  end

  def get!(redis, key) do
    {:ok, val} = q(redis, ["GET", key])
    val
  end

  def set!(redis, key, val \\ 0) do
    q(redis, ["SET", key, val])
  end

  def del!(redis, key) do
    q(redis, ["DEL", key])
  end

  def expire!(redis, key, time \\ 10) do
    q(redis, ["EXPIRE", key, time])
  end

  def llen!(redis, list) do
    {:ok, len} = q(redis, ["LLEN", list])
    len
  end

  def keys!(redis, search \\ "*") do
    {:ok, keys} = q(redis, ["KEYS", search])
    keys
  end

  # Returns the raw SCAN reply: [next_cursor, keys].
  def scan!(redis, cursor, search, count) do
    {:ok, keys} = q(redis, ["SCAN", cursor, "MATCH", search, "COUNT", count])
    keys
  end

  ## Set commands

  def scard!(redis, set) do
    {:ok, count} = q(redis, ["SCARD", set])
    count
  end

  def smembers!(redis, set) do
    {:ok, members} = q(redis, ["SMEMBERS", set])
    members
  end

  def sadd!(redis, set, member) do
    {:ok, res} = q(redis, ["SADD", set, member])
    res
  end

  def srem!(redis, set, member) do
    {:ok, res} = q(redis, ["SREM", set, member])
    res
  end

  def sismember!(redis, set, member) do
    {:ok, res} = q(redis, ["SISMEMBER", set, member])
    res
  end

  ## List commands

  def lrange!(redis, list, range_start \\ "0", range_end \\ "-1") do
    {:ok, items} = q(redis, ["LRANGE", list, range_start, range_end])
    items
  end

  # When `value` is a list, each element is removed via its own LREM, batched
  # through a single pipeline round-trip.
  def lrem!(redis, list, value, count \\ 1, options \\ []) do
    {:ok, res} =
      if is_list(value) do
        commands = Enum.map(value, fn v -> ["LREM", list, count, v] end)
        qp(redis, commands, options)
      else
        q(redis, ["LREM", list, count, value], options)
      end

    res
  end

  def rpush!(redis, key, value) do
    {:ok, res} = q(redis, ["RPUSH", key, value])
    res
  end

  def lpush!(redis, key, value) do
    {:ok, res} = q(redis, ["LPUSH", key, value])
    res
  end

  def lpop(redis, key) do
    q(redis, ["LPOP", key])
  end

  ## Sorted-set commands

  def zadd(redis, set, score, member, options \\ []) do
    q(redis, ["ZADD", set, score, member], options)
  end

  def zadd!(redis, set, score, member) do
    {:ok, res} = q(redis, ["ZADD", set, score, member])
    res
  end

  def zcard!(redis, set) do
    {:ok, count} = q(redis, ["ZCARD", set])
    count
  end

  def zcount!(redis, set, min \\ "-inf", max \\ "+inf") do
    {:ok, count} = q(redis, ["ZCOUNT", set, min, max])
    count
  end

  def zrangebyscore!(redis, set, min \\ "0", max \\ "+inf") do
    {:ok, items} = q(redis, ["ZRANGEBYSCORE", set, min, max])
    items
  end

  def zrangebyscorewithlimit!(redis, set, offset, size, min \\ "0", max \\ "+inf") do
    {:ok, items} = q(redis, ["ZRANGEBYSCORE", set, min, max, "LIMIT", offset, size])
    items
  end

  def zrangebyscore(redis, set, min \\ "0", max \\ "+inf") do
    q(redis, ["ZRANGEBYSCORE", set, min, max])
  end

  def zrangebyscorewithscore!(redis, set, min \\ "0", max \\ "+inf") do
    {:ok, items} = q(redis, ["ZRANGEBYSCORE", set, min, max, "WITHSCORES"])
    items
  end

  def zrangebyscorewithscoreandlimit!(redis, set, offset, size, min \\ "0", max \\ "+inf") do
    {:ok, items} = q(redis, ["ZRANGEBYSCORE", set, min, max, "WITHSCORES", "LIMIT", offset, size])
    items
  end

  def zrangebyscorewithscore(redis, set, min \\ "0", max \\ "+inf") do
    q(redis, ["ZRANGEBYSCORE", set, min, max, "WITHSCORES"])
  end

  # ZREVRANGEBYSCORE takes its bounds as (max, min) — the argument order below
  # is intentional.
  def zrevrangebyscorewithlimit!(redis, set, offset, size, min \\ "0", max \\ "+inf") do
    {:ok, items} = q(redis, ["ZREVRANGEBYSCORE", set, max, min, "LIMIT", offset, size])
    items
  end

  def zrevrangebyscorewithscoreandlimit!(redis, set, offset, size, min \\ "0", max \\ "+inf") do
    {:ok, items} =
      q(redis, ["ZREVRANGEBYSCORE", set, max, min, "WITHSCORES", "LIMIT", offset, size])

    items
  end

  def zrange!(redis, set, range_start \\ "0", range_end \\ "-1") do
    {:ok, items} = q(redis, ["ZRANGE", set, range_start, range_end])
    items
  end

  # Removing several members at once uses a single variadic ZREM.
  def zrem!(redis, set, members) when is_list(members) do
    {:ok, res} = q(redis, ["ZREM", set | members])
    res
  end

  def zrem!(redis, set, member) do
    {:ok, res} = q(redis, ["ZREM", set, member])
    res
  end

  def zrem(redis, set, member) do
    q(redis, ["ZREM", set, member])
  end

  ## Query plumbing

  # Runs a single Redis command, optionally retrying on connection errors
  # (opt-in via :retry_on_connection_error; defaults to no retries).
  def q(redis, command, options \\ []) do
    Redis.with_retry_on_connection_error(
      fn ->
        redis
        |> Redix.command(command, timeout: Config.get(:redis_timeout))
        |> handle_response(redis)
      end,
      Keyword.get(options, :retry_on_connection_error, 0)
    )
  end

  # Pipelined variant of q/3: `command` is a list of commands.
  def qp(redis, command, options \\ []) do
    Redis.with_retry_on_connection_error(
      fn ->
        redis
        |> Redix.pipeline(command, timeout: Config.get(:redis_timeout))
        |> handle_responses(redis)
      end,
      Keyword.get(options, :retry_on_connection_error, 0)
    )
  end

  # Raising variant of qp/3 (Redix.pipeline!/3 raises on connection errors).
  def qp!(redis, command, options \\ []) do
    Redis.with_retry_on_connection_error(
      fn ->
        redis
        |> Redix.pipeline!(command, timeout: Config.get(:redis_timeout))
        |> handle_responses(redis)
      end,
      Keyword.get(options, :retry_on_connection_error, 0)
    )
  end

  ## Response handling

  # A READONLY error means we are talking to a node that was demoted to
  # replica (e.g. after a failover) — force a reconnect so the supervisor
  # re-resolves the master.
  defp handle_response({:error, %{message: "READONLY" <> _rest}} = error, redis) do
    disconnect(redis)
    error
  end

  # NOSCRIPT errors are passed through so the caller can re-load the script.
  defp handle_response({:error, %{message: "NOSCRIPT" <> _rest}} = error, _) do
    error
  end

  # Disconnections are returned untouched (retry handling lives in q/3).
  defp handle_response({:error, %Redix.ConnectionError{reason: :disconnected}} = error, _) do
    error
  end

  defp handle_response({:error, message} = error, _) do
    Logger.error(inspect(message))
    error
  end

  defp handle_response(response, _) do
    response
  end

  defp handle_responses({:ok, responses} = result, redis) do
    # Disconnect once for multiple readonly redis node errors.
    if Enum.any?(responses, &readonly_error?/1) do
      disconnect(redis)
    end

    result
  end

  defp handle_responses(responses, redis) when is_list(responses) do
    # Disconnect once for multiple readonly redis node errors.
    if Enum.any?(responses, &readonly_error?/1) do
      disconnect(redis)
    end

    responses
  end

  defp handle_responses(responses, _) do
    responses
  end

  defp readonly_error?(%{message: "READONLY" <> _rest}), do: true
  defp readonly_error?(_), do: false

  # Kills the registered Redix process so its supervisor restarts it with a
  # fresh connection; sleeps briefly to give the restart a chance to finish.
  defp disconnect(redis) do
    pid = Process.whereis(redis)

    if !is_nil(pid) && Process.alive?(pid) do
      # Let the supervisor restart the process with a new connection.
      Logger.error("Redis failover - forcing a reconnect")
      Process.exit(pid, :kill)
      # Give the process some time to be restarted.
      :timer.sleep(100)
    end
  end
end
|
lib/exq/redis/connection.ex
| 0.695958
| 0.616503
|
connection.ex
|
starcoder
|
defmodule Zaryn.SelfRepair.Sync.BeaconSummaryHandler.NetworkStatistics do
  @moduledoc false

  use GenServer

  alias Zaryn.PubSub
  alias Zaryn.Utils

  require Logger

  # ETS table definitions. :zaryn_tps is an ordered set keyed by DateTime so
  # :ets.last/1 yields the most recent record; :zaryn_stats is a plain set of
  # counters.
  @tables [
    zaryn_tps: [:named_table, :ordered_set, :public, read_concurrency: true],
    zaryn_stats: [:named_table, :set, :public, read_concurrency: true]
  ]

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts)
  end

  @impl true
  def init(_opts) do
    init_dump_dir()

    # Restore each table from its on-disk dump when one exists, otherwise
    # start with a fresh empty table.
    Enum.each(@tables, fn {table, ets_opts} -> load_or_create_table(table, ets_opts) end)

    {:ok, []}
  end

  # Fix: the previous implementation called :ets.file2tab/1 unconditionally
  # and silently discarded its {:error, _} result on first boot (no dump file
  # yet). Make the load-vs-create decision explicit, and fall back to a fresh
  # table (with a logged error) when an existing dump cannot be read.
  defp load_or_create_table(table, ets_opts) do
    dump_file = dump_filename(table)

    if File.exists?(dump_file) do
      case :ets.file2tab(String.to_charlist(dump_file)) do
        {:ok, ^table} ->
          :ok

        {:error, reason} ->
          Logger.error("Could not restore #{table} from #{dump_file}: #{inspect(reason)}")
          :ets.new(table, ets_opts)
          :ok
      end
    else
      :ets.new(table, ets_opts)
      :ok
    end
  end

  @doc """
  Return the latest TPS record

  ## Examples

      iex> NetworkStatistics.start_link()
      iex> NetworkStatistics.register_tps(~U[2021-02-02 00:00:00Z], 10.0, 100)
      iex> NetworkStatistics.register_tps(~U[2021-02-03 00:00:00Z], 100.0, 1000)
      iex> NetworkStatistics.get_latest_tps()
      100.0
  """
  @spec get_latest_tps :: float()
  def get_latest_tps do
    # :zaryn_tps is ordered by its DateTime key, so the last entry is the most
    # recent registration; 0.0 when nothing has been registered yet.
    case :ets.last(:zaryn_tps) do
      :"$end_of_table" ->
        0.0

      key ->
        [{_, tps, _nb_transactions}] = :ets.lookup(:zaryn_tps, key)
        tps
    end
  end

  @doc """
  Returns the number of transactions
  """
  @spec get_nb_transactions() :: non_neg_integer()
  def get_nb_transactions do
    case :ets.lookup(:zaryn_stats, :nb_transactions) do
      [] ->
        0

      [{_, nb}] ->
        nb
    end
  end

  @doc """
  Increment the number of transactions by the given number
  """
  @spec increment_number_transactions(non_neg_integer()) :: :ok
  def increment_number_transactions(nb \\ 1) when is_integer(nb) and nb >= 0 do
    # {0, 0} is the default object inserted when the counter does not exist
    # yet (see :ets.update_counter/4).
    new_nb = :ets.update_counter(:zaryn_stats, :nb_transactions, nb, {0, 0})
    dump_table(:zaryn_stats)
    PubSub.notify_new_transaction_number(new_nb)
    :ok
  end

  @doc """
  Register a new TPS for the given date
  """
  @spec register_tps(DateTime.t(), float(), non_neg_integer()) :: :ok
  def register_tps(date = %DateTime{}, tps, nb_transactions)
      when is_float(tps) and tps >= 0.0 and is_integer(nb_transactions) and nb_transactions >= 0 do
    Logger.info(
      "TPS #{tps} on #{Utils.time_to_string(date)} with #{nb_transactions} transactions"
    )

    true = :ets.insert(:zaryn_tps, {date, tps, nb_transactions})
    :ok = dump_table(:zaryn_tps)
    PubSub.notify_new_tps(tps)
    :ok
  end

  # Persist a table to disk so it survives restarts (loaded back in init/1).
  defp dump_table(table) when is_atom(table) do
    filename =
      table
      |> dump_filename()
      |> String.to_charlist()

    :ets.tab2file(table, filename)
  end

  defp dump_filename(table) do
    Path.join(dump_dirname(), Atom.to_string(table))
  end

  defp init_dump_dir do
    File.mkdir_p!(dump_dirname())
  end

  defp dump_dirname do
    dump_dir = Application.get_env(:zaryn, __MODULE__) |> Keyword.fetch!(:dump_dir)
    Utils.mut_dir(dump_dir)
  end
end
|
lib/zaryn/self_repair/sync/beacon_summary_handler/network_statistics.ex
| 0.792263
| 0.419559
|
network_statistics.ex
|
starcoder
|
defmodule QueryBuilder.JoinMaker do
  @moduledoc false

  require Ecto.Query

  @doc ~S"""
  Builds the joins required by the given association token onto `query`.

  Options may be:

  * `:mode`: if set to `:if_preferable`, schemas are joined only if it is better
  performance-wise; this happens only for one case: when the association has a
  one-to-one cardinality, it is better to join and include the association's result
  in the result set of the query, rather than emitting a new DB query.

  * `:type`: see `Ecto.Query.join/5`'s qualifier argument for possible values.

  Returns `{query, token}` where `token` mirrors the input with `has_joined`
  flags updated.
  """
  def make_joins(query, token, options \\ []) do
    _make_joins(query, token, bindings(token), options, [])
  end

  # Walks the association token depth-first, joining each association at most
  # once (tracked through `bindings`).
  defp _make_joins(query, [], _, _, new_token), do: {query, new_token}

  defp _make_joins(query, [assoc_data | tail], bindings, options, new_token) do
    mode = Keyword.get(options, :mode)
    type = Keyword.get(options, :type, :inner)

    {query, assoc_data, bindings} = maybe_join(query, assoc_data, bindings, mode, type)

    # Only recurse into nested associations when the parent was actually
    # joined; otherwise the nested joins would have nothing to attach to.
    {query, nested_assocs} =
      if assoc_data.has_joined do
        _make_joins(query, assoc_data.nested_assocs, bindings, options, [])
      else
        {query, assoc_data.nested_assocs}
      end

    assoc_data = %{assoc_data | nested_assocs: nested_assocs}

    {query, new_token} = _make_joins(query, tail, bindings, options, new_token)

    {query, [assoc_data | new_token]}
  end

  # Already joined: nothing to do.
  defp maybe_join(query, %{has_joined: true} = assoc_data, bindings, _, _),
    do: {query, assoc_data, bindings}

  # In :if_preferable mode, one-to-many associations are never joined — a
  # separate query is preferable for :many cardinality (see make_joins/3 doc).
  defp maybe_join(query, %{cardinality: :many} = assoc_data, bindings, :if_preferable, _type),
    do: {query, assoc_data, bindings}

  defp maybe_join(query, assoc_data, bindings, _mode, type) do
    %{
      source_binding: source_binding,
      source_schema: source_schema,
      assoc_binding: assoc_binding,
      assoc_field: assoc_field,
      assoc_schema: assoc_schema
    } = assoc_data

    # Fix: replaced the discouraged `unless ... else` construct (`unless` is
    # deprecated in recent Elixir) with a positively-phrased `if`, and
    # `Enum.member?/2` with the idiomatic `in` operator. Behavior unchanged.
    if assoc_binding in bindings do
      {query, assoc_data, bindings}
    else
      # see schema.ex's module doc in order to understand what's going on here
      query =
        if String.contains?(to_string(assoc_binding), "__") do
          source_schema._join(query, type, source_binding, assoc_field)
        else
          assoc_schema._join(query, type, source_binding, assoc_field)
        end

      {
        query,
        %{assoc_data | has_joined: true},
        [assoc_binding | bindings]
      }
    end
  end

  # Collects the bindings of every association already joined anywhere in the
  # (nested) token.
  defp bindings([]), do: []

  defp bindings([assoc_data | tail]) do
    list = bindings(assoc_data.nested_assocs) ++ bindings(tail)

    if assoc_data.has_joined do
      [assoc_data.assoc_binding | list]
    else
      list
    end
  end
end
|
lib/join_maker.ex
| 0.695648
| 0.463687
|
join_maker.ex
|
starcoder
|
defmodule IntSort do
@moduledoc """
Contains functionality for sorting and chunking integers as well as merging the
chunk files
"""
alias IntSort.Chunk
alias IntSort.IntermediateFile
@integer_file Application.get_env(:int_sort, :integer_file)
# The chunk files represent the first generation of merge files so the first files
# that are merged will be Gen 2
@initial_merge_gen 2
@doc """
Chunks an integer file and writes the sorted chunks to chunk files
## Parameters
- input_file: the path to the file to be read
- output_dir: the path to the directory where the output files are to written
- chunk_size: the size of the chunks to be created
- gen: the generation number to be used for the chunk files
## Returns
A stream that emits chunk file names
"""
@spec create_chunk_files(String.t(), String.t(), pos_integer(), non_neg_integer()) :: Enum.t()
def create_chunk_files(input_file, output_dir, chunk_size, gen) do
# Create a stream pipeline that reads in integers from the input stream,
# chunks them, sorts them, and then writes the chunks to files
@integer_file.integer_file_stream(input_file)
|> @integer_file.read_stream()
|> Chunk.create_chunks(chunk_size)
|> Chunk.sort_chunks()
|> Chunk.write_chunks_to_separate_streams(gen, fn gen, chunk_num ->
IntermediateFile.intermediate_file_stream(gen, chunk_num, &gen_file_name/2, output_dir)
end)
|> Stream.with_index(1)
|> Stream.map(fn {_, chunk_num} -> gen_file_name(gen, chunk_num) end)
|> Stream.map(fn file_name -> Path.join(output_dir, file_name) end)
end
@doc """
Counts the number of integers in an input file
This function assumes that the input file is a valid integer file.
## Parameters
- input_file: The input file whose integers are to be counted
## Returns
The number of integers found in the file
"""
@spec integer_count(String.t()) :: non_neg_integer()
def integer_count(input_file) do
@integer_file.integer_file_stream(input_file)
|> @integer_file.integer_count()
end
@doc """
Repeatedly merges a collection of intermediate files until only a single
merged file remains.

## Parameters

- `files`: the paths of the intermediate files to be merged
- `merge_count`: how many files are merged together in a single merge group;
  must be at least 2
- `gen_file_name`: function producing the path of a gen file (the intermediate
  output of one merge group); receives the merge generation number and the
  merge group number
- `merge_file_gen`: function that performs one entire merge generation; see
  `IntSort.merge_intermediate_files/4`, which is normally what is passed here
  (a different function may be injected, e.g. for testing)
- `remove_files`: function that deletes intermediate files that are no longer
  needed; receives the collection of paths (pass a no-op to keep the files)
- `integer_merged`: progress callback invoked as integers are merged; receives
  the merge generation and the number of integers merged so far in it
- `merge_gen_completed`: callback invoked when a generation finishes; receives
  the generation number and the number of files that generation produced

## Returns

The path of the single file containing all of the merged integers.
"""
@spec total_merge(
        Enum.t(),
        pos_integer(),
        (non_neg_integer(), non_neg_integer() -> String.t()),
        (Enum.t(),
         pos_integer(),
         (non_neg_integer() -> String.t()),
         (non_neg_integer() -> :ok) ->
           Enum.t()),
        (Enum.t() -> :ok),
        (non_neg_integer(), non_neg_integer() -> :ok),
        (non_neg_integer(), non_neg_integer() -> :ok)
      ) :: String.t()
def total_merge(
      files,
      merge_count,
      gen_file_name,
      merge_file_gen,
      remove_files,
      integer_merged \\ fn _, _ -> :ok end,
      merge_gen_completed \\ fn _, _ -> :ok end
    )
    when merge_count > 1 do
  # Recursively merge generation after generation until exactly one file
  # remains; the single-element match asserts that invariant.
  [final_file] =
    do_total_merge(
      files,
      Enum.count(files),
      @initial_merge_gen,
      merge_count,
      gen_file_name,
      merge_file_gen,
      remove_files,
      integer_merged,
      merge_gen_completed
    )

  final_file
end
# Recursive worker behind `total_merge/7`: merges one generation of files,
# then recurses on that generation's output until at most one file remains.
# Returns the files produced by the final merge iteration.
@spec do_total_merge(
        Enum.t(),
        non_neg_integer(),
        non_neg_integer(),
        pos_integer(),
        (non_neg_integer(), non_neg_integer() -> String.t()),
        (Enum.t(),
         pos_integer(),
         (non_neg_integer() -> String.t()),
         (non_neg_integer() -> :ok) ->
           Enum.t()),
        (Enum.t() -> :ok),
        (non_neg_integer(), non_neg_integer() -> :ok),
        (non_neg_integer(), non_neg_integer() -> :ok)
      ) :: Enum.t()
defp do_total_merge(files, file_count, _, _, _, _, _, _, _) when file_count <= 1 do
  # Base case: zero or one file left, so there is nothing more to merge.
  files
end

defp do_total_merge(
       files,
       _,
       merge_gen,
       merge_count,
       gen_file_name,
       merge_file_gen,
       remove_files,
       integer_merged,
       merge_gen_completed
     ) do
  # Bind the current generation number into the file-naming and
  # progress callbacks handed to the merge function.
  name_merge_file = fn group_num -> gen_file_name.(merge_gen, group_num) end
  on_integer_merged = fn count -> integer_merged.(merge_gen, count) end

  # Run this generation's merge eagerly so its output files exist before
  # the inputs are deleted below.
  merged_files =
    files
    |> merge_file_gen.(merge_count, name_merge_file, on_integer_merged)
    |> Enum.to_list()

  # Report generation completion, then delete the now-redundant inputs.
  merge_gen_completed.(merge_gen, Enum.count(merged_files))
  remove_files.(files)

  # Recurse into the next generation using the freshly merged files.
  do_total_merge(
    merged_files,
    Enum.count(merged_files),
    merge_gen + 1,
    merge_count,
    gen_file_name,
    merge_file_gen,
    remove_files,
    integer_merged,
    merge_gen_completed
  )
end
@doc """
Does a single round of merges on a collection of intermediate files.

This function only does a single round, merging groups of intermediate
files together, where each group contains at most `merge_count` files.
Merging F files therefore results in ceil(F/merge_count) files containing
the merged integers. This function will likely be called multiple times
until it results in a single file.

## Parameters

- files: A collection of file names of the intermediate files to be merged
- merge_count: The number of files to be merged at once
- merge_file_name: A function that takes in the merge group number and
  returns the file name to use for the merge file
- integer_merged: A function that is called when an integer is merged. This
  function takes a single parameter, which is the number of integers that have
  been merged during this round of merges. This function can be used to display
  or measure merge progress.

## Returns

A stream that emits the file names containing the merged integers from this
round
"""
@spec merge_intermediate_files(
        Enum.t(),
        pos_integer(),
        (non_neg_integer() -> String.t()),
        (non_neg_integer() -> :ok)
      ) :: Enum.t()
def merge_intermediate_files(
      files,
      merge_count,
      merge_file_name,
      integer_merged \\ fn _ -> :ok end
    ) do
  files
  # Convert the files to file groups
  |> IntermediateFile.create_file_groups(merge_count)
  # Merge each file group, threading the cumulative integer count through
  # the scan accumulator so progress callbacks see an overall total
  |> Stream.scan({[], 0}, fn {file_group, group_num}, {_, total_count} ->
    # Get the file name for this group's merged file
    group_file_name = merge_file_name.(group_num)

    # Create the function that is called every time an integer in the file
    # group is merged
    group_integer_merged = fn count ->
      # Transform the internal group count to an overall integer count
      integer_merged.(total_count + count)
    end

    # Call the function to do the merging for this file group and count how many
    # integers are being merged, which also has the effect of causing the stream
    # processing to start running. (Named `group_merged_count` rather than
    # `merge_count` so it does not shadow the `merge_count` parameter.)
    group_merged_count =
      merge_file_group(file_group, group_file_name, group_integer_merged)
      |> Enum.count()

    # Return the file name and the cumulative number of merged integers
    {group_file_name, total_count + group_merged_count}
  end)
  # We now have a stream of merge file names and integer counts. Strip out the integer counts.
  |> Stream.map(fn {group_file_name, _} -> group_file_name end)
end
@doc """
Builds the intermediate file name for a given merge generation and chunk.

## Parameters

- gen: the merge generation the file belongs to
- num: the chunk (group) number associated with the file

## Returns

A file name that encodes both the generation and the chunk number
"""
@spec gen_file_name(non_neg_integer(), non_neg_integer()) :: String.t()
def gen_file_name(gen, num), do: "gen#{gen}-#{num}.txt"
# Merges one group of integer files into a single output file. Returns the
# stream that emits each integer as it is written to the output file.
@spec merge_file_group(Enum.t(), String.t(), (non_neg_integer() -> :ok)) :: Enum.t()
defp merge_file_group(file_group, merged_file, integer_merged) do
  # Open a read device for every file in the group and merge the devices
  # into a single stream; each device is closed via the supplied callback.
  merge_stream =
    file_group
    |> Enum.map(&@integer_file.read_device!/1)
    |> IntermediateFile.merge_stream(&@integer_file.close_device/1)

  # Open a stream for the output file.
  output_stream = @integer_file.integer_file_stream(merged_file)

  # Write the merged integers to the output file, numbering each one (from 1)
  # so the progress callback receives a running count.
  merge_stream
  |> @integer_file.write_integers_to_stream(output_stream)
  |> Stream.with_index(1)
  |> Stream.each(fn {_, count} -> integer_merged.(count) end)
end
end
|
int_sort/lib/int_sort.ex
| 0.876674
| 0.7007
|
int_sort.ex
|
starcoder
|
defmodule PayPal.Payments.Authorizations do
  @moduledoc """
  Documentation for PayPal.Payments.Authorizations

  https://developer.paypal.com/docs/api/payments/#authorization
  """

  @doc """
  Show an authorization

  [docs](https://developer.paypal.com/docs/api/payments/#authorization_get)

  Possible returns:

  - {:ok, authorization}
  - {:error, reason}

  ## Examples

      iex> PayPal.Payments.Authorizations.show(authorization_id)
      {:ok, authorization}
  """
  @spec show(String.t()) :: {atom, any}
  def show(authorization_id) do
    PayPal.API.get("payments/authorization/#{authorization_id}")
  end

  @doc """
  Capture an authorization

  [docs](https://developer.paypal.com/docs/api/payments/#authorization_capture)

  Possible returns:

  - {:ok, capture}
  - {:error, reason}

  ## Examples

      iex> PayPal.Payments.Authorizations.capture(authorization_id, %{
        amount: %{
          currency: "USD",
          amount: "4.54"
        },
        is_final_capture: true
      })
      {:ok, capture}
  """
  @spec capture(String.t(), map) :: {atom, any}
  def capture(authorization_id, params) do
    PayPal.API.post("payments/authorization/#{authorization_id}/capture", params)
  end

  @doc """
  Void an authorization

  [docs](https://developer.paypal.com/docs/api/payments/#authorization_void)

  Possible returns:

  - {:ok, authorization}
  - {:error, reason}

  ## Examples

      iex> PayPal.Payments.Authorizations.void(authorization_id)
      {:ok, authorization}
  """
  @spec void(String.t()) :: {atom, any}
  def void(authorization_id) do
    # The void endpoint takes no request body, hence the nil payload.
    PayPal.API.post("payments/authorization/#{authorization_id}/void", nil)
  end

  @doc """
  Reauthorize a payment

  [docs](https://developer.paypal.com/docs/api/payments/#authorization_reauthorize)

  Possible returns:

  - {:ok, authorization}
  - {:error, reason}

  ## Examples

      iex> PayPal.Payments.Authorizations.reauthorize(authorization_id, %{
        amount: %{
          currency: "USD",
          amount: "4.54"
        }
      })
      {:ok, authorization}
  """
  @spec reauthorize(String.t(), map) :: {atom, any}
  def reauthorize(authorization_id, params) do
    PayPal.API.post("payments/authorization/#{authorization_id}/reauthorize", params)
  end
end
|
lib/payments/authorizations.ex
| 0.677367
| 0.468243
|
authorizations.ex
|
starcoder
|
defmodule Finance.Numerical.Root do
  @moduledoc """
  Method for finding the roots of any one-dimensional function

  ```
  f(x) = 0
  ```
  """

  # NOTE(review): nesting `defmodule Finance.Numerical.Iteration` inside this
  # module actually defines `Finance.Numerical.Root.Finance.Numerical.Iteration`
  # (aliased as `Finance.Numerical.Iteration` within this lexical scope only) —
  # confirm this naming is intentional.
  defmodule Finance.Numerical.Iteration do
    @moduledoc false
    # Accumulator struct threaded through the bisection and Newton-Raphson
    # iteration steps below.
    defstruct [
      # function f(x)
      :f,
      # first derivative of f(x)
      :fd,
      # minimum value bounding the root
      :lower_bound,
      :flower_bound,
      # current estimated value of x
      :est,
      :fest,
      # maximum value bounding the root
      :upper_bound,
      :fupper_bound,
      # required tolerance
      :tol,
      # number of iterations left
      :left,
      # step size of current iteration
      :dx,
      # step size of previous iteration
      # (NOTE(review): `pdx` is written by the iteration functions below but
      # never read by any clause in this module — possibly dead state.)
      :pdx
    ]
  end

  @doc """
  Find an upper and lower bound around an initial guess that brackets a root.

  ## Example

  The function 3x^2+5x+2 = 0 has two roots at -1 and -2/3

      iex> f = fn(x) -> 3*x*x + 5*x + 2 end
      iex> Finance.Numerical.Root.bracket(f, -0.7)
      {:ok, -0.71, -0.6579999999999999}
  """
  @default_bracket_precision 2
  @bracket_step_size 1.6
  def bracket(f, guess, precision \\ @default_bracket_precision) do
    # Initial half-width: a few orders of magnitude (`precision`) below the
    # magnitude of the guess, falling back to 1.0 when the guess is exactly 0.0
    # (so log10 is never taken of zero).
    dt = :math.pow(10.0, round(:math.log10(abs((guess == 0.0 && 1.0) || guess))) - precision)
    # Widen the interval (at most 10 attempts) until f changes sign across it.
    bracket_step(f, guess - dt, f.(guess - dt), guess + dt, f.(guess + dt), 10)
  end

  # Iteration budget exhausted without finding a sign change.
  defp bracket_step(_f, _l, _fl, _u, _fu, _niter = 0) do
    {:error, "Unable to find a possible root around the guess"}
  end

  # Opposite signs at the bounds: a root is bracketed.
  defp bracket_step(_f, l, fl, u, fu, _niter) when fl * fu < 0.0 do
    {:ok, l, u}
  end

  # |f(l)| < |f(u)|: expand downward by moving the lower bound.
  defp bracket_step(f, l, fl, u, fu, niter) when abs(fl) < abs(fu) do
    x = l + @bracket_step_size * (l - u)
    bracket_step(f, x, f.(x), u, fu, niter - 1)
  end

  # Otherwise expand upward by moving the upper bound.
  defp bracket_step(f, l, fl, u, _fu, niter) do
    x = u + @bracket_step_size * (u - l)
    bracket_step(f, l, fl, x, f.(x), niter - 1)
  end

  @doc """
  Bisection method iteratively finds the root of a function
  by the process of successively reducing the initial bounds. In
  iteration step the bounds sizes will reduce by half, thus the
  number iterations required to achieve a given tolerance (t) given
  the initial bounds size (e) is given by

      n = log2(e/t)

  If the initial bounds encompass one of more roots the bisection
  method will converge to one of these roots.

  This method has a preset function which will quit after 40 iterations and
  should achieve a tolerance of about 1e-12, i.e 2^-40 ~ 1e-12

  ## Examples

  The function 3x^2+5x+2 = 0 has two roots at -1 and -2/3

      iex> f = fn(x) -> 3*x*x + 5*x + 2 end
      iex> {:ok, root, niter} = Finance.Numerical.Root.bisection(f, -1.2, -0.7)
      iex> Float.round(root, 12)
      -1.0
      iex> niter
      39
      iex> {:ok, root, niter} = Finance.Numerical.Root.bisection(f, -0.7, 0.0)
      iex> Float.round(root, 12)
      -0.666666666667
      iex> niter
      40
  """
  @default_bisection_tolerance 1.0e-12
  @default_bisection_max_iterations 41
  def bisection(
        f,
        lower_bound,
        upper_bound,
        tolerance \\ @default_bisection_tolerance,
        niters \\ @default_bisection_max_iterations
      )
      when lower_bound < upper_bound do
    # First estimate is the midpoint of the initial bounds.
    est = (lower_bound + upper_bound) / 2.0

    bisection(
      %Finance.Numerical.Iteration{
        f: f,
        lower_bound: lower_bound,
        flower_bound: f.(lower_bound),
        est: est,
        fest: f.(est),
        upper_bound: upper_bound,
        fupper_bound: f.(upper_bound),
        tol: tolerance,
        left: niters
      },
      niters
    )
  end

  # Runs the iteration and converts "iterations left" into "iterations used".
  defp bisection(iter = %Finance.Numerical.Iteration{}, niters) do
    case bisection_step(iter) do
      {:ok, est, left} -> {:ok, est, niters - left}
      {:error, msg} -> {:error, msg}
    end
  end

  # Converged: the bracket has shrunk to within the tolerance.
  defp bisection_step(%Finance.Numerical.Iteration{
         lower_bound: lower_bound,
         est: est,
         upper_bound: upper_bound,
         tol: tol,
         left: left
       })
       when abs(upper_bound - lower_bound) <= tol,
       do: {:ok, est, left}

  # Iteration budget exhausted: return the best estimate so far.
  defp bisection_step(%Finance.Numerical.Iteration{est: est, left: 0}), do: {:ok, est, 0}

  # Same sign at both bounds: no (single) root bracketed.
  defp bisection_step(%Finance.Numerical.Iteration{
         flower_bound: flower_bound,
         fupper_bound: fupper_bound
       })
       when flower_bound * fupper_bound > 0,
       do:
         {:error,
          "lower_bound and upper_bound do not bracket a root, or possibly bracket multiple roots"}

  # f(lower) and f(est) share a sign: the root is in [est, upper], so the
  # estimate becomes the new lower bound.
  defp bisection_step(
         iter = %Finance.Numerical.Iteration{
           f: f,
           flower_bound: flower_bound,
           est: est,
           fest: fest,
           upper_bound: upper_bound,
           left: left
         }
       )
       when flower_bound * fest > 0.0 do
    nest = (est + upper_bound) / 2.0

    bisection_step(%Finance.Numerical.Iteration{
      iter
      | lower_bound: est,
        flower_bound: fest,
        est: nest,
        fest: f.(nest),
        left: left - 1
    })
  end

  # Otherwise the root is in [lower, est]: the estimate becomes the new
  # upper bound.
  defp bisection_step(
         iter = %Finance.Numerical.Iteration{
           f: f,
           lower_bound: lower_bound,
           est: est,
           fest: fest,
           left: left
         }
       ) do
    nest = (lower_bound + est) / 2.0

    bisection_step(%Finance.Numerical.Iteration{
      iter
      | est: nest,
        fest: f.(nest),
        upper_bound: est,
        fupper_bound: fest,
        left: left - 1
    })
  end

  @doc """
  Newton Raphson method requires the evaluation of both the function f(x) and its derivative.
  The method can display a very rapid convergence to the root, however it can be become unstable
  when the initial estimate is too close to any local lower_bound or upper_bound minima. There is also the
  possibility that the method can get trapped in a non-convergent cycle.

  ## Examples

  The function 3x^2+5x+2 = 0 has two roots at -1 and -2/3

      iex> f = fn(x) -> 3*x*x + 5*x + 2 end
      iex> fd = fn(x) -> 6*x + 5 end
      iex> {:ok, root, iters} = Finance.Numerical.Root.newton_raphson(f, fd, -1.3, -0.9)
      iex> Float.round(root, 12)
      -1.0
      iex> iters
      5
      iex> {:ok, root, iters} = Finance.Numerical.Root.newton_raphson(f, fd, -0.7, 0.0)
      iex> Float.round(root, 12)
      -0.666666666667
      iex> iters
      6
  """
  @default_newton_raphson_tolerance 1.0e-12
  @default_newton_raphson_max_iterations 10
  def newton_raphson(
        f,
        fd,
        lower_bound,
        upper_bound,
        tolerance \\ @default_newton_raphson_tolerance,
        niters \\ @default_newton_raphson_max_iterations
      )
      when lower_bound < upper_bound do
    # Start from the midpoint of the bounds; dx is the first Newton step.
    est = (lower_bound + upper_bound) / 2.0
    fest = f.(est)

    newton_raphson(
      %Finance.Numerical.Iteration{
        f: f,
        fd: fd,
        lower_bound: lower_bound,
        flower_bound: f.(lower_bound),
        est: est,
        fest: fest,
        upper_bound: upper_bound,
        fupper_bound: f.(upper_bound),
        tol: tolerance,
        left: niters,
        dx: newton_raphson_dx(fest, fd.(est)),
        pdx: (upper_bound - lower_bound) / 2.0
      },
      niters
    )
  end

  # Newton step f(x)/f'(x). A zero derivative would divide by zero; returning
  # 0.0 instead makes abs(dx) <= tol on the next step, so the current estimate
  # is reported as converged — NOTE(review): confirm this is the intended
  # handling of a stationary point.
  defp newton_raphson_dx(_fest, _fdest = 0.0), do: 0.0
  defp newton_raphson_dx(fest, fdest), do: fest / fdest

  # Runs the iteration and converts "iterations left" into "iterations used".
  defp newton_raphson(iter = %Finance.Numerical.Iteration{}, niters) do
    case newton_raphson_step(iter) do
      {:ok, est, left} -> {:ok, est, niters - left}
      {:error, msg} -> {:error, msg}
    end
  end

  # Same sign at both bounds: no (single) root bracketed.
  defp newton_raphson_step(%Finance.Numerical.Iteration{
         flower_bound: flower_bound,
         fupper_bound: fupper_bound
       })
       when flower_bound * fupper_bound > 0.0,
       do:
         {:error,
          "lower_bound and upper_bound do not bracket a root, or possibly bracket multiple roots"}

  # The estimate has left the initial bracket: abort rather than diverge.
  defp newton_raphson_step(%Finance.Numerical.Iteration{
         lower_bound: lower_bound,
         est: est,
         upper_bound: upper_bound
       })
       when (lower_bound - est) * (est - upper_bound) < 0.0,
       do: {:error, "stepped outside of initial bounds"}

  # Converged: the last Newton step is within the tolerance.
  defp newton_raphson_step(%Finance.Numerical.Iteration{est: est, left: left, tol: tol, dx: dx})
       when abs(dx) <= tol,
       do: {:ok, est, left}

  # Iteration budget exhausted: return the best estimate so far.
  defp newton_raphson_step(%Finance.Numerical.Iteration{est: est, left: 0}), do: {:ok, est, 0}

  # Take one Newton step: x' = x - f(x)/f'(x), recording the previous step.
  defp newton_raphson_step(
         iter = %Finance.Numerical.Iteration{f: f, fd: fd, est: est, dx: dx, left: left}
       ) do
    nest = est - dx
    pdx = dx
    fest = f.(nest)

    newton_raphson_step(%Finance.Numerical.Iteration{
      iter
      | est: nest,
        fest: fest,
        left: left - 1,
        dx: newton_raphson_dx(fest, fd.(nest)),
        pdx: pdx
    })
  end
end
|
lib/numerical/root.ex
| 0.895658
| 0.897964
|
root.ex
|
starcoder
|
defmodule AWS.IoTJobsDataPlane do
  @moduledoc """
  AWS IoT Jobs is a service that allows you to define a set of jobs — remote
  operations that are sent to and executed on one or more devices connected to AWS
  IoT.

  For example, you can define a job that instructs a set of devices to download
  and install application or firmware updates, reboot, rotate certificates, or
  perform remote troubleshooting operations.

  To create a job, you make a job document which is a description of the remote
  operations to be performed, and you specify a list of targets that should
  perform the operations. The targets can be individual things, thing groups or
  both.

  AWS IoT Jobs sends a message to inform the targets that a job is available. The
  target starts the execution of the job by downloading the job document,
  performing the operations it specifies, and reporting its progress to AWS IoT.
  The Jobs service provides commands to track the progress of a job on a specific
  target and for all the targets of the job
  """

  alias AWS.Client
  alias AWS.Request

  # Service descriptor consumed by `AWS.Request` for signing and routing.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2017-09-29",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "data.jobs.iot",
      global?: false,
      protocol: "rest-json",
      service_id: "IoT Jobs Data Plane",
      signature_version: "v4",
      signing_name: "iot-jobs-data",
      target_prefix: nil
    }
  end

  @doc """
  Gets details of a job execution.
  """
  def describe_job_execution(
        %Client{} = client,
        job_id,
        thing_name,
        execution_number \\ nil,
        include_job_document \\ nil,
        options \\ []
      ) do
    url_path = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}"

    # Parameters are prepended, so the resulting order is executionNumber
    # first, then includeJobDocument — matching the original construction.
    query_params =
      []
      |> put_query_param("includeJobDocument", include_job_document)
      |> put_query_param("executionNumber", execution_number)

    Request.request_rest(client, metadata(), :get, url_path, query_params, [], nil, options, nil)
  end

  @doc """
  Gets the list of all jobs for a thing that are not in a terminal status.
  """
  def get_pending_job_executions(%Client{} = client, thing_name, options \\ []) do
    url_path = "/things/#{URI.encode(thing_name)}/jobs"

    Request.request_rest(client, metadata(), :get, url_path, [], [], nil, options, nil)
  end

  @doc """
  Gets and starts the next pending (status IN_PROGRESS or QUEUED) job execution
  for a thing.
  """
  def start_next_pending_job_execution(%Client{} = client, thing_name, input, options \\ []) do
    url_path = "/things/#{URI.encode(thing_name)}/jobs/$next"

    Request.request_rest(client, metadata(), :put, url_path, [], [], input, options, nil)
  end

  @doc """
  Updates the status of a job execution.
  """
  def update_job_execution(%Client{} = client, job_id, thing_name, input, options \\ []) do
    url_path = "/things/#{URI.encode(thing_name)}/jobs/#{URI.encode(job_id)}"

    Request.request_rest(client, metadata(), :post, url_path, [], [], input, options, nil)
  end

  # Prepends `{key, value}` to `params`, skipping nil values entirely.
  defp put_query_param(params, _key, nil), do: params
  defp put_query_param(params, key, value), do: [{key, value} | params]
end
|
lib/aws/generated/iot_jobs_data_plane.ex
| 0.791378
| 0.455865
|
iot_jobs_data_plane.ex
|
starcoder
|
defmodule Finch do
  @external_resource "README.md"
  @moduledoc "README.md"
             |> File.read!()
             |> String.split("<!-- MDOC !-->")
             |> Enum.fetch!(1)

  alias Finch.PoolManager

  use Supervisor

  @atom_methods [
    :get,
    :post,
    :put,
    :patch,
    :delete,
    :head,
    :options
  ]
  @methods [
    "GET",
    "POST",
    "PUT",
    "PATCH",
    "DELETE",
    "HEAD",
    "OPTIONS"
  ]
  # Lookup table from atom method (:get) to its wire form ("GET").
  @atom_to_method Enum.zip(@atom_methods, @methods) |> Enum.into(%{})
  @default_pool_size 10
  @default_pool_count 1

  @pool_config_schema [
    protocol: [
      type: {:one_of, [:http2, :http1]},
      doc: "The type of connection and pool to use",
      default: :http1
    ],
    size: [
      type: :pos_integer,
      doc: "Number of connections to maintain in each pool.",
      default: @default_pool_size
    ],
    count: [
      type: :pos_integer,
      doc: "Number of pools to start.",
      default: @default_pool_count
    ],
    conn_opts: [
      type: :keyword_list,
      doc:
        "These options are passed to `Mint.HTTP.connect/4` whenever a new connection is established. `:mode` is not configurable as Finch must control this setting. Typically these options are used to configure proxying, https settings, or connect timeouts.",
      default: []
    ]
  ]

  @typedoc """
  The `:name` provided to Finch in `start_link/1`.
  """
  @type name() :: atom()

  @typedoc """
  An HTTP request method represented as an `atom()` or a `String.t()`.

  The following atom methods are supported: `#{Enum.map_join(@atom_methods, "`, `", &inspect/1)}`.
  You can use any arbitrary method by providing it as a `String.t()`.
  """
  @type http_method() :: :get | :post | :head | :patch | :delete | :options | :put | String.t()

  @typedoc """
  A Uniform Resource Locator, the address of a resource on the Web.
  """
  @type url() :: String.t() | URI.t()

  @typedoc """
  A body associated with a request.
  """
  @type body() :: iodata() | nil

  @doc """
  Start an instance of Finch.

  ## Options:
    * `:name` - The name of your Finch instance. This field is required.
    * `:pools` - A map specifying the configuration for your pools. The keys should be URLs
    provided as binaries, or the atom `:default` to provide a catch-all configuration to be used
    for any unspecified URLs. See "Pool Configuration Options" below for details on the possible
    map values. Default value is `%{default: [size: #{@default_pool_size}, count: #{@default_pool_count}]}`.

  ### Pool Configuration Options
  #{NimbleOptions.docs(@pool_config_schema)}
  """
  def start_link(opts) do
    name = Keyword.get(opts, :name) || raise ArgumentError, "must supply a name"
    pools = Keyword.get(opts, :pools, []) |> pool_options!()
    {default_pool_config, pools} = Map.pop(pools, :default)

    config = %{
      registry_name: name,
      manager_name: manager_name(name),
      supervisor_name: pool_supervisor_name(name),
      default_pool_config: default_pool_config,
      pools: pools
    }

    Supervisor.start_link(__MODULE__, config, name: supervisor_name(name))
  end

  @impl true
  def init(config) do
    children = [
      {DynamicSupervisor, name: config.supervisor_name, strategy: :one_for_one},
      {Registry, [keys: :duplicate, name: config.registry_name, meta: [config: config]]},
      {PoolManager, config}
    ]

    Supervisor.init(children, strategy: :one_for_all)
  end

  @doc """
  Sends an HTTP request and returns the response.

  ## Options:
    * `:pool_timeout` - This timeout is applied when we check out a connection from the pool.
      Default value is `5_000`.
    * `:receive_timeout` - The maximum time to wait for a response before returning an error.
      Default value is `15_000`.
  """
  @spec request(name(), http_method(), url(), Mint.Types.headers(), body(), keyword()) ::
          {:ok, Finch.Response.t()} | {:error, Mint.Types.error()}
  def request(name, method, url, headers \\ [], body \\ nil, opts \\ []) do
    with {:ok, uri} <- parse_and_normalize_url(url) do
      req = %{
        scheme: uri.scheme,
        host: uri.host,
        port: uri.port,
        method: build_method(method),
        path: uri.path,
        headers: headers,
        body: body,
        query: uri.query
      }

      # Pools are keyed by the scheme/host/port triple.
      shp = {uri.scheme, uri.host, uri.port}
      {pool, pool_mod} = PoolManager.get_pool(name, shp)
      pool_mod.request(pool, req, opts)
    end
  end

  defp parse_and_normalize_url(url) when is_binary(url) do
    url
    |> URI.parse()
    |> parse_and_normalize_url()
  end

  defp parse_and_normalize_url(%URI{} = parsed_uri) do
    normalize_uri(parsed_uri)
  end

  # Reduces a parsed URI to the minimal map Finch needs, defaulting the path
  # to "/" and validating the scheme.
  defp normalize_uri(parsed_uri) do
    normalized_path = parsed_uri.path || "/"

    with {:ok, scheme} <- normalize_scheme(parsed_uri.scheme) do
      normalized_uri = %{
        scheme: scheme,
        host: parsed_uri.host,
        port: parsed_uri.port,
        path: normalized_path,
        query: parsed_uri.query
      }

      {:ok, normalized_uri}
    end
  end

  defp build_method(method) when is_binary(method), do: method
  defp build_method(method) when method in @atom_methods, do: @atom_to_method[method]

  defp build_method(method) do
    # Fixed: the message previously contained a stray `",` mid-sentence.
    raise ArgumentError, """
    got unsupported atom method #{inspect(method)}.
    only the following methods can be provided as atoms: #{Enum.map_join(@atom_methods, ", ", &inspect/1)},
    otherwise you must pass a binary.
    """
  end

  defp normalize_scheme(scheme) do
    case scheme do
      "https" ->
        {:ok, :https}

      "http" ->
        {:ok, :http}

      scheme ->
        {:error, "invalid scheme #{inspect(scheme)}"}
    end
  end

  # Validates the user-supplied `:pools` configuration against the schema,
  # always providing a `:default` entry built from the schema defaults.
  defp pool_options!(pools) do
    {:ok, default} = NimbleOptions.validate([], @pool_config_schema)

    Enum.reduce(pools, %{default: valid_opts_to_map(default)}, fn {destination, opts}, acc ->
      with {:ok, valid_destination} <- cast_destination(destination),
           {:ok, valid_pool_opts} <- cast_pool_opts(opts) do
        Map.put(acc, valid_destination, valid_pool_opts)
      else
        {:error, reason} ->
          raise ArgumentError,
                "got invalid configuration for pool #{inspect(destination)}! #{reason}"
      end
    end)
  end

  defp cast_destination(destination) do
    case destination do
      :default ->
        {:ok, destination}

      url when is_binary(url) ->
        cast_binary_destination(url)

      _ ->
        {:error, "invalid destination"}
    end
  end

  # A binary destination is normalized down to its scheme/host/port triple.
  defp cast_binary_destination(url) when is_binary(url) do
    with {:ok, uri} <- parse_and_normalize_url(url),
         shp <- {uri.scheme, uri.host, uri.port} do
      {:ok, shp}
    end
  end

  defp cast_pool_opts(opts) do
    with {:ok, valid} <- NimbleOptions.validate(opts, @pool_config_schema) do
      {:ok, valid_opts_to_map(valid)}
    end
  end

  defp valid_opts_to_map(valid) do
    %{
      size: valid[:size],
      count: valid[:count],
      conn_opts: valid[:conn_opts],
      protocol: valid[:protocol]
    }
  end

  defp supervisor_name(name), do: :"#{name}.Supervisor"
  defp manager_name(name), do: :"#{name}.PoolManager"
  defp pool_supervisor_name(name), do: :"#{name}.PoolSupervisor"
end
|
lib/finch.ex
| 0.913662
| 0.407333
|
finch.ex
|
starcoder
|
defmodule Commanded.Assertions.EventAssertions do
  @moduledoc """
  Provides test assertion and wait for event functions to help test applications built using Commanded.

  The default receive timeout is one second.

  You can override the default timeout in config (e.g. `config/test.exs`):

      config :commanded,
        assert_receive_event_timeout: 1_000
  """

  import ExUnit.Assertions

  alias Commanded.EventStore
  alias Commanded.EventStore.TypeProvider

  @doc """
  Wait for an event of the given event type to be published

  ## Examples

      wait_for_event BankAccountOpened
  """
  def wait_for_event(event_type) do
    # Delegates with an always-true predicate: any event of the type matches.
    wait_for_event(event_type, fn _event -> true end)
  end

  @doc """
  Wait for an event of the given event type, matching the predicate, to be published.

  ## Examples

      wait_for_event BankAccountOpened, fn opened -> opened.account_number == "ACC123" end
  """
  def wait_for_event(event_type, predicate_fn) when is_function(predicate_fn) do
    with_subscription(fn subscription ->
      do_wait_for_event(subscription, event_type, predicate_fn)
    end)
  end

  @doc """
  Assert that events matching their respective predicates have a matching correlation id.

  Useful when there is a chain of events that is connected through event handlers.

  ## Examples

      id_one = 1
      id_two = 2

      assert_correlated(
        BankAccountOpened, fn opened -> opened.id == id_one end,
        InitialAmountDeposited, fn deposited -> deposited.id == id_two end
      )
  """
  def assert_correlated(event_type_a, predicate_a, event_type_b, predicate_b) do
    # Nested 2-arity assertions so both events' metadata are in scope for the
    # correlation-id comparison.
    assert_receive_event(event_type_a, predicate_a, fn _event_a, metadata_a ->
      assert_receive_event(event_type_b, predicate_b, fn _event_b, metadata_b ->
        assert metadata_a.correlation_id == metadata_b.correlation_id
      end)
    end)
  end

  @doc """
  Assert that an event of the given event type is published. Verify that event using the assertion function.

  ## Examples

      assert_receive_event BankAccountOpened, fn opened ->
        assert opened.account_number == "ACC123"
      end
  """
  def assert_receive_event(event_type, assertion_fn) do
    assert_receive_event(event_type, fn _event -> true end, assertion_fn)
  end

  @doc """
  Assert that an event of the given event type, matching the predicate, is published.
  Verify that event using the assertion function.

  ## Examples

      assert_receive_event BankAccountOpened,
        fn opened -> opened.account_number == "ACC123" end,
        fn opened ->
          assert opened.balance == 1_000
        end
  """
  def assert_receive_event(event_type, predicate_fn, assertion_fn) do
    # Fail fast with a clear message when the event module does not exist.
    # NOTE(review): `Code.ensure_compiled?/1` is deprecated in Elixir >= 1.12;
    # consider `match?({:module, _}, Code.ensure_compiled(event_type))`.
    unless Code.ensure_compiled?(event_type) do
      raise ExUnit.AssertionError, "event_type #{inspect(event_type)} not found"
    end

    with_subscription(fn subscription ->
      do_assert_receive(subscription, event_type, predicate_fn, assertion_fn)
    end)
  end

  defp default_receive_timeout,
    do: Application.get_env(:commanded, :assert_receive_event_timeout, 1_000)

  # Creates a uniquely named `:origin` subscription, waits for it to be
  # confirmed, runs the callback, and always removes the subscription
  # afterwards (even if the callback's assertions fail).
  defp with_subscription(callback_fn) do
    subscription_name = UUID.uuid4()

    {:ok, subscription} = create_subscription(subscription_name)

    assert_receive {:subscribed, ^subscription}, default_receive_timeout()

    try do
      apply(callback_fn, [subscription])
    after
      remove_subscription(subscription)
    end
  end

  # Receives batches of events, acking each batch, until one event of the
  # expected type satisfies the predicate; recurses (and ultimately fails via
  # `assert_receive` timeout) until then.
  defp do_assert_receive(subscription, event_type, predicate_fn, assertion_fn) do
    assert_receive {:events, received_events}, default_receive_timeout()

    ack_events(subscription, received_events)

    expected_type = TypeProvider.to_string(event_type.__struct__)

    expected_event =
      Enum.find(received_events, fn received_event ->
        case received_event.event_type do
          ^expected_type ->
            case apply(predicate_fn, [received_event.data]) do
              true -> received_event
              _ -> false
            end

          _ ->
            false
        end
      end)

    case expected_event do
      nil ->
        do_assert_receive(subscription, event_type, predicate_fn, assertion_fn)

      received_event ->
        # Dispatch on the assertion function's arity: 1-arity receives only the
        # event data; 2-arity additionally receives the remaining metadata.
        # NOTE(review): the 2-arity branch passes `%{data: ...}` (a one-key map
        # from `Map.split/2`) as the first argument, while the 1-arity branch
        # passes the bare `received_event.data` — confirm this asymmetry is
        # intended.
        if is_function(assertion_fn, 1) do
          apply(assertion_fn, [received_event.data])
        else
          {data, all_metadata} = Map.split(received_event, [:data])

          apply(assertion_fn, [data, all_metadata])
        end
    end
  end

  # Same receive/ack loop as above, but returns the matching event instead of
  # running an assertion on it.
  defp do_wait_for_event(subscription, event_type, predicate_fn) do
    assert_receive {:events, received_events}, default_receive_timeout()

    ack_events(subscription, received_events)

    expected_type = TypeProvider.to_string(event_type.__struct__)

    expected_event =
      Enum.find(received_events, fn received_event ->
        case received_event.event_type do
          ^expected_type -> apply(predicate_fn, [received_event.data])
          _ -> false
        end
      end)

    case expected_event do
      nil -> do_wait_for_event(subscription, event_type, predicate_fn)
      received_event -> received_event
    end
  end

  defp create_subscription(subscription_name),
    do: EventStore.subscribe_to(:all, subscription_name, self(), :origin)

  defp remove_subscription(subscription),
    do: EventStore.unsubscribe(subscription)

  # Acks only the final event of the batch — presumably the event store treats
  # this as acknowledging the whole batch; confirm against the EventStore
  # adapter's contract.
  defp ack_events(subscription, events),
    do: EventStore.ack_event(subscription, List.last(events))
end
|
lib/commanded/assertions/event_assertions.ex
| 0.879101
| 0.715797
|
event_assertions.ex
|
starcoder
|
defmodule Toolshed.Top do
  @default_n 10

  @moduledoc """
  Find the top processes
  """

  defmacro __using__(_) do
    quote do
      import Toolshed.Top,
        only: [
          top: 0,
          top: 1,
          top_reductions: 0,
          top_reductions: 1,
          top_mailbox: 0,
          top_mailbox: 1,
          top_total_heap_size: 0,
          top_total_heap_size: 1,
          top_heap_size: 0,
          top_heap_size: 1,
          top_stack_size: 0,
          top_stack_size: 1
        ]
    end
  end

  # Convenience wrappers around `top/1` for each supported sort order.
  @spec top_reductions(any()) :: :"do not show this result in output"
  def top_reductions(n \\ @default_n), do: top(order: :reductions, n: n)
  @spec top_mailbox(any()) :: :"do not show this result in output"
  def top_mailbox(n \\ @default_n), do: top(order: :mailbox, n: n)
  @spec top_total_heap_size(any()) :: :"do not show this result in output"
  def top_total_heap_size(n \\ @default_n), do: top(order: :total_heap_size, n: n)
  @spec top_heap_size(any()) :: :"do not show this result in output"
  def top_heap_size(n \\ @default_n), do: top(order: :heap_size, n: n)
  @spec top_stack_size(any()) :: :"do not show this result in output"
  def top_stack_size(n \\ @default_n), do: top(order: :stack_size, n: n)

  @doc """
  List the top processes

  Options:

  * `:order` - the sort order for the results (`:reductions`, `:mailbox`,
    `:total_heap_size`, `:heap_size`, `:stack_size`)
  * `:n` - the max number of processes to list
  """
  @spec top(keyword()) :: :"do not show this result in output"
  def top(opts \\ []) do
    order = Keyword.get(opts, :order, :reductions)
    n = Keyword.get(opts, :n, @default_n)

    Process.list()
    |> Enum.map(&process_info/1)
    # Processes that died between listing and inspection yield %{}; drop them.
    |> Enum.filter(fn x -> x != %{} end)
    |> Enum.sort(sort(order))
    |> Enum.take(n)
    |> format_header
    |> Enum.each(&format/1)

    IEx.dont_display_result()
  end

  # Descending comparators for each supported sort key; unknown keys fall back
  # to sorting by reductions.
  defp sort(:reductions), do: fn x, y -> x.reductions > y.reductions end
  defp sort(:mailbox), do: fn x, y -> x.message_queue_len > y.message_queue_len end
  defp sort(:total_heap_size), do: fn x, y -> x.total_heap_size > y.total_heap_size end
  defp sort(:heap_size), do: fn x, y -> x.heap_size > y.heap_size end
  defp sort(:stack_size), do: fn x, y -> x.stack_size > y.stack_size end
  defp sort(_other), do: sort(:reductions)

  @doc """
  Collect a summary info map for one process.

  Returns `%{}` if the process has exited between listing and inspection.
  """
  def process_info(pid) do
    organize_info(pid, Process.info(pid), get_application(pid))
  end

  # Ignore deceased processes
  defp organize_info(_pid, nil, _app_info), do: %{}

  defp organize_info(pid, info, application) do
    %{
      application: application,
      total_heap_size: Keyword.get(info, :total_heap_size),
      heap_size: Keyword.get(info, :heap_size),
      stack_size: Keyword.get(info, :stack_size),
      reductions: Keyword.get(info, :reductions),
      message_queue_len: Keyword.get(info, :message_queue_len),
      # Fall back to the pid when the process has no registered name.
      name: Keyword.get(info, :registered_name, pid)
    }
  end

  defp get_application(pid) do
    case :application.get_application(pid) do
      {:ok, app} -> app
      :undefined -> :undefined
    end
  end

  # Prints the column header once and passes the rows through unchanged.
  defp format_header(infos) do
    :io.format(
      IO.ANSI.cyan() <> "~-16ts ~-24ts ~10ts ~10ts ~10ts ~10ts ~10ts~n" <> IO.ANSI.white(),
      ["OTP Application", "Name/PID", "Reductions", "Mailbox", "Total", "Heap", "Stack"]
    )

    infos
  end

  defp format(info) do
    :io.format(
      "~-16ts ~-24ts ~10B ~10B ~10B ~10B ~10B~n",
      [
        # Truncate names so the columns stay aligned.
        String.slice(to_string(info.application), 0, 16),
        String.slice(inspect(info.name), 0, 24),
        info.reductions,
        info.message_queue_len,
        info.total_heap_size,
        info.heap_size,
        info.stack_size
      ]
    )
  end
end
|
lib/toolshed/top.ex
| 0.701815
| 0.422326
|
top.ex
|
starcoder
|
defmodule Poison.MissingDependencyError do
  # Raised when a feature needs an optional dependency (e.g. Decimal) that
  # is not available at runtime.
  defexception name: nil

  @type t :: %__MODULE__{name: String.t()}

  def message(%{name: dependency_name}) do
    "missing optional dependency: #{dependency_name}"
  end
end
defmodule Poison.ParseError do
  @moduledoc """
  Raised when input cannot be parsed as JSON.

  `:data` is the full input binary, `:skip` is the byte offset at which
  parsing failed, and `:value` (when set) is the offending value.
  """
  alias Code.Identifier
  alias Poison.Parser

  @type t :: %__MODULE__{data: String.t(), skip: non_neg_integer, value: Parser.t()}

  defexception data: "", skip: 0, value: nil

  # When a concrete value is attached, report it at its character position.
  # `skip` is a byte offset; String.length/1 on the preceding bytes converts
  # it to a (grapheme) character position for the message.
  def message(%{data: data, skip: skip, value: value}) when value != nil do
    <<head::binary-size(skip), _rest::bits>> = data
    pos = String.length(head)
    "cannot parse value at position #{pos}: #{inspect(value)}"
  end

  # No value attached: describe what sits at the failure offset — end of
  # input, a decodable UTF-8 token, or raw undecodable bytes.
  def message(%{data: data, skip: skip}) when is_bitstring(data) do
    <<head::binary-size(skip), rest::bits>> = data
    pos = String.length(head)

    case rest do
      <<>> ->
        "unexpected end of input at position #{pos}"

      <<token::utf8, _::bits>> ->
        "unexpected token at position #{pos}: #{escape(token)}"

      _rest ->
        "cannot parse value at position #{pos}: #{inspect(<<rest::bits>>)}"
    end
  end

  # Fallback for non-binary input (e.g. invalid iodata handed to the parser).
  def message(%{data: data}) do
    "unsupported value: #{inspect(data)}"
  end

  # Render a codepoint as an escaped character literal for error messages.
  defp escape(token) do
    {value, _} = Identifier.escape(<<token::utf8>>, ?\\)
    value
  end
end
defmodule Poison.Parser do
  @moduledoc """
  An RFC 7159 and ECMA 404 conforming JSON parser.
  See: https://tools.ietf.org/html/rfc7159
  See: http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-404.pdf
  """

  # This module is a hot path: raise the compiler's inlining budgets well
  # beyond their defaults so the per-character clauses fuse together.
  @compile :inline
  @compile :inline_list_funcs
  @compile {:inline_effort, 2500}
  @compile {:inline_size, 150}
  @compile {:inline_unroll, 3}

  use Bitwise

  alias Poison.{Decoder, ParseError}

  @typep value :: nil | true | false | map | list | float | integer | String.t()

  # Decimal support is optional: only widen the type when it is compiled in.
  if Code.ensure_loaded?(Decimal) do
    @type t :: value | Decimal.t()
  else
    @type t :: value
  end

  # Compile-time character sets, used below with `for`/`unquote` to unroll
  # one function clause per character.
  whitespace = '\s\t\n\r'
  digits = ?0..?9

  # Raise a ParseError carrying only the byte offset; parse!/2 rescues it
  # and re-raises with the input data attached.
  defmacrop syntax_error(skip) do
    quote do
      raise ParseError, skip: unquote(skip)
    end
  end

  # Convention used throughout: every parsing function returns the improper
  # list [value | skip], a cheap two-element pair where `skip` is the byte
  # offset just past the consumed input. Callers re-slice the original
  # binary with <<_::binary-size(skip), rest::bits>> to continue.
  @spec parse!(iodata | binary, Decoder.options()) :: t | no_return
  def parse!(value, options \\ %{})

  def parse!(data, options) when is_bitstring(data) do
    [value | skip] =
      value(data, data, :maps.get(:keys, options, nil), :maps.get(:decimal, options, nil), 0)

    <<_::binary-size(skip), rest::bits>> = data
    skip_whitespace(rest, skip, value)
  rescue
    exception in ParseError ->
      reraise ParseError,
              [data: data, skip: exception.skip, value: exception.value],
              __STACKTRACE__
  end

  def parse!(iodata, options) do
    iodata |> IO.iodata_to_binary() |> parse!(options)
  end

  # Dispatch on the first character of a JSON value. `data` is always the
  # whole original binary; `rest` is the tail at offset `skip`.
  @compile {:inline, value: 5}
  defp value(<<?f, rest::bits>>, _data, _keys, _decimal, skip) do
    case rest do
      <<"alse", _rest::bits>> -> [false | skip + 5]
      _other -> syntax_error(skip)
    end
  end

  defp value(<<?t, rest::bits>>, _data, _keys, _decimal, skip) do
    case rest do
      <<"rue", _rest::bits>> -> [true | skip + 4]
      _other -> syntax_error(skip)
    end
  end

  defp value(<<?n, rest::bits>>, _data, _keys, _decimal, skip) do
    case rest do
      <<"ull", _rest::bits>> -> [nil | skip + 4]
      _other -> syntax_error(skip)
    end
  end

  defp value(<<?-, rest::bits>>, _data, _keys, decimal, skip) do
    number_neg(rest, decimal, skip + 1)
  end

  # A leading zero cannot be followed by more integer digits (RFC 7159),
  # so jump straight to the fraction part.
  defp value(<<?0, rest::bits>>, _data, _keys, decimal, skip) do
    number_frac(rest, decimal, skip + 1, 1, 0, 0)
  end

  for digit <- ?1..?9 do
    coef = digit - ?0

    defp value(<<unquote(digit), rest::bits>>, _data, _keys, decimal, skip) do
      number_int(rest, decimal, skip + 1, 1, unquote(coef), 0)
    end
  end

  defp value(<<?", rest::bits>>, data, _keys, _decimal, skip) do
    string_continue(rest, data, skip + 1)
  end

  defp value(<<?[, rest::bits>>, data, keys, decimal, skip) do
    array_values(rest, data, keys, decimal, skip + 1, [])
  end

  defp value(<<?{, rest::bits>>, data, keys, decimal, skip) do
    object_pairs(rest, data, keys, decimal, skip + 1, [])
  end

  for char <- whitespace do
    defp value(<<unquote(char), rest::bits>>, data, keys, decimal, skip) do
      value(rest, data, keys, decimal, skip + 1)
    end
  end

  defp value(_rest, _data, _keys, _decimal, skip) do
    syntax_error(skip)
  end

  ## Objects

  # Convert an object key according to the :keys option. Only :atoms! can
  # fail (unknown atom); :atoms creates atoms dynamically — unsafe for
  # untrusted input, which is why it is behind an explicit option.
  defmacrop object_name(keys, skip, name) do
    quote bind_quoted: [keys: keys, skip: skip, name: name] do
      case keys do
        :atoms! ->
          try do
            String.to_existing_atom(name)
          rescue
            ArgumentError ->
              reraise ParseError, [skip: skip, value: name], __STACKTRACE__
          end

        :atoms ->
          # credo:disable-for-next-line Credo.Check.Warning.UnsafeToAtom
          String.to_atom(name)

        _keys ->
          name
      end
    end
  end

  @compile {:inline, object_pairs: 6}
  defp object_pairs(<<?", rest::bits>>, data, keys, decimal, skip, acc) do
    start = skip + 1
    [name | skip] = string_continue(rest, data, start)
    <<_::binary-size(skip), rest::bits>> = data
    [value | skip] = object_value(rest, data, keys, decimal, skip)
    <<_::binary-size(skip), rest::bits>> = data

    object_pairs_continue(rest, data, keys, decimal, skip, [
      {object_name(keys, start, name), value} | acc
    ])
  end

  defp object_pairs(<<?}, _rest::bits>>, _data, _keys, _decimal, skip, []) do
    [%{} | skip + 1]
  end

  for char <- whitespace do
    defp object_pairs(<<unquote(char), rest::bits>>, data, keys, decimal, skip, acc) do
      object_pairs(rest, data, keys, decimal, skip + 1, acc)
    end
  end

  defp object_pairs(_rest, _data, _keys, _decimal, skip, _acc) do
    syntax_error(skip)
  end

  # After one pair: expect ',' (another pair), '}' (done), or whitespace.
  # Note: duplicate keys — :maps.from_list/1 keeps the first occurrence of
  # a key, and pairs are accumulated in reverse order.
  @compile {:inline, object_pairs_continue: 6}
  defp object_pairs_continue(<<?,, rest::bits>>, data, keys, decimal, skip, acc) do
    object_pairs(rest, data, keys, decimal, skip + 1, acc)
  end

  defp object_pairs_continue(<<?}, _rest::bits>>, _data, _keys, _decimal, skip, acc) do
    [:maps.from_list(acc) | skip + 1]
  end

  for char <- whitespace do
    defp object_pairs_continue(<<unquote(char), rest::bits>>, data, keys, decimal, skip, acc) do
      object_pairs_continue(rest, data, keys, decimal, skip + 1, acc)
    end
  end

  defp object_pairs_continue(_rest, _data, _keys, _decimal, skip, _acc) do
    syntax_error(skip)
  end

  # The ':' between an object key and its value, then the value itself.
  @compile {:inline, object_value: 5}
  defp object_value(<<?:, rest::bits>>, data, keys, decimal, skip) do
    value(rest, data, keys, decimal, skip + 1)
  end

  for char <- whitespace do
    defp object_value(<<unquote(char), rest::bits>>, data, keys, decimal, skip) do
      object_value(rest, data, keys, decimal, skip + 1)
    end
  end

  defp object_value(_rest, _data, _keys, _decimal, skip) do
    syntax_error(skip)
  end

  ## Arrays

  @compile {:inline, array_values: 6}
  defp array_values(<<?], _rest::bits>>, _data, _keys, _decimal, skip, _acc) do
    [[] | skip + 1]
  end

  for char <- whitespace do
    defp array_values(<<unquote(char), rest::bits>>, data, keys, decimal, skip, acc) do
      array_values(rest, data, keys, decimal, skip + 1, acc)
    end
  end

  defp array_values(rest, data, keys, decimal, skip, acc) do
    [value | skip] = value(rest, data, keys, decimal, skip)
    <<_::binary-size(skip), rest::bits>> = data
    array_values_continue(rest, data, keys, decimal, skip, [value | acc])
  end

  # Elements are prepended while parsing and reversed once at ']'.
  @compile {:inline, array_values_continue: 6}
  defp array_values_continue(<<?,, rest::bits>>, data, keys, decimal, skip, acc) do
    [value | skip] = value(rest, data, keys, decimal, skip + 1)
    <<_::binary-size(skip), rest::bits>> = data
    array_values_continue(rest, data, keys, decimal, skip, [value | acc])
  end

  defp array_values_continue(<<?], _rest::bits>>, _data, _keys, _decimal, skip, acc) do
    [:lists.reverse(acc) | skip + 1]
  end

  for char <- whitespace do
    defp array_values_continue(<<unquote(char), rest::bits>>, data, keys, decimal, skip, acc) do
      array_values_continue(rest, data, keys, decimal, skip + 1, acc)
    end
  end

  defp array_values_continue(_rest, _data, _keys, _decimal, skip, _acc) do
    syntax_error(skip)
  end

  ## Numbers

  # Numbers are accumulated as sign * coef * 10^exp: `coef` collects all
  # digits (integer and fraction), `exp` tracks the implied decimal point
  # and any explicit exponent.
  @compile {:inline, number_neg: 3}
  defp number_neg(<<?0, rest::bits>>, decimal, skip) do
    number_frac(rest, decimal, skip + 1, -1, 0, 0)
  end

  for char <- ?1..?9 do
    defp number_neg(<<unquote(char), rest::bits>>, decimal, skip) do
      number_int(rest, decimal, skip + 1, -1, unquote(char - ?0), 0)
    end
  end

  defp number_neg(_rest, _decimal, skip) do
    syntax_error(skip)
  end

  @compile {:inline, number_int: 6}
  for char <- digits do
    defp number_int(<<unquote(char), rest::bits>>, decimal, skip, sign, coef, exp) do
      number_int(rest, decimal, skip + 1, sign, coef * 10 + unquote(char - ?0), exp)
    end
  end

  defp number_int(rest, decimal, skip, sign, coef, exp) do
    number_frac(rest, decimal, skip, sign, coef, exp)
  end

  @compile {:inline, number_frac: 6}
  defp number_frac(<<?., rest::bits>>, decimal, skip, sign, coef, exp) do
    number_frac_continue(rest, decimal, skip + 1, sign, coef, exp)
  end

  defp number_frac(rest, decimal, skip, sign, coef, exp) do
    number_exp(rest, decimal, skip, sign, coef, exp)
  end

  @compile {:inline, number_frac_continue: 6}
  for char <- digits do
    defp number_frac_continue(<<unquote(char), rest::bits>>, decimal, skip, sign, coef, exp) do
      number_frac_continue(rest, decimal, skip + 1, sign, coef * 10 + unquote(char - ?0), exp - 1)
    end
  end

  # exp == 0 here means no fraction digit followed the '.', which is invalid.
  defp number_frac_continue(_rest, _decimal, skip, _sign, _coef, 0) do
    syntax_error(skip)
  end

  defp number_frac_continue(rest, decimal, skip, sign, coef, exp) do
    number_exp(rest, decimal, skip, sign, coef, exp)
  end

  @compile {:inline, number_exp: 6}
  for e <- 'eE' do
    defp number_exp(<<unquote(e), rest::bits>>, decimal, skip, sign, coef, exp) do
      [value | skip] = number_exp_continue(rest, skip + 1)
      number_complete(decimal, skip, sign, coef, exp + value)
    end
  end

  defp number_exp(_rest, decimal, skip, sign, coef, exp) do
    number_complete(decimal, skip, sign, coef, exp)
  end

  @compile {:inline, number_exp_continue: 2}
  defp number_exp_continue(<<?-, rest::bits>>, skip) do
    [exp | skip] = number_exp_digits(rest, skip + 1)
    [-exp | skip]
  end

  defp number_exp_continue(<<?+, rest::bits>>, skip) do
    number_exp_digits(rest, skip + 1)
  end

  defp number_exp_continue(rest, skip) do
    number_exp_digits(rest, skip)
  end

  # An exponent marker must be followed by at least one digit: if the skip
  # offset did not advance, nothing was consumed and the input is invalid.
  @compile {:inline, number_exp_digits: 2}
  defp number_exp_digits(<<rest::bits>>, skip) do
    case number_digits(rest, skip, 0) do
      [_exp | ^skip] ->
        syntax_error(skip)

      other ->
        other
    end
  end

  # NOTE(review): this clause is unreachable — <<rest::bits>> above also
  # matches the empty binary (which then errors via the ^skip check), so
  # the compiler should flag this as a clause that cannot match.
  defp number_exp_digits(<<>>, skip), do: syntax_error(skip)

  @compile {:inline, number_digits: 3}
  for char <- digits do
    defp number_digits(<<unquote(char), rest::bits>>, skip, acc) do
      number_digits(rest, skip + 1, acc * 10 + unquote(char - ?0))
    end
  end

  defp number_digits(_rest, skip, acc) do
    [acc | skip]
  end

  # Finalize sign/coef/exp into a number. With decimal: true, build a
  # Decimal struct (or raise if Decimal isn't compiled in).
  @compile {:inline, number_complete: 5}
  if Code.ensure_loaded?(Decimal) do
    defp number_complete(true, skip, sign, coef, exp) do
      [%Decimal{sign: sign, coef: coef, exp: exp} | skip]
    end
  else
    defp number_complete(true, _skip, _sign, _coef, _exp) do
      raise Poison.MissingDependencyError, name: "Decimal"
    end
  end

  # exp == 0: the value is an exact integer.
  defp number_complete(_decimal, skip, sign, coef, 0) do
    [coef * sign | skip]
  end

  # Fast float path: when the significand fits in 53 bits and the exponent
  # is small, a single multiply/divide by a power of ten is exact.
  max_sig = 1 <<< 53

  # See: https://arxiv.org/pdf/2101.11408.pdf
  defp number_complete(_decimal, skip, sign, coef, exp)
       when exp in -10..10 and coef <= unquote(max_sig) do
    if exp < 0 do
      [coef / pow10(-exp) * sign | skip]
    else
      [coef * pow10(exp) * sign | skip]
    end
  end

  # Slow path: round-trip through String.to_float/1 for correct rounding;
  # out-of-range values raise ArithmeticError, reported as a ParseError.
  defp number_complete(_decimal, skip, sign, coef, exp) do
    [
      String.to_float(
        <<Integer.to_string(coef * sign)::bits, ".0e"::bits, Integer.to_string(exp)::bits>>
      )
      | skip
    ]
  rescue
    ArithmeticError ->
      reraise ParseError, [skip: skip, value: "#{coef * sign}e#{exp}"], __STACKTRACE__
  end

  @compile {:inline, pow10: 1}
  for n <- 1..10 do
    defp pow10(unquote(n)), do: unquote(:math.pow(10, n))
  end

  defp pow10(n), do: 1.0e10 * pow10(n - 10)

  ## Strings

  # Byte length of a codepoint's UTF-8 encoding (for codepoints > 0x7F).
  defmacrop string_codepoint_size(codepoint) do
    quote bind_quoted: [codepoint: codepoint] do
      cond do
        codepoint <= 0x7FF -> 2
        codepoint <= 0xFFFF -> 3
        true -> 4
      end
    end
  end

  @compile {:inline, string_continue: 3}
  defp string_continue(<<?", _rest::bits>>, _data, skip) do
    ["" | skip + 1]
  end

  defp string_continue(rest, data, skip) do
    string_continue(rest, data, skip, false, 0, [])
  end

  # Scan a string body. `len` counts plain bytes since the last escape so
  # that unescaped runs can be sliced out of `data` in one binary_part/3
  # call; `acc` is iodata of already-unescaped chunks; `unicode` records
  # whether any escape produced a raw codepoint (needing re-encoding).
  @compile {:inline, string_continue: 6}
  defp string_continue(<<?", _rest::bits>>, data, skip, unicode, len, acc) do
    cond do
      acc == [] ->
        if len > 0 do
          [binary_part(data, skip, len) | skip + len + 1]
        else
          ["" | skip + 1]
        end

      unicode ->
        [
          :unicode.characters_to_binary([acc | binary_part(data, skip, len)], :utf8)
          | skip + len + 1
        ]

      true ->
        [IO.iodata_to_binary([acc | binary_part(data, skip, len)]) | skip + len + 1]
    end
  end

  defp string_continue(<<?\\, rest::bits>>, data, skip, unicode, len, acc) do
    string_escape(rest, data, skip + len + 1, unicode, [acc | binary_part(data, skip, len)])
  end

  defp string_continue(<<char, rest::bits>>, data, skip, unicode, len, acc) when char >= 0x20 do
    string_continue(rest, data, skip, unicode, len + 1, acc)
  end

  defp string_continue(<<codepoint::utf8, rest::bits>>, data, skip, _unicode, len, acc)
       when codepoint > 0x80 do
    string_continue(rest, data, skip, true, len + string_codepoint_size(codepoint), acc)
  end

  defp string_continue(_other, _data, skip, _unicode, len, _acc) do
    syntax_error(skip + len)
  end

  # Simple single-character escapes: \" \\ \n \t \r \/ \f \b.
  @compile {:inline, string_escape: 5}
  for {seq, char} <- Enum.zip(~C("\ntr/fb), ~c("\\\n\t\r/\f\b)) do
    defp string_escape(<<unquote(seq), rest::bits>>, data, skip, unicode, acc) do
      string_continue(rest, data, skip + 1, unicode, 0, [acc | unquote(<<char>>)])
    end
  end

  defp string_escape(
         <<?u, seq1::binary-size(4), rest::bits>>,
         data,
         skip,
         _unicode,
         acc
       ) do
    string_escape_unicode(rest, data, skip, acc, seq1)
  end

  defp string_escape(_rest, _data, skip, _unicode, _acc), do: syntax_error(skip)

  # http://www.ietf.org/rfc/rfc2781.txt
  # http://perldoc.perl.org/Encode/Unicode.html#Surrogate-Pairs
  # http://mathiasbynens.be/notes/javascript-encoding#surrogate-pairs
  defguardp is_surrogate(cp) when cp in 0xD800..0xDFFF
  defguardp is_surrogate_pair(hi, lo) when hi in 0xD800..0xDBFF and lo in 0xDC00..0xDFFF

  # Parse a 4-hex-digit \u escape sequence into a codepoint.
  defmacrop get_codepoint(seq, skip) do
    quote bind_quoted: [seq: seq, skip: skip] do
      try do
        String.to_integer(seq, 16)
      rescue
        ArgumentError ->
          reraise ParseError, [skip: skip, value: "\\u#{seq}"], __STACKTRACE__
      end
    end
  end

  # Handle \uXXXX after the first sequence has been captured: combine
  # surrogate pairs into a single codepoint, reject lone surrogates.
  @compile {:inline, string_escape_unicode: 5}
  defp string_escape_unicode(<<"\\u", seq2::binary-size(4), rest::bits>>, data, skip, acc, seq1) do
    hi = get_codepoint(seq1, skip)
    lo = get_codepoint(seq2, skip + 6)

    cond do
      is_surrogate_pair(hi, lo) ->
        codepoint = 0x10000 + ((hi &&& 0x03FF) <<< 10) + (lo &&& 0x03FF)
        string_continue(rest, data, skip + 11, true, 0, [acc, codepoint])

      is_surrogate(hi) ->
        raise ParseError, skip: skip, value: "\\u#{seq1}\\u#{seq2}"

      is_surrogate(lo) ->
        raise ParseError, skip: skip + 6, value: "\\u#{seq2}"

      true ->
        string_continue(rest, data, skip + 11, true, 0, [acc, hi, lo])
    end
  end

  defp string_escape_unicode(rest, data, skip, acc, seq1) do
    string_continue(rest, data, skip + 5, true, 0, [acc, get_codepoint(seq1, skip)])
  end

  ## Whitespace

  # After the top-level value, only trailing whitespace is allowed.
  @compile {:inline, skip_whitespace: 3}
  defp skip_whitespace(<<>>, _skip, value) do
    value
  end

  for char <- whitespace do
    defp skip_whitespace(<<unquote(char), rest::bits>>, skip, value) do
      skip_whitespace(rest, skip + 1, value)
    end
  end

  defp skip_whitespace(_rest, skip, _value) do
    syntax_error(skip)
  end
end
|
lib/poison/parser.ex
| 0.656988
| 0.504211
|
parser.ex
|
starcoder
|
defmodule ZenMonitor.Truncator do
  @moduledoc """
  ZenMonitor.Truncator is used to truncate error messages to prevent error expansion issues.
  ## Error Expansion
  At the core of ZenMonitor is a system that collects local `:DOWN` messages, batches them up and
  relays them in bulk. This opens up a failure mode where each `:DOWN` message individually is
  deliverable, but the bulk summary grows to an unsupportable size due to the aggregation of large
  reason payloads.
  If no truncation is performed then the payload can cause instability on the sender or the
  receiver side.
  ## Truncation Behavior
  ZenMonitor will truncate error reasons if they exceed a certain size to prevent Error Expansion
  from breaking either the sender or the receiver.
  Truncation is performed recursively on the term up to a maximum depth which can be provided to
  the `ZenMonitor.Truncator.truncate/2` function.
  See below for an explanation of how the Truncator treats different values
  ### Pass-Through Values
  There are a number of types that the Truncator will pass through unmodified.
  - Atoms
  - Pids
  - Numbers
  - References
  - Ports
  - Binaries less than `@max_binary_size` (see the Binary section below for more information)
  ### Binaries
  There is a configurable value `@max_binary_size` any binary encountered over this size will be
  truncated to `@max_binary_size - 3` and a trailing '...' will be appended to indicate the value
  has been truncated. This guarantees that no binary will appear in the term with size greater
  than `@max_binary_size`
  ### Tuples
  0-tuples through 4-tuples will be passed through with their interior terms recursively
  truncated. If a tuple has more than 4 elements, it will be replaced with the `:truncated` atom.
  ### Lists
  Lists with 0 to 4 elements will be passed through with each element recursively truncated. If a
  list has more than 4 elements, it will be replaced with the `:truncated` atom.
  ### Maps
  Maps with a `map_size/1` less than 5 will be passed through with each value recursively
  truncated. If a map has a size of 5 or greater then it will be replaced with the `:truncated`
  atom.
  ### Structs
  Structs are converted into maps and then the map rules are applied, they are then converted back
  into structs. The effect is that a Struct with 4 fields or fewer will be retained (with all
  values recursively truncated) while Structs with 5 or more fields will be replaced with the
  `:truncated` atom.
  ### Recursion Limit
  The Truncator will only descend up to the `depth` argument passed into
  `ZenMonitor.Truncator.truncate/2`, regardless of the value, if the recursion descends deeper
  than this value then the `:truncated` atom will be used in place of the original value.
  ## Configuration
  `ZenMonitor.Truncator` exposes two different configuration options, and allows for one call-site
  override. The configuration options are evaluated at compile time, changing these values at
  run-time (through a facility like `Application.put_env/3`) will have no effect.
  Both configuration options reside under the `:zen_monitor` app key.
  `:max_binary_size` is size in bytes over which the Truncator will truncate the binary. The
  largest binary returned by the Truncator is defined to be the max_binary_size + 3, this is
  because when the Truncator truncates a binary it will append `...` to indicate that truncation
  has occurred.
  `:truncation_depth` is the default depth that the Truncator will recursively descend into the
  term to be truncated. This is the value used for `ZenMonitor.Truncator.truncate/2` if no second
  argument is provided, providing a call-site second argument will override this configuration.
  """

  # Evaluated at compile time (see "Configuration" in the moduledoc):
  # runtime Application.put_env/3 changes have no effect on these.
  @max_binary_size Application.get_env(:zen_monitor, :max_binary_size, 1024)
  @truncation_binary_size @max_binary_size - 3
  @truncation_depth Application.get_env(:zen_monitor, :truncation_depth, 3)

  @doc """
  Truncates a term to a given depth
  See the module documentation for more information about how truncation works.
  """
  @spec truncate(term, depth :: pos_integer()) :: term
  def truncate(term, depth \\ @truncation_depth) do
    do_truncate(term, 0, depth)
  end

  ## Private

  # Clause order below is the dispatch logic — do not reorder.

  # A top-level {:shutdown, reason} tuple is passed through untouched
  # (only at depth 0, before the depth guard can fire).
  # NOTE(review): the inner reason is NOT truncated here — presumably
  # shutdown reasons are expected to be small; confirm against callers.
  defp do_truncate({:shutdown, _} = shutdown, 0, _) do
    shutdown
  end

  # Recursion limit: anything at or beyond max_depth collapses to :truncated.
  defp do_truncate(_, current, max_depth) when current >= max_depth do
    :truncated
  end

  # Pass-through scalar types.
  defp do_truncate(atom, _, _) when is_atom(atom), do: atom
  defp do_truncate(pid, _, _) when is_pid(pid), do: pid
  defp do_truncate(number, _, _) when is_number(number), do: number
  # Binaries at or under the limit pass through; longer ones fall to the
  # next clause, which keeps the first @max_binary_size - 3 bytes and
  # appends "..." (so the result is never longer than @max_binary_size).
  defp do_truncate(bin, _, _) when is_binary(bin) and byte_size(bin) <= @max_binary_size, do: bin

  defp do_truncate(<<first_chunk::binary-size(@truncation_binary_size), _rest::bits>>, _, _) do
    first_chunk <> "..."
  end

  defp do_truncate(ref, _, _) when is_reference(ref), do: ref
  defp do_truncate(port, _, _) when is_port(port), do: port

  # Tuples
  # 1- through 4-tuples are rebuilt with each element recursively
  # truncated; larger tuples fall through to the catch-all (:truncated).
  defp do_truncate({a, b, c, d}, current, max_depth) do
    next = current + 1

    {do_truncate(a, next, max_depth), do_truncate(b, next, max_depth),
     do_truncate(c, next, max_depth), do_truncate(d, next, max_depth)}
  end

  defp do_truncate({a, b, c}, current, max_depth) do
    next = current + 1

    {do_truncate(a, next, max_depth), do_truncate(b, next, max_depth),
     do_truncate(c, next, max_depth)}
  end

  defp do_truncate({a, b}, current, max_depth) do
    next = current + 1
    {do_truncate(a, next, max_depth), do_truncate(b, next, max_depth)}
  end

  defp do_truncate({a}, current, max_depth) do
    next = current + 1
    {do_truncate(a, next, max_depth)}
  end

  defp do_truncate({} = tuple, _, _) do
    tuple
  end

  # Lists
  # 1- to 4-element (proper) lists are truncated element-wise; anything
  # longer — or an improper list — falls through to the catch-all.
  defp do_truncate([_, _, _, _] = l, current, max_depth) do
    do_truncate_list(l, current, max_depth)
  end

  defp do_truncate([_, _, _] = l, current, max_depth) do
    do_truncate_list(l, current, max_depth)
  end

  defp do_truncate([_, _] = l, current, max_depth) do
    do_truncate_list(l, current, max_depth)
  end

  defp do_truncate([_] = l, current, max_depth) do
    do_truncate_list(l, current, max_depth)
  end

  defp do_truncate([], _, _) do
    []
  end

  # Maps / Structs
  # Structs must be handled before plain maps (a %struct{} pattern also
  # matches %{}): strip the struct, truncate as a map, and rebuild only if
  # the map survived (i.e. was not itself replaced by :truncated).
  defp do_truncate(%struct_module{} = struct, current, max_depth) do
    truncated_value =
      struct
      |> Map.from_struct()
      |> do_truncate(current, max_depth)

    if is_map(truncated_value) do
      struct(struct_module, truncated_value)
    else
      truncated_value
    end
  end

  # Maps with fewer than 5 entries keep their keys; only the values are
  # recursively truncated. Larger maps hit the catch-all below.
  defp do_truncate(%{} = m, current, max_depth) when map_size(m) < 5 do
    for {k, v} <- m, into: %{} do
      {k, do_truncate(v, current + 1, max_depth)}
    end
  end

  # Catch all
  defp do_truncate(_, _, _) do
    :truncated
  end

  # Truncate each element of a (small) list one level deeper.
  defp do_truncate_list(l, current, max_depth) do
    Enum.map(l, &do_truncate(&1, current + 1, max_depth))
  end
end
|
lib/zen_monitor/truncator.ex
| 0.907481
| 0.85561
|
truncator.ex
|
starcoder
|
defmodule Credo.Check.Readability.StrictModuleLayout do
  use Credo.Check,
    run_on_all: true,
    base_priority: :low,
    explanations: [
      check: """
      Provide module parts in a required order.
      # preferred
      defmodule MyMod do
      @moduledoc "moduledoc"
      use Foo
      import Bar
      alias Baz
      require Qux
      end
      """,
      params: [
        order: """
        List of atoms identifying the desired order of module parts.
        Defaults to `~w/shortdoc moduledoc behaviour use import alias require/a`.
        Following values can be provided:
        - `:moduledoc` - `@moduledoc` module attribute
        - `:shortdoc` - `@shortdoc` module attribute
        - `:behaviour` - `@behaviour` module attribute
        - `:use` - `use` expression
        - `:import` - `import` expression
        - `:alias` - `alias` expression
        - `:require` - `require` expression
        - `:defstruct` - `defstruct` expression
        - `:opaque` - `@opaque` module attribute
        - `:type` - `@type` module attribute
        - `:typep` - `@typep` module attribute
        - `:callback` - `@callback` module attribute
        - `:macrocallback` - `@macrocallback` module attribute
        - `:optional_callbacks` - `@optional_callbacks` module attribute
        - `:module_attribute` - other module attribute
        - `:public_fun` - public function
        - `:private_fun` - private function or a public function marked with `@doc false`
        - `:callback_fun` - public function marked with `@impl`
        - `:public_macro` - public macro
        - `:private_macro` - private macro or a public macro marked with `@doc false`
        - `:public_guard` - public guard
        - `:private_guard` - private guard or a public guard marked with `@doc false`
        - `:module` - inner module definition (`defmodule` expression inside a module)
        Notice that the desired order always starts from the top. For example, if you provide
        the order `~w/public_fun private_fun/a`, it means that everything else (e.g. `@moduledoc`)
        must appear after function definitions.
        """
      ]
    ]

  alias Credo.Code

  @doc false
  # Entry point invoked by Credo: parse the file, classify each module's
  # parts, collect ordering violations, and report them sorted by position.
  def run(source_file, params \\ []) do
    source_file
    |> Code.ast()
    |> Credo.Code.Module.analyze()
    |> all_errors(expected_order(params), IssueMeta.for(source_file, params))
    |> Enum.sort_by(&{&1.line_no, &1.column})
  end

  # Build a lookup of part => rank from the configured :order list.
  defp expected_order(params) do
    params
    |> Keyword.get(:order, ~w/shortdoc moduledoc behaviour use import alias require/a)
    |> Enum.with_index()
    |> Map.new()
  end

  # Accumulate errors across every analyzed module.
  defp all_errors(modules_and_parts, expected_order, issue_meta) do
    Enum.reduce(
      modules_and_parts,
      [],
      fn {module, parts}, errors ->
        module_errors(module, parts, expected_order, issue_meta) ++ errors
      end
    )
  end

  # Walk a single module's parts in file order, tracking the most recent
  # part seen (:current_part) and flagging any part ranked before it.
  defp module_errors(module, parts, expected_order, issue_meta) do
    Enum.reduce(
      parts,
      %{module: module, current_part: nil, errors: []},
      &check_part_location(&2, &1, expected_order, issue_meta)
    ).errors
  end

  defp check_part_location(state, {part, file_pos}, expected_order, issue_meta) do
    state
    |> validate_order(part, file_pos, expected_order, issue_meta)
    |> Map.put(:current_part, part)
  end

  # The first part is always fine; otherwise its rank must be >= the rank
  # of the previous part (equal ranks are allowed, e.g. repeated aliases).
  defp validate_order(state, part, file_pos, expected_order, issue_meta) do
    if is_nil(state.current_part) or
         order(state.current_part, expected_order) <= order(part, expected_order),
       do: state,
       else: add_error(state, part, file_pos, issue_meta)
  end

  # Parts missing from the configured order sort after all configured ones.
  defp order(part, expected_order), do: Map.get(expected_order, part, map_size(expected_order))

  defp add_error(state, part, file_pos, issue_meta) do
    update_in(
      state.errors,
      &[error(issue_meta, part, state.current_part, state.module, file_pos) | &1]
    )
  end

  defp error(issue_meta, part, current_part, module, file_pos) do
    format_issue(
      issue_meta,
      message: "#{part_to_string(part)} must appear before #{part_to_string(current_part)}",
      trigger: inspect(module),
      line_no: Keyword.get(file_pos, :line),
      column: Keyword.get(file_pos, :column)
    )
  end

  # Human-readable names for the part atoms used in issue messages.
  defp part_to_string(:module_attribute), do: "module attribute"
  defp part_to_string(:public_guard), do: "public guard"
  defp part_to_string(:public_macro), do: "public macro"
  defp part_to_string(:public_fun), do: "public function"
  defp part_to_string(:private_fun), do: "private function"
  defp part_to_string(:callback_fun), do: "callback implementation"
  defp part_to_string(part), do: "#{part}"
end
|
lib/credo/check/readability/strict_module_layout.ex
| 0.852706
| 0.490053
|
strict_module_layout.ex
|
starcoder
|
defmodule Unicode.GeneralCategory.Derived do
  @moduledoc """
  For certain operations and transformations
  (especially in [Unicode Sets](http://unicode.org/reports/tr35/#Unicode_Sets))
  there is an expectation that certain derived
  general categories exists even though they are not
  defined in the unicode character database.
  These categories are:
  * `:any` which is the full unicode character
  range `0x0..0x10ffff`
  * `:assigned` which is the set of codepoints
  that are assigned and is therefore
  equivalent to `[:any]-[:Cn]`. In fact that is
  exactly how it is calculated using [unicode_set](https://hex.pm/packages/unicode_set)
  and the results are copied here so
  that there is no mutual dependency.
  * `:ascii` which is the range for the US ASCII
  character set of `0x0..0x7f`
  In addition there are derived categories
  not part of the Unicode specification that
  support additional use cases. These include:
  * Categories related to
  recognising quotation marks. See the
  module `Unicode.Category.QuoteMarks`.
  * `:printable` which implements the same
  semantics as `String.printable?/1`. This is
  a very broad definition of printable characters.
  * `:graph` which includes characters from the
  `[^\\p{space}\\p{gc=Control}\\p{gc=Surrogate}\\p{gc=Unassigned}]`
  set defined by [Unicode Regular Expressions](http://unicode.org/reports/tr18/).
  """

  alias Unicode.Category.QuoteMarks
  alias Unicode.Utils

  # US ASCII codepoint range, expressed as a single {first, last} tuple.
  @ascii_category [{0x0, 0x7F}]

  # Derived category name => list of codepoint ranges (alphabetical order;
  # :Visible is an alias-like duplicate of :Graph).
  @derived_categories %{
    Any: Unicode.all(),
    Ascii: @ascii_category,
    Assigned: Unicode.DerivedCategory.Assigned.assigned(),
    Graph: Unicode.DerivedCategory.Graph.graph(),
    Printable: Unicode.DerivedCategory.Printable.printable(),
    QuoteMark: Utils.list_to_ranges(QuoteMarks.all_quote_marks()),
    QuoteMarkAmbidextrous: Utils.list_to_ranges(QuoteMarks.quote_marks_ambidextrous()),
    QuoteMarkDouble: Utils.list_to_ranges(QuoteMarks.quote_marks_double()),
    QuoteMarkLeft: Utils.list_to_ranges(QuoteMarks.quote_marks_left()),
    QuoteMarkRight: Utils.list_to_ranges(QuoteMarks.quote_marks_right()),
    QuoteMarkSingle: Utils.list_to_ranges(QuoteMarks.quote_marks_single()),
    Visible: Unicode.DerivedCategory.Graph.graph()
  }

  # Lower-cased string alias => derived category name. Note that both
  # "visible" and "graph" resolve to :Graph.
  @derived_aliases %{
    "any" => :Any,
    "ascii" => :Ascii,
    "assigned" => :Assigned,
    "graph" => :Graph,
    "printable" => :Printable,
    "quotemark" => :QuoteMark,
    "quotemarkambidextrous" => :QuoteMarkAmbidextrous,
    "quotemarkdouble" => :QuoteMarkDouble,
    "quotemarkleft" => :QuoteMarkLeft,
    "quotemarkright" => :QuoteMarkRight,
    "quotemarksingle" => :QuoteMarkSingle,
    "visible" => :Graph
  }

  @doc """
  Returns a map of the derived
  General Categories
  """
  @spec categories :: map()
  def categories, do: @derived_categories

  @doc """
  Returns a map of the aliases
  for the derived General Categories
  """
  @spec aliases :: map()
  def aliases, do: @derived_aliases
end
|
lib/unicode/category/derived_category.ex
| 0.880823
| 0.645588
|
derived_category.ex
|
starcoder
|
defmodule Maptu do
@moduledoc """
Provides functions to convert from "dumped" maps to Elixir structs.
This module provides functions to safely convert maps (with string keys) that
represent structs (usually decoded from some kind of protocol, like
MessagePack or JSON) to Elixir structs.
## Rationale
Many Elixir libraries need to encode and decode maps as well as
structs. Encoding is often straightforward (think of libraries like
[poison][gh-poison] or [msgpax][gh-msgpax]): the map is encoded by converting
keys to strings and encoding values recursively. This works natively with
structs as well, since structs are just maps with an additional `:__struct__`
key.
The problem arises when such structs have to be decoded back to Elixir terms:
decoding will often result in a map with string keys (as the information about
keys being atoms were lost in the encoding), including a `"__struct__"` key.
Trying to blindly convert all these string keys to atoms and building a struct
by reading the `:__struct__` key is dangerous: converting dynamic input to
atoms is one of the most frequent culprit of memory leaks in Erlang
applications. This is where `Maptu` becomes useful: it provides functions that
safely convert from this kind of maps to Elixir structs.
## Use case
Let's pretend we're writing a JSON encoder/decoder for Elixir. We have
generic encoder/decoder that work for all kinds of maps:
def encode(map) when is_map(map) do
# some binary is built here
end
def decode(bin) when is_binary(bin) do
# decoding happens here
end
When we encode and then decode a struct, something like this is likely to happen:
%URI{port: 8080} |> encode() |> decode()
#=> %{"__struct__" => "Elixir.URI", "port" => 8080}
To properly decode the struct back to a `%URI{}` struct, we would have to
check the value of `"__struct__"` (check that it's an existing atom and then
an existing module), then check each key-value pair in the map to see if it's
a field of the `URI` struct and so on. `Maptu` does exactly this!
%URI{port: 8080} |> encode() |> decode() |> Maptu.struct!()
#=> %URI{port: 8080}
This is just one use case `Maptu` is good at; read the documentation for the
provided functions for more information on the capabilities of this library.
[gh-poison]: https://github.com/devinus/poison
[gh-msgpax]: https://github.com/lexmag/msgpax
"""
import Kernel, except: [struct: 1, struct: 2]
# TODO: maybe revisit naming of :non_* error reasons in v2.0 so that we drop the underscore.

# Errors that the non-strict functions (struct/1,2) can return: only
# module-resolution problems, since unknown fields are silently dropped.
@type non_strict_error_reason ::
  :missing_struct_key
  | {:bad_module_name, binary}
  | {:non_existing_module, binary}
  | {:non_struct, module}

# Superset of the above: the strict functions additionally fail on field
# names that aren't existing atoms or aren't fields of the target struct.
@type strict_error_reason ::
  non_strict_error_reason
  | {:non_existing_atom, binary}
  | {:unknown_struct_field, module, atom}
# Implemented as a macro (not a function) so the ArgumentError raised by
# the bang variants keeps the caller's stacktrace.
defmacrop raise_on_error(code) do
  quote do
    case unquote(code) do
      {:error, reason} -> raise ArgumentError, format_error(reason)
      {:ok, result} -> result
    end
  end
end
@doc """
Converts a map to a struct, silently ignoring erroneous keys.
`map` is a map with binary keys that represents a "dumped" struct; it must
contain a `"__struct__"` key with a binary value that can be converted to a
valid module name. If the value of `"__struct__"` is not a module name or it's
a module that isn't a struct, then an error is returned.
Keys in `map` that are not fields of the resulting struct are simply
discarded.
This function returns `{:ok, struct}` if the conversion is successful,
`{:error, reason}` otherwise.
## Examples
iex> Maptu.struct(%{"__struct__" => "Elixir.URI", "port" => 8080, "foo" => 1})
{:ok, %URI{port: 8080}}
iex> Maptu.struct(%{"__struct__" => "Elixir.GenServer"})
{:error, {:non_struct, GenServer}}
"""
@spec struct(map) :: {:ok, struct} | {:error, non_strict_error_reason}
def struct(map) do
with {:ok, {mod_name, fields}} <- extract_mod_name_and_fields(map),
{:ok, mod} <- module_to_atom(mod_name),
do: struct(mod, fields)
end
@doc """
Converts a map to a struct, failing on erroneous keys.
This function behaves like `Maptu.struct/1`, except that it returns an error
if one of the fields in `map` isn't a field of the resulting struct.
This function returns `{:ok, struct}` if the conversion is successful,
`{:error, reason}` otherwise.
## Examples
iex> Maptu.strict_struct(%{"__struct__" => "Elixir.URI", "port" => 8080})
{:ok, %URI{port: 8080}}
iex> Maptu.strict_struct(%{"__struct__" => "Elixir.URI", "pid" => 1})
{:error, {:unknown_struct_field, URI, :pid}}
"""
@spec strict_struct(map) :: {:ok, struct} | {:error, strict_error_reason}
def strict_struct(map) do
with {:ok, {mod_name, fields}} <- extract_mod_name_and_fields(map),
{:ok, mod} <- module_to_atom(mod_name),
do: strict_struct(mod, fields)
end
@doc """
Behaves like `Maptu.struct/1` but raises in case of error.
This function behaves like `Maptu.struct/1`, but it returns `struct` (instead
of `{:ok, struct}`) if the conversion is valid, and raises an `ArgumentError`
exception if it's not valid.
## Examples
iex> Maptu.struct!(%{"__struct__" => "Elixir.URI", "port" => 8080})
%URI{port: 8080}
iex> Maptu.struct!(%{"__struct__" => "Elixir.GenServer"})
** (ArgumentError) module is not a struct: GenServer
"""
@spec struct!(map) :: struct | no_return
def struct!(map) do
map |> struct() |> raise_on_error()
end
@doc """
Behaves like `Maptu.strict_struct/1` but raises in case of error.
This function behaves like `Maptu.strict_struct/1`, but it returns `struct`
(instead of `{:ok, struct}`) if the conversion is valid, and raises an
`ArgumentError` exception if it's not valid.
## Examples
iex> Maptu.strict_struct!(%{"__struct__" => "Elixir.URI", "port" => 8080})
%URI{port: 8080}
iex> Maptu.strict_struct!(%{"__struct__" => "Elixir.URI", "pid" => 1})
** (ArgumentError) unknown field :pid for struct URI
"""
@spec strict_struct!(map) :: struct | no_return
def strict_struct!(map) do
map |> strict_struct() |> raise_on_error()
end
@doc """
Builds the `mod` struct with the given `fields`, silently ignoring erroneous
keys.
This function takes a struct `mod` (`mod` should be a module that defines a
struct) and a map of fields with binary keys. It builds the `mod` struct by
safely parsing the fields in `fields`.
If a key in `fields` doesn't map to a field in the resulting struct, it's
ignored.
This function returns `{:ok, struct}` if the building is successful,
`{:error, reason}` otherwise.
## Examples
iex> Maptu.struct(URI, %{"port" => 8080, "nonexisting_field" => 1})
{:ok, %URI{port: 8080}}
iex> Maptu.struct(GenServer, %{})
{:error, {:non_struct, GenServer}}
"""
@spec struct(module, map) :: {:ok, struct} | {:error, non_strict_error_reason}
def struct(mod, fields) when is_atom(mod) and is_map(fields) do
with :ok <- ensure_struct(mod), do: fill_struct(mod, fields)
end
@doc """
Builds the `mod` struct with the given `fields`, failing on erroneous keys.
This function behaves like `Maptu.strict_struct/2`, except it returns an error
when keys in `fields` don't map to fields in the resulting struct.
This function returns `{:ok, struct}` if the building is successful,
`{:error, reason}` otherwise.
## Examples
iex> Maptu.strict_struct(URI, %{"port" => 8080})
{:ok, %URI{port: 8080}}
iex> Maptu.strict_struct(URI, %{"pid" => 1})
{:error, {:unknown_struct_field, URI, :pid}}
"""
@spec strict_struct(module, map) :: {:ok, struct} | {:error, strict_error_reason}
def strict_struct(mod, fields) when is_atom(mod) and is_map(fields) do
with :ok <- ensure_struct(mod), do: strict_fill_struct(mod, fields)
end
@doc """
Behaves like `Maptu.struct/2` but raises in case of error.
This function behaves like `Maptu.struct/2`, but it returns `struct` (instead
of `{:ok, struct}`) if the conversion is valid, and raises an `ArgumentError`
exception if it's not valid.
## Examples
iex> Maptu.struct!(URI, %{"port" => 8080})
%URI{port: 8080}
iex> Maptu.struct!(GenServer, %{})
** (ArgumentError) module is not a struct: GenServer
"""
@spec struct!(module, map) :: struct | no_return
def struct!(mod, fields) do
struct(mod, fields) |> raise_on_error()
end
@doc """
Behaves like `Maptu.strict_struct/2` but raises in case of error.
This function behaves like `Maptu.strict_struct/2`, but it returns `struct`
(instead of `{:ok, struct}`) if the conversion is valid, and raises an
`ArgumentError` exception if it's not valid.
## Examples
iex> Maptu.strict_struct!(URI, %{"port" => 8080})
%URI{port: 8080}
iex> Maptu.strict_struct!(URI, %{"pid" => 1})
** (ArgumentError) unknown field :pid for struct URI
"""
@spec strict_struct!(module, map) :: struct | no_return
def strict_struct!(mod, fields) do
strict_struct(mod, fields) |> raise_on_error()
end
# Splits a dumped-struct map into {module_name, remaining_fields}.
# Only "Elixir."-prefixed names are accepted; anything else is reported as
# a bad module name, and a missing "__struct__" key is its own error.
defp extract_mod_name_and_fields(%{"__struct__" => "Elixir." <> _} = map) do
  {name, fields} = Map.pop(map, "__struct__")
  {:ok, {name, fields}}
end

defp extract_mod_name_and_fields(%{"__struct__" => name}) do
  {:error, {:bad_module_name, name}}
end

defp extract_mod_name_and_fields(%{}) do
  {:error, :missing_struct_key}
end
defp module_to_atom("Elixir." <> name = mod_name) do
case to_existing_atom_safe(mod_name) do
{:ok, mod} -> {:ok, mod}
:error -> {:error, {:non_existing_module, name}}
end
end
# Checks that `mod` actually defines a struct (i.e. exports __struct__/0).
defp ensure_struct(mod) when is_atom(mod) do
  case function_exported?(mod, :__struct__, 0) do
    true -> :ok
    false -> {:error, {:non_struct, mod}}
  end
end
# Builds mod's struct from `fields`, dropping any key that isn't an
# existing atom or isn't a field of the struct.
defp fill_struct(mod, fields) do
  filled =
    Enum.reduce(fields, mod.__struct__(), fn {key, value}, acc ->
      with {:ok, atom_key} <- to_existing_atom_safe(key),
           true <- Map.has_key?(acc, atom_key) do
        Map.put(acc, atom_key, value)
      else
        # Either the atom doesn't exist or it's not a struct field: skip.
        _ -> acc
      end
    end)

  {:ok, filled}
end
# Strict variant of fill_struct/2: the first unknown atom or non-field key
# aborts the build with a descriptive error. Uses reduce_while instead of
# throw/catch for the early exit.
defp strict_fill_struct(mod, fields) do
  Enum.reduce_while(fields, {:ok, mod.__struct__()}, fn {key, value}, {:ok, acc} ->
    case to_existing_atom_safe(key) do
      {:ok, atom_key} ->
        if Map.has_key?(acc, atom_key) do
          {:cont, {:ok, Map.put(acc, atom_key, value)}}
        else
          {:halt, {:error, {:unknown_struct_field, mod, atom_key}}}
        end

      :error ->
        {:halt, {:error, {:non_existing_atom, key}}}
    end
  end)
end
# Wraps String.to_existing_atom/1 into an :ok/:error API so callers never
# create fresh atoms from (potentially attacker-controlled) input.
defp to_existing_atom_safe(bin) when is_binary(bin) do
  {:ok, String.to_existing_atom(bin)}
rescue
  ArgumentError -> :error
end
# Human-readable messages for every error reason; used by raise_on_error
# when a bang variant turns {:error, reason} into an ArgumentError.
defp format_error(:missing_struct_key) do
  "the given map doesn't contain a \"__struct__\" key"
end

defp format_error({:bad_module_name, name}) when is_binary(name) do
  "not an elixir module: #{inspect name}"
end

defp format_error({:non_struct, mod}) when is_atom(mod) do
  "module is not a struct: #{inspect mod}"
end

defp format_error({:non_existing_atom, bin}) when is_binary(bin) do
  "atom doesn't exist: #{inspect bin}"
end

defp format_error({:unknown_struct_field, struct, field})
     when is_atom(struct) and is_atom(field) do
  "unknown field #{inspect field} for struct #{inspect struct}"
end
end
|
lib/maptu.ex
| 0.856542
| 0.677367
|
maptu.ex
|
starcoder
|
defmodule PixelFont.DSL.MacroHelper do
  @moduledoc false

  @doc false
  @spec block_direct_invocation!(Macro.Env.t()) :: no_return()
  # Raised by DSL macros that are only meaningful inside their parent
  # block; reports the user's file/line for the offending call.
  def block_direct_invocation!(env) do
    raise CompileError,
      file: env.file,
      line: env.line,
      description: "this macro cannot be called directly"
  end

  @typep exprs :: [Macro.t()]

  @doc false
  # Normalizes a `do` block into a list of expressions. With an :expected
  # option, splits the list into {expected_calls, block_of_the_rest}.
  @spec get_exprs(Macro.t(), keyword()) :: {exprs(), Macro.t()}
  def get_exprs(do_block, options \\ [])
  def get_exprs({:__block__, _, exprs}, options), do: do_get_exprs(exprs, options)
  # A single expression (no __block__ wrapper) is treated as a 1-item list.
  def get_exprs(expr, options), do: do_get_exprs([expr], options)

  # Accumulator is {kept_exprs, other_exprs}, both built in reverse.
  @spec do_get_exprs(exprs(), keyword(), {exprs(), exprs()}) :: {exprs(), Macro.t()}
  defp do_get_exprs(exprs, options, acc \\ {[], []})
  # Fast path: no options means no filtering — return everything as-is.
  defp do_get_exprs(exprs, [], {[], []}), do: {exprs, {:__block__, [], [nil]}}
  defp do_get_exprs([], _, {exprs, []}), do: {Enum.reverse(exprs), {:__block__, [], [nil]}}
  defp do_get_exprs([], _, {exprs, other}) do
    {Enum.reverse(exprs), {:__block__, [], Enum.reverse(other)}}
  end

  defp do_get_exprs([expr | exprs], options, {filtered, other}) do
    expected = Keyword.fetch!(options, :expected)
    warn = options[:warn] || false

    new_acc =
      case expr do
        # Keep calls whose name is in the :expected list; everything else
        # goes into the "other" bucket (optionally with a warning).
        {fun_name, _, args} when is_list(args) ->
          if fun_name in expected do
            {[expr | filtered], other}
          else
            warn_unexpected_expr(expr, warn)
            {filtered, [expr | other]}
          end

        _ ->
          warn_unexpected_expr(expr, warn)
          {filtered, [expr | other]}
      end

    do_get_exprs(exprs, options, new_acc)
  end

  # Emits a compile-time warning (with the expression pretty-printed) when
  # warn is true; no-op otherwise.
  @spec warn_unexpected_expr(Macro.t(), boolean()) :: :ok
  defp warn_unexpected_expr(expr, warn)
  defp warn_unexpected_expr(_, false), do: :ok
  defp warn_unexpected_expr(expr, true) do
    [
      "unexpected expression in block: \n",
      :bright,
      :cyan,
      Macro.to_string(expr),
      :reset,
      "\nthis expression will be ignored"
    ]
    |> IO.ANSI.format()
    |> IO.warn()
  end

  @doc false
  # Renames every call of `from_name` with the given arity to `to_name`,
  # leaving all other nodes untouched.
  @spec replace_call(Macro.t(), atom(), arity(), atom()) :: Macro.t()
  def replace_call(ast, from_name, arity, to_name) do
    Macro.prewalk(ast, fn
      {^from_name, meta, args} when length(args) === arity -> {to_name, meta, args}
      expr -> expr
    end)
  end

  @doc false
  # Separates `module do ... end` expressions from the rest, merging all
  # module bodies into a single block (original order preserved).
  @spec handle_module(Macro.t(), Macro.Env.t()) :: {Macro.t(), [Macro.t()]}
  def handle_module(exprs, env) do
    {module_exprs, other_exprs} =
      Enum.reduce(exprs, {[], []}, fn
        {:module, _, [[do: module_do]]}, {modules, others} ->
          {[module_do | modules], others}

        expr, {modules, others} ->
          {modules, [expr | others]}
      end)

    module_block = {:__block__, [], handle_module_exprs(module_exprs, env)}

    {module_block, Enum.reverse(other_exprs)}
  end

  # Flattens the collected module bodies and rejects any def/defp named
  # `lookups`, which is reserved by the DSL.
  @spec handle_module_exprs([Macro.t()], Macro.Env.t()) :: Macro.t()
  defp handle_module_exprs(module_exprs, env) do
    module_exprs
    |> Enum.reverse()
    |> Enum.map(&(&1 |> get_exprs() |> elem(0)))
    |> List.flatten()
    |> Macro.prewalk(fn
      {fun, meta, [{:lookups, _, _} | _]} when fun in ~w(def defp)a ->
        raise CompileError,
          file: env.file,
          line: meta[:line],
          description: "the function name `lookups` is reserved"

      expr ->
        expr
    end)
  end
end
|
lib/pixel_font/dsl/macro_helper.ex
| 0.71113
| 0.545528
|
macro_helper.ex
|
starcoder
|
defmodule HELF.Mailer do
  @moduledoc """
  Provides a way for sending emails with a list of Bamboo mailers.

  It will try to send the email using the first available mailer, and then
  fallback to the next whenever the current one fails.

  Before using the module, you should configure the list of mailers; this
  is what it should look like:

      config :helf, HELF.Mailer,
        mailers: [HELM.Mailer.MailGun, HELM.Mailer.Mandrill],
        default_sender: "<EMAIL>"

  The default sender is completely optional.
  """

  @type params :: [
    {:from, String.t}
    | {:to, String.t}
    | {:subject, String.t}
    | {:text, String.t}
    | {:html, String.t}
  ]

  @opaque email :: Bamboo.Email.t

  # Configuration is read at runtime instead of being cached in a module
  # attribute: `@config Application.get_env(...)` would freeze the values
  # at compile time and silently ignore runtime (e.g. release) config.
  @spec config() :: Keyword.t
  defp config,
    do: Application.get_env(:helf, __MODULE__, [])

  defmodule SentEmail do
    @moduledoc """
    Holds information about an already sent email.
    """

    @type t :: %__MODULE__{}

    @enforce_keys [:email, :mailer]
    defstruct [:email, :mailer]
  end

  defmodule AsyncEmail do
    @moduledoc """
    Holds information about an email being sent.
    Use with `HELF.Mailer.await` and `HELF.Mailer.yield`.
    """

    @type t :: %__MODULE__{}

    @enforce_keys [:notify?, :reference, :process]
    defstruct [:notify?, :reference, :process]
  end

  @spec from(email, sender :: String.t) :: email
  @doc """
  Sets the email sender.
  """
  defdelegate from(email, sender),
    to: Bamboo.Email

  @spec to(email, receiver :: String.t) :: email
  @doc """
  Sets the email recipient.
  """
  defdelegate to(email, receiver),
    to: Bamboo.Email

  @spec subject(email, subject :: String.t) :: email
  @doc """
  Sets the email subject.
  """
  defdelegate subject(email, subject),
    to: Bamboo.Email

  @spec text(email, text :: String.t) :: email
  @doc """
  Sets the text body of the `email`.
  """
  defdelegate text(email, text),
    to: Bamboo.Email,
    as: :text_body

  @spec html(email, html :: String.t) :: email
  @doc """
  Sets the html body of the `email`.
  """
  defdelegate html(email, html),
    to: Bamboo.Email,
    as: :html_body

  @spec new() :: email
  @doc """
  Creates a new empty email, see new/1 for composing emails using the params.
  """
  def new do
    Bamboo.Email.new_email()
    |> from(Keyword.get(config(), :default_sender))
  end

  @spec new(params) :: email
  @doc """
  Creates and composes a new email using the params.
  """
  def new(parameters = [_|_]) do
    new()
    |> compose(parameters)
  end

  @spec send_async(
    email,
    params :: [{:notify, boolean} | {:mailers, [module, ...]}]) :: AsyncEmail.t
  @doc """
  Sends the `email` from another process, optionally accepts `notify` and
  `mailers` keywords.
  To use `await` and `yield` methods, set the `notify` keyword to true.
  """
  def send_async(email, params \\ []) do
    me = self()
    ref = make_ref()

    default_mailers = Keyword.get(config(), :mailers)
    notify? = Keyword.get(params, :notify, false)
    mailers = Keyword.get(params, :mailers, default_mailers)

    process = spawn fn ->
      status = do_send(email, mailers)

      # Only message the caller back when it asked to be notified;
      # otherwise this is fire-and-forget.
      if notify? do
        case status do
          {:ok, result} ->
            Kernel.send(me, {:email, :success, ref, result})
          :error ->
            Kernel.send(me, {:email, :fail, ref, email})
        end
      end
    end

    %AsyncEmail{notify?: notify?, reference: ref, process: process}
  end

  @spec await(AsyncEmail.t, timeout :: non_neg_integer) ::
    {:ok, SentEmail.t}
    | {:error, email}
  @doc """
  Awaits until email is sent, will raise `RuntimeError` on timeout.
  """
  def await(%AsyncEmail{notify?: true, reference: ref}, timeout \\ 5_000) do
    case wait_message(ref, timeout) do
      :timeout ->
        raise RuntimeError
      return ->
        return
    end
  end

  @spec yield(AsyncEmail.t, timeout :: non_neg_integer) ::
    {:ok, SentEmail.t}
    | {:error, email}
    | nil
  @doc """
  Awaits until email is sent, yields `nil` on timeout.
  """
  def yield(%AsyncEmail{notify?: true, reference: ref}, timeout \\ 5_000) do
    case wait_message(ref, timeout) do
      :timeout ->
        nil
      return ->
        return
    end
  end

  @spec send(email, params :: [{:mailers, [module, ...]}]) ::
    {:ok, SentEmail.t}
    | {:error, email}
    | {:error, :internal_error}
  @doc """
  Sends the `email`, optionally accepts a `mailers` keyword.
  """
  def send(email = %Bamboo.Email{}, params \\ []) do
    request = send_async(email, [{:notify, true} | params])
    pid = request.process
    ref = Process.monitor(pid)

    receive do
      # Once the worker is down, its result message (if any) is already in
      # our mailbox, so a zero-timeout wait is sufficient.
      {:DOWN, ^ref, :process, ^pid, _} ->
        case wait_message(request.reference, 0) do
          :timeout -> {:error, :internal_error}
          msg -> msg
        end
    after
      5_000 ->
        {:error, :internal_error}
    end
  end

  @spec do_send(email :: Bamboo.Email.t, mailers :: [module, ...]) ::
    {:ok, SentEmail.t}
    | :error
  # Tries to send the email using the first available mailer, then fallbacks
  # to the next mailer on error. A NilRecipientsError is unrecoverable and
  # aborts immediately; adapter API errors trigger the fallback.
  defp do_send(email, mailers) do
    Enum.reduce_while(mailers, :error, fn mailer, _ ->
      try do
        mailer.deliver_now(email)
        {:halt, {:ok, %SentEmail{email: email, mailer: mailer}}}
      rescue
        Bamboo.NilRecipientsError -> {:halt, :error}
        Bamboo.MailgunAdapter.ApiError -> {:cont, :error}
        Bamboo.MandrillAdapter.ApiError -> {:cont, :error}
        Bamboo.SendgridAdapter.ApiError -> {:cont, :error}
        Bamboo.SentEmail.DeliveriesError -> {:cont, :error}
        Bamboo.SentEmail.NoDeliveriesError -> {:cont, :error}
      end
    end)
  end

  @spec wait_message(reference, timeout :: non_neg_integer) ::
    {:ok, SentEmail.t}
    | {:error, email}
    | :timeout
  # Blocks until email is sent or timeout is reached.
  defp wait_message(reference, timeout) do
    receive do
      {:email, :success, ^reference, email_sent} ->
        {:ok, email_sent}
      {:email, :fail, ^reference, email} ->
        {:error, email}
    after
      timeout ->
        :timeout
    end
  end

  @spec compose(email, params) :: email
  # Composes the email using keywords, one recognized key at a time.
  defp compose(email, [{:from, val}| t]) do
    email
    |> from(val)
    |> compose(t)
  end

  defp compose(email, [{:to, val}| t]) do
    email
    |> to(val)
    |> compose(t)
  end

  defp compose(email, [{:subject, val}| t]) do
    email
    |> subject(val)
    |> compose(t)
  end

  defp compose(email, [{:text, val}| t]) do
    email
    |> text(val)
    |> compose(t)
  end

  defp compose(email, [{:html, val}| t]) do
    email
    |> html(val)
    |> compose(t)
  end

  defp compose(email, []) do
    email
  end
end
|
lib/helf/mailer.ex
| 0.665519
| 0.435601
|
mailer.ex
|
starcoder
|
defmodule Astro do
  @moduledoc "Library to calculate sunrise/set and related times given a latitude and longitude"

  # Ported from https://github.com/mourner/suncalc
  # Sun calculations are based on http://aa.quae.nl/en/reken/zonpositie.html formulas.

  # date/time constants and conversions
  @day_ms 1000 * 60 * 60 * 24
  @j1970 2440588
  @j2000 2451545

  # NOTE(review): suncalc's toJulian offsets by -0.5; the -0.9 here is kept
  # as-is but should be confirmed against the reference implementation.
  defp to_julian(date), do: (Timex.to_datetime(date) |> Timex.to_julian) - 0.9

  defp from_julian(j) do
    # :millisecond (singular) is the supported unit atom; the plural
    # :milliseconds was deprecated in Elixir 1.4 and later removed.
    {:ok, dt} = DateTime.from_unix(round((j + 0.5 - @j1970) * @day_ms), :millisecond)
    %{dt | microsecond: {0, 0}}
  end

  defp to_days(date), do: to_julian(date) - @j2000

  # general calculations for position
  @pi :math.pi
  @rad :math.pi / 180
  @e @rad * 23.4397 # obliquity of the Earth

  defp right_ascension(l, b), do: :math.atan2(:math.sin(l) * :math.cos(@e) - :math.tan(b) * :math.sin(@e), :math.cos(l))
  defp declination(l, b), do: :math.asin(:math.sin(b) * :math.cos(@e) + :math.cos(b) * :math.sin(@e) * :math.sin(l))
  defp azimuth(h, phi, dec), do: :math.atan2(:math.sin(h), :math.cos(h) * :math.sin(phi) - :math.tan(dec) * :math.cos(phi))
  defp altitude(h, phi, dec), do: :math.asin(:math.sin(phi) * :math.sin(dec) + :math.cos(phi) * :math.cos(dec) * :math.cos(h))
  defp sidereal_time(d, lw), do: @rad * (280.16 + 360.9856235 * d) - lw

  # Atmospheric refraction correction; works for positive altitudes only.
  defp astro_refraction(h) when h < 0, do: 0

  defp astro_refraction(h) do
    # Formula 16.4 of "Astronomical Algorithms", 2nd edition, 1998:
    # 1.02 / tan(h + 10.26 / (h + 5.10)), h in degrees, result in arc
    # minutes — converted here to radians.
    0.0002967 / :math.tan(h + 0.00312536 / (h + 0.08901179))
  end

  # general sun calculations
  defp solar_mean_anomaly(d), do: @rad * (357.5291 + 0.98560028 * d)

  defp ecliptic_longitude(m) do
    c = @rad * (1.9148 * :math.sin(m) + 0.02 * :math.sin(2 * m) + 0.0003 * :math.sin(3 * m)) # equation of center
    p = @rad * 102.9372 # perihelion of the Earth
    m + c + p + @pi
  end

  defp sun_coords(d) do
    m = solar_mean_anomaly(d)
    l = ecliptic_longitude(m)

    %{
      dec: declination(l, 0),
      ra: right_ascension(l, 0)
    }
  end

  @doc "Calculates the sun position (azimuth/altitude, radians) for a given date and latitude/longitude."
  def get_position(date, lat, lng) do
    lw = @rad * -lng
    phi = @rad * lat
    d = to_days(date)

    c = sun_coords(d)
    h = sidereal_time(d, lw) - c.ra

    %{
      azimuth: azimuth(h, phi, c.dec),
      altitude: altitude(h, phi, c.dec)
    }
  end

  # sun times configuration (angle in degrees, morning name, evening name)
  @times [
    [-0.833, "sunrise", "sunset"],
    [-0.3, "sunrise_end", "sunset_start"],
    [-6, "dawn", "dusk"],
    [-12, "nautical_dawn", "nautical_dusk"],
    [-18, "night_end", "night"],
    [6, "golden_hour_end", "golden_hour"]
  ]

  # calculations for sun times
  @j0 0.0009

  defp julian_cycle(d, lw), do: Float.round(d - @j0 - lw / (2 * @pi))
  defp approx_transit(ht, lw, n), do: @j0 + (ht + lw) / (2 * @pi) + n
  defp solar_transit_j(ds, m, l), do: @j2000 + ds + 0.0053 * :math.sin(m) - 0.0069 * :math.sin(2 * l)
  defp hour_angle(h, phi, d), do: :math.acos((:math.sin(h) - :math.sin(phi) * :math.sin(d)) / (:math.cos(phi) * :math.cos(d)))

  # returns set time for the given sun altitude
  defp get_set_j(h, lw, phi, dec, n, m, l) do
    w = hour_angle(h, phi, dec)
    a = approx_transit(w, lw, n)
    solar_transit_j(a, m, l)
  end

  @doc "Calculates sun times (sunrise, sunset, dawn, dusk, ...) for a given date and latitude/longitude; defaults to today."
  def get_times(lat, lng), do: get_times(Timex.today, lat, lng)

  def get_times(date, lat, lng) do
    lw = @rad * -lng
    phi = @rad * lat
    d = to_days(date)

    n = julian_cycle(d, lw)
    ds = approx_transit(0, lw, n)
    m = solar_mean_anomaly(ds)
    l = ecliptic_longitude(m)
    dec = declination(l, 0)
    j_noon = solar_transit_j(ds, m, l)

    result = %{
      solar_noon: from_julian(j_noon),
      nadir: from_julian(j_noon - 0.5)
    }

    calc_times(result, @times, j_noon, lw, phi, dec, n, m, l)
  end

  defp calc_times(result, [], _, _, _, _, _, _, _), do: result

  defp calc_times(result, [[angle, rise_name, set_name] | times], j_noon, lw, phi, dec, n, m, l) do
    j_set = get_set_j(angle * @rad, lw, phi, dec, n, m, l)
    # The rise time mirrors the set time around solar noon.
    j_rise = j_noon - (j_set - j_noon)

    # `Dict` was deprecated in Elixir 1.4 and removed in later releases;
    # `Map.put_new/3` is the direct replacement for map accumulators.
    # String.to_atom/1 is safe here: @times is a fixed compile-time list.
    new_result =
      result
      |> Map.put_new(String.to_atom(rise_name), from_julian(j_rise))
      |> Map.put_new(String.to_atom(set_name), from_julian(j_set))

    calc_times(new_result, times, j_noon, lw, phi, dec, n, m, l)
  end
end
|
lib/astro.ex
| 0.727201
| 0.64944
|
astro.ex
|
starcoder
|
defmodule Elixium.Block do
  alias Elixium.Block
  alias Elixium.Utilities
  alias Elixium.Transaction
  alias Elixium.Store.Ledger
  require Logger

  @moduledoc """
  Provides functions for creating blocks and mining new ones
  """

  # index and nonce are stored as fixed-width big-endian binaries (4 and 8
  # bytes), which keeps the struct directly hashable without re-encoding.
  defstruct index: <<0, 0, 0, 0>>,
            hash: nil,
            version: <<0, 0>>,
            previous_hash: nil,
            difficulty: 3_000_000.0,
            nonce: <<0, 0, 0, 0, 0, 0, 0, 0>>,
            timestamp: nil,
            merkle_root: nil,
            transactions: []

  # Difficulty used until the chain has enough history for retargeting
  # (see calculate_difficulty/1, which requires index >= 11).
  @default_difficulty 3_000_000.0

  @doc """
  When the first node on the Elixium network spins up, there won't be any
  blocks in the chain. In order to create a base from which all nodes can agree,
  we create a block called a genesis block. This block has the data structure
  that a block would have, but has hard-coded values. This block never needs
  to be verified by nodes, as it doesn't contain any actual data. The block
  mined after the genesis block must reference the hash of the genesis block
  as its previous_hash to be valid
  """
  @spec initialize :: Block
  def initialize do
    %Block{
      timestamp: time_unix(),
      previous_hash: String.duplicate("0", 64) # 32 bytes of 0
    }
  end

  @doc """
  Takes the previous block as an argument (This is the way we create every
  block except the genesis block)
  """
  @spec initialize(Block) :: Block
  def initialize(%{index: index, hash: previous_hash}) do
    # index is a binary; decode, increment, re-encode and pad back to the
    # fixed 4-byte width (encode_unsigned drops leading zero bytes).
    index =
      index
      |> :binary.decode_unsigned()
      |> Kernel.+(1)
      |> :binary.encode_unsigned()
      |> Utilities.zero_pad(4)

    block = %Block{
      index: index,
      previous_hash: previous_hash,
      timestamp: time_unix()
    }

    difficulty = calculate_difficulty(block)

    Map.put(block, :difficulty, difficulty)
  end

  # Hash covers the header fields only (not the transactions directly —
  # they are represented through merkle_root).
  @spec calculate_block_hash(Block) :: String.t()
  def calculate_block_hash(block) do
    %{
      index: index,
      version: version,
      previous_hash: previous_hash,
      timestamp: timestamp,
      nonce: nonce,
      merkle_root: merkle_root
    } = block

    Utilities.sha3_base16([
      index,
      version,
      previous_hash,
      timestamp,
      nonce,
      merkle_root
    ])
  end

  @doc """
  The process of mining consists of hashing the index of the block, the hash
  of the previous block (thus linking the current and previous block), the
  timestamp at which the block was generated, the merkle root of the transactions
  within the block, and a random nonce. We then check to see whether the number
  represented by the hash is lower than the mining difficulty. If the value of
  the hash is lower, it is a valid block, and we can broadcast the block to
  other nodes on the network.
  """
  # NOTE: this function never returns normally — it terminates the calling
  # process via exit/1 with either the mined block or :not_in_range as the
  # exit reason, so callers must run it in a dedicated (monitored) process.
  @spec mine(Block, Range.t(), number, number, number) :: Block | :not_in_range
  def mine(block, nonce_range \\ 0..18_446_744_073_709_551_615, cpu_num \\ 0, hashes \\ 0, last_hashrate_check \\ time_unix()) do
    block = Map.put(block, :hash, calculate_block_hash(block))

    cond do
      hash_beat_target?(block) -> exit(block)
      :binary.decode_unsigned(block.nonce) not in nonce_range -> exit(:not_in_range)
      true ->
        # Log the hashrate roughly every 30 seconds (30s threshold plus a
        # modulo guard so we don't log repeatedly within the same second).
        {hashes, last_hashrate_check} =
          if time_unix() > last_hashrate_check + 30 && rem(time_unix() - last_hashrate_check, 31) == 0 do
            time = time_unix()
            Logger.info("CPU ##{cpu_num} Hashrate: #{Float.round((hashes / 30) / 1000, 2)} kH/s")

            {0, time - 1}
          else
            {hashes + 1, last_hashrate_check}
          end

        # Wrap nonce back to 0 if we're about to overflow 8 bytes.
        # We increase the timestamp and try again
        if block.nonce == <<255, 255, 255, 255, 255, 255, 255, 255>> do
          mine(%{block | nonce: <<0, 0, 0, 0, 0, 0, 0, 0>>, timestamp: time_unix()}, nonce_range, cpu_num, hashes, last_hashrate_check)
        else
          nonce =
            block.nonce
            |> :binary.decode_unsigned()
            |> Kernel.+(1)
            |> :binary.encode_unsigned()
            |> Utilities.zero_pad(8) # Add trailing zero bytes since they're removed when encoding / decoding

          mine(%{block | nonce: nonce}, nonce_range, cpu_num, hashes, last_hashrate_check)
        end
    end
  end

  @doc """
  Retrieves a block header from a given block
  """
  @spec header(Block) :: map
  def header(block) do
    %{
      hash: block.hash,
      index: block.index,
      version: block.version,
      previous_hash: block.previous_hash,
      merkle_root: block.merkle_root,
      nonce: block.nonce,
      timestamp: block.timestamp
    }
  end

  @doc """
  Because the hash is a Base16 string, and not an integer, we must first
  convert the hash to an integer, and afterwards compare it to the target
  """
  @spec hash_beat_target?(Block) :: boolean
  def hash_beat_target?(%{hash: hash, difficulty: difficulty}) do
    {integer_value_of_hash, _} = Integer.parse(hash, 16)

    integer_value_of_hash < calculate_target(difficulty)
  end

  @doc """
  The target is a number based off of the block difficulty. The higher the block
  difficulty, the lower the target. When a block is being mined, the goal is
  to find a hash that is lower in numerical value than the target. The maximum
  target (when the difficulty is 0) is
  115792089237316195423570985008687907853269984665640564039457584007913129639935,
  which means any hash is valid.
  """
  @spec calculate_target(float) :: number
  def calculate_target(difficulty), do: round((:math.pow(16, 64) / difficulty)) - 1

  @doc """
  Calculates the block reward for a given block index, following our weighted
  smooth emission algorithm.
  Where x is total token supply, t is block at full emission, i is block index,
  and s is the sigma of the total_token_supply, the Smooth emission algorithm
  is as follows: Round(((x * 10,000,000) * max{0, t - i}) / s) (+1 if i % 172 = 0)
  """
  @spec calculate_block_reward(number) :: integer
  def calculate_block_reward(block_index) do
    sigma_full_emission = Application.get_env(:elixium_core, :sigma_full_emission)
    total_token_supply = Application.get_env(:elixium_core, :total_token_supply)
    block_at_full_emission = Application.get_env(:elixium_core, :block_at_full_emission)

    # 10000000000000028 total ions
    total_token_supply
    |> Kernel.*(10_000_000)
    |> Kernel.*(max(0, block_at_full_emission - block_index))
    |> Kernel./(sigma_full_emission)
    |> Float.round()
    |> Kernel.+(if rem(block_index, 172) == 0, do: 1, else: 0)
    |> trunc()
  end

  # Sum of the fees of every transaction in the list.
  @spec total_block_fees(list) :: integer
  def total_block_fees(transactions) do
    Enum.reduce(transactions, 0, & Transaction.calculate_fee(&1) + &2)
  end

  @doc """
  Return a list of keys that differ between two given block headers.
  """
  @spec diff_header(Block, Block) :: list
  def diff_header(block1, block2) do
    block1
    |> header()
    |> Map.keys()
    |> Enum.filter(&(Map.get(block1, &1) != Map.get(block2, &1)))
  end

  @doc """
  Calculates the difficulty for a block using the WWHM difficulty algorithm
  described at https://getmasari.org/research-papers/wwhm.pdf
  """
  @spec calculate_difficulty(Block) :: number
  def calculate_difficulty(block) do
    index = :binary.decode_unsigned(block.index)

    if index < 11 do
      @default_difficulty
    else
      # Fetch the last <retargeting_window> blocks and decode their binary
      # indices so calculate_difficulty/2 can work with integers.
      blocks_to_weight =
        :elixium_core
        |> Application.get_env(:retargeting_window)
        |> Ledger.last_n_blocks()
        |> Enum.map(&(%{&1 | index: :binary.decode_unsigned(&1.index)}))

      calculate_difficulty(%{block | index: index}, blocks_to_weight)
    end
  end

  def calculate_difficulty(block, blocks_to_weight) do
    retargeting_window = Application.get_env(:elixium_core, :retargeting_window)
    target_solvetime = Application.get_env(:elixium_core, :target_solvetime)

    index =
      if is_binary(block.index) do
        :binary.decode_unsigned(block.index)
      else
        block.index
      end

    if index < 11 do
      @default_difficulty
    else
      # If we don't have enough blocks to fill our retargeting window, the
      # algorithm won't run properly (difficulty will be set too high). Let's scale
      # the algo down until then.
      # NOTE(review): this uses block.index, which may still be a binary
      # here (the decoded value is in `index` above). Erlang term ordering
      # makes min(binary, integer) return the integer, so a binary index
      # would never shrink the window — confirm whether `index` was meant.
      retargeting_window = min(block.index, retargeting_window)

      {weighted_solvetimes, summed_difficulties} = weight_solvetimes_and_sum_difficulties(blocks_to_weight)

      min_timespan = (target_solvetime * retargeting_window) / 2
      weighted_solvetimes = if weighted_solvetimes < min_timespan, do: min_timespan, else: weighted_solvetimes

      target = (retargeting_window + 1) / 2 * target_solvetime

      summed_difficulties * target / weighted_solvetimes
    end
  end

  # Walks the block list in order, weighting each inter-block solvetime by
  # its position (later blocks weigh more) and summing difficulties. The
  # first block only seeds the previous-timestamp accumulator.
  def weight_solvetimes_and_sum_difficulties(blocks) do
    target_solvetime = Application.get_env(:elixium_core, :target_solvetime)
    # Solvetimes are clamped to [1, 10 * target] to damp outliers.
    max_solvetime = target_solvetime * 10

    {_, weighted_solvetimes, summed_difficulties, _} =
      blocks
      |> Enum.scan({nil, 0, 0, 0}, fn block, {last_block_timestamp, weighted_solvetimes, sum_difficulties, i} ->
        if i == 0 do
          {block.timestamp, 0, 0, 1}
        else
          solvetime = block.timestamp - last_block_timestamp
          solvetime = if solvetime > max_solvetime, do: max_solvetime, else: solvetime
          solvetime = if solvetime == 0, do: 1, else: solvetime

          {block.timestamp, weighted_solvetimes + (solvetime * i), sum_difficulties + block.difficulty, i + 1}
        end
      end)
      |> List.last()

    {weighted_solvetimes, summed_difficulties}
  end

  @doc """
  Takes in a block received from a peer which may have malicious or extra
  attributes attached. Removes all extra parameters which are not defined
  explicitly by the block struct.
  """
  @spec sanitize(Block) :: Block
  def sanitize(unsanitized_block) do
    sanitized_block = struct(Block, Map.delete(unsanitized_block, :__struct__))
    sanitized_transactions = Enum.map(sanitized_block.transactions, &Transaction.sanitize/1)

    Map.put(sanitized_block, :transactions, sanitized_transactions)
  end

  # Current UTC time as unix seconds.
  defp time_unix do
    DateTime.utc_now() |> DateTime.to_unix()
  end
end
|
lib/block.ex
| 0.741674
| 0.493836
|
block.ex
|
starcoder
|
defmodule ExIcal.Parser do
  @moduledoc """
  Responsible for parsing an iCal string into a list of events.
  This module contains one public function, `parse/1`.
  Most of the most frequently used iCalendar properties can be parsed from the
  file (for example: start/end time, description, recurrence rules, and more;
  see `ExIcal.Event` for a full list).
  However, there is not yet full coverage of all properties available in the
  iCalendar spec. More properties will be added over time, but if you need a
  legal iCalendar property that `ExIcal` does not yet support, please submit an
  issue on GitHub.
  """
  alias ExIcal.{DateParser, Event}

  @doc """
  Parses an iCal string into a list of events.
  This function takes a single argument–a string in iCalendar format–and returns
  a list of `%ExIcal.Event{}`.
  ## Example
  ```elixir
  HTTPotion.get("url-for-icalendar").body
  |> ExIcal.parse
  |> ExIcal.by_range(DateTime.utc_now(), DateTime.utc_now() |> Timex.shift(days: 7))
  ```
  """
  @spec parse(String.t) :: [%Event{}]
  def parse(data) do
    data
    # NOTE(review): the ~S sigils below insert the literal characters `\x20` /
    # `\n` (backslash included) rather than a space or newline, which looks
    # unintended for RFC 5545 line unfolding. Kept byte-identical to preserve
    # existing behavior — confirm intent before changing.
    |> String.replace(~s"\x20\x20", ~S"\x20")
    |> String.replace(~s"\n\x20", ~S"")
    |> String.replace(~s"\n\t", ~S"\n")
    |> String.replace(~s"\"", "")
    |> String.split("\n")
    |> Enum.reduce(%{events: []}, fn line, acc ->
      line
      |> String.trim()
      |> parse_line(acc)
    end)
    |> Map.get(:events)
  end

  # One clause per supported iCalendar property; unrecognized lines fall
  # through to the final catch-all clause and are ignored.
  defp parse_line("BEGIN:VEVENT" <> _, data), do: %{data | events: [%Event{} | data[:events]]}
  defp parse_line("DTSTART" <> start, data), do: data |> put_to_map(:start, process_date(start, data[:tzid]))
  defp parse_line("DTEND" <> endd, data), do: data |> put_to_map(:end, process_date(endd, data[:tzid]))
  defp parse_line("DTSTAMP" <> stamp, data), do: data |> put_to_map(:stamp, process_date(stamp, data[:tzid]))
  defp parse_line("SUMMARY:" <> summary, data), do: data |> put_to_map(:summary, process_string(summary))
  defp parse_line("DESCRIPTION:" <> description, data), do: data |> put_to_map(:description, process_string(description))
  defp parse_line("UID:" <> uid, data), do: data |> put_to_map(:uid, uid)
  defp parse_line("RRULE:" <> rrule, data), do: data |> put_to_map(:rrule, process_rrule(rrule, data[:tzid]))
  defp parse_line("TZID:" <> tzid, data), do: data |> Map.put(:tzid, tzid)
  defp parse_line("CATEGORIES:" <> categories, data), do: data |> put_to_map(:categories, String.split(categories, ","))
  defp parse_line("ATTENDEE;" <> attendee, data), do: data |> put_to_map(:attendees, process_attendee(attendee, data[:events]))
  defp parse_line("ORGANIZER;" <> organizer, data), do: data |> put_to_map(:organizer, process_organizer(organizer))
  defp parse_line(_, data), do: data

  # Sets `key` on the event currently being built (head of the events list).
  # If no event has been opened yet, the value is silently dropped.
  defp put_to_map(%{events: [event | events]} = data, key, value) do
    updated_event = %{event | key => value}
    %{data | events: [updated_event | events]}
  end

  defp put_to_map(data, _key, _value), do: data

  # "DTSTART:20180101T000000Z" form — the value follows immediately after ":".
  defp process_date(":" <> date, tzid), do: DateParser.parse(date, tzid)

  # "DTSTART;TZID=Europe/Berlin:20180101T000000" form — a property parameter
  # precedes the value; only the TZID parameter is honored.
  defp process_date(";" <> date, _) do
    [timezone, date] = date |> String.split(":")

    timezone =
      case timezone do
        "TZID=" <> timezone -> timezone
        _ -> nil
      end

    DateParser.parse(date, timezone)
  end

  # Parses an RRULE value ("FREQ=DAILY;COUNT=10") into a map with atom keys.
  defp process_rrule(rrule, tzid) do
    rrule |> String.split(";") |> Enum.reduce(%{}, fn rule, hash ->
      [key, value] = rule |> String.split("=")

      # Compare downcased strings instead of calling String.to_atom/1 on
      # file-supplied input: atoms are never garbage-collected, so creating
      # them from untrusted data is a memory-exhaustion risk. Behavior for
      # known keys is unchanged.
      case String.downcase(key) do
        "until" -> hash |> Map.put(:until, DateParser.parse(value, tzid))
        "interval" -> hash |> Map.put(:interval, String.to_integer(value))
        "count" -> hash |> Map.put(:count, String.to_integer(value))
        "freq" -> hash |> Map.put(:freq, value)
        _ -> hash
      end
    end)
  end

  # Unescapes the escape sequences iCalendar uses inside text values.
  defp process_string(string) when is_binary(string) do
    string
    |> String.replace(~S",", ~s",")
    |> String.replace(~S"\n", ~s"\n")
  end

  # Extracts the mailto: addresses from an ATTENDEE property value and merges
  # them with any attendees already collected on the current event.
  # (Previously bound `events`/`data` without using them, causing warnings.)
  defp process_attendee(attendee, [event | _events]) when is_binary(attendee) do
    addresses =
      attendee
      |> String.replace("MAILTO:", "mailto:")
      |> String.split("mailto:")
      |> tl()

    case event.attendees do
      nil -> addresses
      _existing -> [addresses | event.attendees] |> List.flatten()
    end
  end

  # A stray ATTENDEE line before any BEGIN:VEVENT used to raise a
  # FunctionClauseError; ignore it instead (put_to_map/3 drops the value when
  # there is no open event anyway).
  defp process_attendee(_attendee, []), do: nil

  # NOTE(review): assumes exactly one mailto: address in the ORGANIZER value;
  # any other count raises a MatchError.
  defp process_organizer(organizer) when is_binary(organizer) do
    [organizer] =
      organizer
      |> String.replace("MAILTO:", "mailto:")
      |> String.split("mailto:")
      |> tl()

    organizer
  end
end
|
lib/ex_ical/parser.ex
| 0.848078
| 0.808521
|
parser.ex
|
starcoder
|
defmodule Blur.IRC.TwitchTag do
  @moduledoc """
  Handle all the following tags.
  https://dev.twitch.tv/docs/irc/tags/
  # User
  display-name: The user’s display name
  badge-info: indicate the exact number of months the user has been a subscriber.
  badges: Comma-separated list of chat badges and the version of each badge
  color: Hexadecimal RGB color code; the empty string if it is never set.
  user-id: The user's ID.
  # Messages
  bits: (Sent only for Bits messages) The amount of cheer/Bits employed by the user.
  emote-sets: A comma-separated list of emotes, belonging to one or more emote sets.
  emotes: Information to replace text in the message with emote images. This can be empty.
  mod: 1 if the user has a moderator badge; otherwise, 0.
  room-id: The channel ID.
  tmi-sent-ts: Timestamp when the server received the message.
  # Channel
  followers-only: Followers-only mode. If enabled, controls which followers can chat. Valid values: -1 (disabled), 0 (all followers can chat), or a non-negative integer (only users following for at least the specified number of minutes can chat).
  r9k: R9K mode. If enabled, messages with more than 9 characters must be unique. Valid values: 0 (disabled) or 1 (enabled).
  slow: The number of seconds a chatter without moderator privileges must wait between sending messages.
  subs-only: Subscribers-only mode. If enabled, only subscribers and moderators can chat. Valid values: 0 (disabled) or 1 (enabled).
  # User Notice
  msg-id: The type of notice (not the ID). Valid values: sub, resub, subgift, anonsubgift, submysterygift, giftpaidupgrade, rewardgift, anongiftpaidupgrade, raid, unraid, ritual, bitsbadgetier.
  system-msg: The message printed in chat along with this notice.
  # Message Params
  msg-param-cumulative-months (Sent only on sub, resub) The total number of months the user has subscribed. This is the same as msg-param-months but sent for different types of user notices.
  msg-param-displayName (Sent only on raid) The display name of the source user raiding this channel.
  msg-param-login (Sent on only raid) The name of the source user raiding this channel.
  msg-param-months (Sent only on subgift, anonsubgift) The total number of months the user has subscribed. This is the same as msg-param-cumulative-months but sent for different types of user notices.
  msg-param-promo-gift-total (Sent only on anongiftpaidupgrade, giftpaidupgrade) The number of gifts the gifter has given during the promo indicated by msg-param-promo-name.
  msg-param-promo-name (Sent only on anongiftpaidupgrade, giftpaidupgrade) The subscriptions promo, if any, that is ongoing; e.g. Subtember 2018.
  msg-param-recipient-display-name (Sent only on subgift, anonsubgift) The display name of the subscription gift recipient.
  msg-param-recipient-id (Sent only on subgift, anonsubgift) The user ID of the subscription gift recipient.
  msg-param-recipient-user-name (Sent only on subgift, anonsubgift) The user name of the subscription gift recipient.
  msg-param-sender-login (Sent only on giftpaidupgrade) The login of the user who gifted the subscription.
  msg-param-sender-name (Sent only on giftpaidupgrade) The display name of the user who gifted the subscription.
  msg-param-should-share-streak (Sent only on sub, resub) Boolean indicating whether users want their streaks to be shared.
  msg-param-streak-months (Sent only on sub, resub) The number of consecutive months the user has subscribed. This is 0 if msg-param-should-share-streak is 0.
  msg-param-sub-plan (Sent only on sub, resub, subgift, anonsubgift) The type of subscription plan being used. Valid values: Prime, 1000, 2000, 3000. 1000, 2000, and 3000 refer to the first, second, and third levels of paid subscriptions, respectively (currently $4.99, $9.99, and $24.99).
  msg-param-sub-plan-name (Sent only on sub, resub, subgift, anonsubgift) The display name of the subscription plan. This may be a default name or one created by the channel owner.
  msg-param-viewerCount (Sent only on raid) The number of viewers watching the source channel raiding this channel.
  msg-param-ritual-name (Sent only on ritual) The name of the ritual this notice is for. Valid value: new_chatter.
  msg-param-threshold (Sent only on bitsbadgetier) The tier of the bits badge the user just earned; e.g. 100, 1000, 10000.
  """

  @doc """
  Convert twitch tags to a map.
  """
  @spec to_map(cmd :: binary) :: map
  def to_map(cmd) do
    # Each parsed entry is a [key, value] pair; later duplicates win,
    # exactly as with repeated Map.put calls.
    cmd
    |> Blur.Parser.Twitch.parse()
    |> Map.new(fn [key, value] -> {key, value} end)
  end

  @doc """
  Parse server string into parts
  ## Example
  iex> Blur.IRC.TwitchTag.parse_server("red_stone_dragon!red_stone_dragon@red_stone_dragon.tmi.twitch.tv")
  {"red_stone_dragon","red_stone_dragon","red_stone_dragon.tmi.twitch.tv"}
  """
  @spec parse_server(connection_string :: binary) :: {binary, binary, binary}
  def parse_server(connection_string) do
    # "user!nick@host" splits into three parts; a bare host yields one part.
    parts = String.split(connection_string, ["!", "@"])

    case parts do
      [user, nick, host] -> {user, nick, host}
      [host] -> {"", "", host}
    end
  end

  @doc """
  Parse out message from tagged message.
  """
  @spec parse_tagged_message(%ExIRC.Message{}) :: %ExIRC.Message{} | nil
  def parse_tagged_message(irc_message) do
    [connection, cmd, channel | words] =
      irc_message.args
      |> Enum.at(0)
      |> String.split(" ")

    message = Enum.join(words, " ")

    # parse_server/1 always returns a 3-tuple (or raises), so we can
    # destructure it directly.
    {user, nick, server} = parse_server(connection)

    %ExIRC.Message{
      irc_message
      | args: [channel, String.slice(message, 1, String.length(message))],
        cmd: cmd,
        nick: nick,
        user: user,
        server: server
    }
  end
end
|
lib/handlers/IRC/twitch_tag.ex
| 0.699254
| 0.480174
|
twitch_tag.ex
|
starcoder
|
defmodule VintageNet.Technology.Gadget do
  @behaviour VintageNet.Technology

  alias VintageNet.Interface.RawConfig
  alias VintageNet.IP.IPv4Config

  @moduledoc """
  Support for USB Gadget virtual Ethernet interface configurations
  USB Gadget interfaces expose a virtual Ethernet port that has a static
  IP. This runs a simple DHCP server for assigning an IP address to the
  computer at the other end of the USB cable. IP addresses are computed
  based on the hostname and interface name. A /30 subnet is used for the
  two IP addresses for each side of the cable to try to avoid conflicts
  with IP subnets used on either computer.
  Configurations for this technology are maps with a `:type` field set
  to `VintageNet.Technology.Gadget`. Gadget-specific options are in
  a map under the `:gadget` key. These include:
  * `:hostname` - if non-nil, this overrides the hostname used for computing
  a unique IP address for this interface. If unset, `:inet.gethostname/0`
  is used.
  Most users should specify the following configuration:
  ```elixir
  %{type: VintageNet.Technology.Gadget}
  ```
  """

  @impl true
  def normalize(%{type: __MODULE__} = config) do
    # Only the :gadget sub-map survives normalization; everything else is
    # rebuilt from scratch.
    gadget = config |> Map.get(:gadget) |> normalize_gadget()
    %{type: __MODULE__, gadget: gadget}
  end

  # Only a binary :hostname is kept; anything else normalizes to an empty map.
  defp normalize_gadget(%{hostname: hostname}) when is_binary(hostname), do: %{hostname: hostname}
  defp normalize_gadget(_other), do: %{}

  @impl true
  def to_raw_config(ifname, %{type: __MODULE__} = config, opts) do
    normalized = normalize(config)

    # Derive the subnet based on the ifname, but allow the user to force a
    # hostname via the normalized gadget options.
    subnet = derive_subnet(ifname, normalized.gadget)

    ipv4_config = %{
      ipv4: %{
        method: :static,
        address: OneDHCPD.IPCalculator.our_ip_address(subnet),
        prefix_length: OneDHCPD.IPCalculator.prefix_length()
      }
    }

    raw_config = %RawConfig{
      ifname: ifname,
      type: __MODULE__,
      source_config: normalized,
      child_specs: [one_dhcpd_child_spec(ifname)]
    }

    IPv4Config.add_config(raw_config, ipv4_config, opts)
  end

  defp derive_subnet(ifname, %{hostname: hostname}),
    do: OneDHCPD.IPCalculator.default_subnet(ifname, hostname)

  defp derive_subnet(ifname, _gadget), do: OneDHCPD.IPCalculator.default_subnet(ifname)

  @impl true
  def ioctl(_ifname, _command, _args) do
    {:error, :unsupported}
  end

  @impl true
  def check_system(opts) do
    # TODO
    # The former `with :ok <- ...` wrapper was redundant: the helper already
    # returns :ok or an error tuple unchanged.
    check_program(opts[:bin_ifup])
  end

  defp check_program(path) do
    case File.exists?(path) do
      true -> :ok
      false -> {:error, "Can't find #{path}"}
    end
  end

  defp one_dhcpd_child_spec(ifname) do
    %{
      id: {OneDHCPD, ifname},
      start: {OneDHCPD, :start_server, [ifname]}
    }
  end
end
|
lib/vintage_net/technology/gadget.ex
| 0.767908
| 0.473901
|
gadget.ex
|
starcoder
|
defmodule NxEvision do
  @moduledoc """
  `NxEvision` is a bridge
  between [Nx](https://github.com/elixir-nx/nx)
  and [evision](https://github.com/cocoa-xu/evision).
  """

  @doc """
  Converts a tensor of `Nx` to `Mat` of evision (OpenCV).
  The tensor assumes to be `:RGB` color space.
  """
  @spec convert_nx_to_mat(Nx.t()) :: {:ok, reference()} | {:error, String.t()}
  def convert_nx_to_mat(t), do: convert_nx_to_mat(t, :RGB)

  @doc """
  Converts a tensor of `Nx` to `Mat` of evision (OpenCV).
  The second parameter is a color space of the tensor(`:RGB`, `:BGR`, `:RGBA` or `:BGRA`.).
  """
  @spec convert_nx_to_mat(Nx.t(), atom) :: {:ok, reference()} | {:error, String.t()}
  def convert_nx_to_mat(nil, _), do: {:error, "tensor is nil"}

  def convert_nx_to_mat(t, colorspace) do
    # The tensor must be rank 3 ({rows, cols, channels}); any other rank
    # raises a MatchError here.
    {rows, cols, channels} = Nx.shape(t)

    case convert_nx_to_mat(Nx.to_binary(t), Nx.type(t), rows, cols, channels, colorspace) do
      {:ok, reference} -> {:ok, reference}
      # Lower layers report errors as charlists; surface them as strings.
      {:error, reason} -> {:error, List.to_string(reason)}
    end
  end

  @doc false
  @spec convert_nx_to_mat(
          binary(),
          {atom(), pos_integer()},
          pos_integer(),
          pos_integer(),
          pos_integer(),
          atom()
        ) :: {:ok, reference()} | {:error, charlist()}
  # The four clauses below previously duplicated the same from_binary/cvtColor
  # plumbing; the shared logic now lives in the private helpers underneath.
  # The color-code constant is passed as a zero-arity fun so it is only
  # evaluated after from_binary succeeds, matching the original order.
  def convert_nx_to_mat(binary, type, rows, cols, channels = 3, :RGB),
    do: from_binary_then_convert(binary, type, rows, cols, channels, fn -> OpenCV.cv_COLOR_RGB2BGR() end)

  def convert_nx_to_mat(binary, type, rows, cols, channels = 4, :RGBA),
    do: from_binary_then_convert(binary, type, rows, cols, channels, fn -> OpenCV.cv_COLOR_RGBA2BGRA() end)

  # :BGR/:BGRA data already matches OpenCV's native channel order, so no
  # color conversion is needed.
  def convert_nx_to_mat(binary, type, rows, cols, channels = 3, :BGR),
    do: from_binary_mat(binary, type, rows, cols, channels)

  def convert_nx_to_mat(binary, type, rows, cols, channels = 4, :BGRA),
    do: from_binary_mat(binary, type, rows, cols, channels)

  # Wraps OpenCV.Mat.from_binary/5, passing its tagged result through
  # unchanged. Any other return shape raises, as in the original clauses.
  defp from_binary_mat(binary, type, rows, cols, channels) do
    case OpenCV.Mat.from_binary(binary, type, rows, cols, channels) do
      {:ok, reference} -> {:ok, reference}
      {:error, reason} -> {:error, reason}
    end
  end

  # Builds the Mat, then applies the color conversion whose code is produced
  # by `code_fun` (evaluated lazily, only on the success path).
  defp from_binary_then_convert(binary, type, rows, cols, channels, code_fun) do
    case from_binary_mat(binary, type, rows, cols, channels) do
      {:ok, reference} ->
        case OpenCV.cvtColor(reference, code_fun.()) do
          {:ok, reference} -> {:ok, reference}
          {:error, reason} -> {:error, reason}
          _ -> {:error, 'unknown error when cvtColor'}
        end

      {:error, reason} ->
        {:error, reason}
    end
  end

  @doc """
  Converts `Mat` of evision (OpenCV) to a tensor of `Nx`.
  The tensor assumes to be `:RGB` color space.
  """
  @spec convert_mat_to_nx(reference) :: {:ok, Nx.t()} | {:error, String.t()}
  def convert_mat_to_nx(mat), do: convert_mat_to_nx(mat, :RGB)

  @doc """
  Converts `Mat` of evision (OpenCV) to a tensor of `Nx`.
  The second parameter is a color space of the tensor(`:RGB`, `:BGR`, `:RGBA` or `:BGRA`.).
  """
  @spec convert_mat_to_nx(reference, atom) :: {:ok, Nx.t()} | {:error, String.t()}
  def convert_mat_to_nx(nil, _), do: {:error, "reference is nil"}

  # :BGR is OpenCV's native order, so the Mat's bytes can be reinterpreted
  # directly as a tensor of the Mat's own type and shape.
  def convert_mat_to_nx(mat, _colorspace = :BGR) do
    case {OpenCV.Mat.type(mat), OpenCV.Mat.shape(mat), OpenCV.Mat.to_binary(mat)} do
      {{:ok, type}, {:ok, shape}, {:ok, binary}} ->
        {
          :ok,
          Nx.from_binary(binary, type) |> Nx.reshape(shape)
        }

      _ ->
        {:error, "Unknown Mat type"}
    end
  end

  def convert_mat_to_nx(mat, :BGRA), do: convert_mat_to_nx(mat, :BGR)

  # For :RGB/:RGBA requests, convert the channel order first, then delegate
  # to the :BGR/:BGRA path above.
  def convert_mat_to_nx(mat, :RGB) do
    case OpenCV.cvtColor(mat, OpenCV.cv_COLOR_BGR2RGB()) do
      {:ok, reference} -> convert_mat_to_nx(reference, :BGR)
      {:error, reason} -> {:error, reason}
      _ -> {:error, "Unknown error when cvtColor"}
    end
  end

  def convert_mat_to_nx(mat, :RGBA) do
    case OpenCV.cvtColor(mat, OpenCV.cv_COLOR_BGRA2RGBA()) do
      {:ok, reference} -> convert_mat_to_nx(reference, :BGRA)
      {:error, reason} -> {:error, reason}
      _ -> {:error, "Unknown error when cvtColor"}
    end
  end
end
|
lib/nx_evision.ex
| 0.889319
| 0.880386
|
nx_evision.ex
|
starcoder
|
defmodule FusionAuth.Users do
@moduledoc """
The `FusionAuth.Users` module provides access functions to the [FusionAuth Users API](https://fusionauth.io/docs/v1/tech/apis/users).
All functions require a Tesla Client struct created with `FusionAuth.client(base_url, api_key, tenant_id)`.
## User Fields
- token :: String.t()\n
The access token, this string is an encoded JSON Web Token (JWT).
- active :: boolean()\n
True if the User is active. False if the User has been deactivated. Deactivated Users will not be able to login.
- birthDate :: String.t()\n
The User’s birthdate formatted as `YYYY-MM-DD`.
- cleanSpeakId :: String.t()\n
This Id is used by FusionAuth when the User’s username is sent to CleanSpeak to be moderated (filtered and potentially sent to the approval queue). It is the *content Id* of the username inside CleanSpeak.
- data :: map()\n
An object that can hold any information about the User that should be persisted.
- email :: String.t()\n
The User’s email address.
- expiry :: integer()\n
The expiration instant of the User’s account. An expired user is not permitted to login.
- firstName :: String.t()\n
The first name of the User.
- fullName :: String.t()\n
The User’s full name as a separate field that is not calculated from `firstName` and `lastName`.
- id :: String.t()\n
The User's unique Id.
- imageUrl :: String.t()\n
The URL that points to an image file that is the User’s profile image.
- insertInstant :: integer()\n
The instant when user was created.
- lastLoginInstant :: integer()\n
The instant when the User logged in last.
- lastName :: String.t()\n
The User's last name.
- middleName :: String.t()\n
The User's middle name.
- mobilePhone :: String.t()\n
The User’s mobile phone number. This is useful is you will be sending push notifications or SMS messages to the User.
- parentEmail :: String.t()\n
The email address of the user’s parent or guardian. If this value was provided during a create or update operation, this value value will only remain until the child is claimed by a parent.
- passwordChangeRequired :: boolean()\n
Indicates that the User’s password needs to be changed during their next login attempt.
- passwordLastUpdateInstant :: integer()\n
The instant that the User last changed their password.
- preferredLanguages :: list()\n
An array of locale strings that give, in order, the User’s preferred languages. These are important for email templates and other localizable text. See [Locales](https://fusionauth.io/docs/v1/tech/reference/data-types#locales).
- registrations :: list()\n
The list of registrations for the User.
- tenantId :: String.t()\n
The Id of the Tenant that this User belongs to.
- timezone :: String.t()\n
The User’s preferred timezone. This can be used as a default to display instants, and it is recommended that you allow User’s to change this per-session. The string will be in an [IANA](https://www.iana.org/time-zones) time zone format.
- twoFactorDelivery :: String.t()\n
The User’s preferred delivery for verification codes during a two factor login request.
- twoFactorEnabled :: boolean()\n
Determines if the User has two factor authentication enabled for their account or not.
- username :: String.t()\n
The username of the User.
- usernameStatus :: String.t()\n
The current status of the username. This is used if you are moderating usernames via CleanSpeak.
- verified :: boolean()\n
Whether or not the User’s email has been verified.
## Examples
iex> client = FusionAuth.client("http://localhost:9011", "sQ9wwELaI0whHQqyQUxAJmZvVzZqUL-hpfmAmPgbIu8", "6b40f9d6-cfd8-4312-bff8-b082ad45e93c")
iex> FusionAuth.Users.get_user_by_id(client, "06da543e-df3e-4011-b122-a9ff04326599")
{:ok,
%{
"user" => %{
"active" => true,
"email" => "<EMAIL>",
"firstName" => "Cogility",
"fullName" => "<NAME>",
"id" => "06da543e-df3e-4011-b122-a9ff04326599",
"insertInstant" => 1590606624689,
"lastLoginInstant" => 1591138635342,
"lastName" => "Admin",
"memberships" => [
%{
"groupId" => "6f0a1769-21f3-4705-a653-bd66c3ff6a63",
"id" => "ff6fea80-31a3-439b-b880-def21933a01d",
"insertInstant" => 1590705735370
}
],
"mobilePhone" => "6092895176",
"passwordChangeRequired" => false,
"passwordLastUpdateInstant" => <PASSWORD>,
"preferredLanguages" => ["en"],
"registrations" => [
%{
"applicationId" => "f8109431-14f2-4815-9987-77fdedeff802",
"id" => "7aaad5c8-846d-4a40-b587-fa62f0e6240e",
"insertInstant" => 1590606684278,
"lastLoginInstant" => 1591138635342,
"preferredLanguages" => ["en"],
"roles" => ["admin", "user"],
"timezone" => "America/Los_Angeles",
"username" => "cogadmin",
"usernameStatus" => "ACTIVE",
"verified" => true
}
],
"tenantId" => "6b40f9d6-cfd8-4312-bff8-b082ad45e93c",
"timezone" => "America/Los_Angeles",
"twoFactorDelivery" => "None",
"twoFactorEnabled" => false,
"username" => "cogadmin",
"usernameStatus" => "ACTIVE",
"verified" => true
}
},
%Tesla.Env{
__client__: %Tesla.Client{
adapter: {Tesla.Adapter.Hackney, :call, [[recv_timeout: 30000]]},
fun: nil,
post: [],
pre: [
{Tesla.Middleware.BaseUrl, :call, ["http://localhost:9011"]},
{Tesla.Middleware.JSON, :call, [[]]},
{Tesla.Middleware.Headers, :call,
[
[
{"Authorization", "<KEY>"},
{"X-FusionAuth-TenantId", "6b40f9d6-cfd8-4312-bff8-b082ad45e93c"}
]
]}
]
},
__module__: Tesla,
body: %{
"user" => %{
"active" => true,
"email" => "<EMAIL>",
"firstName" => "Cogility",
"fullName" => "<NAME>",
"id" => "06da543e-df3e-4011-b122-a9ff04326599",
"insertInstant" => 1590606624689,
"lastLoginInstant" => 1591138635342,
"lastName" => "Admin",
"memberships" => [
%{
"groupId" => "6f0a1769-21f3-4705-a653-bd66c3ff6a63",
"id" => "ff6fea80-31a3-439b-b880-def21933a01d",
"insertInstant" => 1590705735370
}
],
"mobilePhone" => "6092895176",
"passwordChangeRequired" => false,
"passwordLastUpdateInstant" => 1590606624715,
"preferredLanguages" => ["en"],
"registrations" => [
%{
"applicationId" => "f8109431-14f2-4815-9987-77fdedeff802",
"id" => "7aaad5c8-846d-4a40-b587-fa62f0e6240e",
"insertInstant" => 1590606684278,
"lastLoginInstant" => 1591138635342,
"preferredLanguages" => ["en"],
"roles" => ["admin", "user"],
"timezone" => "America/Los_Angeles",
"username" => "cogadmin",
"usernameStatus" => "ACTIVE",
"verified" => true
}
],
"tenantId" => "6b40f9d6-cfd8-4312-bff8-b082ad45e93c",
"timezone" => "America/Los_Angeles",
"twoFactorDelivery" => "None",
"twoFactorEnabled" => false,
"username" => "cogadmin",
"usernameStatus" => "ACTIVE",
"verified" => true
}
},
headers: [
{"content-type", "application/json;charset=UTF-8"},
{"content-length", "1033"},
{"date", "Thu, 04 Jun 2020 17:48:29 GMT"}
],
method: :get,
opts: [],
query: [],
status: 200,
url: "http://localhost:9011/api/user/06da543e-df3e-4011-b122-a9ff04326599"
}}
"""
alias FusionAuth.Utils
@type search_criteria() ::
%{
ids: list() | nil,
query: String.t() | nil,
queryString: String.t() | nil,
numberOfResults: integer() | nil,
sortFields: list(sort_field()) | nil,
startRow: integer() | nil
}
| map()
@type sort_field() ::
%{
missing: String.t() | nil,
name: String.t(),
order: String.t() | nil
}
| map()
@users_url "/api/user"
@doc """
Create a new user. You must specify either the email or the username or both for the User. Either of these values
may be used to uniquely identify the User and may be used to authenticate the User.
For more information visit the FusionAuth API Documentation for [Create a User](https://fusionauth.io/docs/v1/tech/apis/users#create-a-user).
"""
@spec create_user(FusionAuth.client(), map()) :: FusionAuth.result()
def create_user(client, user) do
Tesla.post(client, @users_url, %{user: user}) |> FusionAuth.result()
end
@doc """
Get a user by the user’s ID.
For more information visit the FusionAuth API Documentation for [Retrieve a User](https://fusionauth.io/docs/v1/tech/apis/users#retrieve-a-user).
"""
@spec get_user_by_id(FusionAuth.client(), String.t()) :: FusionAuth.result()
def get_user_by_id(client, user_id) do
Tesla.get(client, @users_url <> "/#{user_id}") |> FusionAuth.result()
end
@doc """
Get a user by the user’s login ID.
For more information visit the FusionAuth API Documentation for [Retrieve a User](https://fusionauth.io/docs/v1/tech/apis/users#retrieve-a-user).
"""
@spec get_user_by_login_id(FusionAuth.client(), String.t()) :: FusionAuth.result()
def get_user_by_login_id(client, login_id) do
Tesla.get(client, @users_url <> "?loginId=#{login_id}") |> FusionAuth.result()
end
@doc """
Get a user by the user’s email.
For more information visit the FusionAuth API Documentation for [Retrieve a User](https://fusionauth.io/docs/v1/tech/apis/users#retrieve-a-user).
"""
@spec get_user_by_email(FusionAuth.client(), String.t()) :: FusionAuth.result()
def get_user_by_email(client, email) do
Tesla.get(client, @users_url <> "?email=#{email}") |> FusionAuth.result()
end
@doc """
Get a user by the user’s username.
For more information visit the FusionAuth API Documentation for [Retrieve a User](https://fusionauth.io/docs/v1/tech/apis/users#retrieve-a-user).
"""
@spec get_user_by_username(FusionAuth.client(), String.t()) :: FusionAuth.result()
def get_user_by_username(client, username) do
Tesla.get(client, @users_url <> "?username=#{username}") |> FusionAuth.result()
end
@doc """
Update a user.
For more information visit the FusionAuth API Documentation for [Update a User](https://fusionauth.io/docs/v1/tech/apis/users#update-a-user).
"""
@spec update_user(FusionAuth.client(), String.t(), map()) :: FusionAuth.result()
def update_user(client, user_id, user) do
Tesla.put(client, @users_url <> "/#{user_id}", %{user: user}) |> FusionAuth.result()
end
@doc """
Deactivate or delete a user by the user's ID. Soft deleted Users are marked as inactive but not deleted from FusionAuth.
## Parameters
- hardDelete :: boolean() :: Optional :: _Defaults to false_\n
To Permanently delete a user from FusionAuth set this value to `true`. Once a user has been permanently deleted, the action cannot be undone. When this value is set to `false` the user is marked as inactive and the user will be unable log into FusionAuth. This action may be undone by reactivating the user.
For more information visit the FusionAuth API Documentation for [Delete a User](https://fusionauth.io/docs/v1/tech/apis/users#delete-a-user).
"""
@spec delete_user(FusionAuth.client(), String.t(), key: boolean()) :: FusionAuth.result()
def delete_user(client, user_id, parameters \\ []) do
Tesla.delete(client, @users_url <> "/#{user_id}" <> Utils.build_query_parameters(parameters))
|> FusionAuth.result()
end
@doc """
Bulk deactivate or delete users based on their user IDs.
## Parameters (query)
- dryRun :: boolean() :: Optional :: _Defaults to false_\n
To preview the user Ids to be deleted by the request without applying the requested action set this value to `true`.
- hardDelete :: boolean() :: Optional :: _Defaults to false_\n
To Permanently delete a user from FusionAuth set this value to `true`. Once a user has been permanently deleted, the action cannot be undone. When this value is set to `false` the user is marked as inactive and the user will be unable log into FusionAuth. This action may be undone by reactivating the user.
- query :: String.t() :: Optional\n
The raw JSON Elasticsearch query that is used to search for Users. The `userId`, `query`, and `queryString` parameters are mutually exclusive, they are listed here in order of precedence.
It is necessary to use the `query` parameter when querying against registrations in order to achieve expected results, as this field is defined as a nested datatype in the Elasticsearch mapping.
- queryString :: String.t() :: Optional\n
The Elasticsearch query string that is used to search for Users to be deleted. The userId, query, and queryString parameters are mutually exclusive, they are listed here in order of precedence.
- userId :: String.t() :: Optional\n
For more information visit the FusionAuth API Documentation for [Bulk Delete Users](https://fusionauth.io/docs/v1/tech/apis/users#bulk-delete-users).
"""
@spec bulk_delete_users(FusionAuth.client(), [String.t()], list(Keyword.t())) ::
FusionAuth.result()
def bulk_delete_users(client, user_ids, query \\ []) do
user_list = Enum.reduce(user_ids, [], fn id, acc -> [userId: id] ++ acc end)
params = Utils.build_query_parameters(user_list ++ query)
Tesla.delete(client, @users_url <> "/bulk" <> params)
|> FusionAuth.result()
end
@doc """
Reactivate an inactive user by the user's ID.
For more information visit the FusionAuth API Documentation for [Reactivate a User](https://fusionauth.io/docs/v1/tech/apis/users#reactivate-a-user).
"""
@spec reactivate_user(FusionAuth.client(), String.t()) :: FusionAuth.result()
def reactivate_user(client, user_id) do
Tesla.put(client, @users_url <> "/#{user_id}?reactivate=true", %{}) |> FusionAuth.result()
end
@doc """
Bulk import multiple users.
For more information visit the FusionAuth API Documentation for [Import Users](https://fusionauth.io/docs/v1/tech/apis/users#import-users).
"""
@spec import_users(FusionAuth.client(), list()) :: FusionAuth.result()
def import_users(client, users) do
Tesla.post(client, @users_url <> "/import", %{users: users}) |> FusionAuth.result()
end
@doc """
Search for users.
For more information visit the FusionAuth API Documentation for [Search for Users](https://fusionauth.io/docs/v1/tech/apis/users#search-for-users).
"""
@spec search_users(FusionAuth.client(), search_criteria()) :: FusionAuth.result()
def search_users(client, search) do
Tesla.post(client, @users_url <> "/search", %{search: search}) |> FusionAuth.result()
end
@doc """
Get recent logins for a specific user or all users. If no userId is specified, recent logins for all users in the
system will be returned.
## Parameters
- limit :: integer() :: Optional :: _Defaults to 10_\n
This parameter indicates the maximum amount of logins to return for a single request.
- offset :: integer() :: Optional :: _Default to 0_\n
This parameter provides the offset into the result set. Generally speaking if you wish to paginate the results, you will increment this parameter on subsequent API request by the size of the `limit` parameter.
- userId :: String.t() :: Optional
This parameter will narrow the results to only logins for a particular user. When this parameter is omitted, the most recent logins for all of FusionAuth will be returned.
For more information visit the FusionAuth API Documentation for [Retrieve Recent Logins](https://fusionauth.io/docs/v1/tech/apis/users#retrieve-recent-logins).
"""
@spec get_recent_logins(FusionAuth.client(), key: integer() | String.t()) :: FusionAuth.result()
def get_recent_logins(client, parameters \\ []) do
Tesla.get(client, @users_url <> "/recent-login" <> Utils.build_query_parameters(parameters))
|> FusionAuth.result()
end
@doc """
Verify a user's email.
For more information visit the FusionAuth API Documentation for [Verify a User's Email](https://fusionauth.io/docs/v1/tech/apis/users#verify-a-users-email).
"""
@spec verify_user_email(FusionAuth.client(), String.t()) :: FusionAuth.result()
def verify_user_email(client, verification_id) do
Tesla.post(client, @users_url <> "/verify-email/#{verification_id}", %{})
|> FusionAuth.result()
end
@doc """
Resend verification email.
For more information visit the FusionAuth API Documentation for [Resend Verification Email](https://fusionauth.io/docs/v1/tech/apis/users#resend-verification-email).
"""
@spec resend_verification_email(FusionAuth.client(), String.t()) :: FusionAuth.result()
def resend_verification_email(client, email) do
Tesla.put(client, @users_url <> "/verify-email?email=#{email}", %{})
|> FusionAuth.result()
end
@doc """
Start forgot password workflow. For example, on your login form you may have a button for Forgot your password. This
would be the API you would call to initiate the request for the user. If the email configuration is complete, the user
will be sent the forgot password email containing a link containing the changePasswordId. The provided link should take
the user to a form that allows them to change their password. This form should contain a hidden field for the
changePasswordId generated by this API.
The login identifier can be either the email or the username. The username is not case sensitive.
For more information visit the FusionAuth API Documentation for [Start Forgot Password Workflow](https://fusionauth.io/docs/v1/tech/apis/users#start-forgot-password-workflow).
"""
@spec forgot_password(FusionAuth.client(), String.t()) :: FusionAuth.result()
def forgot_password(client, login_id) do
Tesla.post(client, @users_url <> "/forgot-password", %{loginId: login_id})
|> FusionAuth.result()
end
@doc """
Change a user's password with a change password ID. This usually occurs after an email has been sent to the user
and they clicked on a link to reset their password.
For more information visit the FusionAuth API Documentation for [Change a User's Password](https://fusionauth.io/docs/v1/tech/apis/users#change-a-users-password).
"""
@spec change_password(FusionAuth.client(), String.t(), map()) ::
        FusionAuth.result()
def change_password(client, change_password_id, password_data) do
  url = @users_url <> "/change-password/#{change_password_id}"

  client
  |> Tesla.post(url, password_data)
  |> FusionAuth.result()
end
@doc """
Change a user's password using their identity (loginID and password). Using a loginId instead of the changePasswordId
bypasses the email verification and allows a password to be changed directly without first calling the forgot_password
method.
For more information visit the FusionAuth API Documentation for [Change a User's Password](https://fusionauth.io/docs/v1/tech/apis/users#change-a-users-password).
"""
@spec change_password_by_identity(FusionAuth.client(), map()) :: FusionAuth.result()
def change_password_by_identity(client, password_data) do
  client
  |> Tesla.post(@users_url <> "/change-password", password_data)
  |> FusionAuth.result()
end
end
|
lib/fusion_auth/users.ex
| 0.808257
| 0.470372
|
users.ex
|
starcoder
|
defmodule ExChip8.Instructions do
  @moduledoc """
  Implements all chip8 instructions.
  Operation code is pattern matched to corresponding instruction implementation method.
  Mutations are immediately executed according to instruction specification.
  Unknown operation code raises.

  NOTE(review): the Bnnn (JP V0, addr) instruction is not implemented and will
  hit the unknown-opcode clause — confirm the supported ROM set never uses it.
  """

  alias ExChip8.{Screen, Stack, Registers, Keyboard, Memory}
  import Bitwise
  require Logger

  # NOTE(review): read at compile time, so a config change requires
  # recompilation; `Application.compile_env/3` would make that intent explicit.
  @chip8_default_sprite_height Application.get_env(:ex_chip8, :chip8_default_sprite_height)

  @doc """
  Execute instruction associated with opcode.

  Decodes the standard CHIP-8 operand fields (nnn, x, y, kk, n) from the
  16-bit opcode and dispatches to the matching `_exec/2` clause. Returns
  `:wait_for_key_press` only for the Fx0A instruction when no key is down,
  `:ok` otherwise.
  """
  @spec exec(integer) :: :ok | :wait_for_key_press
  def exec(opcode) do
    nnn = opcode &&& 0x0FFF
    x = opcode >>> 8 &&& 0x000F
    y = opcode >>> 4 &&& 0x000F
    kk = opcode &&& 0x00FF
    n = opcode &&& 0x000F

    case _exec(opcode, %{nnn: nnn, x: x, y: y, kk: kk, n: n}) do
      :wait_for_key_press -> :wait_for_key_press
      _ -> :ok
    end
  end

  # CLS - Clear the display.
  defp _exec(0x00E0, _) do
    Screen.get_screen()
    |> Screen.screen_clear()
  end

  # RET - Return from subroutine: pop the return address into PC.
  defp _exec(0x00EE, _) do
    pc = Stack.stack_pop()
    Registers.insert_register(:pc, pc)
  end

  # JP addr - 1nnn, Jump to location nnn.
  defp _exec(opcode, %{nnn: nnn}) when (opcode &&& 0xF000) == 0x1000 do
    Registers.insert_register(:pc, nnn)
  end

  # CALL addr - 2nnn, Call subroutine at nnn: push current PC, then jump.
  defp _exec(opcode, %{nnn: nnn}) when (opcode &&& 0xF000) == 0x2000 do
    pc = Registers.lookup_register(:pc)
    Stack.stack_push(pc)
    Registers.insert_register(:pc, nnn)
  end

  # SE Vx, byte - 3xkk, Skip next instruction if Vx == kk.
  defp _exec(opcode, %{x: x, kk: kk}) when (opcode &&& 0xF000) == 0x3000 do
    vx = Registers.lookup_v_register(x)

    if vx == kk do
      pc = Registers.lookup_register(:pc)
      Registers.insert_register(:pc, pc + 2)
    end
  end

  # SNE Vx, byte - 4xkk, Skip next instruction if Vx != kk.
  defp _exec(opcode, %{x: x, kk: kk}) when (opcode &&& 0xF000) == 0x4000 do
    reg_val = Registers.lookup_v_register(x)

    if reg_val != kk do
      pc = Registers.lookup_register(:pc)
      Registers.insert_register(:pc, pc + 2)
    end
  end

  # SE Vx, Vy - 5xy0, Skip next instruction if Vx == Vy.
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF000) == 0x5000 do
    vx = Registers.lookup_v_register(x)
    vy = Registers.lookup_v_register(y)

    if vx == vy do
      pc = Registers.lookup_register(:pc)
      Registers.insert_register(:pc, pc + 2)
    end
  end

  # LD Vx, byte - 6xkk, Vx = kk.
  defp _exec(opcode, %{x: x, kk: kk}) when (opcode &&& 0xF000) == 0x6000 do
    Registers.insert_v_register(x, kk)
  end

  # ADD Vx, byte - 7xkk, Set Vx = Vx + kk, wrapped to 8 bits (no carry flag).
  defp _exec(opcode, %{x: x, kk: kk}) when (opcode &&& 0xF000) == 0x7000 do
    v = Registers.lookup_v_register(x)
    sum = v + kk
    <<to_8_bit_int::8>> = <<sum::8>>
    Registers.insert_v_register(x, to_8_bit_int)
  end

  # LD Vx, Vy - 8xy0, Vx = Vy.
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF00F) == 0x8000 do
    y_value = Registers.lookup_v_register(y)
    Registers.insert_v_register(x, y_value)
  end

  # OR Vx, Vy - 8xy1, Vx = Vx OR Vy (bitwise).
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF00F) == 0x8001 do
    y_value = Registers.lookup_v_register(y)
    x_value = Registers.lookup_v_register(x)
    Registers.insert_v_register(x, x_value ||| y_value)
  end

  # AND Vx, Vy - 8xy2, Vx = Vx AND Vy (bitwise).
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF00F) == 0x8002 do
    y_value = Registers.lookup_v_register(y)
    x_value = Registers.lookup_v_register(x)
    Registers.insert_v_register(x, x_value &&& y_value)
  end

  # XOR Vx, Vy - 8xy3, Vx = Vx XOR Vy (bitwise).
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF00F) == 0x8003 do
    y_value = Registers.lookup_v_register(y)
    x_value = Registers.lookup_v_register(x)
    Registers.insert_v_register(x, bxor(x_value, y_value))
  end

  # ADD Vx, Vy - 8xy4, Set Vx = Vx + Vy (wrapped to 8 bits), VF = carry (1/0).
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF00F) == 0x8004 do
    y_value = Registers.lookup_v_register(y)
    x_value = Registers.lookup_v_register(x)
    sum = x_value + y_value
    # Fixed: wrap the result to 8 bits (as 7xkk already did) and store the
    # flag as an integer (as Dxyn already did) instead of a raw boolean.
    <<wrapped::8>> = <<sum::8>>
    Registers.insert_v_register(x, wrapped)
    Registers.insert_v_register(0x0F, boolean_to_integer(sum > 0xFF))
  end

  # SUB Vx, Vy - 8xy5, Set Vx = Vx - Vy (wrapped to 8 bits), VF = NOT borrow (1/0).
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF00F) == 0x8005 do
    y_value = Registers.lookup_v_register(y)
    x_value = Registers.lookup_v_register(x)
    # Fixed: a negative difference must wrap to an unsigned byte, and VF
    # must hold 1/0, not true/false.
    <<wrapped::8>> = <<(x_value - y_value)::8>>
    Registers.insert_v_register(x, wrapped)
    Registers.insert_v_register(0x0F, boolean_to_integer(x_value > y_value))
  end

  # SHR Vx {, Vy} - 8xy6, VF = least-significant bit of Vx, then Vx = Vx >> 1.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF00F) == 0x8006 do
    x_value = Registers.lookup_v_register(x)
    updated_x = div(x_value, 2)
    updated_vf = x_value &&& 0x01
    Registers.insert_v_register(x, updated_x)
    Registers.insert_v_register(0x0F, updated_vf)
  end

  # SUBN Vx, Vy - 8xy7, Set Vx = Vy - Vx (wrapped), VF = NOT borrow (1/0).
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF00F) == 0x8007 do
    y_value = Registers.lookup_v_register(y)
    x_value = Registers.lookup_v_register(x)
    # Fixed: wrap the (possibly negative) difference and store VF as 1/0.
    <<wrapped::8>> = <<(y_value - x_value)::8>>
    Registers.insert_v_register(x, wrapped)
    Registers.insert_v_register(0x0F, boolean_to_integer(y_value > x_value))
  end

  # SHL Vx {, Vy} - 8xyE, VF = most-significant bit of Vx, then Vx = Vx << 1.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF00F) == 0x800E do
    x_value = Registers.lookup_v_register(x)
    # Fixed: VF is the flag value 1/0 (previously it was stored as 0x80/0),
    # and the shifted value must wrap to 8 bits.
    <<wrapped::8>> = <<(x_value <<< 1)::8>>
    Registers.insert_v_register(x, wrapped)
    Registers.insert_v_register(0x0F, (x_value &&& 0b10000000) >>> 7)
  end

  # SNE Vx, Vy - 9xy0, Skip next instruction if Vx != Vy.
  defp _exec(opcode, %{x: x, y: y}) when (opcode &&& 0xF000) == 0x9000 do
    y_value = Registers.lookup_v_register(y)
    x_value = Registers.lookup_v_register(x)

    if x_value != y_value do
      pc = Registers.lookup_register(:pc)
      Registers.insert_register(:pc, pc + 2)
    end
  end

  # LD I, addr - Annn, Sets the I register to nnn.
  defp _exec(opcode, %{nnn: nnn}) when (opcode &&& 0xF000) == 0xA000 do
    Registers.insert_register(:i, nnn)
  end

  # RND Vx, byte - Cxkk, Vx = (random byte) AND kk.
  defp _exec(opcode, %{x: x, kk: kk}) when (opcode &&& 0xF000) == 0xC000 do
    # Fixed: :rand.uniform(255) yields 1..255 and could never produce 0;
    # the spec requires a random value in 0..255.
    updated_x = (:rand.uniform(256) - 1) &&& kk
    Registers.insert_v_register(x, updated_x)
  end

  # DRW Vx, Vy, nibble - Dxyn, Draws an n-byte sprite from memory[I] at (Vx, Vy);
  # VF is set to 1 when any set pixel is erased (collision), 0 otherwise.
  defp _exec(opcode, %{x: x, y: y, n: n}) when (opcode &&& 0xF000) == 0xD000 do
    sprite_index = Registers.lookup_register(:i)

    %{collision: updated_vf} =
      Screen.get_screen()
      |> Screen.screen_draw_sprite(%{
        x: Registers.lookup_v_register(x),
        y: Registers.lookup_v_register(y),
        sprite_index: sprite_index,
        num: n
      })

    Registers.insert_v_register(0x0F, boolean_to_integer(updated_vf))
  end

  # SKP Vx - Ex9E, Skip the next instruction if the key with the value of Vx is pressed.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xE09E do
    x_value = Registers.lookup_v_register(x)

    if Keyboard.get_keyboard() |> Keyboard.keyboard_is_down(x_value) do
      pc = Registers.lookup_register(:pc)
      Registers.insert_register(:pc, pc + 2)
    end
  end

  # SKNP Vx - ExA1, Skip the next instruction if the key with the value of Vx is NOT pressed.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xE0A1 do
    x_value = Registers.lookup_v_register(x)

    if not (Keyboard.get_keyboard() |> Keyboard.keyboard_is_down(x_value)) do
      pc = Registers.lookup_register(:pc)
      Registers.insert_register(:pc, pc + 2)
    end
  end

  # LD Vx, DT - Fx07, Set Vx to the delay timer value.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF007 do
    updated_x = Registers.lookup_register(:delay_timer)
    Registers.insert_v_register(x, updated_x)
  end

  # LD Vx, K - Fx0A, Wait for a key press; store the key value in Vx.
  # Returns :wait_for_key_press so the caller can re-run this opcode later.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF00A do
    keyboard = Keyboard.get_keyboard()
    pressed_key = keyboard.pressed_key
    # keyboard_map/2 apparently returns `false` when no key is mapped/pressed
    # — TODO(review): confirm against ExChip8.Keyboard.
    pressed_key_index = Keyboard.keyboard_map(keyboard, pressed_key)

    case pressed_key_index do
      false -> :wait_for_key_press
      _ -> Registers.insert_v_register(x, pressed_key_index)
    end
  end

  # LD DT, Vx - Fx15, Set delay_timer to Vx.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF015 do
    x_value = Registers.lookup_v_register(x)
    Registers.insert_register(:delay_timer, x_value)
  end

  # LD ST, Vx - Fx18, Set sound_timer to Vx.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF018 do
    x_value = Registers.lookup_v_register(x)
    Registers.insert_register(:sound_timer, x_value)
  end

  # ADD I, Vx - Fx1E, Set I = I + Vx.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF01E do
    x_value = Registers.lookup_v_register(x)
    i_value = Registers.lookup_register(:i)
    Registers.insert_register(:i, i_value + x_value)
  end

  # LD F, Vx - Fx29, Set I to the location of the built-in sprite for digit Vx.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF029 do
    x_value = Registers.lookup_v_register(x)
    Registers.insert_register(:i, x_value * @chip8_default_sprite_height)
  end

  # LD B, Vx - Fx33, Store the BCD representation of Vx at I, I+1, I+2.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF033 do
    x_value = Registers.lookup_v_register(x)
    hundreds = x_value |> div(100)
    tens = x_value |> div(10) |> rem(10)
    units = x_value |> rem(10)
    i_value = Registers.lookup_register(:i)
    Memory.insert_memory(i_value, hundreds)
    Memory.insert_memory(i_value + 1, tens)
    Memory.insert_memory(i_value + 2, units)
  end

  # LD [I], Vx - Fx55, Store registers V0 through Vx in memory starting at I.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF055 do
    # I never changes during this instruction, so look it up once.
    i_value = Registers.lookup_register(:i)

    # Fixed: the spec range is inclusive (V0..Vx). The previous `0..(x - 1)`
    # skipped Vx and, for x == 0, produced the *descending* range 0..-1.
    # Enum.each replaces Enum.map since only the side effects matter.
    Enum.each(0..x, fn offset ->
      value = Registers.lookup_v_register(offset)
      Memory.insert_memory(i_value + offset, value)
    end)
  end

  # LD Vx, [I] - Fx65, Read registers V0 through Vx from memory starting at I.
  defp _exec(opcode, %{x: x}) when (opcode &&& 0xF0FF) == 0xF065 do
    i_value = Registers.lookup_register(:i)

    # Fixed: previously read `i_value + 1` on every iteration, loading the
    # same byte into every register; each register must come from I + offset.
    # Range is inclusive for the same reason as Fx55.
    Enum.each(0..x, fn offset ->
      value_from_memory = Memory.lookup_memory(i_value + offset)
      Registers.insert_v_register(offset, value_from_memory)
    end)
  end

  # Fallback: any undecoded opcode is a hard error.
  defp _exec(opcode, _) do
    raise "UNKNOWN: #{Integer.to_charlist(opcode, 16)}"
  end

  # CHIP-8 registers hold integers; flags are stored as 1/0.
  defp boolean_to_integer(true), do: 1
  defp boolean_to_integer(false), do: 0
end
|
lib/ex_chip8/instructions.ex
| 0.53437
| 0.47792
|
instructions.ex
|
starcoder
|
defmodule Uderzo.GenRenderer do
  @moduledoc """
  Generic rendering code. This will start a process that will render a frame
  at a regular rate in a window of the indicated size. Rendering the frame is
  done by calling a callback with the current window
  size and height and the current mouse pointer position.
  `GenRenderer` is using `GenServer` internally and is thus OTP compatible.
  Usually, if you want to use Uderzo, this is the module you want to build
  around. See also the examples and demos in the repository.
  The basic usage of GenRenderer is the same as GenServer: you `use` the
  module, supply a `render_frame/5` callback and an optional `init_renderer/1` callback. There
  are just more arguments than with GenServer ;-). Short skeleton:
  ```
  defmodule MyRenderer do
    use Uderzo.GenRenderer
    def start_link(...) do
      Uderzo.GenRenderer.start_link(__MODULE__, "My Demo", 800, 600, 60, [], name: __MODULE__)
    end
    def init_renderer([]) do
      {:ok, %{some: :state}}
    end
    def render_frame(window_width, window_height, mouse_x, mouse_y, state) do
      ... Paint your frame here ...
      {:ok, state}
    end
  ```
  One important difference with GenServer is that the `init_renderer/1` callback isn't
  called during start time but rather as soon as Uderzo is initialized. This means
  that you can call functions to load fonts, etcetera, at initialization time.
  Note that once called, GenRenderer just goes off and does rendering. There's no
  requirement to interact with it further, although you can set the user state directly,
  forcing a redraw if desired.
  """

  @doc """
  The (optional) init callback. It either returns `:ok` and the initial state, or
  an error which will cause the GenRenderer to bail out.
  """
  @callback init_renderer(args :: term) :: {:ok, term} | :error

  @doc """
  The rendering function. This is called `fps` times per second. It should try to
  complete quickly so that frames aren't skipped.
  """
  @callback render_frame(window_width :: float, window_height :: float,
                         mouse_x :: float,
                         mouse_y :: float,
                         state :: term) :: {:ok, term} | :error

  defmacro __using__(_opts) do
    quote do
      @behaviour Uderzo.GenRenderer

      @doc false
      def init_renderer(args) do
        {:ok, args}
      end

      @doc false
      def render_frame(_ww, _wh, _mx, _my, state) do
        # Fixed: the previous default returned `nil` (the comment's value),
        # which crashed the server's `{:ok, user_state} = ...render_frame(...)`
        # match for any module that forgot to override this callback. A no-op
        # that keeps the state satisfies the contract.
        {:ok, state}
      end

      defoverridable [init_renderer: 1, render_frame: 5]
    end
  end

  use GenServer
  import Uderzo.Bindings

  defmodule State do
    # ntt = "next target time": the monotonic-clock millisecond at which the
    # next frame is due.
    defstruct [:title, :window_width, :window_height, :target_fps,
               :window, :ntt, :user_state, :user_module]
  end

  @doc """
  Start a GenRenderer with the indicated window height, width and title and
  the target FPS.
  The target_fps is a target, much rests on the speed of the rendering function
  for the real fps.
  The final argument, `genserver_opts`, is just passed on to `GenServer.start_link/3`.
  Returns `GenServer.on_start`.
  """
  def start_link(module, title, window_width, window_height, target_fps, args, genserver_opts \\ []) do
    GenServer.start_link(__MODULE__,
      [title, window_width, window_height, target_fps, args, module],
      Keyword.merge([name: Uderzo.GenRenderer], genserver_opts))
  end

  @doc """
  Set the user_state portion of %State{}. This is the data that gets passed into render.
  Calling this has the side effect of redrawing the screen.
  """
  def set_user_state(new_state) do
    GenServer.call(Uderzo.GenRenderer, {:set_user_state, new_state})
  end

  @doc """
  Get the user_state portion of %State{}. This is the data that gets passed into render.
  """
  def get_user_state() do
    GenServer.call(Uderzo.GenRenderer, :get_user_state)
  end

  # Just call the uderzo_init() method and let messages from Uderzo drive the rest.
  def init([title, window_width, window_height, target_fps, user_state, user_module]) do
    uderzo_init(self())
    {:ok, %State{title: title, window_width: window_width, window_height: window_height,
                 target_fps: target_fps, user_state: user_state, user_module: user_module}}
  end

  # Get the user state.
  def handle_call(:get_user_state, _from, state) do
    {:reply, state.user_state, state}
  end

  # Set the user state directly and trigger a screen redraw.
  # NOTE(review): the reply is the full internal %State{}, not just the user
  # state — kept for backward compatibility.
  def handle_call({:set_user_state, new_state}, _from, state) do
    state = %State{state | user_state: new_state}
    send(self(), :render_next)
    {:reply, state, state}
  end

  # On uderzo_init completion, we receive :uderzo_initialized and can therefore create a window.
  def handle_info(:uderzo_initialized, state) do
    glfw_create_window(state.window_width, state.window_height, state.title, self())
    {:noreply, state}
  end

  # On window creation completion, we can kick off the rendering loop.
  # However, first we have promised to talk to the user initialization code.
  def handle_info({:glfw_create_window_result, window}, state) do
    {:ok, user_state} = state.user_module.init_renderer(state.user_state)
    send(self(), :render_next)
    {:noreply, %State{state | window: window, user_state: user_state}}
  end

  # We should render a frame. Calculate right away when the _next_ frame
  # should start and tell Uderzo we're beginning a frame.
  def handle_info(:render_next, state) do
    ntt = next_target_time(state.target_fps)
    uderzo_start_frame(state.window, self())
    {:noreply, %State{state | ntt: ntt}}
  end

  # Uderzo tells us we're good to do the actual rendering.
  def handle_info({:uderzo_start_frame_result, mx, my, win_width, win_height}, state) do
    {:ok, user_state} = state.user_module.render_frame(win_width, win_height, mx, my, state.user_state)
    uderzo_end_frame(state.window, self())
    {:noreply, %State{state | user_state: user_state}}
  end

  # And finally, the frame is rendered. Schedule the next frame, sleeping only
  # for whatever is left of this frame's time budget.
  def handle_info({:uderzo_end_frame_done, _window}, state) do
    Process.send_after(self(), :render_next, nap_time(state.ntt))
    {:noreply, state}
  end

  defp cur_time, do: :erlang.monotonic_time(:millisecond)
  defp next_target_time(fps), do: cur_time() + Kernel.trunc(1_000 / fps)
  defp nap_time(ntt), do: max(0, ntt - cur_time())
end
|
uderzo/lib/uderzo/gen_renderer.ex
| 0.806396
| 0.842021
|
gen_renderer.ex
|
starcoder
|
defmodule OddJob.Queue do
  @moduledoc """
  The `OddJob.Queue` is a `GenServer` that manages the assignments given to the pool workers.
  The `queue` receives jobs and assigns them to available workers. If all workers in a pool are
  currently busy then new jobs are added to a FIFO queue to be processed as workers
  become available.
  """
  @moduledoc since: "0.4.0"

  @doc false
  use GenServer

  import OddJob.Guards

  alias OddJob.{Job, Pool.Worker, Utils}

  @spec __struct__ :: OddJob.Queue.t()
  defstruct [:pool, workers: [], assigned: [], jobs: []]

  @typedoc """
  The `OddJob.Queue` struct holds the state of the job queue.
  * `:pool` is a term representing the name of the job pool that the `queue` belongs to
  * `:workers` is a list of the active worker `pid`s, whether they are busy working or not
  * `:assigned` is a list of the worker `pid`s that are currently assigned to a job
  * `:jobs` is a list of `OddJob.Job` structs representing the jobs that are queued to be performed
    when workers are available
  """
  @typedoc since: "0.3.0"
  @type t :: %__MODULE__{
          pool: atom,
          workers: [pid],
          assigned: [pid],
          jobs: [job]
        }

  @type job :: Job.t()
  @type queue :: {:via, Registry, {OddJob.Registry, {atom, :queue}}}
  @type worker :: pid

  # <---- Client API ---->

  @doc false
  def start_link(pool_name) do
    GenServer.start_link(__MODULE__, pool_name, name: Utils.queue_name(pool_name))
  end

  @doc false
  # Wraps a bare zero-arity function into a %Job{} owned by the caller before
  # casting; the second clause accepts an already-built job.
  @spec perform(queue, function | job) :: :ok
  def perform(queue, function) when is_function(function, 0),
    do: perform(queue, %Job{function: function, owner: self()})

  def perform(queue, job),
    do: GenServer.cast(queue, {:perform, job})

  @doc false
  @spec perform_many(queue, [job]) :: :ok
  def perform_many(queue, jobs) when is_list(jobs),
    do: GenServer.cast(queue, {:perform_many, jobs})

  @doc false
  # Builds one closure-wrapping %Job{} per member of the collection, all owned
  # by the calling process.
  @spec perform_many(queue, list | map, function) :: :ok
  def perform_many(queue, collection, function)
      when is_enumerable(collection) and is_function(function, 1) do
    jobs =
      for member <- collection do
        %Job{function: fn -> function.(member) end, owner: self()}
      end

    perform_many(queue, jobs)
  end

  @doc false
  @spec state(queue) :: t
  def state(queue),
    do: GenServer.call(queue, :state)

  @doc false
  # Asks the queue process to monitor a (new) worker so it can be removed from
  # the bookkeeping when it dies.
  @spec monitor_worker(queue, worker) :: :ok
  def monitor_worker(queue, worker) when is_pid(worker),
    do: GenServer.cast(queue, {:monitor, worker})

  @doc false
  # Called by a worker that finished its job and wants the next one.
  @spec request_new_job(queue, worker) :: :ok
  def request_new_job(queue, worker),
    do: GenServer.cast(queue, {:request_new_job, worker})

  # <---- Callbacks ---->

  @impl GenServer
  @spec init(atom) :: {:ok, t}
  def init(pool_name) do
    state = struct(__MODULE__, pool: pool_name)
    {:ok, state}
  end

  # NOTE: clause order matters below — the `jobs: []` clauses must come first
  # so the general clauses can safely destructure a non-empty job list.

  @impl GenServer
  # New worker, no queued jobs: just monitor it and record it as idle.
  def handle_cast({:monitor, pid}, %{workers: workers, jobs: []} = state) do
    Process.monitor(pid)
    {:noreply, %{state | workers: workers ++ [pid]}}
  end

  # New worker while jobs are queued: monitor it and immediately hand it the
  # job at the head of the FIFO queue.
  def handle_cast({:monitor, worker}, state) do
    Process.monitor(worker)
    workers = state.workers ++ [worker]
    assigned = state.assigned ++ [worker]
    [job | rest] = state.jobs
    Worker.perform(worker, job)
    {:noreply, %{state | workers: workers, assigned: assigned, jobs: rest}}
  end

  # Worker finished and no jobs are queued: mark it idle. The 10s timeout lets
  # the server hibernate when it stays idle (see handle_info(:timeout, ...)).
  def handle_cast({:request_new_job, worker}, %{jobs: []} = state) do
    {:noreply, %{state | assigned: state.assigned -- [worker]}, _timeout = 10_000}
  end

  # Worker finished and jobs are waiting: hand over the next job; the worker
  # stays in `assigned`.
  def handle_cast({:request_new_job, worker}, state) do
    [job | rest] = state.jobs
    Worker.perform(worker, job)
    {:noreply, %{state | jobs: rest}, _timeout = 10_000}
  end

  def handle_cast({:perform, job}, state) do
    state = do_perform(job, state)
    {:noreply, state}
  end

  def handle_cast({:perform_many, jobs}, state) do
    state = do_perform_many(jobs, state)
    {:noreply, state}
  end

  # Dispatch a single job to the first available worker, or enqueue it (FIFO)
  # when every worker is busy.
  defp do_perform(job, %{jobs: jobs, assigned: assigned, workers: workers} = state) do
    available = available_workers(workers, assigned)

    if available == [] do
      %{state | jobs: jobs ++ [job]}
    else
      [worker | _rest] = available
      Worker.perform(worker, job)
      %{state | assigned: assigned ++ [worker]}
    end
  end

  # Dispatch jobs one at a time until workers run out, then enqueue the
  # remainder in one append.
  defp do_perform_many([], state), do: state

  defp do_perform_many([job | rest] = new_jobs, state) do
    assigned = state.assigned
    available = available_workers(state.workers, assigned)

    if available == [] do
      %{state | jobs: state.jobs ++ new_jobs}
    else
      [worker | _rest] = available
      Worker.perform(worker, job)
      do_perform_many(rest, %{state | assigned: assigned ++ [worker]})
    end
  end

  # Idle workers = all workers minus those currently assigned.
  defp available_workers(workers, assigned),
    do: workers -- assigned

  @impl GenServer
  def handle_call(:state, _from, state),
    do: {:reply, state, state}

  # A monitored worker died: drop it from both lists. Any job it was running
  # is not requeued here — presumably the supervisor restarts the worker;
  # TODO(review): confirm against OddJob.Pool.Worker.
  @impl GenServer
  def handle_info({:DOWN, ref, :process, pid, _reason}, state) do
    Process.demonitor(ref, [:flush])
    workers = state.workers -- [pid]
    assigned = state.assigned -- [pid]
    {:noreply, %{state | workers: workers, assigned: assigned}}
  end

  # Reached after 10s of inactivity (see the timeouts above): hibernate to
  # reclaim memory.
  def handle_info(:timeout, state),
    do: {:noreply, state, :hibernate}
end
|
lib/odd_job/queue.ex
| 0.855248
| 0.563558
|
queue.ex
|
starcoder
|
defmodule SanbaseWeb.Graphql.AnomalyTypes do
  # Absinthe type definitions for the anomaly-related parts of the GraphQL
  # schema: timeseries data points, anomaly metadata and the anomaly object
  # itself.
  use Absinthe.Schema.Notation

  import SanbaseWeb.Graphql.Cache, only: [cache_resolve: 1, cache_resolve: 2]

  alias Sanbase.Anomaly
  alias SanbaseWeb.Graphql.Complexity
  alias SanbaseWeb.Graphql.Middlewares.AccessControl
  alias SanbaseWeb.Graphql.Resolvers.AnomalyResolver

  # A single point in an anomaly timeseries.
  object :anomaly_data do
    field(:datetime, non_null(:datetime))
    field(:value, :float)
  end

  object :anomaly_metadata do
    @desc ~s"""
    The name of the anomaly the metadata is about
    """
    field(:anomaly, non_null(:string))

    @desc ~s"""
    List of slugs which can be provided to the `timeseriesData` field to fetch
    the anomaly.
    """
    # Cached for 600 seconds — slug availability changes rarely.
    field :available_slugs, list_of(:string) do
      cache_resolve(&AnomalyResolver.get_available_slugs/3, ttl: 600)
    end

    @desc ~s"""
    The minimal granularity for which the data is available.
    """
    field(:min_interval, :string)

    @desc ~s"""
    The metric for which the anomaly is about. The actual metric values can be
    fetched via the `getMetric` API using the same metric as argument
    """
    field(:metric, :string)

    @desc ~s"""
    When the interval provided in the query is bigger than `min_interval` and
    contains two or more data points, the data must be aggregated into a single
    data point. The default aggregation that is applied is this `default_aggregation`.
    The default aggregation can be changed by the `aggregation` parameter of
    the `timeseriesData` field. Available aggregations are:
    [
    #{
      Anomaly.available_aggregations()
      |> Enum.map(&Atom.to_string/1)
      |> Enum.map(&String.upcase/1)
      |> Enum.join(",")
    }
    ]
    """
    # NOTE: the aggregation list above is interpolated at compile time.
    field(:default_aggregation, :aggregation)

    @desc ~s"""
    The supported aggregations for this anomaly. For more information about
    aggregations see the documentation for `defaultAggregation`
    """
    field(:available_aggregations, list_of(:aggregation))

    field(:data_type, :anomaly_data_type)
  end

  object :anomaly do
    @desc ~s"""
    Return a list of 'datetime' and 'value' for a given anomaly, slug
    and time period.
    """
    field :timeseries_data, list_of(:anomaly_data) do
      arg(:slug, non_null(:string))
      arg(:from, non_null(:datetime))
      arg(:to, non_null(:datetime))
      arg(:interval, :interval, default_value: "1d")
      arg(:aggregation, :aggregation, default_value: nil)

      # Query cost grows with the requested from/to range and interval.
      complexity(&Complexity.from_to_interval/3)
      middleware(AccessControl, %{allow_realtime_data: true, allow_historical_data: true})
      cache_resolve(&AnomalyResolver.timeseries_data/3)
    end

    # Earliest datetime for which data exists for the given slug.
    field :available_since, :datetime do
      arg(:slug, non_null(:string))
      cache_resolve(&AnomalyResolver.available_since/3)
    end

    field :metadata, :anomaly_metadata do
      cache_resolve(&AnomalyResolver.get_metadata/3)
    end
  end

  enum :anomaly_data_type do
    value(:timeseries)
  end
end
|
lib/sanbase_web/graphql/schema/types/anomaly_types.ex
| 0.864611
| 0.458409
|
anomaly_types.ex
|
starcoder
|
defmodule Mix.Tasks.ReadDoc do
  alias ReadDoc.Options

  import ReadDoc.FileSaver, only: [maybe_backup_files: 1]
  import ReadDoc.DocExtractor, only: [extract_doc: 1]

  @moduledoc """
  ## Abstract
  Documentation of your project can be extracted into files containing
  markers.
  These markers are
      <!-- begin @doc <ElixirIdentifier> -->
  to mark the start of an inserted docstring and
      <!-- end @doc <ElixirIdentifier> -->
  to mark the end thereof.
  Right now only `@moduledoc` and `@doc` strings can be extracted, according to
  if `<ElixirIdentifier>` refers to a module or a function.
  E.g. if a file (typically `README.md`) contains the following content:
      Preface
      <!-- begin @doc: My.Module -->
      Some text
      <!-- end @doc: My.Module -->
      Epilogue
  running
      mix read_doc README.md
  will replace `Some text`
  with the moduledoc string of `My.Module`.
  ## Limitations
  - Docstrings for types, macros and callbacks cannot be accessed yet.
  - Recursion is not supported, meaning that a docstring containing markers
    will not trigger the inclusion of the docstring indicated by these markers.
  """

  @doc """
  This is the implementation interface of the task; `args` are the raw command
  line switches and file names. The supported switches are documented in
  `ReadDoc.Options` and printed to stderr by `mix read_doc --help`.
  """
  @spec run(list(String.t())) :: :ok
  def run(args) do
    # The project must be compiled so the docs can be read from the beam files.
    Mix.Task.run("compile", [])

    case args |> parse_args() |> make_options() do
      {:ok, options_and_files} ->
        options_and_files
        |> maybe_backup_files()
        |> ReadDoc.rewrite_files()

      _ ->
        :ok
    end
  end

  # Turns OptionParser's {switches, files, errors} triple into either
  # {:ok, {%Options{}, files}}, or {:exit, nil} when help was requested.
  # Fixed: `--help` previously matched only when it was the *only* switch
  # given; it now takes precedence over everything, including parse errors.
  defp make_options({options, files, errors}) do
    cond do
      Keyword.get(options, :help, false) ->
        IO.puts(:stderr, extract_doc("ReadDoc.Options"))
        {:exit, nil}

      errors == [] ->
        {:ok, {Enum.into(options, %Options{}), files}}

      true ->
        raise ArgumentError, "undefined switches #{readable_options(errors, [])}"
    end
  end

  # Formats the invalid switch/value pairs into a comma-separated string.
  defp readable_options([], result), do: result |> Enum.reverse() |> Enum.join(", ")

  defp readable_options([{option, value} | rest], result) do
    readable_options(rest, ["#{option} #{value}" |> String.trim() | result])
  end

  defp parse_args(args) do
    OptionParser.parse(args, strict: switches(), aliases: aliases())
  end

  defp switches,
    do: [
      keep_copy: :boolean,
      start_comment: :string,
      end_comment: :string,
      line_comment: :string,
      help: :boolean
    ]

  defp aliases, do: [k: :keep_copy, h: :help]
end
|
lib/mix/tasks/read_doc.ex
| 0.627038
| 0.426501
|
read_doc.ex
|
starcoder
|
defmodule Earmark do
@type ast_meta :: map()
@type ast_tag :: binary()
@type ast_attribute_name :: binary()
@type ast_attribute_value :: binary()
@type ast_attribute :: {ast_attribute_name(), ast_attribute_value()}
@type ast_attributes :: list(ast_attribute())
@type ast_tuple :: {ast_tag(), ast_attributes(), ast(), ast_meta()}
@type ast_node :: binary() | ast_tuple()
@type ast :: list(ast_node())
@moduledoc """
## Earmark
### Abstract Syntax Tree and Rendering
The AST generation has now been moved out to [`EarmarkParser`](https://github.com/robertdober/earmark_parser)
which is installed as a dependency.
This brings some changes to this documentation and also deprecates the usage of `Earmark.as_ast`
Earmark takes care of rendering the AST to HTML, exposing some AST Transformation Tools and providing a CLI as escript.
Therefore you will not find a detailed description of the supported Markdown here anymore as this is done in
[here](https://hexdocs.pm/earmark_parser/EarmarkParser.html)
#### Earmark.as_ast
WARNING: This is just a proxy towards `EarmarkParser.as_ast` and is deprecated, it will be removed in version 1.5!
Replace your calls to `Earmark.as_ast` with `EarmarkParse.as_ast` as soon as possible.
**N.B.** If all you use is `Earmark.as_ast` consider _only_ using `EarmarkParser`.
Also please refer yourself to the documentation of [`EarmarkParser`](https://hexdocs.pm/earmark_parser/EarmarkParser.html)
The function is described below and the other two API functions `as_html` and `as_html!` are now based upon
the structure of the result of `as_ast`.
{:ok, ast, []} = EarmarkParser.as_ast(markdown)
{:ok, ast, deprecation_messages} = EarmarkParser.as_ast(markdown)
{:error, ast, error_messages} = EarmarkParser.as_ast(markdown)
#### Earmark.as_html
{:ok, html_doc, []} = Earmark.as_html(markdown)
{:ok, html_doc, deprecation_messages} = Earmark.as_html(markdown)
{:error, html_doc, error_messages} = Earmark.as_html(markdown)
#### Earmark.as_html!
html_doc = Earmark.as_html!(markdown, options)
Formats the error_messages returned by `as_html` and adds the filename to each.
Then prints them to stderr and just returns the html_doc
#### Options
Options can be passed into as `as_html/2` or `as_html!/2` according to the documentation.
A keyword list with legal options (c.f. `Earmark.Options`) or an `Earmark.Options` struct are accepted.
{status, html_doc, errors} = Earmark.as_html(markdown, options)
html_doc = Earmark.as_html!(markdown, options)
{status, ast, errors} = EarmarkParser.as_ast(markdown, options)
### Rendering
All options passed through to `EarmarkParser.as_ast` are defined therein, however some options concern only
the rendering of the returned AST
These are:
* `compact_output:` defaults to `false`
Normally `Earmark` aims to produce _Human Readable_ output.
This will give results like these:
iex(0)> markdown = "# Hello\\nWorld"
...(0)> Earmark.as_html!(markdown, compact_output: false)
"<h1>\\nHello</h1>\\n<p>\\nWorld</p>\\n"
But sometimes whitespace is not desired:
iex(1)> markdown = "# Hello\\nWorld"
...(1)> Earmark.as_html!(markdown, compact_output: true)
"<h1>Hello</h1><p>World</p>"
Be cautious though when using this option, lines can become very long.
#### `escape:` defaulting to `true`
If set HTML will be properly escaped
iex(2)> markdown = "Hello<br />World"
...(2)> Earmark.as_html!(markdown)
"<p>\\nHello<br />World</p>\\n"
However disabling `escape:` gives you maximum control of the created document, which in some
cases (e.g. inside tables) might even be necessary
iex(3)> markdown = "Hello<br />World"
...(3)> Earmark.as_html!(markdown, escape: false)
"<p>\\nHello<br />World</p>\\n"
* `postprocessor:` defaults to nil
Before rendering the AST is transformed by a postprocessor.
For details see the description of `Earmark.Transform.map_ast·` below which will accept the same postprocessor as
a matter of fact specifying `postprocessor: fun` is conceptually the same as
markdown
|> EarmarkParser.as_ast
|> Earmark.Transform.map_ast(fun)
|> Earmark.Transform.transform
with all the necessary bookkeeping for options and messages
* `renderer:` defaults to `Earmark.HtmlRenderer`
The module used to render the final document.
#### `smartypants:` defaulting to `true`
If set the following replacements will be made during rendering of inline text
"---" → "—"
"--" → "–"
"'" → "’"
"\"" → "“" / "”"
"..." → "…"
### Command line
$ mix escript.build
$ ./earmark file.md
Some options defined in the `Earmark.Options` struct can be specified as command line switches.
Use
$ ./earmark --help
to find out more, but here is a short example
$ ./earmark --smartypants false --code-class-prefix "a- b-" file.md
will call
Earmark.as_html!( ..., %Earmark.Options{smartypants: false, code_class_prefix: "a- b-"})
## Timeouts
By default, that is if the `timeout` option is not set Earmark uses parallel mapping as implemented in `Earmark.pmap/2`,
which uses `Task.await` with its default timeout of 5000ms.
In rare cases that might not be enough.
By indicating a longer `timeout` option in milliseconds Earmark will use parallel mapping as implemented in `Earmark.pmap/3`,
which will pass `timeout` to `Task.await`.
In both cases one can override the mapper function with either the `mapper` option (used if and only if `timeout` is nil) or the
`mapper_with_timeout` function (used otherwise).
For the escript only the `timeout` command line argument can be used.
## Security
Please be aware that Markdown is not a secure format. It produces
HTML from Markdown and HTML. It is your job to sanitize and or
filter the output of `Earmark.as_html` if you cannot trust the input
and are to serve the produced HTML on the Web.
"""
alias Earmark.Error
alias Earmark.Options
import Earmark.Message, only: [emit_messages: 2]
@doc false
# Public entry point (documented in the moduledoc above). Accepts the markdown
# as a binary or a list of lines, and options either as an `Earmark.Options`
# struct or as a keyword list.
def as_html(lines, options \\ %Options{})

# Keyword-list options are validated and converted to an %Options{} struct
# first; on validation failure the error messages are returned together with
# an empty HTML string.
def as_html(lines, options) when is_list(options) do
  case Options.make_options(options) do
    {:ok, options1} -> as_html(lines, options1)
    {:error, messages} -> {:error, "", messages}
  end
end

def as_html(lines, options) do
  # Parse (and optionally postprocess) into an AST, then render it to HTML,
  # threading status and parser messages through unchanged.
  {status, ast, messages} = postprocessed_ast(lines, options)
  {status, Earmark.Transform.transform(ast, options), messages}
end
@doc """
DEPRECATED call `EarmarkParser.as_ast` instead
"""
def as_ast(lines, options \\ %Options{}) do
{status, ast, messages} = _as_ast(lines, options)
message =
{:warning, 0,
"DEPRECATION: Earmark.as_ast will be removed in version 1.5, please use EarmarkParser.as_ast, which is of the same type"}
messages1 = [message | messages]
{status, ast, messages1}
end
# Regex used to split a binary input into individual lines.
# NOTE(review): this matches LF optionally followed by CR ("\n\r"); Windows
# line endings are CRLF ("\r\n"), for which the "\r" would stay attached to
# the preceding line — confirm this ordering is intentional.
@line_end ~r{\n\r?}
# Parses `lines` to an AST and, when a `:postprocessor` option is present,
# maps it over the AST; returns `{status, ast, messages}`.
def postprocessed_ast(lines, options\\%{})
def postprocessed_ast(lines, options) when is_binary(lines), do: lines |> String.split(@line_end) |> postprocessed_ast(options)
def postprocessed_ast(lines, %{postprocessor: nil}=options), do: EarmarkParser.as_ast(lines, options)
def postprocessed_ast(lines, %{postprocessor: prep}=options) do
  {status, ast, messages} = EarmarkParser.as_ast(lines, options)
  # The third argument controls whether plain string nodes are passed to the
  # postprocessor (see Earmark.Transform.map_ast/3).
  ast1 = Earmark.Transform.map_ast(ast, prep, Map.get(options, :ignore_strings))
  {status, ast1, messages}
end
@doc """
A convenience method that *always* returns an HTML representation of the markdown document passed in.
In case of the presence of any error messages they are printed to stderr.
Otherwise it behaves exactly as `as_html`.
"""
def as_html!(lines, options \\ %Options{})
def as_html!(lines, options) do
{_status, html, messages} = as_html(lines, options)
emit_messages(messages, options)
html
end
@doc """
Accesses current hex version of the `Earmark` application. Convenience for
`iex` usage.
"""
def version() do
with {:ok, version} = :application.get_key(:earmark, :vsn),
do: to_string(version)
end
@default_timeout_in_ms 5000
@doc false
# Maps `func` over `collection` concurrently (one Task per element) and
# collects the results in order, raising if a task dies or misses `timeout`.
def pmap(collection, func, timeout \\ @default_timeout_in_ms) do
  tasks = for item <- collection, do: Task.async(fn -> func.(item) end)

  tasks
  |> Task.yield_many(timeout)
  |> Enum.map(fn task_result -> _join_pmap_results_or_raise(task_result, timeout) end)
end
defp _as_ast(lines, options)

# An %Options{} struct is flattened to a keyword-style list before being
# handed to the parser; any other options value is passed through untouched.
defp _as_ast(lines, %Options{} = options) do
  plain_options = options |> Map.from_struct() |> Enum.to_list()
  EarmarkParser.as_ast(lines, plain_options)
end

defp _as_ast(lines, options) do
  EarmarkParser.as_ast(lines, options)
end
defp _join_pmap_results_or_raise(yield_tuple, timeout)

# Successful yield: unwrap the task's result.
defp _join_pmap_results_or_raise({_task, {:ok, result}}, _timeout) do
  result
end

# The task exited abnormally before yielding.
defp _join_pmap_results_or_raise({task, {:error, reason}}, _timeout) do
  raise Error, "#{inspect(task)} has died with reason #{inspect(reason)}"
end

# `Task.yield_many/2` returned nil: the task did not answer within `timeout`.
defp _join_pmap_results_or_raise({task, nil}, timeout) do
  raise Error,
        "#{inspect(task)} has not responded within the set timeout of #{timeout}ms, consider increasing it"
end
end
# SPDX-License-Identifier: Apache-2.0
|
lib/earmark.ex
| 0.849019
| 0.400105
|
earmark.ex
|
starcoder
|
defmodule Dispenser.Server.BufferServer do
  @moduledoc """
  A `BufferServer` is an example `GenServer` that uses `Dispenser.Buffer`.
  It can receive events and send them to subscriber processes.
  Subscribers can control the flow by telling the `BufferServer` how many events they want, using `ask/3`.
  See `ask/3` for more information about how events are sent to subscribers.
  """

  use GenServer

  alias Dispenser.{Buffer, MonitoredBuffer}
  # NOTE(review): this alias of the top-level `LimitedQueue` module appears
  # unused in this file — confirm whether it can be removed.
  alias LimitedQueue

  @typedoc """
  The arguments required to create a `BufferServer`.
  `:buffer` defines the `Buffer` used internally by the `BufferServer`.
  See `start_link/1`.
  """
  @type init_args(event) :: %{
          buffer: Buffer.t(event, pid())
        }

  @typedoc """
  The opaque internal state of the `BufferServer`.
  """
  @opaque t(event) :: %__MODULE__{
            buffer: MonitoredBuffer.t(event)
          }

  @enforce_keys [:buffer]
  defstruct [:buffer]

  ## Client API

  @doc """
  Start a new `BufferServer` `GenServer`.
  See `init_args/0` and `GenServer.start_link/2`
  """
  @spec start_link(init_args(event)) :: {:ok, pid()} | {:error, term()}
        when event: any()
  def start_link(init_args) do
    GenServer.start_link(__MODULE__, init_args)
  end

  @impl GenServer
  @spec init(init_args(event)) :: {:ok, t(event)}
        when event: any()
  def init(init_args) do
    # Wrap the supplied buffer so subscriber processes can be tracked and
    # cleaned up when they go down (see handle_info/2 below).
    monitored_buffer = MonitoredBuffer.new(init_args.buffer)
    state = %__MODULE__{buffer: monitored_buffer}
    {:ok, state}
  end

  @doc """
  Add events to the `BufferServer`.
  If the buffer reaches its capacity, an error is returned with the number of events that were dropped.
  """
  @spec append(GenServer.server(), [event]) :: {:ok, dropped :: non_neg_integer()}
        when event: any()
  # Appending nothing is a no-op; skip the GenServer round-trip entirely.
  def append(_server, []) do
    {:ok, 0}
  end

  def append(server, events) when is_list(events) do
    GenServer.call(server, {:append, events})
  end

  @doc """
  Unsubscribe from the `BufferServer`.
  """
  @spec unsubscribe(GenServer.server()) :: :ok | {:error, :not_subscribed}
  def unsubscribe(server) do
    # Convenience clause: unsubscribe the calling process itself.
    unsubscribe(server, self())
  end

  @spec unsubscribe(GenServer.server(), pid()) :: :ok | {:error, :not_subscribed}
  def unsubscribe(server, subscriber) when is_pid(subscriber) do
    GenServer.call(server, {:unsubscribe, subscriber})
  end

  @doc """
  Ask for events from the `BufferServer`.
  Events will be delivered asynchronously to the subscribed pid in the shape of:
      {:handle_assigned_events, sender, events}
  where:
  * `sender` is the pid of this `BufferServer`.
  * `events` is a list of events that were appended to the `BufferServer`.
  """
  @spec ask(GenServer.server(), non_neg_integer()) :: :ok
  def ask(server, amount) when amount >= 0 do
    # Convenience clause: ask on behalf of the calling process.
    ask(server, self(), amount)
  end

  @spec ask(GenServer.server(), subscriber :: pid(), non_neg_integer()) :: :ok
  # Asking for zero events is a no-op; avoid sending a message at all.
  def ask(_server, subscriber, 0) when is_pid(subscriber) do
    :ok
  end

  def ask(server, subscriber, amount) when is_pid(subscriber) and amount > 0 do
    # Fire-and-forget: demand is registered via cast, delivery happens later
    # through the {:handle_assigned_events, _, _} message.
    GenServer.cast(server, {:ask, subscriber, amount})
  end

  @doc """
  Get various statistics about the `BufferServer` for use when debugging and generating metrics.
  """
  @spec stats(GenServer.server()) :: MonitoredBuffer.stats()
  def stats(server) do
    GenServer.call(server, :stats)
  end

  ## GenServer callbacks

  @impl GenServer
  def handle_call({:append, events}, _from, state) do
    # Buffer the new events, then immediately try to flush them to any
    # subscribers with outstanding demand.
    {buffer, dropped} = MonitoredBuffer.append(state.buffer, events)
    state = %__MODULE__{state | buffer: buffer}
    state = send_events(state)
    {:reply, {:ok, dropped}, state}
  end

  @impl GenServer
  def handle_call({:unsubscribe, subscriber}, _from, state) do
    case MonitoredBuffer.delete(state.buffer, subscriber) do
      {:ok, buffer} ->
        {reply, :ok, state} = {:reply, :ok, %__MODULE__{state | buffer: buffer}}
        {reply, :ok, state}

      {:error, :not_subscribed} ->
        {:reply, {:error, :not_subscribed}, state}
    end
  end

  @impl GenServer
  def handle_call(:stats, _from, state) do
    stats = MonitoredBuffer.stats(state.buffer)
    {:reply, stats, state}
  end

  @impl GenServer
  def handle_cast({:ask, subscriber, amount}, state) do
    # Record the subscriber's demand, then attempt delivery right away.
    buffer = MonitoredBuffer.ask(state.buffer, subscriber, amount)
    state = %__MODULE__{state | buffer: buffer}
    state = send_events(state)
    {:noreply, state}
  end

  @impl GenServer
  def handle_info({:DOWN, ref, _, pid, _}, state) do
    # A monitored subscriber process went down; remove its bookkeeping.
    # Errors (e.g. an unknown pid/ref pair) are deliberately ignored.
    case MonitoredBuffer.down(state.buffer, pid, ref) do
      {:ok, buffer} ->
        {:noreply, %__MODULE__{state | buffer: buffer}}

      _error ->
        {:noreply, state}
    end
  end

  # Drains as many buffered events as outstanding demand allows and sends
  # them to their assigned subscribers; returns the updated state.
  @spec send_events(t(event)) :: t(event)
        when event: any()
  defp send_events(%__MODULE__{} = state) do
    {buffer, assignments} = MonitoredBuffer.assign_events(state.buffer)

    Enum.each(assignments, fn {subscriber, events} ->
      send_assigned_events(subscriber, events)
    end)

    %__MODULE__{state | buffer: buffer}
  end

  @spec send_assigned_events(subscriber :: pid(), [event]) :: :ok | :noconnect
        when event: any()
  # Nothing to deliver: skip the send.
  defp send_assigned_events(subscriber, []) when is_pid(subscriber) do
    :ok
  end

  defp send_assigned_events(subscriber, events) when is_pid(subscriber) and is_list(events) do
    # :noconnect avoids blocking on auto-connecting to a remote node when
    # the subscriber lives on a disconnected node.
    Process.send(
      subscriber,
      {:handle_assigned_events, self(), events},
      [:noconnect]
    )
  end
end
|
lib/dispenser/server/buffer_server.ex
| 0.831827
| 0.49585
|
buffer_server.ex
|
starcoder
|
defmodule PromEx.Plug do
  @moduledoc """
  Use this plug in your Endpoint file to expose your metrics. The following options are supported by this plug:
  * `:prom_ex_module` - The PromEx module whose metrics will be published through this particular plug
  * `:path` - The path through which your metrics can be accessed (default is "/metrics")
  If you need to have some sort of access control around your metrics endpoint, I would suggest looking at another
  library that I maintain called `Unplug` (https://hex.pm/packages/unplug). Using `Unplug`, you can skip over this plug
  if some sort of requirement is not fulfilled. For example, if you wanted to configure the metrics endpoint to
  only be accessible if the request has an Authorization header that matches a configured environment variable you
  could do something like so using `Unplug`:
  ```elixir
  defmodule MyApp.UnplugPredicates.SecureMetricsEndpoint do
    @behaviour Unplug.Predicate

    @impl true
    def call(conn, env_var) do
      # get_req_header/2 returns a list of header values
      case Plug.Conn.get_req_header(conn, "authorization") do
        [auth_header] -> System.get_env(env_var) == auth_header
        _ -> false
      end
    end
  end
  ```
  Which can then be used in your `endpoint.ex` file like so:
  ```elixir
  plug Unplug,
    if: {MyApp.UnplugPredicates.SecureMetricsEndpoint, "PROMETHEUS_AUTH_SECRET"},
    do: {PromEx.Plug, prom_ex_module: MyApp.PromEx}
  ```
  The reason that this functionality is not part of PromEx itself is that how you chose to configure the visibility
  of the metrics route is entirely up to the user and so it felt as though this plug would be over complicated by
  having to support application config, environment variables, etc. And given that `Unplug` exists for this purpose,
  it is the recommended tool for the job.
  """

  @behaviour Plug

  require Logger

  import Plug.Conn

  alias Plug.Conn

  @impl true
  def init(opts) do
    # Resolve options once at plug-initialization time: `:prom_ex_module` is
    # mandatory, the metrics path defaults to "/metrics".
    %{
      prom_ex_module: Keyword.fetch!(opts, :prom_ex_module),
      metrics_path: Keyword.get(opts, :path, "/metrics")
    }
  end

  @impl true
  # Handles only requests whose path equals the configured metrics path
  # (note the shared `metrics_path` binding between the conn and the opts).
  def call(%Conn{request_path: metrics_path} = conn, %{metrics_path: metrics_path, prom_ex_module: prom_ex_module}) do
    case PromEx.get_metrics(prom_ex_module) do
      :prom_ex_down ->
        # NOTE(review): Logger.warn/1 is deprecated on recent Elixir in favor
        # of Logger.warning/1 — confirm the supported Elixir version range.
        Logger.warn("Attempted to fetch metrics from #{prom_ex_module}, but the module has not been initialized")

        conn
        |> put_resp_content_type("text/plain")
        |> send_resp(503, "Service Unavailable")
        |> halt()

      metrics ->
        conn
        |> put_resp_content_type("text/plain")
        |> send_resp(200, metrics)
        |> halt()
    end
  end

  # Every other request passes through untouched.
  def call(conn, _opts) do
    conn
  end
end
|
lib/prom_ex/plug.ex
| 0.831588
| 0.574186
|
plug.ex
|
starcoder
|
defmodule ExWire.Packet.Capability.Par.WarpStatus do
  @moduledoc """
  Status messages updated to handle warp details.
  ```
  **Status** [`+0x00`: `P`, `protocolVersion`: `P`, `networkId`: `P`,
  `td`: `P`, `bestHash`: `B_32`, `genesisHash`: `B_32`,
  `snapshot_hash`: B_32, `snapshot_number`: P]

  In addition to all the fields in eth protocol version 63’s status (denoted
  by `...`), include `snapshot_hash` and `snapshot_number` which signify the
  snapshot manifest RLP hash and block number respectively of the peer's local
  snapshot.
  ```
  """
  require Logger

  @behaviour ExWire.Packet

  @type t :: %__MODULE__{
          protocol_version: integer(),
          network_id: integer(),
          total_difficulty: integer(),
          best_hash: binary(),
          genesis_hash: binary(),
          snapshot_hash: EVM.hash(),
          snapshot_number: integer()
        }

  defstruct [
    :protocol_version,
    :network_id,
    :total_difficulty,
    :best_hash,
    :genesis_hash,
    :snapshot_hash,
    :snapshot_number
  ]

  @doc """
  Build a WarpStatus packet
  Note: we are currently reflecting values based on the packet received, but
  that should not be the case. We should provide the total difficulty of the
  best chain found in the block header, the best hash, and the genesis hash of
  our blockchain.
  TODO: Don't parrot the same data back to sender
  """
  @spec new(t()) :: t()
  def new(packet) do
    %__MODULE__{
      protocol_version: 1,
      network_id: ExWire.Config.chain().params.network_id,
      total_difficulty: packet.total_difficulty,
      # NOTE(review): best_hash mirrors the peer's genesis_hash (see TODO
      # above) — confirm this placeholder is intentional.
      best_hash: packet.genesis_hash,
      genesis_hash: packet.genesis_hash,
      # Zero hash / block 0 — presumably signals that we have no local
      # snapshot to offer; verify against the warp sync spec.
      snapshot_hash: <<0::256>>,
      snapshot_number: 0
    }
  end

  @doc """
  Returns the relative message id offset for this message.
  This will help determine what its message ID is relative to other Packets in the same Capability.
  """
  @impl true
  @spec message_id_offset() :: 0x00
  def message_id_offset do
    0x00
  end

  @doc """
  Given a WarpStatus packet, serializes for transport over Eth Wire Protocol.

  ## Examples

      iex> %ExWire.Packet.Capability.Par.WarpStatus{
      ...>   protocol_version: 0x63,
      ...>   network_id: 3,
      ...>   total_difficulty: 10,
      ...>   best_hash: <<5>>,
      ...>   genesis_hash: <<6::256>>,
      ...>   snapshot_hash: <<7::256>>,
      ...>   snapshot_number: 8,
      ...> }
      ...> |> ExWire.Packet.Capability.Par.WarpStatus.serialize
      [0x63, 3, 10, <<5>>, <<6::256>>, <<7::256>>, 8]
  """
  @impl true
  def serialize(packet = %__MODULE__{}) do
    # Field order must match the wire protocol layout in the moduledoc.
    [
      packet.protocol_version,
      packet.network_id,
      packet.total_difficulty,
      packet.best_hash,
      packet.genesis_hash,
      packet.snapshot_hash,
      packet.snapshot_number
    ]
  end

  @doc """
  Given an RLP-encoded Status packet from Eth Wire Protocol, decodes into a
  Status packet.

  ## Examples

      iex> ExWire.Packet.Capability.Par.WarpStatus.deserialize([<<0x63>>, <<3>>, <<10>>, <<5>>, <<6::256>>, <<7::256>>, 8])
      %ExWire.Packet.Capability.Par.WarpStatus{
        protocol_version: 0x63,
        network_id: 3,
        total_difficulty: 10,
        best_hash: <<5>>,
        genesis_hash: <<6::256>>,
        snapshot_hash: <<7::256>>,
        snapshot_number: 8,
      }
  """
  @impl true
  def deserialize(rlp) do
    [
      protocol_version,
      network_id,
      total_difficulty,
      best_hash,
      genesis_hash,
      snapshot_hash,
      snapshot_number
    ] = rlp

    # Integer fields arrive as unsigned big-endian binaries from RLP
    # decoding, hence :binary.decode_unsigned; hashes stay raw binaries.
    %__MODULE__{
      protocol_version: :binary.decode_unsigned(protocol_version),
      network_id: :binary.decode_unsigned(network_id),
      total_difficulty: :binary.decode_unsigned(total_difficulty),
      best_hash: best_hash,
      genesis_hash: genesis_hash,
      snapshot_hash: snapshot_hash,
      snapshot_number: snapshot_number
    }
  end

  @doc """
  Handles a WarpStatus message.
  We should decide whether or not we want to continue communicating with
  this peer. E.g. do our network and protocol versions match?

  ## Examples

      iex> %ExWire.Packet.Capability.Par.WarpStatus{
      ...>   protocol_version: 63,
      ...>   network_id: 3,
      ...>   total_difficulty: 10,
      ...>   best_hash: <<4::256>>,
      ...>   genesis_hash: <<4::256>>
      ...> }
      ...> |> ExWire.Packet.Capability.Par.WarpStatus.handle()
      {:send,
        %ExWire.Packet.Capability.Par.WarpStatus{
          best_hash: <<4::256>>,
          genesis_hash: <<4::256>>,
          network_id: 3,
          protocol_version: 1,
          total_difficulty: 10,
          snapshot_hash: <<0::256>>,
          snapshot_number: 0
        }}
  """
  @impl true
  def handle(packet = %__MODULE__{}) do
    Exth.trace(fn -> "[Packet] Got WarpStatus: #{inspect(packet)}" end)
    # Always reply with our own status, derived from the incoming packet
    # (see the TODO in new/1 about not parroting peer data).
    {:send, new(packet)}
  end
end
|
apps/ex_wire/lib/ex_wire/packet/capability/par/warp_status.ex
| 0.850453
| 0.751694
|
warp_status.ex
|
starcoder
|
defmodule Homework.Merchants do
  @moduledoc """
  The Merchants context.
  """

  import Homework.FuzzySearchHelper
  import Ecto.Query, warn: false

  alias Homework.Repo
  alias Homework.Merchants.Merchant

  @doc """
  Returns the list of merchants.

  ## Examples

      iex> list_merchants([])
      [%Merchant{}, ...]

  """
  def list_merchants(_args) do
    Merchant |> Repo.all()
  end

  @doc """
  Gets a single merchant.

  Raises `Ecto.NoResultsError` if the Merchant does not exist.

  ## Examples

      iex> get_merchant!(123)
      %Merchant{}

      iex> get_merchant!(456)
      ** (Ecto.NoResultsError)

  """
  def get_merchant!(id) do
    Repo.get!(Merchant, id)
  end

  @doc """
  Gets all merchants with exact matching field name
  """
  def get_merchants_where_name(name) do
    Merchant
    |> where([merchant], merchant.name == ^name)
    |> Repo.all()
  end

  @doc """
  Gets all merchants that have a provided or higher levenshtein fuzziness

  ## Examples

      iex> get_merchants_fuzzy("paul", 5)
      [%Merchant{...name: "paulco"...}]

  """
  def get_merchants_fuzzy(to_query, fuzziness) do
    # TODO call levenshtein function once and store as a view, then use to order (rather than 2 calls), then map to user
    matches =
      from(merchant in Merchant,
        where: levenshtein(merchant.name, ^to_query, ^fuzziness),
        order_by: levenshtein(merchant.name, ^to_query)
      )

    Repo.all(matches)
  end

  @doc """
  Creates a merchant.

  ## Examples

      iex> create_merchant(%{field: value})
      {:ok, %Merchant{}}

      iex> create_merchant(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_merchant(attrs \\ %{}) do
    changeset = Merchant.changeset(%Merchant{}, attrs)
    Repo.insert(changeset)
  end

  @doc """
  Updates a merchant.

  ## Examples

      iex> update_merchant(merchant, %{field: new_value})
      {:ok, %Merchant{}}

      iex> update_merchant(merchant, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_merchant(%Merchant{} = merchant, attrs) do
    changeset = Merchant.changeset(merchant, attrs)
    Repo.update(changeset)
  end

  @doc """
  Deletes a merchant.

  ## Examples

      iex> delete_merchant(merchant)
      {:ok, %Merchant{}}

      iex> delete_merchant(merchant)
      {:error, %Ecto.Changeset{}}

  """
  def delete_merchant(%Merchant{} = merchant) do
    Repo.delete(merchant)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking merchant changes.

  ## Examples

      iex> change_merchant(merchant)
      %Ecto.Changeset{data: %Merchant{}}

  """
  def change_merchant(%Merchant{} = merchant, attrs \\ %{}) do
    Merchant.changeset(merchant, attrs)
  end
end
|
elixir/lib/homework/merchants.ex
| 0.614394
| 0.410431
|
merchants.ex
|
starcoder
|
defmodule AWS.Signer do
  @moduledoc """
  With code signing for IoT, you can sign code that you create for any IoT
  device that is supported by Amazon Web Services (AWS). Code signing is
  available through [Amazon
  FreeRTOS](http://docs.aws.amazon.com/freertos/latest/userguide/) and [AWS
  IoT Device
  Management](http://docs.aws.amazon.com/iot/latest/developerguide/), and
  integrated with [AWS Certificate Manager
  (ACM)](http://docs.aws.amazon.com/acm/latest/userguide/). In order to sign
  code, you import a third-party code signing certificate with ACM that is
  used to sign updates in Amazon FreeRTOS and AWS IoT Device Management. For
  general information about using code signing, see the [Code Signing for IoT
  Developer
  Guide](http://docs.aws.amazon.com/signer/latest/developerguide/Welcome.html).
  """

  @doc """
  Changes the state of an `ACTIVE` signing profile to `CANCELED`. A canceled
  profile is still viewable with the `ListSigningProfiles` operation, but it
  cannot perform new signing jobs, and is deleted two years after
  cancelation.
  """
  def cancel_signing_profile(client, profile_name, input, options \\ []) do
    path_ = "/signing-profiles/#{URI.encode(profile_name)}"
    headers = []
    query_ = []
    request(client, :delete, path_, query_, headers, input, options, nil)
  end

  @doc """
  Returns information about a specific code signing job. You specify the job
  by using the `jobId` value that is returned by the `StartSigningJob`
  operation.
  """
  def describe_signing_job(client, job_id, options \\ []) do
    path_ = "/signing-jobs/#{URI.encode(job_id)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Returns information on a specific signing platform.
  """
  def get_signing_platform(client, platform_id, options \\ []) do
    path_ = "/signing-platforms/#{URI.encode(platform_id)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Returns information on a specific signing profile.
  """
  def get_signing_profile(client, profile_name, options \\ []) do
    path_ = "/signing-profiles/#{URI.encode(profile_name)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Lists all your signing jobs. You can use the `maxResults` parameter to
  limit the number of signing jobs that are returned in the response. If
  additional jobs remain to be listed, code signing returns a `nextToken`
  value. Use this value in subsequent calls to `ListSigningJobs` to fetch the
  remaining values. You can continue calling `ListSigningJobs` with your
  `maxResults` parameter and with new values that code signing returns in the
  `nextToken` parameter until all of your signing jobs have been returned.
  """
  def list_signing_jobs(client, max_results \\ nil, next_token \\ nil, platform_id \\ nil, requested_by \\ nil, status \\ nil, options \\ []) do
    path_ = "/signing-jobs"
    headers = []
    query_ = []
    # Each optional filter is prepended when present, so the final list is in
    # the reverse order of these checks.
    query_ = if !is_nil(status) do
      [{"status", status} | query_]
    else
      query_
    end
    query_ = if !is_nil(requested_by) do
      [{"requestedBy", requested_by} | query_]
    else
      query_
    end
    query_ = if !is_nil(platform_id) do
      [{"platformId", platform_id} | query_]
    else
      query_
    end
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Lists all signing platforms available in code signing that match the
  request parameters. If additional jobs remain to be listed, code signing
  returns a `nextToken` value. Use this value in subsequent calls to
  `ListSigningJobs` to fetch the remaining values. You can continue calling
  `ListSigningJobs` with your `maxResults` parameter and with new values that
  code signing returns in the `nextToken` parameter until all of your signing
  jobs have been returned.
  """
  def list_signing_platforms(client, category \\ nil, max_results \\ nil, next_token \\ nil, partner \\ nil, target \\ nil, options \\ []) do
    path_ = "/signing-platforms"
    headers = []
    query_ = []
    query_ = if !is_nil(target) do
      [{"target", target} | query_]
    else
      query_
    end
    query_ = if !is_nil(partner) do
      [{"partner", partner} | query_]
    else
      query_
    end
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    query_ = if !is_nil(category) do
      [{"category", category} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Lists all available signing profiles in your AWS account. Returns only
  profiles with an `ACTIVE` status unless the `includeCanceled` request field
  is set to `true`. If additional jobs remain to be listed, code signing
  returns a `nextToken` value. Use this value in subsequent calls to
  `ListSigningJobs` to fetch the remaining values. You can continue calling
  `ListSigningJobs` with your `maxResults` parameter and with new values that
  code signing returns in the `nextToken` parameter until all of your signing
  jobs have been returned.
  """
  def list_signing_profiles(client, include_canceled \\ nil, max_results \\ nil, next_token \\ nil, options \\ []) do
    path_ = "/signing-profiles"
    headers = []
    query_ = []
    query_ = if !is_nil(next_token) do
      [{"nextToken", next_token} | query_]
    else
      query_
    end
    query_ = if !is_nil(max_results) do
      [{"maxResults", max_results} | query_]
    else
      query_
    end
    query_ = if !is_nil(include_canceled) do
      [{"includeCanceled", include_canceled} | query_]
    else
      query_
    end
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Returns a list of the tags associated with a signing profile resource.
  """
  def list_tags_for_resource(client, resource_arn, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    query_ = []
    request(client, :get, path_, query_, headers, nil, options, nil)
  end

  @doc """
  Creates a signing profile. A signing profile is a code signing template
  that can be used to carry out a pre-defined signing job. For more
  information, see
  [http://docs.aws.amazon.com/signer/latest/developerguide/gs-profile.html](http://docs.aws.amazon.com/signer/latest/developerguide/gs-profile.html)
  """
  def put_signing_profile(client, profile_name, input, options \\ []) do
    path_ = "/signing-profiles/#{URI.encode(profile_name)}"
    headers = []
    query_ = []
    request(client, :put, path_, query_, headers, input, options, nil)
  end

  @doc """
  Initiates a signing job to be performed on the code provided. Signing jobs
  are viewable by the `ListSigningJobs` operation for two years after they
  are performed. Note the following requirements:
  <ul> <li> You must create an Amazon S3 source bucket. For more information,
  see [Create a
  Bucket](http://docs.aws.amazon.com/AmazonS3/latest/gsg/CreatingABucket.html)
  in the *Amazon S3 Getting Started Guide*.
  </li> <li> Your S3 source bucket must be version enabled.
  </li> <li> You must create an S3 destination bucket. Code signing uses your
  S3 destination bucket to write your signed code.
  </li> <li> You specify the name of the source and destination buckets when
  calling the `StartSigningJob` operation.
  </li> <li> You must also specify a request token that identifies your
  request to code signing.
  </li> </ul> You can call the `DescribeSigningJob` and the `ListSigningJobs`
  actions after you call `StartSigningJob`.
  For a Java example that shows how to use this action, see
  [http://docs.aws.amazon.com/acm/latest/userguide/](http://docs.aws.amazon.com/acm/latest/userguide/)
  """
  def start_signing_job(client, input, options \\ []) do
    path_ = "/signing-jobs"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Adds one or more tags to a signing profile. Tags are labels that you can
  use to identify and organize your AWS resources. Each tag consists of a key
  and an optional value. To specify the signing profile, use its Amazon
  Resource Name (ARN). To specify the tag, use a key-value pair.
  """
  def tag_resource(client, resource_arn, input, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    query_ = []
    request(client, :post, path_, query_, headers, input, options, nil)
  end

  @doc """
  Removes one or more tags from a signing profile. To remove the tags,
  specify a list of tag keys.
  """
  def untag_resource(client, resource_arn, input, options \\ []) do
    path_ = "/tags/#{URI.encode(resource_arn)}"
    headers = []
    # Moves the "tagKeys" field out of the input body and into the query
    # string (see AWS.Request.build_params/2).
    {query_, input} =
      [
        {"tagKeys", "tagKeys"},
      ]
      |> AWS.Request.build_params(input)
    request(client, :delete, path_, query_, headers, input, options, nil)
  end

  @spec request(AWS.Client.t(), binary(), binary(), list(), list(), map(), list(), pos_integer()) ::
          {:ok, Poison.Parser.t(), Poison.Response.t()}
          | {:error, Poison.Parser.t()}
          | {:error, HTTPoison.Error.t()}
  # Builds and dispatches a signed request: resolve the endpoint host/URL,
  # attach the AWS JSON headers, SigV4-sign the payload, then perform it.
  defp request(client, method, path, query, headers, input, options, success_status_code) do
    client = %{client | service: "signer"}
    host = build_host("signer", client)
    url = host
          |> build_url(path, client)
          |> add_query(query)
    additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
    headers = AWS.Request.add_headers(additional_headers, headers)
    payload = encode_payload(input)
    headers = AWS.Request.sign_v4(client, method, url, headers, payload)
    perform_request(method, url, payload, headers, options, success_status_code)
  end

  # With no explicit success status code, 200/202/204 all count as success;
  # an empty body skips JSON decoding.
  defp perform_request(method, url, payload, headers, options, nil) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
        {:ok, response}
      {:ok, %HTTPoison.Response{status_code: status_code, body: body} = response}
      when status_code == 200 or status_code == 202 or status_code == 204 ->
        {:ok, Poison.Parser.parse!(body, %{}), response}
      {:ok, %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body, %{})
        {:error, error}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # With an explicit success status code, only that exact code is a success.
  defp perform_request(method, url, payload, headers, options, success_status_code) do
    case HTTPoison.request(method, url, payload, headers, options) do
      {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: ""} = response} ->
        {:ok, %{}, response}
      {:ok, %HTTPoison.Response{status_code: ^success_status_code, body: body} = response} ->
        {:ok, Poison.Parser.parse!(body, %{}), response}
      {:ok, %HTTPoison.Response{body: body}} ->
        error = Poison.Parser.parse!(body, %{})
        {:error, error}
      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, %HTTPoison.Error{reason: reason}}
    end
  end

  # The special "local" region short-circuits to localhost.
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end

  defp build_url(host, path, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}#{path}"
  end

  defp add_query(url, []) do
    url
  end
  defp add_query(url, query) do
    querystring = AWS.Util.encode_query(query)
    "#{url}?#{querystring}"
  end

  # nil input (GET/DELETE with no body) encodes as the empty payload.
  defp encode_payload(input) do
    if input != nil, do: Poison.Encoder.encode(input, %{}), else: ""
  end
end
|
lib/aws/signer.ex
| 0.844281
| 0.496765
|
signer.ex
|
starcoder
|
defmodule CCSP.Chapter2.GenericSearch do
alias CCSP.Chapter2.PriorityQueue
alias CCSP.Chapter2.Stack
alias CCSP.Chapter2.Queue
alias CCSP.Chapter2.Node
@moduledoc """
Corresponds to CCSP in Python, Section 2.2 titled "Maze Solving"
"""
@spec linear_contains?(list(any), any) :: boolean
def linear_contains?(elements, key) do
Enum.any?(elements, &(&1 == key))
end
@doc """
Assumes elements are already sorted.
"""
@spec binary_contains?(list(any), any) :: boolean
def binary_contains?(sorted_elements, key) do
binary_search(sorted_elements, key, 0, length(sorted_elements) - 1)
end
@spec binary_search(list(any), any, non_neg_integer, non_neg_integer) :: boolean
defp binary_search(elements, key, low, high) when low <= high do
mid = div(low + high, 2)
mid_elements = Enum.at(elements, mid)
cond do
mid_elements < key -> binary_search(elements, key, mid + 1, high)
mid_elements > key -> binary_search(elements, key, low, mid - 1)
true -> true
end
end
@spec depth_first_search(
a,
b,
(b -> boolean),
(a, b -> list(b))
) :: Node.t()
when a: var, b: var
def depth_first_search(maze, initial, goal_fn, successors_fn) do
frontier =
Stack.new()
|> Stack.push(Node.new(initial, nil))
explored =
MapSet.new()
|> MapSet.put(initial)
dfs(maze, frontier, explored, goal_fn, successors_fn)
end
@spec dfs(
a,
Stack.t(),
MapSet.t(),
(b -> boolean),
(a, b -> list(b))
) :: Node.t()
when a: var, b: var
defp dfs(maze, frontier, explored, goal_fn, successors_fn) do
if Stack.empty?(frontier) == false do
{current_node, frontier} = Stack.pop(frontier)
current_state = current_node.state
if goal_fn.(current_state) do
current_node
else
{frontier, explored} =
Enum.reduce(
successors_fn.(maze, current_state),
{frontier, explored},
fn child, {frontier, explored} ->
if Enum.member?(explored, child) == true do
{frontier, explored}
else
frontier = Stack.push(frontier, Node.new(child, current_node))
explored = MapSet.put(explored, child)
{frontier, explored}
end
end
)
dfs(maze, frontier, explored, goal_fn, successors_fn)
end
end
end
@spec breadth_first_search(
a,
b,
(b -> boolean),
(a, b -> list(b))
) :: Node.t()
when a: var, b: var
def breadth_first_search(space, initial, goal_fn, successors_fn) do
frontier =
Queue.new()
|> Queue.push(Node.new(initial, nil))
explored =
MapSet.new()
|> MapSet.put(initial)
bfs(space, frontier, explored, goal_fn, successors_fn)
end
@spec bfs(
a,
Queue.t(),
MapSet.t(),
(b -> boolean),
(a, b -> list(b))
) :: Node.t()
when a: var, b: var
defp bfs(space, frontier, explored, goal_fn, successors_fn) do
if Queue.empty?(frontier) == false do
{current_node, frontier} = Queue.pop(frontier)
current_state = current_node.state
if goal_fn.(current_state) do
current_node
else
{frontier, explored} =
Enum.reduce(
successors_fn.(space, current_state),
{frontier, explored},
fn child, {frontier, explored} ->
if Enum.member?(explored, child) == true do
{frontier, explored}
else
frontier = Queue.push(frontier, Node.new(child, current_node))
explored = MapSet.put(explored, child)
{frontier, explored}
end
end
)
bfs(space, frontier, explored, goal_fn, successors_fn)
end
end
end
@spec astar_search(
a,
b,
(b -> boolean),
(a, b -> list(b)),
(b -> non_neg_integer)
) :: Node.t()
when a: var, b: var
def astar_search(maze, initial, goal_fn, successors_fn, heuristic_fn) do
frontier =
PriorityQueue.new()
|> PriorityQueue.push(Node.new(initial, nil, 0.0, heuristic_fn.(initial)))
explored =
Map.new()
|> Map.put(initial, 0.0)
astar(maze, frontier, explored, goal_fn, successors_fn, heuristic_fn)
end
# Dialyzer really dislikes this spec and its permutations and claims the
# function has no local return. I am unsure of what it has a problem with
# specifically.
# @spec astar(
#         a,
#         PriorityQueue.t(Node.t()),
#         map,
#         (b -> boolean),
#         (a, b -> list(b)),
#         (b -> non_neg_integer)
#       ) :: Node.t() when a: var, b: var
# Recursive A* worker. `explored` maps each reached state to the cheapest
# known path cost; a successor is (re-)enqueued only when the new route is
# strictly cheaper. Returns the goal node, or `nil` when the frontier
# empties without reaching the goal.
#
# Fix: the old skip condition `child in explored or Map.get(explored, child)
# <= new_cost` was misleading — `child in explored` enumerates the map as
# `{key, value}` pairs and so was effectively always false, and the second
# clause only handled unexplored children because `nil <= number` happens to
# be false under Erlang term ordering. The check is now explicit.
defp astar(maze, frontier, explored, goal_fn, successors_fn, heuristic_fn) do
  if PriorityQueue.empty?(frontier) do
    nil
  else
    {current_node, frontier} = PriorityQueue.pop(frontier)
    current_state = current_node.state

    if goal_fn.(current_state) do
      current_node
    else
      {frontier, explored} =
        Enum.reduce(
          successors_fn.(maze, current_state),
          {frontier, explored},
          fn child, {frontier, explored} ->
            # Every edge has unit weight.
            new_cost = current_node.cost + 1

            cheaper? =
              case Map.fetch(explored, child) do
                {:ok, known_cost} -> new_cost < known_cost
                :error -> true
              end

            if cheaper? do
              frontier =
                PriorityQueue.push(
                  frontier,
                  Node.new(child, current_node, new_cost, heuristic_fn.(child))
                )

              {frontier, Map.put(explored, child, new_cost)}
            else
              {frontier, explored}
            end
          end
        )

      astar(maze, frontier, explored, goal_fn, successors_fn, heuristic_fn)
    end
  end
end
# Reconstructs the list of states from the start state to `n`'s state by
# walking the parent chain. Returns `[]` for `nil` (no solution node).
# Fix: the old spec claimed `list(Node.t())`, but the function accumulates
# `n.state` values — it returns a list of *states*, not nodes. The
# `when n == nil` / `when n != nil` guards are replaced by pattern matching.
@spec node_to_path(Node.t() | nil) :: list
def node_to_path(nil) do
  []
end

def node_to_path(n) do
  node_to_path(n, [n.state])
end

# Prepends each ancestor's state until the root (parent == nil) is reached.
defp node_to_path(n, path) do
  case n.parent do
    nil -> path
    parent -> node_to_path(parent, [parent.state | path])
  end
end
end
|
lib/ccsp/chapter2/generic_search.ex
| 0.767777
| 0.530054
|
generic_search.ex
|
starcoder
|
defmodule Timex.Format.DateTime.Formatters.Strftime do
  @moduledoc """
  Date formatting language defined by the `strftime` function from the Standard
  C Library.
  This implementation in Elixir is mostly compatible with `strftime`. The
  exception is the absence of locale-depended results. All directives that imply
  textual result will produce English names and abbreviations.
  A complete reference of the directives implemented here is given below.
  ## Directive format
  A directive is marked by the percent sign (`%`) followed by one character
  (`<directive>`). In addition, a few optional specifiers can be inserted
  in-between:
      %<flag><width><modifier><directive>
  Supported flags:
  * `-` - don't pad numerical results (overrides default padding if any)
  * `0` - use zeros for padding
  * `_` - use spaces for padding
  * `:`, `::` - used only in combination with `%z`; see description of `%:z`
    and `%::z` below
  `<width>` is a non-negative decimal number specifying the minimum field
  width.
  `<modifier>` can be `E` or `O`. These are locale-sensitive modifiers, and as
  such they are currently ignored by this implementation.
  ## List of all directives
  * `%%` - produces a single `%` in the output
  ### Years and centuries
  * `%Y` - full year number (0000..9999)
  * `%y` - the last two digits of the year number (00.99)
  * `%C` - century number (00..99)
  * `%G` - year number corresponding to the date's ISO week (0..9999)
  * `%g` - year number (2 digits) corresponding to the date's ISO week (0.99)
  ### Months
  * `%m` - month number (01..12)
  * `%b` - abbreviated month name (Jan..Dec, no padding)
  * `%h` - same is `%b`
  * `%B` - full month name (January..December, no padding)
  ### Days, and days of week
  * `%d` - day number (01..31)
  * `%e` - same as `%d`, but padded with spaces ( 1..31)
  * `%j` - ordinal day of the year (001..366)
  * `%u` - weekday, Monday first (1..7)
  * `%w` - weekday, Sunday first (0..6)
  * `%a` - abbreviated weekday name (Mon..Sun, no padding)
  * `%A` - full weekday name (Monday..Sunday, no padding)
  ### Weeks
  * `%V` - ISO week number (01..53)
  * `%W` - week number of the year, Monday first (00..53)
  * `%U` - week number of the year, Sunday first (00..53)
  ### Time
  * `%H` - hour of the day (00..23)
  * `%k` - same as `%H`, but padded with spaces ( 0..23)
  * `%I` - hour of the day (1..12)
  * `%l` - same as `%I`, but padded with spaces ( 1..12)
  * `%M` - minutes of the hour (0..59)
  * `%S` - seconds of the minute (0..60)
  * `%f` - microseconds in zero padded decimal form, i.e. 025000
  * `%s` - number of seconds since UNIX epoch
  * `%P` - lowercase am or pm (no padding)
  * `%p` - uppercase AM or PM (no padding)
  ### Time zones
  * `%Z` - time zone name, e.g. `UTC` (no padding)
  * `%z` - time zone offset in the form `+0230` (no padding)
  * `%:z` - time zone offset in the form `-07:30` (no padding)
  * `%::z` - time zone offset in the form `-07:30:00` (no padding)
  ### Compound directives
  * `%D` - same as `%m/%d/%y`
  * `%F` - same as `%Y-%m-%d`
  * `%R` - same as `%H:%M`
  * `%r` - same as `%I:%M:%S %p`
  * `%T` - same as `%H:%M:%S`
  * `%v` - same as `%e-%b-%Y`
  """
  use Timex.Format.DateTime.Formatter

  alias Timex.DateTime
  alias Timex.Format.DateTime.Formatters.Default
  alias Timex.Parse.DateTime.Tokenizers.Strftime

  # Splitting the format string into directives is delegated to the
  # strftime tokenizer.
  @spec tokenize(String.t) :: {:ok, [%Directive{}]} | {:error, term}
  defdelegate tokenize(format_string), to: Strftime

  # Raising variant of `format/2`: unwraps the result or raises `FormatError`.
  @spec format!(%DateTime{}, String.t) :: String.t | no_return
  def format!(%DateTime{} = date, format_string) do
    with {:ok, result} <- format(date, format_string) do
      result
    else
      {:error, reason} -> raise FormatError, message: reason
    end
  end

  # Formatting is performed by the default formatter, parameterized with the
  # strftime tokenizer above.
  @spec format(%DateTime{}, String.t) :: {:ok, String.t} | {:error, term}
  def format(%DateTime{} = date, format_string) do
    Default.format(date, format_string, Strftime)
  end
end
|
lib/format/datetime/formatters/strftime.ex
| 0.878751
| 0.815012
|
strftime.ex
|
starcoder
|
defmodule Blogit.Components.Posts do
  @moduledoc """
  A `Blogit.Component` process which can be queried from outside.
  The `Blogit.Components.Posts` process holds all the posts in the blog as its
  state.
  This process handles the following `call` messages:
  * :all -> returns all the posts of the blog as list of `Blogit.Models.Post`
    structures.
  * {:filter, filters, from, size} -> returns a list of posts sorted by their
    meta.created_at field, newest first, filtered by the given `filters` and
    the first `from` are dropped. The size of the list is specified by `size`.
    The posts are represented by `Blogit.Models.Post.Meta` structures so they
    can be presented in a stream by showing only their preview.
  * {:by_name, name} -> returns one post by its unique name. If there is no
    post with the given `name` the tuple `{:error, no-post-found-message}`
    is returned. If the post is present, the tuple `{:ok, the-post}` is
    returned. The post is in the for of a `Blogit.Models.Post` struct.
  This component is supervised by `Blogit.Components.Supervisor` and added to
  it by `Blogit.Server`.
  When the posts get updated, this process' state is updated by the
  `Blogit.Server` process.
  The `Blogit.Components.PostsByDate` and the `Blogit.Components.Metas`
  processes calculate their state using this one.
  """
  use Blogit.Component

  alias Blogit.Logic.Search
  alias Blogit.Models.Post.Meta

  # Defer loading the posts until after init by messaging ourselves.
  def init({language, posts_provider}) do
    send(self(), {:init_posts, posts_provider})
    {:ok, %{language: language}}
  end

  # Loads the posts for the configured language from the provider.
  def handle_info({:init_posts, posts_provider}, %{language: language}) do
    posts = posts_provider.get_posts(language)
    {:noreply, %{language: language, posts: posts}}
  end

  # State refresh pushed from `Blogit.Server` when the posts change.
  def handle_call({:update, new_posts}, _from, state) do
    {:reply, :ok, %{state | posts: new_posts}}
  end

  def handle_call(:all, _from, %{posts: posts} = state) do
    {:reply, Map.values(posts), state}
  end

  def handle_call({:filter, filters, from, size}, _from, %{posts: posts} = state) do
    limit =
      case size do
        :infinity -> map_size(posts)
        n -> n
      end

    metas =
      posts
      |> Map.values()
      |> Search.filter_by_params(filters)
      |> Enum.map(& &1.meta)
      |> Meta.sorted()
      |> Enum.drop(from)
      |> Enum.take(limit)

    {:reply, metas, state}
  end

  def handle_call({:by_name, name}, _from, %{posts: posts} = state) do
    reply =
      case Map.get(posts, name) do
        nil -> {:error, "No post with name #{name} found."}
        post -> {:ok, post}
      end

    {:reply, reply, state}
  end
end
|
lib/blogit/components/posts.ex
| 0.719876
| 0.521167
|
posts.ex
|
starcoder
|
defmodule SvgBuilder.Font do
  alias SvgBuilder.{Element, Units}

  # Font attributes may only be set on the SVG text-type elements below;
  # `add_text_attribute/3` enforces this.
  @text_types ~w(altGlyph textPath text tref tspan)a
  @font_styles ~w(normal italic oblique inherit)a
  @font_variants ~w(normal small-caps inherit)a
  @font_weights ~w(normal bold bolder lighter inherit)a
  @numeric_font_weights [100, 200, 300, 400, 500, 600, 700, 800, 900]
  @font_stretches ~w(normal wider narrower ultra-condensed extra-condensed condensed semi-condensed semi-expanded expanded extra-expanded ultra-expanded inherit)a
  @font_sizes ~w(xx-small x-small small medium large x-large xx-large larger smaller)a

  @type font_style_t() :: :normal | :italic | :oblique | :inherit
  @type font_variant_t() :: :normal | :"small-caps" | :inherit
  @type font_weight_t() :: :normal | :bold | :bolder | :lighter | :inherit
  @type numeric_font_weight_t() :: 100 | 200 | 300 | 400 | 500 | 600 | 700 | 800 | 900
  @type font_stretches_t() ::
          :normal
          | :wider
          | :narrower
          | :"ultra-condensed"
          | :"extra-condensed"
          | :condensed
          | :"semi-condensed"
          | :"semi-expanded"
          | :expanded
          | :"extra-expanded"
          | :"ultra-expanded"
          | :inherit
  @type font_size_t() ::
          :"xx-small"
          | :"x-small"
          | :small
          | :medium
          | :large
          | :"x-large"
          | :"xx-large"
          | :larger
          | :smaller
          | integer
          | float

  @moduledoc """
  Handles all font related attributes.
  Most font attributes can only be applied to the text type elements:
  #{inspect(@text_types)}
  https://www.w3.org/TR/SVG11/fonts.html
  """

  @doc """
  Set the font on a text type element.
  Font can be specified as a string font specification or as a map.
  The map to set font attributes may have the following keys:
  `:family`, `:style`, `:variant`, `:weight`, `:stretch`, `:size` and `:size_adjust`
  See the various individual functions for what values are allowed for these keys.
  ## Example
      iex>Text.text("Some text") |> Font.font("bold italic large Palatino, serif")
      {:text, %{font: "bold italic large Palatino, serif"}, "Some text"}
      iex>Text.text("Some text") |> Font.font(%{family: "Palantino, serif", weight: :bold, style: :italic, size: :large})
      {:text, %{"font-family": "Palantino, serif", "font-size": :large, "font-style": :italic, "font-weight": :bold}, "Some text"}
  """
  @spec font(
          Element.t(),
          %{
            optional(:family) => binary,
            optional(:style) => font_style_t,
            optional(:variant) => font_variant_t,
            optional(:weight) => font_weight_t | numeric_font_weight_t,
            optional(:stretch) => font_stretches_t,
            optional(:size) => font_size_t,
            optional(:size_adjust) => number | :inherit | :none
          }
          | binary
        ) :: Element.t()
  def font(element, font) when is_binary(font) do
    add_text_attribute(element, :font, font)
  end

  def font(element, %{} = options) do
    element
    |> apply_unless_nil(Map.get(options, :family), &font_family/2)
    |> apply_unless_nil(Map.get(options, :style), &font_style/2)
    |> apply_unless_nil(Map.get(options, :variant), &font_variant/2)
    |> apply_unless_nil(Map.get(options, :weight), &font_weight/2)
    |> apply_unless_nil(Map.get(options, :stretch), &font_stretch/2)
    |> apply_unless_nil(Map.get(options, :size), &font_size/2)
    |> apply_unless_nil(Map.get(options, :size_adjust), &font_size_adjust/2)
  end

  # Applies `function` to `element` and `value` unless the option was not
  # supplied (nil), which lets `font/2` skip absent map keys.
  defp apply_unless_nil(element, nil, _function) do
    element
  end

  defp apply_unless_nil(element, value, function) do
    function.(element, value)
  end

  @doc """
  Set the font-family attribute on a text element.
  """
  @spec font_family(Element.t(), binary) :: Element.t()
  def font_family(element, family) do
    add_text_attribute(element, :"font-family", family)
  end

  @doc """
  Set the font-style attribute on a text element.
  Allowed values are: `#{inspect(@font_styles)}`
  """
  @spec font_style(Element.t(), font_style_t) :: Element.t()
  def font_style(element, style) when style in @font_styles do
    add_text_attribute(element, :"font-style", style)
  end

  @doc """
  Set the font-variant attribute on a text element.
  Allowed values are: `#{inspect(@font_variants)}`
  """
  @spec font_variant(Element.t(), font_variant_t) :: Element.t()
  def font_variant(element, variant) when variant in @font_variants do
    add_text_attribute(element, :"font-variant", variant)
  end

  @doc """
  Set the font-weight attribute on a text element.
  Allowed values are: `#{inspect(@font_weights)}` or the
  numeric values: `#{inspect(@numeric_font_weights)}`
  """
  @spec font_weight(Element.t(), font_weight_t | numeric_font_weight_t) :: Element.t()
  def font_weight(element, weight)
      when weight in @font_weights do
    add_text_attribute(element, :"font-weight", weight)
  end

  def font_weight(element, weight)
      when weight in @numeric_font_weights do
    add_text_attribute(element, :"font-weight", "#{weight}")
  end

  @doc """
  Set the font-stretch attribute on a text element.
  Allowed values are: `#{inspect(@font_stretches)}`
  """
  # Doc fix: this previously interpolated `@font_variants`, documenting the
  # wrong value set for font-stretch.
  @spec font_stretch(Element.t(), font_stretches_t) :: Element.t()
  def font_stretch(element, stretch) when stretch in @font_stretches do
    add_text_attribute(element, :"font-stretch", stretch)
  end

  @doc """
  Set the font-size attribute on a text element.
  Allowed values are a numeric point size or one of `#{inspect(@font_sizes)}`
  """
  @spec font_size(Element.t(), font_size_t) :: Element.t()
  def font_size(element, size) when size in @font_sizes do
    add_text_attribute(element, :"font-size", size)
  end

  def font_size(element, size) do
    add_text_attribute(element, :"font-size", Units.len(size))
  end

  @doc """
  Set the font-size-adjust attribute on a text element.
  May be a number or one of `[:inherit, :none]`
  """
  @spec font_size_adjust(Element.t(), number | :inherit | :none) :: Element.t()
  def font_size_adjust(element, size) when size in [:inherit, :none] do
    add_text_attribute(element, :"font-size-adjust", size)
  end

  def font_size_adjust(element, size) do
    add_text_attribute(element, :"font-size-adjust", Units.number(size))
  end

  # Only text-type elements may carry font attributes; anything else raises.
  defp add_text_attribute({type, _, _} = element, attribute, value) when type in @text_types do
    Element.add_attribute(element, attribute, value)
  end

  defp add_text_attribute({type, _, _}, attribute, _) do
    raise ArgumentError, "Cannot set #{attribute} on element of type: #{type}"
  end
end
|
lib/font.ex
| 0.892785
| 0.409339
|
font.ex
|
starcoder
|
defimpl Timex.Protocol, for: Tuple do
alias Timex.AmbiguousDateTime
alias Timex.DateTime.Helpers
import Timex.Macros
@epoch :calendar.datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
# Converts an Erlang date/datetime tuple to its Julian date; passes through
# `{:error, :invalid_date}` for malformed input.
#
# Bug fix: `to_erl_datetime/1` yields `{:ok, {{y, m, d}, {h, mm, s}}}`, so
# the old `with {y, m, d} <- ...` pattern could never match and the raw
# `{:ok, _}` tuple leaked out instead of the Julian date.
def to_julian(date) do
  with {:ok, {{y, m, d}, _time}} <- to_erl_datetime(date),
       do: Timex.Calendar.Julian.julian_date(y, m, d)
end
# Seconds from year 0 (Gregorian) to the given tuple; error tuples from
# `to_erl_datetime/1` are passed through unchanged.
def to_gregorian_seconds(date) do
  case to_erl_datetime(date) do
    {:ok, erl} -> :calendar.datetime_to_gregorian_seconds(erl)
    error -> error
  end
end

# Same as `to_gregorian_seconds/1` but scaled to microseconds, adding any
# microsecond component carried by the tuple itself.
def to_gregorian_microseconds(date) do
  case to_erl_datetime(date) do
    {:ok, erl} ->
      seconds = :calendar.datetime_to_gregorian_seconds(erl)
      seconds * 1_000_000 + get_microseconds(date)

    error ->
      error
  end
end

# Seconds since the UNIX epoch (1970-01-01T00:00:00).
def to_unix(date) do
  case to_erl_datetime(date) do
    {:ok, erl} -> :calendar.datetime_to_gregorian_seconds(erl) - @epoch
    error -> error
  end
end

# Extracts the date part as an Elixir `Date` struct.
def to_date(date) do
  case to_erl_datetime(date) do
    {:ok, {erl_date, _time}} -> Date.from_erl!(erl_date)
    error -> error
  end
end
# Conversion of Erlang tuples to a timezone-aware `DateTime` (or an
# `AmbiguousDateTime` when the wall time falls in a zone transition).

# A bare `{y, m, d}` date is treated as midnight.
def to_datetime({_y, _m, _d} = date, timezone) do
  to_datetime({date, {0, 0, 0}}, timezone)
end

# A datetime without microseconds gets a zero microsecond component.
def to_datetime({{_y, _m, _d} = date, {h, mm, s}}, timezone) do
  to_datetime({date, {h, mm, s, {0, 0}}}, timezone)
end

def to_datetime({{y, m, d}, {h, mm, s, us}}, timezone) when is_datetime(y, m, d, h, mm, s) do
  us = Helpers.construct_microseconds(us)
  dt = Timex.NaiveDateTime.new!(y, m, d, h, mm, s, us)

  # Timezone conversion may succeed, be ambiguous (DST overlap), or fail.
  with %DateTime{} = datetime <- Timex.Timezone.convert(dt, timezone) do
    datetime
  else
    %AmbiguousDateTime{} = datetime ->
      datetime

    {:error, _} = err ->
      err
  end
end

def to_datetime(_, _), do: {:error, :invalid_date}

def to_naive_datetime({{y, m, d}, {h, mm, s, us}}) when is_datetime(y, m, d, h, mm, s) do
  us = Helpers.construct_microseconds(us)
  Timex.NaiveDateTime.new!(y, m, d, h, mm, s, us)
end

# Fallback clause: normalize via `to_erl_datetime/1` (microseconds dropped).
def to_naive_datetime(date) do
  with {:ok, {{y, m, d}, {h, mm, s}}} <- to_erl_datetime(date) do
    Timex.NaiveDateTime.new!(y, m, d, h, mm, s)
  end
end

# Identity for a valid bare date; otherwise normalize to an Erlang datetime.
def to_erl({y, m, d} = date) when is_date(y, m, d), do: date

def to_erl(date) do
  with {:ok, date} <- to_erl_datetime(date),
       do: date
end
# Century of the year component; delegates to `Timex.century/1`.
def century({y, m, d}) when is_date(y, m, d), do: Timex.century(y)
def century({{y, m, d}, _}) when is_date(y, m, d), do: Timex.century(y)
def century(_), do: {:error, :invalid_date}

def is_leap?({y, m, d}) when is_date(y, m, d), do: :calendar.is_leap_year(y)
def is_leap?({{y, m, d}, _}) when is_date(y, m, d), do: :calendar.is_leap_year(y)
def is_leap?(_), do: {:error, :invalid_date}

# For a bare date the day boundaries are the date itself; for a datetime the
# time part is clamped to 00:00:00 / 23:59:59 (input shape is preserved).
def beginning_of_day({y, m, d} = date) when is_date(y, m, d), do: date

def beginning_of_day({{y, m, d} = date, _}) when is_date(y, m, d),
  do: {date, {0, 0, 0}}

def beginning_of_day(_), do: {:error, :invalid_date}

def end_of_day({y, m, d} = date) when is_date(y, m, d), do: date

def end_of_day({{y, m, d} = date, _}) when is_date(y, m, d),
  do: {date, {23, 59, 59}}

def end_of_day(_), do: {:error, :invalid_date}
# Start of the week containing the date, honoring `weekstart` (e.g. :mon).
# `Timex.standardize_week_start/1` returns an atom on success; its error
# tuple fails the `with` pattern and is returned unchanged.
#
# Fix: this clause bound `ws` but then passed the raw `weekstart` on, while
# the sibling datetime clause passed the standardized `ws`. Both now pass
# `ws`, which also silences the unused-variable warning.
def beginning_of_week({y, m, d}, weekstart) when is_date(y, m, d) do
  with ws when is_atom(ws) <- Timex.standardize_week_start(weekstart) do
    Timex.Date.new!(y, m, d)
    |> Timex.Date.beginning_of_week(ws)
    |> Date.to_erl()
  end
end

def beginning_of_week({{y, m, d}, _}, weekstart) when is_date(y, m, d) do
  with ws when is_atom(ws) <- Timex.standardize_week_start(weekstart) do
    date =
      Timex.Date.new!(y, m, d)
      |> Timex.Date.beginning_of_week(ws)
      |> Date.to_erl()

    {date, {0, 0, 0}}
  end
end

def beginning_of_week(_, _), do: {:error, :invalid_date}

# End of the week containing the date; datetime results are clamped to
# 23:59:59.
def end_of_week({y, m, d}, weekstart) when is_date(y, m, d) do
  with ws when is_atom(ws) <- Timex.standardize_week_start(weekstart) do
    Timex.Date.new!(y, m, d)
    |> Timex.Date.end_of_week(ws)
    |> Date.to_erl()
  end
end

def end_of_week({{y, m, d}, _}, weekstart) when is_date(y, m, d) do
  with ws when is_atom(ws) <- Timex.standardize_week_start(weekstart) do
    date =
      Timex.Date.new!(y, m, d)
      |> Timex.Date.end_of_week(ws)
      |> Date.to_erl()

    {date, {23, 59, 59}}
  end
end

def end_of_week(_, _), do: {:error, :invalid_date}
# Year boundaries preserve the input shape (date vs datetime).
def beginning_of_year({y, m, d}) when is_date(y, m, d),
  do: {y, 1, 1}

def beginning_of_year({{y, m, d}, _}) when is_date(y, m, d),
  do: {{y, 1, 1}, {0, 0, 0}}

def beginning_of_year(_), do: {:error, :invalid_date}

def end_of_year({y, m, d}) when is_date(y, m, d),
  do: {y, 12, 31}

def end_of_year({{y, m, d}, _}) when is_date(y, m, d),
  do: {{y, 12, 31}, {23, 59, 59}}

def end_of_year(_), do: {:error, :invalid_date}

# The first month of the quarter containing `m` is 1 + 3 * (quarter - 1).
def beginning_of_quarter({y, m, d}) when is_date(y, m, d) do
  month = 1 + 3 * (Timex.quarter(m) - 1)
  {y, month, 1}
end

def beginning_of_quarter({{y, m, d}, {h, mm, s} = _time}) when is_datetime(y, m, d, h, mm, s) do
  month = 1 + 3 * (Timex.quarter(m) - 1)
  {{y, month, 1}, {0, 0, 0}}
end

def beginning_of_quarter({{y, m, d}, {h, mm, s, _us} = _time})
    when is_datetime(y, m, d, h, mm, s) do
  month = 1 + 3 * (Timex.quarter(m) - 1)
  {{y, month, 1}, {0, 0, 0, 0}}
end

def beginning_of_quarter(_), do: {:error, :invalid_date}

# The last month of the quarter containing `m` is 3 * quarter; delegate to
# `end_of_month` to clamp the day.
def end_of_quarter({y, m, d}) when is_date(y, m, d) do
  month = 3 * Timex.quarter(m)
  end_of_month({y, month, d})
end

def end_of_quarter({{y, m, d}, {h, mm, s} = time}) when is_datetime(y, m, d, h, mm, s) do
  month = 3 * Timex.quarter(m)
  end_of_month({{y, month, d}, time})
end

def end_of_quarter({{y, m, d}, {h, mm, s, _us}}) when is_datetime(y, m, d, h, mm, s) do
  month = 3 * Timex.quarter(m)
  end_of_month({{y, month, d}, {h, mm, s}})
end

def end_of_quarter(_), do: {:error, :invalid_date}
def beginning_of_month({y, m, d}) when is_date(y, m, d),
  do: {y, m, 1}

def beginning_of_month({{y, m, d}, _}) when is_date(y, m, d),
  do: {{y, m, 1}, {0, 0, 0}}

def beginning_of_month(_), do: {:error, :invalid_date}

def end_of_month({y, m, d} = date) when is_date(y, m, d),
  do: {y, m, days_in_month(date)}

def end_of_month({{y, m, d}, _} = date) when is_date(y, m, d),
  do: {{y, m, days_in_month(date)}, {23, 59, 59}}

def end_of_month(_), do: {:error, :invalid_date}

def quarter({y, m, d}) when is_date(y, m, d), do: Calendar.ISO.quarter_of_year(y, m, d)
def quarter({{y, m, d}, _}) when is_date(y, m, d), do: Calendar.ISO.quarter_of_year(y, m, d)
def quarter(_), do: {:error, :invalid_date}

def days_in_month({y, m, d}) when is_date(y, m, d), do: Timex.days_in_month(y, m)
def days_in_month({{y, m, d}, _}) when is_date(y, m, d), do: Timex.days_in_month(y, m)
def days_in_month(_), do: {:error, :invalid_date}

def week_of_month({y, m, d}) when is_date(y, m, d), do: Timex.week_of_month(y, m, d)
def week_of_month({{y, m, d}, _}) when is_date(y, m, d), do: Timex.week_of_month(y, m, d)
def week_of_month(_), do: {:error, :invalid_date}

# weekday/1 uses Erlang's convention: Monday is 1.
def weekday({y, m, d} = date) when is_date(y, m, d), do: :calendar.day_of_the_week(date)
def weekday({{y, m, d} = date, _}) when is_date(y, m, d), do: :calendar.day_of_the_week(date)
def weekday(_), do: {:error, :invalid_date}

# weekday/2 delegates to Timex so the caller's `weekstart` is respected.
def weekday({y, m, d}, weekstart) when is_date(y, m, d),
  do: Timex.Date.day_of_week(Timex.Date.new!(y, m, d), weekstart)

def weekday({{y, m, d}, _}, weekstart) when is_date(y, m, d),
  do: Timex.Date.day_of_week(Timex.Date.new!(y, m, d), weekstart)

def weekday(_, _), do: {:error, :invalid_date}

# Ordinal day of the year (Jan 1st == 1).
def day({y, m, d} = date) when is_date(y, m, d),
  do: 1 + Timex.diff(date, {y, 1, 1}, :days)

def day({{y, m, d} = date, _}) when is_date(y, m, d),
  do: 1 + Timex.diff(date, {y, 1, 1}, :days)

def day(_), do: {:error, :invalid_date}

def is_valid?({y, m, d}) when is_date(y, m, d), do: true
def is_valid?({{y, m, d}, {h, mm, s}}) when is_datetime(y, m, d, h, mm, s), do: true
def is_valid?({{y, m, d}, {h, mm, s, _us}}) when is_datetime(y, m, d, h, mm, s), do: true
def is_valid?(_), do: false

def iso_week({y, m, d}) when is_date(y, m, d),
  do: Timex.iso_week(y, m, d)

def iso_week({{y, m, d}, _}) when is_date(y, m, d),
  do: Timex.iso_week(y, m, d)

def iso_week(_), do: {:error, :invalid_date}

# Resolves an ISO ordinal `day` within the tuple's year, preserving any time
# component of the input.
def from_iso_day({y, m, d}, day) when is_day_of_year(day) and is_date(y, m, d) do
  {year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(y, day)
  {year, month, day_of_month}
end

def from_iso_day({{y, m, d}, {_, _, _} = time}, day)
    when is_day_of_year(day) and is_date(y, m, d) do
  {year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(y, day)
  {{year, month, day_of_month}, time}
end

def from_iso_day({{y, m, d}, {_, _, _, _} = time}, day)
    when is_day_of_year(day) and is_date(y, m, d) do
  {year, month, day_of_month} = Timex.Helpers.iso_day_to_date_tuple(y, day)
  {{year, month, day_of_month}, time}
end

def from_iso_day(_, _), do: {:error, :invalid_date}
# Replaces individual fields (year, month, day, hour, ...) of a date or
# datetime tuple according to `options`. With `validate: true` (the default)
# each new value is normalized/clamped via `Timex.normalize/2`.
def set({y, m, d} = date, options) when is_date(y, m, d),
  do: do_set({date, {0, 0, 0}}, options, :date)

def set({{y, m, d}, {h, mm, s}} = datetime, options) when is_datetime(y, m, d, h, mm, s),
  do: do_set(datetime, options, :datetime)

# Microseconds are carried through unchanged around the core do_set call.
def set({{y, m, d}, {h, mm, s, us}}, options) when is_datetime(y, m, d, h, mm, s) do
  {date, {h, mm, s}} = do_set({{y, m, d}, {h, mm, s}}, options, :datetime)
  {date, {h, mm, s, us}}
end

def set(_, _), do: {:error, :invalid_date}

# Option-by-option reducer behind `set/2`. `datetime_type` records whether
# the caller supplied a bare date or a datetime, which decides the shape
# produced by the `:datetime` option. Unknown option keys yield
# `{:error, {:bad_option, name}}`, which short-circuits the remaining steps.
defp do_set(date, options, datetime_type) do
  validate? = Keyword.get(options, :validate, true)

  options
  |> Helpers.sort_options()
  |> Enum.reduce(date, fn
    # Once an error occurred, propagate it past all remaining options.
    _option, {:error, _} = err ->
      err

    option, result ->
      case option do
        {:validate, _} ->
          result

        {:datetime, {{_, _, _} = date, {_, _, _} = time} = dt} ->
          if validate? do
            case datetime_type do
              :date ->
                Timex.normalize(:date, date)

              :datetime ->
                {Timex.normalize(:date, date), Timex.normalize(:time, time)}
            end
          else
            case datetime_type do
              :date -> date
              :datetime -> dt
            end
          end

        {:date, {_, _, _} = d} ->
          if validate? do
            case result do
              {_, _, _} -> Timex.normalize(:date, d)
              {{_, _, _}, {_, _, _} = t} -> {Timex.normalize(:date, d), t}
            end
          else
            case result do
              {_, _, _} -> d
              {{_, _, _}, {_, _, _} = t} -> {d, t}
            end
          end

        {:time, {_, _, _} = t} ->
          if validate? do
            case result do
              {_, _, _} -> date
              {{_, _, _} = d, {_, _, _}} -> {d, Timex.normalize(:time, t)}
            end
          else
            case result do
              {_, _, _} -> date
              {{_, _, _} = d, {_, _, _}} -> {d, t}
            end
          end

        {:day, d} ->
          if validate? do
            case result do
              {y, m, _} -> {y, m, Timex.normalize(:day, {y, m, d})}
              {{y, m, _}, {_, _, _} = t} -> {{y, m, Timex.normalize(:day, {y, m, d})}, t}
            end
          else
            case result do
              {y, m, _} -> {y, m, d}
              {{y, m, _}, {_, _, _} = t} -> {{y, m, d}, t}
            end
          end

        {:year, year} ->
          if validate? do
            case result do
              {_, m, d} -> {Timex.normalize(:year, year), m, d}
              {{_, m, d}, {_, _, _} = t} -> {{Timex.normalize(:year, year), m, d}, t}
            end
          else
            case result do
              {_, m, d} -> {year, m, d}
              {{_, m, d}, {_, _, _} = t} -> {{year, m, d}, t}
            end
          end

        # Changing the month may also require clamping the day (e.g. Jan 31
        # -> Feb), hence the extra day normalization.
        {:month, month} ->
          if validate? do
            case result do
              {y, _, d} ->
                {y, Timex.normalize(:month, month), Timex.normalize(:day, {y, month, d})}

              {{y, _, d}, {_, _, _} = t} ->
                {{y, Timex.normalize(:month, month), Timex.normalize(:day, {y, month, d})}, t}
            end
          else
            case result do
              {y, _, d} -> {y, month, d}
              {{y, _, d}, {_, _, _} = t} -> {{y, month, d}, t}
            end
          end

        # Time-of-day options are no-ops when the accumulator is a bare date.
        {:hour, hour} ->
          if validate? do
            case result do
              {_, _, _} -> result
              {{_, _, _} = d, {_, m, s}} -> {d, {Timex.normalize(:hour, hour), m, s}}
            end
          else
            case result do
              {_, _, _} -> result
              {{_, _, _} = d, {_, m, s}} -> {d, {hour, m, s}}
            end
          end

        {:minute, min} ->
          if validate? do
            case result do
              {_, _, _} -> result
              {{_, _, _} = d, {h, _, s}} -> {d, {h, Timex.normalize(:minute, min), s}}
            end
          else
            case result do
              {_, _, _} -> result
              {{_, _, _} = d, {h, _, s}} -> {d, {h, min, s}}
            end
          end

        {:second, sec} ->
          if validate? do
            case result do
              {_, _, _} -> result
              {{_, _, _} = d, {h, m, _}} -> {d, {h, m, Timex.normalize(:second, sec)}}
            end
          else
            case result do
              {_, _, _} -> result
              {{_, _, _} = d, {h, m, _}} -> {d, {h, m, sec}}
            end
          end

        # Timezones and microseconds are meaningless for plain tuples.
        {name, _} when name in [:timezone, :microsecond] ->
          result

        {option_name, _} ->
          {:error, {:bad_option, option_name}}
      end
  end)
end
# Shifting by exactly zero of a single unit is a no-op shortcut.
def shift(date, [{_, 0}]),
  do: date

def shift({y, m, d} = date, options) when is_date(y, m, d),
  do: do_shift(date, options, :date)

def shift({{y, m, d}, {h, mm, s}} = datetime, options) when is_datetime(y, m, d, h, mm, s),
  do: do_shift(datetime, options, :datetime)

def shift({{y, m, d}, {h, mm, s, _us}} = datetime, options) when is_datetime(y, m, d, h, mm, s),
  do: do_shift(datetime, options, :datetime)

def shift(_, _), do: {:error, :invalid_date}

# Normalizes the supported tuple shapes to `{:ok, {{y, m, d}, {h, mm, s}}}`,
# dropping any microsecond component.
defp to_erl_datetime({y, m, d} = date) when is_date(y, m, d),
  do: {:ok, {date, {0, 0, 0}}}

defp to_erl_datetime({{y, m, d}, {h, mm, s}} = dt) when is_datetime(y, m, d, h, mm, s),
  do: {:ok, dt}

defp to_erl_datetime({{y, m, d}, {h, mm, s, _us}}) when is_datetime(y, m, d, h, mm, s),
  do: {:ok, {{y, m, d}, {h, mm, s}}}

defp to_erl_datetime(_),
  do: {:error, :invalid_date}

# Extracts the microsecond component of a time or datetime tuple; 0 when the
# tuple carries no microseconds. The `{us, precision}` form is also accepted.
defp get_microseconds({_, _, _, us}) when is_integer(us),
  do: us

defp get_microseconds({_, _, _, {us, _precision}}) when is_integer(us),
  do: us

defp get_microseconds({_, _, _}),
  do: 0

defp get_microseconds({date, time}) when is_tuple(date) and is_tuple(time),
  do: get_microseconds(time)
# Applies `Timex.shift/2` to a NaiveDateTime built from the tuple, then
# converts the result back to the tuple `type` (:date or :datetime).
defp do_shift(date, options, type) do
  # NOTE(review): `Enum.reject/2` drops the options for which the fun
  # returns true. As written this KEEPS :weeks/:days and sub-day
  # hour/minute/second/... shifts while silently discarding the day-sized
  # ones (and keeps unknown units only if the final clause returns true -
  # it returns true, so unknown units are dropped). That polarity looks
  # inverted relative to the clause guards; confirm against upstream Timex
  # (which appears to use Enum.filter/2 here) before relying on large
  # sub-day shifts.
  allowed_options =
    Enum.reject(options, fn
      {:weeks, _} ->
        false

      {:days, _} ->
        false

      {:hours, value} when value >= 24 or value <= -24 ->
        true

      {:hours, _} ->
        false

      {:minutes, value} when value >= 24 * 60 or value <= -24 * 60 ->
        true

      {:minutes, _} ->
        false

      {:seconds, value} when value >= 24 * 60 * 60 or value <= -24 * 60 * 60 ->
        true

      {:seconds, _} ->
        false

      {:milliseconds, value}
      when value >= 24 * 60 * 60 * 1000 or value <= -24 * 60 * 60 * 1000 ->
        true

      {:milliseconds, _} ->
        false

      {:microseconds, {value, _}}
      when value >= 24 * 60 * 60 * 1000 * 1000 or value <= -24 * 60 * 60 * 1000 * 1000 ->
        true

      {:microseconds, value}
      when value >= 24 * 60 * 60 * 1000 * 1000 or value <= -24 * 60 * 60 * 1000 * 1000 ->
        true

      {:microseconds, _} ->
        false

      {_type, _value} ->
        true
    end)

  # Shift on a NaiveDateTime, then project back onto the requested shape.
  case Timex.shift(to_naive_datetime(date), allowed_options) do
    {:error, _} = err ->
      err

    %NaiveDateTime{} = nd when type == :date ->
      {nd.year, nd.month, nd.day}

    %NaiveDateTime{} = nd when type == :datetime ->
      {{nd.year, nd.month, nd.day}, {nd.hour, nd.minute, nd.second}}
  end
end
end
|
lib/datetime/erlang.ex
| 0.677581
| 0.47524
|
erlang.ex
|
starcoder
|
defmodule AWS.Cloud9 do
  @moduledoc """
  AWS Cloud9
  AWS Cloud9 is a collection of tools that you can use to code, build, run, test,
  debug, and release software in the cloud.
  For more information about AWS Cloud9, see the [AWS Cloud9 User Guide](https://docs.aws.amazon.com/cloud9/latest/user-guide).
  AWS Cloud9 supports these operations:
    * `CreateEnvironmentEC2`: Creates an AWS Cloud9 development
  environment, launches an Amazon EC2 instance, and then connects from the
  instance to the environment.
    * `CreateEnvironmentMembership`: Adds an environment member to an
  environment.
    * `DeleteEnvironment`: Deletes an environment. If an Amazon EC2
  instance is connected to the environment, also terminates the instance.
    * `DeleteEnvironmentMembership`: Deletes an environment member from
  an environment.
    * `DescribeEnvironmentMemberships`: Gets information about
  environment members for an environment.
    * `DescribeEnvironments`: Gets information about environments.
    * `DescribeEnvironmentStatus`: Gets status information for an
  environment.
    * `ListEnvironments`: Gets a list of environment identifiers.
    * `ListTagsForResource`: Gets the tags for an environment.
    * `TagResource`: Adds tags to an environment.
    * `UntagResource`: Removes tags from an environment.
    * `UpdateEnvironment`: Changes the settings of an existing
  environment.
    * `UpdateEnvironmentMembership`: Changes the settings of an existing
  environment member for an environment.
  """
  alias AWS.Client
  alias AWS.Request

  # Static description of the Cloud9 service endpoint and signing details,
  # consumed by `AWS.Request` for every operation below.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2017-09-23",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "cloud9",
      global?: false,
      protocol: "json",
      service_id: "Cloud9",
      signature_version: "v4",
      signing_name: "cloud9",
      target_prefix: "AWSCloud9WorkspaceManagementService"
    }
  end

  @doc """
  Creates an AWS Cloud9 development environment, launches an Amazon Elastic
  Compute Cloud (Amazon EC2) instance, and then connects from the instance to the
  environment.
  """
  def create_environment_ec2(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "CreateEnvironmentEC2", input, options)
  end

  @doc """
  Adds an environment member to an AWS Cloud9 development environment.
  """
  def create_environment_membership(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "CreateEnvironmentMembership", input, options)
  end

  @doc """
  Deletes an AWS Cloud9 development environment.
  If an Amazon EC2 instance is connected to the environment, also terminates the
  instance.
  """
  def delete_environment(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "DeleteEnvironment", input, options)
  end

  @doc """
  Deletes an environment member from an AWS Cloud9 development environment.
  """
  def delete_environment_membership(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "DeleteEnvironmentMembership", input, options)
  end

  @doc """
  Gets information about environment members for an AWS Cloud9 development
  environment.
  """
  def describe_environment_memberships(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "DescribeEnvironmentMemberships", input, options)
  end

  @doc """
  Gets status information for an AWS Cloud9 development environment.
  """
  def describe_environment_status(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "DescribeEnvironmentStatus", input, options)
  end

  @doc """
  Gets information about AWS Cloud9 development environments.
  """
  def describe_environments(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "DescribeEnvironments", input, options)
  end

  @doc """
  Gets a list of AWS Cloud9 development environment identifiers.
  """
  def list_environments(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "ListEnvironments", input, options)
  end

  @doc """
  Gets a list of the tags associated with an AWS Cloud9 development environment.
  """
  def list_tags_for_resource(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "ListTagsForResource", input, options)
  end

  @doc """
  Adds tags to an AWS Cloud9 development environment.
  Tags that you add to an AWS Cloud9 environment by using this method will NOT be
  automatically propagated to underlying resources.
  """
  def tag_resource(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "TagResource", input, options)
  end

  @doc """
  Removes tags from an AWS Cloud9 development environment.
  """
  def untag_resource(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "UntagResource", input, options)
  end

  @doc """
  Changes the settings of an existing AWS Cloud9 development environment.
  """
  def update_environment(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "UpdateEnvironment", input, options)
  end

  @doc """
  Changes the settings of an existing environment member for an AWS Cloud9
  development environment.
  """
  def update_environment_membership(%Client{} = client, input, options \\ []) do
    client
    |> Request.request_post(metadata(), "UpdateEnvironmentMembership", input, options)
  end
end
|
lib/aws/generated/cloud9.ex
| 0.874961
| 0.636988
|
cloud9.ex
|
starcoder
|
defmodule Hui.Query.Facet do
  @moduledoc """
  Struct related to [faceting](http://lucene.apache.org/solr/guide/faceting.html).

  ### Example

      iex> x = %Hui.Query.Facet{field: ["type", "year"], query: "year:[2000 TO NOW]"}
      %Hui.Query.Facet{
        contains: nil,
        "contains.ignoreCase": nil,
        "enum.cache.minDf": nil,
        excludeTerms: nil,
        exists: nil,
        facet: true,
        field: ["type", "year"],
        interval: nil,
        limit: nil,
        matches: nil,
        method: nil,
        mincount: nil,
        missing: nil,
        offset: nil,
        "overrequest.count": nil,
        "overrequest.ratio": nil,
        pivot: [],
        "pivot.mincount": nil,
        prefix: nil,
        query: "year:[2000 TO NOW]",
        range: nil,
        sort: nil,
        threads: nil
      }
      iex> x |> Hui.Encoder.encode
      "facet=true&facet.field=type&facet.field=year&facet.query=year%3A%5B2000+TO+NOW%5D"
  """

  # Fields that default to `nil`; the keyword entries below supply the
  # non-nil defaults (`facet: true`) and empty-list accumulators.
  @nil_fields [
    :"pivot.mincount",
    :prefix,
    :contains,
    :"contains.ignoreCase",
    :matches,
    :sort,
    :limit,
    :offset,
    :mincount,
    :missing,
    :method,
    :"enum.cache.minDf",
    :exists,
    :excludeTerms,
    :"overrequest.count",
    :"overrequest.ratio",
    :threads,
    :interval,
    :range
  ]

  defstruct @nil_fields ++ [facet: true, field: [], query: [], pivot: []]

  @typedoc "Struct for faceting."
  @type t :: %__MODULE__{
          facet: boolean,
          field: binary | list(binary),
          query: binary | list(binary),
          "pivot.mincount": number,
          pivot: binary | list(binary),
          prefix: binary,
          contains: binary,
          "contains.ignoreCase": binary,
          matches: binary,
          sort: binary,
          limit: number,
          offset: number,
          mincount: number,
          missing: boolean,
          method: binary,
          "enum.cache.minDf": number,
          exists: boolean,
          excludeTerms: binary,
          "overrequest.count": number,
          "overrequest.ratio": number,
          threads: binary,
          interval: Hui.Query.FacetInterval.t() | list(Hui.Query.FacetInterval.t()),
          range: Hui.Query.FacetRange.t() | list(Hui.Query.FacetRange.t())
        }
end
|
lib/hui/query/facet.ex
| 0.850298
| 0.405596
|
facet.ex
|
starcoder
|
defmodule Entrance.Auth.Bcrypt do
  @moduledoc """
  Provides functions for hashing passwords and authenticating users using
  [Bcrypt](https://hexdocs.pm/bcrypt_elixir/Bcrypt.html#content).

  This module assumes that you have a virtual field named `password`, and a
  database backed string field named `hashed_password`.

  ## Usage

  ## Example

  ```
  defmodule YourApp.Accounts.User do
    use Ecto.Schema
    import Ecto.Changeset
    import Entrance.Auth.Bcrypt, only: [hash_password: 1] # ...

    schema "users" do
      field :email, :string
      field :password, :string, virtual: true
      field :hashed_password, :string
      field :session_secret, :string
      timestamps()
    end

    def create_changeset(user, attrs) do
      user
      |> cast(attrs, [:email, :password, :hashed_password, :session_secret])
      |> validate_required([:email, :password])
      |> hash_password # ...
    end
  end
  ```

  To authenticate a user in your application, you can use `auth/2`:

  ```
  user = Repo.get(User, 1)
  password = "<PASSWORD>"
  Entrance.Auth.Bcrypt.auth(user, password)
  ```
  """

  alias Ecto.Changeset

  @doc """
  Takes a changeset and turns the virtual `password` field into a
  `hashed_password` change on the changeset.

  When the changeset carries no `password` change, it is returned untouched.

  ```
  import Entrance.Auth.Bcrypt, only: [hash_password: 1]
  # ... your user schema

  def create_changeset(user, attrs) do
    user
    |> cast(attrs, [:email, :password, :hashed_password, :session_secret])
    |> validate_required([:email, :password])
    |> hash_password # :)
  end
  ```
  """
  def hash_password(changeset) do
    password = Changeset.get_change(changeset, :password)

    # Only derive and store a hash when a password change is present.
    if password,
      do: Changeset.put_change(changeset, :hashed_password, Bcrypt.hash_pwd_salt(password)),
      else: changeset
  end

  @doc """
  Compares the given `password` against the given `user`'s password.

  ```
  user = %{hashed_password: "<PASSWORD>"}
  password = "<PASSWORD>"
  Entrance.Auth.Bcrypt.auth(user, password)
  ```
  """
  def auth(user, password) do
    Bcrypt.verify_pass(password, user.hashed_password)
  end

  @doc """
  Simulates password check to help prevent timing attacks. Delegates to
  `Bcrypt.no_user_verify/0`.
  """
  def no_user_verify do
    Bcrypt.no_user_verify()
  end
end
|
lib/auth/bcrypt.ex
| 0.85449
| 0.764452
|
bcrypt.ex
|
starcoder
|
defmodule Squitter.Decoding.ExtSquitter do
  @moduledoc """
  Decoder for ADS-B extended squitter messages (downlink formats 17 and 18).
  """

  require Logger
  import Squitter.Decoding.Utils
  alias Squitter.StatsTracker
  alias Squitter.Decoding.ModeS

  alias Squitter.Decoding.ExtSquitter.{
    TypeCode,
    Callsign,
    AirbornePosition,
    AircraftCategory,
    GroundSpeed,
    AirSpeed
  }

  # Downlink formats that carry extended squitter (ADS-B) payloads.
  @df [17, 18]
  # Bits to skip to reach the ME field body: DF(5) + CA(3) + ICAO(24) + TC(5).
  @head 37

  defstruct [:df, :tc, :ca, :icao, :msg, :pi, :crc, :type_msg, :time]

  @doc """
  Decodes a 14-byte (112 bit) extended squitter frame received at `time`.

  Verifies the parity field against the computed checksum, recovers the ICAO
  address, and dispatches on the 5-bit type code to decode the type-specific
  message body.
  """
  def decode(time, <<df::5, ca::3, _icao::3-bytes, data::7-bytes, pi::24-unsigned>> = msg)
      when byte_size(msg) == 14 and df in @df do
    checksum = ModeS.checksum(msg, 112)
    {:ok, icao_address} = ModeS.icao_address(msg, checksum)

    <<tc::5, _rest::bits>> = data
    type = TypeCode.decode(tc)

    StatsTracker.count({:df, df, :decoded})

    %__MODULE__{
      time: time,
      df: df,
      tc: type,
      type_msg: decode_type(type, msg),
      ca: ca,
      icao: icao_address,
      msg: msg,
      pi: pi,
      crc: if(checksum == pi, do: :valid, else: :invalid)
    }
  end

  # Fallback clause: frame has a matching DF but the wrong size/shape.
  def decode(time, <<df::5, _::bits>> = msg) when df in @df do
    StatsTracker.count({:df, df, :decode_failed})
    Logger.warn("Unrecognized ADS-B message (df #{inspect(df)}: #{inspect(msg)}")
    %__MODULE__{df: df, time: time, msg: msg}
  end

  @doc """
  Decode the aircraft identification message.
  """
  def decode_type({:aircraft_id, tc}, <<_::@head, cat::3, cs::6-bytes, _::binary>>) do
    # Callsign is eight 6-bit characters; trailing spaces are trimmed.
    callsign = for(<<c::6 <- cs>>, into: <<>>, do: Callsign.character(c)) |> String.trim()
    %{aircraft_cat: AircraftCategory.decode(tc, cat), callsign: callsign}
  end

  @doc """
  Decode the airborne position message.
  """
  def decode_type({alt_type, tc}, msg)
      when alt_type in [:airborne_pos_baro_alt, :airborne_pos_gnss_height] do
    <<_::@head, ss::2, nicsb::1, alt_bin::12-bits, t::1, f::1, lat_cpr::17, lon_cpr::17,
      _::binary>> = msg

    <<alt_a::7, alt_q::1, alt_b::4>> = alt_bin

    alt =
      if alt_q == 1 do
        # Q = 1: altitude is a binary count of 25 ft increments (Q bit
        # removed), offset by -1000 ft.
        <<n::11>> = <<alt_a::7, alt_b::4>>
        n * 25 - 1000
      else
        # Q = 0: altitude is Gillham (Gray) coded.  The 12-bit AC field is
        # C1 A1 C2 A2 C4 A4 B1 Q B2 D2 B4 D4; rearrange to D2 D4 A1 A2 A4
        # B1 B2 B4 C1 C2 C4 before conversion.
        # NOTE(review): this branch was reconstructed from the standard Mode S
        # AC field layout (the original text was corrupted); confirm against
        # ModeS.gillham_altitude/1 expectations.
        <<c1::1, a1::1, c2::1, a2::1, c4::1, a4::1, b1::1, _::1, b2::1, d2::1, b4::1, d4::1>> =
          alt_bin

        <<n::11>> =
          <<d2::1, d4::1, a1::1, a2::1, a4::1, b1::1, b2::1, b4::1, c1::1, c2::1, c4::1>>

        ModeS.gillham_altitude(n)
      end

    %AirbornePosition{
      tc: tc,
      ss: ss,
      nic_sb: nicsb,
      alt: alt,
      alt_type: alt_type,
      utc_time: to_bool(t),
      flag: f,
      lat_cpr: lat_cpr,
      lon_cpr: lon_cpr
    }
    |> assign_nic
  end

  @doc """
  Decode the airborne velocity message (ground speed, true heading)

  | MSG Bits | DATA Bits | Len | Abbr   | Content                    |
  |----------|-----------|-----|--------|----------------------------|
  | 33-37    | 1-5       | 5   | TC     | Type code                  |
  | 38-40    | 6-8       | 3   | ST     | Subtype                    |
  | 41       | 9         | 1   | IC     | Intent change flag         |
  | 42       | 10        | 1   | RESV_A | Reserved-A                 |
  | 43-45    | 11-13     | 3   | NAC    | Velocity uncertainty (NAC) |
  | 46       | 14        | 1   | S_ew   | East-West velocity sign    |
  | 47-56    | 15-24     | 10  | V_ew   | East-West velocity         |
  | 57       | 25        | 1   | S_ns   | North-South velocity sign  |
  | 58-67    | 26-35     | 10  | V_ns   | North-South velocity       |
  | 68       | 36        | 1   | VrSrc  | Vertical rate source       |
  | 69       | 37        | 1   | S_vr   | Vertical rate sign         |
  | 70-78    | 38-46     | 9   | Vr     | Vertical rate              |
  | 79-80    | 47-48     | 2   | RESV_B | Reserved-B                 |
  | 81       | 49        | 1   | S_Dif  | Diff from baro alt, sign   |
  | 82-88    | 50-66     | 7   | Dif    | Diff from baro alt         |

  Source: http://adsb-decode-guide.readthedocs.io/en/latest/content/airborne-velocity.html
  """
  def decode_type(:air_velocity, <<_::37, sub::3, body::binary>>) when sub in [1, 2] do
    <<ic::1, _resv_a::1, nac::3, s_ew::1, v_ew::10, s_ns::1, v_ns::10, vrsrc::1, s_vr::1, vr::9,
      _resv_b::2, s_dif::1, dif::7, _::binary>> = body

    {velocity, heading} = calculate_vector(s_ew, s_ns, v_ew, v_ns, sub)

    %GroundSpeed{
      intent_change: ic == 1,
      nac: nac,
      heading: heading,
      velocity_kt: velocity,
      vert_rate_src: vert_rate_source(vrsrc),
      vert_rate: vert_rate(vr, s_vr),
      geo_delta: geo_delta(dif, s_dif),
      supersonic: sub == 2
    }
  end

  @doc """
  Decode the airborne velocity message (air speed, magnetic heading)

  | MSG Bits | DATA Bits | Len | Abbr   | Content                        |
  |----------|-----------|-----|--------|--------------------------------|
  | 33-37    | 1-5       | 5   | TC     | Type code                      |
  | 38-40    | 6-8       | 3   | ST     | Subtype                        |
  | 41       | 9         | 1   | IC     | Intent change flag             |
  | 42       | 10        | 1   | RESV_A | Reserved-A                     |
  | 43-45    | 11-13     | 3   | NAC    | Velocity uncertainty (NAC)     |
  | 46       | 14        | 1   | S_hdg  | Heading status                 |
  | 47-56    | 15-24     | 10  | Hdg    | Heading (proportion)           |
  | 57       | 25        | 1   | AS-t   | Airspeed Type                  |
  | 58-67    | 26-35     | 10  | AS     | Airspeed                       |
  | 68       | 36        | 1   | VrSrc  | Vertical rate source           |
  | 69       | 37        | 1   | S_vr   | Vertical rate sign             |
  | 70-78    | 38-46     | 9   | Vr     | Vertical rate                  |
  | 79-80    | 47-48     | 2   | RESV_B | Reserved-B                     |
  | 81       | 49        | 1   | S_Dif  | Difference from baro alt, sign |
  | 82-88    | 50-66     | 7   | Dif    | Difference from baro alt       |

  Source: http://adsb-decode-guide.readthedocs.io/en/latest/content/airborne-velocity.html
  """
  def decode_type(:air_velocity, <<_::37, sub::3, body::binary>>) when sub in [3, 4] do
    <<ic::1, _resv_a::1, nac::3, s_hdg::1, hdg::10, as_t::1, as::10, vrsrc::1, s_vr::1, vr::9,
      _resv_b::2, s_dif::1, dif::7, _::binary>> = body

    # Heading is only valid when the status bit is set; it is stored as a
    # proportion of a full circle in 1024 steps.
    heading =
      if s_hdg == 1 do
        trunc(:erlang.float(hdg) / 1024.0 * 360.0)
      else
        nil
      end

    %AirSpeed{
      intent_change: ic == 1,
      nac: nac,
      heading: heading,
      velocity_kt: as,
      airspeed_type: if(as_t == 1, do: :true_speed, else: :indicated_speed),
      vert_rate: vert_rate(vr, s_vr),
      vert_rate_src: vert_rate_source(vrsrc),
      geo_delta: geo_delta(dif, s_dif),
      supersonic: sub == 4
    }
  end

  # Catch-all for type codes this decoder does not (yet) handle.
  def decode_type(_type, _msg) do
    # Logger.debug "Missed parsing #{inspect type}: #{inspect msg}"
    %{}
  end

  @doc """
  Decode velocity and heading.
  """
  def calculate_vector(sign_ew, sign_ns, v_ew, v_ns, sub) do
    import :math

    # Supersonic subtype (2) encodes velocities in 4 kt units.
    unit = if sub == 2, do: 4, else: 1
    vel_ew = apply_sign(v_ew - 1, sign_ew) * unit
    vel_ns = apply_sign(v_ns - 1, sign_ns) * unit

    # NOTE(review): the + 0.5 terms look like round-to-nearest adjustments
    # before trunc/1; the one *inside* sqrt is suspicious (usually applied
    # after the root) — preserved as-is pending confirmation.
    v = sqrt(pow(vel_ew, 2) + pow(vel_ns, 2) + 0.5)
    h = atan2(vel_ew, vel_ns) * 180.0 / pi() + 0.5
    h = if h < 0, do: h + 360, else: h
    {trunc(v), trunc(h)}
  end

  @doc """
  Decode vertical rate.

  A raw value of 0 means "no information"; 1 means level flight (0 ft/min);
  anything else is in 64 ft/min units with an explicit sign bit.
  """
  def vert_rate(0, _sign),
    do: nil

  def vert_rate(1, _sign),
    do: 0

  def vert_rate(raw_vr, sign),
    do: apply_sign(raw_vr * 64, sign)

  @doc """
  Decode vertical rate source.
  """
  def vert_rate_source(vrsrc),
    do: if(vrsrc == 0, do: :geo, else: :baro)

  @doc """
  Decode the Navigational Integrity Category (NIC)

  | TC | SBnic                    | NIC | Rc                 |
  |----|--------------------------|-----|--------------------|
  | 9  | 0                        | 11  | < 7.5 m            |
  | 10 | 0                        | 10  | < 25 m             |
  | 11 | 1                        | 9   | < 74 m             |
  | 11 | 0                        | 8   | < 0.1 NM (185 m)   |
  | 12 | 0                        | 7   | < 0.2 NM (370 m)   |
  | 13 | 1 (NIC Supplement-A = 0) | 6   | < 0.3 NM (556 m)   |
  | 13 | 0                        | 6   | < 0.5 NM (925 m)   |
  | 13 | 1 (NIC Supplement-A = 1) | 6   | < 0.6 NM (1111 m)  |
  | 14 | 0                        | 5   | < 1.0 NM (1852 m)  |
  | 15 | 0                        | 4   | < 2 NM (3704 m)    |
  | 16 | 1                        | 3   | < 4 NM (7408 m)    |
  | 16 | 0                        | 2   | < 8 NM (14.8 km)   |
  | 17 | 0                        | 1   | < 20 NM (37.0 km)  |
  | 18 | 0                        | 0   | > 20 NM or Unknown |

  Source: https://adsb-decode-guide.readthedocs.io/en/latest/content/nicnac.html
  """
  def assign_nic(position) do
    {nic, rc} =
      case position do
        %{tc: 9, nic_sb: 0} ->
          {11, %{limit: 7.5, unit: :m}}

        %{tc: 10, nic_sb: 0} ->
          {10, %{limit: 25, unit: :m}}

        %{tc: 11, nic_sb: 1} ->
          {9, %{limit: 74, unit: :m}}

        %{tc: 11, nic_sb: 0} ->
          {8, %{limit: 0.1, unit: :NM}}

        %{tc: 12, nic_sb: 0} ->
          {7, %{limit: 0.2, unit: :NM}}

        %{tc: 13, nic_sb: 1, nic_sa: 0} ->
          {6, %{limit: 0.3, unit: :NM}}

        %{tc: 13, nic_sb: 0} ->
          {6, %{limit: 0.5, unit: :NM}}

        %{tc: 13, nic_sb: 1, nic_sa: 1} ->
          {6, %{limit: 0.6, unit: :NM}}

        %{tc: 14, nic_sb: 0} ->
          {5, %{limit: 1.0, unit: :NM}}

        %{tc: 15, nic_sb: 0} ->
          {4, %{limit: 2, unit: :NM}}

        %{tc: 16, nic_sb: 1} ->
          {3, %{limit: 4, unit: :NM}}

        %{tc: 16, nic_sb: 0} ->
          {2, %{limit: 8, unit: :NM}}

        %{tc: 17, nic_sb: 0} ->
          {1, %{limit: 20, unit: :NM}}

        %{tc: 18, nic_sb: 0} ->
          {0, %{limit: :unknown}}

        _ ->
          {:unknown, :unknown}
      end

    %{position | nic: nic, rc: rc}
  end

  @doc """
  Decode the delta between barometric altitude and geometric altitude.

  If the result is positive, geometric altitude is above barometric altitude.
  If the result is negative, geometric altitude is below barometric altitude.
  If the result is nil, no delta information is available.
  """
  def geo_delta(0, _sign),
    do: nil

  def geo_delta(1, _sign),
    do: 0

  def geo_delta(raw_delta, sign),
    do: apply_sign(raw_delta * 25, sign)

  # Private Helpers

  defp apply_sign(value, sign) do
    # When the sign bit is 1, the value is negative.
    if sign == 1, do: -value, else: value
  end
end
|
squitter/lib/squitter/decoding/ext_squitter/decoder.ex
| 0.520009
| 0.405684
|
decoder.ex
|
starcoder
|
defmodule Zaryn.Governance.Code.CICD.Docker do
@moduledoc """
CICD service baked by docker.
The service relies on the `Dockerfile` with two targets: `zaryn-ci` and
`zaryn-cd`.
The `zaryn-ci` target produces an image with build tools. Its goal is to
compile the source code into `zaryn_node` release. The CI part is powered by
`scripts/proposal_ci_job.sh`. The script runs in a container named
`zaryn-prop-{address}`, it produces: release upgrade of `zaryn_node`, new
version of `zaryn-proposal-validator`, and combined log of application of a
code proposal to the source code, execution of unit tests, and log from
linter. The log can be obtained with `docker logs`, the release upgrade and
the validator with `docker cp`, after that the container can be disposed.
The `zaryn-cd` target produces an image capable of running `zaryn_node`
release.
"""
use Supervisor
require Logger
alias Zaryn.Testnet
alias Zaryn.Testnet.Subnet
alias Zaryn.JobCache
alias Zaryn.JobConductor
alias Zaryn.Governance.Code.CICD
alias Zaryn.Governance.Code.Proposal
import Supervisor, only: [child_spec: 2]
@behaviour CICD
# Names for the cached image-build jobs and the two job conductors that
# serialize (limit: 2) concurrent CI/CD runs.
@ci_image __MODULE__.CIImage
@cd_image __MODULE__.CDImage
@ci_conductor __MODULE__.CIConductor
@cd_conductor __MODULE__.CDConductor
# Starts this module as a named supervisor.
def start_link(args \\ []) do
Supervisor.start_link(__MODULE__, args, name: __MODULE__)
end
@impl Supervisor
# Supervises two JobCache children (lazy, memoized docker image builds) and
# two JobConductor children (bounded job execution) under one_for_one.
def init(_args) do
children = [
child_spec({JobCache, name: @ci_image, function: &build_ci_image/0}, id: @ci_image),
child_spec({JobCache, name: @cd_image, function: &build_cd_image/0}, id: @cd_image),
child_spec({JobConductor, name: @ci_conductor, limit: 2}, id: @ci_conductor),
child_spec({JobConductor, name: @cd_conductor, limit: 2}, id: @cd_conductor)
]
Supervisor.init(children, strategy: :one_for_one)
end
@impl CICD
# Runs CI for a proposal inside the zaryn-ci image; raises "CI failed" on error.
def run_ci!(prop = %Proposal{}) do
run!(prop, @ci_image, @ci_conductor, &do_run_docker_ci/1, "CI failed")
end
@impl CICD
# Runs a throwaway testnet upgrade for a proposal using the zaryn-cd image.
def run_testnet!(prop = %Proposal{}) do
run!(prop, @cd_image, @cd_conductor, &do_run_docker_testnet/1, "CD failed")
end
@impl CICD
# No cleanup needed here; containers are removed by the CI/CD flows themselves.
def clean(_address), do: :ok
@impl CICD
# Fetches the combined CI log from the proposal's (possibly stopped) container.
def get_log(address) when is_binary(address) do
case System.cmd("docker", ["logs", container_name(address)]) do
{res, 0} -> {:ok, res}
err -> {:error, err}
end
end
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ [CICD] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Source tree location, frozen at compile time by design (compile_env).
@src_dir Application.compile_env(:zaryn, :src_dir)
@cmd_options [stderr_to_stdout: true, into: IO.stream(:stdio, :line), cd: @src_dir]
# Thin wrapper over the docker CLI; output is streamed to stdio by default.
defp docker(args, opts \\ @cmd_options), do: System.cmd("docker", args, opts)
# Deterministic container name derived from the proposal address.
defp container_name(address) when is_binary(address) do
"zaryn-prop-#{Base.encode16(address)}"
end
# Ensures the image is built (via JobCache), then conducts `func` on the
# proposal; any failure is logged and re-raised as `exception`.
defp run!(prop = %Proposal{address: address}, image, conductor, func, exception) do
with :ok <- JobCache.get!(image),
{:ok, 0} <- JobConductor.conduct(func, [prop], conductor) do
:ok
else
error ->
Logger.error("#{exception} #{inspect(error)}", address: Base.encode16(address))
raise exception
end
end
# Blocks until the named container exits and returns its exit code.
# Retries `docker wait` until `start_timeout` seconds have elapsed, because
# on a busy host the container may not exist yet when we first ask.
defp docker_wait(name, start_time, start_timeout \\ 10) do
Process.sleep(250)
# Block until one or more containers stop, then print their exit codes
case System.cmd("docker", ["wait", name]) do
{res, 0} ->
res |> Integer.parse() |> elem(0)
{err, _} ->
Logger.warning("docker wait: #{inspect(err)}")
# on a busy host docker may require more time to start a container
if System.monotonic_time(:second) - start_time < start_timeout do
docker_wait(name, start_time, start_timeout)
else
1
end
end
end
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ [CI] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Builds the zaryn-ci image; pattern match asserts a zero exit code.
defp build_ci_image do
{_, 0} = docker(["build", "-t", "zaryn-ci", "--target", "zaryn-ci", "."])
:ok
end
@ci_script "/opt/code/scripts/governance/proposal_ci_job.sh"
# Starts the CI container via a Port (so we can feed the proposal changes on
# stdin), then waits for the container to exit and returns its exit code.
defp do_run_docker_ci(%Proposal{address: address, changes: changes}) do
Logger.info("Verify proposal", address: Base.encode16(address))
name = container_name(address)
args = ["run", "--entrypoint", @ci_script, "-i", "--name", name, "zaryn-ci", name]
port = Port.open({:spawn_executable, System.find_executable("docker")}, [:binary, args: args])
# wait 250 ms or fail sooner
ref = Port.monitor(port)
receive do
{:DOWN, ^ref, :port, ^port, :normal} ->
raise RuntimeError, message: "failed to run: docker #{Enum.join(args, " ")}"
after
250 -> :ok
end
Port.command(port, [changes, "\n"])
Port.close(port)
docker_wait(name, System.monotonic_time(:second))
end
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ [CD] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Builds the zaryn-cd (runtime) image; asserts a zero exit code.
defp build_cd_image do
{_, 0} = docker(["build", "-t", "zaryn-cd", "."])
:ok
end
@validator "/opt/code/zaryn-proposal-validator"
@releases "/opt/code/_build/dev/rel/zaryn_node/releases"
@release "zaryn_node.tar.gz"
# Copies the validator binary and the release tarball produced by the CI
# container into `dir`, to be used by the testnet run.
defp testnet_prepare(dir, address, version) do
ci = container_name(address)
with :ok <- File.mkdir_p!(dir),
{_, 0} <- docker(["cp", "#{ci}:#{@validator}", dir]),
{_, 0} <- docker(["cp", "#{ci}:#{@releases}/#{version}/#{@release}", dir]) do
:ok
else
_ -> :error
end
end
@marker Application.compile_env(:zaryn, :marker)
# Orchestrates a full testnet validation run: prepare artifacts, start a
# docker-compose testnet, wait for the validator's marker, hot-upgrade all
# nodes, signal the validator to continue, and wait for its exit code.
# Returns 0 on success, 1 on any failure; always cleans up the temp dir.
defp do_run_docker_testnet(%Proposal{address: address, version: version}) do
address_encoded = Base.encode16(address)
Logger.info("Running proposal", address: address_encoded)
dir = temp_dir("utn-#{address_encoded}-")
seeds = 1..5 |> Enum.map(&"node#{&1}")
compose_prefix = Path.basename(dir)
validator_container = "#{compose_prefix}_validator_1"
validator_continue = ["ash", "-c", "echo 'yes' > /proc/1/fd/0"]
# XXX use of internal knowledge about testnet
nodes = 1..length(seeds) |> Enum.map(&"#{compose_prefix}_node#{&1}_1")
with :ok <- Logger.info("#{dir} Prepare", address: address_encoded),
:ok <- testnet_prepare(dir, address, version),
:ok <- Logger.info("#{dir} Start", address: address_encoded),
{_, 0} <- testnet_start(dir, seeds),
# wait until the validator is ready for upgrade
:ok <- Logger.info("#{dir} Part I", address: address_encoded),
{:ok, _} <- wait_for_marker(validator_container, @marker),
:ok <- Logger.info("#{dir} Upgrade", address: address_encoded),
true <- testnet_upgrade(dir, nodes, version),
:ok <- Logger.info("#{dir} Part II", address: address_encoded),
{_, 0} <- docker_exec(validator_container, validator_continue),
0 <- docker_wait(validator_container, System.monotonic_time(:second)) do
testnet_cleanup(dir, 0, address_encoded)
else
_ ->
testnet_cleanup(dir, 1, address_encoded)
end
end
# Tears down the compose stack and removes the temp dir; passes `code` through.
defp testnet_cleanup(dir, code, address_encoded) do
Logger.info("#{dir} Cleanup", address: address_encoded)
System.cmd("docker-compose", ["-f", compose_file(dir), "down"], @cmd_options)
File.rm_rf!(dir)
code
end
# Copies the release into every container and triggers a hot upgrade,
# concurrently (Task.async_stream). Traps exits for the duration so a raised
# task does not kill the caller. Returns true only if all containers upgraded.
defp testnet_upgrade(dir, containers, version) do
dst = "/opt/app/releases/#{version}"
rel = Path.join(dir, @release)
trap_exit = Process.flag(:trap_exit, true)
result =
containers
|> Task.async_stream(
fn c ->
with {_, 0} <- docker(["exec", c, "mkdir", "-p", "#{dst}"]),
{_, 0} <- docker(["cp", rel, "#{c}:#{dst}/#{@release}"]),
{_, 0} <- docker(["exec", c, "./bin/zaryn_node", "upgrade", version]) do
:ok
else
error ->
Logger.error("Upgrade failed #{inspect(error)}")
raise "Upgrade failed"
end
end,
timeout: 30_000,
ordered: false
)
|> Enum.into([])
|> Enum.all?(&(elem(&1, 0) == :ok))
Process.flag(:trap_exit, trap_exit)
result
end
@subnet "172.16.100.0/24"
# Tries up to 123 successive subnets until `docker-compose up` succeeds —
# presumably to dodge subnet collisions with other concurrent testnets
# (NOTE(review): confirm intent).
defp testnet_start(dir, seeds) do
compose = compose_file(dir)
options = [image: "zaryn-cd", dir: dir, src: @src_dir, persist: false]
Stream.iterate(@subnet, &Subnet.next/1)
|> Stream.take(123)
|> Stream.map(fn subnet ->
testnet = Testnet.from(seeds, Keyword.put(options, :subnet, subnet))
with :ok <- Testnet.create!(testnet, dir) do
System.cmd("docker-compose", ["-f", compose, "up", "-d"], @cmd_options)
end
end)
|> Stream.filter(&(elem(&1, 1) == 0))
|> Enum.at(0)
end
# Follows the container's log in a Task until a line starting with `marker`
# appears; returns {:ok, line} or {:error, reason} if `timeout` ms elapse.
defp wait_for_marker(container_name, marker, timeout \\ 600_000) do
args = ["logs", container_name, "--follow", "--tail", "10"]
opts = [:binary, :use_stdio, :stderr_to_stdout, line: 8192, args: args]
task =
Task.async(fn ->
{:spawn_executable, System.find_executable("docker")}
|> Port.open(opts)
|> wait_for_marker_loop(marker)
end)
try do
{:ok, Task.await(task, timeout)}
catch
:exit, err ->
Task.shutdown(task)
{:error, err}
end
end
# Receive loop over the log Port: returns the first line starting with the
# marker; logs and skips everything else.
defp wait_for_marker_loop(port, marker) do
receive do
{^port, {:data, {:eol, line}}} ->
Logger.debug(line)
if String.starts_with?(line, marker) do
line
else
wait_for_marker_loop(port, marker)
end
{^port, other} ->
Logger.warning("Received #{inspect(other)} while reading container logs")
wait_for_marker_loop(port, marker)
end
end
defp compose_file(dir), do: Path.join(dir, "docker-compose.json")
defp docker_exec(container_name, cmd), do: docker(["exec", container_name] ++ cmd)
# Generates a unique-enough temp path from the current timestamp and
# scheduler id (hash avoids very long names).
defp temp_dir(prefix, tmp \\ System.tmp_dir!()) do
{_mega, sec, micro} = :os.timestamp()
scheduler_id = :erlang.system_info(:scheduler_id)
Path.join(tmp, "#{prefix}#{:erlang.phash2({sec, micro, scheduler_id})}")
end
end
|
lib/zaryn/governance/code/cicd/docker/cicd.ex
| 0.709019
| 0.461623
|
cicd.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.