hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c3f95e3de80da5f84fea2225629286d080d0ba8 | 3,056 | ex | Elixir | lib/lightbridge/energy_monitor.ex | jamesduncombe/lightbridge | c6b5fd54f5495ae12fefc0174ca95ebe2f69a1ce | [
"MIT"
] | null | null | null | lib/lightbridge/energy_monitor.ex | jamesduncombe/lightbridge | c6b5fd54f5495ae12fefc0174ca95ebe2f69a1ce | [
"MIT"
] | null | null | null | lib/lightbridge/energy_monitor.ex | jamesduncombe/lightbridge | c6b5fd54f5495ae12fefc0174ca95ebe2f69a1ce | [
"MIT"
defmodule Lightbridge.EnergyMonitor do
  @moduledoc """
  Polls the smart socket to get its current energy usage.

  Publishes to configured MQTT endpoint.
  """

  use GenServer

  import Lightbridge.Config, only: [fetch: 1]

  alias Lightbridge.Hs100
  alias Lightbridge.EnergyMonitor.Stats

  # How often (in milliseconds) to poll for energy stats.
  @poll_frequency 15 * 1_000

  @doc """
  Starts the monitor as a locally registered singleton process.
  """
  def start_link(_opts) do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  @impl true
  def init(_args) do
    # Schedule the first poll; subsequent polls are rescheduled in handle_info/2.
    Process.send_after(self(), :poll, @poll_frequency)

    # MQTT connection details are read from config once at startup.
    {:ok, %{client_id: fetch(:mqtt_client_id), energy_topic: fetch(:mqtt_energy_topic)}}
  end

  @doc """
  Polls the energy stats then sends them up to the MQTT broker.

  Each stat is published on its own subtopic under `energy_topic`,
  e.g. `<energy_topic>/power_mw`.
  """
  @spec poll(client_id :: String.t(), energy_topic :: String.t()) :: any()
  def poll(client_id, energy_topic) do
    # Get the energy stats, parse them into a `Stats` struct, then publish
    # each field concurrently as its own MQTT message.
    # TODO: This seems quite tightly coupled together...
    tasks =
      Hs100.get_energy()
      |> parse_energy_stats()
      |> Map.from_struct()
      |> Enum.map(fn {path, stat} ->
        Task.async(
          Tortoise,
          :publish,
          [client_id, "#{energy_topic}/#{path}", to_string(stat), [qos: 0]]
        )
      end)

    # Block until every publish task has finished (up to 2 seconds).
    Task.await_many(tasks, _wait_for = 2_000)
  end

  @impl true
  def handle_info(:poll, %{client_id: client_id, energy_topic: energy_topic} = state) do
    poll(client_id, energy_topic)

    # Poll ourselves again in `@poll_frequency` milliseconds
    Process.send_after(self(), :poll, @poll_frequency)

    {:noreply, state}
  end

  @doc """
  Takes the raw energy stats JSON and parses it into a `Stats` struct.
  """
  @spec parse_energy_stats(stats :: String.t()) :: Stats.t()
  def parse_energy_stats(stats) do
    # Decode the JSON payload; a malformed payload crashes the caller on
    # purpose (let-it-crash — the supervisor restarts the monitor).
    {:ok, parsed_energy_stats} = Jason.decode(stats, keys: :atoms)

    # Get the stats from the nested structure
    parsed_energy_stats
    |> get_in([:emeter, :get_realtime])
    |> Stats.new()
  end

  defmodule Stats do
    @moduledoc """
    Stats encapsulates the data coming back from the smart socket.
    """

    defstruct current_ma: 0, err_code: 0, power_mw: 0, total_wh: 0, voltage_mv: 0

    @typedoc """
    Represents the Stats struct.
    """
    @type t :: %__MODULE__{
            current_ma: pos_integer(),
            err_code: integer(),
            power_mw: pos_integer(),
            total_wh: pos_integer(),
            voltage_mv: pos_integer()
          }

    @doc """
    Takes in a map of energy stats and returns a new `t()`.
    """
    @spec new(energy_stats :: map()) :: t()
    def new(energy_stats) do
      struct(__MODULE__, energy_stats)
    end
  end

  defimpl String.Chars, for: Stats do
    def to_string(energy_stats) do
      ~s"""
      Power (mW): #{energy_stats.power_mw}
      Voltage (mV): #{energy_stats.voltage_mv}
      Current (mA): #{energy_stats.current_ma}
      Total WH: #{energy_stats.total_wh}
      """
    end
  end
end
| 27.044248 | 88 | 0.638743 |
1c3fa4ca5d55edffc1cd43f2c8c43a747b43948e | 1,674 | exs | Elixir | config/dev.exs | J3RN/elixir-formatter | d68d496b1ec204035774d10e56acb9926bb46284 | [
"BSD-3-Clause"
] | 13 | 2017-12-01T13:27:27.000Z | 2022-02-07T03:30:07.000Z | config/dev.exs | J3RN/elixir-formatter | d68d496b1ec204035774d10e56acb9926bb46284 | [
"BSD-3-Clause"
] | 7 | 2021-03-08T17:10:38.000Z | 2021-08-31T16:28:30.000Z | config/dev.exs | J3RN/elixir-formatter | d68d496b1ec204035774d10e56acb9926bb46284 | [
"BSD-3-Clause"
] | 5 | 2017-12-20T09:01:27.000Z | 2020-05-03T20:24:46.000Z | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
# Here, `npm run watch` rebuilds frontend assets on change.
config :elixir_formatter, ElixirFormatterWeb.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [npm: ["run", "watch"]]

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
#     openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
#     https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Watch static assets and templates for browser live-reloading.
config :elixir_formatter, ElixirFormatterWeb.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
      ~r{priv/gettext/.*(po)$},
      ~r{lib/elixir_formatter_web/views/.*(ex)$},
      ~r{lib/elixir_formatter_web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs.
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace depth during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 34.163265 | 170 | 0.718041 |
1c40552e3deeb2c0514a98953138875a50ecb5eb | 67,667 | ex | Elixir | lib/elixir/lib/module.ex | fertapric/elixir | 9df2216670493aa30f37681cc812f3192adfe55a | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module.ex | fertapric/elixir | 9df2216670493aa30f37681cc812f3192adfe55a | [
"Apache-2.0"
] | 1 | 2019-04-25T12:52:49.000Z | 2019-04-25T13:27:31.000Z | lib/elixir/lib/module.ex | fertapric/elixir | 9df2216670493aa30f37681cc812f3192adfe55a | [
"Apache-2.0"
] | null | null | null | defmodule Module do
@moduledoc ~S'''
Provides functions to deal with modules during compilation time.
It allows a developer to dynamically add, delete and register
attributes, attach documentation and so forth.
After a module is compiled, using many of the functions in
this module will raise errors, since it is out of their scope
to inspect runtime data. Most of the runtime data can be inspected
via the [`__info__/1`](`c:Module.__info__/1`) function attached to
each compiled module.
## Module attributes
Each module can be decorated with one or more attributes. The following ones
are currently defined by Elixir:
### `@after_compile`
A hook that will be invoked right after the current module is compiled.
Accepts a module or a `{module, function_name}`. See the "Compile callbacks"
section below.
### `@before_compile`
A hook that will be invoked before the module is compiled.
Accepts a module or a `{module, function_or_macro_name}` tuple.
See the "Compile callbacks" section below.
### `@behaviour`
Note the British spelling!
Behaviours can be referenced by modules to ensure they implement
required specific function signatures defined by `@callback`.
For example, you could specify a `URI.Parser` behaviour as follows:
defmodule URI.Parser do
@doc "Defines a default port"
@callback default_port() :: integer
@doc "Parses the given URL"
@callback parse(uri_info :: URI.t()) :: URI.t()
end
And then a module may use it as:
defmodule URI.HTTP do
@behaviour URI.Parser
def default_port(), do: 80
def parse(info), do: info
end
If the behaviour changes or `URI.HTTP` does not implement
one of the callbacks, a warning will be raised.
### `@impl`
To aid in the correct implementation of behaviours, you may optionally declare
`@impl` for implemented callbacks of a behaviour. This makes callbacks
explicit and can help you to catch errors in your code. The compiler will warn
in these cases:
* if you mark a function with `@impl` when that function is not a callback.
* if you don't mark a function with `@impl` when other functions are marked
with `@impl`. If you mark one function with `@impl`, you must mark all
other callbacks for that behaviour as `@impl`.
`@impl` works on a per-context basis. If you generate a function through a macro
and mark it with `@impl`, that won't affect the module where that function is
generated in.
`@impl` also helps with maintainability by making it clear to other developers
that the function is implementing a callback.
Using `@impl`, the example above can be rewritten as:
defmodule URI.HTTP do
@behaviour URI.Parser
@impl true
def default_port(), do: 80
@impl true
def parse(info), do: info
end
You may pass either `false`, `true`, or a specific behaviour to `@impl`.
defmodule Foo do
@behaviour Bar
@behaviour Baz
# Will warn if neither Bar nor Baz specify a callback named bar/0.
@impl true
def bar(), do: :ok
# Will warn if Baz does not specify a callback named baz/0.
@impl Baz
def baz(), do: :ok
end
The code is now more readable, as it is now clear which functions are
part of your API and which ones are callback implementations. To reinforce this
idea, `@impl true` automatically marks the function as `@doc false`, disabling
documentation unless `@doc` is explicitly set.
### `@compile`
Defines options for module compilation. This is used to configure
both Elixir and Erlang compilers, as any other compilation pass
added by external tools. For example:
defmodule MyModule do
@compile {:inline, my_fun: 1}
def my_fun(arg) do
to_string(arg)
end
end
Multiple uses of `@compile` will accumulate instead of overriding
previous ones. See the "Compile options" section below.
### `@deprecated`
Provides the deprecation reason for a function. For example:
defmodule Keyword do
@deprecated "Use Kernel.length/1 instead"
def size(keyword) do
length(keyword)
end
end
The Mix compiler automatically looks for calls to deprecated modules
and emits warnings during compilation, computed via `mix xref warnings`.
Using the `@deprecated` attribute will also be reflected in the
documentation of the given function and macro. You can choose between
the `@deprecated` attribute and the documentation metadata to provide
hard-deprecations (with warnings) and soft-deprecations (without warnings):
This is a soft-deprecation as it simply annotates the documentation
as deprecated:
@doc deprecated: "Use Kernel.length/1 instead"
def size(keyword)
This is a hard-deprecation as it emits warnings and annotates the
documentation as deprecated:
@deprecated "Use Kernel.length/1 instead"
def size(keyword)
Currently `@deprecated` only supports functions and macros. However
you can use the `:deprecated` key in the annotation metadata to
annotate the docs of modules, types and callbacks too.
We recommend using this feature with care, especially library authors.
Deprecating code always pushes the burden towards library users. We
also recommend for deprecated functionality to be maintained for long
periods of time, even after deprecation, giving developers plenty of
time to update (except for cases where keeping the deprecated API is
undesired, such as in the presence of security issues).
### `@doc` and `@typedoc`
Provides documentation for the entity that follows the attribute.
`@doc` is to be used with a function, macro, callback, or
macrocallback, while `@typedoc` with a type (public or opaque).
Accepts a string (often a heredoc) or `false` where `@doc false` will
make the entity invisible to documentation extraction tools like
[`ExDoc`](https://hexdocs.pm/ex_doc/). For example:
defmodule MyModule do
@typedoc "This type"
@typedoc since: "1.1.0"
@type t :: term
@doc "Hello world"
@doc since: "1.1.0"
def hello do
"world"
end
@doc """
Sums `a` to `b`.
"""
def sum(a, b) do
a + b
end
end
As can be seen in the example above, `@doc` and `@typedoc` also accept
a keyword list that serves as a way to provide arbitrary metadata
about the entity. Tools like [`ExDoc`](https://hexdocs.pm/ex_doc/) and
`IEx` may use this information to display annotations. A common use
case is `since` that may be used to annotate in which version the
function was introduced.
As illustrated in the example, it is possible to use these attributes
more than once before an entity. However, the compiler will warn if
used twice with binaries as that replaces the documentation text from
the preceding use. Multiple uses with keyword lists will merge the
lists into one.
Note that since the compiler also defines some additional metadata,
there are a few reserved keys that will be ignored and warned if used.
Currently these are: `:opaque` and `:defaults`.
Once this module is compiled, this information becomes available via
the `Code.fetch_docs/1` function.
### `@dialyzer`
Defines warnings to request or suppress when using a version of
`:dialyzer` that supports module attributes.
Accepts an atom, a tuple, or a list of atoms and tuples. For example:
defmodule MyModule do
@dialyzer {:nowarn_function, my_fun: 1}
def my_fun(arg) do
M.not_a_function(arg)
end
end
For the list of supported warnings, see
[`:dialyzer` module](http://www.erlang.org/doc/man/dialyzer.html).
Multiple uses of `@dialyzer` will accumulate instead of overriding
previous ones.
### `@external_resource`
Specifies an external resource for the current module.
Sometimes a module embeds information from an external file. This
attribute allows the module to annotate which external resources
have been used.
Tools like Mix may use this information to ensure the module is
recompiled in case any of the external resources change.
### `@file`
Changes the filename used in stacktraces for the function or macro that
follows the attribute, such as:
defmodule MyModule do
@doc "Hello world"
@file "hello.ex"
def hello do
"world"
end
end
### `@moduledoc`
Provides documentation for the current module.
defmodule MyModule do
@moduledoc """
A very useful module.
"""
@moduledoc authors: ["Alice", "Bob"]
end
Accepts a string (often a heredoc) or `false` where `@moduledoc false`
will make the module invisible to documentation extraction tools like
[`ExDoc`](https://hexdocs.pm/ex_doc/).
Similarly to `@doc` also accepts a keyword list to provide metadata
about the module. For more details, see the documentation of `@doc`
above.
Once this module is compiled, this information becomes available via
the `Code.fetch_docs/1` function.
### `@on_definition`
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
Accepts a module or a `{module, function_name}` tuple. See the
"Compile callbacks" section below.
### `@on_load`
A hook that will be invoked whenever the module is loaded.
Accepts the function name (as an atom) of a function in the current module or
`{function_name, 0}` tuple where `function_name` is the name of a function in
the current module. The function must be public and have an arity of 0 (no
arguments). If the function does not return `:ok`, the loading of the module
will be aborted. For example:
defmodule MyModule do
@on_load :load_check
def load_check do
if some_condition() do
:ok
else
:abort
end
end
def some_condition do
false
end
end
Modules compiled with HiPE would not call this hook.
### `@vsn`
Specify the module version. Accepts any valid Elixir value, for example:
defmodule MyModule do
@vsn "1.0"
end
### Typespec attributes
The following attributes are part of typespecs and are also built-in in
Elixir:
* `@type` - defines a type to be used in `@spec`
* `@typep` - defines a private type to be used in `@spec`
* `@opaque` - defines an opaque type to be used in `@spec`
* `@spec` - provides a specification for a function
* `@callback` - provides a specification for a behaviour callback
* `@macrocallback` - provides a specification for a macro behaviour callback
* `@optional_callbacks` - specifies which behaviour callbacks and macro
behaviour callbacks are optional
* `@impl` - declares an implementation of a callback function or macro
### Custom attributes
In addition to the built-in attributes outlined above, custom attributes may
also be added. Custom attributes are expressed using the `@/1` operator followed
by a valid variable name. The value given to the custom attribute must be a valid
Elixir value:
defmodule MyModule do
@custom_attr [some: "stuff"]
end
For more advanced options available when defining custom attributes, see
`register_attribute/3`.
## Compile callbacks
There are three callbacks that are invoked when functions are defined,
as well as before and immediately after the module bytecode is generated.
### `@after_compile`
A hook that will be invoked right after the current module is compiled.
Accepts a module or a `{module, function_name}` tuple. The function
must take two arguments: the module environment and its bytecode.
When just a module is provided, the function is assumed to be
`__after_compile__/2`.
Callbacks registered first will run last.
#### Example
defmodule MyModule do
@after_compile __MODULE__
def __after_compile__(env, _bytecode) do
IO.inspect(env)
end
end
### `@before_compile`
A hook that will be invoked before the module is compiled.
Accepts a module or a `{module, function_or_macro_name}` tuple. The
function/macro must take one argument: the module environment. If
it's a macro, its returned value will be injected at the end of the
module definition before the compilation starts.
When just a module is provided, the function/macro is assumed to be
`__before_compile__/1`.
Callbacks registered first will run last. Any overridable definition
will be made concrete before the first callback runs. A definition may
be made overridable again in another before compile callback and it
will be made concrete one last time after all callbacks run.
*Note*: unlike `@after_compile`, the callback function/macro must
be placed in a separate module (because when the callback is invoked,
the current module does not yet exist).
#### Example
defmodule A do
defmacro __before_compile__(_env) do
quote do
def hello, do: "world"
end
end
end
defmodule B do
@before_compile A
end
B.hello()
#=> "world"
### `@on_definition`
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
Accepts a module or a `{module, function_name}` tuple. The function
must take 6 arguments:
* the module environment
* the kind of the function/macro: `:def`, `:defp`, `:defmacro`, or `:defmacrop`
* the function/macro name
* the list of quoted arguments
* the list of quoted guards
* the quoted function body
Note the hook receives the quoted arguments and it is invoked before
the function is stored in the module. So `Module.defines?/2` will return
`false` for the first clause of every function.
If the function/macro being defined has multiple clauses, the hook will
be called for each clause.
Unlike other hooks, `@on_definition` will only invoke functions and
never macros. This is to avoid `@on_definition` callbacks from
redefining functions that have just been defined in favor of more
explicit approaches.
When just a module is provided, the function is assumed to be
`__on_definition__/6`.
#### Example
defmodule Hooks do
def on_def(_env, kind, name, args, guards, body) do
IO.puts("Defining #{kind} named #{name} with args:")
IO.inspect(args)
IO.puts("and guards")
IO.inspect(guards)
IO.puts("and body")
IO.puts(Macro.to_string(body))
end
end
defmodule MyModule do
@on_definition {Hooks, :on_def}
def hello(arg) when is_binary(arg) or is_list(arg) do
"Hello" <> to_string(arg)
end
def hello(_) do
:ok
end
end
## Compile options
The `@compile` attribute accepts different options that are used by both
Elixir and Erlang compilers. Some of the common use cases are documented
below:
* `@compile :debug_info` - includes `:debug_info` regardless of the
corresponding setting in `Code.compiler_options/1`
* `@compile {:debug_info, false}` - disables `:debug_info` regardless
of the corresponding setting in `Code.compiler_options/1`
* `@compile {:inline, some_fun: 2, other_fun: 3}` - inlines the given
name/arity pairs. Inlining is applied locally, calls from another
module are not affected by this option
* `@compile {:autoload, false}` - disables automatic loading of
modules after compilation. Instead, the module will be loaded after
it is dispatched to
* `@compile {:no_warn_undefined, Mod}` or
`@compile {:no_warn_undefined, {Mod, fun, arity}}` - does not warn if
the given module or the given `Mod.fun/arity` are not defined
You can see a handful more options used by the Erlang compiler in
the documentation for the [`:compile` module](http://www.erlang.org/doc/man/compile.html).
'''
@typep definition :: {atom, arity}
@typep def_kind :: :def | :defp | :defmacro | :defmacrop
@extra_error_msg_defines? "Use Kernel.function_exported?/3 and Kernel.macro_exported?/3 " <>
"to check for public functions and macros instead"
@extra_error_msg_definitions_in "Use the Module.__info__/1 callback to get public functions and macros instead"
@doc """
Provides runtime information about functions, macros, and other information
defined by the module.
Each module gets an `__info__/1` function when it's compiled. The function
takes one of the following items:
* `:attributes` - a keyword list with all persisted attributes
* `:compile` - a list with compiler metadata
* `:functions` - a keyword list of public functions and their arities
* `:macros` - a keyword list of public macros and their arities
* `:md5` - the MD5 of the module
* `:module` - the module atom name
"""
@callback __info__(:attributes) :: keyword()
@callback __info__(:compile) :: [term()]
@callback __info__(:functions) :: keyword()
@callback __info__(:macros) :: keyword()
@callback __info__(:md5) :: binary()
@callback __info__(:module) :: module()
@doc """
Checks if a module is open.
A module is "open" if it is currently being defined and its attributes and
functions can be modified.
"""
@spec open?(module) :: boolean
def open?(module) when is_atom(module) do
:elixir_module.is_open(module)
end
@doc """
Evaluates the quoted contents in the given module's context.
A list of environment options can also be given as argument.
See `Code.eval_string/3` for more information.
Raises an error if the module was already compiled.
## Examples
defmodule Foo do
contents =
quote do
def sum(a, b), do: a + b
end
Module.eval_quoted(__MODULE__, contents)
end
Foo.sum(1, 2)
#=> 3
For convenience, you can pass any `Macro.Env` struct, such
as `__ENV__/0`, as the first argument or as options. Both
the module and all options will be automatically extracted
from the environment:
defmodule Foo do
contents =
quote do
def sum(a, b), do: a + b
end
Module.eval_quoted(__ENV__, contents)
end
Foo.sum(1, 2)
#=> 3
Note that if you pass a `Macro.Env` struct as first argument
while also passing `opts`, they will be merged with `opts`
having precedence.
"""
@spec eval_quoted(module | Macro.Env.t(), Macro.t(), list, keyword | Macro.Env.t()) :: term
def eval_quoted(module_or_env, quoted, binding \\ [], opts \\ [])

# A `Macro.Env` given as the first argument: take the module from it and
# merge the env into the options. Explicit `opts` take precedence.
def eval_quoted(%Macro.Env{} = env, quoted, binding, opts)
    when is_list(binding) and is_list(opts) do
  eval_quoted(env.module, quoted, binding, Keyword.merge(Map.to_list(env), opts))
end

# A `Macro.Env` given as options: convert it to a keyword list.
def eval_quoted(module, quoted, binding, %Macro.Env{} = env)
    when is_atom(module) and is_list(binding) do
  eval_quoted(module, quoted, binding, Map.to_list(env))
end

def eval_quoted(module, quoted, binding, opts)
    when is_atom(module) and is_list(binding) and is_list(opts) do
  # Evaluation is only allowed while the module is still being compiled.
  assert_not_compiled!(__ENV__.function, module)
  # Internal compiler bookkeeping — presumably resets the "last defined
  # function" marker before evaluating new contents; NOTE(review): confirm.
  :elixir_def.reset_last(module)

  {value, binding, _env, _scope} =
    :elixir.eval_quoted(quoted, binding, Keyword.put(opts, :module, module))

  {value, binding}
end
@doc """
Creates a module with the given name and defined by
the given quoted expressions.
The line where the module is defined and its file **must**
be passed as options.
It returns a tuple of shape `{:module, module, binary, term}`
where `module` is the module name, `binary` is the module
byte code and `term` is the result of the last expression in
`quoted`.
Similar to `Kernel.defmodule/2`, the binary will only be
written to disk as a `.beam` file if `Module.create/3` is
invoked in a file that is currently being compiled.
## Examples
contents =
quote do
def world, do: true
end
Module.create(Hello, contents, Macro.Env.location(__ENV__))
Hello.world()
#=> true
## Differences from `defmodule`
`Module.create/3` works similarly to `Kernel.defmodule/2`
and return the same results. While one could also use
`defmodule` to define modules dynamically, this function
is preferred when the module body is given by a quoted
expression.
Another important distinction is that `Module.create/3`
allows you to control the environment variables used
when defining the module, while `Kernel.defmodule/2`
automatically uses the environment it is invoked at.
"""
@spec create(module, Macro.t(), Macro.Env.t() | keyword) :: {:module, module, binary, term}
def create(module, quoted, opts)
def create(module, quoted, %Macro.Env{} = env) when is_atom(module) do
create(module, quoted, Map.to_list(env))
end
def create(module, quoted, opts) when is_atom(module) and is_list(opts) do
unless Keyword.has_key?(opts, :file) do
raise ArgumentError, "expected :file to be given as option"
end
next = :elixir_module.next_counter(nil)
line = Keyword.get(opts, :line, 0)
quoted = :elixir_quote.linify_with_context_counter(line, {module, next}, quoted)
:elixir_module.compile(module, quoted, [], :elixir.env_for_eval(opts))
end
@doc """
Concatenates a list of aliases and returns a new alias.
## Examples
iex> Module.concat([Foo, Bar])
Foo.Bar
iex> Module.concat([Foo, "Bar"])
Foo.Bar
"""
@spec concat([binary | atom]) :: atom
def concat(list) when is_list(list) do
:elixir_aliases.concat(list)
end
@doc """
Concatenates two aliases and returns a new alias.
## Examples
iex> Module.concat(Foo, Bar)
Foo.Bar
iex> Module.concat(Foo, "Bar")
Foo.Bar
"""
@spec concat(binary | atom, binary | atom) :: atom
def concat(left, right)
when (is_binary(left) or is_atom(left)) and (is_binary(right) or is_atom(right)) do
:elixir_aliases.concat([left, right])
end
@doc """
Concatenates a list of aliases and returns a new alias only if the alias
was already referenced.
If the alias was not referenced yet, fails with `ArgumentError`.
It handles charlists, binaries and atoms.
## Examples
iex> Module.safe_concat([Module, Unknown])
** (ArgumentError) argument error
iex> Module.safe_concat([List, Chars])
List.Chars
"""
@spec safe_concat([binary | atom]) :: atom
def safe_concat(list) when is_list(list) do
:elixir_aliases.safe_concat(list)
end
@doc """
Concatenates two aliases and returns a new alias only if the alias was
already referenced.
If the alias was not referenced yet, fails with `ArgumentError`.
It handles charlists, binaries and atoms.
## Examples
iex> Module.safe_concat(Module, Unknown)
** (ArgumentError) argument error
iex> Module.safe_concat(List, Chars)
List.Chars
"""
@spec safe_concat(binary | atom, binary | atom) :: atom
def safe_concat(left, right)
when (is_binary(left) or is_atom(left)) and (is_binary(right) or is_atom(right)) do
:elixir_aliases.safe_concat([left, right])
end
# Build signatures to be stored in docs.
#
# First every argument is simplified into either a quoted variable or an
# "autogenerated key" (a bare atom such as :map or :struct), counting how
# many times each key occurs. Then the keys are expanded into variables
# (see expand_keys/3). Note that simplify_args/4 accumulates the arguments
# in reverse order, which is exactly what expand_keys/3 expects so it can
# number repeated keys by counting down.
defp build_signature(args, env) do
  {reverse_args, counters} = simplify_args(args, %{}, [], env)
  expand_keys(reverse_args, counters, [])
end

# Simplifies each argument in turn, threading the `counters` map through
# and accumulating the simplified arguments in reverse order.
defp simplify_args([arg | args], counters, acc, env) do
  {arg, counters} = simplify_arg(arg, counters, env)
  simplify_args(args, counters, [arg | acc], env)
end

defp simplify_args([], counters, reverse_args, _env) do
  {reverse_args, counters}
end
# A default argument: simplify the left-hand side and expand any module
# attributes inside the default value so it renders meaningfully in docs.
defp simplify_arg({:\\, _, [left, right]}, counters, env) do
  {left, counters} = simplify_arg(left, counters, env)

  right =
    Macro.prewalk(right, fn
      {:@, _, _} = attr -> Macro.expand_once(attr, env)
      other -> other
    end)

  {{:\\, [], [left, right]}, counters}
end

# If the variable is being used explicitly for naming,
# we always give it a higher priority (nil) even if it
# starts with underscore.
defp simplify_arg({:=, _, [{var, _, atom}, _]}, counters, _env) when is_atom(atom) do
  {simplify_var(var, nil), counters}
end

defp simplify_arg({:=, _, [_, {var, _, atom}]}, counters, _env) when is_atom(atom) do
  {simplify_var(var, nil), counters}
end

# If we have only the variable as argument, it also gets
# higher priority. However, if the variable starts with an
# underscore, we give it a secondary context (Elixir) with
# lower priority.
defp simplify_arg({var, _, atom}, counters, _env) when is_atom(atom) do
  {simplify_var(var, Elixir), counters}
end

# A struct pattern: name the argument after the struct module when it can
# be expanded at this point, otherwise fall back to the generic :struct key.
defp simplify_arg({:%, _, [left, _]}, counters, env) do
  case Macro.expand_once(left, env) do
    module when is_atom(module) -> autogenerated_key(counters, simplify_module_name(module))
    _ -> autogenerated_key(counters, :struct)
  end
end

defp simplify_arg({:%{}, _, _}, counters, _env) do
  autogenerated_key(counters, :map)
end

# A module attribute: expand it once and simplify the result.
defp simplify_arg({:@, _, _} = attr, counters, env) do
  simplify_arg(Macro.expand_once(attr, env), counters, env)
end

# Literal arguments are named after their type.
defp simplify_arg(other, counters, _env) when is_integer(other),
  do: autogenerated_key(counters, :int)

defp simplify_arg(other, counters, _env) when is_boolean(other),
  do: autogenerated_key(counters, :bool)

defp simplify_arg(other, counters, _env) when is_atom(other),
  do: autogenerated_key(counters, :atom)

defp simplify_arg(other, counters, _env) when is_list(other),
  do: autogenerated_key(counters, :list)

defp simplify_arg(other, counters, _env) when is_float(other),
  do: autogenerated_key(counters, :float)

defp simplify_arg(other, counters, _env) when is_binary(other),
  do: autogenerated_key(counters, :binary)

# Anything else becomes a generic argument.
defp simplify_arg(_, counters, _env), do: autogenerated_key(counters, :arg)
# Turns a variable name into a quoted variable, stripping a single leading
# underscore. `guess_priority` becomes the quoted context: explicitly named
# variables use nil (highest priority), underscored guesses use Elixir —
# see merge_signature/3 for how the priorities are resolved.
defp simplify_var(var, guess_priority) do
  case Atom.to_string(var) do
    "_" -> {:_, [], guess_priority}
    "_" <> rest -> {String.to_atom(rest), [], guess_priority}
    _ -> {var, [], nil}
  end
end

# Derives an argument key from a struct module name, e.g. Foo.BarBaz
# becomes :bar_baz. Falls back to the module atom itself when split/1
# raises (i.e. the atom is not an Elixir alias).
defp simplify_module_name(module) when is_atom(module) do
  try do
    split(module)
  rescue
    ArgumentError -> module
  else
    module_name -> String.to_atom(Macro.underscore(List.last(module_name)))
  end
end

# Bumps the usage counter for `key`. The first occurrence is stored as
# :once so that expand_key/2 can leave single-use keys unnumbered.
defp autogenerated_key(counters, key) do
  case counters do
    %{^key => :once} -> {key, Map.put(counters, key, 2)}
    %{^key => value} -> {key, Map.put(counters, key, value + 1)}
    %{} -> {key, Map.put(counters, key, :once)}
  end
end
# Second pass of build_signature/2: walks the (reversed) simplified
# arguments and turns autogenerated keys into quoted variables.
defp expand_keys([{:\\, meta, [key, default]} | keys], counters, acc) when is_atom(key) do
  {var, counters} = expand_key(key, counters)
  expand_keys(keys, counters, [{:\\, meta, [var, default]} | acc])
end

defp expand_keys([key | keys], counters, acc) when is_atom(key) do
  {var, counters} = expand_key(key, counters)
  expand_keys(keys, counters, [var | acc])
end

# Already a quoted variable — keep it as is.
defp expand_keys([arg | args], counters, acc) do
  expand_keys(args, counters, [arg | acc])
end

defp expand_keys([], _counters, acc) do
  acc
end

# Keys counted more than once are numbered by counting down (the arguments
# arrive in reverse, so the last occurrence gets the highest suffix, e.g.
# map2 then map1). Keys stored as :once keep their bare name.
defp expand_key(key, counters) do
  case counters do
    %{^key => count} when is_integer(count) and count >= 1 ->
      {{:"#{key}#{count}", [], Elixir}, Map.put(counters, key, count - 1)}

    _ ->
      {{key, [], Elixir}, counters}
  end
end
# Merge
#
# Merges the signature of a previous clause with the signature of a new
# clause, position by position (`i` is the 1-based argument index).
defp merge_signatures([h1 | t1], [h2 | t2], i) do
  [merge_signature(h1, h2, i) | merge_signatures(t1, t2, i + 1)]
end

defp merge_signatures([], [], _) do
  []
end

# Defaults are kept from the older signature; only the variable is merged.
defp merge_signature({:\\, meta, [left, right]}, newer, i) do
  {:\\, meta, [merge_signature(left, newer, i), right]}
end

defp merge_signature(older, {:\\, _, [left, _]}, i) do
  merge_signature(older, left, i)
end

# The older signature, when given, always has higher precedence
# (a nil context marks an explicitly named variable, not a guess).
defp merge_signature({_, _, nil} = older, _newer, _), do: older
defp merge_signature(_older, {_, _, nil} = newer, _), do: newer

# Both are a guess, so check if they are the same guess
defp merge_signature({var, _, _} = older, {var, _, _}, _), do: older

# Otherwise, returns a generic guess named after the argument position
defp merge_signature({_, meta, _}, _newer, i), do: {:"arg#{i}", meta, Elixir}
@doc """
Checks if the module defines the given function or macro.
Use `defines?/3` to assert for a specific type.
This function can only be used on modules that have not yet been compiled.
Use `Kernel.function_exported?/3` and `Kernel.macro_exported?/3` to check for
public functions and macros respectively in compiled modules.
Note that `defines?` returns false for functions and macros that have
been defined but then marked as overridable and no other implementation
has been provided. You can check the overridable status by calling
`overridable?/2`.
## Examples
defmodule Example do
Module.defines?(__MODULE__, {:version, 0}) #=> false
def version, do: 1
Module.defines?(__MODULE__, {:version, 0}) #=> true
end
"""
@spec defines?(module, definition) :: boolean
def defines?(module, {name, arity} = tuple)
when is_atom(module) and is_atom(name) and is_integer(arity) and arity >= 0 and arity <= 255 do
assert_not_compiled!(__ENV__.function, module, @extra_error_msg_defines?)
{set, _bag} = data_tables_for(module)
# Definitions are stored under {:def, {name, arity}} keys in the set
# table; membership alone answers the question (kind is irrelevant here).
:ets.member(set, {:def, tuple})
end
@doc """
Checks if the module defines a function or macro of the
given `kind`.
`kind` can be any of `:def`, `:defp`, `:defmacro`, or `:defmacrop`.
This function can only be used on modules that have not yet been compiled.
Use `Kernel.function_exported?/3` and `Kernel.macro_exported?/3` to check for
public functions and macros respectively in compiled modules.
## Examples
defmodule Example do
Module.defines?(__MODULE__, {:version, 0}, :def) #=> false
def version, do: 1
Module.defines?(__MODULE__, {:version, 0}, :def) #=> true
end
"""
@spec defines?(module, definition, def_kind) :: boolean
def defines?(module, {name, arity} = tuple, def_kind)
when is_atom(module) and is_atom(name) and is_integer(arity) and arity >= 0 and arity <= 255 and
def_kind in [:def, :defp, :defmacro, :defmacrop] do
assert_not_compiled!(__ENV__.function, module, @extra_error_msg_defines?)
{set, _bag} = data_tables_for(module)
# Unlike defines?/2, a full lookup is required so the stored kind
# (second element of the row) can be pinned against `def_kind`.
case :ets.lookup(set, {:def, tuple}) do
[{_, ^def_kind, _, _, _, _}] -> true
_ -> false
end
end
@doc """
Checks if the current module defines the given type (private, opaque or not).
This function is only available for modules being compiled.
"""
@doc since: "1.7.0"
@spec defines_type?(module, definition) :: boolean
def defines_type?(module, definition),
  do: Kernel.Typespec.defines_type?(module, definition)
@doc """
Copies the given spec as a callback.
Returns `true` if there is such a spec and it was copied as a callback.
If the function associated to the spec has documentation defined prior to
invoking this function, the docs are copied too.
"""
@doc since: "1.7.0"
@spec spec_to_callback(module, definition) :: boolean
def spec_to_callback(module, definition),
  do: Kernel.Typespec.spec_to_callback(module, definition)
@doc """
Returns all functions and macros defined in `module`.
It returns a list with all defined functions and macros, public and private,
in the shape of `[{name, arity}, ...]`.
This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get the public functions and macros in
compiled modules.
## Examples
defmodule Example do
def version, do: 1
defmacrop test(arg), do: arg
Module.definitions_in(__MODULE__) #=> [{:version, 0}, {:test, 1}]
end
"""
@spec definitions_in(module) :: [definition]
def definitions_in(module) when is_atom(module) do
assert_not_compiled!(__ENV__.function, module, @extra_error_msg_definitions_in)
{_, bag} = data_tables_for(module)
# Every definition is also registered under the :defs key in the bag
# table; extract the second element ({name, arity}) of each entry.
bag_lookup_element(bag, :defs, 2)
end
@doc """
Returns all functions defined in `module`, according
to its kind.
This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get the public functions and macros in
compiled modules.
## Examples
defmodule Example do
def version, do: 1
Module.definitions_in(__MODULE__, :def) #=> [{:version, 0}]
Module.definitions_in(__MODULE__, :defp) #=> []
end
"""
@spec definitions_in(module, def_kind) :: [definition]
def definitions_in(module, def_kind)
when is_atom(module) and def_kind in [:def, :defp, :defmacro, :defmacrop] do
assert_not_compiled!(__ENV__.function, module, @extra_error_msg_definitions_in)
{set, _} = data_tables_for(module)
# Match {:def, {name, arity}} rows whose stored kind equals `def_kind`.
# :ets.match/2 yields one single-element list per match ([[pair]]),
# hence the flattening via :lists.concat/1.
:lists.concat(:ets.match(set, {{:def, :"$1"}, def_kind, :_, :_, :_, :_}))
end
@doc """
Makes the given functions in `module` overridable.
An overridable function is lazily defined, allowing a
developer to customize it. See `Kernel.defoverridable/1` for
more information and documentation.
Once a function or a macro is marked as overridable, it will
no longer be listed under `definitions_in/1` or return true
when given to `defines?/2` until another implementation is
given.
"""
@spec make_overridable(module, [definition]) :: :ok
def make_overridable(module, tuples) when is_atom(module) and is_atom(module) and is_list(tuples) do
assert_not_compiled!(__ENV__.function, module)
func = fn
{function_name, arity} = tuple
when is_atom(function_name) and is_integer(arity) and arity >= 0 and arity <= 255 ->
# take_definition removes the current clauses from the module, which
# is why the definition stops showing up in definitions_in/1 until
# another implementation is given.
case :elixir_def.take_definition(module, tuple) do
false ->
raise ArgumentError,
"cannot make function #{function_name}/#{arity} " <>
"overridable because it was not defined"
clause ->
# Move the tracked local calls (neighbours) along with the clauses
# so local-call bookkeeping stays consistent.
neighbours = :elixir_locals.yank(tuple, module)
:elixir_overridable.record_overridable(module, tuple, clause, neighbours)
end
other ->
raise ArgumentError,
"each element in tuple list has to be a " <>
"{function_name :: atom, arity :: 0..255} tuple, got: #{inspect(other)}"
end
:lists.foreach(func, tuples)
end
@spec make_overridable(module, module) :: :ok
def make_overridable(module, behaviour) when is_atom(module) and is_atom(behaviour) do
# Validate the behaviour argument up front so the user gets a precise
# explanation instead of a confusing downstream failure.
case check_module_for_overridable(module, behaviour) do
:ok ->
:ok
{:error, error_explanation} ->
raise ArgumentError,
"cannot pass module #{inspect(behaviour)} as argument " <>
"to defoverridable/1 because #{error_explanation}"
end
behaviour_callbacks =
for callback <- behaviour_info(behaviour, :callbacks) do
{pair, _kind} = normalize_macro_or_function_callback(callback)
pair
end
# Only the module's definitions that implement one of the behaviour's
# callbacks become overridable.
tuples =
for definition <- definitions_in(module),
definition in behaviour_callbacks,
do: definition
make_overridable(module, tuples)
end
# Returns :ok when `behaviour` is a compiled behaviour module that the
# current module has declared via @behaviour; otherwise an {:error, why}
# tuple with a human-readable explanation.
defp check_module_for_overridable(module, behaviour) do
{_, bag} = data_tables_for(module)
# All @behaviour declarations accumulated so far for this module.
behaviour_definitions = bag_lookup_element(bag, {:accumulate, :behaviour}, 2)
cond do
not Code.ensure_compiled?(behaviour) ->
{:error, "it was not defined"}
not function_exported?(behaviour, :behaviour_info, 1) ->
{:error, "it does not define any callbacks"}
behaviour not in behaviour_definitions ->
error_message =
"its corresponding behaviour is missing. Did you forget to " <>
"add @behaviour #{inspect(behaviour)}?"
{:error, error_message}
true ->
:ok
end
end
# Normalizes one entry from behaviour_info(:callbacks). Macro callbacks
# are exported with a "MACRO-" name prefix and one extra argument
# (the caller environment), which is stripped here.
defp normalize_macro_or_function_callback({function_name, arity}) do
  case Atom.to_string(function_name) do
    "MACRO-" <> name ->
      {{String.to_atom(name), arity - 1}, :defmacro}

    _ ->
      {{function_name, arity}, :def}
  end
end
# Fetches callback metadata from a behaviour module, normalizing the
# :undefined marker (e.g. no optional callbacks declared) to [].
defp behaviour_info(module, key) do
  case module.behaviour_info(key) do
    :undefined -> []
    list when is_list(list) -> list
  end
end
@doc """
Returns `true` if `tuple` in `module` is marked as overridable.
"""
@spec overridable?(module, definition) :: boolean
def overridable?(module, {function_name, arity} = tuple)
when is_atom(function_name) and is_integer(arity) and arity >= 0 and arity <= 255 do
# Delegates to the compiler's overridable bookkeeping; anything other
# than :not_overridable means the pair was recorded via defoverridable.
:elixir_overridable.overridable_for(module, tuple) != :not_overridable
end
@doc """
Puts a module attribute with `key` and `value` in the given `module`.
## Examples
defmodule MyModule do
Module.put_attribute(__MODULE__, :custom_threshold_for_lib, 10)
end
"""
@spec put_attribute(module, atom, term) :: :ok
def put_attribute(module, key, value) when is_atom(module) and is_atom(key),
  do: __put_attribute__(module, key, value, nil)
@doc """
Gets the given attribute from a module.
If the attribute was marked with `accumulate` with
`Module.register_attribute/3`, a list is always returned.
`nil` is returned if the attribute has not been marked with
`accumulate` and has not been set to any value.
The `@` macro compiles to a call to this function. For example,
the following code:
@foo
Expands to something akin to:
Module.get_attribute(__MODULE__, :foo)
This function can only be used on modules that have not yet been compiled.
Use the `c:Module.__info__/1` callback to get all persisted attributes, or
`Code.fetch_docs/1` to retrieve all documentation related attributes in
compiled modules.
## Examples
defmodule Foo do
Module.put_attribute(__MODULE__, :value, 1)
Module.get_attribute(__MODULE__, :value) #=> 1
Module.get_attribute(__MODULE__, :value, :default) #=> 1
Module.get_attribute(__MODULE__, :not_found, :default) #=> :default
Module.register_attribute(__MODULE__, :value, accumulate: true)
Module.put_attribute(__MODULE__, :value, 1)
Module.get_attribute(__MODULE__, :value) #=> [1]
end
"""
@spec get_attribute(module, atom, term) :: term
def get_attribute(module, key, default \\ nil) when is_atom(module) and is_atom(key) do
  # Passing nil as the line suppresses the "undefined attribute" warning;
  # a nil result then falls back to the caller-provided default.
  value = __get_attribute__(module, key, nil)
  if is_nil(value), do: default, else: value
end
@doc """
Checks if the given attribute has been defined.
An attribute is defined if it has been registered with `register_attribute/3`
or assigned a value. If an attribute has been deleted with `delete_attribute/2`
it is no longer considered defined.
This function can only be used on modules that have not yet been compiled.
## Examples
defmodule MyModule do
@value 1
Module.register_attribute(__MODULE__, :other_value)
Module.put_attribute(__MODULE__, :another_value, 1)
Module.has_attribute?(__MODULE__, :value) #=> true
Module.has_attribute?(__MODULE__, :other_value) #=> true
Module.has_attribute?(__MODULE__, :another_value) #=> true
Module.has_attribute?(__MODULE__, :undefined) #=> false
Module.delete_attribute(__MODULE__, :value)
Module.has_attribute?(__MODULE__, :value) #=> false
end
"""
@doc since: "1.10.0"
@spec has_attribute?(module, atom) :: boolean
def has_attribute?(module, key) when is_atom(module) and is_atom(key) do
assert_not_compiled!(__ENV__.function, module)
{set, _bag} = data_tables_for(module)
# Both registration and assignment create a row in the set table, so
# plain key membership answers "defined?".
:ets.member(set, key)
end
@doc """
Deletes the module attribute that matches the given key.
It returns the deleted attribute value (or `nil` if nothing was set).
## Examples
defmodule MyModule do
Module.put_attribute(__MODULE__, :custom_threshold_for_lib, 10)
Module.delete_attribute(__MODULE__, :custom_threshold_for_lib)
end
"""
@spec delete_attribute(module, atom) :: term
def delete_attribute(module, key) when is_atom(module) and is_atom(key) do
assert_not_compiled!(__ENV__.function, module)
{set, bag} = data_tables_for(module)
case :ets.lookup(set, key) do
[{_, _, :accumulate}] ->
# Accumulated values live in the bag table in insertion order; take/2
# removes and returns them, and the reversal puts the most recently
# added value first (matching get_attribute/3 for accumulators).
reverse_values(:ets.take(bag, {:accumulate, key}), [])
[{_, value, _}] ->
:ets.delete(set, key)
value
[] ->
nil
end
end
# Prepends the second element of each pair onto acc, yielding the
# values in reverse input order.
defp reverse_values(pairs, acc),
  do: Enum.reduce(pairs, acc, fn {_key, value}, values -> [value | values] end)
@doc """
Registers an attribute.
By registering an attribute, a developer is able to customize
how Elixir will store and accumulate the attribute values.
## Options
When registering an attribute, two options can be given:
* `:accumulate` - several calls to the same attribute will
accumulate instead of overriding the previous one. New attributes
are always added to the top of the accumulated list.
* `:persist` - the attribute will be persisted in the Erlang
Abstract Format. Useful when interfacing with Erlang libraries.
By default, both options are `false`.
## Examples
defmodule MyModule do
Module.register_attribute(__MODULE__, :custom_threshold_for_lib, accumulate: true)
@custom_threshold_for_lib 10
@custom_threshold_for_lib 20
@custom_threshold_for_lib #=> [20, 10]
end
"""
@spec register_attribute(module, atom, [{:accumulate, boolean}, {:persist, boolean}]) :: :ok
def register_attribute(module, attribute, options)
when is_atom(module) and is_atom(attribute) and is_list(options) do
assert_not_compiled!(__ENV__.function, module)
{set, bag} = data_tables_for(module)
if Keyword.get(options, :persist) do
# Remember the attribute so it is emitted into the compiled module.
:ets.insert(bag, {:persisted_attributes, attribute})
end
if Keyword.get(options, :accumulate) do
# Mark the attribute as accumulating: either create the row or flip
# the state of an existing one. Values themselves go to the bag table.
:ets.insert_new(set, {attribute, [], :accumulate}) ||
:ets.update_element(set, attribute, {3, :accumulate})
else
# Plain registration: behaves like an attribute that exists but has
# not been assigned yet (insert_new keeps any existing value intact).
:ets.insert_new(bag, {:attributes, attribute})
:ets.insert_new(set, {attribute, nil, :unset})
end
:ok
end
@doc """
Splits the given module name into binary parts.
`module` has to be an Elixir module, as `split/1` won't work with Erlang-style
modules (for example, `split(:lists)` raises an error).
`split/1` also supports splitting the string representation of Elixir modules
(that is, the result of calling `Atom.to_string/1` with the module name).
## Examples
iex> Module.split(Very.Long.Module.Name.And.Even.Longer)
["Very", "Long", "Module", "Name", "And", "Even", "Longer"]
iex> Module.split("Elixir.String.Chars")
["String", "Chars"]
"""
@spec split(module | String.t()) :: [String.t(), ...]
def split(module)

def split(module) when is_atom(module), do: split(Atom.to_string(module), _original = module)
def split(module) when is_binary(module), do: split(module, _original = module)
# Only names carrying the "Elixir." prefix are Elixir modules; anything
# else raises with the original (pre-stringification) value.
defp split("Elixir." <> rest, _original), do: String.split(rest, ".")

defp split(_module, original),
  do: raise(ArgumentError, "expected an Elixir module, got: #{inspect(original)}")
@doc false
@deprecated "Use @doc instead"
def add_doc(module, line, kind, {name, arity}, signature \\ [], doc) do
assert_not_compiled!(__ENV__.function, module)
if kind in [:defp, :defmacrop, :typep] do
# Docs are rejected for private definitions; only report an error when
# an actual doc was supplied.
if doc, do: {:error, :private_doc}, else: :ok
else
{set, _bag} = data_tables_for(module)
compile_doc(set, line, kind, name, arity, signature, nil, doc, %{}, __ENV__, false)
:ok
end
end
@doc false
# Used internally to compile documentation.
# This function is private and must be used only internally.
def compile_definition_attributes(env, kind, name, args, _guards, body) do
%{module: module} = env
{set, bag} = data_tables_for(module)
{arity, defaults} = args_count(args, 0, 0)
# Consume the pending @impl/@deprecated/@doc attributes that were set
# right before this definition and attach them to {name, arity}.
impl = compile_impl(set, bag, name, env, kind, arity, defaults)
doc_meta = compile_doc_meta(set, bag, name, arity, defaults)
{line, doc} = get_doc_info(set, env)
compile_doc(set, line, kind, name, arity, args, body, doc, doc_meta, env, impl)
:ok
end
# Private definitions never get docs; warn when one was provided anyway.
defp compile_doc(_table, line, kind, name, arity, _args, _body, doc, _doc_meta, env, _impl)
when kind in [:defp, :defmacrop] do
if doc do
message =
"#{kind} #{name}/#{arity} is private, " <>
"@doc attribute is always discarded for private functions/macros/types"
IO.warn(message, Macro.Env.stacktrace(%{env | line: line}))
end
end
defp compile_doc(table, line, kind, name, arity, args, _body, doc, doc_meta, env, impl) do
key = {doc_key(kind), name, arity}
signature = build_signature(args, env)
case :ets.lookup(table, key) do
[] ->
# First clause of this definition. An @impl with no explicit @doc
# hides the definition from docs (doc = false).
doc = if is_nil(doc) && impl, do: false, else: doc
:ets.insert(table, {key, line, signature, doc, doc_meta})
[{_, current_line, current_sign, current_doc, current_doc_meta}] ->
# Subsequent clause: merge signatures, keep the original line, prefer
# a newly supplied doc over the stored one, and merge metadata maps.
signature = merge_signatures(current_sign, signature, 1)
doc = if is_nil(doc), do: current_doc, else: doc
doc = if is_nil(doc) && impl, do: false, else: doc
doc_meta = Map.merge(current_doc_meta, doc_meta)
:ets.insert(table, {key, current_line, signature, doc, doc_meta})
end
end
# Maps a definition kind to the documentation entry kind it is stored under.
defp doc_key(kind) do
  case kind do
    :def -> :function
    :defmacro -> :macro
  end
end
# Assembles the documentation metadata map for one definition:
# deprecation info, then any pending @doc metadata, then defaults count.
defp compile_doc_meta(set, bag, name, arity, defaults) do
  %{}
  |> compile_deprecated(set, bag, name, arity, defaults)
  |> get_doc_meta(set)
  |> add_defaults_count(defaults)
end
# Consumes (takes) the pending {:doc, :meta} row from the set table,
# merging its metadata over whatever was collected so far.
defp get_doc_meta(existing_meta, set) do
  case :ets.take(set, {:doc, :meta}) do
    [] -> existing_meta
    [{{:doc, :meta}, metadata, _line}] -> Map.merge(existing_meta, metadata)
  end
end
# Consumes a pending @deprecated attribute (if any): records one
# deprecation entry per reachable arity in the bag table and mirrors the
# reason into the doc metadata.
defp compile_deprecated(doc_meta, set, bag, name, arity, defaults) do
case :ets.take(set, :deprecated) do
[{:deprecated, reason, _}] when is_binary(reason) ->
:ets.insert(bag, deprecated_reasons(defaults, name, arity, reason))
Map.put(doc_meta, :deprecated, reason)
_ ->
doc_meta
end
end
# Records how many arguments declare default values; zero is simply
# omitted from the metadata.
defp add_defaults_count(doc_meta, count) do
  if count == 0, do: doc_meta, else: Map.put(doc_meta, :defaults, count)
end
# Expands one @deprecated reason into an entry per callable arity: a
# definition with d defaults is reachable at arities arity-d .. arity.
defp deprecated_reasons(0, name, arity, reason) do
  [deprecated_reason(name, arity, reason)]
end

defp deprecated_reasons(defaults_left, name, arity, reason) do
  entry = deprecated_reason(name, arity - defaults_left, reason)
  [entry | deprecated_reasons(defaults_left - 1, name, arity, reason)]
end

# Shape of a single deprecation entry as stored in the bag table.
defp deprecated_reason(name, arity, reason),
  do: {:deprecated, {{name, arity}, reason}}
# Consumes a pending @impl attribute and records the implementation
# candidate for the behaviour checks that run at the end of compilation.
# Returns the @impl value (false when no @impl was pending).
defp compile_impl(set, bag, name, env, kind, arity, defaults) do
%{line: line, file: file} = env
case :ets.take(set, :impl) do
[{:impl, value, _}] ->
pair = {name, arity}
# The definition's stored metadata; :context records which construct
# (e.g. a `use`) generated the definition, if any.
meta = :ets.lookup_element(set, {:def, pair}, 3)
impl = {pair, Keyword.get(meta, :context), defaults, kind, line, file, value}
:ets.insert(bag, {:impls, impl})
value
[] ->
false
end
end
# Counts the total number of parameters and how many of them carry a
# default value (represented by the \\ AST node).
defp args_count(args, total, defaults) do
  Enum.reduce(args, {total, defaults}, fn
    {:\\, _, _}, {count, with_default} -> {count + 1, with_default + 1}
    _arg, {count, with_default} -> {count + 1, with_default}
  end)
end
@doc false
# Invoked at the end of a module's compilation: validates @behaviour
# declarations and @impl annotations, then warns about callbacks that
# remain unimplemented or mis-annotated.
def check_behaviours_and_impls(env, _set, bag, all_definitions) do
behaviours = bag_lookup_element(bag, {:accumulate, :behaviour}, 2)
impls = bag_lookup_element(bag, :impls, 2)
callbacks = check_behaviours(env, behaviours)
pending_callbacks =
if impls != [] do
# Callbacks covered by an @impl are removed from the pending set; the
# remainder may additionally warn about missing @impl annotations.
{non_implemented_callbacks, contexts} = check_impls(env, behaviours, callbacks, impls)
warn_missing_impls(env, non_implemented_callbacks, contexts, all_definitions)
non_implemented_callbacks
else
callbacks
end
check_callbacks(env, pending_callbacks, all_definitions)
:ok
end
# Validates each @behaviour declaration (atom, compiled, actually a
# behaviour) and folds all declared callbacks into a map of
# {name, arity} => {kind, behaviour, optional?}.
defp check_behaviours(%{lexical_tracker: pid} = env, behaviours) do
Enum.reduce(behaviours, %{}, fn behaviour, acc ->
cond do
not is_atom(behaviour) ->
message =
"@behaviour #{inspect(behaviour)} must be an atom (in module #{inspect(env.module)})"
IO.warn(message, Macro.Env.stacktrace(env))
acc
not Code.ensure_compiled?(behaviour) ->
message =
"@behaviour #{inspect(behaviour)} does not exist (in module #{inspect(env.module)})"
IO.warn(message, Macro.Env.stacktrace(env))
acc
not function_exported?(behaviour, :behaviour_info, 1) ->
message =
"module #{inspect(behaviour)} is not a behaviour (in module #{inspect(env.module)})"
IO.warn(message, Macro.Env.stacktrace(env))
acc
true ->
# Record the compile-time dependency with the lexical tracker.
:elixir_lexical.record_remote(behaviour, nil, pid)
optional_callbacks = behaviour_info(behaviour, :optional_callbacks)
callbacks = behaviour_info(behaviour, :callbacks)
Enum.reduce(callbacks, acc, &add_callback(&1, behaviour, env, optional_callbacks, &2))
end
end)
end
# Adds a single callback to the accumulator, warning when the same
# callback is declared twice (duplicated or conflicting behaviours).
defp add_callback(original, behaviour, env, optional_callbacks, acc) do
{callback, kind} = normalize_macro_or_function_callback(original)
case acc do
%{^callback => {_kind, conflict, _optional?}} ->
message =
if conflict == behaviour do
"the behavior #{inspect(conflict)} has been declared twice " <>
"(conflict in #{format_definition(kind, callback)} in module #{inspect(env.module)})"
else
"conflicting behaviours found. #{format_definition(kind, callback)} is required by " <>
"#{inspect(conflict)} and #{inspect(behaviour)} (in module #{inspect(env.module)})"
end
IO.warn(message, Macro.Env.stacktrace(env))
%{} ->
:ok
end
# The most recently processed behaviour always wins the slot.
Map.put(acc, callback, {kind, behaviour, original in optional_callbacks})
end
# Warns about required callbacks that were never implemented, or were
# implemented with the wrong kind (function vs macro).
defp check_callbacks(env, callbacks, all_definitions) do
for {callback, {kind, behaviour, optional?}} <- callbacks do
case :lists.keyfind(callback, 1, all_definitions) do
false when not optional? ->
message =
format_callback(callback, kind, behaviour) <>
" is not implemented (in module #{inspect(env.module)})"
IO.warn(message, Macro.Env.stacktrace(env))
{_, wrong_kind, _, _} when kind != wrong_kind ->
message =
format_callback(callback, kind, behaviour) <>
" was implemented as \"#{wrong_kind}\" but should have been \"#{kind}\" " <>
"(in module #{inspect(env.module)})"
IO.warn(message, Macro.Env.stacktrace(env))
_ ->
:ok
end
end
:ok
end
# Renders "<kind> <name>/<arity> required by behaviour|protocol <module>".
defp format_callback(callback, kind, module) do
  source = if protocol?(module), do: "protocol ", else: "behaviour "
  format_definition(kind, callback) <> " required by " <> source <> inspect(module)
end
# A module is a protocol when it is loadable, exports __protocol__/1,
# and that callback confirms the module as its own protocol definition.
defp protocol?(module) do
  if Code.ensure_loaded?(module) and function_exported?(module, :__protocol__, 1) do
    module.__protocol__(:module) == module
  else
    false
  end
end
# Validates every recorded @impl annotation. Implemented callbacks are
# removed from the pending map; the contexts in which @impl was used are
# remembered per behaviour (for the missing-@impl heuristic).
defp check_impls(env, behaviours, callbacks, impls) do
acc = {callbacks, %{}}
Enum.reduce(impls, acc, fn {fa, context, defaults, kind, line, file, value}, acc ->
case impl_behaviours(fa, defaults, kind, value, behaviours, callbacks) do
{:ok, impl_behaviours} ->
Enum.reduce(impl_behaviours, acc, fn {fa, behaviour}, {callbacks, contexts} ->
callbacks = Map.delete(callbacks, fa)
contexts = Map.update(contexts, behaviour, [context], &[context | &1])
{callbacks, contexts}
end)
{:error, message} ->
formatted = format_impl_warning(fa, kind, message)
IO.warn(formatted, Macro.Env.stacktrace(%{env | line: line, file: file}))
acc
end
end)
end
# Entry point: expands {name, arity} to every arity reachable through
# default arguments, then dispatches on the @impl value below.
defp impl_behaviours({function, arity}, defaults, kind, value, behaviours, callbacks) do
impls = for n <- arity..(arity - defaults), do: {function, n}
impl_behaviours(impls, kind, value, behaviours, callbacks)
end
# @impl on a private definition is always invalid.
defp impl_behaviours(_, kind, _, _, _) when kind in [:defp, :defmacrop] do
{:error, :private_function}
end
# Any @impl without a declared behaviour is invalid.
defp impl_behaviours(_, _, value, [], _) do
{:error, {:no_behaviours, value}}
end
# @impl false: valid only when the definition is NOT a known callback.
defp impl_behaviours(impls, _, false, _, callbacks) do
case callbacks_for_impls(impls, callbacks) do
[] -> {:ok, []}
[impl | _] -> {:error, {:impl_not_defined, impl}}
end
end
# @impl true: valid only when the definition IS a known callback.
defp impl_behaviours(impls, _, true, _, callbacks) do
case callbacks_for_impls(impls, callbacks) do
[] -> {:error, {:impl_defined, callbacks}}
impls -> {:ok, impls}
end
end
# @impl SomeBehaviour: the callback must belong to that behaviour and
# the behaviour must have been declared with @behaviour.
defp impl_behaviours(impls, _, behaviour, behaviours, callbacks) do
filtered = behaviour_callbacks_for_impls(impls, behaviour, callbacks)
cond do
filtered != [] ->
{:ok, filtered}
behaviour not in behaviours ->
{:error, {:behaviour_not_declared, behaviour}}
true ->
{:error, {:behaviour_not_defined, behaviour, callbacks}}
end
end
# Keeps only the {name, arity} pairs whose callback entry belongs to the
# given behaviour, pairing each surviving pair with that behaviour.
defp behaviour_callbacks_for_impls(impls, behaviour, callbacks) do
  for fa <- impls, match?({_, ^behaviour, _}, callbacks[fa]), do: {fa, behaviour}
end
# Pairs each {name, arity} with the behaviour that declared it as a
# callback, dropping pairs that are not callbacks at all.
defp callbacks_for_impls(impls, callbacks) do
  Enum.flat_map(impls, fn fa ->
    case callbacks[fa] do
      {_kind, behaviour, _optional?} -> [{fa, behaviour}]
      nil -> []
    end
  end)
end
# Renders the @impl validation errors produced by impl_behaviours/6 into
# user-facing warning messages.
defp format_impl_warning(fa, kind, :private_function) do
"#{format_definition(kind, fa)} is private, @impl attribute is always discarded for private functions/macros"
end
defp format_impl_warning(fa, kind, {:no_behaviours, value}) do
"got \"@impl #{inspect(value)}\" for #{format_definition(kind, fa)} but no behaviour was declared"
end
defp format_impl_warning(_, kind, {:impl_not_defined, {fa, behaviour}}) do
"got \"@impl false\" for #{format_definition(kind, fa)} " <>
"but it is a callback specified in #{inspect(behaviour)}"
end
defp format_impl_warning(fa, kind, {:impl_defined, callbacks}) do
"got \"@impl true\" for #{format_definition(kind, fa)} " <>
"but no behaviour specifies such callback#{known_callbacks(callbacks)}"
end
defp format_impl_warning(fa, kind, {:behaviour_not_declared, behaviour}) do
"got \"@impl #{inspect(behaviour)}\" for #{format_definition(kind, fa)} " <>
"but this behaviour was not declared with @behaviour"
end
defp format_impl_warning(fa, kind, {:behaviour_not_defined, behaviour, callbacks}) do
"got \"@impl #{inspect(behaviour)}\" for #{format_definition(kind, fa)} " <>
"but this behaviour does not specify such callback#{known_callbacks(callbacks)}"
end
# Nothing pending: every known callback was covered by an @impl.
defp warn_missing_impls(_env, callbacks, _contexts, _defs) when map_size(callbacks) == 0 do
:ok
end
# Warns on public definitions that match a pending callback but carry no
# @impl annotation — limited (via missing_impl_in_context?/3) to the
# contexts where other implementations did use @impl.
defp warn_missing_impls(env, non_implemented_callbacks, contexts, defs) do
for {pair, kind, meta, _clauses} <- defs,
kind in [:def, :defmacro] do
with {:ok, {_, behaviour, _}} <- Map.fetch(non_implemented_callbacks, pair),
true <- missing_impl_in_context?(meta, behaviour, contexts) do
message =
"module attribute @impl was not set for #{format_definition(kind, pair)} " <>
"callback (specified in #{inspect(behaviour)}). " <>
"This either means you forgot to add the \"@impl true\" annotation before the " <>
"definition or that you are accidentally overriding this callback"
IO.warn(message, Macro.Env.stacktrace(%{env | line: :elixir_utils.get_line(meta)}))
end
end
:ok
end
# Decides whether a missing-@impl warning applies: when the behaviour has
# known contexts (from other @impl uses), the definition's :context must
# be one of them; otherwise only context-free (user-written) definitions
# are flagged.
defp missing_impl_in_context?(meta, behaviour, contexts) do
  case Map.fetch(contexts, behaviour) do
    {:ok, known_contexts} -> Keyword.get(meta, :context) in known_contexts
    :error -> not Keyword.has_key?(meta, :context)
  end
end
# Human-readable label for a definition kind, optionally with name/arity.
defp format_definition(kind, {name, arity}), do: "#{format_definition(kind)} #{name}/#{arity}"

defp format_definition(kind) when kind in [:defmacro, :defmacrop], do: "macro"
defp format_definition(kind) when kind in [:def, :defp], do: "function"
# Suffix appended to @impl warnings, listing every callback we know of;
# the empty-map clause nudges the user towards declaring a @behaviour.
defp known_callbacks(callbacks) when map_size(callbacks) == 0 do
  ". There are no known callbacks, please specify the proper @behaviour " <>
    "and make sure it defines callbacks"
end

defp known_callbacks(callbacks) do
  listing =
    Enum.map_join(callbacks, "", fn {{name, arity}, {kind, module, _}} ->
      "\n * " <> Exception.format_mfa(module, name, arity) <> " (#{format_definition(kind)})"
    end)

  ". The known callbacks are:\n#{listing}\n"
end
@doc false
# Used internally by Kernel's @.
# This function is private and must be used only internally.
def __get_attribute__(module, key, line) when is_atom(key) do
assert_not_compiled!(
{:get_attribute, 2},
module,
"Use the Module.__info__/1 callback or Code.fetch_docs/1 instead"
)
{set, bag} = data_tables_for(module)
case :ets.lookup(set, key) do
[{_, _, :accumulate}] ->
# Accumulated values are stored in insertion order in the bag table;
# reverse so the most recently added value comes first.
:lists.reverse(bag_lookup_element(bag, {:accumulate, key}, 2))
[{_, val, line}] when is_integer(line) ->
# First read of an unread attribute: mark it used so the compiler
# does not warn about it being set but never read.
:ets.update_element(set, key, {3, :used})
val
[{_, val, _}] ->
val
[] when is_integer(line) ->
# A caller-supplied line means "warn on undefined attribute"; a nil
# line (see get_attribute/3) reads silently.
# TODO: Consider raising instead of warning on v2.0 as it usually cascades
error_message =
"undefined module attribute @#{key}, " <>
"please remove access to @#{key} or explicitly set it before access"
IO.warn(error_message, attribute_stack(module, line))
nil
[] ->
nil
end
end
@doc false
# Used internally by Kernel's @.
# This function is private and must be used only internally.
def __put_attribute__(module, key, value, line) when is_atom(key) do
assert_not_compiled!(__ENV__.function, module)
{set, bag} = data_tables_for(module)
# Built-in attributes are validated/normalized before being stored.
value = preprocess_attribute(key, value)
put_attribute(module, key, value, line, set, bag)
:ok
end
# If any of the doc attributes are called with a keyword list that
# will become documentation metadata. Multiple calls will be merged
# into the same map overriding duplicate keys.
defp put_attribute(module, key, {_, metadata}, line, set, _bag)
when key in [:doc, :typedoc, :moduledoc] and is_list(metadata) do
metadata_map = preprocess_doc_meta(metadata, module, line, %{})
case :ets.insert_new(set, {{key, :meta}, metadata_map, line}) do
true ->
:ok
false ->
# Metadata already pending: merge the new keys over the stored map.
current_metadata = :ets.lookup_element(set, {key, :meta}, 2)
:ets.update_element(set, {key, :meta}, {2, Map.merge(current_metadata, metadata_map)})
end
end
# Optimize some attributes by avoiding writing to the attributes key
# in the bag table since we handle them internally.
defp put_attribute(module, key, value, line, set, _bag)
when key in [:doc, :typedoc, :moduledoc, :impl, :deprecated] do
try do
:ets.lookup_element(set, key, 3)
catch
# No previous value stored for this attribute — nothing to warn about.
:error, :badarg -> :ok
else
unread_line when is_integer(line) and is_integer(unread_line) ->
# The previous value was never consumed by a definition: warn that
# it is being silently overwritten.
message = "redefining @#{key} attribute previously set at line #{unread_line}"
IO.warn(message, attribute_stack(module, line))
_ ->
:ok
end
:ets.insert(set, {key, value, line})
end
# @on_load is special-cased: it may only be set once per module.
defp put_attribute(_module, :on_load, value, line, set, bag) do
try do
:ets.lookup_element(set, :on_load, 3)
catch
:error, :badarg ->
:ets.insert(set, {:on_load, value, line})
:ets.insert(bag, {:attributes, :on_load})
else
_ -> raise ArgumentError, "the @on_load attribute can only be set once per module"
end
end
# Generic attributes: accumulate in the bag table when registered as
# accumulating, otherwise overwrite the single value in the set table.
defp put_attribute(_module, key, value, line, set, bag) do
try do
:ets.lookup_element(set, key, 3)
catch
:error, :badarg ->
:ets.insert(set, {key, value, line})
:ets.insert(bag, {:attributes, key})
else
:accumulate -> :ets.insert(bag, {{:accumulate, key}, value})
_ -> :ets.insert(set, {key, value, line})
end
end
# Builds a one-frame stacktrace pointing at the module body so attribute
# warnings can be attributed to the line that set or read the attribute.
defp attribute_stack(module, line) do
  file =
    module
    |> :elixir_module.file()
    |> Path.relative_to_cwd()
    |> String.to_charlist()

  [{module, :__MODULE__, 0, file: file, line: line}]
end
## Helpers
# Built-in doc attributes arrive as {line, doc} where doc must be false,
# nil, a binary, or a keyword list of metadata; anything else raises.
defp preprocess_attribute(key, value) when key in [:moduledoc, :typedoc, :doc] do
case value do
{line, doc} when is_integer(line) and (is_binary(doc) or doc == false or is_nil(doc)) ->
value
{line, [{key, _} | _]} when is_integer(line) and is_atom(key) ->
value
{line, doc} when is_integer(line) ->
raise ArgumentError,
"@#{key} is a built-in module attribute for documentation. It should be either " <>
"false, nil, a string, or a keyword list, got: #{inspect(doc)}"
_other ->
raise ArgumentError,
"@#{key} is a built-in module attribute for documentation. When set dynamically, " <>
"it should be {line, doc} (where \"doc\" is either false, nil, a string, or a keyword list), " <>
"got: #{inspect(value)}"
end
end
# @on_load normalizes a bare function name to the {atom, 0} tuple form.
defp preprocess_attribute(:on_load, value) do
case value do
_ when is_atom(value) ->
{value, 0}
{atom, 0} = tuple when is_atom(atom) ->
tuple
_ ->
raise ArgumentError,
"@on_load is a built-in module attribute that annotates a function to be invoked " <>
"when the module is loaded. It should be an atom or a {atom, 0} tuple, " <>
"got: #{inspect(value)}"
end
end
# @impl accepts a boolean or a behaviour module.
defp preprocess_attribute(:impl, value) do
case value do
_ when is_boolean(value) ->
value
module when is_atom(module) and module != nil ->
# Attempt to compile behaviour but ignore failure (will warn later)
_ = Code.ensure_compiled(module)
value
_ ->
raise ArgumentError,
"@impl is a built-in module attribute that marks the next definition " <>
"as a callback implementation. It should be a module or a boolean, " <>
"got: #{inspect(value)}"
end
end
# Compilation hooks given as a bare module default to the conventional
# callback name in that module.
defp preprocess_attribute(:before_compile, atom) when is_atom(atom),
do: {atom, :__before_compile__}
defp preprocess_attribute(:after_compile, atom) when is_atom(atom),
do: {atom, :__after_compile__}
defp preprocess_attribute(:on_definition, atom) when is_atom(atom),
do: {atom, :__on_definition__}
# Typespec attributes carry quoted code and cannot go through
# Module.put_attribute/3 at all.
defp preprocess_attribute(key, _value)
when key in [:type, :typep, :opaque, :spec, :callback, :macrocallback] do
raise ArgumentError,
"attributes type, typep, opaque, spec, callback, and macrocallback " <>
"must be set directly via the @ notation"
end
defp preprocess_attribute(:external_resource, value) when not is_binary(value) do
raise ArgumentError,
"@external_resource is a built-in module attribute used for specifying file " <>
"dependencies. It should be a string the path to a file, got: #{inspect(value)}"
end
defp preprocess_attribute(:deprecated, value) when not is_binary(value) do
raise ArgumentError,
"@deprecated is a built-in module attribute that annotates a definition as deprecated. " <>
"It should be a string with the reason for the deprecation, got: #{inspect(value)}"
end
# @file accepts a path or a {path, line} tuple.
defp preprocess_attribute(:file, value) do
case value do
_ when is_binary(value) ->
value
{file, line} when is_binary(file) and is_integer(line) ->
value
_ ->
raise ArgumentError,
"@file is a built-in module attribute that annotates the file and line the next " <>
"definition comes from. It should be a string or {string, line} tuple as value, " <>
"got: #{inspect(value)}"
end
end
# Everything else is stored exactly as given.
defp preprocess_attribute(_key, value) do
value
end
  # Walks the documentation-metadata keyword list: reserved keys
  # (:opaque, :defaults) produce a warning and are dropped; every other
  # atom key is validated and merged into the accumulator map.
  defp preprocess_doc_meta([], _module, _line, map), do: map
  defp preprocess_doc_meta([{key, _} | tail], module, line, map)
       when key in [:opaque, :defaults] do
    message = "ignoring reserved documentation metadata key: #{inspect(key)}"
    IO.warn(message, attribute_stack(module, line))
    preprocess_doc_meta(tail, module, line, map)
  end
  defp preprocess_doc_meta([{key, value} | tail], module, line, map) when is_atom(key) do
    validate_doc_meta(key, value)
    preprocess_doc_meta(tail, module, line, Map.put(map, key, value))
  end
  # Validates the values of the built-in documentation metadata keys;
  # unknown keys are accepted unchanged by the final clause.
  defp validate_doc_meta(:since, value) when not is_binary(value) do
    raise ArgumentError,
          ":since is a built-in documentation metadata key. It should be a string representing " <>
            "the version in which the documented entity was added, got: #{inspect(value)}"
  end
  defp validate_doc_meta(:deprecated, value) when not is_binary(value) do
    raise ArgumentError,
          ":deprecated is a built-in documentation metadata key. It should be a string " <>
            "representing the replacement for the deprecated entity, got: #{inspect(value)}"
  end
  # :delegate_to must be a full {module, function, arity} tuple.
  defp validate_doc_meta(:delegate_to, value) do
    case value do
      {m, f, a} when is_atom(m) and is_atom(f) and is_integer(a) and a >= 0 ->
        :ok
      _ ->
        raise ArgumentError,
              ":delegate_to is a built-in documentation metadata key. It should be a three-element " <>
                "tuple in the form of {module, function, arity}, got: #{inspect(value)}"
    end
  end
  defp validate_doc_meta(_, _), do: :ok
  # Takes (and removes) the pending @doc entry from the set table. The
  # destructive :ets.take/2 ensures a @doc applies to exactly one
  # definition; when none is pending, fall back to the env line and no doc.
  defp get_doc_info(table, env) do
    case :ets.take(table, :doc) do
      [{:doc, {_, _} = pair, _}] ->
        pair
      [] ->
        {env.line, nil}
    end
  end
defp data_tables_for(module) do
:elixir_module.data_tables(module)
end
defp bag_lookup_element(table, key, pos) do
:ets.lookup_element(table, key, pos)
catch
:error, :badarg -> []
end
defp assert_not_compiled!(function_name_arity, module, extra_msg \\ "") do
open?(module) ||
raise ArgumentError,
assert_not_compiled_message(function_name_arity, module, extra_msg)
end
defp assert_not_compiled_message({function_name, arity}, module, extra_msg) do
mfa = "Module.#{function_name}/#{arity}"
"could not call #{mfa} because the module #{inspect(module)} is already compiled" <>
case extra_msg do
"" -> ""
_ -> ". " <> extra_msg
end
end
end
| 32.268479 | 113 | 0.664401 |
1c4064b0214f2fc938a648027460da030f6b1d58 | 1,795 | ex | Elixir | examples/ecto_job_priority_demo/lib/ecto_job_priority_demo/job_monitor.ex | ramondelemos/ecto_job | 781c6b43cfbfde866b0aa3f7370000910f684fe2 | [
"MIT"
] | null | null | null | examples/ecto_job_priority_demo/lib/ecto_job_priority_demo/job_monitor.ex | ramondelemos/ecto_job | 781c6b43cfbfde866b0aa3f7370000910f684fe2 | [
"MIT"
] | null | null | null | examples/ecto_job_priority_demo/lib/ecto_job_priority_demo/job_monitor.ex | ramondelemos/ecto_job | 781c6b43cfbfde866b0aa3f7370000910f684fe2 | [
"MIT"
] | null | null | null | defmodule EctoJobPriorityDemo.JobMonitor do
@moduledoc false
use GenServer
alias Ecto.Multi
alias EctoJobPriorityDemo.JobQueue
alias EctoJobPriorityDemo.Repo
  # Starts the monitor registered under `server`. `jobs` configures the
  # batch size (:count), the job :priority, and the re-schedule :period
  # (milliseconds, used with Process.send_after/4 in handle_info/2).
  def start_link(jobs \\ %{count: 1, priority: 0, period: 1000}, server) do
    GenServer.start_link(__MODULE__, jobs, name: server)
  end
  # Schedules the first batch immediately; the state is the running count
  # of jobs produced so far, starting at zero.
  def init(%{count: count, priority: priority, period: period}) do
    send(self(), {:produce_jobs, count, priority, period})
    {:ok, 0}
  end
  # Adds `value` to the monitor's counter (fire-and-forget cast).
  def update(server, value) do
    GenServer.cast(server, {:update, value})
  end
  # Returns the current counter value (synchronous call).
  def count(server) do
    GenServer.call(server, :count)
  end
  # Server
  def handle_cast({:update, value}, state) do
    {:noreply, state + value}
  end
  def handle_call(:count, _from, state) do
    {:reply, state, state}
  end
  # Produces a batch of `count` jobs at the given priority inside a single
  # transaction, then re-schedules itself after `period` ms so batches keep
  # flowing indefinitely.
  # NOTE(review): the transaction result is discarded — a failed insert
  # still bumps the counter and re-schedules the next batch.
  def handle_info({:produce_jobs, count, priority, period}, state) do
    Multi.new()
    |> Multi.run(:create_jobs, fn _repo, _ ->
      # Build the raw rows for insert_all/2; timestamps are truncated to
      # whole seconds via NaiveDateTime.truncate(:second).
      jobs =
        Enum.map(1..count, fn _ ->
          %{
            state: "AVAILABLE",
            expires: nil,
            schedule: DateTime.utc_now(),
            attempt: 0,
            max_attempts: 5,
            params: %{priority: priority},
            notify: nil,
            priority: priority,
            retain_for: :timer.seconds(1),
            updated_at: NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second),
            inserted_at: NaiveDateTime.utc_now() |> NaiveDateTime.truncate(:second)
          }
        end)
      {:ok, jobs}
    end)
    |> Multi.run(:insert_jobs, fn _repo, %{create_jobs: jobs} ->
      # Bulk-insert the rows built by the previous step.
      result =
        JobQueue
        |> Repo.insert_all(jobs)
      {:ok, result}
    end)
    |> Repo.transaction()
    Process.send_after(self(), {:produce_jobs, count, priority, period}, period)
    {:noreply, state + count}
  end
end
| 24.930556 | 83 | 0.604457 |
1c4071915a6572b83ceb74bed4c23ef3f598bfb5 | 1,172 | ex | Elixir | oeml-sdk/elixir/lib/oeml_restapi/model/position_data.ex | Martin-Molinero/coinapi-sdk | 8633f61e0809e7ee4032100fe08454e8c4ad5e0c | [
"MIT"
] | 357 | 2017-05-29T15:09:19.000Z | 2022-03-30T15:34:10.000Z | oeml-sdk/elixir/lib/oeml_restapi/model/position_data.ex | Martin-Molinero/coinapi-sdk | 8633f61e0809e7ee4032100fe08454e8c4ad5e0c | [
"MIT"
] | 68 | 2017-12-15T15:39:14.000Z | 2022-02-11T11:28:17.000Z | oeml-sdk/elixir/lib/oeml_restapi/model/position_data.ex | Martin-Molinero/coinapi-sdk | 8633f61e0809e7ee4032100fe08454e8c4ad5e0c | [
"MIT"
] | 199 | 2017-06-01T07:51:14.000Z | 2022-03-25T11:52:28.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
# NOTE(review): `OEML-RESTAPI` is not a valid Elixir alias — the hyphen
# makes this parse as `OEML - RESTAPI.Model.PositionData`, so this
# generated file cannot compile as-is. The generator's package name needs
# fixing upstream; do not hand-edit per the generator notice above.
defmodule OEML-RESTAPI.Model.PositionData do
  @moduledoc """
  The Position object.
  """
  # Poison encodes this struct to JSON directly via the derived protocol.
  @derive [Poison.Encoder]
  defstruct [
    :"symbol_id_exchange",
    :"symbol_id_coinapi",
    :"avg_entry_price",
    :"quantity",
    :"side",
    :"unrealized_pnl",
    :"leverage",
    :"cross_margin",
    :"liquidation_price",
    :"raw_data"
  ]
  # All fields are optional (nil) — payloads may omit any of them.
  @type t :: %__MODULE__{
    :"symbol_id_exchange" => String.t | nil,
    :"symbol_id_coinapi" => String.t | nil,
    :"avg_entry_price" => float() | nil,
    :"quantity" => float() | nil,
    :"side" => OEML-RESTAPI.Model.OrdSide.t | nil,
    :"unrealized_pnl" => float() | nil,
    :"leverage" => float() | nil,
    :"cross_margin" => boolean() | nil,
    :"liquidation_price" => float() | nil,
    :"raw_data" => map() | nil
  }
end
# Custom Poison decoding step: after generic parsing, the "side" field is
# deserialized into an OrdSide struct.
defimpl Poison.Decoder, for: OEML-RESTAPI.Model.PositionData do
  import OEML-RESTAPI.Deserializer
  # `options` are Poison's decode options, forwarded to the deserializer.
  def decode(value, options) do
    value
    |> deserialize(:"side", :struct, OEML-RESTAPI.Model.OrdSide, options)
  end
end
| 25.478261 | 91 | 0.634812 |
1c4085efa42f792cdfb1119d65f8fc706019352f | 2,796 | ex | Elixir | lib/distributed.ex | ertgl/distributed | cb2ccb61069f9b86999e8fcfc1834f1a5537bffb | [
"MIT"
] | 24 | 2017-06-24T01:41:18.000Z | 2021-11-05T20:26:27.000Z | lib/distributed.ex | ertgl/distributed | cb2ccb61069f9b86999e8fcfc1834f1a5537bffb | [
"MIT"
] | null | null | null | lib/distributed.ex | ertgl/distributed | cb2ccb61069f9b86999e8fcfc1834f1a5537bffb | [
"MIT"
] | 2 | 2017-06-24T01:58:25.000Z | 2017-06-28T10:14:20.000Z | defmodule Distributed do
@moduledoc """
  Make your systems distributed, replicated, and well-scaled — easily.
[](https://hex.pm/packages/distributed) [](https://hexdocs.pm/distributed) [](https://hex.pm/packages/distributed) [](https://github.com/ertgl/distributed) [](LICENSE.txt)
---
### Tutorial
This is an example of a replicated `GenServer`.
defmodule Storage.KV do
use GenServer
def start_link() do
GenServer.start_link(__MODULE__, [initial_state: %{}], name: __MODULE__.process_id())
end
def init(opts \\\\ []) do
{:ok, Keyword.get(opts, :initial_state, %{})}
end
def process_id() do
Storage.KV
end
def handle_cast({:set, key, value}, state) do
{:noreply, Map.put(state, key, value)}
end
def handle_call({:get, key, default}, _from, state) do
{:reply, Map.get(state, key, default), state}
end
def handle_call({:has, key}, _from, state) do
{:reply, Map.has_key?(state, key), state}
end
def handle_call({:pop, key, default}, _from, state) do
{value, new_state} = Map.pop(state, key, default)
{:reply, value, new_state}
end
def get(key, default \\\\ nil) do
Distributed.Scaler.GenServer.call(__MODULE__.process_id(), {:get, key, default})
end
def set(key, value) do
{_node_name, result} = Distributed.Replicator.GenServer.cast(__MODULE__.process_id(), {:set, key, value})
|> List.first()
result
end
def has?(key) do
Distributed.Scaler.GenServer.call(__MODULE__.process_id(), {:has, key})
end
def pop(key, default \\\\ nil) do
{_node_name, result} = Distributed.Replicator.GenServer.call(__MODULE__.process_id(), {:pop, key, default})
|> List.first()
result
end
end
You can see the example as a small project on [GitHub](https://github.com/ertgl/storage).
### Installation:
If [you have Hex](https://hex.pm), the package can be installed
by adding `:distributed` to your list of dependencies in `mix.exs`:
def application do
[
extra_applications: [
:distributed,
],
]
end
def deps do
[
{:distributed, "~> 0.1.3"},
]
end
"""
defmacro __using__(opts \\ []) do
scaler_opts = Keyword.get(opts, :scaler, [])
replicator_opts = Keyword.get(opts, :replicator, [])
quote do
use Distributed.Scaler, unquote(scaler_opts)
use Distributed.Replicator, unquote(replicator_opts)
end
end
end
| 27.683168 | 567 | 0.671674 |
1c40a6e986a266b7a0e6de3a8b62271dcefce843 | 265 | ex | Elixir | lib/authoritex/fast/form.ex | nulib/authoritex | a9b277e20873a886e2578f14f58acb277a501f01 | [
"MIT"
] | 2 | 2020-06-11T10:37:21.000Z | 2020-10-13T18:12:42.000Z | lib/authoritex/fast/form.ex | nulib/authoritex | a9b277e20873a886e2578f14f58acb277a501f01 | [
"MIT"
] | 21 | 2020-05-12T21:06:32.000Z | 2022-01-14T14:43:45.000Z | lib/authoritex/fast/form.ex | nulib/authoritex | a9b277e20873a886e2578f14f58acb277a501f01 | [
"MIT"
] | null | null | null | defmodule Authoritex.FAST.Form do
  # Human-readable description, reused in the moduledoc and forwarded to
  # the base implementation below.
  @desc "Faceted Application of Subject Terminology -- Form/Genre"
  @moduledoc "Authoritex implementation for #{@desc}"
  # "suggest55" is the FAST subauthority queried for form/genre headings;
  # "fast-form" is the code this authority registers itself under.
  use Authoritex.FAST.Base,
    subauthority: "suggest55",
    code: "fast-form",
    description: @desc
end
| 26.5 | 66 | 0.724528 |
1c40b17c6a2dcc4ac2afcfd17b9e911577b09330 | 5,184 | ex | Elixir | lib/pgex/types/type.ex | karlseguin/pgex | 2921f350c9f8c8f72cc75c7ede85728ea5dba1bf | [
"MIT"
] | null | null | null | lib/pgex/types/type.ex | karlseguin/pgex | 2921f350c9f8c8f72cc75c7ede85728ea5dba1bf | [
"MIT"
] | null | null | null | lib/pgex/types/type.ex | karlseguin/pgex | 2921f350c9f8c8f72cc75c7ede85728ea5dba1bf | [
"MIT"
] | null | null | null | defmodule PgEx.Type do
  # just a friendly name used in error messages and such
  @callback name() :: binary
  # Two-byte wire-format selector (Txt implementations return <<0, 0>>,
  # Bin implementations return <<0, 1>> — see PgEx.Types.Txt / Bin).
  @callback format() :: binary
  # Encodes a value into iodata for the wire, or :error on bad input.
  @callback encode(non_neg_integer, any) :: {:ok, iodata} | :error
  # Decodes `length` bytes received from the wire into an Elixir value.
  @callback decode(non_neg_integer, binary) :: {:ok, any} | :error
def get_name(module) do
module
|> Module.split()
|> List.last()
|> String.downcase()
end
end
defmodule PgEx.Types.Txt do
defmacro __using__(_) do
module = __CALLER__.module
name = PgEx.Type.get_name(module)
quote location: :keep do
@behaviour PgEx.Type
def name, do: unquote(name)
def format(), do: <<0, 0>>
end
end
end
defmodule PgEx.Types.Bin do
defmacro __using__(_) do
module = __CALLER__.module
name = PgEx.Type.get_name(module)
arr_name = name <> "[]"
arr = Module.concat(module, Array)
quote location: :keep do
@behaviour PgEx.Type
def name(), do: unquote(name)
def format(), do: <<0, 1>>
# The binary format for arrays is:
# number_of_dimesions::big-32, are_there_null::big-32, oid_of_values::big-32
#
# Followed by the following 64 bits for each dimension:
# number_of_values::big-32, lower_bound::big-32
#
# Followed by the length-prefixed values:
# length1::big-32, value1::(length1), ... lengthN::big-32, valueN::(lengthN)
#
# The key to how we decode is to build an array of the number_of_values.
# If we had {{{1, 2}, {3, 4}}, {{5, 6}, {7, 8}}, {{9, 10}, {11, 12}}}
# our sizes would be: [3, 2, 2]
#
# We build the structure recusively. Somethig like:
# 3 -> {1, 2}, {3, 4}
# 2 -> {1, 2}
# 2 -> 1
# 1 -> 2
# 1 -> {3, 4}
# 2 -> 3
# 1 -> 4
#
# 2 -> {5, 6}, {7, 8}
# 2 -> {5, 6}
# 2 -> 5
# 1 -> 6
# 1 -> {7, 8}
# 2 -> 7
# 1 -> 8
#
# 1 -> {{9, 10}, {11, 12}}}
# ....
#
# I'm not sure if that helps.
defmodule unquote(arr) do
@moduledoc false
def name(), do: unquote(arr_name) <> "[]"
def format(), do: <<0, 1>>
        # an empty array
        def decode(12, <<0, 0, 0, 0, _::binary>>), do: []
        # General case: strip the 12-byte array header (dims, has-nulls +
        # oid), read one {count, lower_bound} pair per dimension, then
        # decode the length-prefixed values recursively. The {"", arr}
        # match asserts every byte of the payload was consumed.
        def decode(_length, <<dims::big-32, _null_and_type::64, data::binary>>) do
          header_size = dims * 8
          <<info::bytes-size(header_size), data::binary>> = data
          counts = extract_counts(info, [])
          {"", arr} = decode_dimensions(counts, data)
          arr
        end
        # Collects the element count of each dimension in order; the
        # per-dimension lower bound is ignored.
        defp extract_counts(<<>>, counts), do: Enum.reverse(counts)
        defp extract_counts(<<count::big-32, _lower::big-32, info::binary>>, counts) do
          extract_counts(info, [count | counts])
        end
        # Innermost dimension: the next `count` entries are actual values.
        defp decode_dimensions([count], data) do # the last dimension
          decode_array(count, data, [])
        end
        # Outer dimension: decode `count` sub-arrays of the remaining dims.
        defp decode_dimensions([count | counts], data) do
          decode_dimension(count, counts, data, [])
        end
        # Accumulates `count` sub-arrays, returning the leftover binary
        # plus the decoded sub-arrays in original order.
        defp decode_dimension(0, _counts, data, acc), do: {data, Enum.reverse(acc)}
        defp decode_dimension(count, counts, data, acc) do
          {data, dim} = decode_dimensions(counts, data)
          decode_dimension(count - 1, counts, data, [dim | acc])
        end
        # Decodes `count` length-prefixed values; a length of -1
        # (<<255, 255, 255, 255>>) marks a NULL element, decoded as nil.
        defp decode_array(0, data, arr), do: {data, Enum.reverse(arr)}
        defp decode_array(count, <<255, 255, 255, 255, data::binary>>, arr) do
          decode_array(count - 1, data, [nil | arr])
        end
        defp decode_array(count, <<length::big-32, value::bytes-size(length), data::binary>>, arr) do
          # TODO: handle error (easiest to raise and catch in the outer decode function???)
          arr = [unquote(module).decode(length, value) | arr]
          decode_array(count - 1, data, arr)
        end
        # Empty array: zero dimensions, no-nulls flag, element oid only.
        def encode(type, []), do: <<0, 0, 0, 0, 0, 0, 0, 0, type::big-32>>
        # Header is dims / has-nulls / oid, then one {count, lower_bound 1}
        # pair per dimension: the first dimension's pair is written inline
        # (`size`), the deeper ones were collected in `sizes`.
        # NOTE(review): the has-nulls flag is always written as 0 even
        # though nil elements are encoded — confirm the server tolerates it.
        def encode(type, arr) do
          {size, sizes, values} = encode_dimensions(arr, true, 0, [], [])
          [<<(length(sizes)+1)::big-32, 0, 0, 0, 0, type::big-32, size::32, 0, 0, 0, 1>>, sizes, Enum.reverse(values)]
        end
defp encode_dimensions([], _first, size, sizes, values), do: {size, sizes, values}
defp encode_dimensions([peek | _other] = arr, first, _size, sizes, values) when not is_list(peek) do
{size, values} = Enum.reduce(arr, {0, values}, fn
nil, {size, values} -> {size + 1, [<<255, 255, 255, 255>> | values]}
value, {size, values} ->
encoded = unquote(module).encode(0, value) # TODO: ERROR
{size + 1, [[<<:erlang.iolist_size(encoded)::big-32, encoded::binary>>] | values]}
end)
{size, sizes, values}
end
        # Outer dimension: recurse into each sub-array. The size header is
        # recorded only while `first` is true — presumably because sibling
        # sub-arrays must all share the same length, so one header per
        # dimension suffices.
        defp encode_dimensions([arr | other], first, size, sizes, values) do
          {s, sizes, values} = encode_dimensions(arr, first, 0, sizes, values)
          sizes = case first do
            true -> [<<s::big-32, 0, 0, 0, 1>> | sizes]
            false -> sizes
          end
          encode_dimensions(other, false, size + 1, sizes, values)
        end
end
end
end
end
| 33.882353 | 118 | 0.540895 |
1c410a1eca5a95f32655db140337064f4810981b | 4,446 | exs | Elixir | test/ex_oneroster/web/controllers/demographic_controller_test.exs | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | 3 | 2018-09-06T11:15:07.000Z | 2021-12-27T15:36:51.000Z | test/ex_oneroster/web/controllers/demographic_controller_test.exs | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | null | null | null | test/ex_oneroster/web/controllers/demographic_controller_test.exs | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | null | null | null | defmodule ExOneroster.Web.DemographicControllerTest do
  use ExOneroster.Web.ConnCase
  # Every request in this module negotiates JSON responses.
  setup %{conn: conn} do
    {:ok, conn: put_req_header(conn, "accept", "application/json")}
  end
  # Index on a fresh database renders an empty collection.
  test "lists all entries on index", %{conn: conn} do
    conn = get conn, demographic_path(conn, :index)
    assert json_response(conn, 200)["data"] == []
  end
  # POST with valid params returns 201; the follow-up GET must render
  # every field exactly as submitted by the factory.
  test "creates demographic and renders demographic when data is valid", %{conn: conn} do
    demographic_params = build(:demographic)
    conn = post conn, demographic_path(conn, :create), demographic: params_for(:demographic, dateLastModified: demographic_params.dateLastModified)
    assert %{"id" => id} = json_response(conn, 201)["data"]
    conn = get conn, demographic_path(conn, :show, id)
    assert json_response(conn, 200)["data"] == %{
      "id" => id,
      "americanIndianOrAlaskaNative" => demographic_params.americanIndianOrAlaskaNative,
      "asian" => demographic_params.asian,
      "birthdate" => demographic_params.birthdate,
      "blackOrAfricanAmerican" => demographic_params.blackOrAfricanAmerican,
      "cityOfBirth" => demographic_params.cityOfBirth,
      "countryOfBirthCode" => demographic_params.countryOfBirthCode,
      "dateLastModified" => DateTime.to_iso8601(demographic_params.dateLastModified),
      "demographicRaceTwoOrMoreRaces" => demographic_params.demographicRaceTwoOrMoreRaces,
      "hispanicOrLatinoEthnicity" => demographic_params.hispanicOrLatinoEthnicity,
      "metadata" => demographic_params.metadata,
      "nativeHawaiianOrOtherPacificIslander" => demographic_params.nativeHawaiianOrOtherPacificIslander,
      "publicSchoolResidenceStatus" => demographic_params.publicSchoolResidenceStatus,
      "sex" => demographic_params.sex,
      "sourcedId" => demographic_params.sourcedId,
      "stateOfBirthAbbreviation" => demographic_params.stateOfBirthAbbreviation,
      "status" => demographic_params.status,
      "white" => demographic_params.white
    }
  end
  # A nil dateLastModified is rejected with 422 and a non-empty errors map.
  test "does not create demographic and renders errors when data is invalid", %{conn: conn} do
    conn = post conn, demographic_path(conn, :create), demographic: params_for(:demographic, dateLastModified: nil)
    assert json_response(conn, 422)["errors"] != %{}
  end
  # PUT with a changed sourcedId updates that field; all other fields must
  # round-trip unchanged from the inserted record. Note birthdate is
  # compared in ISO 8601 form here, unlike the create test above.
  test "updates chosen demographic and renders demographic when data is valid", %{conn: conn} do
    demographic = insert(:demographic)
    conn = put conn, demographic_path(conn, :update, demographic), demographic: params_for(:demographic, sourcedId: "Bond... James Bond", dateLastModified: demographic.dateLastModified)
    assert %{"id" => id} = json_response(conn, 200)["data"]
    conn = get conn, demographic_path(conn, :show, id)
    assert json_response(conn, 200)["data"] == %{
      "id" => id,
      "americanIndianOrAlaskaNative" => demographic.americanIndianOrAlaskaNative,
      "asian" => demographic.asian,
      "birthdate" => Date.to_iso8601(demographic.birthdate),
      "blackOrAfricanAmerican" => demographic.blackOrAfricanAmerican,
      "cityOfBirth" => demographic.cityOfBirth,
      "countryOfBirthCode" => demographic.countryOfBirthCode,
      "dateLastModified" => DateTime.to_iso8601(demographic.dateLastModified),
      "demographicRaceTwoOrMoreRaces" => demographic.demographicRaceTwoOrMoreRaces,
      "hispanicOrLatinoEthnicity" => demographic.hispanicOrLatinoEthnicity,
      "metadata" => demographic.metadata,
      "nativeHawaiianOrOtherPacificIslander" => demographic.nativeHawaiianOrOtherPacificIslander,
      "publicSchoolResidenceStatus" => demographic.publicSchoolResidenceStatus,
      "sex" => demographic.sex,
      "sourcedId" => "Bond... James Bond",
      "stateOfBirthAbbreviation" => demographic.stateOfBirthAbbreviation,
      "status" => demographic.status,
      "white" => demographic.white
    }
  end
  # A non-date dateLastModified is rejected with 422 and a non-empty errors map.
  test "does not update chosen demographic and renders errors when data is invalid", %{conn: conn} do
    demographic = insert(:demographic)
    conn = put conn, demographic_path(conn, :update, demographic), demographic: params_for(:demographic, dateLastModified: "not a date")
    assert json_response(conn, 422)["errors"] != %{}
  end
  # DELETE returns 204 and the subsequent show is a 404.
  test "deletes chosen demographic", %{conn: conn} do
    demographic = insert(:demographic)
    conn = delete conn, demographic_path(conn, :delete, demographic)
    assert response(conn, 204)
    assert_error_sent 404, fn ->
      get conn, demographic_path(conn, :show, demographic)
    end
  end
end
| 48.857143 | 185 | 0.728295 |
1c416b9ca4b96e1ea998b9fd1fa161782206b4fc | 188 | exs | Elixir | priv/repo/migrations/20181025002636_update_fees_adv_default.exs | mindsigns/soroban | c56962e1164a51cb5e383bbbfda880f098f181f1 | [
"MIT"
] | 1 | 2020-02-09T03:03:04.000Z | 2020-02-09T03:03:04.000Z | priv/repo/migrations/20181025002636_update_fees_adv_default.exs | mindsigns/soroban | c56962e1164a51cb5e383bbbfda880f098f181f1 | [
"MIT"
] | null | null | null | priv/repo/migrations/20181025002636_update_fees_adv_default.exs | mindsigns/soroban | c56962e1164a51cb5e383bbbfda880f098f181f1 | [
"MIT"
] | null | null | null | defmodule Soroban.Repo.Migrations.UpdateFeesAdvDefault do
  use Ecto.Migration
  # Gives jobs.fees_advanced a default of 0 for newly inserted rows.
  # NOTE(review): `modify` without a `from:` option makes the rollback
  # lossy — the previous default cannot be restored on ecto.rollback.
  def change do
    alter table(:jobs) do
      modify :fees_advanced, :integer, default: 0
    end
  end
end
| 18.8 | 57 | 0.734043 |
1c41762e7d1fd3fb7632245662fc3a5cb0851def | 34,212 | ex | Elixir | lib/elixir/lib/module.ex | rcoppolo/elixir | c4092e071f8b42f5a9ad213dd8b3632918097213 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module.ex | rcoppolo/elixir | c4092e071f8b42f5a9ad213dd8b3632918097213 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module.ex | rcoppolo/elixir | c4092e071f8b42f5a9ad213dd8b3632918097213 | [
"Apache-2.0"
] | null | null | null | defmodule Module do
@moduledoc ~S'''
Provides functions to deal with modules during compilation time.
It allows a developer to dynamically add, delete and register
attributes, attach documentation and so forth.
After a module is compiled, using many of the functions in
this module will raise errors, since it is out of their scope
to inspect runtime data. Most of the runtime data can be inspected
via the `__info__/1` function attached to each compiled module.
## Module attributes
Each module can be decorated with one or more attributes. The following ones
are currently defined by Elixir:
### @after_compile
A hook that will be invoked right after the current module is compiled.
Accepts a module or a tuple `{<module>, <function atom>}`.
See the "Compile callbacks" section below.
### @before_compile
A hook that will be invoked before the module is compiled.
Accepts a module or a tuple `{<module>, <function/macro atom>}`.
See the "Compile callbacks" section below.
### @behaviour (notice the British spelling)
Behaviours can be referenced by modules to ensure they implement
required specific function signatures defined by `@callback`.
For example, you can specify the `URI.Parser` behaviour as follows:
defmodule URI.Parser do
@doc "Parses the given URL"
@callback parse(uri_info :: URI.t) :: URI.t
@doc "Defines a default port"
@callback default_port() :: integer
end
And then a module may use it as:
defmodule URI.HTTP do
@behaviour URI.Parser
def default_port(), do: 80
def parse(info), do: info
end
If the behaviour changes or `URI.HTTP` does not implement
one of the callbacks, a warning will be raised.
### @compile
Defines options for module compilation. This is used to configure
both Elixir and Erlang compilers, as any other compilation pass
added by external tools. For example:
defmodule M do
@compile {:inline, my_fun: 1}
def my_fun(arg) do
to_string(arg)
end
end
Multiple uses of `@compile` will accumulate instead of overriding
previous ones. See the "Compile options" section below.
### @doc
Provides documentation for the function or macro that follows the
attribute.
Accepts a string (often a heredoc) or `false` where `@doc false` will
make the function/macro invisible to the documentation extraction tools
like ExDoc. For example:
defmodule M do
@doc "Hello world"
def hello do
"world"
end
@doc """
Sums `a` to `b`.
"""
def sum(a, b) do
a + b
end
end
### @dialyzer
Defines warnings to request or suppress when using a version of
`:dialyzer` that supports module attributes.
Accepts an atom, a tuple, or a list of atoms and tuples. For example:
defmodule M do
@dialyzer {:nowarn_function, my_fun: 1}
def my_fun(arg) do
M.not_a_function(arg)
end
end
For the list of supported warnings, see
[`:dialyzer` module](http://www.erlang.org/doc/man/dialyzer.html).
Multiple uses of `@dialyzer` will accumulate instead of overriding
previous ones.
### @external_resource
Specifies an external resource to the current module.
Many times a module embeds information from an external file. This
attribute allows the module to annotate which external resources
have been used.
Tools like Mix may use this information to ensure the module is
recompiled in case any of the external resources change.
### @file
Changes the filename used in stacktraces for the function or macro that
follows the attribute, such as:
defmodule M do
@doc "Hello world"
@file "hello.ex"
def hello do
"world"
end
end
### @moduledoc
Provides documentation for the current module, such as:
defmodule M do
@moduledoc """
A very useful module
"""
end
Accepts a string (which is often a heredoc) or `false` where
`@moduledoc false` will make the module invisible to the
documentation extraction tools like ExDoc.
### @on_definition
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
Accepts a module or a tuple `{<module>, <function atom>}`. See the
"Compile callbacks" section below.
### @on_load
A hook that will be invoked whenever the module is loaded.
Accepts a function atom of a function in the current module. The function
must have arity 0 (no arguments) and has to return `:ok`, otherwise the
loading of the module will be aborted. For example:
defmodule M do
@on_load :load_check
def load_check do
if some_condition() do
:ok
else
:abort
end
end
def some_condition do
false
end
end
### @vsn
  Specifies the module version. Accepts any valid Elixir value, for example:
defmodule M do
@vsn "1.0"
end
### Typespec attributes
The following attributes are part of typespecs and are also reserved by
Elixir:
* `@type` - defines a type to be used in `@spec`
* `@typep` - defines a private type to be used in `@spec`
* `@opaque` - defines an opaque type to be used in `@spec`
* `@spec` - provides a specification for a function
* `@callback` - provides a specification for a behaviour callback
* `@macrocallback` - provides a specification for a macro behaviour callback
* `@optional_callbacks` - specifies which behaviour callbacks and macro
behaviour callbacks are optional
### Custom attributes
In addition to the built-in attributes outlined above, custom attributes may
also be added. A custom attribute is any valid identifier prefixed with an
`@` and followed by a valid Elixir value:
defmodule M do
@custom_attr [some: "stuff"]
end
For more advanced options available when defining custom attributes, see
`register_attribute/3`.
## Compile callbacks
There are three callbacks that are invoked when functions are defined,
as well as before and immediately after the module bytecode is generated.
### @after_compile
A hook that will be invoked right after the current module is compiled.
Accepts a module or a tuple `{<module>, <function atom>}`. The function
must take two arguments: the module environment and its bytecode.
When just a module is provided, the function is assumed to be
`__after_compile__/2`.
#### Example
defmodule M do
@after_compile __MODULE__
def __after_compile__(env, _bytecode) do
IO.inspect env
end
end
### @before_compile
A hook that will be invoked before the module is compiled.
Accepts a module or a tuple `{<module>, <function/macro atom>}`. The
function/macro must take one argument: the module environment. If it's a
macro, its returned value will be injected at the end of the module definition
before the compilation starts.
When just a module is provided, the function/macro is assumed to be
`__before_compile__/1`.
Note: unlike `@after_compile`, the callback function/macro must
be placed in a separate module (because when the callback is invoked,
the current module does not yet exist).
#### Example
defmodule A do
defmacro __before_compile__(_env) do
quote do
def hello, do: "world"
end
end
end
defmodule B do
@before_compile A
end
### @on_definition
A hook that will be invoked when each function or macro in the current
module is defined. Useful when annotating functions.
Accepts a module or a tuple `{<module>, <function atom>}`. The function
must take 6 arguments:
- the module environment
- kind: `:def`, `:defp`, `:defmacro`, or `:defmacrop`
- function/macro name
- list of quoted arguments
- list of quoted guards
- quoted function body
Note the hook receives the quoted arguments and it is invoked before
the function is stored in the module. So `Module.defines?/2` will return
`false` for the first clause of every function.
If the function/macro being defined has multiple clauses, the hook will
be called for each clause.
Unlike other hooks, `@on_definition` will only invoke functions
and never macros. This is because the hook is invoked inside the context
of the function (and nested function definitions are not allowed in
Elixir).
When just a module is provided, the function is assumed to be
`__on_definition__/6`.
#### Example
defmodule H do
def on_def(_env, kind, name, args, guards, body) do
IO.puts "Defining #{kind} named #{name} with args:"
IO.inspect args
IO.puts "and guards"
IO.inspect guards
IO.puts "and body"
IO.puts Macro.to_string(body)
end
end
defmodule M do
@on_definition {H, :on_def}
def hello(arg) when is_binary(arg) or is_list(arg) do
"Hello" <> to_string(arg)
end
def hello(_) do
:ok
end
end
## Compile options
The `@compile` attribute accepts diverse options that is used by both
Elixir and Erlang compilers. Some of the common use cases are documented
below:
* `@compile :debug_info` - includes `:debug_info` regardless of the
setting in `Code.compiler_options/1`
* `@compile {:debug_info, false}` - disables `:debug_info` regardless
of the setting in `Code.compiler_options/1`
* `@compile {:inline, some_fun: 2, other_fun: 3}` - inlines the given
name/arity pairs
* `@compile {:autoload, false}` - disables automatic loading of
modules after compilation. Instead, the module will be loaded after
it is dispatched to
You can see a handful more options used by the Erlang compiler in
the documentation for the `:compile` module.
'''
  @doc """
  Provides runtime information about functions and macros defined by the
  module, enables docstring extraction, etc.
  Each module gets an `__info__/1` function when it's compiled. The function
  takes one of the following atoms:
  * `:functions` - keyword list of public functions along with their arities
  * `:macros` - keyword list of public macros along with their arities
  * `:module` - module name (`Module == Module.__info__(:module)`)
  In addition to the above, you may also pass to `__info__/1` any atom supported
  by `:erlang.module_info/0` which also gets defined for each compiled module.
  For a list of supported attributes and more information, see [Modules – Erlang Reference Manual](http://www.erlang.org/doc/reference_manual/modules.html#id77056).
  """
  # Bodiless head for documentation only: the actual clauses are injected
  # by the compiler into every compiled module.
  def __info__(kind)
@doc """
Checks if a module is open, i.e. it is currently being defined
and its attributes and functions can be modified.
"""
def open?(module) do
:elixir_module.is_open(module)
end
@doc """
Evaluates the quoted contents in the given module's context.

A list of environment options can also be given as argument.
See `Code.eval_string/3` for more information.

Raises an error if the module was already compiled.

## Examples

    defmodule Foo do
      contents = quote do: (def sum(a, b), do: a + b)
      Module.eval_quoted __MODULE__, contents
    end

    Foo.sum(1, 2) #=> 3

For convenience, you can pass `__ENV__/0` as an argument and
all options will be automatically extracted from the environment:

    defmodule Foo do
      contents = quote do: (def sum(a, b), do: a + b)
      Module.eval_quoted __MODULE__, contents, [], __ENV__
    end

    Foo.sum(1, 2) #=> 3
"""
# Bodiless head carrying the default arguments for all clauses below.
def eval_quoted(module, quoted, binding \\ [], opts \\ [])
# Convenience: a `Macro.Env` given in place of the module — use its module
# and merge the env fields into the options.
def eval_quoted(%Macro.Env{} = env, quoted, binding, opts) do
  eval_quoted(env.module, quoted, binding, Keyword.merge(Map.to_list(env), opts))
end
# Convenience: a `Macro.Env` given in place of the options list.
def eval_quoted(module, quoted, binding, %Macro.Env{} = env) do
  eval_quoted(module, quoted, binding, Map.to_list(env))
end
def eval_quoted(module, quoted, binding, opts) do
  # Only modules still being compiled can be evaluated into.
  assert_not_compiled!(:eval_quoted, module)
  # Clear the "last defined function" marker so errors raised by the
  # evaluated code are not attributed to an unrelated, earlier definition.
  :elixir_def.reset_last(module)
  {value, binding, _env, _scope} =
    :elixir.eval_quoted quoted, binding, Keyword.put(opts, :module, module)
  {value, binding}
end
@doc """
Creates a module with the given name and defined by
the given quoted expressions.

The line where the module is defined and its file **must**
be passed as options.

## Examples

    contents =
      quote do
        def world, do: true
      end

    Module.create(Hello, contents, Macro.Env.location(__ENV__))

    Hello.world #=> true

## Differences from `defmodule`

`Module.create/3` works similarly to `defmodule` and
return the same results. While one could also use
`defmodule` to define modules dynamically, this
function is preferred when the module body is given
by a quoted expression.

Another important distinction is that `Module.create/3`
allows you to control the environment variables used
when defining the module, while `defmodule` automatically
shares the same environment.
"""
# Bodiless head for documentation purposes.
def create(module, quoted, opts)
# Convenience: accept a `Macro.Env` and convert it to an options list.
def create(module, quoted, %Macro.Env{} = env) do
  create(module, quoted, Map.to_list(env))
end
def create(module, quoted, opts) when is_atom(module) and is_list(opts) do
  # :file is mandatory so compiler diagnostics can point somewhere useful.
  unless Keyword.has_key?(opts, :file) do
    raise ArgumentError, "expected :file to be given as option"
  end
  # A unique context counter keeps hygienic variables from separate
  # Module.create/3 invocations from clashing with each other.
  next = :erlang.unique_integer()
  line = Keyword.get(opts, :line, 0)
  quoted = :elixir_quote.linify_with_context_counter(line, {module, next}, quoted)
  :elixir_module.compile(module, quoted, [], :elixir.env_for_eval(opts))
end
@doc """
Builds a single alias out of the given list of aliases.

## Examples

    iex> Module.concat([Foo, Bar])
    Foo.Bar

    iex> Module.concat([Foo, "Bar"])
    Foo.Bar

"""
@spec concat([binary | atom]) :: atom
def concat(list) when is_list(list), do: :elixir_aliases.concat(list)
@doc """
Builds a single alias out of the two given aliases.

## Examples

    iex> Module.concat(Foo, Bar)
    Foo.Bar

    iex> Module.concat(Foo, "Bar")
    Foo.Bar

"""
@spec concat(binary | atom, binary | atom) :: atom
def concat(left, right), do: :elixir_aliases.concat([left, right])
@doc """
Builds an alias out of the given list of aliases, but only when the
resulting alias has already been referenced somewhere.

Fails with `ArgumentError` when the alias was never referenced.
Accepts charlists, binaries and atoms.

## Examples

    iex> Module.safe_concat([Module, Unknown])
    ** (ArgumentError) argument error

    iex> Module.safe_concat([List, Chars])
    List.Chars

"""
@spec safe_concat([binary | atom]) :: atom | no_return
def safe_concat(list) when is_list(list), do: :elixir_aliases.safe_concat(list)
@doc """
Builds an alias out of the two given aliases, but only when the resulting
alias has already been referenced somewhere.

Fails with `ArgumentError` when the alias was never referenced.
Accepts charlists, binaries and atoms.

## Examples

    iex> Module.safe_concat(Module, Unknown)
    ** (ArgumentError) argument error

    iex> Module.safe_concat(List, Chars)
    List.Chars

"""
@spec safe_concat(binary | atom, binary | atom) :: atom | no_return
def safe_concat(left, right), do: :elixir_aliases.safe_concat([left, right])
@doc """
Attaches documentation to a given function or type.

It expects the module the function/type belongs to, the line (a non
negative integer), the kind (`def` or `defmacro`), a tuple representing
the function and its arity, the function signature (the signature
should be omitted for types) and the documentation, which should
be either a binary or a boolean.

## Examples

    defmodule MyModule do
      Module.add_doc(__MODULE__, __ENV__.line + 1, :def, {:version, 0}, [], "Manually added docs")
      def version, do: 1
    end
"""
# Bodiless head carrying the default for `signature`.
def add_doc(module, line, kind, tuple, signature \\ [], doc)
# Private definitions cannot carry docs: error only when a doc was given.
def add_doc(_module, _line, kind, _tuple, _signature, doc) when kind in [:defp, :defmacrop, :typep] do
  if doc, do: {:error, :private_doc}, else: :ok
end
def add_doc(module, line, kind, tuple, signature, doc) when
    kind in [:def, :defmacro, :type, :opaque] and (is_binary(doc) or is_boolean(doc) or doc == nil) do
  assert_not_compiled!(:add_doc, module)
  table = data_table_for(module)
  signature = simplify_signature(signature)
  case :ets.lookup(table, {:doc, tuple}) do
    [] ->
      # First clause of this function/arity: store as-is.
      :ets.insert(table, {{:doc, tuple}, line, kind, signature, doc})
      :ok
    [{doc_tuple, line, _old_kind, old_sign, old_doc}] ->
      # Subsequent clause: `line` is rebound from the stored entry, so the
      # originally recorded line is kept; signatures are merged position by
      # position and a nil doc keeps the previously stored doc.
      :ets.insert(table, {
        doc_tuple,
        line,
        kind,
        merge_signatures(old_sign, signature, 1),
        if(is_nil(doc), do: old_doc, else: doc)
      })
      :ok
  end
end
# Normalizes an argument signature into the simplified form stored in docs.
defp simplify_signature(signature) do
  # First pass: collapse each argument and collect auto-generated name tags.
  {simplified, tags} = Enum.map_reduce(signature, [], &simplify_signature/2)
  # Second pass: number repeated auto-generated names (e.g. map1, map2).
  {expanded, _} = Enum.map_reduce(simplified, {tags, tags}, &expand_signature/2)
  expanded
end
# Collapses a single argument AST node into a doc-friendly form, accumulating
# the names that were auto-generated (for later numbering). Clause order
# matters: structural forms are handled before literal fallbacks.
# Default argument: simplify the left side, keep the `\\ right` part.
defp simplify_signature({:\\, _, [left, right]}, acc) do
  {left, acc} = simplify_signature(left, acc)
  {{:\\, [], [left, right]}, acc}
end
# Pattern match in the head: only the right-hand side is kept for docs.
defp simplify_signature({:=, _, [_, right]}, acc) do
  simplify_signature(right, acc)
end
# Plain variable: strip a leading underscore from its displayed name.
defp simplify_signature({var, _, atom}, acc) when is_atom(atom) do
  case Atom.to_string(var) do
    "_" <> rest -> {{String.to_atom(rest), [], Elixir}, acc}
    _ -> {{var, [], nil}, acc}
  end
end
# Struct pattern: auto-generate a name from the struct's underscored name.
defp simplify_signature({:%, _, [left, _]}, acc) when is_atom(left) do
  struct_name = String.to_atom(camelcase_to_underscore(List.last(split(left))))
  autogenerated(acc, struct_name)
end
# Bare map pattern: generic `map` name.
defp simplify_signature({:%{}, _, _}, acc) do
  autogenerated(acc, :map)
end
# Literal arguments: auto-generate a name from the literal's type.
defp simplify_signature(other, acc) when is_integer(other), do: autogenerated(acc, :int)
defp simplify_signature(other, acc) when is_boolean(other), do: autogenerated(acc, :bool)
defp simplify_signature(other, acc) when is_atom(other), do: autogenerated(acc, :atom)
defp simplify_signature(other, acc) when is_list(other), do: autogenerated(acc, :list)
defp simplify_signature(other, acc) when is_float(other), do: autogenerated(acc, :float)
defp simplify_signature(other, acc) when is_binary(other), do: autogenerated(acc, :binary)
defp simplify_signature(_, acc), do: autogenerated(acc, :arg)
# Records an auto-generated name in the accumulator so repeated names can be
# numbered in the second pass.
defp autogenerated(acc, key) do
  {key, [key | acc]}
end
# Second pass over a simplified signature: turns auto-generated name atoms
# into AST variables, numbering names that occur more than once.
defp expand_signature(key, {all_keys, acc}) when is_atom(key) do
  case previous_values(key, all_keys, acc) do
    {i, acc} -> {{:"#{key}#{i}", [], Elixir}, {all_keys, acc}}
    :none -> {{key, [], Elixir}, {all_keys, acc}}
  end
end
# Anything that is already a proper AST node passes through untouched.
defp expand_signature(term, {_, _} = acc) do
  {term, acc}
end
# Returns the ordinal of the current occurrence of `key`: `:none` when the
# name appears only once overall, otherwise its 1-based index among equal
# names plus the updated accumulator.
defp previous_values(key, all_keys, acc) do
  total_occurrences = occurrences(key, all_keys)
  if total_occurrences == 1 do
    :none
  else
    # `acc` still holds the not-yet-consumed occurrences, so the current
    # one's index is total minus remaining plus one.
    index = total_occurrences - occurrences(key, acc) + 1
    {index, :lists.delete(key, acc)}
  end
end
# Counts how many times `key` appears in `list`.
#
# Uses `Enum.count/2` directly instead of the previous
# `length(:lists.filter(...))`, which materialized a throwaway
# intermediate list just to take its length.
defp occurrences(key, list) do
  Enum.count(list, &(&1 == key))
end
# Converts a CamelCase struct name to snake_case for use as a doc argument
# name. Only ASCII letters are handled (`c + 32` lowercases an ASCII
# uppercase letter), which suffices for conventional Elixir module names.
defp camelcase_to_underscore(<<c::utf8, rest::binary>>) when c >= ?A and c <= ?Z,
  do: do_camelcase_to_underscore(rest, <<c + 32::utf8>>)
# An uppercase letter inside the name starts a new word: emit `_` + lowercase.
defp do_camelcase_to_underscore(<<c::utf8, rest::binary>>, acc) when c >= ?A and c <= ?Z,
  do: do_camelcase_to_underscore(rest, <<acc::binary, ?_, c + 32::utf8>>)
defp do_camelcase_to_underscore(<<c::utf8, rest::binary>>, acc),
  do: do_camelcase_to_underscore(rest, <<acc::binary, c>>)
defp do_camelcase_to_underscore(<<>>, acc),
  do: acc
# Merge
# Merges a previously stored signature with a newly given one, position by
# position; both lists must have the same length.
defp merge_signatures([h1 | t1], [h2 | t2], i) do
  [merge_signature(h1, h2, i) | merge_signatures(t1, t2, i + 1)]
end
defp merge_signatures([], [], _) do
  []
end
# A default (`\\`) on the older entry is kept, merging only its left side...
defp merge_signature({:\\, line, [left, right]}, newer, i) do
  {:\\, line, [merge_signature(left, newer, i), right]}
end
# ...while a default on the newer entry is stripped before comparing.
defp merge_signature(older, {:\\, _, [left, _]}, i) do
  merge_signature(older, left, i)
end
# The older signature, when given, always have higher precedence
defp merge_signature({_, _, nil} = older, _newer, _), do: older
defp merge_signature(_older, {_, _, nil} = newer, _), do: newer
# Both are a guess, so check if they are the same guess
defp merge_signature({var, _, _} = older, {var, _, _}, _), do: older
# Otherwise, returns a generic guess
defp merge_signature({_, line, _}, _newer, i), do: {:"arg#{i}", line, Elixir}
@doc """
Checks whether `module` defines the given function or macro under any
visibility/kind. Use `defines?/3` to assert for a specific kind.

## Examples

    defmodule Example do
      Module.defines? __MODULE__, {:version, 0} #=> false
      def version, do: 1
      Module.defines? __MODULE__, {:version, 0} #=> true
    end

"""
def defines?(module, tuple) when is_tuple(tuple) do
  assert_not_compiled!(:defines?, module)
  lookup = :ets.lookup(defs_table_for(module), {:def, tuple})
  match?([_ | _], lookup)
end
@doc """
Checks whether `module` defines a function or macro of the given `kind`.

`kind` can be any of `:def`, `:defp`, `:defmacro` or `:defmacrop`.

## Examples

    defmodule Example do
      Module.defines? __MODULE__, {:version, 0}, :defp #=> false
      def version, do: 1
      Module.defines? __MODULE__, {:version, 0}, :defp #=> false
    end

"""
def defines?(module, tuple, kind) do
  assert_not_compiled!(:defines?, module)
  lookup = :ets.lookup(defs_table_for(module), {:def, tuple})
  match?([{_, ^kind, _, _, _, _, _}], lookup)
end
@doc """
Returns all functions defined in `module` as `{name, arity}` pairs.

## Examples

    defmodule Example do
      def version, do: 1
      Module.definitions_in __MODULE__ #=> [{:version, 0}]
    end

"""
def definitions_in(module) do
  assert_not_compiled!(:definitions_in, module)
  module
  |> defs_table_for()
  |> :ets.match({{:def, :"$1"}, :_, :_, :_, :_, :_, :_})
  |> :lists.concat()
end
@doc """
Returns all functions defined in `module` that are of the given kind
(`:def`, `:defp`, `:defmacro` or `:defmacrop`).

## Examples

    defmodule Example do
      def version, do: 1
      Module.definitions_in __MODULE__, :def  #=> [{:version, 0}]
      Module.definitions_in __MODULE__, :defp #=> []
    end

"""
def definitions_in(module, kind) do
  assert_not_compiled!(:definitions_in, module)
  module
  |> defs_table_for()
  |> :ets.match({{:def, :"$1"}, kind, :_, :_, :_, :_, :_})
  |> :lists.concat()
end
@doc """
Makes the given functions in `module` overridable.

An overridable function is lazily defined, allowing a
developer to customize it. See `Kernel.defoverridable/1` for
more information and documentation.
"""
def make_overridable(module, tuples) do
  assert_not_compiled!(:make_overridable, module)
  :lists.foreach(fn {name, arity} = tuple ->
    # Remove the current definition from the module; it is stashed away and
    # re-emitted later only if no override is provided.
    case :elixir_def.take_definition(module, tuple) do
      false ->
        raise ArgumentError,
          "cannot make function #{name}/#{arity} overridable because it was not defined"
      {{_def, :defmacrop, _line, _file, _check, _location, _defaults}, _clauses} ->
        raise ArgumentError,
          "cannot make private macro #{name}/#{arity} overridable, overriding " <>
          "private macros is not supported"
      clause ->
        {{_def, kind, _line, _file, _check, _location, _defaults}, _clauses} = clause
        # TODO: Remove on v2.0
        if kind == :defp do
          IO.warn "making private functions (#{name}/#{arity} in this case) overridable is deprecated"
        end
        # Yank the locals-tracker edges of the overridden function unless we
        # are bootstrapping the compiler itself (no tracker available then).
        neighbours =
          if :elixir_compiler.get_opt(:internal) do
            []
          else
            Module.LocalsTracker.yank(module, tuple)
          end
        old = :elixir_def_overridable.overridable(module)
        # Bump the override counter when the same tuple is made overridable
        # again; it starts at 1 for the first time.
        count = case :maps.find(tuple, old) do
          {:ok, {count, _, _, _}} -> count + 1
          :error -> 1
        end
        new = :maps.put(tuple, {count, clause, neighbours, false}, old)
        :elixir_def_overridable.overridable(module, new)
    end
  end, tuples)
end
@doc """
Returns `true` if the function/macro identified by `tuple` (a
`{name, arity}` pair) is currently marked as overridable in `module`.
"""
def overridable?(module, tuple) do
  module
  |> :elixir_def_overridable.overridable()
  |> Map.has_key?(tuple)
end
@doc """
Stores an Erlang attribute with the given key and value on `module`.

How the value is stored depends on whether the attribute was
registered via `register_attribute/3` (e.g. as accumulating).

## Examples

    defmodule MyModule do
      Module.put_attribute __MODULE__, :custom_threshold_for_lib, 10
    end

"""
def put_attribute(module, key, value), do: put_attribute(module, key, value, nil)
@doc false
# Internal variant: `stack` is the caller's stacktrace, used to attach a
# warning when a doc attribute is silently redefined; `nil` disables it.
def put_attribute(module, key, value, stack) when is_atom(key) do
  assert_not_compiled!(:put_attribute, module)
  table = data_table_for(module)
  # Normalize/validate special attributes (:doc, :on_load, :behaviour, ...).
  value = preprocess_attribute(key, value)
  acc = :ets.lookup_element(table, {:elixir, :acc_attributes}, 2)
  warn_if_redefining_doc_attribute(stack, table, key)
  new =
    if :lists.member(key, acc) do
      # Accumulating attribute: prepend to whatever was stored before.
      case :ets.lookup(table, key) do
        [{^key, old}] -> [value | old]
        [] -> [value]
      end
    else
      value
    end
  :ets.insert(table, {key, new})
  value
end
@doc """
Reads the given attribute from `module`.

Attributes registered with `accumulate: true` via
`Module.register_attribute/3` always come back as a list. For any other
attribute, `nil` is returned when no value was ever set.

The `@` macro compiles to a call to this function. For example,
the following code:

    @foo

Expands close to:

    Module.get_attribute(__MODULE__, :foo)

## Examples

    defmodule Foo do
      Module.put_attribute __MODULE__, :value, 1
      Module.get_attribute __MODULE__, :value #=> 1

      Module.register_attribute __MODULE__, :value, accumulate: true
      Module.put_attribute __MODULE__, :value, 1
      Module.get_attribute __MODULE__, :value #=> [1]
    end

"""
@spec get_attribute(atom, atom) :: term
def get_attribute(module, key), do: get_attribute(module, key, nil)
@doc """
Deletes all stored values for the given attribute key.

## Examples

    defmodule MyModule do
      Module.put_attribute __MODULE__, :custom_threshold_for_lib, 10
      Module.delete_attribute __MODULE__, :custom_threshold_for_lib
    end

"""
@spec delete_attribute(atom, atom) :: :ok
def delete_attribute(module, key) when is_atom(key) do
  assert_not_compiled!(:delete_attribute, module)
  :ets.delete(data_table_for(module), key)
  :ok
end
@doc """
Registers an attribute. By registering an attribute, a developer
is able to customize how Elixir will store and accumulate the
attribute values.

## Options

When registering an attribute, two options can be given:

  * `:accumulate` - several calls to the same attribute will
    accumulate instead of override the previous one. New attributes
    are always added to the top of the accumulated list.

  * `:persist` - the attribute will be persisted in the Erlang
    Abstract Format. Useful when interfacing with Erlang libraries.

By default, both options are `false`.

## Examples

    defmodule MyModule do
      Module.register_attribute __MODULE__,
        :custom_threshold_for_lib,
        accumulate: true, persist: false

      @custom_threshold_for_lib 10
      @custom_threshold_for_lib 20
      @custom_threshold_for_lib #=> [20, 10]
    end

"""
def register_attribute(module, new, opts) when is_atom(new) do
  assert_not_compiled!(:register_attribute, module)
  table = data_table_for(module)
  # Both lists live under reserved {:elixir, ...} keys in the data table;
  # the new attribute name is prepended to the relevant list(s).
  if Keyword.get(opts, :persist) do
    old = :ets.lookup_element(table, {:elixir, :persisted_attributes}, 2)
    :ets.insert(table, {{:elixir, :persisted_attributes}, [new | old]})
  end
  if Keyword.get(opts, :accumulate) do
    old = :ets.lookup_element(table, {:elixir, :acc_attributes}, 2)
    :ets.insert(table, {{:elixir, :acc_attributes}, [new | old]})
  end
end
@doc """
Splits the given module name into its binary segments.

## Examples

    iex> Module.split Very.Long.Module.Name.And.Even.Longer
    ["Very", "Long", "Module", "Name", "And", "Even", "Longer"]

"""
def split(module) when is_atom(module) do
  module |> Atom.to_string() |> split()
end
def split("Elixir." <> name) do
  :binary.split(name, ".", [:global])
end
@doc false
# Used internally to compile documentation. This function
# is private and must be used only internally.
#
# Pops the pending @doc for the definition being compiled and attaches it,
# together with a simplified argument signature, via add_doc/6.
def compile_doc(env, kind, name, args, _guards, _body) do
  module = env.module
  table = data_table_for(module)
  arity = length(args)
  pair = {name, arity}
  {line, doc} = get_doc_info(table, env)
  # Arguments are not expanded for the docs, but we make an exception for
  # module attributes and for structs (aliases to be precise).
  args = Macro.prewalk args, fn
    {:@, _, _} = attr ->
      Macro.expand_once(attr, env)
    {:%, meta, [aliases, fields]} ->
      {:%, meta, [Macro.expand_once(aliases, env), fields]}
    x ->
      x
  end
  case add_doc(module, line, kind, pair, args, doc) do
    :ok ->
      :ok
    {:error, :private_doc} ->
      # A @doc was written on a private definition: warn and drop it.
      :elixir_errors.warn line, env.file,
        "function #{name}/#{arity} is private, " <>
        "@doc's are always discarded for private functions"
  end
  :ok
end
@doc false
# Used internally to compile types. This function
# is private and must be used only internally.
#
# Accumulates `value` under `key` (e.g. :spec, :type, :callback) in the
# module's data table, prepending to previously stored values.
def store_typespec(module, key, value) when is_atom(key) do
  # Bug fix: the error tag previously said :put_attribute (a copy-paste
  # leftover), which produced a misleading "could not call put_attribute"
  # message when invoked on an already-compiled module.
  assert_not_compiled!(:store_typespec, module)
  table = data_table_for(module)
  new =
    case :ets.lookup(table, key) do
      [{^key, old}] -> [value | old]
      [] -> [value]
    end
  :ets.insert(table, {key, new})
end
@doc false
# Internal variant: `stack` is the caller's stacktrace (or nil). When a
# stacktrace is given and a plain attribute was never set, a warning about
# the undefined attribute is emitted.
def get_attribute(module, key, stack) when is_atom(key) and (is_list(stack) or is_nil(stack)) do
  assert_not_compiled!(:get_attribute, module)
  table = data_table_for(module)
  case :ets.lookup(table, key) do
    [{^key, val}] ->
      val
    [] ->
      acc = :ets.lookup_element(table, {:elixir, :acc_attributes}, 2)
      cond do
        # Accumulating attributes default to the empty list, never nil.
        :lists.member(key, acc) ->
          []
        is_list(stack) ->
          IO.warn "undefined module attribute @#{key}, " <>
            "please remove access to @#{key} or explicitly set it before access", stack
          nil
        true ->
          nil
      end
  end
end
## Helpers
# Validates/normalizes special attribute values before they are stored.
# Doc attributes must arrive as {line, doc} with doc a binary/boolean/nil.
defp preprocess_attribute(key, value) when key in [:moduledoc, :typedoc, :doc] do
  case value do
    {line, doc} when is_integer(line) and (is_binary(doc) or is_boolean(doc) or is_nil(doc)) ->
      value
    {line, doc} when is_integer(line) ->
      # Here, either the user used "@moduledoc :not_a_binary" or
      # "Module.put_attribute(..., {1, :not_a_binary})". By showing just the
      # "doc" value in the error, it should be clear in both cases.
      raise ArgumentError,
        "expected the #{key} attribute to contain a binary, a boolean, or nil, got: #{inspect(doc)}"
    _other ->
      # Here, we're sure it's from Module.put_attribute/3 because it's not a
      # tuple with an int as the first element (which is what we create with
      # @).
      raise ArgumentError,
        "expected the #{key} attribute to be {line, doc} (where \"doc\" is " <>
        "a binary, a boolean, or nil), got: #{inspect(value)}"
  end
end
# @on_load shorthand: a bare function name means arity 0.
defp preprocess_attribute(:on_load, atom) when is_atom(atom) do
  {atom, 0}
end
defp preprocess_attribute(:behaviour, atom) when is_atom(atom) do
  # Attempt to compile behaviour but ignore failure (will warn later)
  _ = Code.ensure_compiled(atom)
  atom
end
defp preprocess_attribute(:file, file) when is_binary(file) do
  file
end
# Compile hooks given as a bare module expand to their conventional callback.
defp preprocess_attribute(:before_compile, atom) when is_atom(atom),
  do: {atom, :__before_compile__}
defp preprocess_attribute(:after_compile, atom) when is_atom(atom),
  do: {atom, :__after_compile__}
defp preprocess_attribute(:on_definition, atom) when is_atom(atom),
  do: {atom, :__on_definition__}
# Typespec attributes must go through @ so the compiler can process them.
defp preprocess_attribute(key, _value) when key in [:type, :typep, :export_type, :opaque, :callback, :macrocallback, :optional_callbacks] do
  raise ArgumentError, "attributes type, typep, export_type, opaque, callback, macrocallback, and optional_callbacks " <>
    "must be set directly via the @ notation"
end
# Everything else is stored untouched.
defp preprocess_attribute(_key, value) do
  value
end
# Pops (reads and removes) the pending @doc entry from the module's data
# table. When no doc is pending, falls back to the env's current line and
# a nil doc.
defp get_doc_info(table, env) do
  case :ets.take(table, :doc) do
    [{:doc, {_, _} = pair}] -> pair
    [] -> {env.line, nil}
  end
end
# ETS table holding the module's attributes while it is being compiled.
defp data_table_for(module) do
  :elixir_module.data_table(module)
end
# ETS table holding the module's function/macro definitions during compile.
defp defs_table_for(module) do
  :elixir_module.defs_table(module)
end
# Raises unless `module` is still open (being compiled). `fun` is the name
# of the public API call and is used only in the error message.
defp assert_not_compiled!(fun, module) do
  open?(module) ||
    raise ArgumentError,
      "could not call #{fun} on module #{inspect module} because it was already compiled"
end
# Warns when a doc attribute (@doc/@typedoc/@moduledoc) is set while a
# previous, non-false value is still pending — the old value would be lost.
# A stored `false` (e.g. `@doc false`) is exempt from the warning.
defp warn_if_redefining_doc_attribute(stack, table, key)
     when is_list(stack) and key in [:doc, :typedoc, :moduledoc] do
  case :ets.lookup(table, key) do
    [{_, {line, val}}] when val != false ->
      IO.warn "redefining @#{key} attribute previously set at line #{line}", stack
    _ ->
      false
  end
end
# No stacktrace (internal call) or non-doc attribute: nothing to warn about.
defp warn_if_redefining_doc_attribute(nil, _table, _key), do: false
end
| 29.67216 | 164 | 0.656086 |
1c417ec053b920acf5927e5f6d5cdb2fb947c68e | 13,186 | ex | Elixir | lib/gim/schema.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | lib/gim/schema.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | lib/gim/schema.ex | jan-sti/gim | 1b8be6c2163577f375825170cc9b01674e59b646 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | defmodule Gim.Schema do
@moduledoc """
Defines a schema.
## Example
defmodule User do
use Gim.Schema
schema do
property :name, index: :unique
property :age, default: 0, index: true
has_edges :author_of, Post, reflect: :authored_by
end
end
## Reflection
Any schema module will generate the `__schema__` function that can be
used for runtime introspection of the schema:
* `__schema__(:primary_key)` - Returns the primary unique indexed property name or nil;
* `__schema__(:properties)` - Returns a list of all property names;
* `__schema__(:indexes)` - Returns a list of all indexed property names;
* `__schema__(:index, property)` - Returns how the given property is indexed;
* `__schema__(:indexes_unique)` - Returns a list of all unique indexed property names;
* `__schema__(:indexes_non_unique)` - Returns a list of all non-unique indexed property names;
* `__schema__(:associations)` - Returns a list of all association names;
* `__schema__(:association, assoc)` - Returns the association reflection of the given assoc;
* `__schema__(:type, assoc)` - Returns the type of the given association;
Furthermore, `__struct__` functions are
defined so structs functionalities are available.
"""
@doc false
defmacro __using__(_) do
quote do
import Gim.Schema, only: [schema: 1]
Module.register_attribute(__MODULE__, :gim_props, accumulate: true)
Module.register_attribute(__MODULE__, :gim_assocs, accumulate: true)
end
end
@doc """
Defines a schema struct with a source name and property definitions.
"""
defmacro schema(do: block) do
prelude =
quote do
@after_compile unquote(__MODULE__)
Module.register_attribute(__MODULE__, :struct_fields, accumulate: true)
Gim.Schema.__meta__(__MODULE__, :__id__, nil)
Gim.Schema.__meta__(__MODULE__, :__repo__, nil)
import Gim.Schema
unquote(block)
end
postlude =
quote unquote: false do
defstruct @struct_fields
props = @gim_props |> Enum.reverse()
assocs = @gim_assocs |> Enum.reverse()
def __schema__(:gim), do: true
def __schema__(:properties), do: unquote(Enum.map(props, &elem(&1, 0)))
def __schema__(:indexes) do
unquote(props |> Enum.filter(&elem(&1, 1)) |> Enum.map(&elem(&1, 0)))
end
for {prop, index} <- @gim_props do
def __schema__(:index, unquote(prop)), do: unquote(index)
end
def __schema__(:index, _), do: nil
def __schema__(:indexes_unique) do
unquote(
props
|> Enum.filter(&(elem(&1, 1) in [:unique, :primary]))
|> Enum.map(&elem(&1, 0))
)
end
def __schema__(:indexes_non_unique) do
unquote(props |> Enum.filter(&(elem(&1, 1) == true)) |> Enum.map(&elem(&1, 0)))
end
def __schema__(:associations), do: unquote(Enum.map(assocs, &elem(&1, 0)))
for {name, cardinality, type, refelct, _} <- @gim_assocs do
def __schema__(:association, unquote(name)) do
unquote(Macro.escape({name, cardinality, type, refelct}))
end
end
def __schema__(:association, _), do: nil
def __schema__(:gim_assocs), do: unquote(Macro.escape(@gim_assocs))
for {name, _cardinality, type, _reflect} <- @gim_assocs do
def __schema__(:type, unquote(name)), do: unquote(type)
end
def __schema__(:type, _), do: nil
end
quote do
unquote(prelude)
unquote(postlude)
end
end
## API
@doc """
Defines a property with given name on the node type schema.
The property is not typed, you can store any valid term.
## Example
schema do
property :uuid, index: :primary
property :fullname, index: :unique
property :birthday, index: true
property :hobbies
end
## Options
* `:default` - Sets the default value on the schema and the struct.
The default value is calculated at compilation time, so don't use
expressions like `DateTime.utc_now` or `Ecto.UUID.generate` as
they would then be the same for all nodes.
* `:index` - When `true`, the property is indexed for lookups.
When `:unique`, the property uniquely indexed, which is enforced.
When `:primary`, the property is uniquely indexed and used as
primary key.
"""
defmacro property(name, opts \\ []) do
quote do
Gim.Schema.__property__(__MODULE__, unquote(name), unquote(opts))
end
end
@doc """
Defines a named (i.e. implicitly labeled) edge to a given type on the schema.
You can store multiple edges with this name.
## Example
schema do
has_edges :categories, Category, reflect: :publications
end
## Options
* `:reflect` - Sets the edge name on the target type to automatically
add a reflected edge on the target node.
"""
defmacro has_edges(name, type, opts \\ []) do
reflect = Keyword.get(opts, :reflect)
type = Macro.expand(type, __CALLER__)
caller_stacktrace = Macro.Env.stacktrace(__CALLER__)
quote do
Gim.Schema.__has_edges__(
__MODULE__,
unquote(name),
unquote(type),
unquote(reflect),
unquote(opts),
unquote(Macro.escape(caller_stacktrace))
)
def unquote(name)(nodes) when is_list(nodes) do
Enum.map(nodes, fn %{:__repo__ => repo, unquote(name) => edges} ->
repo.fetch!(unquote(type), edges)
end)
|> Enum.uniq()
end
def unquote(name)(%{:__repo__ => repo, unquote(name) => edges} = _node) do
repo.fetch!(unquote(type), edges)
end
def unquote(:"add_#{name}")(struct, nodes) when is_list(nodes) do
ids = Enum.map(nodes, fn %{__id__: id} -> id end)
Map.update!(struct, unquote(name), fn x -> ids ++ x end)
end
def unquote(:"add_#{name}")(struct, %{__id__: id} = _node) do
Map.update!(struct, unquote(name), fn x -> [id | x] end)
end
def unquote(:"delete_#{name}")(struct, nodes) when is_list(nodes) do
Map.update!(struct, unquote(name), fn edges ->
Enum.reject(edges, &Enum.member?(nodes, &1))
end)
end
def unquote(:"delete_#{name}")(struct, %{__id__: id} = _node) do
Map.update!(struct, unquote(name), &List.delete(&1, id))
end
def unquote(:"set_#{name}")(struct, nodes) when is_list(nodes) do
ids = Enum.map(nodes, fn %{__id__: id} -> id end)
Map.put(struct, unquote(name), ids)
end
def unquote(:"set_#{name}")(struct, %{__id__: id} = _node) do
Map.put(struct, unquote(name), [id])
end
def unquote(:"clear_#{name}")(struct) do
Map.put(struct, unquote(name), [])
end
end
end
@doc """
Defines a named (i.e. implicitly labeled) edge to a given type on the schema.
You can have zero or one edge with this name.
## Example
schema do
has_edge :authored_by, Person, reflect: :author_of
end
## Options
* `:reflect` - Sets the edge name on the target type to automatically
add a reflected edge on the target node.
"""
defmacro has_edge(name, type, opts \\ []) do
reflect = Keyword.get(opts, :reflect)
type = Macro.expand(type, __CALLER__)
caller_stacktrace = Macro.Env.stacktrace(__CALLER__)
quote do
Gim.Schema.__has_edge__(
__MODULE__,
unquote(name),
unquote(type),
unquote(reflect),
unquote(opts),
unquote(Macro.escape(caller_stacktrace))
)
def unquote(name)(nodes) when is_list(nodes) do
Enum.map(nodes, fn %{:__repo__ => repo, unquote(name) => edge} ->
repo.fetch!(unquote(type), edge)
end)
|> Enum.uniq()
end
def unquote(name)(%{:__repo__ => repo, unquote(name) => edge} = _node) do
repo.fetch!(unquote(type), edge)
end
def unquote(:"set_#{name}")(struct, %{__id__: id} = _node) do
Map.put(struct, unquote(name), id)
end
def unquote(:"clear_#{name}")(struct) do
Map.put(struct, unquote(name), nil)
end
end
end
defmacro __after_compile__(env, _byte_code) do
module = env.module
# Checks for Assocs
for assoc <- module.__schema__(:gim_assocs) do
check_edges(module, assoc)
end
end
@valid_property_options [:default, :index]
@doc false
def __property__(mod, name, opts) do
check_options!(opts, @valid_property_options, "property/2")
put_struct_property(mod, name, Keyword.get(opts, :default))
index =
case Keyword.get(opts, :index) do
:primary -> :primary
:unique -> :unique
truthy -> !!truthy
end
Module.put_attribute(mod, :gim_props, {name, index})
end
@doc false
def __meta__(mod, name, default) do
put_struct_property(mod, name, default)
end
@valid_has_options [:reflect]
@doc false
def __has_edges__(mod, name, type, reflect, opts, caller_stacktrace) do
check_type!(type, "has_edges/3")
check_options!(opts, @valid_has_options, "has_edges/3")
put_struct_property(mod, name, Keyword.get(opts, :default, []))
Module.put_attribute(mod, :gim_assocs, {name, :many, type, reflect, caller_stacktrace})
end
@doc false
def __has_edge__(mod, name, type, reflect, opts, caller_stacktrace) do
check_type!(type, "has_edge/3")
check_options!(opts, @valid_has_options, "has_edge/3")
put_struct_property(mod, name, Keyword.get(opts, :default))
Module.put_attribute(mod, :gim_assocs, {name, :one, type, reflect, caller_stacktrace})
end
## Private
defp put_struct_property(mod, name, assoc) do
props = Module.get_attribute(mod, :struct_fields)
if List.keyfind(props, name, 0) do
raise ArgumentError, "property/association #{inspect(name)} is already set on schema"
end
Module.put_attribute(mod, :struct_fields, {name, assoc})
end
defp check_type!(type, fun_arity) do
# Just catch the worst typos
unless type |> to_string() |> String.starts_with?("Elixir.") do
raise ArgumentError, "invalid type #{inspect(type)} for #{fun_arity}"
end
end
defp check_options!(opts, valid, fun_arity) do
case Enum.find(opts, fn {k, _} -> not (k in valid) end) do
{k, _} -> raise ArgumentError, "invalid option #{inspect(k)} for #{fun_arity}"
nil -> :ok
end
end
defp check_edges(module, {name, _cardinality, type, reflect, stacktrace}) do
with {:module, _module} <- Code.ensure_compiled(type),
_ <- type.__schema__(:gim),
false <- is_nil(reflect),
{_name, _cardinality, ^module, ^name} <- type.__schema__(:association, reflect) do
:ok
else
true ->
:ok
nil ->
message = "The targeted edge #{inspect(reflect)} is not present in #{inspect(type)}"
reraise Gim.SchemaError, message, stacktrace
{reflect_name, reflect_cardinality, ^module, nil} ->
message = ~s'''
Bidirectional edges should target each other.
Add a reflect to the target edge:
schema do
# ...
has_edge#{if reflect_cardinality == :many, do: "s", else: ""}(#{
inspect(reflect_name)
}, #{inspect(module)}, reflect: #{inspect(name)})
# ...
end
'''
reraise Gim.SchemaError, message, stacktrace
{_name, _cardinality, ^module, re_reflect} ->
unless module.__schema__(:association, re_reflect) do
message = ~s'''
Bidirectional edges should target each other.
The target edge #{inspect(reflect)} in #{inspect(module)} targets #{inspect(re_reflect)} but was expected to be #{
inspect(name)
}
'''
reraise Gim.SchemaError, message, stacktrace
else
# In this case the reflect of the target is invalid, an error will be raised in its check
:ok
end
{_name, _cardinality, re_module, _} ->
message = ~s'''
The type of the target edge #{inspect(reflect)} in #{inspect(module)} is #{
inspect(re_module)
} but was expected to be #{inspect(module)}
'''
reraise Gim.SchemaError, message, stacktrace
# {:error, :embedded} ->
# IO.warn("embedded", stacktrace)
# {:error, :badfile} ->
# IO.warn("badfile", stacktrace)
# {:error, :nofile} ->
# IO.warn("nofile", stacktrace)
# {:error, :on_load_failur} ->
# IO.warn("on_load_failur", stacktrace)
{:error, error} ->
message = ~s'''
The reflection could not be checked. Loading #{inspect(type)} resulted in a #{
inspect(error)
} error, see "Code.ensure_compiled/1" for more information.
'''
IO.warn(message, stacktrace)
end
rescue
UndefinedFunctionError ->
message = "The target type #{inspect(type)} is not a gim schema"
reraise Gim.SchemaError, message, stacktrace
end
end
| 29.765237 | 124 | 0.616791 |
1c4198c85ec64bb4c9eb19d023939a553850bd41 | 792 | exs | Elixir | mix.exs | iwatakeshi/decimal | 7ebce665eddc922b88a3f8804b9699ce1e1d9872 | [
"Apache-2.0"
] | null | null | null | mix.exs | iwatakeshi/decimal | 7ebce665eddc922b88a3f8804b9699ce1e1d9872 | [
"Apache-2.0"
] | null | null | null | mix.exs | iwatakeshi/decimal | 7ebce665eddc922b88a3f8804b9699ce1e1d9872 | [
"Apache-2.0"
] | null | null | null | defmodule Decimal.Mixfile do
use Mix.Project
@version "2.0.0-rc.0"
# Mix project definition for the Decimal package.
def project() do
  # ExDoc renders the README as the landing page, pinned to this release tag.
  docs_opts = [source_ref: "v#{@version}", main: "readme", extras: ["README.md"]]

  [
    app: :decimal,
    version: @version,
    elixir: "~> 1.2",
    deps: deps(),
    name: "Decimal",
    source_url: "https://github.com/ericmj/decimal",
    docs: docs_opts,
    description: description(),
    package: package()
  ]
end
# OTP application callback data — Decimal starts no supervision tree.
def application(), do: []

# Dev-only dependency used to generate documentation.
defp deps(), do: [{:ex_doc, ">= 0.0.0", only: :dev}]

# One-line summary shown on the Hex package page.
defp description(), do: "Arbitrary precision decimal arithmetic."

# Hex package metadata.
defp package() do
  [
    maintainers: ["Eric Meadows-Jönsson"],
    licenses: ["Apache-2.0"],
    links: %{"GitHub" => "https://github.com/ericmj/decimal"}
  ]
end
end
| 18.857143 | 80 | 0.556818 |
1c41a253777c1e4d67994a226f2287dfeb303e5b | 3,451 | exs | Elixir | test/prom_ex/ets_cron_flusher_test.exs | dvic/prom_ex | 36cd5e0850e3e1ccc369011fbd15d5b442024a1e | [
"MIT"
] | 1 | 2022-02-20T10:42:44.000Z | 2022-02-20T10:42:44.000Z | test/prom_ex/ets_cron_flusher_test.exs | dvic/prom_ex | 36cd5e0850e3e1ccc369011fbd15d5b442024a1e | [
"MIT"
] | null | null | null | test/prom_ex/ets_cron_flusher_test.exs | dvic/prom_ex | 36cd5e0850e3e1ccc369011fbd15d5b442024a1e | [
"MIT"
] | null | null | null | defmodule PromEx.ETSCronFlusherTest do
use ExUnit.Case, async: false
alias PromEx.Plugins.Phoenix
alias PromEx.Test.Support.Events
# PromEx module using all default options — exercises the default
# ETS flush interval in the tests below.
defmodule DefaultPromExSetUp do
  use PromEx, otp_app: :prom_ex

  @impl true
  def plugins do
    # Only the Phoenix plugin is needed to produce ETS-backed metrics.
    [{Phoenix, router: TestApp.Router, endpoint: TestApp.Endpoint}]
  end
end
# PromEx module that overrides init_opts/0 to shrink the ETS flush
# interval to 2.5s so the manual-interval test completes quickly.
defmodule ManualPromExSetUp do
  use PromEx, otp_app: :prom_ex

  @impl true
  def plugins do
    [{Phoenix, router: TestApp.Router, endpoint: TestApp.Endpoint}]
  end

  @impl true
  def init_opts do
    # Only :ets_flush_interval deviates from the defaults; Grafana and
    # the standalone metrics server are disabled for testing.
    %PromEx.Config{
      disabled: false,
      drop_metrics_groups: MapSet.new(),
      ets_flush_interval: 2_500,
      grafana_agent_config: :disabled,
      grafana_config: :disabled,
      manual_metrics_start_delay: :no_delay,
      metrics_server_config: :disabled
    }
  end
end
describe "ETSCronFlusherTest" do
  test "should flush ETS metrics at the correct interval when the default is used" do
    start_supervised!(DefaultPromExSetUp)

    original_timer_ref = get_timer_ref(DefaultPromExSetUp)

    # Each execute_all/1 appends raw samples to the _dist table without
    # touching the aggregated metrics table.
    Events.execute_all(:phoenix)
    assert length(get_metrics_table(DefaultPromExSetUp)) == 5
    assert length(get_dist_table(DefaultPromExSetUp)) == 40
    Events.execute_all(:phoenix)
    assert length(get_metrics_table(DefaultPromExSetUp)) == 5
    assert length(get_dist_table(DefaultPromExSetUp)) == 80

    # Wait past the default flush interval: samples are folded into the
    # metrics table and the dist table is emptied.
    Process.sleep(8_000)
    assert length(get_metrics_table(DefaultPromExSetUp)) == 11
    assert get_dist_table(DefaultPromExSetUp) == []

    # A flush re-arms the timer, so the reference must have changed.
    new_timer_ref = get_timer_ref(DefaultPromExSetUp)
    assert original_timer_ref != new_timer_ref
    assert is_reference(new_timer_ref)

    # Deferring cancels the pending timer and schedules a fresh one.
    PromEx.ETSCronFlusher.defer_ets_flush(DefaultPromExSetUp.__ets_cron_flusher_name__())
    Process.sleep(500)
    defer_new_timer_ref = get_timer_ref(DefaultPromExSetUp)
    assert new_timer_ref != defer_new_timer_ref
    assert is_reference(defer_new_timer_ref)
  end

  test "should flush ETS metrics at the correct interval when the interval is manually set" do
    start_supervised!(ManualPromExSetUp)

    original_timer_ref = get_timer_ref(ManualPromExSetUp)

    Events.execute_all(:phoenix)
    assert length(get_metrics_table(ManualPromExSetUp)) == 5
    assert length(get_dist_table(ManualPromExSetUp)) == 40
    Events.execute_all(:phoenix)
    assert length(get_metrics_table(ManualPromExSetUp)) == 5
    assert length(get_dist_table(ManualPromExSetUp)) == 80

    # The override sets the interval to 2.5s, so 3.5s is enough to see
    # exactly one flush.
    Process.sleep(3_500)
    assert length(get_metrics_table(ManualPromExSetUp)) == 11
    assert get_dist_table(ManualPromExSetUp) == []

    new_timer_ref = get_timer_ref(ManualPromExSetUp)
    assert original_timer_ref != new_timer_ref
    assert is_reference(new_timer_ref)

    PromEx.ETSCronFlusher.defer_ets_flush(ManualPromExSetUp.__ets_cron_flusher_name__())
    Process.sleep(500)
    defer_new_timer_ref = get_timer_ref(ManualPromExSetUp)
    assert new_timer_ref != defer_new_timer_ref
    assert is_reference(defer_new_timer_ref)
  end
end
# Raw (unaggregated) distribution samples for the given PromEx module.
defp get_dist_table(module) do
  module
  |> Module.concat(Metrics_dist)
  |> :ets.tab2list()
end

# Aggregated metric rows for the given PromEx module.
defp get_metrics_table(module) do
  module
  |> Module.concat(Metrics)
  |> :ets.tab2list()
end

# Currently scheduled flush timer held in the flusher process state.
defp get_timer_ref(module) do
  flusher = module.__ets_cron_flusher_name__()
  Map.get(:sys.get_state(flusher), :timer_ref)
end
end
| 29.245763 | 96 | 0.718343 |
1c41a36099c4eed3c4c1f86313f5c05481997b0c | 2,539 | exs | Elixir | test/channels/participant_socket_test.exs | b-a-b-e/ProComPrag | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 1 | 2020-05-31T21:54:40.000Z | 2020-05-31T21:54:40.000Z | test/channels/participant_socket_test.exs | b-a-b-e/ProComPrag | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 64 | 2019-07-29T22:06:16.000Z | 2022-03-28T23:46:58.000Z | test/channels/participant_socket_test.exs | babe-project/BABE | 50c6c87933e71cb69b5c95bc77bf591a34661410 | [
"MIT"
] | 1 | 2019-07-28T19:17:43.000Z | 2019-07-28T19:17:43.000Z | defmodule Magpie.ParticipantSocketTest do
@moduledoc """
Module for tests on the socket connection.
"""
use Magpie.ChannelCase, async: true
alias Magpie.{Experiments, ParticipantSocket}
alias Magpie.Experiments.AssignmentIdentifier
test "connect with a valid experiment_id" do
  experiment = insert_dynamic_experiment()

  assert {:ok, socket} =
           connect(ParticipantSocket, %{
             "participant_id" => "1234",
             "experiment_id" => experiment.id
           })

  # Assert the assigns as well
  assert socket.assigns.participant_id == "1234"

  # A fresh experiment hands out the first slot: chain 1, variant 1,
  # generation 1, player 1.
  assert socket.assigns.assignment_identifier == %AssignmentIdentifier{
           chain: 1,
           experiment_id: experiment.id,
           generation: 1,
           player: 1,
           variant: 1
         }
end

test "Assigns ExperimentStatus to 1 upon connection" do
  experiment = insert_dynamic_experiment()

  {:ok, socket} =
    connect(ParticipantSocket, %{
      "participant_id" => "1234",
      "experiment_id" => experiment.id
    })

  # Connecting should mark the assigned slot as in progress.
  assignment = Experiments.get_experiment_status(socket.assigns.assignment_identifier)
  assert assignment.status == :in_progress
end

test "refuse connection with an invalid experiment_id" do
  # Random id that does not belong to any inserted experiment.
  assert :error =
           connect(ParticipantSocket, %{
             "participant_id" => "1234",
             "experiment_id" => :rand.uniform(1000)
           })
end

test "refuse connection without supplying experiment_id" do
  assert :error =
           connect(ParticipantSocket, %{
             "participant_id" => "1234"
           })
end

test "refuse connection without supplying participant_id" do
  experiment = insert_dynamic_experiment()

  assert :error =
           connect(ParticipantSocket, %{
             "experiment_id" => experiment.id
           })
end

test "refuse connection with an empty participant_id" do
  experiment = insert_dynamic_experiment()

  assert :error =
           connect(ParticipantSocket, %{
             "participant_id" => "",
             "experiment_id" => experiment.id
           })
end
# I guess this is a bit irrelevant so whatever. Just let it crash.
# test "refuse connection with an empty experiment_id" do
# experiment = insert_dynamic_experiment()
# assert :error =
# connect(ParticipantSocket, %{
# "participant_id" => "asdf",
# "experiment_id" => ""
# })
# end
end
| 28.211111 | 88 | 0.607326 |
1c41c70c56608829cfee4d8a3c87a3148ad5623c | 508 | ex | Elixir | lib/sutur/shops/grant.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
] | 1 | 2021-11-16T02:18:31.000Z | 2021-11-16T02:18:31.000Z | lib/sutur/shops/grant.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
] | null | null | null | lib/sutur/shops/grant.ex | ab-zu/sutur | f314ed29b344fbe0139bd87ac01caf577b1d592e | [
"MIT"
] | null | null | null | defmodule Sutur.Shops.Grant do
use Ecto.Schema
import Ecto.Changeset
# Ecto schema for a shop's grant record: which features were granted by
# a charge and how many usages remain out of the purchased total.
schema "grants" do
  field :charge_id, :integer
  field :grants, {:array, :string}
  field :remaining_usages, :integer
  field :total_usages, :integer
  # Owning shop; association is kept as a raw foreign key here.
  field :shop_id, :id

  timestamps()
end
@doc false
def changeset(grant, attrs) do
  # :shop_id is intentionally not castable from external attrs.
  permitted = [:charge_id, :grants, :remaining_usages, :total_usages]

  grant
  |> cast(attrs, permitted)
  |> validate_required(permitted)
end
end
| 23.090909 | 81 | 0.687008 |
1c41d2c0cf9fd998fd758582f763df61664e032e | 322 | ex | Elixir | lib/history/products/queries/search.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 20 | 2021-08-06T01:09:48.000Z | 2022-03-28T18:44:56.000Z | lib/history/products/queries/search.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 13 | 2021-08-21T21:17:02.000Z | 2022-03-27T06:33:51.000Z | lib/history/products/queries/search.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 2 | 2021-09-23T11:31:59.000Z | 2022-01-09T16:19:35.000Z | defmodule History.Products.Queries.Search do
require Ecto.Query
import Ecto.Query
alias History.Products
# Builds a case-insensitive substring search over product symbols and
# venues, ordered deterministically by symbol, venue, then type.
def call(search_term) do
  pattern = "%#{search_term}%"

  from(product in Products.Product,
    where: ilike(product.symbol, ^pattern) or ilike(product.venue, ^pattern),
    order_by: [asc: :symbol, asc: :venue, asc: :type]
  )
end
end
| 23 | 77 | 0.63354 |
1c420baef692bbf472a21d742a62734cebf586e1 | 2,066 | exs | Elixir | test/bankapi/user/update_test.exs | guibes/bankapi | df43b30e7a845d0509b17c1086c70ab651b97c42 | [
"MIT"
] | null | null | null | test/bankapi/user/update_test.exs | guibes/bankapi | df43b30e7a845d0509b17c1086c70ab651b97c42 | [
"MIT"
] | 24 | 2021-03-26T17:42:17.000Z | 2021-03-31T11:47:17.000Z | test/bankapi/user/update_test.exs | guibes/bankapi | df43b30e7a845d0509b17c1086c70ab651b97c42 | [
"MIT"
] | 1 | 2021-03-26T17:49:53.000Z | 2021-03-26T17:49:53.000Z | defmodule Bankapi.User.UpdateTest do
use Bankapi.DataCase
alias Bankapi.User.{Create, Update}
alias Bankapi.{User, Repo}
@valid_attrs_complete %{
cpf: "12345678934",
password: "12345678912",
country: "USA",
city: "Vitorino",
birth_date: "2000-01-01",
state: "Paraná",
name: "Teste Teste",
email: "tutaaa@tsaa.com"
}
describe "call/1" do
  test "when all params are valid return an user with complete status" do
    {:ok, %User{id: user_id, user_code: user_code}} = Create.call(@valid_attrs_complete)

    # Same CPF as the created user; Update.call/1 targets that record.
    update_attrs = %{
      "cpf" => "12345678934",
      "password" => "12345678912",
      "country" => "Brasil",
      "city" => "Vitorino",
      "birth_date" => "2000-01-01",
      "state" => "São Paulo",
      "name" => "Teste Teste",
      "email" => "tutaaa@tsaa.com"
    }

    {:ok, %User{}} = Update.call(update_attrs)

    user = Repo.get(User, user_id)

    # With every field filled in, the status should flip to "complete".
    assert %User{
             id: ^user_id,
             birth_date: ~D[2000-01-01],
             city: "Vitorino",
             country: "Brasil",
             cpf: "12345678934",
             email: "tutaaa@tsaa.com",
             gender: nil,
             name: "Teste Teste",
             # because is virtual
             password: nil,
             referral_code: nil,
             state: "São Paulo",
             status: "complete",
             user_code: ^user_code
           } = user
  end

  test "when all params are valid and referral_code user is complete" do
    {:ok, %User{user_code: user_code}} = Create.call(@valid_attrs_complete)

    # A second user referencing the first user's code.
    new_user = %{
      cpf: "12345678935",
      password: "12345678912",
      country: "USA",
      city: "Vitorino",
      birth_date: "2000-01-01",
      state: "Paraná",
      name: "Teste Teste",
      email: "aaaa@aaaa.com",
      referral_code: user_code
    }

    {:ok, new_user_response} = Create.call(new_user)

    assert %User{referral_code: ^user_code} = new_user_response
  end
end
end
| 27.918919 | 90 | 0.53969 |
1c4274ac88a6c80b0ed3bda8fc5aa6765f427bd8 | 3,424 | ex | Elixir | lib/ref_inspector/database.ex | elixir-inspector/ref_inspector | f34485a8e32f0aaea2e3951fbb756eac14a9f8dd | [
"Apache-2.0"
] | 7 | 2018-12-22T14:41:26.000Z | 2020-05-04T08:16:11.000Z | lib/ref_inspector/database.ex | elixir-inspector/ref_inspector | f34485a8e32f0aaea2e3951fbb756eac14a9f8dd | [
"Apache-2.0"
] | 2 | 2019-11-25T09:41:27.000Z | 2020-05-06T17:30:45.000Z | lib/ref_inspector/database.ex | elixir-inspector/ref_inspector | f34485a8e32f0aaea2e3951fbb756eac14a9f8dd | [
"Apache-2.0"
] | null | null | null | defmodule RefInspector.Database do
@moduledoc false
use GenServer
require Logger
alias RefInspector.Config
alias RefInspector.Database.Loader
alias RefInspector.Database.Parser
alias RefInspector.Database.State
@ets_table_opts [:named_table, :protected, :set, read_concurrency: true]
@doc false
# Accepts either a bare instance atom or a full option list; the atom
# form is normalized into options and re-dispatched.
def start_link(instance) when is_atom(instance) do
  start_link(instance: instance)
end

def start_link(opts) do
  # The GenServer registers itself under the configured instance name.
  %State{instance: instance} = state = init_state(opts)
  GenServer.start_link(__MODULE__, state, name: instance)
end
@doc false
# Refuse to start without a registered instance name.
def init(%State{instance: nil}), do: {:stop, "missing instance name"}

def init(%State{} = state) do
  # Either load the databases synchronously before init/1 returns, or
  # defer the load by casting :reload to ourselves.
  if state.startup_sync do
    :ok = reload_databases(state)
  else
    :ok = GenServer.cast(state.instance, :reload)
  end

  {:ok, state}
end
# Synchronous reload: the caller receives the reload result.
def handle_call(:reload, _from, state) do
  # Re-read configuration so changes made after startup take effect.
  state = reinit_state(state)

  {:reply, reload_databases(state), state}
end

# Asynchronous (fire-and-forget) reload.
def handle_cast(:reload, state) do
  state = reinit_state(state)
  :ok = reload_databases(state)

  {:noreply, state}
end
@doc """
Returns all referer definitions.

Returns an empty list when the ETS table does not exist yet or holds
no data.
"""
@spec list(atom) :: [tuple]
def list(instance) do
  # :ets.lookup raises if the table has not been created yet; the
  # rescue below turns any such error into an empty result.
  instance
  |> :ets.lookup(:data)
  |> case do
    [{:data, entries}] -> entries
    _ -> []
  end
rescue
  _ -> []
end
@doc """
Reloads the database.

Requires the `:instance` option naming the target process.

Depending on the boolean option `:async` the reload will be performed
using `GenServer.cast/2` or `GenServer.call/2`.
"""
def reload(opts) do
  if opts[:async] do
    GenServer.cast(opts[:instance], :reload)
  else
    GenServer.call(opts[:instance], :reload)
  end
end
# Creates the backing ETS table unless it already exists; reloads reuse
# the existing table so readers never observe it missing.
defp create_ets_table(instance) do
  if :ets.info(instance) == :undefined do
    _ = :ets.new(instance, @ets_table_opts)
  end

  :ok
end
defp init_state(opts) do
  # Make sure application configuration is initialized before reading it.
  :ok = Config.init_env()

  state = %State{}

  # Precedence: explicit opts, then application config, then the struct
  # defaults baked into %State{}.
  opts =
    opts
    |> init_state_option(:startup_silent, state)
    |> init_state_option(:startup_sync, state)
    |> Keyword.put_new(:yaml_reader, Config.yaml_file_reader())

  struct!(State, opts)
end

# Fills in a single option from application config when the caller did
# not pass it, using the struct default as the config fallback.
defp init_state_option(opts, key, state) do
  default = Map.fetch!(state, key)
  config = Config.get(key, default)

  Keyword.put_new(opts, key, config)
end
# Successful YAML load: hand the raw entries to the parser.
defp parse_database({:ok, entries}, _, _) do
  Parser.parse(entries)
end

# Failed load: log (unless silenced) and fall back to an empty dataset.
defp parse_database({:error, reason}, file, silent) do
  _ =
    unless silent do
      Logger.info("Failed to load #{file}: #{inspect(reason)}")
    end

  %{}
end

defp read_databases([], silent, _) do
  _ =
    unless silent do
      Logger.warn("Reload error: no database files configured!")
    end

  []
end

# Loads and parses every configured database file, keyed by file name.
defp read_databases(files, silent, yaml_reader) do
  Enum.map(files, fn file ->
    entries =
      Config.database_path()
      |> Path.join(file)
      |> Loader.load(yaml_reader)
      |> parse_database(file, silent)

    {file, entries}
  end)
end

# Rebuilds the state from scratch so config changes are picked up.
defp reinit_state(state), do: state |> Map.to_list() |> init_state()

defp reload_databases(%{instance: instance, startup_silent: silent, yaml_reader: yaml_reader}) do
  :ok = create_ets_table(instance)

  Config.database_files()
  |> read_databases(silent, yaml_reader)
  |> update_ets_table(instance)
end

# Swaps in the freshly parsed datasets under the single :data key.
defp update_ets_table(datasets, instance) do
  true = :ets.insert(instance, {:data, datasets})
  :ok
end
end
| 21.808917 | 99 | 0.64632 |
1c42b0121cdd2053ebfd266137553da0f4a66f05 | 1,380 | ex | Elixir | apps/snitch_core/lib/core/data/schema/tax/tax_rate_class_value.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 456 | 2018-09-20T02:40:59.000Z | 2022-03-07T08:53:48.000Z | apps/snitch_core/lib/core/data/schema/tax/tax_rate_class_value.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 273 | 2018-09-19T06:43:43.000Z | 2021-08-07T12:58:26.000Z | apps/snitch_core/lib/core/data/schema/tax/tax_rate_class_value.ex | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 122 | 2018-09-26T16:32:46.000Z | 2022-03-13T11:44:19.000Z | defmodule Snitch.Data.Schema.TaxRateClassValue do
@moduledoc """
Models a TaxRateClassValue
The TaxRateClassValue model is repsonsible for handling the percent amount to
be used for a tax rate while calculating taxes.
"""
use Snitch.Data.Schema
alias Snitch.Data.Schema.{TaxRate, TaxClass}
alias Snitch.Core.Tools.MultiTenancy.Repo
schema "snitch_tax_rate_class_values" do
field(:percent_amount, :integer, default: 0)
belongs_to(:tax_class, TaxClass)
belongs_to(:tax_rate, TaxRate, on_replace: :delete)
timestamps()
end
@permitted ~w(tax_class_id tax_rate_id percent_amount)a
def changeset(%__MODULE__{} = data, params) do
data
|> cast(params, @permitted)
|> validate_required([:tax_class_id, :percent_amount])
|> validate_number(:percent_amount, greater_than_or_equal_to: 0)
|> foreign_key_constraint(:tax_rate_id)
|> foreign_key_constraint(:tax_class_id)
|> unique_constraint(:tax_rate_id, name: :unique_tax_rate_class_value)
|> add_tax_class_data()
end
defp add_tax_class_data(changeset) do
with {:ok, tax_class_id} <- fetch_change(changeset, :tax_class_id) do
data = %{
changeset.data
| tax_class: Repo.get(TaxClass, tax_class_id),
tax_class_id: tax_class_id
}
%{changeset | data: data}
else
_ ->
changeset
end
end
end
| 27.6 | 79 | 0.707246 |
1c42e5789e9ce7f891e8a6cde93aa940c2752992 | 68,774 | ex | Elixir | lib/axon/compiler.ex | stefkohub/axon | c2eafa0adfe69a54d48f639181c939b31e6a1731 | [
"Apache-2.0"
] | null | null | null | lib/axon/compiler.ex | stefkohub/axon | c2eafa0adfe69a54d48f639181c939b31e6a1731 | [
"Apache-2.0"
] | null | null | null | lib/axon/compiler.ex | stefkohub/axon | c2eafa0adfe69a54d48f639181c939b31e6a1731 | [
"Apache-2.0"
] | null | null | null | defmodule Axon.CompilerError do
defexception [:exception, :graph]
@impl true
def message(%{graph: %Axon{op: op}, exception: exception}) do
  # Built-in layer ops are atoms and render without a leading colon;
  # anything else (e.g. a custom function op) is inspected.
  op_inspect = if is_atom(op), do: Atom.to_string(op), else: inspect(op)

  """
  error while building prediction for #{op_inspect}:
  ** (#{inspect(exception.__struct__)}) #{Exception.message(exception)}
  """
end
end
defmodule Axon.Compiler do
@moduledoc false
require Logger
import Axon.Shared
## Init JIT Compilation
@doc false
def __compile__(graph, opts) do
  # :mode toggles between training output (prediction + state map) and
  # plain inference output; defaults to inference.
  mode = opts[:mode] || :inference
  {compile_init(graph), compile_predict(graph, mode)}
end

@doc false
def __jit_init__(graph, caller, [] = args, opts) do
  fun = compile_init(graph)

  jit_or_apply(caller, fun, args, opts)
end
# Builds the parameter-initialization function for a model graph.
#
# A single clause suffices for both a lone `%Axon{}` output and a tuple
# of outputs: `to_init_fun/2` already has an `is_tuple/1` clause that
# performs exactly the reduction the removed tuple clause duplicated.
# The returned zero-arity function JIT-compiles initialization on call
# and yields `%{layer_name => %{param_name => tensor}}`.
defp compile_init(graph) do
  init_fn = fn ->
    {cache, _op_counts} = to_init_fun(graph, {%{}, %{}})

    # The cache is keyed by node id; flatten into one name-keyed map.
    Enum.reduce(cache, %{}, fn {_id, layer}, layers_acc ->
      Map.merge(layer, layers_acc)
    end)
  end

  fn -> Nx.Defn.jit_or_apply(init_fn, []) end
end
# Walks the graph depth-first accumulating `{cache, op_counts}`, where
# `cache` maps node id -> %{layer_name => initializer funs} and
# `op_counts` counts op occurrences (used to build default layer names).
defp to_init_fun(graph, cache_and_counts) when is_tuple(graph) do
  graph
  |> Tuple.to_list()
  |> Enum.reduce(cache_and_counts, fn x, acc -> to_init_fun(x, acc) end)
end

# Node with multiple parents (multi-input layers).
defp to_init_fun(
       %Axon{
         id: id,
         parent: parents,
         op: op,
         name: name_fn,
         params: params,
         policy: %{params: dtype},
         hooks: hooks
       },
       cache_and_counts
     )
     when is_list(parents) do
  # Initialize all parents first so op counts (and thus names) are stable.
  {cache, op_counts} = Enum.reduce(parents, cache_and_counts, &to_init_fun/2)

  case cache do
    %{^id => _} ->
      # Shared subgraph already visited; do not double-count the op.
      {cache, op_counts}

    %{} ->
      if Enum.empty?(params) do
        {cache, op_counts}
      else
        # Build an initializer closure per parameter at the param dtype.
        layer_params =
          Enum.reduce(params, %{}, fn {_, param}, layer_params ->
            %{name: name, shape: shape, initializer: initializer} = param
            fun = apply(Axon.Initializers, initializer, [[type: dtype, shape: shape]])
            Map.put(layer_params, name, fun)
          end)

        layer_params = apply_hooks(layer_params, :initialize, nil, hooks)

        name = name_fn.(op, op_counts)
        params = %{name => layer_params}

        {
          Map.put(cache, id, params),
          Map.update(op_counts, op, 1, fn x -> x + 1 end)
        }
      end
  end
end

# Node with a single (possibly nil) parent; recurrent layers may also
# carry an extra :hidden_state subgraph in their opts.
defp to_init_fun(
       %Axon{
         id: id,
         parent: parent,
         op: op,
         name: name_fn,
         params: params,
         opts: opts,
         policy: %{params: dtype},
         hooks: hooks
       },
       cache_and_counts
     ) do
  {cache, op_counts} =
    if parent do
      to_init_fun(parent, cache_and_counts)
    else
      cache_and_counts
    end

  # Also initialize any hidden-state subgraphs (recurrent layers).
  {cache, op_counts} =
    case opts[:hidden_state] do
      state when is_tuple(state) ->
        state
        |> Tuple.to_list()
        |> Enum.reduce({cache, op_counts}, &to_init_fun/2)

      nil ->
        {cache, op_counts}
    end

  case cache do
    %{^id => _} ->
      {cache, op_counts}

    %{} ->
      if Enum.empty?(params) do
        {cache, op_counts}
      else
        layer_params =
          Enum.reduce(params, %{}, fn {_, param}, layer_params ->
            %{name: name, shape: shape, initializer: initializer} = param
            fun = apply(Axon.Initializers, initializer, [[type: dtype, shape: shape]])
            Map.put(layer_params, name, fun)
          end)

        layer_params = apply_hooks(layer_params, :initialize, nil, hooks)

        name = name_fn.(op, op_counts)
        params = %{name => layer_params}

        {
          Map.put(cache, id, params),
          Map.update(op_counts, op, 1, fn x -> x + 1 end)
        }
      end
  end
end
## Model JIT Compilation
@doc false
def __jit_predict__(graph, caller, args, opts) do
  {mode, opts} = Keyword.pop(opts, :mode, :inference)
  fun = compile_predict(graph, mode)

  jit_or_apply(caller, fun, args, opts)
end

# Builds the prediction function. Input nodes are identified by id and
# mapped to positions in the (flattened) input tuple in sorted-id order.
defp compile_predict(graph, mode) do
  input_ids = get_inputs(graph, [])

  input_map =
    input_ids
    |> Enum.uniq()
    |> Enum.sort()
    |> Enum.with_index()
    |> Enum.into(%{})

  # Warn if input map is empty
  if Enum.empty?(input_map) do
    Logger.warn(
      "You are compiling a graph with no inputs. If this was" <>
        " intentional, you can run your model's predict function" <>
        " with an empty tuple as input: predict_fn(params, {})"
    )
  end

  predict_fn = fn params, inputs ->
    inputs = maybe_flatten(inputs)
    {expr, _} = to_predict_fun(graph, {%{}, %{}}, input_map, params, inputs, mode)

    # :train mode yields %{prediction: ..., state: ...}; :inference
    # yields the bare prediction.
    acc =
      case mode do
        :train ->
          %{prediction: [], state: %{}}

        :inference ->
          []
      end

    # Multi-output models come back as a list and must be re-tupled.
    case expr do
      [_ | _] = exprs ->
        do_recur_to_tuple(exprs, mode, acc)

      expr ->
        expr
    end
  end

  &Nx.Defn.jit_or_apply(predict_fn, [&1, &2])
end
# Flattens arbitrarily nested input tuples into one flat tuple;
# non-tuple inputs pass through unchanged.
defp maybe_flatten(inputs) when is_tuple(inputs) do
  flat =
    inputs
    |> Tuple.to_list()
    |> do_flatten([])
    |> List.flatten()

  List.to_tuple(flat)
end

defp maybe_flatten(inputs), do: inputs
# Recursively walks a list, replacing each tuple element with the
# flattened list of its own elements. The accumulator is built in
# reverse and flipped once at the end.
#
# The original had two extra single-element clauses (`[inp | []]`);
# they duplicated exactly what the general clauses below produce when
# followed by the `[]` base case, so they were removed.
defp do_flatten([], acc), do: Enum.reverse(acc)

defp do_flatten([inp | rest], acc) when is_tuple(inp) do
  res = do_flatten(Tuple.to_list(inp), [])
  do_flatten(rest, [res | acc])
end

defp do_flatten([inp | rest], acc) do
  do_flatten(rest, [inp | acc])
end
# Converts the list-of-results of a multi-output model into nested
# tuples mirroring the nesting of the expression list. In :train mode
# the predictions are tupled while all layer states are merged into a
# single map; in :inference mode only predictions are tupled.
defp do_recur_to_tuple([res | []], mode, acc) when is_list(res) do
  case mode do
    :train ->
      # Nested list: recurse with a fresh accumulator, then fold the
      # nested tuple into the outer accumulator.
      res = do_recur_to_tuple(res, :train, %{prediction: [], state: %{}})

      new_pred =
        [res.prediction | acc.prediction]
        |> Enum.reverse()
        |> List.to_tuple()

      new_state = Map.merge(res.state, acc.state)

      %{prediction: new_pred, state: new_state}

    :inference ->
      res = do_recur_to_tuple(res, :inference, [])

      [res | acc]
      |> Enum.reverse()
      |> List.to_tuple()
  end
end

# Last element (non-list): finalize the accumulator into a tuple.
defp do_recur_to_tuple([res | []], mode, acc) do
  case mode do
    :train ->
      new_pred =
        [res.prediction | acc.prediction]
        |> Enum.reverse()
        |> List.to_tuple()

      new_state = Map.merge(res.state, acc.state)

      %{prediction: new_pred, state: new_state}

    :inference ->
      [res | acc]
      |> Enum.reverse()
      |> List.to_tuple()
  end
end

# Interior nested-list element: tuple it and continue.
defp do_recur_to_tuple([expr | exprs], mode, acc) when is_list(expr) do
  case mode do
    :train ->
      res = do_recur_to_tuple(expr, :train, %{prediction: [], state: %{}})
      new_pred = [res.prediction | acc.prediction]
      new_state = Map.merge(res.state, acc.state)
      do_recur_to_tuple(exprs, :train, %{prediction: new_pred, state: new_state})

    :inference ->
      res = do_recur_to_tuple(expr, :inference, [])
      do_recur_to_tuple(exprs, :inference, [res | acc])
  end
end

# Interior plain element: accumulate and continue.
defp do_recur_to_tuple([expr | exprs], mode, acc) do
  case mode do
    :train ->
      new_pred = [expr.prediction | acc.prediction]
      new_state = Map.merge(expr.state, acc.state)
      do_recur_to_tuple(exprs, :train, %{prediction: new_pred, state: new_state})

    :inference ->
      do_recur_to_tuple(exprs, :inference, [expr | acc])
  end
end
## Input Ordering
# Collects the ids of all :input nodes reachable from the graph.
defp get_inputs(graph, acc) when is_tuple(graph) do
  graph
  |> Tuple.to_list()
  |> Enum.reduce(acc, &get_inputs/2)
end

# Constants contribute no runtime inputs.
defp get_inputs(%Axon{op: :constant}, acc), do: acc

defp get_inputs(%Axon{id: id, op: :input}, acc), do: [id | acc]

defp get_inputs(%Axon{parent: parents}, acc) when is_list(parents) do
  Enum.reduce(parents, acc, &get_inputs/2)
end

defp get_inputs(%Axon{parent: parent, opts: opts}, acc) do
  # Recurrent layers may reference additional graphs via :hidden_state.
  acc =
    case opts[:hidden_state] do
      state when is_tuple(state) ->
        state
        |> Tuple.to_list()
        |> Enum.reduce(acc, &get_inputs/2)

      nil ->
        acc
    end

  get_inputs(parent, acc)
end
# Entry point for building the prediction expression of a node (or a
# tuple of nodes). Results are memoized in `cache` by node id so shared
# subgraphs are only built once.
defp to_predict_fun(graph, cache_and_counts, input_map, params, inputs, mode)
     when is_tuple(graph) do
  graph
  |> Tuple.to_list()
  |> Enum.map_reduce(cache_and_counts, &to_predict_fun(&1, &2, input_map, params, inputs, mode))
end

defp to_predict_fun(%{id: id} = graph, {cache, op_counts}, input_map, params, inputs, mode) do
  case cache do
    %{^id => res} ->
      # Cache hit: reuse the already-built expression.
      {res, {cache, op_counts}}

    %{} ->
      try do
        recur_predict_fun(graph, {cache, op_counts}, input_map, params, inputs, mode)
      rescue
        # Wrap any layer-build failure so the error names the offending op.
        e -> reraise Axon.CompilerError.exception(graph: graph, exception: e), __STACKTRACE__
      end
  end
end
## Custom Layers
# Custom layer (op is a function) with multiple parent nodes. The op
# receives the parents' outputs (cast to compute precision), then an
# optional parameter map, then the layer's opts spliced as trailing
# arguments.
defp recur_predict_fun(
       %Axon{
         id: id,
         name: name_fn,
         op: op,
         parent: parents,
         params: layer_params,
         opts: opts,
         policy: %{compute: compute, output: output},
         hooks: hooks
       },
       cache_and_counts,
       input_map,
       params,
       inputs,
       mode
     )
     when is_function(op) and is_list(parents) do
  {exprs, {cache, op_counts}} =
    Enum.map_reduce(
      parents,
      cache_and_counts,
      &to_predict_fun(&1, &2, input_map, params, inputs, mode)
    )

  name = name_fn.(op, op_counts)
  op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

  inp_params =
    Map.new(layer_params, fn {k, %{name: v, frozen: frz}} ->
      {k, maybe_freeze(params[name][v], frz)}
    end)

  # BUG FIX: in a case clause the pattern `%{}` matches *any* map, so
  # the original `case inp_params do %{} -> [] ...` always produced []
  # and custom layers never received their parameters. Test emptiness
  # with an explicit equality check instead.
  param_arg = if inp_params == %{}, do: [], else: [inp_params]

  case mode do
    :train ->
      # Merge every parent's state map into this layer's state.
      states =
        Enum.reduce(exprs, %{}, fn expr, acc ->
          Map.merge(expr.state, acc)
        end)

      layer_inputs = Enum.map(exprs, &Nx.as_type(&1.prediction, compute))
      out = apply(op, layer_inputs ++ param_arg ++ opts)
      out_hooked = apply_hooks(out, :forward, :train, hooks)

      res = %{prediction: Nx.as_type(out_hooked, output), state: states}
      {res, {Map.put(cache, id, res), op_counts}}

    :inference ->
      layer_inputs = Enum.map(exprs, &Nx.as_type(&1, compute))
      out = apply(op, layer_inputs ++ param_arg ++ opts)
      out_hooked = apply_hooks(out, :forward, :inference, hooks)
      out = Nx.as_type(out_hooked, output)

      {out, {Map.put(cache, id, out), op_counts}}
  end
end
# Custom layer (op is a function) with a single parent node.
#
# NOTE: unlike the multi-parent clause, opts here are passed as one
# trailing keyword-list argument (`++ [opts]`, not spliced) — kept as-is
# since the custom-op calling convention is defined by callers.
defp recur_predict_fun(
       %Axon{
         id: id,
         name: name_fn,
         op: op,
         parent: parent,
         params: layer_params,
         opts: opts,
         policy: %{compute: compute, output: output},
         hooks: hooks
       },
       cache_and_counts,
       input_map,
       params,
       inputs,
       mode
     )
     when is_function(op) do
  {res, {cache, op_counts}} =
    to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)

  name = name_fn.(op, op_counts)
  op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

  inp_params =
    Map.new(layer_params, fn {k, %{name: v, frozen: frz}} ->
      {k, maybe_freeze(params[name][v], frz)}
    end)

  # BUG FIX: `case inp_params do %{} -> [] ...` matched every map, so
  # parameters were never passed to the op. Explicit emptiness check.
  param_arg = if inp_params == %{}, do: [], else: [inp_params]

  case mode do
    :train ->
      inp = Nx.as_type(res.prediction, compute)
      out = apply(op, [inp] ++ param_arg ++ [opts])
      out_hooked = apply_hooks(out, :forward, :train, hooks)
      res = Map.update!(res, :prediction, fn _ -> Nx.as_type(out_hooked, output) end)
      {res, {Map.put(cache, id, res), op_counts}}

    :inference ->
      # BUG FIX: mirror the train branch — cast the input to the compute
      # precision (the original skipped the cast), tag forward hooks with
      # :inference (not :train), and return the hooked result so the
      # cached and returned values agree.
      inp = Nx.as_type(res, compute)

      out =
        op
        |> apply([inp] ++ param_arg ++ [opts])
        |> Nx.as_type(output)
        |> apply_hooks(:forward, :inference, hooks)

      {out, {Map.put(cache, id, out), op_counts}}
  end
end
## Activation Layers
# All element-wise activation ops that dispatch to Axon.Activations.
@activation_layers [:celu, :elu, :exp, :gelu, :hard_sigmoid, :hard_silu, :hard_tanh] ++
                     [:leaky_relu, :linear, :log_sigmoid, :mish, :relu, :relu6] ++
                     [:sigmoid, :silu, :selu, :softmax, :softplus, :softsign, :tanh] ++
                     [:log_softmax]

# Activation layer: cast to compute precision, run pre_forward hooks,
# apply the activation (with opts when present), cast to output
# precision, then run forward/backward hooks.
defp recur_predict_fun(
       %Axon{
         id: id,
         op: op,
         parent: parent,
         policy: %{compute: compute, output: output},
         opts: opts,
         hooks: hooks
       },
       cache_and_counts,
       input_map,
       params,
       inputs,
       mode
     )
     when op in @activation_layers do
  {res, {cache, op_counts}} =
    to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)

  op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)

  case mode do
    :train ->
      # In :train mode the parent result is %{prediction: ..., state: ...}.
      input =
        res.prediction
        |> Nx.as_type(compute)
        |> apply_hooks(:pre_forward, :train, hooks)

      # Only pass opts through when the layer actually has some.
      args =
        case opts do
          [] ->
            [input]

          [_ | _] ->
            [input, opts]
        end

      out =
        args
        |> then(&apply(Axon.Activations, op, &1))
        |> Nx.as_type(output)
        |> apply_hooks(:forward, :train, hooks)
        |> apply_hooks(:backward, :train, hooks)

      res = Map.update!(res, :prediction, fn _ -> out end)
      {res, {Map.put(cache, id, res), op_counts}}

    :inference ->
      input =
        res
        |> Nx.as_type(compute)
        |> apply_hooks(:pre_forward, :inference, hooks)

      args =
        case opts do
          [] ->
            [input]

          [_ | _] ->
            [input, opts]
        end

      res =
        args
        |> then(&apply(Axon.Activations, op, &1))
        |> Nx.as_type(output)
        |> apply_hooks(:forward, :inference, hooks)
        |> apply_hooks(:backward, :inference, hooks)

      {res, {Map.put(cache, id, res), op_counts}}
  end
end

## Linear Layers

# Dense layer: fetch kernel (and bias, unless disabled) from the params
# map at the layer's generated name, then apply Axon.Layers.dense/3.
defp recur_predict_fun(
       %Axon{
         id: id,
         op: :dense,
         name: name_fn,
         parent: parent,
         params: layer_params,
         policy: %{compute: compute, output: output},
         opts: [use_bias: use_bias],
         hooks: hooks
       },
       cache_and_counts,
       input_map,
       params,
       inputs,
       mode
     ) do
  {res, {cache, op_counts}} =
    to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)

  name = name_fn.(:dense, op_counts)
  op_counts = Map.update(op_counts, :dense, 1, fn x -> x + 1 end)

  w = layer_param(layer_params, "kernel", params[name], compute)

  # Without a bias parameter, a scalar zero stands in so the same
  # dense/3 call shape works in both configurations.
  b =
    if use_bias do
      layer_param(layer_params, "bias", params[name], compute)
    else
      Nx.tensor(0.0, type: compute)
    end

  case mode do
    :train ->
      out =
        res.prediction
        |> Nx.as_type(compute)
        |> apply_hooks(:pre_forward, :train, hooks)
        |> Axon.Layers.dense(w, b)
        |> Nx.as_type(output)
        |> apply_hooks(:forward, :train, hooks)
        |> apply_hooks(:backward, :train, hooks)

      res = Map.update!(res, :prediction, fn _ -> out end)
      {res, {Map.put(cache, id, res), op_counts}}

    :inference ->
      res =
        res
        |> Nx.as_type(compute)
        |> apply_hooks(:pre_forward, :inference, hooks)
        |> Axon.Layers.dense(w, b)
        |> Nx.as_type(output)
        |> apply_hooks(:forward, :inference, hooks)
        |> apply_hooks(:backward, :inference, hooks)

      {res, {Map.put(cache, id, res), op_counts}}
  end
end
# Bilinear layer over exactly two parent nodes.
defp recur_predict_fun(
       %Axon{
         id: id,
         name: name_fn,
         op: :bilinear,
         parent: parents,
         params: layer_params,
         policy: %{compute: compute, output: output},
         opts: [use_bias: use_bias],
         hooks: hooks
       },
       cache_and_counts,
       input_map,
       params,
       inputs,
       mode
     ) do
  {[res1, res2], {cache, op_counts}} =
    Enum.map_reduce(
      parents,
      cache_and_counts,
      &to_predict_fun(&1, &2, input_map, params, inputs, mode)
    )

  name = name_fn.(:bilinear, op_counts)
  op_counts = Map.update(op_counts, :bilinear, 1, fn x -> x + 1 end)

  w = layer_param(layer_params, "kernel", params[name], compute)

  b =
    if use_bias do
      layer_param(layer_params, "bias", params[name], compute)
    else
      Nx.tensor(0.0, type: compute)
    end

  case mode do
    :train ->
      input1 = Nx.as_type(res1.prediction, compute)
      input2 = Nx.as_type(res2.prediction, compute)

      # TODO: Should these be sent/hooked as a container?
      {input1_hooked, input2_hooked} =
        apply_hooks({input1, input2}, :pre_forward, :train, hooks)

      out =
        input1_hooked
        |> Axon.Layers.bilinear(input2_hooked, w, b)
        |> Nx.as_type(output)
        |> apply_hooks(:forward, :train, hooks)
        |> apply_hooks(:backward, :train, hooks)

      # BUG FIX: states live on the parent results (res1/res2), not on
      # the cast input tensors — `input1.state` accessed a key on an Nx
      # tensor and raised at runtime.
      res = %{prediction: out, state: Map.merge(res1.state, res2.state)}
      {res, {Map.put(cache, id, res), op_counts}}

    :inference ->
      input1 = Nx.as_type(res1, compute)
      input2 = Nx.as_type(res2, compute)

      # BUG FIX: hooks in the inference branch were tagged :train.
      {input1_hooked, input2_hooked} =
        apply_hooks({input1, input2}, :pre_forward, :inference, hooks)

      res =
        input1_hooked
        |> Axon.Layers.bilinear(input2_hooked, w, b)
        |> Nx.as_type(output)
        |> apply_hooks(:forward, :inference, hooks)
        |> apply_hooks(:backward, :inference, hooks)

      {res, {Map.put(cache, id, res), op_counts}}
  end
end
## Sparse Layers
defp recur_predict_fun(
%Axon{
id: id,
name: name_fn,
op: :embedding,
parent: parent,
params: layer_params,
policy: %{compute: compute, output: output},
hooks: hooks
},
cache_and_counts,
input_map,
params,
inputs,
mode
) do
{res, {cache, op_counts}} =
to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
name = name_fn.(:embedding, op_counts)
op_counts = Map.update(op_counts, :embedding, 1, fn x -> x + 1 end)
w = layer_param(layer_params, "kernel", params[name], compute)
case mode do
:train ->
out =
res.prediction
|> apply_hooks(:pre_forward, :train, hooks)
|> Axon.Layers.embedding(w)
|> Nx.as_type(output)
|> apply_hooks(:forward, :train, hooks)
|> apply_hooks(:backward, :train, hooks)
res = Map.update!(res, :prediction, fn _ -> out end)
{res, {Map.put(cache, id, res), op_counts}}
:inference ->
res =
res
|> apply_hooks(:pre_forward, :inference, hooks)
|> Axon.Layers.embedding(w)
|> Nx.as_type(output)
|> apply_hooks(:forward, :inference, hooks)
|> apply_hooks(:backward, :inference, hooks)
{res, {Map.put(cache, id, res), op_counts}}
end
end
## Pooling Layers
  @pooling_layers [:max_pool, :avg_pool, :adaptive_avg_pool] ++
                    [:adaptive_max_pool, :adaptive_lp_pool, :lp_pool] ++
                    [:global_lp_pool, :global_max_pool, :global_avg_pool]
  # Parameterless pooling layers: cast to compute precision, dispatch to the
  # matching function in Axon.Layers, cast back to output precision.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           parent: parent,
           opts: opts,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       )
       when op in @pooling_layers do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> then(&apply(Axon.Layers, op, [&1, opts]))
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> then(&apply(Axon.Layers, op, [&1, opts]))
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  ## Dropout Layers
  @dropout_layers [:dropout, :feature_alpha_dropout, :spatial_dropout, :alpha_dropout]
  # Dropout layers apply noise only in :train mode; in :inference they reduce
  # to a type cast of the parent result (no hooks fire in that branch).
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           parent: parent,
           opts: opts,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       )
       when op in @dropout_layers do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> then(&apply(Axon.Layers, op, [&1, opts]))
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        # Skip dropout in inference mode
        res = Nx.as_type(res, output)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
## Conv Layers
  @conv_layers [:conv, :conv_transpose, :depthwise_conv]
  # Convolution layers with a single kernel and optional bias. When
  # `use_bias: false` a scalar zero stands in for the bias tensor.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: op,
           parent: parent,
           opts: opts,
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       )
       when op in @conv_layers do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    name = name_fn.(op, op_counts)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)
    # :use_bias is consumed here; the remaining opts go to the layer fn.
    {use_bias, opts} = Keyword.pop!(opts, :use_bias)
    k = layer_param(layer_params, "kernel", params[name], compute)
    b =
      if use_bias do
        layer_param(layer_params, "bias", params[name], compute)
      else
        Nx.tensor(0, type: compute)
      end
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> then(&apply(Axon.Layers, op, [&1, k, b, opts]))
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> then(&apply(Axon.Layers, op, [&1, k, b, opts]))
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # 2-D separable convolution: two kernel/bias pairs ("k1"/"b1", "k2"/"b2").
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: :separable_conv2d,
           parent: parent,
           opts: opts,
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    name = name_fn.(:separable_conv2d, op_counts)
    op_counts = Map.update(op_counts, :separable_conv2d, 1, fn x -> x + 1 end)
    {use_bias, opts} = Keyword.pop!(opts, :use_bias)
    k1 = layer_param(layer_params, "k1", params[name], compute)
    k2 = layer_param(layer_params, "k2", params[name], compute)
    {b1, b2} =
      if use_bias do
        {layer_param(layer_params, "b1", params[name], compute),
         layer_param(layer_params, "b2", params[name], compute)}
      else
        {Nx.tensor(0, type: compute), Nx.tensor(0, type: compute)}
      end
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> Axon.Layers.separable_conv2d(k1, b1, k2, b2, opts)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> Axon.Layers.separable_conv2d(k1, b1, k2, b2, opts)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # 3-D separable convolution: three kernel/bias pairs.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: :separable_conv3d,
           parent: parent,
           opts: opts,
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    name = name_fn.(:separable_conv3d, op_counts)
    op_counts = Map.update(op_counts, :separable_conv3d, 1, fn x -> x + 1 end)
    {use_bias, opts} = Keyword.pop!(opts, :use_bias)
    k1 = layer_param(layer_params, "k1", params[name], compute)
    k2 = layer_param(layer_params, "k2", params[name], compute)
    k3 = layer_param(layer_params, "k3", params[name], compute)
    {b1, b2, b3} =
      if use_bias do
        {layer_param(layer_params, "b1", params[name], compute),
         layer_param(layer_params, "b2", params[name], compute),
         layer_param(layer_params, "b3", params[name], compute)}
      else
        {Nx.tensor(0, type: compute), Nx.tensor(0, type: compute), Nx.tensor(0, type: compute)}
      end
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> Axon.Layers.separable_conv3d(k1, b1, k2, b2, k3, b3, opts)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> Axon.Layers.separable_conv3d(k1, b1, k2, b2, k3, b3, opts)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
## Normalization Layers
  @normalization_with_stats [:batch_norm, :instance_norm]
  # Normalization layers that maintain running mean/variance statistics.
  # In :train mode the layer fn returns {output, new_mean, new_var} and the
  # running stats are written into the result's :state under the layer name.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: op,
           parent: parent,
           opts: [epsilon: epsilon, channel_index: channel_index, momentum: momentum],
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       )
       when op in @normalization_with_stats do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    name = name_fn.(op, op_counts)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)
    training? = mode == :train
    norm_opts = [
      epsilon: epsilon,
      channel_index: channel_index,
      momentum: momentum,
      training?: training?
    ]
    g = layer_param(layer_params, "gamma", params[name], compute)
    b = layer_param(layer_params, "beta", params[name], compute)
    mean = layer_param(layer_params, "mean", params[name], compute)
    var = layer_param(layer_params, "var", params[name], compute)
    case mode do
      :train ->
        {out, ra_mean, ra_var} =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> then(&apply(Axon.Layers, op, [&1, g, b, mean, var, norm_opts]))
          |> then(fn {y, m, v} -> {Nx.as_type(y, output), m, v} end)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res =
          res
          |> Map.update!(:prediction, fn _ -> Nx.as_type(out, output) end)
          |> Map.update!(:state, fn states ->
            Map.put(states, name, %{"mean" => ra_mean, "var" => ra_var})
          end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> then(&apply(Axon.Layers, op, [&1, g, b, mean, var, norm_opts]))
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  @normalization_layers [:layer_norm, :group_norm]
  # Stateless normalization layers: gamma/beta parameters only, no running
  # statistics to track.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: op,
           parent: parent,
           opts: opts,
           params: layer_params,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       )
       when op in @normalization_layers do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    name = name_fn.(op, op_counts)
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)
    g = layer_param(layer_params, "gamma", params[name], compute)
    b = layer_param(layer_params, "beta", params[name], compute)
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> then(&apply(Axon.Layers, op, [&1, g, b, opts]))
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> then(&apply(Axon.Layers, op, [&1, g, b, opts]))
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
## Recurrent Layers
defp recur_predict_fun(
%Axon{
id: id,
name: name_fn,
op: :lstm,
parent: parent,
params: layer_params,
policy: %{compute: compute, output: output},
opts: [
activation: activation,
gate: gate,
hidden_state: hidden_state,
hidden_state_shape: hidden_state_shape,
recurrent_initializer: recurrent_initializer,
unroll: unroll,
use_bias: use_bias
],
hooks: hooks
},
cache_and_counts,
input_map,
params,
inputs,
mode
) do
{res, cache_and_counts} =
to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
{{h, c}, {cache, op_counts}} =
to_hidden_state(
hidden_state,
res,
cache_and_counts,
input_map,
params,
inputs,
2,
recurrent_initializer,
hidden_state_shape,
mode
)
name = name_fn.(:lstm, op_counts)
op_counts = Map.update(op_counts, :lstm, 1, fn x -> x + 1 end)
input_kernel = {
layer_param(layer_params, "wii", params[name], compute),
layer_param(layer_params, "wif", params[name], compute),
layer_param(layer_params, "wig", params[name], compute),
layer_param(layer_params, "wio", params[name], compute)
}
hidden_kernel = {
layer_param(layer_params, "whi", params[name], compute),
layer_param(layer_params, "whf", params[name], compute),
layer_param(layer_params, "whg", params[name], compute),
layer_param(layer_params, "who", params[name], compute)
}
bias =
if use_bias do
{
layer_param(layer_params, "bi", params[name], compute),
layer_param(layer_params, "bf", params[name], compute),
layer_param(layer_params, "bg", params[name], compute),
layer_param(layer_params, "bo", params[name], compute)
}
else
{Nx.tensor(0, type: compute), Nx.tensor(0, type: compute), Nx.tensor(0, type: compute),
Nx.tensor(0, type: compute)}
end
case mode do
:train ->
input = Nx.as_type(res.prediction, compute)
carry = {Nx.as_type(h.prediction, compute), Nx.as_type(c.prediction, compute)}
# TODO: Should these be hooked together? Not at all?
{input, carry} = apply_hooks({input, carry}, :pre_forward, :train, hooks)
gate_fn = &apply(Axon.Activations, gate, [&1])
activation_fn = &apply(Axon.Activations, activation, [&1])
{{c1, c2}, res} =
case unroll do
:static ->
Axon.Recurrent.static_unroll(
&Axon.Recurrent.lstm_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn),
input,
carry,
input_kernel,
hidden_kernel,
bias
)
:dynamic ->
Axon.Recurrent.dynamic_unroll(
&Axon.Recurrent.lstm_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn),
input,
carry,
input_kernel,
hidden_kernel,
bias
)
end
out = {{Nx.as_type(c1, output), Nx.as_type(c2, output)}, Nx.as_type(res, output)}
out = apply_hooks(out, :forward, :train, hooks)
out = apply_hooks(out, :backward, :train, hooks)
state =
res.state
|> Map.merge(h.state)
|> Map.merge(c.state)
res = %{prediction: out, state: state}
{res, {Map.put(cache, id, res), op_counts}}
:inference ->
input = Nx.as_type(res, compute)
carry = {Nx.as_type(h, compute), Nx.as_type(c, compute)}
# TODO: Should these be hooked together? Not at all?
{input, carry} = apply_hooks({input, carry}, :pre_forward, :inference, hooks)
gate_fn = &apply(Axon.Activations, gate, [&1])
activation_fn = &apply(Axon.Activations, activation, [&1])
{{c1, c2}, res} =
case unroll do
:static ->
Axon.Recurrent.static_unroll(
&Axon.Recurrent.lstm_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn),
input,
carry,
input_kernel,
hidden_kernel,
bias
)
:dynamic ->
Axon.Recurrent.dynamic_unroll(
&Axon.Recurrent.lstm_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn),
input,
carry,
input_kernel,
hidden_kernel,
bias
)
end
res = {{Nx.as_type(c1, output), Nx.as_type(c2, output)}, Nx.as_type(res, output)}
res = apply_hooks(res, :forward, :inference, hooks)
res = apply_hooks(res, :backward, :inference, hooks)
{res, {Map.put(cache, id, res), op_counts}}
end
end
  # Convolutional LSTM: like :lstm, but with a single convolutional kernel
  # per gate group ("wi"/"wh"/"b") and strides/padding options.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: :conv_lstm,
           parent: parent,
           params: layer_params,
           policy: %{compute: compute, output: output},
           opts: [
             hidden_state: hidden_state,
             strides: strides,
             padding: padding,
             hidden_state_shape: hidden_state_shape,
             recurrent_initializer: recurrent_initializer,
             unroll: unroll
           ],
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, cache_and_counts} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    {{h, c}, {cache, op_counts}} =
      to_hidden_state(
        hidden_state,
        res,
        cache_and_counts,
        input_map,
        params,
        inputs,
        2,
        recurrent_initializer,
        hidden_state_shape,
        mode
      )
    name = name_fn.(:conv_lstm, op_counts)
    op_counts = Map.update(op_counts, :conv_lstm, 1, fn x -> x + 1 end)
    input_kernel = {layer_param(layer_params, "wi", params[name], compute)}
    hidden_kernel = {layer_param(layer_params, "wh", params[name], compute)}
    bias = {layer_param(layer_params, "b", params[name], compute)}
    case mode do
      :train ->
        input = Nx.as_type(res.prediction, compute)
        carry = {Nx.as_type(h.prediction, compute), Nx.as_type(c.prediction, compute)}
        # TODO: Should these be hooked together? Not at all?
        {input, carry} = apply_hooks({input, carry}, :pre_forward, :train, hooks)
        {{c1, c2}, out} =
          case unroll do
            :static ->
              Axon.Recurrent.static_unroll(
                &Axon.Recurrent.conv_lstm_cell(&1, &2, &3, &4, &5,
                  strides: strides,
                  padding: padding
                ),
                input,
                carry,
                input_kernel,
                hidden_kernel,
                bias
              )
            :dynamic ->
              Axon.Recurrent.dynamic_unroll(
                &Axon.Recurrent.conv_lstm_cell(&1, &2, &3, &4, &5,
                  strides: strides,
                  padding: padding
                ),
                input,
                carry,
                input_kernel,
                hidden_kernel,
                bias
              )
          end
        out = {{Nx.as_type(c1, output), Nx.as_type(c2, output)}, Nx.as_type(out, output)}
        out = apply_hooks(out, :forward, :train, hooks)
        out = apply_hooks(out, :backward, :train, hooks)
        # Merge state from the parent and both carry nodes.
        state =
          res.state
          |> Map.merge(h.state)
          |> Map.merge(c.state)
        res = %{prediction: out, state: state}
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        input = Nx.as_type(res, compute)
        carry = {Nx.as_type(h, compute), Nx.as_type(c, compute)}
        # TODO: Should these be hooked together? Not at all?
        {input, carry} = apply_hooks({input, carry}, :pre_forward, :inference, hooks)
        {{c1, c2}, out} =
          case unroll do
            :static ->
              Axon.Recurrent.static_unroll(
                &Axon.Recurrent.conv_lstm_cell(&1, &2, &3, &4, &5,
                  strides: strides,
                  padding: padding
                ),
                input,
                carry,
                input_kernel,
                hidden_kernel,
                bias
              )
            :dynamic ->
              Axon.Recurrent.dynamic_unroll(
                &Axon.Recurrent.conv_lstm_cell(&1, &2, &3, &4, &5,
                  strides: strides,
                  padding: padding
                ),
                input,
                carry,
                input_kernel,
                hidden_kernel,
                bias
              )
          end
        res = {{Nx.as_type(c1, output), Nx.as_type(c2, output)}, Nx.as_type(out, output)}
        res = apply_hooks(res, :forward, :inference, hooks)
        res = apply_hooks(res, :backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # GRU layer: three kernels per side (reset, update, new) and — when biased —
  # four bias tensors ("br", "bz", "bin", "bhn"). Carry is a 1-tuple {h}.
  defp recur_predict_fun(
         %Axon{
           id: id,
           name: name_fn,
           op: :gru,
           parent: parent,
           params: layer_params,
           policy: %{compute: compute, output: output},
           opts: [
             activation: activation,
             gate: gate,
             hidden_state: hidden_state,
             hidden_state_shape: hidden_state_shape,
             recurrent_initializer: recurrent_initializer,
             unroll: unroll,
             use_bias: use_bias
           ],
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, cache_and_counts} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    {{h}, {cache, op_counts}} =
      to_hidden_state(
        hidden_state,
        res,
        cache_and_counts,
        input_map,
        params,
        inputs,
        1,
        recurrent_initializer,
        hidden_state_shape,
        mode
      )
    name = name_fn.(:gru, op_counts)
    op_counts = Map.update(op_counts, :gru, 1, fn x -> x + 1 end)
    input_kernel = {
      layer_param(layer_params, "wir", params[name], compute),
      layer_param(layer_params, "wiz", params[name], compute),
      layer_param(layer_params, "win", params[name], compute)
    }
    hidden_kernel = {
      layer_param(layer_params, "whr", params[name], compute),
      layer_param(layer_params, "whz", params[name], compute),
      layer_param(layer_params, "whn", params[name], compute)
    }
    bias =
      if use_bias do
        {
          layer_param(layer_params, "br", params[name], compute),
          layer_param(layer_params, "bz", params[name], compute),
          layer_param(layer_params, "bin", params[name], compute),
          layer_param(layer_params, "bhn", params[name], compute)
        }
      else
        {
          Nx.tensor(0, type: compute),
          Nx.tensor(0, type: compute),
          Nx.tensor(0, type: compute),
          Nx.tensor(0, type: compute)
        }
      end
    case mode do
      :train ->
        input = Nx.as_type(res.prediction, compute)
        carry = {Nx.as_type(h.prediction, compute)}
        # TODO: Should these be hooked together? Not at all?
        {input, carry} = apply_hooks({input, carry}, :pre_forward, :train, hooks)
        gate_fn = &apply(Axon.Activations, gate, [&1])
        activation_fn = &apply(Axon.Activations, activation, [&1])
        {{c}, out} =
          case unroll do
            :static ->
              Axon.Recurrent.static_unroll(
                &Axon.Recurrent.gru_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn),
                input,
                carry,
                input_kernel,
                hidden_kernel,
                bias
              )
            :dynamic ->
              Axon.Recurrent.dynamic_unroll(
                &Axon.Recurrent.gru_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn),
                input,
                carry,
                input_kernel,
                hidden_kernel,
                bias
              )
          end
        out = {{Nx.as_type(c, output)}, Nx.as_type(out, output)}
        out = apply_hooks(out, :forward, :train, hooks)
        out = apply_hooks(out, :backward, :train, hooks)
        state = Map.merge(h.state, res.state)
        res = %{prediction: out, state: state}
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        input = Nx.as_type(res, compute)
        carry = {Nx.as_type(h, compute)}
        # TODO: Should these be hooked together? Not at all?
        {input, carry} = apply_hooks({input, carry}, :pre_forward, :inference, hooks)
        gate_fn = &apply(Axon.Activations, gate, [&1])
        activation_fn = &apply(Axon.Activations, activation, [&1])
        {{c}, out} =
          case unroll do
            :static ->
              Axon.Recurrent.static_unroll(
                &Axon.Recurrent.gru_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn),
                input,
                carry,
                input_kernel,
                hidden_kernel,
                bias
              )
            :dynamic ->
              Axon.Recurrent.dynamic_unroll(
                &Axon.Recurrent.gru_cell(&1, &2, &3, &4, &5, gate_fn, activation_fn),
                input,
                carry,
                input_kernel,
                hidden_kernel,
                bias
              )
          end
        res = {{Nx.as_type(c, output)}, Nx.as_type(out, output)}
        res = apply_hooks(res, :forward, :inference, hooks)
        res = apply_hooks(res, :backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
## Element-wise layers
  @element_wise_layers [:add, :subtract, :multiply]
  # Element-wise binary layers folded left-to-right over all parents.
  # NOTE(review): the pre_forward hooks here receive the full result maps
  # (not just predictions) in :train mode — confirm this is intended.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: op,
           parent: parents,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       )
       when op in @element_wise_layers do
    {[expr | exprs], {cache, op_counts}} =
      Enum.map_reduce(
        parents,
        cache_and_counts,
        &to_predict_fun(&1, &2, input_map, params, inputs, mode)
      )
    op_counts = Map.update(op_counts, op, 1, fn x -> x + 1 end)
    case mode do
      :train ->
        [expr | exprs] =
          [expr | exprs]
          |> List.to_tuple()
          |> apply_hooks(:pre_forward, :train, hooks)
          |> Tuple.to_list()
        # Fold the Nx op across parents, merging each parent's state.
        {out, state} =
          Enum.reduce(exprs, {expr.prediction, expr.state}, fn next_expr, {acc_out, acc_state} ->
            input = Nx.as_type(next_expr.prediction, compute)
            acc_out = Nx.as_type(acc_out, compute)
            {Nx.as_type(apply(Nx, op, [acc_out, input]), output),
             Map.merge(next_expr.state, acc_state)}
          end)
        out = apply_hooks(out, :forward, :train, hooks)
        out = apply_hooks(out, :backward, :train, hooks)
        res = %{prediction: out, state: state}
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        [expr | exprs] =
          [expr | exprs]
          |> List.to_tuple()
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> Tuple.to_list()
        res =
          Enum.reduce(exprs, expr, fn next_expr, acc ->
            input = Nx.as_type(next_expr, compute)
            acc = Nx.as_type(acc, compute)
            Nx.as_type(apply(Nx, op, [acc, input]), output)
          end)
        res = apply_hooks(res, :forward, :inference, hooks)
        res = apply_hooks(res, :backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
## Shape Layers
  # Flatten layer: collapses all non-batch dimensions.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :flatten,
           parent: parent,
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    op_counts = Map.update(op_counts, :flatten, 1, fn x -> x + 1 end)
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> Axon.Layers.flatten()
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> Axon.Layers.flatten()
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # Reshape layer. Non-constant reshapes substitute the runtime batch size
  # into element 0 of the declared output shape.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :reshape,
           parent: parent,
           output_shape: output_shape,
           policy: %{compute: compute, output: output},
           opts: [constant: is_constant_reshape?],
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    op_counts = Map.update(op_counts, :reshape, 1, fn x -> x + 1 end)
    case mode do
      :train ->
        inp =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
        reshape_shape =
          if is_constant_reshape? do
            output_shape
          else
            put_elem(output_shape, 0, elem(Nx.shape(inp), 0))
          end
        out =
          inp
          |> Nx.reshape(reshape_shape)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        inp =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
        reshape_shape =
          if is_constant_reshape? do
            output_shape
          else
            put_elem(output_shape, 0, elem(Nx.shape(inp), 0))
          end
        res =
          inp
          |> Nx.reshape(reshape_shape)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # Resize layer: delegates to Axon.Layers.resize/2 with the layer opts.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :resize,
           parent: parent,
           policy: %{compute: compute, output: output},
           opts: opts,
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    op_counts = Map.update(op_counts, :resize, 1, fn x -> x + 1 end)
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> Axon.Layers.resize(opts)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> Axon.Layers.resize(opts)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # Transpose layer. When ignore_batch? is set the user permutation covers
  # only the non-batch axes, so shift it by one and pin axis 0 in place.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :transpose,
           parent: parent,
           opts: [permutation: permutation, ignore_batch?: ignore_batch?],
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    op_counts = Map.update(op_counts, :transpose, 1, fn x -> x + 1 end)
    permutation =
      if ignore_batch? do
        [0 | Enum.map(permutation, &(&1 + 1))]
      else
        permutation
      end
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> Nx.transpose(axes: permutation)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> Nx.transpose(axes: permutation)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # Pad layer. The user config covers spatial axes only; batch and channel
  # axes get {0, 0, 0} and each {lo, hi} pair gains interior padding 0 to
  # match Nx.pad/3's 3-tuple format.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :pad,
           parent: parent,
           opts: [padding_config: config, value: value],
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {res, {cache, op_counts}} =
      to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
    op_counts = Map.update(op_counts, :pad, 1, fn x -> x + 1 end)
    config = [{0, 0, 0}, {0, 0, 0} | Enum.map(config, fn {x, y} -> {x, y, 0} end)]
    case mode do
      :train ->
        out =
          res.prediction
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :train, hooks)
          |> Nx.pad(value, config)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        res = Map.update!(res, :prediction, fn _ -> out end)
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        res =
          res
          |> Nx.as_type(compute)
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> Nx.pad(value, config)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # Concatenate layer: joins all parents along `axis` after casting each to
  # the compute precision.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :concatenate,
           parent: parents,
           opts: [axis: axis],
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {exprs, {cache, op_counts}} =
      Enum.map_reduce(
        parents,
        cache_and_counts,
        &to_predict_fun(&1, &2, input_map, params, inputs, mode)
      )
    op_counts = Map.update(op_counts, :concatenate, 1, fn x -> x + 1 end)
    case mode do
      :train ->
        inps = Enum.map(exprs, &Nx.as_type(&1.prediction, compute))
        inps =
          inps
          |> List.to_tuple()
          |> apply_hooks(:pre_forward, :train, hooks)
          |> Tuple.to_list()
        out =
          inps
          |> Nx.concatenate(axis: axis)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :train, hooks)
          |> apply_hooks(:backward, :train, hooks)
        # Merge state from every parent node.
        state = Enum.reduce(exprs, %{}, &Map.merge(&1.state, &2))
        res = %{prediction: out, state: state}
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        inps = Enum.map(exprs, &Nx.as_type(&1, compute))
        inps =
          inps
          |> List.to_tuple()
          |> apply_hooks(:pre_forward, :inference, hooks)
          |> Tuple.to_list()
        res =
          inps
          |> Nx.concatenate(axis: axis)
          |> Nx.as_type(output)
          |> apply_hooks(:forward, :inference, hooks)
          |> apply_hooks(:backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
  # Conditional layer: `cond_fn` must produce a scalar u8 (boolean) tensor
  # from the first parent; that selects between the true and false branches.
  # The rank/type check runs on the pre-hook condition expression.
  defp recur_predict_fun(
         %Axon{
           id: id,
           op: :cond,
           parent: parents,
           opts: [cond: cond_fn],
           policy: %{compute: compute, output: output},
           hooks: hooks
         },
         cache_and_counts,
         input_map,
         params,
         inputs,
         mode
       ) do
    {exprs, {cache, op_counts}} =
      Enum.map_reduce(
        parents,
        cache_and_counts,
        &to_predict_fun(&1, &2, input_map, params, inputs, mode)
      )
    op_counts = Map.update(op_counts, :cond, 1, fn x -> x + 1 end)
    [cond_input_expr, true_expr, false_expr] = exprs
    case mode do
      :train ->
        cond_expr = cond_fn.(cond_input_expr.prediction)
        cond_rank = Nx.rank(cond_expr)
        cond_type = Nx.type(cond_expr)
        {cond_expr, on_true, on_false} =
          [cond_expr, true_expr.prediction, false_expr.prediction]
          |> List.to_tuple()
          |> apply_hooks(:pre_forward, :train, hooks)
        unless cond_rank == 0 and cond_type == {:u, 8} do
          raise Axon.CompilerError,
                "cond_fn must return a scalar-boolean tensor" <>
                  " got result with rank #{inspect(cond_rank)} and" <>
                  " type #{inspect(cond_type)}"
        end
        out =
          Axon.Layers.cond(
            Nx.all(cond_expr),
            Nx.as_type(on_true, compute),
            Nx.as_type(on_false, compute)
          )
        out = Nx.as_type(out, output)
        out = apply_hooks(out, :forward, :train, hooks)
        out = apply_hooks(out, :backward, :train, hooks)
        state =
          cond_input_expr.state
          |> Map.merge(true_expr.state)
          |> Map.merge(false_expr.state)
        res = %{prediction: out, state: state}
        {res, {Map.put(cache, id, res), op_counts}}
      :inference ->
        cond_expr = cond_fn.(cond_input_expr)
        cond_rank = Nx.rank(cond_expr)
        cond_type = Nx.type(cond_expr)
        unless cond_rank == 0 and cond_type == {:u, 8} do
          raise Axon.CompilerError,
                "cond_fn must return a scalar-boolean tensor" <>
                  " got result with rank #{inspect(cond_rank)} and" <>
                  " type #{inspect(cond_type)}"
        end
        {cond_expr, on_true, on_false} =
          [cond_expr, true_expr, false_expr]
          |> List.to_tuple()
          |> apply_hooks(:pre_forward, :inference, hooks)
        res =
          Axon.Layers.cond(
            Nx.all(cond_expr),
            Nx.as_type(on_true, compute),
            Nx.as_type(on_false, compute)
          )
        res = Nx.as_type(res, output)
        res = apply_hooks(res, :forward, :inference, hooks)
        res = apply_hooks(res, :backward, :inference, hooks)
        {res, {Map.put(cache, id, res), op_counts}}
    end
  end
## Special Layers
defp recur_predict_fun(
%Axon{
id: id,
op: :nx,
parent: parent,
opts: [fun: nx_fun],
policy: %{compute: compute, output: output},
hooks: hooks
},
cache_and_counts,
input_map,
params,
inputs,
mode
) do
{res, {cache, op_counts}} =
to_predict_fun(parent, cache_and_counts, input_map, params, inputs, mode)
op_counts = Map.update(op_counts, :nx, 1, fn x -> x + 1 end)
case mode do
:train ->
out =
res.prediction
|> Nx.as_type(compute)
|> apply_hooks(:pre_forward, :train, hooks)
|> nx_fun.()
|> Nx.as_type(output)
|> apply_hooks(:forward, :train, hooks)
|> apply_hooks(:backward, :train, hooks)
res = Map.update!(res, :prediction, fn _ -> out end)
{res, {Map.put(cache, id, res), op_counts}}
:inference ->
res =
res
|> Nx.as_type(compute)
|> apply_hooks(:pre_forward, :train, hooks)
|> nx_fun.()
|> Nx.as_type(output)
|> apply_hooks(:forward, :train, hooks)
|> apply_hooks(:backward, :train, hooks)
{res, {Map.put(cache, id, res), op_counts}}
end
end
defp recur_predict_fun(
%Axon{id: id, op: :constant, opts: [value: tensor], policy: %{output: output}},
{cache, op_counts},
_,
_,
_,
mode
) do
out = Nx.as_type(tensor, output)
op_counts = Map.update(op_counts, :constant, 1, fn x -> x + 1 end)
case mode do
:train ->
res = %{prediction: out, state: %{}}
{res, {Map.put(cache, id, res), op_counts}}
:inference ->
{out, {Map.put(cache, id, out), op_counts}}
end
end
defp recur_predict_fun(
%Axon{id: id, op: :input, output_shape: shape, hooks: hooks},
{cache, op_counts},
input_map,
_,
inputs,
mode
) do
op_counts = Map.update(op_counts, :input, 1, fn x -> x + 1 end)
res =
if is_tuple(inputs) do
idx = input_map[id]
elem(inputs, idx)
else
inputs
end
unless Axon.Shape.compatible?(Nx.shape(res), shape) do
raise ArgumentError,
"invalid input shape given to model, expected input" <>
" with shape #{inspect(shape)}, but got input with" <>
" shape #{inspect(Nx.shape(res))}"
end
case mode do
:train ->
pred =
res
|> apply_hooks(:forward, :train, hooks)
|> apply_hooks(:backward, :train, hooks)
res = %{prediction: pred, state: %{}}
{res, {Map.put(cache, id, res), op_counts}}
:inference ->
res = apply_hooks(res, :forward, :train, hooks)
res = apply_hooks(res, :backward, :train, hooks)
{res, {Map.put(cache, id, res), op_counts}}
end
end
  # Stops gradient flow through frozen parameters; passes the param through
  # unchanged otherwise.
  defp maybe_freeze(param, true), do: Nx.Defn.Kernel.stop_grad(param)
  defp maybe_freeze(param, false), do: param
  # Applies every registered hook matching `event` and `mode` to `res`.
  # The hook list is reversed before folding (presumably hooks are prepended
  # at registration — confirm), so hooks run in registration order.
  # `:backward` hooks are attached via custom_grad so the hook fn observes
  # the incoming gradient `g` during backprop; all other events hook the
  # forward expression directly with Nx.Defn.Kernel.hook/2.
  defp apply_hooks(res, event, mode, hooks) do
    hooks
    |> Enum.reverse()
    |> Enum.reduce(res, fn {on_event, on_mode, hook_fn}, expr ->
      event? = on_event == event or on_event == :all
      mode? = on_mode == mode or on_mode == :both or mode == nil
      if event? and mode? do
        if on_event == :backward do
          Nx.Defn.Kernel.custom_grad(expr, fn _ans, g ->
            hooked_g = Nx.Defn.Kernel.hook(g, hook_fn)
            [{expr, hooked_g}]
          end)
        else
          Nx.Defn.Kernel.hook(expr, hook_fn)
        end
      else
        expr
      end
    end)
  end
  # Resolves the hidden-state carry for a recurrent layer. The hidden state
  # may be given as a tuple of Axon nodes, a single Axon node, or nil — in
  # which case `num_carry` tensors are initialized from
  # `recurrent_initializer` with the batch size taken from `input`.
  defp to_hidden_state(
         hidden_state,
         input,
         cache_and_counts,
         input_map,
         params,
         inputs,
         num_carry,
         recurrent_initializer,
         hidden_state_shape,
         mode
       ) do
    case hidden_state do
      {%Axon{} = c, %Axon{} = h} ->
        {c_res, cache_and_counts} =
          to_predict_fun(c, cache_and_counts, input_map, params, inputs, mode)
        {h_res, cache_and_counts} =
          to_predict_fun(h, cache_and_counts, input_map, params, inputs, mode)
        {{c_res, h_res}, cache_and_counts}
      {%Axon{} = c} ->
        {h_res, cache_and_counts} =
          to_predict_fun(c, cache_and_counts, input_map, params, inputs, mode)
        {{h_res}, cache_and_counts}
      %Axon{} = x ->
        {h_res, cache_and_counts} =
          to_predict_fun(x, cache_and_counts, input_map, params, inputs, mode)
        {h_res, cache_and_counts}
      nil ->
        # NOTE(review): in :train mode `input` is a %{prediction: ..., state: ...}
        # map, so Nx.shape(input) here looks suspect — confirm whether this
        # branch is ever reached in :train mode with a result map.
        shape = put_elem(hidden_state_shape, 0, elem(Nx.shape(input), 0))
        h_res =
          for _ <- 1..num_carry,
              do: apply(Axon.Initializers, recurrent_initializer, [[shape: shape]])
        res = List.to_tuple(h_res)
        res = if mode == :train, do: %{prediction: res, state: %{}}, else: res
        {res, cache_and_counts}
    end
  end
## Penalty Function Compilation
@doc false
# Compiles the penalty function for `graph` and dispatches it through the
# caller's JIT (or plain apply) mechanism.
def __jit_penalty__(graph, caller, args, opts) do
  jit_or_apply(caller, compile_penalty(graph), args, opts)
end
# Compiles a penalty (regularization) function for a model graph or a tuple
# of graphs.
#
# Collects one penalty function per named parameter (see `to_penalty_fun/2`)
# and returns a `fn params -> total_penalty end` closure summing all
# contributions with `Nx.add/2`. The summing closure used to be duplicated
# verbatim in both clauses; it now lives in `sum_penalty_funs/1`.
defp compile_penalty(graph) when is_tuple(graph) do
  graph
  |> Tuple.to_list()
  |> Enum.reduce(%{}, &to_penalty_fun/2)
  |> sum_penalty_funs()
end
defp compile_penalty(%Axon{} = graph) do
  graph
  |> to_penalty_fun(%{})
  |> sum_penalty_funs()
end
# Builds a single function evaluating every collected penalty for the given
# params and summing the results. Requires at least one penalty entry.
defp sum_penalty_funs(penalties) do
  [fun | funs] = Map.values(penalties)
  fn params ->
    Enum.reduce(funs, fun.(params), fn penalty, acc -> Nx.add(penalty.(params), acc) end)
  end
end
# Recursively collects penalty (regularization) functions for every parameter
# in the graph into `cache`, keyed by parameter name so each parameter
# contributes at most once even when reachable through several paths.
defp to_penalty_fun(%Axon{parent: parents}, cache) when is_list(parents) do
  Enum.reduce(parents, cache, fn graph, cache ->
    to_penalty_fun(graph, cache)
  end)
end
defp to_penalty_fun(
       %Axon{parent: parent, params: params, policy: %{params: param_policy}},
       cache
     ) do
  cache =
    params
    |> Enum.reduce(cache, fn {_, param}, cache ->
      %{name: name, regularizer: regularizer} = param
      case cache do
        # Already collected a penalty for this parameter name
        %{^name => _} ->
          cache
        %{} ->
          fun = fn params ->
            case regularizer do
              # No regularization: contributes a zero in the params policy type
              :none ->
                Nx.tensor(0.0, type: param_policy)
              # Built-in regularizer referenced by name
              regularizer when is_atom(regularizer) ->
                apply(Axon.Regularizers, regularizer, [params[name]])
              # Custom regularizer function
              regularizer when is_function(regularizer) ->
                apply(regularizer, [params[name]])
            end
          end
          Map.put(cache, name, fun)
      end
    end)
  # Continue walking up the graph if there is a parent node
  if parent do
    to_penalty_fun(parent, cache)
  else
    cache
  end
end
## Helpers
# Looks up the named layer parameter for `key`, applies freezing (stop_grad)
# when the parameter is marked frozen, and casts it to the compute type.
defp layer_param(layer_params, key, param_name, compute) do
  %{name: lookup, frozen: frozen?} = layer_params[key]

  param_name[lookup]
  |> maybe_freeze(frozen?)
  |> Nx.as_type(compute)
end
end
| 28.151453 | 98 | 0.532629 |
1c42f8111d6f09d190034cde604a3f4cf0c06342 | 20,289 | ex | Elixir | lib/bitcoin/script/interpreter.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | 2 | 2019-08-12T04:53:57.000Z | 2019-09-03T03:47:33.000Z | lib/bitcoin/script/interpreter.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | lib/bitcoin/script/interpreter.ex | coinscript/bitcoinsv-elixir | 2dda03c81edc5662743ed2922abb5b1910d9c09a | [
"Apache-2.0"
] | null | null | null | defmodule Bitcoin.Script.Interpreter do
@moduledoc """
Bitcoin Script interpreter.
Opcodes numbers are coming from: https://github.com/bitcoin/bitcoin/blob/master/src/script/script.h
Implemented from the scratch based on the wiki (https://en.bitcoin.it/wiki/Script)
and script test cases from bitcoinj/bitcore.
There is still a long road ahead to 100% of valid/invalid scripts test suite (it's really good).
List of issues to remember can be find in the source file with an upcase 'todo' tag.
"""
use Bitcoin.Script.Opcodes
use Bitcoin.Script.Minimaldata
import Bitcoin.Script.Macros
import Bitcoin.Script.Control
alias Bitcoin.Crypto
alias Bitcoin.Script.Number
defdelegate num(x), to: Number
defdelegate num(x, opts), to: Number
# we also delgate bin(x) when is_number(x), grouped with other bin function definitions down below
# Max number of items in stack + altstack
@max_stacks_size 1000
@max_pubkeys_per_multisig 20
# Max number of opcodes that can be present in the script
# (excluding opcodes with byte value equal or below OP_16)
@max_ops 201
@default_opts %{flags: %{}}
# Entry point: validate the parsed script, merge the caller options over the
# defaults, and start execution with the given (possibly empty) stack.
def exec(script, opts), do: exec([], script, opts)

def exec(stack, script, opts) do
  validated = validate(script)

  run_opts =
    @default_opts
    |> Map.merge(opts)
    |> Map.put(:script, validated)

  run(stack, validated, run_opts)
end
# Run the parsed script starting with an empty stack
def run(script, opts), do: run([], script, opts)
# Opcodes return :invalid instead of returning new stack in case execution should stop and script should fail
# Parser returns [:invalid] if the script couldn't be parsed
# Error propagation: once an error appears (as the whole "stack", on top of
# the stack, or ahead in the remaining script) it short-circuits execution.
def run({:error, err}, _script, _opts), do: {:error, err}
def run([{:error, err} | _], _script, _opts), do: {:error, err}
def run(_, [:invalid | _], _opts), do: {:error, :invalid}
def run(_, [{:error, err} | _], _opts), do: {:error, err}
# Stack size limit
# TODO should include altstack
# NOTE(review): `length/1` is O(n), so this guard costs O(n) per executed
# opcode — acceptable for the 1000-element cap, but worth remembering.
def run(stack, _script, _opts) when length(stack) > @max_stacks_size,
  do: {:error, :max_stacks_size}
# When no script is left to run, return the stack
def run(stack, [], _opts), do: stack
# OP_PUSHDATA — the parser already extracted the payload; just push it
def run(stack, [:OP_PUSHDATA1, data | script], opts), do: run([data | stack], script, opts)
def run(stack, [:OP_PUSHDATA2, data | script], opts), do: run([data | stack], script, opts)
def run(stack, [:OP_PUSHDATA4, data | script], opts), do: run([data | stack], script, opts)
# Binary blob, put it on the stack
# In case of a parsed script this should only be a single byte
def run(stack, [data | script], opts) when is_binary(data) or is_number(data),
  do: run([data | stack], script, opts)
# Validates the sanity of a parsed script.
#
# Returns the script unchanged when it passes, or a one-element script of the
# shape `[{:error, reason}]` which makes `run/3` fail immediately.
# We should probably switch to a single pass through the script like in
# bitcoin core.
def validate(script) do
  cond do
    # A script containing any disabled opcode is invalid
    Enum.any?(script, &(&1 in @disabled_op)) ->
      [{:error, :disabled_op}]

    # Opcode count limit; OP_0..OP_16, OP_RESERVED and push-data ops
    # are not counted towards the limit
    Enum.count(script, &(is_atom(&1) and &1 not in @push_data_ops)) > @max_ops ->
      [{:error, :max_ops}]

    true ->
      script
  end
end
##
## PUSH VALUE
##
op_push(:OP_TRUE, 1)
op_push(:OP_FALSE, 0)
op_push(:OP_1NEGATE, -1)
op_push(:OP_1, 1)
op_push(:OP_2, 2)
op_push(:OP_3, 3)
op_push(:OP_4, 4)
op_push(:OP_5, 5)
op_push(:OP_6, 6)
op_push(:OP_7, 7)
op_push(:OP_8, 8)
op_push(:OP_9, 9)
op_push(:OP_10, 10)
op_push(:OP_11, 11)
op_push(:OP_12, 12)
op_push(:OP_13, 13)
op_push(:OP_14, 14)
op_push(:OP_15, 15)
op_push(:OP_16, 16)
##
## CONTROL
##
# OP_NOP Does nothing
op(:OP_NOP, stack, do: stack)
# OP_RESERVED Transaction is invalid unless occuring in an unexecuted OP_IF branch
op(:OP_RESERVED, _, do: {:error, :OP_RESERVED})
# OP_VER Transaction is invalid unless occuring in an unexecuted OP_IF branch
op(:OP_VER, _, do: {:error, :OP_VER})
# OP_VERIF Transaction is invalid even when occuring in an unexecuted OP_IF branch
# Because of that, it's handled by validation same as disabled OPs
# OP_VERNOTIF transaction is invalid even when occuring in an unexecuted OP_IF branch
# Because of that, it's handled by validation same as disabled OPs
# OP_IF If the top stack value is not False, the statements are executed. The top stack value is removed.
def run([top | stack], [:OP_IF | script], opts) do
  branch = if bool(top), do: extract_if(script), else: extract_else(script)
  run(stack, branch, opts)
end

# OP_NOTIF If the top stack value is False, the statements are executed. The top stack value is removed.
# Not the same as OP_NOT then OP_IF because OP_NOT should only work on numbers
def run([top | stack], [:OP_NOTIF | script], opts) do
  branch = if bool(top), do: extract_else(script), else: extract_if(script)
  run(stack, branch, opts)
end
# OP_ELSE implemented as part of the OP_IF
# OP_VERIFY Marks transaction as invalid if top stack value is not true.
op(:OP_VERIFY, [0 | _], do: {:error, :verify_failed})
op(:OP_VERIFY, [_ | stack], do: stack)
# OP_RETURN Marks transaction as invalid.
op(:OP_RETURN, _, do: {:error, :OP_RETURN})
##
## STACKOPS
##
# OP_TOALTSTACK Puts the input onto the top of the alt stack. Removes it from the main stack.
def run([x | stack], [:OP_TOALTSTACK | script], opts) do
  alt = opts[:alt] || []
  # `opts` is a map (built via Map.merge/Map.put in `exec/3` and updated with
  # Map.put in OP_CHECKSIG), so it must be updated with `Map.put/3`;
  # `Keyword.put/3` only accepts keyword lists and raised FunctionClauseError.
  stack |> run(script, opts |> Map.put(:alt, [x | alt]))
end

# OP_FROMALTSTACK Puts the input onto the top of the main stack. Removes it from the alt stack.
def run(stack, [:OP_FROMALTSTACK | script], opts) do
  [x | alt] = opts[:alt]
  [x | stack] |> run(script, opts |> Map.put(:alt, alt))
end
# OP_2DROP Removes the top two stack items.
op(:OP_2DROP, [_, _ | stack], do: stack)
# OP_2DUP Duplicates the top two stack items
op(:OP_2DUP, [a, b | stack], do: [a, b, a, b | stack])
# OP_3DUP Duplicates the top two stack items
op(:OP_3DUP, [a, b, c | stack], do: [a, b, c, a, b, c | stack])
# OP_2OVER Copies the pair of items two spaces back in the stack to the front.
op(:OP_2OVER, [_a, _b, c, d | _] = stack, do: [c, d] ++ stack)
# OP_2ROT The fifth and sixth items back are moved to the top of the stack.
op(:OP_2ROT, [a, b, c, d, e, f | stack], do: [e, f, a, b, c, d | stack])
# OP_2SWAP Swaps the top two pairs of items.
op(:OP_2SWAP, [a, b, c, d | stack], do: [c, d, a, b | stack])
# OP_IFDUP IF the top stack value is not 0, duplicate it
op(:OP_IFDUP, [0 | stack], do: [0 | stack])
op(:OP_IFDUP, [x | stack], do: [x, x | stack])
# OP_DEPTH Puts the number of stack items onto the stack
op(:OP_DEPTH, stack, do: [stack |> length | stack])
# OP_DROP Removes the top stack item.
op(:OP_DROP, [_ | stack], do: stack)
# OP_DUP Duplicates the top stack item.
# special case
op(:OP_DUP, [], do: ["", ""])
op(:OP_DUP, [x | stack], do: [x, x | stack])
# OP_NIP Removes the second-to-top stack item
op(:OP_NIP, [a, _b | stack], do: [a | stack])
# OP_OVER Copies the second-to-top stack item to the top.
op(:OP_OVER, [a, b | stack], do: [b, a, b | stack])
# OP_PICK The item n back in the stack is copied to the top
op(:OP_PICK, [n | stack], opts, do: [stack |> nth_element(n, opts) | stack])
# OP_ROLL The item n back in the stack is moved to the top.
op(:OP_ROLL, [n | stack], opts,
do: [stack |> nth_element(n, opts) | stack |> List.delete_at(num(n, opts))]
)
# OP_ROT The top three items on the stack are rotated to the left.
op(:OP_ROT, [a, b, c | stack], do: [c, a, b | stack])
# OP_SWAP The top two items on the stack are swapped.
op(:OP_SWAP, [a, b | stack], do: [b, a | stack])
# OP_TUCK The item at the top of the stack is copied and inserted before the second-to-top item.
op(:OP_TUCK, [a, b | stack], do: [a, b, a | stack])
##
## SPLICE OPS
##
# OP_CAT disabled
# OP_SUBSTR disabled
# OP_LEFT disabled
# OP_RIGHT disabled
# OP_SIZE Pushes the string length of the top element of the stack (without popping it)
op(:OP_SIZE, [<<x::binary>> | stack], do: [byte_size(x), x | stack])
op(:OP_SIZE, [0 | stack], do: [0, 0 | stack])
op(:OP_SIZE, [x | stack], do: [1, x | stack])
##
## BIT LOGIC
##
# OP_INVERT disabled
# OP_AND disabled
# OP_OR disabled
# OP_XOR disabled
# OP_EQUAL Returns 1 if the inputs are exactly equal, 0 otherwise.
# These convoluted cases below come from the fact that we keep 0 and 1 on the stack
# instead of <<>> and <<1>> (same for OP_1-16). We should switch to a proper representation on the stack.
op(:OP_EQUAL, [a, b | stack] when is_binary(a) and is_binary(b),
do: [if(a == b, do: 1, else: 0) | stack]
)
op(:OP_EQUAL, [a, b | stack] when is_number(a) and is_binary(b),
do: [if(bin(a) == b, do: 1, else: 0) | stack]
)
op(:OP_EQUAL, [a, b | stack] when is_binary(a) and is_number(b),
do: [if(a == bin(b), do: 1, else: 0) | stack]
)
op(:OP_EQUAL, [a, b | stack] when is_number(a) and is_number(b),
do: [if(bin(a) == bin(b), do: 1, else: 0) | stack]
)
# OP_EQUALVERIFY Same as OP_EQUAL, but runs OP_VERIFY afterward
op_alias(:OP_EQUALVERIFY, [:OP_EQUAL, :OP_VERIFY])
# OP_RESERVED1 Transaction is invalid unless occuring in an unexecuted OP_IF branch
op(:OP_RESERVED1, _, do: {:error, :OP_RESERVED1})
# OP_RESERVED2 Transaction is invalid unless occuring in an unexecuted OP_IF branch
op(:OP_RESERVED2, _, do: {:error, :OP_RESERVED2})
##
## NUMERIC
##
## All arguments for arithmetic OP codes arguments are interpreted as numbers
## (that's the magic that op_num does, calling num() on each arg and checking if it didn't return error)
# OP_1ADD 1 is added to the input.
op_num(:OP_1ADD, x, do: x + 1)
# OP_1ADD 1 is substracted from the input.
op_num(:OP_1SUB, x, do: x - 1)
# OP_2MUL disabled
# OP_2DIV disabled
# OP_NEGATE The sign of the input is flipped.
op_num(:OP_NEGATE, x, do: -1 * x)
# OP_ABS The input is made positive.
op_num(:OP_ABS, x, do: x |> abs)
# OP_NOT If the input is 0 or 1, it is flipped. Otherwise the output will be 0.
op_num(:OP_NOT, 0, do: 1)
# negative zero
op_num(:OP_NOT, <<0x80>>, do: 1)
op_num(:OP_NOT, 1, do: 0)
op_num(:OP_NOT, x, do: 0)
# OP_0NOTEQUAL Returns 0 if the input is 0. 1 otherwise.
op_num(:OP_0NOTEQUAL, 0, do: 0)
op_num(:OP_0NOTEQUAL, x, do: 1)
# OP_ADD a is added to be
op_num(:OP_ADD, a, b, do: a + b)
# OP_SUB a is substracted from b
op_num(:OP_SUB, a, b, do: b - a)
# OP_MUL disabled
# OP_DIV disabled
# OP_MOD disabled
# OP_LSHIFT disabled
# OP_RSHIFT disabled
# OP_BOOLAND If both a and b are not 0, the output is 1. Otherwise 0.
op_num(:OP_BOOLAND, a, b, do: a != 0 and b != 0)
# OP_BOOLOR If a or b is not 0, the output is 1. Otherwise 0.
op_num(:OP_BOOLOR, a, b, do: a != 0 or b != 0)
# OP_NUMEQUAL Returns 1 if the numbers are equal, 0 otherwise.
op_num(:OP_NUMEQUAL, a, b, do: a == b)
# OP_NUMNOTEQUAL Returns 1 if the numbers are not equal, 0 otherwise.
op_num(:OP_NUMNOTEQUAL, a, b, do: a != b)
# OP_NUMEQUAVERIFY Same as OP_NUMEQUAL, but runs OP_VERIFY afterward.
op_alias(:OP_NUMEQUALVERIFY, [:OP_NUMEQUAL, :OP_VERIFY])
# OP_LESSTHAN Returns 1 if a is less than b, 0 otherwise.
op_num(:OP_LESSTHAN, b, a, do: a < b)
# OP_GREATERTHAN Returns 1 if a is greater than b, 0 otherwise.
op_num(:OP_GREATERTHAN, b, a, do: a > b)
# OP_LESSTHANOREQUAL Returns 1 if a is less than or equal b, 0 otherwise.
op_num(:OP_LESSTHANOREQUAL, b, a, do: a <= b)
# OP_GREATERTHANOREQUAL Returns 1 if a is greater than b, 0 otherwise.
op_num(:OP_GREATERTHANOREQUAL, b, a, do: a >= b)
# OP_MIN Returns the smaller of a and b
op_num(:OP_MIN, a, b, do: if(a <= b, do: a, else: b))
# OP_MAX Returns the bigger of a and b
op_num(:OP_MAX, a, b, do: if(a >= b, do: a, else: b))
# OP_WITHIN Returns 1 if x is within the specified range (left-inclusive), 0 otherwise.
op_num(:OP_WITHIN, b, a, x, do: x >= a and x < b)
##
## CRYPTO
##
# OP_RIPEMD160 The input is hashed using RIPEMD-160.
op_hash(:OP_RIPEMD160, x, do: x |> Crypto.ripemd160())
# OP_SHA1 The input is hashed using SHA-1.
op_hash(:OP_SHA1, x, do: x |> Crypto.sha1())
# OP_SHA256 The input is hashed using SHA-256
op_hash(:OP_SHA256, x, do: x |> Crypto.sha256())
# OP_HASH160 The input is hashed twice: first with SHA-256 and then with RIPEMD-160.
op_hash(:OP_HASH160, x, do: x |> Crypto.sha256() |> Crypto.ripemd160())
# OP_HASH256 The input is hashed two times with SHA-256.
op_hash(:OP_HASH256, x, do: x |> Crypto.sha256() |> Crypto.sha256())
# TODO OP_CODESEPARATOR All of the signature checking words will only match signatures
# to the data after the most recently-executed OP_CODESEPARATOR.
op(:OP_CODESEPARATOR, stack, do: stack)
# OP_CHECKSIG The entire transaction's outputs, inputs, and script (from the most recently-executed OP_CODESEPARATOR
# to the end) are hashed. The signature used by OP_CHECKSIG must be a valid signature for this hash and public key.
# If it is, 1 is returned, 0 otherwise.
def run([pk, sig | stack], [:OP_CHECKSIG | script], opts) do
  # The signature itself is removed from the script that gets hashed (see `sub_script/2`)
  opts = opts |> Map.put(:sub_script, sub_script(opts, [sig]))
  # NOTE(review): `verify_signature/3` can also return `{:error, reason}`
  # tuples (strict-encoding checks); piping those into `bin/1` raises a
  # FunctionClauseError rather than failing the script cleanly — confirm intended.
  [verify_signature(bin(sig), bin(pk), opts) |> bin | stack]
  |> run(script, opts)
end
# Get subscript that is used to compute the sighash
# FIXME this is not even a correct implementation, it just splits on the first OP_CODESEPARATOR when
# in fact it should be the last previously encountered OP_CODESEPARATOR. Plus it's a pretty ugly way
# to do it. Ah, and OP_CODESEPARATOR in unexecuted OP_IF branch doesn't count.
# Leaving it until the script rewrite, with some more complex state, then we can have something similar
# to pbegincodehash
def sub_script(%{script: script} = _opts, sigs) do
  # Index of the first OP_CODESEPARATOR; nil (no separator) falls back to 0,
  # i.e. the whole script is used
  idx = script |> Enum.find_index(&(&1 == :OP_CODESEPARATOR))
  script
  |> Enum.split(idx || 0)
  |> elem(1)
  # Signatures are deleted from the subscript before hashing;
  # NOTE(review): `Kernel.--/2` removes only the first occurrence of each sig.
  |> Kernel.--(sigs)
  |> Bitcoin.Script.to_binary()
end
# OP_CHEKSIGVERIFY Same as OP_CHECKSIG, but OP_VERIFY is executed afterward.
op_alias(:OP_CHECKSIGVERIFY, [:OP_CHECKSIG, :OP_VERIFY])
# Pops a count `n` followed by `n` items (public keys or signatures) off the
# stack. Returns `{items, remaining_stack}`.
def get_multi([n | stack], opts), do: Enum.split(stack, num(n, opts))
# OP_CHECKMULTISIG
# Compares the first signature against each public key until it finds an ECDSA match.
# Starting with the subsequent public key, it compares the second signature against each remaining public key
# until it finds an ECDSA match. The process is repeated until all signatures have been checked or not enough
# public keys remain to produce a successful result. All signatures need to match a public key. Because public
# keys are not checked again if they fail any signature comparison, signatures must be placed in the scriptSig
# using the same order as their corresponding public keys were placed in the scriptPubKey or redeemScript.
# If all signatures are valid, 1 is returned, 0 otherwise.
def run(stack, [:OP_CHECKMULTISIG | script], opts) do
  # Stack layout (top first): <n> <pk...> <m> <sig...> <dummy>
  {pks, stack} = stack |> get_multi(opts)
  {sigs, stack} = stack |> get_multi(opts)
  # Due to a bug, one extra unused value is removed from the stack.
  [bug | stack] = stack
  cond do
    # With NULLDUMMY flag set, the dropped stack item must be an empty byte array
    # see BIP147, BIP62 rule no 7
    opts[:flags][:nulldummy] && bug != "" ->
      {:error, :nulldummy}
    # With nsigs > npubkeys the script is invalid
    length(sigs) > length(pks) ->
      {:error, :more_sigs_than_pubkeys}
    # Max number of pubkeys is 20
    length(pks) > @max_pubkeys_per_multisig ->
      {:error, :max_pubkeys_per_multisig}
    true ->
      # All signatures are stripped from the subscript used for sighash
      opts = opts |> Map.put(:sub_script, sub_script(opts, sigs))
      [verify_all_signatures(sigs, pks, opts) |> bin | stack]
  end
  |> run(script, opts)
end
# Same as OP_CHECKMULTISIG, but OP_VERIFY is executed afterward.
op_alias(:OP_CHECKMULTISIGVERIFY, [:OP_CHECKMULTISIG, :OP_VERIFY])
##
## EXPANSION
###
# OP_NOP1-10 No-op
op(:OP_NOP1, stack, do: stack)
# TODO OP_CHECKLOCKTIMEVERIFY
op(:OP_NOP2, stack, do: stack)
# TODO OP_CHECKSEQUENCEVERIFY
op(:OP_NOP3, stack, do: stack)
op(:OP_NOP4, stack, do: stack)
op(:OP_NOP5, stack, do: stack)
op(:OP_NOP6, stack, do: stack)
op(:OP_NOP7, stack, do: stack)
op(:OP_NOP8, stack, do: stack)
op(:OP_NOP9, stack, do: stack)
op(:OP_NOP10, stack, do: stack)
## TEMPLATE MATCHING PARAMS ?
# no idea wee need to do anything with those from script.h
#
# OP_SMALLINTEGER
# OP_PUBKEYS
# OP_PUBKEYHASH
# OP_PUBKEY
# OP_INVALIDOPCODE
# Cast value to bitcoin script binary
def bin(x) when is_binary(x), do: x
def bin(x) when is_number(x), do: Number.bin(x)
# Booleans come from the signature-verification helpers; note they map to the
# integers 1/0 kept on the stack, not to binaries. Any other term (e.g. an
# {:error, _} tuple) raises FunctionClauseError here.
def bin(true), do: 1
def bin(false), do: 0
# Coerces a stack element into its Script boolean interpretation.
#
# Numeric zero, the empty byte array and "negative zero" (a single 0x80 byte)
# are false; every other value — including <<0>> — is truthy.
def bool(value) when value in [0, <<>>, <<0x80>>], do: false
def bool(_value), do: true
# Fetches the `n`-th element (0-based, counted from the top) of the stack for
# OP_PICK/OP_ROLL. `n` is first interpreted as a script number; if `num/2`
# returns an error tuple the `with` passes it through unchanged. A negative
# index is rejected explicitly instead of wrapping around as `Enum.at/2` would.
def nth_element(stack, n, opts) do
  with n when is_number(n) <- num(n, opts),
       do: if(n >= 0, do: Enum.at(stack, n), else: {:error, :index_outside_stack})
end
@spec verify_signature(binary, binary, map) :: boolean
def verify_signature(sig, pk, opts)
# These two coercion clauses are only necessary because we can keep some
# numbers on the stack instead of binaries exclusively, and can be removed
# when that's fixed
def verify_signature(sig, pk, opts) when not is_binary(sig),
  do: verify_signature(bin(sig), pk, opts)
def verify_signature(sig, pk, opts) when not is_binary(pk),
  do: verify_signature(sig, bin(pk), opts)
# TODO figure this out - from bitcoin core code it seems that with strict enc, empty sig should fail the whole
# script not just return false (IsDefinedHashtypeSignature called from CheckSignatureEncoding)
# but it makes 2 test cases fail from script_tests.json (interestingly makes one more case pass in bitcore-lib tests)
# def verify_signature("", _pk, %{flags: %{strictenc: true}}), do: {:error, :empty_signature}
# Empty signature is invalid
def verify_signature("", _pk, _opts), do: false
def verify_signature(sig, pk, %{flags: flags} = opts) do
  # Separate last byte which is the sighash_type
  {sig, <<sighash_type>>} = sig |> Binary.split_at(-1)
  # Compute sighash of the spending transaction for the current input/subscript
  sighash =
    opts[:tx] |> Bitcoin.Tx.sighash(opts[:input_number], opts[:sub_script], sighash_type)
  # Signature verification
  # NOTE(review): despite the boolean @spec, the cond below may also return
  # `{:error, reason}` tuples for flag violations.
  cond do
    # with STRICTENC or DERSIG, BIP66 strict signature encoding must be met
    (flags[:dersig] || flags[:strictenc]) && !Bitcoin.DERSig.strict?(sig) ->
      {:error, :nonstrict_der}
    # with STRICTENC pk must be either compressed or uncompressed
    flags[:strictenc] && !Bitcoin.Key.Public.strict?(pk) ->
      {:error, :nonstrict_pk}
    # with STRICTENC sighash byte must be a known value
    flags[:strictenc] && !Bitcoin.Tx.Sighash.valid_type?(sighash_type) ->
      # TODO doesn't seem to be covered by any script test cases
      {:error, :invalid_sighash_type}
    # with LOW_S flag, S must use the low value
    flags[:low_s] && !Bitcoin.DERSig.low_s?(sig) ->
      {:error, :high_s}
    # If all conditions are met do the actual sig verification
    true ->
      Bitcoin.Secp256k1.verify(sighash, sig, pk)
  end
end
# No sigs to verify
def verify_all_signatures([], _, _opts), do: true
# No PKs to verify against, but there are still some sigs (previous match gets rid of [])
def verify_all_signatures(_, [], _opts), do: false
# Signatures must appear in the same relative order as their public keys:
# on a mismatch we advance to the next key, never back.
def verify_all_signatures([sig | sigs], [pk | pks], opts) do
  case verify_signature(sig, pk, opts) do
    # Verification succeeded, move to the next sig
    true -> verify_all_signatures(sigs, pks, opts)
    # Verification failed, try the next PK
    # NOTE(review): an `{:error, _}` return from verify_signature/3 raises
    # a CaseClauseError here instead of failing the script — confirm intended.
    false -> verify_all_signatures([sig | sigs], pks, opts)
  end
end
end
| 34.920826 | 119 | 0.667554 |
1c4301b1966d3b66a9ec28f1494ac8d82ae09bd9 | 365 | ex | Elixir | userManagementService/lib/helpers/map_helper.ex | andraspatka/jobportal-ms | 006c8ca212f88566113c4b5c00dfe1d4e421c034 | [
"MIT"
] | 1 | 2021-05-25T18:24:27.000Z | 2021-05-25T18:24:27.000Z | userManagementService/lib/helpers/map_helper.ex | andraspatka/jobportal-ms | 006c8ca212f88566113c4b5c00dfe1d4e421c034 | [
"MIT"
] | 1 | 2021-05-23T09:50:10.000Z | 2021-05-23T09:50:10.000Z | userManagementService/lib/helpers/map_helper.ex | andraspatka/jobportal-ms | 006c8ca212f88566113c4b5c00dfe1d4e421c034 | [
"MIT"
] | null | null | null | defmodule Api.Helpers.MapHelper do
@doc "
Takes String keys in a Map and converts them to Atoms.
"
def string_keys_to_atoms(document) when is_map(document) do
Enum.reduce(document, %{}, fn {key, val}, acc ->
acc |> Map.put(
cond do
is_binary(key) -> String.to_atom(key)
true -> key
end, val)
end)
end
end
| 24.333333 | 61 | 0.605479 |
1c43026a7d707766343893bf1ec4d5ed95a2c6f0 | 2,480 | ex | Elixir | clients/service_usage/lib/google_api/service_usage/v1/model/page.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/service_usage/lib/google_api/service_usage/v1/model/page.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/service_usage/lib/google_api/service_usage/v1/model/page.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceUsage.V1.Model.Page do
  @moduledoc """
  Represents a documentation page. A page can contain subpages to represent nested documentation set structure.

  ## Attributes

  *   `content` (*type:* `String.t`, *default:* `nil`) - The Markdown content of the page. You can use (== include {path} ==) to include content from a Markdown file.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The name of the page. It will be used as an identity of the page to generate URI of the page, text of the link to this page in navigation, etc. The full page name (start from the root page name to this page concatenated with `.`) can be used as reference to the page in your documentation. For example: pages: - name: Tutorial content: (== include tutorial.md ==) subpages: - name: Java content: (== include tutorial_java.md ==) You can reference `Java` page using Markdown reference link syntax: `Java`.
  *   `subpages` (*type:* `list(GoogleApi.ServiceUsage.V1.Model.Page.t)`, *default:* `nil`) - Subpages of this page. The order of subpages specified here will be honored in the generated docset.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :content => String.t() | nil,
          :name => String.t() | nil,
          :subpages => list(GoogleApi.ServiceUsage.V1.Model.Page.t()) | nil
        }

  # `field/1,3` is provided by `GoogleApi.Gax.ModelBase` and registers each
  # attribute for JSON (de)serialization; `subpages` decodes recursively into
  # this same model. This file is generated — keep edits mechanical.
  field(:content)
  field(:name)
  field(:subpages, as: GoogleApi.ServiceUsage.V1.Model.Page, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceUsage.V1.Model.Page do
  # Delegates to the generated model's decode so nested fields (e.g.
  # `subpages`) are decoded into their proper structs.
  def decode(value, options) do
    GoogleApi.ServiceUsage.V1.Model.Page.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceUsage.V1.Model.Page do
  # Encoding is shared across all generated models via the Gax base module.
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.792453 | 558 | 0.719758 |
1c433ef64809e1715e44ff42740369abb65caf7a | 5,502 | exs | Elixir | test/xcribe/json_schema_test.exs | brainn-co/xcribe | 06e66df0ac28d5b3e525f54bc52ed21722701a06 | [
"Apache-2.0"
] | 30 | 2019-12-17T20:09:41.000Z | 2021-01-11T13:58:24.000Z | test/xcribe/json_schema_test.exs | Finbits/xcribe | 37f4195315e27e415212910f0219c68d96a16d4a | [
"Apache-2.0"
] | 36 | 2019-12-17T20:32:04.000Z | 2020-12-02T17:50:49.000Z | test/xcribe/json_schema_test.exs | Finbits/xcribe | 37f4195315e27e415212910f0219c68d96a16d4a | [
"Apache-2.0"
] | 3 | 2020-02-21T18:13:59.000Z | 2020-10-20T07:59:05.000Z | defmodule Xcribe.JsonSchemaTest do
use ExUnit.Case, async: true
alias Plug.Upload
alias Xcribe.JsonSchema
describe "type_for/1" do
test "return type for given values" do
assert JsonSchema.type_for(%{}) == "object"
assert JsonSchema.type_for([]) == "array"
assert JsonSchema.type_for("a") == "string"
assert JsonSchema.type_for(1) == "number"
assert JsonSchema.type_for(1.0) == "number"
assert JsonSchema.type_for(true) == "boolean"
end
test "return type as string for not know types" do
assert JsonSchema.type_for(nil) == "string"
assert JsonSchema.type_for({}) == "string"
assert JsonSchema.type_for(:ok) == "string"
assert JsonSchema.type_for(self()) == "string"
end
end
describe "format_for/1" do
test "return format for given values" do
assert JsonSchema.format_for(1) == "int32"
assert JsonSchema.format_for(1.0) == "float"
end
test "return empty for not know formats" do
assert JsonSchema.format_for(true) == ""
assert JsonSchema.format_for("a") == ""
assert JsonSchema.format_for(nil) == ""
assert JsonSchema.format_for({}) == ""
assert JsonSchema.format_for(:ok) == ""
assert JsonSchema.format_for(self()) == ""
end
end
describe "schema_for/1" do
test "schema for map/object whith nested map" do
map = %{"authentication" => %{"login" => "userlogin"}, "name" => "some name"}
assert JsonSchema.schema_for(map) == %{
type: "object",
properties: %{
"authentication" => %{
type: "object",
properties: %{"login" => %{type: "string", example: "userlogin"}}
},
"name" => %{type: "string", example: "some name"}
}
}
end
test "schema for a list/array of strings" do
data = ["Doug", "Jonny"]
expected = %{
type: "array",
items: %{type: "string", example: "Doug"}
}
assert JsonSchema.schema_for(data) == expected
end
test "schema for list/array of maps" do
list = [
%{"authentication" => %{"login" => "userlogin"}, "name" => "some name"},
%{"authentication" => %{"login" => "userlogin"}, "name" => "some name"}
]
assert JsonSchema.schema_for(list) == %{
type: "array",
items: %{
type: "object",
properties: %{
"name" => %{example: "some name", type: "string"},
"authentication" => %{
properties: %{"login" => %{example: "userlogin", type: "string"}},
type: "object"
}
}
}
}
end
test "return a schema for single item as tuple" do
opts = [title: true, example: false]
assert JsonSchema.schema_for({"alias", "Jon"}, opts) == %{
title: "alias",
type: "string"
}
assert JsonSchema.schema_for({"age", 5}, opts) == %{
title: "age",
type: "number",
format: "int32"
}
assert JsonSchema.schema_for({"percent", 5.8}, opts) == %{
title: "percent",
type: "number",
format: "float"
}
end
test "given opt title false not return title key" do
opt = [title: false]
assert JsonSchema.schema_for({"name", "value"}, opt) == %{type: "string"}
assert JsonSchema.schema_for({"name", 1}, opt) == %{type: "number", format: "int32"}
assert JsonSchema.schema_for({"name", 1.2}, opt) == %{type: "number", format: "float"}
end
test "given opt example true return the example" do
opt = [title: false, example: true]
assert JsonSchema.schema_for({"name", "value"}, opt) == %{
type: "string",
example: "value"
}
assert JsonSchema.schema_for({"name", 1}, opt) == %{
type: "number",
format: "int32",
example: 1
}
assert JsonSchema.schema_for({"name", 1.2}, opt) == %{
type: "number",
format: "float",
example: 1.2
}
assert JsonSchema.schema_for({"name", %{"key" => "value"}}, opt) == %{
type: "object",
properties: %{"key" => %{type: "string", example: "value"}}
}
assert JsonSchema.schema_for({"name", ["value"]}, opt) == %{
type: "array",
items: %{type: "string", example: "value"}
}
end
test "schema for a map with an empty list" do
data = %{"id" => 1, "attributes" => []}
assert JsonSchema.schema_for(data) == %{
type: "object",
properties: %{
"attributes" => %{items: %{type: "string"}, type: "array"},
"id" => %{example: 1, format: "int32", type: "number"}
}
}
end
test "schema for Plug.Upload" do
data = %{
"file" => %Upload{
content_type: "image/png",
filename: "screenshot.png",
path: "/tmp/multipart-id"
}
}
assert JsonSchema.schema_for(data) == %{
type: "object",
properties: %{"file" => %{format: "binary", type: "string"}}
}
end
end
end
| 31.084746 | 92 | 0.496547 |
1c437b9e4fcac5bc3f0b678420cd89b9191ac26a | 1,149 | ex | Elixir | lib/arc_ecto/type.ex | amatalai/arc_ecto | 717efd790cc344ba3f12f29127361e0a90f86ae6 | [
"Apache-2.0"
] | null | null | null | lib/arc_ecto/type.ex | amatalai/arc_ecto | 717efd790cc344ba3f12f29127361e0a90f86ae6 | [
"Apache-2.0"
] | null | null | null | lib/arc_ecto/type.ex | amatalai/arc_ecto | 717efd790cc344ba3f12f29127361e0a90f86ae6 | [
"Apache-2.0"
] | null | null | null | defmodule Arc.Ecto.Type do
def type, do: :string
@filename_with_timestamp ~r{^(.*)\?(\d+)$}
def cast(definition, args) do
case definition.store(args) do
{:ok, file} -> {:ok, %{file_name: file, updated_at: Ecto.DateTime.utc}}
_ -> :error
end
end
def load(_definition, value) do
{file_name, gsec} =
case Regex.match?(@filename_with_timestamp, value) do
true ->
[_, file_name, gsec] = Regex.run(@filename_with_timestamp, value)
{file_name, gsec}
_ -> {value, nil}
end
updated_at = case gsec do
gsec when is_binary(gsec) ->
gsec
|> String.to_integer()
|> :calendar.gregorian_seconds_to_datetime()
|> Ecto.DateTime.from_erl()
_ ->
nil
end
{:ok, %{file_name: file_name, updated_at: updated_at}}
end
def dump(_definition, %{file_name: file_name, updated_at: nil}) do
{:ok, file_name}
end
def dump(_definition, %{file_name: file_name, updated_at: updated_at}) do
gsec = :calendar.datetime_to_gregorian_seconds(Ecto.DateTime.to_erl(updated_at))
{:ok, "#{file_name}?#{gsec}"}
end
end
| 26.113636 | 84 | 0.617929 |
1c43830f481fef5f392ebfbedbe51818598c20f3 | 1,120 | ex | Elixir | lib/navigation_history.ex | zillou/plug-navigation-history | 80dbadc3e7958a9085bf078ef611c5895e07a90f | [
"MIT"
] | null | null | null | lib/navigation_history.ex | zillou/plug-navigation-history | 80dbadc3e7958a9085bf078ef611c5895e07a90f | [
"MIT"
] | null | null | null | lib/navigation_history.ex | zillou/plug-navigation-history | 80dbadc3e7958a9085bf078ef611c5895e07a90f | [
"MIT"
] | null | null | null | defmodule NavigationHistory do
@moduledoc """
Module to retrieve tracked paths.
"""
@doc """
Retrieves the last tracked path.
## Examples:
NavigationHistory.last_path(conn) # returns the last path vissited
NavigationHistory.last_path(conn, 1) # returns the second last path vissited
NavigationHistory.last_path(conn, default: "/") # returns the last path and default to "/" if none available
NavigationHistory.last_path(conn, key: "admin") # returns the last path tracked by tracker with key "admin"
"""
  def last_path(conn, index \\ 0, opts \\ [])
  # Supports `last_path(conn, opts)`: when the second argument is a keyword
  # list it is treated as the options and the index defaults to 0.
  def last_path(conn, index, _opts) when is_list(index),
    do: last_path(conn, 0, index)
  # Fetches the tracked paths and picks the entry at `index`, falling back
  # to the `:default` option when no such entry exists.
  def last_path(conn, index, opts),
    do: Enum.at(last_paths(conn, opts), index) || opts[:default]
@doc """
Retrieves a list of last tracked paths.
## Examples:
NavigationHistory.last_paths(conn)
NavigationHistory.last_paths(conn, key: "admin")
"""
# NOTE: use defdelegate with optional args when shipped in 1.3
def last_paths(conn, opts \\ []),
do: NavigationHistory.Session.fetch_paths(conn, opts)
end
| 32.941176 | 115 | 0.691964 |
1c43da8d75ac9af0b90749a12a2f9cb0d359b0e9 | 30 | ex | Elixir | testData/org/elixir_lang/parser_definition/literal_string_sigil_line_parsing_test_case/SigilModifiers.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/literal_string_sigil_line_parsing_test_case/SigilModifiers.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/literal_string_sigil_line_parsing_test_case/SigilModifiers.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | ~S{}abcdefghijklmnopqrstuvwxyz | 30 | 30 | 0.9 |
1c43e730183f914bf4310845eba71ca7f5f5adf8 | 1,910 | ex | Elixir | lib/uro_web/controllers/admin/avatar_controller.ex | V-Sekai/uro | 0b23da65d5c7e459efcd6b2c3d9bdf91c533b737 | [
"MIT"
] | 1 | 2022-01-11T04:05:39.000Z | 2022-01-11T04:05:39.000Z | lib/uro_web/controllers/admin/avatar_controller.ex | V-Sekai/uro | 0b23da65d5c7e459efcd6b2c3d9bdf91c533b737 | [
"MIT"
] | 35 | 2021-02-10T08:18:57.000Z | 2021-05-06T17:19:50.000Z | lib/uro_web/controllers/admin/avatar_controller.ex | V-Sekai/uro | 0b23da65d5c7e459efcd6b2c3d9bdf91c533b737 | [
"MIT"
] | null | null | null | defmodule UroWeb.Admin.AvatarController do
use UroWeb, :controller
alias Uro.UserContent
alias Uro.UserContent.Avatar
  # Paginated listing of all avatars.
  def index(conn, params) do
    page = UserContent.list_avatars_paginated(params)
    render(conn, "index.html", avatars: page.entries, page: page)
  end
  # Renders the creation form with a blank changeset.
  def new(conn, _params) do
    changeset = UserContent.change_avatar(%Avatar{})
    render(conn, "new.html", changeset: changeset)
  end
  # Creates an avatar from the form params; on validation failure the
  # form is re-rendered with the invalid changeset.
  def create(conn, %{"avatar" => avatar_params}) do
    case UserContent.create_avatar(avatar_params) do
      {:ok, avatar} ->
        conn
        |> put_flash(:info, gettext("Avatar created successfully."))
        |> redirect(to: Routes.admin_avatar_path(conn, :show, avatar))
      {:error, %Ecto.Changeset{} = changeset} ->
        render(conn, "new.html", changeset: changeset)
    end
  end
  # Shows one avatar; get_avatar!/1 presumably raises for unknown ids
  # (bang convention), which Phoenix renders as a 404 — confirm.
  def show(conn, %{"id" => id}) do
    avatar = UserContent.get_avatar!(id)
    render(conn, "show.html", avatar: avatar)
  end
  # Edit form pre-filled from the existing record.
  def edit(conn, %{"id" => id}) do
    avatar = UserContent.get_avatar!(id)
    changeset = UserContent.change_avatar(avatar)
    render(conn, "edit.html", avatar: avatar, changeset: changeset)
  end
  # Updates an avatar; re-renders the edit form on validation failure.
  def update(conn, %{"id" => id, "avatar" => avatar_params}) do
    avatar = UserContent.get_avatar!(id)
    case UserContent.update_avatar(avatar, avatar_params) do
      {:ok, avatar} ->
        conn
        |> put_flash(:info, gettext("Avatar updated successfully."))
        |> redirect(to: Routes.admin_avatar_path(conn, :show, avatar))
      {:error, %Ecto.Changeset{} = changeset} ->
        render(conn, "edit.html", avatar: avatar, changeset: changeset)
    end
  end
  # Deletes an avatar; the {:ok, _} match asserts deletion succeeded,
  # then redirects back to the index.
  def delete(conn, %{"id" => id}) do
    avatar = UserContent.get_avatar!(id)
    {:ok, _avatar} = UserContent.delete_avatar(avatar)
    conn
    |> put_flash(:info, gettext("Avatar deleted successfully."))
    |> redirect(to: Routes.admin_avatar_path(conn, :index))
  end
end
| 30.31746 | 71 | 0.658115 |
1c44142647f8b62aa74d2127ee71f1b68c8c6a33 | 3,987 | ex | Elixir | lib/ratatouille/renderer.ex | Fitblip/ratatouille | 873f8a51b0f824ce9e39b7575850fcc585cfc7b1 | [
"MIT"
] | 504 | 2019-01-13T21:53:21.000Z | 2022-03-31T20:58:21.000Z | lib/ratatouille/renderer.ex | iboard/ratatouille | cc7b6a37e0b1757cd89cc5084343814a79dd86dc | [
"MIT"
] | 28 | 2019-01-26T21:00:23.000Z | 2021-12-28T19:06:15.000Z | lib/ratatouille/renderer.ex | iboard/ratatouille | cc7b6a37e0b1757cd89cc5084343814a79dd86dc | [
"MIT"
] | 21 | 2019-02-21T09:08:27.000Z | 2021-12-20T15:51:10.000Z | defmodule Ratatouille.Renderer do
@moduledoc """
Logic to render a view tree.
This API is still under development.
"""
alias Ratatouille.Renderer.{Canvas, Element}
@type root_element :: %Element{
tag: :view,
children: list(child_element())
}
@type child_tag ::
:bar
| :chart
| :column
| :label
| :overlay
| :panel
| :row
| :sparkline
| :table
| :table_cell
| :table_row
| :text
| :tree
| :tree_node
@type child_element :: %Element{tag: child_tag()}
@callback render(
Canvas.t(),
Element.t(),
(Canvas.t(), Element.t() -> Canvas.t())
) :: Canvas.t()
@element_specs Element.specs()
@doc """
Renders a view tree to canvas, given a canvas and a root element (an element
with the `:view` tag).
The tree is rendered by recursively rendering each element in the hierarchy.
The canvas serves as both the accumulator for rendered cells at each stage and
as the box representing available space for rendering, which shrinks as this
space is consumed.
"""
@spec render(Canvas.t(), root_element) :: {:ok, Canvas.t()} | {:error, term()}
def render(%Canvas{} = canvas, %Element{} = root) do
with :ok <- validate_tree(root) do
{:ok, render_tree(canvas, root)}
end
end
@spec render_tree(Canvas.t(), Element.t() | list(Element.t())) :: Canvas.t()
def render_tree(%Canvas{} = canvas, elements) when is_list(elements) do
Enum.reduce(elements, canvas, fn el, new_canvas ->
render_tree(new_canvas, el)
end)
end
def render_tree(
%Canvas{} = canvas,
%Element{tag: tag} = element
) do
spec = Keyword.fetch!(@element_specs, tag)
renderer = Keyword.fetch!(spec, :renderer)
renderer.render(canvas, element, &render_tree/2)
end
### View Tree Validation
@doc """
Validates the hierarchy of a view tree given the root element.
Used by the render/2 function to prevent strange errors that may otherwise
occur when processing invalid view trees.
"""
@spec validate_tree(Element.t()) :: :ok | {:error, String.t()}
def validate_tree(%Element{tag: :view, children: children}) do
validate_subtree(:view, children)
end
def validate_tree(%Element{tag: tag}) do
{:error,
"Invalid view hierarchy: Root element must have tag 'view', but found '#{
tag
}'"}
end
  # Walks the children depth-first: each child must be allowed under its
  # parent, have valid attributes, and itself contain a valid subtree.
  # The `with` short-circuits on the first {:error, _} encountered.
  defp validate_subtree(parent, [
         %Element{tag: tag, attributes: attributes, children: children} | rest
       ]) do
    with :ok <- validate_relationship(parent, tag),
         :ok <- validate_attributes(tag, attributes),
         :ok <- validate_subtree(tag, children),
         :ok <- validate_subtree(parent, rest),
         do: :ok
  end
  # An empty list of children is trivially valid.
  defp validate_subtree(_parent, []) do
    :ok
  end
  # Checks one element's attributes against its spec: keys the element
  # does not accept are rejected, and required keys (declared as
  # {:required, description}) must be present.
  defp validate_attributes(tag, attributes) do
    spec = Keyword.fetch!(@element_specs, tag)
    attribute_specs = spec[:attributes] || []
    used_keys = Map.keys(attributes)
    valid_keys = Keyword.keys(attribute_specs)
    required_keys = for {key, {:required, _desc}} <- attribute_specs, do: key
    # When both problems exist, the missing-required error is reported
    # (the invalid-keys clause only matches when nothing is missing).
    case {used_keys -- valid_keys, required_keys -- used_keys} do
      {[], []} ->
        :ok
      {invalid_keys, []} ->
        {:error,
         "Invalid attributes: '#{tag}' does not accept attributes #{
           inspect(invalid_keys)
         }"}
      {_, missing_keys} ->
        {:error,
         "Invalid attributes: '#{tag}' is missing required attributes #{
           inspect(missing_keys)
         }"}
    end
  end
defp validate_relationship(parent_tag, child_tag) do
valid_child_tags = @element_specs[parent_tag][:child_tags] || []
if child_tag in valid_child_tags do
:ok
else
{:error,
"Invalid view hierarchy: '#{child_tag}' cannot be a child of '#{
parent_tag
}'"}
end
end
end
| 27.122449 | 80 | 0.603963 |
1c441e6713f001148dd79ee07abb4855669b4ac9 | 233 | exs | Elixir | priv/repo/migrations/20160209023941_create_trips.exs | sorentwo/triptastic | d7418381cc3284e3ade31aae7659401b24980d96 | [
"MIT"
] | 5 | 2016-02-27T00:24:38.000Z | 2020-01-22T08:35:42.000Z | priv/repo/migrations/20160209023941_create_trips.exs | sorentwo/triptastic | d7418381cc3284e3ade31aae7659401b24980d96 | [
"MIT"
] | null | null | null | priv/repo/migrations/20160209023941_create_trips.exs | sorentwo/triptastic | d7418381cc3284e3ade31aae7659401b24980d96 | [
"MIT"
] | null | null | null | defmodule Triptastic.Repo.Migrations.CreateTrips do
use Ecto.Migration
  # Reversible migration: Ecto rolls `create` back by dropping the table.
  def change do
    create table(:trips) do
      add :name, :string
      add :category, :string
      # Favorite counter starts at zero for every new trip.
      add :favorites, :integer, default: 0
    end
  end
end
| 19.416667 | 51 | 0.67382 |
1c443c0553976886bdad3795ba54a5dabe1dceb8 | 597 | ex | Elixir | otp/lib/otp/application.ex | GameEssa/ElixirProject | 8f5fef5256719ef2ebcfb77c7bb7eb70687ece4f | [
"MIT"
] | null | null | null | otp/lib/otp/application.ex | GameEssa/ElixirProject | 8f5fef5256719ef2ebcfb77c7bb7eb70687ece4f | [
"MIT"
] | null | null | null | otp/lib/otp/application.ex | GameEssa/ElixirProject | 8f5fef5256719ef2ebcfb77c7bb7eb70687ece4f | [
"MIT"
] | null | null | null | defmodule Otp.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
@impl true
  # Application entry point: starts the supervision tree.
  def start(_type, _args) do
    children = [
      # Starts a worker by calling: Otp.Worker.start_link(arg)
      # {Otp.Worker, arg}
      {Otp.SampleQueue.Supervisor, []},
      {Otp.Supervisor, []}
    ]
    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    # NOTE(review): the top-level supervisor registers under the name
    # Otp.Supervisor while also starting {Otp.Supervisor, []} as a child;
    # if that child registers under the same name, startup will fail with
    # :already_started — confirm the intended naming.
    opts = [strategy: :one_for_one, name: Otp.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
| 25.956522 | 62 | 0.681742 |
1c44451c0a19fb6d1fdcdef2966f917007ba688f | 3,554 | ex | Elixir | lib/aws/generated/pi.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/pi.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/pi.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.PI do
@moduledoc """
AWS Performance Insights enables you to monitor and explore different dimensions
of database load based on data captured from a running RDS instance.
The guide provides detailed information about Performance Insights data types,
parameters and errors. For more information about Performance Insights
capabilities see [Using Amazon RDS Performance Insights
](http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_PerfInsights.html)
in the *Amazon RDS User Guide*.
The AWS Performance Insights API provides visibility into the performance of
your RDS instance, when Performance Insights is enabled for supported engine
types. While Amazon CloudWatch provides the authoritative source for AWS service
vended monitoring metrics, AWS Performance Insights offers a domain-specific
view of database load measured as Average Active Sessions and provided to API
consumers as a 2-dimensional time-series dataset. The time dimension of the data
provides DB load data for each time point in the queried time range, and each
time point decomposes overall load in relation to the requested dimensions, such
as SQL, Wait-event, User or Host, measured at that time point.
"""
@doc """
For a specific time period, retrieve the top `N` dimension keys for a metric.
"""
def describe_dimension_keys(client, input, options \\ []) do
request(client, "DescribeDimensionKeys", input, options)
end
@doc """
Retrieve Performance Insights metrics for a set of data sources, over a time
period.
You can provide specific dimension groups and dimensions, and provide
aggregation and filtering criteria for each group.
"""
def get_resource_metrics(client, input, options \\ []) do
request(client, "GetResourceMetrics", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
{:ok, map() | nil, map()}
| {:error, term()}
defp request(client, action, input, options) do
client = %{client | service: "pi"}
host = build_host("pi", client)
url = build_url(host, client)
headers = [
{"Host", host},
{"Content-Type", "application/x-amz-json-1.1"},
{"X-Amz-Target", "PerformanceInsightsv20180227.#{action}"}
]
payload = encode!(client, input)
headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
post(client, url, payload, headers, options)
end
defp post(client, url, payload, headers, options) do
case AWS.Client.request(client, :post, url, payload, headers, options) do
{:ok, %{status_code: 200, body: body} = response} ->
body = if body != "", do: decode!(client, body)
{:ok, body, response}
{:ok, response} ->
{:error, {:unexpected_response, response}}
error = {:error, _reason} -> error
end
end
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
"localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
"#{endpoint_prefix}.#{region}.#{endpoint}"
end
defp build_url(host, %{:proto => proto, :port => port}) do
"#{proto}://#{host}:#{port}/"
end
defp encode!(client, payload) do
AWS.Client.encode!(client, payload, :json)
end
defp decode!(client, payload) do
AWS.Client.decode!(client, payload, :json)
end
end
| 36.265306 | 82 | 0.698649 |
1c445abec68cc6479c53d3fdcf07674f1244816b | 722 | ex | Elixir | lib/example_ieee754.ex | ZennerIoT/element-parsers | 0a828fd4d9d96f417839cbab5b491c21a07efd1c | [
"MIT"
] | 31 | 2017-10-18T13:21:02.000Z | 2022-03-17T13:24:36.000Z | lib/example_ieee754.ex | ZennerIoT/element-parsers | 0a828fd4d9d96f417839cbab5b491c21a07efd1c | [
"MIT"
] | 13 | 2018-04-05T12:48:14.000Z | 2021-08-19T12:22:12.000Z | lib/example_ieee754.ex | ZennerIoT/element-parsers | 0a828fd4d9d96f417839cbab5b491c21a07efd1c | [
"MIT"
] | 22 | 2018-02-26T08:31:29.000Z | 2022-03-17T13:28:12.000Z | defmodule Parser do
use Platform.Parsing.Behaviour
require Logger
# Example parser for a floating point value (IEEE-754) as part of a LoRaWAN message.
#
# Changelog:
# 2019-03-04 [as]: Initial version for demonstrating purposes.
#
def parse(<<data::float-32>>, _meta) do
# Also possible: float-signed-32 or float-signed-little-32
%{
value: data
}
end
def parse(payload, meta) do
Logger.warn(
"Could not parse payload #{inspect(payload)} with frame_port #{
inspect(get_in(meta, [:meta, :frame_port]))
}"
)
[]
end
def tests() do
[
{
:parse_hex,
"41500000",
%{},
%{value: 13.0}
}
]
end
end
| 18.512821 | 86 | 0.577562 |
1c446ae047cc6feb914c1f34bae40e4dabdbe4fb | 812 | ex | Elixir | lib/digital_ocean/action.ex | kianmeng/digital-ocean-elixir | eff6fd1c621ab51908edad731794b0ef2db1cac1 | [
"MIT"
] | null | null | null | lib/digital_ocean/action.ex | kianmeng/digital-ocean-elixir | eff6fd1c621ab51908edad731794b0ef2db1cac1 | [
"MIT"
] | null | null | null | lib/digital_ocean/action.ex | kianmeng/digital-ocean-elixir | eff6fd1c621ab51908edad731794b0ef2db1cac1 | [
"MIT"
] | null | null | null | defmodule DigitalOcean.Action do
alias DigitalOcean.{ Operation }
@doc """
Retrieve a details about a specific action.
## Examples
iex> DigitalOcean.Action.get(36804636) |> DigitalOcean.request()
{ :ok, %DigitalOcean.Response{} }
"""
@spec get(DigitalOcean.id_t()) :: Operation.t()
def get(action_id) do
%Operation{}
|> Map.put(:method, :get)
|> Map.put(:path, "/actions/#{action_id}")
end
@doc """
Retrieve a list of actions taken on an account.
## Examples
iex> DigitalOcean.Action.list() |> DigitalOcean.request()
{ :ok, %DigitalOcean.Response{} }
"""
@spec list(Keyword.t()) :: Operation.t()
def list(opts \\ []) do
%Operation{}
|> Map.put(:method, :get)
|> Map.put(:params, opts)
|> Map.put(:path, "/actions")
end
end
| 23.2 | 70 | 0.609606 |
1c446d0884a9e576b7a13879c3c6b3cd13c4c86c | 584 | exs | Elixir | mix.exs | balena/yaphone | 3f92a6acc8504289f8d9d13d5e75c45cb7008db6 | [
"BSD-3-Clause"
] | null | null | null | mix.exs | balena/yaphone | 3f92a6acc8504289f8d9d13d5e75c45cb7008db6 | [
"BSD-3-Clause"
] | null | null | null | mix.exs | balena/yaphone | 3f92a6acc8504289f8d9d13d5e75c45cb7008db6 | [
"BSD-3-Clause"
] | null | null | null | defmodule Yaphone.MixProject do
use Mix.Project
  # Project definition consumed by Mix; the :test env compiles extra
  # support modules via elixirc_paths/1.
  def project do
    [
      app: :yaphone,
      version: "0.1.0",
      elixir: "~> 1.12",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      elixirc_paths: elixirc_paths(Mix.env())
    ]
  end
  # OTP application settings; only :logger is started beyond the defaults.
  def application do
    [
      extra_applications: [:logger]
    ]
  end
  # Compile test/support helpers only in the :test environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]
  defp deps do
    [
      {:sweet_xml, "~> 0.7"},
      # test related
      {:dialyxir, "~> 1.0", only: [:dev], runtime: false}
    ]
  end
end
| 17.69697 | 57 | 0.547945 |
1c4481a83ec85390bb10139ea4f94ede4b3437f7 | 2,028 | ex | Elixir | lib/ex_onixo/helper/date.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
] | 1 | 2021-12-11T06:44:18.000Z | 2021-12-11T06:44:18.000Z | lib/ex_onixo/helper/date.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
] | null | null | null | lib/ex_onixo/helper/date.ex | damjack/ex_onixo | 5b1f97bc65867dcf1710540264094d147722ee11 | [
"MIT"
] | null | null | null | defmodule ExOnixo.Helper.Date do
import SweetXml
import Timex
  # An empty date string carries no information.
  def to_date("", _code), do: nil
  # Parses `datetime` according to a date-format code (presumably ONIX
  # code list 55, given the ex_onixo context — TODO confirm):
  #   "00" -> compact/dashed date (via convert_strftime/1)
  #   "01" -> YYYYMM   "05" -> YYYY
  #   "13" -> YYYYMMDDThhmm   "14" -> YYYYMMDDThhmmss
  # Unknown codes fall back to convert_strftime/1.
  def to_date(datetime, code) do
    case code do
      "00" ->
        convert_strftime(datetime)
      "01" ->
        datetime |> String.slice(0..5) |> parse!("%Y%m", :strftime)
      "05" ->
        datetime |> String.slice(0..3) |> parse!("%Y", :strftime)
      "13" ->
        parse!(datetime, "%Y%m%dT%H%M", :strftime)
      "14" ->
        parse!(datetime, "%Y%m%dT%H%M%S", :strftime)
      _ ->
        convert_strftime(datetime)
    end
  end
def to_date_without_code(xml) do
code = get_code(xml, %{tag: "/DateFormat", inline: "Date/@dateformat"})
datetime = xpath(xml, ~x"./Date/text()"s)
if String.length(datetime) !== 0 do
case code do
"00" ->
convert_strftime(datetime)
"01" ->
datetime |> String.slice(0..5) |> parse!("%Y%m", :strftime)
"05" ->
datetime |> String.slice(0..3) |> parse!("%Y", :strftime)
"13" ->
parse!(datetime, "%Y%m%dT%H%M", :strftime)
"14" ->
parse!(datetime, "%Y%m%dT%H%M%S", :strftime)
_ ->
convert_strftime(datetime)
end
end
end
defp get_code(xml, opts) do
inline = xpath(xml, ~x"./#{opts[:inline]}"s)
if String.length(inline) === 0 do
xpath(xml, ~x".#{opts[:tag]}/text()"s)
else
inline
end
end
  # No value (or an empty string) parses to nil.
  defp convert_strftime(nil), do: nil
  defp convert_strftime(""), do: nil
  # Heuristic: strings longer than 10 chars are treated as dashed dates
  # ("YYYY-MM-DD..."), anything else as compact "YYYYMMDD"; trailing
  # characters are discarded by the slice in both branches.
  # NOTE(review): a bare 10-char "YYYY-MM-DD" falls into the compact
  # branch and will fail to parse — confirm whether `>= 10` was intended.
  defp convert_strftime(string) do
    if string |> String.length > 10 do
      String.slice(string, 0..9) |> parse!("%Y-%m-%d", :strftime)
    else
      String.slice(string, 0..7) |> parse!("%Y%m%d", :strftime)
    end
  end
def check_and_parse(""), do: nil
def check_and_parse(datetext) do
case String.length(datetext) do
8 ->
parse!(datetext, "%Y%m%d", :strftime)
6 ->
parse!(datetext, "%Y%m", :strftime)
4 ->
parse!(datetext, "%Y", :strftime)
_ ->
datetext
end
end
end
| 26.337662 | 75 | 0.536489 |
1c4493d15c77f82e0258d7097b94c4339d0f9336 | 344 | exs | Elixir | Chapter02/config/prod.exs | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 28 | 2018-08-09T05:05:29.000Z | 2022-03-14T06:59:07.000Z | Chapter02/config/prod.exs | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 1 | 2019-02-11T09:11:33.000Z | 2019-05-06T06:40:19.000Z | Chapter02/config/prod.exs | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 8 | 2018-08-09T14:53:02.000Z | 2020-12-14T19:31:21.000Z | use Mix.Config
# By default, the umbrella project as well as each child
# application will require this configuration file, ensuring
# they all use the same configuration. While one could
# configure all applications here, we prefer to delegate
# back to each application for organization purposes.
import_config "../apps/*/config/config.exs"
| 38.222222 | 60 | 0.790698 |
1c44bfbe44446d0c86b72aa3c7dd327dacc8a310 | 439 | ex | Elixir | lib/videorama/multimedia/comentario.ex | ruben44bac/videorama | 6350f9aabd778b8660aac5952dc828b3bd35c4c6 | [
"MIT"
] | null | null | null | lib/videorama/multimedia/comentario.ex | ruben44bac/videorama | 6350f9aabd778b8660aac5952dc828b3bd35c4c6 | [
"MIT"
] | null | null | null | lib/videorama/multimedia/comentario.ex | ruben44bac/videorama | 6350f9aabd778b8660aac5952dc828b3bd35c4c6 | [
"MIT"
] | null | null | null | defmodule Videorama.Multimedia.Comentario do
use Ecto.Schema
import Ecto.Changeset
schema "comentarios" do
field :at, :integer
field :body, :string
belongs_to :usuario, Videorama.Cuentas.Usuario
belongs_to :video, Videorama.Multimedia.Video
timestamps()
end
  @doc false
  # Casts and validates user-supplied attributes; only :body and :at are
  # accepted and both are required. Associations are not cast here.
  def changeset(comentario, attrs) do
    comentario
    |> cast(attrs, [:body, :at])
    |> validate_required([:body, :at])
  end
end
| 19.086957 | 50 | 0.690205 |
1c44c7664b90981199e20d179cdb474089eafd7a | 2,496 | exs | Elixir | test/xlsx_reader/zip_archive_test.exs | Q1-Energie-AG/xlsx_reader | 02b49b6f85b693dfd445c714c01c0454409e621e | [
"Apache-2.0"
] | 40 | 2019-12-20T16:11:47.000Z | 2022-02-08T22:59:53.000Z | test/xlsx_reader/zip_archive_test.exs | Q1-Energie-AG/xlsx_reader | 02b49b6f85b693dfd445c714c01c0454409e621e | [
"Apache-2.0"
] | 4 | 2020-05-17T11:35:33.000Z | 2022-02-08T21:29:46.000Z | test/xlsx_reader/zip_archive_test.exs | Q1-Energie-AG/xlsx_reader | 02b49b6f85b693dfd445c714c01c0454409e621e | [
"Apache-2.0"
] | 6 | 2020-04-24T16:31:06.000Z | 2022-03-22T07:51:51.000Z | defmodule XlsxReader.ZipArchiveTest do
use ExUnit.Case
alias XlsxReader.ZipArchive
describe "list/1" do
test "lists the contents of a zip file" do
zip_handle = ZipArchive.handle(TestFixtures.path("test.zip"), :path)
assert {:ok, ["dir/subdir/file3.bin", "file1.txt", "file2.dat"]} =
ZipArchive.list(zip_handle)
end
test "lists the contents of a zip buffer" do
zip_handle = ZipArchive.handle(TestFixtures.read!("test.zip"), :binary)
assert {:ok, ["dir/subdir/file3.bin", "file1.txt", "file2.dat"]} =
ZipArchive.list(zip_handle)
end
test "invalid zip file" do
zip_handle = ZipArchive.handle(TestFixtures.path("not_a_zip.zip"), :path)
assert {:error, "invalid zip file"} = ZipArchive.list(zip_handle)
end
test "zip file not found" do
zip_handle = ZipArchive.handle("__does_not_exist__", :path)
assert {:error, "file not found"} = ZipArchive.list(zip_handle)
end
end
describe "extract/2" do
test "extracts a file from a zip file" do
zip_handle = ZipArchive.handle(TestFixtures.path("test.zip"), :path)
assert {:ok, "Contents of file1\n"} = ZipArchive.extract(zip_handle, "file1.txt")
assert {:ok, "Contents of file2\n"} = ZipArchive.extract(zip_handle, "file2.dat")
assert {:ok, "Contents of file3\n"} = ZipArchive.extract(zip_handle, "dir/subdir/file3.bin")
assert {:error, "file \"bogus.bin\" not found in archive"} =
ZipArchive.extract(zip_handle, "bogus.bin")
end
test "extracts a file from zip buffer" do
zip_handle = ZipArchive.handle(TestFixtures.path("test.zip"), :path)
assert {:ok, "Contents of file1\n"} = ZipArchive.extract(zip_handle, "file1.txt")
assert {:ok, "Contents of file2\n"} = ZipArchive.extract(zip_handle, "file2.dat")
assert {:ok, "Contents of file3\n"} = ZipArchive.extract(zip_handle, "dir/subdir/file3.bin")
assert {:error, "file \"bogus.bin\" not found in archive"} =
ZipArchive.extract(zip_handle, "bogus.bin")
end
test "invalid zip file" do
zip_handle = ZipArchive.handle(TestFixtures.path("not_a_zip.zip"), :path)
assert {:error, "invalid zip file"} = ZipArchive.extract(zip_handle, "file1.txt")
end
test "zip file not found" do
zip_handle = ZipArchive.handle("__does_not_exist__", :path)
assert {:error, "file not found"} = ZipArchive.extract(zip_handle, "file1.txt")
end
end
end
| 35.657143 | 98 | 0.657853 |
1c44cee62de62bf389573f9e236d2d64d2210060 | 260 | ex | Elixir | lib/ex_okex/swap/private/amend_bulk_orders.ex | yurikoval/ex_okex | bcaccee94b2f3ebcf2adec4ae70ed71dbe6f35c2 | [
"MIT"
] | 6 | 2018-12-04T22:05:05.000Z | 2022-01-08T11:54:40.000Z | lib/ex_okex/swap/private/amend_bulk_orders.ex | yurikoval/ex_okex | bcaccee94b2f3ebcf2adec4ae70ed71dbe6f35c2 | [
"MIT"
] | 14 | 2019-12-21T11:32:30.000Z | 2022-03-28T16:07:01.000Z | lib/ex_okex/swap/private/amend_bulk_orders.ex | yurikoval/ex_okex | bcaccee94b2f3ebcf2adec4ae70ed71dbe6f35c2 | [
"MIT"
] | 3 | 2019-10-19T19:33:34.000Z | 2019-10-19T19:35:18.000Z | defmodule ExOkex.Swap.Private.AmendBulkOrders do
import ExOkex.Api.Private
@prefix "/api/swap/v3"
  # Amends a batch of orders for `instrument_id` via the OKEx v3 swap API.
  # `config` is forwarded to the HTTP layer (nil presumably selects the
  # application-default credentials — confirm against ExOkex.Api.Private).
  def amend_bulk_orders(instrument_id, params, config \\ nil) do
    "#{@prefix}/amend_batch_orders/#{instrument_id}"
    |> post(params, config)
  end
end
| 23.636364 | 64 | 0.726923 |
1c4505b59995d860fa4f7626b8ab24acad2bcde9 | 340 | exs | Elixir | priv/repo/migrations/00420170502195527_create_accounts_role.exs | smpallen99/ucx_ucc | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 11 | 2017-05-15T18:35:05.000Z | 2018-02-05T18:27:40.000Z | priv/repo/migrations/00420170502195527_create_accounts_role.exs | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 15 | 2017-11-27T10:38:05.000Z | 2018-02-09T20:42:08.000Z | priv/repo/migrations/00420170502195527_create_accounts_role.exs | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 4 | 2017-09-13T11:34:16.000Z | 2018-02-26T13:37:06.000Z | defmodule InfinityOne.Repo.Migrations.CreateInfinityOne.Accounts.Role do
use Ecto.Migration
  def change do
    create table(:roles) do
      add :name, :string
      # Roles default to global scope unless stated otherwise.
      add :scope, :string, default: "global"
      add :description, :string
      timestamps(type: :utc_datetime)
    end
    # Role names must be unique across the table.
    create unique_index(:roles, [:name])
  end
end
| 21.25 | 72 | 0.682353 |
1c4516742482b5f70fdbb984f2f8544889db27ef | 20,538 | ex | Elixir | lib/github/github/server.ex | gebner/bors-ng | 7a88ef7cceb1f112374a426a4bdfffc323b44a61 | [
"Apache-2.0"
] | null | null | null | lib/github/github/server.ex | gebner/bors-ng | 7a88ef7cceb1f112374a426a4bdfffc323b44a61 | [
"Apache-2.0"
] | null | null | null | lib/github/github/server.ex | gebner/bors-ng | 7a88ef7cceb1f112374a426a4bdfffc323b44a61 | [
"Apache-2.0"
] | null | null | null | require Logger
defmodule BorsNG.GitHub.Server do
use GenServer
alias BorsNG.GitHub
@moduledoc """
Provides a real connection to GitHub's REST API.
This doesn't currently do rate limiting, but it will.
"""
def start_link do
GenServer.start_link(__MODULE__, :ok, name: GitHub)
end
@installation_content_type "application/vnd.github.machine-man-preview+json"
@check_content_type "application/vnd.github.antiope-preview+json"
@team_content_type "application/vnd.github.hellcat-preview+json"
@content_type_raw "application/vnd.github.v3.raw"
@content_type "application/vnd.github.v3+json"
@type tconn :: GitHub.tconn
@type ttoken :: GitHub.ttoken
@type trepo :: GitHub.trepo
@type tuser :: GitHub.tuser
@type tpr :: GitHub.tpr
@type tcollaborator :: GitHub.tcollaborator
@type tuser_repo_perms :: GitHub.tuser_repo_perms
@typedoc """
The token cache.
"""
@type ttokenreg :: %{number => {binary, number}}
@spec config() :: keyword
defp config do
Confex.fetch_env!(:bors, GitHub.Server)
end
@spec site() :: bitstring
defp site do
Confex.fetch_env!(:bors, :api_github_root)
end
def init(:ok) do
{:ok, %{}}
end
  # Repo-scoped call: the connection is a {token, repo_xref} pair. The
  # token pair is resolved through use_token! (defined elsewhere in this
  # module) before dispatching to do_handle_call/3.
  def handle_call({type, {{_, _} = token, repo_xref}, args}, _from, state) do
    use_token! token, state, fn token ->
      do_handle_call(type, {token, repo_xref}, args)
    end
  end
  # Token-only call (no repository context).
  def handle_call({type, {_, _} = token, args}, _from, state) do
    use_token! token, state, fn token ->
      do_handle_call(type, token, args)
    end
  end
  # Fetches the GitHub App's "html_url" (its public page) using an
  # app-level JWT bearer token.
  def handle_call(:get_app, _from, state) do
    result = "Bearer #{get_jwt_token()}"
    |> tesla_client(@installation_content_type)
    |> Tesla.get!("/app")
    |> case do
      %{body: raw, status: 200} ->
        app_link = raw
        |> Poison.decode!()
        |> Map.get("html_url")
        {:ok, app_link}
      err ->
        {:error, :get_app, err}
    end
    {:reply, result, state}
  end
  # Lists every installation of this GitHub App, authenticated with an
  # app-level JWT and fetched via get_installation_list_!/3 (defined
  # elsewhere in this module).
  def handle_call(:get_installation_list, _from, state) do
    jwt_token = get_jwt_token()
    list = get_installation_list_!(
      jwt_token,
      "#{site()}/app/installations",
      [])
    {:reply, {:ok, list}, state}
  end
def do_handle_call(:get_pr_files, repo_conn, {pr_xref}) do
case get!(repo_conn, "pulls/#{pr_xref}/files") do
%{body: raw, status: 200} ->
pr = raw
|> Poison.decode!()
|> Enum.map(&GitHub.File.from_json!/1)
{:ok, pr}
e ->
{:error, :get_pr_files, e.status, pr_xref}
end
end
def do_handle_call(:get_pr, repo_conn, {pr_xref}) do
case get!(repo_conn, "pulls/#{pr_xref}") do
%{body: raw, status: 200} ->
pr = raw
|> Poison.decode!()
|> GitHub.Pr.from_json!()
{:ok, pr}
e ->
{:error, :get_pr, e.status, pr_xref}
end
end
  # Updates the PR's title, body, and state via the GitHub API and
  # returns the freshly-parsed PR on success.
  def do_handle_call(:update_pr, repo_conn, pr) do
    repo_conn
    |> patch!("pulls/#{pr.number}", Poison.encode!(%{
      title: pr.title,
      body: pr.body,
      state: pr.state,
    }))
    |> case do
      %{body: raw, status: 200} ->
        pr = raw
        |> Poison.decode!()
        |> GitHub.Pr.from_json!()
        {:ok, pr}
      %{body: body, status: status} ->
        # NOTE(review): the error is tagged :push, which looks like a
        # copy-paste from the :push handler; :update_pr would be more
        # accurate, but callers may match on :push — confirm before changing.
        {:error, :push, status, body}
    end
  end
  # Lists the commits on a PR, parsed into GitHub.Commit structs.
  def do_handle_call(:get_pr_commits, repo_conn, {pr_xref}) do
    case get!(repo_conn, "pulls/#{pr_xref}/commits") do
      %{body: raw, status: 200} ->
        # NOTE(review): logs the entire raw body at info level; this can
        # be very noisy for large PRs — consider debug level.
        Logger.info("Raw response from GH #{inspect(raw)}")
        commits = raw
        |> Poison.decode!()
        |> Enum.map(&GitHub.Commit.from_json!/1)
        {:ok, commits}
      e ->
        {:error, :get_pr_commits, e.status, pr_xref}
    end
  end
  # Lists open PRs for the repository; get_open_prs_!/3 (defined
  # elsewhere in this module) takes an accumulator list, presumably to
  # follow pagination — confirm.
  def do_handle_call(:get_open_prs, {{:raw, token}, repo_xref}, {}) do
    {:ok, get_open_prs_!(
      token,
      "#{site()}/repositories/#{repo_xref}/pulls?state=open",
      [])}
  end
def do_handle_call(:push, repo_conn, {sha, to}) do
repo_conn
|> patch!("git/refs/heads/#{to}", Poison.encode!(%{sha: sha}))
|> case do
%{body: _, status: 200} ->
{:ok, sha}
%{body: body, status: status} ->
IO.inspect({:error, :push, body})
{:error, :push, status, body}
end
end
def do_handle_call(:get_branch, repo_conn, {branch}) do
case get!(repo_conn, "branches/#{branch}") do
%{body: raw, status: 200} ->
r = Poison.decode!(raw)["commit"]
{:ok, %{commit: r["sha"], tree: r["commit"]["tree"]["sha"]}}
err ->
{:error, :get_branch, err}
end
end
def do_handle_call(:delete_branch, repo_conn, {branch}) do
case delete!(repo_conn, "git/refs/heads/#{branch}") do
%{status: 204} ->
:ok
_ ->
{:error, :delete_branch}
end
end
  # Merges branch `from` into branch `to` via GitHub's merge endpoint.
  # A 201 carries the merge commit and tree; 409 (conflict) and 204 are
  # both collapsed into {:ok, :conflict}.
  # NOTE(review): per the GitHub API a 204 usually means "nothing to
  # merge" — confirm treating it as a conflict is intended.
  def do_handle_call(:merge_branch, repo_conn, {%{
    from: from,
    to: to,
    commit_message: commit_message}}) do
    msg = %{base: to, head: from, commit_message: commit_message}
    repo_conn
    |> post!("merges", Poison.encode!(msg))
    |> case do
      %{body: raw, status: 201} ->
        data = Poison.decode!(raw)
        res = %{
          commit: data["sha"],
          tree: data["commit"]["tree"]["sha"]
        }
        {:ok, res}
      %{status: 409} ->
        {:ok, :conflict}
      %{status: 204} ->
        {:ok, :conflict}
      err ->
        {:error, :merge_branch, err}
    end
  end
def do_handle_call(:create_commit, repo_conn, {%{
tree: tree,
parents: parents,
commit_message: commit_message,
committer: committer}}) do
msg = %{parents: parents, tree: tree, message: commit_message}
msg = if is_nil committer do
msg
else
Map.put(msg, "author", %{
name: committer.name, email: committer.email})
end
resp = repo_conn
|> post!("git/commits", Poison.encode!(msg))
|> case do
%{body: raw, status: 201} ->
Logger.info("Raw response from GH #{inspect(raw)}")
data = Poison.decode!(raw)
res = %{
commit: data["sha"],
}
{:ok, res.commit}
%{status: 409} ->
{:ok, :conflict}
%{status: 204} ->
{:ok, :conflict}
err ->
{:error, :create_commit, err}
end
resp
end
  # Creates a commit from `tree`/`parents` and then force-pushes `branch`
  # to it (delegates to the :force_push handler). The committer is
  # optional; when present it is recorded as the commit's "author".
  def do_handle_call(:synthesize_commit, repo_conn, {%{
    branch: branch,
    tree: tree,
    parents: parents,
    commit_message: commit_message,
    committer: committer}}) do
    msg = %{parents: parents, tree: tree, message: commit_message}
    msg = if is_nil committer do
      msg
    else
      Map.put(msg, "author", %{
        name: committer.name, email: committer.email})
    end
    repo_conn
    |> post!("git/commits", Poison.encode!(msg))
    |> case do
      %{body: raw, status: 201} ->
        sha = Poison.decode!(raw)["sha"]
        do_handle_call(:force_push, repo_conn, {sha, branch})
      _ ->
        {:error, :synthesize_commit}
    end
  end
  # Points branch `to` at commit `sha`: creates the ref when the branch
  # does not exist (404), force-updates it when it points elsewhere, and
  # is a no-op when it already matches.
  def do_handle_call(:force_push, repo_conn, {sha, to}) do
    repo_conn
    |> get!("branches/#{to}")
    |> case do
      %{status: 404} ->
        # Branch does not exist yet: create the ref.
        msg = %{ref: "refs/heads/#{to}", sha: sha}
        repo_conn
        |> post!("git/refs", Poison.encode!(msg))
        |> case do
          %{status: 201} ->
            {:ok, sha}
          _ ->
            {:error, :force_push}
        end
      %{body: raw, status: 200} ->
        if sha != Poison.decode!(raw)["commit"]["sha"] do
          # Branch exists but points elsewhere: force-update the ref.
          msg = %{force: true, sha: sha}
          repo_conn
          |> patch!("git/refs/heads/#{to}", Poison.encode!(msg))
          |> case do
            %{status: 200} ->
              {:ok, sha}
            _ ->
              {:error, :force_push}
          end
        else
          # Already up to date.
          {:ok, sha}
        end
      _ ->
        {:error, :force_push}
    end
  end
# Fetches both the classic commit statuses and the check-runs for `sha`,
# maps each to bors' internal status representation, and merges the two
# maps (check-run entries win on key collision, per Map.merge/2 semantics).
def do_handle_call(:get_commit_status, repo_conn, {sha}) do
with \
{:ok, status} <- (
repo_conn
|> get!("commits/#{sha}/status")
|> case do
%{body: raw, status: 200} ->
# Map of status context name => internal status atom.
res = Poison.decode!(raw)["statuses"]
|> Enum.map(&{
&1["context"] |> GitHub.map_changed_status(),
GitHub.map_state_to_status(&1["state"])})
|> Map.new()
{:ok, res}
_ ->
{:error, :get_commit_status, :status}
end),
{:ok, check} <- (
repo_conn
# check-runs requires the "antiope" preview accept header.
|> get!("commits/#{sha}/check-runs", @check_content_type)
|> case do
%{body: raw, status: 200} ->
# Map of check-run name => internal status atom.
res = Poison.decode!(raw)["check_runs"]
|> Enum.map(&{
&1["name"] |> GitHub.map_changed_status(),
GitHub.map_check_to_status(&1["conclusion"])})
|> Map.new()
{:ok, res}
_ ->
{:error, :get_commit_status, :check}
end),
do: {:ok, Map.merge(status, check)}
end
# Returns `{:ok, [label_name]}` for the given issue, or `{:error, :get_labels}`.
def do_handle_call(:get_labels, repo_conn, {issue_xref}) do
repo_conn
|> get!("issues/#{issue_xref}/labels")
|> case do
%{body: raw, status: 200} ->
res = Poison.decode!(raw)
|> Enum.map(fn %{"name" => name} -> name end)
{:ok, res}
_ ->
{:error, :get_labels}
end
end
# Fetches all review pages for a PR and folds them into a Reviews struct.
# Pagination is handled by get_reviews_json_!/3 below.
def do_handle_call(:get_reviews, {{:raw, token}, repo_xref}, {issue_xref}) do
reviews = token
|> get_reviews_json_!("#{site()}/repositories/#{repo_xref}/pulls/#{issue_xref}/reviews", [])
|> GitHub.Reviews.from_json!()
{:ok, reviews}
end
# Fetches the raw contents of `path` on `branch`.
# Returns `{:ok, contents}` or `{:ok, nil}` when the file does not exist.
# NOTE(review): any status other than 200/404 raises CaseClauseError here —
# presumably intentional crash-on-error; confirm before relying on it.
def do_handle_call(:get_file, repo_conn, {branch, path}) do
%{body: raw, status: status} = get!(
repo_conn,
"contents/#{path}",
@content_type_raw,
[query: [ref: branch]])
res = case status do
404 -> nil
200 -> raw
end
{:ok, res}
end
# Posts a comment on the given issue/PR number.
# Returns `:ok` on HTTP 201, `{:error, :post_comment}` otherwise.
def do_handle_call(:post_comment, repo_conn, {number, body}) do
repo_conn
|> post!("issues/#{number}/comments", Poison.encode!(%{body: body}))
|> case do
%{status: 201} ->
:ok
_ ->
{:error, :post_comment}
end
end
# Publishes a commit status under the "bors" context for `sha`.
# On failure, the HTTP status and raw body are included in the error tuple
# (note: `status` is rebound from the input atom to the HTTP status here).
def do_handle_call(:post_commit_status, repo_conn, {sha, status, msg, url}) do
state = GitHub.map_status_to_state(status)
body = %{state: state, context: "bors", description: msg, target_url: url}
repo_conn
|> post!("statuses/#{sha}", Poison.encode!(body))
|> case do
%{status: 201} ->
:ok
%{status: status, body: raw} ->
{:error, :post_commit_status, status, raw}
end
end
# Checks whether `username` has a membership in team `team_id`.
#
# Returns `true` on HTTP 200 and `false` for a 404 or any other response
# (network/rate-limit failures are treated as "not a member").
def do_handle_call(:belongs_to_team, repo_conn,
    {username, team_id}) do
  # Fix: removed a leftover `IO.inspect(repo_conn)` debug print that wrote
  # the connection tuple (including the raw token) to stdout on every call.
  {{:raw, token}, _installation_id} = repo_conn
  "token #{token}"
  |> tesla_client()
  |> Tesla.get!(URI.encode("/teams/#{team_id}/memberships/#{username}"))
  |> case do
    %{status: 200} ->
      true
    %{status: 404} ->
      false
    _ ->
      false
  end
end
# Looks up a team by organization name and team slug.
#
# Returns `{:ok, %GitHub.Team{}}` on success, `{:ok, nil}` when the team
# does not exist, and `{:error, raw_body}` for any other response.
def do_handle_call(:get_team_by_name, {{:raw, token}, _installation_id}, {org_name, team_name}) do
  # Fix: removed `IO.inspect(token)`, which printed a live API credential
  # to stdout/logs on every call. Never log or inspect `token` here.
  "token #{token}"
  |> tesla_client()
  |> Tesla.get!(URI.encode("/orgs/#{org_name}/teams/#{team_name}"))
  |> case do
    %{body: raw, status: 200} ->
      team = raw
      |> Poison.decode!()
      |> GitHub.Team.from_json!()
      {:ok, team}
    %{status: 404} ->
      {:ok, nil}
    %{body: raw} ->
      {:error, raw}
  end
end
# Fetches all collaborators (with their repo permissions) for a repository,
# following pagination via get_collaborators_by_repo_/3.
def do_handle_call(:get_collaborators_by_repo, {{:raw, token}, repo_xref},
{}) do
get_collaborators_by_repo_(
token,
"#{site()}/repositories/#{repo_xref}/collaborators",
[])
end
# Looks up a user by login name.
# Returns `{:ok, %GitHub.FullUser{}}`, `{:ok, nil}` on 404, or an error tuple.
def do_handle_call(
:get_user_by_login, {:raw, token}, {login}
) do
"token #{token}"
|> tesla_client()
|> Tesla.get!("/users/#{URI.encode_www_form(login)}")
|> case do
%{body: raw, status: 200} ->
user = raw
|> Poison.decode!()
|> GitHub.FullUser.from_json!()
{:ok, user}
%{status: 404} ->
{:ok, nil}
_ ->
{:error, :get_user_by_login}
end
end
# Lists every repository visible to the current installation token,
# following pagination via get_installation_repos_!/3.
def do_handle_call(:get_installation_repos, {:raw, token}, {}) do
{:ok, get_installation_repos_!(
token,
"#{site()}/installation/repositories",
[])}
end
# Pagination helpers. Each pair of clauses follows the same shape:
# a `nil` URL terminates the recursion with the accumulator, otherwise the
# page is fetched, decoded, appended, and the RFC 5988 "Link: rel=next"
# header (extracted by get_next_headers/1) drives the next iteration.
# Terminal clause: no next page, return what has been accumulated.
defp get_reviews_json_!(_, nil, append) do
append
end
# Fetches one page of PR reviews (raising on any non-200) and recurses.
defp get_reviews_json_!(token, url, append) do
params = get_url_params(url)
%{body: raw, status: 200, headers: headers} = "token #{token}"
|> tesla_client(@installation_content_type)
|> Tesla.get!(url, query: params)
json = Enum.concat(append, Poison.decode!(raw))
next_headers = get_next_headers(headers)
case next_headers do
[] -> json
[next] -> get_reviews_json_!(token, next.url, json)
end
end
@spec get_installation_repos_!(binary, binary, [trepo]) :: [trepo]
defp get_installation_repos_!(_, nil, repos) do
repos
end
# Fetches one page of installation repositories and recurses.
defp get_installation_repos_!(token, url, append) do
params = get_url_params(url)
%{body: raw, status: 200, headers: headers} = "token #{token}"
|> tesla_client(@installation_content_type)
|> Tesla.get!(url, query: params)
repositories = Poison.decode!(raw)["repositories"]
|> Enum.map(&GitHub.Repo.from_json!/1)
|> Enum.concat(append)
next_headers = get_next_headers(headers)
case next_headers do
[] -> repositories
[next] -> get_installation_repos_!(token, next.url, repositories)
end
end
@spec get_installation_list_!(binary, binary | nil, [integer]) :: [integer]
defp get_installation_list_!(_, nil, list) do
list
end
# Fetches one page of installation ids (JWT-authenticated) and recurses.
defp get_installation_list_!(jwt_token, url, append) do
params = get_url_params(url)
%{body: raw, status: 200, headers: headers} = "Bearer #{jwt_token}"
|> tesla_client(@installation_content_type)
|> Tesla.get!(url, query: params)
list = Poison.decode!(raw)
|> Enum.map(fn %{"id" => id} -> id end)
|> Enum.concat(append)
next_headers = get_next_headers(headers)
case next_headers do
[] -> list
[next] -> get_installation_list_!(jwt_token, next.url, list)
end
end
@spec get_open_prs_!(binary, binary | nil, [tpr]) :: [tpr]
defp get_open_prs_!(_, nil, prs) do
prs
end
# Fetches one page of open PRs. Unlike the helpers above this one does not
# raise on a bad status: it falls back to an empty page ("[]", no headers),
# which also ends pagination since `%{}` yields no "link" header.
defp get_open_prs_!(token, url, append) do
params = get_url_params(url)
{raw, headers} = "token #{token}"
|> tesla_client(@content_type)
|> Tesla.get!(url, query: params)
|> case do
%{body: raw, status: 200, headers: headers} -> {raw, headers}
_ -> {"[]", %{}}
end
# PRs that fail to parse are silently dropped via flat_map.
prs = Poison.decode!(raw)
|> Enum.flat_map(fn element ->
element |> GitHub.Pr.from_json |> case do
{:ok, pr} -> [pr]
_ -> []
end
end)
|> Enum.concat(append)
next_headers = get_next_headers(headers)
case next_headers do
[] -> prs
[next] -> get_open_prs_!(token, next.url, prs)
end
end
@spec extract_user_repo_perms(map()) :: tuser_repo_perms
# Normalizes the "permissions" sub-map of a GitHub collaborator payload
# into `%{admin: bool, push: bool, pull: bool}`; missing keys become false.
defp extract_user_repo_perms(data) do
  permissions = data["permissions"]

  for perm <- ["admin", "push", "pull"], into: %{} do
    # Fixed, known key set, so String.to_atom/1 is safe here.
    {String.to_atom(perm), !!permissions[perm]}
  end
end
# Fetches all pages of a repository's collaborators, converting each entry
# into `%{user: %GitHub.User{}, perms: %{admin:, push:, pull:}}`.
#
# Follows "Link: rel=next" pagination recursively; returns `{:ok, users}`
# or `{:error, :get_collaborators_by_repo}` if any page request fails.
@spec get_collaborators_by_repo_(binary, binary, [tcollaborator]) ::
{:ok, [tcollaborator]} | {:error, :get_collaborators_by_repo}
def get_collaborators_by_repo_(token, url, append) do
  params = get_url_params(url)
  "token #{token}"
  |> tesla_client(@team_content_type)
  |> Tesla.get(url, query: params)
  |> case do
    {:ok, %{body: raw, status: 200, headers: headers}} ->
      users = raw
      |> Poison.decode!()
      |> Enum.map(fn user ->
        %{user: GitHub.User.from_json!(user),
          perms: extract_user_repo_perms(user)}
      end)
      |> Enum.concat(append)
      next_headers = get_next_headers(headers)
      case next_headers do
        [] ->
          {:ok, users}
        [next] ->
          get_collaborators_by_repo_(token, next.url, users)
      end
    error ->
      # Fix: was `IO.inspect(error)` — a debug leftover that bypassed the
      # configured log backend. Route the failure through Logger instead.
      Logger.error("get_collaborators_by_repo failed: #{inspect(error)}")
      {:error, :get_collaborators_by_repo}
  end
end
# Thin HTTP-verb wrappers over Tesla. Each takes the repo connection tuple
# `{{:raw, token}, repo_xref}`, prefixes the path with the repository route,
# and raises on transport errors (Tesla bang variants).
@spec post!(tconn, binary, binary, binary) :: map
defp post!(
{{:raw, token}, repo_xref},
path,
body,
content_type \\ @content_type
) do
"token #{token}"
|> tesla_client(content_type)
|> Tesla.post!(URI.encode("/repositories/#{repo_xref}/#{path}"), body)
end
@spec patch!(tconn, binary, binary, binary) :: map
defp patch!(
{{:raw, token}, repo_xref},
path,
body,
content_type \\ @content_type
) do
"token #{token}"
|> tesla_client(content_type)
|> Tesla.patch!(URI.encode("/repositories/#{repo_xref}/#{path}"), body)
end
@spec get!(tconn, binary, binary, list) :: map
defp get!(
{{:raw, token}, repo_xref},
path,
content_type \\ @content_type,
params \\ []
) do
"token #{token}"
|> tesla_client(content_type)
|> Tesla.get!(URI.encode("/repositories/#{repo_xref}/#{path}"), params)
end
@spec delete!(tconn, binary, binary, list) :: map
defp delete!(
{{:raw, token}, repo_xref},
path,
content_type \\ @content_type,
params \\ []
) do
"token #{token}"
|> tesla_client(content_type)
|> Tesla.delete!(URI.encode("/repositories/#{repo_xref}/#{path}"), params)
end
# Extracts the "next" page link(s) from a response's headers.
# Returns [] when there is no Link header or it has no rel="next" entry,
# otherwise a single-element list containing the parsed next-link struct.
defp get_next_headers(headers) do
Enum.flat_map(headers, fn {name, value} ->
name
# Header names are matched case-insensitively.
|> String.downcase(:ascii)
|> case do
"link" ->
value = ExLinkHeader.parse!(value)
if is_nil(value.next), do: [], else: [value.next]
_ -> []
end
end)
end
# Decodes the query string of `url` into a list of `{key, value}` pairs;
# returns `[]` when the URL has no query string at all.
defp get_url_params(url) do
  url
  |> URI.parse()
  |> Map.get(:query)
  |> case do
    nil -> []
    query -> Enum.to_list(URI.query_decoder(query))
  end
end
# Lifetime (seconds) used both for the app JWT's `exp` claim and for the
# cached installation token's validity window in raw_token!/2.
@token_exp 60
# Exchanges the app-level JWT for a short-lived installation access token.
# Raises (MatchError) unless GitHub answers 201.
@spec get_installation_token!(number) :: binary
def get_installation_token!(installation_xref) do
jwt_token = get_jwt_token()
%{body: raw, status: 201} = "Bearer #{jwt_token}"
|> tesla_client(@installation_content_type)
|> Tesla.post!("app/installations/#{installation_xref}/access_tokens", "")
Poison.decode!(raw)["token"]
end
# Builds the RS256-signed GitHub App JWT from the configured issuer id and
# PEM private key (read from `config()`).
def get_jwt_token do
import Joken.Config
cfg = config()
Joken.generate_and_sign!(default_claims(), %{
"iat" => Joken.current_time(),
"exp" => Joken.current_time() + @token_exp,
"iss" => cfg[:iss]
}, Joken.Signer.create("RS256", %{"pem" => cfg[:pem]}))
end
@doc """
Uses a token from the cache, or, if the request fails,
retry without using the cached token.
"""
@spec use_token!(ttoken, ttokenreg, ((ttoken) -> term)) ::
{:reply, term, ttokenreg}
def use_token!({:installation, installation_xref} = token, state, fun) do
{token, state} = raw_token!(token, state)
result = fun.(token)
case result do
{:ok, _} -> {:reply, result, state}
:ok -> {:reply, result, state}
_ ->
# Any non-ok result is treated as a possibly-expired token: evict the
# cached entry, mint a fresh token, and run `fun` exactly once more.
state = Map.delete(state, installation_xref)
{token, state} = raw_token!(token, state)
result = fun.(token)
{:reply, result, state}
end
end
# Raw tokens cannot be refreshed, so there is no retry path here.
def use_token!(token, state, fun) do
{token, state} = raw_token!(token, state)
result = fun.(token)
{:reply, result, state}
end
@doc """
Given an {:installation, installation_xref},
look it up in the token cache.
If it's there, and it's still usable, use it.
Otherwise, fetch a new one.
"""
@spec raw_token!(ttoken, ttokenreg) :: {{:raw, binary}, ttokenreg}
def raw_token!({:installation, installation_xref}, state) do
now = Joken.current_time()
case state[installation_xref] do
# Cache hit that has not outlived @token_exp seconds yet.
{token, issued} when issued + @token_exp > now ->
{{:raw, token}, state}
_ ->
# Cache miss or stale entry: mint a fresh token and record its issue time.
token = get_installation_token!(installation_xref)
state = Map.put(state, installation_xref, {token, now})
{{:raw, token}, state}
end
end
# Already-raw tokens pass straight through without touching the cache.
def raw_token!({:raw, _} = raw, state) do
{raw, state}
end
# Builds a Tesla client preconfigured for the GitHub API:
# base URL from site(), the given Authorization header value, peer TLS
# verification with hostname checking, and up to 5 retries (100ms delay).
defp tesla_client(authorization, content_type \\ @content_type) do
host = String.to_charlist(URI.parse(site()).host)
ssl_opts = [
verify: :verify_peer,
verify_fun: {&:ssl_verify_hostname.verify_fun/3, check_hostname: host},
cacertfile: :certifi.cacertfile,
]
Tesla.client [
{Tesla.Middleware.BaseUrl, site()},
{Tesla.Middleware.Headers, [
{"authorization", authorization},
{"accept", content_type},
{"user-agent", "bors-ng https://bors.tech"},
]},
{Tesla.Middleware.Retry, delay: 100, max_retries: 5},
], {Tesla.Adapter.Httpc, [ssl: ssl_opts]}
end
end
| 28.095759 | 100 | 0.571915 |
1c452364f7efda764cce5c0ae554e7f749082e63 | 7,920 | ex | Elixir | kousa/test/_support/ws_client.ex | LeonardSSH/dogehouse | 584055ad407bc37fa35cdf36ebb271622e29d436 | [
"MIT"
] | 9 | 2021-03-17T03:56:18.000Z | 2021-09-24T22:45:14.000Z | kousa/test/_support/ws_client.ex | ActuallyTomas/dogehouse | 8c3d2cd1d7e99e173f0658759467a391c4a90c4e | [
"MIT"
] | 12 | 2021-07-06T12:51:13.000Z | 2022-03-16T12:38:18.000Z | kousa/test/_support/ws_client.ex | ActuallyTomas/dogehouse | 8c3d2cd1d7e99e173f0658759467a391c4a90c4e | [
"MIT"
] | 4 | 2021-07-15T20:33:50.000Z | 2022-03-27T12:46:47.000Z | defmodule BrothTest.WsClient do
use WebSockex
@api_url Application.compile_env!(:kousa, :api_url)
# Builds the default WebSockex child spec but replaces `:id` with a random
# UUID, so several independent test clients can share one supervisor.
def child_spec(info) do
  spec = super(info)
  Map.put(spec, :id, UUID.uuid4())
end
# Connects a WebSockex client to the test API's /socket endpoint.
# The caller's `$ancestors` chain is smuggled to the server inside the
# user-agent header (term_to_binary + Base16) — presumably so the server
# side can tie the connection back to the test process; confirm in Kousa.
def start_link(_opts) do
ancestors =
:"$ancestors"
|> Process.get()
|> :erlang.term_to_binary()
|> Base.encode16()
@api_url
|> Path.join("socket")
|> WebSockex.start_link(__MODULE__, nil,
extra_headers: [{"user-agent", ancestors}, {"x-forwarded-for", "127.0.0.1"}]
)
end
###########################################################################
# API
# Call-style variants of send_msg: they attach a reference so the reply
# can be correlated, mirroring a "fetch" issued by the user.
# Sends an op with a fresh correlation ref (v0.2.0 envelope) and returns
# the ref so the caller can match the eventual reply frame.
def send_call(client_ws, op, payload) do
call_ref = UUID.uuid4()
WebSockex.cast(
client_ws,
{:send, %{"op" => op, "p" => payload, "ref" => call_ref, "v" => "0.2.0"}}
)
call_ref
end
# Same as send_call/3, but using the legacy envelope ("d" + "fetchId").
def send_call_legacy(client_ws, op, payload) do
call_ref = UUID.uuid4()
WebSockex.cast(
client_ws,
{:send, %{"op" => op, "d" => payload, "fetchId" => call_ref}}
)
call_ref
end
@doc """
performs the call AND traps its reply. Should not be used for anything which is
the primary call under test; only to be used for supporting calls necessary as a
part of the setup.
"""
def do_call(ws, op, payload) do
ref = send_call(ws, op, payload)
reply_op = op <> ":reply"
# Block until the matching ":reply" frame for this ref arrives.
receive do
{:text, %{"op" => ^reply_op, "ref" => ^ref, "p" => payload}, ^ws} ->
payload
after
100 ->
raise "reply to `#{op}` not received"
end
end
# Legacy-envelope variant of do_call/3: matches on "fetchId" instead of a
# ":reply" op, with the same 100ms timeout.
def do_call_legacy(ws, op, payload) do
ref = send_call_legacy(ws, op, payload)
receive do
{:text, %{"op" => _, "fetchId" => ^ref, "d" => payload}, ^ws} ->
payload
after
100 ->
raise "reply to `#{op}` not received"
end
end
# Fire-and-forget send using the v0.2.0 envelope (no correlation ref).
def send_msg(client_ws, op, payload),
do: WebSockex.cast(client_ws, {:send, %{"op" => op, "p" => payload, "v" => "0.2.0"}})
# Fire-and-forget send using the legacy envelope.
def send_msg_legacy(client_ws, op, payload),
do: WebSockex.cast(client_ws, {:send, %{"op" => op, "d" => payload}})
# Cast implementation: encode the map as JSON and emit it as a text frame.
defp send_msg_impl(map, test_pid) do
{:reply, {:text, Jason.encode!(map)}, test_pid}
end
# Makes the client forward every received frame to the calling test process.
def forward_frames(client_ws), do: WebSockex.cast(client_ws, {:forward_frames, self()})
# The forwarding target is simply stored as the WebSockex state.
defp forward_frames_impl(test_pid, _state), do: {:ok, test_pid}
# Asserts that a v0.2.0 frame with the given op/payload was forwarded.
# When `from` is given, the frame must come from that specific client pid.
defmacro assert_frame(op, payload, from \\ nil) do
if from do
quote do
from = unquote(from)
ExUnit.Assertions.assert_receive(
{:text, %{"op" => unquote(op), "p" => unquote(payload)}, ^from}
)
end
else
quote do
ExUnit.Assertions.assert_receive(
{:text, %{"op" => unquote(op), "p" => unquote(payload)}, _}
)
end
end
end
# Legacy-envelope counterpart of assert_frame/3 (payload under "d").
defmacro assert_frame_legacy(op, payload, from \\ nil) do
if from do
quote do
from = unquote(from)
ExUnit.Assertions.assert_receive(
{:text, %{"op" => unquote(op), "d" => unquote(payload)}, ^from}
)
end
else
quote do
ExUnit.Assertions.assert_receive(
{:text, %{"op" => unquote(op), "d" => unquote(payload)}, _}
)
end
end
end
@doc """
asserts that a reply from a previously issued call operation has been
receieved, as identified by its reference uuid (`ref`).
Note that the third parameter is matchable, so you can use `_`, use
it to assign a to a variable, or, do partial matches on maps.
"""
defmacro assert_reply(op, ref, payload, from \\ nil) do
if from do
quote do
op = unquote(op)
from = unquote(from)
ref = unquote(ref)
ExUnit.Assertions.assert_receive(
{:text, %{"op" => ^op, "p" => unquote(payload), "ref" => ^ref}, ^from}
)
end
else
quote do
op = unquote(op)
ref = unquote(ref)
ExUnit.Assertions.assert_receive(
{:text, %{"op" => ^op, "p" => unquote(payload), "ref" => ^ref}, _}
)
end
end
end
@doc """
asserts that an error has been returned from a previously issued call or
cast operation has been received, as identified by its reference uuid (`ref`).
Note that the third parameter is matchable, so you can use `_`, use
it to assign a to a variable, or, do partial matches on the error.
"""
defmacro assert_error(op, ref, error, from \\ nil) do
if from do
quote do
op = unquote(op)
from = unquote(from)
ref = unquote(ref)
ExUnit.Assertions.assert_receive(
{:text, %{"op" => ^op, "e" => unquote(error), "ref" => ^ref}, ^from}
)
end
else
quote do
op = unquote(op)
ref = unquote(ref)
ExUnit.Assertions.assert_receive(
{:text, %{"op" => ^op, "e" => unquote(error), "ref" => ^ref}, _}
)
end
end
end
# Legacy-envelope reply assertion: matches the "fetch_done" op with the
# payload under "d" and the correlation id under "fetchId".
defmacro assert_reply_legacy(ref, payload, from \\ nil) do
if from do
quote do
from = unquote(from)
ref = unquote(ref)
ExUnit.Assertions.assert_receive(
{:text, %{"op" => "fetch_done", "d" => unquote(payload), "fetchId" => ^ref}, ^from}
)
end
else
quote do
ref = unquote(ref)
ExUnit.Assertions.assert_receive(
{:text, %{"op" => "fetch_done", "d" => unquote(payload), "fetchId" => ^ref}, _}
)
end
end
end
# TODO: stop relying on Process.link/1 + trap_exit here; a monitor
# (Process.monitor/1) would avoid mutating the test process's flags.
# Asserts that running `fun` causes the client process to exit with `reason`.
defmacro assert_dies(client_ws, fun, reason, timeout \\ 100) do
quote bind_quoted: [client_ws: client_ws, fun: fun, reason: reason, timeout: timeout] do
Process.flag(:trap_exit, true)
Process.link(client_ws)
fun.()
ExUnit.Assertions.assert_receive({:EXIT, ^client_ws, ^reason}, timeout)
end
end
# Asserts that NO frame with the given op arrives from `from`.
defmacro refute_frame(op, from) do
quote do
from = unquote(from)
ExUnit.Assertions.refute_receive({:text, %{"op" => unquote(op)}, ^from})
end
end
###########################################################################
# WebSockex callbacks
# Every inbound frame is decoded and forwarded to the test process stored
# in the state, tagged with this client's pid for multi-client assertions.
@impl true
def handle_frame({type, data}, test_pid) do
send(test_pid, {type, Jason.decode!(data), self()})
{:ok, test_pid}
end
@impl true
def handle_cast({:send, map}, test_pid), do: send_msg_impl(map, test_pid)
def handle_cast({:forward_frames, test_pid}, state), do: forward_frames_impl(test_pid, state)
end
# Test helper that boots an authenticated WsClient for a given user,
# covering both the current ("auth:request") and legacy ("auth") protocols.
defmodule BrothTest.WsClientFactory do
alias Beef.Schemas.User
alias BrothTest.WsClient
require WsClient
import ExUnit.Assertions
# note that this function ALSO causes the calling process to be subscribed
# to forwarded messages from the websocket client.
def create_client_for(user = %User{}, opts \\ []) do
tokens = Kousa.Utils.TokenUtils.create_tokens(user)
# start and link the websocket client
client_ws = ExUnit.Callbacks.start_supervised!(WsClient)
WsClient.forward_frames(client_ws)
# Authenticate using whichever protocol the test asked for.
if opts[:legacy] do
WsClient.send_msg(client_ws, "auth", %{
"accessToken" => tokens.accessToken,
"refreshToken" => tokens.refreshToken,
"platform" => "foo",
"reconnectToVoice" => false,
"muted" => false,
"deafened" => false
})
WsClient.assert_frame_legacy("auth-good", _)
else
WsClient.do_call(client_ws, "auth:request", %{
"accessToken" => tokens.accessToken,
"refreshToken" => tokens.refreshToken,
"platform" => "foo",
"reconnectToVoice" => false,
"muted" => false,
"deafened" => false
})
end
# link the UserProcess to prevent dangling DB sandbox lookups
[{usersession_pid, _}] = Registry.lookup(Onion.UserSessionRegistry, user.id)
# associate the user session with the database.
Process.link(usersession_pid)
client_ws
end
end
| 27.216495 | 95 | 0.585859 |
1c4532784ed7cc02d1cdb2818661888b2611fe6e | 1,531 | ex | Elixir | clients/secret_manager/lib/google_api/secret_manager/v1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/secret_manager/lib/google_api/secret_manager/v1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/secret_manager/lib/google_api/secret_manager/v1/model/empty.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
# Auto-generated model (see the header notice above); code left untouched.
defmodule GoogleApi.SecretManager.V1.Model.Empty do
@moduledoc """
A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON representation for `Empty` is empty JSON object `{}`.
## Attributes
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{}
end
# Poison hooks delegating to the generated ModelBase plumbing.
defimpl Poison.Decoder, for: GoogleApi.SecretManager.V1.Model.Empty do
def decode(value, options) do
GoogleApi.SecretManager.V1.Model.Empty.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.SecretManager.V1.Model.Empty do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.452381 | 345 | 0.758328 |
1c45afdead9bf299f44e78f780e87b4574b0bdab | 8,855 | exs | Elixir | test/graphvix/graph_test.exs | ssajnani/graphvix | e5fb0fbceb5b9ab72941633b702d414302868acc | [
"MIT"
] | 47 | 2016-10-02T21:44:30.000Z | 2022-02-21T18:01:40.000Z | test/graphvix/graph_test.exs | ssajnani/graphvix | e5fb0fbceb5b9ab72941633b702d414302868acc | [
"MIT"
] | 17 | 2016-10-27T17:11:27.000Z | 2021-12-23T13:19:39.000Z | test/graphvix/graph_test.exs | ssajnani/graphvix | e5fb0fbceb5b9ab72941633b702d414302868acc | [
"MIT"
] | 10 | 2017-03-20T18:21:21.000Z | 2021-07-25T14:34:28.000Z | defmodule Graphvix.GraphTest do
use ExUnit.Case, async: true
use ExUnitProperties
alias Graphvix.{Graph, HTMLRecord, Record}
doctest Graph, except: [
new: 1, digraph_tables: 1, to_dot: 1, write: 2, compile: 3, show: 2,
set_global_properties: 3
]
import HTMLRecord, only: [tr: 1, td: 1, td: 2]
# A single vertex renders as one statement with its attributes in brackets.
property "generating a graph with a vertex" do
check all label <- string(:ascii, min_length: 3)
do
graph = Graph.new()
{graph, _vid} = Graph.add_vertex(graph, label, color: "blue")
assert Graph.to_dot(graph) == """
digraph G {
v0 [label="#{label}",color="blue"]
}
""" |> String.trim
end
end
# Record nodes join their labels with " | " and force shape="record".
property "generating a graph with a record node" do
check all labels <- list_of(string(:ascii, min_length: 3), min_length: 2, max_length: 5),
label <- string(:ascii, min_length: 3),
color <- string(:ascii, min_length: 3)
do
record = Record.new(labels, color: "blue")
graph = Graph.new
{graph, v0} = Graph.add_record(graph, record)
{graph, v1} = Graph.add_vertex(graph, label, color: color)
{graph, _eid} = Graph.add_edge(graph, v0, v1)
assert Graph.to_dot(graph) == """
digraph G {
v0 [label="#{Enum.join(labels, " | ")}",shape="record",color="blue"]
v1 [label="#{label}",color="#{color}"]
v0 -> v1
}
""" |> String.trim
end
end
# Edges may target a record port: `{vertex_id, port}` renders as "v0:port".
property "generating a graph with edges to record ports" do
check all [l1, l2, p1, l3] <- list_of(string(:ascii, min_length: 3), length: 4)
do
graph = Graph.new
record = Record.new([l1, {p1, l2}])
{graph, v0} = Graph.add_record(graph, record)
{graph, v1} = Graph.add_vertex(graph, l3)
{graph, _eid} = Graph.add_edge(graph, {v0, p1}, v1)
assert Graph.to_dot(graph) == """
digraph G {
v0 [label="#{l1} | <#{p1}> #{l2}",shape="record"]
v1 [label="#{l3}"]
v0:#{p1} -> v1
}
""" |> String.trim
end
end
# HTML records render as an HTML-like label (shape="plaintext") and their
# <td port="..."> cells are addressable with the same "v0:port" syntax.
property "generating a graph with edges to HTML record ports" do
check all [l1, l2, p1, l3] <- list_of(string(:ascii, min_length: 3), length: 4)
do
graph = Graph.new
record = HTMLRecord.new([
tr([
td(l1),
td(l2, port: p1)
])
])
{graph, v0} = Graph.add_html_record(graph, record)
{graph, v1} = Graph.add_vertex(graph, l3)
{graph, _eid} = Graph.add_edge(graph, {v0, p1}, v1)
assert Graph.to_dot(graph) == """
digraph G {
v0 [label=<<table>
<tr>
<td>#{l1}</td>
<td port="#{p1}">#{l2}</td>
</tr>
</table>>,shape="plaintext"]
v1 [label="#{l3}"]
v0:#{p1} -> v1
}
""" |> String.trim
end
end
# Subgraphs get sequential "subgraphN" ids and remember their vertex ids.
property "adding a subgraph" do
check all label <- string(:ascii, min_length: 3)
do
graph = Graph.new()
{graph, vid} = Graph.add_vertex(graph, label, color: "blue")
{graph, _cid} = Graph.add_subgraph(graph, [vid])
[subgraph] = graph.subgraphs
assert subgraph.id == "subgraph0"
assert subgraph.vertex_ids == [vid]
end
end
# Clusters are subgraphs with a "clusterN" id (Graphviz cluster semantics).
property "adding a cluster" do
check all label <- string(:ascii, min_length: 3)
do
graph = Graph.new()
{graph, vid} = Graph.add_vertex(graph, label, color: "blue")
{graph, _cid} = Graph.add_cluster(graph, [vid])
[cluster] = graph.subgraphs
assert cluster.id == "cluster0"
assert cluster.vertex_ids == [vid]
end
end
# Global node/edge defaults render as top-level `node [...]` / `edge [...]`.
property "generating graphs with global properties" do
check all color <- string(:ascii, min_length: 3),
color2 <- string(:ascii, min_length: 3),
e_label <- string(:printable, min_length: 5)
do
graph = Graph.new()
graph = Graph.set_global_properties(graph, :node, color: color)
graph = Graph.set_global_properties(graph, :edge, color: color2, label: e_label)
assert Graph.to_dot(graph) == """
digraph G {
node [color="#{color}"]
edge [label="#{e_label}",color="#{color2}"]
}
""" |> String.trim
end
end
# add_edge/3 inserts exactly one entry in the underlying :digraph edge table.
property "adding an edge" do
check all label1 <- string(:ascii, min_length: 3),
label2 <- string(:ascii, min_length: 3)
do
graph = Graph.new()
{graph, v1} = Graph.add_vertex(graph, label1)
{graph, v2} = Graph.add_vertex(graph, label2)
{graph, _e1} = Graph.add_edge(graph, v1, v2)
{_, _, etab, _, _} = graph.digraph
assert length(:ets.tab2list(etab)) == 1
end
end
# Edge attributes render in brackets after the "v0 -> v1" statement.
property "dot format for a graph with edges" do
check all label1 <- string(:ascii, min_length: 3),
label2 <- string(:ascii, min_length: 3)
do
graph = Graph.new()
{graph, v1} = Graph.add_vertex(graph, label1)
{graph, v2} = Graph.add_vertex(graph, label2)
{graph, _e1} = Graph.add_edge(graph, v1, v2, color: "blue")
assert Graph.to_dot(graph) == """
digraph G {
v0 [label="#{label1}"]
v1 [label="#{label2}"]
v0 -> v1 [color="blue"]
}
""" |> String.trim
end
end
# Subgraph-level properties nest inside the `subgraph subgraph0 { ... }`
# body; member vertices are emitted there instead of at the top level.
property "dot format for a graph with a subgraph" do
check all label1 <- string(:ascii, min_length: 3),
label2 <- string(:ascii, min_length: 3)
do
graph = Graph.new()
{graph, v1} = Graph.add_vertex(graph, label1)
{graph, v2} = Graph.add_vertex(graph, label2)
{graph, _e1} = Graph.add_edge(graph, v1, v2, color: "blue")
{graph, _cid} = Graph.add_subgraph(graph, [v1], style: "filled", color: "blue", node: [shape: "Msquare"])
assert Graph.to_dot(graph) == """
digraph G {
subgraph subgraph0 {
node [shape="Msquare"]
style="filled"
color="blue"
v0 [label="#{label1}"]
}
v1 [label="#{label2}"]
v0 -> v1 [color="blue"]
}
""" |> String.trim
end
end
# Same expectation for clusters, with the "cluster0" id prefix.
property "dot format for a graph with a cluster" do
check all label1 <- string(:ascii, min_length: 3),
label2 <- string(:ascii, min_length: 3)
do
graph = Graph.new()
{graph, v1} = Graph.add_vertex(graph, label1)
{graph, v2} = Graph.add_vertex(graph, label2)
{graph, _e1} = Graph.add_edge(graph, v1, v2, color: "blue")
{graph, _cid} = Graph.add_cluster(graph, [v1], style: "filled", color: "blue", node: [shape: "Msquare"])
assert Graph.to_dot(graph) == """
digraph G {
subgraph cluster0 {
node [shape="Msquare"]
style="filled"
color="blue"
v0 [label="#{label1}"]
}
v1 [label="#{label2}"]
v0 -> v1 [color="blue"]
}
""" |> String.trim
end
end
# Clusters and subgraphs coexist: intra-subgraph edges (v1 -> v2) render
# inside the subgraph body, cross-boundary edges render at the top level.
property "dot format for a graph with clusters and subgraphs" do
check all label1 <- string(:ascii, min_length: 3),
label2 <- string(:ascii, min_length: 3),
label3 <- string(:ascii, min_length: 3),
label4 <- string(:ascii, min_length: 3)
do
graph = Graph.new()
{graph, v1} = Graph.add_vertex(graph, label1)
{graph, v2} = Graph.add_vertex(graph, label2)
{graph, v3} = Graph.add_vertex(graph, label3)
{graph, v4} = Graph.add_vertex(graph, label4)
{graph, _e} = Graph.add_edge(graph, v1, v2, color: "blue")
{graph, _e} = Graph.add_edge(graph, v2, v3)
{graph, _e} = Graph.add_edge(graph, v3, v4)
{graph, _cid} = Graph.add_cluster(graph, [v1], style: "filled", color: "blue", node: [shape: "Msquare"])
{graph, _cid} = Graph.add_subgraph(graph, [v2, v3], node: [shape: "square"], edge: [color: "green"])
assert Graph.to_dot(graph) == """
digraph G {
subgraph cluster0 {
node [shape="Msquare"]
style="filled"
color="blue"
v0 [label="#{label1}"]
}
subgraph subgraph1 {
node [shape="square"]
edge [color="green"]
v1 [label="#{label2}"]
v2 [label="#{label3}"]
v1 -> v2
}
v3 [label="#{label4}"]
v0 -> v1 [color="blue"]
v2 -> v3
}
""" |> String.trim
end
end
# write/2 dumps the dot source to "<name>.dot" in the working directory.
test ".write/2" do
g = Graph.new()
:ok = Graph.write(g, "g")
{:ok, content} = File.read("g.dot")
:ok = File.rm("g.dot")
assert content == """
digraph G {
}
""" |> String.trim
end
# compile/2 writes the .dot file and renders a PNG by default.
test ".compile/2" do
g = Graph.new()
:ok = Graph.compile(g, "g")
{:ok, content} = File.read("g.dot")
:ok = File.rm("g.dot")
:ok = File.rm("g.png")
assert content == """
digraph G {
}
""" |> String.trim
end
# compile/3 honors an explicit output format (here :pdf).
test ".compile/3" do
g = Graph.new()
:ok = Graph.compile(g, "g", :pdf)
{:ok, content} = File.read("g.dot")
:ok = File.rm("g.dot")
:ok = File.rm("g.pdf")
assert content == """
digraph G {
}
""" |> String.trim
end
end
| 25.014124 | 111 | 0.548955 |
1c45b56058e3d5849fe3d54d757e7ec014498022 | 18,584 | ex | Elixir | lib/credo/execution.ex | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | null | null | null | lib/credo/execution.ex | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | null | null | null | lib/credo/execution.ex | kanmaniselvan/credo | 276e0fc24d1bf56c8fc2902a9e933c8f208ce391 | [
"MIT"
] | 1 | 2019-10-08T16:42:40.000Z | 2019-10-08T16:42:40.000Z | defmodule Credo.Execution do
@moduledoc """
Every run of Credo is configured via a `Execution` struct, which is created and
manipulated via the `Credo.Execution` module.
"""
@doc """
The `Execution` struct is created and manipulated via the `Credo.Execution` module.
"""
# CLI surface: raw argv plus OptionParser switch/alias definitions.
defstruct argv: [],
cli_options: nil,
cli_switches: [
all_priorities: :boolean,
all: :boolean,
files_included: :keep,
files_excluded: :keep,
checks_with_tag: :keep,
checks_without_tag: :keep,
checks: :string,
config_name: :string,
config_file: :string,
color: :boolean,
crash_on_error: :boolean,
debug: :boolean,
diff_with: :string,
enable_disabled_checks: :string,
mute_exit_status: :boolean,
format: :string,
help: :boolean,
ignore_checks: :string,
ignore: :string,
min_priority: :string,
only: :string,
read_from_stdin: :boolean,
strict: :boolean,
verbose: :boolean,
version: :boolean,
watch: :boolean
],
cli_aliases: [
a: :all,
A: :all_priorities,
c: :checks,
C: :config_name,
d: :debug,
h: :help,
i: :ignore_checks,
v: :version
],
cli_switch_plugin_param_converters: [],
# config, resolved from .credo.exs files and plugins
files: nil,
color: true,
debug: false,
checks: nil,
requires: [],
plugins: [],
parse_timeout: 5000,
strict: false,
# options, set by the command line
all: false,
crash_on_error: true,
diff_with: nil,
enable_disabled_checks: nil,
format: nil,
help: false,
ignore_checks_tags: [],
ignore_checks: nil,
max_concurrent_check_runs: nil,
min_priority: 0,
mute_exit_status: false,
only_checks_tags: [],
only_checks: nil,
read_from_stdin: false,
verbose: false,
version: false,
# state, which is accessed and changed over the course of Credo's execution
# (the *_pid fields point at the helper servers started in start_servers/1)
pipeline_map: %{},
commands: %{},
config_files: [],
current_task: nil,
parent_task: nil,
initializing_plugin: nil,
halted: false,
config_files_pid: nil,
source_files_pid: nil,
issues_pid: nil,
timing_pid: nil,
skipped_checks: nil,
assigns: %{},
results: %{},
config_comment_map: %{}
@type t :: %__MODULE__{}
# Default execution pipeline: ordered groups of {Task, opts} run by
# Credo.Execution.Task. Group names are hook points plugins can extend.
@execution_pipeline [
__pre__: [
{Credo.Execution.Task.AppendDefaultConfig, []},
{Credo.Execution.Task.ParseOptions, []},
{Credo.Execution.Task.ConvertCLIOptionsToConfig, []},
{Credo.Execution.Task.InitializePlugins, []}
],
parse_cli_options: [
{Credo.Execution.Task.ParseOptions, []}
],
initialize_plugins: [
# This is where plugins can put their hooks to initialize themselves based on
# the params given in the config as well as in their own command line switches.
],
validate_cli_options: [
{Credo.Execution.Task.ValidateOptions, []}
],
convert_cli_options_to_config: [
{Credo.Execution.Task.ConvertCLIOptionsToConfig, []}
],
determine_command: [
{Credo.Execution.Task.DetermineCommand, []}
],
set_default_command: [
{Credo.Execution.Task.SetDefaultCommand, []}
],
resolve_config: [
{Credo.Execution.Task.UseColors, []},
{Credo.Execution.Task.RequireRequires, []}
],
validate_config: [
{Credo.Execution.Task.ValidateConfig, []}
],
run_command: [
{Credo.Execution.Task.RunCommand, []}
],
halt_execution: [
{Credo.Execution.Task.AssignExitStatusForIssues, []}
]
]
alias Credo.Execution.ExecutionConfigFiles
alias Credo.Execution.ExecutionIssues
alias Credo.Execution.ExecutionSourceFiles
alias Credo.Execution.ExecutionTiming
@doc "Builds an Execution struct for a re-run with the given `argv`, noting to just analyse the `files_that_changed`."
def build(%__MODULE__{} = previous_exec, files_that_changed) when is_list(files_that_changed) do
previous_exec.argv
|> build()
|> put_rerun(previous_exec, files_that_changed)
end
# Fallback: a plain argv list ignores `files_that_changed` entirely.
def build(argv, files_that_changed) when is_list(files_that_changed) do
build(argv)
end
@doc "Builds an Execution struct for the the given `argv`."
def build(argv \\ []) when is_list(argv) do
max_concurrent_check_runs = System.schedulers_online()
%__MODULE__{argv: argv, max_concurrent_check_runs: max_concurrent_check_runs}
|> put_pipeline(__MODULE__, @execution_pipeline)
|> put_builtin_command("categories", Credo.CLI.Command.Categories.CategoriesCommand)
|> put_builtin_command("diff", Credo.CLI.Command.Diff.DiffCommand)
|> put_builtin_command("explain", Credo.CLI.Command.Explain.ExplainCommand)
|> put_builtin_command("gen.check", Credo.CLI.Command.GenCheck)
|> put_builtin_command("gen.config", Credo.CLI.Command.GenConfig)
|> put_builtin_command("help", Credo.CLI.Command.Help)
|> put_builtin_command("info", Credo.CLI.Command.Info.InfoCommand)
|> put_builtin_command("list", Credo.CLI.Command.List.ListCommand)
|> put_builtin_command("suggest", Credo.CLI.Command.Suggest.SuggestCommand)
|> put_builtin_command("version", Credo.CLI.Command.Version)
|> start_servers()
end
@doc false
defp start_servers(%__MODULE__{} = exec) do
exec
|> ExecutionConfigFiles.start_server()
|> ExecutionIssues.start_server()
|> ExecutionSourceFiles.start_server()
|> ExecutionTiming.start_server()
end
  @doc """
  Returns the checks that should be run for a given `exec` struct.

  Takes all checks from the `checks:` field of the exec, matches those against
  any patterns to include or exclude certain checks given via the command line.
  """
  def checks(exec)

  # No checks configured at all: nothing runs, nothing is included or ignored.
  def checks(%__MODULE__{checks: nil}) do
    {[], [], []}
  end

  # Returns `{checks_to_run, only_matching, ignore_matching}`.
  def checks(%__MODULE__{
        checks: checks,
        only_checks: only_checks,
        only_checks_tags: only_checks_tags,
        ignore_checks: ignore_checks,
        ignore_checks_tags: ignore_checks_tags
      }) do
    # Narrow down by tag filters first, then by name patterns.
    only_matching =
      checks |> filter_only_checks_by_tags(only_checks_tags) |> filter_only_checks(only_checks)

    # Checks can be excluded either by name pattern or by tag; both lists
    # contribute to the final exclusion set.
    ignore_matching_by_name = filter_ignore_checks(checks, ignore_checks)
    ignore_matching_by_tags = filter_ignore_checks_by_tags(checks, ignore_checks_tags)
    ignore_matching = ignore_matching_by_name ++ ignore_matching_by_tags

    result = only_matching -- ignore_matching

    {result, only_matching, ignore_matching}
  end

  # `nil`/`[]` patterns mean "no inclusion filter given": keep all checks.
  defp filter_only_checks(checks, nil), do: checks
  defp filter_only_checks(checks, []), do: checks
  defp filter_only_checks(checks, patterns), do: filter_checks(checks, patterns)

  # `nil`/`[]` patterns mean "no exclusion filter given": exclude nothing.
  defp filter_ignore_checks(_checks, nil), do: []
  defp filter_ignore_checks(_checks, []), do: []
  defp filter_ignore_checks(checks, patterns), do: filter_checks(checks, patterns)
defp filter_checks(checks, patterns) do
regexes =
patterns
|> List.wrap()
|> to_match_regexes
Enum.filter(checks, &match_regex(&1, regexes, true))
end
defp match_regex(_tuple, [], default_for_empty), do: default_for_empty
defp match_regex(tuple, regexes, _default_for_empty) do
check_name =
tuple
|> Tuple.to_list()
|> List.first()
|> to_string
Enum.any?(regexes, &Regex.run(&1, check_name))
end
defp to_match_regexes(list) do
Enum.map(list, fn match_check ->
{:ok, match_pattern} = Regex.compile(match_check, "i")
match_pattern
end)
end
  # `nil`/`[]` tags mean "no tag inclusion filter given": keep all checks.
  defp filter_only_checks_by_tags(checks, nil), do: checks
  defp filter_only_checks_by_tags(checks, []), do: checks
  defp filter_only_checks_by_tags(checks, tags), do: filter_checks_by_tags(checks, tags)

  # `nil`/`[]` tags mean "no tag exclusion filter given": exclude nothing.
  defp filter_ignore_checks_by_tags(_checks, nil), do: []
  defp filter_ignore_checks_by_tags(_checks, []), do: []
  defp filter_ignore_checks_by_tags(checks, tags), do: filter_checks_by_tags(checks, tags)

  defp filter_checks_by_tags(_checks, nil), do: []
  defp filter_checks_by_tags(_checks, []), do: []

  # Keeps the checks carrying at least one of the given tags.
  defp filter_checks_by_tags(checks, tags) do
    # NOTE(review): `String.to_atom/1` creates atoms dynamically. Tags appear
    # to come from CLI/config here, but confirm they can never originate from
    # untrusted input (atoms are not garbage-collected).
    tags = Enum.map(tags, &String.to_atom/1)

    Enum.filter(checks, &match_tags(&1, tags, true))
  end

  # With no tags to match against, fall back to the given default.
  defp match_tags(_tuple, [], default_for_empty), do: default_for_empty

  # True when the check's effective tag list shares any tag with `tags`.
  defp match_tags({check, params}, tags, _default_for_empty) do
    tags_for_check = tags_for_check(check, params)

    Enum.any?(tags, &Enum.member?(tags_for_check, &1))
  end
  @doc """
  Returns the tags for a given `check` and its `params`.
  """
  def tags_for_check(check, params)

  # No params given: use the tags declared on the check module itself.
  def tags_for_check(check, nil), do: check.tags
  def tags_for_check(check, []), do: check.tags

  def tags_for_check(check, params) when is_list(params) do
    # `:__initial__` acts as a placeholder in the params' tag list and is
    # expanded into the check module's own default tags.
    params
    |> Credo.Check.Params.tags(check)
    |> Enum.flat_map(fn
      :__initial__ -> check.tags
      tag -> [tag]
    end)
  end
  @doc """
  Sets the exec values which `strict` implies (if applicable).
  """
  def set_strict(exec)

  # Strict mode: report everything, down to the lowest priority.
  def set_strict(%__MODULE__{strict: true} = exec) do
    %__MODULE__{exec | all: true, min_priority: -99}
  end

  # Non-strict mode: only report issues of normal priority and above.
  def set_strict(%__MODULE__{strict: false} = exec) do
    %__MODULE__{exec | min_priority: 0}
  end

  # `strict` unset (or exec not matching the struct clauses): leave untouched.
  def set_strict(exec), do: exec

  @doc false
  def get_path(exec) do
    # The path given on the command line, parsed into `cli_options`.
    exec.cli_options.path
  end
  # Commands

  @doc "Returns the name of the command, which should be run by the given execution."
  def get_command_name(exec) do
    exec.cli_options.command
  end

  @doc "Returns all valid command names."
  def get_valid_command_names(exec) do
    Map.keys(exec.commands)
  end
def get_command(exec, name) do
Map.get(exec.commands, name) ||
raise ~S'Command not found: "#{name}"\n\nRegistered commands: #{
inspect(exec.commands, pretty: true)
}'
end
@doc false
def put_command(exec, _plugin_mod, name, command_mod) do
commands = Map.put(exec.commands, name, command_mod)
%__MODULE__{exec | commands: commands}
|> init_command(command_mod)
end
@doc false
def set_initializing_plugin(%__MODULE__{initializing_plugin: nil} = exec, plugin_mod) do
%__MODULE__{exec | initializing_plugin: plugin_mod}
end
def set_initializing_plugin(exec, nil) do
%__MODULE__{exec | initializing_plugin: nil}
end
def set_initializing_plugin(%__MODULE__{initializing_plugin: mod1}, mod2) do
raise "Attempting to initialize plugin #{inspect(mod2)}, " <>
"while already initializing plugin #{mod1}"
end
  # Plugin params

  @doc false
  def get_plugin_param(exec, plugin_mod, param_name) do
    # Access-style lookup: returns nil when the plugin or param is unknown.
    exec.plugins[plugin_mod][param_name]
  end

  @doc false
  def put_plugin_param(exec, plugin_mod, param_name, param_value) do
    # NOTE(review): when `plugin_mod` is absent from `exec.plugins`,
    # `Keyword.update/4` inserts the default `[]` and the param is silently
    # dropped. Presumably plugins are always registered beforehand — confirm.
    plugins =
      Keyword.update(exec.plugins, plugin_mod, [], fn list ->
        Keyword.update(list, param_name, param_value, fn _ -> param_value end)
      end)

    %__MODULE__{exec | plugins: plugins}
  end

  # CLI switches

  @doc false
  def put_cli_switch(exec, _plugin_mod, name, type) do
    # Registers an extra command-line switch (e.g. contributed by a plugin).
    %__MODULE__{exec | cli_switches: exec.cli_switches ++ [{name, type}]}
  end

  @doc false
  def put_cli_switch_alias(exec, _plugin_mod, name, alias_name) do
    # Registers a short alias for a previously declared switch.
    %__MODULE__{exec | cli_aliases: exec.cli_aliases ++ [{alias_name, name}]}
  end

  @doc false
  def put_cli_switch_plugin_param_converter(exec, plugin_mod, cli_switch_name, plugin_param_name) do
    # Remembers that the given CLI switch should be converted into a plugin
    # param once the command line has been parsed.
    converter_tuple = {cli_switch_name, plugin_mod, plugin_param_name}

    %__MODULE__{
      exec
      | cli_switch_plugin_param_converters:
          exec.cli_switch_plugin_param_converters ++ [converter_tuple]
    }
  end
def get_given_cli_switch(exec, switch_name) do
if Map.has_key?(exec.cli_options.switches, switch_name) do
{:ok, exec.cli_options.switches[switch_name]}
else
:error
end
end
  # Assigns

  @doc "Returns the assign with the given `name` for the given `exec` struct (or return the given `default` value)."
  def get_assign(exec, name, default \\ nil) do
    Map.get(exec.assigns, name, default)
  end

  @doc "Puts the given `value` with the given `name` as assign into the given `exec` struct."
  def put_assign(exec, name, value) do
    %__MODULE__{exec | assigns: Map.put(exec.assigns, name, value)}
  end

  # Config Files

  @doc "Returns all config files for the given `exec` struct."
  def get_config_files(exec) do
    Credo.Execution.ExecutionConfigFiles.get(exec)
  end

  @doc false
  def append_config_file(exec, {_, _, _} = config_file) do
    # Config files live in a separate server process (see `start_servers/1`);
    # `put/2` updates that server, the exec struct itself is returned unchanged.
    config_files = get_config_files(exec) ++ [config_file]

    ExecutionConfigFiles.put(exec, config_files)

    exec
  end

  # Source Files

  @doc "Returns all source files for the given `exec` struct."
  def get_source_files(exec) do
    Credo.Execution.ExecutionSourceFiles.get(exec)
  end

  @doc "Puts the given `source_files` into the given `exec` struct."
  def put_source_files(exec, source_files) do
    # Stored in the ExecutionSourceFiles server process, not in the struct.
    ExecutionSourceFiles.put(exec, source_files)

    exec
  end
  # Issues

  @doc "Returns all issues for the given `exec` struct."
  def get_issues(exec) do
    # Issues are stored per filename in the ExecutionIssues server; flatten
    # the per-file lists into one list.
    exec
    |> ExecutionIssues.to_map()
    |> Map.values()
    |> List.flatten()
  end

  @doc "Returns all issues for the given `exec` struct that relate to the given `filename`."
  def get_issues(exec, filename) do
    exec
    |> ExecutionIssues.to_map()
    |> Map.get(filename, [])
  end

  @doc "Sets the issues for the given `exec` struct, overwriting any existing issues."
  def set_issues(exec, issues) do
    # Updates the ExecutionIssues server; the exec struct itself is unchanged.
    ExecutionIssues.set(exec, issues)

    exec
  end

  # Results

  @doc "Returns the result with the given `name` for the given `exec` struct (or return the given `default` value)."
  def get_result(exec, name, default \\ nil) do
    Map.get(exec.results, name, default)
  end

  @doc "Puts the given `value` with the given `name` as result into the given `exec` struct."
  def put_result(exec, name, value) do
    %__MODULE__{exec | results: Map.put(exec.results, name, value)}
  end
  # Halt

  @doc "Halts further execution of the pipeline."
  def halt(exec) do
    %__MODULE__{exec | halted: true}
  end

  # Task tracking

  @doc false
  def set_parent_and_current_task(exec, parent_task, current_task) do
    %__MODULE__{exec | parent_task: parent_task, current_task: current_task}
  end

  # Running tasks

  @doc false
  def run(exec) do
    # Runs the default pipeline registered under this module's name.
    run_pipeline(exec, __MODULE__)
  end

  @doc false
  def run_pipeline(%__MODULE__{} = initial_exec, pipeline_key)
      when is_atom(pipeline_key) and not is_nil(pipeline_key) do
    initial_pipeline = get_pipeline(initial_exec, pipeline_key)

    # Iterate over the group names as they were at the start, but re-read the
    # pipeline from the exec before running each group — so tasks that
    # prepend/append tasks to not-yet-run groups have their changes picked up.
    Enum.reduce(initial_pipeline, initial_exec, fn {group_name, _list}, exec_inside_pipeline ->
      outer_pipeline = get_pipeline(exec_inside_pipeline, pipeline_key)

      task_group = outer_pipeline[group_name]

      Enum.reduce(task_group, exec_inside_pipeline, fn {task_mod, opts}, exec_inside_task_group ->
        Credo.Execution.Task.run(task_mod, exec_inside_task_group, opts)
      end)
    end)
  end
@doc false
defp get_pipeline(exec, pipeline_key) do
case exec.pipeline_map[pipeline_key] do
nil -> raise "Could not find execution pipeline for '#{pipeline_key}'"
pipeline -> pipeline
end
end
def put_pipeline(exec, pipeline_key, pipeline) do
new_pipelines = Map.put(exec.pipeline_map, pipeline_key, pipeline)
%__MODULE__{exec | pipeline_map: new_pipelines}
end
@doc false
def prepend_task(exec, plugin_mod, nil, group_name, task_tuple) do
prepend_task(exec, plugin_mod, __MODULE__, group_name, task_tuple)
end
def prepend_task(exec, plugin_mod, pipeline_key, group_name, task_mod) when is_atom(task_mod) do
prepend_task(exec, plugin_mod, pipeline_key, group_name, {task_mod, []})
end
@doc false
def prepend_task(exec, _plugin_mod, pipeline_key, group_name, task_tuple) do
pipeline =
exec
|> get_pipeline(pipeline_key)
|> Enum.map(fn
{^group_name, list} -> {group_name, [task_tuple] ++ list}
value -> value
end)
put_pipeline(exec, __MODULE__, pipeline)
end
@doc false
def append_task(exec, plugin_mod, nil, group_name, task_tuple) do
append_task(exec, plugin_mod, __MODULE__, group_name, task_tuple)
end
def append_task(exec, plugin_mod, pipeline_key, group_name, task_mod) when is_atom(task_mod) do
append_task(exec, plugin_mod, pipeline_key, group_name, {task_mod, []})
end
@doc false
def append_task(exec, _plugin_mod, pipeline_key, group_name, task_tuple) do
pipeline =
exec
|> get_pipeline(pipeline_key)
|> Enum.map(fn
{^group_name, list} -> {group_name, list ++ [task_tuple]}
value -> value
end)
put_pipeline(exec, pipeline_key, pipeline)
end
  # Registers a command shipped with Credo itself (plugin slot = `Credo`).
  defp put_builtin_command(exec, name, command_mod) do
    put_command(exec, Credo, name, command_mod)
  end

  # Lets the command module initialize itself and validates that it returned
  # an Execution struct.
  defp init_command(exec, command_mod) do
    exec
    |> command_mod.init()
    |> ensure_execution_struct("#{command_mod}.init/1")
  end

  @doc ~S"""
  Ensures that the given `value` is a `%Credo.Execution{}` struct, raises an error otherwise.

  Example:

      exec
      |> mod.init()
      |> Execution.ensure_execution_struct("#{mod}.init/1")

  """
  def ensure_execution_struct(value, fun_name)

  def ensure_execution_struct(%__MODULE__{} = exec, _fun_name), do: exec

  def ensure_execution_struct(value, fun_name) do
    raise("Expected #{fun_name} to return %Credo.Execution{}, got: #{inspect(value)}")
  end
  @doc false
  def get_rerun(exec) do
    # Returns `{previous_exec, files_that_changed}` when this exec was built
    # via `build/2` for a re-run, `:notfound` otherwise.
    case get_assign(exec, "credo.rerun.previous_execution") do
      nil -> :notfound
      previous_exec -> {previous_exec, get_assign(exec, "credo.rerun.files_that_changed")}
    end
  end

  # Stores the previous execution and the changed files (normalized to paths
  # relative to the cwd) as assigns, for `get_rerun/1` to pick up.
  defp put_rerun(exec, previous_exec, files_that_changed) do
    exec
    |> put_assign("credo.rerun.previous_execution", previous_exec)
    |> put_assign(
      "credo.rerun.files_that_changed",
      Enum.map(files_that_changed, fn filename ->
        filename
        |> Path.expand()
        |> Path.relative_to_cwd()
      end)
    )
  end
end
| 29.925926 | 124 | 0.665842 |
1c45c2230c68a20649ccd576ac2d9bbf913bd507 | 353 | exs | Elixir | test/coherence_test.exs | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
] | 2 | 2018-01-19T06:12:16.000Z | 2018-03-12T07:17:17.000Z | test/coherence_test.exs | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
] | null | null | null | test/coherence_test.exs | henb/coherence | 725247353bad46df464caffa12b9ea2788fe774f | [
"MIT"
] | 2 | 2017-09-22T16:54:36.000Z | 2021-11-09T20:55:58.000Z | defmodule CoherenceTest do
use TestCoherence.ModelCase
doctest Coherence
alias TestCoherence.User
test "creates a user" do
changeset = User.changeset(%User{}, %{name: "test", email: "test@example.com", password: "test", password_confirmation: "test"})
user = Repo.insert! changeset
assert user.email == "test@example.com"
end
end
| 29.416667 | 132 | 0.716714 |
1c46275f300b695ae75b798ff9685adacfb8cad3 | 2,277 | ex | Elixir | clients/content/lib/google_api/content/v21/model/lia_inventory_settings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/content/lib/google_api/content/v21/model/lia_inventory_settings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/content/lib/google_api/content/v21/model/lia_inventory_settings.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V21.Model.LiaInventorySettings do
@moduledoc """
## Attributes
* `inventoryVerificationContactEmail` (*type:* `String.t`, *default:* `nil`) - The email of the contact for the inventory verification process.
* `inventoryVerificationContactName` (*type:* `String.t`, *default:* `nil`) - The name of the contact for the inventory verification process.
* `inventoryVerificationContactStatus` (*type:* `String.t`, *default:* `nil`) - The status of the verification contact. Acceptable values are: - "`active`" - "`inactive`" - "`pending`"
* `status` (*type:* `String.t`, *default:* `nil`) - The status of the inventory verification process. Acceptable values are: - "`active`" - "`inactive`" - "`pending`"
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:inventoryVerificationContactEmail => String.t() | nil,
:inventoryVerificationContactName => String.t() | nil,
:inventoryVerificationContactStatus => String.t() | nil,
:status => String.t() | nil
}
field(:inventoryVerificationContactEmail)
field(:inventoryVerificationContactName)
field(:inventoryVerificationContactStatus)
field(:status)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V21.Model.LiaInventorySettings do
def decode(value, options) do
GoogleApi.Content.V21.Model.LiaInventorySettings.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V21.Model.LiaInventorySettings do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.660714 | 189 | 0.726834 |
1c465ed7e6aa2d1b8dbeaf2587f0919b9bb71992 | 162 | exs | Elixir | test/test_helper.exs | chulkilee/ueberauth_example | 877fdb63199bf8d00bc72a72908ab2cb34b752aa | [
"MIT"
] | null | null | null | test/test_helper.exs | chulkilee/ueberauth_example | 877fdb63199bf8d00bc72a72908ab2cb34b752aa | [
"MIT"
] | null | null | null | test/test_helper.exs | chulkilee/ueberauth_example | 877fdb63199bf8d00bc72a72908ab2cb34b752aa | [
"MIT"
] | null | null | null | ExUnit.start()
Mix.Task.run("ecto.create", ["--quiet"])
Mix.Task.run("ecto.migrate", ["--quiet"])
Ecto.Adapters.SQL.Sandbox.mode(UeberauthExample.Repo, :manual)
| 27 | 62 | 0.703704 |
1c466ad5bc46cfbff5cab498de685823494f99e2 | 355 | ex | Elixir | lib/mix/tasks/exhal.release.ex | mikestok/exhal | 6c0fa5b648b0e934c96f87d15d9a6439cb8f1345 | [
"MIT"
] | 23 | 2016-02-02T14:24:38.000Z | 2021-12-07T16:13:30.000Z | lib/mix/tasks/exhal.release.ex | mikestok/exhal | 6c0fa5b648b0e934c96f87d15d9a6439cb8f1345 | [
"MIT"
] | 41 | 2016-02-03T17:19:42.000Z | 2019-06-04T15:42:18.000Z | lib/mix/tasks/exhal.release.ex | mikestok/exhal | 6c0fa5b648b0e934c96f87d15d9a6439cb8f1345 | [
"MIT"
] | 19 | 2016-02-03T06:04:12.000Z | 2021-11-19T16:44:06.000Z | defmodule Mix.Tasks.ExHal.Release do
@shortdoc "Release the hounds!"
use Mix.Task
alias Mix.Tasks.Hex.Build
def run(_) do
meta = Build.prepare_package()[:meta]
System.cmd("git", ["tag", "v#{meta[:version]}"])
System.cmd("git", ["push", "--tags"])
Mix.Tasks.Hex.Publish.run([])
Mix.Tasks.Hex.Publish.run(["docs"])
end
end
| 20.882353 | 52 | 0.625352 |
1c4677f95ae7c48a575e9d1fdd43d6e968c2342a | 1,936 | exs | Elixir | mix.exs | glennr/scrivener_html | 9f88769589feab96c90be7490d306bf570996eaf | [
"MIT"
] | 1 | 2021-05-08T03:20:03.000Z | 2021-05-08T03:20:03.000Z | mix.exs | glennr/scrivener_html | 9f88769589feab96c90be7490d306bf570996eaf | [
"MIT"
] | null | null | null | mix.exs | glennr/scrivener_html | 9f88769589feab96c90be7490d306bf570996eaf | [
"MIT"
] | 1 | 2022-01-24T15:50:32.000Z | 2022-01-24T15:50:32.000Z | defmodule ScrivenerHtml.Mixfile do
use Mix.Project
@version "1.8.1"
def project do
[
app: :scrivener_html,
version: @version,
elixir: "~> 1.2",
name: "scrivener_html",
source_url: "https://github.com/mgwidmann/scrivener_html",
homepage_url: "https://github.com/mgwidmann/scrivener_html",
elixirc_paths: elixirc_paths(Mix.env()),
build_embedded: Mix.env() == :prod,
start_permanent: Mix.env() == :prod,
description: "HTML helpers for Scrivener",
docs: [
main: Scrivener.HTML,
readme: "README.md"
],
package: package(),
deps: deps(),
aliases: aliases()
]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[
applications: [:logger]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type `mix help deps` for more examples and options
defp deps do
[
{:scrivener, "~> 1.2 or ~> 2.0"},
{:phoenix_html, "~> 2.2"},
{:phoenix, "~> 1.0 and < 1.6.0", optional: true},
{:plug, "~> 1.1"},
{:ex_doc, "~> 0.19", only: :dev},
{:earmark, "~> 1.1", only: :dev}
]
end
defp package do
[
maintainers: ["Matt Widmann"],
licenses: ["MIT"],
links: %{github: "https://github.com/mgwidmann/scrivener_html"}
]
end
defp aliases do
[publish: ["hex.publish", "hex.publish docs", "tag"], tag: &tag_release/1]
end
defp tag_release(_) do
Mix.shell().info("Tagging release as #{@version}")
System.cmd("git", ["tag", "-a", "v#{@version}", "-m", "v#{@version}"])
System.cmd("git", ["push", "--tags"])
end
end
| 25.142857 | 78 | 0.568698 |
1c46ba4910a5e02f857f105d1aca5fc9129a149c | 2,199 | exs | Elixir | config/dev.exs | AmadorZcv/crius_chat | 2ed002414f207536a0d2ab6e53e7d99da9fa920d | [
"MIT"
] | null | null | null | config/dev.exs | AmadorZcv/crius_chat | 2ed002414f207536a0d2ab6e53e7d99da9fa920d | [
"MIT"
] | 3 | 2021-03-09T17:37:23.000Z | 2021-09-01T22:28:38.000Z | config/dev.exs | AmadorZcv/crius_chat | 2ed002414f207536a0d2ab6e53e7d99da9fa920d | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
config :crius_chat, CriusChat.Repo,
username: "postgres",
password: "postgres",
database: "crius_chat_dev",
hostname: "localhost",
show_sensitive_data_on_connection_error: true,
pool_size: 10
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
config :crius_chat, CriusChatWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [
node: [
"node_modules/webpack/bin/webpack.js",
"--mode",
"development",
"--watch-stdin",
cd: Path.expand("../assets", __DIR__)
]
]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :crius_chat, CriusChatWeb.Endpoint,
live_reload: [
patterns: [
~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
~r"priv/gettext/.*(po)$",
~r"lib/crius_chat_web/{live,views}/.*(ex)$",
~r"lib/crius_chat_web/templates/.*(eex)$",
~r{lib/my_app_web/live/.*(ex)$}
]
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime
| 28.192308 | 68 | 0.690769 |
1c46bf7e407fe70c1204078d3bcccd3747b53e48 | 193 | exs | Elixir | server/config/dev.exs | arturoeanton/elixir-dynamic-api | ad44f1b86a4ee18ce36156326fe2a1d52e7858bc | [
"Apache-2.0"
] | null | null | null | server/config/dev.exs | arturoeanton/elixir-dynamic-api | ad44f1b86a4ee18ce36156326fe2a1d52e7858bc | [
"Apache-2.0"
] | null | null | null | server/config/dev.exs | arturoeanton/elixir-dynamic-api | ad44f1b86a4ee18ce36156326fe2a1d52e7858bc | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Development configurations goes here
config :server, port: 8080
# Example of database configuration
# config :server, db_config: %{name: "dev_db", password: "", port: 10000}
| 24.125 | 73 | 0.740933 |
1c46d37dcbad8a48a69cab38dd4732d645baa25e | 2,186 | ex | Elixir | installer/lib/phx_new/umbrella.ex | tgxworld/phoenix | 89b885591431170de73e372d9544d841bedca3db | [
"MIT"
] | 2 | 2020-07-24T08:34:36.000Z | 2021-02-08T03:21:49.000Z | installer/lib/phx_new/umbrella.ex | tgxworld/phoenix | 89b885591431170de73e372d9544d841bedca3db | [
"MIT"
] | null | null | null | installer/lib/phx_new/umbrella.ex | tgxworld/phoenix | 89b885591431170de73e372d9544d841bedca3db | [
"MIT"
] | null | null | null | defmodule Phx.New.Umbrella do
@moduledoc false
use Phx.New.Generator
alias Phx.New.{Ecto, Web, Project}
template :new, [
{:eex, "phx_umbrella/gitignore", :project, ".gitignore"},
{:eex, "phx_umbrella/config/config.exs", :project, "config/config.exs"},
{:eex, "phx_umbrella/config/dev.exs", :project, "config/dev.exs"},
{:eex, "phx_umbrella/config/test.exs", :project, "config/test.exs"},
{:eex, "phx_umbrella/config/prod.exs", :project, "config/prod.exs"},
{:eex, "phx_umbrella/config/prod.secret.exs", :project, "config/prod.secret.exs"},
{:eex, "phx_umbrella/mix.exs", :project, "mix.exs"},
{:eex, "phx_umbrella/README.md", :project, "README.md"},
{:eex, "phx_umbrella/formatter.exs", :project, ".formatter.exs"},
]
def prepare_project(%Project{app: app} = project) when not is_nil(app) do
project
|> put_app()
|> put_web()
|> put_root_app()
end
defp put_app(project) do
project_path = Path.expand(project.base_path <> "_umbrella")
app_path = Path.join(project_path, "apps/#{project.app}")
%Project{project |
in_umbrella?: true,
app_path: app_path,
project_path: project_path}
end
def put_web(%Project{app: app, opts: opts} = project) do
web_app = :"#{app}_web"
web_namespace = Module.concat([opts[:web_module] || "#{project.app_mod}Web"])
%Project{project |
web_app: web_app,
lib_web_name: web_app,
web_namespace: web_namespace,
generators: [context_app: :"#{app}"],
web_path: Path.join(project.project_path, "apps/#{web_app}/")}
end
defp put_root_app(%Project{app: app} = project) do
%Project{project |
root_app: :"#{app}_umbrella",
root_mod: Module.concat(project.app_mod, "Umbrella")}
end
def generate(%Project{} = project) do
if in_umbrella?(project.project_path) do
Mix.raise "Unable to nest umbrella project within apps"
end
copy_from project, __MODULE__, :new
project
|> Web.generate()
|> Ecto.generate()
end
end
| 35.258065 | 87 | 0.60613 |
1c470d4d58bd40376467a00f26deaae0a2317d2a | 38,174 | ex | Elixir | lib/elixir/lib/base.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/elixir/lib/base.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/elixir/lib/base.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | defmodule Base do
import Bitwise
@moduledoc """
This module provides data encoding and decoding functions
according to [RFC 4648](https://tools.ietf.org/html/rfc4648).
This document defines the commonly used base 16, base 32, and base
64 encoding schemes.
## Base 16 alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| 0| 4| 4| 8| 8| 12| C|
| 1| 1| 5| 5| 9| 9| 13| D|
| 2| 2| 6| 6| 10| A| 14| E|
| 3| 3| 7| 7| 11| B| 15| F|
## Base 32 alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| A| 9| J| 18| S| 27| 3|
| 1| B| 10| K| 19| T| 28| 4|
| 2| C| 11| L| 20| U| 29| 5|
| 3| D| 12| M| 21| V| 30| 6|
| 4| E| 13| N| 22| W| 31| 7|
| 5| F| 14| O| 23| X| | |
| 6| G| 15| P| 24| Y| (pad)| =|
| 7| H| 16| Q| 25| Z| | |
| 8| I| 17| R| 26| 2| | |
## Base 32 (extended hex) alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| 0| 9| 9| 18| I| 27| R|
| 1| 1| 10| A| 19| J| 28| S|
| 2| 2| 11| B| 20| K| 29| T|
| 3| 3| 12| C| 21| L| 30| U|
| 4| 4| 13| D| 22| M| 31| V|
| 5| 5| 14| E| 23| N| | |
| 6| 6| 15| F| 24| O| (pad)| =|
| 7| 7| 16| G| 25| P| | |
| 8| 8| 17| H| 26| Q| | |
## Base 64 alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| A| 17| R| 34| i| 51| z|
| 1| B| 18| S| 35| j| 52| 0|
| 2| C| 19| T| 36| k| 53| 1|
| 3| D| 20| U| 37| l| 54| 2|
| 4| E| 21| V| 38| m| 55| 3|
| 5| F| 22| W| 39| n| 56| 4|
| 6| G| 23| X| 40| o| 57| 5|
| 7| H| 24| Y| 41| p| 58| 6|
| 8| I| 25| Z| 42| q| 59| 7|
| 9| J| 26| a| 43| r| 60| 8|
| 10| K| 27| b| 44| s| 61| 9|
| 11| L| 28| c| 45| t| 62| +|
| 12| M| 29| d| 46| u| 63| /|
| 13| N| 30| e| 47| v| | |
| 14| O| 31| f| 48| w| (pad)| =|
| 15| P| 32| g| 49| x| | |
| 16| Q| 33| h| 50| y| | |
## Base 64 (URL and filename safe) alphabet
| Value | Encoding | Value | Encoding | Value | Encoding | Value | Encoding |
|------:|---------:|------:|---------:|------:|---------:|------:|---------:|
| 0| A| 17| R| 34| i| 51| z|
| 1| B| 18| S| 35| j| 52| 0|
| 2| C| 19| T| 36| k| 53| 1|
| 3| D| 20| U| 37| l| 54| 2|
| 4| E| 21| V| 38| m| 55| 3|
| 5| F| 22| W| 39| n| 56| 4|
| 6| G| 23| X| 40| o| 57| 5|
| 7| H| 24| Y| 41| p| 58| 6|
| 8| I| 25| Z| 42| q| 59| 7|
| 9| J| 26| a| 43| r| 60| 8|
| 10| K| 27| b| 44| s| 61| 9|
| 11| L| 28| c| 45| t| 62| -|
| 12| M| 29| d| 46| u| 63| _|
| 13| N| 30| e| 47| v| | |
| 14| O| 31| f| 48| w| (pad)| =|
| 15| P| 32| g| 49| x| | |
| 16| Q| 33| h| 50| y| | |
"""
  # Encoding alphabets (see the tables in the moduledoc). Each is a charlist
  # whose index is the numeric value and whose element is the encoded character.
  b16_alphabet = '0123456789ABCDEF'
  b64_alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
  b64url_alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_'
  b32_alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ234567'
  b32hex_alphabet = '0123456789ABCDEFGHIJKLMNOPQRSTUV'

  # Expands to a `case` that maps a pair of values (packed into one integer)
  # to a pair of encoded characters (also packed into one integer), with the
  # clauses generated at compile time from the alphabet and case mode.
  defmacrop encode_pair(alphabet, case, value) do
    quote do
      case unquote(value) do
        unquote(encode_pair_clauses(alphabet, case))
      end
    end
  end

  # Builds the clause list for `encode_pair/3`. For :sensitive/:upper the
  # alphabet is used as-is.
  defp encode_pair_clauses(alphabet, case) when case in [:sensitive, :upper] do
    shift = shift(alphabet)

    alphabet
    |> Enum.with_index()
    |> encode_clauses(shift)
  end

  # For :lower, downcase the A-Z characters of the alphabet before
  # generating the clauses.
  defp encode_pair_clauses(alphabet, :lower) do
    shift = shift(alphabet)

    alphabet
    |> Stream.map(fn c -> if c in ?A..?Z, do: c - ?A + ?a, else: c end)
    |> Enum.with_index()
    |> encode_clauses(shift)
  end

  # Bits encoded per character: log2 of the alphabet size
  # (4 for base 16, 5 for base 32, 6 for base 64).
  defp shift(alphabet) do
    alphabet
    |> length()
    |> :math.log2()
    |> round()
  end

  # One clause per character pair: matches the packed two-value integer and
  # returns the packed two-byte encoding.
  defp encode_clauses(alphabet, shift) do
    for {encoding1, value1} <- alphabet,
        {encoding2, value2} <- alphabet do
      encoding = bsl(encoding1, 8) + encoding2
      value = bsl(value1, shift) + value2

      [clause] = quote(do: (unquote(value) -> unquote(encoding)))
      clause
    end
  end
  # Expands to a `case` that maps one encoded byte back to its value, with
  # clauses generated at compile time from the alphabet and case mode.
  defmacrop decode_char(alphabet, case, encoding) do
    quote do
      case unquote(encoding) do
        unquote(decode_char_clauses(alphabet, case))
      end
    end
  end

  # :sensitive/:upper: decode the alphabet as-is; any other byte raises.
  defp decode_char_clauses(alphabet, case) when case in [:sensitive, :upper] do
    clauses =
      alphabet
      |> Enum.with_index()
      |> decode_clauses()

    clauses ++ bad_digit_clause()
  end

  # :lower: split the alphabet into its A-Z part (downcased) and the rest,
  # and try the larger of the two groups first.
  defp decode_char_clauses(alphabet, :lower) do
    {uppers, rest} =
      alphabet
      |> Stream.with_index()
      |> Enum.split_with(fn {encoding, _} -> encoding in ?A..?Z end)

    lowers = Enum.map(uppers, fn {encoding, value} -> {encoding - ?A + ?a, value} end)

    if length(uppers) > length(rest) do
      decode_mixed_clauses(lowers, rest)
    else
      decode_mixed_clauses(rest, lowers)
    end
  end

  # :mixed, base 16 (alphabet of 16): try the original alphabet first, then
  # fall back to the downcased A-Z clauses.
  defp decode_char_clauses(alphabet, :mixed) when length(alphabet) == 16 do
    alphabet = Enum.with_index(alphabet)

    lowers =
      alphabet
      |> Stream.filter(fn {encoding, _} -> encoding in ?A..?Z end)
      |> Enum.map(fn {encoding, value} -> {encoding - ?A + ?a, value} end)

    decode_mixed_clauses(alphabet, lowers)
  end

  # :mixed, base 32 (alphabet of 32): emit both an upper- and a lower-case
  # clause per letter in a single flat clause list.
  defp decode_char_clauses(alphabet, :mixed) when length(alphabet) == 32 do
    clauses =
      alphabet
      |> Stream.with_index()
      |> Enum.flat_map(fn {encoding, value} = pair ->
        if encoding in ?A..?Z do
          [pair, {encoding - ?A + ?a, value}]
        else
          [pair]
        end
      end)
      |> decode_clauses()

    clauses ++ bad_digit_clause()
  end

  # Tries the `first` clause set; anything unmatched falls into a nested
  # `case` over `second`, which ends in the bad-digit clause.
  defp decode_mixed_clauses(first, second) do
    first_clauses = decode_clauses(first)
    second_clauses = decode_clauses(second) ++ bad_digit_clause()

    join_clause =
      quote do
        encoding ->
          case encoding do
            unquote(second_clauses)
          end
      end

    first_clauses ++ join_clause
  end

  # One clause per character: matches the encoded byte, returns its value.
  defp decode_clauses(alphabet) do
    for {encoding, value} <- alphabet do
      [clause] = quote(do: (unquote(encoding) -> unquote(value)))
      clause
    end
  end

  # Catch-all clause raising on bytes outside the alphabet.
  defp bad_digit_clause() do
    quote do
      c ->
        raise ArgumentError,
              "non-alphabet digit found: #{inspect(<<c>>, binaries: :as_strings)} (byte #{c})"
    end
  end
defp maybe_pad(body, "", _, _), do: body
defp maybe_pad(body, tail, false, _), do: body <> tail
defp maybe_pad(body, tail, _, group_size) do
case group_size - rem(byte_size(tail), group_size) do
^group_size -> body <> tail
6 -> body <> tail <> "======"
5 -> body <> tail <> "====="
4 -> body <> tail <> "===="
3 -> body <> tail <> "==="
2 -> body <> tail <> "=="
1 -> body <> tail <> "="
end
end
@doc """
Encodes a binary string into a base 16 encoded string.

## Options

The accepted options are:

  * `:case` - specifies the character case to use when encoding

The values for `:case` can be:

  * `:upper` - uses upper case characters (default)
  * `:lower` - uses lower case characters

## Examples

    iex> Base.encode16("foobar")
    "666F6F626172"

    iex> Base.encode16("foobar", case: :lower)
    "666f6f626172"

"""
@spec encode16(binary, keyword) :: binary
def encode16(data, opts \\ []) when is_binary(data) do
  opts
  |> Keyword.get(:case, :upper)
  |> do_encode16(data)
end
@doc """
Decodes a base 16 encoded string into a binary string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

## Examples

    iex> Base.decode16("666F6F626172")
    {:ok, "foobar"}

    iex> Base.decode16("666f6f626172", case: :lower)
    {:ok, "foobar"}

    iex> Base.decode16("666f6F626172", case: :mixed)
    {:ok, "foobar"}

"""
@spec decode16(binary, keyword) :: {:ok, binary} | :error
# Guard added for consistency with decode64/2 and url_decode64/2, which
# already reject non-binary input at the public entry point instead of
# failing deeper inside decode16!/2.
def decode16(string, opts \\ []) when is_binary(string) do
  {:ok, decode16!(string, opts)}
rescue
  # decode16!/2 signals bad digits or odd length via ArgumentError.
  ArgumentError -> :error
end
@doc """
Decodes a base 16 encoded string into a binary string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Examples

    iex> Base.decode16!("666F6F626172")
    "foobar"

    iex> Base.decode16!("666f6f626172", case: :lower)
    "foobar"

    iex> Base.decode16!("666f6F626172", case: :mixed)
    "foobar"

"""
@spec decode16!(binary, keyword) :: binary
def decode16!(string, opts \\ [])

# Valid base 16 input always has an even number of digits.
def decode16!(string, opts) when is_binary(string) and rem(byte_size(string), 2) == 0 do
  opts
  |> Keyword.get(:case, :upper)
  |> do_decode16(string)
end

def decode16!(string, _opts) when is_binary(string) do
  raise ArgumentError, "odd-length string"
end
@doc """
Encodes a binary string into a base 64 encoded string.

Accepts `padding: false` option which will omit padding from
the output string.

## Examples

    iex> Base.encode64("foobar")
    "Zm9vYmFy"

    iex> Base.encode64("foob")
    "Zm9vYg=="

    iex> Base.encode64("foob", padding: false)
    "Zm9vYg"

"""
@spec encode64(binary, keyword) :: binary
def encode64(data, opts \\ []) when is_binary(data) do
  do_encode64(data, Keyword.get(opts, :padding, true))
end
@doc """
Decodes a base 64 encoded string into a binary string.

Accepts `ignore: :whitespace` option which will ignore all the
whitespace characters in the input string.

Accepts `padding: false` option which will ignore padding from
the input string.

## Examples

    iex> Base.decode64("Zm9vYmFy")
    {:ok, "foobar"}

    iex> Base.decode64("Zm9vYmFy\\n", ignore: :whitespace)
    {:ok, "foobar"}

    iex> Base.decode64("Zm9vYg==")
    {:ok, "foob"}

    iex> Base.decode64("Zm9vYg", padding: false)
    {:ok, "foob"}

"""
@spec decode64(binary, keyword) :: {:ok, binary} | :error
def decode64(string, opts \\ []) when is_binary(string) do
  # Wrap the raising variant; any ArgumentError becomes :error.
  try do
    {:ok, decode64!(string, opts)}
  rescue
    ArgumentError -> :error
  end
end
@doc """
Decodes a base 64 encoded string into a binary string.

Accepts `ignore: :whitespace` option which will ignore all the
whitespace characters in the input string.

Accepts `padding: false` option which will ignore padding from
the input string.

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Examples

    iex> Base.decode64!("Zm9vYmFy")
    "foobar"

    iex> Base.decode64!("Zm9vYmFy\\n", ignore: :whitespace)
    "foobar"

    iex> Base.decode64!("Zm9vYg==")
    "foob"

    iex> Base.decode64!("Zm9vYg", padding: false)
    "foob"

"""
@spec decode64!(binary, keyword) :: binary
def decode64!(string, opts \\ []) when is_binary(string) do
  string
  |> remove_ignored(Keyword.get(opts, :ignore))
  |> do_decode64(Keyword.get(opts, :padding, true))
end
@doc """
Encodes a binary string into a base 64 encoded string with URL and filename
safe alphabet.

Accepts `padding: false` option which will omit padding from
the output string.

## Examples

    iex> Base.url_encode64(<<255, 127, 254, 252>>)
    "_3_-_A=="

    iex> Base.url_encode64(<<255, 127, 254, 252>>, padding: false)
    "_3_-_A"

"""
@spec url_encode64(binary, keyword) :: binary
def url_encode64(data, opts \\ []) when is_binary(data) do
  do_encode64url(data, Keyword.get(opts, :padding, true))
end
@doc """
Decodes a base 64 encoded string with URL and filename safe alphabet
into a binary string.

Accepts `ignore: :whitespace` option which will ignore all the
whitespace characters in the input string.

Accepts `padding: false` option which will ignore padding from
the input string.

## Examples

    iex> Base.url_decode64("_3_-_A==")
    {:ok, <<255, 127, 254, 252>>}

    iex> Base.url_decode64("_3_-_A==\\n", ignore: :whitespace)
    {:ok, <<255, 127, 254, 252>>}

    iex> Base.url_decode64("_3_-_A", padding: false)
    {:ok, <<255, 127, 254, 252>>}

"""
@spec url_decode64(binary, keyword) :: {:ok, binary} | :error
def url_decode64(string, opts \\ []) when is_binary(string) do
  # Wrap the raising variant; any ArgumentError becomes :error.
  try do
    {:ok, url_decode64!(string, opts)}
  rescue
    ArgumentError -> :error
  end
end
@doc """
Decodes a base 64 encoded string with URL and filename safe alphabet
into a binary string.

Accepts `ignore: :whitespace` option which will ignore all the
whitespace characters in the input string.

Accepts `padding: false` option which will ignore padding from
the input string.

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Examples

    iex> Base.url_decode64!("_3_-_A==")
    <<255, 127, 254, 252>>

    iex> Base.url_decode64!("_3_-_A==\\n", ignore: :whitespace)
    <<255, 127, 254, 252>>

    iex> Base.url_decode64!("_3_-_A", padding: false)
    <<255, 127, 254, 252>>

"""
@spec url_decode64!(binary, keyword) :: binary
def url_decode64!(string, opts \\ []) when is_binary(string) do
  string
  |> remove_ignored(Keyword.get(opts, :ignore))
  |> do_decode64url(Keyword.get(opts, :padding, true))
end
@doc """
Encodes a binary string into a base 32 encoded string.

## Options

The accepted options are:

  * `:case` - specifies the character case to use when encoding
  * `:padding` - specifies whether to apply padding

The values for `:case` can be:

  * `:upper` - uses upper case characters (default)
  * `:lower` - uses lower case characters

The values for `:padding` can be:

  * `true` - pad the output string to the nearest multiple of 8 (default)
  * `false` - omit padding from the output string

## Examples

    iex> Base.encode32("foobar")
    "MZXW6YTBOI======"

    iex> Base.encode32("foobar", case: :lower)
    "mzxw6ytboi======"

    iex> Base.encode32("foobar", padding: false)
    "MZXW6YTBOI"

"""
@spec encode32(binary, keyword) :: binary
def encode32(data, opts \\ []) when is_binary(data) do
  letter_case = Keyword.get(opts, :case, :upper)
  do_encode32(letter_case, data, Keyword.get(opts, :padding, true))
end
@doc """
Decodes a base 32 encoded string into a binary string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding
  * `:padding` - specifies whether to require padding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

The values for `:padding` can be:

  * `true` - requires the input string to be padded to the nearest multiple of 8 (default)
  * `false` - ignores padding from the input string

## Examples

    iex> Base.decode32("MZXW6YTBOI======")
    {:ok, "foobar"}

    iex> Base.decode32("mzxw6ytboi======", case: :lower)
    {:ok, "foobar"}

    iex> Base.decode32("mzXW6ytBOi======", case: :mixed)
    {:ok, "foobar"}

    iex> Base.decode32("MZXW6YTBOI", padding: false)
    {:ok, "foobar"}

"""
@spec decode32(binary, keyword) :: {:ok, binary} | :error
# Guard added for consistency with decode64/2, which already rejects
# non-binary input at the public entry point.
def decode32(string, opts \\ []) when is_binary(string) do
  {:ok, decode32!(string, opts)}
rescue
  # decode32!/2 signals bad digits or bad padding via ArgumentError.
  ArgumentError -> :error
end
@doc """
Decodes a base 32 encoded string into a binary string.

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding
  * `:padding` - specifies whether to require padding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

The values for `:padding` can be:

  * `true` - requires the input string to be padded to the nearest multiple of 8 (default)
  * `false` - ignores padding from the input string

## Examples

    iex> Base.decode32!("MZXW6YTBOI======")
    "foobar"

    iex> Base.decode32!("mzxw6ytboi======", case: :lower)
    "foobar"

    iex> Base.decode32!("mzXW6ytBOi======", case: :mixed)
    "foobar"

    iex> Base.decode32!("MZXW6YTBOI", padding: false)
    "foobar"

"""
@spec decode32!(binary, keyword) :: binary
def decode32!(string, opts \\ []) when is_binary(string) do
  letter_case = Keyword.get(opts, :case, :upper)
  pad? = Keyword.get(opts, :padding, true)
  do_decode32(letter_case, string, pad?)
end
@doc """
Encodes a binary string into a base 32 encoded string with an
extended hexadecimal alphabet.

## Options

The accepted options are:

  * `:case` - specifies the character case to use when encoding
  * `:padding` - specifies whether to apply padding

The values for `:case` can be:

  * `:upper` - uses upper case characters (default)
  * `:lower` - uses lower case characters

The values for `:padding` can be:

  * `true` - pad the output string to the nearest multiple of 8 (default)
  * `false` - omit padding from the output string

## Examples

    iex> Base.hex_encode32("foobar")
    "CPNMUOJ1E8======"

    iex> Base.hex_encode32("foobar", case: :lower)
    "cpnmuoj1e8======"

    iex> Base.hex_encode32("foobar", padding: false)
    "CPNMUOJ1E8"

"""
@spec hex_encode32(binary, keyword) :: binary
def hex_encode32(data, opts \\ []) when is_binary(data) do
  letter_case = Keyword.get(opts, :case, :upper)
  do_encode32hex(letter_case, data, Keyword.get(opts, :padding, true))
end
@doc """
Decodes a base 32 encoded string with extended hexadecimal alphabet
into a binary string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding
  * `:padding` - specifies whether to require padding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

The values for `:padding` can be:

  * `true` - requires the input string to be padded to the nearest multiple of 8 (default)
  * `false` - ignores padding from the input string

## Examples

    iex> Base.hex_decode32("CPNMUOJ1E8======")
    {:ok, "foobar"}

    iex> Base.hex_decode32("cpnmuoj1e8======", case: :lower)
    {:ok, "foobar"}

    iex> Base.hex_decode32("cpnMuOJ1E8======", case: :mixed)
    {:ok, "foobar"}

    iex> Base.hex_decode32("CPNMUOJ1E8", padding: false)
    {:ok, "foobar"}

"""
@spec hex_decode32(binary, keyword) :: {:ok, binary} | :error
# Guard added for consistency with decode64/2, which already rejects
# non-binary input at the public entry point.
def hex_decode32(string, opts \\ []) when is_binary(string) do
  {:ok, hex_decode32!(string, opts)}
rescue
  # hex_decode32!/2 signals bad digits or bad padding via ArgumentError.
  ArgumentError -> :error
end
@doc """
Decodes a base 32 encoded string with extended hexadecimal alphabet
into a binary string.

An `ArgumentError` exception is raised if the padding is incorrect or
a non-alphabet character is present in the string.

## Options

The accepted options are:

  * `:case` - specifies the character case to accept when decoding
  * `:padding` - specifies whether to require padding

The values for `:case` can be:

  * `:upper` - only allows upper case characters (default)
  * `:lower` - only allows lower case characters
  * `:mixed` - allows mixed case characters

The values for `:padding` can be:

  * `true` - requires the input string to be padded to the nearest multiple of 8 (default)
  * `false` - ignores padding from the input string

## Examples

    iex> Base.hex_decode32!("CPNMUOJ1E8======")
    "foobar"

    iex> Base.hex_decode32!("cpnmuoj1e8======", case: :lower)
    "foobar"

    iex> Base.hex_decode32!("cpnMuOJ1E8======", case: :mixed)
    "foobar"

    iex> Base.hex_decode32!("CPNMUOJ1E8", padding: false)
    "foobar"

"""
@spec hex_decode32!(binary, keyword) :: binary
def hex_decode32!(string, opts \\ []) when is_binary(string) do
  letter_case = Keyword.get(opts, :case, :upper)
  pad? = Keyword.get(opts, :padding, true)
  do_decode32hex(letter_case, string, pad?)
end
# Strips characters per the `:ignore` option; `nil` means keep everything.
defp remove_ignored(string, nil), do: string

# Drops ASCII whitespace (space, tab, CR, LF) before decoding.
defp remove_ignored(string, :whitespace) do
  for <<byte::8 <- string>>, byte not in [?\s, ?\t, ?\r, ?\n], into: <<>>, do: <<byte::8>>
end
# Compile-time table mapping the `:case` option to the generated base 16
# single-byte encoder name.
enc16 = [upper: :enc16_upper, lower: :enc16_lower]
# Define enc16_upper/1 and enc16_lower/1: each turns one input byte into
# a 16-bit pair of hex digits via encode_pair/3 (a lookup built from
# b16_alphabet, bound earlier in this module).
for {case, fun} <- enc16 do
  defp unquote(fun)(char) do
    encode_pair(unquote(b16_alphabet), unquote(case), char)
  end
end
defp do_encode16(_, <<>>), do: <<>>
# One do_encode16/2 clause per case: the main loop encodes 8 input bytes
# per iteration (manually unrolled for speed); the trailing `case`
# handles the remaining 0..7 bytes individually.
for {case, fun} <- enc16 do
  defp do_encode16(unquote(case), data) do
    # Largest prefix whose size is a multiple of 8 bytes.
    split = 8 * div(byte_size(data), 8)
    <<main::size(split)-binary, rest::binary>> = data
    main =
      for <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8 <- main>>, into: <<>> do
        <<
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16,
          unquote(fun)(c5)::16,
          unquote(fun)(c6)::16,
          unquote(fun)(c7)::16,
          unquote(fun)(c8)::16
        >>
      end
    # Leftover bytes (0..7), one clause per possible length.
    case rest do
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8>> ->
        <<
          main::binary,
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16,
          unquote(fun)(c5)::16,
          unquote(fun)(c6)::16,
          unquote(fun)(c7)::16
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8>> ->
        <<
          main::binary,
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16,
          unquote(fun)(c5)::16,
          unquote(fun)(c6)::16
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8>> ->
        <<
          main::binary,
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16,
          unquote(fun)(c5)::16
        >>
      <<c1::8, c2::8, c3::8, c4::8>> ->
        <<
          main::binary,
          unquote(fun)(c1)::16,
          unquote(fun)(c2)::16,
          unquote(fun)(c3)::16,
          unquote(fun)(c4)::16
        >>
      <<c1::8, c2::8, c3::8>> ->
        <<main::binary, unquote(fun)(c1)::16, unquote(fun)(c2)::16, unquote(fun)(c3)::16>>
      <<c1::8, c2::8>> ->
        <<main::binary, unquote(fun)(c1)::16, unquote(fun)(c2)::16>>
      <<c1::8>> ->
        <<main::binary, unquote(fun)(c1)::16>>
      <<>> ->
        main
    end
  end
end
# Compile-time table mapping the `:case` option to the generated base 16
# single-digit decoder name.
dec16 = [upper: :dec16_upper, lower: :dec16_lower, mixed: :dec16_mixed]
# Define dec16_upper/1, dec16_lower/1 and dec16_mixed/1: each maps one
# encoded byte back to its 4-bit value via decode_char/3 (raises on a
# non-alphabet byte).
for {case, fun} <- dec16 do
  defp unquote(fun)(encoding) do
    decode_char(unquote(b16_alphabet), unquote(case), encoding)
  end
end
defp do_decode16(_, <<>>), do: <<>>
# One do_decode16/2 clause per case: decode 8 digits (4 output bytes)
# per unrolled iteration, then handle the 0..7 leftover digits. An odd
# leftover digit is an error.
for {case, fun} <- dec16 do
  defp do_decode16(unquote(case), string) do
    # Largest prefix whose size is a multiple of 8 digits.
    split = 8 * div(byte_size(string), 8)
    <<main::size(split)-binary, rest::binary>> = string
    main =
      for <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8 <- main>>, into: <<>> do
        <<
          unquote(fun)(c1)::4,
          unquote(fun)(c2)::4,
          unquote(fun)(c3)::4,
          unquote(fun)(c4)::4,
          unquote(fun)(c5)::4,
          unquote(fun)(c6)::4,
          unquote(fun)(c7)::4,
          unquote(fun)(c8)::4
        >>
      end
    case rest do
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::4,
          unquote(fun)(c2)::4,
          unquote(fun)(c3)::4,
          unquote(fun)(c4)::4,
          unquote(fun)(c5)::4,
          unquote(fun)(c6)::4
        >>
      <<c1::8, c2::8, c3::8, c4::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::4,
          unquote(fun)(c2)::4,
          unquote(fun)(c3)::4,
          unquote(fun)(c4)::4
        >>
      <<c1::8, c2::8>> ->
        <<main::bits, unquote(fun)(c1)::4, unquote(fun)(c2)::4>>
      <<_::8>> ->
        raise ArgumentError, "odd-length string"
      <<>> ->
        main
    end
  end
end
# Generates the base 64 encoders for both the standard and the
# URL-safe alphabet: enc64_pair/1 (two digits at once, 16 bits),
# enc64_char/1 (single digit, low byte of the pair) and do_encode64/2,
# plus the "64url" variants.
for {base, alphabet} <- ["64": b64_alphabet, "64url": b64url_alphabet] do
  pair = :"enc#{base}_pair"
  char = :"enc#{base}_char"
  do_encode = :"do_encode#{base}"
  defp unquote(pair)(value) do
    encode_pair(unquote(alphabet), :sensitive, value)
  end
  # Single-digit encoder: take the low byte of the two-digit pair.
  defp unquote(char)(value) do
    value
    |> unquote(pair)()
    |> band(0x00FF)
  end
  defp unquote(do_encode)(<<>>, _), do: <<>>
  defp unquote(do_encode)(data, pad?) do
    # Largest prefix whose size is a multiple of 6 bytes (8 output digits).
    split = 6 * div(byte_size(data), 6)
    <<main::size(split)-binary, rest::binary>> = data
    main =
      for <<c1::12, c2::12, c3::12, c4::12 <- main>>, into: <<>> do
        <<
          unquote(pair)(c1)::16,
          unquote(pair)(c2)::16,
          unquote(pair)(c3)::16,
          unquote(pair)(c4)::16
        >>
      end
    # Encode the 0..5 leftover bytes; partial 6-bit groups are
    # left-shifted into position before encoding.
    tail =
      case rest do
        <<c1::12, c2::12, c3::12, c::4>> ->
          <<
            unquote(pair)(c1)::16,
            unquote(pair)(c2)::16,
            unquote(pair)(c3)::16,
            unquote(char)(bsl(c, 2))::8
          >>
        <<c1::12, c2::12, c3::8>> ->
          <<unquote(pair)(c1)::16, unquote(pair)(c2)::16, unquote(pair)(bsl(c3, 4))::16>>
        <<c1::12, c2::12>> ->
          <<unquote(pair)(c1)::16, unquote(pair)(c2)::16>>
        <<c1::12, c2::4>> ->
          <<unquote(pair)(c1)::16, unquote(char)(bsl(c2, 2))::8>>
        <<c1::8>> ->
          <<unquote(pair)(bsl(c1, 4))::16>>
        <<>> ->
          <<>>
      end
    # "=" padding up to a multiple of 4 digits, if requested.
    maybe_pad(main, tail, pad?, 4)
  end
end
# Generates the base 64 decoders for both alphabets: dec64/1 maps one
# digit to its 6-bit value; do_decode64/2 decodes 8 digits (6 bytes) per
# unrolled iteration and then dispatches on the trailing chunk, which may
# carry "=" padding (or none, when `pad?` is false).
for {base, alphabet} <- ["64": b64_alphabet, "64url": b64url_alphabet] do
  fun = :"dec#{base}"
  do_decode = :"do_decode#{base}"
  defp unquote(fun)(encoding) do
    decode_char(unquote(alphabet), :sensitive, encoding)
  end
  defp unquote(do_decode)(<<>>, _), do: <<>>
  defp unquote(do_decode)(string, pad?) do
    # Number of full 8-digit segments, keeping the last (possibly padded)
    # chunk of up to 8 digits in `rest`.
    segs = div(byte_size(string) + 7, 8) - 1
    <<main::size(segs)-binary-unit(64), rest::binary>> = string
    main =
      for <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8 <- main>>, into: <<>> do
        <<
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          unquote(fun)(c6)::6,
          unquote(fun)(c7)::6,
          unquote(fun)(c8)::6
        >>
      end
    # Final chunk: padded forms first, full chunks, then the unpadded
    # forms allowed only when pad? is false; anything else is an error.
    case rest do
      <<c1::8, c2::8, ?=, ?=>> ->
        <<main::bits, unquote(fun)(c1)::6, bsr(unquote(fun)(c2), 4)::2>>
      <<c1::8, c2::8, c3::8, ?=>> ->
        <<main::bits, unquote(fun)(c1)::6, unquote(fun)(c2)::6, bsr(unquote(fun)(c3), 2)::4>>
      <<c1::8, c2::8, c3::8, c4::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, ?=, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          bsr(unquote(fun)(c6), 4)::2
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          unquote(fun)(c6)::6,
          bsr(unquote(fun)(c7), 2)::4
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          unquote(fun)(c6)::6,
          unquote(fun)(c7)::6,
          unquote(fun)(c8)::6
        >>
      <<c1::8, c2::8>> when not pad? ->
        <<main::bits, unquote(fun)(c1)::6, bsr(unquote(fun)(c2), 4)::2>>
      <<c1::8, c2::8, c3::8>> when not pad? ->
        <<main::bits, unquote(fun)(c1)::6, unquote(fun)(c2)::6, bsr(unquote(fun)(c3), 2)::4>>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          bsr(unquote(fun)(c6), 4)::2
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::6,
          unquote(fun)(c2)::6,
          unquote(fun)(c3)::6,
          unquote(fun)(c4)::6,
          unquote(fun)(c5)::6,
          unquote(fun)(c6)::6,
          bsr(unquote(fun)(c7), 2)::4
        >>
      _ ->
        raise ArgumentError, "incorrect padding"
    end
  end
end
# Generates the base 32 encoders for both alphabets (standard and
# extended hex) and both character cases: pair/char digit encoders plus
# one do_encode32/3 (or do_encode32hex/3) clause per case.
for {base, alphabet} <- ["32": b32_alphabet, "32hex": b32hex_alphabet],
    case <- [:upper, :lower] do
  pair = :"enc#{base}_#{case}_pair"
  char = :"enc#{base}_#{case}_char"
  do_encode = :"do_encode#{base}"
  defp unquote(pair)(value) do
    encode_pair(unquote(alphabet), unquote(case), value)
  end
  # Single-digit encoder: take the low byte of the two-digit pair.
  defp unquote(char)(value) do
    value
    |> unquote(pair)()
    |> band(0x00FF)
  end
  defp unquote(do_encode)(_, <<>>, _), do: <<>>
  defp unquote(do_encode)(unquote(case), data, pad?) do
    # Largest prefix whose size is a multiple of 5 bytes (8 output digits).
    split = 5 * div(byte_size(data), 5)
    <<main::size(split)-binary, rest::binary>> = data
    main =
      for <<c1::10, c2::10, c3::10, c4::10 <- main>>, into: <<>> do
        <<
          unquote(pair)(c1)::16,
          unquote(pair)(c2)::16,
          unquote(pair)(c3)::16,
          unquote(pair)(c4)::16
        >>
      end
    # Encode the 0..4 leftover bytes; partial 5-bit groups are
    # left-shifted into position before encoding.
    tail =
      case rest do
        <<c1::10, c2::10, c3::10, c4::2>> ->
          <<
            unquote(pair)(c1)::16,
            unquote(pair)(c2)::16,
            unquote(pair)(c3)::16,
            unquote(char)(bsl(c4, 3))::8
          >>
        <<c1::10, c2::10, c3::4>> ->
          <<unquote(pair)(c1)::16, unquote(pair)(c2)::16, unquote(char)(bsl(c3, 1))::8>>
        <<c1::10, c2::6>> ->
          <<unquote(pair)(c1)::16, unquote(pair)(bsl(c2, 4))::16>>
        <<c1::8>> ->
          <<unquote(pair)(bsl(c1, 2))::16>>
        <<>> ->
          <<>>
      end
    # "=" padding up to a multiple of 8 digits, if requested.
    maybe_pad(main, tail, pad?, 8)
  end
end
# Generates the base 32 decoders for both alphabets and all three case
# modes: one digit decoder per (alphabet, case) plus one do_decode32/3
# (or do_decode32hex/3) clause per case. 8 digits (5 bytes) are decoded
# per unrolled iteration; the final chunk may carry "=" padding.
for {base, alphabet} <- ["32": b32_alphabet, "32hex": b32hex_alphabet],
    case <- [:upper, :lower, :mixed] do
  fun = :"dec#{base}_#{case}"
  do_decode = :"do_decode#{base}"
  defp unquote(fun)(encoding) do
    decode_char(unquote(alphabet), unquote(case), encoding)
  end
  defp unquote(do_decode)(_, <<>>, _), do: <<>>
  defp unquote(do_decode)(unquote(case), string, pad?) do
    # Number of full 8-digit segments, keeping the last (possibly padded)
    # chunk of up to 8 digits in `rest`.
    segs = div(byte_size(string) + 7, 8) - 1
    <<main::size(segs)-binary-unit(64), rest::binary>> = string
    main =
      for <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8 <- main>>, into: <<>> do
        <<
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          unquote(fun)(c5)::5,
          unquote(fun)(c6)::5,
          unquote(fun)(c7)::5,
          unquote(fun)(c8)::5
        >>
      end
    # Final chunk: padded forms first, the full chunk, then unpadded
    # forms allowed only when pad? is false; anything else is an error.
    case rest do
      <<c1::8, c2::8, ?=, ?=, ?=, ?=, ?=, ?=>> ->
        <<main::bits, unquote(fun)(c1)::5, bsr(unquote(fun)(c2), 2)::3>>
      <<c1::8, c2::8, c3::8, c4::8, ?=, ?=, ?=, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          bsr(unquote(fun)(c4), 4)::1
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, ?=, ?=, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          bsr(unquote(fun)(c5), 1)::4
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, ?=>> ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          unquote(fun)(c5)::5,
          unquote(fun)(c6)::5,
          bsr(unquote(fun)(c7), 3)::2
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8, c8::8>> ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          unquote(fun)(c5)::5,
          unquote(fun)(c6)::5,
          unquote(fun)(c7)::5,
          unquote(fun)(c8)::5
        >>
      <<c1::8, c2::8>> when not pad? ->
        <<main::bits, unquote(fun)(c1)::5, bsr(unquote(fun)(c2), 2)::3>>
      <<c1::8, c2::8, c3::8, c4::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          bsr(unquote(fun)(c4), 4)::1
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          bsr(unquote(fun)(c5), 1)::4
        >>
      <<c1::8, c2::8, c3::8, c4::8, c5::8, c6::8, c7::8>> when not pad? ->
        <<
          main::bits,
          unquote(fun)(c1)::5,
          unquote(fun)(c2)::5,
          unquote(fun)(c3)::5,
          unquote(fun)(c4)::5,
          unquote(fun)(c5)::5,
          unquote(fun)(c6)::5,
          bsr(unquote(fun)(c7), 3)::2
        >>
      _ ->
        raise ArgumentError, "incorrect padding"
    end
  end
end
end
| 29.638199 | 95 | 0.485697 |
1c474af9ebff999bade9dadd7163e26460bad116 | 1,618 | ex | Elixir | lib/freshcom/fixture.ex | dclausen/freshcom | 7e1d6397c8ab222cfd03830232cee0718f050490 | [
"BSD-3-Clause"
] | null | null | null | lib/freshcom/fixture.ex | dclausen/freshcom | 7e1d6397c8ab222cfd03830232cee0718f050490 | [
"BSD-3-Clause"
] | null | null | null | lib/freshcom/fixture.ex | dclausen/freshcom | 7e1d6397c8ab222cfd03830232cee0718f050490 | [
"BSD-3-Clause"
] | null | null | null | defmodule Freshcom.Fixture do
alias Faker.{Internet, Name}
alias Freshcom.{Request, Identity}
def standard_user(opts \\ []) do
req = %Request{
fields: %{
name: Name.name(),
username: Internet.user_name(),
email: Internet.email(),
password: "test1234",
is_term_accepted: true
},
include: opts[:include],
_role_: "system"
}
{:ok, %{data: user}} = Identity.register_user(req)
user
end
def managed_user(account_id, fields \\ []) do
req = %Request{
account_id: account_id,
fields: %{
"username" => fields[:username] || Internet.user_name(),
"role" => fields[:role] || "developer",
"password" => fields[:password] || "test1234"
},
_role_: "system"
}
{:ok, %{data: user}} = Identity.add_user(req)
user
end
def standard_app(account_id) do
req = %Request{
account_id: account_id,
fields: %{
"type" => "standard",
"name" => "Standard App",
},
_role_: "system"
}
{:ok, %{data: app}} = Identity.add_app(req)
app
end
def system_app() do
req = %Request{
fields: %{
"type" => "system",
"name" => "System App"
},
_role_: "system"
}
{:ok, %{data: app}} = Identity.add_app(req)
app
end
def password_reset_token(user_id, account_id \\ nil) do
req = %Request{
account_id: account_id,
identifiers: %{"id" => user_id},
_role_: "system"
}
{:ok, %{data: user}} = Identity.generate_password_reset_token(req)
user
end
end | 20.481013 | 70 | 0.54759 |
1c475e0f193fe08eff0cf86533b11ad4754f6709 | 207 | ex | Elixir | lib/erlef_web/views/working_group_view.ex | dhadka/website | e67c23d7052b4ef00a1af52b0b9ebc952d34776e | [
"Apache-2.0"
] | null | null | null | lib/erlef_web/views/working_group_view.ex | dhadka/website | e67c23d7052b4ef00a1af52b0b9ebc952d34776e | [
"Apache-2.0"
] | null | null | null | lib/erlef_web/views/working_group_view.ex | dhadka/website | e67c23d7052b4ef00a1af52b0b9ebc952d34776e | [
"Apache-2.0"
] | null | null | null | defmodule ErlefWeb.WorkingGroupView do
use ErlefWeb, :view
def content("embedded-systems") do
[
{"BEAM on the Embedded Landscape 2020", "landscape"}
]
end
def content(_), do: nil
end
| 17.25 | 58 | 0.666667 |
1c476a6449cb077e6344318ecd71888f6fe429bc | 1,622 | ex | Elixir | clients/dataflow/lib/google_api/dataflow/v1b3/model/name_and_kind.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/name_and_kind.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dataflow/lib/google_api/dataflow/v1b3/model/name_and_kind.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Dataflow.V1b3.Model.NameAndKind do
  @moduledoc """
  Basic metadata about a counter.

  ## Attributes

  - kind (String.t): Counter aggregation kind. Defaults to: `null`.
    - Enum - one of [INVALID, SUM, MAX, MIN, MEAN, OR, AND, SET, DISTRIBUTION, LATEST_VALUE]
  - name (String.t): Name of the counter. Defaults to: `null`.
  """

  # Auto-generated model: ModelBase supplies the struct definition and
  # decode/encode plumbing for the `field` declarations below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :kind => any(),
          :name => any()
        }

  field(:kind)
  field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.Dataflow.V1b3.Model.NameAndKind do
  # Delegate decoding to the generated decode/2 on the model module.
  def decode(value, options),
    do: GoogleApi.Dataflow.V1b3.Model.NameAndKind.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Dataflow.V1b3.Model.NameAndKind do
  # Encoding is handled generically by Gax.ModelBase.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 31.192308 | 92 | 0.722565 |
1c47cf18132d5e4c7f3b159c2c360ae892c828dd | 978 | exs | Elixir | blank/config/config.exs | unozerocode/turbo-journey | 55c54e4b10bb1e49a1f999fd0eb03acdd35fef98 | [
"MIT"
] | null | null | null | blank/config/config.exs | unozerocode/turbo-journey | 55c54e4b10bb1e49a1f999fd0eb03acdd35fef98 | [
"MIT"
] | 2 | 2021-03-10T20:40:33.000Z | 2021-05-11T16:13:21.000Z | blank/config/config.exs | unozerocode/turbo-journey | 55c54e4b10bb1e49a1f999fd0eb03acdd35fef98 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
use Mix.Config
# Configures the endpoint
config :blank, BlankWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "rCXyHEKMbqYjGgNokwKmkl7VVJ06wp7fn/nvX3QnAQ7TwrNdzEMpX1p7ov9Y3xxU",
render_errors: [view: BlankWeb.ErrorView, accepts: ~w(html json), layout: false],
pubsub_server: Blank.PubSub,
live_view: [signing_salt: "pWMEVZby"]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 33.724138 | 86 | 0.770961 |
1c4815c03f9fd89d2cdc15728a0581ca2120d2e1 | 2,564 | ex | Elixir | lib/absinthe/execution/input.ex | scrogson/absinthe | aa7e9c83dc10603c72f80e09a60d12495bc1c6b7 | [
"Unlicense"
] | 1 | 2019-05-07T15:05:52.000Z | 2019-05-07T15:05:52.000Z | lib/absinthe/execution/input.ex | scrogson/absinthe | aa7e9c83dc10603c72f80e09a60d12495bc1c6b7 | [
"Unlicense"
] | null | null | null | lib/absinthe/execution/input.ex | scrogson/absinthe | aa7e9c83dc10603c72f80e09a60d12495bc1c6b7 | [
"Unlicense"
] | null | null | null | defmodule Absinthe.Execution.Input do
@moduledoc false
# Common functionality for Arguments and Variables
alias Absinthe.Execution
alias __MODULE__.Meta
def process(input_type, meta, execution) do
name = input_type
|> Atom.to_string
|> String.capitalize
{execution, missing} = process_errors(execution, meta, input_type, :missing, fn type_name ->
&"#{name} `#{&1}' (#{type_name}): Not provided"
end)
{execution, invalid} = process_errors(execution, meta, input_type, :invalid, fn type_name ->
&"#{name} `#{&1}' (#{type_name}): Invalid value provided"
end)
{execution, _} = process_errors(execution, meta, input_type, :extra, &"#{name} `#{&1}': Not present in schema")
{execution, _} = process_errors(execution, meta, input_type, :deprecated, nil)
case Enum.any?(missing) || Enum.any?(invalid) do
true ->
{:error, missing, invalid, execution}
false ->
{:ok, execution}
end
end
def process_errors(execution, meta, kind, key, default_msg) do
meta
|> Map.fetch!(key)
|> Enum.reduce({execution, []}, fn
%{name: name, ast: ast, type: type, msg: msg}, {exec, names} ->
name = name |> dotted_name(execution.adapter)
exec = exec |> Execution.put_error(kind, name, error_message(msg || default_msg, type), at: ast)
{exec, [name | names]}
end)
end
@compile {:inline, parse_scalar: 5}
def parse_scalar(nil, _, _, _type_stack, meta) do
{:ok, nil, meta}
end
def parse_scalar(value, ast, %{parse: parser} = type, type_stack, meta) do
case parser.(value) do
{:ok, coerced_value} ->
{:ok, coerced_value, meta}
:error ->
{:error, Meta.put_invalid(meta, type_stack, type, ast)}
end
end
defp error_message(msg, nil), do: msg
defp error_message(msg, type) when is_function(msg) do
msg.(type.name)
end
defp error_message(msg, _), do: msg
@spec dotted_name([binary], atom) :: binary
def dotted_name(names, adapter) do
names
|> do_dotted_names(adapter, [])
|> IO.iodata_to_binary
end
defp do_dotted_names([name | []], adapter, acc) do
[format_name(name, adapter) | acc]
end
defp do_dotted_names(["[]" | rest], adapter, acc) do
do_dotted_names(rest, adapter, ["[]" | acc])
end
defp do_dotted_names([name | rest], adapter, acc) do
do_dotted_names(rest, adapter, [ ".", format_name(name, adapter) | acc])
end
defp format_name(name, adapter) do
name
|> to_string
|> adapter.to_external_name(:argument)
end
end
| 28.808989 | 115 | 0.638846 |
1c483a22d1067bb65adbbd376fe64c0120aa7aaf | 1,610 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1beta1/model/projection.ex | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/projection.ex | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/projection.ex | CertifiedrLi/elixir-google-api | 4e0e261dd06ee7753c356cca413783f3facd5f03 | [
"Apache-2.0"
] | 1 | 2018-07-28T20:50:50.000Z | 2018-07-28T20:50:50.000Z | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Firestore.V1beta1.Model.Projection do
  @moduledoc """
  The projection of document's fields to return.

  ## Attributes

  - fields ([FieldReference]): The fields to return. If empty, all fields are returned. To only return the name of the document, use `['__name__']`. Defaults to: `null`.
  """

  # Auto-generated model struct; `fields` holds a list of
  # GoogleApi.Firestore.V1beta1.Model.FieldReference structs (or nil).
  defstruct [
    :fields
  ]
end
defimpl Poison.Decoder, for: GoogleApi.Firestore.V1beta1.Model.Projection do
  alias GoogleApi.Firestore.V1beta1.Deserializer
  alias GoogleApi.Firestore.V1beta1.Model.FieldReference

  # Decodes the `fields` key into a list of FieldReference structs.
  def decode(value, options) do
    Deserializer.deserialize(value, :fields, :list, FieldReference, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Firestore.V1beta1.Model.Projection do
  alias GoogleApi.Firestore.V1beta1.Deserializer

  # Serializes only the non-nil fields via the shared helper.
  def encode(value, options) do
    Deserializer.serialize_non_nil(value, options)
  end
end
| 34.255319 | 188 | 0.753416 |
1c4872e3c90799ee3f7059a2b22e76289996cd59 | 5,536 | ex | Elixir | clients/play_movies_partner/lib/google_api/play_movies_partner/v1/model/order.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/play_movies_partner/lib/google_api/play_movies_partner/v1/model/order.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/play_movies_partner/lib/google_api/play_movies_partner/v1/model/order.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.PlayMoviesPartner.V1.Model.Order do
  @moduledoc """
  An Order tracks the fulfillment of an Edit when delivered using the legacy,
  non-component-based delivery. Each Order is uniquely identified by an
  `order_id` generated by Google; partners may also identify it by `custom_id`
  when provided.

  All attributes default to `nil`. Notable fields:

  - approvedTime / orderedTime / receivedTime: lifecycle timestamps
  - channelId / channelName: YouTube channel used to fulfill the Order
  - countries: ISO 3166-1 alpha-2 availability list
  - customId: partner-supplied external identifier
  - earliestAvailStartTime: earliest start of linked Avails
  - name / episodeName / seasonName / showName: default titles
  - legacyPriority / priority / normalizedPriority: ordering priority
  - status / statusDetail / rejectionNote: fulfillment state
  - pphName / studioName: post-production house and studio
  - type: Edit type (MOVIE, SEASON, EPISODE, BUNDLE, ...)
  - videoId: Google-generated video id once delivered
  """

  # Quoted atom syntax is unnecessary for these keys; plain camelCase atoms
  # are identical values.
  defstruct [
    :approvedTime,
    :channelId,
    :channelName,
    :countries,
    :customId,
    :earliestAvailStartTime,
    :episodeName,
    :legacyPriority,
    :name,
    :normalizedPriority,
    :orderId,
    :orderedTime,
    :pphName,
    :priority,
    :receivedTime,
    :rejectionNote,
    :seasonName,
    :showName,
    :status,
    :statusDetail,
    :studioName,
    :type,
    :videoId
  ]
end
defimpl Poison.Decoder, for: GoogleApi.PlayMoviesPartner.V1.Model.Order do
  # No nested models to decode: the struct is returned unchanged.
  def decode(value, _options), do: value
end
defimpl Poison.Encoder, for: GoogleApi.PlayMoviesPartner.V1.Model.Order do
  alias GoogleApi.PlayMoviesPartner.V1.Deserializer

  # Serializes only non-nil fields via the shared helper.
  def encode(value, options) do
    Deserializer.serialize_non_nil(value, options)
  end
end
| 58.893617 | 304 | 0.73591 |
1c48c2e752500b9f1015372f203c6425cb54b510 | 119 | ex | Elixir | lib/bitcoin/hsm/app.ex | cancoin/bitcoin-hsm | 8037d85ed60627f2344ed017f5ba21499dc6f462 | [
"Apache-2.0"
] | 1 | 2017-03-16T16:41:11.000Z | 2017-03-16T16:41:11.000Z | lib/bitcoin/hsm/app.ex | cancoin/bitcoin-hsm | 8037d85ed60627f2344ed017f5ba21499dc6f462 | [
"Apache-2.0"
] | null | null | null | lib/bitcoin/hsm/app.ex | cancoin/bitcoin-hsm | 8037d85ed60627f2344ed017f5ba21499dc6f462 | [
"Apache-2.0"
] | null | null | null | defmodule Bitcoin.HSM.App do
use Application
def start(_, _) do
Bitcoin.HSM.Supervisor.start_link
end
end
| 11.9 | 37 | 0.722689 |
1c48e8181b701cea4844a03ccdafcf1dd92f7645 | 1,464 | exs | Elixir | test/bitpal/stores/store_test.exs | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 5 | 2021-05-04T21:28:00.000Z | 2021-12-01T11:19:48.000Z | test/bitpal/stores/store_test.exs | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 71 | 2021-04-21T05:48:49.000Z | 2022-03-23T06:30:37.000Z | test/bitpal/stores/store_test.exs | bitpal/bitpal | 0e10eeaacf7a65b23945cfb95e4dbda8bffd4590 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-04-25T10:35:41.000Z | 2021-04-25T10:35:41.000Z | defmodule BitPal.StoreTest do
use BitPal.DataCase, async: true
alias BitPal.Repo
alias BitPal.Stores
setup _tags do
%{store: create_store() |> Repo.preload([:users])}
end
  # Registering an invoice against a store must link it back via store_id
  # and make it visible through the store's :invoices association.
  test "store invoice association", %{store: store} do
    assert {:ok, invoice} =
             Invoices.register(
               store.id,
               %{
                 amount: "1.2",
                 currency_id: unique_currency_id(),
                 exchange_rate: "2.0",
                 fiat_currency: fiat_currency()
               }
             )
    # Reload the association to observe the newly registered invoice.
    store = Repo.preload(store, [:invoices])
    assert length(store.invoices) == 1
    assert invoice.store_id == store.id
  end
  describe "all_addresses/1" do
    # all_addresses/1 must return addresses from every address key the store
    # has ever had — both those auto-assigned to invoices under the first key
    # and those created directly under a later key.
    test "get current and previous address_key addresses", %{store: store} do
      # First key: exercised indirectly via auto-addressed invoices.
      _key0 = create_address_key(store: store)
      i0 = store |> create_invoice(address: :auto, status: :open)
      i1 = store |> create_invoice(address: :auto, status: :open)
      # Second (current) key with two directly created addresses.
      key1 = create_address_key(store: store)
      a0 = key1 |> create_address()
      a1 = key1 |> create_address()
      address_ids =
        Stores.all_addresses(store.id)
        |> Stream.map(fn address -> address.id end)
        |> Enum.into(MapSet.new())
      assert MapSet.member?(address_ids, i0.address_id)
      assert MapSet.member?(address_ids, i1.address_id)
      assert MapSet.member?(address_ids, a0.id)
      assert MapSet.member?(address_ids, a1.id)
    end
  end
end
| 28.705882 | 77 | 0.603825 |
1c49125c18a8febd48805297287fcf0d6f99c791 | 2,402 | ex | Elixir | clients/content/lib/google_api/content/v2/model/account_status_products.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/content/lib/google_api/content/v2/model/account_status_products.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/content/lib/google_api/content/v2/model/account_status_products.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.AccountStatusProducts do
  @moduledoc """
  Aggregated product status data for an account.

  ## Attributes

  * `channel` (*type:* `String.t`, *default:* `nil`) - The channel the data applies to. Acceptable values are: - "`local`" - "`online`"
  * `country` (*type:* `String.t`, *default:* `nil`) - The country the data applies to.
  * `destination` (*type:* `String.t`, *default:* `nil`) - The destination the data applies to.
  * `itemLevelIssues` (*type:* `list(GoogleApi.Content.V2.Model.AccountStatusItemLevelIssue.t)`, *default:* `nil`) - List of item-level issues.
  * `statistics` (*type:* `GoogleApi.Content.V2.Model.AccountStatusStatistics.t`, *default:* `nil`) - Aggregated product statistics.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :channel => String.t() | nil,
          :country => String.t() | nil,
          :destination => String.t() | nil,
          :itemLevelIssues =>
            list(GoogleApi.Content.V2.Model.AccountStatusItemLevelIssue.t()) | nil,
          :statistics => GoogleApi.Content.V2.Model.AccountStatusStatistics.t() | nil
        }

  field :channel
  field :country
  field :destination
  field :itemLevelIssues, as: GoogleApi.Content.V2.Model.AccountStatusItemLevelIssue, type: :list
  field :statistics, as: GoogleApi.Content.V2.Model.AccountStatusStatistics
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.AccountStatusProducts do
  alias GoogleApi.Content.V2.Model.AccountStatusProducts

  # Delegates to the generated decode/2 on the model module.
  def decode(value, options) do
    AccountStatusProducts.decode(value, options)
  end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.AccountStatusProducts do
  alias GoogleApi.Gax.ModelBase

  # Encoding is handled by the shared Gax model base.
  def encode(value, options) do
    ModelBase.encode(value, options)
  end
end
| 40.033333 | 145 | 0.711074 |
1c4944093eddd5ee04826bb92da0b2be452a4c8e | 685 | exs | Elixir | test/defql/macros/defdelete_test.exs | fazibear/defql | 862a426f852b0b2675d3af766eda2e39f9fde19b | [
"MIT"
] | 107 | 2017-01-21T17:34:06.000Z | 2022-02-09T03:13:20.000Z | test/defql/macros/defdelete_test.exs | fazibear/defql | 862a426f852b0b2675d3af766eda2e39f9fde19b | [
"MIT"
] | 5 | 2017-01-30T11:30:20.000Z | 2019-04-12T10:49:53.000Z | test/defql/macros/defdelete_test.exs | fazibear/defql | 862a426f852b0b2675d3af766eda2e39f9fde19b | [
"MIT"
] | 8 | 2017-01-30T11:23:53.000Z | 2020-04-19T16:58:40.000Z | defmodule Test.Defql.Macros.Defdelete do
use ExUnit.Case
use Defql, table: :common_table
defdelete delete(params), table: :test
defdelete remove(params)
test "defdelete lists" do
{status, result} = delete(a: 1)
assert status == :ok
assert result.query == "DELETE test"
assert result.params == [a: 1]
end
test "defdelete maps" do
{status, result} = delete(%{a: 1})
assert status == :ok
assert result.query == "DELETE test"
assert result.params == %{a: 1}
end
test "takes a table name from Defql attribute (if it was specified)" do
{_status, result} = remove(%{a: 1})
assert result.query == "DELETE common_table"
end
end
| 23.62069 | 73 | 0.654015 |
1c496126bf63f7e26e57cf2a473a6053a743ef37 | 548 | ex | Elixir | lib/yatzy/scoring/small_straight.ex | idabmat/yatzy | fb1cc1f13670a19f3541a3b1df15e9897ffcfae3 | [
"MIT"
] | 3 | 2020-04-23T14:38:39.000Z | 2020-05-03T17:20:32.000Z | lib/yatzy/scoring/small_straight.ex | idabmat/yatzy | fb1cc1f13670a19f3541a3b1df15e9897ffcfae3 | [
"MIT"
] | null | null | null | lib/yatzy/scoring/small_straight.ex | idabmat/yatzy | fb1cc1f13670a19f3541a3b1df15e9897ffcfae3 | [
"MIT"
] | null | null | null | defmodule Yatzy.Scoring.SmallStraight do
@moduledoc """
Small Straight: The combination 1-2-3-4-5. Score: 15 points (sum of all the dice).
"""
alias Yatzy.Roll
defstruct roll: %Roll{},
name: "Small Straight",
description: "The combination 1-2-3-4-5.",
score: "15 points (sum of all the dice)."
defimpl Yatzy.Scoring.Score do
def execute(%{roll: roll}) do
roll.dice
|> Enum.sort()
|> tally()
end
defp tally([1, 2, 3, 4, 5]), do: 15
defp tally(_), do: 0
end
end
| 22.833333 | 84 | 0.580292 |
1c49658822d1665fd25d11a2f72471816c108f3f | 302 | exs | Elixir | priv/repo/migrations/20180927173924_create_tags.exs | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | null | null | null | priv/repo/migrations/20180927173924_create_tags.exs | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | 27 | 2018-10-29T18:34:44.000Z | 2019-03-11T18:43:12.000Z | priv/repo/migrations/20180927173924_create_tags.exs | the-mikedavis/doc_gen | efcc884ea65bba5748f41c5601abd00db2777ec4 | [
"BSD-3-Clause"
] | null | null | null | defmodule DocGen.Repo.Migrations.CreateTags do
use Ecto.Migration
def change do
create table(:tags) do
add(:weight, :integer)
add(:name, :string, null: false)
add(:video_id, references(:videos))
timestamps()
end
create(unique_index(:tags, [:name]))
end
end
| 18.875 | 46 | 0.645695 |
1c497560172a54e8072a0f7744a5171255574ccc | 436 | ex | Elixir | simple-http-server/lib/todo/application.ex | shivam-tripathi/learningElixir | 195af962dcd239230afd7732b88e577f26fc1eff | [
"MIT"
] | null | null | null | simple-http-server/lib/todo/application.ex | shivam-tripathi/learningElixir | 195af962dcd239230afd7732b88e577f26fc1eff | [
"MIT"
] | null | null | null | simple-http-server/lib/todo/application.ex | shivam-tripathi/learningElixir | 195af962dcd239230afd7732b88e577f26fc1eff | [
"MIT"
] | null | null | null | defmodule Todo.Application do
require Logger
@moduledoc false
use Application
@impl true
def start(_type, _args) do
children = [
{Plug.Cowboy, scheme: :http, plug: Todo.Router, options: [port: 3000]},
{Todo.Server, [name: Todo.Server]}
]
Logger.info("Starting application on port 3000.")
opts = [strategy: :one_for_one, name: Todo.Supervisor]
Supervisor.start_link(children, opts)
end
end
| 22.947368 | 77 | 0.674312 |
1c4976e164d6a069a4cc0be1e468a29cfd03a97c | 1,265 | ex | Elixir | phoenix/lib/mehr_schulferien_web/controllers/country_controller.ex | komlanvi/www.mehr-schulferien.de | fe74772f2cc8ce430e04adf6e66023971456ce57 | [
"MIT"
] | null | null | null | phoenix/lib/mehr_schulferien_web/controllers/country_controller.ex | komlanvi/www.mehr-schulferien.de | fe74772f2cc8ce430e04adf6e66023971456ce57 | [
"MIT"
] | null | null | null | phoenix/lib/mehr_schulferien_web/controllers/country_controller.ex | komlanvi/www.mehr-schulferien.de | fe74772f2cc8ce430e04adf6e66023971456ce57 | [
"MIT"
] | null | null | null | defmodule MehrSchulferienWeb.CountryController do
use MehrSchulferienWeb, :controller
alias MehrSchulferien.Locations
alias MehrSchulferien.Locations.FederalState
alias MehrSchulferien.Locations.Country
alias MehrSchulferien.Timetables
alias MehrSchulferien.Timetables.InsetDayQuantity
alias MehrSchulferien.Timetables.Category
alias MehrSchulferien.Repo
alias MehrSchulferien.CollectData
import Ecto.Query, only: [from: 2]
def show(conn, %{"id" => id}) do
{country, federal_states} = get_locations(id)
render(conn, "show.html", federal_states: federal_states,
country: country,
religion_categories: get_religion_categories())
end
defp get_locations(id) do
country = Locations.get_country!(id)
query = from fs in FederalState, where: fs.country_id == ^country.id,
order_by: fs.name,
preload: [:cities, :schools]
federal_states = Repo.all(query)
{country, federal_states}
end
defp get_religion_categories() do
query = from c in Category, where: c.for_students == true and
c.is_a_religion == true
Repo.all(query)
end
end
| 30.853659 | 77 | 0.655336 |
1c49816f02a90f8e14260c118a61023e755b1fa5 | 7,122 | ex | Elixir | lib/grpc/adapter/gun.ex | aroyer/grpc-elixir | 7b0b0b075ba96d4a7338bd9002225e6c611d5654 | [
"Apache-2.0"
] | null | null | null | lib/grpc/adapter/gun.ex | aroyer/grpc-elixir | 7b0b0b075ba96d4a7338bd9002225e6c611d5654 | [
"Apache-2.0"
] | null | null | null | lib/grpc/adapter/gun.ex | aroyer/grpc-elixir | 7b0b0b075ba96d4a7338bd9002225e6c611d5654 | [
"Apache-2.0"
] | null | null | null | defmodule GRPC.Adapter.Gun do
@moduledoc false
# A client adapter using Gun.
# conn_pid and stream_ref is stored in `GRPC.Server.Stream`.
@default_transport_opts [nodelay: true]
  # Opens a gun connection for the channel. Clause order matters: nil opts
  # are normalized to an empty map first, then an "https" scheme selects the
  # TLS path; everything else connects over plain TCP.
  @spec connect(GRPC.Channel.t(), any) :: {:ok, GRPC.Channel.t()} | {:error, any}
  def connect(channel, nil), do: connect(channel, %{})
  def connect(%{scheme: "https"} = channel, opts), do: connect_securely(channel, opts)
  def connect(channel, opts), do: connect_insecurely(channel, opts)
defp connect_securely(%{cred: %{ssl: ssl}} = channel, opts) do
transport_opts = Map.get(opts, :transport_opts, @default_transport_opts ++ ssl)
open_opts = %{transport: :ssl, protocols: [:http2], transport_opts: transport_opts}
open_opts = Map.merge(opts, open_opts)
do_connect(channel, open_opts)
end
defp connect_insecurely(channel, opts) do
transport_opts = Map.get(opts, :transport_opts, @default_transport_opts)
open_opts = %{transport: :tcp, protocols: [:http2], transport_opts: transport_opts}
open_opts = Map.merge(opts, open_opts)
do_connect(channel, open_opts)
end
  # Opens the socket and waits for gun to negotiate a protocol; only http2 is
  # acceptable. On success the gun pid is stored in the channel's
  # adapter_payload. The `{:ok, conn_pid} =` match is assertive: open/3
  # failing is treated as a bug and crashes the caller.
  defp do_connect(%{host: host, port: port} = channel, open_opts) do
    {:ok, conn_pid} = open(host, port, open_opts)
    case :gun.await_up(conn_pid) do
      {:ok, :http2} ->
        {:ok, Map.put(channel, :adapter_payload, %{conn_pid: conn_pid})}
      {:ok, proto} ->
        {:error, "Error when opening connection: protocol #{proto} is not http2"}
      {:error, reason} ->
        {:error, "Error when opening connection: #{inspect(reason)}"}
    end
  end
def disconnect(%{adapter_payload: %{conn_pid: gun_pid}} = channel)
when is_pid(gun_pid) do
:ok = :gun.close(gun_pid)
{:ok, %{channel | adapter_payload: %{conn_pid: nil}}}
end
def disconnect(%{adapter_payload: %{conn_pid: nil}} = channel) do
{:ok, channel}
end
defp open({:local, socket_path}, _port, open_opts),
do: :gun.open_unix(socket_path, open_opts)
defp open(host, port, open_opts),
do: :gun.open(String.to_charlist(host), port, open_opts)
@spec send_request(GRPC.Client.Stream.t(), binary, map) :: GRPC.Client.Stream.t()
def send_request(stream, message, opts) do
stream_ref = do_send_request(stream, message, opts)
GRPC.Client.Stream.put_payload(stream, :stream_ref, stream_ref)
end
defp do_send_request(
%{channel: %{adapter_payload: %{conn_pid: conn_pid}}, path: path} = stream,
message,
opts
) do
headers = GRPC.Transport.HTTP2.client_headers_without_reserved(stream, opts)
{:ok, data, _} = GRPC.Message.to_data(message, opts)
:gun.post(conn_pid, path, headers, data)
end
def send_headers(
%{channel: %{adapter_payload: %{conn_pid: conn_pid}}, path: path} = stream,
opts
) do
headers = GRPC.Transport.HTTP2.client_headers_without_reserved(stream, opts)
stream_ref = :gun.post(conn_pid, path, headers)
GRPC.Client.Stream.put_payload(stream, :stream_ref, stream_ref)
end
def send_data(%{channel: channel, payload: %{stream_ref: stream_ref}} = stream, message, opts) do
conn_pid = channel.adapter_payload[:conn_pid]
fin = if opts[:send_end_stream], do: :fin, else: :nofin
{:ok, data, _} = GRPC.Message.to_data(message, opts)
:gun.data(conn_pid, stream_ref, fin, data)
stream
end
def end_stream(%{channel: channel, payload: %{stream_ref: stream_ref}} = stream) do
conn_pid = channel.adapter_payload[:conn_pid]
:gun.data(conn_pid, stream_ref, :fin, "")
stream
end
def cancel(%{conn_pid: conn_pid}, %{stream_ref: stream_ref}) do
:gun.cancel(conn_pid, stream_ref)
end
def recv_headers(%{conn_pid: conn_pid}, %{stream_ref: stream_ref}, opts) do
case await(conn_pid, stream_ref, opts[:timeout]) do
{:response, headers, fin} ->
{:ok, headers, fin}
error = {:error, _} ->
error
other ->
{:error,
GRPC.RPCError.exception(
GRPC.Status.unknown(),
"unexpected when waiting for headers: #{inspect(other)}"
)}
end
end
def recv_data_or_trailers(%{conn_pid: conn_pid}, %{stream_ref: stream_ref}, opts) do
case await(conn_pid, stream_ref, opts[:timeout]) do
data = {:data, _} ->
data
trailers = {:trailers, _} ->
trailers
error = {:error, _} ->
error
other ->
{:error,
GRPC.RPCError.exception(
GRPC.Status.unknown(),
"unexpected when waiting for data: #{inspect(other)}"
)}
end
end
  # Blocks until gun delivers the next event for `stream_ref` and normalizes
  # it to {:response, headers, fin} | {:data, binary} | {:trailers, list} |
  # {:error, %GRPC.RPCError{}}. Clause order below is significant.
  defp await(conn_pid, stream_ref, timeout) do
    # We should use server timeout for most time
    # Doubling gives the server a chance to report its own DEADLINE_EXCEEDED
    # before the client-side wait gives up.
    timeout =
      if is_integer(timeout) do
        timeout * 2
      else
        timeout
      end
    case :gun.await(conn_pid, stream_ref, timeout) do
      # Trailers-only response: gRPC status arrives embedded in the headers.
      {:response, :fin, status, headers} ->
        if status == 200 do
          headers = Enum.into(headers, %{})
          case headers["grpc-status"] do
            # A finished response must carry a grpc-status; missing is a bug.
            nil ->
              {:error,
               GRPC.RPCError.exception(
                 GRPC.Status.internal(),
                 "shouldn't finish when getting headers"
               )}
            "0" ->
              {:response, headers, :fin}
            # Non-zero grpc-status is surfaced as an RPCError.
            _ ->
              {:error,
               GRPC.RPCError.exception(
                 String.to_integer(headers["grpc-status"]),
                 headers["grpc-message"]
               )}
          end
        else
          {:error,
           GRPC.RPCError.exception(
             GRPC.Status.internal(),
             "status got is #{status} instead of 200"
           )}
        end
      # Headers with a body still to come; an early non-zero grpc-status in
      # the headers short-circuits to an error.
      {:response, :nofin, status, headers} ->
        if status == 200 do
          headers = Enum.into(headers, %{})
          if headers["grpc-status"] && headers["grpc-status"] != "0" do
            {:error,
             GRPC.RPCError.exception(
               String.to_integer(headers["grpc-status"]),
               headers["grpc-message"]
             )}
          else
            {:response, headers, :nofin}
          end
        else
          {:error,
           GRPC.RPCError.exception(
             GRPC.Status.internal(),
             "status got is #{status} instead of 200"
           )}
        end
      # gRPC always ends with trailers, so a :fin data frame is a protocol
      # violation.
      {:data, :fin, _} ->
        {:error,
         GRPC.RPCError.exception(GRPC.Status.internal(), "shouldn't finish when getting data")}
      {:data, :nofin, data} ->
        {:data, data}
      trailers = {:trailers, _} ->
        trailers
      # Client-side wait expired: map to DEADLINE_EXCEEDED.
      {:error, :timeout} ->
        {:error,
         GRPC.RPCError.exception(
           GRPC.Status.deadline_exceeded(),
           "timeout when waiting for server"
         )}
      {:error, {reason, msg}} ->
        {:error,
         GRPC.RPCError.exception(GRPC.Status.unknown(), "#{inspect(reason)}: #{inspect(msg)}")}
      {:error, reason} ->
        {:error, GRPC.RPCError.exception(GRPC.Status.unknown(), "#{inspect(reason)}")}
      other ->
        {:error,
         GRPC.RPCError.exception(
           GRPC.Status.unknown(),
           "unexpected message when waiting: #{inspect(other)}"
         )}
    end
  end
end
| 30.306383 | 99 | 0.590845 |
1c49dbe3c0c65f963ec7bb79c20dde2a32df6fdb | 275 | ex | Elixir | webapp/lib/penmark_web/views/layout_view.ex | zmaril/penmark | 992f570da3bdf819f912505ba9b6531db9dcb80b | [
"FSFAP"
] | null | null | null | webapp/lib/penmark_web/views/layout_view.ex | zmaril/penmark | 992f570da3bdf819f912505ba9b6531db9dcb80b | [
"FSFAP"
] | null | null | null | webapp/lib/penmark_web/views/layout_view.ex | zmaril/penmark | 992f570da3bdf819f912505ba9b6531db9dcb80b | [
"FSFAP"
] | null | null | null | defmodule PenmarkWeb.LayoutView do
  # Layout view: pulls in the app's shared view helpers.
  use PenmarkWeb, :view
  # Phoenix LiveDashboard is available only in development by default,
  # so we instruct Elixir to not warn if the dashboard route is missing.
  @compile {:no_warn_undefined, {Routes, :live_dashboard_path, 2}}
end
| 34.375 | 72 | 0.774545 |
1c49ddad8629ce452af97678a54c2d7e93fbeef1 | 206 | exs | Elixir | priv/repo/migrations/20180802065343_create_users.exs | jeantsai/phoenix-admin | 3f954f0c452d385438b616f7e91bc5d66bcc1adc | [
"MIT"
] | null | null | null | priv/repo/migrations/20180802065343_create_users.exs | jeantsai/phoenix-admin | 3f954f0c452d385438b616f7e91bc5d66bcc1adc | [
"MIT"
] | null | null | null | priv/repo/migrations/20180802065343_create_users.exs | jeantsai/phoenix-admin | 3f954f0c452d385438b616f7e91bc5d66bcc1adc | [
"MIT"
] | null | null | null | defmodule Admin.Repo.Migrations.CreateUsers do
use Ecto.Migration
def change do
create table(:users) do
add :name, :string
add :password, :string
timestamps()
end
end
end
| 14.714286 | 46 | 0.65534 |
1c49ef5c831f611d270fb232b7d937d0447b3fa8 | 6,415 | ex | Elixir | lib/ueberauth/strategy/keycloak.ex | firezone/ueberauth_keycloak | 8a028d6c949604b25a083d4137affa0472ff919c | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/keycloak.ex | firezone/ueberauth_keycloak | 8a028d6c949604b25a083d4137affa0472ff919c | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/keycloak.ex | firezone/ueberauth_keycloak | 8a028d6c949604b25a083d4137affa0472ff919c | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.Keycloak do
@moduledoc """
Provides an Ueberauth strategy for authenticating with Keycloak.
### Setup
Create an application in Keycloak for you to use.
Register a new application at: [your keycloak developer page](https://keycloak.com/settings/developers) and get the `client_id` and `client_secret`.
Include the provider in your configuration for Ueberauth
config :ueberauth, Ueberauth,
providers: [
keycloak: { Ueberauth.Strategy.Keycloak, [] }
]
Then include the configuration for keycloak.
config :ueberauth, Ueberauth.Strategy.Keycloak.OAuth,
client_id: System.get_env("KEYCLOAK_CLIENT_ID"),
client_secret: System.get_env("KEYCLOAK_CLIENT_SECRET")
If you haven't already, create a pipeline and setup routes for your callback handler
pipeline :auth do
Ueberauth.plug "/auth"
end
scope "/auth" do
pipe_through [:browser, :auth]
get "/:provider/callback", AuthController, :callback
end
Create an endpoint for the callback where you will handle the `Ueberauth.Auth` struct
defmodule MyApp.AuthController do
use MyApp.Web, :controller
def callback_phase(%{ assigns: %{ ueberauth_failure: fails } } = conn, _params) do
# do things with the failure
end
def callback_phase(%{ assigns: %{ ueberauth_auth: auth } } = conn, params) do
# do things with the auth
end
end
You can edit the behaviour of the Strategy by including some options when you register your provider.
To set the `uid_field`
config :ueberauth, Ueberauth,
providers: [
keycloak: { Ueberauth.Strategy.Keycloak, [uid_field: :email] }
]
Default is `:id`
To set the default 'scopes' (permissions):
config :ueberauth, Ueberauth,
providers: [
keycloak: { Ueberauth.Strategy.Keycloak, [default_scope: "api read_user read_registry", api_version: "v4"] }
]
Default is "api read_user read_registry"
"""
require Logger
use Ueberauth.Strategy,
uid_field: :id,
default_scope: "api read_user read_registry",
oauth2_module: Ueberauth.Strategy.Keycloak.OAuth
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
  @doc """
  Handles the initial redirect to the keycloak authentication page.
  To customize the scope (permissions) that are requested by keycloak include them as part of your url:
      "/auth/keycloak?scope=api read_user read_registry"
  You can also include a `state` param that keycloak will return to you.
  """
  def handle_request!(conn) do
    # Caller-supplied scope wins over the configured default.
    scopes = conn.params["scope"] || option(conn, :default_scope)
    opts = [redirect_uri: callback_url(conn), scope: scopes]
    opts =
      if conn.params["state"], do: Keyword.put(opts, :state, conn.params["state"]), else: opts
    # NOTE(review): :state may be set twice here — once manually above and
    # once by with_state_param/2 (Ueberauth's CSRF state helper); confirm
    # which value is intended to win.
    opts = with_state_param(opts, conn)
    # The OAuth module is configurable so tests can substitute a stub.
    module = option(conn, :oauth2_module)
    redirect!(conn, apply(module, :authorize_url!, [opts]))
  end
  @doc """
  Handles the callback from Keycloak. When there is a failure from Keycloak the failure is included in the
  `ueberauth_failure` struct. Otherwise the information returned from Keycloak is returned in the `Ueberauth.Auth` struct.
  """
  def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
    module = option(conn, :oauth2_module)
    # Exchange the authorization code for a token.
    token = apply(module, :get_token!, [[code: code, redirect_uri: callback_url(conn)]])
    if token.access_token == nil do
      set_errors!(conn, [
        error(token.other_params["error"], token.other_params["error_description"])
      ])
    else
      # NOTE(review): access_token is JSON-decoded and the embedded
      # "access_token" key extracted — this implies the OAuth client stores
      # the raw JSON response in access_token; confirm against the OAuth
      # module's serializer before changing.
      %{"access_token" => real_token} = Jason.decode!(token.access_token)
      fetch_user(conn, real_token)
    end
  end
  @doc false
  # Fallback clause: no "code" param means the provider redirect failed.
  def handle_callback!(conn) do
    set_errors!(conn, [error("missing_code", "No code received")])
  end
@doc """
Cleans up the private area of the connection used for passing the raw Keycloak response around during the callback.
"""
def handle_cleanup!(conn) do
conn
|> put_private(:keycloak_user, nil)
end
@doc """
Fetches the uid field from the Keycloak response. This defaults to the option `uid_field` which in-turn defaults to `id`
"""
def uid(conn) do
user =
conn
|> option(:uid_field)
|> to_string()
conn.private.keycloak_user["email"]
end
  @doc """
  Includes the credentials from the Keycloak response.
  """
  def credentials(conn) do
    token = conn.private.keycloak_token
    scope_string = token.other_params["scope"] || ""
    # NOTE(review): scopes are split on "," here, but OAuth 2.0 delivers the
    # scope parameter space-delimited — a space-separated scope string would
    # come back as a single-element list. Confirm against actual Keycloak
    # responses before changing.
    scopes = String.split(scope_string, ",")
    %Credentials{
      token: token.access_token,
      refresh_token: token.refresh_token,
      expires_at: token.expires_at,
      token_type: token.token_type,
      # expires is a boolean derived from the presence of expires_at.
      expires: !!token.expires_at,
      scopes: scopes
    }
  end
@doc """
Fetches the fields to populate the info section of the `Ueberauth.Auth` struct.
"""
def info(conn) do
user = conn.private.keycloak_user
%Info{
name: user["name"],
nickname: user["preferred_username"],
email: user["email"],
location: user["location"],
image: user["avatar_url"],
urls: %{
web_url: user["web_url"],
website_url: user["website_url"]
}
}
end
@doc """
Stores the raw information (including the token) obtained from the Keycloak callback.
"""
def extra(conn) do
%Extra{
raw_info: %{
token: conn.private.keycloak_token,
user: conn.private.keycloak_user
}
}
end
defp fetch_user(conn, token) do
conn = put_private(conn, :keycloak_token, token)
api_ver = option(conn, :api_ver) || "v4"
case Ueberauth.Strategy.Keycloak.OAuth.get(
token,
Ueberauth.Strategy.Keycloak.OAuth.userinfo_url()
) do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: user}}
when status_code in 200..399 ->
put_private(conn, :keycloak_user, Jason.decode!(user))
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
defp option(conn, key) do
Keyword.get(options(conn) || [], key, Keyword.get(default_options(), key))
end
end
| 29.027149 | 150 | 0.663601 |
1c49f743fef0e13cbaeddc529fd08be89a397364 | 189 | ex | Elixir | config.py.ex | ApolloFortyNine/hetzner_auction_notifier | 7cd99f66c7632fb22bc4cf0eede0775c11a9e573 | [
"MIT"
] | 3 | 2018-04-01T20:05:52.000Z | 2019-05-24T04:37:46.000Z | config.py.ex | ApolloFortyNine/hetzner_auction_notifier | 7cd99f66c7632fb22bc4cf0eede0775c11a9e573 | [
"MIT"
] | null | null | null | config.py.ex | ApolloFortyNine/hetzner_auction_notifier | 7cd99f66c7632fb22bc4cf0eede0775c11a9e573 | [
"MIT"
] | null | null | null | robot_username = ''
robot_password = ''
email_address = ''
smtp_address = ''
smtp_username = ''
smtp_password = ''
desired_price_euros = 18
desired_benchmark = 8000
desired_space_gb = 6000
| 18.9 | 24 | 0.746032 |
1c4a85bb8e1e038a5079228be2f066bb533c5922 | 1,490 | exs | Elixir | mix.exs | jordan0day/grpc-elixir | d07d1357b2c105258f77b908482f694bd11b3c8d | [
"Apache-2.0"
] | null | null | null | mix.exs | jordan0day/grpc-elixir | d07d1357b2c105258f77b908482f694bd11b3c8d | [
"Apache-2.0"
] | null | null | null | mix.exs | jordan0day/grpc-elixir | d07d1357b2c105258f77b908482f694bd11b3c8d | [
"Apache-2.0"
] | null | null | null | defmodule GRPC.Mixfile do
use Mix.Project
@version "0.3.1"
# Mix project definition; the ExDoc settings are split into a helper below
# to keep this keyword list short.
def project do
  [
    app: :grpc,
    version: @version,
    elixir: "~> 1.4",
    elixirc_paths: elixirc_paths(Mix.env()),
    build_embedded: Mix.env() == :prod,
    start_permanent: Mix.env() == :prod,
    deps: deps(),
    package: package(),
    description: "The Elixir implementation of gRPC",
    docs: docs()
  ]
end

# ExDoc configuration used by `mix docs`.
defp docs do
  [
    extras: ["README.md"],
    main: "readme",
    source_ref: "v#{@version}",
    source_url: "https://github.com/tony612/grpc-elixir"
  ]
end
# OTP application configuration ("mix help compile.app" for details).
# Only :logger must be started before this library.
def application, do: [extra_applications: [:logger]]
# Dependencies. ex_doc 0.19+ requires Elixir 1.7, so the version requirement
# is chosen based on the compiler currently running.
defp deps do
  ex_doc_version =
    case Version.compare(System.version(), "1.7.0") do
      :lt -> "~> 0.18.0"
      _ -> "~> 0.19"
    end

  [
    {:protobuf, "~> 0.5"},
    {:cowboy, "~> 2.5"},
    {:gun, "~> 1.2"},
    {:ex_doc, ex_doc_version, only: :dev},
    {:inch_ex, "~> 1.0", only: [:dev, :test]},
    {:dialyxir, "~> 0.5", only: :dev, runtime: false}
  ]
end
# Hex package metadata consumed by `mix hex.publish`.
defp package do
  %{
    maintainers: ["Bing Han"],
    licenses: ["Apache 2"],
    links: %{"GitHub" => "https://github.com/tony612/grpc-elixir"},
    files: ~w(mix.exs README.md lib src config LICENSE .formatter.exs)
  }
end

# Compile the test support helpers only in the :test environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
end
| 23.650794 | 72 | 0.54698 |
1c4aa8461d9a3f1d00c5905b48773d574ee5ea1c | 4,121 | ex | Elixir | lib/afterglow/api_actions/api_actions.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | lib/afterglow/api_actions/api_actions.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | lib/afterglow/api_actions/api_actions.ex | sahilpaudel/AfterGlow | 0859ec14b47c8c5704cc8e5cba86d39aa258fff5 | [
"MIT"
] | null | null | null | defmodule AfterGlow.ApiActions do
alias AfterGlow.ApiActions.ApiAction
alias AfterGlow.ApiActions.ApiActionLogs
alias AfterGlow.CacheWrapper.Repo
import Ecto.Query
# Returns every ApiAction whose id appears in `ids`.
def list_api_actions(ids) do
  ApiAction
  |> where([aa], aa.id in ^ids)
  |> Repo.all()
end
# Fetches one ApiAction by primary key, raising when it does not exist.
def get_api_action!(id), do: Repo.get!(ApiAction, id)
# Creates an ApiAction. Attributes carrying a "top_level_question_id" are
# normalized into question-level actions; everything else becomes a
# question-response-level action.
def create_api_action(attrs \\ %{}) do
  normalized = put_action_level(attrs)

  %ApiAction{}
  |> ApiAction.changeset(normalized)
  |> Repo.insert_with_cache()
end

# Tags the attribute map with the appropriate action level (and, for
# question-level actions, the question id and reserved name).
defp put_action_level(attrs) do
  if question_id = attrs["top_level_question_id"] do
    Map.merge(attrs, %{
      "question_id" => question_id,
      "action_level" => "question",
      "name" => "top_level"
    })
  else
    Map.merge(attrs, %{"action_level" => "question_response"})
  end
end
# Applies `attrs` to an existing action through its changeset and writes the
# result through the cache-aware repo.
def update_api_action(%ApiAction{} = api_action, attrs) do
  changeset = ApiAction.changeset(api_action, attrs)
  Repo.update_with_cache(changeset)
end
# Soft delete: the action row is kept but flagged as hidden. The redundant
# inline `%ApiAction{} =` match was removed — `get_api_action!/1` already
# guarantees the struct type (and `update_api_action/2` matches on it again).
def delete_api_action!(id) do
  id
  |> get_api_action!()
  |> update_api_action(%{hidden: true})
end
# Accepts a raw map of ApiAction attributes (string keys), builds an
# `ApiAction` struct from it and delegates to `send_request/4`.
def send_request(api_action_data, variables, user) when is_map(api_action_data) do
  # NOTE(review): `String.to_atom/1` mints atoms from arbitrary map keys.
  # Atoms are never garbage collected, so if this map can come from untrusted
  # input it is a memory-exhaustion vector — consider
  # `String.to_existing_atom/1` (the keys should all be ApiAction fields).
  api_action_data = for {key, val} <- api_action_data, into: %{}, do: {String.to_atom(key), val}
  api_action = struct(ApiAction, api_action_data)
  send_request(api_action, variables, api_action.open_in_new_tab, user)
end
# Convenience wrapper: looks the action up by id, then dispatches on its
# `open_in_new_tab` flag.
def send_request(id, variables, user) do
  action = get_api_action!(id)
  send_request(action, variables, action.open_in_new_tab, user)
end
# Redirect-style actions are not executed server side: only a synthetic 301
# log entry is recorded, and the interpolated URL is returned to the caller.
def send_request(%ApiAction{} = api_action, variables, true, user) do
  url = replace_variables(api_action.url, variables)

  response = %{status_code: 301, response_body: "redirect", response_headers: nil}

  response
  |> log_args(url, "GET", nil, nil, user.id, variables, api_action.id)
  |> save_log()

  %{redirect_url: url}
end
# Executes the HTTP call for non-redirect actions: interpolates `variables`
# into the URL, headers and body, performs the request, normalizes the
# response and persists an ApiActionLogs entry. Returns the log map.
def send_request(%ApiAction{} = api_action, variables, false, user) do
  url =
    api_action.url
    |> replace_variables(variables)

  # Headers are stored as a map; round-tripping through JSON lets variable
  # placeholders inside header values be substituted, and Enum.into/1 turns
  # the decoded map into the key/value list shape HTTPoison expects.
  headers =
    api_action.headers
    |> Poison.encode!()
    |> replace_variables(variables)
    |> Poison.decode!()
    |> Enum.into([])

  body = api_action.body |> replace_variables(variables)
  method = api_action.method

  # A nil body is sent as an empty string, but the original nil is what gets
  # logged below.
  make_request(url, method, body || "", headers)
  |> parse_response
  |> log_args(url, method, body, headers, user.id, variables, api_action.id)
  |> save_log
end
# Substitutes `{{ name }}` placeholders in `string` with the corresponding
# variable values. Inputs with nothing to substitute are returned untouched.
def replace_variables(nil, _variables), do: nil
# NOTE(review): `%{}` matches *any* map (not only the empty map), so a map of
# variables is silently ignored — confirm callers only ever pass lists.
def replace_variables(string, %{}), do: string
def replace_variables(string, nil), do: string
def replace_variables(string, []), do: string

def replace_variables(string, variables) do
  variables
  |> Enum.reduce(string, fn variable, acc ->
    variable_name =
      variable["name"]
      |> String.trim()

    # Escape the name so regex metacharacters in a variable name cannot crash
    # regex compilation or widen the placeholder pattern.
    pattern = ~r({{\W*#{Regex.escape(variable_name)}\W*?}})

    # to_string/1 renders nil as "", so no extra fallback is needed.
    String.replace(acc, pattern, to_string(variable["value"]))
  end)
end
# Dispatches to the matching HTTPoison verb. `method` may be an atom or a
# string ("GET"/"POST"/"PUT"/"PATCH"/"DELETE"); any other value raises a
# CaseClauseError. GET and DELETE ignore `body` by design.
def make_request(url, method, body, headers) do
  case method |> to_string() do
    "GET" -> HTTPoison.get(url, headers)
    "POST" -> HTTPoison.post(url, body, headers)
    "PUT" -> HTTPoison.put(url, body, headers)
    "PATCH" -> HTTPoison.patch(url, body, headers)
    "DELETE" -> HTTPoison.delete(url, headers)
  end
end
# Merges the request details into the (already normalized) response map,
# producing the flat map that is persisted as the action log entry.
def log_args(response, url, method, body, headers, user_id, variables, api_action_id) do
  request_headers = Enum.into(headers || [], %{})

  details = %{
    url: url,
    method: method,
    request_body: body,
    request_headers: request_headers,
    user_id: user_id,
    variables: variables,
    api_action_id: api_action_id
  }

  Map.merge(response, details)
end
# Persists the assembled log entry and returns it unchanged, so it can sit at
# the end of the send_request pipelines.
def save_log(log_args) do
  ApiActionLogs.save(log_args)
  log_args
end
# Normalizes an HTTPoison result tuple into a plain map. Transport errors are
# reported with a synthetic status code of 0 and the error reason as body.
def parse_response(response) do
  case response do
    {:ok, %{status_code: status, body: body, headers: headers}} ->
      %{
        status_code: status,
        response_body: body,
        response_headers: Enum.into(headers, %{})
      }

    {:error, %{reason: reason}} ->
      %{status_code: 0, response_body: reason, response_headers: nil}
  end
end
end
| 26.587097 | 98 | 0.640864 |
1c4aad9f060ffaaadc2a910991d1676544edf127 | 1,229 | ex | Elixir | Chapter9/supervise_database/lib/todo/database_worker.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter9/supervise_database/lib/todo/database_worker.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | Chapter9/supervise_database/lib/todo/database_worker.ex | benjamindburke/elixir-studies | 65231b5af83dcf701041cae2879107c3bd3e5078 | [
"Unlicense"
] | null | null | null | defmodule Todo.DatabaseWorker do
use GenServer
# -- Client API -------------------------------------------------------------

# Starts one worker for `folder`, registered under `worker_id` in the
# process registry.
def start_link({folder, worker_id}) do
  IO.puts("Starting database worker #{worker_id}")
  GenServer.start_link(__MODULE__, folder, name: via_tuple(worker_id))
end

# Asynchronously persists `data` under `key` on worker `worker_id`.
def store(worker_id, key, data), do: GenServer.cast(via_tuple(worker_id), {:store, key, data})

# Synchronously reads the value stored under `key` (nil when absent).
def get(worker_id, key), do: GenServer.call(via_tuple(worker_id), {:get, key})
@impl GenServer
def init(folder) do
  # Ensure the storage folder exists before any reads/writes happen; the
  # folder path is the only state this server keeps.
  File.mkdir_p!(folder)
  {:ok, folder}
end

@impl GenServer
def handle_cast({:store, key, data}, folder) do
  # The write runs in a freshly spawned process so the worker keeps serving
  # requests. NOTE(review): `spawn/1` is unlinked and unsupervised — a failed
  # write dies silently, and ordering of concurrent writes to the same key is
  # not guaranteed.
  spawn(fn ->
    folder
    |> file_name(key)
    |> File.write!(:erlang.term_to_binary(data))
  end)
  {:noreply, folder}
end

@impl GenServer
def handle_call({:get, key}, caller, folder) do
  # The reply is produced from a spawned process via GenServer.reply/2, so the
  # worker itself is not blocked by disk reads. A missing or unreadable file
  # is surfaced to the caller as nil.
  spawn(fn ->
    data = case File.read(file_name(folder, key)) do
      {:ok, contents} -> :erlang.binary_to_term(contents)
      _ -> nil
    end
    GenServer.reply(caller, data)
  end)
  {:noreply, folder}
end
# Full path of the file backing `key` inside `folder`.
defp file_name(folder, key), do: Path.join(folder, to_string(key))

# Registry name under which a single worker instance is addressed.
defp via_tuple(worker_id), do: Todo.ProcessRegistry.via_tuple({__MODULE__, worker_id})
end
| 20.483333 | 61 | 0.641172 |
1c4ab3c8c53b1a055c2d9aaa84a687e35d8e63eb | 3,415 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_dlp_job.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_dlp_job.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_dlp_job.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DlpJob do
  @moduledoc """
  Combines all of the information about a DLP job.

  ## Attributes

  - createTime (DateTime.t): Time when the job was created. Defaults to: `null`.
  - endTime (DateTime.t): Time when the job finished. Defaults to: `null`.
  - errors ([GooglePrivacyDlpV2Error]): A stream of errors encountered running the job. Defaults to: `null`.
  - inspectDetails (GooglePrivacyDlpV2InspectDataSourceDetails): Results from inspecting a data source. Defaults to: `null`.
  - jobTriggerName (String.t): If created by a job trigger, the resource name of the trigger that instantiated the job. Defaults to: `null`.
  - name (String.t): The server-assigned name. Defaults to: `null`.
  - riskDetails (GooglePrivacyDlpV2AnalyzeDataSourceRiskDetails): Results from analyzing risk of a data source. Defaults to: `null`.
  - startTime (DateTime.t): Time when the job started. Defaults to: `null`.
  - state (String.t): State of a job. Defaults to: `null`.
    - Enum - one of [JOB_STATE_UNSPECIFIED, PENDING, RUNNING, DONE, CANCELED, FAILED]
  - type (String.t): The type of job. Defaults to: `null`.
    - Enum - one of [DLP_JOB_TYPE_UNSPECIFIED, INSPECT_JOB, RISK_ANALYSIS_JOB]
  """

  # Generated model (see file header — do not edit by hand). The field/2,3
  # macros come from GoogleApi.Gax.ModelBase and define the struct keys plus
  # their JSON (de)serialization rules.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :createTime => DateTime.t(),
          :endTime => DateTime.t(),
          :errors => list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Error.t()),
          :inspectDetails => GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2InspectDataSourceDetails.t(),
          :jobTriggerName => any(),
          :name => any(),
          :riskDetails =>
            GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2AnalyzeDataSourceRiskDetails.t(),
          :startTime => DateTime.t(),
          :state => any(),
          :type => any()
        }

  field(:createTime, as: DateTime)
  field(:endTime, as: DateTime)
  field(:errors, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2Error, type: :list)
  field(:inspectDetails, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2InspectDataSourceDetails)
  field(:jobTriggerName)
  field(:name)
  field(:riskDetails, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2AnalyzeDataSourceRiskDetails)
  field(:startTime, as: DateTime)
  field(:state)
  field(:type)
end

# Poison integration: decoding/encoding is delegated to the generated model
# helpers so nested models are handled consistently.
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DlpJob do
  def decode(value, options) do
    GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DlpJob.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2DlpJob do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 43.782051 | 140 | 0.724744 |
1c4ad9e1b848bfe9fdbe8c9f44dea13c8036a742 | 11,509 | exs | Elixir | test/plug/adapters/cowboy/conn_test.exs | wojtekmach/plug | e9afab6c10f173d55b13bd78c64526fb21495b39 | [
"Apache-2.0"
] | null | null | null | test/plug/adapters/cowboy/conn_test.exs | wojtekmach/plug | e9afab6c10f173d55b13bd78c64526fb21495b39 | [
"Apache-2.0"
] | null | null | null | test/plug/adapters/cowboy/conn_test.exs | wojtekmach/plug | e9afab6c10f173d55b13bd78c64526fb21495b39 | [
"Apache-2.0"
] | null | null | null | defmodule Plug.Adapters.Cowboy.ConnTest do
use ExUnit.Case, async: true

alias Plug.Conn
import Plug.Conn

## Cowboy setup for testing
#
# We use hackney to perform an HTTP request against the cowboy/plug running
# on port 8001. Plug then uses Kernel.apply/3 to dispatch based on the first
# element of the URI's path.
#
# e.g. `assert {204, _, _} = request :get, "/build/foo/bar"` will perform a
# GET http://127.0.0.1:8001/build/foo/bar and Plug will call build/1.

# Boots a single real Cowboy listener for the whole module and tears it down
# when the suite finishes.
setup_all do
  {:ok, _pid} = Plug.Adapters.Cowboy.http __MODULE__, [], port: 8001

  on_exit fn ->
    :ok = Plug.Adapters.Cowboy.shutdown(__MODULE__.HTTP)
  end

  :ok
end

# Message Plug sends to the owning process once a response has gone out; used
# below to detect responses sent before an exception was raised.
@already_sent {:plug_conn, :sent}

def init(opts) do
  opts
end

# This module acts as its own Plug: the first path segment selects which of
# the `def`s below handles the request (empty path -> root/1).
def call(conn, []) do
  # Assert we never have a lingering @already_sent entry in the inbox
  refute_received @already_sent

  # NOTE(review): String.to_atom/1 on a URL segment is fine for a test
  # harness, but would be an atom-exhaustion hazard in production code.
  function = String.to_atom List.first(conn.path_info) || "root"
  apply __MODULE__, function, [conn]
rescue
  exception ->
    receive do
      {:plug_conn, :sent} ->
        # A response already went out; re-raise so the test still fails.
        # NOTE(review): :erlang.get_stacktrace/0 and System.stacktrace/0 are
        # deprecated on modern Erlang/Elixir — use __STACKTRACE__ if this file
        # is ever updated past the versions it currently targets.
        :erlang.raise(:error, exception, :erlang.get_stacktrace)
    after
      0 ->
        send_resp(conn, 500, Exception.message(exception) <> "\n" <>
                             Exception.format_stacktrace(System.stacktrace))
    end
end

## Tests

def root(%Conn{} = conn) do
  assert conn.method == "HEAD"
  assert conn.path_info == []
  assert conn.query_string == "foo=bar&baz=bat"
  assert conn.request_path == "/"
  resp(conn, 200, "ok")
end

def build(%Conn{} = conn) do
  assert {Plug.Adapters.Cowboy.Conn, _} = conn.adapter
  assert conn.path_info == ["build", "foo", "bar"]
  assert conn.query_string == ""
  assert conn.scheme == :http
  assert conn.host == "127.0.0.1"
  assert conn.port == 8001
  assert conn.method == "GET"
  assert {{127, 0, 0, 1}, _} = conn.peer
  assert conn.remote_ip == {127, 0, 0, 1}
  resp(conn, 200, "ok")
end

test "builds a connection" do
  assert {200, _, _} = request :head, "/?foo=bar&baz=bat"
  assert {200, _, _} = request :get, "/build/foo/bar"
  assert {200, _, _} = request :get, "//build//foo//bar"
end

def return_request_path(%Conn{} = conn) do
  resp(conn, 200, conn.request_path)
end

# request_path must be returned exactly as sent, including duplicate and
# trailing slashes.
test "request_path" do
  assert {200, _, "/return_request_path/foo"} =
         request :get, "/return_request_path/foo?barbat"

  assert {200, _, "/return_request_path/foo/bar"} =
         request :get, "/return_request_path/foo/bar?bar=bat"

  assert {200, _, "/return_request_path/foo/bar/"} =
         request :get, "/return_request_path/foo/bar/?bar=bat"

  assert {200, _, "/return_request_path/foo//bar"} =
         request :get, "/return_request_path/foo//bar"

  assert {200, _, "//return_request_path//foo//bar//"} =
         request :get, "//return_request_path//foo//bar//"
end

def headers(conn) do
  assert get_req_header(conn, "foo") == ["bar"]
  assert get_req_header(conn, "baz") == ["bat"]
  resp(conn, 200, "ok")
end

test "stores request headers" do
  assert {200, _, _} = request :get, "/headers", [{"foo", "bar"}, {"baz", "bat"}]
end

def send_200(conn) do
  assert conn.state == :unset
  assert conn.resp_body == nil
  conn = send_resp(conn, 200, "OK")
  assert conn.state == :sent
  assert conn.resp_body == nil
  conn
end

def send_418(conn) do
  send_resp(conn, 418, "")
end

def send_451(conn) do
  send_resp(conn, 451, "")
end

def send_500(conn) do
  conn
  |> delete_resp_header("cache-control")
  |> put_resp_header("x-sample", "value")
  |> send_resp(500, ["ERR", ["OR"]])
end

test "sends a response with status, headers and body" do
  assert {200, headers, "OK"} = request :get, "/send_200"
  assert List.keyfind(headers, "cache-control", 0) ==
         {"cache-control", "max-age=0, private, must-revalidate"}
  assert {500, headers, "ERROR"} = request :get, "/send_500"
  assert List.keyfind(headers, "cache-control", 0) == nil
  assert List.keyfind(headers, "x-sample", 0) ==
         {"x-sample", "value"}
end

# These two tests use hackney's async mode to read the raw status line, which
# is the only way to observe the custom reason phrases.
test "allows customized statuses based on config" do
  assert {451, _headers, ""} = request :get, "/send_451"
  {:ok, ref} = :hackney.get("http://127.0.0.1:8001/send_451", [], "", async: :once)
  assert_receive({:hackney_response, ^ref, {:status, 451, "Unavailable For Legal Reasons"}})
  :hackney.close(ref)
end

test "existing statuses can be customized" do
  assert {418, _headers, ""} = request :get, "/send_418"
  {:ok, ref} = :hackney.get("http://127.0.0.1:8001/send_418", [], "", async: :once)
  assert_receive({:hackney_response, ^ref, {:status, 418, "Totally not a teapot"}})
  :hackney.close(ref)
end

test "skips body on head" do
  assert {200, _, nil} = request :head, "/send_200"
end

def send_file(conn) do
  # Serves this very test file, so the body can be asserted against below.
  conn = send_file(conn, 200, __ENV__.file)
  assert conn.state == :sent
  assert conn.resp_body == nil
  conn
end

test "sends a file with status and headers" do
  assert {200, headers, body} = request :get, "/send_file"
  assert body =~ "sends a file with status and headers"
  assert List.keyfind(headers, "cache-control", 0) ==
         {"cache-control", "max-age=0, private, must-revalidate"}
  assert List.keyfind(headers, "content-length", 0) ==
         {"content-length", __ENV__.file |> File.stat!() |> Map.fetch!(:size) |> Integer.to_string()}
end

test "skips file on head" do
  assert {200, _, nil} = request :head, "/send_file"
end

def send_chunked(conn) do
  conn = send_chunked(conn, 200)
  assert conn.state == :chunked
  {:ok, conn} = chunk(conn, "HELLO\n")
  {:ok, conn} = chunk(conn, ["WORLD", ["\n"]])
  conn
end

test "sends a chunked response with status and headers" do
  assert {200, headers, "HELLO\nWORLD\n"} = request :get, "/send_chunked"
  assert List.keyfind(headers, "cache-control", 0) ==
         {"cache-control", "max-age=0, private, must-revalidate"}
  assert List.keyfind(headers, "transfer-encoding", 0) ==
         {"transfer-encoding", "chunked"}
end

def read_req_body(conn) do
  # 1MB body: large enough to require multiple reads under the hood.
  expected = :binary.copy("abcdefghij", 100_000)
  assert {:ok, ^expected, conn} = read_body(conn)
  assert {:ok, "", conn} = read_body(conn)
  resp(conn, 200, "ok")
end

def read_req_body_partial(conn) do
  assert {:more, _body, conn} = read_body(conn, length: 5, read_length: 5)
  resp(conn, 200, "ok")
end

test "reads body" do
  body = :binary.copy("abcdefghij", 100_000)
  assert {200, _, "ok"} = request :get, "/read_req_body", [], body
  assert {200, _, "ok"} = request :post, "/read_req_body", [], body
  assert {200, _, "ok"} = request :post, "/read_req_body_partial", [], body
end

def multipart(conn) do
  conn = Plug.Parsers.call(conn, parsers: [Plug.Parsers.MULTIPART], length: 8_000_000)
  assert conn.params["name"] == "hello"
  assert conn.params["status"] == ["choice1", "choice2"]
  assert conn.params["empty"] == nil

  assert %Plug.Upload{} = file = conn.params["pic"]
  assert File.read!(file.path) == "hello\n\n"
  assert file.content_type == "text/plain"
  assert file.filename == "foo.txt"

  resp(conn, 200, "ok")
end

test "parses multipart requests" do
  # Hand-built multipart payload; the \r markers keep the CRLF line endings
  # the multipart format requires.
  multipart = """
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name=\"name\"\r
  \r
  hello\r
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name=\"pic\"; filename=\"foo.txt\"\r
  Content-Type: text/plain\r
  \r
  hello

  \r
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name=\"empty\"; filename=\"\"\r
  Content-Type: application/octet-stream\r
  \r
  \r
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name="status[]"\r
  \r
  choice1\r
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name="status[]"\r
  \r
  choice2\r
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name=\"commit\"\r
  \r
  Create User\r
  ------w58EW1cEpjzydSCq--\r
  """

  headers =
    [{"Content-Type", "multipart/form-data; boundary=----w58EW1cEpjzydSCq"},
     {"Content-Length", byte_size(multipart)}]

  assert {200, _, _} = request :post, "/multipart", headers, multipart
  assert {200, _, _} = request :post, "/multipart?name=overriden", headers, multipart
end

def file_too_big(conn) do
  # length: 5 forces the parser to reject the uploaded file.
  conn = Plug.Parsers.call(conn, parsers: [Plug.Parsers.MULTIPART], length: 5)

  assert %Plug.Upload{} = file = conn.params["pic"]
  assert File.read!(file.path) == "hello\n\n"
  assert file.content_type == "text/plain"
  assert file.filename == "foo.txt"

  resp(conn, 200, "ok")
end

test "returns parse error when file pushed the boundaries in multipart requests" do
  multipart = """
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name=\"pic\"; filename=\"foo.txt\"\r
  Content-Type: text/plain\r
  \r
  hello

  \r
  ------w58EW1cEpjzydSCq--\r
  """

  headers =
    [{"Content-Type", "multipart/form-data; boundary=----w58EW1cEpjzydSCq"},
     {"Content-Length", byte_size(multipart)}]

  assert {500, _, body} = request :post, "/file_too_big", headers, multipart
  assert body =~ "the request is too large"
end

test "validates utf-8 on multipart requests" do
  multipart = """
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name=\"name\"\r
  \r
  #{<<139>>}\r
  ------w58EW1cEpjzydSCq\r
  """

  headers =
    [{"Content-Type", "multipart/form-data; boundary=----w58EW1cEpjzydSCq"},
     {"Content-Length", byte_size(multipart)}]

  assert {500, _, body} = request :post, "/multipart", headers, multipart
  assert body =~ "invalid UTF-8 on multipart body, got byte 139"
end

test "returns parse error when body is badly formatted in multipart requests" do
  multipart = """
  ------w58EW1cEpjzydSCq\r
  Content-Disposition: form-data; name=\"name\"\r
  ------w58EW1cEpjzydSCq\r
  """

  headers =
    [{"Content-Type", "multipart/form-data"},
     {"Content-Length", byte_size(multipart)}]

  assert {500, _, body} = request :post, "/multipart", headers, multipart
  assert body =~ "malformed request, a MatchError exception was raised with message " <>
                 ~s("no match of right hand side value: false")
end

def https(conn) do
  assert conn.scheme == :https
  send_resp(conn, 200, "OK")
end

# Self-signed fixtures checked into the repo; the key is password protected.
@https_options [
  port: 8002, password: "cowboy",
  keyfile: Path.expand("../../../fixtures/ssl/key.pem", __DIR__),
  certfile: Path.expand("../../../fixtures/ssl/cert.pem", __DIR__)
]

test "https" do
  {:ok, _pid} = Plug.Adapters.Cowboy.https __MODULE__, [], @https_options
  ssl_options = [ssl_options: [cacertfile: @https_options[:certfile]]]
  assert {:ok, 200, _headers, client} = :hackney.get("https://127.0.0.1:8002/https", [], "", ssl_options)
  assert {:ok, "OK"} = :hackney.body(client)
  :hackney.close(client)
after
  :ok = Plug.Adapters.Cowboy.shutdown __MODULE__.HTTPS
end

## Helpers

# HEAD requests have no body; hackney returns a 3-tuple in that case.
# NOTE(review): this request/2 clause coexists with the request/2 generated by
# the default arguments below — it relies on clause order; newer compilers
# warn about mixing multiple clauses with defaults.
defp request(:head = verb, path) do
  {:ok, status, headers} =
    :hackney.request(verb, "http://127.0.0.1:8001" <> path, [], "", [])
  {status, headers, nil}
end

# Issues `verb` against the test listener and returns {status, headers, body}.
defp request(verb, path, headers \\ [], body \\ "") do
  {:ok, status, headers, client} =
    :hackney.request(verb, "http://127.0.0.1:8001" <> path, headers, body, [])
  {:ok, body} = :hackney.body(client)
  :hackney.close(client)
  {status, headers, body}
end
end
| 31.274457 | 107 | 0.623338 |
1c4afe37c6ae7325d22590ed0461d5d3f0da838b | 68 | ex | Elixir | lib/gscraper_web/views/dashboard_view.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | null | null | null | lib/gscraper_web/views/dashboard_view.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | 25 | 2021-03-23T07:27:21.000Z | 2021-10-31T15:09:52.000Z | lib/gscraper_web/views/dashboard_view.ex | longnd/elixir-gscraper | 894570afd89e54b80ca591a56a182da55ac6ee61 | [
"MIT"
] | null | null | null | defmodule GscraperWeb.DashboardView do
# Standard Phoenix view for the dashboard: `use GscraperWeb, :view` pulls in
# the shared rendering helpers; no custom functions are defined here.
use GscraperWeb, :view
end
| 17 | 38 | 0.823529 |
1c4b0a803713fcb2c91cae34aacd569df9c41443 | 1,967 | ex | Elixir | elixir/lib/advent_of_code/day_04.ex | dfireBird/aoc_2020 | 0393d3c85202b5babe73bce71c1e83d0876c53f6 | [
"MIT"
] | null | null | null | elixir/lib/advent_of_code/day_04.ex | dfireBird/aoc_2020 | 0393d3c85202b5babe73bce71c1e83d0876c53f6 | [
"MIT"
] | null | null | null | elixir/lib/advent_of_code/day_04.ex | dfireBird/aoc_2020 | 0393d3c85202b5babe73bce71c1e83d0876c53f6 | [
"MIT"
] | null | null | null | defmodule AdventOfCode.Day04 do
@required_fields ~w(byr iyr eyr hgt hcl ecl pid)

# A passport is structurally valid when, ignoring the optional "cid" field,
# its key set is exactly the seven required fields.
def is_valid_keys(passport) do
  present =
    passport
    |> Map.keys()
    |> Enum.reject(&(&1 == "cid"))
    |> Enum.sort()

  present == Enum.sort(@required_fields)
end
# Year fields must be four characters and parse to an integer inside an
# inclusive range. The previous implementation used String.to_integer/1,
# which raised on non-numeric input; Integer.parse/1 lets malformed values
# fail validation instead of crashing.
def is_valid_byr(%{"byr" => byr}), do: valid_year?(byr, 1920..2002)
def is_valid_byr(_), do: false

def is_valid_iyr(%{"iyr" => iyr}), do: valid_year?(iyr, 2010..2020)
def is_valid_iyr(_), do: false

def is_valid_eyr(%{"eyr" => eyr}), do: valid_year?(eyr, 2020..2030)
def is_valid_eyr(_), do: false

# True when `value` is a fully numeric four-character year inside `range`.
defp valid_year?(value, range) do
  case Integer.parse(value) do
    {year, ""} -> String.length(value) == 4 and year in range
    _ -> false
  end
end
# Height must be an integer immediately followed by "cm" (150-193) or
# "in" (59-76). Integer.parse/1 (instead of String.to_integer/1) makes
# malformed numbers fail validation rather than raise.
def is_valid_hgt(%{"hgt" => hgt}) do
  # Negative index splits off the final two characters (the unit).
  {value, unit} = String.split_at(hgt, -2)

  case {Integer.parse(value), unit} do
    {{cm, ""}, "cm"} -> cm in 150..193
    {{inches, ""}, "in"} -> inches in 59..76
    _ -> false
  end
end

def is_valid_hgt(_), do: false
# Hair color must be "#" followed by exactly six lowercase hex digits. The
# previous regex was unanchored, so any 7-character string containing "#"
# plus at least one hex digit (e.g. "#abcxyz") incorrectly passed.
def is_valid_hcl(%{"hcl" => hcl}), do: hcl =~ ~r/^#[0-9a-f]{6}$/
def is_valid_hcl(_), do: false
@valid_eye_colors ~w(amb blu brn gry grn hzl oth)

# Eye color must be one of the seven allowed codes.
def is_valid_ecl(%{"ecl" => ecl}), do: ecl in @valid_eye_colors
def is_valid_ecl(_), do: false
# Passport id must be exactly nine digits. `Integer.parse(pid) != :error`
# only checked that the string *started* with a digit, so values such as
# "12345678a" slipped through; an anchored digit regex closes that hole.
def is_valid_pid(%{"pid" => pid}), do: pid =~ ~r/^[0-9]{9}$/
def is_valid_pid(_), do: false
# A passport is fully valid when every individual field validator passes.
# Enum.all?/2 short-circuits exactly like the original `&&` chain did.
def is_valid_passport(passport) do
  [
    &is_valid_keys/1,
    &is_valid_byr/1,
    &is_valid_iyr/1,
    &is_valid_eyr/1,
    &is_valid_hgt/1,
    &is_valid_hcl/1,
    &is_valid_ecl/1,
    &is_valid_pid/1
  ]
  |> Enum.all?(fn validator -> validator.(passport) end)
end
# Part 1: passports with all required fields present.
def part1(passports), do: Enum.count(passports, &is_valid_keys/1)

# Part 2: passports whose fields are present *and* individually valid.
def part2(passports), do: Enum.count(passports, &is_valid_passport/1)
# Splits the raw puzzle input into passport blocks (separated by blank lines)
# and turns each into a map of field name => value.
def input_to_map(txt) do
  for block <- String.split(txt, "\n\n") do
    block
    |> String.split()
    |> parse_passport()
    |> Map.new()
  end
end
# Turns ["byr:1990", ...] into [{"byr", "1990"}, ...]. A token without
# exactly one ":" separator raises MatchError, same as the original.
def parse_passport(pasport) do
  Enum.map(pasport, fn field ->
    [key, value] = String.split(field, ":")
    {key, value}
  end)
end
end
| 29.80303 | 99 | 0.611591 |
1c4b0a8e335f76224b716d6a32b3a686d37ba404 | 1,457 | ex | Elixir | lib/paushal_web/controllers/payment_slip_controller.ex | begedin/paushal | 4388e1b702e98008aef2d231816d6cdc6773803e | [
"MIT"
] | null | null | null | lib/paushal_web/controllers/payment_slip_controller.ex | begedin/paushal | 4388e1b702e98008aef2d231816d6cdc6773803e | [
"MIT"
] | 6 | 2021-03-20T17:24:05.000Z | 2021-03-20T17:41:19.000Z | lib/paushal_web/controllers/payment_slip_controller.ex | begedin/paushal | 4388e1b702e98008aef2d231816d6cdc6773803e | [
"MIT"
] | null | null | null | defmodule PaushalWeb.PaymentSlipController do
use PaushalWeb, :controller
alias Paushal.PaymentSlips
alias Paushal.PaymentSlips.PaymentSlip
action_fallback PaushalWeb.FallbackController
# GET /payment_slips — renders every payment slip.
def index(conn, _params) do
  render(conn, "index.json", payment_slips: PaymentSlips.list_payment_slips())
end
# POST /payment_slips — creates a slip from the "payment_slip" params.
# On success replies 201 with a Location header. A non-matching result
# (e.g. {:error, changeset}) falls through the `with` and is handled by
# PaushalWeb.FallbackController (see action_fallback above).
def create(conn, %{"payment_slip" => payment_slip_params}) do
  with {:ok, %PaymentSlip{} = payment_slip} <-
         PaymentSlips.create_payment_slip(payment_slip_params) do
    conn
    |> put_status(:created)
    |> put_resp_header("location", Routes.payment_slip_path(conn, :show, payment_slip))
    |> render("show.json", payment_slip: payment_slip)
  end
end
# GET /payment_slips/:id — raises (surfacing as 404) on an unknown id.
def show(conn, %{"id" => id}) do
  render(conn, "show.json", payment_slip: PaymentSlips.get_payment_slip!(id))
end
# PUT/PATCH /payment_slips/:id — fetches (raising on unknown id) and updates.
# An {:error, changeset} from the update falls through the `with` to the
# FallbackController.
def update(conn, %{"id" => id, "payment_slip" => payment_slip_params}) do
  payment_slip = PaymentSlips.get_payment_slip!(id)

  with {:ok, %PaymentSlip{} = payment_slip} <-
         PaymentSlips.update_payment_slip(payment_slip, payment_slip_params) do
    render(conn, "show.json", payment_slip: payment_slip)
  end
end
# DELETE /payment_slips/:id — replies 204 with an empty body on success;
# errors fall through to the FallbackController.
def delete(conn, %{"id" => id}) do
  payment_slip = PaymentSlips.get_payment_slip!(id)

  with {:ok, %PaymentSlip{}} <- PaymentSlips.delete_payment_slip(payment_slip) do
    send_resp(conn, :no_content, "")
  end
end
end
| 31.673913 | 89 | 0.706246 |
1c4b40fceb94f5cc46e7bc1b1f5cb67aae346eda | 2,596 | ex | Elixir | clients/display_video/lib/google_api/display_video/v1/model/search_targeting_options_request.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/display_video/lib/google_api/display_video/v1/model/search_targeting_options_request.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/display_video/lib/google_api/display_video/v1/model/search_targeting_options_request.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DisplayVideo.V1.Model.SearchTargetingOptionsRequest do
  @moduledoc """
  Request message for SearchTargetingOptions.

  ## Attributes

  *   `advertiserId` (*type:* `String.t`, *default:* `nil`) - Required. The Advertiser this request is being made in the context of.
  *   `geoRegionSearchTerms` (*type:* `GoogleApi.DisplayVideo.V1.Model.GeoRegionSearchTerms.t`, *default:* `nil`) - Search terms for geo region targeting options. Can only be used when targeting_type is `TARGETING_TYPE_GEO_REGION`.
  *   `pageSize` (*type:* `integer()`, *default:* `nil`) - Requested page size. Must be between `1` and `100`. If unspecified will default to `100`. Returns error code `INVALID_ARGUMENT` if an invalid value is specified.
  *   `pageToken` (*type:* `String.t`, *default:* `nil`) - A token identifying a page of results the server should return. Typically, this is the value of next_page_token returned from the previous call to `SearchTargetingOptions` method. If not specified, the first page of results will be returned.
  """

  # Generated model (see file header — do not edit by hand). The field/2,3
  # macros come from GoogleApi.Gax.ModelBase and define the struct keys plus
  # their JSON (de)serialization rules.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :advertiserId => String.t() | nil,
          :geoRegionSearchTerms => GoogleApi.DisplayVideo.V1.Model.GeoRegionSearchTerms.t() | nil,
          :pageSize => integer() | nil,
          :pageToken => String.t() | nil
        }

  field(:advertiserId)
  field(:geoRegionSearchTerms, as: GoogleApi.DisplayVideo.V1.Model.GeoRegionSearchTerms)
  field(:pageSize)
  field(:pageToken)
end

# Poison integration: decoding/encoding is delegated to the generated model
# helpers so nested models are handled consistently.
defimpl Poison.Decoder, for: GoogleApi.DisplayVideo.V1.Model.SearchTargetingOptionsRequest do
  def decode(value, options) do
    GoogleApi.DisplayVideo.V1.Model.SearchTargetingOptionsRequest.decode(value, options)
  end
end

defimpl Poison.Encoder, for: GoogleApi.DisplayVideo.V1.Model.SearchTargetingOptionsRequest do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.357143 | 300 | 0.742296 |
1c4b4185d26207d7f319d6734f0bfd043f2d75db | 2,693 | ex | Elixir | lib/oban/queue/executor.ex | luizpvas/oban | 6e41b15ed8fda628f991ad226f3855fa3bec5b33 | [
"Apache-2.0"
] | null | null | null | lib/oban/queue/executor.ex | luizpvas/oban | 6e41b15ed8fda628f991ad226f3855fa3bec5b33 | [
"Apache-2.0"
] | null | null | null | lib/oban/queue/executor.ex | luizpvas/oban | 6e41b15ed8fda628f991ad226f3855fa3bec5b33 | [
"Apache-2.0"
] | null | null | null | defmodule Oban.Queue.Executor do
@moduledoc false
alias Oban.{Config, Job, Query, Worker}
@spec child_spec(Job.t(), Config.t()) :: Supervisor.child_spec()
def child_spec(job, conf) do
  # `restart: :temporary` — an executor is never restarted by its supervisor;
  # retries are handled by re-enqueuing the job instead.
  %{
    id: __MODULE__,
    start: {__MODULE__, :start_link, [job, conf]},
    type: :worker,
    restart: :temporary
  }
end
@spec start_link(Job.t(), Config.t()) :: {:ok, pid()}
def start_link(%Job{} = job, %Config{} = conf) do
  # Executes call/2 in a Task linked to the caller (the queue's supervisor).
  Task.start_link(__MODULE__, :call, [job, conf])
end
@spec call(Job.t(), Config.t()) :: :success | :failure
def call(%Job{} = job, %Config{} = conf) do
  # Time the exception-safe execution in microseconds, then persist the
  # outcome and emit telemetry. safe_call/1 never raises, so exactly one of
  # the two branches below matches.
  {duration, return} = :timer.tc(__MODULE__, :safe_call, [job])

  case return do
    {:success, ^job} ->
      Query.complete_job(conf, job)
      report(:success, duration, job, %{})

    {:failure, ^job, kind, error, stack} ->
      # Schedule the retry using the worker's backoff and store a formatted,
      # blame-annotated error for later inspection.
      Query.retry_job(conf, job, worker_backoff(job), format_blamed(kind, error, stack))
      report(:failure, duration, job, %{kind: kind, error: error, stack: stack})
  end
end
# Runs the worker's perform/1 and converts every possible outcome — normal
# return, {:error, _} tuple, raised exception, throw or exit — into a
# {:success, job} or {:failure, job, kind, error, stack} tuple.
@doc false
def safe_call(%Job{args: args, worker: worker} = job) do
  worker
  |> to_module()
  |> apply(:perform, [args])
  |> case do
    {:error, error} ->
      # The worker signalled failure without raising; capture the current
      # stacktrace so the failure can still be reported with some context.
      {:current_stacktrace, stacktrace} = Process.info(self(), :current_stacktrace)
      {:failure, job, :error, error, stacktrace}

    _ ->
      {:success, job}
  end
rescue
  exception ->
    {:failure, job, :exception, exception, __STACKTRACE__}
catch
  kind, value ->
    # Also trap :throw and :exit so a misbehaving worker cannot take the
    # executor down without the job being recorded as failed.
    {:failure, job, kind, value, __STACKTRACE__}
end
# Helpers

# Resolves a stored worker name (a "My.Worker" binary or a module atom) to a
# module. Module.safe_concat/1 only re-uses existing atoms, so corrupt or
# malicious worker strings raise instead of minting new atoms.
defp to_module(worker) when is_binary(worker) do
  worker
  |> String.split(".")
  |> Module.safe_concat()
end

defp to_module(worker) when is_atom(worker), do: worker
# While it is slightly wasteful, we have to convert the worker to a module again outside of
# `safe_call/1`. There is a possibility that the worker module can't be found at all and we
# need to fall back to a default implementation.
defp worker_backoff(%Job{attempt: attempt, worker: worker}) do
  worker
  |> to_module()
  |> apply(:backoff, [attempt])
rescue
  # NOTE(review): this rescues the ArgumentError raised by Module.safe_concat/1
  # for unknown modules; a module that exists but exports no backoff/1 would
  # raise UndefinedFunctionError instead — confirm that case is covered
  # elsewhere (e.g. by the Worker behaviour's default implementation).
  ArgumentError -> Worker.default_backoff(attempt)
end
# Normalizes the failure kind (:exception -> :error) and renders a
# blame-annotated, human-readable message for storage alongside the job.
defp format_blamed(:exception, error, stack), do: format_blamed(:error, error, stack)

defp format_blamed(kind, error, stack) do
  {blamed, stack} = Exception.blame(kind, error, stack)
  Exception.format(kind, blamed, stack)
end
# Emits an [:oban, :success | :failure] telemetry event with the job's
# identifying fields merged into `meta`, then returns the event atom — which
# becomes call/2's return value.
defp report(event, duration, job, meta) do
  meta =
    job
    |> Map.take([:id, :args, :queue, :worker, :attempt, :max_attempts])
    |> Map.merge(meta)

  :telemetry.execute([:oban, event], %{duration: duration}, meta)
  event
end
end
| 26.93 | 93 | 0.629781 |
1c4b584bc9cdedf3eea0e6e20899293b9390a0de | 939 | exs | Elixir | priv/repo/migrations/017_create_unlock.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 44 | 2018-05-09T01:08:57.000Z | 2021-01-19T07:25:26.000Z | priv/repo/migrations/017_create_unlock.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 36 | 2018-05-08T23:59:54.000Z | 2018-09-28T13:50:30.000Z | priv/repo/migrations/017_create_unlock.exs | freshcom/freshcom-api | 4f2083277943cf4e4e8fd4c4d443c7309f285ad7 | [
"BSD-3-Clause"
] | 9 | 2018-05-09T14:09:19.000Z | 2021-03-21T21:04:04.000Z | defmodule BlueJet.Repo.Migrations.CreateUnlock do
use Ecto.Migration
def change do
  # An "unlock" grants a customer access to an unlockable within an account.
  # All foreign keys cascade on delete so unlocks disappear with their owner.
  create table(:unlocks, primary_key: false) do
    add :id, :binary_id, primary_key: true
    add :account_id, references(:accounts, type: :binary_id, on_delete: :delete_all), null: false
    add :unlockable_id, references(:unlockables, type: :binary_id, on_delete: :delete_all), null: false
    add :customer_id, references(:customers, type: :binary_id, on_delete: :delete_all), null: false

    # Polymorphic source reference — no database-level FK constraint.
    add :source_id, :binary_id
    add :source_type, :string

    add :sort_index, :integer, null: false
    add :custom_data, :map, null: false, default: "{}"
    add :translations, :map, null: false, default: "{}"

    timestamps()
  end

  # A customer can unlock a given unlockable at most once.
  create unique_index(:unlocks, [:customer_id, :unlockable_id])
  create index(:unlocks, [:account_id, :customer_id])
  create index(:unlocks, [:account_id, :unlockable_id])
end
end
| 33.535714 | 105 | 0.686901 |
1c4bac9ab88e361de8f3df370c276d39115a030e | 47 | exs | Elixir | test/game/manager_test.exs | drewfravert/2048 | cbe6b75635f7d2713277c3c60fb5147e256e73e3 | [
"MIT"
] | null | null | null | test/game/manager_test.exs | drewfravert/2048 | cbe6b75635f7d2713277c3c60fb5147e256e73e3 | [
"MIT"
] | 1 | 2021-10-18T21:44:37.000Z | 2021-10-18T21:44:37.000Z | test/game/manager_test.exs | drewfravert/2048 | cbe6b75635f7d2713277c3c60fb5147e256e73e3 | [
"MIT"
] | null | null | null | defmodule ManagerTest do
use ExUnit.Case
end
| 11.75 | 24 | 0.808511 |
1c4bb6fdda7aa486296fc69443ab3885e528799b | 1,549 | ex | Elixir | lib/codes/codes_d53.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_d53.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | lib/codes/codes_d53.ex | badubizzle/icd_code | 4c625733f92b7b1d616e272abc3009bb8b916c0c | [
"Apache-2.0"
] | null | null | null | defmodule IcdCode.ICDCode.Codes_D53 do
alias IcdCode.ICDCode
# Every D53.x code shares the "D53" category and uses one descriptive name
# for its full, short, and category labels, so a single private builder
# constructs the struct; the public accessors below are unchanged.
defp build_code(short_code, name) do
  %ICDCode{
    full_code: "D53" <> short_code,
    category_code: "D53",
    short_code: short_code,
    full_name: name,
    short_name: name,
    category_name: name
  }
end

def _D530, do: build_code("0", "Protein deficiency anemia")

def _D531, do: build_code("1", "Other megaloblastic anemias, not elsewhere classified")

def _D532, do: build_code("2", "Scorbutic anemia")

def _D538, do: build_code("8", "Other specified nutritional anemias")

def _D539, do: build_code("9", "Nutritional anemia, unspecified")
end
| 29.788462 | 80 | 0.613944 |
1c4bbcee555138706dfc7ef3695ce11a9293e982 | 757 | exs | Elixir | apps/snitch_core/test/tools/user_config_test.exs | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 456 | 2018-09-20T02:40:59.000Z | 2022-03-07T08:53:48.000Z | apps/snitch_core/test/tools/user_config_test.exs | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 273 | 2018-09-19T06:43:43.000Z | 2021-08-07T12:58:26.000Z | apps/snitch_core/test/tools/user_config_test.exs | Acrecio/avia | 54d264fc179b5b5f17d174854bdca063e1d935e9 | [
"MIT"
] | 122 | 2018-09-26T16:32:46.000Z | 2022-03-13T11:44:19.000Z | defmodule Snitch.Tools.UserConfigTest do
use ExUnit.Case, async: false
alias Snitch.Tools.UserConfig
describe "configured properly" do
  setup do
    # Seed the app env key that UserConfig reads; cleaned up after each test
    # so other (non-async) tests never observe it.
    Application.put_env(:snitch_core, :foo, [:bar, :baz])

    on_exit(fn ->
      Application.delete_env(:snitch_core, :foo)
    end)
  end

  test "fetch all" do
    # fetch/1 wraps a present value in {:ok, value}.
    assert {:ok, list} = UserConfig.fetch(:foo)
    assert length(list) == 2
  end

  test "get all" do
    # get/1 returns the raw configured value.
    list = UserConfig.get(:foo)
    assert length(list) == 2
  end
end
describe "not configured properly," do
  test "fetch none return error" do
    # fetch/1 returns a bare :error when the key is absent.
    assert :error = UserConfig.fetch(:foo)
  end

  test "get none" do
    # get/1 returns nil when the key is absent.
    list = UserConfig.get(:foo)
    assert is_nil(list)
  end
end
end
| 21.027778 | 59 | 0.626156 |
1c4bcb76b417a1cd60aa2c6cd2ea6b19141ea595 | 1,825 | exs | Elixir | projects/standup/priv/repo/migrations/19991231235959_seed.exs | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2020-02-11T06:00:11.000Z | 2020-02-11T06:00:11.000Z | projects/standup/priv/repo/migrations/19991231235959_seed.exs | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | 1 | 2017-09-23T19:41:29.000Z | 2017-09-25T05:12:38.000Z | projects/standup/priv/repo/migrations/19991231235959_seed.exs | erik/sketches | 0a454ada58dee6db576e93cb2216dd750290329e | [
"MIT"
] | null | null | null | defmodule Standup.Repo.Migrations.Seed do
use Ecto.Migration
def change do
# Users
create table(:users) do
add :email, :string
add :email_verified_at, :utc_datetime
timestamps()
end
create unique_index(:users, [:email])
# Authors
create table(:authors) do
add :name, :text
add :image_url, :text
add :bio, :text
add :location, :text
add :user_id, references(:users, on_delete: :delete_all),
null: false
timestamps()
end
create unique_index(:authors, [:user_id])
# Journals
create table(:journals) do
add :title, :text
add :description, :text
add :tags, {:array, :text}
add :public, :boolean, default: true, null: false
add :started_at, :utc_datetime
add :completed_at, :utc_datetime
add :author_id, references(:authors, on_delete: :nothing),
null: false
timestamps()
end
create index(:journals, [:author_id])
# Entries
create table(:entries) do
add :title, :text
add :body, :text
add :author_id, references(:authors, on_delete: :nothing),
null: false
add :journal_id, references(:journals, on_delete: :nothing),
null: false
timestamps()
end
create index(:entries, [:author_id])
create index(:entries, [:journal_id])
# Replies
create table(:replies) do
add :body, :text
add :user_id, references(:users, on_delete: :nothing),
null: false
add :entry_id, references(:entries, on_delete: :nothing),
null: false
add :parent_id, references(:replies, on_delete: :nothing)
timestamps()
end
create index(:replies, [:user_id])
create index(:replies, [:entry_id])
create index(:replies, [:parent_id])
end
end
| 21.987952 | 66 | 0.613151 |
1c4bcdb2444ec47910f7a0d1dadb5582a13a96e4 | 2,307 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/ingestion_info.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/ingestion_info.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/ingestion_info.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.IngestionInfo do
  @moduledoc """
  Describes information necessary for ingesting an RTMP or an HTTP stream.

  ## Attributes

  - backupIngestionAddress (String.t): The backup ingestion URL that you should use to stream video to YouTube. You have the option of simultaneously streaming the content that you are sending to the ingestionAddress to this URL. Defaults to: `null`.
  - ingestionAddress (String.t): The primary ingestion URL that you should use to stream video to YouTube. You must stream video to this URL. Depending on which application or tool you use to encode your video stream, you may need to enter the stream URL and stream name separately or you may need to concatenate them in the following format: STREAM_URL/STREAM_NAME Defaults to: `null`.
  - streamName (String.t): The HTTP or RTMP stream name that YouTube assigns to the video stream. Defaults to: `null`.
  """

  # Generated model: `use` brings in the `field/1` macro plus the shared
  # decode/encode plumbing delegated to by the defimpls below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :backupIngestionAddress => any(),
          :ingestionAddress => any(),
          :streamName => any()
        }

  field(:backupIngestionAddress)
  field(:ingestionAddress)
  field(:streamName)
end
# Poison JSON decoding delegates to the generated model's own decode/2
# (provided by GoogleApi.Gax.ModelBase).
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.IngestionInfo do
  def decode(value, options) do
    GoogleApi.YouTube.V3.Model.IngestionInfo.decode(value, options)
  end
end
# Poison JSON encoding delegates to the shared ModelBase encoder.
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.IngestionInfo do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 42.722222 | 388 | 0.753359 |
1c4bd42f4a11135ebbaffdb1f79a63cd383ca08d | 2,343 | ex | Elixir | lib/schism.ex | keathley/schism | 23fe89d0e503ba7a31a17effb5b290007d71a3e1 | [
"MIT"
] | 54 | 2019-02-17T15:24:42.000Z | 2021-09-29T13:21:56.000Z | lib/schism.ex | keathley/schism | 23fe89d0e503ba7a31a17effb5b290007d71a3e1 | [
"MIT"
] | 10 | 2020-01-23T16:20:11.000Z | 2021-09-22T10:13:22.000Z | lib/schism.ex | elixir-toniq/schism | a04e59332a1d62f8d3ff719a16f9ec13629b9917 | [
"MIT"
] | 2 | 2019-05-19T05:26:20.000Z | 2020-01-29T15:33:03.000Z | defmodule Schism do
@moduledoc """
Schism allows you to create network partitions in erlang nodes without
needing to leave elixir.
Let's say that we have 5 nodes and we want to test what happens when they
disconnect from each other. We can use Schism like so:
```elixir
test "netsplits" do
[n1, n2, n3, n4, n5] = nodes
# Partition our nodes
Schism.partition([n1, n3])
Schism.partition([n4])
Schism.partition([n2, n5])
# Test some stuff...
# Heal our partitions
Schism.heal([n1, n3])
Schism.heal([n2, n4, n5])
end
```
This api is useful for testing and development in conjunction with tools like
[local cluster](https://github.com/whitfin/local-cluster) and
[propcheck](https://github.com/alfert/propcheck).
"""
@doc """
Creates a partition amongst a set of nodes. Any nodes in the partition
will be able to see each other but no other nodes in the network. The
partitioned nodes will still be able to see the node that induced the
partition. Otherwise we would not be able to heal the partition.
"""
@spec partition([Node.t], String.t) :: [Node.t] | none()
def partition(nodes, id \\ random_string()) when is_binary(id) do
  # The coordinating node (this one) must stay connected to every member,
  # otherwise the partition could never be healed.
  coordinator = Node.self()

  Enum.each(nodes, fn node ->
    # Disconnect the node from everything except the coordinator.
    peers = :rpc.call(node, Node, :list, []) -- [coordinator]
    Enum.each(peers, fn peer -> :rpc.call(node, Node, :disconnect, [peer]) end)

    # Give the node a partition-specific cookie so only members of this
    # partition (and the coordinator) can reconnect to it.
    true = :rpc.call(node, :erlang, :set_cookie, [node, String.to_atom(id)])

    # Assert we can still reach the node after the cookie change.
    :pong = Node.ping(node)
  end)

  # Now that all members share the cookie, reconnect them to each other.
  connect_nodes(nodes)
  nodes
end
@doc """
Re-connects the nodes to the cluster.
"""
@spec heal([Node.t]) :: [Node.t] | none()
def heal(nodes) do
  # "Partitioning" with the cluster's real cookie restores full connectivity.
  cluster_cookie = Atom.to_string(:erlang.get_cookie())
  partition(nodes, cluster_cookie)
end
# Connects every node to every node that follows it in the list, forming a
# full mesh among the partition members.
defp connect_nodes([]), do: :ok

defp connect_nodes([node | rest]) do
  Enum.each(rest, fn peer -> :rpc.call(node, Node, :connect, [peer]) end)
  connect_nodes(rest)
end
# Ten URL-safe characters used as a partition id (and thus an Erlang cookie).
defp random_string do
  10
  |> :crypto.strong_rand_bytes()
  |> Base.url_encode64()
  |> binary_part(0, 10)
end
end
| 28.925926 | 83 | 0.665813 |
1c4bd9ed92f9f29868de4900a3ff9e3d9aec1202 | 732 | exs | Elixir | mix.exs | bus-detective/bus_detective_ng | ef54684d4f640384bd20a4d5550ff51ab440190b | [
"MIT"
] | 8 | 2018-07-06T14:44:10.000Z | 2021-08-19T17:24:25.000Z | mix.exs | bus-detective/bus_detective_ng | ef54684d4f640384bd20a4d5550ff51ab440190b | [
"MIT"
] | 12 | 2018-07-15T18:43:04.000Z | 2022-02-10T16:07:47.000Z | mix.exs | bus-detective/bus_detective_ng | ef54684d4f640384bd20a4d5550ff51ab440190b | [
"MIT"
] | 1 | 2018-07-13T17:30:20.000Z | 2018-07-13T17:30:20.000Z | defmodule BusDetective.Umbrella.Mixfile do
use Mix.Project
# Umbrella-wide project configuration (child apps live under apps/).
def project do
  [
    aliases: aliases(Mix.env()),
    apps_path: "apps",
    deps: deps(),
    # Run the coverage-related tasks in the :test environment by default.
    preferred_cli_env: [
      coveralls: :test,
      "coveralls.detail": :test,
      "coveralls.travis": :test,
      "coveralls.html": :test
    ],
    start_permanent: Mix.env() == :prod,
    test_coverage: [tool: ExCoveralls]
  ]
end
# Outside of dev, compilation warnings are treated as errors to keep CI strict.
defp aliases(:dev), do: []
defp aliases(_env), do: [compile: "compile --warnings-as-errors"]
# Dependencies listed here are available only for this project
# and cannot be accessed from applications inside the apps folder
defp deps do
  # Coverage reporting for the umbrella test task only.
  [{:excoveralls, "~> 0.8", only: :test}]
end
end
| 22.875 | 67 | 0.61612 |
1c4bdccccd87e2544f968f4976617f08a6fba770 | 35,562 | exs | Elixir | test/oli/delivery/submission_test.exs | argos-education/oli-torus | cff73a277f80c8071217a074d9d8b650a9c068e5 | [
"MIT"
] | 1 | 2022-03-17T20:35:47.000Z | 2022-03-17T20:35:47.000Z | test/oli/delivery/submission_test.exs | argos-education/oli-torus | cff73a277f80c8071217a074d9d8b650a9c068e5 | [
"MIT"
] | 1 | 2022-03-25T13:46:08.000Z | 2022-03-25T16:06:44.000Z | test/oli/delivery/submission_test.exs | marc-hughes/oli-torus-1 | aa3c9bb2d91b678a365be839761eaf86c60ee35c | [
"MIT"
] | null | null | null | defmodule Oli.Delivery.AttemptsSubmissionTest do
use Oli.DataCase
alias Oli.Delivery.Attempts.Core, as: Attempts
alias Oli.Activities.Model.Part
alias Oli.Delivery.Attempts.PageLifecycle
alias Oli.Delivery.Attempts.ActivityLifecycle
alias Oli.Delivery.Attempts.ActivityLifecycle.Evaluate
alias Oli.Delivery.Attempts.Core.{ResourceAccess, ActivityAttempt, PartAttempt, StudentInput}
alias Oli.Delivery.Snapshots.Snapshot
alias Oli.Delivery.Page.PageContext
alias Oli.Delivery.Student.Summary
describe "concurrent activity accesses with two students" do
setup do
  # One-part activity evaluated with regex rules:
  # input "a" -> 10/10 ("yes"), "b" -> 1 ("almost"), "c" -> 0 ("no").
  content = %{
    "stem" => "1",
    "authoring" => %{
      "parts" => [
        %{
          "id" => "1",
          "responses" => [
            %{
              "rule" => "input like {a}",
              "score" => 10,
              "id" => "r1",
              "feedback" => %{"id" => "1", "content" => "yes"}
            },
            %{
              "rule" => "input like {b}",
              "score" => 1,
              "id" => "r2",
              "feedback" => %{"id" => "2", "content" => "almost"}
            },
            %{
              "rule" => "input like {c}",
              "score" => 0,
              "id" => "r3",
              "feedback" => %{"id" => "3", "content" => "no"}
            }
          ],
          "scoringStrategy" => "best",
          "evaluationStrategy" => "regex"
        }
      ]
    }
  }

  # Section with two activities (5 attempts each) and two independent users.
  map =
    Seeder.base_project_with_resource2()
    |> Seeder.create_section()
    |> Seeder.add_objective("objective one", :o1)
    |> Seeder.add_activity(%{title: "one", max_attempts: 5, content: content}, :activity1)
    |> Seeder.add_activity(%{title: "two", max_attempts: 5, content: content}, :activity2)
    |> Seeder.add_user(%{}, :user1)
    |> Seeder.add_user(%{}, :user2)

  Seeder.ensure_published(map.publication.id)

  # Graded page referencing both activities; resolved into section resources.
  Seeder.add_page(
    map,
    %{
      title: "graded page",
      content: %{
        "model" => [
          %{
            "type" => "activity-reference",
            "activity_id" => Map.get(map, :activity1).revision.resource_id
          },
          %{
            "type" => "activity-reference",
            "activity_id" => Map.get(map, :activity2).revision.resource_id
          }
        ]
      },
      objectives: %{"attached" => [Map.get(map, :o1).resource.id]},
      graded: true
    },
    :container,
    :graded_page
  )
  |> Seeder.create_section_resources()
end
# Exercises two users against the same graded page: user1 starts an attempt
# and saves (but does not submit) input; user2 must still be able to start
# a fresh attempt, answer both activities, and finalize independently.
test "graded page : determine_resource_attempt_state works with 2 users after user1 has started a page and user2 has not",
     %{
       graded_page: %{resource: resource, revision: revision},
       user1: user1,
       user2: user2,
       section: section
     } do
  # View index
  {:ok, _summary} = Summary.get_summary(section.slug, user1)

  # Open the graded page as user 1 to get the prologue
  user1_page_context = PageContext.create_for_visit(section, revision.slug, user1)
  assert user1_page_context.progress_state == :not_started
  assert Enum.empty?(user1_page_context.resource_attempts)

  # Start the attempt and go into the assessment
  activity_provider = &Oli.Delivery.ActivityProvider.provide/3

  {:ok,
   %Oli.Delivery.Attempts.PageLifecycle.AttemptState{
     resource_attempt: user1_resource_attempt,
     attempt_hierarchy: user1_activity_attempts
   }} = PageLifecycle.start(revision.slug, section.slug, user1.id, activity_provider)

  # Save an activity part on the page but do not submit it.
  # The attempt hierarchy is a map of activity id -> {attempt, part map};
  # we dig out the first part attempt's guid from the first activity.
  {:ok, {:ok, 1}} =
    ActivityLifecycle.save_student_input([
      %{
        attempt_guid:
          user1_activity_attempts
          |> Map.values()
          |> hd
          |> elem(1)
          |> Map.values()
          |> hd
          |> Map.get(:attempt_guid),
        response: %{input: "a"}
      }
    ])

  # Make sure the latest resource attempt is still correct
  user1_latest_resource_attempt =
    Attempts.get_latest_resource_attempt(resource.id, section.slug, user1.id)

  assert user1_latest_resource_attempt.id == user1_resource_attempt.id

  # Make sure the progress state is correct for the latest resource attempt
  assert PageContext.create_for_visit(section, revision.slug, user1).progress_state ==
           :in_progress

  # Now we have an "in progress" resource attempt for student 1 with a saved
  # student input, so the resource is partially completed.

  # User 2
  {:ok, _summary2} = Summary.get_summary(section.slug, user2)

  # Access the graded page with user2 — no attempt should exist for them yet.
  assert is_nil(Attempts.get_latest_resource_attempt(resource.id, section.slug, user2.id))

  user2_page_context = PageContext.create_for_visit(section, revision.slug, user2)
  assert user2_page_context.progress_state == :not_started
  assert Enum.count(user2_page_context.resource_attempts) == 0

  {:ok,
   %Oli.Delivery.Attempts.PageLifecycle.AttemptState{
     resource_attempt: user2_resource_attempt,
     attempt_hierarchy: user2_activity_attempts
   }} = PageLifecycle.start(revision.slug, section.slug, user2.id, activity_provider)

  # Save attempts for both activities
  ActivityLifecycle.save_student_input([
    %{
      attempt_guid:
        user2_activity_attempts
        |> Map.values()
        |> hd
        |> elem(1)
        |> Map.values()
        |> hd
        |> Map.get(:attempt_guid),
      response: %{input: "a"}
    }
  ])

  ActivityLifecycle.save_student_input([
    %{
      attempt_guid:
        user2_activity_attempts
        |> Map.values()
        |> tl
        |> hd
        |> elem(1)
        |> Map.values()
        |> hd
        |> Map.get(:attempt_guid),
      response: %{input: "a"}
    }
  ])

  # Make sure user 2 can submit the page
  {:ok, access} = PageLifecycle.finalize(section.slug, user2_resource_attempt.attempt_guid)
  access = Repo.preload(access, [:resource_attempts])
  assert !is_nil(hd(access.resource_attempts).date_evaluated)
end
end
describe "resetting an activity" do
setup do
  # One-part activity evaluated with regex rules; input "a" scores 10/10.
  content = %{
    "stem" => "1",
    "authoring" => %{
      "parts" => [
        %{
          "id" => "1",
          "responses" => [
            %{
              "rule" => "input like {a}",
              "score" => 10,
              "id" => "r1",
              "feedback" => %{"id" => "1", "content" => "yes"}
            },
            %{
              "rule" => "input like {b}",
              "score" => 1,
              "id" => "r2",
              "feedback" => %{"id" => "2", "content" => "almost"}
            },
            %{
              "rule" => "input like {c}",
              "score" => 0,
              "id" => "r3",
              "feedback" => %{"id" => "3", "content" => "no"}
            }
          ],
          "scoringStrategy" => "best",
          "evaluationStrategy" => "regex"
        }
      ]
    }
  }

  # Single activity deliberately capped at TWO attempts — the reset tests
  # below rely on exhausting this limit.
  map =
    Seeder.base_project_with_resource2()
    |> Seeder.create_section()
    |> Seeder.add_objective("objective one", :o1)
    |> Seeder.add_activity(%{title: "one", max_attempts: 2, content: content}, :activity)
    |> Seeder.add_user(%{}, :user1)
    |> Seeder.add_user(%{}, :user2)

  Seeder.ensure_published(map.publication.id)

  # One ungraded page and one graded page, both referencing the activity,
  # followed by pre-seeded resource/activity/part attempts for each user.
  Seeder.add_page(
    map,
    %{
      title: "page1",
      content: %{
        "model" => [
          %{
            "type" => "activity-reference",
            "activity_id" => Map.get(map, :activity).revision.resource_id
          }
        ]
      },
      objectives: %{"attached" => [Map.get(map, :o1).resource.id]}
    },
    :ungraded_page
  )
  |> Seeder.add_page(
    %{
      title: "page2",
      content: %{
        "model" => [
          %{
            "type" => "activity-reference",
            "activity_id" => Map.get(map, :activity).revision.resource_id
          }
        ]
      },
      objectives: %{"attached" => [Map.get(map, :o1).resource.id]},
      graded: true
    },
    :graded_page
  )
  |> Seeder.create_section_resources()
  # Ungraded page ("page1" / :page1) attempts
  |> Seeder.create_resource_attempt(
    %{attempt_number: 1},
    :user1,
    :ungraded_page,
    :ungraded_page_user1_attempt1
  )
  |> Seeder.create_activity_attempt(
    %{attempt_number: 1, transformed_model: content},
    :activity,
    :ungraded_page_user1_attempt1,
    :ungraded_page_user1_activity_attempt1
  )
  |> Seeder.create_part_attempt(
    %{attempt_number: 1},
    %Part{id: "1", responses: [], hints: []},
    :ungraded_page_user1_activity_attempt1,
    :ungraded_page_user1_activity_attempt1_part1_attempt1
  )
  # Graded page ("page2" / :graded_page) attempts
  |> Seeder.create_resource_attempt(
    %{attempt_number: 1},
    :user1,
    :graded_page,
    :graded_page_user1_attempt1
  )
  |> Seeder.create_activity_attempt(
    %{attempt_number: 1, transformed_model: content},
    :activity,
    :graded_page_user1_attempt1,
    :user1_activity_attempt1
  )
  |> Seeder.create_part_attempt(
    %{attempt_number: 1},
    %Part{id: "1", responses: [], hints: []},
    :user1_activity_attempt1,
    :user1_part1_attempt1
  )
  |> Seeder.create_resource_attempt(
    %{attempt_number: 1},
    :user2,
    :graded_page,
    :graded_page_user2_attempt1
  )
  |> Seeder.create_activity_attempt(
    %{attempt_number: 1, transformed_model: content},
    :activity,
    :graded_page_user2_attempt1,
    :user2_activity_attempt1
  )
  |> Seeder.create_part_attempt(
    %{attempt_number: 1},
    %Part{id: "1", responses: [], hints: []},
    :user2_activity_attempt1,
    :user2_part1_attempt1
  )
end
test "ungraded page: get_latest_resource_attempt gives an unevaluated attempt with 1 user", %{
  ungraded_page: %{resource: resource},
  user1: user1,
  section: section,
  ungraded_page_user1_attempt1: resource_attempt1
} do
  resource_attempt = Attempts.get_latest_resource_attempt(resource.id, section.slug, user1.id)

  # The seeded attempt is still open, so it carries no evaluation or score.
  assert resource_attempt.id == resource_attempt1.id
  assert is_nil(resource_attempt.date_evaluated)
  assert is_nil(resource_attempt.score)
end
test "Graded page: get_latest_resource_attempt gives the correct resource attempts with 2 users",
     %{
       graded_page: %{resource: resource},
       user1: user1,
       user2: user2,
       section: section,
       graded_page_user1_attempt1: resource_attempt1,
       graded_page_user2_attempt1: resource_attempt2
     } do
  resource_attempt_user1 =
    Attempts.get_latest_resource_attempt(resource.id, section.slug, user1.id)

  resource_attempt_user2 =
    Attempts.get_latest_resource_attempt(resource.id, section.slug, user2.id)

  # Each user gets their own seeded attempt back, and the two never collide.
  assert resource_attempt1.id == resource_attempt_user1.id
  assert resource_attempt2.id == resource_attempt_user2.id
  assert resource_attempt1.id != resource_attempt2.id
end
# Walks a single user's graded-page lifecycle: in-progress -> evaluate parts
# -> finalize -> subsequent visit reports :not_started again.
test "determine_resource_attempt_state works for graded pages with 1 user", %{
  graded_page: %{revision: revision},
  user1: user1,
  section: section,
  user1_part1_attempt1: part_attempt,
  user1_activity_attempt1: activity_attempt,
  graded_page_user1_attempt1: resource_attempt1
} do
  activity_provider = &Oli.Delivery.ActivityProvider.provide/3

  # User1 has a started resource attempt, so it should be "in progress"
  {:ok, {:in_progress, _resource_attempt}} =
    PageLifecycle.visit(
      revision,
      section.slug,
      user1.id,
      activity_provider
    )

  part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}}]

  # Evaluate the parts to allow the graded page to be submitted
  {:ok, _evals} =
    Evaluate.evaluate_from_input(
      section.slug,
      activity_attempt.attempt_guid,
      part_inputs
    )

  # Submit the page to toggle it from "in progress" to completed
  PageLifecycle.finalize(section.slug, resource_attempt1.attempt_guid)

  # determine_resource_attempt_state should no longer retrieve the previously in progress attempt
  {:ok, {:not_started, _resource_attempt}} =
    PageLifecycle.visit(
      revision,
      section.slug,
      user1.id,
      activity_provider
    )
end
test "parts can only be submitted once", %{
  graded_page: %{revision: revision},
  user1: user1,
  section: section,
  user1_part1_attempt1: part_attempt,
  user1_activity_attempt1: activity_attempt
} do
  activity_provider = &Oli.Delivery.ActivityProvider.provide/3

  {:ok, {:in_progress, _resource_attempt}} =
    PageLifecycle.visit(
      revision,
      section.slug,
      user1.id,
      activity_provider
    )

  part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}}]

  # The first submission evaluates normally...
  {:ok, _evals} =
    Evaluate.evaluate_from_input(
      section.slug,
      activity_attempt.attempt_guid,
      part_inputs
    )

  # ...but resubmitting the same, already-evaluated parts is rejected.
  {:error, "nothing to process"} =
    Evaluate.evaluate_from_input(
      section.slug,
      activity_attempt.attempt_guid,
      part_inputs
    )
end
# Same lifecycle as the 1-user test above, run sequentially for two users to
# verify their graded-page attempts are fully independent.
test "determine_resource_attempt_state works for graded pages with 2 users when user1 submits a page and user2 submits submits it afterwards",
     %{
       graded_page: %{revision: revision},
       user1: user1,
       section: section,
       user1_part1_attempt1: user1_part1_attempt1,
       user2_part1_attempt1: user2_part1_attempt1,
       user1_activity_attempt1: user1_activity_attempt,
       user2_activity_attempt1: user2_activity_attempt,
       graded_page_user1_attempt1: user1_resource_attempt1,
       graded_page_user2_attempt1: user2_resource_attempt1,
       user2: user2
     } do
  activity_provider = &Oli.Delivery.ActivityProvider.provide/3

  # User 1: evaluate, finalize, then confirm a fresh visit is :not_started.
  {:ok, {:in_progress, resource_attempt_user1}} =
    PageLifecycle.visit(
      revision,
      section.slug,
      user1.id,
      activity_provider
    )

  part_inputs = [
    %{attempt_guid: user1_part1_attempt1.attempt_guid, input: %StudentInput{input: "a"}}
  ]

  {:ok, _evals} =
    Evaluate.evaluate_from_input(
      section.slug,
      user1_activity_attempt.attempt_guid,
      part_inputs
    )

  PageLifecycle.finalize(section.slug, user1_resource_attempt1.attempt_guid)

  {:ok, {:not_started, _resource_attempt}} =
    PageLifecycle.visit(
      revision,
      section.slug,
      user1.id,
      activity_provider
    )

  # User 2: still sees their own in-progress attempt.
  {:ok, {:in_progress, resource_attempt_user2}} =
    PageLifecycle.visit(
      revision,
      section.slug,
      user2.id,
      activity_provider
    )

  # Make sure we're looking at a different resource attempt for the second user
  assert resource_attempt_user1 != resource_attempt_user2

  part_inputs = [
    %{attempt_guid: user2_part1_attempt1.attempt_guid, input: %StudentInput{input: "a"}}
  ]

  {:ok, _evals} =
    Evaluate.evaluate_from_input(
      section.slug,
      user2_activity_attempt.attempt_guid,
      part_inputs
    )

  PageLifecycle.finalize(section.slug, user2_resource_attempt1.attempt_guid)

  {:ok, {:not_started, _resource_attempt}} =
    PageLifecycle.visit(
      revision,
      section.slug,
      user2.id,
      activity_provider
    )
end
# save_student_input/1 is what persists input in a graded assessment before
# the page is submitted; repeated saves of the same part each succeed and
# report one updated part (upsert-style behavior).
test "can save student inputs and receive a count", %{user1_part1_attempt1: part_attempt} do
  part_inputs = [
    %{
      attempt_guid: part_attempt.attempt_guid,
      response: %{input: "a"}
    }
  ]

  # The part can be saved once
  assert {:ok, {:ok, 1}} = ActivityLifecycle.save_student_input(part_inputs)

  # The part can be saved again
  assert {:ok, {:ok, 1}} = ActivityLifecycle.save_student_input(part_inputs)
end
# End-to-end ungraded submission: evaluation result, part/activity rollup,
# reset behavior when max_attempts (2) is exhausted, and snapshot creation.
test "processing a submission", %{
  activity: %{revision: activity_revision},
  ungraded_page: %{revision: page_revision},
  user1: user,
  ungraded_page_user1_activity_attempt1_part1_attempt1: part_attempt,
  section: section,
  ungraded_page_user1_activity_attempt1: activity_attempt
} do
  part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}}]

  {:ok, [%{attempt_guid: attempt_guid, out_of: out_of, score: score, feedback: %{id: id}}]} =
    Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)

  # verify the returned feedback was what we expected ("a" matches r1: 10/10)
  assert attempt_guid == part_attempt.attempt_guid
  assert score == 10
  assert out_of == 10
  assert id == "1"

  # verify the part attempt record was updated correctly
  updated_attempt = Oli.Repo.get!(PartAttempt, part_attempt.id)
  assert updated_attempt.score == 10
  assert updated_attempt.out_of == 10
  refute updated_attempt.date_evaluated == nil

  # verify that the submission rolled up to the activity attempt (normalized to 1.0)
  updated_attempt = Oli.Repo.get!(ActivityAttempt, activity_attempt.id)
  assert updated_attempt.score == 1.0
  assert updated_attempt.out_of == 1.0
  refute updated_attempt.date_evaluated == nil

  # now reset the activity
  {:ok, {attempt_state, _}} =
    ActivityLifecycle.reset_activity(section.slug, activity_attempt.attempt_guid)

  assert attempt_state.dateEvaluated == nil
  assert attempt_state.score == nil
  assert attempt_state.outOf == nil
  assert length(attempt_state.parts) == 1
  assert hd(attempt_state.parts).response == nil
  assert attempt_state.hasMoreAttempts == false

  # now try to reset when there are no more attempts
  assert {:error, {:no_more_attempts}} ==
           ActivityLifecycle.reset_activity(section.slug, attempt_state.attemptGuid)

  # verify that a snapshot record was created properly
  [%Snapshot{} = snapshot] = Oli.Repo.all(Snapshot)
  assert snapshot.score == 10
  assert snapshot.out_of == 10
  assert snapshot.graded == false
  assert snapshot.part_attempt_id == part_attempt.id
  assert snapshot.part_attempt_number == 1
  assert snapshot.attempt_number == 1
  assert snapshot.resource_attempt_number == 1
  assert snapshot.section_id == section.id
  assert snapshot.user_id == user.id
  assert snapshot.activity_id == updated_attempt.resource_id
  assert snapshot.resource_id == page_revision.resource_id
  assert snapshot.revision_id == activity_revision.id
end
# Verifies finalize clamps/repairs an invalid activity-level score (score >
# out_of) so the recorded resource access grade stays valid.
test "processing a submission where there are invalid scores", %{
  graded_page_user1_attempt1: resource_attempt1,
  user1_part1_attempt1: part_attempt,
  section: section,
  user1_activity_attempt1: activity_attempt
} do
  part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}}]

  {:ok, _} =
    Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)

  # verify the part attempt record was updated correctly
  updated_attempt = Oli.Repo.get!(PartAttempt, part_attempt.id)
  assert updated_attempt.score == 10
  assert updated_attempt.out_of == 10
  refute updated_attempt.date_evaluated == nil

  # verify that the submission rolled up to the activity attempt
  updated_attempt = Oli.Repo.get!(ActivityAttempt, activity_attempt.id)
  assert updated_attempt.score == 1.0
  assert updated_attempt.out_of == 1.0
  refute updated_attempt.date_evaluated == nil

  # Now simulate something having gone wrong, perhaps a rogue activity using
  # client-side eval, or some other future bug in the system leading to an invalid
  # score for the activity attempt
  Attempts.update_activity_attempt(updated_attempt, %{score: 2.0, out_of: 1.0})

  PageLifecycle.finalize(section.slug, resource_attempt1.attempt_guid)

  # Verify a valid grade was recorded despite the invalid grade at the activity attempt
  ra = Oli.Repo.get(ResourceAccess, resource_attempt1.resource_access_id)
  assert ra.score == 1.0
  assert ra.out_of == 1.0
end
test "handling reset attempts that request preservation of last attempt state", %{
  ungraded_page_user1_activity_attempt1_part1_attempt1: part_attempt,
  section: section,
  ungraded_page_user1_activity_attempt1: activity_attempt
} do
  # submit activity with a known state for the part attempt
  part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}}]

  {:ok, _} =
    Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)

  # now reset the activity, requesting seed_state_from_previous to be true
  # (the third argument), which carries the prior response into the new attempt
  {:ok, {%{parts: [part_attempt]}, _}} =
    ActivityLifecycle.reset_activity(section.slug, activity_attempt.attempt_guid, true)

  assert part_attempt.response == %{"input" => "a", "files" => nil}
end
# This test case ensures that the following scenario works correctly:
#
# 1. Student opens a resource with an activity that has a maximum of TWO attempts in window tab A.
# 2. Student submits a response (exhausting attempt 1).
# 3. Student opens a new window tab B with the same resource. This generates a new activity attempt (attempt 2)
# 4. Student submits a response for tab B. (exhausting attempt 2)
# 5. Student clicks "Reset" in tab A. This should be rejected.
test "handling concurrent reset attempts", %{
  ungraded_page_user1_activity_attempt1_part1_attempt1: part_attempt,
  section: section,
  ungraded_page_user1_activity_attempt1: activity_attempt
} do
  # Submit in tab A:
  part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}}]

  {:ok, _} =
    Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)

  # now reset the activity, this is a simulation of the student
  # opening the resource in tab B.
  {:ok, {attempt_state, _}} =
    ActivityLifecycle.reset_activity(section.slug, activity_attempt.attempt_guid)

  assert attempt_state.hasMoreAttempts == false

  # now try to reset the guid from the first attempt, simulating the
  # student clicking 'Reset' in tab A.
  assert {:error, {:no_more_attempts}} ==
           ActivityLifecycle.reset_activity(section.slug, activity_attempt.attempt_guid)
end
end
  describe "processing a one part submission" do
    # Seeds a section with one ungraded page containing a single activity.
    # The activity has one part ("1") evaluated via regex rules:
    #   input "a" -> score 10 (feedback id "1")
    #   input "b" -> score 1  (feedback id "2")
    #   input "c" -> score 0  (feedback id "3")
    # using the "best" scoring strategy. Also creates a user plus the chain
    # of attempt records (resource -> activity -> part) used by the tests.
    setup do
      content = %{
        "stem" => "1",
        "authoring" => %{
          "parts" => [
            %{
              "id" => "1",
              "responses" => [
                %{
                  "rule" => "input like {a}",
                  "score" => 10,
                  "id" => "r1",
                  "feedback" => %{"id" => "1", "content" => "yes"}
                },
                %{
                  "rule" => "input like {b}",
                  "score" => 1,
                  "id" => "r2",
                  "feedback" => %{"id" => "2", "content" => "almost"}
                },
                %{
                  "rule" => "input like {c}",
                  "score" => 0,
                  "id" => "r3",
                  "feedback" => %{"id" => "3", "content" => "no"}
                }
              ],
              "scoringStrategy" => "best",
              "evaluationStrategy" => "regex"
            }
          ]
        }
      }
      map =
        Seeder.base_project_with_resource2()
        |> Seeder.create_section()
        |> Seeder.add_objective("objective one", :o1)
        |> Seeder.add_activity(%{title: "one", content: content}, :activity)
        |> Seeder.add_user(%{}, :user1)
      Seeder.ensure_published(map.publication.id)
      attrs = %{
        title: "page1",
        content: %{
          "model" => [
            %{
              "type" => "activity-reference",
              "activity_id" => Map.get(map, :activity).revision.resource_id
            }
          ]
        },
        objectives: %{"attached" => [Map.get(map, :o1).resource.id]}
      }
      # Attach the page and build the attempt hierarchy for :user1.
      Seeder.add_page(map, attrs, :ungraded_page)
      |> Seeder.create_section_resources()
      |> Seeder.create_resource_attempt(
        %{attempt_number: 1},
        :user1,
        :ungraded_page,
        :ungraded_page_user1_attempt1
      )
      |> Seeder.create_activity_attempt(
        %{attempt_number: 1, transformed_model: content},
        :activity,
        :ungraded_page_user1_attempt1,
        :ungraded_page_user1_activity_attempt1
      )
      |> Seeder.create_part_attempt(
        %{attempt_number: 1},
        %Part{id: "1", responses: [], hints: []},
        :ungraded_page_user1_activity_attempt1,
        :ungraded_page_user1_activity_attempt1_part1_attempt1
      )
    end
    # Input "a" matches the highest-scoring response: 10/10 with feedback "1".
    test "processing a submission", %{
      ungraded_page_user1_activity_attempt1_part1_attempt1: part_attempt,
      section: section,
      ungraded_page_user1_activity_attempt1: activity_attempt
    } do
      part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}}]
      {:ok, [%{attempt_guid: attempt_guid, out_of: out_of, score: score, feedback: %{id: id}}]} =
        Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)
      # verify the returned feedback was what we expected
      assert attempt_guid == part_attempt.attempt_guid
      assert score == 10
      assert out_of == 10
      assert id == "1"
      # verify the part attempt record was updated correctly
      updated_attempt = Oli.Repo.get!(PartAttempt, part_attempt.id)
      assert updated_attempt.score == 10
      assert updated_attempt.out_of == 10
      refute updated_attempt.date_evaluated == nil
      # verify that the submission rolled up to the activity attempt
      # NOTE(review): the rollup appears normalized to 1.0/1.0 at the
      # activity level — see the assertions below.
      updated_attempt = Oli.Repo.get!(ActivityAttempt, activity_attempt.id)
      assert updated_attempt.score == 1.0
      assert updated_attempt.out_of == 1.0
      refute updated_attempt.date_evaluated == nil
    end
    # Input "b" matches the middle response: 1 out of the part maximum of 10.
    test "processing a different submission", %{
      ungraded_page_user1_activity_attempt1_part1_attempt1: part_attempt,
      section: section,
      ungraded_page_user1_activity_attempt1: activity_attempt
    } do
      part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "b"}}]
      {:ok, [%{attempt_guid: attempt_guid, out_of: out_of, score: score, feedback: %{id: id}}]} =
        Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)
      assert attempt_guid == part_attempt.attempt_guid
      assert score == 1
      assert out_of == 10
      assert id == "2"
    end
    # Input "d" matches none of the configured response rules.
    test "processing a submission whose input matches no response", %{
      section: section,
      ungraded_page_user1_activity_attempt1_part1_attempt1: part_attempt,
      ungraded_page_user1_activity_attempt1: activity_attempt
    } do
      part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "d"}}]
      # No matching response should mark the answer as incorrect with out_of
      # being the highest score of any response considered.
      {:ok, [%{feedback: %{id: _id}, score: 0, out_of: 10}]} =
        Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)
    end
  end
  describe "processing a multipart submission" do
    # Seeds a section with one ungraded page containing a single activity
    # that has TWO parts, both regex-evaluated with the "best" strategy:
    #   part "1": "a" -> 10, "b" -> 1, "c" -> 0 (feedback ids "1".."3")
    #   part "2": "a" -> 2,  "b" -> 1, "c" -> 0 (feedback ids "4".."6")
    # One part attempt is created per part so tests can submit either or both.
    setup do
      content = %{
        "stem" => "1",
        "authoring" => %{
          "parts" => [
            %{
              "id" => "1",
              "responses" => [
                %{
                  "rule" => "input like {a}",
                  "score" => 10,
                  "id" => "r1",
                  "feedback" => %{"id" => "1", "content" => "yes"}
                },
                %{
                  "rule" => "input like {b}",
                  "score" => 1,
                  "id" => "r2",
                  "feedback" => %{"id" => "2", "content" => "almost"}
                },
                %{
                  "rule" => "input like {c}",
                  "score" => 0,
                  "id" => "r3",
                  "feedback" => %{"id" => "3", "content" => "no"}
                }
              ],
              "scoringStrategy" => "best",
              "evaluationStrategy" => "regex"
            },
            %{
              "id" => "2",
              "responses" => [
                %{
                  "rule" => "input like {a}",
                  "score" => 2,
                  "id" => "r1",
                  "feedback" => %{"id" => "4", "content" => "yes"}
                },
                %{
                  "rule" => "input like {b}",
                  "score" => 1,
                  "id" => "r2",
                  "feedback" => %{"id" => "5", "content" => "almost"}
                },
                %{
                  "rule" => "input like {c}",
                  "score" => 0,
                  "id" => "r3",
                  "feedback" => %{"id" => "6", "content" => "no"}
                }
              ],
              "scoringStrategy" => "best",
              "evaluationStrategy" => "regex"
            }
          ]
        }
      }
      map =
        Seeder.base_project_with_resource2()
        |> Seeder.create_section()
        |> Seeder.add_objective("objective one", :o1)
        |> Seeder.add_activity(
          %{title: "one", content: content},
          :publication,
          :project,
          :author,
          :activity
        )
        |> Seeder.add_user(%{}, :user1)
      Seeder.ensure_published(map.publication.id)
      attrs = %{
        title: "page1",
        content: %{
          "model" => [
            %{
              "type" => "activity-reference",
              "activity_id" => Map.get(map, :activity).revision.resource_id
            }
          ]
        },
        objectives: %{"attached" => [Map.get(map, :o1).resource.id]}
      }
      # Attach the page, then build the attempt hierarchy including a part
      # attempt for each of the two parts.
      Seeder.add_page(map, attrs, :ungraded_page)
      |> Seeder.create_section_resources()
      |> Seeder.create_resource_attempt(
        %{attempt_number: 1},
        :user1,
        :ungraded_page,
        :ungraded_page_user1_attempt1
      )
      |> Seeder.create_activity_attempt(
        %{attempt_number: 1, transformed_model: content},
        :activity,
        :ungraded_page_user1_attempt1,
        :ungraded_page_user1_activity_attempt1
      )
      |> Seeder.create_part_attempt(
        %{attempt_number: 1},
        %Part{id: "1", responses: [], hints: []},
        :ungraded_page_user1_activity_attempt1,
        :ungraded_page_user1_activity_attempt1_part1_attempt1
      )
      |> Seeder.create_part_attempt(
        %{attempt_number: 1},
        %Part{id: "2", responses: [], hints: []},
        :ungraded_page_user1_activity_attempt1,
        :ungraded_page_user1_activity_attempt1_part2_attempt1
      )
    end
    # Submitting only part 1 evaluates that part but must NOT roll a score up
    # to the activity attempt while the other part remains unevaluated.
    test "processing a submission with just one of the parts submitted", %{
      section: section,
      ungraded_page_user1_activity_attempt1_part1_attempt1: part_attempt,
      ungraded_page_user1_activity_attempt1: activity_attempt
    } do
      part_inputs = [%{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}}]
      {:ok, [%{attempt_guid: attempt_guid, out_of: out_of, score: score, feedback: %{id: id}}]} =
        Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)
      # verify the returned feedback was what we expected
      assert attempt_guid == part_attempt.attempt_guid
      assert score == 10
      assert out_of == 10
      assert id == "1"
      # verify the part attempt record was updated correctly
      updated_attempt = Oli.Repo.get!(PartAttempt, part_attempt.id)
      assert updated_attempt.score == 10
      assert updated_attempt.out_of == 10
      refute updated_attempt.date_evaluated == nil
      # verify that the submission did NOT roll up to the activity attempt
      updated_attempt = Oli.Repo.get!(ActivityAttempt, activity_attempt.id)
      assert updated_attempt.score == nil
      assert updated_attempt.out_of == nil
      assert updated_attempt.date_evaluated == nil
    end
    # Submitting both parts evaluates each one and rolls the combined result
    # up to the activity attempt.
    test "processing a submission with all parts submitted", %{
      section: section,
      ungraded_page_user1_activity_attempt1_part1_attempt1: part_attempt,
      ungraded_page_user1_activity_attempt1_part2_attempt1: part2_attempt,
      ungraded_page_user1_activity_attempt1: activity_attempt
    } do
      part_inputs = [
        %{attempt_guid: part_attempt.attempt_guid, input: %StudentInput{input: "a"}},
        %{attempt_guid: part2_attempt.attempt_guid, input: %StudentInput{input: "b"}}
      ]
      {:ok,
       [
         %{attempt_guid: attempt_guid, out_of: out_of, score: score, feedback: %{id: id}},
         %{attempt_guid: attempt_guid2, out_of: out_of2, score: score2, feedback: %{id: id2}}
       ]} = Evaluate.evaluate_from_input(section.slug, activity_attempt.attempt_guid, part_inputs)
      # verify the returned feedback was what we expected
      assert attempt_guid == part_attempt.attempt_guid
      assert score == 10
      assert out_of == 10
      assert id == "1"
      assert attempt_guid2 == part2_attempt.attempt_guid
      assert score2 == 1
      assert out_of2 == 2
      assert id2 == "5"
      # verify the part attempt record was updated correctly
      updated_attempt = Oli.Repo.get!(PartAttempt, part_attempt.id)
      assert updated_attempt.score == 10
      assert updated_attempt.out_of == 10
      refute updated_attempt.date_evaluated == nil
      updated_attempt = Oli.Repo.get!(PartAttempt, part2_attempt.id)
      assert updated_attempt.score == 1
      assert updated_attempt.out_of == 2
      refute updated_attempt.date_evaluated == nil
      # verify that the submission did roll up to the activity attempt
      # with the fact that the scoring strategy defaults to best
      updated_attempt = Oli.Repo.get!(ActivityAttempt, activity_attempt.id)
      assert updated_attempt.score == 1.0
      assert updated_attempt.out_of == 1.0
      refute updated_attempt.date_evaluated == nil
    end
  end
end
| 34.933202 | 146 | 0.591755 |
1c4bf004c0b8619f58f36bd63681e5658eae9747 | 2,553 | ex | Elixir | lib/commanded/aggregates/aggregate_state_builder.ex | datafoo/commanded | b497d8cfde386c272902809511f8373d03652684 | [
"MIT"
] | null | null | null | lib/commanded/aggregates/aggregate_state_builder.ex | datafoo/commanded | b497d8cfde386c272902809511f8373d03652684 | [
"MIT"
] | null | null | null | lib/commanded/aggregates/aggregate_state_builder.ex | datafoo/commanded | b497d8cfde386c272902809511f8373d03652684 | [
"MIT"
] | null | null | null | defmodule Commanded.Aggregates.AggregateStateBuilder do
alias Commanded.Aggregates.Aggregate
alias Commanded.EventStore
alias Commanded.EventStore.RecordedEvent
alias Commanded.EventStore.SnapshotData
alias Commanded.Snapshotting
@read_event_batch_size 100
@doc """
Populate the aggregate's state from a snapshot, if present, and it's events.
Attempt to fetch a snapshot for the aggregate to use as its initial state.
If the snapshot exists, fetch any subsequent events to rebuild its state.
Otherwise start with the aggregate struct and stream all existing events for
the aggregate from the event store to rebuild its state from those events.
"""
def populate(%Aggregate{} = state) do
%Aggregate{aggregate_module: aggregate_module, snapshotting: snapshotting} = state
aggregate =
case Snapshotting.read_snapshot(snapshotting) do
{:ok, %SnapshotData{source_version: source_version, data: data}} ->
%Aggregate{
state
| aggregate_version: source_version,
aggregate_state: data
}
{:error, _error} ->
# No snapshot present, or exists but for outdated state, so use initial empty state
%Aggregate{state | aggregate_version: 0, aggregate_state: struct(aggregate_module)}
end
rebuild_from_events(aggregate)
end
@doc """
Load events from the event store, in batches, to rebuild the aggregate state
"""
def rebuild_from_events(%Aggregate{} = state) do
%Aggregate{
application: application,
aggregate_uuid: aggregate_uuid,
aggregate_version: aggregate_version
} = state
case EventStore.stream_forward(
application,
aggregate_uuid,
aggregate_version + 1,
@read_event_batch_size
) do
{:error, :stream_not_found} ->
# aggregate does not exist, return initial state
state
event_stream ->
rebuild_from_event_stream(event_stream, state)
end
end
# Rebuild aggregate state from a `Stream` of its events.
defp rebuild_from_event_stream(event_stream, %Aggregate{} = state) do
Enum.reduce(event_stream, state, fn event, state ->
%RecordedEvent{data: data, stream_version: stream_version} = event
%Aggregate{aggregate_module: aggregate_module, aggregate_state: aggregate_state} = state
%Aggregate{
state
| aggregate_version: stream_version,
aggregate_state: aggregate_module.apply(aggregate_state, data)
}
end)
end
end
| 33.155844 | 94 | 0.697611 |
1c4c5827eff1a094aeb7a98e9085ddd8c4c5eeac | 71 | exs | Elixir | test/ergo/utils_test.exs | mmower/ergo | f0cfa8debd6697c56509e7856578dc49666f3ff2 | [
"MIT"
] | 5 | 2021-07-11T13:01:56.000Z | 2021-12-29T17:02:00.000Z | test/ergo/utils_test.exs | mmower/ergo | f0cfa8debd6697c56509e7856578dc49666f3ff2 | [
"MIT"
] | null | null | null | test/ergo/utils_test.exs | mmower/ergo | f0cfa8debd6697c56509e7856578dc49666f3ff2 | [
"MIT"
] | null | null | null | defmodule Ergo.UtilsTest do
use ExUnit.Case
doctest Ergo.Utils
end
| 14.2 | 27 | 0.788732 |
1c4c7e10b9fcf328b889dd1c8ea042ec4f8e5457 | 448 | exs | Elixir | machine_translation/MorpHIN/Learned/Resources/Set5/TrainingInstances/92.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set5/TrainingInstances/92.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | machine_translation/MorpHIN/Learned/Resources/Set5/TrainingInstances/92.exs | AdityaPrasadMishra/NLP--Project-Group-16 | fb62cc6a1db4a494058171f11c14a2be3933a9a1 | [
"MIT"
] | null | null | null | **EXAMPLE FILE**
verb_aux SYM noun cm quantifier;
conj demonstrative noun cm quantifier;
verb conj cardinal noun quantifier;
pnoun cm cardinal noun quantifier;
pnoun cm cardinal adjective quantifier;
noun cm quantifier adjective quantifier;
verb_aux pn adjective noun quantifier;
SYM pn cm demonstrative pn;
SYM pn noun cm quantifier;
conj noun noun cm pn;
SYM cm noun cm quantifier;
verb conj noun noun quantifier;
pn cm cm noun pn;
| 28 | 41 | 0.78125 |
1c4c9ca1444eea0e4e31688d847d38b39fe1e208 | 1,065 | ex | Elixir | lib/phoenix13_base/application.ex | pgrunwald/phoenix13_base | 9c9ae51f9fda5dc86ffd33b860d1524f8d585e19 | [
"MIT"
] | 1 | 2019-03-28T05:47:46.000Z | 2019-03-28T05:47:46.000Z | lib/phoenix13_base/application.ex | pgrunwald/phoenix13_base | 9c9ae51f9fda5dc86ffd33b860d1524f8d585e19 | [
"MIT"
] | null | null | null | lib/phoenix13_base/application.ex | pgrunwald/phoenix13_base | 9c9ae51f9fda5dc86ffd33b860d1524f8d585e19 | [
"MIT"
] | null | null | null | defmodule Phoenix13Base.Application do
use Application
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the Ecto repository
supervisor(Phoenix13Base.Repo, []),
# Start the endpoint when the application starts
supervisor(Phoenix13BaseWeb.Endpoint, []),
# Start your own worker by calling: Phoenix13Base.Worker.start_link(arg1, arg2, arg3)
# worker(Phoenix13Base.Worker, [arg1, arg2, arg3]),
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Phoenix13Base.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
Phoenix13BaseWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 33.28125 | 91 | 0.724883 |
1c4ca4b1f631e24c5951d7dd53ae76a6cabb48dc | 3,509 | ex | Elixir | apps/aehttpclient/lib/client.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | null | null | null | apps/aehttpclient/lib/client.ex | SingularityMatrix/elixir-node | ad126aa97931165185cf35454718ed2eee40ceed | [
"ISC"
] | 2 | 2018-10-01T16:46:26.000Z | 2018-10-01T19:45:42.000Z | apps/aehttpclient/lib/client.ex | gspasov/dogs-blockchain | 884c14cfc98de2c3793a204da069630d090bbc90 | [
"0BSD"
] | null | null | null | defmodule Aehttpclient.Client do
@moduledoc """
Client used for making requests to a node.
"""
alias Aecore.Chain.Block
alias Aecore.Chain.Header
alias Aecore.Tx.SignedTx
alias Aecore.Tx.DataTx
alias Aecore.Keys
require Logger
@typedoc "Client request identifier"
@type req_kind :: :default | :pool_txs | :acc_txs | :info | :block | :raw_blocks
@spec get_info(term()) :: {:ok, map()} | :error
def get_info(uri) do
get(uri <> "/info", :info)
end
@spec get_peer_info(term()) :: {:ok, map()} | :error
def get_peer_info(uri) do
case get(uri <> "/peer_info") do
{:ok, %{"port" => port, "pubkey" => pubkey}} ->
decoded_pubkey = Keys.peer_decode(pubkey)
host = uri |> String.split(":") |> Enum.at(0) |> to_charlist()
peer_info = %{host: host, port: port, pubkey: decoded_pubkey}
{:ok, peer_info}
{:error, _reason} = error ->
error
end
end
@spec get_block({term(), binary()}) :: {:ok, Block.t()} | {:error, binary()}
def get_block({uri, hash}) do
hash = Header.base58c_encode(hash)
case get(uri <> "/block-by-hash?hash=#{hash}", :block) do
{:ok, serialized_block} ->
{:ok, Block.decode_from_map(serialized_block)}
{:error, reason} ->
{:error, reason}
end
end
@spec get_peers(term()) :: {:ok, list()}
def get_peers(uri) do
get(uri <> "/peers")
end
@spec get_account_txs({term(), term()}) :: {:ok, list()} | :error
def get_account_txs({uri, acc}) do
get(uri <> "/tx_pool/#{acc}", :acc_txs)
end
@spec handle_response(req_kind(), map() | list(), list(map())) :: {:ok, map()}
defp handle_response(:block, body, _headers) do
response = Poison.decode!(body)
{:ok, response}
end
defp handle_response(:raw_blocks, body, _headers) do
response = Poison.decode!(body)
deserialized_blocks =
Enum.map(response, fn block ->
Block.decode_from_map(block)
end)
{:ok, deserialized_blocks}
end
defp handle_response(:info, body, headers) do
response = Poison.decode!(body, keys: :atoms!)
{_, server} =
Enum.find(headers, fn header ->
header == {"server", "aehttpserver"}
end)
response_with_server_header = Map.put(response, :server, server)
{:ok, response_with_server_header}
end
defp handle_response(:acc_txs, body, _headers) do
response = Poison.decode!(body, as: [%SignedTx{data: %DataTx{}}], keys: :atoms!)
{:ok, response}
end
defp handle_response(:pool_txs, body, _headers) do
response =
body
|> Poison.decode!()
|> Enum.map(fn tx -> SignedTx.deserialize(tx) end)
{:ok, response}
end
defp handle_response(:default, body, _headers) do
response = Poison.decode!(body)
{:ok, response}
end
@spec get(binary(), req_kind) :: {:ok, map()} | {:error, binary()}
defp get(uri, identifier \\ :default) do
case HTTPoison.get(uri) do
{:ok, %{body: body, headers: headers, status_code: 200}} ->
handle_response(identifier, body, headers)
{:ok, %HTTPoison.Response{status_code: 404}} ->
{:error, "Response 404"}
{:ok, %HTTPoison.Response{status_code: 400}} ->
{:error, "Response 400"}
{:error, %HTTPoison.Error{} = error} ->
{:error, "HTTPPoison Error #{inspect(error)}"}
unexpected ->
Logger.error(fn ->
"unexpected client result " <> inspect(unexpected)
end)
{:error, "Unexpected error"}
end
end
end
| 26.78626 | 84 | 0.606156 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.