hexsha stringlengths 40 40 | size int64 2 991k | ext stringclasses 2 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 208 | max_stars_repo_name stringlengths 6 106 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses list | max_stars_count int64 1 33.5k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 208 | max_issues_repo_name stringlengths 6 106 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses list | max_issues_count int64 1 16.3k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 208 | max_forks_repo_name stringlengths 6 106 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses list | max_forks_count int64 1 6.91k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 991k | avg_line_length float64 1 36k | max_line_length int64 1 977k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fffa7b8aeb53b92cbadb5e2b7704bd3cb177bfde | 57,965 | ex | Elixir | deps/db_connection/lib/db_connection.ex | matin360/TaksoWebApp | 4dd8fef625ecc2364fe1d6e18e73c96c59d15349 | [
"MIT"
] | 1 | 2019-11-11T21:48:20.000Z | 2019-11-11T21:48:20.000Z | deps/db_connection/lib/db_connection.ex | rwtrecs/rocketseat-nlw5-inmana | 8ce8bc32e0bdd005c423394bb163945747b557e2 | [
"MIT"
] | 3 | 2021-06-20T14:51:14.000Z | 2021-06-25T00:56:11.000Z | deps/db_connection/lib/db_connection.ex | rwtrecs/rocketseat-nlw5-inmana | 8ce8bc32e0bdd005c423394bb163945747b557e2 | [
"MIT"
] | null | null | null | defmodule DBConnection.Stream do
# Holds the checked-out connection, the query, its params and the call
# options; consumed through the Enumerable implementation, which delegates
# all enumeration to DBConnection.reduce/3.
defstruct [:conn, :query, :params, :opts]
@type t :: %__MODULE__{conn: DBConnection.conn(), query: any, params: any, opts: Keyword.t()}
end
defimpl Enumerable, for: DBConnection.Stream do
  # A live database stream has no cheap count, membership test or slice:
  # returning {:error, module} tells Enumerable to fall back to reduce/3.
  def count(_stream), do: {:error, __MODULE__}
  def member?(_stream, _element), do: {:error, __MODULE__}
  def slice(_stream), do: {:error, __MODULE__}

  # All enumeration funnels through DBConnection.reduce/3.
  def reduce(stream, accumulator, reducer), do: DBConnection.reduce(stream, accumulator, reducer)
end
defmodule DBConnection.PrepareStream do
  # Stream resource carrying the connection, query, params and options;
  # consumed via its Enumerable implementation, which delegates to
  # DBConnection.reduce/3. All fields default to nil.
  defstruct conn: nil, query: nil, params: nil, opts: nil

  @type t :: %__MODULE__{conn: DBConnection.conn(), query: any, params: any, opts: Keyword.t()}
end
defimpl Enumerable, for: DBConnection.PrepareStream do
  # No cheap count, membership test or slicing for a prepare stream:
  # {:error, module} makes the Enumerable protocol fall back to reduce/3.
  def count(_stream), do: {:error, __MODULE__}
  def member?(_stream, _element), do: {:error, __MODULE__}
  def slice(_stream), do: {:error, __MODULE__}

  # All enumeration funnels through DBConnection.reduce/3.
  def reduce(stream, accumulator, reducer), do: DBConnection.reduce(stream, accumulator, reducer)
end
defmodule DBConnection do
@moduledoc """
A behaviour module for implementing efficient database connection
client processes, pools and transactions.
`DBConnection` handles callbacks differently to most behaviours. Some
callbacks will be called in the calling process, with the state
copied to and from the calling process. This is useful when the data
for a request is large and means that a calling process can interact
with a socket directly.
A side effect of this is that query handling can be written in a
simple blocking fashion, while the connection process itself will
remain responsive to OTP messages and can enqueue and cancel queued
requests.
If a request or series of requests takes too long to handle in the
client process a timeout will trigger and the socket can be cleanly
disconnected by the connection process.
If a calling process waits too long to start its request it will
timeout and its request will be cancelled. This prevents requests
building up when the database can not keep up.
If no requests are received for an idle interval, the pool will
ping all stale connections which can then ping the database to keep
the connection alive.
Should the connection be lost, attempts will be made to reconnect with
(configurable) exponential random backoff to reconnect. All state is
lost when a connection disconnects but the process is reused.
The `DBConnection.Query` protocol provides utility functions so that
queries can be encoded and decoded without blocking the connection or pool.
"""
require Logger

alias DBConnection.Holder

# Runtime connection reference handed to callers: `pool_ref` identifies the
# checked-out pool resource, `conn_ref` is a unique reference for this
# checkout, and `conn_mode` is set to :transaction while inside transaction/3.
defstruct [:pool_ref, :conn_ref, :conn_mode]
defmodule EncodeError do
  # Raised when query parameters cannot be encoded for the database.
  defexception [:message]
end
defmodule TransactionError do
  # Raised when a begin/commit/rollback is attempted while the connection's
  # transaction status makes the operation impossible.
  defexception [:status, :message]

  # Human-readable message for each transaction status.
  @messages %{
    idle: "transaction is not started",
    transaction: "transaction is already started",
    error: "transaction is aborted"
  }

  def exception(status) when status in [:idle, :transaction, :error] do
    %__MODULE__{status: status, message: Map.fetch!(@messages, status)}
  end
end
@typedoc """
Run or transaction connection reference.
"""
@type t :: %__MODULE__{pool_ref: any, conn_ref: reference}
@type conn :: GenServer.server() | t
@type query :: DBConnection.Query.t()
@type params :: any
@type result :: any
@type cursor :: any
@type status :: :idle | :transaction | :error
@type start_option ::
{:after_connect, (t -> any) | {module, atom, [any]} | nil}
| {:after_connect_timeout, timeout}
| {:connection_listeners, list(Process.dest()) | nil}
| {:backoff_max, non_neg_integer}
| {:backoff_min, non_neg_integer}
| {:backoff_type, :stop | :exp | :rand | :rand_exp}
| {:configure, (keyword -> keyword) | {module, atom, [any]} | nil}
| {:idle_interval, non_neg_integer}
| {:max_restarts, non_neg_integer}
| {:max_seconds, pos_integer}
| {:name, GenServer.name()}
| {:pool, module}
| {:pool_size, pos_integer}
| {:queue_interval, non_neg_integer}
| {:queue_target, non_neg_integer}
| {:show_sensitive_data_on_connection_error, boolean}
@type option ::
{:log, (DBConnection.LogEntry.t() -> any) | {module, atom, [any]} | nil}
| {:queue, boolean}
| {:timeout, timeout}
| {:deadline, integer | nil}
@doc """
Connect to the database. Return `{:ok, state}` on success or
`{:error, exception}` on failure.
If an error is returned it will be logged and another
connection attempt will be made after a backoff interval.
This callback is called in the connection process.
"""
@callback connect(opts :: Keyword.t()) ::
{:ok, state :: any} | {:error, Exception.t()}
@doc """
Checkouts the state from the connection process. Return `{:ok, state}`
to allow the checkout or `{:disconnect, exception, state}` to disconnect.
This callback is called when the control of the state is passed to
another process. `c:checkin/1` is called with the new state when control
is returned to the connection process.
This callback is called in the connection process.
"""
@callback checkout(state :: any) ::
{:ok, new_state :: any} | {:disconnect, Exception.t(), new_state :: any}
@doc """
Checks in the state to the connection process. Return `{:ok, state}`
to allow the checkin or `{:disconnect, exception, state}` to disconnect.
This callback is called when the control of the state is passed back
to the connection process. It should reverse any changes to the
connection state made in `c:checkout/1`.
This callback is called in the connection process.
"""
@callback checkin(state :: any) ::
{:ok, new_state :: any} | {:disconnect, Exception.t(), new_state :: any}
@doc """
Called when the connection has been idle for a period of time. Return
`{:ok, state}` to continue or `{:disconnect, exception, state}` to
disconnect.
This callback is called if no callbacks have been called after the
idle timeout and a client process is not using the state. The idle
timeout can be configured by the `:idle_interval` option. This function
can be called whether the connection is checked in or checked out.
This callback is called in the connection process.
"""
@callback ping(state :: any) ::
{:ok, new_state :: any} | {:disconnect, Exception.t(), new_state :: any}
@doc """
Handle the beginning of a transaction.
Return `{:ok, result, state}` to continue, `{status, state}` to notify caller
that the transaction can not begin due to the transaction status `status`,
`{:error, exception, state}` (deprecated) to error without beginning the
transaction, or `{:disconnect, exception, state}` to error and disconnect.
A callback implementation should only return `status` if it
can determine the database's transaction status without side effect.
This callback is called in the client process.
"""
@callback handle_begin(opts :: Keyword.t(), state :: any) ::
{:ok, result, new_state :: any}
| {status, new_state :: any}
| {:disconnect, Exception.t(), new_state :: any}
@doc """
Handle committing a transaction. Return `{:ok, result, state}` on successfully
committing transaction, `{status, state}` to notify caller that the
transaction can not commit due to the transaction status `status`,
`{:error, exception, state}` (deprecated) to error and no longer be inside
transaction, or `{:disconnect, exception, state}` to error and disconnect.
A callback implementation should only return `status` if it
can determine the database's transaction status without side effect.
This callback is called in the client process.
"""
@callback handle_commit(opts :: Keyword.t(), state :: any) ::
{:ok, result, new_state :: any}
| {status, new_state :: any}
| {:disconnect, Exception.t(), new_state :: any}
@doc """
Handle rolling back a transaction. Return `{:ok, result, state}` on successfully
rolling back transaction, `{status, state}` to notify caller that the
transaction can not rollback due to the transaction status `status`,
`{:error, exception, state}` (deprecated) to
error and no longer be inside transaction, or
`{:disconnect, exception, state}` to error and disconnect.
A callback implementation should only return `status` if it
can determine the database's transaction status without side effect.
This callback is called in the client and connection process.
"""
@callback handle_rollback(opts :: Keyword.t(), state :: any) ::
{:ok, result, new_state :: any}
| {status, new_state :: any}
| {:disconnect, Exception.t(), new_state :: any}
@doc """
Handle getting the transaction status. Return `{:idle, state}` if outside a
transaction, `{:transaction, state}` if inside a transaction,
`{:error, state}` if inside an aborted transaction, or
`{:disconnect, exception, state}` to error and disconnect.
If the callback returns a `:disconnect` tuple then `status/2` will return
`:error`.
"""
@callback handle_status(opts :: Keyword.t(), state :: any) ::
{status, new_state :: any}
| {:disconnect, Exception.t(), new_state :: any}
@doc """
Prepare a query with the database. Return `{:ok, query, state}` where
`query` is a query to pass to `execute/4` or `close/3`,
`{:error, exception, state}` to return an error and continue or
`{:disconnect, exception, state}` to return an error and disconnect.
This callback is intended for cases where the state of a connection is
needed to prepare a query and/or the query can be saved in the
database to call later.
This callback is called in the client process.
"""
@callback handle_prepare(query, opts :: Keyword.t(), state :: any) ::
{:ok, query, new_state :: any}
| {:error | :disconnect, Exception.t(), new_state :: any}
@doc """
Execute a query prepared by `c:handle_prepare/3`. Return
`{:ok, query, result, state}` to return altered query `query` and result
`result` and continue, `{:error, exception, state}` to return an error and
continue or `{:disconnect, exception, state}` to return an error and
disconnect.
This callback is called in the client process.
"""
@callback handle_execute(query, params, opts :: Keyword.t(), state :: any) ::
{:ok, query, result, new_state :: any}
| {:error | :disconnect, Exception.t(), new_state :: any}
@doc """
Close a query prepared by `c:handle_prepare/3` with the database. Return
`{:ok, result, state}` on success and to continue,
`{:error, exception, state}` to return an error and continue, or
`{:disconnect, exception, state}` to return an error and disconnect.
This callback is called in the client process.
"""
@callback handle_close(query, opts :: Keyword.t(), state :: any) ::
{:ok, result, new_state :: any}
| {:error | :disconnect, Exception.t(), new_state :: any}
@doc """
Declare a cursor using a query prepared by `c:handle_prepare/3`. Return
`{:ok, query, cursor, state}` to return altered query `query` and cursor
`cursor` for a stream and continue, `{:error, exception, state}` to return an
error and continue or `{:disconnect, exception, state}` to return an error
and disconnect.
This callback is called in the client process.
"""
@callback handle_declare(query, params, opts :: Keyword.t(), state :: any) ::
{:ok, query, cursor, new_state :: any}
| {:error | :disconnect, Exception.t(), new_state :: any}
@doc """
Fetch the next result from a cursor declared by `c:handle_declare/4`. Return
`{:cont, result, state}` to return the result `result` and continue using
cursor, `{:halt, result, state}` to return the result `result` and close the
cursor, `{:error, exception, state}` to return an error and close the
cursor, `{:disconnect, exception, state}` to return an error and disconnect.
This callback is called in the client process.
"""
@callback handle_fetch(query, cursor, opts :: Keyword.t(), state :: any) ::
{:cont | :halt, result, new_state :: any}
| {:error | :disconnect, Exception.t(), new_state :: any}
@doc """
Deallocate a cursor declared by `c:handle_declare/4` with the database. Return
`{:ok, result, state}` on success and to continue,
`{:error, exception, state}` to return an error and continue, or
`{:disconnect, exception, state}` to return an error and disconnect.
This callback is called in the client process.
"""
@callback handle_deallocate(query, cursor, opts :: Keyword.t(), state :: any) ::
{:ok, result, new_state :: any}
| {:error | :disconnect, Exception.t(), new_state :: any}
@doc """
Disconnect from the database. Return `:ok`.
The exception as first argument is the exception from a `:disconnect`
3-tuple returned by a previous callback.
If the state is controlled by a client and it exits or takes too long
to process a request the state will be last known state. In these
cases the exception will be a `DBConnection.ConnectionError`.
This callback is called in the connection process.
"""
@callback disconnect(err :: Exception.t(), state :: any) :: :ok
@doc """
Use `DBConnection` to set the behaviour.
"""
defmacro __using__(_) do
  # `use DBConnection` only declares the behaviour in the using module;
  # the callbacks themselves must still be implemented there.
  quote location: :keep do
    @behaviour DBConnection
  end
end
@doc """
Starts and links to a database connection process.
By default the `DBConnection` starts a pool with a single connection.
The size of the pool can be increased with `:pool_size`. A separate
pool can be given with the `:pool` option.
### Options
* `:backoff_min` - The minimum backoff interval (default: `1_000`)
* `:backoff_max` - The maximum backoff interval (default: `30_000`)
* `:backoff_type` - The backoff strategy, `:stop` for no backoff and
to stop, `:exp` for exponential, `:rand` for random and `:rand_exp` for
random exponential (default: `:rand_exp`)
* `:configure` - A function to run before every connect attempt to
dynamically configure the options, either a 1-arity fun,
`{module, function, args}` with options prepended to `args` or `nil` where
only returned options are passed to connect callback (default: `nil`)
* `:after_connect` - A function to run on connect using `run/3`, either
a 1-arity fun, `{module, function, args}` with `t:DBConnection.t/0` prepended
to `args` or `nil` (default: `nil`)
* `:after_connect_timeout` - The maximum time allowed to perform
function specified by `:after_connect` option (default: `15_000`)
* `:connection_listeners` - A list of process destinations to send
notification messages whenever a connection is connected or disconnected.
See "Connection listeners" below
* `:name` - A name to register the started process (see the `:name` option
in `GenServer.start_link/3`)
* `:pool` - Chooses the pool to be started
* `:pool_size` - Chooses the size of the pool
* `:idle_interval` - Controls the frequency we ping the database when the
connection is idle. Defaults to 1000ms.
* `:queue_target` and `:queue_interval` - See "Queue config" below
* `:max_restarts` and `:max_seconds` - Configures the `:max_restarts` and
`:max_seconds` for the connection pool supervisor (see the `Supervisor` docs)
* `:show_sensitive_data_on_connection_error` - By default, `DBConnection`
hides all information during connection errors to avoid leaking credentials
or other sensitive information. You can set this option if you wish to
see complete errors and stacktraces during connection errors
### Example
{:ok, conn} = DBConnection.start_link(mod, [idle_interval: 5_000])
## Queue config
Handling requests is done through a queue. When DBConnection is
started, there are two relevant options to control the queue:
* `:queue_target` in milliseconds, defaults to 50ms
* `:queue_interval` in milliseconds, defaults to 1000ms
Our goal is to wait at most `:queue_target` for a connection.
If all connections checked out during a `:queue_interval` takes
more than `:queue_target`, then we double the `:queue_target`.
If checking out connections take longer than the new target,
then we start dropping messages.
For example, by default our target is 50ms. If all connections
checkouts take longer than 50ms for a whole second, we double
the target to 100ms and we start dropping messages if the
time to checkout goes above the new limit.
This allows us to better plan for overloads as we can refuse
requests before they are sent to the database, which would
otherwise increase the burden on the database, making the
overload worse.
## Connection listeners
The `:connection_listeners` option allows one or more processes to be notified
whenever a connection is connected or disconnected. A listener may be a remote
or local PID, a locally registered name, or a tuple in the form of
`{registered_name, node}` for a registered name at another node.
Each listener process may receive the following messages where `pid`
identifies the connection process:
* `{:connected, pid}`
* `{:disconnected, pid}`
## Telemetry
A `[:db_connection, :connection_error]` event is published whenever a connection checkout
receives a `%DBConnection.ConnectionError{}`.
Measurements:
* `:error` A fixed-value measurement which always measures 1.
Metadata
* `:connection_listeners` The list of connection listeners (as described above) passed to
the connection pool. Can be used to relay this event to the proper connection listeners.
* `:connection_error` The `DBConnection.ConnectionError` struct which triggered the event.
* `:pool` The connection pool in which this event was triggered.
"""
@spec start_link(module, opts :: Keyword.t()) :: GenServer.on_start()
def start_link(conn_mod, opts) do
  # Accept both the legacy 6-tuple and the map child-spec format and
  # invoke the spec's start MFA directly.
  {mod, fun, args} =
    case child_spec(conn_mod, opts) do
      {_, start, _, _, _, _} -> start
      %{start: start} -> start
    end

  apply(mod, fun, args)
end
@doc """
Creates a supervisor child specification for a pool of connections.
See `start_link/2` for options.
"""
@spec child_spec(module, opts :: Keyword.t()) :: :supervisor.child_spec()
def child_spec(conn_mod, opts) do
  # Delegate to the configured pool module; defaults to the built-in
  # DBConnection.ConnectionPool when no :pool option is given.
  opts
  |> Keyword.get(:pool, DBConnection.ConnectionPool)
  |> apply(:child_spec, [{conn_mod, opts}])
end
@doc """
Prepare a query with a database connection for later execution.
It returns `{:ok, query}` on success or `{:error, exception}` if there was
an error.
The returned `query` can then be passed to `execute/4` and/or `close/3`
### Options
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`). See "Queue config" in
`start_link/2` docs
* `:timeout` - The maximum time that the caller is allowed to perform
this operation (default: `15_000`)
* `:deadline` - If set, overrides `:timeout` option and specifies absolute
monotonic time in milliseconds by which caller must perform operation.
See `System` module documentation for more information on monotonic time
(default: `nil`)
* `:log` - A function to log information about a call, either
a 1-arity fun, `{module, function, args}` with `t:DBConnection.LogEntry.t/0`
prepended to `args` or `nil`. See `DBConnection.LogEntry` (default: `nil`)
The pool and connection module may support other options. All options
are passed to `c:handle_prepare/3`.
### Example
DBConnection.transaction(pool, fn conn ->
query = %Query{statement: "SELECT * FROM table"}
query = DBConnection.prepare!(conn, query)
try do
DBConnection.execute!(conn, query, [])
after
DBConnection.close(conn, query)
end
end)
"""
@spec prepare(conn, query, opts :: Keyword.t()) ::
        {:ok, query} | {:error, Exception.t()}
def prepare(conn, query, opts \\ []) do
  # Parse the query first, then prepare it on a checked-out connection.
  # Any non-ok parse result is passed through untouched. The call is
  # always logged, success or failure.
  prepared =
    case parse(query, meter(opts), opts) do
      {:ok, parsed, meter} -> run(conn, &run_prepare/4, parsed, meter, opts)
      error -> error
    end

  log(prepared, :prepare, query, nil)
end
@doc """
Prepare a query with a database connection and return the prepared
query. An exception is raised on error.
See `prepare/3`.
"""
@spec prepare!(conn, query, opts :: Keyword.t()) :: query
def prepare!(conn, query, opts \\ []) do
  # Bang variant of prepare/3: unwrap the prepared query or raise.
  case prepare(conn, query, opts) do
    {:ok, prepared} -> prepared
    {:error, exception} -> raise exception
  end
end
@doc """
Prepare a query and execute it with a database connection and return both the
prepared query and the result, `{:ok, query, result}` on success or
`{:error, exception}` if there was an error.
The returned `query` can be passed to `execute/4` and `close/3`.
### Options
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`). See "Queue config" in
`start_link/2` docs
* `:timeout` - The maximum time that the caller is allowed to perform
this operation (default: `15_000`)
* `:deadline` - If set, overrides `:timeout` option and specifies absolute
monotonic time in milliseconds by which caller must perform operation.
See `System` module documentation for more information on monotonic time
(default: `nil`)
* `:log` - A function to log information about a call, either
a 1-arity fun, `{module, function, args}` with `t:DBConnection.LogEntry.t/0`
prepended to `args` or `nil`. See `DBConnection.LogEntry` (default: `nil`)
### Example
query = %Query{statement: "SELECT id FROM table WHERE id=$1"}
{:ok, query, result} = DBConnection.prepare_execute(conn, query, [1])
{:ok, result2} = DBConnection.execute(conn, query, [2])
:ok = DBConnection.close(conn, query)
"""
@spec prepare_execute(conn, query, params, Keyword.t()) ::
        {:ok, query, result}
        | {:error, Exception.t()}
def prepare_execute(conn, query, params, opts \\ []) do
  # Parse, then prepare+execute in one round trip; non-ok parse results
  # pass through unchanged. The call is always logged.
  outcome =
    case parse(query, meter(opts), opts) do
      {:ok, parsed, meter} -> parsed_prepare_execute(conn, parsed, params, meter, opts)
      error -> error
    end

  log(outcome, :prepare_execute, query, params)
end
# Prepare and execute an already-parsed query, then decode the result.
# Any error tuple from run/6 or decode/4 is returned as-is.
defp parsed_prepare_execute(conn, query, params, meter, opts) do
  case run(conn, &run_prepare_execute/5, query, params, meter, opts) do
    {:ok, prepared, result, meter} ->
      case decode(prepared, result, meter, opts) do
        {:ok, decoded, meter} -> {:ok, prepared, decoded, meter}
        error -> error
      end

    error ->
      error
  end
end
@doc """
Prepare a query and execute it with a database connection and return both the
prepared query and result. An exception is raised on error.
See `prepare_execute/4`.
"""
# Fixed @spec: the function takes 4 arguments (conn, query, params, opts)
# but the spec only declared 3, omitting `params`.
@spec prepare_execute!(conn, query, params, Keyword.t()) :: {query, result}
def prepare_execute!(conn, query, params, opts \\ []) do
  # Bang variant of prepare_execute/4: unwrap {query, result} or raise.
  case prepare_execute(conn, query, params, opts) do
    {:ok, query, result} -> {query, result}
    {:error, err} -> raise err
  end
end
@doc """
Execute a prepared query with a database connection and return
`{:ok, query, result}` on success or `{:error, exception}` if there was an error.
If the query is not prepared on the connection an attempt may be made to
prepare it and then execute again.
### Options
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`). See "Queue config" in
`start_link/2` docs
* `:timeout` - The maximum time that the caller is allowed to perform
this operation (default: `15_000`)
* `:deadline` - If set, overrides `:timeout` option and specifies absolute
monotonic time in milliseconds by which caller must perform operation.
See `System` module documentation for more information on monotonic time
(default: `nil`)
* `:log` - A function to log information about a call, either
a 1-arity fun, `{module, function, args}` with `t:DBConnection.LogEntry.t/0`
prepended to `args` or `nil`. See `DBConnection.LogEntry` (default: `nil`)
The pool and connection module may support other options. All options
are passed to `handle_execute/4`.
See `prepare/3`.
"""
@spec execute(conn, query, params, opts :: Keyword.t()) ::
        {:ok, query, result} | {:error, Exception.t()}
def execute(conn, query, params, opts \\ []) do
  # Encode the params; :prepare means the query must be (re)prepared first,
  # :ok means it can be executed directly, anything else is an error tuple
  # that passes through. The call is always logged.
  outcome =
    case maybe_encode(query, params, meter(opts), opts) do
      {:prepare, meter} ->
        parsed_prepare_execute(conn, query, params, meter, opts)

      {:ok, encoded, meter} ->
        with {:ok, executed_query, result, meter} <-
               run(conn, &run_execute/5, query, encoded, meter, opts),
             {:ok, decoded, meter} <- decode(executed_query, result, meter, opts) do
          {:ok, executed_query, decoded, meter}
        end

      {_, _, _, _} = error ->
        error
    end

  log(outcome, :execute, query, params)
end
@doc """
Execute a prepared query with a database connection and return the
result. Raises an exception on error.
See `execute/4`
"""
@spec execute!(conn, query, params, opts :: Keyword.t()) :: result
def execute!(conn, query, params, opts \\ []) do
  # Bang variant of execute/4: unwrap the result (dropping the query) or raise.
  case execute(conn, query, params, opts) do
    {:ok, _query, result} -> result
    {:error, exception} -> raise exception
  end
end
@doc """
Close a prepared query on a database connection and return `{:ok, result}` on
success or `{:error, exception}` on error.
This function should be used to free resources held by the connection
process and/or the database server.
## Options
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`). See "Queue config" in
`start_link/2` docs
* `:timeout` - The maximum time that the caller is allowed to perform
this operation (default: `15_000`)
* `:deadline` - If set, overrides `:timeout` option and specifies absolute
monotonic time in milliseconds by which caller must perform operation.
See `System` module documentation for more information on monotonic time
(default: `nil`)
* `:log` - A function to log information about a call, either
a 1-arity fun, `{module, function, args}` with `t:DBConnection.LogEntry.t/0`
prepended to `args` or `nil`. See `DBConnection.LogEntry` (default: `nil`)
The pool and connection module may support other options. All options
are passed to `c:handle_close/3`.
See `prepare/3`.
"""
@spec close(conn, query, opts :: Keyword.t()) ::
        {:ok, result} | {:error, Exception.t()}
def close(conn, query, opts \\ []) do
  # Close runs through the cleanup path, then the call is logged.
  cleanup_result = run_cleanup(conn, &run_close/4, [query], meter(opts), opts)
  log(cleanup_result, :close, query, nil)
end
@doc """
Close a prepared query on a database connection and return the result. Raises
an exception on error.
See `close/3`.
"""
@spec close!(conn, query, opts :: Keyword.t()) :: result
def close!(conn, query, opts \\ []) do
  # Bang variant of close/3: unwrap the result or raise.
  case close(conn, query, opts) do
    {:ok, result} -> result
    {:error, exception} -> raise exception
  end
end
@doc """
Acquire a lock on a connection and run a series of requests on it.
The return value of this function is the return value of `fun`.
To use the locked connection call the request with the connection
reference passed as the single argument to the `fun`. If the
connection disconnects all future calls using that connection
reference will fail.
`run/3` and `transaction/3` can be nested multiple times but a
`transaction/3` call inside another `transaction/3` will be treated
the same as `run/3`.
### Options
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`). See "Queue config" in
`start_link/2` docs
* `:timeout` - The maximum time that the caller is allowed to perform
this operation (default: `15_000`)
* `:deadline` - If set, overrides `:timeout` option and specifies absolute
monotonic time in milliseconds by which caller must perform operation.
See `System` module documentation for more information on monotonic time
(default: `nil`)
The pool may support other options.
### Example
{:ok, res} = DBConnection.run(conn, fn conn ->
DBConnection.execute!(conn, query, [])
end)
"""
@spec run(conn, (t -> result), opts :: Keyword.t()) :: result when result: var
def run(conn, fun, opts \\ [])

# Already holding a connection (nested run/transaction): just call the fun.
def run(%DBConnection{} = conn, fun, _) do
  fun.(conn)
end

# Pool reference: check out a connection, run the fun, verify the
# transaction status did not change, and check the connection back in.
def run(pool, fun, opts) do
  case checkout(pool, nil, opts) do
    {:ok, conn, _} ->
      # Remember the status at checkout so we can detect a fun that
      # changed it (e.g. began a transaction and never finished it).
      old_status = status(conn, opts)

      try do
        result = fun.(conn)
        # Re-read the status after the fun so it can be compared below.
        {result, run(conn, &run_status/3, nil, opts)}
      catch
        # The fun raised/threw/exited: check the connection in, re-raise.
        kind, error ->
          checkin(conn)
          :erlang.raise(kind, error, __STACKTRACE__)
      else
        # The status check returned an error tuple: check in and return
        # the fun's result.
        {result, {:error, _, _}} ->
          checkin(conn)
          result

        # Status unchanged: the normal path.
        {result, {^old_status, _meter}} ->
          checkin(conn)
          result

        # Status changed while checked out: disconnect and raise.
        {_result, {new_status, _meter}} ->
          err =
            DBConnection.ConnectionError.exception(
              "connection was checked out with status #{inspect(old_status)} " <>
                "but it was checked in with status #{inspect(new_status)}"
            )

          disconnect(conn, err)
          raise err

        # The status check itself raised/threw/exited: re-raise it.
        {_result, {kind, reason, stack, _meter}} ->
          :erlang.raise(kind, reason, stack)
      end

    {:error, err, _} ->
      raise err

    {kind, reason, stack, _} ->
      :erlang.raise(kind, reason, stack)
  end
end
@doc """
Acquire a lock on a connection and run a series of requests inside a
transaction. The result of the transaction fun is returned inside an `:ok`
tuple: `{:ok, result}`.
To use the locked connection call the request with the connection
reference passed as the single argument to the `fun`. If the
connection disconnects all future calls using that connection
reference will fail.
`run/3` and `transaction/3` can be nested multiple times. If a transaction is
rolled back or a nested transaction `fun` raises the transaction is marked as
failed. All calls except `run/3`, `transaction/3`, `rollback/2`, `close/3` and
`close!/3` will raise an exception inside a failed transaction until the outer
transaction call returns. All `transaction/3` calls will return
`{:error, :rollback}` if the transaction failed or connection closed and
`rollback/2` is not called for that `transaction/3`.
### Options
* `:queue` - Whether to block waiting in an internal queue for the
connection's state (boolean, default: `true`). See "Queue config" in
`start_link/2` docs
* `:timeout` - The maximum time that the caller is allowed to perform
this operation (default: `15_000`)
* `:deadline` - If set, overrides `:timeout` option and specifies absolute
monotonic time in milliseconds by which caller must perform operation.
See `System` module documentation for more information on monotonic time
(default: `nil`)
* `:log` - A function to log information about begin, commit and rollback
calls made as part of the transaction, either a 1-arity fun,
`{module, function, args}` with `t:DBConnection.LogEntry.t/0` prepended to
`args` or `nil`. See `DBConnection.LogEntry` (default: `nil`)
The pool and connection module may support other options. All options
are passed to `c:handle_begin/2`, `c:handle_commit/2` and
`c:handle_rollback/2`.
### Example
{:ok, res} = DBConnection.transaction(conn, fn conn ->
DBConnection.execute!(conn, query, [])
end)
"""
@spec transaction(conn, (t -> result), opts :: Keyword.t()) ::
        {:ok, result} | {:error, reason :: any}
      when result: var
def transaction(conn, fun, opts \\ [])

# Nested call: already inside transaction/3 (conn_mode is :transaction),
# so no new database transaction is begun; the fun runs directly.
def transaction(%DBConnection{conn_mode: :transaction} = conn, fun, _opts) do
  %DBConnection{conn_ref: conn_ref} = conn

  try do
    result = fun.(conn)
    conclude(conn, result)
  catch
    # rollback/2 on this exact connection throws this tagged tuple:
    # mark the transaction failed and return the rollback reason.
    :throw, {__MODULE__, ^conn_ref, reason} ->
      fail(conn)
      {:error, reason}

    # Any other raise/throw/exit: mark the transaction failed, re-raise.
    kind, reason ->
      stack = __STACKTRACE__
      fail(conn)
      :erlang.raise(kind, reason, stack)
  else
    result ->
      {:ok, result}
  end
end

# Checked-out connection not yet in a transaction: begin one on it.
def transaction(%DBConnection{} = conn, fun, opts) do
  case begin(conn, &run/4, opts) do
    {:ok, _} ->
      run_transaction(conn, fun, &run/4, opts)

    # Begin refused because of the transaction status: report rollback.
    {:error, %DBConnection.TransactionError{}} ->
      {:error, :rollback}

    {:error, err} ->
      raise err
  end
end

# Pool reference: check out a connection while beginning the transaction.
def transaction(pool, fun, opts) do
  case begin(pool, &checkout/4, opts) do
    {:ok, conn, _} ->
      run_transaction(conn, fun, &checkin/4, opts)

    {:error, %DBConnection.TransactionError{}} ->
      {:error, :rollback}

    {:error, err} ->
      raise err
  end
end
@doc """
Aborts the enclosing `transaction/3` and releases the lock on the connection.

Performs a non-local return (a `throw/1`) out of the anonymous function given
to `transaction/3`, which then returns `{:error, reason}`. With nested
`transaction/3` calls the connection stays marked as failed until the
outermost call issues the database rollback.

### Example

    {:error, :oops} = DBConnection.transaction(pool, fn conn ->
      DBConnection.rollback(conn, :oops)
    end)
"""
@spec rollback(t, reason :: any) :: no_return
def rollback(conn, reason)

def rollback(%DBConnection{conn_mode: :transaction, conn_ref: conn_ref}, reason) do
  # The conn_ref in the thrown tuple lets transaction/3 catch only its own
  # rollback throws (the catch clause pins on the same ref).
  throw({__MODULE__, conn_ref, reason})
end

def rollback(%DBConnection{}, _reason) do
  raise "not inside transaction"
end
@doc """
Return the transaction status of a connection.

The callback implementation should return the transaction status according to
the database, and not make assumptions based on client-side state.

This function will raise a `DBConnection.ConnectionError` when called inside a
deprecated `transaction/3`.

### Options

See module documentation. The pool and connection module may support other
options. All options are passed to `c:handle_status/2`.

### Example

    # outside of the transaction, the status is `:idle`
    DBConnection.status(conn) #=> :idle

    DBConnection.transaction(conn, fn conn ->
      DBConnection.status(conn) #=> :transaction

      # run a query that will cause the transaction to rollback, e.g.
      # uniqueness constraint violation
      DBConnection.execute(conn, bad_query, [])

      DBConnection.status(conn) #=> :error
    end)

    DBConnection.status(conn) #=> :idle
"""
@spec status(conn, opts :: Keyword.t()) :: status
def status(conn, opts \\ []) do
  # No metering for status calls (meter argument is nil).
  case run(conn, &run_status/3, nil, opts) do
    {status, _meter} ->
      status

    {:error, _err, _meter} ->
      # Checkout failure is reported as the :error status, not raised.
      :error

    {kind, reason, stack, _meter} ->
      # Re-raise captured crashes with their original stacktrace.
      :erlang.raise(kind, reason, stack)
  end
end
@doc """
Create a stream that will prepare a query, execute it and stream results
using a cursor.

### Options

  * `:queue` - Whether to block waiting in an internal queue for the
    connection's state (boolean, default: `true`). See "Queue config" in
    `start_link/2` docs
  * `:timeout` - The maximum time that the caller is allowed to perform
    this operation (default: `15_000`)
  * `:deadline` - If set, overrides `:timeout` option and specifies absolute
    monotonic time in milliseconds by which caller must perform operation.
    See `System` module documentation for more information on monotonic time
    (default: `nil`)
  * `:log` - A function to log information about a call, either
    a 1-arity fun, `{module, function, args}` with `t:DBConnection.LogEntry.t/0`
    prepended to `args` or `nil`. See `DBConnection.LogEntry` (default: `nil`)

The pool and connection module may support other options. All options
are passed to `c:handle_prepare/3`, `c:handle_close/3`, `c:handle_declare/4`,
and `c:handle_deallocate/4`.

### Example

    {:ok, results} = DBConnection.transaction(conn, fn conn ->
      query = %Query{statement: "SELECT id FROM table"}
      stream = DBConnection.prepare_stream(conn, query, [])
      Enum.to_list(stream)
    end)
"""
@spec prepare_stream(t, query, params, opts :: Keyword.t()) ::
        DBConnection.PrepareStream.t()
# Lazily builds the stream struct; all work happens in `reduce/3` when the
# stream is enumerated (inside the transaction holding the connection).
def prepare_stream(%DBConnection{} = conn, query, params, opts \\ []) do
  %DBConnection.PrepareStream{conn: conn, query: query, params: params, opts: opts}
end
@doc """
Create a stream that will execute a prepared query and stream results using a
cursor.

### Options

  * `:queue` - Whether to block waiting in an internal queue for the
    connection's state (boolean, default: `true`). See "Queue config" in
    `start_link/2` docs
  * `:timeout` - The maximum time that the caller is allowed to perform
    this operation (default: `15_000`)
  * `:deadline` - If set, overrides `:timeout` option and specifies absolute
    monotonic time in milliseconds by which caller must perform operation.
    See `System` module documentation for more information on monotonic time
    (default: `nil`)
  * `:log` - A function to log information about a call, either
    a 1-arity fun, `{module, function, args}` with `t:DBConnection.LogEntry.t/0`
    prepended to `args` or `nil`. See `DBConnection.LogEntry` (default: `nil`)

The pool and connection module may support other options. All options
are passed to `c:handle_declare/4` and `c:handle_deallocate/4`.

### Example

    DBConnection.transaction(pool, fn conn ->
      query = %Query{statement: "SELECT id FROM table"}
      query = DBConnection.prepare!(conn, query)

      try do
        stream = DBConnection.stream(conn, query, [])
        Enum.to_list(stream)
      after
        # Make sure query is closed!
        DBConnection.close(conn, query)
      end
    end)
"""
@spec stream(t, query, params, opts :: Keyword.t()) :: DBConnection.Stream.t()
# Lazily builds the stream struct; enumeration happens in `reduce/3`.
def stream(%DBConnection{} = conn, query, params, opts \\ []) do
  %DBConnection.Stream{conn: conn, query: query, params: params, opts: opts}
end
@doc """
Reduces a previously built stream or prepared stream.
"""
# PrepareStream: prepare + describe + encode + declare in one step, raising
# on any failure (prepare_declare!/4).
def reduce(%DBConnection.PrepareStream{} = stream, acc, fun) do
  %DBConnection.PrepareStream{conn: conn, query: query, params: params, opts: opts} = stream

  declare = fn conn, opts ->
    {query, cursor} = prepare_declare!(conn, query, params, opts)
    {:cont, query, cursor}
  end

  enum = resource(conn, declare, &stream_fetch/3, &stream_deallocate/3, opts)
  enum.(acc, fun)
end

# Stream over an already-prepared query: only declare a cursor. Adapters may
# return either `{:ok, query, cursor}` or the legacy `{:ok, cursor}` shape.
def reduce(%DBConnection.Stream{} = stream, acc, fun) do
  %DBConnection.Stream{conn: conn, query: query, params: params, opts: opts} = stream

  declare = fn conn, opts ->
    case declare(conn, query, params, opts) do
      {:ok, query, cursor} ->
        {:cont, query, cursor}

      {:ok, cursor} ->
        {:cont, query, cursor}

      {:error, err} ->
        raise err
    end
  end

  enum = resource(conn, declare, &stream_fetch/3, &stream_deallocate/3, opts)
  enum.(acc, fun)
end
## Helpers
# Checks a connection out of `pool`, returning `{:ok, conn, meter}` on
# success. Timestamps the checkout (and the holder's checkin time reported by
# the pool) onto the meter so log entries show queue time.
defp checkout(pool, meter, opts) do
  checkout = System.monotonic_time()
  pool_mod = Keyword.get(opts, :pool, DBConnection.ConnectionPool)

  # Track $callers through the pool for ownership/sandbox-style pools.
  caller = Keyword.get(opts, :caller, self())
  callers = [caller | Process.get(:"$callers") || []]

  try do
    pool_mod.checkout(pool, callers, opts)
  catch
    kind, reason ->
      stack = __STACKTRACE__
      {kind, reason, stack, past_event(meter, :checkout, checkout)}
  else
    {:ok, pool_ref, _conn_mod, checkin, _conn_state} ->
      # Fresh conn_ref per checkout; used to match rollback throws.
      conn = %DBConnection{pool_ref: pool_ref, conn_ref: make_ref()}
      meter = meter |> past_event(:checkin, checkin) |> past_event(:checkout, checkout)
      {:ok, conn, meter}

    {:error, err} ->
      {:error, err, past_event(meter, :checkout, checkout)}
  end
end

# Already checked out: just run `fun`, re-tagging the result with the conn so
# callers (e.g. `begin/3` via `transaction/3`) receive the connection back.
defp checkout(%DBConnection{} = conn, fun, meter, opts) do
  with {:ok, result, meter} <- fun.(conn, meter, opts) do
    {:ok, conn, result, meter}
  end
end

# Pool variant: check out, run `fun`, and check the connection back in only on
# failure — on success the caller keeps it (used to hold a connection for a
# whole transaction).
defp checkout(pool, fun, meter, opts) do
  with {:ok, conn, meter} <- checkout(pool, meter, opts) do
    case fun.(conn, meter, opts) do
      {:ok, result, meter} ->
        {:ok, conn, result, meter}

      error ->
        checkin(conn)
        error
    end
  end
end
# Returns the connection's holder to the pool.
defp checkin(%DBConnection{pool_ref: pool_ref}) do
  Holder.checkin(pool_ref)
end

# Runs `fun` on an already checked-out connection and then checks it in.
# NOTE(review): if `fun` raises, checkin/1 is skipped here — presumably the
# crash path stops/disconnects the holder elsewhere; confirm before changing.
defp checkin(%DBConnection{} = conn, fun, meter, opts) do
  result = fun.(conn, meter, opts)
  checkin(conn)
  result
end

# Pool variant: `run/4` performs checkout, `fun`, and checkin together.
defp checkin(pool, fun, meter, opts), do: run(pool, fun, meter, opts)
# Tells the holder to disconnect the underlying connection with `err` as the
# reason (used for `{:disconnect, ...}` callback returns).
defp disconnect(%DBConnection{pool_ref: pool_ref}, err) do
  _ = Holder.disconnect(pool_ref, err)
  :ok
end

# Stops the connection process after a client-side crash inside a callback,
# embedding the formatted crash in the exception for diagnostics.
defp stop(%DBConnection{pool_ref: pool_ref}, kind, reason, stack) do
  msg = "client #{inspect(self())} stopped: " <> Exception.format(kind, reason, stack)
  exception = DBConnection.ConnectionError.exception(msg)
  _ = Holder.stop(pool_ref, exception)
  :ok
end
# Normalizes the common callback return shapes coming back from the holder:
#   {:ok, result, conn_state}         -> {:ok, result, meter}
#   {:error, err, conn_state}         -> {:error, err, meter}
#   {:disconnect, err, conn_state}    -> disconnect, then {:error, err, meter}
#   {:catch, kind, reason, stack}     -> stop conn, return crash tuple
#   anything else                     -> stop conn with a "bad return" error
defp handle_common_result(return, conn, meter) do
  case return do
    {:ok, result, _conn_state} ->
      {:ok, result, meter}

    {:error, err, _conn_state} ->
      {:error, err, meter}

    {:disconnect, err, _conn_state} ->
      disconnect(conn, err)
      {:error, err, meter}

    {:catch, kind, reason, stack} ->
      stop(conn, kind, reason, stack)
      {kind, reason, stack, meter}

    other ->
      bad_return!(other, conn, meter)
  end
end

@compile {:inline, bad_return!: 3}
# Invalid callback return: raise-and-catch to capture a stacktrace at this
# point, stop the connection, and hand back the crash tuple for logging.
defp bad_return!(other, conn, meter) do
  try do
    raise DBConnection.ConnectionError, "bad return value: #{inspect(other)}"
  catch
    :error, reason ->
      stack = __STACKTRACE__
      stop(conn, :error, reason, stack)
      {:error, reason, stack, meter}
  end
end
# Runs `DBConnection.Query.parse/2` on the caller side, converting crashes
# into the `{kind, reason, stack, meter}` tuple used by the logging pipeline.
defp parse(query, meter, opts) do
  try do
    DBConnection.Query.parse(query, opts)
  catch
    kind, reason ->
      stack = __STACKTRACE__
      {kind, reason, stack, meter}
  else
    query ->
      {:ok, query, meter}
  end
end
# Runs `DBConnection.Query.describe/2` after a prepare. A crash here must
# close the just-prepared query on the server (raised_close/7) before the
# crash tuple is returned.
defp describe(conn, query, meter, opts) do
  try do
    DBConnection.Query.describe(query, opts)
  catch
    kind, reason ->
      stack = __STACKTRACE__
      raised_close(conn, query, meter, opts, kind, reason, stack)
  else
    query ->
      {:ok, query, meter}
  end
end

# Encodes params for a prepared query; like describe/4, an encoding crash
# closes the prepared query before propagating.
defp encode(conn, query, params, meter, opts) do
  try do
    DBConnection.Query.encode(query, params, opts)
  catch
    kind, reason ->
      stack = __STACKTRACE__
      raised_close(conn, query, meter, opts, kind, reason, stack)
  else
    params ->
      {:ok, params, meter}
  end
end
# Optimistically encodes params for a possibly-unprepared query. An
# `EncodeError` means the query needs (re)preparing first, signalled with
# `{:prepare, meter}`; other crashes propagate as crash tuples.
defp maybe_encode(query, params, meter, opts) do
  try do
    DBConnection.Query.encode(query, params, opts)
  rescue
    DBConnection.EncodeError -> {:prepare, meter}
  catch
    kind, reason ->
      stack = __STACKTRACE__
      {kind, reason, stack, meter}
  else
    params ->
      {:ok, params, meter}
  end
end
# Decodes a raw adapter result on the caller side, recording a `:decode`
# event on the meter first so decode time shows up in log entries.
defp decode(query, result, meter, opts) do
  meter = event(meter, :decode)

  try do
    DBConnection.Query.decode(query, result, opts)
  catch
    kind, reason ->
      stack = __STACKTRACE__
      {kind, reason, stack, meter}
  else
    result ->
      {:ok, result, meter}
  end
end
# Parses the query then runs the prepare+describe+encode+declare pipeline,
# logging the whole operation as :prepare_declare.
defp prepare_declare(conn, query, params, opts) do
  result =
    with {:ok, query, meter} <- parse(query, meter(opts), opts) do
      parsed_prepare_declare(conn, query, params, meter, opts)
    end

  # Note: `query` here is the original (pre-parse) query — the rebinding
  # inside `with` is scoped to its do-block.
  log(result, :prepare_declare, query, params)
end

defp parsed_prepare_declare(conn, query, params, meter, opts) do
  run(conn, &run_prepare_declare/5, query, params, meter, opts)
end

# Raising variant used by PrepareStream enumeration.
defp prepare_declare!(conn, query, params, opts) do
  case prepare_declare(conn, query, params, opts) do
    {:ok, query, cursor} ->
      {query, cursor}

    {:error, err} ->
      raise err
  end
end
# Declares a cursor for a (possibly prepared) query. Tries to encode the
# params first; an EncodeError (`{:prepare, meter}`) falls back to the full
# prepare pipeline. Crash tuples (4-tuples) pass straight through to logging.
defp declare(conn, query, params, opts) do
  result =
    case maybe_encode(query, params, meter(opts), opts) do
      {:prepare, meter} ->
        parsed_prepare_declare(conn, query, params, meter, opts)

      {:ok, params, meter} ->
        run(conn, &run_declare/5, query, params, meter, opts)

      {_, _, _, _} = error ->
        error
    end

  log(result, :declare, query, params)
end

# Deallocates a cursor; uses run_cleanup so it also works after errors.
defp deallocate(conn, query, cursor, opts) do
  conn
  |> run_cleanup(&run_deallocate/4, [query, cursor], meter(opts), opts)
  |> log(:deallocate, query, cursor)
end
# Prepares `query` on the checked-out connection and then lets the query
# module post-process the prepared form via describe/4. A failed prepare is
# returned unchanged.
defp run_prepare(conn, query, meter, opts) do
  case prepare(conn, query, meter, opts) do
    {:ok, prepared, meter} ->
      describe(conn, prepared, meter, opts)

    error ->
      error
  end
end
# Invokes the adapter's `handle_prepare/3` through the holder, recording a
# :prepare event and normalizing the return shape.
defp prepare(%DBConnection{pool_ref: pool_ref} = conn, query, meter, opts) do
  pool_ref
  |> Holder.handle(:handle_prepare, [query], opts)
  |> handle_common_result(conn, event(meter, :prepare))
end
# prepare + describe + encode + execute pipeline; any failing step
# short-circuits via `with`.
defp run_prepare_execute(conn, query, params, meter, opts) do
  with {:ok, query, meter} <- run_prepare(conn, query, meter, opts),
       {:ok, params, meter} <- encode(conn, query, params, meter, opts) do
    run_execute(conn, query, params, meter, opts)
  end
end

# Invokes the adapter's `handle_execute/4`. The success shape is the 4-tuple
# `{:ok, query, result, state}`; a bare `{:ok, _, _}` is an adapter bug and
# is treated as a bad return.
defp run_execute(conn, query, params, meter, opts) do
  %DBConnection{pool_ref: pool_ref} = conn
  meter = event(meter, :execute)

  case Holder.handle(pool_ref, :handle_execute, [query, params], opts) do
    {:ok, query, result, _conn_state} ->
      {:ok, query, result, meter}

    {:ok, _, _} = other ->
      bad_return!(other, conn, meter)

    other ->
      handle_common_result(other, conn, meter)
  end
end
# After describe/encode crashed for a prepared query, close the query on the
# server and then return the original crash tuple (a close failure replaces
# it, since `with` only passes through the success case).
defp raised_close(conn, query, meter, opts, kind, reason, stack) do
  with {:ok, _, meter} <- run_close(conn, [query], meter, opts) do
    {kind, reason, stack, meter}
  end
end

# Closes a prepared query through the cleanup path, recording a :close event.
defp run_close(conn, args, meter, opts) do
  meter = event(meter, :close)
  cleanup(conn, :handle_close, args, meter, opts)
end
# Runs a cleanup fun on an already checked-out connection.
defp run_cleanup(%DBConnection{} = conn, fun, args, meter, opts) do
  fun.(conn, args, meter, opts)
end

# Pool variant: check out, run the cleanup, and always check back in —
# cleanup must not leak the connection even if `fun` raises, hence try/after.
defp run_cleanup(pool, fun, args, meter, opts) do
  with {:ok, conn, meter} <- checkout(pool, meter, opts) do
    try do
      fun.(conn, args, meter, opts)
    after
      checkin(conn)
    end
  end
end
# Runs a cleanup callback (`:handle_close` / `:handle_deallocate`) on the
# connection's holder and normalizes the result.
#
# The ok/error/disconnect/catch/bad-return dispatch is exactly the one
# implemented by `handle_common_result/3`, so delegate to it instead of
# duplicating the five clauses inline.
defp cleanup(conn, fun, args, meter, opts) do
  %DBConnection{pool_ref: pool_ref} = conn

  pool_ref
  |> Holder.cleanup(fun, args, opts)
  |> handle_common_result(conn, meter)
end
# run/4..run/6: execute `fun` on a connection, checking out from a pool when
# needed and always checking back in (try/after). The extra-arg arities exist
# to avoid allocating closures on hot paths.

# Already checked out: just call through.
defp run(%DBConnection{} = conn, fun, meter, opts) do
  fun.(conn, meter, opts)
end

defp run(pool, fun, meter, opts) do
  with {:ok, conn, meter} <- checkout(pool, meter, opts) do
    try do
      fun.(conn, meter, opts)
    after
      checkin(conn)
    end
  end
end

# Same as run/4 but threads one extra argument to `fun`.
defp run(%DBConnection{} = conn, fun, arg, meter, opts) do
  fun.(conn, arg, meter, opts)
end

defp run(pool, fun, arg, meter, opts) do
  with {:ok, conn, meter} <- checkout(pool, meter, opts) do
    try do
      fun.(conn, arg, meter, opts)
    after
      checkin(conn)
    end
  end
end

# Same as run/4 but threads two extra arguments to `fun`.
defp run(%DBConnection{} = conn, fun, arg1, arg2, meter, opts) do
  fun.(conn, arg1, arg2, meter, opts)
end

defp run(pool, fun, arg1, arg2, meter, opts) do
  with {:ok, conn, meter} <- checkout(pool, meter, opts) do
    try do
      fun.(conn, arg1, arg2, meter, opts)
    after
      checkin(conn)
    end
  end
end
# Builds the initial meter from the `:log` option: `{logger, event_times}`
# when logging is enabled, `nil` when it is not. `is_nil/1` (not truthiness)
# keeps any non-nil value — including `false` — as a logger, matching the
# original clause structure.
defp meter(opts) do
  logger = Keyword.get(opts, :log)
  if is_nil(logger), do: nil, else: {logger, []}
end
# Stamps `event` with the current monotonic time onto the meter.
# A nil meter (logging disabled) stays nil.
defp event(nil, _event), do: nil

defp event({logger, events}, event) do
  stamped = {event, System.monotonic_time()}
  {logger, [stamped | events]}
end

# Stamps `event` with a time captured earlier (e.g. before checkout built the
# meter). A nil time is a no-op; a nil meter stays nil.
defp past_event(nil, _event, _time), do: nil

defp past_event(meter, _event, nil), do: meter

defp past_event({logger, events}, event, time) do
  {logger, [{event, time} | events]}
end
# log/4: pattern-matches the internal result shape and forwards to log/5.
# Clause order matters: the `{:ok, res, meter}` clause must precede the
# guarded `{ok, res, meter}` (:cont/:halt) clause.
defp log({:ok, res, meter}, call, query, params),
  do: log(meter, call, query, params, {:ok, res})

defp log({:ok, res1, res2, meter}, call, query, params),
  do: log(meter, call, query, params, {:ok, res1, res2})

defp log({ok, res, meter}, call, query, cursor) when ok in [:cont, :halt],
  do: log(meter, call, query, cursor, {ok, res})

defp log({:error, err, meter}, call, query, params),
  do: log(meter, call, query, params, {:error, err})

defp log({kind, reason, stack, meter}, call, query, params),
  do: log(meter, call, query, params, {kind, reason, stack})

# log/5: no meter means logging is off — just unwrap/raise the result.
defp log(nil, _, _, _, result),
  do: log_result(result)

# With a meter: build the LogEntry, invoke the user logger (crashes in the
# logger are contained by log_raised/4), then unwrap/raise the result.
defp log({log, times}, call, query, params, result) do
  entry = DBConnection.LogEntry.new(call, query, params, times, entry_result(result))

  try do
    log(log, entry)
  catch
    kind, reason ->
      stack = __STACKTRACE__
      log_raised(entry, kind, reason, stack)
  end

  log_result(result)
end
# Converts an internal result into the shape stored on the log entry:
# captured crashes become a `{:error, %DBConnection.ConnectionError{}}`,
# stream `:cont`/`:halt` results become plain `:ok`, everything else passes
# through untouched.
defp entry_result({kind, reason, stack}) when kind in [:error, :exit, :throw] do
  formatted = Exception.format(kind, reason, stack)
  {:error, %DBConnection.ConnectionError{message: "an exception was raised: " <> formatted}}
end

defp entry_result({status, res}) when status in [:cont, :halt], do: {:ok, res}

defp entry_result(other), do: other
# Invokes the user-supplied logger: an `{module, function, args}` tuple has
# the log entry prepended to `args`; otherwise the logger is a 1-arity fun.
defp log({mod, fun, args}, entry) do
  apply(mod, fun, [entry | args])
end

defp log(fun, entry) do
  fun.(entry)
end

# Re-raises captured crashes after logging has happened; any other result is
# returned unchanged.
defp log_result({kind, reason, stack}) when kind in [:error, :exit, :throw],
  do: :erlang.raise(kind, reason, stack)

defp log_result(other) do
  other
end
# Reports a crash raised by the *user's* log function. Uses a function-level
# `catch` so that a failure while logging the failure cannot take the caller
# down — it is swallowed and :ok is returned.
defp log_raised(entry, kind, reason, stack) do
  reason = Exception.normalize(kind, reason, stack)

  Logger.error(
    fn ->
      "an exception was raised logging #{inspect(entry)}: " <>
        Exception.format(kind, reason, stack)
    end,
    crash_reason: {crash_reason(kind, reason), stack}
  )
catch
  _, _ ->
    :ok
end

# Maps kind/value to the shape Logger expects for :crash_reason; throws are
# wrapped as `{:nocatch, value}` per Erlang convention.
defp crash_reason(:throw, value), do: {:nocatch, value}
defp crash_reason(_, value), do: value
# Drives an outermost transaction after BEGIN succeeded: runs `fun` with the
# conn switched into :transaction mode, then commits or rolls back. `run` is
# either &run/4 (conn already held) or &checkin/4 (release to pool after).
defp run_transaction(conn, fun, run, opts) do
  %DBConnection{conn_ref: conn_ref} = conn

  try do
    result = fun.(%{conn | conn_mode: :transaction})
    # May throw {__MODULE__, conn_ref, :rollback} if the holder is aborted.
    conclude(conn, result)
  catch
    :throw, {__MODULE__, ^conn_ref, reason} ->
      # rollback/2 was called (or conclude/2 threw): clear the aborted flag
      # so the ROLLBACK statement itself can run, then roll back. Transaction
      # and connection errors during rollback are subsumed by `reason`.
      reset(conn)

      case rollback(conn, run, opts) do
        {:ok, _} ->
          {:error, reason}

        {:error, %DBConnection.TransactionError{}} ->
          {:error, reason}

        {:error, %DBConnection.ConnectionError{}} ->
          {:error, reason}

        {:error, err} ->
          raise err
      end

    kind, reason ->
      # Unexpected crash in `fun`: best-effort rollback (result ignored),
      # then re-raise with the original stacktrace.
      stack = __STACKTRACE__
      reset(conn)
      _ = rollback(conn, run, opts)
      :erlang.raise(kind, reason, stack)
  else
    result ->
      case commit(conn, run, opts) do
        {:ok, _} ->
          {:ok, result}

        {:error, %DBConnection.TransactionError{}} ->
          {:error, :rollback}

        {:error, err} ->
          raise err
      end
  after
    # Always leave the holder status clean for the next user of the conn.
    reset(conn)
  end
end
# Marks the holder as :aborted (only if currently :ok) so nested transaction
# calls know the transaction has failed.
defp fail(%DBConnection{pool_ref: pool_ref}) do
  case Holder.status?(pool_ref, :ok) do
    true -> Holder.put_status(pool_ref, :aborted)
    false -> :ok
  end
end

# Returns `result` if the holder is still :ok; otherwise throws the rollback
# tuple so the enclosing transaction machinery rolls back.
defp conclude(%DBConnection{pool_ref: pool_ref, conn_ref: conn_ref}, result) do
  case Holder.status?(pool_ref, :ok) do
    true -> result
    false -> throw({__MODULE__, conn_ref, :rollback})
  end
end

# Clears an :aborted holder status back to :ok (no-op otherwise).
defp reset(%DBConnection{pool_ref: pool_ref}) do
  case Holder.status?(pool_ref, :aborted) do
    true -> Holder.put_status(pool_ref, :ok)
    false -> :ok
  end
end
# Issues BEGIN through the given runner (&run/4 or &checkout/4) and logs it.
defp begin(conn, run, opts) do
  conn
  |> run.(&run_begin/3, meter(opts), opts)
  |> log(:begin, :begin, nil)
end

# Calls the adapter's handle_begin/2. A bare status return (`{:idle, state}`
# etc.) means BEGIN was invalid for the current transaction state — the
# connection is disconnected with a TransactionError.
defp run_begin(conn, meter, opts) do
  %DBConnection{pool_ref: pool_ref} = conn
  meter = event(meter, :begin)

  case Holder.handle(pool_ref, :handle_begin, [], opts) do
    {status, _conn_state} when status in [:idle, :transaction, :error] ->
      status_disconnect(conn, status, meter)

    other ->
      handle_common_result(other, conn, meter)
  end
end

# Issues ROLLBACK through the given runner and logs it.
defp rollback(conn, run, opts) do
  conn
  |> run.(&run_rollback/3, meter(opts), opts)
  |> log(:rollback, :rollback, nil)
end

# Calls the adapter's handle_rollback/2; bare status returns disconnect,
# mirroring run_begin/3.
defp run_rollback(conn, meter, opts) do
  %DBConnection{pool_ref: pool_ref} = conn
  meter = event(meter, :rollback)

  case Holder.handle(pool_ref, :handle_rollback, [], opts) do
    {status, _conn_state} when status in [:idle, :transaction, :error] ->
      status_disconnect(conn, status, meter)

    other ->
      handle_common_result(other, conn, meter)
  end
end
# Issues COMMIT. run_commit/3 tags its result with :commit or :rollback so we
# know whether the adapter demoted the commit to a rollback (failed
# transaction state): in that case a successful ROLLBACK is still reported to
# the caller as a TransactionError.
defp commit(conn, run, opts) do
  case run.(conn, &run_commit/3, meter(opts), opts) do
    {:rollback, {:ok, result, meter}} ->
      log(meter, :commit, :rollback, nil, {:ok, result})
      err = DBConnection.TransactionError.exception(:error)
      {:error, err}

    # Any other tagged result ({:commit, _} or {:rollback, error}): log under
    # the tag as the "query".
    {query, other} ->
      log(other, :commit, query, nil)

    # 3-/4-tuples come from checkout failing before run_commit ran.
    {:error, err, meter} ->
      log(meter, :commit, :commit, nil, {:error, err})

    {kind, reason, stack, meter} ->
      log(meter, :commit, :commit, nil, {kind, reason, stack})
  end
end

# Calls handle_commit/2. An `{:error, state}` status means the transaction
# failed server-side: fall back to ROLLBACK and tag the result accordingly.
defp run_commit(conn, meter, opts) do
  %DBConnection{pool_ref: pool_ref} = conn
  meter = event(meter, :commit)

  case Holder.handle(pool_ref, :handle_commit, [], opts) do
    {:error, _conn_state} ->
      {:rollback, run_rollback(conn, meter, opts)}

    {status, _conn_state} when status in [:idle, :transaction] ->
      {:commit, status_disconnect(conn, status, meter)}

    other ->
      {:commit, handle_common_result(other, conn, meter)}
  end
end

# A transaction callback returned a bare status: the connection's transaction
# state is inconsistent, so disconnect and surface a TransactionError.
defp status_disconnect(conn, status, meter) do
  err = DBConnection.TransactionError.exception(status)
  disconnect(conn, err)
  {:error, err, meter}
end
# Calls the adapter's handle_status/2 and returns the bare status. Unlike the
# transaction callbacks, a status return here is the expected success shape.
defp run_status(conn, meter, opts) do
  %DBConnection{pool_ref: pool_ref} = conn

  case Holder.handle(pool_ref, :handle_status, [], opts) do
    {status, _conn_state} when status in [:idle, :transaction, :error] ->
      {status, meter}

    {:disconnect, err, _conn_state} ->
      disconnect(conn, err)
      {:error, err, meter}

    {:catch, kind, reason, stack} ->
      stop(conn, kind, reason, stack)
      {kind, reason, stack, meter}

    other ->
      bad_return!(other, conn, meter)
  end
end
# Full prepare -> describe -> encode -> declare pipeline used to create a
# cursor for an unprepared query. Each step's non-`:ok` return short-circuits
# the `with` and is returned as-is, including run_declare's own result.
defp run_prepare_declare(conn, query, params, meter, opts) do
  with {:ok, query, meter} <- prepare(conn, query, meter, opts),
       {:ok, query, meter} <- describe(conn, query, meter, opts),
       {:ok, params, meter} <- encode(conn, query, params, meter, opts) do
    run_declare(conn, query, params, meter, opts)
  end
end
# Invokes the adapter's handle_declare/4 to open a cursor. Success is the
# 4-tuple `{:ok, query, cursor, state}`; a bare 3-tuple `{:ok, _, _}` is an
# invalid adapter return.
defp run_declare(conn, query, params, meter, opts) do
  %DBConnection{pool_ref: pool_ref} = conn
  meter = event(meter, :declare)

  case Holder.handle(pool_ref, :handle_declare, [query, params], opts) do
    {:ok, query, result, _conn_state} ->
      {:ok, query, result, meter}

    {:ok, _, _} = other ->
      bad_return!(other, conn, meter)

    other ->
      handle_common_result(other, conn, meter)
  end
end
# Stream.resource "next" fun: fetch one batch from the cursor while the state
# tag is :cont. Emits the decoded batch and carries the :cont/:halt tag
# forward; errors are raised out of the enumeration (log/4 re-raises crash
# tuples via log_result/1).
defp stream_fetch(conn, {:cont, query, cursor}, opts) do
  conn
  |> run(&run_stream_fetch/4, [query, cursor], meter(opts), opts)
  |> log(:fetch, query, cursor)
  |> case do
    {ok, result} when ok in [:cont, :halt] ->
      {[result], {ok, query, cursor}}

    {:error, err} ->
      raise err
  end
end

# Once halted, keep returning :halt so Stream.resource stops enumerating.
defp stream_fetch(_, {:halt, _, _} = state, _) do
  {:halt, state}
end

# Fetches a raw batch and decodes it, preserving the :cont/:halt tag.
defp run_stream_fetch(conn, args, meter, opts) do
  [query, _] = args

  with {ok, result, meter} when ok in [:cont, :halt] <- run_fetch(conn, args, meter, opts),
       {:ok, result, meter} <- decode(query, result, meter, opts) do
    {ok, result, meter}
  end
end
# Invokes the adapter's handle_fetch/4 for the next cursor batch. The success
# shapes are `{:cont, result, state}` (more rows remain) and
# `{:halt, result, state}` (cursor exhausted); everything else goes through
# the common result normalization.
defp run_fetch(conn, args, meter, opts) do
  %DBConnection{pool_ref: pool_ref} = conn
  meter = event(meter, :fetch)

  case Holder.handle(pool_ref, :handle_fetch, args, opts) do
    {dir, result, _conn_state} when dir in [:cont, :halt] ->
      {dir, result, meter}

    other ->
      handle_common_result(other, conn, meter)
  end
end
# Stream.resource "after" fun: always deallocate the cursor, whether the
# stream finished (:halt) or was abandoned mid-way (:cont).
defp stream_deallocate(conn, {_status, query, cursor}, opts),
  do: deallocate(conn, query, cursor, opts)

defp run_deallocate(conn, args, meter, opts) do
  meter = event(meter, :deallocate)
  cleanup(conn, :handle_deallocate, args, meter, opts)
end

# Adapts the declare/fetch/deallocate triple to Stream.resource/3 by closing
# over the connection and options.
defp resource(%DBConnection{} = conn, start, next, stop, opts) do
  start = fn -> start.(conn, opts) end
  next = fn state -> next.(conn, state, opts) end
  stop = fn state -> stop.(conn, state, opts) end
  Stream.resource(start, next, stop)
end
end
| 33.02849 | 95 | 0.644993 |
fffa8a2bcd5aeb486b3203b2583b79d05c1d4c38 | 1,087 | exs | Elixir | apps/ewallet_db/priv/repo/migrations/20181106043815_create_update_email_request.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/priv/repo/migrations/20181106043815_create_update_email_request.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | apps/ewallet_db/priv/repo/migrations/20181106043815_create_update_email_request.exs | jimpeebles/ewallet | ad4a9750ec8dc5adc4c0dfe6c22f0ef760825405 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.Repo.Migrations.CreateUpdateEmailRequest do
use Ecto.Migration
def change do
create table(:update_email_request, primary_key: false) do
add :uuid, :uuid, primary_key: true
add :email, :string, null: false
add :token, :string, null: false
add :enabled, :boolean, null: false, default: true
add :user_uuid, references(:user, type: :uuid, column: :uuid)
timestamps()
end
create unique_index(:update_email_request, [:token])
end
end
| 32.939394 | 74 | 0.727691 |
fffa9ad61949316b6a7f6fc5e7e26b0c0ec22821 | 340 | exs | Elixir | test/guardian/db_fail_test.exs | kianmeng/guardian_db | 1f3a366805d820504e0c7f160a1848b1f1bfc7c3 | [
"MIT"
] | 241 | 2017-01-09T04:44:26.000Z | 2022-03-25T13:46:29.000Z | test/guardian/db_fail_test.exs | kianmeng/guardian_db | 1f3a366805d820504e0c7f160a1848b1f1bfc7c3 | [
"MIT"
] | 99 | 2017-01-10T15:42:10.000Z | 2022-03-16T02:21:22.000Z | test/guardian/db_fail_test.exs | kianmeng/guardian_db | 1f3a366805d820504e0c7f160a1848b1f1bfc7c3 | [
"MIT"
] | 85 | 2017-01-09T10:17:46.000Z | 2021-06-15T18:32:11.000Z | defmodule Guardian.DBFailTest do
use Guardian.DB.TestSupport.CaseTemplate
test "after_encode_and_sign_in is fails" do
token = get_token()
assert token == nil
{:error, :token_storage_failure} =
Guardian.DB.after_encode_and_sign(%{}, "token", %{}, "The JWT")
token = get_token()
assert token == nil
end
end
| 22.666667 | 69 | 0.682353 |
fffac67b52859fbc4b93b62c6216bf46d6756ec4 | 2,606 | ex | Elixir | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/ios_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/ios_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/firebase_dynamic_links/lib/google_api/firebase_dynamic_links/v1/model/ios_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.FirebaseDynamicLinks.V1.Model.IosInfo do
  @moduledoc """
  iOS related attributes to the Dynamic Link..

  ## Attributes

  *   `iosAppStoreId` (*type:* `String.t`, *default:* `nil`) - iOS App Store ID.
  *   `iosBundleId` (*type:* `String.t`, *default:* `nil`) - iOS bundle ID of the app.
  *   `iosCustomScheme` (*type:* `String.t`, *default:* `nil`) - Custom (destination) scheme to use for iOS. By default, we’ll use the
      bundle ID as the custom scheme. Developer can override this behavior using
      this param.
  *   `iosFallbackLink` (*type:* `String.t`, *default:* `nil`) - Link to open on iOS if the app is not installed.
  *   `iosIpadBundleId` (*type:* `String.t`, *default:* `nil`) - iPad bundle ID of the app.
  *   `iosIpadFallbackLink` (*type:* `String.t`, *default:* `nil`) - If specified, this overrides the ios_fallback_link value on iPads.
  *   `iosMinimumVersion` (*type:* `String.t`, *default:* `nil`) - iOS minimum version.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :iosAppStoreId => String.t(),
          :iosBundleId => String.t(),
          :iosCustomScheme => String.t(),
          :iosFallbackLink => String.t(),
          :iosIpadBundleId => String.t(),
          :iosIpadFallbackLink => String.t(),
          :iosMinimumVersion => String.t()
        }

  # `field/1` is a GoogleApi.Gax.ModelBase macro registering each key for
  # JSON (de)serialization; camelCase names mirror the wire format.
  field(:iosAppStoreId)
  field(:iosBundleId)
  field(:iosCustomScheme)
  field(:iosFallbackLink)
  field(:iosIpadBundleId)
  field(:iosIpadFallbackLink)
  field(:iosMinimumVersion)
end
# Delegates Poison decoding to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.FirebaseDynamicLinks.V1.Model.IosInfo do
  def decode(value, options) do
    GoogleApi.FirebaseDynamicLinks.V1.Model.IosInfo.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.FirebaseDynamicLinks.V1.Model.IosInfo do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 38.895522 | 135 | 0.69647 |
fffac70e2c4509db45bc6ae7856aa48b672fe47c | 259 | ex | Elixir | rockelivery/lib/rockelivery.ex | arilsonsouza/rocketseat-ignite-elixir | 93e32d52d589336dfd2d81e755d6dd7f05ee40b8 | [
"MIT"
] | null | null | null | rockelivery/lib/rockelivery.ex | arilsonsouza/rocketseat-ignite-elixir | 93e32d52d589336dfd2d81e755d6dd7f05ee40b8 | [
"MIT"
] | null | null | null | rockelivery/lib/rockelivery.ex | arilsonsouza/rocketseat-ignite-elixir | 93e32d52d589336dfd2d81e755d6dd7f05ee40b8 | [
"MIT"
] | null | null | null | defmodule Rockelivery do
@moduledoc """
Rockelivery keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 25.9 | 66 | 0.760618 |
fffad1be158acd4df36f66fc8c6f0cdf90dd2bea | 1,165 | exs | Elixir | config/config.exs | j-peso/ElixirWeather | b1f34ea4fdfb49b11c626b77da34f4afb9028c7c | [
"MIT"
] | null | null | null | config/config.exs | j-peso/ElixirWeather | b1f34ea4fdfb49b11c626b77da34f4afb9028c7c | [
"MIT"
] | null | null | null | config/config.exs | j-peso/ElixirWeather | b1f34ea4fdfb49b11c626b77da34f4afb9028c7c | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :weather, api_key: 'YOUR API KEY'
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :weather, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:weather, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.30303 | 73 | 0.750215 |
fffadaacd9a6484077b7c9e0dfbd49428454a26a | 3,929 | exs | Elixir | test/state_module/on_state_entry_test.exs | ityonemo/state_server | 1e119970e20abb68fff13d449e95e3bf66298668 | [
"MIT"
] | 8 | 2019-08-31T00:31:58.000Z | 2021-06-11T22:12:05.000Z | test/state_module/on_state_entry_test.exs | ityonemo/state_server | 1e119970e20abb68fff13d449e95e3bf66298668 | [
"MIT"
] | 39 | 2019-09-07T21:29:09.000Z | 2020-05-05T15:01:30.000Z | test/state_module/on_state_entry_test.exs | ityonemo/state_server | 1e119970e20abb68fff13d449e95e3bf66298668 | [
"MIT"
] | 2 | 2020-01-04T05:44:25.000Z | 2020-01-19T21:44:41.000Z | defmodule StateServerTest.StateModule.OnStateEntryTest do
use ExUnit.Case, async: true
defmodule StateEntry do
use StateServer, [start: [tr: :end, tr_trap: :end, tr_double: :end, tr_update: :end], end: []]
def start_link(data), do: StateServer.start_link(__MODULE__, data)
@impl true
def init(data), do: {:ok, data}
@impl true
def handle_call(action, _from, _state, _data), do: {:reply, :ok, action}
@impl true
def handle_transition(:start, :tr_update, pid) do
# do a transition which will modify the state
{:noreply, update: {:update, pid}}
end
def handle_transition(_, _, _), do: :noreply
@impl true
def on_state_entry(:tr_trap, :end, pid) do
# traps the state_entry early and doesn't fall through to
# the state module.
send(pid, :trapped_route)
:noreply
end
def on_state_entry(:tr_double, :end, pid) do
# allows for a double-hit
send(pid, :first_hit)
:delegate
end
def on_state_entry(_, :start, _), do: :noreply
delegate :on_state_entry
defstate End, for: :end do
@impl true
def on_state_entry(_, {:update, resp_pid}) do
send(resp_pid, :update_verified)
:noreply
end
def on_state_entry(:tr_double, resp_pid) do
send(resp_pid, :second_hit)
:noreply
end
def on_state_entry(trans, resp_pid) do
send(resp_pid, {:entry_via, trans})
:noreply
end
end
end
describe "when you implement a state with a on_state_entry function" do
test "it gets called correctly when transitioning" do
{:ok, pid} = StateEntry.start_link(self())
GenServer.call(pid, transition: :tr)
assert_receive {:entry_via, :tr}
end
test "it gets called correctly on goto" do
{:ok, pid} = StateEntry.start_link(self())
GenServer.call(pid, goto: :end)
assert_receive {:entry_via, nil}
end
test "you can still trap special cases" do
{:ok, pid} = StateEntry.start_link(self())
GenServer.call(pid, transition: :tr_trap)
assert_receive :trapped_route
end
test "double hits must be explicit" do
{:ok, pid} = StateEntry.start_link(self())
GenServer.call(pid, transition: :tr_double)
assert_receive :first_hit
assert_receive :second_hit
end
test "you can trigger an update" do
{:ok, pid} = StateEntry.start_link(self())
GenServer.call(pid, transition: :tr_update)
assert_receive :update_verified
end
end
defmodule StateEntryDelegation do
use StateServer, [start: [tr: :end, tr2: :end], end: []]
def start_link(data), do: StateServer.start_link(__MODULE__, data)
@impl true
def init(data), do: {:ok, data}
@impl true
def handle_call(action, _from, _state, _data), do: {:reply, :ok, action}
@impl true
def on_state_entry(:tr2, :end, data) do
send(data, :outer_handler)
:noreply
end
def on_state_entry(_, :start, _), do: :noreply
delegate :on_state_entry
defstate End, for: :end do
@impl true
def on_state_entry(trans, resp_pid) do
send(resp_pid, {:entry_via, trans})
:noreply
end
end
end
describe "when you implement a state with a on_state_entry function and delegate" do
test "it gets called correctly after delegation" do
{:ok, pid} = StateEntryDelegation.start_link(self())
GenServer.call(pid, transition: :tr)
assert_receive {:entry_via, :tr}
end
test "it gets called correctly before delegation" do
{:ok, pid} = StateEntryDelegation.start_link(self())
GenServer.call(pid, transition: :tr2)
assert_receive :outer_handler
end
test "it gets called correctly on goto" do
{:ok, pid} = StateEntryDelegation.start_link(self())
GenServer.call(pid, goto: :end)
assert_receive {:entry_via, nil}
end
end
end
| 28.471014 | 98 | 0.647748 |
fffae78721daf7dd4c4c62fb4717d0aed7e2f277 | 5,221 | exs | Elixir | test/ex_oneroster/academic_sessions/academic_sessions_test.exs | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | 3 | 2018-09-06T11:15:07.000Z | 2021-12-27T15:36:51.000Z | test/ex_oneroster/academic_sessions/academic_sessions_test.exs | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | null | null | null | test/ex_oneroster/academic_sessions/academic_sessions_test.exs | jrissler/ex_oneroster | cec492117bffc14aec91e2448643682ceeb449e9 | [
"Apache-2.0"
] | null | null | null | defmodule ExOneroster.AcademicSessionsTest do
use ExOneroster.DataCase
alias ExOneroster.AcademicSessions
describe "academic_sessions" do
alias ExOneroster.AcademicSessions.AcademicSession
test "list_academic_sessions/0 returns all academic_sessions" do
academic_session = insert(:academic_session)
assert AcademicSessions.list_academic_sessions() == [academic_session]
end
test "get_academic_session!/1 returns the academic_session with given id" do
academic_session = insert(:academic_session)
assert AcademicSessions.get_academic_session!(academic_session.id) == academic_session
end
test "get_academic_session!/1 returns parent with child" do
# NOTE(review): base_setup/0 comes from the test support code (not visible
# here); it appears to build a tree of parent/child sessions — confirm.
only_child = base_setup()[:parent_academic_session]
academic_session = AcademicSessions.get_academic_session!(only_child.id)
assert length(academic_session.children) == 1
end
test "get_academic_session!/1 returns parent with parent and children" do
data = base_setup()
academic_session = AcademicSessions.get_academic_session!(data[:sub_child_academic_session].id)
assert length(academic_session.children) == 2
assert academic_session.parent != nil
end
test "get_academic_session!/1 with children returns the academic_session with given id" do
academic_session = insert(:academic_session)
assert AcademicSessions.get_academic_session!(academic_session.id) == academic_session
end
test "create_academic_session/1 with valid data creates a academic_session" do
academic_session_params = build(:academic_session)
assert {:ok, %AcademicSession{} = academic_session} = AcademicSessions.create_academic_session(params_for(:academic_session, dateLastModified: academic_session_params.dateLastModified, sourcedId: academic_session_params.sourcedId))
assert academic_session.dateLastModified == academic_session_params.dateLastModified
# The factory keeps dates as ISO-8601 strings; the schema stores Date structs.
assert academic_session.endDate == Date.from_iso8601!(academic_session_params.endDate)
assert academic_session.metadata == academic_session_params.metadata
assert academic_session.parent_id == academic_session_params.parent_id
assert academic_session.schoolYear == academic_session_params.schoolYear
assert academic_session.sourcedId == academic_session_params.sourcedId
assert academic_session.startDate == Date.from_iso8601!(academic_session_params.startDate)
assert academic_session.status == academic_session_params.status
assert academic_session.title == academic_session_params.title
assert academic_session.type == academic_session_params.type
assert academic_session.parent == nil
assert academic_session.children == []
end
test "create_academic_session/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = AcademicSessions.create_academic_session(params_for(:academic_session, sourcedId: nil))
end
test "update_academic_session/2 with valid data updates the academic_session" do
existing_academic_session = insert(:academic_session)
assert {:ok, academic_session} = AcademicSessions.update_academic_session(existing_academic_session, params_for(:academic_session, sourcedId: "Bond..James Bond", dateLastModified: existing_academic_session.dateLastModified))
assert %AcademicSession{} = academic_session
assert academic_session.dateLastModified == existing_academic_session.dateLastModified
assert academic_session.endDate == existing_academic_session.endDate
assert academic_session.metadata == existing_academic_session.metadata
assert academic_session.parent_id == existing_academic_session.parent_id
assert academic_session.schoolYear == existing_academic_session.schoolYear
assert academic_session.sourcedId == "Bond..James Bond"
assert academic_session.startDate == existing_academic_session.startDate
assert academic_session.status == existing_academic_session.status
assert academic_session.title == existing_academic_session.title
assert academic_session.type == existing_academic_session.type
assert academic_session.parent == nil
assert academic_session.children == []
end
test "update_academic_session/2 with invalid data returns error changeset" do
academic_session = insert(:academic_session)
assert {:error, %Ecto.Changeset{}} = AcademicSessions.update_academic_session(academic_session, params_for(:academic_session, dateLastModified: "Not a date"))
# A failed update must leave the stored record untouched.
assert academic_session == AcademicSessions.get_academic_session!(academic_session.id)
end
test "delete_academic_session/1 deletes the academic_session" do
academic_session = insert(:academic_session)
assert {:ok, %AcademicSession{}} = AcademicSessions.delete_academic_session(academic_session)
assert_raise Ecto.NoResultsError, fn -> AcademicSessions.get_academic_session!(academic_session.id) end
end
test "change_academic_session/1 returns a academic_session changeset" do
academic_session = insert(:academic_session)
assert %Ecto.Changeset{} = AcademicSessions.change_academic_session(academic_session)
end
end
end
| 53.824742 | 237 | 0.786248 |
fffb0d0e9b250ac74080439e8c75e0e13e6fec55 | 91 | ex | Elixir | test/support/factory.ex | Soonad/Bitlog | 17f41b591169dca7412b9790e8f20abf11b46313 | [
"MIT"
] | 2 | 2019-12-03T10:38:38.000Z | 2019-12-04T23:52:11.000Z | test/support/factory.ex | moonad/Bitlog | 17f41b591169dca7412b9790e8f20abf11b46313 | [
"MIT"
] | null | null | null | test/support/factory.ex | moonad/Bitlog | 17f41b591169dca7412b9790e8f20abf11b46313 | [
"MIT"
] | null | null | null | defmodule Bitlog.Factory do
@moduledoc false
# Test-data factory; ExMachina's Ecto integration persists built records
# through Bitlog.Repo.
use ExMachina.Ecto, repo: Bitlog.Repo
end
| 18.2 | 39 | 0.78022 |
fffb2b662445553c688d09871ba0a643ffe33075 | 2,780 | ex | Elixir | clients/dataproc/lib/google_api/dataproc/v1/model/lifecycle_config.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | null | null | null | clients/dataproc/lib/google_api/dataproc/v1/model/lifecycle_config.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dataproc/lib/google_api/dataproc/v1/model/lifecycle_config.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Dataproc.V1.Model.LifecycleConfig do
@moduledoc """
Specifies the cluster auto-delete schedule configuration.
## Attributes
* `autoDeleteTime` (*type:* `DateTime.t`, *default:* `nil`) - Optional. The time when cluster will be auto-deleted (see JSON representation of Timestamp (https://developers.google.com/protocol-buffers/docs/proto3#json)).
* `autoDeleteTtl` (*type:* `String.t`, *default:* `nil`) - Optional. The lifetime duration of cluster. The cluster will be auto-deleted at the end of this period. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of Duration (https://developers.google.com/protocol-buffers/docs/proto3#json)).
* `idleDeleteTtl` (*type:* `String.t`, *default:* `nil`) - Optional. The duration to keep the cluster alive while idling (when no jobs are running). Passing this threshold will cause the cluster to be deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON representation of Duration (https://developers.google.com/protocol-buffers/docs/proto3#json).
* `idleStartTime` (*type:* `DateTime.t`, *default:* `nil`) - Output only. The time when cluster became idle (most recent job finished) and became eligible for deletion due to idleness (see JSON representation of Timestamp (https://developers.google.com/protocol-buffers/docs/proto3#json)).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:autoDeleteTime => DateTime.t(),
:autoDeleteTtl => String.t(),
:idleDeleteTtl => String.t(),
:idleStartTime => DateTime.t()
}
# Field macros from GoogleApi.Gax.ModelBase; `as: DateTime` decodes the JSON
# timestamp fields into DateTime structs (matching the @type above).
field(:autoDeleteTime, as: DateTime)
field(:autoDeleteTtl)
field(:idleDeleteTtl)
field(:idleStartTime, as: DateTime)
end
# Auto-generated Poison protocol implementations (see the generator notice in
# the file header): decoding routes through the model module, encoding
# through the shared Gax base.
defimpl Poison.Decoder, for: GoogleApi.Dataproc.V1.Model.LifecycleConfig do
def decode(value, options) do
GoogleApi.Dataproc.V1.Model.LifecycleConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Dataproc.V1.Model.LifecycleConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 49.642857 | 370 | 0.738489 |
fffb48903cc35f04472ea376e558e5eac55cae86 | 1,072 | ex | Elixir | lib/cassette/client/generate_st.ex | maureenwonderland/elixir-cassette | 2a1f7a0325c51b9abd572dc1d11529dfdd552dc9 | [
"MIT"
] | 15 | 2016-03-03T11:22:00.000Z | 2021-03-16T15:03:56.000Z | lib/cassette/client/generate_st.ex | maureenwonderland/elixir-cassette | 2a1f7a0325c51b9abd572dc1d11529dfdd552dc9 | [
"MIT"
] | 13 | 2016-06-16T11:43:02.000Z | 2021-09-27T18:40:19.000Z | lib/cassette/client/generate_st.ex | maureenwonderland/elixir-cassette | 2a1f7a0325c51b9abd572dc1d11529dfdd552dc9 | [
"MIT"
] | 6 | 2016-06-16T19:26:37.000Z | 2021-04-15T19:32:04.000Z | defmodule Cassette.Client.GenerateSt do
@moduledoc """
Generates CAS Service Ticket
"""
use HTTPoison.Base
alias Cassette.Client
alias Cassette.Config
@type result ::
{:error, :bad_tgt}
| {:ok, String.t()}
| {:fail, pos_integer(), String.t()}
| {:fail, :unknown}
@doc """
Requests a service ticket from the CAS server for the given `service`,
using the ticket granting ticket `tgt`.
"""
@spec perform(Config.t(), String.t(), String.t()) :: result
def perform(config = %Config{base_url: base_url}, tgt, service) do
url = "#{base_url}/v1/tickets/#{tgt}"
options = Client.options(config)
params = {:form, [service: service]}
headers = []
case post(url, params, headers, options) do
# 404 is mapped to :bad_tgt (the ticket granting ticket was rejected).
{:ok, %HTTPoison.Response{status_code: 404}} ->
{:error, :bad_tgt}
# On 200 the response body is returned as the ticket — per the CAS REST
# protocol the body carries the ST (confirm against the CAS docs).
{:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
{:ok, body}
{:ok, %HTTPoison.Response{status_code: status_code, body: body}} ->
{:fail, status_code, body}
# Transport-level failures (e.g. {:error, %HTTPoison.Error{}}).
_ ->
{:fail, :unknown}
end
end
end
| 24.930233 | 73 | 0.594216 |
fffb685c55da658a6ae906df1b2c6008329e317c | 619 | exs | Elixir | test/multi_tenancex_web/views/error_view_test.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | 30 | 2018-06-27T17:51:53.000Z | 2021-04-24T03:17:55.000Z | test/multi_tenancex_web/views/error_view_test.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | null | null | null | test/multi_tenancex_web/views/error_view_test.exs | dreamingechoes/multi_tenancex | cfe3feb6b7eb25559f9abaa4da89e4aafc9ad2ec | [
"MIT"
] | 7 | 2018-07-24T17:56:14.000Z | 2019-12-31T02:10:13.000Z | defmodule MultiTenancexWeb.ErrorViewTest do
use MultiTenancexWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(MultiTenancexWeb.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(MultiTenancexWeb.ErrorView, "500.html", []) ==
"Internal server error"
end
# NOTE(review): 505.html has no dedicated template; the view apparently falls
# back to the 500 message for unknown templates — confirm via its
# template_not_found handling.
test "render any other" do
assert render_to_string(MultiTenancexWeb.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 28.136364 | 74 | 0.693053 |
fffbf00ffd220c49ee3dac5ae903673dd39e0687 | 69 | ex | Elixir | lib/dashboard/repo.ex | bionikspoon/classroom-dashboard | c324eeab568335591daf83ae0bdd9a60d25f4595 | [
"MIT"
] | null | null | null | lib/dashboard/repo.ex | bionikspoon/classroom-dashboard | c324eeab568335591daf83ae0bdd9a60d25f4595 | [
"MIT"
] | null | null | null | lib/dashboard/repo.ex | bionikspoon/classroom-dashboard | c324eeab568335591daf83ae0bdd9a60d25f4595 | [
"MIT"
] | null | null | null | defmodule Dashboard.Repo do
# Ecto repository; database configuration is read from the :dashboard OTP
# application environment.
use Ecto.Repo, otp_app: :dashboard
end
| 17.25 | 36 | 0.782609 |
fffbf2deff8ce96c5f8cd490e29a0fb69fc0bd30 | 1,298 | ex | Elixir | lib/parser/tokenizer.ex | dbernheisel/date_time_parser | 53378ccfb5938c304a18fd62b1f288ab94dd5446 | [
"MIT"
] | 73 | 2019-07-26T19:48:10.000Z | 2022-03-23T10:01:34.000Z | lib/parser/tokenizer.ex | dbernheisel/date_time_parser | 53378ccfb5938c304a18fd62b1f288ab94dd5446 | [
"MIT"
] | 23 | 2019-08-28T22:08:32.000Z | 2021-06-14T19:33:18.000Z | lib/parser/tokenizer.ex | dbernheisel/date_time_parser | 53378ccfb5938c304a18fd62b1f288ab94dd5446 | [
"MIT"
] | 9 | 2019-08-01T22:42:20.000Z | 2022-02-15T19:02:50.000Z | defmodule DateTimeParser.Parser.Tokenizer do
@moduledoc """
This parser doesn't parse, instead it checks the string and assigns the appropriate parser during
preflight. The appropriate parser is determined by whether there is a `"/"` present in the string,
and if so it will assume the string is a US-formatted date or datetime, and therefore use the
US-optimized tokenizer module (ie, `DateTimeParser.Parser.DateUS` or
`DateTimeParser.Parser.DateTimeUS`) for them. Time will always be parsed with
`DateTimeParser.Parser.Time`.
"""
@behaviour DateTimeParser.Parser
alias DateTimeParser.Parser
@impl DateTimeParser.Parser
# Assigns the concrete tokenizer module into `parser.mod`; per the moduledoc,
# this module only routes — the selected module does the actual parsing.
def preflight(%{string: string, context: context} = parser) do
{:ok, %{parser | mod: get_token_parser(context, string)}}
end
@impl DateTimeParser.Parser
# Guard clause: parsing must never be attempted with this module directly,
# since preflight/1 always swaps `mod` to a real parser.
def parse(_parser) do
raise DateTimeParser.ParseError, "Cannot parse with DateTimeParser.Parser.Tokenizer"
end
# Maps a parsing context (:datetime | :date | :time) to the tokenizer module
# that should handle the string. A "/" character marks the input as
# US-formatted, selecting the US-specific date/datetime parser variants.
defp get_token_parser(context, string) when context in [:datetime, :date] do
  us_format? = String.contains?(string, "/")

  case {context, us_format?} do
    {:datetime, true} -> Parser.DateTimeUS
    {:datetime, false} -> Parser.DateTime
    {:date, true} -> Parser.DateUS
    {:date, false} -> Parser.Date
  end
end

defp get_token_parser(:time, _string), do: Parser.Time
end
| 29.5 | 100 | 0.727273 |
fffbfc8cebc3135f6b6faba7d5502e86117f18a8 | 829 | ex | Elixir | lib/bankapi/operations/notify.ex | t00lmaker/elixir-bank | 41897d8fa87bb2fedbe3ef6f8f5cd78b756e24f0 | [
"MIT"
] | 4 | 2020-05-05T18:37:28.000Z | 2022-01-05T00:56:19.000Z | lib/bankapi/operations/notify.ex | t00lmaker/elixir-bank | 41897d8fa87bb2fedbe3ef6f8f5cd78b756e24f0 | [
"MIT"
] | 21 | 2019-12-01T15:32:02.000Z | 2019-12-19T13:10:36.000Z | lib/bankapi/operations/notify.ex | t00lmaker/elixir-bank | 41897d8fa87bb2fedbe3ef6f8f5cd78b756e24f0 | [
"MIT"
] | 2 | 2020-09-12T16:07:11.000Z | 2020-12-11T06:46:45.000Z | defmodule Bank.Operations.Notify do
@moduledoc """
Envia uma notificação para o cliente
para um operacao realizada em uma de suas contas.
"""
alias Bank.Operations.Operation
require Logger
# Builds the notification message for `operation` and dispatches it
# asynchronously by email.
#
# Returns `{:ok, msg}` when a template exists for the operation type, and
# `{:error, reason}` otherwise. Previously an unsupported type crashed with
# a MatchError, because the `{:error, _}` tuple returned by the `template/1`
# fallback clause was matched against `{:ok, msg}`; the error tuple is now
# propagated to the caller instead.
def send(operation) do
case template(operation) do
{:ok, msg} ->
send_email(msg)
{:ok, msg}

{:error, _reason} = error ->
error
end
end
# Dispatches the email asynchronously under Bank.TaskSupervisor; async_nolink
# keeps the caller unlinked, so delivery failures do not crash it.
def send_email(_msg) do
Task.Supervisor.async_nolink(Bank.TaskSupervisor, fn ->
Logger.debug("Envidando email")
# NOTE(review): the sleep presumably simulates slow email delivery — confirm.
:timer.sleep(10_000)
Logger.debug("Email enviado com sucesso.")
end)
end
defp template(%Operation{type: "SAQUE"} = op) do
# Template message for SAQUE (withdrawal) operations.
Logger.debug("Construindo msg para operação de #{op.type}")
{:ok, " template here "}
end
# Fallback: operation types without a template yield an error tuple.
defp template(operation) do
{:error, "Tipo #{operation.type} sem template definido para notificação."}
end
end
| 24.382353 | 78 | 0.674306 |
fffc0725b08299801ab50d7a9c083180eae33a8d | 2,009 | ex | Elixir | lib/sse_test_server/post_handler.ex | praekeltfoundation/sse_test_server | 1e30a7719a40b3581e87bca540e3b7f57a4ad352 | [
"MIT"
] | null | null | null | lib/sse_test_server/post_handler.ex | praekeltfoundation/sse_test_server | 1e30a7719a40b3581e87bca540e3b7f57a4ad352 | [
"MIT"
] | 9 | 2018-01-12T10:47:41.000Z | 2018-07-26T09:54:05.000Z | lib/sse_test_server/post_handler.ex | praekeltfoundation/sse_test_server | 1e30a7719a40b3581e87bca540e3b7f57a4ad352 | [
"MIT"
] | null | null | null | defmodule SSETestServer.PostHandler do
use SSETestServer.RequestHandler.Base
@behaviour :cowboy_handler
# Entry point for POST requests only: decodes the urlencoded body into a map
# of fields and dispatches on its "action" field. when_exists/3 comes from
# RequestHandler.Base.
def init(req = %{method: "POST"}, state) do
when_exists(req, state, fn _ ->
{:ok, field_list, req_read} = :cowboy_req.read_urlencoded_body(req)
handle_action(Map.new(field_list), req_read, state)
end)
end
# Pops the mandatory "action" field and forwards the remaining fields to
# perform_action/4; a missing "action" is rejected via bad_request/2.
defp handle_action(fields, req, state) do
req_resp = process_field(
"action", fields, req, &perform_action(&1, &2, req, state))
{:ok, req_resp, state}
end
# "stream_bytes" requires a "bytes" field with the raw payload to emit.
defp perform_action("stream_bytes", fields, req, state) do
process_field("bytes", fields, req,
fn bytes, _ ->
success(req, SSEServer.stream_bytes(state.sse_server, req.path, bytes))
end)
end
defp perform_action("keepalive", _fields, req, state),
do: success(req, SSEServer.keepalive(state.sse_server, req.path))
# "event" requires both an "event" (type) and a "data" field.
defp perform_action("event", fields, req, state) do
process_fields(["event", "data"], fields, req,
fn [event, data], _ ->
success(req, SSEServer.event(state.sse_server, req.path, event, data))
end)
end
defp perform_action("end_stream", _fields, req, state),
do: success(req, SSEServer.end_stream(state.sse_server, req.path))
defp perform_action(action, _fields, req, _state),
do: bad_request(req, "Unknown action: #{action}")
# Pops `field` from the map and calls `fun` with its value and the remaining
# fields; rejects the request via bad_request/2 if the field is absent.
defp process_field(field, fields, req, fun) do
case Map.pop(fields, field) do
{nil, _} -> bad_request(req, "Missing field: #{field}")
{value, remaining_fields} -> fun.(value, remaining_fields)
end
end
# Accumulating variant: extracts all `field_names` in order, then calls `fun`
# with the collected values. Recurses through process_field/4, so the first
# missing field short-circuits with the error response.
defp process_fields(field_names, fields, req, fun),
do: process_fields(field_names, [], fields, req, fun)
defp process_fields([], values, fields, _req, fun),
do: fun.(Enum.reverse(values), fields)
defp process_fields([field | field_names], values, fields, req, fun) do
process_field(field, fields, req,
fn value, new_fields ->
process_fields(field_names, [value | values], new_fields, req, fun)
end)
end
end
| 31.390625 | 79 | 0.674963 |
fffc3d76f4debe1221c4bf6fcbde0094dbfb333a | 349 | exs | Elixir | server/satori/priv/repo/seeds.exs | lastmeta/Satori | cb321ee53a15fe8cba8fcdd483eeb6acc8dab3ea | [
"MIT"
] | 3 | 2022-02-16T17:25:53.000Z | 2022-02-25T05:24:15.000Z | server/satori/priv/repo/seeds.exs | lastmeta/Satori | cb321ee53a15fe8cba8fcdd483eeb6acc8dab3ea | [
"MIT"
] | 9 | 2022-02-16T20:23:55.000Z | 2022-03-26T17:27:23.000Z | server/satori/priv/repo/seeds.exs | lastmeta/Satori | cb321ee53a15fe8cba8fcdd483eeb6acc8dab3ea | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Satori.Repo.insert!(%Satori.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 29.083333 | 61 | 0.704871 |
fffc4ea6fd08e243faf3f1c290c9f6efa07cc5b5 | 1,067 | exs | Elixir | test/integration/broken_consumer_signals_test.exs | renderedtext/ex-tackle | 403623f9be6919998fea6871f34aa07ff2655c49 | [
"MIT"
] | 49 | 2016-07-30T13:45:12.000Z | 2021-08-08T13:45:05.000Z | test/integration/broken_consumer_signals_test.exs | renderedtext/ex-tackle | 403623f9be6919998fea6871f34aa07ff2655c49 | [
"MIT"
] | 20 | 2016-08-05T11:54:35.000Z | 2021-06-02T19:43:06.000Z | test/integration/broken_consumer_signals_test.exs | renderedtext/ex-tackle | 403623f9be6919998fea6871f34aa07ff2655c49 | [
"MIT"
] | 14 | 2016-08-05T09:39:51.000Z | 2021-11-29T14:22:28.000Z | defmodule Tackle.BrokenConsumerSignalsTest do
use ExSpec
alias Support
alias Support.MessageTrace
# Consumer under test: records each delivery in MessageTrace and then kills
# its own process with a custom exit reason, forcing Tackle to redeliver.
defmodule BrokenConsumer do
use Tackle.Consumer,
url: "amqp://localhost",
exchange: "test-exchange",
routing_key: "test-messages",
service: "broken-service-signal",
retry_delay: 1,
retry_limit: 3
def handle_message(message) do
message |> MessageTrace.save("broken-service-signal")
Process.exit(self(), {:foo, message})
end
end
@publish_options %{
url: "amqp://localhost",
exchange: "test-exchange",
routing_key: "test-messages"
}
# Requires a local RabbitMQ broker; queue and trace are reset per test.
setup do
Support.purge_queue("broken-service-signal.test-messages")
MessageTrace.clear("broken-service-signal")
{:ok, _} = BrokenConsumer.start_link()
:timer.sleep(1000)
end
# NOTE(review): the describe label says "healthy consumer" although this
# consumer deliberately crashes — likely copied from a sibling test file.
describe "healthy consumer" do
it "receives the message multiple times" do
Tackle.publish("Hi!", @publish_options)
:timer.sleep(5000)
# Four recorded deliveries — presumably the initial delivery plus
# retry_limit (3) retries; confirm against Tackle's retry semantics.
assert MessageTrace.content("broken-service-signal") == "Hi!Hi!Hi!Hi!"
end
end
end
| 21.77551 | 76 | 0.667291 |
fffc592817d9b5e60d85d107d82ac573845dbb81 | 1,541 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_cancel_test_order_by_customer_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_cancel_test_order_by_customer_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_cancel_test_order_by_customer_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.OrdersCancelTestOrderByCustomerRequest do
@moduledoc """
## Attributes
* `reason` (*type:* `String.t`, *default:* `nil`) - The reason for the cancellation.
Acceptable values are:
- "changedMind"
- "orderedWrongItem"
- "other"
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:reason => String.t()
}
field(:reason)
end
# Auto-generated Poison protocol implementations (see the generator notice in
# the file header): decoding routes through the model module, encoding
# through the shared Gax base.
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersCancelTestOrderByCustomerRequest do
def decode(value, options) do
GoogleApi.Content.V2.Model.OrdersCancelTestOrderByCustomerRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersCancelTestOrderByCustomerRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 29.634615 | 97 | 0.733939 |
fffc5fa0aff53c0610091945e6034f9cb5b5787e | 408 | ex | Elixir | lib/ref/admin/service.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
] | null | null | null | lib/ref/admin/service.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
] | null | null | null | lib/ref/admin/service.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
] | null | null | null | defmodule Ref.Admin.Service do
use Ecto.Schema
import Ecto.Changeset
schema "services" do
field :description, :string
field :name, :string
# Raw foreign key to the owning user; no belongs_to association is declared.
field :user_id, :id
# NOTE(review): integer price — presumably a minor-currency amount (cents);
# confirm the intended unit.
field :price, :integer
timestamps()
end
@doc false
# Builds a changeset for create/update. Only :name and :description are
# mandatory; :user_id and :price are cast but optional.
def changeset(service, attrs) do
service
|> cast(attrs, [:name, :description, :user_id, :price])
|> validate_required([:name, :description])
end
end
| 19.428571 | 59 | 0.661765 |
fffc6e83eca009bfa1c09681577a69417b2fc536 | 995 | ex | Elixir | lib/bitcoin/protocol/types/tx_output.ex | 0xwallet/bitcoinsv-elixir | efccdfca7435ea88b075135caf36ec21e4a022d7 | [
"Apache-2.0"
] | 1 | 2019-06-07T18:42:58.000Z | 2019-06-07T18:42:58.000Z | lib/bitcoin/protocol/types/tx_output.ex | 0xwallet/bitcoinsv-elixir | efccdfca7435ea88b075135caf36ec21e4a022d7 | [
"Apache-2.0"
] | 2 | 2019-04-22T04:24:39.000Z | 2019-04-26T07:03:59.000Z | lib/bitcoin/protocol/types/tx_output.ex | 0xwallet/bitcoinsv-elixir | efccdfca7435ea88b075135caf36ec21e4a022d7 | [
"Apache-2.0"
] | 1 | 2019-08-12T02:51:15.000Z | 2019-08-12T02:51:15.000Z | defmodule Bitcoin.Protocol.Types.TxOutput do
alias Bitcoin.Protocol.Types.VarString
defstruct value: 0, # Transaction Value (in satoshis)
pk_script: <<>> # Usually contains the public key as a Bitcoin script setting up conditions to claim this output.
@type t :: %__MODULE__{
value: non_neg_integer,
pk_script: binary
}
# defimpl Inspect, for: __MODULE__ do
# def inspect(data, _opts) do
# "%Out{ #{data.value} -> #{data.pk_script |> Base.encode16} }"
# end
# end
# Consumes one wire-format tx output from the front of `payload`: an 8-byte
# unsigned little-endian value followed by a VarString-encoded script.
# Returns the parsed struct plus the unread remainder of the binary.
@spec parse_stream(binary) :: {t, binary}
def parse_stream(payload) do
<< value::unsigned-little-integer-size(64), payload :: binary >> = payload
{pk_script, payload} = VarString.parse_stream(payload)
{%__MODULE__{
value: value,
pk_script: pk_script
}, payload}
end
# Inverse of parse_stream/1: 64-bit little-endian value, then the script as
# a VarString.
@spec serialize(t) :: binary
def serialize(%__MODULE__{} = s) do
<< s.value :: unsigned-little-integer-size(64) >>
<> (s.pk_script |> VarString.serialize)
end
end
| 26.891892 | 125 | 0.657286 |
fffc74ce93ee52d4409da69515affe984e126ffc | 347 | exs | Elixir | priv/repo/seeds.exs | rogueunicorn/hello-phoenix | 52c7f42b1f058c61b97bcb923ed3691308072bf0 | [
"MIT"
] | 1 | 2016-01-01T12:55:25.000Z | 2016-01-01T12:55:25.000Z | priv/repo/seeds.exs | cncgl/phoenix-todo | 81b1c807a40e972df93ccc4063395ddf48bf4e2e | [
"MIT"
] | 1 | 2015-09-10T13:41:51.000Z | 2015-09-10T13:41:51.000Z | priv/repo/seeds.exs | cncgl/phoenix-todo | 81b1c807a40e972df93ccc4063395ddf48bf4e2e | [
"MIT"
] | null | null | null | # Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# HelloPhoenix.Repo.insert!(%SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
| 28.916667 | 61 | 0.706052 |
fffc81f63e25e870ebac31c2140cf4345265f187 | 2,456 | exs | Elixir | test/utils_test.exs | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
] | 1 | 2020-10-02T18:33:23.000Z | 2020-10-02T18:33:23.000Z | test/utils_test.exs | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
] | null | null | null | test/utils_test.exs | aymanosman/crawly | 9b5dbcd1328a0d80380bbc8b8c869ce252ac29f5 | [
"Apache-2.0"
] | null | null | null | defmodule UtilsTest do
use ExUnit.Case
setup do
# Unload all :meck mocks after each test so they cannot leak across tests.
on_exit(fn -> :meck.unload() end)
:ok
end
test "Request from url" do
requests = Crawly.Utils.request_from_url("https://test.com")
assert requests == expected_request("https://test.com")
end
test "Requests from urls" do
requests =
Crawly.Utils.requests_from_urls(
[
"https://test.com",
"https://example.com"
]
)
assert requests == [
expected_request("https://test.com"),
expected_request("https://example.com")
]
end
test "Build absolute url test" do
url = Crawly.Utils.build_absolute_url("/url1", "http://example.com")
assert url == "http://example.com/url1"
end
test "Build absolute urls test" do
paths = ["/path1", "/path2"]
result = Crawly.Utils.build_absolute_urls(paths, "http://example.com")
assert result == ["http://example.com/path1", "http://example.com/path2"]
end
# A pipeline given as a {module, args} tuple must be invoked through run/3
# with the args forwarded; the mock records them in the returned state.
test "pipe with args" do
# make mock pipeline
:meck.new(FakePipeline, [:non_strict])
:meck.expect(
FakePipeline,
:run,
fn item, state, args ->
{item, Map.put(state, :args, args)}
end
)
:meck.expect(
FakePipeline,
:run,
fn item, state ->
{item, state}
end
)
{_item, state} =
Crawly.Utils.pipe([{FakePipeline, my: "arg"}], %{my: "item"}, %{})
assert state.args == [my: "arg"]
end
# A bare module pipeline must be invoked through run/2 only, so no :args key
# may appear in the resulting state.
test "pipe without args" do
# make mock pipeline
:meck.new(FakePipeline, [:non_strict])
:meck.expect(
FakePipeline,
:run,
fn item, state, args ->
{item, %{state | args: args}}
end
)
:meck.expect(
FakePipeline,
:run,
fn item, state ->
{item, state}
end
)
{_item, state} = Crawly.Utils.pipe([FakePipeline], %{my: "item"}, %{})
assert Map.has_key?(state, :args) == false
end
test "can find CrawlySpider behaviors" do
# NOTE(review): UtilsTestSpider is defined elsewhere in the test suite.
assert Enum.any?(
Crawly.Utils.list_spiders(),
fn x -> x == UtilsTestSpider end)
end
# Expected shape of a default Crawly.Request for `url`; the middleware list
# presumably mirrors the test configuration — confirm against config files.
defp expected_request(url) do
%Crawly.Request{
url: url,
headers: [],
options: [],
middlewares: [
Crawly.Middlewares.DomainFilter,
Crawly.Middlewares.UniqueRequest,
Crawly.Middlewares.RobotsTxt,
{Crawly.Middlewares.UserAgent, user_agents: ["My Custom Bot"]}],
retries: 0
}
end
end
| 21.172414 | 77 | 0.572476 |
fffcc14a5537c1bd50ab5810efadb1704bd59166 | 2,899 | ex | Elixir | clients/remote_build_execution/lib/google_api/remote_build_execution/v2/model/google_devtools_remoteworkers_v1test2_command_task_outputs.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/remote_build_execution/lib/google_api/remote_build_execution/v2/model/google_devtools_remoteworkers_v1test2_command_task_outputs.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/remote_build_execution/lib/google_api/remote_build_execution/v2/model/google_devtools_remoteworkers_v1test2_command_task_outputs.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.RemoteBuildExecution.V2.Model.GoogleDevtoolsRemoteworkersV1test2CommandTaskOutputs do
  @moduledoc """
  Describes the expected outputs of the command.

  ## Attributes

  * `directories` (*type:* `list(String.t)`, *default:* `nil`) - A list of expected directories, relative to the execution root. All paths MUST be delimited by forward slashes.
  * `files` (*type:* `list(String.t)`, *default:* `nil`) - A list of expected files, relative to the execution root. All paths MUST be delimited by forward slashes.
  * `stderrDestination` (*type:* `String.t`, *default:* `nil`) - The destination to which any stderr should be sent. The method by which the bot should send the stream contents to that destination is not defined in this API. As examples, the destination could be a file referenced in the `files` field in this message, or it could be a URI that must be written via the ByteStream API.
  * `stdoutDestination` (*type:* `String.t`, *default:* `nil`) - The destination to which any stdout should be sent. The method by which the bot should send the stream contents to that destination is not defined in this API. As examples, the destination could be a file referenced in the `files` field in this message, or it could be a URI that must be written via the ByteStream API.
  """

  # Auto-generated model. ModelBase provides the struct, the field/2 macro and
  # the decode/encode machinery used by the Poison defimpls below.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :directories => list(String.t()),
          :files => list(String.t()),
          :stderrDestination => String.t(),
          :stdoutDestination => String.t()
        }

  # `type: :list` marks fields decoded as JSON arrays; the two destination
  # fields are decoded as plain scalars.
  field(:directories, type: :list)
  field(:files, type: :list)
  field(:stderrDestination)
  field(:stdoutDestination)
end
# Poison decoding delegates to the model's own decode/2 (injected by
# `use GoogleApi.Gax.ModelBase` above), which knows the field types.
defimpl Poison.Decoder,
  for:
    GoogleApi.RemoteBuildExecution.V2.Model.GoogleDevtoolsRemoteworkersV1test2CommandTaskOutputs do
  def decode(value, options) do
    GoogleApi.RemoteBuildExecution.V2.Model.GoogleDevtoolsRemoteworkersV1test2CommandTaskOutputs.decode(
      value,
      options
    )
  end
end
# Encoding is handled generically by ModelBase using the field/2 declarations.
defimpl Poison.Encoder,
  for:
    GoogleApi.RemoteBuildExecution.V2.Model.GoogleDevtoolsRemoteworkersV1test2CommandTaskOutputs do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 46.015873 | 386 | 0.7406 |
fffcc85e7591d75193f1ca1d6b3fb9e2bbe3142f | 699 | ex | Elixir | lib/seely/default_router.ex | iboard/seely | 2389a7543d9e9897855f73e35abd6fec9abf95f3 | [
"MIT"
] | 1 | 2022-03-26T14:53:56.000Z | 2022-03-26T14:53:56.000Z | lib/seely/default_router.ex | iboard/seely | 2389a7543d9e9897855f73e35abd6fec9abf95f3 | [
"MIT"
] | null | null | null | lib/seely/default_router.ex | iboard/seely | 2389a7543d9e9897855f73e35abd6fec9abf95f3 | [
"MIT"
defmodule Seely.DefaultRouter do
  @moduledoc ~s"""
  Default routing table for Seely.

  Defines a single `echo` command handled by `Seely.EchoController`, plus
  the option definitions for its `--upper` and `--trim` switches.
  """

  # Route table: {command, controller module, handler function}.
  @routes [
    {"echo", Seely.EchoController, :echo}
  ]

  # OptionParser definition for the supported switches.
  @option_spec [
    strict: [upper: :boolean, trim: :boolean]
  ]

  @doc """
  Returns the list of route tuples `{"command", Controller, :function}`.
  """
  def routes, do: @routes

  @doc """
  Returns the definition of options for the `OptionParser` used in `Seely.Parser`.
  """
  def parse_opts, do: @option_spec
end
| 21.181818 | 81 | 0.589413 |
fffcdf249f9fb5eb578879ed0dc8c20bc57c6b0c | 6,628 | ex | Elixir | lib/lti_1p3/data_provider.ex | Simon-Initiative/lti_1p3 | e599a61cdaa00fe20eda19dab88b80324580123f | [
"MIT"
] | 4 | 2021-02-19T13:13:49.000Z | 2021-11-11T09:02:27.000Z | lib/lti_1p3/data_provider.ex | Simon-Initiative/lti_1p3 | e599a61cdaa00fe20eda19dab88b80324580123f | [
"MIT"
] | 3 | 2021-02-15T22:46:06.000Z | 2021-03-30T21:26:55.000Z | lib/lti_1p3/data_provider.ex | Simon-Initiative/lti_1p3 | e599a61cdaa00fe20eda19dab88b80324580123f | [
"MIT"
] | null | null | null | defmodule Lti_1p3.DataProvider do
  @moduledoc """
  Behaviour specifying the persistence callbacks shared by every LTI 1.3
  deployment: JWK storage and nonce bookkeeping.
  """

  alias Lti_1p3.Jwk
  alias Lti_1p3.Nonce
  alias Lti_1p3.DataProviderError

  @doc """
  Creates a new jwk.

  ## Examples

      iex> create_jwk(%Jwk{})
      {:ok, %Jwk{}}

      iex> create_jwk(%Jwk{})
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback create_jwk(%Jwk{}) :: {:ok, %Jwk{}} | {:error, DataProviderError.t()}

  @doc """
  Gets the active jwk. If there are more than one, this should return the latest.

  ## Examples

      iex> get_active_jwk()
      {:ok, %Jwk{}}

      iex> get_active_jwk()
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback get_active_jwk() :: {:ok, %Jwk{}} | {:error, DataProviderError.t()}

  @doc """
  Gets a list of all jwks.

  ## Examples

      iex> get_all_jwks()
      [%Jwk{}]

  """
  @callback get_all_jwks() :: [%Jwk{}]

  @doc """
  Creates a new nonce.

  ## Examples

      iex> create_nonce(%Nonce{})
      {:ok, %Nonce{}}

      iex> create_nonce(%Nonce{})
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback create_nonce(%Nonce{}) :: {:ok, %Nonce{}} | {:error, DataProviderError.t()}

  @doc """
  Gets a nonce with the given value and optional domain.

  ## Examples

      iex> get_nonce(value, domain)
      %Nonce{}

      iex> get_nonce(value, domain)
      nil

  """
  @callback get_nonce(String.t(), String.t() | nil) :: %Nonce{} | nil

  @doc """
  Deletes all expired nonces older than the provided ttl_sec. If no ttl_sec is provided,
  the default value should be 86_400 seconds (1 day).

  ## Examples

      iex> delete_expired_nonces(ttl_sec)

  """
  @callback delete_expired_nonces(integer() | nil) :: any()
end
defmodule Lti_1p3.ToolDataProvider do
  @moduledoc """
  Behaviour specifying the persistence callbacks required by the LTI 1.3
  Tool role: platform registrations, deployments, their JWKs, and cached
  LTI params.
  """

  alias Lti_1p3.Jwk
  # FIX: the specs below referenced `DataProviderError.t()` without an alias,
  # so the remote type resolved to a non-existent top-level `DataProviderError`
  # module instead of `Lti_1p3.DataProviderError`.
  alias Lti_1p3.DataProviderError
  alias Lti_1p3.Tool.Registration
  alias Lti_1p3.Tool.Deployment
  alias Lti_1p3.Tool.LtiParams

  @doc """
  Creates a new registration.

  ## Examples

      iex> create_registration(%Registration{})
      {:ok, %Registration{}}

      iex> create_registration(%Registration{})
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback create_registration(%Registration{}) :: {:ok, %Registration{}} | {:error, DataProviderError.t()}

  @doc """
  Creates a new deployment.

  ## Examples

      iex> create_deployment(%Deployment{})
      {:ok, %Deployment{}}

      iex> create_deployment(%Deployment{})
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback create_deployment(%Deployment{}) :: {:ok, %Deployment{}} | {:error, DataProviderError.t()}

  @doc """
  Gets the registration and deployment associated with the given issuer, client_id and deployment_id.

  ## Examples

      iex> get_registration_deployment(issuer, client_id, deployment_id)
      {%Registration{}, %Deployment{}}

      iex> get_registration_deployment(issuer, client_id, deployment_id)
      {nil, nil}

  """
  # NOTE(review): the second example returns {nil, nil} on a miss but the spec
  # allows a bare nil -- confirm which shape implementations actually return.
  @callback get_registration_deployment(String.t(), String.t(), String.t()) :: {%Registration{}, %Deployment{}} | nil

  @doc """
  Gets the jwk associated with the given Registration.

  ## Examples

      iex> get_jwk_by_registration(%Registration{})
      {:ok, %Jwk{}}

      iex> get_jwk_by_registration(%Registration{})
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback get_jwk_by_registration(%Registration{}) :: {:ok, %Jwk{}} | {:error, DataProviderError.t()}

  @doc """
  Gets the registration associated with the given issuer and client_id.

  ## Examples

      iex> get_registration_by_issuer_client_id(issuer, client_id)
      %Registration{}

      iex> get_registration_by_issuer_client_id(issuer, client_id)
      nil

  """
  @callback get_registration_by_issuer_client_id(String.t(), String.t()) :: %Registration{} | nil

  @doc """
  Gets the deployment associated with the given registration and deployment_id.

  ## Examples

      iex> get_deployment(%Registration{}, deployment_id)
      %Deployment{}

      iex> get_deployment(%Registration{}, deployment_id)
      nil

  """
  @callback get_deployment(%Registration{}, String.t()) :: %Deployment{} | nil

  @doc """
  Gets the LTI params associated with a user from the cache using the given key.

  ## Examples

      iex> get_lti_params_by_key(key)
      %LtiParams{}

      iex> get_lti_params_by_key(key)
      nil

  """
  @callback get_lti_params_by_key(String.t()) :: %LtiParams{} | nil

  @doc """
  Creates or updates the LTI params for a user, keying off the 'key' value.

  ## Examples

      iex> create_or_update_lti_params(%LtiParams{})
      {:ok, %LtiParams{}}

      iex> create_or_update_lti_params(%LtiParams{})
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback create_or_update_lti_params(%LtiParams{}) :: {:ok, %LtiParams{}} | {:error, DataProviderError.t()}
end
defmodule Lti_1p3.PlatformDataProvider do
  @moduledoc """
  Behaviour specifying the persistence callbacks required by the LTI 1.3
  Platform role: platform instances and login hints.
  """

  # FIX: the specs below referenced `DataProviderError.t()` without an alias,
  # so the remote type resolved to a non-existent top-level `DataProviderError`
  # module instead of `Lti_1p3.DataProviderError`.
  alias Lti_1p3.DataProviderError
  alias Lti_1p3.Platform.PlatformInstance
  alias Lti_1p3.Platform.LoginHint

  @doc """
  Creates a new platform instance.

  ## Examples

      iex> create_platform_instance(%PlatformInstance{})
      {:ok, %PlatformInstance{}}

      iex> create_platform_instance(%PlatformInstance{})
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback create_platform_instance(%PlatformInstance{}) :: {:ok, %PlatformInstance{}} | {:error, DataProviderError.t()}

  @doc """
  Gets a platform instance associated with the given client_id.

  ## Examples

      iex> get_platform_instance_by_client_id(client_id)
      %PlatformInstance{}

      iex> get_platform_instance_by_client_id(client_id)
      nil

  """
  @callback get_platform_instance_by_client_id(String.t()) :: %PlatformInstance{} | nil

  @doc """
  Gets a login hint associated with the given value.

  ## Examples

      iex> get_login_hint_by_value(value)
      %LoginHint{}

      iex> get_login_hint_by_value(value)
      nil

  """
  @callback get_login_hint_by_value(String.t()) :: %LoginHint{} | nil

  @doc """
  Creates a new login hint.

  ## Examples

      iex> create_login_hint(%LoginHint{})
      {:ok, %LoginHint{}}

      iex> create_login_hint(%LoginHint{})
      {:error, %Lti_1p3.DataProviderError{}}

  """
  @callback create_login_hint(%LoginHint{}) :: {:ok, %LoginHint{}} | {:error, DataProviderError.t()}

  @doc """
  Deletes all expired login hints older than the provided ttl_sec. If no ttl_sec is provided,
  the default value should be 86_400 seconds (1 day).

  ## Examples

      iex> delete_expired_login_hints(ttl_sec)

  """
  @callback delete_expired_login_hints(integer() | nil) :: any()
end
defmodule Lti_1p3.DataProviderError do
  @moduledoc """
  Error struct returned by the data provider callbacks.

  `:msg` is a human-readable description; `:reason` is a machine-readable tag.
  """

  defstruct [:msg, :reason]

  @type error_reason() ::
          :unique_constraint_violation
          | :not_found
          | :unknown

  # FIX: removed the trailing comma after `reason: error_reason()` -- trailing
  # commas inside a map literal are not valid Elixir syntax.
  @type t() :: %__MODULE__{
          msg: String.t(),
          reason: error_reason()
        }
end
| 30.827907 | 121 | 0.664001 |
fffce2f9fa89756ada2b10daa2becc1b410d85a9 | 3,393 | exs | Elixir | mix.exs | tankwanghow/monarco_hat_rpi3 | 0c3e08c96e8f9add135eda734a4da49d9d53ab8a | [
"Apache-2.0"
] | null | null | null | mix.exs | tankwanghow/monarco_hat_rpi3 | 0c3e08c96e8f9add135eda734a4da49d9d53ab8a | [
"Apache-2.0"
] | null | null | null | mix.exs | tankwanghow/monarco_hat_rpi3 | 0c3e08c96e8f9add135eda734a4da49d9d53ab8a | [
"Apache-2.0"
] | null | null | null | defmodule NervesSystemRpi3.MixProject do
use Mix.Project
@github_organization "nerves-project"
@app :nerves_system_rpi3
@source_url "https://github.com/#{@github_organization}/#{@app}"
@version Path.join(__DIR__, "VERSION")
|> File.read!()
|> String.trim()
  # Standard Mix project definition for this Nerves system package.
  def project do
    [
      app: @app,
      version: @version,
      elixir: "~> 1.6",
      # Run the Nerves package compiler after the regular compilers.
      compilers: Mix.compilers() ++ [:nerves_package],
      nerves_package: nerves_package(),
      description: description(),
      package: package(),
      deps: deps(),
      # `loadconfig` is wrapped so the Nerves target is pinned before config
      # loads; `docs` additionally copies the referenced images afterwards.
      aliases: [loadconfig: [&bootstrap/1], docs: ["docs", &copy_images/1]],
      docs: docs(),
      preferred_cli_env: %{
        docs: :docs,
        "hex.build": :docs,
        "hex.publish": :docs
      }
    ]
  end
  # No supervision tree or runtime applications: this package only carries
  # build-time configuration for the Nerves system.
  def application do
    []
  end

  # Invoked through the `loadconfig` alias defined in project/0. Pins the
  # Nerves target, starts the bootstrap app, then runs the real task.
  defp bootstrap(args) do
    set_target()
    Application.start(:nerves_bootstrap)
    Mix.Task.run("loadconfig", args)
  end
  # Metadata consumed by the Nerves tooling: artifact download locations,
  # build runner options and the Buildroot platform configuration.
  defp nerves_package do
    [
      type: :system,
      artifact_sites: [
        {:github_releases, "#{@github_organization}/#{@app}"}
      ],
      build_runner_opts: build_runner_opts(),
      platform: Nerves.System.BR,
      platform_config: [
        defconfig: "nerves_defconfig"
      ],
      # The :env key is an optional experimental feature for adding environment
      # variables to the crosscompile environment. These are intended for
      # llvm-based tooling that may need more precise processor information.
      env: [
        {"TARGET_ARCH", "arm"},
        {"TARGET_CPU", "cortex_a53"},
        {"TARGET_OS", "linux"},
        {"TARGET_ABI", "gnueabihf"}
      ],
      checksum: package_files()
    ]
  end
  # All dependencies are build-time only (runtime: false): Nerves tooling,
  # the Buildroot wrapper, the toolchain, plus lint/doc tools per env.
  defp deps do
    [
      {:nerves, "~> 1.5.4 or ~> 1.6.0 or ~> 1.7.4", runtime: false},
      {:nerves_system_br, "1.15.2", runtime: false},
      {:nerves_toolchain_armv7_nerves_linux_gnueabihf, "~> 1.4.2", runtime: false},
      {:nerves_system_linter, "~> 0.4", only: [:dev, :test], runtime: false},
      {:ex_doc, "~> 0.22", only: :docs, runtime: false}
    ]
  end

  # One-line package summary published to Hex.
  defp description do
    """
    Nerves System - Raspberry Pi 3 B / B+
    """
  end
  # ExDoc configuration for the generated documentation site.
  defp docs do
    [
      extras: ["README.md", "CHANGELOG.md"],
      main: "readme",
      source_ref: "v#{@version}",
      source_url: @source_url,
      skip_undefined_reference_warnings_on: ["CHANGELOG.md"]
    ]
  end

  # Hex package metadata; the published file list doubles as the artifact
  # checksum input (see nerves_package/0).
  defp package do
    [
      files: package_files(),
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => @source_url}
    ]
  end
  # Files shipped in the Hex package AND hashed for the artifact checksum --
  # changing any of them invalidates prebuilt system artifacts.
  defp package_files do
    [
      "fwup_include",
      "rootfs_overlay",
      "CHANGELOG.md",
      "cmdline.txt",
      "config.txt",
      "fwup-revert.conf",
      "fwup.conf",
      "LICENSE",
      "linux-5.4.defconfig",
      "mix.exs",
      "nerves_defconfig",
      "post-build.sh",
      "post-createfs.sh",
      "ramoops.dts",
      "README.md",
      "VERSION"
    ]
  end
  # Copy the images referenced by docs, since ex_doc doesn't do this.
  # Runs as the final step of the `docs` alias defined in project/0.
  defp copy_images(_) do
    File.cp_r("assets", "doc/assets")
  end
defp build_runner_opts() do
case System.get_env("BR2_PRIMARY_SITE") do
nil -> []
primary_site -> [make_args: ["BR2_PRIMARY_SITE=#{primary_site}"]]
end
end
  # Compatibility shim: Mix.target/1 only exists on newer Mix versions, so it
  # is invoked via apply/3 (presumably to avoid a compile-time reference to a
  # possibly-missing function); older versions fall back to MIX_TARGET.
  defp set_target() do
    if function_exported?(Mix, :target, 1) do
      apply(Mix, :target, [:target])
    else
      System.put_env("MIX_TARGET", "target")
    end
  end
end
| 24.235714 | 83 | 0.583554 |
fffd14113609716fea8f5d40e8dac2bf3a5f4451 | 11,849 | ex | Elixir | lib/fun_with_flags.ex | planswell/fun_with_flags | 97656285e59c4852d8be45a1ee8ceb15d53e9ffe | [
"MIT"
] | null | null | null | lib/fun_with_flags.ex | planswell/fun_with_flags | 97656285e59c4852d8be45a1ee8ceb15d53e9ffe | [
"MIT"
] | null | null | null | lib/fun_with_flags.ex | planswell/fun_with_flags | 97656285e59c4852d8be45a1ee8ceb15d53e9ffe | [
"MIT"
] | null | null | null | defmodule FunWithFlags do
@moduledoc """
FunWithFlags, the Elixir feature flag library.
This module provides the public interface to the library and its API is
made of three simple methods to enable, disable and query feature flags.
In their simplest form, flags can be toggled on and off globally.
More advanced rules or "gates" are available, and they can be set and queried
for any term that implements these protocols:
* The `FunWithFlags.Actor` protocol can be
implemented for types and structs that should have specific rules. For
example, in web applications it's common to use a `%User{}` struct or
equivalent as an actor, or perhaps the current country of the request.
* The `FunWithFlags.Group` protocol can be
implemented for types and structs that should belong to groups for which
one wants to enable and disable some flags. For example, one could implement
the protocol for a `%User{}` struct to identify administrators.
See the [Usage](/fun_with_flags/readme.html#usage) notes for a more detailed
explanation.
"""
alias FunWithFlags.{Flag, Gate}
@store FunWithFlags.Config.store_module
@type options :: Keyword.t
@doc """
Checks if a flag is enabled.
It can be invoked with just the flag name, as an atom,
  to check the general status of a flag (i.e. the boolean gate).
## Options
* `:for` - used to provide a term for which the flag could
have a specific value. The passed term should implement the
`Actor` or `Group` protocol, or both.
## Examples
This example relies on the [reference implementation](https://github.com/tompave/fun_with_flags/blob/master/test/support/test_user.ex)
used in the tests.
iex> alias FunWithFlags.TestUser, as: User
iex> harry = %User{id: 1, name: "Harry Potter", groups: [:wizards, :gryffindor]}
iex> FunWithFlags.disable(:elder_wand)
iex> FunWithFlags.enable(:elder_wand, for_actor: harry)
iex> FunWithFlags.enabled?(:elder_wand)
false
iex> FunWithFlags.enabled?(:elder_wand, for: harry)
true
iex> voldemort = %User{id: 7, name: "Tom Riddle", groups: [:wizards, :slytherin]}
iex> FunWithFlags.enabled?(:elder_wand, for: voldemort)
false
iex> filch = %User{id: 88, name: "Argus Filch", groups: [:staff]}
iex> FunWithFlags.enable(:magic_wands, for_group: :wizards)
iex> FunWithFlags.enabled?(:magic_wands, for: harry)
true
iex> FunWithFlags.enabled?(:magic_wands, for: voldemort)
true
iex> FunWithFlags.enabled?(:magic_wands, for: filch)
false
"""
@spec enabled?(atom, options) :: boolean
def enabled?(flag_name, options \\ [])
def enabled?(flag_name, []) when is_atom(flag_name) do
case @store.lookup(flag_name) do
{:ok, flag} -> Flag.enabled?(flag)
_ -> false
end
end
def enabled?(flag_name, [for: nil]) do
enabled?(flag_name)
end
def enabled?(flag_name, [for: item]) when is_atom(flag_name) do
case @store.lookup(flag_name) do
{:ok, flag} -> Flag.enabled?(flag, for: item)
_ -> false
end
end
@doc """
Enables a feature flag.
## Options
* `:for_actor` - used to enable the flag for a specific term only.
The value can be any term that implements the `Actor` protocol.
* `:for_group` - used to enable the flag for a specific group only.
The value should be an atom.
## Examples
### Enable globally
iex> FunWithFlags.enabled?(:super_shrink_ray)
false
iex> FunWithFlags.enable(:super_shrink_ray)
{:ok, true}
iex> FunWithFlags.enabled?(:super_shrink_ray)
true
### Enable for an actor
iex> FunWithFlags.disable(:warp_drive)
{:ok, false}
iex> FunWithFlags.enable(:warp_drive, for_actor: "Scotty")
{:ok, true}
iex> FunWithFlags.enabled?(:warp_drive)
false
iex> FunWithFlags.enabled?(:warp_drive, for: "Scotty")
true
### Enable for a group
This example relies on the [reference implementation](https://github.com/tompave/fun_with_flags/blob/master/test/support/test_user.ex)
used in the tests.
iex> alias FunWithFlags.TestUser, as: User
iex> marty = %User{name: "Marty McFly", groups: [:students, :time_travelers]}
iex> doc = %User{name: "Emmet Brown", groups: [:scientists, :time_travelers]}
iex> buford = %User{name: "Buford Tannen", groups: [:gunmen, :bandits]}
iex> FunWithFlags.enable(:delorean, for_group: :time_travelers)
{:ok, true}
iex> FunWithFlags.enabled?(:delorean)
false
iex> FunWithFlags.enabled?(:delorean, for: buford)
false
iex> FunWithFlags.enabled?(:delorean, for: marty)
true
iex> FunWithFlags.enabled?(:delorean, for: doc)
true
"""
@spec enable(atom, options) :: {:ok, true}
def enable(flag_name, options \\ [])
def enable(flag_name, []) when is_atom(flag_name) do
{:ok, flag} = @store.put(flag_name, Gate.new(:boolean, true))
verify(flag)
end
def enable(flag_name, [for_actor: nil]) do
enable(flag_name)
end
def enable(flag_name, [for_actor: actor]) when is_atom(flag_name) do
gate = Gate.new(:actor, actor, true)
{:ok, flag} = @store.put(flag_name, gate)
verify(flag, for: actor)
end
def enable(flag_name, [for_group: nil]) do
enable(flag_name)
end
def enable(flag_name, [for_group: group_name]) when is_atom(flag_name) do
gate = Gate.new(:group, group_name, true)
{:ok, _flag} = @store.put(flag_name, gate)
{:ok, true}
end
@doc """
Disables a feature flag.
## Options
* `:for_actor` - used to disable the flag for a specific term only.
The value can be any term that implements the `Actor` protocol.
* `:for_group` - used to disable the flag for a specific group only.
The value should be an atom.
## Examples
### Disable globally
iex> FunWithFlags.enable(:random_koala_gifs)
iex> FunWithFlags.enabled?(:random_koala_gifs)
true
iex> FunWithFlags.disable(:random_koala_gifs)
{:ok, false}
iex> FunWithFlags.enabled?(:random_koala_gifs)
false
## Disable for an actor
iex> FunWithFlags.enable(:spider_sense)
{:ok, true}
iex> villain = %{name: "Venom"}
iex> FunWithFlags.disable(:spider_sense, for_actor: villain)
{:ok, false}
iex> FunWithFlags.enabled?(:spider_sense)
true
iex> FunWithFlags.enabled?(:spider_sense, for: villain)
false
### Disable for a group
This example relies on the [reference implementation](https://github.com/tompave/fun_with_flags/blob/master/test/support/test_user.ex)
used in the tests.
iex> alias FunWithFlags.TestUser, as: User
iex> harry = %User{name: "Harry Potter", groups: [:wizards, :gryffindor]}
iex> dudley = %User{name: "Dudley Dursley", groups: [:muggles]}
iex> FunWithFlags.enable(:hogwarts)
{:ok, true}
iex> FunWithFlags.disable(:hogwarts, for_group: :muggles)
{:ok, false}
iex> FunWithFlags.enabled?(:hogwarts)
true
iex> FunWithFlags.enabled?(:hogwarts, for: harry)
true
iex> FunWithFlags.enabled?(:hogwarts, for: dudley)
false
"""
@spec disable(atom, options) :: {:ok, false}
def disable(flag_name, options \\ [])
def disable(flag_name, []) when is_atom(flag_name) do
{:ok, flag} = @store.put(flag_name, Gate.new(:boolean, false))
verify(flag)
end
def disable(flag_name, [for_actor: nil]) do
disable(flag_name)
end
def disable(flag_name, [for_actor: actor]) when is_atom(flag_name) do
gate = Gate.new(:actor, actor, false)
{:ok, flag} = @store.put(flag_name, gate)
verify(flag, for: actor)
end
def disable(flag_name, [for_group: nil]) do
disable(flag_name)
end
def disable(flag_name, [for_group: group_name]) when is_atom(flag_name) do
gate = Gate.new(:group, group_name, false)
{:ok, _flag} = @store.put(flag_name, gate)
{:ok, false}
end
@doc """
Clears the data of a feature flag.
Clears the data for an entire feature flag or for a specific
Actor or Group gate. Clearing a boolean gate is not supported
because a missing boolean gate is equivalent to a disabled boolean
gate.
Sometimes enabling or disabling a gate is not what you want, and you
need to remove that gate's rules instead. For example, if you don't need
anymore to explicitly enable or disable a flag for an actor, and the
  default state should be used instead, you'll want to clear the gate.
It's also possible to clear the entire flag, by not passing any option.
## Options
* `:for_actor` - used to clear the flag for a specific term only.
The value can be any term that implements the `Actor` protocol.
* `:for_group` - used to clear the flag for a specific group only.
The value should be an atom.
## Examples
iex> alias FunWithFlags.TestUser, as: User
iex> harry = %User{id: 1, name: "Harry Potter", groups: [:wizards, :gryffindor]}
iex> hagrid = %User{id: 2, name: "Rubeus Hagrid", groups: [:wizards, :gamekeeper]}
iex> dudley = %User{id: 3, name: "Dudley Dursley", groups: [:muggles]}
iex> FunWithFlags.disable(:wands)
iex> FunWithFlags.enable(:wands, for_group: :wizards)
iex> FunWithFlags.disable(:wands, for_actor: hagrid)
iex>
iex> FunWithFlags.enabled?(:wands)
false
iex> FunWithFlags.enabled?(:wands, for: harry)
true
iex> FunWithFlags.enabled?(:wands, for: hagrid)
false
iex> FunWithFlags.enabled?(:wands, for: dudley)
false
iex>
iex> FunWithFlags.clear(:wands, for_actor: hagrid)
:ok
iex> FunWithFlags.enabled?(:wands, for: hagrid)
true
iex>
iex> FunWithFlags.clear(:wands)
:ok
iex> FunWithFlags.enabled?(:wands)
false
iex> FunWithFlags.enabled?(:wands, for: harry)
false
iex> FunWithFlags.enabled?(:wands, for: hagrid)
false
iex> FunWithFlags.enabled?(:wands, for: dudley)
false
"""
@spec clear(atom, options) :: :ok
def clear(flag_name, options \\ [])
def clear(flag_name, []) when is_atom(flag_name) do
{:ok, _flag} = @store.delete(flag_name)
:ok
end
def clear(flag_name, [for_actor: nil]) do
clear(flag_name)
end
def clear(flag_name, [for_actor: actor]) when is_atom(flag_name) do
gate = Gate.new(:actor, actor, false) # we only care about the gate id
_clear_gate(flag_name, gate)
end
def clear(flag_name, [for_group: nil]) do
clear(flag_name)
end
def clear(flag_name, [for_group: group_name]) when is_atom(flag_name) do
gate = Gate.new(:group, group_name, false) # we only care about the gate id
_clear_gate(flag_name, gate)
end
defp _clear_gate(flag_name, gate) do
{:ok, _flag} = @store.delete(flag_name, gate)
:ok
end
@doc """
Returns a list of all flag names currently configured, as atoms.
This can be useful for debugging or for display purposes,
but it's not meant to be used at runtime. Undefined flags,
for example, will be considered disabled.
"""
  @spec all_flag_names() :: {:ok, [atom]} | {:ok, []}
  # Delegated straight to the configured store adapter.
  defdelegate all_flag_names(), to: @store

  @doc """
  Returns a list of all the flags currently configured, as data structures.

  This function is provided for debugging and to build more complex
  functionality (e.g. it's used in the web GUI), but it is not meant to be
  used at runtime to check if a flag is enabled.

  To query the value of a flag, please use the `enabled?/2` function instead.
  """
  @spec all_flags() :: {:ok, [FunWithFlags.Flag.t]} | {:ok, []}
  # Delegated straight to the configured store adapter.
  defdelegate all_flags(), to: @store
defp verify(flag) do
{:ok, Flag.enabled?(flag)}
end
defp verify(flag, [for: data]) do
{:ok, Flag.enabled?(flag, for: data)}
end
end
| 30.53866 | 136 | 0.668326 |
fffd2c086985684308f071bdffc754c032514f06 | 428 | exs | Elixir | config/review.exs | batmany13/github-ci | e67df76aaeee5e829b923ad09140dc6628ef979b | [
"Apache-2.0"
] | null | null | null | config/review.exs | batmany13/github-ci | e67df76aaeee5e829b923ad09140dc6628ef979b | [
"Apache-2.0"
] | null | null | null | config/review.exs | batmany13/github-ci | e67df76aaeee5e829b923ad09140dc6628ef979b | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# Review-app (Heroku) endpoint configuration.
config :github_ci, GithubCi.Endpoint,
  secret_key_base: System.get_env("SECRET_KEY_BASE"),
  force_ssl: [rewrite_on: [:x_forwarded_proto]],
  # BUG FIX: the scheme must not be embedded in :host -- Phoenix URL helpers
  # would otherwise generate "https://https://..." links. Scheme and port are
  # declared explicitly instead.
  url: [
    scheme: "https",
    host: "#{System.get_env("HEROKU_APP_NAME")}.herokuapp.com",
    port: 443
  ],
  # NOTE(review): code_reloader is normally a dev-only setting; confirm it is
  # intentional for review apps.
  code_reloader: true,
  http: [port: {:system, "PORT"}],
  cache_static_manifest: "priv/static/manifest.json"
# Do not print debug messages in production
config :logger, level: :info | 35.666667 | 86 | 0.733645 |
fffd3c5804be91d9931f5e80d4461237f0a04e92 | 2,169 | ex | Elixir | lib/newway/schema/invoice.ex | rbkmoney/pathfinder | fdb3102f524b6f58cba579389047b425fdbd4c92 | [
"Apache-2.0"
] | null | null | null | lib/newway/schema/invoice.ex | rbkmoney/pathfinder | fdb3102f524b6f58cba579389047b425fdbd4c92 | [
"Apache-2.0"
] | null | null | null | lib/newway/schema/invoice.ex | rbkmoney/pathfinder | fdb3102f524b6f58cba579389047b425fdbd4c92 | [
"Apache-2.0"
] | 1 | 2021-12-07T09:25:32.000Z | 2021-12-07T09:25:32.000Z | defmodule NewWay.Schema.Invoice do
  use Ecto.Schema
  # Injects search helpers keyed on the business id column (see NewWay.Schema).
  use NewWay.Schema, search_field: :invoice_id
  require NewWay.Macro.EnumType, as: EnumType

  @type t :: Ecto.Schema.t

  # Mirrors the invoice status enum stored in the database.
  EnumType.def_enum(InvoiceStatus, [
    :unpaid,
    :paid,
    :cancelled,
    :fulfilled
  ])

  # All tables live in the "nw" database schema.
  @schema_prefix "nw"
  schema "invoice" do
    field(:event_created_at, :utc_datetime)
    field(:invoice_id, :string)
    field(:party_id, :string)
    field(:shop_id, :string)
    field(:party_revision, :integer)
    field(:created_at, :utc_datetime)
    field(:status, InvoiceStatus)
    field(:status_cancelled_details, :string)
    field(:status_fulfilled_details, :string)
    field(:details_product, :string)
    field(:details_description, :string)
    field(:due, :utc_datetime)
    # assumes amount is in minor currency units -- TODO confirm against writer
    field(:amount, :integer)
    field(:currency_code, :string)
    field(:context, :binary)
    field(:template_id, :string)
    field(:wtime, :utc_datetime)
    # presumably marks the latest revision of this invoice -- confirm
    field(:current, :boolean)
    field(:sequence_id, :integer)
    field(:change_id, :integer)
    field(:external_id, :string)

    # belongs_to columns already exist above as plain string fields, hence
    # define_field: false; has_many associations join on the business
    # invoice_id rather than the surrogate primary key.
    belongs_to :party, NewWay.Schema.Party,
      define_field: false

    belongs_to :shop, NewWay.Schema.Shop,
      define_field: false

    has_many :adjustments, NewWay.Schema.Adjustment,
      foreign_key: :invoice_id, references: :invoice_id

    has_many :payments, NewWay.Schema.Payment,
      foreign_key: :invoice_id, references: :invoice_id

    has_many :refunds, NewWay.Schema.Refund,
      foreign_key: :invoice_id, references: :invoice_id
  end
end
# Adapts an invoice row to the generic search-result envelope; the full
# schema struct travels along in :data.
defimpl NewWay.Protocol.SearchResult, for: NewWay.Schema.Invoice do
  alias NewWay.SearchResult

  @spec encode(NewWay.Schema.Invoice.t) :: SearchResult.t
  def encode(invoice) do
    %SearchResult{
      id: invoice.id,
      # entity_id carries the business id, not the surrogate primary key.
      entity_id: invoice.invoice_id,
      ns: :invoices,
      wtime: invoice.wtime,
      event_time: invoice.event_created_at,
      data: invoice
    }
  end
end
| 31.897059 | 67 | 0.609959 |
fffd4c395f9f1b4389bcf5a7ef3b5dca9cdc4d0a | 13,837 | ex | Elixir | clients/compute/lib/google_api/compute/v1/api/disk_types.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/disk_types.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/api/disk_types.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Api.DiskTypes do
@moduledoc """
API calls for all endpoints tagged `DiskTypes`.
"""
alias GoogleApi.Compute.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves an aggregated list of disk types.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
* `:includeAllScopes` (*type:* `boolean()`) - Indicates whether every visible scope for each scope type (zone, region, global) should be included in the response. For new resource types added after this field, the flag has no effect as new resource types will always include every visible scope for each scope type in response. For resource types which predate this field, if this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included.
* `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
* `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
Currently, only sorting by `name` or `creationTimestamp desc` is supported.
* `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.DiskTypeAggregatedList{}}` on success
* `{:error, info}` on failure
"""
  @spec compute_disk_types_aggregated_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
          {:ok, GoogleApi.Compute.V1.Model.DiskTypeAggregatedList.t()}
          | {:ok, Tesla.Env.t()}
          | {:error, any()}
  def compute_disk_types_aggregated_list(connection, project, optional_params \\ [], opts \\ []) do
    # Maps each supported optional parameter to where it is placed in the
    # request -- all of them are query-string parameters for this endpoint.
    optional_params_config = %{
      :alt => :query,
      :fields => :query,
      :key => :query,
      :oauth_token => :query,
      :prettyPrint => :query,
      :quotaUser => :query,
      :userIp => :query,
      :filter => :query,
      :includeAllScopes => :query,
      :maxResults => :query,
      :orderBy => :query,
      :pageToken => :query
    }

    # Build the GET request; the project id is percent-encoded into the path.
    request =
      Request.new()
      |> Request.method(:get)
      |> Request.url("/{project}/aggregated/diskTypes", %{
        "project" => URI.encode(project, &URI.char_unreserved?/1)
      })
      |> Request.add_optional_params(optional_params_config, optional_params)
      |> Request.library_version(@library_version)

    # Execute and decode the JSON body into the aggregated-list model struct.
    connection
    |> Connection.execute(request)
    |> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.DiskTypeAggregatedList{}])
  end
@doc """
Returns the specified disk type. Gets a list of available disk types by making a list() request.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `zone` (*type:* `String.t`) - The name of the zone for this request.
* `disk_type` (*type:* `String.t`) - Name of the disk type to return.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.DiskType{}}` on success
* `{:error, info}` on failure
"""
@spec compute_disk_types_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Compute.V1.Model.DiskType.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def compute_disk_types_get(
connection,
project,
zone,
disk_type,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/{project}/zones/{zone}/diskTypes/{diskType}", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"zone" => URI.encode(zone, &URI.char_unreserved?/1),
"diskType" => URI.encode(disk_type, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.DiskType{}])
end
@doc """
Retrieves a list of disk types available to the specified project.
## Parameters
* `connection` (*type:* `GoogleApi.Compute.V1.Connection.t`) - Connection to server
* `project` (*type:* `String.t`) - Project ID for this request.
* `zone` (*type:* `String.t`) - The name of the zone for this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:filter` (*type:* `String.t`) - A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
* `:maxResults` (*type:* `integer()`) - The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
* `:orderBy` (*type:* `String.t`) - Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
Currently, only sorting by `name` or `creationTimestamp desc` is supported.
* `:pageToken` (*type:* `String.t`) - Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Compute.V1.Model.DiskTypeList{}}` on success
* `{:error, info}` on failure
"""
@spec compute_disk_types_list(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Compute.V1.Model.DiskTypeList.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def compute_disk_types_list(connection, project, zone, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:filter => :query,
:maxResults => :query,
:orderBy => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/{project}/zones/{zone}/diskTypes", %{
"project" => URI.encode(project, &URI.char_unreserved?/1),
"zone" => URI.encode(zone, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Compute.V1.Model.DiskTypeList{}])
end
end
| 58.383966 | 511 | 0.670087 |
fffd591503b3bc0c04549868a45e62a2412bb497 | 1,515 | exs | Elixir | parkapp_server/test/parkapp_web/utils_test.exs | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | 2 | 2018-11-06T12:21:16.000Z | 2018-11-21T10:20:17.000Z | parkapp_server/test/parkapp_web/utils_test.exs | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | null | null | null | parkapp_server/test/parkapp_web/utils_test.exs | bitmaker-software/parkapp | 39d9fd3cb8ab6bd1d54b776a5467eccf3b65f310 | [
"MIT"
] | null | null | null | defmodule ParkappWeb.UtilsTest do
use Parkapp.DataCase
alias ParkappWeb.Utils
describe "Utils" do
@length 512
test "random_string/1 should generate a string of the given length" do
assert Utils.random_string(@length) |> String.length() == @length
end
test "random_string/1 property testing" do
length_range = for n <- 1..50, do: n * n
length_range
|> Enum.each(fn size ->
assert Utils.random_string(size) |> String.length() == size
end)
end
test "format_float_decimal_places/2 should return a string with the given decimal places" do
assert Utils.format_float_decimal_places(12.123, 1) == 12.1
assert Utils.format_float_decimal_places(12.123, 2) == 12.12
assert Utils.format_float_decimal_places(12.123, 4) == 12.1230
end
test "format_float_decimal_places/2 should return the value as a string if it is not a float" do
assert Utils.format_float_decimal_places(12, 4) == "12"
assert Utils.format_float_decimal_places("12", 4) == "12"
assert Utils.format_float_decimal_places(nil, 4) == ""
end
test "parse_string_to_float/1 should return a float based on the given string" do
assert Utils.parse_string_to_float("12.123") == 12.123
assert Utils.parse_string_to_float("16.123") == 16.123
assert Utils.parse_string_to_float("") |> is_nil()
assert Utils.parse_string_to_float(nil) |> is_nil()
assert Utils.parse_string_to_float(123) |> is_nil()
end
end
end
| 35.232558 | 100 | 0.692409 |
fffd9ab09e6844f16af308172f70822aec391286 | 954 | ex | Elixir | app/test/support/channel_case.ex | irisTa56/hobby_dapp | 5738995f4f68202aeb50df7fd23fd5c6b84ee244 | [
"MIT"
] | null | null | null | app/test/support/channel_case.ex | irisTa56/hobby_dapp | 5738995f4f68202aeb50df7fd23fd5c6b84ee244 | [
"MIT"
] | null | null | null | app/test/support/channel_case.ex | irisTa56/hobby_dapp | 5738995f4f68202aeb50df7fd23fd5c6b84ee244 | [
"MIT"
] | null | null | null | defmodule HobbyDappWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use HobbyDappWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import HobbyDappWeb.ChannelCase
# The default endpoint for testing
@endpoint HobbyDappWeb.Endpoint
end
end
setup _tags do
:ok
end
end
| 27.257143 | 66 | 0.74109 |
fffd9d3d061ff2117adcda38f3ca54a2cd96824f | 1,807 | ex | Elixir | lib/teiserver/agents/idle_agent_server.ex | badosu/teiserver | 19b623aeb7c2ab28756405f7486e92b714777c54 | [
"MIT"
] | 6 | 2021-02-08T10:42:53.000Z | 2021-04-25T12:12:03.000Z | lib/teiserver/agents/idle_agent_server.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | 14 | 2021-08-01T02:36:14.000Z | 2022-01-30T21:15:03.000Z | lib/teiserver/agents/idle_agent_server.ex | Jazcash/teiserver | fec14784901cb2965d8c1350fe84107c57451877 | [
"MIT"
] | 7 | 2021-05-13T12:55:28.000Z | 2022-01-14T06:39:06.000Z | defmodule Teiserver.Agents.IdleAgentServer do
@doc """
Logs on, waits around and sends a ping every @tick_ms
Also requests news and "new login" type information 5 seconds after login and every @info_ms after that
"""
use GenServer
alias Teiserver.Agents.AgentLib
@tick_ms 20000
@info_ms 60000
def handle_info(:startup, state) do
AgentLib.post_agent_update(state.id, "idle startup")
socket = AgentLib.get_socket()
AgentLib.login(socket, %{
name: "Idle_#{state.number}",
email: "Idle_#{state.number}@agent_email",
extra_data: %{}
})
:timer.send_interval(@tick_ms, self(), :tick)
:timer.send_interval(@info_ms, self(), :info)
:timer.send_after(1000, self(), :info)
{:noreply, %{state | socket: socket}}
end
def handle_info(:tick, state) do
AgentLib._send(state.socket, %{cmd: "c.system.ping"})
AgentLib.post_agent_update(state.id, "idle pinged")
{:noreply, state}
end
def handle_info(:info, state) do
AgentLib._send(state.socket, %{cmd: "c.news.get_latest_game_news", category: "Game news"})
{:noreply, state}
end
def handle_info({:ssl, _socket, data}, state) do
new_state = data
|> AgentLib.translate
|> Enum.reduce(state, fn data, acc ->
handle_msg(data, acc)
end)
{:noreply, new_state}
end
defp handle_msg(nil, state), do: state
defp handle_msg(%{"cmd" => "s.system.pong"}, state) do
state
end
defp handle_msg(%{"cmd" => "s.news.get_latest_game_news", "post" => _post}, state) do
state
end
# Startup
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, opts[:data], opts)
end
def init(opts) do
send(self(), :startup)
{:ok,
%{
id: opts.id,
number: opts.number,
socket: nil
}}
end
end
| 23.776316 | 105 | 0.643608 |
fffdd173c961075e6778bc6ca8f5e947ed198f08 | 464 | exs | Elixir | config/test.exs | jeyemwey/radiator-spark | 2fba0a84eb43ab1d58e8ec58c6a07f64adf9cb9d | [
"MIT"
] | null | null | null | config/test.exs | jeyemwey/radiator-spark | 2fba0a84eb43ab1d58e8ec58c6a07f64adf9cb9d | [
"MIT"
] | null | null | null | config/test.exs | jeyemwey/radiator-spark | 2fba0a84eb43ab1d58e8ec58c6a07f64adf9cb9d | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :radiator, RadiatorWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :radiator, Radiator.Repo,
username: "postgres",
password: "postgres",
database: "radiator_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
| 24.421053 | 56 | 0.734914 |
fffe00e265055b7eeae3e694507093610276f0ce | 772 | exs | Elixir | mix.exs | Plus17/xml_builder | 7ff4a3dd626132d6f54ea31f841d143e7cdf26fd | [
"MIT"
] | null | null | null | mix.exs | Plus17/xml_builder | 7ff4a3dd626132d6f54ea31f841d143e7cdf26fd | [
"MIT"
] | null | null | null | mix.exs | Plus17/xml_builder | 7ff4a3dd626132d6f54ea31f841d143e7cdf26fd | [
"MIT"
] | null | null | null | defmodule XmlBuilder.Mixfile do
use Mix.Project
@source_url "https://github.com/joshnuss/xml_builder"
def project do
[
app: :xml_builder,
version: "2.1.4",
elixir: "~> 1.6",
deps: deps(),
docs: docs(),
package: [
maintainers: ["Joshua Nussbaum"],
licenses: ["MIT"],
links: %{GitHub: @source_url}
],
description: "XML builder for Elixir"
]
end
def application do
[applications: []]
end
defp deps do
[
{:credo, ">= 1.4.0 and < 1.5.0", only: [:dev, :test], runtime: false},
{:ex_doc, github: "elixir-lang/ex_doc", only: :dev}
]
end
defp docs do
[
main: "readme",
source_url: @source_url,
extras: ["README.md"]
]
end
end
| 18.829268 | 76 | 0.540155 |
fffe03470784f3bf367631334a712f21484736d1 | 51,148 | ex | Elixir | lib/phoenix/controller.ex | nbw/phoenix | fdafd8563c16ed997a2b8a4ec7d73de62b52c238 | [
"MIT"
] | 7 | 2021-01-31T04:51:08.000Z | 2022-01-09T06:59:28.000Z | lib/phoenix/controller.ex | nbw/phoenix | fdafd8563c16ed997a2b8a4ec7d73de62b52c238 | [
"MIT"
] | null | null | null | lib/phoenix/controller.ex | nbw/phoenix | fdafd8563c16ed997a2b8a4ec7d73de62b52c238 | [
"MIT"
] | 2 | 2021-02-06T08:40:23.000Z | 2021-03-20T16:35:47.000Z | defmodule Phoenix.Controller do
import Plug.Conn
alias Plug.Conn.AlreadySentError
require Logger
require Phoenix.Endpoint
@unsent [:unset, :set, :set_chunked, :set_file]
@moduledoc """
Controllers are used to group common functionality in the same
(pluggable) module.
For example, the route:
get "/users/:id", MyAppWeb.UserController, :show
will invoke the `show/2` action in the `MyAppWeb.UserController`:
defmodule MyAppWeb.UserController do
use MyAppWeb, :controller
def show(conn, %{"id" => id}) do
user = Repo.get(User, id)
render(conn, "show.html", user: user)
end
end
An action is a regular function that receives the connection
and the request parameters as arguments. The connection is a
`Plug.Conn` struct, as specified by the Plug library.
## Options
When used, the controller supports the following options:
* `:namespace` - sets the namespace to properly inflect
the layout view. By default it uses the base alias
in your controller name
* `:log` - the level to log. When false, disables controller
logging
* `:put_default_views` - controls whether the default view
and layout should be set or not
## Connection
A controller by default provides many convenience functions for
manipulating the connection, rendering templates, and more.
Those functions are imported from two modules:
* `Plug.Conn` - a collection of low-level functions to work with
the connection
* `Phoenix.Controller` - functions provided by Phoenix
to support rendering, and other Phoenix specific behaviour
If you want to have functions that manipulate the connection
without fully implementing the controller, you can import both
modules directly instead of `use Phoenix.Controller`.
## Plug pipeline
As with routers, controllers also have their own plug pipeline.
However, different from routers, controllers have a single pipeline:
defmodule MyAppWeb.UserController do
use MyAppWeb, :controller
plug :authenticate, usernames: ["jose", "eric", "sonny"]
def show(conn, params) do
# authenticated users only
end
defp authenticate(conn, options) do
if get_session(conn, :username) in options[:usernames] do
conn
else
conn |> redirect(to: "/") |> halt()
end
end
end
The `:authenticate` plug will be invoked before the action. If the
plug calls `Plug.Conn.halt/1` (which is by default imported into
controllers), it will halt the pipeline and won't invoke the action.
### Guards
`plug/2` in controllers supports guards, allowing a developer to configure
a plug to only run in some particular action:
plug :authenticate, [usernames: ["jose", "eric", "sonny"]] when action in [:show, :edit]
plug :authenticate, [usernames: ["admin"]] when not action in [:index]
The first plug will run only when action is show or edit. The second plug will
always run, except for the index action.
Those guards work like regular Elixir guards and the only variables accessible
in the guard are `conn`, the `action` as an atom and the `controller` as an
alias.
## Controllers are plugs
Like routers, controllers are plugs, but they are wired to dispatch
to a particular function which is called an action.
For example, the route:
get "/users/:id", UserController, :show
will invoke `UserController` as a plug:
UserController.call(conn, :show)
which will trigger the plug pipeline and which will eventually
invoke the inner action plug that dispatches to the `show/2`
function in the `UserController`.
As controllers are plugs, they implement both `init/1` and
`call/2`, and it also provides a function named `action/2`
which is responsible for dispatching the appropriate action
after the plug stack (and is also overridable).
### Overriding `action/2` for custom arguments
Phoenix injects an `action/2` plug in your controller which calls the
function matched from the router. By default, it passes the conn and params.
In some cases, overriding the `action/2` plug in your controller is a
useful way to inject arguments into your actions that you would otherwise
need to repeatedly fetch off the connection. For example, imagine if you
stored a `conn.assigns.current_user` in the connection and wanted quick
access to the user for every action in your controller:
def action(conn, _) do
args = [conn, conn.params, conn.assigns.current_user]
apply(__MODULE__, action_name(conn), args)
end
def index(conn, _params, user) do
videos = Repo.all(user_videos(user))
# ...
end
def delete(conn, %{"id" => id}, user) do
video = Repo.get!(user_videos(user), id)
# ...
end
## Rendering and layouts
One of the main features provided by controllers is the ability
to perform content negotiation and render templates based on
information sent by the client. Read `render/3` to learn more.
It is also important not to confuse `Phoenix.Controller.render/3`
with `Phoenix.View.render/3`. The former expects
a connection and relies on content negotiation while the latter is
connection-agnostic and typically invoked from your views.
"""
defmacro __using__(opts) do
quote bind_quoted: [opts: opts] do
import Phoenix.Controller
# TODO v2: No longer automatically import dependencies
import Plug.Conn
use Phoenix.Controller.Pipeline, opts
if Keyword.get(opts, :put_default_views, true) do
plug :put_new_layout, {Phoenix.Controller.__layout__(__MODULE__, opts), :app}
plug :put_new_view, Phoenix.Controller.__view__(__MODULE__)
end
end
end
@doc """
Registers the plug to call as a fallback to the controller action.
A fallback plug is useful to translate common domain data structures
into a valid `%Plug.Conn{}` response. If the controller action fails to
return a `%Plug.Conn{}`, the provided plug will be called and receive
the controller's `%Plug.Conn{}` as it was before the action was invoked
along with the value returned from the controller action.
## Examples
defmodule MyController do
use Phoenix.Controller
action_fallback MyFallbackController
def show(conn, %{"id" => id}, current_user) do
with {:ok, post} <- Blog.fetch_post(id),
:ok <- Authorizer.authorize(current_user, :view, post) do
render(conn, "show.json", post: post)
end
end
end
In the above example, `with` is used to match only a successful
post fetch, followed by valid authorization for the current user.
In the event either of those fail to match, `with` will not invoke
the render block and instead return the unmatched value. In this case,
imagine `Blog.fetch_post/2` returned `{:error, :not_found}` or
`Authorizer.authorize/3` returned `{:error, :unauthorized}`. For cases
where these data structures serve as return values across multiple
boundaries in our domain, a single fallback module can be used to
translate the value into a valid response. For example, you could
write the following fallback controller to handle the above values:
defmodule MyFallbackController do
use Phoenix.Controller
def call(conn, {:error, :not_found}) do
conn
|> put_status(:not_found)
|> put_view(MyErrorView)
|> render(:"404")
end
def call(conn, {:error, :unauthorized}) do
conn
|> put_status(403)
|> put_view(MyErrorView)
|> render(:"403")
end
end
"""
defmacro action_fallback(plug) do
Phoenix.Controller.Pipeline.__action_fallback__(plug, __CALLER__)
end
@doc """
Returns the action name as an atom, raises if unavailable.
"""
@spec action_name(Plug.Conn.t) :: atom
def action_name(conn), do: conn.private.phoenix_action
@doc """
Returns the controller module as an atom, raises if unavailable.
"""
@spec controller_module(Plug.Conn.t) :: atom
def controller_module(conn), do: conn.private.phoenix_controller
@doc """
Returns the router module as an atom, raises if unavailable.
"""
@spec router_module(Plug.Conn.t) :: atom
def router_module(conn), do: conn.private.phoenix_router
@doc """
Returns the endpoint module as an atom, raises if unavailable.
"""
@spec endpoint_module(Plug.Conn.t) :: atom
def endpoint_module(conn), do: conn.private.phoenix_endpoint
@doc """
Returns the template name rendered in the view as a string
(or nil if no template was rendered).
"""
@spec view_template(Plug.Conn.t) :: binary | nil
def view_template(conn) do
conn.private[:phoenix_template]
end
@doc """
Sends JSON response.
It uses the configured `:json_library` under the `:phoenix`
application for `:json` to pick up the encoder module.
## Examples
iex> json(conn, %{id: 123})
"""
@spec json(Plug.Conn.t, term) :: Plug.Conn.t
def json(conn, data) do
response = Phoenix.json_library().encode_to_iodata!(data)
send_resp(conn, conn.status || 200, "application/json", response)
end
@doc """
A plug that may convert a JSON response into a JSONP one.
In case a JSON response is returned, it will be converted
to a JSONP as long as the callback field is present in
the query string. The callback field itself defaults to
"callback", but may be configured with the callback option.
In case there is no callback or the response is not encoded
in JSON format, it is a no-op.
Only alphanumeric characters and underscore are allowed in the
callback name. Otherwise an exception is raised.
## Examples
# Will convert JSON to JSONP if callback=someFunction is given
plug :allow_jsonp
# Will convert JSON to JSONP if cb=someFunction is given
plug :allow_jsonp, callback: "cb"
"""
@spec allow_jsonp(Plug.Conn.t, Keyword.t) :: Plug.Conn.t
def allow_jsonp(conn, opts \\ []) do
callback = Keyword.get(opts, :callback, "callback")
case Map.fetch(conn.query_params, callback) do
:error -> conn
{:ok, ""} -> conn
{:ok, cb} ->
validate_jsonp_callback!(cb)
register_before_send(conn, fn conn ->
if json_response?(conn) do
conn
|> put_resp_header("content-type", "application/javascript")
|> resp(conn.status, jsonp_body(conn.resp_body, cb))
else
conn
end
end)
end
end
defp json_response?(conn) do
case get_resp_header(conn, "content-type") do
["application/json;" <> _] -> true
["application/json"] -> true
_ -> false
end
end
defp jsonp_body(data, callback) do
body =
data
|> IO.iodata_to_binary()
|> String.replace(<<0x2028::utf8>>, "\\u2028")
|> String.replace(<<0x2029::utf8>>, "\\u2029")
"/**/ typeof #{callback} === 'function' && #{callback}(#{body});"
end
defp validate_jsonp_callback!(<<h, t::binary>>)
when h in ?0..?9 or h in ?A..?Z or h in ?a..?z or h == ?_,
do: validate_jsonp_callback!(t)
defp validate_jsonp_callback!(<<>>), do: :ok
defp validate_jsonp_callback!(_),
do: raise(ArgumentError, "the JSONP callback name contains invalid characters")
@doc """
Sends text response.
## Examples
iex> text(conn, "hello")
iex> text(conn, :implements_to_string)
"""
@spec text(Plug.Conn.t, String.Chars.t) :: Plug.Conn.t
def text(conn, data) do
send_resp(conn, conn.status || 200, "text/plain", to_string(data))
end
@doc """
Sends html response.
## Examples
iex> html(conn, "<html><head>...")
"""
@spec html(Plug.Conn.t, iodata) :: Plug.Conn.t
def html(conn, data) do
send_resp(conn, conn.status || 200, "text/html", data)
end
@doc """
Sends redirect response to the given url.
For security, `:to` only accepts paths. Use the `:external`
option to redirect to any URL.
The response will be sent with the status code defined within
the connection, via `Plug.Conn.put_status/2`. If no status
code is set, a 302 response is sent.
## Examples
iex> redirect(conn, to: "/login")
iex> redirect(conn, external: "http://elixir-lang.org")
"""
def redirect(conn, opts) when is_list(opts) do
url = url(opts)
html = Plug.HTML.html_escape(url)
body = "<html><body>You are being <a href=\"#{html}\">redirected</a>.</body></html>"
conn
|> put_resp_header("location", url)
|> send_resp(conn.status || 302, "text/html", body)
end
defp url(opts) do
cond do
to = opts[:to] -> validate_local_url(to)
external = opts[:external] -> external
true -> raise ArgumentError, "expected :to or :external option in redirect/2"
end
end
@invalid_local_url_chars ["\\"]
defp validate_local_url("//" <> _ = to), do: raise_invalid_url(to)
defp validate_local_url("/" <> _ = to) do
if String.contains?(to, @invalid_local_url_chars) do
raise ArgumentError, "unsafe characters detected for local redirect in URL #{inspect to}"
else
to
end
end
defp validate_local_url(to), do: raise_invalid_url(to)
@spec raise_invalid_url(term()) :: no_return()
defp raise_invalid_url(url) do
raise ArgumentError, "the :to option in redirect expects a path but was #{inspect url}"
end
@doc """
Stores the view for rendering.
Raises `Plug.Conn.AlreadySentError` if `conn` is already sent.
"""
@spec put_view(Plug.Conn.t, atom) :: Plug.Conn.t
def put_view(%Plug.Conn{state: state} = conn, module) when state in @unsent do
put_private(conn, :phoenix_view, module)
end
def put_view(%Plug.Conn{}, _module), do: raise AlreadySentError
@doc """
Stores the view for rendering if one was not stored yet.
Raises `Plug.Conn.AlreadySentError` if `conn` is already sent.
"""
@spec put_new_view(Plug.Conn.t, atom) :: Plug.Conn.t
def put_new_view(%Plug.Conn{state: state} = conn, module)
when state in @unsent do
update_in conn.private, &Map.put_new(&1, :phoenix_view, module)
end
def put_new_view(%Plug.Conn{}, _module), do: raise AlreadySentError
@doc """
Retrieves the current view.
"""
@spec view_module(Plug.Conn.t) :: atom
def view_module(conn), do: conn.private.phoenix_view
@doc """
Stores the layout for rendering.
The layout must be a tuple, specifying the layout view and the layout
name, or false. In case a previous layout is set, `put_layout` also
accepts the layout name to be given as a string or as an atom. If a
string, it must contain the format. Passing an atom means the layout
format will be found at rendering time, similar to the template in
`render/3`. It can also be set to `false`. In this case, no layout
would be used.
## Examples
iex> layout(conn)
false
iex> conn = put_layout conn, {AppView, "application.html"}
iex> layout(conn)
{AppView, "application.html"}
iex> conn = put_layout conn, "print.html"
iex> layout(conn)
{AppView, "print.html"}
iex> conn = put_layout conn, :print
iex> layout(conn)
{AppView, :print}
Raises `Plug.Conn.AlreadySentError` if `conn` is already sent.
"""
@spec put_layout(Plug.Conn.t, {atom, binary | atom} | atom | binary | false) :: Plug.Conn.t
def put_layout(%Plug.Conn{state: state} = conn, layout) do
if state in @unsent do
do_put_layout(conn, :phoenix_layout, layout)
else
raise AlreadySentError
end
end
defp do_put_layout(conn, private_key, false) do
put_private(conn, private_key, false)
end
defp do_put_layout(conn, private_key, {mod, layout}) when is_atom(mod) do
put_private(conn, private_key, {mod, layout})
end
defp do_put_layout(conn, private_key, layout) when is_binary(layout) or is_atom(layout) do
update_in conn.private, fn private ->
case Map.get(private, private_key, false) do
{mod, _} -> Map.put(private, private_key, {mod, layout})
false -> raise "cannot use put_layout/2 or put_root_layout/2 with atom/binary when layout is false, use a tuple instead"
end
end
end
@doc """
Stores the layout for rendering if one was not stored yet.
Raises `Plug.Conn.AlreadySentError` if `conn` is already sent.
"""
@spec put_new_layout(Plug.Conn.t, {atom, binary | atom} | false) :: Plug.Conn.t
def put_new_layout(%Plug.Conn{state: state} = conn, layout)
when (is_tuple(layout) and tuple_size(layout) == 2) or layout == false do
if state in @unsent do
update_in conn.private, &Map.put_new(&1, :phoenix_layout, layout)
else
raise AlreadySentError
end
end
@doc """
Stores the root layout for rendering.
Like `put_layout/2`, the layout must be a tuple,
specifying the layout view and the layout name, or false.
In case a previous layout is set, `put_root_layout` also
accepts the layout name to be given as a string or as an atom. If a
string, it must contain the format. Passing an atom means the layout
format will be found at rendering time, similar to the template in
`render/3`. It can also be set to `false`. In this case, no layout
would be used.
## Examples
iex> root_layout(conn)
false
iex> conn = put_root_layout conn, {AppView, "root.html"}
iex> root_layout(conn)
{AppView, "root.html"}
iex> conn = put_root_layout conn, "bare.html"
iex> root_layout(conn)
{AppView, "bare.html"}
iex> conn = put_root_layout conn, :bare
iex> root_layout(conn)
{AppView, :bare}
Raises `Plug.Conn.AlreadySentError` if `conn` is already sent.
"""
@spec put_root_layout(Plug.Conn.t, {atom, binary | atom} | atom | binary | false) :: Plug.Conn.t
def put_root_layout(%Plug.Conn{state: state} = conn, layout) do
if state in @unsent do
do_put_layout(conn, :phoenix_root_layout, layout)
else
raise AlreadySentError
end
end
@doc """
Sets which formats have a layout when rendering.
## Examples
iex> layout_formats(conn)
["html"]
iex> put_layout_formats(conn, ["html", "mobile"])
iex> layout_formats(conn)
["html", "mobile"]
Raises `Plug.Conn.AlreadySentError` if `conn` is already sent.
"""
@spec put_layout_formats(Plug.Conn.t, [String.t]) :: Plug.Conn.t
def put_layout_formats(%Plug.Conn{state: state} = conn, formats)
when state in @unsent and is_list(formats) do
put_private(conn, :phoenix_layout_formats, formats)
end
def put_layout_formats(%Plug.Conn{}, _formats), do: raise AlreadySentError
@doc """
Retrieves current layout formats.
"""
@spec layout_formats(Plug.Conn.t) :: [String.t]
def layout_formats(conn) do
Map.get(conn.private, :phoenix_layout_formats, ~w(html))
end
@doc """
Retrieves the current layout, or `false` when none is set.
"""
@spec layout(Plug.Conn.t) :: {atom, String.t | atom} | false
def layout(conn), do: Map.get(conn.private, :phoenix_layout, false)

@doc """
Retrieves the current root layout, or `false` when none is set.
"""
@spec root_layout(Plug.Conn.t) :: {atom, String.t | atom} | false
def root_layout(conn), do: Map.get(conn.private, :phoenix_root_layout, false)
@doc """
Render the given template or the default template
specified by the current action with the given assigns.

See `render/3` for more information.
"""
@spec render(Plug.Conn.t, Keyword.t | map | binary | atom) :: Plug.Conn.t
def render(conn, template_or_assigns \\ [])

# A binary or atom argument names the template explicitly (empty assigns).
def render(conn, template) when is_binary(template) or is_atom(template) do
  render(conn, template, [])
end

# Anything else is treated as assigns; the template defaults to the action name.
def render(conn, assigns) do
  render(conn, action_name(conn), assigns)
end
@doc """
Renders the given `template` and `assigns` based on the `conn` information.

Once the template is rendered, the template format is set as the response
content type (for example, an HTML template will set "text/html" as response
content type) and the data is sent to the client with default status of 200.

## Arguments

  * `conn` - the `Plug.Conn` struct
  * `template` - which may be an atom or a string. If an atom, like `:index`,
    it will render a template with the same format as the one returned by
    `get_format/1`. For example, for an HTML request, it will render
    the "index.html" template. If the template is a string, it must contain
    the extension too, like "index.json"
  * `assigns` - a dictionary with the assigns to be used in the view. Those
    assigns are merged and have higher precedence than the connection assigns
    (`conn.assigns`)

## Examples

    defmodule MyAppWeb.UserController do
      use Phoenix.Controller

      def show(conn, _params) do
        render(conn, "show.html", message: "Hello")
      end
    end

The example above renders a template "show.html" from the `MyAppWeb.UserView`
and sets the response content type to "text/html".

In many cases, you may want the template format to be set dynamically based
on the request. To do so, you can pass the template name as an atom (without
the extension):

    def show(conn, _params) do
      render(conn, :show, message: "Hello")
    end

In order for the example above to work, we need to do content negotiation with
the accepts plug before rendering. You can do so by adding the following to your
pipeline (in the router):

    plug :accepts, ["html"]

## Views

By default, Controllers render templates in a view with a similar name to the
controller. For example, `MyAppWeb.UserController` will render templates inside
the `MyAppWeb.UserView`. This information can be changed any time by using the
`put_view/2` function:

    def show(conn, _params) do
      conn
      |> put_view(MyAppWeb.SpecialView)
      |> render(:show, message: "Hello")
    end

`put_view/2` can also be used as a plug:

    defmodule MyAppWeb.UserController do
      use Phoenix.Controller

      plug :put_view, MyAppWeb.SpecialView

      def show(conn, _params) do
        render(conn, :show, message: "Hello")
      end
    end

## Layouts

Templates are often rendered inside layouts. By default, Phoenix
will render layouts for html requests. For example:

    defmodule MyAppWeb.UserController do
      use Phoenix.Controller

      def show(conn, _params) do
        render(conn, "show.html", message: "Hello")
      end
    end

will render the "show.html" template inside an "app.html"
template specified in `MyAppWeb.LayoutView`. `put_layout/2` can be used
to change the layout, similar to how `put_view/2` can be used to change
the view.

`layout_formats/1` and `put_layout_formats/2` can be used to configure
which formats support/require layout rendering (defaults to "html" only).
"""
@spec render(Plug.Conn.t, binary | atom, Keyword.t | map | binary | atom) :: Plug.Conn.t
# Atom template: the extension comes from the negotiated request format.
def render(conn, template, assigns)
    when is_atom(template) and (is_map(assigns) or is_list(assigns)) do
  format =
    get_format(conn) ||
      raise "cannot render template #{inspect template} because conn.params[\"_format\"] is not set. " <>
              "Please set `plug :accepts, ~w(html json ...)` in your pipeline."

  render_and_send(conn, format, template, assigns)
end

# Binary template: must already carry its extension (e.g. "index.json").
def render(conn, template, assigns)
    when is_binary(template) and (is_map(assigns) or is_list(assigns)) do
  case Path.extname(template) do
    "." <> format ->
      render_and_send(conn, format, template, assigns)

    "" ->
      raise "cannot render template #{inspect template} without format. Use an atom if the " <>
              "template format is meant to be set dynamically based on the request format"
  end
end

# Deprecated form render(conn, View, template): use put_view/2 + render/2,3.
def render(conn, view, template)
    when is_atom(view) and (is_binary(template) or is_atom(template)) do
  IO.warn "#{__MODULE__}.render/3 with a view is deprecated, see the documentation for render/3 for an alternative"
  render(conn, view, template, [])
end
@doc false
# Deprecated form render(conn, View, template, assigns): sets the view and
# delegates to render/3. Prefer put_view/2 + render/3 at call sites.
def render(conn, view, template, assigns)
    when is_atom(view) and (is_binary(template) or is_atom(template)) do
  IO.warn "#{__MODULE__}.render/4 with a view is deprecated, see the documentation for render/3 for an alternative"

  conn
  |> put_view(view)
  |> render(template, assigns)
end
# Renders `template` in the conn's configured view and sends the result,
# defaulting the status to 200 and the content-type to the format's MIME type
# (a caller-set content-type header is preserved by ensure_resp_content_type/2).
defp render_and_send(conn, format, template, assigns) do
  template = template_name(template, format)

  view =
    Map.get(conn.private, :phoenix_view) ||
      raise "a view module was not specified, set one with put_view/2"

  conn = prepare_assigns(conn, assigns, template, format)
  data = render_with_layouts(conn, view, template, format)

  conn
  |> ensure_resp_content_type(MIME.type(format))
  |> send_resp(conn.status || 200, data)
end
# Renders the template, wrapping it in the root layout when one is set.
# The regular (inner) layout is applied by Phoenix.View via the :layout assign
# prepared in prepare_assigns/4.
defp render_with_layouts(conn, view, template, format) do
  render_assigns = Map.put(conn.assigns, :conn, conn)

  case root_layout(conn) do
    {layout_mod, layout_tpl} ->
      inner = Phoenix.View.render(view, template, render_assigns)
      # The rendered document becomes @inner_content; :layout is dropped so
      # the root layout itself is not wrapped in the inner layout again.
      root_assigns = render_assigns |> Map.put(:inner_content, inner) |> Map.delete(:layout)
      Phoenix.View.render_to_iodata(layout_mod, template_name(layout_tpl, format), root_assigns)

    false ->
      Phoenix.View.render_to_iodata(view, template, render_assigns)
  end
end
# Merges caller assigns into conn.assigns (caller assigns win), records the
# template being rendered and resolves the :layout assign for this format.
defp prepare_assigns(conn, assigns, template, format) do
  assigns = to_map(assigns)

  layout =
    case layout(conn, assigns, format) do
      {mod, layout} -> {mod, template_name(layout, format)}
      false -> false
    end

  conn
  |> put_private(:phoenix_template, template)
  |> Map.update!(:assigns, fn prev ->
    prev
    |> Map.merge(assigns)
    |> Map.put(:layout, layout)
  end)
end

# Picks the layout for this render: only formats listed in layout_formats/1
# get one, and an explicit :layout assign overrides the conn's configured
# layout (including `layout: false` to disable it for a single render).
defp layout(conn, assigns, format) do
  if format in layout_formats(conn) do
    case Map.fetch(assigns, :layout) do
      {:ok, layout} -> layout
      :error -> layout(conn)
    end
  else
    false
  end
end
# Normalizes assigns to a map. Keyword lists are converted with Map.new/1,
# which — like :maps.from_list/1 — keeps the right-most value on duplicate keys.
defp to_map(assigns) when is_list(assigns), do: Map.new(assigns)
defp to_map(assigns) when is_map(assigns), do: assigns

# Appends the format extension when the template is given as an atom;
# binary template names are assumed to already carry their extension.
defp template_name(name, format) when is_atom(name), do: "#{name}.#{format}"
defp template_name(name, _format) when is_binary(name), do: name
# Shared response helper: guarantees a content-type header, then sends `body`
# with the conn's explicit status or `default_status`. The 3-arity send_resp
# call resolves to the one imported from Plug.Conn.
defp send_resp(conn, default_status, default_content_type, body) do
  conn
  |> ensure_resp_content_type(default_content_type)
  |> send_resp(conn.status || default_status, body)
end

# Adds a "content-type" response header (with utf-8 charset) only when the
# response does not already carry one, so caller-set content types win.
defp ensure_resp_content_type(%Plug.Conn{resp_headers: resp_headers} = conn, content_type) do
  if List.keyfind(resp_headers, "content-type", 0) do
    conn
  else
    content_type = content_type <> "; charset=utf-8"
    %Plug.Conn{conn | resp_headers: [{"content-type", content_type} | resp_headers]}
  end
end
@doc """
Puts the url string or `%URI{}` to be used for route generation.

This function overrides the default URL generation pulled
from the `%Plug.Conn{}`'s endpoint configuration.

## Examples

Imagine your application is configured to run on "example.com"
but after the user signs in, you want all links to use
"some_user.example.com". You can do so by setting the proper
router url configuration:

    def put_router_url_by_user(conn) do
      put_router_url(conn, get_user_from_conn(conn).account_name <> ".example.com")
    end

Now when you call `Routes.some_route_url(conn, ...)`, it will use
the router url set above. Keep in mind that, if you want to generate
routes to the *current* domain, it is preferred to use
`Routes.some_route_path` helpers, as those are always relative.
"""
# Accepts either a parsed %URI{} struct...
def put_router_url(conn, %URI{} = uri) do
  put_private(conn, :phoenix_router_url, uri)
end

# ...or a plain URL string.
def put_router_url(conn, url) when is_binary(url) do
  put_private(conn, :phoenix_router_url, url)
end
@doc """
Puts the URL or `%URI{}` to be used for the static url generation.

Using this function on a `%Plug.Conn{}` struct tells `static_url/2` to use
the given information for URL generation instead of the `%Plug.Conn{}`'s
endpoint configuration (much like `put_router_url/2` but for static URLs).
"""
def put_static_url(conn, %URI{} = uri) do
  put_private(conn, :phoenix_static_url, uri)
end

def put_static_url(conn, url) when is_binary(url) do
  put_private(conn, :phoenix_static_url, url)
end
@doc """
Puts the format in the connection.

This format is used when rendering a template as an atom.
For example, `render(conn, :foo)` will render `"foo.FORMAT"`
where the format is the one set here. The default format
is typically set from the negotiation done in `accepts/2`.

See `get_format/1` for retrieval.
"""
def put_format(conn, format), do: put_private(conn, :phoenix_format, format)

@doc """
Returns the request format, such as "json", "html".

This format is used when rendering a template as an atom.
For example, `render(conn, :foo)` will render `"foo.FORMAT"`
where the format is the one set here. The default format
is typically set from the negotiation done in `accepts/2`.
"""
def get_format(conn) do
  # Prefer an explicitly negotiated format; fall back to the "_format" param.
  conn.private[:phoenix_format] || conn.params["_format"]
end
@doc """
Sends the given file or binary as a download.

The second argument must be `{:binary, contents}`, where
`contents` will be sent as download, or `{:file, path}`,
where `path` is the filesystem location of the file to
be sent. Be careful to not interpolate the path from
external parameters, as it could allow traversal of the
filesystem.

The download is achieved by setting "content-disposition"
to attachment. The "content-type" will also be set based
on the extension of the given filename but can be customized
via the `:content_type` and `:charset` options.

## Options

  * `:filename` - the filename to be presented to the user
    as download
  * `:content_type` - the content type of the file or binary
    sent as download. It is automatically inferred from the
    filename extension
  * `:disposition` - specifies disposition type
    (`:attachment` or `:inline`). If `:attachment` was used,
    user will be prompted to save the file. If `:inline` was used,
    the browser will attempt to open the file.
    Defaults to `:attachment`.
  * `:charset` - the charset of the file, such as "utf-8".
    Defaults to none
  * `:offset` - the bytes to offset when reading. Defaults to `0`
  * `:length` - the total bytes to read. Defaults to `:all`
  * `:encode` - encodes the filename using `URI.encode_www_form/1`.
    Defaults to `true`. When `false`, disables encoding. If you
    disable encoding, you need to guarantee there are no special
    characters in the filename, such as quotes, newlines, etc.
    Otherwise you can expose your application to security attacks

## Examples

To send a file that is stored inside your application priv
directory:

    path = Application.app_dir(:my_app, "priv/prospectus.pdf")
    send_download(conn, {:file, path})

When using `{:file, path}`, the filename is inferred from the
given path but may also be set explicitly.

To allow the user to download contents that are in memory as
a binary or string:

    send_download(conn, {:binary, "world"}, filename: "hello.txt")

See `Plug.Conn.send_file/3` and `Plug.Conn.send_resp/3` if you
would like to access the low-level functions used to send files
and responses via Plug.
"""
def send_download(conn, kind, opts \\ [])

# File download: filename defaults to the path's basename; :offset/:length
# are forwarded to Plug.Conn.send_file/5.
def send_download(conn, {:file, path}, opts) do
  filename = opts[:filename] || Path.basename(path)
  offset = opts[:offset] || 0
  length = opts[:length] || :all

  conn
  |> prepare_send_download(filename, opts)
  |> send_file(conn.status || 200, path, offset, length)
end

# In-memory download: an explicit :filename is mandatory (there is no path
# to infer it from).
def send_download(conn, {:binary, contents}, opts) do
  filename = opts[:filename] || raise ":filename option is required when sending binary download"

  conn
  |> prepare_send_download(filename, opts)
  |> send_resp(conn.status || 200, contents)
end
# Sets the content-type and content-disposition headers for a download
# response, and warns when invoked from an AJAX request (where browser
# downloads typically do not behave as expected).
defp prepare_send_download(conn, filename, opts) do
  content_type = opts[:content_type] || MIME.from_path(filename)
  encoded_filename = encode_filename(filename, Keyword.get(opts, :encode, true))
  disposition_type = get_disposition_type(Keyword.get(opts, :disposition, :attachment))
  warn_if_ajax(conn)

  conn
  |> put_resp_content_type(content_type, opts[:charset])
  |> put_resp_header("content-disposition", ~s[#{disposition_type}; filename="#{encoded_filename}"])
end
# Optionally www-form-encodes the download filename; skipping encoding is the
# caller's explicit (and riskier) choice.
defp encode_filename(filename, true), do: URI.encode_www_form(filename)
defp encode_filename(filename, false), do: filename

# Renders the :disposition option as a header token, rejecting anything other
# than :attachment or :inline.
defp get_disposition_type(disposition) when disposition in [:attachment, :inline] do
  Atom.to_string(disposition)
end

defp get_disposition_type(other) do
  raise ArgumentError,
        "expected :disposition to be :attachment or :inline, got: #{inspect(other)}"
end
# True when the request carries the de-facto standard XHR marker header with
# one of the two accepted spellings. NOTE(review): a request with multiple
# "x-requested-with" headers matches neither clause and raises CaseClauseError.
defp ajax?(conn) do
  case get_req_header(conn, "x-requested-with") do
    [value] -> value in ["XMLHttpRequest", "xmlhttprequest"]
    [] -> false
  end
end

# Downloads started from XHR generally cannot trigger the browser's save
# dialog, so log a warning to aid debugging.
defp warn_if_ajax(conn) do
  if ajax?(conn) do
    Logger.warn "send_download/3 has been invoked during an AJAX request. " <>
                "The download may not work as expected under XMLHttpRequest"
  end
end
@doc """
Scrubs the parameters from the request.

This process is two-fold:

  * Checks to see if the `required_key` is present
  * Changes empty parameters of `required_key` (recursively) to nils

This function is useful for removing empty strings sent
via HTML forms. If you are providing an API, there
is likely no need to invoke `scrub_params/2`.

If the `required_key` is not present, it will
raise `Phoenix.MissingParamError`.

## Examples

    iex> scrub_params(conn, "user")

"""
@spec scrub_params(Plug.Conn.t, String.t) :: Plug.Conn.t
def scrub_params(conn, required_key) when is_binary(required_key) do
  # A missing key and a top-level value scrubbed down to nil are treated the
  # same: both raise MissingParamError.
  param = Map.get(conn.params, required_key) |> scrub_param()

  unless param do
    raise Phoenix.MissingParamError, key: required_key
  end

  params = Map.put(conn.params, required_key, param)
  %Plug.Conn{conn | params: params}
end
# Structs (e.g. %Plug.Upload{}) are passed through untouched.
defp scrub_param(%{__struct__: mod} = struct) when is_atom(mod) do
  struct
end

# Recurse into plain maps, scrubbing every value.
defp scrub_param(%{} = map) do
  Map.new(map, fn {key, value} -> {key, scrub_param(value)} end)
end

# Recurse into lists element by element.
defp scrub_param(list) when is_list(list) do
  for element <- list, do: scrub_param(element)
end

# Leaves: blank strings become nil, everything else is kept as-is.
defp scrub_param(value) do
  if scrub?(value), do: nil, else: value
end

# A value is scrubbed when it is a string made up solely of spaces (other
# whitespace such as tabs or newlines is deliberately preserved).
defp scrub?(value) when is_binary(value), do: String.trim_leading(value, " ") == ""
defp scrub?(_value), do: false
@doc """
Enables CSRF protection.

Currently used as a wrapper function for `Plug.CSRFProtection`
and mainly serves as a function plug in `YourApp.Router`.

Check `get_csrf_token/0` and `delete_csrf_token/0` for
retrieving and deleting CSRF tokens.
"""
def protect_from_forgery(conn, opts \\ []) do
  csrf_opts = Plug.CSRFProtection.init(opts)
  Plug.CSRFProtection.call(conn, csrf_opts)
end
@doc """
Put headers that improve browser security.

It sets the following headers:

  * `x-frame-options` - set to SAMEORIGIN to avoid clickjacking
    through iframes unless in the same origin
  * `x-content-type-options` - set to nosniff. This requires
    script and style tags to be sent with proper content type
  * `x-xss-protection` - set to "1; mode=block" to improve XSS
    protection on both Chrome and IE
  * `x-download-options` - set to noopen to instruct the browser
    not to open a download directly in the browser, to avoid
    HTML files rendering inline and accessing the security
    context of the application (like critical domain cookies)
  * `x-permitted-cross-domain-policies` - set to none to restrict
    Adobe Flash Player’s access to data
  * `cross-origin-window-policy` - set to deny to avoid window
    control attacks

A custom headers map may also be given to be merged with defaults.
It is recommended for custom header keys to be in lowercase, to avoid sending
duplicate keys in a request.
Additionally, responses with mixed-case headers served over HTTP/2 are not
considered valid by common clients, resulting in dropped responses.
"""
def put_secure_browser_headers(conn, headers \\ %{})

# Called as `plug :put_secure_browser_headers` — the plug pipeline passes [].
def put_secure_browser_headers(conn, []) do
  put_secure_defaults(conn)
end

# Custom headers are merged after the defaults, so they win on duplicates.
def put_secure_browser_headers(conn, headers) when is_map(headers) do
  conn
  |> put_secure_defaults()
  |> merge_resp_headers(headers)
end

# Merges the standard set of browser-security response headers.
defp put_secure_defaults(conn) do
  merge_resp_headers(conn, [
    {"x-frame-options", "SAMEORIGIN"},
    {"x-xss-protection", "1; mode=block"},
    {"x-content-type-options", "nosniff"},
    {"x-download-options", "noopen"},
    {"x-permitted-cross-domain-policies", "none"},
    {"cross-origin-window-policy", "deny"}
  ])
end
@doc """
Gets or generates a CSRF token.

If a token exists, it is returned, otherwise it is generated and stored
in the process dictionary.
"""
defdelegate get_csrf_token(), to: Plug.CSRFProtection

@doc """
Deletes the CSRF token from the process dictionary.

*Note*: The token is deleted only after a response has been sent.
"""
# Both delegates operate on Plug.CSRFProtection's per-process token storage.
defdelegate delete_csrf_token(), to: Plug.CSRFProtection
@doc """
Performs content negotiation based on the available formats.

It receives a connection, a list of formats that the server
is capable of rendering and then proceeds to perform content
negotiation based on the request information. If the client
accepts any of the given formats, the request proceeds.

If the request contains a "_format" parameter, it is
considered to be the format desired by the client. If no
"_format" parameter is available, this function will parse
the "accept" header and find a matching format accordingly.

This function is useful when you may want to serve different
content-types (such as JSON and HTML) from the same routes.
However, if you always have distinct routes, you can also
disable content negotiation and simply hardcode your format
of choice in your route pipelines:

    plug :put_format, "html"

It is important to notice that browsers have historically
sent bad accept headers. For this reason, this function will
default to "html" format whenever:

  * the accepted list of arguments contains the "html" format

  * the accept header specified more than one media type preceded
    or followed by the wildcard media type "`*/*`"

This function raises `Phoenix.NotAcceptableError`, which is rendered
with status 406, whenever the server cannot serve a response in any
of the formats expected by the client.

## Examples

`accepts/2` can be invoked as a function:

    iex> accepts(conn, ["html", "json"])

or used as a plug:

    plug :accepts, ["html", "json"]
    plug :accepts, ~w(html json)

## Custom media types

It is possible to add custom media types to your Phoenix application.
The first step is to teach Plug about those new media types in
your `config/config.exs` file:

    config :mime, :types, %{
      "application/vnd.api+json" => ["json-api"]
    }

The key is the media type, the value is a list of formats the
media type can be identified with. For example, by using
"json-api", you will be able to use templates with extension
"index.json-api" or to force a particular format in a given
URL by sending "?_format=json-api".

After this change, you must recompile plug:

    $ mix deps.clean mime --build
    $ mix deps.get

And now you can use it in accepts too:

    plug :accepts, ["html", "json-api"]
"""
@spec accepts(Plug.Conn.t, [binary]) :: Plug.Conn.t | no_return()
def accepts(conn, [_|_] = accepted) do
  # An explicit "_format" request parameter wins over the accept header.
  case Map.fetch(conn.params, "_format") do
    {:ok, format} ->
      handle_params_accept(conn, format, accepted)

    :error ->
      handle_header_accept(conn, get_req_header(conn, "accept"), accepted)
  end
end
# Honors an explicit "_format" request parameter: accept it when the server
# supports that format, otherwise refuse with a 406 error.
defp handle_params_accept(conn, format, accepted) do
  case format in accepted do
    true ->
      put_format(conn, format)

    false ->
      raise Phoenix.NotAcceptableError,
        accepts: accepted,
        message: "unknown format #{inspect(format)}, expected one of #{inspect(accepted)}"
  end
end
# In case there is no accept header or the header is */*
# we use the first format specified in the accepts list.
defp handle_header_accept(conn, header, [first|_]) when header == [] or header == ["*/*"] do
  put_format(conn, first)
end

# In case there is a header, we need to parse it.
# But before we check for */* because if one exists and we serve html,
# we unfortunately need to assume it is a browser sending us a request.
# Only the first accept header is considered when several were sent.
defp handle_header_accept(conn, [header|_], accepted) do
  if header =~ "*/*" and "html" in accepted do
    put_format(conn, "html")
  else
    parse_header_accept(conn, String.split(header, ","), [], accepted)
  end
end

# Walks the comma-separated media types. Any q=1.0 entry that matches an
# accepted format is taken immediately; other entries are collected as
# {-q, header, exts} (negated q so that Enum.sort/1 yields descending q).
defp parse_header_accept(conn, [h|t], acc, accepted) do
  case Plug.Conn.Utils.media_type(h) do
    {:ok, type, subtype, args} ->
      exts = parse_exts(type, subtype)
      q = parse_q(args)

      if format = (q === 1.0 && find_format(exts, accepted)) do
        put_format(conn, format)
      else
        parse_header_accept(conn, t, [{-q, h, exts}|acc], accepted)
      end

    :error ->
      # Malformed media types are silently skipped.
      parse_header_accept(conn, t, acc, accepted)
  end
end

# All entries consumed: try the collected candidates in descending q order,
# refusing with 406 if none matches.
defp parse_header_accept(conn, [], acc, accepted) do
  acc
  |> Enum.sort()
  |> Enum.find_value(&parse_header_accept(conn, &1, accepted))
  |> Kernel.||(refuse(conn, acc, accepted))
end

# Single-candidate check used by Enum.find_value/2 above; returns the updated
# conn on a match and nil otherwise.
defp parse_header_accept(conn, {_, _, exts}, accepted) do
  if format = find_format(exts, accepted) do
    put_format(conn, format)
  end
end
# Extracts the quality ("q") parameter from the parsed accept-header args,
# defaulting to 1.0 when it is missing or unparseable as a float.
defp parse_q(args) do
  with {:ok, raw} <- Map.fetch(args, "q"),
       {q, _rest} <- Float.parse(raw) do
    q
  else
    _ -> 1.0
  end
end
# Normalizes a type/subtype pair for find_format/2: the full wildcard stays
# "*/*", "type/*" keeps just the type, and concrete pairs are mapped to their
# known file extensions via MIME.
defp parse_exts("*", "*"), do: "*/*"
defp parse_exts(type, "*"), do: type
defp parse_exts(type, subtype), do: MIME.extensions(type <> "/" <> subtype)

# "*/*" takes the server's first (preferred) format; an extension list matches
# the first accepted extension; a bare type range is matched against the MIME
# type of each accepted format in turn.
defp find_format("*/*", accepted), do: Enum.fetch!(accepted, 0)
defp find_format(exts, accepted) when is_list(exts), do: Enum.find(exts, &(&1 in accepted))
defp find_format(_type_range, []), do: nil
defp find_format(type_range, [h|t]) do
  mime_type = MIME.type(h)
  case Plug.Conn.Utils.media_type(mime_type) do
    {:ok, accepted_type, _subtype, _args} when type_range === accepted_type -> h
    _ -> find_format(type_range, t)
  end
end

# Raises the 406 error, listing what the client sent vs. what we support.
@spec refuse(term(), [tuple], [binary]) :: no_return()
defp refuse(_conn, given, accepted) do
  raise Phoenix.NotAcceptableError,
    accepts: accepted,
    message: """
    no supported media type in accept header.
    Expected one of #{inspect accepted} but got the following formats:
      * #{Enum.map_join(given, "\n ", fn {_, header, exts} ->
      inspect(header) <> " with extensions: " <> inspect(exts)
    end)}
    To accept custom formats, register them under the :mime library
    in your config/config.exs file:
        config :mime, :types, %{
          "application/xml" => ["xml"]
        }
    And then run `mix deps.clean --build mime` to force it to be recompiled.
    """
end
@doc """
Fetches the flash storage.
"""
def fetch_flash(conn, _opts \\ []) do
  # Idempotent: if the flash was already fetched, do nothing.
  if Map.get(conn.private, :phoenix_flash) do
    conn
  else
    session_flash = get_session(conn, "phoenix_flash")
    conn = persist_flash(conn, session_flash || %{})

    register_before_send conn, fn conn ->
      flash = conn.private.phoenix_flash
      flash_size = map_size(flash)

      cond do
        # Nothing was ever stored and nothing was set: leave the session alone.
        is_nil(session_flash) and flash_size == 0 ->
          conn

        # Redirects keep the flash in the session for the follow-up request.
        flash_size > 0 and conn.status in 300..308 ->
          put_session(conn, "phoenix_flash", flash)

        # Otherwise the flash was consumed (or emptied): drop it.
        true ->
          delete_session(conn, "phoenix_flash")
      end
    end
  end
end
@doc """
Merges a map into the flash.

Returns the updated connection.

## Examples

    iex> conn = merge_flash(conn, info: "Welcome Back!")
    iex> get_flash(conn, :info)
    "Welcome Back!"

"""
def merge_flash(conn, enumerable) do
  incoming = Map.new(enumerable, fn {key, value} -> {flash_key(key), value} end)
  persist_flash(conn, Map.merge(get_flash(conn), incoming))
end
@doc """
Persists a value in flash.

Returns the updated connection.

## Examples

    iex> conn = put_flash(conn, :info, "Welcome Back!")
    iex> get_flash(conn, :info)
    "Welcome Back!"

"""
def put_flash(conn, key, message) do
  updated = conn |> get_flash() |> Map.put(flash_key(key), message)
  persist_flash(conn, updated)
end
@doc """
Returns a map of previously set flash messages or an empty map.

## Examples

    iex> get_flash(conn)
    %{}

    iex> conn = put_flash(conn, :info, "Welcome Back!")
    iex> get_flash(conn)
    %{"info" => "Welcome Back!"}

"""
def get_flash(conn) do
  case Map.fetch(conn.private, :phoenix_flash) do
    {:ok, flash} -> flash
    :error -> raise ArgumentError, message: "flash not fetched, call fetch_flash/2"
  end
end

@doc """
Returns a message from flash by `key` (or `nil` if no message is available for `key`).

## Examples

    iex> conn = put_flash(conn, :info, "Welcome Back!")
    iex> get_flash(conn, :info)
    "Welcome Back!"

"""
def get_flash(conn, key) do
  conn |> get_flash() |> Map.get(flash_key(key))
end
@doc """
Generates a status message from the template name.

## Examples

    iex> status_message_from_template("404.html")
    "Not Found"
    iex> status_message_from_template("whatever.html")
    "Internal Server Error"

"""
def status_message_from_template(template) do
  [code | _rest] = String.split(template, ".")

  code
  |> String.to_integer()
  |> Plug.Conn.Status.reason_phrase()
rescue
  # Any failure (non-numeric prefix, unknown status code) falls back to 500's phrase.
  _ -> "Internal Server Error"
end
@doc """
Clears all flash messages.
"""
def clear_flash(conn), do: persist_flash(conn, %{})

# Flash keys are normalized to strings so atom and binary keys address the
# same entry.
defp flash_key(key) when is_binary(key), do: key
defp flash_key(key) when is_atom(key), do: Atom.to_string(key)

# Stores the (possibly empty) flash map under the :phoenix_flash private key.
defp persist_flash(conn, flash), do: put_private(conn, :phoenix_flash, flash)
@doc """
Returns the current request path with its default query parameters:

    iex> current_path(conn)
    "/users/123?existing=param"

See `current_path/2` to override the default parameters.

The path is normalized based on the `conn.script_name` and
`conn.path_info`. For example, "/foo//bar/" will become "/foo/bar".
If you want the original path, use `conn.request_path` instead.
"""
def current_path(%Plug.Conn{query_string: query_string} = conn) do
  case query_string do
    "" -> normalized_request_path(conn)
    qs -> normalized_request_path(conn) <> "?" <> qs
  end
end
@doc """
Returns the current path with the given query parameters.

You may also retrieve only the request path by passing an
empty map of params.

## Examples

    iex> current_path(conn)
    "/users/123?existing=param"

    iex> current_path(conn, %{new: "param"})
    "/users/123?new=param"

    iex> current_path(conn, %{filter: %{status: ["draft", "published"]}})
    "/users/123?filter[status][]=draft&filter[status][]=published"

    iex> current_path(conn, %{})
    "/users/123"

The path is normalized based on the `conn.script_name` and
`conn.path_info`. For example, "/foo//bar/" will become "/foo/bar".
If you want the original path, use `conn.request_path` instead.
"""
# `params == %{}` in the guard: an empty map skips query encoding entirely.
def current_path(%Plug.Conn{} = conn, params) when params == %{} do
  normalized_request_path(conn)
end

def current_path(%Plug.Conn{} = conn, params) do
  normalized_request_path(conn) <> "?" <> Plug.Conn.Query.encode(params)
end
# Joins script_name and path_info segments into a single normalized path.
# Because segments come pre-split, duplicate/trailing slashes from the raw
# request path are collapsed (e.g. "/foo//bar/" becomes "/foo/bar").
defp normalized_request_path(%{path_info: info, script_name: script}) do
  segments = script ++ info
  "/" <> Enum.join(segments, "/")
end
@doc """
Returns the current request url with its default query parameters:

    iex> current_url(conn)
    "https://www.example.com/users/123?existing=param"

See `current_url/2` to override the default parameters.
"""
# The URL base comes from the router configuration (respects put_router_url/2).
def current_url(%Plug.Conn{} = conn) do
  Phoenix.Router.Helpers.url(router_module(conn), conn) <> current_path(conn)
end

@doc ~S"""
Returns the current request URL with query params.

The path will be retrieved from the currently requested path via
`current_path/1`. The scheme, host and others will be received from
the URL configuration in your Phoenix endpoint. The reason we don't
use the host and scheme information in the request is because most
applications are behind proxies and the host and scheme may not
actually reflect the host and scheme accessed by the client. If you
want to access the url precisely as requested by the client, see
`Plug.Conn.request_url/1`.

## Examples

    iex> current_url(conn)
    "https://www.example.com/users/123?existing=param"

    iex> current_url(conn, %{new: "param"})
    "https://www.example.com/users/123?new=param"

    iex> current_url(conn, %{})
    "https://www.example.com/users/123"

## Custom URL Generation

In some cases, you'll need to generate a request's URL, but using a
different scheme, different host, etc. This can be accomplished in
two ways.

If you want to do so in a case-by-case basis, you can define a custom
function that gets the endpoint URI configuration and changes it accordingly.
For example, to get the current URL always in HTTPS format:

    def current_secure_url(conn, params \\ %{}) do
      cur_uri = MyAppWeb.Endpoint.struct_url()
      cur_path = Phoenix.Controller.current_path(conn, params)

      MyAppWeb.Router.Helpers.url(%URI{cur_uri | scheme: "https"}) <> cur_path
    end

However, if you want all generated URLs to always have a certain schema,
host, etc, you may use `put_router_url/2`.
"""
def current_url(%Plug.Conn{} = conn, %{} = params) do
  Phoenix.Router.Helpers.url(router_module(conn), conn) <> current_path(conn, params)
end
@doc false
# Derives the conventional view module for a controller, e.g.
# MyApp.UserController -> MyApp.UserView.
def __view__(controller_module) do
  view_name = Phoenix.Naming.unsuffix(controller_module, "Controller") <> "View"
  String.to_atom(view_name)
end
@doc false
# Derives the conventional LayoutView module for a controller, honoring an
# explicit :namespace option when given.
def __layout__(controller_module, opts) do
  namespace =
    if given = Keyword.get(opts, :namespace) do
      given
    else
      # Derive the namespace from the controller module name: drop the final
      # (controller) segment and keep at most two segments of the remainder
      # (the "Elixir" prefix plus the app namespace, e.g. MyAppWeb).
      controller_module
      |> Atom.to_string()
      |> String.split(".")
      |> Enum.drop(-1)
      |> Enum.take(2)
      |> Module.concat()
    end

  Module.concat(namespace, "LayoutView")
end
end
| 32.007509 | 137 | 0.675295 |
fffe03f2077839b4a6796ad60fd71449c0dcb6a6 | 2,602 | ex | Elixir | lib/libxml/nif.ex | melpon/libxml | cc6a1175b3acf8827c31475ab1cf18c262071f21 | [
"MIT"
] | 3 | 2018-02-11T18:55:11.000Z | 2019-09-24T21:19:36.000Z | lib/libxml/nif.ex | melpon/libxml | cc6a1175b3acf8827c31475ab1cf18c262071f21 | [
"MIT"
] | 6 | 2018-01-26T08:36:08.000Z | 2018-04-09T23:40:13.000Z | lib/libxml/nif.ex | melpon/libxml | cc6a1175b3acf8827c31475ab1cf18c262071f21 | [
"MIT"
] | 2 | 2018-03-25T22:50:26.000Z | 2020-07-15T07:31:37.000Z | defmodule Libxml.Nif do
@on_load :load_nif

# Loads the native shared library from the application's priv directory.
# On success the runtime replaces every stub below with its native
# implementation; the stubs only raise when the NIF failed to load.
def load_nif() do
  :ok = :erlang.load_nif(:code.lib_dir(:libxml) ++ '/priv/libxml_nif', 0)
end

# Document parsing and lifecycle.
def xml_read_memory(_contents), do: raise("NIF not implemented")
def xml_copy_doc(_doc, _recursive), do: raise("NIF not implemented")
def xml_free_doc(_doc), do: raise("NIF not implemented")

# Node, attribute and namespace handling.
def xml_get_prop(_char, _attr_name), do: raise("NIF not implemented")
def xml_doc_copy_node(_node, _doc, _extended), do: raise("NIF not implemented")
def xml_doc_get_root_element(_doc), do: raise("NIF not implemented")
def xml_doc_set_root_element(_doc, _node), do: raise("NIF not implemented")
def xml_new_ns(_node, _href, _prefix), do: raise("NIF not implemented")
def xml_unlink_node(_node), do: raise("NIF not implemented")
def xml_copy_node(_node, _extended), do: raise("NIF not implemented")
def xml_free_node(_node), do: raise("NIF not implemented")
def xml_free_node_list(_node), do: raise("NIF not implemented")

# Canonicalization (C14N).
def xml_c14n_doc_dump_memory(_doc, _nodeset, _mode, _inclusive_ns_prefixes, _with_comments),
  do: raise("NIF not implemented")

# XPath context management and evaluation.
def xml_xpath_new_context(_doc), do: raise("NIF not implemented")
def xml_xpath_free_context(_context), do: raise("NIF not implemented")
def xml_xpath_eval(_ctx, _xpath), do: raise("NIF not implemented")
def xml_xpath_free_object(_obj), do: raise("NIF not implemented")

# XML Schema (XSD) parsing and validation.
def xml_schema_new_parser_ctxt(_url), do: raise("NIF not implemented")
def xml_schema_new_doc_parser_ctxt(_doc), do: raise("NIF not implemented")
def xml_schema_parse(_ctxt), do: raise("NIF not implemented")
def xml_schema_new_valid_ctxt(_schema), do: raise("NIF not implemented")
def xml_schema_validate_doc(_ctxt, _doc), do: raise("NIF not implemented")
def xml_schema_free_parser_ctxt(_ctxt), do: raise("NIF not implemented")
def xml_schema_free(_schema), do: raise("NIF not implemented")
def xml_schema_free_valid_ctxt(_ctxt), do: raise("NIF not implemented")
# def xml_schema_set_parser_errors(_ctxt, _err, _warn, _ctx), do: raise("NIF not implemented")

# Accessors for native resources. NOTE(review): names suggest these bridge
# libxml2 C structs to/from Elixir maps — confirm against the C source.
def get_xml_node(_node), do: raise("NIF not implemented")
def set_xml_node(_node, _map), do: raise("NIF not implemented")
def get_xml_char(_char), do: raise("NIF not implemented")
def get_xml_ns(_ns), do: raise("NIF not implemented")
def get_xml_xpath_context(_obj), do: raise("NIF not implemented")
def set_xml_xpath_context(_obj, _map), do: raise("NIF not implemented")
def get_xml_xpath_object(_obj), do: raise("NIF not implemented")
def get_xml_node_set(_nodeset), do: raise("NIF not implemented")
| 50.038462 | 96 | 0.751345 |
fffe12581b1653d3a9ebd68c33ecda8604746bd6 | 8,399 | ex | Elixir | lib/protein/client.ex | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 9 | 2018-01-16T12:25:58.000Z | 2021-11-24T13:19:35.000Z | lib/protein/client.ex | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 3 | 2018-09-07T16:09:57.000Z | 2019-02-19T10:37:13.000Z | lib/protein/client.ex | katafrakt/protein-elixir | f5ea6e079904ea0defa40e99a0e5becaa02a5735 | [
"MIT"
] | 3 | 2018-05-27T08:56:52.000Z | 2021-11-24T13:19:37.000Z | defmodule Protein.Client do
@moduledoc """
Calls services in remote systems.
## Usage
Here's how your RPC client module may look like:
defmodule MyProject.RemoteRPC do
use Protein.Client
# then, declare services with a convention driven config
proto :create_user
# ...or with custom proto file name (equivalent of previous call above)
proto Path.expand("./proto/create_user.proto", __DIR__)
# ...or with a completely custom config (equivalent of previous calls above)
service proto: [from: Path.expand("./proto/create_user.proto", __DIR__)],
service_name: "create_user",
proto_mod: __MODULE__.CreateUser
request_mod: __MODULE__.CreateUser.Request,
response_mod: __MODULE__.CreateUser.Response,
mock_mod: __MODULE__.CreateUserMock
end
Make sure to add it to the supervision tree in `application.ex` as follows:
defmodule MyProject.Application do
use Application
def start(_type, _args) do
import Supervisor.Spec
children = [
supervisor(MyProject.Repo, []),
supervisor(MyProject.Web.Endpoint, []),
# ...
supervisor(MyProject.RemoteRPC, []),
]
opts = [strategy: :one_for_one, name: MyProject.Supervisor]
Supervisor.start_link(children, opts)
end
end
Having that, you can call your RPC as follows:
alias MyProject.RemoteRPC
alias MyProject.RemoteRPC.CreateUser.{Request, Response}
request = %Request{}
case RemoteRPC.call(request) do
{:ok, response = %Response{}} ->
# do stuff with response
{:error, errors}
# do stuff with errors
end
# ...or assume that a failure is out of the question
response = RemoteRPC.call!(request)
# ...or issue a push to non-responding service (recognized by lack of Response structure)
RemoteRPC.push(request)
### Macros and functions
By invoking `use Protein.Client`, you include the following in your client module:
- `Protein.RouterAPI`: macros for defining a list of services and transport options
- `Protein.ClientAPI`: functions for making client requests to remote services
### Mocking for tests
Client call mocking is enabled by default for `Mix.env == :test`. You can configure it explicitly
via the `mocking_enabled` config flag as follows:
config :protein, mocking_enabled: true
You can add a mock module for your specific service to `test/support`. The module should be the
`mock_mod` on sample above (which by default is a `service_mod` with the `Mock` suffix). For
example, to mock the service sourced from `create_user.proto` on example above, you may implement
the following module:
# test/support/my_project/remote_rpc/create_user_mock.ex
alias MyProject.RemoteRPC.CreateUser.{Request, Response}
defmodule MyProject.RemoteRPC.CreateUserMock do
# with default response
def call(request = %Request{}) do
:ok
end
# ...or with specific response
def call(request = %Request{}) do
{:ok, %Response{}}
end
# ...or with default error
def call(request = %Request{}) do
:error
end
# ...or with specific error code
def call(request = %Request{}) do
{:error, :something_happened}
end
# ...or with specific error message
def call(request = %Request{}) do
{:error, "Something went wrong"}
end
# ...or with error related to specific part of the request
def call(request = %Request{}) do
{:error, {:specific_arg_error, struct: "user", struct: "images", repeated: 0}}
end
# ...or with multiple errors (all above syntaxes are supported)
def call(request = %Request{}) do
{:error, [
:something_happened,
"Something went wrong",
{:specific_arg_error, struct: "user", struct: "images", repeated: 0}
]}
end
end
You can define multiple `call` clauses in your mock and use pattern matching to create different
output based on varying input.
Mock bypasses the transport layer (obviously), but it still encodes/decodes your request protobuf
just as regular client does and it still encodes/decodes the response from your mock. This ensures
that your test structures are compilant with specific proto in use.
For non-responding services, mock modules are optional and will be executed only if defined.
Otherwise, the client with mocking mode enabled will still encode the request, but then it will
silently drop it without throwing an error.
"""
alias Protein.{
CallError,
DummyServiceMock,
RequestPayload,
ResponsePayload,
Server,
Transport,
TransportError,
Utils
}
# Injects the client's public surface into the using module:
# - Protein.RouterAPI: macros for declaring services and transport options
# - Protein.ClientAPI: call/call!/push wrappers that delegate back here
# - a Supervisor with no children of its own, so the client module can be
#   placed directly in an application's supervision tree.
defmacro __using__(_) do
  quote do
    use Protein.{RouterAPI, ClientAPI}
    use Supervisor
    alias Protein.{Transport, Utils}

    def start_link(_opts \\ []) do
      Supervisor.start_link(__MODULE__, [], name: __MODULE__)
    end

    def init(_) do
      Supervisor.init([], strategy: :one_for_one)
    end
  end
end
@doc false
# Synchronously calls a responding remote service.
# Encodes `request_struct` with the service's request module, then routes
# through either the mock (when mocking is enabled) or the configured
# transport adapter. Returns `{:ok, decoded_response}` or `{:error, errors}`.
def call(request_struct, service_opts, transport_opts) do
  service_name = Keyword.fetch!(service_opts, :service_name)
  request_mod = Keyword.fetch!(service_opts, :request_mod)
  response_mod = Keyword.fetch!(service_opts, :response_mod)
  mock_mod = Keyword.fetch!(service_opts, :mock_mod)

  # A missing response module marks a non-responding (push-only) service.
  unless Code.ensure_loaded?(response_mod),
    do: raise("Called non-responding service")

  request_buf = request_mod.encode(request_struct)

  # call_via_mock/4 returns nil when mocking is disabled, so `||` falls
  # through to the real transport adapter.
  result =
    call_via_mock(request_buf, request_mod, response_mod, mock_mod) ||
      call_via_adapter(service_name, request_buf, transport_opts)

  case result do
    {:ok, response_buf} ->
      {:ok, response_mod.decode(response_buf)}

    {:error, errors} ->
      {:error, errors}
  end
end
@doc false
# Same as call/3, but raises Protein.CallError on failure instead of
# returning an error tuple.
def call!(request_struct, service_opts, transport_opts) do
  result = call(request_struct, service_opts, transport_opts)
  handle_non_failing_response(result)
end
# Runs the request against the mock module when mocking is enabled.
# Returns nil otherwise (the `if` has no else branch), letting the caller
# fall back to the real transport. Any crash inside the mock is re-raised
# as a TransportError tagged with the :mock adapter.
defp call_via_mock(request_buf, request_mod, response_mod, mock_mod) do
  if Utils.mocking_enabled?(),
    do: Server.process_service(mock_mod, request_buf, request_mod, response_mod)
rescue
  error -> raise TransportError, adapter: :mock, context: error
end
# Sends the encoded request through the configured transport adapter and
# decodes the transport-level response envelope.
defp call_via_adapter(service_name, request_buf, opts) do
  {adapter, adapter_opts} = Keyword.pop(opts, :adapter)
  payload = RequestPayload.encode(service_name, request_buf)
  adapter_mod = Utils.resolve_adapter(adapter)
  response_payload = adapter_mod.call(payload, adapter_opts)
  ResponsePayload.decode(response_payload)
end
# Unwraps a call/3 result, raising CallError when it carries errors.
defp handle_non_failing_response(result) do
  case result do
    {:ok, response} -> response
    {:error, errors} -> raise CallError, errors: errors
  end
end
@doc false
# Fire-and-forget push to a non-responding service. Raises if the service
# has a response module (those must be invoked with call/3). Always
# returns :ok.
def push(request_struct, service_opts, transport_opts) do
  service_name = Keyword.fetch!(service_opts, :service_name)
  request_mod = Keyword.fetch!(service_opts, :request_mod)
  response_mod = Keyword.fetch!(service_opts, :response_mod)
  mock_mod = Keyword.fetch!(service_opts, :mock_mod)

  if Code.ensure_loaded?(response_mod),
    do: raise("Pushed to responding service")

  request_buf = request_mod.encode(request_struct)

  # The mock path returns nil when mocking is disabled, so `||` falls
  # through to the real adapter.
  push_via_mock(request_buf, request_mod, mock_mod) ||
    push_via_adapter(service_name, request_buf, transport_opts)

  :ok
end
# Processes the push through the mock module when mocking is enabled.
# Mock modules are optional for non-responding services: if the mock is
# not compiled, DummyServiceMock handles the (still decoded) request.
# Returns nil when mocking is disabled; mock crashes become
# TransportErrors tagged with the :mock adapter.
defp push_via_mock(request_buf, request_mod, mock_mod) do
  if Utils.mocking_enabled?() do
    mock_or_default_mod =
      if Code.ensure_loaded?(mock_mod),
        do: mock_mod,
        else: DummyServiceMock

    Server.process_service(mock_or_default_mod, request_buf, request_mod)
  end
rescue
  error -> raise TransportError, adapter: :mock, context: error
end
# Hands the encoded push payload to the transport adapter (no reply is
# awaited).
defp push_via_adapter(service_name, request_buf, opts) do
  {adapter, adapter_opts} = Keyword.pop(opts, :adapter)
  payload = RequestPayload.encode(service_name, request_buf)
  adapter_mod = Utils.resolve_adapter(adapter)
  adapter_mod.push(payload, adapter_opts)
end
end
| 31.456929 | 100 | 0.673056 |
fffe5c0a2c978c2404d987cc6a54ccd36fe37997 | 1,907 | exs | Elixir | clients/firebase_rules/mix.exs | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/firebase_rules/mix.exs | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/firebase_rules/mix.exs | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.FirebaseRules.Mixfile do
  @moduledoc false

  use Mix.Project

  @version "0.14.0"

  # Mix project definition for the generated Firebase Rules API client.
  def project() do
    [
      app: :google_api_firebase_rules,
      version: @version,
      elixir: "~> 1.6",
      # `Mix.env()` is written with parentheses: modern Elixir warns on
      # zero-arity calls without them.
      build_embedded: Mix.env() == :prod,
      start_permanent: Mix.env() == :prod,
      description: description(),
      package: package(),
      deps: deps(),
      source_url: "https://github.com/googleapis/elixir-google-api/tree/master/clients/firebase_rules"
    ]
  end

  # OTP application configuration; only :logger is required at runtime.
  def application() do
    [extra_applications: [:logger]]
  end

  # Runtime dependency on the shared Google API Gax library; ExDoc only
  # for docs generation in dev.
  defp deps() do
    [
      {:google_gax, "~> 0.2"},
      {:ex_doc, "~> 0.16", only: :dev}
    ]
  end

  # One-paragraph Hex description.
  defp description() do
    """
    Firebase Rules API client library. Creates and manages rules that determine when a Firebase Rules-enabled service should permit a request.
    """
  end

  # Hex package metadata (files shipped, maintainers, license, links).
  defp package() do
    [
      files: ["lib", "mix.exs", "README*", "LICENSE"],
      maintainers: ["Jeff Ching", "Daniel Azuma"],
      licenses: ["Apache 2.0"],
      links: %{
        "GitHub" => "https://github.com/googleapis/elixir-google-api/tree/master/clients/firebase_rules",
        "Homepage" => "https://firebase.google.com/docs/storage/security"
      }
    ]
  end
end
| 28.044118 | 142 | 0.66387 |
fffe852a97c3e9465d8441af393a88bd58704712 | 24,162 | ex | Elixir | lib/surface/compiler/eex_engine.ex | thorsten-de/surface | 67ebc2eadec22a22e043394f37d0d8d0e0e81b77 | [
"MIT"
] | null | null | null | lib/surface/compiler/eex_engine.ex | thorsten-de/surface | 67ebc2eadec22a22e043394f37d0d8d0e0e81b77 | [
"MIT"
] | null | null | null | lib/surface/compiler/eex_engine.ex | thorsten-de/surface | 67ebc2eadec22a22e043394f37d0d8d0e0e81b77 | [
"MIT"
] | null | null | null | defmodule Surface.Compiler.EExEngine do
@moduledoc """
This module glues together surface's AST and Phoenix.LiveView.Engine to actually render an AST.
It takes a list of Surface AST nodes, and processes them into a sequence of static raw html items and
dynamic pieces. It then converts these into tokens which an EEx engine can understand (see EEx.Tokenizer
for information on this). Finally, it passes these tokens into the engine sequentially in the same
manner as EEx.Compiler.compile/2
"""
alias Surface.AST
alias Surface.IOHelper
# while this should technically work with other engines, the main use case is integration with Phoenix.LiveView.Engine
@default_engine Phoenix.LiveView.Engine
@spec translate(
        [Surface.AST.t()],
        nil | maybe_improper_list | map
      ) :: any
# Entry point: flattens the AST into a static/dynamic token sequence and
# drives it through the EEx engine (Phoenix.LiveView.Engine by default,
# overridable via opts[:engine]). With opts[:debug] truthy, the generated
# expression is printed to stdout.
def translate(nodes, opts \\ []) do
  state = %{
    engine: opts[:engine] || @default_engine,
    depth: 0,
    context: []
  }

  nodes
  |> to_token_sequence()
  |> generate_buffer(state.engine.init(opts), state)
  |> maybe_print_expression(
    opts[:debug],
    opts[:file] || "nofile",
    opts[:line] || 1
  )
end
# Normalizes an AST node list into a flat sequence of {:text, binary}
# chunks and dynamic AST nodes, merging adjacent static output.
defp to_token_sequence(nodes) do
  combine_static_portions(List.flatten(to_dynamic_nested_html(nodes)))
end
# Folds the token sequence into the engine buffer, mirroring how an EEx
# compiler drives an engine: static text via handle_text/2, non-printing
# expressions with the "" marker, printing expressions with "=".
defp generate_buffer([], buffer, state) do
  state.engine.handle_body(buffer)
end

defp generate_buffer([{:text, chars} | tail], buffer, state) do
  buffer = state.engine.handle_text(buffer, chars)
  generate_buffer(tail, buffer, state)
end

# AST.Expr nodes carry side-effect-only code (e.g. `require`) whose value
# must not be rendered, hence the "" marker.
defp generate_buffer([%AST.Expr{} = expr | tail], buffer, state) do
  buffer = state.engine.handle_expr(buffer, "", to_expression(expr, buffer, state))
  generate_buffer(tail, buffer, state)
end

defp generate_buffer([expr | tail], buffer, state) do
  buffer = state.engine.handle_expr(buffer, "=", to_expression(expr, buffer, state))
  generate_buffer(tail, buffer, state)
end
# Converts a single AST node (or a list of nodes, wrapped in a block)
# into the quoted Elixir expression handed to the engine.
defp to_expression(nodes, buffer, state)

defp to_expression([node], buffer, state), do: to_expression(node, buffer, state)

defp to_expression(nodes, buffer, state) when is_list(nodes) do
  children =
    for node <- nodes do
      to_expression(node, buffer, state)
    end

  {:__block__, [], children}
end

# Static text is emitted pre-escaped as a safe value.
defp to_expression({:text, value}, _buffer, _state), do: {:safe, value}

defp to_expression(%AST.AttributeExpr{value: expr}, _buffer, _state), do: expr

defp to_expression(%AST.Interpolation{value: expr}, _buffer, _state), do: expr

defp to_expression(%AST.Expr{value: expr}, _buffer, _state) when is_list(expr),
  do: {:__block__, [], expr}

defp to_expression(%AST.Expr{value: expr}, _buffer, _state), do: {:__block__, [], [expr]}
# Comprehension: compiles the children in a nested buffer (one depth
# level deeper, :for pushed onto the context) and splices them in as the
# do-block of a quoted `for`.
defp to_expression(
       %AST.For{generator: %AST.AttributeExpr{value: generator}, children: children} = comprehension,
       buffer,
       state
     ) do
  buffer =
    handle_nested_block(children, buffer, %{
      state
      | depth: state.depth + 1,
        context: [:for | state.context]
    })

  generator_expr = generator ++ [[do: buffer]]

  {:for, [generated: true], generator_expr}
  |> maybe_print_expression(comprehension)
end
# Conditional: both branches are compiled into their own nested buffers
# and spliced into a quoted `if`.
defp to_expression(
       %AST.If{
         condition: %AST.AttributeExpr{value: condition},
         children: if_children,
         else: else_children
       } = conditional,
       buffer,
       state
     ) do
  if_buffer =
    handle_nested_block(if_children, buffer, %{
      state
      | depth: state.depth + 1,
        context: [:if | state.context]
    })

  else_buffer =
    handle_nested_block(else_children, buffer, %{
      state
      | depth: state.depth + 1,
        context: [:if | state.context]
    })

  {:if, [generated: true], [condition, [do: if_buffer, else: else_buffer]]}
  |> maybe_print_expression(conditional)
end
# "case" block: each sub-block becomes one `->` clause of a quoted
# `case` over the block's expression.
defp to_expression(%AST.Block{name: "case"} = block, buffer, state) do
  %AST.Block{expression: case_expr, sub_blocks: sub_blocks} = block

  state = %{state | depth: state.depth + 1, context: [:case | state.context]}

  match_blocks =
    Enum.flat_map(sub_blocks, fn %AST.SubBlock{children: children, expression: expr} ->
      match_body = handle_nested_block(children, buffer, state)

      quote do
        unquote(expr) -> unquote(match_body)
      end
    end)

  quote do
    case unquote(case_expr) do
      unquote(match_blocks)
    end
  end
  |> maybe_print_expression(block)
end
# Slot rendering: emits code that renders the caller-provided template
# for this slot when one exists, otherwise falls back to the slot's
# compiled default content.
defp to_expression(
       %AST.Slot{
         name: slot_name,
         index: index_ast,
         args: args_expr,
         default: default
       },
       buffer,
       state
     ) do
  # The slot index may be a literal or a runtime expression.
  slot_index =
    case index_ast do
      %AST.AttributeExpr{value: expr} -> expr
      %AST.Literal{value: value} -> value
    end

  # Inside a child component's template the parent context (`the_context`)
  # is merged over the current @__context__ assign.
  context_expr =
    if is_child_component?(state) do
      quote generated: true do
        Map.merge(@__context__, the_context)
      end
    else
      quote do
        @__context__
      end
    end

  # TODO: map names somehow?
  # Renders the caller's inner block, dispatching on
  # {slot_name, index, args-map, context}.
  slot_content_expr =
    quote generated: true do
      if @inner_block do
        render_block(
          @inner_block,
          {
            unquote(slot_name),
            unquote(slot_index),
            Map.new(unquote(args_expr)),
            unquote(context_expr)
          }
        )
      end
    end

  default_value =
    handle_nested_block(default, buffer, %{
      state
      | depth: state.depth + 1,
        context: [:slot | state.context]
    })

  quote generated: true do
    if Enum.member?(@__surface__.provided_templates, unquote(slot_name)) do
      unquote(slot_content_expr)
    else
      unquote(default_value)
    end
  end
end
# LiveView components are rendered with live_render/3; nil-valued props
# are dropped from the session payload.
defp to_expression(
       %AST.Component{
         module: module,
         type: Surface.LiveView,
         props: props
       } = component,
       _buffer,
       _state
     ) do
  props_expr =
    collect_component_props(module, props)
    |> Enum.reject(fn {_, value} -> is_nil(value) end)

  quote generated: true do
    live_render(
      @socket,
      unquote(module),
      unquote(props_expr)
    )
  end
  |> maybe_print_expression(component)
end
# Components and slotable components: collects static/dynamic props, the
# context expression, and the compiled slot templates, then emits the
# live_component call (shape depends on the installed LV version — see
# live_component_ast/9).
defp to_expression(
       %ast_type{
         module: module,
         props: props,
         dynamic_props: dynamic_props,
         templates: templates,
         meta: meta
       } = component,
       buffer,
       state
     )
     when ast_type in [AST.Component, AST.SlotableComponent] do
  props_expr = collect_component_props(module, props)
  dynamic_props_expr = handle_dynamic_props(dynamic_props)

  # Mark the caller module as context-using when any child uses context.
  if module.__use_context__?() do
    Module.put_attribute(meta.caller.module, :use_context?, true)
  end

  # Skip context propagation entirely when the component has no slots and
  # does not use context; otherwise merge/forward @__context__.
  context_expr =
    cond do
      module.__slots__() == [] and not module.__use_context__?() ->
        quote generated: true do
          %{}
        end

      is_child_component?(state) ->
        quote generated: true do
          Map.merge(@__context__ || %{}, the_context)
        end

      true ->
        quote generated: true do
          @__context__ || %{}
        end
    end

  {do_block, slot_meta, slot_props} = collect_slot_meta(component, templates, buffer, state)

  module
  |> live_component_ast(
    context_expr,
    props_expr,
    dynamic_props_expr,
    slot_props,
    slot_meta,
    module,
    meta.node_alias,
    do_block
  )
  |> maybe_print_expression(component)
end
# Detect Phoenix Live View Version to determine if `live_component` takes
# the `socket` as first argument.
# The branch for >= 0.15.6 omits @socket; the legacy branch passes it.
# The version check runs at compile time, so only one clause is defined.
Application.load(:phoenix_live_view)

:phoenix_live_view
|> Application.spec(:vsn)
|> List.to_string()
|> Version.match?(">= 0.15.6")
|> if do
  defp live_component_ast(
         module,
         context_expr,
         props_expr,
         dynamic_props_expr,
         slot_props,
         slot_meta,
         module,
         node_alias,
         do_block
       ) do
    quote generated: true do
      live_component(
        unquote(module),
        Surface.build_assigns(
          unquote(context_expr),
          unquote(props_expr),
          unquote(dynamic_props_expr),
          unquote(slot_props),
          unquote(slot_meta),
          unquote(module),
          unquote(node_alias)
        ),
        unquote(do_block)
      )
    end
  end
else
  # TODO: Remove when support for phoenix_live_view <= 0.15.5 is dropped
  defp live_component_ast(
         module,
         context_expr,
         props_expr,
         dynamic_props_expr,
         slot_props,
         slot_meta,
         module,
         node_alias,
         do_block
       ) do
    quote generated: true do
      live_component(
        @socket,
        unquote(module),
        Surface.build_assigns(
          unquote(context_expr),
          unquote(props_expr),
          unquote(dynamic_props_expr),
          unquote(slot_props),
          unquote(slot_meta),
          unquote(module),
          unquote(node_alias)
        ),
        unquote(do_block)
      )
    end
  end
end
# Dynamic props: nil means none were declared; otherwise unwrap the
# quoted expression from the dynamic attribute node.
defp handle_dynamic_props(nil), do: []

defp handle_dynamic_props(%AST.DynamicAttribute{expr: %AST.AttributeExpr{value: value}}) do
  value
end
# Builds the keyword list of props passed to a component. Unknown props
# are dropped; props declared with `accumulate: true` in their type opts
# are gathered into a list; both accumulators are built reversed and
# restored to source order at the end.
defp collect_component_props(module, attrs) do
  {props, props_acc} =
    Enum.reduce(attrs, {[], %{}}, fn attr, {props, props_acc} ->
      %AST.Attribute{name: prop_name, type: type, type_opts: type_opts, value: expr} = attr

      cond do
        !module.__validate_prop__(prop_name) ->
          {props, props_acc}

        type_opts[:accumulate] ->
          current_value = props_acc[prop_name] || []
          updated_value = [to_prop_expr(expr, type) | current_value]
          {props, Map.put(props_acc, prop_name, updated_value)}

        true ->
          {[{prop_name, to_prop_expr(expr, type)} | props], props_acc}
      end
    end)

  Enum.reverse(props) ++ Enum.map(props_acc, fn {k, v} -> {k, Enum.reverse(v)} end)
end
# Compiles every slot's templates and returns three pieces:
# - do_block: `->` clauses dispatching on {slot_name, index, let-map, ctx}
# - slot_meta: per-slot %{size: n} metadata
# - slot_props: quoted prop maps for each template instance
defp collect_slot_meta(component, templates, buffer, state) do
  slot_info =
    templates
    |> Enum.map(fn {name, templates_for_slot} ->
      state = %{state | context: [:template | state.context]}
      nested_templates = handle_templates(component, templates_for_slot, buffer, state)

      {name, Enum.count(templates_for_slot), nested_templates}
    end)

  # One match clause per template instance; an empty list means no
  # do-block is passed to live_component at all.
  do_block =
    slot_info
    |> Enum.map(fn {name, _size, infos} ->
      infos
      |> Enum.with_index()
      |> Enum.map(fn {{let, _, body}, index} ->
        quote generated: true do
          {
            unquote(name),
            unquote(index),
            unquote({:%{}, [generated: true], let}),
            the_context
          } ->
            unquote(body)
        end
      end)
    end)
    |> List.flatten()
    |> case do
      [] -> []
      block -> [do: block]
    end

  slot_props =
    for {name, _, infos} <- slot_info,
        not Enum.empty?(infos) do
      {name, Enum.map(infos, fn {_, props, _} -> {:%{}, [generated: true], props} end)}
    end

  slot_meta =
    for {name, size, _infos} <- slot_info do
      meta_value =
        quote generated: true do
          %{size: unquote(size)}
        end

      {name, meta_value}
    end

  {do_block, slot_meta, slot_props}
end
# Renders a nested block (slot default, template body, branch of a
# conditional, ...) into its own engine buffer via handle_begin/handle_end
# so it can be spliced into the parent's quoted code as one expression.
defp handle_nested_block(block, buffer, state) when is_list(block) do
  buffer = state.engine.handle_begin(buffer)

  buffer =
    Enum.reduce(block, buffer, fn
      {:text, chars}, buffer ->
        state.engine.handle_text(buffer, chars)

      %AST.Expr{} = expr, buffer ->
        state.engine.handle_expr(buffer, "", to_expression(expr, buffer, state))

      expr, buffer ->
        state.engine.handle_expr(buffer, "=", to_expression(expr, buffer, state))
    end)

  state.engine.handle_end(buffer)
end

# Single-node variant.
defp handle_nested_block(block, buffer, state) do
  buffer = state.engine.handle_begin(buffer)
  buffer = state.engine.handle_expr(buffer, "=", to_expression(block, buffer, state))
  state.engine.handle_end(buffer)
end
# Compiles a slot's templates into {let_bindings, props, compiled_body}
# triples. Plain <template> nodes carry only let bindings; slotable
# components also carry their props (merged over the module's defaults)
# and, unless renderless, are re-emitted as a nested component.
defp handle_templates(_component, [], _, _), do: []

defp handle_templates(
       component,
       [
         %AST.Template{
           name: name,
           let: let,
           children: children
         }
         | tail
       ],
       buffer,
       state
     ) do
  [
    {add_default_bindings(component, name, let), [],
     handle_nested_block(children, buffer, %{state | depth: state.depth + 1})}
    | handle_templates(component, tail, buffer, state)
  ]
end

defp handle_templates(component, [slotable | tail], buffer, state) do
  %AST.SlotableComponent{
    slot: name,
    module: module,
    let: let,
    props: props,
    templates: %{default: default}
  } = slotable

  # Non-renderless slotables render as a full component; renderless ones
  # contribute only their default template's children (or nothing).
  template =
    cond do
      !module.__renderless__?() ->
        [
          %AST.Component{
            module: module,
            type: slotable.type,
            props: props,
            dynamic_props: nil,
            directives: [],
            templates: slotable.templates,
            meta: slotable.meta,
            debug: slotable.debug
          }
        ]

      Enum.empty?(default) ->
        []

      true ->
        %AST.Template{children: children} = List.first(default)
        children
    end

  props = collect_component_props(module, props)
  default_props = Surface.default_props(module)

  [
    {add_default_bindings(component, name, let), Keyword.merge(default_props, props),
     handle_nested_block(template, buffer, %{state | depth: state.depth + 1})}
    | handle_templates(component, tail, buffer, state)
  ]
end
# Adds implicit `let` bindings for slot args that declare a generator:
# when the caller did not bind the arg name explicitly, the binding is
# taken from the generator expression of the matching prop. Explicit
# bindings in `let` win via Keyword.merge/2.
defp add_default_bindings(%{module: module, props: props}, name, let) do
  (module.__get_slot__(name)[:opts][:args] || [])
  |> Enum.reject(fn
    %{generator: nil} -> true
    %{name: name} -> Keyword.has_key?(let, name)
  end)
  |> Enum.map(fn %{generator: gen, name: name} ->
    case find_attribute_value(props, gen, nil) do
      %AST.AttributeExpr{value: {binding, _}} ->
        {name, binding}

      _ ->
        nil
    end
  end)
  |> Enum.reject(fn value -> value == nil end)
  |> Keyword.merge(let)
end
# Walks the attribute list and returns the value of the first attribute
# whose name matches `name`, or `default` when none does.
defp find_attribute_value([], _name, default), do: default

defp find_attribute_value([%AST.Attribute{name: candidate, value: value} | _rest], name, _default)
     when candidate == name,
     do: value

defp find_attribute_value([_attr | rest], name, default),
  do: find_attribute_value(rest, name, default)
# Expressions are handed to the type handler so it can transform the
# quoted code for the prop's type; literal values pass through unchanged.
defp to_prop_expr(%AST.AttributeExpr{value: value, meta: meta}, type),
  do: Surface.TypeHandler.update_prop_expr(type, value, meta)

defp to_prop_expr(%AST.Literal{value: value}, _type), do: value
# Collapses runs of adjacent binaries into single {:text, binary} tokens,
# leaving dynamic nodes in place. Both accumulators are built in reverse
# and restored to order on emit.
defp combine_static_portions(nodes, accumulators \\ {[], []})

defp combine_static_portions([], {[], node_acc}), do: Enum.reverse(node_acc)

# Flush any trailing static run before terminating.
defp combine_static_portions([], {static_acc, node_acc}),
  do:
    combine_static_portions(
      [],
      {[], [{:text, join_string_list(static_acc)} | node_acc]}
    )

defp combine_static_portions([str | values], {static_acc, node_acc}) when is_binary(str),
  do: combine_static_portions(values, {[str | static_acc], node_acc})

# A dynamic node ends the current static run.
defp combine_static_portions([node | values], {static_acc, node_acc}) do
  node_acc =
    case static_acc do
      [] -> node_acc
      list -> [{:text, join_string_list(list)} | node_acc]
    end

  combine_static_portions(values, {[], [node | node_acc]})
end
# The static accumulator is built in reverse; restore source order and
# collapse the iodata into a single binary.
defp join_string_list(list) do
  IO.iodata_to_binary(Enum.reverse(list))
end
# First pass: walks the AST emitting raw HTML strings for static markup
# while normalizing nested children (containers, slots, conditionals,
# comprehensions, blocks) into token sequences of their own.
defp to_dynamic_nested_html([]), do: []

defp to_dynamic_nested_html([%AST.Literal{value: text} | nodes]) do
  [text | to_dynamic_nested_html(nodes)]
end

# Containers tied to a module additionally emit a `require` for it.
defp to_dynamic_nested_html([
       %AST.Container{
         children: children,
         meta: %AST.Meta{
           module: mod,
           line: line
         }
       }
       | nodes
     ])
     when not is_nil(mod) do
  [require_expr(mod, line), to_dynamic_nested_html(children) | to_dynamic_nested_html(nodes)]
end

defp to_dynamic_nested_html([%AST.Container{children: children} | nodes]) do
  [to_dynamic_nested_html(children) | to_dynamic_nested_html(nodes)]
end

defp to_dynamic_nested_html([%AST.Slot{default: default} = slot | nodes]) do
  [%{slot | default: to_token_sequence(default)} | to_dynamic_nested_html(nodes)]
end

defp to_dynamic_nested_html([
       %AST.If{children: if_children, else: else_children} = conditional | nodes
     ]) do
  [
    %{
      conditional
      | children: to_token_sequence(if_children),
        else: to_token_sequence(else_children)
    },
    to_dynamic_nested_html(nodes)
  ]
end

defp to_dynamic_nested_html([%AST.For{children: children} = comprehension | nodes]) do
  [%{comprehension | children: to_token_sequence(children)}, to_dynamic_nested_html(nodes)]
end

defp to_dynamic_nested_html([%AST.Block{sub_blocks: sub_blocks} = block | nodes]) do
  [%{block | sub_blocks: to_token_sequence(sub_blocks)} | to_dynamic_nested_html(nodes)]
end

defp to_dynamic_nested_html([%AST.SubBlock{children: children} = sub_block | nodes]) do
  [%{sub_block | children: to_token_sequence(children)} | to_dynamic_nested_html(nodes)]
end
# Void elements: open tag only, no children or closing tag.
defp to_dynamic_nested_html([
       %AST.VoidTag{
         element: element,
         attributes: attributes
       }
       | nodes
     ]) do
  [
    "<",
    element,
    to_html_attributes(attributes),
    ">",
    to_dynamic_nested_html(nodes)
  ]
end

# Regular elements: open tag, compiled children, close tag.
defp to_dynamic_nested_html([
       %AST.Tag{
         element: element,
         attributes: attributes,
         children: children
       }
       | nodes
     ]) do
  [
    "<",
    element,
    to_html_attributes(attributes),
    ">",
    to_dynamic_nested_html(children),
    "</",
    element,
    ">",
    to_dynamic_nested_html(nodes)
  ]
end
# Components: normalizes every template's children into token sequences.
# Slotable-component templates are recursively compiled, and their emitted
# require/nested expressions are hoisted in front of the component node.
defp to_dynamic_nested_html([
       %type{module: mod, templates: templates_by_name} = component | nodes
     ])
     when type in [AST.Component, AST.SlotableComponent] do
  {requires, templates_by_name} =
    Enum.reduce(templates_by_name, {[], %{}}, fn {name, templates}, {requires_acc, by_name} ->
      {requires, templates} =
        Enum.reduce(templates, {requires_acc, []}, fn
          %AST.Template{children: children} = template, {requires, templates} ->
            {requires, [%{template | children: to_token_sequence(children)} | templates]}

          %AST.SlotableComponent{} = template, {requires, templates} ->
            [cmp, nested, translated] = to_dynamic_nested_html([template])
            {[cmp, nested | requires], [translated | templates]}
        end)

      {requires, Map.put(by_name, name, Enum.reverse(templates))}
    end)

  [
    require_expr(mod, component.meta.line),
    requires,
    %{component | templates: templates_by_name} | to_dynamic_nested_html(nodes)
  ]
end
# Compile/parse errors render inline as a highlighted <span>; when the
# node references a module, its `require` is still emitted so downstream
# compilation keeps working.
defp to_dynamic_nested_html([
       %AST.Error{message: message, meta: %AST.Meta{module: mod, line: line}} | nodes
     ])
     when not is_nil(mod),
     do: [
       require_expr(mod, line),
       ~S(<span style="color: red; border: 2px solid red; padding: 3px"> Error: ),
       escape_message(message),
       ~S(</span>) | to_dynamic_nested_html(nodes)
     ]

defp to_dynamic_nested_html([%AST.Error{message: message} | nodes]),
  do: [
    ~S(<span style="color: red; border: 2px solid red; padding: 3px"> Error: ),
    escape_message(message),
    ~S(</span>) | to_dynamic_nested_html(nodes)
  ]

# Interpolations and raw expressions pass through to the second pass.
defp to_dynamic_nested_html([%AST.Interpolation{} = value | nodes]),
  do: [value | to_dynamic_nested_html(nodes)]

defp to_dynamic_nested_html([%AST.Expr{} = value | nodes]),
  do: [value | to_dynamic_nested_html(nodes)]
# Compiles attribute nodes into iodata and quoted expressions:
# literal strings are inlined verbatim; other literals go through the
# type handler at compile time; constant expressions are evaluated at
# compile time; everything else becomes a runtime TypeHandler call.
defp to_html_attributes([]), do: []

defp to_html_attributes([
       %AST.Attribute{name: name, type: :string, value: %AST.Literal{value: value}}
       | attributes
     ])
     when is_binary(value) do
  [[" ", to_string(name), "=", ~S("), value, ~S(")], to_html_attributes(attributes)]
end

defp to_html_attributes([
       %AST.Attribute{name: name, type: type, value: %AST.Literal{value: value}}
       | attributes
     ]) do
  runtime_value = Surface.TypeHandler.expr_to_value!(type, name, [value], [], nil, value)
  [Surface.TypeHandler.attr_to_html!(type, name, runtime_value), to_html_attributes(attributes)]
end

# Constant dynamic attributes: evaluate the quoted expression now and
# emit static HTML; evaluation failures become compile errors at the
# expression's location.
defp to_html_attributes([
       %AST.DynamicAttribute{
         expr: %AST.AttributeExpr{constant?: true} = expr
       }
       | attributes
     ]) do
  try do
    {expr_value, _} = Code.eval_quoted(expr.value)

    new_attrs =
      Enum.map(expr_value, fn {name, {type, value}} ->
        evaluate_literal_attribute(name, type, value, expr.meta)
      end)

    [new_attrs | to_html_attributes(attributes)]
  rescue
    e in RuntimeError ->
      IOHelper.compile_error(e.message, expr.meta.file, expr.meta.line)
  end
end

# Non-constant dynamic attributes: render each {name, {type, value}} pair
# at runtime.
defp to_html_attributes([
       %AST.DynamicAttribute{expr: %AST.AttributeExpr{value: expr_value} = expr} | attributes
     ]) do
  value =
    quote generated: true do
      for {name, {type, value}} <- unquote(expr_value) do
        Phoenix.HTML.raw(Surface.TypeHandler.attr_to_html!(type, name, value))
      end
    end

  [%{expr | value: value} | to_html_attributes(attributes)]
end

# Constant single attribute expression: evaluate at compile time.
defp to_html_attributes([
       %AST.Attribute{value: %AST.AttributeExpr{constant?: true} = expr} = attr
       | attributes
     ]) do
  try do
    {expr_value, _} = Code.eval_quoted(expr.value)

    value = evaluate_literal_attribute(attr.name, attr.type, expr_value, attr.meta)

    [value | to_html_attributes(attributes)]
  rescue
    e in RuntimeError ->
      IOHelper.compile_error(e.message, expr.meta.file, expr.meta.line)
  end
end

# General case: defer rendering to the type handler at runtime.
defp to_html_attributes([
       %AST.Attribute{
         name: name,
         type: type,
         value: %AST.AttributeExpr{value: expr_value} = expr
       }
       | attributes
     ]) do
  value =
    quote generated: true do
      Phoenix.HTML.raw(Surface.TypeHandler.attr_to_html!(unquote(type), unquote(name), unquote(expr_value)))
    end

  [%{expr | value: value} | to_html_attributes(attributes)]
end
# Node variant: prints the generated expression when the node carries
# `debug: [:code, ...]`.
defp maybe_print_expression(expr, node) do
  debug? = Map.has_key?(node, :debug) and :code in node.debug
  maybe_print_expression(expr, debug?, node.meta.file, node.meta.line)
end
# Renders a literal attribute at compile time, turning type-handler
# errors into compile errors at the attribute's location.
defp evaluate_literal_attribute(name, type, value, meta) do
  result = Surface.TypeHandler.attr_to_html(type, name, value)

  case result do
    {:ok, attr} ->
      attr

    {:error, message} ->
      IOHelper.compile_error(message, meta.file, meta.line)
  end
end
# Prints the generated expression (formatted) when `print?` is truthy;
# always returns `expr` unchanged.
defp maybe_print_expression(expr, print?, file, line) do
  if print?, do: print_expression(expr, file, line)
  expr
end

# Formats and dumps a quoted expression between debug markers.
defp print_expression(expr, file, line) do
  IO.puts(">>> DEBUG(EXPRESSION): #{file}:#{line}")
  formatted = expr |> Macro.to_string() |> Code.format_string!(line_length: 120)
  IO.puts(formatted)
  IO.puts("<<<")
end
# Builds a non-printing AST.Expr that `require`s the given module at the
# template's line, so its macros are available to the generated code.
defp require_expr(module, line) do
  %AST.Expr{
    value:
      quote generated: true, line: line do
        require unquote(module)
      end,
    meta: %AST.Meta{}
  }
end
# True when rendering inside a component template (depth > 0 with
# :template somewhere in the context stack); used to decide how slot and
# component context expressions are built.
defp is_child_component?(state) do
  state.depth > 0 and :template in state.context
end
# HTML-escapes an error message and flattens the result to a binary.
defp escape_message(message) do
  message
  |> Phoenix.HTML.html_escape()
  |> elem(1)
  |> IO.iodata_to_binary()
end
end
| 27.900693 | 120 | 0.600944 |
fffe8576fe9b61abf22d3cdd046246683e528ba8 | 604 | exs | Elixir | day04/mix.exs | bjorng/advent-of-code-2015 | d59ac2fc4a93c86ebfe3917d89ebaad3b571bdb6 | [
"Apache-2.0"
] | 1 | 2021-05-12T11:30:15.000Z | 2021-05-12T11:30:15.000Z | day04/mix.exs | bjorng/advent-of-code-2016 | fb3e40ae2fd52d761f6c0bf55e7634277917ba25 | [
"Apache-2.0"
] | null | null | null | day04/mix.exs | bjorng/advent-of-code-2016 | fb3e40ae2fd52d761f6c0bf55e7634277917ba25 | [
"Apache-2.0"
] | null | null | null | defmodule Day04.MixProject do
use Mix.Project
# Mix project definition for the day04 exercise.
def project do
  [
    app: :day04,
    version: "0.1.0",
    elixir: "~> 1.11",
    start_permanent: Mix.env() == :prod,
    deps: deps()
  ]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
# {:nimble_parsec, "~> 1.1"},
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 20.133333 | 87 | 0.56457 |
fffe89a0216dcb2e4a9c0ccde6544aedb4a6c79d | 2,207 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/model/live_stream_snippet.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/you_tube/lib/google_api/you_tube/v3/model/live_stream_snippet.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/model/live_stream_snippet.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Model.LiveStreamSnippet do
  @moduledoc """
  Model for the `snippet` part of a YouTube `liveStream` resource.

  ## Attributes

  *   `channelId` (*type:* `String.t`, *default:* `nil`) - The ID that YouTube uses to uniquely identify the channel that is transmitting the stream.
  *   `description` (*type:* `String.t`, *default:* `nil`) - The stream's description. The value cannot be longer than 10000 characters.
  *   `isDefaultStream` (*type:* `boolean()`, *default:* `nil`) -
  *   `publishedAt` (*type:* `DateTime.t`, *default:* `nil`) - The date and time that the stream was created. The value is specified in ISO 8601 (YYYY-MM-DDThh:mm:ss.sZ) format.
  *   `title` (*type:* `String.t`, *default:* `nil`) - The stream's title. The value must be between 1 and 128 characters long.
  """

  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :channelId => String.t(),
          :description => String.t(),
          :isDefaultStream => boolean(),
          :publishedAt => DateTime.t(),
          :title => String.t()
        }

  # Field declarations drive JSON (de)serialization in ModelBase.
  field(:channelId)
  field(:description)
  field(:isDefaultStream)
  field(:publishedAt, as: DateTime)
  field(:title)
end
defimpl Poison.Decoder, for: GoogleApi.YouTube.V3.Model.LiveStreamSnippet do
  # Delegates Poison decoding to the model's own generated `decode/2`.
  def decode(value, options),
    do: GoogleApi.YouTube.V3.Model.LiveStreamSnippet.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.YouTube.V3.Model.LiveStreamSnippet do
  # Delegates Poison encoding to the shared Gax model encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 37.40678 | 177 | 0.701858 |
fffeb1de678ad1460af106ac912f6f1551f527dc | 1,518 | exs | Elixir | pangram/test/pangram_test.exs | rapidfireworks/exercism.ex | 7739c60db0510099fe8d37fd6bd76eee37623d05 | [
"MIT"
] | null | null | null | pangram/test/pangram_test.exs | rapidfireworks/exercism.ex | 7739c60db0510099fe8d37fd6bd76eee37623d05 | [
"MIT"
] | null | null | null | pangram/test/pangram_test.exs | rapidfireworks/exercism.ex | 7739c60db0510099fe8d37fd6bd76eee37623d05 | [
"MIT"
] | 1 | 2021-03-15T11:02:40.000Z | 2021-03-15T11:02:40.000Z | defmodule PangramTest do
@moduledoc """
Exercism test suite for `Pangram.pangram?/1`, which reports whether a
sentence uses every letter of the (ASCII) alphabet at least once.
"""
use ExUnit.Case
# The commented `@tag :pending` lines follow the Exercism convention of
# unlocking tests one at a time; uncomment a tag to re-skip a test.
# @tag :pending
test "empty sentence" do
refute Pangram.pangram?("")
end
# @tag :pending
test "pangram with only lower case" do
assert Pangram.pangram?("the quick brown fox jumps over the lazy dog")
end
# @tag :pending
test "missing character 'x'" do
refute Pangram.pangram?("a quick movement of the enemy will jeopardize five gunboats")
end
# @tag :pending
test "another missing character 'x'" do
refute Pangram.pangram?("the quick brown fish jumps over the lazy dog")
end
# @tag :pending
test "pangram with underscores" do
assert Pangram.pangram?("the_quick_brown_fox_jumps_over_the_lazy_dog")
end
# @tag :pending
test "pangram with numbers" do
assert Pangram.pangram?("the 1 quick brown fox jumps over the 2 lazy dogs")
end
# @tag :pending
test "missing letters replaced by numbers" do
refute Pangram.pangram?("7h3 qu1ck brown fox jumps ov3r 7h3 lazy dog")
end
# @tag :pending
test "pangram with mixed case and punctuation" do
assert Pangram.pangram?("Five quacking Zephyrs jolt my wax bed.")
end
# Non-ASCII letters (umlauts etc.) must not prevent an otherwise complete
# ASCII pangram from being recognized.
# @tag :pending
test "pangram with non ascii characters" do
assert Pangram.pangram?("Victor jagt zwölf Boxkämpfer quer über den großen Sylter Deich.")
end
# A sentence covering only a non-Latin alphabet is not an ASCII pangram.
# @tag :pending
test "pangram in alphabet other than ASCII" do
refute Pangram.pangram?(
"Широкая электрификация южных губерний даст мощный толчок подъёму сельского хозяйства."
)
end
end
| 27.107143 | 100 | 0.702899 |
fffec14d18e874e3d8b9ce585c9ee511ac65d534 | 1,358 | ex | Elixir | lib/kwerk/utils/randomizer.ex | johndavedecano/kwerk | a9e5928c2c5eb7a9a8dfaba30450627c257832a3 | [
"MIT"
] | 2 | 2018-09-23T08:45:07.000Z | 2020-01-20T15:47:45.000Z | lib/kwerk/utils/randomizer.ex | johndavedecano/kwerk | a9e5928c2c5eb7a9a8dfaba30450627c257832a3 | [
"MIT"
] | null | null | null | lib/kwerk/utils/randomizer.ex | johndavedecano/kwerk | a9e5928c2c5eb7a9a8dfaba30450627c257832a3 | [
"MIT"
] | null | null | null | defmodule Kwerk.Utils.Randomizer do
@moduledoc """
Random string generator module.
"""
@doc """
Generate random string based on the given legth. It is also possible to generate certain type of randomise string using the options below:
* :all - generate alphanumeric random string
* :alpha - generate nom-numeric random string
* :numeric - generate numeric random string
* :upcase - generate upper case non-numeric random string
* :downcase - generate lower case non-numeric random string
## Example
iex> Iurban.String.randomizer(20) //"Je5QaLj982f0Meb0ZBSK"
"""
def generate(length, type \\ :all) do
alphabets = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
numbers = "0123456789"
lists =
cond do
type == :alpha -> alphabets <> String.downcase(alphabets)
type == :numeric -> numbers
type == :upcase -> alphabets
type == :downcase -> String.downcase(alphabets)
true -> alphabets <> String.downcase(alphabets) <> numbers
end
|> String.split("", trim: true)
do_randomizer(length, lists)
end
# `@doc` on a private function is discarded by the compiler with a warning
# ("defp ... is private, @doc attribute is always discarded"), so document
# with a plain comment instead: returns an enumerable with one element per
# character to draw. Lengths below 2 (including 0 and negatives) fall back
# to a single draw, so `generate/2` always yields at least one character.
defp get_range(length) when length > 1, do: 1..length
defp get_range(_), do: [1]
# `@doc` is discarded on private functions (compiler warning), so use a
# comment: draws one random element of `lists` per position (with
# replacement) and concatenates the draws into the result string.
defp do_randomizer(length, lists) do
  length
  |> get_range()
  |> Enum.map_join("", fn _ -> Enum.random(lists) end)
end
end
| 30.863636 | 140 | 0.659794 |
fffed0358678ec36b9c8b5db7a82f59b41af2473 | 1,718 | ex | Elixir | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/grafeas_v1beta1_intoto_artifact.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/grafeas_v1beta1_intoto_artifact.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/container_analysis/lib/google_api/container_analysis/v1beta1/model/grafeas_v1beta1_intoto_artifact.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ContainerAnalysis.V1beta1.Model.GrafeasV1beta1IntotoArtifact do
@moduledoc """
## Attributes
* `hashes` (*type:* `GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactHashes.t`, *default:* `nil`) -
* `resourceUri` (*type:* `String.t`, *default:* `nil`) -
"""
# Auto-generated model. Gax.ModelBase appears to provide the struct and the
# (de)serialization the defimpls below delegate to -- confirm in Gax.
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:hashes => GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactHashes.t(),
:resourceUri => String.t()
}
# `as:` makes the decoder build a nested ArtifactHashes struct for `hashes`.
field(:hashes, as: GoogleApi.ContainerAnalysis.V1beta1.Model.ArtifactHashes)
field(:resourceUri)
end
defimpl Poison.Decoder,
  for: GoogleApi.ContainerAnalysis.V1beta1.Model.GrafeasV1beta1IntotoArtifact do
  # Delegates Poison decoding to the model's own generated `decode/2`.
  def decode(value, options),
    do: GoogleApi.ContainerAnalysis.V1beta1.Model.GrafeasV1beta1IntotoArtifact.decode(value, options)
end
defimpl Poison.Encoder,
  for: GoogleApi.ContainerAnalysis.V1beta1.Model.GrafeasV1beta1IntotoArtifact do
  # Delegates Poison encoding to the shared Gax model encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 33.038462 | 106 | 0.747963 |
fffed60f18b2085bd59c1bafe8e57d4d5c481bc3 | 1,437 | ex | Elixir | lib/money/ecto/numeric_currency_type.ex | wingyplus/money | 18d8eb3581886badb9b89adf455844517c10f599 | [
"MIT"
] | null | null | null | lib/money/ecto/numeric_currency_type.ex | wingyplus/money | 18d8eb3581886badb9b89adf455844517c10f599 | [
"MIT"
] | null | null | null | lib/money/ecto/numeric_currency_type.ex | wingyplus/money | 18d8eb3581886badb9b89adf455844517c10f599 | [
"MIT"
] | null | null | null | if Code.ensure_loaded?(Ecto.Type) do
defmodule Money.Ecto.NumericCurrency.Type do
  @moduledoc """
  Provides a type for Ecto to store a currency.
  The underlying data type is an integer.

  ## Migration Example

      create table(:my_table) do
        add :currency, :integer
      end

  ## Schema Example

      schema "my_table" do
        field :currency, Money.Ecto.NumericCurrency.Type
      end
  """

  alias Money.Currency

  # Older Ecto releases have no `Ecto.Type.__using__/1`; fall back to
  # declaring the behaviour directly so both versions are supported.
  if macro_exported?(Ecto.Type, :__using__, 1) do
    use Ecto.Type
  else
    @behaviour Ecto.Type
  end

  @spec type :: :integer
  def type, do: :integer

  # NOTE: the previous (commented-out) spec used `Integer.t()`, which is not
  # a defined type -- Dialyzer flags it as unknown. The specs below use the
  # built-in `integer()` instead and include the `:error` returns.
  @spec cast(term()) :: {:ok, atom()} | :error
  def cast(val)
  def cast(%Money{currency: currency}) when is_binary(currency) or is_atom(currency), do: cast(currency)

  # `Currency.to_atom/1` raises on unknown currencies; the function-level
  # rescue converts that into Ecto's `:error` cast result.
  def cast(currency) when is_binary(currency) or is_atom(currency) do
    {:ok, Currency.to_atom(currency)}
  rescue
    _ -> :error
  end

  def cast(currency) when is_integer(currency) do
    if Currency.exists?(currency), do: {:ok, Currency.to_atom(currency)}, else: :error
  end

  def cast(_), do: :error

  @spec load(integer()) :: {:ok, atom()}
  def load(int) when is_integer(int), do: {:ok, Currency.to_atom(int)}

  @spec dump(term()) :: {:ok, integer()} | :error
  def dump(atom) when is_atom(atom), do: {:ok, Currency.number(atom)}
  def dump(_), do: :error
end
end
| 25.660714 | 106 | 0.622129 |
fffed9e3eb5711dd577c980de0c3fd380c28da53 | 2,017 | exs | Elixir | test/phoenix_html_simplified_helpers/truncate_test.exs | ryochin/phoenix_html_simplified_helpers | 873c7b3eac9374f4805a3b1288d75496ee7b056b | [
"MIT"
] | 31 | 2016-02-27T18:15:12.000Z | 2022-02-23T11:34:09.000Z | test/phoenix_html_simplified_helpers/truncate_test.exs | ryochin/phoenix_html_simplified_helpers | 873c7b3eac9374f4805a3b1288d75496ee7b056b | [
"MIT"
] | 13 | 2016-05-26T14:08:59.000Z | 2020-10-13T11:03:08.000Z | test/phoenix_html_simplified_helpers/truncate_test.exs | ryochin/phoenix_html_simplified_helpers | 873c7b3eac9374f4805a3b1288d75496ee7b056b | [
"MIT"
] | 15 | 2016-05-21T09:54:32.000Z | 2021-09-23T01:43:03.000Z | Code.require_file("../../test_helper.exs", __ENV__.file)
defmodule Phoenix.HTML.SimplifiedHelpers.TruncateTest do
@moduledoc """
Tests for `truncate/2`: default length, `:length`, `:omission` and
`:separator` options, plus non-string passthrough (`nil`/booleans).
"""
use ExUnit.Case
use Phoenix.HTML.SimplifiedHelpers
doctest Phoenix.HTML.SimplifiedHelpers
# Default length is 30 characters including the "..." omission.
test "truncate" do
assert "Once upon a time in a world..." ==
truncate("Once upon a time in a world far far away")
end
# `:length` counts the omission text as part of the budget.
test "truncate with length option" do
assert "Once upon a ti..." == truncate("Once upon a time in a world far far away", length: 17)
end
test "truncate with omission option" do
assert "And they f... (continued)" ==
truncate(
"And they found that many people were sleeping better.",
length: 25,
omission: "... (continued)"
)
end
# Strings already within the limit are returned unchanged.
test "truncate no applying" do
assert "Once upon a time in a world far far away" ==
truncate("Once upon a time in a world far far away", length: 50)
end
# Non-binary inputs pass through untouched.
test "truncate nil" do
assert nil == truncate(nil)
end
test "truncate bool" do
assert false == truncate(false)
end
# `:separator` cuts at the last occurrence of the separator before the limit.
test "truncate with separator option" do
assert "Once upon a..." ==
truncate("Once upon a time in a world far far away", length: 17, separator: " ")
end
test "truncate with separator option one" do
assert "username@..." ==
truncate("username@username-username.com", length: 20, separator: "user")
end
test "truncate with separator option two" do
assert "username@username-..." ==
truncate(
"username@username-username.comusername@username-username.com",
separator: "user"
)
end
test "truncate with separator option three" do
assert "..." ==
truncate(
"username@username-username.comusername@username-username.com",
length: 3,
separator: "user"
)
end
# Disabled pending an `:escape` option -- left as documentation of intent.
# test "truncate with escape option" do
# truncate "kjkjkjk"
# assert 1 + 1 == 2
# end
end
| 28.013889 | 98 | 0.606346 |
fffedde5f30f067097ecf7cd426628dadfcdc25a | 1,032 | exs | Elixir | integration/elistrix_remote/mix.exs | tobz/elistrix | c9dd8a8369000492240969ada2b763433768aafa | [
"MIT"
] | 15 | 2015-05-05T07:22:23.000Z | 2019-08-16T12:08:56.000Z | integration/elistrix_remote/mix.exs | tobz/elistrix | c9dd8a8369000492240969ada2b763433768aafa | [
"MIT"
] | null | null | null | integration/elistrix_remote/mix.exs | tobz/elistrix | c9dd8a8369000492240969ada2b763433768aafa | [
"MIT"
] | null | null | null | defmodule ElistrixRemote.Mixfile do
@moduledoc false
use Mix.Project
# Project definition for the Elistrix integration-test Phoenix app.
def project do
[app: :elistrix_remote,
version: "0.0.1",
elixir: "~> 1.0",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[mod: {ElistrixRemote, []},
applications: [:phoenix, :cowboy, :logger,
:phoenix_ecto, :postgrex]]
end
# Specifies which paths to compile per environment
# (test support helpers are only compiled for the :test env).
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
#
# Type `mix help deps` for examples and options
defp deps do
[{:phoenix, "~> 0.12"},
{:phoenix_ecto, "~> 0.3"},
{:postgrex, ">= 0.0.0"},
{:phoenix_live_reload, "~> 0.3"},
{:cowboy, "~> 1.0"}]
end
end
| 26.461538 | 63 | 0.608527 |
fffeed10a1e58d9620727c7e79af63dc1cfed219 | 312 | exs | Elixir | test/support/migrations/20000101000000_create_tables.exs | myskoach/polymorphic_embed | 3d005c4c6334315194d329a18c193cce17669522 | [
"Apache-2.0"
] | null | null | null | test/support/migrations/20000101000000_create_tables.exs | myskoach/polymorphic_embed | 3d005c4c6334315194d329a18c193cce17669522 | [
"Apache-2.0"
] | null | null | null | test/support/migrations/20000101000000_create_tables.exs | myskoach/polymorphic_embed | 3d005c4c6334315194d329a18c193cce17669522 | [
"Apache-2.0"
] | 1 | 2020-11-26T01:50:50.000Z | 2020-11-26T01:50:50.000Z | defmodule PolymorphicEmbed.CreateTables do
@moduledoc """
Test-support migration creating the `reminders` table used by the
polymorphic_embed test suite.
"""
use Ecto.Migration
def change do
create table(:reminders) do
add(:date, :utc_datetime, null: false)
add(:text, :text, null: false)
# :map columns hold embedded data; presumably the polymorphic embed
# payloads (single vs. list) -- confirm against the test schema.
add(:channel, :map, null: false)
add(:contexts, :map, null: false)
timestamps()
end
end
end
| 20.8 | 44 | 0.641026 |
ffff356551a09b71b7b31b652bba93537e121f34 | 466 | exs | Elixir | test/models/product_test.exs | slurmulon/thing | 21791e7a9579d41de28a0a6131218aed3679d033 | [
"Apache-2.0"
] | null | null | null | test/models/product_test.exs | slurmulon/thing | 21791e7a9579d41de28a0a6131218aed3679d033 | [
"Apache-2.0"
] | null | null | null | test/models/product_test.exs | slurmulon/thing | 21791e7a9579d41de28a0a6131218aed3679d033 | [
"Apache-2.0"
] | null | null | null | defmodule Thing.ProductTest do
@moduledoc """
Changeset validation tests for `Thing.Product`.
"""
use Thing.ModelCase
alias Thing.Product
# Minimal attribute sets: all expected fields present vs. none.
@valid_attrs %{gtin: "some content", name: "some content", upc: "some content"}
@invalid_attrs %{}
test "changeset with valid attributes" do
changeset = Product.changeset(%Product{}, @valid_attrs)
assert changeset.valid?
end
test "changeset with invalid attributes" do
changeset = Product.changeset(%Product{}, @invalid_attrs)
refute changeset.valid?
end
end
| 24.526316 | 81 | 0.72103 |
ffff39ad466ffe6489c42cfa5d9eb6cb6f6682af | 569 | ex | Elixir | lib/validators/string.ex | gmartsenkov/gate | 67a1d9a3b0c0515312b49e9a110cb00ebc46d402 | [
"MIT"
] | 3 | 2019-11-04T21:40:10.000Z | 2021-12-22T11:25:37.000Z | lib/validators/string.ex | gmartsenkov/gate | 67a1d9a3b0c0515312b49e9a110cb00ebc46d402 | [
"MIT"
] | null | null | null | lib/validators/string.ex | gmartsenkov/gate | 67a1d9a3b0c0515312b49e9a110cb00ebc46d402 | [
"MIT"
] | null | null | null | defmodule Gate.Validators.String do
@moduledoc false
alias Gate.Locale
# Injects `validate/2` clauses for `{:regex, ...}` rules into the using
# module. Thanks to macro alias hygiene, `Locale` inside `quote` resolves
# against this module's `alias Gate.Locale`, not the caller's aliases.
# NOTE(review): the injected clauses call `validate(value, :str)` -- this
# assumes the host module defines a `:str` clause; confirm in Gate's
# validator modules.
defmacro __using__(_opts) do
quote do
def validate(value, {:regex, reg_expression}) do
with true <- validate(value, :str) do
if Regex.match?(reg_expression, value), do: true, else: Locale.get("regex", [value])
end
end
# Same as above but with a caller-supplied locale key for the error text.
def validate(value, {:regex, reg_expression, custom_locale}) do
with true <- validate(value, :str) do
if Regex.match?(reg_expression, value), do: true, else: Locale.get(custom_locale, [value])
end
end
end
end
end
| 25.863636 | 100 | 0.666081 |
ffff5c8242f3a1b5007b80c670caf76c01cdd075 | 227 | exs | Elixir | priv/repo/migrations/20160217231650_add_path_and_file_comment_to_translations.exs | suryatmodulus/accent | 6aaf34075c33f3d9d84d38237af4a39b594eb808 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | priv/repo/migrations/20160217231650_add_path_and_file_comment_to_translations.exs | suryatmodulus/accent | 6aaf34075c33f3d9d84d38237af4a39b594eb808 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | priv/repo/migrations/20160217231650_add_path_and_file_comment_to_translations.exs | doc-ai/accent | e337e16f3658cc0728364f952c0d9c13710ebb06 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule Accent.Repo.Migrations.AddPathAndFileCommentToTranslations do
@moduledoc """
Adds nullable `file_path` and `file_comment` columns to `translations`.
"""
use Ecto.Migration
def change do
alter table(:translations) do
add(:file_path, :string)
add(:file_comment, :string)
end
end
end
| 20.636364 | 71 | 0.726872 |
ffff6cb9d71bdead4c136f01da1cdcbac4c2e769 | 605 | ex | Elixir | lib/mmdb2/record/enterprise_country.ex | tcitworld/adapter_mmdb2 | 965fd00ce2ba3d11d1749047f97fa2ccaeaaf533 | [
"Apache-2.0"
] | 3 | 2018-03-03T18:59:55.000Z | 2020-12-31T19:36:59.000Z | lib/mmdb2/record/enterprise_country.ex | tcitworld/adapter_mmdb2 | 965fd00ce2ba3d11d1749047f97fa2ccaeaaf533 | [
"Apache-2.0"
] | 3 | 2019-07-17T12:55:41.000Z | 2020-08-31T18:55:15.000Z | lib/mmdb2/record/enterprise_country.ex | tcitworld/adapter_mmdb2 | 965fd00ce2ba3d11d1749047f97fa2ccaeaaf533 | [
"Apache-2.0"
] | 4 | 2019-07-15T10:07:16.000Z | 2021-09-22T15:46:04.000Z | defmodule Geolix.Adapter.MMDB2.Record.EnterpriseCountry do
@moduledoc """
Record for `country` information (enterprise database).
"""
alias Geolix.Adapter.MMDB2.Model
alias Geolix.Adapter.MMDB2.Record
# Struct keys mirror Record.Country's keys (minus :__struct__) plus the
# enterprise-only :confidence field, so both records stay in sync.
defstruct %Record.Country{}
|> Map.keys()
|> List.delete(:__struct__)
|> List.flatten([:confidence])
@behaviour Model
@impl Model
def from(nil, _), do: nil
def from(data, nil), do: struct(__MODULE__, data)
# With a locale: build the struct, then surface the localized name.
# NOTE(review): assumes `data` carries a :names map keyed by locale --
# confirm against the MMDB2 decoder output.
def from(data, locale) do
result = from(data, nil)
result = Map.put(result, :name, result.names[locale])
result
end
end
| 22.407407 | 58 | 0.652893 |
ffff8031b9356c6d0b8d8152c815057a147235f9 | 1,253 | ex | Elixir | test/support/apps/phx1_4/lib/phx1_4_web/endpoint.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 528 | 2019-09-13T15:10:36.000Z | 2022-03-31T10:28:27.000Z | test/support/apps/phx1_4/lib/phx1_4_web/endpoint.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 133 | 2019-09-13T17:46:59.000Z | 2022-03-01T13:37:10.000Z | test/support/apps/phx1_4/lib/phx1_4_web/endpoint.ex | nhphuc412/torch | d2145b9fdb889517c0a36a5367f0b116ee7a6844 | [
"MIT"
] | 38 | 2019-10-29T20:37:13.000Z | 2022-03-03T05:19:33.000Z | defmodule Phx14Web.Endpoint do
@moduledoc """
Phoenix endpoint for the phx1_4 test app. Plugs run in declaration order,
ending at the router.
"""
use Phoenix.Endpoint, otp_app: :phx1_4
socket "/socket", Phx14Web.UserSocket,
websocket: true,
longpoll: false
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :phx1_4,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_phx1_4_key",
signing_salt: "dJET+rEp"
plug Phx14Web.Router
end
| 26.659574 | 69 | 0.706305 |
ffff97a9be8e51cc2a4751edfc530bb243efd95c | 990 | ex | Elixir | apps/service_receive/lib/receive/accept/store.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/service_receive/lib/receive/accept/store.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | apps/service_receive/lib/receive/accept/store.ex | kennyatpillar/hindsight | e90e2150a14218e5d6fdf5874f57eb055fd2dd07 | [
"Apache-2.0"
] | null | null | null | defmodule Receive.Accept.Store do
@moduledoc """
Brook-backed view-state store for Accept definitions, keyed by the
`identifier/1,2` of a dataset/subset pair in the "accepts" collection.
"""
@instance Receive.Application.instance()
@collection "accepts"
import Definition, only: [identifier: 1, identifier: 2]
@spec collection() :: String.t()
def collection(), do: @collection
# Values are stored wrapped as %{accept: accept}; get!/get_all! unwrap them.
@spec persist(Accept.t()) :: :ok
def persist(accept) do
Brook.ViewState.merge(@collection, identifier(accept), %{accept: accept})
end
# Returns the stored Accept or nil when the key is absent.
@spec get!(dataset_id :: String.t(), subset_id :: String.t()) :: Accept.t()
def get!(dataset_id, subset_id) do
case Brook.get!(@instance, @collection, identifier(dataset_id, subset_id)) do
nil -> nil
map -> Map.get(map, :accept)
end
end
@spec delete(dataset_id :: String.t(), subset_id :: String.t()) :: :ok
def delete(dataset_id, subset_id) do
Brook.ViewState.delete(@collection, identifier(dataset_id, subset_id))
end
@spec get_all!() :: [Accept.t()]
def get_all!() do
Brook.get_all_values!(@instance, @collection)
|> Enum.map(&Map.get(&1, :accept))
end
end
| 29.117647 | 81 | 0.667677 |
ffffb5d3c858a52a03b8c5e43b3f290cb2ad7227 | 194 | ex | Elixir | lib/metric/prometheus.ex | dustinrouillard/dustin-gateway | 205dd39e1e5c38d2d48638e3847024761aeba4f7 | [
"MIT"
] | 4 | 2021-08-15T17:40:14.000Z | 2021-09-04T10:03:25.000Z | lib/metric/prometheus.ex | dustinrouillard/gm-gateway | 2a2e4314cfaddf4d5e4deb32e5cfddb6231c2dd8 | [
"MIT"
] | null | null | null | lib/metric/prometheus.ex | dustinrouillard/gm-gateway | 2a2e4314cfaddf4d5e4deb32e5cfddb6231c2dd8 | [
"MIT"
] | null | null | null | defmodule Gateway.Metrics do
@moduledoc """
One-shot Task that kicks off the metrics collector at application start.
"""
# :transient means the supervisor only restarts this task if it exits
# abnormally; the explicit exit(:normal) below marks successful completion.
use Task, restart: :transient
def start_link(_opts) do
Task.start_link(fn ->
Gateway.Metrics.Collector.start()
exit(:normal)
end)
end
end
| 17.636364 | 39 | 0.675258 |
ffffc5a97f85dbe58fc2c0a58b7f12d00686eb3c | 1,266 | exs | Elixir | mix.exs | vikger/block_keys | d3bf45782b1470e0bee553804bcf9ea84cf42da1 | [
"Apache-2.0"
] | null | null | null | mix.exs | vikger/block_keys | d3bf45782b1470e0bee553804bcf9ea84cf42da1 | [
"Apache-2.0"
] | null | null | null | mix.exs | vikger/block_keys | d3bf45782b1470e0bee553804bcf9ea84cf42da1 | [
"Apache-2.0"
] | null | null | null | defmodule BlockKeys.MixProject do
@moduledoc false
use Mix.Project
def project do
[
app: :block_keys,
version: "0.1.8",
elixir: "~> 1.7",
description: description(),
start_permanent: Mix.env() == :prod,
source_url: "https://github.com/agilealpha/block_keys",
# Hex package metadata.
package: %{
name: "block_keys",
licenses: ["Apache License 2.0"],
links: %{"GitHub" => "https://github.com/AgileAlpha/block_keys"}
},
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
coveralls: :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
name: "BlockKeys",
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# One-line summary used for the Hex package description.
defp description do
"This package generates Hierarchical Deterministic blockchain wallets for multiple currencies."
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:keccakf1600, "~> 2.0", hex: :keccakf1600_otp23},
{:excoveralls, "~> 0.10", only: :test},
{:libsecp256k1, "~> 0.1.9"}
]
end
end
| 25.32 | 99 | 0.586888 |
08000bde59216bfe106935a584da57f9a2d3bb96 | 5,089 | exs | Elixir | test/arango/collection_test.exs | mpoeter/arangoex | 161ed0223f4e75ca47ca921660f162d01fdab863 | [
"Apache-2.0"
] | null | null | null | test/arango/collection_test.exs | mpoeter/arangoex | 161ed0223f4e75ca47ca921660f162d01fdab863 | [
"Apache-2.0"
] | null | null | null | test/arango/collection_test.exs | mpoeter/arangoex | 161ed0223f4e75ca47ca921660f162d01fdab863 | [
"Apache-2.0"
] | 3 | 2018-03-27T09:47:04.000Z | 2019-11-04T22:41:46.000Z | defmodule CollectionTest do
@moduledoc """
Integration tests for `Arango.Collection`: create/drop, metadata lookups
(checksum, count, figures, properties, revision) and maintenance operations
(load/unload, rename, rotate, truncate). Each test runs against a live
ArangoDB via the `arango/on_db` helpers provided by `Arango.TestCase`;
`ctx.coll` is a per-test collection from the case setup.
"""
use Arango.TestCase
doctest Arango
alias Arango.Collection
alias Arango.Document
alias Arango.Wal
# The _system database ships with this fixed set of system collections.
test "lists collections" do
{:ok, collections} = Collection.collections() |> arango(database_name: "_system")
names =
collections
|> Enum.map(fn c -> c.name end)
|> Enum.sort
assert names == [
"_apps", "_aqlfunctions", "_frontend", "_graphs", "_jobs", "_modules",
"_queues", "_routing", "_statistics", "_statistics15", "_statisticsRaw", "_users"
]
end
test "creates a collection", ctx do
new_collname = Faker.Lorem.word
{:ok, original_colls} = Collection.collections() |> on_db(ctx)
{:ok, coll} = Collection.create(%Collection{name: new_collname}) |> on_db(ctx)
{:ok, after_colls} = Collection.collections() |> on_db(ctx)
assert [coll] == after_colls -- original_colls
assert coll.name == new_collname
end
test "drops a collection", ctx do
new_coll = %Collection{name: Faker.Lorem.word}
# create one to drop
{:ok, _} = Collection.create(new_coll) |> on_db(ctx)
{:ok, colls} = Collection.collections() |> on_db(ctx)
assert new_coll.name in Enum.map(colls, & &1.name)
# drop and make sure it's gone
{:ok, _} = Collection.drop(new_coll) |> on_db(ctx)
{:ok, colls} = Collection.collections() |> on_db(ctx)
refute new_coll.name in Enum.map(colls, & &1.name)
end
test "looks up collection information", ctx do
{:ok, new_coll} = Collection.collection(ctx.coll) |> on_db(ctx)
assert new_coll == ctx.coll
end
# load/2 with false suppresses the document count in the response.
test "loads a collection", ctx do
coll_name = ctx.coll.name
{:ok, info} = Collection.load(ctx.coll) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = info
assert Map.has_key?(info, "count")
{:ok, info} = Collection.load(ctx.coll, false) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = info
refute Map.has_key?(info, "count")
end
test "unloads a collection", ctx do
coll_name = ctx.coll.name
{:ok, info} = Collection.unload(ctx.coll) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = info
end
test "looks up collection checksum", ctx do
coll_name = ctx.coll.name
{:ok, checksum} = Collection.checksum(ctx.coll) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = checksum
assert Map.has_key?(checksum, "checksum")
assert Map.has_key?(checksum, "revision")
end
test "counts documents in a collection", ctx do
coll_name = ctx.coll.name
{:ok, count} = Collection.count(ctx.coll) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = count
assert Map.has_key?(count, "count")
end
test "looks up statistics of a collection", ctx do
coll_name = ctx.coll.name
{:ok, figures} = Collection.figures(ctx.coll) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = figures
assert Map.has_key?(figures, "figures")
end
test "looks up collection properties", ctx do
coll_name = ctx.coll.name
{:ok, properties} = Collection.properties(ctx.coll) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = properties
assert Map.has_key?(properties, "waitForSync")
assert Map.has_key?(properties, "doCompact")
assert Map.has_key?(properties, "journalSize")
assert Map.has_key?(properties, "isVolatile")
end
test "sets collection properties", ctx do
coll_name = ctx.coll.name
{:ok, properties} = Collection.set_properties(ctx.coll, waitForSync: true) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false, "waitForSync" => true} = properties
{:ok, properties} = Collection.set_properties(ctx.coll, journalSize: 1_048_576) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false, "journalSize" => 1_048_576} = properties
{:ok, properties} = Collection.set_properties(ctx.coll, journalSize: 2_048_576, waitForSync: false) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false, "waitForSync" => false, "journalSize" => 2_048_576} = properties
end
test "renames collection", ctx do
{:ok, properties} = Collection.rename(ctx.coll, "foobar") |> on_db(ctx)
assert %{"name" => "foobar", "error" => false} = properties
end
test "looks up collection revision id", ctx do
coll_name = ctx.coll.name
{:ok, revision} = Collection.revision(ctx.coll) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = revision
assert Map.has_key?(revision, "revision")
end
# Rotation needs a journal to rotate, so write a document and flush the WAL
# synchronously before asking for the rotate.
test "rotates a collection journal", ctx do
{:ok, _} = Document.create(ctx.coll, %{name: "RotateMe"}) |> on_db(ctx)
{:ok, _} = Wal.flush(waitForSync: true, waitForCollector: true) |> on_db(ctx)
assert {:ok, %{"result" => true, "error" => false, "code" => 200}} = Collection.rotate(ctx.coll) |> on_db(ctx)
end
test "truncates a collection", ctx do
coll_name = ctx.coll.name
{:ok, truncate} = Collection.truncate(ctx.coll) |> on_db(ctx)
assert %{"name" => ^coll_name, "error" => false} = truncate
end
end
| 34.154362 | 117 | 0.648261 |
08004e63d6dd3e903bac01c10a02941c416a3a06 | 372 | ex | Elixir | web/views/error_view.ex | castingclouds/rc4gho | fdbd6665aa233370c857d63b1d6a22459e1833ce | [
"MIT"
] | null | null | null | web/views/error_view.ex | castingclouds/rc4gho | fdbd6665aa233370c857d63b1d6a22459e1833ce | [
"MIT"
] | null | null | null | web/views/error_view.ex | castingclouds/rc4gho | fdbd6665aa233370c857d63b1d6a22459e1833ce | [
"MIT"
] | null | null | null | defmodule Rocket.ErrorView do
use Rocket.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Server internal error"
end
# When no render clause matches or the template is missing,
# fall back to the 500 page.
def template_not_found(_template, assigns), do: render("500.html", assigns)
end
| 20.666667 | 47 | 0.696237 |
0800717a24615ed0ac60f9357feaf3a1c77d7a10 | 3,757 | ex | Elixir | kousa/lib/data-layer/scheduled_room_data.ex | larsverp/dogehouse | a433d4cd64512c4723cd019957b654ae4dc877b4 | [
"MIT"
] | 2 | 2021-03-15T22:53:51.000Z | 2021-03-22T09:34:06.000Z | kousa/lib/data-layer/scheduled_room_data.ex | larsverp/dogehouse | a433d4cd64512c4723cd019957b654ae4dc877b4 | [
"MIT"
] | 2 | 2021-09-01T19:42:51.000Z | 2021-12-09T03:56:38.000Z | kousa/lib/data-layer/scheduled_room_data.ex | larsverp/dogehouse | a433d4cd64512c4723cd019957b654ae4dc877b4 | [
"MIT"
] | 1 | 2021-03-13T20:31:49.000Z | 2021-03-13T20:31:49.000Z | defmodule Kousa.Data.ScheduledRoom do
import Ecto.Query
import Ecto.Changeset
alias Kousa.Pagination
alias Beef.Schemas.ScheduledRoom
alias Beef.Repo
@fetch_limit 16
def get_by_id(id) do
from(sr in ScheduledRoom,
where: sr.id == ^id,
inner_join: u in assoc(sr, :creator),
preload: [
creator: u
]
)
|> Repo.one()
end
def delete(user_id, id) do
from(sr in ScheduledRoom, where: sr.creatorId == ^user_id and sr.id == ^id)
|> Repo.delete_all()
end
def insert(data) do
%ScheduledRoom{} |> ScheduledRoom.insert_changeset(data) |> Repo.insert(returning: true)
end
def room_started(user_id, id, room_id) do
from(sr in ScheduledRoom,
where: sr.creatorId == ^user_id and sr.id == ^id,
update: [
set: [
roomId: ^room_id,
started: true
]
]
)
|> Repo.update_all([])
end
@spec edit(
any,
any,
:invalid | %{optional(:__struct__) => none, optional(atom | binary) => any}
) :: :ok | {:error, Ecto.Changeset.t()}
def edit(user_id, id, data) do
with {:ok, cleaned_data} <-
ScheduledRoom.edit_changeset(%ScheduledRoom{}, data) |> apply_action(:update) do
from(sr in ScheduledRoom,
where: sr.creatorId == ^user_id and sr.id == ^id,
update: [
set: [
name: ^cleaned_data.name,
description: ^cleaned_data.description,
scheduledFor: ^cleaned_data.scheduledFor
]
]
)
|> Repo.update_all([])
:ok
else
error -> error
end
end
def add_cursor(q, "") do
q
end
def add_cursor(q, nil) do
q
end
def add_cursor(q, cursor) do
with [iso, id] <- String.split(cursor, "|"),
{:ok, dt} <- Timex.parse(iso, "{ISO:Basic:Z}") do
where(q, [sr], {^dt, ^id} < {sr.scheduledFor, sr.id})
else
_ ->
q
end
end
def get_my_scheduled_rooms_about_to_start(user_id) do
from(sr in ScheduledRoom,
inner_join: u in assoc(sr, :creator),
preload: [
creator: u
],
where:
sr.creatorId == ^user_id and is_nil(sr.roomId) and
sr.started ==
false and
fragment(
"? - interval '1 hours' < now() and ? + interval '2 hours' > now()",
sr.scheduledFor,
sr.scheduledFor
),
order_by: [asc: sr.scheduledFor],
limit: ^@fetch_limit
)
|> Repo.all()
end
@spec get_feed(String.t(), boolean(), String.t()) :: {[ScheduledRoom], nil | number}
def get_feed(user_id, get_only_my_scheduled_rooms, cursor) do
q =
from(sr in ScheduledRoom,
inner_join: u in assoc(sr, :creator),
order_by: [asc: sr.scheduledFor, asc: sr.id],
where: sr.started == false,
limit: ^@fetch_limit,
preload: [
creator: u
]
)
get_only_my_scheduled_rooms
|> if(
do:
where(
q,
[sr],
sr.creatorId == ^user_id and sr.scheduledFor > fragment("now() - interval '2 hours'")
),
else: where(q, [sr], sr.scheduledFor > fragment("now()"))
)
|> add_cursor(cursor)
|> Repo.all()
|> Pagination.items_to_cursor_tuple(
@fetch_limit,
&(Timex.format!(&1.scheduledFor, "{ISO:Basic:Z}") <> "|" <> &1.id)
)
end
  # Returns one of the caller's upcoming scheduled rooms (creator preloaded),
  # or nil when none is scheduled in the future. There is no order_by, so with
  # several future rooms the row picked by `limit: 1` is unspecified.
  # NOTE(review): in the spec, `ScheduledRoom` denotes the module atom, not a
  # struct — `ScheduledRoom.t()` was probably intended.
  @spec get_mine(String.t()) :: ScheduledRoom | nil
  def get_mine(user_id) do
    from(sr in ScheduledRoom,
      inner_join: u in assoc(sr, :creator),
      on: sr.creatorId == u.id,
      where: sr.scheduledFor > fragment("now()") and sr.creatorId == ^user_id,
      limit: 1,
      preload: [
        creator: u
      ]
    )
    |> Repo.one()
  end
end
| 24.555556 | 95 | 0.551504 |
08008298d5c47645317bd8c190e68fbb426150e8 | 2,249 | exs | Elixir | mix.exs | vbrazo/blog_api | 8a3e15c666beef8f33b6d323627f92379267ecd9 | [
"MIT"
] | null | null | null | mix.exs | vbrazo/blog_api | 8a3e15c666beef8f33b6d323627f92379267ecd9 | [
"MIT"
] | null | null | null | mix.exs | vbrazo/blog_api | 8a3e15c666beef8f33b6d323627f92379267ecd9 | [
"MIT"
] | null | null | null | defmodule BlogApi.Mixfile do
  use Mix.Project

  # Mix project definition: app metadata, coverage tooling (ExCoveralls) and
  # ExDoc settings for the published documentation.
  def project do
    [
      app: :blog_api,
      version: "1.0.0",
      elixir: "~> 1.9",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: [:phoenix, :gettext] ++ Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      aliases: aliases(),
      deps: deps(),
      test_coverage: [tool: ExCoveralls],
      # Docs
      name: "BlogApi",
      source_url: "https://github.com/vbrazo/blog_api",
      homepage_url: "https://github.com/vbrazo/blog_api",
      # The main page in the docs
      docs: [main: "README", extras: ["README.md"]]
    ]
  end
  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [mod: {BlogApi.Application, []}, extra_applications: [:logger, :runtime_tools, :comeonin]]
  end
  # Specifies which paths to compile per environment.
  # Test builds additionally compile test support modules and factories.
  defp elixirc_paths(:test), do: ["lib", "test/support", "test/factories"]
  defp elixirc_paths(_), do: ["lib"]
  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  # NOTE(review): phoenix "~> 1.6.2" is paired with phoenix_ecto "~> 3.2" and
  # poison 3.x, which target much older Phoenix releases — verify these
  # version constraints actually resolve together.
  defp deps do
    [
      {:phoenix, "~> 1.6.2"},
      {:phoenix_pubsub, "~> 2.0"},
      {:phoenix_ecto, "~> 3.2"},
      {:postgrex, "~> 0.13.3"},
      {:poison, "~> 3.1.0"},
      {:gettext, "~> 0.11"},
      {:proper_case, "~> 1.0.0"},
      {:cowboy, "~> 2.7"},
      {:plug_cowboy, "~> 2.2"},
      {:comeonin, "~> 3.2"},
      {:guardian, "~> 1.0"},
      {:excoveralls, "~> 0.7", only: [:dev, :test]},
      {:credo, "~> 0.8.5", only: [:dev, :test]},
      {:ex_machina, "~> 2.0", only: :test},
      {:ex_doc, "~> 0.16", only: :dev, runtime: false},
      {:faker, "~> 0.16", only: [:test, :dev]},
      {:plug, "~> 1.0"},
      {:corsica, "~> 1.0"}
    ]
  end
  # Aliases are shortcuts or tasks specific to the current project.
  # For example, to create, migrate and run the seeds file at once:
  #
  #     $ mix ecto.setup
  #
  # See the documentation for `Mix` for more info on aliases.
  defp aliases do
    [
      "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
      "ecto.reset": ["ecto.drop", "ecto.setup"],
      test: ["ecto.create --quiet", "ecto.migrate", "test"]
    ]
  end
end
| 29.592105 | 94 | 0.55847 |
08008301592da03396fc8f6ebf69bcdeddc85ea9 | 1,924 | ex | Elixir | lib/harmony/units.ex | esprezzo/elixir-harmony | 0152859dc5af2762431e0399dd39f7975dbc3422 | [
"MIT"
] | null | null | null | lib/harmony/units.ex | esprezzo/elixir-harmony | 0152859dc5af2762431e0399dd39f7975dbc3422 | [
"MIT"
] | null | null | null | lib/harmony/units.ex | esprezzo/elixir-harmony | 0152859dc5af2762431e0399dd39f7975dbc3422 | [
"MIT"
defmodule Harmony.Units do
  @moduledoc """
  Struct that maps each ether denomination (and its common aliases, e.g.
  `babbage` == `kwei`, `shannon` == `gwei`) to its value expressed in wei.
  """
  alias Harmony.Units

  @typedoc """
  Type that represents Harmony.Unit struct with various ether denominations
  """
  @type t :: %Units{
    "wei": integer,
    "kwei": integer,
    "Kwei": integer,
    "babbage": integer,
    "femtoether": integer,
    "mwei": integer,
    "Mwei": integer,
    "lovelace": integer,
    "picoether": integer,
    "gwei": integer,
    "Gwei": integer,
    "shannon": integer,
    "nanoether": integer,
    "nano": integer,
    "szabo": integer,
    "microether": integer,
    "micro": integer,
    "finney": integer,
    "milliether": integer,
    "milli": integer,
    "ether": integer,
    "eth": integer,
    "kether": integer,
    "grand": integer,
    "mether": integer,
    "gether": integer,
    "tether": integer
  }

  # Each value is the denomination expressed in wei (10^18 wei = 1 ether).
  defstruct [
    "wei": 1,
    "kwei": 1_000,                                        # 10^3
    "Kwei": 1_000,
    "babbage": 1_000,
    "femtoether": 1_000,
    "mwei": 1_000_000,                                    # 10^6
    "Mwei": 1_000_000,
    "lovelace": 1_000_000,
    "picoether": 1_000_000,
    "gwei": 1_000_000_000,                                # 10^9
    "Gwei": 1_000_000_000,
    "shannon": 1_000_000_000,
    "nanoether": 1_000_000_000,
    "nano": 1_000_000_000,
    "szabo": 1_000_000_000_000,                           # 10^12
    "microether": 1_000_000_000_000,
    "micro": 1_000_000_000_000,
    "finney": 1_000_000_000_000_000,                      # 10^15
    "milliether": 1_000_000_000_000_000,
    "milli": 1_000_000_000_000_000,
    "ether": 1_000_000_000_000_000_000,                   # 10^18
    "eth": 1_000_000_000_000_000_000,
    "kether": 1_000_000_000_000_000_000_000,              # 10^21
    "grand": 1_000_000_000_000_000_000_000,
    "mether": 1_000_000_000_000_000_000_000_000,          # 10^24
    "gether": 1_000_000_000_000_000_000_000_000_000,      # 10^27
    # Fixed: tether (tera-ether) is 10^30 wei; the previous value was 10^29.
    "tether": 1_000_000_000_000_000_000_000_000_000_000,  # 10^30
  ]
end
| 27.098592 | 87 | 0.555613 |
080097ed392a0b78820968a080ce5cb2a18ef815 | 90 | exs | Elixir | test/space_raiders_web/views/page_view_test.exs | abinader89/Space-Raiders | d5e01a3200a54d22824b890238613a00f32a7d62 | [
"CC-BY-3.0"
] | null | null | null | test/space_raiders_web/views/page_view_test.exs | abinader89/Space-Raiders | d5e01a3200a54d22824b890238613a00f32a7d62 | [
"CC-BY-3.0"
] | null | null | null | test/space_raiders_web/views/page_view_test.exs | abinader89/Space-Raiders | d5e01a3200a54d22824b890238613a00f32a7d62 | [
"CC-BY-3.0"
] | null | null | null | defmodule SpaceRaidersWeb.PageViewTest do
  # Pulls in the shared ConnCase helpers; async: true is safe because this
  # (currently empty) view test case touches no shared state.
  use SpaceRaidersWeb.ConnCase, async: true
end
| 22.5 | 43 | 0.844444 |
08009aab73ba92e17c870bd19fac83aefbf76c35 | 489 | ex | Elixir | lib/access.ex | cchko/libswagger | c0e5991ca240d9315631dc0784ae71ba344a58c4 | [
"MIT"
] | null | null | null | lib/access.ex | cchko/libswagger | c0e5991ca240d9315631dc0784ae71ba344a58c4 | [
"MIT"
] | null | null | null | lib/access.ex | cchko/libswagger | c0e5991ca240d9315631dc0784ae71ba344a58c4 | [
"MIT"
] | null | null | null | defmodule Swagger.Access do
  @moduledoc false

  # `use Swagger.Access` injects an `Access` implementation that delegates to
  # the `Map` functions, so structs of the using module support `struct[key]`,
  # `get_in/2`, `put_in/3`, `pop_in/2`, etc.
  defmacro __using__(_) do
    quote do
      @behaviour Access
      @doc false
      def fetch(%__MODULE__{} = s, key), do: Map.fetch(s, key)
      # NOTE(review): `get/3` is no longer part of the `Access` behaviour on
      # modern Elixir; it remains here as a plain convenience function.
      @doc false
      def get(%__MODULE__{} = s, key, default), do: Map.get(s, key, default)
      @doc false
      def get_and_update(%__MODULE__{} = s, key, fun), do: Map.get_and_update(s, key, fun)
      @doc false
      def pop(%__MODULE__{} = s, key), do: Map.pop(s, key)
    end
  end
| 25.736842 | 90 | 0.603272 |
08009bd2cad624a5ebc61a7254d482632299d3b6 | 972 | ex | Elixir | lib/z/endpoint.ex | davidsantoso/z | f9918e308c802116351db00510fbfcd871150c1c | [
"MIT"
] | null | null | null | lib/z/endpoint.ex | davidsantoso/z | f9918e308c802116351db00510fbfcd871150c1c | [
"MIT"
] | null | null | null | lib/z/endpoint.ex | davidsantoso/z | f9918e308c802116351db00510fbfcd871150c1c | [
"MIT"
] | null | null | null | defmodule Z.Endpoint do
  use Phoenix.Endpoint, otp_app: :z

  # Channel transport for the application's websocket connections.
  socket "/socket", Z.UserSocket
  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phoenix.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/", from: :z, gzip: false,
    only: ~w(css fonts images js favicon.ico robots.txt)
  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end
  # Request-scoped plumbing: request id for log correlation, then logging,
  # then body parsing (JSON decoded with Poison).
  plug Plug.RequestId
  plug Plug.Logger
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Poison
  plug Plug.MethodOverride
  plug Plug.Head
  # Cookie-based session; signing_salt protects against tampering.
  plug Plug.Session,
    store: :cookie,
    key: "_z_key",
    signing_salt: "EmdUxx43"
  # The router must be the last plug in the endpoint pipeline.
  plug Z.Router
end
| 24.3 | 69 | 0.69856 |
0800bbff5b87ee7fb08a874c6b40f62ac9dc6647 | 509 | ex | Elixir | priv/catalogue/button/example03.ex | aarongraham/surface_bulma | 0b8ab633465681b8c3b58c767034cb557e09c8af | [
"MIT"
] | null | null | null | priv/catalogue/button/example03.ex | aarongraham/surface_bulma | 0b8ab633465681b8c3b58c767034cb557e09c8af | [
"MIT"
] | null | null | null | priv/catalogue/button/example03.ex | aarongraham/surface_bulma | 0b8ab633465681b8c3b58c767034cb557e09c8af | [
"MIT"
] | null | null | null | defmodule SurfaceBulma.Catalogue.Button.Example03 do
  # Catalogue metadata: which component is demonstrated, where it lives, and
  # how the preview pane is sized/laid out.
  use Surface.Catalogue.Example,
    subject: SurfaceBulma.Button,
    catalogue: SurfaceBulma.Catalogue,
    title: "Outlined, Rounded and Loading",
    height: "90px",
    code_perc: 65,
    container: {:div, class: "buttons"}

  # Renders three buttons showcasing the outlined, rounded and loading props.
  def render(assigns) do
    ~F"""
    <Button color="info" outlined rounded>Outlined</Button>
    <Button color="primary" rounded>Rounded</Button>
    <Button color="danger" rounded loading>Loading</Button>
    """
  end
end
| 28.277778 | 59 | 0.695481 |
08011a5bd363fc5ae370136b07cb78286d356a8c | 12,603 | ex | Elixir | lib/aws/generated/service_discovery.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/service_discovery.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/service_discovery.ex | smanolloff/aws-elixir | c7cb6577802f5010be7e7b6ccb2c0f3c8c73ea84 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.ServiceDiscovery do
  @moduledoc """
  AWS Cloud Map lets you configure public DNS, private DNS, or HTTP namespaces
  that your microservice applications run in.
  When an instance of the service becomes available, you can call the AWS Cloud
  Map API to register the instance with AWS Cloud Map. For public or private DNS
  namespaces, AWS Cloud Map automatically creates DNS records and an optional
  health check. Clients that submit public or private DNS queries, or HTTP
  requests, for the service receive an answer that contains up to eight healthy
  records.
  """
  @doc """
  Creates an HTTP namespace.
  Service instances that you register using an HTTP namespace can be discovered
  using a `DiscoverInstances` request but can't be discovered using DNS.
  For the current quota on the number of namespaces that you can create using the
  same AWS account, see [AWS Cloud Map quotas](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
  in the *AWS Cloud Map Developer Guide*.
  """
  def create_http_namespace(client, input, options \\ []) do
    request(client, "CreateHttpNamespace", input, options)
  end
  @doc """
  Creates a private namespace based on DNS, which will be visible only inside a
  specified Amazon VPC.
  The namespace defines your service naming scheme. For example, if you name your
  namespace `example.com` and name your service `backend`, the resulting DNS name
  for the service will be `backend.example.com`. For the current quota on the
  number of namespaces that you can create using the same AWS account, see [AWS Cloud Map
  Limits](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
  in the *AWS Cloud Map Developer Guide*.
  """
  def create_private_dns_namespace(client, input, options \\ []) do
    request(client, "CreatePrivateDnsNamespace", input, options)
  end
  @doc """
  Creates a public namespace based on DNS, which will be visible on the internet.
  The namespace defines your service naming scheme. For example, if you name your
  namespace `example.com` and name your service `backend`, the resulting DNS name
  for the service will be `backend.example.com`. For the current quota on the
  number of namespaces that you can create using the same AWS account, see [AWS Cloud Map
  Limits](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
  in the *AWS Cloud Map Developer Guide*.
  """
  def create_public_dns_namespace(client, input, options \\ []) do
    request(client, "CreatePublicDnsNamespace", input, options)
  end
  @doc """
  Creates a service, which defines the configuration for the following entities:
    * For public and private DNS namespaces, one of the following
  combinations of DNS records in Amazon Route 53:
      * `A`
      * `AAAA`
      * `A` and `AAAA`
      * `SRV`
      * `CNAME`
    * Optionally, a health check
  After you create the service, you can submit a
  [RegisterInstance](https://docs.aws.amazon.com/cloud-map/latest/api/API_RegisterInstance.html) request, and AWS Cloud Map uses the values in the configuration to create the
  specified entities.
  For the current quota on the number of instances that you can register using the
  same namespace and using the same service, see [AWS Cloud Map
  Limits](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
  in the *AWS Cloud Map Developer Guide*.
  """
  def create_service(client, input, options \\ []) do
    request(client, "CreateService", input, options)
  end
  @doc """
  Deletes a namespace from the current account.
  If the namespace still contains one or more services, the request fails.
  """
  def delete_namespace(client, input, options \\ []) do
    request(client, "DeleteNamespace", input, options)
  end
  @doc """
  Deletes a specified service.
  If the service still contains one or more registered instances, the request
  fails.
  """
  def delete_service(client, input, options \\ []) do
    request(client, "DeleteService", input, options)
  end
  @doc """
  Deletes the Amazon Route 53 DNS records and health check, if any, that AWS Cloud
  Map created for the specified instance.
  """
  def deregister_instance(client, input, options \\ []) do
    request(client, "DeregisterInstance", input, options)
  end
  @doc """
  Discovers registered instances for a specified namespace and service.
  You can use `DiscoverInstances` to discover instances for any type of namespace.
  For public and private DNS namespaces, you can also use DNS queries to discover
  instances.
  """
  def discover_instances(client, input, options \\ []) do
    request(client, "DiscoverInstances", input, options)
  end
  @doc """
  Gets information about a specified instance.
  """
  def get_instance(client, input, options \\ []) do
    request(client, "GetInstance", input, options)
  end
  @doc """
  Gets the current health status (`Healthy`, `Unhealthy`, or `Unknown`) of one or
  more instances that are associated with a specified service.
  There is a brief delay between when you register an instance and when the health
  status for the instance is available.
  """
  def get_instances_health_status(client, input, options \\ []) do
    request(client, "GetInstancesHealthStatus", input, options)
  end
  @doc """
  Gets information about a namespace.
  """
  def get_namespace(client, input, options \\ []) do
    request(client, "GetNamespace", input, options)
  end
  @doc """
  Gets information about any operation that returns an operation ID in the
  response, such as a `CreateService` request.
  To get a list of operations that match specified criteria, see
  [ListOperations](https://docs.aws.amazon.com/cloud-map/latest/api/API_ListOperations.html).
  """
  def get_operation(client, input, options \\ []) do
    request(client, "GetOperation", input, options)
  end
  @doc """
  Gets the settings for a specified service.
  """
  def get_service(client, input, options \\ []) do
    request(client, "GetService", input, options)
  end
  @doc """
  Lists summary information about the instances that you registered by using a
  specified service.
  """
  def list_instances(client, input, options \\ []) do
    request(client, "ListInstances", input, options)
  end
  @doc """
  Lists summary information about the namespaces that were created by the current
  AWS account.
  """
  def list_namespaces(client, input, options \\ []) do
    request(client, "ListNamespaces", input, options)
  end
  @doc """
  Lists operations that match the criteria that you specify.
  """
  def list_operations(client, input, options \\ []) do
    request(client, "ListOperations", input, options)
  end
  @doc """
  Lists summary information for all the services that are associated with one or
  more specified namespaces.
  """
  def list_services(client, input, options \\ []) do
    request(client, "ListServices", input, options)
  end
  @doc """
  Lists tags for the specified resource.
  """
  def list_tags_for_resource(client, input, options \\ []) do
    request(client, "ListTagsForResource", input, options)
  end
  @doc """
  Creates or updates one or more records and, optionally, creates a health check
  based on the settings in a specified service.
  When you submit a `RegisterInstance` request, the following occurs:
    * For each DNS record that you define in the service that is
  specified by `ServiceId`, a record is created or updated in the hosted zone that
  is associated with the corresponding namespace.
    * If the service includes `HealthCheckConfig`, a health check is
  created based on the settings in the health check configuration.
    * The health check, if any, is associated with each of the new or
  updated records.
  One `RegisterInstance` request must complete before you can submit another
  request and specify the same service ID and instance ID.
  For more information, see
  [CreateService](https://docs.aws.amazon.com/cloud-map/latest/api/API_CreateService.html). When AWS Cloud Map receives a DNS query for the specified DNS name, it returns
  the applicable value:
    * **If the health check is healthy**: returns all the records
    * **If the health check is unhealthy**: returns the applicable value
  for the last healthy instance
    * **If you didn't specify a health check configuration**: returns
  all the records
  For the current quota on the number of instances that you can register using the
  same namespace and using the same service, see [AWS Cloud Map
  Limits](https://docs.aws.amazon.com/cloud-map/latest/dg/cloud-map-limits.html)
  in the *AWS Cloud Map Developer Guide*.
  """
  def register_instance(client, input, options \\ []) do
    request(client, "RegisterInstance", input, options)
  end
  @doc """
  Adds one or more tags to the specified resource.
  """
  def tag_resource(client, input, options \\ []) do
    request(client, "TagResource", input, options)
  end
  @doc """
  Removes one or more tags from the specified resource.
  """
  def untag_resource(client, input, options \\ []) do
    request(client, "UntagResource", input, options)
  end
  @doc """
  Submits a request to change the health status of a custom health check to
  healthy or unhealthy.
  You can use `UpdateInstanceCustomHealthStatus` to change the status only for
  custom health checks, which you define using `HealthCheckCustomConfig` when you
  create a service. You can't use it to change the status for Route 53 health
  checks, which you define using `HealthCheckConfig`.
  For more information, see
  [HealthCheckCustomConfig](https://docs.aws.amazon.com/cloud-map/latest/api/API_HealthCheckCustomConfig.html).
  """
  def update_instance_custom_health_status(client, input, options \\ []) do
    request(client, "UpdateInstanceCustomHealthStatus", input, options)
  end
  @doc """
  Submits a request to perform the following operations:
    * Update the TTL setting for existing `DnsRecords` configurations
    * Add, update, or delete `HealthCheckConfig` for a specified service
  You can't add, update, or delete a `HealthCheckCustomConfig` configuration.
  For public and private DNS namespaces, note the following:
    * If you omit any existing `DnsRecords` or `HealthCheckConfig`
  configurations from an `UpdateService` request, the configurations are deleted
  from the service.
    * If you omit an existing `HealthCheckCustomConfig` configuration
  from an `UpdateService` request, the configuration is not deleted from the
  service.
  When you update settings for a service, AWS Cloud Map also updates the
  corresponding settings in all the records and health checks that were created by
  using the specified service.
  """
  def update_service(client, input, options \\ []) do
    request(client, "UpdateService", input, options)
  end
  # Shared dispatcher for every API action above: pins the service name,
  # builds the signed AWS JSON-1.1 request (the action goes in the
  # X-Amz-Target header, not the URL) and POSTs it.
  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, action, input, options) do
    client = %{client | service: "servicediscovery"}
    host = build_host("servicediscovery", client)
    url = build_url(host, client)
    headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "Route53AutoNaming_v20170314.#{action}"}
    ]
    payload = encode!(client, input)
    headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
    post(client, url, payload, headers, options)
  end
  # Performs the HTTP POST. Only status 200 is success; an empty body decodes
  # to nil (hence `map() | nil` in the spec above). Any other 2xx/4xx/5xx
  # response is wrapped in {:error, {:unexpected_response, response}}.
  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: body} = response} ->
        body = if body != "", do: decode!(client, body)
        {:ok, body, response}
      {:ok, response} ->
        {:error, {:unexpected_response, response}}
      error = {:error, _reason} -> error
    end
  end
  # Host resolution: region "local" targets localhost (or an explicit
  # endpoint override); otherwise "<service>.<region>.<endpoint>".
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
    endpoint
  end
  defp build_host(_endpoint_prefix, %{region: "local"}) do
    "localhost"
  end
  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
    "#{endpoint_prefix}.#{region}.#{endpoint}"
  end
  defp build_url(host, %{:proto => proto, :port => port}) do
    "#{proto}://#{host}:#{port}/"
  end
  # JSON (de)serialization is delegated to the client's configured codec.
  defp encode!(client, payload) do
    AWS.Client.encode!(client, payload, :json)
  end
  defp decode!(client, payload) do
    AWS.Client.decode!(client, payload, :json)
  end
end
| 34.911357 | 174 | 0.71475 |
08013ef75a89b93d6a97f93070407f2bd0e69b25 | 2,281 | exs | Elixir | test/level_web/controllers/session_controller_test.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 928 | 2018-04-03T16:18:11.000Z | 2019-09-09T17:59:55.000Z | test/level_web/controllers/session_controller_test.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 74 | 2018-04-03T00:46:50.000Z | 2019-03-10T18:57:27.000Z | test/level_web/controllers/session_controller_test.exs | mindriot101/level | 0a2cbae151869c2d9b79b3bfb388f5d00739ae12 | [
"Apache-2.0"
] | 89 | 2018-04-03T17:33:20.000Z | 2019-08-19T03:40:20.000Z | defmodule LevelWeb.SessionControllerTest do
  use LevelWeb.ConnCase, async: true

  # Renders the login form and bounces already-authenticated users.
  describe "GET /login" do
    test "includes the correct heading", %{conn: conn} do
      conn =
        conn
        |> get("/login")
      assert html_response(conn, 200) =~ "Sign in to Level"
    end
    test "redirects to spaces path if already signed in", %{conn: conn} do
      password = "$ecret$"
      {:ok, %{user: user}} = create_user_and_space(%{password: password})
      signed_in_conn =
        conn
        |> post("/login", %{"session" => %{"email" => user.email, "password" => password}})
      # recycle/1 carries the session cookie into the follow-up request.
      conn =
        signed_in_conn
        |> recycle()
        |> get("/login")
      assert redirected_to(conn, 302) =~ "/teams"
    end
  end
  # Credential submission: success assigns current_user, failure re-renders.
  describe "POST /login" do
    setup %{conn: conn} do
      password = "$ecret$"
      {:ok, %{user: user}} = create_user_and_space(%{password: password})
      {:ok, %{conn: conn, user: user, password: password}}
    end
    test "signs in the user", %{conn: conn, user: user, password: password} do
      conn =
        conn
        |> post("/login", %{"session" => %{"email" => user.email, "password" => password}})
      assert conn.assigns.current_user.id == user.id
      assert redirected_to(conn, 302) =~ "/"
    end
    test "renders an error with invalid credentials", %{conn: conn, user: user} do
      conn =
        conn
        |> post("/login", %{"session" => %{"email" => user.email, "password" => "wrong"}})
      assert conn.assigns.current_user == nil
      assert html_response(conn, 200) =~ "Oops, those credentials are not correct"
    end
  end
  # Signing out clears current_user and returns to the login page.
  describe "GET /logout" do
    setup %{conn: conn} do
      password = "$ecret$"
      {:ok, %{user: user}} = create_user_and_space(%{password: password})
      {:ok, %{conn: conn, user: user, password: password}}
    end
    test "logs the user out", %{conn: conn, user: user, password: password} do
      signed_in_conn =
        conn
        |> post("/login", %{"session" => %{"email" => user.email, "password" => password}})
      signed_out_conn =
        signed_in_conn
        |> recycle()
        |> get("/logout")
      assert signed_out_conn.assigns[:current_user] == nil
      assert redirected_to(signed_out_conn, 302) =~ "/login"
    end
  end
end
| 29.24359 | 91 | 0.580447 |
080142dfbce8c6ffc6168f15e06ad5bf9b3ec8b3 | 2,310 | ex | Elixir | verify/lib/mix/ua_inspector/verify/fixtures.ex | Route-Now/ua_inspector | 4e6ced73fc40b6e9947e0a1b0612d5517f343293 | [
"Apache-2.0"
] | null | null | null | verify/lib/mix/ua_inspector/verify/fixtures.ex | Route-Now/ua_inspector | 4e6ced73fc40b6e9947e0a1b0612d5517f343293 | [
"Apache-2.0"
] | null | null | null | verify/lib/mix/ua_inspector/verify/fixtures.ex | Route-Now/ua_inspector | 4e6ced73fc40b6e9947e0a1b0612d5517f343293 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.UAInspector.Verify.Fixtures do
@moduledoc """
Utility module to bundle/download verification fixtures.
"""
alias UAInspector.Config
  # Root of the upstream matomo/device-detector fixture directory on GitHub.
  @fixture_base_url "https://raw.githubusercontent.com/matomo-org/device-detector/master/Tests/fixtures"

  # File names of every upstream fixture mirrored locally by `download/0`.
  @fixtures [
    "bots.yml",
    "camera.yml",
    "car_browser.yml",
    "console.yml",
    "desktop.yml",
    "feature_phone.yml",
    "feed_reader.yml",
    "mediaplayer.yml",
    "mobile_apps.yml",
    "peripheral.yml",
    "phablet.yml",
    "portable_media_player.yml",
    "smart_display.yml",
    "smart_speaker.yml",
    "smartphone.yml",
    "smartphone-1.yml",
    "smartphone-2.yml",
    "smartphone-3.yml",
    "smartphone-4.yml",
    "smartphone-5.yml",
    "smartphone-6.yml",
    "smartphone-7.yml",
    "smartphone-8.yml",
    "smartphone-9.yml",
    "smartphone-10.yml",
    "smartphone-11.yml",
    "smartphone-12.yml",
    "smartphone-13.yml",
    "smartphone-14.yml",
    "smartphone-15.yml",
    "smartphone-16.yml",
    "smartphone-17.yml",
    "smartphone-18.yml",
    "smartphone-19.yml",
    "smartphone-20.yml",
    "smartphone-21.yml",
    "smartphone-22.yml",
    "smartphone-23.yml",
    "smartphone-24.yml",
    "smartphone-25.yml",
    "smartphone-26.yml",
    "tablet.yml",
    "tablet-1.yml",
    "tablet-2.yml",
    "tablet-3.yml",
    "tablet-4.yml",
    "tablet-5.yml",
    "tablet-6.yml",
    "tv.yml",
    "tv-1.yml",
    "unknown.yml",
    "wearable.yml"
  ]
  # Downloads every known fixture into `download_path/0`, recreating the
  # directory first. Progress is reported through the Mix shell. Always
  # returns :ok (individual download failures raise in download_fixture/2).
  def download do
    Mix.shell().info("Download path: #{download_path()}")
    setup()
    download(@fixtures)
    Mix.shell().info("Download complete!")
    :ok
  end
def download([]), do: :ok
def download([fixture | fixtures]) do
Mix.shell().info(".. downloading: #{fixture}")
remote = "#{@fixture_base_url}/#{fixture}"
local = download_path(fixture)
download_fixture(remote, local)
download(fixtures)
end
  # Fetches `remote` through the configured downloader adapter and writes the
  # body to `local`. Crashes (via the {:ok, _} match or File.write!) on any
  # download or filesystem failure.
  defp download_fixture(remote, local) do
    {:ok, content} = Config.downloader_adapter().read_remote(remote)
    File.write!(local, content)
  end
  # Absolute path of the local fixtures directory (relative to this file).
  def download_path, do: Path.expand("../../../../fixtures", __DIR__)
  # Absolute path for a single fixture `file` inside that directory.
  def download_path(file), do: Path.join(download_path(), file)
  # All known fixture file names.
  def list, do: @fixtures
  # Wipes and recreates the fixtures directory so downloads start clean.
  def setup do
    File.rm_rf!(download_path())
    File.mkdir_p!(download_path())
  end
end
| 22.647059 | 104 | 0.62684 |
080181a4ecc4b1800bc47b98839c198e9cdea117 | 548 | exs | Elixir | api/test/views/error_view_test.exs | panayi/react-phoenix-starter-kit | 6615402deeac9b359954cdd8b523f7735b75eec9 | [
"MIT"
] | null | null | null | api/test/views/error_view_test.exs | panayi/react-phoenix-starter-kit | 6615402deeac9b359954cdd8b523f7735b75eec9 | [
"MIT"
] | null | null | null | api/test/views/error_view_test.exs | panayi/react-phoenix-starter-kit | 6615402deeac9b359954cdd8b523f7735b75eec9 | [
"MIT"
] | null | null | null | defmodule App.ErrorViewTest do
  use App.ConnCase, async: true
  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View
  test "renders 404.html" do
    assert render_to_string(App.ErrorView, "404.html", []) ==
           "Page not found"
  end
  test "render 500.html" do
    assert render_to_string(App.ErrorView, "500.html", []) ==
           "Server internal error"
  end
  # Unknown templates fall through template_not_found/2 and render as 500.
  test "render any other" do
    assert render_to_string(App.ErrorView, "505.html", []) ==
           "Server internal error"
  end
end
| 24.909091 | 66 | 0.664234 |
080188658f121b8315e3797599633443bfc4d118 | 1,376 | ex | Elixir | lib/livebook_web/live/output/frame_dynamic_live.ex | alaadahmed/livebook | 24668c6edb6ee638a3f5291b27bd42a3dfc0c18d | [
"Apache-2.0"
] | 1,846 | 2021-04-13T14:46:36.000Z | 2021-07-14T20:37:40.000Z | lib/livebook_web/live/output/frame_dynamic_live.ex | alaadahmed/livebook | 24668c6edb6ee638a3f5291b27bd42a3dfc0c18d | [
"Apache-2.0"
] | 223 | 2021-04-13T14:55:06.000Z | 2021-07-15T00:25:23.000Z | lib/livebook_web/live/output/frame_dynamic_live.ex | alaadahmed/livebook | 24668c6edb6ee638a3f5291b27bd42a3dfc0c18d | [
"Apache-2.0"
] | 130 | 2021-04-13T15:43:55.000Z | 2021-07-12T16:57:46.000Z | defmodule LivebookWeb.Output.FrameDynamicLive do
  use LivebookWeb, :live_view

  @impl true
  # The session must carry the frame owner `pid`, the frame `id`, the
  # notebook `session_id`, and the current `input_values` /
  # `cell_validity_status`. On the connected mount we notify the owner
  # process, which answers with a :connect_reply message (handled below)
  # carrying the frame's current output; until then output is nil.
  def mount(
        _params,
        %{
          "pid" => pid,
          "id" => id,
          "session_id" => session_id,
          "input_values" => input_values,
          "cell_validity_status" => cell_validity_status
        },
        socket
      ) do
    if connected?(socket) do
      send(pid, {:connect, self()})
    end
    {:ok,
     assign(socket,
       id: id,
       output: nil,
       session_id: session_id,
       input_values: input_values,
       cell_validity_status: cell_validity_status
     )}
  end
  @impl true
  # Renders the frame's single output through the shared outputs component,
  # or a placeholder while the frame has not produced anything yet.
  def render(assigns) do
    ~H"""
    <div>
      <%= if @output do %>
        <LivebookWeb.Output.outputs
          outputs={[@output]}
          id={"#{@id}-frame"}
          socket={@socket}
          session_id={@session_id}
          runtime={nil}
          input_values={@input_values}
          cell_validity_status={@cell_validity_status} />
      <% else %>
        <div class="text-gray-300">
          Empty output frame
        </div>
      <% end %>
    </div>
    """
  end
@impl true
def handle_info({:connect_reply, %{output: output}}, socket) do
{:noreply, assign(socket, output: output)}
end
def handle_info({:render, %{output: output}}, socket) do
{:noreply, assign(socket, output: output)}
end
end
| 22.557377 | 65 | 0.548692 |
0801b2c5bd41c9787e2617a7a7210bd152ac0b66 | 386 | ex | Elixir | lib/phoenix_bloc/web/views/error_view.ex | kevinzg/phoenix-bloc | 7c61b6abfdebe25e0e43948b325093b5c98f48c8 | [
"MIT"
] | 5 | 2017-07-13T03:38:55.000Z | 2019-03-25T18:19:54.000Z | lib/phoenix_bloc/web/views/error_view.ex | kevinzg/phoenix-bloc | 7c61b6abfdebe25e0e43948b325093b5c98f48c8 | [
"MIT"
] | null | null | null | lib/phoenix_bloc/web/views/error_view.ex | kevinzg/phoenix-bloc | 7c61b6abfdebe25e0e43948b325093b5c98f48c8 | [
"MIT"
] | 2 | 2017-04-26T04:59:46.000Z | 2021-09-19T12:53:49.000Z | defmodule PhoenixBloc.Web.ErrorView do
use PhoenixBloc.Web, :view
def render("404.html", _assigns) do
"Page not found"
end
def render("500.html", _assigns) do
"Internal server error"
end
# In case no render clause matches or no
# template is found, let's render it as 500
def template_not_found(_template, assigns) do
render "500.html", assigns
end
end
| 21.444444 | 47 | 0.704663 |
0801d6b2bb8d52f2de7b1988698185b9cff8d574 | 2,642 | ex | Elixir | lib/ash_policy_authorizer/check.ex | jonathanstiansen/ash_policy_authorizer | d383c63d890c43211c2ac65c8ebb1aeed7fddfa5 | [
"MIT"
] | null | null | null | lib/ash_policy_authorizer/check.ex | jonathanstiansen/ash_policy_authorizer | d383c63d890c43211c2ac65c8ebb1aeed7fddfa5 | [
"MIT"
] | null | null | null | lib/ash_policy_authorizer/check.ex | jonathanstiansen/ash_policy_authorizer | d383c63d890c43211c2ac65c8ebb1aeed7fddfa5 | [
"MIT"
defmodule AshPolicyAuthorizer.Check do
  @moduledoc """
  A behaviour for declaring checks, which can be used to easily construct
  authorization rules.

  If a check can be expressed simply as a function of the actor, or the context of the request,
  see `AshPolicyAuthorizer.SimpleCheck` for an easy way to write that check.

  If a check can be expressed simply with a filter statement, see `AshPolicyAuthorizer.FilterCheck`
  for an easy way to write that check.
  """

  @type options :: Keyword.t()
  @type authorizer :: AshPolicyAuthorizer.Authorizer.t()
  @type check_type :: :simple | :filter | :manual

  @doc """
  Strict checks should be cheap, and should never result in external calls (like database or api)

  It should return `{:ok, true}` if it can tell that the request is authorized, and `{:ok, false}` if
  it can tell that it is not. If unsure, it should return `{:ok, :unknown}`
  """
  @callback strict_check(Ash.actor(), authorizer(), options) :: {:ok, boolean | :unknown}

  @doc """
  An optional callback, that allows the check to work with policies set to `access_type :filter`

  Return a keyword list filter that will be applied to the query being made, and will scope the results to match the rule
  """
  @callback auto_filter(Ash.actor(), authorizer(), options()) :: Keyword.t()

  @doc """
  An optional callback, that allows the check to work with policies set to `access_type :runtime`

  Takes a list of records, and returns `{:ok, true}` if they are all authorized, or `{:ok, list}` containing the list
  of records that are authorized. You can also just return the whole list, `{:ok, true}` is just a shortcut.

  Can also return `{:error, error}` if something goes wrong
  """
  @callback check(Ash.actor(), list(Ash.record()), map, options) ::
              {:ok, list(Ash.record()) | boolean} | {:error, Ash.error()}

  @doc "Describe the check in human readable format, given the options"
  @callback describe(options()) :: String.t()

  @doc """
  The type of the check

  `:manual` checks must be written by hand as standard check modules
  `:filter` checks can use `AshPolicyAuthorizer.FilterCheck` for simplicity
  `:simple` checks can use `AshPolicyAuthorizer.SimpleCheck` for simplicity
  """
  @callback type() :: check_type()

  @optional_callbacks check: 4, auto_filter: 3

  @doc "Returns true if `module` implements the optional `check/4` callback."
  def defines_check?(module) do
    # Kernel.function_exported?/3 is the idiomatic wrapper around
    # :erlang.function_exported/3; the module must already be loaded.
    function_exported?(module, :check, 4)
  end

  @doc "Returns true if `module` implements the optional `auto_filter/3` callback."
  def defines_auto_filter?(module) do
    function_exported?(module, :auto_filter, 3)
  end

  defmacro __using__(_opts) do
    quote do
      @behaviour AshPolicyAuthorizer.Check

      # Manual by default; specialized check kinds override type/0.
      def type, do: :manual
    end
  end
end
| 38.852941 | 121 | 0.708176 |
0802320bf550334477adacf3e31d584ccad3549d | 4,021 | ex | Elixir | lib/swoosh/adapters/sparkpost.ex | nathf/swoosh | 9507d32e3c14f52ef37e10996f161898c0c11250 | [
"MIT"
] | null | null | null | lib/swoosh/adapters/sparkpost.ex | nathf/swoosh | 9507d32e3c14f52ef37e10996f161898c0c11250 | [
"MIT"
] | null | null | null | lib/swoosh/adapters/sparkpost.ex | nathf/swoosh | 9507d32e3c14f52ef37e10996f161898c0c11250 | [
"MIT"
defmodule Swoosh.Adapters.SparkPost do
  @moduledoc ~S"""
  An adapter that sends email using the SparkPost API.

  For reference: [SparkPost API docs](https://developers.sparkpost.com/api/)

  ## Example

      # config/config.exs
      config :sample, Sample.Mailer,
        adapter: Swoosh.Adapters.SparkPost,
        api_key: "my-api-key",
        endpoint: "https://api.sparkpost.com/api/v1"
        # or "https://YOUR_DOMAIN.sparkpostelite.com/api/v1" for enterprise

      # lib/sample/mailer.ex
      defmodule Sample.Mailer do
        use Swoosh.Mailer, otp_app: :sample
      end
  """

  use Swoosh.Adapter, required_config: [:api_key]

  alias Swoosh.Email
  import Swoosh.Email.Render

  # Default public SparkPost API endpoint; can be overridden with the
  # `:endpoint` config key (e.g. for enterprise deployments).
  @endpoint "https://api.sparkpost.com/api/v1"

  @doc """
  Delivers the given email via SparkPost's `/transmissions` endpoint.

  Returns `{:ok, decoded_response}` on HTTP 200, `{:error, {status, body}}`
  for statuses above 399, or `{:error, reason}` when the HTTP call itself fails.
  """
  # NOTE(review): statuses 201..399 match no case clause and would raise a
  # CaseClauseError — SparkPost appears to answer 200 on success, but confirm.
  def deliver(%Email{} = email, config \\ []) do
    headers = prepare_headers(email, config)
    body = email |> prepare_body |> Swoosh.json_library.encode!
    url = [endpoint(config), "/transmissions"]

    case :hackney.post(url, headers, body, [:with_body]) do
      {:ok, 200, _headers, body} ->
        {:ok, Swoosh.json_library.decode!(body)}

      {:ok, code, _headers, body} when code > 399 ->
        {:error, {code, Swoosh.json_library.decode!(body)}}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Configured endpoint if present, otherwise the public default.
  defp endpoint(config), do: config[:endpoint] || @endpoint

  # The raw API key goes into the Authorization header (no "Bearer" prefix).
  defp prepare_headers(_email, config) do
    [{"User-Agent", "swoosh/#{Swoosh.version}"},
     {"Authorization", config[:api_key]},
     {"Content-Type", "application/json"}]
  end

  # Builds the base transmission payload, then layers the optional parts
  # (reply-to, cc, bcc, custom headers, attachments) on top of it.
  defp prepare_body(%{
    from: {name, address},
    to: to,
    subject: subject,
    text_body: text,
    html_body: html,
  } = email) do
    %{
      content: %{
        from: %{
          name: name,
          email: address
        },
        subject: subject,
        text: text,
        html: html,
        headers: %{},
      },
      recipients: prepare_recipients(to, to)
    }
    |> prepare_reply_to(email)
    |> prepare_cc(email)
    |> prepare_bcc(email)
    |> prepare_custom_headers(email)
    |> prepare_attachments(email)
  end

  defp prepare_reply_to(body, %{reply_to: nil}), do: body

  defp prepare_reply_to(body, %{reply_to: reply_to}) do
    put_in(body, [:content, :reply_to], render_recipient(reply_to))
  end

  # CC addresses are appended to the recipients list AND rendered into the
  # "CC" content header.
  defp prepare_cc(body, %{cc: []}), do: body

  defp prepare_cc(body, %{cc: cc, to: to}) do
    body
    |> update_in([:recipients], fn list ->
      list ++ prepare_recipients(cc, to)
    end)
    |> put_in([:content, :headers, "CC"], render_recipient(cc))
  end

  # BCC addresses are only appended to the recipients list — no header entry.
  defp prepare_bcc(body, %{bcc: []}), do: body

  defp prepare_bcc(body, %{bcc: bcc, to: to}) do
    update_in(body.recipients, fn list ->
      list ++ prepare_recipients(bcc, to)
    end)
  end

  # Each recipient carries `header_to` with the comma-joined "To" addresses,
  # so every entry (including cc/bcc) shows the same visible "To" line.
  defp prepare_recipients(recipients, to) do
    Enum.map(recipients, fn {name, address} ->
      %{
        address: %{
          name: name,
          email: address,
          header_to: raw_email_addresses(to)
        }
      }
    end)
  end

  # "a@x.com,b@y.com" — bare addresses without display names.
  defp raw_email_addresses(mailboxes) do
    mailboxes |> Enum.map(fn {_name, address} -> address end) |> Enum.join(",")
  end

  # Splits attachments into regular files and inline images; SparkPost takes
  # them under separate top-level keys (:attachments vs :inline_images).
  defp prepare_attachments(body, %{attachments: []}), do: body

  defp prepare_attachments(body, %{attachments: attachments}) do
    {standalone_attachments, inline_attachments} =
      Enum.split_with(attachments, fn %{type: type} -> type == :attachment end)

    body
    |> inject_attachments(:attachments, standalone_attachments)
    |> inject_attachments(:inline_images, inline_attachments)
  end

  defp inject_attachments(body, _key, []), do: body

  defp inject_attachments(body, key, attachments) do
    Map.put(body, key, Enum.map(
      attachments,
      fn %{content_type: type, filename: name} = attachment ->
        %{type: type, name: name, data: Swoosh.Attachment.get_content(attachment, :base64)}
      end
    ))
  end

  # User-supplied headers win over the defaults built in prepare_body/1.
  defp prepare_custom_headers(body, %{headers: headers}) do
    custom_headers = Map.merge(body.content.headers, headers)
    put_in(body, [:content, :headers], custom_headers)
  end
end
| 28.51773 | 91 | 0.631684 |
0802a03d5f6fbc8deec2e6704c150e39424213a3 | 3,873 | ex | Elixir | apps/toniefy/lib/toniex/clients/tonies.ex | benvp/toniefy.me | 9a432380e43f1c9a15ccd1910357d491d64a1dc0 | [
"MIT"
] | 17 | 2021-04-12T10:01:14.000Z | 2022-02-25T17:30:27.000Z | apps/toniefy/lib/toniex/clients/tonies.ex | benvp/toniefy.me | 9a432380e43f1c9a15ccd1910357d491d64a1dc0 | [
"MIT"
] | 1 | 2022-03-09T09:35:00.000Z | 2022-03-09T11:46:14.000Z | apps/toniefy/lib/toniex/clients/tonies.ex | benvp/toniefy | 9a432380e43f1c9a15ccd1910357d491d64a1dc0 | [
"MIT"
defmodule Toniex.Clients.Tonies do
  @moduledoc """
  HTTP client for the tonies cloud API (`api.tonie.cloud/v2`) and its
  OpenID Connect login endpoint.

  Request functions take a `Tesla.Client` built via `client/1` and return
  `{:ok, body}` on 2xx responses or `{:error, env_or_reason}` otherwise.
  """

  # OAuth client id presented to the login realm.
  @client_id "my-tonies"
  @token_url "https://login.tonies.com/auth/realms/tonies/protocol/openid-connect/token"
  @api_url "https://api.tonie.cloud/v2"

  @doc """
  Exchanges a refresh token for a fresh token set.

  On HTTP 200 returns a map with `:access_token`, `:expires_in`,
  `:refresh_token`, `:refresh_expires_in`, `:token_type` and `:scope`;
  for any other status returns `{:error, %Tesla.Env{}}`.
  """
  def get_token(refresh_token) do
    res =
      Tesla.client([
        Tesla.Middleware.FormUrlencoded,
        Tesla.Middleware.JSON
      ])
      |> Tesla.post!(
        @token_url,
        %{
          scope: "openid",
          client_id: @client_id,
          grant_type: "refresh_token",
          refresh_token: refresh_token
        }
      )

    case res.status do
      200 ->
        %{
          access_token: res.body["access_token"],
          expires_in: res.body["expires_in"],
          refresh_token: res.body["refresh_token"],
          refresh_expires_in: res.body["refresh_expires_in"],
          token_type: res.body["token_type"],
          scope: res.body["scope"]
        }

      _ ->
        {:error, res}
    end
  end

  @doc "Builds a `Tesla.Client` that sends the given bearer token to the v2 API."
  def client(token) do
    middlewares = [
      {Tesla.Middleware.BaseUrl, @api_url},
      {Tesla.Middleware.Headers, [{"Authorization", "Bearer #{token}"}]},
      Tesla.Middleware.JSON
    ]

    Tesla.client(middlewares)
  end

  @doc """
  Lists the creative tonies of the household the user owns
  (the first household whose `"access"` is `"owner"`).
  """
  # NOTE(review): assumes get_households/1 succeeds and an owned household
  # exists — an {:error, _} result or missing owner crashes downstream
  # (Enum.find on a non-enumerable / Map.fetch! on nil). Confirm intended.
  def get_main_household_creative_tonies(client) do
    household_id =
      client
      |> get_households()
      |> elem(1)
      |> Enum.find(fn x -> x["access"] == "owner" end)
      |> Map.fetch!("id")

    get_creative_tonies(client, household_id)
  end

  @doc "Fetches the creative tonies of the given household."
  def get_creative_tonies(client, household_id) do
    client
    |> Tesla.get!("/households/#{household_id}/creativetonies")
    |> handle_result()
  end

  @doc "Fetches all households visible to the authenticated user."
  def get_households(client) do
    client
    |> Tesla.get!("/households")
    |> handle_result()
  end

  @doc """
  Uploads a local file to the pre-signed upload URL obtained from the API
  (the `x-amz-*` form fields indicate an S3 pre-signed POST).

  Returns `{:ok, file_id}` when the storage answers 204, `{:error, response}`
  for any other response, or HTTPoison's `{:error, reason}` on transport failure.
  """
  def upload_file(client, path) do
    {:ok,
     %{
       "fileId" => key,
       "request" => %{
         "url" => url,
         "fields" => fields
       }
     }} = get_upload_meta(client)

    # We use HTTPoison here instead of Tesla because the multipart
    # transfer via Tesla fails for an unknown reason.
    result =
      HTTPoison.post(
        url,
        {:multipart,
         [
           {"key", key},
           {"x-amz-algorithm", fields["x-amz-algorithm"]},
           {"x-amz-credential", fields["x-amz-credential"]},
           {"x-amz-date", fields["x-amz-date"]},
           {"x-amz-signature", fields["x-amz-signature"]},
           {"x-amz-security-token", fields["x-amz-security-token"]},
           {"policy", fields["policy"]},
           {:file, Path.expand(path),
            {"form-data",
             [
               {"name", "file"},
               {"filename", key}
             ]},
            [
              {"Content-Type", MIME.from_path(Path.expand(path))}
            ]}
         ]}
      )

    case result do
      {:ok, %HTTPoison.Response{status_code: 204}} -> {:ok, key}
      {:ok, res} -> {:error, res}
      other -> other
    end
  end

  @doc "Fetches a creative tonie, including its chapter list."
  def get_chapters(client, household_id, tonie_id) do
    Tesla.get!(client, "/households/#{household_id}/creativetonies/#{tonie_id}")
    |> handle_result()
  end

  @doc """
  Updates the given chapters. Requires a list of chapters.

  If you add a new chapter, `file`, `id` and `title` are required.

  Note: Not sure if all existing chapters are required as it's a PATCH
  operation. I didn't test it, so include them, just in case.
  """
  def update_chapters(client, household_id, tonie_id, chapters) do
    Tesla.patch!(client, "/households/#{household_id}/creativetonies/#{tonie_id}", %{
      chapters: chapters
    })
    |> handle_result()
  end

  # Requests upload metadata: a file id plus the pre-signed POST url/fields.
  defp get_upload_meta(client) do
    client
    |> Tesla.post!("/file", %{headers: %{}})
    |> handle_result()
  end

  # Normalizes a Tesla.Env into {:ok, body} for 2xx, {:error, env} otherwise.
  defp handle_result(result) do
    case result do
      %Tesla.Env{status: status, body: body} when status >= 200 and status <= 299 ->
        {:ok, body}

      other ->
        {:error, other}
    end
  end
end
| 25.993289 | 88 | 0.568293 |
0803225b62a61ed938f8d66b1f683ae9bd289ee5 | 9,574 | exs | Elixir | lib/elixir/test/elixir/kernel/lexical_tracker_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/elixir/test/elixir/kernel/lexical_tracker_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/elixir/test/elixir/kernel/lexical_tracker_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule Kernel.LexicalTrackerTest do
use ExUnit.Case, async: true
alias Kernel.LexicalTracker, as: D
setup do
{:ok, pid} = D.start_link()
{:ok, [pid: pid]}
end
test "can add remote dispatch", config do
D.remote_dispatch(config[:pid], String, :runtime)
assert D.references(config[:pid]) == {[], [], [String], []}
D.remote_dispatch(config[:pid], String, :compile)
assert D.references(config[:pid]) == {[String], [], [], []}
D.remote_dispatch(config[:pid], String, :runtime)
assert D.references(config[:pid]) == {[String], [], [], []}
end
test "can add requires", config do
D.add_require(config[:pid], URI)
assert D.references(config[:pid]) == {[], [URI], [], []}
D.remote_dispatch(config[:pid], URI, :runtime)
assert D.references(config[:pid]) == {[], [URI], [URI], []}
D.remote_dispatch(config[:pid], URI, :compile)
assert D.references(config[:pid]) == {[URI], [URI], [], []}
end
test "can add module imports", config do
D.add_require(config[:pid], String)
D.add_import(config[:pid], String, [], 1, true)
D.import_dispatch(config[:pid], String, {:upcase, 1}, :runtime)
assert D.references(config[:pid]) == {[], [String], [String], []}
D.import_dispatch(config[:pid], String, {:upcase, 1}, :compile)
assert D.references(config[:pid]) == {[String], [String], [], []}
end
test "can add module with {function, arity} imports", config do
D.add_require(config[:pid], String)
D.add_import(config[:pid], String, [upcase: 1], 1, true)
D.import_dispatch(config[:pid], String, {:upcase, 1}, :compile)
assert D.references(config[:pid]) == {[String], [String], [], []}
end
test "can add aliases", config do
D.add_alias(config[:pid], String, 1, true)
D.alias_dispatch(config[:pid], String)
assert D.references(config[:pid]) == {[], [], [], []}
end
test "unused module imports", config do
D.add_import(config[:pid], String, [], 1, true)
assert D.collect_unused_imports(config[:pid]) == [{String, 1}]
end
test "used module imports are not unused", config do
D.add_import(config[:pid], String, [], 1, true)
D.import_dispatch(config[:pid], String, {:upcase, 1}, :compile)
assert D.collect_unused_imports(config[:pid]) == []
end
test "unused {module, function, arity} imports", config do
D.add_import(config[:pid], String, [upcase: 1], 1, true)
assert D.collect_unused_imports(config[:pid]) == [{String, 1}, {{String, :upcase, 1}, 1}]
end
test "used {module, function, arity} imports are not unused", config do
D.add_import(config[:pid], String, [upcase: 1], 1, true)
D.add_import(config[:pid], String, [downcase: 1], 1, true)
D.import_dispatch(config[:pid], String, {:upcase, 1}, :compile)
assert D.collect_unused_imports(config[:pid]) == [{{String, :downcase, 1}, 1}]
end
test "overwriting {module, function, arity} import with module import", config do
D.add_import(config[:pid], String, [upcase: 1], 1, true)
D.add_import(config[:pid], String, [], 1, true)
D.import_dispatch(config[:pid], String, {:downcase, 1}, :compile)
assert D.collect_unused_imports(config[:pid]) == []
end
test "imports with no warn are not unused", config do
D.add_import(config[:pid], String, [], 1, false)
assert D.collect_unused_imports(config[:pid]) == []
end
test "unused aliases", config do
D.add_alias(config[:pid], String, 1, true)
assert D.collect_unused_aliases(config[:pid]) == [{String, 1}]
end
test "used aliases are not unused", config do
D.add_alias(config[:pid], String, 1, true)
D.alias_dispatch(config[:pid], String)
assert D.collect_unused_aliases(config[:pid]) == []
end
test "aliases with no warn are not unused", config do
D.add_alias(config[:pid], String, 1, false)
assert D.collect_unused_aliases(config[:pid]) == []
end
describe "references" do
test "typespecs do not tag aliases nor types" do
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.AliasTypespecs do
alias Foo.Bar, as: Bar, warn: false
@type bar :: Foo.Bar | Foo.Bar.t
@opaque bar2 :: Foo.Bar.t
@typep bar3 :: Foo.Bar.t
@callback foo :: Foo.Bar.t
@macrocallback foo2(Foo.Bar.t) :: Foo.Bar.t
@spec foo(bar3) :: Foo.Bar.t
def foo(_), do: :ok
# References from specs are processed only late
@after_compile __MODULE__
def __after_compile__(env, _) do
send(self(), {:references, Kernel.LexicalTracker.references(env.lexical_tracker)})
end
end
""")
assert_received {:references, {compile, _exports, runtime, _}}
refute Elixir.Bar in runtime
refute Elixir.Bar in compile
refute Foo.Bar in runtime
refute Foo.Bar in compile
end
test "typespecs track structs as exports" do
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.StructTypespecs do
@type uri :: %URI{}
# References from specs are processed only late
@after_compile __MODULE__
def __after_compile__(env, _) do
send(self(), {:references, Kernel.LexicalTracker.references(env.lexical_tracker)})
end
end
""")
assert_received {:references, {compile, exports, runtime, _}}
assert URI in runtime
assert URI in exports
refute URI in compile
end
test "@compile adds a runtime dependency" do
{{compile, exports, runtime, _}, _binding} =
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.Compile do
@compile {:no_warn_undefined, String}
@compile {:no_warn_undefined, {Enum, :concat, 1}}
Kernel.LexicalTracker.references(__ENV__.lexical_tracker)
end |> elem(3)
""")
refute String in compile
refute String in exports
assert String in runtime
refute Enum in compile
refute Enum in exports
assert Enum in runtime
end
test "defdelegate with literal adds runtime dependency" do
{{compile, _exports, runtime, _}, _binding} =
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.Defdelegate do
defdelegate a, to: A
opts = [to: B]
defdelegate b, opts
Kernel.LexicalTracker.references(__ENV__.lexical_tracker)
end |> elem(3)
""")
refute A in compile
assert B in compile
assert A in runtime
end
test "imports adds an export dependency" do
{{compile, exports, runtime, _}, _binding} =
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.Imports do
import String, warn: false
Kernel.LexicalTracker.references(__ENV__.lexical_tracker)
end |> elem(3)
""")
refute String in compile
assert String in exports
refute String in runtime
end
test "structs are exports or compile time" do
{{compile, exports, runtime, _}, _binding} =
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.StructRuntime do
def expand, do: %URI{}
Kernel.LexicalTracker.references(__ENV__.lexical_tracker)
end |> elem(3)
""")
refute URI in compile
assert URI in exports
assert URI in runtime
{{compile, exports, runtime, _}, _binding} =
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.StructCompile do
_ = %URI{}
Kernel.LexicalTracker.references(__ENV__.lexical_tracker)
end |> elem(3)
""")
assert URI in compile
assert URI in exports
refute URI in runtime
end
test "Macro.struct! adds an export dependency" do
{{compile, exports, runtime, _}, _binding} =
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.MacroStruct do
# We do not use the alias because it would be a compile time
# dependency. The alias may happen in practice, which is the
# mechanism to make this expansion become a compile-time one.
# However, in some cases, such as typespecs, we don't necessarily
# want the compile-time dependency to happen.
Macro.struct!(:"Elixir.URI", __ENV__)
Kernel.LexicalTracker.references(__ENV__.lexical_tracker)
end |> elem(3)
""")
refute URI in compile
assert URI in exports
refute URI in runtime
end
test "compile_env! does not add a compile dependency" do
{{compile, exports, runtime, _}, _binding} =
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.CompileEnvStruct do
require Application
Application.compile_env(:elixir, URI)
Application.compile_env(:elixir, [:foo, URI, :bar])
Kernel.LexicalTracker.references(__ENV__.lexical_tracker)
end |> elem(3)
""")
refute URI in compile
refute URI in exports
assert URI in runtime
end
test "defmodule does not add a compile dependency" do
{{compile, exports, runtime, _}, _binding} =
Code.eval_string("""
defmodule Kernel.LexicalTrackerTest.Defmodule do
Kernel.LexicalTracker.references(__ENV__.lexical_tracker)
end |> elem(3)
""")
refute Kernel.LexicalTrackerTest.Defmodule in compile
refute Kernel.LexicalTrackerTest.Defmodule in exports
refute Kernel.LexicalTrackerTest.Defmodule in runtime
end
end
end
| 33.358885 | 93 | 0.633591 |
08035556fdd30af7c52296f5614231e5f5f53c74 | 2,006 | exs | Elixir | config/prod.exs | pelgero/featex | ef0bd9efa07f87b58f3df68e355a5fa6d367b04f | [
"Apache-2.0"
] | null | null | null | config/prod.exs | pelgero/featex | ef0bd9efa07f87b58f3df68e355a5fa6d367b04f | [
"Apache-2.0"
] | 1 | 2021-05-11T19:13:08.000Z | 2021-05-11T19:13:08.000Z | config/prod.exs | pelgero/featex | ef0bd9efa07f87b58f3df68e355a5fa6d367b04f | [
"Apache-2.0"
use Mix.Config

# NOTE(review): `use Mix.Config` is deprecated since Elixir 1.9 in favor of
# `import Config` — confirm the project's minimum Elixir version before switching.

# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :featex, FeatexWeb.Endpoint,
  url: [host: "example.com", port: 80],
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :featex, FeatexWeb.Endpoint,
#       ...
#       url: [host: "example.com", port: 443],
#       https: [
#         port: 443,
#         cipher_suite: :strong,
#         keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#         certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
#         transport_options: [socket_opts: [:inet6]]
#       ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
#     config :featex, FeatexWeb.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

# Finally import the config/prod.secret.exs which loads secrets
# and configuration from environment variables.
import_config "prod.secret.exs"
| 35.821429 | 66 | 0.713858 |
0803739f6555edba6ba1080ce66f2397af95774a | 159 | ex | Elixir | lib/liveview_bindings_web/controllers/page_controller.ex | rafalgolarz/liveview_bindings | c64fbf9661d5ff78c839daa7561549bbf6cbc731 | [
"MIT"
] | null | null | null | lib/liveview_bindings_web/controllers/page_controller.ex | rafalgolarz/liveview_bindings | c64fbf9661d5ff78c839daa7561549bbf6cbc731 | [
"MIT"
] | 1 | 2021-03-10T09:32:06.000Z | 2021-03-10T09:32:06.000Z | lib/liveview_bindings_web/controllers/page_controller.ex | rafalgolarz/liveview_bindings | c64fbf9661d5ff78c839daa7561549bbf6cbc731 | [
"MIT"
defmodule LiveviewBindingsWeb.PageController do
  use LiveviewBindingsWeb, :controller

  @doc "Renders the static index page."
  def index(conn, _params), do: render(conn, "index.html")
end
| 19.875 | 47 | 0.767296 |
08037dc6c8d534d2aecc90ff557433adf067e965 | 4,418 | ex | Elixir | clients/android_management/lib/google_api/android_management/v1/model/enterprise.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/android_management/lib/google_api/android_management/v1/model/enterprise.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | clients/android_management/lib/google_api/android_management/v1/model/enterprise.ex | yoshi-code-bot/elixir-google-api | cdb6032f01fac5ab704803113c39f2207e9e019d | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidManagement.V1.Model.Enterprise do
  @moduledoc """
  The configuration applied to an enterprise.

  ## Attributes

  *   `appAutoApprovalEnabled` (*type:* `boolean()`, *default:* `nil`) - Deprecated and unused.
  *   `contactInfo` (*type:* `GoogleApi.AndroidManagement.V1.Model.ContactInfo.t`, *default:* `nil`) - The enterprise contact info of an EMM-managed enterprise.
  *   `enabledNotificationTypes` (*type:* `list(String.t)`, *default:* `nil`) - The types of Google Pub/Sub notifications enabled for the enterprise.
  *   `enterpriseDisplayName` (*type:* `String.t`, *default:* `nil`) - The name of the enterprise displayed to users.
  *   `logo` (*type:* `GoogleApi.AndroidManagement.V1.Model.ExternalData.t`, *default:* `nil`) - An image displayed as a logo during device provisioning. Supported types are: image/bmp, image/gif, image/x-ico, image/jpeg, image/png, image/webp, image/vnd.wap.wbmp, image/x-adobe-dng.
  *   `name` (*type:* `String.t`, *default:* `nil`) - The name of the enterprise which is generated by the server during creation, in the form enterprises/{enterpriseId}.
  *   `primaryColor` (*type:* `integer()`, *default:* `nil`) - A color in RGB format that indicates the predominant color to display in the device management app UI. The color components are stored as follows: (red << 16) | (green << 8) | blue, where the value of each component is between 0 and 255, inclusive.
  *   `pubsubTopic` (*type:* `String.t`, *default:* `nil`) - The topic which Pub/Sub notifications are published to, in the form projects/{project}/topics/{topic}. This field is only required if Pub/Sub notifications are enabled.
  *   `signinDetails` (*type:* `list(GoogleApi.AndroidManagement.V1.Model.SigninDetail.t)`, *default:* `nil`) - Sign-in details of the enterprise.
  *   `termsAndConditions` (*type:* `list(GoogleApi.AndroidManagement.V1.Model.TermsAndConditions.t)`, *default:* `nil`) - Terms and conditions that must be accepted when provisioning a device for this enterprise. A page of terms is generated for each value in this list.
  """

  # Generated model: `field/1..3` comes from GoogleApi.Gax.ModelBase and
  # defines the struct keys plus Poison (de)serialization metadata.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :appAutoApprovalEnabled => boolean() | nil,
          :contactInfo => GoogleApi.AndroidManagement.V1.Model.ContactInfo.t() | nil,
          :enabledNotificationTypes => list(String.t()) | nil,
          :enterpriseDisplayName => String.t() | nil,
          :logo => GoogleApi.AndroidManagement.V1.Model.ExternalData.t() | nil,
          :name => String.t() | nil,
          :primaryColor => integer() | nil,
          :pubsubTopic => String.t() | nil,
          :signinDetails => list(GoogleApi.AndroidManagement.V1.Model.SigninDetail.t()) | nil,
          :termsAndConditions =>
            list(GoogleApi.AndroidManagement.V1.Model.TermsAndConditions.t()) | nil
        }

  field(:appAutoApprovalEnabled)
  field(:contactInfo, as: GoogleApi.AndroidManagement.V1.Model.ContactInfo)
  field(:enabledNotificationTypes, type: :list)
  field(:enterpriseDisplayName)
  field(:logo, as: GoogleApi.AndroidManagement.V1.Model.ExternalData)
  field(:name)
  field(:primaryColor)
  field(:pubsubTopic)
  field(:signinDetails, as: GoogleApi.AndroidManagement.V1.Model.SigninDetail, type: :list)

  field(:termsAndConditions,
    as: GoogleApi.AndroidManagement.V1.Model.TermsAndConditions,
    type: :list
  )
end
# Delegates Poison decoding to the generated model's decode/2.
defimpl Poison.Decoder, for: GoogleApi.AndroidManagement.V1.Model.Enterprise do
  def decode(value, options) do
    GoogleApi.AndroidManagement.V1.Model.Enterprise.decode(value, options)
  end
end
# Delegates Poison encoding to the shared Gax model encoder.
defimpl Poison.Encoder, for: GoogleApi.AndroidManagement.V1.Model.Enterprise do
  def encode(value, options) do
    GoogleApi.Gax.ModelBase.encode(value, options)
  end
end
| 55.924051 | 311 | 0.720688 |
0803acb2b6d8a56846db5b7588ff286de71eff34 | 3,919 | ex | Elixir | lib/protobuf.ex | addcninblue/protobuf-elixir | a18144710fe394909a37d4533bbc28a5636311d1 | [
"MIT"
] | 4 | 2021-01-16T02:21:44.000Z | 2022-03-04T18:42:18.000Z | lib/protobuf.ex | addcninblue/protobuf-elixir | a18144710fe394909a37d4533bbc28a5636311d1 | [
"MIT"
] | 5 | 2020-04-07T20:22:38.000Z | 2020-09-23T02:28:36.000Z | lib/protobuf.ex | addcninblue/protobuf-elixir | a18144710fe394909a37d4533bbc28a5636311d1 | [
"MIT"
] | 4 | 2020-07-22T23:38:34.000Z | 2021-03-26T18:52:54.000Z | defmodule Protobuf do
@moduledoc """
`protoc` should always be used to generate code instead of wrting the code by hand.
By `use` this module, macros defined in `Protobuf.DSL` will be injected. Most of thee macros
are equal to definition in .proto files.
defmodule Foo do
use Protobuf, syntax: :proto3
defstruct [:a, :b]
field :a, 1, type: :int32
field :b, 2, type: :string
end
Your Protobuf message(module) is just a normal Elixir struct. Some useful functions are also injected,
see "Callbacks" for details. Examples:
foo1 = Foo.new!(%{a: 1})
foo1.b == ""
bin = Foo.encode(foo1)
foo1 == Foo.decode(bin)
Except functions in "Callbacks", some other functions may be defined:
* Extension functions when your Protobuf message use extensions. See `Protobuf.Extension` for details.
* put_extension(struct, extension_mod, field, value)
* get_extension(struct, extension_mod, field, default \\ nil)
"""
defmacro __using__(opts) do
quote location: :keep do
import Protobuf.DSL,
only: [field: 3, field: 2, oneof: 2, extend: 4, extensions: 1, option: 1]
Module.register_attribute(__MODULE__, :fields, accumulate: true)
Module.register_attribute(__MODULE__, :oneofs, accumulate: true)
Module.register_attribute(__MODULE__, :extends, accumulate: true)
Module.register_attribute(__MODULE__, :extensions, [])
Module.register_attribute(__MODULE__, :msg_options, accumulate: true)
@options unquote(opts)
@before_compile Protobuf.DSL
@behaviour Protobuf
def new() do
Protobuf.Builder.new(__MODULE__)
end
def new(attrs) do
Protobuf.Builder.new(__MODULE__, attrs)
end
def new!(attrs) do
Protobuf.Builder.new!(__MODULE__, attrs)
end
@doc """
`new_and_verify!` makes sure that the values used to instantiate a
protobuf struct have valid types. For example, if `a` is a string field
in `Foo`,
Foo.new_and_verify!(a: 123)
would raise an exception.
"""
def new_and_verify!(attrs) do
struct = Protobuf.Builder.new!(__MODULE__, attrs)
case Protobuf.Verifier.verify(struct) do
{:error, messages} ->
raise Protobuf.VerificationError, message: Enum.join(messages, "\n\t")
:ok ->
struct
end
end
unquote(def_encode_decode())
end
end
defp def_encode_decode() do
quote do
def decode(data), do: Protobuf.Decoder.decode(data, __MODULE__)
def encode(struct), do: Protobuf.Encoder.encode(struct)
end
end
@doc """
Build a blank struct with default values. This and other "new" functions are
preferred than raw building struct method like `%Foo{}`.
In proto3, the zero values are the default values.
"""
@callback new() :: struct
@doc "Build and update the struct with passed fields."
@callback new(Enum.t()) :: struct
@doc """
Similar to `new/1`, but use `struct!/2` to build the struct, so
errors will be raised if unknown keys are passed.
"""
@callback new!(Enum.t()) :: struct
@doc """
Encode the struct to a protobuf binary.
Errors may be raised if there's something wrong in the struct.
"""
@callback encode(struct) :: binary
@doc """
Decode a protobuf binary to a struct.
Errors may be raised if there's something wrong in the binary.
"""
@callback decode(binary) :: struct
@doc """
It's preferable to use message's `decode` function, like
Foo.decode(bin)
"""
@spec decode(binary, module) :: struct
def decode(data, mod) do
Protobuf.Decoder.decode(data, mod)
end
@doc """
It's preferable to use message's `encode` function, like
Foo.encode(foo)
"""
@spec encode(struct) :: binary
def encode(struct) do
Protobuf.Encoder.encode(struct)
end
end
| 27.405594 | 104 | 0.654249 |
0803b2631795b9a31025e262f72eff009a9faca9 | 2,114 | exs | Elixir | test/altstatus/admission/admission_test.exs | AltCampus/altstatus_backend | 70bf7a9d337e570f54002c3a7df264e88372adfa | [
"MIT"
] | 1 | 2020-01-20T18:17:59.000Z | 2020-01-20T18:17:59.000Z | test/altstatus/admission/admission_test.exs | AltCampus/altstatus_backend | 70bf7a9d337e570f54002c3a7df264e88372adfa | [
"MIT"
] | null | null | null | test/altstatus/admission/admission_test.exs | AltCampus/altstatus_backend | 70bf7a9d337e570f54002c3a7df264e88372adfa | [
"MIT"
] | 2 | 2018-09-09T08:05:24.000Z | 2018-09-09T08:35:18.000Z | defmodule Altstatus.AdmissionTest do
use Altstatus.DataCase
alias Altstatus.Admission
describe "batches" do
alias Altstatus.Admission.Batch
@valid_attrs %{name: "some name", slug: "some slug"}
@update_attrs %{name: "some updated name", slug: "some updated slug"}
@invalid_attrs %{name: nil, slug: nil}
def batch_fixture(attrs \\ %{}) do
{:ok, batch} =
attrs
|> Enum.into(@valid_attrs)
|> Admission.create_batch()
batch
end
test "list_batches/0 returns all batches" do
batch = batch_fixture()
assert Admission.list_batches() == [batch]
end
test "get_batch!/1 returns the batch with given id" do
batch = batch_fixture()
assert Admission.get_batch!(batch.id) == batch
end
test "create_batch/1 with valid data creates a batch" do
assert {:ok, %Batch{} = batch} = Admission.create_batch(@valid_attrs)
assert batch.name == "some name"
assert batch.slug == "some slug"
end
test "create_batch/1 with invalid data returns error changeset" do
assert {:error, %Ecto.Changeset{}} = Admission.create_batch(@invalid_attrs)
end
test "update_batch/2 with valid data updates the batch" do
batch = batch_fixture()
assert {:ok, batch} = Admission.update_batch(batch, @update_attrs)
assert %Batch{} = batch
assert batch.name == "some updated name"
assert batch.slug == "some updated slug"
end
test "update_batch/2 with invalid data returns error changeset" do
batch = batch_fixture()
assert {:error, %Ecto.Changeset{}} = Admission.update_batch(batch, @invalid_attrs)
assert batch == Admission.get_batch!(batch.id)
end
test "delete_batch/1 deletes the batch" do
batch = batch_fixture()
assert {:ok, %Batch{}} = Admission.delete_batch(batch)
assert_raise Ecto.NoResultsError, fn -> Admission.get_batch!(batch.id) end
end
test "change_batch/1 returns a batch changeset" do
batch = batch_fixture()
assert %Ecto.Changeset{} = Admission.change_batch(batch)
end
end
end
| 31.088235 | 88 | 0.662725 |
0803c8e33e97d101f9a88d072c179ed21d1cf9c9 | 1,987 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/autoscaler_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/autoscaler_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/compute/lib/google_api/compute/v1/model/autoscaler_list_warning_data.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.AutoscalerListWarningData do
  @moduledoc """
  ## Attributes

  *   `key` (*type:* `String.t`, *default:* `nil`) - [Output Only] A key that provides more detail on the warning being returned. For example, for warnings where there are no results in a list request for a particular zone, this key might be scope and the key value might be the zone name. Other examples might be a key indicating a deprecated resource and a suggested replacement, or a warning about invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding).
  *   `value` (*type:* `String.t`, *default:* `nil`) - [Output Only] A warning data value corresponding to the key.
  """

  # Generated model: `field/1` from GoogleApi.Gax.ModelBase defines the
  # struct keys plus Poison (de)serialization metadata.
  use GoogleApi.Gax.ModelBase

  @type t :: %__MODULE__{
          :key => String.t(),
          :value => String.t()
        }

  field(:key)
  field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.AutoscalerListWarningData do
  # Delegate JSON decoding to the generated model's decode/2.
  def decode(value, options),
    do: GoogleApi.Compute.V1.Model.AutoscalerListWarningData.decode(value, options)
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.AutoscalerListWarningData do
  # Delegate JSON encoding to the shared Gax model-base encoder.
  def encode(value, options), do: GoogleApi.Gax.ModelBase.encode(value, options)
end
| 39.74 | 527 | 0.739809 |
080415c46a766073105ecd2b0425e56ccc5658d9 | 11,074 | ex | Elixir | lib/plots.ex | piisgaaf/chi2fit | 64e1c6719142e74979a407749953b892d4ccd985 | [
"Apache-2.0"
] | 4 | 2017-11-06T20:08:40.000Z | 2021-07-01T13:42:48.000Z | lib/plots.ex | piisgaaf/chi2fit | 64e1c6719142e74979a407749953b892d4ccd985 | [
"Apache-2.0"
] | null | null | null | lib/plots.ex | piisgaaf/chi2fit | 64e1c6719142e74979a407749953b892d4ccd985 | [
"Apache-2.0"
] | null | null | null | defmodule Gnuplotlib do
# Copyright 2019-2019 Pieter Rijken
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@moduledoc """
Provides various plots using the [Gnuplot](https://hex.pm/packages/gnuplot) package.
"""
alias Chi2fit.Utilities, as: U
alias Gnuplot, as: G
@imgpath "/app/notebooks/images"
@terminal "pngcairo"
@pngoptions ~w(set terminal #{@terminal} transparent enhanced)a
@doc """
Captures the output sent by `&Port.open/2` and returns it as a binary
"""
# Milliseconds to wait for further port output before returning what we have.
@timeout 1_000
@spec capture(out :: binary) :: binary
# Accumulates `{_, {:data, chunk}}` messages (as sent by Port.open/2 owners)
# until the port reports `:closed` or no message arrives within @timeout.
def capture(acc \\ <<>>) do
  receive do
    {_port, {:data, chunk}} -> capture(acc <> chunk)
    {_port, :closed} -> acc
  after
    @timeout -> acc
  end
end
@doc """
Draws a histogram of the data.
## Options
`:bin` - the size of the bins to use,
`:plottitle` - the title of the plot,
`:xrange` - the range for the x-values to use in the format '[x1:x2]'
`:yrange` - the range for the y-values to use in the format '[y1:y2]'
`:xlabel` - the label to use for the x-axis,
`:ylabel` - the label to use for the y-axis.
"""
# Renders `data` as a gnuplot boxes histogram; see the @doc above for the
# recognized options. Remaining options drive terminal/output selection.
@spec histogram(data :: [number], options :: Keyword.t) :: none()
def histogram(data, options \\ []) do
# Bin width defaults to 1 when :bin is not given.
binsize = options[:bin] || 1
# Histogram the data and turn {bin, count} tuples into [bin, count] lists.
hist = data |> U.make_histogram(binsize,0) |> Enum.map(&Tuple.to_list/1)
terminal(options)
++ [
['width=#{binsize}'],
# hist() shifts a 1-based bin index to the x-coordinate of the bin center.
['hist(x,width)=width*floor(x-1)+width/2.0'],
[:set, :boxwidth, 'width*0.9'],
[:set, :style, :fill, :solid, 0.5],
# Optional decorations; an empty list contributes no gnuplot command.
if(options[:plottitle], do: [:set, :title, options[:plottitle]], else: []),
if(options[:xrange], do: [:set, :xrange, options[:xrange]], else: []),
if(options[:yrange], do: [:set, :yrange, options[:yrange]], else: []),
if(options[:xlabel], do: [:set, :xlabel, options[:xlabel]], else: [:set, :xlabel]),
if(options[:ylabel], do: [:set, :ylabel, options[:ylabel], :rotate, :by, 90], else: [:set, :ylabel]),
[:plot, "-", :u, '(hist($1,width)):2', :smooth, :freq, :w, :boxes, :lc, 'rgb"green"', :notitle]
]
|> do_output([hist], options)
end
@doc """
Draws a graph of the empirical CDF as steps, the data points with error bars, and the (fitted) function.
## Options
`:bin` - the size of the bins to use,
`:plottitle` - the title of the plot,
`:xrange` - the range for the x-values to use in the format '[x1:x2]'
`:yrange` - the range for the y-values to use in the format '[y1:y2]'
`:xlabel` - the label to use for the x-axis,
`:ylabel` - the label to use for the y-axis,
`:func` - the data to use for the CDF curve as a list of `[x,y]`,
`:title` - the title to use for the CDF curve.
`:bounds` - 2-tuple of functions describing the minimum and maximum error-curves for the CDF
"""
@type datapoint() :: {x :: number, y :: number, ylow :: number, yhigh :: number}
# Plots the empirical CDF (steps + points + y-error bars) plus an optional
# fitted curve (:func) and optional error bounds (:bounds).
@spec ecdf(data :: [datapoint], options :: Keyword.t) :: none()
def ecdf(data, options) do
# Sample count for the fitted/bounds curves.
npoints = options[:npoints] || 100
hist = data |> Enum.map(&Tuple.to_list/1)
# Extend the x-axis 20% past the largest data x-value.
maxx = data |> Enum.map(&elem(&1,0)) |> Enum.max |> Kernel.*(1.2)
# Datasets, in plot order: steps (anchored at the origin and extended to
# [maxx, 1]), points, error bars, then the optional curve and bounds.
args = [
[[0,0,0,0]|hist]++[[maxx,1,0,0]],
hist,
hist
]
++ if(options[:func], do: [dofun(npoints,maxx,options[:func])], else: [])
++ case options[:bounds] do
{minrate,maxrate} ->
# Zip lower/upper curves into [x, ylow, yhigh] triples for filledcurve.
[ dofun(npoints,maxx,minrate) |> Enum.zip(dofun(npoints,maxx,maxrate)) |> Enum.map(fn {[x,y1],[x,y2]}->[x,y1,y2] end) ]
_else ->
[ ]
end
terminal(options)
++ [
# Line styles 1-3: steps/points (blue), error bars (red), fitted (green).
[:set, :style, :line, 1,
:linecolor, :rgb, "#0060ad",
:linetype, 1, :linewidth, 2,
:pointtype, 7, :pointsize, 1.5],
[:set, :style, :line, 2,
:linecolor, :rgb, "#dd181f",
:linetype, 1, :linewidth, 2],
[:set, :style, :line, 3,
:linecolor, :rgb, "green",
:linetype, 1, :linewidth, 2],
~w(set style fill transparent solid 0.2 noborder)a,
~w(set key left top)a,
if(options[:plottitle], do: [:set, :title, options[:plottitle]], else: []),
if(options[:xrange], do: [:set, :xrange, options[:xrange]], else: []),
if(options[:yrange], do: [:set, :yrange, options[:yrange]], else: [:set,:yrange,'[0:1.2]']),
if(options[:xlabel], do: [:set, :xlabel, options[:xlabel]], else: [:set, :xlabel]),
if(options[:ylabel], do: [:set, :ylabel, options[:ylabel], :rotate, :by, 90], else: [:set, :ylabel]),
[:plot, G.list([
~w('-' u 1:2 w steps ls 1 notitle)a,
~w('' u 1:2 w points ls 1 notitle)a,
~w('' u 1:2:3:4 w yerrorbars ls 2 title 'Empirical CDF')a,
if(options[:func], do: ["", :u, '1:2', :w, :lines, :ls, 3, :title, options[:title]], else: [])
] ++ case options[:bounds] do
{_,_} -> [
["", :u, '1:2:3', :lc, :rgb, "grey", :w, :filledcurve, :closed, :title, "Error bounds"]
]
_else -> []
end
)
]
]
|> do_output(args, options)
end
@doc """
Draws a graph of the PDF.
## Options
`:bin` - the size of the bins to use,
`:offset` -- the offset of the bin,
`:plottitle` - the title of the plot,
`:noerrors` -- no error bars,
`:xrange` - the range for the x-values to use in the format '[x1:x2]'
`:yrange` - the range for the y-values to use in the format '[y1:y2]'
`:xlabel` - the label to use for the x-axis,
`:ylabel` - the label to use for the y-axis,
`:pdf` - the data to use for the PDF curve as a list of `[x,y]`,
`:title` - the title to use for the PDF curve.
"""
# Plots an empirical PDF (normalized histogram) together with a fitted PDF
# curve supplied via options[:pdf]; error bars unless :noerrors is set.
@spec pdf(data :: [number], options :: Keyword.t) :: none()
def pdf(data, options) do
npoints = options[:npoints] || 100
bin = options[:bin] || 1
offset = options[:offset] || 0
noerrors = options[:noerrors] || false
# Extend the x-axis 20% past the largest sample.
maxx = data |> Enum.max |> Kernel.*(1.2)
# Histogram, then scale bin indices back to x units (index * bin width).
hist = data
|> U.make_histogram(bin,offset)
|> Enum.map(&Tuple.to_list/1)
|> Enum.map(fn [x,y]->[x*bin,y] end)
# Datasets: histogram, histogram again for error bars (unless disabled),
# then the sampled PDF curve.
args = [ hist ] ++ if(noerrors, do: [], else: [hist]) ++ [ dofun(npoints,maxx,options[:pdf]) ]
terminal(options)
++ [
# count/width are used by gnuplot to normalize counts into densities.
['count=#{length(data)}'],
['width=#{bin}'],
['hist(x,width)=width*floor((x-1)/width)+width/2.0'],
[:set, :boxwidth, 'width*0.9'],
[:set, :style, :fill, :solid, 0.5],
if(options[:plottitle], do: [:set, :title, options[:plottitle]], else: []),
if(options[:xrange], do: [:set, :xrange, options[:xrange]], else: []),
if(options[:yrange], do: [:set, :yrange, options[:yrange]], else: []),
if(options[:xlabel], do: [:set, :xlabel, options[:xlabel]], else: [:set,:xlabel]),
if(options[:ylabel], do: [:set, :ylabel, options[:ylabel], :rotate, :by, 90], else: [:set,:ylabel]),
[:plot, G.list(
[~w|'-' u (hist($1,width)):($2/count/#{bin}) smooth freq w boxes lc rgb "green" title "Empirical PDF"|a]
++ if(noerrors, do: [], else: [~w|'-' u (hist($1,width)):($2/count/#{bin}):(sqrt($2)/count/#{bin}) w errorbars ls 3 notitle|a])
++ [ ["", :u, '1:2', :w, :lines, :ls, 3, :title, options[:title]] ]
)
]
]
|> do_output(args, options)
end
@doc """
Plots several graphs in a multi-plot.
"""
# Combines prepared {commands, data} pairs (e.g. from mode :as_commands)
# into a single gnuplot multiplot grid.
@spec multi(all :: [{command :: [],data :: []}], options :: Keyword.t) :: none()
def multi(all, options \\ []) do
# Grid defaults to 2 columns and enough rows to fit every sub-plot.
cols = options[:columns] || 2
rows = options[:rows] || trunc(Float.ceil(length(all)/cols,0))
{commands,data} = all |> U.unzip
[
terminal(options)
++ [
[:set, :multiplot, :layout, '#{rows},#{cols}'] ++ (if options[:title], do: [:title, options[:title], :font, ",14"], else: []),
]
]
|> Enum.concat(commands)
|> Enum.concat
|> do_output(Enum.concat(data), options)
end
# Renders a 3D surface with contour lines via gnuplot's splot/dgrid3d.
@spec surface(data :: [[number()]], options :: Keyword.t) :: none()
def surface(data, options) do
# Optional dgrid3d parameters and the splot column selection (x:y columns).
dgrid3d = options[:dgrid3d] || ""
using = options[:parameters] || "1:2"
terminal(options)
++ [
['set cntrparam levels 50'],
['set contour'],
['set dgrid3d #{dgrid3d}'],
['unset key'],
['set cntrlabel start 2 font ",7"'],
if(options[:plottitle], do: [:set, :title, options[:plottitle]], else: []),
if(options[:xrange], do: [:set, :xrange, options[:xrange]], else: []),
if(options[:yrange], do: [:set, :yrange, options[:yrange]], else: []),
if(options[:zrange], do: [:set, :zrange, options[:zrange]], else: []),
if(options[:xlabel], do: [:set, :xlabel, options[:xlabel]], else: [:set,:xlabel]),
if(options[:ylabel], do: [:set, :ylabel, options[:ylabel]], else: [:set,:ylabel]),
if(options[:zlabel], do: [:set, :zlabel, options[:zlabel], :rotate, :by, 90], else: [:set,:zlabel]),
~w(splot '-' u #{using}:3 w lines notitle)a
]
|> do_output([data], options)
end
#############################################################################
##
## Local functions
##
#############################################################################
# Base PNG terminal options, optionally extended with a `size <spec>` clause.
defp make_terminal(options) do
  size_opts = if options[:size], do: ~w(size #{options[:size]})a, else: []
  List.flatten([@pngoptions, size_opts])
end
# Builds the terminal/output setup commands for the selected options[:mode].
defp terminal(options) do
case options[:mode] do
{:as_file, path} ->
# Write a PNG under @imgpath; create the directory if needed.
File.mkdir_p @imgpath
[ make_terminal(options), ~w(set output '#{@imgpath}/#{path}.png')a ]
raw when raw==nil or raw==:raw or raw==:raw64 ->
# Emit PNG bytes over the port (collected by capture/1 in do_output/3).
[ make_terminal(options), ~w(set output)a ]
_else ->
# NOTE(review): do_output/3 matches :as_raw/:as_raw64/:as_commands, which
# all fall through to this clause (no terminal setup) — confirm whether the
# mode atoms here (:raw/:raw64) are intentionally different.
[ ]
end
end
# Dispatches the assembled commands/datasets according to options[:mode]:
# return them unevaluated, render to a file, or emit (optionally Base64)
# PNG bytes to stdout. The default (nil) behaves like :as_raw64.
defp do_output(commands, datasets, options) do
case options[:mode] do
:as_commands ->
# Hand back the raw lists without invoking gnuplot (used by multi/2).
{commands, datasets}
{:as_file, _} ->
G.plot(commands, datasets)
:as_raw ->
# Forward captured PNG bytes verbatim.
G.plot(commands, datasets)
capture() |> IO.write
:as_raw64 ->
G.plot(commands, datasets)
capture() |> Base.encode64 |> IO.write
nil ->
G.plot(commands, datasets)
capture() |> Base.encode64 |> IO.write
end
end
# Samples `fun` at npoints+1 evenly spaced x values on [0, maxx],
# producing [x, fun.(x)] pairs for plotting.
defp dofun(npoints, maxx, fun) do
  for i <- 0..npoints do
    x = i * maxx / npoints
    [x, fun.(x)]
  end
end
end | 36.668874 | 143 | 0.53603 |
0804290b86985579893d00d5221308fe8adf9d47 | 3,595 | ex | Elixir | lib/vintage_net_bridge.ex | takasehideki/vintage_net_bridge | ba439f52d1b639b1c8f49b3a1f2461012a52c62d | [
"Apache-2.0"
] | null | null | null | lib/vintage_net_bridge.ex | takasehideki/vintage_net_bridge | ba439f52d1b639b1c8f49b3a1f2461012a52c62d | [
"Apache-2.0"
] | null | null | null | lib/vintage_net_bridge.ex | takasehideki/vintage_net_bridge | ba439f52d1b639b1c8f49b3a1f2461012a52c62d | [
"Apache-2.0"
] | null | null | null | defmodule VintageNetBridge do
@moduledoc """
Configure network bridges with VintageNet
Configurations for this technology are maps with a `:type` field set to
`VintageNetBridge`. The following additional fields are supported:
* `:vintage_net_bridge` - Bridge options
* `:interfaces` - Set to a list of interface names to add to the bridge.
This option is required for the bridge to be useful.
* `:forward_delay`
* `:priority`
* `:hello_time`
* `:max_age`
* `:path_cost`
* `:path_priority`
* `:hairpin`
* `:stp`
Here's an example configuration for setting up a bridge:
```elixir
%{
type: VintageNetBridge,
vintage_net_bridge: %{
vintage_net_bridge: %{
interfaces: ["eth0", "wlan0"],
}
}
```
See [brctl(8)](https://www.man7.org/linux/man-pages/man8/brctl.8.html) for
more information on individual options.
"""
@behaviour VintageNet.Technology
alias VintageNet.Interface.RawConfig
alias VintageNet.IP.{IPv4Config, DhcpdConfig, DnsdConfig}
alias VintageNetBridge.Server
@impl true
# Bridge configurations need no normalization; returned unchanged.
def normalize(config), do: config
@impl true
# Translates a normalized config into a RawConfig: create the bridge with
# brctl, apply one brctl command per recognized option, attach each member
# interface, then layer on IPv4/DHCPd/DNSd configuration.
def to_raw_config(ifname, config, opts) do
  normalized_config = normalize(config)
  bridge_config = normalized_config[:vintage_net_bridge]
  brctl = Keyword.fetch!(opts, :bin_brctl)
  interfaces = Map.fetch!(bridge_config, :interfaces)

  # Bridge device lifecycle: brctl addbr on up, delbr on down.
  up_cmds = [{:run, brctl, ["addbr", ifname]}]
  down_cmds = [{:run, brctl, ["delbr", ifname]}]

  # One brctl invocation per bridge option (see config_to_cmd/3).
  bridge_up_cmds = Enum.flat_map(bridge_config, &config_to_cmd(&1, brctl, ifname))

  # Attach every member interface. Uses the `interfaces` binding directly —
  # the previous Map.get/3 re-lookup with a [] default was redundant, since
  # Map.fetch! above would already have raised if :interfaces were missing.
  # Errors are ignored: a member may not exist yet at bring-up time.
  addif_up_cmds =
    Enum.map(interfaces, fn addif ->
      {:run_ignore_errors, brctl, ["addif", ifname, addif]}
    end)

  %RawConfig{
    ifname: ifname,
    type: __MODULE__,
    source_config: normalized_config,
    up_cmds: up_cmds ++ bridge_up_cmds ++ addif_up_cmds,
    down_cmds: down_cmds,
    required_ifnames: [],
    child_specs: [{Server, %{brctl: brctl, bridge_ifname: ifname, interfaces: interfaces}}]
  }
  |> IPv4Config.add_config(normalized_config, opts)
  |> DhcpdConfig.add_config(normalized_config, opts)
  |> DnsdConfig.add_config(normalized_config, opts)
end
@impl true
# No runtime ioctls are supported for bridge interfaces.
def ioctl(_ifname, _command, _args) do
{:error, :unsupported}
end
@impl true
# System requirement checking is not implemented; always reports an error.
def check_system(_opts) do
{:error, "unimplemented"}
end
# Maps one bridge option ({key, value}) to the brctl invocation that applies
# it. Unrecognized options (or malformed values) contribute no commands.
defp config_to_cmd(option, brctl, ifname) do
  case option do
    {:forward_delay, value} ->
      [{:run, brctl, ["setfd", ifname, to_string(value)]}]

    {:priority, value} ->
      [{:run, brctl, ["setbridgeprio", ifname, to_string(value)]}]

    {:hello_time, value} ->
      [{:run, brctl, ["sethello", ifname, to_string(value)]}]

    {:max_age, value} ->
      [{:run, brctl, ["setmaxage", ifname, to_string(value)]}]

    {:path_cost, value} ->
      [{:run, brctl, ["setpathcost", ifname, to_string(value)]}]

    {:path_priority, value} ->
      [{:run, brctl, ["setportprio", ifname, to_string(value)]}]

    {:hairpin, {port, value}} ->
      [{:run, brctl, ["hairpin", ifname, to_string(port), bool_to_yn(value)]}]

    {:stp, value} ->
      [{:run, brctl, ["stp", ifname, bool_to_yn(value)]}]

    _other ->
      []
  end
end
# brctl expects the literal strings "yes"/"no" for boolean flags.
defp bool_to_yn(flag) when is_boolean(flag), do: if(flag, do: "yes", else: "no")
end
| 27.868217 | 93 | 0.663978 |
0804399fb3b08a226d5fe4a274c08db54cd999fd | 1,107 | exs | Elixir | config/config.exs | seanabrahams/tv | 3d103a6ccbf2d2bade619155bf478d5c7b61a065 | [
"MIT"
] | null | null | null | config/config.exs | seanabrahams/tv | 3d103a6ccbf2d2bade619155bf478d5c7b61a065 | [
"MIT"
] | null | null | null | config/config.exs | seanabrahams/tv | 3d103a6ccbf2d2bade619155bf478d5c7b61a065 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure for your application as:
#
# config :tv, key: :value
#
# And access this configuration in your application as:
#
# Application.get_env(:tv, :key)
#
# Or configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 35.709677 | 73 | 0.748871 |
08043d3f4fe2f73e612b37927b8ac51661c9539e | 732 | ex | Elixir | lib/cforum_web/gettext.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | 16 | 2019-04-04T06:33:33.000Z | 2021-08-16T19:34:31.000Z | lib/cforum_web/gettext.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | 294 | 2019-02-10T11:10:27.000Z | 2022-03-30T04:52:53.000Z | lib/cforum_web/gettext.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | 10 | 2019-02-10T10:39:24.000Z | 2021-07-06T11:46:05.000Z | defmodule CforumWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import CforumWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
# Compiles this module into the Gettext backend for the :cforum OTP app,
# with plural rules delegated to Cforum.Plurals.
use Gettext, otp_app: :cforum, plural_forms: Cforum.Plurals
end
| 29.28 | 72 | 0.684426 |
080442d7926f4e1d55c3d368b639073544eda1c1 | 14,501 | ex | Elixir | lib/ex_unit/lib/ex_unit.ex | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 1 | 2020-12-18T19:20:37.000Z | 2020-12-18T19:20:37.000Z | lib/ex_unit/lib/ex_unit.ex | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | null | null | null | lib/ex_unit/lib/ex_unit.ex | spencerdcarlson/elixir | 23d75ecdf58df80969e12f4420282238e19219a1 | [
"Apache-2.0"
] | 1 | 2020-11-25T02:22:55.000Z | 2020-11-25T02:22:55.000Z | defmodule ExUnit do
@moduledoc """
Unit testing framework for Elixir.
## Example
A basic setup for ExUnit is shown below:
# File: assertion_test.exs
# 1) Start ExUnit.
ExUnit.start()
# 2) Create a new test module (test case) and use "ExUnit.Case".
defmodule AssertionTest do
# 3) Note that we pass "async: true", this runs the test case
# concurrently with other test cases. The individual tests
# within each test case are still run serially.
use ExUnit.Case, async: true
# 4) Use the "test" macro instead of "def" for clarity.
test "the truth" do
assert true
end
end
To run the tests above, run the file using `elixir` from the
command line. Assuming you named the file `assertion_test.exs`,
you can run it as:
elixir assertion_test.exs
## Case, Callbacks and Assertions
See `ExUnit.Case` and `ExUnit.Callbacks` for more information
about defining test cases and setting up callbacks.
The `ExUnit.Assertions` module contains a set of macros to
generate assertions with appropriate error messages.
## Integration with Mix
Mix is the project management and build tool for Elixir. Invoking `mix test`
from the command line will run the tests in each file matching the pattern
`*_test.exs` found in the `test` directory of your project.
You must create a `test_helper.exs` file inside the
`test` directory and put the code common to all tests there.
The minimum example of a `test_helper.exs` file would be:
# test/test_helper.exs
ExUnit.start()
Mix will load the `test_helper.exs` file before executing the tests.
It is not necessary to `require` the `test_helper.exs` file in your test
files. Run `mix help test` for more information.
"""
@typedoc """
All tests start with a state of `nil`.
A finished test can be in one of five states:
1. Passed (also represented by `nil`)
2. Failed
3. Skipped (via @tag :skip)
4. Excluded (via :exclude filters)
5. Invalid (when setup_all fails)
"""
@type state ::
nil | {:excluded, binary} | {:failed, failed} | {:invalid, module} | {:skipped, binary}
@typedoc "The error state returned by `ExUnit.Test` and `ExUnit.TestModule`"
@type failed :: [{Exception.kind(), reason :: term, Exception.stacktrace()}]
@typedoc "A map representing the results of running a test suite"
@type suite_result :: %{
excluded: non_neg_integer,
failures: non_neg_integer,
skipped: non_neg_integer,
total: non_neg_integer
}
@type test_id :: {module, name :: atom}
defmodule Test do
@moduledoc """
A struct that keeps information about the test.
It is received by formatters and contains the following fields:
* `:name` - the test name
* `:module` - the test module
* `:state` - the finished test state (see `t:ExUnit.state/0`)
* `:time` - the duration in microseconds of the test's runtime
* `:tags` - the test tags
* `:logs` - the captured logs
"""
# NOTE: :case is legacy and undocumented above; :module is the current
# field (see TODO below).
defstruct [:name, :case, :module, :state, time: 0, tags: %{}, logs: ""]
# TODO: Remove the `:case` field on v2.0
@type t :: %__MODULE__{
name: atom,
case: module,
module: module,
state: ExUnit.state(),
time: non_neg_integer,
tags: map,
logs: String.t()
}
end
defmodule TestModule do
@moduledoc """
A struct that keeps information about the test module.
It is received by formatters and contains the following fields:
* `:file` - (since v1.11.0) the file of the test module
* `:name` - the test module name
* `:state` - the test error state (see `t:ExUnit.state/0`)
* `:tests` - all tests in this module
"""
defstruct [:file, :name, :state, tests: []]
# :tests accumulates ExUnit.Test structs (see @type below).
@type t :: %__MODULE__{
file: binary(),
name: module,
state: ExUnit.state(),
tests: [ExUnit.Test.t()]
}
end
defmodule TestCase do
# TODO: Remove this module on v2.0 (it has been replaced by TestModule)
# Kept (hidden from docs via @moduledoc false) only for backward
# compatibility until v2.0.
@moduledoc false
defstruct [:name, :state, tests: []]
@type t :: %__MODULE__{name: module, state: ExUnit.state(), tests: [ExUnit.Test.t()]}
end
defmodule TimeoutError do
# Raised when a test or callback exceeds its configured timeout.
defexception [:timeout, :type]
@impl true
# Builds the user-facing message from the timeout (ms) and the kind of
# construct that timed out (:type).
def message(%{timeout: timeout, type: type}) do
"""
#{type} timed out after #{timeout}ms. You can change the timeout:
1. per test by setting "@tag timeout: x" (accepts :infinity)
2. per test module by setting "@moduletag timeout: x" (accepts :infinity)
3. globally via "ExUnit.start(timeout: x)" configuration
4. by running "mix test --timeout x" which sets timeout
5. or by running "mix test --trace" which sets timeout to infinity
(useful when using IEx.pry/0)
where "x" is the timeout given as integer in milliseconds (defaults to 60_000).
"""
end
end
use Application
@doc false
# Application callback (see `use Application` above): starts ExUnit's
# supervision tree — server, capture server and on-exit handler — under a
# one_for_one supervisor.
def start(_type, []) do
children = [
ExUnit.Server,
ExUnit.CaptureServer,
ExUnit.OnExitHandler
]
opts = [strategy: :one_for_one, name: ExUnit.Supervisor]
Supervisor.start_link(children, opts)
end
@doc """
Starts ExUnit and automatically runs tests right before the
VM terminates.
It accepts a set of `options` to configure `ExUnit`
(the same ones accepted by `configure/1`).
If you want to run tests manually, you can set the `:autorun` option
to `false` and use `run/0` to run tests.
"""
@spec start(Keyword.t()) :: :ok
def start(options \\ []) do
# Ensure the :ex_unit application (and its deps) is running, then apply
# the caller's configuration.
{:ok, _} = Application.ensure_all_started(:ex_unit)
configure(options)
if Application.fetch_env!(:ex_unit, :autorun) do
# Flip :autorun off so the at_exit hook is only installed once.
Application.put_env(:ex_unit, :autorun, false)
System.at_exit(fn
0 ->
# Only run the suite when the VM is otherwise exiting successfully.
time = ExUnit.Server.modules_loaded()
options = persist_defaults(configuration())
%{failures: failures} = ExUnit.Runner.run(options, time)
# A second at_exit hook flips the exit status when the suite failed.
System.at_exit(fn _ ->
if failures > 0, do: exit({:shutdown, 1})
end)
_ ->
:ok
end)
else
:ok
end
end
@doc """
Configures ExUnit.
## Options
ExUnit supports the following options:
* `:assert_receive_timeout` - the timeout to be used on `assert_receive`
calls in milliseconds, defaults to `100`;
* `:autorun` - if ExUnit should run by default on exit. Defaults to `true`;
* `:capture_log` - if ExUnit should default to keeping track of log messages
and print them on test failure. Can be overridden for individual tests via
`@tag capture_log: false`. Defaults to `false`;
* `:colors` - a keyword list of color options to be used by some formatters:
* `:enabled` - boolean option to enable colors, defaults to `IO.ANSI.enabled?/0`;
* `:diff_insert` - color of the insertions on diffs, defaults to `:green`;
* `:diff_insert_whitespace` - color of the whitespace insertions on diffs,
defaults to `IO.ANSI.color_background(2, 0, 0)`;
* `:diff_delete` - color of the deletions on diffs, defaults to `:red`;
* `:diff_delete_whitespace` - color of the whitespace deletions on diffs,
defaults to `IO.ANSI.color_background(0, 2, 0)`;
* `:exclude` - specifies which tests are run by skipping tests that match the
filter;
* `:failures_manifest_file` - specifies a path to the file used to store failures
between runs;
* `:formatters` - the formatters that will print results,
defaults to `[ExUnit.CLIFormatter]`;
* `:include` - specifies which tests are run by skipping tests that do not
match the filter. Keep in mind that all tests are included by default, so unless they are
excluded first, the `:include` option has no effect. To only run the tests
that match the `:include` filter, exclude the `:test` tag first (see the
documentation for `ExUnit.Case` for more information on tags);
* `:max_cases` - maximum number of tests to run in parallel. Only tests from
different modules run in parallel. It defaults to `System.schedulers_online * 2`
to optimize both CPU-bound and IO-bound tests;
* `:max_failures` - the suite stops evaluating tests when this number of test failures
is reached. All tests within a module that fail when using the `setup_all/1,2` callbacks
are counted as failures. Defaults to `:infinity`;
* `:only_test_ids` - a list of `{module_name, test_name}` tuples that limits
what tests get run;
* `:refute_receive_timeout` - the timeout to be used on `refute_receive`
calls in milliseconds, defaults to `100`;
* `:seed` - an integer seed value to randomize the test suite. This seed
is also mixed with the test module and name to create a new unique seed
on every test, which is automatically fed into the `:rand` module. This
provides randomness between tests, but predictable and reproducible results;
* `:slowest` - prints timing information for the N slowest tests. Running
ExUnit with slow test reporting automatically runs in `trace` mode. It
is disabled by default;
* `:stacktrace_depth` - configures the stacktrace depth to be used
on formatting and reporters, defaults to `20`;
* `:timeout` - sets the timeout for the tests in milliseconds, defaults to `60_000`;
* `:trace` - sets ExUnit into trace mode, this sets `:max_cases` to `1` and
prints each test case and test while running. Note that in trace mode test timeouts
will be ignored as timeout is set to `:infinity`.
* `:test_location_relative_path` - the test location is the file:line information
printed by tests as a shortcut to run a given test. When this value is set,
the value is used as a prefix for the test itself. This is typically used by
Mix to properly set-up umbrella projects
Any arbitrary configuration can also be passed to `configure/1` or `start/1`,
and these options can then be used in places such as custom formatters. These
other options will be ignored by ExUnit itself.
"""
@spec configure(Keyword.t()) :: :ok
def configure(options) do
  # Store each given option under the :ex_unit application environment.
  options
  |> Enum.each(fn {key, value} -> Application.put_env(:ex_unit, key, value) end)
end
@doc """
Returns ExUnit configuration.
"""
@spec configuration() :: Keyword.t()
def configuration do
  # Read the raw :ex_unit app env, then fill in the derived defaults.
  env = Application.get_all_env(:ex_unit)
  put_max_cases(put_slowest(put_seed(env)))
end
@doc """
Returns the pluralization for `word`.
If one is not registered, returns the word appended with an "s".
"""
@spec plural_rule(binary) :: binary
def plural_rule(word) when is_binary(word) do
  # Fall back to naive "s" pluralization when no rule was registered.
  rules = Application.get_env(:ex_unit, :plural_rules, %{})
  Map.get(rules, word, word <> "s")
end
@doc """
Registers a `pluralization` for `word`.
If one is already registered, it is replaced.
"""
@spec plural_rule(binary, binary) :: :ok
def plural_rule(word, pluralization) when is_binary(word) and is_binary(pluralization) do
  # Merge the new rule into any existing rules and persist via configure/1.
  rules = Application.get_env(:ex_unit, :plural_rules, %{})
  configure(plural_rules: Map.put(rules, word, pluralization))
end
@doc """
Runs the tests. It is invoked automatically
if ExUnit is started via `start/1`.
Returns a map containing the total number of tests, the number
of failures, the number of excluded tests and the number of skipped tests.
"""
@spec run() :: suite_result()
def run do
# Snapshot + persist the configuration, then run the suite with the signal
# handler installed for the duration.
options = persist_defaults(configuration())
with_signal_handler(fn -> ExUnit.Runner.run(options, nil) end)
end
# Runs `fun` with the signal handler installed; the `after` clause
# guarantees the handler is uninstalled even if `fun` raises.
defp with_signal_handler(fun) do
ExUnit.SignalHandler.install(self())
fun.()
after
ExUnit.SignalHandler.uninstall(self())
end
@doc """
Sets a callback to be executed after the completion of a test suite.
Callbacks set with `after_suite/1` must accept a single argument, which is a
map containing the results of the test suite's execution.
If `after_suite/1` is called multiple times, the callbacks will be called in
reverse order. In other words, the last callback set will be the first to be
called.
"""
@doc since: "1.8.0"
@spec after_suite((suite_result() -> any)) :: :ok
def after_suite(function) when is_function(function) do
  # Prepend so callbacks fire in reverse registration order (see @doc above).
  callbacks = Application.fetch_env!(:ex_unit, :after_suite)
  configure(after_suite: [function | callbacks])
end
@doc """
Fetches the test supervisor for the current test.
Returns `{:ok, supervisor_pid}` or `:error` if not called from the test process.
This is the same supervisor as used by `ExUnit.Callbacks.start_supervised/2`
and similar, see `ExUnit.Callbacks` module documentation for more information.
"""
@doc since: "1.11.0"
@spec fetch_test_supervisor() :: {:ok, pid()} | :error
def fetch_test_supervisor() do
case ExUnit.OnExitHandler.get_supervisor(self()) do
{:ok, nil} ->
# First request from this test process: start the supervisor lazily
# and cache it in the on-exit handler.
opts = [strategy: :one_for_one, max_restarts: 1_000_000, max_seconds: 1]
{:ok, sup} = Supervisor.start_link([], opts)
ExUnit.OnExitHandler.put_supervisor(self(), sup)
{:ok, sup}
{:ok, _} = ok ->
# Supervisor already started for this test process.
ok
:error ->
# Not called from a test process (per the @doc contract above).
:error
end
end
# Persists the derived defaults (:max_cases, :seed, :trace) into the
# application environment before the suite starts, then returns the full
# configuration unchanged.
defp persist_defaults(config) do
  configure(Keyword.take(config, [:max_cases, :seed, :trace]))
  config
end
# Lazily fills :seed with sub-second entropy when the user did not set one.
defp put_seed(opts) do
Keyword.put_new_lazy(opts, :seed, fn ->
# We're using `rem System.system_time()` here
# instead of directly using :os.timestamp or using the
# :microsecond argument because the VM on Windows has odd
# precision. Calling with :microsecond will give us a multiple
# of 1000. Calling without it gives actual microsecond precision.
System.system_time()
|> System.convert_time_unit(:native, :microsecond)
|> rem(1_000_000)
end)
end
# Resolve and store the effective :max_cases value (see max_cases/1).
defp put_max_cases(opts), do: Keyword.put(opts, :max_cases, max_cases(opts))
# Slowest-test reporting implies trace mode (see configure/1 docs).
defp put_slowest(opts) do
  case opts[:slowest] do
    slowest when slowest > 0 -> Keyword.put(opts, :trace, true)
    _ -> opts
  end
end
# Trace mode forces serial runs; otherwise honor the configured :max_cases
# or fall back to twice the online scheduler count.
defp max_cases(opts) do
  if opts[:trace] do
    1
  else
    opts[:max_cases] || System.schedulers_online() * 2
  end
end
end
| 32.440716 | 97 | 0.659817 |
080472df5e81d6878202ecf227bb1d9942b4b1a1 | 6,109 | exs | Elixir | test/screens/config/struct_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 3 | 2021-07-27T14:11:00.000Z | 2022-01-03T14:16:43.000Z | test/screens/config/struct_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | 444 | 2021-03-10T20:57:17.000Z | 2022-03-31T16:00:35.000Z | test/screens/config/struct_test.exs | mbta/screens | 4b586970f8844b19543bb2ffd4b032a89f6fa40a | [
"MIT"
] | null | null | null | defmodule TEST.Config1 do
# Test fixture for Screens.Config.Struct: required keys plus custom JSON
# (de)serialization hooks for the :action field.
@type t :: %__MODULE__{
action: :include | :exclude,
values: list(String.t())
}
@enforce_keys [:action, :values]
defstruct @enforce_keys
use Screens.Config.Struct
# "include"/"exclude" strings decode to atoms; everything else passes through.
defp value_from_json("action", "include"), do: :include
defp value_from_json("action", "exclude"), do: :exclude
defp value_from_json(_, value), do: value
# Values serialize as-is.
defp value_to_json(_, value), do: value
end
defmodule TEST.Config2 do
# Like TEST.Config1 but with defaults for every field and
# `with_default: true`, enabling `from_json(:default)` (used by Config3/5).
@type t :: %__MODULE__{
action: :include | :exclude,
values: list(String.t())
}
defstruct action: :include,
values: []
use Screens.Config.Struct, with_default: true
defp value_from_json("action", "include"), do: :include
defp value_from_json("action", "exclude"), do: :exclude
defp value_from_json(_, value), do: value
defp value_to_json(_, value), do: value
end
defmodule TEST.Config3 do
# Test fixture: single nested child configs.  The `children:` option
# tells Screens.Config.Struct to recurse into these fields with the
# named module's from_json/to_json.
@type t :: %__MODULE__{
daughter: TEST.Config1.t(),
son: TEST.Config2.t()
}
@enforce_keys [:daughter]
defstruct daughter: nil,
son: TEST.Config2.from_json(:default)
use Screens.Config.Struct, children: [daughter: TEST.Config1, son: TEST.Config2]
end
defmodule TEST.Config4 do
# Test fixture: a list-valued child — `{:list, Mod}` makes each element
# of the JSON array deserialize through Mod.
@type t :: %__MODULE__{
daughter: TEST.Config1.t(),
sons: list(TEST.Config2.t())
}
@enforce_keys [:daughter]
defstruct daughter: nil,
sons: []
use Screens.Config.Struct, children: [daughter: TEST.Config1, sons: {:list, TEST.Config2}]
end
defmodule TEST.Config5 do
# Test fixture: a map-valued child — `{:map, Mod}` deserializes each
# map *value* through Mod while keeping the string keys as-is.
@type t :: %__MODULE__{
daughter_map: %{String.t() => TEST.Config1.t()},
son: TEST.Config2.t()
}
defstruct daughter_map: %{},
son: TEST.Config2.from_json(:default)
use Screens.Config.Struct, children: [daughter_map: {:map, TEST.Config1}, son: TEST.Config2]
end
defmodule TEST.Config6 do
# Test fixture: nullable child.  The extra value_from_json(_, nil)
# clause lets a nil child pass through instead of raising (contrast
# with Config7, which has no such clause).
@type t :: %__MODULE__{child: TEST.Config2.t() | nil}
defstruct child: nil
use Screens.Config.Struct, children: [child: TEST.Config2]
defp value_from_json(_, nil), do: nil
end
defmodule TEST.Config7 do
# Test fixture: like Config6 but WITHOUT the nil clause, so feeding a
# nil child raises "value_from_json/2 not implemented" (see the tests).
@type t :: %__MODULE__{child: TEST.Config2.t()}
defstruct [:child]
use Screens.Config.Struct, children: [child: TEST.Config2]
end
defmodule TEST.Config8 do
# Test fixture: no value_from_json/2 override at all, so any incoming
# key raises "value_from_json/2 not implemented" (see the tests).
@type t :: %__MODULE__{a: String.t() | nil}
defstruct a: nil
use Screens.Config.Struct
end
defmodule TEST.Config9 do
# Test fixture: only key "a" is handled — JSON containing "b" hits no
# value_from_json/2 clause and raises FunctionClauseError (see tests).
@type t :: %__MODULE__{a: String.t() | nil, b: boolean()}
defstruct a: nil,
b: false
use Screens.Config.Struct
defp value_from_json("a", value), do: value
defp value_to_json(:a, value), do: value
end
# Exercises the code generated by `use Screens.Config.Struct` through the
# TEST.Config1..Config9 fixture modules defined above.
defmodule Screens.Config.StructTest do
use ExUnit.Case, async: true
alias TEST.{Config1, Config2, Config3, Config4, Config5, Config6, Config7, Config8, Config9}
describe "__using__/1" do
test "generates a functioning config module when passed default options" do
# Unsupported keys in the input are dropped, not round-tripped.
original_json = %{"action" => "exclude", "values" => ["a", "b"], "unsupported_key" => "foo"}
config = %Config1{action: :exclude, values: ["a", "b"]}
serialized_config = %{action: :exclude, values: ["a", "b"]}
assert config == Config1.from_json(original_json)
assert serialized_config == Config1.to_json(config)
end
test "includes handling of :default when directed to do so" do
# Config2 was built with `with_default: true`.
expected_config = %Config2{action: :include, values: []}
assert expected_config == Config2.from_json(:default)
end
test "does not include handling of :default when not directed to do so" do
# Config1 has no :default clause, so the sentinel matches nothing.
assert_raise FunctionClauseError, fn -> Config1.from_json(:default) end
end
test "generates value_from_json/2, value_to_json/2 for child config fields" do
original_json = %{"daughter" => %{"action" => "include", "values" => ["c"]}}
# The omitted "son" key falls back to Config2's defaults.
config = %Config3{
daughter: %Config1{action: :include, values: ["c"]},
son: %Config2{action: :include, values: []}
}
assert config == Config3.from_json(original_json)
end
test "supports generating functions for list-valued child config fields" do
original_json = %{
"daughter" => %{"action" => "include", "values" => ["d"]},
"sons" => [%{"values" => ["e", "f"]}, %{"action" => "exclude", "values" => ["g"]}]
}
config = %Config4{
daughter: %Config1{action: :include, values: ["d"]},
sons: [
%Config2{action: :include, values: ["e", "f"]},
%Config2{action: :exclude, values: ["g"]}
]
}
assert config == Config4.from_json(original_json)
end
test "supports generating functions for map-valued child config fields" do
original_json = %{
"daughter_map" => %{
"1" => %{"action" => "include", "values" => ["e", "f"]},
"2" => %{"action" => "exclude", "values" => ["g"]}
}
}
# Map keys stay strings; only the values go through Config1.from_json/1.
config = %Config5{
daughter_map: %{
"1" => %Config1{action: :include, values: ["e", "f"]},
"2" => %Config1{action: :exclude, values: ["g"]}
},
son: %Config2{action: :include, values: []}
}
assert config == Config5.from_json(original_json)
end
test "defers to the using module for handling (or not handling) nil-valued children" do
# Config6 opted into a nil clause; Config7 did not.
assert %Config6{child: nil} == Config6.from_json(%{"child" => nil})
assert_raise RuntimeError,
"Elixir.TEST.Config7.value_from_json/2 not implemented (key: `child`)",
fn -> Config7.from_json(%{"child" => nil}) end
end
test "defers to the using module for handling (or not handling) fields not defined in `children` list" do
original_json1 = %{"a" => "foo"}
# Config8 defines no clauses at all, so even "a" raises.
assert_raise RuntimeError,
"Elixir.TEST.Config8.value_from_json/2 not implemented (key: `a`)",
fn -> Config8.from_json(original_json1) end
config = %Config9{a: "foo", b: false}
assert config == Config9.from_json(original_json1)
# Config9 handles "a" only, so "b" falls through with no match.
original_json2 = %{"a" => "foo", "b" => true}
assert_raise FunctionClauseError, fn -> Config9.from_json(original_json2) end
end
end
end
| 28.816038 | 109 | 0.617777 |
080480990f0f311de2e990b8e77576445f265561 | 5,331 | exs | Elixir | test/error_storage_test.exs | sevgit/boom | 80be1830f69c71cab0f0b12e8dd3faad8a64d859 | [
"MIT"
] | null | null | null | test/error_storage_test.exs | sevgit/boom | 80be1830f69c71cab0f0b12e8dd3faad8a64d859 | [
"MIT"
] | null | null | null | test/error_storage_test.exs | sevgit/boom | 80be1830f69c71cab0f0b12e8dd3faad8a64d859 | [
"MIT"
] | null | null | null | defmodule ErrorStorageTest do
use ExUnit.Case, async: true
@error_info "Some error information"
@error_kind :error_kind
setup_all do
# Start the error-storage agent once for the whole suite; the tests
# below reach it through the :boom registered name.
Boom.ErrorStorage.start_link()
:ok
end
setup do
# Wipe the shared agent state so every test starts from a clean map.
Agent.update(:boom, fn _ -> %{} end)
end
describe "add_errors/2" do
  test "appends the error to its proper error kind" do
    Boom.ErrorStorage.add_errors(@error_kind, @error_info)

    assert %{@error_kind => {1, [@error_info]}} == Agent.get(:boom, fn state -> state end)

    Boom.ErrorStorage.add_errors(@error_kind, @error_info)

    assert %{@error_kind => {1, [@error_info, @error_info]}} ==
             Agent.get(:boom, fn state -> state end)

    Boom.ErrorStorage.add_errors(:another_error_kind, "Another error information")

    # Bug fix: this assertion previously asserted a bare map literal,
    # which is always truthy — it could never fail.  Compare against
    # the actual agent state like the assertions above.
    assert %{
             @error_kind => {1, [@error_info, @error_info]},
             :another_error_kind => {1, ["Another error information"]}
           } == Agent.get(:boom, fn state -> state end)
  end
end
describe "get_errors/1" do
  test "returns the errors for the proper error kind" do
    # Seed two independent error kinds directly into the agent.
    Agent.update(:boom, fn _ ->
      %{
        @error_kind => {1, [@error_info, @error_info]},
        :another_error_kind => {1, ["another_error"]}
      }
    end)

    assert Boom.ErrorStorage.get_errors(@error_kind) == [@error_info, @error_info]
    assert Boom.ErrorStorage.get_errors(:another_error_kind) == ["another_error"]
  end

  test "returns nil if error kind does not exist" do
    assert Boom.ErrorStorage.get_errors(:wrong_error_kind) == nil
  end
end
describe "send_notification?/1" do
  test "returns false when count is smaller than the error length" do
    # Threshold of 2 with only one stored error: not yet notifiable.
    Agent.update(:boom, fn _ -> %{@error_kind => {2, [@error_info]}} end)

    assert Boom.ErrorStorage.send_notification?(@error_kind) == false
  end

  test "returns true when error length is greater or equal than count" do
    Agent.update(:boom, fn _ -> %{@error_kind => {2, [@error_info, @error_info]}} end)

    assert Boom.ErrorStorage.send_notification?(@error_kind) == true
  end

  test "returns false when error kind does not exist" do
    assert Boom.ErrorStorage.send_notification?(:wrong_error_kind) == false
  end
end
describe "clear_errors/2" do
# State is stored per kind as {counter, errors}; clearing always empties
# the error list, while the counter behavior depends on the trigger.
test "flushes error list" do
Agent.update(:boom, fn _ -> %{@error_kind => {2, [@error_info, @error_info]}} end)
Boom.ErrorStorage.clear_errors(:exponential, @error_kind)
{_count, errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert errors == []
Agent.update(:boom, fn _ -> %{@error_kind => {2, [@error_info, @error_info]}} end)
Boom.ErrorStorage.clear_errors(:always, @error_kind)
{_count, errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert errors == []
end
test "increases the counter when notification trigger is :exponential" do
# Counter doubles on every clear: 1 -> 2 -> 4 -> 8.
Agent.update(:boom, fn _ -> %{@error_kind => {1, []}} end)
Boom.ErrorStorage.clear_errors(:exponential, @error_kind)
{counter, _errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter === 2
Boom.ErrorStorage.clear_errors(:exponential, @error_kind)
{counter, _errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter === 4
Boom.ErrorStorage.clear_errors(:exponential, @error_kind)
{counter, _errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter === 8
end
test "increases the counter when notification trigger is :exponential and :limit is set" do
# Doubling is capped at the configured limit: 1 -> 2 -> 4 -> 5.
Agent.update(:boom, fn _ -> %{@error_kind => {1, []}} end)
Boom.ErrorStorage.clear_errors([exponential: [limit: 5]], @error_kind)
{counter, _errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter === 2
Boom.ErrorStorage.clear_errors([exponential: [limit: 5]], @error_kind)
{counter, _errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter === 4
Boom.ErrorStorage.clear_errors([exponential: [limit: 5]], @error_kind)
{counter, _errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter === 5
end
test "does not increase the counter when notification_trigger is :always" do
Agent.update(:boom, fn _ -> %{@error_kind => {1, []}} end)
Boom.ErrorStorage.clear_errors(:always, @error_kind)
{counter, _errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter === 1
Boom.ErrorStorage.clear_errors(:always, @error_kind)
{counter, _errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter === 1
end
test "updates the proper error counter" do
# Clearing one kind must leave the other kind's state untouched.
Agent.update(:boom, fn _ ->
%{@error_kind => {1, ["error1", "error2"]}, :another_error_kind => {1, ["another_error"]}}
end)
Boom.ErrorStorage.clear_errors(:exponential, @error_kind)
{counter, errors} = Agent.get(:boom, fn state -> state end) |> Map.get(@error_kind)
assert counter == 2
assert errors == []
{counter, errors} = Agent.get(:boom, fn state -> state end) |> Map.get(:another_error_kind)
assert counter == 1
assert errors == ["another_error"]
end
end
end
| 37.020833 | 98 | 0.642656 |
0804a547e3226caf505767e53647f562a62d362b | 3,609 | exs | Elixir | test/metric_transaction_test.exs | trustvox/elixir_agent | 391c83c82336c96fd78a03c8ee316d48de2eea4f | [
"Apache-2.0"
] | null | null | null | test/metric_transaction_test.exs | trustvox/elixir_agent | 391c83c82336c96fd78a03c8ee316d48de2eea4f | [
"Apache-2.0"
] | null | null | null | test/metric_transaction_test.exs | trustvox/elixir_agent | 391c83c82336c96fd78a03c8ee316d48de2eea4f | [
"Apache-2.0"
] | null | null | null | defmodule MetricTransactionTest do
use ExUnit.Case
use Plug.Test
alias NewRelic.Harvest.Collector
defmodule TestPlugAppForward do
  import Plug.Conn

  # Minimal plug used as a forward/2 target below.
  def init(opts), do: opts

  def call(conn, _opts) do
    send_resp(conn, 200, "ok")
  end
end
defmodule Status do
  use Plug.Router

  plug(:match)
  plug(:dispatch)

  # Two trivial endpoints; mounted under /status via forward/2 below.
  get "/check" do
    send_resp(conn, 200, "ok")
  end

  get "/info" do
    send_resp(conn, 200, "ok")
  end
end
defmodule External do
  use NewRelic.Tracer

  # Traced with category :external; the tests below assert that
  # External/* metrics are recorded for this call.
  @trace {:call, category: :external}
  def call do
    :make_request
  end
end
# Router under test: each route shape exercises a different transaction
# naming rule asserted in the tests below.
defmodule TestPlugApp do
  use Plug.Router
  use NewRelic.Transaction

  plug(:match)
  plug(:dispatch)

  get "/foo/:blah" do
    External.call()
    send_resp(conn, 200, blah)
  end

  # Exercises error reporting: the raise aborts the request, so no
  # response is ever sent.  (An unreachable send_resp/3 that followed
  # the raise was removed; `_ = conn` keeps the implicit binding used.)
  get "/fail" do
    _ = conn
    raise "FAIL"
  end

  get "/ordering/:one/test/:two/ok/:three" do
    send_resp(conn, 200, "ok")
  end

  # Overrides the automatic route-based transaction name.
  get "/custom_name" do
    NewRelic.set_transaction_name("/very/unique/name")
    send_resp(conn, 200, "ok")
  end

  get "/named_wildcard/*public_variable_name" do
    send_resp(conn, 200, "ok")
  end

  get "/unnamed_wildcard/*_secret_variable_name" do
    send_resp(conn, 200, "ok")
  end

  get "/fancy/:transaction/:_names/*supported" do
    send_resp(conn, 200, "hello")
  end

  forward("/forward/a", to: TestPlugAppForward)
  forward("/forward/b", to: TestPlugAppForward)
  forward("/status", to: Status)
end
setup do
  # Fresh metric harvest per test; stop collecting again on exit.
  harvest_cycle = NewRelic.Harvest.Collector.Metric.HarvestCycle
  TestHelper.restart_harvest_cycle(harvest_cycle)
  on_exit(fn -> TestHelper.pause_harvest_cycle(harvest_cycle) end)
  :ok
end
test "Basic transaction" do
  TestPlugApp.call(conn(:get, "/foo/1"), [])

  harvest = TestHelper.gather_harvest(Collector.Metric.Harvester)

  assert TestHelper.find_metric(harvest, "WebTransaction/Plug/GET//foo/:blah")
  # Exactly one satisfying request recorded in the Apdex data.
  assert [_, [1, _, _, _, _, _]] = TestHelper.find_metric(harvest, "Apdex")
  assert TestHelper.find_metric(harvest, "External/MetricTransactionTest.External.call/all")
  assert TestHelper.find_metric(harvest, "External/allWeb")
end
test "Failed transaction" do
  TestHelper.request(TestPlugApp, conn(:get, "/fail"))

  harvest = TestHelper.gather_harvest(Collector.Metric.Harvester)

  assert TestHelper.find_metric(harvest, "Errors/all")
  assert [_, [_, _, 1, _, _, _]] = TestHelper.find_metric(harvest, "Apdex", 0)
end
test "Custom transaction names" do
  TestPlugApp.call(conn(:get, "/custom_name"), [])

  # The route handler overrides the name via set_transaction_name/1.
  harvest = TestHelper.gather_harvest(Collector.Metric.Harvester)
  assert TestHelper.find_metric(harvest, "WebTransaction/very/unique/name")
end
test "fancy transaction names" do
  TestPlugApp.call(conn(:get, "/fancy/transaction/names/supported/here!"), [])

  harvest = TestHelper.gather_harvest(Collector.Metric.Harvester)

  # The metric name reflects the route pattern, not the concrete path.
  assert TestHelper.find_metric(
           harvest,
           "WebTransaction/Plug/GET//fancy/:transaction/:_names/*supported"
         )
end
test "Forwarding transaction names" do
  # Two hits on /status/check, one on /status/info, all through the
  # forwarded Status router.
  for path <- ["/status/check", "/status/check", "/status/info"] do
    TestHelper.request(TestPlugApp, conn(:get, path))
  end

  harvest = TestHelper.gather_harvest(Collector.Metric.Harvester)

  assert TestHelper.find_metric(harvest, "WebTransaction/Plug/GET//status/check", 2)
  assert TestHelper.find_metric(harvest, "WebTransaction/Plug/GET//status/info")
end
end
| 26.343066 | 94 | 0.682738 |
0804c4bea044e83982288282bd8bc2e1a004b9fe | 1,943 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_web_detection_web_entity.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_web_detection_web_entity.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p2beta1_web_detection_web_entity.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1WebDetectionWebEntity do
@moduledoc """
Entity deduced from similar images on the Internet.
## Attributes
- description (String.t): Canonical description of the entity, in English. Defaults to: `null`.
- entityId (String.t): Opaque entity ID. Defaults to: `null`.
- score (float()): Overall relevancy score for the entity. Not normalized and not comparable across different image queries. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => any(),
:entityId => any(),
:score => any()
}
field(:description)
field(:entityId)
field(:score)
end
defimpl Poison.Decoder,
for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1WebDetectionWebEntity do
def decode(value, options) do
GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1WebDetectionWebEntity.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p2beta1WebDetectionWebEntity do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.932203 | 147 | 0.739063 |
0804ef642910a9974e3cf3c8f2ab71778a876c88 | 726 | ex | Elixir | lib/forecastr/cache.ex | densefog/forecastr | 5b95b52ecd6a1324cac5e8616f693e929135a331 | [
"Apache-2.0"
] | 11 | 2018-03-26T07:09:17.000Z | 2020-01-12T21:52:03.000Z | lib/forecastr/cache.ex | densefog/forecastr | 5b95b52ecd6a1324cac5e8616f693e929135a331 | [
"Apache-2.0"
] | 3 | 2018-03-27T09:10:54.000Z | 2018-04-10T19:08:03.000Z | lib/forecastr/cache.ex | densefog/forecastr | 5b95b52ecd6a1324cac5e8616f693e929135a331 | [
"Apache-2.0"
] | 7 | 2018-03-26T07:09:20.000Z | 2021-03-06T14:43:44.000Z | defmodule Forecastr.Cache do
@moduledoc """
"Proxy" module for different caches
"""
@spec get(:today, String.t()) :: map() | nil
def get(:today, query) do
Forecastr.Cache.Worker.get(Forecastr.Cache.Today, query)
end
@spec get(:next_days, String.t()) :: map() | nil
def get(:next_days, query) do
Forecastr.Cache.Worker.get(Forecastr.Cache.NextDays, query)
end
@spec set(:today, String.t(), map()) :: :ok
def set(:today, query, response) do
Forecastr.Cache.Worker.set(Forecastr.Cache.Today, query, response)
end
@spec set(:next_days, String.t(), map()) :: :ok
def set(:next_days, query, response) do
Forecastr.Cache.Worker.set(Forecastr.Cache.NextDays, query, response)
end
end
| 27.923077 | 73 | 0.674931 |
0804fefbfcf7dd6cffcce25a895d72e2926c3c90 | 613 | exs | Elixir | test/views/error_view_test.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | 1 | 2018-04-25T15:03:42.000Z | 2018-04-25T15:03:42.000Z | test/views/error_view_test.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | null | null | null | test/views/error_view_test.exs | gilcierweb/appElixirPhoenix | 4c594b365ddc07e732bd421901620469a49b2395 | [
"MIT"
] | null | null | null | defmodule AppElixirPhoenix.ErrorViewTest do
use AppElixirPhoenix.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(AppElixirPhoenix.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(AppElixirPhoenix.ErrorView, "500.html", []) ==
"Server internal error"
end
test "render any other" do
assert render_to_string(AppElixirPhoenix.ErrorView, "505.html", []) ==
"Server internal error"
end
end
| 27.863636 | 74 | 0.699837 |
0805080468b02cde07fb31b42e3c88fc4b686577 | 411 | ex | Elixir | test/support/mocks/behaviours/path.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | 30 | 2021-05-19T08:54:44.000Z | 2022-03-11T22:52:25.000Z | test/support/mocks/behaviours/path.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | 3 | 2021-11-17T21:26:01.000Z | 2022-03-12T09:49:25.000Z | test/support/mocks/behaviours/path.ex | f0lio/edgedb-elixir | b285bd8037b0b951aabfa1d1733889880f8bfd66 | [
"MIT"
] | 3 | 2021-08-29T14:55:41.000Z | 2022-03-12T01:30:35.000Z | defmodule Tests.Support.Mocks.Behaviours.Path do
@callback basename(path :: Path.t()) :: String.t()
@callback expand(path :: Path.t()) :: String.t()
@callback type(path :: Path.t()) :: :absolute | :relative | :volumerelative
@callback join(paths :: list(Path.t())) :: String.t()
@callback join(left :: Path.t(), right :: Path.t()) :: String.t()
@callback dirname(path :: Path.t()) :: String.t()
end
| 45.666667 | 77 | 0.632603 |
080567035007d99810e1b0c3440371653859c291 | 3,248 | ex | Elixir | lib/litelist_web/controllers/dashboard_controller.ex | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T15:46:49.000Z | 2021-01-13T15:46:49.000Z | lib/litelist_web/controllers/dashboard_controller.ex | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | 1 | 2021-01-13T16:42:24.000Z | 2021-01-13T16:42:24.000Z | lib/litelist_web/controllers/dashboard_controller.ex | saycel/Litelist | 2a1f67385523e5388c431c307b4052a6f8778818 | [
"MIT"
] | null | null | null | defmodule LitelistWeb.DashboardController do
use LitelistWeb, :controller
alias Litelist.Posts
alias LitelistWeb.Utils.SharedUtils
alias LitelistWeb.Utils.ExportUtils
alias Litelist.Moderation
alias Litelist.Discussions
def index(conn, _params) do
render(conn, "index.html")
end
def posts(conn, _params) do
posts = Posts.list_posts_by_neighbor(conn.assigns.current_neighbor)
render(conn, "posts.html", posts: posts)
end
def delete_all(conn, _params) do
{_total, _posts} = Posts.delete_all_by_neighbor(conn.assigns.current_neighbor)
conn
|> put_flash(:info, "All posts permanently deleted.")
|> redirect(to: Routes.dashboard_path(conn, :posts))
end
def delete(conn, %{"id" => id}) do
job = Posts.get_post!(id)
if SharedUtils.permission?(conn.assigns.current_neighbor, job, job.type) do
{:ok, _job} = Posts.delete_post(job)
conn
|> put_flash(:info, "Job deleted successfully.")
|> redirect(to: Routes.dashboard_path(conn, :posts))
else
unauthorized_redirect(conn)
end
end
def export_posts(conn, _params) do
csv = ExportUtils.build_posts_csv(conn.assigns.current_neighbor)
conn
|> put_resp_content_type("text/csv")
|> put_resp_header("content-disposition", "attachment; filename=\"MyPosts.csv\"")
|> send_resp(200, csv)
end
def export_my_flagged_posts(conn, _params) do
csv = ExportUtils.build_my_flagged_posts_csv(conn.assigns.current_neighbor)
conn
|> put_resp_content_type("text/csv")
|> put_resp_header("content-disposition", "attachment; filename=\"MyFlaggedPosts.csv\"")
|> send_resp(200, csv)
end
def export_posts_i_flagged(conn, _params) do
csv = ExportUtils.build_posts_i_flagged(conn.assigns.current_neighbor)
conn
|> put_resp_content_type("text/csv")
|> put_resp_header("content-disposition", "attachment; filename=\"MyFlaggedPosts.csv\"")
|> send_resp(200, csv)
end
def my_flagged_posts(conn, _params) do
flags = Moderation.list_my_flagged_posts(conn.assigns.current_neighbor)
render(conn, "my_flagged_posts.html", flags: flags)
end
def posts_i_flagged(conn, _params) do
flags = Moderation.list_flags_by_neighbor(conn.assigns.current_neighbor)
render(conn, "posts_i_flagged.html", flags: flags)
end
def my_discussions(conn, _params) do
discussions = Discussions.list_discussions_by_neighbor(conn.assigns.current_neighbor)
render(conn, "my_discussions.html", discussions: discussions)
end
def export_my_discussions(conn, _params) do
csv = ExportUtils.build_discussions_csv(conn.assigns.current_neighbor)
conn
|> put_resp_content_type("text/csv")
|> put_resp_header("content-disposition", "attachment; filename=\"MyDiscussions.csv\"")
|> send_resp(200, csv)
end
defp unauthorized_redirect(conn) do
conn
|> put_flash(:error, "Unauthorized.")
|> redirect(to: Routes.dashboard_path(conn, :posts))
end
end | 34.189474 | 96 | 0.664101 |
08058fa863e9b6ee62a73aefa72777f72bc3318b | 140 | exs | Elixir | test/onvif_test.exs | cogini/onvif | ec687d70895bf22ad7b802d7964142e3719787b6 | [
"Apache-2.0"
] | 3 | 2018-09-26T07:33:22.000Z | 2020-01-14T18:34:44.000Z | test/onvif_test.exs | cogini/onvif | ec687d70895bf22ad7b802d7964142e3719787b6 | [
"Apache-2.0"
] | null | null | null | test/onvif_test.exs | cogini/onvif | ec687d70895bf22ad7b802d7964142e3719787b6 | [
"Apache-2.0"
] | null | null | null | defmodule OnvifTest do
use ExUnit.Case
# doctest Onvif
# test "greets the world" do
# assert Onvif.hello() == :world
# end
end
| 15.555556 | 36 | 0.657143 |
080591648efff0e8a017bb7171cd989a53ced046 | 4,594 | exs | Elixir | test/yacto/query_test.exs | pumbaacave/yacto | 6bb98a7d156ff49aa18756b6cf7ce9d1ee89bf23 | [
"Apache-2.0"
] | null | null | null | test/yacto/query_test.exs | pumbaacave/yacto | 6bb98a7d156ff49aa18756b6cf7ce9d1ee89bf23 | [
"Apache-2.0"
] | null | null | null | test/yacto/query_test.exs | pumbaacave/yacto | 6bb98a7d156ff49aa18756b6cf7ce9d1ee89bf23 | [
"Apache-2.0"
] | null | null | null | defmodule Yacto.QueryTest do
use PowerAssert
require Ecto.Query
@player_id "player_id"
setup do
  # Seed one Item in the default DB and one Player in the per-player shard.
  Yacto.DB.repo(:default).insert!(%Yacto.QueryTest.Item{name: "foo", quantity: 100})

  Yacto.DB.repo(:player, @player_id).insert!(%Yacto.QueryTest.Player{
    name: "player",
    value: 1000
  })

  ExUnit.Callbacks.on_exit(&cleanup/0)
  :ok
end
# Removes every row written by the tests from both databases.
defp cleanup() do
  default_repo = Yacto.DB.repo(:default)
  player_repo = Yacto.DB.repo(:player, @player_id)

  Ecto.Query.where(Yacto.QueryTest.Item, [], true) |> default_repo.delete_all()
  Ecto.Query.where(Yacto.QueryTest.Player, [], true) |> player_repo.delete_all()
end
test "Yacto.Query.get_by_for_update" do
  # Row-locked fetch from the default (non-sharded) database.
  item_repo = Yacto.QueryTest.Item.repo()
  item = item_repo.get_by_for_update!(Yacto.QueryTest.Item, name: "foo")
  assert item.name == "foo"
  assert item.quantity == 100

  # Row-locked fetch from the per-player shard.
  player_repo = Yacto.QueryTest.Player.repo(@player_id)
  player = player_repo.get_by_for_update!(Yacto.QueryTest.Player, name: "player")
  assert player.name == "player"
  assert player.value == 1000
end
# Shared body for the get_by_or_new / get_by_or_insert_for_update tests.
#
# When `lock` is true, records are fetched (or inserted) under a row
# lock via get_by_or_insert_for_update/3; otherwise get_by_or_new/3
# builds an unsaved struct in memory.  Both return {record, created?}.
# The previously fourfold-duplicated `if lock` branch is extracted into
# get_by_or_default/5 below.
defp test_get_by_or_new(lock) do
  repo = Yacto.QueryTest.Item.repo()

  # Existing item: returned as-is, default ignored (created? == false).
  {obj, false} =
    get_by_or_default(repo, lock, Yacto.QueryTest.Item, [name: "foo"], %Yacto.QueryTest.Item{
      name: "foo",
      quantity: 1000
    })

  assert obj.name == "foo"
  assert obj.quantity == 100

  # Missing item: the supplied default is used (created? == true).
  {obj, true} =
    get_by_or_default(repo, lock, Yacto.QueryTest.Item, [name: "bar"], %Yacto.QueryTest.Item{
      name: "bar",
      quantity: 1000
    })

  assert obj.name == "bar"
  assert obj.quantity == 1000

  repo = Yacto.QueryTest.Player.repo(@player_id)

  {obj, false} =
    get_by_or_default(repo, lock, Yacto.QueryTest.Player, [name: "player"], %Yacto.QueryTest.Player{
      name: "player",
      value: 999
    })

  assert obj.name == "player"
  assert obj.value == 1000
  assert obj.updated_at != nil
  assert obj.inserted_at != nil

  {obj, true} =
    get_by_or_default(
      repo,
      lock,
      Yacto.QueryTest.Player,
      [name: "not player"],
      %Yacto.QueryTest.Player{name: "not player", value: 999}
    )

  assert obj.name == "not player"
  assert obj.value == 999

  if lock do
    # The locking variant persists the default, so timestamps are set.
    assert obj.updated_at != nil
    assert obj.inserted_at != nil
  else
    # get_by_or_new only builds the struct; it is never persisted.
    assert obj.updated_at == nil
    assert obj.inserted_at == nil
  end
end

# Dispatches to the locking or non-locking variant of "get or default".
defp get_by_or_default(repo, true, schema, clauses, default) do
  repo.get_by_or_insert_for_update(schema, clauses, Ecto.Changeset.change(default))
end

defp get_by_or_default(repo, false, schema, clauses, default) do
  repo.get_by_or_new(schema, clauses, default)
end
# Both entry points run the same scenario; only the lock flag differs.
test "Yacto.Repo.get_by_or_new with no lock" do
test_get_by_or_new(false)
end
test "Yacto.Repo.get_by_or_new with lock" do
test_get_by_or_new(true)
end
test "Yacto.Repo.find" do
  mod = Yacto.QueryTest.Item
  # Assert on list shape instead of length/1: `== []` is O(1) (vs the
  # `length(...) == 0` anti-pattern) and failures show the actual rows.
  assert [_] = mod.repo().find(mod, name: "foo")
  assert mod.repo().find(mod, name: "bar") == []

  mod = Yacto.QueryTest.Player
  assert [_] = mod.repo(@player_id).find(mod, name: "player")
  assert mod.repo(@player_id).find(mod, name: "not player") == []
end
test "Yacto.Repo.count" do
  item = Yacto.QueryTest.Item
  item_repo = item.repo()
  assert 1 == item_repo.count(item, name: "foo")
  assert 0 == item_repo.count(item, name: "bar")

  player = Yacto.QueryTest.Player
  player_repo = player.repo(@player_id)
  assert 1 == player_repo.count(player, name: "player")
  assert 0 == player_repo.count(player, name: "not player")
end
test "Yacto.Repo.delete_by" do
  schema = Yacto.QueryTest.Player
  repo = schema.repo(@player_id)

  # First call removes the seeded row; the second finds nothing left.
  assert {1, nil} == repo.delete_by(schema, name: "player")
  assert {0, nil} == repo.delete_by(schema, name: "player")

  # The bang variant raises when no row matches.
  assert_raise Ecto.NoResultsError, fn ->
    repo.delete_by!(schema, name: "player")
  end
end
end
| 28.7125 | 97 | 0.62081 |
0805ab8ce6ba05bb9ad72ebb2f209074ba9e8b93 | 5,513 | ex | Elixir | lib/bamboo/postmark_adapter.ex | kianmeng/bamboo_postmark | e25fbecde39adc90c84c1fc64545d8f332c3ce85 | [
"MIT"
] | null | null | null | lib/bamboo/postmark_adapter.ex | kianmeng/bamboo_postmark | e25fbecde39adc90c84c1fc64545d8f332c3ce85 | [
"MIT"
] | null | null | null | lib/bamboo/postmark_adapter.ex | kianmeng/bamboo_postmark | e25fbecde39adc90c84c1fc64545d8f332c3ce85 | [
"MIT"
] | null | null | null | defmodule Bamboo.PostmarkAdapter do
@moduledoc """
Sends email using Postmarks's API.
Use this adapter to send emails through Postmark's API. Requires that an API
key is set in the config.
## Example config
# In config/config.exs, or config.prod.exs, etc.
config :my_app, MyApp.Mailer,
adapter: Bamboo.PostmarkAdapter,
api_key: "my_api_key" or {:system, "POSTMARK_API_KEY"}
"""
@behaviour Bamboo.Adapter
@default_base_uri "https://api.postmarkapp.com"
@send_email_path "email"
@send_email_template_path "email/withTemplate"
import Bamboo.ApiError, only: [build_api_error: 1]
def deliver(email, config) do
api_key = get_key(config)
params = email |> convert_to_postmark_params() |> json_library().encode!()
uri = [base_uri(), "/", api_path(email)]
case :hackney.post(uri, headers(api_key), params, options(config)) do
{:ok, status, _headers, response} when status > 299 ->
{:error, build_api_error(%{params: params, response: response})}
{:ok, status, headers, response} ->
{:ok, %{status_code: status, headers: headers, body: response}}
{:error, reason} ->
{:error, build_api_error(%{message: inspect(reason)})}
end
end
def handle_config(config) do
# build the api key - will raise if there are errors
Map.merge(config, %{api_key: get_key(config)})
end
@doc false
def supports_attachments?, do: true
defp get_key(config) do
api_key =
case Map.get(config, :api_key) do
{:system, var} -> System.get_env(var)
key -> key
end
if api_key in [nil, ""] do
raise_api_key_error(config)
else
api_key
end
end
def json_library do
Bamboo.json_library()
end
defp raise_api_key_error(config) do
raise ArgumentError, """
There was no API key set for the Postmark adapter.
* Here are the config options that were passed in:
#{inspect config}
"""
end
defp convert_to_postmark_params(email) do
email
|> email_params()
|> maybe_put_template_params(email)
|> maybe_put_tag_params(email)
|> maybe_put_attachments(email)
end
def maybe_put_attachments(params, %{attachments: []}) do
params
end
def maybe_put_attachments(params, %{attachments: attachments}) do
params
|> Map.put(:"Attachments", Enum.map(attachments, fn attachment ->
%{
Name: attachment.filename,
Content: attachment.data |> Base.encode64(),
ContentType: attachment.content_type,
ContentId: attachment.content_id
}
end))
end
defp maybe_put_template_params(params, %{private:
%{template_id: template_name, template_model: template_model}}) do
params
|> Map.put(:"TemplateId", template_name)
|> Map.put(:"TemplateModel", template_model)
|> Map.put(:"InlineCss", true)
end
defp maybe_put_template_params(params, _) do
params
end
defp maybe_put_tag_params(params, %{private: %{tag: tag}}) do
Map.put(params, :"Tag", tag)
end
defp maybe_put_tag_params(params, _) do
params
end
defp email_params(email) do
recipients = recipients(email)
add_message_params(%{
"From": email_from(email),
"To": recipients_to_string(recipients, "To"),
"Cc": recipients_to_string(recipients, "Cc"),
"Bcc": recipients_to_string(recipients, "Bcc"),
"Subject": email.subject,
"TextBody": email.text_body,
"HtmlBody": email.html_body,
"Headers": email_headers(email),
"TrackOpens": true
}, email)
end
defp add_message_params(params, %{private: %{message_params: message_params}}) do
Enum.reduce(message_params, params, fn({key, value}, params) ->
Map.put(params, key, value)
end)
end
defp add_message_params(params, _), do: params
defp email_from(email) do
name = elem(email.from, 0)
email = elem(email.from, 1)
if name do
String.trim("#{name} <#{email}>")
else
String.trim(email)
end
end
# Converts the email's header pairs into Postmark's expected
# [%{Name: ..., Value: ...}] shape.
defp email_headers(email) do
  for {name, value} <- email.headers, do: %{Name: name, Value: value}
end
# Flattens the email's To/Cc/Bcc address lists into one list of recipient
# maps, each tagged with its recipient type for later filtering.
defp recipients(%{to: to, cc: cc, bcc: bcc}) do
  []
  |> add_recipients(to, type: "To")
  |> add_recipients(cc, type: "Cc")
  |> add_recipients(bcc, type: "Bcc")
end
# Appends `new_recipients` ({name, address} tuples) to the accumulated list,
# tagging each entry with the given recipient type ("To" | "Cc" | "Bcc").
#
# The previous implementation appended inside an Enum.reduce
# (`recipients ++ [entry]`), which re-walks the accumulator on every step
# (O(n^2)). Building the tagged entries with one map and concatenating once
# keeps the exact same ordering in O(n).
defp add_recipients(recipients, new_recipients, type: recipient_type) do
  tagged =
    Enum.map(new_recipients, fn {name, address} ->
      %{name: name, email: address, type: recipient_type}
    end)

  recipients ++ tagged
end
# Renders every recipient of the given type ("To" | "Cc" | "Bcc") as a
# comma-separated "name <address>" string; empty string when none match.
defp recipients_to_string(recipients, type) do
  recipients
  |> Enum.filter(&(&1[:type] == type))
  |> Enum.map_join(",", &"#{&1[:name]} <#{&1[:email]}>")
end
# HTTP headers for every Postmark API request; the server token header is
# how Postmark authenticates the sender.
defp headers(api_key) do
  [
    {"accept", "application/json"},
    {"content-type", "application/json"},
    {"x-postmark-server-token", api_key}
  ]
end
# Template sends go to the template endpoint; plain sends to the regular one.
# (@send_email_template_path / @send_email_path are defined earlier in this
# module, outside this excerpt.)
defp api_path(%{private: %{template_id: _}}), do: @send_email_template_path
defp api_path(_), do: @send_email_path
# Base URI for Postmark requests; overridable at runtime via the
# :postmark_base_uri application env, falling back to @default_base_uri
# (defined earlier in this module). Note `||` also falls back when the env
# value is explicitly nil or false, unlike a get_env/3 default.
defp base_uri do
  Application.get_env(:bamboo, :postmark_base_uri) || @default_base_uri
end
# Request options for the HTTP client: whatever the adapter config supplies
# under :request_options, with `with_body: true` always forced on (merged
# last so it cannot be overridden).
defp options(config) do
  request_options = config[:request_options] || []
  Keyword.merge(request_options, with_body: true)
end
end
| 27.984772 | 83 | 0.630691 |
0805d4b406aca7afb153bb60b1b1cdf1287bbb39 | 6,065 | exs | Elixir | test/blockfrost/cardano/metadata_test.exs | blockfrost/blockfrost-elixir | b1f8ea7ae47cd3a7037e1c9ed0d3691fc775bdec | [
"Apache-2.0"
] | 13 | 2021-08-31T03:54:37.000Z | 2022-01-30T17:39:40.000Z | test/blockfrost/cardano/metadata_test.exs | blockfrost/blockfrost-elixir | b1f8ea7ae47cd3a7037e1c9ed0d3691fc775bdec | [
"Apache-2.0"
] | 6 | 2021-08-30T04:45:52.000Z | 2021-09-23T09:15:08.000Z | test/blockfrost/cardano/metadata_test.exs | blockfrost/blockfrost-elixir | b1f8ea7ae47cd3a7037e1c9ed0d3691fc775bdec | [
"Apache-2.0"
] | null | null | null | defmodule Blockfrost.Cardano.MetadataTest do
use Blockfrost.Case
alias Blockfrost.Cardano.Metadata
alias Blockfrost.HTTPClientMock
alias Blockfrost.Response.{
TransactionMetadataLabelsResponse,
TransactionMetadataContentJSONResponse,
TransactionMetadataContentCBORResponse
}
setup_all do
start_supervised!({Blockfrost, api_key: "apikey", network: :cardano_testnet})
:ok
end
describe "transaction_metadata_labels/1,2" do
test "properly decodes 200 response" do
expect(HTTPClientMock, :request, fn request, _finch, _opts ->
assert request.path ==
"/api/v0/metadata/txs/labels"
response(
200,
[
%{
label: "1990",
cip10: nil,
count: "1"
},
%{
label: "1967",
cip10: "nut.link metadata oracles registry",
count: "3"
},
%{
label: "1968",
cip10: "nut.link metadata oracles data points",
count: "16321"
}
]
)
end)
assert {:ok,
[
%TransactionMetadataLabelsResponse.MetadataLabel{
label: "1990",
cip10: nil,
count: "1"
},
%TransactionMetadataLabelsResponse.MetadataLabel{
label: "1967",
cip10: "nut.link metadata oracles registry",
count: "3"
},
%TransactionMetadataLabelsResponse.MetadataLabel{
label: "1968",
cip10: "nut.link metadata oracles data points",
count: "16321"
}
]} == Metadata.transaction_metadata_labels(Blockfrost)
end
end
describe "transaction_metadata_content_json/2,3" do
test "properly decodes 200 response" do
expect(HTTPClientMock, :request, fn request, _finch, _opts ->
assert request.path ==
"/api/v0/metadata/txs/labels/felipe"
response(
200,
[
%{
tx_hash: "257d75c8ddb0434e9b63e29ebb6241add2b835a307aa33aedba2effe09ed4ec8",
json_metadata: %{
ADAUSD: [
%{
value: "0.10409800535729975",
source: "ergoOracles"
}
]
}
},
%{
tx_hash: "e865f2cc01ca7381cf98dcdc4de07a5e8674b8ea16e6a18e3ed60c186fde2b9c",
json_metadata: %{
ADAUSD: [
%{
value: "0.15409850555139935",
source: "ergoOracles"
}
]
}
},
%{
tx_hash: "4237501da3cfdd53ade91e8911e764bd0699d88fd43b12f44a1f459b89bc91be",
json_metadata: nil
}
]
)
end)
assert {:ok,
[
%TransactionMetadataContentJSONResponse.TransactionMetadataContent{
tx_hash: "257d75c8ddb0434e9b63e29ebb6241add2b835a307aa33aedba2effe09ed4ec8",
json_metadata: %{
"ADAUSD" => [
%{
"value" => "0.10409800535729975",
"source" => "ergoOracles"
}
]
}
},
%TransactionMetadataContentJSONResponse.TransactionMetadataContent{
tx_hash: "e865f2cc01ca7381cf98dcdc4de07a5e8674b8ea16e6a18e3ed60c186fde2b9c",
json_metadata: %{
"ADAUSD" => [
%{
"value" => "0.15409850555139935",
"source" => "ergoOracles"
}
]
}
},
%TransactionMetadataContentJSONResponse.TransactionMetadataContent{
tx_hash: "4237501da3cfdd53ade91e8911e764bd0699d88fd43b12f44a1f459b89bc91be",
json_metadata: nil
}
]} == Metadata.transaction_metadata_content_json(Blockfrost, "felipe")
end
end
describe "transaction_metadata_content_cbor/2,3" do
test "properly decodes 200 response" do
expect(HTTPClientMock, :request, fn request, _finch, _opts ->
assert request.path ==
"/api/v0/metadata/txs/labels/felipe/cbor"
response(
200,
[
%{
tx_hash: "257d75c8ddb0434e9b63e29ebb6241add2b835a307aa33aedba2effe09ed4ec8",
cbor_metadata: nil
},
%{
tx_hash: "e865f2cc01ca7381cf98dcdc4de07a5e8674b8ea16e6a18e3ed60c186fde2b9c",
cbor_metadata: nil
},
%{
tx_hash: "4237501da3cfdd53ade91e8911e764bd0699d88fd43b12f44a1f459b89bc91be",
cbor_metadata: "\\xa100a16b436f6d62696e6174696f6e8601010101010c"
}
]
)
end)
assert {:ok,
[
%TransactionMetadataContentCBORResponse.TransactionMetadataContent{
tx_hash: "257d75c8ddb0434e9b63e29ebb6241add2b835a307aa33aedba2effe09ed4ec8",
cbor_metadata: nil
},
%TransactionMetadataContentCBORResponse.TransactionMetadataContent{
tx_hash: "e865f2cc01ca7381cf98dcdc4de07a5e8674b8ea16e6a18e3ed60c186fde2b9c",
cbor_metadata: nil
},
%TransactionMetadataContentCBORResponse.TransactionMetadataContent{
tx_hash: "4237501da3cfdd53ade91e8911e764bd0699d88fd43b12f44a1f459b89bc91be",
cbor_metadata: "\\xa100a16b436f6d62696e6174696f6e8601010101010c"
}
]} == Metadata.transaction_metadata_content_cbor(Blockfrost, "felipe")
end
end
end
| 33.324176 | 94 | 0.519373 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.