code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule Shipping.Repo do
  # use Ecto.Repo, otp_app: :shipping
  @moduledoc """
  Agent-backed stand-in for an Ecto repository.

  A Repo module is normally responsible for data storage and retrieval to
  and from a database system (see the commented-out `use Ecto.Repo` above).
  Stage 1 of the Shipping example does not use a database. Instead, data is
  handled by two Elixir Agents, `Shipping.HandlingEventAgent` and
  `Shipping.CargoAgent`, which keep their lists of handling events and
  cargoes in memory (the Agent's state) and in a file. See the Agents for
  more detail.
  """

  alias Shipping.Cargoes.Cargo
  alias Shipping.HandlingEvents.HandlingEvent
  alias Shipping.{HandlingEventAgent, CargoAgent}

  @doc """
  Retrieves every stored record of the given schema: all Cargoes from the
  Cargo Agent, or all Handling Events from the Handling Event Agent.
  """
  def all(Cargo), do: CargoAgent.all()
  def all(HandlingEvent), do: HandlingEventAgent.all()

  @doc """
  Inserts a new Handling Event through the Handling Event Agent.

  Returns `{:ok, handling_event}` when the changeset is valid, otherwise
  `{:error, changeset}` with the `:action` field set to `:insert`.
  """
  def insert(%{data: %HandlingEvent{}} = changeset) do
    case changeset.valid? do
      true ->
        {:ok, changeset |> Ecto.Changeset.apply_changes() |> HandlingEventAgent.add()}

      false ->
        {:error, %{changeset | action: :insert}}
    end
  end

  @doc """
  Fetches a single record of the given schema by id.

  A binary id is first converted to an integer. Returns `nil` when no
  record matches.
  """
  def get!(HandlingEvent, id) when is_binary(id) do
    get!(HandlingEvent, String.to_integer(id))
  end

  def get!(HandlingEvent, id) do
    HandlingEvent
    |> all()
    |> Enum.find(&(&1.id == id))
  end

  def get!(Cargo, id) when is_binary(id) do
    get!(Cargo, String.to_integer(id))
  end

  def get!(Cargo, id) do
    Cargo
    |> all()
    |> Enum.find(&(&1.id == id))
  end

  @doc """
  Looks records up by tracking id.

  For `HandlingEvent` this returns the list of all matching events; for
  `Cargo` it returns the single matching cargo or `nil`.
  """
  def get_by_tracking_id!(HandlingEvent, tracking_id) do
    HandlingEvent
    |> all()
    |> Enum.filter(&(&1.tracking_id == tracking_id))
  end

  def get_by_tracking_id!(Cargo, tracking_id) do
    Cargo
    |> all()
    |> Enum.find(&(&1.tracking_id == tracking_id))
  end

  @doc """
  Updates a Handling Event or a Cargo through its Agent.

  Returns `{:ok, entity}` when the changeset is valid, otherwise
  `{:error, changeset}` with the `:action` field set to `:update`.
  """
  def update(%{data: %HandlingEvent{}} = changeset) do
    case changeset.valid? do
      true ->
        {:ok, changeset |> Ecto.Changeset.apply_changes() |> HandlingEventAgent.update()}

      false ->
        {:error, %{changeset | action: :update}}
    end
  end

  def update(%{data: %Cargo{}} = changeset) do
    case changeset.valid? do
      true ->
        {:ok, changeset |> Ecto.Changeset.apply_changes() |> CargoAgent.update()}

      false ->
        {:error, %{changeset | action: :update}}
    end
  end
end
|
apps/shipping/lib/shipping/repo.ex
| 0.71423
| 0.464416
|
repo.ex
|
starcoder
|
defmodule SMPPEX.MC do
  @moduledoc """
  This is a module for launching a TCP listener (or any other listener supported by `ranch`, for example, `ssl`) which handles incoming connections with the passed `SMPPEX.Session` implementations.

  To start an MC one generally should do the following.

  1. Implement an `SMPPEX.Session` behaviour.

  ```elixir
  defmodule MyMCSession do
    use SMPPEX.Session
    # ...Callback implementation
  end
  ```

  2. Pass the child specification to a supervisor, using implemented behaviour as a session module:

  ```elixir
  Supervisor.start_link(
    [
      {
        SMPPEX.MC,
        session: {MyESMESession, session_arg},
        transport_opts: [port: 2775]
      },
      ...
    ],
    ...
  )
  ```

  Note that each received connection is served with its own process which uses passed callback module (`MyESMESession`) for handling connection events. Each process has his own state initialized by `init` callback receiving `socket`, `transport` and a copy of arguments (`session_arg`).
  """

  alias :ranch, as: Ranch
  alias SMPPEX.Session.Defaults

  # Defaults applied when the corresponding option is absent from `opts`.
  @default_transport :ranch_tcp
  @default_acceptor_count 50

  @spec start({module, args :: term}, opts :: Keyword.t()) ::
          {:ok, listener_ref :: Ranch.ref()}
          | {:error, reason :: term}

  @doc """
  Starts listener for MC entity.

  The listener is started in the supervision tree of the `:ranch` application.
  Therefore, prefer `child_spec/1`, which allows you to start the MC in your own supervision tree.

  The first argument must be a `{module, arg}` tuple, where `module` is the callback module which should implement `SMPPEX.Session` behaviour, while `arg` is the argument passed to the `init` callback each time a new connection is received.

  For the list of other options see `child_spec/1`.
  """
  def start(mod_with_args, opts \\ []) do
    {ref, transport, transport_opts, protocol, protocol_opts} =
      ranch_start_args(mod_with_args, opts)

    start_result = Ranch.start_listener(ref, transport, transport_opts, protocol, protocol_opts)

    # Normalize Ranch's success shapes to {:ok, ref}: the ref (not the pid)
    # is what callers later pass to stop/1.
    # NOTE(review): the 3-tuple success clause presumably covers a different
    # Ranch version's return shape — confirm against the Ranch changelog.
    case start_result do
      {:error, _} = error -> error
      {:ok, _, _} -> {:ok, ref}
      {:ok, _} -> {:ok, ref}
    end
  end

  @doc """
  Returns a supervisor child specification for starting listener for MC entity.

  Starting under a supervisor:

  ```elixir
  Supervisor.start_link(
    [
      {SMPPEX.MC, session: {MyESMESession, session_arg}, ...},
      ...
    ],
    ...
  )
  ```

  Options:
  * `:session` (required) a `{module, arg}` tuple, where `module` is the callback module which should implement `SMPPEX.Session` behaviour, while `arg` is the argument passed to the `init` callback each time a new connection is received.
  * `:transport` is Ranch transport used for TCP connections: either `ranch_tcp` (the default) or `ranch_ssl`;
  * `:transport_opts` is a map of Ranch transport options. The major key is `socket_opts` which contains a list of important options such as `{:port, port}`. The port is set to `0` by default, which means that the listener will accept connections on a random free port. For backward compatibility one can pass a list of socket options instead of `transport_opts` map (as in Ranch 1.x).
  * `:session_module` is a module to use as an alternative to `SMPPEX.Session` for handling sessions (if needed). For example, `SMPPEX.TelemetrySession`.
  * `:acceptor_count` is the number of Ranch listener acceptors, #{@default_acceptor_count} by default.
  * `:mc_opts` is a keyword list of MC options:
    - `:timer_resolution` is interval of internal `ticks` on which time related events happen, like checking timeouts for pdus, checking SMPP timers, etc. The default is #{inspect(Defaults.timer_resolution())} ms;
    - `:session_init_limit` is the maximum time for which a session waits an incoming bind request. If no bind request is received within this interval of time, the session stops. The default value is #{inspect(Defaults.session_init_limit())} ms;
    - `:enquire_link_limit` is value for enquire_link SMPP timer, i.e. the interval of SMPP session inactivity after which enquire_link PDU is sent to "ping" the connection. The default value is #{inspect(Defaults.enquire_link_limit())} ms;
    - `:enquire_link_resp_limit` is the maximum time for which a session waits for enquire_link PDU response. If the response is not received within this interval of time and no activity from the peer occurs, the session is then considered dead and the session stops. The default value is #{inspect(Defaults.enquire_link_resp_limit())} ms;
    - `:inactivity_limit` is the maximum time for which a peer is allowed not to send PDUs (which are not response PDUs). If no such PDUs are received within this interval of time, the session stops. The default is #{inspect(Defaults.inactivity_limit())} ms;
    - `:response_limit` is the maximum time to wait for a response for a previously sent PDU. If the response is not received within this interval, `handle_resp_timeout` callback is triggered for the original pdu. If the response is received later, it is discarded. The default value is #{inspect(Defaults.response_limit())} ms.
    - `:default_call_timeout` is an integer greater than zero which specifies how many milliseconds to wait for a reply, or the atom :infinity to wait indefinitely. If no reply is received within the specified time, the function call fails and the caller exits. The default value is #{inspect(Defaults.default_call_timeout())} ms.

  If `:mc_opts` list of options is omitted, all options take their default values.

  The returned value is either `{:ok, ref}` or `{:error, reason}`. The `ref` can be later used
  to stop the whole MC listener and all sessions received by it.
  """
  @spec child_spec(Keyword.t()) :: Supervisor.child_spec()
  def child_spec(opts) do
    # TODO: using fetch! + delete since pop! is supported on 1.10+. Replace this with pop! once we require at least Elixir 1.10.
    mod_with_args = Keyword.fetch!(opts, :session)
    opts = Keyword.delete(opts, :session)

    {ref, transport, transport_opts, protocol, protocol_opts} =
      ranch_start_args(mod_with_args, opts)

    Ranch.child_spec(ref, transport, transport_opts, protocol, protocol_opts)
  end

  # Builds the argument tuple shared by start/2 and child_spec/1: a unique
  # listener ref plus transport/protocol configuration for Ranch.
  defp ranch_start_args({_module, _args} = mod_with_args, opts) do
    acceptor_count = Keyword.get(opts, :acceptor_count, @default_acceptor_count)
    transport = Keyword.get(opts, :transport, @default_transport)

    transport_opts =
      opts
      |> Keyword.get(:transport_opts, [{:port, 0}])
      |> normalize_transport_opts(acceptor_count)

    mc_opts = Keyword.get(opts, :mc_opts, [])
    ref = make_ref()
    session_module = Keyword.get(opts, :session_module, SMPPEX.Session)

    # The protocol handler is always SMPPEX.TransportSession; the chosen
    # session module and the user's {module, arg} pair are passed through
    # as its protocol options.
    {
      ref,
      transport,
      transport_opts,
      SMPPEX.TransportSession,
      {session_module, [mod_with_args, mc_opts]}
    }
  end

  # A plain list is treated as Ranch 1.x-style socket options and wrapped
  # into the map form, injecting the acceptor count.
  defp normalize_transport_opts(opts, acceptor_count) when is_list(opts) do
    %{num_acceptors: acceptor_count, socket_opts: opts}
  end

  # A map is passed through as-is, only defaulting :num_acceptors when the
  # caller did not set it explicitly.
  defp normalize_transport_opts(opts, acceptor_count) when is_map(opts) do
    Map.put_new(opts, :num_acceptors, acceptor_count)
  end

  @spec stop(Ranch.ref()) :: :ok

  @doc """
  Stops MC listener and all its sessions.
  """
  def stop(listener) do
    Ranch.stop_listener(listener)
  end
end
|
lib/smppex/mc.ex
| 0.879458
| 0.850189
|
mc.ex
|
starcoder
|
defmodule CairoEx.Context do
  @moduledoc """
  Context is the main module used for drawing.

  Query-style functions return `{:ok, value}` or `{:error, reason}` tuples;
  drawing operators return the context `Ref` so calls can be piped, raising
  `CairoEx.Error` if the port reports an unexpected result.
  """

  alias CairoEx.{
    CairoPort,
    Matrix,
    Ref
  }

  @doc """
  Create a new context from a surface.
  """
  @spec create(surface :: Ref.t()) ::
          {:ok, Ref.t()} | {:error, CairoEx.error_reason()}
  def create(%Ref{type: :surface} = surface) do
    port = surface.port
    cmd = {:create, [surface.handle]}

    port
    |> CairoPort.command(cmd)
    |> case do
      {:ok, handle} ->
        {:ok, Ref.make(handle, :context, port)}

      error ->
        error
    end
  end

  @doc """
  Gets the target surface associated with the context.
  """
  @spec get_target(context :: Ref.t()) ::
          {:ok, Ref.t()} | {:error, CairoEx.error_reason()}
  def get_target(%Ref{type: :context} = context) do
    port = context.port
    cmd = {:get_target, [context.handle]}

    port
    |> CairoPort.command(cmd)
    |> case do
      {:ok, handle} ->
        {:ok, Ref.make(handle, :surface, port)}

      error ->
        error
    end
  end

  @doc """
  Transform a coordinate from user space to device space by multiplying the given point by the current transformation matrix (CTM).
  """
  @spec user_to_device(context :: Ref.t(), x :: float(), y :: float()) ::
          {:ok, CairoEx.vector()} | {:error, CairoEx.error_reason()}
  def user_to_device(%Ref{} = context, x, y)
      when is_float(x) and is_float(y) do
    context.port
    |> CairoPort.command({:user_to_device, [context.handle, x, y]})
    |> case do
      {:ok, [x1, y1]} -> {:ok, {x1, y1}}
      error -> error
    end
  end

  @doc """
  Transform a distance vector from user space to device space.
  """
  @spec user_to_device_distance(context :: Ref.t(), dx :: float(), dy :: float()) ::
          {:ok, CairoEx.vector()} | {:error, CairoEx.error_reason()}
  def user_to_device_distance(%Ref{} = context, dx, dy)
      when is_float(dx) and is_float(dy) do
    context.port
    |> CairoPort.command({:user_to_device_distance, [context.handle, dx, dy]})
    |> case do
      {:ok, [dx1, dy1]} -> {:ok, {dx1, dy1}}
      error -> error
    end
  end

  @doc """
  Transform a coordinate from device space to user space by multiplying the given point by the inverse of the current transformation matrix (CTM).
  """
  @spec device_to_user(context :: Ref.t(), x :: float(), y :: float()) ::
          {:ok, CairoEx.vector()} | {:error, CairoEx.error_reason()}
  def device_to_user(%Ref{} = context, x, y)
      when is_float(x) and is_float(y) do
    context.port
    |> CairoPort.command({:device_to_user, [context.handle, x, y]})
    |> case do
      {:ok, [x1, y1]} -> {:ok, {x1, y1}}
      error -> error
    end
  end

  @doc """
  Transform a distance vector from device space to user space.
  """
  @spec device_to_user_distance(context :: Ref.t(), dx :: float(), dy :: float()) ::
          {:ok, CairoEx.vector()} | {:error, CairoEx.error_reason()}
  def device_to_user_distance(%Ref{} = context, dx, dy)
      when is_float(dx) and is_float(dy) do
    context.port
    |> CairoPort.command({:device_to_user_distance, [context.handle, dx, dy]})
    |> case do
      {:ok, [dx1, dy1]} -> {:ok, {dx1, dy1}}
      error -> error
    end
  end

  @doc """
  Returns the current transformation matrix (CTM).
  """
  # Spec fix: the error tuple this function can return was missing from
  # the original spec.
  @spec get_matrix(context :: Ref.t()) ::
          {:ok, Matrix.t()} | {:error, CairoEx.error_reason()}
  def get_matrix(%Ref{} = context) do
    case CairoPort.command(context.port, {:get_matrix, [context.handle]}) do
      {:ok, m} -> {:ok, Matrix.from_list(m)}
      error -> error
    end
  end

  @doc """
  Modifies the current transformation matrix (CTM) by translating the user-space origin by (tx, ty).
  """
  @spec translate(context :: Ref.t(), tx :: float(), ty :: float()) :: Ref.t()
  def translate(%Ref{} = context, tx, ty)
      when is_float(tx) and is_float(ty) do
    chained_command(context, {:translate, [context.handle, tx, ty]})
  end

  @doc """
  Modifies the current transformation matrix (CTM) by scaling the X and Y user-space axes by sx and sy respectively.
  """
  @spec scale(context :: Ref.t(), sx :: float(), sy :: float()) :: Ref.t()
  def scale(%Ref{} = context, sx, sy)
      when is_float(sx) and is_float(sy) do
    chained_command(context, {:scale, [context.handle, sx, sy]})
  end

  @doc """
  Modifies the current transformation matrix (CTM) by rotating the user-space axes by angle radians.
  """
  @spec rotate(context :: Ref.t(), angle :: float()) :: Ref.t()
  def rotate(%Ref{} = context, angle)
      when is_float(angle) do
    chained_command(context, {:rotate, [context.handle, angle]})
  end

  @doc """
  Modifies the current transformation matrix (CTM) by applying matrix as an additional transformation.
  """
  @spec transform(context :: Ref.t(), matrix :: Matrix.t()) :: Ref.t()
  def transform(%Ref{} = context, matrix) do
    m = matrix |> Matrix.to_float() |> Matrix.to_list()
    chained_command(context, {:transform, [context.handle, m]})
  end

  @doc """
  Set the current transformation matrix (CTM).
  """
  @spec set_matrix(context :: Ref.t(), matrix :: Matrix.t()) :: Ref.t()
  def set_matrix(%Ref{} = context, matrix) do
    m = matrix |> Matrix.to_float() |> Matrix.to_list()
    chained_command(context, {:set_matrix, [context.handle, m]})
  end

  @doc """
  Resets the current transformation matrix (CTM) by setting it equal to the identity matrix.
  """
  @spec identity_matrix(context :: Ref.t()) :: Ref.t()
  def identity_matrix(%Ref{} = context) do
    chained_command(context, {:identity_matrix, [context.handle]})
  end

  @doc """
  Sets the source pattern within the context to an opaque color.
  """
  @spec set_source_rgb(context :: Ref.t(), r :: float(), g :: float(), b :: float()) :: Ref.t()
  def set_source_rgb(%Ref{} = context, r, g, b)
      when is_float(r) and is_float(g) and is_float(b) do
    chained_command(context, {:set_source_rgb, [context.handle, r, g, b]})
  end

  @doc """
  Sets the source pattern within the context to a translucent color.
  """
  @spec set_source_rgba(
          context :: Ref.t(),
          r :: float(),
          g :: float(),
          b :: float(),
          a :: float()
        ) :: Ref.t()
  def set_source_rgba(%Ref{} = context, r, g, b, a)
      when is_float(r) and is_float(g) and is_float(b) and is_float(a) do
    chained_command(context, {:set_source_rgba, [context.handle, r, g, b, a]})
  end

  @doc """
  Adds a circular arc of the given radius to the current path.
  """
  @spec arc(
          context :: Ref.t(),
          xc :: float(),
          yc :: float(),
          radius :: float(),
          angle1 :: float(),
          angle2 :: float()
        ) :: Ref.t()
  def arc(%Ref{} = context, xc, yc, radius, angle1, angle2)
      when is_float(xc) and
             is_float(yc) and
             is_float(radius) and
             is_float(angle1) and
             is_float(angle2) do
    chained_command(
      context,
      {:arc, [context.handle, xc, yc, radius, angle1, angle2]}
    )
  end

  @doc """
  Adds a closed-subpath rectangle of the given size to the current path at position (x, y) in user-space coordinates.
  """
  @spec rectangle(
          context :: Ref.t(),
          x :: float(),
          y :: float(),
          width :: float(),
          height :: float()
        ) :: Ref.t()
  def rectangle(%Ref{} = context, x, y, width, height)
      when is_float(x) and
             is_float(y) and
             is_float(width) and
             is_float(height) do
    chained_command(context, {:rectangle, [context.handle, x, y, width, height]})
  end

  @doc """
  A drawing operator that paints the current source everywhere within the current clip region.
  """
  @spec paint(context :: Ref.t()) :: Ref.t()
  def paint(%Ref{} = context) do
    chained_command(context, {:paint, [context.handle]})
  end

  @doc """
  A drawing operator that fills the current path according to the current fill rule, (each sub-path is implicitly closed before being filled).
  """
  @spec fill(context :: Ref.t()) :: Ref.t()
  def fill(%Ref{} = context) do
    chained_command(context, {:fill, [context.handle]})
  end

  @doc """
  Adds a line to the path from the current point to position (x, y) in user-space coordinates.

  Accepts any numbers; integers are coerced to floats.
  """
  # Guard added so non-numeric input fails with a clear FunctionClauseError
  # instead of an ArithmeticError from the `/ 1` coercion below.
  @spec line_to(context :: Ref.t(), x :: number(), y :: number()) :: Ref.t()
  def line_to(%Ref{} = context, x, y)
      when is_number(x) and is_number(y) do
    # `/ 1` coerces integers to floats for the port protocol.
    chained_command(context, {:line_to, [context.handle, x / 1, y / 1]})
  end

  @doc """
  If the current subpath is not empty, begin a new subpath.

  Accepts any numbers; integers are coerced to floats (consistent with
  `line_to/3`).
  """
  # Generalized from float-only guards to match line_to/3; backward
  # compatible, since every previously accepted input behaves the same.
  @spec move_to(context :: Ref.t(), x :: number(), y :: number()) :: Ref.t()
  def move_to(%Ref{} = context, x, y)
      when is_number(x) and is_number(y) do
    chained_command(context, {:move_to, [context.handle, x / 1, y / 1]})
  end

  @doc """
  Sets the current line width within the cairo context.
  """
  @spec set_line_width(context :: Ref.t(), width :: float()) :: Ref.t()
  def set_line_width(%Ref{} = context, width)
      when is_float(width) do
    chained_command(context, {:set_line_width, [context.handle, width]})
  end

  @doc """
  A drawing operator that strokes the current Path according to the current line width, line join, line cap, and dash settings.
  """
  @spec stroke(context :: Ref.t()) :: Ref.t()
  def stroke(%Ref{} = context) do
    chained_command(context, {:stroke, [context.handle]})
  end

  @doc """
  Makes a copy of the current state of the context and saves it on an internal stack of saved states.
  """
  @spec save(context :: Ref.t()) :: Ref.t()
  def save(%Ref{} = context) do
    chained_command(context, {:save, [context.handle]})
  end

  @doc """
  Restores cr to the state saved by a preceding call to save/1 and removes
  that state from the stack of saved states.
  """
  @spec restore(context :: Ref.t()) :: Ref.t()
  def restore(%Ref{} = context) do
    chained_command(context, {:restore, [context.handle]})
  end

  @doc """
  Temporarily redirects drawing to an intermediate surface known as a group.

  The redirection lasts until the group is completed by a call to Context.pop_group/1 or
  Context.pop_group_to_source/1. These calls provide the result of any drawing to the group as a
  pattern.
  """
  @spec push_group_with_content(context :: Ref.t(), content :: CairoEx.content()) :: Ref.t()
  def push_group_with_content(%Ref{} = context, content) when is_atom(content) do
    chained_command(context, {:push_group_with_content, [context.handle, content]})
  end

  @doc """
  Terminates the redirection begun by a call to Context.push_group/1 or Context.push_group_with_content/2
  and returns a new pattern containing the results of all drawing operations performed to the group.
  """
  # Spec fix: the error branch below was missing from the original spec.
  @spec pop_group(context :: Ref.t()) ::
          {:ok, Ref.t()} | {:error, CairoEx.error_reason()}
  def pop_group(%Ref{} = context) do
    case CairoPort.command(context.port, {:pop_group, [context.handle]}) do
      {:ok, handle} -> {:ok, Ref.make(handle, :pattern, context.port)}
      error -> error
    end
  end

  @doc """
  Terminates the redirection begun by a call to Context.push_group/1 or Context.push_group_with_content/2
  and installs the resulting pattern as the source pattern in the given context.
  """
  @spec pop_group_to_source(context :: Ref.t()) :: Ref.t()
  def pop_group_to_source(%Ref{} = context) do
    chained_command(context, {:pop_group_to_source, [context.handle]})
  end

  @doc """
  Temporarily redirects drawing to an intermediate surface known as a group.
  """
  @spec push_group(context :: Ref.t()) :: Ref.t()
  def push_group(%Ref{} = context) do
    chained_command(context, {:push_group, [context.handle]})
  end

  @doc """
  Sets the dash pattern to be used by Context.stroke/1. A dash pattern is specified by dashes, an array of
  positive values. Each value provides the length of alternate "on" and "off" portions of the stroke.
  The offset specifies an offset into the pattern at which the stroke begins.
  """
  @spec set_dash(context :: Ref.t(), list(float()), offset :: float()) :: Ref.t()
  def set_dash(%Ref{} = context, dashes, offset)
      when is_list(dashes) and is_float(offset) do
    chained_command(context, {:set_dash, [context.handle, dashes, offset]})
  end

  @doc """
  Sets the antialiasing mode of the rasterizer used for drawing shapes.
  """
  @spec set_antialias(context :: Ref.t(), antialias :: CairoEx.antialias()) :: Ref.t()
  def set_antialias(%Ref{} = context, antialias)
      when is_atom(antialias) do
    chained_command(context, {:set_antialias, [context.handle, antialias]})
  end

  @doc """
  Gets the current antialiasing mode of the context.
  """
  @spec get_antialias(context :: Ref.t()) ::
          {:ok, CairoEx.antialias()} | {:error, CairoEx.error_reason()}
  def get_antialias(%Ref{} = context) do
    # The port already replies with {:ok, antialias} | {:error, reason};
    # the original re-wrapped both branches in an identity case.
    CairoPort.command(context.port, {:get_antialias, [context.handle]})
  end

  @doc """
  Returns the last error, if any.
  """
  @spec status(context :: Ref.t()) :: :ok | {:error, CairoEx.error_reason()}
  def status(%Ref{} = context) do
    CairoPort.command(context.port, {:status, [context.handle]})
  end

  @doc """
  Destroys the context.
  """
  # Spec fix: this function returns `nil`; the original `no_return()`
  # incorrectly claimed it never returns (i.e. always raises or exits).
  @spec destroy(context :: Ref.t()) :: nil
  def destroy(%Ref{} = context) do
    chained_command(context, {:destroy, [context.handle]})
    nil
  end

  # Sends `cmd` to the port and returns the context for piping; any reply
  # other than :ok raises CairoEx.Error.
  defp chained_command(%Ref{type: :context, port: port} = context, cmd) do
    case CairoPort.command(port, cmd) do
      :ok ->
        context

      result ->
        raise CairoEx.Error, message: "Unexpected result: #{inspect(result)}"
    end
  end
end
|
lib/cairo_ex/context.ex
| 0.912876
| 0.597344
|
context.ex
|
starcoder
|
defmodule Grizzly.ZWave.Commands.CentralSceneNotification do
  @moduledoc """
  This command is used to advertise a scene key event.

  Versions 1 and 2 are obsolete.

  Params:

    * `:seq_number` - sequence number the receiving device uses to ignore
      duplicate notifications (required)
    * `:slow_refresh` - whether the node sends Key Held Down notifications
      at a slow rate (required)
    * `:key_attribute` - one or more events detected by the key (required)
    * `:scene_number` - the scene the key event belongs to (required)
  """

  @behaviour Grizzly.ZWave.Command

  alias Grizzly.ZWave.{Command, DecodeError}
  alias Grizzly.ZWave.CommandClasses.CentralScene

  @type param ::
          {:seq_number, non_neg_integer()}
          | {:slow_refresh, boolean}
          | {:key_attribute, CentralScene.key_attribute()}
          | {:scene_number, non_neg_integer()}

  @impl true
  @spec new([param()]) :: {:ok, Command.t()}
  def new(params) do
    {:ok,
     %Command{
       name: :central_scene_notification,
       command_byte: 0x03,
       command_class: CentralScene,
       params: params,
       impl: __MODULE__
     }}
  end

  @impl true
  @spec encode_params(Command.t()) :: binary()
  def encode_params(command) do
    seq = Command.param!(command, :seq_number)

    refresh_bit =
      command
      |> Command.param!(:slow_refresh)
      |> CentralScene.boolean_to_bit()

    attribute_byte =
      command
      |> Command.param!(:key_attribute)
      |> CentralScene.key_attribute_to_byte()

    scene = Command.param!(command, :scene_number)

    # Layout: seq (8) | slow refresh (1) | reserved (4, zero) | key attribute (3) | scene (8)
    <<seq, refresh_bit::size(1), 0x00::size(4), attribute_byte::size(3), scene>>
  end

  @impl true
  @spec decode_params(binary()) :: {:ok, [param()]} | {:error, DecodeError.t()}
  def decode_params(
        <<seq, refresh_bit::size(1), 0x00::size(4), attribute_byte::size(3), scene>>
      ) do
    params = [
      seq_number: seq,
      slow_refresh: refresh_bit == 1,
      key_attribute: CentralScene.key_attribute_from_byte(attribute_byte),
      scene_number: scene
    ]

    {:ok, params}
  end
end
|
lib/grizzly/zwave/commands/central_scene_notification.ex
| 0.882282
| 0.423607
|
central_scene_notification.ex
|
starcoder
|
defmodule ExForce do
@moduledoc """
Simple wrapper for Salesforce REST API.
## Usage
```elixir
{:ok, %{instance_url: instance_url} = oauth_response} =
ExForce.OAuth.get_token(
"https://login.salesforce.com",
grant_type: "password",
client_id: "client_id",
client_secret: "client_secret",
username: "username",
password: "password"
)
{:ok, version_maps} = ExForce.versions(instance_url)
latest_version = version_maps |> Enum.map(&Map.fetch!(&1, "version")) |> List.last()
client = ExForce.build_client(oauth_response, api_version: latest_version)
names =
ExForce.query_stream(client, "SELECT Name FROM Account")
|> Stream.map(&Map.fetch!(&1.data, "Name"))
|> Stream.take(50)
|> Enum.to_list()
```
"""
alias ExForce.{QueryResult, SObject}
import ExForce.Client, only: [request: 2]
@type client :: ExForce.Client.t()
@type sobject_id :: String.t()
@type sobject_name :: String.t()
@type field_name :: String.t()
@type soql :: String.t()
@type query_id :: String.t()
@default_api_version "42.0"
@default_user_agent "ex_force"
@doc """
Lists available REST API versions at an instance.

See [Versions](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_versions.htm)
"""
@spec versions(String.t()) :: {:ok, list(map)} | {:error, any}
def versions(instance_url) do
  response =
    instance_url
    |> build_client()
    |> request(method: :get, url: "/services/data")

  case response do
    {:ok, %Tesla.Env{status: 200, body: versions}} when is_list(versions) -> {:ok, versions}
    {:ok, %Tesla.Env{body: error_body}} -> {:error, error_body}
    {:error, _} = failure -> failure
  end
end

@doc """
Lists available resources for the specific API version.

See [Resources by Version](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_discoveryresource.htm)
"""
@spec resources(client, String.t()) :: {:ok, map} | {:error, any}
def resources(client, version) do
  client
  |> request(method: :get, url: "/services/data/v#{version}")
  |> case do
    {:ok, %Tesla.Env{status: 200, body: body}} -> {:ok, body}
    {:ok, %Tesla.Env{body: body}} -> {:error, body}
    {:error, _} = failure -> failure
  end
end
@doc """
Builds a `Tesla` client for the given instance.

Accepts either an instance URL string, or a map with `:instance_url` and
`:access_token` keys (such as an OAuth token response); in the latter case a
`Bearer` authorization header is added automatically.

Options

- `:headers`: set additional headers; default: `[{"user-agent", "#{@default_user_agent}"}]`
- `:api_version`: use the given api_version; default: `"#{@default_api_version}"`
"""
def build_client(instance_url_or_map, opts \\ [headers: [{"user-agent", @default_user_agent}]])

def build_client(%{instance_url: instance_url, access_token: access_token}, opts) do
  # Plain bindings instead of the original `with` chain: every step is an
  # infallible bare assignment, so `with` added nothing but indirection.
  headers = Keyword.get(opts, :headers, [])
  new_headers = [{"authorization", "Bearer " <> access_token} | headers]
  new_opts = Keyword.put(opts, :headers, new_headers)
  build_client(instance_url, new_opts)
end

def build_client(instance_url, opts) when is_binary(instance_url) do
  Tesla.client([
    {ExForce.TeslaMiddleware,
     {instance_url, Keyword.get(opts, :api_version, @default_api_version)}},
    {Tesla.Middleware.Compression, format: "gzip"},
    {Tesla.Middleware.JSON, engine: Jason},
    {Tesla.Middleware.Headers, Keyword.get(opts, :headers, [])}
  ])
end
@doc """
Lists the available objects.

See [Describe Global](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_describeGlobal.htm)
"""
@spec describe_global(client) :: {:ok, map} | {:error, any}
def describe_global(client) do
  client
  |> request(method: :get, url: "sobjects")
  |> case do
    {:ok, %Tesla.Env{status: 200, body: body}} -> {:ok, body}
    {:ok, %Tesla.Env{body: body}} -> {:error, body}
    {:error, _} = failure -> failure
  end
end

@doc """
Retrieves extended metadata for the specified SObject.

See [SObject Describe](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_describe.htm)
"""
@spec describe_sobject(client, sobject_name) :: {:ok, map} | {:error, any}
def describe_sobject(client, name) do
  client
  |> request(method: :get, url: "sobjects/#{name}/describe")
  |> case do
    {:ok, %Tesla.Env{status: 200, body: body}} -> {:ok, body}
    {:ok, %Tesla.Env{body: body}} -> {:error, body}
    {:error, _} = failure -> failure
  end
end

@doc """
Retrieves basic metadata for the specific SObject.

On success the `"recentItems"` entries are converted to `SObject` structs.

See [SObject Basic Information](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_basic_info.htm)
"""
@spec basic_info(client, sobject_name) :: {:ok, map} | {:error, any}
def basic_info(client, name) do
  client
  |> request(method: :get, url: "sobjects/#{name}")
  |> case do
    {:ok, %Tesla.Env{status: 200, body: %{"recentItems" => items} = body}} ->
      {:ok, %{body | "recentItems" => Enum.map(items, &SObject.build/1)}}

    {:ok, %Tesla.Env{body: body}} ->
      {:error, body}

    {:error, _} = failure ->
      failure
  end
end
@doc """
Retrieves a SObject by ID.

See [SObject Rows](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_retrieve.htm)
"""
@spec get_sobject(client, sobject_id, sobject_name, list) :: {:ok, SObject.t()} | {:error, any}
def get_sobject(client, id, name, fields) do
  do_get_sobject(client, "sobjects/#{name}/#{id}", fields)
end
@doc """
Retrieves a SObject based on the value of a specified external ID field.

`field_value` may be any term implementing `String.Chars`; it is converted
to a string and percent-encoded before being placed in the URL path.

See [SObject Rows by External ID](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_upsert.htm)
"""
@spec get_sobject_by_external_id(client, any, field_name, sobject_name) ::
        {:ok, SObject.t()} | {:error, any}
def get_sobject_by_external_id(client, field_value, field_name, sobject_name) do
  # `to_string/1` honors the `any` spec (the original raised on non-binary
  # values), and `URI.char_unreserved?/1` also escapes path-reserved
  # characters such as "/" and "?", which `URI.encode/1`'s default
  # predicate would pass through into the URL path.
  encoded_value = URI.encode(to_string(field_value), &URI.char_unreserved?/1)
  do_get_sobject(client, "sobjects/#{sobject_name}/#{field_name}/#{encoded_value}")
end
@doc """
Retrieves a SObject by relationship field.

See [SObject Relationships](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_relationships.htm)
"""
@spec get_sobject_by_relationship(
        client,
        sobject_id,
        sobject_name,
        field_name,
        list(field_name)
      ) :: {:ok, SObject.t() | QueryResult.t()} | {:error, any}
def get_sobject_by_relationship(client, id, sobject_name, field_name, fields) do
  url = "sobjects/#{sobject_name}/#{id}/#{field_name}"

  case request(client, method: :get, url: url, query: build_fields_query(fields)) do
    {:ok, %Tesla.Env{status: 200, body: %{"attributes" => _} = body}} ->
      {:ok, SObject.build(body)}

    {:ok, %Tesla.Env{status: 200, body: %{"records" => _} = body}} ->
      {:ok, build_result_set(body)}

    {:ok, %Tesla.Env{body: body}} ->
      {:error, body}

    {:error, _} = failure ->
      failure
  end
end

# Shared GET helper: fetches `path` (optionally restricted to `fields`)
# and wraps a successful body in an SObject.
defp do_get_sobject(client, path, fields \\ []) do
  client
  |> request(method: :get, url: path, query: build_fields_query(fields))
  |> case do
    {:ok, %Tesla.Env{status: 200, body: body}} -> {:ok, SObject.build(body)}
    {:ok, %Tesla.Env{body: body}} -> {:error, body}
    {:error, _} = failure -> failure
  end
end

# Builds the `fields=a,b,c` query parameter; no parameter when empty.
defp build_fields_query([]), do: []
defp build_fields_query(fields), do: [fields: Enum.join(fields, ",")]
@doc """
Updates a SObject.

See [SObject Rows](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_retrieve.htm)
"""
@spec update_sobject(client, sobject_id, sobject_name, map) :: :ok | {:error, any}
def update_sobject(client, id, name, attrs) do
  client
  |> request(method: :patch, url: "sobjects/#{name}/#{id}", body: attrs)
  |> case do
    {:ok, %Tesla.Env{status: 204, body: ""}} -> :ok
    {:ok, %Tesla.Env{body: body}} -> {:error, body}
    {:error, _} = failure -> failure
  end
end

@doc """
Creates a SObject and returns the new record's ID.

See [SObject Rows](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_basic_info.htm)
"""
@spec create_sobject(client, sobject_name, map) :: {:ok, sobject_id} | {:error, any}
def create_sobject(client, name, attrs) do
  client
  |> request(method: :post, url: "sobjects/#{name}/", body: attrs)
  |> case do
    {:ok, %Tesla.Env{status: 201, body: %{"success" => true, "id" => new_id}}} -> {:ok, new_id}
    {:ok, %Tesla.Env{body: body}} -> {:error, body}
    {:error, _} = failure -> failure
  end
end
@doc """
Deletes a SObject.

Returns `:ok` on success (HTTP 204) and `{:error, body}` for any error
response from the server.

[SObject Rows](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_sobject_retrieve.htm)
"""
@spec delete_sobject(client, sobject_id, sobject_name) :: :ok | {:error, any}
def delete_sobject(client, id, name) do
  case request(client, method: :delete, url: "sobjects/#{name}/#{id}") do
    {:ok, %Tesla.Env{status: 204, body: ""}} -> :ok
    # Previously only status 404 was matched here, so any other error
    # status (400, 403, ...) raised a CaseClauseError. Match every non-204
    # response as an error tuple, consistent with the sibling functions.
    {:ok, %Tesla.Env{body: body}} -> {:error, body}
    {:error, _} = other -> other
  end
end
  @doc """
  Execute the SOQL query and get the result of it.
  [Query](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_query.htm)
  """
  @spec query(client, soql) :: {:ok, QueryResult.t()} | {:error, any}
  def query(client, soql) do
    # A 200 response is parsed into a QueryResult (possibly paginated);
    # any other response body is returned as the error term.
    case request(client, method: :get, url: "query", query: [q: soql]) do
      {:ok, %Tesla.Env{status: 200, body: body}} -> {:ok, build_result_set(body)}
      {:ok, %Tesla.Env{body: body}} -> {:error, body}
      {:error, _} = other -> other
    end
  end
  @doc """
  Executes the SOQL query and returns a lazy stream of all result records.

  Pagination (`next_records_url`) is followed transparently as the stream
  is consumed. Crashes if the initial query fails. See `query/2`.
  """
  @spec query_stream(client, soql) :: Enumerable.t()
  def query_stream(client, soql), do: start_query_stream(client, &query/2, soql)
@doc """
Retrieve additional query results for the specified query ID.
[Query](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_query.htm)
"""
@spec query_retrieve(client, query_id | String.t()) :: {:ok, QueryResult.t()} | {:error, any}
def query_retrieve(client, query_id_or_url) do
path =
if full_path?(query_id_or_url) do
query_id_or_url
else
"query/#{query_id_or_url}"
end
case request(client, method: :get, url: path) do
{:ok, %Tesla.Env{status: 200, body: body}} -> {:ok, build_result_set(body)}
{:ok, %Tesla.Env{body: body}} -> {:error, body}
{:error, _} = other -> other
end
end
  @doc """
  Execute the SOQL query and get the result of it, including deleted or archived objects.
  [QueryAll](https://developer.salesforce.com/docs/atlas.en-us.api_rest.meta/api_rest/resources_queryall.htm)
  """
  @spec query_all(client, soql) :: {:ok, QueryResult.t()} | {:error, any}
  def query_all(client, soql) do
    # Same handling as query/2, but against the "queryAll" endpoint.
    case request(client, method: :get, url: "queryAll", query: [q: soql]) do
      {:ok, %Tesla.Env{status: 200, body: body}} -> {:ok, build_result_set(body)}
      {:ok, %Tesla.Env{body: body}} -> {:error, body}
      {:error, _} = other -> other
    end
  end
  @doc """
  Like `query_stream/2`, but uses `query_all/2`, so deleted or archived
  objects are included. Pagination is followed transparently as the stream
  is consumed.
  """
  @spec query_all_stream(client, soql) :: Enumerable.t()
  def query_all_stream(client, soql), do: start_query_stream(client, &query_all/2, soql)
defp build_result_set(%{"records" => records, "totalSize" => total_size} = resp) do
case resp do
%{"done" => true} ->
%QueryResult{
done: true,
total_size: total_size,
records: records |> Enum.map(&SObject.build/1)
}
%{"done" => false, "nextRecordsUrl" => next_records_url} ->
%QueryResult{
done: false,
next_records_url: next_records_url,
total_size: total_size,
records: records |> Enum.map(&SObject.build/1)
}
end
end
@spec start_query_stream(
client,
(client, soql -> {:ok, QueryResult.t()} | any),
soql
) :: Enumerable.t()
defp start_query_stream(client, func, soql) do
{:ok, qr} = func.(client, soql)
stream_query_result(client, qr)
end
  @doc """
  Returns `Enumerable.t` from the `QueryResult`.
  """
  @spec stream_query_result(client, QueryResult.t()) :: Enumerable.t()
  # Lazily yields each record; when the current page runs out and the
  # result is not `done`, the next page is fetched on demand — see
  # `stream_unfold/1`.
  def stream_query_result(client, %QueryResult{} = qr) do
    Stream.unfold({client, qr}, &stream_unfold/1)
  end
  # Unfold step for stream_query_result/2. Clause order matters:
  #
  # 1. Records remain on the current page: emit the head, keep the tail.
  defp stream_unfold({client, %QueryResult{records: [h | tail]} = qr}),
    do: {h, {client, %QueryResult{qr | records: tail}}}

  # 2. Page exhausted but more pages exist: fetch the next page and emit its
  #    first record. The `{:ok, ...}` match crashes the stream if the fetch
  #    fails or returns a page with no records ("let it crash").
  defp stream_unfold({
         client,
         %QueryResult{records: [], done: false, next_records_url: next_records_url}
       }) do
    {:ok, %QueryResult{records: [h | tail]} = qr} = query_retrieve(client, next_records_url)
    {h, {client, %QueryResult{qr | records: tail}}}
  end

  # 3. Page exhausted and the result is done: terminate the stream.
  defp stream_unfold({_client, %QueryResult{records: [], done: true}}), do: nil
defp full_path?(path), do: String.starts_with?(path, "/services/data/v")
end
|
lib/ex_force.ex
| 0.757436
| 0.548432
|
ex_force.ex
|
starcoder
|
defmodule Cbuf.ETS do
  @moduledoc """
  `Cbuf.ETS` implements the `Cbuf` behaviour with an ETS table as its implementation.
  For examples of typical use, see the documentation for `new/1`, `insert/2`, `peek/1`, and `delete/1`.
  Each new buffer creates and owns a new ETS table.
  Operations that must interact with the actual data of the buffer
  (`new/1`, `insert/2`, `peek/1`, `pop/1`, `delete/1`, `to_list/1`, `member?/2`),
  perform as well as ETS does.
  Use this module if you have tried `Cbuf.Map` with a GenServer and benchmarked it against this one to determine that
  this one is faster for your application. I recommend defaulting to `Cbuf.Map`.
  This module is not currently designed or tested for parallel writes, and so I also recommend using it
  as part of a GenServer. Crucially, the `start` and `current` pointers are stored on the struct itself,
  rather than somewhere in ETS, so they are completely uncoordinated with the backing implementation and require
  updates to be serialized.
  This may change at some point if I can determine that this is a beneficial use case and can be implemented in
  a way that preserves the existing API.
  Note that this module is a superset of the `Cbuf` behaviour, implementing one additional function, `destroy/1`,
  to destroy the buffer's backing ETS table. See the function documentation for more details.
  """
  @behaviour Cbuf

  # `start` is the slot of the oldest value, `current` the slot of the
  # newest. `empty` disambiguates start == current (one element vs. none);
  # an empty buffer always has start == current == 0 (see new/1, delete/1).
  @opaque t :: %__MODULE__{
            impl: :ets.tab(),
            size: non_neg_integer,
            start: non_neg_integer,
            current: non_neg_integer,
            empty: boolean
          }

  defstruct impl: nil, size: 0, start: 0, current: 0, empty: true

  @doc """
  Create a new circular buffer of a given size.

      iex> Cbuf.ETS.new(5)
      #Cbuf<[]>
  """
  @spec new(pos_integer) :: t
  def new(size) when size > 0 do
    # The atom passed to :ets.new/2 is irrelevant: without :named_table the
    # table is reachable only through the returned tid.
    tid = :ets.new(:ok, [:public, :set])

    # Pre-populate every slot so peek/1 and to_list/1 can match on
    # :undefined for never-written slots.
    Enum.each(0..(size - 1), fn i ->
      :ets.insert(tid, {i, :undefined})
    end)

    %__MODULE__{
      impl: tid,
      size: size,
      start: 0,
      current: 0,
      empty: true
    }
  end

  @doc """
  Calculate the allocated size for the buffer.
  This is maximum addressable size of the buffer, not how many values it currently contains. For the number of values in the current buffer, see `count/1`

      iex> Cbuf.ETS.new(5) |> Cbuf.ETS.size()
      5
  """
  @spec size(t) :: non_neg_integer
  def size(buf) do
    buf.size
  end

  @doc """
  Whether or not the buffer is empty.
  This value corresponds to when the buffer has a `count` of zero, not its `size`.

      iex> buf = Cbuf.ETS.new(5)
      iex> Cbuf.ETS.empty?(buf)
      true

      iex> buf = Cbuf.ETS.new(5) |> Cbuf.ETS.insert("hi")
      iex> Cbuf.ETS.empty?(buf)
      false

      iex> buf = Cbuf.ETS.new(5) |> Cbuf.ETS.insert("hi") |> Cbuf.ETS.delete()
      iex> Cbuf.ETS.empty?(buf)
      true
  """
  @spec empty?(t) :: boolean
  def empty?(buf) do
    buf.empty
  end

  @doc """
  Insert a value into a circular buffer.
  Values are inserted such that when the buffer is full, the oldest items are overwritten first.

      iex> buf = Cbuf.ETS.new(5)
      iex> buf |> Cbuf.ETS.insert("a") |> Cbuf.ETS.insert("b")
      #Cbuf<["a", "b"]>

      iex> buf = Cbuf.ETS.new(3)
      iex> Enum.reduce(1..20, buf, fn(val, acc) -> Cbuf.ETS.insert(acc, val) end)
      #Cbuf<[18, 19, 20]>

      iex> buf = Cbuf.ETS.new(1)
      iex> Enum.reduce(1..20, buf, fn(val, acc) -> Cbuf.ETS.insert(acc, val) end)
      #Cbuf<[20]>
  """
  @spec insert(t, term) :: t
  def insert(buf, val) do
    size = buf.size

    {start, current, empty} =
      case {buf.start, buf.current, buf.empty} do
        # Empty buffer: write into the current slot. An empty buffer is
        # always {0, 0, true} (new/1 starts there; delete/1 resets to it).
        {s, c, true} ->
          {s, c, false}

        # Non-empty buffer: advance `current` one slot, wrapping around the
        # end of the table. If it would land on `start`, the buffer is full
        # and `start` advances too (the oldest value is overwritten).
        #
        # This replaces the previous clause-per-state table, which missed
        # states reachable after delete/1 (e.g. start == current with
        # start > 0) and raised a CaseClauseError on the next insert.
        {s, c, false} ->
          next = rem(c + 1, size)

          if next == s do
            {rem(s + 1, size), next, false}
          else
            {s, next, false}
          end
      end

    :ets.insert(buf.impl, {current, val})

    %{
      buf
      | start: start,
        current: current,
        empty: empty
    }
  end

  @doc """
  See the oldest value in the buffer. Works in constant time.

      iex> buf = Enum.reduce(1..20, Cbuf.ETS.new(3), fn(val, acc) -> Cbuf.ETS.insert(acc, val) end)
      iex> Cbuf.ETS.peek(buf)
      18

      iex> buf = Cbuf.ETS.new(20) |> Cbuf.ETS.insert("ok") |> Cbuf.ETS.insert("fine")
      iex> Cbuf.ETS.peek(buf)
      "ok"

      iex> Cbuf.ETS.new(3) |> Cbuf.ETS.peek()
      nil
  """
  @spec peek(t) :: term | nil
  def peek(buf) do
    # :undefined marks a slot that has never held a user value.
    case :ets.match(buf.impl, {buf.start, :"$1"}) do
      [[:undefined]] -> nil
      [[val]] -> val
    end
  end

  @doc """
  Return the oldest value in the buffer, and a new buffer with that value removed.

      iex> buf = Enum.reduce(1..20, Cbuf.ETS.new(3), fn(val, acc) -> Cbuf.ETS.insert(acc, val) end)
      iex> {val, buf} = Cbuf.ETS.pop(buf)
      iex> {val, Cbuf.ETS.to_list(buf)} # Elixir has trouble inspecting a nested struct, see https://hexdocs.pm/ex_unit/ExUnit.DocTest.html#module-opaque-types
      {18, [19, 20]}

      iex> {val, buf} = Cbuf.ETS.new(1) |> Cbuf.ETS.insert("hi") |> Cbuf.ETS.pop()
      iex> {val, Cbuf.ETS.to_list(buf)}
      {"hi", []}
  """
  @spec pop(t) :: {term | nil, t}
  def pop(buf) do
    {peek(buf), delete(buf)}
  end

  @doc """
  Return a new buffer with the oldest item in the buffer removed.

      iex> buf = Enum.reduce(1..20, Cbuf.ETS.new(3), fn(val, acc) -> Cbuf.ETS.insert(acc, val) end)
      iex> buf = Cbuf.ETS.delete(buf)
      iex> Cbuf.ETS.peek(buf)
      19

      iex> buf = Enum.reduce(1..6, Cbuf.ETS.new(5), fn(val, acc) -> Cbuf.ETS.insert(acc, val) end)
      iex> buf = Cbuf.ETS.delete(buf)
      iex> Cbuf.ETS.peek(buf)
      3

      iex> buf = Enum.reduce(1..6, Cbuf.ETS.new(5), fn(val, acc) -> Cbuf.ETS.insert(acc, val) end)
      iex> Cbuf.ETS.delete(buf) |> Cbuf.ETS.count()
      4

      iex> buf = Cbuf.ETS.new(5)
      iex> buf = Cbuf.ETS.insert(buf, "ok")
      iex> Cbuf.ETS.delete(buf)
      #Cbuf<[]>

      iex> buf = Cbuf.ETS.new(5)
      iex> Cbuf.ETS.delete(buf)
      #Cbuf<[]>
  """
  @spec delete(t) :: t
  def delete(buf) do
    size = buf.size

    {start, current, empty} =
      case {buf.start, buf.current, buf.empty} do
        # Deleting from an empty buffer is a no-op.
        {0, 0, true} ->
          {0, 0, true}

        {s, c, false} when s < c ->
          {s + 1, c, false}

        # Exactly one element left: reset to the canonical empty state.
        {s, c, false} when s == c ->
          {0, 0, true}

        # Wrapped buffer with start at the last slot: start wraps to 0.
        {s, c, false} when s > c and s == size - 1 ->
          {0, c, false}

        {s, c, false} when s > c ->
          {s + 1, c, false}
      end

    # Clear the vacated slot (a harmless re-write of :undefined when the
    # buffer was already empty).
    :ets.insert(buf.impl, {buf.start, :undefined})

    %{
      buf
      | start: start,
        current: current,
        empty: empty
    }
  end

  @doc """
  Convert a circular buffer to a list. The list is ordered by age, oldest to newest.
  This operation takes linear time.

      iex> buf = Cbuf.ETS.new(5)
      iex> buf |> Cbuf.ETS.insert("a") |> Cbuf.ETS.insert("b") |> Cbuf.ETS.to_list()
      ["a", "b"]

      iex> buf = Cbuf.ETS.new(3)
      iex> Enum.reduce(1..20, buf, fn(val, acc) -> Cbuf.ETS.insert(acc, val) end) |> Cbuf.ETS.to_list()
      [18, 19, 20]

      iex> Cbuf.ETS.new(5) |> Cbuf.ETS.to_list()
      []
  """
  @spec to_list(t) :: [term] | []
  def to_list(buf) do
    do_to_list(buf, [], count(buf)) |> Enum.reverse()
  end

  # Walks `remaining` slots forward from `start` (wrapping), accumulating
  # values newest-first; to_list/1 reverses once at the end.
  defp do_to_list(_buf, list, 0), do: list

  defp do_to_list(buf, list, remaining) do
    value =
      case :ets.match(buf.impl, {buf.start, :"$1"}) do
        [[:undefined]] -> nil
        [[val]] -> val
      end

    buf =
      if buf.start == buf.size - 1 do
        %{buf | start: 0}
      else
        %{buf | start: buf.start + 1}
      end

    do_to_list(buf, [value | list], remaining - 1)
  end

  @doc """
  Returns the count of the non-empty values in the buffer.

      iex> Cbuf.ETS.new(5) |> Cbuf.ETS.insert("hi") |> Cbuf.ETS.count()
      1

      iex> Cbuf.ETS.new(5) |> Cbuf.ETS.count()
      0

      iex> Cbuf.ETS.new(5) |> Cbuf.ETS.insert(nil) |> Cbuf.ETS.count()
      1

      iex> Cbuf.ETS.new(5) |> Cbuf.ETS.insert("hi") |> Cbuf.ETS.delete() |> Cbuf.ETS.count()
      0

      iex> buf = Enum.reduce(1..13, Cbuf.ETS.new(5), &Cbuf.ETS.insert(&2, &1))
      iex> Cbuf.ETS.delete(buf) |> Cbuf.ETS.count()
      4

      iex> Cbuf.ETS.new(3) |> Cbuf.ETS.delete() |> Cbuf.ETS.delete() |> Cbuf.ETS.count()
      0
  """
  @spec count(t) :: non_neg_integer
  def count(buf) do
    case {buf.start, buf.current, buf.empty} do
      {s, c, false} when c > s ->
        c + 1 - s

      # Wrapped: occupied slots run start..size-1 and 0..current.
      {s, c, false} when s > c ->
        buf.size - (s - c - 1)

      # start == current and non-empty means exactly one element. (A former
      # `{0, 0, false}` clause here was unreachable — already covered.)
      {s, c, false} when s == c ->
        1

      {_, _, true} ->
        0
    end
  end

  @doc """
  Queries `buf` for the presence of `val`.

      iex> Cbuf.ETS.new(5) |> Cbuf.ETS.insert("hello") |> Cbuf.ETS.member?("hello")
      true

      iex> Cbuf.ETS.new(5) |> Cbuf.ETS.insert("hello") |> Cbuf.ETS.member?("nope")
      false
  """
  @spec member?(t, term) :: boolean
  def member?(buf, val) do
    # :undefined is the internal empty-slot marker, never a user value.
    if val == :undefined do
      false
    else
      :ets.foldl(fn {_key, ets_val}, acc ->
        (ets_val == val) || acc
      end, false, buf.impl)
    end
  end

  @doc """
  Destroy the backing ETS table of a buffer. This function exists if you wish to manually dispose of the ETS table that backs the buffer. The other option is to destroy the process in which the buffer was created, as the ETS table will be disposed of when its parent process dies. See http://erlang.org/doc/man/ets.html for more information about the behavior of ETS tables.

      iex> buf = Cbuf.ETS.new(5)
      iex> buf = Cbuf.ETS.insert(buf, "ok")
      iex> Cbuf.ETS.destroy(buf)
      true

      iex> buf = Cbuf.ETS.new(5)
      iex> buf = Cbuf.ETS.insert(buf, "ok")
      iex> Cbuf.ETS.destroy(buf)
      iex> Cbuf.ETS.peek(buf)
      ** (ArgumentError) argument error
  """
  @spec destroy(t) :: true
  def destroy(buf) do
    :ets.delete(buf.impl)
  end

  defimpl Collectable, for: Cbuf.ETS do
    def into(original) do
      collector_fun = fn
        buf, {:cont, val} ->
          Cbuf.ETS.insert(buf, val)

        buf, :done ->
          buf

        _buf, :halt ->
          :ok
      end

      {original, collector_fun}
    end
  end

  defimpl Enumerable, for: Cbuf.ETS do
    def count(buf), do: {:ok, Cbuf.ETS.count(buf)}
    def member?(buf, val), do: {:ok, Cbuf.ETS.member?(buf, val)}

    def reduce(buf, acc, fun),
      do: Enumerable.List.reduce(Cbuf.ETS.to_list(buf), acc, fun)

    def slice(_buf), do: {:error, __MODULE__}
  end

  defimpl Inspect, for: Cbuf.ETS do
    import Inspect.Algebra

    def inspect(buf, opts) do
      concat(["#Cbuf<", to_doc(Cbuf.ETS.to_list(buf), opts), ">"])
    end
  end
end
|
lib/cbuf/ets.ex
| 0.854733
| 0.764848
|
ets.ex
|
starcoder
|
defmodule Absinthe.Phase.Document.Arguments.Parse do
  @moduledoc false

  # Parses Leaf Node inputs: converts each raw `Input.Value` into its
  # runtime value (stored under `:data`) by delegating to the schema
  # node's scalar/enum parse function.
  alias Absinthe.Blueprint.Input
  alias Absinthe.{Blueprint, Type}
  use Absinthe.Phase

  # Phase entry point: walks the whole blueprint and parses every leaf
  # input value, passing along the resolution context from `options`.
  def run(input, options \\ []) do
    result = Blueprint.prewalk(input, &handle_node(&1, options[:context] || %{}))
    {:ok, result}
  end

  # No normalized input on this node — nothing to parse.
  defp handle_node(%{normalized: nil} = node, _context) do
    node
  end

  defp handle_node(%Input.Value{normalized: normalized} = node, context) do
    case build_value(normalized, node.schema_node, context) do
      {:ok, value} ->
        %{node | data: value}

      :not_leaf_node ->
        node

      # Parse failed: mark the normalized input invalid with the reason
      # flag instead of setting data.
      {:error, flag} ->
        %{node | normalized: normalized |> flag_invalid(flag)}
    end
  end

  defp handle_node(node, _context), do: node

  # NOTE: clause order below is load-bearing. `Input.Null` must be
  # rejected for non-null types here, before the scalar clause (which also
  # accepts Input.Null) can match.
  defp build_value(%Input.Null{}, %Type.NonNull{}, _) do
    {:error, :non_null}
  end

  defp build_value(%{__struct__: struct} = normalized, %Type.Scalar{} = schema_node, context)
       when struct in [Input.Boolean, Input.Float, Input.Integer, Input.String, Input.Null] do
    case Type.Scalar.parse(schema_node, normalized, context) do
      :error ->
        {:error, :bad_parse}

      {:ok, val} ->
        {:ok, val}
    end
  end

  # Open-ended scalars additionally accept object literals.
  defp build_value(
         %Input.Object{} = normalized,
         %Type.Scalar{open_ended: true} = schema_node,
         context
       ) do
    case Type.Scalar.parse(schema_node, normalized, context) do
      :error ->
        {:error, :bad_parse}

      {:ok, val} ->
        {:ok, val}
    end
  end

  # Any other input shape against a (closed) scalar is a parse failure.
  defp build_value(_normalized, %Type.Scalar{}, _context) do
    {:error, :bad_parse}
  end

  # No schema node: pass the raw value through unchanged (presumably
  # validated elsewhere — this clause does no flagging).
  defp build_value(%{value: value} = _normalized, nil = _schema_node, _context) do
    {:ok, value}
  end

  defp build_value(%Input.Null{}, %Type.Enum{}, _) do
    {:ok, nil}
  end

  defp build_value(normalized, %Type.Enum{} = schema_node, _) do
    case Type.Enum.parse(schema_node, normalized) do
      {:ok, %{value: value}} ->
        {:ok, value}

      :error ->
        {:error, :bad_parse}
    end
  end

  # Unwrap NonNull and parse the inner type (the Null case was already
  # rejected by the first clause).
  defp build_value(normalized, %Type.NonNull{of_type: inner_type}, context) do
    build_value(normalized, inner_type, context)
  end

  # Primitive literals are never valid input objects.
  defp build_value(%{__struct__: struct}, %Type.InputObject{}, _)
       when struct in [Input.Boolean, Input.Float, Input.Integer, Input.String] do
    {:error, :bad_parse}
  end

  # Everything else (lists, input objects, ...) is not a leaf; leave it for
  # other phases.
  defp build_value(_, _, _) do
    :not_leaf_node
  end
end
|
lib/absinthe/phase/document/arguments/parse.ex
| 0.7586
| 0.413063
|
parse.ex
|
starcoder
|
defprotocol Type.Properties do
  @moduledoc """
  Core operations over types: usability checks, subtype tests, a total
  ordering (`compare/2`), type grouping, and intersections.
  """

  @spec usable_as(Type.t, Type.t, keyword) :: Type.ternary
  def usable_as(subject, target, meta)

  @spec subtype?(Type.t, Type.t) :: boolean
  def subtype?(subject, target)

  @spec compare(Type.t, Type.t) :: :gt | :eq | :lt
  def compare(a, b)

  @spec typegroup(Type.t) :: Type.group
  def typegroup(type)

  # NOTE(review): both parameters are named `type`; a distinct second name
  # (e.g. `target`) would read better in generated docs.
  @spec intersection(Type.t, Type.t) :: Type.t
  def intersection(type, type)
end
# Property implementation for integer literal types (a bare integer used
# as a singleton type).
defimpl Type.Properties, for: Integer do
  import Type, only: :macros
  use Type.Helpers

  # `group_compare`, `usable_as`, and `intersection` below are DSL blocks
  # provided by Type.Helpers (via `use` above); each wraps the enclosed
  # clauses — presumably adding the generic fallbacks. TODO confirm against
  # Type.Helpers.
  group_compare do
    # A literal integer sorts below every builtin integer class...
    def group_compare(_, builtin(:integer)), do: :lt
    def group_compare(left, builtin(:neg_integer)), do: (if left >= 0, do: :gt, else: :lt)
    def group_compare(_, builtin(:non_neg_integer)), do: :lt
    def group_compare(_, builtin(:pos_integer)), do: :lt
    # ...and compares to a range by its upper bound.
    def group_compare(left, _..last), do: (if left > last, do: :gt, else: :lt)
    def group_compare(left, right) when is_integer(right) do
      cond do
        left > right -> :gt
        left < right -> :lt
        true -> :eq
      end
    end
    # Against a union of integer-group members, compare with its last
    # (greatest) member.
    def group_compare(left, %Type.Union{of: ints}) do
      group_compare(left, List.last(ints))
    end
  end

  usable_as do
    def usable_as(i, a..b, _) when a <= i and i <= b, do: :ok
    def usable_as(i, builtin(:pos_integer), _) when i > 0, do: :ok
    def usable_as(i, builtin(:neg_integer), _) when i < 0, do: :ok
    def usable_as(i, builtin(:non_neg_integer), _) when i >= 0, do: :ok
    def usable_as(_, builtin(:integer), _), do: :ok
  end

  intersection do
    # A literal intersected with any class containing it is itself.
    def intersection(i, a..b) when a <= i and i <= b, do: i
    def intersection(i, builtin(:neg_integer)) when i < 0, do: i
    def intersection(i, builtin(:pos_integer)) when i > 0, do: i
    def intersection(i, builtin(:non_neg_integer)) when i >= 0, do: i
    def intersection(i, builtin(:integer)), do: i
  end

  # Derive subtype?/2 from usable_as/3 (Type.Helpers macro).
  subtype :usable_as
end
# Property implementation for integer range types (`a..b`).
defimpl Type.Properties, for: Range do
  import Type, only: :macros
  use Type.Helpers

  group_compare do
    def group_compare(_, builtin(:integer)), do: :lt
    def group_compare(_, builtin(:pos_integer)), do: :lt
    def group_compare(_, builtin(:non_neg_integer)), do: :lt
    def group_compare(_..last, builtin(:neg_integer)), do: (if last >= 0, do: :gt, else: :lt)
    # Same variable `last` in both patterns: only matches ranges with equal
    # upper bounds, so order by the lower bound.
    def group_compare(first1..last, first2..last), do: (if first1 < first2, do: :gt, else: :lt)
    def group_compare(_..last1, _..last2), do: (if last1 > last2, do: :gt, else: :lt)
    def group_compare(_..last, right) when is_integer(right), do: (if last >= right, do: :gt, else: :lt)
    def group_compare(first..last, %Type.Union{of: [init | types]}) do
      case List.last(types) do
        _..b when b < last -> :gt
        _..b ->
          # the range is bigger if it's bigger than the biggest union
          # NOTE(review): `Type.compare/2` returns :gt/:eq/:lt, so this
          # `&&` yields the boolean `last >= b`, not an ordering atom —
          # looks inconsistent with the other clauses; confirm upstream.
          Type.compare(init, first) && (last >= b)
        i when i < last -> :gt
        i when is_integer(i) ->
          # NOTE(review): same boolean-result concern as above.
          Type.compare(init, first) && (last >= i)
        _ -> :lt
      end
    end
  end

  usable_as do
    # Fully-contained cases are unconditionally ok...
    def usable_as(_, builtin(:integer), _), do: :ok
    def usable_as(a.._, builtin(:pos_integer), _) when a > 0, do: :ok
    def usable_as(a.._, builtin(:non_neg_integer), _) when a >= 0, do: :ok
    def usable_as(_..a, builtin(:neg_integer), _) when a < 0, do: :ok
    # ...while partial overlaps are only :maybe.
    def usable_as(a..b, builtin(:pos_integer), meta) when b > 0 do
      {:maybe, [Type.Message.make(a..b, builtin(:pos_integer), meta)]}
    end
    def usable_as(a..b, builtin(:neg_integer), meta) when a < 0 do
      {:maybe, [Type.Message.make(a..b, builtin(:neg_integer), meta)]}
    end
    def usable_as(a..b, builtin(:non_neg_integer), meta) when b >= 0 do
      {:maybe, [Type.Message.make(a..b, builtin(:non_neg_integer), meta)]}
    end
    def usable_as(a..b, target, meta)
        when is_integer(target) and a <= target and target <= b do
      {:maybe, [Type.Message.make(a..b, target, meta)]}
    end
    def usable_as(a..b, c..d, meta) do
      cond do
        a >= c and b <= d -> :ok
        a > d or b < c -> {:error, Type.Message.make(a..b, c..d, meta)}
        true ->
          {:maybe, [Type.Message.make(a..b, c..d, meta)]}
      end
    end
    # strange stitched ranges: a range straddling zero may be covered by a
    # union containing neg_integer/non_neg_integer plus the leftover piece.
    def usable_as(a..b, union = %Type.Union{}, meta) when a <= -1 and b >= 0 do
      pos_leftovers = if b == 0, do: 0, else: 0..b
      neg_leftovers = if a == -1, do: -1, else: a..-1

      if leftover_check(union, :neg_integer, pos_leftovers) or
         leftover_check(union, :non_neg_integer, neg_leftovers) do
        :ok
      else
        usable_as_union_fallback(a..b, union, meta)
      end
    end
  end

  intersection do
    def intersection(a..b, i) when a <= i and i <= b, do: i
    # Ranges touching at a single endpoint intersect in that point.
    def intersection(a.._, _..a), do: a
    def intersection(_..a, a.._), do: a
    def intersection(a..b, c..d) do
      case {a >= c, a > d, b < c, b <= d} do
        {_, x, y, _} when x or y -> builtin(:none)
        {false, _, _, true} -> c..b
        {true, _, _, true} -> a..b
        {true, _, _, false} -> a..d
        {false, _, _, false} -> c..d
      end
    end
    def intersection(a..b, builtin(:neg_integer)) when b < 0, do: a..b
    def intersection(-1.._, builtin(:neg_integer)), do: -1
    def intersection(a.._, builtin(:neg_integer)) when a < 0, do: a..-1
    def intersection(a..b, builtin(:pos_integer)) when a > 0, do: a..b
    def intersection(_..1, builtin(:pos_integer)), do: 1
    def intersection(_..a, builtin(:pos_integer)) when a > 1, do: 1..a
    def intersection(a..b, builtin(:non_neg_integer)) when a >= 0, do: a..b
    def intersection(_..0, builtin(:non_neg_integer)), do: 0
    def intersection(_..a, builtin(:non_neg_integer)) when a > 0, do: 0..a
    def intersection(a..b, builtin(:integer)), do: a..b
  end

  # True when `union` contains the builtin `int_class` and also covers the
  # `leftover` piece of the split range.
  defp leftover_check(union = %{of: types}, int_class, leftover) do
    (builtin(int_class) in types) and Type.subtype?(leftover, union)
  end

  # Fold usable_as over every union member, merging ternary results.
  defp usable_as_union_fallback(challenge, target, meta) do
    target.of
    |> Enum.map(&Type.usable_as(challenge, &1, meta))
    |> Enum.reduce(&Type.ternary_or/2)
  end

  subtype :usable_as
end
# Property implementation for atom literal types (a bare atom used as a
# singleton type).
defimpl Type.Properties, for: Atom do
  import Type, only: :macros
  use Type.Helpers

  alias Type.Message

  group_compare do
    def group_compare(_, builtin(:atom)), do: :lt
    # Two atom literals order by the atoms themselves.
    def group_compare(left, right), do: (if left >= right, do: :gt, else: :lt)
  end

  usable_as do
    def usable_as(_, builtin(:atom), _), do: :ok
    # `node` names are a strict subset of atoms ("name@host" form, per
    # Type.Properties.Type.valid_node?/1) — invalid atoms are a hard error.
    def usable_as(atom, builtin(:node), meta) do
      if Type.Properties.Type.valid_node?(atom) do
        :ok
      else
        {:error, Message.make(atom, builtin(:node), meta)}
      end
    end
    # Module validity can't be fully decided statically, so failure is only
    # :maybe here (contrast with the :node clause above).
    def usable_as(atom, builtin(:module), meta) do
      if Type.Properties.Type.valid_module?(atom) do
        :ok
      else
        {:maybe, [Message.make(atom, builtin(:module), meta)]}
      end
    end
  end

  intersection do
    def intersection(atom, builtin(:atom)), do: atom
    def intersection(atom, builtin(:node)) do
      if Type.Properties.Type.valid_node?(atom), do: atom, else: builtin(:none)
    end
    def intersection(atom, builtin(:module)) do
      if Type.Properties.Type.valid_module?(atom), do: atom, else: builtin(:none)
    end
  end

  # Defined directly instead of via the `subtype :usable_as` macro used by
  # the other impls; a subtype is a type usable with no caveats.
  def subtype?(a, b), do: usable_as(a, b, []) == :ok
end
# remember, the empty list is its own type — this impl only ever services
# `[]`; every clause for a non-empty list raises.
defimpl Type.Properties, for: List do
  import Type, only: :macros
  use Type.Helpers

  group_compare do
    def group_compare([], %Type.List{nonempty: ne}), do: (if ne, do: :gt, else: :lt)
    def group_compare(_, _) do
      raise "any list other than the empty list [] is an invalid type!"
    end
  end

  usable_as do
    # `[]` is usable as a possibly-empty proper list, or as an iolist.
    def usable_as([], %Type.List{nonempty: false, final: []}, _meta), do: :ok
    def usable_as([], builtin(:iolist), _), do: :ok
    def usable_as(list, _, _) when is_list(list) and length(list) > 0 do
      raise "any list other than the empty list [] is an invalid type!"
    end
  end

  intersection do
    def intersection([], %Type.List{nonempty: false, final: []}), do: []
    def intersection([], builtin(:iolist)), do: Type.Iolist.intersection_with([])
    def intersection(list, _) when is_list(list) and length(list) > 0 do
      raise "any list other than the empty list [] is an invalid type!"
    end
  end

  subtype :usable_as
end
|
lib/type/_properties.ex
| 0.781789
| 0.533458
|
_properties.ex
|
starcoder
|
defmodule Animu.Media.Anime.Options do
  @moduledoc """
  Parses Anime options
  """
  use Animu.Ecto.Schema

  alias Animu.Media.Anime
  alias __MODULE__

  defmodule Numbers do
    # Custom Ecto type for episode-number selections: accepts a total
    # count, a from/to map (string or atom keys), or an explicit list, and
    # casts everything to an array of floats.
    @behaviour Ecto.Type

    def type, do: {:array, :float}

    defguard is_integers(from, to)
             when is_integer(from) and is_integer(to)

    ## Total, ex: 25
    def cast(total) when is_integer(total) do
      cast(%{from: 1, to: total})
    end

    ## From -> To, ex: {from: 1, to: 5}
    def cast(%{"from" => from, "to" => to}) do
      cast(%{from: from, to: to})
    end

    def cast(%{from: from, to: to}) when is_integers(from, to) do
      from..to
      |> Enum.to_list
      |> cast
    end

    ## Integer Array, ex: [1,2,3]
    def cast(array) when is_list(array) do
      Ecto.Type.cast({:array, :float}, array)
    end

    def cast(_), do: :error

    # NOTE(review): load/1 and dump/1 always fail, so this type appears to
    # be cast-only (never loaded from or dumped to storage) — confirm
    # before persisting fields of this type.
    def load(_), do: :error
    def dump(_), do: :error
  end

  # Options are modeled as a (non-persisted) embedded schema so they can
  # be validated with changesets.
  embedded_schema do
    embeds_many :summon, Summon do
      field :source, :string
      field :except, {:array, :string}
      field :only, {:array, :string}
      field :force, :boolean, default: false
    end
    embeds_one :audit, Audit do
      field :scan, {:array, :string}
      field :calc, {:array, :string}
      field :force, :boolean, default: false
    end
    embeds_one :conjure, Conjure do
      embeds_one :episodes, Episodes do
        field :numbers, Numbers
        field :type, :string, default: "spawn"
      end
      embeds_many :image, Image do
        field :field, :string
        field :sizes, :map
      end
    end
  end

  # Entry point: validates `attrs` and returns `{:ok, keyword_options}` or
  # `{:error, formatted_errors}`.
  def parse(attrs) do
    %Options{}
    |> changeset(attrs)
    |> trim
  end

  defp trim(%Changeset{valid?: false} = ch) do
    errors = Animu.Util.format_errors(ch)
    {:error, errors}
  end

  # Converts a valid changeset into a plain keyword list of options.
  defp trim(ch) do
    opt =
      ch
      |> apply_changes
      |> to_map
      |> Map.to_list
    {:ok, opt}
  end

  defp changeset(%Options{} = opt, attrs) do
    opt
    |> cast(attrs, [])
    |> cast_embed(:summon, with: &summon_changeset/2)
    |> cast_embed(:audit, with: &audit_changeset/2)
    |> cast_embed(:conjure, with: &conjure_changeset/2)
  end

  # `only`/`except` must name real Anime fields; validated as strings and
  # then converted to existing atoms.
  defp summon_changeset(%_{}, attrs) do
    fields = all_fields(Anime, as: :string, assoc: true)
    sources = ["kitsu"]
    %Options.Summon{}
    |> cast(attrs, all_fields(Options.Summon))
    |> validate_inclusion(:source, sources)
    |> validate_subset(:only, fields)
    |> validate_subset(:except, fields)
    |> update_to_atoms(:only)
    |> update_to_atoms(:except)
  end

  defp audit_changeset(%_{}, attrs) do
    fields = all_fields(Anime, as: :string, assoc: true)
    %Options.Audit{}
    |> cast(attrs, all_fields(Options.Audit))
    |> validate_subset(:calc, fields)
    |> validate_subset(:scan, fields)
    |> update_to_atoms(:calc)
    |> update_to_atoms(:scan)
  end

  defp conjure_changeset(%_{}, attrs) do
    %Options.Conjure{}
    |> cast(attrs, [])
    |> cast_embed(:episodes, with: &conj_episodes_changeset/2)
    |> cast_embed(:image, with: &conj_image_changeset/2)
  end

  defp conj_episodes_changeset(%_{}, attrs) do
    types = ["spawn", "conjure_video", "conjure_thumb"]
    %Options.Conjure.Episodes{}
    |> cast(attrs, all_fields(Options.Conjure.Episodes))
    |> validate_inclusion(:type, types)
  end

  defp conj_image_changeset(%_{}, attrs) do
    fields = ["poster_image", "cover_image"]
    %Options.Conjure.Image{}
    |> cast(attrs, all_fields(Options.Conjure.Image))
    |> validate_inclusion(:field, fields)
    |> update_to_atom(:field)
  end

  # Converts a list-valued change to atoms, but only on valid changesets —
  # invalid values would not be safe to atomize.
  defp update_to_atoms(ch, field) do
    case ch.valid? do
      true -> update_change(ch, field, &update_to_atoms/1)
      _ -> ch
    end
  end

  defp update_to_atoms(list) do
    Enum.map(list, &to_atom/1)
  end

  defp update_to_atom(ch, field) do
    case ch.valid? do
      true -> update_change(ch, field, &to_atom/1)
      _ -> ch
    end
  end

  # Uses String.to_existing_atom/1, so only already-known atoms can be
  # produced from user input (no atom-table exhaustion).
  defp to_atom(str) when is_binary(str) do
    String.to_existing_atom(str)
  end

  defp to_atom(value), do: value
end
|
lib/animu/media/anime/options.ex
| 0.741487
| 0.455138
|
options.ex
|
starcoder
|
defmodule Crawly.Manager do
  @moduledoc """
  Crawler manager module
  This module is responsible for spawning all processes related to
  a given Crawler.
  The manager spawns the following processes tree.
  ┌────────────────┐        ┌───────────────────┐
  │ Crawly.Manager ├────────> Crawly.ManagerSup │
  └────────────────┘        └─────────┬─────────┘
           │                          |
           │                          |
           │        ┌──────────────────────────┤
           │        │                          │
           │        │                          │
  ┌────────▼───────┐      ┌─────────▼───────┐
  │    Worker1     │      │     Worker2     │
  └────────┬───────┘      └────────┬────────┘
           │                       │
           │                       │
           │                       │
           │                       │
  ┌────────▼─────────┐  ┌──────────▼───────────┐
  │Crawly.DataStorage│  │Crawly.RequestsStorage│
  └──────────────────┘  └──────────────────────┘
  """
  require Logger

  # Default interval (ms) between periodic :operations checks.
  @timeout 60_000
  # How many start requests are stored synchronously before the rest are
  # handed off to an async Task (see handle_continue/2).
  @start_request_split_size 50

  use GenServer

  alias Crawly.{Engine, Utils}

  @doc """
  Adds `num_of_workers` extra workers to a running spider's supervisor.
  Returns `{:error, reason}` when the spider's manager is not registered
  with the engine.
  """
  @spec add_workers(module(), non_neg_integer()) ::
          :ok | {:error, :spider_non_exist}
  def add_workers(spider_name, num_of_workers) do
    case Engine.get_manager(spider_name) do
      {:error, reason} ->
        {:error, reason}

      pid ->
        GenServer.cast(pid, {:add_workers, num_of_workers})
    end
  end

  def start_link([spider_name, options]) do
    Logger.debug("Starting the manager for #{inspect(spider_name)}")
    GenServer.start_link(__MODULE__, [spider_name, options])
  end

  @impl true
  def init([spider_name, options]) do
    crawl_id = Keyword.get(options, :crawl_id)
    Logger.metadata(spider_name: spider_name, crawl_id: crawl_id)

    # Per-run limits: explicit options win over spider/global settings.
    itemcount_limit =
      Keyword.get(
        options,
        :closespider_itemcount,
        get_default_limit(:closespider_itemcount, spider_name)
      )

    closespider_timeout_limit =
      Keyword.get(
        options,
        :closespider_timeout,
        get_default_limit(:closespider_timeout, spider_name)
      )

    # Start DataStorage worker
    {:ok, data_storage_pid} =
      Crawly.DataStorage.start_worker(spider_name, crawl_id)

    # Linked so the manager crashes (and is restarted) with its storages.
    Process.link(data_storage_pid)

    # Start RequestsWorker for a given spider
    {:ok, request_storage_pid} =
      Crawly.RequestsStorage.start_worker(spider_name, crawl_id)

    Process.link(request_storage_pid)

    # Start workers
    num_workers =
      Keyword.get(
        options,
        :concurrent_requests_per_domain,
        Utils.get_settings(:concurrent_requests_per_domain, spider_name, 4)
      )

    worker_pids =
      Enum.map(1..num_workers, fn _x ->
        DynamicSupervisor.start_child(
          spider_name,
          {Crawly.Worker, [spider_name: spider_name, crawl_id: crawl_id]}
        )
      end)

    # Schedule basic service operations for given spider manager
    timeout =
      Utils.get_settings(:manager_operations_timeout, spider_name, @timeout)

    tref = Process.send_after(self(), :operations, timeout)

    Logger.debug(
      "Started #{Enum.count(worker_pids)} workers for #{inspect(spider_name)}"
    )

    # Slow startup work (fetching start requests) is deferred to
    # handle_continue/2 so init/1 returns quickly.
    {:ok,
     %{
       name: spider_name,
       crawl_id: crawl_id,
       itemcount_limit: itemcount_limit,
       closespider_timeout_limit: closespider_timeout_limit,
       tref: tref,
       prev_scraped_cnt: 0,
       workers: worker_pids
     }, {:continue, {:startup, options}}}
  end

  @impl true
  def handle_continue({:startup, options}, state) do
    # Add start requests to the requests storage
    init = state.name.init(options)

    start_requests_from_req = Keyword.get(init, :start_requests, [])

    start_requests_from_urls =
      init
      |> Keyword.get(:start_urls, [])
      |> Crawly.Utils.requests_from_urls()

    start_requests = start_requests_from_req ++ start_requests_from_urls

    # Split start requests, so it's possible to initialize a part of them in async
    # manner
    {start_reqs, async_start_reqs} =
      Enum.split(start_requests, @start_request_split_size)

    :ok = Crawly.RequestsStorage.store(state.name, start_reqs)

    Task.start(fn ->
      Crawly.RequestsStorage.store(state.name, async_start_reqs)
    end)

    {:noreply, state}
  end

  @impl true
  def handle_cast({:add_workers, num_of_workers}, state) do
    Logger.info("Adding #{num_of_workers} workers for #{inspect(state.name)}")

    Enum.each(1..num_of_workers, fn _ ->
      DynamicSupervisor.start_child(
        state.name,
        {Crawly.Worker, [spider_name: state.name, crawl_id: state.crawl_id]}
      )
    end)

    {:noreply, state}
  end

  # Periodic self-check: measures crawl speed and enforces the
  # closespider limits, then reschedules itself.
  @impl true
  def handle_info(:operations, state) do
    Process.cancel_timer(state.tref)

    # Close spider if required items count was reached.
    {:stored_items, items_count} = Crawly.DataStorage.stats(state.name)

    # Items scraped since the previous :operations tick.
    delta = items_count - state.prev_scraped_cnt
    Logger.info("Current crawl speed is: #{delta} items/min")

    maybe_stop_spider_by_itemcount_limit(
      state.name,
      items_count,
      state.itemcount_limit
    )

    # Close spider in case if it's not scraping items fast enough
    maybe_stop_spider_by_timeout(
      state.name,
      delta,
      state.closespider_timeout_limit
    )

    tref =
      Process.send_after(
        self(),
        :operations,
        Utils.get_settings(:manager_operations_timeout, state.name, @timeout)
      )

    {:noreply, %{state | tref: tref, prev_scraped_cnt: items_count}}
  end

  defp maybe_stop_spider_by_itemcount_limit(
         spider_name,
         current,
         limit
       )
       when current >= limit do
    Logger.info(
      "Stopping #{inspect(spider_name)}, closespider_itemcount achieved"
    )

    Crawly.Engine.stop_spider(spider_name, :itemcount_limit)
  end

  defp maybe_stop_spider_by_itemcount_limit(_, _, _), do: :ok

  # Stops the spider when crawl speed (`current`, items since last tick)
  # dropped to or below the closespider_timeout limit.
  # NOTE(review): the log message and stop reason say "itemcount", though
  # this is the crawl-speed (closespider_timeout) check — confirm intent.
  defp maybe_stop_spider_by_timeout(spider_name, current, limit)
       when current <= limit and is_integer(limit) do
    Logger.info("Stopping #{inspect(spider_name)}, itemcount timeout achieved")

    Crawly.Engine.stop_spider(spider_name, :itemcount_timeout)
  end

  defp maybe_stop_spider_by_timeout(_, _, _), do: :ok

  # Despite the name, atoms (e.g. a :disabled setting) pass through
  # unchanged; only binaries are converted.
  defp maybe_convert_to_integer(value) when is_atom(value), do: value

  defp maybe_convert_to_integer(value) when is_binary(value),
    do: String.to_integer(value)

  defp maybe_convert_to_integer(value) when is_integer(value), do: value

  # Get a closespider_itemcount or closespider_timeout_limit from config or spider
  # settings.
  defp get_default_limit(limit_name, spider_name) do
    limit_name
    |> Utils.get_settings(spider_name)
    |> maybe_convert_to_integer()
  end
end
|
lib/crawly/manager.ex
| 0.73914
| 0.479138
|
manager.ex
|
starcoder
|
defmodule Contex.CategoryColourScale do
  @moduledoc """
  Maps categories to colours.

  The `Contex.CategoryColourScale` maps categories to a colour palette. It is used, for example, to calculate
  the fill colours for `Contex.BarChart`, or to calculate the colours for series in `Contex.PointPlot`.

  Internally it is a very simple map with some convenience methods to handle duplicated data inputs,
  cycle through colours etc.

  The mapping is done on a first identified, first matched basis from the provided dataset. So, for example,
  if you have a colour palette of `["ff0000", "00ff00", "0000ff"]` (aka red, green, blue), the mapping
  for a dataset would be as follows:

  X | Y | Category | Mapped Colour
  -- | - | -------- | -------------
  0 | 0 | Turtle | red
  1 | 1 | Turtle | red
  0 | 1 | Camel | green
  2 | 1 | Brontosaurus | blue
  3 | 4 | Turtle | red
  5 | 5 | Brontosaurus | blue
  6 | 7 | Hippopotamus | red ← *NOTE* - if you run out of colours, they will cycle

  In use, the `CategoryColourScale` is created with a list of values to map to colours and optionally a colour
  palette. If using with a `Contex.Dataset`, it would be initialised like this:

  ```
  dataset = Dataset.new(data, ["X", "Y", "Category"])

  colour_scale
    = dataset
    |> Dataset.unique_values("Category")
    |> CategoryColourScale.new(["ff0000", "00ff00", "0000ff"])
  ```

  Then it can be used to look up colours for values as needed:

  ```
  fill_colour = CategoryColourScale.colour_for_value(colour_scale, "Brontosaurus") # returns "0000ff"
  ```

  There are a number of built-in colour palettes - see `colour_palette()`, but you can supply your own by
  providing a list of strings representing hex code of the colour as per CSS colour hex codes, but without the #. For example:

  ```
  scale = CategoryColourScale.set_palette(scale, ["fbb4ae", "b3cde3", "ccebc5"])
  ```
  """
  alias __MODULE__

  defstruct [:values, :colour_palette, :colour_map, :default_colour]

  @type t() :: %__MODULE__{}
  @type colour_palette() :: nil | :default | :pastel1 | :warm | list()

  # Fallback colour used when a value has no mapping and no default was set.
  @default_colour "fa8866"

  @doc """
  Create a new CategoryColourScale from a list of values.

  Optionally attach a colour palette.

  Pretty well any value list can be used so long as it can be a key in a map.
  """
  @spec new(list(), colour_palette()) :: Contex.CategoryColourScale.t()
  def new(raw_values, palette \\ :default) when is_list(raw_values) do
    # Duplicate inputs must map to the same colour, so de-duplicate first.
    values = Enum.uniq(raw_values)

    %CategoryColourScale{values: values}
    |> set_palette(palette)
  end

  @doc """
  Update the colour palette used for the scale.

  Accepts a palette name atom (see `t:colour_palette/0`), `nil` (treated as
  `:default`), or an explicit list of hex colour strings. Rebuilds the
  value-to-colour map.
  """
  @spec set_palette(Contex.CategoryColourScale.t(), colour_palette()) ::
          Contex.CategoryColourScale.t()
  def set_palette(%CategoryColourScale{} = colour_scale, nil),
    do: set_palette(colour_scale, :default)

  def set_palette(%CategoryColourScale{} = colour_scale, palette) when is_atom(palette) do
    set_palette(colour_scale, get_palette(palette))
  end

  def set_palette(%CategoryColourScale{} = colour_scale, palette) when is_list(palette) do
    %{colour_scale | colour_palette: palette}
    |> map_values_to_palette()
  end

  @doc """
  Sets the default colour for the scale when it isn't possible to look one up for a value.
  """
  def set_default_colour(%CategoryColourScale{} = colour_scale, colour) do
    %{colour_scale | default_colour: colour}
  end

  @doc """
  Look up a colour for a value from the palette.

  Falls back to the scale's default colour (or the module default when the
  scale is `nil` or the value is unmapped).
  """
  @spec colour_for_value(Contex.CategoryColourScale.t() | nil, any()) :: String.t()
  def colour_for_value(nil, _value), do: @default_colour

  def colour_for_value(%CategoryColourScale{colour_map: colour_map} = colour_scale, value) do
    case Map.fetch(colour_map, value) do
      {:ok, result} -> result
      _ -> get_default_colour(colour_scale)
    end
  end

  @doc """
  Get the default colour. Surprise.
  """
  @spec get_default_colour(Contex.CategoryColourScale.t() | nil) :: String.t()
  def get_default_colour(%CategoryColourScale{default_colour: default} = _colour_scale)
      when is_binary(default),
      do: default

  def get_default_colour(_), do: @default_colour

  # Builds the value -> colour map, assigning palette colours in order of
  # first appearance (cycling via get_colour/2 when values outnumber colours).
  defp map_values_to_palette(
         %CategoryColourScale{values: values, colour_palette: palette} = colour_scale
       ) do
    colour_map =
      values
      |> Enum.with_index()
      |> Map.new(fn {value, index} -> {value, get_colour(palette, index)} end)

    %{colour_scale | colour_map: colour_map}
  end

  # "Inspired by" https://github.com/d3/d3-scale-chromatic/blob/master/src/categorical/category10.js
  @default_palette [
    "1f77b4",
    "ff7f0e",
    "2ca02c",
    "d62728",
    "9467bd",
    "8c564b",
    "e377c2",
    "7f7f7f",
    "bcbd22",
    "17becf"
  ]
  defp get_palette(:default), do: @default_palette

  # "Inspired by" https://github.com/d3/d3-scale-chromatic/blob/master/src/categorical/Pastel1.js
  @pastel1_palette [
    "fbb4ae",
    "b3cde3",
    "ccebc5",
    "decbe4",
    "fed9a6",
    "ffffcc",
    "e5d8bd",
    "fddaec",
    "f2f2f2"
  ]
  defp get_palette(:pastel1), do: @pastel1_palette

  # Warm colours - see https://learnui.design/tools/data-color-picker.html#single
  @warm_palette ["d40810", "e76241", "f69877", "ffcab4", "ffeac4", "fffae4"]
  defp get_palette(:warm), do: @warm_palette

  defp get_palette(_), do: nil

  # TODO: We currently cycle the palette when we run out of colours. Probably should fade them (or similar)
  defp get_colour(colour_palette, index) when is_list(colour_palette) do
    palette_length = length(colour_palette)
    adjusted_index = rem(index, palette_length)
    Enum.at(colour_palette, adjusted_index)
  end
end
|
lib/chart/scale/category_colour_scale.ex
| 0.898972
| 0.908982
|
category_colour_scale.ex
|
starcoder
|
defmodule Freshcom.Filter do
  @moduledoc """
  Builds Ecto queries from MongoDB-style filter structures: a list of maps
  whose keys are either logical operators (`"$and"`, `"$or"`) or attribute
  names, and whose values are either nested statements or comparison
  expressions (`"$eq"`, `"$gt"`, `"$in"`, ...). For example:

      [%{"$and" => [
        %{"$or" => [%{"role" => "test1Test"}, %{"role" => "test2"}]},
        %{"role" => "lol"},
        %{"$or" => [%{"role" => "tt1"}, %{"role" => "tt2"}]}
      ]}]
  """

  import Ecto.Query
  import FCSupport.Normalization, only: [stringify_list: 1]

  alias Ecto.Queryable

  # Applies filter `statements` to `query` using direct attributes only
  # (no association traversal). `permitted_fields` whitelists filterable
  # attributes; `:all` permits every field of the query's schema.
  @spec attr_only(Ecto.Query.t(), [map], [String.t()]) :: Ecto.Query.t()
  def attr_only(query, [], _), do: query

  def attr_only(%Ecto.Query{} = query, statements, :all) do
    {_, queryable} = query.from
    # Expand :all into the schema's full field list (as strings).
    permitted_fields = stringify_list(queryable.__schema__(:fields))
    dynamic = do_attr_only("$and", statements, permitted_fields)

    from(q in query, where: ^dynamic)
  end

  def attr_only(%Ecto.Query{} = query, statements, permitted_fields) when is_list(permitted_fields) do
    # Top-level statements are implicitly AND-ed together.
    dynamic = do_attr_only("$and", statements, permitted_fields)

    from(q in query, where: ^dynamic)
  end

  # Recursive worker. A "$and"/"$or" operator combines the dynamics built
  # from its child statements; any other binary key is an attribute name
  # compared via compare_attr/2.
  defp do_attr_only(op, statements, permitted_fields) when op in ["$or", "$and"] do
    statements
    |> collect_dynamics(permitted_fields)
    |> combine_dynamics(op)
  end

  defp do_attr_only(attr, expression, permitted_fields) when is_binary(attr) do
    if is_field_permitted(attr, permitted_fields) do
      # to_existing_atom avoids minting new atoms from untrusted filter input.
      compare_attr(String.to_existing_atom(attr), expression)
    else
      # Non-permitted fields are silently dropped (filtered out below).
      nil
    end
  end

  # Builds a dynamic for each single-pair statement map, dropping nils
  # produced by non-permitted fields.
  defp collect_dynamics(statements, permitted_fields) do
    dynamics =
      Enum.reduce(statements, [], fn(statement_or_expression, acc) ->
        # Each statement map is expected to contain exactly one key/value pair.
        {operator_or_attr, statements_or_expression} = Enum.at(statement_or_expression, 0)
        acc ++ [do_attr_only(operator_or_attr, statements_or_expression, permitted_fields)]
      end)

    Enum.reject(dynamics, &is_nil/1)
  end

  # Folds a list of dynamics into a single dynamic with the given operator.
  defp combine_dynamics([d], _), do: d
  defp combine_dynamics([d1, d2], "$and"), do: dynamic([], ^d1 and ^d2)
  defp combine_dynamics([d1, d2], "$or"), do: dynamic([], ^d1 or ^d2)

  defp combine_dynamics([d1, d2 | rest], op) do
    # Left-fold: ((d1 op d2) op d3) op ...
    acc = combine_dynamics([d1, d2], op)
    Enum.reduce(rest, acc, &combine_dynamics([&2, &1], op))
  end

  # Translates one comparison expression into an Ecto dynamic.
  # NOTE: clause order matters — the bare-value clause must stay before the
  # map-based "$eq"/"$gt"/... clauses so that maps fall through to them.
  defp compare_attr(attr, nil) do
    dynamic([q], is_nil(field(q, ^attr)))
  end

  defp compare_attr(attr, %{"$eq" => nil}) do
    dynamic([q], is_nil(field(q, ^attr)))
  end

  defp compare_attr(attr, value) when not is_map(value) do
    # A bare (non-map) value is shorthand for {"$eq" => value}.
    dynamic([q], field(q, ^attr) == ^value)
  end

  defp compare_attr(attr, %{"$eq" => value}) do
    dynamic([q], field(q, ^attr) == ^value)
  end

  defp compare_attr(attr, %{"$gt" => value}) do
    dynamic([q], field(q, ^attr) > ^value)
  end

  defp compare_attr(attr, %{"$gte" => value}) do
    dynamic([q], field(q, ^attr) >= ^value)
  end

  defp compare_attr(attr, %{"$in" => value}) do
    dynamic([q], field(q, ^attr) in ^value)
  end

  defp compare_attr(attr, %{"$lt" => value}) do
    dynamic([q], field(q, ^attr) < ^value)
  end

  defp compare_attr(attr, %{"$lte" => value}) do
    dynamic([q], field(q, ^attr) <= ^value)
  end

  defp compare_attr(attr, %{"$ne" => nil}) do
    dynamic([q], not(is_nil(field(q, ^attr))))
  end

  defp compare_attr(attr, %{"$ne" => value}) do
    dynamic([q], field(q, ^attr) != ^value)
  end

  defp compare_attr(attr, %{"$nin" => value}) do
    dynamic([q], not(field(q, ^attr) in ^value))
  end

  defp is_field_permitted(_, :all), do: true
  defp is_field_permitted(field, permitted_fields), do: field in permitted_fields

  # Applies filter `expressions` to `query`, allowing dotted association
  # fields (e.g. "account.owner.name"), joining subqueries as needed.
  # `assoc_queries` optionally provides pre-built base queries per assoc path.
  @spec with_assoc(Ecto.Query.t(), [map], [String.t()], map) :: Ecto.Query.t()
  def with_assoc(query, expressions, permitted_fields, assoc_queries \\ %{}) do
    Enum.reduce(expressions, query, fn(expression, acc_query) ->
      {field, comparison} = Enum.at(expression, 0)

      if is_field_permitted(field, permitted_fields) do
        expression(acc_query, field, comparison, assoc_queries)
      else
        acc_query
      end
    end)
  end

  # Recursively resolves a dotted field: peels off the first assoc segment,
  # builds a filtered subquery for it, and joins it on the reflection's keys.
  # A plain field becomes a where clause on the current query.
  defp expression(query, field, comparison, assoc_queries) do
    if is_assoc(field) do
      {assoc, assoc_field} = assoc(field)
      assoc_assoc_queries = assoc_queries(assoc_queries, assoc)

      assoc_query =
        query
        |> assoc_query(assoc, assoc_queries)
        |> expression(assoc_field, comparison, assoc_assoc_queries)

      %{owner_key: owner_key, related_key: related_key} = reflection(query, assoc)

      from(q in query,
        join: aq in subquery(assoc_query),
        on: field(q, ^owner_key) == field(aq, ^related_key)
      )
    else
      dynamic = compare_attr(String.to_existing_atom(field), comparison)
      from(q in query, where: ^dynamic)
    end
  end

  # A field is an association path when it contains at least one ".".
  @spec is_assoc(String.t()) :: boolean
  def is_assoc(field) do
    length(String.split(field, ".")) > 1
  end

  # Splits "a.b.c" into {"a", "b.c"}.
  defp assoc(field) do
    splitted = String.split(field, ".")
    assoc = Enum.at(splitted, 0)
    assoc_field = Enum.join(Enum.slice(splitted, 1..-1), ".")

    {assoc, assoc_field}
  end

  # Narrows the assoc_queries map to entries nested under `target_assoc`,
  # re-keyed relative to it (e.g. "owner.account" -> "account").
  defp assoc_queries(assoc_queries, target_assoc) do
    Enum.reduce(assoc_queries, %{}, fn({assoc, query}, acc) ->
      if assoc != target_assoc && String.starts_with?(assoc, target_assoc) do
        {_, assoc_field} = assoc(assoc)
        Map.put(acc, assoc_field, query)
      else
        acc
      end
    end)
  end

  # Base query for an association: a caller-provided one when present,
  # otherwise the association's queryable from the schema reflection.
  defp assoc_query(query, assoc, assoc_queries) do
    if assoc_queries[assoc] do
      assoc_queries[assoc]
    else
      reflection = reflection(query, assoc)
      Queryable.to_query(reflection.queryable)
    end
  end

  # Looks up the Ecto association reflection for `assoc` on the query's schema.
  defp reflection(query, assoc) do
    {_, queryable} = query.from
    queryable.__schema__(:association, String.to_existing_atom(assoc))
  end

  # Walks a filter structure and applies `func` to every value compared
  # against attribute `key`, recursing through "$and"/"$or" statements.
  @spec normalize(list, String.t(), function) :: list
  def normalize(filter, key, func) when is_list(filter) do
    Enum.map(filter, fn(statement_or_expression) ->
      {operator_or_attr, statements_or_expression} = Enum.at(statement_or_expression, 0)

      cond do
        String.starts_with?(operator_or_attr, "$") ->
          %{operator_or_attr => normalize(statements_or_expression, key, func)}

        operator_or_attr == key && is_map(statements_or_expression) ->
          # Comparison map, e.g. %{"$in" => [...]}: apply func to the value.
          {cmp, value} = Enum.at(statements_or_expression, 0)
          %{operator_or_attr => %{cmp => func.(value)}}

        operator_or_attr == key ->
          %{operator_or_attr => func.(statements_or_expression)}

        true ->
          statement_or_expression
      end
    end)
  end
end
|
lib/freshcom/core/filter.ex
| 0.606032
| 0.471041
|
filter.ex
|
starcoder
|
defmodule Mssqlex do
  @moduledoc """
  Interface for interacting with MS SQL Server via an ODBC driver for Elixir.

  It implements `DBConnection` behaviour, using `:odbc` to connect to the
  system's ODBC driver. Requires MS SQL Server ODBC driver, see
  [README](readme.html) for installation instructions.
  """

  alias Mssqlex.Query
  alias Mssqlex.Type

  @doc """
  Connect to a MS SQL Server using ODBC.

  `opts` expects a keyword list with zero or more of:

    * `:odbc_driver` - The driver the adapter will use.
      * environment variable: `MSSQL_DVR`
      * default value: {ODBC Driver 17 for SQL Server}
    * `:hostname` - The server hostname.
      * environment variable: `MSSQL_HST`
      * default value: localhost
    * `:instance_name` - OPTIONAL. The name of the instance, if using named instances.
      * environment variable: `MSSQL_IN`
    * `:port` - OPTIONAL. The server port number.
      * environment variable: `MSSQL_PRT`
    * `:database` - The name of the database.
      * environment variable: `MSSQL_DB`
    * `:username` - Username.
      * environment variable: `MSSQL_UID`
    * `:password` - Password.
      * environment variable: `MSSQL_PWD`

  `Mssqlex` uses the `DBConnection` framework and supports all `DBConnection`
  options like `:idle`, `:after_connect` etc.
  See `DBConnection.start_link/2` for more information.

  ## Examples

      iex> {:ok, pid} = Mssqlex.start_link(database: "mr_microsoft")
      {:ok, #PID<0.70.0>}
  """
  @spec start_link(Keyword.t) :: {:ok, pid}
  def start_link(opts) do
    DBConnection.start_link(Mssqlex.Protocol, opts)
  end

  @doc """
  Executes a query against an MS SQL Server with ODBC.

  `conn` expects a `Mssqlex` process identifier.

  `statement` expects a SQL query string.

  `params` expects a list of values in one of the following formats:

    * Strings with only valid ASCII characters, which will be sent to the
      database as strings.
    * Other binaries, which will be converted to UTF16 Little Endian binaries
      (which is what SQL Server expects for its unicode fields).
    * `Decimal` structs, which will be encoded as strings so they can be
      sent to the database with arbitrary precision.
    * Integers, which will be sent as-is if under 10 digits or encoded
      as strings for larger numbers.
    * Floats, which will be encoded as strings.
    * Time as `{hour, minute, sec, usec}` tuples, which will be encoded as
      strings.
    * Dates as `{year, month, day}` tuples, which will be encoded as strings.
    * Datetime as `{{hour, minute, sec, usec}, {year, month, day}}` tuples which
      will be encoded as strings. Note that attempting to insert a value with
      usec > 0 into a 'datetime' or 'smalldatetime' column is an error since
      those column types don't have enough precision to store usec data.

  `opts` expects a keyword list with zero or more of:

    * `:preserve_encoding`: If `true`, doesn't convert returned binaries from
      UTF16LE to UTF8. Default: `false`.
    * `:mode` - set to `:savepoint` to use a savepoint to rollback to before the
      query on error, otherwise set to `:transaction` (default: `:transaction`);

  Result values will be encoded according to the following conversions:

    * char and varchar: strings.
    * nchar and nvarchar: strings unless `:preserve_encoding` is set to `true`
      in which case they will be returned as UTF16 Little Endian binaries.
    * int, smallint, tinyint, decimal and numeric when precision < 10 and
      scale = 0 (i.e. effectively integers): integers.
    * float, real, double precision, decimal and numeric when precision between
      10 and 15 and/or scale between 1 and 15: `Decimal` structs.
    * bigint, money, decimal and numeric when precision > 15: strings.
    * date: `{year, month, day}`
    * smalldatetime, datetime, datetime2: `{{YY, MM, DD}, {HH, MM, SS, 0}}` (note that fractional
      second data is lost due to limitations of the ODBC adapter. To preserve it
      you can convert these columns to varchar during selection.)
    * uniqueidentifier, time, binary, varbinary, rowversion: not currently
      supported due to adapter limitations. Select statements for columns
      of these types must convert them to supported types (e.g. varchar).
  """
  # NOTE(review): DBConnection.prepare_execute/4 may also return an error
  # tuple; the success spec below is kept as-is for compatibility — verify
  # against the DBConnection version in use.
  @spec query(pid(), binary(), [Type.param()], Keyword.t) ::
    {:ok, iodata(), Mssqlex.Result.t}
  def query(conn, statement, params, opts \\ []) do
    DBConnection.prepare_execute(
      conn, %Query{name: "", statement: statement}, params, opts)
  end

  @doc """
  Executes a query against an MS SQL Server with ODBC.

  Raises an error on failure. See `query/4` for details.
  """
  @spec query!(pid(), binary(), [Type.param()], Keyword.t) ::
    {iodata(), Mssqlex.Result.t}
  def query!(conn, statement, params, opts \\ []) do
    DBConnection.prepare_execute!(
      conn, %Query{name: "", statement: statement}, params, opts)
  end
end
|
lib/mssqlex.ex
| 0.944434
| 0.664067
|
mssqlex.ex
|
starcoder
|
defmodule ReviewParser do
  @moduledoc """
  Parses the review snippets from [www.dealerrater.com](https://www.dealerrater.com) into `Model.Review` model.
  """

  @doc """
  Parses the review snippets from [www.dealerrater.com](https://www.dealerrater.com) into `Model.Review` model.

  `review` is expected to be a Floki-parseable HTML fragment for a single
  review snippet.
  """
  def parse(review) do
    %Model.Review{
      user: get_user(review),
      reason_for_visit: get_reason_to_visit(review),
      title: get_title(review),
      body: get_body(review),
      date: get_date(review),
      dealership_rating: get_dealership_rating(review),
      ratings: get_individual_ratings(review)
    }
  end

  # Free-text review body from the `p.review-content` element.
  defp get_body(review) do
    review
    |> Floki.find("p.review-content")
    |> Floki.text
    |> String.trim
  end

  # Review title from the <h3> inside the review wrapper, with surrounding
  # double quotes stripped.
  defp get_title(review) do
    review
    |> Floki.find("div.review-wrapper")
    |> Floki.find("div.margin-bottom-sm,div.line-height-150")
    |> Floki.find("h3")
    |> Floki.text
    |> String.replace(~r/^"|"$/, "")
    |> String.trim
  end

  # Reviewer name. Assumes the <span> text has the shape "... - username"
  # and takes the segment after the last dash — TODO confirm against the
  # current dealerrater.com markup.
  defp get_user(review) do
    review
    |> Floki.find("div.review-wrapper")
    |> Floki.find("div.margin-bottom-sm,div.line-height-150")
    |> Floki.find("span")
    |> Floki.text
    |> String.split(~r/ *- */)
    |> List.last
    |> String.trim
  end

  # Reason-for-visit label from the dealership rating block.
  defp get_reason_to_visit(review) do
    review
    |> Floki.find("div.dealership-rating")
    |> Floki.find("div.small-text.dr-grey")
    |> Floki.text
    |> String.trim
  end

  # Overall dealership rating, derived from a CSS class such as "rating-48"
  # on the static rating element.
  # NOTE(review): the capture `&(...) && !...` parses as
  # `&(starts_with? && not ends_with?)` because `&` binds loosest, so the
  # filter behaves as intended despite the misleading parenthesisation.
  defp get_dealership_rating(review) do
    review
    |> Floki.find("div.dealership-rating")
    |> Floki.find("div.rating-static.hidden-xs.margin-center")
    |> Floki.attribute("class")
    |> List.first
    |> String.split
    |> Enum.filter(&(String.starts_with?(&1, "rating-")) && !String.ends_with?(&1, "-static"))
    |> List.first
    |> rating_matcher()
  end

  # Per-category ratings. Assumes the page always renders exactly five
  # `div.rating-static-indv` elements in this fixed order — the match
  # raises otherwise. TODO confirm against current markup.
  defp get_individual_ratings(review) do
    [customer_service, quality_of_work, friendliness, pricing, experience] = review
    |> Floki.find("div.rating-static-indv")
    |> Floki.attribute("class")
    |> Enum.map(&String.split/1)
    |> List.flatten
    |> Enum.filter(&(String.starts_with?(&1, "rating-")) && !String.ends_with?(&1, "-static-indv"))

    %Model.Ratings{
      customer_service: rating_matcher(customer_service),
      quality_of_work: rating_matcher(quality_of_work),
      friendliness: rating_matcher(friendliness),
      pricing: rating_matcher(pricing),
      experience: rating_matcher(experience)
    }
  end

  # Review date, e.g. "June 01, 2020", taken from the text before the first
  # "|" separator in the review-date element.
  defp get_date(review) do
    review
    |> Floki.find("div.review-date")
    |> Floki.DeepText.get("|")
    |> String.split("|")
    |> List.first
    |> Timex.parse!("%B %d, %Y", :strftime)
  end

  # Converts a CSS rating class suffix to an integer, e.g. "rating-48" -> 48.
  defp rating_matcher("rating-" <> rating), do: String.to_integer(rating)
end
|
lib/review_parser.ex
| 0.664214
| 0.423577
|
review_parser.ex
|
starcoder
|
defmodule Tabular do
  @moduledoc """
  Tabular converts an ascii table string into either a list of lists or a list of maps.
  """

  # Matches row-separator lines such as "|----+----|" or "+----+----+".
  @row_splitter_re ~r/^\s*[|+]--[-+|]+$/

  @doc ~S'''
  Converts an ascii table to a list of maps.

  Header cells become atom keys. Rows separated only by table content (no
  separator line between them) are folded into a single logical row.

  ## Examples

      iex> ascii_table = """
      ...> |---------------+--------------------|
      ...> | name | dob |
      ...> |---------------+--------------------|
      ...> | Malcolm | September 20, 2468 |
      ...> | Reynolds | |
      ...> |---------------+--------------------|
      ...> | Zoe Washburne | February 15, 2484 |
      ...> |---------------+--------------------|
      ...> """
      ...> Tabular.to_list_of_maps(ascii_table)
      [
        %{dob: "September 20, 2468", name: "Malcolm Reynolds"},
        %{dob: "February 15, 2484", name: "Zoe Washburne"}
      ]

  Without separators:

      iex> ascii_table = """
      ...> |------------------+--------------------|
      ...> | name | dob |
      ...> |------------------+--------------------|
      ...> | Malcolm Reynolds | September 20, 2468 |
      ...> | Zoe Washburne | February 15, 2484 |
      ...> |------------------+--------------------|
      ...> """
      ...> Tabular.to_list_of_maps(ascii_table)
      [
        %{dob: "September 20, 2468", name: "Malcolm Reynolds"},
        %{dob: "February 15, 2484", name: "Zoe Washburne"}
      ]
  '''
  def to_list_of_maps(ascii_table) do
    [headers | rows] = to_list_of_lists(ascii_table)
    # NOTE(review): String.to_atom/1 creates atoms from table headers; only
    # use with trusted input (atoms are never garbage collected).
    headers_as_atoms = Enum.map(headers, &String.to_atom(&1))

    rows
    |> Enum.map(fn row ->
      Enum.zip(headers_as_atoms, row)
      |> Map.new()
    end)
  end

  # NOTE(review): two consecutive @doc attributes — the ~S''' block below
  # overwrites `@doc deprecated:`, losing the deprecation metadata; the
  # idiomatic fix would be the @deprecated module attribute.
  @doc deprecated: "Use to_list_of_lists(table, header: false) instead"
  @doc ~S'''
  Converts an ascii table to a list of lists, omitting the header row.

  ## Examples

      iex> ascii_table = """
      ...> |---------------+--------------------|
      ...> | name | dob |
      ...> |---------------+--------------------|
      ...> | Malcolm | September 20, 2468 |
      ...> | Reynolds | |
      ...> |---------------+--------------------|
      ...> | Zoe Washburne | February 15, 2484 |
      ...> |---------------+--------------------|
      ...> """
      ...> Tabular.to_list_of_lists_no_header(ascii_table)
      [
        ["Malcolm Reynolds", "September 20, 2468"],
        ["Zoe Washburne", "February 15, 2484"]
      ]

  Without separators:

      iex> ascii_table = """
      ...> |------------------+--------------------|
      ...> | name | dob |
      ...> |------------------+--------------------|
      ...> | Malcolm Reynolds | September 20, 2468 |
      ...> | Zoe Washburne | February 15, 2484 |
      ...> |------------------+--------------------|
      ...> """
      ...> Tabular.to_list_of_lists_no_header(ascii_table)
      [
        ["Malcolm Reynolds", "September 20, 2468"],
        ["Zoe Washburne", "February 15, 2484"]
      ]
  '''
  def to_list_of_lists_no_header(ascii_table) do
    to_list_of_lists(ascii_table, header: false)
  end

  @doc ~S'''
  Converts an ascii table to a list of lists.

  Pass `header: false` to drop the header row. Cell values are trimmed and
  the special strings "true", "false", "nil" and ":atom" are converted to
  their Elixir counterparts.

  ## Examples

      iex> ascii_table = """
      ...> |---------------+--------------------|
      ...> | name | dob |
      ...> |---------------+--------------------|
      ...> | Malcolm | September 20, 2468 |
      ...> | Reynolds | |
      ...> |---------------+--------------------|
      ...> | Zoe Washburne | February 15, 2484 |
      ...> |---------------+--------------------|
      ...> """
      ...> Tabular.to_list_of_lists(ascii_table)
      [
        ["name", "dob"],
        ["Malcolm Reynolds", "September 20, 2468"],
        ["Zoe Washburne", "February 15, 2484"]
      ]
      iex> Tabular.to_list_of_lists(ascii_table, header: false)
      [
        ["Malcolm Reynolds", "September 20, 2468"],
        ["Zoe Washburne", "February 15, 2484"]
      ]

  Without separators:

      iex> ascii_table = """
      ...> |------------------+--------------------|
      ...> | name | dob |
      ...> |------------------+--------------------|
      ...> | Malcolm Reynolds | September 20, 2468 |
      ...> | Zoe Washburne | February 15, 2484 |
      ...> |------------------+--------------------|
      ...> """
      ...> Tabular.to_list_of_lists(ascii_table)
      [
        ["name", "dob"],
        ["Malcolm Reynolds", "September 20, 2468"],
        ["Zoe Washburne", "February 15, 2484"]
      ]
      iex> Tabular.to_list_of_lists(ascii_table, header: false)
      [
        ["Malcolm Reynolds", "September 20, 2468"],
        ["Zoe Washburne", "February 15, 2484"]
      ]
  '''
  def to_list_of_lists(ascii_table, opts \\ [header: true]) do
    rows =
      ascii_table
      |> lines()
      |> cell_line_groups(has_separators?: has_separators?(ascii_table))
      |> trimmed_and_grouped_cell_contents()
      |> folded_cell_contents()
      |> specials()

    if opts[:header], do: rows, else: Enum.drop(rows, 1)
  end

  # Splits the table into trimmed lines.
  @doc false
  def lines(ascii_table) do
    ascii_table
    |> String.trim()
    |> String.split("\n")
  end

  # Groups content lines per logical row. With separators, consecutive
  # content lines between separator lines form one group (a multi-line row);
  # separator-only chunks are discarded.
  @doc false
  def cell_line_groups(lines, has_separators?: true) do
    lines
    |> Enum.chunk_by(fn line ->
      Regex.match?(@row_splitter_re, line)
    end)
    |> Enum.filter(fn group ->
      !(length(group) == 1 &&
          Regex.match?(@row_splitter_re, hd(group)))
    end)
  end

  # Without separators, each content line is its own single-line group.
  @doc false
  def cell_line_groups(lines, has_separators?: false) do
    lines
    |> Enum.reject(&(&1 =~ @row_splitter_re))
    |> Enum.map(&[&1])
  end

  # Strips the outer pipes from each line and trims each cell.
  @doc false
  def trimmed_and_grouped_cell_contents(line_groups) do
    line_groups
    |> Enum.map(fn line_group ->
      line_group
      |> Enum.map(fn line ->
        [_all, trimmed_line] = Regex.run(~r/^\s*\|\s*(.*)\s*\|\s*$/, line)

        trimmed_line
        |> String.split("|")
        |> Enum.map(fn cell_content ->
          String.trim(cell_content)
        end)
      end)
    end)
  end

  # Collapses each multi-line row group into a single row of cells.
  @doc false
  def folded_cell_contents(grouped_cell_contents) do
    grouped_cell_contents
    |> Enum.map(fn row_contents ->
      row_contents
      |> fold_contents_if_necessary
    end)
  end

  # Multi-line row: join each column's non-empty fragments with a space.
  @doc false
  def fold_contents_if_necessary(row_contents) when length(row_contents) > 1 do
    Enum.zip(row_contents)
    |> Enum.map(fn foldables ->
      foldables
      |> Tuple.to_list()
      |> Enum.filter(fn text -> text != "" end)
      |> Enum.join(" ")
    end)
  end

  # Single-line row: unwrap the group.
  @doc false
  def fold_contents_if_necessary(row_contents) do
    hd(row_contents)
  end

  # Applies special-string conversion to every cell.
  @doc false
  def specials(rows_of_columns) do
    rows_of_columns
    |> Enum.map(fn row ->
      row
      |> Enum.map(fn text ->
        special(text)
      end)
    end)
  end

  # Converts well-known cell strings to Elixir terms; everything else
  # passes through unchanged.
  @doc false
  def special("true"), do: true
  def special("false"), do: false
  def special("nil"), do: nil
  def special(""), do: ""
  def special(":" <> rest), do: String.to_atom(rest)
  def special(not_special), do: not_special

  # Detects inter-row separator lines by skipping the top border, the header
  # and the header separator (first three lines) plus the bottom border.
  # NOTE(review): for a one-row table with separators this leaves no
  # separator line to detect — presumably harmless since a single row needs
  # no folding; verify if that case matters.
  @doc false
  def has_separators?(ascii_table) do
    [_, _, _ | lines] = lines(ascii_table)
    {_, lines} = List.pop_at(lines, -1)

    Enum.any?(lines, &(&1 =~ @row_splitter_re))
  end
end
|
lib/tabular.ex
| 0.696165
| 0.510863
|
tabular.ex
|
starcoder
|
defmodule Financials do
alias Decimal, as: D
@moduledoc """
A financial modeling library for elixir. Contains functions that can be used as building blocks for complex financial modeling.
## Usage
Requests return a 2-tuple with the standard `:ok` or `:error` status.
```elixir
# Successful response
{:ok, result} = Financials.debt_to_equity(100_000, 1_000_000)
# Unsuccessful response due to argument type
{:error, "Arguments must be numerical"} = Financials.net_income(100_000, "1_000_000")
# Unsuccessful response due to argument value
{:error, "total_equity can't be zero (Divide by zero error)"} = Financials.net_income(100_000, 0)
```
## Functions
"""
##--------------------------------------------------------------
## CONSTANTS
##--------------------------------------------------------------
@two_decimal_precision 2
@arg_msg "Arguments must be decimals"
@zero_error "can't equal zero (Divide by zero error)"
@doc """
Net Income Calculation

`net income = total revenues - total expenses`
"""
def net_income(%Decimal{} = total_revenues, %Decimal{} = total_expenses) do
  {:ok, D.sub(total_revenues, total_expenses)}
end

def net_income(_, _), do: {:error, @arg_msg}
@doc """
Net Earnings Calculation

`earnings = net income - preferred dividends`
"""
def earnings(%Decimal{} = net_income, %Decimal{} = preferred_dividends) do
  {:ok, D.sub(net_income, preferred_dividends)}
end

def earnings(_, _), do: {:error, @arg_msg}
@doc """
Retained Earnings Calculation

`retained earnings = beginning retained earnings + net income
- cash dividends - stock dividends`
"""
def retained_earnings(
      %Decimal{} = beginning_period_retained_earnings,
      %Decimal{} = net_income,
      %Decimal{} = cash_dividends,
      %Decimal{} = stock_dividends
    ) do
  earnings = D.add(beginning_period_retained_earnings, net_income)
  # Both cash AND stock dividends reduce retained earnings. The previous
  # implementation computed cash_dividends - stock_dividends, which flipped
  # the sign of the stock-dividend deduction.
  dividend_expenses = D.add(cash_dividends, stock_dividends)
  {:ok, D.sub(earnings, dividend_expenses)}
end

def retained_earnings(_, _, _, _),
  do: {:error, @arg_msg}
@doc """
Operating Cash Flow Calculation

`ocf = (operating income + depreciation) - (taxes + change in working capital)`
"""
def ocf(
      %Decimal{} = operating_income,
      %Decimal{} = depreciation,
      %Decimal{} = taxes,
      %Decimal{} = change_in_working_capital
    ) do
  cash_in = D.add(operating_income, depreciation)
  cash_out = D.add(taxes, change_in_working_capital)
  {:ok, D.sub(cash_in, cash_out)}
end

def ocf(_, _, _, _), do: {:error, @arg_msg}
@doc """
Return on Revenue Calculation

`ror = net income / sales revenue`; `0 / 0` is defined as `0`.
"""
def ror(net_income, sales_revenue)

# Special case: zero income over zero revenue yields zero, not an error.
def ror(%Decimal{coef: 0}, %Decimal{coef: 0}), do: {:ok, D.new(0)}

def ror(_, %Decimal{coef: 0}), do: {:error, "sales_revenue #{@zero_error}"}

def ror(%Decimal{} = net_income, %Decimal{} = sales_revenue) do
  {:ok, D.div(net_income, sales_revenue)}
end

def ror(_, _), do: {:error, @arg_msg}
@doc """
Return on Sales Calculation

`ros = operating profit / net sales`
"""
def ros(operating_profit, net_sales)

def ros(_, %Decimal{coef: 0}), do: {:error, "net_sales #{@zero_error}"}

def ros(%Decimal{} = operating_profit, %Decimal{} = net_sales) do
  {:ok, D.div(operating_profit, net_sales)}
end

def ros(_, _), do: {:error, @arg_msg}
@doc """
Cost of Goods Sold Calculation

`cogs = beginning inventory + purchases - ending inventory`
"""
def cogs(%Decimal{} = beginning_inventory, %Decimal{} = purchases, %Decimal{} = ending_inventory) do
  net_purchases = D.sub(purchases, ending_inventory)
  {:ok, D.add(beginning_inventory, net_purchases)}
end

def cogs(_, _, _), do: {:error, @arg_msg}
@doc """
EBIT -- Earnings Before Interest and Taxes Calculation

`ebit = revenue - cogs - operating expenses`
"""
def ebit(%Decimal{} = revenue, %Decimal{} = cogs, %Decimal{} = operating_expenses) do
  # The previous nesting computed revenue - (cogs - operating_expenses),
  # which ADDED operating expenses back into earnings. EBIT subtracts both
  # COGS and operating expenses from revenue.
  {:ok, revenue |> D.sub(cogs) |> D.sub(operating_expenses)}
end

def ebit(_, _, _),
  do: {:error, @arg_msg}
@doc """
EBITA -- Earnings Before Interest, Taxes, and Amortization Calculation

`ebita = revenue - cogs - operating expenses + amortization`
(amortization is added back, assuming it is included in operating expenses)
"""
def ebita(
      %Decimal{} = revenue,
      %Decimal{} = cogs,
      %Decimal{} = operating_expenses,
      %Decimal{} = amortization
    ) do
  # The previous nesting computed revenue - (cogs - (opex + amortization)),
  # which flipped the sign of operating expenses. Operating expenses are a
  # deduction; amortization is the add-back.
  {:ok, revenue |> D.sub(cogs) |> D.sub(operating_expenses) |> D.add(amortization)}
end

def ebita(_, _, _, _),
  do: {:error, @arg_msg}
@doc """
EBITDA -- Earnings Before Interest, Taxes, Depreciation and Amortization Calculation

`ebitda = net income + interest expense + taxes + depreciation + amortization`
"""
def ebitda(
      %Decimal{} = net_income,
      %Decimal{} = interest_expense,
      %Decimal{} = taxes,
      %Decimal{} = depreciation,
      %Decimal{} = amortization
    ) do
  # All four items are non-cash or pre-earnings charges added back to net income.
  add_backs = D.add(interest_expense, D.add(taxes, D.add(depreciation, amortization)))
  {:ok, D.add(net_income, add_backs)}
end

def ebitda(_, _, _, _, _), do: {:error, @arg_msg}
@doc """
Receivable Turnover Ratio Calculation

`receivable turnover = net credit sales / average accounts receivable`
"""
def receivable_turnover_ratio(net_credit_sales, average_accounts_receivable)

def receivable_turnover_ratio(_, %Decimal{coef: 0}) do
  {:error, "avg_accounts_receivable #{@zero_error}"}
end

def receivable_turnover_ratio(%Decimal{} = net_credit_sales, %Decimal{} = average_accounts_receivable) do
  {:ok, D.div(net_credit_sales, average_accounts_receivable)}
end

def receivable_turnover_ratio(_, _), do: {:error, @arg_msg}
@doc """
Accumulated Depreciation to Fixed Assets Calculation

`ratio = accumulated depreciation / total fixed assets`
"""
def accumulated_depreciation_to_fixed_assets(accumulated_depreciation, total_fixed_assets)

def accumulated_depreciation_to_fixed_assets(_, %Decimal{coef: 0}) do
  {:error, "total_fixed_assets #{@zero_error}"}
end

def accumulated_depreciation_to_fixed_assets(
      %Decimal{} = accumulated_depreciation,
      %Decimal{} = total_fixed_assets
    ),
    do: {:ok, D.div(accumulated_depreciation, total_fixed_assets)}

def accumulated_depreciation_to_fixed_assets(_, _), do: {:error, @arg_msg}
@doc """
Asset Coverage Ratio Calculation

`coverage = ((total assets - intangibles) - (current liabilities - short-term debt)) / total debt`
"""
def asset_coverage(total_assets, intangible_assets, current_liabilities, short_term_debt, total_debt)

def asset_coverage(_, _, _, _, %Decimal{coef: 0}) do
  {:error, "total_debt #{@zero_error}"}
end

def asset_coverage(
      %Decimal{} = total_assets,
      %Decimal{} = intangible_assets,
      %Decimal{} = current_liabilities,
      %Decimal{} = short_term_debt,
      %Decimal{} = total_debt
    ) do
  tangible_assets = D.sub(total_assets, intangible_assets)
  non_debt_liabilities = D.sub(current_liabilities, short_term_debt)
  {:ok, D.div(D.sub(tangible_assets, non_debt_liabilities), total_debt)}
end

def asset_coverage(_, _, _, _, _), do: {:error, @arg_msg}
@doc """
Asset Turnover Ratio Calculation

`asset turnover = net sales / average total sales`
"""
def asset_turnover(net_sales, average_total_sales)

def asset_turnover(_, %Decimal{coef: 0}) do
  {:error, "average_total_sales #{@zero_error}"}
end

def asset_turnover(%Decimal{} = net_sales, %Decimal{} = average_total_sales) do
  {:ok, D.div(net_sales, average_total_sales)}
end

def asset_turnover(_, _), do: {:error, @arg_msg}
@doc """
Average Inventory Period Calculation

`average inventory period = days / inventory turnover`
"""
def average_inventory_period(days, inventory_turnover)

def average_inventory_period(_, %Decimal{coef: 0}) do
  {:error, "inventory_turnover #{@zero_error}"}
end

def average_inventory_period(%Decimal{} = days, %Decimal{} = inventory_turnover) do
  {:ok, D.div(days, inventory_turnover)}
end

def average_inventory_period(_, _), do: {:error, @arg_msg}
@doc """
Average Payment Period Calculation

`average payment period = average accounts payable / (total credit purchases / days)`

`days` may be given as an integer, float or `Decimal`.
"""
def average_payment_period(average_accounts_payable, total_credit_purchases, days)

def average_payment_period(_, %Decimal{coef: 0} = _, 0),
  do: {:error, "days & total_credit_purchases #{@zero_error}"}

def average_payment_period(_, %Decimal{coef: 0} = _, %Decimal{coef: 0} = _),
  do: {:error, "days & total_credit_purchases #{@zero_error}"}

def average_payment_period(_, %Decimal{coef: 0} = _, _),
  do: {:error, "total_credit_purchases #{@zero_error}"}

# Normalize integer/float day counts to Decimal before computing.
def average_payment_period(
      %Decimal{} = average_accounts_payable,
      %Decimal{} = total_credit_purchases,
      days
    ) when is_integer(days)
  do
  average_payment_period(average_accounts_payable, total_credit_purchases, D.new(days))
end

def average_payment_period(
      %Decimal{} = average_accounts_payable,
      %Decimal{} = total_credit_purchases,
      days
    ) when is_float(days)
  do
  average_payment_period(average_accounts_payable, total_credit_purchases, D.from_float(days))
end

def average_payment_period(_, _, %Decimal{coef: 0} = _),
  do: {:error, "days #{@zero_error}"}

def average_payment_period(
      %Decimal{} = average_accounts_payable,
      %Decimal{} = total_credit_purchases,
      %Decimal{} = days
    ) do
  # `days` is already a %Decimal{} here; the previous `D.new(days)` wrapper
  # was redundant and raises under decimal >= 2.0, where Decimal.new/1 no
  # longer accepts a Decimal argument.
  credit_days = D.div(total_credit_purchases, days)
  {:ok, D.div(average_accounts_payable, credit_days)}
end

def average_payment_period(_, _, _),
  do: {:error, @arg_msg}
@doc """
Break Even Analysis Calculation

`break even units = fixed costs / (sales price per unit - variable cost per unit)`
"""
def break_even_analysis(
      %Decimal{} = fixed_costs,
      %Decimal{} = sales_price_per_unit,
      %Decimal{} = variable_cost_per_unit
    ) do
  contribution_margin = D.sub(sales_price_per_unit, variable_cost_per_unit)

  # The original used `cond` with a truthy `:else ->` atom as the catch-all;
  # a two-branch decision is an `if`, and `true ->` is the idiomatic default.
  if D.eq?(contribution_margin, 0) do
    {:error, "sales_price_per_unit - variable_cost_per_unit #{@zero_error}"}
  else
    {:ok, D.div(fixed_costs, contribution_margin)}
  end
end

def break_even_analysis(_, _, _),
  do: {:error, @arg_msg}
@doc """
Capitalization Ratio Calculation
"""
def capitalization_ratio(total_debt, shareholders_equity)
def capitalization_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "shareholders_equity #{@zero_error}"}
def capitalization_ratio(%Decimal{} = total_debt, %Decimal{} = shareholders_equity),
do: {:ok, D.div(total_debt, D.add(total_debt, shareholders_equity))}
def capitalization_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Cash Conversion Cycle Calculation
"""
def cash_conversion_cycle(
%Decimal{} = days_inventory_outstanding,
%Decimal{} = days_sales_outstanding,
%Decimal{} = days_payables_outstanding
) do
{:ok, D.add(days_inventory_outstanding, D.add(days_sales_outstanding, days_payables_outstanding))}
end
def cash_conversion_cycle(_, _, _),
do: {:error, @arg_msg}
@doc """
Cash Flow Coverage Ratio Calculation
"""
def cash_flow_coverage(operating_cash_flows, total_debt)
def cash_flow_coverage(_, %Decimal{coef: 0} = _),
do: {:error, "total_debt #{@zero_error}"}
def cash_flow_coverage(%Decimal{} = operating_cash_flows, %Decimal{} = total_debt) do
{:ok, D.div(operating_cash_flows, total_debt)}
end
def cash_flow_coverage(_, _),
do: {:error, @arg_msg}
@doc """
Cash Ratio Calculation
"""
def cash_ratio(cash, cash_equivalents, total_current_liabilities)
def cash_ratio(_, _, %Decimal{coef: 0} = _),
do: {:error, "cash_equivalents #{@zero_error}"}
def cash_ratio(
%Decimal{} = cash,
%Decimal{} = cash_equivalents,
%Decimal{} = total_current_liabilities
) do
{:ok, D.div(D.add(cash, cash_equivalents), total_current_liabilities)}
end
def cash_ratio(_, _, _),
do: {:error, @arg_msg}
@doc """
Compound Annual Growth Rate Calculation
"""
def cagr(beginning_investment_value, ending_investment_value, years)
def cagr(%Decimal{coef: 0} = _, _, 0),
do: {:error, "beginning_investment_amount & years #{@zero_error}"}
def cagr(%Decimal{coef: 0} = _, _, %Decimal{coef: 0} = _),
do: {:error, "beginning_investment_amount & years #{@zero_error}"}
def cagr(%Decimal{coef: 0} = _, _, _),
do: {:error, "beginning_investment_amount #{@zero_error}"}
def cagr(
%Decimal{} = beginning_investment_value,
%Decimal{} = ending_investment_value,
years
) when is_integer(years) do
cagr(beginning_investment_value, ending_investment_value, D.new(years))
end
def cagr(
%Decimal{} = beginning_investment_value,
%Decimal{} = ending_investment_value,
years
) when is_float(years) do
cagr(beginning_investment_value, ending_investment_value, D.from_float(years))
end
def cagr(_, _, %Decimal{coef: 0} = _),
do: {:error, "years #{@zero_error}"}
def cagr(
%Decimal{} = beginning_investment_value,
%Decimal{} = ending_investment_value,
%Decimal{} = years
)
do
value_ratio = D.div(ending_investment_value, beginning_investment_value)
time_ratio = D.div(D.new(1), years)
res = :math.pow(D.to_float(value_ratio), D.to_float(time_ratio))
{:ok, D.sub(D.new(res), D.new(1))}
end
def cagr(_, _, _),
do: {:error, @arg_msg}
@doc """
Contribution Margin Calculation
"""
def contribution_margin(%Decimal{} = net_sales, %Decimal{} = variable_costs),
do: {:ok, D.sub(net_sales, variable_costs)}
def contribution_margin(_, _),
do: {:error, @arg_msg}
@doc """
Current Ratio Calculation
"""
def current_ratio(current_assets, current_liabilities)
def current_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "current_liabilities #{@zero_error}"}
def current_ratio(%Decimal{} = current_assets, %Decimal{} = current_liabilities), do:
{:ok, D.div(current_assets, current_liabilities)}
def current_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Days Payable Outstanding Calculation
"""
def dpo(accounts_payable, cost_of_sales, days)
def dpo(_, %Decimal{coef: 0} = _, %Decimal{coef: 0} = _),
do: {:error, "cost_of_sales & days #{@zero_error}"}
def dpo(_, %Decimal{coef: 0} = _, _),
do: {:error, "cost_of_sales #{@zero_error}"}
def dpo(%Decimal{} = accounts_payable, %Decimal{} = cost_of_sales, days) when is_integer(days),
do: dpo(accounts_payable, cost_of_sales, D.new(days))
def dpo(%Decimal{} = accounts_payable, %Decimal{} = cost_of_sales, days) when is_float(days),
do: dpo(accounts_payable, cost_of_sales, D.from_float(days))
def dpo(_, _, %Decimal{coef: 0} = _),
do: {:error, "days #{@zero_error}"}
def dpo(%Decimal{} = accounts_payable, %Decimal{} = cost_of_sales, %Decimal{} = days) do
cost_of_sales_per_day = D.div(cost_of_sales, days)
{:ok, D.div(accounts_payable, cost_of_sales_per_day)}
end
def dpo(_, _, _),
do: {:error, @arg_msg}
@doc """
Days Sales in Inventory Calculation
"""
def dsi(ending_inventory, cogs)
def dsi(_, %Decimal{coef: 0} = _),
do: {:error, "cogs #{@zero_error}"}
def dsi(%Decimal{} = ending_inventory, %Decimal{} = cogs),
do: {:ok, D.mult(D.div(ending_inventory,cogs), 365)}
def dsi(_, _),
do: {:error, @arg_msg}
@doc """
Days Sales Outstanding Calculation
"""
def dso(accounts_receivable, net_credit_sales)
def dso(_, %Decimal{coef: 0} = _),
do: {:error, "net_credit_sales #{@zero_error}"}
def dso(%Decimal{} = accounts_receivable, %Decimal{} = net_credit_sales), do:
{:ok, D.mult(D.div(accounts_receivable, net_credit_sales), 365)}
def dso(_, _),
do: {:error, @arg_msg}
@doc """
Debt Ratio Calculation
"""
def debt_ratio(total_liabilities, total_assets)
def debt_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "total_assets #{@zero_error}"}
def debt_ratio(%Decimal{} = total_liabilities, %Decimal{} = total_assets),
do: {:ok, D.div(total_liabilities, total_assets)}
def debt_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Debt Service Coverage Ratio
"""
def dscr(operating_income, total_debt_service_costs)
def dscr(_, %Decimal{coef: 0} = _),
do: {:error, "total_debt_service_costs #{@zero_error}"}
def dscr(%Decimal{} = operating_income, %Decimal{} = total_debt_service_costs),
do: {:ok, (Float.round(operating_income/total_debt_service_costs, @two_decimal_precision))}
def dscr(_, _),
do: {:error, @arg_msg}
@doc """
Debt to Asset Ratio Calculation
"""
def debt_to_asset(total_debt, total_assets)
def debt_to_asset(_, %Decimal{coef: 0} = _),
do: {:error, "total_assets #{@zero_error}"}
def debt_to_asset(%Decimal{} = total_debt, %Decimal{} = total_assets),
do: {:ok, D.div(total_debt, total_assets)}
def debt_to_asset(_, _),
do: {:error, @arg_msg}
@doc """
Debt ot Capital Ratio Calculation
"""
def debt_to_capital(total_debt, shareholders_equity)
def debt_to_capital(_, %Decimal{coef: 0} = _),
do: {:error, "shareholders_equity #{@zero_error}"}
def debt_to_capital(%Decimal{} = total_debt, %Decimal{} = shareholders_equity),
do: {:ok, D.div(total_debt, D.add(total_debt, shareholders_equity))}
def debt_to_capital(_, _),
do: {:error, @arg_msg}
@doc """
Debt to Equity Ratio Calculation
"""
def debt_to_equity(total_liabilities, total_equity)
def debt_to_equity(_, %Decimal{coef: 0} = _),
do: {:error, "total_equity #{@zero_error}"}
def debt_to_equity(%Decimal{} = total_liabilities, %Decimal{} = total_equity),
do: {:ok, D.div(total_liabilities, total_equity)}
def debt_to_equity(_, _),
do: {:error, @arg_msg}
@doc """
Debt to Income Ratio Calculation
"""
def dti(total_monthly_debt_payments, gross_monthly_income)
def dti(_, %Decimal{coef: 0} = _),
do: {:error, "gross_monthly_income #{@zero_error}"}
def dti(%Decimal{} = total_monthly_debt_payments, %Decimal{} = gross_monthly_income),
do: {:ok, D.div(total_monthly_debt_payments, gross_monthly_income)}
def dti(_, _),
do: {:error, @arg_msg}
@doc """
Defensive Interval Ratio Calculation
"""
def dir(defensive_assets, daily_operational_expenses)
def dir(_, %Decimal{coef: 0} = _),
do: {:error, "daily_operational_expenses #{@zero_error}"}
def dir(%Decimal{} = defensive_assets, %Decimal{} = daily_operational_expenses),
do: {:ok, D.div(defensive_assets, daily_operational_expenses)}
def dir(_, _),
do: {:error, @arg_msg}
@doc """
Basic Earnings Per Share Calculation
"""
def eps_basic(earnings, shares_outstanding)
def eps_basic(_, %Decimal{coef: 0} = _),
do: {:error, "shares_outstanding #{@zero_error}"}
def eps_basic(%Decimal{} = earnings, %Decimal{} = shares_outstanding),
do: {:ok, D.div(earnings, shares_outstanding)}
def eps_basic(_, _),
do: {:error, @arg_msg}
@doc """
Diluted Earnings Per Share Calculation
"""
def eps_diluted(%Decimal{} = earnings, %Decimal{} = shares_outstanding, %Decimal{} = diluted_shares) do
if D.lt?(shares_outstanding, 1) or D.lt?(diluted_shares, 0) do
{:error, "shares #{@zero_error}"}
else
shares = D.add(shares_outstanding, diluted_shares)
{:ok, D.div(earnings, shares)}
end
end
def eps_diluted(_, _, _),
do: {:error, @arg_msg}
@doc """
Pro Forma Earnings Per Share Calculation
"""
def eps_pro_forma(
%Decimal{} = acquirers_net_income,
%Decimal{} = targets_net_income,
%Decimal{} = incremental_adjustments,
%Decimal{} = shares_outstanding,
%Decimal{} = diluted_shares
) do
if D.lt?(shares_outstanding, 1) or D.lt?(diluted_shares, 0) do
{:error, "shares #{@zero_error}"}
else
earnings = D.add(acquirers_net_income, D.add(targets_net_income, incremental_adjustments))
shares = D.add(shares_outstanding, diluted_shares)
{:ok, D.div(earnings, shares)}
end
end
def eps_pro_forma(_, _, _, _, _),
do: {:error, @arg_msg}
@doc """
Book Value Earnings Per Share Calculation
"""
def eps_book_value(total_equity, preferred_equity, shares_outstanding)
def eps_book_value(_, _, %Decimal{coef: 0} = _),
do: {:error, "shares_outstanding #{@zero_error}"}
def eps_book_value(%Decimal{} = total_equity, %Decimal{} = preferred_equity, %Decimal{} = shares_outstanding),
do: {:ok, D.div(D.sub(total_equity, preferred_equity), shares_outstanding)}
def eps_book_value(_, _, _),
do: {:error, @arg_msg}
@doc """
Retained Earnings Per Share Calculation
"""
def eps_retained(retained_earnings, shares_outstanding)
def eps_retained(_, %Decimal{coef: 0} = _),
do: {:error, "shares_outstanding #{@zero_error}"}
def eps_retained(%Decimal{} = retained_earnings, %Decimal{} = shares_outstanding),
do: {:ok, D.div(retained_earnings, shares_outstanding)}
def eps_retained(_, _),
do: {:error, @arg_msg}
@doc """
Cash Earnings Per Share Calculation
"""
def eps_cash(operating_cash_flow, shares_outstanding)
def eps_cash(_, %Decimal{coef: 0} = _),
do: {:error, "shares_outstanding #{@zero_error}"}
def eps_cash(%Decimal{} = operating_cash_flow, %Decimal{} = shares_outstanding),
do: {:ok, D.div(operating_cash_flow, shares_outstanding)}
def eps_cash(_, _),
do: {:error, @arg_msg}
@doc """
Price to Earnings Ratio Calculation
"""
def pe_ratio(price, earnings_per_share)
def pe_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "earnings_per_share #{@zero_error}"}
def pe_ratio(%Decimal{} = price, %Decimal{} = earnings_per_share), do:
{:ok, D.div(price, earnings_per_share)}
def pe_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Price to Earnings to Growth Ratio Calculation
"""
def peg_ratio(price_to_earnings, earnings_growth)
def peg_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "earnings_growth #{@zero_error}"}
def peg_ratio(%Decimal{} = price_to_earnings, %Decimal{} = earnings_growth),
do: {:ok, D.div(price_to_earnings, earnings_growth)}
def peg_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Dividend Payout Calculation
"""
def dividend_payout(net_dividends, net_income)
def dividend_payout(_, %Decimal{coef: 0} = _),
do: {:error, "net_income #{@zero_error}"}
def dividend_payout(%Decimal{} = net_dividends, %Decimal{} = net_income),
do: {:ok, D.div(net_dividends, net_income)}
def dividend_payout(_, _),
do: {:error, @arg_msg}
@doc """
Dividend Yield Calculation
"""
def dividend_yield(cash_dividends_per_share, market_value_per_share)
def dividend_yield(_, %Decimal{coef: 0} = _),
do: {:error, "market_value_per_share #{@zero_error}"}
def dividend_yield(%Decimal{} = cash_dividends_per_share, %Decimal{} = market_value_per_share),
do: {:ok, D.div(cash_dividends_per_share, market_value_per_share)}
def dividend_yield(_, _),
do: {:error, @arg_msg}
@doc """
DuPont Analysis Calculation
"""
def du_pont_analysis(%Decimal{} = profit_margin, %Decimal{} = total_asset_turnover, %Decimal{} = financial_leverage),
do: {:ok, D.mult(profit_margin, D.mult(total_asset_turnover, financial_leverage))}
def du_pont_analysis(_, _, _),
do: {:error, @arg_msg}
@doc """
Enterprise Value Calculation
"""
def ev(%Decimal{} = market_capitalization, %Decimal{} = debt, %Decimal{} = current_cash),
do: {:ok, D.sub(D.add(market_capitalization, debt), current_cash)}
def ev(_, _, _),
do: {:error, @arg_msg}
@doc """
Equity Multiplier Calculation
"""
def equity_multiplier(total_assets, total_stockholders_equity)
def equity_multiplier(_, %Decimal{coef: 0} = _),
do: {:error, "total_stockholders_equity #{@zero_error}"}
def equity_multiplier(%Decimal{} = total_assets, %Decimal{} = total_stockholders_equity),
do: {:ok, D.div(total_assets, total_stockholders_equity)}
def equity_multiplier(_, _),
do: {:error, @arg_msg}
@doc """
Equity Ratio Calculation
"""
def equity_ratio(total_equity, total_assets)
def equity_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "total_assets #{@zero_error}"}
def equity_ratio(%Decimal{} = total_equity, %Decimal{} = total_assets),
do: {:ok, D.div(total_equity, total_assets)}
def equity_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Expense Ratio Calculation
"""
def expense_ratio(operating_expenses, average_value_of_fund_assets)
def expense_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "average_value_of_fund_assets #{@zero_error}"}
def expense_ratio(%Decimal{} = operating_expenses, %Decimal{} = average_value_of_fund_assets),
do: {:ok, D.div(operating_expenses, average_value_of_fund_assets)}
def expense_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Fixed Asset Turnover Ratio
"""
def fixed_asset_turnover_ratio(
%Decimal{} = net_sales,
%Decimal{} = fixed_assets,
%Decimal{} = accumulated_depreciation
) do
depreciated_assets = D.sub(fixed_assets, accumulated_depreciation)
cond do
D.eq?(depreciated_assets, 0) -> {:error, "fixed_assets - accumulated_depreciation #{@zero_error}"}
:else -> {:ok, D.div(net_sales, depreciated_assets)}
end
end
def fixed_asset_turnover_ratio(_, _, _),
do: {:error, @arg_msg}
@doc """
Fixed Charge Coverage Ratio
"""
def fixed_charge_coverage_ratio(
%Decimal{} = ebit,
%Decimal{} = fixed_charges_before_taxes,
%Decimal{} = interest
) do
charges = (fixed_charges_before_taxes + interest)
cond do
D.eq?(charges, 0) -> {:error, "fixed_charges_before_taxes + interest #{@zero_error}"}
:else -> {:ok, D.div(D.add(ebit, fixed_charges_before_taxes), charges)}
end
end
def fixed_charge_coverage_ratio(_, _, _),
do: {:error, @arg_msg}
@doc """
Free Cash Flow Calculation
"""
def fcf(%Decimal{} = operating_cash_flow, %Decimal{} = capital_expenditures),
do: {:ok, D.sub(operating_cash_flow, capital_expenditures)}
def fcf(_, _),
do: {:error, @arg_msg}
@doc """
Goodwill to Assets Calculation
"""
def goodwill_to_assets(goodwill, assets)
def goodwill_to_assets(_, %Decimal{coef: 0} = _),
do: {:error, "assets #{@zero_error}"}
def goodwill_to_assets(%Decimal{} = goodwill, %Decimal{} = assets),
do: {:ok, D.div(goodwill, assets)}
def goodwill_to_assets(_, _),
do: {:error, @arg_msg}
@doc """
Gross Margin Ratio Calculation
"""
def gross_margin_ratio(gross_margin, net_sales)
def gross_margin_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "net_sales #{@zero_error}"}
def gross_margin_ratio(%Decimal{} = gross_margin, %Decimal{} = net_sales),
do: {:ok, D.div(gross_margin, net_sales)}
def gross_margin_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Gross Profit Calculation
"""
def gross_profit(%Decimal{} = total_sales, %Decimal{} = cogs),
do: {:ok, D.sub(total_sales, cogs)}
def gross_profit(_, _),
do: {:error, @arg_msg}
@doc """
Interest Coverage Ratio Calculation
"""
def interest_coverage_ratio(ebit, interest_expense)
def interest_coverage_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "interest_expense #{@zero_error}"}
def interest_coverage_ratio(%Decimal{} = ebit, %Decimal{} = interest_expense),
do: {:ok, D.div(ebit, interest_expense)}
def interest_coverage_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Inventory Turnover Ratio
"""
def inventory_turnover_ratio(cogs, average_inventory)
def inventory_turnover_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "average_inventory #{@zero_error}"}
def inventory_turnover_ratio(%Decimal{} = cogs, %Decimal{} = average_inventory), do:
{:ok, D.div(cogs, average_inventory)}
def inventory_turnover_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Loan to Value Ratio Calculation
"""
def ltv(mortgage_amount, appraised_value_of_property)
def ltv(_, %Decimal{coef: 0} = _),
do: {:error, "appraised_value_of_property #{@zero_error}"}
def ltv(%Decimal{} = mortgage_amount, %Decimal{} = appraised_value_of_property),
do: {:ok, D.div(mortgage_amount, appraised_value_of_property)}
def ltv(_, _),
do: {:error, @arg_msg}
@doc """
Long Term Debt to Total Asset Ratio Calculation
"""
def long_term_debt_to_total_asset_ratio(long_term_debt, total_assets)
def long_term_debt_to_total_asset_ratio(_, %Decimal{coef: 0} = _),
do: {:error, "total_assets #{@zero_error}"}
def long_term_debt_to_total_asset_ratio(%Decimal{} = long_term_debt, %Decimal{} = total_assets),
do: {:ok, D.div(long_term_debt, total_assets)}
def long_term_debt_to_total_asset_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Margin of Safety Calculation
"""
def margin_of_safety(%Decimal{} = actual_sales, %Decimal{} = break_even_point),
do: {:ok, D.sub(actual_sales, break_even_point)}
def margin_of_safety(_, _),
do: {:error, @arg_msg}
@doc """
Margin of Safety Ratio Calculation
"""
def margin_of_safety_ratio(actual_sales, break_even_point)
def margin_of_safety_ratio(%Decimal{coef: 0} = _, _),
do: {:error, "actual_sales #{@zero_error}"}
def margin_of_safety_ratio(%Decimal{} = actual_sales, %Decimal{} = break_even_point),
do: {:ok, D.div(D.sub(actual_sales, break_even_point), actual_sales)}
def margin_of_safety_ratio(_, _),
do: {:error, @arg_msg}
@doc """
Margin of Revenue Calculation
"""
def margin_of_revenue(change_in_total_revenues, change_in_quantity_sold)
def margin_of_revenue(_, %Decimal{coef: 0} = _),
do: {:error, "change_in_quantity_sold #{@zero_error}"}
def margin_of_revenue(%Decimal{} = change_in_total_revenues, %Decimal{} = change_in_quantity_sold),
do: {:ok, D.div(change_in_total_revenues, change_in_quantity_sold)}
def margin_of_revenue(_, _),
do: {:error, @arg_msg}
end
|
lib/financials.ex
| 0.79909
| 0.735357
|
financials.ex
|
starcoder
|
defmodule PhoenixInlineSvg.Helpers do
  @moduledoc """
  This module adds view helpers for rendering SVG files into safe HTML.

  To add the helpers, add the following to the quoted `view` in your
  `my_app_web.ex` file.

      def view do
        quote do
          use PhoenixInlineSvg.Helpers, otp_app: :my_app
        end
      end

  This will generate functions for each of your images, effectively caching
  them at compile time.

  You can call these functions like so

      # Get an image with the default collection
      svg_image("image_name")

      # Get an image and insert HTML attributes to svg tag
      svg_image("image_name", class: "elixir-is-awesome", id: "inline-svg")

      # Get an image from a different collection
      svg_image("image_name", "collection_name")

      # Get an image from a different collection and insert HTML attributes
      # to the svg tag
      svg_image("image_name", "collection_name", class: "elixir-is-awesome", id: "inline-svg")

  ## Old Way

  As an alternative this module can be imported in the quoted `view` def of
  the `my_app_web.ex` which will always pull the SVG files from the disk
  (unless you are using a caching module).

      def view do
        quote do
          import PhoenixInlineSvg.Helpers
        end
      end

  *Note:* If you are setting a custom directory for the SVG files and are
  using Exrm or Distillery, you will need to ensure that the directory you
  set is in the outputted `lib` directory of your application.

  ## Configuration

  By default SVG files are loaded from: priv/static/svg/

  The directory where SVG files are loaded from can be configured by setting
  the configuration variable:

      config :phoenix_inline_svg, dir: "some/other/dir"

  Where `some/other/dir` is a directory located in the Phoenix application
  directory.
  """

  @doc """
  The using macro precompiles the SVG images into functions.

  Using this macro requires passing your otp_app name as an argument.

  ## Examples

      # Default collection
      svg_image("image_name")
      svg_image("image_name", attrs)

      # Named collection
      svg_image("image_name", "collection_name")
      svg_image("image_name", "collection_name", attrs)
  """
  defmacro __using__([otp_app: app_name] = _opts) do
    # Resolve the configured SVG directory inside the host app's priv dir at
    # compile time; every SVG file found below it is compiled into functions.
    svgs_path =
      Application.app_dir(
        app_name,
        PhoenixInlineSvg.Utils.config_or_default(:dir, "priv/static/svg/")
      )

    svgs_path
    |> find_collection_sets()
    |> Enum.uniq()
    |> Enum.map(&create_cached_svg_image(&1))
  end

  defmacro __using__(_) do
    # Fix: corrected the typo "specifiy" in the raise message.
    raise "You must specify an OTP app!"
  end

  @doc """
  Returns a safe HTML string with the contents of the SVG file using the
  `default_collection` configuration.

  ## Examples

      <%= svg_image(@conn, "home") %>
      <%= svg_image(YourAppWeb.Endpoint, "home") %>

  Will result in the output:

  ```html
  <svg>...</svg>
  ```

  The main function is `svg_image/4`.
  """
  def svg_image(conn_or_endpoint, name) do
    svg_image(
      conn_or_endpoint,
      name,
      PhoenixInlineSvg.Utils.config_or_default(:default_collection, "generic")
    )
  end

  @doc """
  Returns a safe HTML string with the contents of the SVG file after
  inserting the given HTML attributes.

  ## Examples

      <%= svg_image(@conn, "home", class: "logo", id: "bounce-animation") %>
      <%= svg_image(YourAppWeb.Endpoint, "home", class: "logo", id: "bounce-animation") %>

  Will result in the output:

  ```html
  <svg class="logo" id="bounce-animation">...</svg>
  ```

  The main function is `svg_image/4`.
  """
  def svg_image(conn_or_endpoint, name, attrs) when is_list(attrs) do
    svg_image(
      conn_or_endpoint,
      name,
      PhoenixInlineSvg.Utils.config_or_default(:default_collection, "generic"),
      attrs
    )
  end

  @doc """
  Returns a safe HTML string with the contents of the SVG file for the given
  collection after inserting the given HTML attributes.

  ## Examples

      <%= svg_image(@conn, "user", "fontawesome") %>
      <%= svg_image(YourAppWeb.Endpoint, "user", "fontawesome") %>

  Will result in the output:

  ```html
  <svg>...</svg>
  ```

  Find SVG file inside of "icons" folder and add class "fa fa-share" and id
  "bounce-animation"

      <%= svg_image(@conn, "user", "icons", class: "fa fa-share", id: "bounce-animation") %>
      <%= svg_image(YourAppWeb.Endpoint, "user", "icons", class: "fa fa-share", id: "bounce-animation") %>

  Will result in the output:

  ```html
  <svg class="fa fa-share" id="bounce-animation">...</svg>
  ```
  """
  def svg_image(conn_or_endpoint, name, collection, attrs \\ []) do
    "#{collection}/#{name}.svg"
    |> read_svg_file(conn_or_endpoint)
    |> PhoenixInlineSvg.Utils.insert_attrs(attrs)
    |> PhoenixInlineSvg.Utils.safety_string()
  end

  # Reads an SVG file from disk, falling back to a configurable "Err"
  # placeholder SVG when the file is missing or unreadable.
  defp read_svg_from_path(path) do
    case File.read(path) do
      {:ok, result} ->
        String.trim(result)

      {:error, _} ->
        PhoenixInlineSvg.Utils.config_or_default(
          :not_found,
          "<svg viewbox='0 0 60 60'>" <>
            "<text x='0' y='40' font-size='30' font-weight='bold'" <>
            "font-family='monospace'>Err</text></svg>"
        )
    end
  end

  # Resolves the icon path relative to the OTP app owning the endpoint behind
  # the given %Plug.Conn{}.
  defp read_svg_file(icon_path, %Plug.Conn{} = conn) do
    [
      Application.app_dir(Phoenix.Controller.endpoint_module(conn).config(:otp_app)),
      PhoenixInlineSvg.Utils.config_or_default(:dir, "priv/static/svg/"),
      icon_path
    ]
    |> Path.join()
    |> read_svg_from_path()
  end

  # Same resolution, but for an endpoint module passed directly.
  defp read_svg_file(icon_path, endpoint) do
    [
      Application.app_dir(endpoint.config(:otp_app)),
      PhoenixInlineSvg.Utils.config_or_default(:dir, "priv/static/svg/"),
      icon_path
    ]
    |> Path.join()
    |> read_svg_from_path()
  end

  # Lists the sub-directories of the SVG root; each directory is one
  # "collection". Returns [] when the root cannot be listed.
  defp find_collection_sets(svgs_path) do
    case File.ls(svgs_path) do
      {:ok, listed_files} ->
        listed_files
        |> Stream.filter(fn e -> File.dir?(Path.join(svgs_path, e)) end)
        |> Enum.flat_map(&map_collection(&1, svgs_path))

      _ ->
        []
    end
  end

  # Expands one collection directory into {collection, file_path} pairs,
  # recursing into nested directories via to_file_path/1.
  defp map_collection(collection, svgs_path) do
    collection_path = Path.join(svgs_path, collection)

    collection_path
    |> File.ls!()
    |> Stream.map(&Path.join(collection_path, &1))
    |> Stream.flat_map(&to_file_path/1)
    |> Enum.map(&{collection, &1})
  end

  # Recursively flattens a directory tree into a list of file paths.
  # (Also fixes the missing space in `defp to_file_path(path)do`.)
  defp to_file_path(path) do
    if File.dir?(path) do
      path
      |> File.ls!()
      |> Stream.map(&Path.join(path, &1))
      |> Enum.flat_map(&to_file_path/1)
    else
      [path]
    end
  end

  # Compiles one SVG file into svg_image/1..3 function clauses that embed the
  # file contents. Files not matching "<collection>/<name>.svg" make
  # Regex.run/3 return nil, hd(nil) raises ArgumentError, and the rescue
  # skips the file by returning nil.
  defp create_cached_svg_image({collection, name}) do
    try do
      filename =
        hd(Regex.run(~r|.*/#{collection}/(.*)\.svg$|, name, capture: :all_but_first))

      svg = read_svg_from_path(name)

      generic_funcs =
        quote do
          def svg_image(unquote(filename)) do
            svg_image(unquote(filename), unquote(collection), [])
          end

          def svg_image(unquote(filename), opts) when is_list(opts) do
            svg_image(unquote(filename), unquote(collection), opts)
          end
        end

      explicit_funcs =
        quote do
          def svg_image(unquote(filename), unquote(collection)) do
            svg_image(unquote(filename), unquote(collection), [])
          end

          def svg_image(unquote(filename), unquote(collection), opts) do
            unquote(svg)
            |> PhoenixInlineSvg.Utils.insert_attrs(opts)
            |> PhoenixInlineSvg.Utils.safety_string()
          end
        end

      [PhoenixInlineSvg.Utils.insert_generic_funcs(generic_funcs, collection), explicit_funcs]
    rescue
      ArgumentError -> nil
    end
  end
end
|
lib/phoenix_inline_svg/helpers.ex
| 0.870101
| 0.791439
|
helpers.ex
|
starcoder
|
defmodule AWS.SageMakerFeatureStoreRuntime do
  @moduledoc """
  Contains all data plane API operations and data types for the Amazon
  SageMaker Feature Store.

  Use this API to put, delete, and retrieve (get) features from a feature
  store.

  Use the following operations to configure your `OnlineStore` and
  `OfflineStore` features, and to create and manage feature groups:

    * [CreateFeatureGroup](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_CreateFeatureGroup.html)
    * [DeleteFeatureGroup](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_DeleteFeatureGroup.html)
    * [DescribeFeatureGroup](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_DescribeFeatureGroup.html)
    * [ListFeatureGroups](https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_ListFeatureGroups.html)
  """

  alias AWS.Client
  alias AWS.Request

  # Static service descriptor consumed by AWS.Request.request_rest/9.
  def metadata do
    %AWS.ServiceMetadata{
      abbreviation: nil,
      api_version: "2020-07-01",
      content_type: "application/x-amz-json-1.1",
      credential_scope: nil,
      endpoint_prefix: "featurestore-runtime.sagemaker",
      global?: false,
      protocol: "rest-json",
      service_id: "SageMaker FeatureStore Runtime",
      signature_version: "v4",
      signing_name: "sagemaker",
      target_prefix: nil
    }
  end

  @doc """
  Deletes a `Record` from a `FeatureGroup`.

  A new record will show up in the `OfflineStore` when the `DeleteRecord` API
  is called. This record will have a value of `True` in the `is_deleted`
  column.
  """
  def delete_record(%Client{} = client, feature_group_name, input, options \\ []) do
    path = "/FeatureGroup/#{URI.encode(feature_group_name)}"

    # Move these two fields out of the input body into the query string.
    {query_params, input} =
      Request.build_params(
        [
          {"EventTime", "EventTime"},
          {"RecordIdentifierValueAsString", "RecordIdentifierValueAsString"}
        ],
        input
      )

    Request.request_rest(client, metadata(), :delete, path, query_params, [], input, options, nil)
  end

  @doc """
  Use for `OnlineStore` serving from a `FeatureStore`.

  Only the latest records stored in the `OnlineStore` can be retrieved. If no
  Record with `RecordIdentifierValue` is found, then an empty result is
  returned.
  """
  def get_record(
        %Client{} = client,
        feature_group_name,
        feature_names \\ nil,
        record_identifier_value_as_string,
        options \\ []
      ) do
    path = "/FeatureGroup/#{URI.encode(feature_group_name)}"

    # Keep only the query parameters that were actually supplied.
    query_params =
      [
        {"FeatureName", feature_names},
        {"RecordIdentifierValueAsString", record_identifier_value_as_string}
      ]
      |> Enum.reject(fn {_name, value} -> is_nil(value) end)

    Request.request_rest(client, metadata(), :get, path, query_params, [], nil, options, nil)
  end

  @doc """
  Used for data ingestion into the `FeatureStore`.

  The `PutRecord` API writes to both the `OnlineStore` and `OfflineStore`. If
  the record is the latest record for the `recordIdentifier`, the record is
  written to both the `OnlineStore` and `OfflineStore`. If the record is a
  historic record, it is written only to the `OfflineStore`.
  """
  def put_record(%Client{} = client, feature_group_name, input, options \\ []) do
    path = "/FeatureGroup/#{URI.encode(feature_group_name)}"

    Request.request_rest(client, metadata(), :put, path, [], [], input, options, nil)
  end
end
|
lib/aws/generated/sage_maker_feature_store_runtime.ex
| 0.808597
| 0.461077
|
sage_maker_feature_store_runtime.ex
|
starcoder
|
defmodule Tint.Lab do
  @moduledoc """
  A color in the CIELAB colorspace.
  """
  @moduledoc since: "1.0.0"

  import Tint.Utils.Cast

  alias Tint.Distance

  defstruct [:lightness, :a, :b]

  @type t :: %__MODULE__{
          lightness: float,
          a: float,
          b: float
        }

  @doc """
  Builds a new Lab color from the lightness, a and b color channels. Each
  channel is cast to a float via `cast_value!/2`.
  """
  @spec new(number | String.t(), number | String.t(), number | String.t()) :: t
  def new(lightness, a, b) do
    struct!(__MODULE__,
      lightness: cast_value!(lightness, :float),
      a: cast_value!(a, :float),
      b: cast_value!(b, :float)
    )
  end

  @doc """
  Builds a `Tint.Lab` struct from a `{lightness, a, b}` tuple.
  """
  @spec from_tuple(
          {number | String.t(), number | String.t(), number | String.t()}
        ) :: t
  def from_tuple({lightness, a, b}), do: new(lightness, a, b)

  @doc """
  Converts a Lab color into a `{lightness, a, b}` tuple.
  """
  @spec to_tuple(t) :: {float, float, float}
  def to_tuple(%__MODULE__{lightness: lightness, a: a, b: b}) do
    {lightness, a, b}
  end

  @doc """
  Calculates the distance of two colors using the CIEDE2000 algorithm. See
  `Tint.Distance.CIEDE2000` for more details.
  """
  @spec ciede2000_distance(Tint.color(), Tint.color(), Keyword.t()) :: float
  def ciede2000_distance(color, other_color, opts \\ []) do
    Distance.distance(color, other_color, {Distance.CIEDE2000, opts})
  end

  @doc """
  Gets the nearest color from the given palette using the CIEDE2000 color
  distance algorithm.

  ## Options

    * `:weights` - A tuple defining the weights for the LCh color channels.
      Defaults to `{1, 1, 1}`.
  """
  @spec nearest_color(
          Tint.color(),
          [Tint.color()],
          Distance.distance_algorithm()
        ) :: nil | Tint.color()
  def nearest_color(color, palette, distance_algorithm \\ Distance.CIEDE2000) do
    Distance.nearest_color(color, palette, distance_algorithm)
  end

  @doc """
  Gets the n nearest colors from the given palette using the CIEDE2000 color
  distance algorithm.
  """
  @spec nearest_colors(
          Tint.color(),
          [Tint.color()],
          non_neg_integer,
          Distance.distance_algorithm()
        ) :: [Tint.color()]
  def nearest_colors(color, palette, n, distance_algorithm \\ Distance.CIEDE2000) do
    Distance.nearest_colors(color, palette, n, distance_algorithm)
  end

  defimpl Inspect do
    import Inspect.Algebra
    import Tint.Utils.Formatter

    # Renders as #Tint.Lab<L,a,b> with each channel formatted by
    # format_value/1.
    def inspect(color, _opts) do
      channels =
        [color.lightness, color.a, color.b]
        |> Enum.map(&format_value/1)
        |> Enum.intersperse(",")

      concat(["#Tint.Lab<"] ++ channels ++ [">"])
    end
  end
end
|
lib/tint/lab.ex
| 0.928749
| 0.766643
|
lab.ex
|
starcoder
|
defmodule HyperMap do
  @moduledoc """
  HyperMap provides a multiple-key Map.

  A use case could be to store a phonebook from people and then looking it up
  by either their first name or their last name without having to do a full
  lookup.
  """

  defstruct values: nil,
            num_keys: nil,
            opts: nil

  @doc """
  Creates a new HyperMap. The `keys` argument is a list defining the "name"
  of each key; a name can be any term. For now no `opts` are allowed.

  Example: `hmap = HyperMap.new([:first_name, :last_name])`
  """
  @spec new(keys :: [term()], opts :: Keyword.t) :: map
  def new(keys, opts \\ []) do
    if length(keys) < 2,
      do: raise "inserting a value with less than 2 keys. You should use a Map"

    %__MODULE__{
      opts: opts,
      values: empty_values(keys),
      num_keys: length(keys)
    }
  end

  @doc """
  Adds a `value` for the given `keys` to the HyperMap.

  Example: `HyperMap.put(hmap, "555-4246031", [{:first_name, "Joe"}, {:last_name, "Doe"}])`

  If the keys are atoms they can be given as a keyword:
  `HyperMap.put(hmap, "555-4246031", first_name: "Joe", last_name: "Doe")`
  """
  @spec put(hypermap :: map, value :: term(), keys :: [tuple()]) :: map
  def put(%__MODULE__{} = hmap, value, keys) when is_list(keys) do
    if hmap.num_keys != length(keys),
      do: raise "inserting a value with a different number of keys " <>
            "than #{hmap.num_keys}"

    entry = {keys, value}

    # Append the entry to the bucket of every key it is indexed under.
    updated_values =
      Enum.reduce(keys, hmap.values, fn {key, key_val}, acc ->
        unless Map.has_key?(acc, key),
          do: raise "unknown key #{inspect key}"

        Map.update!(acc, key, fn bucket ->
          Map.update(bucket, key_val, [entry], &(&1 ++ [entry]))
        end)
      end)

    %{hmap | values: updated_values}
  end

  @doc """
  Initializes a HyperMap from a list of `values` tuples for the given `keys`.
  The last tuple element is the stored value; the preceding elements line up
  with `keys`.

  Example:
      hypermap = HyperMap.from_list([:first_name, :last_name],
                                    [{"John", "Doe", "555-4246031"},
                                     {"Mary", "Doe", "555-4246049"}])
  """
  @spec from_list(keys :: [term()], values :: [tuple()]) :: map
  def from_list(keys, values) when is_list(keys) and is_list(values) do
    Enum.reduce(values, new(keys), fn (value, acc) when is_tuple(value) ->
      unless tuple_size(value) - 1 == length(keys),
        do: raise "incompatible tuple size with keys"

      put(acc, elem(value, tuple_size(value) - 1), List.zip([keys, value]))
    end)
  end

  @doc """
  Looks up values in the HyperMap by a single key/value pair.

  To look up a value by last name: `fetch(hypermap, last_name: "Doe")`.
  To look up by first name: `fetch(hypermap, first_name: "John")`.
  """
  @spec fetch(hypermap :: map, key_value :: [tuple]) :: map
  def fetch(%__MODULE__{} = hmap, [{key, key_val}]) do
    unless Map.has_key?(hmap.values, key),
      do: raise "unknown key #{inspect key}"

    hmap.values
    |> Map.get(key)
    |> Map.get(key_val)
  end

  # One empty bucket map per key name.
  defp empty_values(keys) do
    Map.new(keys, fn key -> {key, %{}} end)
  end
end
|
lib/hypermap.ex
| 0.891973
| 0.700588
|
hypermap.ex
|
starcoder
|
defmodule Predicator.Evaluator do
  @moduledoc "Evaluator Module"

  alias Predicator.{
    InstructionNotCompleteError,
    Machine
  }

  @typedoc "Error types returned from Predicator.Evaluator"
  @type error_t ::
          {:error,
           InstructionError.t()
           | ValueError.t()
           | InstructionNotCompleteError.t()}

  # Steps the machine until it either completes or the step reports an error.
  # A completed machine must leave a boolean on top of the stack; anything else
  # is reported as an incomplete-instruction error.
  def execute(%Machine{} = machine) do
    case Machine.step(machine) do
      {:error, _reason} = err ->
        err

      %Machine{} = stepped ->
        if Machine.complete?(stepped) do
          finish(stepped)
        else
          execute(stepped)
        end
    end
  end

  @doc ~S"""
  Execute will evaluate a predicator instruction set.
  If your context struct is using string_keyed map then you will need to pass in the
  `[map_type: :string]` options to the execute function to evaluate.
  ### Examples:
  iex> Predicator.Evaluator.execute([["lit", true]])
  true
  iex> Predicator.Evaluator.execute([["lit", 2], ["lit", 3], ["compare", "LT"]])
  true
  iex> Predicator.Evaluator.execute([["load", "age"], ["lit", 18], ["compare", "GT"]], %{age: 19})
  true
  iex> Predicator.Evaluator.execute([["load", "name"], ["lit", "jrichocean"], ["compare", "EQ"]], %{age: 19})
  {:error, %Predicator.ValueError{error: "Non valid load value to evaluate", instruction_pointer: 0, instructions: [["load", "name"], ["lit", "jrichocean"], ["compare", "EQ"]], stack: [], opts: [map_type: :string, nil_values: ["", nil]]}}
  iex> Predicator.Evaluator.execute([["load", "age"], ["lit", 18], ["compare", "GT"]], %{"age" => 19}, [map_type: :string])
  true
  """
  @spec execute(list(), struct() | map()) :: boolean() | error_t
  def execute(inst, context \\ %{}, opts \\ [map_type: :string, nil_values: ["", nil]])
      when is_list(inst) do
    inst
    |> to_machine(context, opts)
    |> execute()
  end

  # Wraps the raw instruction list, context and options into a Machine struct.
  def to_machine(instructions, context, opts) do
    Machine.new(instructions, context, opts)
  end

  # Final result extraction: a boolean on top of the stack is the answer,
  # anything else means the instructions did not reduce to a predicate.
  defp finish(machine) do
    result = Machine.peek(machine)

    if is_boolean(result) do
      result
    else
      InstructionNotCompleteError.inst_not_complete_error(machine)
    end
  end
end
|
lib/predicator/evaluator.ex
| 0.651909
| 0.409959
|
evaluator.ex
|
starcoder
|
defmodule CFSync.Entry.Extractors do
  @moduledoc """
  Utility functions to extract data from contenful JSON.
  """
  require Logger

  alias CFSync.Link
  alias CFSync.RichText

  @typedoc """
  Entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  """
  @opaque data() :: {map, binary}

  @doc """
  Returns value of `field_name` as a `binary`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, not a string...)
  """
  @spec extract_binary(data(), String.t(), nil | String.t()) :: nil | String.t()
  def extract_binary({fields, locale}, field_name, default \\ nil),
    do: checked(fields, locale, field_name, default, &is_binary/1)

  @doc """
  Returns value of `field_name` as a `boolean`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, not a boolean...)
  """
  @spec extract_boolean(data(), String.t(), nil | boolean) :: nil | boolean
  def extract_boolean({fields, locale}, field_name, default \\ nil),
    do: checked(fields, locale, field_name, default, &is_boolean/1)

  @doc """
  Returns value of `field_name` as a `number`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Be careful with the result as it can be either an integer or float, depending
  of it's value. A contentful decimal value of 1.0 will be stored as 1 in the JSON
  and read as an integer by JASON.

  Returns `default` on failure (field empty, not a number...)
  """
  @spec extract_number(data(), String.t(), nil | number) :: nil | number
  def extract_number({fields, locale}, field_name, default \\ nil),
    do: checked(fields, locale, field_name, default, &is_number/1)

  @doc """
  Returns value of `field_name` as a `Date`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, invalid format, invalid date...)
  """
  @spec extract_date(data(), String.t(), nil | Date.t()) :: nil | Date.t()
  def extract_date({fields, locale}, field_name, default \\ nil) do
    case extract(fields, field_name, locale) do
      value when is_binary(value) ->
        case Date.from_iso8601(value) do
          {:ok, date} -> date
          _ -> default
        end

      _ ->
        default
    end
  end

  @doc """
  Returns value of `field_name` as a `DateTime`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, invalid format, invalid datetime...)
  """
  @spec extract_datetime(data(), String.t(), nil | DateTime.t()) :: nil | DateTime.t()
  def extract_datetime({fields, locale}, field_name, default \\ nil) do
    case extract(fields, field_name, locale) do
      value when is_binary(value) ->
        case DateTime.from_iso8601(value) do
          {:ok, datetime, _offset} -> datetime
          _ -> default
        end

      _ ->
        default
    end
  end

  @doc """
  Returns value of `field_name` as a `map`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, not a map...)
  """
  @spec extract_map(data(), String.t(), nil | map) :: nil | map
  def extract_map({fields, locale}, field_name, default \\ nil),
    do: checked(fields, locale, field_name, default, &is_map/1)

  @doc """
  Returns value of `field_name` as a `list`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, not a list...)
  """
  @spec extract_list(data(), String.t(), nil | list) :: nil | list
  def extract_list({fields, locale}, field_name, default \\ nil),
    do: checked(fields, locale, field_name, default, &is_list/1)

  @doc """
  Returns value of `field_name` as a `CFSync.Link`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, not a link...)
  """
  @spec extract_link(data(), String.t(), nil | Link.t()) :: nil | Link.t()
  def extract_link({fields, locale}, field_name, default \\ nil) do
    case extract(fields, field_name, locale) do
      link_data when is_map(link_data) ->
        case try_link(link_data) do
          %Link{} = link -> link
          _ -> default
        end

      _ ->
        default
    end
  end

  @doc """
  Returns value of `field_name` as a list of `CFSync.Link`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, not a list...)
  """
  @spec extract_links(data(), String.t(), nil | list(Link.t())) :: nil | list(Link.t())
  def extract_links({fields, locale}, field_name, default \\ nil) do
    case extract(fields, field_name, locale) do
      links_data when is_list(links_data) ->
        # `link = try_link(...)` acts as a comprehension filter: entries whose
        # link data is invalid yield nil and are silently dropped.
        for link_data <- links_data, link = try_link(link_data), do: link

      _ ->
        default
    end
  end

  @doc """
  Returns value of `field_name` as `CFSync.RichText` tree.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)

  Returns `default` on failure (field empty, not a richtext...)
  """
  @spec extract_rich_text(data(), String.t(), nil | RichText.t()) :: nil | RichText.t()
  def extract_rich_text({fields, locale}, field_name, default \\ nil) do
    value = extract(fields, field_name, locale)

    if is_map(value), do: RichText.new(value), else: default
  end

  @doc """
  Returns value of `field_name` as an `atom`.

  - `data` is the entry's payload as provided to `c:CFSync.Entry.Fields.new/1`
  - `field_name` is the field's id in Contentful (ie. What is configured in Contentful app)
  - `mapping` is a map of `"value" => :atom` used to find which atom correspond to the field's value

  Returns `default` on failure (field empty, no mapping...)
  """
  @spec extract_atom(data(), String.t(), %{any() => atom()}, atom) :: nil | atom
  def extract_atom({fields, locale}, field_name, mapping, default \\ nil) do
    value = extract(fields, field_name, locale)

    # Note: a mapping that explicitly maps to nil falls back to `default`,
    # while mapping to `false` is returned as-is.
    case Map.get(mapping, value) do
      nil -> default
      mapped -> mapped
    end
  end

  # Shared extraction: read the raw field value and keep it only when it
  # satisfies the given type predicate, otherwise return `default`.
  defp checked(fields, locale, field_name, default, valid?) do
    value = extract(fields, field_name, locale)

    if valid?.(value), do: value, else: default
  end

  # Raw field access: Contentful payloads nest each field under its locale.
  defp extract(data, field, locale) do
    data[field][locale]
  end

  # Builds a Link from raw data; returns nil (and logs) when the data is
  # malformed, so callers can treat bad links as missing.
  defp try_link(link_data) do
    Link.new(link_data)
  rescue
    _ ->
      Logger.error("Bad link data:\n#{inspect(link_data)}")
      nil
  end
end
|
lib/cf_sync/entry/extractors.ex
| 0.910022
| 0.574066
|
extractors.ex
|
starcoder
|
defmodule VML do
  @moduledoc """
  Parse VML text strings
  """

  require Logger

  # Characters with structural meaning in VML and their escaped forms.
  @escapes [{"[", "\\["}, {"]", "\\]"}, {"{", "\\{"}, {"}", "\\}"}]

  @doc """
  Parse, raise on errors
  """
  def parse!(string) do
    case parse(string) do
      {:ok, ast} -> ast
      {:error, _type, error} -> raise error
    end
  end

  @doc """
  Parse a string into an AST for processing
  """
  def parse(list) when is_list(list), do: {:ok, list}

  def parse(string) do
    string
    |> String.to_charlist()
    |> :vml_lexer.string()
    |> case do
      {:ok, tokens, _} ->
        parse_tokens(tokens)

      {:error, {_, _, reason}} ->
        Logger.warn("Encountered a lexing error for #{inspect(string)} - #{inspect(reason)}")
        {:error, :lexer, reason}
    end
  end

  @doc """
  Escape VML's structural characters (brackets and braces) in a string.
  """
  def escape(string) do
    Enum.reduce(@escapes, string, fn {char, replacement}, escaped ->
      String.replace(escaped, char, replacement)
    end)
  end

  @doc """
  Convert a processed (no variables) AST back to a string
  """
  def collapse(string) when is_binary(string), do: string

  def collapse(scalar) when is_integer(scalar) or is_float(scalar) or is_atom(scalar),
    do: to_string(scalar)

  def collapse({:variable, variable}), do: "[#{variable}]"

  def collapse({:tag, attributes, nodes}) do
    name = Keyword.get(attributes, :name)
    inner = collapse(nodes)

    case Keyword.get(attributes, :attributes) do
      # No attributes (or an empty list of them): plain open/close tag.
      empty when empty in [nil, []] ->
        "{#{name}}#{inner}{/#{name}}"

      tag_attributes ->
        "{#{name} #{collapse_attributes(tag_attributes)}}#{inner}{/#{name}}"
    end
  end

  def collapse({:string, string}), do: string

  def collapse(nodes) when is_list(nodes), do: Enum.map_join(nodes, &collapse/1)

  # Renders tag attributes as `key='value'` pairs separated by spaces.
  defp collapse_attributes(attributes) do
    Enum.map_join(attributes, " ", fn {key, values} ->
      "#{key}='#{Enum.map(values, &collapse/1)}'"
    end)
  end

  @doc false
  def parse_tokens(tokens) do
    case :vml_parser.parse(tokens) do
      {:ok, ast} ->
        {:ok, pre_process(ast)}

      {:error, {_, _, reason}} ->
        Logger.warn("Encountered a parsing error for #{inspect(tokens)} - #{inspect(reason)}")
        {:error, :parser, reason}
    end
  end

  @doc """
  Preprocess the AST
  - Turn charlists into elixir strings
  - Collapse blocks of string nodes
  """
  def pre_process(ast) do
    ast
    |> Enum.map(&process_node/1)
    |> collapse_strings()
  end

  @doc """
  Process a single node
  Handles strings, variables, resources, and tags. Everything else
  passes through without change.
  """
  def process_node({:string, string}), do: {:string, to_string(string)}

  def process_node({:variable, string}), do: {:variable, to_string(string)}

  def process_node({:variable, space, string}),
    do: {:variable, to_string(space), to_string(string)}

  def process_node({:resource, resource, id}),
    do: {:resource, to_string(resource), to_string(id)}

  def process_node({:tag, attributes, nodes}) do
    attributes = for {key, value} <- attributes, do: {key, process_attribute(key, value)}

    {:tag, attributes, pre_process(nodes)}
  end

  def process_node(node), do: node

  # The :name attribute is a list of string nodes; join them into one binary.
  defp process_attribute(:name, value) do
    Enum.map_join(value, fn {:string, part} -> to_string(part) end)
  end

  defp process_attribute(:attributes, attributes) do
    Enum.map(attributes, &process_attribute(:attribute, &1))
  end

  defp process_attribute(:attribute, {name, values}) do
    processed = Enum.map(values, &process_node/1)
    {to_string(name), collapse_strings(processed)}
  end

  @doc """
  Collapse string nodes next to each other into a single node
  Recurses through the list adding the newly collapsed node into the processing stream.
  iex> VML.collapse_strings([string: "hello", string: " ", string: "world"])
  [string: "hello world"]
  iex> VML.collapse_strings([variable: "name", string: ",", string: " ", string: "hello", string: " ", string: "world"])
  [variable: "name", string: ", hello world"]
  """
  def collapse_strings([]), do: []

  def collapse_strings([{:string, first}, {:string, second} | rest]) do
    collapse_strings([{:string, to_string(first) <> to_string(second)} | rest])
  end

  def collapse_strings([node | rest]), do: [node | collapse_strings(rest)]
end
|
lib/vml.ex
| 0.749271
| 0.514888
|
vml.ex
|
starcoder
|
defmodule Rtmp.Handshake.OldHandshakeFormat do
  @moduledoc """
  Functions to parse and validate RTMP handshakes as specified in the
  official RTMP specification.

  This handshake format does *NOT* work for h.264 video.
  """

  @behaviour Rtmp.Handshake

  require Logger

  @type state :: %__MODULE__.State{}

  defmodule State do
    @moduledoc false
    # random_data: the 1528 random bytes we sent in our p1; the peer must echo
    #   them back in its p2 for the handshake to succeed.
    # current_stage: which packet we expect next (:p0 -> :p1 -> :p2 -> :complete).
    # unparsed_binary: received bytes not yet consumed.
    # bytes_to_send: response bytes accumulated while parsing.
    # received_start_time: the peer's start time taken from its p1.
    defstruct random_data: <<>>,
              current_stage: :p0,
              unparsed_binary: <<>>,
              bytes_to_send: <<>>,
              received_start_time: 0
  end

  @spec new() :: state
  @doc "Creates a new old handshake format instance"
  def new() do
    %State{}
  end

  @spec is_valid_format(binary) :: :unknown | :yes | :no
  @doc "Validates if the passed in binary can be parsed using the old style handshake."
  def is_valid_format(binary) do
    case byte_size(binary) >= 16 do
      false ->
        # Not enough bytes yet to decide either way.
        :unknown

      true ->
        case binary do
          # p0 version byte of 3, 4 bytes of time, then 4 zero bytes marks the
          # old handshake format.
          # BUG FIX: this pattern (and the p1 literal below) had been corrupted
          # into non-compiling tokens; restored per the RTMP handshake layout.
          <<3::1*8, _::4*8, 0::4*8, _::binary>> -> :yes
          _ -> :no
        end
    end
  end

  @spec process_bytes(state, binary) :: {state, Rtmp.Handshake.process_result()}
  @doc "Attempts to proceed with the handshake process with the passed in bytes"
  def process_bytes(state = %State{}, binary) do
    state = %{state | unparsed_binary: state.unparsed_binary <> binary}
    do_process_bytes(state)
  end

  @spec create_p0_and_p1_to_send(state) :: {state, binary}
  @doc "Returns packets 0 and 1 to send to the peer"
  def create_p0_and_p1_to_send(state = %State{}) do
    state = %{state | random_data: :crypto.strong_rand_bytes(1528)}

    # p0 is the single protocol version byte.
    p0 = <<3::8>>

    # p1 is 4 bytes of time, 4 zero bytes, then 1528 random bytes;
    # local start time is always zero.
    p1 = <<0::4*8, 0::4*8>> <> state.random_data

    {state, p0 <> p1}
  end

  # Stage p0: expect the single version byte (3); anything else fails.
  defp do_process_bytes(state = %State{current_stage: :p0}) do
    if byte_size(state.unparsed_binary) < 1 do
      send_incomplete_response(state)
    else
      case state.unparsed_binary do
        <<3::8, rest::binary>> ->
          state = %{state | unparsed_binary: rest, current_stage: :p1}
          do_process_bytes(state)

        _ ->
          {state, :failure}
      end
    end
  end

  # Stage p1: expect time (4 bytes), 4 zero bytes, and 1528 random bytes.
  # Queues our p2 (echo of the peer's time and random data) as a response.
  defp do_process_bytes(state = %State{current_stage: :p1}) do
    if byte_size(state.unparsed_binary) < 1536 do
      send_incomplete_response(state)
    else
      case state.unparsed_binary do
        <<time::4*8, 0::4*8, random::binary-size(1528), rest::binary>> ->
          state = %{
            state
            | # packet 2
              bytes_to_send: state.bytes_to_send <> <<time::4*8, 0::4*8>> <> random,
              unparsed_binary: rest,
              received_start_time: time,
              current_stage: :p2
          }

          do_process_bytes(state)

        _ ->
          {state, :failure}
      end
    end
  end

  # Stage p2: the peer must echo back exactly the random bytes we sent in p1.
  defp do_process_bytes(state = %State{current_stage: :p2}) do
    if byte_size(state.unparsed_binary) < 1536 do
      send_incomplete_response(state)
    else
      expected_random = state.random_data
      random_size = byte_size(expected_random)

      case state.unparsed_binary do
        <<0::4*8, _::4*8, ^expected_random::size(random_size)-binary, rest::binary>> ->
          bytes_to_send = state.bytes_to_send
          state = %{state | unparsed_binary: <<>>, current_stage: :complete, bytes_to_send: <<>>}
          {state, {:success, state.received_start_time, bytes_to_send, rest}}

        _ ->
          {state, :failure}
      end
    end
  end

  # Not enough bytes for the current stage: flush any queued response bytes
  # and wait for more input.
  defp send_incomplete_response(state) do
    bytes_to_send = state.bytes_to_send
    state = %{state | bytes_to_send: <<>>}
    {state, {:incomplete, bytes_to_send}}
  end
end
|
apps/rtmp/lib/rtmp/handshake/old_handshake_format.ex
| 0.666388
| 0.456591
|
old_handshake_format.ex
|
starcoder
|
defmodule SieveOfEratosthenes do
  @moduledoc """
  Documentation for `SieveOfEratosthenes`.
  Implementation of sieve of eratosthenes algorithm to calculate all the prime numbers
  until number given used as limit, using tail recursive optimization and async functions
  """

  @doc """
  Calculate all the primes until given `input` used as limit
  """
  def calculate_primes(input) do
    chunk_size = get_chunk_size(input)
    chunked_list = get_chunked_list(input, chunk_size)

    # The first chunk covers 2..ceil(sqrt(input)), so sieving it yields every
    # prime needed to filter the remaining chunks.
    primes = recursive_primes(hd(chunked_list), [])
    another_primes = get_non_multiples(tl(chunked_list), primes)

    primes ++ another_primes
  end

  @doc """
  Generate a list between two and `input` number
  And chunk that list by the `chunk_size`
  ## Examples
  iex> SieveOfEratosthenes.get_chunked_list(10, 2)
  [[2, 3], [4, 5], [6, 7], [8, 9], [10]]
  """
  def get_chunked_list(input, chunk_size) do
    2..input
    |> Enum.to_list()
    |> Enum.chunk_every(chunk_size)
  end

  @doc """
  Get the size of the chunk using the square root from `input`
  this number are used to limit the prime calculation using the sieve of eratosthenes algorithm
  ## Examples
  iex> SieveOfEratosthenes.get_chunk_size(1_000)
  32
  """
  def get_chunk_size(input) do
    :math.sqrt(input)
    |> Float.ceil(0)
    |> trunc
  end

  @doc """
  filter all non-multiple `numbers` of the given `primes`
  ## Examples
  iex> SieveOfEratosthenes.get_non_multiples([2..100], [2,3,5,7,11])
  [13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97]
  """
  def get_non_multiples(numbers, primes) do
    numbers
    |> Enum.map(fn chunk -> Task.async(fn -> remove_multiples(primes, chunk) end) end)
    |> Task.yield_many(100_000)
    |> Enum.map(fn
      # Task finished in time: keep its filtered chunk.
      {_task, {:ok, result}} ->
        result

      # BUG FIX: the previous `elem(res, 1) || Task.shutdown(...)` crashed when
      # a task timed out (`res` is nil, so `elem(nil, 1)` raises) and fed a
      # non-list exit reason to Enum.concat on task failure. Kill the straggler
      # and contribute nothing for that chunk instead.
      {task, _timeout_or_exit} ->
        Task.shutdown(task, :brutal_kill)
        []
    end)
    |> Enum.concat()
  end

  @doc """
  Calculate all primes of list given using the sieve of eratosthenes algorithm
  ## Examples
  iex> SieveOfEratosthenes.recursive_primes([2,3,4,5,6,7,8,9,10], [])
  [2, 3, 5, 7]
  """
  def recursive_primes([head | tail], primes) do
    # head is prime; drop its multiples from the rest and keep sieving.
    recursive_primes(Enum.filter(tail, fn x -> rem(x, head) != 0 end), primes ++ [head])
  end

  def recursive_primes([], list_primes), do: list_primes

  @doc """
  remove all the multiples numbers from given number list using list of prime numbers
  ## Examples
  iex> l = 10..100 |> Enum.to_list
  iex> SieveOfEratosthenes.remove_multiples([2,3,5,7,11], l)
  [13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97]
  """
  def remove_multiples([head | tail], number_list) do
    remove_multiples(tail, Enum.filter(number_list, fn x -> rem(x, head) != 0 end))
  end

  def remove_multiples([], number_list), do: number_list
end
|
lib/sieve_of_eratosthenes.ex
| 0.90712
| 0.662297
|
sieve_of_eratosthenes.ex
|
starcoder
|
defmodule ApiAccounts.Changeset do
  @moduledoc """
  Assists in creating and updating records by applying rules to changes in data.
  """
  alias ApiAccounts.Changeset

  @type t :: %__MODULE__{}

  # Lightweight Ecto.Changeset-style struct: tracks the original data, the
  # proposed changes, accumulated per-field errors, and declared constraints.
  defstruct source: nil,
            data: %{},
            changes: %{},
            params: %{},
            errors: %{},
            action: nil,
            constraints: [],
            valid?: true

  @doc """
  Creates a base changeset to work from.

  ## Examples

      iex> change(%User{...})
      %Changeset{source: User, data: %User{...}, ...}

  """
  @spec change(map) :: t
  # `%source{}` binds the struct's module name, recorded as the changeset source.
  def change(%source{} = data) do
    %Changeset{source: source, data: data}
  end

  @doc """
  Applies the given params as changes for the given data based on the allowed
  set of keys.

  ## Examples

      iex> cast(%User{}, %{email: "<EMAIL>"}, ~w(email)a)
      %Changeset{changes: %{email: "<EMAIL>"}, source: User, ...}

  """
  @spec cast(map, %{optional(atom) => term}, [atom]) :: t
  def cast(%mod{} = data, params, allowed) do
    allowed = List.wrap(allowed)
    # Field types come from the source module's table metadata and drive
    # per-field value coercion below.
    types = mod.table_info().field_types

    params
    |> Stream.filter(&field_allowed?(&1, allowed))
    |> Stream.map(&atomized_fields/1)
    |> Enum.reduce(change(data), &cast_change(&2, &1, types))
  end

  # Whitelist check; string keys are converted with to_existing_atom so
  # arbitrary external keys cannot create new atoms.
  defp field_allowed?({key, _}, allowed) when is_atom(key) do
    key in allowed
  end

  defp field_allowed?({key, _}, allowed) when is_binary(key) do
    String.to_existing_atom(key) in allowed
  end

  # Normalizes param keys to atoms so changes are keyed consistently.
  defp atomized_fields({key, value}) when is_binary(key) do
    {String.to_existing_atom(key), value}
  end

  defp atomized_fields({key, _} = pair) when is_atom(key), do: pair

  # Coerces one param into the field's declared type and stores it in changes.
  # A failed coercion (ArgumentError raised by do_cast_field/3) records the
  # error message and marks the whole changeset invalid instead of raising.
  defp cast_change(changeset, {key, value}, types) do
    type = Map.get(types, key)
    {key, value} = do_cast_field(key, value, type)
    put_in(changeset.changes[key], value)
  rescue
    e in ArgumentError ->
      append_error({key, [e.message]}, %{changeset | valid?: false})
  end

  # nil passes through untouched regardless of the declared type.
  defp do_cast_field(key, nil, _), do: {key, nil}

  # Safe use of String.to_atom/1: the guard restricts input to "true"/"false".
  defp do_cast_field(key, value, :boolean)
       when is_binary(value) and value in ["true", "false"] do
    {key, String.to_atom(value)}
  end

  defp do_cast_field(key, value, :boolean) when is_boolean(value) do
    {key, value}
  end

  defp do_cast_field(key, value, :string) when is_binary(value) do
    {key, value}
  end

  defp do_cast_field(key, value, :integer) when is_integer(value) do
    {key, value}
  end

  # An empty string for an integer field is treated as "no value".
  defp do_cast_field(key, "", :integer) do
    {key, nil}
  end

  # Only a fully-consumed parse ("" remainder) counts as a valid integer;
  # the raise is caught by cast_change/3 and turned into a field error.
  defp do_cast_field(key, value, :integer) when is_binary(value) do
    case Integer.parse(value) do
      {integer, ""} ->
        {key, integer}

      _ ->
        raise ArgumentError, "not an integer"
    end
  end

  defp do_cast_field(key, %DateTime{} = value, :datetime) do
    {key, value}
  end

  # Naive datetimes are assumed to be UTC.
  defp do_cast_field(key, %NaiveDateTime{} = value, :datetime) do
    datetime = DateTime.from_naive!(value, "Etc/UTC")
    {key, datetime}
  end

  # Form-style date maps (string keys/values, e.g. from an HTML datetime
  # select) are assembled into a UTC DateTime; seconds are fixed at 0.
  defp do_cast_field(key, %{} = date, :datetime) do
    case date do
      %{"year" => _, "month" => _, "day" => _, "hour" => _, "minute" => _} ->
        %{
          "year" => year_string,
          "month" => month_string,
          "day" => day_string,
          "hour" => hour_string,
          "minute" => minute_string
        } = date

        year = String.to_integer(year_string)
        month = String.to_integer(month_string)
        day = String.to_integer(day_string)
        hour = String.to_integer(hour_string)
        minute = String.to_integer(minute_string)
        {:ok, datetime} = NaiveDateTime.new(year, month, day, hour, minute, 0)
        {:ok, datetime} = DateTime.from_naive(datetime, "Etc/UTC")
        {key, datetime}

      _ ->
        raise ArgumentError, "invalid date format"
    end
  end

  @doc """
  Validates that one or more fields are present in the changeset.

  ## Examples

      validate_required(changeset, :email)
      validate_required(changeset, [:email, :role])

  """
  @spec validate_required(t, [atom]) :: t
  def validate_required(%Changeset{} = changeset, fields) do
    fields = List.wrap(fields)

    case do_validate_required(changeset.changes, fields) do
      [] ->
        changeset

      errors ->
        errors
        |> Enum.reduce(changeset, &append_error/2)
        |> Map.put(:valid?, false)
    end
  end

  # A field counts as present when it appears in changes with a non-empty-string
  # value. NOTE(review): a nil value passes this check — presumably covered by
  # validate_not_nil/2 instead; confirm that is intentional.
  defp do_validate_required(changes, fields) do
    Enum.flat_map(fields, fn field ->
      if Map.has_key?(changes, field) and Map.get(changes, field) != "" do
        []
      else
        [{field, ["is required"]}]
      end
    end)
  end

  @doc """
  Validates that one or more fields do not have `nil` values in the changeset.

  The validation only applies to the changes. If the applied changes are
  `%{email: nil}` in the changeset and the this function is called to check
  that `:email` is not `nil`, the changeset will be marked as invalid and state
  the field can't be `nil`.

  ## Examples

      validate_not_nil(changeset, :email)
      validate_not_nil(changeset, [:email, :role])

  """
  @spec validate_not_nil(t, [atom]) :: t
  def validate_not_nil(%Changeset{} = changeset, fields) do
    fields = List.wrap(fields)

    case do_validate_not_nil(changeset.changes, fields) do
      [] ->
        changeset

      errors ->
        errors
        |> Enum.reduce(changeset, &append_error/2)
        |> Map.put(:valid?, false)
    end
  end

  # Iterates the changes (not the fields), so a watched field absent from the
  # changes produces no error.
  defp do_validate_not_nil(changes, fields) do
    Enum.flat_map(changes, fn {field, v} ->
      if field in fields and v == nil do
        [{field, ["cannot be nil"]}]
      else
        []
      end
    end)
  end

  @doc """
  Validates that confirmation field value matches the field value.

  If you were to call `validate_confirmation(changeset, :password)`, then
  the changeset values for `:password` and `:password_confirmation` would be
  checked for equality.

  ## Examples

      validate_confirmation(changeset, :password)

  """
  @spec validate_confirmation(t, atom) :: t
  def validate_confirmation(%Changeset{changes: changes} = changeset, field) do
    # to_existing_atom: the "<field>_confirmation" atom must already exist
    # (normally declared alongside the schema), avoiding atom creation.
    confirmation_field = String.to_existing_atom("#{field}_confirmation")
    field_value = Map.get(changes, field)
    confirmation_field_value = Map.get(changes, confirmation_field)

    if field_value == confirmation_field_value do
      changeset
    else
      {confirmation_field, ["does not match #{field}"]}
      |> append_error(changeset)
      |> Map.put(:valid?, false)
    end
  end

  @doc """
  Adds a unique constraint on the field.

  The constraint will be checked right before insertion/update. The contraint
  assumes that the field is a secondary index.

  ## Examples

      unique_constraint(changeset, :email)

  """
  @spec unique_constraint(t, atom) :: t
  # Only records the constraint; enforcement happens at persistence time.
  def unique_constraint(%Changeset{} = changeset, field) do
    constraints = changeset.constraints

    constraint = %{
      field: field,
      type: :unique,
      message: "has already been taken"
    }

    put_in(changeset.constraints, constraints ++ [constraint])
  end

  @doc """
  Validates the field has a given format.

  ## Examples

      validate_format(changeset, :email, ~r"@")

  """
  @spec validate_format(t, atom, Regex.t()) :: t
  # A missing field defaults to "" and therefore fails the format check.
  def validate_format(%Changeset{} = changeset, field, format) do
    field_value = Map.get(changeset.changes, field, "")

    if field_value =~ format do
      changeset
    else
      {field, ["has invalid format"]}
      |> append_error(changeset)
      |> Map.put(:valid?, false)
    end
  end

  @doc """
  Validates the length of a field.

  Only Strings are supported.

  ## Options

    * `:min` - length must be greater than or equal to this value
    * `:max` - length must be less than or equal to this value
    * `:is` - length must be exactly this value

  ## Examples

      validate_length(changeset, :email, min: 8)
      validate_length(changeset, :email, max: 32)
      validate_length(changeset, :email, min: 8, max: 32)
      validate_length(changeset, :phone, is: 10)

  """
  @spec validate_length(t, atom, Keyword.t()) :: t
  def validate_length(%Changeset{changes: changes} = changeset, field, opts) do
    field_value = Map.get(changes, field, "")
    length = String.length(field_value)

    # Short-circuit chain: each `(opt = opts[:key]) && check(...)` term is only
    # evaluated when that option was provided; the first failing check wins.
    error =
      ((is = opts[:is]) && wrong_length(length, is)) ||
        ((min = opts[:min]) && too_short(length, min)) ||
        ((max = opts[:max]) && too_long(length, max))

    if error do
      {field, [error]}
      |> append_error(changeset)
      |> Map.put(:valid?, false)
    else
      changeset
    end
  end

  # Repeated variable in the head: matches only when actual == expected,
  # returning nil (no error).
  defp wrong_length(length, length), do: nil

  defp wrong_length(_, length) do
    "should be #{length} character(s)"
  end

  defp too_short(length, min) when length >= min, do: nil

  defp too_short(_, min) do
    "should be at least #{min} character(s)"
  end

  defp too_long(length, max) when length <= max, do: nil

  defp too_long(_, max) do
    "should be at most #{max} character(s)"
  end

  @doc false
  # Appends error messages for a field, concatenating with any already present.
  # Note: does not flip valid? — callers do that themselves.
  def append_error({field, error}, changeset) do
    case Map.get(changeset.errors, field) do
      nil -> put_in(changeset.errors[field], error)
      list -> put_in(changeset.errors[field], list ++ error)
    end
  end

  @doc """
  Sets a value in the changes for a given key.

  ## Examples

      iex> changeset = User.changeset(%User{}, %{username: "foo"})
      iex> changeset = put_change(changeset, :username, "bar")
      iex> changeset.changes
      %{username: "bar"}

  """
  @spec put_change(t, atom, any) :: t
  def put_change(%Changeset{} = changeset, field, value) when is_atom(field) do
    changes = Map.put(changeset.changes, field, value)
    %Changeset{changeset | changes: changes}
  end
end
|
apps/api_accounts/lib/api_accounts/changeset.ex
| 0.881577
| 0.409634
|
changeset.ex
|
starcoder
|
defmodule AWS.Inspector do
@moduledoc """
Amazon Inspector
Amazon Inspector enables you to analyze the behavior of your AWS resources
and to identify potential security issues. For more information, see [
Amazon Inspector User
Guide](https://docs.aws.amazon.com/inspector/latest/userguide/inspector_introduction.html).
"""
@doc """
Assigns attributes (key and value pairs) to the findings that are specified
by the ARNs of the findings.
"""
def add_attributes_to_findings(client, input, options \\ []) do
request(client, "AddAttributesToFindings", input, options)
end
@doc """
Creates a new assessment target using the ARN of the resource group that is
generated by `CreateResourceGroup`. If resourceGroupArn is not specified,
all EC2 instances in the current AWS account and region are included in the
assessment target. If the [service-linked
role](https://docs.aws.amazon.com/inspector/latest/userguide/inspector_slr.html)
isn’t already registered, this action also creates and registers a
service-linked role to grant Amazon Inspector access to AWS Services needed
to perform security assessments. You can create up to 50 assessment targets
per AWS account. You can run up to 500 concurrent agents per AWS account.
For more information, see [ Amazon Inspector Assessment
Targets](https://docs.aws.amazon.com/inspector/latest/userguide/inspector_applications.html).
"""
# Each public function below is a thin wrapper that forwards its input to the
# shared `request/4` helper with the matching Amazon Inspector action name.
def create_assessment_target(client, input, options \\ []) do
  request(client, "CreateAssessmentTarget", input, options)
end

@doc """
Creates an assessment template for the assessment target that is specified
by the ARN of the assessment target. If the [service-linked
role](https://docs.aws.amazon.com/inspector/latest/userguide/inspector_slr.html)
isn't already registered, this action also creates and registers a
service-linked role to grant Amazon Inspector access to AWS Services needed
to perform security assessments.
"""
def create_assessment_template(client, input, options \\ []) do
  request(client, "CreateAssessmentTemplate", input, options)
end

@doc """
Starts the generation of an exclusions preview for the specified assessment
template. The exclusions preview lists the potential exclusions
(ExclusionPreview) that Inspector can detect before it runs the assessment.
"""
def create_exclusions_preview(client, input, options \\ []) do
  request(client, "CreateExclusionsPreview", input, options)
end

@doc """
Creates a resource group using the specified set of tags (key and value
pairs) that are used to select the EC2 instances to be included in an
Amazon Inspector assessment target. The created resource group is then used
to create an Amazon Inspector assessment target. For more information, see
`CreateAssessmentTarget`.
"""
def create_resource_group(client, input, options \\ []) do
  request(client, "CreateResourceGroup", input, options)
end

@doc """
Deletes the assessment run that is specified by the ARN of the assessment
run.
"""
def delete_assessment_run(client, input, options \\ []) do
  request(client, "DeleteAssessmentRun", input, options)
end

@doc """
Deletes the assessment target that is specified by the ARN of the
assessment target.
"""
def delete_assessment_target(client, input, options \\ []) do
  request(client, "DeleteAssessmentTarget", input, options)
end

@doc """
Deletes the assessment template that is specified by the ARN of the
assessment template.
"""
def delete_assessment_template(client, input, options \\ []) do
  request(client, "DeleteAssessmentTemplate", input, options)
end

@doc """
Describes the assessment runs that are specified by the ARNs of the
assessment runs.
"""
def describe_assessment_runs(client, input, options \\ []) do
  request(client, "DescribeAssessmentRuns", input, options)
end

@doc """
Describes the assessment targets that are specified by the ARNs of the
assessment targets.
"""
def describe_assessment_targets(client, input, options \\ []) do
  request(client, "DescribeAssessmentTargets", input, options)
end

@doc """
Describes the assessment templates that are specified by the ARNs of the
assessment templates.
"""
def describe_assessment_templates(client, input, options \\ []) do
  request(client, "DescribeAssessmentTemplates", input, options)
end

@doc """
Describes the IAM role that enables Amazon Inspector to access your AWS
account.
"""
def describe_cross_account_access_role(client, input, options \\ []) do
  request(client, "DescribeCrossAccountAccessRole", input, options)
end

@doc """
Describes the exclusions that are specified by the exclusions' ARNs.
"""
def describe_exclusions(client, input, options \\ []) do
  request(client, "DescribeExclusions", input, options)
end

@doc """
Describes the findings that are specified by the ARNs of the findings.
"""
def describe_findings(client, input, options \\ []) do
  request(client, "DescribeFindings", input, options)
end

@doc """
Describes the resource groups that are specified by the ARNs of the
resource groups.
"""
def describe_resource_groups(client, input, options \\ []) do
  request(client, "DescribeResourceGroups", input, options)
end

@doc """
Describes the rules packages that are specified by the ARNs of the rules
packages.
"""
def describe_rules_packages(client, input, options \\ []) do
  request(client, "DescribeRulesPackages", input, options)
end

@doc """
Produces an assessment report that includes detailed and comprehensive
results of a specified assessment run.
"""
def get_assessment_report(client, input, options \\ []) do
  request(client, "GetAssessmentReport", input, options)
end

@doc """
Retrieves the exclusions preview (a list of ExclusionPreview objects)
specified by the preview token. You can obtain the preview token by running
the CreateExclusionsPreview API.
"""
def get_exclusions_preview(client, input, options \\ []) do
  request(client, "GetExclusionsPreview", input, options)
end

@doc """
Information about the data that is collected for the specified assessment
run.
"""
def get_telemetry_metadata(client, input, options \\ []) do
  request(client, "GetTelemetryMetadata", input, options)
end

@doc """
Lists the agents of the assessment runs that are specified by the ARNs of
the assessment runs.
"""
def list_assessment_run_agents(client, input, options \\ []) do
  request(client, "ListAssessmentRunAgents", input, options)
end

@doc """
Lists the assessment runs that correspond to the assessment templates that
are specified by the ARNs of the assessment templates.
"""
def list_assessment_runs(client, input, options \\ []) do
  request(client, "ListAssessmentRuns", input, options)
end

@doc """
Lists the ARNs of the assessment targets within this AWS account. For more
information about assessment targets, see [Amazon Inspector Assessment
Targets](https://docs.aws.amazon.com/inspector/latest/userguide/inspector_applications.html).
"""
def list_assessment_targets(client, input, options \\ []) do
  request(client, "ListAssessmentTargets", input, options)
end

@doc """
Lists the assessment templates that correspond to the assessment targets
that are specified by the ARNs of the assessment targets.
"""
def list_assessment_templates(client, input, options \\ []) do
  request(client, "ListAssessmentTemplates", input, options)
end

@doc """
Lists all the event subscriptions for the assessment template that is
specified by the ARN of the assessment template. For more information, see
`SubscribeToEvent` and `UnsubscribeFromEvent`.
"""
def list_event_subscriptions(client, input, options \\ []) do
  request(client, "ListEventSubscriptions", input, options)
end

@doc """
List exclusions that are generated by the assessment run.
"""
def list_exclusions(client, input, options \\ []) do
  request(client, "ListExclusions", input, options)
end

@doc """
Lists findings that are generated by the assessment runs that are specified
by the ARNs of the assessment runs.
"""
def list_findings(client, input, options \\ []) do
  request(client, "ListFindings", input, options)
end

@doc """
Lists all available Amazon Inspector rules packages.
"""
def list_rules_packages(client, input, options \\ []) do
  request(client, "ListRulesPackages", input, options)
end

@doc """
Lists all tags associated with an assessment template.
"""
def list_tags_for_resource(client, input, options \\ []) do
  request(client, "ListTagsForResource", input, options)
end

@doc """
Previews the agents installed on the EC2 instances that are part of the
specified assessment target.
"""
def preview_agents(client, input, options \\ []) do
  request(client, "PreviewAgents", input, options)
end

@doc """
Registers the IAM role that grants Amazon Inspector access to AWS Services
needed to perform security assessments.
"""
def register_cross_account_access_role(client, input, options \\ []) do
  request(client, "RegisterCrossAccountAccessRole", input, options)
end

@doc """
Removes entire attributes (key and value pairs) from the findings that are
specified by the ARNs of the findings where an attribute with the specified
key exists.
"""
def remove_attributes_from_findings(client, input, options \\ []) do
  request(client, "RemoveAttributesFromFindings", input, options)
end

@doc """
Sets tags (key and value pairs) to the assessment template that is
specified by the ARN of the assessment template.
"""
def set_tags_for_resource(client, input, options \\ []) do
  request(client, "SetTagsForResource", input, options)
end

@doc """
Starts the assessment run specified by the ARN of the assessment template.
For this API to function properly, you must not exceed the limit of running
up to 500 concurrent agents per AWS account.
"""
def start_assessment_run(client, input, options \\ []) do
  request(client, "StartAssessmentRun", input, options)
end

@doc """
Stops the assessment run that is specified by the ARN of the assessment
run.
"""
def stop_assessment_run(client, input, options \\ []) do
  request(client, "StopAssessmentRun", input, options)
end

@doc """
Enables the process of sending Amazon Simple Notification Service (SNS)
notifications about a specified event to a specified SNS topic.
"""
def subscribe_to_event(client, input, options \\ []) do
  request(client, "SubscribeToEvent", input, options)
end

@doc """
Disables the process of sending Amazon Simple Notification Service (SNS)
notifications about a specified event to a specified SNS topic.
"""
def unsubscribe_from_event(client, input, options \\ []) do
  request(client, "UnsubscribeFromEvent", input, options)
end

@doc """
Updates the assessment target that is specified by the ARN of the
assessment target.

If resourceGroupArn is not specified, all EC2 instances in the current AWS
account and region are included in the assessment target.
"""
def update_assessment_target(client, input, options \\ []) do
  request(client, "UpdateAssessmentTarget", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t() | nil, Poison.Response.t()}
        | {:error, Poison.Parser.t()}
        | {:error, HTTPoison.Error.t()}
defp request(client, action, input, options) do
  # Every Inspector call is a signed JSON POST against one endpoint; only
  # the X-Amz-Target header distinguishes the action being invoked.
  client = %{client | service: "inspector"}
  host = build_host("inspector", client)
  url = build_url(host, client)

  unsigned_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "InspectorService.#{action}"}
  ]

  payload = Poison.Encoder.encode(input, %{})
  signed_headers = AWS.Request.sign_v4(client, "POST", url, unsigned_headers, payload)

  case HTTPoison.post(url, payload, signed_headers, options) do
    # 200 with an empty body: success, nothing to decode.
    {:ok, %HTTPoison.Response{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    # 200 with a JSON body: decode and return it alongside the raw response.
    {:ok, %HTTPoison.Response{status_code: 200, body: body} = response} ->
      {:ok, Poison.Parser.parse!(body, %{}), response}

    # Any non-200 status carries a JSON error document in the body.
    {:ok, %HTTPoison.Response{body: body}} ->
      {:error, Poison.Parser.parse!(body, %{})}

    # Transport-level failure (DNS, connect, timeout, ...).
    {:error, %HTTPoison.Error{reason: why}} ->
      {:error, %HTTPoison.Error{reason: why}}
  end
end
# The "local" pseudo-region always resolves to localhost (used in tests
# and local development); any real region builds the standard AWS host.
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  Enum.join([endpoint_prefix, region, endpoint], ".")
end
# Assembles the request URL from the client's configured scheme and port.
defp build_url(host, %{proto: proto, port: port}) do
  "#{proto}://#{host}:#{port}/"
end
end
|
lib/aws/inspector.ex
| 0.855474
| 0.528229
|
inspector.ex
|
starcoder
|
defmodule Raxx.Middleware do
  alias Raxx.Server

  @moduledoc """
  A "middleware" is a component that sits between the HTTP server,
  such as [Ace](https://github.com/CrowdHailer/Ace), and a `Raxx.Server` controller.
  The middleware can modify the request before giving it to the controller and
  modify the controller's response before it's given to the server.

  Oftentimes multiple middlewares might be attached to a controller and
  function as a single `t:Raxx.Server.t/0` - see `Raxx.Stack` for details.

  The `Raxx.Middleware` provides a behaviour to be implemented by middlewares.

  ## Example

  Traditionally, middlewares are used for a variety of purposes: managing CORS,
  CSRF protection, logging, error handling, and many more. This example shows
  a middleware that, given a HEAD request, "translates" it to a GET one, hands
  it over to the controller, and strips the response body, transforming the
  response according to [RFC 2616](https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13).
  This way the controller doesn't need to handle the HEAD case at all.

      defmodule Raxx.Middleware.Head do
        alias Raxx.Server
        alias Raxx.Middleware

        @behaviour Middleware

        @impl Middleware
        def process_head(request = %{method: :HEAD}, _config, inner_server) do
          request = %{request | method: :GET}
          state = :engage
          {parts, inner_server} = Server.handle_head(inner_server, request)
          parts = modify_response_parts(parts, state)
          {parts, state, inner_server}
        end

        def process_head(request = %{method: _}, _config, inner_server) do
          {parts, inner_server} = Server.handle_head(inner_server, request)
          {parts, :disengage, inner_server}
        end

        @impl Middleware
        def process_data(data, state, inner_server) do
          {parts, inner_server} = Server.handle_data(inner_server, data)
          parts = modify_response_parts(parts, state)
          {parts, state, inner_server}
        end

        @impl Middleware
        def process_tail(tail, state, inner_server) do
          {parts, inner_server} = Server.handle_tail(inner_server, tail)
          parts = modify_response_parts(parts, state)
          {parts, state, inner_server}
        end

        @impl Middleware
        def process_info(info, state, inner_server) do
          {parts, inner_server} = Server.handle_info(inner_server, info)
          parts = modify_response_parts(parts, state)
          {parts, state, inner_server}
        end

        defp modify_response_parts(parts, :disengage) do
          parts
        end

        defp modify_response_parts(parts, :engage) do
          Enum.flat_map(parts, &do_handle_response_part(&1))
        end

        defp do_handle_response_part(response = %Raxx.Response{}) do
          # the content-length will remain the same
          [%Raxx.Response{response | body: false}]
        end

        defp do_handle_response_part(%Raxx.Data{}) do
          []
        end

        defp do_handle_response_part(%Raxx.Tail{}) do
          []
        end
      end

  Within the callback implementations the middleware should call through
  to the "inner" server and make sure to return its updated state as part
  of the `t:Raxx.Middleware.next/0` tuple.

  In certain situations the middleware might want to short-circuit processing
  of the incoming messages, bypassing the server. In that case, it should not
  call through using `Raxx.Server`'s `handle_*` helper functions and return
  the `inner_server` unmodified.

  ## Gotchas

  ### Info messages forwarding

  As you can see in the above example, the middleware can even modify
  the `info` messages sent to the server and is responsible for forwarding them
  to the inner servers.

  ### Iodata contents

  While much of the time the request body, response body and data chunks will
  be represented with binaries, they can be represented
  as [`iodata`](https://hexdocs.pm/elixir/typespecs.html#built-in-types).
  A robust middleware should handle that.
  """

  @typedoc """
  The behaviour module and state/config of a raxx middleware.
  """
  @type t :: {module, state}

  @typedoc """
  State of middleware.
  """
  @type state :: any()

  @typedoc """
  Values returned from the `process_*` callbacks.
  """
  @type next :: {[Raxx.part()], state, Server.t()}

  @doc """
  Called once when a client starts a stream.

  The arguments are a `Raxx.Request`, the middleware configuration and
  the "inner" server for the middleware to call through to.

  This callback can be relied upon to execute before any other callbacks.
  """
  @callback process_head(request :: Raxx.Request.t(), state(), inner_server :: Server.t()) ::
              next()

  @doc """
  Called every time data from the request body is received.
  """
  @callback process_data(binary(), state(), inner_server :: Server.t()) :: next()

  @doc """
  Called once when a request finishes.

  This will be called with an empty list of headers if the request is completed without trailers.

  Will not be called at all if the `t:Raxx.Request.t/0` passed to `c:process_head/3` had `body: false`.
  """
  @callback process_tail(trailers :: [{binary(), binary()}], state(), inner_server :: Server.t()) ::
              next()

  @doc """
  Called for all other messages the middleware may receive.

  The middleware is responsible for forwarding them to the inner server.
  """
  @callback process_info(any(), state(), inner_server :: Server.t()) :: next()

  defmacro __using__(_options) do
    # Default implementations are transparent pass-throughs to the inner
    # server; a concrete middleware overrides only the callbacks it needs.
    quote do
      @behaviour unquote(__MODULE__)

      @impl unquote(__MODULE__)
      def process_head(request, state, inner_server) do
        {parts, inner_server} = Server.handle_head(inner_server, request)
        {parts, state, inner_server}
      end

      @impl unquote(__MODULE__)
      def process_data(data, state, inner_server) do
        {parts, inner_server} = Server.handle_data(inner_server, data)
        {parts, state, inner_server}
      end

      @impl unquote(__MODULE__)
      def process_tail(tail, state, inner_server) do
        {parts, inner_server} = Server.handle_tail(inner_server, tail)
        {parts, state, inner_server}
      end

      @impl unquote(__MODULE__)
      def process_info(message, state, inner_server) do
        {parts, inner_server} = Server.handle_info(inner_server, message)
        {parts, state, inner_server}
      end

      defoverridable unquote(__MODULE__)
    end
  end

  @doc false
  @spec is_implemented?(module) :: boolean
  def is_implemented?(module) when is_atom(module) do
    # taken from Raxx.Server
    case Code.ensure_compiled(module) do
      {:module, module} ->
        module.module_info[:attributes]
        |> Keyword.get(:behaviour, [])
        |> Enum.member?(__MODULE__)

      _ -> false
    end
  end
end
|
lib/raxx/middleware.ex
| 0.92243
| 0.562207
|
middleware.ex
|
starcoder
|
defmodule ETS do
  use ETS.Utils

  @moduledoc """
  ETS, an Elixir wrapper for Erlang's [`:ets`](http://erlang.org/doc/man/ets.html) module.

  See `ETS.Set` for information on creating and managing Sets, and `ETS.Bag` for information on creating and managing Bags.

  See `ETS.KeyValueSet` for an abstraction which provides standard key/value interaction with Sets.

  ## What type of `ETS` table should I use?

  ## Set

  If you need your key column to be unique, then you should use a Set. If you just want a simple key/value store,
  then use an `ETS.KeyValueSet`, but if you want to store full tuple records, use an `ETS.Set`. If you want your
  records ordered by key value, which adds some performance overhead on insertion, set `ordered: true` when creating the Set (defaults to false).

  ## Bag

  If you do not need your key column to be unique, then you should use an `ETS.Bag`, and if you want to prevent exact duplicate
  records from being inserted, which adds some performance overhead on insertion, set duplicate: false when creating the Bag
  (defaults to true).
  """

  @type table_name :: atom()
  @type table_reference :: :ets.tid()
  @type table_identifier :: table_name | table_reference
  @type match_pattern :: :ets.match_pattern()
  @type match_spec :: :ets.match_spec()
  @type comp_match_spec :: :ets.comp_match_spec()
  @type end_of_table :: :"$end_of_table"
  @type continuation ::
          end_of_table
          | {table_reference(), integer(), integer(), comp_match_spec(), list(), integer()}
          | {table_reference(), any(), any(), integer(), comp_match_spec(), list(), integer(),
             integer()}

  @doc """
  Returns list of current :ets tables, each wrapped as either `ETS.Set` or `ETS.Bag`.

  NOTE: `ETS.Bag` is not yet implemented. This list returns only :set and :ordered_set tables, both wrapped as `ETS.Set`.

  ## Examples

      iex> {:ok, all} = ETS.all()
      iex> x = length(all)
      iex> ETS.Set.new!()
      iex> {:ok, all} = ETS.all()
      iex> length(all) == x + 1
      true

  """
  @spec all :: {:ok, [ETS.table_identifier()]} | {:error, any()}
  def all do
    # NOTE(review): `catch_error` is a macro from `ETS.Utils`; it presumably
    # converts raised :ets errors into {:error, reason} tuples — confirm there.
    catch_error do
      all =
        :ets.all()
        |> Enum.map(fn tid ->
          tid
          |> :ets.info()
          # Dispatch on the table's type: set-like tables wrap as ETS.Set,
          # bag-like tables wrap as ETS.Bag.
          |> Keyword.get(:type)
          |> case do
            type when type in [:set, :ordered_set] -> ETS.Set.wrap_existing!(tid)
            type when type in [:bag, :duplicate_bag] -> ETS.Bag.wrap_existing!(tid)
          end
        end)

      {:ok, all}
    end
  end

  @doc """
  Same as `all/0` but unwraps or raises on :error.
  """
  @spec all! :: [ETS.table_identifier()]
  def all!, do: unwrap_or_raise(all())
end
|
lib/ets.ex
| 0.893336
| 0.562537
|
ets.ex
|
starcoder
|
defmodule Alchemy.Cache do
  @moduledoc """
  This module provides a handful of useful functions to interact with the cache.

  By default, Alchemy caches a great deal of information given to it, notably about
  guilds. In general, using the cache should be prioritised over using the api
  functions in `Alchemy.Client`. However, a lot of struct modules have "smart"
  functions that will correctly balance the cache and the api, as well as use macros
  to get information from the context of commands.
  """
  alias Alchemy.Cache.{Guilds, Guilds.GuildSupervisor}
  alias Alchemy.{DMChannel, Channel, Guild, User, VoiceState, Voice}
  alias Alchemy.Guild.{Emoji, GuildMember, Presence, Role}
  alias Alchemy.Discord.Gateway.RateLimiter, as: Gateway
  import Alchemy.Structs, only: [to_struct: 2]

  @type snowflake :: String.t

  @doc """
  Gets the corresponding guild_id for a channel.

  In case the channel guild can't be found, `:none` will be returned.

  This is useful when the guild_id is needed for some kind of task, but there's no
  need for getting the whole struct. Because of how the registry is set up, getting
  the entire guild requires a whole extra step, that passes through this one anyways.
  """
  @spec guild_id(snowflake) :: {:ok, snowflake} | {:error, String.t}
  def guild_id(channel_id) do
    case :ets.lookup(:channels, channel_id) do
      [{_, id}] -> {:ok, id}
      [] -> {:error, "Failed to find a channel entry for #{channel_id}."}
    end
  end

  @doc """
  Fetches a guild from the cache by a given id.

  By default, this method needs the guild_id, but keywords can be used to specify
  a different id, and use the appropriate paths to get the guild using that.

  In general there are "smarter" methods, that will deal with getting the id for you;
  nonetheless, the need for this function sometimes exists.

  ## Keywords
  - `channel`
    Using this keyword will fetch the information for the guild a channel belongs to.
  """
  @spec guild(snowflake) :: {:ok, Guild.t} | {:error, String.t}
  def guild(channel: channel_id) do
    with {:ok, id} <- guild_id(channel_id) do
      guild(id)
    end
  end

  def guild(guild_id) do
    case Guilds.safe_call(guild_id, :show) do
      {:error, :no_guild} ->
        {:error, "You don't seem to be in this guild"}

      # Guilds can be cached as stubs before Discord sends their full data.
      {:ok, %{"unavailable" => true}} ->
        {:error, "This guild hasn't been loaded in the cache yet"}

      {:ok, guild} ->
        {:ok, guild |> Guilds.de_index |> Guild.from_map}
    end
  end

  # Fetches entry `id` out of a guild's `section` (e.g. "members") and decodes
  # it with `module.from_map/1` or with the given decoding function.
  defp access(guild_id, section, id, module) when is_atom(module) do
    access(guild_id, section, id, &module.from_map/1)
  end

  defp access(guild_id, section, id, function) do
    maybe_val =
      with {:ok, guild} <- Guilds.safe_call(guild_id, {:section, section}) do
        {:ok, guild[id]}
      end

    case maybe_val do
      {:error, :no_guild} ->
        {:error, "You don't seem to be in this guild"}

      {:ok, nil} ->
        {:error, "Failed to find an entry for #{id} in section #{section}"}

      {:ok, some} ->
        {:ok, function.(some)}
    end
  end

  @doc """
  Gets a member from a cache, by guild and member id.
  """
  @spec member(snowflake, snowflake) :: {:ok, Guild.member} | {:error, String.t}
  def member(guild_id, member_id) do
    access(guild_id, "members", member_id, GuildMember)
  end

  @doc """
  Gets a specific role in a guild.
  """
  @spec role(snowflake, snowflake) :: {:ok, Guild.role} | {:error, String.t}
  def role(guild_id, role_id) do
    access(guild_id, "roles", role_id, &to_struct(&1, Role))
  end

  @doc """
  Gets the presence of a user in a certain guild.

  This contains info such as their status, and roles.
  """
  @spec presence(snowflake, snowflake) :: {:ok, Presence.t} | {:error, String.t}
  def presence(guild_id, user_id) do
    access(guild_id, "presences", user_id, Presence)
  end

  @doc """
  Retrieves a custom emoji by id in a guild.
  """
  @spec emoji(snowflake, snowflake) :: {:ok, Guild.emoji} | {:error, String.t}
  def emoji(guild_id, emoji_id) do
    access(guild_id, "emojis", emoji_id, &to_struct(&1, Emoji))
  end

  @doc """
  Retrieves a user's voice state by id in a guild.
  """
  @spec voice_state(snowflake, snowflake) :: {:ok, Voice.state} | {:error, String.t}
  def voice_state(guild_id, user_id) do
    access(guild_id, "voice_states", user_id, &to_struct(&1, VoiceState))
  end

  @doc """
  Retrieves a specific channel in a guild.
  """
  @spec channel(snowflake, snowflake) :: {:ok, Channel.t} | {:error, String.t}
  def channel(guild_id, channel_id) do
    access(guild_id, "channels", channel_id, Channel)
  end

  # Returns the corresponding protocol for an atom key.
  # This is mainly needed for `search/2`
  defp cache_sections(key) do
    %{members: {"members", &GuildMember.from_map/1},
      roles: {"roles", &to_struct(&1, Role)},
      presences: {"presences", &Presence.from_map/1},
      voice_states: {"voice_states", &to_struct(&1, VoiceState)},
      emojis: {"emojis", &to_struct(&1, Emoji)},
      channels: {"channels", &Channel.from_map/1}}[key]
  end

  @doc """
  Searches across all guilds for information.

  The section is the type of object to search for. The possibilities are:
  `:guilds`, `:members`, `:roles`, `:presences`, `:voice_states`, `:emojis`,
  `:channels`

  The filter is a function returning a boolean, that allows you to filter out
  elements from this list.

  The return type will be a struct of the same type of the section searched for.

  ## Examples
  ```elixir
  Cache.search(:members, fn x -> String.length(x.nick) < 10 end)
  ```
  This will return a list of all members whose nickname is less than 10
  characters long.
  ```elixir
  Cache.search(:roles, &match?(%{name: "Cool Kids"}, &1))
  ```
  This is a good example of using the `match?/2`
  function to filter against a pattern.
  ```elixir
  Cache.search(:guilds, &match?(%{name: "Test"}, &1))
  ```
  Will match any guilds named "Test" in the cache.
  """
  # Fixed spec: `Boolean` was the (nonexistent) module alias, not a type.
  @spec search(atom, (any -> boolean)) :: [struct]
  def search(:guilds, filter) do
    Supervisor.which_children(GuildSupervisor)
    |> Stream.map(fn {_, pid, _, _} -> pid end)
    |> Task.async_stream(&GenServer.call(&1, :show))
    |> Stream.filter(fn {:ok, val} ->
      val["unavailable"] != true
    end)
    |> Stream.map(fn {:ok, val} ->
      val |> Guilds.de_index |> Guild.from_map
    end)
    |> Enum.filter(filter)
  end

  def search(:private_channels, filter) do
    fold = fn {_id, val}, acc ->
      if filter.(val) do [val | acc] else acc end
    end

    :ets.foldr(fold, [], :priv_channels)
  end

  def search(section, filter) do
    {key, de_indexer} = cache_sections(section)

    Supervisor.which_children(GuildSupervisor)
    |> Stream.map(fn {_, pid, _, _} -> pid end)
    |> Task.async_stream(&GenServer.call(&1, {:section, key}))
    |> Stream.flat_map(fn {:ok, v} -> Map.values(v) end)
    |> Stream.map(de_indexer)
    |> Enum.filter(filter)
  end

  @doc """
  Fetches a private_channel in the cache by id of the channel.

  Takes a DMChannel id. Alternatively, `user: user_id` can be passed to find
  the private channel related to a user.
  """
  @spec private_channel(snowflake) :: {:ok, Channel.dm_channel} | {:error, String.t}
  def private_channel(user: user_id) do
    case :ets.lookup(:priv_channels, user_id) do
      [{_, id}] -> private_channel(id)
      [] -> {:error, "Failed to find a DM channel for this user: #{user_id}"}
    end
  end

  def private_channel(channel_id) do
    case :ets.lookup(:priv_channels, channel_id) do
      [{_, channel}] -> {:ok, DMChannel.from_map(channel)}
      [] -> {:error, "Failed to find a DM channel entry for #{channel_id}."}
    end
  end

  @doc """
  Gets the user struct for this client from the cache.

  ## Examples
  ```elixir
  Cogs.def hello do
    Cogs.say "hello, my name is \#{Cache.user().name}"
  end
  ```
  """
  @spec user :: User.t
  def user do
    GenServer.call(Alchemy.Cache.User, :get)
    |> to_struct(User)
  end

  @doc """
  Requests the loading of offline guild members for a guild.

  Guilds should automatically get 250 offline members after the
  `:ready` event, however, you can use this method to request a fuller
  list if needed.

  The `username` is used to only select members whose username starts
  with a certain string; `""` won't do any filtering. The `limit`
  specifies the amount of members to get; `0` for unlimited.

  There's a ratelimit of ~100 requests per shard per minute on this
  function, so be wary of the fact that this might block a process.
  """
  def load_guild_members(guild_id, username \\ "", limit \\ 0) do
    Gateway.request_guild_members(guild_id, username, limit)
  end
end
|
lib/cache.ex
| 0.846657
| 0.64526
|
cache.ex
|
starcoder
|
defmodule Cldr.String do
  @moduledoc """
  Utility functions operating on a `String.t` that the standard
  library does not provide.
  """

  # Parameters of the polynomial rolling hash: a prime base (@p) and a
  # large prime modulus (@m).
  @p 99991
  @m trunc(1.0e9) + 9

  @doc """
  Hashes a string using a polynomial rolling hash function.

  See https://cp-algorithms.com/string/string-hashing.html for
  a description of the algorithm.
  """
  def hash(string) do
    string
    |> String.to_charlist()
    |> Enum.reduce({0, 1}, fn char, {acc, power} ->
      # Fold each character in, keeping both values reduced modulo @m.
      {rem(acc + char * power, @m), rem(power * @p, @m)}
    end)
    |> elem(0)
  end

  @doc """
  Replaces "-" with "_" in a string.

  ### Example

      iex> Cldr.String.to_underscore("this-one")
      "this_one"
  """
  def to_underscore(string) when is_binary(string), do: String.replace(string, "-", "_")

  @doc """
  A modified version of `Macro.underscore/1`.

  The change caters for strings in the format `This_That`, which
  `Macro.underscore/1` formats as `this__that` (note the double
  underscore) when we actually want `this_that`.
  """
  def underscore(atom) when is_atom(atom) do
    "Elixir." <> rest = Atom.to_string(atom)
    underscore(rest)
  end

  def underscore(<<first, rest::binary>>) do
    <<to_lower_char(first)>> <> do_underscore(rest, first)
  end

  def underscore(""), do: ""

  # Current char is upper case, the next one is neither upper case nor "_"
  # nor ".", and the previous char was not "_": insert an underscore.
  defp do_underscore(<<char, next, rest::binary>>, previous)
       when char >= ?A and char <= ?Z and not (next >= ?A and next <= ?Z) and next != ?. and
              next != ?_ and previous != ?_ do
    <<?_, to_lower_char(char), next>> <> do_underscore(rest, next)
  end

  # Current char is upper case and the previous one was neither upper case nor "_".
  defp do_underscore(<<char, rest::binary>>, previous)
       when char >= ?A and char <= ?Z and not (previous >= ?A and previous <= ?Z) and
              previous != ?_ do
    <<?_, to_lower_char(char)>> <> do_underscore(rest, char)
  end

  # "." becomes a path separator, and the remainder restarts the state machine.
  defp do_underscore(<<?., rest::binary>>, _previous) do
    <<?/>> <> underscore(rest)
  end

  # Any other char is lowercased and copied through unchanged.
  defp do_underscore(<<char, rest::binary>>, _previous) do
    <<to_lower_char(char)>> <> do_underscore(rest, char)
  end

  defp do_underscore(<<>>, _previous), do: <<>>

  def to_upper_char(char) when char >= ?a and char <= ?z, do: char - 32
  def to_upper_char(char), do: char

  def to_lower_char(char) when char >= ?A and char <= ?Z, do: char + 32
  def to_lower_char(char), do: char
end
|
lib/cldr/utils/string.ex
| 0.779028
| 0.42656
|
string.ex
|
starcoder
|
defmodule APIacFilterIPWhitelist do
  @behaviour Plug
  @behaviour APIac.Filter

  @moduledoc """
  An `APIac.Filter` plug enabling IP whitelist (IPv4 & IPv6)

  ## Plug options

  - `whitelist`: a *list* of allowed IPv4 and IPv6 addresses in CIDR notation or a
  `(Plug.Conn.t -> [String])` function returning that list of addresses
  - `exec_cond`: a `(Plug.Conn.t() -> boolean())` function that determines whether
  this filter is to be executed or not. Defaults to a function returning `true`
  - `send_error_response`: function called when IP address is not whitelisted.
  Defaults to `APIacFilterIPWhitelist.send_error_response/3`
  - `error_response_verbosity`: one of `:debug`, `:normal` or `:minimal`.
  Defaults to `:normal`

  ## Example

  ```elixir
  plug APIacFilterIPWhitelist, whitelist: [
    "192.168.13.0/24",
    "2001:45B8:991A::/48",
    "192.168.3.11/16",
    "20E7:4128:D4F0:0::/64",
    "172.16.31.10/24"
  ]
  ```

  ## Security considerations

  This plug uses the `remote_ip` field of the `Plug.Conn.t` for IP filtering, which means:
  - **do use** [`remote_ip`](https://github.com/ajvondrak/remote_ip) or a similar
  library if you're behind a proxy
  - **do not use** `remote_ip` or a similar library if you're not behind a proxy
  """

  @impl Plug
  def init(opts) do
    # Normalize the option list into a map and install the defaults
    # documented in the moduledoc.
    opts
    |> Enum.into(%{})
    |> Map.put(:whitelist, transform_whitelist(opts[:whitelist]))
    |> Map.put_new(:exec_cond, &__MODULE__.always_true/1)
    |> Map.put_new(:send_error_response, &__MODULE__.send_error_response/3)
    |> Map.put_new(:error_response_verbosity, :normal)
  end

  # A static whitelist is parsed once at init time; a 1-arity function is kept
  # as-is and evaluated per request in `do_filter/2`.
  defp transform_whitelist(whitelist) when is_list(whitelist) do
    Enum.map(whitelist, fn cidr -> InetCidr.parse(cidr) end)
  end

  defp transform_whitelist(whitelist) when is_function(whitelist, 1), do: whitelist
  defp transform_whitelist(_), do: raise("Whitelist must be a list or a function")

  @impl Plug
  def call(conn, opts) do
    # `exec_cond` lets the user skip filtering entirely for some requests.
    if opts[:exec_cond].(conn) do
      case filter(conn, opts) do
        {:ok, conn} ->
          conn

        {:error, conn, reason} ->
          opts[:send_error_response].(conn, reason, opts)
      end
    else
      conn
    end
  end

  @impl APIac.Filter
  def filter(conn, %{whitelist: whitelist}) do
    if do_filter(conn, whitelist) do
      {:ok, conn}
    else
      {:error, conn, %APIac.Filter.Forbidden{filter: __MODULE__, reason: :ip_not_whitelisted}}
    end
  end

  # Function whitelists are resolved per request, then checked like a list.
  defp do_filter(conn, whitelist) when is_function(whitelist, 1) do
    do_filter(conn, whitelist.(conn))
  end

  defp do_filter(%Plug.Conn{remote_ip: remote_ip}, whitelist) do
    Enum.any?(
      whitelist,
      fn cidr -> InetCidr.contains?(cidr(cidr), remote_ip) end
    )
  end

  # Accepts either an unparsed CIDR string or an already-parsed tuple.
  defp cidr(cidr) when is_binary(cidr), do: InetCidr.parse(cidr)
  defp cidr(cidr) when is_tuple(cidr), do: cidr

  @doc """
  Implementation of the `APIac.Filter` behaviour.

  ## Verbosity

  The following elements in the HTTP response are set depending on the value
  of the `:error_response_verbosity` option:

  | Error response verbosity | HTTP status | Headers | Body |
  |:------------------------:|-------------------------|-------------|-----------------------------------------------|
  | :debug | Forbidden (403) | | `APIac.Filter.Forbidden` exception's message |
  | :normal | Forbidden (403) | | |
  | :minimal | Forbidden (403) | | |
  """
  @impl APIac.Filter
  def send_error_response(conn, %APIac.Filter.Forbidden{} = error, opts) do
    case opts[:error_response_verbosity] do
      :debug ->
        conn
        |> Plug.Conn.send_resp(:forbidden, Exception.message(error))
        |> Plug.Conn.halt()

      atom when atom in [:normal, :minimal] ->
        conn
        |> Plug.Conn.send_resp(:forbidden, "")
        |> Plug.Conn.halt()
    end
  end

  def always_true(_), do: true
end
|
lib/apiac_filter_ip_whitelist.ex
| 0.84905
| 0.784154
|
apiac_filter_ip_whitelist.ex
|
starcoder
|
defmodule Alchemy.Cogs do
  alias Alchemy.Cache
  alias Alchemy.Cogs.CommandHandler
  alias Alchemy.Cogs.EventRegistry
  alias Alchemy.Events
  alias Alchemy.Guild
  require Logger

  @moduledoc """
  This module provides quite a bit of sugar for registering commands.

  To use the macros in this module, it must be `used`. This also defines a
  `__using__` macro for that module, which will then allow these commands
  to be loaded in the main application via `use`

  ## Example Module
  ```elixir
  defmodule Example do
    use Alchemy.Cogs

    Cogs.def ping do
      Cogs.say "pong"
    end

    Cogs.def echo do
      Cogs.say "please give me a word to echo"
    end
    Cogs.def echo(word) do
      Cogs.say word
    end
  end
  ```
  This defines a basic Cog, that can now be loaded into our application via `use`.
  The command created from this module are "!ping", and "!echo",
  ("!" is merely the default prefix, it could be anything from "?", to "SHARKNADO").
  The `ping` command is straight forward, but as you can see, the `echo` command
  takes in an argument. When you define a command, the handler will
  try and get arguments up to the max arity of that command;
  in this case, `echo` has a max arity of one, so the parser will pass up to
  one argument to the function. In the case that the parser can't get enough
  arguments, it will pass a lower amount. We explicitly handle this case
  here, in this case sending a useful error message back.

  ### Shared names across multiple modules
  If I define a command `ping` in module `A`, and a `ping` in module `B`,
  which `ping` should become the command? In general, you should avoid doing
  this, but the module used last will override previously loaded commands
  with a matching name.

  ## Parsing
  The way the parser works is simple: a message is first decomposed into
  parts:
  ```
  prefix <> command <> " " <> rest
  ```
  If the prefix doesn't match, the message is ignored. If it does match,
  a new Task is started to handle this event. This task will try and find
  the function corresponding to the command called, and will return preemptively
  if no such function is found. After that, `rest` is passed to the parser,
  which will try and extract arguments to pass to the function. The default
  parsing method is simply splitting by whitespace.
  Thankfully,
  you can define a custom parser for a command via `Cogs.set_parser/2`. This
  parser will act upon `rest`, and parse out the relevant arguments.

  ## The `message` argument
  When you define a function with `Cogs.def` the function gets expanded
  to take an extra `message` parameter, which is the message triggering
  the command. This contains a lot of useful information, and is what
  enables a lot of the other macros to work. Because of this,
  be wary of naming something else `message`.

  ## Loading and Unloading
  Loading a cog merely requires having started the client:
  ```elixir
  use Example
  ```
  If you need to remove this cog from the handler:
  ```elixir
  Cogs.unload(Example)
  ```
  Or you just want to disable a single function:
  ```elixir
  Cogs.disable(:ping)
  ```
  """

  @doc """
  Sets the client's command prefix to a specific string.

  This will only work after the client has been started

  # Example
  ```elixir
  Client.start(@token)
  Cogs.set_prefix("!!")
  ```
  """
  @spec set_prefix(String.t) :: :ok
  def set_prefix(prefix) do
    CommandHandler.set_prefix(prefix)
  end

  @doc """
  Unloads a module from the handler.

  If you just want to disable a single command, use `Cogs.disable/1`

  ## Examples
  ```elixir
  Client.start(@token)
  use Commands2
  ```
  Turns out we want to stop using `Commands2` commands in our bot, so we
  can simply unload the module:
  ```elixir
  Cogs.unload(Commands2)
  ```
  Now none of the commands defined in that module will be accessible. If
  we want to reverse that, we can merely do:
  ```elixir
  use Commands2
  ```
  and reload them back in.
  """
  @spec unload(atom) :: :ok
  def unload(module) do
    CommandHandler.unload(module)
    Logger.info "*#{inspect module}* unloaded from cogs"
  end

  @doc """
  Disables a command.

  If you want to remove a whole module from the cogs, use `Cogs.unload/1`.

  This will stop a command from being triggered. The only way to reenable the
  command is to reload the module with `use`.

  ## Examples
  ```elixir
  defmodule Example do
    use Alchemy.Cogs

    Cogs.def ping, do: Cogs.say "pong"

    Cogs.def foo, do: Cogs.say "bar"
  end
  ```
  ```elixir
  Client.start(@token)
  use Example
  Cogs.disable(:foo)
  ```
  Only `ping` will be triggerable now.
  ```elixir
  use Example
  ```
  At runtime this will add `foo` back in, given it's still in the module.
  """
  @spec disable(atom) :: :ok
  def disable(command) do
    CommandHandler.disable(command)
    Logger.info "Command *#{command}* disabled"
  end

  @doc """
  Sends a message to the same channel as the message triggering a command.

  This can only be used in a command defined with `Cogs.def`

  This is just a thin macro around `Alchemy.Client.send_message/2`

  ## Examples
  ```elixir
  Cogs.def ping, do: Cogs.say("pong!")
  ```
  """
  defmacro say(content, options \\ []) do
    # Relies on the hidden `message` binding injected by `Cogs.def`.
    quote do
      Alchemy.Client.send_message(var!(message).channel_id,
                                  unquote(content),
                                  unquote(options))
    end
  end

  @doc """
  Gets the id of the guild from which a command was triggered.

  Returns `{:ok, id}`, or `{:error, why}`. Will never return ok outside
  of a guild, naturally.

  This is to be used when the guild_id is necessary for an operation,
  but the full guild struct isn't needed.
  """
  defmacro guild_id do
    quote do
      Cache.guild_id(var!(message).channel_id)
    end
  end

  @doc """
  Gets the guild struct from which a command was triggered.

  If only the id is needed, see `:guild_id/0`

  ## Examples
  ```elixir
  Cogs.def guild do
    {:ok, %Alchemy.Guild{name: name}} = Cogs.guild()
    Cogs.say(name)
  end
  ```
  """
  defmacro guild do
    quote do
      Cache.guild(channel: var!(message).channel_id)
    end
  end

  @doc """
  Gets the member that triggered a command.

  Returns either `{:ok, member}`, or `{:error, why}`. Will not return
  ok if the command wasn't run in a guild.

  As opposed to `message.author`, this comes with a bit more info about who
  triggered the command. This is useful for when you want to use certain information
  in a command, such as permissions, for example.
  """
  defmacro member do
    quote do
      with {:ok, guild} <- Cache.guild_id(var!(message).channel_id) do
        Cache.member(guild, var!(message).author.id)
      end
    end
  end

  @doc """
  Allows you to register a custom message parser for a command.

  The parser will be applied to part of the message not used for command matching.
  ```elixir
  prefix <> command <> " " <> rest
  ```

  ## Examples
  ```elixir
  Cogs.set_parser(:echo, &List.wrap/1)
  Cogs.def echo(rest) do
    Cogs.say(rest)
  end
  ```
  """
  @type parser :: (String.t -> Enum.t)
  defmacro set_parser(name, parser) do
    # Escape the parser lambda so it can be stored in @commands as an AST;
    # it is evaluated back into a function at `use` time (see normal_cog/0).
    parser = Macro.escape(parser)
    quote do
      @commands update_in(@commands, [Atom.to_string(unquote(name))], fn
        # No command registered yet: record a 0-arity stub with the parser.
        nil ->
          {__MODULE__, 0, unquote(name), unquote(parser)}
        # Command exists without a parser: attach this one.
        {mod, x, name} ->
          {mod, x, name, unquote(parser)}
        # Command already has a parser: keep it as-is.
        full ->
          full
      end)
    end
  end

  @doc """
  Makes all commands in this module sub commands of a group.

  ## Examples
  ```elixir
  defmodule C do
    use Alchemy.Cogs

    Cogs.group("cool")

    Cogs.def foo, do: Cogs.say "foo"
  end
  ```
  To use this foo command, one has to type `!cool foo`, from there on
  arguments will be passed like normal.

  The relevant parsing will be done in the command task, as if there
  were a command `!cool` that redirected to subfunctions. Because of this,
  `Cogs.disable/1` will not be able to disable the subcommands, however,
  `Cogs.unload/1` still works as expected. Reloading a grouped module
  will also disable removed commands, unlike with ungrouped modules.
  """
  defmacro group(str) do
    # Picked up by __before_compile__/1 to switch to the grouped code path.
    quote do
      @command_group {:group, unquote(str)}
    end
  end

  @doc """
  Halts the current command until an event is received.

  The event type is an item corresponding to the events in `Alchemy.Events`,
  i.e. `on_message_edit` -> `Cogs.wait_for(:message_edit, ...)`. The `fun`
  is the function that gets called with the relevant event arguments; see
  `Alchemy.Events` for more info on what events have what arguments.

  The `:message` event is a bit special, as it will specifically wait for
  a message not triggered by a bot, in that specific channel, unlike other events,
  which trigger generically across the entire bot.

  The process will kill itself if it doesn't receive any such event
  for 20s.

  ## Examples
  ```elixir
  Cogs.def color do
    Cogs.say "What's your favorite color?"
    Cogs.wait_for :message, fn msg ->
      Cogs.say "\#{msg.content} is my favorite color too!"
    end
  end
  ```
  ```elixir
  Cogs.def typing do
    Cogs.say "I'm waiting for someone to type.."
    Cogs.wait_for :typing, fn _,_,_ ->
      Cogs.say "Someone somewhere started typing..."
    end
  ```
  """
  # messages need special treatment, to ignore bots
  defmacro wait_for(:message, fun) do
    quote do
      EventRegistry.subscribe()
      channel = var!(message).channel_id
      # Only match a non-bot message created in the same channel as the
      # triggering command; everything else stays in the mailbox.
      receive do
        {:discord_event, {:message_create,
          [%{author: %{bot: false}, channel_id: ^channel}] = args}} ->
          apply(unquote(fun), args)
      after
        20_000 -> Process.exit(self(), :kill)
      end
    end
  end

  defmacro wait_for(type, fun) do
    # convert the special cases we set in the Events module
    type = Events.convert_type(type)
    quote do
      EventRegistry.subscribe()
      receive do
        {:discord_event, {unquote(type), args}} ->
          apply(unquote(fun), args)
      after
        20_000 -> Process.exit(self(), :kill)
      end
    end
  end

  @doc """
  Waits for a specific event satisfying a condition.

  Same as `wait_for/2`, except this takes an extra condition that needs to be
  met for the waiting to handle to trigger.

  ## Examples
  ```elixir
  Cogs.def foo do
    Cogs.say "Send me foo"
    Cogs.wait_for(:message, & &1.content == "foo", fn _msg ->
      Cogs.say "Nice foo man!"
    end)
  ```
  Note that, if no event of the given type is received after 20s, the process
  will kill itself, it's possible that this will never get met, but
  no event satisfying the condition will ever arrive, essentially rendering
  the process a waste. To circumvent this, it might be smart to send
  a preemptive kill message:
  ```elixir
  self = self()
  Task.start(fn ->
    Process.sleep(20_000)
    Process.exit(self, :kill)
  )
  Cogs.wait_for(:message, fn x -> false end, fn _msg ->
    Cogs.say "If you hear this, logic itself is falling apart!!!"
  end)
  ```
  """
  defmacro wait_for(:message, condition, fun) do
    m = __MODULE__
    quote do
      EventRegistry.subscribe()
      unquote(m).wait(:message, unquote(condition),
                      unquote(fun), var!(message).channel_id)
    end
  end

  defmacro wait_for(type, condition, fun) do
    type = Events.convert_type(type)
    m = __MODULE__
    quote do
      EventRegistry.subscribe()
      unquote(m).wait(unquote(type), unquote(condition), unquote(fun))
    end
  end

  # Loops until the correct command is received
  @doc false
  def wait(:message, condition, fun, channel_id) do
    receive do
      {:discord_event, {:message_create,
        [%{author: %{bot: false}, channel_id: ^channel_id}] = args}} ->
        if apply(condition, args) do
          apply(fun, args)
        else
          # Condition not met: keep waiting. NOTE(review): the 20s timeout
          # restarts on every non-matching message, so the total wait can
          # exceed 20s as long as messages keep arriving.
          wait(:message, condition, fun, channel_id)
        end
    after
      20_000 -> Process.exit(self(), :kill)
    end
  end

  @doc false
  def wait(type, condition, fun) do
    receive do
      {:discord_event, {^type, args}} ->
        if apply(condition, args) do
          apply(fun, args)
        else
          wait(type, condition, fun)
        end
    after
      20_000 -> Process.exit(self(), :kill)
    end
  end

  @doc """
  Registers a new command, under the name of the function.

  This macro modifies the function definition, to accept an extra
  `message` parameter, allowing the message that triggered the command to be passed,
  as a `t:Alchemy.Message/0`

  ## Examples
  ```elixir
  Cogs.def ping do
    Cogs.say "pong"
  end
  ```

  In this case, "!ping" will trigger the command, unless another prefix has been set
  with `set_prefix/1`

  ```elixir
  Cogs.def mimic, do: Cogs.say "Please send a word for me to echo"
  Cogs.def mimic(word), do: Cogs.say word
  ```

  Messages will be parsed, and arguments will be extracted, however,
  to deal with potentially missing arguments, pattern matching should be used.
  So, in this case, when a 2nd argument isn't given, an error message is sent back.
  """
  defmacro def(func, body) do
    # Rewrite the head to take the implicit `message` param, then record the
    # command in @commands, keeping only the highest arity seen so far.
    {name, arity, new_func} = inject(func, body)
    quote do
      arity = unquote(arity)
      @commands update_in(@commands, [Atom.to_string(unquote(name))], fn
        nil ->
          {__MODULE__, arity, unquote(name)}
        {mod, x, name} when x < arity ->
          {mod, arity, name}
        {mod, x, name, parser} when x < arity ->
          {mod, arity, name, parser}
        val ->
          val
      end)
      unquote(new_func)
    end
  end

  # Injects the implicit `message` parameter into a `Cogs.def` head that has
  # a `when` guard; returns {command_name, declared_arity, new_def_ast}.
  # NOTE(review): the prewalk clause rewrites every 3-tuple node's third
  # element to the injected head — this appears to rely on the exact shape of
  # `when` ASTs produced by `Cogs.def`; confirm before refactoring.
  defp inject({:when, ctx, [{name, _, args} | func_rest]} = guard, body) do
    args = args || []
    injected = [{:message, [], ctx[:context]} | args]
    new_guard = Macro.prewalk(guard, fn {a, b, _} ->
      {a, b, [{name, ctx, injected} | func_rest]}
    end)
    new_func = {:def, ctx, [new_guard, body]}
    {name, length(args), new_func}
  end
  # Same as above for guardless heads: prepend `message` to the arg list.
  defp inject({name, ctx, args}, body) do
    args = args || []
    injected = [{:message, [], ctx[:context]} | args]
    new_func = {:def, ctx, [{name, ctx, injected}, body]}
    {name, length(args), new_func}
  end

  @doc false
  defmacro __using__(_opts) do
    # Start the module with an empty @commands map and register the
    # before-compile hook that emits this module's own __using__.
    quote do
      alias Alchemy.Cogs
      require Cogs

      @commands unquote(Macro.escape(%{}))

      @before_compile Cogs
    end
  end

  # Builds the __using__ AST for an ungrouped cog: registers the accumulated
  # @commands with the command handler, evaluating any escaped parser ASTs
  # back into functions first.
  defp normal_cog do
    quote do
      defmacro __using__(_opts) do
        commands = Macro.escape(@commands)
        module = __MODULE__
        quote do
          Alchemy.Cogs.CommandHandler.add_commands(unquote(module),
            unquote(commands) |> Enum.map(fn
              {k, {mod, arity, name, quoted}} ->
                {eval, _} = Code.eval_quoted(quoted)
                {k, {mod, arity, name, eval}}
              {k, v} ->
                {k, v}
            end)
            |> Enum.into(%{}))
        end
      end
    end
  end

  # Builds the __using__ AST for a grouped cog: registers a single dispatcher
  # command (the group name) which splits off the sub-command name and
  # forwards the remainder to the matching sub-function.
  defp grouped_cog(str, commands) do
    quote do
      # Deliberately unusual name to avoid clashing with user-defined commands.
      def cOGS_COMMANDS_GROUPER(message, rest) do
        [sub, rest] =
          rest
          |> String.split(" ", parts: 2)
          |> Enum.concat([""])
          |> Enum.take(2)
        case unquote(commands)[sub] do
          {m, a, f, e} ->
            apply(m, f, [message | rest |> e.() |> Enum.take(a)])
          {m, a, f} ->
            apply(m, f, [message | rest |> String.split |> Enum.take(a)])
          _x ->
            nil
        end
      end

      defmacro __using__(_opts) do
        module = __MODULE__
        commands = %{unquote(str) =>
          {module, 1, :cOGS_COMMANDS_GROUPER, &List.wrap/1}}
          |> Macro.escape
        quote do
          Alchemy.Cogs.CommandHandler.add_commands(
            unquote(module), unquote(commands)
          )
        end
      end
    end
  end

  # Chooses the grouped or ungrouped __using__ implementation based on whether
  # Cogs.group/1 was called in the module body.
  defmacro __before_compile__(env) do
    module = env.module
    case Module.get_attribute(module, :command_group) do
      {:group, str} ->
        # Replace the map with the AST representing it, keeping the lambdas
        commands =
          Module.get_attribute(module, :commands)
          |> Enum.map(fn {k, v} ->
            {k, {:{}, [], Tuple.to_list(v)}}
          end)
        grouped_cog(str, {:%{}, [], commands})
      nil ->
        normal_cog()
    end
  end

  @doc """
  Returns a map from command name (string) to the command information.

  Each command is either `{module, arity, function_name}`, or
  `{module, arity, function_name, parser}`.

  This can be useful for providing some kind of help command, or telling
  a user if a command is defined, e.g. :
  ```elixir
  Cogs.def iscommand(maybe) do
    case Cogs.all_commands()[maybe] do
      nil -> Cogs.say "\#{maybe} is not a command"
      _ -> Cogs.say "\#{maybe} is a command"
    end
  end
  ```
  """
  @spec all_commands :: map
  def all_commands do
    # The handler state also carries :prefix and :options; strip them so the
    # result contains commands only.
    GenServer.call(Alchemy.Cogs.CommandHandler, :list)
    |> Map.delete(:prefix)
    |> Map.delete(:options)
  end

  @doc """
  Returns the base permissions for a member in a guild.

  Functions similarly to `permissions`.
  """
  defmacro guild_permissions do
    quote do
      with {:ok, guild} <- Cache.guild(channel: var!(message).channel_id),
           {:ok, member} <- Cache.member(guild.id, var!(message).author.id)
      do
        {:ok, Alchemy.Guild.highest_role(guild, member).permissions}
      end
    end
  end

  @doc """
  Returns the permission bitset of the current member in the channel the command
  was called from.

  If you just want the base permissions of the member in the guild,
  see `guild_permissions`.

  Returns `{:ok, perms}`, or `{:error, why}`. Fails if not called from
  a guild, or the guild or the member couldn't be fetched from the cache.

  ## Example
  ```elixir
  Cogs.def perms do
    with {:ok, permissions} <- Cogs.permissions() do
      Cogs.say "Here's a list of your permissions `\#{Permissions.to_list(permissions)}`"
    end
  end
  ```
  """
  defmacro permissions do
    quote do
      with {:ok, guild} <- Cache.guild(channel: var!(message).channel_id),
           {:ok, member} <- Cache.member(guild.id, var!(message).author.id)
      do
        Alchemy.Permissions.channel_permissions(member, guild, var!(message).channel_id)
      end
    end
  end
end
|
lib/cogs.ex
| 0.784154
| 0.840488
|
cogs.ex
|
starcoder
|
defmodule Ueberauth.Strategy.Procore do
  @moduledoc """
  Implements an Überauth Procore strategy for authentication with procore.com.

  When configuring the strategy in the Überauth providers, you can specify some defaults.

  * `oauth2_module` - The OAuth2 module to use. Default Ueberauth.Strategy.Procore.OAuth

  ````elixir
  config :ueberauth, Ueberauth,
    providers: [
      procore: { Ueberauth.Strategy.Procore }
    ]
  """
  @oauth2_module Ueberauth.Strategy.Procore.OAuth
  use Ueberauth.Strategy, oauth2_module: @oauth2_module

  alias Ueberauth.Auth.Info
  alias Ueberauth.Auth.Credentials
  alias Ueberauth.Auth.Extra

  # When handling the request just redirect to Procore
  @doc false
  def handle_request!(conn) do
    opts = [redirect_uri: callback_url(conn)]

    # Propagate the CSRF state parameter when the caller provided one.
    opts =
      if conn.params["state"], do: Keyword.put(opts, :state, conn.params["state"]), else: opts

    redirect!(conn, apply(@oauth2_module, :authorize_url!, [opts]))
  end

  # When handling the callback, if there was no errors we need to
  # make two calls. The first, to fetch the procore auth is so that we can get hold of
  # the user id so we can make a query to fetch the user info.
  # So that it is available later to build the auth struct, we put it in the private section of the conn.
  @doc false
  def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
    params = [
      code: code,
      redirect_uri: callback_url(conn)
    ]

    token = apply(@oauth2_module, :get_token!, [params])

    if token.access_token == nil do
      set_errors!(conn, [
        error(token.other_params["error"], token.other_params["error_description"])
      ])
    else
      conn
      |> store_token(token)
      |> fetch_companies(token)
      |> fetch_user(token)
    end
  end

  # If we don't match code, then we have an issue
  @doc false
  def handle_callback!(conn) do
    set_errors!(conn, [error("missing_code", "No code received")])
  end

  # We store the token for use later when fetching the procore auth and user and constructing the auth struct.
  @doc false
  defp store_token(conn, token) do
    put_private(conn, :procore_token, token)
  end

  # Remove the temporary storage in the conn for our data. Run after the auth struct has been built.
  @doc false
  def handle_cleanup!(conn) do
    conn
    |> put_private(:procore_user, nil)
    |> put_private(:procore_companies, nil)
    |> put_private(:procore_token, nil)
  end

  # The structure of the requests is such that it is difficult to provide cusomization for the uid field.
  # instead, we allow selecting any field from the info struct
  @doc false
  def uid(conn) do
    conn.private[:procore_user]["id"]
  end

  @doc false
  def credentials(conn) do
    token = conn.private.procore_token

    %Credentials{
      token: token.access_token,
      refresh_token: token.refresh_token,
      expires_at: token.expires_at,
      token_type: token.token_type,
      expires: !!token.expires_at,
      scopes: []
    }
  end

  @doc false
  def info(conn) do
    user = conn.private[:procore_user]

    %Info{
      email: user["email_address"],
      first_name: user["first_name"],
      last_name: user["last_name"]
    }
  end

  @doc false
  def extra(conn) do
    user = conn.private[:procore_user]

    %Extra{
      raw_info: %{
        companies: conn.private[:procore_companies],
        token: conn.private[:procore_token],
        user: conn.private[:procore_user],
        job_title: user["job_title"],
        is_employee: user["is_employee"],
        business_phone: user["business_phone"],
        mobile_phone: user["mobile_phone"]
      }
    }
  end

  # Fetches the companies the authenticated user belongs to and stores them in
  # the conn's private storage; records an ueberauth failure on any error.
  defp fetch_companies(conn, token) do
    case Ueberauth.Strategy.Procore.OAuth.get(token, "/vapid/companies") do
      {:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
        set_errors!(conn, [error("token", "unauthorized")])

      {:ok, %OAuth2.Response{status_code: status_code, body: companies}}
      when status_code in 200..399 ->
        put_private(conn, :procore_companies, companies)

      {:error, %OAuth2.Error{reason: reason}} ->
        set_errors!(conn, [error("OAuth2", reason)])

      {:error, %OAuth2.Response{} = response} ->
        set_errors!(conn, [error("Request Error", response)])
    end
  end

  # Skip the user fetch entirely when a previous step already failed.
  defp fetch_user(%Plug.Conn{assigns: %{ueberauth_failure: _fails}} = conn, _), do: conn

  # Fetches the user profile scoped to the user's first company.
  # Previously an empty companies list crashed with BadMapError
  # (List.first/1 -> nil -> Map.get(nil, "id")); now it is reported
  # as a regular ueberauth failure.
  defp fetch_user(conn, token) do
    case conn.private[:procore_companies] do
      [%{"id" => company_id} | _] ->
        fetch_user_for_company(conn, token, company_id)

      _ ->
        set_errors!(conn, [error("companies", "no companies available for user")])
    end
  end

  # Performs the "/me" request for the given company and stores the user.
  # Handles the same error shapes as fetch_companies/2 (the original was
  # missing the {:error, %OAuth2.Response{}} clause and crashed on it).
  defp fetch_user_for_company(conn, token, company_id) do
    case Ueberauth.Strategy.Procore.OAuth.get(token, "/vapid/companies/#{company_id}/me") do
      {:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
        set_errors!(conn, [error("token", "unauthorized")])

      {:ok, %OAuth2.Response{status_code: status_code, body: user}}
      when status_code in 200..399 ->
        put_private(conn, :procore_user, user)

      {:error, %OAuth2.Error{reason: reason}} ->
        set_errors!(conn, [error("OAuth2", reason)])

      {:error, %OAuth2.Response{} = response} ->
        set_errors!(conn, [error("Request Error", response)])
    end
  end
end
|
lib/ueberauth/strategy/procore.ex
| 0.725649
| 0.421284
|
procore.ex
|
starcoder
|
defmodule Valvex do
  @moduledoc """
  Valvex is a simple wrapper for the Steam ReST Api

  Before making calls be sure to initialize the client with your Api key provided
  by Steam. Check `start_link/0` for more details.

  Every single one of the user ids specified here are 64-bit Steam ids.
  """
  alias Valvex.Api.{Player, App, GlobalAchievements, Game}

  @doc """
  Initializes the client with the api key from your `config.exs` file.

  Set your token as `config :valvex, token: "<PASSWORD>"` OR
  export the `STEAM_TOKEN` variable: `export STEAM_TOKEN="<PASSWORD>"`.

  ## Example
  ```elixir
  iex> Valvex.start_link()
  ```
  """
  def start_link do
    :valvex
    |> Application.get_env(:token, System.get_env("STEAM_TOKEN"))
    |> start_link()
  end

  @doc """
  Initialize the client with an api key given directly.

  ## Example
  ```elixir
  iex> Valvex.start_link("your-token")
  ```
  """
  def start_link(key) do
    Agent.start_link(fn -> key end, name: __MODULE__)
  end

  @doc """
  Get the api key that you previously set.
  """
  def api_key do
    Agent.get(__MODULE__, fn key -> key end)
  end

  @doc """
  Return news of a game specified by its ID.

  ## Example
  ```elixir
  iex> Valvex.get_app_news(gameid: 730, count: 3, max_length: 100)
  ```
  """
  def get_app_news(gameid: gameid, count: count, max_length: max_length) do
    App.get_app_news(gameid: gameid, count: count, max_length: max_length)
  end

  @doc """
  Returns Global achievements of a game in percentages.

  ## Example
  ```elixir
  iex> Valvex.global_achievements(730)
  ```
  """
  defdelegate global_achievements(id), to: GlobalAchievements, as: :percentages

  @doc """
  Return basic profile information for a given 64-bit Steam ID.
  Fetch for multiple ids by passing a list.

  ## Examples
  ```elixir
  iex> Valvex.player_summaries(123)
  iex> Valvex.player_summaries([123, 76561198083075294])
  ```
  """
  defdelegate player_summaries(ids), to: Player, as: :summaries

  @doc """
  Returns the friend list of a given user specified by ID.
  User profile must be set to public.

  ## Example
  ```elixir
  iex> Valvex.friend_list(123)
  ```
  """
  defdelegate friend_list(id), to: Player

  @doc """
  Returns achievements of a player specified by user id and game id.

  ## Example
  ```elixir
  iex> Valvex.achievements(steam: 123, gameid: 730)
  ```
  """
  def achievements(steam: steamid, gameid: gameid) do
    Player.achievements(steamid: steamid, gameid: gameid)
  end

  @doc """
  Returns stats from a player for a game specified by user id and game id.

  ## Example
  ```elixir
  iex> Valvex.user_stats(steamid: 123, gameid: 730)
  ```
  """
  def user_stats(steamid: steamid, gameid: gameid) do
    Player.stats_for_game(steamid: steamid, gameid: gameid)
  end

  @doc """
  Returns owned games for a user specified by user id.

  ## Example
  ```elixir
  iex> Valvex.owned_games(123)
  ```
  """
  defdelegate owned_games(id), to: Player

  @doc """
  Returns recently played games from an user specified by user id.

  Pass `count` as a parameter for a maximun of games to return. Usually it's not much
  by default.

  ## Examples
  ```elixir
  iex> Valvex.recently_played(steamid: 123)
  iex > Valvex.recently_played(steamid: 123, count: 3)
  ```
  """
  def recently_played(steamid: steamid, count: count) do
    Player.recently_played(steamid: steamid, count: count)
  end

  def recently_played(steamid: steamid) do
    Player.recently_played(steamid: steamid)
  end

  @doc """
  Returns the original user id of game's owner if the game that the user
  is currently played is a shared one.

  Pass the id of the user to fetch and the id of the game.

  ## Example
  ```elixir
  iex> Valvex.shared(steamid: 123, gameid: 730)
  ```
  """
  def shared(steamid: steamid, gameid: gameid) do
    Player.shared_game(steamid: steamid, gameid: gameid)
  end

  @doc """
  Returns bans for a given player or list of players specified by user id.

  ## Examples
  ```elixir
  iex> Valvex.bans(123)
  iex> Valvex.bans([123, 456, 789])
  ```
  """
  defdelegate bans(ids), to: Player

  @doc """
  Returns inventory of an specific player.

  Options are:
  1. `count`: Number of items to retrieve from the inventory, maximum is 5000.
  2. `lang`: Language of the description returned.

  ## Examples
  ```elixir
  iex> Valvex.inventory(user_id, game_id)
  iex> Valvex.inventory(user_id, game_id, count: 500)
  ```
  """
  defdelegate inventory(id, gameid, opts \\ []), to: Player

  @doc """
  Returns schema for a given game specified by game id.

  ## Example
  ```elixir
  iex> Valvex.game_schema(730)
  ```
  """
  defdelegate game_schema(gameid), to: Game, as: :schema
end
|
lib/valvex.ex
| 0.863651
| 0.835886
|
valvex.ex
|
starcoder
|
defmodule Sanbase.Cryptocompare.HistoricalScheduler do
  @moduledoc ~s"""
  Schedule Oban jobs that export historical OHLCV data for base/quote asset
  pairs from Cryptocompare.

  For every date in a requested range one `HistoricalWorker` job is enqueued
  on the `:cryptocompare_historical_jobs_queue` queue, skipping dates for
  which a job was already recorded within the uniqueness period. The queue is
  defined as paused and is resumed from `init/1` only when the scraper is
  enabled via configuration, so the export can be switched on/off with an
  environment variable.
  """
  use GenServer

  import Sanbase.DateTimeUtils, only: [generate_dates_inclusive: 2]

  alias Sanbase.Cryptocompare.HistoricalWorker

  require Logger
  require Sanbase.Utils.Config, as: Config

  @oban_conf_name :oban_scrapers
  # A pair/date job counts as a duplicate if one was inserted in the last 60 days.
  # (Renamed from the misspelled @unique_peroid.)
  @unique_period 60 * 86_400
  @oban_queue :cryptocompare_historical_jobs_queue

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc "The Oban queue the historical jobs are enqueued on."
  def queue(), do: @oban_queue

  @doc "Resume processing of the historical jobs queue."
  def resume(), do: Oban.resume_queue(@oban_conf_name, queue: @oban_queue)

  @doc "Pause processing of the historical jobs queue."
  def pause(), do: Oban.pause_queue(@oban_conf_name, queue: @oban_queue)

  @doc "The Oban instance name used by this scheduler."
  def conf_name(), do: @oban_conf_name

  @impl true
  def init(_opts) do
    # In order to be able to stop the historical scraper via env variables
    # the queue is defined as paused and should be resumed from code.
    if enabled?() do
      Logger.info("[Cryptocompare Historical] Start exporting OHLCV historical data.")
      resume()
    end

    {:ok, %{}}
  end

  # NOTE(review): assumes the :enabled? config value is always a string such
  # as "true"/"false"; a missing value (nil) would crash here — confirm the
  # configuration always provides it.
  def enabled?(), do: Config.get(:enabled?) |> String.to_existing_atom()

  @doc """
  Enqueue one historical scrape job per date in `from..to` (inclusive) for
  the given base/quote pair, skipping dates that already have a recorded job.

  Returns `{:ok, result_map}` with the total/already-present/inserted job
  counts and the elapsed time in seconds.
  """
  def add_jobs(base_asset, quote_asset, from, to) do
    start_time = DateTime.utc_now()

    recorded_dates = get_pair_dates(base_asset, quote_asset, from, to) |> MapSet.new()
    dates = generate_dates_inclusive(from, to)
    dates_to_insert = Enum.reject(dates, &(&1 in recorded_dates))

    result = do_add_jobs_no_uniqueness_check(base_asset, quote_asset, dates_to_insert)

    result_map = %{
      jobs_count_total: length(dates),
      jobs_already_present_count: MapSet.size(recorded_dates),
      jobs_inserted: length(result),
      time_elapsed: DateTime.diff(DateTime.utc_now(), start_time, :second)
    }

    Logger.info("""
    [Cryptocompare Historical] Scheduled #{result_map.jobs_inserted} new jobs \
    for the #{base_asset}/#{quote_asset} pair. Took: #{result_map.time_elapsed}s.
    """)

    {:ok, result_map}
  end

  @doc """
  Return the dates in `from..to` for which a job for the given pair was
  already inserted — in either the live or the finished Oban jobs table —
  within the uniqueness period.
  """
  def get_pair_dates(base_asset, quote_asset, from, to) do
    query = """
    SELECT args->>'date', inserted_at FROM oban_jobs
    WHERE args->>'base_asset' = $1 AND args->>'quote_asset' = $2 AND queue = $3
    UNION ALL
    SELECT args->>'date', inserted_at FROM finished_oban_jobs
    WHERE args->>'base_asset' = $1 AND args->>'quote_asset' = $2 AND queue = $3
    """

    {:ok, %{rows: rows}} =
      Ecto.Adapters.SQL.query(Sanbase.Repo, query, [
        base_asset,
        quote_asset,
        to_string(@oban_queue)
      ])

    now = NaiveDateTime.utc_now()

    rows
    |> Enum.filter(fn [_, inserted_at] ->
      NaiveDateTime.diff(now, inserted_at, :second) <= @unique_period
    end)
    |> Enum.map(fn [date, _] -> Date.from_iso8601!(date) end)
    |> Enum.filter(fn date -> Timex.between?(date, from, to, inclusive: true) end)
  end

  # Build a HistoricalWorker job per date and bulk-insert them, without
  # re-checking uniqueness (the caller already filtered duplicates).
  defp do_add_jobs_no_uniqueness_check(base_asset, quote_asset, dates) do
    data =
      dates
      |> Enum.map(fn date ->
        HistoricalWorker.new(%{
          base_asset: base_asset,
          quote_asset: quote_asset,
          date: date
        })
      end)

    Oban.insert_all(@oban_conf_name, data)
  end
end
|
lib/sanbase/cryptocompare/historical_scheduler.ex
| 0.712932
| 0.481454
|
historical_scheduler.ex
|
starcoder
|
defmodule Gim do
  @moduledoc """
  #{Gim.MixProject.project()[:description]}

  ## Installation

  Add Gim to your application by adding `{:gim, "~> #{Mix.Project.config()[:version]}"}` to your list of dependencies in `mix.exs`:

  ```elixir
  def deps do
    [
      # ...
      {:gim, "~> #{Mix.Project.config()[:version]}"}
    ]
  end
  ```

  ## Usage

  Create schemas:

      defmodule MyApp.Author do
        use Gim.Schema

        schema do
          property(:name, index: :unique)
          property(:age, default: 0, index: true)
          has_edges(:author_of, MyApp.Book, reflect: :authored_by)
        end
      end

      defmodule MyApp.Book do
        use Gim.Schema

        schema do
          property(:title, index: :unique)
          property(:body)
          has_edge(:authored_by, MyApp.Author, reflect: :author_of)
        end
      end

      defmodule MyApp.Publisher do
        use Gim.Schema

        alias MyApp.Book

        schema do
          property(:name, index: :unique)
          has_edges(:publisher_of, Book, reflect: :published_by)
        end
      end

  Create a repo:

      defmodule MyApp.Repo do
        use Gim.Repo,
          types: [
            MyApp.Author,
            MyApp.Book,
            MyApp.Publisher
          ]
      end

  Use queries:

      iex> MyApp.Repo.fetch!(MyApp.Author, :name, "<NAME>")
      %MyApp.Author{__id__: 2, __repo__: MyApp.Repo, age: 0, author_of: [4, 3], name: "<NAME>"}

      iex> terry = MyApp.Repo.fetch!(MyApp.Author, :name, "<NAME>")
      iex> {:ok, [cs]} = MyApp.Publisher
      ...>   |> Gim.Query.query()
      ...>   |> Gim.Query.filter(name: &String.starts_with?(&1, "Colin"))
      ...>   |> MyApp.Repo.resolve()
      iex> %MyApp.Book{title: "The Colour of Magic"}
      ...>   |> Gim.Query.add_edge(:authored_by, terry)
      ...>   |> Gim.Query.add_edge(:published_by, cs)
      ...>   |> MyApp.Repo.insert!()
      %MyApp.Book{__id__: 5, __repo__: MyApp.Repo, authored_by: 2, body: nil, published_by: [3], title: "The Colour of Magic"}
  """
end
|
lib/gim.ex
| 0.651687
| 0.523725
|
gim.ex
|
starcoder
|
defmodule AWS.Imagebuilder do
@moduledoc """
EC2 Image Builder is a fully managed AWS service that makes it easier to
automate the creation, management, and deployment of customized, secure,
and up-to-date “golden” server images that are pre-installed and
pre-configured with software and settings to meet specific IT standards.
"""
@doc """
Cancels an in-progress image build. This operation can only be used on
images in a non-terminal state.
"""
def cancel_image_creation(client, input, options \\ []) do
  request(client, :put, "/CancelImageCreation", [], [], input, options, nil)
end
@doc """
Creates a new component that can be used to build, validate, test, and
assess your image.
"""
def create_component(client, input, options \\ []) do
  request(client, :put, "/CreateComponent", [], [], input, options, nil)
end
@doc """
Creates a new distribution configuration. Distribution configurations define
and configure the outputs of your pipeline.
"""
def create_distribution_configuration(client, input, options \\ []) do
  request(client, :put, "/CreateDistributionConfiguration", [], [], input, options, nil)
end
@doc """
Creates a new image, along with all of the configured output resources
defined in the distribution configuration.
"""
def create_image(client, input, options \\ []) do
  request(client, :put, "/CreateImage", [], [], input, options, nil)
end
@doc """
Creates a new image pipeline. Image pipelines enable you to automate the
creation and distribution of images.
"""
def create_image_pipeline(client, input, options \\ []) do
  request(client, :put, "/CreateImagePipeline", [], [], input, options, nil)
end
@doc """
Creates a new image recipe. Image recipes define how images are configured,
tested, and assessed.
"""
def create_image_recipe(client, input, options \\ []) do
  request(client, :put, "/CreateImageRecipe", [], [], input, options, nil)
end
@doc """
Creates a new infrastructure configuration. An infrastructure configuration
defines the environment in which your image will be built and tested.
"""
def create_infrastructure_configuration(client, input, options \\ []) do
  request(client, :put, "/CreateInfrastructureConfiguration", [], [], input, options, nil)
end
@doc """
Deletes a component build version.
"""
def delete_component(client, input, options \\ []) do
  # Lift componentBuildVersionArn out of the input and into the query string.
  {query, input} =
    AWS.Request.build_params([{"componentBuildVersionArn", "componentBuildVersionArn"}], input)

  request(client, :delete, "/DeleteComponent", query, [], input, options, nil)
end
@doc """
Deletes a distribution configuration.
"""
def delete_distribution_configuration(client, input, options \\ []) do
path_ = "/DeleteDistributionConfiguration"
headers = []
{query_, input} =
[
{"distributionConfigurationArn", "distributionConfigurationArn"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an image.
"""
def delete_image(client, input, options \\ []) do
path_ = "/DeleteImage"
headers = []
{query_, input} =
[
{"imageBuildVersionArn", "imageBuildVersionArn"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an image pipeline.
"""
def delete_image_pipeline(client, input, options \\ []) do
path_ = "/DeleteImagePipeline"
headers = []
{query_, input} =
[
{"imagePipelineArn", "imagePipelineArn"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an image recipe.
"""
def delete_image_recipe(client, input, options \\ []) do
path_ = "/DeleteImageRecipe"
headers = []
{query_, input} =
[
{"imageRecipeArn", "imageRecipeArn"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Deletes an infrastructure configuration.
"""
def delete_infrastructure_configuration(client, input, options \\ []) do
path_ = "/DeleteInfrastructureConfiguration"
headers = []
{query_, input} =
[
{"infrastructureConfigurationArn", "infrastructureConfigurationArn"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Gets a component object.
"""
def get_component(client, component_build_version_arn, options \\ []) do
path_ = "/GetComponent"
headers = []
query_ = []
query_ = if !is_nil(component_build_version_arn) do
[{"componentBuildVersionArn", component_build_version_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets a component policy.
"""
def get_component_policy(client, component_arn, options \\ []) do
path_ = "/GetComponentPolicy"
headers = []
query_ = []
query_ = if !is_nil(component_arn) do
[{"componentArn", component_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets a distribution configuration.
"""
def get_distribution_configuration(client, distribution_configuration_arn, options \\ []) do
path_ = "/GetDistributionConfiguration"
headers = []
query_ = []
query_ = if !is_nil(distribution_configuration_arn) do
[{"distributionConfigurationArn", distribution_configuration_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets an image.
"""
def get_image(client, image_build_version_arn, options \\ []) do
path_ = "/GetImage"
headers = []
query_ = []
query_ = if !is_nil(image_build_version_arn) do
[{"imageBuildVersionArn", image_build_version_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets an image pipeline.
"""
def get_image_pipeline(client, image_pipeline_arn, options \\ []) do
path_ = "/GetImagePipeline"
headers = []
query_ = []
query_ = if !is_nil(image_pipeline_arn) do
[{"imagePipelineArn", image_pipeline_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets an image policy.
"""
def get_image_policy(client, image_arn, options \\ []) do
path_ = "/GetImagePolicy"
headers = []
query_ = []
query_ = if !is_nil(image_arn) do
[{"imageArn", image_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets an image recipe.
"""
def get_image_recipe(client, image_recipe_arn, options \\ []) do
path_ = "/GetImageRecipe"
headers = []
query_ = []
query_ = if !is_nil(image_recipe_arn) do
[{"imageRecipeArn", image_recipe_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets an image recipe policy.
"""
def get_image_recipe_policy(client, image_recipe_arn, options \\ []) do
path_ = "/GetImageRecipePolicy"
headers = []
query_ = []
query_ = if !is_nil(image_recipe_arn) do
[{"imageRecipeArn", image_recipe_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Gets an infrastructure configuration.
"""
def get_infrastructure_configuration(client, infrastructure_configuration_arn, options \\ []) do
path_ = "/GetInfrastructureConfiguration"
headers = []
query_ = []
query_ = if !is_nil(infrastructure_configuration_arn) do
[{"infrastructureConfigurationArn", infrastructure_configuration_arn} | query_]
else
query_
end
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Imports a component and transforms its data into a component document.
"""
def import_component(client, input, options \\ []) do
path_ = "/ImportComponent"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Returns the list of component build versions for the specified semantic
version.
"""
def list_component_build_versions(client, input, options \\ []) do
path_ = "/ListComponentBuildVersions"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns the list of component build versions for the specified semantic
version.
"""
def list_components(client, input, options \\ []) do
path_ = "/ListComponents"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of distribution configurations.
"""
def list_distribution_configurations(client, input, options \\ []) do
path_ = "/ListDistributionConfigurations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of image build versions.
"""
def list_image_build_versions(client, input, options \\ []) do
path_ = "/ListImageBuildVersions"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of images created by the specified pipeline.
"""
def list_image_pipeline_images(client, input, options \\ []) do
path_ = "/ListImagePipelineImages"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of image pipelines.
"""
def list_image_pipelines(client, input, options \\ []) do
path_ = "/ListImagePipelines"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of image recipes.
"""
def list_image_recipes(client, input, options \\ []) do
path_ = "/ListImageRecipes"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns the list of images that you have access to.
"""
def list_images(client, input, options \\ []) do
path_ = "/ListImages"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns a list of infrastructure configurations.
"""
def list_infrastructure_configurations(client, input, options \\ []) do
path_ = "/ListInfrastructureConfigurations"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Returns the list of tags for the specified resource.
"""
def list_tags_for_resource(client, resource_arn, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :get, path_, query_, headers, nil, options, nil)
end
@doc """
Applies a policy to a component. We recommend that you call the RAM API
[CreateResourceShare](https://docs.aws.amazon.com/ram/latest/APIReference/API_CreateResourceShare.html)
to share resources. If you call the Image Builder API `PutComponentPolicy`,
you must also call the RAM API
[PromoteResourceShareCreatedFromPolicy](https://docs.aws.amazon.com/ram/latest/APIReference/API_PromoteResourceShareCreatedFromPolicy.html)
in order for the resource to be visible to all principals with whom the
resource is shared.
"""
def put_component_policy(client, input, options \\ []) do
path_ = "/PutComponentPolicy"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Applies a policy to an image. We recommend that you call the RAM API
[CreateResourceShare](https://docs.aws.amazon.com/ram/latest/APIReference/API_CreateResourceShare.html)
to share resources. If you call the Image Builder API `PutImagePolicy`, you
must also call the RAM API
[PromoteResourceShareCreatedFromPolicy](https://docs.aws.amazon.com/ram/latest/APIReference/API_PromoteResourceShareCreatedFromPolicy.html)
in order for the resource to be visible to all principals with whom the
resource is shared.
"""
def put_image_policy(client, input, options \\ []) do
path_ = "/PutImagePolicy"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Applies a policy to an image recipe. We recommend that you call the RAM API
[CreateResourceShare](https://docs.aws.amazon.com/ram/latest/APIReference/API_CreateResourceShare.html)
to share resources. If you call the Image Builder API
`PutImageRecipePolicy`, you must also call the RAM API
[PromoteResourceShareCreatedFromPolicy](https://docs.aws.amazon.com/ram/latest/APIReference/API_PromoteResourceShareCreatedFromPolicy.html)
in order for the resource to be visible to all principals with whom the
resource is shared.
"""
def put_image_recipe_policy(client, input, options \\ []) do
path_ = "/PutImageRecipePolicy"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Manually triggers a pipeline to create an image.
"""
def start_image_pipeline_execution(client, input, options \\ []) do
path_ = "/StartImagePipelineExecution"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Adds a tag to a resource.
"""
def tag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
query_ = []
request(client, :post, path_, query_, headers, input, options, nil)
end
@doc """
Removes a tag from a resource.
"""
def untag_resource(client, resource_arn, input, options \\ []) do
path_ = "/tags/#{URI.encode(resource_arn)}"
headers = []
{query_, input} =
[
{"tagKeys", "tagKeys"},
]
|> AWS.Request.build_params(input)
request(client, :delete, path_, query_, headers, input, options, nil)
end
@doc """
Updates a new distribution configuration. Distribution configurations
define and configure the outputs of your pipeline.
"""
def update_distribution_configuration(client, input, options \\ []) do
path_ = "/UpdateDistributionConfiguration"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates a new image pipeline. Image pipelines enable you to automate the
creation and distribution of images.
"""
def update_image_pipeline(client, input, options \\ []) do
path_ = "/UpdateImagePipeline"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
@doc """
Updates a new infrastructure configuration. An infrastructure configuration
defines the environment in which your image will be built and tested.
"""
def update_infrastructure_configuration(client, input, options \\ []) do
path_ = "/UpdateInfrastructureConfiguration"
headers = []
query_ = []
request(client, :put, path_, query_, headers, input, options, nil)
end
# The original @spec typed `method` as binary(), but every caller passes an
# atom (:get/:put/:post/:delete); `input` may be nil (all GET helpers pass
# nil), and `success_status_code` is nil for every endpoint in this module.
@spec request(AWS.Client.t(), atom(), binary(), list(), list(), map() | nil, list(), pos_integer() | nil) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
defp request(client, method, path, query, headers, input, options, success_status_code) do
  # Every operation in this module targets the "imagebuilder" service and
  # speaks JSON (application/x-amz-json-1.1).
  client = %{client | service: "imagebuilder"}
  host = build_host("imagebuilder", client)

  url =
    host
    |> build_url(path, client)
    |> add_query(query, client)

  additional_headers = [{"Host", host}, {"Content-Type", "application/x-amz-json-1.1"}]
  headers = AWS.Request.add_headers(additional_headers, headers)

  # Encode the payload before signing: SigV4 covers the body hash.
  payload = encode!(client, input)
  headers = AWS.Request.sign_v4(client, method, url, headers, payload)
  perform_request(client, method, url, payload, headers, options, success_status_code)
end
# Executes the signed HTTP request and normalizes the response into
# {:ok, decoded_body_or_nil, raw_response} | {:error, reason}.
defp perform_request(client, method, url, payload, headers, options, success_status_code) do
  case AWS.Client.request(client, method, url, payload, headers, options) do
    # Two `when` guards on one clause act as an OR: accept a generic 2xx when
    # no specific status was requested, OR the explicitly expected status.
    {:ok, %{status_code: status_code, body: body} = response}
    when is_nil(success_status_code) and status_code in [200, 202, 204]
    when status_code == success_status_code ->
      # `if` without else yields nil for an empty body instead of feeding ""
      # to the JSON decoder.
      body = if(body != "", do: decode!(client, body))
      {:ok, body, response}
    # Any other successful transport result is an unexpected HTTP status.
    {:ok, response} ->
      {:error, {:unexpected_response, response}}
    # Transport-level failures are passed through untouched.
    error = {:error, _reason} -> error
  end
end
# Resolves the request host. Clause order matters: a "local" region with an
# explicit endpoint wins, then plain "local" falls back to localhost, and
# only then is the real AWS host assembled from prefix, region and endpoint.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}) do
  endpoint
end
defp build_host(_endpoint_prefix, %{region: "local"}) do
  "localhost"
end
defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Assembles the scheme://host:port/path URL for a request from the
# client's protocol and port configuration.
defp build_url(host, path, %{proto: proto, port: port}) do
  proto <> "://" <> host <> ":" <> to_string(port) <> path
end
# Appends an encoded query string to `url`; an empty query list leaves the
# URL untouched.
defp add_query(url, [], _client), do: url

defp add_query(url, query, client) do
  "#{url}?#{encode!(client, query, :query)}"
end
# Serializes a payload via the client's configured encoder; defaults to
# JSON, with :query used for query-string encoding.
defp encode!(client, payload, format \\ :json) do
  AWS.Client.encode!(client, payload, format)
end
# Deserializes a JSON response body via the client's configured decoder.
defp decode!(client, payload) do
  AWS.Client.decode!(client, payload, :json)
end
end
|
lib/aws/generated/imagebuilder.ex
| 0.737158
| 0.442275
|
imagebuilder.ex
|
starcoder
|
defmodule Iteraptor.Iteraptable do
  @moduledoc """
  `use Iteraptor.Iteraptable` inside structs to make them both
  [`Enumerable`](http://elixir-lang.org/docs/stable/elixir/Enumerable.html) and
  [`Collectable`](http://elixir-lang.org/docs/stable/elixir/Collectable.html) and
  implement the [`Access`](https://hexdocs.pm/elixir/Access.html#content) behaviour:
  ## Usage
  Use the module within the struct of your choice and this struct will be
  automagically granted `Enumerable` and `Collectable` protocols implementations.
  `use Iteraptor.Iteraptable` accepts keyword parameter `skip: Access` or
  `skip: [Enumerable, Collectable]` which allows to implement a subset of
  protocols. Also it accepts keyword parameter `derive: MyProtocol` allowing
  to specify what protocol(s) implementations should be implicitly derived
  for this struct.
  """

  defmodule Unsupported do
    @moduledoc """
    Unsupported in applying `Iteraptor.Iteraptable`
    """
    defexception [:reason, :message]

    @doc false
    def exception(reason: reason) do
      message =
        "the given function must return a two-element tuple or :pop, got: #{inspect(reason)}"

      %Iteraptor.Iteraptable.Unsupported{message: message, reason: reason}
    end
  end

  # Quoted protocol/behaviour implementations, keyed by the module they
  # provide. `__using__/1` injects every value whose key is not excluded
  # via the `skip:` option.
  @codepieces %{
    Enumerable =>
      quote location: :keep do
        defimpl Enumerable, for: __MODULE__ do
          # Returning {:error, module} tells Enum to fall back to the
          # reduce-based slicing algorithm.
          # NOTE(review): `enumerable` is unused and will emit a compiler
          # warning; renaming to `_enumerable` would silence it — confirm.
          def slice(enumerable) do
            {:error, __MODULE__}
          end

          def count(map) do
            # do not count :__struct__
            {:ok, map |> Map.from_struct() |> map_size}
          end

          # The :__struct__ key is an implementation detail, never a member.
          def member?(_, {:__struct__, _}) do
            {:ok, false}
          end

          def member?(map, {key, value}) do
            {:ok, match?({:ok, ^value}, :maps.find(key, map))}
          end

          def member?(_, _) do
            {:ok, false}
          end

          def reduce(map, acc, fun) do
            do_reduce(map |> Map.from_struct() |> :maps.to_list(), acc, fun)
          end

          # Standard Enumerable.reduce state machine; clause order matters:
          # :halt and :suspend are checked before the list is consumed.
          defp do_reduce(_, {:halt, acc}, _fun), do: {:halted, acc}

          defp do_reduce(list, {:suspend, acc}, fun),
            do: {:suspended, acc, &do_reduce(list, &1, fun)}

          defp do_reduce([], {:cont, acc}, _fun), do: {:done, acc}
          defp do_reduce([h | t], {:cont, acc}, fun), do: do_reduce(t, fun.(h, acc), fun)
        end
      end,
    Collectable =>
      quote location: :keep do
        defimpl Collectable, for: __MODULE__ do
          # Collects {key, value} pairs back into the struct-as-map.
          def into(original) do
            {original,
             fn
               map, {:cont, {k, v}} -> :maps.put(k, v, map)
               map, :done -> map
               _, :halt -> :ok
             end}
          end
        end
      end,
    Access =>
      quote location: :keep do
        @behaviour Access

        @impl Access
        def fetch(term, key), do: Map.fetch(term, key)

        # "Deleting" a struct key resets it to nil (structs have a fixed
        # key set), so pop returns the value and a nil-ed struct.
        @impl Access
        def pop(term, key, default \\ nil),
          do: {get(term, key, default), delete(term, key)}

        @impl Access
        def get_and_update(term, key, fun) when is_function(fun, 1) do
          current = get(term, key)

          case fun.(current) do
            {get, update} -> {get, put(term, key, update)}
            :pop -> {current, delete(term, key)}
            other -> raise Unsupported, reason: other
          end
        end

        # get/3 was an Access callback before Elixir 1.7; only annotate it
        # with @impl on older compilers to avoid a warning on newer ones.
        if Version.compare(System.version(), "1.7.0") == :lt, do: @impl(Access)
        def get(term, key, default \\ nil) do
          case term do
            %{^key => value} -> value
            _ -> default
          end
        end

        # Map-update syntax raises on unknown keys, preserving the struct's
        # key set.
        def put(term, key, val), do: %{term | key => val}
        def delete(term, key), do: put(term, key, nil)
        defoverridable get: 3, put: 3, delete: 2
      end
  }

  # Injected Iteraptable protocol impl; only added when neither Enumerable
  # nor Collectable is skipped (see __using__/1).
  @iteraptable (quote location: :keep do
                  defimpl Iteraptable, for: __MODULE__ do
                    def type(_), do: __MODULE__
                    def name(_), do: Macro.underscore(__MODULE__)
                    def to_enumerable(term), do: term
                    def to_collectable(term), do: term
                  end
                end)

  @doc """
  Allows to enable iterating features on structs with `use Iteraptor.Iteraptable`
  ## Parameters
  - keyword parameter `opts`
    - `skip: Access` or `skip: [Enumerable, Collectable]` allows
      to implement a subset of protocols;
    - `derive: MyProtocol` allows to derive selected protocol implementation(s).
  """
  defmacro __using__(opts \\ []) do
    # Verify at the caller's compile time that the host module is a struct.
    checker = quote(location: :keep, do: @after_compile({Iteraptor.Utils, :struct_checker}))

    # Normalize the :derive option into zero or one @derive expressions.
    derive =
      opts[:derive]
      |> Macro.expand(__ENV__)
      |> case do
        nil -> []
        value when is_list(value) -> value
        value -> [value]
      end
      |> case do
        [] -> []
        protos -> [quote(location: :keep, do: @derive(unquote(protos)))]
      end

    skip =
      opts
      |> Keyword.get(:skip, [])
      |> Macro.expand(__ENV__)

    # Normalize :skip into a list of fully-qualified module names
    # (:all expands to every known code piece).
    excluded =
      skip
      |> case do
        :all -> Map.keys(@codepieces)
        value when is_list(value) -> value
        value -> [value]
      end
      |> Enum.map(fn value ->
        case value |> to_string() |> String.capitalize() do
          <<"Elixir.", _::binary>> -> value
          _ -> Module.concat([value])
        end
      end)

    # The Iteraptable impl is only injected when both Enumerable and
    # Collectable survive exclusion.
    init =
      case [Enumerable, Collectable] -- excluded do
        [Enumerable, Collectable] -> [checker, @iteraptable | derive]
        _ -> [checker | derive]
      end

    Enum.reduce(@codepieces, init, fn {type, ast}, acc ->
      if Enum.find(excluded, &(&1 == type)), do: acc, else: [ast | acc]
    end)
  end
end
|
lib/iteraptor/iteraptable.ex
| 0.883506
| 0.55254
|
iteraptable.ex
|
starcoder
|
defmodule Aoc.Day10 do
  @moduledoc """
  Advent of Code, day 10: balance bots handing microchips around.

  Parsing yields a map of device name ("botN"/"outputN") to the chips it
  currently holds, plus an ETS table `:instructions` mapping each bot to
  its low/high destinations. The simulation repeatedly lets every bot
  holding two chips hand them on until none does.
  """
  import Aoc.Utils

  # Puzzle entry points for parts 1 and 2.
  def run(1), do: solve1(get_input(10))
  def run(2), do: solve2(get_input(10))

  @doc """
  Part 1: runs the simulation. The bot that compares chips 17 and 61 is
  printed from within `zooming_around/1` as a side effect.
  """
  def solve1(input) do
    input
    |> read_instructions
    |> zooming_around()

    # Drop the named table so a subsequent run can recreate it.
    :ets.delete(:instructions)
  end

  @doc """
  Parses the puzzle input. "value" lines seed the returned device map;
  "bot ... gives" lines are stored in the `:instructions` ETS table keyed
  by bot name.
  """
  def read_instructions(t) do
    :ets.new(:instructions, [:set, :protected, :named_table])

    String.split(t, "\n")
    |> Enum.map(&String.split/1)
    |> Enum.reduce(%{}, fn instr, devs ->
      case instr do
        ["value", mc, "goes", "to", "bot", bot] ->
          Map.put(
            devs,
            "bot#{bot}",
            case Map.get(devs, "bot#{bot}") do
              nil -> [String.to_integer(mc)]
              b -> [String.to_integer(mc) | b]
            end
          )

        ["bot", bot, "gives", "low", "to", l_dest, l_n, "and", "high", "to", h_dest, h_n] ->
          :ets.insert(
            :instructions,
            {"bot" <> bot, [low_to: l_dest <> l_n, high_to: h_dest <> h_n]}
          )

          devs
      end
    end)
  end

  @doc """
  Runs the hand-off loop until no device holds two chips and returns the
  final device map. Prints the name of the bot that compares 17 and 61.
  """
  def zooming_around(devs) do
    two_mcs =
      Enum.filter(devs, fn
        {_, [_, _]} -> true
        {_, _} -> false
      end)

    if two_mcs == [] do
      devs
    else
      two_mcs
      |> Enum.reduce(devs, fn {bot, [mc1, mc2]}, ds ->
        if [17, 61] == [mc1, mc2] or [61, 17] == [mc1, mc2], do: IO.inspect(bot)
        low = min(mc1, mc2)
        high = max(mc1, mc2)
        [{_, [low_to: low_to, high_to: high_to]}] = :ets.lookup(:instructions, bot)

        ds =
          case Map.get(ds, low_to) do
            nil -> Map.put(ds, low_to, [low])
            b -> Map.put(ds, low_to, [low | b])
          end

        ds =
          case Map.get(ds, high_to) do
            nil -> Map.put(ds, high_to, [high])
            b -> Map.put(ds, high_to, [high | b])
          end

        # Both chips handed on: this bot is now empty.
        Map.put(ds, bot, [])
      end)
      |> zooming_around
    end
  end

  @doc """
  Part 2: prints the product of the single chips in outputs 0, 1 and 2.
  """
  def solve2(input) do
    devs =
      input
      |> read_instructions
      |> zooming_around()

    [mc0] = Map.get(devs, "output0")
    [mc1] = Map.get(devs, "output1")
    [mc2] = Map.get(devs, "output2")
    IO.inspect(mc0 * mc1 * mc2)
    :ets.delete(:instructions)
  end
end
|
lib/aoc/day10.ex
| 0.598547
| 0.504516
|
day10.ex
|
starcoder
|
defmodule ICouch.View do
  @moduledoc """
  Module to handle views in CouchDB.
  View structs should not be created or manipulated directly, please use
  `ICouch.open_view/3` or `ICouch.open_view!/3`.
  The view struct implements the enumerable protocol for easy handling with
  Elixir's `Enum` module.
  A view can be in a "fetched" state or in an "unfetched" state which can be
  tested with the `fetched?/1` function and changed with the `fetch/1`,
  `fetch!/1` and `unfetch/1` function.
  Note that iterating over an unfetched view will create an intermediate fetched
  version.
  """
  use ICouch.RequestError

  # `rows` is nil while unfetched; a list (possibly empty) once fetched.
  defstruct [:db, :ddoc, :name, :params, :rows, :total_rows, :update_seq]

  @type t :: %__MODULE__{
    db: ICouch.DB.t,
    ddoc: String.t | nil,
    name: String.t,
    params: map,
    rows: [map] | nil,
    total_rows: integer | nil,
    update_seq: integer | String.t | nil
  }

  @type view_option_key :: :conflicts | :descending | :endkey | :endkey_docid |
    :group | :group_level | :include_docs | :attachments | :att_encoding_info |
    :inclusive_end | :key | :keys | :limit | :reduce | :skip | :stale |
    :startkey | :startkey_docid | :update_seq

  @type view_option_value :: boolean | String.t | integer | [String.t] | :ok |
    :update_after

  @doc """
  Fetches all rows of `view`, turning it into a "fetched view".
  """
  @spec fetch(view :: t) :: {:ok, t} | {:error, term}
  def fetch(%__MODULE__{params: params} = view) do
    case send_req(view) do
      {:ok, %{"rows" => rows} = result} ->
        # When include_docs was requested, wrap each embedded doc in an
        # ICouch.Document; rows whose "doc" is nil are passed through as-is.
        rows = if params[:include_docs] do
          Enum.map(rows, fn
            %{"doc" => doc} = row when doc != nil ->
              %{row | "doc" => ICouch.Document.from_api!(doc)}
            other ->
              other
          end)
        else
          rows
        end
        {:ok, %{view | rows: rows, total_rows: result["total_rows"], update_seq: result["update_seq"]}}
      {:ok, _} ->
        # A success response without a "rows" key is not a valid view result.
        {:error, :invalid_response}
      other ->
        other
    end
  end

  @doc """
  Same as `fetch/1` but returns the fetched view directly on success or raises
  an error on failure.
  """
  @spec fetch!(view :: t) :: t
  def fetch!(view),
    do: req_result_or_raise! fetch(view)

  @doc """
  Resets `view` back to the "unfetched" state.
  """
  @spec unfetch(view :: t) :: t
  def unfetch(%__MODULE__{} = view),
    do: %{view | rows: nil}

  @doc """
  Tests whether `view` is in "fetched" state or not.
  """
  @spec fetched?(view :: t) :: boolean
  def fetched?(%__MODULE__{rows: rows}) when is_list(rows),
    do: true
  def fetched?(%__MODULE__{}),
    do: false

  @doc """
  Replaces `view`'s options with the given ones.
  This will also set the view back to the "unfetched" state.
  """
  @spec set_options(view :: t, options :: [ICouch.open_view_option]) :: t
  def set_options(%__MODULE__{} = view, options),
    do: %{view | params: Map.new(options), rows: nil}

  @doc """
  Adds or updates a single option in `view`.
  This will also set the view back to the "unfetched" state.
  """
  @spec put_option(view :: t, key :: view_option_key, value :: view_option_value) :: t
  def put_option(%__MODULE__{params: params} = view, key, value),
    do: %{view | params: Map.put(params, key, value), rows: nil}

  @doc """
  Deletes an option in `view`.
  This will also set the view back to the "unfetched" state.
  Returns `view` unchanged if the option was not set (and it already was
  "unfetched").
  """
  @spec delete_option(view :: t, key :: view_option_key) :: t
  def delete_option(%__MODULE__{params: params, rows: rows} = view, key) do
    if not Map.has_key?(params, key) and rows == nil do
      view
    else
      %{view | params: Map.delete(params, key), rows: nil}
    end
  end

  @doc """
  Returns the value of an option in `view` or `nil` if it was not set.
  """
  @spec get_option(view :: t, key :: view_option_key) :: view_option_value | nil
  def get_option(%__MODULE__{params: params}, key),
    do: Map.get(params, key)

  @doc """
  Internal function to build a db endpoint.
  """
  @spec db_endpoint(view :: t) :: {String.t, map}
  def db_endpoint(%__MODULE__{ddoc: ddoc, name: name, params: params}),
    do: db_endpoint(ddoc, name, params)

  # CouchDB requires a :keys filter to be sent as a POST body rather than a
  # query parameter; the first clause handles that case.
  defp send_req(%__MODULE__{db: db, ddoc: ddoc, name: name, params: %{keys: keys} = params}),
    do: ICouch.DB.send_req(db, db_endpoint(ddoc, name, Map.delete(params, :keys)), :post, %{"keys" => keys})
  defp send_req(%__MODULE__{db: db, ddoc: ddoc, name: name, params: params}),
    do: ICouch.DB.send_req(db, db_endpoint(ddoc, name, params))

  # The built-in _all_docs view lives at the database root, not under a
  # design document.
  defp db_endpoint(nil, "_all_docs", params),
    do: {"_all_docs", params}
  defp db_endpoint(ddoc, name, params),
    do: {"_design/#{ddoc}/_view/#{name}", params}
end
defimpl Enumerable, for: ICouch.View do
  # Counting an unfetched view fetches it first, with :include_docs stripped
  # so the count does not pay for document bodies.
  def count(%ICouch.View{params: params, rows: nil} = view),
    do: count(ICouch.View.fetch!(%{view | params: Map.delete(params, :include_docs)}))
  def count(%ICouch.View{rows: rows}),
    do: {:ok, length(rows)}
  # No cheap membership/slicing: let Enum fall back to reduce-based defaults.
  def member?(_view, _element),
    do: {:error, __MODULE__}
  def slice(_),
    do: {:error, __MODULE__}
  # Standard reduce state machine; clause order is significant: :halt and
  # :suspend are handled before rows are consumed, and the nil-rows clause
  # last triggers an implicit fetch for unfetched views.
  def reduce(_, {:halt, acc}, _fun),
    do: {:halted, acc}
  def reduce(%ICouch.View{rows: rest_rows}, {:suspend, acc}, fun),
    do: {:suspended, acc, &reduce(rest_rows, &1, fun)}
  def reduce(%ICouch.View{rows: []}, {:cont, acc}, _fun),
    do: {:done, acc}
  def reduce(%ICouch.View{rows: [h | t]} = view, {:cont, acc}, fun),
    do: reduce(%{view | rows: t}, fun.(h, acc), fun)
  def reduce(%ICouch.View{rows: nil} = view, acc, fun),
    do: ICouch.View.fetch!(view) |> reduce(acc, fun)
end
|
lib/icouch/view.ex
| 0.860721
| 0.457985
|
view.ex
|
starcoder
|
defmodule DemonSpiritGame.Card do
@moduledoc """
Provides a structure to hold a card containing moves that
a player may use. Also contains a hardcoded list of all cards.
id: Hardcoded integer.
name: String, name of card.
moves: List of {int, int} tuples, representing moves.
{1, 1} is the ability to move the piece up and right one.
color: Atom, color of the card. Not used in gameplay.
Blue is left-oriented, red is right-oriented, green is balanced.
"""
alias DemonSpiritGame.{Card}
defstruct id: nil, name: nil, moves: [], color: nil
@doc """
by_name/1: Retrieve a card by name.
Input: A String of the name to search for.
Output: Either {:ok, card} or {:error, nil}
"""
@spec by_name(String.t()) :: {:ok, %Card{}} | {:error, nil}
def by_name(name) do
card = cards() |> Enum.filter(fn c -> c.name == name end)
case length(card) do
1 -> {:ok, Enum.at(card, 0)}
0 -> {:error, nil}
end
end
@doc """
flip/1: Return a card with all of the moves flipped.
That is, a {2, 1} will become a {-2, -1}.
This is needed when black is playing, since by default, all
moves specified are from white's perspective.
Input: %Card
Output: %Card with moves flipped.
"""
@spec flip(%Card{}) :: %Card{}
def flip(card) do
flipped_moves = card.moves |> Enum.map(fn {x, y} -> {-x, -y} end)
%{card | moves: flipped_moves}
end
@doc """
cards/0: Provides every card available in the game: the base set plus
both expansions. A random set of 5 should be chosen when actually
playing the game.
"""
@spec cards() :: nonempty_list(%Card{})
def cards do
  base_cards() ++ exp1_cards() ++ exp2_cards()
end
@doc """
cards/0: Provides all 16 cards that may be used in the game.
A random set of 5 should be chosen when actually playing the game.
"""
@spec base_cards() :: nonempty_list(%Card{})
def base_cards do
[
%Card{
id: 1,
name: "Panther",
moves: [{0, 2}, {0, -1}],
color: :green
},
%Card{
id: 2,
name: "Crustacean",
moves: [{0, 1}, {-2, 0}, {2, 0}],
color: :green
},
%Card{
id: 3,
name: "Wukong",
moves: [{-1, 1}, {1, 1}, {-1, -1}, {1, -1}],
color: :green
},
%Card{
id: 4,
name: "Heron",
moves: [{0, 1}, {-1, -1}, {1, -1}],
color: :green
},
%Card{
id: 5,
name: "Drake",
moves: [{-2, 1}, {2, 1}, {-1, -1}, {1, -1}],
color: :green
},
%Card{
id: 6,
name: "Pachyderm",
moves: [{1, 0}, {-1, 0}, {1, 1}, {-1, 1}],
color: :green
},
%Card{
id: 7,
name: "Hierodula",
moves: [{-1, 1}, {1, 1}, {0, -1}],
color: :green
},
%Card{
id: 8,
name: "<NAME>",
moves: [{0, 1}, {-1, 0}, {1, 0}],
color: :green
},
%Card{
id: 9,
name: "Toad",
moves: [{-2, 0}, {-1, 1}, {1, -1}],
color: :blue
},
%Card{
id: 10,
name: "Chen",
moves: [{-1, 1}, {-1, 0}, {1, 0}, {1, -1}],
color: :blue
},
%Card{
id: 11,
name: "Pony",
moves: [{-1, 0}, {0, 1}, {0, -1}],
color: :blue
},
%Card{
id: 12,
name: "Moray",
moves: [{-1, 1}, {-1, -1}, {1, 0}],
color: :blue
},
%Card{
id: 13,
name: "Hare",
moves: [{2, 0}, {1, 1}, {-1, -1}],
color: :red
},
%Card{
id: 14,
name: "Cockerel",
moves: [{1, 1}, {1, 0}, {-1, 0}, {-1, -1}],
color: :red
},
%Card{
id: 15,
name: "Steer",
moves: [{1, 0}, {0, 1}, {0, -1}],
color: :red
},
%Card{
id: 16,
name: "Python",
moves: [{1, 1}, {1, -1}, {-1, 0}],
color: :red
}
]
end
@doc """
exp1_cards/0: Provides the cards of the first expansion set.
Ids 21 and 27 are intentionally commented out below.
"""
@spec exp1_cards() :: nonempty_list(%Card{})
def exp1_cards do
  [
    %Card{
      id: 17,
      name: "Camelopard",
      moves: [{0, -1}, {-2, 1}, {2, 1}],
      color: :green
    },
    %Card{
      id: 18,
      name: "Qilin",
      moves: [{1, 2}, {-1, 2}, {0, -2}],
      color: :green
    },
    %Card{
      id: 19,
      name: "Hawk",
      moves: [{-2, 0}, {2, 0}, {-1, 1}, {1, 1}],
      color: :green
    },
    # %Card{
    #   id: 21,
    #   name: "Vulpa",
    #   moves: [{1, 1}, {1, 0}, {1, -1}],
    #   color: :red
    # },
    %Card{
      id: 22,
      name: "<NAME>",
      moves: [{0, 1}, {1, 1}, {-1, -1}],
      color: :red
    },
    %Card{
      id: 23,
      name: "Threadsnake",
      moves: [{0, 1}, {2, 0}, {-1, -1}],
      color: :red
    },
    %Card{
      id: 24,
      name: "Rodent",
      moves: [{1, 0}, {0, 1}, {-1, -1}],
      color: :red
    },
    %Card{
      id: 25,
      name: "<NAME>",
      moves: [{0, 1}, {2, 1}, {-1, -1}],
      color: :red
    },
    %Card{
      id: 26,
      name: "Marten",
      moves: [{1, 1}, {-2, 0}, {-1, -1}],
      color: :red
    },
    # %Card{
    #   id: 27,
    #   name: "Canine",
    #   moves: [{-1, 0}, {-1, 1}, {-1, -1}],
    #   color: :blue
    # },
    %Card{
      id: 28,
      name: "Ursidae",
      moves: [{0, 1}, {-1, 1}, {1, -1}],
      color: :blue
    },
    %Card{
      id: 29,
      name: "Boa",
      moves: [{-2, 0}, {0, 1}, {1, -1}],
      color: :blue
    },
    %Card{
      id: 30,
      name: "Bandicoot",
      moves: [{-1, 0}, {0, 1}, {1, -1}],
      color: :blue
    },
    %Card{
      id: 31,
      name: "Lizard",
      moves: [{0, 1}, {-2, 1}, {1, -1}],
      color: :blue
    },
    %Card{
      id: 32,
      name: "Kawauso",
      moves: [{-1, 1}, {1, -1}, {2, 0}],
      color: :blue
    }
  ]
end
@doc """
exp2_cards/0: Provides the cards of the second expansion set.
Ids 35 and 36 are intentionally commented out below.
"""
@spec exp2_cards() :: nonempty_list(%Card{})
def exp2_cards do
  # Green - Default
  # Blue - Left
  # Red - Right
  [
    %Card{
      id: 33,
      name: "Wasp",
      moves: [{-1, 1}, {-1, -1}, {1, 2}, {1, -2}],
      color: :red
    },
    %Card{
      id: 34,
      name: "Bee",
      moves: [{1, 1}, {1, -1}, {-1, 2}, {-1, -2}],
      color: :blue
    },
    # %Card{
    #   id: 35,
    #   name: "Mole",
    #   moves: [{2, 0}, {2, -1}, {2, 1}],
    #   color: :red
    # },
    # %Card{
    #   id: 36,
    #   name: "Gopher",
    #   moves: [{-2, 0}, {-2, -1}, {-2, 1}],
    #   color: :blue
    # },
    %Card{
      id: 37,
      name: "Duck",
      moves: [{0, 1}, {1, 0}, {2, 2}],
      color: :red
    },
    %Card{
      id: 38,
      name: "Swan",
      moves: [{0, 1}, {-1, 0}, {-2, 2}],
      color: :blue
    },
    %Card{
      id: 39,
      name: "<NAME>",
      moves: [{0, 2}, {1, 1}, {-1, 1}],
      color: :green
    },
    %Card{
      id: 40,
      name: "Dolphin",
      moves: [{0, 1}, {-1, 0}, {1, 2}],
      color: :red
    },
    %Card{
      id: 41,
      name: "Shark",
      moves: [{0, 1}, {1, 0}, {-1, 2}],
      color: :blue
    },
    %Card{
      id: 42,
      name: "Eagle",
      moves: [{2, 2}, {-2, 2}, {0, -1}],
      color: :green
    },
    %Card{
      id: 43,
      name: "Piglet",
      moves: [{0, 1}, {-1, 0}, {1, 0}],
      color: :green
    },
    %Card{
      id: 44,
      name: "Warthog",
      moves: [{0, 1}, {0, -1}, {-1, 0}, {1, 0}],
      color: :green
    }
  ]
end
end
|
apps/demon_spirit_game/lib/demon_spirit_game/card.ex
| 0.705988
| 0.475544
|
card.ex
|
starcoder
|
defmodule OpenMovieApi.DbSetup do
  @moduledoc """
  Provides functionalities needed to setup movie database in mnesia
  :mnesia setup process includes
  stopping available :mnesia applications
  creating schema
  creating tables
  starting mnesia
  """
  require Logger

  # Attribute lists for each table; mnesia uses the first attribute as key.
  @basics_table [:tconst, :type, :title, :isAdult, :startYear, :endYear, :runtime, :genres]
  @ratings_table [:tconst, :rate, :votes]
  @akas_table [:tconst, :region, :lang]
  @episode_table [:tconst, :parent, :season, :episode]
  @crew_table [:tconst, :directors, :writers]
  @principals_table [:nconst, :category, :job, :characters]

  # Fragmentation settings shared by every table.
  @frag_props [{:node_pool, [node()]}, {:n_fragments, 16}, {:n_disc_copies, 1}]

  @doc """
  Recreates the schema and all tables from scratch. Destructive: any
  existing schema on this node is deleted first.
  """
  def run do
    prepare()
    create()
  end

  # Stops mnesia, wipes and recreates the on-disk schema, then restarts it.
  defp prepare do
    :mnesia.stop()
    :mnesia.delete_schema([node()])
    :mnesia.create_schema([node()])
    :mnesia.start()
  end

  # Creates every application table. The shared storage options previously
  # duplicated six times now live in create_table/3.
  defp create do
    create_table(Basics, @basics_table, [:isAdult])
    create_table(Ratings, @ratings_table, [:rate, :votes])
    create_table(Akas, @akas_table)
    create_table(Episodes, @episode_table, [:parent])
    create_table(Crew, @crew_table)
    create_table(Principals, @principals_table, [:category])
  end

  # Creates one fragmented, disc-backed, ordered_set, majority-write table.
  # `indexes` lists secondary index attributes; the :index option is omitted
  # entirely when empty, matching the previous per-table option lists.
  defp create_table(name, attributes, indexes \\ []) do
    opts = [
      {:disc_copies, [node()]},
      {:frag_properties, @frag_props},
      {:type, :ordered_set},
      {:majority, true},
      {:attributes, attributes}
    ]

    opts = if indexes == [], do: opts, else: opts ++ [{:index, indexes}]
    :mnesia.create_table(name, opts)
  end
end
|
lib/open_movie_api/db_setup.ex
| 0.659295
| 0.563318
|
db_setup.ex
|
starcoder
|
defmodule Insights.Adapter do
  @moduledoc """
  This module specifies the adapter API that an adapter is required to
  implement.
  """

  # NOTE: converted from the long-deprecated (and since removed) `Behaviour`
  # module / `defcallback` macros to the standard `@callback` /
  # `@macrocallback` attributes. Implementors still declare
  # `@behaviour Insights.Adapter`, so the contract is unchanged.

  @type t :: module
  @type source :: {table :: binary, model :: atom}

  @typep adapter :: Insights.Server.t
  @typep collection :: String.t
  @typep query :: String.t
  @typep params :: Keyword.t
  @typep options :: Keyword.t

  @doc """
  The callback invoked in case the adapter needs to inject code.
  """
  @macrocallback __before_compile__(Macro.Env.t) :: Macro.t

  @doc """
  Starts any connection pooling or supervision and return `{:ok, pid}`
  or just `:ok` if nothing needs to be done.

  Returns `{:error, {:already_started, pid}}` if the adapter already
  started or `{:error, term}` in case anything else goes wrong.

  ## Adapter start

  Because some Insights tasks like migration may run without starting
  the parent application, it is recommended that start_link in
  adapters make sure the adapter application is started by calling
  `Application.ensure_all_started/1`.
  """
  @callback start_link(adapter, options) ::
              {:ok, pid} | :ok | {:error, {:already_started, pid}} | {:error, term}

  @doc """
  Fetches all result from the data store based in the given query
  """
  @callback query(adapter, collection, query, params, options) :: [[term]] | no_return

  @doc """
  Fetches all results from the data store based on the given query.

  It receives a preprocess function that should be invoked for each
  selected field in the query result in order to convert them to the
  expected Insights type.
  """
  @callback all(adapter, query, options) :: [[term]] | no_return

  @doc """
  Count all results from the data store based on the given query.

  It receives a preprocess function that should be invoked for each
  selected field in the query result in order to convert them to the
  expected Insights type.
  """
  @callback count(adapter, query, options) :: {:ok, Keyword.t} | {:error, Keyword.t} | no_return

  @doc """
  Inserts a single new model in the data store.
  """
  @callback insert(adapter, query, source, options) ::
              {:ok, Keyword.t} | no_return

  @doc """
  Updates a single model with the given filters.

  While `filters` can be any record column, it is expected that
  at least the primary key (or any other key that uniquely
  identifies an existing record) to be given as filter. Therefore,
  in case there is no record matching the given filters,
  `{:error, :stale}` is returned.
  """
  @callback update(adapter, query, source, options) ::
              {:ok, Keyword.t} | {:error, :stale} | no_return

  @doc """
  Deletes a single model with the given filters.

  While `filters` can be any record column, it is expected that
  at least the primary key (or any other key that uniquely
  identifies an existing record) to be given as filter. Therefore,
  in case there is no record matching the given filters,
  `{:error, :stale}` is returned.
  """
  @callback delete(adapter, query, options) ::
              {:ok, Keyword.t} | {:error, :stale} | no_return
end
|
lib/insights/adapter.ex
| 0.912163
| 0.614871
|
adapter.ex
|
starcoder
|
defmodule Descisionex.AnalyticHierarchy do
  @moduledoc """
  https://en.wikipedia.org/wiki/Analytic_hierarchy_process
  """

  alias Descisionex.{AnalyticHierarchy, Helper}

  # All intermediate state of an AHP run lives in this struct; each pipeline
  # step (set_* / normalize_* / calculate_*) fills in one field. Matrices are
  # lists of row lists; weights are column vectors (single-element row lists).
  defstruct comparison_matrix: [],
            normalized_comparison_matrix: [],
            criteria_weights: [],
            criteria_num: 0,
            alternatives: [],
            alternatives_matrix: %{},
            alternatives_weights: [],
            alternatives_weights_by_criteria: [],
            alternatives_num: 0,
            criteria: []

  # Guard clause: an empty criteria list is a caller error.
  def set_criteria(%AnalyticHierarchy{} = _data, []) do
    raise ArgumentError, message: "Criteria must be not empty!"
  end

  @doc """
  Set criteria for analytic hierarchy.

  ## Examples

      iex> %Descisionex.AnalyticHierarchy{} |> Descisionex.AnalyticHierarchy.set_criteria([])
      ** (ArgumentError) Criteria must be not empty!

      iex> %Descisionex.AnalyticHierarchy{} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"])
      %Descisionex.AnalyticHierarchy{
        alternatives: [],
        alternatives_matrix: %{},
        alternatives_num: 0,
        alternatives_weights: [],
        alternatives_weights_by_criteria: [],
        comparison_matrix: [],
        criteria: ["some", "criteria"],
        criteria_num: 2,
        criteria_weights: [],
        normalized_comparison_matrix: []
      }

  """
  def set_criteria(%AnalyticHierarchy{} = data, criteria) do
    # criteria_num is cached so later steps can validate without recounting.
    data |> Map.put(:criteria, criteria) |> Map.put(:criteria_num, Enum.count(criteria))
  end

  # Guard clause: an empty alternatives list is a caller error.
  def set_alternatives(%AnalyticHierarchy{} = _data, []) do
    raise ArgumentError, message: "Alternatives must be not empty!"
  end

  @doc """
  Set alternatives for analytic hierarchy.

  ## Examples

      iex> %Descisionex.AnalyticHierarchy{} |> Descisionex.AnalyticHierarchy.set_alternatives([])
      ** (ArgumentError) Alternatives must be not empty!

      iex> %Descisionex.AnalyticHierarchy{} |> Descisionex.AnalyticHierarchy.set_alternatives(["some", "alternatives"])
      %Descisionex.AnalyticHierarchy{
        alternatives: ["some", "alternatives"],
        alternatives_matrix: %{},
        alternatives_num: 2,
        alternatives_weights: [],
        alternatives_weights_by_criteria: [],
        comparison_matrix: [],
        criteria: [],
        criteria_num: 0,
        criteria_weights: [],
        normalized_comparison_matrix: []
      }

  """
  def set_alternatives(%AnalyticHierarchy{} = data, alternatives) do
    data
    |> Map.put(:alternatives, alternatives)
    |> Map.put(:alternatives_num, Enum.count(alternatives))
  end

  @doc """
  Set alternatives matrix for analytic hierarchy (criteria must be set!).

  ## Examples

      iex> %Descisionex.AnalyticHierarchy{} |> Descisionex.AnalyticHierarchy.set_alternatives_matrix([[1, 2], [3, 4]])
      ** (ArgumentError) Criteria must be set!

      iex> %Descisionex.AnalyticHierarchy{} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.set_alternatives_matrix([[1, 2], [3, 4]])
      %Descisionex.AnalyticHierarchy{
        alternatives: [],
        alternatives_matrix: %{"criteria" => [3, 4], "some" => [1, 2]},
        alternatives_num: 0,
        alternatives_weights: [],
        alternatives_weights_by_criteria: [],
        comparison_matrix: [],
        criteria: ["some", "criteria"],
        criteria_num: 2,
        criteria_weights: [],
        normalized_comparison_matrix: []
      }

  """
  def set_alternatives_matrix(%AnalyticHierarchy{} = data, matrix) do
    if data.criteria_num == 0, do: raise(ArgumentError, message: "Criteria must be set!")

    # Pair each matrix row with the criterion at the same index, producing
    # %{criterion => row}. Extra rows (beyond the criteria count) are ignored.
    tagged =
      Enum.map(Enum.with_index(data.criteria), fn {criteria, index} ->
        {criteria, Enum.at(matrix, index)}
      end)
      |> Enum.into(%{})

    data |> Map.put(:alternatives_matrix, tagged)
  end

  @doc """
  Normalizes comparison matrix for analytic hierarchy (criteria must be set, such as comparison matrix!).

  ## Examples

      iex> %Descisionex.AnalyticHierarchy{} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.normalize_comparison_matrix()
      ** (ArgumentError) Comparison matrix must be set!

      iex> %Descisionex.AnalyticHierarchy{comparison_matrix: [[1, 2], [3, 4]]} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.normalize_comparison_matrix()
      %Descisionex.AnalyticHierarchy{
        alternatives: [],
        alternatives_matrix: %{},
        alternatives_num: 0,
        alternatives_weights: [],
        alternatives_weights_by_criteria: [],
        comparison_matrix: [[1, 2], [3, 4]],
        criteria: ["some", "criteria"],
        criteria_num: 2,
        criteria_weights: [],
        normalized_comparison_matrix: [[0.25, 0.333], [0.75, 0.667]]
      }

  """
  def normalize_comparison_matrix(%AnalyticHierarchy{} = data) do
    size = data.criteria_num
    if size == 0, do: raise(ArgumentError, message: "Criteria must be set!")

    if data.comparison_matrix == [],
      do: raise(ArgumentError, message: "Comparison matrix must be set!")

    # Normalization itself is delegated to Descisionex.Helper (defined
    # elsewhere in this project).
    normalized = Helper.normalize(data.comparison_matrix, size)
    Map.put(data, :normalized_comparison_matrix, normalized)
  end

  @doc """
  Calculates weights for normalized comparison matrix for analytic hierarchy (criteria must be set, such as comparison matrix!).

  ## Examples

      iex> %Descisionex.AnalyticHierarchy{comparison_matrix: [[1, 2], [3, 4]]} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.calculate_criteria_weights()
      ** (ArgumentError) Comparison matrix must be normalized!

      iex> %Descisionex.AnalyticHierarchy{comparison_matrix: [[1, 2], [3, 4]]} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.normalize_comparison_matrix() |> Descisionex.AnalyticHierarchy.calculate_criteria_weights()
      %Descisionex.AnalyticHierarchy{
        alternatives: [],
        alternatives_matrix: %{},
        alternatives_num: 0,
        alternatives_weights: [],
        alternatives_weights_by_criteria: [],
        comparison_matrix: [[1, 2], [3, 4]],
        criteria: ["some", "criteria"],
        criteria_num: 2,
        criteria_weights: [[0.291], [0.709]],
        normalized_comparison_matrix: [[0.25, 0.333], [0.75, 0.667]]
      }

  """
  def calculate_criteria_weights(%AnalyticHierarchy{} = data) do
    size = data.criteria_num
    if size == 0, do: raise(ArgumentError, message: "Criteria must be set!")

    if data.normalized_comparison_matrix == [],
      do: raise(ArgumentError, message: "Comparison matrix must be normalized!")

    criteria_weights = Helper.calculate_weights(data.normalized_comparison_matrix, size)
    Map.put(data, :criteria_weights, criteria_weights)
  end

  @doc """
  Calculates alternatives weights by criteria for analytic hierarchy (criteria must be set, such as comparison matrix!).

  ## Examples

      iex> %Descisionex.AnalyticHierarchy{comparison_matrix: [[1, 2], [3, 4]]} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.calculate_alternatives_weights_by_criteria()
      ** (ArgumentError) Alternatives matrix must be set!

      iex> %Descisionex.AnalyticHierarchy{} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.set_alternatives(["some", "alternatives"]) |> Descisionex.AnalyticHierarchy.set_alternatives_matrix([[[1, 2, 3]], [[4, 5, 6]]]) |> Descisionex.AnalyticHierarchy.calculate_alternatives_weights_by_criteria()
      %Descisionex.AnalyticHierarchy{
        alternatives: ["some", "alternatives"],
        alternatives_matrix: %{"criteria" => [[4, 5, 6]], "some" => [[1, 2, 3]]},
        alternatives_num: 2,
        alternatives_weights: [],
        alternatives_weights_by_criteria: [[3.0, 3.0]],
        comparison_matrix: [],
        criteria: ["some", "criteria"],
        criteria_num: 2,
        criteria_weights: [],
        normalized_comparison_matrix: []
      }

  """
  def calculate_alternatives_weights_by_criteria(%AnalyticHierarchy{} = data) do
    if data.criteria_num == 0, do: raise(ArgumentError, message: "Criteria must be set!")

    if data.alternatives_matrix == %{},
      do: raise(ArgumentError, message: "Alternatives matrix must be set!")

    # For each criterion, normalize its alternatives matrix and reduce it to a
    # weight vector.
    alternatives_weights_by_criteria =
      Enum.map(data.criteria, fn criteria ->
        matrix = data.alternatives_matrix[criteria]
        size = Enum.count(matrix)

        weights =
          matrix
          |> Helper.normalize(size)
          |> Helper.calculate_weights(size)

        weights
      end)

    # NOTE(review): `Matrix` is not defined in this file — presumably the
    # `matrix` hex package; confirm the dependency. The transpose turns
    # per-criterion columns into per-alternative rows.
    result = alternatives_weights_by_criteria |> Matrix.transpose() |> Enum.map(&List.flatten/1)
    Map.put(data, :alternatives_weights_by_criteria, result)
  end

  @doc """
  Calculates alternatives weights for analytic hierarchy (criteria must be set, such as comparison matrix and weights before must be calculated!).

  ## Examples

      iex> %Descisionex.AnalyticHierarchy{comparison_matrix: [[1, 2], [3, 4]]} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.normalize_comparison_matrix() |> Descisionex.AnalyticHierarchy.set_alternatives(["some", "alternatives"]) |> Descisionex.AnalyticHierarchy.set_alternatives_matrix([[[1, 2, 3]], [[4, 5, 6]]]) |> Descisionex.AnalyticHierarchy.calculate_alternatives_weights_by_criteria() |> Descisionex.AnalyticHierarchy.calculate_alternatives_weights()
      ** (ArgumentError) Weights must be calculated before!

      iex> %Descisionex.AnalyticHierarchy{comparison_matrix: [[1, 2], [3, 4]]} |> Descisionex.AnalyticHierarchy.set_criteria(["some", "criteria"]) |> Descisionex.AnalyticHierarchy.normalize_comparison_matrix() |> Descisionex.AnalyticHierarchy.calculate_criteria_weights() |> Descisionex.AnalyticHierarchy.set_alternatives(["some", "alternatives"]) |> Descisionex.AnalyticHierarchy.set_alternatives_matrix([[[1, 2, 3]], [[4, 5, 6]]]) |> Descisionex.AnalyticHierarchy.calculate_alternatives_weights_by_criteria() |> Descisionex.AnalyticHierarchy.calculate_alternatives_weights()
      %Descisionex.AnalyticHierarchy{
        alternatives: ["some", "alternatives"],
        alternatives_matrix: %{"criteria" => [[4, 5, 6]], "some" => [[1, 2, 3]]},
        alternatives_num: 2,
        alternatives_weights: [3.0],
        alternatives_weights_by_criteria: [[3.0, 3.0]],
        comparison_matrix: [[1, 2], [3, 4]],
        criteria: ["some", "criteria"],
        criteria_num: 2,
        criteria_weights: [[0.291], [0.709]],
        normalized_comparison_matrix: [[0.25, 0.333], [0.75, 0.667]]
      }

  """
  def calculate_alternatives_weights(%AnalyticHierarchy{} = data) do
    weights = data.criteria_weights
    if weights == [], do: raise(ArgumentError, message: "Weights must be calculated before!")

    # Each alternative's final weight is the dot product of its per-criterion
    # weights with the criteria weight vector, each term rounded to 3 places.
    alternatives_weights =
      Enum.reduce(data.alternatives_weights_by_criteria, [], fn column, acc ->
        product =
          Enum.map(Enum.with_index(column), fn {number, index} ->
            [weight | _] = Enum.at(weights, index)
            Float.round(number * weight, 3)
          end)
          |> Enum.sum()

        acc ++ [product]
      end)

    Map.put(data, :alternatives_weights, alternatives_weights)
  end
end
|
lib/algorithms/analytic_hierarchy.ex
| 0.909742
| 0.558809
|
analytic_hierarchy.ex
|
starcoder
|
defmodule BSV.Crypto.Hash do
  @moduledoc """
  A collection of one-way hashing functions.
  """
  alias BSV.Util

  @hash_algorithms [:md5, :ripemd160, :sha, :sha224, :sha256, :sha384, :sha512]

  @doc """
  Returns a list of supported hash algorithms.
  """
  @spec hash_algorithms :: [atom]
  def hash_algorithms, do: @hash_algorithms

  @doc """
  Computes a hash of the given data, using the specified hash algorithm.

  ## Options

  The accepted hash algorithms are:

  * `:md5` - MD5 message-digest algorithm (128 bit)
  * `:ripemd160` - RIPE Message Digest algorithm (160 bit)
  * `:sha` - Secure Hash Algorithm 1 (SHA-1) (160 bit)
  * `:sha224` - Secure Hash Algorithm 2 (SHA-2) (224 bit)
  * `:sha256` - Secure Hash Algorithm 2 (SHA-2) (256 bit)
  * `:sha384` - Secure Hash Algorithm 2 (SHA-2) (384 bit)
  * `:sha512` - Secure Hash Algorithm 2 (SHA-2) (512 bit)

  The accepted options are:

  * `:encoding` - Optionally encode the returned hash with either the `:base64` or `:hex` encoding scheme.

  ## Examples

      iex> BSV.Crypto.Hash.hash("hello world", :sha256)
      <<185, 77, 39, 185, 147, 77, 62, 8, 165, 46, 82, 215, 218, 125, 171, 250, 196, 132, 239, 227, 122, 83, 128, 238, 144, 136, 247, 172, 226, 239, 205, 233>>

      iex> BSV.Crypto.Hash.hash("hello world", :sha256, encoding: :hex)
      "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"

      iex> BSV.Crypto.Hash.hash("hello world", :sha256, encoding: :base64)
      "uU0nuZNNPgilLlLX2n2r+sSE7+N6U4DukIj3rOLvzek="
  """
  @spec hash(binary, atom, keyword) :: binary
  def hash(data, algorithm, options \\ []) do
    encoding = Keyword.get(options, :encoding)

    :crypto.hash(algorithm, data)
    |> Util.encode(encoding)
  end

  @doc """
  Computes a keyed-hash message authentication code (HMAC) of the given data,
  using the specified hash algorithm and secret key.

  See `BSV.Crypto.Hash.hash/3` for the accepted hash algorithms and options.
  """
  @spec hmac(binary, atom, binary, keyword) :: binary
  def hmac(data, algorithm, key, options \\ []) do
    encoding = Keyword.get(options, :encoding)

    :crypto.mac(:hmac, algorithm, key, data)
    |> Util.encode(encoding)
  end

  @doc """
  Computes the RIPEMD hash of a given input, outputting 160 bits.

  See `BSV.Crypto.Hash.hash/3` for the accepted options.

  ## Examples

      iex> BSV.Crypto.Hash.ripemd160("hello world")
      <<152, 198, 21, 120, 76, 203, 95, 229, 147, 111, 188, 12, 190, 157, 253, 180, 8, 217, 47, 15>>

      iex> BSV.Crypto.Hash.ripemd160("hello world", encoding: :hex)
      "98c615784ccb5fe5936fbc0cbe9dfdb408d92f0f"
  """
  @spec ripemd160(binary, keyword) :: binary
  def ripemd160(data, options \\ []), do: hash(data, :ripemd160, options)

  @doc """
  Computes the SHA-1 hash of a given input, outputting 160 bits.

  See `BSV.Crypto.Hash.hash/3` for the accepted options.

  ## Examples

      iex> BSV.Crypto.Hash.sha1("hello world")
      <<42, 174, 108, 53, 201, 79, 207, 180, 21, 219, 233, 95, 64, 139, 156, 233, 30, 232, 70, 237>>

      iex> BSV.Crypto.Hash.sha1("hello world", encoding: :hex)
      "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed"
  """
  @spec sha1(binary, keyword) :: binary
  def sha1(data, options \\ []), do: hash(data, :sha, options)

  @doc """
  Computes the SHA-2 hash of a given input, outputting 256 bits.

  See `BSV.Crypto.Hash.hash/3` for the accepted options.

  ## Examples

      iex> BSV.Crypto.Hash.sha256("hello world")
      <<185, 77, 39, 185, 147, 77, 62, 8, 165, 46, 82, 215, 218, 125, 171, 250, 196, 132, 239, 227, 122, 83, 128, 238, 144, 136, 247, 172, 226, 239, 205, 233>>

      iex> BSV.Crypto.Hash.sha256("hello world", encoding: :hex)
      "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"
  """
  @spec sha256(binary, keyword) :: binary
  def sha256(data, options \\ []), do: hash(data, :sha256, options)

  @doc """
  Computes the SHA-2 hash of a given input, outputting 512 bits.

  See `BSV.Crypto.Hash.hash/3` for the accepted options.

  ## Examples

      iex> BSV.Crypto.Hash.sha512("hello world", encoding: :hex)
      "309ecc489c12d6eb4cc40f50c902f2b4d0ed77ee511a7c7a9bcd3ca86d4cd86f989dd35bc5ff499670da34255b45b0cfd830e81f605dcf7dc5542e93ae9cd76f"

      iex> BSV.Crypto.Hash.sha512("hello world", encoding: :base64)
      "MJ7MSJwS1utMxA9QyQLytNDtd+5RGnx6m808qG1M2G+YndNbxf9JlnDaNCVbRbDP2DDoH2Bdz33FVC6TrpzXbw=="
  """
  @spec sha512(binary, keyword) :: binary
  def sha512(data, options \\ []), do: hash(data, :sha512, options)

  @doc """
  Computes a RIPEMD160 hash of a SHA256 hash, outputting 160 bits. This is
  commonly used inside Bitcoin, particularly for Bitcoin addresses.

  See `BSV.Crypto.Hash.hash/3` for the accepted options.

  ## Examples

      iex> BSV.Crypto.Hash.sha256_ripemd160("hello world")
      <<215, 213, 238, 120, 36, 255, 147, 249, 76, 48, 85, 175, 147, 130, 200, 108, 104, 181, 202, 146>>

      iex> BSV.Crypto.Hash.sha256_ripemd160("hello world", encoding: :hex)
      "d7d5ee7824ff93f94c3055af9382c86c68b5ca92"
  """
  @spec sha256_ripemd160(binary, keyword) :: binary
  def sha256_ripemd160(data, options \\ []), do: sha256(data) |> hash(:ripemd160, options)

  @doc """
  Computes a double SHA256 hash. This hash function is commonly used inside
  Bitcoin, particularly for the hash of a block and the hash of a transaction.

  See `BSV.Crypto.Hash.hash/3` for the accepted options.

  ## Examples

      iex> BSV.Crypto.Hash.sha256_sha256("hello world")
      <<188, 98, 212, 184, 13, 158, 54, 218, 41, 193, 108, 93, 77, 159, 17, 115, 31, 54, 5, 44, 114, 64, 26, 118, 194, 60, 15, 181, 169, 183, 68, 35>>

      iex> BSV.Crypto.Hash.sha256_sha256("hello world", encoding: :hex)
      "bc62d4b80d9e36da29c16c5d4d9f11731f36052c72401a76c23c0fb5a9b74423"
  """
  @spec sha256_sha256(binary, keyword) :: binary
  def sha256_sha256(data, options \\ []), do: sha256(data) |> hash(:sha256, options)
end
|
lib/bsv/crypto/hash.ex
| 0.856498
| 0.629547
|
hash.ex
|
starcoder
|
defmodule K8s.Conn.Config do
  @moduledoc """
  Add runtime cluster configuration with environment variables.

  Each variable consists of a prefix that determines where the value will be placed in the config
  and a suffix that is the cluster name. The cluster name will be atomized.

  Environment Variable Prefixes:

  * `K8S_CLUSTER_CONF_SA_` - *boolean* enables authentication to the k8s API with the pods `spec.serviceAccountName`.
  * `K8S_CLUSTER_CONF_PATH_` - *string* absolute path to the kube config file.
  * `K8S_CLUSTER_CONF_CONTEXT_` *string* which context to use in the kube config file.

  ## Examples

  ```shell
  export K8S_CLUSTER_CONF_SA_us_central=true
  export K8S_CLUSTER_CONF_PATH_us_east="east.yaml"
  export K8S_CLUSTER_CONF_CONTEXT_us_east="east"
  export K8S_CLUSTER_CONF_PATH_us_west="west.yaml"
  export K8S_CLUSTER_CONF_CONTEXT_us_west="west"
  ```
  """

  @env_var_prefix "K8S_CLUSTER_CONF_"
  @env_var_sa_prefix "K8S_CLUSTER_CONF_SA_"
  @env_var_path_prefix "K8S_CLUSTER_CONF_PATH_"
  @env_var_context_prefix "K8S_CLUSTER_CONF_CONTEXT_"

  @doc """
  Returns runtime and compile time cluster configuration merged together.
  """
  @spec all :: map()
  def all, do: merge_configs(runtime_cluster_configs(), compiletime_cluster_configs())

  @spec compiletime_cluster_configs :: map()
  def compiletime_cluster_configs, do: Application.get_env(:k8s, :clusters, %{})

  @doc """
  Cluster configuration read from env variables.

  To be merged over `Application.get_env(:k8s, :clusters)`.

  ## Examples

  Only specifying compiletime configs

      iex> config = %{dev: %{conn: "runtime/path/to/dev.kubeconfig.yaml"}}
      ...> K8s.Conn.Config.merge_configs(%{}, config)
      %{dev: %{conn: "runtime/path/to/dev.kubeconfig.yaml"}}

  Only specifying runtime configs

      iex> env = %{"K8S_CLUSTER_CONF_PATH_dev" => "runtime/path/to/dev.kubeconfig.yaml"}
      ...> K8s.Conn.Config.merge_configs(env, %{})
      %{dev: %{conn: "runtime/path/to/dev.kubeconfig.yaml"}}

  Overriding compile time configs

      iex> env = %{"K8S_CLUSTER_CONF_PATH_dev" => "runtime/path/to/dev.kubeconfig.yaml"}
      ...> compile_config = %{dev: %{conn: "compiletime/path/to/dev.kubeconfig.yaml"}}
      ...> K8s.Conn.Config.merge_configs(env, compile_config)
      %{dev: %{conn: "runtime/path/to/dev.kubeconfig.yaml"}}

  Merging compile time configs

      iex> env = %{"K8S_CLUSTER_CONF_CONTEXT_dev" => "runtime-context"}
      ...> compile_config = %{dev: %{conn: "compiletime/path/to/dev.kubeconfig.yaml"}}
      ...> K8s.Conn.Config.merge_configs(env, compile_config)
      %{dev: %{conn: "compiletime/path/to/dev.kubeconfig.yaml", conn_opts: [context: "runtime-context"]}}

  Adding clusters at runtime

      iex> env = %{"K8S_CLUSTER_CONF_PATH_us_east" => "runtime/path/to/us_east.kubeconfig.yaml", "K8S_CLUSTER_CONF_CONTEXT_us_east" => "east-context"}
      ...> compile_config = %{us_west: %{conn: "compiletime/path/to/us_west.kubeconfig.yaml"}}
      ...> K8s.Conn.Config.merge_configs(env, compile_config)
      %{us_east: %{conn: "runtime/path/to/us_east.kubeconfig.yaml", conn_opts: [context: "east-context"]}, us_west: %{conn: "compiletime/path/to/us_west.kubeconfig.yaml"}}
  """
  @spec merge_configs(map, map) :: map
  def merge_configs(env_vars, config) do
    Enum.reduce(env_vars, config, fn {var, value}, merged ->
      cluster = var |> cluster_name() |> String.to_atom()
      {key, parsed} = get_config_kv(var, value)

      # Overlay the parsed env value onto this cluster's existing config,
      # creating the cluster entry if it doesn't exist yet.
      Map.update(merged, cluster, %{key => parsed}, &Map.put(&1, key, parsed))
    end)
  end

  # Map a single env var name/value pair onto its config key and parsed value.
  defp get_config_kv(@env_var_context_prefix <> _cluster, context),
    do: {:conn_opts, [context: context]}

  defp get_config_kv(@env_var_path_prefix <> _cluster, path), do: {:conn, path}

  defp get_config_kv(@env_var_sa_prefix <> _cluster, flag) when flag in ["true", "false"],
    do: {:use_sa, flag == "true"}

  # Strip the known prefix from an env var name, leaving the cluster name.
  defp cluster_name(@env_var_context_prefix <> rest), do: rest
  defp cluster_name(@env_var_path_prefix <> rest), do: rest
  defp cluster_name(@env_var_sa_prefix <> rest), do: rest

  @spec runtime_cluster_configs :: map
  @doc "Parses ENV variables to runtime cluster configs"
  def runtime_cluster_configs, do: Map.take(System.get_env(), env_keys())

  @spec env_keys :: list(binary)
  defp env_keys do
    for {key, _val} <- System.get_env(), String.starts_with?(key, @env_var_prefix), do: key
  end
end
|
lib/k8s/conn/config.ex
| 0.848753
| 0.530784
|
config.ex
|
starcoder
|
defmodule Conform.Schema.Validator do
  @moduledoc """
  This module defines the behaviour for custom validators.

  Validators can be defined inline in which case this behaviour need not be used,
  but if you want to define reusable validators which you can reference in your
  schema, you should implement this behaviour, and then import the application in
  your schema so that they are made available by the module name.

  ## Example

  app.schema.exs:

      [ mappings: [
         "foo.bar": [
           datatype: :integer,
           default: 1,
           validators: [MyApp.RangeValidator: 1..2, MyApp.PositiveIntegerValidator]
         ]
      ]]

  app.conf:

      foo.bar = 3

  In the example above, `foo.bar` will be first parsed and mapped as the integer value 3,
  and then validated by calling `MyApp.RangeValidator.validate(3, [1..2])` where the second
  parameter is an optional list of extra arguments used by the validator. The second validator
  will then be called like `MyApp.PositiveIntegerValidator.validate(3, [])`.

  Validators must return `:ok` if validation passed, `{:warn, message}` if validation passed but a warning
  should be printed to stdout (for instance if you set a value to a valid but extreme value), or
  `{:error, message}` if validation failed.
  """
  alias Conform.Schema.Validator

  # `use Conform.Schema.Validator` simply declares the behaviour on the
  # using module.
  defmacro __using__(_) do
    quote do
      @behaviour Conform.Schema.Validator
    end
  end

  defstruct name: nil, # The name of this validator
            validator: nil, # The validator function
            definition: "", # The definition of the validator function as a string
            persist: true

  @callback validate(term, [term]) :: :ok | {:warn, String.t} | {:error, String.t}

  # Builds a %Validator{} from a quoted module alias as it appears in a schema
  # file; the 3-tuple is the quoted `__aliases__` form.
  def from_quoted({_, _, module_path}) do
    %Validator{definition: nil, validator: Module.concat(module_path)}
  end

  # Builds a %Validator{} from a `{name, validator}` pair.
  # NOTE(review): the guard applies `is_function/2` to the value *before*
  # evaluation, yet the body calls `Code.eval_quoted/1` as if the value were
  # quoted AST (and re-checks `is_function/2` afterwards). If callers pass
  # quoted AST this clause never matches — confirm the caller contract.
  def from_quoted({name, validator}) when is_function(validator, 1) do
    # Keep the pre-evaluation form around as the stored definition.
    definition = validator
    {validator, _} = Code.eval_quoted(validator)
    case is_function(validator, 1) do
      true ->
        %Validator{name: Atom.to_string(name), definition: definition, validator: validator}
      false ->
        raise Conform.Schema.SchemaError, message: "Invalid validator #{name}, it must be a function of arity 1."
    end
  end

  @doc """
  Loads all user-defined Validator modules.
  """
  @spec load() :: [%Validator{}]
  def load() do
    Conform.Utils.load_plugins_of(__MODULE__)
    |> Enum.map(fn module -> %Validator{definition: nil, validator: module} end)
  end
end
|
lib/conform/schema/validator.ex
| 0.889999
| 0.505493
|
validator.ex
|
starcoder
|
defmodule Scenic.Assets.Stream.Bitmap do
@moduledoc """
This module helps you to prepare images, in the form of a bitmap, that are to be streamed
and displayed through the `Scenic.Assets.Stream` module.
A bitmap is a rectangular field of pixels. Each pixel can be addressed and assigned a color.
When the bitmap is put into `Scenic.Assets.Stream` it becomes an image that can be displayed
in a scene via `Scenic.Primitive.Style.Paint.Stream`.
### Committed vs. Mutable
Bitmaps are interesting because a typical pattern is to change the color of many pixels in
a rapid burst, then send the image up. The bitmaps can become quite large tho, so if we
were to make a copy of it every time a single pixel was changed, that could become quite
slow.
Unfortunately, writing a NIF that manipulates individual pixels quickly and without making
a copy, breaks the immutable, functional model of Erlang/Elixir.
The compromise is that a Bitmap can be either in a "commited" state, which can be put
into `Scenic.Assets.Stream`, but not changed, or in a "mutable" state, which can be
manipulated rapidly, but not streamed to scenes.
When a new bitmap is built, it starts in the mutable state, unless the `commit: true` option is set.
```elixir
alias Scenic.Assets.Stream.Bitmap
bitmap = Bitmap.build( :rgb, 20, 10, clear: :blue )
|> Bitmap.put( 2, 3, :red )
|> Bitmap.put( 9, 10, :yellow )
|> Bitmap.commit()
Scenic.Assets.Stream.put( "stream_id", bitmap )
```
In the above example, a new bitmap is created, that can hold an rgb color in every pixel,
is 20 pixels wide, 10 pixels high, and starts with the entire image set to the color `:blue`.
The `:commit` option is not set, so it is mutable.
Then two of the pixels are set to other colors. One `:red` and the other `:yellow`.
Finally, the image is committed, making it usable, but no longer mutable. After the image is
completed, it is sent to `Scenic.Assets.Stream`, which makes it available for use in a scene.
### Color Depth
Bitmaps can be one of four depths. Each consumes a different amount of memory per pixel.
If you are running on a constrained memory device, or are worried about bandwidth when remoting
the UI, then you should choose the depth that you actually use. If you have lots of memory,
then `:rgba` is usually the fastest format.
| Depth | Bytes per pixel | Notes |
|---------------|------------------------|-----------|
| `:g` | 1 | Simple Greyscale. 256 shades of grey |
| `:ga` | 2 | Greyscale plus an alhpa channel |
| `:rgb` | 3 | Red/Green/Blue Millions of colors |
| `:rgba` | 4 | Red/Green/Blue/Alpha |
"""
alias Scenic.Assets.Stream.Bitmap
alias Scenic.Color
  @app Mix.Project.config()[:app]

  # load the NIF at module load time (:on_load); autoload is disabled so we
  # control when/where the shared object is loaded from.
  @compile {:autoload, false}
  @on_load :load_nifs

  @doc false
  def load_nifs do
    # Loads priv/bitmap from this app's priv dir; the `=` asserts success so
    # a missing/broken NIF fails loudly at module load.
    :ok =
      @app
      |> :code.priv_dir()
      |> :filename.join('bitmap')
      |> :erlang.load_nif(0)
  end

  # Supported pixel formats; see the depth table in the moduledoc.
  @type depth ::
          :g
          | :ga
          | :rgb
          | :rgba

  # {width, height, depth} — carried alongside the raw pixel binary.
  @type meta :: {width :: pos_integer, height :: pos_integer, depth :: depth()}

  # Tuple tags distinguishing committed bitmaps (usable by
  # Scenic.Assets.Stream) from mutable ones (editable in place).
  @bitmap __MODULE__
  @mutable :mutable_bitmap

  @type t :: {__MODULE__, meta :: meta(), data :: binary}
  @type m :: {:mutable_bitmap, meta :: meta(), data :: binary}
# --------------------------------------------------------
@doc """
Build a new bitmap with a given depth, width and height.
Build creates a new bitmap in memory. It begins in a mutable state
and will be set to transparent black unless the :clear option is specified.
The valid depths are :g, :ga, :rgb, :rgba as explained in the following table
| Depth | Bytes per pixel | Notes |
|---------------|------------------------|-----------|
| `:g` | 1 | Simple Greyscale. 256 shades of grey |
| `:ga` | 2 | Greyscale plus an alhpa channel |
| `:rgb` | 3 | Red/Green/Blue Millions of colors |
| `:rgba` | 4 | Red/Green/Blue/Alpha |
### Options
* `:clear` Set the new bitmap so that every pixel is the specified color.
* `:commit` Set to true to start the bitmap committed. Set to false for mutable. The default if not specified is mutable.
"""
@spec build(
depth :: Bitmap.depth(),
width :: pos_integer,
height :: pos_integer,
opts :: Keyword.t()
) :: t()
def build(format, width, height, opts \\ [])
def build(format, width, height, opts) do
bits =
case format do
:g -> 8 * width * height
:ga -> 8 * width * height * 2
:rgb -> 8 * width * height * 3
:rgba -> 8 * width * height * 4
end
m = {@mutable, {width, height, format}, <<0::size(bits)>>}
m =
case opts[:clear] do
nil -> m
color -> clear(m, color)
end
case opts[:commit] do
nil -> m
false -> m
true -> commit(m)
end
end
# --------------------------------------------------------
@doc """
Change a bitmap from committed to mutable.
This makes a copy of the bitmap's memory to preserve the Erlang model.
Mutable bitmaps are not usable by `Scenic.Assets.Stream`.
"""
@spec mutable(texture :: t()) :: mutable :: m()
def mutable({@bitmap, meta, bin}), do: {@mutable, meta, :binary.copy(bin)}
# --------------------------------------------------------
@doc """
Change a bitmap from mutable to committed.
Committed bitmaps can be used by `Scenic.Assets.Stream`. They will not
work with the `put` and `clear` functions in this module.
"""
@spec commit(mutable :: m()) :: texture :: t()
def commit({@mutable, meta, bin}) do
  # No copy needed here - committed data is treated as immutable from now on.
  {@bitmap, meta, bin}
end
# --------------------------------------------------------
@doc """
Get the color value of a single pixel in a bitmap.
Works with either committed or mutable bitmaps.
Coordinates are zero based, so valid values are 0 <= x < width and
0 <= y < height.
"""
@spec get(t_or_m :: t() | m(), x :: pos_integer, y :: pos_integer) :: Color.explicit()
def get(texture, x, y)
def get({@mutable, meta, bin}, x, y), do: do_get(meta, bin, x, y)
def get({@bitmap, meta, bin}, x, y), do: do_get(meta, bin, x, y)

# One clause per depth. Each computes the byte offset of pixel (x, y) and
# decodes the channels at that offset. Guards use x < w and y < h: the
# coordinates are zero based, so x == w (or y == h) is out of bounds and the
# previous <= guards silently read the first pixel of the next row (or ran
# past the end of the binary).
defp do_get({w, h, :g}, p, x, y)
     when is_integer(x) and x >= 0 and x < w and
            is_integer(y) and y >= 0 and y < h do
  skip = y * w + x
  <<_::binary-size(skip), g::8, _::binary>> = p
  Color.to_g(g)
end

defp do_get({w, h, :ga}, p, x, y)
     when is_integer(x) and x >= 0 and x < w and
            is_integer(y) and y >= 0 and y < h do
  skip = y * w * 2 + x * 2
  <<_::binary-size(skip), g::8, a::8, _::binary>> = p
  Color.to_ga({g, a})
end

defp do_get({w, h, :rgb}, p, x, y)
     when is_integer(x) and x >= 0 and x < w and
            is_integer(y) and y >= 0 and y < h do
  skip = y * w * 3 + x * 3
  <<_::binary-size(skip), r::8, g::8, b::8, _::binary>> = p
  Color.to_rgb({r, g, b})
end

defp do_get({w, h, :rgba}, p, x, y)
     when is_integer(x) and x >= 0 and x < w and
            is_integer(y) and y >= 0 and y < h do
  skip = y * w * 4 + x * 4
  <<_::binary-size(skip), r::8, g::8, b::8, a::8, _::binary>> = p
  Color.to_rgba({r, g, b, a})
end
# --------------------------------------------------------
@doc """
Set the color value of a single pixel in a bitmap.
Only works with mutable bitmaps.
Coordinates are zero based, so valid values are 0 <= x < width and
0 <= y < height.
The color you provide can be any valid value from the `Scenic.Color` module.
If the color you provide doesn't match the depth of the bitmap, this will
transform the color as appropriate to fit. For example, putting an `:rgb`
color into a `:g` (greyscale) bit map, will set the level of grey to be the average
value of the red, green, and blue channels of the supplied color
"""
@spec put(mutable :: m(), x :: pos_integer, y :: pos_integer, color :: Color.t()) ::
        mutable :: m()
def put(mutable, x, y, color)

# Each clause converts the color to the bitmap's depth, then hands the pixel
# offset and channel values to the NIF (which presumably writes the bytes in
# place - the same binary is returned; confirm against the native source).
# Guards use x < w and y < h: coordinates are zero based, so x == w (or
# y == h) is out of bounds and the previous <= guards addressed the first
# pixel of the next row (or past the end of the buffer).
def put({@mutable, {w, h, :g}, p}, x, y, color)
    when is_integer(x) and x >= 0 and x < w and
           is_integer(y) and y >= 0 and y < h do
  {:color_g, g} = Color.to_g(color)
  nif_put(p, y * w + x, g)
  {@mutable, {w, h, :g}, p}
end

def put({@mutable, {w, h, :ga}, p}, x, y, color)
    when is_integer(x) and x >= 0 and x < w and
           is_integer(y) and y >= 0 and y < h do
  {:color_ga, {g, a}} = Color.to_ga(color)
  nif_put(p, y * w + x, g, a)
  {@mutable, {w, h, :ga}, p}
end

def put({@mutable, {w, h, :rgb}, p}, x, y, color)
    when is_integer(x) and x >= 0 and x < w and
           is_integer(y) and y >= 0 and y < h do
  {:color_rgb, {r, g, b}} = Color.to_rgb(color)
  nif_put(p, y * w + x, r, g, b)
  {@mutable, {w, h, :rgb}, p}
end

def put({@mutable, {w, h, :rgba}, p}, x, y, color)
    when is_integer(x) and x >= 0 and x < w and
           is_integer(y) and y >= 0 and y < h do
  {:color_rgba, {r, g, b, a}} = Color.to_rgba(color)
  nif_put(p, y * w + x, r, g, b, a)
  {@mutable, {w, h, :rgba}, p}
end

# NIF stubs by arity - these bodies only run if the native library failed to load.
defp nif_put(_, _, _), do: :erlang.nif_error("Did not find nif_put_g")
defp nif_put(_, _, _, _), do: :erlang.nif_error("Did not find nif_put_ga")
defp nif_put(_, _, _, _, _), do: :erlang.nif_error("Did not find nif_put_rgb")
defp nif_put(_, _, _, _, _, _), do: :erlang.nif_error("Did not find nif_put_rgba")
# --------------------------------------------------------
@doc """
Set the color value of all pixels in a bitmap. This effectively erases the bitmap,
replacing it with a solid field of the supplied color.
Only works with mutable bitmaps.
The color you provide can be any valid value from the `Scenic.Color` module.
If the color you provide doesn't match the depth of the bitmap, this will
transform the color as appropriate to fit. For example, clearing a `:g`
(greyscale) bitmap with an `:rgb` color sets the level of grey to the average
value of the red, green, and blue channels of the supplied color.
"""
@spec clear(mutable :: m(), color :: Color.t()) :: mutable :: m()
def clear(mutable, color)

def clear({@mutable, {_w, _h, :g}, pixels} = bitmap, color) do
  {:color_g, grey} = Color.to_g(color)
  nif_clear(pixels, grey)
  bitmap
end

def clear({@mutable, {_w, _h, :ga}, pixels} = bitmap, color) do
  {:color_ga, {grey, alpha}} = Color.to_ga(color)
  nif_clear(pixels, grey, alpha)
  bitmap
end

def clear({@mutable, {_w, _h, :rgb}, pixels} = bitmap, color) do
  {:color_rgb, {red, green, blue}} = Color.to_rgb(color)
  nif_clear(pixels, red, green, blue)
  bitmap
end

def clear({@mutable, {_w, _h, :rgba}, pixels} = bitmap, color) do
  {:color_rgba, {red, green, blue, alpha}} = Color.to_rgba(color)
  nif_clear(pixels, red, green, blue, alpha)
  bitmap
end

def clear({@mutable, {_, _, :file}, _pixels}, _color) do
  raise "Texture.clear(...) is not supported for file encoded data"
end

# NIF fallbacks by arity - only reached if the native implementation failed to load.
defp nif_clear(_, _), do: :erlang.nif_error("Did not find nif_clear_g")
defp nif_clear(_, _, _), do: :erlang.nif_error("Did not find nif_clear_ga")
defp nif_clear(_, _, _, _), do: :erlang.nif_error("Did not find nif_clear_rgb")
defp nif_clear(_, _, _, _, _), do: :erlang.nif_error("Did not find nif_clear_rgba")
# --------------------------------------------------------
@doc false
# @impl Scenic.Assets.Stream
@spec valid?(bitmap :: t()) :: boolean
def valid?(bitmap)

# A committed bitmap is valid when its pixel binary is exactly the size
# implied by its dimensions and depth. Anything else (including mutable
# bitmaps and unknown depths) is invalid.
def valid?({@bitmap, {width, height, depth}, pixels}) do
  case depth do
    :g -> byte_size(pixels) == width * height
    :ga -> byte_size(pixels) == width * height * 2
    :rgb -> byte_size(pixels) == width * height * 3
    :rgba -> byte_size(pixels) == width * height * 4
    _ -> false
  end
end

def valid?(_), do: false
end
|
lib/scenic/assets/stream/bitmap.ex
| 0.937243
| 0.910027
|
bitmap.ex
|
starcoder
|
defmodule NzIrValidator do
  @moduledoc """
  Validates New Zealand IRD numbers using the published check-digit algorithm.
  """

  # An IRD base number is checked as 8 digits (padded with leading zeroes).
  @length 8
  # Weightings from the IRD check-digit specification. The secondary weighting
  # is used only when the primary pass derives a check digit of 10.
  @primary_weighting [3, 2, 7, 6, 5, 4, 3, 2]
  @secondary_weighting [7, 4, 3, 2, 5, 2, 7, 6]
  @modulus 11

  @doc """
  Validate the provided IR number.

  ## Examples

      iex> NzIrValidator.is_valid?(49091850)
      {:ok, true}

      iex> NzIrValidator.is_valid?(9125568)
      {:error, false}

  """
  @spec is_valid?(integer) :: {:ok, true} | {:error, false}
  def is_valid?(ird_number) when ird_number not in 10_000_000..150_000_000,
    do: {:error, false}

  def is_valid?(ird_number) do
    # The last digit is the check digit; everything before it is the base.
    {base_digits, [check]} = ird_number |> Integer.digits() |> Enum.split(-1)

    base_digits
    |> pad()
    |> apply_algorithm(@primary_weighting, check)
  end

  # Runs one pass of the algorithm: weight the digits, sum, take the remainder
  # mod 11, and compare the derived check digit against the supplied one.
  @spec apply_algorithm(list(integer), list(integer), integer) ::
          {:ok, true} | {:error, false}
  defp apply_algorithm(digits, weighting, check) do
    remainder =
      digits
      |> apply_weighting(weighting)
      |> Enum.sum()
      |> rem(@modulus)

    case calculated_check_digit(remainder) do
      ^check ->
        {:ok, true}

      10 when weighting == @primary_weighting ->
        # A derived digit of 10 means "retry with the secondary weighting".
        # If the secondary pass also derives 10, the number is invalid (the
        # previous implementation recursed forever in that case).
        apply_algorithm(digits, @secondary_weighting, check)

      _ ->
        {:error, false}
    end
  end

  # Per the IRD specification: remainder 0 requires check digit 0 (the
  # previous implementation accepted ANY check digit when the remainder was
  # 0); otherwise the check digit is 11 - remainder.
  @spec calculated_check_digit(integer) :: integer
  defp calculated_check_digit(0), do: 0
  defp calculated_check_digit(remainder), do: @modulus - remainder

  # Left-pads the digit list with zeroes up to @length digits.
  @spec pad(list(integer)) :: list(integer)
  defp pad(digits), do: List.duplicate(0, @length - length(digits)) ++ digits

  # Multiply each digit by its corresponding weighting.
  @spec apply_weighting(list(integer), list(integer)) :: list(integer)
  defp apply_weighting(digits, weighting) do
    digits
    |> Enum.zip(weighting)
    |> Enum.map(fn {digit, weight} -> digit * weight end)
  end
end
|
lib/nz_ir_validator.ex
| 0.857738
| 0.451206
|
nz_ir_validator.ex
|
starcoder
|
defmodule ParkingTweets.GarageMap do
  @moduledoc """
  Responsible for maintaining a map of garages and their current state.
  """
  alias ParkingTweets.{Garage, IdMapSet}

  defstruct garages: IdMapSet.new(&Garage.id/1),
            alternates: %{},
            facility_to_stop_id: %{},
            stop_id_to_stop_name: %{}

  @doc "Returns an empty garage map."
  def new do
    %__MODULE__{}
  end

  @doc "Returns an empty garage map with the alternate-garage lookup pre-built."
  def new(opts) do
    %__MODULE__{alternates: build_alternate_map(Keyword.get(opts, :alternates))}
  end

  @doc "True when no garages have been recorded yet."
  def empty?(%__MODULE__{garages: garages}) do
    IdMapSet.size(garages) == 0
  end

  @doc "Applies a list of server-sent events to the map, in order."
  def update_multiple(%__MODULE__{} = map, events) do
    Enum.reduce(events, map, &update(&2, &1))
  end

  @doc "Applies a single server-sent event to the map."
  def update(%__MODULE__{} = map, %{event: "reset", data: data}) do
    # A reset discards all accumulated state except the configured alternates.
    fresh = %__MODULE__{alternates: map.alternates}

    data
    |> Jason.decode!()
    |> Enum.reduce(fresh, fn json, acc -> put_json(acc, json) end)
  end

  def update(%__MODULE__{} = map, %{event: event, data: data})
      when event in ["add", "update"] do
    put_json(map, Jason.decode!(data))
  end

  def update(%__MODULE__{} = map, %{event: "remove"}) do
    # Removals are ignored: entries are only replaced or reset.
    map
  end

  defp build_alternate_map(nil), do: %{}

  defp build_alternate_map(alternates) do
    # `alternates` is a list of lists of garage IDs. In a given list of IDs,
    # any of the garages can be substituted with each other. Build a lookup
    # of id => MapSet of the other ids in its group(s).
    Enum.reduce(alternates, %{}, fn ids, acc ->
      group = MapSet.new(ids)

      Enum.reduce(ids, acc, fn id, inner ->
        others = MapSet.delete(group, id)
        Map.update(inner, id, others, &MapSet.union(&1, others))
      end)
    end)
  end

  # Facility records link a facility id to its stop id.
  defp put_json(map, %{"type" => "facility"} = json) do
    %{
      "id" => facility_id,
      "relationships" => %{"stop" => %{"data" => %{"id" => stop_id}}}
    } = json

    %{map | facility_to_stop_id: Map.put(map.facility_to_stop_id, facility_id, stop_id)}
  end

  # Stop records provide the human-readable name for a stop id.
  defp put_json(map, %{"type" => "stop"} = json) do
    %{"id" => stop_id, "attributes" => %{"name" => stop_name}} = json
    %{map | stop_id_to_stop_name: Map.put(map.stop_id_to_stop_name, stop_id, stop_name)}
  end

  # Anything else is treated as a garage record; resolve its name through the
  # facility -> stop -> name lookups built above.
  defp put_json(map, json) do
    garage = Garage.from_json_api(json)
    stop_id = Map.get(map.facility_to_stop_id, garage.id)
    stop_name = Map.get(map.stop_id_to_stop_name, stop_id)
    put(map, Garage.put_name(garage, stop_name))
  end

  @doc "Insert a garage directly"
  def put(%__MODULE__{} = map, %Garage{} = garage) do
    %{map | garages: IdMapSet.put(map.garages, garage)}
  end

  @doc "Garages in the first map whose utilization/status differ from the second map's."
  def difference(%__MODULE__{} = garage_map_1, %__MODULE__{} = garage_map_2) do
    IdMapSet.difference_by(
      garage_map_1.garages,
      garage_map_2.garages,
      &Garage.utilization_percent_or_status/1
    )
  end

  @doc "Returns the garages, each annotated with its usable alternates (if any)."
  def with_alternates(%__MODULE__{} = map) do
    Enum.map(map.garages, fn garage ->
      case calculate_alternates(map, garage) do
        [] -> garage
        alternates -> Garage.put_alternates(garage, alternates)
      end
    end)
  end

  # An alternate is usable when it exists in the map and is under 90% full.
  defp calculate_alternates(map, garage) do
    map.alternates
    |> Map.get(garage.id, [])
    |> Enum.flat_map(fn alternate_id ->
      case IdMapSet.get(map.garages, alternate_id) do
        %Garage{} = alternate -> [alternate]
        _ -> []
      end
    end)
    |> Enum.filter(&(Garage.utilization_percent(&1) < 90))
  end
end
|
lib/parking_tweets/garage_map.ex
| 0.70791
| 0.408601
|
garage_map.ex
|
starcoder
|
defmodule GitDiff do
  @moduledoc """
  A simple implementation for taking the output from 'git diff' and transforming it into Elixir structs.
  ## Installation
  The package can be installed by adding `git_diff` to your list of dependencies in `mix.exs`:
  ```elixir
  def deps do
    [
      {:git_diff, "~> 0.6.1"}
    ]
  end
  ```
  ## Example
  Output:
  ```
  [
    %GitDiff.Patch{
      chunks: [
        %GitDiff.Chunk{
          from_num_lines: "42",
          from_start_line: "42",
          header: "@@ -481,23 +483,24 @@ class Cursor extends Model {",
          context: "class Cursor extends Model {", # will be "" if there is no context
          lines: [
            %GitDiff.Line{
              from_line_number: 481,
              text: " {",
              to_line_number: 483,
              type: :context # will be one of :context, :add, :remove
            },
            ...
          ],
          to_num_lines: "42",
          to_start_line: "42"
        }
      ],
      from: "src/cursor.js",
      headers: %{"index" => {"10bdef8", "181eeb9", "100644"}},
      to: "src/cursor.js"},
  ]
  ```
  The above output is heavily truncated for illustration, but it should give enough of an idea of what to expect. The
  code, while naive, is less than 100 lines of actual code and all takes place in the GitDiff module. Emulate the tests
  in an interactive shell for quick viewing of the output.
  ## Benchmarks
  Haven't done much benchmarking, but up to around a 5k (I just stopped trying there) line diff the performance was
  linear and took a whopping 35ms per call on the test VM. For a more reasonably sized ~150 line diff it clocked in at
  around 340 microseconds.
  """
  alias GitDiff.Patch
  alias GitDiff.Chunk
  alias GitDiff.Line

  @doc """
  Parse the output from a 'git diff' command.
  Returns `{:ok, [%GitDiff.Patch{}]}` for success, `{:error, :unrecognized_format}` otherwise. See `GitDiff.Patch`.
  """
  @spec parse_patch(String.t(), Keyword.t()) :: {:ok, [%GitDiff.Patch{}]} | {:error, :unrecognized_format}
  def parse_patch(git_diff, opts \\ []) do
    # Parsing errors are signalled by throwing {:git_diff, reason} from the
    # helpers below; this boundary converts them into an error tuple.
    try do
      parsed_diff =
        git_diff
        |> String.trim()
        |> String.splitter("\n")
        |> split_diffs()
        |> process_diffs(state(opts))
        |> Enum.to_list()

      {:ok, parsed_diff}
    catch
      :throw, {:git_diff, _reason} -> {:error, :unrecognized_format}
    end
  end

  @doc """
  Parse the output from a 'git diff' command.
  Like `parse_patch/1` but takes an `Enumerable` of lines and returns a stream
  of `{:ok, %GitDiff.Patch{}}` for successfully parsed patches or `{:error, _}`
  if the patch failed to parse.
  """
  @spec stream_patch(Enum.t(), Keyword.t()) :: Enum.t()
  def stream_patch(stream, opts \\ []) do
    stream
    |> Stream.map(&String.trim_trailing(&1, "\n"))
    |> split_diffs()
    |> process_diffs_ok(state(opts))
  end

  # Normalizes the :relative_from/:relative_to options (paths to strip from
  # the reported file names) into the state map threaded through parsing.
  defp state(opts) do
    %{
      relative_from: opts[:relative_from] && Path.relative(opts[:relative_from]),
      relative_to: opts[:relative_to] && Path.relative(opts[:relative_to])
    }
  end

  # Lazily parse each raw diff; parse failures propagate as throws.
  defp process_diffs(diffs, state) do
    Stream.map(diffs, &process_diff(&1, state))
  end

  # Like process_diffs/2, but catches parse throws per diff and wraps each
  # result in an ok/error tuple (used by the streaming entry point).
  defp process_diffs_ok(diffs, state) do
    Stream.map(diffs, fn diff ->
      try do
        {:ok, process_diff(diff, state)}
      catch
        :throw, {:git_diff, _reason} -> {:error, :unrecognized_format}
      end
    end)
  end

  # One diff = its header lines followed by zero or more "@@" chunks.
  defp process_diff(diff, state) do
    [headers | chunks] = split_diff(diff) |> Enum.to_list()
    patch = process_diff_headers(headers, state)

    chunks =
      Enum.map(chunks, fn lines ->
        process_chunk(%{from_line_number: nil, to_line_number: nil}, %Chunk{}, lines)
      end)

    %{patch | chunks: chunks}
  end

  # Recursively consume chunk lines, threading the current from/to line
  # counters in `context`. Lines are accumulated in reverse and flipped once
  # at the end.
  defp process_chunk(_, chunk, []) do
    %{chunk | lines: Enum.reverse(chunk.lines)}
  end

  # Skip blank lines (e.g. from trailing trim).
  defp process_chunk(context, chunk, ["" | lines]), do: process_chunk(context, chunk, lines)

  defp process_chunk(context, chunk, [line | lines]) do
    {context, chunk} =
      case line do
        # Chunk header: initialize the line counters from the @@ ranges.
        "@@" <> text ->
          results =
            Regex.named_captures(
              ~r/ -(?<from_start_line>[0-9]+)(,(?<from_num_lines>[0-9]+))? \+(?<to_start_line>[0-9]+)(,(?<to_num_lines>[0-9]+))? @@( (?<context>.+))?/,
              text
            )

          {%{
             context
             | from_line_number: String.to_integer(results["from_start_line"]),
               to_line_number: String.to_integer(results["to_start_line"])
           },
           %{
             chunk
             | from_num_lines: results["from_num_lines"],
               from_start_line: results["from_start_line"],
               to_num_lines: results["to_num_lines"],
               to_start_line: results["to_start_line"],
               context: results["context"],
               header: "@@" <> text
           }}

        # Context line: present on both sides, so both counters advance.
        " " <> _ = text ->
          line = %Line{
            text: text,
            type: :context,
            to_line_number: Integer.to_string(context.to_line_number),
            from_line_number: Integer.to_string(context.from_line_number)
          }

          {
            %{
              context
              | to_line_number: context.to_line_number + 1,
                from_line_number: context.from_line_number + 1
            },
            %{chunk | lines: [line | chunk.lines]}
          }

        # Added line: only the "to" counter advances.
        "+" <> _ = text ->
          line = %Line{
            text: text,
            type: :add,
            to_line_number: Integer.to_string(context.to_line_number)
          }

          {
            %{context | to_line_number: context.to_line_number + 1},
            %{chunk | lines: [line | chunk.lines]}
          }

        # Removed line: only the "from" counter advances.
        "-" <> _ = text ->
          line = %Line{
            text: text,
            type: :remove,
            from_line_number: Integer.to_string(context.from_line_number)
          }

          {
            %{context | from_line_number: context.from_line_number + 1},
            %{chunk | lines: [line | chunk.lines]}
          }

        # Marker lines such as "\ No newline at end of file" - kept as
        # context without advancing either counter.
        "\\" <> _ = text ->
          line = %Line{
            text: text,
            type: :context
          }

          {
            context,
            %{chunk | lines: [line | chunk.lines]}
          }

        other ->
          throw({:git_diff, {:invalid_chunk_line, other}})
      end

    process_chunk(context, chunk, lines)
  end

  # The first header line must be "diff --git ...".
  defp process_diff_headers([header | headers], state) do
    [_ | [diff_type | _]] = String.split(header, " ")

    if diff_type !== "--git" do
      throw({:git_diff, {:invalid_diff_type, diff_type}})
    else
      process_diff_headers(%Patch{}, headers, state)
    end
  end

  defp process_diff_headers(patch, [], _state), do: patch

  # Fold each recognized extended-header line into the patch; unknown lines
  # abort the parse.
  defp process_diff_headers(patch, [header | headers], state) do
    patch =
      case header do
        "old mode " <> mode ->
          %{patch | headers: Map.put(patch.headers, "old mode", mode)}

        "new mode " <> mode ->
          %{patch | headers: Map.put(patch.headers, "new mode", mode)}

        "deleted file mode " <> mode ->
          %{patch | headers: Map.put(patch.headers, "deleted file mode", mode)}

        "new file mode " <> mode ->
          %{patch | headers: Map.put(patch.headers, "new file mode", mode)}

        "copy from mode " <> mode ->
          %{patch | headers: Map.put(patch.headers, "copy from mode", mode)}

        "copy to mode " <> mode ->
          %{patch | headers: Map.put(patch.headers, "copy to mode", mode)}

        "rename from " <> filepath ->
          %{patch | headers: Map.put(patch.headers, "rename from", filepath), from: filepath}

        "rename from mode " <> mode ->
          %{patch | headers: Map.put(patch.headers, "rename from mode", mode)}

        "rename to " <> filepath ->
          %{patch | headers: Map.put(patch.headers, "rename to", filepath), to: filepath}

        "rename to mode " <> mode ->
          %{patch | headers: Map.put(patch.headers, "rename to mode", mode)}

        "similarity index " <> number ->
          %{patch | headers: Map.put(patch.headers, "similarity index", number)}

        "dissimilarity index " <> number ->
          %{patch | headers: Map.put(patch.headers, "dissimilarity index", number)}

        "index " <> rest ->
          results =
            Regex.named_captures(~r/(?<first_hash>.+?)\.\.(?<second_hash>.+?) (?<mode>.+)/, rest)

          %{
            patch
            | headers:
                Map.put(
                  patch.headers,
                  "index",
                  {results["first_hash"], results["second_hash"], results["mode"]}
                )
          }

        "--- " <> file ->
          %{patch | from: maybe_relative_to(from_file(file), state.relative_from)}

        "+++ " <> file ->
          %{patch | to: maybe_relative_to(to_file(file), state.relative_to)}

        "Binary files " <> rest ->
          results = Regex.named_captures(~r/(?<from>.+?) and (?<to>.+?) differ/, rest)
          %{patch | from: maybe_relative_to(from_file(results["from"]), state.relative_from), to: maybe_relative_to(to_file(results["to"]), state.relative_to)}

        other ->
          throw({:git_diff, {:invalid_header, other}})
      end

    process_diff_headers(patch, headers, state)
  end

  # "a/<path>" is the pre-image side; /dev/null means the file was created.
  defp from_file("a/" <> file), do: file
  defp from_file("/dev/null"), do: nil
  defp from_file(other), do: throw({:git_diff, {:invalid_from_filename, other}})

  # "b/<path>" is the post-image side; /dev/null means the file was deleted.
  defp to_file("b/" <> file), do: file
  defp to_file("/dev/null"), do: nil
  defp to_file(other), do: throw({:git_diff, {:invalid_to_filename, other}})

  # Strips the configured prefix from a path, when one was supplied.
  defp maybe_relative_to(nil, _relative), do: nil
  defp maybe_relative_to(path, nil), do: path
  defp maybe_relative_to(path, relative), do: Path.relative_to(path, relative)

  # Chunks one diff's lines into [headers | chunks], starting a new group at
  # each "@@" chunk-header line.
  defp split_diff(diff) do
    chunk_fun = fn line, lines ->
      if String.starts_with?(line, "@@") do
        {:cont, Enum.reverse(lines), [line]}
      else
        {:cont, [line | lines]}
      end
    end

    after_fun = fn
      [] -> {:cont, []}
      lines -> {:cont, Enum.reverse(lines), []}
    end

    Stream.chunk_while(diff, [], chunk_fun, after_fun)
  end

  # Splits the whole input into individual diffs at each "diff" line.
  defp split_diffs(split_diff) do
    chunk_fun = fn line, lines ->
      if String.starts_with?(line, "diff") and lines != [] do
        {:cont, Enum.reverse(lines), [line]}
      else
        {:cont, [line | lines]}
      end
    end

    after_fun = fn
      [] -> {:cont, []}
      lines -> {:cont, Enum.reverse(lines), []}
    end

    Stream.chunk_while(split_diff, [], chunk_fun, after_fun)
  end
end
|
lib/git_diff.ex
| 0.845863
| 0.862815
|
git_diff.ex
|
starcoder
|
defmodule DgraphEx.Changeset do
  @moduledoc """
  An Ecto-like changeset for DgraphEx models: `cast/3` filters incoming
  changes to an allowed set of fields, the `validate_*` functions accumulate
  errors, and `uncast/1` either applies the changes back onto the model's
  struct or returns the changeset with its errors.
  """
  alias DgraphEx.Changeset, as: Cs
  alias DgraphEx.{Field, Vertex, Types, Util}

  defstruct module: nil,
            model: nil,
            changes: nil,
            errors: nil

  @doc """
  In `cast/3` we do 3 things:
  1) We ensure only changes to allowed fields "get through" by using Map.take/2.
  2) We separate the model's struct into its component parts: module and map.
  3) We initialize the errors field to an empty list. And since only a
     changeset with an empty list is valid we ensure that a changeset
     that has been instantiated outside cast/3 is not valid unless the errors
     field is set to an empty list.
  """
  def cast(%{__struct__: module} = model, %{} = changes, allowed_fields)
      when is_list(allowed_fields) do
    %Cs{
      module: module,
      model: model |> Map.from_struct() |> Map.put(:_uid_, model._uid_),
      changes: filter_allowed_fields(changes, allowed_fields),
      errors: []
    }
  end

  # Keeps only the allowed keys, accepting both atom and string keys in the
  # incoming changes and normalizing everything to existing atoms (never
  # creating atoms from arbitrary input).
  defp filter_allowed_fields(changes, allowed_fields) do
    changes
    |> Map.take(allowed_fields ++ Enum.map(allowed_fields, &to_string/1))
    |> Enum.map(fn
      {k, v} when is_atom(k) -> {k, v}
      {k, v} when is_binary(k) -> {String.to_existing_atom(k), v}
    end)
    |> Enum.into(%{})
  end

  @doc """
  In uncast/1 we first check to make sure that the errors field of the changeset is a list.
  If the errors field is not a list then this is not a valid changeset and an error is raised.
  NOTE: A NON-LIST ERRORS FIELD IS NOT ALLOWED. USE CAST/3.
  The errors field being a non-list indicates that there was an error in programming, not invalid
  input into a changes map. If you need to construct a Changeset struct outside cast/3 then ensure the errors field
  is set to a list upon instantiation.
  After checking for a non-list errors field, we check is_valid?/1 which returns true only for empty
  errors fields of changesets. If the Changeset is valid we apply each of the changes to the
  model's map and reconstruct the original struct with the changes applied, and return an
  :ok tuple as in `{:ok, model_struct_here}`. Finally, if the changeset was not valid we
  return an :error tuple as in `{:error, changeset_here}`.
  This should be the final function called for a chain of changeset functions (such as validators).
  """
  def uncast(%Cs{} = cs) do
    cond do
      !is_list(cs.errors) ->
        raise %ArgumentError{
          message: "A DgraphEx Changeset requires the :errors field to be a keyword list. Got #{inspect cs.errors}.\nDid you use cast/3 to construct your changeset?"
        }

      is_valid?(cs) ->
        {:ok, struct!(cs.module, do_apply_changes(cs))}

      true ->
        {:error, cs}
    end
  end

  # Rebuilds the model map, preferring a changed value over the original for
  # every key the model already has.
  defp do_apply_changes(%Cs{} = cs) do
    cs.model
    |> Enum.reduce(cs.model, fn {key, _}, model_acc ->
      Map.put(model_acc, key, do_get_value(cs, key))
    end)
  end

  @doc """
  True only for a changeset whose errors list is empty.
  """
  def is_valid?(%Cs{errors: []}) do
    true
  end

  def is_valid?(%Cs{}) do
    false
  end

  @doc """
  Prepends a `{field, reason}` error onto the changeset's error list.
  """
  def put_error(%Cs{errors: errors} = cs, {key, _} = err) when is_atom(key) do
    %{cs | errors: [err | errors]}
  end

  # Multiple `when` clauses act as an OR: reason may be a list or an atom.
  def put_error(%Cs{} = cs, key, reason) when is_list(reason)
                                         when is_atom(reason) do
    put_error(cs, {key, reason})
  end

  @doc """
  Validates a nested model (or list of models) stored under `field_name` by
  calling `module.func_name(model, changes)` and folding any resulting errors
  into this changeset.
  """
  def validate_model(%Cs{} = cs, field_name, module, func_name) do
    cs
    |> do_get_value(field_name)
    |> case do
      nil ->
        # Nothing stored under the field; nothing to validate.
        cs

      %{__struct__: _} = submodel ->
        # Found a struct: validate it as its own model, using its own
        # fields as the changes.
        validate_other_model(cs, field_name, module, func_name, submodel, Map.from_struct(submodel))

      %{} = changes ->
        validate_other_model(cs, field_name, module, func_name, module.__struct__, changes)

      changes_list when is_list(changes_list) ->
        # BUG FIX: the reducer must be arity 2 (element, accumulator) and
        # must thread the accumulated changeset; the previous version passed
        # an arity-1 fun (a BadArityError at runtime) and re-used `cs`,
        # which would have dropped all but the last element's errors.
        Enum.reduce(changes_list, cs, fn changes, acc_cs ->
          validate_other_model(acc_cs, field_name, module, func_name, module.__struct__, changes)
        end)
    end
  end

  # Runs the other model's validator and records its errors (if any) under
  # this changeset's field.
  defp validate_other_model(cs, field_name, module, func_name, model, changes) do
    case apply(module, func_name, [model, changes]) do
      %{errors: []} ->
        cs

      # BUG FIX: any non-empty error list marks the field invalid. The
      # previous guard `length(errors) > 1` missed the single-error case
      # and crashed with a CaseClauseError.
      %{errors: [_ | _] = errors} ->
        put_error(cs, field_name, errors)
    end
  end

  @doc """
  Adds an error for every required field that is nil or the empty string.
  """
  def validate_required(%Cs{} = cs, required_fields) do
    cs
    |> validate_required_errors(required_fields)
    |> Enum.reduce(cs, fn err, acc_cs -> put_error(acc_cs, err) end)
  end

  # Builds the list of {field, reason} errors for missing required values.
  defp validate_required_errors(%Cs{} = cs, required_fields) do
    required_fields
    |> Enum.map(fn key -> {key, do_get_value(cs, key)} end)
    |> Enum.reduce([], fn
      {key, nil}, acc -> [{key, :cannot_be_nil} | acc]
      {key, ""}, acc -> [{key, :cannot_be_empty_string} | acc]
      _, acc -> acc
    end)
  end

  @doc """
  Validates that the value under `field_name` matches the given type (atom)
  or any of the given types (list).
  """
  def validate_type(%Cs{} = cs, field_name, type) when is_atom(type) do
    value = do_get_value(cs, field_name)

    case do_validate_types([type], value) do
      :ok ->
        cs

      {:error, _} ->
        put_error(cs, {field_name, Types.error_message_by_type(type)})
    end
  end

  def validate_type(%Cs{} = cs, field_name, types) when is_atom(field_name) and is_list(types) do
    value = do_get_value(cs, field_name)

    case do_validate_types(types, value) do
      :ok ->
        cs

      {:error, :none_of_types} ->
        put_error(cs, {field_name, :invalid_type})
    end
  end

  @doc """
  Validates a list of fields. Each entry may be a bare field name (the type
  is looked up from the module's vertex definition), `{field, type}`, or
  `{field, [types]}`.
  """
  def validate_type(%Cs{module: module} = cs, typed_fields) when is_list(typed_fields) do
    type_tuples(module, typed_fields)
    |> Enum.reduce(cs, fn
      {field_name, typing}, acc_cs when is_atom(typing) or is_list(typing) ->
        validate_type(acc_cs, field_name, typing)
    end)
  end

  # A changed value wins over the model's original value.
  defp do_get_value(%Cs{model: model, changes: changes}, key) do
    Util.get_value(changes, key, model[key])
  end

  defp do_validate_types(types, value) do
    if Types.is_any_of?(types, value) do
      :ok
    else
      {:error, :none_of_types}
    end
  end

  # Normalizes the mixed field/type spec list into {field, type-or-types} tuples.
  defp type_tuples(module, types_list) do
    types_list
    |> Enum.map(fn
      key when is_atom(key) ->
        {key, retrieve_field_type(module, key)}

      {key, type} when is_atom(key) and is_atom(type) ->
        {key, type}

      {key, types} when is_atom(key) and is_list(types) ->
        {key, types}
    end)
  end

  # Looks up a field's declared type from the vertex definition; raising here
  # signals a programming error, not bad user input.
  defp retrieve_field_type(module, key) do
    case Vertex.get_field(module, key) do
      nil ->
        err = "Could not find DgraphEx.Field for key #{inspect key} in module #{inspect module}"
        raise %ArgumentError{message: err}

      %Field{type: type} ->
        type
    end
  end
end
|
lib/dgraph_ex/changeset.ex
| 0.750827
| 0.438605
|
changeset.ex
|
starcoder
|
defmodule Google.Bigtable.Admin.V2.CreateTableRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.CreateTableRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          parent: String.t(),
          table_id: String.t(),
          table: Google.Bigtable.Admin.V2.Table.t(),
          initial_splits: [Google.Bigtable.Admin.V2.CreateTableRequest.Split.t()]
        }
  defstruct [:parent, :table_id, :table, :initial_splits]

  field :parent, 1, type: :string
  field :table_id, 2, type: :string
  field :table, 3, type: Google.Bigtable.Admin.V2.Table

  field :initial_splits, 4,
    repeated: true,
    type: Google.Bigtable.Admin.V2.CreateTableRequest.Split
end
defmodule Google.Bigtable.Admin.V2.CreateTableRequest.Split do
  @moduledoc false
  # Generated protobuf message. The wire type of :key is :bytes; the typespec
  # uses String.t() since decoded bytes are plain Elixir binaries.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          key: String.t()
        }
  defstruct [:key]

  field :key, 1, type: :bytes
end
defmodule Google.Bigtable.Admin.V2.CreateTableFromSnapshotRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.CreateTableFromSnapshotRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          parent: String.t(),
          table_id: String.t(),
          source_snapshot: String.t()
        }
  defstruct [:parent, :table_id, :source_snapshot]

  field :parent, 1, type: :string
  field :table_id, 2, type: :string
  field :source_snapshot, 3, type: :string
end
defmodule Google.Bigtable.Admin.V2.DropRowRangeRequest do
  @moduledoc false
  # Generated protobuf message. The :target oneof selects either a row-key
  # prefix or a full-table drop.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          target: {atom, any},
          name: String.t()
        }
  defstruct [:target, :name]

  oneof :target, 0
  field :name, 1, type: :string
  field :row_key_prefix, 2, type: :bytes, oneof: 0
  field :delete_all_data_from_table, 3, type: :bool, oneof: 0
end
defmodule Google.Bigtable.Admin.V2.ListTablesRequest do
  @moduledoc false
  # Generated protobuf message. Field numbers come from the .proto definition
  # (note page_size is tag 4 and page_token is tag 3).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          parent: String.t(),
          view: integer,
          page_size: integer,
          page_token: String.t()
        }
  defstruct [:parent, :view, :page_size, :page_token]

  field :parent, 1, type: :string
  field :view, 2, type: Google.Bigtable.Admin.V2.Table.View, enum: true
  field :page_size, 4, type: :int32
  field :page_token, 3, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListTablesResponse do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.ListTablesResponse).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          tables: [Google.Bigtable.Admin.V2.Table.t()],
          next_page_token: String.t()
        }
  defstruct [:tables, :next_page_token]

  field :tables, 1, repeated: true, type: Google.Bigtable.Admin.V2.Table
  field :next_page_token, 2, type: :string
end
defmodule Google.Bigtable.Admin.V2.GetTableRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.GetTableRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t(),
          view: integer
        }
  defstruct [:name, :view]

  field :name, 1, type: :string
  field :view, 2, type: Google.Bigtable.Admin.V2.Table.View, enum: true
end
defmodule Google.Bigtable.Admin.V2.DeleteTableRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.DeleteTableRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t()
        }
  defstruct [:name]

  field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.ModifyColumnFamiliesRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.ModifyColumnFamiliesRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t(),
          modifications: [Google.Bigtable.Admin.V2.ModifyColumnFamiliesRequest.Modification.t()]
        }
  defstruct [:name, :modifications]

  field :name, 1, type: :string

  field :modifications, 2,
    repeated: true,
    type: Google.Bigtable.Admin.V2.ModifyColumnFamiliesRequest.Modification
end
defmodule Google.Bigtable.Admin.V2.ModifyColumnFamiliesRequest.Modification do
  @moduledoc false
  # Generated protobuf message. The :mod oneof selects create, update, or
  # drop for the column family identified by :id.
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          mod: {atom, any},
          id: String.t()
        }
  defstruct [:mod, :id]

  oneof :mod, 0
  field :id, 1, type: :string
  field :create, 2, type: Google.Bigtable.Admin.V2.ColumnFamily, oneof: 0
  field :update, 3, type: Google.Bigtable.Admin.V2.ColumnFamily, oneof: 0
  field :drop, 4, type: :bool, oneof: 0
end
defmodule Google.Bigtable.Admin.V2.GenerateConsistencyTokenRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.GenerateConsistencyTokenRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t()
        }
  defstruct [:name]

  field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.GenerateConsistencyTokenResponse do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.GenerateConsistencyTokenResponse).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          consistency_token: String.t()
        }
  defstruct [:consistency_token]

  field :consistency_token, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.CheckConsistencyRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.CheckConsistencyRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t(),
          consistency_token: String.t()
        }
  defstruct [:name, :consistency_token]

  field :name, 1, type: :string
  field :consistency_token, 2, type: :string
end
defmodule Google.Bigtable.Admin.V2.CheckConsistencyResponse do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.CheckConsistencyResponse).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          consistent: boolean
        }
  defstruct [:consistent]

  field :consistent, 1, type: :bool
end
defmodule Google.Bigtable.Admin.V2.SnapshotTableRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.SnapshotTableRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t(),
          cluster: String.t(),
          snapshot_id: String.t(),
          ttl: Google.Protobuf.Duration.t(),
          description: String.t()
        }
  defstruct [:name, :cluster, :snapshot_id, :ttl, :description]

  field :name, 1, type: :string
  field :cluster, 2, type: :string
  field :snapshot_id, 3, type: :string
  field :ttl, 4, type: Google.Protobuf.Duration
  field :description, 5, type: :string
end
defmodule Google.Bigtable.Admin.V2.GetSnapshotRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.GetSnapshotRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t()
        }
  defstruct [:name]

  field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListSnapshotsRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.ListSnapshotsRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          parent: String.t(),
          page_size: integer,
          page_token: String.t()
        }
  defstruct [:parent, :page_size, :page_token]

  field :parent, 1, type: :string
  field :page_size, 2, type: :int32
  field :page_token, 3, type: :string
end
defmodule Google.Bigtable.Admin.V2.ListSnapshotsResponse do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.ListSnapshotsResponse).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          snapshots: [Google.Bigtable.Admin.V2.Snapshot.t()],
          next_page_token: String.t()
        }
  defstruct [:snapshots, :next_page_token]

  field :snapshots, 1, repeated: true, type: Google.Bigtable.Admin.V2.Snapshot
  field :next_page_token, 2, type: :string
end
defmodule Google.Bigtable.Admin.V2.DeleteSnapshotRequest do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.DeleteSnapshotRequest).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          name: String.t()
        }
  defstruct [:name]

  field :name, 1, type: :string
end
defmodule Google.Bigtable.Admin.V2.SnapshotTableMetadata do
  @moduledoc false
  # Generated protobuf message (google.bigtable.admin.v2.SnapshotTableMetadata).
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          original_request: Google.Bigtable.Admin.V2.SnapshotTableRequest.t(),
          request_time: Google.Protobuf.Timestamp.t(),
          finish_time: Google.Protobuf.Timestamp.t()
        }
  defstruct [:original_request, :request_time, :finish_time]

  field :original_request, 1, type: Google.Bigtable.Admin.V2.SnapshotTableRequest
  field :request_time, 2, type: Google.Protobuf.Timestamp
  field :finish_time, 3, type: Google.Protobuf.Timestamp
end
# proto3 message: long-running-operation metadata for CreateTableFromSnapshot.
# Same shape as SnapshotTableMetadata but wrapping the create request.
defmodule Google.Bigtable.Admin.V2.CreateTableFromSnapshotMetadata do
  @moduledoc false
  use Protobuf, syntax: :proto3

  @type t :: %__MODULE__{
          original_request: Google.Bigtable.Admin.V2.CreateTableFromSnapshotRequest.t(),
          request_time: Google.Protobuf.Timestamp.t(),
          finish_time: Google.Protobuf.Timestamp.t()
        }
  defstruct [:original_request, :request_time, :finish_time]

  field :original_request, 1, type: Google.Bigtable.Admin.V2.CreateTableFromSnapshotRequest
  field :request_time, 2, type: Google.Protobuf.Timestamp
  field :finish_time, 3, type: Google.Protobuf.Timestamp
end
# gRPC service definition for the Bigtable table-admin API.
# Each `rpc` entry declares: method name, request message, response message.
# Long-running methods (CreateTableFromSnapshot, SnapshotTable) return a
# Google.Longrunning.Operation instead of their final result.
defmodule Google.Bigtable.Admin.V2.BigtableTableAdmin.Service do
  @moduledoc false
  use GRPC.Service, name: "google.bigtable.admin.v2.BigtableTableAdmin"

  rpc :CreateTable, Google.Bigtable.Admin.V2.CreateTableRequest, Google.Bigtable.Admin.V2.Table

  rpc :CreateTableFromSnapshot,
      Google.Bigtable.Admin.V2.CreateTableFromSnapshotRequest,
      Google.Longrunning.Operation

  rpc :ListTables,
      Google.Bigtable.Admin.V2.ListTablesRequest,
      Google.Bigtable.Admin.V2.ListTablesResponse

  rpc :GetTable, Google.Bigtable.Admin.V2.GetTableRequest, Google.Bigtable.Admin.V2.Table
  rpc :DeleteTable, Google.Bigtable.Admin.V2.DeleteTableRequest, Google.Protobuf.Empty

  rpc :ModifyColumnFamilies,
      Google.Bigtable.Admin.V2.ModifyColumnFamiliesRequest,
      Google.Bigtable.Admin.V2.Table

  rpc :DropRowRange, Google.Bigtable.Admin.V2.DropRowRangeRequest, Google.Protobuf.Empty

  rpc :GenerateConsistencyToken,
      Google.Bigtable.Admin.V2.GenerateConsistencyTokenRequest,
      Google.Bigtable.Admin.V2.GenerateConsistencyTokenResponse

  rpc :CheckConsistency,
      Google.Bigtable.Admin.V2.CheckConsistencyRequest,
      Google.Bigtable.Admin.V2.CheckConsistencyResponse

  rpc :SnapshotTable, Google.Bigtable.Admin.V2.SnapshotTableRequest, Google.Longrunning.Operation
  rpc :GetSnapshot, Google.Bigtable.Admin.V2.GetSnapshotRequest, Google.Bigtable.Admin.V2.Snapshot

  rpc :ListSnapshots,
      Google.Bigtable.Admin.V2.ListSnapshotsRequest,
      Google.Bigtable.Admin.V2.ListSnapshotsResponse

  rpc :DeleteSnapshot, Google.Bigtable.Admin.V2.DeleteSnapshotRequest, Google.Protobuf.Empty
end
# Client stub bound to the service definition above; `use GRPC.Stub` derives
# the client-call API from the service module (see the grpc-elixir docs).
defmodule Google.Bigtable.Admin.V2.BigtableTableAdmin.Stub do
  @moduledoc false
  use GRPC.Stub, service: Google.Bigtable.Admin.V2.BigtableTableAdmin.Service
end
|
lib/grpc/admin/bigtable_table_admin.pb.ex
| 0.714628
| 0.491334
|
bigtable_table_admin.pb.ex
|
starcoder
|
defmodule Cobs do
  @moduledoc """
  Elixir implementation of [Consistent Overhead Byte Stuffing](https://en.wikipedia.org/wiki/Consistent_Overhead_Byte_Stuffing)
  """

  @doc """
  Encode a binary (with `0` bytes) into a COBS encoded binary (without `0` bytes).

  Inputs longer than 254 bytes are rejected with `{:error, message}`, since a
  single COBS overhead byte can only describe up to 254 data bytes.

  ## Example

      iex> Cobs.encode(<<0x01, 0x02, 0x00, 0x03>>)
      {:ok, <<0x03, 0x01, 0x02, 0x02, 0x03>>}
  """
  @spec encode(binary()) :: {:ok, binary()} | {:error, String.t()}
  def encode(binary) do
    if byte_size(binary) <= 254 do
      {:ok, do_encode(<<>>, <<>>, binary)}
    else
      {:error, "Binary too long"}
    end
  end

  # Walks the input byte by byte, accumulating the current zero-free run in
  # `block`. On a zero byte (or at end of input) the run is flushed to `head`,
  # prefixed with its overhead byte (run length + 1).
  @spec do_encode(binary(), binary(), binary()) :: binary()
  defp do_encode(head, block, tail)

  defp do_encode(head, block, <<>>),
    do: head <> <<byte_size(block) + 1>> <> block

  defp do_encode(head, block, <<0, tail::binary>>),
    do: do_encode(head <> <<byte_size(block) + 1>> <> block, <<>>, tail)

  defp do_encode(head, block, <<val, tail::binary>>),
    do: do_encode(head, block <> <<val>>, tail)

  @doc """
  Encode a binary (with `0` bytes) into a COBS encoded binary (without `0` bytes).

  Raise an `ArgumentError` on input data exceeding the allowed length (254 bytes)

  ## Example

      iex> Cobs.encode!(<<0x01, 0x02, 0x00, 0x03>>)
      <<0x03, 0x01, 0x02, 0x02, 0x03>>
  """
  @spec encode!(binary()) :: binary()
  def encode!(binary) do
    # encode/1 only ever returns {:ok, _} or {:error, _}, so no further
    # fallback clause is needed here.
    case encode(binary) do
      {:ok, result} -> result
      {:error, message} -> raise ArgumentError, message
    end
  end

  @doc """
  Decode COBS encoded binary (without `0` bytes) into a binary (with `0` bytes).

  Returns `{:error, message}` on malformed input (a truncated block or a
  stray zero byte).

  ## Example

      iex> Cobs.decode(<<0x03, 0x01, 0x02, 0x02, 0x03>>)
      {:ok, <<0x01, 0x02, 0x00, 0x03>>}
  """
  @spec decode(binary()) :: {:ok, binary()} | {:error, String.t()}
  def decode(binary) do
    do_decode(<<>>, binary)
  end

  @spec do_decode(binary(), binary()) :: {:ok, binary()} | {:error, String.t()}
  defp do_decode(head, tail)

  defp do_decode(head, <<>>) do
    {:ok, head}
  end

  # A zero byte can never occur in well-formed COBS data; previously this
  # crashed on a negative binary size instead of returning an error tuple.
  defp do_decode(_head, <<0, _tail::binary>>) do
    {:error, "Unexpected zero byte in COBS encoded data"}
  end

  defp do_decode(head, <<ohb, tail::binary>>) do
    # The overhead byte encodes block length + 1.
    block_length = ohb - 1

    if block_length > byte_size(tail) do
      {:error, "Offset byte specifies more bytes than available"}
    else
      <<block::binary-size(block_length), remaining::binary>> = tail
      # Every block except the last is followed by an implicit zero.
      new_head = if byte_size(remaining) > 0, do: head <> block <> <<0>>, else: head <> block
      do_decode(new_head, remaining)
    end
  end

  @doc """
  Decode COBS encoded binary (without `0` bytes) into a binary (with `0` bytes).

  Raise an `ArgumentError` on invalid input data.

  ## Example

      iex> Cobs.decode!(<<0x03, 0x01, 0x02, 0x02, 0x03>>)
      <<0x01, 0x02, 0x00, 0x03>>
  """
  @spec decode!(binary()) :: binary()
  def decode!(binary) do
    # decode/1 only ever returns {:ok, _} or {:error, _}.
    case decode(binary) do
      {:ok, result} -> result
      {:error, message} -> raise ArgumentError, message
    end
  end
end
|
lib/cobs.ex
| 0.897635
| 0.671072
|
cobs.ex
|
starcoder
|
defmodule Scrip.Response do
  @moduledoc """
  Handles the JSON data returned in the response from the App Store.
  """

  defmodule Error do
    @moduledoc """
    Module dealing with error responses from the App Store.

    The struct contains:

      * `:status` - status code returned by the App Store
      * `:message` - message explaining the status code
      * `:environment` - which environment the error was generated (sometimes nil)
    """

    @type t :: %__MODULE__{
            environment: :sandbox | :production | nil,
            message: String.t(),
            status: integer
          }

    @enforce_keys [:status, :message]
    defstruct [:status, :message, :environment]

    @doc """
    Builds a `#{inspect(__MODULE__)}` struct from the raw status, message and
    environment values returned by the App Store.
    """
    @spec new(integer, String.t(), String.t()) :: t()
    def new(status, message, environment) do
      %__MODULE__{
        environment: Scrip.Util.to_environment(environment),
        status: status,
        message: message
      }
    end
  end

  @typedoc """
  The environment for which the receipt was generated.

  #### Possible values:

    * `:sandbox`
    * `:production`
  """
  @type environment :: :sandbox | :production

  # Human-readable descriptions for every documented App Store status code.
  # Must be defined before the @typedoc below, which interpolates it at
  # compile time.
  @status_map %{
    0 => "The request is valid",
    21_000 => "The request to the App Store was not made using the HTTP POST request method.",
    21_001 => "This status code is no longer sent by the App Store.",
    21_002 =>
      "The data in the receipt-data property was malformed or the service experienced a temporary issue. Try again.",
    21_003 => "The receipt could not be authenticated.",
    21_004 =>
      "The shared secret you provided does not match the shared secret on file for your account.",
    21_005 => "The receipt server was temporarily unable to provide the receipt. Try again.",
    21_006 =>
      "This receipt is valid but the subscription has expired. When this status code is returned to your server, the receipt data is also decoded and returned as part of the response. Only returned for iOS 6-style transaction receipts for auto-renewable subscriptions.",
    21_007 =>
      "This receipt is from the test environment, but it was sent to the production environment for verification.",
    21_008 =>
      "This receipt is from the production environment, but it was sent to the test environment for verification.",
    21_009 => "Internal data access error. Try again later.",
    21_010 => "The user account cannot be found or has been deleted."
  }

  @typedoc """
  The status of the app receipt.

  #### Possible values:
  #{
    Enum.reduce(@status_map, "", fn {key, value}, acc ->
      "#{acc}\n * `#{key}` - #{value}\n"
    end)
  }
  See: https://developer.apple.com/documentation/appstorereceipts/status
  """
  @type status :: 0 | 21_000..21_010

  @typedoc """
  An array that contains all in-app purchase transactions.
  Only returned for receipts that contain auto-renewable subscriptions.

  https://developer.apple.com/documentation/appstorereceipts/responsebody/latest_receipt_info
  """
  @type latest_receipt_info :: [Scrip.IAPReceipt.t()]

  @typedoc """
  The JSON data returned in the response from the App Store.

  See: https://developer.apple.com/documentation/appstorereceipts/responsebody
  """
  @type t :: %__MODULE__{
          environment: environment,
          latest_receipt_info: latest_receipt_info | nil,
          latest_receipt: String.t(),
          message: String.t(),
          pending_renewal_info: [Scrip.PendingRenewalInfo.t()] | nil,
          receipt: Scrip.Receipt.t(),
          status: status
        }

  @enforce_keys [
    :environment,
    :latest_receipt_info,
    :latest_receipt,
    :message,
    :receipt,
    :status
  ]
  defstruct [
    :environment,
    :latest_receipt_info,
    :latest_receipt,
    :message,
    :pending_renewal_info,
    :receipt,
    :status
  ]

  @doc """
  Converts a raw response map to a `#{inspect(__MODULE__)}` struct (for status
  `0` or `21006`, the two statuses that carry decoded receipt data) or a
  `#{inspect(__MODULE__)}.Error` struct for every other status.
  """
  @spec new(response :: map) :: t() | Error.t()
  def new(%{"status" => status} = response) when status in [0, 21_006] do
    %__MODULE__{
      environment: Scrip.Util.to_environment(response["environment"]),
      # latest_receipt_info / pending_renewal_info are only present for
      # auto-renewable subscriptions; `&&` keeps them nil when absent.
      latest_receipt_info:
        response["latest_receipt_info"] &&
          Enum.map(response["latest_receipt_info"], &Scrip.IAPReceipt.new/1),
      latest_receipt: response["latest_receipt"],
      message: build_message(status),
      pending_renewal_info:
        response["pending_renewal_info"] &&
          Enum.map(response["pending_renewal_info"], &Scrip.PendingRenewalInfo.new/1),
      receipt: Scrip.Receipt.new(response["receipt"]),
      status: status
    }
  end

  def new(response) do
    Error.new(
      response["status"],
      build_message(response["status"]),
      response["environment"]
    )
  end

  @doc """
  Returns the human-readable message for an App Store `status` code, or a
  generic "Unknown status" message for unrecognised values.
  """
  @spec build_message(term) :: String.t()
  def build_message(status) do
    Map.get(@status_map, status, "Unknown status (#{inspect(status)}) was returned")
  end
end
|
lib/scrip/response.ex
| 0.782288
| 0.470128
|
response.ex
|
starcoder
|
defmodule MeshxConsul.Service.Endpoint do
  @moduledoc """
  Consul agent http API endpoint.

  `MeshxConsul` is using OTP [`:httpc`](http://erlang.org/doc/man/httpc.html) HTTP client to access Consul agent HTTP API endpoint when managing services and upstreams. Required by `:httpc` configuration is described in `MeshxConsul` **Configuration options** section.

  #### Example
  Query [Consul KV store](https://www.consul.io/api/kv):
  ```elixir
  iex(1)> MeshxConsul.Service.Endpoint.put("/kv/my-key", "my-key_value")
  :ok
  iex(2)> MeshxConsul.Service.Endpoint.get("/kv/my-key", %{raw: true})
  {:ok, 'my-key_value'}
  iex(3)> MeshxConsul.Service.Endpoint.delete("/kv/my-key")
  :ok
  iex(4)> MeshxConsul.Service.Endpoint.get("/kv/my-key")
  {:error,
   [{{'HTTP/1.1', 404, 'Not Found'}, [...], []},
    "Get request uri: [http:///v1/kv/my-key?]"
   ]}
  ```
  """
  alias MeshxConsul.App.C

  # All agent endpoints are rooted at API version 1.
  @api "/v1"

  @doc """
  Returns configuration and member information of the local agent using [`/agent/self`](https://www.consul.io/api-docs/agent#read-configuration) Consul HTTP API endpoint.

  ```elixir
  iex(1)> MeshxConsul.Service.Endpoint.self()
  {:ok,
   %{
     "Config" => %{
       "Datacenter" => "my-dc",
       "NodeName" => "h11",
       ...
     }
     ...
   }
  }
  ```
  """
  @spec self() :: {:ok, map()} | {:error, reason :: term()}
  def self(), do: get("/agent/self")

  @doc """
  Returns `GET` request response at `path` API endpoint address.

  Pass `%{raw: true}` in `query` to receive the raw body instead of
  JSON-decoded data.
  """
  @spec get(path :: String.t(), query :: map()) :: {:ok, response :: term()} | {:error, reason :: term()}
  def get(path, query \\ %{}) do
    uri = build_uri(path, query)

    case :httpc.request(:get, {uri, C.httpc_headers()}, C.httpc_request_http_options(), C.httpc_request_options()) do
      {:ok, {{_http_version, 200, _reason_phrase}, _headers, body}} ->
        if Map.get(query, :raw, false), do: {:ok, body}, else: Jason.decode(body)

      {:ok, {200, body}} ->
        if Map.get(query, :raw, false), do: {:ok, body}, else: Jason.decode(body)

      {:ok, err} ->
        {:error, [err, "Get request uri: [#{uri}]"]}

      err ->
        {:error, [err, "Get request uri: [#{uri}]"]}
    end
  end

  @doc """
  `PUT` `payload` at `path` API endpoint address.

  Map payloads are JSON-encoded before sending; binary payloads are sent
  as-is.
  """
  @spec put(path :: String.t(), payload :: String.t() | map(), query :: map()) :: :ok | {:error, reason :: term()}
  def put(path, payload \\ "", query \\ %{})

  def put(path, payload, query) when is_map(payload) do
    case Jason.encode(payload) do
      {:ok, payload} -> put(path, payload, query)
      err -> err
    end
  end

  def put(path, payload, query) when is_bitstring(payload) do
    uri = build_uri(path, query)

    case :httpc.request(
           :put,
           {uri, C.httpc_headers(), 'application/json', payload},
           C.httpc_request_http_options(),
           C.httpc_request_options()
         ) do
      {:ok, {{_http_version, 200, _reason_phrase}, _headers, _body}} -> :ok
      {:ok, {200, _body}} -> :ok
      {:ok, err} -> {:error, [err, "Put request uri: [#{uri}]"]}
      err -> {:error, [err, "Put request uri: [#{uri}]"]}
    end
  end

  @doc """
  Issues `DELETE` request at `path`.
  """
  @spec delete(path :: String.t(), query :: map()) :: :ok | {:error, reason :: term()}
  def delete(path, query \\ %{}) do
    uri = build_uri(path, query)

    case :httpc.request(:delete, {uri, C.httpc_headers()}, C.httpc_request_http_options(), C.httpc_request_options()) do
      {:ok, {{_http_version, 200, _reason_phrase}, _headers, 'true'}} -> :ok
      {:ok, {200, 'true'}} -> :ok
      # Fixed copy-paste bug: these messages previously said "Put request uri".
      {:ok, err} -> {:error, [err, "Delete request uri: [#{uri}]"]}
      err -> {:error, [err, "Delete request uri: [#{uri}]"]}
    end
  end

  # Builds the full agent API address for `path`/`query` as a charlist, as
  # required by :httpc. Shared by get/2, put/3 and delete/2.
  defp build_uri(path, query) do
    C.uri()
    |> Map.put(:path, Path.join(@api, path))
    |> Map.put(:query, URI.encode_query(query))
    |> URI.to_string()
    |> to_charlist()
  end
end
|
lib/service/endpoint.ex
| 0.864468
| 0.679332
|
endpoint.ex
|
starcoder
|
defmodule Day07 do
  @moduledoc """
  AoC 2019, Day 7 - Amplification Circuit

  (Note: previously mislabelled "AoC 2017"; the Intcode amplifier/thruster
  puzzle is Advent of Code 2019, Day 7.)
  """

  @doc """
  Find largest output signal that can be sent to thrusters
  """
  def part1 do
    Util.priv_file(:day07, "day7_input.txt")
    |> Intcode.load()
    |> max_thrust(0..4)
  end

  @doc """
  Find largest output signal that can be sent to thrusters in feedback mode
  """
  def part2 do
    Util.priv_file(:day07, "day7_input.txt")
    |> Intcode.load()
    |> max_feedback_thrust(5..9)
  end

  @doc """
  Find the max thrust
  """
  def max_thrust(prog, phases) do
    max_for_type(prog, phases, &compute_thrust/2)
  end

  @doc """
  Find the max thrust in the feedback loop configuration
  """
  def max_feedback_thrust(prog, phases) do
    max_for_type(prog, phases, &compute_feedback_thrust/2)
  end

  # Tries every permutation of the phase settings and keeps the maximum
  # result of the given thrust function `f`.
  defp max_for_type(prog, phases, f) do
    Enum.into(phases, [])
    |> permutations()
    |> Enum.map(&f.(prog, &1))
    |> Enum.max()
  end

  @doc """
  Compute the output signal sent to the thrusters for a given phase setting
  """
  def compute_thrust(prog, phases) do
    # The last amp sends its output to self() (see spawn_amps/2); seed the
    # first amp with 0 and wait for the final signal in our mailbox.
    [a_pid | _pids] = spawn_amps(prog, phases)
    send(a_pid, 0)

    receive do
      x -> x
    end
  end

  @doc """
  Compute the output signal sent to the thrusters in feedback mode
  """
  def compute_feedback_thrust(prog, phases) do
    [a_pid | _pids] = spawn_amps(prog, phases)
    send(a_pid, 0)
    await(a_pid)
  end

  # Feedback loop: forward every signal that arrives here back into the first
  # amp while it is still alive; the last signal received once it has halted
  # is the answer.
  defp await(pid) do
    receive do
      x ->
        if Process.alive?(pid) do
          send(pid, x)
          await(pid)
        else
          x
        end
    end
  end

  # Spawns one Intcode process per phase setting, wired in reverse so each
  # amp sends its output to the next amp's pid; the final amp sends to
  # self(). Returns the pids with the first amp at the head.
  defp spawn_amps(prog, phases) do
    Enum.reverse(phases)
    |> Enum.reduce(
      [self()],
      fn p, others = [next_pid | _rest] ->
        pid =
          Process.spawn(
            Intcode,
            :run,
            [
              prog,
              # Phase setting is the first input; further inputs are read
              # from the process mailbox.
              [p],
              fn ->
                receive do
                  x -> x
                end
              end,
              &send(next_pid, &1)
            ],
            []
          )

        [pid | others]
      end
    )
  end

  # All orderings of `list`; classic recursive definition.
  defp permutations([]), do: [[]]

  defp permutations(list),
    do: for(elem <- list, rest <- permutations(list -- [elem]), do: [elem | rest])
end
|
apps/day07/lib/day07.ex
| 0.708717
| 0.409457
|
day07.ex
|
starcoder
|
defmodule AdaptableCostsEvaluatorWeb.OutputController do
  @moduledoc """
  CRUD controller for Output resources nested under a Computation.

  Every action authorizes the current user against the parent Computation via
  `Bodyguard.permit/4` before touching data; authorization/lookup failures
  fall through to `FallbackController`.
  """
  use AdaptableCostsEvaluatorWeb, :controller
  use OpenApiSpex.ControllerSpecs

  import AdaptableCostsEvaluatorWeb.Helpers.AuthHelper, only: [current_user: 1]

  alias AdaptableCostsEvaluator.{Outputs, Computations}
  alias AdaptableCostsEvaluator.Outputs.Output

  action_fallback AdaptableCostsEvaluatorWeb.FallbackController

  alias AdaptableCostsEvaluatorWeb.ApiSpec.{Schemas, Parameters, Errors}

  # OpenAPI metadata shared by all operations below.
  tags ["Outputs"]
  security [%{"JWT" => []}]

  operation :index,
    summary: "List all Outputs in the Computation",
    parameters: [Parameters.computation_id()],
    responses:
      [
        ok: {"Outputs list response", "application/json", Schemas.OutputsResponse}
      ] ++ Errors.internal_errors()

  def index(conn, %{"computation_id" => computation_id}) do
    computation = get_computation!(computation_id)

    with :ok <- Bodyguard.permit(Output, :list, current_user(conn), computation) do
      outputs = Outputs.list_outputs(computation)
      render(conn, "index.json", outputs: outputs)
    end
  end

  operation :create,
    summary: "Create a new Output in the Computation",
    parameters: [Parameters.computation_id()],
    request_body:
      {"Output attributes", "application/json", Schemas.OutputRequest, required: true},
    responses:
      [
        created: {"Output response", "application/json", Schemas.OutputResponse}
      ] ++ Errors.all_errors()

  def create(conn, %{"output" => output_params, "computation_id" => computation_id}) do
    computation = get_computation!(computation_id)
    # Force the parent id from the URL so the body cannot reassign the Output
    # to a different Computation.
    output_params = Map.put(output_params, "computation_id", computation_id)

    with :ok <- Bodyguard.permit(Output, :create, current_user(conn), computation),
         {:ok, %Output{} = output} <- Outputs.create_output(output_params) do
      conn
      |> put_status(:created)
      |> put_resp_header(
        "location",
        Routes.computation_output_path(conn, :show, computation_id, output)
      )
      |> render("show.json", output: output)
    end
  end

  operation :show,
    summary: "Retrieve the Output from the Computation",
    parameters: [Parameters.id(), Parameters.computation_id()],
    responses:
      [
        ok: {"Output response", "application/json", Schemas.OutputResponse}
      ] ++ Errors.internal_errors()

  def show(conn, %{"id" => id, "computation_id" => computation_id}) do
    computation = get_computation!(computation_id)

    with :ok <- Bodyguard.permit(Output, :read, current_user(conn), computation) do
      output = Outputs.get_output!(id, computation)
      render(conn, "show.json", output: output)
    end
  end

  operation :update,
    summary: "Update the Output in the Computation",
    parameters: [Parameters.id(), Parameters.computation_id()],
    request_body:
      {"Output attributes", "application/json", Schemas.OutputRequest, required: true},
    responses:
      [
        ok: {"Output response", "application/json", Schemas.OutputResponse}
      ] ++ Errors.all_errors()

  def update(conn, %{"id" => id, "output" => output_params, "computation_id" => computation_id}) do
    computation = get_computation!(computation_id)
    output = Outputs.get_output!(id, computation)

    with :ok <- Bodyguard.permit(Output, :update, current_user(conn), computation),
         {:ok, %Output{} = output} <- Outputs.update_output(output, output_params) do
      render(conn, "show.json", output: output)
    end
  end

  operation :delete,
    summary: "Delete the Output in the Computation",
    parameters: [Parameters.id(), Parameters.computation_id()],
    responses:
      [
        no_content: {"Output was successfully deleted", "application/json", nil}
      ] ++ Errors.internal_errors()

  def delete(conn, %{"id" => id, "computation_id" => computation_id}) do
    computation = get_computation!(computation_id)
    output = Outputs.get_output!(id, computation)

    with :ok <- Bodyguard.permit(Output, :delete, current_user(conn), computation),
         {:ok, %Output{}} <- Outputs.delete_output(output) do
      send_resp(conn, :no_content, "")
    end
  end

  # Bang lookup of the parent Computation; delegates to the Computations
  # context (raises when not found, per the `!` convention).
  defp get_computation!(id), do: Computations.get_computation!(id)
end
|
lib/adaptable_costs_evaluator_web/controllers/output_controller.ex
| 0.731538
| 0.406567
|
output_controller.ex
|
starcoder
|
defmodule Day7 do
  @moduledoc """
  --- Day 7: Recursive Circus ---

  Wandering further through the circuits of the computer, you come upon a tower of programs that have gotten themselves
  into a bit of trouble. A recursive algorithm has gotten out of hand, and now they're balanced precariously in a large
  tower.

  One program at the bottom supports the entire tower. It's holding a large disc, and on the disc are balanced several
  more sub-towers. At the bottom of these sub-towers, standing on the bottom disc, are other programs, each holding
  their own disc, and so on. At the very tops of these sub-sub-sub-...-towers, many programs stand simply keeping the
  disc below them balanced but with no disc of their own.

  You offer to help, but first you need to understand the structure of these towers. You ask each program to yell out
  their name, their weight, and (if they're holding a disc) the names of the programs immediately above them balancing
  on that disc. You write this information down (your puzzle input). Unfortunately, in their panic, they don't do this
  in an orderly fashion; by the time you're done, you're not sure which program gave which information.

  For example, if your list is the following:

      pbga (66)
      xhth (57)
      ebii (61)
      havc (66)
      ktlj (57)
      fwft (72) -> ktlj, cntj, xhth
      qoyq (66)
      padx (45) -> pbga, havc, qoyq
      tknk (41) -> ugml, padx, fwft
      jptl (61)
      ugml (68) -> gyxo, ebii, jptl
      gyxo (61)
      cntj (57)

  ...then you would be able to recreate the structure of the towers that looks like this:

                      gyxo
                    /
               ugml - ebii
             /      \\
            |         jptl
            |
            |         pbga
           /        /
      tknk --- padx - havc
           \\        \\
            |         qoyq
            |
            |         ktlj
             \\      /
               fwft - cntj
                    \\
                      xhth

  In this example, tknk is at the bottom of the tower (the bottom program), and is holding up ugml, padx, and fwft.
  Those programs are, in turn, holding up other programs; in this example, none of those programs are holding up any
  other programs, and are all the tops of their own towers. (The actual tower balancing in front of you is much larger.)

  Before you're ready to help them, you need to make sure your information is correct. What is the name of the bottom
  program?

  --- Part Two ---

  The programs explain the situation: they can't get down. Rather, they could get down, if they weren't expending
  all of their energy trying to keep the tower balanced. Apparently, one program has the wrong weight, and until it's
  fixed, they're stuck here.

  For any program holding a disc, each program standing on that disc forms a sub-tower. Each of those sub-towers are
  supposed to be the same weight, or the disc itself isn't balanced. The weight of a tower is the sum of the weights of
  the programs in that tower.

  In the example above, this means that for ugml's disc to be balanced, gyxo, ebii, and jptl must all have the same
  weight, and they do: 61.

  However, for tknk to be balanced, each of the programs standing on its disc and all programs above it must each match.
  This means that the following sums must all be the same:

      ugml + (gyxo + ebii + jptl) = 68 + (61 + 61 + 61) = 251
      padx + (pbga + havc + qoyq) = 45 + (66 + 66 + 66) = 243
      fwft + (ktlj + cntj + xhth) = 72 + (57 + 57 + 57) = 243

  As you can see, tknk's disc is unbalanced: ugml's stack is heavier than the other two. Even though the nodes above
  ugml are balanced, ugml itself is too heavy: it needs to be 8 units lighter for its stack to weigh 243 and keep the
  towers balanced. If this change were made, its weight would be 60.

  Given that exactly one program is the wrong weight, what would its weight need to be to balance the entire tower?
  """

  # Part 1: the bottom program is the root of the arborescence formed by the
  # "holds up" edges.
  def part_a do
    build_tree("res/day7.input") |>
      :digraph_utils.arborescence_root()
  end

  def test_a do
    build_tree("res/day7_test.input") |>
      :digraph_utils.arborescence_root()
  end

  # Part 2: walk down from the root summing subtree weights; prints
  # diagnostics and throws at the unbalanced node (answer read from output).
  def part_b do
    digraph=build_tree("res/day7.input")
    {:yes, root} = :digraph_utils.arborescence_root(digraph)
    # Vertex labels are the weights, stored as strings by process_line/2.
    {root, weight} = :digraph.vertex(digraph, root)
    find_imbalance(digraph, {root, String.to_integer(weight)})
  end

  def test_b do
    digraph=build_tree("res/day7_test.input")
    {:yes, root} = :digraph_utils.arborescence_root(digraph)
    {root, weight} = :digraph.vertex(digraph, root)
    find_imbalance(digraph, {root, String.to_integer(weight)})
  end

  # Parses the puzzle input into an acyclic :digraph, one line per program.
  # Splitting on space/parens/comma yields [name, weight | children-part].
  def build_tree(filename) do
    digraph = :digraph.new([:acyclic])
    File.read!(filename) |>
      String.split("\n") |>
      Enum.map(
        fn(line) ->
          String.split(line, [" ", "(", ")", ","], trim: true) |>
            process_line(digraph)
        end)
    digraph
  end

  # Leaf program: just a vertex whose label is its weight (string).
  def process_line([vertex, weight], digraph) do
    :digraph.add_vertex(digraph, vertex, weight)
  end

  # Program with children: add the vertex, then one edge per child.
  def process_line([vertex, weight|more], digraph) do
    :digraph.add_vertex(digraph, vertex, weight)
    process_edges(more, {vertex, weight}, digraph)
  end

  def process_edges([], {_, _}, _) do
    :done
  end

  # Skip the "->" token that separates the weight from the child list.
  def process_edges(["->"|edges], {vertex, weight}, digraph) do
    process_edges(edges, {vertex, weight}, digraph)
  end

  def process_edges([edge_vertex|edges], {vertex, weight}, digraph) do
    case :digraph.add_edge(digraph, vertex, edge_vertex) do
      {:error, {:bad_vertex, new_v}} ->
        # Child not seen yet: add a placeholder vertex (its weight label is
        # filled in later when its own input line is processed) and retry.
        :digraph.add_vertex(digraph, new_v)
        :digraph.add_edge(digraph, vertex, edge_vertex)
      _ ->
        :ok
    end
    process_edges(edges, {vertex, weight}, digraph)
  end

  # Returns the total weight of the subtree rooted at `r` when balanced;
  # on imbalance prints per-branch diagnostics and throws :break.
  def find_imbalance(digraph, {r, w}) do
    children=:digraph.out_neighbours(digraph, r)
    children_w=check_weights(digraph, children, [])
    case match_weights(children_w) do
      true ->
        Enum.sum(children_w)+w
      false ->
        IO.inspect Enum.zip(get_children_weight(digraph, children, []),children), label: "branches"
        IO.inspect children_w, label: "total_branch_weights"
        throw :break
    end
  end

  # Accumulates the total subtree weight of each child (recurses via
  # find_imbalance/2, so a deeper imbalance is reported first).
  def check_weights(_digraph, [], acc) do
    acc
  end

  def check_weights(digraph, [child|t], acc) do
    {child, weight} = :digraph.vertex(digraph, child)
    sub_branch_weight=find_imbalance(digraph, {child, String.to_integer(weight)})
    check_weights(digraph, t, [sub_branch_weight|acc])
  end

  # True when all elements of the list are equal (pairwise head comparison).
  def match_weights([]) do
    true
  end

  def match_weights([_]) do
    true
  end

  def match_weights([h,h|t]) do
    match_weights([h|t])
  end

  def match_weights(_) do
    false
  end

  # Collects the own (not subtree) weight labels of the given child vertices.
  def get_children_weight(_, [], acc) do
    acc
  end

  def get_children_weight(digraph, [child|tail], acc) do
    {_, weight} = :digraph.vertex(digraph, child)
    get_children_weight(digraph, tail, [weight|acc])
  end
end
|
lib/day7.ex
| 0.720663
| 0.757682
|
day7.ex
|
starcoder
|
defmodule Grax.Id.Schema do
  @moduledoc false
  # Represents one identifier schema: a URI template (or the :bnode sentinel)
  # bound to a namespace and the Grax schema module(s) it applies to, with
  # optional selector, counter, var-mapping and extension hooks.

  alias Grax.Id.Namespace
  alias Grax.Id.Schema.Extension

  @type template :: struct

  @type t :: %__MODULE__{
          namespace: Namespace.t(),
          template: template | :bnode,
          schema: module | [module],
          selector: {module, atom} | nil,
          counter: {module, atom} | nil,
          var_mapping: {module, atom} | nil,
          extensions: list | nil
        }

  @enforce_keys [:namespace, :template, :schema]
  defstruct [:namespace, :template, :schema, :selector, :counter, :var_mapping, :extensions]

  # Sentinel template value marking blank-node id schemas.
  @bnode_template :bnode

  @doc """
  Builds an id schema for `namespace` from a URI `template` string.

  Requires at least one of the `:schema` or `:selector` options; raises
  `ArgumentError` otherwise, and raises if the template fails to parse.
  """
  def new(namespace, template, opts) do
    selector = Keyword.get(opts, :selector)
    schema = Keyword.get(opts, :schema)

    unless schema || selector do
      raise ArgumentError, "no :schema or :selector provided on Grax.Id.Schema"
    end

    with {:ok, template} <- init_template(template) do
      %__MODULE__{
        namespace: namespace,
        template: template,
        schema: schema,
        counter: counter_tuple(namespace, opts),
        var_mapping: Keyword.get(opts, :var_mapping),
        selector: selector
      }
      |> Extension.init(Keyword.get(opts, :extensions), opts)
    else
      {:error, error} -> raise error
    end
  end

  @doc """
  Builds an id schema that always produces blank nodes for `schema`.
  """
  def new_blank_node_schema(namespace, schema) do
    %__MODULE__{
      namespace: namespace,
      schema: schema,
      template: @bnode_template
    }
  end

  # Parses the URI template string; returns {:ok, template} | {:error, _}.
  defp init_template(template) do
    YuriTemplate.parse(template)
  end

  # Resolves the :counter option into a {adapter, name} tuple (or nil).
  defp counter_tuple(namespace, opts) do
    opts
    |> Keyword.get(:counter)
    |> counter_tuple(namespace, opts)
  end

  defp counter_tuple(nil, _, _), do: nil

  # Adapter resolution order: explicit opt, namespace option, library default.
  defp counter_tuple(name, namespace, opts) do
    {
      Keyword.get(opts, :counter_adapter) ||
        Namespace.option(namespace, :counter_adapter) ||
        Grax.Id.Counter.default_adapter(),
      name
    }
  end

  @doc """
  Generates an id for `variables` according to `id_schema`.

  Accepts a keyword list, a plain map or a Grax mapping struct (reduced to a
  map). Blank-node schemas produce a fresh `RDF.BlankNode`; template schemas
  expand the template after applying var mapping, extensions and
  preprocessing. Returns `{:ok, id}` or `{:error, reason}`.
  """
  def generate_id(id_schema, variables, opts \\ [])

  def generate_id(%__MODULE__{template: @bnode_template}, _, _) do
    {:ok, RDF.BlankNode.new("_" <> UUID.uuid4(:hex))}
  end

  def generate_id(%__MODULE__{} = id_schema, variables, opts) when is_list(variables) do
    generate_id(id_schema, Map.new(variables), opts)
  end

  def generate_id(%__MODULE__{} = id_schema, %_{} = mapping, opts) do
    generate_id(id_schema, Map.from_struct(mapping), opts)
  end

  def generate_id(%__MODULE__{} = id_schema, variables, opts) do
    variables =
      variables
      |> add_schema_var(id_schema)
      |> add_counter_var(id_schema)

    with {:ok, variables} <- var_mapping(id_schema, variables),
         {:ok, variables} <- Extension.call(id_schema, variables, opts),
         {:ok, variables} <- preprocess_variables(id_schema, variables),
         {:ok, segment} <- YuriTemplate.expand(id_schema.template, variables) do
      {:ok, expand(id_schema, segment, opts)}
    end
  end

  @doc """
  Returns the parameter names of the id schema's URI template.
  """
  def parameters(%{template: template}), do: YuriTemplate.parameters(template)

  # Restricts `variables` to the template parameters and stringifies the
  # values; errors if any parameter is missing or nil.
  defp preprocess_variables(id_schema, variables) do
    parameters = parameters(id_schema)

    parameters
    |> Enum.filter(fn parameter -> is_nil(Map.get(variables, parameter)) end)
    |> case do
      [] ->
        {:ok,
         variables
         |> Map.take(parameters)
         |> Map.new(fn {variable, value} -> {variable, to_string(value)} end)}

      missing ->
        {:error, "no value for id schema template parameter: #{Enum.join(missing, ", ")}"}
    end
  end

  defp add_schema_var(_, %{schema: nil} = id_schema) do
    raise "no schema found in id schema #{inspect(id_schema)}"
  end

  # Injects the schema module under the reserved :__schema__ variable.
  defp add_schema_var(variables, %{schema: schema}) do
    Map.put(variables, :__schema__, schema)
  end

  defp add_counter_var(variables, %{counter: nil}), do: variables

  # Increments the configured counter and exposes it as the :counter variable.
  defp add_counter_var(variables, %{counter: {adapter, name}}) do
    case adapter.inc(name) do
      {:ok, value} -> Map.put(variables, :counter, value)
      {:error, error} -> raise error
    end
  end

  # Applies the user-provided {module, fun} variable mapping, if any.
  defp var_mapping(%__MODULE__{var_mapping: {mod, fun}}, variables),
    do: apply(mod, fun, [variables])

  defp var_mapping(_, variables), do: {:ok, variables}

  @doc """
  Prefixes the expanded template `id_segment` with the (schema's) namespace
  and returns the result as an `RDF.IRI`.
  """
  def expand(id_schema, id_segment, opts \\ [])

  def expand(%__MODULE__{} = id_schema, id_segment, opts) do
    expand(id_schema.namespace, id_segment, opts)
  end

  def expand(namespace, id_segment, _opts) do
    RDF.iri(to_string(namespace) <> id_segment)
  end

  @doc """
  Fetches `key` from `opts`, falling back to the schema's namespace options.
  """
  def option(opts, key, id_schema) do
    Keyword.get(opts, key) ||
      Namespace.option(id_schema.namespace, key)
  end

  @doc """
  Like `option/3` but raises `ArgumentError` when the option is absent.
  """
  def option!(opts, key, id_schema) do
    option(opts, key, id_schema) ||
      raise ArgumentError, "required #{inspect(key)} keyword argument missing"
  end
end
|
lib/grax/id/schema.ex
| 0.669205
| 0.426202
|
schema.ex
|
starcoder
|
defmodule Grakn do
@moduledoc """
The main entry point for interacting with Grakn. All functions take a
connection reference.
"""
@behaviour Multix.OnFailure
@typedoc """
A connection process name, pid or reference.
A connection reference is used when making multiple requests within a
transaction, see `transaction/3`.
"""
@type conn :: DBConnection.conn()
@default_timeout :timer.minutes(5)
@doc """
Start and link to a Grakn connnection process.
### Single-server Options
* `:hostname` - The hostname of the Grakn server to connect to (default: "localhost")
* `:port` - The port of the Grakn server (default: 48555)
### Muti-server Options
* `:servers` - A list of server options (e.g. [[hostname: "10.0.0.1", port: 48555], [hostname: "10.0.0.2"]])
`:name` is required to use alongside of this option
* `:select_strategy` - select strategy for selecting connection, which implements `Multix.OnGet`
"""
@spec start_link(Keyword.t()) :: {:ok, conn()} | {:error, any}
def start_link(opts \\ []) do
opts = with_start_config(opts)
case Keyword.get(opts, :servers) do
list when is_list(list) -> Grakn.Sup.start_link(opts)
nil -> DBConnection.start_link(Grakn.Protocol, opts)
end
end
@doc """
Execute a query on the connection process. Queries can anly be run run within
a transaction, see `transaction/3`.
### Options
* `:include_inferences` - Boolean specifying if inferences should be
included in the querying process (default: true)
* `:stream` - Boolean specifying if stream should be returned (default: false)
"""
@spec query(conn(), Grakn.Query.t(), Keyword.t()) :: any()
def query(conn, query, opts \\ []) do
DBConnection.execute(get_conn(conn), query, [], with_transaction_config(opts))
end
@doc """
Execute a query on the connection process and raise an exception if there is
an error. See `query/3` for documentation.
"""
@spec query!(conn(), Grakn.Query.t(), Keyword.t()) :: any()
def query!(conn, %Grakn.Query{} = query, opts \\ []) do
DBConnection.execute!(get_conn(conn), query, [], with_transaction_config(opts))
end
@spec command(conn(), Grakn.Command.t(), Keyword.t()) :: any()
def command(conn, %Grakn.Command{} = command, opts \\ []) do
DBConnection.execute(get_conn(conn), command, [], with_transaction_config(opts))
end
@doc """
Create a new transaction and execute a sequence of statements within the
context of the transaction.
### Options
  * `:type` - The type of transaction, value must be
    `Grakn.Transaction.Type.read()` (default), or
    `Grakn.Transaction.Type.write()`
### Example
```
Grakn.transaction(
  conn,
  fn conn ->
    Grakn.query(conn, Grakn.Query.graql("match $x isa Person; get;"))
  end
)
```
"""
@spec transaction(conn(), (conn() -> result), Keyword.t()) :: {:ok, result} | {:error, any}
      when result: var
def transaction(conn, fun, opts \\ []) do
  # Resolve the concrete connection: a Multix-selected member when `conn`
  # is a (multi-server) name, otherwise `conn` itself.
  chosen_conn = get_conn(conn, opts)

  # do_transaction/3 yields {:error, error, stacktrace} only when `fun`
  # raised; any other result (ok or rollback tuple) passes through the
  # `with` unchanged.
  with {:error, error, stacktrace} <- do_transaction(chosen_conn, fun, opts) do
    # Report connection-level failures so Multix can take the chosen
    # server out of rotation; other errors are only re-raised.
    case error do
      %DBConnection.ConnectionError{} -> Multix.failure(conn, chosen_conn)
      %Grakn.Error{reason: %GRPC.RPCError{}} -> Multix.failure(conn, chosen_conn)
      _ -> nil
    end

    reraise error, stacktrace
  end
end
# Runs `fun` inside a DBConnection transaction with the fully-defaulted
# option set. Returns DBConnection.transaction/3's result untouched; when
# `fun` raises, the exception is captured as {:error, error, stacktrace}
# instead of propagating.
defp do_transaction(conn, fun, opts) do
  try do
    DBConnection.transaction(conn, fun, with_transaction_config(opts))
  rescue
    error -> {:error, error, __STACKTRACE__}
  end
end
@doc """
Rollback a transaction, does not return.
Aborts the current transaction fun. If inside multiple `transaction/3`
functions, bubbles up to the top level.
## Example
    {:error, :oops} = Grakn.transaction(pid, fn(conn) ->
      Grakn.rollback(conn, :oops)
      IO.puts "never reaches here!"
    end)
"""
@spec rollback(DBConnection.t(), any) :: no_return()
defdelegate rollback(conn, any), to: DBConnection
# Keyspace used exclusively for health checking.
@health_check "health_check"

@doc """
Runs a minimal query against the `health_check` keyspace and returns `:ok`
when it succeeds, `:error` otherwise.
"""
def check(conn) do
  opts = [keyspace: @health_check, type: Grakn.Transaction.Type.write()]

  case do_transaction(conn, &check_query/1, opts) do
    {:ok, _} -> :ok
    # Fixed: DBConnection.transaction/3 returns {:error, reason} when the
    # transaction is rolled back, and do_transaction/3 returns
    # {:error, error, stacktrace} when the function raised. The original
    # case only matched the 3-tuple, so a rollback crashed with a
    # CaseClauseError instead of reporting :error.
    {:error, _} -> :error
    {:error, _, _} -> :error
  end
end

# Smallest possible read: fetch one concept, streamed so nothing is buffered.
defp check_query(conn) do
  Grakn.query!(conn, Grakn.Query.graql("match $x isa thing; get; limit 1;"), stream: true)
end
@doc false
# Formats a "host:port" string for a server options keyword list, falling
# back to the default host/port when either option is absent.
def connection_uri(opts) do
  host = Keyword.get(opts, :hostname, "localhost")
  port = Keyword.get(opts, :port, 48555)
  "#{host}:#{port}"
end
# Builds a supervisor child spec for this connection. A :servers option
# means multi-server mode, which starts a supervisor tree rather than a
# single worker process.
def child_spec(opts) do
  %{
    id: {__MODULE__, opts[:name]},
    start: {__MODULE__, :start_link, [opts]},
    type: if(Keyword.has_key?(opts, :servers), do: :supervisor, else: :worker)
  }
end
# Inlined: get_conn is on the hot path of every query/command/transaction.
@compile {:inline, get_conn: 1}
defp get_conn(conn), do: get_conn(conn, nil)
@compile {:inline, get_conn: 2}
# When the connection is referenced by an atom (pool/registry name), ask
# Multix to pick a member according to the configured select strategy.
defp get_conn(conn, data) when is_atom(conn) do
  case Multix.get(conn, data) do
    nil -> raise Grakn.Error, "no servers available"
    # disabled, so we return conn
    :error -> conn
    chosen_conn -> chosen_conn
  end
end
# Non-atom references (pids, DBConnection structs) are used directly.
defp get_conn(conn, _data), do: conn
# Options applied when starting a connection/pool; application config is the
# fallback for anything not given explicitly by the caller.
defp with_start_config(opts) do
  opts
  |> Keyword.put_new(:pool_size, get_config(:pool_size, 4))
  |> Keyword.put_new(:pool, DBConnection.Poolboy)
end

# Options applied on every checkout. Caller-supplied values win over
# application config, which wins over the hard-coded defaults below.
defp with_transaction_config(opts) do
  opts_with_defaults =
    opts
    |> Keyword.put_new(:pool_size, get_config(:pool_size, 4))
    |> Keyword.put_new(:pool, DBConnection.Poolboy)
    |> Keyword.put_new(:pool_timeout, get_config(:pool_timeout, 30_000))
    |> Keyword.put_new(:timeout, get_config(:timeout, @default_timeout))
    |> Keyword.put_new(:queue, get_config(:queue, true))
    |> Keyword.put_new(:username, get_config(:username, ""))
    |> Keyword.put_new(:password, get_config(:password, ""))

  # :log is only injected when configured so DBConnection's own default
  # logging behaviour is otherwise preserved.
  case get_config(:log) do
    nil -> opts_with_defaults
    log_function -> Keyword.put_new(opts_with_defaults, :log, log_function)
  end
end
# Reads a value from the :grakn_elixir application environment.
defp get_config(key, default \\ nil),
  do: Application.get_env(:grakn_elixir, key, default)
end
|
lib/grakn.ex
| 0.933862
| 0.648452
|
grakn.ex
|
starcoder
|
defmodule KeyboardLayout do
  @moduledoc """
  Describes a keyboard layout.
  The layout can be created dynamically, or it can be predefined in the `Config`
  for the application.
  Example of a layout defined in `Config`:
      import Config
      config :keyboard_layout,
        layout: [
          leds: [
            %{id: :l1, x: 0, y: 0},
            %{id: :l2, x: 2, y: 1.5},
            %{id: :l3, x: 3, y: 3}
          ],
          keys: [
            %{id: :k1, x: 0, y: 0, opts: [led: :l1]},
            %{id: :k2, x: 2, y: 1.5, opts: [width: 1.5, height: 2, led: :l2]},
            %{id: :k3, x: 5, y: 0}
          ]
        ]
  """
  alias __MODULE__.{Key, LED}

  @typedoc """
  A keyboard layout consisting of [keys](`t:KeyboardLayout.Key.t/0`) and optional
  [LEDs](`t:KeyboardLayout.LED.t/0`).
  """
  @type t :: %__MODULE__{
          keys: [Key.t()],
          leds: [LED.t()],
          leds_by_keys: %{Key.id() => LED.t()},
          keys_by_leds: %{LED.id() => Key.t()}
        }

  defstruct [:keys, :leds, :leds_by_keys, :keys_by_leds]

  @doc """
  Creates a new [KeyboardLayout](`t:KeyboardLayout.t/0`) from a list of keys and
  LEDs. LEDs are optional.
  Example:
      iex> keys = [KeyboardLayout.Key.new(:k1, 0, 0, led: :l1)]
      [%KeyboardLayout.Key{height: 1, id: :k1, led: :l1, width: 1, x: 0, y: 0}]
      iex> leds = [KeyboardLayout.LED.new(:l1, 0, 0)]
      [%KeyboardLayout.LED{id: :l1, x: 0, y: 0}]
      iex> KeyboardLayout.new(keys, leds)
      %KeyboardLayout{
        keys: [
          %KeyboardLayout.Key{height: 1, id: :k1, led: :l1, width: 1, x: 0, y: 0}
        ],
        keys_by_leds: %{
          l1: %KeyboardLayout.Key{height: 1, id: :k1, led: :l1, width: 1, x: 0, y: 0}
        },
        leds: [
          %KeyboardLayout.LED{id: :l1, x: 0, y: 0}
        ],
        leds_by_keys: %{
          k1: %KeyboardLayout.LED{id: :l1, x: 0, y: 0}
        }
      }
  """
  @spec new(keys :: [Key.t()], leds :: [LED.t()]) :: t
  def new(keys, leds \\ []) do
    led_lookup = Map.new(leds, fn led -> {led.id, led} end)

    # Build both cross-reference maps in a single pass over the keys that
    # reference an LED. Map.fetch!/2 raises a KeyError when a key points at
    # an LED id that is not present in `leds`.
    {leds_by_keys, keys_by_leds} =
      keys
      |> Enum.filter(& &1.led)
      |> Enum.reduce({%{}, %{}}, fn key, {by_key, by_led} ->
        {Map.put(by_key, key.id, Map.fetch!(led_lookup, key.led)),
         Map.put(by_led, key.led, key)}
      end)

    %__MODULE__{
      keys: keys,
      leds: leds,
      leds_by_keys: leds_by_keys,
      keys_by_leds: keys_by_leds
    }
  end

  @doc """
  Returns a list of [keys](`t:KeyboardLayout.Key.t/0`) from the provided [layout](`t:KeyboardLayout.t/0`)
  """
  @spec keys(layout :: t) :: [Key.t()]
  def keys(layout), do: layout.keys

  @doc """
  Returns a list of [leds](`t:KeyboardLayout.LED.t/0`) from the provided [layout](`t:KeyboardLayout.t/0`)
  """
  @spec leds(layout :: t) :: [LED.t()]
  def leds(layout), do: layout.leds

  @doc """
  Returns the corresponding [LED](`t:KeyboardLayout.LED.t/0`) from the provided [layout](`t:KeyboardLayout.t/0`)
  and [key id](`t:KeyboardLayout.Key.id/0`).
  Returns `nil` if the LED does not belong to a key.
  """
  @spec led_for_key(layout :: t, Key.id()) :: LED.t() | nil
  def led_for_key(%__MODULE__{} = layout, key_id) when is_atom(key_id),
    do: Map.get(layout.leds_by_keys, key_id)

  @doc """
  Returns the corresponding [key](`t:KeyboardLayout.Key.t/0`) from the provided [layout](`t:KeyboardLayout.t/0`)
  and [LED id](`t:KeyboardLayout.LED.id/0`).
  Returns `nil` if the key has no LED.
  """
  @spec key_for_led(layout :: t, LED.id()) :: Key.t() | nil
  def key_for_led(%__MODULE__{} = layout, led_id) when is_atom(led_id),
    do: Map.get(layout.keys_by_leds, led_id)

  @doc """
  Returns the [layout](`t:KeyboardLayout.t/0`) defined in the `Config` of the application
  """
  @spec load_from_config() :: t
  def load_from_config do
    env_layout =
      case Application.get_env(:keyboard_layout, :layout) do
        nil -> raise "A layout must be defined for the application to function"
        layout -> layout
      end

    new(
      build_keys(Keyword.get(env_layout, :keys, [])),
      build_leds(Keyword.get(env_layout, :leds, []))
    )
  end

  # Builds LED structs from config maps; a map missing :id, :x or :y raises
  # a FunctionClauseError.
  @spec build_leds([map]) :: [LED.t()]
  defp build_leds(led_configs) do
    Enum.map(led_configs, fn %{id: id, x: x, y: y} -> LED.new(id, x, y) end)
  end

  # Builds Key structs from config maps; :opts is optional per entry.
  @spec build_keys([map]) :: [Key.t()]
  defp build_keys(key_configs) do
    Enum.map(key_configs, fn
      %{id: id, x: x, y: y, opts: opts} -> Key.new(id, x, y, opts)
      %{id: id, x: x, y: y} -> Key.new(id, x, y)
    end)
  end
end
|
lib/keyboard_layout.ex
| 0.873835
| 0.54958
|
keyboard_layout.ex
|
starcoder
|
defmodule RDF.Quad do
  @moduledoc """
  Helper functions for RDF quads.
  An RDF Quad is represented as a plain Elixir tuple consisting of four valid
  RDF values for subject, predicate, object and a graph context.
  """
  alias RDF.Statement

  @type t :: {Statement.subject, Statement.predicate, Statement.object, Statement.graph_name}

  @type coercible_t ::
          {Statement.coercible_subject, Statement.coercible_predicate,
           Statement.coercible_object, Statement.coercible_graph_name}

  # Native (value-mapped) form of a quad as produced by `values/2`.
  @type t_values :: {String.t, String.t, any, String.t}

  @doc """
  Creates a `RDF.Quad` with proper RDF values.
  An error is raised when the given elements are not coercible to RDF values.
  Note: The `RDF.quad` function is a shortcut to this function.
  ## Examples
      iex> RDF.Quad.new("http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph")
      {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
      iex> RDF.Quad.new(EX.S, EX.p, 42, EX.Graph)
      {RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42), RDF.iri("http://example.com/Graph")}
  """
  @spec new(
          Statement.coercible_subject,
          Statement.coercible_predicate,
          Statement.coercible_object,
          Statement.coercible_graph_name
        ) :: t
  def new(subject, predicate, object, graph_context) do
    {
      Statement.coerce_subject(subject),
      Statement.coerce_predicate(predicate),
      Statement.coerce_object(object),
      Statement.coerce_graph_name(graph_context)
    }
  end

  @doc """
  Creates a `RDF.Quad` with proper RDF values.
  An error is raised when the given elements are not coercible to RDF values.
  Note: The `RDF.quad` function is a shortcut to this function.
  ## Examples
      iex> RDF.Quad.new {"http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph"}
      {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
      iex> RDF.Quad.new {EX.S, EX.p, 42, EX.Graph}
      {RDF.iri("http://example.com/S"), RDF.iri("http://example.com/p"), RDF.literal(42), RDF.iri("http://example.com/Graph")}
  """
  @spec new(coercible_t) :: t
  def new({subject, predicate, object, graph_context}),
    do: new(subject, predicate, object, graph_context)

  @doc """
  Returns a tuple of native Elixir values from a `RDF.Quad` of RDF terms.
  Returns `nil` if one of the components of the given tuple is not convertible via `RDF.Term.value/1`.
  The optional second argument allows to specify a custom mapping with a function
  which will receive a tuple `{statement_position, rdf_term}` where
  `statement_position` is one of the atoms `:subject`, `:predicate`, `:object` or
  `:graph_name`, while `rdf_term` is the RDF term to be mapped. When the given
  function returns `nil` this will be interpreted as an error and will become
  the overall result of the `values/2` call.
  ## Examples
      iex> RDF.Quad.values {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
      {"http://example.com/S", "http://example.com/p", 42, "http://example.com/Graph"}
      iex> {~I<http://example.com/S>, ~I<http://example.com/p>, RDF.literal(42), ~I<http://example.com/Graph>}
      ...> |> RDF.Quad.values(fn
      ...>      {:object, object} ->
      ...>        RDF.Term.value(object)
      ...>      {:graph_name, graph_name} ->
      ...>        graph_name
      ...>      {_, resource} ->
      ...>        resource |> to_string() |> String.last() |> String.to_atom()
      ...>    end)
      {:S, :p, 42, ~I<http://example.com/Graph>}
  """
  @spec values(t | any, Statement.term_mapping) :: t_values | nil
  def values(quad, mapping \\ &Statement.default_term_mapping/1)

  def values({subject, predicate, object, graph_context}, mapping) do
    # Unlike the other three positions, the graph name is bound without a
    # not-nil guard: a nil graph name is accepted and kept in the result.
    with subject_value when not is_nil(subject_value) <- mapping.({:subject, subject}),
         predicate_value when not is_nil(predicate_value) <- mapping.({:predicate, predicate}),
         object_value when not is_nil(object_value) <- mapping.({:object, object}),
         graph_context_value <- mapping.({:graph_name, graph_context})
    do
      {subject_value, predicate_value, object_value, graph_context_value}
    else
      _ -> nil
    end
  end

  def values(_, _), do: nil

  @doc """
  Checks if the given tuple is a valid RDF quad.
  The elements of a valid RDF quad must be RDF terms. On the subject
  position only IRIs and blank nodes are allowed, while on the predicate and
  graph context position only IRIs are allowed. The object position can be any
  RDF term.
  """
  @spec valid?(t | any) :: boolean
  def valid?(tuple)
  def valid?({_, _, _, _} = quad), do: Statement.valid?(quad)
  def valid?(_), do: false
end
|
lib/rdf/quad.ex
| 0.874419
| 0.68223
|
quad.ex
|
starcoder
|
defmodule TicTacToe do
  @moduledoc """
  Documentation for `TicTacToe`.
  """
  alias TicTacToe.Board, as: Board
  alias TicTacToe.Opponent, as: Opponent
  alias Mix.Shell.IO, as: Shell

  @doc """
  This defines the entry point of functions and data structures needed to play
  a game of tic-tac-toe in the terminal.
  ## Examples
      iex> TicTacToe.intro()
  """
  def intro do
    Shell.info("Hi, welcome to tic-tac-toe \n")
    Shell.info("The rules are simple! Select X or O when prompted \n")
    Shell.info("Enter a number one - nine(in words) corresponding to the positions displayed")
    Shell.info("Fill up the diagonal, horizontal or vertical to win!")
    IO.puts(Board.example())

    if Shell.yes?("Would you like to play?") do
      Shell.cmd("clear")
      play(%Board{}, select())
    else
      exit_game()
    end
  end

  # Main game loop - orchestrates logic
  # Each round places the user's mark and then the opponent's mark before
  # the rules are checked, so marks are always added in pairs between checks.
  # The loop only terminates through exit_game/1 inside check_rules/1.
  defp play(board, selected) do
    {mark, op_mark} = selected
    user_pos = choose(board)
    op_pos = Opponent.choose(board)

    board
    |> place_mark(user_pos, mark)
    |> place_mark(op_pos, op_mark)
    |> check_rules()
    |> play(selected)
  end

  # Gets user input and validates that to a choice of x or o
  def select do
    response =
      Shell.prompt("Would you like to be X or O?") |> String.trim() |> String.capitalize()

    case response do
      "X" ->
        {:x, :o}

      "O" ->
        {:o, :x}

      _ ->
        # Re-prompt until valid input is given.
        Shell.info("Invalid input, try again. Enter X or O ")
        select()
    end
  end

  # Gets user input and validates a choice on the board
  def choose(board) do
    valid_input = ["one", "two", "three", "four", "five", "six", "seven", "eight", "nine"]

    choice =
      Shell.prompt("Please enter a position one - nine") |> String.trim() |> String.downcase()

    valid = Enum.member?(valid_input, choice)

    if valid do
      # Safe atom construction: `choice` is constrained to the nine words above.
      position = String.to_atom("pos_#{choice}")
      {:ok, exist} = Map.fetch(board, position)

      case exist do
        # position is free
        " " ->
          position

        _ ->
          IO.puts("Sorry, that position is already taken!")
          choose(board)
      end
    else
      Shell.info("Invalid input please try again")
      choose(board)
    end
  end

  # Updates the game state
  defp place_mark(board, position, mark) do
    new_board = Board.update_position(mark, position, board)
    IO.puts(new_board)
    new_board
  end

  # Validates win, loss or draw according to tic-tac-toe rules
  def check_rules(board) do
    # NOTE(review): Map.values/1 on a struct also yields the :__struct__
    # module atom (which is != " "), so `draw` is the number of filled cells
    # plus one. Combined with `draw == 1` in the cond below (i.e. an empty
    # board), the draw branch can never fire once marks have been placed —
    # confirm the intended condition (all cells filled?) against %Board{}.
    draw =
      board
      |> Map.values()
      |> Enum.filter(fn p -> p != " " end)
      |> length

    # TODO: possible opportunity to refactor
    row_one =
      board.pos_one == board.pos_two and board.pos_one == board.pos_three and board.pos_one != " "

    row_two =
      board.pos_four == board.pos_five and board.pos_four == board.pos_six and
        board.pos_four != " "

    row_three =
      board.pos_seven == board.pos_eight and board.pos_seven == board.pos_nine and
        board.pos_seven != " "

    vert_one =
      board.pos_one == board.pos_four and board.pos_one == board.pos_seven and
        board.pos_one != " "

    vert_two =
      board.pos_two == board.pos_five and board.pos_two == board.pos_eight and
        board.pos_two != " "

    vert_three =
      board.pos_three == board.pos_six and board.pos_three == board.pos_nine and
        board.pos_three != " "

    diag_one =
      board.pos_one == board.pos_five and board.pos_one == board.pos_nine and board.pos_one != " "

    diag_two =
      board.pos_three == board.pos_five and board.pos_three == board.pos_seven and
        board.pos_three != " "

    # Winner lookup uses a cell shared by the matched line (pos_one, pos_nine
    # or the centre pos_five).
    cond do
      draw == 1 -> exit_game("It's a draw")
      row_one or vert_one -> exit_game("Player #{board.pos_one} won!")
      row_three or vert_three -> exit_game("Player #{board.pos_nine} won!")
      row_two or vert_two or diag_one or diag_two -> exit_game("Player #{board.pos_five} won!")
      true -> board
    end
  end

  # Terminates game gracefully
  def exit_game(outcome \\ "") do
    Shell.info(outcome)
    Shell.info("Thanks for checking out the game bye!")
    # NOTE(review): exit(":)") exits with a non-:normal reason, which the VM
    # reports as an abnormal termination — exit(:normal) may be intended.
    exit(":)")
  end
end
|
tic_tac_toe/lib/tic_tac_toe.ex
| 0.704364
| 0.602032
|
tic_tac_toe.ex
|
starcoder
|
defmodule Postgrex.Interval do
  @moduledoc """
  Struct for PostgreSQL `interval`.
  ## Fields
    * `months`
    * `days`
    * `secs`
    * `microsecs`
  """
  @type t :: %__MODULE__{months: integer, days: integer, secs: integer, microsecs: integer}

  defstruct months: 0, days: 0, secs: 0, microsecs: 0

  # Field-wise tuple comparison in {months, days, secs, microsecs} order.
  # NOTE(review): units are not normalized against each other, e.g.
  # %Interval{months: 1} compares :gt to %Interval{days: 40} — confirm this
  # ordering is intended before relying on it for calendar semantics.
  def compare(
        %__MODULE__{months: m1, days: d1, secs: s1, microsecs: ms1},
        %__MODULE__{months: m2, days: d2, secs: s2, microsecs: ms2}
      ) do
    t1 = {m1, d1, s1, ms1}
    t2 = {m2, d2, s2, ms2}

    cond do
      t1 > t2 -> :gt
      t1 < t2 -> :lt
      true -> :eq
    end
  end

  # Renders e.g. "1 month, 2 days, 40.000001 seconds". Zero months/days are
  # omitted; microseconds are left-padded to six fractional digits.
  def to_string(%__MODULE__{months: months, days: days, secs: secs, microsecs: microsecs}) do
    optional_interval(months, :month) <>
      optional_interval(days, :day) <>
      Integer.to_string(secs) <>
      optional_microsecs(microsecs) <>
      " seconds"
  end

  # Singular/plural unit formatting; zero values produce no output.
  defp optional_interval(0, _), do: ""
  defp optional_interval(1, key), do: "1 #{key}, "
  defp optional_interval(n, key), do: "#{n} #{key}s, "

  defp optional_microsecs(0),
    do: ""

  defp optional_microsecs(ms),
    do: "." <> (ms |> Integer.to_string() |> String.pad_leading(6, "0"))
end
defmodule Postgrex.Range do
  @moduledoc """
  Struct for PostgreSQL `range`.
  ## Fields
    * `lower`
    * `upper`
    * `lower_inclusive`
    * `upper_inclusive`
  """
  # Bounds may be a term, `:empty` (empty range) or `:unbound` (infinite bound).
  @type t :: %__MODULE__{
          lower: term | :empty | :unbound,
          upper: term | :empty | :unbound,
          lower_inclusive: boolean,
          upper_inclusive: boolean
        }

  defstruct lower: nil, upper: nil, lower_inclusive: true, upper_inclusive: true
end

defmodule Postgrex.INET do
  @moduledoc """
  Struct for PostgreSQL `inet` / `cidr`.
  ## Fields
    * `address`
    * `netmask`
  """
  # netmask is nil for a plain `inet` address without a prefix length.
  @type t :: %__MODULE__{address: :inet.ip_address(), netmask: nil | 0..128}

  defstruct address: nil, netmask: nil
end

defmodule Postgrex.MACADDR do
  @moduledoc """
  Struct for PostgreSQL `macaddr`.
  ## Fields
    * `address`
  """
  # Six octets of the hardware address.
  @type macaddr :: {0..255, 0..255, 0..255, 0..255, 0..255, 0..255}

  @type t :: %__MODULE__{address: macaddr}

  defstruct address: nil
end

defmodule Postgrex.Point do
  @moduledoc """
  Struct for PostgreSQL `point`.
  ## Fields
    * `x`
    * `y`
  """
  @type t :: %__MODULE__{x: float, y: float}

  defstruct x: nil, y: nil
end

defmodule Postgrex.Polygon do
  @moduledoc """
  Struct for PostgreSQL `polygon`.
  ## Fields
    * `vertices`
  """
  @type t :: %__MODULE__{vertices: [Postgrex.Point.t()]}

  defstruct vertices: nil
end

defmodule Postgrex.Line do
  @moduledoc """
  Struct for PostgreSQL `line`.
  Note, lines are stored in PostgreSQL in the form `{a, b, c}`, which
  parameterizes a line as `a*x + b*y + c = 0`.
  ## Fields
    * `a`
    * `b`
    * `c`
  """
  @type t :: %__MODULE__{a: float, b: float, c: float}

  defstruct a: nil, b: nil, c: nil
end

defmodule Postgrex.LineSegment do
  @moduledoc """
  Struct for PostgreSQL `lseg`.
  ## Fields
    * `point1`
    * `point2`
  """
  @type t :: %__MODULE__{point1: Postgrex.Point.t(), point2: Postgrex.Point.t()}

  defstruct point1: nil, point2: nil
end

defmodule Postgrex.Box do
  @moduledoc """
  Struct for PostgreSQL `box`.
  ## Fields
    * `upper_right`
    * `bottom_left`
  """
  @type t :: %__MODULE__{
          upper_right: Postgrex.Point.t(),
          bottom_left: Postgrex.Point.t()
        }

  defstruct upper_right: nil, bottom_left: nil
end

defmodule Postgrex.Path do
  @moduledoc """
  Struct for PostgreSQL `path`.
  ## Fields
    * `open`
    * `points`
  """
  # `open` distinguishes an open path from a closed one.
  @type t :: %__MODULE__{points: [Postgrex.Point.t()], open: boolean}

  defstruct points: nil, open: nil
end

defmodule Postgrex.Circle do
  @moduledoc """
  Struct for PostgreSQL `circle`.
  ## Fields
    * `center`
    * `radius`
  """
  @type t :: %__MODULE__{center: Postgrex.Point.t(), radius: number}

  defstruct center: nil, radius: nil
end

defmodule Postgrex.Lexeme do
  @moduledoc """
  Struct for PostgreSQL `lexeme`.
  ## Fields
    * `word`
    * `positions`
  """
  # Each position pairs a word offset with an optional weight label.
  @type t :: %__MODULE__{word: String.t(), positions: [{pos_integer, :A | :B | :C | nil}]}

  defstruct word: nil, positions: nil
end
|
lib/postgrex/builtins.ex
| 0.900162
| 0.609117
|
builtins.ex
|
starcoder
|
defmodule Flowex.PipelineBuilder do
  @moduledoc "Defines functions to start and to stop a pipeline"

  # NOTE(review): Supervisor.Spec (worker/3, supervisor/3) is deprecated in
  # current Elixir releases — consider migrating to child-spec maps.
  import Supervisor.Spec

  # Starts a pipeline under a fresh Flowex.Supervisor and returns a
  # %Flowex.Pipeline{} handle with the producer/consumer process names.
  def start(pipeline_module, opts) do
    {producer_name, consumer_name, all_specs} = build_children(pipeline_module, opts)
    sup_name = supervisor_name(pipeline_module)
    {:ok, _sup_pid} = Flowex.Supervisor.start_link(all_specs, sup_name)
    pipeline_struct(pipeline_module, producer_name, consumer_name, sup_name)
  end

  # Same as start/2, but attaches the pipeline supervisor as a permanent
  # child of an existing supervisor `pid`.
  def supervised_start(pipeline_module, pid, opts) do
    {producer_name, consumer_name, all_specs} = build_children(pipeline_module, opts)
    sup_name = supervisor_name(pipeline_module)

    sup_spec =
      supervisor(Flowex.Supervisor, [all_specs, sup_name], id: sup_name, restart: :permanent)

    {:ok, _sup_pid} = Supervisor.start_child(pid, sup_spec)
    pipeline_struct(pipeline_module, producer_name, consumer_name, sup_name)
  end

  # Terminates every worker child first, then stops the supervisor itself.
  def stop(sup_name) do
    Enum.each(Supervisor.which_children(sup_name), fn {id, _pid, :worker, [_]} ->
      Supervisor.terminate_child(sup_name, id)
    end)

    Supervisor.stop(sup_name)
  end

  # Builds the complete child-spec list: producer -> pipe stages -> consumer.
  defp build_children(pipeline_module, opts) do
    producer_name = producer_name(pipeline_module)
    producer_spec = worker(Flowex.Producer, [nil, [name: producer_name]], id: producer_name)
    {wss, last_names} = init_pipes({producer_spec, producer_name}, {pipeline_module, opts})
    consumer_name = consumer_name(pipeline_module)

    consumer_worker_spec =
      worker(Flowex.Consumer, [last_names, [name: consumer_name]], id: consumer_name)

    {producer_name, consumer_name, wss ++ [consumer_worker_spec]}
  end

  # NOTE(review): the three name builders below create process names with
  # String.to_atom/1 plus a unique make_ref/0, so every start creates atoms
  # that are never garbage collected. Repeatedly starting/stopping pipelines
  # grows the atom table without bound.
  defp supervisor_name(pipeline_module) do
    String.to_atom("Flowex.Supervisor_#{inspect(pipeline_module)}_#{inspect(make_ref())}")
  end

  defp producer_name(pipeline_module) do
    String.to_atom("Flowex.Producer_#{inspect(pipeline_module)}_#{inspect(make_ref())}")
  end

  defp consumer_name(pipeline_module) do
    String.to_atom("Flowex.Consumer_#{inspect(pipeline_module)}_#{inspect(make_ref())}")
  end

  defp pipeline_struct(pipeline_module, producer_name, consumer_name, sup_name) do
    %Flowex.Pipeline{
      module: pipeline_module,
      in_name: producer_name,
      out_name: consumer_name,
      sup_name: sup_name
    }
  end

  # Expands each declared pipe (plus the error pipe) into `count` stage
  # workers; each stage subscribes to all names of the previous layer.
  defp init_pipes({producer_spec, producer_name}, {pipeline_module, opts}) do
    (pipeline_module.pipes() ++ [pipeline_module.error_pipe])
    |> Enum.reduce({[producer_spec], [producer_name]}, fn {atom, count, pipe_opts, type},
                                                          {wss, prev_names} ->
      # Pipe-level options override pipeline-level options.
      opts = Map.merge(Enum.into(opts, %{}), Enum.into(pipe_opts, %{}))

      list =
        Enum.map(1..count, fn _i ->
          init_pipe({pipeline_module, opts}, {atom, type}, prev_names)
        end)

      {new_wss, names} = Enum.unzip(list)
      {wss ++ new_wss, names}
    end)
  end

  # A pipe referenced by a module name ("Elixir.…") gets module treatment;
  # any other atom is treated as a function of the pipeline module.
  def init_pipe({pipeline_module, opts}, {atom, type}, prev_names) do
    case Atom.to_charlist(atom) do
      ~c"Elixir." ++ _ -> init_module_pipe({type, atom, opts}, prev_names)
      _ -> init_function_pipe({type, pipeline_module, atom, opts}, prev_names)
    end
  end

  defp init_function_pipe({type, pipeline_module, function, opts}, prev_names) do
    name = String.to_atom("Flowex_#{pipeline_module}.#{function}_#{inspect(make_ref())}")

    opts = %Flowex.StageOpts{
      type: type,
      module: pipeline_module,
      function: function,
      opts: opts,
      name: name,
      producer_names: prev_names
    }

    worker_spec = worker(Flowex.Stage, [opts, [name: name]], id: name)
    {worker_spec, name}
  end

  defp init_module_pipe({type, module, opts}, prev_names) do
    # Module pipes get an init/1 callback to transform their options.
    opts = module.init(opts)
    name = String.to_atom("Flowex_#{module}.call_#{inspect(make_ref())}")

    opts = %Flowex.StageOpts{
      type: type,
      module: module,
      function: :call,
      opts: opts,
      name: name,
      producer_names: prev_names
    }

    worker_spec = worker(Flowex.Stage, [opts, [name: name]], id: name)
    {worker_spec, name}
  end
end
|
lib/flowex/pipeline_builder.ex
| 0.558086
| 0.425098
|
pipeline_builder.ex
|
starcoder
|
defmodule Booklist.Reports do
  @moduledoc """
  The Reports context.
  """

  import Ecto.Query, warn: false
  alias Booklist.Repo

  alias Booklist.Admin.Rating
  alias Booklist.Admin.Genre
  alias Booklist.Admin.Author

  @doc """
  Increments `num` by one. Used as an update function with `Map.update!/3`.
  """
  def increment(num) do
    num + 1
  end

  @doc """
  Returns `total` as a percentage of `ratings_count`, rounded to two decimal
  places. A zero count is clamped to 1 to avoid division by zero.
  """
  def calculate_percent_of_ratings(total, ratings_count) do
    # Written as an explicit call: the original piped the arithmetic
    # expression into Float.round/2, which relies on `|>` binding looser
    # than `*` and `/` and is easy to misread.
    Float.round(total / max(ratings_count, 1) * 100, 2)
  end

  @doc """
  Gets all ratings for given year
  """
  def get_ratings(year) do
    from(
      r in Rating,
      left_join: book in assoc(r, :book),
      where: fragment("EXTRACT(year FROM ?)", r.date_scored) == ^year,
      order_by: [desc: r.score, desc: r.id],
      select: %{
        id: r.id,
        score: r.score,
        date_scored: r.date_scored,
        week_number: fragment("CAST(extract(week FROM ?) AS integer)", r.date_scored),
        book: %{
          id: book.id,
          title: book.title,
          sort_title: book.sort_title,
          subtitle: book.subtitle,
          is_fiction: book.is_fiction,
          genre_id: book.genre_id
        }
      }
    )
    |> Repo.all
  end

  @doc """
  Sums the scores of the given ratings.
  """
  def calculate_rating_total(ratings) do
    Enum.reduce(ratings, 0, fn rating, total -> total + rating.score end)
  end

  @doc """
  Counts ratings whose book is non-fiction.
  """
  def calculate_nonfiction_count(ratings) do
    Enum.count(ratings, fn rating -> rating.book.is_fiction == false end)
  end

  @doc """
  Buckets ratings into ISO week numbers 1..53. For the current year only
  weeks up to the current week are kept; week 53 is kept only when non-empty
  and is moved to the front of the list by `format_last_week_of_year/1`.
  """
  def calculate_ratings_by_week(ratings, is_past_year) do
    week_numbers = 1..53
    week_map_initial = week_numbers |> Enum.map(fn i -> {i, 0} end) |> Map.new

    week_map =
      Enum.reduce(ratings, week_map_initial, fn %{week_number: week_number}, week_map ->
        Map.update!(week_map, week_number, &increment/1)
      end)

    raw_week_count =
      Enum.map(week_numbers, fn week_number ->
        %{week_number: week_number, count: week_map[week_number]}
      end)

    case is_past_year do
      true ->
        Enum.filter(raw_week_count, fn %{week_number: week_number, count: count} ->
          week_number < 53 or count > 0
        end)
        |> format_last_week_of_year

      false ->
        current_date = Common.ModelHelpers.Date.today

        {_, current_week_num} =
          {current_date.year, current_date.month, current_date.day} |> :calendar.iso_week_number

        Enum.filter(raw_week_count, fn %{week_number: week_number, count: count} ->
          week_number <= current_week_num or (week_number == 53 and count > 0)
        end)
        |> format_last_week_of_year
    end
  end

  @doc """
  Moves a trailing week-53 bucket to the front of the list (ISO week 53
  overlaps the start of the next calendar year).
  """
  def format_last_week_of_year(ratings_by_week) do
    last_week = List.last(ratings_by_week)

    case last_week.week_number do
      53 ->
        [last_week | ratings_by_week]
        |> List.delete_at(-1)

      _ ->
        ratings_by_week
    end
  end

  @doc """
  Returns all genres.
  """
  def get_genres() do
    from(
      g in Genre
    )
    |> Repo.all
  end

  @doc """
  Returns the percentage share of ratings per genre, sorted by share
  (descending) and then by genre name; genres with no ratings are omitted.
  """
  def calculate_genres_count(genres, ratings, ratings_count) do
    initial_map = Enum.reduce(genres, %{}, fn genre, map -> Map.put(map, genre.id, 0) end)

    genre_map =
      Enum.reduce(ratings, initial_map, fn rating, map ->
        Map.update!(map, rating.book.genre_id, &increment/1)
      end)

    Enum.map(genres, fn genre ->
      %{genre: genre, count: genre_map[genre.id] |> calculate_percent_of_ratings(ratings_count)}
    end)
    |> Enum.filter(fn %{count: count} -> count > 0 end)
    |> Enum.sort(fn a, b ->
      # Equal shares fall back to alphabetical genre name.
      case a.count == b.count do
        true -> a.genre.name < b.genre.name
        false -> a.count > b.count
      end
    end)
  end

  @doc """
  Returns the list of authors.
  ## Examples
      iex> list_authors()
      [%Author{}, ...]
  """
  def list_authors do
    from(
      author in Author,
      join: book in assoc(author, :books),
      join: rating in assoc(book, :ratings),
      preload: [books: {book, [ratings: rating]}],
      order_by: [author.last_name, author.first_name, author.middle_name]
    )
    |> Repo.all()
  end

  @doc """
  Returns the list of authors with calculated sum of ratings
  ## Examples
      iex> calculate_authors_average_score()
      [{ %Author{}, ratings_count, average_score }]
  """
  def calculate_authors_average_score(authors) do
    authors
    |> Enum.map(fn author ->
      ratings = Enum.flat_map(author.books, fn book -> book.ratings end)
      ratings_count = Enum.count(ratings)

      ratings_sum = Enum.reduce(ratings, 0, fn rating, sum -> rating.score + sum end)

      # NOTE(review): divides by ratings_count without a zero guard; this is
      # safe for authors from list_authors/0 (inner joins guarantee at least
      # one rating) — confirm before reusing with other author lists.
      ratings_average = ratings_sum / ratings_count
      {author, ratings_count, ratings_average}
    end)
    |> Enum.sort(fn {_author_1, _ratings_count_1, ratings_average_1},
                    {_author_2, _ratings_count_2, ratings_average_2} ->
      # Fixed: the original wrapped this comparison in a redundant
      # `if ... do true else false end`.
      ratings_average_1 >= ratings_average_2
    end)
  end
end
|
apps/booklist/lib/booklist/admin/reports.ex
| 0.756268
| 0.448547
|
reports.ex
|
starcoder
|
defmodule Nabo.Repo do
  @moduledoc """
  Precompiles and provides interface to interact with your posts.
      defmodule MyRepo do
        use Nabo.Repo, root: "priv/posts"
      end
      posts = MyRepo.all
      {:ok, post} = MyRepo.get("foo")
      post = MyRepo.get!("foo")
  Can be configured with:
  ```
  defmodule MyRepo do
    use Nabo.Repo,
      root: "priv/posts",
      compiler: [
        split_pattern: "<<--------->>",
        front_parser: {MyJSONParser, []},
        excerpt_parser: {MyExcerptParser, []},
        body_parser: {Nabo.Parser.Markdown, %Earmark.Options{smartypants: false}}
      ]
  end
  ```
  * `:root` - the path to posts.
  * `:compiler` - the compiler options, includes of four sub-options. See `Nabo.Parser` for instructions of how to implement a parser.
    * `:split_pattern` - the delimeter that separates front-matter, excerpt and post body. This will be passed
      as the second argument in `String.split/3`.
    * `:front_parser` - the options for parsing front matter, in `{parser_module, parser_options}` format.
      Parser options will be passed to `parse/2` function in parser module. Defaults to `{Nabo.Parser.Front, []}`
    * `:excerpt_parser` - the options for parsing post excerpt, in `{parser_module, parser_options}` format.
      Parser options will be passed to `parse/2` function in parser module. Defaults to `{Nabo.Parser.Markdown, []}`
    * `:body_parser` - the options for parsing post body, in `{parser_module, parser_options}` format.
      Parser options will be passed to `parse/2` function in parser module. Defaults to `{Nabo.Parser.Markdown, []}`
  """

  @doc false
  defmacro __using__(options) do
    quote location: :keep do
      options = unquote(options)

      root_path =
        options
        |> Keyword.fetch!(:root)
        |> Path.relative_to_cwd()

      compiler_options =
        options
        |> Keyword.get(:compiler, [])
        |> Nabo.Compiler.Options.new()

      @root_path root_path
      @compiler_options compiler_options

      def __options__(), do: unquote(options)

      @before_compile unquote(__MODULE__)
    end
  end

  @doc false
  defmacro __before_compile__(env) do
    root_path = Module.get_attribute(env.module, :root_path)
    compiler_options = Module.get_attribute(env.module, :compiler_options)
    post_paths = Path.wildcard(root_path <> "/*.md")
    posts = post_paths |> compile_async(compiler_options) |> Macro.escape()

    quote bind_quoted: [posts: posts, paths: post_paths] do
      # Recompile the repo module whenever a post file changes.
      for path <- paths, do: @external_resource(path)

      @posts posts

      def all(), do: @posts

      slugs = Enum.map(@posts, & &1.slug)
      def availables(), do: unquote(slugs)

      # One function clause per known slug for constant-time lookup.
      for %{slug: slug} = post <- @posts do
        def get(unquote(slug)) do
          unquote(Macro.escape(post))
        end
      end

      # Fixed: underscore the fallback argument — the original `def
      # get(slug), do: nil` produced an "unused variable" compiler warning
      # in every generated repo module.
      def get(_slug), do: nil

      def get!(slug) when is_binary(slug) do
        case get(slug) do
          nil ->
            raise "could not find post with #{inspect(slug)}, availables: #{inspect(availables())}"

          post ->
            post
        end
      end

      def order_by_datetime(posts) do
        Enum.sort(posts, &(DateTime.compare(&1.published_at, &2.published_at) == :gt))
      end

      def exclude_draft(posts) do
        Enum.reject(posts, & &1.draft?)
      end

      def filter_published(posts, published_at \\ DateTime.utc_now()) do
        Enum.filter(posts, &(DateTime.compare(&1.published_at, published_at) == :lt))
      end
    end
  end

  # Compiles all post files concurrently; posts that fail to compile are
  # warned about in compile/2 and dropped from the result here.
  defp compile_async(paths, compiler_options) do
    paths
    |> Task.async_stream(&compile(&1, compiler_options))
    |> Enum.flat_map(fn
      {:ok, compiled} -> List.wrap(compiled)
      {:error, _} -> []
    end)
  end

  defp compile(path, options) do
    content = File.read!(path)

    case Nabo.Compiler.compile(content, options) do
      {:ok, post} ->
        post

      {:error, reason} ->
        IO.warn(["Could not compile ", inspect(path), " due to: ", reason], [])
        nil
    end
  end

  @doc """
  Finds a post by the given slug.
  ## Example
      MyRepo.get("my-slug")
  """
  @callback get(slug :: Nabo.Post.slug()) :: Nabo.Post.t() | nil

  @doc """
  Similar to `get/1` but raises error when no post is found.
  ## Example
      post = MyRepo.get!("my-slug")
  """
  @callback get!(slug :: Nabo.Post.slug()) :: Nabo.Post.t()

  @doc """
  Fetches all available posts in the repo.
  ## Example
      posts = MyRepo.all()
  """
  @callback all() :: [Nabo.Post.t()]

  @doc """
  Order posts by date.
  ## Example
      posts = MyRepo.all() |> MyRepo.order_by_datetime()
  """
  # Fixed: the callback was named `order_by_date`, but the function actually
  # generated in `__before_compile__/1` is `order_by_datetime/1`.
  @callback order_by_datetime(posts :: [Nabo.Post.t()]) :: [Nabo.Post.t()]

  @doc """
  Exclude draft posts.
  ## Example
      posts = MyRepo.all() |> MyRepo.exclude_draft()
  """
  @callback exclude_draft(posts :: [Nabo.Post.t()]) :: [Nabo.Post.t()]

  @doc """
  Filter only posts published before a specified datetime.
  ## Example
      posts = MyRepo.all() |> MyRepo.filter_published()
  """
  @callback filter_published(posts :: [Nabo.Post.t()], published_at :: DateTime.t()) :: [
              Nabo.Post.t()
            ]

  @doc """
  Fetches all availables post names in the repo.
  ## Example
      availables = MyRepo.availables()
  """
  @callback availables() :: [Nabo.Post.slug()]
end
|
lib/nabo/repo.ex
| 0.818882
| 0.68763
|
repo.ex
|
starcoder
|
defmodule Microformats2.Items.DtProp do
@moduledoc false
import Microformats2.Helpers
alias Microformats2.Helpers.DateTimeNormalizer
alias Microformats2.ParserState
# Seeds the value-class parsing state from the current parser state: known
# date parts and the implied timezone carry over, while the per-node fields
# start out empty.
defp parse_infos_from_state(state) do
  %{
    date_parts: state.dates,
    implied_timezone: state.implied_timezone,
    value: nil,
    date: nil,
    time: nil,
    zone: nil,
    timezone_offset: nil
  }
end
def parsed_prop(child = {elem, _, _}, state) do
dt = Floki.attribute([child], "datetime") |> List.first()
title = Floki.attribute([child], "title") |> List.first()
value = Floki.attribute([child], "value") |> List.first()
parse_state = parse_value_class([child], parse_infos_from_state(state))
{value, timezone_offset, dates} =
if present?(parse_state) do
{parse_state[:value], parse_state[:implied_timezone], parse_state[:date_parts]}
else
value =
cond do
elem in ["time", "ins", "del"] && present?(dt) -> dt
elem == "abbr" && !is_nil(title) -> title
elem in ["data", "input"] && !is_nil(value) -> value
true -> [child] |> cleanup_html() |> text_content()
end
|> stripped_or_nil()
timezone =
if !Regex.match?(~r/^(\d{4}-\d{2}-\d{2})$/, value) do
data = (Regex.run(~r/Z|[+-]\d{1,2}:?(\d{2})?$/i, value) || []) |> List.first()
if blank?(state.implied_timezone) && present?(data),
do: data,
else: state.implied_timezone
end
data = (Regex.run(~r/(\d{4}-\d{2}-\d{2})/, value) || []) |> List.first()
if data,
do: {value, timezone, [data | state.dates]},
else: {value, timezone, state.dates}
end
fixed_value =
if (value =~ ~r/^\d{1,2}:\d{2}(:\d{2})?(Z|[+-]\d{2}:?\d{2}?)?$/ ||
value =~ ~r/^\d{1,2}(:\d{2})?(:\d{2})?[ap]\.?m\.?$/i) && present?(dates) do
{time, _offset} = DateTimeNormalizer.normalized_time_zone_offset(value)
time = DateTimeNormalizer.normalized_time_format(time)
[date | _] = dates
"#{date} #{stripped_or_nil(time)}"
else
value
end
{fixed_value, %ParserState{state | implied_timezone: timezone_offset, dates: dates}}
end
defp parse_value_class(node, parse_infos) do
nodes = Floki.find(node, ">[class~=value], >[class~=value-title]")
if present?(nodes) do
nodes
|> Enum.map(&value_for_node/1)
|> parse_date_values(parse_infos)
end
end
defp value_for_node({elem, _, _} = node) do
alt = Floki.attribute([node], "alt") |> List.first()
cond do
Enum.member?(attr_list([node], "class"), "value-title") ->
Floki.attribute([node], "title") |> List.first()
elem in ~w[img area] && !is_nil(alt) ->
alt
elem == "data" ->
value = Floki.attribute([node], "value") |> List.first()
if is_nil(value),
do: [node] |> Floki.text() |> stripped_or_nil(),
else: value
elem == "abbr" ->
title = Floki.attribute([node], "title") |> List.first()
if is_nil(title),
do: [node] |> Floki.text() |> stripped_or_nil(),
else: title
elem in ~w[del ins time] ->
datetime = Floki.attribute([node], "datetime") |> List.first()
if is_nil(datetime),
do: [node] |> Floki.text() |> stripped_or_nil(),
else: datetime
true ->
[node] |> Floki.text() |> stripped_or_nil()
end
end
defp parse_date_values(parts, parse_infos)
defp parse_date_values([], parse_info), do: parse_info
defp parse_date_values([nil | rest], parse_infos),
do: parse_date_values(rest, parse_infos)
defp parse_date_values([part | rest], parse_infos) do
updated_parse_infos =
cond do
# we found the date/time value, return it and be done with this
part =~ ~r/^\d{4}-\d{2}-\d{2}[ T]\d{2}:\d{2}(:\d{2})?(Z|[+-]\d{2}:?\d{2})?$/ ->
part
# we found a time value, possibly with time zone information
part =~ ~r/^\d{1,2}:\d{2}(:\d{2})?(Z|[+-]\d{1,2}:?\d{2})?$/ ||
(part =~ ~r/^\d{1,2}(:\d{2})?(:\d{2})?[ap]\.?m\.?$/i && blank?(parse_infos[:time])) ->
{time, offset} = DateTimeNormalizer.normalized_time_zone_offset(part)
parse_infos
|> Map.merge(%{time: time, timezone_offset: offset})
|> maybe_put_implied_timezone(offset)
# we found a valid date and no other date has been found
part =~ ~r/^\d{4}-\d{2}-\d{2}$/ && blank?(parse_infos[:date]) ->
Map.put(parse_infos, :date, part)
# we found a ordinal date and no other date has been found
part =~ ~r/^\d{4}-\d{3}$/ && blank?(parse_infos[:date]) ->
Map.put(parse_infos, :date, DateTimeNormalizer.normalized_ordinal_date(part))
# we found a valid time zone and no other zone has been found
part =~ ~r/^(Z|[+-]\d{1,2}:?(\d{2})?)$/ && blank?(parse_infos[:zone]) ->
{_, offset} = DateTimeNormalizer.normalized_time_zone_offset(part)
parse_infos
|> Map.merge(%{zone: part, timezone_offset: offset})
|> maybe_put_implied_timezone(offset)
# nothing valid found, no state change
true ->
{:next, parse_infos}
end
|> maybe_save_date_part()
|> maybe_fix_time_part()
|> maybe_save_date_or_time()
if is_binary(updated_parse_infos) do
date = String.replace(updated_parse_infos, ~r/[T ].*$/, "")
parse_infos
|> Map.put(:date, updated_parse_infos)
|> Map.update!(:date_parts, &[date | &1])
else
parse_date_values(rest, updated_parse_infos)
end
end
defp maybe_save_date_or_time(str) when is_binary(str), do: str
defp maybe_save_date_or_time({:next, parse_infos}), do: {:next, parse_infos}
defp maybe_save_date_or_time(parse_infos) do
cond do
blank?(parse_infos[:date]) && present?(parse_infos[:time]) ->
time = DateTimeNormalizer.normalized_time_format(parse_infos[:time])
Map.merge(parse_infos, %{value: stripped_or_nil(time), time: time})
present?(parse_infos[:date]) && blank?(parse_infos[:time]) ->
Map.put(parse_infos, :value, String.trim_trailing(parse_infos[:date], "T"))
true ->
time = DateTimeNormalizer.normalized_time_format(parse_infos[:time])
date = String.trim_trailing(parse_infos[:date], "T")
value = "#{date} #{stripped_or_nil(time)}"
Map.merge(parse_infos, %{value: value, time: time})
end
end
defp maybe_save_date_part(str) when is_binary(str), do: str
defp maybe_save_date_part({:next, parse_infos}), do: {:next, parse_infos}
defp maybe_save_date_part(parse_infos) do
if present?(parse_infos[:date]) && !Enum.member?(parse_infos[:date_parts], parse_infos[:date]),
do: Map.update!(parse_infos, :date_parts, &[parse_infos[:date] | &1]),
else: parse_infos
end
defp maybe_fix_time_part(str) when is_binary(str), do: str
defp maybe_fix_time_part({:next, parse_infos}), do: {:next, parse_infos}
defp maybe_fix_time_part(parse_infos) do
if present?(parse_infos[:zone]) && present?(parse_infos[:time]),
do: Map.put(parse_infos, :time, "#{parse_infos[:time]}#{parse_infos[:zone]}"),
else: parse_infos
end
defp maybe_put_implied_timezone(map, value) do
if blank?(map[:implied_timezone]) && present?(value),
do: Map.put(map, :implied_timezone, value),
else: map
end
end
|
lib/items/dt_prop.ex
| 0.581778
| 0.414247
|
dt_prop.ex
|
starcoder
|
defmodule Timex.Timezone do
  @moduledoc """
  This module is used for looking up the timezone information for
  a given point in time, in the desired zone. Timezones are dependent
  not only on locale, but the date and time for which you are querying.
  For instance, the timezone offset from UTC for `Europe/Moscow` is different
  for March 3rd of 2015, than it was in 2013. These differences are important,
  and as such, all functions in this module are date/time sensitive, and where
  omitted, the current date/time are assumed.
  In addition to lookups, this module also does conversion of datetimes from one
  timezone period to another, and determining the difference between a date in one
  timezone period and the same date/time in another timezone period.
  """
  alias Timex.Date, as: Date
  alias Timex.DateTime, as: DateTime
  alias Timex.TimezoneInfo, as: TimezoneInfo
  alias Timex.Timezone.Local, as: Local

  # Tzdata must be running before the compile-time abbreviation list below
  # can be built.
  Application.ensure_all_started(:tzdata)

  # All known, non-empty zone abbreviations (e.g. "CET"), gathered once at
  # compile time from every period of every canonical zone.
  @abbreviations Tzdata.canonical_zone_list
    |> Enum.flat_map(fn name -> {:ok, periods} = Tzdata.periods(name); periods end)
    |> Enum.map(fn %{:zone_abbr => abbr} -> abbr end)
    |> Enum.uniq
    |> Enum.filter(fn abbr -> abbr != "" end)
  @doc """
  Determines if a given zone name exists
  """
  @spec exists?(String.t) :: boolean
  def exists?(zone), do: Tzdata.zone_exists?(zone) || Enum.member?(@abbreviations, zone)
  @doc """
  Gets the local timezone configuration for the current date and time.
  """
  @spec local() :: %TimezoneInfo{}
  def local(), do: local(Date.now)
  @doc """
  Gets the local timezone configuration for the provided date and time.
  The provided date and time can either be an Erlang datetime tuple, or a DateTime struct.
  """
  @spec local(Date.datetime | %DateTime{}) :: %TimezoneInfo{}
  def local(date)
  def local({{y,m,d}, {h,min,s}}), do: %DateTime{year: y, month: m, day: d, hour: h, minute: min, second: s, timezone: %TimezoneInfo{}} |> local
  def local(%DateTime{} = date), do: get(Local.lookup(date), date)
  @doc """
  Gets timezone info for a given zone name and date. The date provided
  can either be an Erlang datetime tuple, or a DateTime struct, and if one
  is not provided, then the current date and time is returned.
  """
  @spec get(String.t | integer | :utc, Date.datetime | %DateTime{} | nil) :: %TimezoneInfo{} | {:error, String.t}
  def get(tz, for \\ Date.now)
  # "Z"/"UT"/"GMT" and the integer 0 are treated as plain UTC.
  def get(tz, for) when tz in ["Z", "UT", "GMT"], do: get(:utc, for)
  def get(:utc, _), do: %TimezoneInfo{}
  def get(0, for), do: get("UTC", for)
  # These are shorthand for specific time zones
  def get("A", for), do: get(+1, for)
  def get("M", for), do: get(+12, for)
  def get("N", for), do: get(-1, for)
  def get("Y", for), do: get(-12, for)
  # Allow querying by offset
  # Note: the signs of Etc/GMT zone names are intentionally inverted
  # relative to the numeric offset (Etc/GMT-5 is UTC+5).
  def get(offset, for) when is_number(offset) do
    if offset > 0 do
      get("Etc/GMT-#{offset}", for)
    else
      get("Etc/GMT+#{offset * -1}", for)
    end
  end
  # String offsets such as "+2" or "+0200".
  # NOTE(review): for hhmm-style inputs the minute component is discarded
  # by trunc(num/100) (e.g. "+0430" resolves as +4) — confirm intended.
  def get(<<?+, offset :: binary>>, for) do
    {num, _} = Integer.parse(offset)
    cond do
      num >= 100 -> get(trunc(num/100), for)
      true -> get(num, for)
    end
  end
  def get(<<?-, offset :: binary>>, for) do
    {num, _} = Integer.parse(offset)
    cond do
      num >= 100 -> get(trunc(num/100) * -1, for)
      true -> get(num * -1, for)
    end
  end
  # Gets a timezone for an Erlang datetime tuple
  def get(timezone, {{_,_,_}, {_,_,_}} = datetime) do
    case Tzdata.zone_exists?(timezone) do
      false ->
        # Not a canonical zone name; try resolving it as an abbreviation
        # valid at the given moment in time.
        case @abbreviations |> Enum.member?(timezone) do
          true ->
            # Lookup the real timezone for this abbreviation and date
            seconds_from_zeroyear = :calendar.datetime_to_gregorian_seconds(datetime)
            case lookup_timezone_by_abbreviation(timezone, seconds_from_zeroyear) do
              {:error, _} -> {:error, "No timezone found for: #{timezone}"}
              {:ok, {name, period}} -> tzdata_to_timezone(period, name)
            end
          false ->
            {:error, "No timezone found for: #{timezone}"}
        end
      true ->
        # Canonical zone: select the period covering this wall-clock time.
        seconds_from_zeroyear = :calendar.datetime_to_gregorian_seconds(datetime)
        [period | _] = Tzdata.periods_for_time(timezone, seconds_from_zeroyear, :wall)
        period |> tzdata_to_timezone(timezone)
    end
  end
  # Gets a timezone for a DateTime struct
  def get(timezone, %DateTime{} = dt) do
    case Tzdata.zone_exists?(timezone) do
      false ->
        case @abbreviations |> Enum.member?(timezone) do
          true ->
            # Lookup the real timezone for this abbreviation and date
            seconds_from_zeroyear = dt |> Date.to_secs(:zero)
            case lookup_timezone_by_abbreviation(timezone, seconds_from_zeroyear) do
              {:error, _} -> {:error, "No timezone found for: #{timezone}"}
              {:ok, {name, period}} -> tzdata_to_timezone(period, name)
            end
          false ->
            {:error, "No timezone found for: #{timezone}"}
        end
      true ->
        seconds_from_zeroyear = dt |> Date.to_secs(:zero)
        [period | _] = Tzdata.periods_for_time(timezone, seconds_from_zeroyear, :wall)
        period |> tzdata_to_timezone(timezone)
    end
  end
  @doc """
  Convert a date to the given timezone (either TimezoneInfo or a timezone name)
  """
  @spec convert(date :: DateTime.t, tz :: TimezoneInfo.t | String.t) :: DateTime.t
  def convert(%DateTime{ms: ms} = date, %TimezoneInfo{full_name: name} = tz) do
    # Calculate the difference between `date`'s timezone, and the provided timezone
    difference = diff(date, tz)
    # Offset the provided date's time by the difference
    shifted = Date.shift(date, mins: difference) |> Map.put(:timezone, tz)
    # Check the shifted datetime to make sure it's in the right zone
    seconds_from_zeroyear = shifted |> Date.to_secs(:zero, utc: false)
    [period | _] = Tzdata.periods_for_time(name, seconds_from_zeroyear, :wall)
    case period |> tzdata_to_timezone(name) do
      # No change, we're valid
      ^tz ->
        shifted
        |> Map.put(:ms, ms)
      # The shift put us in a new timezone, so shift by the updated
      # difference, and set the zone
      new_zone ->
        difference = diff(shifted, new_zone)
        Date.shift(shifted, mins: difference)
        |> Map.put(:timezone, new_zone)
        |> Map.put(:ms, ms)
    end
  end
  def convert(date, tz) when is_binary(tz) do
    case get(tz, date) do
      {:error, e} -> {:error, e}
      timezone -> convert(date, timezone)
    end
  end
  @doc """
  Determine what offset is required to convert a date into a target timezone
  """
  @spec diff(date :: DateTime.t, tz :: TimezoneInfo.t) :: integer
  def diff(%DateTime{:timezone => origin}, %TimezoneInfo{:offset_std => dest_std, :offset_utc => dest_utc}) do
    %TimezoneInfo{:offset_std => origin_std, :offset_utc => origin_utc} = origin
    cond do
      # Same UTC base offset: only the DST (std) offsets can differ.
      origin_utc == dest_utc -> dest_std - origin_std
      true -> (dest_utc + dest_std) - (origin_utc + origin_std)
    end
  end
  # Fetches the first timezone period which matches the abbreviation and is
  # valid for the given moment in time (secs from :zero)
  defp lookup_timezone_by_abbreviation(abbr, secs) do
    # Lazily scan every canonical zone's periods and take the first zone
    # having a period that matches both the abbreviation and the moment.
    result = Tzdata.canonical_zone_list
    |> Stream.map(fn name -> {:ok, periods} = Tzdata.periods(name); {name, periods} end)
    |> Stream.map(fn {name, periods} ->
      # drop_while returns false (i.e. stops) on the first matching period;
      # :min/:max are open-ended boundary markers in Tzdata.
      p = periods |> Enum.drop_while(fn %{:from => %{:wall => from}, :until => %{:wall => until}, :zone_abbr => abbrev} ->
        cond do
          from == :min && until >= secs && abbrev == abbr -> false
          from == :min && until == :max && abbrev == abbr -> false
          from <= secs && until == :max && abbrev == abbr -> false
          from <= secs && until >= secs && abbrev == abbr -> false
          true -> true
        end
      end)
      case p do
        [x|_] -> {name, x}
        [] -> {name, nil}
      end
    end)
    |> Stream.filter(fn {_, nil} -> false; {_, _} -> true end)
    |> Enum.take(1)
    case result do
      [x] -> {:ok, x}
      [] -> {:error, :not_found}
    end
  end
  # Converts a Tzdata period map into a %TimezoneInfo{}. Tzdata offsets are
  # in seconds; TimezoneInfo stores them in minutes.
  defp tzdata_to_timezone(%{from: %{standard: from}, std_off: std_off_secs, until: %{standard: until}, utc_off: utc_off_secs, zone_abbr: abbr} = _tzdata, zone) do
    start_bound = boundary_to_erlang_datetime(from)
    end_bound = boundary_to_erlang_datetime(until)
    %TimezoneInfo{
      full_name: zone,
      abbreviation: abbr,
      offset_std: trunc(std_off_secs / 60),
      offset_utc: trunc(utc_off_secs / 60),
      from: start_bound |> erlang_datetime_to_boundary_date,
      until: end_bound |> erlang_datetime_to_boundary_date
    }
  end
  # :min/:max pass through as open-ended boundaries; otherwise convert
  # gregorian seconds to an Erlang datetime tuple.
  defp boundary_to_erlang_datetime(:min), do: :min
  defp boundary_to_erlang_datetime(:max), do: :max
  defp boundary_to_erlang_datetime(secs), do: :calendar.gregorian_seconds_to_datetime(trunc(secs))
  # Tags a boundary datetime with its weekday atom, as TimezoneInfo expects.
  defp erlang_datetime_to_boundary_date(:min), do: :min
  defp erlang_datetime_to_boundary_date(:max), do: :max
  defp erlang_datetime_to_boundary_date({{y, m, d}, _} = date) do
    dow = case :calendar.day_of_the_week({y, m, d}) do
      1 -> :monday
      2 -> :tuesday
      3 -> :wednesday
      4 -> :thursday
      5 -> :friday
      6 -> :saturday
      7 -> :sunday
    end
    {dow, date}
  end
end
|
lib/timezone/timezone.ex
| 0.88729
| 0.591399
|
timezone.ex
|
starcoder
|
defmodule Artheon.Artwork do
  use Artheon.Web, :model

  schema "artworks" do
    field :uid, :string
    field :slug, :string
    field :title, :string
    field :category, :string
    field :medium, :string
    field :created_at, Ecto.DateTime
    field :updated_at, Ecto.DateTime
    field :date, Ecto.Date
    field :date_str, :string
    field :height, :float
    field :width, :float
    field :depth, :float
    field :diameter, :float
    field :website, :string
    field :collecting_institution, :string
    field :image_rights, :string
    has_many :images, Artheon.ArtworkImage
    belongs_to :artist, Artheon.Artist
  end

  # Fields that may be set through a changeset.
  @editable_fields [
    :uid,
    :slug,
    :title,
    :category,
    :medium,
    :created_at,
    :updated_at,
    :date,
    :date_str,
    :height,
    :width,
    :depth,
    :diameter,
    :website,
    :collecting_institution,
    :image_rights,
    :artist_id
  ]
  # Fields that must be present for the changeset to be valid.
  @required_fields [
    :uid,
    :slug,
    :title,
    :height,
    :width
  ]

  @doc """
  Builds a changeset based on the `struct` and `params`.

  The first clause handles raw API-shaped params (string keys, nested
  `"dimensions"` map): it flattens the dimensions, truncates the
  255-character string columns, normalizes the date fields, and then
  delegates to the generic clause, which casts and validates.
  """
  def changeset(struct), do: changeset(struct, %{})
  def changeset(struct, %{
    "medium" => medium,
    "title" => title,
    "slug" => slug,
    "dimensions" => %{
      "cm" => %{
        "depth" => depth,
        "diameter" => diameter,
        "height" => height,
        "width" => width
      }
    },
    "date" => date,
    "created_at" => created_at,
    "updated_at" => updated_at,
  } = params) do
    artwork_params = params
    |> Map.drop(["dimensions", "date", "created_at", "updated_at", "slug", "title", "medium"])
    |> Map.put("slug", String.slice(slug, 0, 255))
    |> Map.put("title", String.slice(title, 0, 255))
    |> Map.put("medium", String.slice(medium, 0, 255))
    |> Map.put("height", height)
    |> Map.put("width", width)
    |> Map.put("depth", depth)
    |> Map.put("diameter", diameter)
    |> Map.put("date", parse_date(date))
    # Keep the original free-form date string alongside the parsed date.
    |> Map.put("date_str", date)
    # NOTE(review): `to_ecto_datetime/1` is not defined in this module —
    # presumably imported via `use Artheon.Web, :model`; confirm.
    |> Map.put("created_at", to_ecto_datetime(created_at))
    |> Map.put("updated_at", to_ecto_datetime(updated_at))
    changeset(struct, artwork_params)
  end
  def changeset(struct, params) do
    struct
    |> cast(params, @editable_fields)
    |> validate_required(@required_fields)
    |> unique_constraint(:uid)
    |> unique_constraint(:slug)
  end

  # Extracts the first 4-digit year from a free-form date string and
  # returns an ISO-8601 date string pinned to January 1st, or nil when no
  # year can be found (Ecto casts the string to Ecto.Date during `cast`).
  # Fixed: the previous spec claimed %Ecto.Date{} was returned, but the
  # function actually returns a binary or nil.
  @spec parse_date(String.t) :: String.t | nil
  defp parse_date(date) when byte_size(date) >= 4 do
    with [year] <- Regex.run(~r/\d{4}/, date) do
      "#{year}-01-01"
    else
      _ ->
        nil
    end
  end
  defp parse_date(_date), do: nil
end
|
web/models/artwork.ex
| 0.517327
| 0.413981
|
artwork.ex
|
starcoder
|
defmodule ExOauth2Provider.Authorization.Code do
  @moduledoc """
  Methods for authorization code flow.
  The flow consists of three method calls:
  1. `preauthorize(resource_owner, request)`
  This validates the request. If a resource owner already have been
  authenticated previously it'll respond with a redirect tuple.
  2. `authorize(resource_owner, request)`
  This confirms a resource owner authorization, and will generate an access
  token.
  3. `deny(resource_owner, request)`
  This rejects a resource owner authorization.
  ---
  In a controller it could look like this:
  ```elixir
  alias ExOauth2Provider.Authorization
  def new(conn, params) do
    case Authorization.preauthorize(current_resource_owner(conn), params) do
      {:ok, client, scopes} ->
        render(conn, "new.html", params: params, client: client, scopes: scopes)
      {:native_redirect, %{code: code}} ->
        redirect(conn, to: oauth_authorization_path(conn, :show, code))
      {:redirect, redirect_uri} ->
        redirect(conn, external: redirect_uri)
      {:error, error, status} ->
        conn
        |> put_status(status)
        |> render("error.html", error: error)
    end
  end
  def create(conn, params) do
    conn
    |> current_resource_owner
    |> Authorization.authorize(params)
    |> redirect_or_render(conn)
  end
  def delete(conn, params) do
    conn
    |> current_resource_owner
    |> Authorization.deny(params)
    |> redirect_or_render(conn)
  end
  ```
  """
  alias ExOauth2Provider.{
    Config,
    AccessTokens,
    AccessGrants,
    Authorization.Utils,
    Authorization.Utils.Response,
    RedirectURI,
    Scopes,
    Utils.Error
  }
  alias Ecto.Schema
  @doc """
  Validates an authorization code flow request.
  Will check if there's already an existing access token with same scope and client
  for the resource owner.
  ## Example
      resource_owner
      |> ExOauth2Provider.Authorization.preauthorize(%{
        "client_id" => "Jf5rM8hQBc",
        "response_type" => "code"
      }, otp_app: :my_app)
  ## Response
      {:ok, client, scopes}                                         # Show request page with client and scopes
      {:error, %{error: error, error_description: _}, http_status}  # Show error page with error and http status
      {:redirect, redirect_uri}                                     # Redirect
      {:native_redirect, %{code: code}}                             # Redirect to :show page
  """
  @spec preauthorize(Schema.t(), map(), keyword()) ::
          Response.preauthorization_success()
          | Response.error()
          | Response.redirect()
          | Response.native_redirect()
  def preauthorize(resource_owner, request, config \\ []) do
    # Each step takes {:ok, params} | {:error, params} and passes errors
    # through unchanged, so the first failing step determines the response.
    resource_owner
    |> Utils.prehandle_request(request, config)
    |> validate_request(config)
    |> check_previous_authorization(config)
    |> reissue_grant(config)
    |> skip_authorization_if_applicable(config)
    |> Response.preauthorize_response(config)
  end

  # Looks for an existing access token with the same client and scopes and,
  # when found, stores it under :access_token so `reissue_grant/2` can skip
  # re-asking the resource owner.
  # NOTE(review): the success clause requires a "scope" key in the request —
  # assumes `Utils.prehandle_request/3` always populates it; confirm.
  defp check_previous_authorization({:error, params}, _config), do: {:error, params}
  defp check_previous_authorization(
         {:ok,
          %{resource_owner: resource_owner, client: application, request: %{"scope" => scopes}} =
            params},
         config
       ) do
    case AccessTokens.get_token_for(resource_owner, application, scopes, config) do
      nil -> {:ok, params}
      token -> {:ok, Map.put(params, :access_token, token)}
    end
  end

  # Issues a new grant immediately when a matching access token already
  # exists (resource owner previously authorized this client/scope).
  defp reissue_grant({:error, params}, _config), do: {:error, params}
  defp reissue_grant({:ok, %{access_token: _access_token} = params}, config),
    do: issue_grant({:ok, params}, config)
  defp reissue_grant({:ok, params}, _config), do: {:ok, params}

  # Issues a grant without asking when the configured `skip_authorization`
  # callback returns true for this user/application pair. A grant that was
  # already issued upstream passes through untouched.
  defp skip_authorization_if_applicable({:error, _params} = error, _config), do: error
  defp skip_authorization_if_applicable({:ok, %{grant: _grant}} = payload, _config), do: payload
  defp skip_authorization_if_applicable({:ok, params}, config) do
    %{client: application, resource_owner: user} = params
    case Config.skip_authorization(config).(user, application) do
      true -> issue_grant({:ok, params}, config)
      false -> {:ok, params}
    end
  end
  @doc """
  Authorizes an authorization code flow request.
  This is used when a resource owner has authorized access. If successful,
  this will generate an access token grant.
  ## Example
      resource_owner
      |> ExOauth2Provider.Authorization.authorize(%{
        "client_id" => "Jf5rM8hQBc",
        "response_type" => "code",
        "scope" => "read write",                  # Optional
        "state" => "46012",                       # Optional
        "redirect_uri" => "https://example.com/"  # Optional
      }, otp_app: :my_app)
  ## Response
      {:ok, code}                                                   # A grant was generated
      {:error, %{error: error, error_description: _}, http_status}  # Error occurred
      {:redirect, redirect_uri}                                     # Redirect
      {:native_redirect, %{code: code}}                             # Redirect to :show page
  """
  @spec authorize(Schema.t(), map(), keyword()) ::
          Response.authorization_success()
          | Response.error()
          | Response.redirect()
          | Response.native_redirect()
  def authorize(resource_owner, request, config \\ []) do
    resource_owner
    |> Utils.prehandle_request(request, config)
    |> validate_request(config)
    |> issue_grant(config)
    |> Response.authorize_response(config)
  end

  # Creates an access grant for the validated request. Only "redirect_uri"
  # and "scope" from the request are forwarded (scope is renamed to the
  # :scopes key the grant schema expects), plus the configured expiry.
  defp issue_grant({:error, %{error: _error} = params}, _config), do: {:error, params}
  defp issue_grant(
         {:ok, %{resource_owner: resource_owner, client: application, request: request} = params},
         config
       ) do
    grant_params =
      request
      |> Map.take(["redirect_uri", "scope"])
      |> Map.new(fn {k, v} ->
        case k do
          "scope" -> {:scopes, v}
          _ -> {String.to_atom(k), v}
        end
      end)
      |> Map.put(:expires_in, Config.authorization_code_expires_in(config))
    case AccessGrants.create_grant(resource_owner, application, grant_params, config) do
      {:ok, grant} -> {:ok, Map.put(params, :grant, grant)}
      {:error, error} -> Error.add_error({:ok, params}, error)
    end
  end
  @doc """
  Rejects an authorization code flow request.
  This is used when a resource owner has rejected access.
  ## Example
      resource_owner
      |> ExOauth2Provider.Authorization.deny(%{
        "client_id" => "Jf5rM8hQBc",
        "response_type" => "code"
      }, otp_app: :my_app)
  ## Response type
      {:error, %{error: error, error_description: _}, http_status}  # Error occurred
      {:redirect, redirect_uri}                                     # Redirect
  """
  @spec deny(Schema.t(), map(), keyword()) :: Response.error() | Response.redirect()
  def deny(resource_owner, request, config \\ []) do
    resource_owner
    |> Utils.prehandle_request(request, config)
    |> validate_request(config)
    |> Error.add_error(Error.access_denied())
    |> Response.deny_response(config)
  end

  # Runs the three request validations in order; any failure marks the
  # params with an error that later steps pass through.
  defp validate_request({:error, params}, _config), do: {:error, params}
  defp validate_request({:ok, params}, config) do
    {:ok, params}
    |> validate_resource_owner()
    |> validate_redirect_uri(config)
    |> validate_scopes(config)
  end

  # The resource owner must be a struct (an authenticated schema record).
  defp validate_resource_owner({:ok, %{resource_owner: resource_owner} = params}) do
    case resource_owner do
      %{__struct__: _} -> {:ok, params}
      _ -> Error.add_error({:ok, params}, Error.invalid_request())
    end
  end

  # Every requested scope must be within the client's (or the server's
  # default) scopes.
  defp validate_scopes({:error, params}, _config), do: {:error, params}
  defp validate_scopes({:ok, %{request: %{"scope" => scopes}, client: client} = params}, config) do
    scopes = Scopes.to_list(scopes)
    server_scopes =
      client.scopes
      |> Scopes.to_list()
      |> Scopes.default_to_server_scopes(config)
    case Scopes.all?(server_scopes, scopes) do
      true -> {:ok, params}
      false -> Error.add_error({:ok, params}, Error.invalid_scopes())
    end
  end

  # The redirect URI must be either the configured native URI or valid for
  # the client; a request without a redirect_uri is invalid.
  defp validate_redirect_uri({:error, params}, _config), do: {:error, params}
  defp validate_redirect_uri(
         {:ok, %{request: %{"redirect_uri" => redirect_uri}, client: client} = params},
         config
       ) do
    cond do
      RedirectURI.native_redirect_uri?(redirect_uri, config) ->
        {:ok, params}
      RedirectURI.valid_for_authorization?(redirect_uri, client.redirect_uri, config) ->
        {:ok, params}
      true ->
        Error.add_error({:ok, params}, Error.invalid_redirect_uri())
    end
  end
  defp validate_redirect_uri({:ok, params}, _config),
    do: Error.add_error({:ok, params}, Error.invalid_request())
end
|
lib/ex_oauth2_provider/oauth2/authorization/strategy/code.ex
| 0.900836
| 0.707607
|
code.ex
|
starcoder
|
defmodule Sass do
  @moduledoc """
  Compiles SASS into CSS using a NIF for Libsass
  ## Currently supported Sass options
  * [output_style](http://sass-lang.com/documentation/file.SASS_REFERENCE.html#output_style) Use the helpers below to assign the style
  * precision `integer` - decimal rounding percision
  * source_comments `true` or `false` - Causes the line number and file where a selector is defined to be emitted into the compiled CSS as a comment
  * soure_map_embed `true` or `false`
  * source_map_contents `true` or `false`
  * omit_source_map_url `true` or `false`
  * is_indented_syntax `true` or `false`
  * indent `:tab` or `:space`
  * linefeed `:unix` or `:windows`
  * include_paths list of directorys for Sass to search for imports ex.
  `["bower_compoents", "../node_modules"]`
  """

  @doc """
  Compiles a string of SASS into a string of CSS.
  ## Parameters
  * string: Sass String.
  * options: Map of sass options as defined above
  ## Examples
      Sass.compile("$white : #fff; a { color: $white;}")
      #=> "a { color: #fff; }"
      # With Options
      Sass.compile("$white : #fff; a { color: $white;}", %{output_style: Sass.sass_style_compressed})
      #=> "a{color:#fff;}"
  """
  def compile(string, options \\ %{output_style: sass_style_nested()}) do
    Sass.Compiler.compile(String.trim(string), options)
  end

  @doc """
  Compiles a file of SASS into a string of CSS.
  ## Parameters
  * path: Path to sass file to compile.
  * options: Map of sass options as defined above
  ## Examples
      Sass.compile_file("application.scss")
      #=> "a { color: #fff; }"
      # With Options
      Sass.compile_file("application.scss", %{output_style: Sass.sass_style_compressed})
      #=> "a{color:#fff;}"
  """
  def compile_file(path, options \\ %{output_style: sass_style_nested()}) do
    Sass.Compiler.compile_file(String.trim(path), options)
  end

  @doc """
  Returns current sass version
  """
  def version, do: Sass.Compiler.version()

  # The four output-style constants map directly onto Libsass' enum values.
  @doc """
  Sass option value for sass output style [nested](http://sass-lang.com/documentation/file.SASS_REFERENCE.html#_13)
  """
  def sass_style_nested, do: 0

  @doc """
  Sass option value for sass output style [expanded](http://sass-lang.com/documentation/file.SASS_REFERENCE.html#_14)
  """
  def sass_style_expanded, do: 1

  @doc """
  Sass option value for sass output style [compact](http://sass-lang.com/documentation/file.SASS_REFERENCE.html#_15)
  """
  def sass_style_compact, do: 2

  @doc """
  Sass option value for sass output style [compressed](http://sass-lang.com/documentation/file.SASS_REFERENCE.html#_16)
  """
  def sass_style_compressed, do: 3
end
|
lib/sass.ex
| 0.803637
| 0.436142
|
sass.ex
|
starcoder
|
defmodule Ash.Resource.Calculation do
  @moduledoc "Represents a named calculation on a resource"

  defstruct [:name, :type, :calculation, :arguments, :description, :private?, :allow_nil?]

  # Option schema validated by the DSL when a calculation is declared.
  @schema [
    name: [
      type: :atom,
      required: true,
      doc: "The field name to use for the calculation value"
    ],
    type: [
      type: :any,
      required: true
    ],
    calculation: [
      type: {:custom, __MODULE__, :calculation, []},
      required: true,
      doc: "The module or `{module, opts}` to use for the calculation"
    ],
    description: [
      type: :string,
      doc: "An optional description for the calculation"
    ],
    private?: [
      type: :boolean,
      default: false,
      doc:
        "Whether or not the calculation will appear in any interfaces created off of this resource, e.g AshJsonApi and AshGraphql"
    ],
    allow_nil?: [
      type: :boolean,
      default: true,
      doc: "Whether or not the calculation can return nil."
    ]
  ]

  @type t :: %__MODULE__{
          name: atom(),
          calculation: {:ok, {atom(), any()}} | {:error, String.t()},
          arguments: list(any()),
          description: String.t() | nil,
          private?: boolean,
          allow_nil?: boolean
        }

  defmodule Argument do
    @moduledoc "An argument to a calculation"

    defstruct [:name, :type, :default, :allow_nil?, :constraints]

    # Option schema validated by the DSL when an argument is declared.
    @schema [
      name: [
        type: :atom,
        required: true,
        doc: "The name to use for the argument"
      ],
      type: [
        type: :any,
        required: true,
        doc: "The type of the argument"
      ],
      default: [
        type: {:custom, Ash.OptionsHelpers, :default, []},
        required: false,
        doc: "A default value to use for the argument if not provided"
      ],
      allow_nil?: [
        type: :boolean,
        default: true,
        doc: "Whether or not the argument value may be nil"
      ],
      constraints: [
        type: :keyword_list,
        default: [],
        doc:
          "Constraints to provide to the type when casting the value. See the type's documentation for more information."
      ]
    ]

    # Exposes the argument option schema.
    def schema, do: @schema
  end

  # Exposes the calculation option schema.
  def schema, do: @schema

  # Normalizes the `calculation` DSL option. Accepts a bare module or a
  # `{module, opts}` tuple; anything else yields an `{:error, message}`.
  def calculation(value) do
    case value do
      {module, opts} when is_atom(module) and is_list(opts) -> {:ok, {module, opts}}
      module when is_atom(module) -> {:ok, {module, []}}
      other -> {:error, "Expected a module or {module, opts}, got: #{inspect(other)}"}
    end
  end
end
|
lib/ash/resource/calculation/calculation.ex
| 0.829871
| 0.553626
|
calculation.ex
|
starcoder
|
defmodule ExUnit.Diff do
  @moduledoc false

  # Produces "edit scripts" used by ExUnit's failure formatter: nested keyword
  # fragments tagged :eq (present on both sides), :del (only on the left) and
  # :ins (only on the right).

  @doc """
  Returns an edit script representing the difference between `left` and `right`.
  Returns `nil` if they are not the same data type,
  or if the given data type is not supported.
  """
  def script(left, right)

  # Identical terms of a supported shape produce a single :eq fragment.
  def script(term, term)
      when is_binary(term) or is_number(term)
      when is_map(term) or is_list(term) or is_tuple(term) do
    [eq: inspect(term)]
  end

  # Binaries
  # Only printable strings are diffed; for non-printable binaries the `if`
  # has no else branch, so this clause returns nil (unsupported).
  def script(left, right) when is_binary(left) and is_binary(right) do
    if String.printable?(left) and String.printable?(right) do
      script_string(left, right, ?\")
    end
  end

  # Structs
  def script(%name{} = left, %name{} = right) do
    # With a custom Inspect implementation, prefer diffing the inspected
    # output; fall back to a field-by-field struct diff when that output is
    # identical, or when only the generic Inspect.Any would be used.
    if Inspect.impl_for(left) != Inspect.Any do
      inspect_left = inspect(left)
      inspect_right = inspect(right)

      if inspect_left != inspect_right do
        script_string(inspect_left, inspect_right)
      else
        script_struct(left, right, name)
      end
    else
      script_struct(left, right, name)
    end
  end

  # Maps
  # Mixed struct/map comparisons are unsupported (nil); plain maps are
  # diffed key by key with an empty name prefix.
  def script(%{} = left, %{} = right) do
    if match?(%_{}, left) or match?(%_{}, right) do
      nil
    else
      script_map(left, right, "")
    end
  end

  # Char lists and lists
  def script(left, right) when is_list(left) and is_list(right) do
    if List.ascii_printable?(left) and List.ascii_printable?(right) do
      # Printable charlists are compared as single-quoted strings.
      script_string(List.to_string(left), List.to_string(right), ?')
    else
      keywords? = Inspect.List.keyword?(left) and Inspect.List.keyword?(right)
      script = script_maybe_improper_list(left, right, keywords?)
      [{:eq, "["}, script, {:eq, "]"}]
    end
  end

  # Numbers
  # Integers and floats are only compared within their own type.
  def script(left, right)
      when is_integer(left) and is_integer(right)
      when is_float(left) and is_float(right) do
    script_string(inspect(left), inspect(right))
  end

  # Tuples
  def script(left, right) when is_tuple(left) and is_tuple(right) do
    script = script_list(Tuple.to_list(left), Tuple.to_list(right), false)
    [{:eq, "{"}, script, {:eq, "}"}]
  end

  # Mismatched or unsupported types: no script.
  def script(_left, _right), do: nil

  # Character-level diff wrapped in the given quote character (?" or ?').
  # Returns nil when the strings are too dissimilar (bag distance <= 0.4),
  # in which case the caller's del/ins fallback reads better than a noisy diff.
  defp script_string(string1, string2, token) do
    if String.bag_distance(string1, string2) > 0.4 do
      {escaped1, _} = Code.Identifier.escape(string1, token)
      {escaped2, _} = Code.Identifier.escape(string2, token)
      string1 = IO.iodata_to_binary(escaped1)
      string2 = IO.iodata_to_binary(escaped2)
      [{:eq, <<token>>}, script_string(string1, string2), {:eq, <<token>>}]
    end
  end

  defp script_string(string1, string2) do
    String.myers_difference(string1, string2)
  end

  # Splits a (possibly improper) list into its proper prefix and improper
  # tail: [1, 2 | 3] -> {[1, 2], [3]}; a proper list yields {list, []}.
  defp slice_proper_part([item | rest], result), do: slice_proper_part(rest, [item | result])
  defp slice_proper_part([], result), do: {Enum.reverse(result), []}
  defp slice_proper_part(item, result), do: {Enum.reverse(result), [item]}

  defp script_maybe_improper_list(list1, list2, keywords?) do
    {list1, improper_rest1} = slice_proper_part(list1, [])
    {list2, improper_rest2} = slice_proper_part(list2, [])
    script = script_list(list1, list2, keywords?)

    # Diff the improper tails separately, joined to the body with " | ".
    case {improper_rest1, improper_rest2} do
      {[item1], [item2]} ->
        [script, [eq: " | "] ++ script_inner(item1, item2)]

      {[item1], []} ->
        [script, del: " | " <> inspect(item1)]

      {[], [item2]} ->
        [script, ins: " | " <> inspect(item2)]

      {[], []} ->
        script
    end
  end

  defp script_list(list1, list2, keywords?) do
    # Try the cheap prefix/suffix subset match first; fall back to a full
    # Myers diff (keyword-aware when both sides are keyword lists).
    script =
      case script_subset_list(list1, list2) do
        {:ok, script} -> script
        :error when keywords? -> List.myers_difference(list1, list2, &script_keyword/2)
        :error -> List.myers_difference(list1, list2, &script/2)
      end

    format_each_fragment(script, [], keywords?)
  end

  # Same key on both sides: keep the key :eq and diff only the values.
  defp script_keyword({key, val1}, {key, val2}),
    do: [{:eq, format_key(key, true)}, script_inner(val1, val2)]

  defp script_keyword(_pair1, _pair2),
    do: nil

  # Succeeds when one list is a prefix (or, on the reversed pass, a suffix)
  # of the other, producing an eq/del/ins script without a full diff.
  defp script_subset_list(list1, list2) do
    case find_subset_list(list1, list2, []) do
      {subset, rest1, rest2} ->
        {:ok, wrap_in(:eq, Enum.reverse(subset)) ++ wrap_in(:del, rest1) ++ wrap_in(:ins, rest2)}

      nil ->
        case find_subset_list(Enum.reverse(list1), Enum.reverse(list2), []) do
          {subset, rest1, rest2} ->
            {:ok,
             wrap_in(:del, Enum.reverse(rest1)) ++
               wrap_in(:ins, Enum.reverse(rest2)) ++ wrap_in(:eq, subset)}

          nil ->
            :error
        end
    end
  end

  # Walks both lists while heads are equal, accumulating the shared prefix.
  defp find_subset_list([item | rest1], [item | rest2], subset) do
    find_subset_list(rest1, rest2, [item | subset])
  end

  defp find_subset_list(rest1, rest2, subset) when rest1 == [] or rest2 == [] do
    {subset, rest1, rest2}
  end

  defp find_subset_list(_subset, _rest1, _rest2), do: nil

  defp wrap_in(_tag, []), do: []
  defp wrap_in(tag, items), do: [{tag, items}]

  # Renders the eq/del/ins/diff fragments of a list script, inserting ", "
  # separators with the appropriate tag between neighbouring fragments.
  defp format_each_fragment([{:diff, script}], [], _keywords?) do
    script
  end

  defp format_each_fragment([{kind, elems}], [], keywords?) do
    [format_fragment(kind, elems, keywords?)]
  end

  defp format_each_fragment([_, _] = fragments, acc, keywords?) do
    result =
      case fragments do
        # A trailing del/ins pair needs no separator between the two halves.
        [del: elems1, ins: elems2] ->
          [
            format_fragment(:del, elems1, keywords?),
            format_fragment(:ins, elems2, keywords?)
          ]

        [{kind1, elems1}, {kind2, elems2}] ->
          [
            format_fragment(kind1, elems1, keywords?),
            script_comma(kind1, kind2),
            format_fragment(kind2, elems2, keywords?)
          ]
      end

    Enum.reverse(acc, result)
  end

  defp format_each_fragment([{:diff, script} | rest], acc, keywords?) do
    format_each_fragment(rest, [{:eq, ", "}, script | acc], keywords?)
  end

  defp format_each_fragment([{kind, elems} | rest], acc, keywords?) do
    new_acc = [script_comma(kind, kind), format_fragment(kind, elems, keywords?) | acc]
    format_each_fragment(rest, new_acc, keywords?)
  end

  # Chooses which tag the ", " separator carries, based on its neighbours.
  defp script_comma(:diff, :diff), do: {:eq, ", "}
  defp script_comma(:diff, kind), do: {kind, ", "}
  defp script_comma(kind, :diff), do: {kind, ", "}
  defp script_comma(:eq, kind), do: {kind, ", "}
  defp script_comma(kind, :eq), do: {kind, ", "}
  defp script_comma(kind, _), do: {kind, ", "}

  defp format_fragment(:diff, script, _) do
    script
  end

  defp format_fragment(kind, elems, keywords?) do
    formatter = fn
      {key, val} when keywords? ->
        format_key_value(key, val, true)

      elem ->
        inspect(elem)
    end

    {kind, Enum.map_join(elems, ", ", formatter)}
  end

  # Diffs two maps; `name` is "" for plain maps or the inspected struct name.
  defp script_map(left, right, name) do
    {surplus, altered, missing, same} = map_difference(left, right)

    keywords? =
      Inspect.List.keyword?(surplus) and Inspect.List.keyword?(altered) and
        Inspect.List.keyword?(missing) and Inspect.List.keyword?(same)

    result =
      Enum.reduce(missing, [], fn {key, val}, acc ->
        map_pair = format_key_value(key, val, keywords?)
        [[ins: ", ", ins: map_pair] | acc]
      end)

    # When every pair is either inserted or deleted, drop the leading
    # separator of the first inserted pair (a deleted pair supplies one).
    result =
      if same == [] and altered == [] and missing != [] and surplus != [] do
        [[_ | elem_diff] | rest] = result
        [elem_diff | rest]
      else
        result
      end

    result =
      Enum.reduce(surplus, result, fn {key, val}, acc ->
        map_pair = format_key_value(key, val, keywords?)
        [[del: ", ", del: map_pair] | acc]
      end)

    result =
      Enum.reduce(altered, result, fn {key, {val1, val2}}, acc ->
        value_diff = script_inner(val1, val2)
        [[{:eq, ", "}, {:eq, format_key(key, keywords?)}, value_diff] | acc]
      end)

    result =
      Enum.reduce(same, result, fn {key, val}, acc ->
        map_pair = format_key_value(key, val, keywords?)
        [[eq: ", ", eq: map_pair] | acc]
      end)

    # Strip the separator preceding the first rendered pair and wrap in
    # "%Name{" ... "}" (Name is empty for plain maps).
    [[_ | elem_diff] | rest] = result
    [{:eq, "%" <> name <> "{"}, [elem_diff | rest], {:eq, "}"}]
  end

  defp script_struct(left, right, name) do
    left = Map.from_struct(left)
    right = Map.from_struct(right)
    script_map(left, right, inspect(name))
  end

  # Partitions entries into keys only in map1 (surplus), keys in both with
  # different values (altered), keys only in map2 (missing) and keys with
  # equal values (same).
  defp map_difference(map1, map2) do
    {surplus, altered, same} =
      Enum.reduce(map1, {[], [], []}, fn {key, val1}, {surplus, altered, same} ->
        case Map.fetch(map2, key) do
          {:ok, ^val1} ->
            {surplus, altered, [{key, val1} | same]}

          {:ok, val2} ->
            {surplus, [{key, {val1, val2}} | altered], same}

          :error ->
            {[{key, val1} | surplus], altered, same}
        end
      end)

    missing =
      Enum.reduce(map2, [], fn {key, _} = pair, acc ->
        if Map.has_key?(map1, key), do: acc, else: [pair | acc]
      end)

    {surplus, altered, missing, same}
  end

  # `inspect(key) => ` form for non-keyword maps; `key: ` form otherwise.
  defp format_key(key, false) do
    inspect(key) <> " => "
  end

  # nil/true/false inspect without a leading colon, so they are handled apart.
  defp format_key(key, true) when is_nil(key) or is_boolean(key) do
    inspect(key) <> ": "
  end

  defp format_key(key, true) do
    ":" <> result = inspect(key)
    result <> ": "
  end

  defp format_key_value(key, value, keyword?) do
    format_key(key, keyword?) <> inspect(value)
  end

  # Diff of nested values: equal terms stay :eq; otherwise recurse into
  # script/2, falling back to a del/ins pair for unsupported combinations.
  defp script_inner(term, term) do
    [eq: inspect(term)]
  end

  defp script_inner(left, right) do
    if result = script(left, right) do
      result
    else
      [del: inspect(left), ins: inspect(right)]
    end
  end
end
|
lib/ex_unit/lib/ex_unit/diff.ex
| 0.78785
| 0.567937
|
diff.ex
|
starcoder
|
defmodule Cloak.Ciphers.AES.GCM do
  @moduledoc """
  A `Cloak.Cipher` which encrypts values with the AES cipher in GCM (block) mode.
  Internally relies on Erlang's `:crypto.crypto_one_time_aead/7` on OTP 22+
  and `:crypto.block_encrypt/4` on earlier releases.
  """

  @behaviour Cloak.Cipher

  # Additional authenticated data mixed into the GCM authentication tag.
  @aad "AES256GCM"

  # Default IV length in bytes. Kept at 16 for compatibility with existing
  # ciphertexts; callers may override it via the :iv_length option.
  @default_iv_length 16

  alias Cloak.Tags.Encoder
  alias Cloak.Tags.Decoder

  @doc """
  Callback implementation for `Cloak.Cipher`. Encrypts a value using
  AES in GCM mode.
  Generates a random IV for every encryption, and prepends the key tag, IV,
  and ciphertag to the beginning of the ciphertext. The format can be
  diagrammed like this:
      +----------------------------------------------------------+----------------------+
      |                          HEADER                          |         BODY         |
      +-------------------+---------------+----------------------+----------------------+
      | Key Tag (n bytes) | IV (n bytes)  | Ciphertag (16 bytes) | Ciphertext (n bytes) |
      +-------------------+---------------+----------------------+----------------------+
      |                   |_________________________________
      |                                                     |
      +---------------+-----------------+-------------------+
      | Type (1 byte) | Length (1 byte) | Key Tag (n bytes) |
      +---------------+-----------------+-------------------+
  The `Key Tag` component of the header breaks down into a `Type`, `Length`,
  and `Value` triplet for easy decoding.
  **Important**: Because a random IV is used for every encryption, `encrypt/2`
  will not produce the same ciphertext twice for the same value.
  """
  @impl true
  def encrypt(plaintext, opts) do
    key = Keyword.fetch!(opts, :key)
    tag = Keyword.fetch!(opts, :tag)
    iv_length = Keyword.get(opts, :iv_length, @default_iv_length)
    # Fresh random IV per encryption — an IV must never be reused with the
    # same key in GCM mode.
    iv = :crypto.strong_rand_bytes(iv_length)
    {ciphertext, ciphertag} = do_encrypt(key, iv, plaintext)
    {:ok, Encoder.encode(tag) <> iv <> ciphertag <> ciphertext}
  end

  @doc """
  Callback implementation for `Cloak.Cipher`. Decrypts a value
  encrypted with AES in GCM mode.
  """
  @impl true
  def decrypt(ciphertext, opts) do
    if can_decrypt?(ciphertext, opts) do
      key = Keyword.fetch!(opts, :key)
      iv_length = Keyword.get(opts, :iv_length, @default_iv_length)

      # Split the decoded remainder into IV, 16-byte ciphertag and body,
      # mirroring the layout produced by encrypt/2.
      %{remainder: <<iv::binary-size(iv_length), ciphertag::binary-16, ciphertext::binary>>} =
        Decoder.decode(ciphertext)

      {:ok, do_decrypt(key, iv, ciphertext, ciphertag)}
    else
      :error
    end
  end

  @doc """
  Callback implementation for `Cloak.Cipher`. Determines whether this module
  can decrypt the given ciphertext.
  """
  @impl true
  def can_decrypt?(ciphertext, opts) do
    tag = Keyword.fetch!(opts, :tag)
    iv_length = Keyword.get(opts, :iv_length, @default_iv_length)

    case Decoder.decode(ciphertext) do
      %{
        tag: ^tag,
        remainder: <<_iv::binary-size(iv_length), _ciphertag::binary-16, _ciphertext::binary>>
      } ->
        true

      _other ->
        false
    end
  end

  # TODO: remove this once support for Erlang/OTP 21 is dropped
  #
  # BUGFIX: compare the OTP release numerically. The previous lexicographic
  # string comparison (`System.otp_release() >= "22"`) misorders releases
  # with a different digit count (e.g. "100" < "22" as strings), which would
  # silently select the deprecated pre-22 API on future OTP versions.
  if String.to_integer(System.otp_release()) >= 22 do
    defp do_decrypt(key, iv, ciphertext, ciphertag) do
      :crypto.crypto_one_time_aead(:aes_256_gcm, key, iv, ciphertext, @aad, ciphertag, false)
    end

    defp do_encrypt(key, iv, plaintext) do
      :crypto.crypto_one_time_aead(:aes_256_gcm, key, iv, plaintext, @aad, true)
    end
  else
    defp do_decrypt(key, iv, ciphertext, ciphertag) do
      :crypto.block_decrypt(:aes_gcm, key, iv, {@aad, ciphertext, ciphertag})
    end

    defp do_encrypt(key, iv, plaintext) do
      :crypto.block_encrypt(:aes_gcm, key, iv, {@aad, plaintext})
    end
  end
end
|
lib/cloak/ciphers/aes_gcm.ex
| 0.836388
| 0.520862
|
aes_gcm.ex
|
starcoder
|
defmodule JSONAPI.Serializer do
  @moduledoc """
  Serialize a map of data into a properly formatted JSON API response object
  """
  import JSONAPI.Ecto, only: [assoc_loaded?: 1]

  alias JSONAPI.{Config, Utils}
  alias Utils.String, as: JString

  require Logger

  @typep serialized_doc :: map()

  @doc """
  Takes a view, data and a optional plug connection and returns a fully JSONAPI Serialized document.
  This assumes you are using the JSONAPI.View and have data in maps or structs.
  Please refer to `JSONAPI.View` for more information. If you are in interested in relationships
  and includes you may also want to reference the `JSONAPI.QueryParser`.
  """
  @spec serialize(module(), term(), Plug.Conn.t() | nil, map() | nil, list()) :: serialized_doc()
  def serialize(view, data, conn \\ nil, meta \\ nil, options \\ []) do
    # Includes and pagination come from the parsed query config placed in
    # conn.assigns (by the query parser plug) when a conn is given.
    {query_includes, query_page} =
      case conn do
        %Plug.Conn{assigns: %{jsonapi_query: %Config{include: include, page: page}}} ->
          {include, page}

        _ ->
          {[], nil}
      end

    {to_include, encoded_data} = encode_data(view, data, conn, query_includes, options)

    encoded_data = %{
      data: encoded_data,
      included: flatten_included(to_include)
    }

    # Top-level meta is only attached when supplied as a map.
    encoded_data =
      if is_map(meta) do
        Map.put(encoded_data, :meta, meta)
      else
        encoded_data
      end

    merge_links(encoded_data, data, view, conn, query_page, remove_links?(), options)
  end

  # Lists are encoded element-wise. map_reduce yields
  # {list_of_each_item_includes, encoded_items_in_order}: the mapped value
  # carries each item's includes, the accumulator the encoded items.
  def encode_data(view, data, conn, query_includes, options) when is_list(data) do
    Enum.map_reduce(data, [], fn d, acc ->
      {to_include, encoded_data} = encode_data(view, d, conn, query_includes, options)
      {to_include, acc ++ [encoded_data]}
    end)
  end

  # Encodes a single resource object: id, type, transformed attributes,
  # links, optional meta and its relationships.
  def encode_data(view, data, conn, query_includes, options) do
    valid_includes = get_includes(view, query_includes)

    encoded_data = %{
      id: view.id(data),
      type: view.type(),
      attributes: transform_fields(view.attributes(data, conn)),
      relationships: %{}
    }

    # Resource-level links; `nil` page since pagination is top-level only.
    doc = merge_links(encoded_data, data, view, conn, nil, remove_links?(), options)

    doc =
      case view.meta(data, conn) do
        nil -> doc
        meta -> Map.put(doc, :meta, meta)
      end

    encode_relationships(conn, doc, {view, data, query_includes, valid_includes}, options)
  end

  @spec encode_relationships(Plug.Conn.t(), serialized_doc(), tuple(), list()) :: tuple()
  def encode_relationships(conn, doc, {view, data, _, _} = view_info, options) do
    # Only relationships whose association data is actually loaded are built.
    view.relationships()
    |> Enum.filter(&data_loaded?(Map.get(data, elem(&1, 0))))
    |> Enum.map_reduce(doc, &build_relationships(conn, view_info, &1, &2, options))
  end

  @spec build_relationships(Plug.Conn.t(), tuple(), tuple(), tuple(), list()) :: tuple()
  def build_relationships(
        conn,
        {view, data, query_includes, valid_includes},
        {key, include_view},
        acc,
        options
      ) do
    # A relationship may be declared as `view` or as `{view, :include}`.
    rel_view =
      case include_view do
        {view, :include} -> view
        view -> view
      end

    rel_data = Map.get(data, key)

    # Build the relationship url
    rel_key = transform_fields(key)
    rel_url = view.url_for_rel(data, rel_key, conn)

    # Build the relationship
    acc =
      put_in(
        acc,
        [:relationships, rel_key],
        encode_relation({rel_view, rel_data, rel_url, conn})
      )

    valid_include_view = include_view(valid_includes, key)

    # Fully encode (and collect into `included`) only relationships that are
    # both marked for inclusion and actually loaded.
    if {rel_view, :include} == valid_include_view && data_loaded?(rel_data) do
      # Collect nested includes under this key (e.g. `author` under
      # `comments` for an include of `comments.author`).
      rel_query_includes =
        if is_list(query_includes) do
          query_includes
          |> Enum.reduce([], fn
            {^key, value}, acc -> acc ++ [value]
            _, acc -> acc
          end)
          |> List.flatten()
        else
          []
        end

      {rel_included, encoded_rel} =
        encode_data(rel_view, rel_data, conn, rel_query_includes, options)

      {rel_included ++ [encoded_rel], acc}
    else
      {nil, acc}
    end
  end

  defp include_view(valid_includes, key) when is_list(valid_includes) do
    valid_includes
    |> Keyword.get(key)
    |> generate_view_tuple
  end

  defp include_view(view, _key), do: generate_view_tuple(view)

  # Normalizes a relationship declaration to the `{view, :include}` form.
  defp generate_view_tuple({view, :include}), do: {view, :include}
  defp generate_view_tuple(view) when is_atom(view), do: {view, :include}

  @spec data_loaded?(map() | list()) :: boolean()
  def data_loaded?(rel_data) do
    assoc_loaded?(rel_data) && (is_map(rel_data) || is_list(rel_data))
  end

  @spec encode_relation(tuple()) :: map()
  def encode_relation({rel_view, rel_data, _rel_url, _conn} = info) do
    data = %{
      data: encode_rel_data(rel_view, rel_data)
    }

    merge_related_links(data, info, remove_links?())
  end

  # Adds the view's self link and custom links underneath any existing links.
  defp merge_base_links(%{links: links} = doc, data, view, conn) do
    view_links =
      %{self: view.url_for(data, conn)}
      |> Map.merge(view.links(data, conn))
      |> Map.merge(links)

    Map.merge(doc, %{links: view_links})
  end

  # merge_links/7: fifth arg is the page (nil when not paginating), sixth
  # whether links are globally disabled via config (`remove_links?/0`).
  defp merge_links(doc, data, view, conn, nil, false, _options) do
    doc
    |> Map.merge(%{links: %{}})
    |> merge_base_links(data, view, conn)
  end

  defp merge_links(doc, data, view, conn, page, false, options) do
    doc
    |> Map.merge(%{links: view.pagination_links(data, conn, page, options)})
    |> merge_base_links(data, view, conn)
  end

  # Links removed by configuration: document passes through untouched.
  defp merge_links(doc, _data, _view, _conn, _page, _remove_links, _options), do: doc

  defp merge_related_links(
         encoded_data,
         {rel_view, rel_data, rel_url, conn},
         false = _remove_links
       ) do
    Map.merge(encoded_data, %{links: %{self: rel_url, related: rel_view.url_for(rel_data, conn)}})
  end

  defp merge_related_links(encoded_rel_data, _info, _remove_links), do: encoded_rel_data

  @spec encode_rel_data(module(), map() | list()) :: map() | nil
  def encode_rel_data(_view, nil), do: nil

  def encode_rel_data(view, data) when is_list(data) do
    Enum.map(data, &encode_rel_data(view, &1))
  end

  # Resource identifier object: type and id only.
  def encode_rel_data(view, data) do
    %{
      type: view.type(),
      id: view.id(data)
    }
  end

  # Flatten and unique all the included objects
  @spec flatten_included(keyword()) :: keyword()
  def flatten_included(included) do
    included
    |> List.flatten()
    |> Enum.reject(&is_nil/1)
    |> Enum.uniq()
  end

  defp get_includes(view, query_includes) do
    includes = get_default_includes(view) ++ get_query_includes(view, query_includes)
    Enum.uniq(includes)
  end

  # Relationships declared as `{view, :include}` are always included.
  defp get_default_includes(view) do
    rels = view.relationships()

    Enum.filter(rels, fn
      {_k, {_v, :include}} -> true
      _ -> false
    end)
  end

  defp get_query_includes(view, query_includes) do
    rels = view.relationships()

    query_includes
    |> Enum.map(fn
      {include, _} -> Keyword.take(rels, [include])
      include -> Keyword.take(rels, [include])
    end)
    |> List.flatten()
  end

  defp remove_links?, do: Application.get_env(:jsonapi, :remove_links, false)

  # Applies the globally configured field-name transformation (camelize or
  # dasherize) to attribute/relationship keys; anything else passes through.
  defp transform_fields(fields) do
    case JString.field_transformation() do
      :camelize -> JString.expand_fields(fields, &JString.camelize/1)
      :dasherize -> JString.expand_fields(fields, &JString.dasherize/1)
      _ -> fields
    end
  end
end
|
lib/jsonapi/serializer.ex
| 0.817101
| 0.434941
|
serializer.ex
|
starcoder
|
defmodule Membrane.Core.Element.DemandController do
  @moduledoc false
  # Module handling demands incoming through output pads.
  use Bunch

  alias Membrane.Core.{CallbackHandler, Message}
  alias Membrane.Core.Child.PadModel
  alias Membrane.Core.Element.{ActionHandler, State, Toilet}
  alias Membrane.Element.CallbackContext
  alias Membrane.Pad

  require CallbackContext.Demand
  require Membrane.Core.Child.PadModel
  require Membrane.Logger

  @doc """
  Handles demand coming on an output pad. Updates demand value and executes `handle_demand` callback.
  """
  @spec handle_demand(Pad.ref_t(), non_neg_integer, State.t()) :: State.t()
  def handle_demand(pad_ref, size, state) do
    data = PadModel.get_data!(state, pad_ref)
    # Assertive match: demand messages may only arrive on :output pads in
    # :pull mode; anything else is a bug and should crash here.
    %{direction: :output, mode: :pull} = data
    do_handle_demand(pad_ref, size, data, state)
  end

  # :auto demand mode — bookkeeping only: the incoming demand is recorded and
  # propagated by (re)sending auto demands to the associated input pads
  # instead of invoking the element's handle_demand callback.
  defp do_handle_demand(pad_ref, size, %{demand_mode: :auto} = data, state) do
    %{demand: old_demand, associated_pads: associated_pads} = data
    state = PadModel.set_data!(state, pad_ref, :demand, old_demand + size)

    # Associated pads only need to be resupplied when the demand just went
    # from non-positive to positive; otherwise they were already supplied.
    if old_demand <= 0 do
      Enum.reduce(associated_pads, state, &send_auto_demand_if_needed/2)
    else
      state
    end
  end

  # :manual demand mode — accumulate the demand on the pad and run the
  # element's handle_demand callback when allowed (see exec_handle_demand?/1).
  defp do_handle_demand(pad_ref, size, %{demand_mode: :manual} = data, state) do
    demand = data.demand + size
    data = %{data | demand: demand}
    state = PadModel.set_data!(state, pad_ref, data)

    if exec_handle_demand?(data) do
      require CallbackContext.Demand
      context = &CallbackContext.Demand.from_state(&1, incoming_demand: size)

      CallbackHandler.exec_and_handle_callback(
        :handle_demand,
        ActionHandler,
        %{
          # Re-checked between continuation chunks: stop executing once the
          # pad's demand is exhausted or the stream has ended.
          split_continuation_arbiter: &exec_handle_demand?(PadModel.get_data!(&1, pad_ref)),
          context: context
        },
        [pad_ref, demand, data.other_demand_unit],
        state
      )
    else
      state
    end
  end

  @doc """
  Sends auto demand to an input pad if it should be sent.
  The demand should be sent when the current demand on the input pad is at most
  half of the demand request size and if there's positive demand on each of
  associated output pads.
  Also, the `demand_decrease` argument can be passed, decreasing the size of the
  demand on the input pad before proceeding to the rest of the function logic.
  """
  @spec send_auto_demand_if_needed(Pad.ref_t(), integer, State.t()) :: State.t()
  def send_auto_demand_if_needed(pad_ref, demand_decrease \\ 0, state) do
    data = PadModel.get_data!(state, pad_ref)

    %{
      demand: demand,
      toilet: toilet,
      associated_pads: associated_pads,
      auto_demand_size: demand_request_size
    } = data

    demand = demand - demand_decrease

    demand =
      if demand <= div(demand_request_size, 2) and auto_demands_positive?(associated_pads, state) do
        # With a toilet the missing demand is drained locally; otherwise it
        # is sent to the element on the other end of the pad.
        if toilet do
          Toilet.drain(toilet, demand_request_size - demand)
        else
          Membrane.Logger.debug_verbose(
            "Sending auto demand of size #{demand_request_size - demand} on pad #{inspect(pad_ref)}"
          )

          %{pid: pid, other_ref: other_ref} = data
          Message.send(pid, :demand, demand_request_size - demand, for_pad: other_ref)
        end

        # After sending/draining, the stored demand is topped back up to the
        # full request size.
        demand_request_size
      else
        Membrane.Logger.debug_verbose(
          "Not sending auto demand on pad #{inspect(pad_ref)}, pads data: #{inspect(state.pads_data)}"
        )

        demand
      end

    PadModel.set_data!(state, pad_ref, :demand, demand)
  end

  # True when every associated output pad currently has positive demand.
  defp auto_demands_positive?(associated_pads, state) do
    Enum.all?(associated_pads, &(PadModel.get_data!(state, &1, :demand) > 0))
  end

  # handle_demand must not run after end_of_stream, nor with demand <= 0.
  defp exec_handle_demand?(%{end_of_stream?: true}) do
    Membrane.Logger.debug_verbose("""
    Demand controller: not executing handle_demand as :end_of_stream action has already been returned
    """)

    false
  end

  defp exec_handle_demand?(%{demand: demand}) when demand <= 0 do
    Membrane.Logger.debug_verbose("""
    Demand controller: not executing handle_demand as demand is not greater than 0,
    demand: #{inspect(demand)}
    """)

    false
  end

  defp exec_handle_demand?(_pad_data) do
    true
  end
end
|
lib/membrane/core/element/demand_controller.ex
| 0.722918
| 0.541954
|
demand_controller.ex
|
starcoder
|
defmodule Sanbase.Alert.Trigger.SignalTriggerSettings do
  @moduledoc ~s"""
  An alert based on the ClickHouse signals.
  The signal we're following is configured via the 'signal' parameter
  """
  use Vex.Struct

  import Sanbase.{Validation, Alert.Validation}
  import Sanbase.DateTimeUtils, only: [round_datetime: 1, str_to_sec: 1]

  alias __MODULE__
  alias Sanbase.Model.Project
  alias Sanbase.Alert.Type
  alias Sanbase.Cache
  alias Sanbase.Signal

  # Runtime-only fields are excluded from the JSON-encoded (persisted) form.
  @derive {Jason.Encoder, except: [:filtered_target, :triggered?, :payload, :template_kv]}
  @trigger_type "signal_data"
  @enforce_keys [:type, :channel, :target]
  defstruct type: @trigger_type,
            signal: nil,
            channel: nil,
            selector: nil,
            target: nil,
            operation: nil,
            time_window: "1d",
            # Private fields, not stored in DB.
            filtered_target: %{list: []},
            triggered?: false,
            payload: %{},
            template_kv: %{}

  @type t :: %__MODULE__{
          signal: Type.signal(),
          type: Type.trigger_type(),
          channel: Type.channel(),
          target: Type.complex_target(),
          selector: map(),
          operation: Type.operation(),
          time_window: Type.time_window(),
          # Private fields, not stored in DB.
          filtered_target: Type.filtered_target(),
          triggered?: boolean(),
          payload: Type.payload(),
          template_kv: Type.template_kv()
        }

  # Vex validations applied when the settings struct is validated.
  validates(:signal, &valid_signal?/1)
  validates(:operation, &valid_operation?/1)
  validates(:time_window, &valid_time_window?/1)

  @spec type() :: String.t()
  def type(), do: @trigger_type

  # No additional processing is required after create/update of this trigger.
  def post_create_process(_trigger), do: :nochange
  def post_update_process(_trigger), do: :nochange

  # Fetches the signal values for every target in the filtered target list,
  # dropping targets whose fetch errored or returned nothing.
  def get_data(%{} = settings) do
    %{filtered_target: %{list: target_list, type: type}} = settings

    target_list
    |> Enum.map(fn identifier ->
      {identifier, fetch_signal(%{type => identifier}, settings)}
    end)
    |> Enum.reject(fn
      {_, {:error, _}} -> true
      {_, nil} -> true
      _ -> false
    end)
  end

  # Returns two datapoints — the previous window and the current window —
  # for the configured signal, cached per rounded timestamp so repeated
  # evaluations within the same run hit the cache.
  defp fetch_signal(selector, settings) do
    %{signal: signal, time_window: time_window} = settings

    cache_key =
      {__MODULE__, :fetch_signal_data, signal, selector, time_window, round_datetime(Timex.now())}
      |> Sanbase.Cache.hash()

    %{
      first_start: first_start,
      first_end: first_end,
      second_start: second_start,
      second_end: second_end
    } = timerange_params(settings)

    slug = selector.slug

    Cache.get_or_store(cache_key, fn ->
      with {:ok, %{^slug => value1}} <-
             Signal.aggregated_timeseries_data(signal, selector, first_start, first_end, []),
           {:ok, %{^slug => value2}} <-
             Signal.aggregated_timeseries_data(signal, selector, second_start, second_end, []) do
        [
          %{datetime: first_start, value: value1},
          %{datetime: second_start, value: value2}
        ]
      else
        _ -> {:error, "Cannot fetch #{signal} for #{inspect(selector)}"}
      end
    end)
  end

  # Two adjacent windows of `time_window` length ending now: the "first"
  # window is the older one, the "second" ends at the current moment.
  defp timerange_params(%SignalTriggerSettings{} = settings) do
    interval_seconds = str_to_sec(settings.time_window)
    now = Timex.now()

    %{
      first_start: Timex.shift(now, seconds: -2 * interval_seconds),
      first_end: Timex.shift(now, seconds: -interval_seconds),
      second_start: Timex.shift(now, seconds: -interval_seconds),
      second_end: now
    }
  end

  defimpl Sanbase.Alert.Settings, for: SignalTriggerSettings do
    import Sanbase.Alert.Utils

    alias Sanbase.Alert.{OperationText, ResultBuilder}

    def triggered?(%SignalTriggerSettings{triggered?: triggered}), do: triggered

    # Evaluates the alert: a non-empty data list is handed to the result
    # builder; anything else marks the alert as not triggered.
    def evaluate(%SignalTriggerSettings{} = settings, _trigger) do
      case SignalTriggerSettings.get_data(settings) do
        data when is_list(data) and data != [] ->
          build_result(data, settings)

        _ ->
          %SignalTriggerSettings{settings | triggered?: false}
      end
    end

    def build_result(data, %SignalTriggerSettings{} = settings) do
      ResultBuilder.build(data, settings, &template_kv/2)
    end

    def cache_key(%SignalTriggerSettings{} = settings) do
      construct_cache_key([
        settings.type,
        settings.target,
        settings.selector,
        settings.time_window,
        settings.operation
      ])
    end

    # Builds the notification message template and the key-value bindings
    # substituted into it when the alert fires.
    defp template_kv(values, settings) do
      %{identifier: slug} = values
      project = Project.by_slug(slug)

      {operation_template, operation_kv} =
        OperationText.to_template_kv(values, settings.operation)

      {:ok, human_readable_name} = Sanbase.Signal.human_readable_name(settings.signal)

      {curr_value_template, curr_value_kv} = OperationText.current_value(values)

      {details_template, details_kv} = OperationText.details(:signal, settings)

      kv =
        %{
          type: settings.type,
          operation: settings.operation,
          signal: settings.signal,
          project_name: project.name,
          project_slug: project.slug,
          project_ticker: project.ticker,
          signal_human_readable_name: human_readable_name
        }
        |> Map.merge(operation_kv)
        |> Map.merge(curr_value_kv)
        |> Map.merge(details_kv)

      template = """
      🔔 \#{{project_ticker}} | **{{project_name}}**'s {{signal_human_readable_name}} #{operation_template}.
      #{curr_value_template}.
      #{details_template}
      """

      {template, kv}
    end
  end
end
|
lib/sanbase/alerts/trigger/settings/signal_trigger_settings.ex
| 0.700792
| 0.412205
|
signal_trigger_settings.ex
|
starcoder
|
defmodule StepFlow.WorkflowView do
  @moduledoc """
  Renders workflows (single, list, creation payload) and aggregated
  workflow-status statistics.
  """
  use StepFlow, :view
  alias StepFlow.{ArtifactView, JobView, RightView, WorkflowView}
  require Logger

  def render("index.json", %{workflows: %{data: workflows, total: total}}) do
    %{
      data: render_many(workflows, WorkflowView, "workflow.json"),
      total: total
    }
  end

  def render("show.json", %{workflow: workflow}) do
    %{data: render_one(workflow, WorkflowView, "workflow.json")}
  end

  def render("created.json", %{workflow: workflow}) do
    %{data: render_one(workflow, WorkflowView, "workflow_created.json")}
  end

  # Full workflow payload. Associations (artifacts, jobs, rights) are only
  # rendered when they are loaded lists — an unloaded association is not a
  # list, so the key is omitted. Previously this was three copy-pasted
  # `result = if is_list(...)` blocks; factored into put_rendered/5.
  def render("workflow.json", %{workflow: workflow}) do
    %{
      schema_version: workflow.schema_version,
      id: workflow.id,
      identifier: workflow.identifier,
      version_major: workflow.version_major,
      version_minor: workflow.version_minor,
      version_micro: workflow.version_micro,
      tags: workflow.tags,
      reference: workflow.reference,
      steps: workflow.steps,
      parameters: workflow.parameters,
      created_at: workflow.inserted_at
    }
    |> put_rendered(:artifacts, workflow.artifacts, ArtifactView, "artifact.json")
    |> put_rendered(:jobs, workflow.jobs, JobView, "job.json")
    |> put_rendered(:rights, workflow.rights, RightView, "right.json")
  end

  def render("workflow_created.json", %{workflow: workflow}) do
    %{
      schema_version: workflow.schema_version,
      id: workflow.id,
      identifier: workflow.identifier,
      version_major: workflow.version_major,
      version_minor: workflow.version_minor,
      version_micro: workflow.version_micro,
      tags: workflow.tags,
      reference: workflow.reference,
      parameters: workflow.parameters,
      created_at: workflow.inserted_at
    }
  end

  # Empty statistics payload when there is nothing to aggregate.
  def render("statistics.json", %{workflows_status: []}) do
    %{
      data: %{
        processing: 0,
        error: 0,
        completed: 0,
        bins: []
      }
    }
  end

  # Aggregates statuses into overall counters plus per-time-interval bins
  # (bin 0 covers the most recent `time_interval` seconds before `end_date`).
  def render("statistics.json", %{
        workflows_status: workflows_status,
        time_interval: time_interval,
        end_date: end_date
      }) do
    %{
      data: %{
        processing: count_state(workflows_status, :processing),
        error: count_state(workflows_status, :error),
        completed: count_state(workflows_status, :completed),
        bins:
          workflows_status
          |> Enum.group_by(fn s ->
            NaiveDateTime.diff(end_date, s.inserted_at, :second)
            |> Kernel.div(time_interval)
          end)
          |> Enum.map(fn {bin, group} ->
            %{
              bin: bin,
              start_date:
                NaiveDateTime.add(end_date, -(bin + 1) * time_interval, :second)
                |> NaiveDateTime.to_string(),
              end_date:
                NaiveDateTime.add(end_date, -bin * time_interval, :second)
                |> NaiveDateTime.to_string(),
              processing: count_state(group, :processing),
              error: count_state(group, :error),
              completed: count_state(group, :completed)
            }
          end)
      }
    }
  end

  # Puts `key` into `result` rendered through `view`/`template`, but only
  # when `value` is a loaded list; otherwise returns `result` unchanged.
  defp put_rendered(result, key, value, view, template) do
    if is_list(value) do
      Map.put(result, key, render_many(value, view, template))
    else
      result
    end
  end

  # Counts statuses in the given state — single pass, replaces the
  # `Enum.filter |> length()` pattern.
  defp count_state(statuses, state) do
    Enum.count(statuses, fn s -> s.state == state end)
  end
end
|
lib/step_flow/view/workflow_view.ex
| 0.598782
| 0.48987
|
workflow_view.ex
|
starcoder
|
defmodule JourneyList do
@moduledoc """
Responsible for grouping together schedules and predictions based on an origin and destination, in
a form to be used in the schedule views.
"""
alias Predictions.Prediction
alias Schedules.{Schedule, Trip}
alias PredictedSchedule.Group
  # A JourneyList carries the journeys to display plus whether the rendered
  # list may be expanded/collapsed in the schedule views.
  defstruct journeys: [],
            expansion: :none

  @type t :: %__MODULE__{
          journeys: [Journey.t()],
          expansion: :expanded | :collapsed | :none
        }

  @type stop_id :: Stops.Stop.id_t()
  @type schedule_pair :: Group.schedule_pair_t()
  @type schedule_or_pair :: Schedule.t() | schedule_pair
  @type map_key_t :: Group.map_key_t()
  @type schedule_map :: %{map_key_t => %{stop_id => Schedule.t()}}
  @type schedule_pair_map :: %{map_key_t => schedule_pair}
  @type filter_flag_t :: Journey.Filter.filter_flag_t()
  @type opt_string :: String.t() | nil

  # Defaults for the optional keyword options accepted by build/5.
  @build_opts [origin_id: nil, destination_id: nil, current_time: nil]
@doc "Returns true if any of the journeys have a prediction"
@spec has_predictions?(t) :: boolean
def has_predictions?(journeys) do
journeys
|> Enum.any?(&Journey.has_prediction?/1)
end
@doc """
Builds a JourneyList from given schedules and predictions.
schedules: Schedules to be combined with predictions for Journeys
predictions: Predictions to combined with schedules for Journeys
origin_id (optional): Trip origin
destination_id (optional): Trip Destination
filter_flag: Flag to determine how the trip list will be filtered and sorted
current_time (optional): Current time, used to determine the first trip to in filtered/sorted list. If nil, all trips will be returned
keep_all?: Determines if all journeys should be returned, regardless of filter flag
"""
@spec build([schedule_or_pair], [Prediction.t()], filter_flag_t, boolean, Keyword.t()) :: t
def build(schedules, predictions, filter_flag, keep_all?, user_opts) do
opts = Keyword.merge(@build_opts, user_opts)
case schedules do
{:error, _json_api_error} ->
[]
_ ->
schedules
|> build_journeys(predictions, opts[:origin_id], opts[:destination_id])
|> from_journeys(filter_flag, opts[:current_time], keep_all?)
end
end
@doc """
Build a JourneyList using only predictions. This will also filter out predictions that are
missing departure_predictions. Limits to 5 predictions at most.
"""
@spec build_predictions_only([Schedule.t()], [Prediction.t()], opt_string, opt_string) :: t
def build_predictions_only(schedules, predictions, origin_id, destination_id) do
journey_list =
schedules
|> build_journeys(predictions, origin_id, destination_id)
|> Enum.filter(&Journey.has_departure_prediction?/1)
|> from_journeys(:predictions_then_schedules, nil, true)
%{journey_list | journeys: Enum.take(journey_list.journeys, 5)}
end
@spec build_journeys([schedule_or_pair], [Prediction.t()], opt_string, opt_string) :: [
Journey.t()
]
defp build_journeys(schedule_pairs, predictions, origin_id, destination_id)
when is_binary(origin_id) and is_binary(destination_id) do
predictions = match_schedule_direction(schedule_pairs, predictions)
journeys =
group_trips(
schedule_pairs,
predictions,
origin_id,
destination_id,
build_schedule_map_fn: &build_schedule_pair_map/2,
trip_mapper_fn: &build_journey(&1, &2, &3, origin_id, destination_id)
)
Enum.reject(journeys, &reversed_journey?/1)
end
defp build_journeys(schedules, predictions, origin_id, nil) when is_binary(origin_id) do
group_trips(
schedules,
predictions,
origin_id,
nil,
build_schedule_map_fn: &build_schedule_map/2,
trip_mapper_fn: &predicted_departures(&1, &2, &3, origin_id)
)
end
defp build_journeys(_schedules, _predictions, _origin_id, _destination_id), do: []
# Creates a JourneyList object from a list of journeys and the expansion value
# Both the expanded and collapsed journeys are calculated in order to determine the `expansion` field
@spec from_journeys([Journey.t()], Journey.Filter.filter_flag_t(), DateTime.t() | nil, boolean) ::
t
defp from_journeys(expanded_journeys, filter_flag, current_time, keep_all?) do
collapsed_journeys =
expanded_journeys
|> Journey.Filter.filter(filter_flag, current_time)
|> Journey.Filter.sort()
|> Journey.Filter.limit(!keep_all?)
%__MODULE__{
journeys:
if(keep_all?, do: Journey.Filter.sort(expanded_journeys), else: collapsed_journeys),
expansion: Journey.Filter.expansion(expanded_journeys, collapsed_journeys, keep_all?)
}
end
defp group_trips(schedules, predictions, origin_id, destination_id, mappers) do
prediction_map = Group.build_prediction_map(predictions, schedules, origin_id, destination_id)
schedule_map = Enum.reduce(schedules, %{}, mappers[:build_schedule_map_fn])
trip_mapper_fn = mappers[:trip_mapper_fn]
schedule_map
|> get_trips(prediction_map)
|> Enum.map(&trip_mapper_fn.(&1, schedule_map, prediction_map))
end
@spec build_journey(map_key_t, schedule_pair_map, Group.prediction_map_t(), stop_id, stop_id) ::
        Journey.t()
defp build_journey(key, schedule_map, prediction_map, origin_id, dest) do
  departure_prediction = prediction_map[key][origin_id]
  arrival_prediction = prediction_map[key][dest]

  case Map.get(schedule_map, key) do
    {departure, arrival} ->
      # A schedule pair exists for this trip: pair predictions with schedules.
      %Journey{
        departure: %PredictedSchedule{schedule: departure, prediction: departure_prediction},
        arrival: %PredictedSchedule{schedule: arrival, prediction: arrival_prediction},
        trip: first_trip([departure_prediction, departure, arrival_prediction, arrival])
      }

    nil ->
      # Prediction-only journey: no schedule pair was found for this trip.
      %Journey{
        departure: %PredictedSchedule{schedule: nil, prediction: departure_prediction},
        arrival: %PredictedSchedule{schedule: nil, prediction: arrival_prediction},
        trip: first_trip([departure_prediction, arrival_prediction])
      }
  end
end
# Builds an arrival-less Journey (origin departure only) for the given trip key.
@spec predicted_departures(map_key_t, schedule_map, Group.prediction_map_t(), stop_id) ::
        Journey.t()
defp predicted_departures(key, schedule_map, prediction_map, origin_id) do
  schedule = schedule_map[key][origin_id]
  prediction = prediction_map[key][origin_id]

  %Journey{
    departure: %PredictedSchedule{schedule: schedule, prediction: prediction},
    arrival: nil,
    trip: first_trip([prediction, schedule])
  }
end
# Union of the trip keys present in the predictions and in the schedules,
# prediction keys first (same concatenation order as before).
@spec get_trips(schedule_pair_map, Group.prediction_map_t()) :: [map_key_t]
defp get_trips(schedule_map, prediction_map) do
  Enum.uniq(Map.keys(prediction_map) ++ Map.keys(schedule_map))
end
# Reducer: indexes a {departure, arrival} schedule pair under its trip.
@spec build_schedule_pair_map({Schedule.t(), Schedule.t()}, schedule_pair_map) ::
        schedule_pair_map
defp build_schedule_pair_map({departure, _arrival} = pair, schedule_pair_map) do
  Map.put(schedule_pair_map, departure.trip, pair)
end
# Reducer: indexes a schedule under its trip, then under its stop id, merging
# with any schedules already collected for that trip.
@spec build_schedule_map(Schedule.t(), schedule_map) :: schedule_map
defp build_schedule_map(schedule, schedule_map) do
  stop_entry = %{schedule.stop.id => schedule}
  Map.update(schedule_map, schedule.trip, stop_entry, &Map.merge(&1, stop_entry))
end
# Returns the :trip of the first non-nil schedule/prediction, or nil when all
# candidates are nil.
@spec first_trip([Schedule.t() | Prediction.t() | nil]) :: Trip.t() | nil
defp first_trip(candidates) do
  case Enum.find(candidates, &(not is_nil(&1))) do
    nil -> nil
    schedule_or_prediction -> Map.get(schedule_or_prediction, :trip)
  end
end
@spec reversed_journey?(Journey.t()) :: boolean
defp reversed_journey?(journey) do
  departure = Journey.departure_time(journey)
  arrival = Journey.arrival_time(journey)

  cond do
    # No departure time: ignore (reject) the journey.
    is_nil(departure) -> true
    # Departure but no arrival: keep it.
    is_nil(arrival) -> false
    # Both present: reversed when the departure comes after the arrival.
    true -> Timex.after?(departure, arrival)
  end
end
# Rejects predictions that travel in the wrong direction relative to the
# schedule. The direction is taken from the first departure schedule; with no
# schedule pairs there is nothing to compare against, so all predictions pass.
@spec match_schedule_direction([{Schedule.t(), Schedule.t()}], [Prediction.t()]) :: [
        Prediction.t()
      ]
defp match_schedule_direction([], predictions), do: predictions

defp match_schedule_direction([{first_departure, _} | _], predictions) do
  expected_direction = first_departure.trip.direction_id
  Enum.filter(predictions, &match?(%{direction_id: ^expected_direction}, &1))
end
end
defimpl Enumerable, for: JourneyList do
  # Returning {:error, __MODULE__} tells the Enumerable protocol to fall back
  # to this implementation's reduce/3 for the operation.
  def count(_journey_list), do: {:error, __MODULE__}

  def member?(_journey_list, %JourneyList{}), do: {:error, __MODULE__}
  def member?(_journey_list, _other), do: {:ok, false}

  # Enumerating a JourneyList enumerates its journeys.
  def reduce(%{journeys: journeys}, acc, fun), do: Enumerable.reduce(journeys, acc, fun)

  def slice(_journey_list), do: {:error, __MODULE__}
end
|
apps/site/lib/journey_list.ex
| 0.849862
| 0.600071
|
journey_list.ex
|
starcoder
|
defmodule DumboOctopus.Simulation do
  @moduledoc """
  Implements the rules of the puzzle.

  The simulation state is a map of `{row, column}` locations to integer
  energy levels (0..9 between steps; 10 marks an octopus that has flashed
  during the current step).
  """

  @doc """
  Parse puzzle input into a map of locations to energy level
  """
  def parse_input(input) do
    for {row, i} <- String.split(input, "\n") |> Enum.with_index(),
        {number, j} <- String.trim(row) |> String.to_charlist() |> Enum.with_index(),
        into: %{} do
      # `number - ?0` converts the digit character to its integer value.
      {{i, j}, number - ?0}
    end
  end

  @doc """
  Advances one step in the simulation.
  """
  def step(sim) do
    # Propagate energy across the whole grid (flashed octopi are marked 10),
    # then reset every flashed octopus back to 0 for the next step.
    Enum.reduce(Map.keys(sim), sim, &propagate/2)
    |> Enum.map(fn
      {key, 10} -> {key, 0}
      other -> other
    end)
    |> Enum.into(%{})
  end

  def step(sim, steps) when steps <= 0 do
    sim
  end

  def step(sim, steps) do
    Stream.iterate(sim, &step/1) |> Enum.at(steps)
  end

  @doc """
  Advances in the simulation keeping also a count of the number of
  flashes. Returns {simulation_result, number_of_flashes}
  """
  def step_with_count({sim, count}) do
    new_sim = step(sim)
    {new_sim, count + count_flashes(new_sim)}
  end

  def step_with_count(sim) do
    step_with_count({sim, 0})
  end

  def step_with_count(sim, steps) when steps <= 0 do
    {sim, 0}
  end

  def step_with_count(sim, steps) do
    Stream.iterate(sim, &step_with_count/1) |> Enum.at(steps)
  end

  # An octopus reads 0 right after flashing, so counting zeroes after a step
  # counts that step's flashes.
  defp count_flashes(sim) do
    Enum.count(sim, fn {_, val} -> val == 0 end)
  end

  @doc """
  Find first iteration where all octopi flash simultaneously
  """
  def find_sync(sim) do
    Stream.iterate(sim, &step/1) |> Enum.find_index(&all_flash/1)
  end

  def all_flash(sim) do
    Enum.all?(sim, fn {_, val} -> val == 0 end)
  end

  @doc """
  Trigger a propagation on location `loc` for simulation state `sim`.
  The order of the arguments makes it more convenient to use as a function
  to apply as a reducing functions over a list of locations.
  """
  def propagate(loc, sim) do
    case sim[loc] do
      # Those at max energy flash and trigger a propagation
      # to all neighbours
      9 ->
        neighbours(sim, loc)
        # Skip neighbours that have already flashed this step (marked 10).
        # BUG FIX: this previously compared against 100, which can never
        # occur (levels stay within 0..10), making the filter dead code.
        # Harmless — the `10 ->` clause below also skips them — but the
        # intent was clearly to prune before recursing.
        |> Enum.filter(fn neighbour -> sim[neighbour] != 10 end)
        |> Enum.reduce(
          Map.put(sim, loc, 10),
          &propagate/2
        )

      # Those who have already flashed on this turn are skipped
      10 ->
        sim

      # For the rest, they simply increase their energy level by one
      val ->
        Map.put(sim, loc, val + 1)
    end
  end

  # The 3x3 neighbourhood around `loc` (diagonals included), restricted to
  # locations present in the grid and excluding `loc` itself.
  # Renamed the generator variables to avoid shadowing the matched `y`/`x`.
  def neighbours(sim, {y, x} = loc) do
    for ny <- (y - 1)..(y + 1),
        nx <- (x - 1)..(x + 1),
        Map.has_key?(sim, {ny, nx}),
        {ny, nx} != loc do
      {ny, nx}
    end
  end

  @doc """
  Returns the energy levels of the octopi as a list of lists (rows).
  """
  def octopi_values(sim) do
    {h, w} = dimensions(sim)

    for row <- 0..(h - 1) do
      for column <- 0..(w - 1) do
        sim[{row, column}]
      end
    end
  end

  defp dimensions(sim) when sim == %{} do
    {0, 0}
  end

  defp dimensions(sim) do
    Map.keys(sim)
    |> Enum.unzip()
    |> then(fn {ys, xs} -> {Enum.max(ys) + 1, Enum.max(xs) + 1} end)
  end
end
|
day11/dumbo_octopus/lib/dumbo_octopus/simulation.ex
| 0.752559
| 0.707556
|
simulation.ex
|
starcoder
|
defmodule Tarearbol do
  @moduledoc """
  `Tarearbol` module provides an interface to run tasks in easy way.

  ## Examples

      iex> result = Tarearbol.ensure(fn -> raise "¡?" end, attempts: 1, raise: false)
      iex> {:error, %{job: _job, outcome: outcome}} = result
      iex> {error, _stacktrace} = outcome
      iex> error
      %RuntimeError{message: "¡?"}
  """

  use Boundary,
    deps: [Application],
    exports: [
      Errand,
      DynamicManager,
      Job,
      Jobs,
      InternalWorker
    ]

  @doc """
  Ensures the task to be completed; restarts it when necessary.
  Possible options:
  - `attempts` [_default:_ `:infinity`] Might be any of `@Tarearbol.Utils.interval`
    type (`5` for five attempts, `:random` for the random amount etc)
  - `delay` [_default:_ `1 msec`]. Might be any of `@Tarearbol.Utils.interval`
    type (`1_000` or `1.0` for one second, `:timeout` for five seconds etc)
  - `on_success` [_default:_ `nil`], the function to be called on successful
    execution (`arity ∈ [0, 1]` or tuple `{Mod, fun}` where `fun` is of arity
    zero or one.) When the arity of given function is `1`, the result of
    task execution is passed
  - `on_retry` [_default:_ `nil`], same as above, called on retries after
    insuccessful attempts **or** one of `[:debug, :info, :warn, :error]` atoms
    to log a retry with default logger
  - `on_fail` [_default:_ `nil`], same as above, called when the task finally
    failed after `attempts` amount of insuccessful attempts
  """
  @spec ensure((() -> any()) | {atom(), atom(), list()}, keyword()) ::
          {:error, any} | {:ok, any}
  def ensure(job, opts \\ []), do: Tarearbol.Job.ensure(job, opts)

  @doc """
  Same as `Tarearbol.ensure/2`, but it raises on fail and returns the result
  itself on successful execution.
  """
  # SPEC FIX: the previous spec claimed `{:error, any} | {:ok, any}`, but per
  # the doc above the bang variant returns the bare result and raises on
  # failure — it never returns a tagged tuple.
  @spec ensure!((() -> any()) | {atom(), atom(), list()}, keyword()) :: any() | no_return()
  def ensure!(job, opts \\ []), do: Tarearbol.Job.ensure!(job, opts)

  @doc "Spawns an ensured job asynchronously, passing all options given."
  @spec spawn_ensured((() -> any()) | {atom(), atom(), list()}, keyword()) :: Task.t()
  def spawn_ensured(job, opts),
    do: Tarearbol.Errand.run_in(job, :none, Keyword.merge(opts, sidekiq: true, on_retry: :warn))

  @doc "Wrapper for [`Task.Supervisor.async_stream/4`](https://hexdocs.pm/elixir/Task.Supervisor.html#async_stream/4)."
  @spec ensure_all_streamed([(() -> any()) | {atom(), atom(), list()}], keyword()) :: %Stream{
          :done => nil,
          :funs => nonempty_maybe_improper_list()
        }
  def ensure_all_streamed(jobs, opts \\ []),
    do: Tarearbol.Jobs.ensure_all_streamed(jobs, opts)

  @doc "Executes `Tarearbol.ensure_all_streamed/2` and collects tasks results."
  @spec ensure_all([(() -> any()) | {atom(), atom(), list()}], keyword()) :: [
          {:error, any} | {:ok, any}
        ]
  def ensure_all(jobs, opts \\ []), do: Tarearbol.Jobs.ensure_all(jobs, opts)

  @doc """
  Runs a task specified by the first argument in a given interval.
  See [`Tarearbol.ensure/2`] for all possible variants of the `interval` argument.
  """
  @spec run_in(
          (() -> any()) | {atom(), atom(), list()},
          atom() | integer() | float(),
          keyword()
        ) :: Task.t()
  def run_in(job, interval, opts \\ []), do: Tarearbol.Errand.run_in(job, interval, opts)

  @doc """
  Runs a task specified by the first argument at a given time.
  If the second parameter is a [`DateTime`] struct, the task will be run once.
  If the second parameter is a [`Time`] struct, the task will be run at that time
  on daily basis.
  """
  @spec run_at(
          (() -> any()) | {atom(), atom(), list()},
          DateTime.t() | String.t(),
          keyword()
        ) :: Task.t()
  def run_at(job, at, opts \\ []), do: Tarearbol.Errand.run_at(job, at, opts)

  @doc "Spawns the task for the immediate async execution."
  @spec spawn((() -> any()) | {atom(), atom(), list()}, keyword()) :: Task.t()
  def spawn(job, opts \\ []), do: Tarearbol.Errand.spawn(job, opts)

  @doc "Executes all the scheduled tasks immediately, cleaning up the queue."
  @spec drain() :: [{:error, any} | {:ok, any}]
  def drain(jobs \\ Tarearbol.Application.jobs())
  def drain([]), do: []

  def drain(jobs) do
    # Kill the scheduler first so drained jobs are not run twice.
    Tarearbol.Application.kill()
    Enum.map(jobs, &Tarearbol.ensure/1)
  end
end
|
lib/tarearbol.ex
| 0.901652
| 0.58599
|
tarearbol.ex
|
starcoder
|
defmodule Absinthe.Middleware.Batch do
  @moduledoc """
  Batch the resolution of multiple fields.
  ## Motivation
  Consider the following graphql query:
  ```
  {
    posts {
      author {
        name
      }
    }
  }
  ```
  `posts` returns a list of `post` objects, which has an associated `author` field.
  If the `author` field makes a call to the database we have the classic N + 1 problem.
  What we want is a way to load all authors for all posts in one database request.
  This plugin provides this, without any eager loading at the parent level. That is,
  the code for the `posts` field does not need to do anything to facilitate the
  efficient loading of its children.
  ## Example Usage
  The API for this plugin is a little on the verbose side because it is not specific
  to any particular batching mechanism. That is, this API is just as useful for an Ecto
  based DB as it is for talking to S3 or the File System. Thus we anticipate people
  (including ourselves) will be creating additional functions more tailored to each
  of those specific use cases.
  Here is an example using the `Absinthe.Resolution.Helpers.batch/3` helper.
  ```elixir
  object :post do
    field :name, :string
    field :author, :user do
      resolve fn post, _, _ ->
        batch({__MODULE__, :users_by_id}, post.author_id, fn batch_results ->
          {:ok, Map.get(batch_results, post.author_id)}
        end)
      end
    end
  end
  def users_by_id(_, user_ids) do
    users = Repo.all from u in User, where: u.id in ^user_ids
    Map.new(users, fn user -> {user.id, user} end)
  end
  ```
  Let's look at this piece by piece:
  - `{__MODULE__, :users_by_id}`: is the batching function which will be used. It must
  be a 2 arity function. For details see the `batch_fun` typedoc.
  - `post.author_id`: This is the information to be aggregated. The aggregated values
  are the second argument to the batching function.
  - `fn batch_results`: This function takes the results from the batching function.
  it should return one of the resolution function values.
  Clearly some of this could be derived for ecto functions. Check out the Absinthe.Ecto
  library for something that provides this:
  ```elixir
  field :author, :user, resolve: assoc(:author)
  ```
  Such a function could be easily built upon the API of this module.
  """
  @behaviour Absinthe.Middleware
  @behaviour Absinthe.Plugin

  @typedoc """
  The function to be called with the aggregate batch information.
  It comes in both a 2 tuple and 3 tuple form. The first two elements are the module
  and function name. The third element is an arbitrary parameter that is passed
  as the first argument to the batch function.
  For example, one could parameterize the `users_by_id` function from the moduledoc
  to make it more generic. Instead of doing `{__MODULE__, :users_by_id}` you could do
  `{__MODULE__, :by_id, User}`. Then the function would be:
  ```elixir
  def by_id(model, ids) do
    model
    |> where([m], m.id in ^ids)
    |> Repo.all()
    |> Map.new(&{&1.id, &1})
  end
  ```
  It could also be used to set options unique to the execution of a particular
  batching function.
  """
  @type batch_fun :: {module, atom} | {module, atom, term}
  @type post_batch_fun :: (term -> Absinthe.Type.Field.result())

  # Plugin callback: runs before each resolution pass. Resets the list of
  # batch inputs collected during the pass, creating this plugin's entry in
  # the execution accumulator on first use.
  def before_resolution(exec) do
    case exec.acc do
      %{__MODULE__ => _} ->
        put_in(exec.acc[__MODULE__][:input], [])

      _ ->
        put_in(exec.acc[__MODULE__], %{input: [], output: %{}})
    end
  end

  # Middleware callback, first phase: record this field's batch input in the
  # accumulator and suspend the field. A {__MODULE__, {batch_key,
  # post_batch_fun}} entry is pushed onto the field's middleware so it
  # resumes (second `call/2` clause below) after the batches have run.
  def call(%{state: :unresolved} = res, {batch_key, field_data, post_batch_fun, batch_opts}) do
    acc = res.acc

    acc =
      update_in(acc[__MODULE__][:input], fn
        nil -> [{{batch_key, batch_opts}, field_data}]
        data -> [{{batch_key, batch_opts}, field_data} | data]
      end)

    %{
      res
      | state: :suspended,
        middleware: [{__MODULE__, {batch_key, post_batch_fun}} | res.middleware],
        acc: acc
    }
  end

  # Middleware callback, second phase: the batches were executed in
  # after_resolution/1; look up this batch's output (Map.fetch! — the key
  # must exist by now) and hand it to the field's post-batch function.
  def call(%{state: :suspended} = res, {batch_key, post_batch_fun}) do
    batch_data_for_fun =
      res.acc
      |> Map.fetch!(__MODULE__)
      |> Map.fetch!(:output)
      |> Map.fetch!(batch_key)

    res
    |> Absinthe.Resolution.put_result(post_batch_fun.(batch_data_for_fun))
  end

  # Plugin callback: after a resolution pass, run every collected batch and
  # store the results in the accumulator for the suspended fields to read.
  def after_resolution(exec) do
    output = do_batching(exec.acc[__MODULE__][:input])
    put_in(exec.acc[__MODULE__][:output], output)
  end

  # Groups the collected inputs by {batch_fun, batch_opts}, starts one Task
  # per group (with telemetry start/stop events around each), then awaits
  # them all. Each batch may set its own :timeout in batch_opts; the default
  # await timeout is 5 seconds.
  defp do_batching(input) do
    input
    |> Enum.group_by(&elem(&1, 0), &elem(&1, 1))
    |> Enum.map(fn {{batch_fun, batch_opts}, batch_data} ->
      telemetry_data = generate_telemetry_data(batch_fun, batch_opts, batch_data)
      emit_start_event(telemetry_data)

      {
        batch_opts,
        Task.async(fn ->
          {batch_fun, call_batch_fun(batch_fun, batch_data)}
        end),
        telemetry_data
      }
    end)
    |> Map.new(fn {batch_opts, task, telemetry_data} ->
      timeout = Keyword.get(batch_opts, :timeout, 5_000)
      result = Task.await(task, timeout)
      emit_stop_event(telemetry_data, result)
      result
    end)
  end

  # Telemetry metadata shared by the start and stop events for one batch.
  defp generate_telemetry_data(batch_fun, batch_opts, batch_data) do
    %{
      id: :erlang.unique_integer(),
      system_time: System.system_time(),
      start_time_mono: System.monotonic_time(),
      batch_fun: batch_fun,
      batch_opts: batch_opts,
      batch_data: batch_data
    }
  end

  defp emit_start_event(telemetry_data) do
    :telemetry.execute(
      [:absinthe, :middleware, :batch, :start],
      Map.take(telemetry_data, [:system_time]),
      Map.take(telemetry_data, [:id, :batch_fun, :batch_opts, :batch_data])
    )
  end

  defp emit_stop_event(telemetry_data, result) do
    metadata = %{
      id: telemetry_data.id,
      batch_fun: telemetry_data.batch_fun,
      batch_opts: telemetry_data.batch_opts,
      batch_data: telemetry_data.batch_data,
      result: result
    }

    :telemetry.execute(
      [:absinthe, :middleware, :batch, :stop],
      %{duration: System.monotonic_time() - telemetry_data.start_time_mono},
      metadata
    )
  end

  # Normalizes the 2-tuple form to the 3-tuple form with an empty config.
  defp call_batch_fun({module, fun}, batch_data) do
    call_batch_fun({module, fun, []}, batch_data)
  end

  defp call_batch_fun({module, fun, config}, batch_data) do
    apply(module, fun, [config, batch_data])
  end

  # If the flag is set we need to do another resolution phase.
  # otherwise, we do not
  def pipeline(pipeline, exec) do
    case exec.acc[__MODULE__][:input] do
      [_ | _] ->
        [Absinthe.Phase.Document.Execution.Resolution | pipeline]

      _ ->
        pipeline
    end
  end
end
|
lib/absinthe/middleware/batch.ex
| 0.918795
| 0.896704
|
batch.ex
|
starcoder
|
defmodule AutoApi.GetAvailabilityCommand do
  @moduledoc """
  Abstraction for a `get_availability` command in AutoApi (id `0x02`).
  The `struct` contains two fields:
  * `capability` specifies the capability of the command as a Capability module
  * `properties` specifies which properties for which the availability is requested. An empty list indicates all properties.
  """
  @behaviour AutoApi.Command

  @version AutoApi.version()
  @identifier 0x02
  @name :get_availability

  @type properties :: list(AutoApi.Capability.property())
  @type t :: %__MODULE__{
          capability: AutoApi.Capability.t(),
          properties: properties(),
          version: AutoApi.version()
        }

  @enforce_keys [:capability, :properties]
  defstruct [:capability, :properties, version: @version]

  @doc """
  Returns the identifier of the command.
  # Example
  iex> #{__MODULE__}.identifier()
  0x02
  """
  @impl true
  @spec identifier() :: byte()
  def identifier, do: @identifier

  @doc """
  Returns the name of the command.
  # Example
  iex> #{__MODULE__}.name()
  :get_availability
  """
  @impl true
  @spec name() :: AutoApi.Command.name()
  def name, do: @name

  @doc """
  Creates a new GetAvailabilityCommand structure with the given `capability` and `properties`.
  # Example
  iex> capability = AutoApi.SeatsCapability
  iex> properties = [:persons_detected]
  iex> #{__MODULE__}.new(capability, properties)
  %#{__MODULE__}{capability: AutoApi.SeatsCapability, properties: [:persons_detected], version: 12}
  """
  @spec new(AutoApi.Capability.t(), properties()) :: t()
  def new(capability, properties) do
    %__MODULE__{capability: capability, properties: properties}
  end

  @doc """
  Returns the properties set in the command.
  If the command specifies all properties (that is, it is an empty list) it will return a list
  of the state properties as by the specifications of the capability.
  ## Examples
  iex> command = #{__MODULE__}.new(AutoApi.RaceCapability, [:vehicle_moving, :gear_mode])
  iex> #{__MODULE__}.properties(command)
  [:vehicle_moving, :gear_mode]
  iex> command = #{__MODULE__}.new(AutoApi.HoodCapability, [])
  iex> #{__MODULE__}.properties(command)
  [:position, :nonce, :vehicle_signature, :timestamp, :vin, :brand]
  """
  @impl true
  @spec properties(t()) :: list(AutoApi.Capability.property())
  # An empty property list means "everything": expand to the capability's
  # full set of state properties.
  def properties(%__MODULE__{capability: capability, properties: []}),
    do: capability.state_properties()

  def properties(%__MODULE__{properties: properties}), do: properties

  @doc """
  Transforms a GetAvailabilityCommand struct into a binary format.
  If the command is somehow invalid, it returns an error.
  # Examples
  iex> # Request the door locks state availability
  iex> command = %#{__MODULE__}{capability: AutoApi.DoorsCapability, properties: [:locks_state]}
  iex> #{__MODULE__}.to_bin(command)
  <<12, 0, 32, 2, 6>>
  iex> # Request all properties availability for race state
  iex> command = %#{__MODULE__}{capability: AutoApi.RaceCapability, properties: []}
  iex> #{__MODULE__}.to_bin(command)
  <<12, 0, 87, 2>>
  """
  @impl true
  @spec to_bin(t()) :: binary()
  def to_bin(%__MODULE__{capability: capability, properties: properties}) do
    # Preamble: version, 2-byte capability id, command id — followed by one
    # byte per requested property.
    property_ids = for property <- properties, into: <<>>, do: <<capability.property_id(property)>>
    <<@version, capability.identifier()::binary, @identifier>> <> property_ids
  end

  @doc """
  Parses a command binary and returns a GetAvailabilityCommand struct
  ## Examples
  iex> #{__MODULE__}.from_bin(<<0x0C, 0x00, 0x33, 0x02, 0x01, 0x04>>)
  %#{__MODULE__}{capability: AutoApi.DiagnosticsCapability, properties: [:mileage, :engine_rpm], version: 12}
  """
  @impl true
  @spec from_bin(binary) :: t()
  def from_bin(<<@version, capability_id::binary-size(2), @identifier, properties::binary>>) do
    capability = AutoApi.Capability.get_by_id(capability_id)

    # Each remaining byte is one property id; translate them back to names.
    property_names =
      for property_id <- :binary.bin_to_list(properties) do
        capability.property_name(property_id)
      end

    new(capability, property_names)
  end
end
|
lib/auto_api/commands/get_availability_command.ex
| 0.911702
| 0.486332
|
get_availability_command.ex
|
starcoder
|
defmodule Mix.Releases.App do
  @moduledoc """
  Represents important metadata about a given application.
  """
  defstruct name: nil,
            vsn: nil,
            applications: [],
            included_applications: [],
            unhandled_deps: [],
            start_type: nil,
            path: nil

  @type start_type :: :permanent | :temporary | :transient | :load | :none
  @type t :: %__MODULE__{
          name: atom(),
          vsn: String.t,
          applications: [atom()],
          included_applications: [atom()],
          unhandled_deps: [atom()],
          start_type: start_type,
          path: nil | String.t
        }

  @doc """
  Create a new Application struct from an application name
  """
  @spec new(atom) :: nil | __MODULE__.t | {:error, String.t}
  def new(name), do: new(name, nil)

  @doc """
  Same as new/1, but specify the application's start type
  """
  @spec new(atom, start_type | nil) :: nil | __MODULE__.t | {:error, String.t}
  def new(name, start_type)
      when is_atom(name) and start_type in [nil, :permanent, :temporary, :transient, :load, :none] do
    _ = Application.load(name)

    case Application.spec(name) do
      nil ->
        nil

      spec ->
        vsn = '#{Keyword.get(spec, :vsn)}'
        deps = get_dependencies(name)
        apps = Keyword.get(spec, :applications, [])
        included = Keyword.get(spec, :included_applications, [])
        path = Application.app_dir(name)
        # Deps which are neither regular nor included applications must be
        # surfaced so the release can handle them explicitly.
        missing =
          MapSet.new(deps)
          |> MapSet.difference(MapSet.union(MapSet.new(apps), MapSet.new(included)))
          |> MapSet.to_list

        %__MODULE__{name: name, vsn: vsn,
                    start_type: start_type,
                    applications: apps,
                    included_applications: included,
                    unhandled_deps: missing,
                    path: path}
    end
  end

  def new(name, start_type), do: {:error, "Invalid start type for #{name}: #{start_type}"}

  # Gets a list of all applications which are children
  # of this application.
  # NOTE: the deprecated Enum.filter_map/3 (deprecated since Elixir 1.5) has
  # been replaced with an equivalent Enum.filter/2 |> Enum.map/2 pipeline.
  defp get_dependencies(name) do
    try do
      Mix.Dep.loaded_by_name([name], [])
      |> Enum.flat_map(fn %Mix.Dep{deps: deps} -> deps end)
      |> Enum.filter(&include_dep?/1)
      |> Enum.map(&map_dep/1)
    rescue
      Mix.Error -> # This is a top-level app
        cond do
          Mix.Project.umbrella? ->
            # find the app in the umbrella
            app_path = Path.join(Mix.Project.config[:apps_path], "#{name}")
            cond do
              File.exists?(app_path) ->
                Mix.Project.in_project(name, app_path, fn mixfile ->
                  mixfile.project[:deps]
                  |> Enum.filter(&include_dep?/1)
                  |> Enum.map(&map_dep/1)
                end)
              :else ->
                []
            end
          :else ->
            Mix.Project.config[:deps]
            |> Enum.filter(&include_dep?/1)
            |> Enum.map(&map_dep/1)
        end
    end
  end

  # A dependency is included in the release when it applies to :prod
  # (i.e. it has no :only restriction, or :prod is among its :only envs).
  defp include_dep?({_, _}), do: true
  defp include_dep?({_, _, opts}), do: include_dep?(opts)
  defp include_dep?(%Mix.Dep{opts: opts}), do: include_dep?(opts)
  defp include_dep?(opts) when is_list(opts) do
    case Keyword.get(opts, :only) do
      nil -> true
      envs when is_list(envs) -> Enum.member?(envs, :prod)
      env when is_atom(env) -> env == :prod
    end
  end

  # Extracts just the application name from any dependency representation.
  defp map_dep({a, _}), do: a
  defp map_dep({a, _, _opts}), do: a
  defp map_dep(%Mix.Dep{app: a}), do: a
end
|
lib/mix/lib/releases/models/app.ex
| 0.708414
| 0.416322
|
app.ex
|
starcoder
|
defmodule Cldr.Calendar.Formatter.Options do
  @moduledoc """
  Defines and validates the options
  for a calendar formatter.
  These options are passed to the formatter
  callbacks defined in `Cldr.Calendar.Formatter`.
  The valid options are:
  * `:calendar` is an calendar module defined with
    `use Cldr.Calendar`.
  * `:backend` is any module that applied
    `use Cldr`. The default is `Cldr.default_backend()`.
  * `:formatter` is any module implementing the
    `Cldr.Calendar.Formatter` behaviour.
  * `:locale` is any locale returned by `Cldr.validate_locale/1`.
    The default is `Cldr.get_locale()`.
  * `:number_system` is any valid number system name
    or number system type for the given locale.
    Available number systems for a locale are returned by
    `Cldr.Number.System.number_systems_for(locale, backend)`.
    The default is `:default`.
  * `:territory` is any territory returned by `Cldr.validate_territory/1`
    The default is `Cldr.Locale.territory_from_locale/1`.
  * `:caption` is a caption to be applied in any way defined
    by the `:formatter`. The default is `nil`.
  * `:class` is a class name that can be used any way
    defined by the `:formatter`. It is most commonly
    used to apply an HTML class to an enclosing tag. The
    default is `Cldr.Calendar.Format.default_calendar_css_class/0`
  * `:id` is an id that can be used any way
    defined by the `:formatter`. It is most commonly
    used to apply an HTML id to an enclosing tag. The
    default is `nil`
  * `:private` is for your private use in your formatter.
    For example if you wanted to pass a selected day and
    format it differently, you could provide
    `options.private = %{selected: ~D[2020-04-05]}` and
    take advantage of it while formatting the days.
  * `:today` is any `Date.t` that represents today.
    It is commonly used to allow a formatting to
    appropriately format a date that is today
    differently to other days on a calendar. The
    default is `Date.utc_today/0`
  * `:day_names` is a list of 2-tuples that
    map the day of the week to a localised day
    name that are most often used as headers
    for a month. The default is automatically
    calculated from the provided `:calendar`
    and `:locale`.
  """

  @valid_options [
    :backend,
    :locale,
    :calendar,
    :caption,
    :class,
    :day_names,
    :formatter,
    :id,
    :number_system,
    :private,
    :territory,
    :today
  ]

  defstruct @valid_options

  @typedoc """
  Formatter options
  """
  @type t :: %__MODULE__{
          calendar: module(),
          number_system: Cldr.Number.System.system_name(),
          territory: atom() | String.t(),
          locale: Cldr.LanguageTag.t(),
          formatter: module(),
          backend: module(),
          caption: String.t | nil,
          class: String.t | nil,
          id: String.t | nil,
          today: Date.t(),
          private: any(),
          day_names: [{1..7, String.t}]
        }

  alias Cldr.Number

  # Options are validated in @valid_options order; later validators (e.g.
  # :locale, :day_names) read earlier results (e.g. :backend, :calendar)
  # back out of the options list, so that ordering matters.
  @doc false
  def validate_options(options) when is_list(options) do
    options =
      Enum.reduce_while(@valid_options, options, fn option, options ->
        case validate_option(option, options, Keyword.get(options, option)) do
          {:ok, value} -> {:cont, Keyword.put(options, option, value)}
          other -> {:halt, other}
        end
      end)

    case options do
      {:error, _} = error -> error
      valid_options -> {:ok, struct(__MODULE__, valid_options)}
    end
  end

  def validate_option(:calendar, _options, nil) do
    {:ok, Cldr.Calendar.default_calendar()}
  end

  def validate_option(:calendar, _options, Calendar.ISO) do
    {:ok, Cldr.Calendar.default_calendar()}
  end

  def validate_option(:calendar, _options, calendar) do
    with {:ok, calendar} <- Cldr.Calendar.validate_calendar(calendar) do
      {:ok, calendar}
    end
  end

  def validate_option(:number_system, options, nil) do
    locale = Keyword.get(options, :locale)
    backend = Keyword.get(options, :backend)
    {:ok, Number.System.number_system_from_locale(locale, backend)}
  end

  def validate_option(:number_system, options, number_system) do
    locale = Keyword.get(options, :locale)
    backend = Keyword.get(options, :backend)

    with {:ok, number_system} <- Number.validate_number_system(locale, number_system, backend) do
      {:ok, number_system}
    end
  end

  def validate_option(:territory, options, nil) do
    locale = Keyword.get(options, :locale)
    {:ok, Cldr.Locale.territory_from_locale(locale)}
  end

  # BUG FIX: previously there was no clause for a user-supplied :territory,
  # so passing `territory: :US` raised a FunctionClauseError even though the
  # moduledoc documents the option. Validate it as documented.
  def validate_option(:territory, _options, territory) do
    with {:ok, territory} <- Cldr.validate_territory(territory) do
      {:ok, territory}
    end
  end

  def validate_option(:backend, _options, nil) do
    {:ok, Cldr.default_backend()}
  end

  def validate_option(:backend, _options, backend) do
    with {:ok, backend} <- Cldr.validate_backend(backend) do
      {:ok, backend}
    end
  end

  def validate_option(:formatter, _options, nil) do
    {:ok, Cldr.Calendar.Format.default_formatter_module()}
  end

  def validate_option(:formatter, _options, formatter) do
    if Cldr.Calendar.Format.formatter_module?(formatter) do
      {:ok, formatter}
    else
      {:error, Cldr.Calendar.Format.invalid_formatter_error(formatter)}
    end
  end

  def validate_option(:locale, options, nil) do
    backend = Keyword.get(options, :backend)
    {:ok, backend.get_locale()}
  end

  def validate_option(:locale, options, locale) do
    backend = Keyword.get(options, :backend)

    with {:ok, locale} <- Cldr.validate_locale(locale, backend) do
      {:ok, locale}
    end
  end

  def validate_option(:today, _options, nil) do
    {:ok, Date.utc_today}
  end

  # Accepts any Date-like map (anything with :year, :month and :day),
  # regardless of its calendar.
  def validate_option(:today, _options, date) do
    if is_map(date) and Map.has_key?(date, :year) and
         Map.has_key?(date, :month) and Map.has_key?(date, :day) do
      {:ok, date}
    else
      {:error, Cldr.Calendar.Format.invalid_date_error(date)}
    end
  end

  def validate_option(:class, _options, nil) do
    {:ok, Cldr.Calendar.Format.default_calendar_css_class()}
  end

  def validate_option(:class, _options, class) do
    {:ok, class}
  end

  def validate_option(:caption, _options, nil) do
    {:ok, nil}
  end

  def validate_option(:caption, _options, caption) do
    {:ok, caption}
  end

  def validate_option(:id, _options, nil) do
    {:ok, nil}
  end

  def validate_option(:id, _options, id) do
    {:ok, id}
  end

  def validate_option(:private, _options, nil) do
    {:ok, nil}
  end

  def validate_option(:private, _options, private) do
    {:ok, private}
  end

  def validate_option(:day_names, options, nil) do
    backend = Keyword.get(options, :backend)
    locale = Keyword.get(options, :locale)
    # Any week works for day names; 2000-01-01 is just a stable anchor date.
    {:ok, date} = Date.new(2000, 1, 1, Keyword.get(options, :calendar))
    {:ok, Cldr.Calendar.localize(date, :days_of_week, backend: backend, locale: locale)}
  end

  # BUG FIX: previously there was no clause for user-supplied :day_names,
  # so passing the documented option raised a FunctionClauseError. Accept
  # the caller's list as-is.
  def validate_option(:day_names, _options, day_names) do
    {:ok, day_names}
  end
end
|
lib/formatter/options.ex
| 0.919018
| 0.62458
|
options.ex
|
starcoder
|
defmodule ChildNode do
@moduledoc """
ChildNode provides facilities for starting another erlang node on the current machine.
This module enhances and abstracts the erlang `slave` module. After calling `slave.start` to
make sure the child node is running, it ensures that Elixir is started, after which it will run
any function passed in as the `:on_start` param. This function must be compiled and loaded on
both nodes.
After that, control is handed back to the caller who can use the `:rpc` module to invoke
functions remotely.
The child nodes process is linked to the caller's process, so if the caller dies, so will the
child node.
If additional logging is required, set `enable_sasl` option to `true`.
"""
@type param :: {:enable_sasl, boolean} | {:on_start, (() -> any)}
@type params :: [param]
defmodule Runner do
@moduledoc """
When the new node starts up, we often want to set up a supervision tree by calling
a function with `:rpc.call`. However, when the call ends, all the linked processes
in the rpc call will die. This runner encapsulates them and doesn't link to its caller,
so that any processes started by `Runner` will continue to live after the `:rpc` call.
"""
use GenServer
def start(mod, fun, args) do
GenServer.start(__MODULE__, [mod, fun, args])
end
def start(init_fn) when is_function(init_fn) do
GenServer.start(__MODULE__, [init_fn])
end
def init([mod, fun, args]) do
rv = apply(mod, fun, args)
{:ok, rv}
end
def init([init_fn]) do
{:ok, init_fn}
end
def get(runner_pid) do
GenServer.call(runner_pid, :get)
end
def do_init(runner_pid, args) do
GenServer.call(runner_pid, {:do_init, args})
end
def handle_call({:do_init, args}, _from, init_fn) do
{:reply, init_fn.(args), init_fn}
end
def handle_call(:get, _from, v) do
{:reply, v, v}
end
end
@spec start_link(Application.t(), atom, params) :: {:ok, pid} | {:error, any}
def start_link(app_to_start, node_name, params \\ [], timeout \\ 5_000) do
unless Node.alive?() do
{:ok, _} = Node.start(:"local@0.0.0.0")
end
code_paths = Enum.join(:code.get_path(), " ")
default_node_start_args = [
"-setcookie #{Node.get_cookie()}",
"-pa #{code_paths}",
"-s application ensure_all_started elixir",
"-connect_all false"
]
node_start_args =
if params[:enable_sasl] do
default_node_start_args ++ ["-logger handle_sasl_reports true"]
else
default_node_start_args
end
|> Enum.join(" ")
|> String.to_charlist()
node_name = to_node_name(node_name)
{:ok, node_name} = :slave.start_link('0.0.0.0', node_name, node_start_args)
on_start = params[:on_start]
rpc_args = [node_name, app_to_start, on_start, self()]
case :rpc.call(node_name, __MODULE__, :on_start, rpc_args, timeout) do
{:ok, start_fn_results} ->
{:ok, node_name, start_fn_results}
{:badrpc, :timeout} ->
{:error, :timeout}
end
end
# Runs on the child node via `:rpc.call` from `start_link/4`.
# Boots `app_to_start` (a single app atom or a list of them), then executes the
# optional start callback inside a `Runner` so its processes outlive the rpc call.
# Returns `{:ok, results}` where results is the callback's value, or nil if none.
def on_start(node_name, app_to_start, start_callback, _caller) do
  case app_to_start do
    apps when is_list(apps) ->
      for app <- apps do
        {:ok, _} = Application.ensure_all_started(app)
      end

    app when is_atom(app) ->
      {:ok, _started_apps} = Application.ensure_all_started(app)
  end

  start_fn_results =
    case start_callback do
      callback when is_function(callback) ->
        {:ok, runner_pid} = Runner.start(callback)
        # The 1-arity callback receives this node's name as its argument.
        Runner.do_init(runner_pid, node_name)

      {m, f, a} ->
        # MFA form: the Runner applies it during init; fetch the result.
        {:ok, runner_pid} = Runner.start(m, f, a)
        Runner.get(runner_pid)

      nil ->
        nil
    end

  {:ok, start_fn_results}
end
@doc "Runs the MFA in a process on the remote node"
@spec run(node, module(), atom(), [any]) :: any
def run(node, m, f, a) do
  # Start the Runner remotely so processes spawned by the MFA are not linked
  # to the rpc caller, then fetch the MFA's result with a second rpc.
  {:ok, runner_pid} = :rpc.call(node, Runner, :start, [m, f, a])
  :rpc.call(node, Runner, :get, [runner_pid])
end
# Normalizes a node-name atom: dotted, module-style names (e.g. `My.NodeName`)
# are reduced to their last segment in snake_case; plain names pass through.
defp to_node_name(node_name) when is_atom(node_name) do
  node_name
  |> Atom.to_string()
  |> String.split(".")
  |> sanitize_node_name()
end

# A single segment is used verbatim.
defp sanitize_node_name([segment]), do: String.to_atom(segment)

# Several segments: keep only the final one, converted to snake_case.
defp sanitize_node_name(segments) when is_list(segments) do
  segments
  |> List.last()
  |> Macro.underscore()
  |> String.downcase()
  |> String.to_atom()
end
end
|
test/support/child_node.ex
| 0.731346
| 0.460228
|
child_node.ex
|
starcoder
|
defmodule CTE.Adapter do
  @moduledoc ~S"""
  Specification of the Closure Table implementation.

  Most of the functions implementing the `CTE.Adapter` behavior, will accept the following options:

  - `:limit`, to limit the total number of nodes returned, when finding the ancestors or the descendants for nodes
  - `:itself`, accepting a boolean value. When `true`, the node used for finding its neighbors are returned as part of the results. Default: true
  - `:nodes`, accepting a boolean value. When `true`, the results are containing additional information about the nodes. Default: false
  """

  # An adapter is identified by its module.
  @type t :: module
  @type options :: Keyword.t()

  defmacro __using__(opts) do
    # `bind_quoted` re-binds `opts` inside the quoted block; the injected code
    # does not currently use it, but it is kept available for adapters.
    quote bind_quoted: [opts: opts] do
      use GenServer
      @behaviour CTE.Adapter

      @doc """
      start the Adapter server
      """
      def start_link(init_args) do
        GenServer.start_link(__MODULE__, init_args)
      end

      @doc """
      Initializes the adapter supervision tree by returning the children and adapter metadata.
      """
      def init(repo: _repo, config: config) do
        {:ok, config}
      end

      # Adapters may replace these defaults with their own implementations.
      defoverridable start_link: 1, init: 1
    end
  end

  @doc """
  Retrieve the descendants of a node
  """
  @callback descendants(pid(), ancestor :: any(), options) :: {:ok, CTE.nodes()} | {:error, any()}

  @doc """
  Retrieve the ancestors of a node
  """
  @callback ancestors(pid(), descendant :: any(), options) :: {:ok, CTE.nodes()} | {:error, any()}

  @doc """
  Delete a leaf or a subtree.

  When limit: 1, the default value, then delete only the leafs, else the entire subtree
  """
  @callback delete(pid(), leaf :: any(), options) :: :ok | {:error, any()}

  @doc """
  Insert a node under an existing ancestor
  """
  @callback insert(pid(), leaf :: any(), ancestor :: any(), options) ::
              {:ok, CTE.t()} | {:error, any()}

  @doc """
  Move a subtree from one location to another.

  First, the subtree and its descendants are disconnected from its ancestors. And second, the subtree is inserted under the new parent (ancestor) and the subtree, including its descendants, is declared as descendants of all the new ancestors.
  """
  @callback move(pid(), leaf :: any(), ancestor :: any(), options) :: :ok | {:error, any()}

  @doc """
  Calculate and return a "tree" structure containing the paths and the nodes under the given leaf/node
  """
  @callback tree(pid(), leaf :: any(), options) :: {:ok, CTE.nodes()} | {:error, any()}

  @doc false
  # Resolves adapter metadata from the registry by repo name or pid.
  def lookup_meta(repo_name_or_pid) do
    {_, meta} = CTE.Registry.lookup(repo_name_or_pid)
    meta
  end
end
|
lib/cte/adapter.ex
| 0.858807
| 0.648745
|
adapter.ex
|
starcoder
|
defmodule Beamchmark.Suite do
  @moduledoc """
  The module defines a struct representing a single run of benchmark. It is also responsible for running the
  benchmark and saving/loading the results.

  The results are serialized and stored in `output_dir / scenario name / delay_duration` directory, where
  `scenario name` is the name of module implementing scenario (without separating dots) and `output_dir`,
  `delay`, `duration` are fetched from the suite's configuration.
  """

  alias Beamchmark.Scenario
  alias __MODULE__.{Configuration, SystemInfo, Measurements}

  @type t :: %__MODULE__{
          scenario: Scenario.t(),
          configuration: Configuration.t(),
          system_info: SystemInfo.t(),
          measurements: Measurements.t() | nil
        }

  @enforce_keys [
    :scenario,
    :configuration,
    :system_info,
    :measurements
  ]
  defstruct @enforce_keys

  # File names used inside the output directory for the current and previous run.
  @suite_filename "suite"
  @old_suite_filename "suite_old"

  @doc """
  Builds a suite for `scenario` with `configuration`; `measurements` starts as `nil`.

  Raises if `scenario` is not a module implementing the `Beamchmark.Scenario` behaviour.
  """
  @spec init(Scenario.t(), Configuration.t()) :: t()
  def init(scenario, %Configuration{} = configuration) do
    # `module_info(:attributes)` exposes the compiled module's `@behaviour`
    # attributes, letting us verify the contract at runtime.
    implements_scenario? =
      scenario.module_info(:attributes)
      |> Keyword.get(:behaviour, [])
      |> Enum.member?(Scenario)

    unless implements_scenario? do
      raise "#{inspect(scenario)} is not a module implementing #{inspect(Scenario)} behaviour."
    end

    %__MODULE__{
      scenario: scenario,
      configuration: configuration,
      system_info: SystemInfo.init(),
      measurements: nil
    }
  end

  @doc """
  Runs the scenario in a background task, waits `config.delay` seconds, gathers
  measurements for `config.duration` seconds, and returns the suite with
  `measurements` filled in.
  """
  @spec run(t()) :: t()
  def run(%__MODULE__{scenario: scenario, configuration: config} = suite) do
    Mix.shell().info("Running scenario \"#{inspect(scenario)}\"...")
    task = Task.async(fn -> suite.scenario.run() end)

    Mix.shell().info("Waiting #{inspect(config.delay)} seconds...")
    Process.sleep(:timer.seconds(config.delay))

    Mix.shell().info("Benchmarking for #{inspect(config.duration)} seconds...")
    measurements = Measurements.gather(config.duration, config.cpu_interval)

    if Process.alive?(task.pid) do
      Mix.shell().info("Benchmarking finished. Stopping scenario.")

      # `{:exit, reason}` here means the task died on its own before/while being
      # shut down; a task we killed ourselves falls into `_other`.
      case Task.shutdown(task, :brutal_kill) do
        {:exit, reason} ->
          raise "The scenario process unexpectedly died due to #{inspect(reason)}."

        _other ->
          :ok
      end
    else
      Mix.shell().error("""
      The scenario had been completed before the measurements ended.
      Consider decreasing duration/delay or making the scenario run longer to get more accurate results.
      """)
    end

    %__MODULE__{suite | measurements: measurements}
  end

  @doc """
  Serializes the suite into its output directory, first renaming any previous
  result so it can serve as the comparison base for `try_load_base/1`.
  """
  @spec save(t()) :: :ok
  def save(%__MODULE__{configuration: config} = suite) do
    output_dir = output_dir_for(suite)
    File.mkdir_p!(output_dir)

    new_path = Path.join([output_dir, @suite_filename])
    old_path = Path.join([output_dir, @old_suite_filename])

    # Keep the previous run around as the "base" for comparisons.
    if File.exists?(new_path) do
      File.rename!(new_path, old_path)
    end

    File.write!(new_path, :erlang.term_to_binary(suite))
    # Fixed message: the original embedded a stray backtick and double-quoted the
    # directory by interpolating `inspect/1` inside escaped quotes.
    Mix.shell().info("The results were saved to \"#{config.output_dir}\" directory.")
  end

  @doc """
  Loads the previously saved suite (the comparison base), if one exists.
  """
  @spec try_load_base(t()) :: {:ok, t()} | {:error, File.posix()}
  def try_load_base(%__MODULE__{} = suite) do
    old_path = Path.join([output_dir_for(suite), @old_suite_filename])

    # The file is produced locally by `save/1`, so deserializing with
    # `binary_to_term` is acceptable here (trusted input).
    with {:ok, binary} <- File.read(old_path) do
      {:ok, :erlang.binary_to_term(binary)}
    end
  end

  # Builds `output_dir / ScenarioName (dots stripped) / delay_duration`.
  defp output_dir_for(%__MODULE__{configuration: config} = suite) do
    scenario_dir = suite.scenario |> Atom.to_string() |> String.replace(".", "")
    config_dir = "#{config.delay}_#{config.duration}"
    Path.join([config.output_dir, scenario_dir, config_dir])
  end
end
|
lib/beamchmark/suite.ex
| 0.788135
| 0.511229
|
suite.ex
|
starcoder
|
defmodule MapSet do
  @moduledoc """
  A set store.

  The `MapSet` is represented internally as a struct, therefore
  `%MapSet{}` can be used whenever there is a need to match
  on any `MapSet`. Note though the struct fields are private and
  must not be accessed directly. Instead, use the functions on this
  or in the `Set` module.

  The `MapSet` is implemented using `Map` data type.
  For more information about the functions
  and their APIs, please consult the `Set` module.
  """

  @behaviour Set

  # Members are stored as map keys; the associated value is always `nil`.
  defstruct map: %{}

  def new(), do: %MapSet{}

  def delete(%MapSet{map: map} = set, term) do
    %{set | map: Map.delete(map, term)}
  end

  # Elements of `set1` that are not in `set2`.
  def difference(%MapSet{} = set1, %MapSet{} = set2) do
    reduce(set2, {:cont, set1}, fn value, acc ->
      {:cont, delete(acc, value)}
    end) |> elem(1)
  end

  def disjoint?(%MapSet{} = set1, %MapSet{} = set2) do
    # Iterate over the smaller set. The swap must be bound *outside* the `if`:
    # a match inside `if` does not leak into the enclosing scope, so the
    # previous `if cond, do: {set1, set2} = {set2, set1}` never took effect.
    {set1, set2} = if size(set1) > size(set2), do: {set2, set1}, else: {set1, set2}

    reduce(set1, {:cont, true}, fn value, _ ->
      if member?(set2, value) do
        {:halt, false}
      else
        {:cont, true}
      end
    end) |> elem(1)
  end

  def equal?(%MapSet{map: map1}, %MapSet{map: map2}) do
    Map.equal?(map1, map2)
  end

  def intersection(%MapSet{} = set1, %MapSet{} = set2) do
    # Same smaller-set optimization (and the same scoping fix) as disjoint?/2.
    {set1, set2} = if size(set1) > size(set2), do: {set2, set1}, else: {set1, set2}

    reduce(set1, {:cont, new()}, fn value, acc ->
      if member?(set2, value) do
        {:cont, put(acc, value)}
      else
        {:cont, acc}
      end
    end) |> elem(1)
  end

  def member?(%MapSet{map: map}, value) do
    Map.has_key?(map, value)
  end

  def put(%MapSet{map: map} = set, value) do
    %{set | map: Map.put(map, value, nil)}
  end

  def size(%MapSet{map: map}) do
    map_size(map)
  end

  def subset?(%MapSet{} = set1, %MapSet{} = set2) do
    # A larger set can never be a subset of a smaller one.
    if size(set1) <= size(set2) do
      reduce(set1, {:cont, true}, fn value, _ ->
        if member?(set2, value), do: {:cont, true}, else: {:halt, false}
      end) |> elem(1)
    else
      false
    end
  end

  @doc false
  def reduce(%MapSet{} = set, acc, fun) do
    Enumerable.List.reduce(to_list(set), acc, fun)
  end

  def to_list(%MapSet{map: map}) do
    Map.keys(map)
  end

  def union(%MapSet{map: map1}, %MapSet{map: map2}) do
    %MapSet{map: Map.merge(map1, map2)}
  end

  defimpl Enumerable do
    def reduce(set, acc, fun), do: MapSet.reduce(set, acc, fun)
    def member?(set, val), do: {:ok, MapSet.member?(set, val)}
    def count(set), do: {:ok, MapSet.size(set)}
  end

  defimpl Collectable do
    def into(original) do
      {original, fn
        set, {:cont, x} -> MapSet.put(set, x)
        set, :done -> set
        _, :halt -> :ok
      end}
    end
  end

  defimpl Inspect do
    import Inspect.Algebra

    def inspect(set, opts) do
      concat ["#MapSet<", Inspect.List.inspect(MapSet.to_list(set), opts), ">"]
    end
  end
end
|
lib/elixir/lib/map_set.ex
| 0.768863
| 0.666227
|
map_set.ex
|
starcoder
|
defmodule Ash.Dsl.Transformer do
  @moduledoc """
  A transformer manipulates and/or validates the entire DSL state of a resource.

  It's `transform/2` takes a `map`, which is just the values/configurations at each point
  of the DSL. Don't manipulate it directly, if possible, instead use functions like
  `get_entities/3` and `replace_entity/5` to manipulate it.

  Use the `after?/1` and `before?/1` callbacks to ensure that your transformer
  runs either before or after some other transformer.

  Return `true` in `after_compile/0` to have the transformer run in an `after_compile` hook,
  but keep in mind that no modifications to the dsl structure will be retained, so there is no
  point in returning a new dsl structure from `transform/2` if `after_compile/0` is defined. Instead,
  simply return `:ok` or `{:error, error}`
  """

  @callback transform(module, map) :: :ok | {:ok, map} | {:error, term} | :halt
  @callback before?(module) :: boolean
  @callback after?(module) :: boolean
  @callback after_compile?() :: boolean

  defmacro __using__(_) do
    quote do
      @behaviour Ash.Dsl.Transformer

      # Default ordering/compile-hook answers; transformers override as needed.
      def before?(_), do: false
      def after?(_), do: false
      def after_compile?, do: false

      defoverridable before?: 1, after?: 1, after_compile?: 0
    end
  end

  # Stores `value` under `key` in the dsl's `:persist` map (created on first use).
  def persist(dsl, key, value) do
    Map.update(dsl, :persist, %{key => value}, &Map.put(&1, key, value))
  end

  # Reads a value previously stored with `persist/3`, falling back to `default`.
  def get_persisted(dsl, key, default \\ nil) do
    dsl
    |> Map.get(:persist, %{})
    |> Map.get(key, default)
  end

  # Builds an entity struct named `name` located at section `path` within the
  # given extension, validating `opts` against the entity's schema.
  def build_entity(extension, path, name, opts) do
    do_build_entity(extension.sections(), path, name, opts)
  end

  # Base case: one path segment left — find the section, then the entity in it.
  defp do_build_entity(sections, [section_name], name, opts) do
    section = Enum.find(sections, &(&1.name == section_name))
    entity = Enum.find(section.entities, &(&1.name == name))
    do_build(entity, opts)
  end

  # Two segments left: the second may name an entity (whose nested entities we
  # search for the target) or a nested section (handled in the else branch).
  defp do_build_entity(
         sections,
         [section_name, maybe_entity_name],
         maybe_nested_entity_name,
         opts
       ) do
    section = Enum.find(sections, &(&1.name == section_name))
    entity = Enum.find(section.entities, &(&1.name == maybe_entity_name))

    sub_entity =
      entity.entities
      |> Keyword.values()
      |> List.flatten()
      |> Enum.find(&(&1.name == maybe_nested_entity_name))

    if sub_entity do
      do_build(sub_entity, opts)
    else
      do_build_entity(section.sections, [maybe_entity_name], maybe_nested_entity_name, opts)
    end
  end

  # Longer paths: descend one section and recurse.
  defp do_build_entity(sections, [section_name | rest], name, opts) do
    section = Enum.find(sections, &(&1.name == section_name))
    do_build_entity(section.sections, rest, name, opts)
  end

  defp do_build(entity, opts) do
    # Split `opts` into values for nested entities vs. plain schema options.
    entity_names =
      entity.entities
      |> Kernel.||([])
      |> Keyword.keys()

    {entities, opts} = Keyword.split(opts, entity_names)

    case Ash.OptionsHelpers.validate(opts, entity.schema) do
      {:ok, opts} ->
        # First build the struct from validated opts, then overlay nested entities.
        result = struct(struct(entity.target, opts), entities)
        Ash.Dsl.Entity.transform(entity.transform, result)

      {:error, error} ->
        {:error, error}
    end
  end

  # Adds an entity at `path`; `opts[:type]` of :prepend (default) or :append
  # controls where in the entity list it lands.
  def add_entity(dsl_state, path, entity, opts \\ []) do
    Map.update(dsl_state, path, %{entities: [entity], opts: []}, fn config ->
      Map.update(config, :entities, [entity], fn entities ->
        if (opts[:type] || :prepend) == :prepend do
          [entity | entities]
        else
          entities ++ [entity]
        end
      end)
    end)
  end

  # Lists the entities registered at `path` (empty list if none).
  def get_entities(dsl_state, path) do
    dsl_state
    |> Map.get(path, %{entities: []})
    |> Map.get(:entities, [])
  end

  # Reads a single option at `path`; returns nil when absent.
  def get_option(dsl_state, path, option) do
    dsl_state
    |> Map.get(path, %{opts: []})
    |> Map.get(:opts)
    |> Kernel.||([])
    |> Keyword.get(option)
  end

  # Sets (or overwrites) a single option at `path`, creating the path entry
  # and its :opts list if missing.
  def set_option(dsl_state, path, option, value) do
    dsl_state
    |> Map.put_new(path, %{opts: []})
    |> Map.update!(path, fn existing_opts ->
      existing_opts
      |> Map.put_new(:opts, [])
      |> Map.update!(:opts, fn opts ->
        Keyword.put(opts, option, value)
      end)
    end)
  end

  # Replaces every entity at `path` for which `matcher` returns true.
  def replace_entity(dsl_state, path, replacement, matcher) do
    Map.update(dsl_state, path, %{entities: [replacement], opts: []}, fn config ->
      Map.update(config, :entities, [replacement], fn entities ->
        replace_match(entities, replacement, matcher)
      end)
    end)
  end

  defp replace_match(entities, replacement, matcher) do
    Enum.map(entities, fn entity ->
      if matcher.(entity) do
        replacement
      else
        entity
      end
    end)
  end

  # Orders transformers so that their pairwise before?/after? constraints hold
  # (an insertion sort driven by the callbacks).
  def sort(transformers) do
    Enum.reduce(transformers, [], fn transformer, list ->
      put_transformer_in(list, transformer)
    end)
  end

  defp put_transformer_in([], transformer), do: [transformer]

  defp put_transformer_in([first | rest] = remaining, transformer) do
    if transformer.before?(first) or first.after?(transformer) do
      [transformer | remaining]
    else
      [first | put_transformer_in(rest, transformer)]
    end
  end
end
|
lib/ash/dsl/transformer.ex
| 0.808521
| 0.571288
|
transformer.ex
|
starcoder
|
defmodule Rummage.Phoenix.SearchView do
  @moduledoc """
  Search View Module for Rummage. This has view helpers that can generate rummagable links and forms.

  Usage:

  ```elixir
  defmodule MyApp.ProductView do
    use MyApp.Web, :view
    use Rummage.Phoenix.View, only: [:search]
  end
  ```

  OR

  ```elixir
  defmodule MyApp.ProductView do
    use MyApp.Web, :view
    use Rummage.Phoenix.View
  end
  ```
  """

  import Phoenix.HTML
  import Phoenix.HTML.Form

  @doc """
  This macro includes the helpers functions for searching.

  Provides helpers function `search_form/3` for creating search form in an html.eex
  file of using `Phoenix`.

  Usage:
  Just add the following code in the index template. Make sure that you're passing
  rummage from the controller. Please look at the
  [README](https://github.com/Excipients/rummage_phoenix) for more details

  ```elixir
  <%= search_form(@conn, @rummage, [fields:
    [
      name: %{label: "Search by Product Name", search_type: "ilike"},
      price: %{label: "Search by Price", search_type: "eq"},
    ], button_class: "btn",
  ]) %>
  ```
  """
  def search_form(conn, rummage, link_params, opts \\ []) do
    search = rummage["search"]
    # Current sort/paginate state is carried through the form as JSON in hidden inputs.
    sort = if rummage["sort"], do: Poison.encode!(rummage["sort"]), else: ""
    paginate = if rummage["paginate"], do: Poison.encode!(rummage["paginate"]), else: ""
    button_class = Keyword.get(link_params, :button_class, "btn btn-primary")
    button_label = Keyword.get(link_params, :button_label, "Search")
    fields = Keyword.fetch!(link_params, :fields)

    # NOTE(review): `String.to_atom/1` on "#{opts[:struct]}_path" creates atoms
    # dynamically; safe only while `opts[:struct]` is app-controlled, never user input.
    form_for(conn, apply(opts[:helpers], String.to_atom("#{opts[:struct]}_path"), [conn, :index]), [as: :rummage, method: :get], fn(f) ->
      {
        :safe,
        # Each form helper returns {:safe, iodata}; elem(_, 1) unwraps so the
        # pieces can be concatenated into one safe iodata list.
        elem(hidden_input(f, :sort, value: sort, class: "form-control"), 1) ++
        elem(hidden_input(f, :paginate, value: paginate, class: "form-control"), 1) ++
        elem(inputs_for(f, :search, fn(s) ->
          {
            :safe,
            inner_form(s, fields, search)
          }
        end), 1) ++
        elem(submit(raw(button_label), class: button_class), 1)
      }
    end)
  end

  # Renders, for every configured field, a label plus hidden search_type/assoc
  # inputs and the visible search_term input, pre-filled from the current `search`.
  defp inner_form(s, fields, search) do
    Enum.map(fields, fn(field) ->
      field_name = elem(field, 0)
      field_params = elem(field, 1)

      label = field_params[:label] || "Search by #{Phoenix.Naming.humanize(field_name)}"
      search_type = field_params[:search_type] || "like"

      # Association chain rendered as "a -> b" for the backend search hook.
      assoc = case field_params[:assoc] do
        nil -> ""
        assocs -> Enum.join(assocs, " -> ")
      end

      elem(label(s, field_name, label, class: "control-label"), 1) ++
      elem(inputs_for(s, field_name, fn(e) ->
        {
          :safe,
          elem(hidden_input(e, :search_type, value: search_type, class: "form-control"), 1) ++
          elem(hidden_input(e, :assoc, value: assoc, class: "form-control"), 1) ++
          elem(search_input(e, :search_term, value: search[Atom.to_string(field_name)]["search_term"], class: "form-control"), 1)
        }
      end), 1)
    end) |> Enum.reduce([], & &2 ++ &1)
  end
end
|
lib/rummage_phoenix/hooks/views/search_view.ex
| 0.726426
| 0.794305
|
search_view.ex
|
starcoder
|
defmodule Mix.Tasks.Doctor do
  @moduledoc """
  Doctor is a command line utility that can be used to ensure that your project
  documentation remains healthy. For more in depth documentation on Doctor or to
  file bug/feature requests, please check out https://github.com/akoutmos/doctor.

  The `mix doctor` command supports the following CLI flags (all of these options
  and more are also configurable from your `.doctor.exs` file). The following CLI
  flags are supported:

  ```
  --config_file Provide a relative or absolute path to a `.doctor.exs`
  file to use during the execution of the mix command.
  --full When generating a Doctor report of your project, use
  the Doctor.Reporters.Full reporter.
  --short When generating a Doctor report of your project, use
  the Doctor.Reporters.Short reporter.
  --summary When generating a Doctor report of your project, use
  the Doctor.Reporters.Summary reporter.
  --raise If any of your modules fails Doctor validation, then
  raise an error and return a non-zero exit status.
  --failed If set only the failed modules will be reported. Works with
  --full and --short options.
  --umbrella By default, in an umbrella project, each app will be
  evaluated independently against the specified thresholds
  in your .doctor.exs file. This flag changes that behavior
  by aggregating the results of all your umbrella apps,
  and then comparing those results to the configured
  thresholds.
  ```
  """

  use Mix.Task

  alias Doctor.{CLI, Config}
  alias Doctor.Reporters.{Full, Short, Summary}

  @shortdoc "Documentation coverage report"
  @recursive true
  # Name under which the cross-app accumulator Agent is registered in umbrella runs.
  @umbrella_accumulator Doctor.Umbrella

  @impl true
  def run(args) do
    default_config_opts = Config.config_defaults()
    cli_arg_opts = parse_cli_args(args)
    config_file_opts = load_config_file(cli_arg_opts)

    # Aggregate all of the various options sources
    # Precedence order is:
    # default < config file < cli args
    config =
      default_config_opts
      |> Map.merge(config_file_opts)
      |> Map.merge(cli_arg_opts)

    if config.umbrella do
      run_umbrella(config)
    else
      run_default(config)
    end
  end

  # Umbrella mode: the task runs once per child app (@recursive); each run
  # appends its module reports to a shared Agent that is processed at exit.
  defp run_umbrella(config) do
    module_report_list = CLI.generate_module_report_list(config)

    acc_pid =
      case Process.whereis(@umbrella_accumulator) do
        nil -> init_umbrella_acc(config)
        pid -> pid
      end

    Agent.update(acc_pid, fn acc ->
      acc ++ module_report_list
    end)

    :ok
  end

  # Non-umbrella mode: generate and validate the report in a single pass.
  defp run_default(config) do
    result =
      config
      |> CLI.generate_module_report_list()
      |> CLI.process_module_report_list(config)

    unless result do
      # NOTE(review): this message suggests a System.halt/exit call was removed
      # here; on failure without --raise the task currently only prints this line.
      IO.puts("\nSystem exit call used to be here.\n")

      if config.raise do
        Mix.raise("Doctor validation has failed and raised an error")
      end
    end

    :ok
  end

  # Starts the accumulator Agent and registers an at-exit hook that validates
  # the aggregated report list once all umbrella apps have contributed.
  defp init_umbrella_acc(config) do
    {:ok, pid} = Agent.start_link(fn -> [] end, name: @umbrella_accumulator)

    System.at_exit(fn _ ->
      module_report_list = Agent.get(pid, & &1)
      Agent.stop(pid)
      result = CLI.process_module_report_list(module_report_list, config)

      unless result do
        if config.raise do
          Mix.raise("Doctor validation has failed and raised an error")
        end

        exit({:shutdown, 0})
      end
    end)

    pid
  end

  # When --config_file was given, load exactly that file or fall back to defaults.
  defp load_config_file(%{config_file_path: file_path} = _cli_args) do
    full_path = Path.expand(file_path)

    if File.exists?(full_path) do
      Mix.shell().info("Doctor file found. Loading configuration.")
      {config, _bindings} = Code.eval_file(full_path)
      config
    else
      Mix.shell().error("Doctor file not found at path \"#{full_path}\". Using defaults.")
      %{}
    end
  end

  defp load_config_file(_) do
    # If we are performing this operation on an umbrella app then look to
    # the project root for the config file
    file =
      if Mix.Task.recursing?() do
        Path.join(["..", "..", Config.config_file()])
      else
        Config.config_file()
      end

    if File.exists?(file) do
      Mix.shell().info("Doctor file found. Loading configuration.")
      {config, _bindings} = Code.eval_file(file)
      config
    else
      Mix.shell().info("Doctor file not found. Using defaults.")
      %{}
    end
  end

  # Parses the supported CLI flags into a config map, silently dropping
  # anything unrecognized (the catch-all clause).
  defp parse_cli_args(args) do
    {parsed_args, _args, _invalid} =
      OptionParser.parse(args,
        strict: [
          full: :boolean,
          short: :boolean,
          summary: :boolean,
          raise: :boolean,
          failed: :boolean,
          umbrella: :boolean,
          config_file: :string
        ]
      )

    parsed_args
    |> Enum.reduce(%{}, fn
      {:full, true}, acc -> Map.merge(acc, %{reporter: Full})
      {:short, true}, acc -> Map.merge(acc, %{reporter: Short})
      {:summary, true}, acc -> Map.merge(acc, %{reporter: Summary})
      {:raise, true}, acc -> Map.merge(acc, %{raise: true})
      {:failed, true}, acc -> Map.merge(acc, %{failed: true})
      {:umbrella, true}, acc -> Map.merge(acc, %{umbrella: true})
      {:config_file, file_path}, acc -> Map.merge(acc, %{config_file_path: file_path})
      _unexpected_arg, acc -> acc
    end)
  end
end
|
lib/mix/tasks/doctor.ex
| 0.729327
| 0.707897
|
doctor.ex
|
starcoder
|
defmodule Elixlsx.Sheet do
  alias __MODULE__
  alias Elixlsx.Sheet
  alias Elixlsx.Util

  @moduledoc ~S"""
  Describes a single sheet with a given name. The name can be up to 31 characters long.

  The rows property is a list, each corresponding to a
  row (from the top), of lists, each corresponding to
  a column (from the left), of contents.

  Content may be

  - a String.t (unicode),
  - a number, or
  - a list [String|number, property_list...]

  The property list describes formatting options for that
  cell. See Font.from_props/1 for a list of options.
  """
  defstruct name: "",
            rows: [],
            col_widths: %{},
            row_heights: %{},
            group_cols: [],
            group_rows: [],
            merge_cells: [],
            pane_freeze: nil,
            show_grid_lines: true,
            autofilter_ref: nil,
            autofilter_cols: %{}

  @type t :: %Sheet{
          name: String.t(),
          rows: list(list(any())),
          col_widths: %{pos_integer => number},
          row_heights: %{pos_integer => number},
          group_cols: list(rowcol_group),
          group_rows: list(rowcol_group),
          merge_cells: [{String.t(), String.t()}],
          pane_freeze: {number, number} | nil,
          show_grid_lines: boolean(),
          autofilter_ref: {number, number, number, number} | nil,
          autofilter_cols: [{number, operator_filter}]
        }
  @type rowcol_group :: Range.t() | {Range.t(), opts :: keyword}
  @type filter_type :: :list | :operator
  # NOTE(review): `:greather_than_or_equal` is misspelled but is part of the
  # public API surface (callers pass the atom), so it cannot be renamed safely.
  @type operator :: :equal | :not_equal | :less_than | :less_than_or_equal | :greater_than | :greather_than_or_equal
  @type connective :: :and | :or
  @type operator_filter :: {operator, any()} | {operator, any(), connective, operator, any()}
  @type filter :: {:list, list(any())} | {:operator, operator_filter}

  @doc ~S"""
  Create a sheet with a sheet name.

  The name can be up to 31 characters long.
  """
  @spec with_name(String.t()) :: Sheet.t()
  def with_name(name) do
    %Sheet{name: name}
  end

  # Splits a cell into {content, property_list}: list cells carry their
  # formatting opts in the tail, plain cells have no opts.
  defp split_cell_content_props(cell) do
    cond do
      is_list(cell) ->
        {hd(cell), tl(cell)}

      true ->
        {cell, []}
    end
  end

  @doc ~S"""
  Returns a "CSV" representation of the Sheet. This is mainly
  used for doctests and does not generate valid CSV (yet).
  """
  def to_csv_string(sheet) do
    Enum.map_join(sheet.rows, "\n", fn row ->
      Enum.map_join(row, ",", fn cell ->
        {content, _} = split_cell_content_props(cell)

        case content do
          nil -> ""
          _ -> to_string(content)
        end
      end)
    end)
  end

  @spec set_cell(Sheet.t(), String.t(), any(), Keyword.t()) :: Sheet.t()
  @doc ~S"""
  Set a cell indexed by excel coordinates.

  ## Example

      iex> %Elixlsx.Sheet{} |>
      ...> Elixlsx.Sheet.set_cell("C1", "Hello World",
      ...> bold: true, underline: true) |>
      ...> Elixlsx.Sheet.to_csv_string
      ",,Hello World"
  """
  def set_cell(sheet, index, content, opts \\ []) when is_binary(index) do
    {row, col} = Util.from_excel_coords0(index)
    set_at(sheet, row, col, content, opts)
  end

  @spec set_at(Sheet.t(), non_neg_integer, non_neg_integer, any(), Keyword.t()) :: Sheet.t()
  @doc ~S"""
  Set a cell at a given row/column index. Indizes start at 0.

  ## Example

      iex> %Elixlsx.Sheet{} |>
      ...> Elixlsx.Sheet.set_at(0, 2, "Hello World",
      ...> bold: true, underline: true) |>
      ...> Elixlsx.Sheet.to_csv_string
      ",,Hello World"
  """
  def set_at(sheet, rowidx, colidx, content, opts \\ [])
      when is_number(rowidx) and is_number(colidx) do
    # The rows structure is grown lazily: the first two branches pad missing
    # rows/columns and recurse; the third performs the actual write.
    cond do
      length(sheet.rows) <= rowidx ->
        # append new rows, call self again with new sheet
        n_new_rows = rowidx - length(sheet.rows)
        new_rows = 0..n_new_rows |> Enum.map(fn _ -> [] end)

        update_in(sheet.rows, &(&1 ++ new_rows))
        |> set_at(rowidx, colidx, content, opts)

      length(Enum.at(sheet.rows, rowidx)) <= colidx ->
        # pad the target row with nil cells up to colidx, then recurse
        n_new_cols = colidx - length(Enum.at(sheet.rows, rowidx))
        new_cols = 0..n_new_cols |> Enum.map(fn _ -> nil end)
        new_row = Enum.at(sheet.rows, rowidx) ++ new_cols

        update_in(sheet.rows, &List.replace_at(&1, rowidx, new_row))
        |> set_at(rowidx, colidx, content, opts)

      true ->
        update_in(sheet.rows, fn rows ->
          List.update_at(rows, rowidx, fn cols ->
            List.replace_at(cols, colidx, [content | opts])
          end)
        end)
    end
  end

  @spec set_col_width(Sheet.t(), String.t(), number) :: Sheet.t()
  @doc ~S"""
  Set the column width for a given column. Column is indexed by
  name ("A", ...)
  """
  def set_col_width(sheet, column, width) do
    update_in(
      sheet.col_widths,
      &Map.put(&1, Util.decode_col(column), width)
    )
  end

  @spec set_row_height(Sheet.t(), number, number) :: Sheet.t()
  @doc ~S"""
  Set the row height for a given row. Row is indexed starting from 1
  """
  def set_row_height(sheet, row_idx, height) do
    update_in(
      sheet.row_heights,
      &Map.put(&1, row_idx, height)
    )
  end

  @spec group_cols(Sheet.t(), String.t(), String.t()) :: Sheet.t()
  @doc ~S"""
  Group given column range. (i.e. increase outline level by one)

  Column is indexed by name ("A", ...)

  ## Options

  - `collapsed`: if true, collapse this group.
  """
  def group_cols(sheet, first_col, last_col, opts \\ []) do
    col_range = Range.new(Util.decode_col(first_col), Util.decode_col(last_col))
    # Bare ranges keep the serialized form compact when no opts are given.
    new_group = if opts === [], do: col_range, else: {col_range, opts}
    update_in(sheet.group_cols, fn groups -> groups ++ [new_group] end)
  end

  @spec group_rows(Sheet.t(), pos_integer, pos_integer) :: Sheet.t()
  @doc ~S"""
  Group given row range. (i.e. increase outline level by one)

  Row is indexed starting from 1.

  ## Options

  - `collapsed`: if true, collapse this group.
  """
  def group_rows(sheet, first_row_idx, last_row_idx, opts \\ []) do
    row_range = Range.new(first_row_idx, last_row_idx)
    new_group = if opts === [], do: row_range, else: {row_range, opts}
    update_in(sheet.group_rows, fn groups -> groups ++ [new_group] end)
  end

  @spec set_pane_freeze(Sheet.t(), number, number) :: Sheet.t()
  @doc ~S"""
  Set the pane freeze at the given row and column. Row and column are indexed starting from 1.

  Special value 0 means no freezing, e.g. {1, 0} will freeze first row and no columns.
  """
  def set_pane_freeze(sheet, row_idx, col_idx) do
    %{sheet | pane_freeze: {row_idx, col_idx}}
  end

  @spec remove_pane_freeze(Sheet.t()) :: Sheet.t()
  @doc ~S"""
  Removes any pane freezing that has been set
  """
  def remove_pane_freeze(sheet) do
    %{sheet | pane_freeze: nil}
  end

  @spec set_autofilter(Sheet.t(), String.t(), String.t()) :: Sheet.t()
  @doc ~S"""
  Set the range for autofiltering
  """
  def set_autofilter(sheet, start_cell, end_cell) do
    {row1, col1} = Util.from_excel_coords(start_cell)
    {row2, col2} = Util.from_excel_coords(end_cell)
    set_autofilter(sheet, row1, col1, row2, col2)
  end

  @spec set_autofilter(Sheet.t(), number, number, number, number) :: Sheet.t()
  @doc ~S"""
  Set the range for autofiltering
  """
  def set_autofilter(sheet, row1, col1, row2, col2) do
    %{sheet | autofilter_ref: {row1, col1, row2, col2}}
  end

  @spec append_list_filter(Sheet.t(), String.t(), list(any())) :: Sheet.t()
  @doc ~S"""
  Add filter on a column as a list of inclusive elements
  """
  def append_list_filter(sheet, column, filters) when is_binary(column) do
    append_list_filter(sheet, Util.decode_col(column), filters)
  end

  @spec append_list_filter(Sheet.t(), number, list(any())) :: Sheet.t()
  @doc ~S"""
  Add filter on a column as a list of inclusive elements
  """
  def append_list_filter(sheet, column, filters) do
    # Any previously registered filter on the same column is overwritten.
    update_in(sheet.autofilter_cols, &Map.put(&1, column, {:list, filters}))
  end

  @spec append_criteria_filter(Sheet.t(), String.t(), operator, any()) :: Sheet.t()
  @doc ~S"""
  Add filter on a column as an operator-based criteria.

  Valid operators are: :equal, :not_equal, :less_than, :less_than_or_equal, :greater_than, :greather_than_or_equal
  """
  def append_criteria_filter(sheet, column, op, val) when is_binary(column) do
    append_criteria_filter(sheet, Util.decode_col(column), op, val)
  end

  @spec append_criteria_filter(Sheet.t(), number, operator, any()) :: Sheet.t()
  @doc ~S"""
  Add filter on a column as an operator-based criteria.

  Valid operators are: :equal, :not_equal, :less_than, :less_than_or_equal, :greater_than, :greather_than_or_equal
  """
  def append_criteria_filter(sheet, column, op, val) do
    update_in(sheet.autofilter_cols, &Map.put(&1, column, {:operator, {op, val}}))
  end

  # Bodiless head declaring the default for `connective` across the 7-arity clauses.
  def append_criteria_filter(sheet, column, op1, val1, op2, val2, connective \\ :and)

  @spec append_criteria_filter(Sheet.t(), String.t(), operator, any(), operator, any(), connective) :: Sheet.t()
  @doc ~S"""
  Add two filters connected by :and or :or on a column as an operator-based criteria.

  Valid operators are: :equal, :not_equal, :less_than, :less_than_or_equal, :greater_than, :greather_than_or_equal
  """
  def append_criteria_filter(sheet, column, op1, val1, op2, val2, connective) when is_binary(column) do
    append_criteria_filter(sheet, Util.decode_col(column), op1, val1, op2, val2, connective)
  end

  @spec append_criteria_filter(Sheet.t(), number, operator, any(), operator, any(), connective) :: Sheet.t()
  @doc ~S"""
  Add two filters connected by :and or :or on a column as an operator-based criteria.

  Valid operators are: :equal, :not_equal, :less_than, :less_than_or_equal, :greater_than, :greather_than_or_equal
  """
  def append_criteria_filter(sheet, column, op1, val1, op2, val2, connective) do
    update_in(sheet.autofilter_cols, &Map.put(&1, column, {:operator, {op1, val1, connective, op2, val2}}))
  end
end
|
lib/elixlsx/sheet.ex
| 0.808332
| 0.502625
|
sheet.ex
|
starcoder
|
defmodule Lasagna do
  @moduledoc """
  Helpers for timing the preparation and baking of a lasagna.
  """

  # Required baking time and per-layer preparation time, in minutes.
  @oven_minutes 40
  @minutes_per_layer 2

  @doc """
  Total time in minutes the lasagna must spend in the oven.

  ## Examples

      iex> Lasagna.expected_minutes_in_oven()
      40
  """
  @spec expected_minutes_in_oven() :: pos_integer()
  def expected_minutes_in_oven, do: @oven_minutes

  @doc """
  Minutes of baking still remaining, given how long the lasagna has already
  been in the oven. A negative `minutes_in_oven` (not yet in the oven) yields
  a remaining time greater than the full baking time.

  ## Examples

      iex> Lasagna.remaining_minutes_in_oven(21)
      19

      iex> Lasagna.remaining_minutes_in_oven(-4)
      44
  """
  @spec remaining_minutes_in_oven(integer()) :: integer()
  def remaining_minutes_in_oven(minutes_in_oven) do
    @oven_minutes - minutes_in_oven
  end

  @doc """
  Preparation time in minutes for the given number of layers.

  ## Examples

      iex> Lasagna.preparation_time_in_minutes(2)
      4
  """
  @spec preparation_time_in_minutes(pos_integer()) :: pos_integer()
  def preparation_time_in_minutes(number_of_layers) do
    number_of_layers * @minutes_per_layer
  end

  @doc """
  Total minutes spent working on the lasagna: preparation plus time already in
  the oven. Negative oven time (not yet baking) contributes nothing.

  ## Examples

      iex> Lasagna.total_time_in_minutes(1, 10)
      12

      iex> Lasagna.total_time_in_minutes(1, -4)
      2
  """
  @spec total_time_in_minutes(pos_integer(), pos_integer()) :: pos_integer()
  def total_time_in_minutes(number_of_layers, minutes_in_oven) do
    preparation_time_in_minutes(number_of_layers) + max(minutes_in_oven, 0)
  end

  @doc """
  Sophisticated magic that alerts you.

  ## Examples

      iex> Lasagna.alarm()
      "Ding!"
  """
  @spec alarm() :: String.t()
  def alarm, do: "Ding!"
end
|
elixir/lasagna/lib/lasagna.ex
| 0.845113
| 0.807233
|
lasagna.ex
|
starcoder
|
defmodule Accounting.Assertions do
  @moduledoc """
  This module contains a set of assertion functions.
  """

  alias Accounting.{Account, Entry, Journal}

  import ExUnit.Assertions, only: [flunk: 1]

  # How long (ms) each assertion waits for the expected message before
  # flunking (assert_*) or succeeding (refute_*).
  @timeout 100

  @doc "Asserts that a `:registered_categories` message arrives for the journal within the timeout."
  @spec assert_registered_categories(Journal.id, [String.t]) :: true | no_return
  def assert_registered_categories(journal_id, categories) do
    receive do
      {:registered_categories, ^journal_id, ^categories} -> true
    after
      @timeout ->
        flunk """
        Categories were not registered:
        #{inspect categories}
        """
    end
  end

  @doc "Asserts that no matching `:registered_categories` message arrives within the timeout."
  @spec refute_registered_categories(Journal.id, [String.t]) :: true | no_return
  def refute_registered_categories(journal_id, categories) do
    receive do
      {:registered_categories, ^journal_id, ^categories} ->
        flunk """
        Unexpected categories were registered:
        #{inspect categories}
        """
    after
      @timeout -> true
    end
  end

  @doc "Asserts that a `:setup_accounts` message arrives for the journal within the timeout."
  @spec assert_setup_accounts(Journal.id, [Account.setup, ...]) :: true | no_return
  def assert_setup_accounts(journal_id, accounts) do
    receive do
      {:setup_accounts, ^journal_id, ^accounts} -> true
    after
      @timeout ->
        flunk """
        Accounts were not registered:
        #{inspect accounts}
        """
    end
  end

  @doc "Asserts that no matching `:setup_accounts` message arrives within the timeout."
  @spec refute_setup_accounts(Journal.id, [Account.setup, ...]) :: true | no_return
  def refute_setup_accounts(journal_id, accounts) do
    receive do
      {:setup_accounts, ^journal_id, ^accounts} ->
        flunk """
        Unexpected accounts were registered:
        #{inspect accounts}
        """
    after
      @timeout -> true
    end
  end

  @doc "Asserts that a `:setup_account_conversions` message arrives for the journal/month/year within the timeout."
  @spec assert_setup_account_conversions(Journal.id, 1..12, pos_integer, [Account.setup, ...]) :: true | no_return
  def assert_setup_account_conversions(journal_id, month, year, accounts) do
    receive do
      {:setup_account_conversions, ^journal_id, ^month, ^year, ^accounts} ->
        true
    after
      @timeout ->
        flunk """
        Account conversion balances were not set:
        #{inspect accounts}
        """
    end
  end

  @doc "Asserts that no matching `:setup_account_conversions` message arrives within the timeout."
  @spec refute_setup_account_conversions(Journal.id, 1..12, pos_integer, [Account.setup, ...]) :: true | no_return
  def refute_setup_account_conversions(journal_id, month, year, accounts) do
    receive do
      {:setup_account_conversions, ^journal_id, ^month, ^year, ^accounts} ->
        flunk """
        Unexpected account conversion balances were set:
        #{inspect accounts}
        """
    after
      @timeout -> true
    end
  end

  @doc "Asserts that a `:registered_account` message arrives for the journal and account number within the timeout."
  @spec assert_created_account(Journal.id, String.t) :: true | no_return
  def assert_created_account(journal_id, number) do
    receive do
      {:registered_account, ^journal_id, ^number} -> true
    after
      @timeout ->
        flunk "An account was not created with the number '#{number}'."
    end
  end

  @doc "Asserts that no matching `:registered_account` message arrives within the timeout."
  @spec refute_created_account(Journal.id, String.t) :: true | no_return
  def refute_created_account(journal_id, number) do
    receive do
      {:registered_account, ^journal_id, ^number} ->
        flunk "An account was unexpectedly created with the number '#{number}'."
    after
      @timeout ->
        true
    end
  end

  @doc "Asserts that a `:recorded_entries` message arrives for the journal within the timeout."
  @spec assert_recorded_entries(Journal.id, [Entry.t]) :: true | no_return
  def assert_recorded_entries(journal_id, entries) do
    receive do
      {:recorded_entries, ^journal_id, ^entries} -> true
    after
      @timeout ->
        flunk """
        Entries were not recorded:
        #{inspect entries}
        """
    end
  end

  @doc "Asserts that no matching `:recorded_entries` message arrives within the timeout."
  @spec refute_recorded_entries(Journal.id, [Entry.t]) :: true | no_return
  def refute_recorded_entries(journal_id, entries) do
    receive do
      {:recorded_entries, ^journal_id, ^entries} ->
        flunk """
        Unexpected entries were recorded:
        #{inspect entries}
        """
    after
      @timeout -> true
    end
  end

  @doc "Asserts that a `:recorded_invoices` message arrives for the journal within the timeout."
  @spec assert_recorded_invoices(Journal.id, [Entry.t]) :: true | no_return
  def assert_recorded_invoices(journal_id, entries) do
    receive do
      {:recorded_invoices, ^journal_id, ^entries} -> true
    after
      @timeout ->
        flunk """
        Invoices were not recorded:
        #{inspect entries}
        """
    end
  end

  @doc "Asserts that no matching `:recorded_invoices` message arrives within the timeout."
  @spec refute_recorded_invoices(Journal.id, [Entry.t]) :: true | no_return
  def refute_recorded_invoices(journal_id, entries) do
    receive do
      {:recorded_invoices, ^journal_id, ^entries} ->
        flunk """
        Unexpected invoices were recorded:
        #{inspect entries}
        """
    after
      @timeout -> true
    end
  end
end
|
lib/accounting/assertions.ex
| 0.853119
| 0.426919
|
assertions.ex
|
starcoder
|
defmodule CyberSourceSDK.Client do
  @moduledoc """
  This Client module handles all HTTPS requests to the CyberSource server. It
  takes some parameters and converts them to HTTPS requests.

  It supports the following payments:

  * Android Pay
  * Apple Pay

  It supports the following requests:

  * Authorization
  * Capture
  * Refund
  """

  alias CyberSourceSDK.Logger

  # Human-readable descriptions for the numeric `reasonCode` values returned
  # by CyberSource (looked up via `reason_from_code/1`).
  @response_codes %{
    100 => "Successful transaction",
    101 => "Request is missing one or more required fields",
    102 => "One or more fields contains invalid data",
    150 => "General failure",
    151 => "The request was received but a server time-out occurred",
    152 => "The request was received, but a service timed out",
    200 => "The authorization request was approved by the issuing bank but declined by CyberSource because it did not pass the AVS check",
    201 => "The issuing bank has questions about the request",
    202 => "Expired card",
    203 => "General decline of the card",
    204 => "Insufficient funds in the account",
    205 => "Stolen or lost card",
    207 => "Issuing bank unavailable",
    208 => "Inactive card or card not authorized for card-not-present transactions",
    209 => "American Express Card Identification Digits (CID) did not match",
    210 => "The card has reached the credit limit",
    211 => "Invalid card verification number",
    221 => "The customer matched an entry on the processor's negative file",
    230 => "The authorization request was approved by the issuing bank but declined by CyberSource because it did not pass the card verification check",
    231 => "Invalid account number",
    232 => "The card type is not accepted by the payment processor",
    233 => "General decline by the processor",
    234 => "A problem exists with your CyberSource merchant configuration",
    235 => "The requested amount exceeds the originally authorized amount",
    236 => "Processor failure",
    237 => "The authorization has already been reversed",
    238 => "The authorization has already been captured",
    239 => "The requested transaction amount must match the previous transaction amount",
    240 => "The card type sent is invalid or does not correlate with the credit card number",
    241 => "The request ID is invalid",
    242 => "You requested a capture, but there is no corresponding, unused authorization record.",
    243 => "The transaction has already been settled or reversed",
    244 => "The bank account number failed the validation check",
    246 => "The capture or credit is not voidable because the capture or credit information has already been submitted to your processor",
    247 => "You requested a credit for a capture that was previously voided",
    250 => "The request was received, but a time-out occurred with the payment processor",
    254 => "Your CyberSource account is prohibited from processing stand-alone refunds",
    255 => "Your CyberSource account is not configured to process the service in the country you specified"
  }

  import SweetXml

  alias CyberSourceSDK.Helper

  use GenServer
# GenServer callback: the supplied argument becomes the (unused) server state.
def init(args), do: {:ok, args}

# Starts the client registered under a well-known local name.
def start_link do
  GenServer.start_link(__MODULE__, {}, name: :cybersource_sdk_client)
end
@doc """
Create an authorization payment
For a normal account, bill_to is mandatory. If you ask CyberSource for a
relaxed AVS check, bill_to can be optional.
## Parameters
- price: Float that represents the price to be charged to the user.
- merchant_reference_code: String that represents the order. Normally you should pass an unique identifier like `order_id`.
- card_type: String with the name of card type, like VISA, MASTERCARD, etc.
- encrypted_payment: String that must be in Base64 received by Apple/Android payment system.
- bill_to: Structure generated by `CyberSourceSDK.bill_to()`. (Optional)
- worker: Atom with name of the structure in configurations to be use. (Optional)
## Example
Without `bill_to` and `worker` parameters
```
authorize(32.0, "1234", "VISA", "oJ8IOx6SA9HNncxzpS9akm32n+DSAJH==")
```
With `bill_to` parameter
```
bill_to = CyberSourceSDK.bill_to("John", "Doe", "Marylane Street", "34", "New York", "Hong Kong", "<EMAIL>")
authorize(32.0, "1234", "VISA", "oJ8IOx6SA9HNncxzpS9akm32n+DSAJH==", bill_to)
```
"""
def authorize(
price,
merchant_reference_code,
card_type,
encrypted_payment,
bill_to \\ [],
worker \\ :merchant
)
def authorize(price, merchant_reference_code, card_type, encrypted_payment, bill_to, worker)
when is_float(price) do
case validate_merchant_reference_code(merchant_reference_code) do
{:error, reason} ->
{:error, reason}
merchant_reference_code_validated ->
case Helper.check_payment_type(encrypted_payment) do
{:ok, :apple_pay} ->
pay_with_apple_pay(
price,
merchant_reference_code_validated,
card_type,
encrypted_payment,
bill_to,
worker
)
{:ok, :android_pay} ->
pay_with_android_pay(
price,
merchant_reference_code_validated,
card_type,
encrypted_payment,
bill_to,
worker
)
{:error, reason} ->
{:error, reason}
end
end
end
def authorize(_, _, _, _, _, _) do
{:error, :price_needs_to_be_float}
end
@doc """
Create a credit card token
## Example
```
bill_to = CyberSourceSDK.bill_to("John", "Doe", "Marylane Street", "34", "New York", "12345", "NY" "USA", "<EMAIL>")
credit_card = CyberSourceSDK.credit_card("4111111111111111", "12", "2020", "001")
create_credit_card_token("<PASSWORD>", credit_card, bill_to)
```
"""
def create_credit_card_token(
merchant_reference_code,
credit_card,
bill_to,
worker \\ :merchant
)
def create_credit_card_token(merchant_reference_code, credit_card, bill_to, worker) do
Logger.info("Creating CC token")
case validate_merchant_reference_code(merchant_reference_code) do
{:error, reason} ->
Logger.error("Coundn't validate merchant reference code #{inspect(reason)}")
{:error, reason}
merchant_reference_code_validated ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params = CyberSourceSDK.Client.get_configuration_params(worker) ++ credit_card ++ bill_to ++ [reference_id: merchant_reference_code_validated]
EEx.eval_file(get_template("credit_card_create.xml"), assigns: replace_params) |> call()
else
Helper.invalid_merchant_configuration()
end
end
end
@doc """
Update a credit card
## Example
```
bill_to = CyberSourceSDK.bill_to(nil, nil, nil, nil, nil, nil, nil, nil, "<EMAIL>") # can also be nil
credit_card = CyberSourceSDK.credit_card(nil, "12", "2024", nil) # can also be nil
update_credit_card("1234", "XXXXXXXX", credit_card, bill_to)
```
"""
def update_credit_card(
merchant_reference_code,
token,
credit_card,
bill_to,
worker \\ :merchant
)
def update_credit_card(merchant_reference_code, token, credit_card, bill_to, worker) do
credit_card = if is_nil(credit_card), do: CyberSourceSDK.credit_card(nil, nil, nil), else: credit_card
bill_to = if is_nil(bill_to), do: CyberSourceSDK.bill_to(nil, nil, nil, nil, nil, nil, nil, nil, nil), else: bill_to
case validate_merchant_reference_code(merchant_reference_code) do
{:error, reason} ->
{:error, reason}
merchant_reference_code_validated ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params = CyberSourceSDK.Client.get_configuration_params(worker) ++ credit_card ++ bill_to ++ [reference_id: merchant_reference_code_validated, token: token]
EEx.eval_file(get_template("credit_card_update.xml"), assigns: replace_params) |> call()
else
Helper.invalid_merchant_configuration()
end
end
end
@doc """
Retrieve a credit card by reference code and token
## Example
```
retrieve_credit_card("1234", "XXXXXXXXXXXXX")
```
"""
def retrieve_credit_card(
merchant_reference_code,
token,
worker \\ :merchant
)
def retrieve_credit_card(merchant_reference_code, token, worker) do
case validate_merchant_reference_code(merchant_reference_code) do
{:error, reason} ->
{:error, reason}
merchant_reference_code_validated ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params = CyberSourceSDK.Client.get_configuration_params(worker) ++ [reference_id: merchant_reference_code_validated, token: token]
EEx.eval_file(get_template("credit_card_retrieve.xml"), assigns: replace_params) |> call()
else
Helper.invalid_merchant_configuration()
end
end
end
@doc """
Delete a credit card by reference code and token
## Example
```
delete_credit_card("1234", "XXXXXXXXXXXXX")
```
"""
def delete_credit_card(
merchant_reference_code,
token,
worker \\ :merchant
)
def delete_credit_card(merchant_reference_code, token, worker) do
case validate_merchant_reference_code(merchant_reference_code) do
{:error, reason} ->
{:error, reason}
merchant_reference_code_validated ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params = CyberSourceSDK.Client.get_configuration_params(worker) ++ [reference_id: merchant_reference_code_validated, token: token]
EEx.eval_file(get_template("credit_card_delete.xml"), assigns: replace_params) |> call()
else
Helper.invalid_merchant_configuration()
end
end
end
@doc """
Charge a credit card by token
## Example
```
charge_credit_card(10.00, "1234", "XXXXXXXXXXXXX")
```
"""
def charge_credit_card(
price,
merchant_reference_code,
token,
worker \\ :merchant
)
def charge_credit_card(price, merchant_reference_code, token, worker) do
case validate_merchant_reference_code(merchant_reference_code) do
{:error, reason} ->
{:error, reason}
merchant_reference_code_validated ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params = CyberSourceSDK.Client.get_configuration_params(worker) ++ [reference_id: merchant_reference_code_validated, token: token, price: price]
EEx.eval_file(get_template("credit_card_charge.xml"), assigns: replace_params) |> call()
else
Helper.invalid_merchant_configuration()
end
end
end
@doc """
Authorise a credit card by token
## Example
```
auth_credit_card(10.00, "1234", "XXXXXXXXXXXXX")
```
"""
def auth_credit_card(
price,
merchant_reference_code,
token,
worker \\ :merchant
)
def auth_credit_card(price, merchant_reference_code, token, worker) do
case validate_merchant_reference_code(merchant_reference_code) do
{:error, reason} ->
{:error, reason}
merchant_reference_code_validated ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params = CyberSourceSDK.Client.get_configuration_params(worker) ++ [reference_id: merchant_reference_code_validated, token: token, price: price]
EEx.eval_file(get_template("credit_card_auth.xml"), assigns: replace_params) |> call()
else
Helper.invalid_merchant_configuration()
end
end
end
@doc """
Capture authorization on user credit card
## Parameters
- order_id: Unique number to identify the purchase.
- request_params: Base64 of a JSON with `request_id` and `request_token` from authorization request.
- items: An array of map containing the following values: `id`, `unit_price` and `quantity`. Example: ```%{id: id, unit_price: unit_price, quantity: quantity}```
- worker: Merchant atom to use (setup in configurations).
## Result
On successful return the result will be:
```
{:ok, object}
```
"""
def capture(order_id, request_params, items \\ [], worker \\ :merchant) do
case Helper.json_from_base64(request_params) do
{:ok, %{request_id: request_id, request_token: _request_token}} ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++
[request_id: request_id, reference_id: order_id] ++ [items: items]
EEx.eval_file(get_template("capture_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
{:error, message} ->
{:error, message}
end
end
@doc """
Remove authorization on user credit card
## Parameters
- order_id: Unique number to identify the purchase.
- amount: Price (value) to refund.
- request_params: Base64 of a JSON with `request_id` and `request_token` from authorization request.
- items: An array of map containing the following values: `id`, `unit_price` and `quantity`. Example: ```%{id: id, unit_price: unit_price, quantity: quantity}```
- worker: Merchant atom to use (setup in configurations)
## Example
```
refund("1234", 23435465442432, items)
```
"""
def refund(order_id, amount, request_params, items \\ [], worker \\ :merchant) do
case Helper.json_from_base64(request_params) do
{:ok, %{request_id: request_id, request_token: _request_token}} ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++
[request_id: request_id, reference_id: order_id, total_amount: amount] ++
[items: items]
EEx.eval_file(get_template("refund_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
{:error, message} ->
{:error, message}
end
end
@doc """
A void cancels a capture or credit request that you submitted to CyberSource. A
transaction can be voided only when CyberSource has not already submitted the capture
or credit request to your processor. CyberSource usually submits capture and credit
requests to your processor once a day, so your window for successfully voiding a capture
or credit request is small. CyberSource declines your void request when the capture or
credit request has already been sent to the processor
"""
def void(order_id, request_params, worker \\ :merchant) do
case Helper.json_from_base64(request_params) do
{:ok, %{request_id: request_id, request_token: _request_token}} ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++ [request_id: request_id, reference_id: order_id]
EEx.eval_file(get_template("void_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
{:error, message} ->
{:error, message}
end
end
@doc """
When your request for a credit is successful, the issuing bank for the credit
card takes money out of your merchant bank account and returns it to the customer.
It usually takes two to four days for your acquiring bank to transfer funds
from your merchant bank account.
"""
def credit(order_id, amount, reason, request_params, worker \\ :merchant) do
case Helper.json_from_base64(request_params) do
{:ok, %{request_id: request_id, request_token: _request_token}} ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++
[
request_id: request_id,
reference_id: order_id,
total_amount: amount,
refund_reason: reason
]
EEx.eval_file(get_template("credit_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
{:error, message} ->
{:error, message}
end
end
@doc """
Make a request to pay with Android Pay
Returns `{:ok, response_object}` , `{:error, :card_type_not_found` or
`{:error, response_code}`
"""
def pay_with_android_pay(
price,
merchant_reference_code,
card_type,
encrypted_payment,
bill_to \\ [],
worker \\ :merchant
) do
case get_card_type(card_type) do
nil ->
{:error, :card_type_not_found}
card_type ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
get_configuration_params(worker) ++
get_payment_params(merchant_reference_code, price, encrypted_payment, card_type) ++
bill_to
EEx.eval_file(get_template("android_pay_request.xml"), assigns: replace_params)
|> call
else
Helper.invalid_merchant_configuration()
end
end
end
@doc """
Make a request to pay with Apple Pay
Returns `{:ok, response_object}` , `{:error, :card_type_not_found` or
`{:error, response_code}`
"""
def pay_with_apple_pay(
price,
merchant_reference_code,
card_type,
encrypted_payment,
bill_to \\ [],
worker \\ :merchant
) do
case get_card_type(card_type) do
nil ->
{:error, :card_type_not_found}
card_type ->
merchant_configuration = get_configuration_params(worker)
if length(merchant_configuration) > 0 do
replace_params =
CyberSourceSDK.Client.get_configuration_params(worker) ++
CyberSourceSDK.Client.get_payment_params(
merchant_reference_code,
price,
encrypted_payment,
card_type
) ++ bill_to
EEx.eval_file(get_template("apple_pay_request.xml"), assigns: replace_params)
|> call()
else
Helper.invalid_merchant_configuration()
end
end
end
# Define path of request templates.
#
# Resolves the given template name inside the application's priv/requests
# directory. The previous implementation also computed a __DIR__-relative
# path and discarded the result (dead code); that line has been removed.
defp get_template(filename) do
  Path.join(:code.priv_dir(:cybersource_sdk), "/requests/" <> filename <> ".eex")
end
# Get Payment parameters
#
# Builds the keyword list of EEx assigns shared by the Apple Pay and
# Android Pay request templates.
@spec get_payment_params(String.t(), float(), String.t(), String.t()) :: list()
def get_payment_params(order_id, price, encrypted_token, card_type) do
  [
    reference_id: order_id,
    total_amount: price,
    encrypted_payment_data: encrypted_token,
    card_type: card_type
  ]
end
# Translate a card brand name into the CyberSource numeric card-type code.
# Unknown brands map to nil.
@spec get_card_type(String.t()) :: String.t() | nil
def get_card_type("VISA"), do: "001"
def get_card_type("MASTERCARD"), do: "002"
def get_card_type("AMEX"), do: "003"
def get_card_type("DISCOVER"), do: "004"
def get_card_type("JCB"), do: "007"
def get_card_type(_other), do: nil
# Fetch the merchant configuration for `worker` from the application env,
# falling back to CYBERSOURCE_* environment variables for individual keys.
# Returns [] when the worker has no configuration at all.
@spec get_configuration_params(atom()) :: list()
def get_configuration_params(worker) do
  case Application.get_env(:cybersource_sdk, worker) do
    nil ->
      []

    configuration ->
      config_map = Enum.into(configuration, %{})

      [
        merchant_id: Map.get(config_map, :id) || System.get_env("CYBERSOURCE_MERCHANT_ID"),
        transaction_key: Map.get(config_map, :transaction_key) || System.get_env("CYBERSOURCE_TRANSACTION_KEY"),
        currency: Map.get(config_map, :currency) || System.get_env("CYBERSOURCE_CURRENCY"),
        client_library: "CyberSourceSDK Elixir #{Application.spec(:cybersource_sdk, :vsn)}"
      ]
  end
end
# Make HTTPS request
#
# POSTs the rendered XML body to the configured CyberSource endpoint and
# converts the SOAP reply into {:ok, _} / {:error, _} terms.
# NOTE(review): handle_response/1 can also produce a 4-element error tuple
# ({:error, code, reason, response}); the spec below is narrower — confirm.
@spec call(String.t()) :: {:ok, map()} | {:error, String.t()} | {:error, :unknown_response}
defp call(xml_body) do
  # Endpoint comes from app config, falling back to an environment variable.
  endpoint = Application.get_env(:cybersource_sdk, :endpoint) || System.get_env("CYBERSOURCE_ENDPOINT")
  timeout = Application.get_env(:cybersource_sdk, :timeout, 60_000)

  case HTTPoison.post(
         endpoint,
         xml_body,
         [{"Content-Type", "application/xml"}],
         [
           # Same timeout for connect and receive.
           timeout: timeout,
           recv_timeout: timeout
         ]
       ) do
    {:ok, %HTTPoison.Response{body: response_body}} ->
      parse_response(response_body)
      |> handle_response

    {:error, %HTTPoison.Error{id: _, reason: reason}} = response ->
      Logger.info("Timeout: #{timeout}")
      Logger.error("#{inspect(response)}")
      {:error, reason}
  end
end
# Normalize the merchant reference code.
#
# Returns the code itself when it is a non-empty, valid UTF-8 string,
# converts integers to their string form, and returns
# {:error, :invalid_order_id} for anything else.
#
# Fixes two defects in the previous version: String.valid?/1 raised
# FunctionClauseError for non-binary input (so the integer clause below was
# unreachable for integers), and `String.length(code)` alone was truthy even
# for 0, which let "" pass validation.
defp validate_merchant_reference_code(merchant_reference_code) do
  cond do
    is_binary(merchant_reference_code) and String.valid?(merchant_reference_code) and
        String.length(merchant_reference_code) > 0 ->
      merchant_reference_code

    is_integer(merchant_reference_code) ->
      Integer.to_string(merchant_reference_code)

    true ->
      {:error, :invalid_order_id}
  end
end
# Parse response from CyberSource
#
# Maps the SOAP XML reply onto a flat Elixir map with SweetXml. Sigil
# modifiers: s = string, i = integer, f = float, o = optional (nil when the
# element is absent); every reply section is optional, so only the sections
# present in the particular response are populated.
@spec parse_response(String.t()) :: map()
def parse_response(xml) do
  xml
  |> xmap(
    merchantReferenceCode:
      ~x"//soap:Envelope/soap:Body/c:replyMessage/c:merchantReferenceCode/text()"os,
    requestID: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:requestID/text()"oi,
    decision: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:decision/text()"os,
    invalidField: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:invalidField/text()"os,
    missingField: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:missingField/text()"os,
    reasonCode: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:reasonCode/text()"oi,
    requestToken: ~x"//soap:Envelope/soap:Body/c:replyMessage/c:requestToken/text()"os,
    # Authorization reply section.
    ccAuthReply: [
      ~x".//c:ccAuthReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i,
      amount: ~x"./c:amount/text()"of
    ],
    # Capture reply section.
    ccCaptureReply: [
      ~x".//c:ccCaptureReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i,
      amount: ~x"./c:amount/text()"of,
      requestDateTime: ~x"./c:requestDateTime/text()"so,
      reconciliationID: ~x"./c:reconciliationID/text()"so
    ],
    # Authorization reversal reply section.
    ccAuthReversalReply: [
      ~x".//c:ccAuthReversalReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i
    ],
    originalTransaction: [
      ~x".//c:originalTransaction"o,
      amount: ~x"./c:amount/text()"of,
      reasonCode: ~x"./c:reasonCode/text()"i
    ],
    # Void reply section.
    voidReply: [
      ~x".//c:voidReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i,
      amount: ~x"./c:amount/text()"of,
      requestDateTime: ~x"./c:requestDateTime/text()"so,
      currency: ~x"./c:currency/text()"so
    ],
    # Credit (refund) reply section.
    ccCreditReply: [
      ~x".//c:ccCreditReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i,
      requestDateTime: ~x"./c:requestDateTime/text()"so,
      amount: ~x"./c:amount/text()"of,
      reconciliationID: ~x"./c:reconciliationID/text()"so,
      purchasingLevel3Enabled: ~x"./c:purchasingLevel3Enabled/text()"so,
      enhancedDataEnabled: ~x"./c:enhancedDataEnabled/text()"so,
      authorizationXID: ~x"./c:authorizationXID/text()"so,
      forwardCode: ~x"./c:forwardCode/text()"so,
      ownerMerchantID: ~x"./c:ownerMerchantID/text()"so,
      reconciliationReferenceNumber: ~x"./c:reconciliationReferenceNumber/text()"so
    ],
    # Subscription (stored credit card token) reply sections.
    paySubscriptionCreateReply: [
      ~x".//c:paySubscriptionCreateReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i,
      subscriptionID: ~x"./c:subscriptionID/text()"i,
    ],
    paySubscriptionUpdateReply: [
      ~x".//c:paySubscriptionUpdateReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i,
      subscriptionID: ~x"./c:subscriptionID/text()"i,
    ],
    paySubscriptionDeleteReply: [
      ~x".//c:paySubscriptionDeleteReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i,
      subscriptionID: ~x"./c:subscriptionID/text()"i,
    ],
    paySubscriptionRetrieveReply: [
      ~x".//c:paySubscriptionRetrieveReply"o,
      reasonCode: ~x"./c:reasonCode/text()"i,
      approvalRequired: ~x"./c:approvalRequired/text()"s,
      automaticRenew: ~x"./c:automaticRenew/text()"s,
      cardAccountNumber: ~x"./c:cardAccountNumber/text()"s,
      cardExpirationMonth: ~x"./c:cardExpirationMonth/text()"i,
      cardExpirationYear: ~x"./c:cardExpirationYear/text()"i,
      cardType: ~x"./c:cardType/text()"s,
      city: ~x"./c:city/text()"s,
      country: ~x"./c:country/text()"s,
      currency: ~x"./c:currency/text()"s,
      email: ~x"./c:email/text()"s,
      endDate: ~x"./c:endDate/text()"i,
      firstName: ~x"./c:firstName/text()"s,
      frequency: ~x"./c:frequency/text()"s,
      lastName: ~x"./c:lastName/text()"s,
      paymentMethod: ~x"./c:paymentMethod/text()"s,
      paymentsRemaining: ~x"./c:paymentsRemaining/text()"i,
      postalCode: ~x"./c:postalCode/text()"s,
      startDate: ~x"./c:startDate/text()"i,
      state: ~x"./c:state/text()"s,
      status: ~x"./c:status/text()"s,
      street1: ~x"./c:street1/text()"s,
      subscriptionID: ~x"./c:subscriptionID/text()"s,
      totalPayments: ~x"./c:totalPayments/text()"i,
      ownerMerchantID: ~x"./c:ownerMerchantID/text()"s
    ],
    # SOAP fault envelope (authentication/transport errors).
    fault: [
      ~x"//soap:Envelope/soap:Body/soap:Fault"o,
      faultCode: ~x"./faultcode/text()"s,
      faultString: ~x"./faultstring/text()"s
    ]
  )
end
# Convert a parsed reply map into the client's result terms.
#
# - decision "ACCEPT"           -> {:ok, response}
# - decision "REJECT"/"ERROR"   -> {:error, reason_code, description, response}
# - SOAP fault                  -> {:error, "code - message"}
# - anything else               -> {:error, :unknown_response}
#
# Spec fixed: the REJECT/ERROR branches return a 4-element tuple carrying the
# numeric reason code, its description and the full response, not the
# 3-element {:error, String.t(), String.t()} previously declared.
@spec handle_response(map()) ::
        {:ok, map()}
        | {:error, integer() | nil, String.t() | nil, map()}
        | {:error, String.t()}
        | {:error, :unknown_response}
defp handle_response(response) do
  cond do
    response.decision != "" ->
      case response.decision do
        "ACCEPT" -> {:ok, response}
        "REJECT" -> {:error, response.reasonCode, reason_from_code(response.reasonCode), response}
        "ERROR" -> {:error, response.reasonCode, reason_from_code(response.reasonCode), response}
      end

    response.fault.faultCode != "" ->
      {:error, "#{response.fault.faultCode} - #{response.fault.faultString}"}

    true ->
      {:error, :unknown_response}
  end
end

# Map a CyberSource numeric reason code to its human-readable description
# (nil for unknown codes).
defp reason_from_code(code) do
  @response_codes[code]
end
end
|
lib/cybersource-sdk/client.ex
| 0.85186
| 0.716181
|
client.ex
|
starcoder
|
defmodule Veritaserum do
  @moduledoc """
  Sentiment analysis based on AFINN-165, emojis and some enhancements.

  Also supports:
  - emojis (❤️, 😱...)
  - boosters (*very*, *really*...)
  - negators (*don't*, *not*...).
  """

  alias Veritaserum.Evaluator

  @doc """
  Returns a sentiment value for the given text (or for each text in a list).

      iex> Veritaserum.analyze(["I ❤️ Veritaserum", "Veritaserum is really awesome"])
      [3, 5]

      iex> Veritaserum.analyze("I love Veritaserum")
      3
  """
  @spec analyze(list(String.t()) | String.t()) :: list(integer) | integer
  def analyze(input) when is_list(input) do
    input
    |> Stream.map(&analyze/1)
    |> Enum.to_list()
  end

  def analyze(input) do
    {score, _} = analyze(input, return: :score_and_marks)
    score
  end

  @doc """
  Returns the sentiment score together with the list of per-word marks
  (`{type, value, word}` tuples) that produced it.
  """
  @spec analyze(String.t(), return: :score_and_marks) :: {number(), [{atom, number, String.t()}]}
  def analyze(input, return: :score_and_marks) do
    list_with_marks = get_list_with_marks(input)
    score = get_score(list_with_marks)
    {score, list_with_marks}
  end

  # Sum the contribution of every marked word.
  defp get_score(words) do
    words
    |> analyze_list
    |> Enum.sum()
  end

  # Normalize the text and tag each word, preserving the original word order.
  defp get_list_with_marks(input) do
    input
    |> clean
    |> String.split()
    |> mark_list
    |> Enum.reverse()
  end

  # Tag a word as :negator, :booster, :emoticon, :word or :neutral along with
  # its value; the first evaluator that recognizes the word wins.
  defp mark_word(word) do
    with {_, nil, _} <- {:negator, Evaluator.evaluate_negator(word), word},
         {_, nil, _} <- {:booster, Evaluator.evaluate_booster(word), word},
         {_, nil, _} <- {:emoticon, Evaluator.evaluate_emoticon(word), word},
         {_, nil, _} <- {:word, Evaluator.evaluate_word(word), word},
         do: {:neutral, 0, word}
  end

  # Empty-input clause added: String.split/1 returns [] for whitespace-only
  # text, which previously crashed here with a FunctionClauseError.
  defp mark_list([]), do: []

  defp mark_list([head | tail]) do
    mark_list(tail, [mark_word(head)])
  end

  defp mark_list([head | tail], result) do
    mark_list(tail, [mark_word(head) | result])
  end

  defp mark_list([], result), do: result

  # Only words and emoticons contribute directly to the score.
  defp analyze_mark({type, score, _}) do
    case type do
      :word -> score
      :emoticon -> score
      _ -> 0
    end
  end

  # A preceding negator flips the sign; a preceding booster amplifies it.
  defp analyze_mark(mark, previous) do
    case previous do
      {:negator, _, _} ->
        -analyze_mark(mark)

      {:booster, booster_value, _} ->
        analyze_mark(mark) |> apply_booster(booster_value)

      _ ->
        analyze_mark(mark)
    end
  end

  defp analyze_list([head | tail]) do
    analyze_list(tail, head, [analyze_mark(head)])
  end

  defp analyze_list([head | tail], previous, result) do
    analyze_list(tail, head, [analyze_mark(head, previous) | result])
  end

  defp analyze_list([], _, result), do: result

  # Boosters push the score away from zero; neutral (0) words are unaffected.
  defp apply_booster(word_value, booster) when word_value > 0, do: word_value + booster
  defp apply_booster(word_value, booster) when word_value < 0, do: word_value - booster
  defp apply_booster(word_value, _booster), do: word_value

  # Lowercase, strip punctuation and pad emoticons with spaces so that
  # String.split/1 isolates every token.
  defp clean(text) do
    text
    |> String.replace(~r/\n/, " ")
    |> String.downcase()
    |> String.replace(~r/[.,\/#!$%\^&\*;:{}=_`\"~()]/, " ")
    |> :binary.replace(Evaluator.emoticon_list(), " ", insert_replaced: 1)
    |> String.replace(~r/ {2,}/, " ")
  end
end
|
lib/veritaserum.ex
| 0.715821
| 0.49231
|
veritaserum.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.