code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule GerryCipher do
  @moduledoc """
  An encoder based on the [Ugly Gerry typeface](https://twitter.com/UglyGerry/status/1153661354462588929).

  Upper and lower case letters map to a two-letter US state abbreviation (in
  upper and lower case, respectively) followed by a two-digit district number.
  When a letter was drawn from multiple districts, the left/top district is the
  code for the uppercase letter and the right/bottom one the code for the
  lowercase letter. Since 'M' and 'W' are reflections of each other, the codes
  for 'W' (80YN) and 'w' (80yn) are the reverse of 'M' (NY08) and 'm' (ny08).

  Ugly Gerry provides no digits, so the ten largest congressional districts by
  area stand in for 1..9,0. The '+' and '/' seen in base64 encoding become
  Puerto Rico and Washington DC, the two longest non-voting representatives.
  """

  # Codepoint -> four-character district code for the full base64 alphabet.
  @char_map %{
    ?A => "CA03", ?a => "tx35", ?1 => "AK01",
    ?B => "OH12", ?b => "oh07", ?2 => "MN01",
    ?C => "CT01", ?c => "ct01", ?3 => "WY01",
    ?D => "MO08", ?d => "mo08", ?4 => "SD01",
    ?E => "MO06", ?e => "mo06", ?5 => "NM02",
    ?F => "OR05", ?f => "or05", ?6 => "OR02",
    ?G => "OH04", ?g => "oh04", ?7 => "ND01",
    ?H => "NC06", ?h => "nc06", ?8 => "NE03",
    ?I => "TX15", ?i => "tx15", ?9 => "TX23",
    ?J => "IL18", ?j => "il18", ?0 => "NV02",
    ?K => "AL01", ?k => "al01", ?+ => "PR01",
    ?L => "NY07", ?l => "ny07", ?/ => "DC01",
    ?M => "NY08", ?m => "ny08",
    ?N => "IL11", ?n => "il11",
    ?O => "AZ06", ?o => "az06",
    ?P => "FL25", ?p => "fl25",
    ?Q => "TX12", ?q => "tx12",
    ?R => "OH10", ?r => "mi13",
    ?S => "TN04", ?s => "tn04",
    ?T => "CA43", ?t => "ca43",
    ?U => "IL04", ?u => "il04",
    ?V => "NJ05", ?v => "nj05",
    ?W => "80YN", ?w => "80yn",
    ?X => "CA08", ?x => "ca14",
    ?Y => "IL12", ?y => "il12",
    ?Z => "IN08", ?z => "oh08"
  }

  @doc """
  Encode raw binary data in Gerry Cipher.

  The input is first base64-encoded (without padding); every character of the
  base64 alphabet has an entry in the district-code table.

  ## Examples
      iex> GerryCipher.encode_bytes("Hello world")
      "TN04OH04NJ05tn04oh07OH04NE03oh04mo08MN01TX23il12oh07OH04TX12"
  """
  def encode_bytes(raw) do
    raw
    |> Base.encode64(padding: false)
    |> to_charlist()
    |> Enum.map_join(&Map.get(@char_map, &1))
  end

  @doc """
  Encode alpha-numeric strings in Gerry Cipher.

  Characters without a district code (e.g. spaces) pass through unchanged.

  ## Examples
      iex> GerryCipher.encode_alpha_numeric("Attack at dawn")
      "CA03ca43ca43tx35ct01al01 tx35ca43 mo08tx3580ynil11"
  """
  def encode_alpha_numeric(text) do
    text
    |> to_charlist()
    |> Enum.map_join(fn char -> Map.get(@char_map, char, [char]) end)
  end
end
|
lib/gerry_cipher.ex
| 0.690455
| 0.579311
|
gerry_cipher.ex
|
starcoder
|
defmodule ExAdmin.CSV do
  @moduledoc """
  ExAdmin provides a CSV export link on the index page of each resource.
  The CSV file format can be customized with the `csv` macro.

  For example, give the following ecto model for Example.Contact:

      defmodule Example.Contact do
        use Ecto.Model
        schema "contacts" do
          field :first_name, :string, default: ""
          field :last_name, :string, default: ""
          field :email, :string, default: ""
          belongs_to :category, Example.Category
          has_many :contacts_phone_numbers, Example.ContactPhoneNumber
          has_many :phone_numbers, through: [:contacts_phone_numbers, :phone_number]
          has_many :contacts_groups, Example.ContactGroup
          has_many :groups, through: [:contacts_groups, :group]
        end
        ...
      end

  The following resource file will export the contact list as shown below:

      defmodule Example.ExAdmin.Contact do
        use ExAdmin.Register
        alias Example.PhoneNumber
        register_resource Example.Contact do
          csv [
            {"Surname", &(&1.last_name)},
            {:category, &(&1.category.name)},
            {"Groups", &(Enum.map(&1.groups, fn g -> g.name end) |> Enum.join("; "))},
          ] ++
            (for label <- PhoneNumber.all_labels do
              fun = fn c ->
                c.phone_numbers
                |> PhoneNumber.find_by_label(label)
                |> Map.get(:number, "")
              end
              {label, fun}
            end)
        end
      end

      # output.csv
      Surname,Given,Category,Groups,Home Phone,Business Phone,Mobile Phone
      Pallen,Steve,R&D,Groop 1;Groop2,555-555-5555,555,555,1234

  The macros available in the csv do block include

  * `column` - Define a column in the exported CSV file

  ## Examples

      # List format
      csv [:name, :description]

      # List format with functions
      csv [:id, {:name, fn item -> "Mr. " <> item.name end}, :description]

      # No header
      csv header: false do
        column :id
        column :name
      end

      # Don't humanize the header name
      csv [:name, :created_at], humanize: false
  """
  require Logger

  # Using this module imports the `csv/1` macro into the caller.
  defmacro __using__(_opts \\ []) do
    quote do
      import unquote(__MODULE__), only: [csv: 1]
    end
  end

  @doc """
  Customize the exported CSV file.
  """
  defmacro csv(opts \\ [], block \\ [])
  defmacro csv(block_or_opts, block) do
    # The macro may be called as `csv schema` / `csv schema, opts` or as
    # `csv opts do ... end`. Swap the arguments so `block` always holds the
    # schema (list or do-block) and `opts` the option keyword list.
    {block, opts} = if block == [], do: {block_or_opts, block}, else: {block, block_or_opts}
    quote location: :keep do
      import ExAdmin.Register, except: [column: 1, column: 2]
      import unquote(__MODULE__)
      # Injected into the resource module; builds the CSV for the given
      # resources when the export is requested.
      def build_csv(resources) do
        # `column/2` invocations inside the block prepend entries to this
        # hygienic accumulator (shared through the ExAdmin.CSV context).
        var!(columns, ExAdmin.CSV) = []
        unquote(block)
        case var!(columns, ExAdmin.CSV) do
          [] ->
            # No `column` calls were made, so the block itself is expected to
            # evaluate to a schema (a list of atoms / {name, fun} tuples).
            unquote(block)
            |> ExAdmin.CSV.build_csv(resources, unquote(opts))
          schema ->
            # `column` prepends, so reverse to restore declaration order.
            schema
            |> Enum.reverse
            |> ExAdmin.CSV.build_csv(resources, unquote(opts))
        end
      end
    end
  end

  @doc """
  Configure a column in the exported CSV file.
  ## Examples
      csv do
        column :id
        column :name, fn user -> "#\{user.first_name} #\{user.last_name}" end
        column :age
      end
  """
  defmacro column(field, fun \\ nil) do
    quote do
      # Prepend this column onto the accumulator set up by `csv/2`.
      entry = %{field: unquote(field), fun: unquote(fun)}
      var!(columns, ExAdmin.CSV) = [entry | var!(columns, ExAdmin.CSV)]
    end
  end

  @doc false
  def default_schema([]), do: []

  @doc false
  # Derive a default schema from the Ecto schema fields of the first resource.
  def default_schema([resource | _]) do
    resource.__struct__.__schema__(:fields)
    |> Enum.map(&(build_default_column(&1)))
  end

  @doc false
  def build_default_column(name) do
    %{field: name, fun: nil}
  end

  @doc false
  # Render the header row plus one row per resource and hand the result to
  # CSVLixir for encoding. Rows are accumulated by prepending, then reversed.
  def build_csv(schema, resources, opts) do
    schema = normalize_schema schema
    Enum.reduce(resources, build_header_row(schema, opts), &(build_row(&2, &1, schema)))
    |> Enum.reverse
    |> CSVLixir.write
  end

  # Fallback used when no custom schema was supplied.
  def build_csv(resources) do
    default_schema(resources)
    |> build_csv(resources, [])
  end

  # Accept schema entries given as {name, fun} tuples, bare atoms, or
  # ready-made maps, normalizing them all to %{field: _, fun: _}.
  defp normalize_schema(schema) do
    Enum.map schema, fn
      {name, fun} -> %{field: name, fun: fun}
      name when is_atom(name) -> %{field: name, fun: nil}
      map -> map
    end
  end

  @doc false
  # Returns `[header_row]` unless the `:header` option is false, in which
  # case the CSV starts empty. `:humanize` (default true) prettifies names.
  def build_header_row(schema, opts) do
    if Keyword.get(opts, :header, true) do
      humanize? = Keyword.get(opts, :humanize, true)
      [(for field <- schema, do: column_name(field[:field], humanize?))]
    else
      []
    end
  end

  defp column_name(field, true), do: ExAdmin.Utils.humanize(field)
  defp column_name(field, _), do: Atom.to_string(field)

  @doc false
  # Prepend the rendered row for `resource` onto `acc`; the caller reverses
  # the accumulated rows once at the end.
  def build_row(acc, resource, schema) do
    row = Enum.reduce(schema, [], fn
      %{field: name, fun: nil}, acc ->
        # No custom function: read the field straight off the resource.
        [(Map.get(resource, name) |> ExAdmin.Render.to_string) | acc]
      %{field: _name, fun: fun}, acc ->
        # Custom function: it receives the whole resource.
        [(fun.(resource) |> ExAdmin.Render.to_string) | acc]
    end)
    |> Enum.reverse
    [row | acc]
  end

  @doc false
  def write_csv(csv) do
    csv
    |> CSVLixir.write
  end
end
|
lib/ex_admin/csv.ex
| 0.802246
| 0.430925
|
csv.ex
|
starcoder
|
defmodule Carmen.Object.Worker do
  @moduledoc false
  use GenStateMachine

  # Callback module implementing persistence/eventing for objects.
  # NOTE(review): `Application.get_env/3` in a module attribute resolves the
  # interface at compile time, freezing the configuration — confirm intended.
  @interface Application.get_env(:carmen, :interface, Carmen.InterfaceExample)

  defmodule Data do
    @moduledoc false
    # id/shape identify the tracked object; `inters` is the list of zone ids
    # the object currently intersects; `processed` counts updates handled
    # since the last persisted save.
    defstruct [:id, :shape, inters: [], meta: %{}, processed: 0]
  end

  def start_link({id, opts}) do
    GenStateMachine.start_link(__MODULE__, id, opts)
  end

  # Start in :starting and schedule a near-immediate state timeout that loads
  # the persisted object state; calls arriving earlier are postponed.
  def init(id) do
    actions = [{:state_timeout, 10, :load_object_state}]
    {:ok, :starting, %Data{id: id}, actions}
  end

  # Load persisted state. If nothing is found, the machine's data becomes the
  # bare atom :not_found, which dedicated clauses below match on.
  def handle_event(:state_timeout, :load_object_state, :starting, %Data{id: id}) do
    case apply(@interface, :load_object_state, [id]) do
      {shape, inters, meta} ->
        {:next_state, :running, %Data{id: id, shape: shape, inters: inters, meta: meta}}
      _ ->
        {:next_state, :running, :not_found}
    end
  end

  # Replace the entire object state (regardless of current state) and move to
  # :running.
  def handle_event({:call, from}, {:put_state, {id, {shape, inters, meta}}}, _state, _data) do
    actions = [{:reply, from, :ok}]
    {:next_state, :running, %Data{id: id, shape: shape, inters: inters, meta: meta}, actions}
  end

  # Any other call arriving before the load finished is postponed until the
  # machine leaves :starting.
  def handle_event({:call, _from}, _, :starting, _data) do
    {:keep_state_and_data, [:postpone]}
  end

  # Main update path: recompute zone intersections for the new shape, emit
  # enter/exit events, and pick a persistence strategy based on the
  # interface's sync settings.
  def handle_event(
        {:call, from},
        {:update, id, shape, new_meta},
        :running,
        %Data{inters: inters, meta: old_meta, processed: processed} = data
      ) do
    # Accept the update when meta was omitted or the interface validates it.
    if (id && new_meta == :omitted) || (id && apply(@interface, :valid?, [new_meta, old_meta])) do
      new_inters = Carmen.Zone.Store.intersections(shape)
      # Zones newly entered / just left, by list difference.
      enters = new_inters -- inters
      exits = inters -- new_inters
      {:ok, meta} = apply(@interface, :events, [id, shape, enters, exits, new_meta, old_meta])
      data = %Data{data | shape: shape, inters: new_inters, meta: meta, processed: processed + 1}
      cond do
        apply(@interface, :sync_after_count, []) == :every ->
          # Persist synchronously on every update.
          :ok = apply(@interface, :save_object_state, [id, shape, inters, meta])
          actions = [
            {:reply, from, {enters, exits}},
            {:state_timeout, apply(@interface, :die_after_ms, []), :shutdown}
          ]
          {:keep_state, data, actions}
        processed + 1 >= apply(@interface, :sync_after_count, []) ->
          # Count threshold reached: save via an internal event (and cancel
          # any pending save timeout by resetting it to :infinity).
          actions = [
            {:reply, from, {enters, exits}},
            {:timeout, :infinity, :saving_object_state},
            {:next_event, :internal, :saving_object_state},
            {:state_timeout, apply(@interface, :die_after_ms, []), :shutdown}
          ]
          {:keep_state, data, actions}
        true ->
          # Below threshold: (re)arm a save timeout and the idle shutdown.
          actions = [
            {:reply, from, {enters, exits}},
            {:state_timeout, apply(@interface, :die_after_ms, []), :shutdown},
            {:timeout, apply(@interface, :sync_after_ms, []), :saving_object_state}
          ]
          {:keep_state, data, actions}
      end
    else
      # Invalid update per the interface: reply :dropped, change nothing.
      {:keep_state_and_data, [{:reply, from, :dropped}]}
    end
  end

  # Updates against an object whose state could not be loaded.
  def handle_event({:call, from}, {:update, _, _, _}, :running, :not_found) do
    {:keep_state_and_data, [{:reply, from, :not_found}]}
  end

  def handle_event({:call, from}, {:intersecting?, zone_id}, _state, %Data{inters: inters}) do
    {:keep_state_and_data, [{:reply, from, Enum.member?(inters, zone_id)}]}
  end

  def handle_event({:call, from}, :get_meta, _state, %Data{meta: meta}) do
    {:keep_state_and_data, [{:reply, from, meta}]}
  end

  def handle_event({:call, from}, :get_shape, _state, %Data{shape: shape}) do
    {:keep_state_and_data, [{:reply, from, shape}]}
  end

  # Persist the current state; triggered either by the internal event (count
  # threshold) or by the sync timeout. Resets the processed counter.
  def handle_event(event, :saving_object_state, _state, %Data{id: id, shape: shape, inters: inters, meta: meta} = data)
      when event in [:internal, :timeout] do
    :ok = apply(@interface, :save_object_state, [id, shape, inters, meta])
    {:next_state, :running, %{data | processed: 0}}
  end

  # Idle shutdown triggered by the state timeout armed in the update path.
  def handle_event(:state_timeout, :shutdown, _state, data) do
    shutdown(data)
  end

  def handle_event(:info, :shutdown, _state, data) do
    shutdown(data)
  end

  # either I'm missing something obvious or there's a bug in gen_statem because timeouts should show
  # up as a gen_statem event and be caught by the function above but occasionally this :info shows up
  def handle_event(:info, {:timeout, _, :shutdown}, _state, data) do
    shutdown(data)
  end

  # Catch-alls for the :not_found data: reply :not_found to calls, drop
  # everything else.
  def handle_event({:call, from}, _, _, :not_found) do
    {:keep_state_and_data, [{:reply, from, :not_found}]}
  end

  def handle_event(_, _, _, :not_found) do
    :keep_state_and_data
  end

  # Any remaining messages are delegated to the interface module.
  def handle_event({:call, from}, msg, _state, data), do: apply(@interface, :handle_msg, [{:call, from}, msg, data])
  def handle_event(:cast, msg, _state, data), do: apply(@interface, :handle_msg, [:cast, msg, data])
  def handle_event(:info, msg, _state, data), do: apply(@interface, :handle_msg, [:info, msg, data])

  # Persist a final snapshot, then stop the state machine.
  defp shutdown(%Data{id: id, shape: shape, inters: inters, meta: meta}) do
    :ok = apply(@interface, :save_object_state, [id, shape, inters, meta])
    :stop
  end
end
|
lib/carmen/object/object_worker.ex
| 0.574992
| 0.410166
|
object_worker.ex
|
starcoder
|
defmodule Advent2019Web.Day03Controller do
  use Advent2019Web, :controller

  @doc """
  Calculate the list of the segments from the path description.
  Segments are defined as a map of 5 values: x1, y1, x2, y2 and distance.
  `distance` is the distance traveled across this path from the origin until the
  beginning of this segment, which is the coordinate (x1, y1).
  The origin is at 0, 0 so the values can be negative.
  The coordinates are defined using cartesian plane axes.
  """
  def segments_from_path(path) do
    Enum.reduce(path, %{position: {0, 0}, distance: 0, order: 0, segments: []}, fn mov, acc ->
      # Each movement is a direction letter followed by a step count, e.g. "R75".
      direction = String.at(mov, 0)
      step = String.slice(mov, 1..-1) |> String.to_integer()
      # current position, the "head" of the circuit so far
      cur_x = elem(acc[:position], 0)
      cur_y = elem(acc[:position], 1)

      # Offset X and Y for each direction.
      # (Fixed: the "D" branch previously contained a stray dead literal `2`.)
      {off_x, off_y} =
        case direction do
          "U" -> {0, step}
          "D" -> {0, -step}
          "L" -> {-step, 0}
          "R" -> {step, 0}
        end

      %{
        position: {cur_x + off_x, cur_y + off_y},
        distance: acc[:distance] + abs(off_x) + abs(off_y),
        order: acc[:order] + 1,
        segments:
          acc[:segments] ++
            [
              %{
                x1: cur_x,
                y1: cur_y,
                x2: cur_x + off_x,
                y2: cur_y + off_y,
                distance_in_path: acc[:distance],
                order: acc[:order]
              }
            ]
      }
    end)[:segments]
  end

  @doc """
  Calculate the intersection between two segments which are assumed to
  be vertical or horizontal and have at most 1 point in common.
  If they don't, return nil.
  It's quite verbose, probably there's a nicer way but didn't look into it.
  """
  def ortho_segment_intersection(segment_a, segment_b) do
    %{:x1 => x1a, :x2 => x2a, :y1 => y1a, :y2 => y2a} = segment_a
    %{:x1 => x1b, :x2 => x2b, :y1 => y1b, :y2 => y2b} = segment_b
    # Normalized bounding ranges of each segment on each axis.
    {min_ya, max_ya} = Enum.min_max([y1a, y2a])
    {min_xb, max_xb} = Enum.min_max([x1b, x2b])
    {min_xa, max_xa} = Enum.min_max([x1a, x2a])
    {min_yb, max_yb} = Enum.min_max([y1b, y2b])

    case {x1a, y1a, x2a, y2a, x1b, y1b, x2b, y2b} do
      # a is vertical, b horizontal
      {x1a, _, x2a, _, _, y1b, _, y2b} when x1a == x2a and y1b == y2b ->
        if min_xb <= x1a and x1a <= max_xb and
             min_ya <= y1b and y1b <= max_ya do
          %{x: x1a, y: y1b}
        else
          nil
        end

      # a is horizontal, b vertical
      {_, y1a, _, y2a, x1b, _, x2b, _} when x1b == x2b and y1a == y2a ->
        if min_xa <= x1b and x1b <= max_xa and
             min_yb <= y1a and y1a <= max_yb do
          %{x: x1b, y: y1a}
        else
          nil
        end

      # special case, same vertical line and one ends when the other starts
      {x1a, _, x2a, _, x1b, _, x2b, _} when x1b == x2b and x1a == x2a and x1a == x1b ->
        # a is before b except the intersection
        if min_yb == max_ya and
             max_yb != max_ya do
          %{x: x1b, y: min_yb}
        else
          # a is after b except the intersection
          if min_ya == max_yb and
               max_ya != max_yb do
            %{x: x1b, y: min_ya}
          else
            nil
          end
        end

      # special case, but horizontal
      {_, y1a, _, _, _, y1b, _, _} when y1b == y2b and y1a == y2a and y1b == y1a ->
        # a is before b except the intersection
        if min_xb == max_xa and
             max_xb != max_xa do
          %{x: min_xb, y: y1b}
        else
          # a is after b except the intersection
          if min_xa == max_xb and
               max_xa != max_xb do
            %{x: min_xa, y: y1b}
          else
            nil
          end
        end

      _ ->
        nil
    end
  end

  @doc """
  Calculate the coordinates of every intersection between paths.
  The two paths are defined as a list of segments, every segment is a map
  containing x1, y1, x2, y2.
  Returns a map containing coords (coordinates of the intersection) and the
  two segments sa and sb.
  """
  def intersections_from_segments(segments_a, segments_b) do
    for sa <- segments_a, sb <- segments_b do
      int_coord = ortho_segment_intersection(sa, sb)

      if int_coord == nil do
        nil
      else
        int = Map.merge(int_coord, %{sa: sa, sb: sb})
        Map.merge(int, %{distance_sum: partial_distance(int)})
      end
    end
    |> Enum.filter(fn x -> x != nil end)
  end

  @doc """
  Calculate the distance of an intersection from the origin, across the path.
  The intersection already contains the two segments with their initial
  distance. This function adds the offset from that initial distance to the
  actual intersection.
  """
  def partial_distance(intersection) do
    intersection[:sa][:distance_in_path] + intersection[:sb][:distance_in_path] +
      abs(intersection[:sa][:x1] - intersection[:x]) +
      abs(intersection[:sa][:y1] - intersection[:y]) +
      abs(intersection[:sb][:x1] - intersection[:x]) +
      abs(intersection[:sb][:y1] - intersection[:y])
  end

  @doc """
  Part 1: find the intersection closest to the origin by Manhattan distance
  (excluding the origin itself) and return it along with the full geometry.
  """
  def solve1(conn, params) do
    segments_a = segments_from_path(params["a"])
    segments_b = segments_from_path(params["b"])
    intersections = intersections_from_segments(segments_a, segments_b)

    closest =
      intersections
      |> Enum.filter(fn %{:x => x, :y => y} -> x != 0 or y != 0 end)
      |> Enum.min_by(fn %{:x => x, :y => y} -> abs(x) + abs(y) end)

    json(conn, %{
      result: abs(closest[:x]) + abs(closest[:y]),
      segments_a: segments_a,
      segments_b: segments_b,
      intersections: intersections,
      closest: closest
    })
  end

  @doc """
  Part 2: find the intersection with the smallest combined travel distance
  along both wires (excluding the origin) and return it with the geometry.
  """
  def solve2(conn, params) do
    segments_a = segments_from_path(params["a"])
    segments_b = segments_from_path(params["b"])
    intersections = intersections_from_segments(segments_a, segments_b)

    closest =
      intersections
      |> Enum.filter(fn %{:x => x, :y => y} -> x != 0 or y != 0 end)
      |> Enum.min_by(&partial_distance/1)

    json(conn, %{
      result: closest[:distance_sum],
      segments_a: segments_a,
      segments_b: segments_b,
      intersections: intersections,
      closest: closest
    })
  end
end
|
lib/advent2019_web/controllers/day03_controller.ex
| 0.790652
| 0.722796
|
day03_controller.ex
|
starcoder
|
defmodule XDR.Optional do
  @moduledoc """
  This module manages the `Optional-Data` type based on the RFC4506 XDR Standard.
  """
  @behaviour XDR.Declaration

  alias XDR.{Bool, OptionalError}

  defstruct [:type]

  @typedoc """
  `XDR.Optional` structure type specification.
  """
  @type t :: %XDR.Optional{type: any()}

  @doc """
  Create a new `XDR.Optional` structure with the `type` passed.
  """
  @spec new(type :: any()) :: t()
  def new(type), do: %XDR.Optional{type: type}

  @doc """
  Encode a `XDR.Optional` structure into a XDR format.
  """
  @impl true
  # Raw (non-struct) values are rejected: the wrapped value must be an XDR
  # type struct that can encode itself.
  def encode_xdr(%{type: type}) when is_bitstring(type), do: {:error, :not_valid}
  def encode_xdr(%{type: type}) when is_list(type), do: {:error, :not_valid}
  def encode_xdr(%{type: type}) when is_tuple(type), do: {:error, :not_valid}
  def encode_xdr(%{type: type}) when is_boolean(type), do: {:error, :not_valid}
  # Absent value: encoded as just the boolean discriminant `false`.
  def encode_xdr(%{type: nil}), do: false |> Bool.new() |> Bool.encode_xdr()
  # Present value: discriminant `true` followed by the value's own encoding,
  # delegated to the wrapped struct's module.
  def encode_xdr(%{type: type}) do
    module = type.__struct__
    encoded_value = module.encode_xdr!(type)
    bool = true |> Bool.new() |> Bool.encode_xdr!()
    {:ok, bool <> encoded_value}
  end

  @doc """
  Encode a `XDR.Optional` structure into a XDR format.
  If the `optional` is not valid, an exception is raised.
  """
  @impl true
  def encode_xdr!(optional) do
    case encode_xdr(optional) do
      {:ok, binary} -> binary
      {:error, reason} -> raise(OptionalError, reason)
    end
  end

  @doc """
  Decode the Optional-Data in XDR format to a `XDR.Optional` structure.
  """
  @impl true
  def decode_xdr(bytes, _optional) when not is_binary(bytes), do: {:error, :not_binary}
  # The declared inner type must be a module (atom) able to decode the value.
  def decode_xdr(_bytes, %{type: type}) when not is_atom(type), do: {:error, :not_module}
  def decode_xdr(bytes, %{type: type}) do
    # Read the leading boolean discriminant, then decode the value (or not).
    {bool, rest} = Bool.decode_xdr!(bytes)
    get_decoded_value(bool.identifier, rest, type)
  end

  @doc """
  Decode the Optional-Data in XDR format to a `XDR.Optional` structure.
  If the binaries are not valid, an exception is raised.
  """
  @impl true
  def decode_xdr!(bytes, optional) do
    case decode_xdr(bytes, optional) do
      {:ok, result} -> result
      {:error, reason} -> raise(OptionalError, reason)
    end
  end

  @spec get_decoded_value(has_optional_value :: boolean(), rest :: binary(), type :: atom()) ::
          {:ok, {t, binary()}} | {:ok, {nil, binary()}}
  # Discriminant true: delegate decoding of the payload to the type module.
  defp get_decoded_value(true, rest, type) do
    {decoded_type, rest} = type.decode_xdr!(rest)
    optional = new(decoded_type)
    {:ok, {optional, rest}}
  end

  # Discriminant false: no payload follows; the optional is nil.
  defp get_decoded_value(false, rest, _type), do: {:ok, {nil, rest}}
end
|
lib/xdr/optional.ex
| 0.920388
| 0.5564
|
optional.ex
|
starcoder
|
defmodule Trunk.VersionState do
  @moduledoc """
  This module defines a `Trunk.VersionState` struct and provides some helper functions for working with that state.

  Most of these fields are used internally during processing.

  ## Fields
  The following fields are available in the version state object. Some values are filled in during processing.
  - `temp_path` - The path to the temporary file created for transformation. If the version doesn't undergo transformation, no temporary path will be available.
  - `transform` - The transform instruction returned from `c:Trunk.transform/2`
  - `storage_dir` - The storage directory returned from `c:Trunk.storage_dir/2`
  - `filename` - The filename returned from `c:Trunk.filename/2`
  - `storage_opts` - The additional storage options returned from `c:Trunk.storage_opts/2`
  - `assigns` - shared user data as a map (Same as assigns in `Plug.Conn`)

  ## Usage
  This information is made available during `c:Trunk.postprocess/3` which is called once the transformation is complete but before the storage callbacks are called. At this point you can work with the transformed version file and assign data that can be used later when determining the storage directory, filename and storage options.
  """

  defstruct temp_path: nil,
            transform: nil,
            assigns: %{},
            storage_dir: nil,
            filename: nil,
            storage_opts: []

  # Fields default to nil until filled in during processing, so their types
  # include nil (previously the typespec claimed they were always strings).
  @type t :: %__MODULE__{
          temp_path: String.t() | nil,
          transform: any,
          assigns: map,
          storage_dir: String.t() | nil,
          filename: String.t() | nil,
          storage_opts: Keyword.t()
        }

  @doc ~S"""
  Assigns a value to a key on the state.

  Returns a new state struct with `key` set to `value` in its `assigns` map;
  an existing value under `key` is replaced.

  ## Example:
  ```
  iex> version_state.assigns[:hello]
  nil
  iex> version_state = assign(version_state, :hello, :world)
  iex> version_state.assigns[:hello]
  :world
  ```
  """
  # Fixed: the return spec previously said `map`; the function returns an
  # updated %Trunk.VersionState{} struct.
  @spec assign(state :: Trunk.VersionState.t(), key :: any, value :: any) :: t()
  def assign(%{assigns: assigns} = version_state, key, value),
    do: %{version_state | assigns: Map.put(assigns, key, value)}
end
|
lib/trunk/version_state.ex
| 0.852859
| 0.863909
|
version_state.ex
|
starcoder
|
defmodule Plaid.Identity do
  @moduledoc """
  [Plaid Identity API](https://plaid.com/docs/api/products/#identity) calls and schema.
  """
  @behaviour Plaid.Castable

  alias Plaid.Castable
  alias Plaid.Identity.{Address, Email, PhoneNumber}

  @type t :: %__MODULE__{
          addresses: [Address.t()],
          emails: [Email.t()],
          names: [String.t()],
          phone_numbers: [PhoneNumber.t()]
        }

  defstruct [:addresses, :emails, :names, :phone_numbers]

  # Builds an Identity struct from a decoded JSON map (string keys), casting
  # each nested list into its own schema struct.
  @impl true
  def cast(generic_map) do
    %__MODULE__{
      addresses: Castable.cast_list(Address, generic_map["addresses"]),
      emails: Castable.cast_list(Email, generic_map["emails"]),
      names: generic_map["names"],
      phone_numbers: Castable.cast_list(PhoneNumber, generic_map["phone_numbers"])
    }
  end

  defmodule GetResponse do
    @moduledoc """
    [Plaid API /identity/get response schema.](https://plaid.com/docs/api/accounts).
    """
    @behaviour Castable

    alias Plaid.Account
    alias Plaid.Item

    @type t :: %__MODULE__{
            accounts: [Account.t()],
            item: Item.t(),
            request_id: String.t()
          }

    defstruct [:accounts, :item, :request_id]

    # Builds a GetResponse from the decoded `/identity/get` JSON body.
    @impl true
    def cast(generic_map) do
      %__MODULE__{
        accounts: Castable.cast_list(Account, generic_map["accounts"]),
        item: Castable.cast(Item, generic_map["item"]),
        request_id: generic_map["request_id"]
      }
    end
  end

  @doc """
  Get information about all available accounts.

  Does a `POST /identity/get` call to retrieve account information,
  along with the `owners` info for each account associated with an access_token's item.

  Params:
  * `access_token` - Token to fetch identity for.

  Options:
  * `:account_ids` - Specific account ids to fetch identity for.

  ## Examples

      Identity.get("access-sandbox-123xxx", client_id: "123", secret: "abc")
      {:ok, %Identity.GetResponse{}}
  """
  @spec get(String.t(), options, Plaid.config()) ::
          {:ok, GetResponse.t()} | {:error, Plaid.Error.t()}
        when options: %{optional(:account_ids) => [String.t()]}
  # Note: `options` has a default but is the middle argument, so a two-arity
  # call `get(access_token, config)` passes the config keyword list as the
  # second argument and `options` defaults to %{}.
  def get(access_token, options \\ %{}, config) do
    # Only the supported option keys are forwarded to the API payload.
    options_payload = Map.take(options, [:account_ids])
    payload =
      %{}
      |> Map.put(:access_token, access_token)
      |> Map.put(:options, options_payload)
    Plaid.Client.call("/identity/get", payload, GetResponse, config)
  end
end
|
lib/plaid/identity.ex
| 0.872504
| 0.410609
|
identity.ex
|
starcoder
|
defmodule Broadway.Options do
  @moduledoc false

  # Returns the options schema used to validate the keyword list given to
  # `Broadway.start_link/2`. Each entry may carry `:type`, `:required`,
  # `:default`, documentation metadata (`:doc`, `:subsection`) and nested
  # `:keys` schemas (`*` stands for "any key" in nested keyword lists).
  def definition() do
    [
      name: [
        required: true,
        # Validated by validate_name/1 below (atom or {:via, module, term}).
        type: {:custom, __MODULE__, :validate_name, []},
        doc: """
        Used for name registration. When an atom, all processes/stages
        created will be named using this value as prefix.
        """
      ],
      shutdown: [
        type: :pos_integer,
        default: 30000,
        doc: """
        Optional. The time in milliseconds given for Broadway to
        gracefully shutdown without discarding events. Defaults to `30_000`(ms).
        """
      ],
      max_restarts: [type: :non_neg_integer, default: 3],
      max_seconds: [type: :pos_integer, default: 5],
      resubscribe_interval: [
        type: :non_neg_integer,
        default: 100,
        doc: """
        The interval in milliseconds that
        processors wait until they resubscribe to a failed producers. Defaults
        to `100`(ms).
        """
      ],
      context: [
        type: :any,
        default: :context_not_set,
        doc: """
        A user defined data structure that will be passed to handle_message/3 and handle_batch/4.
        """
      ],
      producer: [
        required: true,
        type: :non_empty_keyword_list,
        doc: """
        A keyword list of options. See ["Producers options"](#start_link/2-producers-options)
        section below. Only a single producer is allowed.
        """,
        subsection: """
        ### Producers options

        The producer options allow users to set up the producer.

        The available options are:
        """,
        keys: [
          module: [
            required: true,
            type: :mod_arg,
            doc: """
            A tuple representing a GenStage producer.
            The tuple format should be `{mod, arg}`, where `mod` is the module
            that implements the GenStage behaviour and `arg` the argument that will
            be passed to the `init/1` callback of the producer. See `Broadway.Producer`
            for more information.
            """
          ],
          concurrency: [
            type: :pos_integer,
            default: 1,
            doc: """
            The number of concurrent producers that
            will be started by Broadway. Use this option to control the concurrency
            level of each set of producers. The default value is `1`.
            """
          ],
          transformer: [
            type: :mfa,
            default: nil,
            doc: """
            A tuple representing a transformer that translates a produced GenStage event into a
            `%Broadway.Message{}`. The tuple format should be `{mod, fun, opts}` and the function
            should have the following spec `(event :: term, opts :: term) :: Broadway.Message.t`
            This function must be used sparingly and exclusively to convert regular
            messages into `Broadway.Message`. That's because a failure in the
            `:transformer` callback will cause the whole producer to terminate,
            possibly leaving unacknowledged messages along the way.
            """
          ],
          spawn_opt: [
            type: :keyword_list,
            doc: """
            Overrides the top-level `:spawn_opt`.
            """
          ],
          hibernate_after: [
            type: :pos_integer,
            doc: """
            Overrides the top-level `:hibernate_after`.
            """
          ],
          rate_limiting: [
            type: :non_empty_keyword_list,
            doc: """
            A list of options to enable and configure rate limiting for producing.
            If this option is present, rate limiting is enabled, otherwise it isn't.
            Rate limiting refers to the rate at which producers will forward
            messages to the rest of the pipeline. The rate limiting is applied to
            and shared by all producers within the time limit.
            The following options are supported:
            """,
            keys: [
              allowed_messages: [
                required: true,
                type: :pos_integer,
                doc: """
                An integer that describes how many messages are allowed in the specified interval.
                """
              ],
              interval: [
                required: true,
                type: :pos_integer,
                doc: """
                An integer that describes the interval (in milliseconds)
                during which the number of allowed messages is allowed.
                If the producer produces more than `allowed_messages`
                in `interval`, only `allowed_messages` will be published until
                the end of `interval`, and then more messages will be published.
                """
              ]
            ]
          ]
        ]
      ],
      processors: [
        required: true,
        type: :non_empty_keyword_list,
        doc: """
        A keyword list of named processors where the key is an atom as identifier and
        the value is another keyword list of options.
        See ["Processors options"](#start_link/2-processors-options)
        section below. Currently only a single processor is allowed.
        """,
        subsection: """
        ### Processors options
        """,
        keys: [
          # `*` matches any processor name given by the user.
          *: [
            type: :keyword_list,
            keys: [
              concurrency: [
                type: :pos_integer,
                doc: """
                The number of concurrent process that will
                be started by Broadway. Use this option to control the concurrency level
                of the processors. The default value is `System.schedulers_online() * 2`.
                """
              ],
              min_demand: [
                type: :non_neg_integer,
                doc: """
                Set the minimum demand of all processors
                stages. Default value is `5`.
                """
              ],
              max_demand: [
                type: :non_neg_integer,
                default: 10,
                doc: """
                Set the maximum demand of all processors
                stages. Default value is `10`.
                """
              ],
              partition_by: [
                type: {:fun, 1},
                doc: """
                Overrides the top-level `:partition_by`.
                """
              ],
              spawn_opt: [
                type: :keyword_list,
                doc: """
                Overrides the top-level `:spawn_opt`.
                """
              ],
              hibernate_after: [
                type: :pos_integer,
                doc: """
                Overrides the top-level `:hibernate_after`.
                """
              ]
            ]
          ]
        ]
      ],
      batchers: [
        default: [],
        type: :keyword_list,
        doc: """
        A keyword list of named batchers
        where the key is an atom as identifier and the value is another
        keyword list of options. See ["Batchers options"](#start_link/2-batchers-options)
        section below.
        """,
        subsection: """
        ### Batchers options
        """,
        keys: [
          # `*` matches any batcher name given by the user.
          *: [
            type: :keyword_list,
            keys: [
              concurrency: [
                type: :pos_integer,
                default: 1,
                doc: """
                The number of concurrent batch processors
                that will be started by Broadway. Use this option to control the
                concurrency level. Note that this only sets the numbers of batch
                processors for each batcher group, not the number of batchers.
                The number of batchers will always be one for each batcher key
                defined. The default value is `1`.
                """
              ],
              batch_size: [
                type: :pos_integer,
                default: 100,
                doc: """
                The size of the generated batches. Default value is `100`.
                """
              ],
              batch_timeout: [
                type: :pos_integer,
                default: 1000,
                doc: """
                The time, in milliseconds, that the batcher waits before flushing
                the list of messages. When this timeout is reached, a new batch
                is generated and sent downstream, no matter if the `:batch_size`
                has been reached or not. Default value is `1000` (1 second).
                """
              ],
              partition_by: [
                type: {:fun, 1},
                doc: """
                Optional. Overrides the top-level `:partition_by`.
                """
              ],
              spawn_opt: [
                type: :keyword_list,
                doc: """
                Overrides the top-level `:spawn_opt`.
                """
              ],
              hibernate_after: [
                type: :pos_integer,
                doc: """
                Overrides the top-level `:hibernate_after`.
                """
              ]
            ]
          ]
        ]
      ],
      partition_by: [
        type: {:fun, 1},
        doc: """
        A function that controls how data is
        partitioned across all processors and batchers. It receives a
        `Broadway.Message` and it must return a non-negative integer,
        starting with zero, that will be mapped to one of the existing
        processors. See ["Ordering and Partitioning"](#module-ordering-and-partitioning)
        in the module docs for more information.
        """
      ],
      spawn_opt: [
        type: :keyword_list,
        doc: """
        Low-level options given when starting a
        process. Applies to producers, processors, and batchers.
        See `erlang:spawn_opt/2` for more information.
        """
      ],
      hibernate_after: [
        type: :pos_integer,
        default: 15_000,
        doc: """
        If a process does not receive any message within this interval, it will hibernate,
        compacting memory. Applies to producers, processors, and batchers.
        Defaults to `15_000`(ms).
        """
      ]
    ]
  end

  # Custom validator for the `:name` option (referenced by the
  # `{:custom, __MODULE__, :validate_name, []}` type above): accepts an atom
  # or a `{:via, module, term}` registration tuple.
  def validate_name(name) when is_atom(name), do: {:ok, name}
  def validate_name({:via, module, _term} = via) when is_atom(module), do: {:ok, via}

  def validate_name(name) do
    {:error,
     "expected :name to be an atom or a {:via, module, term} tuple, got: #{inspect(name)}"}
  end
end
|
lib/broadway/options.ex
| 0.865878
| 0.510374
|
options.ex
|
starcoder
|
defmodule Mipha.Topics.Topic do
  @moduledoc false

  use Ecto.Schema
  import Ecto.{Changeset, Query}
  import EctoEnum, only: [defenum: 3]

  alias Mipha.{
    Repo,
    Accounts.User,
    Replies.Reply,
    Stars.Star,
    Collections.Collection
  }

  alias Mipha.Topics.{Topic, Node}

  @type t :: %Topic{}

  # Database-backed enum for the kind of topic.
  defenum(TopicType, :topic_type, [
    :normal,
    :featured,
    :educational,
    :job
  ])

  schema "topics" do
    field :title, :string
    field :body, :string
    field :type, TopicType
    field :closed_at, :naive_datetime
    field :replied_at, :naive_datetime
    field :suggested_at, :naive_datetime
    field :reply_count, :integer, default: 0
    field :visit_count, :integer, default: 0
    field :star_count, :integer, default: 0

    belongs_to :user, User
    belongs_to :node, Node
    belongs_to :last_reply, Reply, foreign_key: :last_reply_id
    belongs_to :last_reply_user, User, foreign_key: :last_reply_user_id

    has_many :replies, Reply
    has_many :stars, Star
    has_many :collections, Collection, on_delete: :delete_all

    timestamps()
  end

  @doc """
  Filters topics of type `:job`.
  """
  @spec job(Ecto.Queryable.t()) :: Ecto.Query.t()
  def job(query \\ __MODULE__),
    do: where(query, [..., t], t.type == ^:job)

  @doc """
  Filters topics of type `:featured`.
  """
  @spec featured(Ecto.Queryable.t()) :: Ecto.Query.t()
  def featured(query \\ __MODULE__),
    do: where(query, [..., t], t.type == ^:featured)

  @doc """
  Filters topics of type `:educational`.
  """
  @spec educational(Ecto.Queryable.t()) :: Ecto.Query.t()
  def educational(query \\ __MODULE__),
    do: where(query, [..., t], t.type == ^:educational)

  @doc """
  Filters topics that have no replies yet.
  """
  @spec no_reply(Ecto.Queryable.t()) :: Ecto.Query.t()
  def no_reply(query \\ __MODULE__),
    do: where(query, [..., t], t.reply_count == 0)

  @doc """
  Filters popular topics (10 or more replies).
  """
  @spec popular(Ecto.Queryable.t()) :: Ecto.Query.t()
  def popular(query \\ __MODULE__),
    do: where(query, [..., t], t.reply_count >= 10)

  @doc """
  Filters topics belonging to the given node.
  """
  @spec by_node(Ecto.Queryable.t(), Node.t()) :: Ecto.Query.t()
  def by_node(query \\ __MODULE__, %Node{id: node_id}),
    do: where(query, [..., t], t.node_id == ^node_id)

  @doc """
  Filters topics authored by the given user.
  """
  @spec by_user(Ecto.Queryable.t(), User.t()) :: Ecto.Query.t()
  def by_user(query \\ __MODULE__, %User{id: user_id}),
    do: where(query, [..., t], t.user_id == ^user_id)

  @doc """
  Filters topics authored by any of the given user ids.
  """
  # Spec fixed: the second argument is a plain list of ids, not `List.t()`
  # (which is not a standard type).
  @spec by_user_ids(Ecto.Queryable.t(), [integer()]) :: Ecto.Query.t()
  def by_user_ids(query \\ __MODULE__, list),
    do: where(query, [..., t], t.user_id in ^list)

  @doc """
  Returns the 10 most recently updated topics.
  """
  # Spec fixed: these composable query helpers take a queryable and return a
  # query, not a `%Topic{}` struct.
  @spec recent(Ecto.Queryable.t()) :: Ecto.Query.t()
  def recent(query \\ __MODULE__),
    do: from(t in query, order_by: [desc: t.updated_at], limit: 10)

  @doc """
  Returns the default sort of the topic list, according to suggested_at && updated_at
  """
  @spec base_order(Ecto.Queryable.t()) :: Ecto.Query.t()
  def base_order(query \\ __MODULE__),
    do: from(t in query, order_by: [asc: t.suggested_at], order_by: [desc: t.updated_at])

  @doc """
  Preloads the author and last-reply author of a topic.
  """
  @spec preload_user(t()) :: t()
  def preload_user(topic), do: Repo.preload(topic, [:user, :last_reply_user])

  @doc """
  Preloads the replies of a topic.
  """
  @spec preload_replies(t()) :: t()
  def preload_replies(topic), do: Repo.preload(topic, :replies)

  @doc """
  Preloads the node of a topic.
  """
  @spec preload_node(t()) :: t()
  def preload_node(topic), do: Repo.preload(topic, :node)

  @doc """
  Preloads all associations of a topic.
  """
  @spec preload_all(t()) :: t()
  def preload_all(topic) do
    topic
    |> preload_replies
    |> preload_user
    |> preload_node
  end

  @doc """
  Atomically increments/decrements a topic counter in the database.

  Returns the result of `Repo.update_all/2`.
  """
  def counter(%Topic{id: topic_id}, :inc, :visit_count) do
    Topic
    |> where([..., t], t.id == ^topic_id)
    |> Repo.update_all(inc: [visit_count: 1])
  end

  def counter(%Topic{id: topic_id}, :inc, :reply_count) do
    Topic
    |> where([..., t], t.id == ^topic_id)
    |> Repo.update_all(inc: [reply_count: 1])
  end

  def counter(%Topic{id: topic_id}, :dec, :reply_count) do
    Topic
    |> where([..., t], t.id == ^topic_id)
    |> Repo.update_all(inc: [reply_count: -1])
  end

  @doc false
  def changeset(topic, attrs) do
    permitted_attrs = ~w(
      title
      body
      closed_at
      user_id
      type
      node_id
      visit_count
      reply_count
      star_count
      last_reply_id
      last_reply_user_id
      replied_at
      suggested_at
    )a

    required_attrs = ~w(
      title
      body
      node_id
      user_id
    )a

    topic
    |> cast(attrs, permitted_attrs)
    |> validate_required(required_attrs)
  end
end
|
lib/mipha/topics/topic.ex
| 0.708011
| 0.404243
|
topic.ex
|
starcoder
|
defmodule Monetized.Math do
  alias Monetized.Money
  alias Decimal

  @moduledoc """
  This module defines arithmetical operations on money.
  All functions in this module take either money structs as
  parameters from which the currency for the result is inferred or
  if you don't care for the currency, any of the `Monetized.Money.make/2`
  supported values.
  A error will be raised if the money structs hold distinct non nil
  values.
  """

  @doc """
  Adds two values and returns a money struct with
  the result.
  ## Examples
      iex> value_one = Monetized.Money.make(10)
      ...> value_two = Monetized.Money.make(20.50)
      ...> Monetized.Math.add(value_one, value_two)
      #Money<30.50>
      iex> five_euros = Monetized.Money.make("€ 5")
      ...> result = Monetized.Math.add(five_euros, 20)
      ...> Monetized.Money.to_string(result, [currency_symbol: true])
      "€ 25.00"
      iex> Monetized.Math.add("£ 100", "£ 1,350.25")
      #Money<1450.25GBP>
  """
  @spec add(Money.t | String.t | integer | float | Decimal.t, Money.t | String.t | integer | float | Decimal.t) :: Money.t
  def add(a, b) do
    a = to_money(a)
    b = to_money(b)
    c = determine_currency(a.currency, b.currency)
    Decimal.add(a.value, b.value) |> Money.make([currency: c])
  end

  @doc """
  Substracts money from money returning a money struct
  with the result.
  ## Examples
      iex> payment_one = Monetized.Money.make(50)
      ...> payment_two = Monetized.Money.make(51, [currency: "EUR"])
      ...> Monetized.Math.sub(payment_one, payment_two)
      #Money<-1.00EUR>
      iex> payment_one = Monetized.Money.make(2000)
      ...> payment_two = Monetized.Money.make(150.25)
      ...> result = Monetized.Math.sub(payment_one, payment_two)
      ...> Monetized.Money.to_string(result)
      "1,849.75"
      iex> result = Monetized.Math.sub(100.50, 200)
      ...> Monetized.Money.to_string(result)
      "-99.50"
      iex> result = Monetized.Math.sub("£ -100", "1,200.00")
      ...> Monetized.Money.to_string(result, [currency_symbol: true])
      "£ -1,300.00"
  """
  @spec sub(Money.t | String.t | integer | float | Decimal.t, Money.t | String.t | integer | float | Decimal.t) :: Money.t
  def sub(a, b) do
    a = to_money(a)
    b = to_money(b)
    c = determine_currency(a.currency, b.currency)

    Decimal.sub(a.value, b.value)
    |> Money.make([currency: c])
  end

  # Coerces any supported value into a `%Monetized.Money{}` struct.
  defp to_money(%Monetized.Money{} = money), do: money
  defp to_money(amount), do: Money.make(amount)

  # Picks the currency of the result: a nil currency defers to the other
  # side, equal currencies pass through, and distinct non-nil currencies
  # raise. The previous version had an unreachable `(nil, nil)` clause
  # (shadowed by the `(nil, b)` clause) and an `if`-based final clause;
  # pattern matching on `(a, a)` expresses the same rule directly.
  defp determine_currency(nil, b), do: b
  defp determine_currency(a, nil), do: a
  defp determine_currency(a, a), do: a
  defp determine_currency(_a, _b), do: raise_currency_conflict()

  # Raised when both operands carry distinct, non-nil currencies.
  defp raise_currency_conflict do
    raise "Math requires both values to be of the same currency."
  end
end
|
lib/math.ex
| 0.879374
| 0.580174
|
math.ex
|
starcoder
|
defmodule ActivestorageEx.DiskService do
  @moduledoc """
  Wraps a local disk path as an ActivestorageEx service.
  `:root_path` in your config must be set. Both blobs and
  variants are stored in folders with `:root_path` as the root
  """
  @behaviour ActivestorageEx.Service

  alias ActivestorageEx.Service

  @doc """
  Returns a binary representation of an image from a given `%Blob{}` or `%Variant{}` key
  ## Parameters
    - `key`: A `%Blob{}` or `%Variant{}`'s key
  ## Examples
    Downloading an image from a `%Blob{}` key
  ```
    blob = %Blob{}
    DiskService.download(blob.key) # {:ok, <<...>>}
  ```
  """
  def download(key) do
    # `File.read/1` returns `{:ok, binary} | {:error, reason}` and closes the
    # file for us. The previous `File.open`-based implementation leaked the
    # file descriptor and returned a bare binary on success, contradicting
    # the `{:ok, <<...>>}` contract documented above.
    File.read(path_for(key))
  end

  @doc """
  Downloads and saves a file to disk in a streaming fashion.
  Good for downloading large files
  ## Parameters
    - `key`: A `%Blob{}` or `%Variant{}`'s key
    - `filepath`: The desired filepath.  Note that directories will not be created
  ## Examples
    Downloading an image from a `%Blob{}` key
  ```
    blob = %Blob{}
    filepath = "storage/image.png"
    DiskService.stream_download(blob.key, filepath) # {:ok, "storage/image.png"}
  ```
  """
  def stream_download(key, filepath) do
    # Copy in 5 MiB chunks so large files never need to fit in memory.
    five_megabytes = 5 * 1024 * 1024

    path_for(key)
    |> File.stream!([], five_megabytes)
    |> Stream.into(File.stream!(filepath))
    |> Stream.run()

    {:ok, filepath}
  end

  @doc """
  Saves an `%Image{}` to disk, as determined by a given `%Blob{}` or `%Variant{}` key
  ## Parameters
    - `image`: A `%Mogrify.Image{}` that isn't persisted
    - `key`: The blob or variant's key.  File location will be based off this.
      Directories _will_ be created
  ## Examples
    Uploading an `%Image{}` to disk from a `%Blob{}` key
  ```
    image = %Mogrify.Image{}
    blob = %Blob{}
    DiskService.upload(image, blob.key) # %Mogrify.Image{}
  ```
  """
  def upload(image, key) do
    with :ok <- make_path_for(key) do
      image
      |> Mogrify.save()
      |> rename_image(key)

      :ok
    else
      {:error, err} -> {:error, err}
    end
  end

  @doc """
  Deletes an image based on its `key`
  ## Parameters
    - `key`: The blob or variant's key
  ## Examples
    Deleting a file from a `%Blob{}` key
  ```
    blob = %Blob{}
    DiskService.delete(blob.key)
  ```
  """
  def delete(key) do
    case File.rm(path_for(key)) do
      :ok -> :ok
      # Ignore files that don't exist
      {:error, :enoent} -> :ok
      {:error, err} -> {:error, err}
    end
  end

  @doc """
  Creates a URL with a signed token that represents an attachment's
  content type, disposition, and key.
  Expiration based off `token_duration` option
  ## Parameters
    - `key`: A `%Blob{}` or `%Variant{}`'s key
    - `opts`: A Map containing the following data:
  ```
    %{
      disposition: String, # Optional, but recommended
      filename: String, # Required
      content_type: String, # Required
      token_duration: nil | Integer # Optional.  `nil` will generate a non-expiring URL
    }
  ```
  ## Examples
    Getting an asset's URL from a `%Blob{}` key
  ```
    blob = %Blob{}
    opts = %{}
    DiskService.url(blob.key, opts) # /active_storage/...
  ```
  """
  def url(key, opts) do
    disposition = Service.content_disposition_with(opts[:disposition], opts[:filename])

    # The token embeds key/disposition/content_type so the download endpoint
    # can serve the file without a database lookup.
    verified_key_with_expiration =
      ActivestorageEx.sign_message(
        %{
          key: key,
          disposition: disposition,
          content_type: opts[:content_type]
        },
        opts[:token_duration]
      )

    disk_service_url(verified_key_with_expiration, %{
      host: ActivestorageEx.env(:asset_host),
      disposition: disposition,
      content_type: opts[:content_type],
      filename: opts[:filename]
    })
  end

  @doc """
  Returns the path on disk for a given `%Blob{}` or `%Variant{}` key
  ## Parameters
    - `key`: The blob or variant's key
  ## Examples
    Getting a path from a `%Blob{}` key
  ```
    blob = %Blob{}
    DiskService.path_for(blob.key) # storage/te/st/test_key
  ```
  """
  def path_for(key) do
    Path.join(root_path(), [folder_for(key), "/", key])
  end

  @doc """
  Returns whether a file for a given `%Blob{}` or `%Variant{}` key exists
  ## Parameters
    - `key`: The blob or variant's key
  ## Examples
    Determining file's existence from a `%Blob{}` key
  ```
    blob = %Blob{}
    DiskService.exist?(blob.key) # true
  ```
  """
  def exist?(key) do
    key
    |> path_for()
    |> File.exists?()
  end

  # Creates the (possibly nested) directory a key's file will live in.
  defp make_path_for(key) do
    key
    |> path_for()
    |> Path.dirname()
    |> File.mkdir_p()
  end

  # Two-level fan-out directory derived from the key's first four
  # characters, e.g. "test_key" -> "te/st".
  defp folder_for(key) do
    [String.slice(key, 0..1), String.slice(key, 2..3)] |> Enum.join("/")
  end

  defp root_path do
    ActivestorageEx.env(:root_path)
  end

  # Moves Mogrify's temp file into its final keyed location. Copy + remove is
  # used (rather than rename) so this works across filesystems.
  defp rename_image(image, key) do
    File.copy!(image.path, path_for(key))
    File.rm!(image.path)

    image
  end

  # Builds the final URL: host + token + sanitized filename, with
  # content_type/disposition passed along as query params.
  defp disk_service_url(token, opts) do
    cleaned_filename = Service.sanitize(opts[:filename])
    whitelisted_opts = Map.take(opts, [:content_type, :disposition])
    base_url = "#{opts[:host]}/active_storage/disk/#{token}/#{cleaned_filename}"

    base_url
    |> URI.parse()
    |> Map.put(:query, URI.encode_query(whitelisted_opts))
    |> URI.to_string()
  end
end
|
lib/service/disk_service.ex
| 0.904009
| 0.859605
|
disk_service.ex
|
starcoder
|
defmodule Nerves.Runtime.LogTailer do
  @moduledoc """
  Collects operating system-level messages from `/dev/log` and `/proc/kmsg`,
  forwarding them to `Logger` with an appropriate level to match the syslog
  priority parsed out of the message.
  You can disable this feature (e.g. for testing) by configuring the following
  option:
  ```elixir
  # config.exs
  config :nerves_runtime, enable_syslog: false
  ```
  """
  use GenServer

  require Logger

  @port_binary_name "log_tailer"

  defp gen_server_name(:syslog), do: __MODULE__.Syslog
  defp gen_server_name(:kmsg), do: __MODULE__.Kmsg

  @type type :: :syslog | :kmsg

  @doc """
  `type` must be `:syslog` or `:kmsg` to indicate which log to tail with this
  process. They're managed by separate processes, both to isolate failures and
  to simplify the handling of messages being sent back from the ports.
  """
  @spec start_link(:syslog | :kmsg) :: {:ok, pid()}
  def start_link(type) do
    enabled = Application.get_env(:nerves_runtime, :enable_syslog, true)
    GenServer.start_link(__MODULE__, %{type: type, enabled: enabled}, name: gen_server_name(type))
  end

  @spec init(%{type: :syslog | :kmsg, enabled: boolean()}) ::
          {:ok, %{type: atom(), port: port(), buffer: binary()}} | :ignore
  def init(%{enabled: false}), do: :ignore
  def init(%{type: type}), do: {:ok, %{type: type, port: open_port(type), buffer: ""}}

  # `{:line, 1024}` delivers partial lines as :noeol fragments; accumulate
  # them until the terminating :eol fragment arrives.
  def handle_info({port, {:data, {:noeol, fragment}}}, %{port: port, buffer: buffer} = state) do
    {:noreply, %{state | buffer: buffer <> fragment}}
  end

  def handle_info(
        {port, {:data, {:eol, fragment}}},
        %{type: type, port: port, buffer: buffer} = state
      ) do
    handle_message(type, buffer <> fragment)
    {:noreply, %{state | buffer: ""}}
  end

  # The port was opened with :exit_status, so we will receive this message if
  # the port binary dies. Previously there was no matching clause, which made
  # the GenServer crash with a FunctionClauseError; stop explicitly instead so
  # the supervisor restart is intentional and the reason is descriptive.
  def handle_info({port, {:exit_status, status}}, %{port: port} = state) do
    {:stop, {:port_exited, status}, state}
  end

  defp open_port(type) do
    Port.open({:spawn_executable, executable()}, [
      {:args, [to_string(type)]},
      {:line, 1024},
      :use_stdio,
      :binary,
      :exit_status
    ])
  end

  # Path to the port binary shipped in this application's priv directory.
  defp executable() do
    :nerves_runtime
    |> :code.priv_dir()
    |> Path.join(@port_binary_name)
  end

  defp handle_message(type, data) do
    case parse_syslog_message(data) do
      %{facility: facility, severity: severity, message: message} ->
        Logger.bare_log(
          logger_level(severity),
          message,
          module: gen_server_name(type),
          facility: facility,
          severity: severity
        )

      _ ->
        # This is unlikely to ever happen, but if a message was somehow
        # malformed and we couldn't parse the syslog priority, we should
        # still do a best-effort to pass along the raw data.
        Logger.bare_log(:info, data, module: gen_server_name(type))
    end
  end

  @doc """
  Parse out the syslog facility, severity, and message (including the timestamp
  and host) from a syslog-formatted string.
  """
  @spec parse_syslog_message(binary()) ::
          %{facility: atom(), severity: atom(), message: binary()}
          | {:error, :not_syslog_format}
  def parse_syslog_message(data) do
    case Regex.named_captures(~r/^<(?<pri>\d{1,3})>(?<message>.*)$/, data) do
      %{"pri" => pri, "message" => message} ->
        # RFC 3164: PRI = facility * 8 + severity.
        {facility, severity} = pri |> String.to_integer() |> divmod(8)
        %{facility: facility_name(facility), severity: severity_name(severity), message: message}

      _ ->
        {:error, :not_syslog_format}
    end
  end

  defp divmod(numerator, denominator),
    do: {div(numerator, denominator), Integer.mod(numerator, denominator)}

  defp facility_name(0), do: :kernel
  defp facility_name(1), do: :user_level
  defp facility_name(2), do: :mail
  defp facility_name(3), do: :system
  defp facility_name(4), do: :security_authorization
  defp facility_name(5), do: :syslogd
  defp facility_name(6), do: :line_printer
  defp facility_name(7), do: :network_news
  defp facility_name(8), do: :UUCP
  defp facility_name(9), do: :clock
  defp facility_name(10), do: :security_authorization
  defp facility_name(11), do: :FTP
  defp facility_name(12), do: :NTP
  defp facility_name(13), do: :log_audit
  defp facility_name(14), do: :log_alert
  defp facility_name(15), do: :clock
  defp facility_name(16), do: :local0
  defp facility_name(17), do: :local1
  defp facility_name(18), do: :local2
  defp facility_name(19), do: :local3
  defp facility_name(20), do: :local4
  defp facility_name(21), do: :local5
  defp facility_name(22), do: :local6
  defp facility_name(23), do: :local7

  defp severity_name(0), do: :Emergency
  defp severity_name(1), do: :Alert
  defp severity_name(2), do: :Critical
  defp severity_name(3), do: :Error
  defp severity_name(4), do: :Warning
  defp severity_name(5), do: :Notice
  defp severity_name(6), do: :Informational
  defp severity_name(7), do: :Debug

  # Map syslog severities onto Logger levels.
  defp logger_level(severity) when severity in [:Emergency, :Alert, :Critical, :Error], do: :error
  defp logger_level(severity) when severity == :Warning, do: :warn
  defp logger_level(severity) when severity in [:Notice, :Informational], do: :info
  defp logger_level(severity) when severity == :Debug, do: :debug
end
|
lib/nerves_runtime/log_tailer.ex
| 0.625896
| 0.743727
|
log_tailer.ex
|
starcoder
|
defmodule IdleAnimations.GOL do
  use GenServer, restart: :temporary

  @moduledoc "A GOL idle animation"

  defmodule State do
    use TypedStruct

    # Process state for one run of the animation.
    typedstruct enforce: true do
      field :id, String.t()
      field :gol_state, Matrix.t(boolean())
      # Set when the Coordinator asked us to stop; we fade out before exiting.
      field :fading_out, boolean(), default: false
      field :fader, Fader.t(), default: Fader.new(8)
      # Number of GOL generations simulated so far.
      field :steps, non_neg_integer(), default: 0
      field :max_steps, non_neg_integer()
    end
  end

  # Starts the animation process. `options` must contain :mode (see
  # possible_modes/0) and may contain :game_id.
  def start_link(options) do
    mode = Keyword.fetch!(options, :mode)
    {gol_state, max_steps} = get_initial_state(mode)

    state = %State{
      id: Keyword.get(options, :game_id),
      gol_state: gol_state,
      max_steps: max_steps
    }

    GenServer.start_link(__MODULE__, state, options)
  end

  @impl true
  def init(state) do
    # Kick off the first frame; each tick schedules the next.
    tick_request()
    {:ok, state}
  end

  @impl true
  def handle_info(:tick, state) do
    # Draw the current generation, then advance one GOL step.
    render(state)

    state = %State{
      state
      | gol_state:
          Matrix.map(state.gol_state, fn x, y, s -> update_cell(state.gol_state, x, y, s) end),
        steps: state.steps + 1,
        fader: Fader.step(state.fader)
    }

    # Keep ticking until we hit the step budget or the fade-out completes.
    if state.steps < state.max_steps and not (state.fading_out and Fader.done(state.fader)) do
      tick_request()
      {:noreply, state}
    else
      {:stop, :normal, state}
    end
  end

  @impl true
  def handle_cast(:terminate, state) do
    # Graceful shutdown request: reverse the fader and let the tick loop
    # stop once the fade-out is done.
    {:noreply, %State{state | fading_out: true, fader: %Fader{state.fader | direction: :dec}}}
  end

  @impl true
  def terminate(_reason, state) do
    Coordinator.notify_idle_animation_terminated(state.id)
  end

  # :random is listed twice, presumably to weight random selection toward
  # it — TODO confirm with the caller that picks from this list.
  def possible_modes, do: [:random, :random, :glider]

  # Builds the initial board and the generation budget for a given mode.
  defp get_initial_state(mode) do
    {screen_x, screen_y} = Screen.dims()

    case mode do
      :random ->
        # Each cell starts alive with probability 1/3.
        gol_state =
          Matrix.of_dims_f(screen_x, screen_y, fn _, _ -> Enum.random([true, false, false]) end)

        {gol_state, 500}

      :glider ->
        {make_glider_state(), 1000}
    end
  end

  # Seeds the board with a fixed cell pattern (named "glider"; the layout
  # looks like a glider-gun arrangement — TODO confirm), centered on screen.
  # NOTE(review): positions are written {y, x} and swapped in the reduce.
  defp make_glider_state do
    {screen_x, screen_y} = Screen.dims()
    m = Matrix.of_dims(screen_x, screen_y, false)

    positions = [
      {5, 1},
      {5, 2},
      {6, 1},
      {6, 2},
      {5, 11},
      {6, 11},
      {7, 11},
      {4, 12},
      {3, 13},
      {3, 14},
      {8, 12},
      {9, 13},
      {9, 14},
      {6, 15},
      {4, 16},
      {5, 17},
      {6, 17},
      {7, 17},
      {6, 18},
      {8, 16},
      {3, 21},
      {4, 21},
      {5, 21},
      {3, 22},
      {4, 22},
      {5, 22},
      {2, 23},
      {6, 23},
      {1, 25},
      {2, 25},
      {6, 25},
      {7, 25},
      {3, 35},
      {4, 35},
      {3, 36},
      {4, 36}
    ]

    # Center the 36x9 pattern on the screen.
    offset_x = Integer.floor_div(screen_x - 36, 2)
    offset_y = Integer.floor_div(screen_y - 9, 2)

    Enum.reduce(positions, m, fn {y, x}, m ->
      Matrix.draw_at(m, x + offset_x, y + offset_y, true)
    end)
  end

  # Conway's rules for one cell: a live cell survives with 2-3 live
  # neighbours, a dead cell becomes alive with exactly 3. Neighbours wrap
  # around the screen edges (toroidal board via Integer.mod).
  defp update_cell(gol_state, x, y, s) do
    {screen_x, screen_y} = Screen.dims()

    live_neighbors =
      for dx <- -1..1, dy <- -1..1, {dx, dy} != {0, 0}, reduce: 0 do
        c ->
          c +
            if gol_state[Integer.mod(x + dx, screen_x)][Integer.mod(y + dy, screen_y)],
              do: 1,
              else: 0
      end

    if s do
      live_neighbors in 2..3
    else
      live_neighbors == 3
    end
  end

  # Schedule the next :tick; 1000/8 ms gives roughly 8 generations/second.
  defp tick_request do
    Process.send_after(self(), :tick, Integer.floor_div(1000, 8))
  end

  # Renders the current board: live cells in (faded) white, dead cells empty.
  defp render(state) do
    on_colour = Fader.apply(Pixel.white(), state.fader)
    off_colour = Pixel.empty()
    on = {on_colour.r, on_colour.g, on_colour.b}
    off = {off_colour.r, off_colour.g, off_colour.b}

    frame_vals =
      Matrix.reduce(state.gol_state, [], fn x, y, s, acc ->
        [{x, y, if(s, do: on, else: off)} | acc]
      end)

    frame =
      Screen.blank()
      |> NativeMatrix.set_from_list(frame_vals)

    Screen.update_frame(frame)
  end
end
|
web/lib/infolab_light_games/idle_animations/game_of_life.ex
| 0.764804
| 0.490358
|
game_of_life.ex
|
starcoder
|
defmodule Benchee.Conversion.Scale do
  @moduledoc """
  Functions for scaling values to other units. Different domains handle
  this task differently, for example durations and counts.
  See `Benchee.Conversion.Count` and `Benchee.Conversion.Duration` for examples
  """

  alias Benchee.Conversion.Unit

  @type unit :: Unit.t()
  @type unit_atom :: atom
  @type any_unit :: unit | unit_atom
  @type scaled_number :: {number, unit}
  @type scaling_strategy :: :best | :largest | :smallest | :none

  @doc """
  Scales a number in a domain's base unit to an equivalent value in the best
  fit unit. Results are a `{number, unit}` tuple. See `Benchee.Conversion.Count` and
  `Benchee.Conversion.Duration` for examples
  """
  @callback scale(number) :: scaled_number

  @doc """
  Scales a number in a domain's base unit to an equivalent value in the
  specified unit. Results are a `{number, unit}` tuple. See
  `Benchee.Conversion.Count` and `Benchee.Conversion.Duration` for examples
  """
  @callback scale(number, any_unit) :: number

  @doc """
  Finds the best fit unit for a list of numbers in a domain's base unit.
  "Best fit" is the most common unit, or (in case of tie) the largest of the
  most common units.
  """
  @callback best(list, keyword) :: unit

  @doc """
  Returns the base_unit in which Benchee takes its measurements, which in
  general is the smallest supported unit.
  """
  @callback base_unit :: unit

  @doc """
  Given the atom representation of a unit (`:hour`) return the appropriate
  `Benchee.Conversion.Unit` struct.
  """
  @callback unit_for(any_unit) :: unit

  @doc """
  Takes a tuple of a number and a unit and a unit to be converted to, returning
  the the number scaled to the new unit and the new unit.
  """
  @callback convert({number, any_unit}, any_unit) :: scaled_number

  # Generic scaling functions

  @doc """
  Used internally by implemented units to handle their scaling with units and
  without.
  ## Examples
      iex> Benchee.Conversion.Scale.scale(12345, :thousand, Benchee.Conversion.Count)
      12.345
  """
  def scale(value, unit = %Unit{}, _module) do
    scale(value, unit)
  end

  def scale(value, unit_atom, module) do
    scale(value, module.unit_for(unit_atom))
  end

  @doc """
  Used internally for scaling but only supports scaling with actual units.
  ## Examples
      iex> unit = %Benchee.Conversion.Unit{magnitude: 1000}
      iex> Benchee.Conversion.Scale.scale 12345, unit
      12.345
  """
  def scale(value, %Unit{magnitude: magnitude}) do
    value / magnitude
  end

  @doc """
  Lookup a unit by its `atom` presentation for the representation of supported
  units. Used by `Benchee.Conversion.Duration` and `Benchee.Conversion.Count`.
  """
  def unit_for(_units, unit = %Unit{}), do: unit
  def unit_for(units, unit), do: Map.fetch!(units, unit)

  @doc """
  Used internally to implement scaling in the modules without duplication.
  """
  def convert({value, current_unit}, desired_unit, module) do
    current_unit = module.unit_for(current_unit)
    desired_unit = module.unit_for(desired_unit)
    do_convert({value, current_unit}, desired_unit)
  end

  # Rescale a value between two concrete units via their magnitude ratio.
  defp do_convert(
         {value, %Unit{magnitude: current_magnitude}},
         desired_unit = %Unit{magnitude: desired_magnitude}
       ) do
    multiplier = current_magnitude / desired_magnitude
    {value * multiplier, desired_unit}
  end

  @doc """
  Given a `list` of number values and a `module` describing the domain of the
  values (e.g. Duration, Count), finds the "best fit" unit for the list as a
  whole.
  The best fit unit for a given value is the smallest unit in the domain for
  which the scaled value is at least 1. For example, the best fit unit for a
  count of 1_000_000 would be `:million`.
  The best fit unit for the list as a whole depends on the `:strategy` passed
  in `opts`:
  * `:best`     - the most frequent best fit unit. In case of tie, the
    largest of the most frequent units
  * `:largest`  - the largest best fit unit
  * `:smallest` - the smallest best fit unit
  * `:none`     - the domain's base (unscaled) unit
  ## Examples
      iex> list = [1, 101, 1_001, 10_001, 100_001, 1_000_001]
      iex> Benchee.Conversion.Scale.best_unit(list, Benchee.Conversion.Count, strategy: :best).name
      :thousand
      iex> list = [1, 101, 1_001, 10_001, 100_001, 1_000_001]
      iex> Benchee.Conversion.Scale.best_unit(list, Benchee.Conversion.Count, strategy: :smallest).name
      :one
      iex> list = [1, 101, 1_001, 10_001, 100_001, 1_000_001]
      iex> Benchee.Conversion.Scale.best_unit(list, Benchee.Conversion.Count, strategy: :largest).name
      :million
      iex> list = []
      iex> Benchee.Conversion.Scale.best_unit(list, Benchee.Conversion.Count, strategy: :best).name
      :one
      iex> list = [nil]
      iex> Benchee.Conversion.Scale.best_unit(list, Benchee.Conversion.Count, strategy: :best).name
      :one
      iex> list = [nil, nil, nil, nil]
      iex> Benchee.Conversion.Scale.best_unit(list, Benchee.Conversion.Count, strategy: :best).name
      :one
      iex> list = [nil, nil, nil, nil, 2_000]
      iex> Benchee.Conversion.Scale.best_unit(list, Benchee.Conversion.Count, strategy: :best).name
      :thousand
  """
  def best_unit(measurements, module, options) do
    # nils (missing measurements) carry no unit information, so drop them.
    do_best_unit(Enum.reject(measurements, &is_nil/1), module, options)
  end

  defp do_best_unit([], module, _) do
    module.base_unit
  end

  defp do_best_unit(list, module, opts) do
    case Keyword.get(opts, :strategy, :best) do
      :best -> best_unit(list, module)
      :largest -> largest_unit(list, module)
      :smallest -> smallest_unit(list, module)
      :none -> module.base_unit
    end
  end

  # Finds the most common unit in the list. In case of tie, chooses the
  # largest of the most common
  defp best_unit(list, module) do
    list
    |> Enum.map(fn n -> scale_unit(n, module) end)
    |> Enum.group_by(fn unit -> unit end)
    |> Enum.map(fn {unit, occurrences} -> {unit, length(occurrences)} end)
    # Each element here is a {unit, count} tuple; the previous anonymous
    # function misleadingly named them `unit` and `freq`, so use the capture.
    |> Enum.sort(&by_frequency_and_magnitude/2)
    |> hd
    |> elem(0)
  end

  # Finds the smallest unit in the list
  defp smallest_unit(list, module) do
    list
    |> Enum.map(fn n -> scale_unit(n, module) end)
    |> Enum.min_by(fn unit -> magnitude(unit) end)
  end

  # Finds the largest unit in the list
  defp largest_unit(list, module) do
    list
    |> Enum.map(fn n -> scale_unit(n, module) end)
    |> Enum.max_by(fn unit -> magnitude(unit) end)
  end

  # The best-fit unit for a single value, as chosen by the domain module.
  defp scale_unit(count, module) do
    {_, unit} = module.scale(count)
    unit
  end

  # Fetches the magnitude for the given unit
  defp magnitude(%Unit{magnitude: magnitude}) do
    magnitude
  end

  # Sorts two elements first by total, then by magnitude of the unit in case
  # of tie
  defp by_frequency_and_magnitude({unit_a, frequency}, {unit_b, frequency}) do
    magnitude(unit_a) > magnitude(unit_b)
  end

  defp by_frequency_and_magnitude({_, frequency_a}, {_, frequency_b}) do
    frequency_a > frequency_b
  end
end
|
lib/benchee/conversion/scale.ex
| 0.94419
| 0.880848
|
scale.ex
|
starcoder
|
defmodule Instruments.Probe do
  @moduledoc """
  A behavior for a Probe.
  Modules that define probes are expected to implement all of the functions in
  this behaviour.
  A probe is created via the call to `Instruments.Probe.probe_init/3`, and is
  then called every `sample_interval` milliseconds via the
  `Instruments.Probe.probe_sample/1` function. The probe can then update its
  internal state and do any processing it requires.
  Every `report_interval` milliseconds, the probe is expected to emit its metric
  value.
  """

  # The name of a single metric datapoint.
  @type datapoint :: String.t
  # Opaque, implementation-defined probe state threaded through callbacks.
  @type state :: any
  @type probe_value :: number | keyword
  @type probe_type :: :counter | :spiral | :gauge | :histogram | :timing | :set
  # Options accepted when defining a probe; :function, :mfa and :module are
  # alternative ways to specify the probe's implementation.
  @type probe_options :: [
    {:sample_rate, pos_integer} |
    {:tags, [String.t, ...]} |
    {:report_interval, pos_integer} |
    {:sample_interval, pos_integer} |
    {:function, (() -> {:ok, state})} |
    {:mfa, mfa} |
    {:module, module} |
    {:keys, [atom]}
  ]

  @doc """
  Called when the probe is created. The callback is passed
  the name of the probe, what kind of metric it's producing and the options
  the probe was created with.
  You must return `{:ok, state}`. The state will be passed back to you on
  subsequent callbacks. Any other return values will cancel further
  execution of the probe.
  """
  @callback probe_init(String.t, probe_type, probe_options) :: {:ok, state}

  @doc """
  Called every `sample_interval` milliseconds. When called, the probe should
  perform its measurement and update its internal state.
  You must return `{:ok, state}`. Any other return values will cancel further
  execution of the probe.
  """
  @callback probe_sample(state) :: {:ok, state}

  @doc """
  Called at least every `report_interval` milliseconds. This call reads the
  value of the probe, which is reported to the underlying statistics system.
  Return values can either take the form of a single numeric value, or a
  keyword list keys -> numeric values. Nil values won't be reported to the
  statistics system.
  """
  @callback probe_get_value(state) :: {:ok, probe_value}

  @doc """
  Resets the probe's state.
  You must return `{:ok, state}`. Any other return values will cancel further
  execution of the probe.
  """
  @callback probe_reset(state) :: {:ok, state}

  @doc """
  Called when the probe's runner process receives an unknown message.
  You must return `{:ok, state}`. Any other return values will cancel further
  execution of the probe.
  """
  @callback probe_handle_msg(any, state) :: {:ok, state}

  alias Instruments.Probe.Definitions

  # Convenience entry points: defining a probe is delegated to the
  # Definitions module (define!/3 raises on error).
  defdelegate define(name, type, options), to: Definitions
  defdelegate define!(name, type, options), to: Definitions
end
|
lib/probe.ex
| 0.902289
| 0.655682
|
probe.ex
|
starcoder
|
defmodule FileSize.Ecto.BitWithUnit do
  @moduledoc """
  An Ecto type that represents a file size in bits, supporting storage of
  different units. The value is stored as map in the database (i.e. jsonb when
  using PostgreSQL).
  ## Example
      defmodule MySchema do
        use Ecto.Schema
        schema "my_table" do
          field :file_size, FileSize.Ecto.BitWithUnit
        end
      end
  """

  use Ecto.Type

  alias FileSize.Bit
  alias FileSize.Ecto.Bit, as: BitType
  alias FileSize.Ecto.Utils

  @impl true
  def type, do: :map

  @impl true
  def cast(term)

  # Already a parsed bit size: pass it through unchanged.
  def cast(%Bit{} = size) do
    {:ok, size}
  end

  # String-keyed maps (e.g. decoded JSON or form params) are normalized to
  # the atom-keyed clauses below.
  def cast(%{"bits" => bits, "unit" => unit}) do
    cast(%{bits: bits, unit: unit})
  end

  def cast(%{"value" => value, "unit" => unit}) do
    cast(%{value: value, unit: unit})
  end

  # Total size expressed in bits plus a display unit. The is_integer/1 guard
  # means a map with a non-integer :bits falls through to the catch-all
  # clause and is handled by BitType.
  def cast(%{bits: bits, unit: unit}) when is_integer(bits) do
    with {:ok, unit} <- parse_unit(unit) do
      {:ok, FileSize.from_bits(bits, unit)}
    end
  end

  # A value already scaled to the given unit.
  def cast(%{value: value, unit: unit}) do
    with {:ok, value} <- Utils.assert_value(value),
         {:ok, unit} <- parse_unit(unit) do
      {:ok, FileSize.new(value, unit)}
    end
  end

  # Human-readable strings like "8 kbit"; anything that does not parse to a
  # bit-based size is rejected.
  def cast(str) when is_binary(str) do
    case FileSize.parse(str) do
      {:ok, %Bit{} = size} -> {:ok, size}
      _ -> :error
    end
  end

  # Fall back to the unit-less bit type for any other term.
  def cast(term) do
    BitType.cast(term)
  end

  @impl true
  def dump(term)

  # Persist as a string-keyed map so it round-trips through jsonb; the unit
  # is serialized alongside the total number of bits.
  def dump(%Bit{} = size) do
    {:ok,
     %{
       "bits" => FileSize.to_integer(size),
       "unit" => Utils.serialize_unit(size.unit)
     }}
  end

  def dump(_term), do: :error

  @impl true
  def embed_as(_format), do: :dump

  @impl true
  defdelegate equal?(size, other_size), to: Utils

  @impl true
  def load(term)

  # Rebuild the %Bit{} struct from the stored map shape produced by dump/1.
  def load(%{"bits" => bits, "unit" => unit_str})
      when is_integer(bits) and is_binary(unit_str) do
    with {:ok, unit} <- parse_unit(unit_str) do
      {:ok, FileSize.from_bits(bits, unit)}
    end
  end

  def load(_term), do: :error

  # Accepts only units belonging to the Bit family.
  defp parse_unit(unit) do
    Utils.parse_unit_for_type(unit, Bit)
  end
end
|
lib/file_size/ecto/bit_with_unit.ex
| 0.836053
| 0.520374
|
bit_with_unit.ex
|
starcoder
|
defmodule Annex.Layer.Sequence do
  @moduledoc """
  The Sequence layer is the container and orchestrator of other layers and is
  used to define a list of Layers that compose a deep neural network.
  """
  alias Annex.{
    AnnexError,
    Data,
    Data.DMatrix,
    Layer,
    Layer.Backprop,
    Layer.Sequence,
    Learner,
    Shape
  }

  require Logger

  @behaviour Learner
  use Layer

  @type layers :: MapArray.t()

  # NOTE(review): `layer_configs` is a struct field but was missing from the
  # type; it is included here so the type matches the struct definition.
  @type t :: %__MODULE__{
          layers: layers,
          layer_configs: [LayerConfig.t()],
          initialized?: boolean(),
          init_options: Keyword.t(),
          train_options: Keyword.t()
        }

  defstruct layers: %{},
            layer_configs: [],
            initialized?: false,
            init_options: [],
            train_options: []

  @doc """
  Builds an initialized `Sequence` from a `LayerConfig`.

  Fetches the `:layers` key from the config and initializes each configured
  child layer in order. Raises the fetched `AnnexError` when `:layers` is
  missing from the config.
  """
  @impl Layer
  @spec init_layer(LayerConfig.t(Sequence)) :: t()
  def init_layer(%LayerConfig{} = cfg) do
    case LayerConfig.fetch(cfg, :layers) do
      {:ok, :layers, layer_configs} ->
        %Sequence{
          layers: do_init_layers(layer_configs),
          layer_configs: layer_configs,
          initialized?: true
        }

      {:error, :layers, %AnnexError{} = err} ->
        raise err
    end
  end

  # Initializes each layer config in order. `Enum.map/2` replaces the previous
  # reduce + reverse, which built the same ordered list. The clause head still
  # asserts every element is a %LayerConfig{}.
  defp do_init_layers(layer_configs) do
    layer_configs
    |> Enum.map(fn %LayerConfig{} = layer_config ->
      LayerConfig.init_layer(layer_config)
    end)
    |> MapArray.new()
  end

  @doc "Returns the layers of the sequence as a MapArray."
  @spec get_layers(t()) :: layers
  def get_layers(%Sequence{layers: layers}), do: layers

  @doc """
  Initializes the sequence as a `Learner`.

  An already-initialized sequence is returned unchanged; an uninitialized one
  is rebuilt from its stored layer configs via a fresh `LayerConfig`.
  """
  @impl Learner
  @spec init_learner(t() | LayerConfig.t(Sequence), Keyword.t()) :: t() | no_return()
  def init_learner(seq, opts \\ [])

  def init_learner(%Sequence{initialized?: true} = seq, _opts), do: seq

  def init_learner(%Sequence{layer_configs: layer_configs, initialized?: false}, opts) do
    Sequence
    |> LayerConfig.build(layers: layer_configs)
    |> init_learner(opts)
  end

  def init_learner(%LayerConfig{} = cfg, _opts) do
    init_layer(cfg)
  end

  @impl Layer
  @spec data_type(t()) :: DMatrix
  def data_type(_), do: DMatrix

  @doc """
  Feeds `seq_inputs` forward through each layer in order.

  Before each layer, the data is converted to the type/shape expected by the
  nearest shaped layer at or after the current index (if any). Returns the
  sequence with updated layers and the final output.
  """
  @impl Layer
  @spec feedforward(Sequence.t(), Data.data()) :: {Sequence.t(), Data.data()}
  def feedforward(%Sequence{} = seq, seq_inputs) do
    layers1 = get_layers(seq)

    {output, layers2} =
      MapArray.reduce(layers1, {seq_inputs, %{}}, fn layer1, {inputs1, layers_acc}, i ->
        inputs2 = do_convert_for_feedforward(layers1, i, inputs1)
        {layer2, output} = Layer.feedforward(layer1, inputs2)
        {output, MapArray.append(layers_acc, layer2)}
      end)

    {%Sequence{seq | layers: layers2}, output}
  end

  # Converts `data` to the type and shape expected by the nearest shaped layer
  # found searching upward from `start_index`; returns the data unchanged when
  # no shaped layer exists in that direction.
  defp do_convert_for_feedforward(layers, start_index, data) do
    layers
    |> MapArray.seek_up(start_index, fn layer ->
      Layer.has_shapes?(layer) && Layer.data_type(layer)
    end)
    |> case do
      :error ->
        data

      {:ok, layer} ->
        data_type = Layer.data_type(layer)
        shape = shape_for_feedforward(layer)
        Data.convert(data_type, data, shape)
    end
  end

  # Mirror of do_convert_for_feedforward/3 searching downward, matching the
  # reverse traversal used during backprop.
  defp do_convert_for_backprop(layers, start_index, data) do
    layers
    |> MapArray.seek_down(start_index, fn layer ->
      Layer.has_shapes?(layer) && Layer.data_type(layer)
    end)
    |> case do
      :error ->
        data

      {:ok, layer} ->
        data_type = Layer.data_type(layer)
        shape = shape_for_backprop(layer)
        Data.convert(data_type, data, shape)
    end
  end

  # The feedforward conversion shape is derived from the layer's input shape.
  defp shape_for_feedforward(layer) do
    case Layer.input_shape(layer) do
      [_rows, columns] -> [columns, :any]
      [columns] -> [columns, :any]
    end
  end

  # The backprop conversion shape is derived from the layer's output shape.
  defp shape_for_backprop(layer) do
    case Layer.output_shape(layer) do
      [_columns, rows] -> [rows, :any]
      [_] -> [1, :any]
    end
  end

  @doc """
  Propagates `seq_errors` backwards through the layers in reverse order.

  Returns the sequence with updated layers along with the final errors and
  accumulated backprop data.
  """
  @impl Layer
  @spec backprop(t(), Data.data(), Backprop.t()) :: {t(), Data.data(), Backprop.t()}
  def backprop(%Sequence{} = seq, seq_errors, seq_backprops) do
    layers = get_layers(seq)

    {output_errors, output_props, output_layers} =
      MapArray.reverse_reduce(layers, {seq_errors, seq_backprops, %{}}, fn
        layer, {errors, backprops, layers_acc}, i ->
          errors2 = do_convert_for_backprop(layers, i, errors)
          {layer2, errors3, backprops2} = Layer.backprop(layer, errors2, backprops)
          layers_acc2 = Map.put(layers_acc, i, layer2)
          {errors3, backprops2, layers_acc2}
      end)

    {%Sequence{seq | layers: output_layers}, output_errors, output_props}
  end

  @doc """
  Returns `{input_shape, output_shape}` for the whole sequence, taken from the
  first and last shaped layers respectively.
  """
  @impl Layer
  @spec shapes(t()) :: {Shape.t(), Shape.t()}
  def shapes(%Sequence{} = seq) do
    {input_shape, _} = first_shape(seq)
    {_, output_shape} = last_shape(seq)
    {input_shape, output_shape}
  end

  defp first_shape(%Sequence{} = seq) do
    seq
    |> get_layers()
    |> MapArray.seek_up(fn layer -> Layer.has_shapes?(layer) end)
    |> case do
      :error ->
        raise Annex.AnnexError,
          message: """
          Sequence requires at least one shaped layer.
          """

      {:ok, layer} ->
        Layer.shapes(layer)
    end
  end

  defp last_shape(%Sequence{} = seq) do
    # The error case is covered in first_shape/1, so the match here asserts.
    {:ok, layer} =
      seq
      |> get_layers()
      |> MapArray.seek_down(fn layer -> Layer.has_shapes?(layer) end)

    Layer.shapes(layer)
  end

  @doc """
  Runs `data` through the sequence and returns only the prediction.
  """
  @impl Learner
  @spec predict(Sequence.t(), any()) :: Data.data()
  def predict(%Sequence{} = seq, data) do
    {_, prediction} = Layer.feedforward(seq, data)
    prediction
  end

  defimpl Inspect do
    # Renders one line per layer: {module, data_type, shapes}.
    def inspect(seq, _) do
      details =
        seq
        |> Sequence.get_layers()
        |> MapArray.map(fn %module{} = layer ->
          Kernel.inspect({module, data_type(layer), shapes(layer)})
        end)
        |> Enum.intersperse("\n\t")
        |> IO.iodata_to_binary()

      "#Sequence<[\n\t#{details}\n]>"
    end

    # nil when the layer does not expose a data type.
    def data_type(layer) do
      if Layer.has_data_type?(layer) do
        Layer.data_type(layer)
      end
    end

    # nil when the layer does not expose shapes.
    def shapes(layer) do
      if Layer.has_shapes?(layer) do
        Layer.shapes(layer)
      end
    end
  end
end
|
lib/annex/layer/sequence.ex
| 0.875202
| 0.538498
|
sequence.ex
|
starcoder
|
defmodule XGen.Option do
  @moduledoc """
  Helpers to create and resolve generator options.

  Options have some properties to define:

    * `key` - the key to add in the options map after resolution
    * `type` - the option type
    * `default` - a default value *(optional)*
    * `options` - some options *(optional, see [Types and their
      options](#module-types-and-their-options))*
    * `name` - a printable name *(optional)*
    * `prompt` - the user prompt
    * `documentation` - documentation for the option *(optional)*

  If set, `name` and `documentation` need to be defined together.

  In addition to these properties, options can define an optional [validator
  callback](#module-validators).

  Options are declared as modules using a domain-specific language.

  ## Examples

  To define a single option, simply define a module `use`-ing `XGen.Option`.

      defmodule AnOption do
        @moduledoc false
        use XGen.Option

        key :an_option
        type :yesno
        default :yes
        name "An option"
        prompt "Activate the option?"

        documentation \"""
        This option enables the achievement of great things. If you choose to
        activate it (the default behaviour), you will not regret it.
        \"""
      end

  If you want to define multiple options in a module as a collection, you can
  use the `defoption` macro by `use`-ing `XGen.Option` with the `collection`
  option set:

      defmodule OptionCollection do
        @moduledoc false

        # `collection: true` imports only the defoption/2 macro and avoids
        # defining OptionCollection as an option.
        use XGen.Option, collection: true

        # `defoption` defines a module use-ing XGen.Option.
        defoption AnOption do
          key :an_option
          type :string
          options required: true
          prompt "Value"
        end

        defoption AnotherOption do
          key :another_option
          type :integer
          options range: 1..10
          prompt "Number of things"
        end
      end

  Values for all properties can be generated dynamically from other options
  values. For instance, if you are sure to run some `:project_name` option
  before to run the following one, you can write:

      defoption Module do
        key :module
        type :string
        default Macro.camelize(@project_name)
        prompt "Module name"
      end

  ## Types and their options

  ### Strings

  Options with the type `:string` prompt the user for a value. If no default
  value is set, pressing enter without writing a value sets the option to `""`.
  If a default value is set, it is printed in brackets after the prompt.
  Pressing enter without writing a value then sets the option to its default.

  Options for string options are:

    * `required` - if set to `true`, pressing enter without writing a value
      prints an error and prompts the user to enter a value again
    * `length` - an optional range for acceptable length

  For instance:

      defoption StringExample do
        key :string_example
        type :string
        options required: true, length: 3..20
        prompt "Project name"
      end

  ### Integers

  Options with the type `:integer` prompt the user for an integer value. If no
  default value is set, pressing enter without writing a value prints an error
  and prompts the user to enter a value again. If a default value is set, it is
  printed in brackets after the prompt. Pressing enter without writing a value
  then sets the option to its default.

  If the user input is not a valid integer, an error is printed and the user is
  asked to enter a new value.

  Options for integer options are:

    * `range` - an optional accepted range for the value. If set, the range is
      printed in parentheses after the prompt

  For instance:

      defoption IntegerExample do
        key :integer_example
        type :integer
        options range: 1..10
        prompt "Enter an integer"
      end

  ### Yes/no questions

  Options with the type `:yesno` ask for user confirmation. If no default value
  is set, `(y/n)` is printed after the prompt. Entering without a value is an
  error and the user is asked the question again. If a default value is set,
  either `[Y/n]` or `[y/N]` is printed after the prompt accordingly. Entering
  without a value sets the default value.

  Options for the yes/no questions are:

    * `if_yes` - an optional list of options to resolve if the user answers yes
    * `if_no` - an optional list of options to resolve if the user answers no

  For instance:

      defoption YesNoExample do
        key :yesno_example
        type :yesno
        default :no
        options if_yes: [ChoiceExample]
        prompt "Do you want to add an item?"
      end

  ### Choices in a list

  Options with the type `:choice` prompt the user to choose a value from a list.
  If no default value is set, the user needs to provide a choice. Else, hitting
  enter with an empty choice sets the default one.

  Options of this type **must** set the `:choices` option with a keyword list.
  Keys are potential values while values are strings printed to the user. For
  instance:

      defoption ChoiceExample do
        key :choice_example
        type :choice
        default :fork
        options choices: choices()
        prompt "Which item to add?"

        defp choices do
          [
            spoon: "A spoon",
            fork: "A fork",
            knife: "A knife"
          ]
        end
      end

  This would print something like:

      Which item to add?
        1. A spoon
        2. A fork
        3. A knife
      Choice [2]:

  Like for yes/no questions, it is possible to resolve additional options
  depending on the answer:

      defoption ChoiceExample do
        key :choice_example
        type :choice
        default :fork

        # Here, NumberOfTines will be resolved only if the choice is :fork.
        options choices: choices(), if_fork: [NumberOfTines]
        prompt "Which item to add?"

        defp choices do
          [
            spoon: "A spoon",
            fork: "A fork",
            knife: "A knife"
          ]
        end
      end

  ## Validators

  If standard constraints like the length for strings or the range for integers
  are not sufficient, you can write a custom validator. A validator is a
  callback that takes the value as argument and returns either `{:ok,
  validated_value}` or `{:error, message}`:

      defoption ValidatedOption do
        key :validated_option
        type :string
        prompt "Email"

        @impl true
        def validator(value) do
          if value =~ ~r/@/,
            do: {:ok, String.downcase(value)},
            else: {:error, "The value must be a valid email."}
        end
      end

  ## Resolving options

  Options are meant to be resolved by a user input at some point. This can be
  achieved by using `resolve/2`, which takes the option as its first parameter
  and the map of previous options results as its second parameter. Passing
  previous options results is what makes dynamic properties possible. This
  function returns an updated map with the value of the newly resolved option:

      iex> XGen.Option.resolve(AnOption, %{previous: "value"})
      Activate the option? [Y/n] y
      %{an_option: true, previous: "value"}

  Adding the new value to the map makes it possible to chain options. For
  instance, you can use `Enum.reduce/3` to resolve more options:

      iex> Enum.reduce([AnOption, AnotherOption], %{}, &XGen.Option.resolve/2)
      Value: some value
      Number of things (1-10): 7
      %{an_option: "some value", another_option: 7}
  """

  use XGen.Properties

  import Marcus

  @typedoc "A generator option"
  @type t() :: module()

  @typedoc "Option types"
  @type type() :: :string | :integer | :yesno | :choice

  defproperty :key, atom(), doc: "the option key"
  defproperty :type, type(), doc: "the option type"
  defproperty :default, any(), doc: "the default value", optional: true
  defproperty :options, keyword(), doc: "the options", optional: true
  defproperty :name, String.t(), doc: "the option name", optional: true
  defproperty :prompt, String.t(), doc: "the user prompt"

  defproperty :documentation, String.t(),
    doc: "documentation for the option",
    optional: true

  @doc """
  Validates the user input.

  If the value is valid, a validator must return `{:ok, validated_value}`. The
  validated value may be the same as `value` or may be transformed to match a
  given format.

  If the value is invalid, `{:error, message}` must be returned. The message
  will be printed to the user.
  """
  @callback validator(value :: term()) ::
              {:ok, validated_value :: term()}
              | {:error, message :: String.t()}

  @optional_callbacks validator: 1

  @doc false
  defmacro __using__(opts) do
    if opts[:collection] do
      # Collection modules only get the DSL macro, so they are not themselves
      # registered as options.
      quote do
        import unquote(__MODULE__), only: [defoption: 2]
      end
    else
      quote do
        @behaviour unquote(__MODULE__)
        import unquote(__MODULE__)
      end
    end
  end

  @doc """
  Defines an option.
  """
  defmacro defoption(name, do: block) do
    quote do
      defmodule unquote(name) do
        @moduledoc false
        use unquote(__MODULE__)
        unquote(block)
      end
    end
  end

  @doc """
  Resolves an `option` given some previous results as `opts`.

  To resolve a list of options, you can use this function in conjunction with
  `Enum.reduce/3`:

      iex> Enum.reduce([AnOption, AnotherOption], %{}, &XGen.Option.resolve/2)
      Value: some value
      Number of things (1-10): 7
      %{an_option: "some value", another_option: 7}
  """
  @spec resolve(t(), map()) :: map()
  def resolve(option, opts) do
    # Properties are plain 1-arity functions generated by defproperty, so the
    # optional ones can be detected through the module's function list.
    properties = option.__info__(:functions)
    key = option.key(opts)
    type = option.type(opts)
    prompt = option.prompt(opts)
    default = if {:default, 1} in properties, do: option.default(opts)
    options = if {:options, 1} in properties, do: option.options(opts), else: []
    validator = if {:validator, 1} in properties, do: &option.validator/1

    if {:name, 1} in properties and {:documentation, 1} in properties do
      doc(option.name(opts), option.documentation(opts))
    end

    value = get_value(prompt, type, default, validator, options)
    opts = Map.put(opts, key, value)

    # Some options can lead to more options being resolved.
    if is_list(options[if_(value)]),
      do: Enum.reduce(options[if_(value)], opts, &resolve/2),
      else: opts
  end

  # Prints an option's title and documentation block before prompting.
  @spec doc(String.t(), IO.ANSI.ansidata()) :: :ok
  defp doc(title, content) do
    Marcus.info([:blue, :bright, "\n #{title}\n\n", :normal, content])
  end

  # Prompts the user according to the option type, then runs the optional
  # validator. On a validation error the message is printed and the user is
  # prompted again (recursively).
  @spec get_value(String.t(), type(), term(), function(), keyword()) :: term()
  defp get_value(prompt, type, default, validator, opts) do
    type
    |> case do
      :string -> prompt_string(prompt, [default: default] ++ opts)
      :integer -> prompt_integer(prompt, [default: default] ++ opts)
      :yesno -> yes?(prompt, default: default)
      :choice -> choose(prompt, opts[:choices], default: default)
    end
    |> validate(validator)
    |> case do
      {:ok, value} ->
        value

      {:error, message} ->
        Marcus.error(message <> "\n")
        get_value(prompt, type, default, validator, opts)
    end
  end

  @spec validate(term(), function()) :: {:ok, term()} | {:error, String.t()}
  defp validate(value, nil), do: {:ok, value}
  defp validate(value, validator), do: validator.(value)

  # Maps a resolved value to the `:if_*` options key used for conditional
  # resolution. NOTE(review): the catch-all builds an atom dynamically; values
  # come from choice keys declared in code, so the set is presumably bounded —
  # confirm no unbounded user input reaches here.
  @spec if_(atom()) :: atom()
  defp if_(true), do: :if_yes
  defp if_(false), do: :if_no
  defp if_(value), do: :"if_#{value}"
end
|
lib/xgen/option.ex
| 0.867598
| 0.577853
|
option.ex
|
starcoder
|
defmodule BitstylesPhoenix.Component.Icon do
  use BitstylesPhoenix.Component

  import BitstylesPhoenix.Component.UseSVG

  @moduledoc """
  An SVG icon system, that expects the icons to be present on the page, rendered as SVG `<symbol>`s.
  """

  @doc ~S"""
  Renders an icon element.

  This uses `BitstylesPhoenix.Component.UseSVG` to render an icon either inlined in the page or
  referenced in an external SVG file. Icons are assumed to have an id prefixed with `icon-` followed
  by the name of the icon, which is used to reference the icon.

  ## Attributes

  - `name` *(required)* - The name of the icon. Assumes icons are prefixed with `icon-`.
  - `size` - Specify the icon size to use. Available sizes are specified in CSS, and default to `s`, `m`, `l`, `xl`. If you do not specify a size, the icon will fit into a `1em` square.
  - `file` - To be set if icons should be loaded from an external resource (see `BitstylesPhoenix.Component.UseSVG.ui_svg/1`).
    This can also be configured to a default `icon_file`, see `BitstylesPhoenix` for config options. With the configuration present, inline icons can still be rendered with `file={nil}`.
  - `class` - Extra classes to pass to the svg. See `BitstylesPhoenix.Helper.classnames/1` for usage.

  See the [bitstyles icon docs](https://bitcrowd.github.io/bitstyles/?path=/docs/atoms-icon--icon) for examples of icon usage, and available icons in the bitstyles icon set.
  """
  story(
    "An icon (from inline svg)",
    '''
    iex> assigns = %{}
    ...> render ~H"""
    ...> <.ui_icon name="inline-arrow"/>
    ...> """
    """
    <svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon" focusable="false" height="16" width="16">
    <use xlink:href="#icon-inline-arrow">
    </use>
    </svg>
    """
    ''',
    extra_html: """
    <svg xmlns="http://www.w3.org/2000/svg" hidden aria-hidden="true">
    <symbol id="icon-inline-arrow" viewBox="0 0 100 100">
    <path d="M32.83,97.22a6.07,6.07,0,1,1-8.59-8.58L58.59,54.29a6.07,6.07,0,0,0,0-8.58L24.24,11.36a6.07,6.07,0,1,1,8.59-8.58L75.76,45.71a6.07,6.07,0,0,1,0,8.58Z" fill-rule="evenodd" />
    </symbol>
    </svg>
    """
  )

  story("An icon with a size", '''
  iex> assigns = %{}
  ...> render ~H"""
  ...> <.ui_icon name="hamburger" file="assets/icons.svg" size="xl"/>
  ...> """
  """
  <svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon a-icon--xl" focusable="false" height="16" width="16">
  <use xlink:href="assets/icons.svg#icon-hamburger">
  </use>
  </svg>
  """
  ''')

  story("An icon with extra options", '''
  iex> assigns = %{}
  ...> render ~H"""
  ...> <.ui_icon name="bin" file="assets/icons.svg" class="foo bar"/>
  ...> """
  """
  <svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="a-icon foo bar" focusable="false" height="16" width="16">
  <use xlink:href="assets/icons.svg#icon-bin">
  </use>
  </svg>
  """
  ''')

  def ui_icon(assigns) do
    # Icons are referenced by convention as `icon-<name>` symbol ids.
    icon = "icon-#{assigns.name}"

    # Base icon class, optional size modifier class, plus any caller classes.
    class =
      classnames([
        "a-icon",
        {"a-icon--#{assigns[:size]}", assigns[:size] != nil},
        assigns[:class]
      ])

    # Every other assign is forwarded to the <svg> element as an attribute,
    # after width/height/file defaults have been applied.
    extra =
      assigns
      |> assigns_to_attributes([:class, :name, :size])
      |> put_defaults

    assigns = assign(assigns, extra: extra, class: class, icon: icon)

    ~H"""
    <.ui_svg use={@icon} class={@class} aria-hidden="true" focusable="false" {@extra} />
    """
  end

  # Default square size (px) applied when the caller passes no width/height.
  @default_size 16

  # Applies default width/height and the configured icon file source.
  defp put_defaults(opts) do
    opts
    |> Keyword.put_new(:width, @default_size)
    |> Keyword.put_new(:height, @default_size)
    |> put_icon_file(Application.get_env(:bitstyles_phoenix, :icon_file, :inline))
  end

  # :inline (the default) — icons are rendered as inline <symbol>s, no file.
  defp put_icon_file(opts, :inline), do: opts

  # A binary config value is used directly as the external SVG file path.
  defp put_icon_file(opts, file) when is_binary(file) do
    Keyword.put_new(opts, :file, file)
  end

  # An {module, function, arguments} config is resolved at runtime (MFA call),
  # then handled by one of the clauses above.
  defp put_icon_file(opts, {module, function, arguments}) do
    file = apply(module, function, arguments)
    put_icon_file(opts, file)
  end

  # An {module, function} config is called with no arguments.
  defp put_icon_file(opts, {module, function}) do
    file = apply(module, function)
    put_icon_file(opts, file)
  end
end
|
lib/bitstyles_phoenix/component/icon.ex
| 0.861974
| 0.426501
|
icon.ex
|
starcoder
|
defmodule Distributed do
  @moduledoc """
  Make your systems distributed, replicated, scaled well, easily.

  [](https://hex.pm/packages/distributed) [](https://hexdocs.pm/distributed) [](https://hex.pm/packages/distributed) [](https://github.com/ertgl/distributed) [](LICENSE.txt)

  ---

  ### Tutorial

  This is an example of a replicated `GenServer`.

      defmodule Storage.KV do
        use GenServer

        def start_link() do
          GenServer.start_link(__MODULE__, [initial_state: %{}], name: __MODULE__.process_id())
        end

        def init(opts \\\\ []) do
          {:ok, Keyword.get(opts, :initial_state, %{})}
        end

        def process_id() do
          Storage.KV
        end

        def handle_cast({:set, key, value}, state) do
          {:noreply, Map.put(state, key, value)}
        end

        def handle_call({:get, key, default}, _from, state) do
          {:reply, Map.get(state, key, default), state}
        end

        def handle_call({:has, key}, _from, state) do
          {:reply, Map.has_key?(state, key), state}
        end

        def handle_call({:pop, key, default}, _from, state) do
          {value, new_state} = Map.pop(state, key, default)
          {:reply, value, new_state}
        end

        def get(key, default \\\\ nil) do
          Distributed.Scaler.GenServer.call(__MODULE__.process_id(), {:get, key, default})
        end

        def set(key, value) do
          {_node_name, result} = Distributed.Replicator.GenServer.cast(__MODULE__.process_id(), {:set, key, value})
          |> List.first()
          result
        end

        def has?(key) do
          Distributed.Scaler.GenServer.call(__MODULE__.process_id(), {:has, key})
        end

        def pop(key, default \\\\ nil) do
          {_node_name, result} = Distributed.Replicator.GenServer.call(__MODULE__.process_id(), {:pop, key, default})
          |> List.first()
          result
        end
      end

  You can see the example as a small project on [GitHub](https://github.com/ertgl/storage).

  ### Installation:

  If [you have Hex](https://hex.pm), the package can be installed
  by adding `:distributed` to your list of dependencies in `mix.exs`:

      def application do
        [
          extra_applications: [
            :distributed,
          ],
        ]
      end

      def deps do
        [
          {:distributed, "~> 0.1.3"},
        ]
      end
  """

  # Injects both the Scaler and Replicator helpers into the calling module.
  #
  # Supported options:
  #   * `:scaler` - keyword options forwarded to `use Distributed.Scaler`
  #   * `:replicator` - keyword options forwarded to `use Distributed.Replicator`
  #
  # The option lists are extracted at macro-expansion time (outside the quote)
  # and spliced into the generated `use` calls.
  defmacro __using__(opts \\ []) do
    scaler_opts = Keyword.get(opts, :scaler, [])
    replicator_opts = Keyword.get(opts, :replicator, [])

    quote do
      use Distributed.Scaler, unquote(scaler_opts)
      use Distributed.Replicator, unquote(replicator_opts)
    end
  end
end
|
lib/distributed.ex
| 0.739046
| 0.57684
|
distributed.ex
|
starcoder
|
defmodule AWS.Route53Resolver do
@moduledoc """
When you create a VPC using Amazon VPC, you automatically get DNS resolution
within the VPC from Route 53 Resolver.
By default, Resolver answers DNS queries for VPC domain names such as domain
names for EC2 instances or ELB load balancers. Resolver performs recursive
lookups against public name servers for all other domain names.
You can also configure DNS resolution between your VPC and your network over a
Direct Connect or VPN connection:
## Forward DNS queries from resolvers on your network to Route 53 Resolver
DNS resolvers on your network can forward DNS queries to Resolver in a specified
VPC. This allows your DNS resolvers to easily resolve domain names for AWS
resources such as EC2 instances or records in a Route 53 private hosted zone.
For more information, see [How DNS Resolvers on Your Network Forward DNS Queries to Route 53
Resolver](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/resolver.html#resolver-overview-forward-network-to-vpc)
in the *Amazon Route 53 Developer Guide*.
## Conditionally forward queries from a VPC to resolvers on your network
You can configure Resolver to forward queries that it receives from EC2
instances in your VPCs to DNS resolvers on your network. To forward selected
queries, you create Resolver rules that specify the domain names for the DNS
queries that you want to forward (such as example.com), and the IP addresses of
the DNS resolvers on your network that you want to forward the queries to. If a
query matches multiple rules (example.com, acme.example.com), Resolver chooses
the rule with the most specific match (acme.example.com) and forwards the query
to the IP addresses that you specified in that rule. For more information, see
[How Route 53 Resolver Forwards DNS Queries from Your VPCs to Your Network](https://docs.aws.amazon.com/Route53/latest/DeveloperGuide/resolver.html#resolver-overview-forward-vpc-to-network)
in the *Amazon Route 53 Developer Guide*.
Like Amazon VPC, Resolver is regional. In each region where you have VPCs, you
can choose whether to forward queries from your VPCs to your network (outbound
queries), from your network to your VPCs (inbound queries), or both.
"""
@doc """
Adds IP addresses to an inbound or an outbound Resolver endpoint.
If you want to add more than one IP address, submit one
`AssociateResolverEndpointIpAddress` request for each IP address.
To remove an IP address from an endpoint, see
[DisassociateResolverEndpointIpAddress](https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_DisassociateResolverEndpointIpAddress.html).
"""
def associate_resolver_endpoint_ip_address(client, input, options \\ []),
  do: request(client, "AssociateResolverEndpointIpAddress", input, options)
@doc """
Associates an Amazon VPC with a specified query logging configuration.
Route 53 Resolver logs DNS queries that originate in all of the Amazon VPCs that
are associated with a specified query logging configuration. To associate more
than one VPC with a configuration, submit one `AssociateResolverQueryLogConfig`
request for each VPC.
The VPCs that you associate with a query logging configuration must be in the
same Region as the configuration.
To remove a VPC from a query logging configuration, see
[DisassociateResolverQueryLogConfig](https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_DisassociateResolverQueryLogConfig.html).
"""
def associate_resolver_query_log_config(client, input, options \\ []),
  do: request(client, "AssociateResolverQueryLogConfig", input, options)
@doc """
Associates a Resolver rule with a VPC.
When you associate a rule with a VPC, Resolver forwards all DNS queries for the
domain name that is specified in the rule and that originate in the VPC. The
queries are forwarded to the IP addresses for the DNS resolvers that are
specified in the rule. For more information about rules, see
[CreateResolverRule](https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_CreateResolverRule.html).
"""
def associate_resolver_rule(client, input, options \\ []),
  do: request(client, "AssociateResolverRule", input, options)
@doc """
Creates a Resolver endpoint.
There are two types of Resolver endpoints, inbound and outbound:
* An *inbound Resolver endpoint* forwards DNS queries to the DNS
service for a VPC from your network.
* An *outbound Resolver endpoint* forwards DNS queries from the DNS
service for a VPC to your network.
"""
def create_resolver_endpoint(client, input, options \\ []),
  do: request(client, "CreateResolverEndpoint", input, options)
@doc """
Creates a Resolver query logging configuration, which defines where you want
Resolver to save DNS query logs that originate in your VPCs.
Resolver can log queries only for VPCs that are in the same Region as the query
logging configuration.
To specify which VPCs you want to log queries for, you use
`AssociateResolverQueryLogConfig`. For more information, see
[AssociateResolverQueryLogConfig](https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_AssociateResolverQueryLogConfig.html).
You can optionally use AWS Resource Access Manager (AWS RAM) to share a query
logging configuration with other AWS accounts. The other accounts can then
associate VPCs with the configuration. The query logs that Resolver creates for
a configuration include all DNS queries that originate in all VPCs that are
associated with the configuration.
"""
def create_resolver_query_log_config(client, input, options \\ []),
  do: request(client, "CreateResolverQueryLogConfig", input, options)
@doc """
For DNS queries that originate in your VPCs, specifies which Resolver endpoint
the queries pass through, one domain name that you want to forward to your
network, and the IP addresses of the DNS resolvers in your network.
"""
def create_resolver_rule(client, input, options \\ []),
  do: request(client, "CreateResolverRule", input, options)
@doc """
Deletes a Resolver endpoint.
The effect of deleting a Resolver endpoint depends on whether it's an inbound or
an outbound Resolver endpoint:
* **Inbound**: DNS queries from your network are no longer routed to
the DNS service for the specified VPC.
* **Outbound**: DNS queries from a VPC are no longer routed to your
network.
"""
def delete_resolver_endpoint(client, input, options \\ []),
  do: request(client, "DeleteResolverEndpoint", input, options)
@doc """
Deletes a query logging configuration.
When you delete a configuration, Resolver stops logging DNS queries for all of
the Amazon VPCs that are associated with the configuration. This also applies if
the query logging configuration is shared with other AWS accounts, and the other
accounts have associated VPCs with the shared configuration.
Before you can delete a query logging configuration, you must first disassociate
all VPCs from the configuration. See
[DisassociateResolverQueryLogConfig](https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_DisassociateResolverQueryLogConfig.html).
If you used Resource Access Manager (RAM) to share a query logging configuration
with other accounts, you must stop sharing the configuration before you can
delete a configuration. The accounts that you shared the configuration with can
first disassociate VPCs that they associated with the configuration, but that's
not necessary. If you stop sharing the configuration, those VPCs are
automatically disassociated from the configuration.
"""
def delete_resolver_query_log_config(client, input, options \\ []),
  do: request(client, "DeleteResolverQueryLogConfig", input, options)
@doc """
Deletes a Resolver rule.
Before you can delete a Resolver rule, you must disassociate it from all the
VPCs that you associated the Resolver rule with. For more information, see
[DisassociateResolverRule](https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_DisassociateResolverRule.html).
"""
def delete_resolver_rule(client, input, options \\ []),
  do: request(client, "DeleteResolverRule", input, options)
@doc """
Removes IP addresses from an inbound or an outbound Resolver endpoint.
If you want to remove more than one IP address, submit one
`DisassociateResolverEndpointIpAddress` request for each IP address.
To add an IP address to an endpoint, see
[AssociateResolverEndpointIpAddress](https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_AssociateResolverEndpointIpAddress.html).
"""
def disassociate_resolver_endpoint_ip_address(client, input, options \\ []),
  do: request(client, "DisassociateResolverEndpointIpAddress", input, options)
@doc """
Disassociates a VPC from a query logging configuration.
Before you can delete a query logging configuration, you must first disassociate
all VPCs from the configuration. If you used Resource Access Manager (RAM) to
share a query logging configuration with other accounts, VPCs can be
disassociated from the configuration in the following ways:
The accounts that you shared the configuration with can
disassociate VPCs from the configuration.
You can stop sharing the configuration.
"""
def disassociate_resolver_query_log_config(client, input, options \\ []),
  do: request(client, "DisassociateResolverQueryLogConfig", input, options)
@doc """
Removes the association between a specified Resolver rule and a specified VPC.
If you disassociate a Resolver rule from a VPC, Resolver stops forwarding DNS
queries for the domain name that you specified in the Resolver rule.
"""
def disassociate_resolver_rule(client, input, options \\ []),
  do: request(client, "DisassociateResolverRule", input, options)
@doc """
Gets information about a specified Resolver endpoint, such as whether it's an
inbound or an outbound Resolver endpoint, and the current status of the
endpoint.
"""
def get_resolver_endpoint(client, input, options \\ []),
  do: request(client, "GetResolverEndpoint", input, options)
@doc """
Gets information about a specified Resolver query logging configuration, such as
the number of VPCs that the configuration is logging queries for and the
location that logs are sent to.
"""
def get_resolver_query_log_config(client, input, options \\ []),
  do: request(client, "GetResolverQueryLogConfig", input, options)
@doc """
Gets information about a specified association between a Resolver query logging
configuration and an Amazon VPC.
When you associate a VPC with a query logging configuration, Resolver logs DNS
queries that originate in that VPC.
"""
def get_resolver_query_log_config_association(client, input, options \\ []),
  do: request(client, "GetResolverQueryLogConfigAssociation", input, options)
@doc """
Gets information about a query logging policy.
A query logging policy specifies the Resolver query logging operations and
resources that you want to allow another AWS account to be able to use.
"""
def get_resolver_query_log_config_policy(client, input, options \\ []),
  do: request(client, "GetResolverQueryLogConfigPolicy", input, options)
@doc """
Gets information about a specified Resolver rule, such as the domain name that
the rule forwards DNS queries for and the ID of the outbound Resolver endpoint
that the rule is associated with.
"""
def get_resolver_rule(client, input, options \\ []),
  do: request(client, "GetResolverRule", input, options)
@doc """
Gets information about an association between a specified Resolver rule and a
VPC.
You associate a Resolver rule and a VPC using
[AssociateResolverRule](https://docs.aws.amazon.com/Route53/latest/APIReference/API_route53resolver_AssociateResolverRule.html).
"""
def get_resolver_rule_association(client, input, options \\ []),
  do: request(client, "GetResolverRuleAssociation", input, options)
@doc """
Gets information about a Resolver rule policy.
A Resolver rule policy specifies the Resolver operations and resources that you
want to allow another AWS account to be able to use.
"""
def get_resolver_rule_policy(client, input, options \\ []),
  do: request(client, "GetResolverRulePolicy", input, options)
@doc """
Gets the IP addresses for a specified Resolver endpoint.
"""
def list_resolver_endpoint_ip_addresses(client, input, options \\ []),
  do: request(client, "ListResolverEndpointIpAddresses", input, options)
@doc """
Lists all the Resolver endpoints that were created using the current AWS
account.
"""
def list_resolver_endpoints(client, input, options \\ []) do
request(client, "ListResolverEndpoints", input, options)
end
@doc """
Lists information about associations between Amazon VPCs and query logging
configurations.
"""
def list_resolver_query_log_config_associations(client, input, options \\ []) do
request(client, "ListResolverQueryLogConfigAssociations", input, options)
end
@doc """
Lists information about the specified query logging configurations.
Each configuration defines where you want Resolver to save DNS query logs and
specifies the VPCs that you want to log queries for.
"""
def list_resolver_query_log_configs(client, input, options \\ []) do
request(client, "ListResolverQueryLogConfigs", input, options)
end
@doc """
Lists the associations that were created between Resolver rules and VPCs using
the current AWS account.
"""
def list_resolver_rule_associations(client, input, options \\ []) do
request(client, "ListResolverRuleAssociations", input, options)
end
@doc """
Lists the Resolver rules that were created using the current AWS account.
"""
def list_resolver_rules(client, input, options \\ []) do
request(client, "ListResolverRules", input, options)
end
@doc """
Lists the tags that you associated with the specified resource.
"""
def list_tags_for_resource(client, input, options \\ []) do
request(client, "ListTagsForResource", input, options)
end
@doc """
Specifies an AWS account that you want to share a query logging configuration
with, the query logging configuration that you want to share, and the operations
that you want the account to be able to perform on the configuration.
"""
def put_resolver_query_log_config_policy(client, input, options \\ []) do
request(client, "PutResolverQueryLogConfigPolicy", input, options)
end
@doc """
Specifies an AWS account that you want to share rules with, the Resolver rules
that you want to share, and the operations that you want the account to be able
to perform on those rules.
"""
def put_resolver_rule_policy(client, input, options \\ []) do
request(client, "PutResolverRulePolicy", input, options)
end
@doc """
Adds one or more tags to a specified resource.
"""
def tag_resource(client, input, options \\ []) do
request(client, "TagResource", input, options)
end
@doc """
Removes one or more tags from a specified resource.
"""
def untag_resource(client, input, options \\ []) do
request(client, "UntagResource", input, options)
end
@doc """
Updates the name of an inbound or an outbound Resolver endpoint.
"""
def update_resolver_endpoint(client, input, options \\ []) do
request(client, "UpdateResolverEndpoint", input, options)
end
@doc """
Updates settings for a specified Resolver rule.
`ResolverRuleId` is required, and all other parameters are optional. If you
don't specify a parameter, it retains its current value.
"""
def update_resolver_rule(client, input, options \\ []) do
request(client, "UpdateResolverRule", input, options)
end
@spec request(AWS.Client.t(), binary(), map(), list()) ::
        {:ok, map() | nil, map()}
        | {:error, term()}
# Signs and dispatches one JSON-RPC action against the Route 53 Resolver
# service, returning the decoded body (or nil for an empty 200 body).
defp request(client, action, input, options) do
  client = %{client | service: "route53resolver"}
  host = build_host("route53resolver", client)
  url = build_url(host, client)
  payload = encode!(client, input)

  unsigned_headers = [
    {"Host", host},
    {"Content-Type", "application/x-amz-json-1.1"},
    {"X-Amz-Target", "Route53Resolver.#{action}"}
  ]

  signed_headers = AWS.Request.sign_v4(client, "POST", url, unsigned_headers, payload)
  post(client, url, payload, signed_headers, options)
end
# Executes the signed POST and normalises the result:
# 200 with empty body -> {:ok, nil, response}; 200 with body -> decoded map;
# any other status -> {:error, {:unexpected_response, _}}; transport errors
# pass through unchanged.
defp post(client, url, payload, headers, options) do
  case AWS.Client.request(client, :post, url, payload, headers, options) do
    {:ok, %{status_code: 200, body: ""} = response} ->
      {:ok, nil, response}

    {:ok, %{status_code: 200, body: body} = response} ->
      {:ok, decode!(client, body), response}

    {:ok, response} ->
      {:error, {:unexpected_response, response}}

    {:error, _reason} = error ->
      error
  end
end
# Resolves the hostname to call. A "local" region targets a development
# endpoint (the explicit :endpoint when configured, otherwise localhost);
# any other region builds the conventional "<prefix>.<region>.<endpoint>"
# AWS hostname. Clause order matters: the local+endpoint clause must come
# before the bare local clause.
defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint
defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}) do
  "#{endpoint_prefix}.#{region}.#{endpoint}"
end
# Builds the request URL from the client's configured protocol and port.
defp build_url(host, %{proto: proto, port: port}), do: "#{proto}://#{host}:#{port}/"
# JSON (de)serialisation is delegated to the codec configured on the client.
defp encode!(client, payload), do: AWS.Client.encode!(client, payload, :json)

defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
|
lib/aws/generated/route53_resolver.ex
| 0.899722
| 0.480235
|
route53_resolver.ex
|
starcoder
|
defmodule Commanded.Aggregate.Multi.BankAccount do
@moduledoc """
Example bank-account aggregate built around `Commanded.Aggregate.Multi`.

A withdrawal is modelled as a two-step `Multi`: first emit the
`MoneyWithdrawn` event, then check the projected balance and reject the
command with `:insufficient_funds_available` if the account would go
negative.
"""
defstruct [:account_number, :status, balance: 0]
alias Commanded.Aggregate.Multi
alias Commanded.Aggregate.Multi.BankAccount
defmodule Commands do
defmodule OpenAccount do
defstruct [:account_number, :initial_balance]
end
defmodule DepositMoney do
defstruct [:account_number, :amount]
end
defmodule WithdrawMoney do
defstruct [:account_number, :transfer_uuid, :amount]
end
end
defmodule Events do
defmodule BankAccountOpened do
@derive Jason.Encoder
defstruct [:account_number, :balance]
end
defmodule MoneyDeposited do
@derive Jason.Encoder
defstruct [:account_number, :amount, :balance]
end
defmodule MoneyWithdrawn do
@derive Jason.Encoder
defstruct [:account_number, :transfer_uuid, :amount, :balance]
end
end
alias Commands.{DepositMoney, OpenAccount, WithdrawMoney}
alias Events.{BankAccountOpened, MoneyDeposited, MoneyWithdrawn}
# Public command functions
# Opening requires a positive numeric initial balance; `status: nil` means
# the account has not been opened yet.
def execute(%BankAccount{status: nil}, %OpenAccount{} = command) do
%OpenAccount{account_number: account_number, initial_balance: initial_balance} = command
if is_number(initial_balance) and initial_balance > 0 do
event = %BankAccountOpened{account_number: account_number, balance: initial_balance}
{:ok, event}
else
{:error, :invalid_balance}
end
end
# Re-opening an already-opened account is rejected (clause order matters:
# this must follow the `status: nil` clause above).
def execute(%BankAccount{}, %OpenAccount{}), do: {:error, :account_exists}
# Ignore any other commands for unopened accounts.
def execute(%BankAccount{status: nil}, _command), do: {:error, :invalid_account}
# Deposits require a positive numeric amount; the event carries the new
# running balance.
def execute(%BankAccount{status: :active} = account, %DepositMoney{} = command) do
%BankAccount{account_number: account_number, balance: balance} = account
%DepositMoney{amount: amount} = command
if is_number(amount) and amount > 0 do
event = %MoneyDeposited{
account_number: account_number,
amount: amount,
balance: balance + amount
}
{:ok, event}
else
{:error, :invalid_amount}
end
end
# Withdrawal is a Multi: emit MoneyWithdrawn, then validate the resulting
# balance against overdraft in a second step.
def execute(%BankAccount{status: :active} = account, %WithdrawMoney{} = command) do
account
|> Multi.new()
|> Multi.execute(&withdraw_money(&1, command))
|> Multi.execute(&check_balance/1)
end
# State mutators
def apply(%BankAccount{} = state, %BankAccountOpened{} = event) do
%BankAccountOpened{account_number: account_number, balance: balance} = event
%BankAccount{state | account_number: account_number, balance: balance, status: :active}
end
def apply(%BankAccount{} = state, %MoneyDeposited{} = event) do
%MoneyDeposited{balance: balance} = event
%BankAccount{state | balance: balance}
end
def apply(%BankAccount{} = state, %MoneyWithdrawn{} = event) do
%MoneyWithdrawn{balance: balance} = event
%BankAccount{state | balance: balance}
end
# Private helpers
# First Multi step: emit the withdrawal event (balance may go negative here;
# check_balance/1 rejects that afterwards).
defp withdraw_money(%BankAccount{} = state, %WithdrawMoney{} = command) do
%BankAccount{account_number: account_number, balance: balance} = state
%WithdrawMoney{transfer_uuid: transfer_uuid, amount: amount} = command
if is_number(amount) and amount > 0 do
event = %MoneyWithdrawn{
account_number: account_number,
transfer_uuid: transfer_uuid,
amount: amount,
balance: balance - amount
}
{:ok, event}
else
{:error, :invalid_amount}
end
end
# Second Multi step: fail the whole Multi when overdrawn; an empty event
# list means "nothing further to emit".
defp check_balance(%BankAccount{balance: balance}) when balance < 0,
do: {:error, :insufficient_funds_available}
defp check_balance(%BankAccount{}), do: []
end
|
test/aggregates/support/multi_bank_account.ex
| 0.568895
| 0.572334
|
multi_bank_account.ex
|
starcoder
|
defmodule Nostrum.Cache.UserCache do
@default_cache_implementation Nostrum.Cache.UserCache.ETS
@moduledoc """
Cache behaviour & dispatcher for users.
"""
alias Nostrum.Struct.User
alias Nostrum.Util
import Nostrum.Snowflake, only: [is_snowflake: 1]
# Resolve the configured cache implementation at compile time, preferring
# Application.compile_env/3 when the running Elixir provides it.
if function_exported?(Application, :compile_env, 3) do
@configured_cache :nostrum
|> Application.compile_env(:caches, %{})
|> Map.get(:users, @default_cache_implementation)
else
# credo:disable-for-next-line Credo.Check.Warning.ApplicationConfigInModuleAttribute
@configured_cache :nostrum
|> Application.get_env(:caches, %{})
|> Map.get(:users, @default_cache_implementation)
end
## Behaviour specification
@doc ~s"""
Retrieves a user from the cache by id.
If successful, returns `{:ok, user}`. Otherwise, returns `{:error, reason}`.
## Example
```elixir
case Nostrum.Cache.UserCache.get(1111222233334444) do
{:ok, user} ->
"We found " <> user.username
{:error, _reason} ->
"No es bueno"
end
```
"""
@callback get(id :: User.id()) :: {:ok, User.t()} | {:error, atom}
@doc ~S"""
Add a new user to the cache based on the Discord Gateway payload.
Returns a `t:Nostrum.Struct.User.t/0` struct representing the created user.
"""
@callback create(payload :: map()) :: User.t()
@doc ~S"""
Bulk add multiple users to the cache at once.
Returns `:ok`.
"""
@callback bulk_create(user_payloads :: [map()]) :: :ok
@doc ~S"""
Update a user in the cache based on payload sent via the Gateway.
Returns `:noop` if the user has not been updated in the cache, or
`{old_user, new_user}` is the user has been written to the cache.
"""
@callback update(payload :: map()) :: :noop | {User.t(), User.t()}
@doc ~S"""
Delete a user by ID.
Returns the deleted user if present in the cache, or
`:noop` if the user was not cached.
"""
@callback delete(snowflake :: User.id()) :: :noop | User.t()
## Dispatching
@doc "Retrieve a user using the selected cache implementation."
@spec get(User.id()) :: {:error, atom} | {:ok, User.t()}
def get(id) when is_snowflake(id) do
@configured_cache.get(id)
end
@doc """
Same as `get/1`, but raises `Nostrum.Error.CacheError` in case of a failure.
"""
@spec get!(User.id()) :: no_return | User.t()
def get!(id) when is_snowflake(id), do: id |> get |> Util.bangify_find(id, __MODULE__)
@doc "Create a user using the selected cache implementation."
@spec create(map()) :: User.t()
def create(payload) do
@configured_cache.create(payload)
end
@doc "Bulk create multiple users using the selected cache implementation."
@spec bulk_create([map()]) :: :ok
def bulk_create(users) do
@configured_cache.bulk_create(users)
end
@doc "Update the given user using the selected cache implementation."
@spec update(map()) :: :noop | {User.t(), User.t()}
def update(payload) do
@configured_cache.update(payload)
end
@doc "Delete a user by ID using the selected cache implementation."
@spec delete(User.id()) :: :noop | User.t()
def delete(id) when is_snowflake(id) do
@configured_cache.delete(id)
end
end
|
lib/nostrum/cache/user_cache.ex
| 0.821152
| 0.415017
|
user_cache.ex
|
starcoder
|
defmodule Money do
@moduledoc """
Money implements a set of functions to store, retrieve, convert and perform
arithmetic on a `Money.t` type that is composed of a currency code and
a decimal currency amount.
Money is very opinionated in the interests of serving as a dependable library
that can underpin accounting and financial applications.
This opinion expressed by ensuring that:
1. Money must always have both a amount and a currency code.
2. The currency code must always be valid.
3. Money arithmetic can only be performed when both operands are of the
same currency.
4. Money amounts are represented as a `Decimal`.
5. Money is serialised to the database as a custom Postgres composite type
that includes both the amount and the currency. Therefore for Ecto
serialization Postgres is assumed as the data store. Serialization is
entirely optional and Ecto is not a package dependency.
6. All arithmetic functions work in fixed point decimal. No rounding
occurs automatically (unless expressly called out for a function).
7. Explicit rounding obeys the rounding rules for a given currency. The
rounding rules are defined by the Unicode consortium in its CLDR
repository as implemented by the hex package `ex_cldr`. These rules
define the number of fractional digits for a currency and the rounding
increment where appropriate.
"""
# Kernel.round/1 is excluded, presumably because this module defines its
# own round (defined later in this file) - TODO confirm against the full file.
import Kernel, except: [round: 1]
@typedoc """
Money is composed of an atom representation of an ISO4217 currency code and
a `Decimal` representation of an amount.
"""
@type t :: %Money{currency: atom(), amount: Decimal.t()}
@type currency_code :: atom() | String.t()
@type amount :: float() | integer() | Decimal.t() | String.t()
# Both fields are mandatory: a Money value is meaningless without either.
@enforce_keys [:currency, :amount]
defstruct currency: nil, amount: nil
# The JSON library is resolved at compile time on purpose: Money cannot
# serialise without one, so a missing library should fail the build, not
# the first runtime call.
@json_library Application.get_env(:ex_money, :json_library, Cldr.Config.json_library())
unless Code.ensure_loaded?(@json_library) do
IO.puts("""
The json_library '#{inspect(@json_library)}' does not appear
to be available. A json library is required
for Money to operate. Is it configured as a
dependency in mix.exs?
In config.exs your explicit or implicit configuration is:
config ex_money,
json_library: #{inspect(@json_library)}
In mix.exs you will need something like:
def deps() do
[
...
{:#{String.downcase(inspect(@json_library))}, version_string}
]
end
""")
raise ArgumentError,
"Json library #{String.downcase(inspect(@json_library))} does " <>
"not appear to be a dependency"
end
# Default mode for rounding is :half_even, also known
# as bankers rounding
@default_rounding_mode :half_even
alias Cldr.Currency
alias Money.ExchangeRates
# Currency validation and the lists of known currencies are delegated to
# ex_cldr (and Money.Currency), which own the ISO 4217 currency data.
defdelegate validate_currency(currency_code), to: Cldr
defdelegate known_currencies, to: Cldr
defdelegate known_current_currencies, to: Money.Currency
defdelegate known_historic_currencies, to: Money.Currency
defdelegate known_tender_currencies, to: Money.Currency
@doc """
Returns a %Money{} struct from a currency code and a currency amount or
an error tuple of the form `{:error, {exception, message}}`.
## Options
* `currency_code` is an ISO4217 three-character upcased binary or atom
* `amount` is an integer, string or Decimal
Note that the `currency_code` and `amount` arguments can be supplied in
either order,
## Examples
iex> Money.new(:USD, 100)
#Money<:USD, 100>
iex> Money.new(100, :USD)
#Money<:USD, 100>
iex> Money.new("USD", 100)
#Money<:USD, 100>
iex> Money.new("thb", 500)
#Money<:THB, 500>
iex> Money.new("EUR", Decimal.new(100))
#Money<:EUR, 100>
iex> Money.new(:EUR, "100.30")
#Money<:EUR, 100.30>
iex> Money.new(:XYZZ, 100)
{:error, {Money.UnknownCurrencyError, "The currency :XYZZ is invalid"}}
iex(1)> Money.new 123.445, :USD
{:error,
{Money.InvalidAmountError,
"Float amounts are not supported in new/2 due to potential " <>
"rounding and precision issues. If absolutely required, " <>
"use Money.from_float/2"}}
"""
@spec new(amount | currency_code, amount | currency_code) ::
Money.t() | {:error, {Exception.t(), String.t()}}
# Binary currency code: validate, then re-dispatch with the atom code.
def new(currency_code, amount) when is_binary(currency_code) and is_integer(amount) do
case validate_currency(currency_code) do
{:error, {_exception, message}} -> {:error, {Money.UnknownCurrencyError, message}}
{:ok, code} -> new(code, amount)
end
end
def new(amount, currency_code) when is_binary(currency_code) and is_integer(amount) do
new(currency_code, amount)
end
def new(currency_code, amount) when is_atom(currency_code) and is_integer(amount) do
case validate_currency(currency_code) do
{:error, {_exception, message}} -> {:error, {Money.UnknownCurrencyError, message}}
{:ok, code} -> %Money{amount: Decimal.new(amount), currency: code}
end
end
def new(amount, currency_code) when is_integer(amount) and is_atom(currency_code) do
new(currency_code, amount)
end
def new(currency_code, %Decimal{} = amount)
when is_atom(currency_code) or is_binary(currency_code) do
case validate_currency(currency_code) do
{:error, {_exception, message}} -> {:error, {Money.UnknownCurrencyError, message}}
{:ok, code} -> %Money{amount: amount, currency: code}
end
end
def new(%Decimal{} = amount, currency_code)
when is_atom(currency_code) or is_binary(currency_code) do
new(currency_code, amount)
end
# String amounts are parsed by Decimal.new/1; a parse failure is converted
# into an InvalidAmountError tuple rather than raising.
def new(currency_code, amount) when is_atom(currency_code) and is_binary(amount) do
new(currency_code, Decimal.new(amount))
rescue
Decimal.Error ->
{
:error,
{Money.InvalidAmountError, "Amount cannot be converted to a number: #{inspect(amount)}"}
}
end
def new(amount, currency_code) when is_atom(currency_code) and is_binary(amount) do
new(currency_code, amount)
end
# Floats are rejected outright; use from_float/2 for explicit conversion.
def new(_currency_code, amount) when is_float(amount) do
{:error,
{Money.InvalidAmountError,
"Float amounts are not supported in new/2 due to potential rounding " <>
"and precision issues. If absolutely required, use Money.from_float/2"}}
end
def new(amount, currency_code) when is_float(amount) do
new(currency_code, amount)
end
# Two binaries are ambiguous: try each argument as the currency code and
# error only when neither validates.
def new(param_a, param_b) when is_binary(param_a) and is_binary(param_b) do
with {:ok, currency_code} <- validate_currency(param_a) do
new(currency_code, param_b)
else
{:error, _} ->
with {:ok, currency_code} <- validate_currency(param_b) do
new(currency_code, param_a)
else
{:error, _} ->
{:error,
{Money.Invalid,
"Unable to create money from #{inspect(param_a)} " <> "and #{inspect(param_b)}"}}
end
end
end
@doc """
Returns a %Money{} struct from a currency code and a currency amount. Raises an
exception if the current code is invalid.
## Options
* `currency_code` is an ISO4217 three-character upcased binary or atom
* `amount` is an integer, float or Decimal
## Examples
Money.new!(:XYZZ, 100)
** (Money.UnknownCurrencyError) Currency :XYZZ is not known
(ex_money) lib/money.ex:177: Money.new!/2
"""
@spec new!(amount | currency_code, amount | currency_code) :: Money.t() | no_return()
# This clause matches ANY call whose first argument is a binary or atom
# currency code, whatever the amount.
def new!(currency_code, amount)
when is_binary(currency_code) or is_atom(currency_code) do
case money = new(currency_code, amount) do
{:error, {exception, message}} -> raise exception, message
_ -> money
end
end
# Reversed-argument convenience clauses.
def new!(amount, currency_code)
when (is_binary(currency_code) or is_atom(currency_code)) and is_number(amount) do
new!(currency_code, amount)
end
def new!(%Decimal{} = amount, currency_code)
when is_binary(currency_code) or is_atom(currency_code) do
new!(currency_code, amount)
end
# NOTE: a fourth clause, `new!(currency_code, %Decimal{} = amount)` with the
# same guard as the first clause, was removed: it was unreachable because
# the first clause already matches every such call.
@doc """
Returns a %Money{} struct from a currency code and a float amount, or
an error tuple of the form `{:error, {exception, message}}`.
Floats are fraught with danger in computer arithmetic due to the
unexpected loss of precision during rounding. The IEEE754 standard
indicates that a number with a precision of 16 digits should
round-trip convert without loss of fidelity. This function supports
numbers with a precision up to 15 digits and will error if the
provided amount is outside that range.
**Note** that `Money` cannot detect lack of precision or rounding errors
introduced upstream. This function therefore should be used with
great care and its use should be considered potentially harmful.
## Options
* `currency_code` is an ISO4217 three-character upcased binary or atom
* `amount` is a float
## Examples
    iex> Money.from_float 1.23456, :USD
    #Money<:USD, 1.23456>
    iex> Money.from_float 1.234567890987656, :USD
    {:error,
    {Money.InvalidAmountError,
    "The precision of the float 1.234567890987656 is " <>
    "greater than 15 which could lead to unexpected results. " <>
    "Reduce the precision or call Money.new/2 with a Decimal or String amount"}}
"""
# @doc since: "2.0.0"
@max_precision_allowed 15
@spec from_float(float | currency_code, float | currency_code) ::
        Money.t() | {:error, {Exception.t(), String.t()}}
def from_float(currency_code, amount)
    when (is_binary(currency_code) or is_atom(currency_code)) and is_float(amount) do
  case Cldr.Number.precision(amount) do
    precision when precision <= @max_precision_allowed ->
      new(currency_code, Decimal.from_float(amount))

    _too_precise ->
      {:error,
       {Money.InvalidAmountError,
        "The precision of the float #{inspect(amount)} " <>
          "is greater than #{inspect(@max_precision_allowed)} " <>
          "which could lead to unexpected results. Reduce the " <>
          "precision or call Money.new/2 with a Decimal or String amount"}}
  end
end

# Accept the arguments in reverse order as well.
def from_float(amount, currency_code)
    when (is_binary(currency_code) or is_atom(currency_code)) and is_float(amount) do
  from_float(currency_code, amount)
end
@doc """
Returns a %Money{} struct from a currency code and a float amount, or
raises an exception if the currency code is invalid.
See `Money.from_float/2` for further information.
**Note** that `Money` cannot detect lack of precision or rounding errors
introduced upstream. This function therefore should be used with
great care and its use should be considered potentially harmful.
## Options
* `currency_code` is an ISO4217 three-character upcased binary or atom
* `amount` is a float
## Examples
    iex> Money.from_float!(:USD, 1.234)
    #Money<:USD, 1.234>
    Money.from_float!(:USD, 1.234567890987654)
    #=> ** (Money.InvalidAmountError) The precision of the float 1.234567890987654 is greater than 15 which could lead to unexpected results. Reduce the precision or call Money.new/2 with a Decimal or String amount
    (ex_money) lib/money.ex:293: Money.from_float!/2
"""
# @doc since: "2.0.0"
@spec from_float!(currency_code, float) :: Money.t() | no_return()
def from_float!(currency_code, amount) do
  # from_float/2 returns the Money struct directly on success, so only the
  # error shape needs handling; anything else falls through unchanged.
  with {:error, {exception, reason}} <- from_float(currency_code, amount) do
    raise exception, reason
  end
end
@doc """
Returns a %Money{} struct from a tuple consisting of a currency code and
a currency amount. The format of the argument is a 2-tuple where:
## Options
* `currency_code` is an ISO4217 three-character upcased binary
* `amount` is an integer or Decimal
This function is typically called from Ecto when it's loading a `%Money{}`
struct from the database.
## Example
    iex> Money.from_tuple({"USD", 100})
    #Money<:USD, 100>
    iex> Money.from_tuple({100, "USD"})
    #Money<:USD, 100>
"""
@deprecated "Use new/2 instead. Will be removed in Money 3.0"
@spec from_tuple({binary, number}) :: Money.t()
def from_tuple({currency_code, amount}) when is_binary(currency_code) and is_integer(amount) do
  # new/2 performs the same currency validation and Decimal conversion this
  # function previously inlined.
  new(currency_code, amount)
end

def from_tuple({amount, currency_code}) when is_binary(currency_code) and is_integer(amount) do
  from_tuple({currency_code, amount})
end
@doc """
Returns a %Money{} struct from a tuple consisting of a currency code and
a currency amount. Raises an exception if the currency code is invalid.
## Options
* `currency_code` is an ISO4217 three-character upcased binary
* `amount` is an integer, float or Decimal
This function is typically called from Ecto when it's loading a %Money{}
struct from the database.
## Example
    iex> Money.from_tuple!({"USD", 100})
    #Money<:USD, 100>
    Money.from_tuple!({"NO!", 100})
    ** (Money.UnknownCurrencyError) Currency "NO!" is not known
    (ex_money) lib/money.ex:130: Money.new!/1
"""
@deprecated "Use new/2 instead. Will be removed in Money 3.0"
def from_tuple!({currency_code, amount}) when is_binary(currency_code) and is_integer(amount) do
  # new!/2 performs exactly the raise-on-error wrapping this function
  # previously inlined around new/2.
  new!(currency_code, amount)
end

def from_tuple!({amount, currency_code}) when is_binary(currency_code) and is_integer(amount) do
  from_tuple!({currency_code, amount})
end
@doc """
Returns a formatted string representation of a `Money{}`.
Formatting is performed according to the rules defined by CLDR. See
`Cldr.Number.to_string/2` for formatting options. The default is to format
as a currency which applies the appropriate rounding and fractional digits
for the currency.
## Options
* `money_1` is any valid `Money.t` type returned
by `Money.new/2`
## Returns
* `{:ok, string}` or
* `{:error, reason}`
## Examples
    iex> Money.to_string Money.new(:USD, 1234)
    {:ok, "$1,234.00"}
    iex> Money.to_string Money.new(:JPY, 1234)
    {:ok, "¥1,234"}
    iex> Money.to_string Money.new(:THB, 1234)
    {:ok, "THB1,234.00"}
    iex> Money.to_string Money.new(:USD, 1234), format: :long
    {:ok, "1,234 US dollars"}
"""
def to_string(%Money{} = money, options \\ []) do
  # The money's own currency always wins over any :currency option supplied
  # by the caller (merged via merge_options/2).
  formatting_options = merge_options(options, currency: money.currency)
  Cldr.Number.to_string(money.amount, formatting_options)
end
@doc """
Returns a formatted string representation of a `Money{}` or raises if
there is an error.
Formatting is performed according to the rules defined by CLDR. See
`Cldr.Number.to_string!/2` for formatting options. The default is to format
as a currency which applies the appropriate rounding and fractional digits
for the currency.
## Examples
    iex> Money.to_string! Money.new(:USD, 1234)
    "$1,234.00"
    iex> Money.to_string! Money.new(:JPY, 1234)
    "¥1,234"
    iex> Money.to_string! Money.new(:THB, 1234)
    "THB1,234.00"
    iex> Money.to_string! Money.new(:USD, 1234), format: :long
    "1,234 US dollars"
"""
def to_string!(%Money{} = money, options \\ []) do
  # Raising variant: delegates formatting to Cldr's bang function.
  formatting_options = merge_options(options, currency: money.currency)
  Cldr.Number.to_string!(money.amount, formatting_options)
end
@doc """
Returns the amount part of a `Money` type as a `Decimal`
## Options
* `money` is any valid `Money.t` type returned
by `Money.new/2`
## Returns
* a `Decimal.t`
## Example
    iex> m = Money.new("USD", 100)
    iex> Money.to_decimal(m)
    #Decimal<100>
"""
@spec to_decimal(money :: Money.t()) :: Decimal.t()
def to_decimal(%Money{} = money), do: money.amount
@doc """
Add two `Money` values.
## Options
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `{:ok, money}` or
* `{:error, reason}`
## Example
    iex> Money.add Money.new(:USD, 200), Money.new(:USD, 100)
    {:ok, Money.new(:USD, 300)}
    iex> Money.add Money.new(:USD, 200), Money.new(:AUD, 100)
    {:error, {ArgumentError, "Cannot add monies with different currencies. " <>
    "Received :USD and :AUD."}}
"""
@spec add(money_1 :: Money.t(), money_2 :: Money.t()) ::
        {:ok, Money.t()} | {:error, {Exception.t(), String.t()}}
# Matching currency on both operands enforces rule 3 of the moduledoc:
# arithmetic only on like currencies.
def add(%Money{currency: currency, amount: a}, %Money{currency: currency, amount: b}),
  do: {:ok, %Money{currency: currency, amount: Decimal.add(a, b)}}

def add(%Money{currency: code_a}, %Money{currency: code_b}) do
  {:error,
   {ArgumentError,
    "Cannot add monies with different currencies. " <>
      "Received #{inspect(code_a)} and #{inspect(code_b)}."}}
end
@doc """
Add two `Money` values and raise on error.
## Options
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `{:ok, money}` or
* raises an exception
## Examples
    iex> Money.add! Money.new(:USD, 200), Money.new(:USD, 100)
    #Money<:USD, 300>
    Money.add! Money.new(:USD, 200), Money.new(:CAD, 500)
    ** (ArgumentError) Cannot add two %Money{} with different currencies. Received :USD and :CAD.
"""
def add!(%Money{} = a, %Money{} = b) do
  case add(a, b) do
    {:ok, sum} -> sum
    {:error, {exception, message}} -> raise exception, message
  end
end
@doc """
Subtract one `Money` value struct from another.
## Options
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `{:ok, money}` or
* `{:error, reason}`
## Example
    iex> Money.sub Money.new(:USD, 200), Money.new(:USD, 100)
    {:ok, Money.new(:USD, 100)}
"""
@spec sub(money_1 :: Money.t(), money_2 :: Money.t()) ::
        {:ok, Money.t()} | {:error, {Exception.t(), String.t()}}
# Subtraction is only defined between like currencies; see add/2.
def sub(%Money{currency: currency, amount: minuend}, %Money{currency: currency, amount: subtrahend}),
  do: {:ok, %Money{currency: currency, amount: Decimal.sub(minuend, subtrahend)}}

def sub(%Money{currency: code_a}, %Money{currency: code_b}) do
  {:error,
   {ArgumentError,
    "Cannot subtract two monies with different currencies. " <>
      "Received #{inspect(code_a)} and #{inspect(code_b)}."}}
end
@doc """
Subtract one `Money` value struct from another and raise on error.
Returns either `{:ok, money}` or `{:error, reason}`.
## Options
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* a `Money.t` struct or
* raises an exception
## Examples
    iex> Money.sub! Money.new(:USD, 200), Money.new(:USD, 100)
    #Money<:USD, 100>
    Money.sub! Money.new(:USD, 200), Money.new(:CAD, 500)
    ** (ArgumentError) Cannot subtract monies with different currencies. Received :USD and :CAD.
"""
@spec sub!(money_1 :: Money.t(), money_2 :: Money.t()) :: Money.t() | none()
def sub!(%Money{} = minuend, %Money{} = subtrahend) do
  case sub(minuend, subtrahend) do
    {:ok, difference} -> difference
    {:error, {exception, message}} -> raise exception, message
  end
end
@doc """
Multiply a `Money` value by a number.
## Options
* `money` is any valid `Money.t` type returned
by `Money.new/2`
* `number` is an integer, float or `Decimal.t`
> Note that multiplying one %Money{} by another is not supported.
## Returns
* `{:ok, money}` or
* `{:error, reason}`
## Example
iex> Money.mult(Money.new(:USD, 200), 2)
{:ok, Money.new(:USD, 400)}
iex> Money.mult(Money.new(:USD, 200), "xx")
{:error, {ArgumentError, "Cannot multiply money by \\"xx\\""}}
"""
@spec mult(Money.t(), Cldr.Math.number_or_decimal()) ::
{:ok, Money.t()} | {:error, {Exception.t(), String.t()}}
def mult(%Money{currency: code, amount: amount}, number) when is_integer(number) do
{:ok, %Money{currency: code, amount: Decimal.mult(amount, Decimal.new(number))}}
end
# Floats need their own clause: Decimal.new/1 raises on floats, so the
# documented float support requires Decimal.from_float/1 (as used by
# from_float/2 above).
def mult(%Money{currency: code, amount: amount}, number) when is_float(number) do
{:ok, %Money{currency: code, amount: Decimal.mult(amount, Decimal.from_float(number))}}
end
def mult(%Money{currency: code, amount: amount}, %Decimal{} = number) do
{:ok, %Money{currency: code, amount: Decimal.mult(amount, number)}}
end
def mult(%Money{}, other) do
{:error, {ArgumentError, "Cannot multiply money by #{inspect(other)}"}}
end
@doc """
Multiply a `Money` value by a number and raise on error.
## Options
* `money` is any valid `Money.t` types returned
by `Money.new/2`
* `number` is an integer, float or `Decimal.t`
## Returns
* a `Money.t` or
* raises an exception
## Examples
    iex> Money.mult!(Money.new(:USD, 200), 2)
    #Money<:USD, 400>
    Money.mult!(Money.new(:USD, 200), :invalid)
    ** (ArgumentError) Cannot multiply money by :invalid
"""
@spec mult!(Money.t(), Cldr.Math.number_or_decimal()) :: Money.t() | none()
def mult!(%Money{} = money, multiplier) do
  case mult(money, multiplier) do
    {:ok, product} -> product
    {:error, {exception, message}} -> raise exception, message
  end
end
@doc """
Divide a `Money` value by a number.
## Options
* `money` is any valid `Money.t` types returned
by `Money.new/2`
* `number` is an integer, float or `Decimal.t`
> Note that dividing one %Money{} by another is not supported.
## Returns
* `{:ok, money}` or
* `{:error, reason}`
## Example
iex> Money.div Money.new(:USD, 200), 2
{:ok, Money.new(:USD, 100)}
iex> Money.div(Money.new(:USD, 200), "xx")
{:error, {ArgumentError, "Cannot divide money by \\"xx\\""}}
"""
@spec div(Money.t(), Cldr.Math.number_or_decimal()) ::
{:ok, Money.t()} | {:error, {Exception.t(), String.t()}}
def div(%Money{currency: code, amount: amount}, number) when is_integer(number) do
{:ok, %Money{currency: code, amount: Decimal.div(amount, Decimal.new(number))}}
end
# Floats need their own clause: Decimal.new/1 raises on floats, so the
# documented float support requires Decimal.from_float/1 (as used by
# from_float/2 above).
def div(%Money{currency: code, amount: amount}, number) when is_float(number) do
{:ok, %Money{currency: code, amount: Decimal.div(amount, Decimal.from_float(number))}}
end
def div(%Money{currency: code, amount: amount}, %Decimal{} = number) do
{:ok, %Money{currency: code, amount: Decimal.div(amount, number)}}
end
def div(%Money{}, other) do
{:error, {ArgumentError, "Cannot divide money by #{inspect(other)}"}}
end
@doc """
Divide a `Money` value by a number and raise on error.
## Options
* `money` is any valid `Money.t` types returned
by `Money.new/2`
* `number` is an integer, float or `Decimal.t`
## Returns
* a `Money.t` struct or
* raises an exception
## Examples
    iex> Money.div Money.new(:USD, 200), 2
    {:ok, Money.new(:USD, 100)}
    Money.div(Money.new(:USD, 200), "xx")
    ** (ArgumentError) "Cannot divide money by \\"xx\\""]}}
"""
def div!(%Money{} = money, divisor) do
  # Qualified call (Money.div) is required because Kernel.div/2 would
  # otherwise shadow the local div/2 defined above.
  case Money.div(money, divisor) do
    {:ok, quotient} -> quotient
    {:error, {exception, message}} -> raise exception, message
  end
end
@doc """
Returns a boolean indicating if two `Money` values are equal
## Options
* `money_1` and `money_2` are any valid `Money.t` types returned
by `Money.new/2`
## Returns
* `true` or `false`
## Example
    iex> Money.equal? Money.new(:USD, 200), Money.new(:USD, 200)
    true
    iex> Money.equal? Money.new(:USD, 200), Money.new(:USD, 100)
    false
"""
@spec equal?(money_1 :: Money.t(), money_2 :: Money.t()) :: boolean
# Equal only when currencies match AND the Decimal amounts compare equal;
# any currency mismatch (or non-Money input) falls to the false clause.
def equal?(%Money{currency: currency, amount: a}, %Money{currency: currency, amount: b}),
  do: Decimal.equal?(a, b)

def equal?(_, _), do: false
@doc """
Compares two `Money` values numerically. If the first number is greater
than the second :gt is returned, if less than :lt is returned, if both
numbers are equal :eq is returned.

## Options

* `money_1` and `money_2` are any valid `Money.t` types returned
  by `Money.new/2`

## Returns

* `:gt` | `:eq` | `:lt` or

* `{:error, {Exception.t, String.t}}`

## Examples

    iex> Money.cmp Money.new(:USD, 200), Money.new(:USD, 100)
    :gt

    iex> Money.cmp Money.new(:USD, 200), Money.new(:USD, 200)
    :eq

    iex> Money.cmp Money.new(:USD, 200), Money.new(:USD, 500)
    :lt

    iex> Money.cmp Money.new(:USD, 200), Money.new(:CAD, 500)
    {:error,
     {ArgumentError,
      "Cannot compare monies with different currencies. Received :USD and :CAD."}}

"""
@spec cmp(money_1 :: Money.t(), money_2 :: Money.t()) ::
        :gt | :eq | :lt | {:error, {Exception.t(), String.t()}}

# Same currency: delegate the comparison to Decimal.
def cmp(%Money{currency: currency, amount: left}, %Money{currency: currency, amount: right}) do
  Decimal.cmp(left, right)
end

# Comparing across currencies is not meaningful without conversion.
def cmp(%Money{currency: left_code}, %Money{currency: right_code}) do
  {:error,
   {ArgumentError,
    "Cannot compare monies with different currencies. " <>
      "Received #{inspect(left_code)} and #{inspect(right_code)}."}}
end
@doc """
Compares two `Money` values numerically and raises on error.

## Options

* `money_1` and `money_2` are any valid `Money.t` types returned
  by `Money.new/2`

## Returns

* `:gt` | `:eq` | `:lt` or

* raises an exception

## Examples

    Money.cmp! Money.new(:USD, 200), Money.new(:CAD, 500)
    ** (ArgumentError) Cannot compare monies with different currencies. Received :USD and :CAD.

"""
def cmp!(%Money{} = money_1, %Money{} = money_2) do
  comparison = cmp(money_1, money_2)

  case comparison do
    {:error, {exception, reason}} -> raise exception, reason
    _ -> comparison
  end
end
@doc """
Compares two `Money` values numerically. If the first number is greater
than the second #Integer<1> is returned, if less than Integer<-1> is
returned. Otherwise, if both numbers are equal Integer<0> is returned.

## Options

* `money_1` and `money_2` are any valid `Money.t` types returned
  by `Money.new/2`

## Returns

* `-1` | `0` | `1` or

* `{:error, {Exception.t, String.t}}`

## Examples

    iex> Money.compare Money.new(:USD, 200), Money.new(:USD, 100)
    1

    iex> Money.compare Money.new(:USD, 200), Money.new(:USD, 200)
    0

    iex> Money.compare Money.new(:USD, 200), Money.new(:USD, 500)
    -1

    iex> Money.compare Money.new(:USD, 200), Money.new(:CAD, 500)
    {:error,
     {ArgumentError,
      "Cannot compare monies with different currencies. Received :USD and :CAD."}}

"""
@spec compare(money_1 :: Money.t(), money_2 :: Money.t()) ::
        -1 | 0 | 1 | {:error, {Exception.t(), String.t()}}

# Same currency: Decimal.compare/2 yields a decimal -1/0/1 which is then
# converted to a plain integer.
def compare(%Money{currency: currency, amount: left}, %Money{currency: currency, amount: right}) do
  Decimal.to_integer(Decimal.compare(left, right))
end

# Comparing across currencies is not meaningful without conversion.
def compare(%Money{currency: left_code}, %Money{currency: right_code}) do
  {:error,
   {ArgumentError,
    "Cannot compare monies with different currencies. " <>
      "Received #{inspect(left_code)} and #{inspect(right_code)}."}}
end
@doc """
Compares two `Money` values numerically and raises on error.

## Options

* `money_1` and `money_2` are any valid `Money.t` types returned
  by `Money.new/2`

## Returns

* `-1` | `0` | `1` or

* raises an exception

## Examples

    Money.compare! Money.new(:USD, 200), Money.new(:CAD, 500)
    ** (ArgumentError) Cannot compare monies with different currencies. Received :USD and :CAD.

"""
def compare!(%Money{} = money_1, %Money{} = money_2) do
  comparison = compare(money_1, money_2)

  case comparison do
    {:error, {exception, reason}} -> raise exception, reason
    _ -> comparison
  end
end
@doc """
Split a `Money` value into a number of parts maintaining the currency's
precision and rounding and ensuring that the parts sum to the original
amount.

## Options

* `money` is a `%Money{}` struct

* `parts` is an integer number of parts into which the `money` is split

Returns a tuple `{dividend, remainder}` as the function result
derived as follows:

1. Round the money amount to the required currency precision using
   `Money.round/1`

2. Divide the result of step 1 by the integer divisor

3. Round the result of the division to the precision of the currency
   using `Money.round/1`

4. Return two numbers: the result of the division and any remainder
   that could not be applied given the precision of the currency.

## Examples

    Money.split Money.new(123.5, :JPY), 3
    {¥41, ¥1}

    Money.split Money.new(123.4, :JPY), 3
    {¥41, ¥0}

    Money.split Money.new(123.7, :USD), 9
    {$13.74, $0.04}

"""
@spec split(Money.t(), non_neg_integer) :: {Money.t(), Money.t()}
def split(%Money{} = money, parts) when is_integer(parts) do
  # Round before and after dividing so each share is representable in
  # the currency's precision.
  share =
    money
    |> Money.round()
    |> Money.div!(parts)
    |> Money.round()

  # Whatever cannot be distributed across the shares is the remainder.
  remainder = sub!(money, mult!(share, parts))
  {share, remainder}
end
@doc """
Round a `Money` value into the acceptable range for the requested currency.

## Arguments

* `money` is a `%Money{}` struct

* `opts` is a keyword list of options

## Options

* `:rounding_mode` that defines how the number will be rounded. See
  `Decimal.Context`. The default is `:half_even` which is also known
  as "banker's rounding"

* `:currency_digits` which determines the rounding increment.
  The valid options are `:cash`, `:accounting` and `:iso`. The
  default is `:iso`. The rounding increment applies to currencies
  such as :AUD and :CHF which have an accounting increment of 0.01
  but a minimum cash increment of 0.05.

## Notes

There are two kinds of rounding applied:

1. Round to the appropriate number of fractional digits

2. Apply an appropriate rounding increment. Most currencies
   round to the same precision as the number of decimal digits, but some
   such as :AUD and :CHF round to a minimum such as 0.05 when its a cash
   amount.

## Examples

    iex> Money.round Money.new("123.73", :CHF), currency_digits: :cash
    #Money<:CHF, 123.75>

    iex> Money.round Money.new("123.7456", :CHF)
    #Money<:CHF, 123.75>

    Money.round Money.new("123.7456", :JPY)
    #Money<:JPY, 124>

"""
@spec round(Money.t(), Keyword.t()) :: Money.t()
def round(%Money{} = money, opts \\ []) do
  # First trim fractional digits, then snap to the currency's rounding
  # increment (e.g. 0.05 for CHF cash amounts).
  round_to_nearest(round_to_decimal_digits(money, opts), opts)
end
# Rounds the amount to the number of fractional digits defined for the
# currency (selected via the :currency_digits option).
defp round_to_decimal_digits(%Money{currency: code, amount: amount}, opts) do
  with {:ok, currency} <- Currency.currency_for_code(code) do
    mode = Keyword.get(opts, :rounding_mode, @default_rounding_mode)
    digits = digits_from_opts(currency, opts[:currency_digits])
    %Money{currency: code, amount: Decimal.round(amount, digits, mode)}
  end
end

# Maps the :currency_digits option onto the matching digits definition of
# the currency. Anything other than :accounting or :cash falls back to the
# ISO digits (this covers nil, :iso and unrecognised values).
defp digits_from_opts(currency, digits_selector) do
  case digits_selector do
    :accounting -> currency.digits
    :cash -> currency.cash_digits
    _iso_nil_or_other -> currency.iso_digits
  end
end

# Snaps the amount to the currency's rounding increment where one applies.
defp round_to_nearest(%Money{currency: code} = money, opts) do
  with {:ok, currency} <- Currency.currency_for_code(code) do
    digits = digits_from_opts(currency, opts[:currency_digits])
    increment = increment_from_opts(currency, opts[:currency_digits])
    do_round_to_nearest(money, digits, increment, opts)
  end
end

# A zero increment means the currency needs no increment-based rounding.
defp do_round_to_nearest(money, _digits, 0, _opts), do: money

defp do_round_to_nearest(money, digits, increment, opts) do
  mode = Keyword.get(opts, :rounding_mode, @default_rounding_mode)

  # The rounding step as a decimal, e.g. digits: 2, increment: 5 -> 0.05.
  step =
    -digits
    |> Cldr.Math.power_of_10()
    |> Kernel.*(increment)
    |> Decimal.new()

  snapped =
    money.amount
    |> Decimal.div(step)
    |> Decimal.round(0, mode)
    |> Decimal.mult(step)

  %Money{currency: money.currency, amount: snapped}
end

# The rounding increment appropriate for the :currency_digits option.
defp increment_from_opts(currency, :cash), do: currency.cash_rounding
defp increment_from_opts(currency, _), do: currency.rounding
@doc """
Convert `money` from one currency to another.

## Options

* `money` is any `Money.t` struct returned by `Cldr.Currency.new/2`

* `to_currency` is a valid currency code into which the `money` is converted

* `rates` is a `Map` of currency rates where the map key is an upcased
  atom or string and the value is a Decimal conversion factor. The default is the
  latest available exchange rates returned from `Money.ExchangeRates.latest_rates()`

## Examples

    Money.to_currency(Money.new(:USD, 100), :AUD, %{USD: Decimal.new(1), AUD: Decimal.new(0.7345)})
    {:ok, #Money<:AUD, 73.4500>}

    Money.to_currency(Money.new("USD", 100), "AUD", %{"USD" => Decimal.new(1), "AUD" => Decimal.new(0.7345)})
    {:ok, #Money<:AUD, 73.4500>}

    iex> Money.to_currency Money.new(:USD, 100) , :AUDD, %{USD: Decimal.new(1), AUD: Decimal.new(0.7345)}
    {:error, {Cldr.UnknownCurrencyError, "The currency :AUDD is invalid"}}

    iex> Money.to_currency Money.new(:USD, 100) , :CHF, %{USD: Decimal.new(1), AUD: Decimal.new(0.7345)}
    {:error, {Money.ExchangeRateError, "No exchange rate is available for currency :CHF"}}

"""
@spec to_currency(
        Money.t(),
        currency_code(),
        ExchangeRates.t() | {:ok, ExchangeRates.t()} | {:error, {Exception.t(), String.t()}}
      ) :: {:ok, Money.t()} | {:error, {Exception.t(), String.t()}}
def to_currency(money, to_currency, rates \\ Money.ExchangeRates.latest_rates())

# Identical currency code: no conversion required.
def to_currency(%Money{currency: currency} = money, to_currency, _rates)
    when currency == to_currency do
  {:ok, money}
end

def to_currency(%Money{currency: currency} = money, to_currency, %{} = rates)
    when is_atom(to_currency) or is_binary(to_currency) do
  with {:ok, to_code} <- validate_currency(to_currency) do
    # Bug fix: this branch previously returned a bare `money`, violating
    # the `{:ok, money} | {:error, reason}` contract documented above.
    if currency == to_code, do: {:ok, money}, else: to_currency(money, to_currency, {:ok, rates})
  end
end

def to_currency(%Money{currency: from_currency, amount: amount}, to_currency, {:ok, rates})
    when is_atom(to_currency) or is_binary(to_currency) do
  with {:ok, currency_code} <- validate_currency(to_currency),
       {:ok, base_rate} <- get_rate(from_currency, rates),
       {:ok, conversion_rate} <- get_rate(currency_code, rates) do
    # Convert via the base currency: amount / from-rate * to-rate.
    converted_amount =
      amount
      |> Decimal.div(base_rate)
      |> Decimal.mult(conversion_rate)

    {:ok, Money.new(to_currency, converted_amount)}
  end
end

# Pass through an upstream rate-retrieval error.
def to_currency(_money, _to_currency, {:error, reason}) do
  {:error, reason}
end
@doc """
Convert `money` from one currency to another and raises on error

## Arguments

* `money` is any `Money.t` struct returned by `Cldr.Currency.new/2`

* `to_currency` is a valid currency code into which the `money` is converted

* `rates` is a `Map` of currency rates where the map key is an upcased
  atom or string and the value is a Decimal conversion factor. The default is the
  latest available exchange rates returned from `Money.ExchangeRates.latest_rates()`

## Examples

    iex> Money.to_currency! Money.new(:USD, 100) , :AUD, %{USD: Decimal.new(1), AUD: Decimal.new(0.7345)}
    #Money<:AUD, 73.4500>

    iex> Money.to_currency! Money.new("USD", 100) , "AUD", %{"USD" => Decimal.new(1), "AUD" => Decimal.new(0.7345)}
    #Money<:AUD, 73.4500>

    Money.to_currency! Money.new(:USD, 100) , :ZZZ, %{USD: Decimal.new(1), AUD: Decimal.new(0.7345)}
    ** (Cldr.UnknownCurrencyError) Currency :ZZZ is not known

"""
@spec to_currency!(
        Money.t(),
        currency_code(),
        ExchangeRates.t() | {:ok, ExchangeRates.t()} | {:error, {Exception.t(), String.t()}}
      ) :: Money.t() | no_return
def to_currency!(money, to_currency, rates \\ Money.ExchangeRates.latest_rates())

def to_currency!(%Money{} = money, currency, rates) do
  money
  |> to_currency(currency, rates)
  |> unwrap_converted!()
end

# Unwraps a successful conversion, raising for any error result.
defp unwrap_converted!({:ok, converted}) do
  converted
end

defp unwrap_converted!({:error, {exception, reason}}) do
  raise exception, reason
end
@doc """
Calls `Decimal.reduce/1` on the given `Money.t()`

This will reduce the coefficient and exponent of the
decimal amount in a standard way that may aid in
native comparison of `%Money.t()` items.

## Example

    iex> x = %Money{currency: :USD, amount: %Decimal{sign: 1, coef: 42, exp: 0}}
    #Money<:USD, 42>
    iex> y = %Money{currency: :USD, amount: %Decimal{sign: 1, coef: 4200000000, exp: -8}}
    #Money<:USD, 42.00000000>
    iex> x == y
    false
    iex> y = Money.reduce(x)
    #Money<:USD, 42>
    iex> x == y
    true

"""
@spec reduce(Money.t()) :: Money.t()
def reduce(%Money{currency: code, amount: amount}) do
  %Money{currency: code, amount: Decimal.reduce(amount)}
end
@doc """
Returns a tuple comprising the currency code, integer amount,
exponent and remainder

Some services require submission of money items as an integer
with an implied exponent that is appropriate to the currency.

Rather than return only the integer, `Money.to_integer_exp`
returns the currency code, integer, exponent and remainder.
The remainder is included because to return an integer
money with an implied exponent the `Money` has to be rounded
potentially leaving a remainder.

## Options

* `money` is any `Money.t` struct returned by `Cldr.Currency.new/2`

## Notes

* Since the returned integer is expected to have the implied fractional
  digits the `Money` needs to be rounded which is what this function does.

## Example

    iex> m = Money.new(:USD, "200.012356")
    #Money<:USD, 200.012356>
    iex> Money.to_integer_exp(m)
    {:USD, 20001, -2, Money.new(:USD, "0.002356")}

    iex> m = Money.new(:USD, "200.00")
    #Money<:USD, 200.00>
    iex> Money.to_integer_exp(m)
    {:USD, 20000, -2, Money.new(:USD, "0.00")}

"""
def to_integer_exp(%Money{} = money, opts \\ []) do
  # Round to the currency's precision, then normalise the decimal so the
  # coefficient/exponent pair is canonical.
  rounded =
    money
    |> Money.round(opts)
    |> Money.reduce()

  # Anything lost in rounding is returned to the caller as the remainder.
  {:ok, remainder} = Money.sub(money, rounded)
  {:ok, currency} = Cldr.Currency.currency_for_code(money.currency)

  exponent = -digits_from_opts(currency, opts[:currency_digits])

  # Scale the coefficient so it carries exactly the implied fractional digits.
  shift = abs(exponent - rounded.amount.exp)
  integer = Cldr.Math.power_of_10(shift) * rounded.amount.coef

  {money.currency, integer, exponent, remainder}
end
@doc """
Convert an integer representation of money into a `Money` struct.

This is the inverse operation of `Money.to_integer_exp/1`. Note
that the ISO definition of currency digits (subunit) is *always*
used. This is, in some cases like the Colombian Peso (COP)
different to the CLDR definition.

## Options

* `integer` is an integer representation of a mooney item including
  any decimal digits. ie. 20000 would interpreted to mean $200.00

* `currency` is the currency code for the `integer`. The assumed
  decimal places is derived from the currency code.

## Returns

* A `Money` struct or

* `{:error, {Cldr.UnknownCurrencyError, message}}`

## Examples

    iex> Money.from_integer(20000, :USD)
    #Money<:USD, 200.00>

    iex> Money.from_integer(200, :JPY)
    #Money<:JPY, 200>

    iex> Money.from_integer(20012, :USD)
    #Money<:USD, 200.12>

    iex> Money.from_integer(20012, :COP)
    #Money<:COP, 200.12>

"""
@spec from_integer(integer, currency_code) :: Money.t() | {:error, Exception.t(), String.t()}
def from_integer(amount, currency) when is_integer(amount) do
  with {:ok, code} <- validate_currency(currency),
       {:ok, %{iso_digits: iso_digits}} <- Cldr.Currency.currency_for_code(code) do
    sign = if amount < 0, do: -1, else: 1
    # The currency's subunit count becomes the (negative) decimal exponent.
    exponent = if iso_digits == 0, do: 0, else: -iso_digits

    sign
    |> Decimal.new(abs(amount), exponent)
    |> Money.new(code)
  end
end
@doc """
Return a zero amount `Money.t` in the given currency

## Example

    iex> Money.zero(:USD)
    #Money<:USD, 0>

    iex> money = Money.new(:USD, 200)
    iex> Money.zero(money)
    #Money<:USD, 0>

    iex> Money.zero :ZZZ
    {:error, {Cldr.UnknownCurrencyError, "The currency :ZZZ is invalid"}}

"""
@spec zero(currency_code | Money.t()) :: Money.t()

# A money struct: take its currency and build the zero amount for it.
def zero(%{currency: currency, amount: _amount}) do
  zero(currency)
end

# A bare currency code: validate it first.
def zero(currency) do
  with {:ok, code} <- validate_currency(currency) do
    Money.new(code, 0)
  end
end
@doc false
# Convenience clause that accepts the 4-tuple produced by to_integer_exp/2
# and rebuilds the Money (the exponent/remainder are implied by the currency).
def from_integer({currency, integer, _exponent, _remainder}) do
  from_integer(integer, currency)
end
## Helpers

@doc false
# Reads an :ex_money configuration value. A `{:system, var}` tuple is
# resolved against the OS environment at runtime, falling back to `default`
# when the environment variable is unset.
def get_env(key, default \\ nil) do
  case Application.get_env(:ex_money, key, default) do
    {:system, env_key} -> System.get_env(env_key) || default
    value -> value
  end
end
# As get_env/2 but coerces the value to the requested type.
def get_env(key, default, :integer) do
  to_integer(get_env(key, default))
end

def get_env(key, default, :maybe_integer) do
  to_maybe_integer(get_env(key, default))
end

def get_env(key, default, :module) do
  to_module(get_env(key, default))
end

def get_env(key, default, :boolean) do
  value = get_env(key, default)

  if is_boolean(value) do
    value
  else
    raise RuntimeError,
          "[ex_money] The configuration key " <>
            "#{inspect(key)} must be either true or false. #{inspect(value)} was provided."
  end
end
# Coerce a configuration value to an integer; nil passes straight through.
defp to_integer(nil), do: nil
defp to_integer(value) when is_integer(value), do: value
defp to_integer(value) when is_binary(value), do: String.to_integer(value)

# As to_integer/1, but atoms (including nil) pass through untouched.
defp to_maybe_integer(value) when is_integer(value), do: value
defp to_maybe_integer(value) when is_atom(value), do: value
defp to_maybe_integer(value) when is_binary(value), do: String.to_integer(value)

# Resolve a module given as a string into a module atom; atoms pass through.
defp to_module(nil), do: nil
defp to_module(module_name) when is_atom(module_name), do: module_name

defp to_module(module_name) when is_binary(module_name) do
  Module.concat([module_name])
end
# Looks up a conversion rate for `currency`, accepting either the atom code
# or its string form as the key in the rates map.
defp get_rate(currency, rates) do
  candidate_keys = [currency, Atom.to_string(currency)]

  case rates |> Map.take(candidate_keys) |> Map.values() do
    [rate] ->
      {:ok, rate}

    _ ->
      {:error,
       {Money.ExchangeRateError,
        "No exchange rate is available for currency #{inspect(currency)}"}}
  end
end
# Merges `required` options over `options`; on a key clash the required
# value always wins.
defp merge_options(options, required) do
  Keyword.merge(options, required, fn _key, _original, override -> override end)
end
@doc false
# Exposes the JSON library captured at compile time in the @json_library
# module attribute (set elsewhere in this module).
def json_library do
  @json_library
end
end
|
lib/money.ex
| 0.918256
| 0.776157
|
money.ex
|
starcoder
|
defmodule Marker.Element do
  @moduledoc """
  This module is responsible for generating element macro's. Marker generates by default all html5 elements,
  but you can easily generate other elements too:

  ```elixir
  defmodule MyElements do
    use Marker.Element, tags: [:my_element, :another_one]
  end
  ```

  You can now use your custom elements like the default elements:

  ```elixir
  use MyElements

  my_element id: 42 do
    another_one "Hello world"
  end
  ```

  Which will result in:

  ```elixir
  {:safe, "<my_element id='42'><another_one>Hello world</another_one></my_element>"}
  ```

  ### Casing

  You can control the casing of the generated elements too:

  ```elixir
  defmodule MyElements do
    use Marker.Element, casing: :camel, tags: [:my_element, :another_one]
  end

  my_element id: 42 do
    another_one "Hello world"
  end

  {:safe, "<myElement id='42'><anotherOne>Hello world</anotherOne></myElement>"}
  ```

  The following casing options are allowed:

  * `:snake` => `my_element` (default)
  * `:snake_upcase` => `MY_ELEMENT`
  * `:pascal` => `MyElement`
  * `:camel` => `myElement`
  * `:lisp` => `my-element`
  * `:lisp_upcase` => `MY-ELEMENT`
  """

  # An element is a tag name, its attributes and its (possibly nested) content.
  defstruct tag: :div, attrs: %{}, content: nil

  @type attr_name :: atom
  @type attr_value :: Marker.Encoder.t()
  @type attrs :: [{attr_name, attr_value}]
  @type t :: %Marker.Element{tag: atom, content: Marker.content(), attrs: attrs}

  @doc false
  # Invoked via `use Marker.Element, tags: [...], casing: ...`. Injects into
  # the caller:
  #   * a `__using__/1` macro so the caller module can itself be `use`d,
  #     importing its element macros while excluding any Kernel functions or
  #     macros they would shadow, and
  #   * one element macro per tag, generated by Marker.Element.def_element/2.
  defmacro __using__(opts) do
    tags = opts[:tags] || []
    casing = opts[:casing] || :snake

    quote do
      defmacro __using__(_) do
        ambiguous_imports = Marker.Element.find_ambiguous_imports(unquote(tags))

        quote do
          import Kernel, except: unquote(ambiguous_imports)
          import unquote(__MODULE__)
        end
      end

      Enum.each(unquote(tags), fn tag ->
        Marker.Element.def_element(tag, unquote(casing))
      end)
    end
  end

  @doc false
  # Defines a macro named after `tag` that builds a %Marker.Element{} from its
  # (optional) attributes and content and compiles it in the caller's
  # environment. The rendered tag atom has `casing` applied first.
  defmacro def_element(tag, casing) do
    quote bind_quoted: [tag: tag, casing: casing] do
      defmacro unquote(tag)(content_or_attrs \\ nil, maybe_content \\ nil) do
        tag = unquote(tag) |> Marker.Element.apply_casing(unquote(casing))

        {attrs, content} =
          Marker.Element.normalize_args(content_or_attrs, maybe_content, __CALLER__)

        %Marker.Element{tag: tag, attrs: attrs, content: content}
        |> Marker.Compiler.compile_env(__CALLER__)
      end
    end
  end

  @doc false
  # Re-cases a snake_case tag atom according to the requested convention.
  def apply_casing(tag, :snake) do
    tag
  end

  def apply_casing(tag, :snake_upcase) do
    tag |> Atom.to_string() |> String.upcase() |> String.to_atom()
  end

  def apply_casing(tag, :pascal) do
    tag |> split() |> Enum.map(&String.capitalize/1) |> join()
  end

  def apply_casing(tag, :camel) do
    # Like :pascal, but the first token keeps its lower case.
    [first | rest] = split(tag)
    rest = Enum.map(rest, &String.capitalize/1)
    join([first | rest])
  end

  def apply_casing(tag, :lisp) do
    tag |> split() |> join("-")
  end

  def apply_casing(tag, :lisp_upcase) do
    tag |> split() |> Enum.map(&String.upcase/1) |> join("-")
  end

  # Splits a snake_case atom into its lower-case string tokens.
  defp split(tag) do
    tag |> Atom.to_string() |> String.split("_")
  end

  # Joins tokens back into a single atom, optionally separated by `joiner`.
  defp join(tokens, joiner \\ "") do
    tokens |> Enum.join(joiner) |> String.to_atom()
  end

  @doc false
  # Lists the {name, arity} pairs from Kernel that would clash with the
  # generated element macros (arities 0..2) so they can be excluded on import.
  def find_ambiguous_imports(tags) do
    default_imports = Kernel.__info__(:functions) ++ Kernel.__info__(:macros)

    for {name, arity} <- default_imports, arity in 0..2 and name in tags do
      {name, arity}
    end
  end

  @doc false
  # Disambiguates the two optional macro arguments into an {attrs, content}
  # pair. Handles every combination of: keyword attributes, do-blocks (single
  # expression or a __block__ of expressions), plain content, and the
  # arguments arriving in swapped order.
  def normalize_args(content_or_attrs, maybe_content, env) do
    case {expand(content_or_attrs, env), expand(maybe_content, env)} do
      {[], [{:do, {:__block__, _, content}}]} ->
        {[], content}

      {[], [{:do, content}]} ->
        {[], content}

      {[], content} ->
        {[], content}

      {[{:do, {:__block__, _, content}}], nil} ->
        {[], content}

      {[{:do, content}], nil} ->
        {[], content}

      {[{_, _} | _] = attrs, nil} ->
        {attrs, nil}

      {[{_, _} | _] = attrs, [{:do, {:__block__, _, content}}]} ->
        {attrs, content}

      {[{_, _} | _] = attrs, [{:do, content}]} ->
        {attrs, content}

      {[{_, _} | _] = attrs, content} ->
        {attrs, content}

      {attrs, [{:do, content}]} when is_tuple(attrs) ->
        {attrs, content}

      {content, nil} when is_list(content) ->
        {[], content}

      {content, nil} ->
        {content, []}

      {content, [{_, _} | _] = attrs} ->
        {attrs, content}

      _ ->
        raise ArgumentError, message: "element macro received unexpected arguments"
    end
  end

  # Expands macros one level throughout the AST so that literal keyword lists
  # and do-blocks can be pattern matched in normalize_args/3.
  defp expand(arg, env) do
    Macro.prewalk(arg, &Macro.expand_once(&1, env))
  end
end
|
lib/marker/element.ex
| 0.808974
| 0.775817
|
element.ex
|
starcoder
|
defmodule Level10.Games.Levels do
  @moduledoc """
  The game typically goes through 10 levels. This module contains the details
  for those levels so that they can be grabbed when needed.
  """

  alias Level10.Games.{Card, Game}

  @type type :: :color | :set | :run
  @type count :: non_neg_integer()
  # Bug fix: this type previously referenced itself ({group(), count()}).
  # A group is a requirement type paired with the number of cards it needs.
  @type group :: {type(), count()}
  @type level :: list(group())

  # The requirement groups for each of the 10 levels.
  @levels %{
    1 => [set: 3, set: 3],
    2 => [set: 3, run: 4],
    3 => [set: 4, run: 4],
    4 => [run: 7],
    5 => [run: 8],
    6 => [run: 9],
    7 => [set: 4, set: 4],
    8 => [color: 7],
    9 => [set: 5, set: 2],
    10 => [set: 5, set: 3]
  }

  @doc """
  Returns the level requirements for the level number provided, or `nil`
  when the level number is unknown.

  ## Examples

      iex> by_number(1)
      [set: 3, set: 3]

  """
  @spec by_number(integer()) :: level() | nil
  def by_number(level_number) do
    Map.get(@levels, level_number)
  end

  @doc """
  Returns the player's table fully sorted based on the level number provided.
  """
  @spec sort_for_level(integer(), Game.player_table()) :: Game.table()
  def sort_for_level(level_number, player_table) do
    level_requirements = by_number(level_number)

    for {position, cards} <- player_table, into: %{} do
      # Each table position corresponds to the requirement group at the
      # same index; sort the cards appropriately for that group type.
      {type, _} = Enum.at(level_requirements, position)
      {position, Card.sort_for_group(type, cards)}
    end
  end

  @doc """
  Returns whether the cards provided are valid for the group provided

  ## Examples

      iex> valid_group?({:set, 3}, [
      ...>   %Card{value: :three, color: :green},
      ...>   %Card{value: :three, color: :red},
      ...>   %Card{value: :three, color: :blue}
      ...> ])
      true

      iex> valid_group?({:set, 3}, [%Card{value: :three, color: :green}])
      false

  """
  @spec valid_group?(group(), Game.cards()) :: boolean()
  def valid_group?({_, count}, cards) when length(cards) < count, do: false

  def valid_group?({type, _}, cards) do
    # Wilds are removed up front; they can stand in for any card.
    {wild_count, cards} = Card.pop_wilds(cards)
    valid_group?(type, cards, wild_count)
  end

  @doc """
  Returns whether the "table" given is valid for the level provided

  ## Examples

      iex> valid_level?(2, %{
      ...>   0 => [
      ...>     %Card{value: :twelve, color: :green},
      ...>     %Card{value: :twelve, color: :blue},
      ...>     %Card{value: :twelve, color: :yellow}
      ...>   ],
      ...>   1 => [
      ...>     %Card{value: :wild, color: :black},
      ...>     %Card{value: :four, color: :green},
      ...>     %Card{value: :five, color: :blue},
      ...>     %Card{value: :seven, color: :yellow}
      ...>   ]
      ...> })
      true

      iex> valid_level?(1, %{0 => [%Card{value: :twelve, color: :green}]})
      false

  """
  @spec valid_level?(non_neg_integer(), Game.table()) :: boolean()
  def valid_level?(level_number, table) do
    level_number
    |> by_number()
    |> Enum.with_index()
    |> Enum.all?(fn {group, position} -> valid_group?(group, table[position]) end)
  end

  # Private

  # The successor of a card value within a run; nil when there is none.
  @spec next_value(Card.value()) :: Card.value()
  defp next_value(:one), do: :two
  defp next_value(:two), do: :three
  defp next_value(:three), do: :four
  defp next_value(:four), do: :five
  defp next_value(:five), do: :six
  defp next_value(:six), do: :seven
  defp next_value(:seven), do: :eight
  defp next_value(:eight), do: :nine
  defp next_value(:nine), do: :ten
  defp next_value(:ten), do: :eleven
  defp next_value(:eleven), do: :twelve
  defp next_value(_), do: nil

  # True when every remaining card shares the given color.
  @spec valid_color?(Card.color(), Game.cards()) :: boolean()
  defp valid_color?(color, [%{color: color} | rest]), do: valid_color?(color, rest)
  defp valid_color?(_, []), do: true
  defp valid_color?(_, _), do: false

  # Validates the non-wild cards for a group type; wild_count wilds may be
  # spent to fill gaps (only runs actually consume them).
  @spec valid_group?(type(), Game.cards(), non_neg_integer()) :: boolean()
  defp valid_group?(_, [], _), do: true
  defp valid_group?(:color, [%{color: color} | rest], _), do: valid_color?(color, rest)

  defp valid_group?(:run, cards, wild_count) do
    [%{value: value} | rest] = Card.sort(cards)
    valid_run?(value, rest, wild_count)
  end

  defp valid_group?(:set, [%{value: value} | rest], _), do: valid_set?(value, rest)

  # Walks a sorted run, consuming a wild whenever the next natural card does
  # not directly follow the previous value.
  @spec valid_run?(Card.value(), Game.cards(), non_neg_integer()) :: boolean()
  defp valid_run?(value, [%{value: next_value} | rest], 0) do
    if next_value(value) == next_value, do: valid_run?(next_value, rest, 0), else: false
  end

  defp valid_run?(previous_value, cards = [%{value: value} | rest], wild_count) do
    case next_value(previous_value) do
      ^value -> valid_run?(value, rest, wild_count)
      next_value -> valid_run?(next_value, cards, wild_count - 1)
    end
  end

  defp valid_run?(_, [], _), do: true

  # True when every remaining card shares the given value.
  @spec valid_set?(Card.value(), Game.cards()) :: boolean()
  defp valid_set?(value, [%{value: value} | rest]), do: valid_set?(value, rest)
  defp valid_set?(_, []), do: true
  defp valid_set?(_, _), do: false
end
|
lib/level10/games/levels.ex
| 0.867626
| 0.528594
|
levels.ex
|
starcoder
|
defmodule HashRing do
@moduledoc """
This module defines an API for creating/manipulating a hash ring.
The internal data structure for the hash ring is actually a gb_tree, which provides
fast lookups for a given key on the ring.
- The ring is a continuum of 2^32 "points", or integer values.
- Nodes are sharded into 128 points, and distributed across the ring.
- Each shard owns the keyspace below it.
- Keys are hashed and assigned a point on the ring, the node for a given
ring is determined by finding the next highest point on the ring for a shard,
the node that shard belongs to is then the node which owns that key.
- If a key's hash does not have any shards above it, it belongs to the first shard,
this mechanism is what creates the ring-like topology.
- When nodes are added/removed from the ring, only a small subset of keys must be reassigned.
"""
defstruct ring: :gb_trees.empty(), nodes: []
@type t :: %__MODULE__{
ring: :gb_trees.tree(),
nodes: [term()]
}
@hash_range trunc(:math.pow(2, 32) - 1)
@doc """
Creates a new hash ring structure, with no nodes added yet.
## Examples
iex> ring = HashRing.new()
...> %HashRing{nodes: ["a"]} = ring = HashRing.add_node(ring, "a")
...> HashRing.key_to_node(ring, {:complex, "key"})
"a"
"""
@spec new() :: __MODULE__.t()
def new(), do: %__MODULE__{}
@doc """
Creates a new hash ring structure, seeded with the given node,
with an optional weight provided which determines the number of
virtual nodes (shards) that will be assigned to it on the ring.
The default weight for a node is `128`.
## Examples
iex> ring = HashRing.new("a")
...> %HashRing{nodes: ["a"]} = ring
...> HashRing.key_to_node(ring, :foo)
"a"
iex> ring = HashRing.new("a", 200)
...> %HashRing{nodes: ["a"]} = ring
...> HashRing.key_to_node(ring, :foo)
"a"
"""
@spec new(term(), pos_integer) :: __MODULE__.t()
def new(node, weight \\ 128) when is_integer(weight) and weight > 0,
do: add_node(new(), node, weight)
@doc """
Returns the list of nodes which are present on the ring.
The type of the elements in this list are the same as the type of the elements
you initially added to the ring. In the following example, we used strings, but
if you were using atoms, such as those used for Erlang node names, you would get
a list of atoms back.
iex> ring = HashRing.new |> HashRing.add_nodes(["a", "b"])
...> HashRing.nodes(ring)
["b", "a"]
"""
@spec nodes(t) :: [term]
def nodes(%__MODULE__{nodes: nodes}), do: nodes
@doc """
Adds a node to the hash ring, with an optional weight provided which
determines the number of virtual nodes (shards) that will be assigned to
it on the ring.
The default weight for a node is `128`.
## Examples
iex> ring = HashRing.new()
...> ring = HashRing.add_node(ring, "a")
...> %HashRing{nodes: ["b", "a"]} = ring = HashRing.add_node(ring, "b", 64)
...> HashRing.key_to_node(ring, :foo)
"b"
"""
@spec add_node(__MODULE__.t(), term(), pos_integer) :: __MODULE__.t()
def add_node(ring, node, weight \\ 128)
def add_node(_, node, _weight) when is_binary(node) and byte_size(node) == 0,
do: raise(ArgumentError, message: "Node keys cannot be empty strings")
def add_node(%__MODULE__{} = ring, node, weight) when is_integer(weight) and weight > 0 do
cond do
Enum.member?(ring.nodes, node) ->
ring
:else ->
ring = %{ring | nodes: [node | ring.nodes]}
Enum.reduce(1..weight, ring, fn i, %__MODULE__{ring: r} = acc ->
n = :erlang.phash2({node, i}, @hash_range)
try do
%{acc | ring: :gb_trees.insert(n, node, r)}
catch
:error, {:key_exists, _} ->
acc
end
end)
end
end
@doc """
Adds a list of nodes to the hash ring.
The list can contain just the node key, or a tuple of the node key and it's desired weight.
See also the documentation for `add_node/3`.
## Examples
iex> ring = HashRing.new()
...> ring = HashRing.add_nodes(ring, ["a", {"b", 64}])
...> %HashRing{nodes: ["b", "a"]} = ring
...> HashRing.key_to_node(ring, :foo)
"b"
"""
@spec add_nodes(__MODULE__.t(), [term() | {term(), pos_integer}]) :: __MODULE__.t()
def add_nodes(%__MODULE__{} = ring, nodes) when is_list(nodes) do
Enum.reduce(nodes, ring, fn
{node, weight}, acc when is_integer(weight) and weight > 0 ->
add_node(acc, node, weight)
node, acc ->
add_node(acc, node)
end)
end
@doc """
Removes a node from the hash ring.
## Examples
iex> ring = HashRing.new()
...> %HashRing{nodes: ["a"]} = ring = HashRing.add_node(ring, "a")
...> %HashRing{nodes: []} = ring = HashRing.remove_node(ring, "a")
...> HashRing.key_to_node(ring, :foo)
{:error, {:invalid_ring, :no_nodes}}
"""
@spec remove_node(__MODULE__.t(), term()) :: __MODULE__.t()
def remove_node(%__MODULE__{ring: r} = ring, node) do
cond do
Enum.member?(ring.nodes, node) ->
r2 =
:gb_trees.to_list(r)
|> Enum.filter(fn
{_key, ^node} -> false
_ -> true
end)
|> :gb_trees.from_orddict()
%{ring | nodes: ring.nodes -- [node], ring: r2}
:else ->
ring
end
end
@doc """
Determines which node owns the given key.
This function assumes that the ring has been populated with at least one node.
## Examples
iex> ring = HashRing.new("a")
...> HashRing.key_to_node(ring, :foo)
"a"
iex> ring = HashRing.new()
...> HashRing.key_to_node(ring, :foo)
{:error, {:invalid_ring, :no_nodes}}
"""
@spec key_to_node(__MODULE__.t(), term) :: term() | {:error, {:invalid_ring, :no_nodes}}
def key_to_node(%__MODULE__{nodes: []}, _key),
do: {:error, {:invalid_ring, :no_nodes}}
# Convert atoms to binaries, as phash does not distribute them evenly
def key_to_node(ring, key) when is_atom(key),
do: key_to_node(ring, :erlang.term_to_binary(key))
def key_to_node(%__MODULE__{ring: r}, key) do
hash = :erlang.phash2(key, @hash_range)
case :gb_trees.iterator_from(hash, r) do
[{_key, node, _, _} | _] ->
node
_ ->
{_key, node} = :gb_trees.smallest(r)
node
end
end
@doc """
Determines which nodes owns a given key. Will return either `count` results or
the number of nodes, depending on which is smaller.
This function assumes that the ring has been populated with at least one node.
## Examples
    iex> ring = HashRing.new()
    ...> ring = HashRing.add_node(ring, "a")
    ...> ring = HashRing.add_node(ring, "b")
    ...> ring = HashRing.add_node(ring, "c")
    ...> HashRing.key_to_nodes(ring, :foo, 2)
    ["b", "c"]
    iex> ring = HashRing.new()
    ...> HashRing.key_to_nodes(ring, :foo, 1)
    {:error, {:invalid_ring, :no_nodes}}
"""
@spec key_to_nodes(__MODULE__.t(), term, pos_integer) ::
        [term()] | {:error, {:invalid_ring, :no_nodes}}
# An empty ring cannot own any key.
def key_to_nodes(%__MODULE__{nodes: []}, _key, _count),
  do: {:error, {:invalid_ring, :no_nodes}}

# NOTE(review): unlike key_to_node/2, atoms are NOT converted to binaries
# here before hashing — confirm whether that asymmetry is intentional.
def key_to_nodes(%__MODULE__{nodes: nodes, ring: r}, key, count) do
  hash = :erlang.phash2(key, @hash_range)
  # Never ask for more nodes than the ring actually contains.
  count = min(length(nodes), count)

  # NOTE(review): matches on the internal :gb_trees iterator node shape
  # {key, value, smaller, bigger} — an undocumented OTP representation;
  # confirm against the OTP version in use.
  case :gb_trees.iterator_from(hash, r) do
    [{_key, node, _, _} | _] = iter ->
      # Collect up to `count - 1` further distinct nodes past the first hit.
      find_nodes_from_iter(iter, count - 1, [node])

    _ ->
      # Wrap-around: no entry at or after `hash`, fall back to the smallest.
      # NOTE(review): only a single node is returned here even when
      # `count` > 1, unlike the branch above — verify whether multi-node
      # wrap-around collection was intended.
      {_key, node} = :gb_trees.smallest(r)
      [node]
  end
end
# Walks a :gb_trees iterator collecting up to `count` additional distinct
# nodes into `results` (which is accumulated in reverse). Returns the
# collected nodes in insertion order.
#
# Fixes vs. the original:
#   * removed a dead `[node | results]` expression whose result was unused;
#   * the iterator-exhausted branch now reverses `results` like the
#     count-reached clause does, so callers always get insertion order.
defp find_nodes_from_iter(_iter, 0, results), do: Enum.reverse(results)

defp find_nodes_from_iter(iter, count, results) do
  case :gb_trees.next(iter) do
    {_key, node, iter} ->
      if node in results do
        # Skip entries owned by a node we already collected.
        find_nodes_from_iter(iter, count, results)
      else
        find_nodes_from_iter(iter, count - 1, [node | results])
      end

    _ ->
      # Iterator exhausted before `count` distinct nodes were found;
      # return what we have, in insertion order.
      Enum.reverse(results)
  end
end
end
defimpl Inspect, for: HashRing do
  # Renders the ring as "#<Ring[...]>" listing each distinct owning node once.
  def inspect(%HashRing{ring: ring}, _opts) do
    owners =
      ring
      |> :gb_trees.to_list()
      |> Enum.map(fn {_hash, node} -> node end)
      |> Enum.uniq()

    "#<Ring#{Kernel.inspect(owners)}>"
  end
end
|
lib/ring.ex
| 0.947051
| 0.761405
|
ring.ex
|
starcoder
|
defmodule MeshxRpc do
  # Read at compile time: the moduledoc below embeds the section of
  # docs/README.md between the "<!-- MDOC !-->" markers, so the README and
  # module docs cannot drift apart. Compilation fails if the file or the
  # marker is missing.
  @readme File.read!("docs/README.md") |> String.split("<!-- MDOC !-->") |> Enum.fetch!(1)
  @moduledoc """
  #{@readme}
  ## Common configuration
  RPC client and server modules provide child specifications which should be used with user supervisors as shown on examples above. RPC client `child_spec` can be created directly by accessing `MeshxRpc.Client.Pool.child_spec/2` or by using wrapper module `MeshxRpc.Client`. Similarly RPC server `child_spec` is available through `MeshxRpc.Server.Pool.child_spec/2` or `MeshxRpc.Server` modules.
  Configuration options common to client and server `child_spec/2` functions:
  #{NimbleOptions.docs(MeshxRpc.Common.Options.common())}
  ## Telemetry
  ### Telemetry events
  Telemetry event prefix is defined with `:telemetry_prefix` configuration option.
  Events generated by `MeshxRpc`:
  * `:init` - emitted only by server when server worker cannot establish socket connection with user provided `address` terminated with transport solution (e.g. service mesh sidecar proxy),
  * `:hsk` - emitted by both server and client during connection handshake phase,
  * `:idle` - emitted only by client workers when worker is in idle state waiting for user requests,
  * `:recv` and `:send` - emitted by both client and server workers if there was a problem when receiving or sending request data,
  * `:call` and `:cast` - emitted by client and server during failed or after successful call/cast request processing.
  ### Telemetry metadata
  * `:address` - connection address, e.g. `{:tcp, {127, 0, 0, 1}, 1024}`,
  * `:fun_name` - request function name, e.g. `:echo`,
  * `:fun_req` - request function type, can be `:call` or `:cast`,
  * `:hsk_ref` - handshake reference, `integer()`,
  * `:id` - RPC server or client id, e.g. `Example2.Client`,
  * `:local` - map describing local endpoint using keys: `conn_ref`, `node_ref` and `svc_ref`.
  * `:remote` - as `:local` but for remote endpoint,
  * `:req_ref` - request reference, `integer()`,
  * `:result` - execution result. If request execution was successful `:result` is set to atom `:ok`, real execution results are not emitted by telemetry. If execution failed, error reason is emitted,
  * `:socket` - socket port used in connection, e.g. `#Port<0.19>`,
  * `:state` - worker `:gen_statem` last state, e.g. `:reply`.
  Example request telemetry metadata:
  ```elixir
  %{
    address: {:tcp, {127, 0, 0, 1}, 65535},
    fun_name: :echo,
    fun_req: :cast,
    hsk_ref: 3490,
    id: Example2.Client,
    local: %{
      conn_ref: <<123, 219, 9, 168>>,
      node_ref: "nonode@nohost",
      svc_ref: "Elixir.Example2.Client"
    },
    remote: %{
      conn_ref: <<66, 9, 108, 5>>,
      node_ref: "nonode@nohost",
      svc_ref: "Elixir.Example2.Server"
    },
    req_ref: 3650,
    result: :ok,
    socket: #Port<0.12863>,
    state: :reply
  }
  ```
  ### Telemetry metrics
  Three metrics types are reported:
  * `:blocks` - reports number of blocks, send and received,
  * `:size` - number of bytes, send and received,
  * `:time` - **approximate** time in microseconds spend on consecutive request processing steps.
  `:time` metrics:
  * `:ser` and `:dser` - serialization and de-serialization time,
  * `:exec` - request function execution time,
  * `:hsk` - handshake time,
  * `:idle` - worker idle time,
  * `:recv` and `:send` - time spent on request data receiving and sending.
  Example telemetry metrics:
  ```elixir
  %{
    blocks: %{recv: 1, send: 1},
    size: %{recv: 14, send: 40},
    time: [
      dser: 0,
      exec: 1038,
      hsk: 0,
      idle: 101101,
      recv: 12,
      send: 102,
      ser: 1
    ]
  }
  ```
  """
  @doc """
  Attaches pretty-printing Logger handler to telemetry events.
  First argument should correspond to `:telemetry_prefix` configuration option described earlier. Second argument is telemetry [handler id](https://hexdocs.pm/telemetry/telemetry.html#type-handler_id). If handler id is undefined it will be assigned value equal to second list element in `telemetry_prefix`.
  Errors are logged with `:error` Logger level, all other events are logged with `:debug` level.
  Example log of `:ping` call request:
  ```elixir
  Example2.Client.call(:ping)
  12:17:11.869 [debug]
  [:example2, Example2.Client, :call, :ping] -> :ok
  local: %{conn_ref: "e9sJqA", node_ref: "nonode@nohost", svc_ref: "Elixir.Example2.Client"}
  remote: %{conn_ref: "QglsBQ", node_ref: "nonode@nohost", svc_ref: "Elixir.Example2.Server"}
  address: {:tcp, {127, 0, 0, 1}, 65535}
  meta: [hsk_ref: 4034, req_ref: 4066, socket: #Port<0.14455>, state: :reply]
  t_req: 2.152 [dser: 0.006, exec: 2.002, recv: 0.036, send: 0.105, ser: 0.003]
  t_idle: 17547.272
  size: [recv: "31B", send: "31B"]
  blocks: [recv: 1, send: 1]
  ```
  `t_req` is a total request time followed by [individual request steps times], milliseconds.
  `t_idle` is a worker idle time, milliseconds.
  `attach_telemetry/2` is created as helper for use during development phase, most probably should not be used in production.
  """
  @spec attach_telemetry(telemetry_prefix :: [atom()], id :: term()) :: :ok
  # Thin delegation: all pretty-printing logic lives in MeshxRpc.Common.Telemetry.
  def attach_telemetry(telemetry_prefix, id \\ nil), do: MeshxRpc.Common.Telemetry.attach(telemetry_prefix, id)
end
|
lib/meshx_rpc.ex
| 0.837254
| 0.76074
|
meshx_rpc.ex
|
starcoder
|
defmodule AdventOfCode.Y2020.Day24 do
  @moduledoc """
  Advent of Code 2020, day 24: hex-tiled lobby floor.

  Tiles are addressed on an "offset" hexagonal grid (`{x, y}` with a
  `rem(abs(y), 2)` correction on odd rows). Tiles are `:white` or `:black`;
  between generations the floor map stores only the black tiles.
  """

  @doc "Part 1: number of black tiles after applying the input flip list."
  def run1(input) do
    input
    |> setup_floor()
    |> count_black()
  end

  @doc "Part 2: number of black tiles after `day` iterations of the art rules."
  def run2(input, day) do
    input
    |> setup_floor()
    |> Stream.iterate(&apply_art_rules/1)
    # Element 0 of the stream is the initial floor, so element `day` is the
    # floor after `day` applications of the rules.
    |> Enum.at(day)
    |> count_black()
  end

  @doc "Parses the input and flips every listed tile, returning the floor map."
  def setup_floor(input) do
    input
    |> String.split("\n", trim: true)
    |> Enum.map(&parse_line/1)
    |> flip_tiles()
  end

  @doc "Returns `{{min_x, max_x}, {min_y, max_y}}` bounds of the known tiles."
  def get_floor_edges(tiles) do
    tiles
    |> Enum.map(fn {coordinates, _} -> coordinates end)
    |> Enum.reduce({{0, 0}, {0, 0}}, fn {x, y}, {{min_x, max_x}, {min_y, max_y}} ->
      {{min(x, min_x), max(x, max_x)}, {min(y, min_y), max(y, max_y)}}
    end)
  end

  @doc "All coordinates within the bounds, padded by one tile in each direction."
  def get_coordinates_to_check({{min_x, max_x}, {min_y, max_y}}) do
    for x <- (min_x - 1)..(max_x + 1),
        y <- (min_y - 1)..(max_y + 1),
        do: {x, y}
  end

  @doc """
  Applies one generation of the art rules to the floor.
  Only black tiles are kept in the resulting map.
  """
  def apply_art_rules(tiles) do
    tiles
    |> get_floor_edges()
    |> get_coordinates_to_check()
    |> Enum.map(fn coord -> {coord, Map.get(tiles, coord, :white)} end)
    |> Enum.map(&update_tile(&1, tiles))
    |> Enum.filter(fn {_, state} -> state == :black end)
    |> Map.new()
  end

  @doc "Computes the next state of one tile from its black-neighbour count."
  def update_tile({coordinate, state}, tiles) do
    new_state =
      tiles
      |> get_surrounding_tiles(coordinate)
      |> count_black()
      |> get_new_tile_color(state)

    {coordinate, new_state}
  end

  @doc """
  Day-24 rules: a black tile with zero or more than two black neighbours
  flips to white; a white tile with exactly two black neighbours flips to
  black; all other tiles keep their colour.
  """
  def get_new_tile_color(surrounding_black, current_state) do
    case current_state do
      :black when surrounding_black == 0 or surrounding_black > 2 -> :white
      :white when surrounding_black == 2 -> :black
      _ -> current_state
    end
  end

  @doc "Counts the black tiles in a floor (or neighbourhood) map."
  def count_black(tiles) do
    Enum.count(tiles, fn {_, tile} -> tile == :black end)
  end

  @doc "Flips the tile reached by each parsed line, starting from the origin."
  def flip_tiles(tiles_to_flip), do: flip_tiles(Map.new(), tiles_to_flip)

  def flip_tiles(tiles, []), do: tiles

  def flip_tiles(tiles, [line | rest]) do
    tiles
    |> find_and_flip(line, {0, 0})
    |> flip_tiles(rest)
  end

  @doc "Follows a list of directions from `current` and flips the final tile."
  def find_and_flip(tiles, [], current), do: flip_tile(tiles, current)

  def find_and_flip(tiles, [direction | rest], current) do
    find_and_flip(tiles, rest, get_coordinate(current, direction))
  end

  @doc "Toggles a tile's colour; unknown tiles start white, so they become black."
  def flip_tile(tiles, coordinate) do
    Map.update(tiles, coordinate, :black, fn
      :white -> :black
      :black -> :white
    end)
  end

  @doc "Returns the map of the six neighbours of `coordinate` that are known."
  def get_surrounding_tiles(tiles, coordinate) do
    coordinates = Enum.map([:ne, :se, :nw, :sw, :e, :w], &get_coordinate(coordinate, &1))
    Map.take(tiles, coordinates)
  end

  @doc "Neighbour coordinate in direction `dir` on the offset hex grid."
  def get_coordinate({x, y}, dir) do
    # `rem(abs(y), 2)` shifts odd rows half a step right; `abs` keeps the
    # correction non-negative for rows below the origin.
    case dir do
      :ne -> {x + rem(abs(y), 2), y + 1}
      :se -> {x + rem(abs(y), 2), y - 1}
      :nw -> {x - 1 + rem(abs(y), 2), y + 1}
      :sw -> {x - 1 + rem(abs(y), 2), y - 1}
      :e -> {x + 1, y}
      :w -> {x - 1, y}
    end
  end

  @doc "Tokenises a line of `e/se/sw/w/nw/ne` moves into a list of atoms."
  def parse_line(line), do: parse_line(line, [])

  def parse_line("", result), do: Enum.reverse(result)
  def parse_line("se" <> rest, result), do: parse_line(rest, [:se | result])
  def parse_line("sw" <> rest, result), do: parse_line(rest, [:sw | result])
  def parse_line("ne" <> rest, result), do: parse_line(rest, [:ne | result])
  def parse_line("nw" <> rest, result), do: parse_line(rest, [:nw | result])
  def parse_line("w" <> rest, result), do: parse_line(rest, [:w | result])
  def parse_line("e" <> rest, result), do: parse_line(rest, [:e | result])
end
|
lib/2020/day24.ex
| 0.570092
| 0.590218
|
day24.ex
|
starcoder
|
defmodule ExWire.Struct.BlockQueue do
  @moduledoc """
  A structure to store and process blocks received by peers. The goal of this module
  is to keep track of partial blocks until we're ready to add the block to the chain.
  There are two reasons we need to keep them stored in a queue:
  1. Block headers are sent separately of block bodies. We need to store the
     headers until we receive the bodies.
  2. We shouldn't accept a block as canonical until we've heard from several
     peers that the block is the most canonical block at that number. Thus,
     we store the block and a number of commitments. Once the number of
     commitments tips over some threshold, we process the block and add it
     to our block tree.
  """
  alias Block.Header
  alias ExWire.Struct.Block, as: BlockStruct
  alias Blockchain.Block
  alias Blockchain.Blocktree
  alias Blockchain.Chain
  alias MerklePatriciaTree.Trie
  require Logger

  # These will be used to help us determine if a block is empty
  @empty_trie MerklePatriciaTree.Trie.empty_trie_root_hash
  @empty_hash [] |> ExRLP.encode |> ExthCrypto.Hash.Keccak.kec()

  defstruct [
    queue: %{},
    do_validation: true
  ]

  # Fixed vs. original: `commitments` holds a MapSet of peer ids (see
  # `add_header_to_block_queue/7`), not a plain list.
  @type block_item :: %{
    commitments: MapSet.t(binary()),
    block: Blockchain.Block.t,
    ready: boolean()
  }
  @type block_map :: %{
    EVM.hash => block_item
  }
  @type t :: %__MODULE__{
    queue: %{integer() => block_map},
    do_validation: boolean()
  }

  @doc """
  Adds a given header received by a peer to a block queue. Returns wether or not we should
  request the block body, as well.
  Note: we will process it if the block is empty (i.e. has no transactions nor ommers).
  ## Examples
      iex> chain = Blockchain.Test.ropsten_chain()
      iex> db = MerklePatriciaTree.Test.random_ets_db(:proces_block_queue)
      iex> header = %Block.Header{number: 5, parent_hash: <<0::256>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}
      iex> header_hash = <<78, 28, 127, 10, 192, 253, 127, 239, 254, 179, 39, 34, 245, 44, 152, 98, 128, 71, 238, 155, 100, 161, 199, 71, 243, 223, 172, 191, 74, 99, 128, 63>>
      iex> {block_queue, block_tree, false} = ExWire.Struct.BlockQueue.add_header_to_block_queue(%ExWire.Struct.BlockQueue{do_validation: false}, Blockchain.Blocktree.new_tree(), header, header_hash, "remote_id", chain, db)
      iex> block_queue.queue
      %{}
      iex> block_tree.parent_map
      %{<<109, 191, 166, 180, 1, 44, 85, 48, 107, 43, 51, 4, 81, 128, 110, 188, 130, 1, 5, 255, 21, 204, 250, 214, 105, 55, 182, 104, 0, 94, 102, 6>> => <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>}
  # TODO: Add a second addition example
  """
  @spec add_header_to_block_queue(t, Blocktree.t, Header.t, EVM.hash, binary(), Chain.t, MerklePatriciaTree.DB.db) :: {t, Blocktree.t, boolean()}
  def add_header_to_block_queue(block_queue=%__MODULE__{queue: queue}, block_tree, header, header_hash, remote_id, chain, db) do
    block_map = Map.get(queue, header.number, %{})

    {block_map, should_request_body} = case Map.get(block_map, header_hash) do
      nil ->
        # First time we see this header: an empty block needs no body, so it
        # may already be ready for processing.
        is_empty = is_block_empty?(header) # may already be ready, already.
        block_map = Map.put(block_map, header_hash, %{
          commitments: MapSet.new([remote_id]),
          block: %Block{header: header},
          ready: is_empty
        })
        {block_map, not is_empty}
      block_item ->
        # Known header: just record one more peer commitment.
        {Map.put(block_map, header_hash, %{block_item | commitments: MapSet.put(block_item.commitments, remote_id)}), false}
    end

    updated_block_queue = %{block_queue | queue: Map.put(queue, header.number, block_map)}
    {block_queue, block_tree} = process_block_queue(updated_block_queue, block_tree, chain, db)
    {block_queue, block_tree, should_request_body}
  end

  @doc """
  Adds a given block struct received by a peer to a block queue.
  Since we don't really know which block this belongs to, we're going to just
  need to look at every block and try and guess.
  To guess, we'll compute the transactions root and ommers hash, and then try
  and find a header that matches it. For empty blocks (ones with no transactions
  and no ommers, there may be several matches. Otherwise, each block body should
  pretty much be unique).
  ## Examples
      iex> chain = Blockchain.Test.ropsten_chain()
      iex> db = MerklePatriciaTree.Test.random_ets_db(:add_block_struct_to_block_queue)
      iex> header = %Block.Header{
      ...>   transactions_root: <<200, 70, 164, 239, 152, 124, 5, 149, 40, 10, 157, 9, 210, 181, 93, 89, 5, 119, 158, 112, 221, 58, 94, 86, 206, 113, 120, 51, 241, 9, 154, 150>>,
      ...>   ommers_hash: <<232, 5, 101, 202, 108, 35, 61, 149, 228, 58, 111, 18, 19, 234, 191, 129, 189, 107, 167, 195, 222, 123, 50, 51, 176, 222, 225, 181, 72, 231, 198, 53>>
      ...> }
      iex> block_struct = %ExWire.Struct.Block{
      ...>   transactions_list: [[1], [2], [3]],
      ...>   transactions: ["trx"],
      ...>   ommers: ["ommers"]
      ...> }
      iex> block_queue = %ExWire.Struct.BlockQueue{
      ...>   queue: %{
      ...>     1 => %{
      ...>       <<1::256>> => %{
      ...>         commitments: MapSet.new([]),
      ...>         header: header,
      ...>         block: %Blockchain.Block{header: header, block_hash: <<1::256>>},
      ...>         ready: false,
      ...>       }
      ...>     }
      ...>   },
      ...>   do_validation: false
      ...> }
      iex> {block_queue, _block_tree} = ExWire.Struct.BlockQueue.add_block_struct_to_block_queue(
      ...>   block_queue,
      ...>   Blockchain.Blocktree.new_tree(),
      ...>   block_struct,
      ...>   chain,
      ...>   db
      ...> )
      iex> block_queue.queue[1][<<1::256>>].block.transactions
      ["trx"]
      iex> block_queue.queue[1][<<1::256>>].block.ommers
      ["ommers"]
  """
  # Fixed vs. original: the spec's argument order now matches the function
  # head (block_tree before block_struct), and the return type is the
  # {queue, tree} tuple produced by process_block_queue/4 — the old spec
  # declared `:: t`.
  @spec add_block_struct_to_block_queue(t, Blocktree.t, BlockStruct.t, Chain.t, MerklePatriciaTree.DB.db) :: {t, Blocktree.t}
  def add_block_struct_to_block_queue(block_queue=%__MODULE__{queue: queue}, block_tree, block_struct, chain, db) do
    transactions_root = get_transactions_root(block_struct.transactions_list)
    ommers_hash = get_ommers_hash(block_struct.ommers)

    updated_queue = Enum.reduce(queue, queue, fn {number, block_map}, queue ->
      updated_block_map = Enum.reduce(block_map, block_map, fn {hash, block_item}, block_map ->
        if block_item.block.header.transactions_root == transactions_root and block_item.block.header.ommers_hash == ommers_hash do
          # This is now ready! (though, it may not still have enough commitments)
          block = %{block_item.block | transactions: block_struct.transactions, ommers: block_struct.ommers }
          Map.put(block_map, hash, %{block_item | block: block, ready: true})
        else
          block_map
        end
      end)
      Map.put(queue, number, updated_block_map)
    end)

    updated_block_queue = %{block_queue | queue: updated_queue}
    process_block_queue(updated_block_queue, block_tree, chain, db)
  end

  @doc """
  Processes a the block queue, adding any blocks which are complete and pass the number
  of confirmations to the block tree. Those are then removed from the queue.
  ## Examples
      iex> chain = Blockchain.Test.ropsten_chain()
      iex> db = MerklePatriciaTree.Test.random_ets_db(:process_block_queue)
      iex> header = %Block.Header{number: 1, parent_hash: <<0::256>>, beneficiary: <<2, 3, 4>>, difficulty: 100, timestamp: 11, mix_hash: <<1>>, nonce: <<2>>}
      iex> {block_queue, block_tree} = %ExWire.Struct.BlockQueue{
      ...>   queue: %{
      ...>     1 => %{
      ...>       <<1::256>> => %{
      ...>         commitments: MapSet.new([1, 2]),
      ...>         header: header,
      ...>         block: %Blockchain.Block{header: header, block_hash: <<1::256>>},
      ...>         ready: true,
      ...>       }
      ...>     }
      ...>   },
      ...>   do_validation: false
      ...> }
      ...> |> ExWire.Struct.BlockQueue.process_block_queue(Blockchain.Blocktree.new_tree(), chain, db)
      iex> block_tree.parent_map
      %{<<226, 210, 216, 149, 139, 194, 100, 151, 35, 86, 131, 75, 10, 203, 201, 20, 232, 134, 23, 195, 24, 34, 181, 6, 142, 4, 57, 85, 121, 223, 246, 87>> => <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>}
      iex> block_queue.queue
      %{}
  """
  @spec process_block_queue(t, Blocktree.t, Chain.t, MerklePatriciaTree.DB.db) :: {t, Blocktree.t}
  def process_block_queue(block_queue=%__MODULE__{do_validation: do_validation}, block_tree, chain, db) do
    # We can only process the next canonical block
    {remaining_block_queue, blocks} = get_complete_blocks(block_queue)

    # Failed verifications leave the tree untouched; the block is simply
    # dropped from the queue (it was already dequeued above).
    block_tree = Enum.reduce(blocks, block_tree, fn block, block_tree ->
      case Blockchain.Blocktree.verify_and_add_block(block_tree, chain, block, db, do_validation) do
        :parent_not_found ->
          Logger.debug("[Block Queue] Failed to verify block due to missing parent")
          block_tree
        {:invalid, reasons} ->
          Logger.debug("[Block Queue] Failed to verify block due to #{inspect reasons}")
          block_tree
        {:ok, new_block_tree} ->
          Logger.debug("[Block Queue] Verified block and added to new block tree")
          new_block_tree
      end
    end)

    {remaining_block_queue, block_tree}
  end

  @doc """
  Returns the set of blocks which are complete in the block queue, returning a new block queue
  with those blocks removed. This effective dequeues blocks once they have sufficient data and
  commitments.
  ## Examples
      iex> %ExWire.Struct.BlockQueue{
      ...>   queue: %{
      ...>     5 => %{
      ...>       <<1::256>> => %{
      ...>         commitments: MapSet.new([1, 2]),
      ...>         header: %Block.Header{number: 5},
      ...>         block: %Blockchain.Block{block_hash: <<1::256>>},
      ...>         ready: true,
      ...>       },
      ...>       <<2::256>> => %{
      ...>         commitments: MapSet.new([]),
      ...>         header: %Block.Header{number: 5},
      ...>         block: %Blockchain.Block{block_hash: <<2::256>>},
      ...>         ready: true,
      ...>       },
      ...>       <<3::256>> => %{
      ...>         commitments: MapSet.new([1, 2]),
      ...>         header: %Block.Header{number: 5, gas_used: 5},
      ...>         block: %Blockchain.Block{block_hash: <<3::256>>},
      ...>         ready: false,
      ...>       },
      ...>       <<4::256>> => %{
      ...>         commitments: MapSet.new([1, 2]),
      ...>         header: %Block.Header{number: 5, ommers_hash: <<5::256>>},
      ...>         block: %Blockchain.Block{block_hash: <<4::256>>},
      ...>         ready: false,
      ...>       }
      ...>     },
      ...>     6 => %{
      ...>       <<5::256>> => %{
      ...>         commitments: MapSet.new([1, 2]),
      ...>         header: %Block.Header{number: 6},
      ...>         block: %Blockchain.Block{block_hash: <<5::256>>},
      ...>         ready: true,
      ...>       }
      ...>     }
      ...>   }
      ...> }
      ...> |> ExWire.Struct.BlockQueue.get_complete_blocks()
      {
        %ExWire.Struct.BlockQueue{
          queue: %{
            5 => %{
              <<2::256>> => %{
                commitments: MapSet.new([]),
                header: %Block.Header{number: 5},
                block: %Blockchain.Block{block_hash: <<2::256>>},
                ready: true
              },
              <<3::256>> => %{
                commitments: MapSet.new([1, 2]),
                header: %Block.Header{number: 5, gas_used: 5},
                block: %Blockchain.Block{block_hash: <<3::256>>},
                ready: false
              },
              <<4::256>> => %{
                commitments: MapSet.new([1, 2]),
                header: %Block.Header{number: 5, ommers_hash: <<5::256>>},
                block: %Blockchain.Block{block_hash: <<4::256>>},
                ready: false
              }
            }
          }
        },
        [
          %Blockchain.Block{block_hash: <<1::256>>},
          %Blockchain.Block{block_hash: <<5::256>>}
        ]
      }
  """
  @spec get_complete_blocks(t) :: {t, [Block.t]}
  def get_complete_blocks(block_queue=%__MODULE__{queue: queue}) do
    {queue, blocks} = Enum.reduce(queue, {queue, []}, fn {number, block_map}, {queue, blocks} ->
      # A block is dequeued once it has its body (ready) AND enough peer
      # commitments to be considered canonical.
      {final_block_map, new_blocks} = Enum.reduce(block_map, {block_map, []}, fn {hash, block_item}, {block_map, blocks} ->
        if block_item.ready and MapSet.size(block_item.commitments) >= ExWire.Config.commitment_count() do
          { Map.delete(block_map, hash), [block_item.block | blocks] }
        else
          { block_map, blocks }
        end
      end)

      total_blocks = blocks ++ new_blocks

      # Drop the number entirely once its block map is empty.
      if final_block_map == %{} do
        {Map.delete(queue, number), total_blocks}
      else
        {Map.put(queue, number, final_block_map), total_blocks}
      end
    end)

    {%{block_queue | queue: queue}, blocks}
  end

  @doc """
  Determines if a block is empty. There's no reason to actually ask for a block
  body if we know, a priori, that a block is empty.
  ## Examples
      iex> %Block.Header{
      ...>   transactions_root: MerklePatriciaTree.Trie.empty_trie_root_hash(),
      ...>   ommers_hash: <<29, 204, 77, 232, 222, 199, 93, 122, 171, 133, 181, 103, 182, 204, 212, 26, 211, 18, 69, 27, 148, 138, 116, 19, 240, 161, 66, 253, 64, 212, 147, 71>>
      ...> }
      ...> |> ExWire.Struct.BlockQueue.is_block_empty?
      true
      iex> %Block.Header{
      ...>   transactions_root: MerklePatriciaTree.Trie.empty_trie_root_hash(),
      ...>   ommers_hash: <<1>>
      ...> }
      ...> |> ExWire.Struct.BlockQueue.is_block_empty?
      false
      iex> %Block.Header{
      ...>   transactions_root: <<1>>,
      ...>   ommers_hash: <<29, 204, 77, 232, 222, 199, 93, 122, 171, 133, 181, 103, 182, 204, 212, 26, 211, 18, 69, 27, 148, 138, 116, 19, 240, 161, 66, 253, 64, 212, 147, 71>>
      ...> }
      ...> |> ExWire.Struct.BlockQueue.is_block_empty?
      false
  """
  @spec is_block_empty?(Header.t) :: boolean()
  def is_block_empty?(header) do
    header.transactions_root == @empty_trie and header.ommers_hash == @empty_hash
  end

  # Computes the transactions root by building a throw-away trie keyed by
  # RLP-encoded transaction index.
  @spec get_transactions_root([ExRLP.t]) :: MerklePatriciaTree.Trie.root_hash
  defp get_transactions_root(transactions_list) do
    db = MerklePatriciaTree.Test.random_ets_db() # this is a throw-away
    trie = Enum.reduce(transactions_list |> Enum.with_index, Trie.new(db), fn {trx, i}, trie ->
      Trie.update(trie, ExRLP.encode(i), ExRLP.encode(trx))
    end)
    trie.root_hash
  end

  # Keccak hash of the RLP-encoded ommers list, as stored in block headers.
  @spec get_ommers_hash([EVM.hash]) :: ExthCrypto.hash
  defp get_ommers_hash(ommers) do
    ommers |> ExRLP.encode |> ExthCrypto.Hash.Keccak.kec()
  end
end
|
apps/ex_wire/lib/ex_wire/struct/block_queue.ex
| 0.661923
| 0.560614
|
block_queue.ex
|
starcoder
|
defmodule RingCentral.OAuth do
  @moduledoc """
  The main module for the [Authorization flow](https://developers.ringcentral.com/api-reference/authentication)
  """
  alias RingCentral.HTTPClient
  alias RingCentral.Response
  alias RingCentral.Error

  @doc """
  Get the URL for initializing the OAuth 2.0 authorizaiton flow.
  `params` is a map contains the options described in
  the [official documentation](https://developers.ringcentral.com/api-reference/Authorization)
  ## Example
  ```elixir
  ringcentral = %RingCentral{
    client_id: "the-client-id",
    client_secret: "the-client-secret",
    http_client: RingCentral.HTTPClient.DefaultClient,
    server_url: "https://platform.ringcentral.com",
    token_info: nil
  }
  {:ok, authorization_url} = RingCentral.OAuth.authorize(ringcentral, %{
    response_type: "code",
    redirect_uri: "https://ringcentral-elixir.test"
  })
  # {:ok, "https://service.ringcentral.com/..."}
  ```
  """
  @spec authorize(RingCentral.t(), map()) :: {:error, RingCentral.Error.t()} | {:ok, String.t()}
  def authorize(%RingCentral{} = ringcentral, params \\ %{}) do
    query =
      Map.merge(
        %{
          client_id: ringcentral.client_id
        },
        params
      )

    url =
      ringcentral
      |> build_url("/restapi/oauth/authorize")
      |> Map.put(:query, URI.encode_query(query))

    # The server answers with a 302 whose Location header is the
    # authorization URL we hand back to the caller.
    with {:ok, %Response{status: 302, headers: headers}} <-
           HTTPClient.perform_request(ringcentral, :get, url, nil) do
      location =
        headers
        |> Enum.reduce_while(nil, fn
          {"location", v}, _acc -> {:halt, v}
          _, acc -> {:cont, acc}
        end)

      {:ok, location}
    else
      # Fixed vs. original: was `handle_error(error)` — the 1-arity variant
      # only matched transport errors, so any non-302 HTTP response raised
      # a FunctionClauseError.
      error -> handle_error(ringcentral, error)
    end
  end

  @doc """
  Get the access token and refresh token.
  `params` is a map contains the options described in
  the [official documentation](https://developers.ringcentral.com/api-reference/Get-Token)
  ## Example
  ```elixir
  ringcentral = %RingCentral{
    client_id: "the-client-id",
    client_secret: "the-client-secret",
    http_client: RingCentral.HTTPClient.DefaultClient,
    server_url: "https://platform.ringcentral.com",
    token_info: nil
  }
  {:ok, token_info} = RingCentral.OAuth.get_token(ringcentral, %{
    grant_type: "authorization_code",
    code: "<KEY>",
    redirect_uri: "https://ringcentral-elixir.test"
  })
  # {:ok, %{"access_token": "...", "token_type": "bearer", "refresh_token": "..."}}
  ```
  """
  # Spec fixed vs. original: the success value is the decoded JSON token
  # payload (a map), not a String.t().
  @spec get_token(RingCentral.t(), map()) :: {:error, RingCentral.Error.t()} | {:ok, map()}
  def get_token(%RingCentral{} = ringcentral, params \\ %{}) do
    url =
      ringcentral
      |> build_url("/restapi/oauth/token")

    headers = [
      {"Accept", "application/json"},
      {"Content-Type", "application/x-www-form-urlencoded"},
      {"Authorization",
       "Basic " <> Base.encode64("#{ringcentral.client_id}:#{ringcentral.client_secret}")}
    ]

    with {:ok, %Response{status: 200, body: body}} <-
           HTTPClient.perform_request(
             ringcentral,
             :post,
             url,
             {:form, params},
             headers
           ) do
      token_info = RingCentral.JSON.decode!(ringcentral, body)
      {:ok, token_info}
    else
      # Fixed vs. original: was `handle_error(error)` (see authorize/2).
      error -> handle_error(ringcentral, error)
    end
  end

  @doc """
  Revokes access/refresh token.
  `token` is the active access or refresh token to be revoked,
  see the [official documentation](https://developers.ringcentral.com/api-reference/Revoke-Token) for more information.
  ## Example
  ```elixir
  ringcentral = %RingCentral{
    client_id: "the-client-id",
    client_secret: "the-client-secret",
    http_client: RingCentral.HTTPClient.DefaultClient,
    server_url: "https://platform.ringcentral.com",
    token_info: nil
  }
  RingCentral.OAuth.revoke_token(ringcentral, "<KEY>")
  # :ok
  ```
  """
  @spec revoke_token(RingCentral.t(), String.t()) :: {:error, RingCentral.Error.t()} | :ok
  def revoke_token(%RingCentral{} = ringcentral, token) do
    url =
      ringcentral
      |> build_url("/restapi/oauth/revoke")

    headers = [
      {"Accept", "application/json"},
      {"Content-Type", "application/x-www-form-urlencoded"},
      {"Authorization",
       "Basic " <> Base.encode64("#{ringcentral.client_id}:#{ringcentral.client_secret}")}
    ]

    request_body = %{
      token: token
    }

    with {:ok, %Response{status: 200}} <-
           HTTPClient.perform_request(
             ringcentral,
             :post,
             url,
             {:form, request_body},
             headers
           ) do
      :ok
    else
      error -> handle_error(ringcentral, error)
    end
  end

  # Normalizes all failure results into {:error, %Error{}}. Every call site
  # now uses handle_error/2, so transport errors and HTTP error statuses are
  # handled uniformly (the original mixed 1- and 2-arity variants, which
  # crashed with FunctionClauseError depending on the call site).
  defp handle_error(
         ringcentral,
         {:ok, %Response{status: status_code, body: body, headers: headers}}
       )
       when status_code >= 400 and status_code < 500 do
    error_info = RingCentral.JSON.decode!(ringcentral, body)

    {:error,
     %Error{
       code: :client_error,
       detail: %{
         status: status_code,
         data: error_info,
         headers: headers
       }
     }}
  end

  defp handle_error(
         ringcentral,
         {:ok, %Response{status: status_code, body: body, headers: headers}}
       )
       when status_code >= 500 do
    error_info = RingCentral.JSON.decode!(ringcentral, body)

    {:error,
     %Error{
       code: :server_error,
       detail: %{
         status: status_code,
         data: error_info,
         headers: headers
       }
     }}
  end

  defp handle_error(_ringcentral, {:error, %Error{} = err}) do
    {:error, err}
  end

  # Catch-all for responses that are neither the expected success status nor
  # an error status (e.g. an unexpected 2xx/3xx). Previously these crashed
  # with FunctionClauseError; now they surface as a structured error.
  defp handle_error(_ringcentral, other) do
    {:error,
     %Error{
       code: :unexpected_response,
       detail: other
     }}
  end

  defp build_url(%RingCentral{} = client, path) do
    URI.merge(client.server_url, path)
  end
end
|
lib/ring_central/oauth.ex
| 0.834508
| 0.740597
|
oauth.ex
|
starcoder
|
defmodule RlStudy.DP.BellmanEquation do
  @moduledoc """
  Value base
  """
  require Logger

  # State value under the Bellman equation: immediate reward plus the
  # discounted best expected value over next states.
  @spec v(String.t(), float()) :: float()
  def v(s, gamma \\ 0.99) do
    Logger.info("v: #{inspect(s)}")
    value = r(s) + gamma * max_v_on_next_state(s)
    Logger.info("v: #{inspect(s)} -> #{inspect(value)}")
    value
  end

  # Reward: +1 for the happy ending, -1 for the bad ending, 0 otherwise.
  @spec r(String.t()) :: -1 | 0 | 1
  def r("happy_end"), do: 1
  def r("bad_end"), do: -1
  def r(_s), do: 0

  # Best expected value over the actions available in state `s`.
  @spec max_v_on_next_state(String.t()) :: float()
  def max_v_on_next_state(s) do
    Logger.debug("max_v_on_next_state: #{inspect(s)}")

    result =
      if Enum.member?(["happy_end", "bad_end"], s) do
        # game end, the expected value is 0.
        0
      else
        values =
          Enum.map(["up", "down"], fn a ->
            Logger.debug("state=#{inspect(s)}, action=#{inspect(a)}")
            transit_probs = transit_func(s, a)
            Logger.debug("transit_probes=#{inspect(transit_probs)}")

            # Expectation of v over the transition distribution.
            value =
              Enum.reduce(transit_probs, 0, fn {next_state, prob}, acc ->
                acc + prob * v(next_state)
              end)

            Logger.debug("value: #{inspect(s)} + #{inspect(a)} -> #{inspect(value)}")
            value
          end)

        Logger.debug("values: #{inspect(values)}")
        Enum.max(values)
      end

    Logger.debug("max_v_on_next_state: state=#{inspect(s)} -> #{inspect(result)}")
    result
  end

  # Transition model: after 5 actions the game ends (happy if at least 4
  # "up" moves were taken); otherwise the chosen action succeeds with
  # probability 0.9 and its opposite happens with probability 0.1.
  @spec transit_func(String.t(), String.t()) :: %{String.t() => float()}
  def transit_func(state, action) do
    Logger.debug("transit_func: state=#{inspect(state)}, action=#{inspect(action)}")
    [_ | actions] = String.split(state, "_")
    limit_game_count = 5
    happy_end_border = 4
    move_prob = 0.9

    if length(actions) == limit_game_count do
      up_count = Enum.count(actions, &(&1 == "up"))
      ending = if up_count >= happy_end_border, do: "happy_end", else: "bad_end"
      %{ending => 1.0}
    else
      opposite = if action == "down", do: "up", else: "down"

      %{
        next_state(state, action) => move_prob,
        next_state(state, opposite) => 1 - move_prob
      }
    end
  end

  # States are encoded as underscore-joined action histories.
  defp next_state(state, action), do: "#{state}_#{action}"
end
|
lib/dp/bellman_equation.ex
| 0.745584
| 0.495117
|
bellman_equation.ex
|
starcoder
|
defmodule Day19 do
  @moduledoc """
  Advent of Code 2017, day 19: "A Series of Tubes".

  The input is an ASCII routing diagram. A packet enters at the `|` on the
  top row, follows the path (`|`, `-`, turning at `+`), and records every
  letter it passes until it steps off the path onto a space.
  Assumes all rows are padded to equal width, as in the puzzle input.
  """

  @doc "Returns the letters encountered along the path, in visiting order."
  def part1(input) do
    {pos, width, grid} = parse(input)
    {letters, _steps} = walk(:down, pos, grid, width, [], 0)
    letters
  end

  @doc "Returns the total number of steps taken before leaving the path."
  def part2(input) do
    {pos, width, grid} = parse(input)
    {_letters, steps} = walk(:down, pos, grid, width, [], 0)
    steps
  end

  # Single path-walker shared by both parts (the original duplicated the
  # walk as `move/5` and `move_count/5`). Accumulates the letters seen and
  # the step count, returning both when the packet leaves the path.
  defp walk(dir, {x, y} = pos, grid, width, seen, steps) do
    case :binary.at(grid, x + y * width) do
      ?+ ->
        # Junction: turn perpendicular to the current direction.
        dir = branch(dir, pos, grid, width)
        walk(dir, next(dir, pos), grid, width, seen, steps + 1)

      letter when letter in ?A..?Z ->
        walk(dir, next(dir, pos), grid, width, [letter | seen], steps + 1)

      segment when segment === ?| or segment === ?- ->
        # Plain path segment; crossings keep the current direction.
        walk(dir, next(dir, pos), grid, width, seen, steps + 1)

      _ ->
        # Off the path (a space): the walk is over.
        {Enum.reverse(seen), steps}
    end
  end

  # Coordinate one step ahead in the given direction.
  defp next(:down, {x, y}), do: {x, y + 1}
  defp next(:up, {x, y}), do: {x, y - 1}
  defp next(:left, {x, y}), do: {x - 1, y}
  defp next(:right, {x, y}), do: {x + 1, y}

  # At a `+` while moving vertically: turn toward the side that has path.
  defp branch(dir, {x, y}, grid, width) when dir === :down or dir === :up do
    case :binary.at(grid, x - 1 + y * width) do
      ?\s -> :right
      _ -> :left
    end
  end

  # At a `+` while moving horizontally: turn up or down likewise.
  defp branch(dir, {x, y}, grid, width) when dir === :left or dir === :right do
    case :binary.at(grid, x + (y - 1) * width) do
      ?\s -> :down
      _ -> :up
    end
  end

  # Flattens the diagram into one binary plus its row width and finds the
  # entry point (the first `|`, expected on row 0).
  defp parse(input) do
    lines = String.split(input, "\n")
    width = String.length(hd(lines))
    grid = IO.iodata_to_binary(lines)
    {start, _} = :binary.match(grid, "|")
    {{start, 0}, width, grid}
  end
end
|
day19/lib/day19.ex
| 0.593609
| 0.894467
|
day19.ex
|
starcoder
|
defmodule Authex.AuthorizationPlug do
  @moduledoc """
  A plug to handle authorization.
  This plug must be passed an auth module in which to authorize with. Otherwise,
  it will raise an `Authex.Error`. The plug must also only be used after the
  `Authex.AuthenticationPlug` has been used.
  With it, we can easily authorize a Phoenix controller:
      defmodule MyAppWeb.MyController do
        use MyAppWeb, :controller
        plug Authex.AuthenticationPlug, auth: MyApp.Auth
        plug Authex.AuthorizationPlug, auth: MyApp.Auth, permits: ["user", "admin"]
        def show(conn, _params) do
          with {:ok, %{id: id}} <- MyApp.Auth.current_user(conn),
               {:ok, user} <- MyApp.Users.get(id)
          do
            render(conn, "show.json", user: user)
          end
        end
      end
  The plug checks the scopes of the token and compares them to the "permits" passed
  to the plug. Authorization works by combining the "permits" with the "type" of
  request that is being made.
  For example, with our controller above, we are permitting "user" and "admin" access.
  The show action would be a `GET` request, and would therefore be a "read" type.
  Requests are bucketed under the following types:
  * GET - read
  * HEAD - read
  * PUT - write
  * PATCH - write
  * POST - write
  * DELETE - delete
  So, in order to access the show action, our token would require one of the
  following scopes: `["user/read", "admin/read"]`. Or, the token would require
  `["user/write", "admin/write"]` to access the update action.
  By default, if authorization fails, the plug sends the conn to the `Authex.ForbiddenPlug`
  plug. This plug will put a `403` status into the conn with the body `"Forbidden"`.
  We can configure our own forbidden plug by passing it as an option to the
  `Authex.AuthorizationPlug` plug or through our config.
      config :my_app, MyApp.Auth, [
        forbidden: MyApp.ForbiddenPlug
      ]
  """
  @behaviour Plug

  import Plug.Conn

  @impl Plug
  def init(opts \\ []) do
    # Raises at compile/startup time if the :auth option is missing.
    verify_options(opts) && opts
  end

  @impl Plug
  def call(conn, opts) do
    opts = build_options(opts)

    # Any failed step falls through to the configured forbidden plug.
    with {:ok, permits} <- fetch_permits(opts),
         {:ok, action} <- fetch_action(conn),
         {:ok, scopes} <- fetch_current_scopes(conn, opts),
         {:ok, current_scope} <- verify_scope(permits, action, scopes),
         {:ok, conn} <- assign_current_scope(conn, current_scope) do
      conn
    else
      _ -> forbidden(conn, opts)
    end
  end

  defp fetch_permits(opts) do
    case Map.get(opts, :permits) do
      permits when is_list(permits) -> {:ok, permits}
      # Fixed vs. original: the fallback matched only `false`, so any other
      # non-list value (including a missing key -> nil) raised a
      # CaseClauseError instead of failing authorization.
      _ -> :error
    end
  end

  # Buckets the HTTP method into a scope action ("read"/"write"/"delete").
  defp fetch_action(%{method: method}) do
    case method do
      "GET" -> {:ok, "read"}
      "HEAD" -> {:ok, "read"}
      "PUT" -> {:ok, "write"}
      "PATCH" -> {:ok, "write"}
      "POST" -> {:ok, "write"}
      "DELETE" -> {:ok, "delete"}
      _ -> :error
    end
  end

  defp fetch_current_scopes(conn, opts) do
    auth = Map.get(opts, :auth)
    apply(auth, :current_scopes, [conn])
  end

  # Builds "<permit>/<action>" scope strings and checks the token holds one.
  defp verify_scope(permits, action, scopes) do
    current_scopes =
      Enum.map(permits, fn permit ->
        permit <> "/" <> action
      end)

    case Authex.Token.has_scope?(current_scopes, scopes) do
      false -> :error
      result -> {:ok, result}
    end
  end

  defp assign_current_scope(conn, current_scope) do
    {:ok, put_private(conn, :authex_current_scope, current_scope)}
  end

  defp forbidden(conn, opts) do
    handler = Map.get(opts, :forbidden)
    apply(handler, :call, [conn, []])
  end

  defp build_options(opts) do
    auth = Keyword.get(opts, :auth)

    Enum.into(opts, %{
      forbidden: auth.config(:forbidden, Authex.ForbiddenPlug),
      permits: []
    })
  end

  defp verify_options(opts) do
    Keyword.has_key?(opts, :auth) || raise Authex.Error, "auth module missing"
  end
end
|
lib/authex/plugs/authorization_plug.ex
| 0.837354
| 0.421582
|
authorization_plug.ex
|
starcoder
|
defmodule X3m.System.Message do
  @moduledoc """
  System Message.

  This module defines a `X3m.System.Message` struct and the main functions
  for working with it.

  ## Fields:

    * `service_name` - the name of the service that should handle this message. Example: `:create_job`.
    * `id` - unique id of the message.
    * `correlation_id` - id of the message that "started" conversation.
    * `causation_id` - id of the message that "caused" this message.
    * `logger_metadata` - In each new process `Logger.metadata` should be set to this value.
    * `invoked_at` - utc time when message was generated.
    * `dry_run` - specifies dry run option. It can be either `false`, `true` or `:verbose`.
    * `request` - request structure converted to Ecto.Changeset (or anything else useful).
    * `raw_request` - request as it is received before converting to Message (i.e. `params` from controller action).
    * `assigns` - shared Data as a map.
    * `response` - the response for invoker.
    * `events` - list of generated events.
    * `aggregate_meta` - metadata for aggregate.
    * `valid?` - when set to `true` it means that raw_request was successfully validated and
      structured request is set to `request` field
    * `reply_to` - Pid of process that is waiting for response.
    * `halted?` - when set to `true` it means that response should be returned to the invoker
      without further processing of Message.
  """
  require Logger
  alias X3m.System.Response

  # Every field is mandatory; the struct is always built through new/2.
  @enforce_keys ~w(service_name id correlation_id causation_id invoked_at dry_run
                 reply_to halted? raw_request request valid? response events
                 aggregate_meta assigns logger_metadata)a
  defstruct @enforce_keys

  @type t() :: %__MODULE__{
          service_name: atom,
          id: String.t(),
          correlation_id: String.t(),
          causation_id: String.t(),
          logger_metadata: Keyword.t(),
          invoked_at: DateTime.t(),
          dry_run: dry_run(),
          raw_request: map(),
          request: nil | request,
          valid?: boolean,
          assigns: assigns,
          response: nil | Response.t(),
          events: [map],
          aggregate_meta: map,
          reply_to: pid,
          halted?: boolean
        }

  @typep assigns :: %{atom => any}
  @typep request :: map()
  @type error :: {String.t(), Keyword.t()}
  @type errors :: [{atom, error}]
  @type dry_run :: boolean | :verbose

  @doc """
  Creates new message with given `service_name` and provided `opts`:

    * `id` - id of the message. If not provided it generates random one.
    * `correlation_id` - id of "conversation". If not provided it is set to `id`.
    * `causation_id` - id of message that "caused" this message. If not provided it is set
      to `correlation_id` (which itself defaults to `id`).
    * `dry_run` - dry-run mode; `false` by default.
    * `reply_to` - sets pid of process that expects response. If not provided it is set to `self()`.
    * `raw_request` - sets raw request as it is received (i.e. `params` from controller action).
    * `logger_metadata` - if not provided `Logger.metadata` is used by default.
  """
  @spec new(atom, Keyword.t()) :: __MODULE__.t()
  def new(service_name, opts \\ []) when is_atom(service_name) do
    id = Keyword.get(opts, :id) || gen_msg_id()
    correlation_id = Keyword.get(opts, :correlation_id, id)
    # NOTE: falls back to correlation_id (not id) when :causation_id is absent.
    causation_id = Keyword.get(opts, :causation_id, correlation_id)
    dry_run = Keyword.get(opts, :dry_run, false)
    reply_to = Keyword.get(opts, :reply_to, self())
    raw_request = Keyword.get(opts, :raw_request)
    logger_metadata = Keyword.get(opts, :logger_metadata, Logger.metadata())

    %__MODULE__{
      service_name: service_name,
      id: id,
      correlation_id: correlation_id,
      causation_id: causation_id,
      invoked_at: DateTime.utc_now(),
      dry_run: dry_run,
      raw_request: raw_request,
      request: nil,
      valid?: true,
      response: nil,
      events: [],
      aggregate_meta: %{},
      reply_to: reply_to,
      halted?: false,
      assigns: %{},
      logger_metadata: logger_metadata
    }
  end

  @doc """
  Creates new message with given `service_name` that is caused by other `msg`.

  The new message gets a fresh id, inherits `msg.correlation_id` and records
  `msg.id` as its `causation_id`.
  """
  @spec new_caused_by(atom, __MODULE__.t(), Keyword.t()) :: __MODULE__.t()
  def new_caused_by(service_name, %__MODULE__{} = msg, opts \\ []) when is_atom(service_name) do
    # NOTE(review): only :raw_request is forwarded from opts; other options
    # (dry_run, reply_to, ...) take new/2 defaults — confirm this is intended.
    service_name
    |> new(
      id: gen_msg_id(),
      correlation_id: msg.correlation_id,
      causation_id: msg.id,
      raw_request: opts[:raw_request]
    )
  end

  @doc """
  Re-targets `sys_msg` to another `service_name`, keeping all other fields.
  """
  @spec to_service(t(), atom) :: t()
  def to_service(%__MODULE__{} = sys_msg, service_name),
    do: %__MODULE__{sys_msg | service_name: service_name}

  @doc """
  Assigns a value to a key in the message.

  The "assigns" storage is meant to be used to store values in the message
  so that others in pipeline can use them when needed. The assigns storage
  is a map.

  ## Examples

      iex> sys_msg.assigns[:user_id]
      nil
      iex> sys_msg = assign(sys_msg, :user_id, 123)
      iex> sys_msg.assigns[:user_id]
      123
  """
  @spec assign(__MODULE__.t(), atom, any) :: __MODULE__.t()
  def assign(%__MODULE__{assigns: assigns} = sys_msg, key, val) when is_atom(key),
    do: %{sys_msg | assigns: Map.put(assigns, key, val)}

  @doc """
  Returns `sys_msg` with provided `response` and as `halted? = true`.

  Events are reversed here so they end up in the order they were added
  (add_event/2 prepends for efficiency).
  """
  @spec return(__MODULE__.t(), Response.t()) :: __MODULE__.t()
  def return(%__MODULE__{events: events} = sys_msg, response) do
    sys_msg
    |> Map.put(:response, response)
    |> Map.put(:halted?, true)
    |> Map.put(:events, Enum.reverse(events))
  end

  @doc """
  Returns `message` it received with `Response.created(id)` result set.
  """
  @spec created(__MODULE__.t(), any) :: __MODULE__.t()
  def created(%__MODULE__{} = message, id) do
    response = Response.created(id)
    return(message, response)
  end

  @doc """
  Returns halted `message` with a plain `Response.ok()` set.
  """
  @spec ok(__MODULE__.t()) :: __MODULE__.t()
  def ok(message) do
    response = Response.ok()
    return(message, response)
  end

  @doc """
  Returns halted `message` with `Response.ok(any)` set.
  """
  @spec ok(__MODULE__.t(), any) :: __MODULE__.t()
  def ok(message, any) do
    response = Response.ok(any)
    return(message, response)
  end

  @doc """
  Returns halted `message` with `Response.error(any)` set.
  """
  @spec error(__MODULE__.t(), any) :: __MODULE__.t()
  def error(message, any) do
    response = Response.error(any)
    return(message, response)
  end

  @doc """
  Stores the structured `request` in the message.

  An invalid request (`valid?: false`, e.g. an invalid Ecto.Changeset) marks
  the message invalid and halts it with a validation-error response.
  """
  def put_request(%{valid?: false} = request, %__MODULE__{} = message) do
    %{message | valid?: false, request: request}
    |> return(Response.validation_error(request))
  end

  def put_request(%{} = request, %__MODULE__{} = message),
    do: %{message | valid?: true, request: request}

  @doc """
  Puts `value` under `key` in `message.raw_request` map.
  """
  def put_in_raw_request(%__MODULE__{} = message, key, value) do
    # raw_request may still be nil (see new/2); start from an empty map then.
    raw_request =
      (message.raw_request || %{})
      |> Map.put(key, value)

    %{message | raw_request: raw_request}
  end

  @doc """
  Adds `event` in `message.events` list. If `event` is nil
  it behaves as noop.

  After `return/2` (and friends) order of `msg.events` will be the same as
  they've been added.
  """
  @spec add_event(message :: t(), event :: nil | any) :: t()
  def add_event(%__MODULE__{} = message, nil),
    do: message

  def add_event(%__MODULE__{events: events} = message, event),
    do: %{message | events: [event | events]}

  @doc """
  Resolves the aggregate id from `message.raw_request[id_field]` and stores it
  in `message.aggregate_meta.id`.

  Options:

    * `:generate_if_missing` - when `true`, a UUID v4 is generated (and written
      back into `raw_request`) if the field is absent; otherwise a
      `missing_id` response halts the message.
  """
  def prepare_aggregate_id(%__MODULE__{} = message, id_field, opts \\ []) do
    # Map.from_struct/1 lets get_in/2 traverse into raw_request safely.
    id =
      message
      |> Map.from_struct()
      |> get_in([:raw_request, id_field])

    case {id, opts[:generate_if_missing] == true} do
      {nil, true} ->
        id = UUID.uuid4()
        raw_request = Map.put(message.raw_request, id_field, id)
        aggregate_meta = Map.put(message.aggregate_meta, :id, id)

        message
        |> Map.put(:raw_request, raw_request)
        |> Map.put(:aggregate_meta, aggregate_meta)

      {nil, false} ->
        response = Response.missing_id(id_field)
        return(message, response)

      _ ->
        aggregate_meta = Map.put(message.aggregate_meta, :id, id)
        Map.put(message, :aggregate_meta, aggregate_meta)
    end
  end

  # taken from https://github.com/elixir-plug/plug/blob/master/lib/plug/request_id.ex
  @doc """
  Generates a random, url-safe message id.
  """
  @spec gen_msg_id :: String.t()
  def gen_msg_id() do
    binary = <<
      System.system_time(:nanosecond)::64,
      :erlang.phash2({node(), self()}, 16_777_216)::24,
      :erlang.unique_integer()::32
    >>

    Base.url_encode64(binary)
  end
end
|
lib/message.ex
| 0.885142
| 0.519399
|
message.ex
|
starcoder
|
defmodule Fiat.CacheServer do
  @moduledoc """
  Fiat is a module to handle basic caching needs. Behind
  the scenes it leverages an ets table to store objects
  and a GenServer to maintain the state of the current
  keys.

  ## Usage

  Add `Fiat.CacheServer` to your application's supervision
  tree. Because `Fiat.CacheServer` is registered with its
  module name, it can be accessed without providing a pid
  to access the process.

  For example:

  ```elixir
  children = [
    ...
    Fiat.CacheServer
  ]
  ...
  ```
  """
  use GenServer

  # ETS table holds {key, object}; expiry timestamps live in GenServer state.
  @table :table
  # Interval between sweeps of stale entries.
  @clear_interval :timer.seconds(5)

  @doc false
  def start_link(_) do
    GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
  end

  @doc false
  def start_link() do
    start_link([])
  end

  @doc """
  Stops the GenServer.

  ## Examples

      iex> Fiat.CacheServer.stop()
      :ok
  """
  @spec stop() :: :ok
  def stop do
    GenServer.stop(__MODULE__)
  end

  @doc """
  Caches an object using a cache_key.

  `expires_in` is the entry's lifetime in seconds (default 300).

  ## Examples

      iex> Fiat.CacheServer.cache_object("data", {"code", 2})
      true
      iex> Fiat.CacheServer.cache_object("data", {"code", 2}, 10)
      true
  """
  @spec cache_object(term(), term(), integer()) :: true
  def cache_object(cache_key, object, expires_in \\ 300) do
    expires_at = System.os_time(:second) + expires_in
    GenServer.call(__MODULE__, {:set, cache_key, object, expires_at})
  end

  @doc """
  Fetches the cached object for a particular key.

  Returns object if it exists in the cache, otherwise
  returns `nil`.

  NOTE(review): expiry is tracked only in the server state, so an expired
  entry may still be returned here until the next scheduled sweep removes it.

  ## Examples

      iex> Fiat.CacheServer.cache_object("data", {"code", 2})
      iex> Fiat.CacheServer.fetch_object("data")
      {"code", 2}
      iex> Fiat.CacheServer.fetch_object("data_old")
      nil
  """
  @spec fetch_object(term()) :: term() | nil
  def fetch_object(cache_key) do
    # Reads go straight to ETS (read_concurrency) without touching the server.
    case :ets.lookup(@table, cache_key) do
      [] -> nil
      [{_, result}] -> result
    end
  end

  @doc """
  Fetches the cached object for a particular key. If
  the `cache_key` is not present in the cache, it
  executes the provided `query_fn` paramter, stores
  the result in the cache and returns it.

  Returns either the cached object or the result of
  the `query_fn` parameter.

  ## Examples

      iex> Fiat.CacheServer.cache_object("data", :data)
      iex> Fiat.CacheServer.fetch_object("data", fn -> :ok end)
      :data
      iex> Fiat.CacheServer.fetch_object("data", fn -> :ok end)
      :ok
  """
  @spec fetch_object(term(), (() -> term()), integer()) :: term()
  def fetch_object(cache_key, query_fn, expires_in \\ 300) do
    case fetch_object(cache_key) do
      nil ->
        object = query_fn.()
        cache_object(cache_key, object, expires_in)
        object

      object ->
        object
    end
  end

  @doc """
  Clears stale items from the cache.

  ## Examples

      iex> Fiat.CacheServer.clear_stale_objects
      []
  """
  def clear_stale_objects() do
    GenServer.call(__MODULE__, :clear_stale_objects)
  end

  @impl true
  def init(_) do
    :ets.new(@table, [
      :set,
      :named_table,
      read_concurrency: true
    ])

    schedule_clear()
    {:ok, %{}}
  end

  @impl true
  def handle_call({:set, key, object, expires_at}, _from, state) do
    result = :ets.insert(@table, {key, object})
    {:reply, result, Map.put(state, key, expires_at)}
  end

  def handle_call(:clear_stale_objects, _from, state) do
    new_state = remove_stale_objects(state)
    {:reply, [], new_state}
  end

  @impl true
  def handle_info(:clear_stale_objects, state) do
    new_state = remove_stale_objects(state)
    schedule_clear()
    {:noreply, new_state}
  end

  @impl true
  def terminate(_, _) do
    :ets.delete_all_objects(@table)
  end

  # Deletes every entry whose expiry has passed and returns the pruned
  # expiry map.
  #
  # PERF: the original built both result lists with `list ++ [elem]` inside a
  # reduce (quadratic); a single Enum.split_with/2 pass is linear and clearer.
  defp remove_stale_objects(state) do
    now = System.os_time(:second)

    {keep, stale} =
      Enum.split_with(state, fn {_key, expires_at} -> now <= expires_at end)

    Enum.each(stale, fn {key, _expires_at} -> :ets.delete(@table, key) end)
    Map.new(keep)
  end

  # Schedules the next sweep of stale entries.
  defp schedule_clear() do
    Process.send_after(self(), :clear_stale_objects, @clear_interval)
  end
end
|
lib/fiat.ex
| 0.866867
| 0.812644
|
fiat.ex
|
starcoder
|
defmodule Dicer.Validator do
  @moduledoc """
  Validates a tokenized dice expression before it is evaluated.

  Checks that operators are well placed (no leading `*` or `/`, no trailing
  operators, no operator pairs like `1--1`) and, when configured, that the
  expression does not exceed the allowed number of dice or sides.
  """

  # Operators that must never appear twice in a row.
  @invalid_operator_sequences [%Dicer.Tokens.Plus{}, %Dicer.Tokens.Minus{}, %Dicer.Tokens.Multiply{}, %Dicer.Tokens.Divide{}]
  # Operators that must not start an expression.
  @invalid_operators_at_start [%Dicer.Tokens.Multiply{}, %Dicer.Tokens.Divide{}]

  @doc """
  Runs every validation over the token list.

  Returns `{:ok, tokens}` when all checks pass, otherwise the `{:error, _}`
  tuple produced by the first failing check. NOTE: the positional checks
  report errors as a list of strings while the max-dice/max-sides checks
  report a bare string; kept as-is for backwards compatibility.
  """
  def validate({:ok, input}, validation_options) when is_list(input) and is_map(validation_options) do
    {:ok, input}
    |> _validate_proper_input_start(input)
    |> _validate_proper_input_end(input)
    |> _validate_operator_sequence(input)
    |> _validate_max_dice(input, validation_options)
    |> _validate_max_sides(input, validation_options)
  end

  def validate(input = {:error, _}, _validation_options) do
    input
  end

  # --- operator sequence -------------------------------------------------

  defp _validate_operator_sequence(input = {:error, _}, _) do
    input
  end

  # ROBUSTNESS: a single remaining token used to fall into the [head | tail]
  # clause with tail == [], where hd(tail) could raise for a trailing
  # operator. One token can never form an operator pair, so accept it.
  defp _validate_operator_sequence({:ok, [_last]}, input) do
    {:ok, input}
  end

  defp _validate_operator_sequence({:ok, [head | tail]}, input) do
    case Enum.member?(@invalid_operator_sequences, head) and Enum.member?(@invalid_operator_sequences, hd(tail)) do
      true -> {:error, ["Improper operator format (Ex. 1--1)!"]}
      _ -> _validate_operator_sequence({:ok, tail}, input)
    end
  end

  defp _validate_operator_sequence({:ok, []}, input) do
    {:ok, input}
  end

  # --- end of input -------------------------------------------------------

  # An expression consisting only of the End token is trivially fine.
  defp _validate_proper_input_end(input = {_, _}, [%Dicer.Tokens.End{}]) do
    input
  end

  defp _validate_proper_input_end(input = {:error, _}, _) do
    input
  end

  defp _validate_proper_input_end({:ok, input}, input) do
    # Inspect the token just before the trailing End marker.
    case Enum.member?(@invalid_operator_sequences, hd(tl(Enum.reverse(input)))) do
      true -> {:error, ["Trailing operator(s) on input!"]}
      _ -> {:ok, input}
    end
  end

  # --- start of input -----------------------------------------------------

  defp _validate_proper_input_start(input = {_, _}, [%Dicer.Tokens.End{}]) do
    input
  end

  defp _validate_proper_input_start(input = {:error, _}, _) do
    input
  end

  defp _validate_proper_input_start({:ok, input}, input) do
    case Enum.member?(@invalid_operators_at_start, hd(input)) do
      true -> {:error, ["Invalid operator(s) at beginning of input!"]}
      _ -> {:ok, input}
    end
  end

  # --- configurable limits ------------------------------------------------

  defp _validate_max_dice(input = {:error, _}, _, _) do
    input
  end

  defp _validate_max_dice({:ok, input}, input, %{max_dice: max}) when is_integer(max) and max > 0 do
    case Enum.reduce(input, 0, fn token, acc -> if _is_dice(token), do: acc + token.quantity, else: acc end) do
      total when total > max -> {:error, "Number of dice exceeds maximum allowed: #{max}"}
      _ -> {:ok, input}
    end
  end

  # ROBUSTNESS: previously a missing :max_dice key (or a non-positive /
  # non-integer value) raised a FunctionClauseError; the check is now skipped.
  defp _validate_max_dice({:ok, input}, input, _validation_options) do
    {:ok, input}
  end

  defp _validate_max_sides(input = {:error, _}, _, _) do
    input
  end

  defp _validate_max_sides({:ok, input}, input, %{max_sides: max}) when is_integer(max) and max > 0 do
    case Enum.reduce(input, 0, fn token, acc -> if _is_dice(token) and token.sides > acc, do: token.sides, else: acc end) do
      total when total > max -> {:error, "Number of sides exceeds maximum allowed: #{max}"}
      _ -> {:ok, input}
    end
  end

  # ROBUSTNESS: same fallback as _validate_max_dice/3.
  defp _validate_max_sides({:ok, input}, input, _validation_options) do
    {:ok, input}
  end

  # True for plain and fudge dice tokens.
  defp _is_dice(%{__struct__: var}) when var in [Dicer.Tokens.Dice, Dicer.Tokens.FudgeDice] do
    true
  end

  defp _is_dice(_) do
    false
  end
end
|
lib/dicer/validator.ex
| 0.631253
| 0.411436
|
validator.ex
|
starcoder
|
defmodule EctoAutoFilter do
  @moduledoc """
  Ecto Auto Filter

  **Automatic Filters based Ecto Schemas**

  EctoAutoFilter is a helper for projects that use Ecto Schemas and segregate the queries in entity repository modules.
  EctoAutoFilter inject the `filter/3` function that by default has a pattern matching for each field of declared entity.
  """

  # Field types for which the binary comparison operators ("==", ">=", ...)
  # are generated. NOTE: the attribute keeps its original spelling.
  @all_binaries_coparable_types ~w(id date integer datetime naive_datetime)a

  @doc """
  Injects the `filter/3` function and the private functions responsible for
  custom filters built from the base entity `schema` passed in.

  Options:

  - `schema`: This option is required and is necessary for EctoAutoFilter to build the queries.
  - `repo`: This option is optional and is used to execute the queries; when passed as a `use` option it overrides the global config for the current module.

  Example:

      defmodule MyApp.User do
        use Ecto.Schema
        schema "users" do
          field(:name, :string)
          field(:email, :string)
          field(:age, :integer)
        end
        ...
      end

      defmodule UserRepository do
        use EctoAutoFilter,
          repo: MyApp.Repo,
          schema: MyApp.User
      end

  After the entity repository module is declared, the filter functions can already be used as in the example below:

      iex> user_one = Repo.insert!(%User{name: "<NAME>", email: "<EMAIL>", age: 30})
      iex> user_two = Repo.insert!(%User{name: "<NAME>", email: "<EMAIL>", age: 28})
      iex> UserRepository.filter(%{age: {30, ">="}})
      %User{
        __meta__: #Ecto.Schema.Metadata<:loaded, "users">,
        age: 30,
        email: "<EMAIL>",
        id: 1,
        name: "<NAME>"
      }
  """
  defmacro __using__(opts) do
    quote bind_quoted: [opts: opts] do
      @before_compile EctoAutoFilter

      import Ecto.Query
      import EctoAutoFilter

      # The use-site :repo option wins over the global application config.
      @repo opts[:repo] || Application.compile_env(:ecto_auto_filter, :repo)
      @schema opts[:schema]

      @doc false
      def filter(filter, result_format \\ :many, query \\ @schema)

      def filter(filter, _result_format, _query) when filter == %{} or filter == [],
        do: {:error, :filter_not_found}

      def filter(filters, result_format, query) when is_map(filters) or is_list(filters) do
        # Only maps and keyword lists are supported filter containers.
        cond do
          is_map(filters) ->
            build_filters(filters, result_format, query)

          is_list(filters) and Keyword.keyword?(filters) ->
            build_filters(filters, result_format, query)

          true ->
            {:error, :unsupported_filter}
        end
      end

      def filter(_filter, _result_format, _query), do: {:error, :unsupported_filter}

      # Folds every {field, value} pair into the query via the generated
      # apply_filter/3 clauses, then executes and normalizes the result.
      defp build_filters(filters, result_format, query) do
        Enum.reduce(filters, query, fn
          {field, value}, query ->
            apply_filter(field, query, value)
        end)
        |> run_query(result_format)
        |> handle_result()
      end

      # Compile-time codegen: for each schema field, build the quoted
      # apply_filter/3 clauses appropriate for its type and evaluate them
      # inside the using module.
      for schema_field <- @schema.__schema__(:fields) do
        field_type = @schema.__schema__(:type, schema_field)

        write_filters_by_type(schema_field, field_type)
        |> Enum.each(fn macro ->
          macro
          |> Macro.expand(__ENV__)
          |> Code.eval_quoted()
        end)
      end

      defp run_query({:error, _} = error_tuple, _result_format), do: error_tuple

      defp run_query(queryable, :one) do
        @repo.one(queryable)
      end

      defp run_query(queryable, :first) do
        @repo.all(queryable)
        |> case do
          [first | _] ->
            first

          _ ->
            nil
        end
      end

      defp run_query(queryable, :many) do
        @repo.all(queryable)
      end

      defp handle_result({:error, _} = error_tuple), do: error_tuple
      defp handle_result(nil), do: {:error, :not_found}
      defp handle_result(result), do: {:ok, result}
    end
  end

  # Filter set for fields whose type supports ordered binary comparison.
  @doc false
  def write_filters_by_type(field_name, field_type)
      when field_type in @all_binaries_coparable_types do
    [
      list_filters(field_name),
      not_in_compare_filter(field_name),
      in_compare_filter(field_name),
      binary_compare_filter(field_name),
      equal_compare_filter(field_name)
    ]
  end

  # Filter set for every other field type (strings etc.): like/ilike instead
  # of ordered comparisons.
  @doc false
  def write_filters_by_type(field_name, _) do
    [
      list_filters(field_name),
      not_in_compare_filter(field_name),
      in_compare_filter(field_name),
      ilike_filter(field_name),
      like_filter(field_name),
      equal_compare_filter(field_name)
    ]
  end

  # A plain list of values folds each element through apply_filter/3 again.
  defp list_filters(field_name) do
    quote do
      defp apply_filter(unquote(field_name) = field_id, query, values) when is_list(values) do
        Enum.reduce(values, query, fn value, query_acc ->
          apply_filter(field_id, query_acc, value)
        end)
      end
    end
  end

  defp equal_compare_filter(field_name) do
    quote do
      defp apply_filter(unquote(field_name), query, value) do
        where(query, [r], field(r, ^unquote(field_name)) == ^value)
      end
    end
  end

  defp not_in_compare_filter(field_name) do
    quote do
      defp apply_filter(unquote(field_name), query, {values, "not_in"}) when is_list(values) do
        where(query, [r], field(r, ^unquote(field_name)) not in ^values)
      end
    end
  end

  defp in_compare_filter(field_name) do
    quote do
      defp apply_filter(unquote(field_name), query, {values, "in"}) when is_list(values) do
        where(query, [r], field(r, ^unquote(field_name)) in ^values)
      end
    end
  end

  defp ilike_filter(field_name) do
    quote do
      defp apply_filter(unquote(field_name), query, {value, "ilike"}) when is_binary(value) do
        where(query, [r], ilike(field(r, ^unquote(field_name)), ^value))
      end
    end
  end

  defp like_filter(field_name) do
    quote do
      defp apply_filter(unquote(field_name), query, {value, "like"}) when is_binary(value) do
        where(query, [r], like(field(r, ^unquote(field_name)), ^value))
      end
    end
  end

  # Handles the {value, operator} tuple form for comparable field types.
  defp binary_compare_filter(field_name) do
    quote do
      defp apply_filter(unquote(field_name), query, {value, op}) when is_binary(op) do
        case op do
          "==" -> where(query, [r], field(r, ^unquote(field_name)) == ^value)
          "!=" -> where(query, [r], field(r, ^unquote(field_name)) != ^value)
          ">=" -> where(query, [r], field(r, ^unquote(field_name)) >= ^value)
          "<=" -> where(query, [r], field(r, ^unquote(field_name)) <= ^value)
          "<" -> where(query, [r], field(r, ^unquote(field_name)) < ^value)
          ">" -> where(query, [r], field(r, ^unquote(field_name)) > ^value)
          _ -> {:error, :unsupported_filter_operator}
        end
      end
    end
  end

  @doc """
  Adds a custom, composable filter to the base module's other filters at
  compile time.

  - `query`: the queryable the generated function will receive;
  - `value`: the value received to compose the filter; it can be manipulated via pattern matching;
  - `key`: identifies the created filter in the `filter/3` function.

  This filter is added to the other filter rules and becomes available through
  `filter/3`, for example:

      defmodule MyApp.User do
        use Ecto.Schema
        schema "users" do
          field(:name, :string)
          field(:email, :string)
          field(:age, :integer)
          field(:bith_date, :date)
        end
        ...
      end

      defmodule UserRepository do
        use EctoAutoFilter,
          repo: MyApp.Repo,
          schema: MyApp.User

        add_filter query, value, :birth_years_ago do
          x_years_ago = (365 * value)
          limit_date = Date.utc_today() |> Date.add(-x_years_ago)
          where(query, [r], r.birth_date == ^limit_date)
        end
      end

  Once the custom filter is declared it can be used as in the following example:

      iex> user_one = Repo.insert!(%User{name: "<NAME>", email: "<EMAIL>", age: 30, birth_date: 1991-01-01})
      iex> user_two = Repo.insert!(%User{name: "<NAME>", email: "<EMAIL>", age: 28, birth_date: 1993-01-01})
      iex> UserRepository.filter(%{birth_years_ago: 28})
      %User{
        __meta__: #Ecto.Schema.Metadata<:loaded, "users">,
        age: 28,
        email: "<EMAIL>",
        id: 2,
        name: "<NAME>"
      }
  """
  defmacro add_filter(query, value, key, do: block) do
    quote do
      defp apply_filter(unquote(key), unquote(query), unquote(value)) do
        unquote(block)
      end
    end
  end

  # Emitted last in the using module: a catch-all clause so unknown filter
  # keys return an error tuple instead of raising FunctionClauseError.
  defmacro __before_compile__(_env) do
    quote do
      defp apply_filter(_, _, _), do: {:error, :unsupported_filter}
    end
  end
end
|
lib/ecto_auto_filter.ex
| 0.756762
| 0.433442
|
ecto_auto_filter.ex
|
starcoder
|
defmodule MafiaEngine.Accusations do
  @moduledoc """
  This module defines the type for accusations and functions to handle them.

  ## Examples

      iex> a = MafiaEngine.Accusations.new(2)
      %MafiaEngine.Accusations{ballots: %{}, required: 2}
      iex> abed = MafiaEngine.Player.new("Abed")
      ...> jeff = MafiaEngine.Player.new("Jeff")
      ...> {:ok, a} = MafiaEngine.Accusations.accuse(a, abed, jeff)
      {:ok, %MafiaEngine.Accusations{ballots: %{"Abed" => "Jeff"}, required: 2}}
      iex> a = MafiaEngine.Accusations.withdraw(a, "Abed")
      %MafiaEngine.Accusations{ballots: %{}, required: 2}
      iex> {:ok, a} = MafiaEngine.Accusations.accuse(a, jeff, abed)
      ...> MafiaEngine.Accusations.accuse(a, abed, abed)
      {:accused, "Abed",
       %MafiaEngine.Accusations{
         ballots: %{"Abed" => "Abed", "Jeff" => "Abed"},
         required: 2
       }}
  """
  alias __MODULE__
  alias MafiaEngine.Player

  @enforce_keys [:ballots, :required]
  defstruct [:ballots, :required]

  # ballots maps accuser name => accused name; one ballot per accuser.
  @type t :: %Accusations{ballots: %{optional(String.t()) => String.t()}, required: pos_integer}

  @doc """
  Creates a new accusations with `required` as the number of accusations required to cause a player to be accused.
  """
  @spec new(pos_integer) :: t
  def new(required) do
    %Accusations{ballots: Map.new(), required: required}
  end

  @doc """
  Removes the accusation from `accuser` if exists.
  """
  @spec withdraw(t, String.t()) :: t
  def withdraw(%Accusations{} = accusations, accuser) do
    # Deleting a missing key is a no-op, so withdrawing twice is safe.
    Map.update!(accusations, :ballots, &Map.delete(&1, accuser))
  end

  @doc """
  Adds the accusation from `accuser` to `accused`.

  It also checks if accused has the required accusations to be accused.
  Returns an error if either `accuser` or `accused` is not alive.

  ## Examples

      iex> a = MafiaEngine.Accusations.new(2)
      ...> jeff = MafiaEngine.Player.new("Jeff")
      ...> pierce = MafiaEngine.Player.new("Pierce")
      ...> pierce = MafiaEngine.Player.kill(pierce)
      ...> MafiaEngine.Accusations.accuse(a, pierce, jeff)
      {:error, :cannot_accuse_while_dead}
      iex> MafiaEngine.Accusations.accuse(a, jeff, pierce)
      {:error, :cannot_accuse_dead_players}
  """
  @spec accuse(t, MafiaEngine.Player.t(), MafiaEngine.Player.t()) ::
          {:ok, t}
          | {:accused, String.t(), t}
          | {:error, :cannot_accuse_while_dead | :cannot_accuse_dead_players}
  def accuse(_accusations, %Player{alive: false}, _accused) do
    {:error, :cannot_accuse_while_dead}
  end

  def accuse(_accusations, _accuser, %Player{alive: false}) do
    {:error, :cannot_accuse_dead_players}
  end

  def accuse(%Accusations{} = accusations, accuser, accused) do
    # Re-accusing replaces the accuser's previous ballot.
    accusations
    |> Map.update!(:ballots, &Map.put(&1, accuser.name, accused.name))
    |> check_accusations(accused.name)
  end

  # Tags the result with :accused once the threshold is reached.
  @spec check_accusations(t, String.t()) :: {:ok, t} | {:accused, String.t(), t}
  defp check_accusations(accusations, accused) do
    if enough?(accusations, accused) do
      {:accused, accused, accusations}
    else
      {:ok, accusations}
    end
  end

  # True when the number of ballots naming `accused` reaches the threshold.
  #
  # IDIOM: replaces the original filter |> count |> (&(&1 >= required)).()
  # pipeline with Enum.count/2 and a plain comparison.
  @spec enough?(t, String.t()) :: boolean
  defp enough?(%{ballots: ballots, required: required}, accused) do
    Enum.count(Map.values(ballots), &(&1 == accused)) >= required
  end
end
|
lib/mafia_engine/accusations.ex
| 0.717903
| 0.499084
|
accusations.ex
|
starcoder
|
defmodule PQueue2 do
  @moduledoc """
  Priority queue that wraps [pqueue2](https://hex.pm/packages/pqueue).

      iex> {value, _} = PQueue2.new
      iex> |> PQueue2.put(:a, 2)
      iex> |> PQueue2.put(:b, 1)
      iex> |> PQueue2.put(:c, 1)
      iex> |> PQueue2.pop
      iex> value
      :b

  PQueue2 implements Collectable & Enumerable.

      iex> [{:a, 2}, {:b, 1}, {:c, 1}, {:d, 2}] |> Enum.into(PQueue2.new) |> Enum.to_list
      [:b, :c, :a, :d]
  """
  # @type t :: %__MODULE__{pq: :pqueue2.pqueue2}
  @type pqueue2 ::
          :empty
          | {integer, pqueue2, pqueue2, :element, term}
          | {integer, pqueue2, pqueue2, :queue, :queue.queue()}
  @type t :: %__MODULE__{pq: pqueue2}

  @default_priority 0
  @default_value nil

  defstruct pq: :empty

  @doc """
  Create a new priority queue.
  """
  @spec new :: t
  def new do
    %__MODULE__{pq: :pqueue2.new()}
  end

  @doc """
  Checks whether the queue holds no elements.
  """
  @spec empty?(t) :: boolean
  def empty?(queue)

  def empty?(%{pq: inner}) do
    :pqueue2.is_empty(inner)
  end

  @doc """
  Returns the number of elements stored in the queue.
  """
  @spec count(t) :: non_neg_integer
  def count(queue)

  def count(%{pq: inner}) do
    :pqueue2.len(inner)
  end

  @doc """
  Put the value.
  """
  @spec put(t, term, non_neg_integer) :: t
  def put(queue, value, priority \\ @default_priority) do
    replace_pq(queue, :pqueue2.in(value, priority, queue.pq))
  end

  @doc """
  Pop the max value.

      iex> PQueue2.new |> PQueue2.put(:a, 2) |> PQueue2.put(:b, 1) |> PQueue2.pop
      {:b, %PQueue2{pq: {2, :empty, :empty, :element, :a}}}
      iex> PQueue2.pop PQueue2.new
      {nil, %PQueue2{pq: :empty}}
      iex> PQueue2.pop PQueue2.new, :empty
      {:empty, %PQueue2{pq: :empty}}
  """
  @spec pop(t, term) :: {term, t}
  def pop(queue, default \\ @default_value) do
    case :pqueue2.out(queue.pq) do
      {{:value, value}, remaining} -> {value, replace_pq(queue, remaining)}
      {:empty, _remaining} -> {default, queue}
    end
  end

  @doc """
  Pop the max value & priority.

      iex> PQueue2.new |> PQueue2.put(:a, 2) |> PQueue2.put(:b, 1) |> PQueue2.pop_with_priority
      {{:b, 1}, %PQueue2{pq: {2, :empty, :empty, :element, :a}}}
  """
  @spec pop_with_priority(t, term) :: {{term, non_neg_integer} | term, t}
  def pop_with_priority(queue, default \\ @default_value) do
    case :pqueue2.pout(queue.pq) do
      {{:value, value, priority}, remaining} ->
        {{value, priority}, replace_pq(queue, remaining)}

      {:empty, _remaining} ->
        {default, queue}
    end
  end

  @doc """
  Pop the first value at the priority.

      iex> PQueue2.new |> PQueue2.put(:a, 2) |> PQueue2.put(:b, 1) |> PQueue2.pop_at(2)
      {:a, %PQueue2{pq: {1, :empty, :empty, :element, :b}}}
      iex> PQueue2.new |> PQueue2.put(:a, 2) |> PQueue2.put(:b, 1) |> PQueue2.pop_at(3)
      {nil, %PQueue2{pq: {1, :empty, {2, :empty, :empty, :element, :a}, :element, :b}}}
  """
  @spec pop_at(t, non_neg_integer, term) :: {term, t}
  def pop_at(queue, priority, default \\ @default_value) do
    case :pqueue2.out(priority, queue.pq) do
      {{:value, value}, remaining} -> {value, replace_pq(queue, remaining)}
      {:empty, _remaining} -> {default, queue}
    end
  end

  # Returns the queue with its wrapped :pqueue2 term swapped out.
  defp replace_pq(queue, inner), do: %{queue | pq: inner}
end
|
lib/pqueue2.ex
| 0.793346
| 0.518607
|
pqueue2.ex
|
starcoder
|
defmodule Iteraptor.Utils do
@moduledoc "Helper functions to update nested terms"
defmodule Unsupported do
  @moduledoc """
  An exception to be thrown from banged methods of `Iteraptor`.
  Sooner or later we’ll support everything, that’s why meanwhile
  we raise `Unsupported` if something goes wrong.
  """
  defexception [:term, :function, :message]

  # Builds the exception struct, deriving :message from :term and :function.
  def exception(term: term, function: function) do
    %__MODULE__{
      term: term,
      function: function,
      message: "Unsupported term #{inspect(term)} in call to #{function}."
    }
  end
end
@doc """
Determines the type of the given term.

Returns `{collection_module, enumerable_view, empty_collectable}` for
supported containers, or `:error` for unsupported scalars.

## Examples:
    iex> Iteraptor.Utils.type(%{foo: :bar})
    {Map, %{foo: :bar}, %{}}
    iex> Iteraptor.Utils.type([foo: :bar])
    {Keyword, [foo: :bar], []}
    iex> Iteraptor.Utils.type([{:foo, :bar}])
    {Keyword, [{:foo, :bar}], []}
    iex> Iteraptor.Utils.type(~w|foo bar|a)
    {List, [:foo, :bar], []}
    iex> Iteraptor.Utils.type(42)
    :error
"""
@spec type(%{} | keyword() | list() | any()) :: {atom(), any(), any()} | :error
def type(input) do
  # Dispatch on the term itself plus its Enumerable / Iteraptable impls.
  case {input, Enumerable.impl_for(input), Iteraptable.impl_for(input)} do
    # Special-cased containers with dedicated empty constructors.
    {%MapSet{}, _, _} ->
      {MapSet, input, MapSet.new()}

    {%Iteraptor.Array{}, _, _} ->
      {Iteraptor.Array, input, Iteraptor.Array.new()}

    # Plain lists: distinguish keyword lists from ordinary lists.
    {_, Enumerable.List, _} ->
      {if(Keyword.keyword?(input), do: Keyword, else: List), input, []}

    {_, Enumerable.Map, _} ->
      {Map, input, %{}}

    # Terms implementing the Iteraptable protocol describe themselves.
    {_, _, i} when not is_nil(i) ->
      {i.type(input), i.to_enumerable(input), i.to_collectable(input)}

    # Fallback: structs without protocol support are unwrapped to plain maps;
    # anything else (scalars) is unsupported.
    {_, _, _} ->
      if is_map(input),
        do: {input.__struct__, Map.from_struct(input), %{}},
        else: :error
  end
end
@doc """
Digs the leaf value in the nested keyword / map.

Descends while each level has exactly one key; returns the key path and the
leaf value, or `{:error, term}` at the first level with several entries.

## Examples:
    iex> Iteraptor.Utils.dig(%{k1: %{k2: %{k3: :value}}})
    {:ok, {[:k1, :k2, :k3], :value}}
    iex> Iteraptor.Utils.dig([k1: [k2: [k3: :value]]])
    {:ok, {[:k1, :k2, :k3], :value}}
    iex> Iteraptor.Utils.dig([k1: :value, k2: :value])
    {:error, [k1: :value, k2: :value]}
    iex> Iteraptor.Utils.dig([k1: %{k2: [k3: :value]}])
    {:ok, {[:k1, :k2, :k3], :value}}
"""
@spec dig(%{} | keyword(), keyword()) :: {:ok, {list(), any()}} | {:error, any()}
def dig(input, acc \\ [])
# NOTE(review): the accumulator never becomes an {:error, _} tuple via the
# clauses below, so this clause looks unreachable — confirm before removal.
def dig(_, {:error, _} = error), do: error

# A map may be dug through only while it has exactly one key.
def dig(input, acc) when is_map(input) do
  case Map.keys(input) do
    [k] -> dig(input[k], [k | acc])
    _ -> {:error, input}
  end
end

# Single-pair keyword list: descend into its value.
def dig([{k, v}], acc), do: dig(v, [k | acc])
# Any other list (several entries, or non-pair items) cannot be dug further.
def dig(input, _) when is_list(input), do: {:error, input}
# Leaf reached: the path was accumulated in reverse, so restore its order.
def dig(input, acc), do: {:ok, {:lists.reverse(acc), input}}

@doc """
Same as `dig/2` but raises `Iteraptor.Utils.Unsupported` on failure.
"""
@spec dig!(%{} | keyword(), keyword()) :: {list(), any()} | no_return()
def dig!(input, acc \\ []) do
  case dig(input, acc) do
    {:ok, result} -> result
    {:error, term} -> raise Unsupported, term: term, function: "Iteraptor.Utils.dig/2"
  end
end
@delimiter Application.get_env(:iteraptor, :delimiter, ".")
@doc false
@spec delimiter(list()) :: binary()
def delimiter(opts) when is_list(opts), do: opts[:delimiter] || @delimiter
@doc false
# Converts `value` to an integer when its string form is fully numeric,
# otherwise to an already-existing atom. `String.to_existing_atom/1` (rather
# than `to_atom/1`) is deliberate: unbounded input cannot leak new atoms.
@spec smart_convert(any()) :: integer() | binary() | atom()
def smart_convert(value) do
  string = to_string(value)

  case Integer.parse(string) do
    {int, ""} -> int
    # Fix: convert the *stringified* value — the original passed the raw
    # `value`, which raised for any non-binary input such as an atom.
    _ -> String.to_existing_atom(string)
  end
end
@doc """
Splits the string by delimiter, possibly converting the keys to symbols.

## Examples:

    iex> Iteraptor.Utils.split("a.b.c.d", transform: :none)
    ["a", "b", "c", "d"]
    iex> Iteraptor.Utils.split("a_b_c_d", delimiter: "_")
    ["a", "b", "c", "d"]
    iex> Iteraptor.Utils.split("a.b.c.d", transform: :unsafe)
    [:a, :b, :c, :d]
    iex> Iteraptor.Utils.split("a.b.c.d", transform: :safe)
    [:a, :b, :c, :d]
"""
@spec split(input :: binary(), opts :: keyword()) :: [binary() | atom()]
def split(input, opts \\ []) when is_binary(input) do
  # Pick the per-chunk transformation once, then run a single pipeline.
  mapper =
    case opts[:transform] do
      :safe -> &String.to_existing_atom/1
      :unsafe -> &String.to_atom/1
      _ -> & &1
    end

  input
  |> String.split(delimiter(opts))
  |> Enum.map(mapper)
end
@doc """
Joins the array of keys into the string using delimiter.

## Examples:

    iex> Iteraptor.Utils.join(~w|a b c d|)
    "a.b.c.d"
    iex> Iteraptor.Utils.join(~w|a b c d|, delimiter: "_")
    "a_b_c_d"
"""
@spec join(Enum.t(), keyword()) :: binary()
def join(input, opts \\ []) when is_list(input) do
  input
  |> Enum.join(delimiter(opts))
end
# Compile-time default container for missing intermediate levels —
# NOTE(review): frozen at build time, runtime config changes are ignored here.
@into Application.get_env(:iteraptor, :into, %{})
@doc """
Safe put the value deeply into the term nesting structure. Creates
all the intermediate keys if needed.
## Examples:
    iex> Iteraptor.Utils.deep_put_in(%{}, {~w|a b c|a, 42})
    %{a: %{b: %{c: 42}}}
    iex> Iteraptor.Utils.deep_put_in(%{a: %{b: %{c: 42}}}, {~w|a b d|a, :foo})
    %{a: %{b: %{c: 42, d: :foo}}}
    iex> Iteraptor.Utils.deep_put_in(%{a: %{b: [c: 42]}}, {~w|a b d|a, :foo})
    %{a: %{b: [c: 42, d: :foo]}}
    iex> Iteraptor.Utils.deep_put_in(%{a: %{b: [42]}}, {~w|a b|a, :foo})
    %{a: %{b: [42, :foo]}}
    iex> Iteraptor.Utils.deep_put_in(%{a: [:foo, %{b: 42}]}, {~w|a b|a, :foo})
    %{a: [:foo, %{b: 42}, {:b, :foo}]}
"""
@spec deep_put_in(%{} | keyword(), {list(), any()}, keyword()) :: %{} | keyword()
def deep_put_in(target, key_value, opts \\ [])

# Single-segment path: a plain put_in suffices.
def deep_put_in(target, {[key], value}, _opts) do
  put_in(target, [key], value)
end

def deep_put_in(target, {key, value}, opts) when is_list(key) do
  into = opts[:into] || @into
  # Split the path into leading segments (`head`) and the final key (`tail`).
  [tail | head] = :lists.reverse(key)
  head = :lists.reverse(head)

  # Pass 1: materialize every container along `head`, creating an
  # `into`-shaped one wherever a level is missing.
  {_, target} =
    Enum.reduce(head, {[], target}, fn k, {keys, acc} ->
      keys = keys ++ [k]
      {_, value} = get_and_update_in(acc, keys, &{&1, if(is_nil(&1), do: into, else: &1)})
      {keys, value}
    end)

  # Pass 2: place the value, merging with whatever already lives at the path.
  case get_in(target, key) do
    nil ->
      {_, result} =
        get_and_update_in(target, head, fn
          nil -> {nil, Enum.into([{tail, value}], into)}
          curr when is_map(curr) -> {curr, Map.put(curr, tail, value)}
          curr when is_list(curr) -> {curr, curr ++ [{tail, value}]}
          curr -> {curr, [curr, {tail, value}]}
        end)

      result

    # Existing list at the full path: append the bare value …
    curr when is_list(curr) ->
      put_in(target, key, curr ++ [value])

    # … an existing map is flattened to a keyword list first …
    curr when is_map(curr) ->
      put_in(target, key, Map.to_list(curr) ++ [value])

    # … and a scalar becomes a two-element list.
    curr ->
      put_in(target, key, [curr, value])
  end
end
@doc """
Checks if the map/keyword looks like a normal list.

## Examples:

    iex> Iteraptor.Utils.quacks_as_list(%{"0" => :foo, 1 => :bar})
    true
    iex> Iteraptor.Utils.quacks_as_list([{:"1", :bar}, {:"0", :foo}])
    true
    iex> Iteraptor.Utils.quacks_as_list(%{foo: :bar})
    false
    iex> Iteraptor.Utils.quacks_as_list(%{"5" => :foo, "1" => :bar})
    false
    iex> Iteraptor.Utils.quacks_as_list(42)
    false
"""
@spec quacks_as_list(%{} | keyword() | any()) :: true | false
def quacks_as_list(input) when is_list(input) or is_map(input) do
  # Extract each key as an integer index (nil when a key is not fully numeric),
  # then require the sorted indices to be exactly 0..n-1.
  indices =
    Enum.map(input, fn
      {k, _} when is_atom(k) or is_binary(k) or is_number(k) ->
        case Integer.parse(to_string(k)) do
          {index, ""} -> index
          _ -> nil
        end

      _ ->
        nil
    end)

  Enum.sort(indices) == Enum.to_list(0..(Enum.count(input) - 1))
end

def quacks_as_list(_), do: false
@doc """
Gently tries to create a linked list out of input, returns input if it
cannot be safely converted to the list.

## Examples:

    iex> Iteraptor.Utils.try_to_list(%{"0" => :foo, 1 => :bar})
    [:foo, :bar]
    iex> Iteraptor.Utils.try_to_list([{:"1", :bar}, {:"0", :foo}])
    [:foo, :bar]
    iex> Iteraptor.Utils.try_to_list(%{foo: :bar})
    %{foo: :bar}
    iex> Iteraptor.Utils.try_to_list(%{"5" => :foo, "1" => :bar})
    %{"5" => :foo, "1" => :bar}
"""
@spec try_to_list(any()) :: list() | any()
def try_to_list(input) do
  if quacks_as_list(input) do
    # `quacks_as_list/1` guarantees the keys form the exact index set 0..n-1,
    # so sorting by the numeric key yields the list order.
    input
    |> Enum.sort_by(fn {k, _} -> k |> to_string() |> String.to_integer() end)
    |> Enum.map(&elem(&1, 1))
  else
    input
  end
end
@doc """
Squeezes the nested structure merging same keys.
## Examples:
    #iex> Iteraptor.Utils.squeeze([foo: [bar: 42], foo: [baz: 3.14]])
    #[foo: [bar: 42, baz: 3.14]]
    iex> Iteraptor.Utils.squeeze([foo: %{bar: 42}, foo: %{baz: 3.14}])
    [foo: %{bar: 42, baz: 3.14}]
    iex> Iteraptor.Utils.squeeze([foo: %{bar: 42}, foo: :baz])
    [foo: [%{bar: 42}, :baz]]
    iex> Iteraptor.Utils.squeeze([a: [b: [c: 42]], a: [b: [d: 3.14]]])
    [a: [b: [c: 42, d: 3.14]]]
    iex> Iteraptor.Utils.squeeze([a: [b: [c: 42]], a: [b: %{d: 3.14}]])
    [a: [b: [c: 42, d: 3.14]]]
    iex> Iteraptor.Utils.squeeze([a: [b: [c: :foo]], a: [b: [c: 3.14]]])
    [a: [b: [c: [:foo, 3.14]]]]
    iex> Iteraptor.Utils.squeeze([a: [b: [:foo, :bar]], a: [b: [c: 3.14]]])
    [a: [b: [:foo, :bar, {:c, 3.14}]]]
    iex> Iteraptor.Utils.squeeze([a: [:foo, :bar], a: [b: [c: 3.14]]])
    [a: [:foo, :bar, {:b, [c: 3.14]}]]
"""
@spec squeeze(%{} | keyword() | list() | Access.t(), keyword()) :: %{} | keyword() | list()
# credo:disable-for-lines:59
def squeeze(input, opts \\ [])

def squeeze(input, opts) when is_map(input) or is_list(input) do
  # `type/1` yields the container kind, the enumerable view and an empty
  # collectable of the same kind.
  {type, input, into} = type(input)

  # Fold pass: accumulate pairs per key. `orphans` counts bare (non-pair)
  # values and doubles as a synthetic map key for them.
  {result, _} =
    Enum.reduce(input, {into, 0}, fn
      {k, v}, {acc, orphans} ->
        {_, neu} =
          case type do
            MapSet ->
              {nil, MapSet.put(acc, {k, v})}
            Iteraptor.Array ->
              {nil, Iteraptor.Array.append(acc, {k, v})}
            List ->
              # Plain lists keep duplicates; prepended here, reversed by the caller.
              {nil, [{k, v} | acc]}
            _ ->
              # Keyword/Map: merge the incoming value with whatever the key holds.
              get_and_update_in(acc, [k], fn
                nil ->
                  {nil, v}
                map when is_map(map) ->
                  case v do
                    %{} -> {map, Map.merge(map, v)}
                    _ -> {map, [map, v]}
                  end
                list when is_list(list) ->
                  case v do
                    [] -> {list, list}
                    [_ | _] -> {list, list ++ v}
                    %{} -> {list, list ++ Map.to_list(v)}
                    _ -> {list, list ++ [v]}
                  end
                other ->
                  # Two scalars under the same key collapse into a list.
                  {other, [other, v]}
              end)
          end
        {neu, orphans}
      v, {acc, orphans} ->
        # Bare (non-pair) element.
        case type do
          Keyword -> {[v | acc], orphans}
          List -> {[v | acc], orphans}
          Map -> {Map.put(acc, orphans, v), orphans + 1}
        end
    end)

  # Recurse into the merged values; list values were built reversed above.
  result =
    result
    |> Enum.into(into, fn
      {k, v} when is_list(v) -> {k, v |> squeeze(opts) |> :lists.reverse()}
      {k, v} -> {k, squeeze(v, opts)}
      v -> v
    end)
    |> try_to_list()

  # Optionally re-wrap a struct that `type/1` unwrapped.
  if opts[:structs] == :keep && is_map(result) and type != Map,
    do: struct(type, result),
    else: result
end

def squeeze(input, _opts), do: input
@doc false
# Compile-callback helper: returns the compiled module's struct, so merely
# referencing it asserts that the module defines one.
def struct_checker(env, _bytecode), do: env.module.__struct__
end
|
lib/iteraptor/utils.ex
| 0.901047
| 0.517388
|
utils.ex
|
starcoder
|
# Mirrors Erlang's `:file_info` record (kernel/include/file.hrl) field-for-field.
defrecord File.Stat, Record.extract(:file_info, from_lib: "kernel/include/file.hrl") do
  @moduledoc """
  A record responsible to hold file information. Its fields are:

  * `size` - Size of file in bytes.
  * `type` - `:device`, `:directory`, `:regular`, `:other`. The type of the file.
  * `access` - `:read`, `:write`, `:read_write`, `:none`. The current system access to
    the file.
  * `atime` - The last time the file was read.
  * `mtime` - The last time the file was written.
  * `ctime` - The interpretation of this time field depends on the operating
    system. On Unix, it is the last time the file or the inode was
    changed. In Windows, it is the create time.
  * `mode` - The file permissions.
  * `links` - The number of links to this file. This is always 1 for file
    systems which have no concept of links.
  * `major_device` - Identifies the file system where the file is located.
    In windows, the number indicates a drive as follows:
    0 means A:, 1 means B:, and so on.
  * `minor_device` - Only valid for character devices on Unix. In all other
    cases, this field is zero.
  * `inode` - Gives the inode number. On non-Unix file systems, this field
    will be zero.
  * `uid` - Indicates the owner of the file.
  * `gid` - Gives the group that the owner of the file belongs to. Will be
    zero for non-Unix file systems.

  The time type returned in `atime`, `mtime`, and `ctime` is dependent on the
  time type set in options. `{:time, type}` where type can be `:local`,
  `:universal`, or `:posix`. Default is `:local`.
  """
end
# Raised by the bang variants (`File.read!/1` & co.); the POSIX reason is
# rendered via `:file.format_error/1`.
defexception File.Error, [reason: nil, action: "", path: nil] do
  def message(exception) do
    formatted = list_to_binary(:file.format_error(reason exception))
    "could not #{action exception} #{path exception}: #{formatted}"
  end
end

# Raised by `File.cp!/3` / `File.cp_r!/3` when a copy fails; carries both ends.
defexception File.CopyError, [reason: nil, action: "", source: nil, destination: nil] do
  def message(exception) do
    formatted = list_to_binary(:file.format_error(reason exception))
    "could not #{action exception} from #{source exception} to #{destination exception}: #{formatted}"
  end
end

# Raised while lazily iterating a file (see `File.iterator/1`).
defexception File.IteratorError, reason: nil do
  def message(exception) do
    formatted = list_to_binary(:file.format_error(reason exception))
    "error during file iteration: #{formatted}"
  end
end
defmodule File do
@moduledoc """
This module contains function to manipulate files.
Many of the functions that interact with the filesystem
have their naming based on its UNIX variants. For
example, deleting a file is done with `File.rm`.
Getting its stats with `File.stat`.
In order to write and read files, one must use the
functions in the IO module. By default, a file is
opened in binary mode which requires the functions
`IO.binread`, `IO.binwrite` and `IO.binreadline` to
interact with the file. A developer may pass `:utf8`
as an option when opening the file and then all other
functions from IO are available, since they work directly
with Unicode data.
Most of the functions in this module return `:ok`
or `{ :ok, result }` in case of success, `{ :error, reason }`
otherwise. Those function are also followed by
a variant that ends with `!` which returns the
result (without the `{ :ok, result }` tuple) in
case of success or raises an exception in case it
fails. For example:
File.read("hello.txt")
#=> { :ok, "World" }
File.read("invalid.txt")
#=> { :error, :enoent }
File.read!("hello.txt")
#=> "World"
File.read!("invalid.txt")
#=> raises File.Error
In general, a developer should use the former in case
he wants to react if the file does not exist. The latter
should be used when the developer expects his software
to fail in case the file cannot be read (i.e. it is
literally an exception).
Finally, the functions in this module accept either
a char list or a binary. When manipulating paths, a char
list is returned if one is given as argument. However,
when reading files, binaries are always returned.
"""
alias :file, as: F
alias :filename, as: FN
alias :filelib, as: FL
@doc """
Returns true if the path is a regular file.

## Examples

    File.regular? __FILE__ #=> true
"""
def regular?(path) do
  FL.is_regular(path)
end

@doc """
Returns true if the path is a directory.
"""
def dir?(path) do
  FL.is_dir(path)
end

@doc """
Returns true if the given argument exists.
It can be regular file, directory, socket,
symbolic link, named pipe or device file.

## Examples

    File.exists?("test/")
    #=> true
    File.exists?("missing.txt")
    #=> false
    File.exists?("/dev/null")
    #=> true
"""
def exists?(path) do
  # read_file_info succeeds for any existing filesystem entry, whatever its type.
  match?({ :ok, _ }, F.read_file_info(path))
end
@doc """
Tries to create the directory `path`. Missing parent directories are not created.
Returns `:ok` if successful, or `{:error, reason}` if an error occurs.

Typical error reasons are:

* :eacces  - Missing search or write permissions for the parent directories of `path`.
* :eexist  - There is already a file or directory named `path`.
* :enoent  - A component of `path` does not exist.
* :enospc  - There is no space left on the device.
* :enotdir - A component of `path` is not a directory.
  On some platforms, `:enoent` is returned instead.
"""
def mkdir(path) do
  F.make_dir(path)
end

@doc """
Same as `mkdir`, but raises an exception in case of failure. Otherwise `:ok`.
"""
def mkdir!(path) do
  case mkdir(path) do
    :ok -> :ok
    { :error, reason } ->
      raise File.Error, reason: reason, action: "make directory", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Tries to create the directory `path`. Missing parent directories are created.
Returns `:ok` if successful, or `{:error, reason}` if an error occurs.

Typical error reasons are:

* :eacces  - Missing search or write permissions for the parent directories of `path`.
* :enospc  - There is no space left on the device.
* :enotdir - A component of `path` is not a directory.
"""
def mkdir_p(path) do
  # Appending "." makes ensure_dir treat `path` itself (not only its parent)
  # as a directory to create.
  FL.ensure_dir(FN.join(path, "."))
end

@doc """
Same as `mkdir_p`, but raises an exception in case of failure. Otherwise `:ok`.
"""
def mkdir_p!(path) do
  case mkdir_p(path) do
    :ok -> :ok
    { :error, reason } ->
      raise File.Error, reason: reason, action: "make directory (with -p)", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Returns `{:ok, binary}`, where `binary` is a binary data object that contains the contents
of `path`, or `{:error, reason}` if an error occurs.

Typical error reasons:

* :enoent  - The file does not exist.
* :eacces  - Missing permission for reading the file,
  or for searching one of the parent directories.
* :eisdir  - The named file is a directory.
* :enotdir - A component of the file name is not a directory.
  On some platforms, `:enoent` is returned instead.
* :enomem  - There is not enough memory for the contents of the file.

You can use `:file.format_error(reason)` to get a descriptive string of the error.
"""
def read(path) do
  F.read_file(path)
end

@doc """
Returns binary with the contents of the given filename or raises
File.Error if an error occurs.
"""
def read!(path) do
  case read(path) do
    { :ok, binary } ->
      binary
    { :error, reason } ->
      raise File.Error, reason: reason, action: "read file", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Returns information about the `path`. If it exists, it
returns a `{ :ok, info }` tuple, where info is a
`File.Stat` record. Returns `{ :error, reason }` with
the same reasons as `File.read` if a failure occurs.

## Options

The accepted options are:

* `:time` if the time should be local, universal or posix.
  Default is local.
"""
def stat(path, opts // []) do
  case F.read_file_info(path, opts) do
    {:ok, fileinfo} ->
      # Wrap the raw :file_info tuple into the File.Stat record.
      {:ok, File.Stat.new fileinfo}
    error ->
      error
  end
end

@doc """
Same as `stat` but returns the `File.Stat` directly and
throws `File.Error` if an error is returned.
"""
def stat!(path, opts // []) do
  case stat(path, opts) do
    {:ok, info} -> info
    {:error, reason} ->
      raise File.Error, reason: reason, action: "read file stats", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Writes the given `File.Stat` back to the filesystem at the given
path. Returns `:ok` or `{ :error, reason }`.
"""
def write_stat(path, File.Stat[] = stat, opts // []) do
  # set_elem swaps the record tag back to :file_info so Erlang accepts it.
  F.write_file_info(path, set_elem(stat, 0, :file_info), opts)
end

@doc """
Same as `write_stat/3` but raises an exception if it fails.
Returns `:ok` otherwise.
"""
def write_stat!(path, File.Stat[] = stat, opts // []) do
  case write_stat(path, stat, opts) do
    :ok -> :ok
    { :error, reason } ->
      raise File.Error, reason: reason, action: "write file stats", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Updates modification time (mtime) and access time (atime) of
the given file. File is created if it doesn’t exist.
"""
def touch(path, time // :calendar.local_time) do
  case F.change_time(path, time) do
    # Missing file: create it empty. NOTE(review): `time` is not re-applied
    # to the freshly created file — it keeps its creation timestamps.
    { :error, :enoent } -> write(path, "")
    other -> other
  end
end

@doc """
Same as `touch/1` but raises an exception if it fails.
Returns `:ok` otherwise.
"""
def touch!(path, time // :calendar.local_time) do
  case touch(path, time) do
    :ok -> :ok
    { :error, reason } ->
      raise File.Error, reason: reason, action: "touch", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Copies the contents of `source` to `destination`. Both
parameters can be a filename or an io device opened with `File.open`.
`bytes_count` specifies the number of bytes to count, the default
being `:infinity`.

If file `destination` already exists, it is overriden
by the contents in `source`.

Returns `{ :ok, bytes_copied }` if successful,
`{ :error, reason }` otherwise.

Typical error reasons are the same as in `open/2`,
`read/1` and `write/2`.
"""
def copy(source, destination, bytes_count // :infinity) do
  F.copy(source, destination, bytes_count)
end

@doc """
The same as `copy/3` but raises an File.CopyError if it fails.
Returns the `bytes_copied` otherwise.
"""
def copy!(source, destination, bytes_count // :infinity) do
  case copy(source, destination, bytes_count) do
    { :ok, bytes_count } -> bytes_count
    { :error, reason } ->
      raise File.CopyError, reason: reason, action: "copy",
        source: :unicode.characters_to_binary(source), destination: :unicode.characters_to_binary(destination)
  end
end
@doc """
Copies the contents in `source` to `destination`.
Similar to the command `cp -r` in Unix systems,
this function behaves differently depending
if `source` and `destination` are a file or a directory.

If both are files, it simply copies `source` to
`destination`. However, if `destination` is a directory,
it copies the contents of `source` to `destination/source`
recursively.

If a file already exists in the destination,
it invokes a callback which should return
true if the existing file should be overriden,
false otherwise. It defaults to return true.

It returns `:ok` in case of success, returns
`{ :error, reason }` otherwise.
"""
def cp(source, destination, callback // fn(_, _) -> true end) do
  # cp/3 only handles single files; a directory source needs cp_r/3.
  if dir?(source) do
    { :error, :eisdir }
  else
    output =
      if dir?(destination) do
        # Destination is a directory: copy into destination/basename(source).
        mkdir(destination)
        FN.join(destination, FN.basename(source))
      else
        destination
      end
    case do_cp_file(source, output, callback, []) do
      { :error, _ } = error -> error
      _ -> :ok
    end
  end
end

@doc """
The same as `cp/3`, but raises File.CopyError if it fails.
Returns the list of copied files otherwise.
"""
def cp!(source, destination, callback // fn(_, _) -> true end) do
  case cp(source, destination, callback) do
    :ok -> :ok
    { :error, reason } ->
      raise File.CopyError, reason: reason, action: "copy recursively",
        source: :unicode.characters_to_binary(source), destination: :unicode.characters_to_binary(destination)
  end
end
@doc %B"""
Copies the contents in source to destination.
Similar to the command `cp -r` in Unix systems,
this function behaves differently depending
if `source` and `destination` are a file or a directory.

If both are files, it simply copies `source` to
`destination`. However, if `destination` is a directory,
it copies the contents of `source` to `destination/source`
recursively.

If a file already exists in the destination,
it invokes a callback which should return
true if the existing file should be overriden,
false otherwise. It defaults to return true.

If a directory already exists in the destination
where a file is meant to be (or otherwise), this
function will fail.

This function may fail while copying files,
in such cases, it will leave the destination
directory in a dirty state, where already
copied files won't be removed.

It returns `{ :ok, files_and_directories }` in case of
success with all files and directories copied in no
specific order, `{ :error, reason }` otherwise.

## Examples

    # Copies "a.txt" to "tmp/a.txt"
    File.cp_r "a.txt", "tmp"

    # Copies all files in "samples" to "tmp/samples"
    File.cp_r "samples", "tmp"

    # Copies all files in "samples" to "tmp"
    File.cp_r "samples/.", "tmp"

    # Same as before, but asks the user how to proceed in case of conflicts
    File.cp_r "samples/.", "tmp", fn(source, destination) ->
      IO.gets("Overriding #{destination} by #{source}. Type y to confirm.") == "y"
    end

"""
def cp_r(source, destination, callback // fn(_, _) -> true end) when is_function(callback) do
  output =
    if dir?(destination) || dir?(source) do
      # Copy into destination/basename(source) when either side is a directory.
      mkdir(destination)
      FN.join(destination, FN.basename(source))
    else
      destination
    end
  case do_cp_r(source, output, callback, []) do
    { :error, _ } = error -> error
    res -> { :ok, res }
  end
end

@doc """
The same as `cp_r/3`, but raises File.CopyError if it fails.
Returns the list of copied files otherwise.
"""
def cp_r!(source, destination, callback // fn(_, _) -> true end) do
  case cp_r(source, destination, callback) do
    { :ok, files } -> files
    { :error, reason } ->
      raise File.CopyError, reason: reason, action: "copy recursively",
        source: :unicode.characters_to_binary(source), destination: :unicode.characters_to_binary(destination)
  end
end
# src may be a file or a directory, dest is definitely
# a directory. Returns nil unless an error is found.
defp do_cp_r(src, dest, callback, acc) when is_list(acc) do
  case F.read_link(src) do
    { :ok, link } ->
      # Symlinks are recreated, not followed.
      do_cp_link(link, src, dest, callback, acc)
    _ ->
      case F.list_dir(src) do
        { :ok, files } ->
          case mkdir(dest) do
            # Pre-1.0 syntax: `in` acts as an inline guard on the clause pattern.
            success in [:ok, { :error, :eexist }] ->
              Enum.reduce(files, [dest|acc], fn(x, acc) ->
                do_cp_r(FN.join(src, x), FN.join(dest, x), callback, acc)
              end)
            reason -> reason
          end
        { :error, :enotdir } ->
          # Not a directory: fall back to a plain file copy.
          do_cp_file(src, dest, callback, acc)
        reason -> reason
      end
  end
end

# If we reach this clause, there was an error while
# processing a file (acc is an { :error, _ } tuple, not a list).
defp do_cp_r(_, _, _, acc) do
  acc
end

defp copy_file_mode!(src, dest) do
  src_stat = File.stat!(src)
  dest_stat = File.stat!(dest)
  # Record access syntax: the inner call reads src's mode, the outer call
  # writes it into dest_stat — dest ends up with the source's permission bits.
  File.write_stat!(dest, File.Stat.mode(File.Stat.mode(src_stat), dest_stat))
end

# Both src and dest are files.
defp do_cp_file(src, dest, callback, acc) do
  # :exclusive makes the first attempt fail with :eexist instead of clobbering.
  case copy(src, { dest, [:exclusive] }) do
    { :ok, _ } ->
      copy_file_mode!(src, dest)
      [dest|acc]
    { :error, :eexist } ->
      # Destination exists: the user callback decides whether to override.
      if callback.(src, dest) do
        rm(dest)
        case copy(src, dest) do
          { :ok, _ } ->
            copy_file_mode!(src, dest)
            [dest|acc]
          reason -> reason
        end
      else
        acc
      end
    reason -> reason
  end
end

# Both src and dest are files.
defp do_cp_link(link, src, dest, callback, acc) do
  case F.make_symlink(link, dest) do
    :ok ->
      [dest|acc]
    { :error, :eexist } ->
      if callback.(src, dest) do
        rm(dest)
        case F.make_symlink(link, dest) do
          :ok -> [dest|acc]
          reason -> reason
        end
      else
        acc
      end
    reason -> reason
  end
end
@doc """
Writes `content` to the file `path`. The file is created if it
does not exist. If it exists, the previous contents are overwritten.
Returns `:ok` if successful, or `{:error, reason}` if an error occurs.

Typical error reasons are:

* :enoent  - A component of the file name does not exist.
* :enotdir - A component of the file name is not a directory.
  On some platforms, enoent is returned instead.
* :enospc  - There is no space left on the device.
* :eacces  - Missing permission for writing the file or searching one of the parent directories.
* :eisdir  - The named file is a directory.
"""
def write(path, content, modes // []) do
  F.write_file(path, content, modes)
end

@doc """
Same as `write/3` but raises an exception if it fails, returns `:ok` otherwise.
"""
def write!(path, content, modes // []) do
  case F.write_file(path, content, modes) do
    :ok -> :ok
    { :error, reason } ->
      raise File.Error, reason: reason, action: "write to file", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Tries to delete the file `path`.
Returns `:ok` if successful, or `{:error, reason}` if an error occurs.

Typical error reasons are:

* :enoent  - The file does not exist.
* :eacces  - Missing permission for the file or one of its parents.
* :eperm   - The file is a directory and user is not super-user.
* :enotdir - A component of the file name is not a directory.
  On some platforms, enoent is returned instead.
* :einval  - Filename had an improper type, such as tuple.

## Examples

    File.rm('file.txt')
    #=> :ok

    File.rm('tmp_dir/')
    #=> {:error, :eperm}
"""
def rm(path) do
  F.delete(path)
end

@doc """
Same as `rm`, but raises an exception in case of failure. Otherwise `:ok`.
"""
def rm!(path) do
  case rm(path) do
    :ok -> :ok
    { :error, reason } ->
      raise File.Error, reason: reason, action: "remove file", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Tries to delete the dir at `path`.
Returns `:ok` if successful, or `{:error, reason}` if an error occurs.

## Examples

    File.rmdir('tmp_dir')
    #=> :ok

    File.rmdir('file.txt')
    #=> {:error, :enotdir}
"""
def rmdir(path) do
  F.del_dir(path)
end

@doc """
Same as `rmdir/1`, but raises an exception in case of failure. Otherwise `:ok`.
"""
def rmdir!(path) do
  case rmdir(path) do
    :ok -> :ok
    { :error, reason } ->
      raise File.Error, reason: reason, action: "remove directory", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Remove files and directories recursively at the given `path`.
Symlinks are not followed but simply removed, non existing
files are simply ignored (i.e. doesn't make this function fail).

Returns `{ :ok, files_and_directories }` with all files and
directories removed in no specific order, `{ :error, reason }`
otherwise.

## Examples

    File.rm_rf "samples"
    #=> { :ok, ["samples", "samples/1.txt"] }

    File.rm_rf "unknown"
    #=> { :ok, [] }
"""
def rm_rf(path) do
  do_rm_rf(path, { :ok, [] })
end

defp do_rm_rf(path, { :ok, acc } = entry) do
  case safe_list_dir(path) do
    { :ok, files } ->
      # Depth-first: remove the children, then the directory itself.
      res =
        Enum.reduce files, entry, fn(file, tuple) ->
          do_rm_rf(FN.join(path, file), tuple)
        end
      case res do
        { :ok, acc } ->
          case rmdir(path) do
            :ok -> { :ok, [path|acc] }
            # Directory vanished meanwhile: keep what we already removed.
            { :error, :enoent } -> res
            reason -> reason
          end
        reason -> reason
      end
    { :error, :enotdir } ->
      # A file (or symlink, see safe_list_dir): plain delete.
      case rm(path) do
        :ok -> { :ok, [path|acc] }
        { :error, :enoent } -> entry
        reason -> reason
      end
    { :error, :enoent } -> entry
    reason -> reason
  end
end

# Error accumulator: stop descending and propagate it.
defp do_rm_rf(_, reason) do
  reason
end

defp safe_list_dir(path) do
  # Report symlinks as :enotdir so they are removed, never followed.
  case F.read_link(path) do
    { :ok, _ } -> { :error, :enotdir }
    _ -> F.list_dir(path)
  end
end

@doc """
Same as `rm_rf/1` but raises `File.Error` in case of failures,
otherwise the list of files or directories removed.
"""
def rm_rf!(path) do
  case rm_rf(path) do
    { :ok, files } -> files
    { :error, reason } ->
      raise File.Error, reason: reason, action: "remove files and directories recursively from", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Opens the given `path` according to the given list of modes.

In order to write and read files, one must use the functions
in the IO module. By default, a file is opened in binary mode
which requires the functions `IO.binread`, `IO.binwrite` and
`IO.binreadline` to interact with the file. A developer may pass
`:utf8` as an option when opening the file and then all other
functions from IO are available, since they work directly with
Unicode data.

The allowed modes:

* `:read` - The file, which must exist, is opened for reading.
* `:write` - The file is opened for writing. It is created if it does not exist.
  If the file exists, and if write is not combined with read, the file will be truncated.
* `:append` - The file will be opened for writing, and it will be created if it does not exist.
  Every write operation to a file opened with append will take place at the end of the file.
* `:exclusive` - The file, when opened for writing, is created if it does not exist.
  If the file exists, open will return { :error, :eexist }.
* `:charlist` - When this term is given, read operations on the file will return char lists rather than binaries;
* `:compressed` - Makes it possible to read or write gzip compressed files.
  The compressed option must be combined with either read or write, but not both.
  Note that the file size obtained with `stat/1` will most probably not
  match the number of bytes that can be read from a compressed file.
* `:utf8` - This option denotes how data is actually stored in the disk file and
  makes the file perform automatic translation of characters to and from utf-8.
  If data is sent to a file in a format that cannot be converted to the utf-8
  or if data is read by a function that returns data in a format that cannot cope
  with the character range of the data, an error occurs and the file will be closed.

If a function is given to modes (instead of a list), it dispatches to `open/3`.

Check `http://www.erlang.org/doc/man/file.html#open-2` for more information about
other options as `read_ahead` and `delayed_write`.

This function returns:

* { :ok, io_device } - The file has been opened in the requested mode.
  `io_device` is actually the pid of the process which handles the file.
  This process is linked to the process which originally opened the file.
  If any process to which the io_device is linked terminates, the file will
  be closed and the process itself will be terminated. An io_device returned
  from this call can be used as an argument to the `IO` module functions.
* { :error, reason } - The file could not be opened.

## Examples

    { :ok, file } = File.open("foo.tar.gz", [:read, :compressed])
    IO.readline(file)
    File.close(file)

"""
def open(path, modes // [])

def open(path, modes) when is_list(modes) do
  F.open(path, open_defaults(modes, true))
end

# A function as second argument dispatches to open/3 with default modes.
def open(path, function) when is_function(function) do
  open(path, [], function)
end

@doc """
Similar to `open/2` but expects a function as last argument.
The file is opened, given to the function as argument and
automatically closed after the function returns, regardless
if there was an error or not.

It returns `{ :ok, function_result }` in case of success,
`{ :error, reason }` otherwise.

Do not use this function with :delayed_write option
since automatically closing the file may fail
(as writes are delayed).

## Examples

    File.open("file.txt", [:read, :write], fn(file) ->
      IO.readline(file)
    end)

"""
def open(path, modes, function) do
  case open(path, modes) do
    { :ok, device } ->
      try do
        { :ok, function.(device) }
      after
        # Always close, even when `function` raised.
        :ok = close(device)
      end
    other -> other
  end
end

@doc """
Same as `open/2` but raises an error if file could not be opened.
Returns the `io_device` otherwise.
"""
def open!(path, modes // []) do
  case open(path, modes) do
    { :ok, device } -> device
    { :error, reason } ->
      raise File.Error, reason: reason, action: "open", path: :unicode.characters_to_binary(path)
  end
end

@doc """
Same as `open/3` but raises an error if file could not be opened.
Returns the function result otherwise.
"""
def open!(path, modes, function) do
  case open(path, modes, function) do
    # `device` here is actually the wrapped function result (open/3 returns
    # { :ok, function_result }); the variable name is historical.
    { :ok, device } -> device
    { :error, reason } ->
      raise File.Error, reason: reason, action: "open", path: :unicode.characters_to_binary(path)
  end
end
@doc """
Gets the current working directory. In rare circumstances, this function can
fail on Unix. It may happen if read permission does not exist for the parent
directories of the current directory. For this reason, returns `{ :ok, cwd }`
in case of success, `{ :error, reason }` otherwise.
"""
def cwd() do
  case F.get_cwd do
    # Erlang returns a charlist; normalize it to a binary.
    { :ok, cwd } -> { :ok, :unicode.characters_to_binary(cwd) }
    { :error, _ } = error -> error
  end
end

@doc """
The same as `cwd/0`, but raises an exception if it fails.
"""
def cwd!() do
  case F.get_cwd do
    { :ok, cwd } -> :unicode.characters_to_binary(cwd)
    { :error, reason } ->
      raise File.Error, reason: reason, action: "get current working directory"
  end
end
@doc """
Sets the current working directory. Returns `:ok` if successful,
`{ :error, reason }` otherwise.
"""
def cd(path) do
  F.set_cwd(path)
end

@doc """
The same as `cd/1`, but raises an exception if it fails.
"""
def cd!(path) do
  case F.set_cwd(path) do
    :ok -> :ok
    { :error, reason } ->
      raise File.Error, reason: reason, action: "set current working directory to", path: :unicode.characters_to_binary(path)
  end
end

@doc """
Changes the current directory to the given `path`,
executes the given function and then revert back
to the previous path regardless if there is an exception.

Raises an error if retrieving or changing the current
directory fails.
"""
def cd!(path, function) do
  old = cwd!
  cd!(path)
  try do
    function.()
  after
    # Restore the previous directory even when `function` raised.
    cd!(old)
  end
end
@doc """
Returns list of files in the given directory.

It returns `{ :ok, [files] }` in case of success,
`{ :error, reason }` otherwise.
"""
def ls(path // ".") do
  case F.list_dir(path) do
    # Pre-1.0 partial application: `fun(&1)` is shorthand for fn(x) -> fun(x) end.
    { :ok, file_list } -> { :ok, Enum.map file_list, :unicode.characters_to_binary(&1) }
    { :error, _ } = error -> error
  end
end

@doc """
The same as `ls/1` but raises `File.Error`
in case of an error.
"""
def ls!(dir // ".") do
  case ls(dir) do
    { :ok, value } -> value
    { :error, reason } ->
      raise File.Error, reason: reason, action: "list directory", path: :unicode.characters_to_binary(dir)
  end
end
@doc """
Closes the file referenced by `io_device`. It mostly returns `:ok`, except
for some severe errors such as out of memory.

Note that if the option `:delayed_write` was used when opening the file,
`close/1` might return an old write error and not even try to close the file.
See `open/2`.
"""
def close(io_device), do: F.close(io_device)
@doc """
Converts the file device into an iterator that can be
passed into `Enum`. The device is iterated line
by line, at the end of iteration the file is closed.
This reads the file as utf-8. Check out `File.biniterator`
to handle the file as a raw binary.

## Examples

An example that lazily iterates a file replacing all double
quotes per single quotes and write each line to a target file
is shown below:

    { :ok, device } = File.open("README.md")
    source = File.iterator(device)
    File.open "NEWREADME.md", [:write], fn(target) ->
      Enum.each source, fn(line) ->
        IO.write target, Regex.replace(%r/"/, line, "'")
      end
    end

"""
def iterator(device) do
  # Returns a two-arity reducer function (fun, acc) as expected by Enum.
  fn(fun, acc) ->
    do_iterator(device, fun, acc)
  end
end
@doc """
Opens the given `file` with the given `mode` (defaults to `[]`) and
returns its iterator. The returned iterator will
fail for the same reasons as `File.open!`. Note
that the file is opened when the iteration begins.
"""
def iterator!(file, mode // []) do
  fn(fun, acc) ->
    device = open!(file, mode)
    try do
      do_iterator(device, fun, acc)
    after
      # The device is always closed once iteration finishes or fails.
      F.close(device)
    end
  end
end
@doc """
Converts the file device into an iterator that can
be passed into `Enum` to iterate line by line as a
binary. Check `iterator/1` for more information.
"""
def biniterator(device) do
  # Returns a two-arity reducer function (fun, acc) as expected by Enum.
  fn(fun, acc) ->
    do_biniterator(device, fun, acc)
  end
end
@doc """
Opens the given `file` with the given `mode` (defaults to `[]`) and
returns its biniterator. The returned iterator will
fail for the same reasons as `File.open!`. Note
that the file is opened when the iteration begins.
"""
def biniterator!(file, mode // []) do
  fn(fun, acc) ->
    device = open!(file, mode)
    try do
      do_biniterator(device, fun, acc)
    after
      # The device is always closed once iteration finishes or fails.
      F.close(device)
    end
  end
end
## Helpers

# Normalizes the option list given to `open`. The `add_binary` flag tracks
# whether `:binary` should be appended once the list is exhausted.
# Clause order matters: the specific options must match before the
# catch-all [h|t] clause.

# `:charlist` disables the implicit `:binary` option.
defp open_defaults([:charlist|t], _add_binary) do
  open_defaults(t, false)
end

# `:utf8` is shorthand for the `{ :encoding, :utf8 }` tuple.
defp open_defaults([:utf8|t], add_binary) do
  open_defaults([{ :encoding, :utf8 }|t], add_binary)
end

# Any other option is kept as-is.
defp open_defaults([h|t], add_binary) do
  [h|open_defaults(t, add_binary)]
end

defp open_defaults([], true), do: [:binary]
defp open_defaults([], false), do: []
# Reads the device line by line (as utf-8), folding each line into `acc`
# with `fun`. Raises `File.IteratorError` on read errors.
#
# Fix: the original head was `(device, acc, fun)`, which contradicted both
# the call site in `iterator/1` — `do_iterator(device, fun, acc)` — and the
# recursive call below, so the accumulator would have been invoked as a
# function. The argument order is now consistently `(device, fun, acc)`.
defp do_iterator(device, fun, acc) do
  case :io.get_line(device, '') do
    :eof ->
      acc
    { :error, reason } ->
      raise File.IteratorError, reason: reason
    data ->
      do_iterator(device, fun, fun.(data, acc))
  end
end
# Binary counterpart of `do_iterator/3`: reads raw lines via `F.read_line/1`
# and folds them into `acc` with `fun`.
#
# Fixes two defects: the original head `(device, acc, fun)` did not match
# the `biniterator/1` call site `do_biniterator(device, fun, acc)`, and the
# recursive call went through `do_iterator/3` (the utf-8 reader) instead of
# recursing on itself.
defp do_biniterator(device, fun, acc) do
  case F.read_line(device) do
    :eof ->
      acc
    { :error, reason } ->
      raise File.IteratorError, reason: reason
    { :ok, data } ->
      do_biniterator(device, fun, fun.(data, acc))
  end
end
end
|
lib/elixir/lib/file.ex
| 0.786336
| 0.5816
|
file.ex
|
starcoder
|
defmodule Shmex.Native do
  @moduledoc """
  This module provides natively implemented functions allowing low-level
  operations on Posix shared memory. Use with caution!
  """

  use Bundlex.Loader, nif: :shmex

  @doc """
  Creates shared memory segment and adds a guard for it.

  The guard associated with this memory segment is placed in returned
  `Shmex` struct. When the guard resource is deallocated by BEAM,
  the shared memory is unlinked and will disappear from the system when the
  last process using it unmaps it.
  """
  @spec allocate(Shmex.t()) ::
          {:ok, Shmex.t()} | {:error, {:file.posix(), :ftruncate}}
  defnif allocate(shm)

  @doc """
  Creates guard for existing shared memory.

  This function should be only used when `Shmex` struct was created by
  some other NIF and even though the SHM exists, its guard field is set to `nil`.
  Trying to use it with SHM obtained via `allocate/1` will result in error.
  See also docs for `allocate/1`.
  """
  @spec add_guard(Shmex.t()) :: {:ok, Shmex.t()} | {:error, :already_guarded}
  defnif add_guard(shm)

  @doc """
  Sets the capacity of shared memory area and updates the Shmex struct accordingly.
  """
  @spec set_capacity(Shmex.t(), capacity :: pos_integer()) ::
          {:ok, Shmex.t()} | {:error, {:file.posix(), :shm_open | :ftruncate}}
  defnif set_capacity(shm, capacity)

  @doc """
  Reads the contents of shared memory and returns it as a binary.
  """
  @spec read(Shmex.t()) :: {:ok, binary} | {:error, {:file.posix(), :shm_open | :mmap}}
  def read(%Shmex{size: size} = shm) do
    # Delegates to read/2, reading the whole currently-used area.
    read(shm, size)
  end

  @doc """
  Reads `cnt` bytes from the shared memory and returns it as a binary.

  `cnt` should not be greater than `shm.size`.
  """
  @spec read(Shmex.t(), read_size :: non_neg_integer()) ::
          {:ok, binary()} | {:error, :invalid_read_size | {:file.posix(), :shm_open | :mmap}}
  defnif read(shm, read_size)

  @doc """
  Writes the binary into the shared memory.

  Overwrites the existing content. Increases the capacity of shared memory
  to fit the data.
  """
  @spec write(Shmex.t(), data :: binary()) ::
          {:ok, Shmex.t()} | {:error, {:file.posix(), :shm_open | :mmap}}
  defnif write(shm, data)

  @doc """
  Splits the contents of shared memory area into two by moving the data past
  the specified position into a new shared memory.

  `shm` has to be an existing shared memory (obtained via `allocate/1`).
  It virtually trims the existing shared memory to `position` bytes
  by setting `size` to `position` (The actual data is still present)
  and the overlapping data is copied into the new shared memory area.
  """
  @spec split_at(Shmex.t(), position :: non_neg_integer()) ::
          {:ok, {Shmex.t(), Shmex.t()}}
          | {:error, {:file.posix(), :shm_open | :mmap | :ftruncate}}
  defnif split_at(shm, position)

  @doc """
  Concatenates two shared memory areas by appending the data from the second
  at the end of the first one. Fails with `{:error, {:einval, :ftruncate}}` if
  OS does not support changing shared memory capacity.

  The first shared memory is a target that will contain data from both shared memory areas.
  Its capacity will be set to the sum of sizes of both shared memory areas.
  The second one, the source, will remain unmodified.
  """
  @spec append(target :: Shmex.t(), source :: Shmex.t()) ::
          {:ok, Shmex.t()} | {:error, {:file.posix(), :shm_open | :mmap | :ftruncate}}
  defnif append(target, source)

  @doc """
  Ensures that shared memory is not garbage collected at the point of executing
  this function.

  Useful when passing shared memory to other OS process, to prevent it
  from being garbage collected until received and mapped by that process.
  """
  @spec ensure_not_gc(Shmex.t()) :: :ok
  defnif ensure_not_gc(shm)

  @doc """
  Trims shared memory capacity to match its size.
  """
  @spec trim(Shmex.t()) :: {:ok, Shmex.t()} | {:error, {:file.posix(), :shm_open | :ftruncate}}
  def trim(%Shmex{size: size} = shm) do
    shm |> set_capacity(size)
  end

  @doc """
  Drops `bytes` bytes from the beginning of shared memory area and
  trims it to match the new size.
  """
  @spec trim(Shmex.t(), bytes :: non_neg_integer) ::
          {:ok, Shmex.t()} | {:error, {:file.posix(), :shm_open | :mmap}}
  def trim(shm, bytes) do
    # First drop the leading bytes, then shrink capacity to the new size.
    with {:ok, trimmed_front} <- trim_leading(shm, bytes),
         {:ok, result} <- trim(trimmed_front) do
      {:ok, result}
    end
  end

  # Native helper backing trim/2: drops `offset` bytes from the front.
  defnifp trim_leading(shm, offset)
end
|
lib/shmex_native.ex
| 0.84137
| 0.531392
|
shmex_native.ex
|
starcoder
|
defmodule FusionDsl.NativeImpl do
  @moduledoc """
  This module helps with building proxy Fusion modules to native erlang/elixir
  modules.

  A native package has `type: :native` in its opts.
  Example:
  ```
  config :fusion_dsl, packages: [{String, [type: :native]}, ...]
  ```
  Refer to [packages](packages.html#existing-elixir-erlang-modules-as-packages)
  docs for more info.
  """

  @doc """
  Creates proxy modules for native packages.

  Returns a new list of all packages and manipulates the native
  package list with new module names.

  NOTE(review): the result is accumulated by prepending, so the returned
  list is in reverse order relative to the input — confirm callers are
  order-insensitive before changing this.
  """
  @spec create_native_packages(list()) :: list()
  def create_native_packages(packages) do
    Enum.reduce(packages, [], fn package, acc ->
      {module, opts} = package

      case opts[:type] do
        :native ->
          # Proxy module lives under the FusionDsl.Dyn namespace.
          pack_mod = String.to_atom("Elixir.FusionDsl.Dyn.#{module}")
          create_module_not_exists(module, pack_mod, opts)
          [{pack_mod, opts} | acc]

        _ ->
          [package | acc]
      end
    end)
  end

  # Creates module if module does not exist
  defp create_module_not_exists(module, pack_mod, opts) do
    if not function_exported?(pack_mod, :__info__, 1) do
      create_fusion_module(module, pack_mod, opts)
    end
  end

  # Builds and compiles the proxy module `pack_mod`, forwarding each exposed
  # function of `module` through FusionDsl's calling convention.
  defp create_fusion_module(module, pack_mod, opts) do
    # Get functions names of module
    module_functions =
      case opts[:functions] do
        nil ->
          # All functions of a module
          module
          |> :erlang.apply(:__info__, [:functions])
          |> Enum.reduce([], fn {name, _}, acc -> [name | acc] end)
          |> Enum.uniq()

        list when is_list(list) ->
          # Specific user set functions
          list
      end

    # Quote implementation of each function
    impl_functions =
      Enum.reduce(module_functions, [], fn fn_name, acc ->
        data =
          quote do
            @doc "#{unquote(get_function_doc(module, fn_name))}"
            def unquote(fn_name)({unquote(fn_name), _ctx, args}, env) do
              {:ok, args, env} = prep_arg(env, args)
              {:ok, :erlang.apply(unquote(module), unquote(fn_name), args), env}
            end
          end

        [data | acc]
      end)

    # Quote implementation of module
    impl_contents =
      quote do
        use FusionDsl.Impl

        @impl true
        def __list_fusion_functions__, do: unquote(module_functions)

        unquote(impl_functions)
      end

    try do
      Module.create(pack_mod, impl_contents, Macro.Env.location(__ENV__))
    rescue
      CompileError ->
        # As get_packages in FusionDsl module will get called async,
        # Sometimes this method would be called twice.
        # To fix this problem we will ignore module exists exception
        # and rely on the Tests below for the module.
        :ok
    end

    # Test module and raise if its not returning function list as expected.
    if :erlang.apply(pack_mod, :__list_fusion_functions__, []) !=
         module_functions do
      raise "Module #{pack_mod} is not returning __list_fusion_function__ as expected!"
    end
  end

  # Returns documentation of native functions with argument lists
  defp get_function_doc(module, function) do
    # Fix: Code.get_docs/2 returns nil when the module has no docs chunk
    # (e.g. compiled without docs); treat that as "no docs" instead of
    # crashing inside Enum.find/2.
    docs = Code.get_docs(module, :docs) || []

    doc =
      Enum.find(docs, fn {{name, _}, _, kind, _, _} ->
        name == function and kind == :def
      end)

    case doc do
      {{^function, _}, _, _, args, doc} when is_binary(doc) ->
        arg_docs =
          Enum.reduce(args, "## Arguments\n", fn {name, _, _}, acc ->
            acc <> "\n - #{name}"
          end)

        doc <> "\n" <> arg_docs

      _ ->
        "No native documentation available!"
    end
  end
end
|
lib/fusion_dsl/native_impl.ex
| 0.860779
| 0.681853
|
native_impl.ex
|
starcoder
|
defmodule Abbr.Mnesia.Sync do
  @moduledoc """
  Ensures cache stays in sync across cluster.

  Flow:
  - monitors the `:inconsistent_database` mnesia system event
  - and merges local cache with (potentially) out of sync node
  """

  alias Abbr.Cache
  alias Abbr.Mnesia.Local
  alias Abbr.Mnesia.Url
  alias Memento.Schema
  alias Memento.Table
  alias Phoenix.PubSub

  use GenServer

  require Logger

  @spec start_link([any()]) :: {:ok, pid()}
  def start_link(opts) do
    {:ok, pid} = GenServer.start_link(__MODULE__, :ok, [{:name, __MODULE__} | opts])
    # Kick off cluster synchronization asynchronously, after init/1 returns.
    :ok = GenServer.cast(__MODULE__, :synchronize_on_startup)
    {:ok, pid}
  end

  @impl GenServer
  def init(:ok) do
    Logger.metadata(node: Node.self())
    # Subscribe to Mnesia system events so we receive :inconsistent_database.
    :mnesia.subscribe(:system)
    {:ok, nil}
  end

  @doc """
  The node is either the 1st node in the cluster in which case:
  - it needs to create the table

  or, it needs to join the Mnesia cluster, which requires:
  - registering the node via `:mnesia.change_config/2`, which effectively joins the Mnesia cluster
  - waiting for table to become available

  We're using Memento master branch due to usage of:
  - `Memento.wait/1`
  - `Memento.add_nodes/1`
  When [PR 20](https://github.com/sheharyarn/memento/pull/20) is released, we can move to release version.
  """
  @impl GenServer
  def handle_cast(:synchronize_on_startup, state) do
    if Enum.empty?(Node.list()) do
      # First node in the cluster: create the schema/table locally.
      Schema.set_storage_type(Node.self(), :ram_copies)
      Table.create(Url)
      Memento.wait([Url])
      PubSub.broadcast(Abbr.PubSub, Cache.events_topic(), {:cache_event, :synchronized})
    else
      # Join the existing Mnesia cluster and wait for the table to be ready.
      Memento.add_nodes(Node.list())
      Memento.wait([Url])
    end

    {:noreply, state}
  end

  @impl GenServer
  def handle_cast({:merge, cached_data}, state) do
    Memento.wait([Url])
    :ok = Local.merge(cached_data)
    PubSub.broadcast(Abbr.PubSub, Cache.events_topic(), {:cache_event, :synchronized})
    {:noreply, state}
  end

  @doc """
  Catches the `:inconsistent_database` Mnesia event.

  It occurs every time a node joins the cluster, for which the schema is not in sync with this node.
  It can even occur if there are no differences in the underlying data,
  e.g. if during network split there were no new data added to respective tables.

  Every time this event occurs, we need to reconcile the data.
  It needs to be done manually, since Mnesia doesn't know which reconciliation method suits our data.
  """
  @impl true
  def handle_info({:mnesia_system_event, {:inconsistent_database, _, node}}, state) do
    # Global transaction so only one node performs the heal at a time.
    :global.trans({__MODULE__, self()}, fn -> join(node) end)
    {:noreply, state}
  end

  @impl true
  def handle_info(_, state) do
    {:noreply, state}
  end

  # Heals a netsplit with `node`, unless it is already a running db node.
  defp join(node) do
    :running_db_nodes
    |> Memento.system()
    |> Enum.member?(node)
    |> case do
      true ->
        Logger.info("Already healed and joined #{node}")
        :ok

      false ->
        Logger.warn("Detected netsplit on #{node}")
        do_join(node)
    end
  end

  # Reconnects Mnesia to `node`; when schemas were merged, pushes our local
  # cache export to the remote node so both sides converge.
  defp do_join(node) do
    :mnesia_controller.connect_nodes([node], fn merge_fun ->
      case merge_fun.([Url]) do
        {:merged, _, _} = result ->
          :ok = GenServer.cast({__MODULE__, node}, {:merge, Local.export()})
          result

        other ->
          other
      end
    end)
  end
end
|
lib/abbr/mnesia/sync.ex
| 0.825941
| 0.450843
|
sync.ex
|
starcoder
|
defmodule Typesense.Documents do
  @moduledoc """
  The `Typesense.Documents` module is the service implementation for Typesense' `Documents` API Resource.
  """

  @doc """
  Index a document.

  ## Examples
  ```elixir
  iex> document = %{
    company_name: "<NAME>",
    num_employees: 5215,
    country: "USA"
  }
  iex> Typesense.Documents.create(collection, document)
  {:ok, document}
  ```
  """
  def create(collection, document) do
    "/collections/#{collection}/documents"
    |> Typesense.post(document)
    |> respond()
  end

  @doc """
  Retrieve a document by its `id`.

  ## Examples
  ```elixir
  iex> Typesense.Documents.retrieve(collection, id)
  {:ok, document}
  ```
  """
  def retrieve(collection, id) do
    "/collections/#{collection}/documents/#{id}"
    |> Typesense.get()
    |> respond()
  end

  @doc """
  Search for documents.

  ## Examples
  ```elixir
  iex> search_params = %{
    q: "stark",
    query_by: "company_name",
    filter_by: "num_employees:>100",
    sort_by: "num_employees:desc"
  }
  iex> Typesense.Documents.search(collection, search_params)
  iex> {:ok, documents}
  ```
  """
  def search(collection, search_params) do
    # Fix: the original bound an unused `query_string` variable
    # (Typesense.Utilities.to_query_string/1) that was never passed to the
    # request; the params are sent via the `:query` option instead.
    "/collections/#{collection}/documents/search"
    |> Typesense.get(query: search_params)
    |> respond()
  end

  @doc """
  Update a document.

  ## Examples
  ```elixir
  iex> Typesense.Documents.update(collection, id, document)
  {:ok, document}
  ```
  """
  def update(collection, id, document) do
    "/collections/#{collection}/documents/#{id}"
    |> Typesense.patch(document, headers: [{"content-type", "application/json"}])
    |> respond()
  end

  @doc """
  Delete a document.

  ## Options
  * `:id` - The `id` of the document to be deleted
  * `:query` - The `map` of params to filter the delete by

  ## Examples
  ```elixir
  iex> Typesense.Documents.delete(collection, id)
  {:ok, _document}
  ```
  """
  def delete(collection, id) when is_integer(id) or is_binary(id) do
    "/collections/#{collection}/documents/#{id}"
    |> Typesense.delete()
    |> respond()
  end

  def delete(collection, query) when is_map(query) do
    "/collections/#{collection}/documents"
    |> Typesense.delete(query: query)
    |> respond()
  end

  @doc """
  Export documents from a collection.

  ## Examples
  ```elixir
  iex> Typesense.Documents.export(collection)
  [%{}, ...]
  ```
  """
  def export(collection) do
    "/collections/#{collection}/documents/export"
    |> Typesense.get()
    |> respond()
  end

  @doc """
  Import documents into a collection.

  ## Examples
  ```elixir
  iex> documents = [%{
    id: "124",
    company_name: "<NAME>",
    num_employees: 5215,
    country: "USA"
  }]
  iex> Typesense.Documents.import(collection, documents, :create)
  {:ok, documents}
  ```
  """
  def import(collection, documents, action \\ :create) do
    "/collections/#{collection}/documents/import?action=#{action}"
    |> Typesense.post(documents, headers: [{"content-type", "text/plain"}])
    |> respond()
  end

  # Shared response handling: unwrap the HTTP envelope on success, pass
  # transport errors through unchanged.
  defp respond({:ok, env}), do: Typesense.Http.handle_response(env)
  defp respond({:error, reason}), do: {:error, reason}
end
|
lib/typesense/documents/documents.ex
| 0.885823
| 0.792022
|
documents.ex
|
starcoder
|
defmodule RDF.Turtle.Decoder do
  @moduledoc """
  A decoder for Turtle serializations to `RDF.Graph`s.

  As for all decoders of `RDF.Serialization.Format`s, you normally won't use these
  functions directly, but via one of the `read_` functions on the `RDF.Turtle` format
  module or the generic `RDF.Serialization` module.

  ## Options

  - `:base`: allows to specify the base URI to be used against relative URIs
    when no base URI is defined with a `@base` directive within the document
  """

  use RDF.Serialization.Decoder

  import RDF.Serialization.ParseHelper, only: [error_description: 1]

  alias RDF.{Graph, IRI}

  defmodule State do
    # Parser state threaded through graph building: the current base IRI,
    # the prefix->namespace map and a counter for generated blank nodes.
    defstruct base_iri: nil, namespaces: %{}, bnode_counter: 0

    def add_namespace(%State{namespaces: namespaces} = state, ns, iri) do
      %State{state | namespaces: Map.put(namespaces, ns, iri)}
    end

    def ns(%State{namespaces: namespaces}, prefix) do
      namespaces[prefix]
    end

    def next_bnode(%State{bnode_counter: bnode_counter} = state) do
      {RDF.bnode("b#{bnode_counter}"), %State{state | bnode_counter: bnode_counter + 1}}
    end
  end

  @impl RDF.Serialization.Decoder
  @spec decode(String.t(), keyword) :: {:ok, Graph.t()} | {:error, any}
  def decode(content, opts \\ []) do
    # :base_iri wins over :base; both fall back to the configured default.
    base_iri =
      Keyword.get_lazy(
        opts,
        :base_iri,
        fn -> Keyword.get_lazy(opts, :base, fn -> RDF.default_base_iri() end) end
      )

    with {:ok, tokens, _} <- tokenize(content),
         {:ok, ast} <- parse(tokens) do
      build_graph(ast, base_iri && RDF.iri(base_iri))
    else
      {:error, {error_line, :turtle_lexer, error_descriptor}, _error_line_again} ->
        {:error,
         "Turtle scanner error on line #{error_line}: #{error_description(error_descriptor)}"}

      {:error, {error_line, :turtle_parser, error_descriptor}} ->
        {:error,
         "Turtle parser error on line #{error_line}: #{error_description(error_descriptor)}"}
    end
  end

  def tokenize(content), do: content |> to_charlist |> :turtle_lexer.string()

  def parse([]), do: {:ok, []}
  def parse(tokens), do: tokens |> :turtle_parser.parse()

  # Folds the parsed AST into an RDF.Graph, threading State through
  # directives (@prefix/@base) and triple blocks.
  defp build_graph(ast, base_iri) do
    {graph, %State{namespaces: namespaces, base_iri: base_iri}} =
      Enum.reduce(ast, {RDF.Graph.new(), %State{base_iri: base_iri}}, fn
        {:triples, triples_ast}, {graph, state} ->
          with {statements, state} = triples(triples_ast, state) do
            {RDF.Graph.add(graph, statements), state}
          end

        {:directive, directive_ast}, {graph, state} ->
          {graph, directive(directive_ast, state)}
      end)

    {:ok,
     if Enum.empty?(namespaces) do
       graph
     else
       RDF.Graph.add_prefixes(graph, namespaces)
     end
     |> RDF.Graph.set_base_iri(base_iri)}
  rescue
    error -> {:error, Exception.message(error)}
  end

  defp directive({:prefix, {:prefix_ns, _, ns}, iri}, state) do
    if IRI.absolute?(iri) do
      State.add_namespace(state, ns, iri)
    else
      # Relative prefix IRIs are resolved against the current base.
      with absolute_iri = IRI.absolute(iri, state.base_iri) do
        State.add_namespace(state, ns, to_string(absolute_iri))
      end
    end
  end

  defp directive({:base, iri}, %State{base_iri: base_iri} = state) do
    cond do
      IRI.absolute?(iri) ->
        %State{state | base_iri: RDF.iri(iri)}

      base_iri != nil ->
        with absolute_iri = IRI.absolute(iri, base_iri) do
          %State{state | base_iri: absolute_iri}
        end

      true ->
        raise "Could not resolve relative IRI '#{iri}', no base iri provided"
    end
  end

  defp triples({:blankNodePropertyList, _} = ast, state) do
    with {_, statements, state} = resolve_node(ast, [], state) do
      {statements, state}
    end
  end

  defp triples({subject, predications}, state) do
    with {subject, statements, state} = resolve_node(subject, [], state) do
      Enum.reduce(predications, {statements, state}, fn {predicate, objects},
                                                        {statements, state} ->
        with {predicate, statements, state} = resolve_node(predicate, statements, state) do
          Enum.reduce(objects, {statements, state}, fn object, {statements, state} ->
            with {object, statements, state} = resolve_node(object, statements, state) do
              {[{subject, predicate, object} | statements], state}
            end
          end)
        end
      end)
    end
  end

  defp resolve_node({:prefix_ln, line_number, {prefix, name}}, statements, state) do
    if ns = State.ns(state, prefix) do
      {RDF.iri(ns <> local_name_unescape(name)), statements, state}
    else
      raise "line #{line_number}: undefined prefix #{inspect(prefix)}"
    end
  end

  defp resolve_node({:prefix_ns, line_number, prefix}, statements, state) do
    if ns = State.ns(state, prefix) do
      {RDF.iri(ns), statements, state}
    else
      raise "line #{line_number}: undefined prefix #{inspect(prefix)}"
    end
  end

  defp resolve_node({:relative_iri, relative_iri}, _, %State{base_iri: nil}) do
    raise "Could not resolve relative IRI '#{relative_iri}', no base iri provided"
  end

  defp resolve_node({:relative_iri, relative_iri}, statements, state) do
    {IRI.absolute(relative_iri, state.base_iri), statements, state}
  end

  defp resolve_node({:anon}, statements, state) do
    with {node, state} = State.next_bnode(state) do
      {node, statements, state}
    end
  end

  defp resolve_node({:blankNodePropertyList, property_list}, statements, state) do
    # A [ ... ] property list becomes a fresh blank node plus its own triples.
    with {subject, state} = State.next_bnode(state),
         {new_statements, state} = triples({subject, property_list}, state) do
      {subject, statements ++ new_statements, state}
    end
  end

  defp resolve_node(
         {{:string_literal_quote, _line, value}, {:datatype, datatype}},
         statements,
         state
       ) do
    with {datatype, statements, state} = resolve_node(datatype, statements, state) do
      {RDF.literal(value, datatype: datatype), statements, state}
    end
  end

  defp resolve_node({:collection, []}, statements, state) do
    {RDF.nil(), statements, state}
  end

  defp resolve_node({:collection, elements}, statements, state) do
    # Builds the rdf:List linked structure (first/rest/nil) for ( ... ).
    with {first_list_node, state} = State.next_bnode(state),
         [first_element | rest_elements] = elements,
         {first_element_node, statements, state} = resolve_node(first_element, statements, state),
         first_statement = [{first_list_node, RDF.first(), first_element_node}] do
      {last_list_node, statements, state} =
        Enum.reduce(
          rest_elements,
          {first_list_node, statements ++ first_statement, state},
          fn element, {list_node, statements, state} ->
            with {element_node, statements, state} = resolve_node(element, statements, state),
                 {next_list_node, state} = State.next_bnode(state) do
              {next_list_node,
               statements ++
                 [
                   {list_node, RDF.rest(), next_list_node},
                   {next_list_node, RDF.first(), element_node}
                 ], state}
            end
          end
        )

      {first_list_node, statements ++ [{last_list_node, RDF.rest(), RDF.nil()}], state}
    end
  end

  defp resolve_node(node, statements, state), do: {node, statements, state}

  defp local_name_unescape(string),
    do: Macro.unescape_string(string, &local_name_unescape_map(&1))

  # Characters that may appear backslash-escaped in Turtle local names.
  @reserved_characters ~c[~.-!$&'()*+,;=/?#@%_]

  defp local_name_unescape_map(e) when e in @reserved_characters, do: e
  defp local_name_unescape_map(_), do: false
end
|
lib/rdf/serializations/turtle_decoder.ex
| 0.817319
| 0.616936
|
turtle_decoder.ex
|
starcoder
|
defmodule Spotify.Tracks do
  @moduledoc """
  For manipulating tracks.

  [Spotify Docs](https://beta.developer.spotify.com/documentation/web-api/reference/tracks/)
  """

  alias Spotify.Tracks.{TrackFull, TrackLink}
  alias Spotify.{ExternalIds, ExternalUrls, Restrictions, Timestamp}
  alias Spotify.Albums.AlbumSimple
  alias Spotify.Artists.ArtistSimple

  @typedoc """
  The album on which the track appears.
  The album object includes a link in href to full information about the album.
  """
  @type album :: AlbumSimple.t

  @typedoc """
  The artists who performed the track. Each artist object includes a
  link in href to more detailed information about the artist.
  """
  @type artists :: [ArtistSimple.t]

  @typedoc """
  A list of the countries in which the track can be played,
  identified by their ISO 3166-1 alpha-2 code.
  """
  @type available_markets :: [String.t] | nil

  @typedoc """
  The disc number (usually 1 unless the album consists of more than one disc).
  """
  @type disc_number :: integer

  @typedoc """
  The track length in milliseconds
  """
  @type duration_ms :: integer

  @typedoc """
  Whether or not the track has explicit lyrics
  ( `true` = yes it does; `false` = no it does not OR unknown).
  """
  @type explicit :: boolean

  @typedoc """
  Known external IDs for the track.
  """
  @type external_ids :: ExternalIds.t

  @typedoc """
  Known external URLs for this track.
  """
  # Fix: the original spec was `ExternalUrls` (the singleton module atom),
  # inconsistent with the sibling `ExternalIds.t` — the struct type was meant.
  @type external_urls :: ExternalUrls.t

  @typedoc """
  A link to the Web API endpoint providing full details of the track.
  """
  @type href :: String.t

  @typedoc """
  The Spotify ID for the track.
  """
  @type id :: String.t

  @typedoc """
  Part of the response when Track Relinking is applied. If true,
  the track is playable in the given market. Otherwise false.
  """
  @type is_playable :: boolean

  @typedoc """
  Part of the response when Track Relinking is applied, and the
  requested track has been replaced with different track.
  The track in the linked_from object contains information
  about the originally requested track.
  """
  @type linked_from :: TrackLink.t | nil

  @typedoc """
  Part of the response when Track Relinking is applied,
  the original track is not available in the given market,
  and Spotify did not have any tracks to relink it with.
  The track response will still contain metadata for the original track,
  and a restrictions object containing the reason why the track
  is not available: `"restrictions" : {"reason" : "market"}`
  """
  @type restrictions :: Restrictions.t

  @typedoc """
  The name of the track.
  """
  @type name :: String.t

  @typedoc """
  The popularity of the track. The value will be between 0 and 100,
  with 100 being the most popular. The popularity of a track is a
  value between 0 and 100, with 100 being the most popular.
  The popularity is calculated by algorithm and is based, in the most part,
  on the total number of plays the track has had and how recent those plays are.
  Generally speaking, songs that are being played a lot now will have a higher
  popularity than songs that were played a lot in the past.
  Duplicate tracks (e.g. the same track from a single and an album) are rated
  independently. Artist and album popularity is derived mathematically from
  track popularity. Note that the popularity value may lag actual popularity
  by a few days: the value is not updated in real time.
  """
  @type popularity :: integer

  @typedoc """
  A link to a 30 second preview (MP3 format) of the track.
  """
  @type preview_url :: String.t

  @typedoc """
  The number of the track. If an album has several discs,
  the track number is the number on the specified disc.
  """
  @type track_number :: integer

  @typedoc """
  The object type: `track`.
  """
  @type type :: String.t

  @typedoc """
  The Spotify URI for the track.
  """
  @type uri :: String.t

  @typedoc """
  The date and time the track was saved.
  """
  @type added_at :: Timestamp.t

  @typedoc """
  Information about the track.
  """
  @type track :: TrackFull.t
end
|
lib/spotify/models/tracks/tracks.ex
| 0.874212
| 0.759538
|
tracks.ex
|
starcoder
|
defmodule AWS.CertificateManager do
@moduledoc """
AWS Certificate Manager
Welcome to the AWS Certificate Manager (ACM) API documentation.
You can use ACM to manage SSL/TLS certificates for your AWS-based websites
and applications. For general information about using ACM, see the [ *AWS
Certificate Manager User Guide*
](https://docs.aws.amazon.com/acm/latest/userguide/).
"""
@doc """
Adds one or more tags to an ACM certificate. Tags are labels that you can
use to identify and organize your AWS resources. Each tag consists of a
`key` and an optional `value`. You specify the certificate on input by its
Amazon Resource Name (ARN). You specify the tag by using a key-value pair.
You can apply a tag to just one certificate if you want to identify a
specific characteristic of that certificate, or you can apply the same tag
to multiple certificates if you want to filter for a common relationship
among those certificates. Similarly, you can apply the same tag to multiple
resources if you want to specify a relationship among those resources. For
example, you can add the same tag to an ACM certificate and an Elastic Load
Balancing load balancer to indicate that they are both used by the same
website. For more information, see [Tagging ACM
certificates](https://docs.aws.amazon.com/acm/latest/userguide/tags.html).
To remove one or more tags, use the `RemoveTagsFromCertificate` action. To
view all of the tags that have been applied to the certificate, use the
`ListTagsForCertificate` action.
"""
def add_tags_to_certificate(client, input, options \\ []) do
  # Thin wrapper over the shared ACM request dispatcher.
  client |> request("AddTagsToCertificate", input, options)
end
@doc """
Deletes a certificate and its associated private key. If this action
succeeds, the certificate no longer appears in the list that can be
displayed by calling the `ListCertificates` action or be retrieved by
calling the `GetCertificate` action. The certificate will not be available
for use by AWS services integrated with ACM.
<note> You cannot delete an ACM certificate that is being used by another
AWS service. To delete a certificate that is in use, the certificate
association must first be removed.
</note>
"""
def delete_certificate(client, input, options \\ []) do
  # Thin wrapper over the shared ACM request dispatcher.
  client |> request("DeleteCertificate", input, options)
end
@doc """
Returns detailed metadata about the specified ACM certificate.
"""
def describe_certificate(client, input, options \\ []) do
  # Thin wrapper over the shared ACM request dispatcher.
  client |> request("DescribeCertificate", input, options)
end
@doc """
Exports a private certificate issued by a private certificate authority
(CA) for use anywhere. You can export the certificate, the certificate
chain, and the encrypted private key associated with the public key
embedded in the certificate. You must store the private key securely. The
private key is a 2048 bit RSA key. You must provide a passphrase for the
private key when exporting it. You can use the following OpenSSL command to
decrypt it later. Provide the passphrase when prompted.
`openssl rsa -in encrypted_key.pem -out decrypted_key.pem`
"""
def export_certificate(client, input, options \\ []) do
  # Thin wrapper over the shared ACM request dispatcher.
  client |> request("ExportCertificate", input, options)
end
@doc """
Retrieves a certificate specified by an ARN and its certificate chain . The
chain is an ordered list of certificates that contains the end entity
certificate, intermediate certificates of subordinate CAs, and the root
certificate in that order. The certificate and certificate chain are base64
encoded. If you want to decode the certificate to see the individual
fields, you can use OpenSSL.
"""
def get_certificate(client, input, options \\ []) do
  # Thin wrapper over the shared ACM request dispatcher.
  client |> request("GetCertificate", input, options)
end
@doc """
Imports a certificate into AWS Certificate Manager (ACM) to use with
services that are integrated with ACM. Note that [integrated
services](https://docs.aws.amazon.com/acm/latest/userguide/acm-services.html)
allow only certificate types and keys they support to be associated with
their resources. Further, their support differs depending on whether the
certificate is imported into IAM or into ACM. For more information, see the
documentation for each service. For more information about importing
certificates into ACM, see [Importing
Certificates](https://docs.aws.amazon.com/acm/latest/userguide/import-certificate.html)
in the *AWS Certificate Manager User Guide*.
<note> ACM does not provide [managed
renewal](https://docs.aws.amazon.com/acm/latest/userguide/acm-renewal.html)
for certificates that you import.
</note> Note the following guidelines when importing third party
certificates:
<ul> <li> You must enter the private key that matches the certificate you
are importing.
</li> <li> The private key must be unencrypted. You cannot import a private
key that is protected by a password or a passphrase.
</li> <li> If the certificate you are importing is not self-signed, you
must enter its certificate chain.
</li> <li> If a certificate chain is included, the issuer must be the
subject of one of the certificates in the chain.
</li> <li> The certificate, private key, and certificate chain must be
PEM-encoded.
</li> <li> The current time must be between the `Not Before` and `Not
After` certificate fields.
</li> <li> The `Issuer` field must not be empty.
</li> <li> The OCSP authority URL, if present, must not exceed 1000
characters.
</li> <li> To import a new certificate, omit the `CertificateArn` argument.
Include this argument only when you want to replace a previously imported
certificate.
</li> <li> When you import a certificate by using the CLI, you must specify
the certificate, the certificate chain, and the private key by their file
names preceded by `file://`. For example, you can specify a certificate
saved in the `C:\temp` folder as
`file://C:\temp\certificate_to_import.pem`. If you are making an HTTP or
HTTPS Query request, include these arguments as BLOBs.
</li> <li> When you import a certificate by using an SDK, you must specify
the certificate, the certificate chain, and the private key files in the
manner required by the programming language you're using.
</li> </ul> This operation returns the [Amazon Resource Name
(ARN)](https://docs.aws.amazon.com/general/latest/gr/aws-arns-and-namespaces.html)
of the imported certificate.
"""
def import_certificate(client, input, options \\ []) do
request(client, "ImportCertificate", input, options)
end
@doc """
Retrieves a list of certificate ARNs and domain names. You can request that
only certificates that match a specific status be listed. You can also
filter by specific attributes of the certificate.
"""
def list_certificates(client, input, options \\ []) do
request(client, "ListCertificates", input, options)
end
@doc """
Lists the tags that have been applied to the ACM certificate. Use the
certificate's Amazon Resource Name (ARN) to specify the certificate. To add
a tag to an ACM certificate, use the `AddTagsToCertificate` action. To
delete a tag, use the `RemoveTagsFromCertificate` action.
"""
def list_tags_for_certificate(client, input, options \\ []) do
request(client, "ListTagsForCertificate", input, options)
end
@doc """
Remove one or more tags from an ACM certificate. A tag consists of a
key-value pair. If you do not specify the value portion of the tag when
calling this function, the tag will be removed regardless of value. If you
specify a value, the tag is removed only if it is associated with the
specified value.
To add tags to a certificate, use the `AddTagsToCertificate` action. To
view all of the tags that have been applied to a specific ACM certificate,
use the `ListTagsForCertificate` action.
"""
def remove_tags_from_certificate(client, input, options \\ []) do
request(client, "RemoveTagsFromCertificate", input, options)
end
@doc """
Renews an eligable ACM certificate. At this time, only exported private
certificates can be renewed with this operation. In order to renew your ACM
PCA certificates with ACM, you must first [grant the ACM service principal
permission to do
so](https://docs.aws.amazon.com/acm-pca/latest/userguide/PcaPermissions.html).
For more information, see [Testing Managed
Renewal](https://docs.aws.amazon.com/acm/latest/userguide/manual-renewal.html)
in the ACM User Guide.
"""
def renew_certificate(client, input, options \\ []) do
request(client, "RenewCertificate", input, options)
end
@doc """
Requests an ACM certificate for use with other AWS services. To request an
ACM certificate, you must specify a fully qualified domain name (FQDN) in
the `DomainName` parameter. You can also specify additional FQDNs in the
`SubjectAlternativeNames` parameter.
If you are requesting a private certificate, domain validation is not
required. If you are requesting a public certificate, each domain name that
you specify must be validated to verify that you own or control the domain.
You can use [DNS
validation](https://docs.aws.amazon.com/acm/latest/userguide/gs-acm-validate-dns.html)
or [email
validation](https://docs.aws.amazon.com/acm/latest/userguide/gs-acm-validate-email.html).
We recommend that you use DNS validation. ACM issues public certificates
after receiving approval from the domain owner.
"""
def request_certificate(client, input, options \\ []) do
request(client, "RequestCertificate", input, options)
end
@doc """
Resends the email that requests domain ownership validation. The domain
owner or an authorized representative must approve the ACM certificate
before it can be issued. The certificate can be approved by clicking a link
in the mail to navigate to the Amazon certificate approval website and then
clicking **I Approve**. However, the validation email can be blocked by
spam filters. Therefore, if you do not receive the original mail, you can
request that the mail be resent within 72 hours of requesting the ACM
certificate. If more than 72 hours have elapsed since your original request
or since your last attempt to resend validation mail, you must request a
new certificate. For more information about setting up your contact email
addresses, see [Configure Email for your
Domain](https://docs.aws.amazon.com/acm/latest/userguide/setup-email.html).
"""
def resend_validation_email(client, input, options \\ []) do
request(client, "ResendValidationEmail", input, options)
end
@doc """
Updates a certificate. Currently, you can use this function to specify
whether to opt in to or out of recording your certificate in a certificate
transparency log. For more information, see [ Opting Out of Certificate
Transparency
Logging](https://docs.aws.amazon.com/acm/latest/userguide/acm-bestpractices.html#best-practices-transparency).
"""
def update_certificate_options(client, input, options \\ []) do
request(client, "UpdateCertificateOptions", input, options)
end
@spec request(map(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t | nil, Poison.Response.t} |
        {:error, Poison.Parser.t} |
        {:error, HTTPoison.Error.t}
# Signs and issues an ACM JSON-RPC call ("CertificateManager.<action>") as a
# SigV4-signed HTTP POST via HTTPoison, decoding the JSON response with
# Poison. Returns {:ok, parsed_body_or_nil, response} on HTTP 200,
# {:error, {exception, message}} for non-200 service errors, or
# {:error, %HTTPoison.Error{}} for transport failures.
defp request(client, action, input, options) do
  client = %{client | service: "acm"}
  host = get_host("acm", client)
  url = get_url(host, client)
  headers = [{"Host", host},
             {"Content-Type", "application/x-amz-json-1.1"},
             {"X-Amz-Target", "CertificateManager.#{action}"}]
  payload = Poison.Encoder.encode(input, [])
  # Sign only after the payload is final: the AWS SigV4 signature covers the body.
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
  case HTTPoison.post(url, payload, headers, options) do
    # 200 with an empty body: success with nothing to decode.
    {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
      {:ok, nil, response}
    {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    # Any other status: decode the AWS error envelope ("__type"/"message").
    {:ok, _response=%HTTPoison.Response{body: body}} ->
      error = Poison.Parser.parse!(body)
      exception = error["__type"]
      message = error["message"]
      {:error, {exception, message}}
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Builds the request host name for the service endpoint; the special
# "local" region maps to localhost (used for local testing).
defp get_host(endpoint_prefix, client) do
  case client.region do
    "local" -> "localhost"
    region -> "#{endpoint_prefix}.#{region}.#{client.endpoint}"
  end
end
# Assembles the full request URL from the host and the client's
# protocol/port configuration.
defp get_url(host, %{proto: proto, port: port}),
  do: "#{proto}://#{host}:#{port}/"
end
|
lib/aws/certificate_manager.ex
| 0.883116
| 0.638314
|
certificate_manager.ex
|
starcoder
|
defmodule AdventOfCode.Day11 do
  @moduledoc "Day 11"

  # One seat position on the grid; `empty?` is true when nobody sits here.
  defmodule Seat, do: defstruct x: 0, y: 0, empty?: true

  # Seats indexed by row (`by_row`: y => [Seat]) plus a precomputed neighbor
  # map ({x, y} => [neighbor positions]) so neighbors are found once, not on
  # every simulation round.
  defmodule SeatMap do
    defstruct by_row: %{}, neighbors: %{}

    # When `neighbors` is a function, compute every seat's neighbor positions
    # up front and cache them as a map.
    def new(by_row, neighbors) when is_function(neighbors), do: %SeatMap{
      by_row: by_row,
      neighbors: Map.new(Enum.flat_map(by_row, &(Enum.map(elem(&1, 1), neighbors.(by_row)))))
    }
    # When `neighbors` is already a map, reuse it unchanged (between rounds).
    def new(by_row, neighbors) when is_map(neighbors), do: %SeatMap{by_row: by_row, neighbors: neighbors}
  end

  # Returns a per-row step function for one simulation round: each seat flips
  # according to how many of its neighbors are occupied, relative to `threshold`.
  defp update(seat_map, threshold), do: fn {row, row_seats} ->
    mapper = fn seat ->
      # Count occupied neighbors, halting early at `threshold` once the
      # outcome for this seat is already decided.
      Enum.reduce_while(
        seat_map.neighbors[{seat.x, seat.y}],
        0,
        fn {x, y}, acc ->
          case Enum.find(Map.get(seat_map.by_row, y, []), &(&1.x == x)) do
            nil -> {:cont, acc}
            # Occupied neighbor that settles the result: an empty seat stays
            # empty after one occupied neighbor; an occupied seat empties at
            # `threshold` occupied neighbors.
            s when (not s.empty?) and (seat.empty? or acc + 1 == threshold) -> {:halt, threshold}
            s -> {:cont, acc + (if s.empty?, do: 0, else: 1)}
          end
        end
      )
      # Flip when: empty with zero occupied neighbors, or occupied with
      # >= threshold occupied neighbors; then rebuild the seat with its new state.
      |> (&(((&1 == 0 && seat.empty?) || (&1 >= threshold && !seat.empty?)) != seat.empty?)).()
      |> (&(%Seat{x: seat.x, y: seat.y, empty?: &1})).()
    end
    {row, Enum.map(row_seats, mapper)}
  end

  # Runs rounds until the seat map reaches a fixed point, then counts the
  # occupied seats. `AdventOfCode.from/3` presumably parses the input grid
  # into the by_row structure ("L" marking a seat) — defined elsewhere; verify.
  defp process(list, neighbors, threshold), do:
    SeatMap.new(AdventOfCode.from(list, "L", &(%Seat{x: &1, y: &2})), neighbors)
    |> Stream.unfold(&({&1, SeatMap.new(Map.new(&1.by_row, update(&1, threshold)), &1.neighbors)}))
    |> Enum.reduce_while(nil, &(if &1 == &2, do: {:halt, &1.by_row}, else: {:cont, &1}))
    |> Enum.reduce(0, fn {_, row_seats}, acc -> acc + Enum.count(row_seats, &(!&1.empty?)) end)

  # Part 1 neighbors: presumably the 8 adjacent coordinates, as produced by
  # AdventOfCode.expand/0 (defined elsewhere — confirm).
  defp part_one_neighbors(_), do: fn seat -> AdventOfCode.expand().({seat.x, seat.y}) end

  # Given seats sorted along a line and a predicate locating the seat of
  # interest, returns the positions of the immediately adjacent seat(s).
  # NOTE(review): assumes at least two seats in `seats` when the seat of
  # interest is first — a lone seat would make Enum.at/2 return nil here.
  defp nearby(seats, finder), do: Enum.map(
    case Enum.find_index(seats, finder) do
      0 -> [Enum.at(seats, 1)]
      n -> Enum.filter(Enum.map([-1, 1], &(Enum.at(seats, n + &1))), &(&1 != nil))
    end,
    &({&1.x, &1.y})
  )

  # Part 2 neighbors: the first visible seat in each of the 8 sight lines.
  # Candidates on the same column/diagonals are grouped by the sign of
  # (dx * dy) — 0 for the column, ±1 for the two diagonal orientations —
  # then the nearest candidate above/below is taken per group; the same-row
  # neighbors are appended separately.
  defp part_two_neighbors(by_row), do: fn seat ->
    filter = &(seat.y != &1.y && (seat.x == &1.x || abs(seat.x - &1.x) == abs(seat.y - &1.y)))
    key = &(if &1 == 0, do: 0, else: (if &1 < 0, do: -1, else: 1))
    Enum.flat_map(by_row, fn {_, r} -> Enum.filter(r, filter) end)
    |> Enum.group_by(&(key.((seat.x - &1.x) * (seat.y - &1.y))))
    |> Enum.flat_map(fn {_, p} -> nearby(Enum.sort_by([seat | p], &(&1.y)), &(seat.y == &1.y)) end)
    |> (fn positions -> {{seat.x, seat.y}, positions ++ nearby(by_row[seat.y], &(seat.x == &1.x))} end).()
  end

  # Part 1: adjacent neighbors, seats empty at 4+ occupied neighbors.
  def part1(list), do: process(list, &part_one_neighbors/1, 4)
  # Part 2: line-of-sight neighbors, seats empty at 5+ occupied neighbors.
  def part2(list), do: process(list, &part_two_neighbors/1, 5)
end
|
lib/advent_of_code/day11.ex
| 0.599368
| 0.572394
|
day11.ex
|
starcoder
|
defmodule LogicalFile.Section do
  alias __MODULE__

  @moduledoc """
  A `Section` represents lines of text from a backing file that represent
  a range of logical line numbers within a `LogicalFile`.

  ## Fields
  * `source_path` the fully qualified file name of the backing file that
  the `Section` represents.
  * `range` the range of logical line numbers the `Section` represents
  * `lines` a list of the lines in the `Section`
  * `offset` a value that transforms a logical line number to a local
  line number within the backing file.

  In the simple case a `Section` represents the entire contents of a backing
  file. However, a `Section` can be split and moved (for example when another
  `Section` is inserted within its range). Here the `offset` is adjusted to
  allow the conversion of logical line numbers to local line numbers in the
  backing file.
  """

  defstruct source_path: nil,
            range: 0..0,
            lines: [],
            offset: 0

  # Interface

  @doc """
  `new/1` creates a new `Section` representing all lines of the file at
  `source_path`, covering logical lines `1..line_count` with a zero offset.
  Raises if the file contains no lines.

  `new/4` creates a new `Section` representing the given `lines` of the file
  specified by `source_path`, covering `range` logical line numbers with the
  given `offset`. The offset determines how logical line numbers in the range
  are converted into line numbers in the source file. For example if the
  offset is -5 then logical line 10 will correspond to line 5 of the source
  file. Raises if `lines` is empty or its length does not match `range`.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section = Section.new("test/support/main.source")
      iex> assert "test/support/main.source" = section.source_path
      iex> assert 11 = Section.size(section)
      iex> assert 1..11 = section.range
      iex> assert 0 = section.offset

      iex> alias LogicalFile.Section
      iex> %Section{source_path: source_path, range: range, lines: lines} =
      ...> Section.new("foo.source", 1..6, ["one", "two", "three", "four", "five", "six"])
      iex> assert "foo.source" = source_path
      iex> assert 1..6 = range
      iex> assert 6 = Enum.count(lines)

      iex> alias LogicalFile.Section
      iex> %Section{offset: offset} = Section.new("foo.source", 1..2, ["one", "two"], -7)
      iex> assert -7 = offset

      iex> alias LogicalFile.Section
      iex> assert_raise(RuntimeError, fn ->
      ...> %Section{} =
      ...> Section.new("foo.source", 1..5, ["one", "two", "three", "four"])
      ...> end)
  """
  def new(source_path) do
    lines = read_lines(source_path)

    if Enum.empty?(lines) do
      raise("Cannot create empty section from (#{source_path})!")
    end

    new(source_path, 1..Enum.count(lines), lines)
  end

  def new(source_path, %Range{} = range, lines, offset \\ 0)
      when is_binary(source_path) and is_list(lines) and is_integer(offset) do
    if Enum.empty?(lines), do: raise("Range line list cannot be empty! (#{source_path})")

    if Enum.count(lines) != Range.size(range),
      do: raise("Range and line count does not match! (#{source_path})")

    %Section{source_path: source_path, range: range, lines: lines, offset: offset}
  end

  @doc """
  `line/2` returns a `String` containing the contents of logical line number
  `lno` which is expected to be within the range the `Section` represents.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section = Section.new("test/support/main.source")
      iex> assert "%(include.source)" = Section.line(section, 6)
  """
  def line(%Section{range: lo.._hi, lines: lines}, lno) do
    # `lines` is ordered to mirror `range`, so the list index is lno - lo.
    Enum.at(lines, lno - lo)
  end

  @doc """
  `line_matching/2` takes a `Section` and either a predicate function or a
  regular expression and returns a tuple `{logical_line_no, line}` representing
  the first line from the `Section` that matches.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section = Section.new("test/support/main.source")
      iex> include = ~r/%\\((?<file>.*)\\)/
      iex> assert {6, "%(include.source)"} = Section.line_matching(section, fn line -> String.length(line) > 5 end)
      iex> assert {6, "%(include.source)"} = Section.line_matching(section, include)
  """
  def line_matching(%Section{range: range, lines: lines}, pred_fn) when is_function(pred_fn) do
    Enum.zip(range, lines)
    |> Enum.find(fn {_lno, line} -> pred_fn.(line) end)
  end

  def line_matching(%Section{range: range, lines: lines}, %Regex{} = expr) do
    Enum.zip(range, lines)
    |> Enum.find(fn {_lno, line} -> Regex.match?(expr, line) end)
  end

  @doc """
  `lines_matching/2` takes a `Section` and either a predicate function or a
  regular expression and returns a list of tuples of the form
  `{logical_lno, line}` for each line that matches.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section = Section.new("test/support/commented.source")
      iex> assert [{3, "%% nothing here"}, {6, "%% or here"}] = Section.lines_matching(section, fn line ->
      ...> String.starts_with?(line, "%%")
      ...> end)
      iex> assert [{1, "one"}, {2, "two"}, {8, "six"}] = Section.lines_matching(section, fn line -> String.length(line) <4 end)
  """
  def lines_matching(%Section{range: range, lines: lines}, fun) when is_function(fun) do
    Enum.zip(range, lines)
    |> Enum.filter(fn {_lno, line} -> fun.(line) end)
  end

  def lines_matching(%Section{range: range, lines: lines}, %Regex{} = expr) do
    Enum.zip(range, lines)
    |> Enum.filter(fn {_lno, line} -> Regex.match?(expr, line) end)
  end

  @doc """
  `update_line/3` takes a `Section`, a logical line number expected to be
  within the `Section`, and a function. It replaces that line with the result
  of calling the function with the existing line. Raises if `lno` falls
  outside the `Section` range.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section = Section.new("test/support/main.source")
      ...> |> Section.update_line(6, fn line -> String.duplicate(" ", String.length(line)) end)
      iex> assert "                 " = Section.line(section, 6)
  """
  def update_line(%Section{range: lo.._hi = range, lines: lines} = section, lno, fun)
      when is_function(fun) do
    if lno not in range, do: raise("Section (#{inspect(range)}) does not contain line: #{lno}")
    %{section | lines: List.update_at(lines, lno - lo, fun)}
  end

  @doc """
  `splittable?/1` takes a `Section` and determines whether it is splittable. In
  general it's not splittable if it contains only one line.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section1 = Section.new("bar.source", 1..1, ["one"])
      iex> section2 = Section.new("foo.source", 1..2, ["one", "two"])
      iex> assert not Section.splittable?(section1)
      iex> assert Section.splittable?(section2)
  """
  # A lo..lo range (repeated binding) means exactly one line.
  def splittable?(%Section{range: lo..lo}), do: false
  def splittable?(%Section{}), do: true

  # Translates a range by `shift_by` lines; used to map a logical sub-range
  # onto zero-based indices into `lines`.
  defp shift_range(%Range{first: first, last: last} = range, shift_by) when is_integer(shift_by) do
    %{range | first: first + shift_by, last: last + shift_by}
  end

  @doc """
  `split/2` takes a `Section` and a logical line number `at_line` expected to be
  within the `Section` and returns a tuple `{before_section, after_section}`
  derived by splitting the contents of the Section at the specified line.
  The `before_section` contains all lines up to, but not including, the
  specified line, the `after_section` contains all lines from the specified
  line to the end of the `Section`.

  Note: It is illegal to attempt to split a Section containing one line, or
  to set the split point on the first or last line of a Section. In any of
  these cases an exception is raised.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section = Section.new("foo.source", 1..6, ["one", "two", "three", "four", "five", "six"])
      iex> {%Section{} = first, %Section{} = second} = Section.split(section, 4)
      iex> assert "foo.source" = first.source_path
      iex> assert 1..3 = first.range
      iex> assert ["one", "two", "three"] = first.lines
      iex> assert "foo.source" = second.source_path
      iex> assert 4..6 = second.range
      iex> assert ["four", "five", "six"] = second.lines

      iex> alias LogicalFile.Section
      iex> assert_raise(RuntimeError, fn ->
      ...> section = Section.new("foo.source", 1..4, ["one", "two", "three", "four"])
      ...> Section.split(section, 0)
      ...> end)

      iex> alias LogicalFile.Section
      iex> section = Section.new("foo.source", 1..3, ["alpha", "beta", "delta"]) |> Section.shift(36)
      iex> {s1, s2} = Section.split(section, 38)
      iex> assert %Section{range: 37..37, offset: -36, lines: ["alpha"]} = s1
      iex> assert %Section{range: 38..39, offset: -36, lines: ["beta", "delta"]} = s2

      iex> alias LogicalFile.Section
      iex> section = Section.new("bar.source", 29..33, ["   ", " ", "", "end", ""], -4)
      iex> {s1, s2} = Section.split(section, 30)
      iex> assert %Section{range: 29..29, offset: -4, lines: ["   "]} = s1
      iex> assert %Section{range: 30..33, offset: -4, lines: [" ", "", "end", ""]} = s2
  """
  def split(%Section{range: lo..lo}), do: raise("Cannot split a section containing one line!")

  # The public API is split/2, so the single-line guard must also exist at
  # arity 2 — the arity-1 clause above is retained for backwards
  # compatibility but could never match a split/2 call.
  def split(%Section{range: lo..lo}, _at_line),
    do: raise("Cannot split a section containing one line!")

  def split(%Section{range: lo.._}, lo), do: raise("Cannot set split point on first line!")
  def split(%Section{range: _..hi}, hi), do: raise("Cannot set split point on last line!")

  def split(
        %Section{source_path: path, range: lo..hi = range, lines: lines, offset: offset},
        at_line
      ) do
    if at_line not in range, do: raise("Line specified outside range")

    # Shift each logical sub-range by -lo to index into the `lines` list.
    pre_range = lo..(at_line - 1)
    pre_slice = Enum.slice(lines, pre_range |> shift_range(-lo))
    post_range = at_line..hi
    post_slice = Enum.slice(lines, post_range |> shift_range(-lo))

    {
      Section.new(path, pre_range, pre_slice, offset),
      Section.new(path, post_range, post_slice, offset)
    }
  end

  @doc """
  `shift/2` takes a `Section` and a number of lines to offset the section
  `by_lines` and returns a new `Section` containing the same lines with the
  logical line number range and offset shifted appropriately.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section =
      ...> Section.new("foo.source", 1..4, ["one", "two", "three", "four"])
      ...> |> Section.shift(10)
      iex> assert 11..14 = section.range
      iex> assert -10 = section.offset
  """
  def shift(%Section{} = section, 0), do: section

  def shift(%Section{range: lo..hi, offset: offset} = section, by_lines) do
    # Moving the logical range down/up must adjust the offset in the opposite
    # direction so logical -> local line resolution is unchanged.
    section
    |> set_range((lo + by_lines)..(hi + by_lines))
    |> set_offset(offset - by_lines)
  end

  @doc """
  `first_line_number/1` returns the first logical line number of the specified
  `Section`.
  """
  def first_line_number(%Section{range: lo.._hi}) do
    lo
  end

  @doc """
  `last_line_number/1` returns the last logical line number of the specified
  `Section`.
  """
  def last_line_number(%Section{range: _lo..hi}) do
    hi
  end

  @doc """
  `size/1` returns the number of lines in the specified `Section`.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section = Section.new("foo.source", 1..4, ["one", "two", "three", "four"])
      iex> assert 4 = Section.size(section)
  """
  def size(%Section{lines: lines}) do
    Enum.count(lines)
  end

  @doc """
  `total_size/1` returns the number of lines contained in the given list of
  `Section`s.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section1 = Section.new("foo.source", 1..4, ["one", "two", "three", "four"])
      iex> section2 = Section.new("bar.source", 5..7, ["alpha", "beta", "delta"])
      iex> assert 7 = Section.total_size([section1, section2])
  """
  def total_size(sections) when is_list(sections) do
    Enum.reduce(sections, 0, fn section, acc -> acc + Section.size(section) end)
  end

  @doc """
  `set_range/2` replaces the logical line number range of the specified
  `Section`.
  """
  def set_range(%Section{} = section, new_range) do
    %{section | range: new_range}
  end

  @doc """
  `set_offset/2` replaces the line number offset of the specified `Section`.
  """
  def set_offset(%Section{} = section, new_offset) do
    %{section | offset: new_offset}
  end

  @doc """
  `resolve_line/2` takes a `Section` and a logical line number `line` that is
  expected to be within the range of the `Section` and returns a tuple
  `{file, line}` representing the file backing the `Section` and the
  corresponding local line number within the `Section`.

  Maps a line number coming from a source map that may include many sections
  into a line number relative to the section. For example a section may represent
  source included from another file.

  E.g. File 1 contains 20 lines & File 2 contains 10 lines. If we insert File 2
  we get a structure like:

  Lines 1..10 => File 1: Lines 1..10
  Lines 11..20 => File 2: Lines 1..10
  Lines 21..30 => File 1: Lines 11..20

  If we ask for line 15 this maps to File 2, line 5. This means file 2 is
  offset from the map by -10. If we ask for line 25 this maps to file 1
  line 15, again offset by -10.

  ## Examples
      iex> alias LogicalFile.Section
      iex> section =
      ...> Section.new("test/support/main.source")
      ...> |> Section.set_range(21..30)
      ...> |> Section.set_offset(-10)
      iex> assert {"test/support/main.source", 15} = Section.resolve_line(section, 25)
  """
  def resolve_line(%Section{source_path: source_path, range: range, offset: offset}, line) do
    if line in range do
      {source_path, line + offset}
    else
      raise "Attempt to resolve logical line #{line} outside section range #{inspect(range)}"
    end
  end

  # Implementation

  # Reads the backing file and splits it on any Unicode newline sequence
  # (\R matches \n, \r\n, etc.), preserving empty lines.
  defp read_lines(source_path) when is_binary(source_path) do
    source_path
    |> File.read!()
    |> String.split(~r/\R/)
  end
end
defimpl Inspect, for: LogicalFile.Section do
  import Inspect.Algebra

  # Renders each line prefixed with its right-aligned (width 4) logical line
  # number, e.g. "   7: some text", one line per row.
  def line_output(range, lines) do
    Stream.zip([range, lines])
    |> Enum.map(fn {idx, line} ->
      idx_str = idx |> Integer.to_string() |> String.pad_leading(4, " ")
      "#{idx_str}: #{line}"
    end)
    |> Enum.join("\n")
  end

  # Custom inspect of a Section: a "#Section<path:"…", range:…, size:…,
  # offset:…>" summary followed by the numbered lines. Previously the summary
  # opened with "{" but closed with ">"; it now consistently follows the
  # conventional "#Name<…>" form.
  def inspect(
        %LogicalFile.Section{
          source_path: source_path,
          range: range,
          offset: offset,
          lines: lines
        },
        opts
      ) do
    concat([
      "#Section<path:\"",
      source_path,
      "\", range:",
      to_doc(range, opts),
      ", size:",
      to_doc(Range.size(range), opts),
      ", offset:",
      to_doc(offset, opts),
      ">\n",
      line_output(range, lines),
      "\n"
    ])
  end
end
|
lib/logical_file/section.ex
| 0.867022
| 0.698188
|
section.ex
|
starcoder
|
defmodule XPlane.Instance do
  @moduledoc """
  Represent a running instance of X-Plane and provide a GenServer to monitor
  the local network for X-Plane multicast "beacon" messages and return them
  as a list.

  ## Example
  ```
  iex> XPlane.Instance.start
  {:ok, #PID<0.138.0>}
  iex> XPlane.Instance.list
  [
    %XPlane.Instance{
      computer_name: "Starboard",
      host: :xplane,
      ip: {192, 168, 0, 58},
      addr: "192.168.0.58",
      major_version: 1,
      minor_version: 1,
      port: 49000,
      role: :extern_visual,
      seconds_since_seen: 0,
      version_number: 105101
    }
  ]
  iex> XPlane.Instance.stop
  :ok
  ```
  """

  defstruct [
    :ip,                 # IP Address of X-Plane instance as tuple
    :addr,               # Same as dot separated string
    :major_version,      # 1 at the time of X-Plane 10.40
    :minor_version,      # 1 at the time of X-Plane 10.40
    :version_number,     # 104103 for X-Plane 10.41r3
    :host,               # :xplane | :planemaker
    :role,               # :master | :extern_visual | :ios
    :port,               # Port number X-Plane is listening on, 49000 by default
    :computer_name,      # Hostname of the computer
    :seconds_since_seen] # Time since last beacon multicast received in seconds

  @type t :: %XPlane.Instance{
    ip: {integer, integer, integer, integer},
    addr: String.t,
    major_version: integer,
    minor_version: integer,
    version_number: integer,
    host: :xplane | :planemaker,
    role: :master | :extern_visual | :ios,
    port: integer,
    computer_name: String.t,
    seconds_since_seen: integer
  }

  # Multicast group and port X-Plane broadcasts its beacon to.
  @beacon_addr {239, 255, 1, 1}
  @beacon_port 49707
  # INADDR_ANY — join the multicast group on all local interfaces.
  @zeros_addr {0, 0, 0, 0}
  # Milliseconds to sleep after starting so beacons can be received before
  # the first call to list/0.
  @startup_grace_period 2500
  # Raw socket option constants (SOL_SOCKET / SO_REUSEPORT) used on BSDs.
  @sol_socket 0xffff
  @so_reuseport 0x0200

  use GenServer

  # API

  @doc """
  Start GenServer that listens for X-Plane multicast beacon messages and
  maintains a register of received beacon details. A short delay built in
  to the function leaves enough time for beacons to be received so that
  subsequent calls to list return reasonable results.

  ## Parameters
  Accepts normal GenServer options apart from name which is set to the
  module name.
  """
  @spec start(list) :: {:ok, pid} | {:error, any} | :ignore
  def start(opts \\ []) do
    result = GenServer.start(__MODULE__, :ok, [name: __MODULE__] ++ opts)
    :timer.sleep(@startup_grace_period) # Allow time for beacons to be picked up
    result
  end

  @doc """
  Start GenServer linked to current process that listens for X-Plane multicast
  beacon messages and maintains a register of received beacon details. A short
  delay built in to the function leaves enough time for beacons to be received
  so that subsequent calls to list return reasonable results.

  ## Parameters
  Accepts normal GenServer options apart from name which is set to the
  module name.
  """
  @spec start_link(list) :: {:ok, pid} | {:error, any} | :ignore
  def start_link(opts \\ []) do
    result = GenServer.start_link(__MODULE__, :ok, [name: __MODULE__] ++ opts)
    :timer.sleep(@startup_grace_period) # Allow time for beacons to be picked up
    result
  end

  @doc """
  Return a list of the most recent X-Plane beacon details received from each
  IP address. Note that a listing does not guarantee that the instance is
  currently running, only that it was seen `seconds_since_seen` seconds ago.

  Reuseport code based on:
  https://github.com/refuge/rbeacon/blob/master/src/rbeacon.erl#L414-L425
  """
  @spec list() :: list(XPlane.Instance.t)
  def list() do
    now = :erlang.system_time(:second)
    Enum.map(
      GenServer.call(__MODULE__, :list) |> Map.to_list,
      fn {{ip, _}, {major_version, minor_version, host, version_number,
          role, port, computer_name, last_seen}} ->
        %XPlane.Instance{
          ip: ip,
          addr: (ip |> Tuple.to_list |> Enum.join(".")),
          major_version: major_version,
          minor_version: minor_version,
          version_number: version_number,
          # Beacon host/role codes are 1-based, hence the nil at index 0;
          # an out-of-range code would yield nil here — assumed not to occur.
          host: [nil, :xplane, :planemaker] |> Enum.at(host),
          role: [nil, :master, :extern_visual, :ios] |> Enum.at(role),
          port: port,
          computer_name: computer_name,
          seconds_since_seen: now - last_seen
        }
      end
    )
  end

  @doc """
  Stop the GenServer listening for multicast X-Plane beacon messages
  """
  @spec stop() :: :ok | {:error, any}
  def stop() do
    GenServer.cast(__MODULE__, :stop)
  end

  # GenServer Callbacks

  # Opens the UDP socket joined to the beacon multicast group. State is a map
  # of {sender_ip, port} => most recently decoded beacon tuple.
  def init(:ok) do
    udp_options = [
      :binary,
      active: true,
      add_membership: {@beacon_addr, @zeros_addr},
      multicast_if: @zeros_addr,
      multicast_loop: false,
      multicast_ttl: 4,
      reuseaddr: true] ++
      # In addition to reuseaddr BSD variants have to specifically
      # set reuseport (only) when listening to beacon according
      # to "Sending Data to X-Plane"
      case :os.type() do
        {:unix, os_name} ->
          cond do
            os_name in [:darwin, :freebsd, :openbsd, :netbsd] ->
              [{:raw, @sol_socket, @so_reuseport, <<1::native-32>>}]
            true ->
              []
          end
        _ ->
          []
      end
    # Assertive match: crash (and let the supervisor restart) if the port
    # cannot be opened.
    {:ok, _sock} = :gen_udp.open(@beacon_port, udp_options)
    {:ok, %{}}
  end

  # Decode an X-Plane "BECN" beacon datagram and record the sender's details
  # keyed by {ip, port}. The binary layout follows X-Plane's beacon struct;
  # multi-byte fields are decoded native-endian, which matches X-Plane hosts
  # on x86/ARM — NOTE(review): confirm behaviour on big-endian hosts.
  def handle_info({:udp, _sock, sender_ip, _sender,
      <<"BECN\0",
        major_version::unsigned,
        minor_version::unsigned,
        host::native-integer-32,
        version_number::native-integer-32,
        role::unsigned-native-32,
        port::unsigned-native-16,
        computer_name::binary>>}, state) do
    {
      :noreply,
      state
      |> Map.put(
        {sender_ip, port}, {
          major_version,
          minor_version,
          host,
          version_number,
          role,
          port,
          # computer_name is NUL-terminated: keep only the leading segment.
          computer_name |> String.split(<<0>>) |> List.first,
          :erlang.system_time(:second)
        }
      )
    }
  end

  def handle_call(:list, _from, state) do
    {:reply, state, state}
  end

  def handle_cast(:stop, state) do
    {:stop, :normal, state}
  end
end
|
lib/xplane_instance.ex
| 0.745398
| 0.694342
|
xplane_instance.ex
|
starcoder
|
defmodule Kalevala.Output.Websocket.Tag do
  @moduledoc false

  @derive Jason.Encoder
  defstruct [:name, attributes: %{}, children: []]

  @doc false
  # Attaches `child` as the last child of `tag`, preserving insertion order.
  def append(tag, child) do
    Map.update!(tag, :children, fn children -> children ++ [child] end)
  end
end
defmodule Kalevala.Output.Websocket do
  @moduledoc """
  Processes tags for the websocket output

  Finds matching opening and closing tags and groups children together.
  Parsed datums either land in `context.data` (top level) or as children of
  the currently open tag; open tags are tracked in `meta.tag_stack` with the
  innermost one in `meta.current_tag` (`:empty` when no tag is open).
  """

  use Kalevala.Output

  alias Kalevala.Output.Websocket.Tag

  @impl true
  def init(opts) do
    %Context{
      data: [],
      opts: opts,
      meta: %{
        current_tag: :empty,
        tag_stack: []
      }
    }
  end

  @impl true
  def parse({:open, tag_name, attributes}, context) do
    parse_open(context, tag_name, attributes)
  end

  def parse({:close, tag_name}, context) do
    parse_close(context, tag_name)
  end

  # Plain datum: append to the top-level output when no tag is open,
  # otherwise attach it as a child of the currently open tag.
  def parse(datum, context) do
    case context.meta.current_tag == :empty do
      true ->
        Map.put(context, :data, context.data ++ [datum])

      false ->
        current_tag = Tag.append(context.meta.current_tag, datum)
        meta = Map.put(context.meta, :current_tag, current_tag)
        Map.put(context, :meta, meta)
    end
  end

  # Push the currently open tag (or :empty) onto the stack and make the new
  # tag current.
  defp parse_open(context, tag, attributes) do
    tag_stack = [context.meta.current_tag | context.meta.tag_stack]

    meta =
      context.meta
      |> Map.put(:current_tag, %Tag{name: tag, attributes: attributes})
      |> Map.put(:tag_stack, tag_stack)

    Map.put(context, :meta, meta)
  end

  # Pop the enclosing tag off the stack; the closed tag either becomes
  # top-level output (stack bottom reached) or a child of the enclosing tag.
  # The pattern matches assert that the close tag matches the currently open
  # tag and that the stack is non-empty — mismatched/unbalanced tags crash,
  # surfacing malformed input. (A previous no-op reassignment of
  # `current_tag.children` to itself has been removed.)
  defp parse_close(context, tag_name) do
    [new_current | tag_stack] = context.meta.tag_stack

    current_tag = %{name: ^tag_name} = context.meta.current_tag

    case new_current do
      :empty ->
        meta =
          context.meta
          |> Map.put(:current_tag, :empty)
          |> Map.put(:tag_stack, tag_stack)

        context
        |> Map.put(:data, context.data ++ [current_tag])
        |> Map.put(:meta, meta)

      new_current ->
        meta =
          context.meta
          |> Map.put(:current_tag, Tag.append(new_current, current_tag))
          |> Map.put(:tag_stack, tag_stack)

        Map.put(context, :meta, meta)
    end
  end

  @impl true
  def post_parse(context), do: context
end
|
lib/kalevala/output/websocket.ex
| 0.718693
| 0.423339
|
websocket.ex
|
starcoder
|
defmodule Toolshed.Top.Report do
@moduledoc false
@typedoc """
Options:
* `:order` - the sort order for the results (`:reductions`, `:delta_reductions`,
`:mailbox`, `:delta_mailbox`, `:total_heap_size`, `:delta_total_heap_size`, `:heap_size`,
`:delta_heap_size`, `:stack_size`, `:delta_stack_size`)
* `:rows` - the number of rows to use
* `:columns` - the number of columns to use
"""
@type options() :: %{rows: pos_integer(), columns: pos_integer(), order: atom()}
@spec back_to_the_top(options()) :: iolist()
# Emits ANSI control sequences that move the cursor up over the previously
# printed report body and back to column 1, so the next report overwrites it.
def back_to_the_top(options) do
  lines_up = options.rows - 3
  [IO.ANSI.cursor_up(lines_up), "\r"]
end
@doc """
Create a top process report
"""
@spec generate(list(), options()) :: iolist()
def generate(info, options) do
n = options.rows - 5
content = info |> Enum.sort(sort(options.order)) |> Enum.take(n) |> Enum.map(&format/1)
[format_summary(info), format_header(), content]
end
defp sort(:reductions), do: fn x, y -> x.reductions > y.reductions end
defp sort(:delta_reductions), do: fn x, y -> x.delta_reductions > y.delta_reductions end
defp sort(:mailbox), do: fn x, y -> x.message_queue_len > y.message_queue_len end
defp sort(:delta_mailbox),
do: fn x, y -> x.delta_message_queue_len > y.delta_message_queue_len end
defp sort(:total_heap_size), do: fn x, y -> x.total_heap_size > y.total_heap_size end
defp sort(:delta_total_heap_size),
do: fn x, y -> x.delta_total_heap_size > y.delta_total_heap_size end
defp sort(:heap_size), do: fn x, y -> x.heap_size > y.heap_size end
defp sort(:delta_heap_size), do: fn x, y -> x.delta_heap_size > y.delta_heap_size end
defp sort(:stack_size), do: fn x, y -> x.stack_size > y.stack_size end
defp sort(:delta_stack_size), do: fn x, y -> x.delta_stack_size > y.delta_stack_size end
defp sort(_other), do: sort(:delta_reductions)
defp format_summary(infos) do
cnt = Enum.count(infos)
"Total processes: #{cnt}\n"
end
defp format_header() do
[
IO.ANSI.clear_line(),
IO.ANSI.cyan(),
:io_lib.format(
"~-12ts ~-28ts ~5ts/~-5ts ~5ts/~-5ts ~5ts/~-5ts ~5ts/~-5ts ~5ts/~-5ts~n",
[
"Application",
"Name or PID",
"Reds",
"Δ",
"Mbox",
"Δ",
"Total",
"Δ",
"Heap",
"Δ",
"Stack",
"Δ"
]
),
IO.ANSI.white()
]
end
defp format(info) do
:io_lib.format(
IO.ANSI.clear_line() <>
"~-12ts ~-28ts ~5ts/~-5ts ~5ts/~-5ts ~5ts/~-5ts ~5ts/~-5ts ~5ts/~-5ts~n",
[
String.slice(to_string(info.application), 0, 12),
String.slice(info.name, 0, 28),
format_num(info.reductions),
format_num(info.delta_reductions),
format_num(info.message_queue_len),
format_num(info.delta_message_queue_len),
format_num(info.total_heap_size),
format_num(info.delta_total_heap_size),
format_num(info.heap_size),
format_num(info.delta_heap_size),
format_num(info.stack_size),
format_num(info.delta_stack_size)
]
)
end
defp format_num(x) when x < 10 * 1024, do: Integer.to_string(x)
defp format_num(x) when x < 10 * 1024 * 1024, do: Integer.to_string(div(x, 1024)) <> "K"
defp format_num(x), do: Integer.to_string(div(x, 1024 * 1024)) <> "M"
end
|
lib/toolshed/top/report.ex
| 0.768081
| 0.738693
|
report.ex
|
starcoder
|
defmodule DataMatrix.Matrix do
  @moduledoc false
  alias DataMatrix.MappingMatrix
  # dark: list of {row, col} coordinates of dark modules;
  # nrow/ncol: symbol dimensions; version: symbol version (0..29).
  defstruct ~w(dark version nrow ncol)a
  # Symbol size and data-region size lookup tables, loaded at compile time.
  @symbol_size Code.eval_file("lib/datamatrix/static/symbol_size.tuple") |> elem(0)
  @region_size Code.eval_file("lib/datamatrix/static/data_region_size.tuple") |> elem(0)
  @doc """
  Creates a blank symbol for `version` (0..29).

  Dimensions come from the compile-time symbol size table; no dark modules
  are drawn yet (`dark: nil`).
  """
  def new(version) when version in 0..29 do
    {nrow, ncol} = elem(@symbol_size, version)
    %__MODULE__{
      dark: nil,
      version: version,
      nrow: nrow,
      ncol: ncol
    }
  end
  @doc """
  Draws the alignment patterns (solid and dashed border lines) that separate
  the symbol's data regions, replacing the `dark` module list.
  """
  def draw_patterns(%__MODULE__{nrow: nrow, ncol: ncol, version: version} = symbol) do
    {row_gap, col_gap} = elem(@region_size, version)
    alignment_patterns =
      [
        draw_horizontal_patterns(row_gap, nrow, ncol),
        draw_vertical_patterns(col_gap, nrow, ncol)
      ]
      |> Enum.concat()
      |> List.flatten()
    %__MODULE__{symbol | dark: alignment_patterns}
  end
  # For each data-region band: dashed line on top, solid line at the bottom.
  defp draw_horizontal_patterns(gap, nrow, ncol) do
    0
    |> Stream.iterate(&(&1 + gap + 2))
    |> Stream.take_while(&(&1 < nrow))
    |> Stream.map(fn row ->
      [
        draw_dashed_horizontal_line(row, ncol),
        draw_solid_horizontal_line(row + gap + 1, ncol)
      ]
    end)
  end
  # For each data-region column band: solid line on the left, dashed on the right.
  defp draw_vertical_patterns(gap, nrow, ncol) do
    0
    |> Stream.iterate(&(&1 + gap + 2))
    |> Stream.take_while(&(&1 < ncol))
    |> Stream.map(fn col ->
      [
        draw_solid_vertical_line(col, nrow),
        draw_dashed_vertical_line(col + gap + 1, nrow)
      ]
    end)
  end
  defp draw_solid_horizontal_line(row, ncol) do
    for col <- 0..(ncol - 1), do: {row, col}
  end
  # Every other module, starting at column 0.
  defp draw_dashed_horizontal_line(row, ncol) do
    for col <- 0..(div(ncol, 2) - 1), do: {row, 2 * col}
  end
  defp draw_solid_vertical_line(col, nrow) do
    for row <- 0..(nrow - 1), do: {row, col}
  end
  # Every other module, starting at row 1.
  defp draw_dashed_vertical_line(col, nrow) do
    for row <- 0..(div(nrow, 2) - 1), do: {2 * row + 1, col}
  end
  @doc """
  Places the encoded `bits` into the symbol.

  Bits are zipped against the version's mapping matrix; positions carrying a
  `1` bit (plus the fixed remaining area) are re-mapped into their data
  regions and appended to the existing dark modules.
  """
  def draw_data(%__MODULE__{dark: dark, version: version} = symbol, bits) do
    {mapping, remaining_area} = MappingMatrix.get_mapping_matrix(version)
    data =
      mapping
      |> Stream.zip(bits)
      |> Stream.filter(fn {_, bit} -> bit == 1 end)
      |> Stream.map(&elem(&1, 0))
      |> Stream.concat(remaining_area)
      |> subdivide_into_data_regions(elem(@region_size, version))
      # Shift by 1 to land inside the region, past its left/top border lines.
      |> Stream.map(fn {row, col} -> {row + 1, col + 1} end)
    %__MODULE__{symbol | dark: Enum.concat(dark, data)}
  end
  # Each data region is (nrow x ncol) of payload surrounded by a 2-module
  # border; translate raw mapping coordinates into bordered-symbol coordinates.
  defp subdivide_into_data_regions(points, {nrow, ncol}) do
    Stream.map(points, fn {row, col} ->
      {
        div(row, nrow) * (nrow + 2) + rem(row, nrow),
        div(col, ncol) * (ncol + 2) + rem(col, ncol)
      }
    end)
  end
  @doc """
  Adds a quiet zone of `quiet_zone` modules on every side by translating all
  dark modules and growing the symbol dimensions accordingly.
  """
  def draw_quiet_zone(%__MODULE__{nrow: nrow, ncol: ncol, dark: dark} = symbol, quiet_zone) do
    translated =
      Enum.map(dark, fn {row, col} ->
        {row + quiet_zone, col + quiet_zone}
      end)
    nrow = nrow + 2 * quiet_zone
    ncol = ncol + 2 * quiet_zone
    %__MODULE__{symbol | nrow: nrow, ncol: ncol, dark: translated}
  end
  @doc """
  Exports the symbol as a plain map with a row-major 0/1 `matrix`
  (1 = dark module) plus version and dimensions.
  """
  def export(%__MODULE__{nrow: nrow, ncol: ncol, version: version} = symbol) do
    dark =
      symbol.dark
      |> Stream.map(&{&1, 1})
      |> Enum.into(Map.new())
    flatten =
      for row <- 0..(nrow - 1),
          col <- 0..(ncol - 1),
          do: Map.get(dark, {row, col}, 0)
    %{
      version: version,
      nrow: nrow,
      ncol: ncol,
      matrix: Enum.chunk_every(flatten, ncol)
    }
  end
end
|
lib/datamatrix/matrix.ex
| 0.775817
| 0.533884
|
matrix.ex
|
starcoder
|
defmodule Pngstruct do
  @moduledoc """
  Struct holding a PNG file's IHDR header fields plus the list of remaining
  chunks, as produced by `ParseImg.extract_png/1`.
  """
  @enforce_keys [:length, :width, :height, :chunks]
  defstruct [:length, :width, :height, :bit_depth,
    :color_type, :compression_method, :filter_method, :interlace_method,
    :crc, :chunks]
end
defmodule ParseImg do
  @moduledoc """
  Helper module for image parsing functions
  """
  @doc """
  parse a png file into binary structure
  see: http://www.zohaib.me/binary-pattern-matching-in-elixir/
  """
  def extract_png(png_file_path) do
    case File.read!(png_file_path) do
      <<_png_header::size(64),
        length::size(32), "IHDR",
        width::size(32),
        height::size(32),
        bit_depth, color_type,
        compression_method, filter_method,
        interlace_method,
        crc::size(32),
        chunks::binary>> ->
        %Pngstruct{
          length: length,
          width: width,
          height: height,
          bit_depth: bit_depth,
          color_type: color_type,
          compression_method: compression_method,
          filter_method: filter_method,
          interlace_method: interlace_method,
          crc: crc,
          # `[] ++ list` was a no-op; the chunk list is used directly.
          chunks: parse_chunks(chunks)
        }
      _ -> raise("Unexpected png structure")
    end
  end
  # Recursively splits the remaining binary into PNG chunks
  # (length, type, data, CRC) until the binary is exhausted.
  defp parse_chunks(<<length::size(32), chunk_type::size(32),
    chunk_data::binary-size(length), crc::size(32), chunks::binary>>) when is_bitstring(chunks) do
    [%{length: length, chunk_type: chunk_type, chunk_data: chunk_data,
      crc: crc} | parse_chunks(chunks)]
  end
  defp parse_chunks(<<>>), do: []
  @doc """
  create the structure for svg values

  Builds the `%{x:, y:, fill:}` map for the pixel at linear index `idx` in an
  image `width` pixels wide; `x` is scaled by `ratio`.
  """
  def merge_code_pixels(idx, width, pixel_tuple, ratio \\ 0.6) do
    %{
      x: calc_x(idx, width) * ratio,
      y: calc_y(idx, width),
      fill: create_fill(pixel_tuple)
    }
  end
  @doc """
  takes the parsed code string
  and maps it into the pixel values

  Consecutive characters whose pixels share the same fill color are merged
  into a single `:text` element. Elements are accumulated by prepending, so
  the returned list is in reverse order relative to the input pixels.
  """
  def map_pixels(code, pixels, ratio, width \\ 3150) do
    pixels
    |> List.flatten
    |> Enum.zip(code)
    |> Enum.reduce([1, []], fn({pixel, character}, [idx, acc]) ->
      %{fill: fill} = res = merge_code_pixels(idx, width, pixel, ratio)
      case [idx, acc] do
        [1, _acc] ->
          [idx + 1, [{:text, res, character} | acc]]
        [idx, [{:text, %{fill: ^fill} = element, text} | acc]] ->
          [idx + 1, [{:text, element, text <> character} | acc]]
        [idx, acc] ->
          [idx + 1, [{:text, res, character} | acc]]
      end
    end)
    # The accumulator is `[idx, acc]`; keep only the accumulated elements.
    # (A redundant `|> Enum.to_list` on an already-materialized list was removed.)
    |> Enum.fetch!(1)
  end
  @doc """
  generates the pixel values as an svg file
  """
  def generate_svg(pixel_matrix, width, height, ratio) do
    XmlBuilder.element(:svg, %{
      viewBox: "0 0 #{width*ratio} #{height}",
      xmlns: "http://www.w3.org/2000/svg",
      style: "font-family: 'Times New Roman'; font-size: 1; font-weight: 900;",
      width: width,
      height: height,
      "xml:space": "preserve"
    }, pixel_matrix)
    |> XmlBuilder.generate
  end
  # Encodes one pixel channel (non-negative integer) as an uppercase base-16
  # string. (Previously carried a `@doc`, which the compiler discards for
  # private functions with a warning; comments are used instead.)
  defp binary_encode(px_elem) do
    px_elem |> :binary.encode_unsigned |> Base.encode16
  end
  # Joins binary-encoded RGB channels into a "#RRGGBB" hex string.
  defp to_hex({r, g, b}) do
    "#" <> (Enum.map([r,g,b], &binary_encode/1) |> Enum.join(""))
  end
  # Joins binary-encoded RGBA channels into a "#RRGGBBAA" hex string.
  defp to_hex({r, g, b, a}) do
    "#" <> (Enum.map([r,g,b,a], &binary_encode/1) |> Enum.join(""))
  end
  # Wrapper around to_hex/1.
  defp create_fill(pixel_tuple), do: to_hex(pixel_tuple)
  defp calc_x(idx, width), do: rem(idx, width)
  defp calc_y(idx, width), do: div(idx, width)
end
|
lib/helpers/parse_img.ex
| 0.736401
| 0.518668
|
parse_img.ex
|
starcoder
|
defmodule Xgit.Repository do
  @moduledoc ~S"""
  Represents a git repository.
  Create a repository by calling the `start_link` function on one of the modules
  that implements `Xgit.Repository.Storage`. The resulting PID can be used when
  calling functions in this module and `Xgit.Repository.Plumbing`.
  The functions implemented in this module correspond to the "porcelain" commands
  implemented by command-line git.
  (As of this writing, relatively few of the porcelain commands are implemented.)
  """
  import Xgit.Util.ForceCoverage
  alias Xgit.Object
  alias Xgit.ObjectId
  alias Xgit.PersonIdent
  alias Xgit.Ref
  alias Xgit.Repository.Storage
  alias Xgit.Tag
  @typedoc ~S"""
  The process ID for an `Xgit.Repository` process.
  This is the same process ID returned from the `start_link` function of any
  module that implements `Xgit.Repository.Storage`.
  """
  @type t :: pid
  @doc ~S"""
  Returns `true` if the argument is a PID representing a valid `Xgit.Repository` process.
  """
  @spec valid?(repository :: term) :: boolean
  defdelegate valid?(repository), to: Storage
  ## -- Tags --
  @typedoc ~S"""
  Reason codes that can be returned by `tag/4`.
  """
  @type tag_reason :: Storage.put_ref_reason()
  @doc ~S"""
  Create a tag object.
  Analogous to the _create_ form of [`git tag`](https://git-scm.com/docs/git-tag).
  ## Parameters
  `repository` is the `Xgit.Repository.Storage` (PID) to search for the object.
  `tag_name` (`String`) is the name to give to the new tag.
  `object` (`Xgit.ObjectId`) is the object ID to be pointed to by this tag
  (typically a `commit` object).
  ## Options
  `annotated?`: (boolean) true to create an annotated tag (default: `false` unless `message` is specified)
  `force?`: (boolean) true to replace an existing tag (default: `false`)
  `message`: (`String` or bytelist) message to associate with the tag.
  * Must be present and non-empty if `:annotated?` is `true`.
  * Implies `annotated?: true`.
  `tagger`: (`Xgit.PersonIdent`, required if annotated) tagger name, email, timestamp
  ## Return Value
  `:ok` if created successfully.
  `{:error, reason}` if unable. Reason codes may come from `Xgit.Repository.Storage.put_ref/3`.
  TO DO: Support GPG signatures. https://github.com/elixir-git/xgit/issues/202
  """
  @spec tag(repository :: t, tag_name :: String.t(), object :: ObjectId.t(),
    annotated?: boolean,
    force?: boolean,
    message: [byte] | String.t(),
    tagger: PersonIdent.t()
  ) :: :ok | {:error, reason :: tag_reason}
  def tag(repository, tag_name, object, options \\ [])
      when is_pid(repository) and is_binary(tag_name) and is_binary(object) and is_list(options) do
    repository = Storage.assert_valid(repository)
    unless Tag.valid_name?(String.to_charlist(tag_name)) do
      raise ArgumentError,
            ~s(Xgit.Repository.tag/4: tag_name "#{tag_name}" is invalid)
    end
    unless ObjectId.valid?(object) do
      raise ArgumentError,
            "Xgit.Repository.tag/4: object #{inspect(object)} is invalid"
    end
    force? = force_from_tag_options(options)
    message = message_from_tag_options(options)
    annotated? = annotated_from_tag_options(options, message)
    if annotated? do
      create_annotated_tag(
        repository,
        tag_name,
        object,
        force?,
        message,
        tagger_from_tag_options(options)
      )
    else
      create_lightweight_tag(repository, tag_name, object, force?)
    end
  end
  # Validates the `:force?` option (default `false`); raises on non-boolean values.
  defp force_from_tag_options(options) do
    case Keyword.get(options, :force?, false) do
      false ->
        cover false
      true ->
        cover true
      invalid ->
        raise ArgumentError,
              "Xgit.Repository.tag/4: force? #{inspect(invalid)} is invalid"
    end
  end
  # Normalizes the `:message` option to a charlist (or `nil` if absent);
  # raises on empty or otherwise invalid messages.
  defp message_from_tag_options(options) do
    case Keyword.get(options, :message) do
      nil ->
        cover nil
      "" ->
        raise ArgumentError,
              "Xgit.Repository.tag/4: message must be non-empty if present"
      message when is_binary(message) ->
        String.to_charlist(message)
      [_ | _] = message ->
        cover message
      [] ->
        raise ArgumentError,
              "Xgit.Repository.tag/4: message must be non-empty if present"
      invalid ->
        raise ArgumentError,
              "Xgit.Repository.tag/4: message #{inspect(invalid)} is invalid"
    end
  end
  # Determines whether to create an annotated tag. Defaults to "annotated if a
  # message was given" and enforces consistency between `:annotated?` and
  # `:message` (annotated requires a message; a message forbids annotated?: false).
  defp annotated_from_tag_options(options, message) do
    case Keyword.get(options, :annotated?, message != nil) do
      false ->
        if message == nil do
          cover false
        else
          raise ArgumentError,
                "Xgit.Repository.tag/4: annotated?: false can not be specified when message is present"
        end
      true ->
        if message == nil do
          raise ArgumentError,
                "Xgit.Repository.tag/4: annotated?: true can not be specified without message"
        else
          cover true
        end
      invalid ->
        raise ArgumentError,
              "Xgit.Repository.tag/4: annotated? #{inspect(invalid)} is invalid"
    end
  end
  # Fetches and validates the `:tagger` option, which is required for
  # annotated tags.
  defp tagger_from_tag_options(options) do
    tagger = Keyword.get(options, :tagger)
    cond do
      tagger == nil ->
        raise ArgumentError,
              "Xgit.Repository.tag/4: tagger must be specified for an annotated tag"
      PersonIdent.valid?(tagger) ->
        cover tagger
      true ->
        raise ArgumentError,
              "Xgit.Repository.tag/4: tagger #{inspect(tagger)} is invalid"
    end
  end
  # Builds a tag object pointing at `object`, writes it as a loose object,
  # then points refs/tags/<tag_name> at the new tag object.
  defp create_annotated_tag(repository, tag_name, object, force?, message, tagger) do
    with :ok <- check_existing_ref(repository, tag_name, force?),
         {:ok, %Object{type: target_type}} <- Storage.get_object(repository, object),
         tag <- %Tag{
           object: object,
           type: target_type,
           name: String.to_charlist(tag_name),
           tagger: tagger,
           message: ensure_trailing_newline(message)
         },
         %Object{id: tag_id} = tag_object <- Tag.to_object(tag),
         :ok <- Storage.put_loose_object(repository, tag_object) do
      ref = %Ref{name: "refs/tags/#{tag_name}", target: tag_id}
      Storage.put_ref(repository, ref, opts_for_force(force?))
    else
      {:error, reason} -> cover {:error, reason}
    end
  end
  # When not forcing, refuse to overwrite an existing tag ref.
  defp check_existing_ref(_repository, _tag_name, true), do: cover(:ok)
  defp check_existing_ref(repository, tag_name, false) do
    case Storage.get_ref(repository, "refs/tags/#{tag_name}") do
      {:ok, %Ref{}} -> cover {:error, :old_target_not_matched}
      {:error, :not_found} -> cover :ok
    end
  end
  # Tag messages end with a newline (10 = ?\n); append one if missing.
  defp ensure_trailing_newline(message) do
    if List.last(message) == 10 do
      cover(message)
    else
      cover(message ++ '\n')
    end
  end
  defp create_lightweight_tag(repository, tag_name, object, force?) do
    ref = %Ref{name: "refs/tags/#{tag_name}", target: object}
    Storage.put_ref(repository, ref, opts_for_force(force?))
  end
  # When not forcing, require the ref to be newly created (old_target: :new).
  defp opts_for_force(true), do: cover(follow_link?: false)
  defp opts_for_force(false), do: cover(follow_link?: false, old_target: :new)
end
|
lib/xgit/repository.ex
| 0.901608
| 0.554591
|
repository.ex
|
starcoder
|
defmodule Convertat do
  @moduledoc """
  Provides functions for converting **from** and **to** arbitrary bases.
  """
  @type integer_base :: 2..36
  @type list_base :: []
  @doc """
  Converts any string of digits or list of digits (where each digit is a string)
  to a value in decimal base (base 10), given a starting base.
  The starting base can be an integer in the `2..36` range (in which case the
  native `String.to_integer/2` function is used) or a list with at least two
  elements (digits).
  ## Examples
      iex> "101" |> Convertat.from_base(2)
      5
      iex> "fe" |> Convertat.from_base(16)
      254
      iex> "foo" |> Convertat.from_base(["f", "o"])
      3
      iex> "↑" |> Convertat.from_base(["↓", "↑"])
      1
      iex> ["foo", "bar"] |> Convertat.from_base(["bar", "foo"])
      2
      iex> "test" |> Convertat.from_base(["onedigit"])
      ** (ArgumentError) list bases must have at least two digits
  """
  @spec from_base(String.t, integer_base) :: integer
  @spec from_base([] | String.t, list_base) :: integer
  def from_base(digits, base)
  def from_base("", _), do: 0
  def from_base([], _), do: 0
  def from_base(digits, base) when is_binary(digits) and is_integer(base),
    do: String.to_integer(digits, base)
  def from_base(_digits, base) when is_list(base) and length(base) < 2,
    do: raise(ArgumentError, "list bases must have at least two digits")
  def from_base(digits, base) when is_binary(digits) and is_list(base),
    do: digits |> String.codepoints |> from_base(base)
  def from_base(digits, base) when is_list(digits) and is_list(base) do
    numeric_base = Enum.count(base)
    # Map each digit (normalized to a string) to its index within the base.
    digits_map = base
      |> Enum.map(&to_string/1)
      |> Enum.with_index
      |> Enum.into(%{})
    # Horner's method over the digits, most significant first.
    # `Map.get/2` replaces the long-deprecated (and since removed) `Dict.get/2`;
    # behavior for map lookups is identical.
    Enum.reduce digits, 0, fn(digit, acc) ->
      Map.get(digits_map, digit) + numeric_base * acc
    end
  end
  @doc """
  Converts a value in decimal base (`val`, which has to be an integer) to an
  arbitrary base. If the `:as_list` option is true, the resulting value in base
  `base` will be returned as a list of digits instead of a string of digits.
  ## Examples
      iex> 35 |> Convertat.to_base(36)
      "z"
      iex> 11 |> Convertat.to_base(["a", "b"])
      "babb"
      iex> 6 |> Convertat.to_base(["foo", "bar"], as_list: true)
      ["bar", "bar", "foo"]
      iex> 10 |> Convertat.to_base(["↓", "↑"])
      "↑↓↑↓"
      iex> 42 |> Convertat.to_base(["onedigitbase"])
      ** (ArgumentError) list bases must have at least two digits
  """
  @spec to_base(integer, integer_base | list_base, [as_list: true]) :: [String.t]
  @spec to_base(integer, integer_base | list_base, [as_list: false]) :: [String.t]
  def to_base(val, base, opts \\ [as_list: false])
  def to_base(_val, base, _opts) when is_list(base) and length(base) < 2,
    do: raise(ArgumentError, "list bases must have at least two digits")
  def to_base(val, base, as_list: as_list?) when is_integer(base) do
    result = val |> Integer.to_string(base) |> String.downcase
    if as_list?, do: String.codepoints(result), else: result
  end
  # Zero has no digits under the generic algorithm, so it is special-cased to
  # the base's zero digit.
  def to_base(0, base, as_list: as_list?) do
    if as_list?, do: [zero_digit(base)], else: zero_digit(base)
  end
  def to_base(val, base, opts) do
    result = do_to_base(val, base, Enum.count(base)) |> Enum.reverse
    if opts[:as_list], do: result, else: Enum.join(result)
  end
  # Builds the digit list least-significant first via repeated div/rem.
  @spec do_to_base(integer, list_base, non_neg_integer) :: [String.t]
  defp do_to_base(val, _base, _numeric_base) when val == 0, do: []
  defp do_to_base(val, base, numeric_base) do
    digit = Enum.at(base, rem(val, numeric_base))
    [digit | do_to_base(div(val, numeric_base), base, numeric_base)]
  end
  @compile {:inline, zero_digit: 1}
  @spec zero_digit(list_base) :: String.t
  defp zero_digit(base), do: base |> Enum.at(0) |> to_string
end
|
lib/convertat.ex
| 0.897657
| 0.524577
|
convertat.ex
|
starcoder
|
defmodule ScrollHat.Font.Medium do
  @moduledoc """
  Medium 5x5 Font
  Transposed from https://github.com/pimoroni/scroll-phat-hd/blob/master/library/scrollphathd/fonts/font5x5.py
  """
  @behaviour ScrollHat.Font

  # Glyph bitmaps for codepoints 0x20..0x5F, one entry per character.
  # Each glyph is 7 rows (2 leading blank rows + 5 drawing rows) of variable
  # width; a 1 marks a lit pixel and is scaled by the requested brightness at
  # lookup time (1 * b == b, 0 * b == 0). This data-driven table replaces the
  # original clause-per-character implementation (~550 lines of duplication)
  # with identical output.
  @glyphs %{
    0x20 => [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]],
    0x21 => [[0], [0], [1], [1], [1], [0], [1]],
    0x22 => [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [1, 0, 1], [1, 0, 1]],
    0x23 => [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 1, 0, 1, 0], [1, 1, 1, 1, 1], [0, 1, 0, 1, 0], [1, 1, 1, 1, 1], [0, 1, 0, 1, 0]],
    0x24 => [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [1, 0, 1, 0, 0], [0, 1, 1, 1, 0], [0, 0, 1, 0, 1], [0, 1, 1, 1, 0]],
    0x25 => [[0, 0, 0], [0, 0, 0], [1, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 0, 1]],
    0x26 => [[0, 0, 0, 0], [0, 0, 0, 0], [0, 1, 0, 0], [1, 0, 1, 0], [0, 1, 0, 0], [1, 0, 1, 0], [0, 1, 0, 1]],
    0x27 => [[0], [0], [0], [0], [0], [1], [1]],
    0x28 => [[0, 0], [0, 0], [0, 1], [1, 0], [1, 0], [1, 0], [0, 1]],
    0x29 => [[0, 0], [0, 0], [1, 0], [0, 1], [0, 1], [0, 1], [1, 0]],
    0x2A => [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 1, 1], [0, 1, 0], [1, 0, 1]],
    0x2B => [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 1, 1], [0, 1, 0]],
    0x2C => [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 1], [1, 0]],
    0x2D => [[0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [0, 0], [1, 1]],
    0x2E => [[0], [0], [0], [0], [0], [0], [1]],
    0x2F => [[0, 0], [0, 0], [0, 0], [0, 1], [0, 1], [1, 0], [1, 0]],
    0x30 => [[0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 0]],
    0x31 => [[0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 1, 0], [0, 1, 0], [0, 1, 0], [1, 1, 1]],
    0x32 => [[0, 0, 0], [0, 0, 0], [1, 1, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 1, 1]],
    0x33 => [[0, 0, 0], [0, 0, 0], [1, 1, 0], [0, 0, 1], [0, 1, 0], [0, 0, 1], [1, 1, 0]],
    0x34 => [[0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 0, 0], [1, 0, 1], [1, 1, 1], [0, 0, 1]],
    0x35 => [[0, 0, 0], [0, 0, 0], [1, 1, 1], [1, 0, 0], [1, 1, 0], [0, 0, 1], [1, 1, 0]],
    0x36 => [[0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 0, 0], [1, 1, 0], [1, 0, 1], [0, 1, 0]],
    0x37 => [[0, 0, 0], [0, 0, 0], [1, 1, 1], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 0, 0]],
    0x38 => [[0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 0, 1], [0, 1, 0], [1, 0, 1], [0, 1, 0]],
    0x39 => [[0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 0, 1], [0, 1, 1], [0, 0, 1], [0, 1, 0]],
    0x3A => [[0], [0], [0], [0], [1], [0], [1]],
    0x3B => [[0, 0], [0, 0], [0, 0], [0, 1], [0, 0], [0, 1], [1, 0]],
    0x3C => [[0, 0], [0, 0], [0, 0], [0, 0], [0, 1], [1, 0], [0, 1]],
    0x3D => [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [1, 1, 1], [0, 0, 0], [1, 1, 1]],
    0x3E => [[0, 0], [0, 0], [0, 0], [0, 0], [1, 0], [0, 1], [1, 0]],
    0x3F => [[0, 0, 0], [0, 0, 0], [1, 1, 0], [0, 0, 1], [0, 1, 0], [0, 0, 0], [0, 1, 0]],
    0x40 => [[0, 0, 0, 0], [0, 0, 0, 0], [0, 1, 1, 0], [1, 0, 0, 1], [1, 1, 1, 1], [1, 1, 0, 1], [0, 1, 1, 0]],
    0x41 => [[0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 1, 1], [1, 0, 1], [1, 0, 1]],
    0x42 => [[0, 0, 0], [0, 0, 0], [1, 1, 0], [1, 0, 1], [1, 1, 0], [1, 0, 1], [1, 1, 0]],
    0x43 => [[0, 0, 0], [0, 0, 0], [0, 1, 1], [1, 0, 0], [1, 0, 0], [1, 0, 0], [0, 1, 1]],
    0x44 => [[0, 0, 0], [0, 0, 0], [1, 1, 0], [1, 0, 1], [1, 0, 1], [1, 0, 1], [1, 1, 0]],
    0x45 => [[0, 0, 0], [0, 0, 0], [1, 1, 1], [1, 0, 0], [1, 1, 0], [1, 0, 0], [1, 1, 1]],
    0x46 => [[0, 0, 0], [0, 0, 0], [1, 1, 1], [1, 0, 0], [1, 1, 0], [1, 0, 0], [1, 0, 0]],
    0x47 => [[0, 0, 0], [0, 0, 0], [0, 1, 1], [1, 0, 0], [1, 0, 1], [1, 0, 1], [0, 1, 1]],
    0x48 => [[0, 0, 0], [0, 0, 0], [1, 0, 1], [1, 0, 1], [1, 1, 1], [1, 0, 1], [1, 0, 1]],
    0x49 => [[0, 0, 0], [0, 0, 0], [1, 1, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [1, 1, 1]],
    0x4A => [[0, 0, 0], [0, 0, 0], [1, 1, 1], [0, 0, 1], [0, 0, 1], [1, 0, 1], [0, 1, 0]],
    0x4B => [[0, 0, 0], [0, 0, 0], [1, 0, 1], [1, 0, 1], [1, 1, 0], [1, 0, 1], [1, 0, 1]],
    0x4C => [[0, 0, 0], [0, 0, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [1, 0, 0], [1, 1, 1]],
    0x4D => [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [1, 0, 0, 0, 1], [1, 1, 0, 1, 1], [1, 0, 1, 0, 1], [1, 0, 0, 0, 1], [1, 0, 0, 0, 1]],
    0x4E => [[0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 1], [1, 1, 0, 1], [1, 0, 1, 1], [1, 0, 1, 1], [1, 0, 0, 1]],
    0x4F => [[0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 0, 1], [1, 0, 1], [1, 0, 1], [0, 1, 0]],
    0x50 => [[0, 0, 0], [0, 0, 0], [1, 1, 0], [1, 0, 1], [1, 0, 1], [1, 1, 0], [1, 0, 0]],
    0x51 => [[0, 0, 0, 0], [0, 0, 0, 0], [0, 1, 1, 0], [1, 0, 0, 1], [1, 0, 0, 1], [1, 0, 1, 0], [0, 1, 0, 1]],
    0x52 => [[0, 0, 0], [0, 0, 0], [1, 1, 0], [1, 0, 1], [1, 0, 1], [1, 1, 0], [1, 0, 1]],
    0x53 => [[0, 0, 0], [0, 0, 0], [0, 1, 1], [1, 0, 0], [0, 1, 0], [0, 0, 1], [1, 1, 0]],
    0x54 => [[0, 0, 0], [0, 0, 0], [1, 1, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0], [0, 1, 0]],
    0x55 => [[0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 1], [1, 0, 0, 1], [1, 0, 0, 1], [1, 0, 0, 1], [0, 1, 1, 0]],
    0x56 => [[0, 0, 0], [0, 0, 0], [1, 0, 1], [1, 0, 1], [1, 0, 1], [1, 0, 1], [0, 1, 0]],
    0x57 => [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [1, 0, 0, 0, 1], [1, 0, 0, 0, 1], [1, 0, 0, 0, 1], [1, 0, 1, 0, 1], [0, 1, 0, 1, 0]],
    0x58 => [[0, 0, 0], [0, 0, 0], [1, 0, 1], [1, 0, 1], [0, 1, 0], [1, 0, 1], [1, 0, 1]],
    0x59 => [[0, 0, 0], [0, 0, 0], [1, 0, 1], [1, 0, 1], [0, 1, 0], [0, 1, 0], [0, 1, 0]],
    0x5A => [[0, 0, 0], [0, 0, 0], [1, 1, 1], [0, 0, 1], [0, 1, 0], [1, 0, 0], [1, 1, 1]],
    0x5B => [[0, 0], [0, 0], [1, 1], [1, 0], [1, 0], [1, 0], [1, 1]],
    0x5C => [[0, 0], [0, 0], [0, 0], [1, 0], [1, 0], [0, 1], [0, 1]],
    0x5D => [[0, 0], [0, 0], [1, 1], [0, 1], [0, 1], [0, 1], [1, 1]],
    0x5E => [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 1, 0], [1, 0, 1]],
    0x5F => [[0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [0, 0, 0], [1, 1, 1]]
  }

  @doc """
  Returns the pixel matrix for `char` (a codepoint in `0x20..0x5F`) with each
  lit pixel set to `brightness` (default `0xFF`) and each dark pixel `0`.

  Raises for codepoints without a glyph. (The original clause-per-character
  implementation raised `FunctionClauseError`; this raises `KeyError`.)
  """
  def char_matrix(char, brightness \\ 0xFF) do
    @glyphs
    |> Map.fetch!(char)
    |> Enum.map(fn row -> Enum.map(row, &(&1 * brightness)) end)
  end
end
|
lib/scroll_hat/fonts/medium.ex
| 0.664867
| 0.604632
|
medium.ex
|
starcoder
|
defmodule Solana.SystemProgram.Nonce do
  @moduledoc """
  Functions for interacting with the [System
  Program](https://docs.solana.com/developing/runtime-facilities/programs#system-program)'s
  nonce accounts, required for [durable transaction
  nonces](https://docs.solana.com/offline-signing/durable-nonce).
  These accounts can be useful for offline transactions, as well as transactions
  that require more time to generate a transaction signature than the normal
  `recent_blockhash` transaction mechanism gives them (~2 minutes).
  """
  alias Solana.{Instruction, Account, SystemProgram}
  import Solana.Helpers
  @doc """
  The size of a serialized nonce account.
  """
  def byte_size(), do: 80
  @doc """
  Translates the result of a `Solana.RPC.Request.get_account_info/2` into a
  nonce account's information.
  """
  @spec from_account_info(info :: map) :: map | :error
  def from_account_info(info)
  def from_account_info(%{"data" => %{"parsed" => %{"info" => info}}}) do
    from_nonce_account_info(info)
  end
  def from_account_info(_), do: :error
  # Decodes the JSON-parsed nonce info map into native types; returns :error
  # when the expected keys are missing.
  defp from_nonce_account_info(%{
         "authority" => authority,
         "blockhash" => blockhash,
         "feeCalculator" => calculator
       }) do
    %{
      authority: Solana.pubkey!(authority),
      blockhash: B58.decode58!(blockhash),
      calculator: calculator
    }
  end
  defp from_nonce_account_info(_), do: :error
  @init_schema [
    nonce: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the nonce account"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the nonce authority"
    ]
  ]
  @doc """
  Generates the instructions for initializing a nonce account.
  ## Options
  #{NimbleOptions.docs(@init_schema)}
  """
  def init(opts) do
    case validate(opts, @init_schema) do
      {:ok, params} ->
        %Instruction{
          program: SystemProgram.id(),
          accounts: [
            %Account{key: params.nonce, writable?: true},
            %Account{key: Solana.recent_blockhashes()},
            %Account{key: Solana.rent()}
          ],
          # {6, 32}: instruction index 6 followed by the authority pubkey —
          # presumably InitializeNonceAccount per the System Program's
          # instruction layout; verify against the Solana docs.
          data: Instruction.encode_data([{6, 32}, params.authority])
        }
      error ->
        error
    end
  end
  @authorize_schema [
    nonce: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the nonce account"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the nonce authority"
    ],
    new_authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key to set as the new nonce authority"
    ]
  ]
  @doc """
  Generates the instructions for re-assigning the authority of a nonce account.
  ## Options
  #{NimbleOptions.docs(@authorize_schema)}
  """
  def authorize(opts) do
    case validate(opts, @authorize_schema) do
      {:ok, params} ->
        %Instruction{
          program: SystemProgram.id(),
          accounts: [
            %Account{key: params.nonce, writable?: true},
            %Account{key: params.authority, signer?: true}
          ],
          # {7, 32}: instruction index 7 followed by the new authority pubkey
          # (AuthorizeNonceAccount — confirm against the System Program layout).
          data: Instruction.encode_data([{7, 32}, params.new_authority])
        }
      error ->
        error
    end
  end
  @advance_schema [
    nonce: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the nonce account"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the nonce authority"
    ]
  ]
  @doc """
  Generates the instructions for advancing a nonce account's stored nonce value.
  ## Options
  #{NimbleOptions.docs(@advance_schema)}
  """
  def advance(opts) do
    case validate(opts, @advance_schema) do
      {:ok, params} ->
        %Instruction{
          program: SystemProgram.id(),
          accounts: [
            %Account{key: params.nonce, writable?: true},
            %Account{key: Solana.recent_blockhashes()},
            %Account{key: params.authority, signer?: true}
          ],
          # {4, 32}: instruction index 4, no payload
          # (AdvanceNonceAccount — confirm against the System Program layout).
          data: Instruction.encode_data([{4, 32}])
        }
      error ->
        error
    end
  end
  @withdraw_schema [
    nonce: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the nonce account"
    ],
    authority: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the nonce authority"
    ],
    to: [
      type: {:custom, Solana.Key, :check, []},
      required: true,
      doc: "Public key of the account which will get the withdrawn lamports"
    ],
    lamports: [
      type: :pos_integer,
      required: true,
      doc: "Amount of lamports to transfer to the created account"
    ]
  ]
  @doc """
  Generates the instructions for withdrawing funds form a nonce account.
  ## Options
  #{NimbleOptions.docs(@withdraw_schema)}
  """
  def withdraw(opts) do
    case validate(opts, @withdraw_schema) do
      {:ok, params} ->
        %Instruction{
          program: SystemProgram.id(),
          accounts: [
            %Account{key: params.nonce, writable?: true},
            %Account{key: params.to, writable?: true},
            %Account{key: Solana.recent_blockhashes()},
            %Account{key: Solana.rent()},
            %Account{key: params.authority, signer?: true}
          ],
          # {5, 32}: instruction index 5, then the lamport amount as a 64-bit
          # value (WithdrawNonceAccount — confirm against the System Program layout).
          data: Instruction.encode_data([{5, 32}, {params.lamports, 64}])
        }
      error ->
        error
    end
  end
end
|
lib/solana/system_program/nonce.ex
| 0.814496
| 0.575469
|
nonce.ex
|
starcoder
|
defmodule Harald.HCI.ACLData do
  @moduledoc """
  Encoding and decoding of HCI ACL Data packets (the host/controller transport
  carrying L2CAP payloads).

  Reference: version 5.2, vol 4, part E, 5.4.2.
  """

  alias Harald.Host.L2CAP

  # Every field is mandatory; an ACL packet is meaningless without all five.
  @enforce_keys [
    :handle,
    :packet_boundary_flag,
    :broadcast_flag,
    :data_total_length,
    :data
  ]

  defstruct [
    :handle,
    :packet_boundary_flag,
    :broadcast_flag,
    :data_total_length,
    :data
  ]

  # Decodes a binary ACL Data packet into a struct.
  #
  # Returns `{:ok, %__MODULE__{}}` on success. When the L2CAP payload is not
  # implemented, the `{:not_implemented, ...}` error is re-tagged with the
  # whole encoded packet (rather than just the payload) for better context.
  def decode(
        <<
          # NOTE(review): the handle is kept as a raw 12-bit bitstring, not an
          # integer; encode/1 writes it back with the same ::bits-size(12).
          handle::bits-size(12),
          pb_flag::size(2),
          bc_flag::size(2),
          data_total_length::little-size(16),
          # The payload length is taken from the header field just decoded.
          data::binary-size(data_total_length)
        >> = encoded_bin
      ) do
    with {:ok, decoded_data} <- L2CAP.decode(data) do
      decoded = %__MODULE__{
        handle: handle,
        packet_boundary_flag: decode_pb_flag!(pb_flag),
        broadcast_flag: decode_bc_flag!(bc_flag),
        data_total_length: data_total_length,
        data: decoded_data
      }

      {:ok, decoded}
    else
      {:error, {:not_implemented, error, _bin}} ->
        {:error, {:not_implemented, error, encoded_bin}}
    end
  end

  # Serializes a struct back into the binary wire format. Inverse of decode/1
  # for the header fields; the payload is re-encoded via L2CAP.encode/1.
  def encode(%__MODULE__{
        handle: handle,
        packet_boundary_flag: pb_flag,
        broadcast_flag: bc_flag,
        data_total_length: data_total_length,
        data: data
      }) do
    encoded_pb_flag = encode_pb_flag!(pb_flag)
    encoded_bc_flag = encode_bc_flag!(bc_flag)
    # Asserts success: an unencodable payload is a programmer error here.
    {:ok, encoded_data} = L2CAP.encode(data)

    encoded = <<
      handle::bits-size(12),
      encoded_pb_flag::size(2),
      encoded_bc_flag::size(2),
      data_total_length::little-size(16),
      encoded_data::binary
    >>

    {:ok, encoded}
  end

  # Convenience constructor; data_total_length is derived from the payload.
  # NOTE(review): assumes `data` is already an encoded binary — byte_size/1
  # raises on a decoded struct. Confirm against callers.
  def new(handle, packet_boundary_flag, broadcast_flag, data) do
    acl_data = %__MODULE__{
      handle: handle,
      packet_boundary_flag: packet_boundary_flag,
      broadcast_flag: broadcast_flag,
      data_total_length: byte_size(data),
      data: data
    }

    {:ok, acl_data}
  end

  # Broadcast flag: maps the 2-bit wire value to a labelled map. Raises on a
  # value outside 0..3 (impossible given the ::size(2) match in decode/1).
  defp decode_bc_flag!(0b00 = bc_flag) do
    %{description: "Point-to-point (ACL-U, AMP-U, or LE-U)", value: bc_flag}
  end

  defp decode_bc_flag!(0b01 = bc_flag) do
    %{description: "BR/EDR broadcast (ASB-U)", value: bc_flag}
  end

  defp decode_bc_flag!(0b10 = bc_flag) do
    %{description: "Reserved for future use.", value: bc_flag}
  end

  defp decode_bc_flag!(0b11 = bc_flag) do
    %{description: "Reserved for future use.", value: bc_flag}
  end

  # Packet boundary flag: same labelled-map treatment for the 2-bit value.
  defp decode_pb_flag!(0b00 = pb_flag) do
    %{
      description:
        "First non-automatically-flushable packet of a higher layer message (start of a non-automatically-flushable L2CAP PDU) from Host to Controller.",
      value: pb_flag
    }
  end

  defp decode_pb_flag!(0b01 = pb_flag) do
    %{
      description: "Continuing fragment of a higher layer message",
      value: pb_flag
    }
  end

  defp decode_pb_flag!(0b10 = pb_flag) do
    %{
      description:
        "First automatically flushable packet of a higher layer message (start of an automatically-flushable L2CAP PDU).",
      value: pb_flag
    }
  end

  defp decode_pb_flag!(0b11 = pb_flag) do
    %{
      description: "A complete L2CAP PDU. Automatically flushable.",
      value: pb_flag
    }
  end

  # Encoding only needs the numeric value back; the guard re-validates range.
  defp encode_bc_flag!(%{value: encoded_bc_flag})
       when encoded_bc_flag in [0b00, 0b01, 0b10, 0b11] do
    encoded_bc_flag
  end

  defp encode_pb_flag!(%{value: encoded_pb_flag})
       when encoded_pb_flag in [0b00, 0b01, 0b10, 0b11] do
    encoded_pb_flag
  end
end
|
lib/harald/hci/acl_data.ex
| 0.668988
| 0.478102
|
acl_data.ex
|
starcoder
|
defmodule Class do
  @moduledoc """
  The `Class` module defines a few macros that provide object-oriented features, such as inheritance and polymorphism, on top of Elixir's structs.
  Additional documentation is available on the [Classy structs Github page](https://github.com/timmolderez/classy-structs#usage).
  @author <NAME>
  """

  # `use Class` simply imports this module so defclass/2, var/1 and ~>/2
  # are available unqualified.
  defmacro __using__(_opts) do
    quote do
      import Class
    end
  end

  @doc """
  Defines a new immutable class
  The defclass macro is similar in use to defmodule, except that you can also
  use `var` to define fields, and `extends` to specify superclasses.
  Additional documentation is available on the [Classy structs Github page](https://github.com/timmolderez/classy-structs#usage).
  ## Examples
  ```Elixir
  defclass Animal do
    var weight: 0
    @abstract sound(Animal) :: String.t
  end
  defclass Dog do
    extends Animal
    var species: ""
    def new(species), do
  end
  ```
  """
  defmacro defclass(name, do: body) do
    # Retrieve the list of all class members
    # (a multi-statement body arrives as a :__block__ node; a single
    # statement arrives bare and is wrapped into a one-element list)
    block = if (macro_name(body) == :__block__) do
      elem(body, 2)
    else
      [body]
    end

    ### Class attributes
    attributes = Enum.filter(block,
      fn(member) -> is_tuple(member) and macro_name(member) == :@ end)

    # Rename any @abstract attributes to @callback
    # (so abstract methods become behaviour callbacks)
    attributes = Enum.map(attributes,
      fn(attribute) ->
        if (attribute_name(attribute) == :abstract) do
          rename_attribute(attribute, :callback)
        else
          attribute
        end
      end)

    ### Super classes
    extends = Enum.find(block,
      fn(member) -> is_tuple(member) and elem(member, 0) == :extends end)
    super_classes = if (extends != nil) do elem(extends, 2) else [] end

    ### Class fields
    fields = Enum.filter(block,
      fn(member) -> is_tuple(member) and macro_name(member) == :var end)
    # Expand the "var" macros so we get a Keyword list
    fields = Enum.map(fields,
      fn(field) -> Macro.expand(field,__CALLER__) end)

    # Include all inherited fields
    # (instantiates each super class at compile time and copies over every
    # field that this class does not itself override)
    all_fields = Enum.reduce(super_classes, fields,
      fn(super_class,fields) ->
        super_instance = instantiate_class(super_class, __CALLER__)
        Enum.reduce(Map.from_struct(super_instance), fields,
          fn({super_field_key, super_field_value}, fields) ->
            # Check if this field was already overridden
            if (Enum.find(fields, fn({field_key, _}) -> field_key == super_field_key end)) do
              fields
            else
              fields ++ [{super_field_key, Macro.escape(super_field_value)}]
            end
          end)
      end)

    ### Class methods
    # Anything that is not an attribute, field, extends clause or alias is
    # treated as a method definition.
    methods = Enum.filter(block,
      fn(member) ->
        is_tuple(member)
        and (not Enum.member? [:@, :var, :extends, :__aliases__], macro_name(member))
      end)

    # Generate a default constructor (if needed)
    methods = if (search_methods(methods, fn(name,arity) -> name == :new and arity == 0 end)) do
      methods
    else
      methods ++ [quote do
        def new() do
          unquote(name).__struct__
        end
      end]
    end

    # Include all inherited methods
    # (every public super-class function that is not overridden here, and is
    # not new/__struct__, is re-exposed via defdelegate)
    all_methods = Enum.reduce(super_classes, methods,
      fn(super_class, methods) ->
        module = Macro.expand(super_class,__CALLER__)
        # Find out which functions we need to inherit
        functions = module.__info__(:functions)
        inherited_functions = Enum.filter(functions, fn{ name, arity } ->
          if (name == :new or name == :__struct__) do
            false
          else
            overriding_method = search_methods(methods,
              fn(m_name, m_arity) ->
                name == m_name and arity == m_arity
              end)
            if (overriding_method != nil) do
              false
            else
              true
            end
          end
        end)
        # Construct defdelegate statements
        # Based on https://gist.github.com/orenbenkiki/5174435
        signatures = Enum.map(inherited_functions,
          fn { name, arity } ->
            args = if arity == 0 do
              []
            else
              Enum.map 1 .. arity, fn(i) -> { gen_param_name(i), [], nil } end
            end
            { name, [], args }
          end)
        delegates = Enum.map(signatures,
          fn(signature) ->
            quote do
              defdelegate unquote(signature), to: unquote(module)
            end
          end)
        methods ++ delegates
      end)

    # Only add a @behaviour for super classes that declared @callbacks
    # (behaviour_info/1 only exists on modules with callbacks).
    behaviours = Enum.reduce(super_classes, [], fn(super_class, behaviours) ->
      module = Macro.expand(super_class,__CALLER__)
      # Only add a @behaviour for super classes with @callbacks
      if (function_exported?(module, :behaviour_info, 1)) do
        behaviour = quote do
          @behaviour unquote(super_class)
        end
        behaviours ++ [behaviour]
      else
        behaviours
      end
    end)

    # Emit the final module: struct + attributes + behaviours + methods.
    quote do
      defmodule unquote(name) do
        defstruct(
          unquote(all_fields)
        )
        unquote(attributes)
        unquote(behaviours)
        unquote(all_methods)
      end
    end
  end

  @doc """
  Defines a new field in a class, with its default value
  (The default value cannot be an anonymous function.)
  ## Examples
  ```Elixir
  var species: "Mammal"
  var dimensions: [20, 40]
  ```
  """
  defmacro var([keyword]) do
    keyword
  end

  @doc """
  Call a function using dynamic dispatch
  The function is dispatched based on the type of the first argument.
  (To use static dispatch, use the `.` operator instead of `~>`.)
  ## Examples
  ```Elixir
  use Class
  defclass Animal do
    def sound(this), do: "..."
  end
  defclass Cat do
    extends Animal
    def sound(this), do: "Meow!"
  end
  c = Cat.new()
  Animal.sound(c) # "..."
  Animal~>sound(c) # "Meow!"
  ```
  """
  defmacro _module ~> expr do
    # Dispatch on the runtime struct module of the first argument; the
    # module written before ~> is intentionally ignored.
    receiver = List.first(elem(expr, 2))
    quote do
      module = unquote(receiver).__struct__
      module.unquote(expr)
    end
  end

  # Create an instance of the given class
  # (prefers the zero-arg constructor; falls back to the bare struct)
  defp instantiate_class(class, env) do
    module = Macro.expand(class, env)
    has_default_constructor = :erlang.function_exported(module, :new, 0)
    if (has_default_constructor) do
      module.new()
    else
      module.__struct__
    end
  end

  # Search for a specific method based on the given condition
  # (inspects each method's AST to extract its name and arity)
  defp search_methods(methods, condition_fn) do
    Enum.find(methods,
      fn(method) ->
        api = List.first(elem(method, 2))
        name = elem(api, 0)
        # A nil args element means a zero-arity definition.
        arity = if (elem(api, 2) == nil) do
          0
        else
          length(elem(api, 2))
        end
        condition_fn.(name, arity)
      end)
  end

  # Generates a parameter name given an index
  # Index 0 produces "a"; 1 produces "b"; 2 produces "c"; and so on
  # NOTE(review): the only caller iterates 1..arity, so generated parameter
  # names actually start at "b" — confirm whether that is intended.
  defp gen_param_name(i) do
    # See https://stackoverflow.com/questions/41313995/elixir-convert-integer-to-unicode-character
    x = 97 + i # The UTF-8 code point for the letter "a" is 97
    # String.to_atom/1 is safe here: input is compile-time and bounded by arity.
    String.to_atom(<<x::utf8>>)
  end

  # Rewrites the name inside an attribute's AST (e.g. @abstract -> @callback).
  defp rename_attribute(attribute_ast, new_name) do
    body = elem(attribute_ast, 2)
    body_first = List.first(body)
    new_body_first = put_elem(body_first, 0, new_name)
    new_body = List.replace_at(body,0,new_body_first)
    put_elem(attribute_ast, 2, new_body)
  end

  # Given an AST of an attribute, get its name
  defp attribute_name(attribute_ast) do
    elem(List.first(elem(attribute_ast, 2)), 0)
  end

  # Given an AST representing the application of a macro,
  # get the name of that macro
  defp macro_name(ast) do
    elem(ast, 0)
  end
end
|
lib/classy_structs.ex
| 0.656108
| 0.802633
|
classy_structs.ex
|
starcoder
|
defmodule D7 do
  @moduledoc """
  --- Day 7: Handy Haversacks ---
  You land at the regional airport in time for your next flight. In fact, it looks like you'll even have time to grab some food: all flights are currently delayed due to issues in luggage processing.
  Due to recent aviation regulations, many rules (your puzzle input) are being enforced about bags and their contents; bags must be color-coded and must contain specific quantities of other color-coded bags. Apparently, nobody responsible for these regulations considered how long they would take to enforce!
  How many bag colors can eventually contain at least one shiny gold bag? (The list of rules is quite long; make sure you get all of it.)
  --- Part Two ---
  It's getting pretty expensive to fly these days - not because of ticket prices, but because of the ridiculous number of bags you need to buy!
  Consider again your shiny gold bag and the rules from the above example:
  Of course, the actual rules have a small chance of going several levels deeper than this example; be sure to count all of the bags, even if the nesting becomes topologically impractical!
  How many individual bags are required inside your single shiny gold bag?
  """

  @behaviour Day

  # Captures the count and the two-word color from e.g. "3 shiny gold bags".
  @regex ~r/^(\d+) (\w+ \w+) bags?/

  # Depth-first total of bags transitively contained inside `node`: each edge
  # contributes its own count plus that count times the child's own total.
  def dfs(graph, node) do
    graph
    |> Graph.out_edges(node)
    |> Enum.reduce(0, fn %Graph.Edge{v2: child, weight: count}, total ->
      total + count + count * dfs(graph, child)
    end)
  end

  # Part 1: how many colors can transitively reach "shiny gold".
  # Part 2: how many bags a single "shiny gold" bag must contain.
  def solve(input) do
    graph = Graph.add_edges(Graph.new(), Enum.flat_map(input, &rule_to_edges/1))

    part_1 = graph |> Graph.reaching_neighbors(["shiny gold"]) |> length()
    part_2 = dfs(graph, "shiny gold")

    {part_1, part_2}
  end

  # Parses one rule line into {container, contained, weight: n} edge tuples.
  defp rule_to_edges(line) do
    [container, contents] = String.split(line, " bags contain ")

    contents
    |> contained_bags()
    |> Enum.map(fn [count_string, color] ->
      {container, color, weight: Utils.to_int(count_string)}
    end)
  end

  # A bag holding nothing produces no edges; otherwise capture each entry.
  defp contained_bags("no other bags."), do: []

  defp contained_bags(contents) do
    contents
    |> String.split(", ")
    |> Enum.map(&Regex.run(@regex, &1, capture: :all_but_first))
  end
end
|
lib/days/07.ex
| 0.652131
| 0.602412
|
07.ex
|
starcoder
|
defmodule Machinery do
  @moduledoc """
  This is the main Machinery module.

  It keeps most of the Machinery logics, it's the module that will be
  imported with `use` on the module responsible for the state machine.

  Declare the states as an argument when importing `Machinery` on the module
  that will control your states transitions.

  Machinery expects a `Keyword` as argument with two keys `states` and `transitions`.

  ## Parameters

    - `opts`: A Keyword including `states` and `transitions`.
      - `states`: A List of Strings representing each state.
      - `transitions`: A Map for each state and its allowed next state(s).

  ## Example
  ```
  defmodule YourProject.UserStateMachine do
    use Machinery,
      # The first state declared will be considered
      # the initial state
      states: ["created", "partial", "complete"],
      transitions: %{
        "created" => ["partial", "complete"],
        "partial" => "complete"
      }
  end
  ```
  """

  @doc """
  Main macro function that will be executed upon the load of the
  module using it.

  It basically stores the states and transitions.

  It expects a `Keyword` as argument with two keys `states` and `transitions`.

    - `states`: A List of Strings representing each state.
    - `transitions`: A Map for each state and its allowed next state(s).

  P.S. The first state declared will be considered the initial state.
  """
  defmacro __using__(opts) do
    field = Keyword.get(opts, :field, :state)
    states = Keyword.get(opts, :states)
    transitions = Keyword.get(opts, :transitions)

    # Quoted response to be inserted on the abstract syntax tree (AST) of
    # the module that imported this using `use`.
    quote bind_quoted: [
            field: field,
            states: states,
            transitions: transitions
          ] do
      # Functions to hold and expose internal info of the states.
      def _machinery_initial_state(), do: List.first(unquote(states))
      def _machinery_states(), do: unquote(states)
      def _machinery_transitions(), do: unquote(Macro.escape(transitions))
      def _field(), do: unquote(field)
    end
  end

  @doc """
  Start function that will trigger a supervisor for the Machinery.Transitions, a
  GenServer that controls the state transitions.
  """
  def start(_type, _args) do
    children = [{Machinery.Transitions, name: Machinery.Transitions}]
    opts = [strategy: :one_for_one, name: Machinery.Supervisor]
    Supervisor.start_link(children, opts)
  end

  @doc """
  Triggers the transition of a struct to a new state, accordingly to a specific
  state machine module, if it passes any existing guard functions.
  It also runs any before or after callbacks and returns a tuple with
  `{:ok, struct}`, or `{:error, "reason"}`.

  ## Parameters

    - `struct`: The `struct` you want to transit to another state.
    - `state_machine_module`: The module that holds the state machine logic, where Machinery was imported.
    - `next_state`: String of the next state you want to transition to.

  ## Examples

      Machinery.transition_to(%User{state: :partial}, UserStateMachine, :completed)
      {:ok, %User{state: :completed}}

  """
  # Fixed spec: `Atom.t()` is not a type; the built-in is `atom()`.
  @spec transition_to(struct, module, atom()) :: {:ok, struct} | {:error, String.t()}
  def transition_to(struct, state_machine_module, next_state) do
    GenServer.call(
      Machinery.Transitions,
      {
        :run,
        struct,
        state_machine_module,
        next_state
      },
      :infinity
    )
  catch
    # The Transitions GenServer exits with a nested tuple whose innermost
    # first element is the original exception; unwrap and re-raise it.
    :exit, error_tuple ->
      exception = deep_first_of_tuple(error_tuple)
      raise exception
  end

  # Recursively takes the first element until a non-tuple value remains.
  defp deep_first_of_tuple(tuple) when is_tuple(tuple) do
    tuple
    |> elem(0)
    |> deep_first_of_tuple
  end

  defp deep_first_of_tuple(value), do: value
end
|
deps/machinery/lib/machinery.ex
| 0.907102
| 0.969728
|
machinery.ex
|
starcoder
|
defmodule Snitch.Data.Model.ShippingMethod do
  @moduledoc """
  ShippingMethod API
  """
  use Snitch.Data.Model

  import Ecto.Query

  alias Snitch.Data.Schema.ShippingMethod, as: SM
  alias Snitch.Data.Schema.ShippingCategory, as: SC
  alias Snitch.Data.Schema.Zone

  @doc """
  Creates a ShippingMethod with given Zones.

  * The `zone_structs` must be `Snitch.Data.Schema.Zone.t` structs.
  * The `category_structs` must be `Snitch.Data.Schema.ShippingCategory.t`
    structs.
  """
  @spec create(map, [Zone.t()], [SC.t()]) :: {:ok, SM.t()} | {:error, Ecto.Changeset.t()}
  def create(params, zone_structs, category_structs) do
    %SM{}
    |> SM.create_changeset(params, zone_structs, category_structs)
    |> Repo.insert()
  end

  @doc """
  Updates a ShippingMethod.

  The `zone_structs` must be `Snitch.Data.Schema.Zone.t` structs. These `zones`
  are set as the zones in this `shipping_method` and effectively replace the
  previous ones.

  Similarly for `category_structs`, these must be
  `Snitch.Data.Schema.ShippingCategory.t` structs.

  ## Updating the zones, shipping-categories
  ```
  new_params = %{name: "hyperloop"}
  new_zones = Repo.all(from z in Schema.Zone, where: like(z.name, "spacex%"))
  new_categories = Repo.all(
    from sc in Schema.ShippingCategory,
    where: like(sc.name, "heavy%")
  )
  {:ok, sm} =
    Model.ShippingMethod.update(
      shipping_method,
      new_params,
      new_zones,
      new_categories
    )
  ```

  ## Updating only params (not zones or shipping-categories)
  ```
  new_params = %{name: "hyperloop"}
  sm_preloaded = Repo.preload(shipping_method, [:zones, :shipping_categories])
  {:ok, sm} =
    Model.ShippingMethod.update(
      sm_preloaded,
      new_params,
      sm_preloaded.zones,
      sm_preloaded.shipping_categories
    )
  ```
  """
  @spec update(SM.t(), map, [Zone.t()], [SC.t()]) :: {:ok, SM.t()} | {:error, Ecto.Changeset.t()}
  def update(shipping_method, params, zone_structs, category_structs) do
    shipping_method
    |> SM.update_changeset(params, zone_structs, category_structs)
    |> Repo.update()
  end

  @spec delete(non_neg_integer | SM.t()) :: {:ok, SM.t()} | {:error, Ecto.Changeset.t()}
  def delete(id_or_instance), do: QH.delete(SM, id_or_instance, Repo)

  @spec get(map | non_neg_integer) :: {:ok, SM.t()} | {:error, atom}
  def get(query_fields_or_primary_key), do: QH.get(SM, query_fields_or_primary_key, Repo)

  @spec get_all :: [SM.t()]
  def get_all, do: Repo.all(SM)

  @doc """
  Returns a query for the distinct ShippingMethods that serve any of `zones`
  and support the given `shipping_category`, joining through both join tables.
  """
  @spec for_package_query([Zone.t()], SC.t()) :: Ecto.Query.t()
  def for_package_query(zones, %SC{} = shipping_category)
      when is_list(zones) do
    zone_ids = Enum.map(zones, fn %{id: id} -> id end)

    from(
      method_zone in "snitch_shipping_methods_zones",
      join: method_category in "snitch_shipping_methods_categories",
      on: method_category.shipping_method_id == method_zone.shipping_method_id,
      join: method in SM,
      on: method_zone.shipping_method_id == method.id,
      where: method_zone.zone_id in ^zone_ids,
      where: method_category.shipping_category_id == ^shipping_category.id,
      distinct: method.id,
      select: method
    )
  end
end
|
apps/snitch_core/lib/core/data/model/shipping_method.ex
| 0.869112
| 0.781497
|
shipping_method.ex
|
starcoder
|
defmodule AWS.RedshiftData do
  @moduledoc """
  You can use the Amazon Redshift Data API to run queries on Amazon Redshift
  tables.

  You can run individual SQL statements, which are committed if the statement
  succeeds.
  """

  @doc """
  Cancels a running query.

  To be canceled, a query must be running.
  """
  def cancel_statement(client, input, options \\ []),
    do: request(client, "CancelStatement", input, options)

  @doc """
  Describes the details about a specific instance when a query was run by the
  Amazon Redshift Data API.

  The information includes when the query started, when it finished, the query
  status, the number of rows returned, and the SQL statement.
  """
  def describe_statement(client, input, options \\ []),
    do: request(client, "DescribeStatement", input, options)

  @doc """
  Describes the detailed information about a table from metadata in the cluster.

  The information includes its columns. A token is returned to page through the
  column list. Depending on the authorization method, use one of the following
  combinations of request parameters:

    * AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
      the secret and the cluster identifier that matches the cluster in the secret.

    * Temporary credentials - specify the cluster identifier, the
      database name, and the database user name. Permission to call the
      `redshift:GetClusterCredentials` operation is required to use this method.
  """
  def describe_table(client, input, options \\ []),
    do: request(client, "DescribeTable", input, options)

  @doc """
  Runs an SQL statement, which can be data manipulation language (DML) or data
  definition language (DDL).

  This statement must be a single SQL statement. Depending on the authorization
  method, use one of the following combinations of request parameters:

    * AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
      the secret and the cluster identifier that matches the cluster in the secret.

    * Temporary credentials - specify the cluster identifier, the
      database name, and the database user name. Permission to call the
      `redshift:GetClusterCredentials` operation is required to use this method.
  """
  def execute_statement(client, input, options \\ []),
    do: request(client, "ExecuteStatement", input, options)

  @doc """
  Fetches the temporarily cached result of an SQL statement.

  A token is returned to page through the statement results.
  """
  def get_statement_result(client, input, options \\ []),
    do: request(client, "GetStatementResult", input, options)

  @doc """
  List the databases in a cluster.

  A token is returned to page through the database list. Depending on the
  authorization method, use one of the following combinations of request
  parameters:

    * AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
      the secret and the cluster identifier that matches the cluster in the secret.

    * Temporary credentials - specify the cluster identifier, the
      database name, and the database user name. Permission to call the
      `redshift:GetClusterCredentials` operation is required to use this method.
  """
  def list_databases(client, input, options \\ []),
    do: request(client, "ListDatabases", input, options)

  @doc """
  Lists the schemas in a database.

  A token is returned to page through the schema list. Depending on the
  authorization method, use one of the following combinations of request
  parameters:

    * AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
      the secret and the cluster identifier that matches the cluster in the secret.

    * Temporary credentials - specify the cluster identifier, the
      database name, and the database user name. Permission to call the
      `redshift:GetClusterCredentials` operation is required to use this method.
  """
  def list_schemas(client, input, options \\ []),
    do: request(client, "ListSchemas", input, options)

  @doc """
  List of SQL statements.

  By default, only finished statements are shown. A token is returned to page
  through the statement list.
  """
  def list_statements(client, input, options \\ []),
    do: request(client, "ListStatements", input, options)

  @doc """
  List the tables in a database.

  If neither `SchemaPattern` nor `TablePattern` are specified, then all tables in
  the database are returned. A token is returned to page through the table list.
  Depending on the authorization method, use one of the following combinations of
  request parameters:

    * AWS Secrets Manager - specify the Amazon Resource Name (ARN) of
      the secret and the cluster identifier that matches the cluster in the secret.

    * Temporary credentials - specify the cluster identifier, the
      database name, and the database user name. Permission to call the
      `redshift:GetClusterCredentials` operation is required to use this method.
  """
  def list_tables(client, input, options \\ []),
    do: request(client, "ListTables", input, options)

  # Builds, signs (SigV4), and POSTs an X-Amz-JSON-1.1 request for `action`.
  @spec request(AWS.Client.t(), binary(), map(), list()) ::
          {:ok, map() | nil, map()}
          | {:error, term()}
  defp request(client, action, input, options) do
    client = %{client | service: "redshift-data"}
    host = build_host("redshift-data", client)
    url = build_url(host, client)
    payload = encode!(client, input)

    unsigned_headers = [
      {"Host", host},
      {"Content-Type", "application/x-amz-json-1.1"},
      {"X-Amz-Target", "RedshiftData.#{action}"}
    ]

    signed_headers = AWS.Request.sign_v4(client, "POST", url, unsigned_headers, payload)
    post(client, url, payload, signed_headers, options)
  end

  # Sends the request; a 200 yields the decoded body (nil when empty), any
  # other success status is surfaced as an :unexpected_response error.
  defp post(client, url, payload, headers, options) do
    case AWS.Client.request(client, :post, url, payload, headers, options) do
      {:ok, %{status_code: 200, body: raw_body} = response} ->
        decoded = if raw_body != "", do: decode!(client, raw_body)
        {:ok, decoded, response}

      {:ok, response} ->
        {:error, {:unexpected_response, response}}

      {:error, _reason} = error ->
        error
    end
  end

  # "local" region with an explicit endpoint wins; plain "local" targets
  # localhost; otherwise the standard <prefix>.<region>.<endpoint> form.
  defp build_host(_endpoint_prefix, %{region: "local", endpoint: endpoint}), do: endpoint

  defp build_host(_endpoint_prefix, %{region: "local"}), do: "localhost"

  defp build_host(endpoint_prefix, %{region: region, endpoint: endpoint}),
    do: "#{endpoint_prefix}.#{region}.#{endpoint}"

  defp build_url(host, %{:proto => proto, :port => port}), do: "#{proto}://#{host}:#{port}/"

  defp encode!(client, payload), do: AWS.Client.encode!(client, payload, :json)

  defp decode!(client, payload), do: AWS.Client.decode!(client, payload, :json)
end
|
lib/aws/generated/redshift_data.ex
| 0.8119
| 0.615637
|
redshift_data.ex
|
starcoder
|
defmodule Game.Stats do
  @moduledoc """
  Regen stats during ticks
  """

  alias Data.Stats

  @doc """
  Regen statistics (hp/sp) every few ticks

  Adds `regen` points to the given stat, clamped to its maximum.

      iex> Game.Stats.regen(%{health_points: 10, max_health_points: 15}, :health_points, 3)
      %{health_points: 13, max_health_points: 15}

      iex> Game.Stats.regen(%{health_points: 13, max_health_points: 15}, :health_points, 3)
      %{health_points: 15, max_health_points: 15}

      iex> Game.Stats.regen(%{skill_points: 10, max_skill_points: 15}, :skill_points, 3)
      %{skill_points: 13, max_skill_points: 15}

      iex> Game.Stats.regen(%{skill_points: 13, max_skill_points: 15}, :skill_points, 3)
      %{skill_points: 15, max_skill_points: 15}

      iex> Game.Stats.regen(%{endurance_points: 10, max_endurance_points: 15}, :endurance_points, 3)
      %{endurance_points: 13, max_endurance_points: 15}

      iex> Game.Stats.regen(%{endurance_points: 13, max_endurance_points: 15}, :endurance_points, 3)
      %{endurance_points: 15, max_endurance_points: 15}
  """
  # Fixed spec: the original declared `(atom, Stats.t(), map)` which matched
  # neither the parameter order nor the types of `regen(stats, field, regen)`.
  @spec regen(Stats.t(), atom, integer) :: Stats.t()
  def regen(stats, field, regen)

  # Each clause adds the regen amount and clamps at the stat's maximum;
  # min/2 replaces the original rebuild-then-case clamping, same result.
  def regen(stats, :health_points, health_points) do
    %{
      stats
      | health_points: min(stats.health_points + health_points, stats.max_health_points)
    }
  end

  def regen(stats, :skill_points, skill_points) do
    %{
      stats
      | skill_points: min(stats.skill_points + skill_points, stats.max_skill_points)
    }
  end

  def regen(stats, :endurance_points, endurance_points) do
    %{
      stats
      | endurance_points: min(stats.endurance_points + endurance_points, stats.max_endurance_points)
    }
  end
end
|
lib/game/stats.ex
| 0.805211
| 0.555978
|
stats.ex
|
starcoder
|
defmodule Absinthe.ValidationPhaseCase do
  import ExUnit.Assertions

  alias Absinthe.{Blueprint, Schema, Phase, Pipeline, Language}

  # An error checker receives {node, error} pairs and asserts on them.
  @type error_checker_t :: ([{Blueprint.t(), Phase.Error.t()}] -> boolean)

  # Renders a human-readable location hint for a line spec, which may be
  # nil/[], a single line number, or a list of line numbers.
  def get_error_location(line) do
    case List.wrap(line) do
      [] ->
        "(at any line number)"

      [single] ->
        "(from line ##{single})"

      multiple when is_list(multiple) ->
        numbers = multiple |> Enum.join(", #")
        "(from lines ##{numbers})"
    end
  end

  # `use Absinthe.ValidationPhaseCase, phase: SomePhase` injects helpers bound
  # to that validation phase into the test module.
  defmacro __using__(opts) do
    phase = Keyword.fetch!(opts, :phase)

    quote do
      use Absinthe.Case, unquote(opts)
      import unquote(__MODULE__)

      # Returns an error-checker fun asserting that some {node, error} pair
      # matches the given node struct kind, message, line(s), and field checks.
      def bad_value(node_kind, message, line, check \\ []) do
        location = unquote(__MODULE__).get_error_location(line)

        expectation_banner =
          "\nExpected #{node_kind} node with error #{location}:\n---\n#{message}\n---"

        check_fun = node_check_function(check)

        fn pairs ->
          assert !Enum.empty?(pairs), "No errors were found.\n#{expectation_banner}"

          matched =
            Enum.any?(pairs, fn
              # Match only errors produced by this case's phase with the
              # exact message, on a node of the expected struct kind.
              {%str{} = node, %Phase.Error{phase: unquote(phase), message: ^message} = err}
              when str == node_kind ->
                if check_fun.(node) do
                  # A nil/false line spec means "any line"; otherwise every
                  # requested line must appear among the error's locations.
                  if !line do
                    true
                  else
                    List.wrap(line)
                    |> Enum.all?(fn l ->
                      Enum.any?(err.locations, fn
                        %{line: ^l} ->
                          true

                        _ ->
                          false
                      end)
                    end)
                  end
                else
                  false
                end

              _ ->
                false
            end)

          formatted_errors =
            Enum.map(pairs, fn {_, error} ->
              "#{error.message} (from line #{inspect(error.locations)})"
            end)

          assert matched,
                 "Could not find error.\n#{expectation_banner}\n\n Did find these errors...\n ---\n " <>
                   Enum.join(formatted_errors, "\n ") <> "\n ---"
        end
      end

      # A keyword-list check asserts each given field equals its value;
      # a function check is used as-is.
      defp node_check_function(check) when is_list(check) do
        fn node ->
          Enum.all?(check, fn {key, value} -> Map.get(node, key) == value end)
        end
      end

      defp node_check_function(check) when is_function(check) do
        check
      end

      # NOTE(review): "PhaseLanguage.Source.t()" looks like a typo for
      # Language.Source.t() (as used by the sibling specs) — confirm.
      @spec assert_passes_validation(PhaseLanguage.Source.t(), map) :: no_return
      def assert_passes_validation(document, options) do
        assert_valid(Absinthe.Fixtures.PetsSchema, [unquote(phase)], document, options)
      end

      @spec assert_fails_validation(
              Language.Source.t(),
              map,
              [Absinthe.ValidationPhaseCase.error_checker_t()]
              | Absinthe.ValidationPhaseCase.error_checker_t()
            ) :: no_return
      def assert_fails_validation(document, options, error_checker) do
        assert_invalid(
          Absinthe.Fixtures.PetsSchema,
          [unquote(phase)],
          document,
          options,
          error_checker
        )
      end

      @spec assert_passes_validation_with_schema(Schema.t(), Language.Source.t(), map) ::
              no_return
      def assert_passes_validation_with_schema(schema, document, options) do
        assert_valid(schema, [unquote(phase)], document, options)
      end

      @spec assert_fails_validation_with_schema(
              Schema.t(),
              Language.Source.t(),
              map,
              Absinthe.ValidationPhaseCase.error_checker_t()
            ) :: no_return
      def assert_fails_validation_with_schema(schema, document, options, error_checker) do
        assert_invalid(schema, [unquote(phase)], document, options, error_checker)
      end
    end
  end

  # Runs the pipeline and asserts that no node carries any error.
  def assert_valid(schema, validations, document, options) do
    result =
      case run(schema, validations, document, options) do
        {:ok, result, _} ->
          result

        # :jump, etc
        {_other, result, _config} ->
          result
      end

    formatted_errors =
      result
      |> error_pairs
      |> Enum.map(fn {_, error} ->
        error.message
      end)

    assert Enum.empty?(formatted_errors),
           "Expected no errors, found:\n ---\n " <>
             Enum.join(formatted_errors, "\n ") <> "\n ---"
  end

  # Runs the pipeline and feeds all {node, error} pairs to each checker.
  def assert_invalid(schema, validations, document, options, error_checkers) do
    result =
      case run(schema, validations, document, options) do
        {:ok, result, _} ->
          result

        # :jump, etc
        {_other, result, _config} ->
          result
      end

    pairs = error_pairs(result)

    List.wrap(error_checkers)
    |> Enum.each(& &1.(pairs))
  end

  defp run(schema, validations, document, options) do
    pipeline = pre_validation_pipeline(schema, validations, options)
    Pipeline.run(document, pipeline)
  end

  # For schema documents: run the schema pipeline up to Phase.Schema, then
  # the requested validations.
  defp pre_validation_pipeline(schema, validations, :schema) do
    Pipeline.for_schema(schema)
    |> Pipeline.upto(Phase.Schema)
    |> Kernel.++(validations)
  end

  # For query documents: run up to the validation Result phase, dropping any
  # validation phase other than the ones under test (plus the Result phase).
  defp pre_validation_pipeline(schema, validations, options) do
    options = Keyword.put(options, :jump_phases, false)

    Pipeline.for_document(schema, options)
    |> Pipeline.upto(Phase.Document.Validation.Result)
    |> Pipeline.reject(fn phase ->
      Regex.match?(~r/Validation/, Atom.to_string(phase)) and
        phase not in [Phase.Document.Validation.Result | validations]
    end)
  end

  # Build a map of node => errors
  defp nodes_with_errors(input) do
    {_, errors} = Blueprint.prewalk(input, [], &do_nodes_with_errors/2)
    errors
  end

  # Flattens nodes carrying errors into {node, error} pairs.
  defp error_pairs(input) do
    input
    |> nodes_with_errors()
    |> Enum.flat_map(fn %{errors: errors} = node ->
      Enum.map(errors, &{node, &1})
    end)
  end

  # Nodes with a :raw field are walked through their raw AST as well.
  defp do_nodes_with_errors(%{raw: raw} = node, acc) do
    {_, errors} = Blueprint.prewalk(raw, acc, &do_nodes_with_errors/2)
    {node, errors}
  end

  defp do_nodes_with_errors(%{errors: []} = node, acc) do
    {node, acc}
  end

  defp do_nodes_with_errors(%{errors: _} = node, acc) do
    {node, [node | acc]}
  end

  defp do_nodes_with_errors(node, acc) do
    {node, acc}
  end
end
|
test/support/validation_phase_case.ex
| 0.698329
| 0.439627
|
validation_phase_case.ex
|
starcoder
|
defmodule Game.Format.Rooms do
@moduledoc """
Format functions for rooms
"""
import Game.Format.Context
alias Data.Exit
alias Data.Room
alias Game.Door
alias Game.Format
alias Game.Format.NPCs, as: FormatNPCs
@doc """
Display a room's name
"""
def room_name(room) do
  # Wrap the name in {room} color tags for the client renderer.
  "{room}" <> room.name <> "{/room}"
end
@doc """
Display a zone's name
"""
def zone_name(zone) do
  # Wrap the name in {zone} color tags for the client renderer.
  "{zone}" <> zone.name <> "{/zone}"
end
@doc """
Format full text for a room
"""
@spec room(Room.t(), [Item.t()], Map.t()) :: String.t()
# Builds the full "look" output: name, underline, description, the rendered
# map, occupants, then optional exits/items/shops lines. The heredoc's exact
# line layout is the output format; String.trim/1 drops the trailing newline.
def room(room, items, map) do
  """
  #{room_name(room)}
  #{Format.underline(room.name)}
  #{room_description(room)}\n
  #{map}
  #{who_is_here(room)}
  #{maybe_exits(room)}#{maybe_items(room, items)}#{shops(room)}
  """
  |> String.trim()
end
@doc """
Template a room's description
"""
def room_description(room) do
description = room_description_with_features(room)
context =
context()
|> assign(:room, "{green}#{room.name}{/green}")
|> assign(:zone, "{white}#{room.zone.name}{/white}")
|> assign(:features, Enum.join(features(room.features), " "))
context =
Enum.reduce(room.features, context, fn room_feature, context ->
assign(context, room_feature.key, feature(room_feature))
end)
Format.template(context, Format.resources(description))
end
defp room_description_with_features(room) do
contains_features? = String.contains?(room.description, "[features]")
contains_sub_features? = Enum.any?(room.features, fn feature ->
String.contains?(room.description, "[#{feature.key}]")
end)
case contains_features? || contains_sub_features? do
true ->
room.description
false ->
"#{room.description} [features]"
end
end
@doc """
Display a room's feature
"""
def feature(feature) do
String.replace(feature.short_description, feature.key, "{white}#{feature.key}{/white}")
end
@doc """
Display room features
"""
def features(features) do
Enum.map(features, &feature/1)
end
@doc """
Peak at a room from the room you're in
Example:
iex> Rooms.peak_room(%{name: "Hallway"}, "north")
"{room}Hallway{/room} is north."
"""
@spec peak_room(Room.t(), String.t()) :: String.t()
def peak_room(room, direction) do
"#{room_name(room)} is #{direction}."
end
@doc """
Output for an overworld look
"""
@spec overworld_room(Overworld.t(), String.t()) :: String.t()
def overworld_room(room, map) do
"""
{bold}#{map}{/bold}
#{who_is_here(room)}
#{maybe_exits(room)}
"""
|> String.trim()
end
defp maybe_exits(room) do
case room |> Room.exits() do
[] ->
""
_ ->
"Exits: #{exits(room)}\n"
end
end
defp exits(room) do
room
|> Room.exits()
|> Enum.sort()
|> Enum.map(fn direction ->
case Exit.exit_to(room, direction) do
%{door_id: door_id, has_door: true} ->
"{exit}#{direction}{/exit} (#{Door.get(door_id)})"
_ ->
"{exit}#{direction}{/exit}"
end
end)
|> Enum.join(", ")
end
@doc """
Format full text for who is in the room
Example:
iex> Rooms.who_is_here(%{players: [%{name: "Mordred"}], npcs: [%{name: "Arthur", extra: %{status_line: "[name] is here."}}]})
"{npc}Arthur{/npc} is here.\\n{player}Mordred{/player} is here."
"""
def who_is_here(room) do
[npcs(room), players(room)]
|> Enum.reject(fn line -> line == "" end)
|> Enum.join("\n")
end
@doc """
Format Player text for who is in the room
Example:
iex> Rooms.players(%{players: [%{name: "Mordred"}, %{name: "Arthur"}]})
"{player}Mordred{/player} is here.\\n{player}Arthur{/player} is here."
"""
@spec players(Room.t()) :: String.t()
def players(%{players: players}) do
players
|> Enum.map(fn player -> "#{Format.player_name(player)} is here." end)
|> Enum.join("\n")
end
def players(_), do: ""
@doc """
Format NPC text for who is in the room
Example:
iex> mordred = %{name: "Mordred", extra: %{status_line: "[name] is in the room."}}
iex> arthur = %{name: "Arthur", extra: %{status_line: "[name] is here."}}
iex> Rooms.npcs(%{npcs: [mordred, arthur]})
"{npc}Mordred{/npc} is in the room.\\n{npc}Arthur{/npc} is here."
"""
@spec npcs(Room.t()) :: String.t()
def npcs(%{npcs: npcs}) do
npcs
|> Enum.map(&FormatNPCs.npc_status/1)
|> Enum.join("\n")
end
def npcs(_), do: ""
@doc """
Maybe display items
"""
def maybe_items(room, items) do
case Enum.empty?(items) and room.currency == 0 do
true ->
""
false ->
"Items: #{items(room, items)}\n"
end
end
@doc """
Format items for a room
"""
def items(room, items) when is_list(items) do
items = items |> Enum.map(&Format.item_name/1)
(items ++ [Format.currency(room)])
|> Enum.reject(&(&1 == ""))
|> Enum.join(", ")
end
def items(_, _), do: ""
@doc """
Format Shop text for shops in the room
Example:
iex> Rooms.shops(%{shops: [%{name: "Hole in the Wall"}]})
"Shops: {shop}Hole in the Wall{/shop}\\n"
iex> Rooms.shops(%{shops: [%{name: "Hole in the Wall"}]}, label: false)
" - {shop}Hole in the Wall{/shop}"
"""
@spec shops(Room.t()) :: String.t()
def shops(room, opts \\ [])
def shops(%{shops: []}, _opts), do: ""
def shops(%{shops: shops}, label: false) do
shops
|> Enum.map(fn shop -> " - #{Format.shop_name(shop)}" end)
|> Enum.join(", ")
end
def shops(%{shops: shops}, _) do
shops =
shops
|> Enum.map(&Format.shop_name/1)
|> Enum.join(", ")
"Shops: #{shops}\n"
end
def shops(_, _), do: ""
end
|
lib/game/format/rooms.ex
| 0.5769
| 0.461441
|
rooms.ex
|
starcoder
|
defmodule VintageNet.IP.IPv4Config do
  @moduledoc """
  This is a helper module for VintageNet.Technology implementations that use
  IPv4.

  IPv4 configuration is specified under the `:ipv4` key in the configuration map.
  Fields include:

  * `:method` - `:dhcp`, `:static`, or `:disabled`

  The `:dhcp` method currently has no additional fields.

  The `:static` method uses the following fields:

  * `:address` - the IP address
  * `:prefix_length` - the number of bits in the IP address to use for the subnet (e.g., 24)
  * `:netmask` - either this or `prefix_length` is used to determine the subnet. If you
    have a choice, use `prefix_length`
  * `:gateway` - the default gateway for this interface (optional)
  * `:name_servers` - a list of DNS servers (optional)
  * `:domain` - DNS search domain (optional)

  Configuration normalization converts `:netmask` to `:prefix_length`.
  """

  alias VintageNet.Interface.RawConfig
  alias VintageNet.{Command, IP}

  @doc """
  Normalize the IPv4 parameters in a configuration.

  Raises `ArgumentError` on invalid/missing static parameters.
  """
  # @spec normalize(map()) :: map()
  def normalize(%{ipv4: ipv4} = config) do
    new_ipv4 = normalize_by_method(ipv4)
    %{config | ipv4: new_ipv4}
  end

  def normalize(config) do
    # No IPv4 configuration, so default to DHCP
    Map.put(config, :ipv4, %{method: :dhcp})
  end

  # :dhcp and :disabled take no extra fields, so any extras are dropped.
  defp normalize_by_method(%{method: :dhcp}), do: %{method: :dhcp}
  defp normalize_by_method(%{method: :disabled}), do: %{method: :disabled}

  # :static - convert addresses to tuples, derive :prefix_length (possibly
  # from :netmask), and keep only the known keys.
  defp normalize_by_method(%{method: :static} = ipv4) do
    new_prefix_length = get_prefix_length(ipv4)

    ipv4
    |> normalize_address()
    |> Map.put(:prefix_length, new_prefix_length)
    |> normalize_gateway()
    |> normalize_name_servers()
    |> Map.take([
      :method,
      :address,
      :prefix_length,
      :gateway,
      :domain,
      :name_servers
    ])
  end

  defp normalize_by_method(_other) do
    raise ArgumentError, "specify an IPv4 address method (:disabled, :dhcp, or :static)"
  end

  # :address is required for static configs; convert string form to a tuple.
  defp normalize_address(%{address: address} = config),
    do: %{config | address: IP.ip_to_tuple!(address)}

  defp normalize_address(_config),
    do: raise(ArgumentError, "IPv4 :address key missing in static config")

  # :gateway is optional; only convert it when present.
  defp normalize_gateway(%{gateway: gateway} = config),
    do: %{config | gateway: IP.ip_to_tuple!(gateway)}

  defp normalize_gateway(config), do: config

  # :name_servers may be a list or a single server; normalize to a tuple list.
  defp normalize_name_servers(%{name_servers: servers} = config) when is_list(servers) do
    %{config | name_servers: Enum.map(servers, &IP.ip_to_tuple!/1)}
  end

  defp normalize_name_servers(%{name_servers: one_server} = config) do
    %{config | name_servers: [IP.ip_to_tuple!(one_server)]}
  end

  defp normalize_name_servers(config), do: config

  # Prefer an explicit :prefix_length; otherwise derive it from :netmask.
  defp get_prefix_length(%{prefix_length: prefix_length}), do: prefix_length

  defp get_prefix_length(%{netmask: mask}) do
    with {:ok, mask_as_tuple} <- IP.ip_to_tuple(mask),
         {:ok, prefix_length} <- IP.subnet_mask_to_prefix_length(mask_as_tuple) do
      prefix_length
    else
      {:error, _reason} ->
        raise ArgumentError, "invalid subnet mask #{inspect(mask)}"
    end
  end

  defp get_prefix_length(_unspecified),
    do: raise(ArgumentError, "specify :prefix_length or :netmask")

  @doc """
  Add IPv4 configuration commands for supporting static and dynamic IP addressing.

  Appends `ip(8)` up/down commands (and, for :dhcp/:static, child specs) to
  the given `RawConfig`. `opts` must contain `:bin_ip` (and `:bin_udhcpc`
  for :dhcp).
  """
  @spec add_config(RawConfig.t(), map(), keyword()) :: RawConfig.t()
  def add_config(
        %RawConfig{
          ifname: ifname,
          up_cmds: up_cmds,
          down_cmds: down_cmds
        } = raw_config,
        %{ipv4: %{method: :disabled}},
        opts
      ) do
    # Even though IPv4 is disabled, the interface is still brought up
    ip = Keyword.fetch!(opts, :bin_ip)
    new_up_cmds = up_cmds ++ [{:run, ip, ["link", "set", ifname, "up"]}]

    new_down_cmds =
      down_cmds ++
        [
          {:run_ignore_errors, ip, ["addr", "flush", "dev", ifname, "label", ifname]},
          {:run, ip, ["link", "set", ifname, "down"]}
        ]

    %RawConfig{
      raw_config
      | up_cmds: new_up_cmds,
        down_cmds: new_down_cmds
    }
  end

  # :dhcp - bring the link up and run udhcpc under MuonTrap.Daemon, plus an
  # internet connectivity checker.
  def add_config(
        %RawConfig{
          ifname: ifname,
          child_specs: child_specs,
          up_cmds: up_cmds,
          down_cmds: down_cmds
        } = raw_config,
        %{ipv4: %{method: :dhcp}} = config,
        opts
      ) do
    udhcpc = Keyword.fetch!(opts, :bin_udhcpc)
    ip = Keyword.fetch!(opts, :bin_ip)
    new_up_cmds = up_cmds ++ [{:run, ip, ["link", "set", ifname, "up"]}]

    new_down_cmds =
      down_cmds ++
        [
          {:run_ignore_errors, ip, ["addr", "flush", "dev", ifname, "label", ifname]},
          {:run, ip, ["link", "set", ifname, "down"]}
        ]

    # DHCP hostname: explicit config wins, otherwise the OS hostname.
    hostname = config[:hostname] || get_hostname()

    new_child_specs =
      child_specs ++
        [
          Supervisor.child_spec(
            {MuonTrap.Daemon,
             [
               udhcpc,
               [
                 "-f",
                 "-i",
                 ifname,
                 "-x",
                 "hostname:#{hostname}",
                 "-s",
                 udhcpc_handler_path()
               ],
               Command.add_muon_options(
                 stderr_to_stdout: true,
                 log_output: :debug,
                 log_prefix: "udhcpc(#{ifname}): "
               )
             ]},
            id: :udhcpc
          ),
          {VintageNet.Interface.InternetConnectivityChecker, ifname}
        ]

    %RawConfig{
      raw_config
      | up_cmds: new_up_cmds,
        down_cmds: new_down_cmds,
        child_specs: new_child_specs
    }
  end

  # :static - assign the address, set up routing/DNS, and pick a
  # connectivity checker based on whether a gateway exists.
  def add_config(
        %RawConfig{
          ifname: ifname,
          up_cmds: up_cmds,
          down_cmds: down_cmds,
          child_specs: child_specs
        } = raw_config,
        %{ipv4: %{method: :static} = ipv4},
        opts
      ) do
    ip = Keyword.fetch!(opts, :bin_ip)
    addr_subnet = IP.cidr_to_string(ipv4.address, ipv4.prefix_length)

    route_manager_up =
      case ipv4[:gateway] do
        nil ->
          {:fun, VintageNet.RouteManager, :clear_route, [ifname]}

        gateway ->
          {:fun, VintageNet.RouteManager, :set_route,
           [ifname, [{ipv4.address, ipv4.prefix_length}], gateway, :lan]}
      end

    resolver_up =
      case ipv4[:name_servers] do
        nil -> {:fun, VintageNet.NameResolver, :clear, [ifname]}
        [] -> {:fun, VintageNet.NameResolver, :clear, [ifname]}
        servers -> {:fun, VintageNet.NameResolver, :setup, [ifname, ipv4[:domain], servers]}
      end

    new_up_cmds =
      up_cmds ++
        [
          {:run_ignore_errors, ip, ["addr", "flush", "dev", ifname, "label", ifname]},
          {:run, ip, ["addr", "add", addr_subnet, "dev", ifname, "label", ifname]},
          {:run, ip, ["link", "set", ifname, "up"]},
          route_manager_up,
          resolver_up
        ]

    new_down_cmds =
      down_cmds ++
        [
          {:fun, VintageNet.RouteManager, :clear_route, [ifname]},
          {:fun, VintageNet.NameResolver, :clear, [ifname]},
          {:run_ignore_errors, ip, ["addr", "flush", "dev", ifname, "label", ifname]},
          {:run, ip, ["link", "set", ifname, "down"]}
        ]

    # If there's a default gateway, then check for internet connectivity.
    checker =
      case ipv4[:gateway] do
        nil -> {VintageNet.Interface.LANConnectivityChecker, ifname}
        _exists -> {VintageNet.Interface.InternetConnectivityChecker, ifname}
      end

    new_child_specs = child_specs ++ [checker]

    %RawConfig{
      raw_config
      | up_cmds: new_up_cmds,
        down_cmds: new_down_cmds,
        child_specs: new_child_specs
    }
  end

  # OS hostname as a string (fallback for DHCP when none is configured).
  defp get_hostname() do
    {:ok, hostname} = :inet.gethostname()
    to_string(hostname)
  end

  # Path to the udhcpc event-handler script shipped in priv/.
  defp udhcpc_handler_path() do
    Application.app_dir(:vintage_net, ["priv", "udhcpc_handler"])
  end
end
|
lib/vintage_net/ip/ipv4_config.ex
| 0.891111
| 0.417271
|
ipv4_config.ex
|
starcoder
|
defmodule AshPostgres.Repo do
  @moduledoc """
  Resources that use the `AshPostgres` data layer use a `Repo` to access the database.

  This repo is a slightly modified version of an `Ecto.Repo`.

  You can use `Ecto.Repo`'s `init/2` to configure your repo like normal, but
  instead of returning `{:ok, config}`, use `super(config)` to pass the
  configuration to the `AshPostgres.Repo` implementation.

  ## Installed Extensions

  To configure your list of installed extensions, define `installed_extensions/0`

  Extensions that are relevant to ash_postgres:

  * `"uuid-ossp"` - Sets UUID primary keys defaults in the migration generator
  * `"pg_trgm"` - Makes the `AshPostgres.Predicates.Trigram` custom predicate available

  ```
  def installed_extensions() do
    ["pg_trgm", "uuid-ossp"]
  end
  ```
  """

  @doc "Use this to inform the data layer about what extensions are installed"
  @callback installed_extensions() :: [String.t()]
  @doc "Return a list of all schema names (only relevant for a multitenant implementation)"
  @callback all_tenants() :: [String.t()]
  @doc "The path where your tenant migrations are stored (only relevant for a multitenant implementation)"
  @callback tenant_migrations_path() :: String.t()
  @doc "The path where your migrations are stored"
  @callback migrations_path() :: String.t()

  defmacro __using__(opts) do
    quote bind_quoted: [opts: opts] do
      otp_app = opts[:otp_app] || raise("Must configure OTP app")

      use Ecto.Repo,
        adapter: Ecto.Adapters.Postgres,
        otp_app: otp_app

      # Default implementations for the callbacks above; user repos may
      # override any of them (see `defoverridable` below).
      def installed_extensions, do: []
      def tenant_migrations_path, do: nil
      def migrations_path, do: nil
      def all_tenants, do: []

      # Copies the callback results into the repo's runtime configuration so
      # the data layer and migration generator can read them from config.
      def init(_, config) do
        new_config =
          config
          |> Keyword.put(:installed_extensions, installed_extensions())
          |> Keyword.put(:tenant_migrations_path, tenant_migrations_path())
          |> Keyword.put(:migrations_path, migrations_path())

        {:ok, new_config}
      end

      # FIX: `migrations_path: 0` and `init: 2` were missing here. Without
      # them, a user repo defining `migrations_path/0` failed to compile
      # (default clause not overridable, unlike its siblings), and the
      # moduledoc's documented `init/2` + `super(config)` pattern could not
      # work because `init/2` was not marked overridable.
      defoverridable installed_extensions: 0,
                     all_tenants: 0,
                     tenant_migrations_path: 0,
                     migrations_path: 0,
                     init: 2
    end
  end
end
|
lib/repo.ex
| 0.849191
| 0.673963
|
repo.ex
|
starcoder
|
defmodule MehrSchulferien.Timetables do
  @moduledoc """
  The Timetables context.

  Standard Ecto CRUD for Year, Month, Day, Category, Period, Slot and
  InsetDayQuantity. Most `get_*!/1` functions accept either a numeric id or
  a string slug and disambiguate between the two.
  """

  import Ecto.Query, warn: false
  alias MehrSchulferien.Repo

  alias MehrSchulferien.Timetables.Year

  @doc """
  Returns the list of years.

  ## Examples

      iex> list_years()
      [%Year{}, ...]

  """
  def list_years do
    Repo.all(Year)
  end

  @doc """
  Gets a single year by id or slug.

  Raises `Ecto.NoResultsError` if the Year does not exist.

  ## Examples

      iex> get_year!(123)
      %Year{}

      iex> get_year!("2017")
      %Year{}

      iex> get_year!(456)
      ** (Ecto.NoResultsError)

  """
  def get_year!(id_or_slug) do
    # NOTE(review): unlike the other get_*! functions, an integer argument is
    # FIRST looked up by its string slug (e.g. 2017 -> "2017") and only falls
    # back to a primary-key lookup when no such slug exists — confirm this
    # ordering is intentional.
    query = case is_integer(id_or_slug) do
      true -> as_string = Integer.to_string(id_or_slug)
              from y in Year, where: y.slug == ^as_string
      _ -> from y in Year, where: y.slug == ^id_or_slug
    end

    year = Repo.one(query)

    case year do
      nil -> Repo.get!(Year, id_or_slug)
      _ -> year
    end
  end

  @doc """
  Creates a year.

  ## Examples

      iex> create_year(%{field: value})
      {:ok, %Year{}}

      iex> create_year(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_year(attrs \\ %{}) do
    %Year{}
    |> Year.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a year.

  ## Examples

      iex> update_year(year, %{field: new_value})
      {:ok, %Year{}}

      iex> update_year(year, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_year(%Year{} = year, attrs) do
    year
    |> Year.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Year.

  ## Examples

      iex> delete_year(year)
      {:ok, %Year{}}

      iex> delete_year(year)
      {:error, %Ecto.Changeset{}}

  """
  def delete_year(%Year{} = year) do
    Repo.delete(year)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking year changes.

  ## Examples

      iex> change_year(year)
      %Ecto.Changeset{source: %Year{}}

  """
  def change_year(%Year{} = year) do
    Year.changeset(year, %{})
  end

  alias MehrSchulferien.Timetables.Month

  @doc """
  Returns the list of months.

  ## Examples

      iex> list_months()
      [%Month{}, ...]

  """
  def list_months do
    Repo.all(Month)
  end

  @doc """
  Gets a single month by id or slug.

  Raises `Ecto.NoResultsError` if the Month does not exist.

  ## Examples

      iex> get_month!(123)
      %Month{}

      iex> get_month!("2017-02")
      %Month{}

      iex> get_month!(456)
      ** (Ecto.NoResultsError)

  """
  def get_month!(id_or_slug) do
    # Numeric-looking strings (and integers) are treated as primary keys;
    # anything else is treated as a slug.
    case is_integer(id_or_slug) or Regex.match?(~r/^[1-9][0-9]*$/, id_or_slug) do
      true ->
        Repo.get!(Month, id_or_slug)

      false ->
        query = from f in Month, where: f.slug == ^id_or_slug
        Repo.one!(query)
    end
  end

  @doc """
  Creates a month.

  ## Examples

      iex> create_month(%{field: value})
      {:ok, %Month{}}

      iex> create_month(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_month(attrs \\ %{}) do
    %Month{}
    |> Month.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a month.

  ## Examples

      iex> update_month(month, %{field: new_value})
      {:ok, %Month{}}

      iex> update_month(month, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_month(%Month{} = month, attrs) do
    month
    |> Month.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Month.

  ## Examples

      iex> delete_month(month)
      {:ok, %Month{}}

      iex> delete_month(month)
      {:error, %Ecto.Changeset{}}

  """
  def delete_month(%Month{} = month) do
    Repo.delete(month)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking month changes.

  ## Examples

      iex> change_month(month)
      %Ecto.Changeset{source: %Month{}}

  """
  def change_month(%Month{} = month) do
    Month.changeset(month, %{})
  end

  alias MehrSchulferien.Timetables.Day

  @doc """
  Returns the list of days.

  ## Examples

      iex> list_days()
      [%Day{}, ...]

  """
  def list_days do
    Repo.all(Day)
  end

  @doc """
  Gets a single day by id or slug.

  Raises `Ecto.NoResultsError` if the Day does not exist.

  ## Examples

      iex> get_day!(123)
      %Day{}

      iex> get_day!("2017-12-24")
      %Day{}

      iex> get_day!(456)
      ** (Ecto.NoResultsError)

  """
  def get_day!(id_or_slug) do
    # NOTE(review): `~r/^[1-9]+[0-9]*$/` here vs `~r/^[1-9][0-9]*$/` in
    # get_month!/get_period! — the two patterns match the same strings, but
    # the inconsistency looks accidental.
    case is_integer(id_or_slug) or Regex.match?(~r/^[1-9]+[0-9]*$/, id_or_slug) do
      true ->
        Repo.get!(Day, id_or_slug)

      false ->
        query = from f in Day, where: f.slug == ^id_or_slug
        Repo.one!(query)
    end
  end

  @doc """
  Creates a day.

  ## Examples

      iex> create_day(%{field: value})
      {:ok, %Day{}}

      iex> create_day(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_day(attrs \\ %{}) do
    %Day{}
    |> Day.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a day.

  ## Examples

      iex> update_day(day, %{field: new_value})
      {:ok, %Day{}}

      iex> update_day(day, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_day(%Day{} = day, attrs) do
    day
    |> Day.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Day.

  ## Examples

      iex> delete_day(day)
      {:ok, %Day{}}

      iex> delete_day(day)
      {:error, %Ecto.Changeset{}}

  """
  def delete_day(%Day{} = day) do
    Repo.delete(day)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking day changes.

  ## Examples

      iex> change_day(day)
      %Ecto.Changeset{source: %Day{}}

  """
  def change_day(%Day{} = day) do
    Day.changeset(day, %{})
  end

  alias MehrSchulferien.Timetables.Category

  @doc """
  Returns the list of categories.

  ## Examples

      iex> list_categories()
      [%Category{}, ...]

  """
  def list_categories do
    Repo.all(Category)
  end

  @doc """
  Gets a single category by id or slug.

  Raises `Ecto.NoResultsError` if the Category does not exist.

  ## Examples

      iex> get_category!(123)
      %Category{}

      iex> get_category!("footbar")
      %Category{}

      iex> get_category!(456)
      ** (Ecto.NoResultsError)

  """
  def get_category!(id_or_slug) do
    case is_integer(id_or_slug) or Regex.match?(~r/^[1-9]+[0-9]*$/, id_or_slug) do
      true ->
        Repo.get!(Category, id_or_slug)

      false ->
        query = from f in Category, where: f.slug == ^id_or_slug
        Repo.one!(query)
    end
  end

  @doc """
  Creates a category.

  ## Examples

      iex> create_category(%{field: value})
      {:ok, %Category{}}

      iex> create_category(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_category(attrs \\ %{}) do
    %Category{}
    |> Category.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a category.

  ## Examples

      iex> update_category(category, %{field: new_value})
      {:ok, %Category{}}

      iex> update_category(category, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_category(%Category{} = category, attrs) do
    category
    |> Category.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Category.

  ## Examples

      iex> delete_category(category)
      {:ok, %Category{}}

      iex> delete_category(category)
      {:error, %Ecto.Changeset{}}

  """
  def delete_category(%Category{} = category) do
    Repo.delete(category)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking category changes.

  ## Examples

      iex> change_category(category)
      %Ecto.Changeset{source: %Category{}}

  """
  def change_category(%Category{} = category) do
    Category.changeset(category, %{})
  end

  alias MehrSchulferien.Timetables.Period

  @doc """
  Returns the list of periods.

  ## Examples

      iex> list_periods()
      [%Period{}, ...]

  """
  def list_periods do
    Repo.all(Period)
  end

  @doc """
  Gets a single period by id or slug.

  Raises `Ecto.NoResultsError` if the Period does not exist.

  ## Examples

      iex> get_period!(123)
      %Period{}

      iex> get_period!("2016-10-31-2016-11-04-herbst-bayern")
      %Period{}

      iex> get_period!(456)
      ** (Ecto.NoResultsError)

  """
  def get_period!(id_or_slug) do
    case is_integer(id_or_slug) or Regex.match?(~r/^[1-9][0-9]*$/, id_or_slug) do
      true ->
        Repo.get!(Period, id_or_slug)

      false ->
        query = from f in Period, where: f.slug == ^id_or_slug
        Repo.one!(query)
    end
  end

  @doc """
  Creates a period.

  On success, also creates one Slot per Day whose value falls within the
  period's `starts_on`..`ends_on` range.

  ## Examples

      iex> create_period(%{field: value})
      {:ok, %Period{}}

      iex> create_period(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_period(attrs \\ %{}) do
    result = %Period{}
    |> Period.changeset(attrs)
    |> Repo.insert()

    case result do
      {:ok, period} -> # create slots for this period
        # NOTE(review): slot creation is not wrapped in a transaction with
        # the insert, and create_slot errors are silently ignored — confirm
        # this best-effort behavior is intended.
        query = from d in Day, where: d.value >= ^period.starts_on,
                               where: d.value <= ^period.ends_on
        days = Repo.all(query)

        for day <- days do
          create_slot(%{day_id: day.id, period_id: period.id})
        end

        result

      {_, _} -> result
    end
  end

  @doc """
  Updates a period.

  ## Examples

      iex> update_period(period, %{field: new_value})
      {:ok, %Period{}}

      iex> update_period(period, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_period(%Period{} = period, attrs) do
    period
    |> Period.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Period.

  ## Examples

      iex> delete_period(period)
      {:ok, %Period{}}

      iex> delete_period(period)
      {:error, %Ecto.Changeset{}}

  """
  def delete_period(%Period{} = period) do
    Repo.delete(period)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking period changes.

  ## Examples

      iex> change_period(period)
      %Ecto.Changeset{source: %Period{}}

  """
  def change_period(%Period{} = period) do
    Period.changeset(period, %{})
  end

  alias MehrSchulferien.Timetables.Slot

  @doc """
  Returns the list of slots.

  ## Examples

      iex> list_slots()
      [%Slot{}, ...]

  """
  def list_slots do
    Repo.all(Slot)
  end

  @doc """
  Gets a single slot.

  Raises `Ecto.NoResultsError` if the Slot does not exist.

  ## Examples

      iex> get_slot!(123)
      %Slot{}

      iex> get_slot!(456)
      ** (Ecto.NoResultsError)

  """
  def get_slot!(id), do: Repo.get!(Slot, id)

  @doc """
  Creates a slot.

  ## Examples

      iex> create_slot(%{field: value})
      {:ok, %Slot{}}

      iex> create_slot(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_slot(attrs \\ %{}) do
    %Slot{}
    |> Slot.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a slot.

  ## Examples

      iex> update_slot(slot, %{field: new_value})
      {:ok, %Slot{}}

      iex> update_slot(slot, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_slot(%Slot{} = slot, attrs) do
    slot
    |> Slot.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a Slot.

  ## Examples

      iex> delete_slot(slot)
      {:ok, %Slot{}}

      iex> delete_slot(slot)
      {:error, %Ecto.Changeset{}}

  """
  def delete_slot(%Slot{} = slot) do
    Repo.delete(slot)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking slot changes.

  ## Examples

      iex> change_slot(slot)
      %Ecto.Changeset{source: %Slot{}}

  """
  def change_slot(%Slot{} = slot) do
    Slot.changeset(slot, %{})
  end

  alias MehrSchulferien.Timetables.InsetDayQuantity

  @doc """
  Returns the list of inset_day_quantities.

  ## Examples

      iex> list_inset_day_quantities()
      [%InsetDayQuantity{}, ...]

  """
  def list_inset_day_quantities do
    Repo.all(InsetDayQuantity)
  end

  @doc """
  Gets a single inset_day_quantity.

  Raises `Ecto.NoResultsError` if the Inset day quantity does not exist.

  ## Examples

      iex> get_inset_day_quantity!(123)
      %InsetDayQuantity{}

      iex> get_inset_day_quantity!(456)
      ** (Ecto.NoResultsError)

  """
  def get_inset_day_quantity!(id), do: Repo.get!(InsetDayQuantity, id)

  @doc """
  Creates a inset_day_quantity.

  ## Examples

      iex> create_inset_day_quantity(%{field: value})
      {:ok, %InsetDayQuantity{}}

      iex> create_inset_day_quantity(%{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def create_inset_day_quantity(attrs \\ %{}) do
    %InsetDayQuantity{}
    |> InsetDayQuantity.changeset(attrs)
    |> Repo.insert()
  end

  @doc """
  Updates a inset_day_quantity.

  ## Examples

      iex> update_inset_day_quantity(inset_day_quantity, %{field: new_value})
      {:ok, %InsetDayQuantity{}}

      iex> update_inset_day_quantity(inset_day_quantity, %{field: bad_value})
      {:error, %Ecto.Changeset{}}

  """
  def update_inset_day_quantity(%InsetDayQuantity{} = inset_day_quantity, attrs) do
    inset_day_quantity
    |> InsetDayQuantity.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a InsetDayQuantity.

  ## Examples

      iex> delete_inset_day_quantity(inset_day_quantity)
      {:ok, %InsetDayQuantity{}}

      iex> delete_inset_day_quantity(inset_day_quantity)
      {:error, %Ecto.Changeset{}}

  """
  def delete_inset_day_quantity(%InsetDayQuantity{} = inset_day_quantity) do
    Repo.delete(inset_day_quantity)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking inset_day_quantity changes.

  ## Examples

      iex> change_inset_day_quantity(inset_day_quantity)
      %Ecto.Changeset{source: %InsetDayQuantity{}}

  """
  def change_inset_day_quantity(%InsetDayQuantity{} = inset_day_quantity) do
    InsetDayQuantity.changeset(inset_day_quantity, %{})
  end
end
|
phoenix/lib/mehr_schulferien/timetables/timetables.ex
| 0.873613
| 0.427038
|
timetables.ex
|
starcoder
|
# NOTE: this is an EEx generator template — `<%= components_module %>` and
# `<%= web_module %>` are filled in by the generator; `<%%=` escapes EEx so
# it survives into the generated file as `<%=`.
defmodule <%= components_module %>.Form.MultiSelect do
  use <%= web_module %>, :component

  alias Phoenix.LiveView.JS
  import <%= components_module %>.Icon

  # JS command: collapse the dropdown for the given component id.
  defp close_select_dropdown(js \\ %JS{}, id) do
    js
    |> JS.remove_class("show", to: "##{id}.show")
    |> JS.remove_class("show", to: "##{id}.show .dropdown-menu")
    |> JS.set_attribute({"aria-expanded", "false"}, to: "##{id}.show .dropdown-menu")
  end

  # JS command: toggle the dropdown — closes it if open, opens it otherwise
  # (the `:not(.show)` selectors only match after the close above ran).
  defp toggle_select_dropdown(js \\ %JS{}, id) do
    js
    |> close_select_dropdown(id)
    |> JS.add_class("show", to: "##{id}:not(.show)")
    |> JS.add_class("show", to: "##{id}:not(.show) .dropdown-menu")
    |> JS.set_attribute({"aria-expanded", "true"}, to: "##{id}:not(.show) .dropdown-menu")
  end

  # Derive the input name ("field[]" array form) from form/field or :name.
  defp name(%{form: form, field: field}), do: input_name(form, field) <> "[]"
  defp name(%{name: name}), do: to_string(name)

  # Selected values with "" and nil filtered out.
  defp value(%{form: form, field: field}), do: Enum.reject(input_value(form, field) || [], &(&1 in ["", nil]))
  defp value(%{value: value}), do: Enum.reject(value, &(&1 in ["", nil]))

  # Per-option DOM id: "<input id>-<value>".
  defp id(%{form: form, field: field}, value), do: "#{input_id(form, field)}-#{value}"
  defp id(%{name: name}, value), do: "#{name}-#{value}"

  @doc """
  Combines `multi_select` and `multi_select_values` to a form element with a dropdown for the checkboxes.

  Needs either `form`-`field` or `name`-`value` attributes to be present.

  ## attributes

  * options: list of options to select. `[{"label 1", "value 1"} | ...]`
  * form: Phoenix.HTML.FormData
  * field: form field name
  * value: list of selected values. Not needed if form and field are given. `["value 1" | ...]`
  * name: name of the field. Not needed if form and field are given.
  ```
  """
  def multi_select_dropdown(assigns) do
    assigns = assign_new(assigns, :options, fn -> [] end)
    assigns = assign_new(assigns, :name, fn -> name(assigns) end)
    assigns = assign_new(assigns, :value, fn -> value(assigns) end)

    assigns =
      assign_new(assigns, :id, fn ->
        case assigns do
          %{form: form, field: field} -> input_id(form, field)
          %{name: name} -> to_string(name)
        end
      end)

    assigns = assign_new(assigns, :style, fn -> "" end)
    assigns = assign_new(assigns, :class, fn -> "" end)
    attrs = assigns_to_attributes(assigns, [:options, :name, :form, :field, :value, :style, :class])

    ~H"""
    <div {attrs} class={@class} style={"position: relative #{@style}"}>
      <div class="input-group">
        <.multi_select_values name={@name} value={@value} options={@options} class="form-control" />
        <button phx-click={toggle_select_dropdown(@id)} class="btn btn-outline-secondary dropdown-toggle" type="button" id={"#{@id}-label"} aria-haspopup="true" aria-expanded="false"></button>
      </div>
      <div>
        <div class="dropdown-menu p-1" style="width: calc(100% - 2.2rem)" aria-labelledby={"#{@id}-label"} phx-click-away={close_select_dropdown(@id)} phx-window-keydown={close_select_dropdown(@id)} phx-key="escape">
          <div style="max-height: 40vh; overflow-y: scroll;">
            <.multi_select name={@name} value={@value} options={@options} />
          </div>
        </div>
      </div>
    </div>
    """
  end
  # NOTE(review): `"position: relative #{@style}"` above lacks the `;`
  # separator the other style strings use — with a non-empty @style this
  # yields invalid CSS ("position: relative color: red"); confirm and fix.

  @doc """
  Generates an area with labels of the selected values of a `multi_select` form element.

  The labels are clickable to unselect the values in the corresponding `multi_select`.

  Needs either `form`-`field` or `name`-`value` attributes to be present.

  ## attributes

  * options: list of options to select. `[{"label 1", "value 1"} | ...]`
  * form: Phoenix.HTML.FormData
  * field: form field name
  * value: list of selected values. Not needed if form and field are given. `["value 1" | ...]`
  * name: name of the field. Not needed if form and field are given.
  ```
  """
  def multi_select_values(assigns) do
    assigns = assign_new(assigns, :options, fn -> [] end)
    assigns = assign_new(assigns, :value, fn -> value(assigns) end)
    assigns = assign_new(assigns, :style, fn -> "" end)
    assigns = assign_new(assigns, :class, fn -> "" end)

    ~H"""
    <div class={"bg-light border border-1 rounded d-flex #{@class}"} style={"flex-wrap: wrap; align-content: center; align-items: center; min-height: 2rem; padding: 0.1rem 0.1rem; #{@style}"}>
      <%%= for {label, value} <- @options, value in @value do %>
        <label for={id(assigns, value)} class="badge badge-secondary text-bg-secondary bg-secondary" style="cursor: pointer; margin: 0.1rem 0.1rem;">× <%%= label %></label>
      <%% end %>
    </div>
    """
  end

  @doc """
  Generates a list of checkboxes, that submit a list of values.

  Needs either `form`-`field` or `name`-`value` attributes to be present.

  ## attributes

  * options: list of options to select. `[{"label 1", "value 1"} | ...]`
  * form: Phoenix.HTML.FormData
  * field: form field name
  * value: list of selected values. Not needed if form and field are given. `["value 1" | ...]`
  * name: name of the field. Not needed if form and field are given.

  ## empty list

  To determin, that no element of the list is checked, an empty string will be
  submitted as a first list element. This "empty element" will be filtered out
  when using an Ecto.Changeset for casting. Without changeset you have to handle it by your own:

  ```elixir
  defmodule FormCs do
    alias Ecto.Changeset
    @types %{example_field: {:array, :string}}
    @fields Map.keys(@types)
    def cast(params) do
      Changeset.cast({%{example_field: []}, @types}, params, @fields)
    end
  end

  iex(9)> FormCs.cast(%{example_field: ["", "first", "second"]}).changes
  %{example_field: ["first", "second"]},
  ```
  """
  def multi_select(assigns) do
    assigns = assign_new(assigns, :options, fn -> [] end)
    assigns = assign_new(assigns, :name, fn -> name(assigns) end)
    assigns = assign_new(assigns, :value, fn -> value(assigns) end)
    assigns = assign_new(assigns, :style, fn -> "" end)
    assigns = assign_new(assigns, :class, fn -> "" end)
    attrs = assigns_to_attributes(assigns, [:options, :name, :form, :field, :value, :style, :class])

    ~H"""
    <div {attrs} class={@class} style={"padding: 0.1rem 0.1rem; #{@style}"}>
      <input type="hidden" name={@name} value="" />
      <%%= for {label, value} <- @options do %>
        <.multi_check_start name={@name} checked_value={value} values={@value} disabled={false} id={id(assigns, value)}><%%= label %></.multi_check_start>
      <%% end %>
    </div>
    """
  end

  # checkbox, that generates an array of values with a label at the start (RTL aware).
  defp multi_check_start(assigns) do
    ~H"""
    <div class="form-check">
      <label class="form-check-label">
        <%%= render_slot(@inner_block) %>
        <.multi_checkbox name={@name} values={@values} checked_value={@checked_value} disabled={@disabled} id={@id} />
        <span class="form-check-sign">
          <span class="check" />
        </span>
      </label>
    </div>
    """
  end

  # checkbox, that generates an array of values.
  defp multi_checkbox(assigns) do
    assigns = assign_new(assigns, :disabled, fn -> false end)
    attrs = assigns_to_attributes(assigns, [:class, :form, :field, :checked_value, :values])
    # We html escape all values to be sure we are comparing
    # apples to apples. After all we may have true in the data
    # but "true" in the params and both need to match.
    values = Enum.map(assigns.values, &html_escape/1)
    checked_value = html_escape(assigns.checked_value)

    ~H"""
    <input type="checkbox" class="form-check-input" checked={checked_value in values} value={checked_value} {attrs} />
    """
  end
end
|
priv/templates/gen.components/components/form/multi_select.ex
| 0.694199
| 0.660128
|
multi_select.ex
|
starcoder
|
defmodule CarCache do
  @external_resource "README.md"

  # Module docs are extracted at compile time from the README section between
  # the <!-- MDOC !--> markers.
  @moduledoc "README.md"
             |> File.read!()
             |> String.split("<!-- MDOC !-->")
             |> Enum.fetch!(1)

  use GenServer

  alias CarCache.Cache

  # Options accepted by start_link/1, validated with NimbleOptions.
  @options_schema [
    name: [
      type: :atom,
      required: true
    ],
    max_size: [
      type: :non_neg_integer,
      required: true
    ]
  ]

  # Public Functions

  @doc """
  Starts a new cache.
  Options:
  #{NimbleOptions.docs(@options_schema)}
  """
  def start_link(opts) do
    opts = NimbleOptions.validate!(opts, @options_schema)
    # The cache is registered under its configured name so the public API can
    # address it by atom.
    GenServer.start_link(__MODULE__, opts, name: opts[:name])
  end

  @impl true
  def init(opts) do
    cache = Cache.new(opts[:name], opts)
    {:ok, %{cache: cache}}
  end

  @doc """
  Insert a value into the cache under a given key
  ## Example
  ```
  CarCache.put(:my_cache, user_id, profile)
  ```
  """
  @spec put(atom(), any(), any()) :: :ok
  def put(name, key, value) do
    # Writes are serialized through the owning GenServer.
    GenServer.call(name, {:put, key, value})
  end

  @doc """
  Get a value from the cache, if it exists in the cache
  ## Example
  ```
  CarCache.get(:my_cache, user_id)
  ```
  """
  @spec get(atom(), any()) :: any()
  def get(name, key) do
    # Reads bypass the GenServer and go straight to Cache — presumably a
    # named-table (ETS) lookup keyed by the cache name; confirm in
    # CarCache.Cache.
    Cache.get(name, key)
  end

  @doc """
  Delete a key from the cache
  ## Example
  ```
  CarCache.delete(:my_cache, user_id)
  ```
  """
  @spec delete(atom(), any()) :: any()
  def delete(name, key) do
    GenServer.call(name, {:delete, key})
  end

  @doc """
  Fetches the value from the cache if it exists, otherwise executes `fallback`.
  The fallback function can return either `{:commit, any()}` or
  `{:ignore, any()}`. If `{:commit, any()}` is returned, the value will be
  stored in the cache
  ## Example
  ```
  CarCache.fetch(:my_cache, user_id, fn ->
  case Profile.get(user_id) do
  {:ok, profile} -> {:commit, profile}
  {:error, _reason} = error -> {:ignore, error}
  end
  end)
  ```
  """
  @spec fetch(atom(), any(), (() -> {:commit, any()} | {:ignore, any()})) :: any()
  def fetch(name, key, fallback) do
    # Cache hit short-circuits: a non-nil value does not match `nil <-` and
    # `with` returns it as-is. Only a miss runs the fallback.
    with nil <- get(name, key) do
      case fallback.() do
        {:commit, value} ->
          put(name, key, value)
          value

        {:ignore, value} ->
          value
      end
    end
  end

  # Callbacks

  @impl true
  def handle_call({:put, key, value}, _from, state) do
    cache = Cache.put(state.cache, key, value)
    {:reply, :ok, %{state | cache: cache}}
  end

  @impl true
  def handle_call({:delete, key}, _from, state) do
    cache = Cache.delete(state.cache, key)
    {:reply, :ok, %{state | cache: cache}}
  end
end
|
lib/car_cache.ex
| 0.826852
| 0.71607
|
car_cache.ex
|
starcoder
|
defmodule Request.Validator.Plug do
  alias Plug.Conn
  alias Request.Validator

  import Plug.Conn

  @doc ~S"""
  Init the Request.Validator.Plug with an optional error callback
  and handlers with their corresponding request validator module.
  ```elixir
  plug Request.Validator.Plug,
  register: App.Requests.RegisterRequest,
  on_error: fn conn, errors -> json_resp(conn, "Handle your errors: #{inspect errors}") end
  ```
  """
  # Accept a map of options for convenience, but normalize to a keyword list.
  def init(opts) when is_map(opts), do: init(Keyword.new(opts))

  def init(opts) do
    # Fall back to the default JSON error responder when none is provided.
    opts
    |> Keyword.put_new(:on_error, &Validator.Plug.on_error/2)
  end

  @doc ~S"""
  The default callback to be invoked when there is a param that fails validation.
  """
  def on_error(conn, errors) do
    json_resp(conn, 422, %{message: "Unprocessable entity", errors: errors}) |> halt()
  end

  # 403 response used when the validator module's authorize/1 returns false.
  defp unauthorized(conn) do
    json_resp(conn, 403, %{message: "Forbidden"}) |> halt
  end

  @doc ~S"""
  Performs validations on `conn.params`
  If all validations are successful returns the connection struct
  Otherwise returns an error map in the following structure: `%{param: ["some error", ...]}`
  Will call the given `on_error` callback in case some validation failed
  """
  def call(conn, opts) do
    # Look up the validator registered for the current Phoenix action. When
    # none is registered, `false <- is_nil(module)` yields `true`, which falls
    # through to the catch-all `_ ->` clause and the conn passes unchanged.
    with action <- Map.get(conn.private, :phoenix_action),
         module <- get_validator(opts, action),
         false <- is_nil(module),
         {:authorized, true} <- {:authorized, module.authorize(conn)},
         :ok <- module.validate(Conn.fetch_query_params(conn)) do
      conn
    else
      {:authorized, false} ->
        unauthorized(conn)

      {:error, errors} when is_map(errors) ->
        opts[:on_error].(conn, errors)

      _ ->
        conn
    end
  end

  # Options may be a map (if call/2 receives un-normalized opts) or the
  # keyword list produced by init/1.
  defp get_validator(opt, key) when is_map(opt), do: Map.get(opt, key)
  defp get_validator(opt, key) when is_list(opt), do: Keyword.get(opt, key)

  defp json_resp(conn, status, body) do
    conn
    |> put_resp_header("content-type", "application/json")
    |> send_resp(status, json_library().encode_to_iodata!(body))
  end

  # The JSON encoder is configurable; defaults to Jason.
  defp json_library do
    Application.get_env(:request_validator, :json_library, Jason)
  end
end
|
lib/plug.ex
| 0.756627
| 0.449876
|
plug.ex
|
starcoder
|
defmodule XDR.VariableArray do
  @moduledoc """
  This module manages the `Variable-Length Array` type based on the RFC4506 XDR Standard.
  """

  @behaviour XDR.Declaration

  alias XDR.{UInt, FixedArray, VariableArrayError}

  # elements is either the list to encode or a binary still to be decoded
  # (see @type elements below).
  defstruct [:elements, :type, :max_length]

  @type elements :: list() | binary()

  @typedoc """
  `XDR.VariableArray` structure type specification.
  """
  @type t :: %XDR.VariableArray{elements: elements(), type: module(), max_length: integer()}

  @doc """
  Create a new `XDR.VariableArray` structure with the `elements`, `type` and `max_length` passed.
  """
  @spec new(elements :: elements(), type :: module(), max_length :: integer()) :: t()
  # Default max_length is 2^32 - 1, the largest length an unsigned 32-bit
  # XDR length prefix can express.
  def new(elements, type, max_length \\ 4_294_967_295),
    do: %XDR.VariableArray{elements: elements, type: type, max_length: max_length}

  @doc """
  Encode a `XDR.VariableArray` structure into a XDR format.
  """
  @impl true
  # Validation clauses: clause order matters — max_length checks run before
  # element checks, and the final clause performs the actual encoding.
  def encode_xdr(%{max_length: max_length}) when not is_integer(max_length),
    do: {:error, :not_number}

  def encode_xdr(%{max_length: max_length}) when max_length <= 0,
    do: {:error, :exceed_lower_bound}

  def encode_xdr(%{max_length: max_length}) when max_length > 4_294_967_295,
    do: {:error, :exceed_upper_bound}

  def encode_xdr(%{elements: elements}) when not is_list(elements), do: {:error, :not_list}

  def encode_xdr(%{elements: elements, max_length: max_length})
      when length(elements) > max_length,
      do: {:error, :length_over_max}

  def encode_xdr(%{elements: elements, type: type}) do
    # A variable array is a 4-byte unsigned length prefix followed by the
    # elements encoded as a fixed array of that length.
    array_length = length(elements)
    encoded_length = array_length |> UInt.new() |> UInt.encode_xdr!()
    encoded_array = FixedArray.new(elements, type, array_length) |> FixedArray.encode_xdr!()
    {:ok, encoded_length <> encoded_array}
  end

  @doc """
  Encode a `XDR.VariableArray` structure into a XDR format.
  If the `variable_array` is not valid, an exception is raised.
  """
  @impl true
  def encode_xdr!(variable_array) do
    case encode_xdr(variable_array) do
      {:ok, binary} -> binary
      {:error, reason} -> raise(VariableArrayError, reason)
    end
  end

  @doc """
  Decode the Variable-Length Array in XDR format to a `XDR.VariableArray` structure.
  """
  @impl true
  def decode_xdr(_bytes, %{max_length: max_length}) when not is_integer(max_length),
    do: {:error, :not_number}

  def decode_xdr(_bytes, %{max_length: max_length}) when max_length <= 0,
    do: {:error, :exceed_lower_bound}

  def decode_xdr(_bytes, %{max_length: max_length}) when max_length > 4_294_967_295,
    do: {:error, :exceed_upper_bound}

  def decode_xdr(bytes, _struct) when not is_binary(bytes), do: {:error, :not_binary}

  # Reject a declared length larger than the configured maximum.
  def decode_xdr(<<xdr_len::big-unsigned-integer-size(32), _::binary>>, %{max_length: max_length})
      when xdr_len > max_length,
      do: {:error, :invalid_length}

  # Sanity-check that enough bytes remain after the length prefix.
  # NOTE(review): this assumes 4 encoded bytes per element, which only holds
  # for 4-byte element types — confirm this is intended for all `type`s.
  def decode_xdr(<<xdr_len::big-unsigned-integer-size(32), rest::binary>>, _struct)
      when xdr_len * 4 > byte_size(rest),
      do: {:error, :invalid_binary}

  def decode_xdr(bytes, %{type: type}) do
    # Consume the length prefix, then delegate to FixedArray for the payload.
    {array_length, rest} = UInt.decode_xdr!(bytes)

    fixed_array =
      FixedArray.decode_xdr!(rest, %XDR.FixedArray{type: type, length: array_length.datum})

    {:ok, fixed_array}
  end

  @doc """
  Decode the Variable-Length Array in XDR format to a `XDR.VariableArray` structure.
  If the binaries are not valid, an exception is raised.
  """
  @impl true
  def decode_xdr!(bytes, struct) do
    case decode_xdr(bytes, struct) do
      {:ok, result} -> result
      {:error, reason} -> raise(VariableArrayError, reason)
    end
  end
end
|
lib/xdr/variable_array.ex
| 0.905956
| 0.542197
|
variable_array.ex
|
starcoder
|
|QUESTIONNAME|
Produce a timestamp for 1 a.m. on the 31st of August 2012
|QUESTION|
Produce a timestamp for 1 a.m. on the 31st of August 2012.
|QUERY|
select timestamp '2012-08-31 01:00:00';
|ANSWER|
<p>Here's a pretty easy question to start off with! SQL has a bunch of different date and time types, which you can peruse at your leisure over at the excellent <a href="http://www.postgresql.org/docs/current/static/datatype-datetime.html">Postgres documentation</a>. These basically allow you to store dates, times, or timestamps (date+time).</p>
<p>The approved answer is the best way to create a timestamp under normal circumstances. You can also use casts to change a correctly formatted string into a timestamp, for example:</p>
<sql>
select '2012-08-31 01:00:00'::timestamp;
select cast('2012-08-31 01:00:00' as timestamp);
</sql>
<p>The former approach is a Postgres extension, while the latter is SQL-standard. You'll note that in many of our earlier questions, we've used bare strings without specifying a data type. This works because when Postgres is working with a value coming out of a timestamp column of a table (say), it knows to cast our strings to timestamps.</p>
<p>Timestamps can be stored with or without time zone information. We've chosen not to here, but if you like you could format the timestamp like "2012-08-31 01:00:00 +00:00", assuming UTC. Note that timestamp with time zone is a different type to timestamp - when you're declaring it, you should use <c>TIMESTAMP WITH TIME ZONE '2012-08-31 01:00:00 +00:00'</c>.</p>
<p>Finally, have a bit of a play around with some of the different date/time serialisations described in the Postgres docs. You'll find that Postgres is extremely flexible with the formats it accepts, although my recommendation to you would be to use the standard serialisation we've used here - you'll find it unambiguous and easy to port to other DBs.</p>
|HINT|
There's a bunch of ways to do this, but the easiest is probably to look at the <c>TIMESTAMP</c> keyword.
|SORTED|
0
|PAGEID|
4FBCBF61-D92D-460B-91EF-F295390794C0
|WRITEABLE|
0
|
questions/date/00010000-timestamp.ex
| 0.555676
| 0.54153
|
00010000-timestamp.ex
|
starcoder
|
defmodule SyncPrimitives.CyclicBarrier do
  @moduledoc """
  A CyclicBarrier expects a predefined number of `parties` to `await/2`
  before all calls to `await/2` can continue.
  Parties _arrive_ at the _barrier_ by calling `await/2`.
  When all `parties` have _arrived_, all calls to `await/2`
  unblock, and parties may proceed.
  Thereafter, the barrier resets (which is what makes it cyclic).
  Although this fully describes the happy path, documentation for time outs,
  participating parties that exit, and other sad paths is currently lacking.
  ## Example
  iex> barrier = SyncPrimitives.CyclicBarrier.start(2, fn -> IO.puts("barrier action") end)
  {SyncPrimitives.CyclicBarrier, #PID<0.149.0>}
  iex> spawn_link(fn ->
  ...> IO.puts("process 1, before wait")
  ...> SyncPrimitives.CyclicBarrier.await(barrier)
  ...> IO.puts("process 1, after wait")
  ...> end)
  process 1, before wait
  #PID<0.155.0>
  iex> spawn_link(fn ->
  ...> IO.puts("process 2, before wait")
  ...> SyncPrimitives.CyclicBarrier.await(barrier)
  ...> IO.puts("process 2, after wait")
  ...> end)
  process 2, before wait
  #PID<0.161.0>
  barrier action
  process 1, after wait
  process 2, after wait
  iex> SyncPrimitives.CyclicBarrier.stop(barrier)
  """
  require Record

  # The barrier handle is a record {SyncPrimitives.CyclicBarrier, pid}
  # wrapping the pid of the backing server process.
  Record.defrecordp(:barrier, __MODULE__, pid: nil)

  @type barrier :: record(:barrier, pid: pid)

  # The GenServer implementation lives in SyncPrimitives.CyclicBarrier.Server
  # (not shown in this file).
  @server_module Module.concat(__MODULE__, Server)

  @doc """
  Starts a new CyclicBarrier that expects `parties` processes to call `await/1`
  or `await/2` before it releases. Calls to `await/1` block until all expected
  parties have called `await/1`. Thereafter, the barrier resets (which is what
  makes it cyclic).
  """
  @spec start(pos_integer, nil | (() -> any)) :: barrier
  def start(parties, action \\ nil)
      when is_integer(parties) and parties > 0 and (action === nil or is_function(action, 0)) do
    {:ok, server_pid} = GenServer.start_link(@server_module, parties: parties, action: action)
    barrier(pid: server_pid)
  end

  @spec stop(barrier) :: :ok
  def stop(_barrier = barrier(pid: pid)) do
    GenServer.stop(pid)
  end

  @spec alive?(barrier) :: boolean
  def alive?(_barrier = barrier(pid: pid)) do
    Process.alive?(pid)
  end

  @spec broken?(barrier) :: boolean
  @doc """
  Returns `true`, if any of the parties waiting for the barrier timed out or
  exited since construction or the last reset, `false` otherwise.
  """
  def broken?(barrier(pid: pid)) do
    # :status is expected to be :waiting or :broken; any other reply (e.g.
    # :timeout from call/3) raises CaseClauseError — cannot happen with the
    # default :infinity timeout used here.
    case call(pid, :status) do
      :waiting ->
        false

      :broken ->
        true
    end
  end

  @spec number_waiting(barrier) :: false | integer
  @doc """
  Returns the number of parties currently waiting for the barrier.
  """
  def number_waiting(barrier(pid: pid)) do
    GenServer.call(pid, :number_waiting)
  end

  @spec parties(barrier) :: false | integer
  @doc """
  Returns the number of parties required to trip this barrier.
  """
  def parties(barrier(pid: pid)) do
    GenServer.call(pid, :parties)
  end

  @spec reset(barrier) :: boolean
  @doc """
  Resets the barrier to its initial state. If any parties are currently waiting
  at the barrier, the `await/1` or `await/2` calls will return `:broken`.
  """
  def reset(barrier(pid: pid)) do
    case call(pid, :reset) do
      :reset ->
        true

      :broken ->
        true

      _ ->
        false
    end
  end

  @spec await(barrier, :infinity | integer) :: :fulfilled | :broken
  # Blocks until all parties arrive (:fulfilled) or the barrier breaks.
  # A timeout on the caller's side is reported to the server and surfaces
  # as :broken.
  def await(barrier(pid: pid), timeout \\ :infinity)
      when timeout === :infinity or is_integer(timeout) do
    case call(pid, :await, timeout) do
      :fulfilled ->
        :fulfilled

      :broken ->
        :broken

      :timeout ->
        :broken
    end
  end

  # GenServer.call that converts a call timeout into a :timeout return value
  # instead of exiting, notifying the server that this attendant gave up.
  defp call(pid, request, timeout \\ :infinity) do
    try do
      GenServer.call(pid, request, timeout)
    catch
      :exit, {:timeout, _} ->
        GenServer.cast(pid, :attendant_timedout)
        :timeout
    end
  end
end
|
lib/sync_primitives/cyclic_barrier.ex
| 0.785103
| 0.498413
|
cyclic_barrier.ex
|
starcoder
|
defmodule UploadcareEx.Config do
  @moduledoc """
  Configuration
  ### Required parameters:
  ```
  config :uploadcare_ex,
  public_key: "public_key",
  private_key: "private_key"
  ```
  By default [Uploadcare.Simple auth-scheme](https://uploadcare.com/docs/api_reference/rest/requests_auth/) is used.
  ### Optional parameters
  ```
  config :uploadcare_ex,
  ...
  accept_header: "application/vnd.uploadcare-v0.5+json",
  store: "0",
  retry_period: 1_000,
  retry_expiry: 5_000
  ```
  ```retry_period``` and ```retry_expiry``` parameters are used for request retries in case of Uploadcare service server errors. Their default values are 1_000 and 5_000 respectively.
  """

  # Required: raises ArgumentError when not configured.
  @spec public_key() :: binary()
  def public_key do
    get_env_var!(:public_key)
  end

  # Required: raises ArgumentError when not configured.
  @spec private_key() :: binary()
  def private_key do
    get_env_var!(:private_key)
  end

  # Optional settings below fall back to their documented defaults.

  @spec accept_header() :: binary()
  def accept_header do
    Application.get_env(:uploadcare_ex, :accept_header) || "application/vnd.uploadcare-v0.5+json"
  end

  @spec store() :: binary()
  def store do
    Application.get_env(:uploadcare_ex, :store) || "0"
  end

  @spec retry_period() :: number()
  def retry_period do
    Application.get_env(:uploadcare_ex, :retry_period) || 1_000
  end

  @spec retry_expiry() :: number()
  def retry_expiry do
    Application.get_env(:uploadcare_ex, :retry_expiry) || 5_000
  end

  @spec upload_url_retry_period() :: number()
  def upload_url_retry_period do
    Application.get_env(:uploadcare_ex, :upload_url_retry_period) || 300
  end

  @spec upload_url_retry_expiry() :: number()
  def upload_url_retry_expiry do
    Application.get_env(:uploadcare_ex, :upload_url_retry_expiry) || 20_000
  end

  # Fetches a required config value, raising when it is missing or blank.
  #
  # BUGFIX: the previous guard was `value when value != ""`, which `nil` also
  # satisfies (`nil != ""` is true), so an unset key silently returned `nil`
  # instead of raising. The guard now rejects both `nil` and `""`.
  @spec get_env_var!(atom()) :: binary() | number()
  defp get_env_var!(key) do
    case Application.get_env(:uploadcare_ex, key) do
      value when value != nil and value != "" ->
        value

      els ->
        raise ArgumentError,
          message:
            "Please set config variable `config :uploadcare_ex, #{key}`, got: `#{inspect(els)}``"
    end
  end
end
|
lib/uploadcare_ex/config.ex
| 0.78345
| 0.635081
|
config.ex
|
starcoder
|
defmodule Delaunay do
  use Bitwise
  alias __MODULE__
  alias Delaunay.Internals
  alias Delaunay.Utils

  @moduledoc """
  Documentation for Delaunay.
  """

  # Double-precision machine epsilon; points closer than this per axis are
  # treated as near-duplicates.
  @epsilon :math.pow(2, -52)

  # Port of the delaunator triangulation data layout:
  #   coords    - flat list [x0, y0, x1, y1, ...] (built by from/1)
  #   triangles - triangle vertex indices, 3 per triangle
  #   halfedges - adjacency managed by Delaunay.Internals; TODO confirm exact
  #               encoding against upstream delaunator
  #   hull_*    - advancing-convex-hull bookkeeping, cleared at the end of new/1
  defstruct [
    :coords,
    :halfedges,
    :hull,
    :hull_next,
    :hull_prev,
    :hull_start,
    :hull_tri,
    :hull_hash,
    :hull_size,
    :triangles,
    :triangles_len,
    :hash_size,
    :cx,
    :cy,
    :edge_stack
  ]

  @doc """
  Delaunay triangulation of points
  points is an array of tuples: `[{x, y}, ...]`
  ## Examples
  iex> points |> Delaunay.from
  %Delaunay{
  ...
  triangles: [1, 18, 5, 1, 3, 18, 1, 29, 3, 3, 29, 18, ...],
  ...
  }
  """
  def from(points) do
    n = length(points)

    # Flatten [{x, y}, ...] into the flat coords list [x0, y0, x1, y1, ...].
    {_, coords} =
      points
      |> List.foldl(
        {0, List.duplicate(0, n * 2)},
        fn p, {i, c} ->
          {
            i + 1,
            c
            |> List.replace_at(2 * i, p |> Utils.defaultGetX)
            |> List.replace_at(2 * i + 1, p |> Utils.defaultGetY)
          }
        end
      )

    new(coords)
  end

  @doc """
  """
  # Builds the triangulation from a flat coords list. Mirrors the delaunator
  # constructor: pick a seed triangle near the centroid, sort the remaining
  # points by distance to its circumcenter, then grow the hull point by point.
  def new(coords) do
    # Number of points: coords holds two values per point.
    n = length(coords) |> bsr(1)

    max_triangles = 2 * n - 5
    # arrays that will store the triangulation graph
    triangles = List.duplicate(0, max_triangles * 3)
    halfedges = List.duplicate(0, max_triangles * 3)

    # temporary arrays for tracking the edges of the advancing convex hull
    hash_size = :math.sqrt(n) |> Float.ceil |> trunc
    # edge to prev edge
    hull_prev = List.duplicate(0, n)
    # edge to next edge
    hull_next = List.duplicate(0, n)
    # edge to adjacent triangle
    hull_tri = List.duplicate(0, n)
    # angular edge hash
    hull_hash = List.duplicate(-1, hash_size)

    # Compute the bounding box and initialize ids = [0, 1, ..., n-1].
    {
      ids,
      _,
      min_x,
      min_y,
      max_x,
      max_y
    } =
      0..(n - 1)
      |> Enum.to_list
      |> List.foldl(
        {List.duplicate(0, n), 0, nil, nil, nil, nil},
        fn _, {
             ids,
             i,
             min_x,
             min_y,
             max_x,
             max_y
           } ->
          x = coords |> Enum.at(2 * i)
          y = coords |> Enum.at(2 * i + 1)
          min_x = if (min_x == nil || x < min_x), do: x, else: min_x
          min_y = if (min_y == nil || y < min_y), do: y, else: min_y
          max_x = if (max_x == nil || x > max_x), do: x, else: max_x
          max_y = if (max_y == nil || y > max_y), do: y, else: max_y

          {
            ids |> List.replace_at(i, i),
            i + 1,
            min_x,
            min_y,
            max_x,
            max_y
          }
        end
      )

    # Seed selection: point nearest the bbox center, its nearest neighbour,
    # and the third point forming the smallest circumcircle with them.
    cx = (min_x + max_x) / 2
    cy = (min_y + max_y) / 2

    {_, _, i0} = findCenter(coords, n, cx, cy)
    i0x = coords |> Enum.at(2 * i0)
    i0y = coords |> Enum.at(2 * i0 + 1)

    {_, _, i1} = findClosestCenterPoint(coords, n, i0, i0x, i0y)
    i1x = coords |> Enum.at(2 * i1)
    i1y = coords |> Enum.at(2 * i1 + 1)

    {_, min_radius, i2} = findSmallestCircle(coords, n, i0, i0x, i0y, i1, i1x, i1y)
    i2x = coords |> Enum.at(2 * i2)
    i2y = coords |> Enum.at(2 * i2 + 1)

    if (min_radius == nil) do
      raise "No Delaunay triangulation exists for this input."
    end

    # swap the order of the seed points for counter-clockwise orientation
    {i1, i1x, i1y, i2, i2x, i2y} =
      if (Utils.orient(i0x, i0y, i1x, i1y, i2x, i2y)) do
        i = i1
        x = i1x
        y = i1y
        {i2, i2x, i2y, i, x, y}
      else
        {i1, i1x, i1y, i2, i2x, i2y}
      end

    # Rebind cx/cy to the seed triangle's circumcenter; all points are sorted
    # by distance to it below.
    {cx, cy} = Utils.circumcenter(i0x, i0y, i1x, i1y, i2x, i2y)

    dists =
      0..(n - 1)
      |> Enum.to_list
      |> List.foldl(
        List.duplicate(0, n),
        fn i, l ->
          l
          |> List.replace_at(
            i,
            Utils.dist(
              coords |> Enum.at(2 * i),
              coords |> Enum.at(2 * i + 1),
              cx,
              cy
            )
          )
        end
      )

    # sort the points by distance from the seed triangle circumcenter
    ids = Utils.quicksort(ids, dists, 0, n - 1)

    # set up the seed triangle as the starting hull
    hull_start = i0
    hull_size = 3

    delaunay = %Delaunay{
      coords: coords,
      triangles: triangles,
      halfedges: halfedges,
      hash_size: hash_size,
      hull_start: hull_start,
      hull_prev: hull_prev,
      hull_next: hull_next,
      hull_tri: hull_tri,
      hull_hash: hull_hash,
      hull_size: hull_size,
      cx: cx,
      cy: cy,
      edge_stack: List.duplicate(0, 512)
    }

    # Link the three seed vertices into a circular hull i0 -> i1 -> i2 -> i0.
    hull_next = hull_next
                |> List.replace_at(i0, i1)
                |> List.replace_at(i1, i2)
                |> List.replace_at(i2, i0)
    hull_prev = hull_prev
                |> List.replace_at(i2, i1)
                |> List.replace_at(i0, i2)
                |> List.replace_at(i1, i0)
    hull_tri = hull_tri
               |> List.replace_at(i0, 0)
               |> List.replace_at(i1, 1)
               |> List.replace_at(i2, 2)
    hull_hash = hull_hash
                |> List.replace_at(Internals.hashKey(delaunay, i0x, i0y), i0)
                |> List.replace_at(Internals.hashKey(delaunay, i1x, i1y), i1)
                |> List.replace_at(Internals.hashKey(delaunay, i2x, i2y), i2)

    new_delaunay = %{
                     delaunay
                     |
                     triangles_len: 0,
                     hull_start: hull_start,
                     hull_prev: hull_prev,
                     hull_next: hull_next,
                     hull_tri: hull_tri,
                     hull_hash: hull_hash
                   }
                   |> Internals.addTriangle(i0, i1, i2, -1, -1, -1)
                   |> elem(0)

    # Insert the remaining points in circumcenter-distance order, then
    # materialize the hull list.
    new_delaunay =
      0..(length(ids) - 1)
      |> Enum.reduce(
        {
          new_delaunay,
          0,
          0,
          ids,
          coords,
          i0,
          i1,
          i2
        },
        &addTriangles/2
      )
      |> elem(0)
      |> (fn %Delaunay{hull_size: hull_size} = d -> %{d | hull: List.duplicate(0, hull_size)} end).()

    0..(new_delaunay.hull_size - 1)
    |> Enum.reduce(
      {new_delaunay, nil},
      &fillHull/2
    )
    |> elem(0)
    |> (
      fn %Delaunay{triangles: triangles, halfedges: halfedges, triangles_len: triangles_len} = d ->
        %{
          d
          |
          # get rid of temporary arrays
          hull_prev: nil,
          hull_next: nil,
          hull_tri: nil,
          # trim typed triangle mesh arrays
          triangles: triangles |> Enum.slice(0, triangles_len),
          halfedges: halfedges |> Enum.slice(0, triangles_len)
        }
      end).()
  end

  @doc """
  """
  # Returns the triangulation as a list of triangles, each a list of three
  # {x, y} point tuples.
  def getTriangles(
        %Delaunay{
          triangles: triangles
        } = delaunay,
        i \\ 0
      ) do
    if (i < length(triangles)) do
      p0 = triangles |> Enum.at(i)
      p1 = triangles |> Enum.at(i + 1)
      p2 = triangles |> Enum.at(i + 2)

      [
        [
          getPoint(delaunay, p0),
          getPoint(delaunay, p1),
          getPoint(delaunay, p2)
        ]
        | getTriangles(delaunay, i + 3)
      ]
    else
      []
    end
  end

  @doc """
  """
  # Returns every triangle edge as an index pair {lo, hi}, normalized so the
  # smaller vertex index comes first (pairs may repeat across triangles).
  def getEdgePairs(
        %Delaunay{
          triangles: triangles
        } = delaunay,
        i \\ 0
      ) do
    if (i < length(triangles)) do
      p0 = triangles |> Enum.at(i)
      p1 = triangles |> Enum.at(i + 1)
      p2 = triangles |> Enum.at(i + 2)

      [
        (if (p0 < p1), do: {p0, p1}, else: {p1, p0}),
        (if (p1 < p2), do: {p1, p2}, else: {p2, p1}),
        (if (p0 < p2), do: {p0, p2}, else: {p2, p0})
        | getEdgePairs(delaunay, i + 3)
      ]
    else
      []
    end
  end

  @doc """
  """
  # Unique edges annotated with their Euclidean length: {p0, p1, weight}.
  def getEdgePairsWithWeigth(delaunay) do
    getEdgePairs(delaunay)
    |> Enum.uniq
    |> Enum.map(
      fn {p0, p1} ->
        {p0x, p0y} = getPoint(delaunay, p0)
        {p1x, p1y} = getPoint(delaunay, p1)
        {p0, p1, Utils.dist(p0x, p0y, p1x, p1y)}
      end
    )
  end

  # Looks up the {x, y} coordinates of point index p in the flat coords list.
  defp getPoint(%Delaunay{coords: coords}, p) do
    px = coords |> Enum.at(2 * p)
    py = coords |> Enum.at(2 * p + 1)
    {px, py}
  end

  # Reducer inserting the k-th point (in sorted order) into the triangulation.
  # Accumulator: {delaunay, xp, yp, ids, coords, i0, i1, i2}.
  # NOTE(review): xp/yp are threaded through unchanged (always the initial
  # 0/0); upstream delaunator updates them to the current point's coordinates
  # each accepted iteration, so the near-duplicate skip below may be
  # ineffective — verify against the reference implementation.
  defp addTriangles(
         k,
         {
           %Delaunay{
             hash_size: hash_size,
             hull_prev: hull_prev,
             hull_next: hull_next,
             hull_tri: hull_tri,
             hull_hash: hull_hash,
             hull_size: hull_size
           } = delaunay,
           xp,
           yp,
           ids,
           coords,
           i0,
           i1,
           i2
         } = acc
       ) do
    i = ids |> Enum.at(k)
    x = coords |> Enum.at(2 * i)
    y = coords |> Enum.at(2 * i + 1)

    cond do
      # skip near-duplicate points
      (k > 0 && abs(x - xp) <= @epsilon && abs(y - yp) <= @epsilon) ->
        acc

      # skip seed triangle points
      (i == i0 || i == i1 || i == i2) ->
        acc

      true ->
        # find a visible edge on the convex hull using edge hash
        key = delaunay |> Internals.hashKey(x, y)

        start =
          0..(hash_size - 1)
          |> Enum.reduce_while(
            0,
            fn j, _ ->
              hull_k = rem(key + j, hash_size)
              start = hull_hash |> Enum.at(hull_k)

              if (
                start != -1 && start != hull_next
                                        |> Enum.at(start)) do
                {:halt, start}
              else
                {:cont, start}
              end
            end
          )
          |> (
            fn s ->
              hull_prev |> Enum.at(s)
            end).()

        e = findClosePoint(coords, hull_next, x, y, start, start)

        if (e == -1) do
          # likely a near-duplicate point; skip it
          acc
        else
          # add the first triangle from the point
          {new_delaunay, t} = Internals.addTriangle(
            delaunay,
            e,
            i,
            hull_next |> Enum.at(e),
            -1,
            -1,
            hull_tri |> Enum.at(e)
          )

          # recursively flip triangles from the point until they satisfy the Delaunay condition
          {new_delaunay, ar} = Internals.legalize(new_delaunay, t + 2)

          # keep track of boundary triangles on the hull
          new_hull_tri = hull_tri
                         |> List.replace_at(i, ar)
                         |> List.replace_at(e, t)
          new_delaunay = %{new_delaunay | hull_tri: new_hull_tri, hull_size: hull_size + 1}

          # walk forward through the hull, adding more triangles and flipping recursively
          n = hull_next |> Enum.at(e)

          {new_delaunay, n} = addTrianglesForward(
            new_delaunay,
            coords,
            x,
            y,
            n,
            i
          )

          # walk backward from the other side, adding more triangles and flipping
          {new_delaunay, e} =
            if (e != start) do
              {new_delaunay, e}
            else
              addTrianglesBackward(
                new_delaunay,
                coords,
                x,
                y,
                e,
                i
              )
            end

          # update the hull indices
          new_hull_prev = hull_prev
                          |> List.replace_at(i, e)
                          |> List.replace_at(n, i)
          new_hull_next = hull_next
                          |> List.replace_at(e, i)
                          |> List.replace_at(i, n)

          new_delaunay = %{
            new_delaunay
            |
            hull_start: e,
            hull_prev: new_hull_prev,
            hull_next: new_hull_next
          }

          # save the two new edges in the hash table
          new_hull_hash = hull_hash
                          |> List.replace_at(Internals.hashKey(new_delaunay, x, y), i)
                          |> List.replace_at(
                            Internals.hashKey(
                              new_delaunay,
                              coords |> Enum.at(2 * e),
                              coords |> Enum.at(2 * e + 1)
                            ),
                            e
                          )

          {
            %{
              new_delaunay
              |
              hull_hash: new_hull_hash
            },
            xp,
            yp,
            ids,
            coords,
            i0,
            i1,
            i2
          }
        end
    end
  end

  # Walks forward along the hull from edge n, adding triangles while the new
  # point i can "see" the edge (orient test), flipping each via legalize.
  # Returns {delaunay, n} with n the first edge no longer visible.
  defp addTrianglesForward(
         %Delaunay{hull_next: hull_next, hull_tri: hull_tri, hull_size: hull_size} = delaunay,
         coords,
         x,
         y,
         n,
         i
       ) do
    q = hull_next |> Enum.at(n)

    if (
      Utils.orient(
        x,
        y,
        coords |> Enum.at(2 * n),
        coords |> Enum.at(2 * n + 1),
        coords |> Enum.at(2 * q),
        coords |> Enum.at(2 * q + 1)
      )) do
      {new_delaunay, t} = Internals.addTriangle(
        delaunay,
        n,
        i,
        q,
        hull_tri |> Enum.at(i),
        -1,
        hull_tri |> Enum.at(n)
      )

      {new_delaunay, ar} = Internals.legalize(new_delaunay, t + 2)

      # hull_next[n] = n marks edge n as removed from the hull.
      addTrianglesForward(
        %{
          new_delaunay
          |
          hull_next: hull_next |> List.replace_at(n, n),
          hull_tri: hull_tri |> List.replace_at(i, ar),
          hull_size: hull_size - 1,
        },
        coords,
        x,
        y,
        q,
        i
      )
    else
      {delaunay, n}
    end
  end

  # Mirror of addTrianglesForward, walking backward via hull_prev.
  # NOTE(review): here hull_tri[q] is set to t while the legalize result is
  # discarded (_ar); the forward walk stores the legalize result instead —
  # verify this asymmetry against the reference implementation.
  defp addTrianglesBackward(
         %Delaunay{
           hull_next: hull_next,
           hull_prev: hull_prev,
           hull_tri: hull_tri,
           hull_size: hull_size
         } = delaunay,
         coords,
         x,
         y,
         e,
         i
       ) do
    q = hull_prev |> Enum.at(e)

    if (
      Utils.orient(
        x,
        y,
        coords |> Enum.at(2 * q),
        coords |> Enum.at(2 * q + 1),
        coords |> Enum.at(2 * e),
        coords |> Enum.at(2 * e + 1)
      )) do
      {new_delaunay, t} = Internals.addTriangle(
        delaunay,
        q,
        i,
        e,
        -1,
        hull_tri |> Enum.at(e),
        hull_tri |> Enum.at(q)
      )

      {new_delaunay, _ar} = Internals.legalize(new_delaunay, t + 2)

      addTrianglesBackward(
        %{
          new_delaunay
          |
          hull_next: hull_next |> List.replace_at(e, e),
          hull_tri: hull_tri |> List.replace_at(q, t),
          hull_size: hull_size - 1,
        },
        coords,
        x,
        y,
        q,
        i
      )
    else
      {delaunay, e}
    end
  end

  # Advances around the hull from `e` until an edge visible from (x, y) is
  # found (orient test succeeds); returns -1 after a full loop back to start.
  defp findClosePoint(coords, hull_next, x, y, start, e) do
    q = hull_next |> Enum.at(e)

    if (
      Utils.orient(
        x,
        y,
        coords |> Enum.at(2 * e),
        coords |> Enum.at(2 * e + 1),
        coords |> Enum.at(2 * q),
        coords |> Enum.at(2 * q + 1)
      )) do
      e
    else
      if (q == start) do
        -1
      else
        findClosePoint(coords, hull_next, x, y, start, q)
      end
    end
  end

  # pick a seed point close to the center
  # Returns {next_index, min_dist, i0} — only i0 is used by callers.
  defp findCenter(coords, n, cx, cy) do
    0..(n - 1)
    |> Enum.to_list
    |> List.foldl(
      {0, nil, nil},
      fn _, {i, min_dist, i0} ->
        d = Utils.dist(
          cx,
          cy,
          coords |> Enum.at(2 * i),
          coords |> Enum.at(2 * i + 1)
        )

        if (min_dist == nil || d < min_dist) do
          {i + 1, d, i}
        else
          {i + 1, min_dist, i0}
        end
      end
    )
  end

  # find the point closest to the seed
  defp findClosestCenterPoint(coords, n, i0, i0x, i0y) do
    0..(n - 1)
    |> Enum.to_list
    |> List.foldl(
      {0, nil, nil},
      fn _, {i, min_dist, i1} ->
        if (i == i0) do
          {i + 1, min_dist, i1}
        else
          d = Utils.dist(
            i0x,
            i0y,
            coords |> Enum.at(2 * i),
            coords |> Enum.at(2 * i + 1)
          )

          # NB: && binds tighter than ||, so this reads
          # min_dist == nil || (d < min_dist && d > 0).
          if ((min_dist == nil || d < min_dist && d > 0)) do
            {i + 1, d, i}
          else
            {i + 1, min_dist, i1}
          end
        end
      end
    )
  end

  # find the third point which forms the smallest circumcircle with the first two
  defp findSmallestCircle(coords, n, i0, i0x, i0y, i1, i1x, i1y) do
    0..(n - 1)
    |> Enum.to_list
    |> List.foldl(
      {0, nil, nil},
      fn _, {i, min_radius, i2} ->
        if (i == i0 || i == i1) do
          {i + 1, min_radius, i2}
        else
          r = Utils.circumradius(
            i0x,
            i0y,
            i1x,
            i1y,
            coords |> Enum.at(2 * i),
            coords |> Enum.at(2 * i + 1)
          )

          if (min_radius == nil || r < min_radius) do
            {i + 1, r, i}
          else
            {i + 1, min_radius, i2}
          end
        end
      end
    )
  end

  # Reducer writing the i-th hull vertex: starts at hull_start (e == nil on
  # the first iteration) and follows hull_next around the hull.
  defp fillHull(
         i,
         {
           %Delaunay{
             hull: hull,
             hull_start: hull_start,
             hull_next: hull_next
           } = delaunay,
           e
         }
       ) do
    e = if (e != nil), do: e, else: hull_start

    {
      %{
        delaunay |
        hull: hull |> List.replace_at(i, e)
      },
      hull_next |> Enum.at(e)
    }
  end
end
|
lib/delaunay.ex
| 0.820218
| 0.4474
|
delaunay.ex
|
starcoder
|
defmodule Akd.Stop.Release do
  @moduledoc """
  A native Hook module that comes shipped with Akd.
  This module uses `Akd.Hook`.
  Provides a set of operations that can stop an app built and released using
  distillery.
  If fails, it restarts the stopped node.
  # Options:
  * `run_ensure`: `boolean`. Specifies whether to a run a command or not.
  * `ignore_failure`: `boolean`. Specifies whether to continue if this hook fails.
  # Defaults:
  * `run_ensure`: `true`
  * `ignore_failure`: `false`
  """

  use Akd.Hook

  # Merged under caller-supplied options in get_hooks/2 (caller wins).
  @default_opts [run_ensure: true, ignore_failure: false]

  @doc """
  Callback implementation for `get_hooks/2`.
  This function returns a list of operations that can be used to stop an app
  built on the `publish_to` destination of a deployment.
  ## Examples
  iex> deployment = %Akd.Deployment{mix_env: "prod",
  ...> build_at: Akd.Destination.local("."),
  ...> publish_to: Akd.Destination.local("."),
  ...> name: "name",
  ...> vsn: "0.1.1"}
  iex> Akd.Stop.Release.get_hooks(deployment, [])
  [%Akd.Hook{ensure: [], ignore_failure: false,
  main: [%Akd.Operation{cmd: "bin/name stop", cmd_envs: [],
  destination: %Akd.Destination{host: :local, path: ".",
  user: :current}}],
  rollback: [%Akd.Operation{cmd: "bin/name start", cmd_envs: [],
  destination: %Akd.Destination{host: :local, path: ".",
  user: :current}}], run_ensure: true}]
  """
  @spec get_hooks(Akd.Deployment.t(), Keyword.t()) :: list(Akd.Hook.t())
  def get_hooks(deployment, opts \\ []) do
    opts = uniq_merge(opts, @default_opts)
    [stop_hook(deployment, opts)]
  end

  # This function takes a deployment and options and returns an Akd.Hook.t
  # struct using FormHook DSL. The main operation stops the release binary at
  # the publish destination; the rollback restarts it.
  defp stop_hook(deployment, opts) do
    destination = Akd.DestinationResolver.resolve(:publish, deployment)
    cmd_envs = Keyword.get(opts, :cmd_envs, [])

    form_hook opts do
      main("bin/#{deployment.name} stop", destination, cmd_envs: cmd_envs)
      rollback("bin/#{deployment.name} start", destination, cmd_envs: cmd_envs)
    end
  end

  # This function takes two keyword lists and merges them keeping the keys
  # unique. If there are multiple values for a key, it takes the value from
  # the first value of keyword1 corresponding to that key.
  defp uniq_merge(keyword1, keyword2) do
    keyword2
    |> Keyword.merge(keyword1)
    |> Keyword.new()
  end
end
|
lib/akd/base/stop/release.ex
| 0.872062
| 0.517022
|
release.ex
|
starcoder
|
defmodule PhoneNumber do
  @doc """
  Remove formatting from a phone number if the given number is valid. Return an error otherwise.

  Strips common separators, optionally drops a leading US country code "1",
  and checks the NANP rules for the area and exchange codes. Returns
  `{:ok, ten_digit_number}` or `{:error, reason}`.
  """
  @spec clean(String.t()) :: {:ok, String.t()} | {:error, String.t()}
  def clean(raw) do
    digits = strip_separators(raw)

    # Each step either passes the number along or short-circuits the `with`
    # by returning its {:error, reason} unchanged.
    with :ok <- check_digits(digits),
         :ok <- check_length(digits),
         {:ok, local} <- strip_country_code(digits),
         :ok <- check_area_code(local),
         :ok <- check_exchange_code(local) do
      {:ok, local}
    end
  end

  # Drop whitespace and the punctuation commonly used to format numbers.
  defp strip_separators(raw), do: String.replace(raw, ~r/[\s+.()-]/, "")

  # Every remaining character must be an ASCII digit.
  defp check_digits(digits) do
    digits
    |> String.codepoints()
    |> Enum.all?(fn <<c>> -> c in ?0..?9 end)
    |> case do
      true -> :ok
      false -> {:error, "must contain digits only"}
    end
  end

  # Either a bare 10-digit number or 11 digits with a country code.
  defp check_length(digits) do
    case String.length(digits) do
      len when len in 10..11 -> :ok
      _ -> {:error, "incorrect number of digits"}
    end
  end

  # An 11-digit number must carry the US country code "1", which is dropped.
  defp strip_country_code(<<_::binary-size(10)>> = digits), do: {:ok, digits}
  defp strip_country_code("1" <> local), do: {:ok, local}
  defp strip_country_code(_digits), do: {:error, "11 digits must start with 1"}

  # Area code (first digit) must be 2-9.
  defp check_area_code("0" <> _), do: {:error, "area code cannot start with zero"}
  defp check_area_code("1" <> _), do: {:error, "area code cannot start with one"}
  defp check_area_code(_local), do: :ok

  # Exchange code (fourth digit) must be 2-9.
  defp check_exchange_code(<<_::binary-size(3), "0", _::binary>>),
    do: {:error, "exchange code cannot start with zero"}

  defp check_exchange_code(<<_::binary-size(3), "1", _::binary>>),
    do: {:error, "exchange code cannot start with one"}

  defp check_exchange_code(_local), do: :ok
end
|
exercises/practice/phone-number/.meta/example.ex
| 0.769643
| 0.437103
|
example.ex
|
starcoder
|
defmodule Workflows.Rule do
  @moduledoc """
  Choice state rule.
  """

  @type t :: %__MODULE__{
          next: String.t(),
          rule: (map() -> boolean())
        }

  # next: name of the state to transition to when the rule matches;
  # rule: compiled predicate over the input map.
  defstruct [:next, :rule]

  @doc """
  Create a rule that can be matched on an input.
  """
  def create(%{"Next" => next} = rule) do
    # Compile the comparison/combinator part of the definition into a
    # predicate function and pair it with the "Next" transition target.
    case do_create(rule) do
      {:ok, rule} ->
        {:ok, %__MODULE__{next: next, rule: rule}}

      err ->
        err
    end
  end

  def create(_rule) do
    # Every rule definition must name the state to transition to.
    {:error, :missing_next}
  end

  # Evaluates the compiled predicate against the given input.
  def call(%__MODULE__{rule: rule}, args) do
    rule.(args)
  end
## Private
defp do_create(%{"Not" => inner_case}) do
with {:ok, inner_rule} <- do_create(inner_case) do
rule = fn args ->
not inner_rule.(args)
end
{:ok, rule}
end
end
defp do_create(%{"Or" => cases}) do
with {:ok, inner_rules} <- do_create_cases(cases) do
rule = fn args ->
Enum.any?(inner_rules, fn rule -> rule.(args) end)
end
{:ok, rule}
end
end
defp do_create(%{"And" => cases}) do
with {:ok, inner_rules} <- do_create_cases(cases) do
rule = fn args ->
Enum.all?(inner_rules, fn rule -> rule.(args) end)
end
{:ok, rule}
end
end
defp do_create(%{"StringEquals" => value, "Variable" => variable}),
do: compare_with_value(&==/2, &is_binary/1, variable, value)
defp do_create(%{"StringEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(&==/2, &is_binary/1, variable, value)
defp do_create(%{"StringLessThan" => value, "Variable" => variable}),
do: compare_with_value(&</2, &is_binary/1, variable, value)
defp do_create(%{"StringLessThanPath" => value, "Variable" => variable}),
do: compare_with_path_value(&</2, &is_binary/1, variable, value)
defp do_create(%{"StringGreaterThan" => value, "Variable" => variable}),
do: compare_with_value(&>/2, &is_binary/1, variable, value)
defp do_create(%{"StringGreaterThanPath" => value, "Variable" => variable}),
do: compare_with_path_value(&>/2, &is_binary/1, variable, value)
defp do_create(%{"StringLessThanEquals" => value, "Variable" => variable}),
do: compare_with_value(&<=/2, &is_binary/1, variable, value)
defp do_create(%{"StringLessThanEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(&<=/2, &is_binary/1, variable, value)
defp do_create(%{"StringGreaterThanEquals" => value, "Variable" => variable}),
do: compare_with_value(&>=/2, &is_binary/1, variable, value)
defp do_create(%{"StringGreaterThanEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(&>=/2, &is_binary/1, variable, value)
defp do_create(%{"StringMatches" => _value, "Variable" => _variable}),
do: {:error, "Not implemented"}
defp do_create(%{"NumericEquals" => value, "Variable" => variable}),
do: compare_with_value(&==/2, &is_number/1, variable, value)
defp do_create(%{"NumericEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(&==/2, &is_number/1, variable, value)
defp do_create(%{"NumericLessThan" => value, "Variable" => variable}),
do: compare_with_value(&</2, &is_number/1, variable, value)
defp do_create(%{"NumericLessThanPath" => value, "Variable" => variable}),
do: compare_with_path_value(&</2, &is_number/1, variable, value)
defp do_create(%{"NumericGreaterThan" => value, "Variable" => variable}),
do: compare_with_value(&>/2, &is_number/1, variable, value)
defp do_create(%{"NumericGreaterThanPath" => value, "Variable" => variable}),
do: compare_with_path_value(&>/2, &is_number/1, variable, value)
defp do_create(%{"NumericLessThanEquals" => value, "Variable" => variable}),
do: compare_with_value(&<=/2, &is_number/1, variable, value)
defp do_create(%{"NumericLessThanEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(&<=/2, &is_number/1, variable, value)
defp do_create(%{"NumericGreaterThanEquals" => value, "Variable" => variable}),
do: compare_with_value(&>=/2, &is_number/1, variable, value)
defp do_create(%{"NumericGreaterThanEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(&>=/2, &is_number/1, variable, value)
defp do_create(%{"BooleanEquals" => value, "Variable" => variable}),
do: compare_with_value(&==/2, &is_boolean/1, variable, value)
defp do_create(%{"BooleanEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(&==/2, &is_boolean/1, variable, value)
defp do_create(%{"TimestampEquals" => value, "Variable" => variable}),
do: compare_with_value(×tamp_eq/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(×tamp_eq/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampLessThan" => value, "Variable" => variable}),
do: compare_with_value(×tamp_lt/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampLessThanPath" => value, "Variable" => variable}),
do: compare_with_path_value(×tamp_lt/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampGreaterThan" => value, "Variable" => variable}),
do: compare_with_value(×tamp_gt/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampGreaterThanPath" => value, "Variable" => variable}),
do: compare_with_path_value(×tamp_gt/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampLessThanEquals" => value, "Variable" => variable}),
do: compare_with_value(×tamp_lte/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampLessThanEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(×tamp_lte/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampGreaterThanEquals" => value, "Variable" => variable}),
do: compare_with_value(×tamp_gte/2, &is_timestamp/1, variable, value)
defp do_create(%{"TimestampGreaterThanEqualsPath" => value, "Variable" => variable}),
do: compare_with_path_value(×tamp_gte/2, &is_timestamp/1, variable, value)
defp do_create(%{"IsNull" => true, "Variable" => variable}) do
with {:ok, variable_fn} <- path_value(variable, result_type: :value_path) do
rule = fn args ->
case variable_fn.(args) do
# returned nil because the value is not present
{nil, ""} -> false
# returned nil because the value is null
{nil, _} -> true
# value not null
{_, _} -> false
end
end
{:ok, rule}
end
end
defp do_create(%{"IsPresent" => true, "Variable" => variable}) do
with {:ok, variable_fn} <- path_value(variable, result_type: :path) do
rule = fn args ->
case variable_fn.(args) do
"" -> false
_ -> true
end
end
{:ok, rule}
end
end
defp do_create(%{"IsNumeric" => true, "Variable" => variable}),
do: is_type(&is_number/1, variable)
defp do_create(%{"IsString" => true, "Variable" => variable}),
do: is_type(&is_binary/1, variable)
defp do_create(%{"IsBoolean" => true, "Variable" => variable}),
do: is_type(&is_boolean/1, variable)
defp do_create(%{"IsTimestamp" => true, "Variable" => variable}),
do: is_type(&is_timestamp/1, variable)
defp do_create(_rule) do
{:error, :invalid_rule}
end
defp do_create_cases(cases) when is_list(cases) do
do_create_cases(cases, [])
end
defp do_create_cases(_cases) do
{:error, :invalid_rule_cases}
end
defp do_create_cases([], acc), do: {:ok, acc}
defp do_create_cases([rule | cases], acc) do
case do_create(rule) do
{:ok, rule} -> do_create_cases(cases, [rule | acc])
err -> err
end
end
defp compare_with_value(compare, check_type, variable, value) do
with {:ok, variable_fn} <- path_value(variable) do
rule = fn args ->
variable_value = variable_fn.(args)
check_type.(variable_value) and
check_type.(value) and
compare.(variable_value, value)
end
{:ok, rule}
end
end
defp compare_with_path_value(compare, check_type, variable, value) do
with {:ok, variable_fn} <- path_value(variable),
{:ok, value_fn} <- path_value(value) do
rule = fn args ->
variable_value = variable_fn.(args)
value_value = value_fn.(args)
check_type.(variable_value) and
check_type.(value_value) and
compare.(variable_value, value_value)
end
{:ok, rule}
end
end
defp is_type(check_type, variable) do
with {:ok, variable_fn} <- path_value(variable) do
rule = fn args ->
variable_value = variable_fn.(args)
check_type.(variable_value)
end
{:ok, rule}
end
end
defp path_value(path, opts \\ []) do
with {:ok, expr} <- Warpath.Expression.compile(path) do
value_fn = fn args -> Warpath.query!(args, expr, opts) end
{:ok, value_fn}
end
end
defp timestamp_eq(ts1, ts2),
do: timestamp_compare(ts1, ts2) == :eq
defp timestamp_lt(ts1, ts2),
do: timestamp_compare(ts1, ts2) == :lt
defp timestamp_gt(ts1, ts2),
do: timestamp_compare(ts1, ts2) == :gt
defp timestamp_lte(ts1, ts2) do
cmp = timestamp_compare(ts1, ts2)
cmp == :lt || cmp == :eq
end
defp timestamp_gte(ts1, ts2) do
cmp = timestamp_compare(ts1, ts2)
cmp == :gt || cmp == :eq
end
defp timestamp_compare(ts1, ts2) do
with {:ok, ts1, _} <- DateTime.from_iso8601(ts1),
{:ok, ts2, _} <- DateTime.from_iso8601(ts2) do
DateTime.compare(ts1, ts2)
else
_ -> :error
end
end
defp is_timestamp(value) do
case DateTime.from_iso8601(value) do
{:ok, _, _} -> true
_ -> false
end
end
end
|
lib/workflows/rule.ex
| 0.607197
| 0.512205
|
rule.ex
|
starcoder
|
defmodule ECS.Entity do
  @moduledoc """
  Functions to work with entities.

  An entity is a plain map holding components. When no key is supplied,
  a component is stored under an atom derived from its struct name: the
  module segments after `Component` are converted to snake_case and
  joined with underscores.

  ## Examples

      # Create a monster entity.
      monster = ECS.Entity.new([
        Component.Health.new(100),
        Component.Name.new("Monty")
      ])

      # Output its name.
      IO.puts monster.name
      #=> "Monty"

      # Attach an attack component to the monster.
      attack_monster = ECS.Entity.attach(monster, Component.Attack.new(:melee, 24))
  """

  @spec attach(map, struct) :: map
  @spec attach(map, struct, atom) :: map
  @doc "Attaches `component` to `entity` under `key`; when no key is given,
  one derived from the component's struct name is used. An already-present
  key is left untouched."
  def attach(entity, component, key \\ nil)

  def attach(entity, component, nil),
    do: attach(entity, component, component_atom(component))

  def attach(entity, component, key),
    do: Map.put_new(entity, key, component)

  @doc "Detaches the component stored under `key` from `entity`."
  def detach(entity, key), do: Map.delete(entity, key)

  @doc "Builds a fresh entity map from the given list of `components`."
  def new(components \\ []) do
    Enum.reduce(components, %{}, fn component, entity ->
      attach(entity, component)
    end)
  end

  @doc "Replaces the component value stored at `key` with `value`."
  def set(entity, key, value),
    do: update(entity, key, fn _current -> value end)

  @doc "Transforms the component value at `key` with `update_fn`."
  def update(entity, key, update_fn),
    do: Map.update!(entity, key, update_fn)

  # Derives the storage key atom from a component's struct name,
  # e.g. Component.Health -> :health.
  defp component_atom(component) do
    component.__struct__
    |> Atom.to_string()
    |> String.split(".")
    |> Enum.drop_while(&(&1 !== "Component"))
    |> Enum.drop(1)
    |> Enum.reverse()
    |> Enum.map_join("_", &String.replace(&1, ~r/(.)([A-Z])/, "\\1_\\2"))
    |> String.downcase()
    |> String.to_atom()
  end
end
|
lib/ecs/entity.ex
| 0.883927
| 0.434041
|
entity.ex
|
starcoder
|
defmodule Cannes.Dumper do
  @moduledoc """
  This module is for communicating with linux can sockets over candump.
  TODO:
  - Add logic to mount a can interface
  - Test if desired interface is reachable
  - Test if candump is available
  - Pass options to candump
  """
  alias Porcelain.Process, as: Proc

  @doc """
  Spawns a candump process on the given interface, streaming its stdout.
  Raises when Porcelain fails to spawn the executable.
  ## Example
      iex> Cannes.Dumper.start("vcan0")
      %Porcelain.Process{
        err: nil,
        out: #Function<63.104660160/2 in Stream.unfold/2>,
        pid: #PID<0.212.0>
      }
  """
  @spec start(binary) :: Porcelain.Process.t()
  def start(interface) when is_binary(interface) do
    # "-L" makes candump emit the machine-readable log format parsed below.
    proc = Porcelain.spawn("candump", ["-L", interface], out: :stream)
    case proc do
      {:error, message} -> raise message
      _ -> proc
    end
  end

  @doc """
  Stops a running candump process carefully. Returns `true` once stopped.
  ## Example
      Cannes.Dumper.stop(proc)
      true
  """
  @spec stop(Porcelain.Process.t()) :: true
  def stop(dumper_process) do
    Proc.stop(dumper_process)
  end

  @doc """
  Check if the candump process is still running.
  """
  @spec alive?(Porcelain.Process.t()) :: boolean
  def alive?(dumper_process) do
    Proc.alive?(dumper_process)
  end

  @doc """
  Returns a stream of formatted can messages (maps produced by
  `format_candump_string/2`), one per candump output line.
  ## Example
      iex> Cannes.Dumper.get_formatted_stream(proc)
      #Stream<[
        enum: #Function<63.104660160/2 in Stream.unfold/2>,
        funs: [#Function<51.104660160/1 in Stream.reject/2>,
        #Function<38.104660160/1 in Stream.map/2>]
      ]>
  """
  @spec get_formatted_stream(Porcelain.Process.t()) :: Stream.t()
  def get_formatted_stream(dumper_process) do
    get_stream(dumper_process)
    |> Stream.map(fn item ->
      format_candump_string(item)
    end)
  end

  # Re-chunks the raw stdout stream into one element per line.
  @spec get_stream(Porcelain.Process.t()) ::
          (any, any -> {:halted, any} | {:suspended, any, (any -> any)})
  def get_stream(%Proc{out: outstream}) do
    outstream
    |> Stream.flat_map(&String.split(&1, "\n", trim: true))
  end

  @doc """
  Parses the given candump string in the candump log format into a map.
  ## Example
      iex> Cannes.Dumper.format_candump_string("(1398128227.045337) can0 133#0000000098")
      %{
        data: <<0, 0, 0, 0, 0, 0, 0, 152>>,
        identifier: <<1, 51>>,
        interface: "can0",
        timestamp: 1398128227.045337
      }
  """
  @spec format_candump_string(binary, any) :: %{
          data: binary,
          identifier: binary,
          interface: any,
          timestamp: number
        }
  def format_candump_string(dump_string, unix_timestamp? \\ false) do
    # Log lines look like "(<ts>) <iface> <id>#<data-hex>".
    message = dump_string |> String.split(" ")
    payload = String.split(Enum.at(message, 2), "#")
    timestamp =
      case unix_timestamp? do
        true -> DateTime.utc_now() |> DateTime.to_unix()
        # 1..-2 drops the surrounding parentheses from the timestamp field.
        _ -> String.slice(Enum.at(message, 0), 1..-2) |> String.to_float()
      end
    %{
      timestamp: timestamp,
      interface: Enum.at(message, 1),
      # Hex fields are left-padded to an even/full width before decoding.
      identifier: Enum.at(payload, 0) |> String.pad_leading(4, "0") |> Base.decode16!(),
      data: Enum.at(payload, 1) |> String.pad_leading(16, "0") |> Base.decode16!()
    }
  end
end
|
lib/dumper.ex
| 0.675444
| 0.448185
|
dumper.ex
|
starcoder
|
# Fixture modules exercising the Strukt `defstruct` macros in various
# configurations; used by the test suite only.
defmodule Strukt.Test.Fixtures do
  use Strukt

  defmodule Classic do
    @moduledoc "This module uses Kernel.defstruct/1, even though our defstruct/1 is in scope, since it is given only a list of field names"
    use Strukt
    defstruct [:name]
  end

  defmodule Simple do
    @moduledoc "This module represents the simplest possible use of defstruct/1"
    use Strukt
    defstruct do
      field(:name, :string, default: "")
    end
  end

  defstruct Inline do
    @moduledoc "This module represents the simplest possible use of defstruct/2, i.e. inline definition of a struct and its module"
    field(:name, :string, default: "")
    @doc "This function is defined in the Inline module"
    def test, do: true
  end

  defstruct Embedded do
    @moduledoc "This module demonstrates that embedding structs inline works just like the top-level"
    embeds_many :items, Item do
      field(:name, :string, required: [message: "must provide item name"])
    end
  end

  defstruct AltPrimaryKey do
    @moduledoc "This module demonstrates the use of a custom primary key, rather than the default of :uuid"
    field(:id, :integer, primary_key: true)
    field(:name, :string, default: "")
  end

  defstruct AttrPrimaryKey do
    @moduledoc "This module demonstrates the use of a custom primary key, using the @primary_key attribute"
    @primary_key {:id, :integer, autogenerate: {System, :unique_integer, []}}
    field(:name, :string, default: "")
  end

  defstruct JSON do
    @moduledoc "This module demonstrates how to derive JSON serialization for your struct"
    @timestamps_opts [type: :utc_datetime_usec]
    @derives [Jason.Encoder]
    field(:name, :string, default: "")
    timestamps(autogenerate: {DateTime, :utc_now, []})
  end

  defmodule OuterAttrs do
    # Same as JSON above, but the derive/timestamp attributes are set in
    # the module body before the defstruct/1 block rather than inside it.
    use Strukt
    @derives [Jason.Encoder]
    @timestamps_opts [type: :utc_datetime_usec, autogenerate: {DateTime, :utc_now, []}]
    defstruct do
      field(:name, :string, default: "")
      timestamps()
    end
  end

  defmodule OuterScope do
    # This imports defstruct and sets up shared defaults in the outer scope
    use Strukt.Test.Macros
    defstruct InnerScope do
      # Since this is a new module scope, we want to set up defaults
      # like we did in the outer scope. If working properly,
      # this macro should be expanded before the schema definition
      use Strukt.Test.Macros
      field(:name, :string, default: "")
      timestamps()
    end
  end

  defstruct Validations do
    @moduledoc "This module uses a variety of validation rules in various combinations"
    field(:name, :string, default: "", required: true)
    field(:email, :string, required: [message: "must provide an email"], format: ~r/^.+@.+$/)
    field(:age, :integer, number: [greater_than: 0])
    field(:status, Ecto.Enum, values: [:red, :green, :blue])
    @doc "This is an override of the validate/1 callback, where you can add additional validations to be run automatically"
    def validate(changeset) do
      changeset
      |> validate_length(:name, min: 2, max: 100)
    end
  end

  defstruct ValidateRequiredEmbed do
    # Exercises the `required` option on an inline embeds_one definition.
    embeds_one :embedded, Embed, [required: [message: "embed must be set"]] do
      field(:name, :string, required: true)
    end
  end

  defstruct ValidateLengths do
    @moduledoc "This module excersizes validations on string length"
    field(:exact, :string,
      required: [message: "must be 3 characters"],
      length: [is: 3, message: "must be 3 characters"]
    )
    field(:bounded_graphemes, :string,
      required: [message: "must be between 1 and 3 graphemes"],
      length: [min: 1, max: 3, message: "must be between 1 and 3 graphemes"]
    )
    field(:bounded_bytes, :string,
      required: [message: "must be between 1 and 3 bytes"],
      length: [count: :bytes, min: 1, max: 3, message: "must be between 1 and 3 bytes"]
    )
  end

  defstruct ValidateSets do
    @moduledoc "This module excersizes validations based on set membership"
    field(:one_of, :string, one_of: [values: ["a", "b", "c"], message: "must be one of [a, b, c]"])
    field(:none_of, :string,
      none_of: [values: ["a", "b", "c"], message: "cannot be one of [a, b, c]"]
    )
    field(:subset_of, {:array, :string}, subset_of: [values: ["a", "b", "c"]])
  end

  defstruct ValidateNumbers do
    @moduledoc "This module excersizes validations on numbers"
    field(:bounds, :integer, number: [greater_than: 1, less_than: 100])
    field(:bounds_inclusive, :integer,
      number: [greater_than_or_equal_to: 1, less_than_or_equal_to: 100]
    )
    field(:eq, :integer, number: [equal_to: 1])
    field(:neq, :integer, number: [not_equal_to: 1])
    field(:range, :integer, range: 1..100)
  end
end
|
test/support/defstruct_fixtures.ex
| 0.873741
| 0.609757
|
defstruct_fixtures.ex
|
starcoder
|
defmodule Pass.Hash do
  @moduledoc """
  Implements methods for password hashing, verification, and formatting for data
  storage.
  """

  # Runtime configuration for this module; defaults to an empty map when unset.
  defp config, do: Application.get_env(:pass, __MODULE__, %{})

  @doc """
  Given a plaintext string it generates a new salt, hashes the string, and
  returns a string formatted for data storage. The formatted string is broken up
  into multiple sections with the "$" delimiter. The current format is:
  "hash_algorithm$#blocks$#interations$salt$hash". Both the salt and the hash
  are unpadded, URL-safe, Base 64 encoded values.
  """
  def db_password(password) do
    blocks = config()[:blocks] || 2
    cost = config()[:cost] || 160_000
    salt = new_salt()
    hash = password(password, salt, blocks, cost)

    "pbkdf2_sha512$#{blocks}$#{cost}$#{Base.url_encode64 salt, padding: false}$#{Base.url_encode64 hash, padding: false}"
  end

  @doc """
  Implements the PBKDF2 algorithm with SHA512 to hash a password.

  Returns `blocks * 64` bytes of derived key material (`cost` HMAC
  iterations per 64-byte block).
  """
  def password(password, salt, blocks \\ 2, cost \\ 96_000) when is_binary(password) do
    block_iterations(%{
      block: 1,
      blocks: blocks,
      cost: cost,
      salt: salt,
      password: password
    })
  end

  @doc """
  Takes in a plaintext and a string formatted using the db_password/1 function
  and returns true if the formatted string is the derived key for the plaintext
  string provided.
  """
  def verify(password, hash) when is_binary(password) and is_binary(hash) do
    [_, blocks, cost, salt, hash] = String.split(hash, "$")

    password(
      password,
      Base.url_decode64!(salt, padding: false),
      String.to_integer(blocks),
      String.to_integer(cost)
    ) == Base.url_decode64!(hash, padding: false)
  end

  defp new_salt(size \\ 32) do
    :crypto.strong_rand_bytes(size)
  end

  # PBKDF2 block loop: each block hashes salt <> big-endian block index and
  # contributes 64 bytes to the derived key.
  defp block_iterations(%{block: block, blocks: blocks, password: password, salt: salt, cost: cost})
       when block >= blocks do
    hash_iterations(cost, password, salt <> <<block::big-integer-size(32)>>)
  end

  defp block_iterations(%{block: block, blocks: blocks, password: password, salt: salt, cost: cost}) do
    hash_iterations(cost, password, salt <> <<block::big-integer-size(32)>>) <>
      block_iterations(%{block: block + 1, blocks: blocks, password: password, salt: salt, cost: cost})
  end

  # XOR-folds `cost` successive HMAC applications (U1 xor U2 xor ... Ucost).
  defp hash_iterations(1, key, data) do
    hash_function(key, data)
  end

  defp hash_iterations(cost, key, data) do
    first = hash_function(key, data)
    :crypto.exor(first, hash_iterations(cost - 1, key, first))
  end

  # FIX: :crypto.hmac/3 was removed in OTP 24; :crypto.mac/4 is the
  # supported replacement (available since OTP 22.1) and computes the
  # identical HMAC-SHA512 value.
  defp hash_function(key, data) do
    :crypto.mac(:hmac, :sha512, key, data)
  end
end
|
lib/pass/actions/hash.ex
| 0.758868
| 0.439266
|
hash.ex
|
starcoder
|
defmodule EctoSharding do
  @moduledoc """
  EctoSharding ShardRegistry and Repo supervisor.
  This is the supervisor that will supervise the internal shard registry and
  build and supervise the ecto repo for each shard specified in the configuration.
  Before `start_link` is called all of the shard configuration must be present in
  the `ecto_sharding` application env. In otherwords,
  `Application.get_env(:ecto_sharding, Ecto.Sharding)` must return all of the
  configured shards.
  This also provides the only public API to the shard registry that consumers
  should need to interact with, which allows for setting the current shard and
  retriving the correct repo for the current shard.
  """
  use Supervisor
  alias EctoSharding.ShardRegistry

  @typedoc """
  The identifier for a shard repo.
  This will be used to store and lookup each shard repo. It currently only allows
  integers or strings, but in the future it may support anything that implements
  `String.Chars` protocol.
  """
  @type shard :: integer | String.t

  @doc """
  Start the sharding supervisor.
  """
  @spec start_link :: {:ok, pid}
  def start_link do
    Supervisor.start_link(__MODULE__, :ok, [])
  end

  @doc """
  Set the current shard to be used for all queries including sharded schemas.
  """
  @spec current_shard(shard) :: :ok
  def current_shard(shard) do
    ShardRegistry.current_shard(shard)
  end

  @doc """
  Get the currently set shard.
  """
  @spec current_shard :: shard
  def current_shard do
    ShardRegistry.current_shard
  end

  @doc """
  Get the repo based on the current shard.
  """
  @spec current_repo :: EctoSharding.Repo.t
  def current_repo do
    ShardRegistry.current_repo
  end

  @doc """
  Get the repo corresponding to the give shard.
  """
  @spec repo_for_shard(shard) :: EctoSharding.Repo.t
  def repo_for_shard(shard) do
    ShardRegistry.repo_for_shard(shard)
  end

  @doc false
  # Supervises the shard registry plus one repo supervisor per configured shard.
  def init(:ok) do
    shard_repos = EctoSharding.Configuration.shard_repos
    children =
      shard_repos
      |> own_children()
      |> Enum.concat(shard_children(shard_repos))
    Supervisor.init(children, strategy: :one_for_one)
  end

  # The registry child; it receives the shard->repo mapping at start.
  defp own_children(shard_repos) do
    [
      {ShardRegistry, [shard_repos: shard_repos]}
    ]
  end

  # One child spec per shard repo module.
  # NOTE(review): supervisor/2 comes from the deprecated Supervisor.Spec
  # import that `use Supervisor` provided on older Elixir versions —
  # confirm the target Elixir version still supports it.
  defp shard_children(shard_repos) do
    shard_repos
    |> Enum.map(fn({_shard, module}) -> supervisor(module, []) end)
  end
end
|
lib/ecto_sharding.ex
| 0.64232
| 0.414662
|
ecto_sharding.ex
|
starcoder
|
defmodule Scientist.Observation do
  @moduledoc """
  A set of functions for working with experiment observations.
  A `Scientist.Observation` struct contains information about the execution of a
  given candidate, including its execution duration, value, and cleaned value.
  The timestamp is recorded as the system time, and along with the duration, is
  reported in milliseconds.
  """

  # FIX: :milli_seconds is the deprecated Erlang spelling; :millisecond is
  # the supported name for the same unit.
  @timeunit :millisecond

  import Scientist.Experiment, only: [guarded: 3]

  defstruct [
    name: "",
    experiment: nil,
    timestamp: nil,
    value: nil,
    cleaned_value: nil,
    exception: nil,
    stacktrace: nil,
    duration: nil,
  ]

  @doc """
  Creates a new observation for `experiment`.
  Evaluates `candidate`, capturing any exceptions raised. The observation will
  be cleaned using the experiment's configured clean function.
  """
  def new(experiment, name, candidate) do
    observation = %Scientist.Observation{
      name: name,
      experiment: experiment,
      timestamp: System.system_time(@timeunit),
    }

    try do
      value = candidate.()

      cleaned =
        if experiment.clean do
          guarded experiment, :clean, do: experiment.clean.(value)
        else
          value
        end

      duration = System.system_time(@timeunit) - observation.timestamp

      %__MODULE__{observation |
        value: value,
        duration: duration,
        cleaned_value: cleaned
      }
    rescue
      # FIX: __STACKTRACE__ replaces the deprecated System.stacktrace/0;
      # it is only valid inside rescue/catch, which is exactly where it
      # is used here.
      except ->
        %__MODULE__{observation |
          exception: {:raised, except},
          stacktrace: __STACKTRACE__
        }
    catch
      except ->
        %__MODULE__{observation |
          exception: {:thrown, except},
          stacktrace: __STACKTRACE__
        }
    end
  end

  @doc """
  Returns true if the observations match.
  The observations will be compared using the experiment's configured
  compare function. Two excepted observations match when their captured
  exceptions (including raised/thrown kind) are equal.
  """
  def equivalent?(observation, other, compare \\ &Kernel.==/2) do
    case {observation.exception, other.exception} do
      {nil, nil} -> compare.(observation.value, other.value)
      {nil, _} -> false
      {_, nil} -> false
      {except, other_except} -> except == other_except
    end
  end

  @doc """
  Re-raises or throws the exception that occurred during observation, if any.
  """
  def except!(observation)
  def except!(%Scientist.Observation{exception: nil}), do: nil
  def except!(%Scientist.Observation{exception: {:raised, e}, stacktrace: s}), do: reraise e, s
  def except!(%Scientist.Observation{exception: {:thrown, e}}), do: throw e

  @doc """
  Returns true if the observation threw or raised an exception.
  """
  def except?(observation)
  def except?(%Scientist.Observation{exception: nil}), do: false
  def except?(_), do: true

  @doc """
  Returns true if the observation raised an exception.
  """
  def raised?(observation)
  def raised?(%Scientist.Observation{exception: {:raised, _}}), do: true
  def raised?(_), do: false

  @doc """
  Returns true if the observation threw an exception.
  """
  def thrown?(observation)
  def thrown?(%Scientist.Observation{exception: {:thrown, _}}), do: true
  def thrown?(_), do: false
end
|
lib/scientist/observation.ex
| 0.934447
| 0.81648
|
observation.ex
|
starcoder
|
defmodule ExStringUtil do
@moduledoc """
This module helps validate any string from user input.
"""
@doc ~S"""
Checks whether a given string consists solely of Unicode letters and
decimal digits.

## Parameters
  - string0: String to validate

## Returns
  - true when `string0` is non-empty and alphanumeric
  - false otherwise

## Examples
    iex>ExStringUtil.is_alphanumeric("Password12")
    true
    iex>ExStringUtil.is_alphanumeric("[]")
    false
"""
@spec is_alphanumeric(String.t) :: boolean
def is_alphanumeric(string0) when is_binary(string0) do
  alphanumeric = ~r/^[\p{Ll}\p{Lm}\p{Lo}\p{Lt}\p{Lu}\p{Nd}]+$/
  String.match?(string0, alphanumeric)
end
@doc ~S"""
Checks whether a given string `string0` looks like a valid email address.
The check is purely syntactic — no DNS/domain lookup is performed.

## Parameters
  - string0: String to validate

## Returns
  - true when `string0` is a syntactically valid email
  - false otherwise

## Examples
    iex>ExStringUtil.is_email("user@example.com")
    true
    iex>ExStringUtil.is_email("elixir")
    false
"""
@spec is_email(String.t) :: boolean
def is_email(string0) when is_binary(string0) do
  string0 =~ ~r/^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,4}$/
end
@doc ~S"""
Checks whether a given string `string0` contains only ASCII letters.

## Parameters
  - string0: String to validate

## Returns
  - true when `string0` is non-empty and contains only letters
  - false otherwise (including the empty string)

## Examples
    iex>ExStringUtil.is_alpha("Password1")
    false
    iex>ExStringUtil.is_alpha("Password")
    true
"""
@spec is_alpha(String.t) :: boolean
def is_alpha(string0) when is_binary(string0) do
  # FIX: the original pattern ^[a-zA-Z]$ matched exactly one character,
  # so any multi-letter string was rejected; "+" repeats the class.
  Regex.match?(~r/^[a-zA-Z]+$/, string0)
end
@doc ~S"""
This function checks whether a given `string0` is empty or not.

## Parameters
  - string0: String to validate

## Returns
  - true when the `string0` is empty
  - false when the `string0` is not empty

## Examples
    iex>ExStringUtil.is_empty("abc")
    false
    iex>ExStringUtil.is_empty("")
    true
"""
@spec is_empty(String.t) :: boolean
def is_empty(string0) when is_binary(string0) do
  # byte_size/1 is O(1) while String.length/1 walks every grapheme;
  # both are zero exactly for the empty binary.
  byte_size(string0) == 0
end
@doc ~S"""
This function helps check whether the string `string0` is a valid url.

## Parameters
  - string0: String to validate

## Returns
  - true when the `string0` is a valid url
  - false when the `string0` is not a valid url

## Examples
    iex>ExStringUtil.is_url("not a url")
    false
    iex>ExStringUtil.is_url("https://www.google.com")
    true
    iex>ExStringUtil.is_url("www.swift-rider.com")
    true
"""
@spec is_url(String.t) :: boolean
def is_url(string0) when is_binary(string0) do
  string0 =~
    ~r"^(?:(?:(ht|f)tps?|file|news|gopher):\/\/)?(([\w!~*'().&=+$%-]+:)?[\w!~*'().&=+$%-]+@)?(([0-9]{1,3}.){3}[0-9]{1,3}|([\w!~*'()-]+.)*([\w^-][\w-]{0,61})?[\w].[a-z]{2,6})(:[0-9]{1,4})?((/*)|(/+[\w!~*'().;?:@&=+$,%#-]+)+/*)$"
end
@doc ~S"""
This function helps check whether the string `string0` is in valid money format.
The validation is done based upon the currency symbol position in the string.

## Parameters
  - string0: String to validate
  - right: boolean, whether the currency symbol sits to the right of the amount

## Returns
  - true when `string0` is a valid money string
  - false when `string0` is not valid (including the empty string)
"""
@spec is_money(String.t, boolean) :: boolean
def is_money(string0, right \\ true) when is_binary(string0) do
  # FIX: the original bound `regex = false` for empty input and then
  # called Regex.match?(false, _), raising FunctionClauseError; an empty
  # string must simply report false.
  if string0 == "" do
    false
  else
    regex =
      if right do
        ~r/^(?!0,?\d)(?:\d{1,3}(?:([,\.])\d{3})?(?:\1\d{3})*|(?:\d+))((?!\1)[,\.]\d{2})?(?<!\x{00a2})\p{Sc}?$/
      else
        ~r/^(?!\x{00a2})\p{Sc}?(?!0,?\d)(?:\d{1,3}(?:([,\.])\d{3})?(?:\1\d{3})*|(?:\d+))((?!\1)[,\.]\d{2})?$/
      end

    Regex.match?(regex, string0)
  end
end
@doc ~S"""
This function helps check whether the string `string0` is in valid time format.
Validates times as 12hr with am/pm ([H]H:MM[a|p]m) or 24hr (HH:MM); the
hour/minute separator may be ":" or "."; optional seconds (0-59) are allowed.

## Parameters
  - string0: String to validate

## Returns
  - true when the `string0` is a valid time
  - false when the `string0` is not a valid time

## Examples
    iex>ExStringUtil.is_time("12:23pm")
    true
    iex>ExStringUtil.is_time("01:34am")
    true
"""
@spec is_time(String.t) :: boolean
def is_time(string0) when is_binary(string0) do
  time_pattern =
    ~r/^((([0]?[1-9]|1[0-2])(:|\.)[0-5][0-9]((:|\.)[0-5][0-9])?( )?(AM|am|aM|Am|PM|pm|pM|Pm))|(([0]?[0-9]|1[0-9]|2[0-3])(:|\.)[0-5][0-9]((:|\.)[0-5][0-9])?))$/

  String.match?(string0, time_pattern)
end
@doc ~S"""
Time validation, determines if the string passed is a valid time.
Validates time as 24hr (HH:MM) or am/pm ([H]H:MM[a|p]m).
Does not allow/validate seconds.

## Parameters
  - string0: String to validate

## Returns
  - true when the `string0` is a valid time
  - false otherwise

## Examples
    iex>ExStringUtil.is_time_24("13:00")
    true
    iex>ExStringUtil.is_time_24("1:23 PM")
    true
"""
@spec is_time_24(String.t) :: boolean
def is_time_24(string0) when is_binary(string0) do
  string0 =~
    ~r/^((0?[1-9]|1[012])(:[0-5]\d){0,2} ?([AP]M|[ap]m))$|^([01]\d|2[0-3])(:[0-5]\d){0,2}$/
end
@doc ~S"""
Checks whether a given string `string0` is an integer literal, with an
optional leading "+" or "-" sign.

## Parameters
  - string0: String to validate

## Returns
  - true when the `string0` is a number
  - false when the `string0` is not a number

## Examples
    iex>ExStringUtil.is_numeric("123")
    true
    iex>ExStringUtil.is_numeric("A2")
    false
"""
@spec is_numeric(String.t) :: boolean
def is_numeric(string0) when is_binary(string0) do
  string0 =~ ~r/^([-+]?[0-9]+)$/
end
@doc ~S"""
Checks whether the string `string0` represents a boolean value:
"true"/"false", or an integer string equal to 1 or 0.

## Parameters
  - string0: String to validate

## Returns
  - true when the `string0` represents a boolean
  - false otherwise

## Examples
    iex>ExStringUtil.is_boolean("1")
    true
    iex>ExStringUtil.is_boolean("0")
    true
"""
# NOTE(review): this definition shares name/arity with the auto-imported
# Kernel.is_boolean/1 guard; presumably the enclosing module excludes it
# from the Kernel import — confirm, otherwise this clashes at compile time.
@spec is_boolean(String.t) :: boolean
def is_boolean(string0) when is_binary(string0) do
  if is_empty(string0) do
    false
  else
    cond do
      # Any signed integer string: truthy only for exactly 1 or 0
      # (so "01" and "+1" count, since they parse to 1).
      is_numeric(string0) == true ->
        {parsed, ""} = Integer.parse(string0)
        parsed == 1 || parsed == 0
      string0 == "true" || string0 == "false" -> true
      # Unreachable in practice: "1"/"0" are numeric and handled above.
      string0 == "1" || string0 == "0" -> true
      true -> false
    end
  end
end
@doc ~S"""
Checks if a value is a natural number (no sign, no leading zeros).

## Parameters
  - string0: String to validate
  - allow_zero: when true, the single digit "0" is also accepted

## Returns
  - true when the `string0` is a natural number
  - false when the `string0` is not a natural number

## Examples
    iex>ExStringUtil.is_natural_number("123")
    true
    iex>ExStringUtil.is_natural_number("023", true)
    false
"""
@spec is_natural_number(String.t, boolean) :: boolean
def is_natural_number(string0, allow_zero \\ false) when is_binary(string0) do
  pattern = if allow_zero, do: ~r/^(?:0|[1-9][0-9]*)$/, else: ~r/^[1-9][0-9]*$/
  string0 =~ pattern
end
@doc ~S"""
Checks that a value is a valid UUID - http://tools.ietf.org/html/rfc4122
## Parameters
- string0: String to validate
## Returns
- true when the `string0` is a UUID
- false when the `string0` is not a UUID
## Examples
iex>ExStringUtil.is_uuid("3F2504E0-4F89-11D3-9A0C-0305E82C3301")
true
"""
@spec is_uuid(String.t) :: boolean
def is_uuid(string0) when is_binary(string0) do
  # 8-4-4-4-12 hex groups; the version nibble is restricted to 0-5 and the
  # variant nibble to [089aAbB].
  uuid_pattern = ~r/^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[0-5][a-fA-F0-9]{3}-[089aAbB][a-fA-F0-9]{3}-[a-fA-F0-9]{12}$/
  String.match?(string0, uuid_pattern)
end
@doc ~S"""
Returns true if field is left blank -OR- only whitespace characters are present in its value
Whitespace characters include Space, Tab, Carriage Return, Newline
## Parameters
- string0: String to validate
## Returns
- true when the `string0` is blank
- false when the `string0` is not blank
## Examples
iex>ExStringUtil.is_blank("\t")
true
"""
@spec is_blank(String.t) :: boolean
def is_blank(string0) when is_binary(string0) do
  # Bug fix: the previous implementation matched ~r/[^\\s]/ -- i.e. "any
  # character other than a literal backslash or the letter s" -- and returned
  # true when such a character was PRESENT. That reported most non-blank
  # strings (e.g. "abc") as blank and "" as not blank. A blank string is one
  # consisting solely of whitespace, anchored over the whole input.
  Regex.match?(~r/\A\s*\z/, string0)
end
@doc ~S"""
Date validation, determines if the string passed is a valid date.
keys that expect full month, day and year will validate leap years.
Years are valid from 1800 to 2999.
## Parameters
- string0: String to validate
- format: date format to use. The following formats are allowed:
- `dmy` 27-12-2006 or 27-12-06 separators can be a space, period, dash, forward slash
- `mdy` 12-27-2006 or 12-27-06 separators can be a space, period, dash, forward slash
- `ymd` 2006-12-27 or 06-12-27 separators can be a space, period, dash, forward slash
- `dMy` 27 December 2006 or 27 Dec 2006
- `Mdy` December 27, 2006 or Dec 27, 2006 comma is optional
- `My` December 2006 or Dec 2006
- `my` 12/2006 or 12/06 separators can be a space, period, dash, forward slash
- `ym` 2006/12 or 06/12 separators can be a space, period, dash, forward slash
- `y` 2006 just the year without any separators
Unknown formats fall back to the `ymd` pattern.
## Returns
- true when the `string0` is a valid date in the given format
- false when the `string0` is not a valid date in the given format
Raises `ArgumentError` when the generated pattern fails to compile.
## Examples
iex>ExStringUtil.is_date!("2006-12-27")
true
"""
@spec is_date!(String.t, String.t) :: boolean
def is_date!(string0, format \\ "ymd") when is_binary(string0) do
  # Building blocks shared by the simpler formats below.
  # NOTE(review): `month` requires a leading zero (01..09), so e.g. "1/2006"
  # is rejected by the `my`/`ym` formats -- confirm that is intended.
  month = "(0[123456789]|10|11|12)"
  separator = "([- /.])"
  four_digit_year = "(([1][8-9][0-9][0-9])|([2][0-9][0-9][0-9]))"
  two_digit_year = "([0-9]{2})"
  year = "(?:" <> four_digit_year <> "|" <> two_digit_year <> ")"
  regex = cond do
    format == "dmy" ->
      "^(?:(?:31(\\/|-|\\.|\\x20)(?:0?[13578]|1[02]))\\1|(?:(?:29|30)" <>
      separator <> "(?:0?[1,3-9]|1[0-2])\\2))(?:(?:1[6-9]|[2-9]\\d)?\\d{2})$|^(?:29" <>
      separator <>
      "0?2\\3(?:(?:(?:1[6-9]|[2-9]\\d)?(?:0[48]|[2468][048]|[13579][26])|(?:(?:16|[2468][048]|[3579][26])00))))$|^(?:0?[1-9]|1\\d|2[0-8])" <>
      separator <> "(?:(?:0?[1-9])|(?:1[0-2]))\\4(?:(?:1[6-9]|[2-9]\\d)?\\d{2})$"
    format == "mdy" ->
      # Bug fix: the February-29 alternative used to contain a stray
      # apostrophe ("'29"), which made every leap day such as "02-29-2016"
      # fail to match.
      "^(?:(?:(?:0?[13578]|1[02])(\\/|-|\\.|\\x20)31)\\1|(?:(?:0?[13-9]|1[0-2])" <>
      separator <> "(?:29|30)\\2))(?:(?:1[6-9]|[2-9]\\d)?\\d{2})$|^(?:0?2" <>
      separator <> "29\\3(?:(?:(?:1[6-9]|[2-9]\\d)?(?:0[48]|[2468][048]|[13579][26])|(?:(?:16|[2468][048]|[3579][26])00))))$|^(?:(?:0?[1-9])|(?:1[0-2]))" <>
      separator <> "(?:0?[1-9]|1\\d|2[0-8])\\4(?:(?:1[6-9]|[2-9]\\d)?\\d{2})$"
    format == "ymd" ->
      "^(?:(?:(?:(?:(?:1[6-9]|[2-9]\\d)?(?:0[48]|[2468][048]|[13579][26])|(?:(?:16|[2468][048]|[3579][26])00)))" <> separator <> "(?:0?2\\1(?:29)))|(?:(?:(?:1[6-9]|[2-9]\\d)?\\d{2})" <> separator <> "(?:(?:(?:0?[13578]|1[02])\\2(?:31))|(?:(?:0?[1,3-9]|1[0-2])\\2(29|30))|(?:(?:0?[1-9])|(?:1[0-2]))\\2(?:0?[1-9]|1\\d|2[0-8]))))$"
    format == "dMy" ->
      "^((31(?!\\ (Feb(ruary)?|Apr(il)?|June?|(Sep(?=\\b|t)t?|Nov)(ember)?)))|((30|29)(?!\\ Feb(ruary)?))|(29(?=\\ Feb(ruary)?\\ (((1[6-9]|[2-9]\\d)(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))))|(0?[1-9])|1\\d|2[0-8])\\ (Jan(uary)?|Feb(ruary)?|Ma(r(ch)?|y)|Apr(il)?|Ju((ly?)|(ne?))|Aug(ust)?|Oct(ober)?|(Sep(?=\\b|t)t?|Nov|Dec)(ember)?)\\ ((1[6-9]|[2-9]\\d)\\d{2})$"
    format == "Mdy" ->
      "^(?:(((Jan(uary)?|Ma(r(ch)?|y)|Jul(y)?|Aug(ust)?|Oct(ober)?|Dec(ember)?)\\ 31)|((Jan(uary)?|Ma(r(ch)?|y)|Apr(il)?|Ju((ly?)|(ne?))|Aug(ust)?|Oct(ober)?|(Sep)(tember)?|(Nov|Dec)(ember)?)\\ (0?[1-9]|([12]\\d)|30))|(Feb(ruary)?\\ (0?[1-9]|1\\d|2[0-8]|(29(?=,?\\ ((1[6-9]|[2-9]\\d)(0[48]|[2468][048]|[13579][26])|((16|[2468][048]|[3579][26])00)))))))\\,?\\ ((1[6-9]|[2-9]\\d)\\d{2}))$"
    format == "My" ->
      "^(Jan(uary)?|Feb(ruary)?|Ma(r(ch)?|y)|Apr(il)?|Ju((ly?)|(ne?))|Aug(ust)?|Oct(ober)?|(Sep(?=\\b|t)t?|Nov|Dec)(ember)?)" <> separator <> "((1[6-9]|[2-9]\\d)\\d{2})$"
    format == "my" ->
      "^(" <> month <> separator <> year <> ")$"
    format == "ym" ->
      "^(" <> year <> separator <> month <> ")$"
    format == "y" ->
      "^(" <> four_digit_year <> ")$"
    true ->
      # Unknown format: fall back to the `ymd` pattern.
      "^(?:(?:(?:(?:(?:1[6-9]|[2-9]\\d)?(?:0[48]|[2468][048]|[13579][26])|(?:(?:16|[2468][048]|[3579][26])00)))" <> separator <> "(?:0?2\\1(?:29)))|(?:(?:(?:1[6-9]|[2-9]\\d)?\\d{2})" <> separator <> "(?:(?:(?:0?[13578]|1[02])\\2(?:31))|(?:(?:0?[1,3-9]|1[0-2])\\2(29|30))|(?:(?:0?[1-9])|(?:1[0-2]))\\2(?:0?[1-9]|1\\d|2[0-8]))))$"
  end
  case Regex.compile(regex) do
    {:ok, reg} ->
      Regex.match?(reg, string0)
    {:error, reason} ->
      # Bug fix: Regex.compile/1 returns a {message, position} tuple, which is
      # not a valid ArgumentError message; make it printable first.
      raise ArgumentError, message: inspect(reason)
  end
end
@doc ~S"""
Validation of an IP address.
## Parameters
- string0: String to validate
- version: the version of IP to check for. The following values are accepted
- v4 for IP v4
- v6 for IP v6
## Returns
- true when the `string0` is an ip
- false when the `string0` is not an ip
Raises `ArgumentError` when `version` is neither "v4" nor "v6".
## Examples
iex>ExStringUtil.is_ip!("192.168.254.46")
true
"""
@spec is_ip!(String.t, String.t) :: boolean
def is_ip!(string0, version \\ "v4") when is_binary(string0) do
  # Bug fix: the previous regexes were built from double-quoted strings in
  # which "\d" and "\." collapsed to the literals "d" and "." (the single
  # backslash is consumed by the string literal), and they were not anchored,
  # so e.g. "1a2b3c4" was accepted as a v4 address. Erlang's strict address
  # parsers are used instead.
  # NOTE(review): unlike the old (broken) v6 regex, the strict parser may
  # reject zone-id suffixes such as "%eth0" -- confirm that is acceptable.
  address = String.to_charlist(string0)
  case version do
    "v4" -> match?({:ok, _}, :inet.parse_ipv4strict_address(address))
    "v6" -> match?({:ok, _}, :inet.parse_ipv6strict_address(address))
    _ -> raise ArgumentError, message: "no ip version specified"
  end
end
end
|
lib/ex_string_util.ex
| 0.895383
| 0.550607
|
ex_string_util.ex
|
starcoder
|
defmodule Mollie.Customers do
  import Mollie
  alias Mollie.Client
  @moduledoc """
  Creates a simple minimal representation of a customer in the Mollie API to use for the Mollie Checkout and Recurring features. These customers will appear in your Mollie Dashboard where you can manage their details, and also see their payments and subscriptions.
  """
  @doc """
  Retrieves every customer that has been created. Results are paginated;
  pagination options can be passed via `params`.
  ## Example
  Mollie.Customers.list client
  More info at: https://docs.mollie.com/reference/v2/customers-api/list-customers
  """
  @spec list(Client.t(), map | list) :: Mollie.response()
  def list(client, params \\ %{}), do: get("v2/customers", client, params)
  @doc """
  Retrieves a single customer by its ID.
  ## Example
  Mollie.Customers.find client, "cst_8wmqcHMN4U"
  More info at: https://docs.mollie.com/reference/v2/customers-api/get-customer
  """
  @spec find(Client.t(), binary, map | list) :: Mollie.response()
  def find(client, id, params \\ %{}), do: get("v2/customers/" <> id, client, params)
  @doc """
  Creates a customer.
  Customer body example
  ```
  %{
  "name" => "<NAME>",
  "email" => "<EMAIL>"
  }
  ```
  ## Example
  Mollie.Customers.create client, customer_body
  More info at: https://docs.mollie.com/reference/v2/customers-api/create-customer
  """
  @spec create(Client.t(), map) :: Mollie.response()
  def create(client, body), do: post("v2/customers", client, body)
  @doc """
  Updates an existing customer.
  Customer body example
  ```
  %{
  "name" => "<NAME>",
  "email" => "<EMAIL>",
  "locale" => "en_US"
  }
  ```
  ## Example
  Mollie.Customers.update client, "cst_8wmqcHMN4U", customer_body
  More info at: https://docs.mollie.com/reference/v2/customers-api/update-customer
  """
  @spec update(Client.t(), binary, map) :: Mollie.response()
  def update(client, id, body), do: patch("v2/customers/" <> id, client, body)
  @doc """
  Deletes a customer. All mandates and subscriptions created for this customer will be canceled as well.
  ## Example
  Mollie.Customers.remove client, "cst_8wmqcHMN4U"
  More info at: https://docs.mollie.com/reference/v2/customers-api/delete-customer
  """
  @spec remove(Client.t(), binary, map | list) :: Mollie.response()
  def remove(client, id, params \\ %{}), do: delete("v2/customers/" <> id, client, params)
end
|
lib/mollie/customers.ex
| 0.809088
| 0.454048
|
customers.ex
|
starcoder
|
defmodule Edeliver.Relup.Instructions.Info do
  @moduledoc """
  This upgrade instruction logs the given info
  message on the node which runs the upgrade
  and in the running upgrade script which was started
  by the `$APP/bin/$APP upgrade $RELEASE` command.
  Usage:
  ```
  Edeliver.Relup.Instructions.Info.modify_relup(config,
  _up_message="Synchronizing nodes",
  _down_message="Desynchronizing nodes",
  _insert_where = &append_after_point_of_no_return/2)`
  ```
  Available sections are:
  * `:check` -> Checks whether upgrade is possible. Before "point of no return"
  * `:suspend` -> Suspends processes before the upgrade. Right after the "point of no return"
  * `:upgrade` -> Runs the upgrade by (un-)loading new(/old) code and updating processes and applications
  * `:resume` -> Resumes processes after the upgrade that were suspended in the `:suspend` section.
  * `:finished` -> The upgrade finished successfully
  """
  use Edeliver.Relup.RunnableInstruction
  @doc """
  Inserts a logging instruction with `up_message` into the up instructions and
  one with `down_message` into the down instructions, at the position chosen by
  `insert_where_fun` (default: right after the point of no return), and ensures
  this module is loaded before the first (resp. unloaded after the last)
  runnable instruction.
  """
  @spec modify_relup(instructions::Instructions.t, config::Edeliver.Relup.Config.t, up_message::String.t, down_message::String.t, insert_where::Instruction.insert_fun) :: Instructions.t
  def modify_relup(instructions = %Instructions{}, _config = %{}, up_message \\ "", down_message \\ "", insert_where_fun \\ &append_after_point_of_no_return/2) do
    # call_this/1 (injected by RunnableInstruction) builds the relup
    # instruction that invokes this module's run/1 with the given message.
    up_instruction = call_this(up_message)
    down_instruction = call_this(down_message)
    %{instructions |
      up_instructions: insert_where_fun.(instructions.up_instructions, up_instruction)
      |> ensure_module_loaded_before_first_runnable_instructions(up_instruction),
      down_instructions: insert_where_fun.(instructions.down_instructions, down_instruction)
      |> ensure_module_unloaded_after_last_runnable_instruction(down_instruction)
    }
  end
  @doc """
  Logs the message on the node which is upgraded
  and in the upgrade script which was started by the
  `$APP/bin/$APP upgrade $RELEASE` command.
  """
  @spec run(message::String.t) :: :ok
  def run(message) do
    Logger.info message
    format_in_upgrade_script('~s~n', [String.to_charlist(message)])
  end
end
|
lib/edeliver/relup/instructions/info.ex
| 0.71889
| 0.690976
|
info.ex
|
starcoder
|
defmodule Transmit do
  @moduledoc """
  Plug for handling the creation of presigned urls for direct client-side uploading
  ## Setup
  In your router, add the following:
  ```elixir
  defmodule MyAppWeb.Router do
  use MyAppWeb, :router
  ...
  forward("/signer", Transmit, signer: Transmit.S3Signer, bucket: "images", path: "uploads")
  ...
  ```
  This makes it so that requests to `/signer` will be routed to the Transmit plug.
  The `signer` that will be used is the `Transmit.S3Signer` for creating S3 presigned URLs.
  The `bucket` and `path` options are specific to `Transmit.S3Signer` which will use them to
  create URLs that point to the `images` bucket under the path `uploads`
  For more setup information for S3 signing, make sure to check the `Transmit.S3Signer` documentation
  Different signers can be used by implementing the `Transmit.Signer` behaviour
  ## JavaScript setup
  - Add `"transmit": "file:../deps/transmit"` to your `dependencies` in your `package.json`
  - run `npm install`
  ## Usage
  ```javascript
  import Transmit from 'transmit'
  const imageUploadElement = document.getElementById('avatar_uploader')
  const urlElement = document.getElementById('avatar_url')
  // Set up so that we upload to S3 when a file is given to the imageUploadElement
  imageUploadElement.onchange = async () => {
  const file = imageUploadElement.files[0]
  if (file === null) {
  alert('No file selected.')
  } else {
  const url = await Transmit.uploadFileToS3(
  file,
  '/uploads/sign'
  )
  //saved to avatar_url element
  urlElement.value = url
  }
  }
  ```
  """
  import Plug.Conn

  # Plug init callback: options are passed through unchanged to call/2.
  def init(opts), do: opts

  # Plug call callback: make sure query params are fetched, then dispatch.
  def call(conn, opts) do
    handle(fetch_query_params(conn), opts)
  end

  # Happy path: a "file_name" query parameter is present.
  defp handle(%Plug.Conn{query_params: %{"file_name" => file_name}} = conn, opts) do
    signer = Keyword.get(opts, :signer, Transmit.S3Signer)
    %URI{path: path} = URI.parse(file_name)
    extension = path |> Path.extname() |> String.downcase()
    base = path |> Path.basename(extension) |> String.downcase()
    # A UUID is appended so concurrent uploads of equally-named files cannot clash.
    unique_name = "#{base}-#{UUID.uuid4()}#{extension}"

    case signer.get_presigned_url(unique_name, opts) do
      {:ok, %{presigned_url: signed_url, file_url: url}} ->
        payload = %{
          data: %{
            signed_request: signed_url,
            file_name: unique_name,
            file_type: MIME.type(String.replace_leading(extension, ".", "")),
            url: url
          }
        }

        json(conn, opts, 200, payload)

      {:error, _} ->
        error_payload = %{
          errors: [
            %{
              title: "S3 Error",
              detail: "Unable to create presigned url."
            }
          ]
        }

        json(conn, opts, 422, error_payload)
    end
  end

  # No "file_name" query parameter was supplied.
  defp handle(conn, opts) do
    json(conn, opts, 422, %{
      errors: [
        %{
          title: "Missing query parameter",
          detail: "The request does not contain a 'file_name' query parameter"
        }
      ]
    })
  end

  # Encodes `map` with the configured JSON library and sends it with `status`.
  defp json(conn, opts, status, map) do
    body = json_library(opts).encode!(map)

    conn
    |> put_resp_content_type("application/json")
    |> send_resp(status, body)
  end

  defp json_library(opts), do: Keyword.get(opts, :json_library, Jason)
end
|
lib/transmit.ex
| 0.770249
| 0.735
|
transmit.ex
|
starcoder
|
defmodule Entice.Logic.Vitals do
  @moduledoc """
  Responsible for the entities vital stats like (health, mana, regen, degen).
  If the entity has no explicit level, it is implicitly assumed to be 20.
  If an entity dies, a local broadcast will be sent that looks like this:
  {:entity_dead, %{entity_id: entity_id, attributes: attribs}}
  If the entity then gets resurrected, a similar message will be broadcasted:
  {:entity_resurrected, %{entity_id: entity_id, attributes: attribs}}
  The regeneration of health and energy works in a value-per-second fashion.
  Usually, for health we have max/minimum of +-20 HP/s and for energy we have a
  max/minimum of +-3 mana/s (both are equal to +-10 pips on the health / energy bars)
  Note that the client assumes a standard mana regen of 0.71 mana/s (2 pips),
  which is 0.355 * 2, so we think 0.355 is the standard 1 pip regen.
  """
  alias Entice.Entity
  alias Entice.Entity.Coordination
  alias Entice.Logic.Vitals
  # Vital attributes; the defaults below are replaced on behaviour init.
  defmodule Health, do: defstruct(health: 500, max_health: 620, regeneration: 0.0)
  defmodule Energy, do: defstruct(mana: 50, max_mana: 70, regeneration: 0.666) # hell yeah
  defmodule Morale, do: defstruct(morale: 0)
  @doc "Attaches the vitals logic to the entity (it starts out alive)."
  def register(entity_id),
    do: Entity.put_behaviour(entity_id, Vitals.AliveBehaviour, [])
  @doc "Detaches the vitals logic, regardless of whether the entity is alive or dead."
  def unregister(entity_id) do
    Entity.remove_behaviour(entity_id, Vitals.AliveBehaviour)
    Entity.remove_behaviour(entity_id, Vitals.DeadBehaviour)
  end
  @doc "Damage is dealt till death of the entity. (It then needs to be resurrected)"
  def damage(entity, amount),
    do: Coordination.notify(entity, {:vitals_entity_damage, amount})
  @doc "Heals the entity until `max_health` is reached"
  def heal(entity, amount),
    do: Coordination.notify(entity, {:vitals_entity_heal, amount})
  @doc "Kills an entity, reduces the lifepoints to 0."
  def kill(entity),
    do: Coordination.notify(entity, :vitals_entity_kill)
  @doc "Resurrect with percentage of health and energy. (Entity needs to be dead :P)"
  def resurrect(entity, percent_health, percent_energy),
    do: Coordination.notify(entity, {:vitals_entity_resurrect, percent_health, percent_energy})
  @doc "Sets the health regeneration rate (accepted range -10..10 HP/s)."
  def health_regeneration(entity, value),
    do: Coordination.notify(entity, {:vitals_health_regeneration, value})
  @doc "Sets the energy regeneration rate (accepted range -3..3 mana/s)."
  def energy_regeneration(entity, value),
    do: Coordination.notify(entity, {:vitals_energy_regeneration, value})
defmodule AliveBehaviour do
  @moduledoc """
  Entity behaviour for living entities: initializes health/energy/morale,
  handles damage, healing and periodic regeneration, and hands the entity
  over to `DeadBehaviour` when its health drops to zero.
  """
  use Entice.Entity.Behaviour
  alias Entice.Logic.Vitals.Health
  alias Entice.Logic.Vitals.Energy
  alias Entice.Logic.Vitals.Morale
  alias Entice.Logic.Vitals.DeadBehaviour
  alias Entice.Logic.Player.Level

  # Tick length (ms) for the periodic regeneration update.
  @regeneration_interval 500
  @min_accumulated_health 5 # these values need to accumulate over time before
  @min_accumulated_energy 1 # the attribute is updated

  # Entity comes back from the dead: broadcast the resurrection locally and
  # restore the given percentage of maximum health and energy.
  def init(entity, {:entity_resurrected, percent_health, percent_energy}) do
    entity.id |> Coordination.notify_locally({
      :entity_resurrected,
      %{entity_id: entity.id, attributes: entity.attributes}})
    %Health{max_health: max_health} = get_max_health(entity.attributes)
    resurrected_health = round(max_health / 100 * percent_health)
    %Energy{max_mana: max_mana} = get_max_energy(entity.attributes)
    resurrected_mana = round(max_mana / 100 * percent_energy)
    schedule_regeneration_tick(0, 0)
    {:ok,
     entity
     |> put_attribute(%Health{health: resurrected_health, max_health: max_health})
     |> put_attribute(%Energy{mana: resurrected_mana, max_mana: max_mana})}
  end
  # Fresh entity: neutral morale and full health/energy for its level.
  def init(entity, _args) do
    schedule_regeneration_tick(0, 0)
    {:ok,
     entity
     |> put_attribute(%Morale{morale: 0})
     |> attribute_transaction(
       fn attrs ->
         attrs |> Map.merge(%{
           Health => get_max_health(attrs),
           Energy => get_max_energy(attrs)})
       end)}
  end
  def handle_event(
      {:vitals_entity_damage, amount},
      %Entity{attributes: %{Health => %Health{health: health}}} = entity) do
    new_health = health - amount
    if new_health <= 0 do
      # Lethal damage: switch over to the dead behaviour.
      {:become, DeadBehaviour, :entity_died, entity}
    else
      {:ok, entity |> update_attribute(Health, fn health -> %Health{health | health: new_health} end)}
    end
  end
  def handle_event(
      {:vitals_entity_heal, amount},
      %Entity{attributes: %{Health => %Health{health: health, max_health: max_health}}} = entity) do
    # Bug fix: the old `if new_health > max_health, do: new_health = max_health`
    # form does not rebind `new_health` outside the `if` on modern Elixir,
    # so healing was never capped at max_health.
    new_health = min(health + amount, max_health)
    {:ok, entity |> update_attribute(Health, fn health -> %Health{health | health: new_health} end)}
  end
  def handle_event(:vitals_entity_kill, entity), do: {:become, DeadBehaviour, :entity_died, entity}
  # Regeneration rates are limited by guard to the allowed pip ranges.
  def handle_event({:vitals_health_regeneration, value}, entity) when (-10 <= value) and (value <= 10),
    do: {:ok, entity |> update_attribute(Health, fn health -> %Health{health | regeneration: value} end)}
  def handle_event({:vitals_energy_regeneration, value}, entity) when (-3 <= value) and (value <= 3),
    do: {:ok, entity |> update_attribute(Energy, fn energy -> %Energy{energy | regeneration: value} end)}
  # Periodic tick: accumulate fractional regeneration, apply it once it
  # exceeds the configured minimum, then schedule the next tick.
  def handle_event(
      {:vitals_regeneration_update, %{interval: interval, health_accumulator: health_acc, energy_accumulator: energy_acc}},
      %Entity{attributes: %{
        Health => %Health{regeneration: health_regen} = health,
        Energy => %Energy{regeneration: energy_regen} = energy}} = entity) do
    health_acc = health_acc + (health_regen * interval / 1000)
    energy_acc = energy_acc + (energy_regen * interval / 1000)
    {new_health, new_health_acc} = regenerate_health(health, health_acc)
    {new_energy, new_energy_acc} = regenerate_energy(energy, energy_acc)
    schedule_regeneration_tick(new_health_acc, new_energy_acc)
    {:ok, entity |> update_attribute(Health, fn _ -> new_health end)
                 |> update_attribute(Energy, fn _ -> new_energy end)}
  end
  # Keep the vital attributes when merely switching to the dead behaviour ...
  def terminate({:become_handler, DeadBehaviour, _}, entity),
    do: {:ok, entity}
  # ... but drop them when the behaviour is removed for good.
  def terminate(_reason, entity) do
    {:ok,
     entity
     |> remove_attribute(Morale)
     |> remove_attribute(Health)
     |> remove_attribute(Energy)}
  end

  # Internal

  # Schedules the next regeneration tick, carrying the not-yet-applied
  # fractional health/energy over to the next update.
  defp schedule_regeneration_tick(health_acc, energy_acc) do
    Process.send_after(self(), {:vitals_regeneration_update, %{
      interval: @regeneration_interval,
      health_accumulator: health_acc,
      energy_accumulator: energy_acc}}, @regeneration_interval)
  end

  defp get_max_health(%{Level => %Level{level: level}, Morale => %Morale{morale: morale}}), do: get_max_health(level, morale)
  # Entities without an explicit level are assumed to be level 20.
  defp get_max_health(%{Morale => %Morale{morale: morale}}), do: get_max_health(20, morale)
  defp get_max_health(%{}), do: get_max_health(20, 0)
  @doc "Builds a full Health attribute for the given level and morale (in percent)."
  def get_max_health(level, morale) do
    health = calc_life_points_for_level(level)
    max_health_with_morale = round(health / 100 * (100 + morale))
    %Health{health: max_health_with_morale, max_health: max_health_with_morale}
  end
  #TODO: Take care of Armor, Runes, Weapons...
  defp calc_life_points_for_level(level),
    do: 100 + ((level - 1) * 20) # Dont add 20 lifePoints for level1
  #TODO: Take care of Armor, Runes, Weapons...
  defp get_max_energy(%{Morale => %Morale{morale: morale}}), do: get_max_energy(morale)
  defp get_max_energy(%{}), do: get_max_energy(0)
  defp get_max_energy(morale) do
    inital_mana = 70
    mana_with_morale = round(inital_mana / 100 * (100 + morale))
    %Energy{mana: mana_with_morale, max_mana: mana_with_morale}
  end
  defp regenerate_health(health, amount) when amount >= @min_accumulated_health do
    health_addition = trunc(amount)
    leftover = amount - health_addition
    {%Health{health | health: ((health.health + health_addition) |> min(health.max_health))}, leftover}
  end
  defp regenerate_health(health, amount), do: {health, amount}
  defp regenerate_energy(energy, amount) when amount >= @min_accumulated_energy do
    energy_addition = trunc(amount)
    leftover = amount - energy_addition
    {%Energy{energy | mana: ((energy.mana + energy_addition) |> min(energy.max_mana))}, leftover}
  end
  defp regenerate_energy(energy, amount), do: {energy, amount}
end
defmodule DeadBehaviour do
  @moduledoc """
  Entity behaviour for dead entities: applies the morale penalty on death,
  broadcasts it locally, and switches back to `AliveBehaviour` upon
  resurrection.
  """
  use Entice.Entity.Behaviour
  alias Entice.Logic.Vitals.Health
  alias Entice.Logic.Vitals.Energy
  alias Entice.Logic.Vitals.Morale
  alias Entice.Logic.Vitals.AliveBehaviour
  def init(%Entity{attributes: %{Morale => %Morale{morale: morale}}} = entity, :entity_died) do
    entity.id |> Coordination.notify_locally({
      :entity_dead,
      %{entity_id: entity.id, attributes: entity.attributes}})
    # Dying costs 15% morale, bottoming out at -60 (the maximum negative
    # morale). Bug fix: the old `if new_morale < -60, do: new_morale = -60`
    # form does not rebind the variable outside the `if` on modern Elixir,
    # so the clamp never took effect.
    new_morale = max(morale - 15, -60)
    {:ok,
     entity
     |> put_attribute(%Morale{morale: new_morale})
     |> update_attribute(Health, fn health -> %Health{health | health: 0} end)}
  end
  def handle_event({:vitals_entity_resurrect, percent_health, percent_energy}, entity),
    do: {:become, AliveBehaviour, {:entity_resurrected, percent_health, percent_energy}, entity}
  # Keep the vital attributes when switching back to the alive behaviour ...
  def terminate({:become_handler, AliveBehaviour, _}, entity),
    do: {:ok, entity}
  # ... but drop them when the behaviour is removed for good.
  def terminate(_reason, entity) do
    {:ok,
     entity
     |> remove_attribute(Morale)
     |> remove_attribute(Health)
     |> remove_attribute(Energy)}
  end
end
end
|
lib/entice/logic/vitals.ex
| 0.638385
| 0.601535
|
vitals.ex
|
starcoder
|
defmodule Sworm do
  @moduledoc """
  Sworm takes the accessible API from
  [Swarm](https://github.com/bitwalker/swarm), and combines it with
  the robustness of [Horde](https://github.com/derekkraan/horde).
  It strives to be a combination of a global, distributed process
  registry and supervisor, accessible through a friendly API.
  ## Usage
  The concept behind Sworm is that there can be multiple, distinct
  "sworms" living inside a cluster of BEAM nodes. To define a Sworm,
  you define a module like this:
  defmodule MyProcesses do
  use Sworm
  end
  Now, the `MyProcesses` module must be added to your application's supervison tree.
  When you now start the application, you can use the functions from
  the `Sworm` module inside your `MyProcesses` module:
  {:ok, pid} = MyProcesses.register_name("my worker", MyWorker, :start_link, [arg1, arg2])
  """
  alias Sworm.Main
  @doc """
  Create a child specification for adding a new Sworm to the supervisor tree.
  """
  @spec child_spec(sworm :: atom(), opts :: [term()]) :: Supervisor.child_spec()
  defdelegate child_spec(sworm, opts \\ []), to: Main
  @doc """
  Start and link a Sworm in a standalone fashion.
  > You almost will never need this function, as it is more usual to
  > start a Sworm directly in a supervisor tree, using the provided
  > child_spec function.
  """
  @spec start_link(sworm :: atom(), opts :: [term()]) :: {:ok, pid()}
  defdelegate start_link(sworm, opts \\ []), to: Main
  @doc """
  Register a name in the given Sworm. This function takes a
  module/function/args triplet, and starts the process, registers the
  pid with the given name, and handles cluster topology changes by
  restarting the process on its new node using the given MFA.
  Processes that are started this way are added to the Sworm's dynamic
  Horde supervisor, distributed over the members of the Horde
  according to its cluster strategy, and restarted when they crash.
  When the node on which the process is spawned exits, the processes
  are restarted on one of the other nodes in the cluster.
  """
  @spec register_name(
          sworm :: atom(),
          name :: term(),
          module :: atom(),
          function :: atom(),
          args :: [term]
        ) :: {:ok, pid} | {:error, term}
  defdelegate register_name(sworm, name, m, f, a), to: Main
  @doc """
  Registers the given name to the given process. Names
  registered this way will not be shifted when the cluster
  topology changes, and are not restarted by Sworm.
  If no pid is given, `self()` is used for the registration.
  """
  @spec register_name(sworm :: atom(), name :: term(), pid :: pid()) :: :yes | :no
  defdelegate register_name(sworm, name, pid \\ self()), to: Main
  @doc """
  Either finds the named process in the sworm or registers it using
  the `register_name/5` function.
  """
  @spec whereis_or_register_name(
          sworm :: atom(),
          name :: term(),
          module :: atom(),
          function :: atom(),
          args :: [term]
        ) :: {:ok, pid()} | {:error, term()}
  defdelegate whereis_or_register_name(sworm, name, m, f, a), to: Main
  @doc """
  Unregisters the given name from the sworm.
  """
  @spec unregister_name(sworm :: atom(), name :: term()) :: :ok
  defdelegate unregister_name(sworm, name), to: Main
  @doc """
  Get the pid of a registered name within a sworm.
  Returns `nil` when the name is not registered.
  """
  @spec whereis_name(sworm :: atom(), name :: term()) :: pid() | nil
  defdelegate whereis_name(sworm, name), to: Main
  @doc """
  Gets a list of all registered names and their pids within a sworm
  """
  @spec registered(sworm :: atom()) :: [{name :: term(), pid()}]
  defdelegate registered(sworm), to: Main
  @doc """
  Joins a process to a group.
  Returns an error when the given process is not part of the sworm.
  """
  @spec join(sworm :: atom(), term(), pid()) :: :ok | {:error, :not_found}
  defdelegate join(sworm, group, pid \\ self()), to: Main
  @doc """
  Removes a process from a group
  Returns an error when the given process is not part of the sworm.
  """
  @spec leave(sworm :: atom(), term(), pid()) :: :ok | {:error, :not_found}
  defdelegate leave(sworm, group, pid \\ self()), to: Main
  @doc """
  Gets all the members of a group within the sworm.
  Returns a list of pids.
  """
  @spec members(sworm :: atom(), term()) :: [pid()]
  defdelegate members(sworm, group), to: Main
  defmacro __using__(opts), do: Sworm.Macro.using(opts)
end
|
lib/sworm.ex
| 0.78287
| 0.708906
|
sworm.ex
|
starcoder
|
defmodule OkrApp.Objectives.Objective do
  @moduledoc """
  Objectives are high level goals that an individual wishes to complete. They are
  contained with other objectives in an Okr.
  They are scored automatically based on the completion of key results.
  """
  use Ecto.Schema
  import Ecto.Changeset
  alias OkrApp.Objectives.{KeyResult, Okr, ObjectiveAssessment, ObjectiveLink}
  schema "objectives" do
    # NOTE(review): `:null` is a migration option, not a documented
    # Ecto.Schema.field/3 option -- verify it is accepted by the Ecto version
    # in use (recent versions reject unknown field options).
    field(:content, :string, null: false)
    field(:cancelled_at, :utc_datetime)
    field(:deleted_at, :utc_datetime)
    belongs_to(:okr, Okr)
    has_one(:user, through: [:okr, :user])
    has_one(:group, through: [:okr, :group])
    has_many(:key_results, KeyResult)
    has_one(:objective_assessment, ObjectiveAssessment)
    # Objective-to-objective links, visible from both directions.
    has_many(:contributes_to_objective_links, ObjectiveLink, foreign_key: :source_objective_id)
    has_many(:contributed_by_objective_links, ObjectiveLink, foreign_key: :linked_to_objective_id)
    has_many(:contributes_to_objectives, through: [:contributes_to_objective_links, :linked_to_objective])
    has_many(:contributed_by_objectives, through: [:contributed_by_objective_links, :source_objective])
    timestamps()
  end
  @doc "Changeset for creating an objective; requires content and a parent OKR."
  def create_changeset(params) do
    %__MODULE__{}
    |> cast(params, [:content, :okr_id])
    |> validate_required([:content, :okr_id])
    |> foreign_key_constraint(:okr_id)
  end
  @doc """
  Changeset for updating an objective. `content` is only re-validated as
  required when it is actually being changed, so cancel/delete updates may
  omit it.
  """
  def update_changeset(objective = %__MODULE__{}, params) do
    changeset =
      objective
      |> cast(params, [:content, :cancelled_at, :deleted_at])
    if Map.has_key?(changeset.changes, :content) do
      validate_required(changeset, [:content])
    else
      changeset
    end
  end
  @doc "Average mid-term score over all non-cancelled key results."
  def mid_score(%{key_results: key_results}) do
    key_results
    |> Enum.filter(&is_nil(&1.cancelled_at))
    |> Enum.map(& &1.mid_score)
    |> score_average()
  end
  @doc "Average final score over all non-cancelled key results."
  def final_score(%{key_results: key_results}) do
    key_results
    |> Enum.filter(&is_nil(&1.cancelled_at))
    |> Enum.map(& &1.final_score)
    |> score_average()
  end
  # Averages a list of Decimal scores and rounds the result; returns the
  # rounded Decimal zero for an empty list. The `count == no_score` guard
  # avoids a division by zero (Decimal.new/1 yields structurally identical
  # zero structs, so `==` works here).
  defp score_average(scores) do
    no_score = Decimal.new(0)
    count = Decimal.new(length(scores))
    sum =
      Enum.reduce(scores, no_score, fn score, sum ->
        Decimal.add(score, sum)
      end)
    if count == no_score do
      no_score
    else
      Decimal.div(sum, count)
    end
    |> OkrApp.Objectives.round_score(:objective)
  end
end
|
lib/okr_app/objectives/objective.ex
| 0.728169
| 0.423667
|
objective.ex
|
starcoder
|
defmodule GenFRP do
  @moduledoc """
  The main GenFRP module.
  GenFRP wraps GenServer, which means
  that a separate process is spawned using `start` or `start_link`.
  After this, the other functions in this module
  can be used to send messages to this process.
  The idea is to:
  - start a GenFRP process with your desired GenFRP behaviour.
  - Send events using `send_event` to this process, which will update its state.
  - At some point, `render` the state the process has during that time.
  The results of `render` are cached, to restrict the amount of work that is done.
  Instead of sending events manually, `register_callback` can be used to
  register _callbacks_ that might send an event whenever they are triggered.
  A callback is basically a wrapper with some setup and some tear-down code.
  """
  alias GenFRP.Callback
  # NOTE(review): `callbacks` defaults to a MapSet here, but start/2 and
  # start_link/2 override it with a plain map (%{}), which is what the
  # Map.put in the :register_callback handler relies on -- consider making
  # the default a map as well.
  defstruct [:module, :state, callbacks: MapSet.new, last_rendered_state: nil, last_render: nil]
  defmacro __using__(opts) do
    quote do
      use GenFRP.Behaviour, unquote(opts)
    end
  end
  use GenServer
  # EXTERNAL
@doc """
Starts the given `frp_module` as a GenFRP process (unlinked).
It returns the PID of the started process, which is to be used
for most of the other methods in this module.
`start/1` uses the initial state from calling `init/0` on `frp_module`.
`start/2` uses the specified `initial_state` instead.
"""
def start(frp_module) do
  start(frp_module, frp_module.init())
end
def start(frp_module, initial_state) do
  GenServer.start(__MODULE__, %__MODULE__{module: frp_module, state: initial_state, callbacks: %{}})
end
@doc """
Atomically starts the given `frp_module` as a GenFRP process
and sets up a link to it; if it crashes, the process that started it will also crash.
`start_link/1` uses the initial state from calling `init/0` on `frp_module`.
`start_link/2` uses the specified `initial_state` instead.
"""
def start_link(frp_module) do
  # Bug fix: this previously delegated to start/2, which spawns the process
  # WITHOUT a link, contradicting the documented linking semantics.
  start_link(frp_module, frp_module.init())
end
def start_link(frp_module, initial_state) do
  GenServer.start_link(__MODULE__, %__MODULE__{module: frp_module, state: initial_state, callbacks: %{}})
end
@doc """
Registers `callback` with the FRP process `pid`.

The callback's setup code runs inside the server; see `GenFRP.Callback`
for how callbacks are built.
"""
def register_callback(pid, %Callback{} = callback) do
GenServer.call(pid, {:register_callback, callback})
end
@doc """
Removes a previously-registered callback from the FRP process `pid`.

Pass the exact same callback that was registered (or one reconstructed
from the same original parameters).
"""
def deregister_callback(pid, %Callback{} = callback) do
GenServer.call(pid, {:deregister_callback, callback})
end
@doc """
Sends `event` to the FRP process `pid` (fire-and-forget).

`event` may be anything your FRP implementation accepts in its
`GenFRP.Behaviour.update/2` callback.
"""
def send_event(pid, event), do: GenServer.cast(pid, {:send_event, event})
@doc """
Renders the internal state of the FRP process `pid`.

The behaviour module's render callback is only invoked when the state
changed since the previous call; otherwise the cached result of the last
render is returned as-is.
"""
def render(pid), do: GenServer.call(pid, :render)
@doc """
Dumps the full internal state of the GenFRP process.

Intended only for debugging/testing the GenFRP machinery itself; it may
be removed in future releases. (Debug your own behaviour by calling its
`update/2` directly instead.)
"""
def debug(pid), do: GenServer.call(pid, :debug)
# INTERNAL
# GenServer callback clauses. Clause order matters: the :render clause
# that pins `state` and `last_rendered_state` to the same binding must
# precede the general :render clause, or the cache would never be hit.
@doc false
def handle_call({:register_callback, callback = %Callback{}}, _from, gen_server_state = %__MODULE__{}) do
# Run the callback's setup, handing it this server's pid so it can send
# events back; remember the returned state for tear-down later.
callback_starting_state = callback.start_fun.(self())
new_gen_server_state = %{gen_server_state | callbacks: Map.put(gen_server_state.callbacks, callback, callback_starting_state)}
{:reply, :ok, new_gen_server_state}
end
def handle_call({:deregister_callback, callback}, _from, gen_server_state = %__MODULE__{}) do
# Tear down with the state captured at registration time, then forget
# the callback. NOTE(review): deregistering a never-registered callback
# passes `nil` to stop_fun — confirm callbacks tolerate that.
callback.stop_fun.(self(), gen_server_state.callbacks[callback])
new_gen_server_state = Map.put(gen_server_state, :callbacks, Map.delete(gen_server_state.callbacks, callback))
{:reply, :ok, new_gen_server_state}
end
# Prevent unnecessary work: state unchanged since the last render
# (both fields match the same `state`), so reply with the cached render.
def handle_call(:render, _from, gen_server_state = %__MODULE__{state: state, last_rendered_state: state}) do
{:reply, gen_server_state.last_render, gen_server_state}
end
# Cache miss: re-render via the behaviour module and memoize the result.
# NOTE(review): the callback is invoked with arity 2 (current and last
# rendered state) even though the public docs mention `render/1` —
# confirm against GenFRP.Behaviour.
def handle_call(:render, _from, gen_server_state = %__MODULE__{module: module, state: state, last_rendered_state: last_rendered_state}) do
render = module.render(state, last_rendered_state)
new_gen_server_state = %__MODULE__{gen_server_state | last_render: render, last_rendered_state: state}
{:reply, render, new_gen_server_state}
end
# Raw state dump; debugging/testing only (see debug/1).
def handle_call(:debug, _from, gen_server_state) do
{:reply, gen_server_state, gen_server_state}
end
@doc false
def handle_cast({:send_event, event}, gen_server_state = %__MODULE__{module: module, state: state}) do
# Fold the event into the FRP state via the behaviour's update/2.
{:noreply, %__MODULE__{gen_server_state | state: module.update(state, event)}}
end
end
|
lib/gen_frp.ex
| 0.811303
| 0.582046
|
gen_frp.ex
|
starcoder
|
defmodule Snitch.Domain.ShipmentEngine do
@moduledoc """
Finds the optimal shipment for a given order.
`ShipmentEngine` models the problem as a [Constraint Satisfaction
Problem][csp] and finds the optimal `shipment` subject to the following
constraints:
1. Two (or more) `packages` cannot fulfill the same `lineitem`.
2. Selected `packages` together fulfill the entire `order`.
Returns a shipment, which is a list of `packages`.
In case we are unable to find such a shipment, the empty `list` is returned.
## Limitations and future work
Constraints that could be added in future:
* prefer packages that are "on-hand" over backorder packages.
* prefer shorter shipping distance (analogous to shipping time)
* prefer smaller shipping cost (to user/store owner)
[csp]: https://en.wikipedia.org/wiki/Constraint_Satisfaction_Problem
"""
use Snitch.Domain
alias Aruspex.Problem
alias Aruspex.Strategy.SimulatedAnnealing
alias Snitch.Data.Schema.Order
# Each package is a boolean CSP variable: selected (true) or not (false).
@domain [true, false]
@doc """
Returns the optimal shipment from the `packages` fulfilling the `order`.
The CSP is modelled as a graph of packages, where 2 packages are linked by an
edge if they include the same line-item.
Uses [Simulated Annealing][sa] to find the optimal shipment, see
`Aruspex.Strategy.SimulatedAnnealing`. Note that this technique is
probabilistic and [AC-3][ac3] should replace it.
[sa]: https://en.wikipedia.org/wiki/Simulated_annealing
[ac3]: https://en.wikipedia.org/wiki/AC-3_algorithm
"""
@spec run(list, Order.t()) :: list
def run([], _), do: []
def run(packages, %Order{} = order) when is_list(packages) do
# Tag every package with a unique atom id (:p1, :p2, ...) that serves
# as its CSP variable name.
packages_with_id = append_keys(packages)
# Edges connect packages that share at least one variant.
edges = create_csp(packages_with_id)
vars =
Enum.reduce(packages_with_id, [], fn p, acc ->
[p.id | acc]
end)
item_var_map = item_var_mapping(packages_with_id)
problem = Problem.new()
for var <- vars, do: Problem.add_variable(problem, var, @domain)
# Constraint 1: two packages sharing a line item can't both be chosen.
binary_constraint(problem, edges)
# Constraint 2: chosen packages must cover the order's line items exactly.
summation_constraint(problem, vars, item_var_map, Repo.preload(order, [:line_items]))
# Take a single solution from the (lazy) annealing strategy.
result =
problem
|> SimulatedAnnealing.set_strategy()
|> Enum.take(1)
bindings = variable_assignment(result)
filter_packages(item_var_map, bindings)
end
# Strips Aruspex-internal "hidden" variables from the solver's binding,
# leaving {package_id, boolean} pairs. No solution -> [].
defp variable_assignment([]), do: []
defp variable_assignment([result]) do
Enum.reject(result.binding, fn
{{:hidden, _, _}, _} -> true
_ -> false
end)
end
# Keeps only packages whose variable was bound to true.
defp filter_packages(packages, bindings) do
Enum.reduce(bindings, [], fn
{id, true}, acc -> [packages[id] | acc]
{_, false}, acc -> acc
end)
end
# Posts "not both selected" on every pair of conflicting packages.
defp binary_constraint(problem, edges) do
Enum.map(edges, fn {x, y} ->
Problem.post(problem, x.id, y.id, &(not (&1 and &2)))
end)
end
# Posts a global constraint: items in the selected packages must add up
# to exactly the order's line-item count (full coverage, no overlap).
defp summation_constraint(problem, vars, item_var_map, order) do
Problem.post(problem, vars, fn values ->
item_count =
vars
|> Stream.zip(values)
|> Enum.reduce(0, fn
{var, true}, acc -> length(item_var_map[var].items) + acc
{_, false}, acc -> acc
end)
item_count == length(order.line_items)
end)
end
# {id, package} pairs; ids are atoms, so this keyword list supports
# Access lookups (item_var_map[var]).
defp item_var_mapping(packages) do
Enum.map(packages, fn pkg ->
{pkg.id, pkg}
end)
end
defp create_csp(packages) do
packages
|> create_edges()
|> find_unique_edges()
end
# NOTE(review): uses String.to_atom/1; acceptable only because package
# counts are small and server-controlled, never user input.
defp append_keys(packages) do
packages
|> Stream.with_index(1)
|> Enum.map(fn {package, index} ->
Map.put(package, :id, String.to_atom("p#{index}"))
end)
end
# Ordered pairs of distinct packages sharing a variant; yields both
# {a, b} and {b, a} — deduplicated by find_unique_edges/1.
defp create_edges(packages) do
for package1 <- packages,
package2 <- packages,
package1 != package2,
not MapSet.disjoint?(package1.variants, package2.variants) do
{package1, package2}
end
end
# Collapses symmetric duplicates, keeping one edge per unordered pair.
def find_unique_edges(package_edges) do
Enum.reduce(package_edges, [], fn {package1, package2}, acc ->
case Enum.member?(acc, {package1, package2}) or Enum.member?(acc, {package2, package1}) do
true -> acc
false -> [{package1, package2} | acc]
end
end)
end
end
|
apps/snitch_core/lib/core/domain/shipment_engine.ex
| 0.857261
| 0.768168
|
shipment_engine.ex
|
starcoder
|
defmodule Day05 do
# Advent of Code 2019, day 5: a small Intcode interpreter.
# Memory is a map of address -> value, plus the extra keys :input
# (single input value) and :output (outputs accumulated in reverse).
def part1(input) do
memory = read_program(input)
memory = set_input(memory, 1)
# The puzzle answer is the last value the program outputs.
List.last(execute(memory))
end
def part2(program, input) do
memory = read_program(program)
memory = set_input(memory, input)
List.last(execute(memory))
end
# Fetch-decode-execute loop; `ip` is the instruction pointer.
# Returns the outputs in emission order once opcode 99 halts the program.
defp execute(memory, ip \\ 0) do
{opcode, modes} = fetch_opcode(memory, ip)
case opcode do
1 ->
# add
memory = exec_arith_op(&+/2, modes, memory, ip)
execute(memory, ip + 4)
2 ->
# multiply
memory = exec_arith_op(&*/2, modes, memory, ip)
execute(memory, ip + 4)
3 ->
# read input
memory = exec_input(memory, ip)
execute(memory, ip + 2)
4 ->
# write output
memory = exec_output(modes, memory, ip)
execute(memory, ip + 2)
5 ->
# jump-if-true
ip = exec_if(&(&1 !== 0), modes, memory, ip)
execute(memory, ip)
6 ->
# jump-if-false
ip = exec_if(&(&1 === 0), modes, memory, ip)
execute(memory, ip)
7 ->
# less-than
memory = exec_cond(&(&1 < &2), modes, memory, ip)
execute(memory, ip + 4)
8 ->
# equals
memory = exec_cond(&(&1 === &2), modes, memory, ip)
execute(memory, ip + 4)
99 ->
# halt: outputs were prepended, so reverse to emission order
Enum.reverse(Map.get(memory, :output, []))
end
end
# Executes an add/multiply instruction; the third operand is always a
# plain address to write to (never dereferenced through a mode).
defp exec_arith_op(op, modes, memory, ip) do
[in1, in2] = read_operand_values(memory, ip + 1, modes, 2)
out_addr = read(memory, ip + 3)
result = op.(in1, in2)
write(memory, out_addr, result)
end
# Stores the program's input value at the instruction's target address.
defp exec_input(memory, ip) do
out_addr = read(memory, ip + 1)
write(memory, out_addr, Map.fetch!(memory, :input))
end
# Prepends the operand value to the :output accumulator.
defp exec_output(modes, memory, ip) do
[value] = read_operand_values(memory, ip + 1, modes, 1)
output = Map.get(memory, :output, [])
output = [value | output]
Map.put(memory, :output, output)
end
# Conditional jump: returns the new instruction pointer (either the
# second operand, or the next instruction at ip + 3).
defp exec_if(op, modes, memory, ip) do
[value, new_ip] = read_operand_values(memory, ip + 1, modes, 2)
case op.(value) do
true -> new_ip
false -> ip + 3
end
end
# Comparison instruction: writes 1 if `op` holds for the operands, else 0.
defp exec_cond(op, modes, memory, ip) do
[operand1, operand2] = read_operand_values(memory, ip + 1, modes, 2)
out_addr = read(memory, ip + 3)
result = case op.(operand1, operand2) do
true -> 1
false -> 0
end
write(memory, out_addr, result)
end
# Reads `n` operand values starting at `addr`, consuming one decimal
# digit of `modes` per operand: 0 = position mode (dereference),
# 1 = immediate mode (use the raw value).
defp read_operand_values(_memory, _addr, _modes, 0), do: []
defp read_operand_values(memory, addr, modes, n) do
operand = read(memory, addr)
operand = case rem(modes, 10) do
0 -> read(memory, operand)
1 -> operand
end
[operand | read_operand_values(memory, addr + 1, div(modes, 10), n - 1)]
end
# Splits an instruction: the two low decimal digits are the opcode,
# the remaining digits encode the per-operand parameter modes.
defp fetch_opcode(memory, ip) do
opcode = read(memory, ip)
modes = div(opcode, 100)
opcode = rem(opcode, 100)
{opcode, modes}
end
defp set_input(memory, input) do
Map.put(memory, :input, input)
end
defp read(memory, addr) do
Map.fetch!(memory, addr)
end
defp write(memory, addr, value) do
Map.put(memory, addr, value)
end
# Parses "a,b,c,..." into a map of index -> integer.
defp read_program(input) do
String.split(input, ",")
|> Stream.map(&String.to_integer/1)
|> Stream.with_index
|> Stream.map(fn {code, index} -> {index, code} end)
|> Map.new
end
end
|
day05/lib/day05.ex
| 0.531939
| 0.499756
|
day05.ex
|
starcoder
|
defmodule Day24.BugLyfe2 do
# Advent of Code 2019, day 24 part 2: recursively-nested 5x5 bug grids.
# A level is a flat list of 25 cells (1 = bug, 0 = empty); the world is a
# map of level-number -> level, growing outward/inward as bugs spread.
@doc """
iex> Day24.BugLyfe2.part2("day24-sample.txt", 10)
99
iex> Day24.BugLyfe2.part2("day24.txt", 200)
2009
"""
def part2(filename, time_limit) do
level0 = parse_input(filename)
iterate(%{0 => level0}, time_limit)
end
# Reads inputs/<filename> into a flat list of 25 cells.
def parse_input(filename) do
# Bug fix: the path previously contained the literal text "#(unknown)"
# instead of interpolating the `filename` argument (which was unused).
"inputs/#{filename}"
|> File.stream!()
|> Stream.map(&String.trim/1)
|> Enum.map(&String.graphemes/1)
|> List.flatten()
|> Enum.map(fn c ->
case c do
"#" -> 1
_ -> 0
end
end)
end
# Evolves `state` for `time_limit` minutes and returns the final bug count.
def iterate(state, time_limit, minute \\ 1) do
new_state = evolve(state)
bug_count =
new_state
|> Map.values()
|> List.flatten()
|> Enum.count(&(&1 == 1))
if minute < time_limit do
iterate(new_state, time_limit, minute + 1)
else
bug_count
end
end
# Prints one level as an ASCII grid (debug helper); returns `state`
# unchanged so it can sit inside a pipeline.
def draw(state, width \\ 5) do
state
|> Enum.map(fn
0 -> "."
1 -> "#"
end)
|> Enum.chunk_every(width)
|> Enum.join("\n")
|> IO.puts()
IO.puts("")
state
end
# One minute of evolution across all levels. A new outer/inner level is
# considered only if the current outermost/innermost level has any bug.
def evolve(state) do
levels_to_check = Map.keys(state)
{min, max} = Enum.min_max(levels_to_check)
min = if Enum.any?(state[min], &(&1 == 1)), do: min - 1, else: min
max = if Enum.any?(state[max], &(&1 == 1)), do: max + 1, else: max
levels_to_check =
[min, levels_to_check, max]
|> List.flatten()
|> Enum.uniq()
|> Enum.drop(1)
evolve(state, %{}, [], 0, min, levels_to_check)
end
# Walks cell indices 0..24 of `level`, accumulating new cells (reversed)
# in `buffer`, then moves on to the next level in `levels_to_check`.
def evolve(state, new_state, buffer, index, level, levels_to_check)
def evolve(_state, new_state, buffer, 25, level, []) do
Map.put(new_state, level, Enum.reverse(buffer))
end
def evolve(state, new_state, buffer, 25, level, [next_level | levels]) do
new_state = Map.put(new_state, level, Enum.reverse(buffer))
evolve(state, new_state, [], 0, next_level, levels)
end
def evolve(state, new_state, buffer, 12, level, levels_to_check) do
# Index 12 is the centre tile: it IS the nested grid, never a bug.
evolve(state, new_state, [0 | buffer], 13, level, levels_to_check)
end
def evolve(state, new_state, buffer, index, level, levels_to_check) do
neighbor_count =
neighbors(state, index, level)
|> Enum.count(&(&1 == 1))
tile =
if state[level] do
Enum.at(state[level], index)
else
0
end
# Day-24 rules: a bug dies unless exactly one neighbour is a bug;
# an empty tile becomes infested with one or two bug neighbours.
new_tile =
cond do
tile == 1 and neighbor_count != 1 -> 0
tile == 0 and (neighbor_count == 1 or neighbor_count == 2) -> 1
true -> tile
end
evolve(state, new_state, [new_tile | buffer], index + 1, level, levels_to_check)
end
# Cell values of the (possibly cross-level) neighbours of `index`.
def neighbors(state, index, level, size \\ 5) do
col = rem(index, size)
row = div(index, size)
[
n_neighbor(col, row, level, size - 1),
s_neighbor(col, row, level, size - 1),
e_neighbor(col, row, level, size - 1),
w_neighbor(col, row, level, size - 1)
]
|> List.flatten()
|> Enum.map(fn point -> at_loc(state, size, point) end)
end
# added one more special case each to include the recursive grids and
# gave the edges neighbors in the level above
# Coordinates are {col, row, level}; level - 1 is the enclosing grid,
# level + 1 the nested one. Tiles adjacent to the centre (2,2) expand
# to a whole edge of the nested grid.
def n_neighbor(_col, 0, depth, _max), do: [{2, 1, depth - 1}]
def n_neighbor(2, 3, depth, max) do
for col <- 0..max, do: {col, max, depth + 1}
end
def n_neighbor(col, row, depth, _max), do: [{col, row - 1, depth}]
def s_neighbor(_col, max, depth, max), do: [{2, 3, depth - 1}]
def s_neighbor(2, 1, depth, max) do
for col <- 0..max, do: {col, 0, depth + 1}
end
def s_neighbor(col, row, depth, _max), do: [{col, row + 1, depth}]
def w_neighbor(0, _row, depth, _max), do: [{1, 2, depth - 1}]
def w_neighbor(3, 2, depth, max) do
for row <- 0..max, do: {max, row, depth + 1}
end
def w_neighbor(col, row, depth, _max), do: [{col - 1, row, depth}]
def e_neighbor(max, _row, depth, max), do: [{3, 2, depth - 1}]
def e_neighbor(1, 2, depth, max) do
for row <- 0..max, do: {0, row, depth + 1}
end
def e_neighbor(col, row, depth, _max), do: [{col + 1, row, depth}]
# Cell value at a {col, row, level} coordinate; absent levels read as 0.
def at_loc(state, size, {col, row, level}) do
if state[level] do
Enum.at(state[level], row * size + col)
else
0
end
end
end
|
lib/day24/bug_lyfe2.ex
| 0.511473
| 0.503235
|
bug_lyfe2.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.