code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
|---|---|---|---|---|---|
defmodule ListComp do
  @moduledoc """
  Examples of filtering a list of `{name, sex, age}` tuples, both with
  explicit tail recursion/guards and with list comprehensions.
  """

  # Sample data used as the default argument of the public functions.
  @spec get_people() :: list(tuple())
  defp get_people() do
    [{"Federico", "M", 22}, {"Kim", "F", 45}, {"Hansa", "F", 30},
     {"Tran", "M", 47}, {"Cathy", "F", 32}, {"Elias", "M", 50}]
  end

  @doc """
  Uses pattern matching on a list of tuples to extract males over 40 (logical and).

  Results come back in reverse input order because matches are prepended to the
  accumulator.

  ## Examples

      iex> ListComp.extract_male_over_40
      [{"Elias", "M", 50}, {"Tran", "M", 47}]
  """
  def extract_male_over_40(list \\ get_people()) do
    extract_male_over_40(list, [])
  end

  @doc """
  Uses pattern matching on a list of tuples to extract males or people over 40.

  Results come back in reverse input order because matches are prepended to the
  accumulator.

  ## Examples

      iex> ListComp.extract_male_or_over_40
      [{"Elias", "M", 50}, {"Tran", "M", 47}, {"Kim", "F", 45}, {"Federico", "M", 22}]
  """
  def extract_male_or_over_40(list \\ get_people()) do
    extract_male_or_over_40(list, [])
  end

  @doc """
  Uses a list comprehension and pattern matching to extract males over 40
  (logical and). Results keep the input order.

  ## Examples

      iex> ListComp.extract_male_over_40_lc
      [{"Tran", "M", 47}, {"Elias", "M", 50}]
  """
  def extract_male_over_40_lc(list \\ get_people()) do
    for {name, sex, age} <- list, sex == "M", age > 40 do
      {name, sex, age}
    end
  end

  @doc """
  Uses a list comprehension and pattern matching to extract males or people
  over 40. Results keep the input order.

  ## Examples

      iex> ListComp.extract_male_or_over_40_lc
      [{"Federico", "M", 22}, {"Kim", "F", 45}, {"Tran", "M", 47}, {"Elias", "M", 50}]
  """
  def extract_male_or_over_40_lc(list \\ get_people()) do
    for {name, sex, age} <- list, sex == "M" or age > 40 do
      {name, sex, age}
    end
  end

  # Private tail-recursive helpers; `rl` accumulates matches in reverse order.
  defp extract_male_over_40([], rl), do: rl

  defp extract_male_over_40([{name, sex, age} | rest], rl) when sex == "M" and age > 40 do
    extract_male_over_40(rest, [{name, sex, age} | rl])
  end

  defp extract_male_over_40([_ | rest], rl) do
    extract_male_over_40(rest, rl)
  end

  defp extract_male_or_over_40([], rl), do: rl

  defp extract_male_or_over_40([{name, sex, age} | rest], rl) when sex == "M" or age > 40 do
    extract_male_or_over_40(rest, [{name, sex, age} | rl])
  end

  defp extract_male_or_over_40([_ | rest], rl) do
    extract_male_or_over_40(rest, rl)
  end
end
|
higher_order/lib/list_comp.ex
| 0.516595
| 0.613844
|
list_comp.ex
|
starcoder
|
defmodule Polyline do
  @moduledoc ~S"""
  Encode and decode Polylines to and from List of `{lon, lat}` tuples.
  The encode functions accept a `precision` parameter that defines the
  number of significant digits to retain when encoding. The same precision
  must be supplied to the decode or the resulting linestring will be incorrect.
  The default is `5`, which correlates to approximately 1 meter of precision.

  ## Examples

      iex> Polyline.encode([{-120.2, 38.5}, {-120.95, 40.7}, {-126.453, 43.252}])
      "_p~iF~ps|U_ulLnnqC_mqNvxq`@"

      iex> Polyline.decode("_p~iF~ps|U_ulLnnqC_mqNvxq`@")
      [{-120.2, 38.5}, {-120.95, 40.7}, {-126.453, 43.252}]
  """

  @default_precision 5

  # `use Bitwise` is deprecated since Elixir 1.12; import the operators instead.
  import Bitwise

  @doc ~S"""
  Encode a List of coordinate tuples into a Polyline String. Also works with
  `Geo.LineString` structs (see https://hex.pm/packages/geo).

  ## Examples

      iex> Polyline.encode([{-120.2, 38.5}, {-120.95, 40.7}, {-126.453, 43.252}])
      "_p~iF~ps|U_ulLnnqC_mqNvxq`@"

      iex> Polyline.encode([{-120.2, 38.5}, {-120.95, 40.7}, {-126.453, 43.252}], 6)
      "_izlhA~rlgdF_{geC~ywl@_kwzCn`{nI"

      iex> "LINESTRING(-120.2 38.5, -120.95 40.7, -126.453 43.252)"
      ...> |> Geo.WKT.decode!
      ...> |> Map.get(:coordinates)
      ...> |> Polyline.encode
      "_p~iF~ps|U_ulLnnqC_mqNvxq`@"
  """
  def encode(coordinates, precision \\ @default_precision) do
    factor = :math.pow(10, precision)

    coordinates
    |> Enum.map(fn {x, y} -> {round(x * factor), round(y * factor)} end)
    |> do_encode()
    |> elem(0)
  end

  # Encodes integer coordinates as deltas from the previous point; the first
  # point is encoded relative to {0, 0}, i.e. as an absolute position.
  defp do_encode([]), do: {"", nil}

  defp do_encode([first | rest]) do
    Enum.reduce(rest, encode_step(first, {"", {0, 0}}), &encode_step/2)
  end

  # Appends one point's encoding (latitude delta first, then longitude delta)
  # and carries the point forward for the next delta computation.
  defp encode_step({x, y}, {acc, {x_prev, y_prev}}) do
    {acc <> encode_int(y - y_prev) <> encode_int(x - x_prev), {x, y}}
  end

  # Zig-zag encodes a signed integer into the polyline character alphabet.
  defp encode_int(x) do
    (x <<< 1)
    |> unsign()
    |> collect_chars()
    |> to_string()
  end

  # Emits 5-bit groups, least-significant first; 0x20 flags a continuation.
  defp collect_chars(c) when c < 0x20, do: [c + 63]
  defp collect_chars(c), do: [(0x20 ||| (c &&& 0x1F)) + 63 | collect_chars(c >>> 5)]

  @doc ~S"""
  Decode a polyline String into a List of `{lon, lat}` tuples.

  ## Examples

      iex> Polyline.decode("_p~iF~ps|U_ulLnnqC_mqNvxq`@")
      [{-120.2, 38.5}, {-120.95, 40.7}, {-126.453, 43.252}]

      iex> Polyline.decode("_izlhA~rlgdF_{geC~ywl@_kwzCn`{nI", 6)
      [{-120.2, 38.5}, {-120.95, 40.7}, {-126.453, 43.252}]
  """
  def decode(str, precision \\ @default_precision)
  def decode("", _precision), do: []

  def decode(str, precision) do
    factor = :math.pow(10, precision)

    str
    |> String.to_charlist()
    |> decode_terms(factor, [])
    |> Enum.chunk_every(2, 2, :discard)
    |> accumulate_points([])
  end

  # Decodes every signed value in the charlist into a scaled float, in order.
  # Builds the accumulator by prepending and reverses once (O(n) instead of
  # the previous `acc ++ [x]` O(n^2) pattern).
  defp decode_terms([], _factor, acc), do: Enum.reverse(acc)

  defp decode_terms(chars, factor, acc) do
    {value, rest} = decode_next(chars, 0)
    decode_terms(rest, factor, [sign(value) / factor | acc])
  end

  # Terms arrive as [lat, lon] delta pairs: the first pair is absolute, every
  # later pair is a component-wise delta summed onto the previous point (the
  # exact inverse of `encode_step/2`). Returns [] for degenerate input instead
  # of the previous `nil`.
  defp accumulate_points([], acc), do: Enum.reverse(acc)
  defp accumulate_points([[y, x] | rest], []), do: accumulate_points(rest, [{x, y}])

  defp accumulate_points([[y, x] | rest], [{x_prev, y_prev} | _] = acc) do
    accumulate_points(rest, [{x_prev + x, y_prev + y} | acc])
  end

  # Reassembles one signed value from 5-bit groups; a char < 95 (no
  # continuation bit) or the end of input terminates the value.
  defp decode_next([head | []], shift), do: {decode_char(head, shift), []}
  defp decode_next([head | tail], shift) when head < 95, do: {decode_char(head, shift), tail}

  defp decode_next([head | tail], shift) do
    {next, remain} = decode_next(tail, shift + 5)
    {decode_char(head, shift) ||| next, remain}
  end

  defp decode_char(char, shift), do: (char - 63 &&& 0x1F) <<< shift

  # Inverse of the zig-zag transforms used while encoding.
  defp unsign(x) when x < 0, do: -(x + 1)
  defp unsign(x), do: x

  defp sign(result) when (result &&& 1) === 1, do: -((result >>> 1) + 1)
  defp sign(result), do: result >>> 1
end
|
lib/polyline.ex
| 0.928644
| 0.681237
|
polyline.ex
|
starcoder
|
defmodule InterviewPractice.Arrays do
  @moduledoc """
  Interview Practice - Arrays solutions with Elixir.
  """

  @doc """
  Given an array a that contains only numbers in the range from 1 to a.length,
  find the first duplicate number for which the second occurrence has the
  minimal index. In other words, if there are more than 1 duplicated numbers,
  return the number for which the second occurrence has a smaller index than
  the second occurrence of the other number does. If there are no such
  elements, return -1.

  ## Examples

      iex> InterviewPractice.Arrays.first_duplicate [2, 1, 3, 5, 3, 2]
      3

      iex> InterviewPractice.Arrays.first_duplicate [2, 2]
      2

      iex> InterviewPractice.Arrays.first_duplicate [2, 4, 3, 5, 1]
      -1
  """
  def first_duplicate(list) do
    unique = Enum.uniq(list)

    if list == unique do
      -1
    else
      # `list -- unique` removes the FIRST occurrence of every element, so what
      # remains are the second (and later) occurrences, ordered by index.
      List.first(list -- unique)
    end
  end

  @doc """
  Given a string s consisting of small English letters, find and return the
  first instance of a non-repeating character in it. If there is no such
  character, return '_'.

  ## Examples

      iex> InterviewPractice.Arrays.first_not_repeating_character "abacabad"
      "c"

      iex> InterviewPractice.Arrays.first_not_repeating_character "abacabaabacaba"
      "_"
  """
  def first_not_repeating_character(string) do
    codepoints = String.codepoints(string)
    frequencies = Enum.frequencies(codepoints)

    # O(1) map lookup per character; the previous `{char, 1} in frequencies`
    # scanned the whole map as an enumerable for every character (O(n * m)).
    Enum.find(codepoints, "_", fn char -> Map.get(frequencies, char) == 1 end)
  end

  @doc """
  You are given an n x n 2D matrix that represents an image. Rotate the image
  by 90 degrees (clockwise).
  Note: Try to solve this task in-place (with O(1) additional memory), since
  this is what you'll be asked to do during an interview.

  ## Examples

      iex> InterviewPractice.Arrays.rotate_image [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
      [[7, 4, 1], [8, 5, 2], [9, 6, 3]]
  """
  def rotate_image(list) do
    # Clockwise rotation == reverse the rows, then transpose (zip).
    list
    |> Enum.reverse()
    |> Enum.zip()
    |> Enum.map(&Tuple.to_list/1)
  end
end
|
lib/interview_practice/arrays.ex
| 0.786377
| 0.728652
|
arrays.ex
|
starcoder
|
defmodule RDF.Serialization.Decoder do
@moduledoc """
A behaviour for decoders of strings encoded in a specific `RDF.Serialization` format.
"""
alias RDF.{Dataset, Graph}
@doc """
Decodes a serialized `RDF.Graph` or `RDF.Dataset` from a string.
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs.
"""
@callback decode(String.t(), keyword) :: {:ok, Graph.t() | Dataset.t()} | {:error, any}
@doc """
Decodes a serialized `RDF.Graph` or `RDF.Dataset` from a string.
As opposed to `decode/2`, it raises an exception if an error occurs.
Note: The `__using__` macro automatically provides an overridable default
implementation based on the non-bang `decode` function.
"""
@callback decode!(String.t(), keyword) :: Graph.t() | Dataset.t()
@doc """
Decodes a serialized `RDF.Graph` or `RDF.Dataset` from a stream.
It returns an `{:ok, data}` tuple, with `data` being the deserialized graph or
dataset, or `{:error, reason}` if an error occurs.
"""
@callback decode_from_stream(Enumerable.t(), keyword) ::
{:ok, Graph.t() | Dataset.t()} | {:error, any}
@doc """
Decodes a serialized `RDF.Graph` or `RDF.Dataset` from a stream.
As opposed to `decode_from_stream/2`, it raises an exception if an error occurs.
Note: The `__using__` macro automatically provides an overridable default
implementation based on the non-bang `decode` function.
"""
@callback decode_from_stream!(Enumerable.t(), keyword) :: Graph.t() | Dataset.t()
# Stream-based decoding is opt-in; implementations may provide only the
# string-based callbacks.
@optional_callbacks decode_from_stream: 2, decode_from_stream!: 2
# Injected into every module that `use`s this behaviour: registers the
# behaviour, derives a default `decode!/2` from `decode/2` (raising the
# returned reason on error), marks everything overridable, and registers the
# __before_compile__ hook below for stream-support introspection.
defmacro __using__(_) do
quote bind_quoted: [], unquote: true do
@behaviour unquote(__MODULE__)
@impl unquote(__MODULE__)
@spec decode!(String.t(), keyword) :: Graph.t() | Dataset.t()
def decode!(content, opts \\ []) do
case decode(content, opts) do
{:ok, data} -> data
# `reason` is expected to be an exception struct or a message string here;
# `raise/1` fails with ArgumentError for other terms.
{:error, reason} -> raise reason
end
end
defoverridable unquote(__MODULE__)
@before_compile unquote(__MODULE__)
end
end
# Runs after the using module's own definitions exist, so Module.definitions_in/1
# can tell whether `decode_from_stream/2` was implemented. Generates a
# `stream_support?/0` introspection function, and — when stream decoding exists
# but no bang variant was written — derives `decode_from_stream!/2` as well.
defmacro __before_compile__(_env) do
quote do
@stream_support __MODULE__
|> Module.definitions_in()
|> Keyword.has_key?(:decode_from_stream)
@doc false
def stream_support?, do: @stream_support
if @stream_support and
not (__MODULE__ |> Module.definitions_in() |> Keyword.has_key?(:decode_from_stream!)) do
@impl unquote(__MODULE__)
def decode_from_stream!(stream, opts \\ []) do
case decode_from_stream(stream, opts) do
{:ok, data} -> data
{:error, reason} -> raise reason
end
end
end
end
end
end
|
lib/rdf/serialization/decoder.ex
| 0.913903
| 0.6922
|
decoder.ex
|
starcoder
|
defmodule Stripe.SetupIntent do
  @moduledoc """
  A [SetupIntent](https://stripe.com/docs/api/setup_intents) guides you through
  the process of setting up a customer's payment credentials for future payments.
  You can:
  - [Create a SetupIntent](https://stripe.com/docs/api/setup_intents/create)
  - [Retrieve a SetupIntent](https://stripe.com/docs/api/setup_intents/retrieve)
  - [Update a SetupIntent](https://stripe.com/docs/api/setup_intents/update)
  - [Confirm a SetupIntent](https://stripe.com/docs/api/setup_intents/confirm)
  - [Cancel a SetupIntent](https://stripe.com/docs/api/setup_intents/cancel)
  - [List all SetupIntents](https://stripe.com/docs/api/setup_intents/list)
  """
  use Stripe.Entity
  import Stripe.Request
  require Stripe.Util

  # Shape of the `last_setup_error` sub-object returned by the Stripe API.
  @type last_setup_error :: %{
          code: String.t(),
          decline_code: String.t(),
          doc_url: String.t(),
          message: String.t(),
          param: String.t(),
          payment_method: map,
          type: String.t()
        }

  @type next_action :: %{
          redirect_to_url: redirect_to_url | nil,
          type: String.t(),
          use_stripe_sdk: map | nil
        }

  @type redirect_to_url :: %{
          return_url: String.t(),
          url: String.t()
        }

  @type payment_method_options_card :: %{
          request_three_d_secure: String.t()
        }

  @type payment_method_options :: %{
          card: payment_method_options_card | nil
        }

  @type t :: %__MODULE__{
          id: Stripe.id(),
          object: String.t(),
          application: Stripe.id() | nil,
          cancellation_reason: String.t() | nil,
          client_secret: String.t(),
          created: Stripe.timestamp(),
          customer: Stripe.id() | Stripe.Customer.t() | nil,
          description: String.t() | nil,
          last_setup_error: last_setup_error | nil,
          livemode: boolean,
          metadata: Stripe.Types.metadata(),
          next_action: next_action | nil,
          on_behalf_of: Stripe.id() | Stripe.Account.t() | nil,
          payment_method: Stripe.id() | Stripe.PaymentMethod.t() | nil,
          payment_method_options: payment_method_options | nil,
          payment_method_types: list(String.t()),
          status: String.t(),
          usage: String.t()
        }

  defstruct [
    :id,
    :object,
    :application,
    :cancellation_reason,
    :client_secret,
    :created,
    :customer,
    :description,
    :last_setup_error,
    :livemode,
    :metadata,
    :next_action,
    :on_behalf_of,
    :payment_method,
    :payment_method_options,
    :payment_method_types,
    :status,
    :usage
  ]

  @plural_endpoint "setup_intents"

  @doc """
  Creates a SetupIntent object.
  All params are optional, so `params` now defaults to `%{}` (backward
  compatible; consistent with `list/2`).
  See the [Stripe docs](https://stripe.com/docs/api/setup_intents/create).
  """
  @spec create(params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:confirm) => boolean,
                 optional(:customer) => Stripe.id() | Stripe.Customer.t(),
                 optional(:description) => String.t(),
                 optional(:metadata) => map,
                 optional(:on_behalf_of) => Stripe.id() | Stripe.Account.t(),
                 optional(:payment_method) => Stripe.id(),
                 optional(:payment_method_options) => payment_method_options,
                 optional(:payment_method_types) => [String.t()],
                 optional(:return_url) => String.t(),
                 optional(:usage) => String.t()
               }
               | %{}
  def create(params \\ %{}, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint)
    |> put_params(params)
    |> put_method(:post)
    # Accept full structs for these params and send only their ids.
    |> cast_to_id([:on_behalf_of, :customer, :payment_method])
    |> make_request()
  end

  @doc """
  Retrieves the details of a SetupIntent that has previously been created.
  `params` defaults to `%{}` since its only key is optional.
  See the [Stripe docs](https://stripe.com/docs/api/setup_intents/retrieve).
  """
  @spec retrieve(Stripe.id() | t, params, Stripe.options()) ::
          {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:client_secret) => String.t()
               }
               | %{}
  def retrieve(id, params \\ %{}, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_params(params)
    |> put_method(:get)
    |> make_request()
  end

  @doc """
  Updates a SetupIntent object.
  `params` defaults to `%{}` since all keys are optional.
  See the [Stripe docs](https://stripe.com/docs/api/setup_intents/update).
  """
  @spec update(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:customer) => Stripe.id() | Stripe.Customer.t(),
                 optional(:description) => String.t(),
                 optional(:metadata) => map,
                 optional(:payment_method) => Stripe.id(),
                 optional(:payment_method_types) => [String.t()]
               }
               | %{}
  def update(id, params \\ %{}, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}")
    |> put_method(:post)
    |> put_params(params)
    |> cast_to_id([:customer, :payment_method])
    |> make_request()
  end

  @doc """
  Confirm that your customer intends to set up the current or provided payment method.
  `params` defaults to `%{}` since all keys are optional.
  See the [Stripe docs](https://stripe.com/docs/api/setup_intents/confirm).
  """
  @spec confirm(Stripe.id() | t, params, Stripe.options()) ::
          {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:payment_method) => Stripe.id(),
                 optional(:payment_method_options) => payment_method_options,
                 optional(:return_url) => String.t()
               }
               | %{}
  def confirm(id, params \\ %{}, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}" <> "/confirm")
    |> put_method(:post)
    |> put_params(params)
    |> cast_to_id([:payment_method])
    |> make_request()
  end

  @doc """
  A SetupIntent object can be canceled when it is in one of these statuses:
  `requires_payment_method`, `requires_capture`, `requires_confirmation`, `requires_action`.
  `params` defaults to `%{}` since its only key is optional.
  See the [Stripe docs](https://stripe.com/docs/api/setup_intents/cancel).
  """
  @spec cancel(Stripe.id() | t, params, Stripe.options()) :: {:ok, t} | {:error, Stripe.Error.t()}
        when params:
               %{
                 optional(:cancellation_reason) => String.t()
               }
               | %{}
  def cancel(id, params \\ %{}, opts \\ []) do
    new_request(opts)
    |> put_endpoint(@plural_endpoint <> "/#{get_id!(id)}" <> "/cancel")
    |> put_method(:post)
    |> put_params(params)
    |> make_request()
  end

  @doc """
  Returns a list of SetupIntents.
  See the [Stripe docs](https://stripe.com/docs/api/setup_intents/list).
  """
  @spec list(params, Stripe.options()) :: {:ok, Stripe.List.t(t)} | {:error, Stripe.Error.t()}
        when params: %{
               optional(:created) => Stripe.date_query(),
               optional(:customer) => Stripe.id() | Stripe.Customer.t(),
               optional(:ending_before) => t | Stripe.id(),
               optional(:limit) => 1..100,
               optional(:starting_after) => t | Stripe.id()
             }
  def list(params \\ %{}, opts \\ []) do
    new_request(opts)
    |> prefix_expansions()
    |> put_endpoint(@plural_endpoint)
    |> put_method(:get)
    |> put_params(params)
    |> cast_to_id([:customer, :ending_before, :starting_after])
    |> make_request()
  end
end
|
lib/stripe/core_resources/setup_intent.ex
| 0.730866
| 0.441312
|
setup_intent.ex
|
starcoder
|
defmodule Gateway.Blacklist do
@moduledoc """
Enables blacklisting of JWTs by their jti claim.
The entries representing the banned claims feature an expiration timestamp,
which prevents the blacklist from growing indefinitely.
In a distributed setting, the node that does the blacklisting spreads the
information via Phoenix' PubSub Server as Phoenix Presence information. The
other nodes react by tracking the same record themselves, which means that
for one record and n nodes there are n items in the Presence list. The
following properties are a result of this:
- Blacklisting can occur on/by any node.
- The blacklist is eventually consistent over all nodes.
- Any node can go down and come up at any time without affecting the
blacklist, except if all nodes go down at the same time (in that case
there is nothing to synchronize from -- the list is not stored on disk).
"""
# Provides `config/0` exposing the :default_expiry_hours setting.
use Gateway.Config, [:default_expiry_hours]
require Logger
alias Gateway.Blacklist.Serializer
@typep state_t :: map
@default_tracker_mod Gateway.Blacklist.Tracker
# NOTE(review): GenServer callbacks are implemented below, but no `use GenServer`
# is visible in this file -- presumably injected by `use Gateway.Config`; verify.
def start_link(tracker_mod \\ nil, opts \\ []) do
tracker_mod = if tracker_mod, do: tracker_mod, else: @default_tracker_mod
Logger.debug("Blacklist with tracker #{inspect tracker_mod}")
GenServer.start_link(
__MODULE__,
_state = %{tracker_mod: tracker_mod},
Keyword.merge([name: __MODULE__], opts))
end
@spec add_jti(pid | atom, String.t, nil | String.t | Timex.DateTime.t, nil | pid) :: pid
# Bodiless head declaring defaults shared by the clauses below.
def add_jti(server, jti, expiry \\ nil, listener \\ nil)
# No expiry given: blacklist until now + the configured default_expiry_hours.
def add_jti(server, jti, _expiry = nil, listener) do
conf = config()
default_expiry = Timex.now() |> Timex.shift(hours: conf.default_expiry_hours)
add_jti(server, jti, default_expiry, listener)
end
def add_jti(server, jti, expiry, listener) do
# Accepts either a datetime or its serialized string representation.
expires_at =
case Timex.is_valid? expiry do
true -> expiry
_ -> Serializer.deserialize_datetime!(expiry)
end
GenServer.cast(server, {:add, jti, expires_at, listener})
server # allow for chaining calls
end
@spec contains_jti?(pid, String.t) :: boolean
def contains_jti?(server, jti) do
GenServer.call(server, {:contains?, jti})
end
# callbacks
@spec init(state_t) :: {:ok, state_t}
def init(state) do
# Purge entries that expired while this server was not running.
send(self(), :expire_stale_records)
{:ok, state}
end
@spec handle_cast({:add, String.t, Timex.DateTime.t, nil | pid}, state_t) :: {:noreply, state_t}
def handle_cast({:add, jti, expiry, listener}, state) do
Logger.info("Blacklisting JWT with jti=#{jti}")
with {:ok, _phx_ref} <- state.tracker_mod.track(jti, expiry) do
# Schedule removal: +1s compensates for sub-second truncation of the diff,
# and the result is floored at 0 for already-expired entries.
remaining_ms = max(
(Timex.diff(expiry, Timex.now(), :seconds) + 1) * 1_000,
0
)
Process.send_after(self(), {:expire, jti, listener}, _timeout = remaining_ms)
end
{:noreply, state}
end
@spec handle_call({:contains?, String.t}, any, state_t) :: {:reply, boolean, state_t}
def handle_call({:contains?, jti}, _from, state) do
contains? = case state.tracker_mod.find(jti) do
{_jti, _meta} -> true
nil -> false
end
{:reply, contains?, state}
end
@spec handle_info({:expire, String.t, nil | pid}, state_t) :: {:noreply, state_t}
def handle_info({:expire, jti, listener}, state) do
expire(state.tracker_mod, jti, listener)
{:noreply, state}
end
@spec handle_info(:expire_stale_records, state_t) :: {:noreply, state_t}
def handle_info(:expire_stale_records, state) do
now = Timex.now()
# Remove every tracked entry whose expiry timestamp is already in the past.
state.tracker_mod.list()
|> Stream.filter(fn({_jti, meta}) -> meta.expiry |> Timex.before?(now) end)
|> Enum.each(fn({jti, _meta}) -> expire(state.tracker_mod, jti) end)
{:noreply, state}
end
# private functions
@spec expire(atom, String.t, nil | pid) :: any
defp expire(tracker_mod, jti, listener \\ nil) do
Logger.info("Removing JWT with jti=#{jti} from blacklist (entry expired)")
tracker_mod.untrack(jti)
# Optionally notify an interested process that the entry was removed.
if listener, do: send_expiration_notification(listener, jti)
end
@spec send_expiration_notification(pid, String.t) :: any
defp send_expiration_notification(listener, jti) do
send(listener, {:expired, jti})
Logger.debug("notified #{inspect listener} about expiration of JTI #{inspect jti}")
end
end
|
lib/gateway/blacklist.ex
| 0.793906
| 0.485539
|
blacklist.ex
|
starcoder
|
defmodule Datex.Time do
  @moduledoc """
  Simple Time which can be used in different formats including elixir format `~T[15:45:56]`.
  It includes different functions to compare time and provide relative time in friendly formats and
  functions which can format time in multiple ways user needs.
  """

  @doc """
  Get current time with or without timezone.

  It takes 1 optional argument i.e `time_zone name`.
  It returns `utc time` by default when no arguments is passed and time_zone
  specific time when time_zone is provided as `String`.

  ## Examples

      iex()> Datex.Time.now()
      "04:21 AM"

      iex()> Datex.Time.now("Pacific/Fakaofo")
      "06:24 PM"
  """
  def now(zone_name \\ :utc) do
    utc_time = Time.utc_now()

    case zone_name do
      :utc ->
        format_hour(utc_time)

      _ ->
        # Zone offset (in seconds) is resolved by Datex.Timezones.
        seconds = Datex.Timezones.get_seconds_from_timezone(zone_name)

        utc_time
        |> Time.add(seconds)
        |> format_hour()
    end
  end

  @doc """
  It adds `time` to a given specific time.

  It takes 4 arguments; the last 2 are optional i.e `unit` and `format`.
  To use a specific `format`, you need to apply `unit` as well.
  Unit defaults to `:second` and format to `:hour_12`.

  Arguments are:
  1. `time` as `elixir time` or standard time formats in `String`
  2. `time to add` as `Integer`
  3. `unit` as `atom` from `:hour`, `:minute`, `:second`, `:millisecond`
  4. `format` as `atom` from `:hour_12`, `:hour_24` and `:elixir`

  ## Examples

      iex()> Datex.Time.add(~T[16:56:56], 5)
      "04:57 PM"

      iex()> Datex.Time.add("4:56 PM", 5, :hour)
      "09:56 PM"

      iex()> Datex.Time.add("4:56:56", 5, :hour, :elixir)
      ~T[09:56:56.000000]

      iex()> Datex.Time.add("4:56:56", 5, :minute, :hour_12)
      "05:01 AM"
  """
  def add(time, added, unit \\ :second, format \\ :hour_12) do
    elixir_time = check_time(time)

    shifted =
      case unit do
        :hour -> Time.add(elixir_time, added * 3600)
        :minute -> Time.add(elixir_time, added * 60)
        :second -> Time.add(elixir_time, added)
        :millisecond -> Time.add(elixir_time, added, :millisecond)
        _ -> "Invalid Unit"
      end

    format_hour(shifted, format)
  end

  @doc """
  It provides difference between 2 times in multiple `units`. It returns difference as `Integer`.

  It takes 3 arguments; the last 2 are optional i.e `time2` and `unit`.
  `unit` defaults to `:second` and `time2` to the current time in utc.

  Arguments are:
  1, 2. `time` as `elixir time` or standard time formats in `String`
  3. `unit` as `atom` from `:hour`, `:minute`, `:second`, `:millisecond`

  ## Examples

      iex()> Datex.Time.difference("10:30 am")
      3480

      iex()> Datex.Time.difference("11:10 am", Datex.Time.now())
      20040

      iex()> Datex.Time.difference("11:10 am", "10:10:10", :minute)
      59
  """
  def difference(time1, time2 \\ now(), unit \\ :second) do
    t1 = check_time(time1)
    t2 = check_time(time2)
    diff = Time.diff(t1, t2)

    case unit do
      :second -> diff
      :millisecond -> diff * 1000
      :micro -> diff * 1_000_000
      :minute -> div(diff, 60)
      :hour -> div(diff, 3600)
    end
  end

  @doc """
  It compares two times and provides human friendly results.

  It takes 2 arguments, both time as `elixir time` or standard time formats in
  `String`. The second argument is optional and defaults to the current time in
  utc. A positive difference (time1 after time2) reads "later", a negative one
  reads "ago".

  ## Examples

      iex()> Datex.Time.compare("10:30 am")
      "55 minutes later"

      iex()> Datex.Time.compare("10:30 am", "15:34:25")
      "5 hours and 4 minutes ago"

      iex()> Datex.Time.compare(~T[23:16:45], "15:34:25")
      "7 hours and 42 minutes later"
  """
  def compare(time1, time2 \\ now()) do
    diff = difference(time1, time2)

    if diff >= 0 do
      humanize(diff, "later")
    else
      humanize(abs(diff), "ago")
    end
  end

  # Renders a non-negative difference in seconds as friendly text; `suffix` is
  # "later" or "ago". Fixes the previous gap at exactly 60 seconds (the old
  # `diff > 60 && diff < 120` left 60 unmatched, raising CondClauseError).
  defp humanize(diff, suffix) do
    minutes = diff |> rem(3600) |> div(60)

    cond do
      diff < 60 -> "Just now"
      diff < 120 -> "a minute #{suffix}"
      diff < 3600 -> "#{div(diff, 60)} minutes #{suffix}"
      diff < 7200 -> "an hour and #{minutes} minutes #{suffix}"
      true -> "#{div(diff, 3600)} hours and #{minutes} minutes #{suffix}"
    end
  end

  @doc """
  It converts time in given format.

  It takes 2 arguments. First is time as `elixir time` or standard time formats
  in `String`. Second argument is a format which could be `HH:MM`, `HH:MM:SS`,
  `HH:MM:SS:uS`, `12 HOUR FORMAT` or `elixir` as `String`.

  ## Examples

      iex()> Datex.Time.format_time(~T[23:16:45], "HH:MM:SS")
      "23:16:45"

      iex()> Datex.Time.format_time("9 pm", "HH:MM:SS")
      "21:00:00"

      iex()> Datex.Time.format_time("23:12:34", "12 HOUR FORMAT")
      "11:12 PM"

      iex()> Datex.Time.format_time("23:12:34", "elixir")
      ~T[23:12:34.000000]
  """
  def format_time(time, format) do
    elixir_time = check_time(time)
    {hrs, min, sec, micro} = {elixir_time.hour, elixir_time.minute, elixir_time.second, elixir_time.microsecond}

    case format do
      "HH:MM" -> "#{left_pad(hrs)}:#{left_pad(min)}"
      "HH:MM:SS" -> "#{left_pad(hrs)}:#{left_pad(min)}:#{left_pad(sec)}"
      "HH:MM:SS:uS" -> "#{left_pad(hrs)}:#{left_pad(min)}:#{left_pad(sec)}:#{micro}"
      "12 HOUR FORMAT" -> format_hour(elixir_time, :hour_12)
      "elixir" -> elixir_time
      _ -> "Invalid Time Format"
    end
  end

  # Parses time strings such as "4:56 PM", "9 pm" or "23:12:34" into %Time{}.
  defp convert_to_elixir_time(time) do
    cond do
      String.match?(time, ~r/AM/i) ->
        [hrs | rest] = get_time_array(time)
        # 12 AM is midnight.
        hours = if hrs == 12, do: 0, else: hrs
        parse_time([hours | rest])

      String.match?(time, ~r/PM/i) ->
        [hrs | rest] = get_time_array(time)
        # 12 PM stays 12; any other PM hour shifts by 12.
        hours = if hrs == 12, do: hrs, else: hrs + 12
        parse_time([hours | rest])

      true ->
        list =
          time
          |> String.trim()
          |> String.split([":", ": ", " : ", " "])
          |> Enum.map(&String.to_integer/1)

        if Enum.count(list) > 1, do: parse_time(list), else: "Invalid Time Format"
    end
  end

  # Formats a %Time{} as "HH:MM AM/PM", "HH:MM:SS" or passes it through.
  defp format_hour(time, format \\ :hour_12) do
    {hrs, min, sec} = {time.hour, time.minute, time.second}

    case format do
      :hour_12 ->
        cond do
          hrs == 0 -> "12:#{left_pad(min)} AM"
          hrs == 12 -> "12:#{left_pad(min)} PM"
          hrs > 12 -> "#{left_pad(hrs - 12)}:#{left_pad(min)} PM"
          true -> "#{left_pad(hrs)}:#{left_pad(min)} AM"
        end

      :hour_24 ->
        "#{left_pad(hrs)}:#{left_pad(min)}:#{left_pad(sec)}"

      :elixir ->
        time

      _ ->
        "Invalid time format"
    end
  end

  # Normalises input into an elixir %Time{}: time structs pass through, strings
  # are parsed. Uses is_binary/1 instead of String.valid?/1 because the latter
  # is only defined for binaries and would raise on a %Time{} argument.
  defp check_time(time) do
    cond do
      not is_binary(time) and time.hour >= 0 ->
        time

      is_binary(time) and (String.match?(time, ~r/:/) or String.match?(time, ~r/ /)) ->
        convert_to_elixir_time(time)

      true ->
        "Invalid Time format"
    end
  end

  # Builds a %Time{} from [h], [h, m], [h, m, s] or [h, m, s, us] integer lists.
  # Minute/second ranges tightened to 0..59 (60 previously slipped through the
  # guard only to fail inside Time.new/4).
  defp parse_time(time) do
    case time do
      [hrs, min, sec, micro] when hrs in 0..23 and min in 0..59 and sec in 0..59 -> new_time(hrs, min, sec, micro)
      [hrs, min, sec] when hrs in 0..23 and min in 0..59 and sec in 0..59 -> new_time(hrs, min, sec)
      [hrs, min] when hrs in 0..23 and min in 0..59 -> new_time(hrs, min)
      [hrs] when hrs in 0..23 -> new_time(hrs)
    end
  end

  defp new_time(hrs, min \\ 0, sec \\ 0, micro \\ 0) do
    {:ok, time} = Time.new(hrs, min, sec, micro)
    time
  end

  # Splits a time string, drops the trailing AM/PM token and returns integers.
  defp get_time_array(time) do
    parts = time |> String.trim() |> String.split([":", ": ", " : ", " "])
    meridiem = List.last(parts)
    (parts -- [meridiem]) |> Enum.map(&String.to_integer/1)
  end

  # Zero-pads single-digit components; larger numbers are returned unchanged
  # (callers interpolate the result into a string either way).
  defp left_pad(number) do
    if number < 10, do: "0#{number}", else: number
  end
end
|
lib/datex/time/time.ex
| 0.887534
| 0.752331
|
time.ex
|
starcoder
|
defmodule Posexional.Row do
@moduledoc """
this module represent a row in a positional file
"""
alias Posexional.{Field, Row}
alias Posexional.Protocol.{FieldLength, FieldName, FieldRead, FieldSize, FieldWrite}
# Row definition:
#   name          - atom identifying the row type
#   fields        - ordered field structs making up the row
#   separator     - string joined between fields on write and skipped on read
#   row_guesser   - :never by default; not used in the code visible in this
#                   file -- presumably drives row matching elsewhere; verify
#   struct_module - optional module; when set, read/2 builds that struct
#                   instead of returning a keyword list
defstruct name: nil,
fields: [],
separator: "",
row_guesser: :never,
struct_module: nil
# Builds a new row named `name` containing `fields`; any other struct key
# (e.g. :separator, :struct_module) may be supplied through `opts`.
@spec new(atom, [struct], Keyword.t()) :: %Row{}
def new(name, fields, opts \\ []) do
  attrs = Keyword.merge([name: name, fields: fields], opts)
  struct!(Row, attrs)
end
# Appends a single field struct to the end of the row's field list.
@spec add_field(%Row{}, struct) :: %Row{}
def add_field(%Row{fields: existing} = row, field) do
  %{row | fields: existing ++ [field]}
end
# Appends every field in `fields` to the row, preserving their order.
@spec add_fields(%Row{}, []) :: %Row{}
def add_fields(row, fields) do
  Enum.reduce(fields, row, &add_field(&2, &1))
end
# Wires each ProgressiveNumber field to its counter pid, looked up by field
# name; every other field kind passes through untouched. The mapping stays a
# lazy Stream, exactly like the original implementation.
@spec manage_counters(%Row{}, [{atom, pid}]) :: %Row{}
def manage_counters(%Row{fields: fields} = row, counters) do
  wired =
    Stream.map(fields, fn
      %Field.ProgressiveNumber{name: name} = field ->
        %{field | counter: Keyword.get(counters, name)}

      field ->
        field
    end)

  %{row | fields: wired}
end
@doc """
outputs a row
## Examples
iex> Posexional.Row.new(:row_test, []) |> Posexional.Row.write([test: "test"])
{:ok, ""}
iex> Posexional.Row.new(:row_test, [Posexional.Field.Value.new(:test1, 5), Posexional.Field.Value.new(:test2, 10)])
...> |> Posexional.Row.write([test1: "test1", test2: "test2"])
{:ok, "test1test2 "}
iex> Posexional.Row.new(:row_test, [Posexional.Field.Value.new(:test1, 5), Posexional.Field.Value.new(:test2, 10)])
...> |> Posexional.Row.write([test1: "test1", non_existent: "test2"])
{:ok, "test1 "}
iex> Posexional.Row.new(:row_test, [Posexional.Field.Value.new(:test1, 6)])
...> |> Posexional.Row.write([test1: "test1", not_configured: "test2"])
{:ok, "test1 "}
iex> Posexional.Row.new(:row_test, [Posexional.Field.Value.new(:test1, 5)])
...> |> Posexional.Row.write([not_configured: "test2", another: "test3"])
{:ok, " "}
iex> Posexional.Row.new(:row_test, [Posexional.Field.Empty.new(5)])
...> |> Posexional.Row.write([])
{:ok, " "}
"""
@spec write(%Row{}, Keyword.t()) :: {atom, binary}
def write(%Row{fields: []}, _), do: {:ok, ""}
def write(row = %Row{separator: separator}, values) do
result = do_output(row, values)
if Enum.all?(result, &valid?/1) do
{:ok,
result
|> Enum.map(&elem(&1, 1))
|> Enum.join(separator)}
else
{:error, error_message(result)}
end
end
defp do_output(%Row{fields: fields}, values) do
fields
|> Enum.map(fn field ->
{field, Keyword.get(values, FieldName.name(field), nil)}
end)
|> Enum.map(fn {field, value} ->
{:ok, FieldWrite.write(field, value)}
end)
end
defp error?({:ok, _}), do: false
defp error?({:error, _}), do: true
defp valid?({:ok, _}), do: true
defp valid?({:error, _}), do: false
defp error_message(results) do
results
|> Enum.filter(&error?/1)
|> do_error_message
end
defp do_error_message([error]) do
"error on the field #{elem(error, 1)}"
end
defp do_error_message(errors) do
field_names =
errors
|> Enum.map(&elem(&1, 1))
|> Enum.join(", ")
"errors on fields #{field_names}"
end
@doc """
read a positional file row and convert it back to a keyword list of values
"""
@spec read(%Row{}, binary) :: Keyword.t()
def read(%Row{name: name, fields: fields, separator: separator, struct_module: struct_module}, content) do
res =
fields
|> Enum.reduce({[], content}, fn field, {list, content} ->
field_content = String.slice(content, 0, FieldSize.size(field))
{list ++ [{FieldName.name(field), FieldRead.read(field, field_content)}],
String.slice(content, (FieldSize.size(field) + String.length(separator))..-1)}
end)
|> elem(0)
|> Enum.filter(fn {k, _} -> not (k in [:empty_field]) end)
if is_nil(struct_module),
do: [{name, res}],
else: [{name, struct(struct_module, res)}]
end
@doc """
finds a field in the row by its name
## Examples
iex> Posexional.Row.new(:row_test, []) |> Posexional.Row.find_field(:test)
nil
iex> Posexional.Row.new(:row_test, [Posexional.Field.Value.new(:test, 5)]) |> Posexional.Row.find_field(:test)
Posexional.Field.Value.new(:test, 5)
iex> Posexional.Row.new(:row_test, [Posexional.Field.Value.new(:test, 5), Posexional.Field.Value.new(:test2, 5)])
...> |> Posexional.Row.find_field(:test2)
Posexional.Field.Value.new(:test2, 5)
"""
@spec find_field(%Row{}, atom) :: %Field.Value{}
def find_field(%Row{fields: fields}, name) do
Enum.find(fields, nil, fn %Field.Value{name: field_name} -> field_name == name end)
end
@doc """
calculate the row total length based on the passed fields
## Examples
iex> Posexional.Row.new(:row_test, [])
...> |> Posexional.Row.length
0
iex> Posexional.Row.new(:row_test, [Posexional.Field.Value.new(:test1, 10), Posexional.Field.Value.new(:test2, 20)])
...> |> Posexional.Row.length
30
"""
@spec length(%Row{}) :: integer
def length(%Row{fields: []}), do: 0
def length(%Row{fields: fields}), do: do_length(0, fields)
defp do_length(acc, []), do: acc
defp do_length(acc, [field | other_fields]) do
do_length(acc + FieldLength.length(field), other_fields)
end
@doc """
Given a row and a field name calculate the field offset
## Examples
iex> Posexional.Row.new(:test, [Posexional.Field.Value.new(:test1, 10), Posexional.Field.Value.new(:test2, 20)])
...> |> Posexional.Row.offset(:test1)
1
iex> Posexional.Row.new(:test, [Posexional.Field.Value.new(:test1, 10), Posexional.Field.Value.new(:test2, 20)])
...> |> Posexional.Row.offset(:test2)
11
iex> Posexional.Row.new(:test, [Posexional.Field.Value.new(:test1, 10), Posexional.Field.Value.new(:test2, 20)])
...> |> Posexional.Row.offset(:test_not_existent)
** (ArgumentError) the field test_not_existent doesn't exists
iex> Posexional.Row.new(:test, [])
...> |> Posexional.Row.offset(:test)
nil
"""
@spec offset(%Row{}, atom) :: integer
def offset(%Row{fields: []}, _), do: nil
def offset(%Row{fields: fields}, field_name), do: do_offset(1, fields, field_name)
defp do_offset(_, [], field_name), do: raise(ArgumentError, "the field #{field_name} doesn't exists")
defp do_offset(acc, :ok, _), do: acc
defp do_offset(acc, [field | other_fields], field_name) do
if field_name == FieldName.name(field) do
do_offset(acc, :ok, field_name)
else
do_offset(acc + FieldLength.length(field), other_fields, field_name)
end
end
@doc """
merge fields from another row
"""
@spec fields_from(%Row{}, %Row{}) :: %Row{}
def fields_from(to, %Row{fields: other_fields}) do
%{to | fields: to.fields ++ other_fields}
end
end
|
lib/posexional/row.ex
| 0.805517
| 0.49823
|
row.ex
|
starcoder
|
defmodule Wand.CLI.Commands.Init do
use Wand.CLI.Command
alias Wand.CLI.Display
alias WandCore.WandFile
alias WandCore.WandFile.Dependency
alias WandCore.Interfaces.File
@moduledoc """
# Init
Convert an elixir project to use wand for dependencies.
### Usage
**wand** init [path] [flags]
## Examples
```
wand init
wand init /path/to/project
wand init --overwrite
```
## Options
By default, wand init will refuse to overwrite an existing wand.json file. It will also refuse to install the wand.core task without confirmation. This is controllable via flags.
```
--overwrite Ignore the presence of an existing wand.json file, and create a new one
```
```
"""
@doc false
def help(:banner), do: Display.print(@moduledoc)
@doc false
def help(:verbose) do
"""
wand init walks through the current list of dependencies for a project, and transfers it to wand.json.
Additionally, it will attempt to modify the mix.exs file to use the wand.core task to load the modules. If that fails, you need to manually edit your mix.exs file.
The task attempts to be non-destructive. It will not create a new wand.json file if one exists, unless the overwrite flag is present.
## Options
By default, wand init will refuse to overwrite an existing wand.json file. This is controllable via flags.
```
--overwrite Ignore the presence of an existing wand.json file, and create a new one
```
"""
|> Display.print()
end
# NOTE(review): this message names --force as valid, but validate/1 only
# parses --overwrite — confirm whether --force was removed or never added.
@doc false
def help({:invalid_flag, flag}) do
"""
#{flag} is invalid.
Valid flags are --overwrite and --force
See wand help init --verbose for more information
"""
|> Display.print()
end
def options() do
[require_core: true]
end
@doc false
def validate(args) do
flags = [
overwrite: :boolean
]
# The head of the positional args is discarded — presumably the "init"
# token itself; the remainder is the optional path.
{switches, [_ | commands], errors} = OptionParser.parse(args, strict: flags)
case Wand.CLI.Command.parse_errors(errors) do
:ok -> get_path(commands, switches)
error -> error
end
end
# Builds a fresh WandFile, fills it with the project's current deps, and
# returns a Result for the caller to persist.
# NOTE(review): the second parameter pattern %{} matches ANY map, not just
# an empty one — confirm a stricter match was not intended.
@doc false
def execute({path, switches}, %{}) do
file = %WandFile{}
with :ok <- can_write?(path, switches),
{:ok, deps} <- get_dependencies(path),
{:ok, file} <- add_dependencies(file, deps) do
message = """
Successfully initialized wand.json and copied your dependencies to it.
Type wand add [package] to add new packages, or wand upgrade to upgrade them
"""
{:ok, %Result{wand_file: file, wand_path: path, message: message}}
else
error -> error
end
end
# Post-save hook: rewrites mix.exs so deps are loaded through wand.core.
def after_save({path, _switches}) do
update_mix_file(path)
end
@doc false
def handle_error(:file_already_exists, path) do
"""
# Error
File already exists
The file #{path} already exists.
If you want to override it, use the --overwrite flag
"""
end
@doc false
def handle_error(:wand_core_api_error, _reason) do
"""
# Error
Unable to read existing deps
mix wand.init did not return successfully.
Usually that means your mix.exs file is invalid. Please make sure your existing deps are correct, and then try again.
"""
end
@doc false
def handle_error(:mix_file_not_updated, nil) do
"""
# Partial Success
wand.json was successfully created with your dependencies, however your mix.exs file could not be updated to use it. To complete the process, you need to change your deps() in mix.exs to the following:
deps: Mix.Tasks.WandCore.Deps.run([])
"""
end
# No path argument: default to wand.json in the current directory.
defp get_path([], switches), do: {:ok, {"wand.json", switches}}
# A path argument may point at the file itself or at its directory.
defp get_path([path], switches) do
path =
case Path.basename(path) do
"wand.json" -> path
_ -> Path.join(path, "wand.json")
end
{:ok, {path, switches}}
end
defp get_path(_, _), do: {:error, :wrong_command}
# Overwrite flag short-circuits the existence check.
defp can_write?(path, switches) do
cond do
Keyword.get(switches, :overwrite) -> :ok
File.impl().exists?(path) -> {:error, :file_already_exists, path}
true -> :ok
end
end
# Shells out (via Wand.CLI.Mix) to read the project's current dep list.
defp get_dependencies(path) do
deps =
Path.dirname(path)
|> Wand.CLI.Mix.get_deps()
case deps do
{:ok, deps} ->
Enum.map(deps, &convert_dependency/1)
|> validate_dependencies()
{:error, reason} ->
{:error, :wand_core_api_error, reason}
end
end
# Fails the whole batch on the first {:error, _} conversion result.
defp validate_dependencies(dependencies) do
case Enum.find(dependencies, &(elem(&1, 0) == :error)) do
nil ->
dependencies = Enum.unzip(dependencies) |> elem(1)
{:ok, dependencies}
{:error, error} ->
{:error, :wand_core_api_error, error}
end
end
# NOTE(review): the reducer clause only matches {:ok, file}; if WandFile.add/2
# ever returns an error tuple, the next iteration raises FunctionClauseError
# instead of propagating the error — confirm WandFile.add/2 cannot fail here.
defp add_dependencies(file, dependencies) do
Enum.reduce(dependencies, {:ok, file}, fn dependency, {:ok, file} ->
WandFile.add(file, dependency)
end)
end
# Two-element form with opts only: normalize to the three-element form with
# a nil requirement.
defp convert_dependency([name, opts]) when is_list(opts) do
opts = WandCore.Opts.decode(opts)
convert_dependency([name, nil, opts])
end
defp convert_dependency([name, requirement]), do: convert_dependency([name, requirement, []])
defp convert_dependency([name, requirement, opts]) do
opts =
WandCore.Opts.decode(opts)
|> Enum.into(%{}, fn [key, val] -> {String.to_atom(key), val} end)
{:ok, %Dependency{name: name, requirement: requirement, opts: opts}}
end
defp convert_dependency(_), do: {:error, :invalid_dependency}
# Best-effort rewrite of mix.exs: only succeeds when the file exists and
# contains the literal "deps: deps()" to replace.
defp update_mix_file(path) do
mix_file =
Path.dirname(path)
|> Path.join("mix.exs")
with true <- File.impl().exists?(mix_file),
{:ok, contents} <- File.impl().read(mix_file),
true <- String.contains?(contents, "deps: deps()"),
new_contents <-
String.replace(contents, "deps: deps()", "deps: Mix.Tasks.WandCore.Deps.run([])"),
:ok <- File.impl().write(mix_file, new_contents) do
:ok
else
_ -> {:error, :mix_file_not_updated, nil}
end
end
end
|
lib/cli/commands/init.ex
| 0.511229
| 0.660658
|
init.ex
|
starcoder
|
defmodule AlertProcessor.DayType do
@moduledoc """
Determine the type (weekday vs weekend) of dates and figure out future instances of a type of day.
"""
# NOTE(review): `:boolean` here is the literal atom type, not the built-in
# boolean() type — presumably `boolean` was intended; confirm.
@type day_type_test_function :: (Date.t() | tuple -> :boolean)
@type next_day_of_type_function :: (Date.t() | nil -> Date.t())
@doc """
Determine whether a date is a weekday (Monday–Friday).
iex> ~D[2015-07-06] |> AlertProcessor.DayType.weekday?() # Monday
true
iex> ~D[2015-07-05] |> AlertProcessor.DayType.weekday?() # Sunday
false
iex> {2015, 7, 6} |> AlertProcessor.DayType.weekday?() # Monday
true
iex> {2015, 7, 5} |> AlertProcessor.DayType.weekday?() # Sunday
false
"""
@spec weekday?(Date.t() | tuple) :: boolean
# day_of_week/1 numbers Monday..Sunday as 1..7 (per the doctests above),
# so anything below 6 is Monday–Friday.
def weekday?(date), do: Calendar.Date.day_of_week(date) < 6
@doc """
Determine whether a date is a weekend day (Saturday or Sunday).
iex> ~D[2015-07-06] |> AlertProcessor.DayType.weekend?() # Monday
false
iex> ~D[2015-07-05] |> AlertProcessor.DayType.weekend?() # Sunday
true
iex> {2015, 7, 6} |> AlertProcessor.DayType.weekend?() # Monday
false
iex> {2015, 7, 5} |> AlertProcessor.DayType.weekend?() # Sunday
true
"""
@spec weekend?(Date.t() | tuple) :: boolean
def weekend?(date), do: not weekday?(date)
@doc """
Find the next weekday day (Monday–Friday) starting with either today or a given date (inclusive).
iex> AlertProcessor.DayType.next_weekday(~D[2015-07-06])
~D[2015-07-06]
iex> AlertProcessor.DayType.next_weekday(~D[2015-07-04])
~D[2015-07-06]
"""
@spec next_weekday(Date.t() | nil) :: Date.t()
def next_weekday(date \\ today()), do: next_day_of_type(date, &weekday?/1)
@doc """
Find the next weekend day (Saturday or Sunday) starting with either today or a given date (inclusive).
iex> AlertProcessor.DayType.next_weekend_day(~D[2015-07-05])
~D[2015-07-05]
iex> AlertProcessor.DayType.next_weekend_day(~D[2015-07-06])
~D[2015-07-11]
"""
@spec next_weekend_day(Date.t() | nil) :: Date.t()
def next_weekend_day(date \\ today()), do: next_day_of_type(date, &weekend?/1)
@doc """
Find the next Saturday starting with either today or a given date (inclusive).
iex> AlertProcessor.DayType.next_saturday(~D[2018-08-25])
~D[2018-08-25]
iex> AlertProcessor.DayType.next_saturday(~D[2018-08-26])
~D[2018-09-01]
"""
@spec next_saturday(Date.t() | nil) :: Date.t()
def next_saturday(date \\ today()), do: next_day_of_type(date, &Calendar.Date.saturday?/1)
@doc """
Takes the first `amount` weekdays (Monday–Friday) starting with either today or a given date (inclusive).
iex> AlertProcessor.DayType.take_weekdays(4, ~D[2015-07-09])
[~D[2015-07-09], ~D[2015-07-10], ~D[2015-07-13], ~D[2015-07-14]]
iex> AlertProcessor.DayType.take_weekdays(2, ~D[2018-08-25])
[~D[2018-08-27], ~D[2018-08-28]]
"""
@spec take_weekdays(non_neg_integer, Date.t() | nil) :: [Date.t()]
def take_weekdays(amount, date \\ today()),
do: take_days_using_next_function(amount, date, &next_weekday/1)
@doc """
Takes the first `amount` weekend day (Saturday or Sunday) starting with either today or a given date (inclusive).
iex> AlertProcessor.DayType.take_weekend_days(4, ~D[2015-07-05])
[~D[2015-07-05], ~D[2015-07-11], ~D[2015-07-12], ~D[2015-07-18]]
iex> AlertProcessor.DayType.take_weekend_days(3, ~D[2018-08-23])
[~D[2018-08-25], ~D[2018-08-26], ~D[2018-09-01]]
"""
@spec take_weekend_days(non_neg_integer, Date.t() | nil) :: [Date.t()]
def take_weekend_days(amount, date \\ today()),
do: take_days_using_next_function(amount, date, &next_weekend_day/1)
@doc """
Takes the first `amount` weekend day (Saturday or Sunday) starting with either today or a given date (inclusive).
iex> AlertProcessor.DayType.take_saturdays(3, ~D[2015-07-04])
[~D[2015-07-04], ~D[2015-07-11], ~D[2015-07-18]]
iex> AlertProcessor.DayType.take_saturdays(3, ~D[2018-08-23])
[~D[2018-08-25], ~D[2018-09-01], ~D[2018-09-08]]
"""
@spec take_saturdays(non_neg_integer, Date.t() | nil) :: [Date.t()]
def take_saturdays(amount, date \\ today()),
do: take_days_using_next_function(amount, date, &next_saturday/1)
# "Today" is fixed to the America/New_York timezone.
@spec today() :: Date.t()
defp today, do: Calendar.Date.today!("America/New_York")
# Successor date; asserts Calendar.Date.add/2 succeeds.
@spec next_day(Date.t()) :: Date.t()
defp next_day(date) do
{:ok, next_date} = Calendar.Date.add(date, 1)
next_date
end
# Lazily enumerates date, date+1, date+2, ... and returns the first date
# for which the test function is truthy (start date included).
@spec next_day_of_type(Date.t(), day_type_test_function) :: Date.t()
defp next_day_of_type(date, day_type_test_function) do
date
|> Stream.unfold(&{&1, next_day(&1)})
|> Enum.find(day_type_test_function)
end
# Seeds the stream with the first matching day, then repeatedly jumps from
# "day after current match" to the next match, taking `amount` results.
@spec take_days_using_next_function(non_neg_integer, Date.t(), next_day_of_type_function) :: [
Date.t()
]
defp take_days_using_next_function(amount, date, next_function) do
date
|> next_function.()
|> Stream.unfold(&{&1, &1 |> next_day() |> next_function.()})
|> Enum.take(amount)
end
end
|
apps/alert_processor/lib/day_type.ex
| 0.745584
| 0.577108
|
day_type.ex
|
starcoder
|
defmodule Plaid.Investments do
@moduledoc """
[Plaid Investments APIs](https://plaid.com/docs/api/products/#investments)
"""
alias Plaid.Castable
defmodule GetHoldingsResponse do
@moduledoc """
[Plaid API /investments/holdings/get response schema.](https://plaid.com/docs/api/products/#investmentsholdingsget)
"""
@behaviour Castable
alias Plaid.Account
alias Plaid.Investments.{Holding, Security}
alias Plaid.Item
@type t :: %__MODULE__{
accounts: [Account.t()],
holdings: [Holding.t()],
securities: [Security.t()],
item: Item.t(),
request_id: String.t()
}
defstruct [
:accounts,
:holdings,
:securities,
:item,
:request_id
]
# Castable callback: converts the decoded JSON map (string keys) into this
# struct, delegating nested structures to their own Castable modules.
@impl true
def cast(generic_map) do
%__MODULE__{
accounts: Castable.cast_list(Account, generic_map["accounts"]),
holdings: Castable.cast_list(Holding, generic_map["holdings"]),
securities: Castable.cast_list(Security, generic_map["securities"]),
item: Castable.cast(Item, generic_map["item"]),
request_id: generic_map["request_id"]
}
end
end
@doc """
Get user-authorized stock position data for investment-type accounts.
Does a `POST /investments/holdings/get` call to retrieve
invesment holdings associated with an access_token's item.
Params:
* `access_token` - Token to fetch investment holdings for.
Options:
* `:account_ids` - Specific account ids to fetch investment holdings for.
## Examples
Investments.get_holdings("access-sandbox-123xxx", client_id: "123", secret: "abc")
{:ok, %Investments.GetHoldingsResponse{}}
"""
@spec get_holdings(String.t(), options, Plaid.config()) ::
{:ok, GetHoldingsResponse.t()} | {:error, Plaid.Error.t()}
when options: %{optional(:account_ids) => [String.t()]}
def get_holdings(access_token, options \\ %{}, config) do
# Map.take/2 whitelists the supported options; unknown keys are dropped.
options_payload = Map.take(options, [:account_ids])
payload = %{access_token: access_token, options: options_payload}
Plaid.Client.call(
"/investments/holdings/get",
payload,
GetHoldingsResponse,
config
)
end
defmodule GetTransactionsResponse do
@moduledoc """
[Plaid API /investments/transactions/get response schema.](https://plaid.com/docs/api/products/#investmentstransactionsget)
"""
@behaviour Castable
alias Plaid.Account
alias Plaid.Investments.{Security, Transaction}
alias Plaid.Item
@type t :: %__MODULE__{
item: Item.t(),
accounts: [Account.t()],
securities: [Security.t()],
investment_transactions: [Transaction.t()],
total_investment_transactions: integer(),
request_id: String.t()
}
defstruct [
:item,
:accounts,
:securities,
:investment_transactions,
:total_investment_transactions,
:request_id
]
# Castable callback: same pattern as GetHoldingsResponse.cast/1.
@impl true
def cast(generic_map) do
%__MODULE__{
item: Castable.cast(Item, generic_map["item"]),
accounts: Castable.cast_list(Account, generic_map["accounts"]),
securities: Castable.cast_list(Security, generic_map["securities"]),
investment_transactions:
Castable.cast_list(Transaction, generic_map["investment_transactions"]),
total_investment_transactions: generic_map["total_investment_transactions"],
request_id: generic_map["request_id"]
}
end
end
@doc """
Get information about all available investment transactions.
Does a `POST /investments/transactions/get` call which gives you high level
account data along with investment transactions and associated securities
from all investment accounts contained in the access_token's item.
Params:
* `access_token` - Token to fetch investment holdings for.
* `start_date` - Start of query for investment transactions.
* `end_date` - End of query for investment transactions.
Options:
* `:account_ids` - Specific account ids to fetch investment holdings for.
* `:count` - Amount of investment transactions to pull (optional).
* `:offset` - Offset to start pulling investment transactions (optional).
## Examples
Investments.get_transactions("access-sandbox-123xxx", "2020-01-01", "2020-01-31", client_id: "123", secret: "abc")
{:ok, %Investments.GetTransactionsResponse{}}
"""
@spec get_transactions(String.t(), String.t(), String.t(), options, Plaid.config()) ::
{:ok, GetTransactionsResponse.t()} | {:error, Plaid.Error.t()}
when options: %{
optional(:account_ids) => [String.t()],
optional(:count) => integer(),
optional(:offset) => integer()
}
def get_transactions(access_token, start_date, end_date, options \\ %{}, config) do
# Only the documented option keys are forwarded to the API.
options_payload = Map.take(options, [:account_ids, :count, :offset])
payload = %{
access_token: access_token,
start_date: start_date,
end_date: end_date,
options: options_payload
}
Plaid.Client.call(
"/investments/transactions/get",
payload,
GetTransactionsResponse,
config
)
end
end
|
lib/plaid/investments.ex
| 0.876601
| 0.445288
|
investments.ex
|
starcoder
|
defmodule DataLogger.Destination.Controller do
@moduledoc """
A worker process, created and supervised per destination and per `topic`/sub-destination.
The first time `DataLogger.log/2` is called with a given `topic` `DataLogger.Destination.Controller`s for this `topic` for
every configured destination are created. They are supervised by a new supervisor, created for the given `topic`.
A `DataLogger.Destination.Controller` process is registered in a pub-sub registry with its `topic`, so when data is sent to the topic,
every such process is notified and data is *casted* to it in the form of `{:log_data, topic, data}`.
If the `destination` of a `DataLogger.Destination.Controller` is configured with `send_async: true`, the process
will be creating a task per *cast* and will be responsible for invoking the `on_error/4`/`on_success/4` callbacks of the
`destination` when the task finishes.
It will also react when the task is `:DOWN`.
"""
use GenServer
alias __MODULE__, as: Mod
require Logger
@doc false
def start_link(topic: topic, name: name, destination: %{module: _, options: _} = destination) do
GenServer.start_link(Mod, Map.put_new(destination, :topic, topic), name: name)
end
# Subscribes to the topic in the pub-sub registry, lets the destination
# module transform its options via initialize/1, and — only in async mode —
# adds a :tasks map used to track in-flight Task refs.
@impl true
def init(%{topic: topic, options: options, module: destination} = state) do
Registry.register(DataLogger.PubSub, topic, nil)
initialized_state = %{state | options: destination.initialize(options)}
initialized_state.options
|> Map.get(:send_async, false)
|> if(
do: {:ok, Map.put_new(initialized_state, :tasks, %{})},
else: {:ok, initialized_state}
)
end
@impl true
def handle_cast({:log_data, topic, data}, %{topic: topic, options: options} = state) do
{:noreply, log_data(options, topic, data, state)}
end
# Completion message from Task.Supervisor.async_nolink: {ref, {data, result}}.
@impl true
def handle_info({_ref, {data, result}}, %{topic: topic} = state) do
{:noreply, handle_send_data_result(result, topic, data, state)}
end
# Task process went down; drop its ref from the tracking map.
# NOTE(review): this clause matches on %{tasks: _}, so a :DOWN arriving in
# sync mode (state without :tasks) would not match — presumably impossible
# since sync mode spawns no tasks; confirm.
@impl true
def handle_info({:DOWN, monitored_ref, :process, task_pid, _}, %{tasks: tasks} = state) do
updated_tasks =
tasks
|> Map.get(monitored_ref)
|> update_tasks(tasks, monitored_ref, task_pid)
{:noreply, %{state | tasks: updated_tasks}}
end
# Swallows stray :ssl_closed messages that Hackney can leak to the owner.
@impl true
def handle_info(
{:ssl_closed, {:sslsocket, {:gen_tcp, _port, :tls_connection, :undefined}, _}},
state
) do
Logger.warn(
"For destinations working with Hackney, handled a runaway connection closed message."
)
{:noreply, state}
end
# Swallows stray Mojito connection-closed messages.
@impl true
def handle_info({:mojito_response, _, {:error, :closed}}, state) do
Logger.warn(
"For destinations working with Mojito, handled a runaway connection closed message."
)
{:noreply, state}
end
# Async path: run send_data/3 in a supervised task, tracking its ref->pid.
# NOTE(review): the rescue converts exceptions to {:error, e}, but exits and
# throws are not caught — a task exit is only observed via :DOWN; confirm
# that is the intended error path.
defp log_data(
%{send_async: true},
topic,
data,
%{module: destination, options: options, tasks: tasks} = state
) do
action = fn ->
try do
result = destination.send_data(topic, data, options)
{data, result}
rescue
e -> {data, {:error, e}}
end
end
%Task{pid: pid, ref: ref} = Task.Supervisor.async_nolink(DataLogger.TaskSupervisor, action)
%{state | tasks: Map.put_new(tasks, ref, pid)}
end
# Sync path: call send_data/3 inline and dispatch its result immediately.
defp log_data(
_,
topic,
data,
%{module: destination, options: options} = state
) do
destination.send_data(topic, data, options)
|> handle_send_data_result(topic, data, state)
end
# NOTE(review): the only clause requires the looked-up value to be a pid; if
# a :DOWN ever arrives for a ref not present in `tasks`, Map.get/2 yields nil
# and this raises FunctionClauseError — confirm refs are always tracked.
defp update_tasks(pid, tasks, ref, pid) when is_pid(pid) do
Map.delete(tasks, ref)
end
# Dispatches the destination's send_data result to its on_success/on_error
# callbacks; 3-tuple results additionally replace the stored options.
defp handle_send_data_result(
result,
topic,
data,
%{module: destination, options: options} = state
) do
case result do
:ok ->
destination.on_success(:ok, topic, data, options)
state
{:ok, reason} ->
destination.on_success(reason, topic, data, options)
state
{:error, reason} ->
destination.on_error(reason, topic, data, options)
state
{:ok, reason, updated_options} ->
destination.on_success(reason, topic, data, options)
%{state | options: updated_options}
{:error, reason, updated_options} ->
destination.on_error(reason, topic, data, options)
%{state | options: updated_options}
end
end
end
|
lib/data_logger/destination/controller.ex
| 0.823044
| 0.671844
|
controller.ex
|
starcoder
|
defmodule ExPokerEval.Rank do
@moduledoc """
Ranking functions for poker cards
"""
# Order of ranks
@order ~w(
straight_flush
four_of_a_kind
full_house
flush
straight
three_of_a_kind
two_pairs
pair
high_card
)a
@doc """
Using @order finds the highest rank of the given cards
## Examples
```
iex>ExPokerEval.Rank.highest([])
{}
```
"""
def highest([]), do: {}
def highest(cards), do: highest(cards, 0)
def highest(_cards, invalid_offset) when invalid_offset < 0 or invalid_offset >= length(@order) do
{:error, :invalid_offset}
end
# Tries each rank from `offset` downwards in strength, dispatching to the
# matching get_<rank>/1 function by name; returns {index, rank, value} for
# the first hit, or {} when nothing matches.
def highest(cards, offset) do
ranks = @order |> Enum.slice(offset, length(@order))
Enum.find_value(ranks, {}, fn rank_name ->
rank_func = "get_#{rank_name}" |> String.to_atom
case apply(ExPokerEval.Rank, rank_func, [cards]) do
{rank_name, value} -> {Enum.find_index(@order, &(&1 == rank_name)), rank_name, value}
_ -> false
end
end)
end
@doc """
Gets a straight if found
"""
# NOTE(review): relies on the hand being sorted ascending by :value
# (List.first/List.last define the bounds) — presumably guaranteed by the
# caller; confirm.
def get_straight(cards) do
cards = cards
|> Enum.map(&(&1[:value]))
|> ace_as_one
with lower <- cards |> List.first,
upper <- cards |> List.last,
sequence <- lower..upper |> Enum.to_list,
true <- cards == sequence
do
{:straight, upper}
else
_ -> {}
end
end
@doc """
Helper to use Ace as value 1 when needed
to complete a lower straight.
## Examples
```
iex>ExPokerEval.Rank.ace_as_one([2,3,4,5,14])
[1,2,3,4,5]
iex>ExPokerEval.Rank.ace_as_one([14,2,3,4,5])
[1,2,3,4,5]
iex>ExPokerEval.Rank.ace_as_one([3,4,5,6,7])
[3,4,5,6,7]
```
"""
def ace_as_one([2,3,4,5,14]), do: [1,2,3,4,5]
def ace_as_one([14,2,3,4,5]), do: [1,2,3,4,5]
def ace_as_one(values), do: values
@doc """
Gets a flush, if present, with the higest value on it
"""
# A flush exists when any suit occurs 5 times; the reported value is the
# last card's — again assuming the hand is sorted ascending by value.
def get_flush(cards) do
cards
|> group_by(:suit)
|> Map.values
|> Enum.member?(5)
|> case do
true -> {:flush, List.last(cards)[:value]}
_ -> {}
end
end
@doc """
Gets a straight flush with the highest card's value
"""
# The pin (^value) ensures the flush agrees with the straight's high card.
def get_straight_flush(cards) do
with {:straight, value} <- get_straight(cards),
{:flush, ^value} <- get_flush(cards)
do
{:straight_flush, value}
else
_ -> {}
end
end
@doc """
Gets a pair from a set of cards.
If more than one is present the returned value will contain
the highest one by value.
"""
# NOTE(review): "highest pair" relies on Enum.filter over the grouping map
# yielding value-sorted pairs; that holds for small integer-keyed maps but
# map ordering is not a documented guarantee — confirm.
def get_pair(cards) do
pairs = cards
|> group_by(:value)
|> Enum.filter(fn {_value, hits} -> hits > 1 end)
case pairs do
[] -> {}
_ -> {:pair, pairs |> List.last |> Tuple.to_list |> List.first }
end
end
@doc """
Gets two pairs from a set of cards and returns the highest value.
Each pair is expected to be of different value.
"""
def get_two_pairs(cards) do
pairs = cards
|> group_by(:value)
|> Enum.filter(fn {_value, hits} -> hits > 1 end)
cond do
length(pairs) > 1 ->
{:two_pairs, pairs |> List.last |> Tuple.to_list |> List.first }
true ->
{}
end
end
@doc """
Gets the value of four_of_a_kind rank if present
"""
# Enum.find's default {0, 4} acts as a sentinel so the {value, 4} match
# always succeeds; value == 0 then means "not found".
def get_four_of_a_kind(cards) do
{value, 4} = cards
|> group_by(:value)
|> Enum.find({0, 4}, fn {_value, hits} -> hits == 4 end)
case value do
0 -> {}
_ -> {:four_of_a_kind, value }
end
end
@doc """
Gets the value of three_of_a rank if present
"""
# Matches three OR MORE of a kind (hits >= 3), with the same sentinel trick.
def get_three_of_a_kind(cards) do
{value, _hits} = cards
|> group_by(:value)
|> Enum.find({0, 3}, fn {_value, hits} -> hits >= 3 end)
case value do
0 -> {}
_ -> {:three_of_a_kind, value }
end
end
@doc """
Gets a full house if present with the highest card value
"""
# NOTE(review): "highest_value" takes List.last of Map.keys — this relies on
# the small-integer-key map iterating in ascending order, and returns the
# larger card value, not necessarily the triplet's value; confirm intended.
def get_full_house(cards) do
with groups <- cards |> group_by(:value),
hits <- Map.values(groups),
[2, 3] <- Enum.sort(hits)
do
highest_value = groups |> Map.keys |> List.last
{:full_house, highest_value}
else
_ -> {}
end
end
@doc """
Gets the :high_card of the hand
"""
# Assumes the hand is sorted ascending by :value (last card is highest).
def get_high_card(cards) do
value = cards |> List.last |> Keyword.get(:value)
{:high_card, value}
end
@doc """
Helper to group cards by the given field
"""
# Builds a map of field-value => occurrence count (a frequency map).
def group_by(cards, field) do
cards
|> Enum.reduce(%{}, fn card, acc ->
val = card[field]
hits = case acc[val] do
nil -> 1
_ -> acc[val] + 1
end
Map.put(acc, val, hits)
end)
end
end
|
lib/ex_poker_eval/rank.ex
| 0.835383
| 0.849535
|
rank.ex
|
starcoder
|
defmodule AWS.StepFunctions do
@moduledoc """
AWS Step Functions
AWS Step Functions is a service that lets you coordinate the components of
distributed applications and microservices using visual workflows.
You can use Step Functions to build applications from individual
components, each of which performs a discrete function, or *task*, allowing
you to scale and change applications quickly. Step Functions provides a
console that helps visualize the components of your application as a series
of steps. Step Functions automatically triggers and tracks each step, and
retries steps when there are errors, so your application executes
predictably and in the right order every time. Step Functions logs the
state of each step, so you can quickly diagnose and debug any issues.
Step Functions manages operations and underlying infrastructure to ensure
your application is available at any scale. You can run tasks on AWS, your
own servers, or any system that has access to AWS. You can access and use
Step Functions using the console, the AWS SDKs, or an HTTP API. For more
information about Step Functions, see the * [AWS Step Functions Developer
Guide](https://docs.aws.amazon.com/step-functions/latest/dg/welcome.html)
*.
"""
@doc """
Creates an activity. An activity is a task that you write in any
programming language and host on any machine that has access to AWS Step
Functions. Activities must poll Step Functions using the `GetActivityTask`
API action and respond using `SendTask*` API actions. This function lets
Step Functions know the existence of your activity and returns an
identifier for use in a state machine and when polling from the activity.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def create_activity(client, input, options \\ []), do: request(client, "CreateActivity", input, options)
@doc """
Creates a state machine. A state machine consists of a collection of states
that can do work (`Task` states), determine to which states to transition
next (`Choice` states), stop an execution with an error (`Fail` states),
and so on. State machines are specified using a JSON-based, structured
language.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def create_state_machine(client, input, options \\ []), do: request(client, "CreateStateMachine", input, options)
@doc """
Deletes an activity.
"""
# Thin delegation to the module-wide request/4 dispatcher.
def delete_activity(client, input, options \\ []), do: request(client, "DeleteActivity", input, options)
@doc """
Deletes a state machine. This is an asynchronous operation: It sets the
state machine's status to `DELETING` and begins the deletion process. Each
state machine execution is deleted the next time it makes a state
transition.
<note> The state machine itself is deleted after all executions are
completed or deleted.
</note>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def delete_state_machine(client, input, options \\ []), do: request(client, "DeleteStateMachine", input, options)
@doc """
Describes an activity.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def describe_activity(client, input, options \\ []), do: request(client, "DescribeActivity", input, options)
@doc """
Describes an execution.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def describe_execution(client, input, options \\ []), do: request(client, "DescribeExecution", input, options)
@doc """
Describes a state machine.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def describe_state_machine(client, input, options \\ []), do: request(client, "DescribeStateMachine", input, options)
@doc """
Describes the state machine associated with a specific execution.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def describe_state_machine_for_execution(client, input, options \\ []),
  do: request(client, "DescribeStateMachineForExecution", input, options)
@doc """
Used by workers to retrieve a task (with the specified activity ARN) which
has been scheduled for execution by a running state machine. This initiates
a long poll, where the service holds the HTTP connection open and responds
as soon as a task becomes available (i.e. an execution of a task of this
type is needed.) The maximum time the service holds on to the request
before responding is 60 seconds. If no task is available within 60 seconds,
the poll returns a `taskToken` with a null string.
<important> Workers should set their client side socket timeout to at least
65 seconds (5 seconds higher than the maximum time the service may hold the
poll request).
Polling with `GetActivityTask` can cause latency in some implementations.
See [Avoid Latency When Polling for Activity
Tasks](https://docs.aws.amazon.com/step-functions/latest/dg/bp-activity-pollers.html)
in the Step Functions Developer Guide.
</important>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def get_activity_task(client, input, options \\ []), do: request(client, "GetActivityTask", input, options)
@doc """
Returns the history of the specified execution as a list of events. By
default, the results are returned in ascending order of the `timeStamp` of
the events. Use the `reverseOrder` parameter to get the latest events
first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other
arguments unchanged. Each pagination token expires after 24 hours. Using an
expired pagination token will return an *HTTP 400 InvalidToken* error.
"""
# Thin delegation to the module-wide request/4 dispatcher.
def get_execution_history(client, input, options \\ []), do: request(client, "GetExecutionHistory", input, options)
@doc """
Lists the existing activities.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other
arguments unchanged. Each pagination token expires after 24 hours. Using an
expired pagination token will return an *HTTP 400 InvalidToken* error.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Thin delegation to the module-wide request/4 dispatcher.
def list_activities(client, input, options \\ []), do: request(client, "ListActivities", input, options)
@doc """
Lists the executions of a state machine that meet the filtering criteria.
Results are sorted by time, with the most recent execution first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other
arguments unchanged. Each pagination token expires after 24 hours. Using an
expired pagination token will return an *HTTP 400 InvalidToken* error.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Lists executions of a state machine matching the filter criteria.
def list_executions(client, input, options \\ []),
  do: request(client, "ListExecutions", input, options)
@doc """
Lists the existing state machines.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other
arguments unchanged. Each pagination token expires after 24 hours. Using an
expired pagination token will return an *HTTP 400 InvalidToken* error.
<note> This operation is eventually consistent. The results are best effort
and may not reflect very recent updates and changes.
</note>
"""
# Lists the existing state machines (eventually consistent, paginated).
def list_state_machines(client, input, options \\ []),
  do: request(client, "ListStateMachines", input, options)
@doc """
List tags for a given resource.
"""
# Lists the tags attached to a Step Functions resource.
def list_tags_for_resource(client, input, options \\ []),
  do: request(client, "ListTagsForResource", input, options)
@doc """
Used by workers to report that the task identified by the `taskToken`
failed.
"""
# Reports that the task identified by the `taskToken` failed.
def send_task_failure(client, input, options \\ []),
  do: request(client, "SendTaskFailure", input, options)
@doc """
Used by workers to report to the service that the task represented by the
specified `taskToken` is still making progress. This action resets the
`Heartbeat` clock. The `Heartbeat` threshold is specified in the state
machine's Amazon States Language definition. This action does not in itself
create an event in the execution history. However, if the task times out,
the execution history contains an `ActivityTimedOut` event.
<note> The `Timeout` of a task, defined in the state machine's Amazon
States Language definition, is its maximum allowed duration, regardless of
the number of `SendTaskHeartbeat` requests received.
</note> <note> This operation is only useful for long-lived tasks to report
the liveliness of the task.
</note>
"""
# Resets the heartbeat clock for a long-running task.
def send_task_heartbeat(client, input, options \\ []),
  do: request(client, "SendTaskHeartbeat", input, options)
@doc """
Used by workers to report that the task identified by the `taskToken`
completed successfully.
"""
# Reports that the task identified by the `taskToken` completed successfully.
def send_task_success(client, input, options \\ []),
  do: request(client, "SendTaskSuccess", input, options)
@doc """
Starts a state machine execution.
<note> `StartExecution` is idempotent. If `StartExecution` is called with
the same name and input as a running execution, the call will succeed and
return the same response as the original request. If the execution is
closed or if the input is different, it will return a 400
`ExecutionAlreadyExists` error. Names can be reused after 90 days.
</note>
"""
# Starts a state machine execution (idempotent per name+input, see @doc above).
def start_execution(client, input, options \\ []),
  do: request(client, "StartExecution", input, options)
@doc """
Stops an execution.
"""
# Stops a running execution.
def stop_execution(client, input, options \\ []),
  do: request(client, "StopExecution", input, options)
@doc """
Add a tag to a Step Functions resource.
"""
# Adds a tag to a Step Functions resource.
def tag_resource(client, input, options \\ []),
  do: request(client, "TagResource", input, options)
@doc """
Remove a tag from a Step Functions resource
"""
# Removes a tag from a Step Functions resource.
def untag_resource(client, input, options \\ []),
  do: request(client, "UntagResource", input, options)
@doc """
Updates an existing state machine by modifying its `definition` and/or
`roleArn`. Running executions will continue to use the previous
`definition` and `roleArn`. You must include at least one of `definition`
or `roleArn` or you will receive a `MissingRequiredParameter` error.
<note> All `StartExecution` calls within a few seconds will use the updated
`definition` and `roleArn`. Executions started immediately after calling
`UpdateStateMachine` may use the previous state machine `definition` and
`roleArn`.
</note>
"""
# Updates a state machine's definition and/or role ARN.
def update_state_machine(client, input, options \\ []),
  do: request(client, "UpdateStateMachine", input, options)
@spec request(map(), binary(), map(), list()) ::
        {:ok, Poison.Parser.t | nil, Poison.Response.t} |
        {:error, Poison.Parser.t} |
        {:error, HTTPoison.Error.t}
# Signs and performs a Step Functions JSON request for the given `action`.
# `options` are forwarded verbatim to HTTPoison.post/4.
defp request(client, action, input, options) do
  # All Step Functions calls use the "states" service endpoint
  client = %{client | service: "states"}
  host = get_host("states", client)
  url = get_url(host, client)
  headers = [{"Host", host},
             {"Content-Type", "application/x-amz-json-1.0"},
             {"X-Amz-Target", "AWSStepFunctions.#{action}"}]
  payload = Poison.Encoder.encode(input, [])
  # The SigV4 signature must cover the final payload and headers
  headers = AWS.Request.sign_v4(client, "POST", url, headers, payload)
  case HTTPoison.post(url, payload, headers, options) do
    {:ok, response=%HTTPoison.Response{status_code: 200, body: ""}} ->
      # Success with an empty body: nothing to decode
      {:ok, nil, response}
    {:ok, response=%HTTPoison.Response{status_code: 200, body: body}} ->
      {:ok, Poison.Parser.parse!(body), response}
    {:ok, _response=%HTTPoison.Response{body: body}} ->
      # Non-200: AWS returns a JSON error document with __type/message.
      # NOTE(review): the {exception, message} tuple does not match the
      # @spec's {:error, Poison.Parser.t} — confirm intended contract.
      error = Poison.Parser.parse!(body)
      exception = error["__type"]
      message = error["message"]
      {:error, {exception, message}}
    {:error, %HTTPoison.Error{reason: reason}} ->
      {:error, %HTTPoison.Error{reason: reason}}
  end
end
# Builds the request host: "localhost" for local development,
# otherwise "<prefix>.<region>.<endpoint>".
defp get_host(endpoint_prefix, client) do
  case client.region do
    "local" -> "localhost"
    region -> "#{endpoint_prefix}.#{region}.#{client.endpoint}"
  end
end
# Joins protocol, host, and port into the base request URL.
defp get_url(host, %{:proto => proto, :port => port}) do
  Enum.join([proto, "://", host, ":", port, "/"])
end
end
|
lib/aws/step_functions.ex
| 0.923507
| 0.876423
|
step_functions.ex
|
starcoder
|
defmodule Zaryn.Replication do
@moduledoc """
Zaryn replication algorithms are based on a replication tree during the transaction mining
and include several kind of roles: chain storage nodes, beacon storage nodes, I/O storage node.
From this, different validation and storage mechanisms are used.
Moreover because Zaryn supports network transaction to constantly enhanced the system,
those transactions will be loaded into the subsystems (Node, Shared Secrets, Governance, etc..)
"""
alias Zaryn.Account
alias Zaryn.BeaconChain
alias Zaryn.Contracts
alias Zaryn.Crypto
alias Zaryn.Election
alias Zaryn.P2P
alias Zaryn.P2P.Message.AcknowledgeStorage
alias Zaryn.P2P.Message.NotifyLastTransactionAddress
alias Zaryn.P2P.Node
alias Zaryn.OracleChain
alias Zaryn.Reward
alias Zaryn.SharedSecrets
alias __MODULE__.TransactionContext
alias __MODULE__.TransactionValidator
alias Zaryn.TransactionChain
alias Zaryn.TransactionChain.Transaction
alias Zaryn.TransactionChain.Transaction.ValidationStamp
alias Zaryn.TransactionChain.Transaction.ValidationStamp.LedgerOperations
alias Zaryn.TransactionChain.TransactionData
alias Zaryn.TransactionChain.TransactionData.Keys
alias Zaryn.Utils
@type role :: :chain | :IO | :beacon
@type nodes_by_roles :: [
chain: list(Node.t()),
beacon: list(Node.t()),
IO: list(Node.t())
]
require Logger
@doc """
Process transaction replication by validating the transaction and process it depending on the storage node role
- Chain storage nodes: will download the transaction chain and unspent outputs, ensure the entire correctness and persist a new transaction chain
- Beacon chain storage: will verify the transaction integrity and will store the transaction in the beacon chain
- IO storage node: will verify the transaction integrity and store the transaction
Once the transaction is stored, the transaction is loaded into the system,
so for instance if the transaction is a network transaction, a dedicated behavior will be applied
- Node: Identification of a new node or a node update
- Node Shared Secrets: schedule of the node shared secret renewal
- Origin Shared Secrets: load the new origin public keys
- Code Approval: manage approvals of code proposals
Options:
- ack_storage?: Determines if the storage node must notify the welcome node about the replication
- self_repair?: Determines if the replication is from a self repair cycle. This switch will be determine to fetch unspent outputs or transaction inputs for a chain role validation
"""
@spec process_transaction(
validated_tx :: Transaction.t(),
role_list :: list(role()),
options :: [ack_storage?: boolean(), welcome_node: Node.t(), self_repair?: boolean()]
) ::
:ok | {:error, :invalid_transaction}
def process_transaction(tx = %Transaction{}, roles, opts \\ [])
    when is_list(roles) and is_list(opts) do
  # The :chain role takes precedence and triggers the full chain validation path
  handler =
    if :chain in roles,
      do: &do_process_transaction_as_chain_storage_node/3,
      else: &do_process_transaction/3

  handler.(tx, roles, opts)
end
# Chain-storage replication path: fetch the previous chain and the
# inputs/unspent outputs, fully validate the transaction against them,
# then persist the whole chain and ingest the transaction into the system.
defp do_process_transaction_as_chain_storage_node(
       tx = %Transaction{address: address, type: type},
       roles,
       opts
     ) do
  # Optional flags — see process_transaction/3 documentation
  ack_storage? = Keyword.get(opts, :ack_storage?, false)
  self_repair? = Keyword.get(opts, :self_repair?, false)
  Logger.info("Replication started", transaction: "#{type}@#{Base.encode16(address)}")
  Logger.debug("Retrieve chain and unspent outputs...",
    transaction: "#{type}@#{Base.encode16(address)}"
  )
  # self_repair? switches between fetching transaction inputs vs unspent outputs
  {chain, inputs_unspent_outputs} = fetch_context(tx, self_repair?)
  Logger.debug("Size of the chain retrieved: #{Enum.count(chain)}",
    transaction: "#{type}@#{Base.encode16(address)}"
  )
  case TransactionValidator.validate(
         tx,
         Enum.at(chain, 0),
         Enum.to_list(inputs_unspent_outputs)
       ) do
    :ok ->
      # Persist the validated transaction ahead of the retrieved chain
      :ok = TransactionChain.write(Stream.concat([tx], chain))
      :ok = ingest_transaction(tx)
      if :beacon in roles do
        BeaconChain.add_transaction_summary(tx)
      end
      # Notify the welcome node only when explicitly requested
      if ack_storage? do
        welcome_node = Keyword.fetch!(opts, :welcome_node)
        :ok = acknowledge_storage(tx, welcome_node)
      else
        :ok
      end
      # unless self_repair? do
      #   forward_replication(tx)
      # end
      Logger.info("Replication finished", transaction: "#{type}@#{Base.encode16(address)}")
    {:error, reason} ->
      # Keep a trace of the rejected transaction for later inspection
      :ok = TransactionChain.write_ko_transaction(tx)
      Logger.error("Invalid transaction for replication - #{inspect(reason)}",
        transaction: "#{type}@#{Base.encode16(address)}"
      )
      {:error, :invalid_transaction}
  end
end
# Non-chain replication path (:IO and/or :beacon roles): the transaction is
# validated standalone, without downloading the previous chain.
defp do_process_transaction(tx = %Transaction{address: address, type: type}, roles, _opts)
     when is_list(roles) do
  Logger.info("Replication started", transaction: "#{type}@#{Base.encode16(address)}")
  case TransactionValidator.validate(tx) do
    :ok ->
      # Only IO storage nodes persist and ingest the transaction
      if :IO in roles do
        :ok = TransactionChain.write_transaction(tx)
        ingest_transaction(tx)
      end
      if :beacon in roles do
        BeaconChain.add_transaction_summary(tx)
      end
      Logger.info("Replication finished", transaction: "#{type}@#{Base.encode16(address)}")
      :ok
    {:error, reason} ->
      # Keep a trace of the rejected transaction for later inspection
      :ok = TransactionChain.write_ko_transaction(tx)
      Logger.error("Invalid transaction for replication - #{inspect(reason)}",
        transaction: "#{type}@#{Base.encode16(address)}"
      )
      {:error, :invalid_transaction}
  end
end
# Retrieves the previous chain and the inputs/unspent outputs needed to
# validate `tx`; network transactions get a dedicated local-first lookup.
defp fetch_context(
       tx = %Transaction{type: type, validation_stamp: %ValidationStamp{timestamp: timestamp}},
       self_repair?
     ) do
  prev_address = Transaction.previous_address(tx)

  case Transaction.network_type?(type) do
    true -> do_fetch_context_for_network_transaction(prev_address, timestamp, self_repair?)
    false -> fetch_context_for_regular_transaction(prev_address, timestamp, self_repair?)
  end
end
# Network transactions: try the local store first, fall back to remote nodes
# when the previous chain is absent (orphan / not yet synchronized).
defp do_fetch_context_for_network_transaction(previous_address, timestamp, self_repair?) do
  Logger.debug("Try to fetch network previous transaction locally",
    transaction: Base.encode16(previous_address)
  )
  previous_chain = TransactionChain.get(previous_address)
  # If the transaction is missing (orphan) and the previous chain has not been synchronized
  # We request other nodes to give us the information
  previous_chain =
    if Enum.empty?(previous_chain) do
      Logger.debug(
        "Try to fetch network previous transaction from remote nodes (possibility of an orphan state)",
        transaction: Base.encode16(previous_address)
      )
      TransactionContext.fetch_transaction_chain(previous_address, timestamp, true)
    else
      previous_chain
    end
  # Inputs vs unspent outputs depending on the self-repair flag
  inputs_unspent_outputs =
    fetch_inputs_unspent_outputs(previous_address, timestamp, self_repair?)
  {previous_chain, inputs_unspent_outputs}
end
# Regular transactions: fetch the previous chain and the inputs/unspent
# outputs concurrently from remote nodes.
defp fetch_context_for_regular_transaction(previous_address, timestamp, self_repair?) do
  Logger.debug("Fetch regular previous transaction",
    transaction: Base.encode16(previous_address)
  )
  # NOTE(review): this match assumes both tasks complete within
  # Task.yield_many's default 5s timeout; a late task yields `nil` and this
  # raises MatchError — confirm crashing is the intended failure mode here.
  [{%Task{}, {:ok, previous_chain}}, {%Task{}, {:ok, inputs_unspent_outputs}}] =
    Task.yield_many([
      Task.async(fn ->
        TransactionContext.fetch_transaction_chain(previous_address, timestamp)
      end),
      Task.async(fn ->
        fetch_inputs_unspent_outputs(previous_address, timestamp, self_repair?)
      end)
    ])
  {previous_chain, inputs_unspent_outputs}
end
# Self-repair cycle: replay from the recorded transaction inputs.
defp fetch_inputs_unspent_outputs(previous_address, timestamp, true) do
  Logger.debug("Fetch transaction inputs", transaction: Base.encode16(previous_address))
  TransactionContext.fetch_transaction_inputs(previous_address, timestamp)
end

# Live replication: rely on the current unspent outputs.
defp fetch_inputs_unspent_outputs(previous_address, timestamp, false) do
  Logger.debug("Fetch transaction unspent outputs", transaction: Base.encode16(previous_address))
  TransactionContext.fetch_unspent_outputs(previous_address, timestamp)
end
@doc """
Send an acknowledgment of the replication of the transaction to the welcome node and the previous storage pool
"""
@spec acknowledge_storage(Transaction.t(), Node.t()) :: :ok
def acknowledge_storage(
      tx = %Transaction{
        address: address,
        validation_stamp: %ValidationStamp{timestamp: timestamp}
      },
      welcome_node = %Node{}
    ) do
  # Both notifications are fire-and-forget (unlinked tasks): failures are
  # not reported back to the caller, which always gets :ok
  Task.start(fn -> P2P.send_message!(welcome_node, %AcknowledgeStorage{address: address}) end)
  Task.start(fn ->
    acknowledge_previous_storage_nodes(address, Transaction.previous_address(tx), timestamp)
  end)
  :ok
end
@doc """
Notify the previous storage pool that a new transaction is present on the chain
"""
@spec acknowledge_previous_storage_nodes(binary(), binary(), DateTime.t()) :: :ok
def acknowledge_previous_storage_nodes(address, previous_address, timestamp)
    when is_binary(address) and is_binary(previous_address) do
  # Record the new last address and stop any contract bound to the old one
  TransactionChain.register_last_address(previous_address, address, timestamp)
  Contracts.stop_contract(previous_address)
  if previous_address != address do
    case TransactionChain.get_transaction(previous_address, [:previous_public_key]) do
      {:ok, tx} ->
        next_previous_address = Transaction.previous_address(tx)
        # Stop walking once the chain start is reached (address equals its previous)
        if previous_address != next_previous_address do
          previous_storage_nodes =
            chain_storage_nodes(next_previous_address, P2P.authorized_nodes())
          # Recurse locally when this node stores the previous pool itself,
          # otherwise broadcast the notification to the nodes that do
          if Utils.key_in_node_list?(previous_storage_nodes, Crypto.first_node_public_key()) do
            acknowledge_previous_storage_nodes(address, next_previous_address, timestamp)
          else
            P2P.broadcast_message(previous_storage_nodes, %NotifyLastTransactionAddress{
              address: address,
              previous_address: next_previous_address,
              timestamp: timestamp
            })
          end
        end
      _ ->
        :ok
    end
  else
    :ok
  end
end
@doc """
Generate a replication tree from a list of storage nodes and validation nodes by grouping
the closest nodes by the shorter path.
## Rationale
Given a list of storage nodes: S1, S2, .., S16 and list of validation nodes: V1, .., V5
Nodes coordinates (Network Patch ID : numerical value)
S1: F36 -> 3894 S5: 143 -> 323 S9: 19A -> 410 S13: E2B -> 3627
S2: A23 -> 2595 S6: BB2 -> 2994 S10: C2A -> 3114 S14: AA0 -> 2720
S3: B43 -> 2883 S7: A63 -> 2659 S11: C23 -> 3107 S15: 042 -> 66
S4: 2A9 -> 681 S8: D32 -> 3378 S12: F22 -> 3874 S16: 3BC -> 956
V1: AC2 -> 2754 V2: DF3 -> 3571 V3: C22 -> 3106 V4: E19 -> 3609 V5: 22A -> 554
The replication tree is computed by finding the nearest storage nodes for each validation node.
For each storage node, its distance to every validation node is computed and then sorted to get the closest.
Table below shows the distance between storages and validations
|------------|------------|------------|------------|------------|------------|-------------|------------|
| S1 | S2 | S3 | S4 | S5 | S6 | S7 | S8 |
|------------|------------|------------|------------|------------|------------|-------------|------------|
| V1 , 1140 | V1 , 159 | V1 , 129 | V1 , 2073 | V1 , 2431 | V1 , 240 | V1 , 95 | V1 , 624 |
| V2 , 323 | V2 , 976 | V2 , 688 | V2 , 2890 | V2 , 3248 | V2 , 577 | V2 , 912 | V2 , 193 |
| V3 , 788 | V3 , 511 | V3 , 223 | V3 , 2425 | V3 , 2783 | V3 , 112 | V3 , 447 | V3 , 272 |
| V4 , 285 | V4 , 1014 | V4 , 726 | V4 , 2928 | V4 , 3286 | V4 , 615 | V4 , 950 | V4 , 231 |
| V5 , 3340 | V5 , 2041 | V5 , 2329 | V5 , 127 | V5 , 231 | V5 , 2440 | V5 , 2105 | V5 , 2824 |
|------------|------------|------------|------------|------------|------------|-------------|------------|
| S9 | S10 | S11 | S12 | S13 | S14 | S15 | S16 |
|------------|------------|------------|------------|------------|------------|-------------|------------|
| V1 , 2344 | V1 , 360 | V1 , 353 | V1 , 1120 | V1 , 873 | V1 , 34 | V1 , 2688 | V1 , 1798 |
| V2 , 3161 | V2 , 457 | V2 , 464 | V2 , 303 | V2 , 56 | V2 , 851 | V2 , 3505 | V2 , 2615 |
| V3 , 2696 | V3 , 8 | V3 , 1 | V3 , 768 | V3 , 521 | V3 , 386 | V3 , 3040 | V3 , 2150 |
| V4 , 3199 | V4 , 495 | V4 , 502 | V4 , 265 | V4 , 18 | V4 , 889 | V4 , 3543 | V4 , 2653 |
| V5 , 144 | V5 , 2560 | V5 , 2553 | V5 , 3320 | V5 , 3078 | V5 , 2166 | V5 , 488 | V5 , 402 |
By sorting these distances we can find the closest storage nodes.
Table below shows the storages nodes by validation nodes
|-----|-----|-----|-----|-----|
| V1 | V2 | V3 | V4 | V5 |
|-----|-----|-----|-----|-----|
| S14 | S8 | S6 | S1 | S4 |
| S7 | S13 | S11 | S10 | S9 |
| S2 | S5 | S3 | S12 | S15 |
| | | | | S16 |
## Examples
iex> validation_nodes = [
...> %Node{network_patch: "AC2", last_public_key: "key_v1"},
...> %Node{network_patch: "DF3", last_public_key: "key_v2"},
...> %Node{network_patch: "C22", last_public_key: "key_v3"},
...> %Node{network_patch: "E19", last_public_key: "key_v4"},
...> %Node{network_patch: "22A", last_public_key: "key_v5"}
...> ]
iex> storage_nodes = [
...> %Node{network_patch: "F36", first_public_key: "key_S1", last_public_key: "key_S1"},
...> %Node{network_patch: "A23", first_public_key: "key_S2", last_public_key: "key_S2"},
...> %Node{network_patch: "B43", first_public_key: "key_S3", last_public_key: "key_S3"},
...> %Node{network_patch: "2A9", first_public_key: "key_S4", last_public_key: "key_S4"},
...> %Node{network_patch: "143", first_public_key: "key_S5", last_public_key: "key_S5"},
...> %Node{network_patch: "BB2", first_public_key: "key_S6", last_public_key: "key_S6"},
...> %Node{network_patch: "A63", first_public_key: "key_S7", last_public_key: "key_S7"},
...> %Node{network_patch: "D32", first_public_key: "key_S8", last_public_key: "key_S8"},
...> %Node{network_patch: "19A", first_public_key: "key_S9", last_public_key: "key_S9"},
...> %Node{network_patch: "C2A", first_public_key: "key_S10", last_public_key: "key_S10"},
...> %Node{network_patch: "C23", first_public_key: "key_S11", last_public_key: "key_S11"},
...> %Node{network_patch: "F22", first_public_key: "key_S12", last_public_key: "key_S12"},
...> %Node{network_patch: "E2B", first_public_key: "key_S13", last_public_key: "key_S13"},
...> %Node{network_patch: "AA0", first_public_key: "key_S14", last_public_key: "key_S14"},
...> %Node{network_patch: "042", first_public_key: "key_S15", last_public_key: "key_S15"},
...> %Node{network_patch: "3BC", first_public_key: "key_S16", last_public_key: "key_S16"}
...> ]
iex> Replication.generate_tree(validation_nodes, storage_nodes)
%{
"key_v1" => [
%Node{first_public_key: "key_S14", last_public_key: "key_S14", network_patch: "AA0"},
%Node{first_public_key: "key_S7", last_public_key: "key_S7", network_patch: "A63"},
%Node{first_public_key: "key_S2", last_public_key: "key_S2", network_patch: "A23"}
],
"key_v2" => [
%Node{first_public_key: "key_S13", last_public_key: "key_S13", network_patch: "E2B"},
%Node{first_public_key: "key_S8", last_public_key: "key_S8", network_patch: "D32"},
%Node{first_public_key: "key_S5", last_public_key: "key_S5", network_patch: "143"}
],
"key_v3" => [
%Node{first_public_key: "key_S11", last_public_key: "key_S11", network_patch: "C23"},
%Node{first_public_key: "key_S6", last_public_key: "key_S6", network_patch: "BB2"},
%Node{first_public_key: "key_S3", last_public_key: "key_S3", network_patch: "B43"}
],
"key_v4" => [
%Node{first_public_key: "key_S12", last_public_key: "key_S12", network_patch: "F22"},
%Node{first_public_key: "key_S10", last_public_key: "key_S10", network_patch: "C2A"},
%Node{first_public_key: "key_S1", last_public_key: "key_S1", network_patch: "F36"}
],
"key_v5" => [
%Node{first_public_key: "key_S16", last_public_key: "key_S16", network_patch: "3BC"},
%Node{first_public_key: "key_S15", last_public_key: "key_S15", network_patch: "042"},
%Node{first_public_key: "key_S9", last_public_key: "key_S9", network_patch: "19A"},
%Node{first_public_key: "key_S4", last_public_key: "key_S4", network_patch: "2A9"}
]
}
"""
@spec generate_tree(validation_nodes :: list(Node.t()), storage_nodes :: list(Node.t())) ::
        replication_tree :: map()
def generate_tree(validation_nodes, storage_nodes) do
  # Assign each storage node to its closest (least loaded) validation node,
  # accumulating the tree so that load balancing sees previous assignments.
  Enum.reduce(storage_nodes, %{}, fn storage_node, acc ->
    %Node{last_public_key: validation_key} =
      find_closest_validation_node(acc, storage_node, validation_nodes)

    Map.update(acc, validation_key, [storage_node], fn assigned ->
      [storage_node | assigned]
    end)
  end)
end
# Picks the validation node that should replicate `storage_node`: the list is
# sorted by patch distance first, then Enum.min/3 selects by load — since min
# returns the first minimal element, distance breaks ties between
# equally-loaded validation nodes.
defp find_closest_validation_node(tree, storage_node, validation_nodes) do
  {closest_validation_node, _} =
    validation_nodes
    # Get the number of replicas by nodes
    |> Enum.reduce(%{}, &Map.put(&2, &1, tree_sub_size(tree, &1.last_public_key)))
    # Sort each validation nodes by its network patch from the storage node network patch
    |> Enum.sort_by(fn {validation_node, _} ->
      sort_closest_node(validation_node, storage_node)
    end)
    # Balance the validation nodes load to find the closest nodes with the less nodes to replicate
    # NOTE(review): the empty fallback `fn -> 0 end` would make the tuple match
    # above fail if `validation_nodes` were [] — confirm callers never pass an
    # empty list.
    |> Enum.min(&sort_by_less_load/2, fn -> 0 end)
  closest_validation_node
end
# Number of storage nodes already assigned to the given validation node key.
defp tree_sub_size(tree, public_key) do
  tree
  |> Map.get(public_key, [])
  |> length()
end
# Comparator for Enum.min/3: prefers the validation node carrying fewer replicas.
defp sort_by_less_load({_left, load_left}, {_right, load_right}) do
  load_left <= load_right
end
# Absolute numerical distance between the two nodes' network patches.
defp sort_closest_node(validation_node, storage_node) do
  diff =
    Node.get_network_patch_num(storage_node) -
      Node.get_network_patch_num(validation_node)

  abs(diff)
end
@doc """
Ingest the transaction into system delaying the network to several handlers.
Most of the application contexts allow the transaction loading/ingestion.
Some transaction some have an impact the memory state or behaviors. For instance:
- Node transaction increments the number of node keys
- Node shared secrets transaction increments the number of node shared keys and can authorize new nodes
- Transactions mutates the account ledgers
- Update P2P view
- Transactions with smart contract deploy instances of them or can put in pending state waiting approval signatures
- Code approval transactions may trigger the TestNets deployments or hot-reloads
"""
@spec ingest_transaction(Transaction.t()) :: :ok
def ingest_transaction(tx = %Transaction{}) do
  # Dispatch the transaction to every subsystem loader, in a fixed order
  [
    TransactionChain,
    Crypto,
    P2P,
    SharedSecrets,
    Account,
    Contracts,
    BeaconChain,
    OracleChain,
    Reward
  ]
  |> Enum.each(& &1.load_transaction(tx))

  :ok
end
@doc """
Determine the list of roles for a given transaction and a node public key
"""
@spec roles(Transaction.t(), Crypto.key()) :: list(role())
def roles(
      tx = %Transaction{
        address: address,
        type: type,
        validation_stamp: %ValidationStamp{timestamp: timestamp}
      },
      node_public_key
    ) do
  # Evaluate each role predicate, then keep only the keys whose value is true
  [
    chain:
      chain_storage_node?(
        address,
        type,
        node_public_key,
        P2P.available_nodes()
      ),
    beacon:
      beacon_storage_node?(
        address,
        timestamp,
        node_public_key,
        P2P.authorized_nodes()
      ),
    IO: io_storage_node?(tx, node_public_key, P2P.available_nodes())
  ]
  |> Utils.get_keys_from_value_match(true)
end
@spec chain_storage_node?(
        binary(),
        Transaction.transaction_type(),
        Crypto.key(),
        list(Node.t())
      ) :: boolean()
# Whether the node owning `public_key` is elected to store the chain of `address`.
def chain_storage_node?(
      address,
      type,
      public_key,
      node_list \\ P2P.authorized_nodes()
    )
    when is_binary(address) and is_atom(type) and is_binary(public_key) and is_list(node_list) do
  address
  |> chain_storage_nodes_with_type(type, node_list)
  |> Utils.key_in_node_list?(public_key)
end
@spec beacon_storage_node?(binary(), DateTime.t(), Crypto.key(), list(Node.t())) :: boolean()
# Whether the node owning `public_key` is elected as a beacon storage node
# for `address` at the slot derived from `timestamp`.
def beacon_storage_node?(
      address,
      timestamp = %DateTime{},
      public_key,
      node_list \\ P2P.authorized_nodes()
    )
    when is_binary(address) and is_binary(public_key) and is_list(node_list) do
  address
  |> beacon_storage_nodes(timestamp, node_list)
  |> Utils.key_in_node_list?(public_key)
end
@spec io_storage_node?(Transaction.t(), Crypto.key(), list(Node.t())) :: boolean()
# Whether the node owning `public_key` is elected as an IO storage node for `tx`.
def io_storage_node?(
      tx = %Transaction{},
      public_key,
      node_list \\ P2P.authorized_nodes()
    )
    when is_binary(public_key) and is_list(node_list) do
  tx
  |> io_storage_nodes(node_list)
  |> Utils.key_in_node_list?(public_key)
end
@doc """
Return the storage nodes for the transaction chain based on the transaction address, the transaction type and set a nodes
"""
@spec chain_storage_nodes_with_type(binary(), Transaction.transaction_type(), list(Node.t())) ::
        list(Node.t())
def chain_storage_nodes_with_type(address, type, node_list \\ P2P.authorized_nodes())
    when is_binary(address) and is_atom(type) and is_list(node_list) do
  # Network transactions are replicated on every node of the list
  case Transaction.network_type?(type) do
    true -> node_list
    false -> chain_storage_nodes(address, node_list)
  end
end
@doc """
Return the storage nodes for the transaction chain based on the transaction address and set a nodes
"""
@spec chain_storage_nodes(binary(), list(Node.t())) :: list(Node.t())
def chain_storage_nodes(address, node_list \\ P2P.authorized_nodes()) when is_binary(address) do
  constraints = Election.get_storage_constraints()
  Election.storage_nodes(address, node_list, constraints)
end
@doc """
Return the storage nodes for the transaction IO and a set of nodes
"""
@spec io_storage_nodes(Transaction.t(), list(Node.t())) :: list(Node.t())
def io_storage_nodes(
      tx = %Transaction{
        validation_stamp: %ValidationStamp{ledger_operations: ops, recipients: recipients}
      },
      node_list \\ P2P.authorized_nodes()
    ) do
  # Nodes elected for the ledger movement addresses
  operations_nodes = operation_storage_nodes(ops, node_list)
  # Nodes storing the chains of each transaction recipient
  recipients_nodes = Enum.map(recipients, &chain_storage_nodes(&1, node_list))
  # Extra nodes required by specific transaction types (e.g. node shared secrets)
  additional_nodes = additional_storage_nodes(tx)
  P2P.distinct_nodes([operations_nodes, recipients_nodes, additional_nodes])
end
# Node shared secrets transactions are additionally stored on every
# authorized node listed in the transaction keys; other types need none.
defp additional_storage_nodes(%Transaction{
       type: :node_shared_secrets,
       data: %TransactionData{keys: keys}
     }) do
  Enum.map(Keys.list_authorized_keys(keys), &P2P.get_node_info!/1)
end

defp additional_storage_nodes(_), do: []
# Elects the IO storage nodes for the ledger movement addresses of `ops`.
defp operation_storage_nodes(ops, node_list) do
  addresses = LedgerOperations.movement_addresses(ops)
  Election.io_storage_nodes(addresses, node_list, Election.get_storage_constraints())
end
@doc """
Return the beacon storage nodes for the transaction based on the transaction address, transaction datetime and a set of nodes
"""
@spec beacon_storage_nodes(binary(), DateTime.t(), list(Node.t())) :: list(Node.t())
def beacon_storage_nodes(
      address,
      timestamp = %DateTime{},
      node_list \\ P2P.authorized_nodes()
    )
    when is_binary(address) and is_list(node_list) do
  # The beacon subset is derived from the address, the slot from the timestamp
  subset = BeaconChain.subset_from_address(address)
  slot_time = BeaconChain.next_slot(timestamp)
  Election.beacon_storage_nodes(
    subset,
    slot_time,
    node_list,
    Election.get_storage_constraints()
  )
end
end
|
lib/zaryn/replication.ex
| 0.830972
| 0.581392
|
replication.ex
|
starcoder
|
defmodule Beeline do
@schema Beeline.Config.schema()
@producer_schema Beeline.Producer.schema()
@moduledoc """
A tool for building in-order GenStage topologies for EventStoreDB
Beeline provides a Broadway-like experience for building GenStage
topologies for consuming streams of events from EventStoreDB in-order,
usually one event at a time. Beeline aims to close over the supervision
and basic configuration of the producer(s), as well as some of the
run-of-the-mill procedure done in the `c:GenStage.init/1` callback of
the consumer such as linking the producer process(es).
## The Beeline Topology
Beeline creates a topology of GenStage, GenServer, and Supervisor processes.
This topology looks like the following:
```text
Supervisor
├── HealthChecker*
└── StageSupervisor
├── Producer*
└── Consumer
```
Let's break these down from the bottom up:
* "Consumer" - the GenStage consumer module which invokes
`Beeline.start_link/2`, handles events, and increments stream
positions.
* "Producer*" - one or more GenStage producers which feed the consumer.
These producers are declared with the `:producers` key and may either
be `Kelvin.InOrderSubscription`, `Volley.InOrderSubscription`, or
`Beeline.DummyProducer` producer modules.
* "StageSupervisor" - a supervisor for the GenStage pipeline. This supervisor
has a `:transient` restart strategy so that if the GenStage pipeline halts
on an event it cannot handle, the `StageSupervisor` supervision tree is
brought down but not the entire supervision tree. This behavior is
desirable so that the health-checker process can continue reading the
stream positions and so that an operator can perform any necessary
manual intervention on the crashed supervision tree (for example,
skipping the failure event).
* "HealthChecker*" - a GenServer which periodically polls the stream positions
of a producer. There is one health checker process per producer.
* "Supervisor" - a top-level supervisor. This supervisor has a `:permanent`
restart strategy.
See the `start_link/2` documentation for a full configuration reference and
examples.
"""
# Injects a default GenStage consumer skeleton: the generated init/1
# subscribes to every configured producer and is overridable.
defmacro __using__(_opts) do
  quote do
    use GenStage
    @impl GenStage
    def init(config) do
      # Build the subscribe_to list from the producers configured via
      # Beeline.start_link/2, carrying each producer's demand settings
      producers =
        Enum.map(config.producers, fn {_key, producer} ->
          {producer.name,
           min_demand: producer.min_demand, max_demand: producer.max_demand}
        end)
      {:consumer, config.context, subscribe_to: producers}
    end
    # Consumers may define their own init/1
    defoverridable init: 1
  end
end
@doc """
Starts a Beeline topology
## Options
#{NimbleOptions.docs(@schema)}
#### Producer options
#{NimbleOptions.docs(@producer_schema)}
## Examples
defmodule MyEventHandler do
use Beeline
def start_link(_opts) do
Beeline.start_link(MyEventHandler,
name: MyEventHandler,
producers: [
default: [
name: MyEventHandler.EventListener,
stream_name: "$ce-BoundedContext.AggregateName",
connection: MyEventHandler.EventStoreDBConnection,
adapter: :kelvin
]
]
)
end
# .. GenStage callbacks
@impl GenStage
def handle_events([subscription_event], _from, state) do
# .. handle the events one-by-one ..
{:noreply, [], state}
end
end
"""
@doc since: "0.1.0"
@spec start_link(module :: module(), opts :: Keyword.t()) ::
        Supervisor.on_start()
def start_link(module, opts) do
  # Validate the options against the NimbleOptions schema before wiring
  # the topology; invalid configuration fails loudly at startup.
  case NimbleOptions.validate(opts, @schema) do
    {:ok, validated} ->
      validated
      |> Keyword.put(:module, module)
      |> Beeline.Config.source()
      |> Beeline.Topology.start_link()

    {:error, reason} ->
      raise ArgumentError,
            "invalid configuration given to Beeline.start_link/2, " <>
              reason.message
  end
end
@doc """
Restarts the supervision tree of GenStages for the given Beeline topology
This can be useful for manual intervention by a human operator in a remote
console session, if the GenStage supervision tree crashes and exceeds the
retry limits.
## Examples
iex> Beeline.restart_stages(MyEventHandler)
:ok
"""
@spec restart_stages(module()) :: :ok | {:error, term()}
def restart_stages(beeline) when is_atom(beeline) do
  topology = Module.concat(beeline, Topology)
  GenServer.call(topology, :restart_stages)
end
@doc """
Decodes the body of a subscription event
This function performs JSON decoding if necessary and converts maps with
string keys into maps keyed by atoms. This This can potentially lead to
atom exhaustion, but the allowed atom count is quite high and usually this
concern is only theoretical.
## Examples
@impl GenStage
def handle_events([subscription_event], _from, state) do
event = Beeline.decode_event(subscription_event)
# ..
"""
defdelegate decode_event(subscription_event), to: Beeline.EventStoreDB
@doc """
Determines the stream position of the subscription event
This function prefers link stream positions if available. This means that if
the subscription from which the event is emitted is reading a projected
stream such as a category stream, the returned stream position will reflect
the position in the projected stream instead of the origin stream.
## Examples
@impl GenStage
def handle_events([subscription_event], _from, state) do
# consume the events
MyApp.Repo.transaction(fn ->
# save some state
producer = Beeline.producer(subscription_event)
stream_position = Beeline.stream_position(subscription_event)
MyApp.StreamPosition.persist(producer, stream_position)
end)
end
"""
defdelegate stream_position(subscription_event), to: Beeline.EventStoreDB
@doc """
Determines which producer emitted the subscription event.

This can be useful in order to save stream positions when a consumer is
subscribed to multiple producers. Should be used in tandem with
`stream_position/1`.
"""
defdelegate producer(subscription_event), to: Beeline.EventStoreDB
@doc """
Wraps an event in a subscription event packet.

This can be useful for building test events to pass through the dummy
producer.
"""
@spec as_subscription_event(map(), atom()) :: {atom(), map()}
def as_subscription_event(event, producer) do
  # Mimic the shape of a real subscription packet: JSON-encoded data with
  # a zeroed event number, tagged with the producer that "emitted" it.
  encoded = Jason.encode!(event)
  packet = %{event: %{data: encoded, event_number: 0}}
  {producer, packet}
end
@doc """
Gives a set of events to a topology's dummy producer.

This function can be used to test running events through a topology.
If there are multiple producers, one is picked at random.
"""
def test_events(events, beeline) when is_atom(beeline) do
  # The topology process is registered under <BeelineModule>.Topology.
  topology = Module.concat(beeline, Topology)
  GenServer.call(topology, {:test_events, events})
end
end
|
lib/beeline.ex
| 0.861567
| 0.793466
|
beeline.ex
|
starcoder
|
defmodule SmartChain.Aggregates do
  @moduledoc """
  Functions to return metadata and other more useful stats/data than the
  node returns natively.
  """

  @doc """
  Return recent average blocktime, difficulty, and nethash for the last N
  blocks.

  ## Example:

      iex> SmartChain.Aggregates.get_recent_averages(20)

  """
  @spec get_recent_averages(pos_integer()) :: {:ok, {float(), float(), float()}}
  def get_recent_averages(sample_size) do
    blocks = get_recent_blocks(sample_size)
    average_difficulty = get_average_difficulty(blocks)
    average_blocktime = get_average_blocktime(blocks)
    # Network hashrate is approximated as difficulty per second of blocktime.
    average_nethash = average_difficulty / average_blocktime
    {:ok, {average_blocktime, average_difficulty, average_nethash}}
  end

  @doc """
  Get a recent chunk of blocks, newest first.

  ## Example:

      iex> SmartChain.Aggregates.get_recent_blocks(10)

  """
  @spec get_recent_blocks(pos_integer()) :: [map()]
  def get_recent_blocks(sample_size) do
    {:ok, highest_block_num} = SmartChain.block_number()
    range_floor = highest_block_num - (sample_size - 1)

    Enum.map(highest_block_num..range_floor, fn blocknum ->
      # Match the :ok tag explicitly so a failed RPC call crashes here
      # rather than leaking a bad tuple downstream (the original bound a
      # plain `ok` variable, which matched anything).
      {:ok, block} = SmartChain.get_block_by_number(blocknum)
      block
    end)
  end

  @doc """
  Get the average blocktime (in seconds) from a sample set of blocks.

  Expects `blocks` ordered newest first (as returned by
  `get_recent_blocks/1`) and containing at least two blocks.

  ## Example:

      iex> SmartChain.Aggregates.get_average_blocktime(blocks)

  """
  @spec get_average_blocktime([map()]) :: float()
  def get_average_blocktime([head | tail]) do
    last_timestamp = head["timestamp"] |> SmartChain.unhex()

    {_, sample_total} =
      Enum.reduce(tail, {last_timestamp, 0}, fn block, {previous_timestamp, interval_total} ->
        current_timestamp = SmartChain.unhex(block["timestamp"])
        # Blocks are newest first, so the previous (newer) timestamp minus
        # the current (older) one gives a positive interval.
        {current_timestamp, interval_total + (previous_timestamp - current_timestamp)}
      end)

    sample_total / Enum.count(tail)
  end

  @doc """
  Get the average difficulty from a sample set of blocks.

  ## Example:

      iex> SmartChain.Aggregates.get_average_difficulty(blocks)

  """
  @spec get_average_difficulty([map()]) :: float()
  def get_average_difficulty(blocks) do
    # Seed the accumulator with 0: the original seeded it with the first
    # block's difficulty and then reduced over *all* blocks, counting the
    # first block's difficulty twice and skewing the average.
    total_diff =
      Enum.reduce(blocks, 0, fn block, acc ->
        acc + SmartChain.unhex(block["difficulty"])
      end)

    total_diff / Enum.count(blocks)
  end

  @doc """
  Get recent blocktimes as useful for a timeseries history chart.

  Returns `[block_number, interval_seconds]` pairs in chronological order.

  ## Example:

      iex> SmartChain.Aggregates.get_recent_blocktimes(20)

  """
  @spec get_recent_blocktimes(pos_integer()) :: [[number()]]
  def get_recent_blocktimes(sample_size) do
    # Fetch one extra block so the oldest sampled block still has a
    # predecessor to diff against; reverse so we walk oldest -> newest.
    [head | tail] = get_recent_blocks(sample_size + 1) |> Enum.reverse()
    last_timestamp = head |> Map.get("timestamp") |> SmartChain.unhex()

    {_, recent_blocktimes} =
      Enum.reduce(tail, {last_timestamp, []}, fn block, {previous_timestamp, all_intervals} ->
        current_timestamp = SmartChain.unhex(block["timestamp"])
        interval = current_timestamp - previous_timestamp
        number = SmartChain.unhex(block["number"])
        # Prepend (O(1)) instead of appending (O(n)); reversed once below.
        {current_timestamp, [[number, interval] | all_intervals]}
      end)

    Enum.reverse(recent_blocktimes)
  end

  @doc """
  Get a chunk of blocks with blocktime info, newest first.

  ## Example:

      iex> SmartChain.Aggregates.get_recent_blocks_with_blocktimes(20)

  """
  @spec get_recent_blocks_with_blocktimes(pos_integer()) :: [map()]
  def get_recent_blocks_with_blocktimes(sample_size) do
    [head | tail] = get_recent_blocks(sample_size + 1) |> Enum.reverse()
    # Fixed: the original called Ethereum.unhex/1 here — a module from
    # another project (copy-paste leftover); use SmartChain.unhex/1.
    last_timestamp = head |> Map.get("timestamp") |> SmartChain.unhex()

    {_, recent_blocks} =
      Enum.reduce(tail, {last_timestamp, []}, fn block, {_previous_timestamp, blocks} ->
        current_timestamp = SmartChain.unhex(block["timestamp"])
        number = SmartChain.unhex(block["number"])

        b = %{
          number: number,
          hash: block["hash"],
          timestamp: current_timestamp,
          transaction_count: Enum.count(block["transactions"]),
          extra_data: decode_extra(block["extraData"])
        }

        # Prepending while walking oldest -> newest yields a newest-first
        # list directly, matching the original append-then-reverse result.
        {current_timestamp, [b | blocks]}
      end)

    recent_blocks
  end

  @doc """
  Get transaction counts for recent blocks as useful for a timeseries
  history chart. Returns `[block_number, txn_count]` pairs in
  chronological order.

  ## Example:

      iex> SmartChain.Aggregates.get_recent_transactions_per_block(20)

  """
  @spec get_recent_transactions_per_block(pos_integer()) :: [[non_neg_integer()]]
  def get_recent_transactions_per_block(sample_size) do
    sample_size
    |> get_recent_blocks()
    |> Enum.map(fn block ->
      [SmartChain.unhex(block["number"]), Enum.count(block["transactions"])]
    end)
    # Blocks arrive newest first; reverse for chronological charting.
    |> Enum.reverse()
  end

  # Decode the hex-encoded extraData field of a block into a printable
  # string. Returns :error when the field is missing, is not valid hex, or
  # decodes to invalid UTF-8 (the original returned a bare `false` in the
  # invalid-UTF-8 case, mixing return types).
  @spec decode_extra(String.t() | nil) :: String.t() | :error
  defp decode_extra(nil), do: :error

  defp decode_extra(input) do
    # Drop the leading "0x" prefix before Base16-decoding.
    with {:ok, str} <- input |> String.slice(2..-1) |> String.upcase() |> Base.decode16(),
         true <- String.valid?(str) do
      str
    else
      _ -> :error
    end
  end
end
|
lib/smart_chain/aggregates.ex
| 0.886981
| 0.606498
|
aggregates.ex
|
starcoder
|
defmodule NarouEx.Narou.API do
  @moduledoc """
  API Interface to Shosetsuka-ni-naro.
  API Documentation: https://dev.syosetu.com/man/api/
  """

  alias NarouEx.Models.{Work, Works}
  alias NarouEx.Narou.API.Queries

  @endpoint_url "https://api.syosetu.com/novelapi/api/"

  @doc """
  Fetch works list of a specific user.

  Accepts a single user id or a list of user ids, plus an optional cooldown
  in milliseconds that is slept before the request (useful to rate-limit
  batched calls).

  ## Examples

  ```
  NarouEx.Narou.API.fetch_by_user(1623940)
  {:ok,
   [
     %NarouEx.Models.Work{
       ncode: "N7878FL",
       userid: 1623940,
       title: "明ける世界の夢見る機械",
       novel_type: 1,
       end: 1
       # ... remaining fields elided ...
     }
   ]}
  ```
  """
  @spec fetch_by_user(Queries.user_id(), non_neg_integer()) ::
          {:ok, list(Work.t())} | {:error, atom()}
  # Parenthesized guard: `and` binds tighter than `or`, so the original
  # `is_integer(u) or is_list(u) and is_integer(c)` accepted an integer
  # user_id together with a non-integer cooldown.
  def fetch_by_user(user_id, cooldown_duration \\ 0)
      when (is_integer(user_id) or is_list(user_id)) and is_integer(cooldown_duration) do
    queries = %__MODULE__.Queries{} |> Queries.encode_userid(user_id)
    # Optional client-side rate limiting before hitting the API.
    :timer.sleep(cooldown_duration)

    case queries |> generate_url() |> HTTPoison.get() do
      {:ok, %HTTPoison.Response{status_code: 200, body: body}} ->
        # The API gzips its payload; decompress before JSON parsing.
        # Return parse_body's result directly: the original matched
        # `{:ok, result} =` and crashed on an {:error, _} decode result
        # instead of propagating it as the spec promises.
        body |> :zlib.gunzip() |> parse_body()

      {:ok, %HTTPoison.Response{status_code: 404}} ->
        {:error, :not_found}

      {:error, %HTTPoison.Error{reason: reason}} ->
        {:error, reason}
    end
  end

  # Builds the request URL by URL-encoding the query struct onto the
  # endpoint.
  @spec generate_url(__MODULE__.Queries.t()) :: String.t()
  defp generate_url(query_parameters) when is_struct(query_parameters, __MODULE__.Queries) do
    queries = query_parameters |> Map.from_struct() |> URI.encode_query()
    endpoint_uri = URI.parse(@endpoint_url)

    %{endpoint_uri | query: queries}
    |> URI.to_string()
  end

  # Parses the JSON response body into Work structs. The first array
  # element is an "allcount" metadata record and is dropped.
  @spec parse_body(String.t()) :: {:ok, Works.t()} | {:error, atom()}
  defp parse_body(body) when is_bitstring(body) do
    case Jason.decode(body) do
      {:ok, decoded_response} when is_list(decoded_response) ->
        result =
          decoded_response
          |> Enum.drop(1)
          |> Enum.map(&keys_to_atoms/1)
          |> Enum.map(&Work.from/1)

        {:ok, result}

      {:ok, _decoded_response} ->
        {:error, :unexpected_response_body}

      {:error, reason} ->
        # Removed the IO.inspect/1 debugging leftover; callers receive the
        # decode failure as data.
        {:error, reason}
    end
  end

  # Recursively converts string map keys to atoms.
  @spec keys_to_atoms(map()) :: map()
  defp keys_to_atoms(json) when is_map(json) do
    Map.new(json, &reduce_keys_to_atoms/1)
  end

  # NOTE(review): String.to_atom/1 on response keys can exhaust the atom
  # table if the upstream payload were untrusted; the key set here is the
  # fixed Narou API schema, but consider String.to_existing_atom/1.
  @spec reduce_keys_to_atoms({binary(), any()}) :: {atom(), any()}
  defp reduce_keys_to_atoms({key, val}) when is_map(val), do: {String.to_atom(key), keys_to_atoms(val)}
  defp reduce_keys_to_atoms({key, val}) when is_list(val), do: {String.to_atom(key), Enum.map(val, &keys_to_atoms/1)}
  defp reduce_keys_to_atoms({key, val}), do: {String.to_atom(key), val}
end
|
lib/narou/api.ex
| 0.675551
| 0.653424
|
api.ex
|
starcoder
|
defmodule Fsharpy.FromFsharp do
  @moduledoc """
  Helper functions to take FSI output and convert `val` items into their
  approximate Elixir equivalents.
  """

  @doc """
  Extracts all `val` lines from raw FSI output and deserializes each one.

  Returns a list of single-entry maps (`%{name => value}`), one per `val`.
  """
  def get_vals(raw_output) do
    raw_output
    # FSI wraps long list values onto the next line; re-join them so each
    # `val` stays on a single line for line-oriented parsing below.
    |> String.replace("list =\n", "list = ")
    |> String.split("\n")
    |> Enum.filter(&String.starts_with?(&1, "val "))
    |> Enum.map(&get_val/1)
  end

  @doc """
  Parses a single FSI `val` line, e.g. `val x : int = 42`, into a map such
  as `%{"x" => 42}`. Lines with an unrecognized type are returned as a raw
  `%{name: _, type: _, value: _}` map.
  """
  def get_val(line) do
    [name_part, rest] = String.split(line, " : ", parts: 2, trim: true)
    [type_part, value_part] = String.split(rest, " = ", parts: 2, trim: true)
    variable_name = String.trim_leading(name_part, "val ")
    attempt_deserialization(%{name: variable_name, type: type_part, value: value_part})
  end

  # Typed clauses convert the textual value into the closest Elixir term;
  # the final clause passes unknown types through untouched.
  # (Renamed from the original's misspelled `attempt_deserializion`.)
  defp attempt_deserialization(%{name: name, type: "int", value: value}) do
    %{name => to_int(value)}
  end

  defp attempt_deserialization(%{name: name, type: "float", value: value}) do
    %{name => to_float(value)}
  end

  defp attempt_deserialization(%{name: name, type: "string", value: value}) do
    # Strip the surrounding quotes FSI prints around string values.
    stripped =
      value
      |> String.replace_prefix("\"", "")
      |> String.replace_suffix("\"", "")

    %{name => stripped}
  end

  defp attempt_deserialization(%{name: name, type: "bool", value: value}) do
    bool =
      case value do
        "true" -> true
        "false" -> false
      end

    %{name => bool}
  end

  # 2- and 3-element int tuples share one clause (the original duplicated
  # the body per arity); likewise for float tuples below.
  defp attempt_deserialization(%{name: name, type: type, value: value})
       when type in ["int * int", "int * int * int"] do
    %{name => to_tuple(value, &to_int/1)}
  end

  defp attempt_deserialization(%{name: name, type: type, value: value})
       when type in ["float * float", "float * float * float"] do
    %{name => to_tuple(value, &to_float/1)}
  end

  defp attempt_deserialization(%{name: name, type: "int list", value: value}) do
    items =
      value
      |> String.replace_prefix("[", "")
      |> String.replace_suffix("]", "")
      |> String.split("; ")
      |> Enum.map(&to_int/1)

    %{name => items}
  end

  # Fallback: unknown type, return the parsed parts untouched.
  defp attempt_deserialization(map), do: map

  # Parses "(a, b[, c])" into a tuple using parser_fun per element.
  defp to_tuple(value, parser_fun) do
    value
    |> String.trim_leading("(")
    |> String.trim_trailing(")")
    |> String.split(", ", trim: true)
    |> Enum.map(parser_fun)
    |> List.to_tuple()
  end

  defp to_int(int_string) do
    {result, _} = Integer.parse(int_string)
    result
  end

  defp to_float(float_string) do
    {result, _} = Float.parse(float_string)
    result
  end
end
|
lib/fsharpy/from_fsharp.ex
| 0.762645
| 0.468669
|
from_fsharp.ex
|
starcoder
|
defmodule Aoc.Day08 do
# Advent of Code 2016, day 8: simulate a 6x50 pixel screen driven by
# "rect AxB", "rotate row", and "rotate column" instructions. The screen
# is a Matrex matrix of 0.0/1.0 values; Matrex indices are 1-based while
# the puzzle input is 0-based.
@moduledoc false
import Aoc.Utils
# Puzzle entry points: part 1 counts lit pixels, part 2 draws the screen.
def run(1), do: solve1(get_input(8))
def run(2), do: solve2(get_input(8))
# Apply every instruction to a zeroed screen, then count lit pixels by
# summing all matrix cells (each lit cell contributes 1.0).
def solve1(input) do
String.split(input, "\n")
|> Enum.map(&String.split/1)
|> Enum.reduce(Matrex.zeros(6, 50), &exec/2)
|> Enum.sum()
end
# "rect COLSxROWS": light every pixel in the top-left cols x rows block,
# leaving the rest of the screen unchanged.
def exec(["rect", size], screen) do
[cols, rows] = String.split(size, "x") |> Enum.map(&String.to_integer/1)
{scr_rows, scr_cols} = Matrex.size(screen)
Matrex.new(scr_rows, scr_cols, fn row, col ->
if row <= rows and col <= cols, do: 1, else: screen[row][col]
end)
end
# "rotate column x=N by K": shift column N down by K with wrap-around.
# Integer.parse over ["x", "5"] yields [:error, {5, ""}]; the match pulls
# the parsed integer out of the second element. `col + 1` converts the
# puzzle's 0-based index to Matrex's 1-based one.
def exec(["rotate", "column", col, "by", by], screen) do
[_, {col, _}] = String.split(col, "=") |> Enum.map(&Integer.parse/1)
by = String.to_integer(by)
rotated_col =
[Matrex.column_to_list(screen, col + 1) |> rotate(by)] |> Matrex.new() |> Matrex.transpose()
Matrex.set_column(screen, col + 1, rotated_col)
end
# "rotate row y=N by K": shift row N right by K with wrap-around, then
# splice the rotated row back into the screen via set_row/3.
def exec(["rotate", "row", row, "by", by], screen) do
[_, {row, _}] = String.split(row, "=") |> Enum.map(&Integer.parse/1)
by = String.to_integer(by)
rotated_row = [Matrex.row_to_list(screen, row + 1) |> rotate(by)] |> Matrex.new()
set_row(screen, row + 1, rotated_row)
end
# Rebuild the matrix with row n replaced by m_row by stitching together the
# row-slices before and after it. The `n..n -> n` case collapses a
# single-row range to a bare index because Matrex slices those differently.
# The cond branches ensure the degenerate ranges (1..0 when n == 1, or
# size+1..size when n is the last row) are never actually sliced.
def set_row(m, n, m_row) do
range1 =
case 1..(n - 1) do
n..n -> n
r -> r
end
range2 =
case (n + 1)..elem(Matrex.size(m), 0) do
n..n -> n
r -> r
end
cond do
n == 1 -> Matrex.new([[m_row], [m[range2]]])
n == elem(Matrex.size(m), 0) -> Matrex.new([[m[range1]], [m_row]])
true -> Matrex.new([[m[range1]], [m_row], [m[range2]]])
end
end
# Rotate a list right by n positions: repeatedly move the last element to
# the front. n is expected to be non-negative.
def rotate([], _), do: []
def rotate(l, 0), do: l
def rotate(l, 1) do
{l1, [last]} = Enum.split(l, -1)
[last | l1]
end
def rotate(l, n) do
{l1, [last]} = Enum.split(l, -1)
rotate([last | l1], n - 1)
end
# Part 2: run the instructions and render the final screen as an 8-bit
# mono heatmap so the displayed letters can be read by a human.
def solve2(input) do
String.split(input, "\n")
|> Enum.map(&String.split/1)
|> Enum.reduce(Matrex.zeros(6, 50), &exec/2)
|> Matrex.heatmap(:mono8)
end
end
|
lib/aoc/day08.ex
| 0.655997
| 0.432842
|
day08.ex
|
starcoder
|
module Integer
module Behavior
mixin Numeric
% Iterates the given function n times, passing values from zero
% to n - 1.
%
% ## Examples
%
% 5.times -> (x) IO.puts x % => 0 1 2 3 4
%
def times(function)
% Dispatch on the callback's arity: zero-arity callbacks are invoked
% without the counter, one-arity callbacks receive it.
if function.arity == 0
times_0(self, 0, function)
else
times_1(self, 0, function)
end
self
end
% Iterates the given function n times, passing values from zero
% to n - 1. Also has an accumulator similar to fold to store a
% value between computations.
%
% ## Examples
%
% 5.times(0, -> (acc, x) acc + x) % => 10
%
def times(acc, function)
times_2(self, 0, function, acc)
end
% Define div and rem by emitting Erlang abstract-format clauses that call
% the corresponding Erlang operator directly, avoiding two hand-written
% wrappers.
['div, 'rem].each do (op)
define_erlang_method __FILE__, __LINE__, op, 1, [
{
'clause, __LINE__, [{'var, __LINE__, 'other}], [], [
{ 'op, __LINE__, op, {'var, __LINE__, 'self}, {'var, __LINE__, 'other} }
]
}
]
end
% Convert an integer to the equivalent bit string.
%
% ## Examples
%
% 65.chr % => "A"
% 97.chr % => "a"
%
def chr
<<self>>
end
% Convert an integer to a char list of its digits.
%
% ## Examples
%
% 123.to_char_list % => [49, 50, 51]
%
def to_char_list
Erlang.integer_to_list(self)
end
% Returns the decimal string representation of the integer.
def inspect
Erlang.integer_to_list(self).to_bin
end
private
% Recursive workers for times/1 and times/2. In each, the clause whose
% counter equals the limit terminates the recursion.
def times_0(limit, limit, _function)
end
def times_0(limit, counter, function)
function.()
times_0(limit, 1 + counter, function)
end
def times_1(limit, limit, _function)
end
def times_1(limit, counter, function)
function.(counter)
times_1(limit, 1 + counter, function)
end
def times_2(limit, limit, _function, acc)
acc
end
def times_2(limit, counter, function, acc)
new_acc = function.(counter, acc)
times_2(limit, 1 + counter, function, new_acc)
end
end
end
|
lib/integer.ex
| 0.52342
| 0.655084
|
integer.ex
|
starcoder
|
defmodule ApproximateHistogram do
  @moduledoc """
  A fixed-size approximate histogram.

  Values are stored in at most `max_bins` bins kept sorted by value. While
  under capacity each distinct value gets its own bin; once at capacity a
  new value is merged into the nearest existing bin using a count-weighted
  average, trading exactness for bounded memory.
  """

  @type t :: %__MODULE__{
          bins: [bin],
          options: options
        }
  @type bin :: {value, count}
  @type value :: number()
  @type count :: non_neg_integer()
  @type options :: %{max_bins: pos_integer()}

  defstruct [
    :bins,
    :options,
  ]

  @default_size 50

  @doc "Creates an empty histogram that will use at most `size` bins."
  @spec new(pos_integer()) :: t
  def new(size \\ @default_size) do
    %__MODULE__{
      bins: [],
      options: %{max_bins: size},
    }
  end

  @doc "Total number of recorded values (sum of all bin counts)."
  @spec size(t) :: non_neg_integer()
  def size(%__MODULE__{} = histo) do
    Enum.reduce(histo.bins, 0, fn {_, count}, total -> total + count end)
  end

  @doc "Maximum number of bins this histogram may use."
  @spec max_bins(t) :: non_neg_integer()
  def max_bins(%__MODULE__{} = histo) do
    histo.options.max_bins
  end

  @doc """
  Records `value` into the histogram.

  Under capacity the value is inserted as (or merged into) an exact bin; at
  capacity it is folded into the nearest bin via a count-weighted average.
  """
  @spec add(t, value) :: t
  def add(%__MODULE__{} = histo, value) do
    if at_capacity?(histo) do
      # Merge into the nearest bin: fold the new value into the bin's
      # count-weighted average and bump its count.
      {bef, closest, aft} = split(histo.bins, value)

      new_value =
        (bin_value(closest) * bin_count(closest) + value) / (bin_count(closest) + 1)

      new_bin = {new_value, bin_count(closest) + 1}
      %{histo | bins: bef ++ [new_bin] ++ aft}
    else
      # Insert exactly, keeping bins sorted:
      #   [before] [closest, new] [after]  <- value greater than closest
      #   [before] [new, closest] [after]  <- value smaller than closest
      #   [before] [new]          [after]  <- empty histogram / same value
      # Values are normalized to floats so 3 and 3.0 share a bin.
      float_value = value / 1
      {bef, closest, aft} = split(histo.bins, float_value)

      middle =
        cond do
          closest == nil ->
            [{float_value, 1}]

          bin_value(closest) == float_value ->
            [{float_value, bin_count(closest) + 1}]

          bin_value(closest) < float_value ->
            [closest, {float_value, 1}]

          bin_value(closest) > float_value ->
            [{float_value, 1}, closest]
        end

      %{histo | bins: bef ++ middle ++ aft}
    end
  end

  @doc "The representative value of a bin."
  def bin_value({value, _}), do: value

  @doc "The number of recorded values a bin represents."
  def bin_count({_, count}), do: count

  @doc "Number of bins currently in use."
  def bins_used(%__MODULE__{} = histo) do
    Enum.count(histo.bins)
  end

  @doc "Returns the bins as a `[{value, count}]` list sorted by value."
  @spec to_list(t) :: list(bin)
  def to_list(%__MODULE__{} = histo) do
    histo.bins
  end

  @doc "Approximate value at the given percentile (0..100)."
  def percentile(%__MODULE__{} = histo, percentile) do
    target = size(histo) * (percentile / 100)

    Enum.reduce_while(histo.bins, target, fn {value, count}, remaining ->
      next = remaining - count

      if next <= 0 do
        {:halt, value}
      else
        {:cont, next}
      end
    end)
  end

  @doc "Approximate percentile (0..100) that `target` would slot into."
  def percentile_for_value(%__MODULE__{} = histo, target) do
    # Count how many recorded values fall at or below `target`.
    found_at =
      Enum.reduce_while(histo.bins, 0, fn {bin_val, bin_count}, count ->
        if bin_val > target do
          {:halt, count}
        else
          {:cont, count + bin_count}
        end
      end)

    # Protect against division by zero on an empty histogram.
    case size(histo) do
      0 -> 0
      s -> found_at / s * 100
    end
  end

  @spec at_capacity?(t) :: boolean()
  defp at_capacity?(%__MODULE__{} = histo) do
    histo.options.max_bins == Enum.count(histo.bins)
  end

  # Splits sorted bins into {[before], closest, [after]} where `closest` is
  # the bin whose value is nearest to `value` (nil when bins is empty).
  # Fixed: the original computed `value - bin_value(aft_closest)`, which is
  # always <= 0 (the "after" bin is >= value), so `dist_to_bef < dist_to_aft`
  # was always false and the after-bin was chosen even when the before-bin
  # was strictly nearer. Compare true (non-negative) distances instead;
  # ties still go to the after-bin, as before.
  defp split(bins, value) do
    {bef, aft} = Enum.split_while(bins, fn {bin_val, _} -> value > bin_val end)
    bef_closest = List.last(bef)
    bef_rest = Enum.drop(bef, -1)
    aft_closest = List.first(aft)
    aft_rest = Enum.drop(aft, 1)

    cond do
      bef_closest == nil ->
        {[], aft_closest, aft_rest}

      aft_closest == nil ->
        {bef_rest, bef_closest, []}

      true ->
        dist_to_bef = value - bin_value(bef_closest)
        dist_to_aft = bin_value(aft_closest) - value

        if dist_to_bef < dist_to_aft do
          {bef_rest, bef_closest, aft}
        else
          {bef, aft_closest, aft_rest}
        end
    end
  end
end
|
lib/approximate_histogram.ex
| 0.826397
| 0.576482
|
approximate_histogram.ex
|
starcoder
|
defmodule XlsxReader.Conversion do
@moduledoc """
Conversion of cell values to Elixir types.
"""
@typedoc """
Date system identified by its reference year
"""
@type date_system :: 1900 | 1904
@typedoc """
Supported number types identified by module name
"""
@type number_type :: Integer | Float | Decimal | String
@typedoc """
Supported number value types
"""
@type number_value :: integer() | float() | Decimal.t() | String.t()
@doc """
Converts the string representation of a truth value into a boolean.
Cells with type attribute `"b"` store boolean values as a single digit: `"1"` or `"0"`.
## Examples
    iex> XlsxReader.Conversion.to_boolean("1")
    {:ok, true}
    iex> XlsxReader.Conversion.to_boolean("0")
    {:ok, false}
    iex> XlsxReader.Conversion.to_boolean("true")
    :error
"""
@spec to_boolean(String.t()) :: {:ok, boolean()} | :error
def to_boolean("1"), do: {:ok, true}
def to_boolean("0"), do: {:ok, false}
def to_boolean(_), do: :error
@doc """
Converts a string into the given number type.
Supported number types are: `Integer`, `Float`, `String` or `Decimal` (requires the [decimal](https://github.com/ericmj/decimal) library)
## Examples
    iex> XlsxReader.Conversion.to_number("123", Integer)
    {:ok, 123}
    iex> XlsxReader.Conversion.to_number("-123.45", Float)
    {:ok, -123.45}
    iex> XlsxReader.Conversion.to_number("0.12345e3", Float)
    {:ok, 123.45}
    iex> XlsxReader.Conversion.to_number("-123.45", Decimal)
    {:ok, %Decimal{coef: 12345, exp: -2, sign: -1}}
    iex> XlsxReader.Conversion.to_number("0.12345E3", Decimal)
    {:ok, %Decimal{coef: 12345, exp: -2, sign: 1}}
    iex> XlsxReader.Conversion.to_number("-123.45", String)
    {:ok, "-123.45"}
    iex> XlsxReader.Conversion.to_number("0.12345e3", String)
    {:ok, "0.12345e3"}
    iex> XlsxReader.Conversion.to_number("123.0", Integer)
    :error
"""
# Dispatches on the requested type module; `String` is a pass-through.
@spec to_number(String.t(), number_type()) :: {:ok, number_value()} | :error
def to_number(string, Integer) do
to_integer(string)
end
def to_number(string, Float) do
to_float(string)
end
def to_number(string, Decimal) do
to_decimal(string)
end
def to_number(string, String) do
{:ok, string}
end
@doc """
Converts a string into a float.
## Examples
    iex> XlsxReader.Conversion.to_float("123")
    {:ok, 123.0}
    iex> XlsxReader.Conversion.to_float("-123.45")
    {:ok, -123.45}
    iex> XlsxReader.Conversion.to_float("0.12345e3")
    {:ok, 123.45}
    iex> XlsxReader.Conversion.to_float("0.12345E3")
    {:ok, 123.45}
    iex> XlsxReader.Conversion.to_float("bogus")
    :error
"""
# Requires the whole string to parse (no trailing garbage allowed).
@spec to_float(String.t()) :: {:ok, float()} | :error
def to_float(string) do
case Float.parse(string) do
{number, ""} ->
{:ok, number}
_ ->
:error
end
end
@doc """
Converts a string into an arbitrary precision [decimal](https://github.com/ericmj/decimal).
## Examples
    iex> XlsxReader.Conversion.to_decimal("123")
    {:ok, %Decimal{coef: 123, exp: 0, sign: 1}}
    iex> XlsxReader.Conversion.to_decimal("-123.45")
    {:ok, %Decimal{coef: 12345, exp: -2, sign: -1}}
    iex> XlsxReader.Conversion.to_decimal("0.12345e3")
    {:ok, %Decimal{coef: 12345, exp: -2, sign: 1}}
    iex> XlsxReader.Conversion.to_decimal("0.12345E3")
    {:ok, %Decimal{coef: 12345, exp: -2, sign: 1}}
    iex> XlsxReader.Conversion.to_decimal("bogus")
    :error
"""
@spec to_decimal(String.t()) :: {:ok, Decimal.t()} | :error
def to_decimal(string) do
# Decimal < 2.0 returns {:ok, decimal} / :error while Decimal >= 2.0
# returns {decimal, rest} / :error — both shapes are matched so either
# library version works.
case Decimal.parse(string) do
{:ok, decimal} ->
{:ok, decimal}
{decimal, ""} ->
{:ok, decimal}
_ ->
:error
end
end
@doc """
Converts a string into an integer.
## Examples
    iex> XlsxReader.Conversion.to_integer("123")
    {:ok, 123}
    iex> XlsxReader.Conversion.to_integer("-123")
    {:ok, -123}
    iex> XlsxReader.Conversion.to_integer("123.45")
    :error
    iex> XlsxReader.Conversion.to_integer("bogus")
    :error
"""
# Requires the whole string to parse (so "123.45" is rejected).
@spec to_integer(String.t()) :: {:ok, integer()} | :error
def to_integer(string) do
case Integer.parse(string) do
{number, ""} ->
{:ok, number}
_ ->
:error
end
end
# This is why we can't have nice things: http://www.cpearson.com/excel/datetime.htm
# The 1900 base is Dec 30 (not 31) to compensate for Excel's historical
# leap-year-1900 bug while keeping serial numbers compatible.
@base_date_system_1900 ~D[1899-12-30]
@base_date_system_1904 ~D[1904-01-01]
@doc """
Returns the base date for the given date system.
## Examples
    iex> XlsxReader.Conversion.base_date(1900)
    ~D[1899-12-30]
    iex> XlsxReader.Conversion.base_date(1904)
    ~D[1904-01-01]
    iex> XlsxReader.Conversion.base_date(2019)
    :error
"""
@spec base_date(date_system()) :: Date.t() | :error
def base_date(1900), do: @base_date_system_1900
def base_date(1904), do: @base_date_system_1904
def base_date(_date_system), do: :error
@doc """
Converts a serial date into a `Date`.
## Examples
    iex> XlsxReader.Conversion.to_date("40396")
    {:ok, ~D[2010-08-06]}
    iex> XlsxReader.Conversion.to_date("43783")
    {:ok, ~D[2019-11-14]}
    iex> XlsxReader.Conversion.to_date("1", ~D[1999-12-31])
    {:ok, ~D[2000-01-01]}
    iex> XlsxReader.Conversion.to_date("-1", ~D[1999-12-31])
    :error
"""
# NOTE(review): rejects serial day zero and below (`days > 0.0`), while
# to_date_time/2 accepts day zero (`days >= 0.0`) — confirm the asymmetry
# is intentional.
@spec to_date(String.t(), Date.t()) :: {:ok, Date.t()} | :error
def to_date(string, base_date \\ @base_date_system_1900) do
case split_serial_date(string) do
{:ok, days, _fraction_of_24} when days > 0.0 ->
{:ok, Date.add(base_date, days)}
{:ok, _days, _fraction_of_24} ->
:error
error ->
error
end
end
@doc """
Converts a serial date to a `NaiveDateTime`.
## Examples
    iex> XlsxReader.Conversion.to_date_time("43783.0")
    {:ok, ~N[2019-11-14 00:00:00]}
    iex> XlsxReader.Conversion.to_date_time("43783.760243055556")
    {:ok, ~N[2019-11-14 18:14:45]}
    iex> XlsxReader.Conversion.to_date_time("0.4895833333333333")
    {:ok, ~N[1899-12-30 11:45:00]}
    iex> XlsxReader.Conversion.to_date_time("1.760243055556", ~D[1999-12-31])
    {:ok, ~N[2000-01-01 18:14:45]}
    iex> XlsxReader.Conversion.to_date_time("-30.760243055556", ~D[1999-12-31])
    :error
"""
@spec to_date_time(String.t(), Date.t()) :: {:ok, NaiveDateTime.t()} | :error
def to_date_time(string, base_date \\ @base_date_system_1900) do
with {:ok, days, fraction_of_24} when days >= 0.0 <- split_serial_date(string),
date <- Date.add(base_date, days),
{:ok, time} <- fraction_of_24_to_time(fraction_of_24) do
NaiveDateTime.new(date, time)
else
{:ok, _, _} ->
:error
{:error, _} ->
:error
error ->
error
end
end
## Private
# Splits a serial date into `{:ok, days_since_base_date, time_as_fraction_of_24}`
# (integer whole days, fractional remainder in [0, 1)).
@spec split_serial_date(String.t()) :: {:ok, integer(), float()} | :error
defp split_serial_date(string) do
with {:ok, value} <- to_float(string) do
days = Float.floor(value)
{:ok, trunc(days), value - days}
end
end
@seconds_per_day 60 * 60 * 24
# Converts a fraction of a day into a Time, rounded to the nearest second.
# NOTE(review): a fraction rounding up to 86_400s would yield hour 24 and
# an {:error, _} from Time.new/3 — confirm inputs stay clear of midnight
# rollover.
@spec fraction_of_24_to_time(float()) :: {:ok, Time.t()} | {:error, atom()}
defp fraction_of_24_to_time(fraction_of_24) do
seconds = round(fraction_of_24 * @seconds_per_day)
Time.new(
seconds |> div(3600),
seconds |> div(60) |> rem(60),
seconds |> rem(60)
)
end
end
|
lib/xlsx_reader/conversion.ex
| 0.923988
| 0.596492
|
conversion.ex
|
starcoder
|
defmodule Temple.Component do
@moduledoc """
API for defining components.
Component modules are basically normal Phoenix View modules. The contents of the `render` macro are compiled into a `render/2` function. This means that you can define functions in your component module and use them in your component markup.
Since component modules are view modules, the assigns you pass to the component are accessible via the `@` macro and the `assigns` variable.
You must `require Temple.Component` in your views that use components, as the `c` and `slot` generate markup that uses macros provided by Temple.
## Components
```elixir
defmodule MyAppWeb.Components.Flash do
import Temple.Component
def border_class(:info), do: "border-blue-500"
def border_class(:warning), do: "border-yellow-500"
def border_class(:error), do: "border-red-500"
def border_class(:success), do: "border-green-500"
render do
div class: "border rounded p-2 #\{assigns[:class]} #\{border_class(@message_type)}" do
slot :default
end
end
end
```
Components are used by calling the `c` keyword, followed by the component module and any assigns you need to pass to the template.
`c` is a _**compile time keyword**_, not a function or a macro, so you won't see it in the generated documention.
```
c MyAppWeb.Components.Flash, class: "font-bold", message_type: :info do
ul do
for info <- infos do
li class: "p-4" do
info.message
end
end
end
end
```
Since components are just modules, if you alias your module, you can use them more ergonomically.
```
# lib/my_app_web/views/page_view.ex
alias MyAppWeb.Components.Flex
# lib/my_app_web/templates/page/index.html.exs
c Flex, class: "justify-between items center" do
for item <- items do
div class: "p-4" do
item.name
end
end
end
```
## Slots
Components can use slots, which are named placeholders for markup that can be passed to the component by the caller.
Slots are invoked by using the `slot` keyword, followed by the name of the slot and any assigns you'd like to pass into the slot.
`slot` is a _**compile time keyword**_, not a function or a macro, so you won't see it in the generated documention.
```elixir
defmodule Flex do
import Temple.Component
render do
div class: "flex #\{@class}" do
slot :default
end
end
end
```
You can also use "named slots", which allow for data to be passed back into them. This is very useful
when a component needs to pass data from the inside of the component back to the caller, like when rendering a form in LiveView.
```elixir
defmodule Form do
import Temple.Component
render do
form = form_for(@changeset, @action, assigns)
form
slot :f, form: form
"</form>"
end
end
```
By default, the body of a component fills the `:default` slot.
Named slots can be defined by invoking the `slot` keyword with the name of the slot and a do block.
You can also pattern match on any assigns that are being passed into the slot as if you were defining an anonymous function.
`slot` is a _**compile time keyword**_, not a function or a macro, so you won't see it in the generated documention.
```elixir
# lib/my_app_web/templates/post/new.html.lexs
c Form, changeset: @changeset,
action: @action,
class: "form-control",
phx_submit: :save,
phx_change: :validate do
slot :f, %{form: f} do
label f do
"Widget Name"
text_input f, :name, class: "text-input"
end
submit "Save!"
end
end
```
"""
@doc false
defmacro __component__(module, assigns \\ [], block \\ []) do
{inner_block, assigns} =
case {block, assigns} do
{[do: do_block], _} -> {rewrite_do(do_block), assigns}
{_, [do: do_block]} -> {rewrite_do(do_block), []}
{_, _} -> {nil, assigns}
end
if is_nil(inner_block) do
quote do
Phoenix.View.render(unquote(module), :self, unquote(assigns))
end
else
quote do
Phoenix.View.render(
unquote(module),
:self,
Map.put(Map.new(unquote(assigns)), :inner_block, unquote(inner_block))
)
end
end
end
@doc false
defmacro __render_block__(inner_block, argument \\ []) do
quote do
unquote(inner_block).(unquote(argument))
end
end
defp rewrite_do([{:->, meta, _} | _] = do_block) do
{:fn, meta, do_block}
end
defp rewrite_do(do_block) do
quote do
fn _ ->
unquote(do_block)
end
end
end
@doc """
Defines a component template.
## Usage
```elixir
defmodule MyAppWeb.Components.Flash do
import Temple.Component
def border_class(:info), do: "border-blue-500"
def border_class(:warning), do: "border-yellow-500"
def border_class(:error), do: "border-red-500"
def border_class(:success), do: "border-green-500"
render do
div class: "border rounded p-2 #\{assigns[:class]} #\{border_class(@message_type)}" do
slot :default
end
end
end
```
"""
defmacro render(block) do
quote do
def render(var!(assigns)) do
require Temple
_ = var!(assigns)
Temple.compile(unquote(Temple.Component.__engine__()), unquote(block))
end
def render(:self, var!(assigns)) do
require Temple
_ = var!(assigns)
Temple.compile(unquote(Temple.Component.__engine__()), unquote(block))
end
end
end
@doc """
Defines a component module.
This macro makes it easy to define components without creating a separate file. It literally inlines a component module.
Since it defines a module inside of the current module, local function calls from the outer module won't be available. For convenience, the outer module is aliased for you, so you can call remote functions with a shorter module name.
## Usage
```elixir
def MyAppWeb.SomeView do
  use MyAppWeb.SomeView, :view
  import Temple.Component, only: [defcomp: 2]

  # define a function in outer module
  def foobar(), do: "foobar"

  # define a component
  defcomp Button do
    button id: SomeView.foobar(), # `MyAppWeb.SomeView` is aliased for you.
           class: "text-sm px-3 py-2 rounded #\{assigns[:extra_classes]}",
           type: "submit" do
      slot :default
    end
  end
end

# use the component in a SomeView template. Or else, you must alias `MyAppWeb.SomeView.Button`
c Button, extra_classes: "border-2 border-red-500" do
  "Submit!"
end
```
"""
defmacro defcomp(module, [do: block] = _block) do
  # location: :keep so compile errors point at the caller's source file.
  quote location: :keep do
    defmodule unquote(module) do
      import Temple.Component
      # Alias the defining (outer) module so the component body can call its
      # public functions with a short name.
      alias unquote(__CALLER__.module)

      render do
        unquote(block)
      end
    end
  end
end
@doc false
# Picks the template engine to compile with: prefer the LiveView engine when
# it is compiled in, fall back to the plain Phoenix.HTML engine, and return
# nil when neither is available.
def __engine__() do
  Enum.find(
    [Phoenix.LiveView.Engine, Phoenix.HTML.Engine],
    &Code.ensure_loaded?/1
  )
end
end
|
lib/temple/component.ex
| 0.872836
| 0.86988
|
component.ex
|
starcoder
|
defmodule Cog.Events.ApiEvent do
  @moduledoc """
  Encapsulates information about REST API request processing
  events. Each event is a map; all events share a core set of fields,
  while each event sub-type will have an additional set of fields
  particular to that sub-type.
  The functions in this module generate API events from `Plug.Conn`
  instances, and depend on various metadata having been added to the
  `Conn` beforehand; see `Cog.Plug.Util` and `Cog.Plug.Event`
  # Common Fields
  * `request_id`: The unique identifier assigned to the request. All
    events emitted in the processing of the request will share the
    same ID.
  * `event`: label indicating which API request lifecycle event is
    being recorded.
  * `timestamp`: When the event was created, in UTC, as an ISO-8601
    extended-format string (e.g. `"2016-01-07T15:08:00.000000Z"`). For
    pipelines that execute in sub-second time, also see
    `elapsed_microseconds`.
  * `elapsed_microseconds`: Number of microseconds elapsed since
    beginning of request processing to the creation of this event.
  * `http_method`: the HTTP method of the request being processed as
    an uppercase string.
  * `path`: the path portion of the request URL
  # Event-specific Data
  Depending on the type of event, the map will contain additional
  different keys. These are detailed here for each event.
  ## `api_request_started`
  No extra fields
  ## `api_request_authenticated`
  * `user`: (String) the Cog username of the authenticated
    requestor. Note that this is not a chat handle.
  ## `api_request_finished`
  * `status`: (Integer) the HTTP status code of the response.
  """

  alias Plug.Conn

  import Cog.Events.Util, only: [elapsed: 2]
  import Cog.Plug.Util, only: [get_request_id: 1,
                               get_start_time: 1,
                               get_user: 1]

  @type event_label :: :api_request_started |
                       :api_request_authenticated |
                       :api_request_finished

  @doc "Create an `api_request_started` event."
  def started(%Conn{}=conn),
    do: new(conn, :api_request_started)

  # NOTE: doc previously said "api_request_started" (copy-paste error).
  @doc "Create an `api_request_authenticated` event."
  def authenticated(%Conn{}=conn) do
    # Should never be called if there's no user set
    new(conn, :api_request_authenticated, %{user: get_user(conn).username})
  end

  # NOTE: doc previously said "api_request_started" (copy-paste error).
  @doc "Create an `api_request_finished` event."
  def finished(%Conn{}=conn),
    do: new(conn, :api_request_finished, %{status: conn.status})

  # Centralize common event creation logic.
  # `extra_fields` carries the event-specific keys; the common fields take
  # precedence on key collision because `Map.merge/2` favors the second map.
  defp new(conn, event, extra_fields \\ %{}) do
    start = get_start_time(conn)
    # The "started" event is, by definition, at zero elapsed time, so reuse
    # the recorded start time instead of reading the clock again.
    {now, elapsed_us} = case event do
      :api_request_started -> {start, 0}
      _ ->
        now = DateTime.utc_now()
        {now, elapsed(start, now)}
    end
    Map.merge(extra_fields,
              %{request_id: get_request_id(conn),
                http_method: conn.method,
                path: conn.request_path,
                event: event,
                timestamp: DateTime.to_iso8601(now),
                elapsed_microseconds: elapsed_us})
  end
end
|
lib/cog/events/api_event.ex
| 0.888187
| 0.546254
|
api_event.ex
|
starcoder
|
defmodule CSV.Encoding.Encoder do
  use CSV.Defaults

  @moduledoc ~S"""
  The Encoder CSV module takes a table stream and transforms it into RFC 4180
  compliant stream of lines for writing to a CSV File or other IO.
  """

  @doc """
  Encode a table stream into a stream of RFC 4180 compliant CSV lines for
  writing to a file or other IO.
  ## Options
  These are the options:
  * `:separator` – The separator token to use, defaults to `?,`.
  Must be a codepoint (syntax: ? + your separator token).
  * `:delimiter` – The delimiter token to use, defaults to `\"\\r\\n\"`.
  * `:headers` – When set to `true`, uses the keys of the first map as
  the first element in the stream. All subsequent elements are the values
  of the maps. When set to a list, will use the given list as the first
  element in the stream and order all subsequent elements using that list.
  When set to `false` (default), will use the raw inputs as elements.
  When set to anything but `false`, all elements in the input stream are
  assumed to be maps.
  ## Examples
  Convert a stream of rows with cells into a stream of lines:
      iex> [~w(a b), ~w(c d)]
      iex> |> CSV.Encoding.Encoder.encode
      iex> |> Enum.take(2)
      [\"a,b\\r\\n\", \"c,d\\r\\n\"]
  Convert a stream of maps into a stream of lines:
      iex> [%{"a" => 1, "b" => 2}, %{"a" => 3, "b" => 4}]
      iex> |> CSV.Encoding.Encoder.encode(headers: true)
      iex> |> Enum.to_list()
      [\"a,b\\r\\n\", \"1,2\\r\\n\", \"3,4\\r\\n\"]
  Convert a stream of rows with cells with escape sequences into a stream of
  lines:
      iex> [[\"a\\nb\", \"\\tc\"], [\"de\", \"\\tf\\\"\"]]
      iex> |> CSV.Encoding.Encoder.encode(separator: ?\\t, delimiter: \"\\n\")
      iex> |> Enum.take(2)
      [\"\\\"a\\\\nb\\\"\\t\\\"\\\\tc\\\"\\n\", \"de\\t\\\"\\\\tf\\\"\\\"\\\"\\n\"]
  """
  def encode(stream, options \\ []) do
    encode_stream(stream, Keyword.get(options, :headers, false), options)
  end

  # Without headers, every input row maps straight to one encoded line.
  defp encode_stream(stream, false, options) do
    Stream.map(stream, &encode_row(&1, options))
  end

  # With headers, the first row additionally produces a leading header line;
  # every row thereafter contributes only its values.
  defp encode_stream(stream, headers, options) do
    Stream.transform(stream, :first, fn
      row, :first ->
        header_line = encode_row(get_headers(row, headers), options)
        value_line = encode_row(get_values(row, headers), options)
        {[header_line, value_line], :rest}

      row, :rest ->
        {[encode_row(get_values(row, headers), options)], :rest}
    end)
  end

  defp get_headers(row, true), do: Map.keys(row)
  defp get_headers(_row, headers), do: headers

  defp get_values(row, true), do: Map.values(row)
  defp get_values(row, headers), do: Enum.map(headers, &Map.get(row, &1))

  # Encode every cell of one row, join with the separator codepoint and
  # terminate the line with the delimiter.
  defp encode_row(row, options) do
    separator = Keyword.get(options, :separator, @separator)
    delimiter = Keyword.get(options, :delimiter, @delimiter)

    row
    |> Enum.map_join(<<separator::utf8>>, &encode_cell(&1, separator, delimiter))
    |> Kernel.<>(delimiter)
  end

  defp encode_cell(cell, separator, delimiter) do
    CSV.Encode.encode(cell, separator: separator, delimiter: delimiter)
  end
end
|
deps/csv/lib/csv/encoding/encoder.ex
| 0.908648
| 0.550849
|
encoder.ex
|
starcoder
|
defmodule Netstrings do
  @moduledoc """
  netstring encoding and decoding
  An implementation of djb's [netstrings](http://cr.yp.to/proto/netstrings.txt).
  Please note that the decoder violates spec by accepting leading zeros in the `len` part.
  However, the encoder will never generate such leading zeros.
  """

  @doc """
  Encode a netstring, i.e. `"hello"` becomes `"5:hello,"`.
  """
  @spec encode(String.t()) :: String.t() | {:error, String.t()}
  def encode(str) when is_binary(str),
    do: (str |> byte_size |> Integer.to_string()) <> ":" <> str <> ","

  def encode(_), do: {:error, "Can only encode binaries"}

  @doc """
  Encode a netstring, raise exception on error
  """
  @spec encode!(String.t()) :: String.t() | no_return
  def encode!(str) do
    case encode(str) do
      {:error, e} -> raise(e)
      s -> s
    end
  end

  @doc """
  Decode netstrings
  The decoder will stop as soon as it encounters an improper or incomplete netstring.
  Upon success, decoded strings will appear in the first element of the tuple as a list. Any remaining
  (undecoded) part of the string will appear as the second element.
  There are no guarantees that the remainder is the start of a proper netstring. Appending more received data
  to the remainder may or may not allow it to be decoded.
  """
  @spec decode(String.t()) :: {[String.t]() , String.t()} | {:error, String.t()}
  # Seed the accumulator with an extra "" sentinel string; it is stripped
  # again by the `Enum.drop(1)` in the terminating clause of recur_decode/3.
  def decode(ns) when is_binary(ns), do: recur_decode(ns, [], "")
  def decode(_), do: {:error, "Can only decode binaries"}

  @doc """
  Decode netstrings, raise exception on error
  NOTE(review): despite the original intent, only a non-binary argument makes
  this raise — a trailing undecoded remainder is simply returned as the second
  tuple element, exactly as in `decode/1`. Confirm which behavior is intended.
  """
  @spec decode!(String.t()) :: {[String.t()], String.t()} | no_return
  def decode!(str) do
    case decode(str) do
      {:error, e} -> raise(e)
      data -> data
    end
  end

  @spec recur_decode(String.t(), list, any) :: {list(String.t()), String.t()}
  # `prev` holds the string decoded on the previous pass, or nil once decoding
  # can go no further; nil selects this terminating clause. The reverse/drop
  # pair restores input order and removes the "" sentinel seeded by decode/1.
  defp recur_decode(rest, acc, nil), do: {acc |> Enum.reverse() |> Enum.drop(1), rest}

  defp recur_decode(ns, acc, prev) do
    # Split off "len:" and try to consume `len` payload bytes plus the
    # trailing comma; {nil, unconsumed} signals failure and stops recursion.
    {this_one, rest} =
      if String.contains?(ns, ":") do
        [i | r] = String.split(ns, ":", parts: 2)

        # NOTE(review): Integer.parse also accepts negative lengths (e.g.
        # "-1:..."), which later reach binary_part/3 with a negative offset
        # and raise — consider guarding n >= 0.
        case i |> Integer.parse() do
          {n, ""} -> pull_string(n, r)
          _ -> bad_path(i, r)
        end
      else
        {nil, ns}
      end

    recur_decode(rest, [prev | acc], this_one)
  end

  @spec pull_string(non_neg_integer, list) :: tuple
  # Nothing after the ":" at all — cannot possibly hold `count` bytes.
  defp pull_string(count, []), do: bad_path(count, "")

  defp pull_string(count, [s]) do
    # Need `count` payload bytes plus the trailing "," — hence strictly >.
    if byte_size(s) > count and binary_part(s, count, 1) == "," do
      f = binary_part(s, 0, count)
      {f, String.replace_prefix(s, f <> ",", "")}
    else
      bad_path(Integer.to_string(count), s)
    end
  end

  @spec bad_path(String.t() | non_neg_integer, String.t() | list) :: {nil, String.t()}
  # Reassemble the unconsumed "len:rest" text so the caller can hand it back
  # verbatim as the undecoded remainder.
  defp bad_path(n, s), do: {nil, Enum.join([n, ":", s], "")}

  @spec stream(atom | pid) :: Enumerable.t()
  @doc """
  Converts an io device into a `Netstrings.Stream`
  Behaves similarly to an `IO.Stream` with the values marshaled into and out of
  netstring format. The device should be opened in raw format for predictability.
  Note that netstrings approaching or above 64kib may not be properly handled.
  """
  def stream(device), do: Netstrings.Stream.__build__(device)
end
|
lib/netstrings.ex
| 0.867696
| 0.622302
|
netstrings.ex
|
starcoder
|
defmodule XMLRPC do
  alias XMLRPC.DecodeError
  alias XMLRPC.EncodeError
  alias XMLRPC.Decoder
  alias XMLRPC.Encoder

  @moduledoc ~S"""
  Encode and decode elixir terms to [XML-RPC](http://wikipedia.org/wiki/XML-RPC) parameters.
  All XML-RPC parameter types are supported, including arrays, structs and Nil (optional).
  This module handles the parsing and encoding of the datatypes, but can be used
  in conjunction with HTTPoison, Phoenix, etc to create fully featured XML-RPC
  clients and servers.
  XML input (ie untrusted) is validated against an [XML Schema](http://en.wikipedia.org/wiki/XML_schema),
  which should help enforce correctness of input. [erlsom](https://github.com/willemdj/erlsom)
  is used to decode the xml as xmerl creates atoms during decoding, which has
  the risk that a malicious client can exhaust out atom space and crash the vm.
  ## Example
      iex> _request_body = %XMLRPC.MethodCall{method_name: "test.sumprod", params: [2,3]} |> XMLRPC.encode!
      "<?xml version=\"1.0\" encoding=\"UTF-8\"?><methodCall><methodName>test.sumprod</methodName><params><param><value><int>2</int></value></param><param><value><int>3</int></value></param></params></methodCall>"
      # Now use HTTPoison to call your RPC
      response = HTTPoison.post!("http://www.advogato.org/XMLRPC", request_body).body
      iex> _response = "<?xml version=\"1.0\"?><methodResponse><params><param><value><array><data><value><int>5</int></value><value><int>6</int></value></data></array></value></param></params></methodResponse>" |> XMLRPC.decode
      {:ok, %XMLRPC.MethodResponse{param: [5, 6]}}
  ## Datatypes
  XML-RPC only allows limited parameter types. We map these to Elixir as follows:
  | XMLRPC               | Elixir                    |
  | ---------------------|---------------------------|
  | `<boolean>`          | Boolean, eg true/false    |
  | `<string>`           | Bitstring, eg "string"    |
  | `<int>` (`<i4>`)     | Integer, eg 17            |
  | `<double>`           | Float, eg -12.3           |
  | `<array>`            | List, eg [1, 2, 3]        |
  | `<struct>`           | Map, eg %{key: "value"}   |
  | `<dateTime.iso8601>` | %XMLRPC.DateTime          |
  | `<base64>`           | %XMLRPC.Base64            |
  | `<nil/>` (optional)  | nil                       |
  Note that array and struct parameters can be composed of the fundamental types,
  and you can nest to arbitrary depths. (int inside a struct, inside an array, inside a struct, etc).
  Common practice seems to be to use a struct (or sometimes an array) as the top
  level to pass (named) each way.
  The XML encoding is performed through a protocol and so abstract datatypes
  can be encoded by implementing the `XMLRPC.ValueEncoder` protocol.
  ### Nil
  Nil is not defined in the core specification, but is commonly implemented as
  an option. The use of nil is enabled by default for encoding and decoding.
  If you want a <nil/> input to be treated as an error then pass
  [exclude_nil: true] in the `options` parameter
  ## API
  The XML-RPC api consists of a call to a remote url, passing a "method_name"
  and a number of parameters.
      %XMLRPC.MethodCall{method_name: "test.sumprod", params: [2,3]}
  The response is either "failure" and a `fault_code` and `fault_string`, or a
  response which consists of a single parameter (use a struct/array to pass back
  multiple values)
      %XMLRPC.Fault{fault_code: 4, fault_string: "Too many parameters."}
      %XMLRPC.MethodResponse{param: 30}
  To encode/decode to xml use `XMLRPC.encode/2` or `XMLRPC.decode/2`
  ### Options
  The en/decoder take an array of options:
  * `:iodata` - When false (default), converts output of encoder to a string
  * `:exclude_nil` - When false (default), allows nil to be a valid type in encoder/decoder
  """

  defmodule Fault do
    @moduledoc """
    struct defining an xml-rpc 'fault' response
    """
    @type t :: %__MODULE__{fault_code: Integer, fault_string: String.t}
    defstruct fault_code: 0, fault_string: ""
  end

  defmodule MethodCall do
    @moduledoc """
    struct defining an xml-rpc call (note array of params)
    """
    @type t :: %__MODULE__{method_name: String.t, params: [ XMLRPC.t ]}
    defstruct method_name: "", params: nil
  end

  defmodule MethodResponse do
    @moduledoc """
    struct defining an xml-rpc response (note single param)
    """
    @type t :: %__MODULE__{param: XMLRPC.t}
    defstruct param: nil
  end

  @type t :: nil | number | boolean | String.t | map() | [nil | number | boolean | String.t]

  @doc """
  Encode an XMLRPC call or response elixir structure into XML as iodata
  Raises an exception on error.
  """
  # Fixed spec: this delegates to encode!/2, which returns bare iodata or
  # raises — it never returns an {:ok, _}/{:error, _} tuple.
  @spec encode_to_iodata!(XMLRPC.t, Keyword.t) :: iodata | no_return
  def encode_to_iodata!(value, options \\ []) do
    encode!(value, [iodata: true] ++ options)
  end

  @doc """
  Encode an XMLRPC call or response elixir structure into XML as iodata
  """
  @spec encode_to_iodata(XMLRPC.t, Keyword.t) :: {:ok, iodata} | {:error, {any, String.t}}
  def encode_to_iodata(value, options \\ []) do
    # `iodata: true` is prepended so it takes precedence over any
    # caller-supplied :iodata value (keyword lookup finds the first match).
    encode(value, [iodata: true] ++ options)
  end

  @doc ~S"""
  Encode an XMLRPC call or response elixir structure into XML.
  Raises an exception on error.
      iex> %XMLRPC.MethodCall{method_name: "test.sumprod", params: [2,3]} |> XMLRPC.encode!
      "<?xml version=\"1.0\" encoding=\"UTF-8\"?><methodCall><methodName>test.sumprod</methodName><params><param><value><int>2</int></value></param><param><value><int>3</int></value></param></params></methodCall>"
  """
  @spec encode!(XMLRPC.t, Keyword.t) :: iodata | no_return
  def encode!(value, options \\ []) do
    iodata = Encoder.encode!(value, options)

    # Return raw iodata when requested; otherwise flatten to a binary.
    # (Was `unless ... else`, which is a discouraged construct.)
    if options[:iodata] do
      iodata
    else
      IO.iodata_to_binary(iodata)
    end
  end

  @doc ~S"""
  Encode an XMLRPC call or response elixir structure into XML.
      iex> %XMLRPC.MethodCall{method_name: "test.sumprod", params: [2,3]} |> XMLRPC.encode
      {:ok, "<?xml version=\"1.0\" encoding=\"UTF-8\"?><methodCall><methodName>test.sumprod</methodName><params><param><value><int>2</int></value></param><param><value><int>3</int></value></param></params></methodCall>"}
  """
  @spec encode(XMLRPC.t, Keyword.t) :: {:ok, iodata} | {:ok, String.t} | {:error, {any, String.t}}
  def encode(value, options \\ []) do
    {:ok, encode!(value, options)}
  rescue
    exception in [EncodeError] ->
      {:error, {exception.value, exception.message}}
  end

  @doc ~S"""
  Decode XMLRPC call or response XML into an Elixir structure
      iex> XMLRPC.decode("<?xml version=\"1.0\"?><methodResponse><params><param><value><array><data><value><int>5</int></value><value><int>6</int></value></data></array></value></param></params></methodResponse>")
      {:ok, %XMLRPC.MethodResponse{param: [5, 6]}}
  """
  @spec decode(iodata, Keyword.t) :: {:ok, Fault.t} | {:ok, MethodCall.t} | {:ok, MethodResponse.t} | {:error, String.t}
  def decode(value, options \\ []) do
    {:ok, decode!(value, options)}
  rescue
    exception in [DecodeError] ->
      {:error, exception.message}
  end

  @doc ~S"""
  Decode XMLRPC call or response XML into an Elixir structure
  Raises an exception on error.
      iex> XMLRPC.decode!("<?xml version=\"1.0\"?><methodResponse><params><param><value><array><data><value><int>5</int></value><value><int>6</int></value></data></array></value></param></params></methodResponse>")
      %XMLRPC.MethodResponse{param: [5, 6]}
  """
  @spec decode!(iodata, Keyword.t) :: Fault.t | MethodCall.t | MethodResponse.t | no_return
  def decode!(value, options \\ []) do
    Decoder.decode!(value, options)
  end
end
|
lib/xml_rpc.ex
| 0.831759
| 0.407687
|
xml_rpc.ex
|
starcoder
|
defmodule RayTracer.Tasks.Chapter7 do
  @moduledoc """
  This module tests camera from Chapter 7
  """
  alias RayTracer.RTuple
  alias RayTracer.Sphere
  alias RayTracer.Canvas
  alias RayTracer.Material
  alias RayTracer.Color
  alias RayTracer.Light
  alias RayTracer.World
  alias RayTracer.Camera
  alias RayTracer.Matrix

  import RTuple, only: [point: 3, vector: 3]
  import Light, only: [point_light: 2]
  import RayTracer.Transformations

  @doc """
  Generates a file that tests rendering a world.
  `w` and `h` are the canvas width and height used for the camera.
  """
  # Fixed: the original @spec declared `execute :: :ok` (arity 0) although
  # the function takes two optional arguments (execute/0..2).
  @spec execute(non_neg_integer, non_neg_integer) :: :ok
  def execute(w \\ 100, h \\ 50) do
    # RayTracer.Tasks.Chapter7.execute
    world = build_world()
    camera = build_camera(w, h)

    camera
    |> Camera.render(world)
    |> Canvas.export_to_ppm_file()

    :ok
  end

  # Scene: two angled walls, a floor, and three spheres lit by one light.
  defp build_world do
    objects = [
      left_wall(), right_wall(), floor(), left_sphere(), middle_sphere(), right_sphere()
    ]

    light = point_light(point(-10, 10, -10), Color.new(1, 1, 1))
    World.new(objects, light)
  end

  defp build_camera(w, h) do
    transform = view_transform(point(0, 1.5, -5), point(0, 1, 0), vector(0, 1, 0))
    Camera.new(w, h, :math.pi / 3, transform)
  end

  # The two walls share the floor sphere and differ only in the sign of the
  # y-rotation, so build both through one helper.
  defp left_wall, do: wall(-:math.pi / 4)
  defp right_wall, do: wall(:math.pi / 4)

  defp wall(y_rotation) do
    transform =
      translation(0, 0, 5)
      |> Matrix.mult(rotation_y(y_rotation))
      |> Matrix.mult(rotation_x(:math.pi / 2))
      |> Matrix.mult(scaling(10, 0.01, 10))

    %Sphere{floor() | transform: transform}
  end

  # The "floor" is a sphere squashed almost flat.
  defp floor do
    transform = scaling(10, 0.01, 10)
    material = %Material{Material.new | color: Color.new(1, 0.9, 0.9), specular: 0}
    %Sphere{Sphere.new | material: material, transform: transform}
  end

  defp left_sphere do
    transform = translation(-1.5, 0.33, -0.75) |> Matrix.mult(scaling(0.33, 0.33, 0.33))
    material = %Material{Material.new | color: Color.new(1, 0.8, 0.1), specular: 0.3, diffuse: 0.7}
    %Sphere{Sphere.new | material: material, transform: transform}
  end

  defp middle_sphere do
    transform = translation(-0.5, 1, 0.5)
    material = %Material{Material.new | color: Color.new(0.1, 1, 0.5), specular: 0.3, diffuse: 0.7}
    %Sphere{Sphere.new | material: material, transform: transform}
  end

  defp right_sphere do
    transform = translation(1.5, 0.5, -0.5) |> Matrix.mult(scaling(0.5, 0.5, 0.5))
    material = %Material{Material.new | color: Color.new(0.5, 1, 0.1), specular: 0.3, diffuse: 0.7}
    %Sphere{Sphere.new | material: material, transform: transform}
  end
end
|
lib/tasks/chapter7.ex
| 0.906808
| 0.626924
|
chapter7.ex
|
starcoder
|
defmodule AdventOfCode.Day11 do
  @moduledoc ~S"""
  [Advent Of Code day 11](https://adventofcode.com/2018/day/11).
      iex> AdventOfCode.Day11.power_level({3, 5}, 8)
      4
      iex> AdventOfCode.Day11.power_level({33, 45}, 18)
      4
      iex> AdventOfCode.Day11.power_level({122, 79}, 57)
      -5
      iex> AdventOfCode.Day11.power_level({217, 196}, 39)
      0
      iex> AdventOfCode.Day11.power_level({101, 153}, 71)
      4
      iex> AdventOfCode.Day11.solve("1", 18)
      {{33, 45}, 29, 3}
      iex> AdventOfCode.Day11.solve("1", 18, 16)
      {{90, 269}, 113, 16}
      iex> AdventOfCode.Day11.solve("1", 42)
      {{21, 61}, 30, 3}
  """
  @grid_size 300

  @typedoc "Best square: its top-left point, total power, and side length."
  @type result :: {{pos_integer, pos_integer}, integer, pos_integer}

  # Fixed: the original @spec claimed a plain integer return, but both parts
  # return a {point, power, size} tuple.
  @spec solve(String.t(), integer, pos_integer) :: result
  def solve("1", serial_number, square_size) do
    calculate_power_levels(serial_number) |> do_solve(square_size)
  end

  @spec solve(String.t(), integer) :: result
  def solve("1", serial_number), do: solve("1", serial_number, 3)

  # TODO: figure out how to make it faster (a summed-area table would make
  # each square total O(1) instead of O(size^2)).
  def solve("2", serial_number) do
    power_levels = calculate_power_levels(serial_number)

    1..300
    |> Task.async_stream(&do_solve(power_levels, &1), ordered: false, timeout: 1_000_000)
    |> Stream.map(fn {:ok, res} -> res end)
    |> Enum.max_by(fn {_first_point, power_level, _square_size} -> power_level end)
  end

  # Finds the highest-power square of the given size over the precomputed grid.
  defp do_solve(power_levels, square_size) do
    range = 1..@grid_size

    {point, power_level} =
      for(x <- range,
          y <- range,
          square_inside_grid?({x, y}, square_size),
          do: {{x, y}, total_power({x, y}, power_levels, square_size)})
      |> Enum.max_by(fn {_point, power} -> power end)

    {point, power_level, square_size}
  end

  # Precomputes the power level of every cell, keyed by its {x, y} point.
  defp calculate_power_levels(serial_number) do
    for x <- 1..@grid_size, y <- 1..@grid_size, into: %{} do
      {{x, y}, power_level({x, y}, serial_number)}
    end
  end

  defp total_power(points, power_levels) when is_list(points) do
    points
    |> Enum.map(&Map.fetch!(power_levels, &1))
    |> Enum.sum()
  end

  defp total_power({_, _} = first_point, power_levels, square_size) do
    square_points(first_point, square_size) |> total_power(power_levels)
  end

  # All points of the square whose top-left corner is {px, py}.
  defp square_points({px, py}, square_size) do
    range = 0..(square_size - 1)
    for x <- range, y <- range, do: {px + x, py + y}
  end

  defp square_inside_grid?({x, y}, square_size) do
    x + square_size - 1 <= @grid_size && y + square_size - 1 <= @grid_size
  end

  @doc """
  Computes the power level of the fuel cell at `{x, y}` for `serial_number`
  (rack-id formula from the puzzle statement).
  """
  @spec power_level(cell :: {integer, integer}, serial_number :: integer) :: integer
  def power_level({x, y}, serial_number) do
    rack_id = x + 10
    hundreds((rack_id * y + serial_number) * rack_id) - 5
  end

  # Hundreds digit of a non-negative integer.
  # Fixed: the original sliced the digit out of the decimal string with
  # `Enum.at(-3)`, which crashed for values below 100; integer arithmetic
  # handles those (yielding 0) and avoids the string round-trip.
  defp hundreds(int), do: int |> div(100) |> rem(10)
end
|
lib/advent_of_code/day_11.ex
| 0.583559
| 0.586523
|
day_11.ex
|
starcoder
|
defmodule Holidays.DateCalculator.Easter do
  alias Holidays.DateCalculator.DateMath

  @doc ~S"""
  Returns the date of Easter for the given `year`
  ## Examples
      iex> Holidays.DateCalculator.Easter.gregorian_easter_for(2016)
      {2016, 3, 27}
      iex> Holidays.DateCalculator.Easter.gregorian_easter_for(2015)
      {2015, 4, 5}
  """
  def gregorian_easter_for(year) do
    # Anonymous Gregorian computus; single-letter names follow the
    # conventional presentation of the algorithm.
    a = rem(year, 19)
    b = div(year, 100)
    c = rem(year, 100)
    d = div(b, 4)
    e = rem(b, 4)
    f = div(b + 8, 25)
    g = div(b - f + 1, 3)
    h = rem(19 * a + b - d - g + 15, 30)
    i = div(c, 4)
    k = rem(c, 4)
    l = rem(32 + 2 * e + 2 * i - h - k, 7)
    m = div(a + 11 * h + 22 * l, 451)
    month = div(h + l - 7 * m + 114, 31)
    day = rem(h + l - 7 * m + 114, 31) + 1
    {year, month, day}
  end

  @doc ~S"""
  Returns the date of Orthodox Easter in the given `year`
  ## Examples
      iex> Holidays.DateCalculator.Easter.gregorian_orthodox_easter_for(2016)
      {2016, 5, 1}
      iex> Holidays.DateCalculator.Easter.gregorian_orthodox_easter_for(1815)
      {1815, 4, 30}
      iex> Holidays.DateCalculator.Easter.gregorian_orthodox_easter_for(2101)
      {2101, 4, 24}
  """
  def gregorian_orthodox_easter_for(year) do
    julian_date = julian_orthodox_easter_for(year)

    offset =
      cond do
        # After 1700, 1 day is added for each century, except if the century
        # year is exactly divisible by 400 (in which case no days are added).
        # Safe until 4100 AD, when one leap day will be removed.
        year >= 1700 -> div(year - 1600, 100) - div(year - 1600, 400) + 10
        # Between the years 1583 and 1699, 10 days are added to the julian
        # day count.
        year >= 1583 -> 10
        # Up until 1582, julian and gregorian easter dates were identical.
        true -> 0
      end

    DateMath.add_days(julian_date, offset)
  end

  # Julian computus, expressed on the (proleptic) Gregorian calendar only
  # after `gregorian_orthodox_easter_for/1` applies its day offset.
  def julian_orthodox_easter_for(year) do
    g = rem(year, 19)
    i = rem(19 * g + 15, 30)
    j = rem(year + div(year, 4) + i, 7)
    month = 3 + div(i - j + 40, 44)
    day = i - j + 28 - 31 * div(month, 4)
    {year, month, day}
  end
end
|
lib/holidays/date_calculator/easter.ex
| 0.874426
| 0.513973
|
easter.ex
|
starcoder
|
defmodule Honeyland.Geolocation do
  @moduledoc """
  The Geolocation context.
  """

  @enforce_keys [:latitude, :longitude, :timestamp]
  defstruct [:latitude, :longitude, :accuracy, :timestamp, :address]

  alias Honeyland.Astarte
  alias Honeyland.Astarte.Device
  alias Honeyland.Geolocation
  alias Honeyland.Repo

  @type t() :: %__MODULE__{
          latitude: float,
          longitude: float,
          accuracy: number | nil,
          address: String.t() | nil,
          timestamp: DateTime.t()
        }

  # Builds a %Geolocation{} for the device, preferring WiFi-scan-based
  # coordinates and falling back to IP-based ones; the address is resolved
  # by reverse geocoding and may be nil.
  def fetch_location(%Device{} = device) do
    with {:ok, coords} <- fetch_device_coordinates(device) do
      {:ok,
       %Geolocation{
         latitude: coords.latitude,
         longitude: coords.longitude,
         accuracy: coords.accuracy,
         timestamp: coords.timestamp,
         address: get_address(coords)
       }}
    end
  end

  # Try WiFi first; only on its failure fall through to IP geolocation.
  defp fetch_device_coordinates(device) do
    with {:error, _reason} <- fetch_device_wifi_coordinates(device),
         {:error, _reason} <- fetch_device_ip_coordinates(device) do
      {:error, :device_coordinates_not_found}
    end
  end

  defp fetch_device_wifi_coordinates(device) do
    with {:ok, scans} <- Astarte.fetch_wifi_scan_results(device),
         {:ok, scans} <- filter_latest_wifi_scan_results(scans),
         {:ok, coords} <- geolocate_wifi(scans) do
      # Stamp the coordinates with the scan time, or "now" if somehow empty.
      timestamp =
        case scans do
          [newest | _] -> newest.timestamp
          [] -> DateTime.utc_now()
        end

      {:ok, Enum.into(%{timestamp: timestamp}, coords)}
    end
  end

  # Keeps only the scan results belonging to the most recent scan pass.
  defp filter_latest_wifi_scan_results([_scan | _] = scans) do
    newest = Enum.max_by(scans, & &1.timestamp, DateTime)
    {:ok, Enum.filter(scans, &(&1.timestamp == newest.timestamp))}
  end

  defp filter_latest_wifi_scan_results(_scans) do
    {:error, :wifi_scan_results_not_found}
  end

  defp fetch_device_ip_coordinates(device) do
    device = Repo.preload(device, :realm)

    with {:ok, status} <- Astarte.get_device_status(device.realm, device.device_id),
         {:ok, coords} <- geolocate_ip(status.last_seen_ip) do
      # Use the most recent of the (dis)connection times, if any, as the
      # coordinate timestamp.
      last_seen =
        [status.last_connection, status.last_disconnection]
        |> Enum.reject(&is_nil/1)
        |> Enum.sort({:desc, DateTime})
        |> List.first()

      {:ok, Enum.into(%{timestamp: last_seen || DateTime.utc_now()}, coords)}
    end
  end

  # Best-effort reverse geocoding; any failure degrades to a nil address.
  defp get_address(coords) do
    case reverse_geocode(coords) do
      {:ok, address} -> address
      _ -> nil
    end
  end

  defp geolocate_ip(ip_address) do
    case Application.get_env(:honeyland, :ip_geolocation_provider) do
      nil -> {:error, :ip_geolocation_provider_not_found}
      provider -> provider.geolocate(ip_address)
    end
  end

  defp geolocate_wifi(scans) do
    case Application.get_env(:honeyland, :wifi_geolocation_provider) do
      nil -> {:error, :wifi_geolocation_provider_not_found}
      provider -> provider.geolocate(scans)
    end
  end

  defp reverse_geocode(coords) do
    case Application.get_env(:honeyland, :geocoding_provider) do
      nil -> {:error, :geocoding_provider_not_found}
      provider -> provider.reverse_geocode(coords)
    end
  end
end
|
backend/lib/honeyland/geolocation.ex
| 0.819929
| 0.636141
|
geolocation.ex
|
starcoder
|
defmodule Interledger.Packet do
  @moduledoc """
  Decodes a binary block of data into the correct data types
  """

  # ----------------------------------------------------------------------------
  # Module Uses, Requires and Imports
  # ----------------------------------------------------------------------------
  require Logger

  # ----------------------------------------------------------------------------
  # Module Types
  # ----------------------------------------------------------------------------
  @type ilp_packet_type ::
          :ilp_packet_prepare
          | :ilp_packet_fulfill
          | :ilp_packet_reject
          | :ilp_packet_unknown

  @type ilp_prepare :: {integer, DateTime.t(), binary, String.t(), binary}
  @type ilp_fulfill :: {integer, binary}
  @type ilp_reject :: {ilp_packet_error, String.t(), String.t(), binary}

  @type ilp_packet_error ::
          :f00_bad_request
          | :f01_invalid_packet
          | :f02_unreachable
          | :f03_invalid_amount
          | :f04_insufficient_destination_amount
          | :f05_wrong_condition
          | :f06_unexpected_payment
          | :f07_cannot_receive
          | :f08_amount_too_large
          | :f09_invalid_peer_response
          | :f99_application_error
          | :t00_internal_error
          | :t01_peer_unreachable
          | :t02_peer_busy
          | :t03_connector_busy
          | :t04_insufficient_liquidity
          | :t05_rate_limited
          | :t99_application_error
          | :r00_transfer_timed_out
          | :r01_insufficient_source_amount
          | :r02_insufficient_timeout
          | :r99_application_error

  # ----------------------------------------------------------------------------
  # Module Contants
  # ----------------------------------------------------------------------------
  # @maxAddressLen 1023

  # Wire code (3 ASCII chars) -> error atom. Keeping the mapping in one table
  # makes the correspondence with @type ilp_packet_error easy to audit.
  @error_codes %{
    "F00" => :f00_bad_request,
    "F01" => :f01_invalid_packet,
    "F02" => :f02_unreachable,
    "F03" => :f03_invalid_amount,
    "F04" => :f04_insufficient_destination_amount,
    "F05" => :f05_wrong_condition,
    "F06" => :f06_unexpected_payment,
    "F07" => :f07_cannot_receive,
    "F08" => :f08_amount_too_large,
    "F09" => :f09_invalid_peer_response,
    "F99" => :f99_application_error,
    "T00" => :t00_internal_error,
    "T01" => :t01_peer_unreachable,
    "T02" => :t02_peer_busy,
    "T03" => :t03_connector_busy,
    "T04" => :t04_insufficient_liquidity,
    "T05" => :t05_rate_limited,
    "T99" => :t99_application_error,
    "R00" => :r00_transfer_timed_out,
    "R01" => :r01_insufficient_source_amount,
    "R02" => :r02_insufficient_timeout,
    "R99" => :r99_application_error
  }

  # ----------------------------------------------------------------------------
  # Public API
  # ----------------------------------------------------------------------------
  @doc """
  Parse out packet types from the binary payload
  """
  @spec parse(binary()) ::
          {:ok, {ilp_packet_type, ilp_prepare | ilp_fulfill | ilp_reject}}
          | {:error, term}
  def parse(data) do
    # Every packet starts with a 1-byte type tag and a 24-bit total length.
    <<type::size(8)-unsigned-big, len::size(24)-unsigned-big, body::binary>> = data

    decode_body(type_for(type), len, body)
  end

  # ----------------------------------------------------------------------------
  # Private API
  # ----------------------------------------------------------------------------

  # Prepare packet: amount, expiry timestamp, condition, destination, data.
  defp decode_body(:ilp_packet_prepare, _len, raw) do
    <<
      amount::size(64)-unsigned-big,
      # Expiry timestamp is encoded as fixed-width ASCII digit groups.
      exp_year::size(32)-bitstring,
      exp_month::size(16)-bitstring,
      exp_day::size(16)-bitstring,
      exp_hour::size(16)-bitstring,
      exp_minute::size(16)-bitstring,
      exp_second::size(16)-bitstring,
      exp_millis::size(24)-bitstring,
      condition::size(256)-unsigned-big,
      # NOTE: single-byte length prefix only; addresses whose length uses the
      # high-bit continuation form are still TODO, as in the original.
      dest_len::size(8)-unsigned-big,
      destination::size(dest_len)-binary,
      data::binary
    >> = raw

    iso8601 =
      "#{exp_year}-#{exp_month}-#{exp_day}T#{exp_hour}:#{exp_minute}:#{exp_second}.#{exp_millis}Z"

    {:ok, expires_at, 0} = DateTime.from_iso8601(iso8601)

    {:ok, {:ilp_packet_prepare, {amount, expires_at, condition, destination, data}}}
  end

  # Fulfill packet: a 256-bit fulfillment followed by opaque data.
  defp decode_body(:ilp_packet_fulfill, _len, raw) do
    <<fulfillment::size(256)-unsigned-big, data::binary>> = raw
    {:ok, {:ilp_packet_fulfill, {fulfillment, data}}}
  end

  # Reject packet: 3-char error code, destination address, message, data.
  defp decode_body(:ilp_packet_reject, _len, raw) do
    <<
      code::size(24)-bitstring,
      dest_len::size(8)-unsigned-big,
      destination::size(dest_len)-binary,
      msg_len::size(8)-unsigned-big,
      message::size(msg_len)-binary,
      data::binary
    >> = raw

    {:ok, {:ilp_packet_reject, {error_atom(code), destination, message, data}}}
  end

  # Unrecognized type tag: bail out with an error tuple.
  defp decode_body(:ilp_packet_unknown, _, _), do: {:error, :ilp_packet_unknown}

  # Map the wire type byte to a packet-type atom.
  defp type_for(12), do: :ilp_packet_prepare
  defp type_for(13), do: :ilp_packet_fulfill
  defp type_for(14), do: :ilp_packet_reject
  defp type_for(_), do: :ilp_packet_unknown

  # Convert a wire error code into its atom; unknown codes are logged and
  # degraded to :t00_internal_error.
  defp error_atom(code) do
    case Map.fetch(@error_codes, code) do
      {:ok, error} ->
        error

      :error ->
        Logger.error("[Interledger.toError/1] Unknown error code: #{inspect(code)}")
        :t00_internal_error
    end
  end
end
|
servers/exilp/apps/interledger/lib/packet.ex
| 0.539226
| 0.403332
|
packet.ex
|
starcoder
|
defmodule Contex.PointPlot do
  @moduledoc """
  A simple point plot, plotting points showing y values against x values.

  It is possible to specify multiple y columns with the same x column. It is not
  yet possible to specify multiple independent series.

  The x column can either be numeric or date time data. If numeric, a
  `Contex.ContinuousLinearScale` is used to scale the values to the plot,
  and if date time, a `Contex.TimeScale` is used.

  Fill colours for each y column can be specified with `colours/2`.

  A column in the dataset can optionally be used to control the colours. See
  `colours/2` and `set_colour_col_name/2`
  """

  import Contex.SVG

  alias __MODULE__
  alias Contex.{Scale, ContinuousLinearScale, TimeScale}
  alias Contex.CategoryColourScale
  alias Contex.Dataset
  alias Contex.Axis
  alias Contex.Utils

  defstruct [
    :dataset,
    :width,
    :height,
    :x_col,
    :y_cols,
    :fill_col,
    :size_col,
    :x_scale,
    :y_scale,
    :fill_scale,
    :colour_palette
  ]

  @type t() :: %__MODULE__{}

  @doc """
  Create a new point plot definition and apply defaults.
  """
  @spec new(Contex.Dataset.t(), keyword()) :: Contex.PointPlot.t()
  def new(%Dataset{} = dataset, _options \\ []) do
    %PointPlot{dataset: dataset, width: 100, height: 100}
    |> defaults()
  end

  @doc """
  Sets the default values for the plot.

  By default, the first column in the dataset is used for the x values and the
  second column for the y values. The colour palette is set to :default.
  """
  @spec defaults(Contex.PointPlot.t()) :: Contex.PointPlot.t()
  def defaults(%PointPlot{} = plot) do
    x_col_index = 0
    y_col_index = 1

    x_col_name = Dataset.column_name(plot.dataset, x_col_index)
    y_col_names = [Dataset.column_name(plot.dataset, y_col_index)]

    %{plot | colour_palette: :default}
    |> set_x_col_name(x_col_name)
    |> set_y_col_names(y_col_names)
  end

  @doc """
  Set the colour palette for fill colours.

  Where multiple y columns are defined for the plot, a different colour will be
  used for each column.

  If a single y column is defined and a colour column is defined (see
  `set_colour_col_name/2`), a different colour will be used for each unique
  value in the colour column.

  If a single y column is defined and no colour column is defined, the first
  colour in the supplied colour palette will be used to plot the points.
  """
  @spec colours(Contex.PointPlot.t(), Contex.CategoryColourScale.colour_palette()) :: Contex.PointPlot.t()
  def colours(plot, colour_palette) when is_list(colour_palette) or is_atom(colour_palette) do
    %{plot | colour_palette: colour_palette}
    |> set_y_col_names(plot.y_cols)
  end

  def colours(plot, _) do
    # Anything other than a list/atom palette falls back to the default palette.
    %{plot | colour_palette: :default}
    |> set_y_col_names(plot.y_cols)
  end

  @doc false
  def set_size(%PointPlot{} = plot, width, height) do
    # We pretend to set the x & y columns to force a recalculation of scales - may be expensive.
    # We only really need to set the range, not recalculate the domain
    %{plot | width: width, height: height}
    |> set_x_col_name(plot.x_col)
    |> set_y_col_names(plot.y_cols)
  end

  @doc false
  # NOTE(review): `length(y_cols) > 0` is true for any non-empty y_cols list, so
  # this clause also handles the single-series case; `length(y_cols) > 1` may
  # have been intended — confirm before changing, as the second clause would
  # then take over for single-series plots with a fill column.
  def get_svg_legend(%PointPlot{y_cols: y_cols, fill_col: fill_col} = plot)
      when length(y_cols) > 0 or is_nil(fill_col) do
    # We do the point plotting with an index to look up the colours.
    # For the legend we need the column names.
    series_fill_colours =
      CategoryColourScale.new(y_cols)
      |> CategoryColourScale.set_palette(plot.colour_palette)

    Contex.Legend.to_svg(series_fill_colours)
  end

  def get_svg_legend(%PointPlot{fill_scale: scale}) do
    Contex.Legend.to_svg(scale)
  end

  def get_svg_legend(_), do: ""

  @doc false
  def to_svg(%PointPlot{x_scale: x_scale, y_scale: y_scale} = plot) do
    axis_x = get_x_axis(x_scale, plot.height)
    axis_y = Axis.new_left_axis(y_scale) |> Axis.set_offset(plot.width)

    [
      Axis.to_svg(axis_x),
      Axis.to_svg(axis_y),
      "<g>",
      get_svg_points(plot),
      "</g>"
      #,get_svg_line(plot)
    ]
  end

  # Builds the bottom axis; tick labels are rotated when there are many ticks
  # to avoid overlap.
  defp get_x_axis(x_scale, offset) do
    axis =
      Axis.new_bottom_axis(x_scale)
      |> Axis.set_offset(offset)

    case length(Scale.ticks_range(x_scale)) > 8 do
      true -> %{axis | rotation: 45}
      _ -> axis
    end
  end

  # Renders one SVG circle (or a list of circles, for multiple y columns) per
  # data row.
  defp get_svg_points(%PointPlot{dataset: dataset, x_scale: x_scale, y_scale: y_scale} = plot) do
    x_tx_fn = Scale.domain_to_range_fn(x_scale)
    y_tx_fn = Scale.domain_to_range_fn(y_scale)

    x_col_index = Dataset.column_index(dataset, plot.x_col)
    y_col_indices = Enum.map(plot.y_cols, fn col -> Dataset.column_index(dataset, col) end)
    fill_col_index = Dataset.column_index(dataset, plot.fill_col)

    dataset.data
    |> Enum.map(fn row ->
      get_svg_point(row, x_tx_fn, y_tx_fn, plot.fill_scale, x_col_index, y_col_indices, fill_col_index)
    end)
  end

  # Draws a dashed red polyline through the data points of the first y column.
  # Currently unused — see the commented-out call in `to_svg/1`.
  defp get_svg_line(%PointPlot{dataset: dataset, x_scale: x_scale, y_scale: y_scale} = plot) do
    x_col_index = Dataset.column_index(dataset, plot.x_col)
    # BUGFIX: the struct has no :y_col field (it is :y_cols); referencing
    # plot.y_col raised a KeyError. Use the first y column instead.
    y_col_index = Dataset.column_index(dataset, hd(plot.y_cols))

    x_tx_fn = Scale.domain_to_range_fn(x_scale)
    y_tx_fn = Scale.domain_to_range_fn(y_scale)

    style = ~s|stroke="red" stroke-width="2" fill="none" stroke-dasharray="13,2" stroke-linejoin="round" |
    last_item = Enum.count(dataset.data) - 1

    path = ["M",
      dataset.data
      |> Stream.map(fn row ->
        x = Dataset.value(row, x_col_index)
        y = Dataset.value(row, y_col_index)
        {x_tx_fn.(x), y_tx_fn.(y)}
      end)
      |> Stream.with_index()
      |> Enum.map(fn {{x_plot, y_plot}, i} ->
        case i < last_item do
          true -> ~s|#{x_plot} #{y_plot} L |
          _ -> ~s|#{x_plot} #{y_plot}|
        end
      end)
    ]

    [~s|<path d="|, path, ~s|"|, style, "></path>"]
  end

  # Single y column: the fill colour is driven by the (optional) colour column.
  # The [y_col_index] pattern already guarantees exactly one y column, so no
  # length/1 guard is needed.
  defp get_svg_point(row, x_tx_fn, y_tx_fn, fill_scale, x_col_index, [y_col_index], fill_col_index) do
    x_data = Dataset.value(row, x_col_index)
    y_data = Dataset.value(row, y_col_index)

    fill_data =
      if is_integer(fill_col_index) and fill_col_index >= 0 do
        Dataset.value(row, fill_col_index)
      else
        0
      end

    x = x_tx_fn.(x_data)
    y = y_tx_fn.(y_data)
    fill = CategoryColourScale.colour_for_value(fill_scale, fill_data)
    get_svg_point(x, y, fill)
  end

  # Multiple y columns: one point per column, coloured by column index.
  defp get_svg_point(row, x_tx_fn, y_tx_fn, fill_scale, x_col_index, y_col_indices, _fill_col_index) do
    x_data = Dataset.value(row, x_col_index)
    x = x_tx_fn.(x_data)

    Enum.with_index(y_col_indices)
    |> Enum.map(fn {col_index, index} ->
      y_data = Dataset.value(row, col_index)
      y = y_tx_fn.(y_data)
      fill = CategoryColourScale.colour_for_value(fill_scale, index)
      get_svg_point(x, y, fill)
    end)
  end

  # Renders one circle; non-numeric coordinates (e.g. nil data) render nothing.
  defp get_svg_point(x, y, fill) when is_number(x) and is_number(y) do
    circle(x, y, 3, fill: fill)
  end

  defp get_svg_point(_x, _y, _fill), do: ""

  @doc """
  Specify which column in the dataset is used for the x values.

  This column must contain numeric or date time data.
  """
  @spec set_x_col_name(Contex.PointPlot.t(), Contex.Dataset.column_name()) :: Contex.PointPlot.t()
  def set_x_col_name(%PointPlot{width: width} = plot, x_col_name) do
    case Dataset.check_column_names(plot.dataset, x_col_name) do
      {:ok, []} ->
        x_scale = create_scale_for_column(plot.dataset, x_col_name, {0, width})
        %{plot | x_col: x_col_name, x_scale: x_scale}

      {:error, missing_column} ->
        raise "Column \"#{hd(missing_column)}\" not in the dataset."

      _ -> plot
    end
  end

  @doc """
  Specify which column(s) in the dataset is/are used for the y values.

  These columns must contain numeric data. Where more than one y column is
  specified the colours are used to identify data from each column.
  """
  @spec set_y_col_names(Contex.PointPlot.t(), [Contex.Dataset.column_name()]) :: Contex.PointPlot.t()
  def set_y_col_names(%PointPlot{height: height} = plot, y_col_names) when is_list(y_col_names) do
    case Dataset.check_column_names(plot.dataset, y_col_names) do
      {:ok, []} ->
        # Shared y scale spanning the combined extents of all y columns.
        {min, max} =
          get_overall_domain(plot.dataset, y_col_names)
          |> Utils.fixup_value_range()

        y_scale =
          ContinuousLinearScale.new()
          |> ContinuousLinearScale.domain(min, max)
          |> Scale.set_range(height, 0)

        # Points are plotted with the column's index used to look up its colour.
        fill_indices = Enum.with_index(y_col_names) |> Enum.map(fn {_, index} -> index end)

        series_fill_colours =
          CategoryColourScale.new(fill_indices)
          |> CategoryColourScale.set_palette(plot.colour_palette)

        %{plot | y_cols: y_col_names, y_scale: y_scale, fill_scale: series_fill_colours}

      {:error, missing_columns} ->
        columns_string =
          Stream.map(missing_columns, &("\"#{&1}\""))
          |> Enum.join(", ")

        raise "Column(s) #{columns_string} not in the dataset."

      _ -> plot
    end
  end

  # Combines the {min, max} extents of several columns into one overall domain.
  defp get_overall_domain(dataset, col_names) do
    combiner = fn {min1, max1}, {min2, max2} -> {Utils.safe_min(min1, min2), Utils.safe_max(max1, max2)} end

    Enum.reduce(col_names, {nil, nil}, fn col, acc_extents ->
      inner_extents = Dataset.column_extents(dataset, col)
      combiner.(acc_extents, inner_extents)
    end)
  end

  # Chooses a TimeScale or ContinuousLinearScale based on the column's data type.
  defp create_scale_for_column(dataset, column, {r_min, r_max}) do
    {min, max} = Dataset.column_extents(dataset, column)

    case Dataset.guess_column_type(dataset, column) do
      :datetime ->
        TimeScale.new()
        |> TimeScale.domain(min, max)
        |> Scale.set_range(r_min, r_max)

      :number ->
        ContinuousLinearScale.new()
        |> ContinuousLinearScale.domain(min, max)
        |> Scale.set_range(r_min, r_max)
    end
  end

  @doc """
  If a single y column is specified, it is possible to use another column to
  control the point colour.

  Note: This is ignored if there are multiple y columns.
  """
  @spec set_colour_col_name(Contex.PointPlot.t(), Contex.Dataset.column_name()) :: Contex.PointPlot.t()
  def set_colour_col_name(%PointPlot{} = plot, colour_col_name) do
    case Dataset.check_column_names(plot.dataset, colour_col_name) do
      {:ok, []} ->
        vals = Dataset.unique_values(plot.dataset, colour_col_name)
        colour_scale = CategoryColourScale.new(vals)
        %{plot | fill_col: colour_col_name, fill_scale: colour_scale}

      {:error, missing_column} ->
        raise "Column \"#{hd(missing_column)}\" not in the dataset."

      _ -> plot
    end
  end
end
|
lib/chart/pointplot.ex
| 0.907176
| 0.888759
|
pointplot.ex
|
starcoder
|
alias Experimental.GenStage
defmodule LocalityDispatcher do
  @moduledoc """
  A dispatcher that sends batches to the highest demand.
  This is the default dispatcher used by `GenStage`. In order
  to avoid greedy consumers, it is recommended that all consumers
  have exactly the same maximum demand.
  """

  @behaviour GenStage.Dispatcher

  # State shape: {demands, pending, max, mapper}
  #   demands - [{counter, pid, ref, locale}] sorted by counter, descending
  #   pending - demand returned by cancelled consumers, not yet re-used
  #   max     - first demand seen; used to warn about mismatched demands
  #   mapper  - fun(event) -> locale, from the :mapper init option

  @doc false
  def init(opts) do
    mapper = Keyword.get(opts, :mapper)

    if is_nil(mapper) do
      raise ArgumentError, "the :mapper option must be specified for the locality dispatcher"
    end

    {:ok, {[], 0, nil, mapper}}
  end

  @doc false
  def notify(msg, {demands, _, _, _} = state) do
    # Broadcast the notification to every subscribed consumer.
    Enum.each(demands, fn {_, pid, ref, _} ->
      Process.send(pid, {:"$gen_consumer", {self(), ref}, {:notification, msg}}, [:noconnect])
    end)

    {:ok, state}
  end

  @doc false
  def subscribe(opts, {pid, ref}, {demands, pending, max, mapper}) do
    locale = Keyword.get(opts, :locale)

    if is_nil(locale) do
      raise ArgumentError, "the :locale option is required when subscribing to a producer with locality dispatcher"
    end

    # New consumers start with zero demand, appended at the tail.
    {:ok, 0, {demands ++ [{0, pid, ref, locale}], pending, max, mapper}}
  end

  @doc false
  def cancel({_, ref}, {demands, pending, max, mapper}) do
    # Fold the cancelled consumer's outstanding demand back into pending.
    {current, _locale, demands} = pop_demand(ref, demands)
    {:ok, 0, {demands, current + pending, max, mapper}}
  end

  @doc false
  def ask(counter, {pid, ref}, {demands, pending, max, mapper}) do
    # The first ask fixes the expected maximum demand.
    max = max || counter

    if counter > max do
      # BUGFIX: this warning previously named "DemandDispatcher", which is a
      # different dispatcher module; it now names this one.
      :error_logger.warning_msg('GenStage producer LocalityDispatcher expects a maximum demand of ~p. ' ++
        'Using different maximum demands will overload greedy consumers. ' ++
        'Got demand for ~p events from ~p~n', [max, counter, pid])
    end

    {current, locale, demands} = pop_demand(ref, demands)
    demands = add_demand(current + counter, pid, ref, locale, demands)

    already_sent = min(pending, counter)
    {:ok, counter - already_sent, {demands, pending - already_sent, max, mapper}}
  end

  @doc false
  def dispatch(events, length, {demands, pending, max, mapper}) do
    {events, demands} = dispatch_demand(events, length, demands, mapper)
    {:ok, events, {demands, pending, max, mapper}}
  end

  defp dispatch_demand([], _length, demands, _) do
    {[], demands}
  end

  # Groups events by locale (via the mapper fun), then walks consumers in
  # highest-demand-first order, sending each consumer as many of its locale's
  # events as its demand allows. Events no consumer could take are returned
  # as leftovers.
  defp dispatch_demand(events, num_dispatch, demands, mapper) do
    by_locale = Enum.group_by(events, mapper)

    {by_locale, _, demands} = Enum.reduce(
      demands,
      {by_locale, num_dispatch, demands},
      fn
        # Consumers with no outstanding demand are skipped.
        {0, _, _, _}, acc -> acc

        {counter, pid, ref, locale}, {by_locale, num_dispatch, demands} ->
          case Map.get(by_locale, locale, []) do
            [] ->
              {by_locale, num_dispatch, demands}

            local_events ->
              {now, later, counter} = split_events(local_events, counter)
              Process.send(pid, {:"$gen_consumer", {self(), ref}, now}, [:noconnect])

              # Re-insert this consumer with its reduced demand so the list
              # stays sorted by counter.
              demands = Enum.reject(demands, &match?({_, _, ^ref, _}, &1))
              demands = add_demand(counter, pid, ref, locale, demands)

              by_locale = Map.put(by_locale, locale, later)
              {by_locale, num_dispatch - length(now), demands}
          end
      end)

    leftovers = Enum.flat_map(by_locale, fn {_, events} -> events end)
    {leftovers, demands}
  end

  # Splits events into {sent-now, kept-for-later, remaining-demand}.
  defp split_events(events, counter) when length(events) <= counter do
    {events, [], counter - length(events)}
  end

  defp split_events(events, counter) do
    {now, later} = Enum.split(events, counter)
    {now, later, 0}
  end

  # Inserts a demand entry, keeping the list sorted by counter (descending).
  defp add_demand(counter, pid, ref, locale, [{c, _, _, _} | _] = demands) when counter > c,
    do: [{counter, pid, ref, locale} | demands]

  defp add_demand(counter, pid, ref, locale, [demand | demands]),
    do: [demand | add_demand(counter, pid, ref, locale, demands)]

  defp add_demand(counter, pid, ref, locale, []) when is_integer(counter),
    do: [{counter, pid, ref, locale}]

  # Removes the entry for `ref`, returning its counter and locale.
  defp pop_demand(ref, demands) do
    {{current, _pid, ^ref, locale}, rest} = List.keytake(demands, ref, 2)
    {current, locale, rest}
  end
end
|
lib/localitydispatcher.ex
| 0.84869
| 0.479077
|
localitydispatcher.ex
|
starcoder
|
defmodule ConfigSmuggler do
  @moduledoc """
  <img src="https://github.com/appcues/config_smuggler/raw/master/assets/smuggler.jpg?raw=true" height="170" width="170" align="right">
  ConfigSmuggler is a library for converting Elixir-style configuration
  statements to and from string-encoded key/value pairs.
  Elixir (and Erlang)'s configuration system is somewhat richer than
  naïve environment variables, i.e., `System.get_env/1`-style key/value
  configs alone can capture.
  Configs in Elixir are namespaced by app, can be arbitrarily nested, and
  contain Elixir-native data types like atoms, keyword lists, etc.
  ConfigSmuggler provides a bridge between Elixir applications and
  key/value configuration stores, especially those available at runtime.
  It makes it dead-simple to use platform-agnostic configuration
  systems with Elixir services.
  ## WARNING!
  The functions in `ConfigSmuggler` are *not suitable* for use on
  untrusted inputs! Code is `eval`ed, atoms are created, etc.
  Configs are considered privileged inputs, so don't worry about using
  ConfigSmuggler for its intended purpose. But please, *never* let user
  input anywhere near this module. You've been warned.
  ## Example
  iex> encoded_configs = %{
  ...> # imagine you fetch this every 60 seconds at runtime
  ...> "elixir-logger-level" => ":debug",
  ...> "elixir-my_api-MyApi.Endpoint-url-port" => "8888",
  ...> }
  iex> ConfigSmuggler.apply(encoded_configs)
  iex> Application.get_env(:logger, :level)
  :debug
  iex> ConfigSmuggler.encode([my_api: Application.get_all_env(:my_api)])
  {:ok, %{"elixir-my_api-MyApi.Endpoint-url-port" => "8888"}}
  ## Overview
  * `apply/1` applies encoded or decoded configs to the current environment.
  * `decode/1` converts an encoded config map into Elixir-native decoded
  configs, also returning a list of zero or more encoded key/value pairs
  that could not be decoded.
  * `encode/1` converts Elixir-native decoded configs
  (i.e., a keyword list with app name as key and keyword list of
  configs as value) into an encoded config map.
  * `encode_file/1` converts an entire `config.exs`-style file
  (along with all files included with `Mix.Config.import_config/1`)
  into an encoded config map.
  * `encode_statement/1` converts a single `config` statement from a
  `config.exs`-style file into an encoded config map.
  * At the command line, `mix smuggle encode <filename.exs>` encodes
  a file with `encode_file/1` and emits a JSON object as output.
  ## Encoding Scheme
  The encoded key begins with `elixir` and is a hyphen-separated "path"
  of atoms and modules leading to the config value we wish to set.
  The value is any valid Elixir term, encoded using normal Elixir syntax.
  Encoding is performed by `Kernel.inspect/2`.
  Decoding is performed by `Code.eval_string/1` and `String.to_atom/1`.
  ## See Also
  If you build and deploy Erlang releases, and you want to apply encoded
  configs before any other apps have started, look into [Distillery
  config providers](https://hexdocs.pm/distillery/config/runtime.html#config-providers).
  This feature allows specified modules to make environment changes
  with `Application.put_env/3`, after which these changes are persisted to
  the release's `sys.config` file and the release is started normally.
  ## Gotchas
  Atoms and modules are expected to follow standard Elixir convention,
  namely that atoms begin with a lowercase letter, modules begin with
  an uppercase letter, and neither contains any hyphen characters.
  If a config file or statement makes reference to `Mix.env()`, the current
  Mix env will be substituted. This may be different than what the config
  file intended.
  ## Authorship and License
  Copyright 2019, [Appcues, Inc.](https://www.appcues.com)
  ConfigSmuggler is released under the [MIT
  License](https://github.com/appcues/config_smuggler/blob/master/MIT_LICENSE.txt).
  """

  alias ConfigSmuggler.Apply
  alias ConfigSmuggler.Decoder
  alias ConfigSmuggler.Encoder

  # Encoded keys/values are plain strings, e.g. "elixir-logger-level" => ":info".
  @type encoded_key :: String.t()
  @type encoded_value :: String.t()
  @type encoded_config_map :: %{encoded_key => encoded_value}
  # Decoded configs mirror Mix config shape: [app: [key: value, ...], ...].
  @type decoded_configs :: [{atom, Keyword.t()}]
  @type validation_error :: {{encoded_key, encoded_value}, error_reason}
  @type error_reason ::
          :bad_input
          | :bad_key
          | :bad_value
          | :load_error

  @doc ~S"""
  Applies the given config to the current environment (i.e., calls
  `Application.put_env/3` a bunch of times). Accepts Elixir-
  native decoded configs or encoded config maps.
  iex> ConfigSmuggler.apply([my_app: [foo: 22]])
  iex> Application.get_env(:my_app, :foo)
  22
  iex> ConfigSmuggler.apply(%{"elixir-my_app-bar" => "33"})
  iex> Application.get_env(:my_app, :bar)
  33
  """
  @spec apply(decoded_configs | encoded_config_map) ::
          :ok | {:error, error_reason}
  def apply(config) when is_list(config), do: Apply.apply_decoded(config)
  def apply(%{} = config), do: Apply.apply_encoded(config)
  def apply(_), do: {:error, :bad_input}

  @doc ~S"""
  Decodes a map of string-encoded key/value pairs into a keyword list of
  Elixir configs, keyed by app. Also returns a list of zero or more invalid
  key/value pairs along with their errors.
  iex> ConfigSmuggler.decode(%{
  ...> "elixir-my_app-some_key" => "22",
  ...> "elixir-my_app-MyApp.Endpoint-url-host" => "\"localhost\"",
  ...> "elixir-logger-level" => ":info",
  ...> "elixir-my_app-MyApp.Endpoint-url-port" => "4444",
  ...> "bad key" => "22",
  ...> "elixir-my_app-foo" => "bogus value",
  ...> })
  {:ok,
  [
  my_app: [
  {:some_key, 22},
  {MyApp.Endpoint, [
  url: [
  port: 4444,
  host: "localhost",
  ]
  ]},
  ],
  logger: [
  level: :info,
  ],
  ],
  [
  {{"elixir-my_app-foo", "bogus value"}, :bad_value},
  {{"bad key", "22"}, :bad_key},
  ]
  }
  """
  @spec decode(encoded_config_map) :: {:ok, decoded_configs, [validation_error]}
  def decode(encoded_config_map) do
    Decoder.decode_and_merge(encoded_config_map)
  end

  @doc ~S"""
  Converts Elixir-native decoded configs (i.e., a keyword list with
  app name as key and keyword list of configs as value) into an
  encoded config map.
  iex> ConfigSmuggler.encode([logger: [level: :info], my_app: [key: "value"]])
  {:ok, %{
  "elixir-logger-level" => ":info",
  "elixir-my_app-key" => "\"value\"",
  }}
  """
  @spec encode(decoded_configs) ::
          {:ok, encoded_config_map} | {:error, error_reason}
  def encode(decoded_configs) when is_list(decoded_configs) do
    try do
      {:ok,
       decoded_configs
       |> Enum.flat_map(&encode_app_and_opts/1)
       |> Enum.into(%{})}
    rescue
      # Any failure while walking the config structure is reported uniformly.
      _e -> {:error, :bad_input}
    end
  end

  def encode(_), do: {:error, :bad_input}

  # Encodes one {app, opts} pair into a list of encoded key/value tuples.
  defp encode_app_and_opts({app, opts}) when is_list(opts) do
    Encoder.encode_app_path_and_opts(app, [], opts)
  end

  @doc ~S"""
  Reads a config file and returns a map of encoded key/value pairs
  representing the configuration. Respects `Mix.Config.import_config/1`.
  iex> ConfigSmuggler.encode_file("config/config.exs")
  {:ok, %{
  "elixir-logger-level" => ":info",
  # ...
  }}
  """
  @spec encode_file(String.t()) ::
          {:ok, encoded_config_map} | {:error, error_reason}
  def encode_file(filename) do
    try do
      {env, _files} = Mix.Config.eval!(filename)
      encode(env)
    rescue
      Code.LoadError ->
        {:error, :load_error}

      File.Error ->
        {:error, :load_error}

      _e ->
        {:error, :bad_input}
    end
  end

  @doc ~S"""
  Encodes a single `Mix.Config.config/2` or `Mix.Config.config/3`
  statement into one or more encoded key/value pairs.
  iex> ConfigSmuggler.encode_statement("config :my_app, key1: :value1, key2: \"value2\"")
  {:ok, %{
  "elixir-my_app-key1" => ":value1",
  "elixir-my_app-key2" => "\"value2\"",
  }}
  iex> ConfigSmuggler.encode_statement("config :my_app, MyApp.Endpoint, url: [host: \"localhost\", port: 4444]")
  {:ok, %{
  "elixir-my_app-MyApp.Endpoint-url-host" => "\"localhost\"",
  "elixir-my_app-MyApp.Endpoint-url-port" => "4444",
  }}
  """
  @spec encode_statement(String.t()) ::
          {:ok, encoded_config_map} | {:error, error_reason}
  def encode_statement(stmt) when is_binary(stmt) do
    # Split off the leading "config " keyword at the first colon, then
    # re-evaluate the remainder as a list: "config :app, k: v" -> [:app, k: v].
    # WARNING: uses Code.eval_string/1 — privileged input only (see moduledoc).
    case String.split(stmt, ":", parts: 2) do
      [_, config] ->
        case Code.eval_string("[:#{config}]") do
          # Three-arg form: config :app, Some.Path, opts
          {[app, path | opts], _} when is_atom(path) ->
            {:ok,
             Encoder.encode_app_path_and_opts(app, [path], opts)
             |> Enum.into(%{})}

          # Two-arg form: config :app, opts
          {[app | opts], _} ->
            {:ok,
             Encoder.encode_app_path_and_opts(app, [], opts)
             |> Enum.into(%{})}

          _ ->
            {:error, :bad_input}
        end

      _ ->
        {:error, :bad_input}
    end
  end

  def encode_statement(_), do: {:error, :bad_input}
end
|
lib/config_smuggler.ex
| 0.841891
| 0.452415
|
config_smuggler.ex
|
starcoder
|
defmodule Thumbnex.ExtractFrame do
  @moduledoc """
  Helpers for extracting one or more frames from a video or image file,
  using FFmpeg via `FFmpex`.
  """

  alias Thumbnex.Animations

  import FFmpex
  use FFmpex.Options

  @doc """
  Extract a single frame from the input file.
  Specify the time offset in seconds (0 for still images).
  Returns the path of the single frame image file.
  Options:
  * `:output_path` - Where to store the resulting file. Defaults to temporary file.
  * `:output_ext` - File extension for output. Ignored if `:output_path` is set. Defaults to `".png"`.
  """
  def single_frame(file_path, time_offset_seconds, opts \\ []) do
    output_ext = Keyword.get(opts, :output_ext, ".png")
    output_path = Keyword.get(opts, :output_path, temporary_file(output_ext))

    command =
      new_command()
      |> add_input_file(file_path)
      |> add_output_file(output_path)
      |> add_file_option(option_ss(time_offset_seconds))
      |> add_file_option(option_vframes(1))

    # Assert success; a failed FFmpeg run raises a MatchError here.
    :ok = execute(command)
    output_path
  end

  @doc """
  Extract multiple frames from the input file.
  Specify the number of frames, and the frames per second, to output.
  Returns the path of the output file, a single file containing multiple frames.
  Options:
  * `:output_path` - Where to store the resulting file. Defaults to temporary file.
  * `:output_ext` - File extension for output. Ignored if `:output_path` is set. Defaults to `".gif"`.
  """
  def multiple_frames(file_path, frame_count, fps, opts \\ []) do
    output_ext = Keyword.get(opts, :output_ext, ".gif")
    output_path = Keyword.get(opts, :output_path, temporary_file(output_ext))
    original_duration = Animations.duration(file_path)

    # use setpts filter to prevent output FPS from influencing which input frames are chosen
    secs_per_frame = original_duration / frame_count
    setpts_string = "setpts=PTS/#{secs_per_frame}/#{fps}"
    fps_string = "fps=#{fps}"
    vf_value = "#{setpts_string},#{fps_string}"

    command =
      new_command()
      |> add_input_file(file_path)
      |> add_output_file(output_path)
      |> add_file_option(option_vframes(frame_count))
      |> add_file_option(option_vf(vf_value))

    :ok = execute(command)
    output_path
  end

  # Generates a unique file path in the system temp directory.
  defp temporary_file(ext) do
    random = rand_uniform(999_999_999_999)
    # BUGFIX: System.tmp_dir/0 can return nil (no writable tmp dir), which made
    # Path.join/2 raise an opaque error; tmp_dir!/0 raises a descriptive one.
    Path.join(System.tmp_dir!(), "thumbnex-#{random}#{ext}")
  end

  # Random integer in 1..high; works on both modern and pre-OTP 19 runtimes.
  defp rand_uniform(high) do
    # Load :rand (if present) so function_exported?/3 gives an accurate answer.
    _ = Code.ensure_loaded(:rand)

    if function_exported?(:rand, :uniform, 1) do
      :rand.uniform(high)
    else
      # Erlang/OTP < 19
      apply(:crypto, :rand_uniform, [1, high])
    end
  end
end
|
lib/thumbnex/extract_frame.ex
| 0.813201
| 0.400749
|
extract_frame.ex
|
starcoder
|
defmodule Exred.NodeTest do
  @moduledoc """
  A helper module to set up tests for Exred nodes.
  See example usage below. The `module` and `config` arguments can be specified either on the
  `use` line or in the `start_node` call.
  `module is the node module that will be tested
  `config` is the config overrides to start the node with (overrides the config in the actual node being tested)
  `start_node/0` will start the node. It will add the `:pid` and `:node` keys to the test context.
  `:pid` is the process id of the running node GenServer
  `:node` is the node attributes map:
  %{
  module :: atom,
  id :: atom
  name :: string,
  category :: string,
  info :: string,
  config :: map,
  ui_attributes :: map
  }
  ## Example
  (this would go into a test file)
  defmodule Exred.Node.MynodeTest do
  use ExUnit.Case
  use Exred.NodeTest, module: Exred.Node.Mynode
  setup_all do
  start_node()
  end
  test "runnnig", context do
  assert is_pid(context.pid)
  end
  end
  """

  # Injects start_node/0,1,2 and log/1 into the using test module.
  # NOTE(review): the generated code calls assert/1 and start_supervised!/1,
  # so the using module must also `use ExUnit.Case` — confirm against callers.
  defmacro __using__(opts) do
    quote do
      require Logger

      # Zero-arg variant reads :module and :config from the `use` options.
      def start_node(),
        do: start_node(Keyword.get(unquote(opts), :module), Keyword.get(unquote(opts), :config, %{}))

      def start_node(module), do: start_node(module, %{})

      def start_node(module, config) do
        # figure out the node attributes we need to start the module up with
        # (merge config into the default attributes)
        modattrs = module.attributes()
        node_attributes = %{modattrs | config: Map.merge(modattrs.config, config)}
        assert is_map(node_attributes)

        node =
          node_attributes
          |> Map.put(:module, module)
          |> Map.put(:id, module)

        if node.category == "daemon" do
          # start child processes
          child_specs = node.module.daemon_child_specs(node.config)
          assert is_list(child_specs)
          log("Starting #{length(child_specs)} child process(es)")

          child_specs
          |> Enum.each(&start_supervised!/1)
        end

        # create a dummy event sending function
        send_event_fun = fn event, payload ->
          log("EVENT: #{inspect(event)} | PAYLOAD: #{inspect(payload)}")
        end

        # start the node
        start_args = [node.id, node.config, send_event_fun]
        child_spec = Supervisor.child_spec({node.module, start_args}, id: node.id)
        pid = start_supervised!(child_spec)

        # Returned keyword list is merged into the ExUnit test context.
        [pid: pid, node: node]
      end

      def log(msg) do
        Logger.info("TEST OUTPUT: " <> msg)
      end
    end
  end
end
|
lib/exred_nodetest.ex
| 0.774541
| 0.659025
|
exred_nodetest.ex
|
starcoder
|
defmodule Snmp.Plug do
  @moduledoc """
  Plug for exposing MIB through REST API
  # API
  * `/get`: Retrieve MIB objects
  * Params: a list of OIDS
  * Returns:
  ```json
  {
  errors: {
  "1.2.3": "noSuchObject"
  },
  objects: {
  "1.3.6.1.2.1.1.1.0": "SNMP Agent"
  }
  }
  ```
  * Example: GET /get?1.3.6.1.2.1.1.1.0&1.2.3
  * `/getnext`: Retrieve a list of objects starting from given OID
  * Params: one OID
  * Returns:
  ```json
  {
  errors: {},
  objects: {
  "1.3.6.1.2.1.1.1.0": "SNMP Agent"
  },
  next: "1.3.6.1.3.1"
  }
  ```
  """
  use Plug.Builder

  alias Snmp.Plug.Get
  alias Snmp.Plug.GetNext
  alias Snmp.Plug.GetTable

  plug Plug.Parsers, parsers: []
  plug :mib

  @doc false
  def init(opts) do
    agent = Keyword.get(opts, :agent)

    # The agent module must expose __agent__/1 (i.e., be a generated SNMP agent).
    if Kernel.function_exported?(agent, :__agent__, 1) do
      %{agent: agent}
    else
      raise "Missing/bad parameter for plug #{__MODULE__}: :agent"
    end
  end

  @doc false
  def call(conn, %{agent: agent} = opts) do
    # Stash the agent so downstream handlers can reach it via conn.private.
    conn
    |> put_private(:snmp_agent, agent)
    |> super(opts)
    |> assign(:called_all_plugs, true)
  end

  # Route by path; only the GET method is accepted on every endpoint.
  def mib(%{path_info: ["get"]} = conn, _opts) do
    case conn.method do
      "GET" -> get(conn, Get.Request.parse(conn))
      _ -> send_resp(conn, 405, "")
    end
  end

  def mib(%{path_info: ["getnext"]} = conn, _opts) do
    case conn.method do
      "GET" -> get_next(conn, GetNext.Request.parse(conn))
      _ -> send_resp(conn, 405, "")
    end
  end

  def mib(%{path_info: ["table" | _]} = conn, _opts) do
    case conn.method do
      "GET" -> get_table(conn, GetTable.Request.parse(conn))
      _ -> send_resp(conn, 405, "")
    end
  end

  def mib(conn, _opts), do: send_resp(conn, 404, "NOT FOUND")

  # Invalid /get request: reply 400 with the encoded validation errors.
  def get(conn, %{valid?: false} = req) do
    body = Get.Response.encode(req)
    send_resp(conn, 400, Jason.encode!(body))
  end

  def get(conn, %{oids: oids}) when is_list(oids) do
    agent = conn.private[:snmp_agent]

    body =
      oids
      |> Enum.map(&elem(&1, 1))
      |> agent.get()
      |> case do
        {:error, _} = e ->
          Get.Response.encode(e)

        objects ->
          # Re-pair each returned object with its original (string) OID.
          [Enum.map(oids, &elem(&1, 0)), objects]
          |> Enum.zip()
          |> Get.Response.encode()
      end

    conn
    |> put_resp_content_type("application/json")
    |> send_resp(200, Jason.encode!(body))
  end

  # Invalid /getnext request: reply 400 with the encoded validation errors.
  def get_next(conn, %{valid?: false} = req) do
    body = GetNext.Response.encode(req)
    send_resp(conn, 400, Jason.encode!(body))
  end

  def get_next(conn, %{oid: oid, limit: limit}) do
    agent = conn.private[:snmp_agent]

    body =
      oid
      |> agent.stream()
      |> Enum.take(limit)
      |> GetNext.Response.encode()

    conn
    |> put_resp_content_type("application/json")
    |> send_resp(200, Jason.encode!(body))
  end

  # BUGFIX: this clause was previously defined as get_table/3, so it could
  # never be reached from mib/2 (which calls get_table/2); invalid table
  # requests crashed with a FunctionClauseError instead of returning 400.
  def get_table(conn, %{valid?: false} = req) do
    body = GetTable.Response.encode(req)
    send_resp(conn, 400, Jason.encode!(body))
  end

  def get_table(conn, %{table_name: table_name, start: start, limit: limit}) do
    agent = conn.private[:snmp_agent]

    body =
      table_name
      |> agent.table_stream(start)
      |> Enum.take(limit)
      |> GetTable.Response.encode()

    # Set the JSON content type, consistent with get/2 and get_next/2.
    conn
    |> put_resp_content_type("application/json")
    |> send_resp(200, Jason.encode!(body))
  end
end
|
lib/snmp/plug.ex
| 0.820721
| 0.783368
|
plug.ex
|
starcoder
|
defmodule Nerves.Grove.PCA9685.Servo do
  @moduledoc """
  Represents a positionable servo connected to an specific channel and pin on a PCA9685 device.
  [%{bus: 1, address: 0x40, channel: 0, position: 90, min: 175, max: 575},
  %{bus: 1, address: 0x40, channel: 1, position: 90, min: 175, max: 575},
  %{bus: 1, address: 0x40, channel: 2, position: 90, min: 175, max: 575}]
  """

  alias Nerves.Grove.PCA9685.ServoSweep

  @servo_registry_name :servo_proccess_registry_name
  @server Nerves.Grove.PCA9685.ServoServer
  # Default delay between sweep steps, in milliseconds.
  @default_step_delay 300

  @doc """
  Connect to the channel via the PCA9695 device.
  """
  def start_link(config) do
    GenServer.start_link(@server, config, name: via_tuple(config))
  end

  # Registry lookup handler: addresses a servo process by its
  # {bus, address, channel} identity.
  defp via_tuple(%{bus: bus, address: address, channel: channel}) do
    {:via, Registry, {@servo_registry_name, {bus, address, channel}}}
  end

  @doc """
  Returns the current position of the servo.
  """
  @spec position(map) :: 0..180
  def position(servo) do
    GenServer.call(via_tuple(servo), :position)
  end

  @doc """
  Sets the angle position of the servo.
  It accepts a map with servo tuple id and a angle value 0..180
  """
  def position(servo, degrees) when is_integer(degrees) and degrees >= 0 and degrees <= 180 do
    GenServer.cast(via_tuple(servo), {:position, degrees})
  end

  @doc """
  Begin the process of sweeping to a new target position over a period of time.
  See `ServoSweep` for more information.
  """
  def tsweep(servo, degrees, duration, step_delay \\ @default_step_delay)
      when is_integer(step_delay) and step_delay > 0 do
    steps = round(duration / step_delay)
    # Always take at least one step, even for durations shorter than one delay.
    total_steps = if steps == 0, do: 1, else: steps
    sweep(servo, degrees, total_steps, step_delay)
  end

  @doc """
  Begin the process of sweeping to a new target position n total_steps times.
  See `ServoSweep` for more information.
  """
  def nsweep(servo, degrees, total_steps, step_delay \\ @default_step_delay)
      when is_integer(step_delay) and step_delay > 0 do
    sweep(servo, degrees, total_steps, step_delay)
  end

  defp sweep(servo, degrees, total_steps, step_delay)
       when is_map(servo) and degrees in 0..180 do
    ServoSweep.start_link(servo, degrees, total_steps, step_delay)
  end
end
|
lib/nerves_grove/pca9685/servo.ex
| 0.829734
| 0.446736
|
servo.ex
|
starcoder
|
defmodule GuardLog do
@moduledoc """
Module for parsing (`parse_log/1`) guard sleep logs and generating
guard sleep maps (`to_guard_sleep_map/1`).
**Note:** It's possible to remove public struct and parse_log altogether here;
we can directly implement `to_guard_sleep_map/1` functionality,
saving one `Enum.reduce` and one `Enum.reverse`
(sleep map can be computed in one pass after sorting log by timestamp).
However, this is the original solution. Also, separation of parsing from
sleep map generation is beneficial from separation of concerns perspective.
"""
alias Timex
alias Timex.Duration
@enforce_keys [:datetime, :guard_id, :event]
defstruct [:datetime, :event, :guard_id]
@type log_line :: %GuardLog{datetime: NaiveDateTime.t(), event: atom(), guard_id: integer()}
@type t :: [log_line]
@type parsed_line ::
{:falls_asleep, NaiveDateTime.t()}
| {:wakes_up, NaiveDateTime.t()}
| {:shift_begins, NaiveDateTime.t(), any()}
@spec parse_log([binary]) :: t
def parse_log(lines) when is_list(lines) do
  # Parse, order chronologically, then thread the current guard id through
  # the events (only :shift_begins events carry a guard id themselves).
  {_last_guard_id, reversed_log} =
    lines
    |> Enum.map(&parse_log_line/1)
    |> Enum.sort_by(fn entry -> entry |> elem(1) |> Timex.to_unix() end)
    |> Enum.reduce({nil, []}, &build_log_line/2)

  Enum.reverse(reversed_log)
end

# Folds one parsed event into {current_guard_id, acc}. A :shift_begins event
# switches to its own guard id; all other events inherit the id of the most
# recent shift.
defp build_log_line({:shift_begins, datetime, new_guard_id}, {_old_guard_id, acc}) do
  line = %GuardLog{datetime: datetime, guard_id: new_guard_id, event: :shift_begins}
  {new_guard_id, [line | acc]}
end

defp build_log_line({event, datetime}, {guard_id, acc}) do
  line = %GuardLog{datetime: datetime, guard_id: guard_id, event: event}
  {guard_id, [line | acc]}
end
@doc """
Converts `GuardLog` list into a map from `guard_id` to a list of all minutes he slept.
Minutes are represented as simple integers.
"""
@spec to_guard_sleep_map(t) :: map()
def to_guard_sleep_map(log) when is_list(log) do
log
|> Enum.reduce(%{}, fn elem, acc ->
%GuardLog{datetime: datetime, event: event, guard_id: guard_id} = elem
case event do
:falls_asleep ->
Map.put(acc, :started_sleep, datetime)
:wakes_up ->
sleep_minutes = get_sleep_minutes(acc.started_sleep, datetime)
acc
|> Map.update(guard_id, sleep_minutes, fn all_sleep_minutes -> all_sleep_minutes ++ sleep_minutes end)
|> Map.delete(:started_sleep)
:shift_begins ->
acc
end
end)
end
@one_minute Duration.from_minutes(1)
@spec get_sleep_minutes(Time.t(), Time.t()) :: [integer()]
defp get_sleep_minutes(start_dt, end_dt) do
end_dt_discounting_last_minute = Timex.subtract(end_dt, @one_minute)
Stream.unfold({:init, start_dt}, fn
# don't forget to add initial value to output
{:init, current} ->
{current, current}
current ->
if Timex.equal?(current, end_dt_discounting_last_minute, :minutes) do
nil
else
new_dt = Timex.add(current, @one_minute)
{new_dt, new_dt}
end
end)
|> Enum.map(fn %NaiveDateTime{minute: minute} -> minute end)
end
@spec parse_log_line(binary()) :: parsed_line()
defp parse_log_line(line) when is_binary(line) do
splitted = [date, time | _] = String.split(line, [" ", "[", "]", "#"], trim: true)
datetime = date_and_time_to_datetime(date, time)
case splitted |> Enum.drop(2) do
["Guard", guard_id, "begins", "shift"] ->
{:shift_begins, datetime, String.to_integer(guard_id)}
["falls", "asleep"] ->
{:falls_asleep, datetime}
["wakes", "up"] ->
{:wakes_up, datetime}
end
end
@spec date_and_time_to_datetime(binary(), binary()) :: NaiveDateTime.t()
defp date_and_time_to_datetime(date, time) when is_binary(date) and is_binary(time) do
{:ok, dt} = NaiveDateTime.from_iso8601("#{date}T#{time}:00Z")
dt
end
end
|
lib/day04/guard_log.ex
| 0.737442
| 0.476884
|
guard_log.ex
|
starcoder
|
defmodule DecemberFour do
  @moduledoc """
  Fourth Advent of Code task.
  """

  # Inclusive candidate range supplied by the puzzle input.
  @start 246_515
  @stop 739_105

  @doc """
  Part one of day 4.

  Counts the candidates whose digits never decrease and that contain at
  least one run of two or more equal adjacent digits.
  """
  def part_one do
    list_to_duples_and_triples()
    |> Enum.count(fn %{result: [has_dupes, _all_dupes_are_trips]} -> has_dupes end)
  end

  @doc """
  Part two of day 4.

  Like part one, but at least one duplicate must NOT be part of a longer
  run — i.e. an exact pair must exist somewhere.
  """
  def part_two do
    list_to_duples_and_triples()
    |> Enum.count(fn %{result: [has_dupes, all_dupes_are_trips]} ->
      has_dupes && !all_dupes_are_trips
    end)
  end

  @doc """
  Convert each list of digits to a list of two booleans.
  0: Has duplicates
  1: Has triplets overlaping all duplicates
  """
  def list_to_duples_and_triples do
    @start..@stop
    |> Enum.map(&Integer.digits/1)
    |> Enum.map(fn digits ->
      %{
        list: digits,
        result: check_progression(digits)
      }
    end)
  end

  @doc """
  Traverse each digit and store the position from the tail for each place a
  duplicate and triplet is found. If a triplet is found, store where both
  preceding digits were found. End by checking if there were any dupes and if
  there were any dupes not overlaped by a triplet.
  ## Examples
      iex> DecemberFour.check_progression([ 1, 2, 1 ])
      [false, false]
      iex> DecemberFour.check_progression([ 1, 1, 2 ])
      [true, false]
      iex> DecemberFour.check_progression([ 1, 1, 1 ])
      [true, true]
      iex> DecemberFour.check_progression([ 1, 1, 2, 2, 2 ])
      [true, false]
      iex> DecemberFour.check_progression([ 1, 1, 1, 2, 2 ])
      [true, false]
      iex> DecemberFour.check_progression([ 1, 1, 1, 2, 2, 2])
      [true, true]
  """
  def check_progression(
        [digit | tail],
        previous \\ 0,
        second_previous \\ 0,
        dupes \\ [],
        trips \\ []
      ) do
    # A duplicate (digit equal to its predecessor) is recorded by its
    # distance from the end of the number.
    dupes = if digit == previous, do: [length(tail) | dupes], else: dupes

    # A triplet covers both of its duplicate positions, so record both.
    trips =
      if digit == previous && digit == second_previous do
        [length(tail), length(tail) + 1 | trips]
      else
        trips
      end

    cond do
      # Digits must never decrease; bail out as soon as they do.
      digit < previous ->
        [false, false]

      # Last digit reached: summarize what was collected.
      tail == [] ->
        has_dupes = dupes != []
        # True only when every duplicate position is part of a triplet
        # (or longer run) — i.e. no standalone pair exists.
        all_dupes_in_trips = Enum.all?(dupes, &(&1 in trips))
        [has_dupes, all_dupes_in_trips]

      true ->
        check_progression(tail, digit, previous, dupes, trips)
    end
  end
end
|
04/elixir/lib/december_four.ex
| 0.734024
| 0.589716
|
december_four.ex
|
starcoder
|
defmodule SnapFramework.Engine.Builder do
  @moduledoc """
  ## Overview
  This module is responsible for taking the parsed EEx template and building the graph.
  """

  # Folds a parsed-template item list into a Scenic graph.
  # NOTE(review): a [type: :graph, opts: opts] item replaces the accumulator
  # with a freshly built graph, discarding anything accumulated so far —
  # presumably it is always the first item the parser emits; confirm.
  def build_graph(list, acc \\ %{}) do
    Enum.reduce(list, acc, fn item, acc ->
      case item do
        [type: :graph, opts: opts] ->
          Scenic.Graph.build(opts)

        [type: :component, module: module, data: data, opts: opts] ->
          # Children, when present, arrive through the `:do` option.
          children = if opts[:do], do: opts[:do], else: nil
          acc |> module.add_to_graph(data, Keyword.put_new(opts, :children, children))

        [type: :component, module: module, data: data, opts: opts, children: children] ->
          acc |> module.add_to_graph(data, Keyword.put_new(opts, :children, children))

        [type: :primitive, module: module, data: data, opts: opts] ->
          acc |> module.add_to_graph(data, opts)

        [
          type: :layout,
          children: children,
          padding: padding,
          width: width,
          height: height,
          translate: translate
        ] ->
          # Lay the children out, then continue folding with the new graph.
          do_layout(acc, children, padding, width, height, translate).graph

        "\n" ->
          # Literal newlines left over from the template are skipped.
          acc

        list ->
          # Nested lists of items are processed recursively; anything else
          # is ignored.
          if is_list(list) do
            build_graph(list, acc)
          else
            acc
          end
      end
    end)
  end

  # Top-level layout entry: wraps the graph in a layout-state map that tracks
  # the running cursor (last_x/last_y) and the largest extents seen so far.
  defp do_layout(%Scenic.Graph{} = graph, children, padding, width, height, translate) do
    layout = %{
      last_x: 0,
      last_y: 0,
      padding: padding,
      width: width,
      height: height,
      largest_width: 0,
      largest_height: 0,
      graph: graph,
      translate: translate
    }

    Enum.reduce(children, layout, fn child, layout ->
      case child do
        [type: :component, module: module, data: data, opts: opts] ->
          children = if opts[:do], do: opts[:do], else: nil
          translate_and_render(layout, module, data, Keyword.put_new(opts, :children, children))

        [type: :component, module: module, data: data, opts: opts, children: children] ->
          translate_and_render(layout, module, data, Keyword.put_new(opts, :children, children))

        [type: :primitive, module: _module, data: _data, opts: _opts] ->
          # Primitives are not positioned by the layout engine.
          layout

        [
          type: :layout,
          children: children,
          padding: padding,
          width: width,
          height: height,
          translate: translate
        ] ->
          # Nested layout: its translate is offset by the parent's translate.
          {x, y} = translate
          {prev_x, prev_y} = layout.translate

          nested_layout = %{
            layout |
            # | last_x: x + prev_x + layout.padding,
            # | last_y: layout.last_y + y + layout.largest_height + layout.padding,
            padding: padding,
            width: width,
            height: height,
            translate: {x + prev_x, y + prev_y}
          }

          # Only the graph produced by the nested layout is kept; the
          # parent's cursor state is left untouched.
          graph = do_layout(nested_layout, children).graph
          %{layout | graph: graph}

        "\n" ->
          layout

        list ->
          if is_list(list) do
            do_layout(layout, list)
          else
            layout
          end
      end
    end)
  end

  # Same traversal as above, but starting from an existing layout-state map
  # (used for nested layouts and nested item lists).
  defp do_layout(layout, children) do
    Enum.reduce(children, layout, fn child, layout ->
      case child do
        [type: :component, module: module, data: data, opts: opts] ->
          children = if opts[:do], do: opts[:do], else: nil
          translate_and_render(layout, module, data, Keyword.put_new(opts, :children, children))

        [type: :component, module: module, data: data, opts: opts, children: children] ->
          translate_and_render(layout, module, data, Keyword.put_new(opts, :children, children))

        [type: :primitive, module: _module, data: _data, opts: _opts] ->
          layout

        [
          type: :layout,
          children: children,
          padding: padding,
          width: width,
          height: height,
          translate: translate
        ] ->
          {x, y} = translate
          {prev_x, prev_y} = layout.translate

          nested_layout = %{
            layout |
            # | last_x: x + prev_x + layout.padding,
            # | last_y: layout.last_y + y + layout.largest_height + layout.padding,
            padding: padding,
            width: width,
            height: height,
            translate: {x + prev_x, y + prev_y}
          }

          do_layout(nested_layout, children)

        "\n" ->
          layout

        list ->
          if is_list(list) do
            do_layout(layout, list)
          else
            layout
          end
      end
    end)
  end

  # Positions one component: advances along the x axis while the component
  # still fits within layout.width, otherwise wraps to a new row below the
  # tallest element seen so far.
  defp translate_and_render(layout, module, data, opts) do
    {l, t, r, b} = get_bounds(module, data, opts)
    {tx, ty} = layout.translate

    layout =
      case fits_in_x?(r + layout.last_x + layout.padding, layout.width) do
        true ->
          x = l + layout.last_x + layout.padding + tx
          y = layout.last_y + ty

          %{
            layout
            | last_x: r + layout.last_x + layout.padding,
              graph:
                module.add_to_graph(layout.graph, data, Keyword.merge(opts, translate: {x, y}))
          }

        false ->
          # Wrap: reset x to the left edge and drop below the current row.
          x = l + tx + layout.padding
          y = t + layout.last_y + layout.largest_height + layout.padding

          %{
            layout
            | last_x: l + tx + r + layout.padding,
              last_y: t + layout.last_y + layout.largest_height + layout.padding,
              graph:
                module.add_to_graph(layout.graph, data, Keyword.merge(opts, translate: {x, y}))
          }
      end

    # Track the largest bounds seen on this row; used when wrapping.
    layout = if r > layout.largest_width, do: %{layout | largest_width: r}, else: layout
    if b > layout.largest_height, do: %{layout | largest_height: b}, else: layout
  end

  # Bounds are provided by the component module itself; the {l, t, r, b}
  # names suggest {left, top, right, bottom} — inferred, confirm in `bounds/2`.
  defp get_bounds(mod, data, opts) do
    mod.bounds(data, opts)
  end

  defp fits_in_x?(potential_x, max_x), do: potential_x <= max_x
  # defp fits_in_y?(potential_y, max_y), do: potential_y <= max_y
end
|
lib/engine/builder.ex
| 0.642545
| 0.442697
|
builder.ex
|
starcoder
|
defmodule Expo.Translation do
  @moduledoc """
  Translation Structs
  """

  alias Expo.Translation.Plural
  alias Expo.Translation.Singular

  @type msgid :: [String.t()]
  @type msgstr :: [String.t()]
  @type msgctxt :: String.t()

  @type t :: Singular.t() | Plural.t()

  @typedoc """
  key that can be used to identify a translation
  """
  @opaque key ::
            {msgctxt :: String.t(),
             (msgid :: String.t()) | {msgid :: String.t(), msgid_plural :: String.t()}}

  @doc """
  Returns a "key" that can be used to identify a translation.
  This function returns a "key" that can be used to uniquely identify a
  translation assuming that no "same" translations exist; for what "same"
  means, look at the documentation for `same?/2`.
  The purpose of this function is to be used in situations where we'd like to
  group or sort translations but where we don't need the whole structs.
  ## Examples
      iex> t1 = %Expo.Translation.Singular{msgid: ["foo"], msgstr: []}
      iex> t2 = %Expo.Translation.Singular{msgid: ["", "foo"], msgstr: []}
      iex> Expo.Translation.key(t1) == Expo.Translation.key(t2)
      true
      iex> t1 = %Expo.Translation.Singular{msgid: ["foo"], msgstr: []}
      iex> t2 = %Expo.Translation.Singular{msgid: ["bar"], msgstr: []}
      iex> Expo.Translation.key(t1) == Expo.Translation.key(t2)
      false
  """
  @spec key(translation :: t()) :: key()
  def key(translation)
  def key(%Singular{} = translation), do: Singular.key(translation)
  def key(%Plural{} = translation), do: Plural.key(translation)

  @doc """
  Tells whether two translations are the same translation according to their
  `msgid`.
  This function returns `true` if `translation1` and `translation2` are the same
  translation, where "the same" means they have the same `msgid` or the same
  `msgid` and `msgid_plural`.
  ## Examples
      iex> t1 = %Expo.Translation.Singular{msgid: ["foo"], msgstr: []}
      iex> t2 = %Expo.Translation.Singular{msgid: ["", "foo"], msgstr: []}
      iex> Expo.Translation.same?(t1, t2)
      true
      iex> t1 = %Expo.Translation.Singular{msgid: ["foo"], msgstr: []}
      iex> t2 = %Expo.Translation.Singular{msgid: ["bar"], msgstr: []}
      iex> Expo.Translation.same?(t1, t2)
      false
  """
  @spec same?(translation1 :: t(), translation2 :: t()) :: boolean
  def same?(translation1, translation2), do: key(translation1) == key(translation2)

  @doc """
  Tells whether the given translation has the flag specified
  ### Examples
      iex> Expo.Translation.has_flag?(%Expo.Translation.Singular{msgid: [], msgstr: [], flags: [["foo"]]}, "foo")
      true
      iex> Expo.Translation.has_flag?(%Expo.Translation.Singular{msgid: [], msgstr: [], flags: [["foo"]]}, "bar")
      false
  """
  @spec has_flag?(translation :: t(), flag :: String.t()) :: boolean()
  def has_flag?(translation, flag)
  def has_flag?(%Singular{flags: flags}, flag), do: flag in List.flatten(flags)
  def has_flag?(%Plural{flags: flags}, flag), do: flag in List.flatten(flags)

  @doc """
  Append flag to translation
  Keeps the line formatting intact
  ### Examples
      iex> translation = %Expo.Translation.Singular{msgid: [], msgstr: [], flags: []}
      iex> Expo.Translation.append_flag(translation, "foo")
      %Expo.Translation.Singular{msgid: [], msgstr: [], flags: [["foo"]]}
  """
  @spec append_flag(translation :: t(), flag :: String.t()) :: t()
  def append_flag(translation, flag)

  def append_flag(%Singular{flags: flags} = translation, flag),
    do: %Singular{translation | flags: append_flag_to_lines(flags, flag)}

  def append_flag(%Plural{flags: flags} = translation, flag),
    do: %Plural{translation | flags: append_flag_to_lines(flags, flag)}

  # Appends `flag` to the flag lines, preserving the original line layout:
  # no-op when the flag is already present; a single existing line is
  # extended in place; multiple lines get a fresh line appended.
  # Renamed from `_append_flag` — an underscore prefix conventionally marks
  # unused names in Elixir, not private functions.
  defp append_flag_to_lines(flags, flag) do
    if flag in List.flatten(flags) do
      flags
    else
      case flags do
        [] -> [[flag]]
        [flag_line] -> [flag_line ++ [flag]]
        _multiple_lines -> flags ++ [[flag]]
      end
    end
  end
end
|
lib/expo/translation.ex
| 0.860852
| 0.404802
|
translation.ex
|
starcoder
|
defmodule HTTPoison.Request do
  @moduledoc """
  `Request` properties:
  * `:method` - HTTP method as an atom (`:get`, `:head`, `:post`, `:put`,
  `:delete`, etc.)
  * `:url` - target url as a binary string or char list
  * `:body` - request body. See more below
  * `:headers` - HTTP headers as an orddict (e.g., `[{"Accept", "application/json"}]`)
  * `:options` - Keyword list of options
  * `:params` - Query parameters as a map, keyword, or orddict
  `:body`:
  * binary, char list or an iolist
  * `{:form, [{K, V}, ...]}` - send a form url encoded
  * `{:file, "/path/to/file"}` - send a file
  * `{:stream, enumerable}` - lazily send a stream of binaries/charlists
  `:options`:
  * `:timeout` - timeout for establishing a TCP or SSL connection, in milliseconds. Default is 8000
  * `:recv_timeout` - timeout for receiving an HTTP response from the socket. Default is 5000
  * `:stream_to` - a PID to stream the response to
  * `:async` - if given `:once`, will only stream one message at a time, requires call to `stream_next`
  * `:proxy` - a proxy to be used for the request; it can be a regular url
  or a `{Host, Port}` tuple, or a `{:socks5, ProxyHost, ProxyPort}` tuple
  * `:proxy_auth` - proxy authentication `{User, Password}` tuple
  * `:socks5_user`- socks5 username
  * `:socks5_pass`- socks5 password
  * `:ssl` - SSL options supported by the `ssl` erlang module
  * `:follow_redirect` - a boolean that causes redirects to be followed
  * `:max_redirect` - an integer denoting the maximum number of redirects to follow
  * `:params` - an enumerable consisting of two-item tuples that will be appended to the url as query string parameters
  * `:max_body_length` - a non-negative integer denoting the max response body length. See :hackney.body/2
  Timeouts can be an integer or `:infinity`
  """

  # `:url` is the only field a request cannot be constructed without.
  @enforce_keys [:url]
  defstruct method: :get, url: nil, headers: [], body: "", params: %{}, options: []

  # Loose `| any` escape hatches are deliberate here: the struct is a thin
  # wrapper and validation is deferred to the underlying HTTP client.
  @type method :: :get | :post | :put | :patch | :delete | :options | :head
  @type headers :: [{atom, binary}] | [{binary, binary}] | %{binary => binary} | any
  @type url :: binary | any
  @type body :: binary | charlist | iodata | {:form, [{atom, any}]} | {:file, binary} | any
  @type params :: map | keyword | [{binary, binary}] | any
  @type options :: keyword | any

  @type t :: %__MODULE__{
          method: method,
          url: binary,
          headers: headers,
          body: body,
          params: params,
          options: options
        }
end
defmodule HTTPoison.Response do
  # A completed HTTP response: status, body and headers, plus the request
  # (and its URL) that produced it, for traceability.
  defstruct status_code: nil, body: nil, headers: [], request_url: nil, request: nil

  @type t :: %__MODULE__{
          status_code: integer,
          body: term,
          headers: list,
          request: HTTPoison.Request.t(),
          request_url: HTTPoison.Request.url()
        }
end
# Handle returned when a request is issued with `:stream_to`; the `id`
# reference ties the async messages below to their originating request.
defmodule HTTPoison.AsyncResponse do
  defstruct id: nil
  @type t :: %__MODULE__{id: reference}
end

# Async message carrying the response status code.
defmodule HTTPoison.AsyncStatus do
  defstruct id: nil, code: nil
  @type t :: %__MODULE__{id: reference, code: integer}
end

# Async message carrying the response headers.
defmodule HTTPoison.AsyncHeaders do
  defstruct id: nil, headers: []
  @type t :: %__MODULE__{id: reference, headers: list}
end

# Async message carrying one chunk of the response body.
defmodule HTTPoison.AsyncChunk do
  defstruct id: nil, chunk: nil
  @type t :: %__MODULE__{id: reference, chunk: binary}
end

# Async message emitted when the request is redirected; `to` is the new URL.
defmodule HTTPoison.AsyncRedirect do
  defstruct id: nil, to: nil, headers: []
  @type t :: %__MODULE__{id: reference, to: String.t(), headers: list}
end

# Async message signalling that the response stream is complete.
defmodule HTTPoison.AsyncEnd do
  defstruct id: nil
  @type t :: %__MODULE__{id: reference}
end
defmodule HTTPoison.Error do
  defexception reason: nil, id: nil
  @type t :: %__MODULE__{id: reference | nil, reason: any}

  # Human-readable rendering of the error; the async request reference is
  # included only when one is present.
  def message(%__MODULE__{reason: reason, id: id}) do
    case id do
      nil -> inspect(reason)
      id -> "[Reference: #{id}] - #{inspect(reason)}"
    end
  end
end
defmodule HTTPoison do
  @moduledoc """
  The HTTP client for Elixir.
  The `HTTPoison` module can be used to issue HTTP requests and parse HTTP responses to arbitrary urls.
      iex> HTTPoison.get!("https://api.github.com")
      %HTTPoison.Response{status_code: 200,
                          headers: [{"content-type", "application/json"}],
                          body: "{...}"}
  It's very common to use HTTPoison in order to wrap APIs, which is when the
  `HTTPoison.Base` module shines. Visit the documentation for `HTTPoison.Base`
  for more information.
  Under the hood, the `HTTPoison` module just uses `HTTPoison.Base` (as
  described in the documentation for `HTTPoison.Base`) without overriding any
  default function.
  See `request/5` for more details on how to issue HTTP requests
  """

  # All request/response functions are injected by this `use`; nothing is
  # overridden here.
  use HTTPoison.Base
end
|
lib/httpoison.ex
| 0.858585
| 0.459015
|
httpoison.ex
|
starcoder
|
defmodule Cashtrail.Contacts do
  @moduledoc """
  The Contacts context manages the contact data of one entity.
  See `Cashtrail.Contacts.Contact` to have more info about what contacts mean in
  the application.
  """

  import Ecto.Query, warn: false
  alias Cashtrail.Repo

  alias Cashtrail.{Contacts, Entities, Paginator}

  import Cashtrail.Entities.Tenants, only: [to_prefix: 1, put_prefix: 2]
  import Cashtrail.QueryBuilder, only: [build_filter: 3, build_search: 3]

  @type category :: Contacts.Category.t()

  @doc """
  Returns a `%Cashtrail.Paginator.Page{}` struct with a list of contact categories
  in the `:entries` field.
  ## Expected arguments
  * entity - The `%Cashtrail.Entities.Entity{}` that the category references.
  * options - A `keyword` list of the following options:
    * `:search` - search categories by its `:description`.
    * See `Cashtrail.Paginator.paginate/2` to know about the pagination options.
  ## Examples
      iex> list_categories(entity)
      %Cashtrail.Paginator{entries: [%Cashtrail.Contacts.Category{}, ...]}
      iex> list_categories(entity, search: "My desc")
      %Cashtrail.Paginator{entries: [%Cashtrail.Contacts.Category{description: "My Description"}, ...]}
  """
  @spec list_categories(Entities.Entity.t(), keyword) :: Paginator.Page.t(category)
  # Consistency fix: match on %Entities.Entity{} like every other public
  # function in this module (e.g. list_contacts/2, get_category!/2).
  def list_categories(%Entities.Entity{} = entity, options \\ []) do
    Contacts.Category
    |> build_search(Keyword.get(options, :search), [:description])
    |> put_prefix(entity)
    |> Paginator.paginate(options)
  end

  @doc """
  Gets a single contact category.
  Raises `Ecto.NoResultsError` if the Category does not exist.
  See `Cashtrail.Contacts.Category` to have more detailed info about the returned
  struct.
  ## Expected Arguments
  * entity - The `%Cashtrail.Entities.Entity{}` that the category references.
  * id - A `string` that is the unique id of the category to be found.
  ## Examples
      iex> get_category!(entity, 123)
      %Cashtrail.Contacts.Category{}
      iex> get_category!(entity, 456)
      ** (Ecto.NoResultsError)
  """
  @spec get_category!(Entities.Entity.t(), Ecto.UUID.t() | String.t()) :: category
  def get_category!(%Entities.Entity{} = entity, id),
    do: Repo.get!(Contacts.Category, id, prefix: to_prefix(entity))

  @doc """
  Creates a contact category.
  ## Expected Arguments
  * entity - The `%Cashtrail.Entities.Entity{}` that the category references.
  * params - A `map` with the params of the category to be created:
    * `:description` (required) - A `string` with the description of the category.
  See `Cashtrail.Contacts.Category` to have more detailed info about
  the fields.
  ## Returns
  * `{:ok, %Cashtrail.Contacts.Category{}}` in case of success.
  * `{:error, %Ecto.Changeset{}}` in case of error.
  ## Examples
      iex> create_category(entity, %{field: value})
      {:ok, %Cashtrail.Contacts.Category{}}
      iex> create_category(entity, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  @spec create_category(Entities.Entity.t(), map) ::
          {:ok, category} | {:error, Ecto.Changeset.t(category)}
  def create_category(%Entities.Entity{} = entity, attrs) do
    %Contacts.Category{}
    |> Contacts.Category.changeset(attrs)
    |> Repo.insert(prefix: to_prefix(entity))
  end

  @doc """
  Updates a contact category.
  ## Expected Arguments
  * category - The `%Cashtrail.Contacts.Category{}` to be updated.
  * params - A `map` with the field of the category to be updated. See
  `create_category/2` to know about the params that can be given.
  ## Returns
  * `{:ok, %Cashtrail.Contacts.Category{}}` in case of success.
  * `{:error, %Ecto.Changeset{}}` in case of error.
  ## Examples
      iex> update_category(category, %{field: new_value})
      {:ok, %Cashtrail.Contacts.Category{}}
      iex> update_category(category, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  @spec update_category(category, map) :: {:ok, category} | {:error, Ecto.Changeset.t(category)}
  def update_category(%Contacts.Category{} = category, attrs) do
    category
    |> Contacts.Category.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a contact category.
  ## Expected Arguments
  * category - The `%Cashtrail.Contacts.Category{}` to be deleted.
  ## Returns
  * `{:ok, %Cashtrail.Contacts.Category{}}` in case of success.
  * `{:error, %Ecto.Changeset{}}` in case of error.
  ## Examples
      iex> delete_category(category)
      {:ok, %Cashtrail.Contacts.Category{}}
      iex> delete_category(category)
      {:error, %Ecto.Changeset{}}
  """
  @spec delete_category(category) :: {:ok, category} | {:error, Ecto.Changeset.t(category)}
  def delete_category(%Contacts.Category{} = category) do
    Repo.delete(category)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking contact category changes.
  ## Expected Arguments
  * category - The `%Cashtrail.Contacts.Category{}` to be tracked.
  ## Examples
      iex> change_category(category)
      %Ecto.Changeset{source: %Cashtrail.Contacts.Category{}}
  """
  @spec change_category(category) :: Ecto.Changeset.t(category)
  def change_category(%Contacts.Category{} = category) do
    Contacts.Category.changeset(category, %{})
  end

  @type contact :: Contacts.Contact.t()

  @doc """
  Returns a `%Cashtrail.Paginator.Page{}` struct with a list of contacts in the
  `:entries` field.
  ## Expected arguments
  * entity - The `%Cashtrail.Entities.Entity{}` that the contact references.
  * options - A `keyword` list of the following options:
    * `:filter` - filters by following attributes:
      * `:type` or `"type"`
      * `:customer` or `"customer"`
      * `:supplier` or `"supplier"`
      * `:category_id` or `"category_id"`
    * `:search` - search contacts by `:name` or `:legal_name`.
    * See `Cashtrail.Paginator.paginate/2` to know about the pagination options.
  See `Cashtrail.Contacts.Contact` to have more detailed info about the fields to
  be filtered or searched.
  ## Examples
      iex> list_contacts(entity)
      %Cashtrail.Paginator{entries: [%Cashtrail.Contacts.Contact{}, ...]}
      iex> list_contacts(entity, filter: %{type: :company})
      %Cashtrail.Paginator{entries: [%Cashtrail.Contacts.Contact{type: :company}, ...]}
      iex> list_contacts(entity, search: "my")
      %Cashtrail.Paginator{entries: [%Cashtrail.Contacts.Contact{name: "My name"}, ...]}
  """
  @spec list_contacts(Entities.Entity.t(), keyword) :: Paginator.Page.t(contact)
  def list_contacts(%Entities.Entity{} = entity, options \\ []) do
    Contacts.Contact
    |> build_filter(Keyword.get(options, :filter), [:type, :customer, :supplier, :category_id])
    |> build_search(Keyword.get(options, :search), [:name, :legal_name])
    |> put_prefix(entity)
    |> Paginator.paginate(options)
  end

  @doc """
  Gets a single contact.
  Raises `Ecto.NoResultsError` if the Contact does not exist.
  See `Cashtrail.Contacts.Contact` to have more detailed info about the returned
  struct.
  ## Expected Arguments
  * entity - The `%Cashtrail.Entities.Entity{}` that the contact references.
  * id - A `string` that is the unique id of the contact to be found.
  ## Examples
      iex> get_contact!(entity, 123)
      %Cashtrail.Contacts.Contact{}
      iex> get_contact!(entity, 456)
      ** (Ecto.NoResultsError)
  """
  @spec get_contact!(Entities.Entity.t(), Ecto.UUID.t() | String.t()) :: contact
  def get_contact!(%Entities.Entity{} = entity, id),
    do: Repo.get!(Contacts.Contact, id, prefix: to_prefix(entity))

  @doc """
  Creates a contact.
  ## Expected Arguments
  * entity - The `%Cashtrail.Entities.Entity{}` that the category references.
  * params - A `map` with the params of the contact to be created:
    * `:name` (required) - A `string` with the description of the contact.
    * `:type` (required) - A `string` with the type of contact. It can receive
    `:company` or `:person`. Defaults to `:company`.
    * `:legal_name` - A `string` that is the legal name of the contact.
    * `:customer` - A `boolean` that says if the contact is a customer. Defaults to false.
    * `:supplier` - A `boolean` that says if the contact is a supplier. Defaults to false.
    * `:phone` - A `string` with the contact phone number. It can receive any phone number format.
    * `:email` - A `string` with the contact email.
    * `:category_id` - A `string` with the id of `Cashtrail.Contacts.Category` that
    relates to the contact.
    * `:address` - A `map` containing the address of the contact:
      * `:street` - A `string` with the street of the contact address.
      * `:number` - A `string` with the number of the contact address.
      * `:complement` - A `string` with the complement of the contact address.
      * `:district` - A `string` with the district of the contact address.
      * `:city` - A `string` with the city of the contact address.
      * `:state` - A `string` with the state or province of the contact address.
      * `:country` - A `string` with the country of the contact address.
      * `:zip` - A `string` with the zip code of the contact address. You can
      insert whatever the zip code of any country you want.
      * `:line_1` - A `string` with line 1 of the contact address, if preferred.
      * `:line_2` - A `string` with line 2 of the contact address, if preferred.
  See `Cashtrail.Contacts.Contact` to have more detailed info about the fields
  of the contact, and `Cashtrail.Contacts.Address` to have more detailed info
  about the field of the address.
  ## Returns
  * `{:ok, %Cashtrail.Contacts.Contact{}}` in case of success.
  * `{:error, %Ecto.Changeset{}}` in case of error.
  ## Examples
      iex> create_contact(entity, %{field: value})
      {:ok, %Cashtrail.Contacts.Contact{}}
      iex> create_contact(entity, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  @spec create_contact(Entities.Entity.t(), map) ::
          {:ok, contact} | {:error, Ecto.Changeset.t(contact)}
  def create_contact(%Entities.Entity{} = entity, attrs) do
    %Contacts.Contact{}
    |> Contacts.Contact.changeset(attrs)
    |> Repo.insert(prefix: to_prefix(entity))
  end

  @doc """
  Updates a contact.
  ## Expected Arguments
  * category - The `%Cashtrail.Contacts.Category{}` to be updated.
  * params - A `map` with the field of the contact to be updated. See
  `create_contact/2` to know about the params that can be given.
  ## Returns
  * `{:ok, %Cashtrail.Contacts.Contact{}}` in case of success.
  * `{:error, %Ecto.Changeset{}}` in case of error.
  ## Examples
      iex> update_contact(contact, %{field: new_value})
      {:ok, %Cashtrail.Contacts.Contact{}}
      iex> update_contact(contact, %{field: bad_value})
      {:error, %Ecto.Changeset{}}
  """
  @spec update_contact(contact, map) :: {:ok, contact} | {:error, Ecto.Changeset.t(contact)}
  def update_contact(%Contacts.Contact{} = contact, attrs) do
    contact
    |> Contacts.Contact.changeset(attrs)
    |> Repo.update()
  end

  @doc """
  Deletes a contact.
  ## Expected Arguments
  * contact - The `%Cashtrail.Contacts.Contact{}` to be deleted.
  ## Returns
  * `{:ok, %Cashtrail.Contacts.Contact{}}` in case of success.
  * `{:error, %Ecto.Changeset{}}` in case of error.
  ## Examples
      iex> delete_contact(contact)
      {:ok, %Cashtrail.Contacts.Contact{}}
      iex> delete_contact(contact)
      {:error, %Ecto.Changeset{}}
  """
  @spec delete_contact(contact) :: {:ok, contact} | {:error, Ecto.Changeset.t(contact)}
  def delete_contact(%Contacts.Contact{} = contact) do
    Repo.delete(contact)
  end

  @doc """
  Returns an `%Ecto.Changeset{}` for tracking contact changes.
  ## Expected Arguments
  * category - The `%Cashtrail.Contacts.Contact{}` to be tracked.
  ## Examples
      iex> change_contact(contact)
      %Ecto.Changeset{source: %Cashtrail.Contacts.Contact{}}
  """
  @spec change_contact(contact) :: Ecto.Changeset.t(contact)
  def change_contact(%Contacts.Contact{} = contact) do
    Contacts.Contact.changeset(contact, %{})
  end
end
|
apps/cashtrail/lib/cashtrail/contacts.ex
| 0.902054
| 0.447521
|
contacts.ex
|
starcoder
|
defmodule RingCentral.API do
  @moduledoc """
  The Main module to interact with RingCentral [REST APIs](https://developer.ringcentral.com/api-reference).
  """

  @default_doc """
  It uses `ringcentral.http_client` to send the request to RingCentral API,
  which by default is the `RingCentral.HTTPClient.DefaultClient`.
  """

  @default_api_prefix "/restapi/v1.0/"

  alias RingCentral.HTTPClient
  alias RingCentral.Response

  require Logger

  @doc """
  Send a `GET` request to the REST API.
  #{@default_doc}
  """
  @spec get(RingCentral.t(), String.t(), list()) ::
          {:error, RingCentral.Error.t()} | {:ok, RingCentral.Response.t()}
  def get(ringcentral, path, headers \\ []) do
    request(ringcentral, :get, path, "", headers)
  end

  @doc """
  Send a `POST` request to the REST API.
  #{@default_doc}
  """
  @spec post(RingCentral.t(), String.t(), map(), list()) ::
          {:error, RingCentral.Error.t()} | {:ok, RingCentral.Response.t()}
  def post(ringcentral, path, body, headers \\ []) do
    req_body = RingCentral.JSON.encode!(ringcentral, body)
    request(ringcentral, :post, path, req_body, headers)
  end

  @doc """
  Send a `PUT` request to the REST API.
  #{@default_doc}
  """
  @spec put(RingCentral.t(), String.t(), map(), list()) ::
          {:error, RingCentral.Error.t()} | {:ok, RingCentral.Response.t()}
  def put(ringcentral, path, body, headers \\ []) do
    req_body = RingCentral.JSON.encode!(ringcentral, body)
    request(ringcentral, :put, path, req_body, headers)
  end

  @doc """
  Send a `DELETE` request to the REST API.
  #{@default_doc}
  """
  @spec delete(RingCentral.t(), String.t(), list()) ::
          :ok | {:error, RingCentral.Error.t()} | {:ok, RingCentral.Response.t()}
  def delete(ringcentral, path, headers \\ []) do
    # Bug fix: this previously issued a :post request; a DELETE endpoint must
    # be called with the :delete method. A 204 is collapsed to :ok; any other
    # outcome (including non-204 {:ok, response}) falls through unchanged.
    with {:ok, %Response{status: 204}} <-
           request(ringcentral, :delete, path, "", headers) do
      :ok
    end
  end

  @doc false
  def request(ringcentral, method, path, body, headers \\ [])

  def request(%RingCentral{token_info: nil}, _method, _path, _body, _headers) do
    raise ArgumentError, message: "Missing `token_info` in the RingCentral client."
  end

  def request(%RingCentral{} = ringcentral, method, path, body, headers) do
    # Caller-supplied headers are appended after the defaults.
    headers =
      [
        {"accept", "application/json"},
        {"content-type", "application/json"},
        {"authorization", "bearer #{ringcentral.token_info["access_token"]}"}
      ]
      |> Enum.concat(headers)

    url = build_path(ringcentral, path)
    Logger.info("Will request #{url}")

    # On success, the decoded JSON body is attached under the :data key.
    with {:ok, %Response{body: resp_body} = response} <-
           HTTPClient.perform_request(ringcentral, method, url, body, headers) do
      resp =
        response
        |> Map.put(:data, RingCentral.JSON.decode!(ringcentral, resp_body))

      {:ok, resp}
    end
  end

  # Joins the API prefix and path onto the client's configured server URL.
  defp build_path(%RingCentral{} = client, path) do
    full_path = Path.join(@default_api_prefix, path)

    client.server_url
    |> URI.merge(full_path)
    |> URI.to_string()
  end
end
|
lib/ring_central/api.ex
| 0.838911
| 0.406803
|
api.ex
|
starcoder
|
defmodule Time do
  @moduledoc """
  A Time struct and functions.

  The Time struct contains the fields hour, minute, second and microseconds.
  New times can be built with the `new/4` function or using the
  `~T` (see `Kernel.sigil_T/2`) sigil:

      iex> ~T[23:00:07.001]
      ~T[23:00:07.001]

  Both `new/4` and sigil return a struct where the time fields can
  be accessed directly:

      iex> time = ~T[23:00:07.001]
      iex> time.hour
      23
      iex> time.microsecond
      {1000, 3}

  The functions on this module work with the `Time` struct as well
  as any struct that contains the same fields as the `Time` struct,
  such as `NaiveDateTime` and `DateTime`. Such functions expect
  `t:Calendar.time/0` in their typespecs (instead of `t:t/0`).

  Developers should avoid creating the Time structs directly
  and instead rely on the functions provided by this module as well
  as the ones in third-party calendar libraries.

  ## Comparing times

  Comparisons in Elixir using `==/2`, `>/2`, `</2` and similar are structural
  and based on the `Time` struct fields. For proper comparison between
  times, use the `compare/2` function.
  """

  @enforce_keys [:hour, :minute, :second]
  defstruct [:hour, :minute, :second, microsecond: {0, 0}, calendar: Calendar.ISO]

  @type t :: %__MODULE__{
          hour: Calendar.hour(),
          minute: Calendar.minute(),
          second: Calendar.second(),
          microsecond: Calendar.microsecond(),
          calendar: Calendar.calendar()
        }

  # Number of microseconds in one day; used as the day-fraction denominator.
  @parts_per_day 86_400_000_000

  @doc """
  Returns the current time in UTC.

  ## Examples

      iex> time = Time.utc_now()
      iex> time.hour >= 0
      true

  """
  @doc since: "1.4.0"
  @spec utc_now(Calendar.calendar()) :: t
  def utc_now(calendar \\ Calendar.ISO) do
    {:ok, _, time, microsecond} = Calendar.ISO.from_unix(:os.system_time(), :native)
    {hour, minute, second} = time

    iso_time = %Time{
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond,
      calendar: Calendar.ISO
    }

    convert!(iso_time, calendar)
  end

  @doc """
  Builds a new time.

  Expects all values to be integers. Returns `{:ok, time}` if each
  entry fits its appropriate range, returns `{:error, reason}` otherwise.

  Microseconds can also be given with a precision, which must be an
  integer between 0 and 6.

  The built-in calendar does not support leap seconds.

  ## Examples

      iex> Time.new(0, 0, 0, 0)
      {:ok, ~T[00:00:00.000000]}
      iex> Time.new(23, 59, 59, 999_999)
      {:ok, ~T[23:59:59.999999]}

      iex> Time.new(24, 59, 59, 999_999)
      {:error, :invalid_time}
      iex> Time.new(23, 60, 59, 999_999)
      {:error, :invalid_time}
      iex> Time.new(23, 59, 60, 999_999)
      {:error, :invalid_time}
      iex> Time.new(23, 59, 59, 1_000_000)
      {:error, :invalid_time}

      # Invalid precision

      Time.new(23, 59, 59, {999_999, 10})
      {:error, :invalid_time}

  """
  @spec new(
          Calendar.hour(),
          Calendar.minute(),
          Calendar.second(),
          Calendar.microsecond() | non_neg_integer,
          Calendar.calendar()
        ) :: {:ok, t} | {:error, atom}
  def new(hour, minute, second, microsecond \\ {0, 0}, calendar \\ Calendar.ISO)

  # A bare integer microsecond is normalized to full (6-digit) precision.
  def new(hour, minute, second, microsecond, calendar) when is_integer(microsecond) do
    new(hour, minute, second, {microsecond, 6}, calendar)
  end

  def new(hour, minute, second, {microsecond, precision}, calendar)
      when is_integer(hour) and is_integer(minute) and is_integer(second) and
             is_integer(microsecond) and is_integer(precision) do
    case calendar.valid_time?(hour, minute, second, {microsecond, precision}) do
      true ->
        time = %Time{
          hour: hour,
          minute: minute,
          second: second,
          microsecond: {microsecond, precision},
          calendar: calendar
        }

        {:ok, time}

      false ->
        {:error, :invalid_time}
    end
  end

  @doc """
  Converts the given `time` to a string.

  ### Examples

      iex> Time.to_string(~T[23:00:00])
      "23:00:00"
      iex> Time.to_string(~T[23:00:00.001])
      "23:00:00.001"
      iex> Time.to_string(~T[23:00:00.123456])
      "23:00:00.123456"

      iex> Time.to_string(~N[2015-01-01 23:00:00.001])
      "23:00:00.001"
      iex> Time.to_string(~N[2015-01-01 23:00:00.123456])
      "23:00:00.123456"

  """
  @spec to_string(Calendar.time()) :: String.t()
  def to_string(time)

  def to_string(%{
        hour: hour,
        minute: minute,
        second: second,
        microsecond: microsecond,
        calendar: calendar
      }) do
    calendar.time_to_string(hour, minute, second, microsecond)
  end

  @doc """
  Parses the extended "Local time" format described by
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  Time zone offset may be included in the string but they will be
  simply discarded as such information is not included in times.

  As specified in the standard, the separator "T" may be omitted if
  desired as there is no ambiguity within this function.

  Time representations with reduced accuracy are not supported.

  Note that while ISO 8601 allows times to specify 24:00:00 as the
  zero hour of the next day, this notation is not supported by Elixir.
  Leap seconds are not supported as well by the built-in Calendar.ISO.

  ## Examples

      iex> Time.from_iso8601("23:50:07")
      {:ok, ~T[23:50:07]}
      iex> Time.from_iso8601("23:50:07Z")
      {:ok, ~T[23:50:07]}
      iex> Time.from_iso8601("T23:50:07Z")
      {:ok, ~T[23:50:07]}

      iex> Time.from_iso8601("23:50:07,0123456")
      {:ok, ~T[23:50:07.012345]}
      iex> Time.from_iso8601("23:50:07.0123456")
      {:ok, ~T[23:50:07.012345]}
      iex> Time.from_iso8601("23:50:07.123Z")
      {:ok, ~T[23:50:07.123]}

      iex> Time.from_iso8601("2015:01:23 23-50-07")
      {:error, :invalid_format}
      iex> Time.from_iso8601("23:50:07A")
      {:error, :invalid_format}
      iex> Time.from_iso8601("23:50:07.")
      {:error, :invalid_format}
      iex> Time.from_iso8601("23:50:61")
      {:error, :invalid_time}

  """
  @spec from_iso8601(String.t(), Calendar.calendar()) :: {:ok, t} | {:error, atom}
  def from_iso8601(string, calendar \\ Calendar.ISO) do
    with {:ok, {hour, minute, second, microsecond}} <- Calendar.ISO.parse_time(string) do
      convert(
        %Time{hour: hour, minute: minute, second: second, microsecond: microsecond},
        calendar
      )
    end
  end

  @doc """
  Parses the extended "Local time" format described by
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  Raises if the format is invalid.

  ## Examples

      iex> Time.from_iso8601!("23:50:07,123Z")
      ~T[23:50:07.123]
      iex> Time.from_iso8601!("23:50:07.123Z")
      ~T[23:50:07.123]
      iex> Time.from_iso8601!("2015:01:23 23-50-07")
      ** (ArgumentError) cannot parse "2015:01:23 23-50-07" as time, reason: :invalid_format

  """
  @spec from_iso8601!(String.t(), Calendar.calendar()) :: t
  def from_iso8601!(string, calendar \\ Calendar.ISO) do
    case from_iso8601(string, calendar) do
      {:ok, value} ->
        value

      {:error, reason} ->
        raise ArgumentError, "cannot parse #{inspect(string)} as time, reason: #{inspect(reason)}"
    end
  end

  @doc """
  Converts the given time to
  [ISO 8601:2004](https://en.wikipedia.org/wiki/ISO_8601).

  By default, `Time.to_iso8601/2` returns times formatted in the "extended"
  format, for human readability. It also supports the "basic" format through
  passing the `:basic` option.

  ### Examples

      iex> Time.to_iso8601(~T[23:00:13])
      "23:00:13"

      iex> Time.to_iso8601(~T[23:00:13.001])
      "23:00:13.001"

      iex> Time.to_iso8601(~T[23:00:13.001], :basic)
      "230013.001"

      iex> Time.to_iso8601(~N[2010-04-17 23:00:13])
      "23:00:13"

  """
  @spec to_iso8601(Calendar.time(), :extended | :basic) :: String.t()
  def to_iso8601(time, format \\ :extended)

  def to_iso8601(%{calendar: Calendar.ISO} = time, format) when format in [:extended, :basic] do
    %{
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond
    } = time

    Calendar.ISO.time_to_string(hour, minute, second, microsecond, format)
  end

  # Non-ISO calendars are first converted to ISO, then formatted.
  def to_iso8601(%{calendar: _} = time, format) when format in [:extended, :basic] do
    time
    |> convert!(Calendar.ISO)
    |> to_iso8601(format)
  end

  @doc """
  Converts given `time` to an Erlang time tuple.

  WARNING: Loss of precision may occur, as Erlang time tuples
  only contain hours/minutes/seconds.

  ## Examples

      iex> Time.to_erl(~T[23:30:15.999])
      {23, 30, 15}

      iex> Time.to_erl(~N[2010-04-17 23:30:15.999])
      {23, 30, 15}

  """
  @spec to_erl(Calendar.time()) :: :calendar.time()
  def to_erl(time) do
    %{hour: hour, minute: minute, second: second} = convert!(time, Calendar.ISO)
    {hour, minute, second}
  end

  @doc """
  Converts an Erlang time tuple to a `Time` struct.

  ## Examples

      iex> Time.from_erl({23, 30, 15}, {5000, 3})
      {:ok, ~T[23:30:15.005]}
      iex> Time.from_erl({24, 30, 15})
      {:error, :invalid_time}

  """
  @spec from_erl(:calendar.time(), Calendar.microsecond(), Calendar.calendar()) ::
          {:ok, t} | {:error, atom}
  def from_erl(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO)

  def from_erl({hour, minute, second}, microsecond, calendar) do
    with {:ok, time} <- new(hour, minute, second, microsecond, Calendar.ISO),
         do: convert(time, calendar)
  end

  @doc """
  Converts an Erlang time tuple to a `Time` struct.

  ## Examples

      iex> Time.from_erl!({23, 30, 15})
      ~T[23:30:15]
      iex> Time.from_erl!({23, 30, 15}, {5000, 3})
      ~T[23:30:15.005]
      iex> Time.from_erl!({24, 30, 15})
      ** (ArgumentError) cannot convert {24, 30, 15} to time, reason: :invalid_time

  """
  @spec from_erl!(:calendar.time(), Calendar.microsecond(), Calendar.calendar()) :: t
  def from_erl!(tuple, microsecond \\ {0, 0}, calendar \\ Calendar.ISO) do
    case from_erl(tuple, microsecond, calendar) do
      {:ok, value} ->
        value

      {:error, reason} ->
        raise ArgumentError,
              "cannot convert #{inspect(tuple)} to time, reason: #{inspect(reason)}"
    end
  end

  @doc """
  Adds the `number` of `unit`s to the given `time`.

  This function accepts the `number` measured according to `Calendar.ISO`.
  The time is returned in the same calendar as it was given in.

  Note the result value represents the time of day, meaning that it is cyclic,
  for instance, it will never go over 24 hours for the ISO calendar.

  ## Examples

      iex> Time.add(~T[10:00:00], 27000)
      ~T[17:30:00.000000]
      iex> Time.add(~T[11:00:00.005], 2400)
      ~T[11:40:00.005000]
      iex> Time.add(~T[00:00:00], 86_399_999, :millisecond)
      ~T[23:59:59.999000]
      iex> Time.add(~T[17:10:05], 86400)
      ~T[17:10:05.000000]
      iex> Time.add(~T[23:00:00], -60)
      ~T[22:59:00.000000]

  """
  @doc since: "1.6.0"
  @spec add(Calendar.time(), integer, System.time_unit()) :: t
  def add(%{calendar: calendar} = time, number, unit \\ :second) when is_integer(number) do
    number = System.convert_time_unit(number, unit, :microsecond)
    total = time_to_microseconds(time) + number
    # Integer.mod keeps the result within a single day (cyclic time-of-day).
    parts = Integer.mod(total, @parts_per_day)
    {hour, minute, second, microsecond} = calendar.time_from_day_fraction({parts, @parts_per_day})

    %Time{
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond,
      calendar: calendar
    }
  end

  # Shortcut for midnight to skip the day-fraction conversion entirely.
  defp time_to_microseconds(%{
         calendar: Calendar.ISO,
         hour: 0,
         minute: 0,
         second: 0,
         microsecond: {0, _}
       }) do
    0
  end

  defp time_to_microseconds(time) do
    iso_days = {0, to_day_fraction(time)}
    Calendar.ISO.iso_days_to_unit(iso_days, :microsecond)
  end

  @doc """
  Compares two time structs.

  Returns `:gt` if first time is later than the second
  and `:lt` for vice versa. If the two times are equal
  `:eq` is returned.

  ## Examples

      iex> Time.compare(~T[16:04:16], ~T[16:04:28])
      :lt
      iex> Time.compare(~T[16:04:16], ~T[16:04:16])
      :eq
      iex> Time.compare(~T[16:04:16.01], ~T[16:04:16.001])
      :gt

  This function can also be used to compare across more
  complex calendar types by considering only the time fields:

      iex> Time.compare(~N[1900-01-01 16:04:16], ~N[2015-01-01 16:04:16])
      :eq
      iex> Time.compare(~N[2015-01-01 16:04:16], ~N[2015-01-01 16:04:28])
      :lt
      iex> Time.compare(~N[2015-01-01 16:04:16.01], ~N[2000-01-01 16:04:16.001])
      :gt

  """
  @doc since: "1.4.0"
  @spec compare(Calendar.time(), Calendar.time()) :: :lt | :eq | :gt
  # Fast path: both times share the same calendar, so their fields are
  # directly comparable as tuples.
  def compare(%{calendar: calendar} = time1, %{calendar: calendar} = time2) do
    %{hour: hour1, minute: minute1, second: second1, microsecond: {microsecond1, _}} = time1
    %{hour: hour2, minute: minute2, second: second2, microsecond: {microsecond2, _}} = time2

    case {{hour1, minute1, second1, microsecond1}, {hour2, minute2, second2, microsecond2}} do
      {first, second} when first > second -> :gt
      {first, second} when first < second -> :lt
      _ -> :eq
    end
  end

  # Different calendars: compare via day fractions. Cross-multiplying the
  # numerators by the opposite denominators compares the fractions without
  # any float division.
  def compare(time1, time2) do
    {parts1, ppd1} = to_day_fraction(time1)
    {parts2, ppd2} = to_day_fraction(time2)

    case {parts1 * ppd2, parts2 * ppd1} do
      {first, second} when first > second -> :gt
      {first, second} when first < second -> :lt
      _ -> :eq
    end
  end

  @doc """
  Converts given `time` to a different calendar.

  Returns `{:ok, time}` if the conversion was successful,
  or `{:error, reason}` if it was not, for some reason.

  ## Examples

  Imagine someone implements `Calendar.Holocene`, a calendar based on the
  Gregorian calendar that adds exactly 10,000 years to the current Gregorian
  year:

      iex> Time.convert(~T[13:30:15], Calendar.Holocene)
      {:ok, %Time{calendar: Calendar.Holocene, hour: 13, minute: 30, second: 15, microsecond: {0, 0}}}

  """
  @doc since: "1.5.0"
  @spec convert(Calendar.time(), Calendar.calendar()) :: {:ok, t} | {:error, atom}
  # Keep it multiline for proper function clause errors.
  def convert(
        %{
          calendar: calendar,
          hour: hour,
          minute: minute,
          second: second,
          microsecond: microsecond
        },
        calendar
      ) do
    time = %Time{
      calendar: calendar,
      hour: hour,
      minute: minute,
      second: second,
      microsecond: microsecond
    }

    {:ok, time}
  end

  def convert(%{microsecond: {_, precision}} = time, calendar) do
    {hour, minute, second, {microsecond, _}} =
      time
      |> to_day_fraction()
      |> calendar.time_from_day_fraction()

    time = %Time{
      calendar: calendar,
      hour: hour,
      minute: minute,
      second: second,
      microsecond: {microsecond, precision}
    }

    {:ok, time}
  end

  @doc """
  Similar to `Time.convert/2`, but raises an `ArgumentError`
  if the conversion between the two calendars is not possible.

  ## Examples

  Imagine someone implements `Calendar.Holocene`, a calendar based on the
  Gregorian calendar that adds exactly 10,000 years to the current Gregorian
  year:

      iex> Time.convert!(~T[13:30:15], Calendar.Holocene)
      %Time{calendar: Calendar.Holocene, hour: 13, minute: 30, second: 15, microsecond: {0, 0}}

  """
  @doc since: "1.5.0"
  @spec convert!(Calendar.time(), Calendar.calendar()) :: t
  def convert!(time, calendar) do
    case convert(time, calendar) do
      {:ok, value} ->
        value

      {:error, reason} ->
        raise ArgumentError,
              "cannot convert #{inspect(time)} to target calendar #{inspect(calendar)}, " <>
                "reason: #{inspect(reason)}"
    end
  end

  @doc """
  Returns the difference between two times, considering only the hour, minute,
  second and microsecond.

  As with the `compare/2` function both `Time` structs and other structures
  containing time can be used. If for instance a `NaiveDateTime` or `DateTime`
  is passed, only the hour, month, second, and microsecond is considered. Any
  additional information about a date or time zone is ignored when calculating
  the difference.

  The answer can be returned in any `unit` available from
  `t:System.time_unit/0`. If the first unit is smaller than
  the second, a negative number is returned.

  This function returns the difference in seconds where seconds
  are measured according to `Calendar.ISO`.

  ## Examples

      iex> Time.diff(~T[00:29:12], ~T[00:29:10])
      2

      # When passing a `NaiveDateTime` the date part is ignored.
      iex> Time.diff(~N[2017-01-01 00:29:12], ~T[00:29:10])
      2

      # Two `NaiveDateTime` structs could have big differences in the date
      # but only the time part is considered.
      iex> Time.diff(~N[2017-01-01 00:29:12], ~N[1900-02-03 00:29:10])
      2

      iex> Time.diff(~T[00:29:12], ~T[00:29:10], :microsecond)
      2_000_000
      iex> Time.diff(~T[00:29:10], ~T[00:29:12], :microsecond)
      -2_000_000

  """
  @doc since: "1.5.0"
  @spec diff(Calendar.time(), Calendar.time(), System.time_unit()) :: integer
  def diff(time1, time2, unit \\ :second)

  # Fast path for two ISO times: plain integer arithmetic, no day-fraction
  # conversion. NOTE(fix): this clause previously matched the microsecond
  # *precision* against `@parts_per_day` (86_400_000_000). Precision is always
  # an integer in 0..6, so the clause could never match and the fast path was
  # dead code — every call fell through to the general clause below. Matching
  # the precision with `_` restores the fast path without changing results.
  def diff(
        %{
          calendar: Calendar.ISO,
          hour: hour1,
          minute: minute1,
          second: second1,
          microsecond: {microsecond1, _}
        },
        %{
          calendar: Calendar.ISO,
          hour: hour2,
          minute: minute2,
          second: second2,
          microsecond: {microsecond2, _}
        },
        unit
      ) do
    total =
      (hour1 - hour2) * 3_600_000_000 + (minute1 - minute2) * 60_000_000 +
        (second1 - second2) * 1_000_000 + (microsecond1 - microsecond2)

    System.convert_time_unit(total, :microsecond, unit)
  end

  def diff(time1, time2, unit) do
    fraction1 = to_day_fraction(time1)
    fraction2 = to_day_fraction(time2)

    Calendar.ISO.iso_days_to_unit({0, fraction1}, unit) -
      Calendar.ISO.iso_days_to_unit({0, fraction2}, unit)
  end

  @doc """
  Returns the given time with the microsecond field truncated to the given
  precision (`:microsecond`, `:millisecond` or `:second`).

  The given time is returned unchanged if it already has lower precision than
  the given precision.

  ## Examples

      iex> Time.truncate(~T[01:01:01.123456], :microsecond)
      ~T[01:01:01.123456]

      iex> Time.truncate(~T[01:01:01.123456], :millisecond)
      ~T[01:01:01.123]

      iex> Time.truncate(~T[01:01:01.123456], :second)
      ~T[01:01:01]

  """
  @doc since: "1.6.0"
  @spec truncate(t(), :microsecond | :millisecond | :second) :: t()
  def truncate(%Time{microsecond: microsecond} = time, precision) do
    %{time | microsecond: Calendar.truncate(microsecond, precision)}
  end

  ## Helpers

  # Expresses a time as a `{parts, parts_per_day}` fraction of one day in the
  # time's own calendar.
  defp to_day_fraction(%{
         hour: hour,
         minute: minute,
         second: second,
         microsecond: {_, _} = microsecond,
         calendar: calendar
       }) do
    calendar.time_to_day_fraction(hour, minute, second, microsecond)
  end

  defimpl String.Chars do
    def to_string(time) do
      %{
        hour: hour,
        minute: minute,
        second: second,
        microsecond: microsecond,
        calendar: calendar
      } = time

      calendar.time_to_string(hour, minute, second, microsecond)
    end
  end

  defimpl Inspect do
    def inspect(time, _) do
      %{
        hour: hour,
        minute: minute,
        second: second,
        microsecond: microsecond,
        calendar: calendar
      } = time

      "~T[" <>
        calendar.time_to_string(hour, minute, second, microsecond) <> suffix(calendar) <> "]"
    end

    # ISO times are printed bare; other calendars carry their module name.
    defp suffix(Calendar.ISO), do: ""
    defp suffix(calendar), do: " " <> inspect(calendar)
  end
end
|
lib/elixir/lib/calendar/time.ex
| 0.95003
| 0.788502
|
time.ex
|
starcoder
|
defmodule ExJenga.Signature do
  @moduledoc """
  Builds the RSA-SHA256 message signatures required by the Jenga API.

  To ensure the security of your transactions or requests, security controls
  guarantee that transactions can only be initiated by you and no one else.
  All requests, other than the Identity API, are required to carry a
  signature header.

  To generate the signature you need an RSA key pair: the public key is
  shared with JengaHQ and the private key (configured under
  `config :ex_jenga, :jenga, private_key: ...`) is used for signing.

  Generate a key pair with OpenSSL (keytool, KeyStore Explorer, etc. work
  too):

      # GENERATE YOUR KEY PAIR
      openssl genrsa -out privatekey.pem 2048 -nodes

      # NOW EXPORT YOUR PUBLIC KEY
      openssl rsa -in privatekey.pem -outform PEM -pubout -out publickey.pem

  Copy the contents of `publickey.pem` and add it on the JengaHQ portal.
  """

  @doc """
  Signs the request payload for the Opening and Closing Account Balance API.

  The signed message is the concatenation of account id, country code and
  today's UTC date. Read more on
  https://developer.jengaapi.io/docs/generating-signatures

  Example

      ExJenga.Signature.sign_transaction(%{country_code: "KE", account_id: "0011547896523"})
      "jCUM84oReyyWfwSM2/EGoMUnJLXSRZ8YR7mS7VaUv3KIsYR4o3r/hoSENhQw9Z3BsWAxvKQoZX+VNrLyEmoIK2lUyE0vqUd9IN2RDZYr4rWzuXwmjsM5eq+ALRd8pDRGmwIPTA17y7MCeJhY6jmLjXUghaziWzo5ViJS1QdPvvxDiiOip2HxjPtVN9dsPIoc9i/mzAzAJssauvgVRzz2w6DQwg6bZIeDOcroRVqMBkBk2IcGHK6PSWWftaVSFL/2pbHNZyIMytI9qYpPHolLoPK2uKqftQXP8GNHUwasmsMmEBtYQ3mqC2fsb+YPGHLam8KEZ/FvS63GfQGJ+YivYA=="
  """
  def sign_transaction(%{
        country_code: country_code,
        account_id: account_id
      }) do
    today = Date.to_string(Date.utc_today())
    sign("#{account_id}#{country_code}#{today}")
  end

  @doc """
  Signs `message` with the configured RSA private key and returns the
  Base64-encoded RSA-SHA256 signature.
  """
  def sign(message) do
    raw_key = Application.get_env(:ex_jenga, :jenga)[:private_key]
    [pem_entry] = raw_key |> normalize_pem() |> :public_key.pem_decode()
    private_key = :public_key.pem_entry_decode(pem_entry)

    message
    |> :public_key.sign(:sha256, private_key)
    |> Base.encode64()
  end

  # Strips surrounding whitespace and per-line indentation that config files
  # commonly introduce, yielding clean newline-delimited PEM lines.
  defp normalize_pem(raw) do
    raw
    |> String.trim()
    |> String.split(~r{\n *}, trim: true)
    |> Enum.join("\n")
  end
end
|
lib/ex_jenga/signature.ex
| 0.70069
| 0.552238
|
signature.ex
|
starcoder
|
defmodule Apiv4.Router do
  use Apiv4.Web, :router

  @moduledoc """
  The router is where I describe how my data are related to each other
  as well as how they relate to the context at hand.
  In a separate file, say character.ex, I would find the description
  for the individual data in my app.
  """

  ## Autox Installed
  import Autox.Manifest

  pipeline :api do
    plug :accepts, ["json", "json-api"]
    plug :fetch_session
    plug Autox.RepoContextPlug
    plug Autox.UnderscoreParamsPlug, "data"
  end

  ## End Autox

  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end

  pipeline :auth do
    plug Autox.AuthSessionPlug
    plug Autox.SessionContextPlug, session: :user, context: :parent
  end

  pipeline :account do
    plug Autox.AuthSessionPlug, {Apiv4.Permissions, :warehouse_employee?}
    plug Autox.SessionContextPlug, session: :account, context: :parent
  end

  pipeline :management do
    plug Autox.AuthSessionPlug, {Apiv4.Permissions, :warehouse_management?}
    plug Autox.SessionContextPlug, session: :account, context: :parent
  end

  pipeline :admin do
    plug Autox.AuthHeaderPlug, :simwms_master_key
  end

  pipeline :realtime do
    plug Autox.BroadcastSessionPlug, key: :account, class: Apiv4.Permissions, check: :live?
  end

  pipeline :echo do
    plug Autox.RepoContextPlug, Autox.EchoRepo
  end

  pipeline :paranoia do
    plug Autox.RepoContextPlug, Autox.ParanoiaRepo
  end

  scope "/print", Apiv4 do
    # Use the default browser stack
    pipe_through [:browser, :echo]
    the Report, [:show]
  end

  # NOTE(fix): the per-scope descriptions below were originally written as
  # additional `@moduledoc` attributes. `@moduledoc` may only be set once per
  # module — each extra assignment triggers a "redefining @moduledoc" compiler
  # warning and overwrites the real module documentation (the last one,
  # "anonymous api, no login required", would have become the published docs).
  # They are plain comments now; the route definitions are unchanged.

  # Requires admin master key.
  scope "/admin", Apiv4 do
    pipe_through [:api, :admin]
    the ServicePlan, [:update, :create, :delete]
  end

  # Requires user account management authorization.
  scope "/api", Apiv4 do
    pipe_through [:api, :auth, :account, :management, :realtime]
    the Wall, [:create, :delete]
    the Road, [:create, :delete]
    the Gate, [:create, :delete]
    the Dock, [:create, :delete]
    the Cell, [:create, :delete]
    the Desk, [:create, :delete]
    the Scale, [:create, :delete]
    the Employee, [:create, :delete]

    an Account, [:update] do
      one ServicePlan, [:create, :delete]
    end
  end

  scope "/api", Apiv4 do
    pipe_through [:api, :auth, :account, :management, :paranoia]
    an Account, [:delete]
  end

  # Requires user account authentication.
  scope "/api", Apiv4 do
    pipe_through [:api, :auth, :account, :realtime]
    the Wall, [:show, :index]
    the Road, [:show, :index]
    the Desk, [:show, :index], do: many [History, Camera]
    the Gate, [:show, :index], do: many [History, Camera]
    the Dock, [:show, :update, :index], do: many [History, Camera]

    the Cell, [:show, :update, :index] do
      many [History, Camera, Batch]
    end

    the Scale, [:show, :update, :index], do: many [History, Camera]
    the Truck, do: many [History, Picture]

    the Batch do
      many [History, Picture]
      one [Cell, InAppointment, OutAppointment]
    end

    the Weighticket
    the Employee, [:show, :index, :update], do: many [History, Picture]

    the Company do
      many Appointment
    end

    an Appointment do
      one [Truck, Weighticket, Company]
      many [InBatch, OutBatch, History, Picture]
    end
  end

  scope "/api", Apiv4 do
    pipe_through [:api, :auth, :account]
    the Report, [:create]
  end

  # Requires user authentication.
  scope "/api", Apiv4 do
    pipe_through [:api, :auth]
    an Account, [:create, :show], do: one ServicePlan, [:show]
    an AccountDetail, [:show]

    the User, [:update] do
      many [Employee, UnconfirmedEmployee], [:index]
    end

    can_logout!
  end

  scope "/api", Apiv4 do
    pipe_through [:api, :auth, :paranoia]

    an Employee, [:show] do
      many [Account], [:index]
    end

    the User, [:show] do
      many [Account], [:index]
    end
  end

  # Anonymous api, no login required.
  scope "/api", Apiv4 do
    pipe_through :api
    the ServicePlan, [:index, :show]
    can_login!
  end
end
|
web/router.ex
| 0.512937
| 0.544801
|
router.ex
|
starcoder
|
defmodule ExWire.Packet.NewBlock do
  @moduledoc """
  Eth Wire Packet for advertising new blocks.

  ```
  **NewBlock** [`+0x07`, [`blockHeader`, `transactionList`, `uncleList`], `totalDifficulty`]

  Specify a single block that the peer should know about. The composite item in
  the list (following the message ID) is a block in the format described in the
  main Ethereum specification.

  * `totalDifficulty` is the total difficulty of the block (aka score).
  ```
  """

  require Logger

  alias EVM.Block.Header
  alias ExWire.Struct.Block
  alias ExthCrypto.Math

  @behaviour ExWire.Packet

  @type t :: %__MODULE__{
          block_header: Header.t(),
          block: Block.t(),
          total_difficulty: integer()
        }

  defstruct [
    :block_header,
    :block,
    :total_difficulty
  ]

  @doc """
  Given a NewBlock packet, serializes for transport over Eth Wire Protocol.

  ## Examples

      iex> %ExWire.Packet.NewBlock{
      ...>   block_header: %EVM.Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>},
      ...>   block: %ExWire.Struct.Block{transactions_list: [], ommers: []},
      ...>   total_difficulty: 100_000
      ...> }
      ...> |> ExWire.Packet.NewBlock.serialize
      [
        [<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, 5, 1, 5, 3, 6, "Hi mom", <<7::256>>, <<8::64>>],
        [],
        [],
        100000
      ]
  """
  @spec serialize(t) :: ExRLP.t()
  def serialize(%__MODULE__{block_header: header, block: block, total_difficulty: difficulty}) do
    # Block.serialize/1 yields the transaction list and ommer list that sit
    # between the header and the total difficulty in the wire format.
    [transactions, ommers] = Block.serialize(block)
    [Header.serialize(header), transactions, ommers, difficulty]
  end

  @doc """
  Given an RLP-encoded NewBlock packet from Eth Wire Protocol,
  decodes into a NewBlock struct.

  ## Examples

      iex> [
      ...>   [<<1::256>>, <<2::256>>, <<3::160>>, <<4::256>>, <<5::256>>, <<6::256>>, <<>>, <<5>>, <<1>>, <<5>>, <<3>>, <<6>>, "Hi mom", <<7::256>>, <<8::64>>],
      ...>   [],
      ...>   [],
      ...>   <<10>>
      ...> ]
      ...> |> ExWire.Packet.NewBlock.deserialize()
      %ExWire.Packet.NewBlock{
        block_header: %EVM.Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>},
        block: %ExWire.Struct.Block{transactions: [], ommers: [], transactions_list: []},
        total_difficulty: 10
      }
  """
  @spec deserialize(ExRLP.t()) :: t
  def deserialize([header_rlp, transactions, ommers, difficulty_bin]) do
    %__MODULE__{
      block_header: Header.deserialize(header_rlp),
      block: Block.deserialize([transactions, ommers]),
      # Total difficulty arrives as a big-endian binary on the wire.
      total_difficulty: :binary.decode_unsigned(difficulty_bin)
    }
  end

  @doc """
  Handles a NewBlock message. Right now, we ignore these advertisements.

  ## Examples

      iex> %ExWire.Packet.NewBlock{
      ...>   block_header: %EVM.Block.Header{parent_hash: <<1::256>>, ommers_hash: <<2::256>>, beneficiary: <<3::160>>, state_root: <<4::256>>, transactions_root: <<5::256>>, receipts_root: <<6::256>>, logs_bloom: <<>>, difficulty: 5, number: 1, gas_limit: 5, gas_used: 3, timestamp: 6, extra_data: "Hi mom", mix_hash: <<7::256>>, nonce: <<8::64>>},
      ...>   block: %ExWire.Struct.Block{transactions_list: [], ommers: []},
      ...>   total_difficulty: 100_000
      ...> }
      ...> |> ExWire.Packet.NewBlock.handle()
      :ok
  """
  @spec handle(ExWire.Packet.packet()) :: ExWire.Packet.handle_response()
  def handle(%__MODULE__{block_header: header}) do
    _ =
      Logger.debug(fn ->
        hash_hex = header |> Header.hash() |> Math.bin_to_hex()
        "[Packet] Peer sent new block with hash #{hash_hex}"
      end)

    :ok
  end
end
|
apps/ex_wire/lib/ex_wire/packet/new_block.ex
| 0.901527
| 0.726741
|
new_block.ex
|
starcoder
|
defmodule NervesHubLinkHTTP.Client do
  @moduledoc """
  A behaviour module for customizing the tool used for HTTP requests to NervesHub.
  Also allows handling FWUP messages and errors

  By default, `:hackney` is used for completing HTTP requests and all FWUP messages
  are logged to STDOUT. To specify your own module to use, update your `config.exs`

  ```elixir
  config :nerves_hub_link_http, client: MyApp.NervesHubLinkHTTP.Client
  ```
  """

  alias NervesHubLinkHTTP.{Certificate, Client.Default}

  @typedoc "Firmware update progress, completion or error report"
  @type fwup_message ::
          {:ok, non_neg_integer(), String.t()}
          | {:warning, non_neg_integer(), String.t()}
          | {:error, non_neg_integer(), String.t()}
          | {:progress, 0..100}

  @type method :: :get | :put | :post
  @type url :: binary()
  @type header :: {binary(), binary()}
  @type body :: binary()
  @type opts :: keyword()
  @type response :: {:ok, map()} | {:error, any()}

  @doc """
  Called on firmware update reports.

  The return value of this function is not checked.
  """
  @callback handle_fwup_message(fwup_message()) :: :ok

  @doc """
  Called when downloading a firmware update fails.

  The return value of this function is not checked.
  """
  @callback handle_error(any()) :: :ok

  @doc """
  Performs the HTTP request
  """
  @callback request(method(), url(), [header()], body(), opts()) :: response()

  # Nerves.Runtime.KV keys holding the device certificate and private key.
  @cert "nerves_hub_cert"
  @key "nerves_hub_key"

  @spec me() :: response()
  def me(), do: request(:get, "/device/me", [])

  @doc """
  This function is called internally by NervesHubLinkHTTP to notify clients of fwup progress.
  """
  @spec handle_fwup_message(fwup_message()) :: :ok
  def handle_fwup_message(data) do
    _ = apply_wrap(client(), :handle_fwup_message, [data])
    :ok
  end

  @doc """
  This function is called internally by NervesHubLinkHTTP to notify clients of fwup errors.
  """
  @spec handle_error(any()) :: :ok
  def handle_error(data) do
    _ = apply_wrap(client(), :handle_error, [data])
    # NOTE(fix): this previously returned apply_wrap/3's result
    # (`{:error, _}` / `{:exit, _}` on a crashing client), violating the
    # declared `:ok` spec. Mirror handle_fwup_message/1 and always return :ok.
    :ok
  end

  @spec update() :: response()
  def update(), do: request(:get, "/device/update", [])

  @spec request(method(), binary(), map() | binary() | list()) :: response()
  def request(:get, path, params) when is_map(params) do
    # GET parameters travel in the query string, not the body.
    url = url(path) <> "?" <> URI.encode_query(params)

    client().request(:get, url, headers(), [], opts())
    |> check_response()
  end

  def request(verb, path, params) when is_map(params) do
    # Non-GET map params are JSON-encoded into the request body.
    with {:ok, body} <- Jason.encode(params) do
      request(verb, path, body)
    end
  end

  def request(verb, path, body) do
    client().request(verb, url(path), headers(), body, opts())
    |> check_response()
  end

  @spec url(binary()) :: url()
  def url(path), do: endpoint() <> path

  # Catches exceptions and exits so a misbehaving client callback cannot
  # crash the caller.
  defp apply_wrap(client, function, args) do
    apply(client, function, args)
  catch
    :error, reason -> {:error, reason}
    :exit, reason -> {:exit, reason}
    err -> err
  end

  defp client(), do: Application.get_env(:nerves_hub_link_http, :client, Default)

  # Tracks connectivity state as a side effect of every response; raises if a
  # custom client returns something other than an ok/error tuple.
  defp check_response(response) do
    case response do
      {:ok, _} ->
        NervesHubLinkHTTP.Connection.connected()

      {:error, _} ->
        NervesHubLinkHTTP.Connection.disconnected()

      _ ->
        raise(
          "invalid HTTP response. request/5 must return a tuple with {:ok, resp} or {:error, resp}"
        )
    end

    response
  end

  defp endpoint do
    host = Application.get_env(:nerves_hub_link_http, :device_api_host)
    port = Application.get_env(:nerves_hub_link_http, :device_api_port)
    "https://#{host}:#{port}"
  end

  # Default JSON header plus one X-NervesHubLinkHTTP-* header for each
  # `nerves_fw_*` key in the active firmware metadata.
  defp headers do
    headers = [{"Content-Type", "application/json"}]

    Nerves.Runtime.KV.get_all_active()
    |> Enum.reduce(headers, fn
      {"nerves_fw_" <> key, value}, headers ->
        [{"X-NervesHubLinkHTTP-" <> key, value} | headers]

      _, headers ->
        headers
    end)
  end

  defp opts() do
    [
      ssl_options: ssl_options(),
      recv_timeout: 60_000
    ]
  end

  # Mutual-TLS options built from the device certificate/key stored in
  # Nerves.Runtime.KV; falls back to empty binaries when the key is absent.
  defp ssl_options() do
    cert = Nerves.Runtime.KV.get(@cert) |> Certificate.pem_to_der()

    key =
      with key when not is_nil(key) <- Nerves.Runtime.KV.get(@key) do
        case X509.PrivateKey.from_pem(key) do
          {:error, :not_found} -> <<>>
          {:ok, decoded} -> X509.PrivateKey.to_der(decoded)
        end
      else
        nil -> <<>>
      end

    sni = Application.get_env(:nerves_hub_link_http, :device_api_sni)

    [
      cacerts: Certificate.ca_certs(),
      cert: cert,
      key: {:ECPrivateKey, key},
      server_name_indication: to_charlist(sni)
    ]
  end
end
|
lib/nerves_hub_link_http/client.ex
| 0.782039
| 0.522994
|
client.ex
|
starcoder
|
defmodule Mnemonic do
  @moduledoc """
  BIP39 Implementation
  """

  alias Mnemonic.Wordlist

  # Languages with an official BIP-39 wordlist shipped in `Mnemonic.Wordlist`.
  @languages [
    :english,
    :chinese_simplified,
    :chinese_traditional,
    :french,
    :italian,
    :japanese,
    :korean,
    :spanish,
    :czech,
    :portuguese
  ]

  # BIP-39 defines entropy sizes of 128..256 bits in 32-bit steps; the
  # corresponding sentence lengths are 12..24 words in steps of 3
  # (words = (ENT + ENT/32) / 11, with each word encoding 11 bits).
  @valid_entropy_length [128, 160, 192, 224, 256]
  @valid_mnemonic_length [12, 15, 18, 21, 24]

  # Japanese sentences are joined with the ideographic space, not U+0020.
  # SEE: https://github.com/bitcoin/bips/blob/master/bip-0039/bip-0039-wordlists.md
  @ideographic_space "\u3000"

  defguardp valid_ent?(ent) when ent in @valid_entropy_length
  defguardp valid_entropy?(entropy) when valid_ent?(bit_size(entropy))
  defguardp supported_lang?(lang) when lang in @languages

  @typedoc """
  Supported mnemonic languages
  """
  @type language ::
          :english
          | :chinese_simplified
          | :chinese_traditional
          | :french
          | :italian
          | :japanese
          | :korean
          | :spanish
          | :czech
          | :portuguese

  @doc ~S"""
  Generate mnemonic sentences with given entropy length(in bits) and mnemonic language.
  Allowed entropy length are 128, 160, 192, 224 and 256. Supported languages are English,
  Chinese(Simplified), Chinese(Tranditional), Japanese, Korean, Spanish, Czech, Portuguese,
  French and Italian.
  """
  @spec generate(ent :: integer(), lang :: language()) :: String.t() | {:error, term()}
  def generate(ent, lang) when valid_ent?(ent) and supported_lang?(lang) do
    ent
    |> generate_entropy()
    |> from_entropy(lang)
  end

  def generate(_, _), do: {:error, :invalid_entropy_length_or_language}

  @doc ~S"""
  Generate mnemonic sentences with given entropy and mnemonic language. The bits size of entropy
  should be in 128, 160, 192, 224 and 256. Supported languages are English, Chinese(Simplified),
  Chinese(Tranditional), Japanese, Korean, Spanish, Czech, Portuguese, French and Italian.

  ## Examples

      iex> entropy = <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
      iex> Mnemonic.from_entropy(entropy, :english)
      "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
  """
  # NOTE(review): unlike `generate/2`, there is no fallback clause here — an
  # invalid entropy size or language raises FunctionClauseError rather than
  # returning `{:error, term()}` as the spec suggests. Confirm intent.
  @spec from_entropy(entropy :: binary(), lang :: language()) :: String.t() | {:error, term()}
  def from_entropy(entropy, lang) when valid_entropy?(entropy) and supported_lang?(lang) do
    entropy
    |> append_checksum()
    |> generate_mnemonic(lang)
  end

  @doc ~S"""
  Generate seed by given mnemonic, passphrase and language. The seed is 64 bytes.

  ## Examples

      iex> mnemonic = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
      iex> Mnemonic.to_seed(mnemonic, "TREZOR", :english)
      <<197, 82, 87, 195, 96, 192, 124, 114, 2, 154, 235, 193, 181, 60, 5, 237, 3, 98,
        173, 163, 142, 173, 62, 62, 158, 250, 55, 8, 229, 52, 149, 83, 31, 9, 166, 152,
        117, 153, 209, 130, 100, 193, 225, 201, 47, 44, 241, 65, 99, 12, 122, 60, 74,
        183, 200, 27, 47, 0, 22, 152, 231, 70, 59, 4>>
  """
  @spec to_seed(mnemonic :: String.t(), passphrase :: String.t(), lang :: language()) ::
          binary | {:error, term()}
  def to_seed(mnemonic, passphrase \\ "", lang) when is_binary(mnemonic) do
    # NFKD-normalize first (required by BIP-39), then validate before
    # deriving: PBKDF2-HMAC-SHA512, 2048 rounds, 64-byte output.
    with mnemonic = normalize(mnemonic),
         {:ok, _entropy} <- validate(mnemonic, lang) do
      :crypto.pbkdf2_hmac(:sha512, mnemonic, salt(passphrase), 2048, 64)
    end
  end

  @doc ~S"""
  Check the given mnemonic is valid or not.

  ## Examples

      iex> mnemonic = "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"
      iex> Mnemonic.validate(mnemonic, :english)
      {:ok, <<0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0>>}
  """
  @spec validate(mnemonic :: String.t(), lang :: language()) :: {:ok, binary()} | {:error, term()}
  def validate(mnemonic, lang) when is_binary(mnemonic) and supported_lang?(lang) do
    # Pipeline of {:ok, _}/{:error, _} steps; each step passes errors through.
    mnemonic
    |> mnemonic_to_words()
    |> words_to_checksummed_entropy(lang)
    |> checksummed_entropy_to_entropy()
  end

  # BIP-39 salt is the literal string "mnemonic" plus the passphrase, NFKD.
  defp salt(passphrase), do: normalize("mnemonic" <> passphrase)

  # `ent` is in bits; :crypto.strong_rand_bytes/1 takes bytes.
  defp generate_entropy(ent) do
    ent
    |> div(8)
    |> :crypto.strong_rand_bytes()
  end

  # Appends the checksum: the first ENT/32 bits of SHA-256(entropy).
  defp append_checksum(entropy) do
    cs =
      entropy
      |> bit_size()
      |> div(32)

    <<checksum::bits-size(cs), _rest::bits>> = :crypto.hash(:sha256, entropy)
    <<entropy::bits, checksum::bits>>
  end

  # Splits the checksummed entropy into 11-bit word indexes and maps each to
  # its wordlist entry, joining with the language-appropriate separator.
  defp generate_mnemonic(entropy, lang) do
    joiner =
      case lang do
        :japanese -> @ideographic_space
        _otherwise -> " "
      end

    entropy
    |> split_to_group()
    |> Enum.map(&Wordlist.at(lang, &1))
    |> Enum.join(joiner)
  end

  defp split_to_group(entropy) do
    do_split_to_group(entropy, [])
  end

  defp do_split_to_group(<<>>, groups), do: groups

  # Consumes 11 bits at a time (one word index per group).
  # NOTE(review): `groups ++ [group]` is O(n) per step (O(n²) total); with at
  # most 24 groups this is harmless, but prepend+reverse would be idiomatic.
  defp do_split_to_group(<<group::11, rest::bits>>, groups) do
    do_split_to_group(rest, groups ++ [group])
  end

  # Splits a sentence into words and checks the word count is a valid
  # BIP-39 length. Splitting on U+0020 works for Japanese too because
  # normalize/1 (NFKD) converts the ideographic space into a plain space.
  defp mnemonic_to_words(mnemonic) do
    mnemonic
    |> String.trim()
    |> normalize()
    |> String.split(" ")
    |> case do
      words when length(words) in @valid_mnemonic_length -> {:ok, words}
      _otherwise -> {:error, :invalid_words}
    end
  end

  defp words_to_checksummed_entropy({:error, error}, _lang), do: {:error, error}

  # Looks up each word's 11-bit index; any unknown word invalidates the whole
  # mnemonic. Indexes are folded back into a bitstring (reversed first so the
  # prepend-style reduce restores original order).
  defp words_to_checksummed_entropy({:ok, words}, lang) when is_list(words) do
    indexes = Enum.map(words, &Wordlist.find_index(lang, &1))

    case Enum.any?(indexes, &is_nil(&1)) do
      true ->
        {:error, :invalid_words}

      false ->
        checksummed_entropy =
          indexes
          |> Enum.reverse()
          |> Enum.reduce(<<>>, &<<&1::11, &2::bits>>)

        {:ok, checksummed_entropy}
    end
  end

  defp checksummed_entropy_to_entropy({:error, error}), do: {:error, error}

  defp checksummed_entropy_to_entropy({:ok, checksummed_entropy}) do
    checksummed_entropy
    |> extract_entropy()
    |> validate_checksum()
  end

  # Splits `entropy <> checksum` back apart. Total bits = ENT + ENT/32, so
  # ENT = total * 32 / 33 and CS = ENT / 32.
  defp extract_entropy(checksummed_entropy) when is_bitstring(checksummed_entropy) do
    ent =
      bit_size(checksummed_entropy)
      |> Kernel.*(32)
      |> div(33)

    cs = div(ent, 32)

    with <<entropy::bits-size(ent), checksum::bits-size(cs)>> <- checksummed_entropy do
      {:ok, entropy, checksum}
    else
      _error -> {:error, :invalid_mnemonic}
    end
  end

  defp validate_checksum({:error, error}), do: {:error, error}

  # Recomputes SHA-256(entropy) and compares its leading CS bits with the
  # checksum carried in the mnemonic.
  defp validate_checksum({:ok, entropy, checksum}) do
    cs = bit_size(checksum)
    <<valid_checksum::bits-size(cs), _rest::bits>> = :crypto.hash(:sha256, entropy)

    if valid_checksum == checksum do
      {:ok, entropy}
    else
      {:error, :invalid_mnemonic_checksum}
    end
  end

  # BIP-39 mandates NFKD normalization for mnemonics and passphrases.
  defp normalize(string), do: :unicode.characters_to_nfkd_binary(string)
end
|
lib/mnemonic.ex
| 0.841631
| 0.536738
|
mnemonic.ex
|
starcoder
|
if Code.ensure_loaded?(Phoenix) do
  defmodule PromEx.Plugins.Phoenix do
    @moduledoc """
    This plugin captures metrics emitted by Phoenix. Specifically, it captures HTTP request metrics and
    Phoenix channel metrics.

    This plugin supports the following options:
    - `router`: This is a REQUIRED option and is the full module name of your Phoenix Router (e.g MyAppWeb.Router).
    - `event_prefix`: This option is OPTIONAL and allows you to set the event prefix for the Telemetry events. This
      value should align with what you pass to `Plug.Telemetry` in your `endpoint.ex` file (see the plug docs
      for more information https://hexdocs.pm/plug/Plug.Telemetry.html)

    This plugin exposes the following metric groups:
    - `:phoenix_http_event_metrics`
    - `:phoenix_channel_event_metrics`

    To use plugin in your application, add the following to your PromEx module:
    ```
    defmodule WebApp.PromEx do
      use PromEx, otp_app: :web_app

      @impl true
      def plugins do
        [
          ...
          {PromEx.Plugins.Phoenix, router: WebAppWeb.Router}
        ]
      end

      @impl true
      def dashboards do
        [
          ...
          {:prom_ex, "phoenix.json"}
        ]
      end
    end
    ```
    """

    use PromEx.Plugin

    require Logger

    alias Phoenix.Socket
    alias Plug.Conn

    @impl true
    def event_metrics(opts) do
      otp_app = Keyword.fetch!(opts, :otp_app)
      metric_prefix = PromEx.metric_prefix(otp_app, :phoenix)

      # Event metrics definitions
      [
        http_events(metric_prefix, opts),
        channel_events(metric_prefix)
      ]
    end

    # HTTP request metrics attached to the endpoint's `[..., :stop]` event:
    # request duration, response payload size, and total request count.
    defp http_events(metric_prefix, opts) do
      # Fetch user options
      phoenix_router = Keyword.fetch!(opts, :router)
      event_prefix = Keyword.get(opts, :event_prefix, [:phoenix, :endpoint])

      # Shared configuration
      phoenix_stop_event = event_prefix ++ [:stop]
      http_metrics_tags = [:status, :method, :path, :controller, :action]

      Event.build(
        :phoenix_http_event_metrics,
        [
          # Capture request duration information
          distribution(
            metric_prefix ++ [:http, :request, :duration, :milliseconds],
            event_name: phoenix_stop_event,
            measurement: :duration,
            description: "The time it takes for the application to respond to HTTP requests.",
            reporter_options: [
              buckets: exponential!(1, 2, 12)
            ],
            tag_values: get_conn_tags(phoenix_router),
            tags: http_metrics_tags,
            unit: {:native, :millisecond}
          ),

          # Capture response payload size information
          distribution(
            metric_prefix ++ [:http, :response, :size, :bytes],
            event_name: phoenix_stop_event,
            description: "The size of the HTTP response payload.",
            reporter_options: [
              buckets: exponential!(1, 4, 12)
            ],
            measurement: fn _measurements, metadata ->
              case metadata.conn.resp_body do
                nil -> 0
                _ -> :erlang.iolist_size(metadata.conn.resp_body)
              end
            end,
            tag_values: get_conn_tags(phoenix_router),
            tags: http_metrics_tags,
            unit: :byte
          ),

          # Capture the number of requests that have been serviced
          counter(
            metric_prefix ++ [:http, :requests, :total],
            event_name: phoenix_stop_event,
            description: "The number of requests have been serviced.",
            tag_values: get_conn_tags(phoenix_router),
            tags: http_metrics_tags
          )
        ]
      )
    end

    # Channel metrics: join counter (tagged by transport/result) and
    # handle_in duration distribution.
    defp channel_events(metric_prefix) do
      Event.build(
        :phoenix_channel_event_metrics,
        [
          # Capture the number of channel joins that have occurred
          counter(
            metric_prefix ++ [:channel, :joined, :total],
            event_name: [:phoenix, :channel_joined],
            description: "The number of channel joins that have occurred.",
            tag_values: fn %{result: result, socket: %Socket{transport: transport}} ->
              %{
                transport: transport,
                result: result
              }
            end,
            tags: [:result, :transport]
          ),

          # Capture channel handle_in duration
          distribution(
            metric_prefix ++ [:channel, :handled_in, :duration, :milliseconds],
            event_name: [:phoenix, :channel_handled_in],
            measurement: :duration,
            description: "The time it takes for the application to respond to channel messages.",
            reporter_options: [
              buckets: exponential!(1, 2, 12)
            ],
            unit: {:native, :millisecond}
          )
        ]
      )
    end

    # Returns a `tag_values` function that derives the metric tags from the
    # `Plug.Conn` in the event metadata, using the router to resolve the
    # matched route, controller, and action.
    defp get_conn_tags(router) do
      fn
        %{conn: %Conn{} = conn} ->
          router
          |> Phoenix.Router.route_info(conn.method, conn.request_path, "")
          |> case do
            %{route: path, plug: controller, plug_opts: action} ->
              %{
                path: path,
                controller: normalize_module_name(controller),
                action: action
              }

            _ ->
              %{
                path: "Unknown",
                controller: "Unknown",
                action: "Unknown"
              }
          end
          |> Map.merge(%{
            status: conn.status,
            method: conn.method
          })

        _ ->
          # TODO: Change this to warning as warn is deprecated as of Elixir 1.11
          Logger.warn("Could not resolve path for request")

          # BUGFIX: this clause previously returned the result of
          # `Logger.warn/1` (`:ok`). A `tag_values` function must always
          # return a map containing every tag listed under `tags:`,
          # otherwise tag extraction fails for this event.
          %{
            status: "Unknown",
            method: "Unknown",
            path: "Unknown",
            controller: "Unknown",
            action: "Unknown"
          }
      end
    end

    # "Elixir.MyApp.Controller" -> "MyApp.Controller" for readable labels.
    defp normalize_module_name(name) when is_atom(name) do
      name
      |> Atom.to_string()
      |> String.trim_leading("Elixir.")
    end

    defp normalize_module_name(name), do: name
  end
else
  defmodule PromEx.Plugins.Phoenix do
    @moduledoc false

    use PromEx.Plugin

    @impl true
    def event_metrics(_opts) do
      PromEx.Plugin.no_dep_raise(__MODULE__, "Phoenix")
    end
  end
end
|
lib/prom_ex/plugins/phoenix.ex
| 0.869091
| 0.596756
|
phoenix.ex
|
starcoder
|
defmodule ExJenga.SendMoney.Swift do
  @moduledoc """
  Swift allows sending of money to cross border banks.

  ## Country Codes
  To lookup the country codes you will use in your application you can visit https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2#Officially_assigned_code_elements

  ## Bank Codes (BIC and IBAN)
  To lookup the BIC and IBANs you will use in your application you can visit https://www.theswiftcodes.com/

  ## Currency Codes
  To lookup the ISO currency code (ISO 4217) you will use in your application you can visit https://en.wikipedia.org/wiki/ISO_4217
  """

  import ExJenga.JengaBase
  alias ExJenga.Signature

  @doc """
  Sends money to other banks via SWIFT.

  ## Parameters
  attrs: - a map containing:
  - `source` - a map containing; `countryCode`, `name` and `accountNumber`
  - `destination` - a map containing; `type` with a value of "SWIFT", `countryCode`, `name`, `bankCode`, `branchCode`, and `accountNumber`
  - `transfer` - a map containing; `type`, `amount`, `currencyCode`, `reference`, `date`, `description` and `chargeOption` which can be either `SELF` or `OTHER`

  Read More about the parameters' descriptions here: https://developer.jengaapi.io/reference#swift

  ## Example
      iex> ExJenga.SendMoney.Swift.request(%{source: %{countryCode: "KE", name: "<NAME>", accountNumber: "0011547896523" }, destination: %{type: "bank", countryCode: "JP", name: "<NAME>", bankBic: "BOTKJPJTXXX", accountNumber: "12365489", addressline1: "Post Box 56" }, transfer: %{ type: "SWIFT", amount: "2.00", currencyCode: "USD", reference: "692194625798", date: "2020-12-06", description: "some remarks", chargeOption: "SELF"}})
      {:ok,
       %{
         "transactionId" => "1452854",
         "status" => "SUCCESS"
       }}
  """
  @spec request(map()) :: {:error, any()} | {:ok, any()}
  def request(
        %{
          source: %{
            countryCode: _src_country,
            name: _src_name,
            accountNumber: source_account
          },
          destination: %{
            type: _dest_type,
            countryCode: _dest_country,
            name: _dest_name,
            bankBic: _bank_bic,
            accountNumber: destination_account,
            addressline1: _dest_address
          },
          transfer: %{
            amount: amount,
            currencyCode: _currency_code,
            reference: reference,
            date: date,
            description: _description,
            chargeOption: _charge_option
          }
        } = request_body
      ) do
    # The request signature is computed over the concatenation of these
    # specific fields, in this exact order, as required by the Jenga API.
    signature_payload = "#{reference}#{date}#{source_account}#{destination_account}#{amount}"

    headers = [{"signature", Signature.sign(signature_payload)}]
    make_request("/transaction/v2/remittance#swift", request_body, headers)
  end

  # Fallback: the request body is missing one or more required keys.
  def request(_), do: {:error, "Required Parameters missing, check your request body"}
end
|
lib/ex_jenga/send_money/swift.ex
| 0.839175
| 0.647701
|
swift.ex
|
starcoder
|
defmodule OMG.State.Transaction do
  @moduledoc """
  Internal representation of transaction spent on Plasma chain.

  This module holds the representation of a "raw" transaction, i.e. without signatures nor recovered input spenders

  This module also contains the public Transaction API to be prefered to access data of different transaction "flavors",
  like `Transaction.Signed` or `Transaction.Recovered`

  NOTE: consider splitting the "raw" struct out of here to `Transaction.Raw` and have only the public Transaction API
  remain here
  """

  alias OMG.Crypto
  alias OMG.Utxo

  require Utxo

  @zero_address OMG.Eth.zero_address()

  # Transactions are padded to a fixed shape of 4 inputs and 4 outputs.
  @max_inputs 4
  @max_outputs 4

  @default_metadata nil

  defstruct [:inputs, :outputs, metadata: @default_metadata]

  @type t() :: %__MODULE__{
          inputs: list(input()),
          outputs: list(output()),
          metadata: metadata()
        }

  @type any_flavor_t() :: t() | __MODULE__.Signed.t() | __MODULE__.Recovered.t()

  @type currency() :: Crypto.address_t()
  @type tx_bytes() :: binary()
  @type tx_hash() :: Crypto.hash_t()
  @type metadata() :: binary() | nil

  @type input() :: %{
          blknum: non_neg_integer(),
          txindex: non_neg_integer(),
          oindex: non_neg_integer()
        }

  @type output() :: %{
          owner: Crypto.address_t(),
          currency: currency(),
          amount: non_neg_integer()
        }

  @type decode_error() ::
          :malformed_transaction_rlp
          | :malformed_inputs
          | :malformed_outputs
          | :malformed_address
          | :malformed_metadata
          | :malformed_transaction

  # Defined as a macro (not a plain function) so it can be used in guards:
  # metadata is either nil or exactly 32 bytes.
  defmacro is_metadata(metadata) do
    quote do
      unquote(metadata) == nil or (is_binary(unquote(metadata)) and byte_size(unquote(metadata)) == 32)
    end
  end

  defmacro max_inputs do
    quote do
      unquote(@max_inputs)
    end
  end

  defmacro max_outputs do
    quote do
      unquote(@max_outputs)
    end
  end

  @type input_index_t() :: 0..3

  @doc """
  Creates a new transaction from a list of inputs and a list of outputs.
  Adds empty (zeroes) inputs and/or outputs to reach the expected size
  of `@max_inputs` inputs and `@max_outputs` outputs.

  assumptions:
  ```
  length(inputs) <= @max_inputs
  length(outputs) <= @max_outputs
  ```
  """
  @spec new(
          list({pos_integer, pos_integer, 0 | 1}),
          list({Crypto.address_t(), currency(), pos_integer}),
          metadata()
        ) :: t()
  def new(inputs, outputs, metadata \\ @default_metadata)

  def new(inputs, outputs, metadata)
      when is_metadata(metadata) and length(inputs) <= @max_inputs and length(outputs) <= @max_outputs do
    # Convert tuples to maps, then right-pad with zero inputs/outputs so the
    # transaction always has exactly @max_inputs / @max_outputs entries.
    inputs =
      inputs
      |> Enum.map(fn {blknum, txindex, oindex} -> %{blknum: blknum, txindex: txindex, oindex: oindex} end)

    inputs = inputs ++ List.duplicate(%{blknum: 0, txindex: 0, oindex: 0}, @max_inputs - Kernel.length(inputs))

    outputs =
      outputs
      |> Enum.map(fn {owner, currency, amount} -> %{owner: owner, currency: currency, amount: amount} end)

    outputs =
      outputs ++
        List.duplicate(
          %{owner: @zero_address, currency: @zero_address, amount: 0},
          @max_outputs - Kernel.length(outputs)
        )

    %__MODULE__{inputs: inputs, outputs: outputs, metadata: metadata}
  end

  @doc """
  Transaform the structure of RLP items after a successful RLP decode of a raw transaction, into a structure instance
  """
  # Accepts exactly [inputs, outputs] or [inputs, outputs, metadata].
  def reconstruct([inputs_rlp, outputs_rlp | rest_rlp])
      when rest_rlp == [] or length(rest_rlp) == 1 do
    with {:ok, inputs} <- reconstruct_inputs(inputs_rlp),
         {:ok, outputs} <- reconstruct_outputs(outputs_rlp),
         {:ok, metadata} <- reconstruct_metadata(rest_rlp),
         do: {:ok, %__MODULE__{inputs: inputs, outputs: outputs, metadata: metadata}}
  end

  def reconstruct(_), do: {:error, :malformed_transaction}

  # Any shape mismatch in the RLP items raises inside Enum.map and is
  # converted to a uniform error by the rescue clause.
  defp reconstruct_inputs(inputs_rlp) do
    Enum.map(inputs_rlp, fn [blknum, txindex, oindex] ->
      %{blknum: parse_int(blknum), txindex: parse_int(txindex), oindex: parse_int(oindex)}
    end)
    |> inputs_without_gaps()
  rescue
    _ -> {:error, :malformed_inputs}
  end

  defp reconstruct_outputs(outputs_rlp) do
    outputs =
      Enum.map(outputs_rlp, fn [owner, currency, amount] ->
        with {:ok, cur12} <- parse_address(currency),
             {:ok, owner} <- parse_address(owner) do
          %{owner: owner, currency: cur12, amount: parse_int(amount)}
        end
      end)

    # Note: `error = Enum.find(...)` intentionally binds inside the condition;
    # the first {:error, _} entry (if any) short-circuits the whole result.
    if(error = Enum.find(outputs, &match?({:error, _}, &1)),
      do: error,
      else: outputs
    )
    |> outputs_without_gaps()
  rescue
    _ -> {:error, :malformed_outputs}
  end

  defp reconstruct_metadata([]), do: {:ok, nil}
  defp reconstruct_metadata([metadata]) when is_metadata(metadata), do: {:ok, metadata}
  defp reconstruct_metadata([_]), do: {:error, :malformed_metadata}

  # RLP integers are big-endian unsigned binaries.
  defp parse_int(binary), do: :binary.decode_unsigned(binary, :big)

  # necessary, because RLP handles empty string equally to integer 0
  @spec parse_address(<<>> | Crypto.address_t()) :: {:ok, Crypto.address_t()} | {:error, :malformed_address}
  defp parse_address(binary)
  defp parse_address(""), do: {:ok, <<0::160>>}
  defp parse_address(<<_::160>> = address_bytes), do: {:ok, address_bytes}
  defp parse_address(_), do: {:error, :malformed_address}

  @spec decode(tx_bytes()) :: {:ok, t()} | {:error, decode_error()}
  def decode(tx_bytes) do
    with {:ok, raw_tx_rlp_decoded_chunks} <- try_exrlp_decode(tx_bytes),
         do: reconstruct(raw_tx_rlp_decoded_chunks)
  end

  # Raises MatchError on invalid bytes — for callers where failure is a bug.
  def decode!(tx_bytes) do
    {:ok, tx} = decode(tx_bytes)
    tx
  end

  defp try_exrlp_decode(tx_bytes) do
    {:ok, ExRLP.decode(tx_bytes)}
  rescue
    _ -> {:error, :malformed_transaction_rlp}
  end

  # Private: external callers should use raw_txbytes/1 on any flavor.
  @spec encode(t()) :: tx_bytes()
  defp encode(transaction) do
    get_data_for_rlp(transaction)
    |> ExRLP.encode()
  end

  @doc """
  Turns a structure instance into a structure of RLP items, ready to be RLP encoded, for a raw transaction
  """
  def get_data_for_rlp(%__MODULE__{inputs: inputs, outputs: outputs, metadata: metadata}) when is_metadata(metadata),
    do:
      [
        # contract expects 4 inputs and outputs
        Enum.map(inputs, fn %{blknum: blknum, txindex: txindex, oindex: oindex} -> [blknum, txindex, oindex] end) ++
          List.duplicate([0, 0, 0], 4 - length(inputs)),
        Enum.map(outputs, fn %{owner: owner, currency: currency, amount: amount} -> [owner, currency, amount] end) ++
          List.duplicate([@zero_address, @zero_address, 0], 4 - length(outputs))
      ] ++ if(metadata, do: [metadata], else: [])

  @spec hash(t()) :: tx_hash()
  defp hash(%__MODULE__{} = tx) do
    tx
    |> encode
    |> Crypto.hash()
  end

  @doc """
  Returns all inputs, never returns zero inputs
  """
  @spec get_inputs(any_flavor_t()) :: list(input())
  def get_inputs(%__MODULE__.Recovered{signed_tx: signed_tx}), do: get_inputs(signed_tx)
  def get_inputs(%__MODULE__.Signed{raw_tx: raw_tx}), do: get_inputs(raw_tx)

  def get_inputs(%__MODULE__{inputs: inputs}) do
    inputs
    |> Enum.map(fn %{blknum: blknum, txindex: txindex, oindex: oindex} -> Utxo.position(blknum, txindex, oindex) end)
    |> Enum.filter(&Utxo.Position.non_zero?/1)
  end

  @doc """
  Returns all outputs, never returns zero outputs
  """
  @spec get_outputs(any_flavor_t()) :: list(output())
  def get_outputs(%__MODULE__.Recovered{signed_tx: signed_tx}), do: get_outputs(signed_tx)
  def get_outputs(%__MODULE__.Signed{raw_tx: raw_tx}), do: get_outputs(raw_tx)

  def get_outputs(%__MODULE__{outputs: outputs}) do
    outputs
    |> Enum.reject(&match?(%{owner: @zero_address, currency: @zero_address, amount: 0}, &1))
  end

  @doc """
  Returns the encoded bytes of the raw transaction involved, i.e. without the signatures
  """
  @spec raw_txbytes(any_flavor_t()) :: binary
  def raw_txbytes(%__MODULE__.Recovered{signed_tx: signed_tx}), do: raw_txbytes(signed_tx)
  def raw_txbytes(%__MODULE__.Signed{raw_tx: raw_tx}), do: raw_txbytes(raw_tx)
  def raw_txbytes(%__MODULE__{} = raw_tx), do: encode(raw_tx)

  @doc """
  Returns the hash of the raw transaction involved, i.e. without the signatures
  """
  @spec raw_txhash(any_flavor_t()) :: binary
  def raw_txhash(%__MODULE__.Recovered{signed_tx: signed_tx}), do: raw_txhash(signed_tx)
  def raw_txhash(%__MODULE__.Signed{raw_tx: raw_tx}), do: raw_txhash(raw_tx)
  def raw_txhash(%__MODULE__{} = raw_tx), do: hash(raw_tx)

  defp inputs_without_gaps(inputs),
    do: check_for_gaps(inputs, %{blknum: 0, txindex: 0, oindex: 0}, {:error, :inputs_contain_gaps})

  defp outputs_without_gaps({:error, _} = error), do: error

  defp outputs_without_gaps(outputs),
    do:
      check_for_gaps(
        outputs,
        %{owner: @zero_address, currency: @zero_address, amount: 0},
        {:error, :outputs_contain_gaps}
      )

  # Check if any consecutive pair of elements contains empty followed by non-empty element
  # which means there is a gap
  defp check_for_gaps(items, empty, error) do
    items
    # discard - discards last unpaired element from a comparison
    |> Stream.chunk_every(2, 1, :discard)
    |> Enum.any?(fn
      [^empty, elt] when elt != empty -> true
      _ -> false
    end)
    |> if(do: error, else: {:ok, items})
  end
end
|
apps/omg/lib/omg/state/transaction.ex
| 0.91065
| 0.773345
|
transaction.ex
|
starcoder
|
defmodule Site.PhoneNumber do
  @moduledoc "Functions for working with phone numbers"

  @doc """
  Formats a possibly messy phone number (optional leading 1, any punctuation)
  as a human-readable `AAA-PPP-LLLL` string.

  Falls back to the original input when the number cannot be parsed, and
  returns `""` for `nil`.
  """
  @spec pretty_format(String.t() | nil) :: String.t()
  def pretty_format(nil), do: ""

  def pretty_format(number) do
    case parse_phone_number(number) do
      nil -> number
      {area, exchange, line} -> "#{area}-#{exchange}-#{line}"
    end
  end

  @doc """
  Formats a possibly messy phone number as `+1-AAA-PPP-LLLL`, suitable for a
  `tel:` link. Returns `nil` when parsing fails or when given `nil`.
  """
  @spec machine_format(String.t() | nil) :: String.t() | nil
  def machine_format(nil), do: nil

  def machine_format(number) do
    case parse_phone_number(number) do
      nil -> nil
      {area, exchange, line} -> "+1-#{area}-#{exchange}-#{line}"
    end
  end

  @doc """
  Format read by screenreaders in a nicer manner
  Inspired by https://jhalabi.com/blog/accessibility-phone-number-formatting
  """
  def aria_format(nil), do: nil

  def aria_format(number) do
    case parse_phone_number(number) do
      nil ->
        nil

      {area, exchange, line} ->
        # Space out each digit and pause (". ") between the three groups.
        Enum.map_join([area, exchange, line], ". ", &space_out_digits/1)
    end
  end

  @doc """
  Extracts `{area_code, exchange, line}` from a phone number string, ignoring
  punctuation and a leading 1. Returns `nil` unless exactly 10 digits remain.
  """
  @spec parse_phone_number(String.t()) :: {String.t(), String.t(), String.t()} | nil
  def parse_phone_number(number) do
    normalized = number |> digits() |> without_leading_one()

    case normalized do
      <<area::bytes-size(3), exchange::bytes-size(3), line::bytes-size(4)>> ->
        {area, exchange, line}

      _ ->
        nil
    end
  end

  # "6 1 7" from "617" — one space between every digit.
  @spec space_out_digits(String.t()) :: String.t()
  defp space_out_digits(group) do
    group
    |> String.split("", trim: true)
    |> Enum.join(" ")
  end

  # Strips every non-ASCII-digit character.
  @spec digits(String.t()) :: String.t()
  defp digits(str) when is_binary(str) do
    String.replace(str, ~r/[^0-9]/, "")
  end

  # Drops a single leading country code "1" if present.
  @spec without_leading_one(String.t()) :: String.t()
  defp without_leading_one("1" <> rest), do: rest
  defp without_leading_one(other), do: other
end
|
apps/site/lib/site/phone_number.ex
| 0.773259
| 0.451568
|
phone_number.ex
|
starcoder
|
defmodule ElixirSense.Core.Normalized.Code do
  @moduledoc """
  Shim to replicate the behavior of deprecated `Code.get_docs/2`
  """

  @type doc_t :: nil | false | String.t()
  @type fun_doc_entry_t ::
          {{atom, non_neg_integer}, pos_integer, :function | :macro, term, doc_t, map}
  @type doc_entry_t ::
          {{atom, non_neg_integer}, pos_integer, :callback | :macrocallback | :type, doc_t, map}
  @type moduledoc_entry_t :: {pos_integer, doc_t, map}

  @spec get_docs(module, :docs) :: nil | [fun_doc_entry_t]
  # BUGFIX: the spec previously read `[:doc_entry_t]`, i.e. a list of the
  # literal atom `:doc_entry_t`, instead of a list of the `doc_entry_t` type.
  @spec get_docs(module, :callback_docs | :type_docs) :: nil | [doc_entry_t]
  @spec get_docs(module, :moduledoc) :: nil | moduledoc_entry_t
  def get_docs(module, category) do
    # Only Elixir modules with markdown docs chunks are supported; anything
    # else (Erlang modules, stripped beams, :error) yields nil.
    case Code.fetch_docs(module) do
      {:docs_v1, moduledoc_anno, :elixir, "text/markdown", moduledoc, metadata, docs} ->
        case category do
          :moduledoc ->
            moduledoc_en = extract_docs(moduledoc)

            {:erl_anno.line(moduledoc_anno), moduledoc_en, metadata}

          :docs ->
            get_fun_docs(module, docs)

          :callback_docs ->
            for {{kind, _name, _arity}, _anno, _signatures, _docs, _metadata} = entry
                when kind in [:callback, :macrocallback] <- docs do
              map_doc_entry(entry)
            end

          :type_docs ->
            for {{:type, _name, _arity}, _anno, _signatures, _docs, _metadata} = entry <- docs do
              map_doc_entry(entry)
            end
        end

      _ ->
        nil
    end
  end

  # Converts a `Code.fetch_docs/1` entry into the legacy `Code.get_docs/2`
  # tuple shape. Functions/macros additionally get their argument ASTs,
  # recovered by parsing the rendered signature strings.
  defp map_doc_entry({{kind, name, arity}, anno, signatures, docs, metadata}) do
    docs_en = extract_docs(docs)
    line = :erl_anno.line(anno)

    case kind do
      kind when kind in [:function, :macro] ->
        args_quoted =
          signatures
          |> Enum.join(" ")
          |> Code.string_to_quoted()
          |> case do
            {:ok, {^name, _, args}} -> args
            _ -> []
          end

        {{name, arity}, line, kind, args_quoted, docs_en, metadata}

      _ ->
        {{name, arity}, line, kind, docs_en, metadata}
    end
  end

  # Maps the docs-chunk representation to the legacy one:
  # %{"en" => text} -> text, :hidden -> false, :none -> nil.
  @spec extract_docs(%{required(String.t()) => String.t()} | :hidden | :none) ::
          String.t() | false | nil
  def extract_docs(%{"en" => docs_en}), do: docs_en
  def extract_docs(:hidden), do: false
  def extract_docs(_), do: nil

  # Returns function/macro doc entries, backfilling docs for undocumented
  # callbacks implementations from the module's behaviours.
  defp get_fun_docs(module, docs) do
    docs_from_module =
      Enum.filter(
        docs,
        &match?(
          {{kind, _name, _arity}, _anno, _signatures, _docs, _metadata}
          when kind in [:function, :macro],
          &1
        )
      )

    non_documented =
      docs_from_module
      |> Stream.filter(fn {{_kind, _name, _arity}, _anno, _signatures, docs, _metadata} ->
        docs in [:hidden, :none]
      end)
      |> Enum.into(MapSet.new(), fn {{_kind, name, arity}, _anno, _signatures, _docs, _metadata} ->
        {name, arity}
      end)

    docs_from_behaviours = get_docs_from_behaviour(module, non_documented)

    Enum.map(
      docs_from_module,
      fn
        {{kind, name, arity}, anno, signatures, docs, metadata} ->
          # Prefer the behaviour's signatures/docs/metadata when this
          # name/arity was undocumented in the module itself.
          {signatures, docs, metadata} =
            Map.get(docs_from_behaviours, {name, arity}, {signatures, docs, metadata})

          {{kind, name, arity}, anno, signatures, docs, metadata}
          |> map_doc_entry
      end
    )
  end

  defp get_docs_from_behaviour(module, funs) do
    if Enum.empty?(funs) do
      # Small optimization to avoid needless analysis of behaviour modules if the collection of
      # required functions is empty.
      %{}
    else
      module
      |> behaviours()
      |> Stream.flat_map(&callback_documentation/1)
      |> Stream.filter(fn {name_arity, {_signatures, _docs, _metadata}} ->
        Enum.member?(funs, name_arity)
      end)
      |> Enum.into(%{})
    end
  end

  # Collects {name, arity} => {signatures, docs, metadata} for every callback
  # and macrocallback documented on `module`.
  def callback_documentation(module) do
    case Code.fetch_docs(module) do
      {:docs_v1, _moduledoc_anno, :elixir, _mime_type, _moduledoc, _metadata, docs} ->
        docs

      _ ->
        []
    end
    |> Stream.filter(
      &match?(
        {{kind, _name, _arity}, _anno, _signatures, _docs, _metadata}
        when kind in [:callback, :macrocallback],
        &1
      )
    )
    |> Stream.map(fn {{_kind, name, arity}, _anno, signatures, docs, metadata} ->
      {{name, arity}, {signatures, docs, metadata}}
    end)
  end

  # Reads the -behaviour attributes from the module's beam metadata; a module
  # that is not loaded/loadable has no behaviours.
  defp behaviours(module) do
    if function_exported?(module, :module_info, 1),
      do: module.module_info(:attributes) |> Keyword.get_values(:behaviour) |> Enum.concat(),
      else: []
  end
end
|
lib/elixir_sense/core/normalized/code.ex
| 0.749362
| 0.447762
|
code.ex
|
starcoder
|
defmodule Cloak.Ciphers.AES.CTR do
  @moduledoc """
  A `Cloak.Cipher` which encrypts values with the AES cipher in CTR (stream) mode.
  Internally relies on Erlang's `:crypto.stream_encrypt/2`.
  """

  @behaviour Cloak.Cipher

  alias Cloak.Tags.{Encoder, Decoder}

  @doc """
  Callback implementation for `Cloak.Cipher`. Encrypts a value using
  AES in CTR mode.

  Generates a random IV for every encryption, and prepends the key tag and IV to
  the beginning of the ciphertext. The format can be diagrammed like this:

      +-----------------------------------+----------------------+
      |               HEADER              |         BODY         |
      +-------------------+---------------+----------------------+
      | Key Tag (n bytes) | IV (16 bytes) | Ciphertext (n bytes) |
      +-------------------+---------------+----------------------+
      |                   |__________________________________
      |                                                      |
      +---------------+-----------------+-------------------+
      | Type (1 byte) | Length (1 byte) | Key Tag (n bytes) |
      +---------------+-----------------+-------------------+

  The `Key Tag` component of the header breaks down into a `Type`, `Length`,
  and `Value` triplet for easy decoding.
  """
  @impl true
  def encrypt(plaintext, opts) when is_binary(plaintext) do
    key = Keyword.fetch!(opts, :key)
    tag = Keyword.fetch!(opts, :tag)
    # Fresh random IV per encryption — reusing an IV with the same key in CTR
    # mode would be catastrophic for confidentiality.
    iv = :crypto.strong_rand_bytes(16)
    state = do_init(key, iv, true)
    ciphertext = do_encrypt(state, to_string(plaintext))
    {:ok, Encoder.encode(tag) <> iv <> ciphertext}
  end

  @doc """
  Callback implementation for `Cloak.Cipher`. Decrypts a value
  encrypted with AES in CTR mode.

  Uses the key tag to find the correct key for decryption, and the IV included
  in the header to decrypt the body of the ciphertext.

  ### Parameters
  - `ciphertext` - Binary ciphertext generated by `encrypt/2`.

  ### Examples
      iex> encrypt("Hello") |> decrypt
      "Hello"
  """
  @impl true
  def decrypt(ciphertext, opts) when is_binary(ciphertext) do
    if can_decrypt?(ciphertext, opts) do
      key = Keyword.fetch!(opts, :key)
      %{remainder: <<iv::binary-16, ciphertext::binary>>} = Decoder.decode(ciphertext)
      state = do_init(key, iv, false)
      plaintext = do_decrypt(state, ciphertext)
      {:ok, plaintext}
    else
      :error
    end
  end

  @doc """
  Callback implementation for `Cloak.Cipher`. Determines if
  a ciphertext can be decrypted with this cipher.
  """
  @impl true
  def can_decrypt?(ciphertext, opts) when is_binary(ciphertext) do
    tag = Keyword.fetch!(opts, :tag)

    # Valid iff the key tag matches and at least a full 16-byte IV follows.
    case Decoder.decode(ciphertext) do
      %{tag: ^tag, remainder: <<_iv::binary-16, _ciphertext::binary>>} ->
        true

      _other ->
        false
    end
  end

  # TODO: remove this once support for Erlang/OTP 21 is dropped
  #
  # BUGFIX: the release must be compared numerically. The previous
  # lexicographic string comparison (`System.otp_release() >= "22"`) misorders
  # releases such as "9" (> "22") and any future "100" (< "22"). Modern OTP
  # releases are plain integer strings, so String.to_integer/1 is safe here.
  if String.to_integer(System.otp_release()) >= 22 do
    # OTP >= 22: use the new unified :crypto.crypto_init/crypto_update API.
    defp do_init(key, iv, encoder?) do
      :crypto.crypto_init(:aes_256_ctr, key, iv, encoder?)
    end

    defp do_encrypt(state, plaintext) do
      :crypto.crypto_update(state, plaintext)
    end

    defp do_decrypt(state, ciphertext) do
      :crypto.crypto_update(state, ciphertext)
    end
  else
    # OTP < 22: fall back to the legacy stream API (removed in OTP 24).
    defp do_init(key, iv, _) do
      :crypto.stream_init(:aes_ctr, key, iv)
    end

    defp do_encrypt(state, plaintext) do
      {_state, cyphertext} = :crypto.stream_encrypt(state, plaintext)
      cyphertext
    end

    defp do_decrypt(state, ciphertext) do
      {_state, plaintext} = :crypto.stream_decrypt(state, ciphertext)
      plaintext
    end
  end
end
|
lib/cloak/ciphers/aes_ctr.ex
| 0.857216
| 0.569823
|
aes_ctr.ex
|
starcoder
|
defmodule ExDiceRoller.RandomizedRolls do
@moduledoc """
Generates and executes randomized ExDiceRoller roll expressions via
`ExDiceRoller.ExpressionBuilder.randomize/2`.
"""
alias ExDiceRoller.{Compiler, ExpressionBuilder}
@type error_keyword :: [err: any, expr: String.t(), var_values: Keyword.t(), stacktrace: list]
@var_value_types [:number, :list, :expression, :function]
@timeout_error %RuntimeError{message: "roll task timed out"}
@doc """
Generates and rolls `num_expressions` number of expressions, where each
expression has a maximum nested depth of `max_depth`. Returns a list of
`t:error_keyword/0`.
A list of expected error messages can be passed in, preventing those errors
from being returned in the final error list.
"""
@spec run(integer, integer, list(String.t())) :: list(error_keyword)
def run(num_expressions, max_depth, known_errors) do
  # Rolls run as supervised tasks so a hung expression can be shut down
  # without killing this process (see execute_roll/5).
  {:ok, pid} = Task.Supervisor.start_link()

  Enum.reduce(1..num_expressions, [], fn _, acc ->
    do_run(pid, max_depth, known_errors, acc)
  end)
end

@doc "Handles processing errors generated while executing dice roll expressions."
@spec handle_error(any, list(String.t()), String.t(), Keyword.t(), list(any)) :: list(any)
# First clause: exceptions carrying a `:message` field are matched against
# the known-error message strings; known ones are dropped, unknown ones are
# prepended to the accumulator as an error keyword list.
def handle_error(%{message: msg} = err, known_errors, expr, args, acc) do
  case msg in known_errors do
    true -> acc
    false -> [create_error_list(err, expr, args)] ++ acc
  end
end

# FunctionClauseError has no meaningful message, so it is matched against
# `{module, function}` tuples in `known_errors` instead.
def handle_error(%FunctionClauseError{} = err, known_errors, expr, args, acc) do
  case {err.module, err.function} in known_errors do
    true -> acc
    false -> [create_error_list(err, expr, args)] ++ acc
  end
end
@doc """
Executes an individual role as its own supervised task. If the a roll task
takes too long to execute, it is shutdown and a timeout error added to the
provided `acc` accumulator. If any other error is encountered, it too is added
to the accumulator. Finally, the accumulator is returned.
"""
@spec execute_roll(pid, String.t(), Keyword.t(), list(String.t()), list) :: list
def execute_roll(supervisor, expr, args, known_errors, acc) do
task = Task.Supervisor.async_nolink(supervisor, roll_func(expr, args), trap_exit: true)
case Task.yield(task, 500) || Task.shutdown(task) do
{:ok, :ok} -> acc
{:ok, err} -> handle_error(err, known_errors, expr, args, acc)
nil -> handle_error(@timeout_error, known_errors, expr, args, acc)
end
end
# Builds a single randomized dice roll expression, generates random values
# for any variables it references, and executes it under the supervisor,
# accumulating any unexpected errors into `acc`.
@spec do_run(pid, integer, list(String.t()), list) :: list
defp do_run(pid, max_depth, known_errors, acc) do
  depth = Enum.random(1..max_depth)
  expr = ExpressionBuilder.randomize(depth, true)
  args = build_variable_values(expr, max_depth) ++ [opts: options()] ++ filters()
  execute_roll(pid, expr, args, known_errors, acc)
end
# Wraps a single roll into a zero-arity function suitable for running as a
# task. Returns :ok when the roll succeeds; deliberately rescues ALL
# exceptions and returns the exception value so the caller can classify it.
@spec roll_func(String.t(), Keyword.t()) :: :ok | any
defp roll_func(expr, args) do
  fn ->
    try do
      ExDiceRoller.roll(expr, args)
      :ok
    rescue
      error -> error
    end
  end
end
# Randomly decides (50/50) whether to apply a roll filter and, if so, picks
# one filter at random. Comparison filters get a random threshold in 1..10.
@spec filters() :: Keyword.t()
defp filters do
  if :rand.uniform(2) == 1 do
    []
  else
    Enum.random([
      [>=: :rand.uniform(10)],
      [<=: :rand.uniform(10)],
      [=: :rand.uniform(10)],
      [!=: :rand.uniform(10)],
      [>: :rand.uniform(10)],
      [<: :rand.uniform(10)],
      [drop_highest: true],
      [drop_lowest: true],
      [drop_highest_lowest: true]
    ])
  end
end
# Randomly decides (50/50) whether to apply roll options and, if so, draws a
# random-length sample (with possible repeats, then de-duplicated) from the
# known option atoms.
@spec options() :: Keyword.t()
defp options do
  if :rand.uniform(2) == 1 do
    []
  else
    choices = [:keep, :explode, :highest, :lowest]
    count = :rand.uniform(length(choices) + 2)

    Enum.uniq(for _ <- 1..count, do: Enum.random(choices))
  end
end
# Builds a keyword list describing a single roll failure (the error, the
# expression that triggered it, and the arguments it was rolled with).
# NOTE(review): System.stacktrace/0 is deprecated and is only meaningful
# inside a rescue/catch; here it returns the stacktrace of the *last*
# exception in this process, which may be unrelated to `err`. Consider
# capturing __STACKTRACE__ at the rescue site and passing it in.
@spec create_error_list(any, String.t(), Keyword.t()) :: Keyword.t()
defp create_error_list(err, expr, args) do
  [err: err, expr: expr, args: args, stacktrace: System.stacktrace()]
end
# Extracts every single-letter variable name from `expr` and generates a
# random value for each. The character class deliberately omits d/D, which
# denote dice notation (e.g. "3d6") rather than a variable.
@spec build_variable_values(String.t(), integer) :: Keyword.t()
defp build_variable_values(expr, max_depth) do
  var_names =
    ~r/[aAbBcCe-zE-Z]/
    |> Regex.scan(expr)
    |> List.flatten()
    |> Enum.uniq()

  generate_var_values(var_names, max_depth - 1)
end
# Pairs each variable name with a randomly-typed generated value.
# String.to_atom/1 is safe here: names are single letters from a bounded set.
@spec generate_var_values(list(String.t()), integer) :: Keyword.t()
defp generate_var_values(var_names, max_depth) do
  for name <- var_names do
    type = Enum.random(@var_value_types)
    {String.to_atom(name), do_generate_var_value(type, max_depth)}
  end
end
@spec do_generate_var_value(atom, integer) :: Compiler.compiled_val() | String.t()
# Depth exhausted: always bottom out with a plain number to stop recursion.
defp do_generate_var_value(_type, depth) when depth <= 0 do
  do_generate_var_value(:number, 1)
end

# Coin flip between a random integer and a random float.
defp do_generate_var_value(:number, _depth) do
  if :rand.uniform(2) == 1 do
    ExpressionBuilder.int()
  else
    ExpressionBuilder.float()
  end
end

# A list of 1..5 recursively generated values (depth shrinks by 2 per level).
defp do_generate_var_value(:list, max_depth) do
  for _ <- 1..:rand.uniform(5) do
    do_generate_var_value(Enum.random(@var_value_types), max_depth - 2)
  end
end

# A nested sub-expression of strictly smaller depth.
defp do_generate_var_value(:expression, max_depth) do
  ExpressionBuilder.randomize(Enum.random(1..(max_depth - 1)), false)
end

# A compiled roll function; elem/2 keeps the original tolerance of whatever
# tuple shape ExDiceRoller.compile/1 returns.
defp do_generate_var_value(:function, max_depth) do
  compiled = ExDiceRoller.compile(do_generate_var_value(:expression, max_depth - 1))
  elem(compiled, 1)
end
end
|
test/support/randomized_rolls.ex
| 0.767777
| 0.545891
|
randomized_rolls.ex
|
starcoder
|
defmodule LearnKit.Regression.Linear do
  @moduledoc """
  Module for Linear Regression algorithm
  """

  defstruct factors: [], results: [], coefficients: []

  alias LearnKit.Regression.Linear

  use Linear.Calculations
  use LearnKit.Regression.Score

  @type factors :: [number]
  @type results :: [number]
  @type coefficients :: [number]

  @doc """
  Creates predictor with empty data_set

  ## Examples

      iex> predictor = LearnKit.Regression.Linear.new
      %LearnKit.Regression.Linear{factors: [], results: [], coefficients: []}

  """
  @spec new() :: %Linear{factors: [], results: [], coefficients: []}
  def new, do: Linear.new([], [])

  @doc """
  Creates predictor with data_set

  ## Parameters

    - factors: Array of predictor variables
    - results: Array of criterion variables

  ## Examples

      iex> predictor = LearnKit.Regression.Linear.new([1, 2, 3, 4], [3, 6, 10, 15])
      %LearnKit.Regression.Linear{factors: [1, 2, 3, 4], results: [3, 6, 10, 15], coefficients: []}

  """
  @spec new(factors, results) :: %Linear{factors: factors, results: results, coefficients: []}
  def new(factors, results) when is_list(factors) and is_list(results) do
    %Linear{factors: factors, results: results}
  end

  @doc """
  Fit train data

  ## Parameters

    - predictor: %LearnKit.Regression.Linear{}
    - options: keyword list with options

  ## Options

    - method: method for fit, "least squares"/"gradient descent", default is "least squares", optional

  ## Examples

      iex> predictor = predictor |> LearnKit.Regression.Linear.fit
      %LearnKit.Regression.Linear{
        coefficients: [-1.5, 4.0],
        factors: [1, 2, 3, 4],
        results: [3, 6, 10, 15]
      }

      iex> predictor = predictor |> LearnKit.Regression.Linear.fit([method: "gradient descent"])
      %LearnKit.Regression.Linear{
        coefficients: [-1.4975720508482548, 3.9992148848913356],
        factors: [1, 2, 3, 4],
        results: [3, 6, 10, 15]
      }

  """
  # Spec fixed to include the optional `options` argument (fit/2 with default).
  @spec fit(%Linear{factors: factors, results: results}, keyword) :: %Linear{factors: factors, results: results, coefficients: coefficients}
  def fit(%Linear{} = linear, options \\ []) when is_list(options) do
    coefficients =
      [method: ""]
      |> Keyword.merge(options)
      |> define_method_for_fit()
      |> do_fit(linear)

    # Struct-update keeps factors/results and only replaces coefficients.
    %Linear{linear | coefficients: coefficients}
  end

  # Normalizes the :method option: only "gradient descent" is recognized;
  # anything else falls back to "" (least squares).
  defp define_method_for_fit(options) do
    case options[:method] do
      "gradient descent" -> "gradient descent"
      _ -> ""
    end
  end

  @doc """
  Predict using the linear model

  ## Parameters

    - predictor: %LearnKit.Regression.Linear{}
    - samples: Array of variables

  ## Examples

      iex> predictor |> LearnKit.Regression.Linear.predict([4, 8, 13])
      {:ok, [14.5, 30.5, 50.5]}

  """
  @spec predict(%Linear{coefficients: coefficients}, list) :: {:ok, list}
  def predict(linear = %Linear{coefficients: _}, samples) when is_list(samples) do
    {
      :ok,
      do_predict(linear, samples)
    }
  end

  @doc """
  Predict using the linear model

  ## Parameters

    - predictor: %LearnKit.Regression.Linear{}
    - sample: Sample variable

  ## Examples

      iex> predictor |> LearnKit.Regression.Linear.predict(4)
      {:ok, 14.5}

  """
  # Spec fixed: this clause takes a single numeric sample and returns a number,
  # not a list.
  @spec predict(%Linear{coefficients: coefficients}, number) :: {:ok, number}
  def predict(%Linear{coefficients: [alpha, beta]}, sample) do
    {:ok, sample * beta + alpha}
  end
end
|
lib/learn_kit/regression/linear.ex
| 0.945764
| 0.905322
|
linear.ex
|
starcoder
|
defmodule Elixush.PushState do
  @moduledoc "Keeps track of and modifies Push states and stacks."
  import Enum, only: [empty?: 1]
  import Elixush.Globals.Agent

  @doc "Returns an empty push state."
  def make_push_state do
    %{exec: [],
      code: [],
      integer: [],
      float: [],
      boolean: [],
      char: [],
      string: [],
      zip: [],
      vector_integer: [],
      vector_float: [],
      vector_boolean: [],
      vector_string: [],
      input: [],
      output: [],
      auxiliary: [],
      tag: [],
      return: [],
      environment: [],
      genome: []
    }
  end

  @doc """
  Adds the provided name to the global list of registered instructions.
  Raises `ArgumentError` if the name is already registered.
  """
  def register_instruction(name) do
    if MapSet.member?(get_globals(:registered_instructions), name) do
      raise(ArgumentError, message: "Duplicate Push instruction defined: #{name}")
    else
      update_globals(:registered_instructions, MapSet.put(get_globals(:registered_instructions), name))
    end
  end

  @doc "Registers `instruction` and stores its `definition` in the global instruction table."
  def define_registered(instruction, definition) do
    register_instruction(instruction)
    old_instruction_table = get_globals(:instruction_table)
    new_instruction_table = Map.put(old_instruction_table, instruction, definition)
    update_globals(:instruction_table, new_instruction_table)
  end

  @doc "Prints one `type = value` line per registered push type in `state`."
  def state_pretty_print(state) do
    Enum.each(get_globals(:push_types), fn(t) ->
      IO.puts "#{t} = #{Macro.to_string(Map.get(state, t))}"
    end)
  end

  @doc """
  Returns a copy of the state with the value pushed on the named stack. This is
  a utility, not for use in Push programs.
  """
  def push_item(value, type, state) do
    Map.put(state, type, List.insert_at(state[type], 0, value))
  end

  @doc """
  Returns the top item of the type stack in state. Returns :no_stack_item if
  called on an empty stack. This is a utility, not for use as an instruction
  in Push programs.
  """
  def top_item(type, state) do
    stack = state[type]
    if empty?(stack), do: :no_stack_item, else: List.first(stack)
  end

  @doc """
  Returns the indicated item of the type stack in state. Returns :no_stack_item
  if called on an empty stack. This is a utility, not for use as an instruction
  in Push programs. NOT SAFE for invalid positions.
  """
  @spec stack_ref(atom, integer, map) :: any
  def stack_ref(type, position, state) do
    stack = Map.get(state, type)
    if empty?(stack), do: :no_stack_item, else: Enum.at(stack, position)
  end

  @doc """
  Puts value at position on type stack in state. This is a utility, not for use
  as an instruction in Push programs. NOT SAFE for invalid positions.
  """
  # NOTE(review): List.insert_at/3 *inserts* at `position`, it does not replace
  # the existing element as the name (and Clojure's assoc) suggests — confirm
  # which semantics callers expect.
  def stack_assoc(value, type, position, state) do
    stack = Map.get(state, type)
    new_stack = List.insert_at(stack, position, value)
    Map.put(state, type, new_stack)
  end

  @doc """
  Returns a copy of the state with the specified stack popped. This is a
  utility, not for use as an instruction in Push programs.
  """
  @spec pop_item(atom, map) :: map
  def pop_item(type, state) do
    Map.put(state, type, state |> Map.get(type) |> tl)
  end

  @doc """
  Ends the current environment by popping the :environment stack and replacing
  all stacks with those on the environment stack. Then, everything on the old
  :return stack is pushed onto the :exec stack.
  """
  @spec end_environment(map) :: map
  def end_environment(state) do
    new_env = top_item(:environment, state)
    new_exec = Enum.concat(Map.get(state, :exec), Map.get(new_env, :exec))
    # Emulates Clojure's loop/recur: walk the old :return stack and push each
    # item onto :exec of the restored environment until the stack is empty.
    # Fixed: the previous guard `when hd(old_return)` returned early on the
    # first truthy head and crashed in tl/1 on an empty return stack.
    loop = fn
      _f, [], new_state ->
        new_state

      f, [top | rest], new_state ->
        f.(f, rest, push_item(top, :exec, new_state))
    end
    loop.(loop, Map.get(state, :return), Map.merge(new_env, %{:exec => new_exec, :auxiliary => Map.get(state, :auxiliary)}))
  end

  @doc """
  Returns a list of all registered instructions with the given type name
  as a prefix.
  """
  def registered_for_type(type, args \\ []) do
    # Truthiness is what matters here; absent key behaves like `false`.
    include_randoms = Keyword.get(args, :include_randoms, false)
    for_type = Enum.filter(get_globals(:registered_instructions), &(String.starts_with?(Atom.to_string(&1), Atom.to_string(type))))
    if include_randoms do
      for_type
    else
      Enum.filter(for_type, &(not(String.ends_with?(Atom.to_string(&1), "_rand"))))
    end
  end

  @doc """
  Returns a list of all registered instructions aside from random instructions.
  """
  @spec registered_nonrandom :: Enum.t
  def registered_nonrandom do
    :registered_instructions
    |> get_globals
    |> Enum.filter(&(not(String.ends_with?(Atom.to_string(&1), "_rand"))))
  end

  @doc """
  Takes a list of stacks and returns all instructions that have all
  of their stack requirements fulfilled. This won't include random instructions
  unless :random is in the types list. This won't include parenthesis-altering
  instructions unless :parentheses is in the types list.
  """
  # NOTE(review): currently a stub — returns the input unchanged and performs
  # no filtering, contrary to the doc above.
  @spec registered_for_stacks(Enum.t) :: Enum.t
  def registered_for_stacks(types_list) do
    types_list
  end

  @doc """
  Takes a map of stack names and entire stack states, and returns a new
  push-state with those stacks set.
  """
  @spec push_state_from_stacks(map) :: map
  def push_state_from_stacks(stack_assignments) do
    Map.merge(make_push_state, stack_assignments)
  end
end
|
lib/pushstate.ex
| 0.708918
| 0.493531
|
pushstate.ex
|
starcoder
|
defmodule Astarte.Flow.Blocks.HttpSink do
  @moduledoc """
  This is a consumer block that takes `data` from incoming `Message` and makes a POST request
  to the configured URL containing `data`. This block supports only incoming messages with type `:binary`,
  so serialization to binary format must be handled in a separate block before the message arrives here.
  The `subtype` of the message, if present, is added as `Content-Type` header.
  Additionally, static headers can be added to the POST requests with the initial configuration.
  For the time being, the delivery is best-effort (i.e. if a message is not delivered, it is discarded).
  """

  use GenStage

  require Logger

  alias Astarte.Flow.Message

  defmodule Config do
    @moduledoc false

    @type t() :: %__MODULE__{
            client: Tesla.Client.t()
          }

    defstruct [
      :client
    ]
  end

  @doc """
  Starts the `HttpSink`.

  ## Options

    * `:url` (required) - The target URL for the POST request.
    * `:headers` - A list of `{key, value}` tuples where `key` and `value` are `String` and represent
      headers to be set in the POST request.
    * `:ignore_ssl_errors` - If `true`, ignore SSL errors that happen while performing the request.
      Defaults to `false`.
  """
  # Spec now also covers the documented :ignore_ssl_errors option.
  @spec start_link(options) :: GenServer.on_start()
        when options: [option],
             option:
               {:url, url :: String.t()}
               | {:headers, headers :: [{String.t(), String.t()}]}
               | {:ignore_ssl_errors, boolean()}
  def start_link(opts) do
    GenStage.start_link(__MODULE__, opts)
  end

  # Callbacks

  @impl true
  def init(opts) do
    url = Keyword.fetch!(opts, :url)
    headers = Keyword.get(opts, :headers, [])

    with :ok <- validate_headers(headers) do
      client = build_client(url, opts)
      {:consumer, %Config{client: client}}
    else
      {:error, reason} ->
        {:stop, reason}
    end
  end

  @impl true
  def handle_events(events, _from, %Config{client: client} = config) do
    # Only :binary messages are posted; other types are silently skipped.
    for %Message{data: data, type: :binary, subtype: subtype} <- events do
      # Fixed: subtype is optional and may be nil; String.valid?/1 raises
      # FunctionClauseError on non-binary input, so guard with is_binary/1.
      opts =
        if is_binary(subtype) and String.valid?(subtype) do
          [headers: [{"content-type", subtype}]]
        else
          []
        end

      # Best-effort delivery: the result is ignored, failures are only logged.
      _ = post(client, data, opts)
    end

    {:noreply, [], config}
  end

  # POSTs `data` to the client's base URL; any status < 400 counts as success.
  defp post(client, data, opts) do
    case Tesla.post(client, "/", data, opts) do
      {:ok, %{status: status}} when status < 400 ->
        :ok

      {:ok, %{status: status, body: body}} ->
        _ =
          Logger.warn("HttpSink received error status",
            status: status,
            body: body
          )

        {:error, :http_error_response}

      {:error, reason} ->
        _ = Logger.warn("HttpSink cannot make POST request", reason: reason)

        {:error, :request_failed}
    end
  end

  # Headers must be a list of {binary, binary} tuples.
  defp validate_headers([]) do
    :ok
  end

  defp validate_headers([{key, value} | tail]) when is_binary(key) and is_binary(value) do
    validate_headers(tail)
  end

  defp validate_headers(_) do
    {:error, :invalid_headers}
  end

  defp build_client(url, opts) do
    headers = Keyword.get(opts, :headers, [])

    middleware = [
      {Tesla.Middleware.BaseUrl, url},
      {Tesla.Middleware.Headers, headers}
    ]

    if Keyword.get(opts, :ignore_ssl_errors) do
      # Build adapter with insecure SSL to ignore SSL errors
      adapter_opts = [insecure: true]
      adapter = {Tesla.Adapter.Hackney, adapter_opts}

      Tesla.client(middleware, adapter)
    else
      # Use default adapter
      Tesla.client(middleware)
    end
  end
end
|
lib/astarte_flow/blocks/http_sink.ex
| 0.895177
| 0.506897
|
http_sink.ex
|
starcoder
|
defmodule ExLTTB.Stream do
  @moduledoc """
  ExLTTB with lazy evaluation
  """

  alias ExLTTB.SampleUtils

  @doc """
  Downsamples a samples stream using a modified version of [LTTB](https://skemman.is/bitstream/1946/15343/3/SS_MSthesis.pdf).
  The difference with the original algorithm is that since a Stream could be infinite, it doesn't make sense to define the number of output buckets (threshold). Instead, the parameter here is the average bucket size (≈ downsample rate, see Return).

  ## Arguments
  * `samples_stream`: a `Stream` of samples. These can have any representation provided that access functions are provided (see Options). The stream is assumed to be sorted by the `x` coordinate.
  * `avg_bucket_size`: the average size of a single bucket. The first (and if the Stream is not infinite, also the last) bucket has only one sample like in the original algorithm
  * `opts`: a keyword list of options.

  ## Options
  * `sample_to_x_fun`: a function that takes as argument a sample and returns its x coordinate. Defaults to `sample[:x]`
  * `sample_to_y_fun`: a function that takes as argument a sample and returns its y coordinate. Defaults to `sample[:y]`
  * `xy_to_sample_fun`: a function that takes as argument `x` and `y` and returns a sample with these coordinates. Defaults to `%{x: x, y: y}`

  ## Return
  A downsampled `Stream`. If the starting `samples_stream` has a limited length L, than the length of the returned stream length is `N <= 2 + ceil(L / avg_bucket_size)`.
  """
  def lttb(samples_stream, avg_bucket_size, opts \\ [])

  # Bucket size 1 means no downsampling at all (1 == 1.0 under ==).
  def lttb(samples_stream, avg_bucket_size, _opts) when avg_bucket_size == 1.0 do
    samples_stream
  end

  def lttb(samples_stream, avg_bucket_size, opts) when avg_bucket_size > 1 do
    make_buckets(samples_stream, avg_bucket_size, opts)
    |> select_samples(opts)
  end

  # Normalize integer bucket sizes to floats so the fractional `avg_acc`
  # bookkeeping below behaves uniformly.
  defp make_buckets(samples_stream, avg_bucket_size, opts) when is_integer(avg_bucket_size) do
    make_buckets(samples_stream, avg_bucket_size / 1, opts)
  end

  defp make_buckets(samples_stream, avg_bucket_size, opts) do
    # chunk_fun and after_fun use these
    # Accumulator: {curr_idx, avg_acc, next_bucket_acc, ready_bucket}
    # Emitted data: a Stream of [{[sample | samples] = bucket, next_bucket_avg_sample}]
    # This way in the next step we can choose the candidate sample with a single Stream element
    # The extra list wrapping of the elements is needed to be able to emit multiple chunks in after_fun, flat_mapping afterwards
    chunk_fun = fn
      # First element ever seen: it becomes the (single-sample) first bucket.
      first_sample, {0, 0, [], []} ->
        {:cont, {1, avg_bucket_size, [], [first_sample]}}

      sample, {current_index, avg_acc, bucket_acc, ready_bucket} ->
        next_index = current_index + 1

        # Crossing the fractional bucket boundary closes the current bucket:
        # emit the previous ready bucket paired with the new bucket's average
        # sample (LTTB needs the *next* bucket's average as the third triangle
        # vertex), then start accumulating a fresh bucket.
        if current_index > avg_acc do
          new_ready_bucket = [sample | bucket_acc]
          new_ready_bucket_avg_sample = SampleUtils.average_sample(new_ready_bucket, opts)

          {:cont, [{ready_bucket, new_ready_bucket_avg_sample}],
           {next_index, avg_acc + avg_bucket_size, [], new_ready_bucket}}
        else
          {:cont, {next_index, avg_acc, [sample | bucket_acc], ready_bucket}}
        end
    end

    # Flush the leftover accumulator when the stream ends; the very last
    # sample always forms its own single-sample bucket (paired with nil, which
    # select_samples uses to recognize the final element).
    after_fun = fn
      {_current_index, _avg_acc, [], [last_sample | []]} ->
        {:cont, [{[last_sample], nil}], []}

      {_current_index, _avg_acc, [], [last_sample | ready_bucket_tail]} ->
        {:cont, [{ready_bucket_tail, last_sample}, {[last_sample], nil}], []}

      {_current_index, _avg_acc, [last_sample | []], ready_bucket} ->
        {:cont, [{ready_bucket, last_sample}, {[last_sample], nil}], []}

      {_current_index, _avg_acc, [last_sample | last_bucket], ready_bucket} ->
        last_bucket_avg_sample = SampleUtils.average_sample(last_bucket, opts)

        {:cont,
         [
           {ready_bucket, last_bucket_avg_sample},
           {last_bucket, last_sample},
           {[last_sample], nil}
         ], []}
    end

    Stream.chunk_while(samples_stream, {0, 0, [], []}, chunk_fun, after_fun)
    |> Stream.flat_map(fn x -> x end)
  end

  defp select_samples(samples_stream, opts) do
    # Accumulator is the previously selected sample (nil before the first).
    Stream.transform(samples_stream, nil, fn
      # First bucket: its single sample is always kept.
      {[first_sample | []], _next_samples_avg_sample}, nil ->
        {[first_sample], first_sample}

      # Last bucket (marked by the nil average): its sample is always kept.
      {[last_sample | []], nil}, _prev_sample ->
        {[last_sample], last_sample}

      # Middle bucket: keep the candidate forming the largest triangle with
      # the previously selected sample and the next bucket's average sample.
      {[initial_candidate | candidates_tail], next_samples_avg}, prev_sample ->
        initial_area =
          SampleUtils.triangle_area(prev_sample, initial_candidate, next_samples_avg, opts)

        {selected_sample, _selected_area} =
          Enum.reduce(candidates_tail, {initial_candidate, initial_area}, fn candidate_sample,
                                                                            {best_sample,
                                                                             best_area} ->
            candidate_area =
              SampleUtils.triangle_area(prev_sample, candidate_sample, next_samples_avg, opts)

            if candidate_area > best_area do
              {candidate_sample, candidate_area}
            else
              {best_sample, best_area}
            end
          end)

        {[selected_sample], selected_sample}
    end)
  end
end
|
lib/ex_lttb/stream.ex
| 0.872944
| 0.776623
|
stream.ex
|
starcoder
|
defmodule StateMachine do
  @moduledoc """
  StateMachine package implements state machine abstraction.
  It supports Ecto out of the box and can work as both
  data structure and a process powered by gen_statem.
  Check out the [article](https://dev.to/youroff/state-machines-for-business-np8) for motivation.
  Here's an example of a simple state machine created with this package:
      defmodule Cat do
        use StateMachine
        defstruct [:name, :state, hungry: true]
        defmachine field: :state do
          state :asleep
          state :awake
          state :playing
          state :eating, after_enter: &Cat.feed_up/1
          event :wake do
            transition from: :asleep, to: :awake
          end
          event :give_a_mouse do
            transition from: :awake, to: :playing, unless: &Cat.hungry/1
            transition from: :awake, to: :eating, if: &Cat.hungry/1
            transition from: :playing, to: :eating
          end
          event :pet do
            transition from: [:eating, :awake], to: :playing
          end
          event :sing_a_lullaby do
            transition from: :awake, to: :asleep
            transition from: :playing, to: :asleep
          end
        end
        def hungry(cat) do
          cat.hungry
        end
        def feed_up(cat) do
          {:ok, %{cat | hungry: false}}
        end
      end
  And later use it like this:
      cat = %Cat{name: "Thomas", state: :asleep}
      {:ok, %Cat{state: :awake}} = Cat.trigger(cat, :wake)
  ## Features
  * Validation of state machine definition at compile time
  * Full support for callbacks (on states, events and transitions) and guards (on events and transitions)
  * Optional payload can be supplied with the event
  * One-line conversion to a state machine as a process (powered by gen_statem)
  * With Ecto support activated every transition is wrapped in transaction
  * With Ecto support activated the Ecto.Type implementation is generated automatically
  """
  alias StateMachine.{State, Event, Context}

  # The machine definition itself: states and events by name, the struct
  # field holding the current state, and the getter/setter used to read and
  # write that field on the wrapped model.
  @type t(m) :: %__MODULE__{
    states: %{optional(atom) => State.t(m)},
    events: %{optional(atom) => Event.t(m)},
    field: atom(),
    state_getter: (Context.t(m) -> atom()),
    state_setter: (Context.t(m), atom() -> Context.t(m)),
    misc: keyword()
  }

  defstruct states: %{},
            events: %{},
            field: :state,
            state_getter: &State.get/1,
            state_setter: &State.set/2,
            misc: []

  # `use StateMachine` pulls the DSL macros into the caller and registers the
  # after-compile hook below, which enforces that `defmachine` was used.
  defmacro __using__(_) do
    quote do
      import StateMachine.DSL
      alias StateMachine.Introspection
      @after_compile StateMachine
    end
  end

  # Compile-time check: any module that `use`s StateMachine must actually
  # define a machine (defmachine generates __state_machine__/0).
  def __after_compile__(env, _) do
    unless function_exported?(env.module, :__state_machine__, 0) do
      raise CompileError, file: env.file, description: "Define state machine using `defmachine` macro"
    end
  end
end
|
lib/state_machine.ex
| 0.792705
| 0.69022
|
state_machine.ex
|
starcoder
|
defmodule Junex do
  @moduledoc """
  Junex is a library that helps you interact with the Juno API in an easier way!

  ## WARNINGS
  1. Although you can build the maps manually, like `charge_info` and `payment_billing_info`,
  Junex provides a bunch of helper functions to build exactly the structure that the Juno API requests, so
  consider using them!
  2. All main functions receive as last param an atom that could be `:prod` or `:sandbox`
  3. The `create_client` and `get_access_token` functions can also be called with config from `config.exs`

  ## Config
  You can provide config information for Junex in three ways:
  1. On `config.exs` config file
  2. Calling from code, with `Junex.configure/1 or /2`
  3. Providing manually all configs
  The available configs are:
  1. `client_id`
  2. `client_secret`
  3. `resource_token`
  4. `mode`
  Example config on `config.exs`:
      config :junex, :tokens,
        client_id: System.get_env("CLIENT_ID"),
        client_secret: System.get_env("CLIENT_SECRET"),
        resource_token: System.get_env("RESOURCE_TOKEN"),
        mode: :prod

  ## Example of use
  As an example, see how you could create a charge and a payment:
  First, you need an `access_token`, to get one, you need to have a `client_id` and `client_secret` pair.
  You can generate one for production or sandbox on the Juno's Integration screen.
  After that:
      defmodule MyApp.Payment do
        def jwt_token(client_id, client_secret) do
          token_params = [
            client_id: System.get_env("CLIENT_ID"),
            client_secret: System.get_env("CLIENT_SECRET"),
            mode: :sandbox
          ]
          case Junex.get_access_token(token_params) do
            {:ok, token} ->
              token
            {:error, error} ->
              {:error, error}
          end
        end
      end
  Now that you have an `access_token`, you can make other requests! Let's create a charge now:
  For this, you need first to create a client, providing the `access_token` and also the `resource_token`, that
  is the `Private Token` that you also can generate on the Integration screen.
      defmodule MyApp.Payment do
        def charges do
          with {:ok, client} <- Junex.create_client/2 or /1,
              {:ok, charge_info} <- Junex.get_charge_info(params),
              {:ok, charge_billing_info} <- Junex.get_charge_billing_info(params),
              {:ok, charges} <-
                Junex.create_charges(client, charge_info: charge_info, charge_billing_info: charge_billing_info) do
            charges
          else
            {:error, error} ->
              {:error, error}
          end
        end
      end
  Ok, charges created and returned as a list, so, if the `payment_type` was `:credit_card`, you can
  generate the payment in sequence
      defmodule MyApp.Payment do
        def payment do
          with {:ok, card_info} <- Junex.get_card_info(params),
              {:ok, payment_billing_info} <- Junex.get_payment_billing_info(params),
              {:ok, params} <-
                Junex.get_payment_info(card_info: card_info, payment_billing_info: payment_billing_info),
              payment_results <- charges |> Task.async_stream(&do_payment(&1, params)) do
            payment_results
          else
            error ->
              error
          end
        end
        def do_payment(charge) do
          case Junex.create_payment(client, payment_info: payment_info, mode: :sandbox) do
            {:ok, payment} ->
              payment
            {:error, error} ->
              {:error, error}
          end
        end
      end
  """

  # ----------- Junex Settings -----------

  @doc """
  Provides configuration settings for accessing Juno server.
  The specified configuration applies globally. Use `Junex.configure/2`
  for setting different configurations on each process.

  ## Example
      Junex.configure(
        client_id: System.get_env("CLIENT_ID"),
        client_secret: System.get_env("CLIENT_SECRET"),
        mode: System.get_env("JUNO_MODE")
      )
  """
  defdelegate configure(tokens), to: Junex.Config, as: :set

  @doc """
  Provides configuration settings for accessing Juno server.

  ## Options
  The `scope` can have one of the following values.
  * `:global` - configuration is shared for all processes.
  * `:process` - configuration is isolated for each process.

  ## Example
      Junex.configure(
        :global,
        client_id: System.get_env("CLIENT_ID"),
        client_secret: System.get_env("CLIENT_SECRET"),
        mode: System.get_env("JUNO_MODE")
      )
  """
  defdelegate configure(scope, tokens), to: Junex.Config, as: :set

  @doc """
  Returns current Junex configuration settings for accessing Juno server.
  """
  defdelegate configure, to: Junex.Config, as: :get

  # ----------- Junex Client -----------

  @doc """
  Returns a new client to perform other requests!

  ## Params
  - access_token: Got from Junex.Auth.get_access_token
  - resource_token: You can generate one on your Juno's account, is the "Private Token"

  ## Examples
      Junex.Client.create(
        System.get_env("ACCESS_TOKEN"),
        System.get_env("RESOURCE_TOKEN")
      )
  """
  defdelegate create_client(access_token, resource_token), to: Junex.Client, as: :create

  @doc """
  Same as `Junex.create_client/2`, however uses config from `config.exs`

  ## Params
  - access_token: Got from Junex.get_access_token/1 or /0

  ## Examples
      Junex.create_client(access_token)
  """
  defdelegate create_client(access_token), to: Junex.Client, as: :create

  # ----------- Junex Charges -----------

  @doc """
  Returns a charge_info map to be used on Junex.create_charges/2

  ## Example
      Junex.get_charge_info(
        description: "description",
        amount: 123,
        installments: 2,
        payment_type: :boleto
      )
  """
  defdelegate get_charge_info(values), to: Junex.API.Charge, as: :get_charge_info

  @doc """
  Returns a new charge_billing_info map to be used on Junex.create_charges/2

  ## Example
      Junex.get_charge_billing_info(
        name: "name",
        document: "document",
        email: "email",
        phone: "phone"
      )
  """
  defdelegate get_charge_billing_info(values), to: Junex.API.Charge, as: :get_charge_billing_info

  @doc """
  Creates and returns a new charge

  ## Parameters
  - client: Got from Junex.create_client/1
  - charge_info: Built manually or generated with Junex.get_charge_info/1
  - billing: Built manually or generated with Junex.get_charge_billing_info/1
  - mode: :prod | :sandbox

  ## Example
      Junex.create_charges(
        Junex.create_client(params),
        Map.merge(Junex.get_charge_info(), Junex.get_charge_billing_info())
      )
  """
  defdelegate create_charges(client, values), to: Junex.API.Charge, as: :create_charges

  @doc """
  Returns the latest charge status

  ## Parameters
  - client: Got from Junex.create_client/1
  - charge_id: One of the results of Junex.create_charges/2
  - mode: :prod | :sandbox

  ## Example
      Junex.check_charge_status(
        Junex.create_client(params),
        client_id: "client_id",
        mode: :sandbox
      )
  """
  defdelegate check_charge_status(client, values), to: Junex.API.Charge, as: :check_charge_status

  # ----------- Junex Account -----------

  @doc """
  Lists all possible banks for Juno transfers

  ## Parameters
  - client: from Junex.create_client/1
  - mode: :prod | :sandbox

  ## Examples
      Junex.list_banks(Junex.create_client(), :sandbox)
  """
  defdelegate list_banks(client, values), to: Junex.API.Account, as: :list_banks

  @doc """
  Returns your current balance!

  ## Parameters
  - client: Got from Junex.create_client/1
  - mode: :prod | :sandbox

  ## Examples
      Junex.get_balance(Junex.create_client(), :sandbox)
  """
  defdelegate get_balance(client, values), to: Junex.API.Account, as: :get_balance

  # ----------- Junex Payment -----------

  @doc """
  Returns a payment_billing_info map to use on Junex.get_payment_info/1

  ## Examples
      Junex.get_payment_billing_info(
        email: "email",
        street: "street",
        st_number: 12,
        city: "city",
        state: "state",
        complement: "complement",
        post_code: "post_code"
      )
  """
  defdelegate get_payment_billing_info(values),
    to: Junex.API.Payment,
    as: :get_payment_billing_info

  @doc """
  Returns a payment_info map to be used on Junex.create_payment/2

  ## Parameters
  - charge_id: Result of one of the entries of Junex.create_charges/2
  - card_info: Built manually or got from Junex.get_card_info/1
  - payment_billing_info: Built manually or got from Junex.get_payment_billing_info/1

  ## Example
      Junex.get_payment_info(
        charge_id: "charge_id",
        card_info: Junex.get_card_info(params),
        payment_billing_info: Junex.get_payment_billing_info(params)
      )
  """
  defdelegate get_payment_info(values), to: Junex.API.Payment, as: :get_payment_info

  @doc """
  Creates and returns a new Payment

  ## Parameters
  - client: Got from Junex.create_client/1
  - payment_info: Built manually or got from Junex.get_payment_info/1
  - mode: :prod | :sandbox

  ## Example
      Junex.create_payment(
        Junex.create_client(params),
        payment_info: Junex.get_payment_info(params),
        mode: :sandbox
      )
  """
  defdelegate create_payment(client, values), to: Junex.API.Payment, as: :create_payment

  @doc """
  Returns a card_info map to use on Junex.get_payment_info/1
  """
  defdelegate get_card_info(values), to: Junex.API.Payment, as: :get_card_info

  # ----------- Junex Auth -----------

  @doc """
  Returns an access_token to be used on other Junex requests.
  You can get the client_id and client_secret on the Integration section
  on your Juno account and generate the pair!

  ## Parameters
  - client_id: string
  - client_secret: string
  - mode: :prod | :sandbox

  ## Examples
      Junex.Auth.get_access_token(client_id: "client_id", client_secret: "client_secret", mode: :mode)
  """
  defdelegate get_access_token(values), to: Junex.Auth, as: :get_access_token

  @doc """
  Same as Junex.get_access_token/1, however, uses config from `config.exs`
  """
  defdelegate get_access_token, to: Junex.Auth, as: :get_access_token
end
|
lib/junex.ex
| 0.860925
| 0.477189
|
junex.ex
|
starcoder
|
defmodule StarkInfra.IssuingWithdrawal do
  @moduledoc """
  # IssuingWithdrawal struct

  The IssuingWithdrawal structs created in your Workspace return cash from your
  Issuing balance to your Banking balance.

  ## Parameters (required):
    - `:amount` [integer]: IssuingWithdrawal value in cents. Minimum = 0 (any value will be accepted). ex: 1234 (= R$ 12.34)
    - `:external_id` [string]: IssuingWithdrawal external ID. ex: "12345"
    - `:description` [string]: IssuingWithdrawal description. ex: "sending money back"

  ## Parameters (optional):
    - `:tags` [list of strings, default []]: list of strings for tagging. ex: ["tony", "stark"]

  ## Attributes (return-only):
    - `:id` [string]: unique id returned when IssuingWithdrawal is created. ex: "5656565656565656"
    - `:transaction_id` [string]: Stark Infra ledger transaction ids linked to this IssuingWithdrawal
    - `:issuing_transaction_id` [string]: issuing ledger transaction ids linked to this IssuingWithdrawal
    - `:updated` [DateTime]: latest update DateTime for the IssuingWithdrawal. ex: ~U[2020-03-10 10:30:00Z]
    - `:created` [DateTime]: creation datetime for the IssuingWithdrawal. ex: ~U[2020-03-10 10:30:00Z]
  """
  # NOTE(review): the struct documentation above used to live in a stray @doc
  # between @moduledoc and @enforce_keys; it was silently discarded (overwritten
  # by the next @doc, with a compiler warning). Merged into @moduledoc.

  alias __MODULE__, as: IssuingWithdrawal
  alias StarkInfra.Error
  alias StarkInfra.Utils.Rest
  alias StarkInfra.Utils.Check
  alias StarkInfra.User.Project
  alias StarkInfra.User.Organization

  @enforce_keys [
    :amount,
    :external_id,
    :description
  ]
  defstruct [
    :amount,
    :external_id,
    :description,
    :tags,
    :id,
    :transaction_id,
    :issuing_transaction_id,
    :updated,
    :created
  ]

  @type t() :: %__MODULE__{}

  @doc """
  Send an IssuingWithdrawal struct for creation in the Stark Infra API.

  ## Parameters (required):
    - `withdrawal` [IssuingWithdrawal struct]: IssuingWithdrawal struct to be created in the API.

  ## Options:
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - IssuingWithdrawal struct with updated attributes
  """
  @spec create(IssuingWithdrawal.t(), Keyword.t()) ::
          {:ok, IssuingWithdrawal.t()} |
          {:error, [Error.t()]}
  def create(withdrawal, options \\ []) do
    Rest.post_single(
      resource(),
      withdrawal,
      options
    )
  end

  @doc """
  Same as create(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec create!(IssuingWithdrawal.t(), Keyword.t()) :: any
  def create!(withdrawal, options \\ []) do
    Rest.post_single!(
      resource(),
      withdrawal,
      options
    )
  end

  @doc """
  Receive a single IssuingWithdrawal struct previously created in the Stark Infra API by its id.

  ## Parameters (required):
    - `id` [string]: struct unique id. ex: "5656565656565656"

  ## Options:
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - IssuingWithdrawal struct with updated attributes
  """
  @spec get(binary, Keyword.t()) ::
          {:ok, IssuingWithdrawal.t()} |
          {:error, [Error.t()]}
  def get(id, options \\ []) do
    Rest.get_id(
      resource(),
      id,
      options
    )
  end

  @doc """
  Same as get(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec get!(binary, Keyword.t()) :: any
  def get!(id, options \\ []) do
    Rest.get_id!(
      resource(),
      id,
      options
    )
  end

  @doc """
  Receive a stream of IssuingWithdrawal structs previously created in the Stark Infra API.

  ## Options:
    - `:limit` [integer, default nil]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
    - `:external_ids` [list of strings, default []]: external IDs. ex: ["5656565656565656", "4545454545454545"]
    - `:after` [Date or string, default nil]: date filter for structs created only after specified date. ex: ~D[2020-03-25]
    - `:before` [Date or string, default nil]: date filter for structs created only before specified date. ex: ~D[2020-03-25]
    - `:tags` [list of strings, default nil]: tags to filter retrieved structs. ex: ["tony", "stark"]
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - stream of IssuingWithdrawal structs with updated attributes
  """
  @spec query(Keyword.t()) ::
          {:ok, [IssuingWithdrawal.t()]} |
          {:error, [Error.t()]}
  def query(options \\ []) do
    Rest.get_list(
      resource(),
      options
    )
  end

  @doc """
  Same as query(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec query!(Keyword.t()) :: any
  def query!(options \\ []) do
    Rest.get_list!(
      resource(),
      options
    )
  end

  @doc """
  Receive a list of IssuingWithdrawal structs previously created in the Stark Infra API and the cursor to the next page.

  ## Options:
    - `:cursor` [string, default nil]: cursor returned on the previous page function call
    - `:limit` [integer, default 100]: maximum number of structs to be retrieved. Unlimited if nil. ex: 35
    - `:external_ids` [list of strings, default []]: external IDs. ex: ["5656565656565656", "4545454545454545"]
    - `:after` [Date or string, default nil]: date filter for structs created only after specified date. ex: ~D[2020-03-25]
    - `:before` [Date or string, default nil]: date filter for structs created only before specified date. ex: ~D[2020-03-25]
    - `:tags` [list of strings, default nil]: tags to filter retrieved structs. ex: ["tony", "stark"]
    - `:user` [Organization/Project, default nil]: Organization or Project struct returned from StarkInfra.project(). Only necessary if default project or organization has not been set in configs.

  ## Return:
    - list of IssuingWithdrawal structs with updated attributes
    - cursor to retrieve the next page of IssuingWithdrawal structs
  """
  @spec page(Keyword.t()) ::
          {:ok, [IssuingWithdrawal.t()], binary} |
          {:error, [Error.t()]}
  def page(options \\ []) do
    Rest.get_page(
      resource(),
      options
    )
  end

  @doc """
  Same as page(), but it will unwrap the error tuple and raise in case of errors.
  """
  @spec page!(Keyword.t()) :: any
  def page!(options \\ []) do
    Rest.get_page!(
      resource(),
      options
    )
  end

  # API resource descriptor: name used in URLs/payloads plus the decoder
  # that turns a response map into an IssuingWithdrawal struct.
  @doc false
  def resource() do
    {
      "IssuingWithdrawal",
      &resource_maker/1
    }
  end

  # Builds a struct from a decoded JSON map (atom keys); datetime fields are
  # normalized through Check.datetime/1.
  @doc false
  def resource_maker(json) do
    %IssuingWithdrawal{
      amount: json[:amount],
      external_id: json[:external_id],
      description: json[:description],
      tags: json[:tags],
      id: json[:id],
      transaction_id: json[:transaction_id],
      issuing_transaction_id: json[:issuing_transaction_id],
      updated: json[:updated] |> Check.datetime(),
      created: json[:created] |> Check.datetime()
    }
  end
end
|
lib/issuing_withdrawal/issuing_withdrawal.ex
| 0.863708
| 0.554139
|
issuing_withdrawal.ex
|
starcoder
|
defmodule Construct do
  @moduledoc """
  Construct internally divided into three components:
  * `Construct` — defining structures;
  * `Construct.Cast` — making structure instances;
  * `Construct.Type` — type-coercion and custom type behaviour.
  ## Construct definition
      defmodule StructureName do
        use Construct, struct_opts
        structure do
          include AnotherStructure
          field name, type, options
        end
      end
  `struct_opts` is options passed to `c:make/2` and `c:make!/2`, described in `Construct.Cast.make/3`.
  When you type `use Construct` — library bootstrapped few functions with `Construct` behaviour:
  * `c:make/2` — just an alias to `Construct.Cast.make/3`;
  * `c:make!/2` — alias to `c:make/2` but throws `Construct.MakeError` exception if provided params are invalid;
  * `c:cast/2` — alias to `c:make/2` too, for follow `Construct.Type` behaviour and use defined structure as type.
  """

  @type t :: struct

  # Name of the compile-time Agent that records every module defined through
  # Construct, so nested/complex types can be recognized during compilation.
  @type_checker_name Construct.TypeRegistry

  # Sentinel meaning "no default value was provided" for a field.
  @no_default :__construct_no_default__

  # elixir 1.9.0 do not raise deadlocks for Code.ensure_compiled/1
  @no_raise_on_deadlocks Version.compare(System.version(), "1.9.0") != :lt

  @doc false
  defmacro __using__(opts \\ [])

  # `use Construct, %{field: type, ...}` — derive the structure body from a
  # map literal of types.
  defmacro __using__({:%{}, _, _} = types) do
    quote do
      use Construct do
        unquote(__ast_from_types__(types))
      end
    end
  end

  defmacro __using__(opts) when is_list(opts) do
    # A `do:` block in opts is an inline structure definition; otherwise only
    # `structure/1` is imported so the structure can be declared later.
    {definition, opts} = Keyword.pop(opts, :do)

    pre_ast =
      if definition do
        defstructure(definition)
      else
        quote do
          import Construct, only: [structure: 1]
        end
      end

    quote do
      @behaviour Construct
      @construct_opts unquote(opts)

      unquote(pre_ast)

      # Options given to `use Construct` are merged into every make/2 call.
      def make(params \\ %{}, opts \\ []) do
        Construct.Cast.make(__MODULE__, params, Keyword.merge(opts, unquote(opts)))
      end

      def make!(params \\ %{}, opts \\ []) do
        case make(params, opts) do
          {:ok, structure} -> structure
          {:error, reason} -> raise Construct.MakeError, %{reason: reason, params: params}
        end
      end

      def cast(params, opts \\ []) do
        make(params, opts)
      end

      defoverridable make: 2
    end
  end

  @doc """
  Defines a structure.
  """
  defmacro structure([do: block]) do
    defstructure(block)
  end

  # Expands to the module body that registers the module in the type registry,
  # accumulates field definitions from `block` into module attributes, and
  # finally generates defstruct, __construct__(:types) and @type t from them.
  defp defstructure(block) do
    quote do
      import Construct

      Construct.__ensure_type_checker_started__()
      Construct.__register_as_complex_type__(__MODULE__)

      Module.register_attribute(__MODULE__, :fields, accumulate: true)
      Module.register_attribute(__MODULE__, :construct_fields, accumulate: true)
      Module.register_attribute(__MODULE__, :construct_fields_enforce, accumulate: true)

      unquote(block)

      Module.eval_quoted __ENV__, {:__block__, [], [
        Construct.__defstruct__(@construct_fields, @construct_fields_enforce),
        Construct.__types__(@fields),
        Construct.__typespecs__(@fields)]}
    end
  end

  @doc """
  Includes provided structure and checks definition for validity at compile-time.
  ## Options
  * `:only` - (integer) specify fields that should be taken from included module,
    throws an error when field doesn't exist in provided module.
  If included structure is invalid for some reason — this macro throws an
  `Construct.DefinitionError` exception with detailed reason.
  """
  @spec include(t, keyword) :: Macro.t()
  defmacro include(struct, opts \\ []) do
    quote do
      module = unquote(struct)
      opts = unquote(opts)
      only = Keyword.get(opts, :only)

      unless Construct.__is_construct_module__(module) do
        raise Construct.DefinitionError, "provided #{inspect(module)} is not Construct module"
      end

      types = module.__construct__(:types)

      types =
        if is_list(only) do
          # Validate that every requested field exists before narrowing.
          Enum.each(only, fn(field) ->
            unless Map.has_key?(types, field) do
              raise Construct.DefinitionError,
                "field #{inspect(field)} in :only option " <>
                "doesn't exist in #{inspect(module)}"
            end
          end)

          Map.take(types, only)
        else
          types
        end

      # Re-declare each included field on the current module.
      Enum.each(types, fn({name, {type, opts}}) ->
        Construct.__field__(__MODULE__, name, type, opts)
      end)
    end
  end

  @doc """
  Defines field on the structure with given name, type and options.
  Checks definition validity at compile time by name, type and options.
  For custom types checks for module existence and `c:Construct.Type.cast/1` callback.
  If field definition is invalid for some reason — it throws an `Construct.DefinitionError`
  exception with detailed reason.
  ## Options
  * `:default` — sets default value for that field:
    * The default value is calculated at compilation time, so don't use expressions like
      DateTime.utc_now or Ecto.UUID.generate as they would then be the same for all structures;
    * Value from params is compared with default value before and after type cast;
    * If you pass `field :a, type, default: nil` and `make(%{a: nil})` — type coercion will
      not be used, `nil` compares with default value and just appends that value to structure;
    * If field doesn't exist in params, it will use default value.
  By default this option is unset. Notice that you can't use functions as a default value.
  """
  @spec field(atom, Construct.Type.t, Keyword.t) :: Macro.t()
  defmacro field(name, type \\ :string, opts \\ [])

  # `field name, opts do ... end` — nested structure with options.
  defmacro field(name, opts, [do: _] = contents) do
    make_nested_field(name, contents, opts)
  end

  # `field name do ... end` — nested structure without options.
  defmacro field(name, [do: _] = contents, _opts) do
    make_nested_field(name, contents, [])
  end

  defmacro field(name, type, opts) do
    quote do
      Construct.__field__(__MODULE__, unquote(name), unquote(type), unquote(opts))
    end
  end

  @doc """
  Alias to `Construct.Cast.make/3`.
  """
  @callback make(params :: map, opts :: Keyword.t) :: {:ok, t} | {:error, term}

  @doc """
  Alias to `c:make/2`, but raises an `Construct.MakeError` exception if params have errors.
  """
  @callback make!(params :: map, opts :: Keyword.t) :: t

  @doc """
  Alias to `c:make/2`, used to follow `c:Construct.Type.cast/1` callback.
  To use this structure as custom type.
  """
  @callback cast(params :: map, opts :: Keyword.t) :: {:ok, t} | {:error, term}

  @doc """
  Collects types from defined Construct module to map
  """
  def types_of!(module) do
    if construct_definition?(module) do
      deep_collect_construct_types(module)
    else
      raise ArgumentError, "not a Construct definition"
    end
  end

  # Recursively expands nested Construct module types into plain type maps.
  defp deep_collect_construct_types(module) do
    Enum.into(module.__construct__(:types), %{}, fn({name, {type, opts}}) ->
      # check if type is not circular also
      if module != type && is_atom(type) && construct_definition?(type) do
        {name, {deep_collect_construct_types(type), opts}}
      else
        {name, {type, opts}}
      end
    end)
  end

  @doc """
  Checks if provided module is Construct definition
  """
  def construct_definition?(module) do
    ensure_compiled?(module) && function_exported?(module, :__construct__, 1)
  end

  # Converts a map-literal AST of `name => type` pairs into a list of `field`
  # call ASTs (nested maps become nested `field ... do ... end` blocks).
  @doc false
  def __ast_from_types__({:%{}, _, types}) do
    Enum.reduce(Enum.reverse(types), [], fn
      ({name, {{:%{}, _, _} = types, opts}}, acc) ->
        [{:field, [], [name, opts, [do: {:__block__, [], __ast_from_types__(types)}]]} | acc]

      ({name, {:%{}, _, _} = types}, acc) ->
        [{:field, [], [name, [], [do: {:__block__, [], __ast_from_types__(types)}]]} | acc]

      ({name, {type, opts}}, acc) ->
        [{:field, [], [name, type, opts]} | acc]

      ({name, type}, acc) ->
        [{:field, [], [name, type, []]} | acc]
    end)
  end

  @doc false
  def __defstruct__(construct_fields, construct_fields_enforce) do
    # Function defaults cannot be stored in defstruct (they are evaluated at
    # cast time), so those fields default to nil and become enforced keys.
    {fields, enforce_fields} =
      Enum.reduce(construct_fields, {[], construct_fields_enforce}, fn
        ({key, value}, {fields, enforce}) when is_function(value) ->
          {[{key, nil} | fields], [key | enforce]}

        (field, {fields, enforce}) ->
          {[field | fields], enforce}
      end)

    # De-duplicate repeated field names.
    fields =
      fields
      |> Enum.reverse()
      |> Enum.uniq_by(fn({k, _}) -> k end)
      |> Enum.reverse()

    quote do
      enforce_keys = Keyword.get(@construct_opts, :enforce_keys, true)

      if enforce_keys do
        @enforce_keys unquote(enforce_fields)
      end

      defstruct unquote(Macro.escape(fields))
    end
  end

  # Generates `__construct__(:types)` returning %{name => {type, opts}}.
  @doc false
  def __types__(fields) do
    fields = Enum.uniq_by(fields, fn({k, _v, _opts}) -> k end)

    types =
      fields
      |> Enum.into(%{}, fn({name, type, opts}) -> {name, {type, opts}} end)
      |> Macro.escape

    quote do
      def __construct__(:types), do: unquote(types)
    end
  end

  # Generates `@type t :: %__MODULE__{...}`; a default value whose type differs
  # from the declared field type widens the field spec into a union.
  @doc false
  def __typespecs__(fields) do
    typespecs =
      Enum.map(fields, fn({name, type, opts}) ->
        type = Construct.Type.spec(type)

        type =
          case Keyword.fetch(opts, :default) do
            {:ok, default} ->
              typeof_default = Construct.Type.typeof(default)

              if type == typeof_default do
                type
              else
                quote do: unquote(type) | unquote(typeof_default)
              end

            :error ->
              type
          end

        {name, type}
      end)

    modulespec =
      {:%, [],
       [
         {:__MODULE__, [], Elixir},
         {:%{}, [], typespecs}
       ]}

    quote do
      @type t :: unquote(modulespec)
    end
  end

  # Validates and records a single field in the module's accumulating attrs.
  @doc false
  def __field__(mod, name, type, opts) do
    check_field_name!(name)
    check_type!(type)

    case default_for_struct(type, opts) do
      @no_default ->
        # No default: field starts as nil and is added to @enforce_keys.
        Module.put_attribute(mod, :fields, {name, type, opts})
        Module.put_attribute(mod, :construct_fields, {name, nil})
        Module.put_attribute(mod, :construct_fields_enforce, name)

      default ->
        Module.put_attribute(mod, :fields, {name, type, Keyword.put(opts, :default, default)})
        Module.put_attribute(mod, :construct_fields, {name, default})
        # A default makes the field optional — drop any earlier enforcement.
        pop_attribute(mod, :construct_fields_enforce, name)
    end
  end

  # Lazily starts the registry Agent; concurrent compilers may find it running.
  @doc false
  def __ensure_type_checker_started__ do
    case Agent.start(fn -> MapSet.new end, name: @type_checker_name) do
      {:ok, _pid} -> :ok
      {:error, {:already_started, _pid}} -> :ok
      _ -> raise Construct.DefinitionError, "unexpected compilation error"
    end
  end

  @doc false
  def __register_as_complex_type__(module) do
    Agent.update(@type_checker_name, &MapSet.put(&1, module))
  end

  @doc false
  def __is_construct_module__(module) do
    construct_module?(module)
  end

  # Defines an inline nested structure as a submodule (Parent.FieldName,
  # camelized from the field name) and registers it as the field's type.
  defp make_nested_field(name, contents, opts) do
    check_field_name!(name)

    nested_module_name = String.to_atom(Macro.camelize(Atom.to_string(name)))

    quote do
      opts = unquote(opts)

      current_module_name_ast =
        __MODULE__
        |> Atom.to_string()
        |> String.split(".")
        |> Enum.map(&String.to_atom/1)

      # The nested module inherits @derive from opts or the enclosing module.
      derives = Keyword.get(opts, :derive, Module.get_attribute(__MODULE__, :derive))

      current_module_ast =
        {:__aliases__, [alias: false], current_module_name_ast ++ [unquote(nested_module_name)]}
        |> Macro.expand(__ENV__)

      defmodule current_module_ast do
        @derive derives

        use Construct do
          unquote(contents)
        end
      end

      Construct.__field__(__MODULE__, unquote(name), current_module_ast, opts)
    end
  end

  # Removes a single value from an accumulating module attribute by draining
  # and re-filling it without that value.
  defp pop_attribute(mod, key, value) do
    old = Module.get_attribute(mod, key)
    Module.delete_attribute(mod, key)
    Enum.each(old -- [value], &Module.put_attribute(mod, key, &1))
  end

  defp check_type!({:array, type}) do
    check_type!(type)
  end

  defp check_type!({:map, type}) do
    check_type!(type)
  end

  # Parameterized custom type `{typec, arg}` must implement castc/2.
  defp check_type!({typec, _arg}) do
    check_typec_complex!(typec)
  end

  defp check_type!(type_list) when is_list(type_list) do
    Enum.each(type_list, &check_type!/1)
  end

  defp check_type!(type) do
    unless Construct.Type.primitive?(type), do: check_type_complex!(type)
  end

  defp check_type_complex!(module) do
    check_type_complex!(module, {:cast, 1})
  end

  defp check_typec_complex!(module) do
    check_type_complex!(module, {:castc, 2})
  end

  # A non-Construct custom type module must exist and export the callback.
  defp check_type_complex!(module, {f, a}) do
    unless construct_module?(module) do
      unless ensure_compiled?(module) do
        raise Construct.DefinitionError, "undefined module #{inspect(module)}"
      end

      unless function_exported?(module, f, a) do
        raise Construct.DefinitionError, "undefined function #{f}/#{a} for #{inspect(module)}"
      end
    end
  end

  defp check_field_name!(name) when is_atom(name) do
    :ok
  end

  defp check_field_name!(name) do
    raise Construct.DefinitionError, "expected atom for field name, got `#{inspect(name)}`"
  end

  # An atom type may be a nested Construct module — in that case an instance
  # of that struct becomes the implicit default value.
  defp default_for_struct(maybe_module, opts) when is_atom(maybe_module) do
    case check_default!(Keyword.get(opts, :default, @no_default)) do
      @no_default -> try_to_make_struct_instance(maybe_module)
      val -> val
    end
  end

  defp default_for_struct(_, opts) do
    check_default!(Keyword.get(opts, :default, @no_default))
  end

  defp check_default!(default) when is_function(default, 0) do
    default
  end

  defp check_default!(default) when is_function(default) do
    raise Construct.DefinitionError, "functions in default values should be zero-arity"
  end

  defp check_default!(default) do
    default
  end

  defp try_to_make_struct_instance(module) do
    if construct_module?(module) do
      make_struct(module)
    else
      @no_default
    end
  end

  # Building the struct can fail (enforced keys, module not yet compiled) —
  # then the field simply gets no implicit default.
  defp make_struct(module) do
    struct!(module)
  rescue
    [ArgumentError, UndefinedFunctionError] -> @no_default
  end

  # Consults the compile-time registry first, falling back to checking that
  # the compiled module exports __construct__/1.
  defp construct_module?(module) do
    if @no_raise_on_deadlocks, do: Code.ensure_compiled(module)

    Agent.get(@type_checker_name, &MapSet.member?(&1, module)) ||
      ensure_compiled?(module) && function_exported?(module, :__construct__, 1)
  end

  defp ensure_compiled?(module) do
    case Code.ensure_compiled(module) do
      {:module, _} -> true
      {:error, _} -> false
    end
  end
end
|
lib/construct.ex
| 0.882801
| 0.610047
|
construct.ex
|
starcoder
|
defmodule ConsumerSupervisor do
@moduledoc ~S"""
A supervisor that starts children as events flow in.
A `ConsumerSupervisor` can be used as the consumer in a `GenStage` pipeline.
A new child process will be started per event, where the event is appended
to the arguments in the child specification.
A `ConsumerSupervisor` can be attached to a producer by returning
`:subscribe_to` from `init/1` or explicitly with `GenStage.sync_subscribe/3`
and `GenStage.async_subscribe/2`.
Once subscribed, the supervisor will ask the producer for `:max_demand` events
and start child processes as events arrive. As child processes terminate, the
supervisor will accumulate demand and request more events once `:min_demand`
is reached. This allows the `ConsumerSupervisor` to work similar to a pool,
except a child process is started per event. The minimum amount of concurrent
children per producer is specified by `:min_demand` and the maximum is given
by `:max_demand`.
## Example
Let's define a GenStage consumer as a `ConsumerSupervisor` that subscribes
to a producer named `Producer` and starts a new process for each event
received from the producer. Each new process will be started by calling
`Printer.start_link/1`, which simply starts a task that will print the
incoming event to the terminal.
defmodule Consumer do
use ConsumerSupervisor
def start_link(arg) do
ConsumerSupervisor.start_link(__MODULE__, arg)
end
def init(_arg) do
children = [%{id: Printer, start: {Printer, :start_link, []}}]
opts = [strategy: :one_for_one, subscribe_to: [{Producer, max_demand: 50}]]
ConsumerSupervisor.init(children, opts)
end
end
Then in the `Printer` module:
defmodule Printer do
def start_link(event) do
Task.start_link(fn ->
IO.inspect({self(), event})
end)
end
end
Similar to `Supervisor`, `ConsumerSupervisor` also provides `start_link/3`,
which allows developers to start a supervisor with the help of a callback
module.
## Name Registration
A supervisor is bound to the same name registration rules as a `GenServer`.
Read more about it in the `GenServer` docs.
"""
@behaviour GenStage
@typedoc "Options used by the `start*` functions"
@type option ::
{:registry, atom}
| {:name, Supervisor.name()}
| {:strategy, Supervisor.Spec.strategy()}
| {:max_restarts, non_neg_integer}
| {:max_seconds, non_neg_integer}
| {:subscribe_to, [GenStage.stage() | {GenStage.stage(), keyword()}]}
@doc """
Callback invoked to start the supervisor and during hot code upgrades.
## Options
* `:strategy` - the restart strategy option. Only `:one_for_one`
is supported by consumer supervisors.
* `:max_restarts` - the maximum amount of restarts allowed in
a time frame. Defaults to 3 times.
* `:max_seconds` - the time frame in which `:max_restarts` applies
in seconds. Defaults to 5 seconds.
* `:subscribe_to` - a list of producers to subscribe to. Each element
represents the producer or a tuple with the producer and the subscription
options, for example, `[Producer]` or `[{Producer, max_demand: 20, min_demand: 10}]`.
"""
@callback init(args :: term) ::
{:ok, [:supervisor.child_spec()], options :: keyword()}
| :ignore
defstruct [
:name,
:mod,
:args,
:template,
:max_restarts,
:max_seconds,
:strategy,
children: %{},
producers: %{},
restarts: [],
restarting: 0
]
@doc false
# Injects the ConsumerSupervisor behaviour plus a default, overridable
# child_spec/1 into the using module.
defmacro __using__(opts) do
  quote location: :keep, bind_quoted: [opts: opts] do
    @behaviour ConsumerSupervisor
    import Supervisor.Spec

    @doc false
    def child_spec(arg) do
      # Default supervisor-type child spec; `opts` given to `use` may
      # override any key via Supervisor.child_spec/2.
      default = %{
        id: __MODULE__,
        start: {__MODULE__, :start_link, [arg]},
        type: :supervisor
      }

      Supervisor.child_spec(default, unquote(Macro.escape(opts)))
    end

    defoverridable child_spec: 1

    @doc false
    def init(arg)
  end
end
defmodule Default do
  @moduledoc false

  # Identity callback module: `start_link/2` routes through it so the
  # children/options tuple built there is returned unchanged from init/1.
  def init(args), do: args
end
@doc """
Starts a supervisor with the given children.
A strategy is required to be given as an option. Furthermore,
the `:max_restarts`, `:max_seconds`, and `:subscribe_to`
values can be configured as described in the documentation for the
`c:init/1` callback.
The options can also be used to register a supervisor name.
The supported values are described under the "Name Registration"
section in the `GenServer` module docs.
The child processes specified in `children` will be started by appending
the event to process to the existing function arguments in the child specification.
Note that the consumer supervisor is linked to the parent process
and will exit not only on crashes but also if the parent process
exits with `:normal` reason.
"""
@spec start_link([Supervisor.Spec.spec()], [option]) :: Supervisor.on_start()
def start_link(children, options) when is_list(children) do
  # Split supervisor-level options from the GenStage start options, then
  # boot via the identity Default module.
  supervisor_keys = [:strategy, :max_restarts, :max_seconds, :subscribe_to]
  {sup_opts, start_opts} = Keyword.split(options, supervisor_keys)

  start_link(Default, init(children, sup_opts), start_opts)
end
@doc """
Starts a consumer supervisor module with the given `args`.
To start the supervisor, the `c:init/1` callback will be invoked in the given
module, with `args` passed to it. The `c:init/1` callback must return a
supervision specification which can be created with the help of the
`Supervisor` module.
If the `c:init/1` callback returns `:ignore`, this function returns
`:ignore` as well and the supervisor terminates with reason `:normal`.
If it fails or returns an incorrect value, this function returns
`{:error, term}` where `term` is a term with information about the
error, and the supervisor terminates with reason `term`.
The `:name` option can also be given in order to register a supervisor
name. The supported values are described under the "Name Registration"
section in the `GenServer` module docs.
"""
@spec start_link(module, any) :: Supervisor.on_start()
@spec start_link(module, any, [option]) :: Supervisor.on_start()
def start_link(mod, args, opts \\ []) do
  # The registered name (if any) travels inside the init argument so the
  # callback can store it in state.
  registered_name = opts[:name]
  GenStage.start_link(__MODULE__, {mod, args, registered_name}, opts)
end
@doc """
Starts a child in the consumer supervisor.
The child process will be started by appending the given list of
`args` to the existing function arguments in the child specification.
This child is started separately from any producer and does not
count towards the demand of any of them.
If the child process starts, function returns `{:ok, child}` or
`{:ok, child, info}`, the pid is added to the supervisor, and the
function returns the same value.
If the child process start function returns `:ignore`, an error tuple,
or an erroneous value, or if it fails, the child is discarded and
`:ignore` or `{:error, error}` where `error` is a term containing
information about the error is returned.
"""
@spec start_child(Supervisor.supervisor(), [term]) :: Supervisor.on_start_child()
def start_child(supervisor, args) when is_list(args) do
  # Dynamic children go through the server so they are tracked in state.
  call(supervisor, {:start_child, args})
end

@doc """
Terminates the given child pid.
If successful, the function returns `:ok`. If there is no
such pid, the function returns `{:error, :not_found}`.
"""
@spec terminate_child(Supervisor.supervisor(), pid) :: :ok | {:error, :not_found}
def terminate_child(supervisor, pid) when is_pid(pid) do
  call(supervisor, {:terminate_child, pid})
end

@doc """
Returns a list with information about all children.
Note that calling this function when supervising a large number
of children under low memory conditions can cause an out of memory
exception.
This function returns a list of tuples containing:
* `id` - as defined in the child specification but is always
  set to `:undefined` for consumer supervisors
* `child` - the pid of the corresponding child process or the
  atom `:restarting` if the process is about to be restarted
* `type` - `:worker` or `:supervisor` as defined in the child
  specification
* `modules` - as defined in the child specification
"""
@spec which_children(Supervisor.supervisor()) :: [
        {:undefined, pid | :restarting, Supervisor.Spec.worker(), Supervisor.Spec.modules()}
      ]
def which_children(supervisor) do
  call(supervisor, :which_children)
end

@doc """
Returns a map containing count values for the supervisor.
The map contains the following keys:
* `:specs` - always `1` as consumer supervisors have a single specification
* `:active` - the count of all actively running child processes managed by
  this supervisor
* `:supervisors` - the count of all supervisors whether or not the child
  process is still alive
* `:workers` - the count of all workers, whether or not the child process
  is still alive
"""
@spec count_children(Supervisor.supervisor()) :: %{
        specs: non_neg_integer,
        active: non_neg_integer,
        supervisors: non_neg_integer,
        workers: non_neg_integer
      }
def count_children(supervisor) do
  call(supervisor, :count_children)
end
@doc """
Receives a template to initialize and a set of options.
This is typically invoked at the end of the `c:init/1` callback of module-based supervisors.
This function returns a the child specification and the supervisor flags.
## Examples
Using the child specification changes introduced in Elixir 1.5:
defmodule MyConsumerSupervisor do
use ConsumerSupervisor
def start_link(arg) do
ConsumerSupervisor.start_link(__MODULE__, arg)
end
def init(_arg) do
ConsumerSupervisor.init([MyConsumer], strategy: :one_for_one, subscribe_to: MyProducer)
end
end
"""
# Old-style (pre-Elixir-1.5) six-element child spec tuple — accepted as-is.
def init([{_, _, _, _, _, _} = template], opts) do
  {:ok, [template], opts}
end

# Module / map / {mod, arg} specs are normalized through Supervisor.init/2;
# note the match rebinds `template` to the normalized spec, and asserts that
# normalization produced exactly one child spec.
def init([template], opts) when is_tuple(template) or is_map(template) or is_atom(template) do
  {:ok, {_, [template]}} = Supervisor.init([template], opts)
  {:ok, [template], opts}
end

@compile {:inline, call: 2}

# All client-facing calls go through GenStage with an infinite timeout.
defp call(supervisor, req) do
  GenStage.call(supervisor, req, :infinity)
end
## Callbacks
@doc false
# GenStage init callback: runs the user module's init/1, validates the single
# child template and the supervisor options, and becomes a :consumer stage.
def init({mod, args, name}) do
  # Make the process look like a plain :supervisor to observers/debug tools.
  Process.put(:"$initial_call", {:supervisor, mod, 1})
  Process.flag(:trap_exit, true)

  case mod.init(args) do
    {:ok, children, opts} ->
      case validate_specs(children) do
        :ok ->
          state = %ConsumerSupervisor{mod: mod, args: args, name: name || {self(), mod}}

          case init(state, children, opts) do
            {:ok, state, opts} -> {:consumer, state, opts}
            {:error, message} -> {:stop, {:bad_opts, message}}
          end

        {:error, message} ->
          {:stop, {:bad_specs, message}}
      end

    :ignore ->
      :ignore

    other ->
      {:stop, {:bad_return_value, other}}
  end
end

# Applies the supervisor options to state; whatever remains in `opts` (such
# as :subscribe_to) is passed through to GenStage.
defp init(state, [child], opts) when is_list(opts) do
  {strategy, opts} = Keyword.pop(opts, :strategy)
  {max_restarts, opts} = Keyword.pop(opts, :max_restarts, 3)
  {max_seconds, opts} = Keyword.pop(opts, :max_seconds, 5)
  template = normalize_template(child)

  with :ok <- validate_strategy(strategy),
       :ok <- validate_restarts(max_restarts),
       :ok <- validate_seconds(max_seconds),
       :ok <- validate_template(template) do
    state = %{
      state
      | template: template,
        strategy: strategy,
        max_restarts: max_restarts,
        max_seconds: max_seconds
    }

    {:ok, state, opts}
  end
end

defp init(_state, [_], _opts) do
  {:error, "supervisor's init expects a keywords list as options"}
end
# A consumer supervisor takes exactly one child spec (the template); anything
# else is rejected up front, otherwise the spec is checked by :supervisor.
defp validate_specs(children) do
  case children do
    [_template] ->
      :supervisor.check_childspecs(children)

    _ ->
      {:error, "consumer supervisor expects a list with a single item as a template"}
  end
end
# Only :one_for_one is supported: each child handles exactly one event.
defp validate_strategy(strategy) when strategy in [:one_for_one], do: :ok
defp validate_strategy(nil), do: {:error, "supervisor expects a strategy to be given"}
defp validate_strategy(_), do: {:error, "unknown supervision strategy for consumer supervisor"}

defp validate_restarts(restart) when is_integer(restart), do: :ok
defp validate_restarts(_), do: {:error, "max_restarts must be an integer"}

defp validate_seconds(seconds) when is_integer(seconds), do: :ok
defp validate_seconds(_), do: {:error, "max_seconds must be an integer"}
@doc false
def handle_subscribe(:producer, opts, {_, ref} = from, state) do
  # GenStage checks these options before allowing subscription
  max = Keyword.get(opts, :max_demand, 1000)
  min = Keyword.get(opts, :min_demand, div(max, 2))

  # Manual demand mode: ask for max up front, then re-ask in maybe_ask/6 as
  # children finish. Producer entry is {from, count, pending, min, max}.
  GenStage.ask(from, max)
  {:manual, put_in(state.producers[ref], {from, 0, 0, min, max})}
end

@doc false
def handle_cancel(_, {_, ref}, state) do
  # Producer went away: drop its demand bookkeeping.
  {:noreply, [], update_in(state.producers, &Map.delete(&1, ref))}
end

@doc false
def handle_events(events, {pid, ref} = from, state) do
  %{template: child, children: children} = state

  # Start one child per event; children that failed to start count as
  # already-finished demand so the producer can be re-asked for more.
  {new, errors} = start_events(events, from, child, 0, [], state)
  new_children = Enum.into(new, children)
  started = map_size(new_children) - map_size(children)

  {:noreply, [], maybe_ask(ref, pid, started + errors, errors, new_children, state)}
end
# Starts one child per event, accumulating {pid, [producer_ref | args]} pairs.
# :temporary children are never restarted, so their args are not retained
# (stored as :undefined). Returns {started_children, error_count}.
defp start_events([extra | extras], from, child, errors, acc, state) do
  {_, ref} = from
  {_, {m, f, args}, restart, _, _, _} = child
  # The event itself is appended as the last argument of the child's MFA.
  args = args ++ [extra]

  case start_child(m, f, args) do
    {:ok, pid, _} when restart == :temporary ->
      acc = [{pid, [ref | :undefined]} | acc]
      start_events(extras, from, child, errors, acc, state)

    {:ok, pid, _} ->
      acc = [{pid, [ref | args]} | acc]
      start_events(extras, from, child, errors, acc, state)

    {:ok, pid} when restart == :temporary ->
      acc = [{pid, [ref | :undefined]} | acc]
      start_events(extras, from, child, errors, acc, state)

    {:ok, pid} ->
      acc = [{pid, [ref | args]} | acc]
      start_events(extras, from, child, errors, acc, state)

    :ignore ->
      start_events(extras, from, child, errors + 1, acc, state)

    {:error, reason} ->
      :error_logger.error_msg(
        'ConsumerSupervisor failed to start child from: ~tp with reason: ~tp~n',
        [from, reason]
      )

      report_error(:start_error, reason, :undefined, args, child, state)
      start_events(extras, from, child, errors + 1, acc, state)
  end
end

defp start_events([], _, _, errors, acc, _) do
  {acc, errors}
end
# Updates demand bookkeeping for producer `ref` after `events` children were
# accounted for, of which `down` are already finished. Accumulated finished
# demand is only re-asked from the producer once it reaches :min_demand.
defp maybe_ask(ref, pid, events, down, children, state) do
  %{producers: producers} = state

  case producers do
    %{^ref => {to, count, pending, min, max}} ->
      # More in-flight events than max_demand means the producer overshot.
      if count + events > max do
        :error_logger.error_msg(
          'ConsumerSupervisor has received ~tp events in excess from: ~tp~n',
          [count + events - max, {pid, ref}]
        )
      end

      pending =
        case pending + down do
          ask when ask >= min ->
            GenStage.ask(to, ask)
            0

          ask ->
            ask
        end

      count = count + events - down
      producers = Map.put(producers, ref, {to, count, pending, min, max})
      %{state | children: children, producers: producers}

    %{} ->
      # Producer already cancelled: just record the children.
      %{state | children: children}
  end
end
@doc false
# Mirrors Supervisor.which_children/1: one entry per child, with
# :restarting substituted for the pid of children awaiting restart.
def handle_call(:which_children, _from, state) do
  %{children: children, template: child} = state
  {_, _, _, _, type, mods} = child
  reply =
    for {pid, args} <- children do
      maybe_pid =
        case args do
          {:restarting, _} -> :restarting
          _ -> pid
        end
      {:undefined, maybe_pid, type, mods}
    end
  {:reply, reply, [], state}
end
# Mirrors Supervisor.count_children/1. There is a single child spec
# (the template); active excludes children currently being restarted.
def handle_call(:count_children, _from, state) do
  %{children: children, template: child, restarting: restarting} = state
  {_, _, _, _, type, _} = child
  specs = map_size(children)
  active = specs - restarting
  reply =
    case type do
      :supervisor ->
        %{specs: 1, active: active, workers: 0, supervisors: specs}
      :worker ->
        %{specs: 1, active: active, workers: specs, supervisors: 0}
    end
  {:reply, reply, [], state}
end
# Terminates a single child by pid, handling both running and
# restart-pending children; returns {:error, :not_found} otherwise.
def handle_call({:terminate_child, pid}, _from, %{children: children} = state) do
  case children do
    %{^pid => [producer | _] = info} ->
      :ok = terminate_children(%{pid => info}, state)
      {:reply, :ok, [], delete_child_and_maybe_ask(producer, pid, state)}
    %{^pid => {:restarting, [producer | _]} = info} ->
      :ok = terminate_children(%{pid => info}, state)
      {:reply, :ok, [], delete_child_and_maybe_ask(producer, pid, state)}
    %{} ->
      {:reply, {:error, :not_found}, [], state}
  end
end
# Manually starts a child from the template with extra args appended.
def handle_call({:start_child, extra}, _from, %{template: child} = state) do
  handle_start_child(child, extra, state)
end
# Starts a child outside the event flow (via the start_child call).
# Such children are tagged with the :dynamic producer marker so they are
# not accounted against any producer's demand.
defp handle_start_child({_, {m, f, args}, restart, _, _, _}, extra, state) do
  args = args ++ extra
  case reply = start_child(m, f, args) do
    {:ok, pid, _} ->
      {:reply, reply, [], save_child(restart, :dynamic, pid, args, state)}
    {:ok, pid} ->
      {:reply, reply, [], save_child(restart, :dynamic, pid, args, state)}
    _ ->
      # :ignore and {:error, _} are returned to the caller unchanged.
      {:reply, reply, [], state}
  end
end
# Invokes the child's start MFA and normalizes its result.
#
# Returns `{:ok, pid}` / `{:ok, pid, extra}`, `:ignore`, `{:error, reason}`,
# or wraps any unexpected return value in an error tuple. Exits, errors and
# throws raised by the MFA are caught and converted to `{:error, reason}`
# so a misbehaving child cannot crash the supervisor.
defp start_child(m, f, a) do
  try do
    apply(m, f, a)
  catch
    kind, reason ->
      # __STACKTRACE__ replaces the deprecated System.stacktrace/0; it is
      # only valid here inside catch and refers to this exception's trace.
      {:error, exit_reason(kind, reason, __STACKTRACE__)}
  else
    {:ok, pid, extra} when is_pid(pid) -> {:ok, pid, extra}
    {:ok, pid} when is_pid(pid) -> {:ok, pid}
    :ignore -> :ignore
    {:error, _} = error -> error
    other -> {:error, other}
  end
end
# Records a started child in the state's children map.
# :temporary children are never restarted, so their start args are not
# retained (stored as :undefined); all others keep args for restarts.
defp save_child(:temporary, producer, pid, _args, state) do
  %{state | children: Map.put(state.children, pid, [producer | :undefined])}
end

defp save_child(_restart, producer, pid, args, state) do
  %{state | children: Map.put(state.children, pid, [producer | args])}
end
# Converts a caught kind/reason/stacktrace triple into an exit reason in
# the same shape OTP supervisors report: exits pass through unchanged,
# errors carry their stacktrace, throws are wrapped in :nocatch.
defp exit_reason(:throw, thrown, stack), do: {{:nocatch, thrown}, stack}
defp exit_reason(:exit, reason, _stack), do: reason
defp exit_reason(:error, reason, stack), do: {reason, stack}
@doc false
# ConsumerSupervisor has no cast-based API: any cast is ignored and no
# events are emitted.
def handle_cast(_request, state), do: {:noreply, [], state}
@doc false
# A linked child exited: restart it if its restart policy and the exit
# reason warrant it, or stop the supervisor if the restart intensity
# (max_restarts/max_seconds) has been exceeded.
def handle_info({:EXIT, pid, reason}, state) do
  case maybe_restart_child(pid, reason, state) do
    {:ok, state} -> {:noreply, [], state}
    {:shutdown, state} -> {:stop, :shutdown, state}
  end
end
# Deferred restart: restart_child/5 sends itself $gen_restart when a
# restart attempt failed, so it is retried from the mailbox rather than
# in a tight loop.
def handle_info({:"$gen_restart", pid}, state) do
  %{children: children, template: child, restarting: restarting} = state
  state = %{state | restarting: restarting - 1}
  case children do
    %{^pid => restarting_args} ->
      {:restarting, [producer | args]} = restarting_args
      case restart_child(producer, pid, args, child, state) do
        {:ok, state} ->
          {:noreply, [], state}
        {:shutdown, state} ->
          {:stop, :shutdown, state}
      end
    # We may hit clause if we send $gen_restart and then
    # someone calls terminate_child, removing the child.
    %{} ->
      {:noreply, [], state}
  end
end
# Anything else is unexpected; log and carry on.
def handle_info(msg, state) do
  :error_logger.error_msg('ConsumerSupervisor received unexpected message: ~tp~n', [msg])
  {:noreply, [], state}
end
@doc false
# Re-runs the callback module's init/1 on a hot code upgrade, re-validating
# the child specs and options against the new code. :ignore keeps the
# current state; invalid specs/options abort the upgrade with a tagged error.
def code_change(_, %{mod: mod, args: args} = state, _) do
  case mod.init(args) do
    {:ok, children, opts} ->
      case validate_specs(children) do
        :ok ->
          case init(state, children, opts) do
            {:ok, state, _} -> {:ok, state}
            {:error, message} -> {:error, {:bad_opts, message}}
          end
        {:error, message} ->
          {:error, {:bad_specs, message}}
      end
    :ignore ->
      {:ok, state}
    error ->
      error
  end
end
@doc false
# On supervisor shutdown, terminate all remaining children according to
# the template's shutdown strategy before the process exits.
def terminate(_, %{children: children} = state) do
  :ok = terminate_children(children, state)
end
# Terminates the given children per the template's shutdown value:
# :brutal_kill kills immediately, :infinity waits for a graceful shutdown
# forever, and an integer sets a timer after which stragglers are killed.
# Abnormal exit reasons collected along the way are reported at the end.
defp terminate_children(children, %{template: template} = state) do
  {_, _, restart, shutdown, _, _} = template
  # Children are monitored (and unlinked) first so exits arrive as :DOWN.
  {pids, stacks} = monitor_children(children, restart)
  size = map_size(pids)
  stacks =
    case shutdown do
      :brutal_kill ->
        for {pid, _} <- pids, do: Process.exit(pid, :kill)
        wait_children(restart, shutdown, pids, size, nil, stacks)
      :infinity ->
        for {pid, _} <- pids, do: Process.exit(pid, :shutdown)
        wait_children(restart, shutdown, pids, size, nil, stacks)
      time ->
        for {pid, _} <- pids, do: Process.exit(pid, :shutdown)
        # After `time` ms, wait_children receives :timeout and kills the rest.
        timer = :erlang.start_timer(time, self(), :kill)
        wait_children(restart, shutdown, pids, size, timer, stacks)
    end
  for {pid, reason} <- stacks do
    report_error(:shutdown_error, reason, pid, :undefined, template, state)
  end
  :ok
end
# Monitors every live child, returning {pids_to_wait_for, error_stacks}.
# Children already queued for restart are skipped (no process to stop).
# A child that exited :normal is only an error for :permanent restarts.
defp monitor_children(children, restart) do
  Enum.reduce(children, {%{}, %{}}, fn
    {_, {:restarting, _}}, {pids, stacks} ->
      {pids, stacks}
    {pid, _}, {pids, stacks} ->
      case monitor_child(pid) do
        :ok ->
          {Map.put(pids, pid, true), stacks}
        {:error, :normal} when restart != :permanent ->
          {pids, stacks}
        {:error, reason} ->
          {pids, Map.put(stacks, pid, reason)}
      end
  end)
end
# Switches from link- to monitor-based supervision of a child.
# If the child already exited (an :EXIT is in our mailbox), its monitor
# :DOWN is flushed and the exit reason is returned as an error; otherwise
# :ok means a :DOWN message will arrive when it terminates.
defp monitor_child(pid) do
  ref = Process.monitor(pid)
  Process.unlink(pid)
  receive do
    {:EXIT, ^pid, reason} ->
      # Drain the matching :DOWN so it doesn't linger in the mailbox.
      receive do
        {:DOWN, ^ref, :process, ^pid, _} -> {:error, reason}
      end
  after
    0 -> :ok
  end
end
# All children are down and no kill timer was armed: done.
defp wait_children(_restart, _shutdown, _pids, 0, nil, stacks) do
  stacks
end
# All children are down with a timer pending: cancel it and flush a
# possibly already-delivered :timeout message before returning.
defp wait_children(_restart, _shutdown, _pids, 0, timer, stacks) do
  _ = :erlang.cancel_timer(timer)
  receive do
    {:timeout, ^timer, :kill} -> :ok
  after
    0 -> :ok
  end
  stacks
end
# Brutal kill: :killed is the expected exit reason; anything else is
# recorded as a shutdown error.
defp wait_children(restart, :brutal_kill, pids, size, timer, stacks) do
  receive do
    {:DOWN, _ref, :process, pid, :killed} ->
      wait_children(restart, :brutal_kill, Map.delete(pids, pid), size - 1, timer, stacks)
    {:DOWN, _ref, :process, pid, reason} ->
      wait_children(
        restart,
        :brutal_kill,
        Map.delete(pids, pid),
        size - 1,
        timer,
        Map.put(stacks, pid, reason)
      )
  end
end
# Graceful shutdown: :shutdown (plain or tagged) is expected; :normal is
# tolerated unless the child is :permanent. When the kill timer fires,
# remaining children are killed and we keep waiting (timer set to nil so
# the arity-6 "all done" clause above can finish).
defp wait_children(restart, shutdown, pids, size, timer, stacks) do
  receive do
    {:DOWN, _ref, :process, pid, {:shutdown, _}} ->
      wait_children(restart, shutdown, Map.delete(pids, pid), size - 1, timer, stacks)
    {:DOWN, _ref, :process, pid, :shutdown} ->
      wait_children(restart, shutdown, Map.delete(pids, pid), size - 1, timer, stacks)
    {:DOWN, _ref, :process, pid, :normal} when restart != :permanent ->
      wait_children(restart, shutdown, Map.delete(pids, pid), size - 1, timer, stacks)
    {:DOWN, _ref, :process, pid, reason} ->
      stacks = Map.put(stacks, pid, reason)
      wait_children(restart, shutdown, Map.delete(pids, pid), size - 1, timer, stacks)
    {:timeout, ^timer, :kill} ->
      for {pid, _} <- pids, do: Process.exit(pid, :kill)
      wait_children(restart, shutdown, pids, size, nil, stacks)
  end
end
# Entry point for a child exit: look up the child and decide, based on the
# template's restart policy and the exit reason, whether to restart it.
defp maybe_restart_child(pid, reason, state) do
  %{children: children, template: child} = state
  {_, _, restart, _, _, _} = child
  case children do
    %{^pid => [producer | args]} ->
      maybe_restart_child(restart, reason, producer, pid, args, child, state)
    %{} ->
      # Unknown pid (already removed); nothing to do.
      {:ok, state}
  end
end
# :permanent children restart on any exit reason.
defp maybe_restart_child(:permanent, reason, producer, pid, args, child, state) do
  report_error(:child_terminated, reason, pid, args, child, state)
  restart_child(producer, pid, args, child, state)
end
# Clean exits (:normal, :shutdown, {:shutdown, _}) never trigger a restart
# for non-permanent children; the child is removed and demand re-asked.
defp maybe_restart_child(_, :normal, producer, pid, _args, _child, state) do
  {:ok, delete_child_and_maybe_ask(producer, pid, state)}
end
defp maybe_restart_child(_, :shutdown, producer, pid, _args, _child, state) do
  {:ok, delete_child_and_maybe_ask(producer, pid, state)}
end
defp maybe_restart_child(_, {:shutdown, _}, producer, pid, _args, _child, state) do
  {:ok, delete_child_and_maybe_ask(producer, pid, state)}
end
# :transient children restart only on abnormal exits (clean exits were
# matched above).
defp maybe_restart_child(:transient, reason, producer, pid, args, child, state) do
  report_error(:child_terminated, reason, pid, args, child, state)
  restart_child(producer, pid, args, child, state)
end
# :temporary children are never restarted, but abnormal exits are reported.
defp maybe_restart_child(:temporary, reason, producer, pid, args, child, state) do
  report_error(:child_terminated, reason, pid, args, child, state)
  {:ok, delete_child_and_maybe_ask(producer, pid, state)}
end
# Removes a child from the state. Dynamically started children (:dynamic)
# were not counted against any producer's demand, so nothing is re-asked;
# event-started children free one slot of demand (down = 1) for their
# producer via maybe_ask/6.
defp delete_child_and_maybe_ask(:dynamic, pid, %{children: children} = state) do
  %{state | children: Map.delete(children, pid)}
end
defp delete_child_and_maybe_ask(ref, pid, %{children: children} = state) do
  children = Map.delete(children, pid)
  maybe_ask(ref, pid, 0, 1, children, state)
end
# Attempts to restart a child, first charging the restart against the
# supervisor's restart intensity. If the intensity is exceeded the whole
# supervisor shuts down. A failed start is retried asynchronously by
# sending ourselves a $gen_restart message (handled in handle_info/2).
defp restart_child(producer, pid, args, child, state) do
  case add_restart(state) do
    {:ok, %{strategy: strategy} = state} ->
      case restart_child(strategy, producer, pid, args, child, state) do
        {:ok, state} ->
          {:ok, state}
        {:try_again, state} ->
          send(self(), {:"$gen_restart", pid})
          {:ok, state}
      end
    {:shutdown, state} ->
      report_error(:shutdown, :reached_max_restart_intensity, pid, args, child, state)
      {:shutdown, delete_child_and_maybe_ask(producer, pid, state)}
  end
end
# Records a restart timestamp and enforces the max_restarts/max_seconds
# intensity window: shutdown is signalled once more than max_restarts
# restarts happened within the last max_seconds seconds.
defp add_restart(state) do
  %{max_seconds: max_seconds, max_restarts: max_restarts, restarts: restarts} = state
  # monotonic_time/1 with unit 1 yields whole seconds.
  now = :erlang.monotonic_time(1)
  restarts = add_restart([now | restarts], now, max_seconds)
  state = %{state | restarts: restarts}
  if length(restarts) <= max_restarts do
    {:ok, state}
  else
    {:shutdown, state}
  end
end
# Drops restart timestamps older than `period` seconds.
defp add_restart(restarts, now, period) do
  for then <- restarts, now <= then + period, do: then
end
# Performs the actual restart for the :one_for_one strategy: start a fresh
# process with the original args and swap it into the children map.
# :ignore drops the child; a start error marks it {:restarting, info} and
# asks the caller to retry via {:try_again, state}.
defp restart_child(:one_for_one, producer, current_pid, args, child, state) do
  {_, {m, f, _}, restart, _, _, _} = child
  case start_child(m, f, args) do
    {:ok, pid, _} ->
      state = %{state | children: Map.delete(state.children, current_pid)}
      {:ok, save_child(restart, producer, pid, args, state)}
    {:ok, pid} ->
      state = %{state | children: Map.delete(state.children, current_pid)}
      {:ok, save_child(restart, producer, pid, args, state)}
    :ignore ->
      {:ok, delete_child_and_maybe_ask(producer, current_pid, state)}
    {:error, reason} ->
      report_error(:start_error, reason, {:restarting, current_pid}, args, child, state)
      state = restart_child(current_pid, state)
      # Track pending restarts so count_children can report active ones.
      {:try_again, update_in(state.restarting, &(&1 + 1))}
  end
end
# Marks a child as awaiting restart (idempotent if already marked).
defp restart_child(pid, %{children: children} = state) do
  case children do
    %{^pid => {:restarting, _}} ->
      state
    %{^pid => info} ->
      %{state | children: Map.put(children, pid, {:restarting, info})}
  end
end
# Emits a standard OTP supervisor_report to the error logger, in the same
# shape :supervisor uses, so existing log handlers/formatters understand it.
defp report_error(error, reason, pid, args, child, %{name: name}) do
  :error_logger.error_report(
    :supervisor_report,
    supervisor: name,
    errorContext: error,
    reason: reason,
    offender: extract_child(pid, args, child)
  )
end
# Builds the `offender` keyword list for a supervisor_report from the
# child template, substituting the actual runtime args into the MFA.
defp extract_child(pid, args, {id, {mod, fun, _}, restart, shutdown, type, _}) do
  [pid: pid, id: id, mfargs: {mod, fun, args}, restart_type: restart, shutdown: shutdown, child_type: type]
end
# On termination, the raw state is what crash reports should carry.
def format_status(:terminate, [_pdict, state]), do: state

# For :sys.get_status and friends, present the state and callback module
# in the conventional supervisor layout.
def format_status(_reason, [_pdict, %{mod: mod} = state]) do
  [data: [{~c"State", state}], supervisor: [{~c"Callback", mod}]]
end
# Normalizes a child spec into the six-element tuple form used internally,
# filling in the same defaults Supervisor uses (:permanent restart, 5s
# shutdown, :worker type, modules defaulting to the start module).
# Already-normalized tuples pass through untouched.
defp normalize_template(%{id: id, start: {mod, _, _} = start} = spec) do
  restart = Map.get(spec, :restart, :permanent)
  shutdown = Map.get(spec, :shutdown, 5_000)
  type = Map.get(spec, :type, :worker)
  modules = Map.get(spec, :modules, [mod])
  {id, start, restart, shutdown, type, modules}
end

defp normalize_template({_, _, _, _, _, _} = tuple), do: tuple
# Rejects :permanent child specs: event-spawned children must be
# :temporary or :transient, since ConsumerSupervisor cannot re-derive the
# triggering event for an unconditional restart.
defp validate_template({_, _, :permanent, _, _, _}) do
  message = """
  a child specification with :restart set to :permanent \
  is not supported in ConsumerSupervisor
  Set the :restart option either to :temporary, so children \
  spawned from events are never restarted, or :transient, \
  so they are restarted only on abnormal exits
  """
  {:error, message}
end

defp validate_template({_, _, _, _, _, _}), do: :ok
end
|
deps/gen_stage/lib/consumer_supervisor.ex
| 0.912499
| 0.633127
|
consumer_supervisor.ex
|
starcoder
|
defmodule Fly.Repo do
  @moduledoc """
  This wraps the built-in `Ecto.Repo` functions to proxy writable functions like
  insert, update and delete to be performed on the an Elixir node in the primary
  region.
  To use it, rename your existing repo module and add a new module with the same
  name as your original repo like this.
  Original code:
  ```elixir
  defmodule MyApp.Repo do
    use Ecto.Repo,
      otp_app: :my_app,
      adapter: Ecto.Adapters.Postgres
  end
  ```
  Changes to:
  ```elixir
  defmodule MyApp.Repo.Local do
    use Ecto.Repo,
      otp_app: :my_app,
      adapter: Ecto.Adapters.Postgres
    # Dynamically configure the database url based for runtime environment.
    def init(_type, config) do
      {:ok, Keyword.put(config, :url, Fly.Postgres.database_url())}
    end
  end
  defmodule Core.Repo do
    use Fly.Repo, local_repo: MyApp.Repo.Local
  end
  ```
  Using the same name allows your existing code to seamlessly work with the new
  repo.
  When explicitly managing database transactions like using Multi or
  `start_transaction`, when used to modify data, those functions should be
  called by an RPC so they run in the primary region.
  ```elixir
  Fly.RPC.rpc_region(:primary, MyModule, :my_function_that_uses_multi, [my,
  args], opts)
  ```
  """
  defmacro __using__(opts) do
    quote bind_quoted: [opts: opts] do
      # The Ecto repo performing the actual database work.
      @local_repo Keyword.fetch!(opts, :local_repo)
      # Timeout (ms) for RPC calls proxied to the primary region.
      @timeout Keyword.get(opts, :timeout, 5_000)
      # Here we are injecting as little as possible then calling out to the
      # library functions. Read functions run locally; write functions are
      # proxied to the primary region via __exec_on_primary__/2.
      @doc """
      Calculate the given `aggregate`.
      See `Ecto.Repo.aggregate/3` for full documentation.
      """
      def aggregate(queryable, aggregate, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:aggregate, [queryable, aggregate, opts])
      end
      @doc """
      Calculate the given `aggregate` over the given `field`.
      See `Ecto.Repo.aggregate/4` for full documentation.
      """
      def aggregate(queryable, aggregate, field, opts) do
        unquote(__MODULE__).__exec_local__(:aggregate, [queryable, aggregate, field, opts])
      end
      @doc """
      Fetches all entries from the data store matching the given query.
      See `Ecto.Repo.all/2` for full documentation.
      """
      def all(queryable, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:all, [queryable, opts])
      end
      @doc """
      Deletes a struct using its primary key.
      See `Ecto.Repo.delete/2` for full documentation.
      """
      def delete(struct_or_changeset, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:delete, [struct_or_changeset, opts])
      end
      @doc """
      Same as `delete/2` but returns the struct or raises if the changeset is invalid.
      See `Ecto.Repo.delete!/2` for full documentation.
      """
      def delete!(struct_or_changeset, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:delete!, [struct_or_changeset, opts])
      end
      @doc """
      Deletes all entries matching the given query.
      See `Ecto.Repo.delete_all/2` for full documentation.
      """
      def delete_all(queryable, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:delete_all, [queryable, opts])
      end
      @doc """
      Checks if there exists an entry that matches the given query.
      See `Ecto.Repo.exists?/2` for full documentation.
      """
      def exists?(queryable, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:exists?, [queryable, opts])
      end
      @doc """
      Fetches a single struct from the data store where the primary key matches the given id.
      See `Ecto.Repo.get/3` for full documentation.
      """
      def get(queryable, id, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:get, [queryable, id, opts])
      end
      @doc """
      Similar to `get/3` but raises `Ecto.NoResultsError` if no record was found.
      See `Ecto.Repo.get!/3` for full documentation.
      """
      def get!(queryable, id, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:get!, [queryable, id, opts])
      end
      @doc """
      Fetches a single result from the query.
      See `Ecto.Repo.get_by/3` for full documentation.
      """
      def get_by(queryable, clauses, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:get_by, [queryable, clauses, opts])
      end
      @doc """
      Similar to `get_by/3` but raises `Ecto.NoResultsError` if no record was found.
      See `Ecto.Repo.get_by!/3` for full documentation.
      """
      def get_by!(queryable, clauses, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:get_by!, [queryable, clauses, opts])
      end
      @doc """
      Inserts a struct defined via Ecto.Schema or a changeset.
      See `Ecto.Repo.insert/2` for full documentation.
      """
      def insert(struct_or_changeset, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:insert, [struct_or_changeset, opts])
      end
      @doc """
      Same as `insert/2` but returns the struct or raises if the changeset is invalid.
      See `Ecto.Repo.insert!/2` for full documentation.
      """
      def insert!(struct_or_changeset, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:insert!, [struct_or_changeset, opts])
      end
      @doc """
      Inserts all entries into the repository.
      See `Ecto.Repo.insert_all/3` for full documentation.
      """
      def insert_all(schema_or_source, entries_or_query, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:insert_all, [
          schema_or_source,
          entries_or_query,
          opts
        ])
      end
      @doc """
      Inserts or updates a changeset depending on whether the struct is persisted or not
      See `Ecto.Repo.insert_or_update/2` for full documentation.
      """
      def insert_or_update(changeset, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:insert_or_update, [changeset, opts])
      end
      @doc """
      Same as `insert_or_update!/2` but returns the struct or raises if the changeset is invalid.
      See `Ecto.Repo.insert_or_update!/2` for full documentation.
      """
      def insert_or_update!(changeset, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:insert_or_update!, [changeset, opts])
      end
      @doc """
      Fetches a single result from the query.
      See `Ecto.Repo.one/2` for full documentation.
      """
      def one(queryable, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:one, [queryable, opts])
      end
      @doc """
      Similar to a `one/2` but raises Ecto.NoResultsError if no record was found.
      See `Ecto.Repo.one!/2` for full documentation.
      """
      def one!(queryable, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:one!, [queryable, opts])
      end
      @doc """
      Preloads all associations on the given struct or structs.
      See `Ecto.Repo.preload/3` for full documentation.
      """
      def preload(structs_or_struct_or_nil, preloads, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:preload, [
          structs_or_struct_or_nil,
          preloads,
          opts
        ])
      end
      @doc """
      A user customizable callback invoked for query-based operations.
      See `Ecto.Repo.prepare_query/3` for full documentation.
      """
      def prepare_query(operation, query, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:prepare_query, [operation, query, opts])
      end
      @doc """
      Reloads a given schema or schema list from the database.
      See `Ecto.Repo.reload/2` for full documentation.
      """
      def reload(struct_or_structs, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:reload, [struct_or_structs, opts])
      end
      @doc """
      Similar to `reload/2`, but raises when something is not found.
      See `Ecto.Repo.reload!/2` for full documentation.
      """
      def reload!(struct_or_structs, opts \\ []) do
        # Dispatch to :reload! (previously this mistakenly called :reload,
        # silently losing the raise-on-missing behavior).
        unquote(__MODULE__).__exec_local__(:reload!, [struct_or_structs, opts])
      end
      @doc """
      Rolls back the current transaction.
      Defaults to the primary database repo. Assumes the transaction was used for
      data modification.
      See `Ecto.Repo.rollback/1` for full documentation.
      """
      def rollback(value) do
        unquote(__MODULE__).__exec_local__(:rollback, [value])
      end
      @doc """
      Returns a lazy enumerable that emits all entries from the data store matching the given query.
      See `Ecto.Repo.stream/2` for full documentation.
      """
      def stream(queryable, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:stream, [queryable, opts])
      end
      @doc """
      Runs the given function or Ecto.Multi inside a transaction.
      This defaults to the primary (writable) repo as it is assumed this is being
      used for data modification. Override to operate on the replica.
      See `Ecto.Repo.transaction/2` for full documentation.
      """
      def transaction(fun_or_multi, opts \\ []) do
        unquote(__MODULE__).__exec_local__(:transaction, [fun_or_multi, opts])
      end
      @doc """
      Updates a changeset using its primary key.
      See `Ecto.Repo.update/2` for full documentation.
      """
      def update(changeset, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:update, [changeset, opts])
      end
      @doc """
      Same as `update/2` but returns the struct or raises if the changeset is invalid.
      See `Ecto.Repo.update!/2` for full documentation.
      """
      def update!(changeset, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:update!, [changeset, opts])
      end
      @doc """
      Updates all entries matching the given query with the given values.
      See `Ecto.Repo.update_all/3` for full documentation.
      """
      def update_all(queryable, updates, opts \\ []) do
        unquote(__MODULE__).__exec_on_primary__(:update_all, [queryable, updates, opts])
      end
      # Runs the repo function on the local (replica) repo.
      def __exec_local__(func, args) do
        apply(@local_repo, func, args)
      end
      # Proxies the repo function to the primary region and waits for
      # replication, bounded by @timeout.
      def __exec_on_primary__(func, args) do
        Fly.Postgres.rpc_and_wait(@local_repo, func, args, timeout: @timeout)
      end
    end
  end
end
|
fly_postgres/lib/repo.ex
| 0.84075
| 0.732161
|
repo.ex
|
starcoder
|
defmodule FarmbotFirmware.StubTransport do
  @moduledoc "Stub for transporting GCODES. Simulates the _real_ Firmware."
  use GenServer
  alias FarmbotFirmware.StubTransport, as: State
  alias FarmbotFirmware.{GCODE, Param}
  require Logger
  defstruct status: :boot,
            handle_gcode: nil,
            position: [x: 0.0, y: 0.0, z: 0.0],
            encoders_scaled: [x: 0.0, y: 0.0, z: 0.0],
            encoders_raw: [x: 0.0, y: 0.0, z: 0.0],
            pins: %{},
            params: []
  @type t :: %State{
          status: FarmbotFirmware.status(),
          handle_gcode: (FarmbotFirmware.GCODE.t() -> :ok),
          position: [x: float(), y: float(), z: float()],
          encoders_scaled: [x: float(), y: float(), z: float()],
          encoders_raw: [x: float(), y: float(), z: float()],
          pins: %{},
          params: [{Param.t(), float() | nil}]
        }
  def init(args) do
    handle_gcode = Keyword.fetch!(args, :handle_gcode)
    # Timeout 0 kicks off the boot sequence via handle_info(:timeout, ...).
    {:ok, %State{status: :boot, handle_gcode: handle_gcode}, 0}
  end
  # Boot -> no_config: emit the startup banner and immediately time out
  # again so the :no_config report is sent.
  def handle_info(:timeout, %{status: :boot} = state) do
    state.handle_gcode.(
      GCODE.new(:report_debug_message, ["ARDUINO STARTUP COMPLETE"])
    )
    {:noreply, goto(state, :no_config), 0}
  end
  def handle_info(:timeout, %{status: :no_config} = state) do
    state.handle_gcode.(GCODE.new(:report_no_config, []))
    {:noreply, state}
  end
  def handle_info(:timeout, %{status: :emergency_lock} = state) do
    resp_codes = [
      GCODE.new(:report_emergency_lock, [])
    ]
    {:noreply, state, {:continue, resp_codes}}
  end
  # Periodic idle report: positions, encoders, then idle.
  def handle_info(:timeout, %{status: :idle} = state) do
    resp_codes = [
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_encoders_scaled, state.encoders_scaled),
      GCODE.new(:report_encoders_raw, state.encoders_raw),
      GCODE.new(:report_idle, [])
    ]
    {:noreply, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:command_emergency_lock, _}} = code, _from, state) do
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_emergency_lock, [], tag)
    ]
    {:reply, :ok, %{state | status: :emergency_lock}, {:continue, resp_codes}}
  end
  def handle_call({tag, {:command_emergency_unlock, _}} = code, _from, state) do
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, %{state | status: :idle}, {:continue, resp_codes}}
  end
  # Writing param_config_ok = 1.0 completes configuration: go :idle.
  def handle_call(
        {tag, {:parameter_write, [{:param_config_ok = param, 1.0 = value}]}} =
          code,
        _from,
        state
      ) do
    new_state = %{state | params: Keyword.put(state.params, param, value)}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, goto(new_state, :idle), {:continue, resp_codes}}
  end
  def handle_call(
        {tag, {:parameter_write, [{param, value}]}} = code,
        _from,
        state
      ) do
    new_state = %{state | params: Keyword.put(state.params, param, value)}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, new_state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:parameter_read_all, []}} = code, _from, state) do
    resp_codes =
      [
        GCODE.new(:report_echo, [GCODE.encode(code)]),
        GCODE.new(:report_begin, [], tag),
        Enum.map(state.params, fn {p, v} ->
          GCODE.new(:report_parameter_value, [{p, v}])
        end),
        GCODE.new(:report_success, [], tag)
      ]
      |> List.flatten()
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:parameter_read, [param]}} = code, _from, state) do
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_parameter_value, [{param, state.params[param] || -1.0}]),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call(
        {tag, {:position_write_zero, [:x, :y, :z]}} = code,
        _from,
        state
      ) do
    position = [
      x: 0.0,
      y: 0.0,
      z: 0.0
    ]
    state = %{state | position: position}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_busy, [], tag),
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:position_write_zero, [axis]}} = code, _from, state) do
    position = Keyword.put(state.position, axis, 0.0) |> ensure_order()
    state = %{state | position: position}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call(
        {tag, {:command_movement_calibrate, [axis]}} = code,
        _from,
        state
      ) do
    position = [x: 0.0, y: 0.0, z: 0.0]
    state = %{state | position: position}
    param_nr_steps = :"movement_axis_nr_steps_#{axis}"
    # Look up the axis-specific params computed above. Previously the
    # literal atoms :param_nr_steps/:param_endpoints were (wrongly) used as
    # keys, so the reported values always fell back to the defaults.
    param_nr_steps_val = Keyword.get(state.params, param_nr_steps, 100_000.00)
    param_endpoints = :"movement_invert_endpoints_#{axis}"
    param_endpoints_val = Keyword.get(state.params, param_endpoints, 1.0)
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_busy, [], tag),
      GCODE.new(:report_calibration_state, [:idle]),
      GCODE.new(:report_calibration_state, [:home]),
      GCODE.new(:report_calibration_state, [:end]),
      GCODE.new(:report_calibration_parameter_value, [
        {param_nr_steps, param_nr_steps_val}
      ]),
      GCODE.new(:report_calibration_parameter_value, [
        {param_endpoints, param_endpoints_val}
      ]),
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:software_version_read, _}} = code, _from, state) do
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_software_version, ["8.0.0.S.stub"]),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  # Everything under this clause should be blocked if emergency_locked
  def handle_call(
        {_tag, {_, _}} = code,
        _from,
        %{status: :emergency_lock} = state
      ) do
    Logger.error("Stub Transport emergency lock")
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_emergency_lock, [])
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:pin_read, args}} = code, _from, state) do
    p = Keyword.fetch!(args, :p)
    m = Keyword.get(args, :m, state.pins[p][:m] || 0)
    # Lazily create the pin with value 0 on first read; keep the stored
    # value when the mode matches; otherwise leave the pin untouched.
    state =
      case Map.get(state.pins, p) do
        nil -> %{state | pins: Map.put(state.pins, p, m: m, v: 0)}
        [m: ^m, v: v] -> %{state | pins: Map.put(state.pins, p, m: m, v: v)}
        _ -> state
      end
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_pin_value, p: p, v: Map.get(state.pins, p)[:v]),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:pin_write, args}} = code, _from, state) do
    p = Keyword.fetch!(args, :p)
    m = Keyword.get(args, :m, state.pins[p][:m] || 0)
    v = Keyword.fetch!(args, :v)
    state = %{state | pins: Map.put(state.pins, p, m: m, v: v)}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:pin_mode_write, _args}} = code, _from, state) do
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:position_read, _}} = code, _from, state) do
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  # Movement: axes absent from args keep their current position.
  def handle_call({tag, {:command_movement, args}} = code, _from, state) do
    position = [
      x: args[:x] || state.position[:x],
      y: args[:y] || state.position[:y],
      z: args[:z] || state.position[:z]
    ]
    state = %{state | position: position}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_busy, [], tag),
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call(
        {tag, {:command_movement_home, [:x, :y, :z]}} = code,
        _from,
        state
      ) do
    position = [
      x: 0.0,
      y: 0.0,
      z: 0.0
    ]
    state = %{state | position: position}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_busy, [], tag),
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call({tag, {:command_movement_home, [axis]}} = code, _from, state) do
    position = Keyword.put(state.position, axis, 0.0) |> ensure_order()
    state = %{state | position: position}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_busy, [], tag),
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  def handle_call(
        {tag, {:command_movement_find_home, [axis]}} = code,
        _from,
        state
      ) do
    position = Keyword.put(state.position, axis, 0.0) |> ensure_order()
    state = %{state | position: position}
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_begin, [], tag),
      GCODE.new(:report_busy, [], tag),
      GCODE.new(:report_position, state.position),
      GCODE.new(:report_success, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  # Catch-all: unknown GCODEs are echoed back as invalid.
  def handle_call({tag, {_, _}} = code, _from, state) do
    Logger.error(
      "STUB HANDLER: unknown code: #{inspect(code)} for state: #{state.status}"
    )
    resp_codes = [
      GCODE.new(:report_echo, [GCODE.encode(code)]),
      GCODE.new(:report_invalid, [], tag)
    ]
    {:reply, :ok, state, {:continue, resp_codes}}
  end
  # Drains queued response codes one at a time through handle_gcode.
  def handle_continue([code | rest], state) do
    state.handle_gcode.(code)
    {:noreply, state, {:continue, rest}}
  end
  # When idle, re-arm the 5s timeout that produces periodic idle reports.
  def handle_continue([], %{status: :idle} = state) do
    {:noreply, state, 5_000}
  end
  def handle_continue([], %{status: _} = state) do
    {:noreply, state}
  end
  defp goto(%{status: _old} = state, status), do: %{state | status: status}
  # Keeps position keyword lists in canonical x/y/z order after updates.
  defp ensure_order(pos) do
    [
      x: Keyword.fetch!(pos, :x),
      y: Keyword.fetch!(pos, :y),
      z: Keyword.fetch!(pos, :z)
    ]
  end
end
|
farmbot_firmware/lib/farmbot_firmware/transports/stub_transport.ex
| 0.690455
| 0.429788
|
stub_transport.ex
|
starcoder
|
defmodule Mix.Tasks.Absinthe.Gen.Schema do
  use Mix.Task

  alias Mix.AbsintheGeneratorUtils

  @shortdoc "Generates an absinthe schema"

  @moduledoc """
  Generates an Absinthe Schema
  ### Options
  #{NimbleOptions.docs(AbsintheGenerator.Schema.definitions())}
  ### Specifying Middleware
  To specify middleware we can utilize the following syntax
  ```bash
  pre_middleware:mutation:AuthMiddleware post_middleware:all:ChangesetErrorFormatter
  ```
  Middleware can be set for `mutation`, `query`, `subscription` or `all` and can
  also be set to either run pre or post resolution using `pre_middleware` or `post_middleware`
  ### Example
  ```bash
  mix absinthe.gen.schema pre_middleware:mutation:MyMiddlewareModule post_middleware:all:MyAllMiddleware
  --app-name MyApp
  --query test
  --query user
  --mutation user
  --mutation session
  --type MyApp
  --moduledoc "this is the test"
  --data-source "EctoSchemas.Cats"
  ```
  """

  # Anchored (\A…\z) so each argument must match *in full*. The previous,
  # unanchored regex was tested against all args joined into one string, so a
  # single valid substring made arbitrary garbage pass validation and then
  # crash later inside parse_middleware/1.
  @middleware_regex ~r/\A(pre_middleware|post_middleware):(mutation|query|subscription|all):[a-zA-Z_]+\z/

  @doc """
  Runs the `mix absinthe.gen.schema` task: parses options, validates and
  parses middleware arguments, builds an `AbsintheGenerator.Schema` struct
  and writes the rendered template to the target path.
  """
  def run(args) do
    AbsintheGeneratorUtils.ensure_not_in_umbrella!("absinthe.gen.schema")

    {args, extra_args} = AbsintheGeneratorUtils.parse_path_opts(args, [
      path: :string,
      app_name: :string,
      moduledoc: :string,
      type: :keep,
      query: :keep,
      mutation: :keep,
      subscription: :keep,
      data_source: :keep
    ])

    parsed_middleware = extra_args
      |> validate_middleware_string
      |> parse_middleware

    # get_lazy: the default path interpolates app_name, which must only be
    # evaluated when :path is actually absent (Keyword.get evaluates its
    # default eagerly and would crash on a missing app_name even when a
    # --path was supplied).
    path =
      Keyword.get_lazy(args, :path, fn ->
        "./lib/#{Macro.underscore(args[:app_name])}_web/schema.ex"
      end)

    args
    |> AbsintheGeneratorUtils.collect_arguments([:query, :mutation, :subscription, :type, :data_source])
    |> Map.merge(parsed_middleware)
    |> serialize_to_schema_struct
    |> AbsintheGenerator.Schema.run
    |> AbsintheGeneratorUtils.write_template(path)
  end

  # Validates every middleware argument individually; returns the args
  # unchanged on success, raises a Mix error otherwise. An empty argument
  # list is valid (no middleware requested).
  defp validate_middleware_string(middleware_args) do
    if Enum.all?(middleware_args, &Regex.match?(@middleware_regex, &1)) do
      middleware_args
    else
      Mix.raise("""
      \n
      Middleware format doesn't match what's expected, please make sure it matches the following Regex:
      #{inspect @middleware_regex}
      Example:
      pre_middleware:mutation:MyMiddlewareModule
      pre_middleware:query:MyMiddlewareModule
      pre_middleware:all:MyMiddlewareModule
      """)
    end
  end

  # Groups validated "type:query_type:Module" strings into
  # %{pre_middleware: [...], post_middleware: [...]}.
  defp parse_middleware(extra_args) do
    middleware_acc = %{
      pre_middleware: [],
      post_middleware: []
    }

    Enum.reduce(extra_args, middleware_acc, fn (arg_string, acc) ->
      [
        middleware_type,
        middleware_query_type,
        middleware_module
      ] = String.split(arg_string, ":")

      # to_existing_atom: the only legal values (:pre_middleware /
      # :post_middleware) already exist via the accumulator literal, and we
      # must never mint atoms from command-line input.
      middleware_type = String.to_existing_atom(middleware_type)
      middleware = [%{
        type: middleware_query_type,
        module: middleware_module
      }]

      Map.update(acc, middleware_type, middleware, &(&1 ++ middleware))
    end)
  end

  # Converts the collected argument map into the schema struct consumed by
  # AbsintheGenerator.Schema.run/1.
  defp serialize_to_schema_struct(params) do
    data_sources = params
      |> Map.get(:data_source, [])
      |> Enum.map(&struct!(AbsintheGenerator.Schema.DataSource, %{source: &1, query: nil}))

    pre_middleware = params
      |> Map.get(:pre_middleware, [])
      |> serialize_middleware

    post_middleware = params
      |> Map.get(:post_middleware, [])
      |> serialize_middleware

    %AbsintheGenerator.Schema{
      app_name: params[:app_name],
      moduledoc: params[:moduledoc],
      queries: params[:query] || [],
      mutations: params[:mutation] || [],
      subscriptions: params[:subscription] || [],
      types: params[:type] || [],
      data_sources: data_sources,
      post_middleware: post_middleware,
      pre_middleware: pre_middleware
    }
  end

  # Collapses per-arg middleware entries into one struct per module, with all
  # of that module's query types grouped together.
  defp serialize_middleware(middleware_params) do
    middleware_params
    |> Enum.group_by(&(&1.module), &(&1.type))
    |> Enum.map(fn {module, types} -> %{types: types, module: module} end)
    |> Enum.map(&struct!(AbsintheGenerator.Schema.Middleware, &1))
  end
end
|
lib/mix/tasks/schema.ex
| 0.81309
| 0.591487
|
schema.ex
|
starcoder
|
defmodule EventStore.RecordedEvent do
  @moduledoc """
  `EventStore.RecordedEvent` contains the persisted data and metadata for a
  single event.
  Events are immutable once recorded.
  ## Recorded event fields
  - `event_number` - position of the event within the stream.
    This will be identical to the `stream_version` when fetching events from a
    single stream. For the `$all` stream it will be the globally ordered event
    number.
  - `event_id` - a globally unique UUID to identify the event.
  - `stream_uuid` - the original stream identity for the event.
  - `stream_version` - the original version of the stream for the event.
  - `correlation_id` - an optional UUID identifier used to correlate related
    messages.
  - `causation_id` - an optional UUID identifier used to identify which
    message you are responding to.
  - `data` - the deserialized event data.
  - `metadata` - a deserialized map of event metadata.
  - `created_at` - a `DateTime` (in UTC) indicating when the event was
    created.
  """

  alias EventStore.RecordedEvent

  @type uuid :: String.t()

  @type t :: %__MODULE__{
          event_number: non_neg_integer(),
          event_id: uuid(),
          stream_uuid: String.t(),
          stream_version: non_neg_integer(),
          correlation_id: uuid() | nil,
          causation_id: uuid() | nil,
          event_type: String.t(),
          data: any(),
          metadata: map() | nil,
          created_at: DateTime.t()
        }

  defstruct [
    :event_number,
    :event_id,
    :stream_uuid,
    :stream_version,
    :correlation_id,
    :causation_id,
    :event_type,
    :data,
    :metadata,
    :created_at
  ]

  @doc """
  Deserializes the `data` and `metadata` fields of a recorded event using the
  given serializer module; `event_type` is forwarded to the serializer as the
  `:type` option when decoding `data`.
  """
  def deserialize(%RecordedEvent{data: data, metadata: metadata, event_type: event_type} = recorded_event, serializer) do
    %RecordedEvent{
      recorded_event
      | data: serializer.deserialize(data, type: event_type),
        metadata: serializer.deserialize(metadata, [])
    }
  end

  @doc """
  `Access` support: fetches `key` from any map (including this struct),
  returning `{:ok, value}` or `:error`.
  """
  def fetch(map, key) when is_map(map), do: Map.fetch(map, key)

  @doc """
  `Access` support: gets and updates `key` in any map in a single pass,
  delegating to `Map.get_and_update/3`.
  """
  def get_and_update(map, key, fun) when is_map(map), do: Map.get_and_update(map, key, fun)
end
|
lib/event_store/recorded_event.ex
| 0.877994
| 0.635505
|
recorded_event.ex
|
starcoder
|
defmodule BSV.Transaction.Input do
  @moduledoc """
  Module for parsing and serialising transaction inputs.
  """
  alias BSV.Script
  alias BSV.Transaction.Output
  alias BSV.Util
  alias BSV.Util.VarBin

  @max_sequence 0xFFFFFFFF
  @p2pkh_script_size 108
  # The txid that marks a coinbase (null) input: 32 zero bytes, hex-encoded.
  @null_txid String.duplicate("0", 64)

  defstruct output_txid: nil,
            output_index: nil,
            script: nil,
            sequence: @max_sequence,
            utxo: nil

  @typedoc "Transaction input"
  @type t :: %__MODULE__{
    output_txid: String.t,
    output_index: integer,
    script: binary,
    sequence: integer,
    utxo: Output.t | nil
  }

  @doc """
  Parse the given binary into a transaction input. Returns a tuple containing
  the transaction input and the remaining binary data.
  ## Options
  The accepted options are:
  * `:encoding` - Optionally decode the binary with either the `:base64` or `:hex` encoding scheme.
  ## Examples
      BSV.Transaction.Input.parse(data)
      {%BSV.Trasaction.Input{}, ""}
  """
  @spec parse(binary, keyword) :: {__MODULE__.t, binary}
  def parse(data, options \\ []) do
    encoding = Keyword.get(options, :encoding)
    decoded = Util.decode(data, encoding)

    # Layout: 32-byte txid, little-endian 32-bit index, var-length script,
    # little-endian 32-bit sequence.
    <<raw_txid::bytes-32, index::little-32, rest::binary>> = decoded
    {raw_script, rest} = VarBin.parse_bin(rest)
    <<sequence::little-32, rest::binary>> = rest

    # Txids are stored reversed on the wire; flip and hex-encode.
    txid = raw_txid |> Util.reverse_bin |> Util.encode(:hex)

    script =
      if is_null(txid, index),
        do: Script.get_coinbase(raw_script),
        else: Script.parse(raw_script)

    input =
      struct(__MODULE__,
        output_txid: txid,
        output_index: index,
        script: script,
        sequence: sequence
      )

    {input, rest}
  end

  @doc """
  Serialises the given transaction input struct into a binary.
  ## Options
  The accepted options are:
  * `:encode` - Optionally encode the returned binary with either the `:base64` or `:hex` encoding scheme.
  ## Examples
      BSV.Transaction.Input.serialize(input)
      <<binary>>
  """
  @spec serialize(__MODULE__.t, keyword) :: binary
  def serialize(%__MODULE__{} = input, options \\ []) do
    encoding = Keyword.get(options, :encoding)

    raw_txid =
      input.output_txid
      |> Util.decode(:hex)
      |> Util.reverse_bin

    raw_script =
      case input.script do
        %Script{} = script -> script |> Script.serialize() |> VarBin.serialize_bin()
        _ -> <<>>
      end

    serialized = <<
      raw_txid::binary,
      input.output_index::little-32,
      raw_script::binary,
      input.sequence::little-32
    >>

    Util.encode(serialized, encoding)
  end

  @doc """
  Returns the size of the given input. If the input has a script, it's actual
  size is calculated, otherwise a P2PKH input is estimated.
  """
  @spec get_size(__MODULE__.t) :: integer
  def get_size(%__MODULE__{script: script} = input) do
    case script do
      # No script (or an empty one): 40 fixed bytes + estimated P2PKH script.
      nil -> 40 + @p2pkh_script_size
      %Script{chunks: []} -> 40 + @p2pkh_script_size
      _ -> input |> serialize() |> byte_size()
    end
  end

  @doc """
  Gets whether this is a null input (coinbase transaction input).
  ## Examples
      iex> {%BSV.Transaction{inputs: [coinbase_input]}, ""} = BSV.Transaction.parse("01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff0704ffff001d0104ffffffff0100f2052a0100000043410496b538e853519c726a2c91e61ec11600ae1390813a627c66fb8be7947be63c52da7589379515d4e0a604f8141781e62294721166bf621e73a82cbf2342c858eeac00000000", encoding: :hex)
      iex> BSV.Transaction.Input.is_null(coinbase_input)
      true
      iex> {%BSV.Transaction{inputs: [input]}, ""} = BSV.Transaction.parse("0100000001ae13d3386c0541b9f8528c7f215713e94c52279318090a95e39e5b123360ee48110000006a47304402206d0cf8f9ac8cadcb5061072ff28ca434620bbb0d442f9578d560e840a9cce90a022023aaae374be08838cb42cafd35459f140c6b440db45e6ecc007d9d5d95c89d504121036ce3ac90505e8ca49c0f43d5db1ebf67dc502d79518db2ab54e86947ab1c91fefeffffff01a0aae219020000001976a914ab1cad2d09eedfb15794cc01edc2141b7ccc587388ac77d50900", encoding: :hex)
      iex> BSV.Transaction.Input.is_null(input)
      false
  """
  @spec is_null(__MODULE__.t) :: boolean
  def is_null(%__MODULE__{output_txid: txid, output_index: index}), do: is_null(txid, index)

  # A null input points at the all-zero txid with the maximum index.
  @spec is_null(String.t(), non_neg_integer) :: boolean
  defp is_null(previous_txid, previous_index) do
    previous_txid == @null_txid and previous_index == @max_sequence
  end
end
|
lib/bsv/transaction/input.ex
| 0.86212
| 0.48182
|
input.ex
|
starcoder
|
defmodule Tesla.Middleware.Logger do
  @behaviour Tesla.Middleware

  @moduledoc """
  Log requests as single line.
  Logs request method, url, response status and time taken in milliseconds.
  ### Example usage
  ```
  defmodule MyClient do
    use Tesla
    plug Tesla.Middleware.Logger
  end
  ```
  ### Logger output
  ```
  2017-09-30 13:39:06.663 [info] GET http://example.com -> 200 (736.988 ms)
  ```
  See `Tesla.Middleware.DebugLogger` to log request/response body
  """
  require Logger

  # Runs the rest of the middleware stack, timing it, and logs one summary
  # line. On Tesla.Error the failure is logged and the exception re-raised
  # with its original stacktrace.
  def call(env, next, _opts) do
    {time, env} = :timer.tc(Tesla, :run, [env, next])
    _ = log(env, time)
    env
  rescue
    ex in Tesla.Error ->
      # Fix: use __STACKTRACE__ instead of the deprecated (and since removed)
      # System.stacktrace/0, which is unreliable on modern Elixir.
      _ = log(env, ex)
      reraise ex, __STACKTRACE__
  end

  # Error case: the request raised a Tesla.Error before producing a status.
  defp log(env, %Tesla.Error{message: message}) do
    Logger.error("#{normalize_method(env)} #{env.url} -> #{message}")
  end

  # Success case: severity chosen from the response status class.
  defp log(env, time) do
    # time is in microseconds; render milliseconds with 3 decimals.
    ms = :io_lib.format("~.3f", [time / 1000])
    message = "#{normalize_method(env)} #{env.url} -> #{env.status} (#{ms} ms)"

    cond do
      env.status >= 400 -> Logger.error(message)
      # Logger.warn kept (not Logger.warning) for compatibility with the
      # Elixir versions this Tesla vintage supports.
      env.status >= 300 -> Logger.warn(message)
      true -> Logger.info(message)
    end
  end

  # :get -> "GET"
  defp normalize_method(env) do
    env.method |> to_string() |> String.upcase()
  end
end
defmodule Tesla.Middleware.DebugLogger do
  @behaviour Tesla.Middleware

  @moduledoc """
  Log full reqeust/response content
  ### Example usage
  ```
  defmodule MyClient do
    use Tesla
    plug Tesla.Middleware.DebugLogger
  end
  ```
  ### Logger output
  ```
  2017-09-30 13:41:56.281 [debug] > POST https://httpbin.org/post
  2017-09-30 13:41:56.281 [debug]
  2017-09-30 13:41:56.281 [debug] > a=3
  2017-09-30 13:41:56.432 [debug]
  2017-09-30 13:41:56.432 [debug] < HTTP/1.1 200
  2017-09-30 13:41:56.432 [debug] < access-control-allow-credentials: true
  2017-09-30 13:41:56.432 [debug] < access-control-allow-origin: *
  2017-09-30 13:41:56.432 [debug] < connection: keep-alive
  2017-09-30 13:41:56.432 [debug] < content-length: 280
  2017-09-30 13:41:56.432 [debug] < content-type: application/json
  2017-09-30 13:41:56.432 [debug] < date: Sat, 30 Sep 2017 11:41:55 GMT
  2017-09-30 13:41:56.432 [debug] < server: meinheld/0.6.1
  2017-09-30 13:41:56.432 [debug] < via: 1.1 vegur
  2017-09-30 13:41:56.432 [debug] < x-powered-by: Flask
  2017-09-30 13:41:56.432 [debug] < x-processed-time: 0.0011260509491
  2017-09-30 13:41:56.432 [debug]
  2017-09-30 13:41:56.432 [debug] < {
  "args": {},
  "data": "a=3",
  "files": {},
  "form": {},
  "headers": {
  "Connection": "close",
  "Content-Length": "3",
  "Content-Type": "",
  "Host": "httpbin.org"
  },
  "json": null,
  "origin": "0.0.0.0",
  "url": "https://httpbin.org/post"
  }
  ```
  """
  require Logger

  # Logs the full request, runs the stack, then logs the full response.
  # "> " marks request lines, "< " marks response lines.
  def call(env, next, _opts) do
    env
    |> log_request
    |> log_headers("> ")
    |> log_params("> ")
    |> log_body("> ")
    |> Tesla.run(next)
    |> log_response
    |> log_headers("< ")
    |> log_body("< ")
  rescue
    ex in Tesla.Error ->
      # Fix: __STACKTRACE__ instead of the deprecated System.stacktrace/0.
      _ = log_exception(ex, "< ")
      reraise ex, __STACKTRACE__
  end

  defp log_request(env) do
    _ = Logger.debug("> #{env.method |> to_string |> String.upcase()} #{env.url}")
    env
  end

  defp log_response(env) do
    _ = Logger.debug("")
    _ = Logger.debug("< HTTP/1.1 #{env.status}")
    env
  end

  defp log_headers(env, prefix) do
    for {k, v} <- env.headers do
      _ = Logger.debug("#{prefix}#{k}: #{v}")
    end
    env
  end

  defp log_params(env, prefix) do
    encoded_query = Enum.flat_map(env.query, &Tesla.encode_pair/1)
    for {k, v} <- encoded_query do
      _ = Logger.debug("#{prefix} Query Param '#{k}': '#{v}'")
    end
    env
  end

  defp log_body(%Tesla.Env{} = env, prefix) do
    # Bug fix: the prefix was hard-coded to "> " here, so response bodies
    # were logged with the request marker. Propagate the caller's prefix.
    Map.update!(env, :body, &log_body(&1, prefix))
  end
  defp log_body(nil, _), do: nil
  defp log_body([], _), do: nil
  defp log_body(%Stream{} = stream, prefix), do: log_body_stream(stream, prefix)
  defp log_body(stream, prefix) when is_function(stream), do: log_body_stream(stream, prefix)
  defp log_body(%Tesla.Multipart{} = mp, prefix), do: log_multipart_body(mp, prefix)
  defp log_body(data, prefix) when is_binary(data) or is_list(data) do
    _ = Logger.debug("")
    _ = Logger.debug(prefix <> to_string(data))
    data
  end

  # Returns a lazy stream that logs each line as it is consumed downstream.
  defp log_body_stream(stream, prefix) do
    _ = Logger.debug("")
    Stream.each(stream, fn line -> Logger.debug(prefix <> line) end)
  end

  defp log_multipart_body(%Tesla.Multipart{} = mp, prefix) do
    _ = Logger.debug("")
    _ = Logger.debug(prefix <> "boundary: " <> mp.boundary)
    _ = Logger.debug(prefix <> "content_type_params: " <> inspect(mp.content_type_params))
    Enum.each(mp.parts, &Logger.debug(prefix <> inspect(&1)))
    mp
  end

  defp log_exception(%Tesla.Error{message: message, reason: reason}, prefix) do
    _ = Logger.debug(prefix <> message <> " (#{inspect(reason)})")
  end
end
|
lib/tesla/middleware/logger.ex
| 0.798462
| 0.669684
|
logger.ex
|
starcoder
|
defmodule Oban.Plugins.Reindexer do
  @moduledoc """
  Periodically rebuild indexes to minimize database bloat.
  Over time various Oban indexes may grow without `VACUUM` cleaning them up properly. When this
  happens, rebuilding the indexes will release bloat.
  The plugin uses `REINDEX` with the `CONCURRENTLY` option to rebuild without taking any locks
  that prevent concurrent inserts, updates, or deletes on the table.
  Note: This plugin requires the `CONCURRENT` option, which is only available in Postgres 12 and
  above.
  ## Using the Plugin
  By default, the plugin will reindex once a day, at midnight UTC:
      config :my_app, Oban,
        plugins: [Oban.Plugins.Reindexer],
        ...
  To run on a different schedule you can provide a cron expression. For example, you could use the
  `"@weekly"` shorthand to run once a week on Sunday:
      config :my_app, Oban,
        plugins: [{Oban.Plugins.Reindexer, schedule: "@weekly"}],
        ...
  ## Options
  * `:schedule` — a cron expression that controls when to reindex. Defaults to `"@midnight"`.
  * `:timezone` — which timezone to use when evaluating the schedule. To use a timezone other than
    the default of "Etc/UTC" you *must* have a timezone database like [tzdata][tzdata] installed
    and configured.
  [tzdata]: https://hexdocs.pm/tzdata
  """
  use GenServer

  alias Oban.{Config, Peer, Repo}
  alias Oban.Cron.Expression
  alias Oban.Plugins.Cron

  @type option :: {:conf, Config.t()} | {:name, GenServer.name()} | {:schedule, binary()}

  defmodule State do
    @moduledoc false
    defstruct [:conf, :name, :schedule, :timer, timezone: "Etc/UTC"]
  end

  @doc false
  @spec start_link([option()]) :: GenServer.on_start()
  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: opts[:name])
  end

  @impl GenServer
  def init(opts) do
    opts =
      opts
      |> Keyword.put_new(:schedule, "@midnight")
      |> Keyword.update!(:schedule, &Expression.parse!/1)

    state = struct!(State, opts)

    {:ok, schedule_reindex(state)}
  end

  @impl GenServer
  def terminate(_reason, %State{timer: timer}) do
    if is_reference(timer), do: Process.cancel_timer(timer)
    :ok
  end

  @impl GenServer
  def handle_info(:reindex, %State{} = state) do
    meta = %{conf: state.conf, plugin: __MODULE__}

    :telemetry.span([:oban, :plugin], meta, fn ->
      case check_leadership_and_reindex(state) do
        {:ok, _} ->
          {:ok, meta}
        error ->
          {:error, Map.put(meta, :error, error)}
      end
    end)

    {:noreply, schedule_reindex(state)}
  end

  # Scheduling

  # Arms a timer for the start of the next minute; the cron expression is
  # evaluated each time :reindex fires.
  defp schedule_reindex(state) do
    timer = Process.send_after(self(), :reindex, Cron.interval_to_next_minute())

    %{state | timer: timer}
  end

  # Reindexing

  # Only the cluster leader reindexes, and only when the cron schedule
  # matches the current minute. All no-op paths return {:ok, []}.
  defp check_leadership_and_reindex(state) do
    if Peer.leader?(state.conf) do
      {:ok, datetime} = DateTime.now(state.timezone)

      if Expression.now?(state.schedule, datetime) do
        table = "#{state.conf.prefix}.oban_jobs"

        Repo.query(state.conf, "REINDEX TABLE CONCURRENTLY #{table}", [])
      else
        # Bug fix: without this branch the inner `if` returned nil, which
        # handle_info reported as a telemetry *error* on every minute the
        # schedule didn't match. An off-schedule check is a successful no-op.
        {:ok, []}
      end
    else
      {:ok, []}
    end
  end
end
|
lib/oban/plugins/reindexer.ex
| 0.865039
| 0.595228
|
reindexer.ex
|
starcoder
|
defmodule DBConnection.Sojourn.Regulator do
  @moduledoc """
  A `:sregulator` callback module using an unlimited queue and a CoDel strategy
  for the valve.
  ### Connection options
  * `:idle_interval` - The time interval in milliseconds before the pool will
    start increasing connections, ideally the 95-99 percentile the connection
    and handshake time for a database connection (default: `100`)
  * `:idle_target` - The target idle time for a connection in milliseconds,
    ideally 5-10% of the `:idle_interval` so the connection queue does not
    shrink or grow too quickly (default: `div(idle_interval, 20)`)
  ### Underload alarm options
  * `:underload_alarm` - Either `true` to set an alarm on underload or `false`
    not to - underload means less connection processes are queuing with the
    regulator than expected and should only occur if connection processes are
    unable to connect to the database (default: `true`)
  * `:underload_target` - The target time in milliseconds for the regulator to
    wait for a connection process (default: `500`)
  * `:underload_interval` - The interval in milliseconds for an alarm to be
    set when the regulator is waiting longer than the target time for a
    connection process (default: `5_000`)
  """

  if Code.ensure_loaded?(:sregulator) do
    @behaviour :sregulator
  end

  @doc false
  def init(opts) do
    # Unlimited drop-queue, a CoDel valve sized from the pool options, and
    # (optionally) an underload alarm meter.
    queue = {:sbroker_drop_queue, {:out, :drop, :infinity}}
    {:ok, {queue, conn_valve(opts), underload_meters(opts)}}
  end

  ## Helpers

  # Builds the CoDel valve spec: {target, interval, min_size, max_size}.
  defp conn_valve(opts) do
    interval = Keyword.get(opts, :idle_interval, 100)
    target = Keyword.get(opts, :idle_target, div(interval, 20))
    size = Keyword.get(opts, :pool_size, 10)
    max_size = size + Keyword.get(opts, :pool_overflow, 0)

    {:sregulator_codel_valve, {target, interval, size, max_size}}
  end

  # Returns the meter list; empty when the underload alarm is disabled.
  # Deliberately a strict true/false case: any other value is a caller bug.
  defp underload_meters(opts) do
    case Keyword.get(opts, :underload_alarm, true) do
      true ->
        target = Keyword.get(opts, :underload_target, 500)
        interval = Keyword.get(opts, :underload_interval, 5_000)

        [{:sregulator_underload_meter, {target, interval, alarm_id(opts)}}]

      false ->
        []
    end
  end

  # Alarm identity: the registered name when available, otherwise the broker pid.
  defp alarm_id(opts) do
    case Keyword.get(opts, :name) do
      nil -> {:underload, Keyword.fetch!(opts, :broker_pid)}
      name -> {:underload, name}
    end
  end
end
|
deps/db_connection/lib/db_connection/sojourn/regulator.ex
| 0.791902
| 0.493164
|
regulator.ex
|
starcoder
|
defmodule Ethereum do
  @moduledoc """
  This library presents a convenient interface to control a full Ethereum node from Elixir,
  abstracting away the need to deal with the JSON-RPC API directly.
  It decodes the hex responses when necessary and functions return the idiomatic {:ok, data} | {:error, reason}
  tuples whenever possible. The goal is to cover the entire JSON-RPC API for Geth/Parity.
  The main module acts as an "interface" or "facade".
  It delegates all functionality to submodules for clarity and to keep modules smaller.
  """
  alias Ethereum.Web3
  alias Ethereum.Eth
  alias Ethereum.Net
  alias Ethereum.Personal
  alias Ethereum.Aggregates
  alias Ethereum.HexUtils
  alias Ethereum.ABI
  alias Ethereum.TxPool

  ## Eth Namespace Functions
  # ALPHA AND REFERENCE
  defdelegate get_transaction_by_hash(hash), to: Eth
  defdelegate get_transaction_receipt_by_hash(hash), to: Eth
  defdelegate get_block_by_hash(hash, full_txns), to: Eth
  defdelegate get_block_by_number(number, full \\ false), to: Eth
  defdelegate get_balance(account_hash), to: Eth
  defdelegate protocol_version(), to: Eth
  defdelegate syncing(), to: Eth
  defdelegate coinbase(), to: Eth
  defdelegate mining(), to: Eth
  defdelegate hashrate(), to: Eth
  defdelegate gas_price(), to: Eth
  defdelegate accounts(), to: Eth
  defdelegate block_number(), to: Eth
  defdelegate transaction_count(hash), to: Eth
  defdelegate get_filter_changes(hash), to: Eth
  defdelegate get_filter_logs(hash), to: Eth
  defdelegate eth_call(params), to: Eth
  defdelegate eth_send(transaction), to: Eth, as: :eth_send_transaction
  defdelegate uninstall_filter(id), to: Eth
  defdelegate new_filter(map), to: Eth
  # Fix: removed a second, duplicate run of defdelegates for
  # transaction_count/1, get_filter_changes/1, uninstall_filter/1 and
  # new_filter/1 — the repeated definitions produced unreachable duplicate
  # clauses (and compiler warnings) without changing behaviour.

  ## Web3 Namespace Functions
  defdelegate client_version(), to: Web3
  defdelegate sha3(str), to: Web3
  defdelegate decode_abi_event(data, signature), to: Web3, as: :decode_event

  ## Net Namespace Functions
  defdelegate version(), to: Net
  defdelegate peer_count(), to: Net
  defdelegate listening(), to: Net

  ## Personal Namespace Functions
  defdelegate new_account(password, password_confirmation), to: Personal
  defdelegate unlock_account(account, password), to: Personal
  defdelegate lock_account(account), to: Personal
  defdelegate send_transaction(from, to, amount, password), to: Personal

  ## Aggregate/Stats Functions
  defdelegate get_recent_averages(sample_size), to: Aggregates
  defdelegate get_recent_blocks(sample_size), to: Aggregates
  defdelegate get_recent_blocktimes(sample_size), to: Aggregates
  defdelegate get_recent_blocks_with_blocktimes(sample_size), to: Aggregates
  defdelegate get_recent_transactions_per_block(sample_size), to: Aggregates
  defdelegate get_average_blocktime(blocks), to: Aggregates
  defdelegate get_average_difficulty(blocks), to: Aggregates

  ## Encoding + Utils
  defdelegate unhex(str), to: HexUtils
  defdelegate to_hex(str), to: HexUtils
  defdelegate is_valid_address?(address), to: HexUtils
  defdelegate hex_to_decimal(hex_string), to: HexUtils

  ## ABI Functions
  defdelegate load_abi(file), to: ABI
  defdelegate reformat_abi(abi), to: ABI
  defdelegate abi_method_signature(abi, name), to: ABI, as: :method_signature
  defdelegate encode_abi_event(signature), to: ABI, as: :encode_event
  defdelegate encode_abi_data(types_signature, data), to: ABI, as: :encode_data
  defdelegate encode_abi_options(options, keys), to: ABI, as: :encode_options
  defdelegate encode_abi_option(value), to: ABI, as: :encode_option
  defdelegate encode_abi_method_call(abi, name, input), to: ABI, as: :encode_method_call
  defdelegate decode_abi_data(types_signature, data), to: ABI, as: :decode_data
  defdelegate decode_abi_output(abi, name, output), to: ABI, as: :decode_output
  defdelegate abi_keys_to_decimal(map, keys), to: ABI, as: :keys_to_decimal

  # TX Pool functions
end
|
lib/ethereum.ex
| 0.736021
| 0.547222
|
ethereum.ex
|
starcoder
|
defmodule Sanbase.SmartContracts.Utils do
  require Logger

  @type address :: String.t()
  @type contract_function :: String.t() | %ABI.FunctionSelector{}

  @doc ~s"""
  Example usage:
  If we have the abi specification obtained by:
  ```
  File.read!("some_abi.json")
  |> Jason.decode!
  |> ABI.parse_specification
  |> Enum.find(&(&1.function == "walletProposals"))
  ```
  ```
  function_selector = %ABI.FunctionSelector{
    function: "walletProposals",
    input_names: ["startRequestId", "pageSize"],
    inputs_indexed: nil,
    method_id: <<50, 0, 127, 55>>,
    returns: [
      array: {:tuple,
      [
        {:uint, 256},
        :address,
        {:uint, 256},
        {:uint, 8},
        :bool,
        {:uint, 256},
        {:uint, 256},
        {:uint, 256},
        {:uint, 256},
        {:uint, 256},
        {:uint, 256}
      ]}
    ],
    type: :function,
    types: [uint: 256, uint: 256]
  }
  ```
  We can execute a contract function/event that way:
  ```
  call_contract(
    contract_address_string
    function_selector
    [0, 10], #input args of the corresponding `function_selector.input_names` with types `function_selector.types`
    function_selector.returns
  )
  ```
  """
  @spec call_contract(address, contract_function, list(), list()) :: any()
  def call_contract(contract, contract_function, args, return_types) do
    # https://docs.soliditylang.org/en/latest/abi-spec.html#function-selector-and-argument-encoding
    Logger.info(
      "[EthNode] Eth call contract with function #{get_function_name(contract_function)}."
    )

    call_data = "0x" <> encode_call(contract_function, args)

    with {:ok, hex_encoded_binary_response} <-
           Ethereumex.HttpClient.eth_call(%{data: call_data, to: contract}) do
      decode_response(hex_encoded_binary_response, return_types)
    end
  end

  @doc ~s"""
  Batched variant of `call_contract/4`: issues one JSON-RPC batch request with
  an `eth_call` per argument list and decodes every response.
  Accepts an optional `:transform_args_list_fun` to rewrite each call's
  argument list before it is sent.
  """
  def call_contract_batch(contract, contract_function, args_lists, return_types, opts \\ []) do
    transform_args = Keyword.get(opts, :transform_args_list_fun, fn x -> x end)

    Logger.info(
      "[EthNode] Eth call contract batch with function #{get_function_name(contract_function)}."
    )

    requests =
      for args <- args_lists do
        call = %{data: "0x" <> encode_call(contract_function, args), to: contract}
        {:eth_call, transform_args.([call])}
      end

    with {:ok, responses} <- Ethereumex.HttpClient.batch_request(requests) do
      Enum.map(responses, &decode_response(&1, return_types))
    end
  end

  @doc ~s"""
  Converts an integer amount expressed in the token's smallest unit into a
  float, dividing by 10^decimals (default 18, the common ERC-20 precision).
  """
  def format_number(number, decimals \\ 18) do
    number / Sanbase.Math.ipow(10, decimals)
  end

  @doc ~s"""
  Decodes a "0x"-prefixed hex address string into its raw binary form.
  """
  def format_address(address) do
    {:ok, decoded} =
      address
      |> String.slice(2..-1)
      |> Base.decode16(case: :mixed)

    decoded
  end

  @doc ~s"""
  Encodes a raw binary address as a lowercase "0x"-prefixed hex string.
  """
  def encode_address(address), do: "0x" <> Base.encode16(address, case: :lower)

  @doc ~s"""
  Removes leading zeros from a "0x"-prefixed hex value, returning the
  lowercased, zero-stripped "0x" form.
  """
  def address_strip_zeros(address) do
    stripped =
      address
      |> String.slice(2..-1)
      |> Integer.parse(16)
      |> elem(0)
      |> Integer.to_string(16)
      |> String.downcase()

    "0x" <> stripped
  end

  # ABI-encodes a function call into its lowercase hex payload (no "0x").
  defp encode_call(contract_function, args) do
    contract_function
    |> ABI.encode(args)
    |> Base.encode16(case: :lower)
  end

  # Strips the "0x" prefix, hex-decodes, and ABI-decodes a node response.
  defp decode_response(hex_encoded_binary_response, return_types) do
    hex_encoded_binary_response
    |> String.slice(2..-1)
    |> Base.decode16!(case: :lower)
    |> ABI.TypeDecoder.decode_raw(return_types)
  end

  # Human-readable function name for log messages, whatever form was given.
  defp get_function_name(function) when is_binary(function), do: function
  defp get_function_name(%{function: function}), do: function
  defp get_function_name(function), do: inspect(function) <> "Unexpected"
end
|
lib/sanbase/smart_contracts/utils.ex
| 0.866486
| 0.710766
|
utils.ex
|
starcoder
|
defmodule Hologram do
  @moduledoc ~S'''
  Hologram is a full stack isomorphic Elixir web framework that can be used on top of Phoenix.
  ## Inspired by
  Hologram was inspired by Elm, Phoenix LiveView, Surface, Svelte, Vue.js, Mint, Ruby on Rails.
  ## How it works
  The Hologram concept is that your web app is composed from the basic Hologram blocks of Layouts, Pages and Components.
  Hologram builds a call graph from the content of your Pages (which must follow some basic conventions) and determines what code is to be used on the client and what code is to be used on the server. Hologram then transpiles the code to be used on the client to JavaScript.
  Because the state is kept on the client, the programming model is simpler and thanks to stateless or stateful components the app is easily scalable.
  Code that is to be run on the client is encapsulated in “actions”, and code that is to be run on the server is encapsulated in “commands”.
  Actions can trigger commands, commands can trigger actions. Both actions and commands can be triggered directly by DOM events.
  The Client communicates with the Server using websockets. There is no boilerplate code required, Hologram automatically works out what is required.
  ## I want to see some code!
  To see how a Hologram app is structured, and see some actual code, take a look at Hologram’s test app: [hologram/test/e2e](https://github.com/bartblast/hologram/tree/master/test/e2e)
  ## Basic example
      defmodule MyPage do
        use Hologram.Page
        route "/my-page-path"
        def init(_params, _conn) do
          %{
            count: 0
          }
        end
        def template do
          ~H"""
          <div>Count is {@count}</div>
          <button on:click={:increment, by: 3}>Increment by</button>
          <Link to={MyOtherPage}>Go to other page</Link>
          """
        end
        def action(:increment, params, state) do
          put(state, :count, state.count + params.by)
        end
        def command(:save_to_db, _params) do
          # Repo.update (…)
          :counter_saved
        end
      end
  ## Is it used in production?
  Yes, it's used in Segmetric: https://www.segmetric.com/. Take a look at the “join beta” and “contact” pages which showcase form handling, or check the mobile menu.
  This all works on transpiled Elixir code! (But please, submit the forms only if you actually want to join the Segmetric beta or contact Segmetric - it is a live page, thanks!)
  ## Selling Points
  * State on the client - and all of the problems that get solved by this approach (below)…
  * No latency issues as most of the code is run immediately on the client. This makes it possible to create rich UI or even games.
  At the moment with LiveView you need something like fly.io to make it bearable, but you still have latency and can’t guarantee
  the response time (there is always some variance). And you still need some JS or Alpine to make it work. Until someone manages
  to create quantum internet (e.g. by taking advantage of entanglement), there are no workarounds for this problem.
  Not sure if this is even technically possible, though :wink:
  * Better offline support (internet connection loss, poor signal, etc.). Since most of the code is run on the client and you only hit the server to run some command from time to time,
  Hologram can work offline most of the time. This would also make it possible to create PWA’s or mobile apps through WebView, assuming you use something like LocalStorage.
  * Less server RAM used - state is kept in the browser instead of the socket.
  * Less CPU used - most of the code is run by the browser not by the server.
  * Less bandwidth used - only commands need to communicate with the server, no need to send diffs to rerender components.
  * No state sync problems - state is kept only in one place (browser) and the websocket communication used is stateless.
  * No JS or Alpine.js needed except for communication with some third party scripts or widgets,
  but this can also be solved by creating some standardized libs for popular packages that would handle the interop.
  * Very friendly to new Elixir converts or beginner devs. I want it to be very, very intuitive, so that you can focus on working on new features in your project instead
  of solving technical problems and writing boilerplate code.
  ## Roadmap
  This is work in progress (although usable and used in production). To check what works and what is planned - take a look at the roadmap in the readme at: [Github bartblast/hologram](https://github.com/bartblast/hologram#readme)
  To meet the objective of being a very friendly developer experience, Hologram will provide out of the box such things as a UI component library (CSS framework agnostic),
  authentication, authorization, easy debugging (with time travel), caching, localization and some other features that you typically use in a web app.
  I believe that using Hologram’s approach, i.e. Elixir-JS transpilation, code on the client and the action/command architecture it will be possible to create something as productive as Rails,
  without its shortcomings related to scalability, efficiency, etc.
  ## History / background
  I tried to write this kind of framework first in Ruby, and actually managed to create a working prototype, but the performance was not satisfactory.
  Then I tried Crystal, but it was very hard to work with its AST. Then I moved to Kotlin, but I realised that it’s better to use a dynamically typed language …
  Then I found Elixir in 2018 and fell in love with it. I started work on Hologram in the summer of 2020.
  '''
end
|
lib/hologram/hologram.ex
| 0.712632
| 0.68941
|
hologram.ex
|
starcoder
|
defmodule ExSentry.LoggerBackend do
  @moduledoc ~S"""
  `ExSentry.LoggerBackend` is a backend for the Elixir `Logger` app.
  It captures all log messages above a given severity level (default `:error`)
  with `ExSentry.capture_message`.
  ## Usage
  1. Install the logger backend with:
         Logger.add_backend(ExSentry.LoggerBackend)
     or, in `mix.exs`:
         # Warning! Removes other configured backends!
         config :logger, backends: [ExSentry.LoggerBackend]
  2. (Optional) Configure the log level with:
         Logger.configure_backend(ExSentry.LoggerBackend, level: :warn)
     or, in `mix.exs`:
         config :exsentry, :logger_backend, level: :warn
  ## Available configuration parameters
  * `:level` - Sets log level to `level` and above. Atom.
  * `:log_levels` - Sets log levels specifically; supersedes `:level`.
    List of atoms.
  """

  # GenEvent is deprecated but is what Logger backends of this vintage use.
  use GenEvent

  defmodule State do
    @moduledoc false

    # Maps a threshold level to the list of levels that should be captured.
    @levels %{
      error: [:error],
      warn: [:error, :warn],
      info: [:error, :warn, :info],
      debug: [:error, :warn, :info, :debug]
    }

    defstruct log_levels: @levels.error

    def new(opts \\ []), do: %__MODULE__{} |> set(opts)

    # Precedence: explicit :log_levels, then :level, then the current value.
    def set(%__MODULE__{}=state, opts) do
      log_levels = opts[:log_levels] || @levels[opts[:level]] || state.log_levels
      %{state | log_levels: log_levels}
    end
  end

  @doc false
  def init(_) do
    # Fix: the original call had a trailing comma after the last argument
    # (`get_config(:log_levels),)`), which is a syntax error in Elixir.
    {:ok, State.new(
      level: get_config(:level),
      log_levels: get_config(:log_levels)
    )}
  end

  @doc false
  def handle_call({:configure, opts}, %State{}=state) do
    {:ok, :ok, State.set(state, opts)}
  end

  @doc false
  def handle_event({level, _gl, {Logger, msg, _ts, _md}}, %State{}=state) do
    ## The lack of "when node(gl) == node()" is deliberate. I'd rather
    ## ExSentry tend toward over-reporting rather than potentially miss
    ## valuable crash information from another node.
    if level in state.log_levels, do: ExSentry.capture_message(msg)
    {:ok, state}
  end

  @doc false
  def handle_event(_event, %State{}=state) do
    {:ok, state}
  end

  # Reads a key from the :exsentry, :logger_backend app env (nil if unset).
  defp get_config(key) do
    Application.get_env(:exsentry, :logger_backend, []) |> Keyword.get(key)
  end
end
|
lib/exsentry/logger_backend.ex
| 0.726717
| 0.406567
|
logger_backend.ex
|
starcoder
|
use Croma

defmodule Antikythera.Time do
  @moduledoc """
  Data structure to represent date and time in milli-seconds resolution.
  Note that all values of `Antikythera.Time.t` are in UTC.
  `Poison.Encoder` protocol is implemented for `Antikythera.Time.t`,
  so that values of this type can be directly converted to `Antikythera.IsoTimestamp.t` on `Poison.encode/1`.

      iex> Poison.encode(%{time: {Antikythera.Time, {2017, 1, 1}, {0, 0, 0}, 0}})
      {:ok, "{\"time\":\"2017-01-01T00:00:00.000+00:00\"}"}

  See also `new/1`.
  """

  alias Croma.Result, as: R
  alias Antikythera.{IsoTimestamp, ImfFixdate}
  alias Antikythera.IsoTimestamp.Basic, as: IsoBasic
  alias Antikythera.MilliSecondsInGregorian

  @typep milliseconds :: 0..999
  @type t :: {__MODULE__, :calendar.date, :calendar.time, milliseconds}

  # Clause-style Croma `defun`: the bare `pattern -> expr` clauses match on
  # the single argument `v`.
  defun valid?(v :: term) :: boolean do
    {__MODULE__, date, {h, m, s}, ms} -> :calendar.valid_date(date) and h in 0..23 and m in 0..59 and s in 0..59 and ms in 0..999
    _ -> false
  end

  @doc """
  Convert timestamps into `Antikythera.Time.t`, leveraging `recursive_new?` option of `Croma.Struct`.
  Only `Antikythera.IsoTimestamp.t` can be converted.
  """
  defun new(s :: v[IsoTimestamp.t]) :: R.t(t), do: from_iso_timestamp(s)

  # Truncation helpers: zero out every sub-unit below the named resolution.
  defun truncate_to_day(   {__MODULE__, date, {_   , _     , _     }, _} :: t) :: t, do: {__MODULE__, date, {0   , 0     , 0     }, 0}
  defun truncate_to_hour(  {__MODULE__, date, {hour, _     , _     }, _} :: t) :: t, do: {__MODULE__, date, {hour, 0     , 0     }, 0}
  defun truncate_to_minute({__MODULE__, date, {hour, minute, _     }, _} :: t) :: t, do: {__MODULE__, date, {hour, minute, 0     }, 0}
  defun truncate_to_second({__MODULE__, date, {hour, minute, second}, _} :: t) :: t, do: {__MODULE__, date, {hour, minute, second}, 0}

  defun now() :: t do
    from_epoch_milliseconds(System.system_time(:millisecond))
  end

  # Formats as "YYYY-MM-DDThh:mm:ss.mmm+00:00" (always UTC offset).
  defun to_iso_timestamp({__MODULE__, {y, mon, d}, {h, min, s}, millis} :: t) :: IsoTimestamp.t do
    import Antikythera.StringFormat
    <<Integer.to_string(y) :: binary-size(4), "-",
      pad2(mon)            :: binary-size(2), "-",
      pad2(d)              :: binary-size(2), "T",
      pad2(h)              :: binary-size(2), ":",
      pad2(min)            :: binary-size(2), ":",
      pad2(s)              :: binary-size(2), ".",
      pad3(millis)         :: binary-size(3), "+00:00">>
  end

  # Formats as "YYYYMMDDThhmmssZ" (basic ISO 8601, milliseconds dropped).
  defun to_iso_basic({__MODULE__, {y, mon, d}, {h, min, s}, _} :: t) :: IsoBasic.t do
    import Antikythera.StringFormat
    <<Integer.to_string(y) :: binary-size(4),
      pad2(mon)            :: binary-size(2),
      pad2(d)              :: binary-size(2), "T",
      pad2(h)              :: binary-size(2),
      pad2(min)            :: binary-size(2),
      pad2(s)              :: binary-size(2), "Z">>
  end

  defun from_iso_timestamp(s :: v[String.t]) :: R.t(t) do
    R.try(fn ->
      <<year   :: binary-size(4), "-",
        month  :: binary-size(2), "-",
        day    :: binary-size(2), "T",
        hour   :: binary-size(2), ":",
        minute :: binary-size(2), ":",
        second :: binary-size(2),
        rest1  :: binary>> = s
      {millis, rest2} = extract_millis(rest1)
      # FIX: the original tuple literal had a trailing comma after `millis`,
      # which is a syntax error in Elixir.
      time = {
        __MODULE__,
        {String.to_integer(year), String.to_integer(month ), String.to_integer(day   )},
        {String.to_integer(hour), String.to_integer(minute), String.to_integer(second)},
        millis
      }
      adjust_by_timezone_offset(time, rest2)
    end)
    |> R.bind(&R.wrap_if_valid(&1, __MODULE__))
  end
  R.define_bang_version_of(from_iso_timestamp: 1)

  defun from_iso_basic(s :: v[String.t]) :: R.t(t) do
    R.try(fn ->
      <<year   :: binary-size(4),
        month  :: binary-size(2),
        day    :: binary-size(2), "T",
        hour   :: binary-size(2),
        minute :: binary-size(2),
        second :: binary-size(2),
        rest   :: binary>> = s
      # FIX: trailing comma removed here as well (syntax error in Elixir).
      time = {
        __MODULE__,
        {String.to_integer(year), String.to_integer(month ), String.to_integer(day   )},
        {String.to_integer(hour), String.to_integer(minute), String.to_integer(second)},
        0
      }
      adjust_by_timezone_offset(time, rest)
    end)
    |> R.bind(&R.wrap_if_valid(&1, __MODULE__))
  end
  R.define_bang_version_of(from_iso_basic: 1)

  # Pulls an optional ".mmm" fractional part off the front of `str`.
  defp extract_millis(str) do
    case str do
      <<".", millis :: binary-size(3), rest :: binary>> -> {String.to_integer(millis), rest}
      _                                                 -> {0, str}
    end
  end

  # Normalizes a parsed local time to UTC by undoing the timezone offset.
  defp adjust_by_timezone_offset(t, str) do
    case extract_timezone_offset_minutes(str) do
      0              -> t
      offset_minutes -> shift_minutes(t, -offset_minutes)
    end
  end

  # Accepts "+hh:mm", "+hhmm", "-hh:mm", "-hhmm" or "Z"; raises (inside
  # R.try) on anything else, which surfaces as an error result.
  defp extract_timezone_offset_minutes(str) do
    case str do
      <<"+", h :: binary-size(2), ":", m :: binary-size(2)>> ->  convert_to_minutes(h, m)
      <<"+", h :: binary-size(2),      m :: binary-size(2)>> ->  convert_to_minutes(h, m)
      <<"-", h :: binary-size(2), ":", m :: binary-size(2)>> -> -convert_to_minutes(h, m)
      <<"-", h :: binary-size(2),      m :: binary-size(2)>> -> -convert_to_minutes(h, m)
      "Z"                                                    -> 0
    end
  end

  defp convert_to_minutes(hour, minute) do
    String.to_integer(hour) * 60 + String.to_integer(minute)
  end

  defun shift_milliseconds(t :: v[t], milliseconds :: v[integer]) :: t do
    from_gregorian_milliseconds(to_gregorian_milliseconds(t) + milliseconds)
  end
  defun shift_seconds(t :: v[t], seconds :: v[integer]) :: t, do: shift_milliseconds(t, seconds * 1_000)
  defun shift_minutes(t :: v[t], minutes :: v[integer]) :: t, do: shift_milliseconds(t, minutes * 60 * 1_000)
  defun shift_hours(  t :: v[t], hours   :: v[integer]) :: t, do: shift_milliseconds(t, hours * 60 * 60 * 1_000)
  defun shift_days(   t :: v[t], days    :: v[integer]) :: t, do: shift_milliseconds(t, days * 24 * 60 * 60 * 1_000)

  defun diff_milliseconds(t1 :: v[t], t2 :: v[t]) :: integer do
    to_gregorian_milliseconds(t1) - to_gregorian_milliseconds(t2)
  end

  defun to_gregorian_milliseconds({__MODULE__, d, t, ms} :: t) :: integer do
    seconds = :calendar.datetime_to_gregorian_seconds({d, t})
    seconds * 1000 + ms
  end

  # NOTE(review): `div`/`rem` truncate toward zero, so this assumes a
  # non-negative `milliseconds` (i.e. dates on or after year 0) — confirm.
  defun from_gregorian_milliseconds(milliseconds :: v[integer]) :: t do
    m = rem(milliseconds, 1000)
    s = div(milliseconds, 1000)
    {date, time} = :calendar.gregorian_seconds_to_datetime(s)
    {__MODULE__, date, time, m}
  end

  defun to_epoch_milliseconds(t :: v[t]) :: integer do
    to_gregorian_milliseconds(t) - MilliSecondsInGregorian.time_epoch_offset_milliseconds()
  end

  defun from_epoch_milliseconds(milliseconds :: v[MilliSecondsInGregorian.t]) :: t do
    from_gregorian_milliseconds(milliseconds + MilliSecondsInGregorian.time_epoch_offset_milliseconds())
  end

  @doc """
  Returns date/time in IMF-fixdate format.
  The format is a subset of Internet Message Format (RFC5322, formerly RFC822, RFC1123).
  Defined as 'preferred' format in RFC7231 and modern web servers or clients should send in this format.
  https://tools.ietf.org/html/rfc7231#section-7.1.1.1
  """
  defun to_http_date({__MODULE__, {y, mon, d} = date, {h, min, s}, _} :: t) :: ImfFixdate.t do
    # Not using `:httpd_util.rfc1123_date/2` since it reads inputs as localtime and forcibly perform UTC conversion
    import Antikythera.StringFormat
    day_str = :httpd_util.day(:calendar.day_of_the_week(date))
    mon_str = :httpd_util.month(mon)
    "#{day_str}, #{pad2(d)} #{mon_str} #{y} #{pad2(h)}:#{pad2(min)}:#{pad2(s)} GMT"
  end

  @doc """
  Parses HTTP-date formats into Antikythera.Time.t.
  Supports IMF-fixdate format, RFC 850 format and ANSI C's `asctime()` format for compatibility.
  Note: An HTTP-date value must represent time in UTC(GMT). Thus the timezone string in the end must always be 'GMT'.
  Any other timezone string (such as 'JST') will actually be ignored and parsed as GMT.
  https://tools.ietf.org/html/rfc7231#section-7.1.1.1
  """
  defun from_http_date(s :: v[String.t]) :: R.t(t) do
    case :httpd_util.convert_request_date(String.to_charlist(s)) do # safe to use since it always read input as UTC
      {date, time} -> R.wrap_if_valid({__MODULE__, date, time, 0}, __MODULE__)
      :bad_date    -> {:error, {:bad_date, s}}
    end
  end
  R.define_bang_version_of(from_http_date: 1)
end
|
lib/util/time.ex
| 0.764012
| 0.511046
|
time.ex
|
starcoder
|
defmodule Dispatch.Registry do
  @moduledoc """
  Provides a distributed registry for services.

  This module implements the `Phoenix.Tracker` behaviour to provide a distributed
  registry of services. Services can be added and removed to and from the
  registry.

  Services are identified by their type. The type can be any valid Elixir term,
  such as an atom or string.

  When a node goes down, all associated services will be removed from the
  registry when the CRDT syncs.

  A hash ring is used to determine which service to use for a particular
  term. The term is arbitrary, however the same node and service pid will
  always be returned for a term unless the number of services for the type
  changes.

  ## Optional

    * `:test` - If set to true then a registry and hashring will not be
      started when the application starts. They should be started manually
      with `Dispatch.Registry.start_link/1` and
      `Dispatch.Supervisor.start_hash_ring/2`. Defaults to `false`
  """
  @behaviour Phoenix.Tracker

  @doc """
  Start a new registry. The `pubsub` config value from `Dispatch` will be used.

  ## Examples

      iex> Dispatch.Registry.start_link()
      {:ok, #PID<0.168.0>}
  """
  def start_link(opts \\ []) do
    # FIX: default to [] so a missing :pubsub config doesn't crash —
    # Application.get_env/2 returns nil when unset, and Keyword.get/3
    # raises FunctionClauseError on a non-list.
    pubsub_server =
      Application.get_env(:dispatch, :pubsub, []) |> Keyword.get(:name, Dispatch.PubSub)

    full_opts = Keyword.merge([name: __MODULE__, pubsub_server: pubsub_server], opts)
    Phoenix.Tracker.start_link(__MODULE__, full_opts, full_opts)
  end

  @doc """
  Add a service to the registry. The service is set as online.

    * `type` - The type of the service. Can be any elixir term
    * `pid` - The pid that provides the service

  ## Examples

      iex> Dispatch.Registry.add_service(:downloader, self())
      {:ok, "g20AAAAIlB7XfDdRhmk="}
  """
  def add_service(type, pid) do
    Phoenix.Tracker.track(__MODULE__, pid, type, pid, %{node: node(), state: :online})
  end

  @doc """
  Set a service as online. When a service is online it can be used.

    * `type` - The type of the service. Can be any elixir term
    * `pid` - The pid that provides the service

  ## Examples

      iex> Dispatch.Registry.enable_service(:downloader, self())
      {:ok, "g20AAAAI9+IQ28ngDfM="}
  """
  def enable_service(type, pid) do
    Phoenix.Tracker.update(__MODULE__, pid, type, pid, %{node: node(), state: :online})
  end

  @doc """
  Set a service as offline. When a service is offline it can't be used.

    * `type` - The type of the service. Can be any elixir term
    * `pid` - The pid that provides the service

  ## Examples

      iex> Dispatch.Registry.disable_service(:downloader, self())
      {:ok, "g20AAAAI4oU3ICYcsoQ="}
  """
  def disable_service(type, pid) do
    Phoenix.Tracker.update(__MODULE__, pid, type, pid, %{node: node(), state: :offline})
  end

  @doc """
  Remove a service from the registry.

    * `type` - The type of the service. Can be any elixir term
    * `pid` - The pid that provides the service

  ## Examples

      iex> Dispatch.Registry.remove_service(:downloader, self())
      {:ok, "g20AAAAI4oU3ICYcsoQ="}
  """
  def remove_service(type, pid) do
    Phoenix.Tracker.untrack(__MODULE__, pid, type, pid)
  end

  @doc """
  List all of the services for a particular type.

    * `type` - The type of the service. Can be any elixir term

  ## Examples

      iex> Dispatch.Registry.get_services(:downloader)
      [{#PID<0.166.0>,
        %{node: :"slave2@127.0.0.1", phx_ref: "g20AAAAIHAHuxydO084=",
          phx_ref_prev: "g20AAAAI4oU3ICYcsoQ=", state: :online}}]
  """
  def get_services(type) do
    Phoenix.Tracker.list(__MODULE__, type)
  end

  @doc """
  List all of the services that are online for a particular type.

    * `type` - The type of the service. Can be any elixir term

  ## Examples

      iex> Dispatch.Registry.get_online_services(:downloader)
      [{#PID<0.166.0>,
        %{node: :"slave2@127.0.0.1", phx_ref: "g20AAAAIHAHuxydO084=",
          phx_ref_prev: "g20AAAAI4oU3ICYcsoQ=", state: :online}}]
  """
  def get_online_services(type) do
    get_services(type)
    |> Enum.filter(&(elem(&1, 1)[:state] == :online))
  end

  @doc """
  Find a service to use for a particular `key`

    * `type` - The type of service to retrieve
    * `key` - The key to lookup the service. Can be any elixir term

  ## Examples

      iex> Dispatch.Registry.find_service(:uploader, "file.png")
      {:ok, :"slave1@127.0.0.1", #PID<0.153.0>}
  """
  def find_service(type, key) do
    # A failed clause in `with` falls through and is normalized by `case`.
    with(%HashRing{} = hash_ring <- GenServer.call(hash_ring_server(), {:get, type}),
         {:ok, service_info} <- HashRing.key_to_node(hash_ring, key),
         do: service_info)
    |> case do
      {host, pid} when is_pid(pid) -> {:ok, host, pid}
      _ -> {:error, :no_service_for_key}
    end
  end

  @doc """
  Find a list of `count` service instances to use for a particular `key`

    * `count` - The number of service instances to retrieve
    * `type` - The type of services to retrieve
    * `key` - The key to lookup the service. Can be any elixir term

  ## Examples

      iex> Dispatch.Registry.find_multi_service(2, :uploader, "file.png")
      [{:ok, :"slave1@127.0.0.1", #PID<0.153.0>}, {:ok, :"slave2@127.0.0.1", #PID<0.145.0>}]
  """
  def find_multi_service(count, type, key) do
    with(%HashRing{} = hash_ring <- GenServer.call(hash_ring_server(), {:get, type}),
         {:ok, service_info} <- HashRing.key_to_nodes(hash_ring, key, count),
         do: service_info)
    |> case do
      list when is_list(list) -> list
      _ -> []
    end
  end

  @doc false
  def init(opts) do
    server = Keyword.fetch!(opts, :pubsub_server)
    {:ok, %{pubsub_server: server, hash_rings: %{}}}
  end

  @doc false
  def handle_diff(diff, state) do
    # Replays each type's joins/leaves onto its hash ring, then stores the
    # updated rings back in the hash-ring server.
    hash_rings = GenServer.call(hash_ring_server(), :get_all)

    hash_rings =
      Enum.reduce(diff, hash_rings, fn {type, _} = event, hash_rings ->
        hash_ring =
          hash_rings
          |> Map.get(type, HashRing.new())
          |> remove_leaves(event, state)
          |> add_joins(event, state)

        Map.put(hash_rings, type, hash_ring)
      end)

    GenServer.call(hash_ring_server(), {:put_all, hash_rings})
    {:ok, state}
  end

  # Removes departed services from the ring and broadcasts :leave events.
  # A leave that is immediately re-joined as :online (an update) keeps its
  # ring entry so keys don't get remapped.
  defp remove_leaves(hash_ring, {type, {joins, leaves}}, state) do
    Enum.reduce(leaves, hash_ring, fn {pid, meta}, acc ->
      service_info = {meta.node, pid}
      any_joins = Enum.any?(joins, fn({jpid, %{state: meta_state}}) ->
        jpid == pid && meta_state == :online
      end)
      Phoenix.PubSub.direct_broadcast(node(), state.pubsub_server, type, {:leave, pid, meta})
      case any_joins do
        true -> acc
        _ -> HashRing.remove_node(acc, service_info)
      end
    end)
  end

  # Adds newly-joined :online services to the ring and broadcasts :join
  # events (offline joins are announced but not added).
  defp add_joins(hash_ring, {type, {joins, _leaves}}, state) do
    Enum.reduce(joins, hash_ring, fn {pid, meta}, acc ->
      service_info = {meta.node, pid}
      Phoenix.PubSub.direct_broadcast(node(), state.pubsub_server, type, {:join, pid, meta})
      case meta.state do
        :online ->
          HashRing.add_node(acc, service_info)
        _ -> acc
      end
    end)
  end

  # Name of the GenServer holding the per-type hash rings.
  defp hash_ring_server() do
    Module.concat(__MODULE__, HashRing)
  end
end
|
lib/dispatch/registry.ex
| 0.877444
| 0.607838
|
registry.ex
|
starcoder
|
defmodule Membrane.Element.Fade do
  alias Membrane.Buffer
  alias Membrane.Caps.Audio.Raw
  alias Membrane.Time
  alias __MODULE__.Fading
  alias __MODULE__.MultiplicativeFader, as: Fader
  use Membrane.Filter
  use Membrane.Log, tags: :membrane_element_fade

  def_options fadings: [
                default: [],
                spec: [Fading.t()],
                description: """
                List containing `#{inspect(Fading)}` structs, which specify fade
                parameters over time. See the docs for `t:#{inspect(Fading)}.t/0` for details.
                """
              ],
              initial_volume: [
                default: 0,
                type: :number,
                spec: number,
                description: """
                The fader performs all-time multiplication of the signal.
                `:initial_volume` is an indication of how loud it should be at
                start. It can be specified as non-negative number, with 0 being
                a muted signal, and 1 being 100% loudness. Values greater than 1
                amplify the signal and may cause clipping.
                """
              ],
              step_time: [
                default: 5 |> Time.milliseconds(),
                type: :time,
                description: """
                Determines length of each chunk having equal volume level while
                fading.
                """
              ]

  def_output_pad :output, caps: Raw
  def_input_pad :input, demand_unit: :bytes, caps: Raw

  # Validates and sorts the configured fadings, then builds initial state.
  def handle_init(%__MODULE__{
        fadings: fadings,
        initial_volume: initial_volume,
        step_time: step_time
      }) do
    fadings = fadings |> Enum.sort_by(& &1.at_time)

    with :ok <- fadings |> validate_fadings do
      {:ok,
       %{
         time: 0,                       # stream time processed so far
         fadings: fadings,              # pending fadings, sorted by at_time
         step_time: step_time,
         leftover: <<>>,                # bytes not yet forming a whole frame
         static_volume: initial_volume, # volume applied outside of fadings
         fader_state: nil               # Fader state across chunks of one fading
       }}
    end
  end

  # Ensures consecutive fadings do not overlap in time.
  defp validate_fadings(fadings) do
    fadings
    |> Enum.chunk_every(2, 1, :discard)
    |> Bunch.Enum.try_each(fn
      [%Fading{at_time: t1, duration: d}, %Fading{at_time: t2}] when t1 + d <= t2 ->
        :ok

      [f1, f2] ->
        {:error, {:overlapping_fadings, f1, f2}}
    end)
  end

  def handle_caps(:input, caps, _ctx, state) do
    # New caps invalidate any partial frame buffered so far.
    {{:ok, caps: {:output, caps}}, %{state | leftover: <<>>}}
  end

  def handle_demand(:output, size, :bytes, _ctx, state) do
    {{:ok, demand: {:input, size}}, state}
  end

  def handle_process(:input, %Buffer{payload: payload}, ctx, state) do
    caps = ctx.pads.input.caps

    # Prepend the leftover, split off a whole number of frames to process,
    # and stash the remainder back into :leftover.
    {payload, state} =
      state
      |> Map.get_and_update!(
        :leftover,
        &((&1 <> payload) |> Bunch.Binary.split_int_part(Raw.frame_size(caps)))
      )

    {payload, state} = payload |> process_buffer(caps, state)
    {{:ok, buffer: {:output, %Buffer{payload: payload}}}, state}
  end

  defp process_buffer(<<>>, _caps, state) do
    {<<>>, state}
  end

  # Splits the data into: bytes before the next fading (re-volumed at the
  # static level), bytes inside the fading (faded), and the rest (recursed).
  defp process_buffer(data, caps, %{fadings: [fading | fadings]} = state) do
    %{time: time, static_volume: static_volume, fader_state: fader_state} = state
    bytes_to_revolume = max(0, fading.at_time - time) |> Raw.time_to_bytes(caps)
    bytes_to_fade = (fading.duration - max(0, time - fading.at_time)) |> Raw.time_to_bytes(caps)

    {{to_revolume, to_fade, rest}, end_of_fading} =
      case data do
        <<to_revolume::binary-size(bytes_to_revolume), to_fade::binary-size(bytes_to_fade),
          rest::binary>> ->
          {{to_revolume, to_fade, rest}, true}

        <<to_revolume::binary-size(bytes_to_revolume), to_fade::binary>> ->
          {{to_revolume, to_fade, <<>>}, false}

        _ ->
          {{data, <<>>, <<>>}, false}
      end

    revolumed = to_revolume |> Fader.revolume(caps, static_volume)

    # Frames per constant-volume chunk. FIX: the original used `min(1)`,
    # which clamps the step to at most 1 frame (and allows 0 for sub-frame
    # step times), ignoring the documented :step_time option; `max(1)`
    # guarantees at least one frame per step.
    step = (caps.sample_rate * state.step_time) |> div(1 |> Time.second()) |> max(1)
    frames_left = bytes_to_fade |> Raw.bytes_to_frames(caps)

    {faded, fader_state} =
      to_fade
      |> Fader.fade(
        frames_left,
        step,
        fading.to_level,
        fading.tanh_arg_range,
        static_volume,
        caps,
        fader_state
      )

    consumed_bytes = byte_size(to_revolume) + byte_size(to_fade)
    time = time + (consumed_bytes |> Raw.bytes_to_time(caps))

    state =
      if end_of_fading do
        # Fading finished: its target level becomes the new static volume.
        %{state | fadings: fadings, time: time, static_volume: fading.to_level, fader_state: nil}
      else
        %{state | time: time, fader_state: fader_state}
      end

    {rest, state} = rest |> process_buffer(caps, state)
    {revolumed <> faded <> rest, state}
  end

  # No fadings left: apply the static volume to everything.
  defp process_buffer(data, caps, %{fadings: []} = state) do
    data = data |> Fader.revolume(caps, state.static_volume)
    {data, state |> Map.update!(:time, &(&1 + (data |> byte_size |> Raw.bytes_to_time(caps))))}
  end
end
|
lib/membrane_element_fade.ex
| 0.790611
| 0.49585
|
membrane_element_fade.ex
|
starcoder
|
defmodule Validatex.Validation do
  @moduledoc """
  This module helps with validation of input forms.
  """

  alias Validatex.MapExtra

  @type key() :: String.t() | atom()
  @type error() :: String.t()
  @type errors() :: [error()]
  @type error_or_errors :: error() | errors()
  # A validator maps a raw value to `{:ok, validated}` or `{:error, reason(s)}`.
  @type validator(a, b) :: (a -> Result.t(error_or_errors(), b))
  @type not_validated() :: :not_validated
  @type valid(a) :: {:valid, a}
  @type invalid() :: {:invalid, error_or_errors()}

  @typedoc """
  This type defines the three states of a `field` (or simply consider the `field` as an input form):

  * `not_validated()`: the default value, meaning the input form has not been validated yet,
  * `valid(a)`: input form has been validated and has a valid value,
  * `invalid()`: in the opposite case, it has an invalid value and thus contains one or more error messages.
  """
  @type validity(a) :: not_validated() | valid(a) | invalid()

  @typedoc """
  Defines `field` data type. It contains a (`raw`) value from input form
  and an information about validation of this value.
  """
  @type field(raw, a) :: {:field, raw, validity(a)}

  @typedoc """
  Defines `optional_field` data type. For cases when you need an optional input form.
  """
  @type optional_field(raw, a) :: field(raw, ExMaybe.t(a))

  @type on_submit() :: :on_submit
  @type on_blur() :: :on_blur
  @type on_related_change() :: :on_related_change
  @type on_change(val) :: {:on_change, val}

  @typedoc """
  Event describes four different actions for `field`s:

  * `on_blur()` validates `field` when user leaves an input form
  * `on_change(raw)` validates `field` when user changes value in input field
  * `on_related_change()` validates `field` which is tied with another `field`,
    for example: password and his confirm form
  * `on_submit()` validates all model data (it means all fields) before submitting to server
  """
  @type event(raw) :: on_submit() | on_blur() | on_related_change() | on_change(raw)

  @type model() :: %{required(key()) => field(any(), any())}

  @doc """
  Guard for verifying if key of map is atom or binary.
  """
  defguard key?(k) when is_binary(k) or is_atom(k)

  @doc """
  Guard for verifying if validation function has an arity equal to 1.
  """
  defguard validator?(f) when is_function(f, 1)

  @doc """
  Gets `raw` value from `field`.

      iex> {:field, "bar", :not_validated} |> Validatex.Validation.raw_value()
      "bar"
  """
  @spec raw_value(field(raw, any())) :: raw when raw: var
  def raw_value({:field, raw, _}) do
    raw
  end

  @doc """
  Gets `validity` from `field`.

      iex> {:field, "bar", :not_validated} |> Validatex.Validation.validity()
      :not_validated
  """
  @spec validity(field(any(), a)) :: validity(a) when a: var
  def validity({:field, _, validity}) do
    validity
  end

  @doc """
  Defines `field` with `:not_validated` validity. It's used as init value of your forms,
  e.g. for name, password,...
  See [example](https://github.com/iodevs/validatex_example/blob/master/lib/server_web/live/user/register_live.ex#L157)
  of using.

      iex> Validatex.Validation.field("foo")
      {:field, "foo", :not_validated}
  """
  @spec field(raw) :: {:field, raw, :not_validated} when raw: var
  def field(raw) do
    {:field, raw, :not_validated}
  end

  @doc """
  Has similar functionality as for `field`. But in this case is for an optional input form.
  """
  @spec optional_field(raw) :: {:field, raw, :not_validated} when raw: var
  def optional_field(raw) do
    field(raw)
  end

  @doc """
  If you need to have the `field` with `valid(a)` validity.

      iex> "5" |> Validatex.Validation.pre_validated_field(& &1)
      {:field, "5", {:valid, "5"}}
  """
  @spec pre_validated_field(val, (val -> String.t())) :: {:field, String.t(), valid(val)}
        when val: var
  def pre_validated_field(val, f) do
    # `f` renders the value to its raw (string) representation.
    {:field, f.(val), {:valid, val}}
  end

  @doc """
  If you need to have the `field` with `invalid()` validity. Then you have to add an error
  message.

      iex> Validatex.Validation.field("bar") |> Validatex.Validation.invalidate("Expected foo!")
      {:field, "bar", {:invalid, "Expected foo!"}}
  """
  @spec invalidate(field(raw, any()), String.t()) :: {:field, raw, {:invalid, error()}}
        when raw: var
  def invalidate({:field, raw, _}, err) when is_binary(err) or is_list(err) do
    {:field, raw, {:invalid, err}}
  end

  @doc """
  Applying function on concrete or all valid `field`s according to your needs.

      iex> nv = Validatex.Validation.field("bar")
      {:field, "bar", :not_validated}
      iex> pv = Validatex.Validation.pre_validated_field("foo", & &1)
      {:field, "foo", {:valid, "foo"}}
      iex> data = %{"name" => nv, "surname" => pv}
      %{
        "name" => {:field, "bar", :not_validated},
        "surname" => {:field, "foo", {:valid, "foo"}}
      }
      iex> Validatex.Validation.apply(data, ["name", "surname"], & &1)
      {:invalid, "'name' field isn't valid.'"}
      iex> Validatex.Validation.apply(data, ["surname"],
      ...>   fn %{"surname" => s} -> %{"surname" => String.capitalize(s)} end)
      {:valid, %{"surname" => "Foo"}}
  """
  @spec apply(%{required(key()) => field(any(), a)}, [key()], (%{required(key()) => a} -> b)) ::
          validity(b)
        when a: var, b: var
  def apply(data, fields, f) when is_map(data) and is_list(fields) and is_function(f, 1) do
    data
    |> take(fields)
    |> map(f)
  end

  @doc """
  Gets error from `field`.

      iex> Validatex.Validation.field("bar") |>
      ...>   Validatex.Validation.invalidate("Expected foo!") |>
      ...>   Validatex.Validation.extract_error()
      "Expected foo!"
  """
  @spec extract_error(field(any(), any())) :: ExMaybe.t(error_or_errors())
  def extract_error({:field, _, {:invalid, error}}) do
    error
  end

  # Non-invalid fields carry no error.
  def extract_error({:field, _, _}) do
    nil
  end

  @doc """
  Validation of optional variable.

      iex> f = Validatex.Validation.optional(&Validatex.Validators.not_empty(&1))
      iex> f.("")
      {:ok, nil}
      iex> f.("foo")
      {:ok, "foo"}
  """
  @spec optional(validator(String.t(), a)) :: validator(String.t(), ExMaybe.t(a)) when a: var
  def optional(validator) when validator?(validator) do
    # Empty string short-circuits to {:ok, nil}; anything else is validated.
    fn
      "" ->
        {:ok, nil}

      raw when is_binary(raw) ->
        validator.(raw)
    end
  end

  @doc """
  Verification if `field` has valid value.

      iex> "5" |> Validatex.Validation.pre_validated_field(& &1) |> Validatex.Validation.valid?()
      true

      iex> {:field, "bar", :not_validated} |> Validatex.Validation.valid?()
      false
  """
  @spec valid?(field(any(), any())) :: boolean()
  def valid?({:field, _, {:valid, _}}), do: true
  def valid?(_), do: false

  @doc """
  Runs validation on map which contains `field`s with given validator for `on_blur` event action.
  See [example](https://github.com/iodevs/validatex_example/blob/master/lib/server_web/live/user/register_live.ex#L110)
  of using.
  """
  # NOTE(review): unlike the other validate_on_* functions, this one and
  # validate_on_change/4 do not guard with validator?(validator) — confirm
  # whether that asymmetry is intentional.
  @spec validate_on_blur(model(), key(), validator(any(), any())) :: model()
  def validate_on_blur(map, field, validator) when is_map(map) and key?(field) do
    Map.update!(
      map,
      field,
      &validate(&1, validator, :on_blur)
    )
  end

  @doc """
  Runs validation on map which contains `field`s with given validator for `on_change` event action.
  See [example](https://github.com/iodevs/validatex_example/blob/master/lib/server_web/live/user/register_live.ex#L92)
  of using.
  """
  @spec validate_on_change(model(), key(), any(), validator(any(), any())) :: model()
  def validate_on_change(map, field, value, validator) when is_map(map) and key?(field) do
    Map.update!(
      map,
      field,
      &validate(&1, validator, {:on_change, value})
    )
  end

  @doc """
  Runs validation on map which contains `field`s with given validator for `on_related_change` event action.
  See [example](https://github.com/iodevs/validatex_example/blob/master/lib/server_web/live/user/register_live.ex#L31)
  of using.
  """
  @spec validate_on_related_change(model(), key(), key(), validator(any(), any())) :: model()
  def validate_on_related_change(map, field, related_field, validator)
      when is_map(map) and key?(field) and key?(related_field) and validator?(validator) do
    # `validator` here is a two-step closure: applied to the related field's
    # value it yields the actual 1-arity validator for `field`.
    related = MapExtra.get!(map, related_field)

    Map.update!(
      map,
      field,
      &validate(&1, validator.(related), :on_related_change)
    )
  end

  @doc """
  Runs validation on map which contains `field`s with given validator for `on_submit` event action.
  See [example](https://github.com/iodevs/validatex_example/blob/master/lib/server_web/live/user/register_live.ex#L123)
  of using.
  """
  @spec validate_on_submit(model(), key(), validator(any(), any())) :: model()
  def validate_on_submit(map, field, validator)
      when is_map(map) and key?(field) and validator?(validator) do
    Map.update!(
      map,
      field,
      &validate(&1, validator, :on_submit)
    )
  end

  @doc """
  Runs validation for `field` which is tied with another `field` with `on_submit` event action.
  For example `password` and `confirm_password`.
  **Important note**: This function has to be placed immediately after calling `validate_on_submit` function.
  See [example](https://github.com/iodevs/validatex_example/blob/master/lib/server_web/live/user/register_live.ex#L123)
  of using.
  """
  @spec validate_on_related_submit(model(), key(), key(), validator(any(), any())) :: model()
  def validate_on_related_submit(map, field, related_field, validator)
      when is_map(map) and key?(field) and key?(related_field) and validator?(validator) do
    validate_on_submit(map, field, validator.(MapExtra.get!(map, related_field)))
  end

  @doc """
  If all `field`s have a valid values then you can use this function to send these data to server.
  See [example](https://github.com/iodevs/validatex_example/blob/master/lib/server_web/live/user/register_live.ex#L123)
  of using.
  """
  @spec submit_if_valid(
          %{required(key()) => field(any(), a)},
          [key()],
          (%{required(key()) => a} -> b)
        ) ::
          b
        when a: var, b: Result.t(any(), any())
  def submit_if_valid(data, fields, f)
      when is_map(data) and is_list(fields) and is_function(f, 1) do
    data
    |> take(fields)
    |> to_result()
    |> Result.and_then(f)
  end

  @doc """
  Runs validation for `field` with given validator and event action.

      iex> nv = Validatex.Validation.field("bar")
      {:field, "bar", :not_validated}
      iex> Validatex.Validation.validate(nv, &Validatex.Validators.not_empty(&1), :on_submit)
      {:field, "bar", {:valid, "bar"}}
      iex> Validatex.Validation.validate(nv,
      ...>   &Validatex.Validators.not_empty(&1),
      ...>   {:on_change, "foo"})
      {:field, "foo", :not_validated}
  """
  @spec validate(field(raw, a), validator(raw, a), event(raw)) :: field(raw, a)
        when raw: var, a: var
  # :on_submit and :on_blur always validate; :on_related_change and
  # :on_change only re-validate fields that have been validated before.
  def validate({:field, _, _} = field, validator, :on_submit) when validator?(validator) do
    validate_always(field, validator)
  end

  def validate({:field, _, _} = field, validator, :on_blur) when validator?(validator) do
    validate_always(field, validator)
  end

  def validate({:field, _, _} = field, validator, :on_related_change)
      when validator?(validator) do
    validate_if_validated(field, validator)
  end

  def validate({:field, _, validity}, validator, {:on_change, val})
      when validator?(validator) do
    # The raw value is replaced by the new input before (conditional) validation.
    validate_if_validated({:field, val, validity}, validator)
  end

  # Private

  # Runs the validator unconditionally and stores the outcome as validity.
  @spec validate_always(field(raw, a), validator(raw, a)) :: field(raw, a) when a: var, raw: var
  defp validate_always({:field, raw, _}, validator) when validator?(validator) do
    {:field, raw, raw |> validator.() |> to_validity()}
  end

  # Leaves :not_validated fields untouched; otherwise behaves like validate_always/2.
  @spec validate_if_validated(field(raw, a), validator(raw, a)) :: field(raw, a)
        when a: var, raw: var
  defp validate_if_validated({:field, _, :not_validated} = field, _) do
    field
  end

  defp validate_if_validated({:field, raw, _}, validator) when validator?(validator) do
    {:field, raw, raw |> validator.() |> to_validity()}
  end

  # Converts a Result tuple to the validity representation.
  @spec to_validity(Result.t(error_or_errors(), val)) :: validity(val) when val: var
  defp to_validity({:ok, val}) do
    {:valid, val}
  end

  defp to_validity({:error, err}) do
    {:invalid, err}
  end

  # Collects the valid values of the listed fields into a map, halting with
  # {:invalid, msg} at the first field that is not valid.
  # NOTE(review): assumes every key in `fields` exists in `data` — Map.get/2
  # returns nil for a missing key and validity/1 would then raise; confirm
  # callers always pass known keys.
  @spec take(%{required(key()) => field(any(), a)}, [key()]) :: validity(%{required(key()) => a})
        when a: var
  defp take(data, fields) when is_map(data) and is_list(fields) do
    Enum.reduce_while(
      fields,
      {:valid, %{}},
      fn field, {:valid, acc} ->
        case data |> Map.get(field) |> validity() do
          {:valid, value} ->
            {:cont, {:valid, Map.put(acc, field, value)}}

          _ ->
            # NOTE(review): the trailing apostrophe in this message looks like
            # a typo, but the doctest of apply/3 matches it exactly — changing
            # it would change observable behavior.
            {:halt, {:invalid, "'#{field}' field isn't valid.'"}}
        end
      end
    )
  end

  # Applies `f` inside a {:valid, _} validity; passes others through unchanged.
  @spec map(validity(a), (a -> b)) :: validity(b) when a: var, b: var
  defp map({:valid, data}, f) when is_function(f, 1) do
    {:valid, f.(data)}
  end

  defp map(validity, _f) do
    validity
  end

  # Converts a validity back to a Result tuple.
  @spec to_result(validity(a)) :: Result.t(error_or_errors(), a) when a: var
  defp to_result({:valid, data}), do: {:ok, data}
  defp to_result({:invalid, err}), do: {:error, err}
  defp to_result(:not_validated), do: {:error, "Not validated"}
end
|
lib/validatex/validation.ex
| 0.937569
| 0.65303
|
validation.ex
|
starcoder
|
defmodule Bunch.Struct do
  @moduledoc """
  A bunch of functions for easier manipulation on structs.
  """

  import Kernel, except: [get_in: 2, put_in: 2, update_in: 3, get_and_update_in: 3, pop_in: 2]

  use Bunch

  @compile {:inline, map_keys: 1}

  @gen_common_docs fn fun_name ->
    """
    Wraps `Bunch.Access.#{fun_name}` to make it work with structs that do not
    implement `Access` behaviour.
    """
  end

  @doc """
  #{@gen_common_docs.("get_in/2")}
  """
  @spec get_in(struct, Access.key() | [Access.key()]) :: Access.value()
  def get_in(struct, keys) do
    Bunch.Access.get_in(struct, map_keys(keys))
  end

  @doc """
  #{@gen_common_docs.("put_in/3")}
  """
  @spec put_in(struct, Access.key() | [Access.key()], Access.value()) :: Access.value()
  def put_in(struct, keys, v) do
    Bunch.Access.put_in(struct, map_keys(keys), v)
  end

  @doc """
  #{@gen_common_docs.("update_in/3")}
  """
  @spec update_in(struct, Access.key() | [Access.key()], (Access.value() -> Access.value())) ::
          struct
  def update_in(struct, keys, f) do
    Bunch.Access.update_in(struct, map_keys(keys), f)
  end

  @doc """
  #{@gen_common_docs.("get_and_update_in/3")}
  """
  @spec get_and_update_in(struct, Access.key() | [Access.key()], (a -> {b, a})) :: {b, struct}
        when a: Access.value(), b: any
  def get_and_update_in(struct, keys, f) do
    Bunch.Access.get_and_update_in(struct, map_keys(keys), f)
  end

  @doc """
  #{@gen_common_docs.("pop_in/2")}
  """
  @spec pop_in(struct, Access.key() | [Access.key()]) :: {Access.value(), struct}
  def pop_in(struct, keys) do
    Bunch.Access.pop_in(struct, map_keys(keys))
  end

  @doc """
  #{@gen_common_docs.("delete_in/2")}
  """
  @spec delete_in(struct, Access.key() | [Access.key()]) :: struct
  def delete_in(struct, keys) do
    Bunch.Access.delete_in(struct, map_keys(keys))
  end

  # Normalizes a key or list of keys into a list of `Access.key/2` accessors
  # (defaulting missing entries to nil), which work on structs as well as maps.
  @spec map_keys(Access.key() | [Access.key()]) :: [Access.access_fun(struct | map, term)]
  defp map_keys(keys) do
    keys
    |> Bunch.listify()
    |> Enum.map(&Access.key(&1, nil))
  end
end
|
lib/bunch/struct.ex
| 0.66356
| 0.444806
|
struct.ex
|
starcoder
|
defmodule GitHooks.Tasks.File do
  @moduledoc """
  Represents a file that will be executed as a git hook task.
  A file should be configured as `{:file, file_path, opts}`, being `opts` an
  optional configuration. The file should be readable and have execution
  permissions.
  See `#{__MODULE__}.new/1` for more information.
  For example:
  ```elixir
  config :git_hooks,
    hooks: [
      pre_commit: [
        {:file, "opt/scripts/checks", include_hook_args: true}
      ]
    ]
  ```
  """

  @typedoc """
  Represents a `file` to be executed.
  """
  @type t :: %__MODULE__{
          file_path: String.t(),
          args: [any],
          env: [{String.t(), String.t()}],
          git_hook_type: atom,
          result: term
        }

  defstruct [:file_path, :args, :env, :git_hook_type, result: nil]

  @doc """
  Creates a new `file` struct.
  This function expects a tuple or triple with `:file`, the file path and
  the opts.
  ### Options
  * `include_hook_args`: Whether the git options will be passed as argument when
  executing the file. You will need to check [which arguments are being sent by each git hook](https://git-scm.com/docs/githooks).
  * `env`: The environment variables that will be set in the execution context of the file.
  ### Examples
      iex> #{__MODULE__}.new({:file, :test, env: [{"var", "test"}], include_hook_args: true}, :pre_commit, ["commit message"])
      %#{__MODULE__}{file_path: :test, args: ["commit message"], env: [{"var", "test"}], git_hook_type: :pre_commit}
      iex> #{__MODULE__}.new({:file, :test, include_hook_args: false}, :pre_commit, ["commit message"])
      %#{__MODULE__}{file_path: :test, args: [], env: [], git_hook_type: :pre_commit}
  """
  @spec new(
          {:file, path :: String.t(), [any]},
          GitHooks.git_hook_type(),
          GitHooks.git_hook_args()
        ) ::
          __MODULE__.t()
  def new({:file, script_file, opts}, git_hook_type, git_hook_args) when is_list(opts) do
    # The git hook arguments are forwarded only when explicitly enabled.
    include_args? = Keyword.get(opts, :include_hook_args, false)

    %__MODULE__{
      file_path: script_file,
      args: if(include_args?, do: git_hook_args, else: []),
      git_hook_type: git_hook_type,
      env: Keyword.get(opts, :env, [])
    }
  end
end
# Implementation of the `GitHooks.Task` protocol for file-based tasks:
# executes the configured file as an OS process and reports its outcome.
defimpl GitHooks.Task, for: GitHooks.Tasks.File do
  alias GitHooks.Config
  alias GitHooks.Tasks.File
  alias GitHooks.Printer

  # Runs the file via `System.cmd/3` and stores the `{output, exit_status}`
  # result tuple in the struct for later inspection.
  def run(
        %File{file_path: script_file, env: env, args: args, git_hook_type: git_hook_type} = file,
        _opts
      ) do
    result =
      script_file
      |> Path.absname()
      |> System.cmd(
        args,
        # Stream the command output into the IO device configured for this hook.
        into: Config.io_stream(git_hook_type),
        env: env
      )

    Map.put(file, :result, result)
  end

  # The task succeeded only when the OS process exited with status 0.
  def success?(%File{result: {_result, 0}}), do: true
  def success?(%File{result: _}), do: false

  # Prints a success or error line depending on the stored exit status and
  # returns the struct unchanged so it can keep flowing through a pipeline.
  def print_result(%File{file_path: file_path, result: {_result, 0}} = file) do
    Printer.success("`#{file_path}` was successful")
    file
  end

  def print_result(%File{file_path: file_path, result: {_result, _code}} = file) do
    Printer.error("`#{file_path}` execution failed")
    file
  end
end
|
lib/tasks/file.ex
| 0.840357
| 0.837421
|
file.ex
|
starcoder
|
defmodule Indicado.OBV do
  @moduledoc """
  This is the OBV module used for calculating On-Balance Volume
  """

  @typedoc """
  The argument passed to eval functions should be a list of ovb_data_map type.
  """
  @type ovb_data_map :: %{
          close: float,
          volume: float
        }

  @doc """
  Calculates OBV for the list. The list argument passed to eval function should be list of ovb_data_map type.
  Returns `{:ok, ovb_list}` or `{:error, :not_enough_data}`

  ## Examples

      iex> Indicado.OBV.eval([%{close: 1, volume: 2}, %{close: 2, volume: 5}])
      {:ok, [5]}
      iex> Indicado.OBV.eval([%{close: 2, volume: 3}, %{close: 1, volume: 5}])
      {:ok, [-5]}
      iex> Indicado.OBV.eval([%{close: 2, volume: 3}, %{close: 1, volume: 5}, %{close: 2, volume: 4}])
      {:ok, [-5, -1]}
      iex> Indicado.OBV.eval([])
      {:error, :not_enough_data}
  """
  @spec eval(nonempty_list(ovb_data_map)) ::
          {:ok, nonempty_list(float)} | {:error, :not_enough_data}
  def eval(list), do: calc(list)

  @doc """
  Calculates OBV for the list. The list argument passed to eval function should be list of ovb_data_map type.
  Raises `NotEnoughDataError` if the given list length is zero.

  ## Examples

      iex> Indicado.OBV.eval!([%{close: 1, volume: 1}, %{close: 2, volume: 3}])
      [3]
      iex> Indicado.OBV.eval!([%{close: 1, volume: 1}, %{close: 1, volume: 3}])
      [0]
      iex> Indicado.OBV.eval!([])
      ** (NotEnoughDataError) not enough data
  """
  @spec eval!(nonempty_list(ovb_data_map)) :: nonempty_list(float)
  def eval!(list) do
    case calc(list) do
      {:ok, result} -> result
      {:error, :not_enough_data} -> raise NotEnoughDataError
    end
  end

  # Empty input cannot produce any OBV value.
  defp calc([]), do: {:error, :not_enough_data}

  # Pairs consecutive data points, maps each pair to its signed volume delta
  # and accumulates a running total. `Enum.scan/2` replaces a hand-rolled
  # recursive summation with the standard-library cumulative-sum idiom.
  defp calc(list) do
    obv =
      list
      |> Enum.chunk_every(2, 1, :discard)
      |> Enum.map(fn [prev, curr] -> delta(curr, prev) end)
      |> Enum.scan(&+/2)

    {:ok, obv}
  end

  # Signed volume contribution of `last` relative to the previous data point:
  # +volume on an up-close, -volume on a down-close, 0 when unchanged.
  defp delta(last, prev) when last.close == prev.close, do: 0
  defp delta(last, prev) when last.close > prev.close, do: last.volume
  defp delta(last, prev) when last.close < prev.close, do: -last.volume
end
|
lib/indicado/obv.ex
| 0.912004
| 0.715474
|
obv.ex
|
starcoder
|
defmodule ExOpenAI do
  use HTTPoison.Base

  @moduledoc """
  An Elixir client for the OpenAI API.
  The OpenAI API lets developers use new AI technologies from OpenAI,
  integrating them into products and services. The API is general-purpose
  and can be tried on virtually any natural language task, and its success
  is roughly correlated with how complex the task is.
  Each module corresponds to each major area of the API.

  ## Authentication

  Each request is authenticated with an `api_key` which is sent to
  the OpenAI API in the form of an `Authorization` header.
  The `api_key` can be specified in one of three ways.
  1. The `api_key` can be specified in an environment variable called
  `OPENAI_API_KEY`.
  2. The `api_key` can be specified via the config. This will override the
  environment variable.
  ```
  # config/config.exs
  config :ex_openai, api_key: "sk-myapikey"
  ```
  3. The `api_key` can be specified on a per request basis in the
  `options` parameter with an `api_key` key.
  ```
  result = ExOpenAI.Completion.create(:davinci, [], [api_key: "sk-yourapikey"])
  ```

  ## Responses

  Responses from each request are returned with an `:ok` or `:error` tuple. If
  the response contains an error, then a tuple with an `:error` key is returned.
  An error is returned even if the response is successful but it has an error in the
  body.
  ```
  {:error, response} = ExOpenAI.Completion.create(:davinci)
  ```
  If the response is successful, an `:ok` keyed tuple is returned.
  ```
  {:ok, response} = ExOpenAI.Completion.create(:davinci)
  ```
  """

  @base_url "https://api.openai.com/"
  @version "v1"
  @user_agent [{"User-agent", "ex_openai"}]

  @doc """
  Makes a `POST` request to the OpenAI API.
  Returns the wrapped response with either an `ok` or `error` tuple along
  with the `HTTPoison.Response` as the second element in the tuple.
  parameters: A `Keyword` list of parameters that will be passed with the
  request body as json.
  options:
  * `api_key` - A binary API key to be used for this request. This will
  override any API key specified in the config or as an environment variable.
  """
  def post(url, parameters, options \\ []) do
    # A per-request :api_key option takes precedence over the configured one;
    # the remaining options are forwarded to the HTTP client untouched.
    {api_key, opts} = Keyword.pop(options, :api_key, system_api_key())

    make_request(
      :post,
      url(url),
      body(parameters),
      default_headers() ++ [authorization_header(api_key)],
      opts
    )
  end

  # HTTPoison.Base callback: an empty body is normalized to nil,
  # everything else is decoded as JSON (raises on invalid JSON).
  @doc false
  def process_response_body(""), do: nil

  @doc false
  def process_response_body(body), do: Jason.decode!(body)

  # Builds the standard Bearer-token Authorization header.
  defp authorization_header(api_key) do
    {"Authorization", "Bearer #{api_key}"}
  end

  # Joins the API host with the version prefix, e.g. "https://api.openai.com/v1".
  defp base_url do
    URI.merge(@base_url, @version) |> to_string()
  end

  # Encodes the keyword-list parameters as a JSON object.
  defp body(parameters) do
    Jason.encode!(Enum.into(parameters, %{}))
  end

  defp default_headers do
    [{"Content-Type", "application/json"}] ++ @user_agent
  end

  # `request!/5` and `process_response/1` are injected by `use HTTPoison.Base`;
  # `request!` raises on transport errors rather than returning {:error, _}.
  defp make_request(method, url, body, headers, options) do
    method
    |> request!(url, body, headers, options)
    |> process_response
    |> wrap_response
  end

  # Reads the configured API key. NOTE(review): this returns the config value
  # verbatim — a `{:system, var}` tuple would NOT be resolved here; verify how
  # the OPENAI_API_KEY environment-variable fallback is meant to happen.
  defp system_api_key do
    Application.get_env(:ex_openai, :api_key)
  end

  # Prefixes relative paths with the versioned base URL.
  defp url("/" <> _path = full_path), do: base_url() <> full_path
  defp url(path), do: url("/#{path}")

  # A response whose JSON body carries an "error" key is treated as a failure
  # even when the HTTP status was successful.
  defp wrap_response(%HTTPoison.Response{body: %{"error" => _error}} = response) do
    {:error, response}
  end

  defp wrap_response(response) do
    {:ok, response}
  end
end
|
lib/ex_openai.ex
| 0.874553
| 0.909506
|
ex_openai.ex
|
starcoder
|
defmodule Bench do
  # This is the benchmark of the single operations in the
  # Huffman encoding and decoding process.
  # added bench

  # Runs the full benchmark suite over increasing input sizes.
  # NOTE(review): the 1_000_000-character run executes BEFORE the "Warm up"
  # banner is printed — presumably it serves as the VM warm-up; confirm the
  # intended ordering of the run and the banner.
  def bench() do
    bench(1000000)
    IO.puts(" Warm up -------------------------------------------------")
    bench(10000)
    bench(100000)
    bench(200000)
    bench(400000)
    bench(800000)
  end

  # Benchmarks both table-based and tuple-based encoders on `n` units of the
  # default sample text.
  def bench(n) do
    bench("lib/text.txt",n)
  end

  def bench(file,n) do
    bench1(file,n)
    bench2(file,n)
  end

  # Benchmark variant 1: list-based encode table (Encode.encode_table/1).
  # Prints timing for tree building, table construction, encoding, decoding,
  # and the resulting compression ratio.
  def bench1(file, n) do
    {text, b} = read(file, n)
    c = length(text)
    {tree, t2} = time(fn -> Huffman.tree(text) end)
    {encode, t3} = time(fn -> Encode.encode_table(tree) end)
    s = length(encode)
    {decode, _} = time(fn -> Decode.decode_table(tree) end)
    {encoded, t5} = time(fn -> Encode.encode(text, encode) end)
    # Encoded length is in bits; divide by 8 to report bytes.
    e = div(length(encoded), 8)
    r = Float.round(e / b, 3)
    {_, t6} = time(fn -> Decode.decode(encoded, decode) end)
    IO.puts("text of #{c} characters")
    IO.puts("tree built in #{t2} ms")
    IO.puts("table of size #{s} in #{t3} ms")
    IO.puts("encoded in #{t5} ms")
    IO.puts("decoded in #{t6} ms")
    IO.puts("source #{b} bytes, encoded #{e} bytes, compression #{r}")
    IO.puts("1")
  end

  # Measure the execution time of a function.
  # Returns {result, elapsed_milliseconds}.
  def time(func) do
    initial = Time.utc_now()
    result = func.()
    final = Time.utc_now()
    {result, Time.diff(final, initial, :microsecond) / 1000}
  end

  # Get a suitable chunk of text to encode.
  # Returns {charlist, byte_count}; when the read ends on an incomplete UTF-8
  # sequence, the dangling bytes are excluded from the reported count.
  def read(file, n) do
    {:ok, fd} = File.open(file, [:read, :utf8])
    binary = IO.read(fd, n)
    File.close(fd)
    length = byte_size(binary)
    case :unicode.characters_to_list(binary, :utf8) do
      {:incomplete, chars, rest} ->
        {chars, length - byte_size(rest)}
      chars ->
        {chars, length}
    end
  end

  # Benchmark variant 2: tuple-based encode table (Encode.encode_tuple/1),
  # otherwise identical in structure to bench1/2.
  def bench2(file, n) do
    {text, b} = read(file, n)
    c = length(text)
    {tree, t2} = time(fn -> Huffman.tree(text) end)
    {encode, t3} = time(fn -> Encode.encode_tuple(tree) end)
    s = tuple_size(encode)
    {decode, _} = time(fn -> Decode.decode_table(tree) end)
    {encoded, t5} = time(fn -> Encode.encode_tuple(text, encode) end)
    e = div(length(encoded), 8)
    r = Float.round(e / b, 3)
    {_, t6} = time(fn -> Decode.decode(encoded, decode) end)
    IO.puts("text of #{c} characters")
    IO.puts("tree built in #{t2} ms")
    IO.puts("table of size #{s} in #{t3} ms")
    IO.puts("encoded in #{t5} ms")
    IO.puts("decoded in #{t6} ms")
    IO.puts("source #{b} bytes, encoded #{e} bytes, compression #{r}")
    IO.puts("----------------------2-------------------")
  end
end
|
huffman/lib/bench.ex
| 0.703753
| 0.441131
|
bench.ex
|
starcoder
|
defmodule Dune do
  @moduledoc """
  A sandbox for Elixir to safely evaluate untrusted code from user input.
  ## Features
  - only authorized modules and functions can be executed (see
  `Dune.Allowlist.Default`)
  - no access to environment variables, file system, network...
  - code executed in an isolated process
  - execution within configurable limits: timeout, maximum reductions and memory
  (inspired by [Luerl](https://github.com/rvirding/luerl))
  - captured standard output
  - atoms, without atom leaks: parsing and runtime do not
  [leak atoms](https://hexdocs.pm/elixir/String.html#to_atom/1) (i.e. does not
  keep
  [filling the atom table](https://learnyousomeerlang.com/starting-out-for-real#atoms)
  until the VM crashes)
  - modules, without actual module creation: Dune does not let users define any
  actual module (would leak memory and modify the state of the VM globally), but
  `defmodule` simulates the basic behavior of a module, including private and
  recursive functions
  The list of modules and functions authorized by default is defined by the
  `Dune.Allowlist.Default` module, but this list can be extended and customized
  (at your own risk!) using `Dune.Allowlist`.
  If you need to keep the state between evaluations, you might consider
  `Dune.Session`.
  """

  alias Dune.{Success, Failure, Parser, Eval, Opts}

  @doc ~S"""
  Evaluates the `string` in the sandbox.
  Available options are detailed in `Dune.Opts`.
  Returns a `Dune.Success` struct if the execution went successfully,
  a `Dune.Failure` else.
  ## Examples
      iex> Dune.eval_string("IO.puts('Hello world!')")
      %Dune.Success{inspected: ":ok", stdio: "Hello world!\n", value: :ok}
      iex> Dune.eval_string("File.cwd!()")
      %Dune.Failure{message: "** (DuneRestrictedError) function File.cwd!/0 is restricted", type: :restricted}
      iex> Dune.eval_string("List.duplicate(:spam, 100_000)")
      %Dune.Failure{message: "Execution stopped - memory limit exceeded", stdio: "", type: :memory}
      iex> Dune.eval_string("Foo.bar()")
      %Dune.Failure{message: "** (UndefinedFunctionError) function Foo.bar/0 is undefined (module Foo is not available)", type: :exception}
      iex> Dune.eval_string("][")
      %Dune.Failure{message: "unexpected token: ]", type: :parsing}
  Atoms used during parsing and execution might be transformed to prevent atom leaks:
      iex> Dune.eval_string("some_variable = IO.inspect(:some_atom)")
      %Dune.Success{inspected: ":some_atom", stdio: ":some_atom\n", value: :a__Dune_atom_2__}
  The `value` field shows the actual runtime value, but `inspected` and `stdio` are safe to display to the user.
  """
  @spec eval_string(String.t(), Keyword.t()) :: Success.t() | Failure.t()
  def eval_string(string, opts \\ []) when is_binary(string) do
    validated_opts = Opts.validate!(opts)
    parsed = Parser.parse_string(string, validated_opts)
    Eval.run(parsed, validated_opts)
  end

  @doc ~S"""
  Evaluates the quoted `ast` in the sandbox.
  Available options are detailed in `Dune.Opts` (parsing restrictions have no effect)..
  Returns a `Dune.Success` struct if the execution went successfully,
  a `Dune.Failure` else.
  ## Examples
      iex> Dune.eval_quoted(quote do: [1, 2] ++ [3, 4])
      %Dune.Success{inspected: "[1, 2, 3, 4]", stdio: "", value: [1, 2, 3, 4]}
      iex> Dune.eval_quoted(quote do: System.get_env())
      %Dune.Failure{message: "** (DuneRestrictedError) function System.get_env/0 is restricted", type: :restricted}
      iex> Dune.eval_quoted(quote do: Process.sleep(500))
      %Dune.Failure{message: "Execution timeout - 50ms", type: :timeout}
  """
  @spec eval_quoted(Macro.t(), Keyword.t()) :: Success.t() | Failure.t()
  def eval_quoted(ast, opts \\ []) do
    validated_opts = Opts.validate!(opts)
    parsed = Parser.parse_quoted(ast, validated_opts)
    Eval.run(parsed, validated_opts)
  end

  @doc ~S"""
  Returns the AST corresponding to the provided `string`, without leaking atoms.
  Available options are detailed in `Dune.Opts` (runtime restrictions have no effect).
  Returns a `Dune.Success` struct if the execution went successfully,
  a `Dune.Failure` else.
  ## Examples
      iex> Dune.string_to_quoted("1 + 2")
      %Dune.Success{inspected: "{:+, [line: 1], [1, 2]}", stdio: "", value: {:+, [line: 1], [1, 2]}}
      iex> Dune.string_to_quoted("[invalid")
      %Dune.Failure{stdio: "", message: "missing terminator: ] (for \"[\" starting at line 1)", type: :parsing}
  The `pretty` option can make the AST more readable by adding newlines to `inspected`:
      iex> Dune.string_to_quoted("IO.puts('hello world')", pretty: true).inspected
      "{{:., [line: 1], [{:__aliases__, [line: 1], [:IO]}, :puts]}, [line: 1],\n ['hello world']}"
      iex> Dune.string_to_quoted("IO.puts('hello world')").inspected
      "{{:., [line: 1], [{:__aliases__, [line: 1], [:IO]}, :puts]}, [line: 1], ['hello world']}"
  Since the code isn't executed, there is no allowlist restriction:
      iex> Dune.string_to_quoted("System.halt()")
      %Dune.Success{
        inspected: "{{:., [line: 1], [{:__aliases__, [line: 1], [:System]}, :halt]}, [line: 1], []}",
        stdio: "",
        value: {{:., [line: 1], [{:__aliases__, [line: 1], [:System]}, :halt]}, [line: 1], []}
      }
  Atoms might be transformed during parsing to prevent atom leaks:
      iex> Dune.string_to_quoted("some_variable = :some_atom")
      %Dune.Success{
        inspected: "{:=, [line: 1], [{:some_variable, [line: 1], nil}, :some_atom]}",
        stdio: "",
        value: {:=, [line: 1], [{:a__Dune_atom_1__, [line: 1], nil}, :a__Dune_atom_2__]}
      }
  The `value` field shows the actual runtime value, but `inspected` is safe to display to the user.
  """
  @spec string_to_quoted(String.t(), Keyword.t()) :: Success.t() | Failure.t()
  def string_to_quoted(string, opts \\ []) when is_binary(string) do
    Parser.string_to_quoted(string, Opts.validate!(opts))
  end
end
|
lib/dune.ex
| 0.856107
| 0.662946
|
dune.ex
|
starcoder
|
defmodule ReWeb.Types.Address do
  @moduledoc """
  GraphQL types for addresses
  """
  use Absinthe.Schema.Notation

  alias ReWeb.Resolvers

  # Output type for a full address; the last two fields are computed by
  # resolvers rather than read directly from the record.
  object :address do
    field :id, :id
    field :street, :string
    field :street_number, :string
    field :neighborhood, :string
    field :city, :string
    field :state, :string
    field :postal_code, :string
    field :lat, :float
    field :lng, :float
    field :street_slug, :string
    field :neighborhood_slug, :string
    field :city_slug, :string
    field :state_slug, :string
    field :neighborhood_description, :string,
      resolve: &Resolvers.Addresses.neighborhood_description/3
    field :is_covered, :boolean, resolve: &Resolvers.Addresses.is_covered/3
  end

  # Output type describing a district (neighborhood-level area).
  object :district do
    field :state, :string
    field :city, :string
    field :name, :string
    field :state_slug, :string
    field :city_slug, :string
    field :name_slug, :string
    field :description, :string
    field :status, :string
    field :uuid, :uuid
  end

  # Input type used by the address_insert mutation; every field is required.
  input_object :address_input do
    field :street, non_null(:string)
    field :street_number, non_null(:string)
    field :neighborhood, non_null(:string)
    field :city, non_null(:string)
    field :state, non_null(:string)
    field :postal_code, non_null(:string)
    field :lat, non_null(:float)
    field :lng, non_null(:float)
  end

  # Query fields; merged into the root query type by the main schema.
  object :address_queries do
    @desc "Get all neighborhoods"
    field :neighborhoods, list_of(:string), resolve: &Resolvers.Addresses.neighborhoods/2
    @desc "Get all districts"
    field :districts, list_of(:district), resolve: &Resolvers.Addresses.districts/2
    @desc "Show district"
    field :district, :district do
      # Districts are addressed by the slug triple (state, city, name).
      arg :state_slug, non_null(:string)
      arg :city_slug, non_null(:string)
      arg :name_slug, non_null(:string)
      resolve &Resolvers.Addresses.district/2
    end
    @desc "Get address coverage"
    field :address_is_covered, :boolean do
      arg :state, non_null(:string)
      arg :city, non_null(:string)
      arg :neighborhood, non_null(:string)
      resolve &Resolvers.Addresses.is_covered/2
    end
  end

  # Mutation fields; merged into the root mutation type by the main schema.
  object :address_mutations do
    @desc "Insert address"
    field :address_insert, type: :address do
      arg :input, non_null(:address_input)
      resolve &Resolvers.Addresses.insert/2
    end
  end
end
|
apps/re_web/lib/graphql/types/address.ex
| 0.712732
| 0.460228
|
address.ex
|
starcoder
|
defmodule Cashtrail.Statuses do
  @moduledoc """
  Provides a set of functions to work with status on Ecto Schemas records that implements
  `WithStatus` protocol.
  If the record has one field called `:archived_at` and want map this field to `:active` or
  `:archived` state. It is only necessary to set `@derive Cashtrail.Statuses.WithStatus` on the
  Ecto.Schema model. For example:
  ```
  defmodule MySchema do
    use Ecto.Schema
    @derive Cashtrail.Statuses.WithStatus
    schema "my_table" do
      field :description, :string
      field :archived_at, :naive_datetime
    end
  end
  ```
  In other cases is necessary implement the protocol functions from `Cashtrail.Statuses.WithStatus`,
  like for example:
  ```
  defimpl WithStatus, for: Any do
    import Ecto.Query
    def status(%{archived_at: archived_at}) when not is_nil(archived_at) do
      :archived
    end
    def status(_) do
      :active
    end
    def filter_condition(_schema, :active) do
      dynamic([q], is_nil(q.archived_at))
    end
    def filter_condition(_schema, :archived) do
      dynamic([q], not is_nil(q.archived_at))
    end
    def filter_condition(_schema, _status), do: nil
  end
  ```
  """

  alias Cashtrail.Statuses.WithStatus
  import Ecto.Query

  @doc """
  Gets the status of one record.
  """
  @spec status(struct) :: atom
  def status(record) do
    WithStatus.status(record)
  end

  @doc """
  Returns if the record is archived or don't based on it status.
  """
  @spec archived?(struct) :: boolean
  def archived?(record) do
    case status(record) do
      :archived -> true
      _ -> false
    end
  end

  @doc """
  Returns if the record is active or don't based on it status. It is considered active if it is not
  archived.
  """
  @spec active?(struct) :: boolean
  def active?(record) do
    case status(record) do
      :archived -> false
      _ -> true
    end
  end

  @doc """
  Returns a `Ecto.Query` with the queries based on the given status.
  The query is mounted according with the implementation of the `WithStatus.filter_condition/2`
  function.
  ## Arguments
  * queryable - the `Ecto.Queryable` or `Ecto.Query` to be filtered.
  * params - a map of filter params; the status may be under `status_key` as an
    atom or string key, and may be a single value or a list of values.
  * status_key - the key to look up in `params` (defaults to `:status`).
  """
  @spec filter_by_status(Ecto.Queryable.t() | Ecto.Query.t(), map(), atom()) :: Ecto.Query.t()
  def filter_by_status(queryable, params, status_key \\ :status)

  # No params at all: nothing to filter, return the queryable untouched.
  def filter_by_status(queryable, nil, _status_key), do: queryable

  def filter_by_status(queryable, params, status_key) do
    # Accept the status under an atom or string key, as a single value or a
    # list. String values are converted with `String.to_existing_atom/1` so
    # user-supplied input cannot create new atoms (atom-table exhaustion).
    status_or_statuses =
      case Map.get(params, status_key) || Map.get(params, to_string(status_key)) do
        [status | _] = statuses when is_binary(status) ->
          Enum.map(statuses, &String.to_existing_atom/1)
        status when is_binary(status) ->
          String.to_existing_atom(status)
        status_or_statuses ->
          status_or_statuses
      end

    build_filter(queryable, status_or_statuses)
  end

  @spec build_filter(Ecto.Queryable.t() | Ecto.Query.t(), atom() | list(atom())) :: Ecto.Query.t()
  # For an %Ecto.Query{}, extract the schema module from the FROM source and
  # load an empty struct so the `WithStatus` protocol can dispatch on it.
  defp build_filter(%Ecto.Query{from: %{source: {_, schema}}} = query, status_or_statuses) do
    schema = Cashtrail.Repo.load(schema, %{})
    build_filter(query, status_or_statuses, schema)
  end

  # Otherwise the queryable is assumed to be the schema module itself.
  defp build_filter(query, status_or_statuses) do
    schema = Cashtrail.Repo.load(query, %{})
    build_filter(query, status_or_statuses, schema)
  end

  @spec build_filter(Ecto.Queryable.t(), atom() | list(atom()), Ecto.Schema.t()) :: Ecto.Query.t()
  # Multiple statuses: OR together each status' condition; `false` is the
  # neutral element of OR, and statuses without a condition are skipped.
  defp build_filter(query, statuses, schema) when is_list(statuses) do
    conditions =
      Enum.reduce(statuses, false, fn status, condition ->
        case WithStatus.filter_condition(schema, status) do
          nil -> condition
          filter_condition -> dynamic([q], ^filter_condition or ^condition)
        end
      end)

    from(query, where: ^conditions)
  end

  # Single status. NOTE(review): when `filter_condition/2` returns nil this
  # falls back to `[]`, which Ecto treats as an empty (always-true) where
  # clause — confirm that matching everything is the intended behavior here.
  defp build_filter(query, status, schema) do
    conditions = WithStatus.filter_condition(schema, status) || []
    from(query, where: ^conditions)
  end

  defprotocol WithStatus do
    @moduledoc """
    Gets the status from the record and helps build filters for the record and the status.
    """

    @doc """
    Gets the status of the record.
    """
    @spec status(struct()) :: atom()
    def status(record)

    @doc """
    Build the filter condition for the record and the given status.
    """
    @spec filter_condition(struct(), atom()) :: map() | nil
    def filter_condition(record, status)
  end

  # Default (derivable) implementation: a non-nil `:archived_at` timestamp
  # means the record is archived, otherwise it is active.
  defimpl WithStatus, for: Any do
    @moduledoc false
    import Ecto.Query

    @doc false
    @spec status(struct) :: :archived | :active
    def status(%{archived_at: archived_at}) when not is_nil(archived_at) do
      :archived
    end

    def status(_) do
      :active
    end

    @doc false
    @spec filter_condition(struct, atom) :: map() | nil
    def filter_condition(_schema, :active) do
      dynamic([q], is_nil(q.archived_at))
    end

    def filter_condition(_schema, :archived) do
      dynamic([q], not is_nil(q.archived_at))
    end

    # Unknown statuses produce no condition.
    def filter_condition(_schema, _status), do: nil
  end
end
|
apps/cashtrail/lib/cashtrail/statuses.ex
| 0.882054
| 0.938801
|
statuses.ex
|
starcoder
|
defmodule Membrane.MP4.Track.SampleTable do
  @moduledoc """
  A module that defines a structure and functions allowing to store samples,
  assemble them into chunks and flush when needed. Its public functions take
  care of recording information required to build a sample table.
  For performance reasons, the module uses prepends when storing information
  about new samples. To compensate for it, use `#{inspect(&__MODULE__.reverse/1)}`
  when it's known that no more samples will be stored.
  """

  @type t :: %__MODULE__{
          chunk: [binary],
          chunk_first_timestamp: non_neg_integer | nil,
          last_timestamp: non_neg_integer | nil,
          sample_count: non_neg_integer,
          sample_sizes: [pos_integer],
          sync_samples: [pos_integer],
          chunk_offsets: [non_neg_integer],
          decoding_deltas: [
            %{
              sample_delta: Ratio.t(),
              sample_count: pos_integer
            }
          ],
          samples_per_chunk: [
            %{
              first_chunk: pos_integer,
              sample_count: pos_integer
            }
          ]
        }

  defstruct chunk: [],
            chunk_first_timestamp: nil,
            last_timestamp: nil,
            sample_count: 0,
            sample_sizes: [],
            sync_samples: [],
            chunk_offsets: [],
            decoding_deltas: [],
            samples_per_chunk: []

  # Records a buffer as a sample: appends its payload to the current chunk and
  # updates every bookkeeping list needed for the sample table.
  @spec store_sample(__MODULE__.t(), Membrane.Buffer.t()) :: __MODULE__.t()
  def store_sample(sample_table, buffer) do
    sample_table
    |> maybe_store_first_timestamp(buffer)
    |> do_store_sample(buffer)
    |> update_decoding_deltas(buffer)
    |> maybe_store_sync_sample(buffer)
    |> store_last_timestamp(buffer)
  end

  # Duration of the chunk currently being assembled; 0 when no sample has been
  # stored since the last flush.
  @spec chunk_duration(__MODULE__.t()) :: non_neg_integer
  def chunk_duration(%{chunk_first_timestamp: nil}), do: 0

  def chunk_duration(sample_table) do
    # `use Ratio` so that `-` works on rational timestamps.
    use Ratio
    sample_table.last_timestamp - sample_table.chunk_first_timestamp
  end

  # Finalizes the current chunk: records its offset, run-length-encodes its
  # sample count and returns the chunk's binary payload plus the reset table.
  @spec flush_chunk(__MODULE__.t(), non_neg_integer) :: {binary, __MODULE__.t()}
  def flush_chunk(%{chunk: []} = sample_table, _chunk_offset),
    do: {<<>>, sample_table}

  def flush_chunk(sample_table, chunk_offset) do
    chunk = sample_table.chunk

    sample_table =
      sample_table
      |> Map.update!(:chunk_offsets, &[chunk_offset | &1])
      |> update_samples_per_chunk(length(chunk))
      |> Map.merge(%{chunk: [], chunk_first_timestamp: nil})

    # Samples were prepended, so reverse before joining into one binary.
    chunk = chunk |> Enum.reverse() |> Enum.join()
    {chunk, sample_table}
  end

  # Reverses every prepend-built list once, when no more samples will come.
  @spec reverse(__MODULE__.t()) :: __MODULE__.t()
  def reverse(sample_table) do
    to_reverse = [
      :sample_sizes,
      :sync_samples,
      :chunk_offsets,
      :decoding_deltas,
      :samples_per_chunk
    ]

    Enum.reduce(to_reverse, sample_table, fn key, sample_table ->
      reversed = sample_table |> Map.fetch!(key) |> Enum.reverse()
      %{sample_table | key => reversed}
    end)
  end

  # Prepends the payload to the chunk, records its size and bumps the counter.
  defp do_store_sample(sample_table, %{payload: payload}),
    do:
      Map.merge(sample_table, %{
        chunk: [payload | sample_table.chunk],
        sample_sizes: [byte_size(payload) | sample_table.sample_sizes],
        sample_count: sample_table.sample_count + 1
      })

  # The first sample of a fresh chunk fixes the chunk's starting timestamp.
  defp maybe_store_first_timestamp(%{chunk: []} = sample_table, %{
         metadata: %{timestamp: timestamp}
       }),
       do: %{sample_table | chunk_first_timestamp: timestamp}

  defp maybe_store_first_timestamp(sample_table, _buffer), do: sample_table

  # Very first sample overall: its delta is unknown yet, recorded as 0 and
  # fixed up once the second sample arrives (see the clause below).
  defp update_decoding_deltas(%{last_timestamp: nil} = sample_table, _buffer) do
    Map.put(sample_table, :decoding_deltas, [%{sample_count: 1, sample_delta: 0}])
  end

  defp update_decoding_deltas(sample_table, %{metadata: %{timestamp: timestamp}}) do
    Map.update!(sample_table, :decoding_deltas, fn previous_deltas ->
      use Ratio
      new_delta = timestamp - sample_table.last_timestamp

      case previous_deltas do
        # there was only one sample in the sample table - we should assume its delta is
        # equal to the one of the second sample
        [%{sample_count: 1, sample_delta: 0}] ->
          [%{sample_count: 2, sample_delta: new_delta}]

        # the delta did not change, simply increase the counter in the last entry to save space
        [%{sample_count: count, sample_delta: ^new_delta} | rest] ->
          [%{sample_count: count + 1, sample_delta: new_delta} | rest]

        _different_delta_or_empty ->
          [%{sample_count: 1, sample_delta: new_delta} | previous_deltas]
      end
    end)
  end

  # Key frames are recorded by their 1-based sample index (sync sample table).
  defp maybe_store_sync_sample(sample_table, %{metadata: %{mp4_payload: %{key_frame?: true}}}) do
    Map.update!(sample_table, :sync_samples, &[sample_table.sample_count | &1])
  end

  defp maybe_store_sync_sample(sample_table, _buffer), do: sample_table

  defp store_last_timestamp(sample_table, %{metadata: %{timestamp: timestamp}}),
    do: %{sample_table | last_timestamp: timestamp}

  # Run-length encoding of per-chunk sample counts: a new entry is added only
  # when the count differs from the previous chunk's (stsc-box style).
  defp update_samples_per_chunk(sample_table, sample_count) do
    Map.update!(sample_table, :samples_per_chunk, fn previous_chunks ->
      case previous_chunks do
        [%{first_chunk: _, sample_count: ^sample_count} | _rest] ->
          previous_chunks

        _different_count ->
          [
            %{first_chunk: length(sample_table.chunk_offsets), sample_count: sample_count}
            | previous_chunks
          ]
      end
    end)
  end
end
|
lib/membrane_mp4/track/sample_table.ex
| 0.873916
| 0.640256
|
sample_table.ex
|
starcoder
|
# Exception raised when the underlying device reports an error while streaming.
# NOTE(review): this uses the pre-1.0, record-based `defexception` syntax and
# `iolist_to_binary/1`, both removed in modern Elixir — this file targets an
# old compiler.
defexception IO.StreamError, [:reason, :message] do
  def exception(opts) do
    reason = opts[:reason]
    # Turn the posix/file error reason (e.g. :enoent) into readable text.
    formatted = iolist_to_binary(:file.format_error(reason))
    IO.StreamError[message: "error during streaming: #{formatted}", reason: reason]
  end
end
defmodule IO.Stream do
  @moduledoc """
  Defines a `IO.Stream` struct returned by `IO.stream/2` and `IO.binstream/2`.
  The following fields are public:
  * `device` - the IO device
  * `raw` - a boolean indicating if bin functions should be used
  * `line_or_bytes` - if reading should read lines or a given amount of bytes
  """

  defstruct device: nil, raw: true, line_or_bytes: :line

  # Writing into the stream via `Enum.into/2` and friends; `raw` selects
  # between binary and unicode-aware writes.
  defimpl Collectable do
    # NOTE(review): `empty/1` was part of the Collectable protocol in early
    # Elixir versions and no longer exists in current releases.
    def empty(stream) do
      stream
    end

    def into(%{ device: device, raw: raw } = stream) do
      { :ok, into(stream, device, raw) }
    end

    # Returns the collector function: writes each element to the device and
    # yields the stream itself when collection is done or halted.
    defp into(stream, device, raw) do
      fn
        :ok, { :cont, x } ->
          case raw do
            true -> IO.binwrite(device, x)
            false -> IO.write(device, x)
          end
        :ok, _ -> stream
      end
    end
  end

  # Reading from the stream: unfolds the device line-by-line or in fixed-size
  # reads, depending on `line_or_bytes`, via the IO.each_*stream helpers.
  defimpl Enumerable do
    def reduce(%{ device: device, raw: raw, line_or_bytes: line_or_bytes }, acc, fun) do
      next_fun =
        case raw do
          true -> &IO.each_binstream(&1, line_or_bytes)
          false -> &IO.each_stream(&1, line_or_bytes)
        end
      Stream.unfold(device, next_fun).(acc, fun)
    end

    # An IO stream has no cheap count and no cheap membership test, so both
    # fall back to the default reduce-based implementations.
    def count(_stream) do
      { :error, __MODULE__ }
    end

    def member?(_stream, _term) do
      { :error, __MODULE__ }
    end
  end
end
defmodule IO do
@moduledoc """
Functions handling IO.
Many functions in this module expects an IO device as argument.
An IO device must be a pid or an atom representing a process.
For convenience, Elixir provides `:stdio` and `:stderr` as
shortcuts to Erlang's `:standard_io` and `:standard_error`.
The majority of the functions expect data encoded in UTF-8
and will do a conversion to string, via the `String.Chars`
protocol (as shown in typespecs).
The functions starting with `bin*` expects iodata as arguments,
i.e. iolists or binaries with no particular encoding.
"""
@type device :: atom | pid
@type chardata :: char_list | String.Chars.t
@type nodata :: { :error, term } | :eof
import :erlang, only: [group_leader: 0]
defmacrop is_iolist(data) do
quote do
is_list(unquote(data)) or is_binary(unquote(data))
end
end
@doc """
Reads `count` characters from the IO device or until
the end of the line if `:line` is given. It returns:
* `data` - The input characters.
* `:eof` - End of file was encountered.
* `{:error, reason}` - Other (rare) error condition,
for instance `{:error, :estale}` if reading from an
NFS file system.
"""
@spec read(device, :line | non_neg_integer) :: chardata | nodata
def read(device \\ group_leader, chars_or_line)
def read(device, :line) do
:io.get_line(map_dev(device), '')
end
def read(device, count) when count >= 0 do
:io.get_chars(map_dev(device), '', count)
end
@doc """
Reads `count` bytes from the IO device or until
the end of the line if `:line` is given. It returns:
* `data` - The input characters.
* `:eof` - End of file was encountered.
* `{:error, reason}` - Other (rare) error condition,
for instance `{:error, :estale}` if reading from an
NFS file system.
"""
@spec binread(device, :line | non_neg_integer) :: iodata | nodata
def binread(device \\ group_leader, chars_or_line)
def binread(device, :line) do
case :file.read_line(map_dev(device)) do
{ :ok, data } -> data
other -> other
end
end
def binread(device, count) when count >= 0 do
case :file.read(map_dev(device), count) do
{ :ok, data } -> data
other -> other
end
end
@doc """
Writes `item` to `device` (standard output by default).

`item` must be chardata (a char list or a UTF-8 binary);
other terms are converted via the `String.Chars` protocol.
Returns `:ok` if it succeeds.

## Examples

    IO.write "sample"
    #=> "sample"

    IO.write :stderr, "error"
    #=> "error"

"""
@spec write(device, chardata) :: :ok
def write(device \\ group_leader(), item) do
  device
  |> map_dev
  |> :io.put_chars(to_chardata(item))
end
@doc """
Writes `item` (iodata) to `device` as-is; no unicode
conversion happens. Check `write/2` for more information.
"""
@spec binwrite(device, iodata) :: :ok | { :error, term }
def binwrite(device \\ group_leader(), item) when is_iolist(item) do
  erl_dev = map_dev(device)
  :file.write(erl_dev, item)
end
@doc """
Like `write/2`, but appends a newline to the chardata argument.
"""
@spec puts(device, chardata) :: :ok
def puts(device \\ group_leader(), item) do
  :io.put_chars(map_dev(device), [to_chardata(item), ?\n])
end
@doc """
Inspects `item` and writes the result to the group leader,
followed by a newline.

Pretty printing defaults to true, and the width defaults to
the width of the device with a minimum of 80 characters.

## Examples

    IO.inspect Process.list

"""
@spec inspect(term, Keyword.t) :: term
def inspect(item, opts \\ []) do
  inspect(group_leader(), item, opts)
end
@doc """
Inspects `item` with `opts` and writes the result to `device`,
followed by a newline. Returns `item`, so the call can be
dropped into a pipeline.
"""
@spec inspect(device, term, Keyword.t) :: term
def inspect(device, item, opts) when is_list(opts) do
  opts = Keyword.put_new(opts, :pretty, true)

  # BUG FIX: rebinding `opts` inside an `unless` block does not leak
  # out of the block, so the queried device width was discarded.
  # Bind the result of the whole conditional expression instead.
  opts =
    if Keyword.get(opts, :width) do
      opts
    else
      case :io.columns(device) do
        { :ok, width } -> [width: width] ++ opts
        { :error, _ } -> opts
      end
    end

  puts device, Kernel.inspect(item, opts)
  item
end
@doc """
Gets a number of bytes from the IO device. If the device is a
unicode device, `count` means the number of unicode codepoints;
otherwise it is the number of raw bytes. Returns:

* `data` - The input characters.
* `:eof` - End of file was encountered.
* `{:error, reason}` - Other (rare) error condition, for
  instance `{:error, :estale}` if reading from an NFS file
  system.
"""
@spec getn(chardata, pos_integer) :: chardata | nodata
@spec getn(device, chardata) :: chardata | nodata
def getn(prompt, count \\ 1)

# An integer second argument means the first one is the prompt and
# the device defaults to the group leader.
def getn(prompt, count) when is_integer(count), do: getn(group_leader, prompt, count)

# Otherwise the arguments are a device and a prompt; read a single
# character/byte.
def getn(device, prompt), do: getn(device, prompt, 1)
@doc """
Gets `count` bytes (or unicode codepoints, when the device is a
unicode device) from `device`, printing `prompt` first.
"""
@spec getn(device, chardata, pos_integer) :: chardata | nodata
def getn(device, prompt, count) do
  erl_dev = map_dev(device)
  :io.get_chars(erl_dev, to_chardata(prompt), count)
end
@doc """
Reads a line from the IO device, printing `prompt` first. Returns:

* `data` - The characters in the line terminated by a LF
  (or end of file).
* `:eof` - End of file was encountered.
* `{:error, reason}` - Other (rare) error condition, for
  instance `{:error, :estale}` if reading from an NFS file
  system.
"""
@spec gets(device, chardata) :: chardata | nodata
def gets(device \\ group_leader(), prompt) do
  device |> map_dev |> :io.get_line(to_chardata(prompt))
end
@doc """
Converts the IO device into an `IO.Stream`, read as UTF-8.

An `IO.Stream` implements both `Enumerable` and `Collectable`,
so it can be used for both reading and writing. Iteration is
line by line when `:line` is given, otherwise by the given
number of codepoints. Check out `IO.binstream/2` to handle the
IO as a raw binary.

Note that an IO stream has side effects: enumerating the same
stream twice may yield different results.

## Examples

Here is an example on how we mimic an echo server
from the command line:

    Enum.each IO.stream(:stdio, :line), &IO.write(&1)

"""
@spec stream(device, :line | pos_integer) :: Enumerable.t
def stream(device, line_or_codepoints) do
  %IO.Stream{device: map_dev(device), raw: false, line_or_bytes: line_or_codepoints}
end
@doc """
Converts the IO device into an `IO.Stream`, read as a raw binary.

An `IO.Stream` implements both `Enumerable` and `Collectable`,
so it can be used for both reading and writing. Iteration is
line by line or by a number of bytes.

Note that an IO stream has side effects: enumerating the same
stream twice may yield different results.
"""
@spec binstream(device, :line | pos_integer) :: Enumerable.t
def binstream(device, line_or_bytes) do
  %IO.Stream{device: map_dev(device), raw: true, line_or_bytes: line_or_bytes}
end
# Step function used by IO.Stream enumeration: returns the next
# { element, accumulator } pair, nil at end of file, and raises on
# device errors.
@doc false
def each_stream(device, what) do
  case read(device, what) do
    :eof -> nil
    { :error, reason } -> raise IO.StreamError, reason: reason
    data -> { data, device }
  end
end
# Raw-binary counterpart of each_stream/2, driven by binread/2.
@doc false
def each_binstream(device, what) do
  case binread(device, what) do
    :eof -> nil
    { :error, reason } -> raise IO.StreamError, reason: reason
    data -> { data, device }
  end
end
# Map the Elixir names for standard io and error to Erlang names
defp map_dev(:stdio), do: :standard_io
defp map_dev(:stderr), do: :standard_error
# Any other device is passed through untouched: a pid, a registered
# process name, or a tuple (presumably :file's file-descriptor style
# device — TODO confirm which tuple shapes callers actually pass).
defp map_dev(other) when is_atom(other) or is_pid(other) or is_tuple(other), do: other
# Lists are assumed to already be chardata and are passed through;
# any other term is converted via the String.Chars protocol.
defp to_chardata(list) when is_list(list), do: list
defp to_chardata(other), do: to_string(other)
end
|
lib/elixir/lib/io.ex
| 0.876145
| 0.679219
|
io.ex
|
starcoder
|
defmodule IP2Country do
  @moduledoc """
  IP2Country is a simple geolocating module (IP -> Country Code) with built-in database.

  Database is provided by https://db-ip.com, and it is compiled into the module. Compilation
  time is about 20 seconds on a 2-core machine. Searching time is about 4µs.

  To update the database, download it from https://db-ip.com/db/download/country, unzip and
  replace the file located in `lib/db`.
  """

  @db "db/dbip-country.csv"
  # Recompile this module whenever the database file changes.
  @external_resource @db

  import IP2Country.Converters

  IO.puts "Compiling IP database ..."

  # Compile the CSV into a sorted tuple of {range_start, range_end, country_code}
  # triples. A tuple (rather than a list) gives O(1) `elem/2` access for the
  # binary search below.
  dbip_list =
    File.stream!(Path.join([__DIR__, @db]), [], :line)
    |> Stream.filter(&String.contains?(&1, ".")) # keep only lines with IPv4 addresses
    |> Stream.map(&decode_line/1)
    |> Stream.map(fn [ip1, ip2, country] ->
      {ip2integer(ip1), ip2integer(ip2), String.to_atom(country)}
    end)
    |> Enum.sort()
    |> List.to_tuple

  @dbip_list dbip_list

  # BUG FIX: was "Finshed".
  IO.puts "Finished compiling IP database"

  @doc """
  Retrieves country code based on the IP address.

  Expects string with a proper IP as an argument. Currently works only with IPv4.
  Returns atom with the Country Code, uppercased, or `:ZZ` for private addresses.

  ## Examples

      iex> IP2Country.whereis("172.16.17.32")
      :FR
      iex> IP2Country.whereis("0.0.0.0")
      :ZZ
      iex> IP2Country.whereis("255.255.255.255")
      :ZZ
      iex> IP2Country.whereis("192.168.127.12")
      :PL
      iex> IP2Country.whereis("2.17.213.0")
      :SE
      iex> IP2Country.whereis("2.17.213.1")
      :SE

  """
  @spec whereis(String.t) :: atom
  def whereis(ip) when is_binary(ip) do
    ipint = ip2integer(ip)
    t = @dbip_list
    {:ok, country} = bsearch(ipint, t, 0, tuple_size(t))
    country
  end

  def whereis({a, b, c, d}) do
    whereis("#{a}.#{b}.#{c}.#{d}")
  end

  # bsearch borrowed from microcrawler:
  # https://github.com/ApolloCrawler/microcrawler-webapp/blob/master/lib/microcrawler_webapp/ip_info.ex

  # BUG FIX: when the address falls into a gap between ranges (or outside
  # the table) the search space becomes empty; previously the recursion
  # continued with invalid indices. Report "unknown" (:ZZ) instead.
  defp bsearch(_ip, _ranges, low, high) when low > high, do: {:ok, :ZZ}

  defp bsearch(ip, ranges, low, high) do
    mid = div(low + high, 2)
    {start, stop, code} = elem(ranges, mid)

    case {ip >= start, ip <= stop} do
      {true, true} -> {:ok, code}
      {true, false} -> bsearch(ip, ranges, mid + 1, high)
      {false, true} -> bsearch(ip, ranges, low, mid - 1)
    end
  end
end
|
lib/ip2country.ex
| 0.820541
| 0.464962
|
ip2country.ex
|
starcoder
|
defmodule Exeration.Operation.Argument do
  defmodule Invalid do
    defexception message: "Argument is not in a valid format"
  end

  alias Exeration.Operation.Argument

  @type t :: %Exeration.Operation.Argument{
          name: atom(),
          type: atom(),
          required: boolean() | nil,
          struct: struct() | nil
        }

  @enforce_keys [:name, :type, :required]
  defstruct [:name, :type, :required, :struct]

  # Built-in validator types; custom validators registered in the
  # application environment are appended by `allowed_types/0` below.
  @allowed_types [
    :boolean,
    :integer,
    :float,
    :string,
    :tuple,
    :map,
    :struct,
    :list,
    :atom,
    :function,
    :dont_check
  ]

  # A macro so the result can be used in guards (`type in allowed_types()`).
  defmacrop allowed_types do
    Application.get_env(:exeration, :custom_validators, [])
    |> Keyword.keys()
    |> Enum.concat(@allowed_types)
  end

  @doc false
  def cast(params) when is_list(params) do
    Enum.into(params, %{}) |> cast()
  end

  @spec cast(%{
          name: atom(),
          type:
            :atom
            | :boolean
            | :dont_check
            | :float
            | :function
            | :integer
            | :list
            | :map
            | :string
            | :struct
            | :tuple
        }) :: Exeration.Operation.Argument.t()
  # BUG FIX: reject `struct: nil` here (it falls through to the error
  # clause below), matching the optional-`required` clause which already
  # checked `not is_nil(struct)`.
  def cast(%{name: name, type: :struct, struct: struct, required: required})
      when is_atom(name) and is_atom(struct) and not is_nil(struct) and is_boolean(required) do
    %Argument{name: name, type: :struct, struct: struct, required: required}
  end

  def cast(%{name: name, type: :struct, struct: struct})
      when is_atom(name) and is_atom(struct) and not is_nil(struct) do
    %Argument{name: name, type: :struct, struct: struct, required: false}
  end

  def cast(%{name: name, type: :struct})
      when is_atom(name) do
    raise Invalid, message: "Type `:struct` requires a struct module as the value of `:struct`"
  end

  def cast(%{name: name, type: type, required: required})
      when is_atom(name) and type in allowed_types() and is_boolean(required) do
    %Argument{name: name, type: type, required: required}
  end

  def cast(%{name: name, type: type})
      when is_atom(name) and type in allowed_types() do
    %Argument{name: name, type: type, required: false}
  end

  def cast(%{type: type}) when type not in allowed_types() do
    raise Invalid,
      message:
        "Type `#{type}` is not an allowed type or a registered custom type, allowed types: " <>
          Enum.join(allowed_types(), ", ")
  end

  def cast(%{name: name}) do
    raise Invalid, message: "Argument #{name} is not in a valid format"
  end

  def cast(_) do
    raise Invalid
  end
end
|
lib/exeration/annotations/argument.ex
| 0.79546
| 0.406126
|
argument.ex
|
starcoder
|
defmodule Gorpo.Announce do
  @moduledoc """
  register one or more services on consul and periodically update its
  health status. Normally, you should start :gorpo otp application and
  declare the services in the config file. Refer to `Gorpo` module for
  more information about this. If you ever need to register a service
  dynamically:

      iex> service = %Gorpo.Service{id: "foobar", name: "foobar", check: %Gorpo.Check{}}
      iex> Gorpo.Announce.register(service)
      :ok

  To unregister a service:

      iex> service = %Gorpo.Service{id: "foobar", name: "foobar", check: %Gorpo.Check{}}
      iex> :ok = Gorpo.Announce.register(service)
      iex> Gorpo.Announce.unregister(service)
      :ok

  Lastly, you may ask for the `Gorpo.Announce.Unit` pid [and then check its status]:

      iex> service = %Gorpo.Service{id: "foobar", name: "foobar"}
      iex> :ok = Gorpo.Announce.register(service)
      iex> Gorpo.Announce.whereis(service)
      ...> |> Gorpo.Announce.Unit.stat
      ...> |> Keyword.keys
      [:service, :heartbeat]
  """

  use GenServer

  require Logger

  # :services   - the services passed in at boot time
  # :consul     - the Gorpo.Consul client configuration
  # :supervisor - pid of the one_for_one supervisor that owns one
  #               Gorpo.Announce.Unit worker per registered service
  defstruct [:services, :consul, :supervisor]

  @typep state :: %__MODULE__{
    services: [Gorpo.Service.t],
    consul: Gorpo.Consul.t,
    supervisor: pid
  }

  @spec start_link(Gorpo.Consul.t, [Gorpo.Service.t]) :: GenServer.on_start
  @doc """
  Starts this process.

  You must provide a valid Consul structure in the first argument and an
  optional list of services. Notice that this process gets started by the
  `Gorpo` application which means you shouldn't need to manage it directly.
  """
  def start_link(consul, services \\ []) do
    GenServer.start_link(__MODULE__, [services: services, consul: consul], name: __MODULE__)
  end

  @spec init(Keyword.t) :: {:ok, state}
  @doc false
  def init(params) do
    consul = Keyword.fetch!(params, :consul)
    services = Keyword.fetch!(params, :services)

    # Each service runs in its own Gorpo.Announce.Unit worker under a
    # dedicated one_for_one supervisor, so one failing unit restarts
    # without affecting the others.
    {:ok, supervisor} =
      services
      |> Enum.map(& child_service(consul, &1))
      |> Supervisor.start_link(strategy: :one_for_one)

    state = %__MODULE__{
      consul: consul,
      services: services,
      supervisor: supervisor
    }

    {:ok, state}
  end

  @spec register(Gorpo.Service.t) :: :ok | {:error, term}
  @doc """
  Registers a service.

  It uses the `Gorpo.Service.id` to avoid registering the process twice.
  Nevertheless, it is ok to invoke this function multiple times - only one
  process will get registered.

  Each service starts a `Gorpo.Announce.Unit` process. You may use the `whereis`
  to find its pid later.

      iex> service = %Gorpo.Service{id: "foo", name: "bar"}
      iex> :ok = Gorpo.Announce.register(service)
      iex> Gorpo.Announce.register(service)
      :ok
  """
  def register(service) do
    GenServer.call(__MODULE__, {:register, service})
  end

  @spec unregister(Gorpo.Service.t) :: :ok | {:error, term}
  @doc """
  Unregisters a service.

  Differently from register it is an error to try to unregister a service that
  doesn't exist.

      iex> service = %Gorpo.Service{id: "foo", name: "bar"}
      iex> :ok = Gorpo.Announce.register(service)
      iex> :ok = Gorpo.Announce.unregister(service)
      iex> Gorpo.Announce.unregister(service)
      {:error, :not_found}
  """
  def unregister(service) do
    GenServer.call(__MODULE__, {:unregister, service})
  end

  @spec whereis(Gorpo.Service.t) :: pid | :unknown
  @doc """
  Returns the pid of the `Gorpo.Announce.Unit` process of a given service.

  Returns either the pid of the process or `:unknown`.
  """
  def whereis(service) do
    GenServer.call(__MODULE__, {:whereis, service})
  end

  @spec terminate(term, state) :: :ok
  @doc false
  def terminate(reason, state) do
    Supervisor.stop(state.supervisor, reason)
  end

  @spec handle_call(:killall, GenServer.from, state) :: {:reply, :ok, state}
  @doc false
  def handle_call(:killall, _, state) do
    # Terminate every unit worker and, only if termination succeeded,
    # remove its child spec so it cannot be restarted.
    state.supervisor
    |> Supervisor.which_children()
    |> Enum.each(fn {id, _, _, _} ->
      with :ok <- Supervisor.terminate_child(state.supervisor, id) do
        Supervisor.delete_child(state.supervisor, id)
      end
    end)

    {:reply, :ok, state}
  end

  @spec handle_call({:whereis, Gorpo.Service.t}, GenServer.from, state) :: {:reply, pid | :unknown, state}
  def handle_call({:whereis, service}, _, state) do
    service_id = Gorpo.Service.id(service)

    # find_value: the `&&` chain yields the pid only when both the id
    # and the :worker type match; otherwise falls back to :unknown.
    location =
      state.supervisor
      |> Supervisor.which_children()
      |> Enum.find_value(:unknown, fn {id, pid, type, _} ->
        id == service_id
        && type == :worker
        && pid
      end)

    {:reply, location, state}
  end

  @spec handle_call({:register, Gorpo.Service.t}, GenServer.from, state) :: {:reply, :ok | {:error, term}, state}
  def handle_call({:register, service}, _, state) do
    child = child_service(state.consul, service)

    # Registering the same service twice is fine: :already_started is
    # treated as success.
    case Supervisor.start_child(state.supervisor, child) do
      {:error, {:already_started, _pid}} ->
        {:reply, :ok, state}

      {:ok, _pid} ->
        {:reply, :ok, state}

      error ->
        {:reply, error, state}
    end
  end

  @spec handle_call({:unregister, Gorpo.Service.t}, GenServer.from, state) :: {:reply, :ok | {:error, :not_found}, state}
  def handle_call({:unregister, service}, _, state) do
    service_id = Gorpo.Service.id(service)

    case Supervisor.terminate_child(state.supervisor, service_id) do
      :ok ->
        # Drop the child spec too, so a later register starts fresh.
        Supervisor.delete_child(state.supervisor, service_id)
        {:reply, :ok, state}

      {:error, :not_found} ->
        {:reply, {:error, :not_found}, state}
    end
  end

  @spec child_service(Gorpo.Consul.t, Gorpo.Service.t) :: Supervisor.Spec.spec
  # Builds the child spec for one service. :transient means the unit is
  # restarted only on abnormal exit.
  defp child_service(consul, service) do
    Supervisor.Spec.worker(
      Gorpo.Announce.Unit,
      [[consul: consul, service: service]],
      id: Gorpo.Service.id(service),
      restart: :transient,
      shutdown: 5_000)
  end
end
|
lib/gorpo/announce.ex
| 0.737158
| 0.467636
|
announce.ex
|
starcoder
|
defmodule Freshcom.Include do
  @moduledoc """
  Converts JSON API style include strings into Ecto preloads.
  """

  alias Ecto.Queryable

  @doc """
  Converts `include` (either a JSON API style string or an already
  nested keyword/atom list) into Ecto preloads for `schema`.

  Associations that do not exist on `schema` are silently skipped.
  """
  @spec to_ecto_preloads(module, String.t() | list, map) :: list
  def to_ecto_preloads(schema, include, filters \\ %{})

  def to_ecto_preloads(_, [], _), do: []
  def to_ecto_preloads(_, nil, _), do: []

  def to_ecto_preloads(schema, include, filters) when is_binary(include) do
    to_ecto_preloads(schema, to_preload_paths(include), filters)
  end

  def to_ecto_preloads(schema, [assoc | rest], filters) do
    to_ecto_preloads(schema, assoc, filters) ++ to_ecto_preloads(schema, rest, filters)
  end

  def to_ecto_preloads(schema, {assoc, nested}, filters) do
    reflection = schema.__schema__(:association, assoc)

    if reflection do
      query = Queryable.to_query(reflection.queryable)
      assoc_schema = reflection.related
      nested_preload = to_ecto_preloads(assoc_schema, nested, filters)

      Keyword.put([], assoc, {query, nested_preload})
    else
      # Unknown association: skip instead of raising.
      []
    end
  end

  def to_ecto_preloads(schema, assoc, filters) when is_atom(assoc) do
    to_ecto_preloads(schema, {assoc, nil}, filters)
  end

  @doc """
  Converts JSON API style include string to a keyword list that can be
  passed in to `Repo.preload`.
  """
  @spec to_preload_paths(String.t()) :: keyword
  def to_preload_paths(include_paths) when byte_size(include_paths) == 0, do: []

  def to_preload_paths(include_paths) do
    include_paths
    |> String.split(",")
    # Shallower paths first, so root keys exist in the accumulator
    # before any chained path needs to be merged into them.
    |> Enum.sort_by(fn item -> length(String.split(item, ".")) end)
    |> Enum.reduce([], fn item, acc ->
      preload = to_preload_path(item)

      # If it is a chained preload and the root key already exists in
      # acc then we need to merge it under the existing root key.
      with [{key, value}] <- preload,
           true <- Keyword.has_key?(acc, key) do
        existing_value = Keyword.get(acc, key)

        index =
          Enum.find_index(acc, fn entry ->
            is_tuple(entry) && elem(entry, 0) == key
          end)

        List.update_at(acc, index, fn _ ->
          {key, List.flatten([existing_value]) ++ value}
        end)
      else
        _ -> acc ++ preload
      end
    end)
  end

  # Converts one dotted path ("line_items.product") into a nested
  # preload path ([line_items: [:product]]).
  defp to_preload_path(preload) do
    preload
    |> Inflex.underscore()
    |> String.split(".")
    # `to_existing_atom` avoids creating atoms from client input.
    |> Enum.map(&String.to_existing_atom/1)
    |> nestify()
  end

  # Pattern matching instead of O(n) `length/1` checks.
  defp nestify([only]), do: [only]
  defp nestify(list), do: r_nestify(list)

  defp r_nestify([last]), do: last
  defp r_nestify([head | tail]), do: Keyword.put([], head, r_nestify(tail))
end
|
lib/freshcom/core/include.ex
| 0.596903
| 0.428951
|
include.ex
|
starcoder
|
defmodule Xgit.PersonIdent do
  @moduledoc ~S"""
  A combination of a person identity and time in git.
  """

  alias Xgit.Util.ParseCharlist
  alias Xgit.Util.ParseDecimal

  import Xgit.Util.ForceCoverage

  @typedoc "Time zone offset in minutes +/- from GMT."
  @type tz_offset :: -720..840

  @typedoc ~S"""
  The tuple of name, email, time, and time zone that specifies who wrote or
  committed something.

  ## Struct Members

  * `:name`: (string) human-readable name of the author or committer
  * `:email`: (string) e-mail address for this person
  * `:when`: (integer) time in the Unix epoch in milliseconds
  * `:tz_offset`: (integer) time zone offset from GMT in minutes
  """
  @type t :: %__MODULE__{
          name: String.t(),
          email: String.t(),
          when: integer,
          tz_offset: tz_offset()
        }

  @enforce_keys [:name, :email, :when, :tz_offset]
  defstruct [:name, :email, :when, :tz_offset]

  @doc ~S"""
  Parse a name line (e.g. author, committer, tagger) into a `PersonIdent` struct.

  ## Parameters

  `b` should be a charlist of an "author" or "committer" line pointing to the
  character after the header name and space.

  ## Return Value

  Returns a `PersonIdent` struct or `nil` if the charlist did not point to a
  properly-formatted identity.
  """
  @spec from_byte_list(b :: [byte]) :: t() | nil
  def from_byte_list(b) when is_list(b) do
    # Split at the first `<`: the name precedes it, the e-mail (and the
    # time/timezone suffix) follows. Any step failing to match makes the
    # whole `with` fall through to `nil`.
    with {_, [?< | email_start]} <- Enum.split_while(b, &(&1 != ?<)),
         true <- has_closing_angle_bracket?(email_start),
         {email, _} <- Enum.split_while(email_start, &(&1 != ?>)),
         name <- parse_name(b),
         {time, tz} <- parse_tz(email_start) do
      %__MODULE__{
        name: ParseCharlist.decode_ambiguous_charlist(name),
        email: ParseCharlist.decode_ambiguous_charlist(email),
        when: time,
        tz_offset: tz
      }
    else
      _ -> cover nil
    end
  end

  defp has_closing_angle_bracket?(b), do: Enum.any?(b, &(&1 == ?>))

  # The name is everything before `<`, with a single trailing space
  # (the conventional "name <email>" separator) removed.
  defp parse_name(b) do
    b
    |> Enum.take_while(&(&1 != ?<))
    |> Enum.reverse()
    |> drop_first_if_space()
    |> Enum.reverse()
  end

  defp drop_first_if_space([?\s | b]), do: cover(b)
  defp drop_first_if_space(b), do: cover(b)

  defp parse_tz(first_email_start) do
    # Start searching from end of line, as after first name-email pair,
    # another name-email pair may occur. We will ignore all kinds of
    # "junk" following the first email.
    [?> | first_email_end] = Enum.drop_while(first_email_start, &(&1 != ?>))
    rev = Enum.reverse(first_email_end)

    # Last whitespace-separated word is the timezone, the one before it
    # the epoch time.
    {tz, rev} = trim_word_and_rev(rev)
    {time, _rev} = trim_word_and_rev(rev)

    case {time, tz} do
      {[_ | _], [_ | _]} ->
        {time |> ParseDecimal.from_decimal_charlist() |> elem(0),
         tz |> parse_timezone_offset() |> elem(0)}

      _ ->
        # Either word missing: treat as epoch zero / GMT.
        cover {0, 0}
    end
  end

  # Strips leading spaces from a reversed charlist, then returns the
  # next word (re-reversed into normal order) plus the remainder.
  defp trim_word_and_rev(rev) do
    rev = Enum.drop_while(rev, &(&1 == ?\s))

    word =
      rev
      |> Enum.take_while(&(&1 != ?\s))
      |> Enum.reverse()

    cover {word, Enum.drop(rev, Enum.count(word))}
  end

  # The offset is a signed decimal of the form ±HHMM (e.g. -0730).
  # div/rem by 100 split hours and minutes; for negative offsets both
  # parts carry the sign, so the sum remains correct.
  defp parse_timezone_offset(b) do
    {v, b} = ParseDecimal.from_decimal_charlist(b)

    tz_min = rem(v, 100)
    tz_hour = div(v, 100)

    cover {tz_hour * 60 + tz_min, b}
  end

  @doc ~S"""
  Sanitize the given string for use in an identity and append to output.

  Trims whitespace from both ends and special characters `\n < >` that
  interfere with parsing; appends all other characters to the output.
  """
  @spec sanitized(s :: String.t()) :: String.t()
  def sanitized(s) when is_binary(s) do
    s
    |> String.trim()
    |> String.replace(~r/[<>\x00-\x0C\x0E-\x1F]/, "")
  end

  @doc ~S"""
  Formats a timezone offset.
  """
  @spec format_timezone(offset :: tz_offset()) :: String.t()
  def format_timezone(offset) when is_integer(offset) do
    sign =
      if offset < 0 do
        cover "-"
      else
        cover "+"
      end

    offset =
      if offset < 0 do
        cover -offset
      else
        offset
      end

    offset_hours = div(offset, 60)
    offset_mins = rem(offset, 60)

    # Zero-pad hours and minutes to two digits each.
    hours_prefix =
      if offset_hours < 10 do
        cover "0"
      else
        cover ""
      end

    mins_prefix =
      if offset_mins < 10 do
        cover "0"
      else
        cover ""
      end

    cover "#{sign}#{hours_prefix}#{offset_hours}#{mins_prefix}#{offset_mins}"
  end

  @doc ~S"""
  Returns `true` if the struct is a valid `PersonIdent`.
  """
  @spec valid?(person_ident :: any) :: boolean
  def valid?(person_ident)

  def valid?(%__MODULE__{name: name, email: email, when: whxn, tz_offset: tz_offset})
      when is_binary(name) and is_binary(email) and is_integer(whxn) and is_integer(tz_offset) and
             tz_offset >= -720 and tz_offset <= 840,
      do: cover(true)

  def valid?(_), do: cover(false)

  @doc ~S"""
  Formats the person identity for git storage.
  """
  @spec to_external_string(person_ident :: t) :: String.t()
  def to_external_string(person_ident)

  # git stores times in whole seconds, hence div(whxn, 1000).
  def to_external_string(%__MODULE__{name: name, email: email, when: whxn, tz_offset: tz_offset})
      when is_binary(name) and is_binary(email) and is_integer(whxn) and is_integer(tz_offset) do
    cover "#{sanitized(name)} <#{sanitized(email)}> #{div(whxn, 1000)} #{
      format_timezone(tz_offset)
    }"
  end

  defimpl String.Chars do
    defdelegate to_string(person_ident), to: Xgit.PersonIdent, as: :to_external_string
  end
end
|
lib/xgit/person_ident.ex
| 0.829596
| 0.417004
|
person_ident.ex
|
starcoder
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.